text stringlengths 8 4.13M |
|---|
use crate::peripherals::{R16, R8};
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone)]
/// A decoded CPU instruction.
///
/// Variant-name conventions (inferred from the operand types — confirm against
/// the decoder):
/// - `R` / `R16`: an 8-bit / 16-bit register operand.
/// - `N`: an immediate value (`u8` or `u16`).
/// - `iR` / `iN` / `iHL`: indirect access through a register / an immediate
///   address / the HL register pair.
/// - `D`: a signed 8-bit displacement (`i8`).
///
/// NOTE(review): the presence of `SWAP`, `LDI`/`LDD` and `LD_HL_SP_D` suggests
/// a Game Boy (SM83/LR35902)-style instruction set — confirm.
pub enum Instruction {
    //8-bit load
    LD_R_R(R8, R8),
    LD_R_N(R8, u8),
    LD_iR_R(R16, R8),
    LD_iR_N(R16, u8),
    LD_R_iR(R8, R16),
    LD_R_iN(R8, u16),
    LD_iN_R(u16, R8),
    LDI_iR_R(R16, R8),
    LDI_R_iR(R8, R16),
    LDD_iR_R(R16, R8),
    LDD_R_iR(R8, R16),
    //16-bit load
    LD_R16_N(R16, u16),
    LD_iN_R16(u16, R16),
    LD_R16_R16(R16, R16),
    PUSH(R16),
    POP(R16),
    //8-bit arithmetic/logic
    ADD_R(R8),
    ADD_N(u8),
    ADD_iR(R16),
    ADC_R(R8),
    ADC_N(u8),
    ADC_iR(R16),
    SUB_R(R8),
    SUB_N(u8),
    SUB_iR(R16),
    SBC_R(R8),
    SBC_N(u8),
    SBC_iR(R16),
    AND_R(R8),
    AND_N(u8),
    AND_iR(R16),
    XOR_R(R8),
    XOR_N(u8),
    XOR_iR(R16),
    OR_R(R8),
    OR_N(u8),
    OR_iR(R16),
    CMP_R(R8),
    CMP_N(u8),
    CMP_iR(R16),
    INC_R(R8),
    INC_iR(R16),
    DEC_R(R8),
    DEC_iR(R16),
    DAA,
    CPL,
    //16-bit arithmetic/logic
    ADD_R16(R16),
    INC_R16(R16),
    DEC_R16(R16),
    ADD_SP_D(i8),
    LD_HL_SP_D(i8),
    //rotate and shift
    RLCA,
    RLA,
    RRCA,
    RRA,
    RLC_R(R8),
    RLC_iHL,
    RL_R(R8),
    RL_iHL,
    RRC_R(R8),
    RRC_iHL,
    RR_R(R8),
    RR_iHL,
    SLA_R(R8),
    SLA_iHL,
    SWAP_R(R8),
    SWAP_iHL,
    SRA_R(R8),
    SRA_iHL,
    SRL_R(R8),
    SRL_iHL,
    //1-bit operation
    BIT_N_R(u8, R8),
    BIT_N_iHL(u8),
    SET_N_R(u8, R8),
    SET_N_iHL(u8),
    RES_N_R(u8, R8),
    RES_N_iHL(u8),
    //CPU control
    CCF,
    SCF,
    NOP,
    HALT,
    STOP,
    DI,
    EI,
    //jump
    JP_N(u16),
    JP_HL,
    JP_C_N(Condition, u16),
    JR_D(i8),
    JR_C_D(Condition, i8),
    CALL_N(u16),
    CALL_C_N(Condition, u16),
    RET,
    RET_C(Condition),
    RETI,
    RST(u8),
}
#[derive(Debug, Copy, Clone)]
/// Flag-test condition used by conditional jumps, calls and returns.
pub enum Condition {
    /// Zero flag clear.
    NZ,
    /// Zero flag set.
    Z,
    /// Carry flag clear.
    NC,
    /// Carry flag set.
    C,
}
|
//! This module deals with configuration data including the management of the list of secrets
#![allow(clippy::manual_filter_map)]
use rand::Rng;
use thiserror::Error;
//use serde::Deserialize;
use serde_derive::Deserialize;
/// A tag to enclose parts of the secret to be visible from the start, e.g.
/// "guess_-me_" will be displayed in the game as "_ _ _ _ _ - m e"
pub const CONF_LINE_SECRET_MODIFIER_VISIBLE: char = '_';
/// A tag to insert a linebreak when the secret is displayed.
pub const CONF_LINE_SECRET_MODIFIER_LINEBREAK1: char = '\n';
/// Alternative tag with the same effect: a literal `|` in a secret also
/// inserts a linebreak when the secret is displayed.
pub const CONF_LINE_SECRET_MODIFIER_LINEBREAK2: char = '|';
// Custom error type used expressing potential syntax errors when parsing the configuration file.
#[derive(Error, Debug)]
pub enum ConfigParseError {
#[error(
"Syntax error in line {line_number:?}: `{line}`\n\n\
The game modifier must be one of the following:\n\
:traditional-rewarding\n\
:success-rewarding\n\n\
Edit config file and start again.\n"
)]
GameModifier { line_number: usize, line: String },
#[error(
"Syntax error in line {line_number:?}: `{line}`\n\n\
The first character of every non-empty line has to be one of the following:\n\
any letter or digit (secret string),\n\
'#' (comment line),\n\
'-' (secret string),\n\
'|' (ASCII-Art image) or\n\
':' (game modifier).\n\n\
Edit config file and start again.\n"
)]
LineIdentifier { line_number: usize, line: String },
#[error["No image data found."]]
NoImageData,
#[error["A config file must have a least one secret string, which is\n\
a non-empty line starting with a letter, digit, '_' or '-'."]]
NoSecretString,
#[error["Could not parse the proprietary format, because this is\n\
meant to be in (erroneous) YAML format."]]
NotInProprietaryFormat,
#[error(
"Syntax error: Please follow the example below.\n\
(The custom image is optional, it's lines start with a space.):\n\
\t------------------------------\n\
\tsecrets: \n\
\t- guess me\n\
\t- \"guess me: with colon\"\n\
\t- line| break\n\
\t- _disclose _partly\n\
\n\
\timage: |1\n\
\t :\n\
\t |_|>\n\
\t------------------------------\n\
{0}"
)]
NotInYamlFormat(#[from] serde_yaml::Error),
#[error["No line: `secrets:` found (no spaces allowed before)."]]
YamlSecretsLineMissing,
}
/// Manual `PartialEq` because `serde_yaml::Error` (inside `NotInYamlFormat`)
/// does not implement `PartialEq`. Two errors compare equal when they are the
/// same variant *and* render to the same display string.
impl PartialEq for ConfigParseError {
    fn eq(&self, other: &Self) -> bool {
        std::mem::discriminant(self) == std::mem::discriminant(other)
            && (self.to_string() == other.to_string())
    }
}
/// A dictionary holding all secret sentences from among whom one is chosen randomly at the
/// beginning of the game.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Dict {
    // The remaining (not yet drawn) secret strings.
    secrets: Vec<String>,
}
impl Dict {
    /// Parses the configuration data as YAML into a `Dict`.
    /// (The original doc said "toml"; the body uses `serde_yaml`.)
    ///
    /// Strips a leading BOM, then requires an unindented `secrets:` line
    /// (ignoring comment and blank lines) before handing the input to the
    /// YAML parser.
    ///
    /// # Errors
    /// - `ConfigParseError::YamlSecretsLineMissing` when no `secrets:` line exists.
    /// - `ConfigParseError::NotInYamlFormat` when the YAML parser rejects the input.
    pub fn from(lines: &str) -> Result<Self, ConfigParseError> {
        // Trim BOM
        let lines = lines.trim_start_matches('\u{feff}');
        if !lines
            .lines()
            .filter(|s| !s.trim_start().starts_with('#'))
            .filter(|s| s.trim() != "")
            .any(|s| s.trim_end() == "secrets:")
        {
            return Err(ConfigParseError::YamlSecretsLineMissing);
        }
        let dict: Dict = serde_yaml::from_str(lines)?;
        Ok(dict)
    }
    /// Chooses one secret at random, removes it from the list and returns it.
    /// Returns `None` once the list is exhausted.
    pub fn get_random_secret(&mut self) -> Option<String> {
        if self.secrets.is_empty() {
            None
        } else {
            // `gen_range(0..1)` always yields 0, so the single-element case
            // needs no special arm (the original matched on `1 =>`).
            let i = rand::thread_rng().gen_range(0..self.secrets.len());
            Some(self.secrets.swap_remove(i))
        }
    }
    /// Are there no secrets left?
    /// (The original doc claimed "exactly one secret left", which the code
    /// never checked.)
    pub fn is_empty(&self) -> bool {
        self.secrets.is_empty()
    }
    /// Add a secret to the list.
    pub fn add(&mut self, secret: String) {
        self.secrets.push(secret);
    }
}
// ***********************
#[cfg(test)]
mod tests {
    use super::ConfigParseError;
    use super::Dict;
    /// Parses the supported secret-list shapes and checks the error paths.
    #[test]
    fn test_from() {
        // Comments and unknown top-level keys are tolerated.
        let config: &str = "
# comment
secrets:
- guess me
- hang_man_
- _good l_uck
traditional: true
";
        let dict = Dict::from(&config).unwrap();
        let expected = Dict {
            secrets: vec![
                "guess me".to_string(),
                "hang_man_".to_string(),
                "_good l_uck".to_string(),
            ],
        };
        assert_eq!(dict, expected);
        // Indented list items are valid YAML too.
        let config = "# comment\nsecrets:\n - guess me\n";
        let dict = Dict::from(&config);
        let expected = Ok(Dict {
            secrets: vec!["guess me".to_string()],
        });
        assert_eq!(dict, expected);
        // Unindented list items.
        let config = "# comment\nsecrets:\n- guess me\n";
        let dict = Dict::from(&config);
        let expected = Ok(Dict {
            secrets: vec!["guess me".to_string()],
        });
        assert_eq!(dict, expected);
        // Numeric secrets are read as strings.
        let config = "# comment\nsecrets:\n- 222\n";
        let dict = Dict::from(&config);
        let expected = Ok(Dict {
            secrets: vec!["222".to_string()],
        });
        assert_eq!(dict, expected);
        // A missing `secrets:` line is reported explicitly.
        let config = "sxxxecrets:";
        let dict = Dict::from(&config).unwrap_err();
        assert!(matches!(dict, ConfigParseError::YamlSecretsLineMissing));
        // Malformed YAML below `secrets:` surfaces the parser error.
        let config = "# comment\nsecrets:\n guess me\n";
        let dict = Dict::from(&config).unwrap_err();
        assert!(matches!(dict, ConfigParseError::NotInYamlFormat(_)));
    }
}
|
#![allow(dead_code)]
use std::sync::Arc;
use serde_json::Value;
use tokio::sync::mpsc;
use tokio::sync::RwLock;
use crate::jsonrpc::endpoints::{
Discv5EndpointKind, HistoryEndpointKind, PortalEndpointKind, StateEndpointKind,
};
use crate::jsonrpc::types::{HistoryJsonRpcRequest, PortalJsonRpcRequest, StateJsonRpcRequest};
use crate::portalnet::discovery::Discovery;
/// Sender half used to deliver a `Result` back to the original requester.
type Responder<T, E> = mpsc::UnboundedSender<Result<T, E>>;
/// Main JSON-RPC handler. It dispatches json-rpc requests to the overlay networks.
pub struct JsonRpcHandler {
    // Shared discv5 discovery service (read-locked for info queries).
    pub discovery: Arc<RwLock<Discovery>>,
    // Incoming portal JSON-RPC requests to dispatch.
    pub portal_jsonrpc_rx: mpsc::UnboundedReceiver<PortalJsonRpcRequest>,
    // Channel to the state subnetwork, when that subnetwork is running.
    pub state_jsonrpc_tx: Option<mpsc::UnboundedSender<StateJsonRpcRequest>>,
    // Channel to the chain-history subnetwork, when it is running.
    pub history_jsonrpc_tx: Option<mpsc::UnboundedSender<HistoryJsonRpcRequest>>,
}
impl JsonRpcHandler {
    /// Event loop: services portal JSON-RPC requests until the request
    /// channel closes. Discv5 queries are answered locally; subnetwork
    /// queries are proxied to the matching channel. Each response is sent
    /// back on the request's own responder; send failures are ignored
    /// (the requester may have gone away).
    pub async fn process_jsonrpc_requests(mut self) {
        while let Some(request) = self.portal_jsonrpc_rx.recv().await {
            let response: Value = match request.endpoint {
                PortalEndpointKind::Discv5EndpointKind(endpoint) => match endpoint {
                    Discv5EndpointKind::NodeInfo => {
                        Value::String(self.discovery.read().await.node_info())
                    }
                    Discv5EndpointKind::RoutingTableInfo => {
                        Value::Array(self.discovery.read().await.routing_table_info())
                    }
                },
                PortalEndpointKind::HistoryEndpointKind(endpoint) => {
                    // Subnetwork errors are folded into the JSON response as
                    // plain strings rather than failing the request channel.
                    let response = match self.history_jsonrpc_tx.as_ref() {
                        Some(tx) => proxy_query_to_history_subnet(tx, endpoint).await,
                        None => Err("Chain history subnetwork unavailable.".to_string()),
                    };
                    response.unwrap_or_else(Value::String)
                }
                PortalEndpointKind::StateEndpointKind(endpoint) => {
                    let response = match self.state_jsonrpc_tx.as_ref() {
                        Some(tx) => proxy_query_to_state_subnet(tx, endpoint).await,
                        None => Err("State subnetwork unavailable.".to_string()),
                    };
                    response.unwrap_or_else(Value::String)
                }
                // Endpoint kinds with no handler yet are reported verbatim.
                _ => Value::String(format!(
                    "Can't process portal network endpoint {:?}",
                    request.endpoint
                )),
            };
            let _ = request.resp.send(Ok(response));
        }
    }
}
/// Forwards `endpoint` to the chain-history subnetwork and awaits its single
/// reply, mapping a dropped channel or subnetwork error into a `String`.
async fn proxy_query_to_history_subnet(
    subnet_tx: &mpsc::UnboundedSender<HistoryJsonRpcRequest>,
    endpoint: HistoryEndpointKind,
) -> Result<Value, String> {
    // Fresh response channel per request; send failures are ignored because a
    // closed subnetwork also surfaces as `None` on receive below.
    let (resp, mut receiver) = mpsc::unbounded_channel::<Result<Value, String>>();
    let _ = subnet_tx.send(HistoryJsonRpcRequest { endpoint, resp });
    match receiver.recv().await {
        Some(Ok(result)) => Ok(result),
        Some(Err(msg)) => Err(format!(
            "Error returned from chain history subnetwork: {:?}",
            msg
        )),
        None => Err("No response from chain history subnetwork".to_string()),
    }
}
/// Forwards `endpoint` to the state subnetwork and awaits its single reply,
/// mapping a dropped channel or subnetwork error into a `String`.
async fn proxy_query_to_state_subnet(
    subnet_tx: &mpsc::UnboundedSender<StateJsonRpcRequest>,
    endpoint: StateEndpointKind,
) -> Result<Value, String> {
    // Fresh response channel per request; a closed subnetwork surfaces as
    // `None` on receive.
    let (resp, mut receiver) = mpsc::unbounded_channel::<Result<Value, String>>();
    let _ = subnet_tx.send(StateJsonRpcRequest { endpoint, resp });
    match receiver.recv().await {
        Some(Ok(result)) => Ok(result),
        Some(Err(msg)) => Err(format!("Error returned from state subnetwork: {:?}", msg)),
        None => Err("No response from state subnetwork".to_string()),
    }
}
|
#[macro_use]
extern crate unitary;
/// Demonstrates the `unitary` crate's macros: declares the units `m` and `s`,
/// builds two quantities and prints them, plus a derived quantity
/// (distance / time) and a dimensionless ratio (distance / distance).
fn main() {
    units!(m, s);
    let dist = qty!(1.0; m);
    let time = qty!(2.0; s);
    println!("{}", &dist);
    println!("{}", time);
    // Dividing quantities combines their units (here: m / s).
    println!("{}", dist.clone() / time);
    // A quantity divided by itself renders as a dimensionless value.
    println!("{}", (dist.clone() / dist));
}
|
/*
* Copyright Stalwart Labs Ltd. See the COPYING
* file at the top-level directory of this distribution.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
* https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
* <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
* option. This file may not be copied, modified, or distributed
* except according to those terms.
*/
use serde::{Deserialize, Serialize};
use crate::Method;
use super::{request::ResultReference, Object, RequestParams};
/// Capabilities an object type must declare to be queryable: its extra query
/// arguments, its filter-condition type and its sort-key type.
pub trait QueryObject: Object {
    /// Object-specific arguments flattened into the query request body.
    type QueryArguments: Default + Serialize;
    /// Object-specific filter condition type.
    type Filter: Serialize;
    /// Object-specific sort-argument type.
    type Sort: Serialize;
}
/// A JMAP `Foo/query` method request (field names match RFC 8620 §5 —
/// confirm against the server in use).
#[derive(Debug, Clone, Serialize)]
pub struct QueryRequest<O: QueryObject> {
    // (method, call id) — kept only to build result references; not serialized.
    #[serde(skip)]
    method: (Method, usize),
    #[serde(rename = "accountId")]
    account_id: String,
    #[serde(rename = "filter")]
    #[serde(skip_serializing_if = "Option::is_none")]
    filter: Option<Filter<O::Filter>>,
    #[serde(rename = "sort")]
    #[serde(skip_serializing_if = "Option::is_none")]
    sort: Option<Vec<Comparator<O::Sort>>>,
    #[serde(rename = "position")]
    #[serde(skip_serializing_if = "Option::is_none")]
    position: Option<i32>,
    #[serde(rename = "anchor")]
    #[serde(skip_serializing_if = "Option::is_none")]
    anchor: Option<String>,
    #[serde(rename = "anchorOffset")]
    #[serde(skip_serializing_if = "Option::is_none")]
    anchor_offset: Option<i32>,
    #[serde(rename = "limit")]
    #[serde(skip_serializing_if = "Option::is_none")]
    limit: Option<usize>,
    #[serde(rename = "calculateTotal")]
    #[serde(skip_serializing_if = "Option::is_none")]
    calculate_total: Option<bool>,
    // Object-specific arguments are merged into the same JSON object.
    #[serde(flatten)]
    arguments: O::QueryArguments,
}
/// A filter tree: either a bare condition or an operator node combining
/// sub-filters. Serialized untagged, so each variant serializes as its content.
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
pub enum Filter<T> {
    FilterOperator(FilterOperator<T>),
    FilterCondition(T),
}
/// An AND/OR/NOT node over a list of sub-filters.
#[derive(Debug, Clone, Serialize)]
pub struct FilterOperator<T> {
    operator: Operator,
    conditions: Vec<Filter<T>>,
}
/// Logical operator used to combine filter conditions.
#[derive(Debug, Clone, Serialize, PartialEq, Eq)]
pub enum Operator {
    #[serde(rename = "AND")]
    And,
    #[serde(rename = "OR")]
    Or,
    #[serde(rename = "NOT")]
    Not,
}
/// One sort comparator: direction, optional collation, and object-specific
/// sort arguments flattened into the same JSON object.
#[derive(Debug, Clone, Serialize)]
pub struct Comparator<A> {
    #[serde(rename = "isAscending")]
    is_ascending: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    collation: Option<String>,
    #[serde(flatten)]
    arguments: A,
}
/// Response to a query request: the matching ids plus paging/state metadata.
#[derive(Debug, Clone, Deserialize)]
pub struct QueryResponse {
    #[serde(rename = "accountId")]
    account_id: String,
    // Opaque state token for change tracking.
    #[serde(rename = "queryState")]
    query_state: String,
    #[serde(rename = "canCalculateChanges")]
    can_calculate_changes: Option<bool>,
    // Position of the first returned id within the full result list.
    #[serde(rename = "position")]
    position: i32,
    #[serde(rename = "ids")]
    ids: Vec<String>,
    // Present only when the request asked for `calculateTotal`.
    #[serde(rename = "total")]
    total: Option<usize>,
    #[serde(rename = "limit")]
    limit: Option<usize>,
}
impl<O: QueryObject> QueryRequest<O> {
    /// Creates an empty query request for the account in `params`.
    pub fn new(params: RequestParams) -> Self {
        QueryRequest {
            account_id: params.account_id,
            method: (params.method, params.call_id),
            filter: None,
            sort: None,
            position: None,
            anchor: None,
            anchor_offset: None,
            limit: None,
            calculate_total: None,
            arguments: O::QueryArguments::default(),
        }
    }
    /// Overrides the account id to query.
    pub fn account_id(&mut self, account_id: impl Into<String>) -> &mut Self {
        self.account_id = account_id.into();
        self
    }
    /// Sets the filter to apply.
    pub fn filter(&mut self, filter: impl Into<Filter<O::Filter>>) -> &mut Self {
        self.filter = Some(filter.into());
        self
    }
    /// Sets the sort comparators, applied in order.
    pub fn sort(&mut self, sort: impl IntoIterator<Item = Comparator<O::Sort>>) -> &mut Self {
        self.sort = Some(sort.into_iter().collect());
        self
    }
    /// Sets the zero-based position of the first result to return.
    pub fn position(&mut self, position: i32) -> &mut Self {
        // Was `position.into()`, which relied on the blanket
        // `From<T> for Option<T>`; `Some(..)` matches the other setters.
        self.position = Some(position);
        self
    }
    /// Anchors the result window on the given object id.
    pub fn anchor(&mut self, anchor: impl Into<String>) -> &mut Self {
        self.anchor = Some(anchor.into());
        self
    }
    /// Sets the offset relative to the anchor.
    pub fn anchor_offset(&mut self, anchor_offset: i32) -> &mut Self {
        // Same `.into()` -> `Some(..)` normalization as `position`.
        self.anchor_offset = Some(anchor_offset);
        self
    }
    /// Limits the maximum number of ids returned.
    pub fn limit(&mut self, limit: usize) -> &mut Self {
        self.limit = Some(limit);
        self
    }
    /// Requests that the server also compute the total match count.
    pub fn calculate_total(&mut self, calculate_total: bool) -> &mut Self {
        self.calculate_total = Some(calculate_total);
        self
    }
    /// Mutable access to the object-specific query arguments.
    pub fn arguments(&mut self) -> &mut O::QueryArguments {
        &mut self.arguments
    }
    /// Builds a back-reference to this request's `/ids` result for chaining
    /// further method calls in the same request.
    pub fn result_reference(&self) -> ResultReference {
        ResultReference::new(self.method.0, self.method.1, "/ids")
    }
}
impl QueryResponse {
    /// The account this query was executed against.
    pub fn account_id(&self) -> &str {
        self.account_id.as_str()
    }
    /// Borrows the matching object ids.
    pub fn ids(&self) -> &[String] {
        self.ids.as_slice()
    }
    /// Returns the id at `pos`; panics when `pos` is out of bounds.
    pub fn id(&self, pos: usize) -> &str {
        &self.ids[pos]
    }
    /// Moves the ids out, leaving an empty list behind.
    pub fn take_ids(&mut self) -> Vec<String> {
        std::mem::replace(&mut self.ids, Vec::new())
    }
    /// Total match count, when the request asked for it.
    pub fn total(&self) -> Option<usize> {
        self.total
    }
    /// The limit applied by the server, if any.
    pub fn limit(&self) -> Option<usize> {
        self.limit
    }
    /// Position of the first returned id within the full result list.
    pub fn position(&self) -> i32 {
        self.position
    }
    /// Moves the query-state token out, leaving an empty string behind.
    pub fn take_query_state(&mut self) -> String {
        std::mem::replace(&mut self.query_state, String::new())
    }
    /// Borrows the query-state token.
    pub fn query_state(&self) -> &str {
        self.query_state.as_str()
    }
    /// Whether the server can calculate change updates for this query;
    /// an absent field counts as `false`.
    pub fn can_calculate_changes(&self) -> bool {
        matches!(self.can_calculate_changes, Some(true))
    }
}
impl<A> Comparator<A> {
    /// Creates an ascending comparator with no collation over the given
    /// sort arguments.
    pub fn new(arguments: A) -> Self {
        Comparator {
            is_ascending: true,
            collation: None,
            arguments,
        }
    }
    /// Sorts in descending order.
    pub fn descending(self) -> Self {
        self.is_ascending(false)
    }
    /// Sorts in ascending order.
    pub fn ascending(self) -> Self {
        self.is_ascending(true)
    }
    /// Sets the sort direction explicitly.
    pub fn is_ascending(mut self, is_ascending: bool) -> Self {
        self.is_ascending = is_ascending;
        self
    }
    /// Sets the collation algorithm to use for this comparator.
    pub fn collation(mut self, collation: String) -> Self {
        self.collation = Some(collation);
        self
    }
}
/// Lets an operator node be used wherever a `Filter` is expected.
impl<T> From<FilterOperator<T>> for Filter<T> {
    fn from(filter: FilterOperator<T>) -> Self {
        Filter::FilterOperator(filter)
    }
}
/// Lets a bare condition be used wherever a `Filter` is expected.
impl<T> From<T> for Filter<T> {
    fn from(filter: T) -> Self {
        Filter::FilterCondition(filter)
    }
}
impl<T> Filter<T> {
    /// Builds a filter node combining `conditions` with the given `operator`.
    pub fn operator(operator: Operator, conditions: Vec<Filter<T>>) -> Self {
        Filter::FilterOperator(FilterOperator {
            operator,
            conditions,
        })
    }
    /// Shared helper for `and`/`or`/`not`: converts each condition into a
    /// `Filter` and combines them under `operator`. (The three public
    /// constructors were copy-paste triplicates of this body.)
    fn combine<U, V>(operator: Operator, conditions: U) -> Self
    where
        U: IntoIterator<Item = V>,
        V: Into<Filter<T>>,
    {
        Self::operator(operator, conditions.into_iter().map(Into::into).collect())
    }
    /// All conditions must match (logical AND).
    pub fn and<U, V>(conditions: U) -> Self
    where
        U: IntoIterator<Item = V>,
        V: Into<Filter<T>>,
    {
        Self::combine(Operator::And, conditions)
    }
    /// At least one condition must match (logical OR).
    pub fn or<U, V>(conditions: U) -> Self
    where
        U: IntoIterator<Item = V>,
        V: Into<Filter<T>>,
    {
        Self::combine(Operator::Or, conditions)
    }
    /// None of the conditions may match (logical NOT).
    pub fn not<U, V>(conditions: U) -> Self
    where
        U: IntoIterator<Item = V>,
        V: Into<Filter<T>>,
    {
        Self::combine(Operator::Not, conditions)
    }
}
|
#![feature(test)]
use dev_util::impl_benchmark;
// Generate the standard benchmark suite for the `Sha0` hash implementation.
impl_benchmark!(sha0, Sha0);
|
// The Base64-encoded content in text.txt has been encrypted via AES-128 in ECB mode under the key "YELLOW SUBMARINE"
// Symmetric key algorithm
// Modern CPUs have AES instructions built into them! (These are much faster and more secure)
// If you implement your own, attackers can exploit CPU cache timing (side-channel attacks)!
// a block cipher - encrypts 128 bits / 16 bytes
// treats 16 bytes as 4x4 grid
// initialisation steps - key expansions then rounds
// we can have different sized keys - 128 (10 cycles), 192 (12 cycles) or 256 bits (14 cycles)
// Algorithm
/**
* Message -> Key Expansion -> Add Round Key (xor)
* Rounds => SubBytes -> Shift Rows -> Mix Columns -> Add Round Key REPEAT
* Final Round => SubBytes -> Shift Rows -> Add Round Key
*/
extern crate decrypt_repeating;
use openssl::symm::{ encrypt, Cipher, Crypter, Mode};
/// Decrypts `data` (raw ciphertext bytes) with AES-128 in ECB mode under `key`.
///
/// NOTE(review): ECB has no IV; `iv` is forwarded to the `Crypter` but cannot
/// influence the result — confirm whether the parameter should be dropped.
///
/// # Errors
/// Returns a descriptive message when the crypter cannot be created or the
/// ciphertext fails to decrypt (e.g. bad key or corrupt padding).
pub fn decrypt_data(data: Vec<u8>, key: &[u8], iv: Vec<u8>) -> Result<Vec<u8>, String> {
    let cipher = Cipher::aes_128_ecb();
    // Was `.unwrap()`: creation failure now flows into the Result like the
    // other error paths instead of panicking.
    let mut decrypter = Crypter::new(cipher, Mode::Decrypt, key, Some(&iv))
        .map_err(|e| format!("Error creating decrypter: {}", e))?;
    // Output must be able to hold up to one extra block beyond the input.
    // (The original sized this with `Cipher::aes_128_cbc()`; use the actual
    // cipher so the two can never drift apart.)
    let mut output = vec![0u8; data.len() + cipher.block_size()];
    let mut count = decrypter
        .update(&data, &mut output)
        .map_err(|e| format!("Error decrypting text: {}", e))?;
    // Finalize to flush the last block and strip PKCS#7 padding; the original
    // skipped this and returned the whole zero-padded buffer unmodified.
    count += decrypter
        .finalize(&mut output[count..])
        .map_err(|e| format!("Error decrypting text: {}", e))?;
    output.truncate(count);
    Ok(output)
}
/// Encrypts `data` with AES-128 in ECB mode under `key` using the one-shot
/// `encrypt` helper. (ECB ignores the supplied IV.)
pub fn encrypt_data(data: Vec<u8>, key: &[u8], iv: Vec<u8>) -> Result<Vec<u8>, String> {
    // The original bound the result to a variable misleadingly named
    // `decrypted_text`; this is the ciphertext.
    encrypt(Cipher::aes_128_ecb(), key, Some(&iv), &data)
        .map_err(|_| String::from("Error encrypting text"))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use decrypt_repeating::base_64;
    /// Decrypting text.txt with the known key must yield the plaintext
    /// stored in decrypted.txt.
    #[test]
    fn test_decryption() {
        let text = fs::read_to_string("text.txt").expect("Unable to read file");
        // ECB ignores the IV; this value only satisfies the API signature.
        let iv = b"ABCDEFGHIJKLMNOP";
        let text_bytes = base_64::decode(text.as_bytes());
        let key = b"YELLOW SUBMARINE";
        let decrypted = decrypt_data(text_bytes, key, iv.to_vec());
        let result = String::from_utf8(decrypted.unwrap()).unwrap();
        let actual = fs::read_to_string("decrypted.txt").expect("Unable to read file");
        assert_eq!(result, actual);
    }
    /// Inverse check: re-encrypting decrypted.txt must reproduce the original
    /// Base64 ciphertext (newlines stripped for comparison).
    #[test]
    fn test_encryption() {
        let encypted_text_original = fs::read_to_string("text.txt").expect("Unable to read file").replace("\n", "");
        let un_encrypted_text = fs::read_to_string("decrypted.txt").expect("Unable to read file");
        let key = b"YELLOW SUBMARINE";
        let iv = b"ABCDEFGHIJKLMNOP";
        let encrypted = encrypt_data(un_encrypted_text.as_bytes().to_owned(), key, iv.to_vec());
        let base_64_encoded = base_64::encode(&encrypted.unwrap());
        let result = String::from_utf8(base_64_encoded).unwrap();
        assert_eq!(result, encypted_text_original);
    }
}
|
//! Data Processing Instruction Definitions
//!
//!
//! The instructions defined in this module are listed in the ARMv7-m Architecture Reference Manual on page 131:
//!
//! OpCode Instruction
//! 0000 Bitwise AND AND (register) on page A7-201
//! 0001 Exclusive OR EOR (register) on page A7-233
//! 0010 Logical Shift Left LSL (register) on page A7-283
//! 0011 Logical Shift Right LSR (register) on page A7-285
//! 0100 Arithmetic Shift Right ASR (register) on page A7-204
//! 0101 Add with Carry ADC (register) on page A7-188
//! 0110 Subtract with Carry SBC (register) on page A7-347
//! 0111 Rotate Right ROR (register) on page A7-339
//! 1000 Set flags on bitwise AND TST (register) on page A7-420
//! 1001 Reverse Subtract from 0 RSB (immediate) on page A7-341
//! 1010 Compare Registers CMP (register) on page A7-224
//! 1011 Compare Negative CMN (register) on page A7-222
//! 1100 Logical OR ORR (register) on page A7-310
//! 1101 Multiply Two Registers MUL (register) on page A7-302
//! 1110 Bit Clear BIC (register) on page A7-210
//! 1111 Bitwise NOT MVN (register) on page A7-304
//!
//! Data processing instruction encoding:
//! =================================================
//! |15 14 13 12 11 10|09 08 07 06|05 04 03 02 01 00|
//! |0 1 0 0 0 0 |opcode | |
//! =================================================
use crate::instructions::InstrThumb16;
/// Holder for the generated Thumb-16 data-processing instruction encodings
/// (opcodes 0000-1111 of the ARMv7-M data-processing group).
pub struct DataProcessingInstructions {
    // Lazily built list of generated instructions; `None` until populated.
    // NOTE(review): "dct" presumably abbreviates "decode table" — confirm.
    dct: Option<Vec<InstrThumb16>>,
}
|
use super::ExtractedSyntaxGrammar;
use crate::generate::grammars::{
Production, ProductionStep, SyntaxGrammar, SyntaxVariable, Variable,
};
use crate::generate::rules::{Alias, Associativity, Precedence, Rule, Symbol};
use anyhow::{anyhow, Result};
/// Accumulates a single linear `Production` while walking a choice-free rule
/// tree. The stacks track the metadata (precedence, associativity, alias,
/// field name) currently in effect for the subtree being visited.
struct RuleFlattener {
    production: Production,
    precedence_stack: Vec<Precedence>,
    associativity_stack: Vec<Associativity>,
    alias_stack: Vec<Alias>,
    field_name_stack: Vec<String>,
}
impl RuleFlattener {
    /// Starts with an empty production and no metadata in effect.
    fn new() -> Self {
        Self {
            production: Production {
                steps: Vec::new(),
                dynamic_precedence: 0,
            },
            precedence_stack: Vec::new(),
            associativity_stack: Vec::new(),
            alias_stack: Vec::new(),
            field_name_stack: Vec::new(),
        }
    }
    /// Consumes the flattener, walking `rule` and returning the resulting
    /// linear production. `rule` must already be choice-free (see
    /// `extract_choices`).
    fn flatten(mut self, rule: Rule) -> Production {
        self.apply(rule, true);
        self.production
    }
    /// Recursively appends production steps for `rule`.
    ///
    /// `at_end` is true while the current subtree can still contain the
    /// production's final step; a final step keeps the innermost precedence /
    /// associativity instead of reverting to the enclosing one.
    /// Returns true if at least one step was pushed for this subtree.
    fn apply(&mut self, rule: Rule, at_end: bool) -> bool {
        match rule {
            Rule::Seq(members) => {
                let mut result = false;
                // Only the last member can contain the production's final step.
                // NOTE(review): an empty Seq would underflow here — presumably
                // ruled out earlier in the pipeline; confirm.
                let last_index = members.len() - 1;
                for (i, member) in members.into_iter().enumerate() {
                    result |= self.apply(member, i == last_index && at_end);
                }
                result
            }
            Rule::Metadata { rule, params } => {
                // Push whatever metadata this node declares for the duration
                // of its subtree, remembering what must be popped afterwards.
                let mut has_precedence = false;
                if !params.precedence.is_none() {
                    has_precedence = true;
                    self.precedence_stack.push(params.precedence);
                }
                let mut has_associativity = false;
                if let Some(associativity) = params.associativity {
                    has_associativity = true;
                    self.associativity_stack.push(associativity);
                }
                let mut has_alias = false;
                if let Some(alias) = params.alias {
                    has_alias = true;
                    self.alias_stack.push(alias);
                }
                let mut has_field_name = false;
                if let Some(field_name) = params.field_name {
                    has_field_name = true;
                    self.field_name_stack.push(field_name);
                }
                // The production keeps the dynamic precedence with the
                // largest absolute value seen anywhere in the tree.
                if params.dynamic_precedence.abs() > self.production.dynamic_precedence.abs() {
                    self.production.dynamic_precedence = params.dynamic_precedence;
                }
                let did_push = self.apply(*rule, at_end);
                if has_precedence {
                    self.precedence_stack.pop();
                    // A last step that is not the production's final step
                    // reverts to the enclosing precedence once this metadata
                    // goes out of scope.
                    if did_push && !at_end {
                        self.production.steps.last_mut().unwrap().precedence = self
                            .precedence_stack
                            .last()
                            .cloned()
                            .unwrap_or(Precedence::None);
                    }
                }
                if has_associativity {
                    self.associativity_stack.pop();
                    // Same reversion rule as for precedence.
                    if did_push && !at_end {
                        self.production.steps.last_mut().unwrap().associativity =
                            self.associativity_stack.last().cloned();
                    }
                }
                if has_alias {
                    self.alias_stack.pop();
                }
                if has_field_name {
                    self.field_name_stack.pop();
                }
                did_push
            }
            Rule::Symbol(symbol) => {
                // A concrete symbol becomes one step, stamped with whatever
                // metadata is currently in effect.
                self.production.steps.push(ProductionStep {
                    symbol,
                    precedence: self
                        .precedence_stack
                        .last()
                        .cloned()
                        .unwrap_or(Precedence::None),
                    associativity: self.associativity_stack.last().cloned(),
                    alias: self.alias_stack.last().cloned(),
                    field_name: self.field_name_stack.last().cloned(),
                });
                true
            }
            // Other rule kinds (e.g. Blank) contribute no steps.
            _ => false,
        }
    }
}
/// Expands every `Choice` in `rule` into separate alternatives, returning one
/// choice-free rule per combination. A `Seq` yields the cartesian product of
/// its members' alternatives; `Metadata` is re-wrapped around each extracted
/// alternative of its inner rule.
fn extract_choices(rule: Rule) -> Vec<Rule> {
    match rule {
        Rule::Seq(elements) => {
            // Seed with a single empty alternative, then cross each member's
            // alternatives into the accumulated set.
            let mut alternatives = vec![Rule::Blank];
            for element in elements {
                let expansions = extract_choices(element);
                alternatives = alternatives
                    .into_iter()
                    .flat_map(|prefix| {
                        expansions.iter().map(move |expansion| {
                            Rule::Seq(vec![prefix.clone(), expansion.clone()])
                        })
                    })
                    .collect();
            }
            alternatives
        }
        Rule::Choice(elements) => elements.into_iter().flat_map(extract_choices).collect(),
        Rule::Metadata { rule, params } => extract_choices(*rule)
            .into_iter()
            .map(|rule| Rule::Metadata {
                rule: Box::new(rule),
                params: params.clone(),
            })
            .collect(),
        _ => vec![rule],
    }
}
fn flatten_variable(variable: Variable) -> Result<SyntaxVariable> {
let mut productions = Vec::new();
for rule in extract_choices(variable.rule) {
let production = RuleFlattener::new().flatten(rule);
if !productions.contains(&production) {
productions.push(production);
}
}
Ok(SyntaxVariable {
name: variable.name,
kind: variable.kind,
productions,
})
}
/// Returns true when `symbol` appears as a step of any production of any
/// variable. (Parameter loosened from `&Vec<_>` to `&[_]` per clippy
/// `ptr_arg`; existing `&Vec` call sites still coerce.)
fn symbol_is_used(variables: &[SyntaxVariable], symbol: Symbol) -> bool {
    variables.iter().any(|variable| {
        variable
            .productions
            .iter()
            .any(|production| production.steps.iter().any(|step| step.symbol == symbol))
    })
}
/// Converts the extracted grammar into its final form by flattening every
/// variable's rule into a list of productions.
///
/// # Errors
/// Rejects any rule that can match the empty string (an empty production)
/// unless no other rule refers to it — i.e. unless it is the start rule.
pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result<SyntaxGrammar> {
    let mut variables = Vec::new();
    for variable in grammar.variables {
        variables.push(flatten_variable(variable)?);
    }
    for (i, variable) in variables.iter().enumerate() {
        for production in &variable.productions {
            // An empty production is only tolerated for an unreferenced
            // (start) symbol.
            if production.steps.is_empty() && symbol_is_used(&variables, Symbol::non_terminal(i)) {
                return Err(anyhow!(
                    "The rule `{}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string
unless they are used only as the grammar's start rule.
",
                    variable.name
                ));
            }
        }
    }
    Ok(SyntaxGrammar {
        extra_symbols: grammar.extra_symbols,
        expected_conflicts: grammar.expected_conflicts,
        variables_to_inline: grammar.variables_to_inline,
        precedence_orderings: grammar.precedence_orderings,
        external_tokens: grammar.external_tokens,
        supertype_symbols: grammar.supertype_symbols,
        word_token: grammar.word_token,
        variables,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::generate::grammars::VariableType;
    use crate::generate::rules::Symbol;
    /// Nested precedence metadata is stamped onto the enclosed steps, and a
    /// choice splits the rule into two separate productions.
    #[test]
    fn test_flatten_grammar() {
        let result = flatten_variable(Variable {
            name: "test".to_string(),
            kind: VariableType::Named,
            rule: Rule::seq(vec![
                Rule::non_terminal(1),
                Rule::prec_left(
                    Precedence::Integer(101),
                    Rule::seq(vec![
                        Rule::non_terminal(2),
                        Rule::choice(vec![
                            Rule::prec_right(
                                Precedence::Integer(102),
                                Rule::seq(vec![Rule::non_terminal(3), Rule::non_terminal(4)]),
                            ),
                            Rule::non_terminal(5),
                        ]),
                        Rule::non_terminal(6),
                    ]),
                ),
                Rule::non_terminal(7),
            ]),
        })
        .unwrap();
        assert_eq!(
            result.productions,
            vec![
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(3))
                            .with_prec(Precedence::Integer(102), Some(Associativity::Right)),
                        // Step 4 ends the inner prec_right scope, so it
                        // reverts to the enclosing prec_left metadata.
                        ProductionStep::new(Symbol::non_terminal(4))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ]
                },
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(5))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ]
                },
            ]
        );
    }
    /// Each production keeps the dynamic precedence with the largest
    /// absolute value found along its own expansion.
    #[test]
    fn test_flatten_grammar_with_maximum_dynamic_precedence() {
        let result = flatten_variable(Variable {
            name: "test".to_string(),
            kind: VariableType::Named,
            rule: Rule::seq(vec![
                Rule::non_terminal(1),
                Rule::prec_dynamic(
                    101,
                    Rule::seq(vec![
                        Rule::non_terminal(2),
                        Rule::choice(vec![
                            Rule::prec_dynamic(
                                102,
                                Rule::seq(vec![Rule::non_terminal(3), Rule::non_terminal(4)]),
                            ),
                            Rule::non_terminal(5),
                        ]),
                        Rule::non_terminal(6),
                    ]),
                ),
                Rule::non_terminal(7),
            ]),
        })
        .unwrap();
        assert_eq!(
            result.productions,
            vec![
                Production {
                    dynamic_precedence: 102,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2)),
                        ProductionStep::new(Symbol::non_terminal(3)),
                        ProductionStep::new(Symbol::non_terminal(4)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ],
                },
                Production {
                    dynamic_precedence: 101,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2)),
                        ProductionStep::new(Symbol::non_terminal(5)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ],
                },
            ]
        );
    }
    /// A precedence scope that covers the production's final step is
    /// retained on that step rather than reverted.
    #[test]
    fn test_flatten_grammar_with_final_precedence() {
        let result = flatten_variable(Variable {
            name: "test".to_string(),
            kind: VariableType::Named,
            rule: Rule::prec_left(
                Precedence::Integer(101),
                Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(2)]),
            ),
        })
        .unwrap();
        assert_eq!(
            result.productions,
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![
                    ProductionStep::new(Symbol::non_terminal(1))
                        .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                    ProductionStep::new(Symbol::non_terminal(2))
                        .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                ]
            }]
        );
        // Same rule with a single-step sequence.
        let result = flatten_variable(Variable {
            name: "test".to_string(),
            kind: VariableType::Named,
            rule: Rule::prec_left(
                Precedence::Integer(101),
                Rule::seq(vec![Rule::non_terminal(1)]),
            ),
        })
        .unwrap();
        assert_eq!(
            result.productions,
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![ProductionStep::new(Symbol::non_terminal(1))
                    .with_prec(Precedence::Integer(101), Some(Associativity::Left)),]
            }]
        );
    }
    /// Field names apply only to the wrapped step; an optional (Blank)
    /// choice branch yields an additional, shorter production.
    #[test]
    fn test_flatten_grammar_with_field_names() {
        let result = flatten_variable(Variable {
            name: "test".to_string(),
            kind: VariableType::Named,
            rule: Rule::seq(vec![
                Rule::field("first-thing".to_string(), Rule::terminal(1)),
                Rule::terminal(2),
                Rule::choice(vec![
                    Rule::Blank,
                    Rule::field("second-thing".to_string(), Rule::terminal(3)),
                ]),
            ]),
        })
        .unwrap();
        assert_eq!(
            result.productions,
            vec![
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(1)).with_field_name("first-thing"),
                        ProductionStep::new(Symbol::terminal(2))
                    ]
                },
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(1)).with_field_name("first-thing"),
                        ProductionStep::new(Symbol::terminal(2)),
                        ProductionStep::new(Symbol::terminal(3)).with_field_name("second-thing"),
                    ]
                },
            ]
        );
    }
}
|
//! Structs defining Amazon data sets.
use crate::arrow::*;
use arrow2_convert::ArrowSerialize;
use serde::{Deserialize, Serialize};
/// A rating as described in a source CSV file.
#[derive(Serialize, Deserialize)]
pub struct SourceRating {
    // Raw user identifier as it appears in the CSV.
    pub user: String,
    // Amazon product identifier.
    pub asin: String,
    pub rating: f32,
    // NOTE(review): presumably a Unix epoch timestamp — confirm against the
    // source data.
    pub timestamp: i64,
}
/// Structure for scanned ratings.
///
/// This data structure is serialized to `ratings.parquet` in the Amazon directories.
#[derive(ArrowField, ArrowSerialize, Serialize, Deserialize)]
pub struct RatingRow {
    // Numeric user id. NOTE(review): the i32 here vs. the String in
    // `SourceRating` suggests a remapping during scanning — confirm.
    pub user: i32,
    pub asin: String,
    pub rating: f32,
    pub timestamp: i64,
}
|
use crate::{
error::Error,
receiver::{
Action, AnyReceiver, AnyWrapperRef, BusPollerCallback, PermitDrop, ReceiverTrait,
SendUntypedReceiver, TypeTagAccept,
},
stats::Stats,
Bus, Event, Message, Permit, ReciveUntypedReceiver, TypeTag, TypeTagAcceptItem,
};
use core::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use dashmap::DashMap;
use futures::{pin_mut, StreamExt};
use std::sync::Arc;
use tokio::sync::{oneshot, Notify};
/// A relay: anything that can report accepted type tags and both send and
/// receive untyped messages. Blanket-implemented for every qualifying type.
pub trait Relay: TypeTagAccept + SendUntypedReceiver + ReciveUntypedReceiver + 'static {}
impl<T: TypeTagAccept + SendUntypedReceiver + ReciveUntypedReceiver + 'static> Relay for T {}
/// Slab configuration reserving one key bit.
/// NOTE(review): the purpose of the reserved bit is not visible here —
/// confirm against the wider receiver code.
struct SlabCfg;
impl sharded_slab::Config for SlabCfg {
    const RESERVED_BITS: usize = 1;
}
/// Slab specialized with the custom configuration above.
type Slab<T> = sharded_slab::Slab<T, SlabCfg>;
/// Shared state for one relay: per-type receiver contexts plus the flags and
/// notifiers used to coordinate lifecycle transitions.
pub(crate) struct RelayContext {
    // One receiver context per accepted message type tag.
    receivers: DashMap<TypeTag, Arc<RelayReceiverContext>>,
    need_flush: AtomicBool,
    ready_flag: AtomicBool,
    idling_flag: AtomicBool,
    init_sent: AtomicBool,
    // Notified when the relay has flushed / synchronized / closed /
    // become ready / gone idle, respectively.
    flushed: Notify,
    synchronized: Notify,
    closed: Notify,
    ready: Notify,
    idle: Notify,
}
/// Per-message-type state: an in-flight limit, the current in-flight count,
/// and a notifier signalled on responses.
pub struct RelayReceiverContext {
    limit: u64,
    // Number of messages currently being processed (decremented when a
    // permit is dropped).
    processing: AtomicU64,
    response: Arc<Notify>,
}
impl RelayReceiverContext {
    /// Creates a context with the given in-flight limit and a zeroed
    /// in-flight counter.
    fn new(limit: u64) -> Self {
        Self {
            limit,
            processing: Default::default(),
            response: Arc::new(Notify::new()),
        }
    }
}
/// Dropping a permit releases one in-flight slot.
impl PermitDrop for RelayReceiverContext {
    fn permit_drop(&self) {
        self.processing.fetch_sub(1, Ordering::SeqCst);
    }
}
/// Wraps a relay `S` so it can be registered on the bus as a receiver.
pub(crate) struct RelayWrapper<S>
where
    S: 'static,
{
    // Bus-assigned receiver id.
    id: u64,
    inner: S,
    // Shared with the polling task spawned by `start_polling`.
    context: Arc<RelayContext>,
    // Oneshot senders awaiting responses; the slab key is the message id.
    waiters: Slab<oneshot::Sender<Result<Box<dyn Message>, Error>>>,
}
impl<S> RelayWrapper<S> {
    /// Creates a wrapper around `inner` with fresh, empty shared state.
    pub fn new(id: u64, inner: S) -> Self {
        Self {
            id,
            inner,
            context: Arc::new(RelayContext {
                receivers: DashMap::new(),
                need_flush: AtomicBool::new(false),
                ready_flag: AtomicBool::new(false),
                // A new relay starts out idle until it processes something.
                idling_flag: AtomicBool::new(true),
                init_sent: AtomicBool::new(false),
                flushed: Notify::new(),
                synchronized: Notify::new(),
                closed: Notify::new(),
                ready: Notify::new(),
                idle: Notify::new(),
            }),
            waiters: sharded_slab::Slab::new_with_config::<SlabCfg>(),
        }
    }
}
// Type-acceptance queries are delegated verbatim to the wrapped relay.
impl<S> TypeTagAccept for RelayWrapper<S>
where
    S: Relay + Send + Sync + 'static,
{
    fn iter_types(&self) -> Box<dyn Iterator<Item = TypeTagAcceptItem> + '_> {
        self.inner.iter_types()
    }
    fn accept_msg(&self, msg: &TypeTag) -> bool {
        self.inner.accept_msg(msg)
    }
    fn accept_req(&self, req: &TypeTag, resp: Option<&TypeTag>, err: Option<&TypeTag>) -> bool {
        self.inner.accept_req(req, resp, err)
    }
}
impl<S> ReceiverTrait for RelayWrapper<S>
where
    S: Relay + Send + Sync + 'static,
{
    // The wrapper's type name doubles as the receiver name.
    fn name(&self) -> &str {
        std::any::type_name::<Self>()
    }
    // A relay is neither a typed receiver nor a wrapper around one.
    fn typed(&self) -> Option<AnyReceiver<'_>> {
        None
    }
    fn wrapper(&self) -> Option<AnyWrapperRef<'_>> {
        None
    }
    fn id(&self) -> u64 {
        self.id
    }
    /// Forwards a boxed message to the wrapped relay.
    fn send_boxed(
        &self,
        mid: u64,
        boxed_msg: Box<dyn Message>,
        req: bool,
        bus: &Bus,
    ) -> Result<(), Error<Box<dyn Message>>> {
        self.inner.send_msg(mid, boxed_msg, req, bus)
    }
    /// Registers a oneshot sender for a response. The returned slab key is
    /// the message id used to route `Event::Response` back (see `start_polling`).
    fn add_response_listener(
        &self,
        listener: oneshot::Sender<Result<Box<dyn Message>, Error>>,
    ) -> Result<u64, Error> {
        Ok(self
            .waiters
            .insert(listener)
            .ok_or(Error::AddListenerError)? as _)
    }
    // Statistics are not implemented for relays; calling this panics.
    fn stats(&self) -> Stats {
        unimplemented!()
    }
    fn send_action(&self, bus: &Bus, action: Action) -> Result<(), Error<Action>> {
        SendUntypedReceiver::send(&self.inner, action, bus)
    }
    // Accessors for the lifecycle notifications held in the shared context.
    fn close_notify(&self) -> &Notify {
        &self.context.closed
    }
    fn sync_notify(&self) -> &Notify {
        &self.context.synchronized
    }
    fn flush_notify(&self) -> &Notify {
        &self.context.flushed
    }
    fn ready_notify(&self) -> &Notify {
        &self.context.ready
    }
    fn idle_notify(&self) -> &Notify {
        &self.context.idle
    }
    fn is_init_sent(&self) -> bool {
        self.context.init_sent.load(Ordering::SeqCst)
    }
    fn is_ready(&self) -> bool {
        self.context.ready_flag.load(Ordering::SeqCst)
    }
    fn is_idling(&self) -> bool {
        self.context.idling_flag.load(Ordering::SeqCst)
    }
    fn need_flush(&self) -> bool {
        self.context.need_flush.load(Ordering::SeqCst)
    }
    fn set_need_flush(&self) {
        self.context.need_flush.store(true, Ordering::SeqCst);
    }
    /// Tries to reserve one unit of send capacity for messages of type `tt`.
    /// Returns `None` when the per-type in-flight limit is reached.
    fn try_reserve(&self, tt: &TypeTag) -> Option<Permit> {
        // Lazily create the per-type context with a hard-coded limit of 16.
        // NOTE(review): check-then-insert on DashMap is not atomic; two
        // threads may both insert and the later write wins — confirm that is
        // acceptable (the losing context is fresh and unused at that point).
        if !self.context.receivers.contains_key(tt) {
            self.context
                .receivers
                .insert(tt.clone(), Arc::new(RelayReceiverContext::new(16)));
        }
        loop {
            let context = self.context.receivers.get(tt).unwrap();
            let count = context.processing.load(Ordering::Relaxed);
            if count < context.limit {
                // CAS claims a slot only if nobody raced us between the load
                // and the exchange; on failure we re-read and retry.
                let res = context.processing.compare_exchange(
                    count,
                    count + 1,
                    Ordering::SeqCst,
                    Ordering::SeqCst,
                );
                if res.is_ok() {
                    break Some(Permit {
                        fuse: false,
                        inner: context.clone(),
                    });
                }
                // continue
            } else {
                break None;
            }
        }
    }
    /// Returns the capacity-released notification for type `tt`, creating the
    /// per-type context on demand (same lazy-insert pattern as `try_reserve`).
    fn reserve_notify(&self, tt: &TypeTag) -> Arc<Notify> {
        if !self.context.receivers.contains_key(tt) {
            self.context
                .receivers
                .insert(tt.clone(), Arc::new(RelayReceiverContext::new(16)));
        }
        self.context.receivers.get(tt).unwrap().response.clone()
    }
    // Bumps the in-flight counter for `tt`; silently a no-op when no context
    // exists for that type yet.
    fn increment_processing(&self, tt: &TypeTag) {
        self.context
            .receivers
            .get(tt)
            .map(|r| r.processing.fetch_add(1, Ordering::SeqCst));
    }
    /// Returns the async task that drains the relay's event stream and
    /// translates each event into flag/notification updates on the context.
    fn start_polling(self: Arc<Self>) -> BusPollerCallback {
        Box::new(move |bus| {
            Box::pin(async move {
                let this = self.clone();
                let events = this.inner.event_stream(bus);
                pin_mut!(events);
                loop {
                    // A closed stream is treated the same as Event::Exited.
                    let event = if let Some(event) = events.next().await {
                        event
                    } else {
                        self.context.closed.notify_waiters();
                        break;
                    };
                    match event {
                        Event::Error(err) => error!("Batch Error: {}", err),
                        Event::Pause => self.context.ready_flag.store(false, Ordering::SeqCst),
                        Event::Ready => {
                            self.context.ready.notify_waiters();
                            self.context.ready_flag.store(true, Ordering::SeqCst)
                        }
                        Event::InitFailed(err) => {
                            // Waiters are still woken so they can observe the
                            // cleared ready flag instead of hanging forever.
                            error!("Relay init failed: {}", err);
                            self.context.ready.notify_waiters();
                            self.context.ready_flag.store(false, Ordering::SeqCst);
                        }
                        Event::Exited => {
                            self.context.closed.notify_waiters();
                            break;
                        }
                        Event::Flushed => {
                            self.context.need_flush.store(false, Ordering::SeqCst);
                            self.context.flushed.notify_one()
                        }
                        Event::Synchronized(_res) => self.context.synchronized.notify_waiters(),
                        Event::Response(mid, resp) => {
                            // Remember the response's type tag (if Ok) so we can
                            // release flow-control capacity for it below.
                            let tt = if let Ok(bm) = &resp {
                                Some(bm.type_tag())
                            } else {
                                None
                            };
                            // Hand the response to the waiter registered via
                            // add_response_listener, if it is still there.
                            if let Some(chan) = self.waiters.take(mid as _) {
                                if let Err(err) = chan.send(resp) {
                                    error!("Response error for mid({}): {:?}", mid, err);
                                }
                            } else {
                                warn!("No waiters for mid({})", mid);
                            };
                            if let Some(tt) = tt {
                                if let Some(ctx) = self.context.receivers.get(&tt) {
                                    ctx.processing.fetch_sub(1, Ordering::SeqCst);
                                    ctx.response.notify_one();
                                }
                            }
                        }
                        Event::BatchComplete(tt, n) => {
                            // Release n units of capacity and wake one reserver
                            // per released unit.
                            if let Some(ctx) = self.context.receivers.get(&tt) {
                                ctx.processing.fetch_sub(n, Ordering::SeqCst);
                                for _ in 0..n {
                                    ctx.response.notify_one();
                                }
                            }
                        }
                        // Any event variant not handled above is a bug here.
                        _ => unimplemented!(),
                    }
                }
            })
        })
    }
}
|
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};
use tonic::Status;
use tracing::instrument;
use crate::api::permission_membership_request::IdPermissionOrUserid;
use crate::api::PermissionMembershipData;
use crate::schema::permission_membership;
use crate::schema::permission_membership::dsl::*;
use crate::storage::helpers::sql_err_to_grpc_error;
/// A row of the `permission_membership` table.
#[derive(Queryable, Default, Debug)]
pub struct PermissionMembership {
    pub id: i32,
    pub permission: i32,
    pub user_id: i32,
}
/// Insertable form of a membership row (the id is generated by the database).
#[derive(Insertable)]
#[diesel(table_name = permission_membership)]
pub struct NewPermissionMembership {
    pub permission: i32,
    pub user_id: i32,
}
/// Changeset for updating a membership row; `None` fields are left untouched.
#[derive(AsChangeset, Default)]
#[diesel(table_name = permission_membership)]
pub struct UpdatePermissionMembership {
    pub permission: Option<i32>,
    pub user_id: Option<i32>,
}
// Conversions from the database row (owned and borrowed) into the gRPC
// message type; the id becomes `Some(..)` since a stored row always has one.
impl From<PermissionMembership> for PermissionMembershipData {
    fn from(p: PermissionMembership) -> PermissionMembershipData {
        PermissionMembershipData {
            id: Some(p.id),
            permission: p.permission,
            user_id: p.user_id,
        }
    }
}
impl From<&PermissionMembership> for PermissionMembershipData {
    fn from(p: &PermissionMembership) -> PermissionMembershipData {
        PermissionMembershipData {
            id: Some(p.id),
            permission: p.permission,
            user_id: p.user_id,
        }
    }
}
impl PermissionMembership {
#[instrument]
pub async fn all(
pool: &Pool<ConnectionManager<PgConnection>>,
) -> Result<Vec<PermissionMembershipData>, Status> {
let conn = &mut pool.get().unwrap();
match permission_membership.load::<PermissionMembership>(conn) {
Ok(results) => Ok(results.iter().map(|t| t.into()).collect()),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
#[instrument]
pub async fn get(
pool: &Pool<ConnectionManager<PgConnection>>,
id_permission_or_userid: &IdPermissionOrUserid,
) -> Result<Vec<PermissionMembershipData>, Status> {
let conn = &mut pool.get().unwrap();
match id_permission_or_userid {
IdPermissionOrUserid::Id(pm_id) => match permission_membership
.find(pm_id)
.load::<PermissionMembership>(conn)
{
Ok(results) => Ok(results.iter().map(|t| t.into()).collect()),
Err(err) => Err(sql_err_to_grpc_error(err)),
},
IdPermissionOrUserid::Permission(pm_permission) => {
match permission_membership
.filter(permission.eq(pm_permission))
.load::<PermissionMembership>(conn)
{
Ok(results) => Ok(results.iter().map(|t| t.into()).collect()),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
IdPermissionOrUserid::UserId(pm_userid) => {
match permission_membership
.filter(user_id.eq(pm_userid))
.load::<PermissionMembership>(conn)
{
Ok(results) => Ok(results.iter().map(|t| t.into()).collect()),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
}
}
#[instrument]
pub async fn add(
pool: &Pool<ConnectionManager<PgConnection>>,
pm_data: PermissionMembershipData,
) -> Result<PermissionMembershipData, Status> {
let new_user = NewPermissionMembership {
permission: pm_data.permission,
user_id: pm_data.user_id,
};
let conn = &mut pool.get().unwrap();
match diesel::insert_into(permission_membership)
.values(&new_user)
.get_result::<PermissionMembership>(conn)
{
Ok(results) => Ok(results.into()),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
#[instrument]
pub async fn update(
pool: &Pool<ConnectionManager<PgConnection>>,
pm_data: PermissionMembershipData,
) -> Result<PermissionMembershipData, Status> {
let conn = &mut pool.get().unwrap();
let mut update = UpdatePermissionMembership::default();
if pm_data.permission != 0 {
update.permission = Some(pm_data.permission)
}
if pm_data.user_id != 0 {
update.user_id = Some(pm_data.user_id)
}
match diesel::update(permission_membership.find(pm_data.id.unwrap()))
.set(update)
.get_result::<PermissionMembership>(conn)
{
Ok(results) => Ok(results.into()),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
#[instrument]
pub async fn delete(
pool: &Pool<ConnectionManager<PgConnection>>,
id_permission_or_userid: IdPermissionOrUserid,
) -> Result<usize, Status> {
let conn = &mut pool.get().unwrap();
match id_permission_or_userid {
IdPermissionOrUserid::Id(pm_id) => {
match diesel::delete(permission_membership.find(pm_id)).execute(conn) {
Ok(results) => Ok(results),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
IdPermissionOrUserid::Permission(pm_permission) => {
match diesel::delete(permission_membership.filter(permission.eq(pm_permission)))
.execute(conn)
{
Ok(results) => Ok(results),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
IdPermissionOrUserid::UserId(pm_userid) => {
match diesel::delete(permission_membership.filter(user_id.eq(pm_userid)))
.execute(conn)
{
Ok(results) => Ok(results),
Err(err) => Err(sql_err_to_grpc_error(err)),
}
}
}
}
}
|
// Copyright (c) 2016 Anatoly Ikorsky
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use crate::{
conn::pool::{Inner, Pool},
error::Error,
};
use std::sync::{atomic, Arc};
/// Future that disconnects this pool from server and resolves to `()`.
///
/// Active connections taken from this pool should be disconnected manually.
/// Also all pending and new `GetConn`'s will resolve to error.
pub struct DisconnectPool {
    // Shared pool internals; polled until its `closed` flag is set.
    pool_inner: Arc<Inner>,
}
/// Creates a `DisconnectPool` future for the given pool.
pub fn new(pool: Pool) -> DisconnectPool {
    let pool_inner = pool.inner;
    DisconnectPool { pool_inner }
}
impl Future for DisconnectPool {
    type Output = Result<(), Error>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Make sure the pool's background futures are running so the
        // shutdown can actually make progress.
        self.pool_inner.clone().spawn_futures_if_needed();
        // The waker is registered *before* checking the flag, so a close that
        // lands between the check and the registration cannot be missed.
        // NOTE(review): a waker is pushed on every poll; presumably `wake` is
        // drained on wakeup — verify it cannot grow unboundedly.
        self.pool_inner.wake.push(cx.waker().clone());
        if self.pool_inner.closed.load(atomic::Ordering::Acquire) {
            Poll::Ready(Ok(()))
        } else {
            Poll::Pending
        }
    }
}
|
/// Verifies that `iter` is exhausted; any remaining item is reported as
/// trailing garbage on a `keyword` line.
pub fn ensure_empty_iter<'a>(
    iter: &mut impl Iterator<Item = &'a str>,
    keyword: &str,
) -> Result<(), String> {
    match iter.next() {
        None => Ok(()),
        Some(extra_str) => Err(format!(
            "extra characters in \"{}\" line: {}",
            keyword, extra_str
        )),
    }
}
/// Checks that `value` contains nothing but ASCII whitespace; on failure the
/// error message names `keyword` as the offending line kind.
pub fn ensure_empty(value: &str, keyword: &str) -> Result<(), String> {
    let mut words = value.split_ascii_whitespace();
    ensure_empty_iter(&mut words, keyword)
}
/// Returns `line` with any trailing `'\n'` / `'\r'` characters removed
/// (handles LF, CR, CRLF, and repeated endings alike).
pub fn trim_line_endings(line: &str) -> &str {
    line.trim_end_matches(|c: char| c == '\n' || c == '\r')
}
/// Pulls the next word from `iter`, or fails with `error_message` when the
/// iterator is exhausted.
pub fn next_word_or_err<'a>(
    iter: &mut impl Iterator<Item = &'a str>,
    error_message: &str,
) -> Result<&'a str, String> {
    match iter.next() {
        Some(word) => Ok(word),
        None => Err(error_message.to_string()),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_ensure_empty_iter() {
        // The iterator is advanced by one item per call, so the first two
        // calls see leftovers and the third sees an exhausted iterator.
        let mut iter = " abc 123 \n ".split_whitespace();
        assert!(ensure_empty_iter(&mut iter, "").is_err());
        assert!(ensure_empty_iter(&mut iter, "").is_err());
        assert!(ensure_empty_iter(&mut iter, "").is_ok());
    }
    #[test]
    fn test_ensure_empty() {
        // Pure whitespace is "empty"; any word is not.
        assert!(ensure_empty("", "").is_ok());
        assert!(ensure_empty(" \t\r\n\n\r\t ", "").is_ok());
        assert!(ensure_empty("abc", "").is_err());
        assert!(ensure_empty("abc\n", "").is_err());
        assert!(ensure_empty("abc \n", "").is_err());
    }
    #[test]
    fn test_trim_line_endings() {
        // Only trailing CR/LF are stripped — inner and leading whitespace stays.
        assert_eq!(trim_line_endings(""), "");
        assert_eq!(trim_line_endings("\n"), "");
        assert_eq!(trim_line_endings("\r"), "");
        assert_eq!(trim_line_endings("\n\r"), "");
        assert_eq!(trim_line_endings("\r\n"), "");
        assert_eq!(trim_line_endings("\n\n\r\r"), "");
        assert_eq!(trim_line_endings("\r\r\n\n"), "");
        assert_eq!(trim_line_endings("\r\n\r\n"), "");
        assert_eq!(trim_line_endings(" \r\n"), " ");
        assert_eq!(trim_line_endings("\t\r\n"), "\t");
        assert_eq!(trim_line_endings("abc \t\t \r\n"), "abc \t\t ");
        assert_eq!(trim_line_endings("abc \t\t "), "abc \t\t ");
        assert_eq!(trim_line_endings("abc"), "abc");
    }
    #[test]
    fn test_next_word_or_err() {
        assert!(next_word_or_err(&mut "".split_whitespace(), "").is_err());
        assert_eq!(
            next_word_or_err(&mut "abc".split_whitespace(), ""),
            Ok("abc")
        );
    }
}
|
use *;
use std::ops::{Index, IndexMut};
// ---------------------------------------------------------------------------
// Read-only indexing. Each `ptr::*` / `*Ref*` handle selects an element from
// the matching storage collection on the `Reactor`; `usize::from(id)` yields
// the slot index.
// ---------------------------------------------------------------------------
impl<T> Index<ptr::Bool> for Reactor<T> {
    type Output = fns::Bool<T>;
    fn index(&self, id: ptr::Bool) -> &fns::Bool<T> {
        &self.bools[usize::from(id)]
    }
}
impl<T> Index<ptr::Point1<T>> for Reactor<T> {
    type Output = fns::Point1<T>;
    fn index(&self, id: ptr::Point1<T>) -> &fns::Point1<T> {
        &self.points1[usize::from(id)]
    }
}
impl<T> Index<ptr::Point2<T>> for Reactor<T> {
    type Output = fns::Point2<T>;
    fn index(&self, id: ptr::Point2<T>) -> &fns::Point2<T> {
        &self.points2[usize::from(id)]
    }
}
impl<T> Index<ptr::Point3<T>> for Reactor<T> {
    type Output = fns::Point3<T>;
    fn index(&self, id: ptr::Point3<T>) -> &fns::Point3<T> {
        &self.points3[usize::from(id)]
    }
}
impl<T> Index<ptr::Point4<T>> for Reactor<T> {
    type Output = fns::Point4<T>;
    fn index(&self, id: ptr::Point4<T>) -> &fns::Point4<T> {
        &self.points4[usize::from(id)]
    }
}
impl<T> Index<SplineRef1<T>> for Reactor<T> {
    type Output = Spline1<T>;
    fn index(&self, id: SplineRef1<T>) -> &Spline1<T> {
        &self.splines1[usize::from(id)]
    }
}
impl<T> Index<SplineRef2<T>> for Reactor<T> {
    type Output = Spline2<T>;
    fn index(&self, id: SplineRef2<T>) -> &Spline2<T> {
        &self.splines2[usize::from(id)]
    }
}
impl<T> Index<SplineRef3<T>> for Reactor<T> {
    type Output = Spline3<T>;
    fn index(&self, id: SplineRef3<T>) -> &Spline3<T> {
        &self.splines3[usize::from(id)]
    }
}
impl<T> Index<SplineRef4<T>> for Reactor<T> {
    type Output = Spline4<T>;
    fn index(&self, id: SplineRef4<T>) -> &Spline4<T> {
        &self.splines4[usize::from(id)]
    }
}
impl<T> Index<SurfaceRef1<T>> for Reactor<T> {
    type Output = Surface1<T>;
    fn index(&self, id: SurfaceRef1<T>) -> &Surface1<T> {
        &self.surfaces1[usize::from(id)]
    }
}
impl<T> Index<SurfaceRef2<T>> for Reactor<T> {
    type Output = Surface2<T>;
    fn index(&self, id: SurfaceRef2<T>) -> &Surface2<T> {
        &self.surfaces2[usize::from(id)]
    }
}
impl<T> Index<SurfaceRef3<T>> for Reactor<T> {
    type Output = Surface3<T>;
    fn index(&self, id: SurfaceRef3<T>) -> &Surface3<T> {
        &self.surfaces3[usize::from(id)]
    }
}
impl<T> Index<SurfaceRef4<T>> for Reactor<T> {
    type Output = Surface4<T>;
    fn index(&self, id: SurfaceRef4<T>) -> &Surface4<T> {
        &self.surfaces4[usize::from(id)]
    }
}
// ---------------------------------------------------------------------------
// Mutable counterparts of the impls above.
// ---------------------------------------------------------------------------
impl<T> IndexMut<ptr::Bool> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::Bool) -> &mut fns::Bool<T> {
        &mut self.bools[usize::from(id)]
    }
}
impl<T> IndexMut<ptr::Point1<T>> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::Point1<T>) -> &mut fns::Point1<T> {
        &mut self.points1[usize::from(id)]
    }
}
impl<T> IndexMut<ptr::Point2<T>> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::Point2<T>) -> &mut fns::Point2<T> {
        &mut self.points2[usize::from(id)]
    }
}
impl<T> IndexMut<ptr::Point3<T>> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::Point3<T>) -> &mut fns::Point3<T> {
        &mut self.points3[usize::from(id)]
    }
}
impl<T> IndexMut<ptr::Point4<T>> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::Point4<T>) -> &mut fns::Point4<T> {
        &mut self.points4[usize::from(id)]
    }
}
impl<T> IndexMut<SplineRef1<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SplineRef1<T>) -> &mut Spline1<T> {
        &mut self.splines1[usize::from(id)]
    }
}
impl<T> IndexMut<SplineRef2<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SplineRef2<T>) -> &mut Spline2<T> {
        &mut self.splines2[usize::from(id)]
    }
}
impl<T> IndexMut<SplineRef3<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SplineRef3<T>) -> &mut Spline3<T> {
        &mut self.splines3[usize::from(id)]
    }
}
impl<T> IndexMut<SplineRef4<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SplineRef4<T>) -> &mut Spline4<T> {
        &mut self.splines4[usize::from(id)]
    }
}
impl<T> IndexMut<SurfaceRef1<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SurfaceRef1<T>) -> &mut Surface1<T> {
        &mut self.surfaces1[usize::from(id)]
    }
}
impl<T> IndexMut<SurfaceRef2<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SurfaceRef2<T>) -> &mut Surface2<T> {
        &mut self.surfaces2[usize::from(id)]
    }
}
impl<T> IndexMut<SurfaceRef3<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SurfaceRef3<T>) -> &mut Surface3<T> {
        &mut self.surfaces3[usize::from(id)]
    }
}
impl<T> IndexMut<SurfaceRef4<T>> for Reactor<T> {
    fn index_mut(&mut self, id: SurfaceRef4<T>) -> &mut Surface4<T> {
        &mut self.surfaces4[usize::from(id)]
    }
}
// ---------------------------------------------------------------------------
// Color, color-spline, and bone storages.
// ---------------------------------------------------------------------------
impl<T> Index<ptr::Color> for Reactor<T> {
    type Output = fns::Color<T>;
    fn index(&self, id: ptr::Color) -> &fns::Color<T> {
        &self.colors[usize::from(id)]
    }
}
impl<T> IndexMut<ptr::Color> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::Color) -> &mut fns::Color<T> {
        &mut self.colors[usize::from(id)]
    }
}
impl<T> Index<ptr::ColorSpline> for Reactor<T> {
    type Output = fns::ColorSpline;
    fn index(&self, id: ptr::ColorSpline) -> &fns::ColorSpline {
        &self.color_splines[usize::from(id)]
    }
}
impl<T> IndexMut<ptr::ColorSpline> for Reactor<T> {
    fn index_mut(&mut self, id: ptr::ColorSpline) -> &mut fns::ColorSpline {
        &mut self.color_splines[usize::from(id)]
    }
}
impl<T> Index<BoneRef1<T>> for Reactor<T> {
    type Output = Bone1<T>;
    fn index(&self, id: BoneRef1<T>) -> &Bone1<T> {
        &self.bones1[usize::from(id)]
    }
}
impl<T> Index<BoneRef2<T>> for Reactor<T> {
    type Output = Bone2<T>;
    fn index(&self, id: BoneRef2<T>) -> &Bone2<T> {
        &self.bones2[usize::from(id)]
    }
}
impl<T> Index<BoneRef3<T>> for Reactor<T> {
    type Output = Bone3<T>;
    fn index(&self, id: BoneRef3<T>) -> &Bone3<T> {
        &self.bones3[usize::from(id)]
    }
}
impl<T> Index<BoneRef4<T>> for Reactor<T> {
    type Output = Bone4<T>;
    fn index(&self, id: BoneRef4<T>) -> &Bone4<T> {
        &self.bones4[usize::from(id)]
    }
}
impl<T> IndexMut<BoneRef1<T>> for Reactor<T> {
    fn index_mut(&mut self, id: BoneRef1<T>) -> &mut Bone1<T> {
        &mut self.bones1[usize::from(id)]
    }
}
impl<T> IndexMut<BoneRef2<T>> for Reactor<T> {
    fn index_mut(&mut self, id: BoneRef2<T>) -> &mut Bone2<T> {
        &mut self.bones2[usize::from(id)]
    }
}
impl<T> IndexMut<BoneRef3<T>> for Reactor<T> {
    fn index_mut(&mut self, id: BoneRef3<T>) -> &mut Bone3<T> {
        &mut self.bones3[usize::from(id)]
    }
}
impl<T> IndexMut<BoneRef4<T>> for Reactor<T> {
    fn index_mut(&mut self, id: BoneRef4<T>) -> &mut Bone4<T> {
        &mut self.bones4[usize::from(id)]
    }
}
|
extern crate serde_json;
use std::fmt;
use std::error::Error;
use self::serde_json::Map;
use self::serde_json::Value;
use super::url::ParseError;
pub use super::rustache::RustacheError;
pub use super::curl::Error as CurlError;
pub use super::serde_yaml::Error as YamlError;
pub use std::io::Error as IoError;
/// Errors raised while driving helm / kubernetes configuration.
#[derive(Debug)]
pub enum HelmError {
    /// Underlying I/O failure.
    Io(IoError),
    /// Rustache templating failed while rendering the kube config.
    FailedToCreateKubeConfig(RustacheError),
    /// Network (curl) failure.
    Net(CurlError),
    /// An external command could not be run; the payload is the command line.
    CmdFailed(String),
    /// A URL could not be parsed.
    UrlParse(ParseError),
    /// YAML (de)serialization failure.
    Yaml(YamlError),
    /// No CA data given while `skip_tls_verify = false`.
    NoCaData,
    /// The kubernetes API response did not have the expected JSON shape.
    WrongKubeApiFormat(Map<String, Value>),
}
impl fmt::Display for HelmError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            // Variants with structured payloads get purpose-built messages...
            &HelmError::CmdFailed(ref cmd) =>
                f.write_fmt(format_args!("could not run command `{}`", cmd)),
            &HelmError::WrongKubeApiFormat(ref object) =>
                f.write_fmt(format_args!("could not parse api `{:?}`", object)),
            // ...everything else falls back to `Error::description` below.
            // NOTE(review): `description()` is deprecated in modern Rust;
            // migrating would mean formatting the messages here directly.
            _ => write!(f, "{}", self.description()),
        }
    }
}
impl Error for HelmError {
    // For wrapper variants the description is delegated to the wrapped cause
    // (which `cause()` always returns for them, hence the `unreachable!()`
    // arms); standalone variants carry their own static text.
    fn description(&self) -> &str {
        match (self, self.cause()) {
            (_, Some(e)) => e.description(),
            (&HelmError::Io(_), None) => unreachable!(),
            (&HelmError::Net(_), None) => unreachable!(),
            (&HelmError::UrlParse(_), None) => unreachable!(),
            (&HelmError::Yaml(_), None) => unreachable!(),
            (&HelmError::FailedToCreateKubeConfig(_), _) => "rustache templating error",
            (&HelmError::CmdFailed(ref cmd), _) => cmd,
            (&HelmError::WrongKubeApiFormat(_), _) => "could not parse k8s api",
            (&HelmError::NoCaData, _) => "No ca data given and skip_tls_verify = false",
        }
    }
    // Wrapper variants expose their inner error as the cause.
    fn cause(&self) -> Option<&::std::error::Error> {
        match *self {
            HelmError::Io(ref e) => Some(e),
            HelmError::Net(ref e) => Some(e),
            HelmError::UrlParse(ref e) => Some(e),
            HelmError::Yaml(ref e) => Some(e),
            _ => None,
        }
    }
}
// Conversions from the underlying error types so `?` can propagate them
// as `HelmError` variants.
impl From<IoError> for HelmError {
    fn from(e: IoError) -> Self {
        HelmError::Io(e)
    }
}
impl From<RustacheError> for HelmError {
    fn from(e: RustacheError) -> Self {
        HelmError::FailedToCreateKubeConfig(e)
    }
}
impl From<CurlError> for HelmError {
    fn from(e: CurlError) -> Self {
        HelmError::Net(e)
    }
}
impl From<ParseError> for HelmError {
    fn from(e: ParseError) -> Self {
        HelmError::UrlParse(e)
    }
}
impl From<YamlError> for HelmError {
    fn from(e: YamlError) -> Self {
        HelmError::Yaml(e)
    }
}
|
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_assignments)]
// constants
// ---------
const CONSTANT_1: u8 = 123; // immutable always, can be defined in outer scope
// const CONSTANT_1: u8 = 321; - ERROR: `CONSTANT_1` redefined here (no shadowing allowed)
// const mut CONSTANT_2: u8 = 123; - ERROR: cannot be mutable
// const CONSTANT_3 = 123; - ERROR: expected `:` (must have type annotations)
// Walk-through of Rust variable, scalar, and compound-type basics.
fn main() {
    const CONSTANT_1: u8 = 111; // can be defined in function scope
    // basics
    // ---------
    let x = 3; // immutable by default, cannot be defined in outer scope
    // x = 4; - ERROR: cannot assign twice to immutable variable
    let mut x = 5; // can be set to mutable with "mut"
    x = 6; // you can reassign a value of the same type to a mutable variable
    // x = "hi!"; - ERROR: expected integer, found reference (cannot mutate variable type)
    let x = "hey!"; // with shadowing a name can be reused
    // scalar types
    // ------------
    // btw type inference is a thing
    // - integer
    let int: u8 = 255; // other: i/u8/16/32/64/128 isize/usize - default: i32
    // let int: u8 = 256; - ERROR: literal out of range for `u8` (overflow)
    let int: usize = 12_345; // 64 bits if 64-bit arch, 32 bits if 32-bit arch
    let int: i32 = -1234i32; // type can be specified in number literals (except byte literal)
    // - floating-point
    let float: f32 = 2.123456; // other: f64 - default: f64
    // operations
    let sum = 5 + 2;
    // let sum = 5 + 2.1234; - ERROR: no implementation for `{integer} + {float}` (can't mix types)
    let difference = 3 - 1;
    let product = 2 * 34;
    let quotient = 45 / 5;
    let remainder = 200 % 9;
    // more operators: https://doc.rust-lang.org/book/appendix-02-operators.html#operators
    // - boolean
    let t = true;
    let f: bool = false;
    // - character
    let c1 = 'z';
    let c2 = 'ℤ';
    let cat = '😻';
    let cat = "😻"; // note: double quotes make this a &str (string slice), not a char
    // compound types
    // --------------
    // - tuple
    // fixed length, different types allowed
    let tup: (i8, i16, u32) = (-2, 2672, 123341154);
    let (x, y, z) = tup; // destructuring
    let second = tup.1; // accessing by index
    // - array
    // fixed length, same type
    let a = [1, 2, 3, 4, 5];
    let a: [u8; 5] = [1, 2, 3, 4, 5]; // [type; length]
    let a = [3; 5]; // [value; times] - equivalent to [3, 3, 3, 3, 3]
    let third = a[2]; // accessing by index
    // let sixth = a[5]; - ERROR: index out of bounds (does not compile)
    let index = 5;
    // let sixth = a[index]; - ERROR: index out of bounds (compiles but then panics)
}
|
//! The tokenizer module implements a nock tokenizer.
// Copyright (2017) Jeremy A. Wall.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::error;
use std::fmt;
use std::fmt::Display;
use std::convert::Into;
use std::convert::From;
use std::char;
use errors::WrappedError;
/// Token is a parsed token for a Nock Noun.
/// It includes the line and column that the token was found on.
/// All valid tokens are in the ASCII character set.
#[derive(Debug)]
pub struct Token {
    // Zero-based line of the token's first character.
    pub line: usize,
    // Zero-based column of the token's first character.
    pub col: usize,
    // The token text; grows as digits are appended for multi-char atoms.
    pub val: String,
}
impl Token {
    /// Creates a token from its first character and source position.
    pub fn new(c: char, line: usize, col: usize) -> Self {
        Token {
            line: line,
            col: col,
            val: c.to_string(),
        }
    }
    /// append_char appends a char to the value.
    pub fn append_char(&mut self, c: char) {
        self.val.push(c);
    }
    /// is_atom returns true if the token is for a valid atom,
    /// i.e. it starts with a decimal digit.
    pub fn is_atom(&self) -> bool {
        // Inspect the first *character* rather than the first raw byte:
        // equivalent for the ASCII digits, but it can never misread the lead
        // byte of a multi-byte character as a char of its own.
        self.val.chars().next().map_or(false, |c| c.is_digit(10))
    }
    /// is_cell_start returns true if the token is a cell start.
    pub fn is_cell_start(&self) -> bool {
        // Equality with "[" already implies the value is non-empty, so the
        // previous `len() > 0 &&` guard was redundant.
        self.val == "["
    }
    /// is_cell_end returns true if the token is the end of a cell.
    pub fn is_cell_end(&self) -> bool {
        self.val == "]"
    }
}
// Declares the `TokenizerError` type via the project-local error macro.
make_error!(TokenizerError, "TokenizerError: {}\n");
impl From<WrappedError> for TokenizerError {
    /// Wraps a reader failure as a tokenizer "Read Error".
    fn from(err: WrappedError) -> Self {
        Self::new_with_cause("Read Error", Box::new(err))
    }
}
/// The ExpressionReader trait represents an interface that will
/// return either a `Vec<String>` of lines for a valid nock expression,
/// or a `WrappedError`.
pub trait ExpressionReader {
    fn read(&mut self) -> Result<Vec<String>, WrappedError>;
    // FIXME(jwall): Should this support closing?
}
/// Tokenizer reads a series of tokens from an expression reader.
pub struct Tokenizer {
    // Lines of the expression currently being tokenized, if any.
    curr: Option<Vec<String>>,
    // Cursor into `curr`: current line index...
    line: usize,
    // ...and current column (byte offset) within that line.
    col: usize,
    reader: Box<ExpressionReader>,
}
impl Tokenizer {
    /// new constructs a Tokenizer from an ExpressionReader.
    pub fn new(reader: Box<ExpressionReader>) -> Self {
        Tokenizer {
            curr: None,
            line: 0,
            col: 0,
            reader: reader,
        }
    }
    /// next returns the next token or a TokenizerError.
    pub fn next(&mut self) -> Result<Token, TokenizerError> {
        try!(self.consume_reader());
        self.get_next_token()
    }
    // Pulls the next expression from the reader once the current one (if any)
    // is fully consumed, resetting the cursor to its start.
    fn consume_reader(&mut self) -> Result<(), TokenizerError> {
        let mut consume = false;
        if let Some(ref lines) = self.curr {
            if self.line >= lines.len() {
                consume = true;
            }
        } else {
            consume = true;
        }
        if consume {
            self.line = 0;
            self.col = 0;
            self.curr = Some(try!(self.reader.read()));
        }
        Ok(())
    }
    // Returns the next character together with the (line, col) it came from,
    // advancing the cursor.
    fn get_next_char(&mut self) -> Result<(char, usize, usize), TokenizerError> {
        try!(self.consume_reader());
        if let Some(ref lines) = self.curr {
            // Handle our end of line.
            if self.col >= lines[self.line].len() {
                let (line, col) = (self.line, self.col);
                self.line += 1;
                self.col = 0;
                // We synthesize a newline character to simplify parsing.
                return Ok(('\n', line, col));
            }
            // TODO(jwall): Should we cache this?
            let bytes = &lines[self.line].as_bytes();
            // Since all nock syntax is valid ascii this is a
            // safe cast to do.
            let curr_col = self.col;
            self.col += 1;
            return Ok((bytes[curr_col] as char, self.line, curr_col));
        }
        return Err(TokenizerError::new("End of stream"));
    }
    // Rewinds the column cursor by `len` characters.
    fn pushback(&mut self, len: usize) {
        // NOTE(jeremy): This is potentially unsafe but since we are in theory
        // only ever pushing back something that we have already consumed in
        // a single line this should be safe.
        self.col -= len;
    }
    // Consumes the remaining digits (and '.' separators) of an atom whose
    // first digit is already stored in `tok`.
    fn gobble_atom(&mut self, mut tok: Token) -> Result<Token, TokenizerError> {
        loop {
            // char loop
            // End of stream simply terminates the atom.
            let (c, _, _) = match self.get_next_char() {
                Ok(tpl) => tpl,
                Err(_) => return Ok(tok),
            };
            if c.is_whitespace() {
                return Ok(tok);
            }
            if c == '.' {
                // treat . as whitespace inside of an atom.
                // Currently this is pretty dumb and doesn't
                // enforce the right syntax of dotting as comma.
                // i.e. every 3 digits. This is deemed acceptable
                // for now.
                continue;
            }
            if !c.is_digit(10) {
                // Technically this case is an error but we don't emit
                // error tokens here, ever, despite what the type signature
                // states.
                self.pushback(1);
                return Ok(tok);
            }
            tok.append_char(c);
        }
    }
    // Skips whitespace and dispatches on the next significant character.
    fn get_next_token(&mut self) -> Result<Token, TokenizerError> {
        loop {
            // char loop
            let (c, line, col) = try!(self.get_next_char());
            match c {
                // open cell
                '[' => {
                    return Ok(Token::new(c, line, col));
                }
                // close cell
                ']' => {
                    return Ok(Token::new(c, line, col));
                }
                // Atom chars
                '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '.' => {
                    return self.gobble_atom(Token::new(c, line, col));
                }
                // Whitespace
                ' ' | '\t' | '\n' | '\r' => {
                    // We skip these.
                    continue;
                }
                _ => return Err(TokenizerError::new(format!("Invalid Character: '{}'", c))),
            }
        }
    }
}
#[cfg(test)]
pub mod tokenizer_tests {
    use tokenizer::{ExpressionReader, Tokenizer};
    use errors::WrappedError;
    /// Reader stub that yields its expression exactly once, then errors.
    pub struct MockReader {
        expr: Vec<String>,
        // Set after the first read so subsequent reads simulate end of stream.
        err: bool,
    }
    impl MockReader {
        pub fn new(expr: Vec<String>) -> Self {
            MockReader {
                expr: expr,
                err: false,
            }
        }
    }
    impl ExpressionReader for MockReader {
        fn read(&mut self) -> Result<Vec<String>, WrappedError> {
            if !self.err {
                self.err = true;
                return Ok(self.expr.clone());
            } else {
                return Err(WrappedError::new("End Of Stream"));
            }
        }
    }
    #[test]
    fn mock_reader_sanity_check() {
        let mut reader = MockReader::new(vec![
            "[1 2 3]".to_string(),
        ]);
        let expr = reader.read();
        assert!(expr.is_ok());
        let expr = reader.read();
        assert!(expr.is_err());
    }
    /// Drives the tokenizer, checking (value, line, col) for each expected
    /// token, then asserts the stream ends with an error.
    fn assert_token_stream(toker: &mut Tokenizer, expect: Vec<(&str, usize, usize)>) {
        for (v, l, c) in expect {
            let tok = toker.next();
            println!("tok: {:?}", tok);
            assert!(tok.is_ok());
            let tok = tok.unwrap();
            assert_eq!(tok.val, *v);
            assert_eq!(tok.line, l);
            assert_eq!(tok.col, c);
        }
        assert!(toker.next().is_err());
    }
    #[test]
    fn test_tokenizer_simple_one_liner() {
        let reader = MockReader::new(vec![
            "[1 2 3]".to_string(),
        ]);
        let boxed = Box::new(reader);
        let mut toker = Tokenizer::new(boxed);
        let expect = vec![("[", 0, 0), ("1", 0, 1), ("2", 0, 3), ("3", 0, 5), ("]", 0, 6)];
        assert_token_stream(&mut toker, expect);
    }
    #[test]
    fn test_tokenizer_simple_multi_line() {
        let reader = MockReader::new(vec![
            "[1 2 3".to_string(),
            "]".to_string(),
        ]);
        let boxed = Box::new(reader);
        let mut toker = Tokenizer::new(boxed);
        let expect = vec![("[", 0, 0), ("1", 0, 1), ("2", 0, 3), ("3", 0, 5), ("]", 1, 0)];
        assert_token_stream(&mut toker, expect);
    }
    #[test]
    fn test_tokenizer_simple_multi_char_atoms() {
        let reader = MockReader::new(vec![
            "1234567890".to_string(),
            "123 1".to_string(),
        ]);
        let boxed = Box::new(reader);
        let mut toker = Tokenizer::new(boxed);
        let expect = vec![("1234567890", 0, 0),
                          ("123", 1, 0),
                          ("1", 1, 5),
        ];
        assert_token_stream(&mut toker, expect);
    }
    #[test]
    fn test_tokenizer_simple_multi_char_atoms_with_dot() {
        // '.' separators are swallowed, so the atom value has no dots.
        let reader = MockReader::new(vec![
            "123.4567.890".to_string(),
            "123 1".to_string(),
        ]);
        let boxed = Box::new(reader);
        let mut toker = Tokenizer::new(boxed);
        let expect = vec![("1234567890", 0, 0),
                          ("123", 1, 0),
                          ("1", 1, 5),
        ];
        assert_token_stream(&mut toker, expect);
    }
}
|
extern crate text_ui;
use text_ui::app::App;
use text_ui::backend::{
color::{self, Color}, Backend,
};
use text_ui::widget::{shared, DbgDump, Line, Linear, Readline, Shared, Text};
use text_ui::{Event, Input, Key};
use std::thread;
use std::time::Duration;
/// State of the demo UI: a log pane, an optional debug side pane, and a
/// readline input, composed into a vbox widget tree.
struct DemoApp {
    // Scrollback of logged lines and submitted input.
    log: Shared<Text>,
    // Debug side panel (a DbgDump of the readline).
    side: Shared<Linear>,
    readline: Shared<Readline>,
    // Root widget returned by `widget()`.
    vbox: Shared<Linear>,
    // The hbox holding log (and, when shown, the side panel).
    outputs: Shared<Linear>,
    // Whether the side panel is currently displayed.
    show_side: bool,
}
impl DemoApp {
    /// Builds the widget tree: log + side panel side by side, separated by a
    /// vertical rule, stacked above the readline behind a horizontal rule.
    fn new() -> DemoApp {
        let log = shared(Text::new(vec![]));
        let rl = Readline::new();
        let readline = shared(rl);
        let mut sidebox = Linear::vbox();
        let dbg = shared(DbgDump::new(&readline));
        sidebox.push(&dbg);
        let side = shared(sidebox);
        let mut outbox = Linear::hbox();
        outbox.push(&log);
        outbox.push(&shared(Line::vertical()));
        outbox.push(&side);
        let outputs = shared(outbox);
        let mut mainbox = Linear::vbox();
        mainbox.push(&outputs);
        mainbox.push(&shared(Line::horizontal()));
        mainbox.push(&readline);
        let vbox = shared(mainbox);
        DemoApp {
            log,
            side,
            readline,
            vbox,
            outputs,
            show_side: true,
        }
    }
    /// Shows/hides the side panel by rebuilding the contents of `outputs`.
    fn toggle_side(&mut self) {
        let mut outputs = self.outputs.write().unwrap();
        match self.show_side {
            true => {
                self.show_side = false;
                outputs.contents.truncate(0);
                outputs.push(&self.log);
            }
            false => {
                self.show_side = true;
                outputs.contents.truncate(0);
                outputs.push(&self.log);
                outputs.push(&self.side);
            }
        }
    }
    /// Finalizes the current readline input and appends it to the log.
    fn submit_input(&mut self) {
        let mut rl = self.readline.write().unwrap();
        let line = rl.finalize();
        self.log.write().unwrap().push(line);
    }
    /// Appends a (possibly multi-line) message to the log.
    fn log_msg(&mut self, msg: &str) {
        let lines: Vec<String> = msg.lines().map(|l| l.to_owned()).collect();
        self.log.write().unwrap().lines.extend(lines);
    }
    /// Key dispatch: Enter submits, Alt+Enter inserts a literal newline into
    /// the readline, everything else is logged and forwarded to the readline.
    fn input(&mut self, key: Key) {
        match key {
            Key::Char('\n') => self.submit_input(),
            Key::Alt('\r') => self.readline.write().unwrap().process_key(Key::Char('\n')),
            k => {
                self.log_msg(&format!("{:?}", k));
                let mut rl = self.readline.write().unwrap();
                rl.process_key(k);
            }
        }
    }
}
/// Application-defined events; `Tick` is sent twice per second by the
/// background thread spawned in `main`.
#[derive(Debug, PartialEq, Clone)]
enum DemoEvent {
    Tick,
}
impl App for DemoApp {
    type UI = Shared<Linear>;
    type MyEvent = DemoEvent;
    /// The root widget is the shared vertical box built in `DemoApp::new`.
    fn widget(&self) -> Self::UI {
        self.vbox.clone()
    }
    /// Event hook: `Err(None)` terminates the app (on Esc), Alt-t toggles
    /// the sidebar, other keys are routed to `input`, and non-key input
    /// events are merely logged.
    fn handle_event(&mut self, event: Event<Self::MyEvent>) -> Result<(), Option<String>> {
        match event {
            Event::InputEvent(i) => match i {
                Input::Key(Key::Esc) => Err(None),
                Input::Key(Key::Alt('t')) => {
                    self.toggle_side();
                    Ok(())
                }
                Input::Key(k) => {
                    self.input(k);
                    Ok(())
                }
                _ => {
                    self.log_msg(&format!("{:?}", i));
                    Ok(())
                }
            },
            // App events (Tick) are deliberately ignored here.
            // NOTE(review): presumably the backend redraws after every event,
            // making the tick a periodic refresh -- confirm.
            Event::AppEvent(_) => Ok(()),
        }
    }
    /// Maps style names to (foreground, background) colors; the "input"
    /// style is rendered in light red, everything else uses defaults.
    fn style(&self, style: &str) -> (Option<Box<Color>>, Option<Box<Color>>) {
        match style {
            "input" => (Some(Box::new(color::LightRed)), None),
            _ => (None, None),
        }
    }
}
/// Demo entry point: spawns a 500 ms ticker feeding the backend's event
/// channel, then runs the app until `handle_event` returns `Err`.
fn main() {
    let be = Backend::new();
    // Clone the sender so the ticker thread owns its own channel handle.
    let myevents = be.sender.clone();
    thread::spawn(move || loop {
        myevents.send(DemoEvent::Tick).unwrap();
        thread::sleep(Duration::from_millis(500));
    });
    let mut app = DemoApp::new();
    app.log_msg("Esc to exit");
    be.run_app(&mut app);
}
|
// Copyright (c) Microsoft. All rights reserved.
extern crate bytes;
extern crate edgelet_core;
#[macro_use]
extern crate failure;
extern crate hsm;
mod certificate_properties;
mod crypto;
mod error;
pub mod tpm;
/// Validity period of the iotedged workload CA certificate, in seconds.
pub const IOTEDGED_VALIDITY: u64 = 7_776_000; // 90 days
/// Common name placed on the iotedged workload CA certificate.
pub const IOTEDGED_COMMONNAME: &str = "iotedged workload ca";
/// Identifier (alias) under which the workload CA is stored/looked up.
pub const IOTEDGED_CA: &str = "iotedged-workload-ca";
pub use crypto::{Certificate, Crypto};
pub use error::{Error, ErrorKind};
pub use tpm::{TpmKey, TpmKeyStore};
|
#![cfg(feature = "derive")]
#[macro_use] extern crate asn1_der;
use ::asn1_der::{ IntoDerObject, FromDerObject, DerObject };
#[test]
fn test() {
    // Round-trip test for the `Asn1Der` derive macro: build a nested
    // structure, serialize it, compare against a golden DER byte string,
    // then deserialize and compare with the original value.
    // Define inner struct covering every supported field type.
    #[derive(Debug, Clone, Eq, PartialEq, Asn1Der)]
    struct Inner {
        integer: u128,
        boolean: bool,
        octet_string: Vec<u8>,
        utf8_string: String,
        null: (),
        sequence: Vec<Inner>,
        der_object: DerObject
    }
    impl Inner {
        pub fn new(num: u128) -> Self {
            Inner {
                integer: num, boolean: num % 2 == 0,
                octet_string: b"Testolope (octet_string)".to_vec(),
                utf8_string: format!("Inner {}", num),
                null: (), sequence: vec![],
                // 0x14 is the T61String/Teletex tag -- an arbitrary tag to
                // exercise raw DerObject pass-through.
                der_object: DerObject::from_raw(0x14, b"Testolope (der_object)".to_vec())
            }
        }
    }
    // Define outer struct
    #[derive(Debug, Clone, Eq, PartialEq, Asn1Der)]
    struct Outer {
        utf8_string: String,
        inner: Inner
    }
    // Create inner and outer: the inner value carries two nested children.
    let inner = {
        let mut inner_0 = Inner::new(0);
        inner_0.sequence.push(Inner::new(1));
        inner_0.sequence.push(Inner::new(2));
        inner_0
    };
    let outer = Outer{ utf8_string: "Testolope".to_string(), inner };
    // Encode and compare against the hand-verified golden DER bytes.
    let encoded = {
        let mut encoded = vec![0u8; outer.serialized_len()];
        outer.clone().serialize(encoded.iter_mut()).unwrap();
        encoded
    };
    let expected = b"\x30\x81\xe2\x0c\x09\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x30\x81\xd4\x02\x01\x00\x01\x01\xff\x04\x18\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x20\x28\x6f\x63\x74\x65\x74\x5f\x73\x74\x72\x69\x6e\x67\x29\x0c\x07\x49\x6e\x6e\x65\x72\x20\x30\x05\x00\x30\x81\x8e\x30\x45\x02\x01\x01\x01\x01\x00\x04\x18\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x20\x28\x6f\x63\x74\x65\x74\x5f\x73\x74\x72\x69\x6e\x67\x29\x0c\x07\x49\x6e\x6e\x65\x72\x20\x31\x05\x00\x30\x00\x14\x16\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x20\x28\x64\x65\x72\x5f\x6f\x62\x6a\x65\x63\x74\x29\x30\x45\x02\x01\x02\x01\x01\xff\x04\x18\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x20\x28\x6f\x63\x74\x65\x74\x5f\x73\x74\x72\x69\x6e\x67\x29\x0c\x07\x49\x6e\x6e\x65\x72\x20\x32\x05\x00\x30\x00\x14\x16\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x20\x28\x64\x65\x72\x5f\x6f\x62\x6a\x65\x63\x74\x29\x14\x16\x54\x65\x73\x74\x6f\x6c\x6f\x70\x65\x20\x28\x64\x65\x72\x5f\x6f\x62\x6a\x65\x63\x74\x29".as_ref();
    assert_eq!(encoded, expected);
    // Decode the golden bytes back and check structural equality.
    let decoded = Outer::deserialize(encoded.iter()).unwrap();
    assert_eq!(decoded, outer);
}
/// An enum to represent all characters in the PhoneticExtensionsSupplement block.
///
/// Covers U+1D80..=U+1DBE; variants are declared in code-point order, which
/// the `TryFrom<u32>`-based conversions and the `Iterator` impl rely on.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum PhoneticExtensionsSupplement {
    /// \u{1d80}: 'ᶀ'
    LatinSmallLetterBWithPalatalHook,
    /// \u{1d81}: 'ᶁ'
    LatinSmallLetterDWithPalatalHook,
    /// \u{1d82}: 'ᶂ'
    LatinSmallLetterFWithPalatalHook,
    /// \u{1d83}: 'ᶃ'
    LatinSmallLetterGWithPalatalHook,
    /// \u{1d84}: 'ᶄ'
    LatinSmallLetterKWithPalatalHook,
    /// \u{1d85}: 'ᶅ'
    LatinSmallLetterLWithPalatalHook,
    /// \u{1d86}: 'ᶆ'
    LatinSmallLetterMWithPalatalHook,
    /// \u{1d87}: 'ᶇ'
    LatinSmallLetterNWithPalatalHook,
    /// \u{1d88}: 'ᶈ'
    LatinSmallLetterPWithPalatalHook,
    /// \u{1d89}: 'ᶉ'
    LatinSmallLetterRWithPalatalHook,
    /// \u{1d8a}: 'ᶊ'
    LatinSmallLetterSWithPalatalHook,
    /// \u{1d8b}: 'ᶋ'
    LatinSmallLetterEshWithPalatalHook,
    /// \u{1d8c}: 'ᶌ'
    LatinSmallLetterVWithPalatalHook,
    /// \u{1d8d}: 'ᶍ'
    LatinSmallLetterXWithPalatalHook,
    /// \u{1d8e}: 'ᶎ'
    LatinSmallLetterZWithPalatalHook,
    /// \u{1d8f}: 'ᶏ'
    LatinSmallLetterAWithRetroflexHook,
    /// \u{1d90}: 'ᶐ'
    LatinSmallLetterAlphaWithRetroflexHook,
    /// \u{1d91}: 'ᶑ'
    LatinSmallLetterDWithHookAndTail,
    /// \u{1d92}: 'ᶒ'
    LatinSmallLetterEWithRetroflexHook,
    /// \u{1d93}: 'ᶓ'
    LatinSmallLetterOpenEWithRetroflexHook,
    /// \u{1d94}: 'ᶔ'
    LatinSmallLetterReversedOpenEWithRetroflexHook,
    /// \u{1d95}: 'ᶕ'
    LatinSmallLetterSchwaWithRetroflexHook,
    /// \u{1d96}: 'ᶖ'
    LatinSmallLetterIWithRetroflexHook,
    /// \u{1d97}: 'ᶗ'
    LatinSmallLetterOpenOWithRetroflexHook,
    /// \u{1d98}: 'ᶘ'
    LatinSmallLetterEshWithRetroflexHook,
    /// \u{1d99}: 'ᶙ'
    LatinSmallLetterUWithRetroflexHook,
    /// \u{1d9a}: 'ᶚ'
    LatinSmallLetterEzhWithRetroflexHook,
    /// \u{1d9b}: 'ᶛ'
    ModifierLetterSmallTurnedAlpha,
    /// \u{1d9c}: 'ᶜ'
    ModifierLetterSmallC,
    /// \u{1d9d}: 'ᶝ'
    ModifierLetterSmallCWithCurl,
    /// \u{1d9e}: 'ᶞ'
    ModifierLetterSmallEth,
    /// \u{1d9f}: 'ᶟ'
    ModifierLetterSmallReversedOpenE,
    /// \u{1da0}: 'ᶠ'
    ModifierLetterSmallF,
    /// \u{1da1}: 'ᶡ'
    ModifierLetterSmallDotlessJWithStroke,
    /// \u{1da2}: 'ᶢ'
    ModifierLetterSmallScriptG,
    /// \u{1da3}: 'ᶣ'
    ModifierLetterSmallTurnedH,
    /// \u{1da4}: 'ᶤ'
    ModifierLetterSmallIWithStroke,
    /// \u{1da5}: 'ᶥ'
    ModifierLetterSmallIota,
    /// \u{1da6}: 'ᶦ'
    ModifierLetterSmallCapitalI,
    /// \u{1da7}: 'ᶧ'
    ModifierLetterSmallCapitalIWithStroke,
    /// \u{1da8}: 'ᶨ'
    ModifierLetterSmallJWithCrossedDashTail,
    /// \u{1da9}: 'ᶩ'
    ModifierLetterSmallLWithRetroflexHook,
    /// \u{1daa}: 'ᶪ'
    ModifierLetterSmallLWithPalatalHook,
    /// \u{1dab}: 'ᶫ'
    ModifierLetterSmallCapitalL,
    /// \u{1dac}: 'ᶬ'
    ModifierLetterSmallMWithHook,
    /// \u{1dad}: 'ᶭ'
    ModifierLetterSmallTurnedMWithLongLeg,
    /// \u{1dae}: 'ᶮ'
    ModifierLetterSmallNWithLeftHook,
    /// \u{1daf}: 'ᶯ'
    ModifierLetterSmallNWithRetroflexHook,
    /// \u{1db0}: 'ᶰ'
    ModifierLetterSmallCapitalN,
    /// \u{1db1}: 'ᶱ'
    ModifierLetterSmallBarredO,
    /// \u{1db2}: 'ᶲ'
    ModifierLetterSmallPhi,
    /// \u{1db3}: 'ᶳ'
    ModifierLetterSmallSWithHook,
    /// \u{1db4}: 'ᶴ'
    ModifierLetterSmallEsh,
    /// \u{1db5}: 'ᶵ'
    ModifierLetterSmallTWithPalatalHook,
    /// \u{1db6}: 'ᶶ'
    ModifierLetterSmallUBar,
    /// \u{1db7}: 'ᶷ'
    ModifierLetterSmallUpsilon,
    /// \u{1db8}: 'ᶸ'
    ModifierLetterSmallCapitalU,
    /// \u{1db9}: 'ᶹ'
    ModifierLetterSmallVWithHook,
    /// \u{1dba}: 'ᶺ'
    ModifierLetterSmallTurnedV,
    /// \u{1dbb}: 'ᶻ'
    ModifierLetterSmallZ,
    /// \u{1dbc}: 'ᶼ'
    ModifierLetterSmallZWithRetroflexHook,
    /// \u{1dbd}: 'ᶽ'
    ModifierLetterSmallZWithCurl,
    /// \u{1dbe}: 'ᶾ'
    ModifierLetterSmallEzh,
}
/// Converts the variant back to its `char` (inverse of `TryFrom<char>`).
///
/// NOTE(review): implementing `From<PhoneticExtensionsSupplement> for char`
/// would be more idiomatic (the blanket impl then provides `Into` for free);
/// left as-is here since this file appears to be generated code.
impl Into<char> for PhoneticExtensionsSupplement {
    fn into(self) -> char {
        match self {
            PhoneticExtensionsSupplement::LatinSmallLetterBWithPalatalHook => 'ᶀ',
            PhoneticExtensionsSupplement::LatinSmallLetterDWithPalatalHook => 'ᶁ',
            PhoneticExtensionsSupplement::LatinSmallLetterFWithPalatalHook => 'ᶂ',
            PhoneticExtensionsSupplement::LatinSmallLetterGWithPalatalHook => 'ᶃ',
            PhoneticExtensionsSupplement::LatinSmallLetterKWithPalatalHook => 'ᶄ',
            PhoneticExtensionsSupplement::LatinSmallLetterLWithPalatalHook => 'ᶅ',
            PhoneticExtensionsSupplement::LatinSmallLetterMWithPalatalHook => 'ᶆ',
            PhoneticExtensionsSupplement::LatinSmallLetterNWithPalatalHook => 'ᶇ',
            PhoneticExtensionsSupplement::LatinSmallLetterPWithPalatalHook => 'ᶈ',
            PhoneticExtensionsSupplement::LatinSmallLetterRWithPalatalHook => 'ᶉ',
            PhoneticExtensionsSupplement::LatinSmallLetterSWithPalatalHook => 'ᶊ',
            PhoneticExtensionsSupplement::LatinSmallLetterEshWithPalatalHook => 'ᶋ',
            PhoneticExtensionsSupplement::LatinSmallLetterVWithPalatalHook => 'ᶌ',
            PhoneticExtensionsSupplement::LatinSmallLetterXWithPalatalHook => 'ᶍ',
            PhoneticExtensionsSupplement::LatinSmallLetterZWithPalatalHook => 'ᶎ',
            PhoneticExtensionsSupplement::LatinSmallLetterAWithRetroflexHook => 'ᶏ',
            PhoneticExtensionsSupplement::LatinSmallLetterAlphaWithRetroflexHook => 'ᶐ',
            PhoneticExtensionsSupplement::LatinSmallLetterDWithHookAndTail => 'ᶑ',
            PhoneticExtensionsSupplement::LatinSmallLetterEWithRetroflexHook => 'ᶒ',
            PhoneticExtensionsSupplement::LatinSmallLetterOpenEWithRetroflexHook => 'ᶓ',
            PhoneticExtensionsSupplement::LatinSmallLetterReversedOpenEWithRetroflexHook => 'ᶔ',
            PhoneticExtensionsSupplement::LatinSmallLetterSchwaWithRetroflexHook => 'ᶕ',
            PhoneticExtensionsSupplement::LatinSmallLetterIWithRetroflexHook => 'ᶖ',
            PhoneticExtensionsSupplement::LatinSmallLetterOpenOWithRetroflexHook => 'ᶗ',
            PhoneticExtensionsSupplement::LatinSmallLetterEshWithRetroflexHook => 'ᶘ',
            PhoneticExtensionsSupplement::LatinSmallLetterUWithRetroflexHook => 'ᶙ',
            PhoneticExtensionsSupplement::LatinSmallLetterEzhWithRetroflexHook => 'ᶚ',
            PhoneticExtensionsSupplement::ModifierLetterSmallTurnedAlpha => 'ᶛ',
            PhoneticExtensionsSupplement::ModifierLetterSmallC => 'ᶜ',
            PhoneticExtensionsSupplement::ModifierLetterSmallCWithCurl => 'ᶝ',
            PhoneticExtensionsSupplement::ModifierLetterSmallEth => 'ᶞ',
            PhoneticExtensionsSupplement::ModifierLetterSmallReversedOpenE => 'ᶟ',
            PhoneticExtensionsSupplement::ModifierLetterSmallF => 'ᶠ',
            PhoneticExtensionsSupplement::ModifierLetterSmallDotlessJWithStroke => 'ᶡ',
            PhoneticExtensionsSupplement::ModifierLetterSmallScriptG => 'ᶢ',
            PhoneticExtensionsSupplement::ModifierLetterSmallTurnedH => 'ᶣ',
            PhoneticExtensionsSupplement::ModifierLetterSmallIWithStroke => 'ᶤ',
            PhoneticExtensionsSupplement::ModifierLetterSmallIota => 'ᶥ',
            PhoneticExtensionsSupplement::ModifierLetterSmallCapitalI => 'ᶦ',
            PhoneticExtensionsSupplement::ModifierLetterSmallCapitalIWithStroke => 'ᶧ',
            PhoneticExtensionsSupplement::ModifierLetterSmallJWithCrossedDashTail => 'ᶨ',
            PhoneticExtensionsSupplement::ModifierLetterSmallLWithRetroflexHook => 'ᶩ',
            PhoneticExtensionsSupplement::ModifierLetterSmallLWithPalatalHook => 'ᶪ',
            PhoneticExtensionsSupplement::ModifierLetterSmallCapitalL => 'ᶫ',
            PhoneticExtensionsSupplement::ModifierLetterSmallMWithHook => 'ᶬ',
            PhoneticExtensionsSupplement::ModifierLetterSmallTurnedMWithLongLeg => 'ᶭ',
            PhoneticExtensionsSupplement::ModifierLetterSmallNWithLeftHook => 'ᶮ',
            PhoneticExtensionsSupplement::ModifierLetterSmallNWithRetroflexHook => 'ᶯ',
            PhoneticExtensionsSupplement::ModifierLetterSmallCapitalN => 'ᶰ',
            PhoneticExtensionsSupplement::ModifierLetterSmallBarredO => 'ᶱ',
            PhoneticExtensionsSupplement::ModifierLetterSmallPhi => 'ᶲ',
            PhoneticExtensionsSupplement::ModifierLetterSmallSWithHook => 'ᶳ',
            PhoneticExtensionsSupplement::ModifierLetterSmallEsh => 'ᶴ',
            PhoneticExtensionsSupplement::ModifierLetterSmallTWithPalatalHook => 'ᶵ',
            PhoneticExtensionsSupplement::ModifierLetterSmallUBar => 'ᶶ',
            PhoneticExtensionsSupplement::ModifierLetterSmallUpsilon => 'ᶷ',
            PhoneticExtensionsSupplement::ModifierLetterSmallCapitalU => 'ᶸ',
            PhoneticExtensionsSupplement::ModifierLetterSmallVWithHook => 'ᶹ',
            PhoneticExtensionsSupplement::ModifierLetterSmallTurnedV => 'ᶺ',
            PhoneticExtensionsSupplement::ModifierLetterSmallZ => 'ᶻ',
            PhoneticExtensionsSupplement::ModifierLetterSmallZWithRetroflexHook => 'ᶼ',
            PhoneticExtensionsSupplement::ModifierLetterSmallZWithCurl => 'ᶽ',
            PhoneticExtensionsSupplement::ModifierLetterSmallEzh => 'ᶾ',
        }
    }
}
/// Maps a `char` to the corresponding variant; `Err(())` for any character
/// outside the PhoneticExtensionsSupplement block.
impl std::convert::TryFrom<char> for PhoneticExtensionsSupplement {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            'ᶀ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterBWithPalatalHook),
            'ᶁ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterDWithPalatalHook),
            'ᶂ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterFWithPalatalHook),
            'ᶃ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterGWithPalatalHook),
            'ᶄ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterKWithPalatalHook),
            'ᶅ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterLWithPalatalHook),
            'ᶆ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterMWithPalatalHook),
            'ᶇ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterNWithPalatalHook),
            'ᶈ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterPWithPalatalHook),
            'ᶉ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterRWithPalatalHook),
            'ᶊ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterSWithPalatalHook),
            'ᶋ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterEshWithPalatalHook),
            'ᶌ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterVWithPalatalHook),
            'ᶍ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterXWithPalatalHook),
            'ᶎ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterZWithPalatalHook),
            'ᶏ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterAWithRetroflexHook),
            'ᶐ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterAlphaWithRetroflexHook),
            'ᶑ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterDWithHookAndTail),
            'ᶒ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterEWithRetroflexHook),
            'ᶓ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterOpenEWithRetroflexHook),
            'ᶔ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterReversedOpenEWithRetroflexHook),
            'ᶕ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterSchwaWithRetroflexHook),
            'ᶖ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterIWithRetroflexHook),
            'ᶗ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterOpenOWithRetroflexHook),
            'ᶘ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterEshWithRetroflexHook),
            'ᶙ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterUWithRetroflexHook),
            'ᶚ' => Ok(PhoneticExtensionsSupplement::LatinSmallLetterEzhWithRetroflexHook),
            'ᶛ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallTurnedAlpha),
            'ᶜ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallC),
            'ᶝ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallCWithCurl),
            'ᶞ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallEth),
            'ᶟ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallReversedOpenE),
            'ᶠ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallF),
            'ᶡ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallDotlessJWithStroke),
            'ᶢ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallScriptG),
            'ᶣ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallTurnedH),
            'ᶤ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallIWithStroke),
            'ᶥ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallIota),
            'ᶦ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallCapitalI),
            'ᶧ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallCapitalIWithStroke),
            'ᶨ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallJWithCrossedDashTail),
            'ᶩ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallLWithRetroflexHook),
            'ᶪ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallLWithPalatalHook),
            'ᶫ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallCapitalL),
            'ᶬ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallMWithHook),
            'ᶭ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallTurnedMWithLongLeg),
            'ᶮ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallNWithLeftHook),
            'ᶯ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallNWithRetroflexHook),
            'ᶰ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallCapitalN),
            'ᶱ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallBarredO),
            'ᶲ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallPhi),
            'ᶳ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallSWithHook),
            'ᶴ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallEsh),
            'ᶵ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallTWithPalatalHook),
            'ᶶ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallUBar),
            'ᶷ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallUpsilon),
            'ᶸ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallCapitalU),
            'ᶹ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallVWithHook),
            'ᶺ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallTurnedV),
            'ᶻ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallZ),
            'ᶼ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallZWithRetroflexHook),
            'ᶽ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallZWithCurl),
            'ᶾ' => Ok(PhoneticExtensionsSupplement::ModifierLetterSmallEzh),
            _ => Err(()),
        }
    }
}
/// Returns the Unicode code point of the character this variant represents.
impl Into<u32> for PhoneticExtensionsSupplement {
    fn into(self) -> u32 {
        // A `char` is by definition a Unicode scalar value, so a plain cast
        // yields the code point directly. The previous implementation
        // round-tripped through the `escape_unicode` string form
        // ("\u{1d80}"), stripped the markers, and re-parsed the hex -- three
        // allocations and a fallible parse for what `as u32` does exactly.
        let c: char = self.into();
        c as u32
    }
}
/// Maps a Unicode code point to the corresponding block character, failing
/// for invalid scalar values and for code points outside the block.
impl std::convert::TryFrom<u32> for PhoneticExtensionsSupplement {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        // First validate the scalar value, then delegate to the char-based
        // conversion; both failure paths collapse to the unit error.
        char::try_from(u)
            .map_err(|_| ())
            .and_then(|c| Self::try_from(c))
    }
}
/// Iterates through the remaining characters of the block in code-point
/// order, starting just after the current variant.
impl Iterator for PhoneticExtensionsSupplement {
    type Item = Self;
    /// Advances to and yields the next character in the block, or `None`
    /// once the end of the block is reached.
    ///
    /// The previous implementation computed the successor but never updated
    /// `*self`, so a `for` loop would yield the same successor forever.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let index: u32 = (*self).into();
        // One past the last block character fails `try_from`, ending iteration.
        let succ = Self::try_from(index + 1).ok()?;
        *self = succ;
        Some(succ)
    }
}
impl PhoneticExtensionsSupplement {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        PhoneticExtensionsSupplement::LatinSmallLetterBWithPalatalHook
    }
    /// The character's name, in sentence case
    ///
    /// Builds the name by prefixing the block name to the variant's `Debug`
    /// representation, then sentence-cases it via the `string_morph` crate.
    pub fn name(&self) -> String {
        let s = std::format!("PhoneticExtensionsSupplement{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
|
#![allow(dead_code)]
mod lexer;
mod parser;
// use lexer;
// use parser;
// use std;
// use parser;
use std::collections::VecDeque;
use std::collections::HashMap;
use parser::AST;
use lexer::Token;
/// Drawing surface abstraction for the turtle interpreter.
pub trait Graphics {
    /// Draws a line from p1 to p2 using window center as origin point (0, 0), and
    /// having the x-axis grow left->right, and y-axis down->up.
    fn line(&mut self, p1: (f32, f32), p2: (f32, f32));
    /// Clears the screen.
    fn clearscreen(&mut self);
}
/// A `Graphics` implementation that records commands as strings instead of
/// drawing -- useful for tests and headless runs.
struct NullGraphics {
    // Human-readable log of every command issued, in order.
    commands: Vec<String>,
}
impl Graphics for NullGraphics {
    /// Records the line command as "line x1,y1 x2,y2" instead of drawing.
    fn line(&mut self, p1: (f32, f32), p2: (f32, f32)) {
        self.commands.push(format!("line {},{} {},{}", p1.0, p1.1, p2.0, p2.1));
    }
    /// Records the clearscreen command.
    fn clearscreen(&mut self) {
        // `format!` with no interpolation is just a roundabout `to_string`
        // (clippy::useless_format).
        self.commands.push("clearscreen".to_string());
    }
}
/// Classic Logo turtle: a position and a heading, starting at the origin.
#[derive(Default, Debug)]
pub struct Turtle {
    heading: f32, // 0 .. 359 degrees
    x: f32,
    y: f32,
}
impl Turtle {
pub fn new() -> Turtle {
Turtle { ..Default::default() }
}
fn fd(&mut self, val: f32, graphics: &mut Graphics) {
let phi = (self.heading + 90.0) * std::f32::consts::PI / 180.0;
let new_x = self.x + val * phi.cos();
let new_y = self.y + val * phi.sin();
graphics.line((self.x, self.y), (new_x, new_y));
self.x = new_x;
self.y = new_y;
}
fn bk(&mut self, val: f32, graphics: &mut Graphics) {
self.fd(-val, graphics);
}
fn lt(&mut self, val: f32) {
// TODO: Clamp the heading perhaps to only [0, 360).
self.heading += val;
}
fn rt(&mut self, val: f32) {
self.lt(-val);
}
}
/// Walks the parsed AST, holding the interpreter state.
struct Evaluator {
    // Turtle state driven by motion commands (fd/bk/lt/rt).
    turtle: Turtle,
    // Logo name -> bound value, stored as an AST node.
    vars: HashMap<String, AST>,
}
impl Evaluator {
    /// Creates an evaluator with a fresh turtle and no bound names.
    fn new() -> Self {
        Self {
            turtle: Turtle::new(),
            vars: HashMap::new(),
        }
    }
    // type ExprList = Vec<AST>;
    // type ExprLines = Vec<ExprList>;
    // type ListType = VecDeque<AST>
    // #[derive(Debug, Clone, PartialEq)]
    // pub enum AST {
    //     Prefix(Token, ExprList), // Prefix style arithmetic operations, ie. + 3 5 = 8.
    //     Function(String, ExprList), // name, arguments and rest
    // TODO: Eval number, or eval float/int, what should be the return type?
    /// Evaluates `ast_node` and requires the result to be a float; used for
    /// operands of arithmetic operators.
    fn eval_number(&mut self, ast_node: &AST) -> Result<f32, String> {
        let evaluated_node = self.eval(ast_node)?;
        match evaluated_node {
            Some(AST::Float(float)) => {
                Ok(float)
            },
            _ => {
                Err(format!("Expr doesn't evaluate to a number {:?}", ast_node))
            }
        }
    }
    /// Recursively evaluates one AST node. Returns `Ok(Some(value))` for
    /// value-producing expressions, `Ok(None)` for ones that produce
    /// nothing, and `Err` with a user-facing message on evaluation errors.
    fn eval(&mut self, ast_node: &AST) -> Result<Option<AST>, String> {
        let mut ret = None;
        match ast_node {
            // TODO: ExprList(Operator/Function...) where eats all
            AST::ExprLine(expr_list) | AST::ExprList(expr_list) => {
                // The value of a line/list is the value of its last
                // value-producing sub-expression.
                for expr in expr_list {
                    let result = self.eval(expr)?;
                    if result.is_some() {
                        ret = result;
                    }
                }
            },
            AST::Var(var_name) => {
                match self.vars.get(var_name) {
                    Some(ast) => {
                        ret = Some(ast.clone());
                    },
                    None => {
                        return Err(format!(":{} is not a Logo name.", var_name));
                    }
                }
            },
            AST::Float(float) => {
                ret = Some(AST::Float(*float));
            },
            AST::List(list) => {
                // TODO: Try to get rid of this clone somehow.
                ret = Some(AST::List(list.clone()));
            },
            AST::Word(string) => {
                ret = Some(AST::Word(string.clone()));
            },
            AST::Unary(Token::Negation, box_operand) => {
                let operand = self.eval_number(box_operand)?;
                ret = Some(AST::Float(-operand));
            },
            // TODO: Need to implement all Binary operators.
            AST::Binary(operator, left_box, right_box) => {
                // Both operands must evaluate to floats before applying the
                // operator.
                let left = self.eval_number(left_box)?;
                let right = self.eval_number(right_box)?;
                let result = match operator {
                    Token::Plus => { left + right },
                    Token::Minus => { left - right },
                    Token::Multiply => { left * right },
                    Token::Divide => { left / right },
                    _ => {
                        panic!("Unknown binary operator {:?}", operator);
                    }
                };
                ret = Some(AST::Float(result));
            },
            AST::Prefix(_operator, _expr_list) => {
                println!("Unimplemented prefix operators");
            },
            _x => {
                println!("Unimplemented eval AST {:?}", _x);
            }
        }
        return Ok(ret);
    }
    /// Tokenizes, parses, and evaluates one line of user input, logging each
    /// stage to stdout.
    fn feed(&mut self, input: &str) {
        println!("{:?}", input);
        let tokens;
        // TODO: Don't do any parsing as long as tokens end on LineCont.
        // TODO: Don't call into lexer directly, parser uses the lexer.
        match lexer::process(input) {
            Ok(val) => tokens = val,
            Err(err) => { println!("Tokenizing error: {:?}", err); return; }
        }
        let mut queue: VecDeque<lexer::Token> = tokens.into_iter().collect();
        println!("{:?}", queue);
        let ast;
        match parser::parse_line(&mut queue) {
            Ok(val) => {
                ast = val;
                println!("{:?}", ast);
                // rek_print(&val, "".to_string());
            },
            Err(err) => {
                println!("Parsing error: {:?}", err);
                return;
            },
        }
        // NOTE(review): the expression is evaluated (and printed) twice --
        // looks like leftover debugging; confirm before removing, since any
        // future eval side effects would run twice per input line.
        println!("Eval: {:?}", self.eval(&ast));
        println!("Eval: {:?}", self.eval(&ast));
    }
}
/// Minimal REPL: read a line from stdin, feed it through the evaluator, repeat.
fn main() {
    // 1 + (2 * (3 + 4 * -5) + -6 * -(-7 + -8)) * 9
    let mut evaluator = Evaluator::new();
    loop {
        let mut input = String::new();
        // NOTE(review): `read_line` returns Ok(0) at EOF, so after input is
        // exhausted this loops forever feeding empty strings -- presumably
        // acceptable for interactive use only; confirm.
        std::io::stdin().read_line(&mut input).unwrap();
        // pratt_parse_debug(input.trim());
        evaluator.feed(&input);
    }
}
// pub fn exec_command(&mut self, command: &parser::Command, graphics: &mut Graphics) {
// match *command {
// parser::Command::Fd(val) => self.fd(val, graphics),
// parser::Command::Bk(val) => self.bk(val, graphics),
// parser::Command::Lt(val) => self.lt(val),
// parser::Command::Rt(val) => self.rt(val),
// parser::Command::Cs => graphics.clearscreen(),
// parser::Command::Repeat(cnt, ref boxed_command) => {
// for _ in 0 .. cnt {
// self.exec_command(boxed_command, graphics);
// }
// },
// parser::Command::Block(ref block_commands) => {
// for command in block_commands.iter() {
// self.exec_command(command, graphics);
// }
// },
// _ => (),
// }
// }
|
use crate::{browser::Resources, html::Html, platform, render, Cmd, Sub};
use std::cell::RefCell;
use std::fmt::Debug;
use std::rc::Rc;
use wasm_bindgen::{JsCast, JsValue};
pub struct Program<Model, Msg> {
pub model: RefCell<Model>,
pub view: Box<Fn(&Model) -> Html<Msg>>,
pub update: Box<Fn(&Msg, &mut Model) -> Box<Cmd<Msg>>>,
pub last_tree: RefCell<Option<Html<Msg>>>,
pub browser: Resources,
pub root: web_sys::Node,
}
impl<Model, Msg> Program<Model, Msg>
where
    Model: Debug + Clone + 'static,
    Msg: PartialEq + Debug + Clone + 'static,
{
    /// Builds a program from init/view/update closures. The root starts out
    /// as a detached document fragment until `init` attaches it to the DOM.
    fn new<InitFn, ViewFn, UpdateFn>(init: InitFn, view: ViewFn, update: UpdateFn) -> Self
    where
        InitFn: FnOnce() -> (Model, Box<Cmd<Msg>>),
        ViewFn: Fn(&Model) -> Html<Msg> + 'static,
        UpdateFn: Fn(&Msg, &mut Model) -> Box<Cmd<Msg>> + 'static,
    {
        // NOTE(review): the command returned by `init` is discarded here --
        // confirm whether it is meant to run at startup.
        let (model, _) = init();
        let browser = Resources::new().unwrap();
        let root = browser.document.create_document_fragment();
        Self {
            model: RefCell::new(model),
            view: Box::new(view),
            update: Box::new(update),
            last_tree: RefCell::new(None),
            browser,
            root: root.dyn_into().unwrap(),
        }
    }
    /// Applies `message` to a clone of the model, swaps the new model in,
    /// re-renders, then runs the command produced by `update`.
    pub fn dispatch(program: &Rc<Self>, message: &Msg) -> Result<(), JsValue> {
        let mut model = program.model.borrow().clone();
        let cmd = (program.update)(message, &mut model);
        program.model.replace(model);
        Program::render(program)?;
        cmd.run()
    }
    /// Renders the current model, diffing against the previously rendered
    /// tree, then stores the new tree for the next diff.
    fn render(program: &Rc<Self>) -> Result<(), JsValue> {
        let tree = (program.view)(&program.model.borrow());
        render::Renderer::render(&program.root, program, &tree, &program.last_tree.borrow())?;
        program.last_tree.replace(Some(tree));
        Ok(())
    }
    /// Installs panic/log hooks, attaches the program to the element matched
    /// by `selector`, and performs the first render.
    pub fn init(mut self, selector: &str) -> Result<(), JsValue> {
        console_error_panic_hook::set_once();
        wasm_logger::init(wasm_logger::Config::new(log::Level::Debug));
        self.root = self
            .browser
            .document
            .query_selector(selector)
            .expect("did not find element")
            .expect("did not find element")
            .dyn_into()?;
        let program = Rc::new(self);
        Program::render(&program)
    }
}
/// Creates a `Program` with no commands and no subscriptions: `view` renders
/// the model and `update` mutates it in place (Elm's "sandbox" shape).
pub fn sandbox<Model, Msg, ViewFn, UpdateFn>(
    init: Model,
    view: ViewFn,
    update: UpdateFn,
) -> Program<Model, Msg>
where
    Model: Debug + Clone + 'static,
    Msg: PartialEq + Debug + Clone + 'static,
    ViewFn: Fn(&Model) -> Html<Msg> + 'static,
    UpdateFn: Fn(&Msg, &mut Model) + 'static,
{
    Program::new(
        // Adapt the command-less model/update into the command-producing
        // shape `Program::new` expects, using the no-op `platform::None`.
        move || (init, Box::new(platform::None)),
        view,
        move |msg, model| {
            update(msg, model);
            Box::new(platform::None)
        },
    )
}
/// Creates a full `Program` with an init command and a subscriptions hook.
///
/// NOTE(review): the `subscriptions` argument is accepted but never used or
/// stored, so subscriptions are effectively ignored -- confirm whether
/// wiring them up is still pending.
pub fn element<
    Model,
    Msg,
    InitCmd,
    InitFn,
    ViewFn,
    UpdateCmd,
    UpdateFn,
    SubscriptionsSub,
    SubscriptionsFn,
>(
    init: InitFn,
    view: ViewFn,
    update: UpdateFn,
    subscriptions: SubscriptionsFn,
) -> Program<Model, Msg>
where
    Model: Debug + Clone + 'static,
    Msg: PartialEq + Debug + Clone + 'static,
    InitCmd: Cmd<Msg> + 'static,
    InitFn: FnOnce() -> (Model, InitCmd),
    ViewFn: Fn(&Model) -> Html<Msg> + 'static,
    UpdateCmd: Cmd<Msg> + 'static,
    UpdateFn: Fn(&Msg, &mut Model) -> UpdateCmd + 'static,
    SubscriptionsSub: Sub<Msg> + 'static,
    SubscriptionsFn: Fn(&Model) -> SubscriptionsSub,
{
    Program::new(
        // Box the concrete command types so they fit Program's dyn fields.
        move || {
            let (model, cmd) = init();
            (model, Box::new(cmd))
        },
        view,
        move |msg, model| Box::new(update(msg, model)),
    )
}
|
/*
---Find the First M Multiples of N---
Implement a function, multiples(m, n), which returns an array of the first m multiples of the real
number n. Assume that m is a positive integer.
Ex.
multiples(3, 5.0)
should return
[5.0, 10.0, 15.0]
*/
/// Demo entry point: print the first three multiples of 5.0.
fn main() {
    let result = multiples(3, 5.0);
    println!("{:?}", result);
}
/// Returns the first `m` multiples of `n`: `[n, 2n, ..., m*n]`.
/// Returns an empty vector for `m <= 0`.
///
/// The multiples are produced by a running sum (`scan`), matching the
/// original accumulation exactly -- including its floating-point rounding --
/// rather than computing `i as f64 * n`, which could differ in the last bit.
fn multiples(m: i32, n: f64) -> Vec<f64> {
    (0..m)
        .scan(0.0, |acc, _| {
            *acc += n;
            Some(*acc)
        })
        .collect()
}
/// Walkthrough of Rust scoping, ownership, and borrowing basics; each helper
/// prints a labelled section to stdout.
fn main() {
    stack_scopes();
    pvar_vs_fvars();
    heap_scopes();
    println!("\nOWNERSHIP");
    let s1 = gives_ownership();
    takes_ownership(s1);
    // s1 is no longer available here
    let mut s2 = String::from("hi");
    s2 = takes_and_gives_ownership(s2);
    println!("{}", s2);
    println!("\nBORROWING");
    // It would be pretty annoying if we needed to return any argument
    // that we wanted to continue using in the parent function. The way around this
    // is to pass a reference to an object. This allows a function to refer to an object
    // without taking ownership of it. This is sometimes called borrowing.
    // Think of this reference just as you would think of a pointer - the syntax is the same as
    // C: & to get the reference (the address), * to dereference.
    let mut s2 = String::from("hi again");
    let s2_len = get_len(&s2);
    println!("Length of '{}' is {}", s2, s2_len);
    // By default references are immutable (you can't change what is being referred to).
    // But you can also create (one) mutable reference. You can't create more than one else
    // weird things might happen if they concurrently try to change things! You can create as many
    // immutable references as you like. But you can't have both immutable and a mutable reference.
    // Whew!
    cat(&mut s2, &String::from(" and again"));
    println!("{}", s2);
}
// Returns the length of the string in bytes.
// Taking `&str` instead of `&String` is the idiomatic parameter type: it
// accepts both `&String` (via deref coercion, so the existing caller is
// unchanged) and plain string slices.
fn get_len(s: &str) -> usize {
    s.len()
} // s goes out of scope here, but what s points to does not.
// Borrow s1 as mutable. Append s2 to it.
// `&str` is preferred over `&String` for read-only string parameters; a
// `&String` argument still coerces, so the existing caller is unaffected.
fn cat(s1: &mut String, s2: &str) {
    s1.push_str(s2)
}
/// Allocates a String on the heap and moves ownership out to the caller.
fn gives_ownership() -> String {
    "hello".to_string()
}
/// Consumes `s1` and prints it; the String is dropped when this returns.
fn takes_ownership(s1: String) {
    let owned = s1;
    println!("{}", owned);
} // s1 is deallocated here because it is no longer in scope!
/// Takes ownership of a String and immediately hands it back to the caller.
fn takes_and_gives_ownership(s1: String) -> String {
    let handed_back = s1;
    handed_back
} // s1 is given back to the calling function and so is still in scope.
/// Demonstrates heap-allocated, growable strings and their scope.
fn heap_scopes() {
    println!("\nHEAP_SCOPES");
    // `String` lives on the heap because its length can change at runtime --
    // unlike string literals, which have a fixed length known at compile time.
    let mut s = String::from("hello");
    println!("{}", s);
    s += ", world!";
    println!("{}", s);
}
// Pointer variables vs fixed variables
// I'm not entirely sure what to call this. I think it could also be,
// heap vs stack allocated variables. But basically, some variables store
// the value (fvars) and others store a pointer to a value. (Think python
// lists vs ints).
fn pvar_vs_fvars() {
    println!("\nPVARS_VS_FVARS");
    let mut x = 1;
    let y = x;
    x = x + 1;
    // Numbers are allocated on the stack and so y is a copy of x
    println!("x is {}, y is still {}", x, y);
    let s1 = String::from("hello");
    // This assignment *moves* s1 into s2 -- any later use of s1 would be a
    // compile error.
    let mut s2 = s1;
    // The string type is really a struct with a len, capacity and a pointer to data
    // So both s1 and s2 point to the same data, *but who owns it*?
    // To combat this, s1 is *invalidated*. You can't use it, can't ever read from it.
    println!("s2 is {}, s1 has been invalidated", s2);
    s2.push_str("!");
    println!("s2 is {}, s1 has (still) been invalidated", s2);
}
// This is simple because all the variables are defined on the stack.
// They just get popped off when they are no longer in scope.
fn stack_scopes() {
    println!("\nSTACK_SCOPES");
    // x is not yet in scope here - it hasn't been defined!
    // Let's bring x into scope
    let x = 2;
    println!("{}: x in scope", x);
    {
        println!("{}: x still in scope in a sub-block", x);
        // y only lives until the end of this inner block.
        let y = 3;
        println!("{}: y in scope as it is defined in a sub-block", y);
    }
    // Actually rust's scopes are more detailed than block scope - as
    // x is not used below here, it is already out of scope! Pretty cool.
    println!("But y is not in scope here! Rust is block scoped");
    // This will generate a compiler error.
    // println!("{}: y in scope as it is defined in a sub-block", y);
}
|
use std::fmt::Display;
use std::path::Path;
use colored::Colorize;
pub fn error(err: impl Display) {
eprintln!("{} {}", "error:".red().bold(), err);
}
/// Prints `msg` to stdout with a bold green "success:" prefix.
pub fn success(msg: impl Display) {
    let tag = "success:".green().bold();
    println!("{} {}", tag, msg);
}
/// Prints `msg` to stdout with a bold blue "info:" prefix.
pub fn info(msg: impl Display) {
    let tag = "info:".blue().bold();
    println!("{} {}", tag, msg);
}
/// Renders `path` for display, abbreviating the user's home directory as `~`.
///
/// The previous component-zipping check also "matched" when `path` was a
/// strict *prefix* of the home directory (e.g. `/home` with home
/// `/home/user`), wrongly rendering it as `~`, because `zip` stops at the
/// shorter sequence. `Path::strip_prefix` succeeds only when the home
/// directory is genuinely a prefix of `path`.
pub fn format_path(path: impl AsRef<Path>) -> String {
    let path = path.as_ref();
    if let Some(home_dir) = dirs::home_dir() {
        match path.strip_prefix(&home_dir) {
            // The path is exactly the home directory itself.
            Ok(rest) if rest.as_os_str().is_empty() => "~".to_string(),
            // Inside the home directory: show the remainder after `~/`.
            Ok(rest) => format!("~/{}", rest.display()),
            // Not under the home directory: show the path unchanged.
            Err(_) => path.display().to_string(),
        }
    } else {
        // No home directory available: nothing to abbreviate.
        path.display().to_string()
    }
}
|
#![allow(non_snake_case, non_camel_case_types)]
use libc::{c_int, c_short, c_void};
use crate::connector::ssh_connector;
use crate::session::ssh_session;
use crate::socket_t;
// Opaque handle for libssh's event object; only ever used behind a pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ssh_event_struct {
    _unused: [u8; 0],
}
// Raw-pointer alias matching libssh's `ssh_event` typedef.
pub type ssh_event = *mut ssh_event_struct;
// C callback invoked for a registered fd; `revents` carries poll-style flags.
pub type ssh_event_callback = ::std::option::Option<
    unsafe extern "C" fn(fd: socket_t, revents: c_int, userdata: *mut c_void) -> c_int,
>;
// Raw bindings to libssh's event/polling API. Ownership and error-code
// semantics follow the libssh documentation for each function.
extern "C" {
    pub fn ssh_event_new() -> ssh_event;
    pub fn ssh_event_add_fd(
        event: ssh_event,
        fd: socket_t,
        events: c_short,
        cb: ssh_event_callback,
        userdata: *mut c_void,
    ) -> c_int;
    pub fn ssh_event_add_session(event: ssh_event, session: ssh_session) -> c_int;
    pub fn ssh_event_add_connector(event: ssh_event, connector: ssh_connector) -> c_int;
    pub fn ssh_event_dopoll(event: ssh_event, timeout: c_int) -> c_int;
    pub fn ssh_event_remove_fd(event: ssh_event, fd: socket_t) -> c_int;
    pub fn ssh_event_remove_session(event: ssh_event, session: ssh_session) -> c_int;
    pub fn ssh_event_remove_connector(event: ssh_event, connector: ssh_connector) -> c_int;
    pub fn ssh_event_free(event: ssh_event);
}
|
use std::env;
use std::fs;
/// Advent-of-Code style driver: reads the file named by the first CLI
/// argument, splits it into blank-line-separated groups of answer lines,
/// and sums, per group, the count of answers given by *every* member.
fn main() {
    let args: Vec<String> = env::args().collect();
    let filename = &args[1];
    let contents = fs::read_to_string(filename)
        .expect("Something went wrong reading the file");
    // Each group accumulates every answer character from its members,
    // alongside a head count for that group.
    let mut groups: Vec<Vec<char>> = vec![Vec::new()];
    let mut num_people: Vec<usize> = vec![0];
    for line in contents.lines() {
        if line.is_empty() {
            // Blank line: start a new group.
            groups.push(Vec::new());
            num_people.push(0);
        } else {
            // `chars()` replaces the old split("")/remove(0)/remove(len-1)
            // dance with the idiomatic per-character iterator.
            let group = groups.last_mut().expect("groups starts non-empty");
            group.extend(line.chars());
            *num_people.last_mut().unwrap() += 1;
        }
    }
    // An answer is unanimous when it appears exactly `people` times in the
    // group's pooled characters (assumes no duplicate letters per person,
    // as in the original).
    let mut sum = 0;
    for (group, &people) in groups.iter().zip(&num_people) {
        let mut unanimous: Vec<char> = group
            .iter()
            .filter(|&&c| group.iter().filter(|&&d| d == c).count() == people)
            .copied()
            .collect();
        unanimous.sort_unstable();
        unanimous.dedup();
        sum += unanimous.len();
    }
    println!("sum: {}", sum);
}
|
use super::constants::*;
// A collection of helpers to handle data structures for Cryptonid needs
// Concatenate two arrays to a unique vector
/// Returns a new vector containing all of `x` followed by all of `y`.
pub fn concatenate_arrays<T: Clone>(x: &[T], y: &[T]) -> Vec<T> {
    // Reserve the exact final size up front, then append both slices.
    let mut joined = Vec::with_capacity(x.len() + y.len());
    joined.extend_from_slice(x);
    joined.extend_from_slice(y);
    joined
}
// KECCAK HELPERS
// Get Sponge from a state
// Extracts the sponge-capacity words (FSC) from a full Keccak state, i.e.
// the tail of the state after the first HN_SIZE words.
// Relies on the invariant STATE_SIZE - HN_SIZE == FSC_SIZE; copy_from_slice
// panics at runtime if the constants ever disagree.
pub fn get_fsc(state: [u64; STATE_SIZE]) -> [u64; FSC_SIZE] {
    let mut dst: [u64; FSC_SIZE] = [0; FSC_SIZE];
    dst.copy_from_slice(&state[HN_SIZE..STATE_SIZE]);
    dst
}
// IMPORT HEXADECIMAL STRINGS
// Make Sponge Capacity (like FSC) in u64 from an hexadecimal string.
//
// # Examples
//
// ```
// const _ISC_HEX_: &str = "3030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030300a303030303030300000000000000000000000000000000000000000000000000000000000000000";
//
// const _ISC64: [u64; FSC_SIZE] = [
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x3030303030303030u64,
// 0x303030303030300au64,
// 0x0000000000000000u64,
// 0x0000000000000000u64,
// 0x0000000000000000u64,
// 0x0000000000000000u64
// ];
//
// let isc: [u64; FSC_SIZE] = _hexa_sponge_to_u64(_ISC_HEX_);
//
// assert_eq!(_ISC64, hexa_sponge_to_u64(_ISC_HEX_));
// ```
/// Decodes a hexadecimal string into FSC_SIZE little-endian u64 words.
///
/// Invalid hex decodes to an empty buffer; `copy_from_slice` below then
/// panics on the length mismatch, preserving the original failure mode.
pub fn hexa_sponge_to_u64(sponge_hexa_string: &str) -> [u64; FSC_SIZE] {
    let isc_u8 = hex::decode(sponge_hexa_string).unwrap_or_default();
    // `chunks_exact(8)` replaces the old hand-rolled index arithmetic, which
    // computed byte offsets in `u8` and therefore overflowed (panicking in
    // debug, wrapping in release) for FSC_SIZE > 31.
    let words: Vec<u64> = isc_u8
        .chunks_exact(8)
        .take(FSC_SIZE)
        .map(|chunk| {
            let mut bytes = [0u8; 8];
            bytes.copy_from_slice(chunk);
            u64::from_le_bytes(bytes)
        })
        .collect();
    let mut sponge_u64 = [0u64; FSC_SIZE];
    sponge_u64.copy_from_slice(&words);
    sponge_u64
}
// EXPORT HEXADECIMAL STRINGS
// Renders a byte vector as a lowercase, zero-padded hexadecimal string.
pub fn vec_to_hex_string(bytes: Vec<u8>) -> String {
    // Two output characters per byte; build into one preallocated String.
    let mut hex = String::with_capacity(bytes.len() * 2);
    for byte in &bytes {
        hex.push_str(&format!("{:02x}", byte));
    }
    hex
}
// Make an hexadecimal string from a Keccak state in 64bits
// (first HN_SIZE words only, serialized as little-endian bytes).
pub fn state_u64_to_hexa_hn_8(state: [u64; STATE_SIZE]) -> String {
    // Serialize each word explicitly with `to_le_bytes`. The previous
    // `transmute` produced this byte order only on little-endian hosts and
    // needed `unsafe`; this matches the `from_le_bytes` convention used by
    // `hexa_sponge_to_u64` on every platform.
    let bytes: Vec<u8> = state[..HN_SIZE]
        .iter()
        .flat_map(|word| word.to_le_bytes().to_vec())
        .collect();
    vec_to_hex_string(bytes)
}
// Make an Sponge hexadecimal string from a state in 64bits
// (capacity words only, serialized as little-endian bytes).
pub fn state_u64_to_hexa_fsc_8(state: [u64; STATE_SIZE]) -> String {
    // Portable, safe replacement for the old endian-dependent `transmute`.
    let bytes: Vec<u8> = get_fsc(state)
        .iter()
        .flat_map(|word| word.to_le_bytes().to_vec())
        .collect();
    vec_to_hex_string(bytes)
}
|
use iron::{IronResult, Response, status};
use iron::headers::ContentType;
use serde::Serialize;
use serde_json;
/// Turns any serializable object into a JSON Iron response.
///
/// Serialization failures are converted to an Iron error via `itry!`.
pub fn response<S: Serialize>(data: S) -> IronResult<Response> {
    let body = itry!(serde_json::to_string(&data));
    let mut resp = Response::with((status::Ok, body));
    resp.headers.set(ContentType::json());
    Ok(resp)
}
|
use std::iter;
use std::mem;
use crate::*;
/// Convenience wrapper: builds a `LineSolver` for one line and returns its
/// cell iterator, or `None` when the clues cannot be placed at all.
pub fn solve_line <
    LineIter: IntoIterator <Item = Cell>,
> (
    line_iter: LineIter,
    clues_line: & CluesLine,
) -> Option <LineSolverIter> {
    match LineSolver::new (line_iter, clues_line) {
        Ok (solver) => Some (solver.into_iter ()),
        Err (_) => None,
    }
}
// Single-line nonogram solver. All buffers are owned so they can be
// recycled between lines via the `into_default`/`into_new` protocol.
#[ derive (Default) ]
pub struct LineSolver {
    // Clues for the line currently being solved.
    clues_line: CluesLine,
    // Known cells of the line under consideration.
    line: LineBuf,
    // One concrete placement of the clues; contradicted cells are blanked
    // out as solving progresses.
    sample_line: LineBuf,
    // Scratch copy of `line` used to test hypothetical cell values.
    proposed_line: LineBuf,
    // Next cell index to be solved by `next`.
    index: LineSize,
    // Reusable clue-placement engine (reset between uses).
    clues_placer: CluesPlacer <'static>,
}
impl LineSolver {
    /// Builds a solver for one line. Returns `Err` when the clues cannot be
    /// placed on the line at all (a default solver is returned so its
    /// buffers can still be recycled — see the TODO in `into_new`).
    pub fn new <
        LineIter: IntoIterator <Item = Cell>,
    > (
        line_iter: LineIter,
        clues_line: & CluesLine,
    ) -> Result <LineSolver, LineSolver> {
        let line_solver: LineSolver = Default::default ();
        line_solver.into_new (line_iter, clues_line)
    }
    /// Consumes the solver and returns an empty one, recycling every
    /// internal buffer's allocation via the components' `into_default`.
    pub fn into_default (
        self,
    ) -> LineSolver {
        LineSolver {
            clues_line: self.clues_line.into_default (),
            line: self.line.into_default (),
            sample_line: self.sample_line.into_default (),
            proposed_line: self.proposed_line.into_default (),
            index: 0,
            clues_placer: self.clues_placer.into_default (),
        }
    }
    /// Re-initializes a (possibly recycled) solver with a new line and
    /// clues. Computes one initial clue placement into `sample_line`;
    /// failing that, the line is unsolvable and `Err` is returned.
    pub fn into_new <
        LineIter: IntoIterator <Item = Cell>,
    > (
        self,
        line_iter: LineIter,
        clues_line: & CluesLine,
    ) -> Result <LineSolver, LineSolver> {
        let line = self.line.into_copy_of (line_iter);
        let (clues_placer, sample_line) = {
            let mut clues_placer = self.clues_placer.into_new (
                & line,
                & clues_line,
            );
            if ! clues_placer.advance () {
                // No valid placement exists for these clues on this line.
                return Err (Default::default ()); // TODO
            };
            let sample_line = self.sample_line.into_copy_of (
                render_placed_clues (
                    clues_line.iter ().cloned (),
                    clues_placer.current (),
                    line.len (),
                ),
            );
            (clues_placer.into_default (), sample_line)
        };
        let proposed_line = self.proposed_line.into_copy_of (& line);
        Ok (LineSolver {
            clues_line: self.clues_line.into_default ().into_extend (
                clues_line.iter ().cloned (),
            ),
            line: line,
            sample_line: sample_line,
            proposed_line: proposed_line,
            index: 0,
            clues_placer: clues_placer,
        })
    }
    /// Yields the solved value for the next cell, or `None` once every
    /// cell of the line has been produced.
    pub fn next (& mut self) -> Option <Cell> {
        if self.index == self.line.len () {
            return None;
        }
        let cell = self.solve_cell ();
        self.index += 1;
        Some (cell)
    }
    /// Deduces the cell at `self.index` by contradiction: invert the sample
    /// cell, and if no clue placement tolerates the inversion the sample
    /// value is proven correct.
    fn solve_cell (& mut self) -> Cell {
        // return existing cell if known
        let existing_cell = self.line [self.index];
        if ! existing_cell.is_unknown () {
            return existing_cell;
        }
        // propose inverting the sample cell
        let sample_cell = self.sample_line [self.index];
        self.proposed_line [self.index] = match sample_cell {
            Cell::EMPTY => Cell::FILLED,
            Cell::FILLED => Cell::EMPTY,
            Cell::UNKNOWN => return existing_cell,
            _ => panic! (),
        };
        // try placing clues; swap the placer out so it can borrow buffers
        let mut clues_placer = Default::default ();
        mem::swap (& mut clues_placer, & mut self.clues_placer);
        let mut clues_placer = clues_placer.into_new (
            & self.proposed_line,
            & self.clues_line,
        );
        // if it fails the sample cell must be correct
        if ! clues_placer.advance () {
            self.clues_placer = clues_placer.into_default ();
            self.proposed_line [self.index] = sample_cell;
            self.line [self.index] = sample_cell;
            return sample_cell;
        }
        // remove proposed cells which contradict placed clues
        for (nested_index, placed_cell) in render_placed_clues (
            self.clues_line.iter ().cloned (),
            clues_placer.current (),
            self.line.len (),
        ).enumerate () {
            let nested_index = nested_index as LineSize;
            if placed_cell != self.sample_line [nested_index] {
                self.sample_line [nested_index] = Cell::UNKNOWN;
            }
        }
        // reset the proposed cell; this cell stays undetermined
        self.clues_placer = clues_placer.into_default ();
        self.proposed_line [self.index] = Cell::UNKNOWN;
        return existing_cell;
    }
}
// Adapter so a solver can be consumed as an iterator of solved cells.
impl IntoIterator for LineSolver {
    type Item = Cell;
    type IntoIter = LineSolverIter;
    fn into_iter (self) -> LineSolverIter {
        LineSolverIter {
            inner: self,
        }
    }
}
// Iterator wrapper around `LineSolver::next`.
pub struct LineSolverIter {
    inner: LineSolver,
}
impl Iterator for LineSolverIter {
    type Item = Cell;
    fn next (& mut self) -> Option <Cell> {
        self.inner.next ()
    }
}
// Expands a clue placement into a full row of cells: for each clue (paired
// with its placed start position) emit the EMPTY gap since the previous
// clue, then `size` FILLED cells. The sentinel `(line_size, 0)` pair flushes
// the trailing gap up to the end of the line.
#[ inline (always) ]
pub fn render_placed_clues <
    'a,
    CluesLine: IntoIterator <Item = LineSize>,
    PlacedClues: IntoIterator <Item = LineSize>,
> (
    clues_line: CluesLine,
    placed_clues: PlacedClues,
    line_size: LineSize,
) -> impl Iterator <Item = Cell> {
    placed_clues.into_iter ().zip (
        clues_line.into_iter (),
    ).chain (
        iter::once ((line_size, 0)),
    ).scan (0, move |pos: & mut LineSize, (start, size)| {
        // `pos` tracks the first cell not yet rendered.
        let result = iter::empty ().chain (
            iter::repeat (Cell::EMPTY).take ((start - * pos) as usize),
        ).chain (
            iter::repeat (Cell::FILLED).take (size as usize),
        );
        * pos = start + size;
        Some (result)
    }).flatten ()
}
#[ cfg (test) ]
mod tests {
    use super::*;
    // Each case feeds a partially-known line (built via LineBuf::from_str:
    // '-' unknown, '#' filled, ' ' empty) and a clue list to solve_line,
    // then checks the cells the solver can deduce.
    #[ test ]
    fn test_solve_line_1 () {
        // Clues exactly fill the line: fully determined.
        assert_eq! (
            solve_line (
                & LineBuf::from_str ("----------").unwrap (),
                & vec! [ 3, 2, 3 ],
            ).map (Iterator::collect),
            Some (LineBuf::from_str ("### ## ###").unwrap ()),
        );
    }
    #[ test ]
    fn test_solve_line_2 () {
        // Slack remains: only overlap cells are deducible.
        assert_eq! (
            solve_line (
                & LineBuf::from_str ("----------").unwrap (),
                & vec! [ 3, 4 ],
            ).map (Iterator::collect),
            Some (LineBuf::from_str ("--#---##--").unwrap ()),
        );
    }
    #[ test ]
    fn test_solve_line_3 () {
        // A known-empty cell pins the second clue to the tail.
        assert_eq! (
            solve_line (
                & LineBuf::from_str ("----- ----").unwrap (),
                & vec! [ 3, 4 ],
            ).map (Iterator::collect),
            Some (LineBuf::from_str ("--#-- ####").unwrap ()),
        );
    }
    #[ test ]
    fn test_solve_line_4 () {
        // A known-filled cell fully determines both clues.
        assert_eq! (
            solve_line (
                & LineBuf::from_str ("----# ----").unwrap (),
                & vec! [ 3, 4 ],
            ).map (Iterator::collect),
            Some (LineBuf::from_str ("  ### ####").unwrap ()),
        );
    }
    #[ test ]
    fn test_solve_line_5 () {
        assert_eq! (
            solve_line (
                & LineBuf::from_str ("-#---#----").unwrap (),
                & vec! [ 3, 4 ],
            ).map (Iterator::collect),
            Some (LineBuf::from_str ("-##--###- ").unwrap ()),
        );
    }
    #[ test ]
    fn test_solve_line_6 () {
        assert_eq! (
            solve_line (
                & LineBuf::from_str ("--- #-----").unwrap (),
                & vec! [ 2, 3 ],
            ).map (Iterator::collect),
            Some (LineBuf::from_str ("--- ##----").unwrap ()),
        );
    }
}
|
use std::fmt::Debug;
// One simple selector: `#id`, `.class`, or a bare tag name.
#[derive(Debug)]
pub enum CssSelectorAtom {
    Id(String),
    Class(String),
    Tag(String),
}
// Atoms written back-to-back with no separator (e.g. `div.a#b`), as
// produced by `parse_composite_selector`.
#[derive(Debug)]
pub struct CssSelectorComposite {
    pub sels: Vec<CssSelectorAtom>
}
// A comma-separated list of composite selectors (e.g. `h1, .title`).
#[derive(Debug)]
pub struct CssSelectorMultiple {
    pub sels: Vec<CssSelectorComposite>
}
impl std::fmt::Display for CssSelectorAtom {
    /// Formats the selector with its CSS sigil: `.class`, `#id`, or a bare tag.
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use CssSelectorAtom::*;
        match self {
            Class(s) => write!(fmt, ".{}", s),
            Id(s) => write!(fmt, "#{}", s),
            // Fix: the tag arm needlessly cloned the String just to display it.
            Tag(s) => write!(fmt, "{}", s),
        }
    }
}
impl CssSelectorAtom {
    /// Renders the selector through its `Display` implementation.
    /// NOTE: this inherent method shadows the blanket `ToString::to_string`.
    pub fn to_string(&self) -> String {
        format!("{}", self)
    }
}
// Length units recognized by the parser; currently only pixels.
#[derive(Debug)]
pub enum CssUnit {
    Px
}
// One value in a rule's parameter list.
#[derive(Debug)]
pub enum CssRuleParam {
    // RGBA, each channel 0-255.
    Color(u8, u8, u8, u8),
    // An identifier the parser did not recognize as a keyword color.
    UnknownIdent(String),
    // A number with a unit, e.g. `2px`.
    Unit(f32, CssUnit)
}
impl CssRuleParam {
    /// Returns the RGBA tuple when this parameter is a color.
    /// NOTE: despite the `into_` names, these accessors borrow `&self`.
    pub fn into_color(&self) -> Option<(u8, u8, u8, u8)> {
        match self {
            CssRuleParam::Color(r, g, b, a) => Some((*r, *g, *b, *a)),
            _ => None,
        }
    }
    /// Returns the identifier text when this parameter is an unknown ident.
    pub fn into_ident(&self) -> Option<&str> {
        match self {
            CssRuleParam::UnknownIdent(name) => Some(name.as_str()),
            _ => None,
        }
    }
    /// Returns the numeric value when this parameter carries a unit.
    pub fn into_px(&self) -> Option<f32> {
        match self {
            CssRuleParam::Unit(value, _) => Some(*value),
            _ => None,
        }
    }
}
// A single declaration, e.g. `color: red;` → name "color", one Color param.
#[derive(Debug)]
pub struct CssRule {
    pub name: String,
    pub params: Vec<CssRuleParam>
}
// A selector list together with its brace-enclosed declarations.
#[derive(Debug)]
pub struct CssBlock {
    pub selector: CssSelectorMultiple,
    pub rules: Vec<CssRule>
}
// A whole parsed stylesheet.
#[derive(Debug)]
pub struct Css {
    pub blocks: Vec<CssBlock>
}
// Every way parsing can fail; human-readable text lives in the Debug impl.
pub enum CssErrorKind {
    Eof,
    InvalidSymbolInIdent,
    ExpectedCharacter(char),
    UnknownUnit(String),
    UnexpectedIdent(String),
    InvalidRuleParameter,
    InvalidNumber,
    // Carries the offending digit count (valid lengths: 3, 4, 6, 8).
    HexColorLengthMayNotBe(usize)
}
impl Debug for CssErrorKind {
    /// Writes the human-readable description for each error kind.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use CssErrorKind::*;
        // Write each arm directly instead of building an intermediate String.
        match self {
            Eof => write!(f, "Unexpected end of file"),
            InvalidSymbolInIdent => write!(f, "Invalid symbol in identifier"),
            ExpectedCharacter(ch) => write!(f, "Expected character '{}'", ch),
            UnknownUnit(s) => write!(f, "Unknown unit '{}'", s),
            UnexpectedIdent(s) => write!(f, "Unexpected identifier '{}'", s),
            HexColorLengthMayNotBe(count) => write!(f, "Hexadecimal color may not be of length {}", count),
            InvalidRuleParameter => write!(f, "Invalid rule parameter"),
            InvalidNumber => write!(f, "InvalidNumber"),
        }
    }
}
// A parse error tagged with its position (0-based internally).
pub struct CssError {
    pub line: usize,
    pub col: usize,
    pub kind: CssErrorKind
}
impl Debug for CssError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // +1 converts the internal 0-based position to 1-based for humans.
        write!(f, "CSS error: {:?} at {}:{}", self.kind, self.line + 1, self.col + 1)
    }
}
// Hand-rolled recursive-descent CSS parser with one character of lookahead.
pub struct CssParser<'a> {
    // Remaining input; `current` is always one char ahead of this iterator.
    chars: std::str::Chars<'a>,
    // The lookahead character ('\0' once input is exhausted).
    current: char,
    // 0-based position of `current`, used for error reporting.
    line: usize,
    col: usize,
    // Set once `chars` is exhausted.
    eof: bool
}
impl<'a> CssParser<'a> {
fn err(&self, kind: CssErrorKind) -> CssError {
CssError { line: self.line, col: self.col, kind }
}
// Check if value is a formatting value
fn is_ignored(&self) -> bool {
let peek = self.current;
peek == ' ' || peek == '\n' || peek == '\t'
}
// This function is needed to remove unnecessary spaces, and called
// Before each parser to make sure that there's no trailing spaces
fn update_lines(&mut self) {
self.col += 1;
if self.current == '\n' {
self.line += 1;
self.col = 0;
}
}
fn no_rubbish(&mut self) {
// Remove all ignored characters
while self.is_ignored() {
self.skip();
}
if self.peek() == '/' {
self.skip();
if self.peek() == '*' {
loop {
self.skip();
if self.peek() == '*' {
self.skip();
if self.peek() == '/' {
self.skip();
self.no_rubbish();
break;
}
}
}
}
}
}
fn peek(&mut self) -> char {
return self.current
}
fn skip(&mut self) {
if self.chars.as_str() == "" {
// This character will indicate end of file since it's unused anyway
self.current = '\0';
self.eof = true;
return;
}
self.current = self.chars.next().unwrap();
self.update_lines();
}
fn next(&mut self) -> Result<char, CssError> {
let previous = self.current;
// We don't want to consume anything if there's end of file
if self.eof {
return Err(self.err(CssErrorKind::Eof));
}
self.skip();
Ok(previous)
}
fn parse_ident(&mut self) -> Result<String, CssError> {
let mut result = "".to_string();
// Miscellaneous characters
const IDENT_VALID: [char; 2] = ['_', '-'];
// Check for alphabetic characters and miscellaneous
while self.peek().is_alphabetic() || IDENT_VALID.contains(&self.peek()) {
result.push(self.next()?);
}
// If there were no alphabetic characters the identifier is invalid
if result.len() < 1 {
return Err(self.err(CssErrorKind::InvalidSymbolInIdent))
}
// We may include numbers after that
while self.peek().is_alphanumeric() || IDENT_VALID.contains(&self.peek()) {
result.push(self.next()?);
}
self.no_rubbish();
Ok(result)
}
fn parse_selector_atomic(&mut self) -> Result<CssSelectorAtom, CssError> {
// Parse different selectors, we can't backtrack so we will have to manually skip the prefix symbols
let selector = match self.peek() {
'.' => {
self.next()?;
CssSelectorAtom::Class(self.parse_ident()?)
},
'#' => {
self.next()?;
CssSelectorAtom::Id(self.parse_ident()?)
},
_ => CssSelectorAtom::Tag(self.parse_ident()?)
};
Ok(selector)
}
fn parse_composite_selector(&mut self) -> Result<CssSelectorComposite, CssError> {
let mut selectors = vec![self.parse_selector_atomic()?];
while self.peek() == '.' || self.peek() == '#' {
selectors.push(self.parse_selector_atomic()?);
}
Ok(CssSelectorComposite { sels: selectors })
}
fn parse_multiple_selector(&mut self) -> Result<CssSelectorMultiple, CssError> {
let mut selectors = vec![self.parse_composite_selector()?];
while self.peek() == ',' {
self.skip_char(',')?;
selectors.push(self.parse_composite_selector()?);
}
Ok(CssSelectorMultiple { sels: selectors })
}
pub fn parse_selector(&mut self) -> Result<CssSelectorMultiple, CssError> {
self.parse_multiple_selector()
}
fn skip_char(&mut self, ch: char) -> Result<(), CssError>{
if self.peek() == ch {
self.next()?;
self.no_rubbish();
Ok(())
}
else {
Err(self.err(CssErrorKind::ExpectedCharacter(ch)))
}
}
fn parse_number(&mut self) -> Result<f32, CssError> {
let mut res = "".to_string();
if self.peek() == '-' {
self.next()?;
res.push('-');
}
while self.peek().is_digit(10) {
res.push(self.next()?);
}
if self.peek() == '.' {
self.next()?;
res.push('.');
}
while self.peek().is_digit(10) {
res.push(self.next()?);
}
if !(res == "." || res == "") {
self.no_rubbish();
return Ok(res.parse().unwrap());
}
return Err(self.err(CssErrorKind::InvalidNumber))
}
fn parse_unit_value(&mut self) -> Result<CssRuleParam, CssError> {
let number = self.parse_number()?;
let unit = self.parse_ident()?;
if unit == "px" {
return Ok(CssRuleParam::Unit(number, CssUnit::Px))
}
Err(self.err(CssErrorKind::UnknownUnit(unit)))
}
fn parse_ident_rule(&mut self) -> Result<CssRuleParam, CssError> {
let ident = self.parse_ident()?;
// For now we will always assume it's a color
Ok(match ident.as_str() {
"red" => CssRuleParam::Color(255, 0, 0, 255),
"green" => CssRuleParam::Color(0, 255, 0, 255),
"blue" => CssRuleParam::Color(0, 0, 255, 255),
"lightblue" => CssRuleParam::Color(128, 128, 255, 255),
"white" => CssRuleParam::Color(255, 255, 255, 255),
"black" => CssRuleParam::Color(0, 0, 0, 255),
"yellow" => CssRuleParam::Color(255, 255, 0, 255),
_ => CssRuleParam::UnknownIdent(ident)
})
}
fn parse_hex_color(&mut self) -> Result<CssRuleParam, CssError> {
self.skip_char('#')?;
let mut hex = "".to_string();
while self.peek().is_digit(16) {
hex.push(self.next()?);
}
self.no_rubbish();
fn hex_to_u8(c: char) -> Option<u8>
{
Some(match c {
'0' => 0,
'1' => 1,
'2' => 2,
'3' => 3,
'4' => 4,
'5' => 5,
'6' => 6,
'7' => 7,
'8' => 8,
'9' => 9,
'a' | 'A' => 0xA,
'b' | 'B' => 0xB,
'c' | 'C' => 0xC,
'd' | 'D' => 0xD,
'e' | 'E' => 0xE,
'f' | 'F' => 0xF,
_ => return None
})
}
match hex.len() {
3 => {
let mut iter = hex.chars();
let r = hex_to_u8(iter.next().unwrap()).unwrap();
let g = hex_to_u8(iter.next().unwrap()).unwrap();
let b = hex_to_u8(iter.next().unwrap()).unwrap();
return Ok(CssRuleParam::Color(r<<4|r, g<<4|g, b<<4|b, 255))
},
4 => {
let mut iter = hex.chars();
let r = hex_to_u8(iter.next().unwrap()).unwrap();
let g = hex_to_u8(iter.next().unwrap()).unwrap();
let b = hex_to_u8(iter.next().unwrap()).unwrap();
let a = hex_to_u8(iter.next().unwrap()).unwrap();
dbg!(a<<4|a);
return Ok(CssRuleParam::Color(r<<4|r, g<<4|g, b<<4|b, a<<4|a))
}
6 => {
let mut iter = hex.chars();
let rh = hex_to_u8(iter.next().unwrap()).unwrap();
let rl = hex_to_u8(iter.next().unwrap()).unwrap();
let gh = hex_to_u8(iter.next().unwrap()).unwrap();
let gl = hex_to_u8(iter.next().unwrap()).unwrap();
let bh = hex_to_u8(iter.next().unwrap()).unwrap();
let bl = hex_to_u8(iter.next().unwrap()).unwrap();
return Ok(CssRuleParam::Color(rh<<4|rl, gh<<4|gl, bh<<4|bl, 255))
},
8 => {
let mut iter = hex.chars();
let rh = hex_to_u8(iter.next().unwrap()).unwrap();
let rl = hex_to_u8(iter.next().unwrap()).unwrap();
let gh = hex_to_u8(iter.next().unwrap()).unwrap();
let gl = hex_to_u8(iter.next().unwrap()).unwrap();
let bh = hex_to_u8(iter.next().unwrap()).unwrap();
let bl = hex_to_u8(iter.next().unwrap()).unwrap();
let ah = hex_to_u8(iter.next().unwrap()).unwrap();
let al = hex_to_u8(iter.next().unwrap()).unwrap();
return Ok(CssRuleParam::Color(rh<<4|rl, gh<<4|gl, bh<<4|bl, ah<<4|al))
}
_ => return Err(self.err(CssErrorKind::HexColorLengthMayNotBe(hex.len())))
}
}
// This function will redirect to smaller parsers
fn parse_value(&mut self) -> Result<CssRuleParam, CssError> {
if self.peek().is_digit(10) || self.peek() == '-' {
self.parse_unit_value()
}
else if self.peek().is_alphabetic() {
self.parse_ident_rule()
}
else if self.peek() == '#' {
self.parse_hex_color()
}
else {
Err(self.err(CssErrorKind::InvalidRuleParameter))
}
}
fn parse_rule(&mut self) -> Result<CssRule, CssError> {
let mut rule = CssRule { name: self.parse_ident()?, params: vec![] };
self.skip_char(':')?;
while self.peek() != ';' {
rule.params.push(self.parse_value()?);
}
self.skip_char(';')?;
Ok(rule)
}
pub fn new(source: &'a str) -> Self {
let chars = source.chars();
let mut this = Self {
current: ' ',
chars: chars,
line: 0,
col: 0,
eof: false
};
// Before that, we need to remove trailing spaces
this.no_rubbish();
this
}
pub fn parse(&mut self) -> Result<Css, CssError> {
let mut result = Css { blocks: vec![] };
while !self.eof {
let selector = self.parse_multiple_selector()?;
// This is needed as we encounter spaces and other ignored characters
self.skip_char('{')?;
let mut rules = vec![];
while self.peek() != '}' {
rules.push(self.parse_rule()?);
}
self.skip_char('}')?;
result.blocks.push(CssBlock { rules, selector });
}
Ok(result)
}
}
#[test]
fn test_integrity() {
    // End-to-end smoke test: tag, class and id selectors with several rules.
    let mut parser = CssParser::new("Div {} .test { color: red; } #ident { border: 2px; padding: 2px 3px 10px 1px; }");
    parser.parse().unwrap();
}
#[test]
fn test_ignored() {
    let mut parser = CssParser::new("  .test");
    parser.no_rubbish();
    // Because we consume next character and store it, the dot is ignored
    assert_eq!(parser.chars.as_str(), "test");
}
// #[test]
// NOTE(review): this test is disabled (the attribute above is commented out)
// and expects an error, but `parse_ident_rule` returns UnknownIdent for
// unrecognized words rather than Err — confirm intent before re-enabling.
fn test_rule_keyword_param() {
    let mut parser = CssParser::new("rsnte");
    let _rule = parser.parse_ident_rule().unwrap_err();
}
#[test]
fn test_rule_parsing() {
    // Two consecutive declarations: a keyword color, then a unit + color.
    let mut parser = CssParser::new("background-color: red; border: 2px black;");
    let rule = parser.parse_rule().unwrap();
    assert_eq!(rule.name.as_str(), "background-color");
    let param = &rule.params[0];
    assert!(matches!(param, CssRuleParam::Color(255, 0, 0, 255)));
    let rule = parser.parse_rule().unwrap();
    assert_eq!(rule.name.as_str(), "border");
    let param = &rule.params[0];
    assert!(matches!(param, CssRuleParam::Unit(.., CssUnit::Px)));
    let param = &rule.params[1];
    assert!(matches!(param, CssRuleParam::Color(0, 0, 0, 255)));
}
#[test]
// Test selectors for all types of them
fn test_selector_parsing() {
    let mut parser = CssParser::new("  .test");
    let selector = parser.parse_selector_atomic().unwrap();
    assert_eq!(selector.to_string().as_str(), ".test");
    let mut parser = CssParser::new(" \n #test");
    let selector = parser.parse_selector_atomic().unwrap();
    assert_eq!(selector.to_string().as_str(), "#test");
    let mut parser = CssParser::new("\ttest");
    let selector = parser.parse_selector_atomic().unwrap();
    assert_eq!(selector.to_string().as_str(), "test");
}
#[test]
// Test identifier parser
fn test_identifier_parsing() {
    let mut parser = CssParser::new("valid_1dentifier--");
    let ident = parser.parse_ident().unwrap();
    assert_eq!(ident, "valid_1dentifier--");
    // With unknown characters
    let mut parser = CssParser::new("-valid_1dentifier--$$$#$532485092385");
    let ident = parser.parse_ident().unwrap();
    assert_eq!(ident, "-valid_1dentifier--");
    // With ignored characters
    let mut parser = CssParser::new("_valid_1dentifier-- this_is_something-else");
    let ident = parser.parse_ident().unwrap();
    assert_eq!(ident, "_valid_1dentifier--");
    // Invalid: digits may not start an identifier
    let mut parser = CssParser::new("1valid_1dentifier-- this_is_something-else");
    let error = parser.parse_ident().unwrap_err();
    assert!(matches!(error.kind, CssErrorKind::InvalidSymbolInIdent));
}
|
mod precise;
pub use precise::AymPrecise;
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![warn(missing_debug_implementations, missing_docs)]
use std::str::FromStr;
/// Distance metric
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Metric {
    /// Squared Euclidean (L2-Squared)
    L2,
    /// Cosine similarity
    /// TODO: T should be float for Cosine distance
    Cosine,
}
/// Error returned when parsing a [`Metric`] from a string fails.
#[derive(thiserror::Error, Debug)]
pub enum ParseMetricError {
    /// Input matched neither "l2" nor "cosine" (case-insensitive).
    #[error("Invalid format for Metric: {0}")]
    InvalidFormat(String),
}
impl FromStr for Metric {
    type Err = ParseMetricError;
    /// Case-insensitive parse accepting "l2" and "cosine" in any casing.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        if lowered == "l2" {
            Ok(Metric::L2)
        } else if lowered == "cosine" {
            Ok(Metric::Cosine)
        } else {
            // Keep the original (un-lowered) input in the error message.
            Err(ParseMetricError::InvalidFormat(String::from(s)))
        }
    }
}
|
#[doc = "Reader of register SPINLOCK9"]
pub type R = crate::R<u32, super::SPINLOCK9>;
// The raw 32-bit value is exposed through the generic reader; this register
// defines no individual fields, hence the empty impl.
impl R {}
|
use crate::{messages::SBPMessage, serialize::SbpSerialize};
#[cfg(feature = "sbp_serde")]
use serde::{Deserialize, Serialize};
#[cfg_attr(feature = "sbp_serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone)]
// An SBP message whose type is not recognized; the body is kept as raw
// bytes so the frame can still be inspected or re-emitted unchanged.
pub struct Unknown {
    // Message type identifier from the frame header.
    pub msg_id: u16,
    // Sender identifier from the frame header.
    pub sender_id: u16,
    // Unparsed message body.
    pub payload: Vec<u8>,
}
impl SbpSerialize for Unknown {
    /// Appends the raw payload verbatim; unknown messages are opaque.
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(&self.payload);
    }
    /// On-wire size equals the stored payload length.
    fn sbp_size(&self) -> usize {
        self.payload.len()
    }
}
impl SBPMessage for Unknown {
    /// Unknown messages share a single placeholder name.
    fn get_message_name(&self) -> &'static str {
        "UNKNOWN"
    }
    /// The original, unrecognized message id is preserved.
    fn get_message_type(&self) -> u16 {
        self.msg_id
    }
    fn get_sender_id(&self) -> Option<u16> {
        Some(self.sender_id)
    }
    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = new_id;
    }
    /// Frames the message into a fresh buffer via `write_frame`.
    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }
    /// Delegates framing (header/CRC handling) to the crate-level helper.
    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}
|
use std::time::{Duration, Instant, SystemTime};
use cfg_if::cfg_if;
cfg_if! {
    if #[cfg(test)] {
        #[cfg(all(feature = "diesel2", feature = "sha-1"))]
        mod sleep;
        #[cfg(all(feature = "diesel2", feature = "sha-1"))]
        pub use self::sleep::*;
        // Test builds read tokio's clock so `tokio::time::pause`/`advance`
        // affect the result.
        pub fn instant_now() -> Instant {
            tokio::time::Instant::now().into()
        }
        // Derives a `SystemTime` "now" that tracks the mocked tokio clock by
        // offsetting the real wall clock by however far the mocked Instant
        // has drifted from the real one.
        pub fn system_time_now() -> SystemTime {
            let instant = Instant::now();
            let sys = SystemTime::now();
            let mocked = instant_now();
            if mocked > instant {
                sys + (mocked - instant)
            } else {
                sys - (instant - mocked)
            }
        }
    } else {
        // Non-test builds use the real monotonic and wall clocks directly.
        pub fn instant_now() -> Instant {
            Instant::now()
        }
        pub fn system_time_now() -> SystemTime {
            SystemTime::now()
        }
    }
}
/// Converts a `Duration` representing a Unix time to an `Instant`.
pub fn instant_from_unix(unix: Duration) -> Instant {
    let reference = instant_now();
    let current_unix = now_unix();
    // Shift the reference Instant by the *difference* of the two Unix times
    // rather than adding absolute Durations, mitigating overflow risk.
    if unix > current_unix {
        reference + (unix - current_unix)
    } else {
        reference.checked_sub(current_unix - unix).unwrap()
    }
}
/// Returns the Unix time representation of "now" as a `Duration`.
pub fn now_unix() -> Duration {
    let now = system_time_now();
    now.duration_since(SystemTime::UNIX_EPOCH).unwrap()
}
#[cfg(test)]
mod tests {
    use std::cmp::PartialOrd;
    use std::ops::Sub;
    use super::*;
    // Tolerance for comparing measured durations against expectations.
    const EPSILON: Duration = Duration::from_millis(10);
    // Panics unless `lhs` and `rhs` differ by less than EPSILON.
    macro_rules! assert_almost_eq {
        ($lhs:expr, $rhs:expr) => {{
            if diff_abs($lhs, $rhs) >= EPSILON {
                panic!(
                    r#"assertion failed: `(left \approx right)`
left: `{:?}`,
right: `{:?}`,
epsilon: `{:?}`"#,
                    $lhs, $rhs, EPSILON
                );
            }
        }};
    }
    #[tokio::test]
    async fn advance() {
        // Advancing the paused tokio clock must move system_time_now too.
        let delta = Duration::from_secs(42);
        tokio::time::pause();
        let start = system_time_now();
        tokio::time::advance(delta).await;
        let end = system_time_now();
        assert_almost_eq!(end.duration_since(start).unwrap(), delta);
    }
    #[tokio::test]
    async fn backward() {
        // With the tokio clock paused, real (thread) time passing must not
        // advance system_time_now.
        let delta = Duration::from_millis(420);
        tokio::time::pause();
        let start = system_time_now();
        std::thread::sleep(delta);
        let end = system_time_now();
        let d = end.duration_since(start).unwrap_or_else(|e| e.duration());
        assert_almost_eq!(d, Duration::default());
    }
    // Order-independent absolute difference.
    fn diff_abs<T: PartialOrd + Sub>(x: T, y: T) -> T::Output {
        if x > y {
            x - y
        } else {
            y - x
        }
    }
}
|
#![cfg(not(target_arch = "wasm32"))]
use std::cell::RefCell;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::HashMap;
use std::net::SocketAddr;
use std::pin::pin;
use std::rc::Rc;
use futures::{SinkExt, StreamExt};
use tokio::net::tcp::{OwnedReadHalf, OwnedWriteHalf};
use tokio::net::{TcpListener, TcpSocket, TcpStream};
use tokio::task::spawn_local;
use tokio_util::codec::{
BytesCodec, Decoder, Encoder, FramedRead, FramedWrite, LengthDelimitedCodec, LinesCodec,
};
use super::unsync::mpsc::{Receiver, Sender};
use super::unsync_channel;
/// Helper creates a TCP `Stream` and `Sink` from the given socket, using the given `Codec` to
/// handle delineation between inputs/outputs.
pub fn tcp_framed<Codec>(
    stream: TcpStream,
    codec: Codec,
) -> (
    FramedWrite<OwnedWriteHalf, Codec>,
    FramedRead<OwnedReadHalf, Codec>,
)
where
    Codec: Clone + Decoder,
{
    // Split the socket so reading and writing proceed independently, then
    // wrap each half with its own copy of the codec.
    let (read_half, write_half) = stream.into_split();
    (
        FramedWrite::new(write_half, codec.clone()),
        FramedRead::new(read_half, codec),
    )
}
/// Helper creates a TCP `Stream` and `Sink` for `Bytes` strings where each string is
/// length-delimited.
///
/// Thin wrapper over [`tcp_framed`] with a [`LengthDelimitedCodec`].
pub fn tcp_bytes(
    stream: TcpStream,
) -> (
    FramedWrite<OwnedWriteHalf, LengthDelimitedCodec>,
    FramedRead<OwnedReadHalf, LengthDelimitedCodec>,
) {
    tcp_framed(stream, LengthDelimitedCodec::new())
}
/// Helper creates a TCP `Stream` and `Sink` for undelimited streams of `Bytes`.
///
/// Thin wrapper over [`tcp_framed`] with a [`BytesCodec`].
pub fn tcp_bytestream(
    stream: TcpStream,
) -> (
    FramedWrite<OwnedWriteHalf, BytesCodec>,
    FramedRead<OwnedReadHalf, BytesCodec>,
) {
    tcp_framed(stream, BytesCodec::new())
}
/// Helper creates a TCP `Stream` and `Sink` for `str`ings delimited by newlines.
///
/// Thin wrapper over [`tcp_framed`] with a [`LinesCodec`].
pub fn tcp_lines(
    stream: TcpStream,
) -> (
    FramedWrite<OwnedWriteHalf, LinesCodec>,
    FramedRead<OwnedReadHalf, LinesCodec>,
) {
    tcp_framed(stream, LinesCodec::new())
}
/// A framed TCP `Sink` (sending). Items are paired with the destination address.
pub type TcpFramedSink<T> = Sender<(T, SocketAddr)>;
/// A framed TCP `Stream` (receiving). Items are paired with the peer address.
// NOTE(review): trait bounds on type aliases are not enforced by rustc; the
// `Codec: Decoder` bound here is documentation only.
pub type TcpFramedStream<Codec: Decoder> =
    Receiver<Result<(<Codec as Decoder>::Item, SocketAddr), <Codec as Decoder>::Error>>;
/// Create a listening tcp socket, and then as new connections come in, receive their data and forward it to a queue.
///
/// Returns the egress sink (messages addressed by peer `SocketAddr`), the
/// ingress stream of decoded items tagged with their peer address, and the
/// actually-bound local address.
pub async fn bind_tcp<T: 'static, Codec: 'static + Clone + Decoder + Encoder<T>>(
    endpoint: SocketAddr,
    codec: Codec,
) -> Result<(TcpFramedSink<T>, TcpFramedStream<Codec>, SocketAddr), std::io::Error> {
    let listener = TcpListener::bind(endpoint).await?;
    let bound_endpoint = listener.local_addr()?;
    let (tx_egress, mut rx_egress) = unsync_channel(None);
    let (tx_ingress, rx_ingress) = unsync_channel(None);
    // Shared map: peer address -> framed write half for that connection.
    let clients = Rc::new(RefCell::new(HashMap::new()));
    // Egress task: route each queued (payload, addr) to the matching client.
    spawn_local({
        let clients = clients.clone();
        async move {
            while let Some((payload, addr)) = rx_egress.next().await {
                // Temporarily remove the sender so the RefCell borrow is not
                // held across the `.await` below; send errors are ignored.
                let client = clients.borrow_mut().remove(&addr);
                if let Some(mut sender) = client {
                    let _ = futures::SinkExt::send(&mut sender, payload).await;
                    clients.borrow_mut().insert(addr, sender);
                }
            }
        }
    });
    // Accept task: register each new connection and pump its reads into the
    // shared ingress channel.
    spawn_local(async move {
        loop {
            let (stream, peer_addr) = if let Ok((stream, _)) = listener.accept().await {
                if let Ok(peer_addr) = stream.peer_addr() {
                    (stream, peer_addr)
                } else {
                    continue;
                }
            } else {
                continue;
            };
            let mut tx_ingress = tx_ingress.clone();
            let (send, recv) = tcp_framed(stream, codec.clone());
            // TODO: Using peer_addr here as the key is a little bit sketchy.
            // It's possible that a client could send a message, disconnect, then another client connects from the same IP address (and the same src port), and then the response could be sent to that new client.
            // This can be solved by using monotonically increasing IDs for each new client, but would break the similarity with the UDP versions of this function.
            clients.borrow_mut().insert(peer_addr, send);
            spawn_local({
                let clients = clients.clone();
                async move {
                    // Tag every decoded item with its peer; when the stream
                    // ends, drop the connection's write half from the map.
                    let mapped = recv.map(|x| Ok(x.map(|x| (x, peer_addr))));
                    let _ = tx_ingress.send_all(&mut pin!(mapped)).await;
                    clients.borrow_mut().remove(&peer_addr);
                }
            });
        }
    });
    Ok((tx_egress, rx_ingress, bound_endpoint))
}
/// This is the inverse of bind_tcp, when messages enqueued into the returned sender, tcp sockets will be created and connected as necessary to send out the requests.
/// As the responses come back, they will be forwarded to the returned receiver.
pub fn connect_tcp<T: 'static, Codec: 'static + Clone + Decoder + Encoder<T>>(
codec: Codec,
) -> (TcpFramedSink<T>, TcpFramedStream<Codec>) {
// Unbounded channels bridging the caller and the connection-manager task.
let (tx_egress, mut rx_egress) = unsync_channel(None);
let (tx_ingress, rx_ingress) = unsync_channel(None);
spawn_local(async move {
// Cache of addr -> framed sender; a connection is dialed lazily the first
// time a payload targets that address, then reused.
let mut streams = HashMap::new();
while let Some((payload, addr)) = rx_egress.next().await {
let stream = match streams.entry(addr) {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => {
// NOTE(review): socket creation / connect failures panic via
// unwrap here and take down the whole manager task — confirm
// that is acceptable for this API.
let socket = TcpSocket::new_v4().unwrap();
let stream = socket.connect(addr).await.unwrap();
let (send, recv) = tcp_framed(stream, codec.clone());
let mut tx_ingress = tx_ingress.clone();
// Reader task: tag responses with the remote address and forward
// them to the ingress queue.
spawn_local(async move {
let mapped = recv.map(|x| Ok(x.map(|x| (x, addr))));
let _ = tx_ingress.send_all(&mut pin!(mapped)).await;
});
entry.insert(send)
}
};
// Best-effort send; errors are swallowed.
let _ = stream.send(payload).await;
}
});
(tx_egress, rx_ingress)
}
|
use std::cmp::max;
use std::fs::File;
use std::io::prelude::*;
use std::io::{self, BufReader};
/// Decode the row half of a boarding pass: 7 characters of 'F' (front /
/// lower half) or 'B' (back / upper half), binary-partitioning rows 0..=127.
///
/// Returns the resolved row, or -1 (after printing a message) when an
/// invalid character is encountered. Panics via assert if the input is not
/// exactly 7 characters long.
fn get_row(rows: &str) -> i32 {
    assert_eq!(rows.len(), 7);
    let mut lower = 0;
    let mut upper = 127;
    // Iterate the characters directly; no need to collect into a Vec first.
    for letter in rows.chars() {
        // Half of the remaining range; shrink toward the chosen half.
        let current_mod = (upper - lower + 1) / 2;
        if letter == 'F' {
            upper -= current_mod;
        } else if letter == 'B' {
            lower += current_mod;
        } else {
            println!("invalid row {}", letter);
            return -1;
        }
    }
    // After 7 halvings the range has collapsed to a single row.
    assert_eq!(lower, upper);
    lower
}
/// Decode the column half of a boarding pass: 3 characters of 'L' (lower
/// half) or 'R' (upper half), binary-partitioning columns 0..=7.
///
/// Returns the resolved column, or -1 (after printing a message) when an
/// invalid character is encountered — matching `get_row`'s behavior instead
/// of silently skipping the letter and panicking on the final assert.
/// Panics via assert if the input is not exactly 3 characters long.
fn get_col(cols: &str) -> i32 {
    assert_eq!(cols.len(), 3);
    let mut lower = 0;
    let mut upper = 7;
    for letter in cols.chars() {
        // Half of the remaining range; shrink toward the chosen half.
        let current_mod = (upper - lower + 1) / 2;
        if letter == 'R' {
            lower += current_mod;
        } else if letter == 'L' {
            upper -= current_mod;
        } else {
            println!("invalid col {}", letter);
            return -1;
        }
    }
    // After 3 halvings the range has collapsed to a single column.
    assert_eq!(lower, upper);
    lower
}
/// Seat ID per the puzzle definition: `row * 8 + column`.
fn get_id(rows: &str, cols: &str) -> i32 {
    let seat_row = get_row(rows);
    let seat_col = get_col(cols);
    seat_col + seat_row * 8
}
/// Find the single missing seat ID in a *sorted* list of IDs.
///
/// The missing seat is the one whose two neighbors are both present, i.e.
/// a gap of exactly 2 between consecutive sorted IDs; the midpoint of that
/// gap is the answer. Returns -1 when no such gap exists (including for
/// empty or single-element input).
///
/// Takes a slice instead of `&Vec<i32>`; existing `&ids` call sites still
/// coerce, and `windows(2)` avoids manual index arithmetic.
fn find_seat(ids: &[i32]) -> i32 {
    for pair in ids.windows(2) {
        if pair[1] - pair[0] == 2 {
            return (pair[0] + pair[1]) / 2;
        }
    }
    -1
}
/// Advent-of-Code day 5: decode boarding passes from `input.txt`, report the
/// highest seat ID, then locate the one missing seat.
fn main() -> io::Result<()> {
    let reader = BufReader::new(File::open("input.txt")?);
    // Slurp every boarding-pass line up front.
    let mut passes = Vec::new();
    for entry in reader.lines() {
        passes.push(entry.unwrap());
    }
    // Sanity check against the worked example from the puzzle text.
    println!("Test Case {}", get_id("FBFBBFF", "RLR"));
    let mut highest_id = 0;
    let mut ids = Vec::new();
    for pass in &passes {
        // First 7 characters select the row, the remaining 3 the column.
        let id = get_id(&pass[0..7], &pass[7..]);
        highest_id = max(highest_id, id);
        ids.push(id);
    }
    println!("Highest ID {}", highest_id);
    // find_seat expects consecutive IDs, so sort before scanning for the gap.
    ids.sort();
    println!("Missing seat {}", find_seat(&ids));
    Ok(())
}
|
/**********************************************
> File Name : AVLTree.rs
> Author : lunar
> Email : lunar_ubuntu@qq.com
> Created Time : Tue 16 Feb 2021 05:02:38 PM CST
> Location : Shanghai
> Copyright@ https://github.com/xiaoqixian
**********************************************/
/*
 * Implement an AVL (self-balancing binary search) tree in Rust.
*/
use std::cmp::Ordering;
use std::ptr::NonNull;
use std::mem::drop;
use std::fmt::Display;
/*
* Using NonNull pointer can save some space,
* cause we use None to represent null pointer
* and None takes no memory.
*/
#[allow(unused)]
// A single AVL tree node, linked via raw NonNull pointers (None = no child).
struct TreeNode<T: Ord> {
// payload; ordering of the tree follows T's Ord
val: T,
// height of the subtree rooted here; a leaf has height 1
height: u32,
left: Option<NonNull<TreeNode<T>>>,
right: Option<NonNull<TreeNode<T>>>
}
impl<T> TreeNode<T> where T: Ord {
/// Construct a detached node with the given value and height
/// (callers pass height 1 for a fresh leaf).
pub fn new(val: T, height: u32) -> Self {
TreeNode {
val,
height,
left: None,
right: None,
}
}
}
// AVL tree handle: just the (possibly absent) root pointer.
struct Tree<T: Ord> {
root: Option<NonNull<TreeNode<T>>>
}
impl<T> Tree<T> where T: Ord + Clone + Display {
/// Create an empty tree.
pub fn new() -> Self {
Tree {
root: None
}
}
/// Left rotation: `node`'s right child becomes the new subtree root and is
/// returned. Heights of both rotated nodes are recomputed. Asserts if
/// `node` has no right child.
fn rotate_left(&self, node: Option<NonNull<TreeNode<T>>>) -> Option<NonNull<TreeNode<T>>> {
match node {
None => None,
Some(n) => {
unsafe {
let temp = (*n.as_ptr()).right.clone();
match &temp {
None => {
println!("Can't do rotate_left operation on a node without right subtree.");
assert!(false);
},
Some(rn) => {
// n adopts rn's left subtree; rn adopts n as its left child.
(*n.as_ptr()).right = (*rn.as_ptr()).left.clone();
(*rn.as_ptr()).left = node.clone();
}
}
// n now sits below temp, so update n's height first.
self.update_node_height(node);
self.update_node_height(temp);
temp
}
}
}
}
/// Right rotation: mirror image of `rotate_left`.
fn rotate_right(&self, node: Option<NonNull<TreeNode<T>>>) -> Option<NonNull<TreeNode<T>>> {
match node {
None => None,
Some(n) => {
unsafe {
let temp = (*n.as_ptr()).left.clone();
match &temp {
None => {
println!("Can't do rotate_right operation on a node without left subtree.");
assert!(false);
},
Some(ln) => {
(*n.as_ptr()).left = (*ln.as_ptr()).right.clone();
(*ln.as_ptr()).right = node.clone();
}
}
self.update_node_height(node);
self.update_node_height(temp);
temp
}
}
}
}
/// Recompute `node`'s height as 1 + max(child heights); no-op for None.
fn update_node_height(&self, node: Option<NonNull<TreeNode<T>>>) {
let mut max = 0;
match node {
None => {},
Some(n) => unsafe {
let left = &((*n.as_ptr()).left);
let right = &((*n.as_ptr()).right);
match left {
None => {},
Some(ln) => {
if (*ln.as_ptr()).height > max {
max = (*ln.as_ptr()).height;
}
}
}
match right {
None => {},
Some(rn) => {
if (*rn.as_ptr()).height > max {
max = (*rn.as_ptr()).height;
}
}
}
(*n.as_ptr()).height = max+1;
}
};
}
/// Height of a subtree; None counts as 0 (so leaves have height 1).
fn get_node_height(&self, node: &Option<NonNull<TreeNode<T>>>) -> u32 {
match node {
None => 0,
Some(n) => {
unsafe {
(*n.as_ptr()).height
}
}
}
}
/// Recursive insert; returns the (possibly new) subtree root after
/// rebalancing, which the caller stores back into its child pointer.
fn insert_with_node(&self, mut node: Option<NonNull<TreeNode<T>>>, val: T) -> Option<NonNull<TreeNode<T>>> {
match &node {
None => {
// Empty slot: heap-allocate a fresh leaf and leak it into a NonNull.
let new_node = Box::new(TreeNode::new(val, 1));
Some(unsafe {NonNull::new_unchecked(Box::into_raw(new_node))})
},
Some(n) => {
let node_val: &T;
let left: &mut Option<NonNull<TreeNode<T>>>;
let right: &mut Option<NonNull<TreeNode<T>>>;
unsafe {
node_val = &(*n.as_ptr()).val;
left = &mut(*n.as_ptr()).left;
right = &mut(*n.as_ptr()).right;
}
println!("The node val = {}", node_val);
match node_val.cmp(&val) {
Ordering::Equal => {
// NOTE(review): returning None on a duplicate makes the caller
// overwrite its child pointer with None, detaching (and leaking)
// the entire existing subtree — confirm this is intended.
println!("The AVLTree doesn't support duplicated node yet.");
return None;
},
Ordering::Greater => {
unsafe {
(*n.as_ptr()).left = self.insert_with_node((*n.as_ptr()).left, val.clone());
println!("insert left {}", &val);
}
// Left subtree grew too tall: rebalance.
if self.get_node_height(left) > self.get_node_height(right) + 1 {
println!("Need to rotate right.");
let mut left_val = val.clone();
match left {
None => {
println!("This is not possible");
assert!(false);
},
Some(ln) => {
unsafe {
left_val = (*ln.as_ptr()).val.clone();
}
}
}
/*
* if the node is inserted on the right side of the left child.
*/
// NOTE(review): for the LR case described above, val > left_val,
// i.e. left_val.cmp(&val) would be Less — testing Greater here
// looks inverted; confirm with an LR insertion sequence.
if let Ordering::Greater = left_val.cmp(&val) {
println!("Need to rotate left first.");
unsafe {
(*n.as_ptr()).left = self.rotate_left(*left);
}
}
println!("Then rotate right.");
node = self.rotate_right(node);
}
},
Ordering::Less => {
unsafe {
(*n.as_ptr()).right = self.insert_with_node((*n.as_ptr()).right, val.clone());
println!("insert right {}", &val);
}
// Right subtree grew too tall: rebalance.
if self.get_node_height(right) > self.get_node_height(left) + 1 {
println!("Need to roate left.");
let mut right_val = val.clone();
match right {
None => {
println!("This is not possible.");
assert!(false);
},
Some(rn) => {
right_val = unsafe {(*rn.as_ptr()).val.clone()};
}
};
// RL case: new value landed on the left side of the right child,
// so right-rotate the right child before the main left rotation.
if let Ordering::Greater = right_val.cmp(&val) {
println!("Need to rotate right first.");
unsafe {
// NOTE(review): this stores the rotated RIGHT child into
// `.left` — `.right` was almost certainly intended; confirm.
(*n.as_ptr()).left = self.rotate_right(*right);
}
};
println!("Then rotate left.");
node = self.rotate_left(node);
}
}
};
self.update_node_height(node);
self.print_node(&node);
node
}
}
}
/// Debug helper: print a node's value and height (or "None node").
fn print_node(&self, node: &Option<NonNull<TreeNode<T>>>) {
match node {
None => {
println!("None node");
},
Some(v) => unsafe {
println!("Node {}'s height = {}", (*v.as_ptr()).val, (*v.as_ptr()).height);
}
}
}
/// Recursive delete; returns the (possibly new) subtree root.
///
/// NOTE(review): every `drop(n)` below drops a Copy `NonNull`, which is a
/// no-op — the Box allocated in `insert_with_node` is never deallocated, so
/// removed nodes leak. Freeing would require `Box::from_raw`; confirm.
fn remove_with_node(&self, mut node: Option<NonNull<TreeNode<T>>>, val: &T) -> Option<NonNull<TreeNode<T>>> {
match &node {
None => {
return None;
},
Some(n) => {
let node_val = unsafe {
&(*n.as_ptr()).val
};
let left = unsafe {
&((*n.as_ptr()).left)
};
let right = unsafe {
&((*n.as_ptr()).right)
};
match node_val.cmp(val) {
Ordering::Greater => unsafe {
// Target is in the left subtree; removing may leave n right-heavy.
(*n.as_ptr()).left = self.remove_with_node((*n.as_ptr()).left, val);
if self.get_node_height(right) > self.get_node_height(left) + 1 {
match right {
None => {
println!("This is not possible.");
assert!(false);
},
Some(_) => {
// NOTE(review): this inner condition re-tests n's own left
// vs right heights (always false inside this right-heavy
// branch) and both rotations are rotate_right; an RL
// rebalance would normally right-rotate the RIGHT child,
// then LEFT-rotate `n` — confirm.
if self.get_node_height(left) > self.get_node_height(right) + 1 {
(*n.as_ptr()).right = self.rotate_right(*right);
}
node = self.rotate_right(node);
}
}
}
},
Ordering::Less => unsafe {
// Target is in the right subtree; removing may leave n left-heavy.
(*n.as_ptr()).right = self.remove_with_node((*n.as_ptr()).right, val);
if self.get_node_height(left) > self.get_node_height(right) + 1 {
match left {
None => {
println!("The left node is None.");
assert!(false);
},
Some(_) => {
// NOTE(review): unsigned (u32) subtraction — this underflows
// (panics in debug builds) whenever right is shorter than
// left, which is exactly the situation in this branch;
// confirm the intended comparison.
if self.get_node_height(right) - self.get_node_height(left) > 1 {
(*n.as_ptr()).left = self.rotate_left(*left);
}
node = self.rotate_right(node);
}
}
}
},
//The current node is the node to be deleted
Ordering::Equal => unsafe {
//If the current node is a leaf, just delete it.
//Leave the balance problem to father nodes.
if let None = left {
if let None = right {
drop(n);
return None;
}
}
/*
* If the current node has only right subtree,
* then its right subtree must be leaf node, otherwise
* the height difference between the right subtree and
* the left subtree will be more than 1.
* So we can just rotate left and release the left node.
* Then the right subtree will still be a leaf node.
*/
if let None = left {
let right_tree = (*n.as_ptr()).right;
drop(n);
return right_tree;
}
//So as the left subtree.
if let None = right {
let left_tree = (*n.as_ptr()).left;
drop(n);
return left_tree;
}
/*
* If both subtrees of the current node exist.
* Then copy the value from one of the child nodes,
* and delete one of the child nodes by traversely
* calling remove_with_node function.
* We delete from the higher subtree, cause the deletion
* will decrease the height by 1 as most. So we don't
* have to rebalance the tree.
* If two node are of the same height, delete from the
* right subtree by default.
*/
if self.get_node_height(right) >= self.get_node_height(left) {
match right {
None => {
println!("The right node is None.");
assert!(false);
},
Some(rn) => {
// Copy the right child's value up, then remove that value
// from the right subtree.
(*n.as_ptr()).val = (*rn.as_ptr()).val.clone();
(*n.as_ptr()).right = self.remove_with_node(*right, &(*n.as_ptr()).val);
}
}
} else {
match left {
None => {
println!("The left node is None.");
assert!(false);
},
Some(ln) => {
(*n.as_ptr()).val = (*ln.as_ptr()).val.clone();
(*n.as_ptr()).left = self.remove_with_node(*left, &(*n.as_ptr()).val);
}
}
}
self.update_node_height(node);
return node;
}
}
}
};
self.update_node_height(node);
node
}
/// Public insert entry point; rebinds the root after rebalancing.
pub fn insert(&mut self, val: T) {
self.root = self.insert_with_node(self.root, val).clone();
}
/// Public remove entry point; absent values leave the tree unchanged.
pub fn remove(&mut self, val: &T) {
self.root = self.remove_with_node(self.root, val).clone();
}
}
impl<T> Tree<T> where T: Display + Ord + Clone {
/// Print the tree level by level (breadth-first). Absent children are
/// printed as "null" and re-enqueued as placeholder pairs so each level
/// keeps its full width of 2^depth slots.
pub fn print_tree(&self) {
match &self.root {
None => {
println!("Empty tree");
return ;
},
Some(r) => {
let mut temp = self.root.clone();
let mut queue: Vec<Option<NonNull<TreeNode<T>>>> = Vec::new();
queue.push(temp.clone());
// `column` = number of slots in the current level;
// `count` = how many of them have been printed so far.
let mut column: u32 = 1;
let mut count: u32 = 0;
// Width of the deepest level: 2^(height - 1) slots.
let max_depth: u32 = unsafe {
1 << ((*r.as_ptr()).height - 1)
};
while !queue.is_empty() && column <= max_depth {
// Vec::remove(0) is O(n); acceptable for a debug printer.
temp = queue.remove(0);
count += 1;
match temp {
None => {
print!("null ");
// Placeholder children keep later levels aligned.
queue.push(None);
queue.push(None);
if count == column {
count = 0;
column <<= 1;
println!("");
}
continue;
},
Some(v) => unsafe {
print!("{} ", &(*v.as_ptr()).val);
match &(*v.as_ptr()).left {
None => {
queue.push(None);
},
Some(ln) => {
queue.push(Some(*ln));
}
}
match &(*v.as_ptr()).right {
None => {
queue.push(None);
},
Some(rn) => {
queue.push(Some(*rn));
}
}
// End of a level: reset the counter and double the width.
if count == column {
count = 0;
column <<= 1;
println!("");
}
}
}
}
}
}
}
}
/// Exercise the AVL tree: insert a fixed sequence, print, remove the
/// original root value, and print again.
fn main() {
    let mut tree: Tree<i32> = Tree::new();
    let values = vec![4, 6, 8, 2, 1, 5, 7, 9];
    for value in values {
        println!("insert {}", &value);
        tree.insert(value);
        println!("");
    }
    tree.print_tree();
    tree.remove(&4);
    tree.print_tree();
}
|
#![cfg(feature = "curly")]
use dynfmt::{Format, SimpleCurlyFormat};
macro_rules! test_fmt {
// Expands to a `#[test]` named `$name` asserting that formatting `$format`
// with the positional `$args` through `SimpleCurlyFormat` yields
// `$expected`. A trailing comma after the arguments is allowed.
($name:ident, $expected:expr, $format:expr, $($args:expr),* $(,)*) => {
#[test]
fn $name() {
assert_eq!(
$expected,
SimpleCurlyFormat
.format($format, &[$($args),*])
.expect("formatting failed")
);
}
};
}
// Positional `{}` smoke tests across Display-able argument types.
test_fmt!(string_display, "hello, world!", "hello, {}!", "world");
test_fmt!(number_display, "hello, 42!", "hello, {}!", 42);
test_fmt!(negative_display, "hello, -42!", "hello, {}!", -42);
test_fmt!(float_display, "hello, 4.2!", "hello, {}!", 4.2);
test_fmt!(boolean_display, "hello, true!", "hello, {}!", true);
// Explicit zero-based index form `{0}`.
test_fmt!(
string_display_by_index,
"hello, world!",
"hello, {0}!",
"world"
);
#[test]
fn string_display_by_name() {
    // Named `{name}` placeholders resolve against a string-keyed map.
    let mut named_args = std::collections::BTreeMap::new();
    named_args.insert("name", "world");
    let rendered = SimpleCurlyFormat
        .format("hello, {name}!", named_args)
        .expect("formatting failed");
    assert_eq!("hello, world!", rendered);
}
|
// Licensed: Apache 2.0
///
/// Wrap raw pointers
/// This is effectively syntax sugar to avoid;
/// `(*var).attr`, allowing `var.attr` on raw pointers.
///
/// Other utility functions may be exposed,
/// however the intent is to keep this a thin wrapper on raw pointers,
/// not to invent a new way of managing pointers.
///
/// Notes:
///
/// * Guaranteed to have *zero* memory and run-time overhead for release builds.
/// * Supports comparison with regular pointers,
/// convenient for comparing with `std::ptr::null()`.
/// * Supports converting `PtrMut` 'into' `PtrConst`,
/// so functions can be declared which take either.
/// * Can be used as much or as little as you like,
/// To get the value from a pointer: `plain_ptr = PtrMut(ptr)`
/// To get the original pointer: `plain_ptr.as_ptr()`
///
use std::ops::{
Deref,
DerefMut,
};
// ---------------------------------------------------------------------------
// Generics (PtrAny)
// Shared surface of `PtrMut` and `PtrConst`, abstracting over mutability.
pub trait PtrAnyImpl<T> {
/// Either `*mut T` or `*const T`
type BasePtr;
/// Wrap a raw pointer of the matching mutability.
fn new(ptr: Self::BasePtr) -> Self;
/// Gives a native type, from a `mut`.
/// Beware: this is a workaround for not being able to easily coerce types
/// when using `PtrAny`.
fn new_from_mut(ptr: PtrMut<T>) -> Self;
/// A null pointer of this wrapper type.
fn null() -> Self;
/// True when the wrapped pointer is null.
fn is_null(&self) -> bool;
/// Recover the underlying raw pointer.
fn as_ptr(&self) -> Self::BasePtr;
/// View as the const wrapper (identity for `PtrConst`).
fn as_const(&self) -> PtrConst<T>;
/// Utility function to support easy null pointer assignments:
/// `if let Some(var) = func_returns_pointer() { ... }`
fn as_option(&self) -> Option<Self> where Self: Sized;
}
// Umbrella trait: anything that derefs to `T`, is copyable and comparable,
// and implements `PtrAnyImpl` — in practice, `PtrMut<T>` or `PtrConst<T>`.
pub trait PtrAny<T>:
Deref<Target=T> +
Copy +
Clone +
PartialEq +
PtrAnyImpl<T> +
{}
// Blanket impl: any qualifying type gets `PtrAny` automatically.
impl<TPtr, T> PtrAny<T> for TPtr where TPtr:
Deref<Target=T> +
Copy +
Clone +
PartialEq +
PtrAnyImpl<T> +
{}
// ---------------------------------------------------------------------------
// PtrMut
// Thin wrapper around `*mut T`; repr(C) keeps it layout-compatible with the
// raw pointer for FFI.
#[repr(C)]
#[derive(Debug, Hash)]
pub struct PtrMut<T> {
// the wrapped raw pointer; only dereferenced via Deref/DerefMut
ptr: *mut T,
}
// only for easy access
/// Function-style constructor so call sites can write `PtrMut(raw)`.
#[allow(non_snake_case)]
pub fn PtrMut<T>(ptr: *mut T) -> PtrMut<T> {
PtrMut::new(ptr)
}
/// Free-function shorthand for `PtrMut::null()`.
pub fn null_mut<T>() -> PtrMut<T> {
PtrMut::null()
}
// Trait surface for `PtrMut`; each method forwards to the inherent impl.
impl<T> PtrAnyImpl<T> for PtrMut<T> {
type BasePtr = *mut T;
// classmethods
#[inline(always)]
fn new(ptr: Self::BasePtr) -> PtrMut<T> {
PtrMut::new(ptr)
}
#[inline(always)]
fn new_from_mut(ptr: PtrMut<T>) -> PtrMut<T> {
PtrMut::new_from_mut(ptr)
}
#[inline(always)]
fn null() -> PtrMut<T> {
// NOTE(review): constructed directly instead of delegating to
// `PtrMut::null()` like the sibling methods — same result, merely
// inconsistent.
PtrMut { ptr: ::std::ptr::null_mut() }
}
// methods
#[inline(always)]
fn is_null(&self) -> bool {
self.is_null()
}
#[inline(always)]
fn as_ptr(&self) -> Self::BasePtr {
self.as_ptr()
}
#[inline(always)]
fn as_option(&self) -> Option<PtrMut<T>> {
self.as_option()
}
#[inline(always)]
fn as_const(&self) -> PtrConst<T> {
self.as_const()
}
}
// Inherent constructors / accessors (mirrored by the PtrAnyImpl impl above).
impl<T> PtrMut<T> {
    // classmethods
    /// Wrap a raw mutable pointer.
    #[inline(always)]
    pub fn new(ptr: *mut T) -> PtrMut<T> {
        // Field-init shorthand; the original's `ptr as *mut T` cast was a no-op.
        PtrMut { ptr }
    }
    /// Identity conversion: a `PtrMut` is already the mutable wrapper.
    #[inline(always)]
    fn new_from_mut(ptr: PtrMut<T>) -> PtrMut<T> {
        ptr
    }
    /// A null mutable pointer.
    #[inline(always)]
    fn null() -> PtrMut<T> {
        PtrMut { ptr: ::std::ptr::null_mut() }
    }
    // methods
    /// True when the wrapped pointer is null.
    #[inline(always)]
    pub fn is_null(&self) -> bool {
        // Idiomatic: pointer's own is_null() instead of comparing to null_mut().
        self.ptr.is_null()
    }
    /// Recover the underlying raw pointer.
    #[inline(always)]
    pub fn as_ptr(&self) -> *mut T {
        self.ptr
    }
    /// `Some(self)` when non-null, `None` otherwise; supports
    /// `if let Some(p) = func_returning_pointer().as_option() { ... }`.
    #[inline(always)]
    pub fn as_option(&self) -> Option<PtrMut<T>> {
        if self.ptr.is_null() { None } else { Some(*self) }
    }
}
impl<T> PtrMut<T> {
/// Reinterpret as the const wrapper (`*mut T` -> `*const T`); always safe,
/// since it only removes the ability to mutate.
// only for 'PtrMut'
#[inline(always)]
pub fn as_const(&self) -> PtrConst<T> {
PtrConst::new(self.ptr as *const T)
}
}
// Manual Copy/Clone: a derive would require `T: Copy`/`T: Clone`, but copying
// the pointer itself is always fine regardless of T.
impl<T> Copy for PtrMut<T> { }
impl<T> Clone for PtrMut<T> {
#[inline(always)]
fn clone(&self) -> PtrMut<T> { *self }
}
// Deref/DerefMut provide the `var.attr` sugar; dereferencing a null or
// dangling wrapper is UB, exactly as with the raw pointer.
impl<T> Deref for PtrMut<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
unsafe { &*self.ptr }
}
}
impl<T> DerefMut for PtrMut<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.ptr }
}
}
// Expose other helpers
// Address equality against the sibling wrapper and both raw pointer types
// (convenient for comparing with `std::ptr::null()`).
impl<T> PartialEq for PtrMut<T> {
fn eq(&self, other: &PtrMut<T>) -> bool {
self.ptr == other.ptr
}
}
impl<T> PartialEq<PtrConst<T>> for PtrMut<T> {
fn eq(&self, other: &PtrConst<T>) -> bool {
self.ptr as *const T == other.ptr
}
}
// PtrMut == *mut
impl<T> PartialEq<*mut T> for PtrMut<T> {
fn eq(&self, other: &*mut T) -> bool {
self.ptr == *other
}
}
// PtrMut == *const
impl<T> PartialEq<*const T> for PtrMut<T> {
fn eq(&self, other: &*const T) -> bool {
self.ptr as *const T == *other
}
}
// PtrMut order
// Total order by numeric address, enabling use in sorted collections.
impl<T> PartialOrd<PtrMut<T>> for PtrMut<T> {
fn partial_cmp(&self, other: &PtrMut<T>) -> Option<::std::cmp::Ordering> {
(self.ptr as usize).partial_cmp(&((other.ptr) as usize))
}
}
impl<T> Ord for PtrMut<T> {
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
(self.ptr as usize).cmp(&((other.ptr) as usize))
}
}
impl<T> Eq for PtrMut<T> {}
// ---------------------------------------------------------------------------
// PtrConst
// Thin wrapper around `*const T`; repr(C) keeps it layout-compatible with the
// raw pointer for FFI.
#[repr(C)]
#[derive(Debug, Hash)]
pub struct PtrConst<T> {
// the wrapped raw pointer; only dereferenced via Deref
ptr: *const T,
}
// only for easy access
/// Function-style constructor so call sites can write `PtrConst(raw)`.
#[allow(non_snake_case)]
pub fn PtrConst<T>(ptr: *const T) -> PtrConst<T> {
PtrConst::new(ptr)
}
/// Free-function shorthand for `PtrConst::null()`.
pub fn null_const<T>() -> PtrConst<T> {
PtrConst::null()
}
// Trait surface for `PtrConst`; each method forwards to the inherent impl.
impl<T> PtrAnyImpl<T> for PtrConst<T> {
type BasePtr = *const T;
#[inline(always)]
fn new(ptr: Self::BasePtr) -> PtrConst<T> {
PtrConst::new(ptr)
}
#[inline(always)]
fn new_from_mut(ptr: PtrMut<T>) -> PtrConst<T> {
PtrConst::new_from_mut(ptr)
}
#[inline(always)]
fn null() -> PtrConst<T> {
PtrConst::null()
}
#[inline(always)]
fn is_null(&self) -> bool {
self.is_null()
}
#[inline(always)]
fn as_ptr(&self) -> Self::BasePtr {
self.as_ptr()
}
#[inline(always)]
fn as_option(&self) -> Option<PtrConst<T>> {
self.as_option()
}
#[inline(always)]
fn as_const(&self) -> PtrConst<T> {
self.as_const()
}
}
// Inherent constructors / accessors (mirrored by the PtrAnyImpl impl above).
impl<T> PtrConst<T> {
    // classmethods
    /// Wrap a raw const pointer.
    #[inline(always)]
    fn new(ptr: *const T) -> PtrConst<T> {
        // Field-init shorthand; the original's `ptr as *const T` cast was a no-op.
        PtrConst { ptr }
    }
    /// Demote a mutable wrapper to a const one.
    #[inline(always)]
    fn new_from_mut(ptr: PtrMut<T>) -> PtrConst<T> {
        ptr.as_const()
    }
    /// A null const pointer.
    #[inline(always)]
    fn null() -> PtrConst<T> {
        PtrConst { ptr: ::std::ptr::null() }
    }
    // methods
    /// True when the wrapped pointer is null.
    #[inline(always)]
    fn is_null(&self) -> bool {
        // Idiomatic: pointer's own is_null() instead of comparing to null().
        self.ptr.is_null()
    }
    /// Recover the underlying raw pointer.
    #[inline(always)]
    pub fn as_ptr(&self) -> *const T {
        self.ptr
    }
    /// `Some(self)` when non-null, `None` otherwise; supports
    /// `if let Some(p) = func_returning_pointer().as_option() { ... }`.
    #[inline(always)]
    pub fn as_option(&self) -> Option<PtrConst<T>> {
        if self.ptr.is_null() { None } else { Some(*self) }
    }
    /// Identity conversion, for symmetry with `PtrMut::as_const`.
    #[inline(always)]
    fn as_const(&self) -> PtrConst<T> { PtrConst::new(self.ptr) }
}
impl<T> PtrConst<T> {
/// Only for 'PtrConst'
///
/// Unlike other functions in this module that are _not_ marked unsafe,
/// this is something that should really be avoided,
/// since const-correctness should be maintained strictly
/// (that's why we have `PtrConst` and `PtrMut`).
///
/// This is needed for the case when we have a function which
/// returns a value whose mutable state is based on the input.
///
/// This way we can avoid writing it twice,
/// by writing the immutable version once (which will be assured not to modify the input)
/// then write a mutable wrapper that gets the output
/// and performs the unsafe cast on the output.
///
/// Later it may be worth trying to use generic functions here,
/// but for now allow unsafe casting.
///
/// # Safety
/// The caller must guarantee mutation through the result is actually
/// allowed (the pointee is not truly immutable or aliased immutably).
#[inline(always)]
#[allow(dead_code)]
pub unsafe fn as_mut(&self) -> PtrMut<T> {
PtrMut::new(self.ptr as *mut T)
}
}
// Manual Copy/Clone: a derive would require bounds on T, but copying the
// pointer itself is always fine regardless of T.
impl<T> Copy for PtrConst<T> { }
impl<T> Clone for PtrConst<T> {
#[inline(always)]
fn clone(&self) -> PtrConst<T> { *self }
}
// Deref provides the `var.attr` sugar; dereferencing a null or dangling
// wrapper is UB, exactly as with the raw pointer.
impl<T> Deref for PtrConst<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
unsafe { &*self.ptr }
}
}
// no DerefMut for PtrConst, only PtrMut
/*
impl<T> DerefMut for PtrConst<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.ptr }
}
}
*/
// Expose other helpers
// Address equality against the sibling wrapper and both raw pointer types
// (convenient for comparing with `std::ptr::null()`).
impl<T> PartialEq for PtrConst<T> {
fn eq(&self, other: &PtrConst<T>) -> bool {
self.ptr == other.ptr
}
}
impl<T> PartialEq<PtrMut<T>> for PtrConst<T> {
fn eq(&self, other: &PtrMut<T>) -> bool {
self.ptr == other.ptr as *const T
}
}
// PtrConst == *mut
impl<T> PartialEq<*mut T> for PtrConst<T> {
fn eq(&self, other: &*mut T) -> bool {
self.ptr == *other
}
}
// PtrConst == *const
impl<T> PartialEq<*const T> for PtrConst<T> {
fn eq(&self, other: &*const T) -> bool {
self.ptr == *other
}
}
// PtrConst order
// Total order by numeric address, enabling use in sorted collections.
impl<T> PartialOrd<PtrConst<T>> for PtrConst<T> {
fn partial_cmp(&self, other: &PtrConst<T>) -> Option<::std::cmp::Ordering> {
(self.ptr as usize).partial_cmp(&((other.ptr) as usize))
}
}
impl<T> Ord for PtrConst<T> {
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
(self.ptr as usize).cmp(&((other.ptr) as usize))
}
}
impl<T> Eq for PtrConst<T> {}
// `PtrMut` -> `PtrConst` conversion, so functions can accept either via
// `impl Into<PtrConst<T>>`.
impl<T> From<PtrMut<T>> for PtrConst<T> {
fn from(value: PtrMut<T>) -> PtrConst<T> {
PtrConst::new(value.ptr)
}
}
|
//! HyperLogLog implementation.
use std::cmp;
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher};
use std::marker::PhantomData;
use crate::hyperloglog_data::{
BIAS_DATA_OFFSET, BIAS_DATA_VEC, POW2MINX, RAW_ESTIMATE_DATA_OFFSET, RAW_ESTIMATE_DATA_VEC,
THRESHOLD_DATA_OFFSET, THRESHOLD_DATA_VEC,
};
/// A HyperLogLog is a data structure to count unique elements on a data stream.
///
/// # Examples
/// ```
/// use pdatastructs::hyperloglog::HyperLogLog;
///
/// // set up filter
/// let address_bits = 4; // so we store 2^4 = 16 registers in total
/// let mut hll = HyperLogLog::new(address_bits);
///
/// // add some data
/// hll.add(&"my super long string");
/// hll.add(&"another super long string");
/// hll.add(&"my super long string"); // again
///
/// // later
/// assert_eq!(hll.count(), 2);
/// ```
///
/// Note that the HyperLogLog is specific to `T`, so the following will not compile:
///
/// ```compile_fail
/// use pdatastructs::hyperloglog::HyperLogLog;
///
/// // set up filter
/// let address_bits = 4; // so we store 2^4 = 16 registers in total
/// let mut hll1 = HyperLogLog::<u8>::new(address_bits);
/// let hll2 = HyperLogLog::<i8>::new(address_bits);
///
/// hll1.merge(&hll2);
/// ```
///
/// # Applications
/// - an approximative `COUNT(DISTINCT x)` in SQL
/// - count distinct elements in a data stream
///
/// # How It Works
/// The HyperLogLog consists of `2^b` 8bit counters. Each counter is initialized to 0.
///
/// During insertion, a hash `h(x)` is calculated. The first `b` bits of the hash function are used
/// to address a register, the other bits are used to create a number `p` which essentially counts
/// the number of leading 0-bits (or in other words: the leftmost 1-bit). The addressed register is
/// then updated to the maximum of its current value and `p`.
///
/// The calculation of the count is based on `1 / Sum_0^{2^b} (2^-register_i)` with a bunch of
/// factors a corrections applied (see paper or source code).
///
/// # Implementation
/// - The registers always allocate 8 bits and are not compressed.
/// - No sparse representation is used at any point.
/// - A 64 bit hash function is used (like in HyperLogLog++ paper) instead of the 32 bit hash
/// function (like in the original HyperLogLog paper).
/// - Bias correction is applied and the data is currently just taken from the HyperLogLog++ paper
/// appendix.
///
/// # See Also
/// - `std::collections::HashSet`: can be used to get the exact count but requires you to store
/// each and every element
/// - `pdatastructs::bloomfilter::BloomFilter` and `pdatastructs::cuckoofilter::CuckooFilter`: when
/// you also want to check if a single element is in the observed set
///
/// # References
/// - ["HyperLogLog: the analysis of a near-optimal cardinality estimation algorithm", Philippe
/// Flajolet, Éric Fusy, Olivier Gandouet, Frédéric Meunier, 2007](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.142.9475)
/// - ["HyperLogLog in Practice: Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", Stefan
/// Heule, Marc Nunkesser, Alexander Hall, 2013](https://static.googleusercontent.com/media/research.google.com/en//pubs/archive/40671.pdf)
/// - ["Appendix to HyperLogLog in Practice: Algorithmic Engineering of a State of the Art
/// Cardinality Estimation Algorithm", Stefan Heule, Marc Nunkesser, Alexander Hall, 2016](https://goo.gl/iU8Ig)
/// - [Wikipedia: HyperLogLog](https://en.wikipedia.org/wiki/HyperLogLog)
#[derive(Clone)]
pub struct HyperLogLog<T, B = BuildHasherDefault<DefaultHasher>>
where
T: Hash + ?Sized,
B: BuildHasher + Clone + Eq,
{
// 2^b registers, each holding the maximum leading-zero rank seen (see add)
registers: Vec<u8>,
// number of index bits; register count is 2^b
b: usize,
// hasher factory; must compare equal on both sides for merge() to be valid
buildhasher: B,
// ties the element type without storing one; `fn() -> T` keeps T out of
// the struct's auto-trait considerations
phantom: PhantomData<fn() -> T>,
}
impl<T> HyperLogLog<T>
where
T: Hash + ?Sized,
{
/// Creates a new, empty HyperLogLog.
///
/// - `b` number of bits used for register selection, number of registers within the
/// HyperLogLog will be `2^b`. `b` must be in `[4, 18]`
///
/// Panics when `b` is out of bounds.
pub fn new(b: usize) -> Self {
// Default hasher state: SipHash via DefaultHasher.
let bh = BuildHasherDefault::<DefaultHasher>::default();
Self::with_hash(b, bh)
}
}
impl<T, B> HyperLogLog<T, B>
where
T: Hash + ?Sized,
B: BuildHasher + Clone + Eq,
{
/// Same as `new` but with a specific `BuildHasher`.
pub fn with_hash(b: usize, buildhasher: B) -> Self {
assert!(
(4..=18).contains(&b),
"b ({}) must be larger or equal than 4 and smaller or equal than 18",
b
);
// m = 2^b registers, all starting at zero.
let m = 1_usize << b;
let registers = vec![0; m];
Self {
registers,
b,
buildhasher,
phantom: PhantomData,
}
}
/// Get number of bits used for register selection.
pub fn b(&self) -> usize {
self.b
}
/// Get number of registers.
pub fn m(&self) -> usize {
self.registers.len()
}
/// Get `BuildHasher`.
pub fn buildhasher(&self) -> &B {
&self.buildhasher
}
/// Get relative error for this HyperLogLog configuration.
pub fn relative_error(&self) -> f64 {
// sqrt(3*ln(2) - 1) / sqrt(m), the standard-error bound from the paper.
(3f64 * 2f64.ln() - 1f64).sqrt() / (self.m() as f64).sqrt()
}
/// Adds an element to the HyperLogLog.
pub fn add(&mut self, obj: &T) {
let mut hasher = self.buildhasher.build_hasher();
obj.hash(&mut hasher);
let h: u64 = hasher.finish();
// split h into:
// - w = 64 - b upper bits
// - j = b lower bits
let w = h >> self.b;
let j = h - (w << self.b); // no 1 as in the paper since register indices are 0-based
// p = leftmost bit (1-based count)
// w's top b bits are zero by construction, so leading_zeros(w) >= b and
// the subtraction cannot underflow; p is always >= 1.
let p = w.leading_zeros() + 1 - (self.b as u32);
// Each register keeps the maximum rank observed for its substream.
let m_old = self.registers[j as usize];
self.registers[j as usize] = cmp::max(m_old, p as u8);
}
// Bias-correction factor (the paper's alpha_m): piecewise constants keyed
// on register count, with the asymptotic formula used for m >= 128.
fn am(&self) -> f64 {
let m = self.registers.len();
if m >= 128 {
0.7213 / (1. + 1.079 / (m as f64))
} else if m >= 64 {
0.709
} else if m >= 32 {
0.697
} else {
0.673
}
}
// Locate initial left/right neighbor indices around `e` in a sorted lookup
// table; either side is None when `e` falls off that end of the table.
fn neighbor_search_startpoints(lookup_array: &[f64], e: f64) -> (Option<usize>, Option<usize>) {
// binary search first nearest neighbor
match lookup_array.binary_search_by(|v| v.partial_cmp(&e).unwrap()) {
Ok(i) => (Some(i), Some(i)),
Err(i) => {
if i == 0 {
// no left index
(None, Some(0))
} else if i == lookup_array.len() {
// no right index
(Some(i - 1), None)
} else {
(Some(i - 1), Some(i))
}
}
}
}
// k-nearest-neighbor interpolation over the empirical bias tables from the
// HyperLogLog++ appendix: walk outward from the insertion point, always
// consuming the closer of the two frontier candidates.
fn estimate_bias(&self, e: f64) -> f64 {
let lookup_array = RAW_ESTIMATE_DATA_VEC[self.b - RAW_ESTIMATE_DATA_OFFSET];
let (mut idx_left, mut idx_right) = Self::neighbor_search_startpoints(lookup_array, e);
// collect k nearest neighbors
const K: usize = 6;
assert!(lookup_array.len() >= K);
let mut neighbors = [0; K];
for neighbor in &mut neighbors {
let (right_instead_left, idx) = match (idx_left, idx_right) {
(Some(i_left), Some(i_right)) => {
// 2 candidates, find better one
let delta_left = (lookup_array[i_left] - e).abs();
let delta_right = (lookup_array[i_right] - e).abs();
if delta_right < delta_left {
(true, i_right)
} else {
(false, i_left)
}
}
(Some(i_left), None) => {
// just left one is there, use it
(false, i_left)
}
(None, Some(i_right)) => {
// just right one is there, use it
(true, i_right)
}
_ => panic!("neighborhood search failed, this is bug!"),
};
*neighbor = idx;
// Advance the frontier on whichever side this pick came from.
if right_instead_left {
idx_right = if idx < lookup_array.len() - 1 {
Some(idx + 1)
} else {
None
};
} else {
idx_left = if idx > 0 { Some(idx - 1) } else { None };
}
}
// calculate mean of neighbors
let bias_data = BIAS_DATA_VEC[self.b - BIAS_DATA_OFFSET];
neighbors.iter().map(|&i| bias_data[i]).sum::<f64>() / (K as f64)
}
// LinearCounting estimate m * ln(m / V), where V (= `v`) is the number of
// registers still at zero; preferred for small cardinalities.
fn linear_counting(&self, v: usize) -> f64 {
let m = self.registers.len() as f64;
m * (m / (v as f64)).ln()
}
// Cardinality threshold (per-b table from the HLL++ appendix) below which
// the LinearCounting estimate is used instead of the raw estimate.
fn threshold(&self) -> usize {
THRESHOLD_DATA_VEC[self.b - THRESHOLD_DATA_OFFSET]
}
/// Guess the number of unique elements seen by the HyperLogLog.
pub fn count(&self) -> usize {
let m = self.registers.len() as f64;
// z = 1 / sum(2^-register): the harmonic-mean core of the estimator.
let z = 1f64
/ self
.registers
.iter()
.map(|&x| POW2MINX[x as usize])
.sum::<f64>();
// Raw estimate; bias-corrected in the small-range regime (e <= 5m).
let e = self.am() * m * m * z;
let e_star = if e <= (5. * m) {
e - self.estimate_bias(e)
} else {
e
};
// If any register is still zero, LinearCounting may estimate better.
let v = bytecount::count(&self.registers, 0);
let h = if v != 0 {
self.linear_counting(v)
} else {
e_star
};
if h <= (self.threshold() as f64) {
h as usize
} else {
e_star as usize
}
}
/// Merge w/ another HyperLogLog.
///
/// This HyperLogLog will then have the same state as if all elements seen by `other` where
/// directly added to `self`.
///
/// Panics when `b` or `buildhasher` parameter of `self` and `other` do not match.
pub fn merge(&mut self, other: &Self) {
assert_eq!(
self.b, other.b,
"b must be equal (left={}, right={})",
self.b, other.b
);
assert!(
self.buildhasher == other.buildhasher,
"buildhasher must be equal"
);
// Register-wise maximum: zip yields (&u8, &u8) pairs, max picks the
// larger reference, and `cloned` copies the winning value out.
self.registers = self
.registers
.iter()
.zip(other.registers.iter())
.map(|x| cmp::max(x.0, x.1))
.cloned()
.collect();
}
/// Empties the HyperLogLog.
pub fn clear(&mut self) {
self.registers = vec![0; self.registers.len()];
}
/// Checks whether the HyperLogLog has never seen an element.
pub fn is_empty(&self) -> bool {
self.registers.iter().all(|&x| x == 0)
}
}
impl<T> fmt::Debug for HyperLogLog<T>
where
T: Hash + ?Sized,
{
// Intentionally terse: only `b` is shown, the register contents are omitted.
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "HyperLogLog {{ b: {} }}", self.b)
}
}
// Accept both owned and borrowed element streams via `extend`.
impl<T> Extend<T> for HyperLogLog<T>
where
T: Hash,
{
fn extend<S: IntoIterator<Item = T>>(&mut self, iter: S) {
for elem in iter {
self.add(&elem);
}
}
}
impl<'a, T> Extend<&'a T> for HyperLogLog<T>
where
T: 'a + Hash + ?Sized,
{
fn extend<S: IntoIterator<Item = &'a T>>(&mut self, iter: S) {
for elem in iter {
self.add(elem);
}
}
}
// Unit tests for `HyperLogLog`. The expected counts asserted below are exact
// values produced by the crate's deterministic default build hasher; they pin
// both the raw estimator and its bias correction against regressions.
#[cfg(test)]
mod tests {
    use super::HyperLogLog;
    use crate::hash_utils::BuildHasherSeeded;
    use crate::hyperloglog_data::{RAW_ESTIMATE_DATA_OFFSET, RAW_ESTIMATE_DATA_VEC};
    use crate::test_util::{assert_send, NotSend};
    // `b` must lie in [4, 18]; probe both sides of each boundary.
    #[test]
    #[should_panic(expected = "b (3) must be larger or equal than 4 and smaller or equal than 18")]
    fn new_panics_b3() {
        HyperLogLog::<u64>::new(3);
    }
    #[test]
    fn new_works_b4() {
        HyperLogLog::<u64>::new(4);
    }
    #[test]
    fn new_works_b18() {
        HyperLogLog::<u64>::new(18);
    }
    #[test]
    #[should_panic(expected = "b (19) must be larger or equal than 4 and smaller or equal than 18")]
    fn new_panics_b19() {
        HyperLogLog::<u64>::new(19);
    }
    #[test]
    fn getter() {
        let hll = HyperLogLog::<u64>::new(8);
        assert_eq!(hll.b(), 8);
        // m (register count) is 2^b.
        assert_eq!(hll.m(), 1 << 8);
        hll.buildhasher();
    }
    #[test]
    fn relative_error() {
        let hll = HyperLogLog::<u64>::new(4);
        // 1.04 / sqrt(2^4) ≈ 0.26.
        assert!((hll.relative_error() - 0.2597).abs() < 0.001);
    }
    #[test]
    fn empty() {
        let hll = HyperLogLog::<u64>::new(8);
        assert_eq!(hll.count(), 0);
        assert!(hll.is_empty());
    }
    // The add_b*_n* tests insert n distinct integers and assert the exact
    // estimate; accuracy improves with larger b as expected.
    #[test]
    fn add_b4_n1k() {
        let mut hll = HyperLogLog::new(4);
        for i in 0..1000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 571);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b8_n1k() {
        let mut hll = HyperLogLog::new(8);
        for i in 0..1000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 964);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b12_n1k() {
        let mut hll = HyperLogLog::new(12);
        for i in 0..1000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 984);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b16_n1k() {
        let mut hll = HyperLogLog::new(16);
        for i in 0..1000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 998);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b8_n10k() {
        let mut hll = HyperLogLog::new(8);
        for i in 0..10000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 10196);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b12_n10k() {
        let mut hll = HyperLogLog::new(12);
        for i in 0..10000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 10050);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b16_n10k() {
        let mut hll = HyperLogLog::new(16);
        for i in 0..10000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 10055);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b16_n100k() {
        let mut hll = HyperLogLog::new(16);
        for i in 0..100000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 100656);
        assert!(!hll.is_empty());
    }
    #[test]
    fn add_b16_n1m() {
        let mut hll = HyperLogLog::new(16);
        for i in 0..1000000 {
            hll.add(&i);
        }
        assert_eq!(hll.count(), 1000226);
        assert!(!hll.is_empty());
    }
    #[test]
    fn clear() {
        let mut hll = HyperLogLog::new(8);
        for i in 0..1000 {
            hll.add(&i);
        }
        hll.clear();
        assert_eq!(hll.count(), 0);
        assert!(hll.is_empty());
    }
    #[test]
    fn clone() {
        // A clone must be an independent snapshot: mutating the original
        // afterwards must not affect the clone's count.
        let mut hll1 = HyperLogLog::new(12);
        for i in 0..500 {
            hll1.add(&i);
        }
        let c1a = hll1.count();
        let hll2 = hll1.clone();
        assert_eq!(hll2.count(), c1a);
        for i in 501..1000 {
            hll1.add(&i);
        }
        let c1b = hll1.count();
        assert_ne!(c1b, c1a);
        assert_eq!(hll2.count(), c1a);
    }
    #[test]
    fn merge() {
        // Merging two disjoint halves must equal one sketch fed everything.
        let mut hll1 = HyperLogLog::new(12);
        let mut hll2 = HyperLogLog::new(12);
        let mut hll = HyperLogLog::new(12);
        for i in 0..500 {
            hll.add(&i);
            hll1.add(&i);
        }
        for i in 501..1000 {
            hll.add(&i);
            hll2.add(&i);
        }
        assert_ne!(hll.count(), hll1.count());
        assert_ne!(hll.count(), hll2.count());
        hll1.merge(&hll2);
        assert_eq!(hll.count(), hll1.count());
    }
    #[test]
    #[should_panic(expected = "b must be equal (left=5, right=12)")]
    fn merge_panics_p() {
        let mut hll1 = HyperLogLog::<u64>::new(5);
        let hll2 = HyperLogLog::<u64>::new(12);
        hll1.merge(&hll2);
    }
    #[test]
    #[should_panic(expected = "buildhasher must be equal")]
    fn merge_panics_buildhasher() {
        // Different hash seeds would make registers incomparable.
        let mut hll1 =
            HyperLogLog::<u64, BuildHasherSeeded>::with_hash(12, BuildHasherSeeded::new(0));
        let hll2 = HyperLogLog::<u64, BuildHasherSeeded>::with_hash(12, BuildHasherSeeded::new(1));
        hll1.merge(&hll2);
    }
    #[test]
    fn debug() {
        let hll = HyperLogLog::<u64>::new(12);
        assert_eq!(format!("{:?}", hll), "HyperLogLog { b: 12 }");
    }
    #[test]
    fn extend() {
        // Extend by value; expected count matches add_b4_n1k.
        let mut hll = HyperLogLog::new(4);
        hll.extend(0..1000);
        assert_eq!(hll.count(), 571);
        assert!(!hll.is_empty());
    }
    #[test]
    fn extend_reference() {
        let mut hll: HyperLogLog<i32> = HyperLogLog::new(4);
        {
            let v: Vec<i32> = (0..1000).collect();
            hll.extend(&v); // Can `extend` by reference.
        }
        // `hll` is still usable after `v` is dropped:
        assert_eq!(hll.count(), 571);
        assert!(!hll.is_empty());
    }
    #[test]
    fn slice() {
        // Unsized element type (`[u8]`) works because add takes a reference.
        let mut hll: HyperLogLog<[u8]> = HyperLogLog::new(4);
        {
            let v = vec![0];
            hll.add(&v[..]);
        }
        // `hll` is still usable after `v` is dropped:
        assert_eq!(hll.count(), 1);
        assert!(!hll.is_empty());
    }
    #[test]
    fn issue_74() {
        // Regression test: this exact data set previously triggered a panic
        // during counting (upstream issue #74); only absence of panic matters.
        let panic_data = vec![
            "ofr-1-1517560282779878449",
            "ofr-1-1517589543534331019",
            "ofr-1-1517590532450550786",
            "ofr-1-1517644560121333465",
            "ofr-1-1517746611185649116",
            "ofr-1-1518051376300950677",
            "ofr-1-1518484387459892414",
            "ofr-1-1518488008830355319",
            "ofr-1-1518488407814571264",
            "ofr-1-1518561818180978525",
            "ofr-1-1518678274740717330",
            "ofr-1-1519461045930165638",
            "ofr-1-1519470647696557288",
            "ofr-1-1519567114956309703",
            "ofr-1-1519653616441755584",
            "ofr-1-1519655049912256356",
            "ofr-1-1520105514088138521",
            "ofr-1-1520294225822221822",
            "ofr-1-1520319017418884884",
            "ofr-1-1520505982893295286",
            "ofr-1-1520553027150677707",
            "ofr-1-1520925550686111649",
            "ofr-1-1520927095122167663",
            "ofr-1-1521290010424640726",
            "ofr-1-1521458659554886917",
            "ofr-1-1521943577454052994",
            "ofr-1-1521971260753839540",
            "ofr-1-1522000670785668758",
            "ofr-1-1522043914876749176",
            "ofr-1-1522206531944580201",
            "ofr-1-1522234960069920034",
            "ofr-1-1522333169901504119",
            "ofr-1-1522363887846294936",
            "ofr-1-1522484446749918495",
            "ofr-1-1522600458059122179",
            "ofr-1-1522687450205783676",
            "ofr-1-1522765602785461678",
            "ofr-1-1522815395559769187",
            "ofr-1-1522839112893465736",
            "ofr-1-1523001178903151627",
            "ofr-1-1523018056414397988",
            "ofr-1-1523096555609261412",
            "ofr-1-1523103371222189143",
            "ofr-1-1523256333918667890",
            "ofr-1-1523270427746895732",
            "ofr-1-1523411745695466681",
            "ofr-1-1523630566301631536",
            "ofr-1-1523839014553388093",
            "ofr-1-1523894230803940925",
            "ofr-1-1523931915564221543",
            "ofr-1-1524104734332815100",
            "ofr-1-1524113364834715372",
            "ofr-1-1524209603273164167",
            "ofr-1-1524276802153219312",
            "ofr-1-1524554894791804305",
            "ofr-1-1524621894100584193",
        ];
        let mut hll = HyperLogLog::new(4);
        for entry in &panic_data {
            hll.add(entry);
        }
        hll.count();
    }
    #[test]
    fn neighbor_search_startpoints() {
        // Exercises the bias-correction lookup: below range, above range,
        // between entries, and exactly on an entry.
        let b = 4;
        let lookup_array = RAW_ESTIMATE_DATA_VEC[b - RAW_ESTIMATE_DATA_OFFSET];
        assert_eq!(
            HyperLogLog::<u32>::neighbor_search_startpoints(lookup_array, 0.),
            (None, Some(0))
        );
        assert_eq!(
            HyperLogLog::<u32>::neighbor_search_startpoints(lookup_array, 100.),
            (Some(78), None)
        );
        assert_eq!(
            HyperLogLog::<u32>::neighbor_search_startpoints(lookup_array, 20.),
            (Some(14), Some(15))
        );
        assert_eq!(
            HyperLogLog::<u32>::neighbor_search_startpoints(lookup_array, 11.),
            (Some(0), Some(0))
        );
        assert_eq!(
            HyperLogLog::<u32>::neighbor_search_startpoints(lookup_array, 77.2394),
            (Some(78), Some(78))
        );
        assert_eq!(
            HyperLogLog::<u32>::neighbor_search_startpoints(lookup_array, 13.2882),
            (Some(4), Some(4))
        );
    }
    #[test]
    fn add_unsized() {
        let mut hll = HyperLogLog::new(4);
        hll.add("test1");
        hll.add("test2");
        assert_eq!(hll.count(), 2);
    }
    #[test]
    fn send() {
        // Compile-time check: the sketch is `Send` even for non-Send `T`,
        // since no `T` value is ever stored.
        let hll: HyperLogLog<NotSend> = HyperLogLog::new(4);
        assert_send(&hll);
    }
}
|
// Auto-generated (svd2rust-style) accessor API for the WPCCR register.
// Field offsets and widths below mirror the device's SVD description; do not
// hand-edit behavior here — regenerate from the SVD instead.
#[doc = "Register `WPCCR` reader"]
pub type R = crate::R<WPCCR_SPEC>;
#[doc = "Register `WPCCR` writer"]
pub type W = crate::W<WPCCR_SPEC>;
#[doc = "Field `DCYC` reader - Number of dummy cycles"]
pub type DCYC_R = crate::FieldReader;
#[doc = "Field `DCYC` writer - Number of dummy cycles"]
pub type DCYC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `DHQC` reader - Delay hold quarter cycle"]
pub type DHQC_R = crate::BitReader;
#[doc = "Field `DHQC` writer - Delay hold quarter cycle"]
pub type DHQC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SSHIFT` reader - Sample shift"]
pub type SSHIFT_R = crate::BitReader;
#[doc = "Field `SSHIFT` writer - Sample shift"]
pub type SSHIFT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bits 0:4 - Number of dummy cycles"]
    #[inline(always)]
    pub fn dcyc(&self) -> DCYC_R {
        DCYC_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bit 28 - Delay hold quarter cycle"]
    #[inline(always)]
    pub fn dhqc(&self) -> DHQC_R {
        DHQC_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 30 - Sample shift"]
    #[inline(always)]
    pub fn sshift(&self) -> SSHIFT_R {
        SSHIFT_R::new(((self.bits >> 30) & 1) != 0)
    }
}
impl W {
    #[doc = "Bits 0:4 - Number of dummy cycles"]
    #[inline(always)]
    #[must_use]
    pub fn dcyc(&mut self) -> DCYC_W<WPCCR_SPEC, 0> {
        DCYC_W::new(self)
    }
    #[doc = "Bit 28 - Delay hold quarter cycle"]
    #[inline(always)]
    #[must_use]
    pub fn dhqc(&mut self) -> DHQC_W<WPCCR_SPEC, 28> {
        DHQC_W::new(self)
    }
    #[doc = "Bit 30 - Sample shift"]
    #[inline(always)]
    #[must_use]
    pub fn sshift(&mut self) -> SSHIFT_W<WPCCR_SPEC, 30> {
        SSHIFT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "write communication configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wpccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wpccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct WPCCR_SPEC;
impl crate::RegisterSpec for WPCCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`wpccr::R`](R) reader structure"]
impl crate::Readable for WPCCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`wpccr::W`](W) writer structure"]
impl crate::Writable for WPCCR_SPEC {
    // Both bitmaps are zero, i.e. no fields with write-1-to-clear/set
    // side effects are declared for this register (svd2rust convention).
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets WPCCR to value 0"]
impl crate::Resettable for WPCCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::fs::File;
use std;
|
use std::fmt;
use header::{Header, Headers};
use http::{MessageHead, ResponseHead, Body};
use status::StatusCode;
use version::HttpVersion;
/// An HTTP Response
pub struct Response<B = Body> {
    version: HttpVersion,
    headers: Headers,
    status: StatusCode,
    // Raw subject line of a received response; only tracked when the
    // `raw_status` feature is enabled.
    #[cfg(feature = "raw_status")]
    raw_status: ::http::RawStatus,
    // `None` until a body is set; `Response::<Body>::body()` substitutes the
    // default body in that case.
    body: Option<B>,
}
impl<B> Response<B> {
    /// Constructs a default response
    #[inline]
    pub fn new() -> Response<B> {
        Default::default()
    }
    /// Get the HTTP version of this response.
    #[inline]
    pub fn version(&self) -> HttpVersion {
        self.version
    }
    /// Get the headers from the response.
    #[inline]
    pub fn headers(&self) -> &Headers {
        &self.headers
    }
    /// Get a mutable reference to the headers.
    #[inline]
    pub fn headers_mut(&mut self) -> &mut Headers {
        &mut self.headers
    }
    /// Get the status from the server.
    #[inline]
    pub fn status(&self) -> StatusCode {
        self.status
    }
    /// Get the raw status code and reason.
    ///
    /// This method is only useful when inspecting the raw subject line from
    /// a received response.
    #[inline]
    #[cfg(feature = "raw_status")]
    pub fn status_raw(&self) -> &::http::RawStatus {
        &self.raw_status
    }
    /// Set the `StatusCode` for this response.
    #[inline]
    pub fn set_status(&mut self, status: StatusCode) {
        self.status = status;
    }
    /// Set the status and move the Response.
    ///
    /// Useful for the "builder-style" pattern.
    #[inline]
    pub fn with_status(mut self, status: StatusCode) -> Self {
        self.status = status;
        self
    }
    /// Set a header and move the Response.
    ///
    /// Useful for the "builder-style" pattern.
    #[inline]
    pub fn with_header<H: Header>(mut self, header: H) -> Self {
        self.headers.set(header);
        self
    }
    /// Set the headers and move the Response.
    ///
    /// Useful for the "builder-style" pattern.
    #[inline]
    pub fn with_headers(mut self, headers: Headers) -> Self {
        self.headers = headers;
        self
    }
    /// Set the body.
    #[inline]
    pub fn set_body<T: Into<B>>(&mut self, body: T) {
        self.body = Some(body.into());
    }
    /// Set the body and move the Response.
    ///
    /// Useful for the "builder-style" pattern.
    #[inline]
    pub fn with_body<T: Into<B>>(mut self, body: T) -> Self {
        self.body = Some(body.into());
        self
    }
    /// Read the body.
    #[inline]
    pub fn body_ref(&self) -> Option<&B> {
        self.body.as_ref()
    }
}
impl Response<Body> {
    /// Take the `Body` of this response, substituting an empty default body
    /// when none was ever set.
    #[inline]
    pub fn body(self) -> Body {
        match self.body {
            Some(body) => body,
            None => Default::default(),
        }
    }
}
#[cfg(not(feature = "raw_status"))]
impl<B> Default for Response<B> {
    /// A response with default version/headers/status and no body.
    fn default() -> Response<B> {
        Response {
            body: None,
            status: Default::default(),
            headers: Default::default(),
            version: Default::default(),
        }
    }
}
#[cfg(feature = "raw_status")]
impl<B> Default for Response<B> {
    /// A response with default version/headers/status, a default raw subject
    /// line, and no body.
    fn default() -> Response<B> {
        Response {
            body: None,
            raw_status: Default::default(),
            status: Default::default(),
            headers: Default::default(),
            version: Default::default(),
        }
    }
}
impl fmt::Debug for Response {
    /// Debug output shows status, version and headers; the body is omitted.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = f.debug_struct("Response");
        builder.field("status", &self.status);
        builder.field("version", &self.version);
        builder.field("headers", &self.headers);
        builder.finish()
    }
}
/// Constructs a response using a received ResponseHead and optional body
#[inline]
#[cfg(not(feature = "raw_status"))]
pub fn from_wire<B>(incoming: ResponseHead, body: Option<B>) -> Response<B> {
    // Read the status before the head is moved field-by-field below.
    let status = incoming.status();
    Response::<B> {
        // Field-init shorthand instead of the redundant `status: status` /
        // `body: body` (clippy: redundant_field_names).
        status,
        version: incoming.version,
        headers: incoming.headers,
        body,
    }
}
/// Constructs a response using a received ResponseHead and optional body
#[inline]
#[cfg(feature = "raw_status")]
pub fn from_wire<B>(incoming: ResponseHead, body: Option<B>) -> Response<B> {
    // Read the status before the head is moved field-by-field below.
    let status = incoming.status();
    Response::<B> {
        // Field-init shorthand instead of the redundant `status: status` /
        // `body: body` (clippy: redundant_field_names).
        status,
        version: incoming.version,
        headers: incoming.headers,
        // Preserve the raw subject line for `status_raw()`.
        raw_status: incoming.subject,
        body,
    }
}
/// Splits this response into a MessageHead<StatusCode> and its body
#[inline]
pub fn split<B>(res: Response<B>) -> (MessageHead<StatusCode>, Option<B>) {
let head = MessageHead::<StatusCode> {
version: res.version,
headers: res.headers,
subject: res.status
};
(head, res.body)
}
|
#[doc = "Reader of register SPINLOCK8"]
pub type R = crate::R<u32, super::SPINLOCK8>;
// SPINLOCK8 exposes no sub-fields; the empty impl keeps the generated
// register API shape uniform across registers.
impl R {}
|
//! # msfs-rs
//!
//! Rust bindings for the MSFS SDK. These bindings include:
//!
//! - MSFS Gauge API
//! - SimConnect API
//!
//! ## Building
//!
//! If your MSFS SDK is not installed to `C:\MSFS SDK` you will need to set the
//! `MSFS_SDK` env variable to the correct path.
//!
//! ## Known Issues
//!
//! Until <https://github.com/rust-lang/rfcs/issues/2771> is fixed, you will
//! have to run the `msfs-fix` util on your output wasm files, like so:
//! ```shell
//! $ cargo build
//! $ msfs-fix target/wasm32-wasi/release/foo.wasm > target/wasm32-wasi/release/foo.wasm
//! ```
pub mod msfs;
pub mod sim_connect;
pub mod sys;
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use fidl_mlme::BssDescription;
use Ssid;
use std::cmp::Ordering;
#[derive(Clone, Debug, PartialEq)]
/// Summary of a single scanned BSS, converted from an MLME `BssDescription`.
pub struct BssInfo {
    /// BSSID (MAC address) of the access point.
    pub bssid: [u8; 6],
    pub ssid: Ssid,
    /// Receive strength in dBm, derived from RCPI when present, else RSSI.
    pub rx_dbm: i8,
    /// Primary channel number.
    pub channel: u8,
    /// True when the BSS advertises an RSN element.
    pub protected: bool,
    /// Whether this implementation can associate with the BSS.
    pub compatible: bool,
}
#[derive(Clone, Debug, PartialEq)]
/// An ESS represented by its single best BSS.
pub struct EssInfo {
    pub best_bss: BssInfo,
}
/// Converts an MLME `BssDescription` into the summary `BssInfo` form.
pub fn convert_bss_description(bss: &BssDescription) -> BssInfo {
    BssInfo {
        // `[u8; 6]` is `Copy`; the previous `.clone()` was redundant
        // (clippy: clone_on_copy).
        bssid: bss.bssid,
        ssid: bss.ssid.bytes().collect(),
        rx_dbm: get_rx_dbm(bss),
        channel: bss.chan.primary,
        protected: bss.rsn.is_some(),
        compatible: is_bss_compatible(bss),
    }
}
/// Orders two BSSes for selection: compatibility first, then signal strength.
/// `Greater` means `right` is the better candidate... no — `Greater` means
/// `left` ranks higher; see the tests' `assert_bss_cmp` helper for the exact
/// convention (worse < better).
pub fn compare_bss(left: &BssDescription, right: &BssDescription) -> Ordering {
    let by_compatibility = is_bss_compatible(left).cmp(&is_bss_compatible(right));
    by_compatibility.then_with(|| get_rx_dbm(left).cmp(&get_rx_dbm(right)))
}
/// Best available receive-strength figure in dBm: RCPI (reported in half-dBm
/// units) is preferred, RSSI is the fallback, and `i8::MIN` marks "no
/// measurement at all".
fn get_rx_dbm(bss: &BssDescription) -> i8 {
    match (bss.rcpi_dbmh, bss.rssi_dbm) {
        (rcpi, _) if rcpi != 0 => (rcpi / 2) as i8,
        (_, rssi) if rssi != 0 => rssi,
        _ => ::std::i8::MIN,
    }
}
/// A BSS is currently considered compatible only when it advertises no RSN.
// TODO(hahnr): check if RSN is supported
fn is_bss_compatible(bss: &BssDescription) -> bool {
    // Idiomatic form of the previous `match` over the Option.
    bss.rsn.is_none()
}
#[cfg(test)]
mod tests {
    use super::*;
    use fidl_mlme;
    use std::cmp::Ordering;
    #[test]
    fn compare() {
        // Identical BSSes should be ranked equal
        assert_eq!(Ordering::Equal,
                   compare_bss(&bss(-10, -30, true), &bss(-10, -30, true)));
        // Compatibility takes priority over everything else
        assert_bss_cmp(&bss(-10, -10, false), &bss(-50, -50, true));
        // RCPI takes priority over RSSI
        assert_bss_cmp(&bss(-20, -30, true), &bss(-30, -20, true));
        // Compare RSSI if RCPI is absent
        assert_bss_cmp(&bss(-30, 0, true), &bss(-20, 0, true));
        // Having an RCPI measurement is always better than not having any measurement
        assert_bss_cmp(&bss(0, 0, true), &bss(0, -200, true));
        // Having an RSSI measurement is always better than not having any measurement
        assert_bss_cmp(&bss(0, 0, true), &bss(-100, 0, true));
    }
    // Asserts a strict ordering in both directions: `worse` < `better`.
    fn assert_bss_cmp(worse: &fidl_mlme::BssDescription, better: &fidl_mlme::BssDescription) {
        assert_eq!(Ordering::Less, compare_bss(worse, better));
        assert_eq!(Ordering::Greater, compare_bss(better, worse));
    }
    // Builds a minimal BssDescription fixture; `compatible` controls whether
    // an (empty) RSN element is attached, and the construction is sanity-
    // checked against `is_bss_compatible` before returning.
    fn bss(rssi_dbm: i8, rcpi_dbmh: i16, compatible: bool) -> fidl_mlme::BssDescription {
        let ret = fidl_mlme::BssDescription {
            bssid: [0, 0, 0, 0, 0, 0],
            ssid: String::new(),
            bss_type: fidl_mlme::BssTypes::Infrastructure,
            beacon_period: 100,
            dtim_period: 100,
            timestamp: 0,
            local_time: 0,
            rsn: if compatible { None } else { Some(Vec::new()) },
            vht_cap: None,
            vht_op: None,
            chan: fidl_mlme::WlanChan { primary: 1, secondary80: 0, cbw: fidl_mlme::Cbw::Cbw20 },
            rssi_dbm,
            rcpi_dbmh,
            rsni_dbh: 0
        };
        assert_eq!(compatible, is_bss_compatible(&ret));
        ret
    }
}
|
//! A Single-Producer, Multiple-Consumer queue.
use std::sync::mpsc::{channel, Receiver, RecvError, Sender, SendError};
use std::sync::{Arc, Mutex, MutexGuard};
use std::fmt;
use std::any::Any;
use std::error::Error;
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
/// Error from broadcast module.
pub enum BroadcastError<T> {
    /// Sending failed; carries the value that could not be delivered.
    SendError(T),
    /// Receiving on the underlying channel failed.
    RecvError,
}
impl<T: fmt::Display> fmt::Display for BroadcastError<T> {
    /// Human-readable message; send errors include the undelivered value.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            &BroadcastError::SendError(ref data) => {
                write!(fmt, "could not send data on channel: {}", data)
            }
            &BroadcastError::RecvError => {
                write!(fmt, "could not receive data on channel")
            }
        }
    }
}
// `description`/`cause` are the pre-Rust-1.33 `std::error::Error` API; they
// are kept as-is for compatibility with the toolchain this crate targets.
impl<T: Send + fmt::Display + fmt::Debug + Any> Error for BroadcastError<T> {
    fn description(&self) -> &str {
        match *self {
            BroadcastError::SendError(_) => "could not send data on channel",
            BroadcastError::RecvError => "could not receive data on channel",
        }
    }
    // No underlying error is retained, so there is never a cause to report.
    fn cause(&self) -> Option<&Error> {
        None
    }
}
impl<T> From<SendError<T>> for BroadcastError<T> {
    /// Wraps the undelivered payload of a failed channel send.
    fn from(err: SendError<T>) -> BroadcastError<T> {
        // `SendError` is a tuple struct with a public payload field.
        BroadcastError::SendError(err.0)
    }
}
impl<T> From<RecvError> for BroadcastError<T> {
    /// Any receive failure collapses into the payload-free variant.
    fn from(_: RecvError) -> BroadcastError<T> {
        BroadcastError::RecvError
    }
}
/// Struct that sends message on a broadcast pattern.
pub struct Broadcast<T> {
    // Shared sender registry; every `Consumer` holds a clone of this `Arc`.
    inner: Arc<Inner<T>>,
}
impl<T> Broadcast<T> {
    /// Create a new Broadcast struct.
    pub fn new() -> Broadcast<T> {
        let shared = Arc::new(Inner { senders: Mutex::new(Vec::new()) });
        Broadcast { inner: shared }
    }
    /// Create a Consumer that listens to messages from the Broadcaster.
    pub fn consume(&self) -> Consumer<T> {
        let (sender, receiver) = channel();
        // Register the new endpoint so future sends reach this consumer.
        self.inner.add_sender(sender);
        Consumer { inner: self.inner.clone(), receiver: receiver }
    }
}
impl<T: Clone> Broadcast<T> {
    /// Send a message on the broadcast.
    ///
    /// The value is cloned once per registered consumer; the first failing
    /// send aborts with an error.
    pub fn send(&self, data: T) -> Result<(), BroadcastError<T>> {
        let senders = self.inner.read_senders();
        for sender in &*senders {
            try!(sender.send(data.clone()));
        }
        Ok(())
    }
}
// State shared between a `Broadcast` and all of its `Consumer`s:
// one mpsc `Sender` per registered consumer, guarded by a mutex.
struct Inner<T> {
    senders: Mutex<Vec<Sender<T>>>,
}
impl<T> Inner<T> {
    /// Locks and returns the sender list; the guard's lifetime is tied to
    /// `&self` by elision.
    fn read_senders(&self) -> MutexGuard<'_, Vec<Sender<T>>> {
        self.senders.lock().unwrap()
    }
    /// Registers another sender that will receive all future broadcasts.
    fn add_sender(&self, sender: Sender<T>) {
        self.senders.lock().unwrap().push(sender);
    }
}
/// Struct that receives messages from Broadcast.
pub struct Consumer<T> {
    // Kept so `clone()` can register additional receivers with the broadcaster.
    inner: Arc<Inner<T>>,
    // This consumer's private receiving endpoint.
    receiver: Receiver<T>,
}
impl<T> Consumer<T> {
    /// Receive a message from the Broadcast.
    ///
    /// This function will block.
    pub fn recv(&self) -> Result<T, BroadcastError<T>> {
        // Equivalent to the `try!` + `Ok` pattern: the channel error is
        // converted through `From` on the failure path only.
        self.receiver.recv().map_err(From::from)
    }
}
impl<T> Clone for Consumer<T> {
    /// Creates an independent consumer that also receives every future
    /// broadcast (past messages are not replayed).
    fn clone(&self) -> Self {
        let (sender, receiver) = channel();
        self.inner.add_sender(sender);
        Consumer { inner: self.inner.clone(), receiver: receiver }
    }
}
/// Create a (Broadcast<T>, Consumer<T>) pair.
pub fn broadcast_channel<T: Clone>() -> (Broadcast<T>, Consumer<T>) {
    let producer = Broadcast::new();
    let first_consumer = producer.consume();
    (producer, first_consumer)
}
#[cfg(test)]
mod test {
    use broadcast::broadcast_channel;
    use super::Inner;
    use std::sync::{Arc, Mutex};
    use std::sync::mpsc::{channel};
    use std::thread::spawn;
    // White-box test: iterating the locked sender list reaches every endpoint.
    #[test]
    fn inner_iterator() {
        let (s1, r1) = channel();
        let (s2, r2) = channel();
        let inner = Arc::new(Inner { senders: Mutex::new(vec!(s1, s2)) });
        let guard = inner.read_senders();
        for s in guard.iter() {
            assert!(s.send(10u8).is_ok());
        }
        assert_eq!(r1.recv().unwrap(), 10u8);
        assert_eq!(r2.recv().unwrap(), 10u8);
    }
    // Each consumer gets its own copy of every broadcast message.
    #[test]
    fn sends_to_multiple_consumers() {
        let (p, c) = broadcast_channel();
        let c2 = c.clone();
        let res = p.send(9u8);
        assert!(res.is_ok());
        let res = c.recv();
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), 9u8);
        let res = c2.recv();
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), 9u8)
    }
    // Cross-thread delivery: workers ack via auxiliary channels so the test
    // can deterministically wait for both receivers.
    #[test]
    fn test_send_threads() {
        let (p, c1) = broadcast_channel();
        let c2 = c1.clone();
        let (s1, r1) = channel();
        let (s2, r2) = channel();
        let _thread = spawn(move || {
            assert_eq!(c1.recv().unwrap(), 9u8);
            s1.send(10u8).unwrap();
        });
        let _thread = spawn(move || {
            assert_eq!(c2.recv().unwrap(), 9u8);
            s2.send(10u8).unwrap();
        });
        assert!(p.send(9u8).is_ok());
        assert!(r1.recv().is_ok());
        assert!(r2.recv().is_ok());
    }
}
|
//! Content processing for POST and PUT.
use mime::Mime;
/// Processed content from PUT and POST requests.
///
/// This will automatically store posted content in its most sensible form based on its MIME type.
pub struct Content {
    // MIME type the payload arrived with.
    mime: Mime,
    // The payload in its decoded representation (see `Data`).
    data: Data,
}
/// The stored form of the data.
enum Data {
    /// Encoded form data.
    Form(String),
    /// JSON encoded data.
    Json(String),
    /// XML encoded data.
    Xml(String),
    /// Other plaintext data.
    Text(String),
    /// Multipart encoded data; each part is stored in its own decoded form.
    Multipart(Vec<Data>),
    /// Binary data stored in memory.
    Blob(Vec<u8>),
}
|
// Public module declarations; each module's implementation lives in the
// sibling file of the same name. `prop` holds property tests (test-only).
pub mod ast;
pub mod expand_visitor;
pub mod expander;
pub mod lexer;
pub mod parser;
pub mod rename_idents;
pub mod replace_idents;
pub mod span;
pub mod span_visitor;
pub mod tokens;
pub mod tryfrom_visitor;
pub mod visitors;
#[cfg(test)]
mod prop;
|
use std::cmp::min;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::SyncSender;
use std::time::Duration;
use chashmap::CHashMap;
use crossbeam::queue::ArrayQueue;
use rug::Integer;
use rug::ops::Pow;
use crate::algebra;
use crate::serial_MPQS::{initialize_qs, InitResult};
use crate::tonelli_shanks::tonelli_shanks;
/// Multi-threaded MPQS (Multiple Polynomial Quadratic Sieve) driver.
///
/// Spawns one sieving worker per logical CPU. Workers deposit smooth
/// relations into a shared bounded queue; the main thread waits for the
/// first "enough relations" signal, then repeatedly drains the queue and
/// retries the linear-algebra step until it yields a factor of `n`.
/// The `Option` return mirrors `algebra`'s result; the surrounding loop
/// otherwise keeps collecting relations indefinitely.
pub fn mpqs(n: &Integer) -> Option<Integer> {
    // Sieve parameters precomputed from n: factor base, modular square
    // roots, prime logs, sieve radius and smoothness threshold.
    let InitResult {
        roota,
        factorbase,
        tsqrt,
        xmax,
        tlog,
        thresh,
        min_prime,
    } = initialize_qs(n);
    // Slightly more capacity than factor-base entries — roughly the number
    // of relations the algebra step needs before dependencies appear.
    let smooths = ArrayQueue::new(factorbase.len() + 100);
    let (sender, receiver) = std::sync::mpsc::sync_channel(num_cpus::get());
    // `roota` is the shared polynomial-selection cursor advanced by workers.
    let roota = Arc::new(Mutex::new(roota));
    let arc_smooths = Arc::new(smooths);
    // Partial relations awaiting a matching cofactor, shared by all workers.
    let partials = Arc::new(CHashMap::default());
    for _ in 0..num_cpus::get() {
        // Each worker receives its own clones/handles of the shared state.
        let z = n.clone();
        let factorbase = factorbase.clone();
        let sender = sender.clone();
        let tsqrt = tsqrt.clone();
        let tlog = tlog.clone();
        let arc_smooths = arc_smooths.clone();
        let roota = roota.clone();
        let partials = partials.clone();
        std::thread::spawn(move || {
            thread_loop(
                z,
                factorbase,
                arc_smooths,
                sender,
                roota,
                tsqrt,
                tlog,
                xmax,
                min_prime,
                thresh,
                partials,
            )
        });
    }
    // Block until some worker signals that enough smooth relations exist.
    let _ = receiver.recv();
    let mut new_smooth: Vec<_> = Vec::with_capacity(arc_smooths.len());
    loop {
        // Wait for fresh relations, then drain everything currently queued.
        while arc_smooths.is_empty() {
            std::thread::sleep(Duration::from_millis(100));
        }
        while let Ok(t) = arc_smooths.pop() {
            new_smooth.push(t);
        }
        // Retry linear algebra over all relations gathered so far.
        if let Some(ris) = algebra::algebra(&factorbase, &new_smooth, n) {
            return Some(ris);
        }
    }
}
/// Sieving worker: repeatedly selects a fresh polynomial (advancing the
/// shared prime cursor `roota`), sieves the interval `[-xmax, xmax]` in
/// segments, and pushes smooth relations into `smooths`. Partial relations
/// are paired through `partials` into full relations.
fn thread_loop(
    n: Integer,
    factorbase: Vec<u64>,
    smooths: Arc<ArrayQueue<(Integer, (Integer, Integer))>>,
    sender: SyncSender<()>,
    roota: Arc<Mutex<Integer>>,
    tsqrt: Vec<Integer>,
    tlog: Vec<f64>,
    xmax: i64,
    min_prime: u64,
    thresh: f64,
    partials: Arc<CHashMap<Integer, (Integer, (Integer, Integer))>>,
) {
    let sievesize = 1_i64 << 15;
    loop {
        // Advance the shared cursor to the next prime for which n is a
        // quadratic residue; each worker thereby gets a distinct polynomial.
        let my_roota: Integer = {
            let mut aq_roota = roota.lock().unwrap();
            aq_roota.next_prime_mut();
            while n.legendre(&aq_roota) != 1 {
                aq_roota.next_prime_mut();
            }
            aq_roota.clone()
        };
        // Polynomial coefficients: a = roota^2, b^2 ≡ n (mod a), c = (b²-n)/a.
        let a = my_roota.clone().pow(2);
        let b = tonelli_shanks(&n, &my_roota);
        let int2: Integer = b.clone() * 2;
        let intermediate = int2.invert(&my_roota).expect("Inverse does not exist");
        // Lift b from a root mod roota to a root mod roota².
        let b = (-(b.clone() * &b - &n) * intermediate + &b) % &a;
        let c = (b.clone() * &b - &n) / &a;
        // Per-prime sieve offsets for the two square roots of n mod p.
        let mut s1: HashMap<u64, i64> = HashMap::new();
        let mut s2: HashMap<u64, i64> = HashMap::new();
        for (i, p) in factorbase.iter().enumerate() {
            // a^(p-2) ≡ a⁻¹ (mod p) by Fermat's little theorem (p is prime).
            let ainv = a
                .clone()
                .pow_mod(&Integer::from(p - 2), &Integer::from(*p))
                .unwrap();
            let mut sol1 = (tsqrt[i].clone() - &b) * &ainv % p;
            let mut sol2 = (-tsqrt[i].clone() - &b) * &ainv % p;
            // Normalize into [0, p) relative to the left end of the interval.
            sol1 -= ((sol1.clone() + xmax) / p) * p;
            sol2 -= ((sol2.clone() + xmax) / p) * p;
            s1.insert(*p, (sol1 + xmax).to_i64().unwrap());
            s2.insert(*p, (sol2 + xmax).to_i64().unwrap());
        }
        // Sieve [-xmax, xmax] in segments of sievesize + 1 positions.
        for low in (-xmax..xmax + 1).step_by(sievesize as usize + 1) {
            let high = min(xmax, low + sievesize);
            let size = high - low;
            let size_plus_1 = size + 1;
            // Accumulated log-weights per position of this segment
            // (renamed from `S` to satisfy snake_case conventions).
            let mut sieve = vec![0_f64; size_plus_1 as usize];
            for (i, p) in factorbase.iter().enumerate() {
                if *p < min_prime {
                    // Tiny primes contribute little log-weight but would cost
                    // the most update iterations; skip them.
                    continue;
                }
                let mut sol1 = s1[p];
                let mut sol2 = s2[p];
                let logp = tlog[i];
                let p_i64 = *p as i64;
                while sol1 <= size || sol2 <= size {
                    if sol1 <= size {
                        sieve[sol1 as usize] += logp;
                        sol1 += p_i64;
                    }
                    if sol2 <= size {
                        sieve[sol2 as usize] += logp;
                        sol2 += p_i64;
                    }
                }
                // Carry the offsets over into the next segment.
                s1.insert(*p, sol1 - size_plus_1);
                s2.insert(*p, sol2 - size_plus_1);
            }
            for i in 0..size_plus_1 {
                if sieve[i as usize] > thresh {
                    let x = i + low;
                    let tofact: Integer = a.clone() * x.pow(2) + b.clone() * x * 2 + &c;
                    let mut nf = tofact.clone().abs();
                    // Trial-divide by the factor base to confirm smoothness.
                    for p in factorbase.iter() {
                        while nf.clone() % p == 0 {
                            nf /= p;
                        }
                    }
                    if nf == 1 {
                        // Fully smooth. A full queue only means we already
                        // hold plenty of relations, so dropping one is fine;
                        // ignore the push result explicitly.
                        let _ = smooths.push((a.clone() * x + &b, (tofact, my_roota.clone())));
                    } else {
                        match partials.remove(&nf) {
                            Some((pairv, pairvals)) => {
                                // Two partials sharing the cofactor `nf`
                                // combine into one full relation.
                                let _ = smooths.push((
                                    pairv * (a.clone() * x + &b),
                                    (tofact * pairvals.0, pairvals.1 * &my_roota * nf),
                                ));
                            }
                            None => {
                                partials
                                    .insert(nf, (a.clone() * x + &b, (tofact, my_roota.clone())));
                            }
                        }
                    }
                }
            }
        }
        if smooths.len() > factorbase.len() {
            // BUG FIX: blocking `send` on the bounded sync_channel would park
            // this worker once the buffer filled. The main thread performs a
            // single `recv`, so every worker eventually blocked here and the
            // search could stall with an empty queue. `try_send` delivers the
            // wake-up when there is room and is a harmless no-op otherwise.
            let _ = sender.try_send(());
        }
    }
}
#[cfg(test)]
mod tests {
    use rug::Integer;
    use crate::check_is_divisor;
    use super::mpqs;
    // Small inputs run normally; the larger semiprimes are `#[ignore]`d
    // because sieving them takes too long for routine test runs.
    #[test]
    fn test_qs() {
        let n = "523022617466601111760007224100074291200000001"
            .parse::<Integer>()
            .unwrap();
        check_is_divisor(n.clone(), mpqs(&n));
    }
    #[test]
    fn test_qs_2() {
        let n = "9986801107".parse::<Integer>().unwrap();
        check_is_divisor(n.clone(), mpqs(&n));
    }
    #[test]
    #[ignore]
    fn test_qs_3() {
        let n = "2736300383840445596906210796102273501547527150973747"
            .parse::<Integer>()
            .unwrap();
        check_is_divisor(n.clone(), mpqs(&n));
    }
    #[test]
    #[ignore]
    fn test_qs_4() {
        let n = "676292275716558246502605230897191366469551764092181362779759"
            .parse::<Integer>()
            .unwrap();
        check_is_divisor(n.clone(), mpqs(&n));
    }
}
|
#[doc = "Register `CFGR1` reader"]
pub type R = crate::R<CFGR1_SPEC>;
#[doc = "Register `CFGR1` writer"]
pub type W = crate::W<CFGR1_SPEC>;
#[doc = "Field `MEM_MODE` reader - Memory mapping selection bits"]
pub type MEM_MODE_R = crate::FieldReader<MEM_MODE_A>;
#[doc = "Memory mapping selection bits\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum MEM_MODE_A {
#[doc = "0: Main Flash memory mapped at 0x0000_0000"]
MainFlash = 0,
#[doc = "1: System Flash memory mapped at 0x0000_0000"]
SystemFlash = 1,
#[doc = "2: Main Flash memory mapped at 0x0000_0000"]
MainFlash2 = 2,
#[doc = "3: Embedded SRAM mapped at 0x0000_0000"]
Sram = 3,
}
impl From<MEM_MODE_A> for u8 {
#[inline(always)]
fn from(variant: MEM_MODE_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for MEM_MODE_A {
type Ux = u8;
}
impl MEM_MODE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MEM_MODE_A {
match self.bits {
0 => MEM_MODE_A::MainFlash,
1 => MEM_MODE_A::SystemFlash,
2 => MEM_MODE_A::MainFlash2,
3 => MEM_MODE_A::Sram,
_ => unreachable!(),
}
}
#[doc = "Main Flash memory mapped at 0x0000_0000"]
#[inline(always)]
pub fn is_main_flash(&self) -> bool {
*self == MEM_MODE_A::MainFlash
}
#[doc = "System Flash memory mapped at 0x0000_0000"]
#[inline(always)]
pub fn is_system_flash(&self) -> bool {
*self == MEM_MODE_A::SystemFlash
}
#[doc = "Main Flash memory mapped at 0x0000_0000"]
#[inline(always)]
pub fn is_main_flash2(&self) -> bool {
*self == MEM_MODE_A::MainFlash2
}
#[doc = "Embedded SRAM mapped at 0x0000_0000"]
#[inline(always)]
pub fn is_sram(&self) -> bool {
*self == MEM_MODE_A::Sram
}
}
#[doc = "Field `MEM_MODE` writer - Memory mapping selection bits"]
pub type MEM_MODE_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, MEM_MODE_A>;
impl<'a, REG, const O: u8> MEM_MODE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "Main Flash memory mapped at 0x0000_0000"]
#[inline(always)]
pub fn main_flash(self) -> &'a mut crate::W<REG> {
self.variant(MEM_MODE_A::MainFlash)
}
#[doc = "System Flash memory mapped at 0x0000_0000"]
#[inline(always)]
pub fn system_flash(self) -> &'a mut crate::W<REG> {
self.variant(MEM_MODE_A::SystemFlash)
}
#[doc = "Main Flash memory mapped at 0x0000_0000"]
#[inline(always)]
pub fn main_flash2(self) -> &'a mut crate::W<REG> {
self.variant(MEM_MODE_A::MainFlash2)
}
#[doc = "Embedded SRAM mapped at 0x0000_0000"]
#[inline(always)]
pub fn sram(self) -> &'a mut crate::W<REG> {
self.variant(MEM_MODE_A::Sram)
}
}
#[doc = "Field `PA11_PA12_RMP` reader - PA11 and PA12 remapping bit for small packages (28 and 20 pins)"]
pub type PA11_PA12_RMP_R = crate::BitReader<PA11_PA12_RMP_A>;
#[doc = "PA11 and PA12 remapping bit for small packages (28 and 20 pins)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PA11_PA12_RMP_A {
#[doc = "0: Pin pair PA9/PA10 mapped on the pins"]
NotRemapped = 0,
#[doc = "1: Pin pair PA11/PA12 mapped instead of PA9/PA10"]
Remapped = 1,
}
impl From<PA11_PA12_RMP_A> for bool {
#[inline(always)]
fn from(variant: PA11_PA12_RMP_A) -> Self {
variant as u8 != 0
}
}
impl PA11_PA12_RMP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PA11_PA12_RMP_A {
match self.bits {
false => PA11_PA12_RMP_A::NotRemapped,
true => PA11_PA12_RMP_A::Remapped,
}
}
#[doc = "Pin pair PA9/PA10 mapped on the pins"]
#[inline(always)]
pub fn is_not_remapped(&self) -> bool {
*self == PA11_PA12_RMP_A::NotRemapped
}
#[doc = "Pin pair PA11/PA12 mapped instead of PA9/PA10"]
#[inline(always)]
pub fn is_remapped(&self) -> bool {
*self == PA11_PA12_RMP_A::Remapped
}
}
#[doc = "Field `PA11_PA12_RMP` writer - PA11 and PA12 remapping bit for small packages (28 and 20 pins)"]
pub type PA11_PA12_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PA11_PA12_RMP_A>;
impl<'a, REG, const O: u8> PA11_PA12_RMP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Pin pair PA9/PA10 mapped on the pins"]
#[inline(always)]
pub fn not_remapped(self) -> &'a mut crate::W<REG> {
self.variant(PA11_PA12_RMP_A::NotRemapped)
}
#[doc = "Pin pair PA11/PA12 mapped instead of PA9/PA10"]
#[inline(always)]
pub fn remapped(self) -> &'a mut crate::W<REG> {
self.variant(PA11_PA12_RMP_A::Remapped)
}
}
#[doc = "Field `IR_MOD` reader - IR Modulation Envelope signal selection"]
pub type IR_MOD_R = crate::FieldReader<IR_MOD_A>;
#[doc = "IR Modulation Envelope signal selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum IR_MOD_A {
#[doc = "0: TIM16 selected"]
Tim16 = 0,
#[doc = "1: USART1 selected"]
Usart1 = 1,
#[doc = "2: USART4 selected"]
Usart4 = 2,
}
impl From<IR_MOD_A> for u8 {
#[inline(always)]
fn from(variant: IR_MOD_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for IR_MOD_A {
type Ux = u8;
}
impl IR_MOD_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<IR_MOD_A> {
match self.bits {
0 => Some(IR_MOD_A::Tim16),
1 => Some(IR_MOD_A::Usart1),
2 => Some(IR_MOD_A::Usart4),
_ => None,
}
}
#[doc = "TIM16 selected"]
#[inline(always)]
pub fn is_tim16(&self) -> bool {
*self == IR_MOD_A::Tim16
}
#[doc = "USART1 selected"]
#[inline(always)]
pub fn is_usart1(&self) -> bool {
*self == IR_MOD_A::Usart1
}
#[doc = "USART4 selected"]
#[inline(always)]
pub fn is_usart4(&self) -> bool {
*self == IR_MOD_A::Usart4
}
}
#[doc = "Field `IR_MOD` writer - IR Modulation Envelope signal selection"]
pub type IR_MOD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, IR_MOD_A>;
impl<'a, REG, const O: u8> IR_MOD_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "TIM16 selected"]
#[inline(always)]
pub fn tim16(self) -> &'a mut crate::W<REG> {
self.variant(IR_MOD_A::Tim16)
}
#[doc = "USART1 selected"]
#[inline(always)]
pub fn usart1(self) -> &'a mut crate::W<REG> {
self.variant(IR_MOD_A::Usart1)
}
#[doc = "USART4 selected"]
#[inline(always)]
pub fn usart4(self) -> &'a mut crate::W<REG> {
self.variant(IR_MOD_A::Usart4)
}
}
#[doc = "Field `ADC_DMA_RMP` reader - ADC DMA remapping bit"]
pub type ADC_DMA_RMP_R = crate::BitReader<ADC_DMA_RMP_A>;
#[doc = "ADC DMA remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ADC_DMA_RMP_A {
    #[doc = "0: ADC DMA request mapped on DMA channel 1"]
    NotRemapped = 0,
    #[doc = "1: ADC DMA request mapped on DMA channel 2"]
    Remapped = 1,
}
impl From<ADC_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: ADC_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, ADC_DMA_RMP_A::Remapped)
    }
}
impl ADC_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ADC_DMA_RMP_A {
        if self.bits {
            ADC_DMA_RMP_A::Remapped
        } else {
            ADC_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "ADC DMA request mapped on DMA channel 1"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), ADC_DMA_RMP_A::NotRemapped)
    }
    #[doc = "ADC DMA request mapped on DMA channel 2"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), ADC_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `ADC_DMA_RMP` writer - ADC DMA remapping bit"]
pub type ADC_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ADC_DMA_RMP_A>;
impl<'a, REG, const O: u8> ADC_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "ADC DMA request mapped on DMA channel 1"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(ADC_DMA_RMP_A::NotRemapped)
    }
    #[doc = "ADC DMA request mapped on DMA channel 2"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(ADC_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART1_TX_DMA_RMP` reader - USART1_TX DMA remapping bit"]
pub type USART1_TX_DMA_RMP_R = crate::BitReader<USART1_TX_DMA_RMP_A>;
#[doc = "USART1_TX DMA remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum USART1_TX_DMA_RMP_A {
    #[doc = "0: USART1_TX DMA request mapped on DMA channel 2"]
    NotRemapped = 0,
    #[doc = "1: USART1_TX DMA request mapped on DMA channel 4"]
    Remapped = 1,
}
impl From<USART1_TX_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: USART1_TX_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, USART1_TX_DMA_RMP_A::Remapped)
    }
}
impl USART1_TX_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> USART1_TX_DMA_RMP_A {
        if self.bits {
            USART1_TX_DMA_RMP_A::Remapped
        } else {
            USART1_TX_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "USART1_TX DMA request mapped on DMA channel 2"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), USART1_TX_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART1_TX DMA request mapped on DMA channel 4"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), USART1_TX_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART1_TX_DMA_RMP` writer - USART1_TX DMA remapping bit"]
pub type USART1_TX_DMA_RMP_W<'a, REG, const O: u8> =
    crate::BitWriter<'a, REG, O, USART1_TX_DMA_RMP_A>;
impl<'a, REG, const O: u8> USART1_TX_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "USART1_TX DMA request mapped on DMA channel 2"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART1_TX_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART1_TX DMA request mapped on DMA channel 4"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART1_TX_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART1_RX_DMA_RMP` reader - USART1_RX DMA request remapping bit"]
pub type USART1_RX_DMA_RMP_R = crate::BitReader<USART1_RX_DMA_RMP_A>;
#[doc = "USART1_RX DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum USART1_RX_DMA_RMP_A {
    #[doc = "0: USART1_RX DMA request mapped on DMA channel 3"]
    NotRemapped = 0,
    #[doc = "1: USART1_RX DMA request mapped on DMA channel 5"]
    Remapped = 1,
}
impl From<USART1_RX_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: USART1_RX_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, USART1_RX_DMA_RMP_A::Remapped)
    }
}
impl USART1_RX_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> USART1_RX_DMA_RMP_A {
        if self.bits {
            USART1_RX_DMA_RMP_A::Remapped
        } else {
            USART1_RX_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "USART1_RX DMA request mapped on DMA channel 3"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), USART1_RX_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART1_RX DMA request mapped on DMA channel 5"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), USART1_RX_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART1_RX_DMA_RMP` writer - USART1_RX DMA request remapping bit"]
pub type USART1_RX_DMA_RMP_W<'a, REG, const O: u8> =
    crate::BitWriter<'a, REG, O, USART1_RX_DMA_RMP_A>;
impl<'a, REG, const O: u8> USART1_RX_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "USART1_RX DMA request mapped on DMA channel 3"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART1_RX_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART1_RX DMA request mapped on DMA channel 5"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART1_RX_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM16_DMA_RMP` reader - TIM16 DMA request remapping bit"]
pub type TIM16_DMA_RMP_R = crate::BitReader<TIM16_DMA_RMP_A>;
#[doc = "TIM16 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIM16_DMA_RMP_A {
    #[doc = "0: TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 3"]
    NotRemapped = 0,
    #[doc = "1: TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 4"]
    Remapped = 1,
}
impl From<TIM16_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: TIM16_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, TIM16_DMA_RMP_A::Remapped)
    }
}
impl TIM16_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM16_DMA_RMP_A {
        if self.bits {
            TIM16_DMA_RMP_A::Remapped
        } else {
            TIM16_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 3"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), TIM16_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 4"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), TIM16_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM16_DMA_RMP` writer - TIM16 DMA request remapping bit"]
pub type TIM16_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM16_DMA_RMP_A>;
impl<'a, REG, const O: u8> TIM16_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 3"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM16_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 4"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM16_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM17_DMA_RMP` reader - TIM17 DMA request remapping bit"]
pub type TIM17_DMA_RMP_R = crate::BitReader<TIM17_DMA_RMP_A>;
#[doc = "TIM17 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIM17_DMA_RMP_A {
    #[doc = "0: TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 1"]
    NotRemapped = 0,
    #[doc = "1: TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 2"]
    Remapped = 1,
}
impl From<TIM17_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: TIM17_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, TIM17_DMA_RMP_A::Remapped)
    }
}
impl TIM17_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM17_DMA_RMP_A {
        if self.bits {
            TIM17_DMA_RMP_A::Remapped
        } else {
            TIM17_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 1"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), TIM17_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 2"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), TIM17_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM17_DMA_RMP` writer - TIM17 DMA request remapping bit"]
pub type TIM17_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM17_DMA_RMP_A>;
impl<'a, REG, const O: u8> TIM17_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 1"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM17_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 2"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM17_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM16_DMA_RMP2` reader - TIM16 alternate DMA request remapping bit"]
pub type TIM16_DMA_RMP2_R = crate::BitReader<TIM16_DMA_RMP2_A>;
#[doc = "TIM16 alternate DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIM16_DMA_RMP2_A {
    #[doc = "0: TIM16 DMA request mapped according to TIM16_DMA_RMP bit"]
    NotAlternateRemapped = 0,
    #[doc = "1: TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 6"]
    AlternateRemapped = 1,
}
impl From<TIM16_DMA_RMP2_A> for bool {
    #[inline(always)]
    fn from(variant: TIM16_DMA_RMP2_A) -> Self {
        // Single-bit field: only `AlternateRemapped` encodes as 1.
        matches!(variant, TIM16_DMA_RMP2_A::AlternateRemapped)
    }
}
impl TIM16_DMA_RMP2_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM16_DMA_RMP2_A {
        if self.bits {
            TIM16_DMA_RMP2_A::AlternateRemapped
        } else {
            TIM16_DMA_RMP2_A::NotAlternateRemapped
        }
    }
    #[doc = "TIM16 DMA request mapped according to TIM16_DMA_RMP bit"]
    #[inline(always)]
    pub fn is_not_alternate_remapped(&self) -> bool {
        matches!(self.variant(), TIM16_DMA_RMP2_A::NotAlternateRemapped)
    }
    #[doc = "TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 6"]
    #[inline(always)]
    pub fn is_alternate_remapped(&self) -> bool {
        matches!(self.variant(), TIM16_DMA_RMP2_A::AlternateRemapped)
    }
}
#[doc = "Field `TIM16_DMA_RMP2` writer - TIM16 alternate DMA request remapping bit"]
pub type TIM16_DMA_RMP2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM16_DMA_RMP2_A>;
impl<'a, REG, const O: u8> TIM16_DMA_RMP2_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM16 DMA request mapped according to TIM16_DMA_RMP bit"]
    #[inline(always)]
    pub fn not_alternate_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM16_DMA_RMP2_A::NotAlternateRemapped)
    }
    #[doc = "TIM16_CH1 and TIM16_UP DMA request mapped on DMA channel 6"]
    #[inline(always)]
    pub fn alternate_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM16_DMA_RMP2_A::AlternateRemapped)
    }
}
#[doc = "Field `TIM17_DMA_RMP2` reader - TIM17 alternate DMA request remapping bit"]
pub type TIM17_DMA_RMP2_R = crate::BitReader<TIM17_DMA_RMP2_A>;
#[doc = "TIM17 alternate DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIM17_DMA_RMP2_A {
    #[doc = "0: TIM17 DMA request mapped according to TIM17_DMA_RMP bit"]
    NotAlternateRemapped = 0,
    #[doc = "1: TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 7"]
    AlternateRemapped = 1,
}
impl From<TIM17_DMA_RMP2_A> for bool {
    #[inline(always)]
    fn from(variant: TIM17_DMA_RMP2_A) -> Self {
        variant as u8 != 0
    }
}
impl TIM17_DMA_RMP2_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM17_DMA_RMP2_A {
        match self.bits {
            false => TIM17_DMA_RMP2_A::NotAlternateRemapped,
            true => TIM17_DMA_RMP2_A::AlternateRemapped,
        }
    }
    #[doc = "TIM17 DMA request mapped according to TIM17_DMA_RMP bit"]
    #[inline(always)]
    pub fn is_not_alternate_remapped(&self) -> bool {
        *self == TIM17_DMA_RMP2_A::NotAlternateRemapped
    }
    #[doc = "TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 7"]
    #[inline(always)]
    pub fn is_alternate_remapped(&self) -> bool {
        *self == TIM17_DMA_RMP2_A::AlternateRemapped
    }
}
#[doc = "Field `TIM17_DMA_RMP2` writer - TIM17 alternate DMA request remapping bit"]
pub type TIM17_DMA_RMP2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM17_DMA_RMP2_A>;
impl<'a, REG, const O: u8> TIM17_DMA_RMP2_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM17 DMA request mapped according to TIM17_DMA_RMP bit"]
    #[inline(always)]
    pub fn not_alternate_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM17_DMA_RMP2_A::NotAlternateRemapped)
    }
    #[doc = "TIM17_CH1 and TIM17_UP DMA request mapped on DMA channel 7"]
    #[inline(always)]
    pub fn alternate_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM17_DMA_RMP2_A::AlternateRemapped)
    }
}
#[doc = "Field `I2C_PB6_FMP` reader - Fast Mode Plus (FM plus) driving capability activation bits."]
pub type I2C_PB6_FMP_R = crate::BitReader<I2C_PB6_FMP_A>;
#[doc = "Fast Mode Plus (FM plus) driving capability activation bits.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PB6_FMP_A {
    #[doc = "0: PB6 pin operate in standard mode"]
    Standard = 0,
    #[doc = "1: I2C FM+ mode enabled on PB6 and the Speed control is bypassed"]
    Fmp = 1,
}
impl From<I2C_PB6_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C_PB6_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C_PB6_FMP_A::Fmp)
    }
}
impl I2C_PB6_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C_PB6_FMP_A {
        if self.bits {
            I2C_PB6_FMP_A::Fmp
        } else {
            I2C_PB6_FMP_A::Standard
        }
    }
    #[doc = "PB6 pin operate in standard mode"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C_PB6_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB6 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C_PB6_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB6_FMP` writer - Fast Mode Plus (FM plus) driving capability activation bits."]
pub type I2C_PB6_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PB6_FMP_A>;
impl<'a, REG, const O: u8> I2C_PB6_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PB6 pin operate in standard mode"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB6_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB6 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB6_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB7_FMP` reader - Fast Mode Plus (FM+) driving capability activation bits."]
pub type I2C_PB7_FMP_R = crate::BitReader<I2C_PB7_FMP_A>;
#[doc = "Fast Mode Plus (FM+) driving capability activation bits.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PB7_FMP_A {
    #[doc = "0: PB7 pin operate in standard mode"]
    Standard = 0,
    #[doc = "1: I2C FM+ mode enabled on PB7 and the Speed control is bypassed"]
    Fmp = 1,
}
impl From<I2C_PB7_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C_PB7_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C_PB7_FMP_A::Fmp)
    }
}
impl I2C_PB7_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C_PB7_FMP_A {
        if self.bits {
            I2C_PB7_FMP_A::Fmp
        } else {
            I2C_PB7_FMP_A::Standard
        }
    }
    #[doc = "PB7 pin operate in standard mode"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C_PB7_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB7 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C_PB7_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB7_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits."]
pub type I2C_PB7_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PB7_FMP_A>;
impl<'a, REG, const O: u8> I2C_PB7_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PB7 pin operate in standard mode"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB7_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB7 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB7_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB8_FMP` reader - Fast Mode Plus (FM+) driving capability activation bits."]
pub type I2C_PB8_FMP_R = crate::BitReader<I2C_PB8_FMP_A>;
#[doc = "Fast Mode Plus (FM+) driving capability activation bits.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PB8_FMP_A {
    #[doc = "0: PB8 pin operate in standard mode"]
    Standard = 0,
    #[doc = "1: I2C FM+ mode enabled on PB8 and the Speed control is bypassed"]
    Fmp = 1,
}
impl From<I2C_PB8_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C_PB8_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C_PB8_FMP_A::Fmp)
    }
}
impl I2C_PB8_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C_PB8_FMP_A {
        if self.bits {
            I2C_PB8_FMP_A::Fmp
        } else {
            I2C_PB8_FMP_A::Standard
        }
    }
    #[doc = "PB8 pin operate in standard mode"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C_PB8_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB8 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C_PB8_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB8_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits."]
pub type I2C_PB8_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PB8_FMP_A>;
impl<'a, REG, const O: u8> I2C_PB8_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PB8 pin operate in standard mode"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB8_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB8 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB8_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB9_FMP` reader - Fast Mode Plus (FM+) driving capability activation bits."]
pub type I2C_PB9_FMP_R = crate::BitReader<I2C_PB9_FMP_A>;
#[doc = "Fast Mode Plus (FM+) driving capability activation bits.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PB9_FMP_A {
    #[doc = "0: PB9 pin operate in standard mode"]
    Standard = 0,
    #[doc = "1: I2C FM+ mode enabled on PB9 and the Speed control is bypassed"]
    Fmp = 1,
}
impl From<I2C_PB9_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C_PB9_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C_PB9_FMP_A::Fmp)
    }
}
impl I2C_PB9_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C_PB9_FMP_A {
        if self.bits {
            I2C_PB9_FMP_A::Fmp
        } else {
            I2C_PB9_FMP_A::Standard
        }
    }
    #[doc = "PB9 pin operate in standard mode"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C_PB9_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB9 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C_PB9_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PB9_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits."]
pub type I2C_PB9_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PB9_FMP_A>;
impl<'a, REG, const O: u8> I2C_PB9_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PB9 pin operate in standard mode"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB9_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PB9 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PB9_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C1_FMP` reader - FM+ driving capability activation for I2C1"]
pub type I2C1_FMP_R = crate::BitReader<I2C1_FMP_A>;
#[doc = "FM+ driving capability activation for I2C1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C1_FMP_A {
    #[doc = "0: FM+ mode is controlled by I2C_Pxx_FMP bits only"]
    Standard = 0,
    #[doc = "1: FM+ mode is enabled on all I2C1 pins selected through selection bits in GPIOx_AFR registers"]
    Fmp = 1,
}
impl From<I2C1_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C1_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C1_FMP_A::Fmp)
    }
}
impl I2C1_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C1_FMP_A {
        if self.bits {
            I2C1_FMP_A::Fmp
        } else {
            I2C1_FMP_A::Standard
        }
    }
    #[doc = "FM+ mode is controlled by I2C_Pxx_FMP bits only"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C1_FMP_A::Standard)
    }
    #[doc = "FM+ mode is enabled on all I2C1 pins selected through selection bits in GPIOx_AFR registers"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C1_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C1_FMP` writer - FM+ driving capability activation for I2C1"]
pub type I2C1_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C1_FMP_A>;
impl<'a, REG, const O: u8> I2C1_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "FM+ mode is controlled by I2C_Pxx_FMP bits only"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C1_FMP_A::Standard)
    }
    #[doc = "FM+ mode is enabled on all I2C1 pins selected through selection bits in GPIOx_AFR registers"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C1_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C2_FMP` reader - FM+ driving capability activation for I2C2"]
pub type I2C2_FMP_R = crate::BitReader<I2C2_FMP_A>;
#[doc = "FM+ driving capability activation for I2C2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C2_FMP_A {
    #[doc = "0: FM+ mode is controlled by I2C_Pxx_FMP bits only"]
    Standard = 0,
    #[doc = "1: FM+ mode is enabled on all I2C2 pins selected through selection bits in GPIOx_AFR registers"]
    Fmp = 1,
}
impl From<I2C2_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C2_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C2_FMP_A::Fmp)
    }
}
impl I2C2_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C2_FMP_A {
        if self.bits {
            I2C2_FMP_A::Fmp
        } else {
            I2C2_FMP_A::Standard
        }
    }
    #[doc = "FM+ mode is controlled by I2C_Pxx_FMP bits only"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C2_FMP_A::Standard)
    }
    #[doc = "FM+ mode is enabled on all I2C2 pins selected through selection bits in GPIOx_AFR registers"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C2_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C2_FMP` writer - FM+ driving capability activation for I2C2"]
pub type I2C2_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C2_FMP_A>;
impl<'a, REG, const O: u8> I2C2_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "FM+ mode is controlled by I2C_Pxx_FMP bits only"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C2_FMP_A::Standard)
    }
    #[doc = "FM+ mode is enabled on all I2C2 pins selected through selection bits in GPIOx_AFR registers"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C2_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PA9_FMP` reader - Fast Mode Plus (FM+) driving capability activation bits"]
pub type I2C_PA9_FMP_R = crate::BitReader<I2C_PA9_FMP_A>;
#[doc = "Fast Mode Plus (FM+) driving capability activation bits\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PA9_FMP_A {
    #[doc = "0: PA9 pin operate in standard mode"]
    Standard = 0,
    #[doc = "1: I2C FM+ mode enabled on PA9 and the Speed control is bypassed"]
    Fmp = 1,
}
impl From<I2C_PA9_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C_PA9_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C_PA9_FMP_A::Fmp)
    }
}
impl I2C_PA9_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C_PA9_FMP_A {
        if self.bits {
            I2C_PA9_FMP_A::Fmp
        } else {
            I2C_PA9_FMP_A::Standard
        }
    }
    #[doc = "PA9 pin operate in standard mode"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C_PA9_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PA9 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C_PA9_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PA9_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits"]
pub type I2C_PA9_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PA9_FMP_A>;
impl<'a, REG, const O: u8> I2C_PA9_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PA9 pin operate in standard mode"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PA9_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PA9 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PA9_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PA10_FMP` reader - Fast Mode Plus (FM+) driving capability activation bits"]
pub type I2C_PA10_FMP_R = crate::BitReader<I2C_PA10_FMP_A>;
#[doc = "Fast Mode Plus (FM+) driving capability activation bits\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C_PA10_FMP_A {
    #[doc = "0: PA10 pin operate in standard mode"]
    Standard = 0,
    #[doc = "1: I2C FM+ mode enabled on PA10 and the Speed control is bypassed"]
    Fmp = 1,
}
impl From<I2C_PA10_FMP_A> for bool {
    #[inline(always)]
    fn from(variant: I2C_PA10_FMP_A) -> Self {
        // Single-bit field: only `Fmp` encodes as 1.
        matches!(variant, I2C_PA10_FMP_A::Fmp)
    }
}
impl I2C_PA10_FMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C_PA10_FMP_A {
        if self.bits {
            I2C_PA10_FMP_A::Fmp
        } else {
            I2C_PA10_FMP_A::Standard
        }
    }
    #[doc = "PA10 pin operate in standard mode"]
    #[inline(always)]
    pub fn is_standard(&self) -> bool {
        matches!(self.variant(), I2C_PA10_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PA10 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn is_fmp(&self) -> bool {
        matches!(self.variant(), I2C_PA10_FMP_A::Fmp)
    }
}
#[doc = "Field `I2C_PA10_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits"]
pub type I2C_PA10_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C_PA10_FMP_A>;
impl<'a, REG, const O: u8> I2C_PA10_FMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PA10 pin operate in standard mode"]
    #[inline(always)]
    pub fn standard(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PA10_FMP_A::Standard)
    }
    #[doc = "I2C FM+ mode enabled on PA10 and the Speed control is bypassed"]
    #[inline(always)]
    pub fn fmp(self) -> &'a mut crate::W<REG> {
        self.variant(I2C_PA10_FMP_A::Fmp)
    }
}
#[doc = "Field `SPI2_DMA_RMP` reader - SPI2 DMA request remapping bit"]
pub type SPI2_DMA_RMP_R = crate::BitReader<SPI2_DMA_RMP_A>;
#[doc = "SPI2 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SPI2_DMA_RMP_A {
    #[doc = "0: SPI2_RX and SPI2_TX DMA requests mapped on DMA channel 4 and 5 respectively"]
    NotRemapped = 0,
    #[doc = "1: SPI2_RX and SPI2_TX DMA requests mapped on DMA channel 6 and 7 respectively"]
    Remapped = 1,
}
impl From<SPI2_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: SPI2_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, SPI2_DMA_RMP_A::Remapped)
    }
}
impl SPI2_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SPI2_DMA_RMP_A {
        if self.bits {
            SPI2_DMA_RMP_A::Remapped
        } else {
            SPI2_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "SPI2_RX and SPI2_TX DMA requests mapped on DMA channel 4 and 5 respectively"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), SPI2_DMA_RMP_A::NotRemapped)
    }
    #[doc = "SPI2_RX and SPI2_TX DMA requests mapped on DMA channel 6 and 7 respectively"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), SPI2_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `SPI2_DMA_RMP` writer - SPI2 DMA request remapping bit"]
pub type SPI2_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SPI2_DMA_RMP_A>;
impl<'a, REG, const O: u8> SPI2_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "SPI2_RX and SPI2_TX DMA requests mapped on DMA channel 4 and 5 respectively"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(SPI2_DMA_RMP_A::NotRemapped)
    }
    #[doc = "SPI2_RX and SPI2_TX DMA requests mapped on DMA channel 6 and 7 respectively"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(SPI2_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART2_DMA_RMP` reader - USART2 DMA request remapping bit"]
pub type USART2_DMA_RMP_R = crate::BitReader<USART2_DMA_RMP_A>;
#[doc = "USART2 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum USART2_DMA_RMP_A {
    #[doc = "0: USART2_RX and USART2_TX DMA requests mapped on DMA channel 5 and 4 respectively"]
    NotRemapped = 0,
    #[doc = "1: USART2_RX and USART2_TX DMA requests mapped on DMA channel 6 and 7 respectively"]
    Remapped = 1,
}
impl From<USART2_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: USART2_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, USART2_DMA_RMP_A::Remapped)
    }
}
impl USART2_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> USART2_DMA_RMP_A {
        if self.bits {
            USART2_DMA_RMP_A::Remapped
        } else {
            USART2_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "USART2_RX and USART2_TX DMA requests mapped on DMA channel 5 and 4 respectively"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), USART2_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART2_RX and USART2_TX DMA requests mapped on DMA channel 6 and 7 respectively"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), USART2_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART2_DMA_RMP` writer - USART2 DMA request remapping bit"]
pub type USART2_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, USART2_DMA_RMP_A>;
impl<'a, REG, const O: u8> USART2_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "USART2_RX and USART2_TX DMA requests mapped on DMA channel 5 and 4 respectively"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART2_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART2_RX and USART2_TX DMA requests mapped on DMA channel 6 and 7 respectively"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART2_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART3_DMA_RMP` reader - USART3 DMA request remapping bit"]
pub type USART3_DMA_RMP_R = crate::BitReader<USART3_DMA_RMP_A>;
#[doc = "USART3 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum USART3_DMA_RMP_A {
    #[doc = "0: USART3_RX and USART3_TX DMA requests mapped on DMA channel 6 and 7 respectively (or simply disabled on STM32F0x0)"]
    NotRemapped = 0,
    #[doc = "1: USART3_RX and USART3_TX DMA requests mapped on DMA channel 3 and 2 respectively"]
    Remapped = 1,
}
impl From<USART3_DMA_RMP_A> for bool {
    #[inline(always)]
    fn from(variant: USART3_DMA_RMP_A) -> Self {
        // Single-bit field: only `Remapped` encodes as 1.
        matches!(variant, USART3_DMA_RMP_A::Remapped)
    }
}
impl USART3_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> USART3_DMA_RMP_A {
        if self.bits {
            USART3_DMA_RMP_A::Remapped
        } else {
            USART3_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "USART3_RX and USART3_TX DMA requests mapped on DMA channel 6 and 7 respectively (or simply disabled on STM32F0x0)"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), USART3_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART3_RX and USART3_TX DMA requests mapped on DMA channel 3 and 2 respectively"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), USART3_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `USART3_DMA_RMP` writer - USART3 DMA request remapping bit"]
pub type USART3_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, USART3_DMA_RMP_A>;
impl<'a, REG, const O: u8> USART3_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "USART3_RX and USART3_TX DMA requests mapped on DMA channel 6 and 7 respectively (or simply disabled on STM32F0x0)"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART3_DMA_RMP_A::NotRemapped)
    }
    #[doc = "USART3_RX and USART3_TX DMA requests mapped on DMA channel 3 and 2 respectively"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(USART3_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `I2C1_DMA_RMP` reader - I2C1 DMA request remapping bit"]
pub type I2C1_DMA_RMP_R = crate::BitReader<I2C1_DMA_RMP_A>;
#[doc = "I2C1 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum I2C1_DMA_RMP_A {
#[doc = "0: I2C1_RX and I2C1_TX DMA requests mapped on DMA channel 3 and 2 respectively"]
NotRemapped = 0,
#[doc = "1: I2C1_RX and I2C1_TX DMA requests mapped on DMA channel 7 and 6 respectively"]
Remapped = 1,
}
impl From<I2C1_DMA_RMP_A> for bool {
#[inline(always)]
fn from(variant: I2C1_DMA_RMP_A) -> Self {
variant as u8 != 0
}
}
impl I2C1_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C1_DMA_RMP_A {
        // `bits` is the single register bit backing this field.
        if self.bits {
            I2C1_DMA_RMP_A::Remapped
        } else {
            I2C1_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "I2C1_RX and I2C1_TX DMA requests mapped on DMA channel 3 and 2 respectively"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), I2C1_DMA_RMP_A::NotRemapped)
    }
    #[doc = "I2C1_RX and I2C1_TX DMA requests mapped on DMA channel 7 and 6 respectively"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), I2C1_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `I2C1_DMA_RMP` writer - I2C1 DMA request remapping bit"]
// Write proxy for the single I2C1_DMA_RMP bit at offset `O` of register `REG`.
pub type I2C1_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, I2C1_DMA_RMP_A>;
impl<'a, REG, const O: u8> I2C1_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "I2C1_RX and I2C1_TX DMA requests mapped on DMA channel 3 and 2 respectively"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(I2C1_DMA_RMP_A::NotRemapped)
    }
    #[doc = "I2C1_RX and I2C1_TX DMA requests mapped on DMA channel 7 and 6 respectively"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(I2C1_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM1_DMA_RMP` reader - TIM1 DMA request remapping bit"]
// Read proxy for the TIM1_DMA_RMP bit; `variant()` maps the raw bit onto the enum below.
pub type TIM1_DMA_RMP_R = crate::BitReader<TIM1_DMA_RMP_A>;
#[doc = "TIM1 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// Enumerated values of the TIM1_DMA_RMP bit (0 = default mapping, 1 = remapped).
pub enum TIM1_DMA_RMP_A {
    #[doc = "0: TIM1_CH1, TIM1_CH2 and TIM1_CH3 DMA requests mapped on DMA channel 2, 3 and 4 respectively"]
    NotRemapped = 0,
    #[doc = "1: TIM1_CH1, TIM1_CH2 and TIM1_CH3 DMA requests mapped on DMA channel 6"]
    Remapped = 1,
}
impl From<TIM1_DMA_RMP_A> for bool {
#[inline(always)]
fn from(variant: TIM1_DMA_RMP_A) -> Self {
variant as u8 != 0
}
}
impl TIM1_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM1_DMA_RMP_A {
        // `bits` is the single register bit backing this field.
        if self.bits {
            TIM1_DMA_RMP_A::Remapped
        } else {
            TIM1_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "TIM1_CH1, TIM1_CH2 and TIM1_CH3 DMA requests mapped on DMA channel 2, 3 and 4 respectively"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), TIM1_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM1_CH1, TIM1_CH2 and TIM1_CH3 DMA requests mapped on DMA channel 6"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), TIM1_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM1_DMA_RMP` writer - TIM1 DMA request remapping bit"]
// Write proxy for the single TIM1_DMA_RMP bit at offset `O` of register `REG`.
pub type TIM1_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM1_DMA_RMP_A>;
impl<'a, REG, const O: u8> TIM1_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM1_CH1, TIM1_CH2 and TIM1_CH3 DMA requests mapped on DMA channel 2, 3 and 4 respectively"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM1_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM1_CH1, TIM1_CH2 and TIM1_CH3 DMA requests mapped on DMA channel 6"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM1_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM2_DMA_RMP` reader - TIM2 DMA request remapping bit"]
// Read proxy for the TIM2_DMA_RMP bit; `variant()` maps the raw bit onto the enum below.
pub type TIM2_DMA_RMP_R = crate::BitReader<TIM2_DMA_RMP_A>;
#[doc = "TIM2 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// Enumerated values of the TIM2_DMA_RMP bit (0 = default mapping, 1 = remapped).
pub enum TIM2_DMA_RMP_A {
    #[doc = "0: TIM2_CH2 and TIM2_CH4 DMA requests mapped on DMA channel 3 and 4 respectively"]
    NotRemapped = 0,
    #[doc = "1: TIM2_CH2 and TIM2_CH4 DMA requests mapped on DMA channel 7"]
    Remapped = 1,
}
impl From<TIM2_DMA_RMP_A> for bool {
#[inline(always)]
fn from(variant: TIM2_DMA_RMP_A) -> Self {
variant as u8 != 0
}
}
impl TIM2_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM2_DMA_RMP_A {
        // `bits` is the single register bit backing this field.
        if self.bits {
            TIM2_DMA_RMP_A::Remapped
        } else {
            TIM2_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "TIM2_CH2 and TIM2_CH4 DMA requests mapped on DMA channel 3 and 4 respectively"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), TIM2_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM2_CH2 and TIM2_CH4 DMA requests mapped on DMA channel 7"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), TIM2_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM2_DMA_RMP` writer - TIM2 DMA request remapping bit"]
// Write proxy for the single TIM2_DMA_RMP bit at offset `O` of register `REG`.
pub type TIM2_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM2_DMA_RMP_A>;
impl<'a, REG, const O: u8> TIM2_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM2_CH2 and TIM2_CH4 DMA requests mapped on DMA channel 3 and 4 respectively"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM2_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM2_CH2 and TIM2_CH4 DMA requests mapped on DMA channel 7"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM2_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM3_DMA_RMP` reader - TIM3 DMA request remapping bit"]
// Read proxy for the TIM3_DMA_RMP bit; `variant()` maps the raw bit onto the enum below.
pub type TIM3_DMA_RMP_R = crate::BitReader<TIM3_DMA_RMP_A>;
#[doc = "TIM3 DMA request remapping bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// Enumerated values of the TIM3_DMA_RMP bit (0 = default mapping, 1 = remapped).
pub enum TIM3_DMA_RMP_A {
    #[doc = "0: TIM3_CH1 and TIM3_TRIG DMA requests mapped on DMA channel 4"]
    NotRemapped = 0,
    #[doc = "1: TIM3_CH1 and TIM3_TRIG DMA requests mapped on DMA channel 6"]
    Remapped = 1,
}
impl From<TIM3_DMA_RMP_A> for bool {
#[inline(always)]
fn from(variant: TIM3_DMA_RMP_A) -> Self {
variant as u8 != 0
}
}
impl TIM3_DMA_RMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM3_DMA_RMP_A {
        // `bits` is the single register bit backing this field.
        if self.bits {
            TIM3_DMA_RMP_A::Remapped
        } else {
            TIM3_DMA_RMP_A::NotRemapped
        }
    }
    #[doc = "TIM3_CH1 and TIM3_TRIG DMA requests mapped on DMA channel 4"]
    #[inline(always)]
    pub fn is_not_remapped(&self) -> bool {
        matches!(self.variant(), TIM3_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM3_CH1 and TIM3_TRIG DMA requests mapped on DMA channel 6"]
    #[inline(always)]
    pub fn is_remapped(&self) -> bool {
        matches!(self.variant(), TIM3_DMA_RMP_A::Remapped)
    }
}
#[doc = "Field `TIM3_DMA_RMP` writer - TIM3 DMA request remapping bit"]
// Write proxy for the single TIM3_DMA_RMP bit at offset `O` of register `REG`.
pub type TIM3_DMA_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM3_DMA_RMP_A>;
impl<'a, REG, const O: u8> TIM3_DMA_RMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "TIM3_CH1 and TIM3_TRIG DMA requests mapped on DMA channel 4"]
    #[inline(always)]
    pub fn not_remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM3_DMA_RMP_A::NotRemapped)
    }
    #[doc = "TIM3_CH1 and TIM3_TRIG DMA requests mapped on DMA channel 6"]
    #[inline(always)]
    pub fn remapped(self) -> &'a mut crate::W<REG> {
        self.variant(TIM3_DMA_RMP_A::Remapped)
    }
}
// Read proxy for the CFGR1 register: each getter extracts one field from the
// register snapshot held in `self.bits` by masking/shifting at the field's
// documented bit offset. Note bits 3, 5 and 15 have no accessor here
// (reserved in this register layout).
impl R {
    #[doc = "Bits 0:1 - Memory mapping selection bits"]
    #[inline(always)]
    pub fn mem_mode(&self) -> MEM_MODE_R {
        MEM_MODE_R::new((self.bits & 3) as u8)
    }
    #[doc = "Bit 4 - PA11 and PA12 remapping bit for small packages (28 and 20 pins)"]
    #[inline(always)]
    pub fn pa11_pa12_rmp(&self) -> PA11_PA12_RMP_R {
        PA11_PA12_RMP_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bits 6:7 - IR Modulation Envelope signal selection"]
    #[inline(always)]
    pub fn ir_mod(&self) -> IR_MOD_R {
        IR_MOD_R::new(((self.bits >> 6) & 3) as u8)
    }
    #[doc = "Bit 8 - ADC DMA remapping bit"]
    #[inline(always)]
    pub fn adc_dma_rmp(&self) -> ADC_DMA_RMP_R {
        ADC_DMA_RMP_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - USART1_TX DMA remapping bit"]
    #[inline(always)]
    pub fn usart1_tx_dma_rmp(&self) -> USART1_TX_DMA_RMP_R {
        USART1_TX_DMA_RMP_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - USART1_RX DMA request remapping bit"]
    #[inline(always)]
    pub fn usart1_rx_dma_rmp(&self) -> USART1_RX_DMA_RMP_R {
        USART1_RX_DMA_RMP_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - TIM16 DMA request remapping bit"]
    #[inline(always)]
    pub fn tim16_dma_rmp(&self) -> TIM16_DMA_RMP_R {
        TIM16_DMA_RMP_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - TIM17 DMA request remapping bit"]
    #[inline(always)]
    pub fn tim17_dma_rmp(&self) -> TIM17_DMA_RMP_R {
        TIM17_DMA_RMP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - TIM16 alternate DMA request remapping bit"]
    #[inline(always)]
    pub fn tim16_dma_rmp2(&self) -> TIM16_DMA_RMP2_R {
        TIM16_DMA_RMP2_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - TIM17 alternate DMA request remapping bit"]
    #[inline(always)]
    pub fn tim17_dma_rmp2(&self) -> TIM17_DMA_RMP2_R {
        TIM17_DMA_RMP2_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 16 - Fast Mode Plus (FM plus) driving capability activation bits."]
    #[inline(always)]
    pub fn i2c_pb6_fmp(&self) -> I2C_PB6_FMP_R {
        I2C_PB6_FMP_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Fast Mode Plus (FM+) driving capability activation bits."]
    #[inline(always)]
    pub fn i2c_pb7_fmp(&self) -> I2C_PB7_FMP_R {
        I2C_PB7_FMP_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - Fast Mode Plus (FM+) driving capability activation bits."]
    #[inline(always)]
    pub fn i2c_pb8_fmp(&self) -> I2C_PB8_FMP_R {
        I2C_PB8_FMP_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - Fast Mode Plus (FM+) driving capability activation bits."]
    #[inline(always)]
    pub fn i2c_pb9_fmp(&self) -> I2C_PB9_FMP_R {
        I2C_PB9_FMP_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - FM+ driving capability activation for I2C1"]
    #[inline(always)]
    pub fn i2c1_fmp(&self) -> I2C1_FMP_R {
        I2C1_FMP_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - FM+ driving capability activation for I2C2"]
    #[inline(always)]
    pub fn i2c2_fmp(&self) -> I2C2_FMP_R {
        I2C2_FMP_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - Fast Mode Plus (FM+) driving capability activation bits"]
    #[inline(always)]
    pub fn i2c_pa9_fmp(&self) -> I2C_PA9_FMP_R {
        I2C_PA9_FMP_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - Fast Mode Plus (FM+) driving capability activation bits"]
    #[inline(always)]
    pub fn i2c_pa10_fmp(&self) -> I2C_PA10_FMP_R {
        I2C_PA10_FMP_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - SPI2 DMA request remapping bit"]
    #[inline(always)]
    pub fn spi2_dma_rmp(&self) -> SPI2_DMA_RMP_R {
        SPI2_DMA_RMP_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - USART2 DMA request remapping bit"]
    #[inline(always)]
    pub fn usart2_dma_rmp(&self) -> USART2_DMA_RMP_R {
        USART2_DMA_RMP_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - USART3 DMA request remapping bit"]
    #[inline(always)]
    pub fn usart3_dma_rmp(&self) -> USART3_DMA_RMP_R {
        USART3_DMA_RMP_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - I2C1 DMA request remapping bit"]
    #[inline(always)]
    pub fn i2c1_dma_rmp(&self) -> I2C1_DMA_RMP_R {
        I2C1_DMA_RMP_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - TIM1 DMA request remapping bit"]
    #[inline(always)]
    pub fn tim1_dma_rmp(&self) -> TIM1_DMA_RMP_R {
        TIM1_DMA_RMP_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - TIM2 DMA request remapping bit"]
    #[inline(always)]
    pub fn tim2_dma_rmp(&self) -> TIM2_DMA_RMP_R {
        TIM2_DMA_RMP_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - TIM3 DMA request remapping bit"]
    #[inline(always)]
    pub fn tim3_dma_rmp(&self) -> TIM3_DMA_RMP_R {
        TIM3_DMA_RMP_R::new(((self.bits >> 30) & 1) != 0)
    }
}
// Write proxy for the CFGR1 register: each method returns a typed field writer
// positioned at that field's bit offset (the second const generic argument).
// `bits` writes the raw 32-bit value and is unsafe because it bypasses the
// per-field type checking.
impl W {
    #[doc = "Bits 0:1 - Memory mapping selection bits"]
    #[inline(always)]
    #[must_use]
    pub fn mem_mode(&mut self) -> MEM_MODE_W<CFGR1_SPEC, 0> {
        MEM_MODE_W::new(self)
    }
    #[doc = "Bit 4 - PA11 and PA12 remapping bit for small packages (28 and 20 pins)"]
    #[inline(always)]
    #[must_use]
    pub fn pa11_pa12_rmp(&mut self) -> PA11_PA12_RMP_W<CFGR1_SPEC, 4> {
        PA11_PA12_RMP_W::new(self)
    }
    #[doc = "Bits 6:7 - IR Modulation Envelope signal selection"]
    #[inline(always)]
    #[must_use]
    pub fn ir_mod(&mut self) -> IR_MOD_W<CFGR1_SPEC, 6> {
        IR_MOD_W::new(self)
    }
    #[doc = "Bit 8 - ADC DMA remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn adc_dma_rmp(&mut self) -> ADC_DMA_RMP_W<CFGR1_SPEC, 8> {
        ADC_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 9 - USART1_TX DMA remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn usart1_tx_dma_rmp(&mut self) -> USART1_TX_DMA_RMP_W<CFGR1_SPEC, 9> {
        USART1_TX_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 10 - USART1_RX DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn usart1_rx_dma_rmp(&mut self) -> USART1_RX_DMA_RMP_W<CFGR1_SPEC, 10> {
        USART1_RX_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 11 - TIM16 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim16_dma_rmp(&mut self) -> TIM16_DMA_RMP_W<CFGR1_SPEC, 11> {
        TIM16_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 12 - TIM17 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim17_dma_rmp(&mut self) -> TIM17_DMA_RMP_W<CFGR1_SPEC, 12> {
        TIM17_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 13 - TIM16 alternate DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim16_dma_rmp2(&mut self) -> TIM16_DMA_RMP2_W<CFGR1_SPEC, 13> {
        TIM16_DMA_RMP2_W::new(self)
    }
    #[doc = "Bit 14 - TIM17 alternate DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim17_dma_rmp2(&mut self) -> TIM17_DMA_RMP2_W<CFGR1_SPEC, 14> {
        TIM17_DMA_RMP2_W::new(self)
    }
    #[doc = "Bit 16 - Fast Mode Plus (FM plus) driving capability activation bits."]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb6_fmp(&mut self) -> I2C_PB6_FMP_W<CFGR1_SPEC, 16> {
        I2C_PB6_FMP_W::new(self)
    }
    #[doc = "Bit 17 - Fast Mode Plus (FM+) driving capability activation bits."]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb7_fmp(&mut self) -> I2C_PB7_FMP_W<CFGR1_SPEC, 17> {
        I2C_PB7_FMP_W::new(self)
    }
    #[doc = "Bit 18 - Fast Mode Plus (FM+) driving capability activation bits."]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb8_fmp(&mut self) -> I2C_PB8_FMP_W<CFGR1_SPEC, 18> {
        I2C_PB8_FMP_W::new(self)
    }
    #[doc = "Bit 19 - Fast Mode Plus (FM+) driving capability activation bits."]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pb9_fmp(&mut self) -> I2C_PB9_FMP_W<CFGR1_SPEC, 19> {
        I2C_PB9_FMP_W::new(self)
    }
    #[doc = "Bit 20 - FM+ driving capability activation for I2C1"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1_fmp(&mut self) -> I2C1_FMP_W<CFGR1_SPEC, 20> {
        I2C1_FMP_W::new(self)
    }
    #[doc = "Bit 21 - FM+ driving capability activation for I2C2"]
    #[inline(always)]
    #[must_use]
    pub fn i2c2_fmp(&mut self) -> I2C2_FMP_W<CFGR1_SPEC, 21> {
        I2C2_FMP_W::new(self)
    }
    #[doc = "Bit 22 - Fast Mode Plus (FM+) driving capability activation bits"]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pa9_fmp(&mut self) -> I2C_PA9_FMP_W<CFGR1_SPEC, 22> {
        I2C_PA9_FMP_W::new(self)
    }
    #[doc = "Bit 23 - Fast Mode Plus (FM+) driving capability activation bits"]
    #[inline(always)]
    #[must_use]
    pub fn i2c_pa10_fmp(&mut self) -> I2C_PA10_FMP_W<CFGR1_SPEC, 23> {
        I2C_PA10_FMP_W::new(self)
    }
    #[doc = "Bit 24 - SPI2 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn spi2_dma_rmp(&mut self) -> SPI2_DMA_RMP_W<CFGR1_SPEC, 24> {
        SPI2_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 25 - USART2 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn usart2_dma_rmp(&mut self) -> USART2_DMA_RMP_W<CFGR1_SPEC, 25> {
        USART2_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 26 - USART3 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn usart3_dma_rmp(&mut self) -> USART3_DMA_RMP_W<CFGR1_SPEC, 26> {
        USART3_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 27 - I2C1 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1_dma_rmp(&mut self) -> I2C1_DMA_RMP_W<CFGR1_SPEC, 27> {
        I2C1_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 28 - TIM1 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim1_dma_rmp(&mut self) -> TIM1_DMA_RMP_W<CFGR1_SPEC, 28> {
        TIM1_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 29 - TIM2 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim2_dma_rmp(&mut self) -> TIM2_DMA_RMP_W<CFGR1_SPEC, 29> {
        TIM2_DMA_RMP_W::new(self)
    }
    #[doc = "Bit 30 - TIM3 DMA request remapping bit"]
    #[inline(always)]
    #[must_use]
    pub fn tim3_dma_rmp(&mut self) -> TIM3_DMA_RMP_W<CFGR1_SPEC, 30> {
        TIM3_DMA_RMP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type tying the CFGR1 register to its 32-bit underlying storage.
pub struct CFGR1_SPEC;
impl crate::RegisterSpec for CFGR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr1::R`](R) reader structure"]
impl crate::Readable for CFGR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr1::W`](W) writer structure"]
impl crate::Writable for CFGR1_SPEC {
    // No fields in this register require writing 0 or 1 to be left unmodified.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR1 to value 0"]
impl crate::Resettable for CFGR1_SPEC {
    // Hardware reset value of CFGR1 is all-zero.
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::ffi::{CStr, CString};
use std::fmt;
use std::marker::PhantomData;
use std::mem;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::ops::{Deref, DerefMut};
use std::os::raw::{c_char, c_void};
use std::path::Path;
use std::ptr;
use std::string;
use anyhow::Result;
use libc;
use ffi;
use errors::{AsResult, ErrorKind::CmdLineParseError};
use ether;
// Convenience aliases over the bindgen-generated DPDK cmdline token types.
pub type RawTokenHeader = ffi::cmdline_token_hdr;
pub type RawTokenPtr = *const RawTokenHeader;
pub type RawStrToken = ffi::cmdline_token_string;
pub type RawNumToken = ffi::cmdline_token_num;
pub type RawIpAddrToken = ffi::cmdline_token_ipaddr;
pub type RawEtherAddrToken = ffi::cmdline_token_etheraddr;
pub type RawPortListToken = ffi::cmdline_token_portlist;
pub type RawParseTokenHeader = ffi::cmdline_parse_token_hdr_t;
// A typed wrapper around one DPDK cmdline token. `T` is the result struct the
// parsed token writes into (via the `offset` stored in the token header) and
// exists only at the type level, hence `PhantomData`.
pub enum Token<T> {
    Raw(RawTokenPtr, PhantomData<T>),
    Str(RawStrToken, PhantomData<T>),
    Num(RawNumToken, PhantomData<T>),
    IpAddr(RawIpAddrToken, PhantomData<T>),
    EtherAddr(RawEtherAddrToken, PhantomData<T>),
    PortList(RawPortListToken, PhantomData<T>),
}
impl<T> Token<T> {
    /// Returns a pointer to the common `cmdline_token_hdr` that every token
    /// variant embeds as its first field.
    pub fn as_raw(&self) -> RawTokenPtr {
        match *self {
            Token::Raw(hdr, _) => hdr,
            Token::Str(ref tok, _) => &tok.hdr,
            Token::Num(ref tok, _) => &tok.hdr,
            Token::IpAddr(ref tok, _) => &tok.hdr,
            Token::EtherAddr(ref tok, _) => &tok.hdr,
            Token::PortList(ref tok, _) => &tok.hdr,
        }
    }
}
impl<T> Drop for Token<T> {
    fn drop(&mut self) {
        // Only `Str` tokens own heap memory: TOKEN_STRING_INITIALIZER allocates
        // the match string with libc::calloc, so it must be freed with libc::free.
        if let Token::Str(ref token, _) = *self {
            unsafe { libc::free(token.string_data.str_ as *mut libc::c_void) }
        }
    }
}
// Aliases for the raw value types a parsed token produces.
pub type NumType = ffi::cmdline_numtype::Type;
pub type RawFixedStr = ffi::cmdline_fixed_string_t;
pub type RawIpNetAddr = ffi::cmdline_ipaddr_t;
pub type RawEtherAddr = ffi::rte_ether_addr;
pub type RawPortList = ffi::cmdline_portlist_t;
// Wrapper over DPDK's fixed-size, NUL-terminated string buffer.
pub struct FixedStr(RawFixedStr);
impl Deref for FixedStr {
    type Target = RawFixedStr;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl fmt::Display for FixedStr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.to_str())
    }
}
impl FixedStr {
    // NOTE(review): relies on the buffer being NUL-terminated valid UTF-8;
    // `unwrap` panics on invalid UTF-8 — confirm DPDK guarantees this.
    pub fn to_str(&self) -> &str {
        unsafe { CStr::from_ptr(self.0.as_ptr()).to_str().unwrap() }
    }
}
// Wrapper over DPDK's parsed IP address (v4 or v6, discriminated by `family`).
pub struct IpNetAddr(RawIpNetAddr);
impl Deref for IpNetAddr {
    type Target = RawIpNetAddr;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl fmt::Display for IpNetAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.to_ipaddr())
    }
}
impl IpNetAddr {
    // Reinterprets the raw in_addr bytes as std's Ipv4Addr; only meaningful
    // when `family` is AF_INET.
    pub fn as_ipv4(&self) -> &Ipv4Addr {
        unsafe { &*(&self.0.addr as *const _ as *const std::net::Ipv4Addr) }
    }
    // Reinterprets the raw in6_addr bytes as std's Ipv6Addr; only meaningful
    // when `family` is AF_INET6.
    pub fn as_ipv6(&self) -> &Ipv6Addr {
        unsafe { &*(&self.0.addr as *const _ as *const std::net::Ipv6Addr) }
    }
    // Converts to an owned std IpAddr, choosing the variant by `family`.
    pub fn to_ipaddr(&self) -> IpAddr {
        if self.0.family == libc::AF_INET as u8 {
            IpAddr::V4(*self.as_ipv4())
        } else {
            IpAddr::V6(*self.as_ipv6())
        }
    }
}
// Wrapper over DPDK's 6-byte Ethernet MAC address.
pub struct EtherAddr(RawEtherAddr);
impl Deref for EtherAddr {
    type Target = ether::EtherAddr;
    fn deref(&self) -> &Self::Target {
        // Reinterprets the raw rte_ether_addr as the crate's EtherAddr;
        // assumes both are plain 6-byte wrappers with identical layout.
        unsafe { &*(&self.0 as *const _ as *const ether::EtherAddr) }
    }
}
impl fmt::Display for EtherAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.to_etheraddr())
    }
}
impl EtherAddr {
    // Copies the address bytes into an owned crate-level EtherAddr.
    pub fn to_etheraddr(&self) -> ether::EtherAddr {
        ether::EtherAddr::from(self.0.addr_bytes)
    }
}
/// Wrapper over DPDK's parsed port list, a 32-bit bitmap of port ids.
pub struct PortList(RawPortList);
impl PortList {
    /// Iterates over every port id (0..32) whose bit is set in the bitmap.
    pub fn to_portlist<'a>(&'a self) -> Box<dyn Iterator<Item = u32> + 'a> {
        Box::new((0u32..32).filter(move |&bit| (self.0.map >> bit) & 1 == 1))
    }
}
impl fmt::Display for PortList {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let ids: Vec<String> = self.to_portlist().map(|id| id.to_string()).collect();
        write!(f, "{}", ids.join(","))
    }
}
// Returns true when `c` terminates a token according to DPDK's rules.
// The `as i8` cast matches the C `char` parameter of the FFI function.
pub fn is_end_of_token(c: u8) -> bool {
    unsafe { ffi::cmdline_isendoftoken(c as i8) != 0 }
}
// Alias for the per-token-type operation vtable used by the cmdline parser.
pub type RawTokenOps = ffi::cmdline_token_ops;
#[macro_export]
// Builds a string token for `$field` of `$container`, mirroring DPDK's C
// TOKEN_STRING_INITIALIZER macro. With no `$string` the token matches any
// word; with `$string` it matches that fixed word, whose bytes are copied
// into a calloc'd, NUL-terminated buffer freed by `Token`'s Drop impl.
// NOTE(review): the calloc result is not checked for NULL before the copy.
macro_rules! TOKEN_STRING_INITIALIZER {
    ($container:path, $field:ident) => {{
        $crate::cmdline::Token::Str(
            $crate::ffi::cmdline_token_string {
                hdr: $crate::ffi::cmdline_token_hdr {
                    ops: unsafe { &mut $crate::ffi::cmdline_token_string_ops },
                    offset: offset_of!($container, $field) as u32,
                },
                string_data: $crate::ffi::cmdline_token_string_data {
                    str_: ::std::ptr::null(),
                },
            },
            ::std::marker::PhantomData,
        )
    }};
    ($container:path, $field:ident, $string:expr) => {{
        let p = unsafe { ::libc::calloc(1, $string.len() + 1) as *mut u8 };
        unsafe {
            ::std::ptr::copy_nonoverlapping($string.as_ptr(), p, $string.len());
        }
        $crate::cmdline::Token::Str(
            $crate::ffi::cmdline_token_string {
                hdr: $crate::ffi::cmdline_token_hdr {
                    ops: unsafe { &mut $crate::ffi::cmdline_token_string_ops },
                    offset: offset_of!($container, $field) as u32,
                },
                string_data: $crate::ffi::cmdline_token_string_data { str_: p as *const i8 },
            },
            ::std::marker::PhantomData,
        )
    }};
}
#[macro_export]
// Builds a numeric token for `$field` of `$container`. The first eight arms
// map a Rust primitive type name to the corresponding DPDK number type; the
// last arm does the actual construction from an explicit `cmdline_numtype`.
macro_rules! TOKEN_NUM_INITIALIZER {
    ($container:path, $field:ident, u8) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_UINT8)
    };
    ($container:path, $field:ident, u16) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_UINT16)
    };
    ($container:path, $field:ident, u32) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_UINT32)
    };
    ($container:path, $field:ident, u64) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_UINT64)
    };
    ($container:path, $field:ident, i8) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_INT8)
    };
    ($container:path, $field:ident, i16) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_INT16)
    };
    ($container:path, $field:ident, i32) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_INT32)
    };
    ($container:path, $field:ident, i64) => {
        TOKEN_NUM_INITIALIZER!($container, $field, $crate::ffi::cmdline_numtype::RTE_INT64)
    };
    ($container:path, $field:ident, $numtype:expr) => {
        $crate::cmdline::Token::Num(
            $crate::ffi::cmdline_token_num {
                hdr: $crate::ffi::cmdline_token_hdr {
                    ops: unsafe { &mut $crate::ffi::cmdline_token_num_ops },
                    offset: offset_of!($container, $field) as u32,
                },
                num_data: $crate::ffi::cmdline_token_num_data { type_: $numtype },
            },
            ::std::marker::PhantomData,
        )
    };
}
#[macro_export]
// Builds an IP-address token for `$field` of `$container`. Without `$flags`
// it accepts both IPv4 and IPv6; the two-arm form lets variants below narrow
// or extend the accepted formats.
macro_rules! TOKEN_IPADDR_INITIALIZER {
    ($container:path, $field:ident) => {
        TOKEN_IPADDR_INITIALIZER!(
            $container,
            $field,
            $crate::ffi::CMDLINE_IPADDR_V4 | $crate::ffi::CMDLINE_IPADDR_V6
        )
    };
    ($container:path, $field:ident, $flags:expr) => {
        $crate::cmdline::Token::IpAddr(
            $crate::ffi::cmdline_token_ipaddr {
                hdr: $crate::ffi::cmdline_token_hdr {
                    ops: unsafe { &mut $crate::ffi::cmdline_token_ipaddr_ops },
                    offset: offset_of!($container, $field) as u32,
                },
                ipaddr_data: $crate::ffi::cmdline_token_ipaddr_data { flags: $flags as u8 },
            },
            ::std::marker::PhantomData,
        )
    };
}
// The four macros below are thin wrappers over TOKEN_IPADDR_INITIALIZER that
// preset the accepted-format flags: plain v4, plain v6, any network
// (v4/v6 with prefix), v4 network, and v6 network respectively.
#[macro_export]
macro_rules! TOKEN_IPV4_INITIALIZER {
    ($container:path, $field:ident) => {
        TOKEN_IPADDR_INITIALIZER!($container, $field, $crate::ffi::CMDLINE_IPADDR_V4)
    };
}
#[macro_export]
macro_rules! TOKEN_IPV6_INITIALIZER {
    ($container:path, $field:ident) => {
        TOKEN_IPADDR_INITIALIZER!($container, $field, $crate::ffi::CMDLINE_IPADDR_V6)
    };
}
#[macro_export]
macro_rules! TOKEN_IPNET_INITIALIZER {
    ($container:path, $field:ident) => {
        TOKEN_IPADDR_INITIALIZER!(
            $container,
            $field,
            $crate::ffi::CMDLINE_IPADDR_V4 | $crate::ffi::CMDLINE_IPADDR_V6 | $crate::ffi::CMDLINE_IPADDR_NETWORK
        )
    };
}
#[macro_export]
macro_rules! TOKEN_IPV4NET_INITIALIZER {
    ($container:path, $field:ident) => {
        TOKEN_IPADDR_INITIALIZER!(
            $container,
            $field,
            $crate::ffi::CMDLINE_IPADDR_V4 | $crate::ffi::CMDLINE_IPADDR_NETWORK
        )
    };
}
#[macro_export]
macro_rules! TOKEN_IPV6NET_INITIALIZER {
    ($container:path, $field:ident) => {
        TOKEN_IPADDR_INITIALIZER!(
            $container,
            $field,
            $crate::ffi::CMDLINE_IPADDR_V6 | $crate::ffi::CMDLINE_IPADDR_NETWORK
        )
    };
}
// Builders for MAC-address and port-list tokens; both only need the header
// (ops vtable + destination field offset), no extra per-token data.
#[macro_export]
macro_rules! TOKEN_ETHERADDR_INITIALIZER {
    ($container:path, $field:ident) => {
        $crate::cmdline::Token::EtherAddr(
            $crate::ffi::cmdline_token_etheraddr {
                hdr: $crate::ffi::cmdline_token_hdr {
                    ops: unsafe { &mut $crate::ffi::cmdline_token_etheraddr_ops },
                    offset: offset_of!($container, $field) as u32,
                },
            },
            ::std::marker::PhantomData,
        )
    };
}
#[macro_export]
macro_rules! TOKEN_PORTLIST_INITIALIZER {
    ($container:path, $field:ident) => {
        $crate::cmdline::Token::PortList(
            $crate::ffi::cmdline_token_portlist {
                hdr: $crate::ffi::cmdline_token_hdr {
                    ops: unsafe { &mut $crate::ffi::cmdline_token_portlist_ops },
                    offset: offset_of!($container, $field) as u32,
                },
            },
            ::std::marker::PhantomData,
        )
    };
}
// Rust-side command callback: receives the parsed result struct, the command
// line it came from, and optional user data.
pub type InstHandler<T, D> = fn(inst: &mut T, cmdline: &CmdLine, data: Option<D>);
// Heap-allocated pairing of a handler with its user data; a raw pointer to it
// is stashed in `cmdline_inst.data` by `inst()` below.
struct InstHandlerContext<T, D> {
    handler: InstHandler<T, D>,
    data: Option<D>,
}
// C-callable trampoline invoked by DPDK when a command matches.
// NOTE(review): `Box::from_raw` takes ownership and drops the context when
// this returns, so invoking the same command a second time would be a
// use-after-free — confirm whether commands are expected to fire only once.
unsafe extern "C" fn _inst_handler_stub<T, D>(inst: *mut c_void, cl: *mut RawCmdLine, ctxt: *mut c_void) {
    let ctxt = Box::from_raw(ctxt as *mut InstHandlerContext<T, D>);
    (ctxt.handler)((inst as *mut T).as_mut().unwrap(), &CmdLine::Borrowed(cl), ctxt.data)
}
pub type RawInstPtr = *mut ffi::cmdline_inst;
// Owning wrapper around a calloc'd cmdline_inst built by `inst()`.
pub struct Inst(RawInstPtr);
impl Drop for Inst {
    fn drop(&mut self) {
        // Frees the help string and the inst allocation made in `inst()`.
        // NOTE(review): the `data` field (Box<InstHandlerContext>) allocated
        // in `inst()` is never reclaimed here — it leaks unless the handler
        // stub consumed it.
        unsafe {
            libc::free((*self.0).help_str as *mut libc::c_void);
            libc::free(self.0 as *mut libc::c_void);
        }
    }
}
impl Inst {
    // Returns the raw pointer for handing to the DPDK context builder.
    pub fn as_raw(&self) -> RawInstPtr {
        self.0
    }
}
// Builds a heap-allocated DPDK command instance from a handler, optional user
// data, a help string and the token sequence describing the command syntax.
// The returned `Inst` owns the allocation (freed in its Drop impl).
pub fn inst<T, D>(handler: InstHandler<T, D>, data: Option<D>, help: &'static str, tokens: &[&Token<T>]) -> Inst {
    unsafe {
        // NUL-terminated copy of the help text; calloc zero-fills, providing
        // the terminator. NOTE(review): calloc result not checked for NULL.
        let help_str = libc::calloc(1, help.len() + 1) as *mut c_char;
        ptr::copy_nonoverlapping(help.as_ptr(), help_str as *mut u8, help.len());
        // cmdline_inst ends in a flexible array of token pointers, so the
        // allocation is sized for the struct plus one pointer per token.
        let size = mem::size_of::<ffi::cmdline_inst>() + mem::size_of::<RawTokenPtr>() * tokens.len();
        let inst = libc::calloc(1, size) as *mut ffi::cmdline_inst;
        *inst = ffi::cmdline_inst {
            f: Some(_inst_handler_stub::<T, D>),
            data: Box::into_raw(Box::new(InstHandlerContext { data, handler })) as *mut _,
            help_str,
            tokens: ffi::__IncompleteArrayField::new(),
        };
        // Fill the trailing flexible array with the raw token header pointers.
        ptr::copy_nonoverlapping(
            tokens
                .iter()
                .map(|ref token| token.as_raw())
                .collect::<Vec<RawTokenPtr>>()
                .as_ptr(),
            &((*inst).tokens) as *const _ as *mut *const _,
            tokens.len(),
        );
        Inst(inst)
    }
}
// Builds a cmdline parse context: a NULL-terminated C array of instance
// pointers (calloc zero-fills, so the `+ 1` slot is the terminator).
// The `Inst` values must outlive the returned `Context`.
pub fn new(insts: &[&Inst]) -> Context {
    unsafe {
        let p = libc::calloc(insts.len() + 1, mem::size_of::<RawInstPtr>()) as *mut RawInstPtr;
        ptr::copy_nonoverlapping(
            insts
                .iter()
                .map(|ref inst| inst.as_raw())
                .collect::<Vec<RawInstPtr>>()
                .as_ptr(),
            p,
            insts.len(),
        );
        Context(p)
    }
}
// Owns the NULL-terminated instance-pointer array built by `new()`.
pub struct Context(*const RawInstPtr);
impl Drop for Context {
    fn drop(&mut self) {
        // Frees only the pointer array; the Inst values it references are
        // owned (and freed) separately by their own Drop impls.
        unsafe { libc::free(self.0 as *mut libc::c_void) }
    }
}
impl Context {
pub fn open_stdin(&self, prompt: &str) -> Result<StdInCmdLine> {
let cl = unsafe { ffi::cmdline_stdin_new(self.0 as *mut *mut _, try!(to_cptr!(prompt))) };
rte_check!(cl, NonNull; ok => { StdInCmdLine(CmdLine::Owned(cl)) })
}
pub fn open_file<P: AsRef<Path>>(&self, prompt: &str, path: P) -> Result<CmdLine> {
let cl = unsafe {
ffi::cmdline_file_new(
self.0 as *mut *mut _,
try!(to_cptr!(prompt)),
path.as_ref().as_os_str().to_str().unwrap().as_ptr() as *const i8,
)
};
rte_check!(cl, NonNull; ok => { CmdLine::Owned(cl) })
}
}
// A CmdLine attached to stdin/stdout; restores terminal state on drop before
// the inner CmdLine's own Drop frees the handle.
pub struct StdInCmdLine(CmdLine);
impl Drop for StdInCmdLine {
    fn drop(&mut self) {
        unsafe { ffi::cmdline_stdin_exit(self.0.as_raw()) }
    }
}
impl Deref for StdInCmdLine {
    type Target = CmdLine;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for StdInCmdLine {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
#[repr(u32)]
#[derive(Clone, Copy, Debug, PartialEq, FromPrimitive, ToPrimitive)]
/// State of the underlying readline loop, mirroring `ffi::rdline_status`.
pub enum ReadlineStatus {
    Init = ffi::rdline_status::RDLINE_INIT,
    Running = ffi::rdline_status::RDLINE_RUNNING,
    Exited = ffi::rdline_status::RDLINE_EXITED,
}
impl From<u32> for ReadlineStatus {
    /// Converts a raw status value returned by the C API.
    ///
    /// Bug fix: this previously used `mem::transmute`, which is undefined
    /// behaviour for any value other than the three declared discriminants.
    /// An explicit comparison makes an out-of-range value panic instead.
    ///
    /// # Panics
    /// Panics if `status` is not one of the known `rdline_status` values.
    fn from(status: u32) -> Self {
        match status {
            s if s == ReadlineStatus::Init as u32 => ReadlineStatus::Init,
            s if s == ReadlineStatus::Running as u32 => ReadlineStatus::Running,
            s if s == ReadlineStatus::Exited as u32 => ReadlineStatus::Exited,
            _ => panic!("invalid rdline status: {}", status),
        }
    }
}
#[repr(i32)]
#[derive(Clone, Copy, Debug, PartialEq, FromPrimitive, ToPrimitive)]
/// Outcome of parsing one command line, mirroring the C parse return codes.
pub enum ParseStatus {
    Success = ffi::CMDLINE_PARSE_SUCCESS as i32,
    Ambiguous = ffi::CMDLINE_PARSE_AMBIGUOUS,
    NoMatch = ffi::CMDLINE_PARSE_NOMATCH,
    BadArgs = ffi::CMDLINE_PARSE_BAD_ARGS,
}
impl From<i32> for ParseStatus {
    /// Converts a raw parse return code from the C API.
    ///
    /// Bug fix: this previously used `mem::transmute`, which is undefined
    /// behaviour for any value other than the declared discriminants. An
    /// explicit comparison makes an out-of-range value panic instead.
    ///
    /// # Panics
    /// Panics if `status` is not one of the known parse return codes.
    fn from(status: i32) -> Self {
        match status {
            s if s == ParseStatus::Success as i32 => ParseStatus::Success,
            s if s == ParseStatus::Ambiguous as i32 => ParseStatus::Ambiguous,
            s if s == ParseStatus::NoMatch as i32 => ParseStatus::NoMatch,
            s if s == ParseStatus::BadArgs as i32 => ParseStatus::BadArgs,
            _ => panic!("invalid cmdline parse status: {}", status),
        }
    }
}
#[repr(i32)]
#[derive(Clone, Copy, Debug, PartialEq, FromPrimitive, ToPrimitive)]
/// In/out state flag for `cmdline_complete`: 0 asks for completion, -1 asks
/// the library to display the available choices.
pub enum ParseCompleteState {
    TryToComplete = 0,
    DisplayChoice = -1,
}
impl From<i32> for ParseCompleteState {
    /// Converts a raw completion-state value.
    ///
    /// Bug fix: this previously used `mem::transmute`, which is undefined
    /// behaviour for any value other than 0 or -1. An explicit match makes
    /// an out-of-range value panic instead.
    ///
    /// # Panics
    /// Panics if `status` is neither 0 nor -1.
    fn from(status: i32) -> Self {
        match status {
            0 => ParseCompleteState::TryToComplete,
            -1 => ParseCompleteState::DisplayChoice,
            _ => panic!("invalid cmdline complete state: {}", status),
        }
    }
}
#[repr(u32)]
#[derive(Clone, Copy, Debug, PartialEq, FromPrimitive, ToPrimitive)]
/// Result of one `cmdline_complete` step, mirroring the C completion codes.
pub enum ParseCompleteStatus {
    Finished = ffi::CMDLINE_PARSE_COMPLETE_FINISHED,
    Again = ffi::CMDLINE_PARSE_COMPLETE_AGAIN,
    Buffer = ffi::CMDLINE_PARSE_COMPLETED_BUFFER,
}
impl From<u32> for ParseCompleteStatus {
    /// Converts a raw completion return code from the C API.
    ///
    /// Bug fix: this previously used `mem::transmute`, which is undefined
    /// behaviour for any value other than the declared discriminants. An
    /// explicit comparison makes an out-of-range value panic instead.
    ///
    /// # Panics
    /// Panics if `status` is not one of the known completion codes.
    fn from(status: u32) -> Self {
        match status {
            s if s == ParseCompleteStatus::Finished as u32 => ParseCompleteStatus::Finished,
            s if s == ParseCompleteStatus::Again as u32 => ParseCompleteStatus::Again,
            s if s == ParseCompleteStatus::Buffer as u32 => ParseCompleteStatus::Buffer,
            _ => panic!("invalid cmdline complete status: {}", status),
        }
    }
}
pub type RawCmdLine = ffi::cmdline;
pub type RawCmdLinePtr = *mut ffi::cmdline;
// Handle to a DPDK command line. `Owned` frees the C object on drop;
// `Borrowed` (used inside handler callbacks) does not.
pub enum CmdLine {
    Owned(RawCmdLinePtr),
    Borrowed(RawCmdLinePtr),
}
impl Drop for CmdLine {
    fn drop(&mut self) {
        // Only an owned handle releases the underlying C object; a borrowed
        // handle belongs to the library for the duration of a callback.
        match *self {
            CmdLine::Owned(cl) => unsafe { ffi::cmdline_free(cl) },
            CmdLine::Borrowed(_) => {}
        }
    }
}
// Transparent access to the raw C struct behind either handle variant.
impl Deref for CmdLine {
    type Target = RawCmdLine;
    fn deref(&self) -> &Self::Target {
        unsafe { &*self.as_raw() }
    }
}
impl DerefMut for CmdLine {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.as_raw() }
    }
}
impl CmdLine {
    /// Returns the raw pointer regardless of ownership.
    pub fn as_raw(&self) -> RawCmdLinePtr {
        match *self {
            CmdLine::Owned(cl) | CmdLine::Borrowed(cl) => cl,
        }
    }
    /// Prints `s` to the command line's output.
    pub fn print<T: string::ToString>(&self, s: T) -> Result<&Self> {
        unsafe {
            ffi::cmdline_printf(self.as_raw(), try!(to_cptr!(s.to_string())));
        }
        Ok(self)
    }
    /// Prints `s` followed by a newline.
    pub fn println<T: string::ToString>(&self, s: T) -> Result<&Self> {
        unsafe {
            ffi::cmdline_printf(self.as_raw(), try!(to_cptr!(format!("{}\n", s.to_string()))));
        }
        Ok(self)
    }
    /// Replaces the prompt string.
    ///
    /// Bug fix: the previous implementation passed `str::as_ptr()` directly
    /// to C, but `&str` is not NUL-terminated, so the C side could read past
    /// the end of the buffer. A NUL-terminated copy is built first; DPDK's
    /// `cmdline_set_prompt` copies the prompt into its own buffer, so the
    /// temporary may be dropped after the call (verify against the bound
    /// DPDK version).
    ///
    /// # Panics
    /// Panics if `s` contains an interior NUL byte.
    pub fn set_prompt(&self, s: &str) -> &CmdLine {
        let prompt = CString::new(s).expect("prompt must not contain interior NUL bytes");
        unsafe {
            ffi::cmdline_set_prompt(self.as_raw(), prompt.as_ptr() as *const i8);
        }
        self
    }
    /// Runs the interactive loop until `quit()` is called or input ends.
    pub fn interact(&self) -> &CmdLine {
        unsafe {
            ffi::cmdline_interact(self.as_raw());
        }
        self
    }
    /// Processes any pending input without blocking and reports the
    /// readline state.
    pub fn poll(&self) -> Result<ReadlineStatus> {
        let status = unsafe { ffi::cmdline_poll(self.as_raw()) };
        rte_check!(status; ok => { ReadlineStatus::from(status as u32) })
    }
    /// Asks the interactive loop to exit.
    pub fn quit(&self) {
        unsafe { ffi::cmdline_quit(self.as_raw()) }
    }
    /// Parses and executes a single command line held in `buf`.
    pub fn parse<T: string::ToString>(&self, buf: T) -> Result<&Self> {
        let status = unsafe { ffi::cmdline_parse(self.as_raw(), try!(to_cptr!(buf.to_string()))) };
        status.ok_or(CmdLineParseError(status)).map(|_| self)
    }
    /// Performs one tab-completion step for the partial input `buf`,
    /// writing the completion into `dst`; `state` is updated in place by
    /// the C library between successive calls.
    pub fn complete<T: string::ToString>(
        &self,
        buf: T,
        state: &mut ParseCompleteState,
        dst: &mut [u8],
    ) -> Result<ParseCompleteStatus> {
        let status = unsafe {
            ffi::cmdline_complete(
                self.as_raw(),
                try!(to_cptr!(buf.to_string())),
                state as *mut _ as *mut i32,
                dst.as_mut_ptr() as *mut i8,
                dst.len() as u32,
            )
        };
        rte_check!(status; ok => { ParseCompleteStatus::from(status as u32) })
    }
}
|
use crate::parser::Error;
use crate::schedule_at;
use crate::time_domain::RuleKind::*;
#[test]
// Weekday-range selectors over a known week (2020-06-01 is a Monday):
// "Mo-Su" always matches, a single-day selector matches only its own day,
// and a mixed day list with a time span yields that span on listed days
// and an empty schedule otherwise.
fn basic_range() -> Result<(), Error> {
    assert_eq!(
        schedule_at!("Mo-Su", "2020-06-01"),
        schedule! { 00,00 => Open => 24,00 }
    );
    assert_eq!(schedule_at!("Tu", "2020-06-01"), schedule! {});
    assert_eq!(
        schedule_at!("Tu", "2020-06-02"),
        schedule! { 00,00 => Open => 24,00 }
    );
    assert_eq!(schedule_at!("We", "2020-06-02"), schedule! {});
    // Mo, Tu, Th, Sa, Su of that week — all covered by the day list.
    for date in &[
        "2020-06-01",
        "2020-06-02",
        "2020-06-04",
        "2020-06-06",
        "2020-06-07",
    ] {
        assert_eq!(
            schedule_at!("Mo-Tu,Th,Sa-Su 10:00-12:00", date),
            schedule! { 10,00 => Open => 12,00 }
        );
    }
    // We and Fr are excluded from the day list.
    for date in &["2020-06-03", "2020-06-05"] {
        assert_eq!(
            schedule_at!("Mo-Tu,Th,Sa-Su 10:00-12:00", date),
            schedule! {}
        );
    }
    Ok(())
}
#[test]
fn nth() -> Result<(), Error> {
    // June 2020 Mondays fall on the 1st, 8th, 15th, 22nd and 29th.
    // "Mo[2-4]" selects the 2nd through 4th Mondays only.
    for date in &["2020-06-08", "2020-06-15", "2020-06-22"] {
        assert_eq!(
            schedule_at!("Mo[2-4] 10:00-12:00", date),
            schedule! { 10,00 => Open => 12,00 }
        );
    }
    // The 1st and 5th Mondays are outside the [2-4] range.
    for date in &["2020-06-01", "2020-06-29"] {
        assert_eq!(schedule_at!("Mo[2-4] 10:00-12:00", date), schedule! {});
    }
    // "Mo[1]" matches only the first Monday of the month...
    assert_eq!(
        schedule_at!("Mo[1] 10:00-12:00", "2020-06-01"),
        schedule! { 10,00 => Open => 12,00 }
    );
    // ...not the second Monday...
    assert_eq!(
        schedule_at!("Mo[1] 10:00-12:00", "2020-06-08"),
        schedule! {}
    );
    // ...and not a non-Monday either.
    assert_eq!(
        schedule_at!("Mo[1] 10:00-12:00", "2020-06-02"),
        schedule! {}
    );
    Ok(())
}
#[test]
fn nth_with_offset() -> Result<(), Error> {
    // "+2 days" shifts the 2nd-4th Mondays (8th, 15th, 22nd) to the
    // following Wednesdays (10th, 17th, 24th).
    for date in &["2020-06-10", "2020-06-17", "2020-06-24"] {
        assert_eq!(
            schedule_at!("Mo[2-4] +2 days 10:00-12:00", date),
            schedule! { 10,00 => Open => 12,00 }
        );
    }
    // 2020-06-03 is 1st Monday + 2 and 2020-07-01 is 5th Monday + 2, both
    // outside the [2-4] selection.
    for date in &["2020-06-03", "2020-07-01"] {
        assert_eq!(
            schedule_at!("Mo[2-4] +2 days 10:00-12:00", date),
            schedule! {}
        );
    }
    // "-1 day" on the first Monday (June 1st) lands on Sunday May 31st,
    // even though that day belongs to the previous month.
    assert_eq!(
        schedule_at!("Mo[1] -1 day 10:00-12:00", "2020-05-31"),
        schedule! { 10,00 => Open => 12,00 }
    );
    // The Monday itself is no longer selected once the offset applies...
    assert_eq!(
        schedule_at!("Mo[1] -1 day 10:00-12:00", "2020-06-01"),
        schedule! {}
    );
    // ...nor is an unrelated Sunday later in the month.
    assert_eq!(
        schedule_at!("Mo[1] -1 day 10:00-12:00", "2020-06-07"),
        schedule! {}
    );
    Ok(())
}
// Placeholder for public-holiday ("PH") rule tests; underscore-prefixed so it
// does not trip dead-code warnings until it is implemented.
fn _holiday() {
    // TODO
}
|
use crate::utils::establish_connection;
use diesel::deserialize::QueryableByName;
use diesel::mysql::MysqlConnection;
use diesel::prelude::*;
use diesel::sql_query;
use chrono::{NaiveDateTime};
mod utils;
type DB = diesel::mysql::Mysql;
/// One row of the `memos` table, deserialized by column name via the manual
/// `QueryableByName` impl below.
#[derive(Debug)]
pub struct Memos {
    // Primary key.
    id: i32,
    name: String,
    comment: String,
    //time: String,
    //time: DateTime<Utc>,
    // MySQL DATETIME mapped to a timezone-naive chrono timestamp.
    time: NaiveDateTime,
}
impl QueryableByName<DB> for Memos {
    /// Builds a `Memos` from a raw-SQL result row, looking each field up by
    /// its column name (so the SELECT column order does not matter).
    fn build<R: diesel::row::NamedRow<diesel::mysql::Mysql>>(row: &R,) -> diesel::deserialize::Result<Self> {
        Ok(Memos {
            id: row.get("id")?,
            name: row.get("name")?,
            comment: row.get("comment")?,
            // The SQL type must be spelled out here because chrono's
            // NaiveDateTime alone does not pin down the MySQL column type.
            time: row.get::<diesel::mysql::types::Datetime, _>("time")?,
        }
        )
    }
}
/// Runs a raw `SELECT` against the `memos` table and prints every row.
///
/// NOTE(review): both the connection and the query `unwrap` on failure —
/// acceptable for a demo binary, not for library code.
fn simple_sql() {
    let connection: MysqlConnection = establish_connection();
    let memos: Vec<Memos> = sql_query("SELECT id, name, comment, time FROM memos",).load(&connection).unwrap();
    // Tab-separated dump of each row.
    for uu in memos.iter(){
        println!("{}\t{}\t{}\t{}", uu.id, uu.name, uu.comment, uu.time);
        //println!("{}\t{}\t{}", uu.id, uu.name, uu.comment);
    }
}
/// Entry point: prints start/end markers (Japanese "start"/"end") to stderr
/// around the demo query.
fn main (){
    eprintln! ("*** 開始 ***");
    simple_sql();
    eprintln! ("*** 終了 ***");
}
|
//! Control register
#[cfg(cortex_m)]
use core::arch::asm;
#[cfg(cortex_m)]
use core::sync::atomic::{compiler_fence, Ordering};
/// Control register
#[derive(Clone, Copy, Debug)]
pub struct Control {
    bits: u32,
}

impl Control {
    /// Creates a `Control` value from raw bits.
    #[inline]
    pub fn from_bits(bits: u32) -> Self {
        Control { bits }
    }

    /// Returns the contents of the register as raw bits
    #[inline]
    pub fn bits(self) -> u32 {
        self.bits
    }

    /// True when the single-bit `mask` is set in the raw value.
    #[inline]
    fn flag(self, mask: u32) -> bool {
        self.bits & mask != 0
    }

    /// Sets (`true`) or clears (`false`) the single-bit `mask`.
    #[inline]
    fn set_flag(&mut self, mask: u32, on: bool) {
        if on {
            self.bits |= mask;
        } else {
            self.bits &= !mask;
        }
    }

    /// Thread mode privilege level (nPRIV, bit 0; set = unprivileged).
    #[inline]
    pub fn npriv(self) -> Npriv {
        if self.flag(1 << 0) {
            Npriv::Unprivileged
        } else {
            Npriv::Privileged
        }
    }

    /// Sets the thread mode privilege level value (nPRIV).
    #[inline]
    pub fn set_npriv(&mut self, npriv: Npriv) {
        self.set_flag(1 << 0, matches!(npriv, Npriv::Unprivileged));
    }

    /// Currently active stack pointer (SPSEL, bit 1; set = PSP).
    #[inline]
    pub fn spsel(self) -> Spsel {
        if self.flag(1 << 1) {
            Spsel::Psp
        } else {
            Spsel::Msp
        }
    }

    /// Sets the SPSEL value.
    #[inline]
    pub fn set_spsel(&mut self, spsel: Spsel) {
        self.set_flag(1 << 1, matches!(spsel, Spsel::Psp));
    }

    /// Whether context floating-point is currently active (FPCA, bit 2).
    #[inline]
    pub fn fpca(self) -> Fpca {
        if self.flag(1 << 2) {
            Fpca::Active
        } else {
            Fpca::NotActive
        }
    }

    /// Sets the FPCA value.
    #[inline]
    pub fn set_fpca(&mut self, fpca: Fpca) {
        self.set_flag(1 << 2, matches!(fpca, Fpca::Active));
    }
}
/// Thread mode privilege level
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Npriv {
    /// Privileged
    Privileged,
    /// Unprivileged
    Unprivileged,
}

impl Npriv {
    /// Is in privileged thread mode?
    #[inline]
    pub fn is_privileged(self) -> bool {
        matches!(self, Npriv::Privileged)
    }

    /// Is in unprivileged thread mode?
    #[inline]
    pub fn is_unprivileged(self) -> bool {
        !self.is_privileged()
    }
}
/// Currently active stack pointer
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Spsel {
    /// MSP is the current stack pointer
    Msp,
    /// PSP is the current stack pointer
    Psp,
}

impl Spsel {
    /// Is MSP the current stack pointer?
    #[inline]
    pub fn is_msp(self) -> bool {
        matches!(self, Spsel::Msp)
    }

    /// Is PSP the current stack pointer?
    #[inline]
    pub fn is_psp(self) -> bool {
        !self.is_msp()
    }
}
/// Whether context floating-point is currently active
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Fpca {
    /// Floating-point context active.
    Active,
    /// No floating-point context active
    NotActive,
}

impl Fpca {
    /// Is a floating-point context active?
    #[inline]
    pub fn is_active(self) -> bool {
        matches!(self, Fpca::Active)
    }

    /// Is a floating-point context not active?
    #[inline]
    pub fn is_not_active(self) -> bool {
        !self.is_active()
    }
}
/// Reads the CPU register
#[cfg(cortex_m)]
#[inline]
pub fn read() -> Control {
    let bits;
    // SAFETY: reading CONTROL via MRS has no memory side effects
    // (`nomem, nostack, preserves_flags`).
    unsafe { asm!("mrs {}, CONTROL", out(reg) bits, options(nomem, nostack, preserves_flags)) };
    Control { bits }
}
/// Writes to the CPU register.
///
/// # Safety
///
/// Changing CONTROL (privilege level, active stack pointer, FP state) can
/// invalidate assumptions made elsewhere in the program; the caller must
/// ensure the new value is consistent with the current execution state.
#[cfg(cortex_m)]
#[inline]
pub unsafe fn write(control: Control) {
    let control = control.bits();
    // ISB is required after writing to CONTROL,
    // per ARM architectural requirements (see Application Note 321).
    asm!(
        "msr CONTROL, {}",
        "isb",
        in(reg) control,
        options(nomem, nostack, preserves_flags),
    );
    // Ensure memory accesses are not reordered around the CONTROL update.
    compiler_fence(Ordering::SeqCst);
}
|
use menu::types::PaginationContainer;
use serenity::client::Client as SerenityClient;
use serenity::http::Http;
use serenity::prelude::{GatewayIntents, RwLock, SerenityError};
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use crate::core::event_handler::Zero2EventHandler;
use crate::core::framework::Zero2Framework;
use crate::core::store::ShardManagerContainer;
/// Thin wrapper around a fully configured serenity `Client`.
pub struct Zero2Client {
    client: SerenityClient,
}
impl Zero2Client {
    /// Builds the Discord client: reads the token, fetches application info,
    /// wires up the framework/event handler, seeds shared state, and installs
    /// a Ctrl-C handler that shuts down all shards.
    ///
    /// # Panics
    /// Panics if `DISCORD_TOKEN` is unset, application info cannot be
    /// fetched, or the client fails to build.
    pub async fn new() -> Zero2Client {
        let token = kankyo::key("DISCORD_TOKEN").expect("Expected a token in the environment");
        let http = Http::new(&token);
        let app_info = match http.get_current_application_info().await {
            Ok(info) => info,
            Err(why) => panic!("Could not access application info: {:?}", why),
        };
        // The application owner is always allowed to run owner-only commands.
        let mut owners = HashSet::new();
        owners.insert(app_info.owner.id);
        let framework = Zero2Framework::with_info(owners, None);
        let intents = GatewayIntents::all();
        let client = SerenityClient::builder(&token, intents)
            .event_handler(Zero2EventHandler)
            .framework(framework)
            .application_id(*app_info.id.as_u64())
            .await
            .expect("Err creating client");
        {
            // Scope the write lock so it is released before spawning tasks.
            let mut data = client.data.write().await;
            data.insert::<ShardManagerContainer>(Arc::clone(&client.shard_manager));
            data.insert::<PaginationContainer>(Arc::new(RwLock::new(HashMap::new())));
        }
        // Graceful shutdown: first Ctrl-C stops every shard.
        let shard_manager = client.shard_manager.clone();
        tokio::spawn(async move {
            tokio::signal::ctrl_c()
                .await
                .expect("Could not register ctrl+c handler");
            shard_manager.lock().await.shutdown_all().await;
        });
        Zero2Client { client }
    }

    /// Runs the client until shutdown, letting serenity decide the shard count.
    pub async fn start(&mut self) -> Result<(), SerenityError> {
        self.client.start_autosharded().await
    }
}
|
use alloc::boxed::Box;
use collections::vec::Vec;
use core::intrinsics::volatile_load;
use core::mem::size_of;
//use core::slice;
use drivers::io::{Io, Mmio};
use drivers::pci::config::PciConfig;
use arch::context::context_switch;
use fs::KScheme;
use super::{Hci, Packet, Pipe, Setup};
/// EHCI queue element transfer descriptor (qTD). `repr(packed)` because the
/// layout is consumed directly by the host controller hardware.
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Qtd {
    // Physical pointer to the next qTD; value 1 (terminate bit) ends the chain.
    next: u32,
    next_alt: u32,
    // Status/PID bits; bits 16..31 carry the total byte count, bit 7 = Active.
    token: u32,
    buffers: [u32; 5],
}
/// EHCI queue head; hardware-consumed layout, hence `repr(packed)`.
#[repr(packed)]
struct QueueHead {
    // Horizontal link pointer; 1 terminates.
    next: u32,
    // Endpoint characteristics (address, endpoint, max packet size, speed).
    characteristics: u32,
    capabilities: u32,
    // Physical pointer to the first qTD to execute.
    qtd_ptr: u32,
    // Inline overlay area initialized from the first qTD.
    qtd: Qtd,
}
/// An EHCI USB host controller discovered over PCI.
pub struct Ehci {
    pub pci: PciConfig,
    // MMIO base address of the capability registers (from BAR0).
    pub base: usize,
    pub irq: u8,
}
impl KScheme for Ehci {
    /// IRQ handler: acknowledges all six EHCI interrupt sources by writing
    /// ones to the low bits of USBSTS (write-one-to-clear).
    #[allow(non_snake_case)]
    fn on_irq(&mut self, irq: u8) {
        if irq == self.irq {
            unsafe {
                // CAPLENGTH (offset 0) gives the distance from the capability
                // registers to the operational register block.
                let cap_length = &mut *(self.base as *mut Mmio<u8>);
                let op_base = self.base + cap_length.read() as usize;
                // USBSTS lives at operational offset 4.
                let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);
                usb_sts.writef(0b111111, true);
            }
        }
    }
}
impl Ehci {
    /// Constructs the driver from a PCI function: BAR0 (masked to the MMIO
    /// base) and the interrupt line, then runs `init`.
    ///
    /// # Safety
    /// Caller must ensure `pci` really is an EHCI controller; `init` touches
    /// raw MMIO derived from its BARs.
    pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {
        let mut module = box Ehci {
            pci: pci,
            base: pci.read(0x10) as usize & 0xFFFFFFF0,
            irq: pci.read(0x3C) as u8 & 0xF,
        };
        module.init();
        module
    }

    /// Logs the controller location. The full bring-up sequence (BIOS
    /// ownership handoff, halt/reset, port enumeration) is currently
    /// commented out below and only the log line runs.
    #[allow(non_snake_case)]
    pub unsafe fn init(&mut self) {
        syslog_info!(" + EHCI on: {:X}, IRQ {:X}", self.base, self.irq);
        /*
        self.pci.flag(4, 4, true); // Bus master
        let cap_length = &mut *(self.base as *mut Mmio<u8>);
        let hcs_params = &mut *((self.base + 4) as *mut Mmio<u32>);
        let hcc_params = &mut *((self.base + 8) as *mut Mmio<u32>);
        let ports = (hcs_params.read() & 0b1111) as usize;
        debug!("  PORTS ");
        debug::dd(ports);
        let eecp = (hcc_params.read() >> 8) as u8;
        debug!("  EECP ");
        debug::dh(eecp as usize);
        debug::dl();
        if eecp > 0 {
            if self.pci.read(eecp) & (1 << 24 | 1 << 16) == 1 << 16 {
                debug!("Taking Ownership");
                debug!(" ");
                debug::dh(self.pci.read(eecp) as usize);
                self.pci.flag(eecp, 1 << 24, true);
                debug!(" ");
                debug::dh(self.pci.read(eecp) as usize);
                debug::dl();
                debug!("Waiting");
                debug!(" ");
                debug::dh(self.pci.read(eecp) as usize);
                while self.pci.read(eecp) & (1 << 24 | 1 << 16) != 1 << 24 {}
                debug!(" ");
                debug::dh(self.pci.read(eecp) as usize);
                debug::dl();
            }
        }
        let op_base = self.base + cap_length.read() as usize;
        let usb_cmd = &mut *(op_base as *mut Mmio<u32>);
        let usb_sts = &mut *((op_base + 4) as *mut Mmio<u32>);
        let usb_intr = &mut *((op_base + 8) as *mut Mmio<u32>);
        let config_flag = &mut *((op_base + 0x40) as *mut Mmio<u32>);
        let port_scs = &mut slice::from_raw_parts_mut((op_base + 0x44) as *mut Mmio<u32>, ports);
        // let FRINDEX = (opbase + 0xC) as *mut Mmio<u32>;
        // let CTRLDSSEGMENT = (opbase + 0x10) as *mut Mmio<u32>;
        // let PERIODICLISTBASE = (opbase + 0x14) as *mut Mmio<u32>;
        // let ASYNCLISTADDR = (opbase + 0x18) as *mut Mmio<u32>;
        //Halt
        if usb_sts.read() & 1 << 12 == 0 {
            usb_cmd.writef(0xF, false);
            while ! usb_sts.readf(1 << 12) {}
        }
        //Reset
        usb_cmd.writef(1 << 1, true);
        while usb_cmd.readf(1 << 1) {}
        //Enable
        usb_intr.write(0b111111);
        usb_cmd.writef(1, true);
        config_flag.write(1);
        while usb_sts.readf(1 << 12) {}
        for i in 0..port_scs.len() {
            let port_sc = &mut port_scs[i];
            debugln!("Port {}: {:X}", i, port_sc.read());
            if port_sc.readf(1) {
                debugln!("Device Found");
                if port_sc.readf(1 << 1) {
                    debugln!("Connection Change");
                    port_sc.writef(1 << 1, true);
                }
                if ! port_sc.readf(1 << 2) {
                    debugln!("Reset");
                    while ! port_sc.readf(1 << 8) {
                        port_sc.writef(1 << 8, true);
                    }
                    let mut spin = 1000000000;
                    while spin > 0 {
                        spin -= 1;
                    }
                    while port_sc.readf(1 << 8) {
                        port_sc.writef(1 << 8, false);
                    }
                }
                debugln!("Port Enabled {:X}", port_sc.read());
                self.device(i as u8 + 1);
            }
        }
        */
    }
}
impl Hci for Ehci {
    /// Builds a qTD chain describing `msgs`, hands it to the controller
    /// through a queue head on the async schedule, and spins (yielding via
    /// `context_switch`) until the hardware clears every descriptor's Active
    /// bit.
    ///
    /// Returns the total byte count encoded in the qTD tokens — i.e. the
    /// requested transfer size, not the number of bytes actually moved.
    fn msg(&mut self, address: u8, endpoint: u8, _pipe: Pipe, msgs: &[Packet]) -> usize {
        // The qTDs link to each other through raw addresses of elements that
        // are already inside `tds` (`link_ptr` below). Pre-allocating the
        // final capacity guarantees the Vec never reallocates while we are
        // still pushing — with `Vec::new()` a reallocation would leave the
        // previously captured `next` pointers dangling.
        let mut tds = Vec::with_capacity(msgs.len());
        for msg in msgs.iter().rev() {
            // Link to the previously built qTD, or 1 (terminate bit) for the
            // last descriptor in the chain.
            let link_ptr = match tds.last() {
                Some(td) => (td as *const Qtd) as u32,
                None => 1
            };
            match *msg {
                // SETUP packet: PID code 0b10, Active bit (1 << 7) set.
                Packet::Setup(setup) => tds.push(Qtd {
                    next: link_ptr,
                    next_alt: 1,
                    token: (size_of::<Setup>() as u32) << 16 | 0b10 << 8 | 1 << 7,
                    buffers: [(setup as *const Setup) as u32, 0, 0, 0, 0]
                }),
                // IN packet: PID code 0b01; byte count in token bits 16..31.
                Packet::In(ref data) => tds.push(Qtd {
                    next: link_ptr,
                    next_alt: 1,
                    token: ((data.len() as u32) & 0x7FFF) << 16 | 0b01 << 8 | 1 << 7,
                    buffers: [if data.is_empty() {
                        0
                    } else {
                        data.as_ptr() as u32
                    }, 0, 0, 0, 0]
                }),
                // OUT packet: PID code 0b00.
                Packet::Out(ref data) => tds.push(Qtd {
                    next: link_ptr,
                    next_alt: 1,
                    token: ((data.len() as u32) & 0x7FFF) << 16 | 0b00 << 8 | 1 << 7,
                    buffers: [if data.is_empty() {
                        0
                    } else {
                        data.as_ptr() as u32
                    }, 0, 0, 0, 0]
                })
            }
        }
        let mut count = 0;
        if ! tds.is_empty() {
            unsafe {
                let cap_length = &mut *(self.base as *mut Mmio<u8>);
                let op_base = self.base + cap_length.read() as usize;
                let usb_cmd = &mut *(op_base as *mut Mmio<u32>);
                let async_list = &mut *((op_base + 0x18) as *mut Mmio<u32>);
                // `tds.last()` is the first message (msgs were walked in
                // reverse), so it is the head of the chain.
                let queuehead = box QueueHead {
                    next: 1,
                    characteristics: 64 << 16 | 1 << 15 | 1 << 14 | 0b10 << 12 | (endpoint as u32) << 8 | address as u32,
                    capabilities: 0b01 << 30,
                    qtd_ptr: (tds.last().unwrap() as *const Qtd) as u32,
                    qtd: *tds.last().unwrap()
                };
                //TODO: Calculate actual bytes
                for td in tds.iter().rev() {
                    count += (td.token as usize >> 16) & 0x7FFF;
                }
                // Bit 1 marks the list entry as a queue head.
                async_list.write((&*queuehead as *const QueueHead) as u32 | 2);
                // Run bit (1) plus async schedule enable (1 << 5).
                usb_cmd.writef(1 << 5 | 1, true);
                // Busy-wait for each qTD's Active bit to clear, yielding the
                // CPU between polls.
                for td in tds.iter().rev() {
                    while volatile_load(td as *const Qtd).token & 1 << 7 == 1 << 7 {
                        context_switch();
                    }
                }
                usb_cmd.writef(1 << 5 | 1, false);
                async_list.write(0);
            }
        }
        count
    }
}
|
use std::thread;
use fast_spsc_queue::create_spsc_queue;
/// SPSC queue demo: a consumer thread drains messages while the main thread
/// enqueues 60 of them, then signals completion and joins.
fn main() {
    // Capacity-2 queue; presumably `enqueue` blocks (or spins) when full —
    // TODO confirm against the fast_spsc_queue docs.
    let (mut producer, mut consumer) = create_spsc_queue::<String>(2);
    // Consumer loops until `dequeue` yields None (after `finish`).
    let child = thread::spawn(move || {
        while let Some(msg) = consumer.dequeue() {
            println!("Child received {}", msg);
        };
    });
    for i in 0..60 {
        producer.enqueue(i.to_string());
    };
    // Close the queue so the consumer's loop can terminate.
    producer.finish();
    let _ = child.join();
}
|
//! Simple Game Protocol
use std::io;
use crate::state::*;
use crate::search::*;
use crate::consts::*;
use crate::pgn_parser::*;
use crate::hashtables::*;
/// Reads one line from `stdin` into `buffer` and returns it with surrounding
/// whitespace trimmed. Panics if reading from stdin fails.
pub fn user_input< 'a >( buffer: &'a mut String, stdin: &'a mut io::Stdin ) -> &'a str {
    buffer.clear();
    if let Err( error ) = stdin.read_line( buffer ) {
        panic!( "Error: {}", error );
    }
    buffer.trim()
}
/// Prints `question` and reads an answer, repeating until the answer is one
/// of `options`; returns the accepted answer. (Iterative form of the retry —
/// the original recursed, which is observably identical for valid input.)
pub fn ask( buffer: &mut String, stdin: &mut io::Stdin, question: &str, options: &[ String ] ) -> String {
    loop {
        println!( "{}", question );
        let input = user_input( buffer, stdin ).to_string();
        println!( "" );
        if options.contains( &input ) {
            return input;
        }
    }
}
/// Interactive console game loop: the human plays one color, the engine
/// answers with a fixed-depth negamax search. Type "exit" to quit.
pub fn play() {
    let stdin = &mut io::stdin();
    let buffer = &mut String::new();
    if ask( buffer, stdin, "Wanna play a game of Chess? (y/n)", &[ "y".to_string(), "n".to_string() ] ) == "n" { return (); }
    let color_string = ask( buffer, stdin, "You wanna play White or Black? (w/b)", &[ "w".to_string(), "b".to_string() ] );
    let opponent_color = if color_string == "w" { WHITE } else { BLACK };
    // Fixed search depth (plies) and a 2^24-entry transposition table.
    let search_depth: usize = 4;
    let mut state = State::new();
    let mut tt: HashTable<Eval> = HashTable::new( 24 );
    loop {
        if state.to_move == opponent_color {
            // Human's turn: read a move in PGN-ish notation and apply it;
            // invalid input re-prompts without changing the position.
            println!( "{}\nYour move:\n", state );
            let input = user_input( buffer, stdin );
            if input.contains( "exit" ) { break; }
            match parse_move( input, &state ) {
                Ok( mv ) => {
                    println!( "Parsed: {}\n\n", mv );
                    state.make( &mv );
                },
                Err( error ) => {
                    println!( "\nThere was an error with the input, please try again.\nDETAIL: {}", error );
                    continue;
                },
            }
        } else {
            // Engine's turn: search, then play the first move of the
            // principal variation.
            let mut stats = SearchStats::new();
            let pv = negamax( &mut state, search_depth, -MATE_VALUE, MATE_VALUE, &mut stats, &mut tt );
            let mv = pv.move_list.get( 0 ).unwrap();
            state.make( mv );
            println!( "I just played: {}", mv );
            println!( "My evaluation is {}, at a depth of {}.\n", pv.eval, search_depth );
        }
    }
}
|
use super::{ChatServer, ClientPacket};
use crate::auth::UserInfo;
use crate::chat::{InternalId, SessionState};
use crate::error::*;
use log::*;
impl ChatServer {
    /// Broadcasts a public chat message from `user_id` to every connected
    /// session, after rate-limit, login, validation and ban checks.
    pub(super) fn handle_message(&mut self, user_id: InternalId, content: String) {
        if self.check_ratelimit(user_id, content.clone()) {
            return;
        }
        if self.basic_check(user_id, &content).is_some() {
            // `basic_check` only hands back a shared borrow, so the session
            // is re-fetched here (mutably) to read the sender info.
            let session = self
                .connections
                .get_mut(&user_id)
                .expect("could not find connection");
            // Safe: `basic_check` returned Some only when `user` is Some.
            let info = session.user.as_ref().unwrap();
            info!("User `{}` has written `{}`.", user_id, content);
            let client_packet = ClientPacket::Message {
                author_info: UserInfo {
                    name: info.name.clone(),
                    uuid: info.uuid,
                },
                content,
            };
            // Fan out to every session, including the author's own.
            for session in self.connections.values() {
                if let Err(err) = session.addr.do_send(client_packet.clone()) {
                    warn!("Could not send message to client: {}", err);
                }
            }
        }
    }

    /// Delivers a private message to the first session of `receiver` that
    /// accepts private messages. If none accepts (or sending fails on all of
    /// them), the sender gets a `PrivateMessageNotAccepted` error instead.
    pub(super) fn handle_private_message(
        &mut self,
        user_id: InternalId,
        receiver: String,
        content: String,
    ) {
        if self.check_ratelimit(user_id, content.clone()) {
            return;
        }
        if let Some(sender_session) = self.basic_check(user_id, &content) {
            // Safe: `basic_check` returned Some only when `user` is Some.
            let sender_info = sender_session.user.as_ref().unwrap();
            let receiver_user = match self.users.get(&receiver) {
                Some(user) => user,
                None => {
                    debug!(
                        "User `{}` tried to write to non-existing user `{}`.",
                        user_id, receiver
                    );
                    return;
                }
            };
            // A user may be connected multiple times; try each live session.
            for receiver_session in receiver_user
                .connections
                .iter()
                .filter_map(|id| self.connections.get(id))
            {
                match &receiver_session.user {
                    Some(info) if info.allow_messages => {
                        let client_packet = ClientPacket::PrivateMessage {
                            author_info: UserInfo {
                                name: sender_info.name.clone(),
                                uuid: sender_info.uuid,
                            },
                            content: content.clone(),
                        };
                        info!(
                            "User `{}` has written to `{}` privately.",
                            user_id, receiver
                        );
                        if let Err(err) = receiver_session.addr.do_send(client_packet) {
                            warn!("Could not send private message to client: {}", err);
                        } else {
                            // Delivered once — stop here, skipping the error
                            // notification below.
                            return;
                        }
                    }
                    _ => {}
                }
            }
        }
        // Reached when no receiver session accepted/received the message.
        let _ = self
            .connections
            .get_mut(&user_id)
            .expect("could not find connection")
            .addr
            .do_send(ClientPacket::Error {
                message: ClientError::PrivateMessageNotAccepted,
            });
    }

    /// Common pre-send checks: user must be logged in, `content` must pass
    /// validation, and the user must not be banned. Returns the sender's
    /// session on success; on failure notifies the client and returns None.
    fn basic_check(&self, user_id: InternalId, content: &str) -> Option<&SessionState> {
        let session = self
            .connections
            .get(&user_id)
            .expect("could not find connection");
        if let Some(info) = &session.user {
            if let Err(err) = self.validator.validate(content) {
                info!("User `{}` tried to send invalid message: {}", user_id, err);
                // Only protocol-level errors are forwarded to the client.
                if let Error::AxoChat { source } = err {
                    session
                        .addr
                        .do_send(ClientPacket::Error { message: source })
                        .ok();
                }
                return None;
            }
            if self.moderation.is_banned(&info.uuid) {
                info!("User `{}` tried to send message while banned", user_id);
                session
                    .addr
                    .do_send(ClientPacket::Error {
                        message: ClientError::Banned,
                    })
                    .ok();
                return None;
            }
            Some(session)
        } else {
            info!("`{}` is not logged in.", user_id);
            session
                .addr
                .do_send(ClientPacket::Error {
                    message: ClientError::NotLoggedIn,
                })
                .ok();
            None
        }
    }

    /// Returns `true` (and notifies the client) when `user_id` is currently
    /// rate limited; anonymous sessions are never rate limited here.
    fn check_ratelimit(&mut self, user_id: InternalId, message: String) -> bool {
        let session = self
            .connections
            .get(&user_id)
            .expect("could not find connection");
        if let Some(user) = &session.user {
            let user = self.users.get_mut(&user.name).unwrap();
            if user.rate_limiter.check_new_message(message) {
                info!(
                    "User `{}` tried to send message, but was rate limited.",
                    user_id
                );
                session
                    .addr
                    .do_send(ClientPacket::Error {
                        message: ClientError::RateLimited,
                    })
                    .ok();
                true
            } else {
                false
            }
        } else {
            false
        }
    }
}
|
pub mod channel;
pub mod process;
pub mod start;
pub mod util;
|
/// Build script: copies every `.winmd` file from `<crate>/.windows/winmd`
/// into `<workspace>/target/.windows/winmd`, locating the workspace root by
/// walking up from `OUT_DIR` until a `Cargo.toml` is found.
fn main() {
    let mut source: ::std::path::PathBuf = ::std::env::var("CARGO_MANIFEST_DIR")
        .expect("No `CARGO_MANIFEST_DIR` env var")
        .into();
    source.push(".windows");
    source.push("winmd");
    let mut destination: ::std::path::PathBuf = ::std::env::var("OUT_DIR")
        .expect("No `OUT_DIR` env var")
        .into();
    loop {
        // `pop` returns false at the filesystem root; bail out then instead
        // of spinning forever when no `Cargo.toml` exists on the way up (the
        // previous version looped indefinitely in that case).
        if !destination.pop() {
            return;
        }
        destination.push("Cargo.toml");
        if destination.exists() {
            destination.pop();
            destination.push("target");
            destination.push(".windows");
            destination.push("winmd");
            break;
        }
        // Remove the `Cargo.toml` probe before ascending another level.
        destination.pop();
    }
    // Copy each regular file; failures are deliberately ignored so a missing
    // source directory does not fail the build.
    if let ::std::result::Result::Ok(files) = ::std::fs::read_dir(source) {
        for file in files.filter_map(|file| file.ok()) {
            if let ::std::result::Result::Ok(file_type) = file.file_type() {
                if file_type.is_file() {
                    let path = file.path();
                    if let ::std::option::Option::Some(filename) = path.file_name() {
                        let _ = std::fs::create_dir_all(&destination);
                        destination.push(filename);
                        let _ = ::std::fs::copy(path, &destination);
                        destination.pop();
                    }
                }
            }
        }
    }
}
|
use crate::point::Error;
use crate::Result;
/// The ASPRS classification table.
///
/// Classifications can be created from u8s and converted back into them:
///
/// ```
/// use las::point::Classification;
/// let classification = Classification::new(2).unwrap();
/// assert_eq!(Classification::Ground, classification);
/// assert_eq!(2, u8::from(classification));
/// ```
///
/// We make one modification to this table: we remove `OverlapPoints`, code 12. Las 1.4 added the
/// extended point formats, which include an overlap bit. The overlap bit is intended to allow a
/// point to both be an overlap point and contain some other classification.
///
/// Here's how we deal with that change:
///
/// - If the point format doesn't support the overlap bit, the classification is overwritten with
/// the code for overlap points (12). On ingest, points with an overlap classification are given
/// the `Unclassified` code and `Point::is_overlap` is set to `true`.
/// - If the point format does support the overlap bit, that is preferred.
///
/// Because of this change, trying to create a classification with code 12 is an error:
///
/// ```
/// use las::point::Classification;
/// assert!(Classification::new(12).is_err());
/// ```
// `Eq` and `Hash` are derived in addition to `PartialEq`: every payload is a
// plain `u8`, so full equality holds, and this lets classifications be used
// as `HashMap`/`HashSet` keys.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[allow(missing_docs)]
pub enum Classification {
    CreatedNeverClassified,
    Unclassified,
    Ground,
    LowVegetation,
    MediumVegetation,
    HighVegetation,
    Building,
    LowPoint,
    ModelKeyPoint,
    Water,
    Rail,
    RoadSurface,
    // Deliberately no variant for code 12 (overlap) — see the type docs.
    WireGuard,
    WireConductor,
    TransmissionTower,
    WireStructureConnector,
    BridgeDeck,
    HighNoise,
    /// Codes 19-63, reserved by the specification.
    Reserved(u8),
    /// Codes 64-255, free for user-defined semantics.
    UserDefinable(u8),
}
impl Classification {
    /// Creates a new classification.
    ///
    /// Throws an error if the classification is 12 (overlap).
    ///
    /// # Examples
    ///
    /// ```
    /// use las::point::Classification;
    /// assert_eq!(Classification::Ground, Classification::new(2).unwrap());
    /// assert!(Classification::new(12).is_err());
    /// ```
    pub fn new(n: u8) -> Result<Classification> {
        Ok(match n {
            0 => Classification::CreatedNeverClassified,
            1 => Classification::Unclassified,
            2 => Classification::Ground,
            3 => Classification::LowVegetation,
            4 => Classification::MediumVegetation,
            5 => Classification::HighVegetation,
            6 => Classification::Building,
            7 => Classification::LowPoint,
            8 => Classification::ModelKeyPoint,
            9 => Classification::Water,
            10 => Classification::Rail,
            11 => Classification::RoadSurface,
            // Code 12 (overlap) is deliberately rejected; see the type-level
            // docs for how overlap points are represented instead.
            12 => return Err(Error::OverlapClassification.into()),
            13 => Classification::WireGuard,
            14 => Classification::WireConductor,
            15 => Classification::TransmissionTower,
            16 => Classification::WireStructureConnector,
            17 => Classification::BridgeDeck,
            18 => Classification::HighNoise,
            // The two ranges below make the match exhaustive over u8.
            19..=63 => Classification::Reserved(n),
            64..=255 => Classification::UserDefinable(n),
        })
    }
}
impl From<Classification> for u8 {
    /// Converts back to the numeric ASPRS code. Inverse of
    /// `Classification::new`; code 12 can never be produced because there is
    /// no overlap variant.
    fn from(classification: Classification) -> u8 {
        match classification {
            Classification::CreatedNeverClassified => 0,
            Classification::Unclassified => 1,
            Classification::Ground => 2,
            Classification::LowVegetation => 3,
            Classification::MediumVegetation => 4,
            Classification::HighVegetation => 5,
            Classification::Building => 6,
            Classification::LowPoint => 7,
            Classification::ModelKeyPoint => 8,
            Classification::Water => 9,
            Classification::Rail => 10,
            Classification::RoadSurface => 11,
            Classification::WireGuard => 13,
            Classification::WireConductor => 14,
            Classification::TransmissionTower => 15,
            Classification::WireStructureConnector => 16,
            Classification::BridgeDeck => 17,
            Classification::HighNoise => 18,
            // Catch-all variants carry their own code.
            Classification::Reserved(n) | Classification::UserDefinable(n) => n,
        }
    }
}
impl Default for Classification {
    /// The default is "created, never classified" (code 0).
    fn default() -> Classification {
        Classification::CreatedNeverClassified
    }
}
|
use webrtc_sctp::association::*;
use webrtc_sctp::chunk::chunk_payload_data::PayloadProtocolIdentifier;
use webrtc_sctp::error::*;
use webrtc_sctp::stream::*;
use bytes::Bytes;
use clap::{App, AppSettings, Arg};
//use std::io::Write;
use std::sync::Arc;
use tokio::net::UdpSocket;
use tokio::signal;
use tokio::sync::mpsc;
// RUST_LOG=trace cargo run --color=always --package webrtc-sctp --example ping -- --server 0.0.0.0:5678
#[tokio::main]
async fn main() -> Result<(), Error> {
    /*env_logger::Builder::new()
    .format(|buf, record| {
        writeln!(
            buf,
            "{}:{} [{}] {} - {}",
            record.file().unwrap_or("unknown"),
            record.line().unwrap_or(0),
            record.level(),
            chrono::Local::now().format("%H:%M:%S.%6f"),
            record.args()
        )
    })
    .filter(None, log::LevelFilter::Trace)
    .init();*/
    // CLI: a single required `--server host:port` argument.
    let mut app = App::new("SCTP Ping")
        .version("0.1.0")
        .author("Rain Liu <yliu@webrtc.rs>")
        .about("An example of SCTP Client")
        .setting(AppSettings::DeriveDisplayOrder)
        .setting(AppSettings::SubcommandsNegateReqs)
        .arg(
            Arg::with_name("FULLHELP")
                .help("Prints more detailed help information")
                .long("fullhelp"),
        )
        .arg(
            Arg::with_name("server")
                .required_unless("FULLHELP")
                .takes_value(true)
                .long("server")
                .help("SCTP Server name."),
        );
    let matches = app.clone().get_matches();
    if matches.is_present("FULLHELP") {
        app.print_long_help().unwrap();
        std::process::exit(0);
    }
    let server = matches.value_of("server").unwrap();
    // SCTP runs over a connected UDP socket here.
    let conn = Arc::new(UdpSocket::bind("0.0.0.0:0").await.unwrap());
    conn.connect(server).await.unwrap();
    println!("connecting {}..", server);
    let config = Config {
        net_conn: conn,
        max_receive_buffer_size: 0,
        max_message_size: 0,
        name: "client".to_owned(),
    };
    let a = Association::client(config).await?;
    println!("created a client");
    let stream = a.open_stream(0, PayloadProtocolIdentifier::String).await?;
    println!("opened a stream");
    // set unordered = true and 10ms threshold for dropping packets
    stream.set_reliability_params(true, ReliabilityType::Timed, 10);
    // Writer task: sends ten "ping N" messages, then finishes.
    let stream_tx = Arc::clone(&stream);
    tokio::spawn(async move {
        let mut ping_seq_num = 0;
        while ping_seq_num < 10 {
            let ping_msg = format!("ping {}", ping_seq_num);
            println!("sent: {}", ping_msg);
            stream_tx.write(&Bytes::from(ping_msg)).await?;
            ping_seq_num += 1;
        }
        println!("finished send ping");
        Ok::<(), Error>(())
    });
    // Reader task: echoes pongs until the stream closes, then signals
    // completion by dropping `done_tx`.
    let (done_tx, mut done_rx) = mpsc::channel::<()>(1);
    let stream_rx = Arc::clone(&stream);
    tokio::spawn(async move {
        let mut buff = vec![0u8; 1024];
        while let Ok(n) = stream_rx.read(&mut buff).await {
            let pong_msg = String::from_utf8(buff[..n].to_vec()).unwrap();
            println!("received: {}", pong_msg);
        }
        println!("finished recv pong");
        drop(done_tx);
    });
    println!("Waiting for Ctrl-C...");
    signal::ctrl_c().await.expect("failed to listen for event");
    println!("Closing stream and association...");
    stream.close().await?;
    a.close().await?;
    // Wait for the reader task to observe the closed stream.
    let _ = done_rx.recv().await;
    Ok(())
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use core::{
convert::TryFrom,
fmt::{Debug, Display},
ops::{
Add, AddAssign, BitAnd, Div, DivAssign, Mul, MulAssign, Neg, Shl, Shr, ShrAssign, Sub,
SubAssign,
},
};
use utils::{collections::Vec, AsBytes, Deserializable, DeserializationError, Serializable};
// FIELD ELEMENT
// ================================================================================================
/// Defines an element in a finite field.
///
/// This trait defines basic arithmetic operations for elements in
/// [finite fields](https://en.wikipedia.org/wiki/Finite_field) (e.g. addition, subtraction,
/// multiplication, division) as well as several convenience functions (e.g. double, square, cube).
/// Moreover, it defines interfaces for serializing and deserializing field elements.
///
/// The elements could be in a prime field or an extension of a prime field. Currently, only
/// quadratic field extensions are supported.
pub trait FieldElement:
    Copy
    + Clone
    + Debug
    + Display
    + Default
    + Send
    + Sync
    + Eq
    + PartialEq
    + Sized
    + Add<Self, Output = Self>
    + Sub<Self, Output = Self>
    + Mul<Self, Output = Self>
    + Div<Self, Output = Self>
    + AddAssign<Self>
    + SubAssign<Self>
    + MulAssign<Self>
    + DivAssign<Self>
    + Neg<Output = Self>
    + From<<Self as FieldElement>::BaseField>
    + From<u128>
    + From<u64>
    + From<u32>
    + From<u16>
    + From<u8>
    + for<'a> TryFrom<&'a [u8]>
    + AsBytes
    + Serializable
    + Deserializable
{
    /// A type defining positive integers big enough to describe a field modulus for
    /// `Self::BaseField` with no loss of precision.
    type PositiveInteger: Debug
        + Copy
        + PartialEq
        + PartialOrd
        + ShrAssign
        + Shl<u32, Output = Self::PositiveInteger>
        + Shr<u32, Output = Self::PositiveInteger>
        + BitAnd<Output = Self::PositiveInteger>
        + From<u32>
        + From<u64>;

    /// Base field type for this finite field. For prime fields, `BaseField` should be set
    /// to `Self`.
    type BaseField: StarkField;

    /// Number of bytes needed to encode an element
    const ELEMENT_BYTES: usize;

    /// True if internal representation of an element can be redundant - i.e., multiple
    /// internal representations map to the same canonical representation.
    const IS_MALLEABLE: bool;

    /// The additive identity.
    const ZERO: Self;

    /// The multiplicative identity.
    const ONE: Self;

    // ALGEBRA
    // --------------------------------------------------------------------------------------------

    /// Returns this field element added to itself.
    fn double(self) -> Self {
        self + self
    }

    /// Returns this field element raised to power 2.
    fn square(self) -> Self {
        self * self
    }

    /// Returns this field element raised to power 3.
    fn cube(self) -> Self {
        self * self * self
    }

    /// Exponentiates this field element by `power` parameter.
    fn exp(self, power: Self::PositiveInteger) -> Self {
        // Standard square-and-multiply: scan the exponent's bits from least
        // to most significant, squaring the base at every step and
        // multiplying the result in whenever the current bit is set.
        let mut r = Self::ONE;
        let mut b = self;
        let mut p = power;
        let int_zero = Self::PositiveInteger::from(0u32);
        let int_one = Self::PositiveInteger::from(1u32);
        // Fast paths: x^0 = 1 and 0^p = 0 (for p != 0).
        if p == int_zero {
            return Self::ONE;
        } else if b == Self::ZERO {
            return Self::ZERO;
        }
        while p > int_zero {
            if p & int_one == int_one {
                r *= b;
            }
            p >>= int_one;
            b = b.square();
        }
        r
    }

    /// Returns a multiplicative inverse of this field element. If this element is ZERO, ZERO is
    /// returned.
    fn inv(self) -> Self;

    /// Returns a conjugate of this field element.
    fn conjugate(&self) -> Self;

    // RANDOMNESS
    // --------------------------------------------------------------------------------------------

    /// Returns a cryptographically-secure random element drawn uniformly from the entire field.
    #[cfg(feature = "std")]
    fn rand() -> Self;

    /// Returns a field element if the set of bytes forms a valid field element, otherwise returns
    /// None. The element is expected to be in canonical representation. This function is primarily
    /// intended for sampling random field elements from a hash function output.
    fn from_random_bytes(bytes: &[u8]) -> Option<Self>;

    // SERIALIZATION / DESERIALIZATION
    // --------------------------------------------------------------------------------------------

    /// Converts a vector of field elements into a vector of bytes. The elements may be in the
    /// internal representation rather than in the canonical representation. This conversion is
    /// intended to be zero-copy (i.e. by re-interpreting the underlying memory).
    fn elements_into_bytes(elements: Vec<Self>) -> Vec<u8>;

    /// Converts a list of elements into a list of bytes. The elements may be in the internal
    /// representation rather than in the canonical representation. This conversion is intended
    /// to be zero-copy (i.e. by re-interpreting the underlying memory).
    fn elements_as_bytes(elements: &[Self]) -> &[u8];

    /// Converts a list of bytes into a list of field elements. The elements are assumed to
    /// encoded in the internal representation rather than in the canonical representation. The
    /// conversion is intended to be zero-copy (i.e. by re-interpreting the underlying memory).
    ///
    /// # Errors
    /// An error is returned if:
    /// * Memory alignment of `bytes` does not match memory alignment of field element data.
    /// * Length of `bytes` does not divide into whole number of elements.
    ///
    /// # Safety
    /// This function is unsafe because it does not check whether underlying bytes represent valid
    /// field elements according to their internal representation.
    unsafe fn bytes_as_elements(bytes: &[u8]) -> Result<&[Self], DeserializationError>;

    // INITIALIZATION
    // --------------------------------------------------------------------------------------------

    /// Returns a vector of length `n` initialized with all ZERO elements.
    ///
    /// Specialized implementations of this function may be faster than the generic implementation.
    fn zeroed_vector(n: usize) -> Vec<Self> {
        vec![Self::ZERO; n]
    }

    /// Returns a vector of `n` pseudo-random elements drawn uniformly from the entire
    /// field based on the provided `seed`.
    #[cfg(feature = "std")]
    fn prng_vector(seed: [u8; 32], n: usize) -> Vec<Self>;

    // UTILITIES
    // --------------------------------------------------------------------------------------------

    /// Normalizes internal representation of this element.
    ///
    /// Normalization is applicable only to malleable field elements; for non-malleable elements
    /// this is a no-op.
    fn normalize(&mut self);
}
// STARK FIELD
// ================================================================================================

/// Defines an element in a STARK-friendly finite field.
///
/// A STARK-friendly field is defined as a prime field with high two-adicity. That is, the
/// modulus of the field should be a prime number of the form `k` * 2^`n` + 1 (a Proth prime),
/// where `n` is relatively large (e.g., greater than 32).
pub trait StarkField: FieldElement<BaseField = Self> {
    /// Type describing quadratic extension of this StarkField.
    type QuadExtension: FieldElement<BaseField = Self>;

    /// Prime modulus of the field. Must be of the form `k` * 2^`n` + 1 (a Proth prime).
    /// This ensures that the field has high 2-adicity.
    const MODULUS: Self::PositiveInteger;

    /// The number of bits needed to represent `Self::MODULUS`.
    const MODULUS_BITS: u32;

    /// A multiplicative generator of the field.
    const GENERATOR: Self;

    /// Let Self::MODULUS = `k` * 2^`n` + 1; then, TWO_ADICITY is `n`.
    const TWO_ADICITY: u32;

    /// Let Self::MODULUS = `k` * 2^`n` + 1; then, TWO_ADIC_ROOT_OF_UNITY is 2^`n` root of unity
    /// computed as Self::GENERATOR^`k`.
    const TWO_ADIC_ROOT_OF_UNITY: Self;

    /// Returns the root of unity of order 2^`n`.
    ///
    /// The root is derived from `TWO_ADIC_ROOT_OF_UNITY` by raising it to the power
    /// 2^(`TWO_ADICITY` - `n`), which reduces its order from 2^`TWO_ADICITY` down to 2^`n`.
    ///
    /// # Panics
    /// Panics if the root of unity for the specified order does not exist in this field
    /// (i.e. `n` is 0 or exceeds `TWO_ADICITY`).
    fn get_root_of_unity(n: u32) -> Self {
        assert!(n != 0, "cannot get root of unity for n = 0");
        assert!(
            n <= Self::TWO_ADICITY,
            "order cannot exceed 2^{}",
            Self::TWO_ADICITY
        );
        let power = Self::PositiveInteger::from(1u32) << (Self::TWO_ADICITY - n);
        Self::TWO_ADIC_ROOT_OF_UNITY.exp(power)
    }

    /// Returns byte representation of the field modulus in little-endian byte order.
    fn get_modulus_le_bytes() -> Vec<u8>;

    /// Returns a canonical integer representation of the field element.
    fn as_int(&self) -> Self::PositiveInteger;
}
|
use crate::{
bet_database::{BetId, BetState},
bitcoin::{Amount, Script},
change::{BinScript, Change},
party::Party,
ValueChoice,
};
use anyhow::{anyhow, Context};
use bdk::{bitcoin, bitcoin::Denomination, database::BatchDatabase, FeeRate};
use core::str::FromStr;
use olivia_core::{EventId, OracleEvent, OracleId};
use olivia_secp256k1::{
schnorr_fun::fun::{marker::*, Point},
Secp256k1,
};
use super::BetArgs;
/// A [`Proposal`] tagged with its wire-format version. The version is
/// identified by an emoji prefix on the encoded string ("📣" for `One`).
#[derive(Clone, Debug)]
pub enum VersionedProposal {
    /// Version-one text format: `📣<value>#<oracle>#<event-id>#<base2048-payload>`.
    One(Proposal),
}
impl VersionedProposal {
    /// Returns the emoji prefix that identifies this proposal's
    /// serialization version.
    fn version_marker(&self) -> String {
        let marker = match self {
            VersionedProposal::One(..) => "📣",
        };
        marker.to_string()
    }
}
impl From<VersionedProposal> for Proposal {
    /// Strips the version tag, returning the inner proposal.
    fn from(vp: VersionedProposal) -> Self {
        match vp {
            VersionedProposal::One(proposal) => proposal,
        }
    }
}
/// A bet proposal: the proposer commits coins, a public key, and an oracle
/// event to bet on. Serialized to/from text via [`VersionedProposal`].
#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct Proposal {
    /// Identifier of the oracle the bet relies on.
    pub oracle: String,
    /// The oracle event being bet on.
    pub event_id: EventId,
    /// The amount the proposer wants to bet (serialized as satoshis).
    #[serde(with = "bitcoin::util::amount::serde::as_sat")]
    pub value: Amount,
    /// UTXOs the proposer will use to fund the bet.
    pub inputs: Vec<bdk::bitcoin::OutPoint>,
    /// The proposer's public key for this bet.
    pub public_key: Point<EvenY>,
    /// Script to send the proposer's change to, if any change is needed.
    pub change_script: Option<BinScript>,
}
impl Proposal {
    /// Wraps the proposal in the current (version one) wire format.
    pub fn into_versioned(self) -> VersionedProposal {
        VersionedProposal::One(self)
    }
}
/// A proposal together with local-only data, persisted in the bet database
/// as part of the `Proposed` bet state.
#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct LocalProposal {
    /// The proposal as shared with the counterparty.
    pub proposal: Proposal,
    /// The full oracle event the proposal refers to.
    pub oracle_event: OracleEvent<Secp256k1>,
    /// Change created while funding the proposal, if any.
    pub change: Option<Change>,
}
/// The binary tail of a serialized proposal: the fields that are base2048
/// encoded into the final `#`-separated segment of the text format (see the
/// `Display` impl of [`VersionedProposal`]).
#[derive(Debug, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
pub struct Payload {
    // Mirrors `Proposal::inputs`.
    pub inputs: Vec<bdk::bitcoin::OutPoint>,
    // Mirrors `Proposal::public_key`.
    pub public_key: Point<EvenY>,
    // Mirrors `Proposal::change_script`.
    pub change_script: Option<BinScript>,
}
impl Proposal {
    /// Renders a short human-readable description of the bet.
    pub fn to_sentence(&self) -> String {
        format!(
            "Wants to bet {} on {} relying on {} as the oracle",
            self.value, self.event_id, self.oracle
        )
    }
}
impl core::fmt::Display for VersionedProposal {
    /// Encodes the proposal in its text wire format:
    /// `<version-marker><value>#<oracle>#<event-id>#<base2048-payload>`.
    /// This is the inverse of the `FromStr` impl below.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            VersionedProposal::One(proposal) => {
                // The structured fields travel as a compact base2048 blob at
                // the end of the string.
                let payload = Payload {
                    inputs: proposal.inputs.clone(),
                    public_key: proposal.public_key,
                    change_script: proposal.change_script.clone(),
                };
                write!(
                    f,
                    "{}{}#{}#{}#{}",
                    self.version_marker(),
                    proposal
                        .value
                        .to_string_in(Denomination::Bitcoin)
                        // FIXME: This looks dangerous?
                        // NOTE(review): trimming trailing '0's from a
                        // whole-BTC amount can leave a trailing '.' (e.g.
                        // "1.") — confirm `Amount::from_str_in` round-trips
                        // such strings before changing anything here.
                        .trim_end_matches('0'),
                    proposal.oracle,
                    proposal.event_id,
                    crate::encode::serialize_base2048(&payload)
                )
            }
        }
    }
}
impl FromStr for VersionedProposal {
    type Err = anyhow::Error;

    /// Parses the text wire format produced by `Display`:
    /// `📣<value>#<oracle>#<event-id>#<base2048-payload>`.
    ///
    /// Any leading version markers are stripped, so input without the emoji
    /// also parses.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let string = string.trim_start_matches("📣");
        let mut segments = string.split('#');
        // The value segment was rendered in whole-bitcoin denomination.
        let value = Amount::from_str_in(
            segments.next().ok_or_else(|| anyhow!("missing amount"))?,
            Denomination::Bitcoin,
        )?;
        let oracle = segments
            .next()
            // BUG FIX: error message previously read "missing oralce".
            .ok_or_else(|| anyhow!("missing oracle"))?
            .to_string();
        let event_id = EventId::from_str(segments.next().ok_or_else(|| anyhow!("missing event id"))?)?;
        let base2048_encoded_payload = segments
            .next()
            .ok_or_else(|| anyhow!("missing base2048 encoded data"))?;
        let payload: Payload = crate::encode::deserialize_base2048(base2048_encoded_payload)?;
        Ok(VersionedProposal::One(Proposal {
            oracle,
            value,
            event_id,
            inputs: payload.inputs,
            public_key: payload.public_key,
            change_script: payload.change_script,
        }))
    }
}
impl<D: BatchDatabase> Party<bdk::blockchain::EsploraBlockchain, D> {
    /// Creates a bet proposal for `oracle_event` and persists it as a
    /// `Proposed` bet, returning the new bet id alongside the proposal.
    ///
    /// A throwaway transaction is built at a zero feerate (the offerer pays
    /// the fee) purely to perform coin selection; it is never signed or
    /// broadcast. Its inputs and change output become part of the proposal.
    ///
    /// # Errors
    /// Fails if the event is not binary, if coin selection fails, or if the
    /// bet cannot be inserted into the database.
    pub fn make_proposal(
        &self,
        oracle_id: OracleId,
        oracle_event: OracleEvent<Secp256k1>,
        args: BetArgs,
    ) -> anyhow::Result<(BetId, Proposal)> {
        let event_id = &oracle_event.event.id;
        // BUG FIX: this previously read `!event_id.n_outcomes() == 2`, which
        // applies bitwise NOT to the outcome count and compares *that* to 2 —
        // so non-binary events slipped through. The intent is to reject any
        // event without exactly two outcomes.
        if event_id.n_outcomes() != 2 {
            return Err(anyhow!(
                "Cannot make a bet on {} since it isn't binary",
                event_id
            ));
        }
        let mut builder = self.wallet.build_tx();
        // we use a 0 feerate because the offerer will pay the fee
        builder.fee_rate(FeeRate::from_sat_per_vb(0.0));
        // An empty script is used as a placeholder recipient so the builder
        // performs coin selection for the bet value.
        match args.value {
            ValueChoice::All => {
                builder.drain_wallet().drain_to(Script::default());
            }
            ValueChoice::Amount(amount) => {
                builder.add_recipient(Script::default(), amount.as_sat());
            }
        }
        args.apply_args(self.bet_db(), &mut builder)?;
        let (psbt, txdetails) = builder
            .finish()
            .context("Failed to gather proposal outputs")?;
        // Zero feerate must produce a zero fee; anything else is a bug.
        assert_eq!(txdetails.fee, Some(0));
        let outputs = &psbt.global.unsigned_tx.output;
        let tx_inputs = psbt
            .global
            .unsigned_tx
            .input
            .iter()
            .map(|txin| txin.previous_output.clone())
            .collect();
        // The placeholder output carries the actual bet value (exact for
        // `Amount`, computed by the builder for `All`).
        let value = Amount::from_sat(
            outputs
                .iter()
                .find(|o| o.script_pubkey == Script::default())
                .unwrap()
                .value,
        );
        let change = if outputs.len() > 1 {
            if outputs.len() != 2 {
                return Err(anyhow!(
                    "wallet produced psbt with too many outputs: {:?}",
                    psbt
                ));
            }
            Some(
                outputs
                    .iter()
                    .find(|output| output.script_pubkey != Script::default())
                    .map(|output| Change::new(output.value, output.script_pubkey.clone()))
                    .expect("bdk change script_pubkey will not be empty"),
            )
        } else {
            None
        };
        let mut proposal = Proposal {
            oracle: oracle_id,
            event_id: event_id.clone(),
            value,
            inputs: tx_inputs,
            // Placeholder: the real key is derived from the proposal's own
            // contents, so it is filled in immediately below.
            public_key: crate::placeholder_point(),
            change_script: change.as_ref().map(|x| x.binscript().clone()),
        };
        let keypair = self.keychain.get_key_for_proposal(&proposal);
        proposal.public_key = keypair.public_key;
        let local_proposal = LocalProposal {
            proposal: proposal.clone(),
            oracle_event,
            change,
        };
        let new_bet = BetState::Proposed { local_proposal };
        let bet_id = self.bet_db.insert_bet(new_bet)?;
        Ok((bet_id, proposal))
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use bdk::bitcoin::{hashes::Hash, Address, OutPoint, Txid};
    use olivia_secp256k1::schnorr_fun::fun::{s, G};

    /// Round-trips a proposal through the text encoding and back, both
    /// without and with a change script.
    #[test]
    fn to_and_from_str() {
        use std::string::ToString;
        // Fixed scalar so the public key (and thus the encoding) is deterministic.
        let forty_two = Point::<EvenY>::from_scalar_mul(G, &mut s!(42));
        let change_address =
            Address::from_str("bc1qwqdg6squsna38e46795at95yu9atm8azzmyvckulcc7kytlcckxswvvzej")
                .unwrap();
        let mut proposal = Proposal {
            oracle: "h00.ooo".into(),
            value: Amount::from_str("0.1 BTC").unwrap(),
            event_id: EventId::from_str("/random/2020-09-25T08:00:00/heads_tails.winner").unwrap(),
            inputs: vec![
                OutPoint::new(Txid::from_slice(&[1u8; 32]).unwrap(), 0),
                OutPoint::new(Txid::from_slice(&[2u8; 32]).unwrap(), 1),
            ],
            public_key: forty_two,
            change_script: None,
        };
        let encoded = proposal.clone().into_versioned().to_string();
        let decoded = VersionedProposal::from_str(&encoded).unwrap();
        assert_eq!(proposal, decoded.into());
        // Same round trip, now exercising the change-script branch.
        proposal.change_script = Some(change_address.script_pubkey().into());
        let encoded = proposal.clone().into_versioned().to_string();
        let decoded = VersionedProposal::from_str(&encoded).unwrap();
        assert_eq!(proposal, decoded.into());
    }
}
|
use serenity::framework::standard::{macros::command, Args, CommandResult};
use serenity::model::prelude::{Message, MessageId};
use serenity::prelude::Context;
use crate::core::checks::ADMIN_CHECK;
#[command]
#[checks(Admin)]
async fn cleanup(context: &Context, message: &Message, args: Args) -> CommandResult {
    // Deletes the N most recent messages before the invoking message
    // (N = first argument, default 10), then deletes the invoking message.
    //
    // TODO handle numbers over 100 by paginating
    // - add a way to filter users
    //
    // Discord caps both history retrieval and bulk deletion at 100 messages
    // per request, so clamp the requested count rather than letting the API
    // call fail outright.
    let count = args.parse::<u64>().unwrap_or(10).min(100);
    let msgs_ids: Vec<MessageId> = message
        .channel_id
        .messages(&context.http, |retriever| {
            retriever.before(message.id).limit(count)
        })
        .await?
        .into_iter()
        .map(|msg| msg.id)
        .collect();
    message
        .channel_id
        .delete_messages(&context, msgs_ids)
        .await?;
    message.delete(&context).await?;
    Ok(())
}
|
#[cfg(any(feature = "backend_egl", feature = "renderer_gl"))]
extern crate gl_generator;
#[cfg(any(feature = "backend_egl", feature = "renderer_gl"))]
use gl_generator::{Api, Fallbacks, Profile, Registry};
use std::{env, fs::File, path::PathBuf};
// Build script: generates EGL and/or GLES bindings into OUT_DIR, gated on
// which cargo features are enabled.
#[cfg(any(feature = "backend_egl", feature = "renderer_gl"))]
fn main() {
    let dest = PathBuf::from(&env::var("OUT_DIR").unwrap());
    println!("cargo:rerun-if-changed=build.rs");
    if env::var_os("CARGO_FEATURE_BACKEND_EGL").is_some() {
        // EGL 1.5 core with global (static) function pointers, plus the
        // platform/context/image extensions listed below.
        let mut file = File::create(&dest.join("egl_bindings.rs")).unwrap();
        Registry::new(
            Api::Egl,
            (1, 5),
            Profile::Core,
            Fallbacks::All,
            [
                "EGL_KHR_create_context",
                "EGL_EXT_create_context_robustness",
                "EGL_KHR_create_context_no_error",
                "EGL_KHR_platform_x11",
                "EGL_KHR_platform_android",
                "EGL_KHR_platform_wayland",
                "EGL_KHR_platform_gbm",
                "EGL_EXT_platform_base",
                "EGL_EXT_platform_x11",
                "EGL_MESA_platform_gbm",
                "EGL_EXT_platform_wayland",
                "EGL_EXT_platform_device",
                "EGL_KHR_image_base",
            ],
        )
        .write_bindings(gl_generator::GlobalGenerator, &mut file)
        .unwrap();
    }
    if env::var_os("CARGO_FEATURE_RENDERER_GL").is_some() {
        // GLES 3.2 with struct-based (per-instance) function loading; only
        // the EGL-image extension is requested.
        let mut file = File::create(&dest.join("gl_bindings.rs")).unwrap();
        Registry::new(
            Api::Gles2,
            (3, 2),
            Profile::Compatibility,
            Fallbacks::None,
            ["GL_OES_EGL_image"],
        )
        .write_bindings(gl_generator::StructGenerator, &mut file)
        .unwrap();
    }
}
// Without any GL-related feature there are no bindings to generate.
#[cfg(not(any(feature = "backend_egl", feature = "renderer_gl")))]
fn main() {}
|
// inside lib.rs, only the following lines should be in here
pub mod error;
pub mod instruction;
pub mod processor;
pub mod state;
#[cfg(not(feature = "no-entrypoint"))]
pub mod entrypoint; |
#[cfg(test)]
mod tests {
    extern crate pqcrypto_classicmceliece;
    use self::pqcrypto_classicmceliece::mceliece8192128::*;

    /// KEM round-trip smoke test: encapsulating against a fresh public key
    /// and decapsulating with the matching secret key must produce the same
    /// shared secret.
    #[test]
    fn basic_classicmceliece_test() {
        let (pk, sk) = keypair();
        let (ss1, ct) = encapsulate(&pk);
        let ss2 = decapsulate(&ct, &sk);
        assert!(ss1 == ss2);
    }
}
|
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(dead_code)]
include!(concat!(env!("OUT_DIR"), "/nginx.rs"));
#[no_mangle]
/// nginx content handler: asynchronously reads the client request body and
/// delegates the real work to `read_body_handler` once the body is available.
pub unsafe extern "C" fn ngx_http_calculator_handler(
    r: *mut ngx_http_request_t,
) -> ngx_int_t {
    let rc = ngx_http_read_client_request_body(r, Some(read_body_handler));
    // NOTE(review): 0 is assumed to be NGX_OK; any other status (e.g.
    // NGX_AGAIN or an HTTP error code) is propagated as-is — confirm against
    // the generated bindings.
    if rc != 0 {
        return rc;
    }
    0
}
/// Body-read completion callback: interprets the request body as a
/// calculator expression, evaluates it, and writes the result back as the
/// HTTP response. All failures are logged to stderr; on failure no response
/// is written here.
unsafe extern "C" fn read_body_handler(r: *mut ngx_http_request_t) {
    if r.is_null() {
        eprintln!("got null request in body handler");
        return;
    }
    let request = &mut *r;
    let body = match request_body_as_str(request) {
        Ok(body) => body,
        Err(e) => {
            eprintln!("failed to parse body: {}", e);
            return;
        }
    };
    match calculate::evaluate(body) {
        Ok(result) => {
            // Plain-text decimal rendering of the computed value.
            let response_body = format!("{}", result);
            match write_response(request, &response_body, 200) {
                Ok(()) => {}
                Err(e) => {
                    eprintln!("failed to write HTTP response: {}", e);
                }
            }
        }
        Err(e) => eprintln!("{} => error: {}", body, e),
    }
}
/// Borrows the request body out of nginx's buffer chain as a UTF-8 string.
///
/// NOTE(review): only the first buffer of the chain is read; bodies that
/// span multiple buffers (or were spooled to a temp file) would be
/// truncated — confirm this is acceptable for the expected body sizes.
unsafe fn request_body_as_str<'a>(
    request: &'a ngx_http_request_t,
) -> Result<&'a str, &'static str> {
    if request.request_body.is_null()
        || (*request.request_body).bufs.is_null()
        || (*(*request.request_body).bufs).buf.is_null()
    {
        return Err("Request body buffers were not initialized as expected");
    }
    let buf = (*(*request.request_body).bufs).buf;
    // The live region of an ngx_buf_t is [pos, last).
    let start = (*buf).pos;
    let len = (*buf).last.offset_from(start) as usize;
    let body_bytes = std::slice::from_raw_parts(start, len);
    let body_str = std::str::from_utf8(body_bytes)
        .map_err(|_| "Body contains invalid UTF-8")?;
    Ok(body_str)
}
/// Sends a complete HTTP response (status, Content-Length, body) for the
/// request. Both the buffer descriptor and the body copy are allocated from
/// the request's pool so nginx frees them together with the request.
unsafe fn write_response(
    request: &mut ngx_http_request_t,
    response_body: &str,
    status_code: ngx_uint_t,
) -> Result<(), &'static str> {
    let headers = &mut request.headers_out;
    headers.status = status_code;
    let response_bytes = response_body.as_bytes();
    headers.content_length_n = response_bytes.len() as off_t;
    // Headers must be sent before pushing the body through the filter chain.
    // NOTE(review): 0 is assumed to be NGX_OK — confirm against bindings.
    let rc = ngx_http_send_header(request);
    if rc != 0 {
        return Err("failed to send headers");
    }
    let buf_p =
        ngx_pcalloc(request.pool, std::mem::size_of::<ngx_buf_t>() as size_t)
        as *mut ngx_buf_t;
    if buf_p.is_null() {
        return Err("Failed to allocate buffer");
    }
    let buf = &mut (*buf_p);
    // Mark this as the final, in-memory buffer of the response.
    buf.set_last_buf(1);
    buf.set_last_in_chain(1);
    buf.set_memory(1);
    let response_buffer =
        ngx_pcalloc(request.pool, response_bytes.len() as size_t);
    if response_buffer.is_null() {
        return Err("Failed to allocate response buffer");
    }
    // Copy the body into pool-owned memory: `response_body` is borrowed and
    // must not be referenced after this function returns.
    std::ptr::copy_nonoverlapping(
        response_bytes.as_ptr(),
        response_buffer as *mut u8,
        response_bytes.len(),
    );
    buf.pos = response_buffer as *mut u8;
    buf.last = response_buffer.offset(response_bytes.len() as isize) as *mut u8;
    let mut out_chain = ngx_chain_t {
        buf,
        next: std::ptr::null_mut(),
    };
    if ngx_http_output_filter(request, &mut out_chain) != 0 {
        return Err("Failed to perform http output filter chain");
    }
    Ok(())
}
|
use crate::api::*;
use crate::data::{get_book, get_order};
/// Feeds an already-obtained `Result` through a fallible transform,
/// short-circuiting on `Err`.
fn composer<T1: 'static, T2: 'static, E: 'static>(
    input: Result<T1, E>,
    transform: &'static impl Fn(T1) -> Result<T2, E>,
) -> Result<T2, E> {
    match input {
        Ok(value) => transform(value),
        Err(error) => Err(error),
    }
}
/// Chains two fallible transforms into a single function over `Result`s:
/// the input goes through `transform1` and then `transform2`, stopping at
/// the first `Err`.
fn compose_two<T1: 'static, T2: 'static, T3: 'static, E: 'static>(
    transform1: &'static impl Fn(T1) -> Result<T2, E>,
    transform2: &'static impl Fn(T2) -> Result<T3, E>,
) -> impl Fn(Result<T1, E>) -> Result<T3, E> {
    move |input| input.and_then(transform1).and_then(transform2)
}
/// Folds any number of `&'static Fn(T) -> Result<U, E>` transforms into a
/// single function over `Result`s by right-nesting `compose_two` calls.
macro_rules! compose {
    // Base case: a single transform is used as-is.
    ( $last:expr ) => { $last };
    // Recursive case: compose the head with the composition of the tail.
    ( $head:expr, $($tail:expr), +) => {
        compose_two($head, compose!($($tail),+))
    };
}
/// Looks up a book by id, mapping a missing book to
/// `OrderNotValid::BookNotExists`.
fn book_service(id: &String) -> Result<&'static Book, OrderNotValid> {
    get_book(id).ok_or(OrderNotValid::BookNotExists)
}
/// Looks up an order by id.
///
/// NOTE(review): a missing order is reported as `BookNotExists`, which looks
/// copy-pasted from `book_service` — confirm whether the error enum has an
/// order-specific variant that should be used instead.
fn order_service(id: &String) -> Result<&'static Order, OrderNotValid> {
    get_order(id).ok_or(OrderNotValid::BookNotExists)
}
/// Thin wrapper over `validate_order` so validation can participate in
/// `compose!` chains alongside the other services.
fn validation_service(order: &Order) -> ValidationResult {
    validate_order(order)
}
/// Sums `quantity * price` over every line item of the order.
///
/// BUG FIX: the original panicked ("book error") when a line item referenced
/// an unknown book, even though the signature promises a `Result`; lookup
/// failures are now propagated to the caller via `try_fold`.
fn calculate_amount_service(order: &Order) -> Result<f64, OrderNotValid> {
    order.items.iter().try_fold(0.0, |amount, line| {
        let book = book_service(&line.book_id)?;
        Ok(amount + line.quantity as f64 * book.price)
    })
}
/// "Places" an order; currently this only computes the order total —
/// there is no persistence or side effect here.
fn place_order_service(order: &'static Order) -> Result<f64, OrderNotValid> {
    calculate_amount_service(order)
}
/// Synchronous order processor implemented in a function-composition style.
pub struct SyncFpProcessor {}

impl SyncProcessor for SyncFpProcessor {
    /// Loads the order identified by `order_id`, validates it, then computes
    /// its total amount. All domain errors are collapsed into `()`.
    fn process(&self, order_id: &String) -> Result<f64, ()> {
        compose!(&validation_service, &place_order_service)(order_service(order_id)).map_err(|_| ())
    }
}

impl SyncFpProcessor {
    /// Returns the (stateless) processor as a `'static` trait object.
    pub fn processor() -> &'static dyn SyncProcessor {
        &(SyncFpProcessor {}) as &dyn SyncProcessor
    }
}
/// Free-function variant of `SyncFpProcessor::process`; the pipeline is
/// intentionally identical (load → validate → total, errors collapsed to `()`).
pub fn process_syncfp_direct(order_id: &String) -> Result<f64, ()> {
    compose!(&validation_service, &place_order_service)(order_service(order_id)).map_err(|_| ())
}
|
//! Rust parser for the racr format.
//!
//! # Examples
//! ## Parse Access
//! ```
//! assert_eq!(
//! racr::Access::ReadOnly,
//! racr_parser::AccessParser::new().parse("ro").unwrap()
//! );
//! ```
//! ```
//! assert_eq!(
//! racr::Access::WriteOnly,
//! racr_parser::AccessParser::new().parse("wo").unwrap()
//! );
//! ```
//! ```
//! assert_eq!(
//! racr::Access::ReadWrite,
//! racr_parser::AccessParser::new().parse("rw").unwrap()
//! );
//! ```
//! ```
//! assert_eq!(
//! racr::Access::ReadAsWrite,
//! racr_parser::AccessParser::new().parse("raw").unwrap()
//! );
//! ```
//!
//! ## Parse paths
//! ```
//! assert_eq!(
//! racr::Path{segments: vec!["foo".into(), "bar".into(), "baz".into()]},
//! racr_parser::PathParser::new().parse("foo::bar::baz").unwrap()
//! );
//! ```
//!
//! ## Parse modules
//! ```
//! assert_eq!(
//! racr::Module{ident: "foo".into(), content: None},
//! racr_parser::ModuleParser::new().parse("mod foo;").unwrap()
//! );
//! ```
//! ```
//! assert_eq!(
//! racr::Module{ident: "foo".into(), content: Some(vec![
//! racr::Module{ident: "bar".into(), content: Some(vec![
//! racr::Module{ident: "baz".into(), content: None}.into()
//! ])}.into()
//! ])},
//! racr_parser::ModuleParser::new().parse("mod foo {mod bar {mod baz;}}").unwrap()
//! );
//! ```
//! ## Parse use
//! ```
//! assert_eq!(
//! racr::Use{tree: racr::UseTree::Ident("Foo".into())},
//! racr_parser::UseParser::new().parse("use Foo;").unwrap()
//! );
//! ```
//! ```
//! assert_eq!(
//! racr::Use{tree: racr::UseTree::Path{path_segment: "foo".into(), sub_tree:
//! Box::new( racr::UseTree::Path{path_segment: "bar".into(), sub_tree:
//! Box::new( racr::UseTree::Ident("Baz".into()) )
//! } )
//! }},
//! racr_parser::UseParser::new().parse("use foo::bar::Baz;").unwrap()
//! );
//! ```
//! ```
//! assert_eq!(
//! racr::Use{tree: racr::UseTree::Path{path_segment: "foo".into(), sub_tree:
//! Box::new( racr::UseTree::Rename{ident: "Bar".into(), rename: "Baz".into()} )
//! }},
//! racr_parser::UseParser::new().parse("use foo::Bar as Baz;").unwrap()
//! );
//! ```
//! ## Parse register definitions
//! ```
//! assert_eq!(
//! racr::RegisterDefinition{
//! access: racr::Access::WriteOnly,
//! ident: "Foo".into(),
//! documentation: Some(String::from("Some documentation")),
//! size: 32,
//! reset_value: Some(0x00),
//! fields: vec![
//! racr::FieldInstance{ty: racr::FieldType::Field{ident: "bar".into()}, documentation: None, bit_range: 0..4, access: None},
//! racr::FieldInstance{documentation: None, bit_range: 4..8, access: None, ty: racr::FieldType::Enum{ident: "barX".into(), variants: vec![
//! racr::FieldVariant{ident: "BarA".into(), value: 0, documentation: None},
//! racr::FieldVariant{ident: "BarB".into(), value: 2, documentation: Some(String::from("some documentation"))},
//! racr::FieldVariant{ident: "BarC".into(), value: 4, documentation: None},
//! ]}},
//! racr::FieldInstance{ty: racr::FieldType::Field{ident: "baz".into()}, documentation: None, bit_range: 8..9, access: Some(racr::Access::ReadOnly)},
//! racr::FieldInstance{ty: racr::FieldType::Reserved{value: 0}, documentation: Some(String::from("Some documentation")), bit_range: 9..10, access: None},
//! racr::FieldInstance{ty: racr::FieldType::Reserved{value: 2}, documentation: None, bit_range: 10..12, access: None},
//! racr::FieldInstance{ty: racr::FieldType::Field{ident: "bax".into()}, documentation: Some(String::from("Some documentation")), bit_range: 12..32, access: None},
//! ],
//! },
//! racr_parser::RegisterDefinitionParser::new().parse("
//! #[doc = \"Some documentation\"]
//! wo register[32] Foo = 0 {
//! field[0..4] bar,
//! enum[4..8] barX {
//! BarA = 0,
//! #[doc = \"some documentation\"]
//! BarB = 0b10,
//! BarC = 0x4,
//! },
//! ro field[8] baz,
//! #[doc = \"Some documentation\"]
//! reserved[9] = 0,
//! reserved[10..12] = 2,
//! #[doc = \"Some documentation\"]
//! field[12..32] bax,
//! }"
//! ).unwrap()
//! );
//! ```
//! ## Parse peripheral definitions
//! ```
//! assert_eq!(
//! racr::PeripheralDefinition{
//! ident: "Foo".into(),
//! documentation: Some(String::from("Some documentation")),
//! registers: vec![
//! racr::RegisterSlot::Single{instance: racr::RegisterInstance{ident: "bar".into(), ty: racr::RegisterType::Single{path: racr_parser::PathParser::new().parse("bar::Bar").unwrap()}}, offset: 0x0},
//! racr::RegisterSlot::Single{instance: racr::RegisterInstance{ident: "bax".into(), ty: racr::RegisterType::Array{path: racr_parser::PathParser::new().parse("bax::Bax").unwrap(), size: 2}}, offset: 0x4},
//! racr::RegisterSlot::Union{
//! alternatives: vec![
//! racr::RegisterInstance{ident: "baz1".into(), ty: racr::RegisterType::Single{path: racr_parser::PathParser::new().parse("baz::Baz1").unwrap()}},
//! racr::RegisterInstance{ident: "baz2".into(), ty: racr::RegisterType::Single{path: racr_parser::PathParser::new().parse("baz::Baz2").unwrap()}},
//! racr::RegisterInstance{ident: "baz3".into(), ty: racr::RegisterType::Single{path: racr_parser::PathParser::new().parse("baz::Baz3").unwrap()}},
//! ],
//! offset: 0x10,
//! },
//! ],
//! },
//! racr_parser::PeripheralDefinitionParser::new().parse("
//! #[doc = \"Some documentation\"]
//! peripheral Foo {
//! bar: bar::Bar @ 0x00,
//! bax: [bax::Bax; 2] @ 0x04,
//! (baz1: baz::Baz1 | baz2: baz::Baz2 | baz3: baz::Baz3) @ 0x10,
//! }"
//! ).unwrap()
//! );
//! ```
//! ## Parse device definitions
//! ```
//! assert_eq!(
//! racr::DeviceDefinition{
//! ident: "Foo".into(),
//! documentation: Some(String::from("Some documentation")),
//! peripherals: vec![
//! racr::PeripheralInstance{ident: "bar".into(), path: racr_parser::PathParser::new().parse("bar::Bar").unwrap(), address: 0x0},
//! racr::PeripheralInstance{ident: "baz".into(), path: racr_parser::PathParser::new().parse("baz::Baz").unwrap(), address: 0x4},
//! racr::PeripheralInstance{ident: "bax".into(), path: racr_parser::PathParser::new().parse("bax::Bax").unwrap(), address: 0xc},
//! ],
//! },
//! racr_parser::DeviceDefinitionParser::new().parse("
//! #[doc = \"Some documentation\"]
//! device Foo {
//! bar: bar::Bar @ 0x00,
//! baz: baz::Baz @ 0x04,
//! bax: bax::Bax @ 0x0c,
//! }"
//! ).unwrap()
//! );
//! ```
//! ## Parse content
//! ```
//! racr_parser::ContentParser::new().parse("
//! use Foo;
//! use crate::bar::Baz;
//!
//! mod module {
//! peripheral Peripheral {
//! foo: Foo @ 0x00,
//! nar: Baz @ 0x10,
//! }
//! }
//! ").unwrap();
//! ```
use lalrpop_util;
use lalrpop_util::lalrpop_mod;
lalrpop_mod!(parser);
pub use crate::parser::ModuleParser;
pub use crate::parser::PathParser;
pub use crate::parser::AccessParser;
pub use crate::parser::ItemParser;
pub use crate::parser::UseParser;
pub use crate::parser::RegisterDefinitionParser;
pub use crate::parser::PeripheralDefinitionParser;
pub use crate::parser::DeviceDefinitionParser;
pub use crate::parser::ContentParser;
|
use std::fs::{self, DirEntry};
use std::{io, path::Path};
/// Recursively visits a directory tree, yielding every entry (files and
/// directories alike) as an `io::Result<DirEntry>`.
pub struct VisitDir {
    // Entries of this directory itself.
    root: Box<dyn Iterator<Item = io::Result<DirEntry>>>,
    // Lazily-built visitors for each immediate subdirectory.
    children: Box<dyn Iterator<Item = VisitDir>>,
}

impl VisitDir {
    /// Creates a visitor rooted at `path`.
    ///
    /// The directory is read twice: once for the flat entry listing and once
    /// to discover subdirectories. Subdirectories that cannot be read are
    /// silently skipped during recursion.
    ///
    /// # Errors
    /// Fails if `path` itself cannot be read as a directory.
    pub fn new<P: AsRef<Path>>(path: P) -> io::Result<Self> {
        let root = Box::new(fs::read_dir(&path)?);
        let children = Box::new(fs::read_dir(&path)?.filter_map(|entry| {
            let entry = entry.ok()?;
            if entry.file_type().ok()?.is_dir() {
                // `.ok()` collapses unreadable subdirectories into `None`
                // instead of aborting the walk. (Previously written as the
                // redundant `Some(VisitDir::new(..).ok()?)`.)
                VisitDir::new(entry.path()).ok()
            } else {
                None
            }
        }));
        Ok(VisitDir { root, children })
    }

    /// Consumes the visitor and returns a boxed iterator over the whole
    /// tree: this directory's entries followed by all descendants'.
    pub fn entries(self) -> Box<dyn Iterator<Item = io::Result<DirEntry>>> {
        // `flat_map` replaces the former `.map(..).flatten()` chain.
        Box::new(self.root.chain(self.children.flat_map(|child| child.entries())))
    }
}

impl Iterator for VisitDir {
    type Item = io::Result<DirEntry>;

    fn next(&mut self) -> Option<Self::Item> {
        // Iterative rather than recursive (the original recursed into
        // `self.next()`, growing the stack by one frame per empty child):
        // drain the current directory, then splice in each child's flattened
        // entries as the new `root`.
        loop {
            if let Some(item) = self.root.next() {
                return Some(item);
            }
            self.root = self.children.next()?.entries();
        }
    }
}
|
// TODO: Needs improvement
/// LeetCode 127 "Word Ladder": returns the length (number of words,
/// including `begin_word`) of the shortest transformation sequence from
/// `begin_word` to `end_word`, where each step changes exactly one letter
/// and every intermediate word is in `word_list`. Returns 0 if no such
/// ladder exists (in particular when `end_word` is not in the list).
pub fn ladder_length(begin_word: String, end_word: String, word_list: Vec<String>) -> i32 {
    use std::collections::HashSet;
    let begin_word: Vec<char> = begin_word.chars().collect();
    let end_word: Vec<char> = end_word.chars().collect();
    let word_list: Vec<Vec<char>> = word_list
        .into_iter()
        .map(|s| s.chars().collect::<Vec<char>>())
        .collect();
    let n = word_list.len();

    // True iff `a` and `b` have equal length and differ in exactly one position.
    //
    // BUG FIX: the original returned true for identical words (0 mismatches)
    // and for words of different lengths (zip stops at the shorter one),
    // linking words that are not one letter apart.
    fn diff_once(a: &[char], b: &[char]) -> bool {
        if a.len() != b.len() {
            return false;
        }
        let mut mismatch = 0;
        for (&aa, &bb) in a.iter().zip(b.iter()) {
            if aa != bb {
                mismatch += 1;
                if mismatch >= 2 {
                    return false;
                }
            }
        }
        mismatch == 1
    }

    // Precompute adjacency (one-letter difference) between list words.
    let mut neighbours = vec![HashSet::new(); n];
    for i in 0..n {
        for j in 0..i {
            if diff_once(&word_list[i], &word_list[j]) {
                neighbours[i].insert(j);
                neighbours[j].insert(i);
            }
        }
    }
    // Words reachable from begin_word in exactly one step.
    let mut start_set = HashSet::new();
    for (i, word) in word_list.iter().enumerate() {
        if diff_once(word, &begin_word) {
            start_set.insert(i);
        }
    }
    // BFS backwards from end_word; `active_set` is the current frontier.
    let mut active_set = HashSet::new();
    let mut visited_set = HashSet::new();
    for (i, word) in word_list.iter().enumerate() {
        if word.as_slice() == end_word.as_slice() {
            active_set.insert(i);
        }
    }
    // A frontier word adjacent to begin_word yields a ladder of length
    // `steps`: begin_word + the frontier word + one word per extra level.
    let mut steps = 2;
    while !active_set.is_empty() {
        if active_set.iter().any(|j| start_set.contains(j)) {
            return steps;
        }
        let mut newly_added = HashSet::new();
        for &j in &active_set {
            for &k in &neighbours[j] {
                if !visited_set.contains(&k) && !active_set.contains(&k) {
                    newly_added.insert(k);
                }
            }
        }
        visited_set.extend(active_set.iter().copied());
        active_set = newly_added;
        steps += 1;
    }
    0
}
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
//! Provides a more rustic interface to a minimal set of `perf` functionality.
//!
//! Explicitly missing (because they are unnecessary) perf features include:
//! * Grouping
//! * Sample type flags
//! * Reading any kind of sample events
//! * BPF
//! * Hardware breakpoints
//!
//! The arguments and behaviors in this module generally correspond exactly to
//! those of `perf_event_open(2)`. No attempts are made to paper over the
//! non-determinism/weirdness of `perf`. For example, counter increments are
//! dropped whenever an event fires on a running thread.
//! [`PerfCounter::DISABLE_SAMPLE_PERIOD`] can be used to avoid this for sampling
//! events.
use core::ptr::NonNull;
#[allow(unused_imports)] // only used if we have an error
use std::compile_error;
use lazy_static::lazy_static;
use nix::sys::signal::Signal;
use nix::unistd::sysconf;
use nix::unistd::SysconfVar;
pub use perf::perf_event_header;
use perf_event_open_sys::bindings as perf;
use perf_event_open_sys::ioctls;
use reverie::Errno;
use reverie::Tid;
use tracing::info;
use tracing::warn;
use crate::validation::check_for_pmu_bugs;
use crate::validation::PmuValidationError;
lazy_static! {
static ref PMU_BUG: Result<(), PmuValidationError> = check_for_pmu_bugs();
}
// Not available in the libc crate
const F_SETOWN_EX: libc::c_int = 15;
const F_SETSIG: libc::c_int = 10;
const F_OWNER_TID: libc::c_int = 0;
/// Mirror of the kernel's `struct f_owner_ex`, used with the `F_SETOWN_EX`
/// fcntl to direct fd signals at a specific owner. Presumably paired with
/// `F_OWNER_TID` (defined above) to target a single thread — confirm at the
/// call site.
#[repr(C)]
struct f_owner_ex {
    // One of the F_OWNER_* discriminants.
    pub type_: libc::c_int,
    // Owner id, interpreted according to `type_`.
    pub pid: libc::pid_t,
}
/// An incomplete enumeration of events perf can monitor.
/// Each variant maps to a (`perf_event_attr.type`, `.config`) pair via
/// `attr_type`/`attr_config` below.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Event {
    #[allow(dead_code)] // used in tests
    /// A perf-supported hardware event.
    Hardware(HardwareEvent),
    /// A perf-supported software event.
    Software(SoftwareEvent),
    /// A raw CPU event. The inner value will have a CPU-specific meaning
    /// and is passed through unchanged as `perf_event_attr.config`.
    Raw(u64),
}
/// An incomplete enumeration of hardware events perf can monitor.
#[allow(dead_code)] // used in tests
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum HardwareEvent {
    /// Count retired instructions. Can be affected by hardware interrupt counts.
    /// (Maps to `PERF_COUNT_HW_INSTRUCTIONS`.)
    Instructions,
    /// Count retired branch instructions.
    /// (Maps to `PERF_COUNT_HW_BRANCH_INSTRUCTIONS`.)
    BranchInstructions,
}
/// An incomplete enumeration of software events perf can monitor.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum SoftwareEvent {
    /// A placeholder event that counts nothing.
    /// (Maps to `PERF_COUNT_SW_DUMMY`.)
    Dummy,
}
/// A perf counter with a very limited range of configurability.
/// Construct via [`Builder`].
#[derive(Debug)]
pub struct PerfCounter {
    // File descriptor returned by perf_event_open(2).
    fd: libc::c_int,
    // Read-only metadata page mmapped from `fd`; present only when the
    // builder enabled fast reads.
    mmap: Option<NonNull<perf::perf_event_mmap_page>>,
}
impl Event {
    /// Value for the `type_` field of `perf_event_attr`.
    fn attr_type(self) -> u32 {
        match self {
            Event::Hardware(_) => perf::perf_type_id_PERF_TYPE_HARDWARE,
            Event::Software(_) => perf::perf_type_id_PERF_TYPE_SOFTWARE,
            Event::Raw(_) => perf::perf_type_id_PERF_TYPE_RAW,
        }
    }

    /// Value for the `config` field of `perf_event_attr`; raw events pass
    /// their inner value through unchanged.
    fn attr_config(self) -> u64 {
        match self {
            Event::Raw(x) => x,
            Event::Hardware(HardwareEvent::Instructions) => {
                perf::perf_hw_id_PERF_COUNT_HW_INSTRUCTIONS.into()
            }
            Event::Hardware(HardwareEvent::BranchInstructions) => {
                perf::perf_hw_id_PERF_COUNT_HW_BRANCH_INSTRUCTIONS.into()
            }
            Event::Software(SoftwareEvent::Dummy) => perf::perf_sw_ids_PERF_COUNT_SW_DUMMY.into(),
        }
    }
}
/// Builder for a PerfCounter. Contains only the subset of the attributes that
/// this API allows manipulating set to non-defaults.
#[derive(Debug, Clone)]
pub struct Builder {
    // Thread to monitor; see `new` for the pid/cpu semantics.
    pid: libc::pid_t,
    // CPU restriction; -1 follows `pid` across all CPUs.
    cpu: libc::c_int,
    // Event to count; defaults to a never-incrementing software dummy.
    evt: Event,
    // Overflow/sampling period; 0 creates a pure counting event.
    sample_period: u64,
    // `perf_event_attr.precise_ip` skid-control value (0-3).
    precise_ip: u32,
    // Whether to mmap the counter for userspace fast reads.
    fast_reads: bool,
}
impl Builder {
/// Initialize the builder. The initial configuration is for a software
/// counting event that never increments.
///
/// `pid` accepts a *TID* from `gettid(2)`. Passing `getpid(2)` will
/// monitor the main thread of the calling thread group. Passing `0`
/// monitors the calling thread. Passing `-1` monitors all threads on
/// the specified CPU.
///
/// `cpu` should almost always be `-1`, which tracks the specified `pid`
/// across all CPUs. Non-negative integers track only the specified `pid`
/// on that CPU.
///
/// Passing `-1` for both `pid` and `cpu` will result in an error.
pub fn new(pid: libc::pid_t, cpu: libc::c_int) -> Self {
Self {
pid,
cpu,
evt: Event::Software(SoftwareEvent::Dummy),
sample_period: 0,
precise_ip: 0,
fast_reads: false,
}
}
/// Select the event to monitor.
pub fn event(&mut self, evt: Event) -> &mut Self {
self.evt = evt;
self
}
/// Set the period for sample collection. Default is 0, which creates a
/// counting event.
///
/// Because this module always sets `wakeup_events` to 1, this also
/// specifies after how many events an overflow notification should be
/// raised. If a signal has been setup with
/// `PerfCounter::set_signal_delivery`], this corresponds to one sent
/// signal. Overflow notifications are sent whenever the counter reaches a
/// multiple of `sample_period`.
///
/// If you only want accurate counts, pass
/// `DISABLE_SAMPLE_PERIOD`. Passing `0` will also work, but will create a
/// _counting_ event that cannot become a _sampling event_ via the
/// `PERF_EVENT_IOC_PERIOD` ioctl.
pub fn sample_period(&mut self, period: u64) -> &mut Self {
self.sample_period = period;
self
}
/// Set `precise_ip` on the underlying perf attribute structure. Valid
/// values are 0-3; the underlying field is 2 bits.
///
/// Non-zero values will cause perf to attempt to lower the skid of *samples*
/// (but not necessarily notifications), usually via hardware features like
/// Intel PEBS.
///
/// Use with caution: experiments have shown that counters with non-zero
/// `precise_ip` can drop events under certain circumstances. See
/// `experiments/test_consistency.c` for more information.
pub fn precise_ip(&mut self, precise_ip: u32) -> &mut Self {
self.precise_ip = precise_ip;
self
}
/// Enable fast reads via shared memory with the kernel for the latest
/// counter value.
pub fn fast_reads(&mut self, enable: bool) -> &mut Self {
self.fast_reads = enable;
self
}
/// Render the builder into a `PerfCounter`. Created counters begin in a
/// disabled state. Additional initialization steps should be performed,
/// followed by a call to [`PerfCounter::enable`].
///
/// Performs the `perf_event_open(2)` syscall and, when fast reads were
/// requested, maps the perf metadata page. On mmap failure the freshly
/// opened fd is closed before the error is propagated, so no fd leaks.
pub fn create(&self) -> Result<PerfCounter, Errno> {
    // Build the attribute struct from the builder's accumulated state.
    let mut attr = perf::perf_event_attr::default();
    attr.size = core::mem::size_of_val(&attr) as u32; // kernel ABI versioning
    attr.type_ = self.evt.attr_type();
    attr.config = self.evt.attr_config();
    attr.__bindgen_anon_1.sample_period = self.sample_period;
    attr.set_disabled(1); // user must enable later
    attr.set_exclude_kernel(1); // we only care about user code
    attr.set_exclude_guest(1);
    attr.set_exclude_hv(1); // unlikely this is supported, but it doesn't hurt
    attr.set_pinned(1); // error state if we are descheduled from the PMU
    attr.set_precise_ip(self.precise_ip.into());
    attr.__bindgen_anon_2.wakeup_events = 1; // generate a wakeup (overflow) after one sample event
    let pid = self.pid;
    let cpu = self.cpu;
    let group_fd: libc::c_int = -1; // always create a new group
    let flags = perf::PERF_FLAG_FD_CLOEXEC; // marginally more safe if we fork+exec
    // SAFETY: attr is a fully-initialized perf_event_attr and outlives the call.
    let fd = Errno::result(unsafe {
        libc::syscall(libc::SYS_perf_event_open, &attr, pid, cpu, group_fd, flags)
    })?;
    let fd = fd as libc::c_int;
    // Optionally map the single metadata page used by the fast-read path.
    let mmap = if self.fast_reads {
        let res = Errno::result(unsafe {
            libc::mmap(
                core::ptr::null_mut(),
                get_mmap_size(),
                libc::PROT_READ, // leaving PROT_WRITE unset lets us passively read
                libc::MAP_SHARED,
                fd,
                0,
            )
        });
        match res {
            Ok(ptr) => Some(NonNull::new(ptr as *mut _).unwrap()),
            Err(e) => {
                // Don't leak the perf fd if the mapping failed.
                close_perf_fd(fd);
                return Err(e);
            }
        }
    } else {
        None
    };
    Ok(PerfCounter { fd, mmap })
}
/// Log a warning (once per call) if the lazily-probed PMU bug check found
/// problems. Purely diagnostic; never fails.
pub(crate) fn check_for_pmu_bugs(&mut self) -> &mut Self {
    if let Err(pmu_error) = PMU_BUG.as_ref() {
        warn!("Pmu bugs detected: {:?}", pmu_error);
    }
    self
}
}
impl PerfCounter {
/// Perf counters cannot be switched from sampling to non-sampling, so
/// setting their period to this large value effectively disables overflows
/// and sampling.
pub const DISABLE_SAMPLE_PERIOD: u64 = 1 << 60;
/// Call the `PERF_EVENT_IOC_ENABLE` ioctl. Enables increments of the
/// counter and event generation.
pub fn enable(&self) -> Result<(), Errno> {
Errno::result(unsafe { ioctls::ENABLE(self.fd, 0) }).and(Ok(()))
}
/// Call the `PERF_EVENT_IOC_ENABLE` ioctl. Disables increments of the
/// counter and event generation.
pub fn disable(&self) -> Result<(), Errno> {
Errno::result(unsafe { ioctls::DISABLE(self.fd, 0) }).and(Ok(()))
}
/// Corresponds exactly to the `PERF_EVENT_IOC_REFRESH` ioctl.
#[allow(dead_code)]
pub fn refresh(&self, count: libc::c_int) -> Result<(), Errno> {
assert!(count != 0); // 0 is undefined behavior
Errno::result(unsafe { ioctls::REFRESH(self.fd, 0) }).and(Ok(()))
}
/// Call the `PERF_EVENT_IOC_RESET` ioctl. Resets the counter value to 0,
/// which results in delayed overflow events.
pub fn reset(&self) -> Result<(), Errno> {
Errno::result(unsafe { ioctls::RESET(self.fd, 0) }).and(Ok(()))
}
/// Call the `PERF_EVENT_IOC_PERIOD` ioctl. This causes the counter to
/// behave as if `ticks` was the original argument to `sample_period` in
/// the builder.
pub fn set_period(&self, ticks: u64) -> Result<(), Errno> {
// The bindings are wrong for this ioctl. The method signature takes a
// u64, but the actual ioctl expects a pointer to a u64. Thus, we use
// the constant manually.
// This ioctl shouldn't mutate it's argument per its API. But in case it
// does, create a mutable copy to avoid Rust UB.
let mut ticks = ticks;
Errno::result(unsafe {
libc::ioctl(
self.fd,
perf::perf_event_ioctls_PERIOD as _,
&mut ticks as *mut u64,
)
})
.and(Ok(()))
}
/// Call the `PERF_EVENT_IOC_ID` ioctl. Returns a unique identifier for this
/// perf counter.
#[allow(dead_code)]
pub fn id(&self) -> Result<u64, Errno> {
let mut res = 0u64;
Errno::result(unsafe { ioctls::ID(self.fd, &mut res as *mut u64) })?;
Ok(res)
}
/// Sets up overflow events to deliver a `SIGPOLL`-style signal, with the
/// signal number specified in `signal`, to the specified `thread`.
///
/// There is no reason this couldn't be called at any point, but typial use
/// cases will set up signal delivery once or not at all.
pub fn set_signal_delivery(&self, thread: Tid, signal: Signal) -> Result<(), Errno> {
let owner = f_owner_ex {
type_: F_OWNER_TID,
pid: thread.as_raw(),
};
Errno::result(unsafe { libc::fcntl(self.fd, F_SETOWN_EX, &owner as *const _) })?;
Errno::result(unsafe { libc::fcntl(self.fd, libc::F_SETFL, libc::O_ASYNC) })?;
Errno::result(unsafe { libc::fcntl(self.fd, F_SETSIG, signal as i32) })?;
Ok(())
}
/// Read the current value of the counter.
pub fn ctr_value(&self) -> Result<u64, Errno> {
let mut value = 0u64;
let expected_bytes = std::mem::size_of_val(&value);
loop {
let res =
unsafe { libc::read(self.fd, &mut value as *mut u64 as *mut _, expected_bytes) };
if res == -1 {
let errno = Errno::last();
if errno != Errno::EINTR {
return Err(errno);
}
}
if res == 0 {
// EOF: this only occurs when attr.pinned = 1 and our event was descheduled.
// This unrecoverably gives us innacurate counts.
panic!("pinned perf event descheduled!")
}
if res == expected_bytes as isize {
break;
}
}
Ok(value)
}
/// Perform a fast read, which doesn't involve a syscall in the fast path.
/// This falls back to a slow syscall read where necessary, including if
/// fast reads weren't enabled in the `Builder`.
pub fn ctr_value_fast(&self) -> Result<u64, Errno> {
match self.mmap {
Some(ptr) => {
// SAFETY: self.mmap is constructed as the correct page or not at all
let res = unsafe { self.ctr_value_fast_loop(ptr) };
// TODO: remove this assertion after we're confident in correctness
debug_assert_eq!(res, self.ctr_value_fallback());
res
}
None => self.ctr_value_fallback(),
}
}
#[cold]
fn ctr_value_fallback(&self) -> Result<u64, Errno> {
self.ctr_value()
}
/// Safety: `ptr` must refer to the metadata page corresponding to self.fd.
#[deny(unsafe_op_in_unsafe_fn)]
#[inline(always)]
unsafe fn ctr_value_fast_loop(
&self,
ptr: NonNull<perf::perf_event_mmap_page>,
) -> Result<u64, Errno> {
// This implements synchronization with the kernel via a seqlock,
// see https://www.kernel.org/doc/html/latest/locking/seqlock.html.
// Also see experiments/perf_fast_reads.c for more details on fast reads.
use std::ptr::addr_of_mut;
let ptr = ptr.as_ptr();
let mut seq;
let mut running;
let mut enabled;
let mut count;
loop {
// Acquire a lease on the seqlock -- even values are outside of
// writers' critical sections.
loop {
// SAFETY: ptr->lock is valid and aligned
seq = unsafe { read_once(addr_of_mut!((*ptr).lock)) };
if seq & 1 == 0 {
break;
}
}
smp_rmb(); // force re-reads of other data
let index;
// SAFETY: these reads are synchronized by the correct reads of the
// seqlock. We don't do anything with them until after the outer
// loop finishing has guaranteed our read was serialized.
unsafe {
running = (*ptr).time_running;
enabled = (*ptr).time_enabled;
count = (*ptr).offset;
index = (*ptr).index;
}
if index != 0 {
// `index` being non-zero indicates we need to read from the
// hardware counter and add it to our count. Instead, we
// fallback to the slow path for a few reasons:
// 1. This only works if we're on the same core, which is basically
// never true for our usecase.
// 2. Reads of an active PMU are racy.
// 3. The PMU should almost never be active, because we should
// generally only read from stopped processes.
return self.ctr_value_fallback();
}
smp_rmb();
// SAFETY: ptr->lock is valid and aligned
if seq == unsafe { read_once(addr_of_mut!((*ptr).lock)) } {
// if seq is unchanged, we didn't race with writer
break;
}
}
// This check must be outside the loop to ensure our reads were actually
// serialized with any writes.
if running != enabled {
// Non-equal running/enabled time indicates the event was
// descheduled at some point, meaning our counts are inaccurate.
// This is not recoverable. The slow-read equivalent is getting EOF
// when attr.pinned = 1.
panic!("fast-read perf event was probably descheduled!")
}
Ok(count as u64)
}
/// Return the underlying perf fd.
pub fn raw_fd(&self) -> libc::c_int {
self.fd
}
}
/// Close a perf fd, panicking if the kernel rejects the close (a bug upstream).
fn close_perf_fd(fd: libc::c_int) {
    let rc = unsafe { libc::close(fd) };
    Errno::result(rc).expect("Could not close perf fd");
}
/// Unmap the perf metadata page previously mapped for fast reads.
fn close_mmap(ptr: *mut perf::perf_event_mmap_page) {
    let rc = unsafe { libc::munmap(ptr as *mut _, get_mmap_size()) };
    Errno::result(rc).expect("Could not munmap ring buffer");
}
impl Drop for PerfCounter {
    /// Tear down in dependency order: unmap the metadata page (if fast reads
    /// were enabled) before releasing the perf fd it was mapped from.
    fn drop(&mut self) {
        match self.mmap {
            Some(ptr) => close_mmap(ptr.as_ptr()),
            None => {}
        }
        close_perf_fd(self.fd);
    }
}
// Safety:
// The mmap region is never written to. Multiple readers then race with the
// kernel as any single thread would. Though the reads are racy, that is the
// intended behavior of the perf api.
// (These impls are needed because the raw `NonNull` in `mmap` suppresses the
// auto traits; the fd itself is trivially sendable/shareable.)
unsafe impl std::marker::Send for PerfCounter {}
unsafe impl std::marker::Sync for PerfCounter {}
/// Size of the perf mmap region: a single page, since we only want the
/// perf metadata header and no ring buffer.
fn get_mmap_size() -> usize {
    let page_size = sysconf(SysconfVar::PAGE_SIZE)
        .unwrap() // sysconf call succeeded
        .unwrap(); // PAGE_SIZE is defined on this system
    page_size.try_into().unwrap()
}
/// Force a relaxed atomic load. Like Linux's READ_ONCE.
/// SAFETY: caller must ensure v points to valid data and is aligned
///
/// Used to read the kernel-updated seqlock word without the compiler
/// caching or tearing the load.
#[inline(always)]
#[deny(unsafe_op_in_unsafe_fn)]
unsafe fn read_once(v: *mut u32) -> u32 {
    use std::sync::atomic::AtomicU32;
    use std::sync::atomic::Ordering::Relaxed;
    // SAFETY: AtomicU32 is guaranteed to have the same in-memory representation
    // SAFETY: The UnsafeCell inside AtomicU32 allows aliasing with *mut
    // SAFETY: The reference doesn't escape this function, so any lifetime is ok
    let av: &AtomicU32 = unsafe { &*(v as *const AtomicU32) };
    av.load(Relaxed)
}
/// Read-side barrier used around the seqlock reads, pairing with the
/// kernel's writer-side barriers.
///
/// NOTE(review): this emits only a *compiler* fence, i.e. no hardware
/// barrier instruction -- confirm that is sufficient on weakly-ordered
/// targets (it is on x86, where loads are not reordered with loads).
#[inline(always)]
fn smp_rmb() {
    core::sync::atomic::compiler_fence(core::sync::atomic::Ordering::SeqCst);
}
// Test if we have PMU access by doing a check for a basic hardware event.
//
// Returns true when a hardware-instructions counter can be opened for the
// current thread; logs and returns false on the known "unsupported" errno
// values, and panics on anything unexpected.
fn test_perf_pmu_support() -> bool {
    // Do a raw perf_event_open because our default configuration has flags that
    // might be the actual cause of the error, which we want to catch separately.
    let evt = Event::Hardware(HardwareEvent::Instructions);
    let mut attr = perf::perf_event_attr::default();
    attr.size = core::mem::size_of_val(&attr) as u32;
    attr.type_ = evt.attr_type();
    attr.config = evt.attr_config();
    // Counting-only: effectively no sampling/overflows.
    attr.__bindgen_anon_1.sample_period = PerfCounter::DISABLE_SAMPLE_PERIOD;
    attr.set_exclude_kernel(1); // lowers permission requirements
    let pid: libc::pid_t = 0; // track this thread
    let cpu: libc::c_int = -1; // across any CPU
    let group_fd: libc::c_int = -1;
    let flags = perf::PERF_FLAG_FD_CLOEXEC;
    let res = Errno::result(unsafe {
        libc::syscall(libc::SYS_perf_event_open, &attr, pid, cpu, group_fd, flags)
    });
    match res {
        Ok(fd) => {
            // Probe succeeded; don't leak the fd.
            Errno::result(unsafe { libc::close(fd as libc::c_int) })
                .expect("perf feature check: close(fd) failed");
            return true;
        }
        // Expected failure modes: missing PMU support or insufficient perms.
        Err(Errno::ENOENT) => info!("Perf feature check failed due to ENOENT"),
        Err(Errno::EPERM) => info!("Perf feature check failed due to EPERM"),
        Err(Errno::EACCES) => info!("Perf feature check failed due to EACCES"),
        Err(e) => panic!("Unexpected error during perf feature check: {}", e),
    }
    false
}
// Cached once on first access: the probe performs a real perf_event_open
// syscall, so we only want to run it a single time per process.
lazy_static! {
    static ref IS_PERF_SUPPORTED: bool = test_perf_pmu_support();
}
/// Returns true if the current system configuration supports use of perf for
/// hardware events. The underlying probe runs once and is cached.
pub fn is_perf_supported() -> bool {
    let supported: &bool = &IS_PERF_SUPPORTED;
    *supported
}
/// Concisely return if `is_perf_supported` is `false`. Useful for guarding
/// tests.
///
/// `ret_without_perf!()` plain-returns; `ret_without_perf!(expr)` returns
/// the given expression's value instead.
#[macro_export]
macro_rules! ret_without_perf {
    () => {
        if !$crate::is_perf_supported() {
            return;
        }
    };
    // BUG FIX: the matcher was `(expr:expr)`, which only matches the literal
    // token `expr` followed by `:expr`, and the body referenced an undeclared
    // `$expr` metavariable -- so this arm could never expand. Declare the
    // fragment metavariable properly.
    ($expr:expr) => {
        if !$crate::is_perf_supported() {
            return ($expr);
        }
    };
}
/// Perform exactly `count+1` conditional branch instructions. Useful for
/// testing timer-related code.
///
/// NOTE(review): passing `count == 0` wraps the subtraction and loops
/// for ~2^64 iterations -- callers should pass `count >= 1`.
#[cfg(target_arch = "x86_64")]
#[inline(never)]
pub fn do_branches(mut count: u64) {
    // Anything but assembly is unreliable between debug and release
    unsafe {
        // Decrement until zero; `jnz` is taken while ZF is clear and falls
        // through once the counter hits zero. (The original comment claimed
        // this tested the carry flag, but `jnz` tests the zero flag.)
        core::arch::asm!(
            "2:",
            "sub {0}, 1",
            "jnz 2b",
            inout(reg) count,
        )
    }
    // The register was written back through `inout`; it must be 0 on exit.
    assert_eq!(count, 0);
}
/// Perform exactly `count+1` conditional branch instructions. Useful for
/// testing timer-related code.
///
/// NOTE(review): as with the x86_64 variant, `count == 0` wraps and loops
/// for ~2^64 iterations.
#[cfg(target_arch = "aarch64")]
#[inline(never)]
pub fn do_branches(mut count: u64) {
    unsafe {
        // `subs` updates the condition flags; `b.ne` loops until the counter
        // reaches zero.
        core::arch::asm!(
            "2:",
            "subs {0}, {0}, #0x1",
            "b.ne 2b",
            inout(reg) count,
        )
    }
    assert_eq!(count, 0);
}
// NOTE: aarch64 doesn't work with
// `Event::Hardware(HardwareEvent::BranchInstructions)`, so these tests are
// disabled for that architecture. Most likely, we need to use `Event::Raw`
// instead to enable these tests.
#[cfg(all(test, target_arch = "x86_64"))]
mod test {
    use nix::unistd::gettid;

    use super::*;

    // Smoke test: the asm loop terminates and its postcondition holds.
    #[test]
    fn test_do_branches() {
        do_branches(1000);
    }

    // Count this thread's own branches around a known branch workload.
    #[test]
    fn trace_self() {
        ret_without_perf!();
        let pc = Builder::new(gettid().as_raw(), -1)
            .sample_period(PerfCounter::DISABLE_SAMPLE_PERIOD)
            .event(Event::Hardware(HardwareEvent::BranchInstructions))
            .create()
            .unwrap();
        pc.reset().unwrap();
        pc.enable().unwrap();
        const ITERS: u64 = 10000;
        do_branches(ITERS);
        pc.disable().unwrap();
        let ctr = pc.ctr_value().unwrap();
        // Bracket the count: at least the loop's branches, plus a small
        // allowance for instrumentation overhead.
        assert!(ctr >= ITERS);
        assert!(ctr <= ITERS + 100); // `.disable()` overhead
    }

    // Same as trace_self, but the counter tracks a different thread.
    #[test]
    fn trace_other_thread() {
        ret_without_perf!();
        use std::sync::mpsc::sync_channel;
        // Rendezvous channels: one to learn the worker's TID, one to release it.
        let (tx1, rx1) = sync_channel(0); // send TID
        let (tx2, rx2) = sync_channel(0); // start guest spin
        const ITERS: u64 = 100000;
        let handle = std::thread::spawn(move || {
            tx1.send(gettid()).unwrap();
            rx2.recv().unwrap();
            do_branches(ITERS);
        });
        let pc = Builder::new(rx1.recv().unwrap().as_raw(), -1)
            .sample_period(PerfCounter::DISABLE_SAMPLE_PERIOD)
            .event(Event::Hardware(HardwareEvent::BranchInstructions))
            .create()
            .unwrap();
        pc.enable().unwrap();
        tx2.send(()).unwrap(); // tell thread to start
        handle.join().unwrap();
        let ctr = pc.ctr_value().unwrap();
        assert!(ctr >= ITERS);
        assert!(ctr <= ITERS + 6000, "{}", ctr); // overhead from channel operations
    }

    // End-to-end check of sample_period + set_signal_delivery: the traced
    // thread spins until the overflow signal shows up as pending.
    #[test]
    fn deliver_signal() {
        ret_without_perf!();
        use std::mem::MaybeUninit;
        use std::sync::mpsc::sync_channel;
        let (tx1, rx1) = sync_channel(0); // send TID
        let (tx2, rx2) = sync_channel(0); // start guest spin
        // SIGSTKFLT defaults to TERM, so if any thread but the traced one
        // receives the signal, the test will fail due to process exit.
        const MARKER_SIGNAL: Signal = Signal::SIGSTKFLT;
        const SPIN_BRANCHES: u64 = 50000; // big enough to "absorb" noise from debug/release
        const SPINS_PER_EVENT: u64 = 10;
        // Slightly more than SPINS_PER_EVENT spins' worth of branches, so the
        // overflow lands mid-spin on the expected iteration.
        const SAMPLE_PERIOD: u64 = SPINS_PER_EVENT * SPIN_BRANCHES + (SPINS_PER_EVENT / 4);
        // Poll whether the (blocked) marker signal has been queued to us.
        fn signal_is_pending() -> bool {
            unsafe {
                let mut mask = MaybeUninit::<libc::sigset_t>::zeroed();
                libc::sigemptyset(mask.as_mut_ptr());
                libc::sigpending(mask.as_mut_ptr());
                libc::sigismember(mask.as_ptr(), MARKER_SIGNAL as _) == 1
            }
        }
        let handle = std::thread::spawn(move || {
            // Block the marker signal so it stays pending instead of firing
            // its (fatal) default handler.
            unsafe {
                let mut mask = MaybeUninit::<libc::sigset_t>::zeroed();
                libc::sigemptyset(mask.as_mut_ptr());
                libc::sigaddset(mask.as_mut_ptr(), MARKER_SIGNAL as _);
                libc::sigprocmask(libc::SIG_BLOCK, mask.as_ptr(), std::ptr::null_mut());
            }
            tx1.send(gettid()).unwrap();
            rx2.recv().unwrap();
            let mut count = 0;
            loop {
                count += 1;
                do_branches(SPIN_BRANCHES);
                if signal_is_pending() {
                    break;
                }
            }
            // The overflow signal should arrive on exactly this spin.
            assert_eq!(count, SPINS_PER_EVENT);
        });
        let tid = rx1.recv().unwrap();
        let pc = Builder::new(tid.as_raw(), -1)
            .sample_period(SAMPLE_PERIOD)
            .event(Event::Hardware(HardwareEvent::BranchInstructions))
            .create()
            .unwrap();
        pc.set_signal_delivery(tid.into(), MARKER_SIGNAL).unwrap();
        pc.enable().unwrap();
        tx2.send(()).unwrap(); // tell thread to start
        handle.join().unwrap(); // propagate panics
    }
}
|
/// Read one line from stdin and return it with trailing whitespace removed.
fn read_line() -> String {
    let mut buffer = String::new();
    std::io::stdin()
        .read_line(&mut buffer)
        .unwrap();
    buffer.trim_end().to_owned()
}
/// Read `a b c` from stdin and print the winner: on a tie, `c` decides
/// (1 => Takahashi, otherwise Aoki); otherwise the larger value wins.
fn main() {
    let input = read_line();
    let mut tokens = input.split_whitespace();
    let mut next_int = || -> isize { tokens.next().unwrap().parse().unwrap() };
    let a = next_int();
    let b = next_int();
    let c = next_int();
    let winner = match (a == b, c) {
        (true, 1) => "Takahashi",
        (true, _) => "Aoki",
        _ if a > b => "Takahashi",
        _ => "Aoki",
    };
    println!("{}", winner)
}
|
extern crate futures;
extern crate tokio_core;
use futures::{Future, Stream};
use tokio_core::io::{copy, Io};
use tokio_core::net::TcpListener;
use tokio_core::reactor::Core;
/// TCP echo server on 127.0.0.1:12345 built on the (legacy) `tokio_core`
/// reactor: each accepted connection is split and its reader copied back
/// into its writer.
///
/// NOTE(review): `tokio_core` is the pre-0.1 tokio API, long deprecated --
/// consider migrating to `tokio::net::TcpListener` if this code is live.
fn main() {
    // Create the event loop that will drive this server
    let mut core = Core::new().unwrap();
    let handle = core.handle();

    // Bind the server's socket
    let addr = "127.0.0.1:12345".parse().unwrap();
    let sock = TcpListener::bind(&addr, &handle).unwrap();

    // Pull out a stream of sockets for incoming connections
    let server = sock.incoming().for_each(|(sock, _)| {
        // Split up the reading and writing parts of the
        // socket
        let (reader, writer) = sock.split();

        // A future that echos the data and returns how
        // many bytes were copied...
        let bytes_copied = copy(reader, writer);

        // ... after which we'll print what happened
        let handle_conn = bytes_copied.map(|amt| {
            println!("wrote {} bytes", amt)
        }).map_err(|err| {
            println!("IO error {:?}", err)
        });

        // Spawn the future as a concurrent task
        handle.spawn(handle_conn);

        Ok(())
    });

    // Spin up the server on the event loop; runs forever (accept errors
    // would surface here as a panic via `unwrap`).
    core.run(server).unwrap();
}
pub mod tui;
mod config;
mod source;
mod scraper;
use async_trait::async_trait;
use crate::track::Track;
use super::{metasource, websearch};
pub use config::Config;
pub use source::{Params as SourceParams, Status, ItemStatus};
/// Metadata-source module generic over a web-search backend `WS`.
#[derive(Debug, Clone)]
pub struct Module<WS: websearch::Module> {
    // Configuration parameterized on the backend's own search-config type.
    config: Config<WS::SearchConfig>,
}
impl<WS: websearch::Module> super::Module for Module<WS> {
    type Config = Config<WS::SearchConfig>;

    /// Wrap the supplied configuration in a new module instance.
    fn new(config: Self::Config) -> Self {
        Self { config }
    }
}
#[async_trait(?Send)]
impl<WS> metasource::Module for Module<WS>
where
    WS: websearch::Module,
{
    type Params = SourceParams<WS>;
    type Error = WS::Error;

    /// Delegate to the free function in the `source` submodule.
    /// The returned bool presumably indicates whether any metadata was
    /// filled in -- confirm against `source::fill_metadata`.
    async fn fill_metadata(
        &self,
        track: &mut Track,
        params: Self::Params
    ) -> Result<bool, Self::Error> {
        source
            ::fill_metadata(self, track, params)
            .await
    }
}
|
extern crate proc_macro;
use std::io::{Error as IOError};
use byteorder::{ByteOrder, WriteBytesExt, BE, LE};
use failure::Fail;
use quote::{ToTokens, quote};
use proc_macro::TokenStream;
use proc_macro_hack::proc_macro_hack;
use syn::{parse_macro_input, Error as SynError, Expr, IntSuffix, FloatSuffix, Lit, LitInt, LitFloat, Token, UnOp};
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
/// Byte order used when serializing a literal into the output byte array.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Endianness {
    LE,
    BE,
}
// Endianness applied to literals with no explicit `: LE` / `: BE` marker;
// flipped at build time by the `default-big-endian` cargo feature.
#[cfg(not(feature = "default-big-endian"))]
const DEFAULT_ENDIANNESS: Endianness = Endianness::LE;
#[cfg(feature = "default-big-endian")]
const DEFAULT_ENDIANNESS: Endianness = Endianness::BE;
/// Errors produced while expanding the `bytify!` macro input.
/// Uses the `failure` crate's derive for `Display`/`Fail` impls.
#[derive(Debug, Fail)]
enum Error {
    // A unary operator applied to something other than an int/float literal.
    #[fail(display = "Unsupported prefixed expression in the macro: {} [+] {}", _0, _1)]
    UnsupportedPrefixedExpression(String, String),
    // An expression form the macro doesn't understand at all.
    #[fail(display = "Unsupported expression in the macro: {}", _0)]
    UnsupportedExpression(String),
    // A literal kind with no byte serialization (e.g. bool).
    #[fail(display = "Unsupported literal in the macro: {}", _0)]
    UnsupportedLit(String),
    #[fail(display = "Unsupported numeric suffix in the macro: {}", _0)]
    UnsupportedNumberSuffix(String),
    #[fail(display = "Failed to parse the input as a comma-separated list: {}", _0)]
    InvalidInput(#[cause] SynError),
    // The `value: XX` endianness marker wasn't `LE`/`le`/`BE`/`be`.
    #[fail(display = "Failed to parse endianness: {}", _0)]
    InvalidEndianness(String),
    // The literal's value doesn't fit the suffix the user asked for.
    #[fail(display = "Failed to write a suffixed value: {}, negative: {}, given suffix: {}, requested suffix: {}", _0, _1, _2, _3)]
    IncompatibleNumberSuffix(String, bool, String, String),
    #[fail(display = "Failed to write a value: {}", _0)]
    IO(#[cause] IOError),
}
impl From<SynError> for Error {
fn from(from: SynError) -> Self {
Error::InvalidInput(from)
}
}
impl From<IOError> for Error {
fn from(from: IOError) -> Self {
Error::IO(from)
}
}
impl Error {
pub fn unsupported_expression(expr: Expr) -> Self {
Error::UnsupportedExpression(expr.into_token_stream().to_string())
}
pub fn unsupported_lit(lit: Lit) -> Self {
Error::UnsupportedLit(lit.into_token_stream().to_string())
}
pub fn unsupported_prefixed_expression(op: UnOp, expr: Expr) -> Self {
Error::UnsupportedPrefixedExpression(op.into_token_stream().to_string(), expr.into_token_stream().to_string())
}
}
/// Determine the effective integer suffix for a literal: infer the smallest
/// suffix that fits the magnitude, then reconcile it with any suffix the
/// user wrote, rejecting lossy or ambiguous combinations.
fn int_to_suffix(negative: bool, int: &LitInt) -> Result<IntSuffix, Error> {
    let num_bits = int.value();
    // Smallest suffix that can represent the magnitude. For negatives the
    // thresholds are the two's-complement minimums (e.g. -0x80 still fits i8,
    // so only magnitudes strictly above 0x80 need i16).
    let s = if negative {
        match () {
            () if num_bits > 0x80000000 => IntSuffix::I64,
            () if num_bits > 0x8000 => IntSuffix::I32,
            () if num_bits > 0x80 => IntSuffix::I16,
            () => IntSuffix::I8,
        }
    } else {
        match () {
            () if num_bits > 0xFFFFFFFF => IntSuffix::U64,
            () if num_bits > 0xFFFF => IntSuffix::U32,
            () if num_bits > 0xFF => IntSuffix::U16,
            () => IntSuffix::U8,
        }
    };
    // Reconcile (inferred, user-written) suffixes; only widening (or
    // range-checked sign-changing) combinations are accepted.
    let s = match (s, int.suffix()) {
        // If none is specified use the least size suffix possible.
        (s, IntSuffix::None) => s,
        // Allowed casts Uint -> Uint.
        (IntSuffix::U8 , IntSuffix::U8 ) => IntSuffix::U8 ,
        (IntSuffix::U8 , IntSuffix::U16) => IntSuffix::U16,
        (IntSuffix::U8 , IntSuffix::U32) => IntSuffix::U32,
        (IntSuffix::U8 , IntSuffix::U64) => IntSuffix::U64,
        (IntSuffix::U16, IntSuffix::U16) => IntSuffix::U16,
        (IntSuffix::U16, IntSuffix::U32) => IntSuffix::U32,
        (IntSuffix::U16, IntSuffix::U64) => IntSuffix::U64,
        (IntSuffix::U32, IntSuffix::U32) => IntSuffix::U32,
        (IntSuffix::U32, IntSuffix::U64) => IntSuffix::U64,
        (IntSuffix::U64, IntSuffix::U64) => IntSuffix::U64,
        // Allowed casts Sint -> Sint.
        (IntSuffix::I8 , IntSuffix::I8 ) => IntSuffix::I8 ,
        (IntSuffix::I8 , IntSuffix::I16) => IntSuffix::I16,
        (IntSuffix::I8 , IntSuffix::I32) => IntSuffix::I32,
        (IntSuffix::I8 , IntSuffix::I64) => IntSuffix::I64,
        (IntSuffix::I16, IntSuffix::I16) => IntSuffix::I16,
        (IntSuffix::I16, IntSuffix::I32) => IntSuffix::I32,
        (IntSuffix::I16, IntSuffix::I64) => IntSuffix::I64,
        (IntSuffix::I32, IntSuffix::I32) => IntSuffix::I32,
        (IntSuffix::I32, IntSuffix::I64) => IntSuffix::I64,
        (IntSuffix::I64, IntSuffix::I64) => IntSuffix::I64,
        // Allowed casts Uint -> Sint.
        // Same-width unsigned->signed only when the value fits the signed range.
        (IntSuffix::U8 , IntSuffix::I8 ) if num_bits < 0x80 => IntSuffix::I8 ,
        (IntSuffix::U16, IntSuffix::I16) if num_bits < 0x8000 => IntSuffix::I16,
        (IntSuffix::U32, IntSuffix::I32) if num_bits < 0x80000000 => IntSuffix::I32,
        (IntSuffix::U64, IntSuffix::I64) if num_bits < 0x8000000000000000 => IntSuffix::I64,
        // Widening unsigned->signed always fits.
        (IntSuffix::U8 , IntSuffix::I16) => IntSuffix::I16,
        (IntSuffix::U8 , IntSuffix::I32) => IntSuffix::I32,
        (IntSuffix::U8 , IntSuffix::I64) => IntSuffix::I64,
        (IntSuffix::U16, IntSuffix::I32) => IntSuffix::I32,
        (IntSuffix::U16, IntSuffix::I64) => IntSuffix::I64,
        (IntSuffix::U32, IntSuffix::I64) => IntSuffix::I64,
        // Everything else is either invalid or ambiguous.
        (given, requested) => {
            return Err(Error::IncompatibleNumberSuffix(
                int.into_token_stream().to_string(),
                negative,
                format!("{:?}", given),
                format!("{:?}", requested),
            ));
        },
    };
    Ok(s)
}
/// Serialize one integer literal into `output` with byte order `O`, at the
/// width chosen by `int_to_suffix`. For negative values the two's complement
/// is formed manually (`!x + 1`) since the magnitude arrives as a `u64`.
fn bytify_implementation_int<O: ByteOrder>(negative: bool, int: LitInt, output: &mut Vec<u8>) -> Result<(), Error> {
    let num_bits = int.value();
    let num_bits_suffix = int_to_suffix(negative, &int)?;
    match num_bits_suffix {
        IntSuffix::U8 => {
            output.write_u8(num_bits as u8)?;
        },
        IntSuffix::I8 => {
            if negative {
                // Two's complement of the magnitude at 8-bit width.
                output.write_u8((!(num_bits as u8)).wrapping_add(1))?;
            } else {
                output.write_u8( num_bits as u8)?;
            }
        },
        IntSuffix::U16 => {
            output.write_u16::<O>(num_bits as u16)?;
        },
        IntSuffix::I16 => {
            if negative {
                output.write_u16::<O>((!(num_bits as u16)).wrapping_add(1))?;
            } else {
                output.write_u16::<O>( num_bits as u16)?;
            }
        },
        IntSuffix::U32 => {
            output.write_u32::<O>(num_bits as u32)?;
        },
        IntSuffix::I32 => {
            if negative {
                output.write_u32::<O>((!(num_bits as u32)).wrapping_add(1))?;
            } else {
                output.write_u32::<O>( num_bits as u32)?;
            }
        },
        IntSuffix::U64 => {
            output.write_u64::<O>(num_bits as u64)?;
        },
        IntSuffix::I64 => {
            if negative {
                output.write_u64::<O>((!(num_bits as u64)).wrapping_add(1))?;
            } else {
                output.write_u64::<O>( num_bits as u64)?;
            }
        },
        // Everything else is either invalid or ambiguous.
        s => {
            return Err(Error::UnsupportedNumberSuffix(format!("{:?}", s)));
        },
    }
    Ok(())
}
/// Determine the effective float suffix: infer `f32` unless the magnitude
/// exceeds the `f32` range, then reconcile with any user-written suffix
/// (the only widening cast allowed is `f32 -> f64`).
fn float_to_suffix(negative: bool, float: &LitFloat) -> Result<FloatSuffix, Error> {
    let num_bits = float.value();
    // NOTE(review): 3.40282347e+38 is intended to be f32::MAX; as an f64
    // literal it is marginally above the exact f32::MAX value, but every
    // value in between still rounds to f32::MAX on cast -- confirm before
    // replacing with `f32::MAX as f64`.
    let s = if num_bits > 3.40282347e+38 {
        FloatSuffix::F64
    } else {
        FloatSuffix::F32
    };
    let s = match (s, float.suffix()) {
        // If none is specified use the least size suffix possible.
        (s, FloatSuffix::None) => s,
        // The only possible float cast.
        (FloatSuffix::F32, FloatSuffix::F64) => FloatSuffix::F64,
        // Everything else is either invalid or ambiguous.
        (given, requested) => {
            return Err(Error::IncompatibleNumberSuffix(
                float.into_token_stream().to_string(),
                negative,
                format!("{:?}", given),
                format!("{:?}", requested),
            ));
        },
    };
    Ok(s)
}
/// Serialize one float literal into `output` with byte order `O`, at the
/// width chosen by `float_to_suffix`. Negation is applied here because the
/// magnitude arrives unsigned from the parser.
fn bytify_implementation_float<O: ByteOrder>(negative: bool, float: LitFloat, output: &mut Vec<u8>) -> Result<(), Error> {
    let num_bits = float.value();
    let num_bits_suffix = float_to_suffix(negative, &float)?;
    match num_bits_suffix {
        FloatSuffix::F32 => {
            if negative {
                output.write_f32::<O>(-(num_bits as f32))?;
            } else {
                output.write_f32::<O>( num_bits as f32 )?;
            }
        },
        FloatSuffix::F64 => {
            if negative {
                output.write_f64::<O>(-num_bits)?;
            } else {
                output.write_f64::<O>( num_bits)?;
            }
        },
        // Everything else is either invalid or ambiguous.
        s => {
            return Err(Error::UnsupportedNumberSuffix(format!("{:?}", s)));
        },
    }
    Ok(())
}
/// Serialize one (non-negated) literal into `output`: chars as UTF-8,
/// strings as their UTF-8 bytes, ints/floats via their typed serializers.
/// Other literal kinds (bool, byte strings, ...) are rejected.
fn bytify_implementation_element<O: ByteOrder>(lit: Lit, output: &mut Vec<u8>) -> Result<(), Error> {
    match lit {
        Lit::Char(c) => {
            // Grow the buffer by the char's UTF-8 length, then encode in place.
            let offset = output.len();
            output.resize(c.value().len_utf8() + offset, 0u8);
            c.value().encode_utf8(&mut output[offset ..]);
        },
        Lit::Str(string) => {
            output.extend_from_slice(string.value().as_bytes());
        },
        Lit::Int(int) => {
            bytify_implementation_int::<O>(false, int, output)?;
        },
        Lit::Float(float) => {
            bytify_implementation_float::<O>(false, float, output)?;
        },
        lit => {
            return Err(Error::unsupported_lit(lit));
        },
    }
    Ok(())
}
/// Parsed macro input: a comma-separated list of expressions (literals,
/// optionally negated and/or endianness-annotated via `expr: BE`/`expr: LE`).
#[derive(Debug)]
struct MyMacroInput {
    list: Punctuated<Expr, Token![,]>,
}
impl Parse for MyMacroInput {
    /// Parse the whole macro input as a comma-separated expression list
    /// (a trailing comma is accepted by `parse_terminated`).
    fn parse(input: ParseStream) -> Result<Self, SynError> {
        let list = input.parse_terminated(Expr::parse)?;
        Ok(MyMacroInput { list })
    }
}
/// Core expansion: walk every expression in the macro input, serialize each
/// literal (honoring per-element endianness markers and unary minus) into a
/// byte vector, and emit the bytes as an array literal token stream.
fn bytify_implementation(input: MyMacroInput) -> Result<TokenStream, Error> {
    let mut output: Vec<u8> = Vec::new();
    for expr in input.list {
        // Peel an optional endianness marker. A type-ascription expression
        // (`value: BE`) is (ab)used as the per-element byte-order annotation
        // -- hence the original author's "/* it is not, actually! */" aside:
        // it is not really a type.
        let (
            endianness,
            expr,
        ) = match expr {
            /* it is not, actually! */ Expr::Type(tpe_expr) => {
                let expr = *tpe_expr.expr;
                let endianness = match tpe_expr.ty.into_token_stream().to_string().as_str() {
                    "BE" | "be" => Endianness::BE,
                    "LE" | "le" => Endianness::LE,
                    invalid => {
                        return Err(Error::InvalidEndianness(invalid.to_string()));
                    },
                };
                (endianness, expr)
            },
            expr => {
                (DEFAULT_ENDIANNESS, expr)
            },
        };
        // Dispatch on the (possibly unwrapped) expression: plain literals,
        // or a unary minus applied to an int/float literal.
        match expr {
            Expr::Lit(lit_expr) => {
                if endianness == Endianness::BE {
                    bytify_implementation_element::<BE>(lit_expr.lit, &mut output)?;
                } else {
                    bytify_implementation_element::<LE>(lit_expr.lit, &mut output)?;
                }
            },
            Expr::Unary(unary_expr) => {
                match unary_expr.op {
                    UnOp::Neg(op) => {
                        match *unary_expr.expr {
                            Expr::Lit(lit_expr) => {
                                match lit_expr.lit {
                                    Lit::Int(int) => {
                                        if endianness == Endianness::BE {
                                            bytify_implementation_int::<BE>(true, int, &mut output)?;
                                        } else {
                                            bytify_implementation_int::<LE>(true, int, &mut output)?;
                                        }
                                    },
                                    Lit::Float(float) => {
                                        if endianness == Endianness::BE {
                                            bytify_implementation_float::<BE>(true, float, &mut output)?;
                                        } else {
                                            bytify_implementation_float::<LE>(true, float, &mut output)?;
                                        }
                                    },
                                    lit => {
                                        return Err(Error::unsupported_lit(lit));
                                    },
                                }
                            },
                            expr => {
                                return Err(Error::unsupported_prefixed_expression(UnOp::Neg(op), expr));
                            },
                        }
                    },
                    op => {
                        return Err(Error::unsupported_prefixed_expression(op, *unary_expr.expr));
                    },
                }
            },
            expr => {
                return Err(Error::unsupported_expression(expr));
            },
        }
    }
    // Emit `[b0, b1, ...]` with the accumulated bytes.
    Ok(quote! {
        [
            #(#output),*
        ]
    }.into())
}
/// Proc-macro entry point: parse the input list and expand it to a byte
/// array literal, panicking (i.e. emitting a compile error) on bad input.
#[proc_macro_hack]
pub fn bytify(input: TokenStream) -> TokenStream {
    let parsed = parse_macro_input!(input as MyMacroInput);
    match bytify_implementation(parsed) {
        Ok(tokens) => tokens,
        Err(err) => panic!("{}", err),
    }
}
|
//! Checks that the declared unsafety is respected by the attributes
#![deny(warnings)]
#![no_main]
#![no_std]
extern crate cortex_m_rt;
extern crate panic_halt;
use cortex_m_rt::{entry, exception, ExceptionFrame};
// The entry point itself may be `unsafe fn`; it can then call other
// unsafe fns without an `unsafe` block.
#[entry]
unsafe fn main() -> ! {
    foo();
    loop {}
}
// `#[exception]` must accept an `unsafe` default handler.
#[exception]
unsafe fn DefaultHandler(_irqn: i16) {
    foo();
}
// `#[exception]` must accept an `unsafe` HardFault handler (diverging).
#[exception]
unsafe fn HardFault(_ef: &ExceptionFrame) -> ! {
    foo();
    loop {}
}
// `#[exception]` must accept an `unsafe` ordinary exception handler.
#[exception]
unsafe fn SysTick() {
    foo();
}
// Dummy unsafe fn: callable without an `unsafe` block only from the
// `unsafe` handlers above, which is exactly what this file checks.
unsafe fn foo() {}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Umbrella error for this generated client: one transparent variant per
// operation, each `#[from]`-convertible from that operation's own error
// type. Generated by AutoRust -- do not hand-edit variant names, they
// encode `{OperationGroup}_{Operation}`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Skus_List(#[from] skus::list::Error),
    #[error(transparent)]
    TestBaseAccounts_ListBySubscription(#[from] test_base_accounts::list_by_subscription::Error),
    #[error(transparent)]
    TestBaseAccounts_ListByResourceGroup(#[from] test_base_accounts::list_by_resource_group::Error),
    #[error(transparent)]
    TestBaseAccounts_Get(#[from] test_base_accounts::get::Error),
    #[error(transparent)]
    TestBaseAccounts_Create(#[from] test_base_accounts::create::Error),
    #[error(transparent)]
    TestBaseAccounts_Update(#[from] test_base_accounts::update::Error),
    #[error(transparent)]
    TestBaseAccounts_Delete(#[from] test_base_accounts::delete::Error),
    #[error(transparent)]
    TestBaseAccounts_Offboard(#[from] test_base_accounts::offboard::Error),
    #[error(transparent)]
    Usage_List(#[from] usage::list::Error),
    #[error(transparent)]
    TestBaseAccounts_GetFileUploadUrl(#[from] test_base_accounts::get_file_upload_url::Error),
    #[error(transparent)]
    AvailableOs_List(#[from] available_os::list::Error),
    #[error(transparent)]
    AvailableOs_Get(#[from] available_os::get::Error),
    #[error(transparent)]
    FlightingRings_List(#[from] flighting_rings::list::Error),
    #[error(transparent)]
    FlightingRings_Get(#[from] flighting_rings::get::Error),
    #[error(transparent)]
    TestTypes_List(#[from] test_types::list::Error),
    #[error(transparent)]
    TestTypes_Get(#[from] test_types::get::Error),
    #[error(transparent)]
    TestBaseAccounts_CheckPackageNameAvailability(#[from] test_base_accounts::check_package_name_availability::Error),
    #[error(transparent)]
    Packages_ListByTestBaseAccount(#[from] packages::list_by_test_base_account::Error),
    #[error(transparent)]
    Packages_Get(#[from] packages::get::Error),
    #[error(transparent)]
    Packages_Create(#[from] packages::create::Error),
    #[error(transparent)]
    Packages_Update(#[from] packages::update::Error),
    #[error(transparent)]
    Packages_Delete(#[from] packages::delete::Error),
    #[error(transparent)]
    Packages_HardDelete(#[from] packages::hard_delete::Error),
    #[error(transparent)]
    Packages_GetDownloadUrl(#[from] packages::get_download_url::Error),
    #[error(transparent)]
    TestSummaries_List(#[from] test_summaries::list::Error),
    #[error(transparent)]
    TestSummaries_Get(#[from] test_summaries::get::Error),
    #[error(transparent)]
    TestResults_List(#[from] test_results::list::Error),
    #[error(transparent)]
    TestResults_Get(#[from] test_results::get::Error),
    #[error(transparent)]
    OsUpdates_List(#[from] os_updates::list::Error),
    #[error(transparent)]
    OsUpdates_Get(#[from] os_updates::get::Error),
    #[error(transparent)]
    FavoriteProcesses_List(#[from] favorite_processes::list::Error),
    #[error(transparent)]
    FavoriteProcesses_Get(#[from] favorite_processes::get::Error),
    #[error(transparent)]
    FavoriteProcesses_Create(#[from] favorite_processes::create::Error),
    #[error(transparent)]
    FavoriteProcesses_Delete(#[from] favorite_processes::delete::Error),
    #[error(transparent)]
    AnalysisResults_List(#[from] analysis_results::list::Error),
    #[error(transparent)]
    AnalysisResults_Get(#[from] analysis_results::get::Error),
    #[error(transparent)]
    TestResults_GetDownloadUrl(#[from] test_results::get_download_url::Error),
    #[error(transparent)]
    TestResults_GetVideoDownloadUrl(#[from] test_results::get_video_download_url::Error),
    #[error(transparent)]
    EmailEvents_List(#[from] email_events::list::Error),
    #[error(transparent)]
    EmailEvents_Get(#[from] email_events::get::Error),
    #[error(transparent)]
    CustomerEvents_ListByTestBaseAccount(#[from] customer_events::list_by_test_base_account::Error),
    #[error(transparent)]
    CustomerEvents_Get(#[from] customer_events::get::Error),
    #[error(transparent)]
    CustomerEvents_Create(#[from] customer_events::create::Error),
    #[error(transparent)]
    CustomerEvents_Delete(#[from] customer_events::delete::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
}
pub mod skus {
    use super::{models, API_VERSION};
    /// Lists the Test Base SKUs available to a subscription.
    ///
    /// Sends `GET {base}/subscriptions/{subscription_id}/providers/Microsoft.TestBase/skus`.
    /// A 200 response is deserialized into [`models::TestBaseAccountSkuListResult`];
    /// any other status code is returned as [`list::Error::DefaultResponse`]
    /// carrying the parsed [`models::ErrorResponse`] body.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<models::TestBaseAccountSkuListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.TestBase/skus",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: a token is fetched and attached only when the
        // operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // Use the locally imported API_VERSION binding; referencing it as
        // `super::API_VERSION` left the `use` import above unused (lint noise).
        url.query_pairs_mut().append_pair("api-version", API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::TestBaseAccountSkuListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`super::list`].
    pub mod list {
        use super::{models, API_VERSION};
        /// Failure modes of the `skus::list` operation.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod test_base_accounts {
use super::{models, API_VERSION};
/// Lists Test Base accounts in a subscription.
///
/// Sends `GET {base}/subscriptions/{subscription_id}/providers/Microsoft.TestBase/testBaseAccounts`.
/// When `get_deleted` is `Some`, it is forwarded as the `getDeleted` query
/// parameter. A 200 response is deserialized into
/// [`models::TestBaseAccountListResult`]; any other status becomes
/// [`list_by_subscription::Error::DefaultResponse`].
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
get_deleted: Option<bool>,
) -> std::result::Result<models::TestBaseAccountListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.TestBase/testBaseAccounts",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional query parameter: include soft-deleted accounts when requested.
if let Some(get_deleted) = get_deleted {
url.query_pairs_mut().append_pair("getDeleted", get_deleted.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
// Non-200: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for [`list_by_subscription`].
pub mod list_by_subscription {
use super::{models, API_VERSION};
/// Failure modes of the `list_by_subscription` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists Test Base accounts within one resource group.
///
/// Sends `GET {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts`.
/// When `get_deleted` is `Some`, it is forwarded as the `getDeleted` query
/// parameter. A 200 response is deserialized into
/// [`models::TestBaseAccountListResult`]; any other status becomes
/// [`list_by_resource_group::Error::DefaultResponse`].
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
get_deleted: Option<bool>,
) -> std::result::Result<models::TestBaseAccountListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional query parameter: include soft-deleted accounts when requested.
if let Some(get_deleted) = get_deleted {
url.query_pairs_mut().append_pair("getDeleted", get_deleted.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
// Non-200: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for [`list_by_resource_group`].
pub mod list_by_resource_group {
use super::{models, API_VERSION};
/// Failure modes of the `list_by_resource_group` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::TestBaseAccountResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates (or restores) a Test Base account via `PUT`.
///
/// Sends `PUT {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}`
/// with `parameters` serialized as the JSON request body. When `restore` is
/// `Some`, it is forwarded as the `restore` query parameter. Returns
/// [`create::Response::Ok200`] or [`create::Response::Created201`], each
/// carrying the resulting [`models::TestBaseAccountResource`].
pub async fn create(
operation_config: &crate::OperationConfig,
parameters: &models::TestBaseAccountResource,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
restore: Option<bool>,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
// Optional query parameter: restore a previously deleted account.
if let Some(restore) = restore {
url.query_pairs_mut().append_pair("restore", restore.to_string().as_str());
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
// Non-success: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`create`].
pub mod create {
use super::{models, API_VERSION};
/// Status-specific outcomes of a successful create call.
#[derive(Debug)]
pub enum Response {
Ok200(models::TestBaseAccountResource),
Created201(models::TestBaseAccountResource),
}
/// Failure modes of the `create` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates an existing Test Base account via `PATCH`.
///
/// Sends `PATCH {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}`
/// with `parameters` serialized as the JSON request body. A 200 response
/// carries the updated resource; 201 and 202 are returned as payload-less
/// variants ([`update::Response::Created201`] / [`update::Response::Accepted202`]).
pub async fn update(
operation_config: &crate::OperationConfig,
parameters: &models::TestBaseAccountUpdateParameters,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
// 201/202 responses are mapped without reading any body.
http::StatusCode::CREATED => Ok(update::Response::Created201),
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
// Non-success: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`update`].
pub mod update {
use super::{models, API_VERSION};
/// Status-specific outcomes of a successful update call.
#[derive(Debug)]
pub enum Response {
Ok200(models::TestBaseAccountResource),
Created201,
Accepted202,
}
/// Failure modes of the `update` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Offboards a Test Base account.
///
/// Sends `POST {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}/offboard`
/// with an empty body. Maps 200/202/204 to the corresponding
/// [`offboard::Response`] variant; any other status becomes
/// [`offboard::Error::DefaultResponse`].
pub async fn offboard(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<offboard::Response, offboard::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/offboard",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(offboard::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(offboard::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
// Body-less POST: send an explicit Content-Length of 0.
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(offboard::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(offboard::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(offboard::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(offboard::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(offboard::Response::NoContent204),
status_code => {
// Non-success: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| offboard::Error::DeserializeError(source, rsp_body.clone()))?;
Err(offboard::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`offboard`].
pub mod offboard {
use super::{models, API_VERSION};
/// Status-specific outcomes of a successful offboard call.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
/// Failure modes of the `offboard` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Requests a file-upload URL for a Test Base account.
///
/// Sends `POST {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}/getFileUploadUrl`.
/// The request body is the JSON-serialized `parameters` when provided,
/// otherwise an empty body (and no content-type header is set). A 200
/// response is deserialized into [`models::FileUploadUrlResponse`].
pub async fn get_file_upload_url(
operation_config: &crate::OperationConfig,
parameters: Option<&models::GetFileUploadUrlParameters>,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::FileUploadUrlResponse, get_file_upload_url::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/getFileUploadUrl",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(get_file_upload_url::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_file_upload_url::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The body (and content-type header) are only produced when parameters
// were supplied; otherwise an empty body is sent.
let req_body = if let Some(parameters) = parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters).map_err(get_file_upload_url::Error::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_file_upload_url::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_file_upload_url::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileUploadUrlResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_file_upload_url::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
// Non-200: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_file_upload_url::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_file_upload_url::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for [`get_file_upload_url`].
pub mod get_file_upload_url {
use super::{models, API_VERSION};
/// Failure modes of the `get_file_upload_url` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Checks whether a package name is available within a Test Base account.
///
/// Sends `POST {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}/checkPackageNameAvailability`
/// with `parameters` serialized as the JSON request body. A 200 response is
/// deserialized into [`models::CheckNameAvailabilityResult`]; any other
/// status becomes [`check_package_name_availability::Error::DefaultResponse`].
pub async fn check_package_name_availability(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
parameters: &models::PackageCheckNameAvailabilityParameters,
) -> std::result::Result<models::CheckNameAvailabilityResult, check_package_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/checkPackageNameAvailability",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(check_package_name_availability::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_package_name_availability::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(check_package_name_availability::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(check_package_name_availability::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_package_name_availability::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CheckNameAvailabilityResult = serde_json::from_slice(rsp_body)
.map_err(|source| check_package_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
// Non-200: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| check_package_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Err(check_package_name_availability::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for [`check_package_name_availability`].
pub mod check_package_name_availability {
use super::{models, API_VERSION};
/// Failure modes of the `check_package_name_availability` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations for querying Test Base account usage data.
pub mod usage {
use super::{models, API_VERSION};
/// Lists usage data for a Test Base account.
///
/// Sends `GET {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}/usages`.
/// When `filter` is `Some`, it is forwarded as the OData `$filter` query
/// parameter. A 200 response is deserialized into
/// [`models::TestBaseAccountUsageDataList`].
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::TestBaseAccountUsageDataList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/usages",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional OData filter expression.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestBaseAccountUsageDataList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
// Non-200: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for [`list`].
pub mod list {
use super::{models, API_VERSION};
/// Failure modes of the `usage::list` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod available_os {
use super::{models, API_VERSION};
/// Lists the operating systems available under a Test Base account for the
/// given OS update type.
///
/// Sends `GET {base}/subscriptions/{sid}/resourceGroups/{rg}/providers/Microsoft.TestBase/testBaseAccounts/{name}/availableOSs`
/// with `os_update_type` forwarded as the required `osUpdateType` query
/// parameter. A 200 response is deserialized into
/// [`models::AvailableOsListResult`]; any other status becomes
/// [`list::Error::DefaultResponse`].
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
os_update_type: &str,
) -> std::result::Result<models::AvailableOsListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/availableOSs",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Required query parameter selecting the OS update type.
url.query_pairs_mut().append_pair("osUpdateType", os_update_type);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailableOsListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
// Non-200: the service is expected to return an ErrorResponse body.
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
available_os_resource_name: &str,
) -> std::result::Result<models::AvailableOsResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/availableOSs/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
available_os_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailableOsResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations for the `flightingRings` collection of a Test Base account.
pub mod flighting_rings {
use super::{models, API_VERSION};
/// Lists the flighting rings of a Test Base account.
///
/// HTTP 200 yields the deserialized list; any other status is surfaced as
/// `list::Error::DefaultResponse` with the parsed service error body.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::FlightingRingListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/flightingRings",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: attached only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET carries an empty body; URI is set after the query pairs are appended.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FlightingRingListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list`].
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single flighting-ring resource by name.
///
/// HTTP 200 yields the deserialized resource; other statuses map to
/// `get::Error::DefaultResponse` with the parsed service error body.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
flighting_ring_resource_name: &str,
) -> std::result::Result<models::FlightingRingResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/flightingRings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
flighting_ring_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FlightingRingResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get`].
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations for the `testTypes` collection of a Test Base account.
pub mod test_types {
use super::{models, API_VERSION};
/// Lists the test types of a Test Base account.
///
/// HTTP 200 yields the deserialized list; any other status is surfaced as
/// `list::Error::DefaultResponse` with the parsed service error body.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::TestTypeListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/testTypes",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: attached only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// GET carries an empty body; URI is set after the query pairs are appended.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestTypeListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list`].
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single test-type resource by name.
///
/// HTTP 200 yields the deserialized resource; other statuses map to
/// `get::Error::DefaultResponse` with the parsed service error body.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
test_type_resource_name: &str,
) -> std::result::Result<models::TestTypeResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/testTypes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
test_type_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestTypeResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get`].
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod packages {
use super::{models, API_VERSION};
pub async fn list_by_test_base_account(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::PackageListResult, list_by_test_base_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_test_base_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_test_base_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_test_base_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_test_base_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PackageListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_test_base_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_test_base_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_test_base_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list_by_test_base_account`].
pub mod list_by_test_base_account {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// Non-success HTTP status; `value` is the parsed service error body.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// Deserialization failure; the raw response bytes are kept for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<models::PackageResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PackageResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get`].
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// Non-success HTTP status; `value` is the parsed service error body.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// Deserialization failure; the raw response bytes are kept for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
parameters: &models::PackageResource,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PackageResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::PackageResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`create`].
pub mod create {
use super::{models, API_VERSION};
/// Successful outcomes of the create (PUT) operation.
#[derive(Debug)]
pub enum Response {
/// 200 OK — existing package replaced; body carries the resource.
Ok200(models::PackageResource),
/// 201 Created — package created; body carries the resource.
Created201(models::PackageResource),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// Non-success HTTP status; `value` is the parsed service error body.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
parameters: &models::PackageUpdateParameters,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PackageResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => Ok(update::Response::Created201),
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`update`].
pub mod update {
use super::{models, API_VERSION};
/// Successful outcomes of the update (PATCH) operation.
#[derive(Debug)]
pub enum Response {
/// 200 OK — body carries the updated resource.
Ok200(models::PackageResource),
/// 201 Created — acknowledged without a parsed body.
Created201,
/// 202 Accepted — acknowledged without a parsed body.
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// Non-success HTTP status; `value` is the parsed service error body.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`delete`].
pub mod delete {
use super::{models, API_VERSION};
/// Successful outcomes of the delete operation.
#[derive(Debug)]
pub enum Response {
/// 200 OK — deleted.
Ok200,
/// 202 Accepted — deletion accepted for asynchronous processing.
Accepted202,
/// 204 No Content — resource already absent.
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// Non-success HTTP status; `value` is the parsed service error body.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn hard_delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<hard_delete::Response, hard_delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/hardDelete",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(hard_delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(hard_delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(hard_delete::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(hard_delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(hard_delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(hard_delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(hard_delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| hard_delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(hard_delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`hard_delete`].
pub mod hard_delete {
use super::{models, API_VERSION};
/// Successful outcomes of the hard-delete operation.
#[derive(Debug)]
pub enum Response {
/// 200 OK — hard-deleted.
Ok200,
/// 202 Accepted — hard deletion accepted for asynchronous processing.
Accepted202,
/// 204 No Content — resource already absent.
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// Non-success HTTP status; `value` is the parsed service error body.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_download_url(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<models::DownloadUrlResponse, get_download_url::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/getDownloadUrl",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(get_download_url::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_download_url::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_download_url::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_download_url::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DownloadUrlResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_download_url::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_download_url::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_download_url::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get_download_url` operation.
pub mod get_download_url {
use super::{models, API_VERSION};
/// Failure modes of `get_download_url`; `DefaultResponse` carries the
/// service's decoded error payload for any unexpected HTTP status.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): this operation sends an empty body, so SerializeError is
// never constructed here; the variant is kept for interface uniformity.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod test_summaries {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::TestSummaryListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/testSummaries",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestSummaryListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
test_summary_name: &str,
) -> std::result::Result<models::TestSummaryResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/testSummaries/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
test_summary_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestSummaryResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod test_results {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
os_update_type: &str,
filter: Option<&str>,
) -> std::result::Result<models::TestResultListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/testResults",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("osUpdateType", os_update_type);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestResultListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
test_result_name: &str,
) -> std::result::Result<models::TestResultResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/testResults/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name,
test_result_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::TestResultResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_download_url(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
test_result_name: &str,
) -> std::result::Result<models::DownloadUrlResponse, get_download_url::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/testResults/{}/getDownloadUrl" , operation_config . base_path () , subscription_id , resource_group_name , test_base_account_name , package_name , test_result_name) ;
let mut url = url::Url::parse(url_str).map_err(get_download_url::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_download_url::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_download_url::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_download_url::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DownloadUrlResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_download_url::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_download_url::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_download_url::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_download_url {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_video_download_url(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
test_result_name: &str,
) -> std::result::Result<models::DownloadUrlResponse, get_video_download_url::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/testResults/{}/getVideoDownloadUrl" , operation_config . base_path () , subscription_id , resource_group_name , test_base_account_name , package_name , test_result_name) ;
let mut url = url::Url::parse(url_str).map_err(get_video_download_url::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_video_download_url::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_video_download_url::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_video_download_url::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DownloadUrlResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_video_download_url::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_video_download_url::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_video_download_url::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_video_download_url {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod os_updates {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
os_update_type: &str,
) -> std::result::Result<models::OsUpdateListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/osUpdates",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("osUpdateType", os_update_type);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OsUpdateListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
os_update_resource_name: &str,
) -> std::result::Result<models::OsUpdateResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/osUpdates/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name,
os_update_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OsUpdateResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod favorite_processes {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
) -> std::result::Result<models::FavoriteProcessListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/favoriteProcesses",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FavoriteProcessListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `list` operation.
pub mod list {
use super::{models, API_VERSION};
/// Failure modes of `list`; `DefaultResponse` carries the service's decoded
/// error payload for any unexpected HTTP status.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
favorite_process_resource_name: &str,
) -> std::result::Result<models::FavoriteProcessResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/favoriteProcesses/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name,
favorite_process_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FavoriteProcessResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get` operation.
pub mod get {
use super::{models, API_VERSION};
/// Failure modes of `get`; `DefaultResponse` carries the service's decoded
/// error payload for any unexpected HTTP status.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or updates a favorite process for a Test Base package (HTTP PUT).
///
/// Builds the ARM resource URL from the supplied identifiers, attaches a
/// bearer token when a credential is configured, sends `parameters` as a
/// JSON body, and returns the deserialized resource on 200 OK. Any other
/// status is decoded as `models::ErrorResponse` and surfaced as
/// `create::Error::DefaultResponse`.
pub async fn create(
operation_config: &crate::OperationConfig,
parameters: &models::FavoriteProcessResource,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
favorite_process_resource_name: &str,
) -> std::result::Result<models::FavoriteProcessResource, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/favoriteProcesses/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name,
favorite_process_resource_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
// Every request carries the service API version as a query parameter.
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FavoriteProcessResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any other status: decode the body as the service's error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `create` operation: its error enum.
pub mod create {
use super::{models, API_VERSION};
/// Errors produced while building, sending or decoding a `create` request.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Carries the service's error payload for any non-success HTTP status.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
// Keeps the raw response bytes so callers can inspect what failed to parse.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a favorite process of a Test Base package (HTTP DELETE).
///
/// Returns `delete::Response::Ok200` or `NoContent204` on success; any
/// other status is decoded as `models::ErrorResponse` and surfaced as
/// `delete::Error::DefaultResponse`.
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
favorite_process_resource_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/favoriteProcesses/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
package_name,
favorite_process_resource_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE carries no payload; an empty body is still required by the builder.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
// Any other status: decode the body as the service's error payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `delete` operation: its success responses and error enum.
pub mod delete {
use super::{models, API_VERSION};
/// Success responses: the service may answer 200 OK or 204 No Content.
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
/// Errors produced while building, sending or decoding a `delete` request.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Carries the service's error payload for any non-success HTTP status.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
// Keeps the raw response bytes so callers can inspect what failed to parse.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations for reading analysis results of a Test Base package test run.
pub mod analysis_results {
use super::{models, API_VERSION};
/// Lists analysis results of the given `analysis_result_type` for a test
/// result (HTTP GET). Returns the deserialized list on 200 OK; any other
/// status is decoded as `models::ErrorResponse` and surfaced as
/// `list::Error::DefaultResponse`.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
test_result_name: &str,
analysis_result_type: &str,
) -> std::result::Result<models::AnalysisResultListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/testResults/{}/analysisResults" , operation_config . base_path () , subscription_id , resource_group_name , test_base_account_name , package_name , test_result_name) ;
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()))
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The result-type filter is passed as a query parameter, not a path segment.
url.query_pairs_mut().append_pair("analysisResultType", analysis_result_type);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AnalysisResultListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `list` operation: its error enum.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single named analysis result of a test result (HTTP GET).
/// Returns the deserialized singleton resource on 200 OK; any other status
/// is surfaced as `get::Error::DefaultResponse`.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
package_name: &str,
test_result_name: &str,
analysis_result_name: &str,
) -> std::result::Result<models::AnalysisResultSingletonResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/packages/{}/testResults/{}/analysisResults/{}" , operation_config . base_path () , subscription_id , resource_group_name , test_base_account_name , package_name , test_result_name , analysis_result_name) ;
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AnalysisResultSingletonResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `get` operation: its error enum.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations for Test Base account email events (list, get).
pub mod email_events {
use super::{models, API_VERSION};
/// Lists email events of a Test Base account (HTTP GET). Returns the
/// deserialized list on 200 OK; any other status is surfaced as
/// `list::Error::DefaultResponse`.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::EmailEventListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/emailEvents",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::EmailEventListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `list` operation: its error enum.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single email event resource by name (HTTP GET). Returns the
/// deserialized resource on 200 OK; any other status is surfaced as
/// `get::Error::DefaultResponse`.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
email_event_resource_name: &str,
) -> std::result::Result<models::EmailEventResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/emailEvents/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
email_event_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::EmailEventResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `get` operation: its error enum.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations for Test Base customer events (list, get, create, delete).
pub mod customer_events {
use super::{models, API_VERSION};
/// Lists customer events of a Test Base account (HTTP GET). Returns the
/// deserialized list on 200 OK; any other status is surfaced as
/// `list_by_test_base_account::Error::DefaultResponse`.
pub async fn list_by_test_base_account(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
) -> std::result::Result<models::CustomerEventListResult, list_by_test_base_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/customerEvents",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_test_base_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_test_base_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_test_base_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_test_base_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CustomerEventListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_test_base_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_test_base_account::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_test_base_account::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `list_by_test_base_account` operation: its error enum.
pub mod list_by_test_base_account {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches a single customer event by name (HTTP GET). Returns the
/// deserialized resource on 200 OK; any other status is surfaced as
/// `get::Error::DefaultResponse`.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
customer_event_name: &str,
) -> std::result::Result<models::CustomerEventResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/customerEvents/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
customer_event_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CustomerEventResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `get` operation: its error enum.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or updates a customer event (HTTP PUT). The service may answer
/// 200 OK (updated) or 201 Created, both carrying the resulting resource;
/// any other status is surfaced as `create::Error::DefaultResponse`.
pub async fn create(
operation_config: &crate::OperationConfig,
parameters: &models::CustomerEventResource,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
customer_event_name: &str,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/customerEvents/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
customer_event_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// PUT carries a JSON body serialized from `parameters`.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::CustomerEventResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::CustomerEventResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `create` operation: its success responses and error enum.
pub mod create {
use super::{models, API_VERSION};
/// Success responses: 200 OK (update) or 201 Created, each with the resource.
#[derive(Debug)]
pub enum Response {
Ok200(models::CustomerEventResource),
Created201(models::CustomerEventResource),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes a customer event (HTTP DELETE). The service may answer 200,
/// 202 (accepted, asynchronous) or 204 (no content); any other status is
/// surfaced as `delete::Error::DefaultResponse`.
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
test_base_account_name: &str,
customer_event_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TestBase/testBaseAccounts/{}/customerEvents/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
test_base_account_name,
customer_event_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `delete` operation: its success responses and error enum.
pub mod delete {
use super::{models, API_VERSION};
/// Success responses: 200 OK, 202 Accepted or 204 No Content.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Lists the operations exposed by the Microsoft.TestBase resource provider.
pub mod operations {
use super::{models, API_VERSION};
/// Fetches the provider's operation list (HTTP GET, tenant-level — no
/// subscription or resource-group in the path). Returns the deserialized
/// list on 200 OK; any other status is surfaced as
/// `list::Error::DefaultResponse`.
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.TestBase/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Authorization is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Types for the `list` operation: its error enum.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
#[doc = "Reader of register LPTIM_SIDR"]
pub type R = crate::R<u32, super::LPTIM_SIDR>;
// Proxy type for reading the 32-bit `S_ID` field; produced by `R::s_id`.
#[doc = "Reader of field `S_ID`"]
pub type S_ID_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:31 - S_ID"]
    #[inline(always)]
    pub fn s_id(&self) -> S_ID_R {
        // S_ID spans the full 32-bit word, so the register value is handed
        // to the field reader unchanged: a `& 0xffff_ffff` mask followed by
        // `as u32` would both be no-ops on a `u32`.
        S_ID_R::new(self.bits)
    }
}
|
extern crate libc;
extern crate regex;
#[macro_use]
extern crate lazy_static;
use std::env;
use std::fs::File;
mod keyboard;
mod virtual_keyboard;
mod key_converter;
use keyboard::Keyboard;
use virtual_keyboard::*;
use key_converter::KeyConverter;
/// Reads events from the physical keyboard forever, converts them through
/// the rule set in `kc` and forwards the results to the virtual keyboard.
///
/// Event states from `kbd.read_key()`:
/// * `1` (push): `kc.push` yields keys to press and keys to release;
///   releases are sent before presses, and the last pressed code is
///   remembered as the repeat target.
/// * `0` (leave): `kc.leave` yields keys to release.
/// * `2` (repeat): the most recently pushed code is repeated.
///
/// When `show_state` is true, each iteration also prints the raw event,
/// the virtual-keyboard actions and the converter's current filter state.
/// Never returns; panics on an unknown event state.
fn loop_keymap(kbd: Keyboard,
               mut vkbd: VirtualKeyboard,
               mut kc: KeyConverter,
               show_state: bool)
{
    // Key code most recently pushed to the virtual keyboard (repeat target).
    let mut last_push = None;
    loop {
        let (_, read_code, state) = kbd.read_key();
        // Collect the converter's output as Options for the display below.
        let (push, leave) = match state {
            // push
            1 => {
                let (push, leave) = kc.push(read_code);
                // Remember the last pushed code; None if nothing was pushed.
                last_push = push.last().cloned();
                for l in &leave {
                    vkbd.leave(*l);
                }
                for p in &push {
                    vkbd.push(*p);
                }
                (Some(push), Some(leave))
            },
            // leave
            0 => {
                let leave = kc.leave(read_code);
                for l in &leave {
                    vkbd.leave(*l);
                }
                (None, Some(leave))
            },
            // repeat
            2 => {
                // Repeat the key code most recently pushed to vkbd.
                if let Some(p) = last_push {
                    vkbd.repeat(p);
                }
                (None, None)
            },
            _ => panic!()
        };
        // Display the key state.
        if show_state {
            // Show the value read from the physical keyboard.
            print!("\t{:>15} | ", "kbd");
            match state {
                0 => print!("leave "),
                1 => print!("push "),
                2 => print!("repeat "),
                _ => ()
            }
            println!("{}", read_code);
            // Show the values sent to the virtual keyboard.
            print!("\t{:>15} | ", "vkbd");
            // unwrap_or_default avoids the eager Vec::new() argument of
            // unwrap_or (clippy::or_fun_call) while printing nothing for None.
            for p in push.unwrap_or_default() {
                print!("push {} ", p);
            }
            for l in leave.unwrap_or_default() {
                print!("leave {} ", l);
            }
            if state == 2 {
                if let Some(p) = last_push {
                    print!("repeat {} ", p);
                }
            }
            println!();
            // Show the result of applying the held keys to the rule set.
            let name = format!("@{}", kc.get_rules_name());
            println!("\t{:>15} | {}", name, kc.filter_to_string());
            println!()
        }
    }
}
// 実際にvkbdでは入力しない
/// Applies the rule set to physical key events without driving a virtual
/// keyboard (dry-run mode), printing the state of every event. Never
/// returns; panics on an unknown event state.
fn loop_keymap_without_vkbd(kbd: Keyboard, mut kc: KeyConverter) {
    loop {
        let (_, read_code, state) = kbd.read_key();
        // Feed the event into the converter; its output is discarded because
        // nothing is forwarded to a virtual keyboard in this mode.
        match state {
            // leave
            0 => {
                kc.leave(read_code);
            },
            // push
            1 => {
                kc.push(read_code);
            },
            // repeat
            2 => (),
            _ => panic!()
        }
        // Show the value read from the physical keyboard.
        print!("\t{:>15} | ", "kbd");
        let action = match state {
            0 => "leave ",
            1 => "push ",
            2 => "repeat ",
            _ => ""
        };
        print!("{}", action);
        println!("{}", read_code);
        // Nothing is sent to the virtual keyboard, so its column stays empty.
        println!("\t{:>15} | ", "vkbd");
        // Show the result of applying the held keys to the rule set.
        let rules_label = format!("@{}", kc.get_rules_name());
        println!("\t{:>15} | {}", rules_label, kc.filter_to_string());
        println!()
    }
}
/// Print CLI usage and option descriptions to stdout.
fn print_help() {
    println!("usage:");
    println!("    keymap [options...] <rule>");
    println!();
    println!("arguments:");
    println!("    <rule>    ルールを記述したファイルを指定します");
    println!();
    println!("options:");
    println!("    -s, --show-state    実行中にキーの状態を出力します");
    println!("    -r, --rule    ルールを適用しますが、実際に変換後のキーが入力されることはありません");
}
/// Write a value to stdout prefixed with "Error: ".
fn print_error<E: std::fmt::Display>(err: E) {
    println!("Error: {}", err);
}
fn main() {
let wait_time = std::time::Duration::from_millis(250);
let mut show_state = false;
let mut only_rule = false;
let mut filename = None;
// 引数をパースする
for arg in env::args().skip(1) {
if arg.len() == 0 {
continue
}
if arg.get(..2) == Some("--") {
match arg.get(2..) {
Some("show-state") => show_state = true,
Some("rule") => only_rule = true,
_ => {
print_error(format!("'{}'は無効なオプションです", arg));
print_help();
return
}
}
continue
}
if arg.chars().next() == Some('-') {
for c in arg.chars().skip(1) {
match c {
's' => show_state = true,
'r' => only_rule = true,
_ => {
print_error(format!("'{}'は無効なオプションです", arg));
print_help();
return
}
}
}
continue
}
filename = Some(arg);
}
let kc = match filename {
Some(f) => match File::open(f) {
Ok(f) => match KeyConverter::new(f) {
Ok(kc) => kc,
Err(e) => return print_error(e)
},
Err(_) => return print_error("ファイルが開けません")
},
None => {
print_error("ファイル名がありません");
print_help();
return
}
};
std::thread::sleep(wait_time);
let kbd;
if only_rule {
// grabしない
kbd = match Keyboard::open() {
Ok(kbd) => kbd,
Err(e) => { print_error(format!("{:?}", e.kind())); return; }
};
} else {
kbd = match Keyboard::open_and_grab() {
Ok(kbd) => kbd,
Err(e) => { print_error(format!("{:?}", e.kind())); return; }
};
}
let vkbd = match VirtualKeyboard::new() {
Some(vkbd) => vkbd,
None => {
print_error("Can't create virtual_keyboard.");
return;
}
};
if only_rule {
loop_keymap_without_vkbd(kbd, kc);
} else {
loop_keymap(kbd, vkbd, kc, show_state);
}
} |
mod boid;
use ggez::{GameResult, event, Context, graphics};
use ggez::graphics::Color;
use ggez::mint::Vector2;
use glam::*;
use legion::{World, Schedule, Resources, Read, IntoQuery, Entity};
use crate::boid::{update_positions_system, update_velocities_system, Position, Boid, Velocity, update_velocities, Acceleration};
use std::time::{Instant, Duration};
use rand::rngs::ThreadRng;
use rand::Rng;
/// Window width in pixels.
pub const WINDOW_WIDTH: usize = 1000;
/// Window height in pixels.
pub const WINDOW_HEIGHT: usize = 1000;
/// Number of boids spawned at startup.
pub const BOIDS: usize = 400;
/// Wall-clock timer resource shared with the ECS systems.
#[derive(Copy, Clone)]
pub struct Time {
    // Moment the timer was created or last reset.
    start: Instant,
}
impl Time {
    /// Seconds (fractional) elapsed since construction or the last `reset`.
    ///
    /// Bug fix: the previous body was `as_secs() as f32 / 60.0`, which
    /// truncated to whole seconds and then divided by 60 — i.e. it returned
    /// truncated *minutes*, contradicting the method name.
    pub fn elapsed_seconds(&self) -> f32 {
        self.start.elapsed().as_secs_f32()
    }
    /// Restart the timer from "now".
    pub fn reset(&mut self) {
        self.start = Instant::now()
    }
}
impl Default for Time {
    /// A timer starting at the moment of construction.
    fn default() -> Self {
        Time { start: Instant::now() }
    }
}
/// Top-level game state: the ECS world, its shared resources, and the
/// per-frame update schedule.
struct StartState {
    world: World,
    resources: Resources,
    update_schedule: Schedule,
}
impl StartState {
    /// Build the initial game state: a world populated with `BOIDS` randomly
    /// placed boids, the `Time` resource, and the velocity/position update
    /// schedule.
    fn new(ctx: &mut Context) -> GameResult<StartState> {
        let mut world = World::default();
        let mut resources = Resources::default();
        resources.insert(Time::default());
        // `build()` returns the finished schedule; nothing mutates it here,
        // so the binding no longer needs `mut` (fixes an unused-mut warning).
        let update_schedule = Schedule::builder()
            .add_system(update_velocities_system())
            .add_system(update_positions_system())
            .build();
        let mut rand = ThreadRng::default();
        // The loop index was unused — `_` silences the warning.
        for _ in 0..BOIDS {
            let pos = Position { x: rand.gen_range(0.0..400.0), y: rand.gen_range(0.0..400.0) };
            let vel = Velocity { dx: 0.0, dy: 0.0 };
            let acc = Acceleration { dx: 1.0, dy: 1.0 };
            world.push((pos, vel, acc, Boid::new(ctx)));
        }
        Ok(StartState { world, resources, update_schedule })
    }
}
impl event::EventHandler for StartState {
    /// Advance the simulation one tick by running the ECS schedule
    /// (velocities first, then positions).
    fn update(&mut self, _ctx: &mut Context) -> GameResult {
        self.update_schedule.execute(&mut self.world, &mut self.resources);
        Ok(())
    }
    /// Clear the screen and draw every entity that has both a `Position`
    /// and a `Boid` mesh, then present the frame.
    fn draw(&mut self, ctx: &mut Context) -> GameResult {
        graphics::clear(ctx, [0.1, 0.2, 0.3, 1.0].into());
        let mut draw_boid = |boid: &Boid, position: &Position| {
            graphics::draw(ctx, &boid.mesh, (Vec2::new(position.x, position.y),)).unwrap();
        };
        let mut query = <(&Position, &Boid)>::query();
        for (pos, boid) in query.iter(&self.world) {
            draw_boid(boid, pos);
        }
        graphics::present(ctx)?;
        Ok(())
    }
}
/// Entry point: build the ggez context, construct the game state, and hand
/// control to the event loop.
fn main() {
    let (mut ctx, event_loop) = ggez::ContextBuilder::new("boids", "luca")
        .build()
        .unwrap();
    let state = StartState::new(&mut ctx).unwrap();
    event::run(ctx, event_loop, state)
}
|
use super::hpu::*;
use std::boxed::Box;
use std::fs::File;
use super::error::*;
use std::io::{BufRead, BufReader, BufWriter, Write};
use crate::{hack_report_less};
/// Construct an [`Assembler`] for the `.asm` source file at `path`.
pub fn create_assembler(path: &std::path::PathBuf) -> Assembler {
    let hpu = HPU::new(path);
    Assembler {
        path: path.to_owned(),
        hpu,
    }
}
/// Two-pass Hack assembler driving an `HPU` over a `.asm` source file.
pub struct Assembler {
    // Path to the input file; mutated to the `.hack` output path during the
    // second pass (see `second_pass`).
    path: std::path::PathBuf,
    hpu: HPU,
}
impl Assembler {
    /// Run both assembler passes: symbol collection, then code emission.
    pub fn run(&mut self) -> Result<(), Box<HackError>> {
        self.first_pass()?;
        self.second_pass()?;
        Ok(())
    }
    /// Strip surrounding whitespace from a raw source line.
    fn polish(s: &str) -> String {
        s.trim().to_owned()
    }
    /// First pass: feed every (line-number, trimmed-line) pair to the HPU so
    /// it can record labels and symbols.
    fn first_pass(&mut self) -> Result<(), Box<HackError>> {
        println!("================= First Pass Begins =================");
        let f = File::open(&self.path).expect("Could not read file");
        let reader = BufReader::new(f);
        for (num, line) in reader.lines().enumerate() {
            self.hpu
                .first_pass(&(num, Assembler::polish(&line.unwrap())))?;
        }
        println!("================= First Pass Ends =================");
        Ok(())
    }
    /// Second pass: translate each source line and write the resulting binary
    /// text to a sibling `.hack` file.
    fn second_pass(&mut self) -> Result<(), Box<HackError>> {
        println!("================= Second Pass Begins =================");
        let f = File::open(&self.path).expect("Could not read file");
        // Fix: removed a spurious leading `&` that created an unused reference
        // to `()`. NOTE(review): this permanently rewrites `self.path` to the
        // `.hack` extension, so a subsequent pass on the same Assembler would
        // read the output file — confirm this is intentional.
        self.path.set_extension("hack");
        // Fix: the expect message previously said "read" for a create failure.
        let w = File::create(&self.path).expect("Could not create file");
        let mut writer = BufWriter::new(w);
        let reader = BufReader::new(f);
        for (num, line) in reader.lines().enumerate() {
            let out = self
                .hpu
                .second_pass(num, &Assembler::polish(&line.unwrap()))?;
            println!("[out]: {}", out);
            // Skip blank translations (comments / empty source lines).
            if !out.is_empty() {
                if writeln!(&mut writer, "{}", out).is_err() {
                    hack_report_less!("Error occured in writeln!")
                }
            }
        }
        println!("================= Second Pass Ends =================");
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    // No unit tests yet; the glob import keeps the scaffold ready.
    use super::*;
}
|
use crate::parser::nom_input;
use crate::parser::parse::token_tree::TokenNode;
use crate::parser::parse::tokens::RawToken;
use crate::parser::{Pipeline, PipelineElement};
use crate::shell::shell_manager::ShellManager;
use crate::Tagged;
use ansi_term::Color;
use rustyline::completion::Completer;
use rustyline::error::ReadlineError;
use rustyline::highlight::Highlighter;
use rustyline::hint::Hinter;
use std::borrow::Cow::{self, Owned};
/// rustyline helper wiring completion, hinting, and syntax highlighting
/// into the line editor.
pub(crate) struct Helper {
    // Delegate that implements completion and hinting.
    helper: ShellManager,
}
impl Helper {
    /// Wrap a `ShellManager` for use as the rustyline helper.
    pub(crate) fn new(helper: ShellManager) -> Helper {
        Helper { helper }
    }
}
impl Completer for Helper {
    type Candidate = rustyline::completion::Pair;
    /// Delegate tab-completion to the underlying `ShellManager`.
    fn complete(
        &self,
        line: &str,
        pos: usize,
        ctx: &rustyline::Context<'_>,
    ) -> Result<(usize, Vec<rustyline::completion::Pair>), ReadlineError> {
        self.helper.complete(line, pos, ctx)
    }
}
/*
impl Completer for Helper {
type Candidate = rustyline::completion::Pair;
fn complete(
&self,
line: &str,
pos: usize,
ctx: &rustyline::Context<'_>,
) -> Result<(usize, Vec<rustyline::completion::Pair>), ReadlineError> {
let result = self.helper.complete(line, pos, ctx);
result.map(|(x, y)| (x, y.iter().map(|z| z.into()).collect()))
}
}
*/
impl Hinter for Helper {
    /// Delegate inline hinting to the underlying `ShellManager`.
    fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
        self.helper.hint(line, pos, ctx)
    }
}
impl Highlighter for Helper {
    /// Recolor the prompt green, keeping a trailing "> " separator.
    fn highlight_prompt<'b, 's: 'b, 'p: 'b>(&'s self, prompt: &'p str, _: bool) -> Cow<'b, str> {
        // NOTE(review): byte-slices the prompt at `len - 2`; this panics if the
        // prompt is shorter than 2 bytes or the cut lands inside a multi-byte
        // character — confirm prompts always end in ASCII "> ".
        Owned("\x1b[32m".to_owned() + &prompt[0..prompt.len() - 2] + "\x1b[m> ")
    }
    /// Render hints in bold.
    fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
        Owned("\x1b[1m".to_owned() + hint + "\x1b[m")
    }
    /// Syntax-highlight the input line by parsing it as a pipeline and
    /// painting each element. Any parse failure returns the line unstyled.
    fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> {
        let tokens = crate::parser::pipeline(nom_input(line, uuid::Uuid::nil()));
        match tokens {
            Err(_) => Cow::Borrowed(line),
            Ok((_rest, v)) => {
                let mut out = String::new();
                let pipeline = match v.as_pipeline() {
                    Err(_) => return Cow::Borrowed(line),
                    Ok(v) => v,
                };
                let Pipeline { parts, post_ws } = pipeline;
                let mut iter = parts.into_iter();
                loop {
                    match iter.next() {
                        None => {
                            // Append any trailing whitespace after the last
                            // element, then return the accumulated string.
                            if let Some(ws) = post_ws {
                                out.push_str(ws.slice(line));
                            }
                            return Cow::Owned(out);
                        }
                        Some(token) => {
                            let styled = paint_pipeline_element(&token, line);
                            out.push_str(&styled.to_string());
                        }
                    }
                }
            }
        }
    }
    /// Always re-highlight as the cursor moves.
    fn highlight_char(&self, _line: &str, _pos: usize) -> bool {
        true
    }
}
/// Return the slice of `line` covered by `token_node`, wrapped in the ANSI
/// color chosen purely by the token's kind.
fn paint_token_node(token_node: &TokenNode, line: &str) -> String {
    let styled = match token_node {
        TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)),
        TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)),
        TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)),
        TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)),
        TokenNode::Path(..) => Color::Green.bold().paint(token_node.tag().slice(line)),
        TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)),
        TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)),
        TokenNode::Operator(..) => Color::White.normal().paint(token_node.tag().slice(line)),
        TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)),
        // Raw tokens are distinguished by their inner `RawToken` kind.
        TokenNode::Token(Tagged {
            item: RawToken::Number(..),
            ..
        }) => Color::Purple.bold().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::Size(..),
            ..
        }) => Color::Purple.bold().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::GlobPattern,
            ..
        }) => Color::Cyan.normal().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::String(..),
            ..
        }) => Color::Green.normal().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::Variable(..),
            ..
        }) => Color::Yellow.bold().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::Bare,
            ..
        }) => Color::Green.normal().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::ExternalCommand(..),
            ..
        }) => Color::Cyan.bold().paint(token_node.tag().slice(line)),
        TokenNode::Token(Tagged {
            item: RawToken::ExternalWord,
            ..
        }) => Color::Black.bold().paint(token_node.tag().slice(line)),
    };
    styled.to_string()
}
/// Render one pipeline element with ANSI styling: the leading pipe (if any),
/// surrounding whitespace, the command head in bold cyan, and each child
/// token via `paint_token_node`.
fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> String {
    let mut styled = String::new();
    if pipeline_element.pipe.is_some() {
        // Bug fix: `push_str(&paint(..))` went through `Deref<Target = str>`,
        // which yields the *unstyled* text; `to_string()` (Display) keeps the
        // ANSI escape codes.
        styled.push_str(&Color::Purple.paint("|").to_string());
    }
    if let Some(ws) = pipeline_element.pre_ws {
        styled.push_str(&Color::White.normal().paint(ws.slice(line)).to_string());
    }
    styled.push_str(
        &Color::Cyan
            .bold()
            .paint(pipeline_element.call().head().tag().slice(line))
            .to_string(),
    );
    if let Some(children) = pipeline_element.call().children() {
        for child in children {
            styled.push_str(&paint_token_node(child, line));
        }
    }
    if let Some(ws) = pipeline_element.post_ws {
        styled.push_str(&Color::White.normal().paint(ws.slice(line)).to_string());
    }
    // `styled` is already a String; the old `styled.to_string()` was a
    // pointless extra allocation.
    styled
}
// Marker impl tying the Completer + Hinter + Highlighter impls together so
// `Helper` satisfies rustyline's combined helper trait.
impl rustyline::Helper for Helper {}
|
use crate::distribution::{Discrete, DiscreteCDF};
use crate::function::factorial;
use crate::statistics::*;
use crate::{Result, StatsError};
use rand::Rng;
use std::cmp;
use std::f64;
/// Implements the
/// [Hypergeometric](http://en.wikipedia.org/wiki/Hypergeometric_distribution)
/// distribution
///
/// # Examples
///
/// ```
/// use statrs::distribution::Hypergeometric;
///
/// let n = Hypergeometric::new(10, 5, 3).unwrap();
/// assert_eq!(n.population(), 10);
/// ```
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Hypergeometric {
    // N: total population size
    population: u64,
    // K: number of successes in the population
    successes: u64,
    // n: number of draws (without replacement)
    draws: u64,
}
impl Hypergeometric {
    /// Builds a hypergeometric distribution from a population size `N`
    /// (`population`), a success count `K` (`successes`), and a number of
    /// draws `n` (`draws`).
    ///
    /// # Errors
    ///
    /// Returns an error when `successes > population` or `draws > population`
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Hypergeometric;
    ///
    /// let mut result = Hypergeometric::new(2, 2, 2);
    /// assert!(result.is_ok());
    ///
    /// result = Hypergeometric::new(2, 3, 2);
    /// assert!(result.is_err());
    /// ```
    pub fn new(population: u64, successes: u64, draws: u64) -> Result<Hypergeometric> {
        // Reject parameter combinations outside the distribution's domain
        // before constructing.
        if successes > population || draws > population {
            return Err(StatsError::BadParams);
        }
        Ok(Hypergeometric {
            population,
            successes,
            draws,
        })
    }
    /// The population size `N` of this distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Hypergeometric;
    ///
    /// let n = Hypergeometric::new(10, 5, 3).unwrap();
    /// assert_eq!(n.population(), 10);
    /// ```
    pub fn population(&self) -> u64 {
        self.population
    }
    /// The number of observed successes `K` of this distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Hypergeometric;
    ///
    /// let n = Hypergeometric::new(10, 5, 3).unwrap();
    /// assert_eq!(n.successes(), 5);
    /// ```
    pub fn successes(&self) -> u64 {
        self.successes
    }
    /// The number of draws `n` of this distribution
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::Hypergeometric;
    ///
    /// let n = Hypergeometric::new(10, 5, 3).unwrap();
    /// assert_eq!(n.draws(), 3);
    /// ```
    pub fn draws(&self) -> u64 {
        self.draws
    }
    /// Returns `(N, K, n)` — population, successes, draws — converted to
    /// `f64`, in that order.
    fn values_f64(&self) -> (f64, f64, f64) {
        let Hypergeometric {
            population,
            successes,
            draws,
        } = *self;
        (population as f64, successes as f64, draws as f64)
    }
}
impl ::rand::distributions::Distribution<f64> for Hypergeometric {
    /// Draws a sample by simulating sequential draws from the urn without
    /// replacement, returning the number of successes observed.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> f64 {
        let mut population = self.population as f64;
        let mut successes = self.successes as f64;
        let mut draws = self.draws;
        let mut x = 0.0;
        // Bug fix: the previous `loop { ...; draws -= 1; if draws == 0 { break } }`
        // executed one iteration before checking, so `draws == 0` underflowed
        // the u64 (panic in debug, wrap in release). A `while` guard performs
        // exactly `draws` iterations and returns 0.0 when draws == 0.
        while draws > 0 {
            let p = successes / population;
            let next: f64 = rng.gen();
            if next < p {
                x += 1.0;
                successes -= 1.0;
            }
            population -= 1.0;
            draws -= 1;
        }
        x
    }
}
impl DiscreteCDF<u64, f64> for Hypergeometric {
    /// Calculates the cumulative distribution function for the hypergeometric
    /// distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 1 - ((n choose k+1) * (N-n choose K-k-1)) / (N choose K) * 3_F_2(1,
    /// k+1-K, k+1-n; k+2, N+k+2-K-n; 1)
    /// ```
    ///
    /// where `N` is population, `K` is successes, `n` is draws,
    /// and `p_F_q` is the [generalized hypergeometric
    /// function](https://en.wikipedia.
    /// org/wiki/Generalized_hypergeometric_function)
    fn cdf(&self, x: u64) -> f64 {
        if x < self.min() {
            // Below the support: no mass accumulated yet.
            0.0
        } else if x >= self.max() {
            // At or beyond the top of the support: all mass accumulated.
            1.0
        } else {
            let k = x;
            // Common denominator ln(N choose n), hoisted out of the sum.
            let ln_denom = factorial::ln_binomial(self.population, self.draws);
            // Sum pmf(i) for i = 0..=k, computed in log-space for numerical
            // stability. (`0..=k` replaces the less idiomatic `0..k + 1`.)
            (0..=k).fold(0.0, |acc, i| {
                acc + (factorial::ln_binomial(self.successes, i)
                    + factorial::ln_binomial(self.population - self.successes, self.draws - i)
                    - ln_denom)
                .exp()
            })
        }
    }
}
impl Min<u64> for Hypergeometric {
    /// Returns the minimum value in the domain of the
    /// hypergeometric distribution representable by a 64-bit
    /// integer
    ///
    /// # Formula
    ///
    /// ```ignore
    /// max(0, n + K - N)
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn min(&self) -> u64 {
        let total = self.draws + self.successes;
        // Clamp at zero instead of underflowing when N >= n + K
        // (equivalent to the saturating subtraction).
        if total > self.population {
            total - self.population
        } else {
            0
        }
    }
}
impl Max<u64> for Hypergeometric {
    /// Returns the maximum value in the domain of the
    /// hypergeometric distribution representable by a 64-bit
    /// integer
    ///
    /// # Formula
    ///
    /// ```ignore
    /// min(K, n)
    /// ```
    ///
    /// where `K` is successes and `n` is draws
    fn max(&self) -> u64 {
        // Ord::min in method form — same result as cmp::min(K, n).
        self.successes.min(self.draws)
    }
}
impl Distribution<f64> for Hypergeometric {
    /// Returns the mean of the hypergeometric distribution
    ///
    /// # None
    ///
    /// If `N` is `0`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// K * n / N
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn mean(&self) -> Option<f64> {
        if self.population == 0 {
            // Undefined for an empty population (division by zero).
            None
        } else {
            Some(self.successes as f64 * self.draws as f64 / self.population as f64)
        }
    }
    /// Returns the variance of the hypergeometric distribution
    ///
    /// # None
    ///
    /// If `N <= 1`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// n * (K / N) * ((N - K) / N) * ((N - n) / (N - 1))
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn variance(&self) -> Option<f64> {
        if self.population <= 1 {
            None
        } else {
            let (population, successes, draws) = self.values_f64();
            // Algebraically identical to the documented formula:
            // nK(N - K)(N - n) / (N^2 (N - 1)).
            let val = draws * successes * (population - draws) * (population - successes)
                / (population * population * (population - 1.0));
            Some(val)
        }
    }
    /// Returns the skewness of the hypergeometric distribution
    ///
    /// # None
    ///
    /// If `N <= 2`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ((N - 2K) * (N - 1)^(1 / 2) * (N - 2n)) / ([n * K * (N - K) * (N -
    /// n)]^(1 / 2) * (N - 2))
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn skewness(&self) -> Option<f64> {
        if self.population <= 2 {
            None
        } else {
            let (population, successes, draws) = self.values_f64();
            let val = (population - 1.0).sqrt()
                * (population - 2.0 * draws)
                * (population - 2.0 * successes)
                / ((draws * successes * (population - successes) * (population - draws)).sqrt()
                    * (population - 2.0));
            Some(val)
        }
    }
}
impl Mode<Option<u64>> for Hypergeometric {
    /// Returns the mode of the hypergeometric distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// floor((n + 1) * (k + 1) / (N + 2))
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn mode(&self) -> Option<u64> {
        // Integer division implements the floor. Always `Some`, even for the
        // degenerate N = 0 case (which yields 0).
        // NOTE(review): `(n + 1) * (K + 1)` can overflow u64 for extreme
        // parameters — confirm callers stay well below that range.
        Some(((self.draws + 1) * (self.successes + 1)) / (self.population + 2))
    }
}
impl Discrete<u64, f64> for Hypergeometric {
    /// Calculates the probability mass function for the hypergeometric
    /// distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (K choose x) * (N-K choose n-x) / (N choose n)
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn pmf(&self, x: u64) -> f64 {
        if x > self.draws {
            // Cannot observe more successes than draws.
            0.0
        } else {
            // NOTE(review): for x inside 0..=draws but above `successes`, the
            // result depends on `factorial::binomial` returning 0 outside its
            // domain — confirm against the factorial module.
            factorial::binomial(self.successes, x)
                * factorial::binomial(self.population - self.successes, self.draws - x)
                / factorial::binomial(self.population, self.draws)
        }
    }
    /// Calculates the log probability mass function for the hypergeometric
    /// distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln((K choose x) * (N-K choose n-x) / (N choose n))
    /// ```
    ///
    /// where `N` is population, `K` is successes, and `n` is draws
    fn ln_pmf(&self, x: u64) -> f64 {
        // NOTE(review): unlike `pmf`, there is no `x > draws` guard here, and
        // `self.draws - x` underflows for x > draws — confirm callers only
        // pass in-support values.
        factorial::ln_binomial(self.successes, x)
            + factorial::ln_binomial(self.population - self.successes, self.draws - x)
            - factorial::ln_binomial(self.population, self.draws)
    }
}
#[rustfmt::skip]
#[cfg(test)]
mod tests {
    use std::fmt::Debug;
    use crate::statistics::*;
    use crate::distribution::{DiscreteCDF, Discrete, Hypergeometric};
    use crate::distribution::internal::*;
    use crate::consts::ACC;
    // Construct a distribution, asserting the parameters are accepted.
    fn try_create(population: u64, successes: u64, draws: u64) -> Hypergeometric {
        let n = Hypergeometric::new(population, successes, draws);
        assert!(n.is_ok());
        n.unwrap()
    }
    // Valid parameters round-trip through the accessors unchanged.
    fn create_case(population: u64, successes: u64, draws: u64) {
        let n = try_create(population, successes, draws);
        assert_eq!(population, n.population());
        assert_eq!(successes, n.successes());
        assert_eq!(draws, n.draws());
    }
    // Invalid parameters are rejected by the constructor.
    fn bad_create_case(population: u64, successes: u64, draws: u64) {
        let n = Hypergeometric::new(population, successes, draws);
        assert!(n.is_err());
    }
    // Evaluate `eval` on a freshly constructed distribution.
    fn get_value<T, F>(population: u64, successes: u64, draws: u64, eval: F) -> T
        where T: PartialEq + Debug,
              F: Fn(Hypergeometric) -> T
    {
        let n = try_create(population, successes, draws);
        eval(n)
    }
    // Exact-equality assertion helper.
    fn test_case<T, F>(population: u64, successes: u64, draws: u64, expected: T, eval: F)
        where T: PartialEq + Debug,
              F: Fn(Hypergeometric) -> T
    {
        let x = get_value(population, successes, draws, eval);
        assert_eq!(expected, x);
    }
    // Approximate-equality assertion helper for floating-point results.
    fn test_almost<F>(population: u64, successes: u64, draws: u64, expected: f64, acc: f64, eval: F)
        where F: Fn(Hypergeometric) -> f64
    {
        let x = get_value(population, successes, draws, eval);
        assert_almost_eq!(expected, x, acc);
    }
    #[test]
    fn test_create() {
        create_case(0, 0, 0);
        create_case(1, 1, 1,);
        create_case(2, 1, 1);
        create_case(2, 2, 2);
        create_case(10, 1, 1);
        create_case(10, 5, 3);
    }
    #[test]
    fn test_bad_create() {
        bad_create_case(2, 3, 2);
        bad_create_case(10, 5, 20);
        bad_create_case(0, 1, 1);
    }
    #[test]
    fn test_mean() {
        let mean = |x: Hypergeometric| x.mean().unwrap();
        test_case(1, 1, 1, 1.0, mean);
        test_case(2, 1, 1, 0.5, mean);
        test_case(2, 2, 2, 2.0, mean);
        test_case(10, 1, 1, 0.1, mean);
        test_case(10, 5, 3, 15.0 / 10.0, mean);
    }
    #[test]
    #[should_panic]
    fn test_mean_with_population_0() {
        let mean = |x: Hypergeometric| x.mean().unwrap();
        get_value(0, 0, 0, mean);
    }
    #[test]
    fn test_variance() {
        let variance = |x: Hypergeometric| x.variance().unwrap();
        test_case(2, 1, 1, 0.25, variance);
        test_case(2, 2, 2, 0.0, variance);
        test_case(10, 1, 1, 81.0 / 900.0, variance);
        test_case(10, 5, 3, 525.0 / 900.0, variance);
    }
    #[test]
    #[should_panic]
    fn test_variance_with_pop_lte_1() {
        let variance = |x: Hypergeometric| x.variance().unwrap();
        get_value(1, 1, 1, variance);
    }
    #[test]
    fn test_skewness() {
        let skewness = |x: Hypergeometric| x.skewness().unwrap();
        test_case(10, 1, 1, 8.0 / 3.0, skewness);
        test_case(10, 5, 3, 0.0, skewness);
    }
    #[test]
    #[should_panic]
    fn test_skewness_with_pop_lte_2() {
        let skewness = |x: Hypergeometric| x.skewness().unwrap();
        get_value(2, 2, 2, skewness);
    }
    #[test]
    fn test_mode() {
        let mode = |x: Hypergeometric| x.mode().unwrap();
        test_case(0, 0, 0, 0, mode);
        test_case(1, 1, 1, 1, mode);
        test_case(2, 1, 1, 1, mode);
        test_case(2, 2, 2, 2, mode);
        test_case(10, 1, 1, 0, mode);
        test_case(10, 5, 3, 2, mode);
    }
    #[test]
    fn test_min() {
        let min = |x: Hypergeometric| x.min();
        test_case(0, 0, 0, 0, min);
        test_case(1, 1, 1, 1, min);
        test_case(2, 1, 1, 0, min);
        test_case(2, 2, 2, 2, min);
        test_case(10, 1, 1, 0, min);
        test_case(10, 5, 3, 0, min);
    }
    #[test]
    fn test_max() {
        let max = |x: Hypergeometric| x.max();
        test_case(0, 0, 0, 0, max);
        test_case(1, 1, 1, 1, max);
        test_case(2, 1, 1, 1, max);
        test_case(2, 2, 2, 2, max);
        test_case(10, 1, 1, 1, max);
        test_case(10, 5, 3, 3, max);
    }
    #[test]
    fn test_pmf() {
        let pmf = |arg: u64| move |x: Hypergeometric| x.pmf(arg);
        test_case(0, 0, 0, 1.0, pmf(0));
        test_case(1, 1, 1, 1.0, pmf(1));
        test_case(2, 1, 1, 0.5, pmf(0));
        test_case(2, 1, 1, 0.5, pmf(1));
        test_case(2, 2, 2, 1.0, pmf(2));
        test_case(10, 1, 1, 0.9, pmf(0));
        test_case(10, 1, 1, 0.1, pmf(1));
        test_case(10, 5, 3, 0.41666666666666666667, pmf(1));
        test_case(10, 5, 3, 0.083333333333333333333, pmf(3));
    }
    #[test]
    fn test_ln_pmf() {
        let ln_pmf = |arg: u64| move |x: Hypergeometric| x.ln_pmf(arg);
        test_case(0, 0, 0, 0.0, ln_pmf(0));
        test_case(1, 1, 1, 0.0, ln_pmf(1));
        test_case(2, 1, 1, -0.6931471805599453094172, ln_pmf(0));
        test_case(2, 1, 1, -0.6931471805599453094172, ln_pmf(1));
        test_case(2, 2, 2, 0.0, ln_pmf(2));
        test_almost(10, 1, 1, -0.1053605156578263012275, 1e-14, ln_pmf(0));
        test_almost(10, 1, 1, -2.302585092994045684018, 1e-14, ln_pmf(1));
        test_almost(10, 5, 3, -0.875468737353899935621, 1e-14, ln_pmf(1));
        test_almost(10, 5, 3, -2.484906649788000310234, 1e-14, ln_pmf(3));
    }
    #[test]
    fn test_cdf() {
        let cdf = |arg: u64| move |x: Hypergeometric| x.cdf(arg);
        test_case(2, 1, 1, 0.5, cdf(0));
        test_almost(10, 1, 1, 0.9, 1e-14, cdf(0));
        test_almost(10, 5, 3, 0.5, 1e-15, cdf(1));
        test_almost(10, 5, 3, 11.0 / 12.0, 1e-14, cdf(2));
        test_almost(10000, 2, 9800, 199.0 / 499950.0, 1e-14, cdf(0));
        test_almost(10000, 2, 9800, 199.0 / 499950.0, 1e-14, cdf(0));
        test_almost(10000, 2, 9800, 19799.0 / 499950.0, 1e-12, cdf(1));
    }
    #[test]
    fn test_cdf_arg_too_big() {
        let cdf = |arg: u64| move |x: Hypergeometric| x.cdf(arg);
        test_case(0, 0, 0, 1.0, cdf(0));
    }
    #[test]
    fn test_cdf_arg_too_small() {
        let cdf = |arg: u64| move |x: Hypergeometric| x.cdf(arg);
        test_case(2, 2, 2, 0.0, cdf(0));
    }
    #[test]
    fn test_discrete() {
        test::check_discrete_distribution(&try_create(5, 4, 3), 4);
        test::check_discrete_distribution(&try_create(3, 2, 1), 2);
    }
}
|
/// Demonstrates binding a function to a variable before invoking it.
fn main() {
    let callee: fn() = print_number;
    callee();
}
/// Print the number 666 on its own line.
///
/// Bug fix: `println!(666)` does not compile — the first argument to
/// `println!` must be a format-string literal.
fn print_number() {
    println!("{}", 666);
}
|
use std::collections::BTreeSet;
// TODO: evaluate performance of using btreemap's instead of sets (it's nice to have the sortedness, but performance?)
// insertion should (almost always) be a greater value?
#[derive(Clone)]
pub enum Selection {
    /// Exactly one index, permanently selected.
    AlwaysOne(usize),
    /// At most one selected index.
    MaybeOne(Option<usize>),
    /// Any number of selected indices, kept in sorted order.
    Multiple(BTreeSet<usize>),
}
impl Selection {
    /// Create a new `Selection::AlwaysOne` with the given selection
    pub fn one(index: usize) -> Self {
        Selection::AlwaysOne(index)
    }
    /// Create a new `Selection::MaybeOne` with the given selection
    pub fn some(index: usize) -> Self {
        Selection::MaybeOne(Some(index))
    }
    /// Create a new `Selection::MaybeOne` with no selection
    pub fn none() -> Self {
        Selection::MaybeOne(None)
    }
    /// Create a new `Selection::Multiple` with no selection
    pub fn empty() -> Self {
        Selection::Multiple(BTreeSet::new())
    }
    /// Create a new `Selection::Multiple` with some indices selected
    pub fn multiple<T>(indices: T) -> Self
    where
        T: IntoIterator<Item = usize>,
    {
        Selection::Multiple(indices.into_iter().collect())
    }
    /// Number of currently selected indices.
    pub fn len(&self) -> usize {
        match self {
            Selection::AlwaysOne(_) => 1,
            Selection::MaybeOne(opt) => opt.iter().count(),
            Selection::Multiple(set) => set.len(),
        }
    }
    /// True when nothing is selected (never true for `AlwaysOne`).
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Snapshot of the selected indices as an owned sorted set.
    pub fn as_set(&self) -> BTreeSet<usize> {
        match self {
            Selection::AlwaysOne(index) | Selection::MaybeOne(Some(index)) => {
                std::iter::once(*index).collect()
            }
            Selection::MaybeOne(None) => BTreeSet::new(),
            Selection::Multiple(set) => set.clone(),
        }
    }
    /// True when index `i` is currently selected.
    pub(crate) fn includes(&self, i: &usize) -> bool {
        match self {
            Selection::AlwaysOne(index) | Selection::MaybeOne(Some(index)) => index == i,
            Selection::MaybeOne(None) => false,
            Selection::Multiple(set) => set.contains(i),
        }
    }
    /// True only for the `Multiple` variant.
    pub fn is_multiple(&self) -> bool {
        matches!(self, Selection::Multiple(_))
    }
    /// True for variants that can hold an empty selection.
    pub fn is_nullable(&self) -> bool {
        !matches!(self, Selection::AlwaysOne(_))
    }
    /// Select an index from the options.
    /// Returns true if the selection has changed.
    pub(crate) fn select(&mut self, index: usize) -> bool {
        match self {
            Selection::AlwaysOne(current) => {
                let changed = *current != index;
                *current = index;
                changed
            }
            Selection::MaybeOne(slot) => match slot.replace(index) {
                // Changed only if the previous selection differed.
                Some(previous) => previous != index,
                // Was `None`, now `Some(_)`.
                None => true,
            },
            Selection::Multiple(set) => set.insert(index),
        }
    }
    /// Deselect an index from the options.
    /// Returns true if the selection has changed.
    pub(crate) fn deselect(&mut self, index: usize) -> bool {
        match self {
            // `AlwaysOne` can never be deselected.
            Selection::AlwaysOne(_) => false,
            Selection::MaybeOne(slot) => {
                if *slot == Some(index) {
                    *slot = None;
                    true
                } else {
                    // Mismatched or absent index: nothing to do.
                    false
                }
            }
            Selection::Multiple(set) => set.remove(&index),
        }
    }
    /// Clear the selected items.
    /// Returns true if the selection has changed.
    pub(crate) fn clear(&mut self) -> bool {
        match self {
            // `AlwaysOne` can never be cleared.
            Selection::AlwaysOne(_) => false,
            Selection::MaybeOne(slot) => slot.take().is_some(),
            Selection::Multiple(set) => {
                let had_any = !set.is_empty();
                set.clear();
                had_any
            }
        }
    }
}
|
pub mod mutable_ref;
pub mod reference;
/// Run both reference demos: the mutable-reference example followed by the
/// shared-reference example.
pub fn references() {
    mutable_ref::mut_ref();
    reference::reference();
}
|
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::quote;
use syn::{parse::ParseBuffer, Error, Fields, Ident, ItemEnum, Type};
/// Wrapper around the parsed user `enum` from which the Moebius instruction
/// type and its (de)serialization code are generated.
pub struct MoebiusInstruction {
    // The enum AST as written by the user (expected variants: `Initialize`
    // and `UpdateState` — see `expand`).
    ast: ItemEnum,
}
impl MoebiusInstruction {
    /// Expand the parsed enum into the generated instruction type together
    /// with its `pack`/`unpack` byte-level (de)serializers.
    ///
    /// Field types are named after Solidity types and mapped onto fixed-size
    /// Rust types: `address` -> `[u8; 20]`, `bytes32`/`uint256` -> `[u8; 32]`,
    /// `bool`/`uint8` -> `u8`. Only the `Initialize` and `UpdateState`
    /// variants are accepted; any other variant or type panics at
    /// macro-expansion time.
    pub fn expand(&self) -> TokenStream2 {
        // `Initialize` always begins with the Moebius program id pubkey.
        let mut initialize_fields = vec![quote! { moebius_program_id }];
        let mut initialize_fields_ty = vec![quote! { Pubkey }];
        let mut initialize_pack_instructions =
            vec![quote! { buf.extend_from_slice(moebius_program_id.as_ref()) }];
        let mut initialize_unpack_instructions =
            vec![quote! { let (moebius_program_id, rest) = Self::unpack_pubkey(rest)? }];
        let mut update_state_fields = vec![];
        let mut update_state_fields_ty = vec![];
        let mut update_state_pack_instructions = vec![];
        let mut update_state_unpack_instructions = vec![];
        // Collect per-field pack/unpack code snippets from each named variant.
        for variant in self.ast.variants.iter() {
            match &variant.fields {
                Fields::Named(fields_named) => {
                    for field in fields_named.named.iter() {
                        let field_ident = field.ident.clone().unwrap();
                        // Scratch identifier used by the unpack code for the
                        // raw byte slice of this field.
                        let field_slice = format!("{}_slice", field_ident.to_string());
                        let field_ident_slice = Ident::new(&field_slice, Span::call_site());
                        if let Type::Path(ref p) = field.ty {
                            let input_ty = p.path.segments[0].ident.to_string();
                            let field_ty = match input_ty.as_ref() {
                                "address" => quote! { [u8; 20] },
                                "bytes32" | "uint256" => quote! { [u8; 32] },
                                "bool" | "uint8" => quote! { u8 },
                                _ => panic!("unexpected type"),
                            };
                            match variant.ident.to_string().as_ref() {
                                "Initialize" => {
                                    initialize_fields.push(quote! { #field_ident });
                                    initialize_fields_ty.push(field_ty);
                                    let (pack_inst, unpack_inst) = match input_ty.as_ref() {
                                        // `Initialize` packs addresses as raw 20 bytes.
                                        "address" => (
                                            quote! { buf.extend_from_slice(&#field_ident[..]) },
                                            quote! {
                                                let (#field_ident_slice, rest) = rest.split_at(20);
                                                let mut #field_ident = [0u8; 20];
                                                #field_ident.copy_from_slice(&#field_ident_slice[..]);
                                            },
                                        ),
                                        "bytes32" | "uint256" => (
                                            quote! { buf.extend_from_slice(&#field_ident[..]) },
                                            quote! {
                                                let (#field_ident_slice, rest) = rest.split_at(32);
                                                let mut #field_ident = [0u8; 32];
                                                #field_ident.copy_from_slice(&#field_ident_slice[..]);
                                            },
                                        ),
                                        "bool" | "uint8" => (
                                            quote! { buf.push(*#field_ident) },
                                            quote! {
                                                let (&#field_ident, rest) = rest.split_first().ok_or(InvalidInstruction)?;
                                            },
                                        ),
                                        _ => panic!("unexpected type"),
                                    };
                                    initialize_pack_instructions.push(pack_inst);
                                    initialize_unpack_instructions.push(unpack_inst);
                                }
                                "UpdateState" => {
                                    update_state_fields.push(quote! { #field_ident });
                                    update_state_fields_ty.push(field_ty);
                                    let (pack_inst, unpack_inst) = match input_ty.as_ref() {
                                        // `UpdateState` packs addresses left-padded
                                        // with 12 zero bytes into a full 32-byte word.
                                        "address" => (
                                            quote! {
                                                buf.extend_from_slice(&[0u8; 12]);
                                                buf.extend_from_slice(&#field_ident[..]);
                                            },
                                            quote! {
                                                let (#field_ident_slice, rest) = rest.split_at(32);
                                                let mut #field_ident = [0u8; 20];
                                                #field_ident.copy_from_slice(&#field_ident_slice[12..]);
                                            },
                                        ),
                                        "bytes32" | "uint256" => (
                                            quote! { buf.extend_from_slice(&#field_ident[..]) },
                                            quote! {
                                                let (#field_ident_slice, rest) = rest.split_at(32);
                                                let mut #field_ident = [0u8; 32];
                                                #field_ident.copy_from_slice(&#field_ident_slice[..]);
                                            },
                                        ),
                                        "bool" | "uint8" => (
                                            quote! { buf.push(*#field_ident) },
                                            quote! {
                                                let (&#field_ident, rest) = rest.split_first().ok_or(InvalidInstruction)?;
                                            },
                                        ),
                                        _ => panic!("unexpected type"),
                                    };
                                    update_state_pack_instructions.push(pack_inst);
                                    update_state_unpack_instructions.push(unpack_inst);
                                }
                                _ => panic!("unexpected variant"),
                            }
                        }
                    }
                }
                _ => {}
            }
        }
        let vis = &self.ast.vis;
        let ident = &self.ast.ident;
        // Emit the final enum plus pack/unpack implementations. Tag byte 0
        // selects `Initialize`, tag byte 1 selects `UpdateState`.
        quote! {
            #[repr(C)]
            #[derive(Clone, Debug, PartialEq)]
            #vis enum #ident {
                Initialize {
                    #(
                        #initialize_fields: #initialize_fields_ty
                    ),*,
                },
                UpdateState {
                    #(
                        #update_state_fields: #update_state_fields_ty
                    ),*,
                },
            }
            impl #ident {
                pub fn pack(&self) -> Vec<u8> {
                    let mut buf = Vec::with_capacity(size_of::<Self>());
                    match self {
                        Self::Initialize {
                            #(
                                #initialize_fields
                            ),*
                        } => {
                            buf.push(0);
                            #(
                                #initialize_pack_instructions
                            );*;
                        }
                        Self::UpdateState {
                            #(
                                #update_state_fields
                            ),*
                        } => {
                            buf.push(1);
                            #(
                                #update_state_pack_instructions
                            );*;
                        }
                    }
                    buf
                }
                pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
                    let (&tag, rest) = input.split_first().ok_or(InvalidInstruction)?;
                    Ok(match tag {
                        0 => {
                            #(
                                #initialize_unpack_instructions
                            );*;
                            Self::Initialize {
                                #(
                                    #initialize_fields
                                ),*
                            }
                        }
                        1 => {
                            #(
                                #update_state_unpack_instructions
                            );*;
                            Self::UpdateState {
                                #(
                                    #update_state_fields
                                ),*
                            }
                        }
                        _ => return Err(InvalidInstruction.into()),
                    })
                }
                fn unpack_pubkey(input: &[u8]) -> Result<(Pubkey, &[u8]), ProgramError> {
                    if input.len() >= 32 {
                        let (key, rest) = input.split_at(32);
                        let pk = Pubkey::new(key);
                        Ok((pk, rest))
                    } else {
                        Err(InvalidInstruction.into())
                    }
                }
            }
        }
    }
}
impl syn::parse::Parse for MoebiusInstruction {
    /// Parses the macro input as a `syn` AST and wraps it in a
    /// `MoebiusInstruction`; parse errors propagate to the caller.
    fn parse(input: &ParseBuffer) -> Result<Self, Error> {
        let ast = input.parse()?;
        Ok(Self { ast })
    }
}
|
use futures::future::{Future, FutureResult};
use negotiationresponse::NegotiationResponse;
use connection::Connection;
use serde_json::{Map, Value};
use std::sync::mpsc::Sender;
/// Abstraction over a client transport used to negotiate, start, send
/// over, and abort a connection.
///
/// All operations are asynchronous and return boxed futures; errors are
/// reported as the unit type `()`.
pub trait ClientTransport {
    /// The transport's identifying name (e.g. sent to the server so it
    /// knows which transport is in use).
    fn name(&self) -> &str;
    /// Performs the negotiation handshake against `url` and resolves to
    /// the server's [`NegotiationResponse`].
    fn negotiate(
        &mut self,
        url: &str,
        connection_data: &str,
        protocol: &str,
    ) -> Box<Future<Item = NegotiationResponse, Error = ()>>;
    /// Starts the connection using the token obtained from negotiation.
    ///
    /// `sender`, when provided, receives raw message bytes from the
    /// server — presumably for the lifetime of the connection (TODO
    /// confirm against implementors).
    fn start(
        &mut self,
        url: &str,
        connection_data: &str,
        connection_token: &str,
        protocol: &str,
        sender: Option<Sender<Vec<u8>>>,
    ) -> Box<Future<Item = (), Error = ()>>;
    /// Sends `data` to the server over an established connection.
    fn send(
        &mut self,
        url: &str,
        connection_data: &str,
        connection_token: &str,
        protocol: &str,
        data: String,
    ) -> Box<Future<Item = (), Error = ()>>;
    /// Aborts the connection; resolves once the abort request completes.
    fn abort(&self) -> Box<Future<Item = (), Error = ()>>;
}
|
#[macro_use]
extern crate lazy_static;
extern crate wars_8_api;
use wars_8_api::*;
use std::sync::Mutex;
/// The text that bounces around the screen.
const MSG: &str = "UwU";
lazy_static! {
    // Shared state between `_update` and `_draw`:
    // ((x, y) position, (dx, dy) velocity, toggle flag).
    // Starts at (0, 6) moving down-right, flag cleared.
    static ref COORDS: Mutex<((i32, i32), (i32, i32), bool)> = Mutex::new(((0, 6), (1, 1), false));
}
#[no_mangle]
pub fn _init() {
    // Nothing to do: COORDS is initialized lazily on first access.
}
#[no_mangle]
pub fn _update() {
    // Advance the bouncing text by four sub-steps per frame, reflecting
    // the velocity whenever the position crosses a screen edge, and
    // toggle the flag once per frame.
    let mut state = COORDS.lock().unwrap();
    let ((mut px, mut py), (mut vx, mut vy), flag) = *state;
    // Right edge accounts for the rendered width of MSG (4 px per char).
    let right_edge = 127 - (MSG.len() as i32 * 4);
    for _step in 0..4 {
        px += vx;
        py += vy;
        if px < 1 || px >= right_edge {
            vx = -vx;
        }
        if py < 6 || py >= 127 {
            vy = -vy;
        }
    }
    *state = ((px, py), (vx, vy), !flag);
}
#[no_mangle]
pub fn _draw() {
    // Only the position matters for drawing; velocity and flag are ignored.
    let ((x, y), _, _) = *COORDS.lock().unwrap();
    // Invert the color scheme while player one holds the O button.
    let (background, foreground) = if input::btn(input::Button::O, input::Player::One) {
        (gfx::ColorPallete::Pink, gfx::ColorPallete::White)
    } else {
        (gfx::ColorPallete::White, gfx::ColorPallete::Pink)
    };
    gfx::cls(background);
    gfx::print(MSG.to_string(), x, y, foreground)
}
|
#![deny(missing_docs, unsafe_code)]
use super::{BitMode, DeviceType, FtStatus, FtdiCommon, TimeoutError};
use std::convert::From;
use std::time::Duration;
/// Raw MPSSE command opcodes.
///
/// These are the literal bytes written to the device to configure it or
/// to request GPIO reads.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(u8)]
enum MpsseCmd {
    SetDataBitsLowbyte = 0x80,
    GetDataBitsLowbyte = 0x81,
    SetDataBitsHighbyte = 0x82,
    GetDataBitsHighbyte = 0x83,
    EnableLoopback = 0x84,
    DisableLoopback = 0x85,
    SetClockFrequency = 0x86,
    SendImmediate = 0x87,
    WaitOnIOHigh = 0x88,
    WaitOnIOLow = 0x89,
    DisableClockDivide = 0x8A,
    EnableClockDivide = 0x8B,
    Enable3PhaseClocking = 0x8C,
    Disable3PhaseClocking = 0x8D,
    // EnableDriveOnlyZero = 0x9E,
}
/// Modes for clocking data out of the FTDI device.
///
/// This is an argument to the [`clock_data_out`] method.
///
/// [`clock_data_out`]: MpsseCmdBuilder::clock_data_out
#[repr(u8)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ClockDataOut {
    /// Positive clock edge MSB first.
    ///
    /// The data is sent MSB first.
    ///
    /// The data will change to the next bit on the rising edge of the CLK pin.
    MsbPos = 0x10,
    /// Negative clock edge MSB first.
    ///
    /// The data is sent MSB first.
    ///
    /// The data will change to the next bit on the falling edge of the CLK pin.
    MsbNeg = 0x11,
    /// Positive clock edge LSB first.
    ///
    /// The first bit out will be the LSB of the first byte and so on.
    ///
    /// The data will change to the next bit on the rising edge of the CLK pin.
    LsbPos = 0x18,
    /// Negative clock edge LSB first.
    ///
    /// The first bit out will be the LSB of the first byte and so on.
    ///
    /// The data will change to the next bit on the falling edge of the CLK pin.
    LsbNeg = 0x19,
}
impl From<ClockDataOut> for u8 {
    /// Returns the raw MPSSE opcode byte for this mode.
    fn from(value: ClockDataOut) -> u8 {
        value as u8
    }
}
/// Modes for clocking bits out of the FTDI device.
///
/// This is an argument to the [`clock_bits_out`] method.
///
/// [`clock_bits_out`]: MpsseCmdBuilder::clock_bits_out
#[repr(u8)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ClockBitsOut {
    /// Positive clock edge MSB first.
    ///
    /// The data is sent MSB first (bit 7 first).
    ///
    /// The data will change to the next bit on the rising edge of the CLK pin.
    MsbPos = 0x12,
    /// Negative clock edge MSB first.
    ///
    /// The data is sent MSB first (bit 7 first).
    ///
    /// The data will change to the next bit on the falling edge of the CLK pin.
    MsbNeg = 0x13,
    /// Positive clock edge LSB first (bit 0 first).
    ///
    /// The first bit out will be the LSB of the first byte and so on.
    ///
    /// The data will change to the next bit on the rising edge of the CLK pin.
    LsbPos = 0x1A,
    /// Negative clock edge LSB first (bit 0 first).
    ///
    /// The first bit out will be the LSB of the first byte and so on.
    ///
    /// The data will change to the next bit on the falling edge of the CLK pin.
    LsbNeg = 0x1B,
}
impl From<ClockBitsOut> for u8 {
    /// Returns the raw MPSSE opcode byte for this mode.
    fn from(value: ClockBitsOut) -> u8 {
        value as u8
    }
}
/// Modes for clocking data into the FTDI device.
///
/// This is an argument to the [`clock_data_in`] method.
///
/// [`clock_data_in`]: MpsseCmdBuilder::clock_data_in
#[repr(u8)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ClockDataIn {
    /// Positive clock edge MSB first.
    ///
    /// The first bit in will be the MSB of the first byte and so on.
    ///
    /// The data will be sampled on the rising edge of the CLK pin.
    MsbPos = 0x20,
    /// Negative clock edge MSB first.
    ///
    /// The first bit in will be the MSB of the first byte and so on.
    ///
    /// The data will be sampled on the falling edge of the CLK pin.
    MsbNeg = 0x24,
    /// Positive clock edge LSB first.
    ///
    /// The first bit in will be the LSB of the first byte and so on.
    ///
    /// The data will be sampled on the rising edge of the CLK pin.
    LsbPos = 0x28,
    /// Negative clock edge LSB first.
    ///
    /// The first bit in will be the LSB of the first byte and so on.
    ///
    /// The data will be sampled on the falling edge of the CLK pin.
    LsbNeg = 0x2C,
}
impl From<ClockDataIn> for u8 {
    /// Returns the raw MPSSE opcode byte for this mode.
    fn from(value: ClockDataIn) -> u8 {
        value as u8
    }
}
/// Modes for clocking data bits into the FTDI device.
///
/// This is an argument to the [`clock_bits_in`] method.
///
/// [`clock_bits_in`]: MpsseCmdBuilder::clock_bits_in
#[repr(u8)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ClockBitsIn {
    /// Positive clock edge MSB first.
    ///
    /// The data will be shifted up so that the first bit in may not be in bit 7
    /// but from 6 downwards depending on the number of bits to shift
    /// (i.e. a length of 1 bit will have the data bit sampled in bit 0 of the
    /// byte sent back to the PC).
    ///
    /// The data will be sampled on the rising edge of the CLK pin.
    MsbPos = 0x22,
    /// Negative clock edge MSB first.
    ///
    /// The data will be shifted up so that the first bit in may not be in bit 7
    /// but from 6 downwards depending on the number of bits to shift
    /// (i.e. a length of 1 bit will have the data bit sampled in bit 0 of the
    /// byte sent back to the PC).
    ///
    /// The data will be sampled on the falling edge of the CLK pin.
    MsbNeg = 0x26,
    /// Positive clock edge LSB first.
    ///
    /// The data will be shifted down so that the first bit in may not be in bit
    /// 0 but from 1 upwards depending on the number of bits to shift
    /// (i.e. a length of 1 bit will have the data bit sampled in bit 7 of the
    /// byte sent back to the PC).
    ///
    /// The data will be sampled on the rising edge of the CLK pin.
    LsbPos = 0x2A,
    /// Negative clock edge LSB first.
    ///
    /// The data will be shifted down so that the first bit in may not be in bit
    /// 0 but from 1 upwards depending on the number of bits to shift
    /// (i.e. a length of 1 bit will have the data bit sampled in bit 7 of the
    /// byte sent back to the PC).
    ///
    /// The data will be sampled on the falling edge of the CLK pin.
    LsbNeg = 0x2E,
}
impl From<ClockBitsIn> for u8 {
    /// Returns the raw MPSSE opcode byte for this mode.
    fn from(value: ClockBitsIn) -> u8 {
        value as u8
    }
}
/// Modes for clocking data in and out of the FTDI device.
///
/// This is an argument to the [`clock_data`] method.
///
/// [`clock_data`]: MpsseCmdBuilder::clock_data
#[repr(u8)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ClockData {
    /// MSB first, data in on positive edge, data out on negative edge.
    MsbPosIn = 0x31,
    /// MSB first, data in on negative edge, data out on positive edge.
    MsbNegIn = 0x34,
    /// LSB first, data in on positive edge, data out on negative edge.
    LsbPosIn = 0x39,
    /// LSB first, data in on negative edge, data out on positive edge.
    LsbNegIn = 0x3C,
}
impl From<ClockData> for u8 {
    /// Returns the raw MPSSE opcode byte for this mode.
    fn from(value: ClockData) -> u8 {
        value as u8
    }
}
/// Modes for clocking data bits in and out of the FTDI device.
///
/// This is an argument to the [`clock_bits`] method.
///
/// [`clock_bits`]: MpsseCmdBuilder::clock_bits
#[repr(u8)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ClockBits {
    /// MSB first, data in on positive edge, data out on negative edge.
    MsbPosIn = 0x33,
    /// MSB first, data in on negative edge, data out on positive edge.
    MsbNegIn = 0x36,
    /// LSB first, data in on positive edge, data out on negative edge.
    LsbPosIn = 0x3B,
    /// LSB first, data in on negative edge, data out on positive edge.
    LsbNegIn = 0x3E,
}
impl From<ClockBits> for u8 {
    /// Returns the raw MPSSE opcode byte for this mode.
    fn from(value: ClockBits) -> u8 {
        value as u8
    }
}
// seemingly arbitrary values from libmpsse
// const ECHO_CMD_1: u8 = 0xAA;
// Used by `synchronize_mpsse`, which expects the device to reply with
// 0xFA followed by this byte echoed back.
const ECHO_CMD_2: u8 = 0xAB;
impl From<MpsseCmd> for u8 {
    /// Returns the raw command opcode byte.
    fn from(value: MpsseCmd) -> Self {
        value as u8
    }
}
/// Panics if `frequency` is outside the valid range `[92, max]` Hz for
/// `device`.
fn check_limits(device: DeviceType, frequency: u32, max: u32) {
    const MIN: u32 = 92;
    // Bug fix: the failure here is frequency BELOW the minimum, but the
    // old message claimed it "exceeds minimum".
    assert!(
        frequency >= MIN,
        "frequency of {} is less than the minimum of {} for {:?}",
        frequency,
        MIN,
        device
    );
    assert!(
        frequency <= max,
        "frequency of {} exceeds maximum of {} for {:?}",
        frequency,
        max,
        device
    );
}
// calculate the clock divisor from a frequency
//
// The device derives its clock as `base / (divisor + 1)`: base is 6 MHz
// for the FT2232C/D, and for the H-series either 6 MHz (clock divide
// enabled) or 30 MHz (clock divide disabled).
//
// Returns the divisor value and, for devices that support it, the
// command to enable/disable the clock divide.
fn clock_divisor(device: DeviceType, frequency: u32) -> (u32, Option<MpsseCmd>) {
    match device {
        // FT2232D appears as FT2232C in FTD2XX
        DeviceType::FT2232C => {
            check_limits(device, frequency, 6_000_000);
            (6_000_000 / frequency - 1, None)
        }
        DeviceType::FT2232H | DeviceType::FT4232H | DeviceType::FT232H => {
            check_limits(device, frequency, 30_000_000);
            // at or below 6 MHz the 6 MHz base (clock divide) gives a
            // representable divisor; above it the 30 MHz base is required
            if frequency <= 6_000_000 {
                (6_000_000 / frequency - 1, Some(MpsseCmd::EnableClockDivide))
            } else {
                (
                    30_000_000 / frequency - 1,
                    Some(MpsseCmd::DisableClockDivide),
                )
            }
        }
        _ => panic!("Unknown device type: {:?}", device),
    }
}
#[cfg(test)]
mod clock_divisor {
    use super::*;
    // Asserts that `clock_divisor($DEVICE, $FREQ)` returns `$OUT`.
    macro_rules! pos {
        ($NAME:ident, $DEVICE:expr, $FREQ:expr, $OUT:expr) => {
            #[test]
            fn $NAME() {
                assert_eq!(clock_divisor($DEVICE, $FREQ), $OUT);
            }
        };
    }
    // Asserts that `clock_divisor($DEVICE, $FREQ)` panics (out-of-range
    // frequency or unsupported device).
    macro_rules! neg {
        ($NAME:ident, $DEVICE:expr, $FREQ:expr) => {
            #[test]
            #[should_panic]
            fn $NAME() {
                clock_divisor($DEVICE, $FREQ);
            }
        };
    }
    pos!(ft232c_min, DeviceType::FT2232C, 92, (65216, None));
    pos!(ft232c_max, DeviceType::FT2232C, 6_000_000, (0, None));
    pos!(
        min,
        DeviceType::FT2232H,
        92,
        (65216, Some(MpsseCmd::EnableClockDivide))
    );
    pos!(
        max_with_div,
        DeviceType::FT2232H,
        6_000_000,
        (0, Some(MpsseCmd::EnableClockDivide))
    );
    pos!(
        min_without_div,
        DeviceType::FT2232H,
        6_000_001,
        (3, Some(MpsseCmd::DisableClockDivide))
    );
    pos!(
        max,
        DeviceType::FT4232H,
        30_000_000,
        (0, Some(MpsseCmd::DisableClockDivide))
    );
    neg!(panic_unknown, DeviceType::Unknown, 1_000);
    neg!(panic_ft232c_min, DeviceType::FT2232C, 91);
    neg!(panic_ft232c_max, DeviceType::FT2232C, 6_000_001);
    neg!(panic_min, DeviceType::FT232H, 91);
    neg!(panic_max, DeviceType::FT232H, 30_000_001);
}
/// Initialization settings for the MPSSE.
///
/// Used by [`initialize_mpsse`].
///
/// The `Default` implementation below provides reasonable values for
/// most applications; the per-field defaults are noted here.
///
/// [`initialize_mpsse`]: FtdiMpsse::initialize_mpsse
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct MpsseSettings {
    /// Reset the MPSSE on initialization.
    ///
    /// This calls [`reset`] if `true`.
    ///
    /// Defaults to `true`.
    ///
    /// [`reset`]: FtdiCommon::reset
    pub reset: bool,
    /// USB in transfer size in bytes.
    ///
    /// This gets passed to [`set_usb_parameters`].
    ///
    /// Defaults to `4096`.
    ///
    /// [`set_usb_parameters`]: FtdiCommon::set_usb_parameters
    pub in_transfer_size: u32,
    /// Read timeout.
    ///
    /// This gets passed along with [`write_timeout`] to [`set_timeouts`].
    ///
    /// Defaults to 1 second.
    ///
    /// [`set_timeouts`]: FtdiCommon::set_timeouts
    /// [`write_timeout`]: MpsseSettings::write_timeout
    pub read_timeout: Duration,
    /// Write timeout.
    ///
    /// This gets passed along with [`read_timeout`] to [`set_timeouts`].
    ///
    /// Defaults to 1 second.
    ///
    /// [`set_timeouts`]: FtdiCommon::set_timeouts
    /// [`read_timeout`]: MpsseSettings::read_timeout
    pub write_timeout: Duration,
    /// Latency timer.
    ///
    /// This gets passed to [`set_latency_timer`].
    ///
    /// Defaults to 16 milliseconds.
    ///
    /// [`set_latency_timer`]: FtdiCommon::set_latency_timer
    pub latency_timer: Duration,
    /// Bitmode mask.
    ///
    /// * A bit value of `0` sets the corresponding pin to an input.
    /// * A bit value of `1` sets the corresponding pin to an output.
    ///
    /// This gets passed to [`set_bit_mode`].
    ///
    /// Defaults to `0x00` (all pins input).
    ///
    /// [`set_bit_mode`]: FtdiCommon::set_bit_mode
    pub mask: u8,
    /// Clock frequency.
    ///
    /// If not `None` this will call [`set_clock`] to set the clock frequency.
    ///
    /// Defaults to `None`.
    ///
    /// [`set_clock`]: crate::FtdiMpsse::set_clock
    pub clock_frequency: Option<u32>,
}
impl std::default::Default for MpsseSettings {
fn default() -> Self {
MpsseSettings {
reset: true,
in_transfer_size: 4096,
read_timeout: Duration::from_secs(1),
write_timeout: Duration::from_secs(1),
latency_timer: Duration::from_millis(16),
mask: 0x00,
clock_frequency: None,
}
}
}
/// FTDI Multi-Protocol Synchronous Serial Engine (MPSSE).
///
/// For details about the MPSSE read the [FTDI MPSSE Basics].
///
/// [FTDI MPSSE Basics]: https://www.ftdichip.com/Support/Documents/AppNotes/AN_135_MPSSE_Basics.pdf
pub trait FtdiMpsse: FtdiCommon {
/// Set the clock frequency.
///
/// # Frequency Limits
///
/// | Device Type | Minimum | Maximum |
/// |--------------------------|---------|---------|
/// | FT2232D | 92 Hz | 6 MHz |
/// | FT4232H, FT2232H, FT232H | 92 Hz | 30 MHz |
///
/// Values outside of these limits will result in panic.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft4232h, FtdiMpsse};
///
/// let mut ft = Ft4232h::with_serial_number("FT4PWSEOA")?;
/// ft.initialize_mpsse_default()?;
/// ft.set_clock(100_000)?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
fn set_clock(&mut self, frequency: u32) -> Result<(), TimeoutError> {
let (value, divisor) = clock_divisor(Self::DEVICE_TYPE, frequency);
debug_assert!(value <= 0xFFFF);
let mut buf: Vec<u8> = Vec::new();
if let Some(div) = divisor {
buf.push(div.into());
};
buf.push(MpsseCmd::SetClockFrequency.into());
buf.push((value & 0xFF) as u8);
buf.push(((value >> 8) & 0xFF) as u8);
self.write_all(&buf.as_slice())
}
/// Initialize the MPSSE.
///
/// This method does the following:
///
/// 1. Optionally [`reset`]s the device.
/// 2. Sets USB transfer sizes using values provided.
/// 3. Disables special characters.
/// 4. Sets the transfer timeouts using values provided.
/// 5. Sets latency timers using values provided.
/// 6. Sets the flow control to RTS CTS.
/// 7. Resets the bitmode, then sets it to MPSSE.
/// 8. Enables loopback.
/// 9. Synchronizes the MPSSE.
/// 10. Disables loopback.
/// 11. Optionally sets the clock frequency.
///
/// Upon failure cleanup is not guaranteed.
///
/// # Example
///
/// Initialize the MPSSE with a 5 second read timeout.
///
/// ```no_run
/// use libftd2xx::{Ft232h, FtdiMpsse, MpsseSettings};
/// use std::time::Duration;
///
/// let mut settings = MpsseSettings::default();
/// settings.read_timeout = Duration::from_secs(5);
/// let mut ft = Ft232h::with_serial_number("FT59UO4C")?;
/// ft.initialize_mpsse(&settings)?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
///
/// [`reset`]: FtdiCommon::reset
fn initialize_mpsse(&mut self, settings: &MpsseSettings) -> Result<(), TimeoutError> {
if settings.reset {
self.reset()?;
}
self.purge_rx()?;
debug_assert_eq!(self.queue_status()?, 0);
self.set_usb_parameters(settings.in_transfer_size)?;
self.set_chars(0, false, 0, false)?;
self.set_timeouts(settings.read_timeout, settings.write_timeout)?;
self.set_latency_timer(settings.latency_timer)?;
self.set_flow_control_rts_cts()?;
self.set_bit_mode(0x0, BitMode::Reset)?;
self.set_bit_mode(settings.mask, BitMode::Mpsse)?;
self.enable_loopback()?;
self.synchronize_mpsse()?;
// bundle the disable loopback and clock divisor writes together
// to save some time
let mut mpsse_cmd = MpsseCmdBuilder::new().disable_loopback();
if let Some(frequency) = settings.clock_frequency {
mpsse_cmd = mpsse_cmd.set_clock(frequency, Self::DEVICE_TYPE);
}
self.write_all(mpsse_cmd.as_slice())?;
Ok(())
}
/// Initializes the MPSSE to default settings.
///
/// This simply calles [`initialize_mpsse`] with the default
/// [`MpsseSettings`].
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft232h, FtdiMpsse};
///
/// let mut ft = Ft232h::with_serial_number("FT59UO4C")?;
/// ft.initialize_mpsse_default()?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
///
/// [`initialize_mpsse`]: FtdiMpsse::initialize_mpsse
fn initialize_mpsse_default(&mut self) -> Result<(), TimeoutError> {
self.initialize_mpsse(&MpsseSettings::default())
}
/// Synchronize the MPSSE port with the application.
///
/// There are various implementations of the synchronization flow, this
/// uses the flow from [FTDI MPSSE Basics].
///
/// [FTDI MPSSE Basics]: https://www.ftdichip.com/Support/Documents/AppNotes/AN_135_MPSSE_Basics.pdf
fn synchronize_mpsse(&mut self) -> Result<(), TimeoutError> {
self.purge_rx()?;
debug_assert_eq!(self.queue_status()?, 0);
self.write_all(&[ECHO_CMD_2])?;
// the FTDI MPSSE basics polls the queue status here
// we purged the RX buffer so the response should always be 2 bytes
// this allows us to leverage the timeout built into read
let mut buf: [u8; 2] = [0; 2];
self.read_all(&mut buf)?;
if buf[0] == 0xFA && buf[1] == ECHO_CMD_2 {
Ok(())
} else {
Err(TimeoutError::from(FtStatus::OTHER_ERROR))
}
}
/// Enable the MPSSE loopback state.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft4232h, FtdiMpsse};
///
/// let mut ft = Ft4232h::with_serial_number("FT4PWSEOA")?;
/// ft.initialize_mpsse_default()?;
/// ft.enable_loopback()?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
fn enable_loopback(&mut self) -> Result<(), TimeoutError> {
self.write_all(&[MpsseCmd::EnableLoopback.into()])
}
/// Disable the MPSSE loopback state.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft4232h, FtdiMpsse};
///
/// let mut ft = Ft4232h::with_serial_number("FT4PWSEOA")?;
/// ft.initialize_mpsse_default()?;
/// ft.disable_loopback()?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
fn disable_loopback(&mut self) -> Result<(), TimeoutError> {
self.write_all(&[MpsseCmd::DisableLoopback.into()])
}
/// Set the pin direction and state of the lower byte (0-7) GPIO pins on the
/// MPSSE interface.
///
/// The pins that this controls depends on the device.
///
/// * On the FT232H this will control the AD0-AD7 pins.
///
/// # Arguments
///
/// * `state` - GPIO state mask, `0` is low (or input pin), `1` is high.
/// * `direction` - GPIO direction mask, `0` is input, `1` is output.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft232h, FtdiMpsse};
///
/// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
/// ft.initialize_mpsse_default()?;
/// ft.set_gpio_lower(0xFF, 0xFF)?;
/// ft.set_gpio_lower(0x00, 0xFF)?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
fn set_gpio_lower(&mut self, state: u8, direction: u8) -> Result<(), TimeoutError> {
self.write_all(&[MpsseCmd::SetDataBitsLowbyte.into(), state, direction])
}
/// Get the pin state state of the lower byte (0-7) GPIO pins on the MPSSE
/// interface.
///
/// # Example
///
/// Set the first GPIO, without modify the state of the other GPIOs.
///
/// ```no_run
/// use libftd2xx::{Ft232h, FtdiMpsse};
///
/// let mut ft = Ft232h::with_serial_number("FT59UO4C")?;
/// ft.initialize_mpsse_default()?;
/// let mut gpio_state: u8 = ft.gpio_lower()?;
/// gpio_state |= 0x01;
/// ft.set_gpio_lower(gpio_state, 0xFF)?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
fn gpio_lower(&mut self) -> Result<u8, TimeoutError> {
self.write_all(&[
MpsseCmd::GetDataBitsLowbyte.into(),
MpsseCmd::SendImmediate.into(),
])?;
let mut buf: [u8; 1] = [0];
self.read_all(&mut buf)?;
Ok(buf[0])
}
/// Set the pin direction and state of the upper byte (8-15) GPIO pins on
/// the MPSSE interface.
///
/// The pins that this controls depends on the device.
/// This method may do nothing for some devices, such as the FT4232H that
/// only have 8 pins per port.
///
/// See [`set_gpio_lower`] for an example.
///
/// # Arguments
///
/// * `state` - GPIO state mask, `0` is low (or input pin), `1` is high.
/// * `direction` - GPIO direction mask, `0` is input, `1` is output.
///
/// # FT232H Corner Case
///
/// On the FT232H only CBUS5, CBUS6, CBUS8, and CBUS9 can be controlled.
/// These pins confusingly map to the first four bits in the direction and
/// state masks.
///
/// [`set_gpio_lower`]: FtdiMpsse::set_gpio_lower
fn set_gpio_upper(&mut self, state: u8, direction: u8) -> Result<(), TimeoutError> {
self.write_all(&[MpsseCmd::SetDataBitsHighbyte.into(), state, direction])
}
/// Get the pin state state of the upper byte (8-15) GPIO pins on the MPSSE
/// interface.
///
/// See [`gpio_lower`] for an example.
///
/// See [`set_gpio_upper`] for additional information about physical pin
/// mappings.
///
/// [`gpio_lower`]: FtdiMpsse::gpio_lower
/// [`set_gpio_upper`]: FtdiMpsse::set_gpio_upper
fn gpio_upper(&mut self) -> Result<u8, TimeoutError> {
self.write_all(&[
MpsseCmd::GetDataBitsHighbyte.into(),
MpsseCmd::SendImmediate.into(),
])?;
let mut buf: [u8; 1] = [0];
self.read_all(&mut buf)?;
Ok(buf[0])
}
/// Clock data out.
///
/// This will clock out bytes on TDI/DO.
/// No data is clocked into the device on TDO/DI.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{ClockDataOut, Ft232h, FtdiMpsse};
///
/// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
/// ft.initialize_mpsse_default()?;
/// ft.set_clock(100_000)?;
/// ft.set_gpio_lower(0xFA, 0xFB)?;
/// ft.set_gpio_lower(0xF2, 0xFB)?;
/// ft.clock_data_out(ClockDataOut::MsbNeg, &[0x12, 0x34, 0x56])?;
/// ft.set_gpio_lower(0xFA, 0xFB)?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
fn clock_data_out(&mut self, mode: ClockDataOut, data: &[u8]) -> Result<(), TimeoutError> {
let mut len = data.len();
if len == 0 {
return Ok(());
}
len -= 1;
assert!(len <= 65536);
let mut payload = vec![mode.into(), (len & 0xFF) as u8, ((len >> 8) & 0xFF) as u8];
payload.extend_from_slice(&data);
self.write_all(&payload.as_slice())
}
/// Clock data in.
///
/// This will clock in bytes on TDO/DI.
/// No data is clocked out of the device on TDI/DO.
fn clock_data_in(&mut self, mode: ClockDataIn, data: &mut [u8]) -> Result<(), TimeoutError> {
let mut len = data.len();
if len == 0 {
return Ok(());
}
len -= 1;
assert!(len <= 65536);
self.write_all(&[mode.into(), (len & 0xFF) as u8, ((len >> 8) & 0xFF) as u8])?;
self.read_all(data)
}
/// Clock data in and out at the same time.
fn clock_data(&mut self, mode: ClockData, data: &mut [u8]) -> Result<(), TimeoutError> {
let mut len = data.len();
if len == 0 {
return Ok(());
}
len -= 1;
assert!(len <= 65536);
let mut payload = vec![mode.into(), (len & 0xFF) as u8, ((len >> 8) & 0xFF) as u8];
payload.extend_from_slice(&data);
self.write_all(&payload.as_slice())?;
self.read_all(data)
}
}
/// This contains MPSSE commands that are only available on the FT232H,
/// FT2232H, and FT4232H devices.
///
/// For details about the MPSSE read the [FTDI MPSSE Basics].
///
/// [FTDI MPSSE Basics]: https://www.ftdichip.com/Support/Documents/AppNotes/AN_135_MPSSE_Basics.pdf
pub trait Ftx232hMpsse: FtdiMpsse {
    /// Enable 3 phase data clocking.
    ///
    /// This will give a 3 stage data shift for the purposes of supporting
    /// interfaces such as I2C which need the data to be valid on both edges of
    /// the clock.
    ///
    /// It will appear as:
    ///
    /// 1. Data setup for 1/2 clock period
    /// 2. Pulse clock for 1/2 clock period
    /// 3. Data hold for 1/2 clock period
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiMpsse, Ftx232hMpsse};
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.enable_3phase_data_clocking()?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    fn enable_3phase_data_clocking(&mut self) -> Result<(), TimeoutError> {
        self.write_all(&[MpsseCmd::Enable3PhaseClocking.into()])
    }
    /// Disable 3 phase data clocking.
    ///
    /// This will give a 2 stage data shift which is the default state.
    ///
    /// It will appear as:
    ///
    /// 1. Data setup for 1/2 clock period
    /// 2. Pulse clock for 1/2 clock period
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiMpsse, Ftx232hMpsse};
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.disable_3phase_data_clocking()?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    fn disable_3phase_data_clocking(&mut self) -> Result<(), TimeoutError> {
        self.write_all(&[MpsseCmd::Disable3PhaseClocking.into()])
    }
}
/// FTDI Multi-Protocol Synchronous Serial Engine (MPSSE) command builder.
///
/// For details about the MPSSE read the [FTDI MPSSE Basics].
///
/// This structure is a `Vec<u8>` that the methods push bytewise commands onto.
/// These commands can then be written to the device with the [`write_all`]
/// method.
///
/// This is useful for creating commands that need to do multiple operations
/// quickly, since individual [`write_all`] calls can be expensive.
/// For example, this can be used to set a GPIO low and clock data out for
/// SPI operations.
///
/// The inner `Vec<u8>` is public, allowing raw command bytes to be
/// appended directly when a builder method does not exist for them.
///
/// [FTDI MPSSE Basics]: https://www.ftdichip.com/Support/Documents/AppNotes/AN_135_MPSSE_Basics.pdf
/// [`write_all`]: FtdiCommon::write_all
pub struct MpsseCmdBuilder(pub Vec<u8>);
impl MpsseCmdBuilder {
/// Create a new command builder.
///
/// # Example
///
/// ```
/// use libftd2xx::MpsseCmdBuilder;
///
/// MpsseCmdBuilder::new();
/// ```
pub const fn new() -> MpsseCmdBuilder {
MpsseCmdBuilder(Vec::new())
}
    /// Create a new command builder from a vector.
    ///
    /// The vector is used as-is; its contents are not validated.
    ///
    /// # Example
    ///
    /// ```
    /// use libftd2xx::MpsseCmdBuilder;
    ///
    /// MpsseCmdBuilder::with_vec(Vec::new());
    /// ```
    pub const fn with_vec(vec: Vec<u8>) -> MpsseCmdBuilder {
        MpsseCmdBuilder(vec)
    }
    /// Get the MPSSE command as a slice.
    ///
    /// The slice contains every command byte pushed so far, in order.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{DeviceType, Ft232h, FtdiCommon, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new().set_clock(100_000, DeviceType::FT232H);
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }
/// Set the clock frequency.
///
/// # Frequency Limits
///
/// | Device Type | Minimum | Maximum |
/// |--------------------------|---------|---------|
/// | FT2232D | 92 Hz | 6 MHz |
/// | FT4232H, FT2232H, FT232H | 92 Hz | 30 MHz |
///
/// Values outside of these limits will result in panic.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{DeviceType, Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
///
/// let cmd = MpsseCmdBuilder::new()
/// .set_clock(100_000, DeviceType::FT232H)
/// .set_gpio_lower(0xFF, 0xFF);
///
/// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
/// ft.initialize_mpsse_default()?;
/// ft.write_all(cmd.as_slice())?;
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
pub fn set_clock(mut self, frequency: u32, device_type: DeviceType) -> Self {
let (value, divisor) = clock_divisor(device_type, frequency);
debug_assert!(value <= 0xFFFF);
if let Some(div) = divisor {
self.0.push(div.into());
};
self.0.push(MpsseCmd::SetClockFrequency.into());
self.0.push((value & 0xFF) as u8);
self.0.push(((value >> 8) & 0xFF) as u8);
self
}
    /// Enable the MPSSE loopback state.
    ///
    /// This only queues the opcode; nothing reaches the device until the
    /// buffer is written.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new().enable_loopback();
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn enable_loopback(mut self) -> Self {
        self.0.push(MpsseCmd::EnableLoopback.into());
        self
    }
    /// Disable the MPSSE loopback state.
    ///
    /// This only queues the opcode; nothing reaches the device until the
    /// buffer is written.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new().disable_loopback();
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn disable_loopback(mut self) -> Self {
        self.0.push(MpsseCmd::DisableLoopback.into());
        self
    }
    /// Disable 3 phase data clocking.
    ///
    /// This is only available on FTx232H devices.
    ///
    /// This will give a 2 stage data shift which is the default state.
    ///
    /// It will appear as:
    ///
    /// 1. Data setup for 1/2 clock period
    /// 2. Pulse clock for 1/2 clock period
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new().disable_3phase_data_clocking();
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn disable_3phase_data_clocking(mut self) -> Self {
        self.0.push(MpsseCmd::Disable3PhaseClocking.into());
        self
    }
    /// Enable 3 phase data clocking.
    ///
    /// This is only available on FTx232H devices.
    ///
    /// This will give a 3 stage data shift for the purposes of supporting
    /// interfaces such as I2C which need the data to be valid on both edges of
    /// the clock.
    ///
    /// It will appear as:
    ///
    /// 1. Data setup for 1/2 clock period
    /// 2. Pulse clock for 1/2 clock period
    /// 3. Data hold for 1/2 clock period
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new().enable_3phase_data_clocking();
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn enable_3phase_data_clocking(mut self) -> Self {
        self.0.push(MpsseCmd::Enable3PhaseClocking.into());
        self
    }
    /// Set the pin direction and state of the lower byte (0-7) GPIO pins on the
    /// MPSSE interface.
    ///
    /// The pins that this controls depends on the device.
    ///
    /// * On the FT232H this will control the AD0-AD7 pins.
    ///
    /// # Arguments
    ///
    /// * `state` - GPIO state mask, `0` is low (or input pin), `1` is high.
    /// * `direction` - GPIO direction mask, `0` is input, `1` is output.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new()
    ///     .set_gpio_lower(0xFF, 0xFF)
    ///     .set_gpio_lower(0x00, 0xFF);
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn set_gpio_lower(mut self, state: u8, direction: u8) -> Self {
        self.0
            .extend_from_slice(&[MpsseCmd::SetDataBitsLowbyte.into(), state, direction]);
        self
    }
    /// Set the pin direction and state of the upper byte (8-15) GPIO pins on
    /// the MPSSE interface.
    ///
    /// The pins that this controls depends on the device.
    /// This method may do nothing for some devices, such as the FT4232H that
    /// only have 8 pins per port.
    ///
    /// # Arguments
    ///
    /// * `state` - GPIO state mask, `0` is low (or input pin), `1` is high.
    /// * `direction` - GPIO direction mask, `0` is input, `1` is output.
    ///
    /// # FT232H Corner Case
    ///
    /// On the FT232H only CBUS5, CBUS6, CBUS8, and CBUS9 can be controlled.
    /// These pins confusingly map to the first four bits in the direction and
    /// state masks.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
    ///
    /// let cmd = MpsseCmdBuilder::new()
    ///     .set_gpio_upper(0xFF, 0xFF)
    ///     .set_gpio_upper(0x00, 0xFF);
    ///
    /// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
    /// ft.initialize_mpsse_default()?;
    /// ft.write_all(cmd.as_slice())?;
    /// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
    /// ```
    pub fn set_gpio_upper(mut self, state: u8, direction: u8) -> Self {
        self.0
            .extend_from_slice(&[MpsseCmd::SetDataBitsHighbyte.into(), state, direction]);
        self
    }
/// Get the pin state state of the lower byte (0-7) GPIO pins on the MPSSE
/// interface.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
///
/// let cmd = MpsseCmdBuilder::new().gpio_lower().send_immediate();
///
/// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
/// ft.initialize_mpsse_default()?;
/// ft.write_all(cmd.as_slice())?;
/// let mut buf: [u8; 1] = [0; 1];
/// ft.read_all(&mut buf)?;
/// println!("GPIO lower state: 0x{:02X}", buf[0]);
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
pub fn gpio_lower(mut self) -> Self {
self.0.push(MpsseCmd::GetDataBitsLowbyte.into());
self
}
/// Get the pin state state of the upper byte (8-15) GPIO pins on the MPSSE
/// interface.
///
/// See [`set_gpio_upper`] for additional information about physical pin
/// mappings.
///
/// # Example
///
/// ```no_run
/// use libftd2xx::{Ft232h, FtdiCommon, FtdiMpsse, MpsseCmdBuilder};
///
/// let cmd = MpsseCmdBuilder::new().gpio_upper().send_immediate();
///
/// let mut ft = Ft232h::with_serial_number("FT5AVX6B")?;
/// ft.initialize_mpsse_default()?;
/// ft.write_all(cmd.as_slice())?;
/// let mut buf: [u8; 1] = [0; 1];
/// ft.read_all(&mut buf)?;
/// println!("GPIO upper state: 0x{:02X}", buf[0]);
/// # Ok::<(), std::boxed::Box<dyn std::error::Error>>(())
/// ```
///
/// [`set_gpio_upper`]: FtdiMpsse::set_gpio_upper
pub fn gpio_upper(mut self) -> Self {
self.0.push(MpsseCmd::GetDataBitsHighbyte.into());
self
}
/// Send the preceding commands immediately.
///
/// # Example
///
/// ```
/// use libftd2xx::MpsseCmdBuilder;
///
/// let cmd = MpsseCmdBuilder::new()
/// .set_gpio_upper(0xFF, 0xFF)
/// .set_gpio_upper(0x00, 0xFF)
/// .send_immediate();
/// ```
pub fn send_immediate(mut self) -> Self {
self.0.push(MpsseCmd::SendImmediate.into());
self
}
/// Make controller wait until GPIOL1 or I/O1 is high before running further commands.
///
/// # Example
///
/// ```
/// use libftd2xx::{ClockData, MpsseCmdBuilder};
///
/// // Assume a "chip ready" signal is connected to GPIOL1. This signal is pulled high
/// // shortly after AD3 (chip select) is pulled low. Data will not be clocked out until
/// // the chip is ready.
/// let cmd = MpsseCmdBuilder::new()
/// .set_gpio_lower(0x0, 0xb)
/// .wait_on_io_high()
/// .clock_data(ClockData::MsbPosIn, &[0x12, 0x34, 0x56])
/// .set_gpio_lower(0x8, 0xb)
/// .send_immediate();
/// ```
pub fn wait_on_io_high(mut self) -> Self {
self.0.push(MpsseCmd::WaitOnIOHigh.into());
self
}
/// Make controller wait until GPIOL1 or I/O1 is low before running further commands.
///
/// # Example
///
/// ```
/// use libftd2xx::{ClockData, MpsseCmdBuilder};
///
/// // Assume a "chip ready" signal is connected to GPIOL1. This signal is pulled low
/// // shortly after AD3 (chip select) is pulled low. Data will not be clocked out until
/// // the chip is ready.
/// let cmd = MpsseCmdBuilder::new()
/// .set_gpio_lower(0x0, 0xb)
/// .wait_on_io_low()
/// .clock_data(ClockData::MsbPosIn, &[0x12, 0x34, 0x56])
/// .set_gpio_lower(0x8, 0xb)
/// .send_immediate();
/// ```
pub fn wait_on_io_low(mut self) -> Self {
self.0.push(MpsseCmd::WaitOnIOLow.into());
self
}
/// Clock data out.
///
/// This will clock out bytes on TDI/DO.
/// No data is clocked into the device on TDO/DI.
///
/// This will panic for data lengths greater than `u16::MAX + 1`.
pub fn clock_data_out(mut self, mode: ClockDataOut, data: &[u8]) -> Self {
let mut len = data.len();
assert!(len <= 65536, "data length cannot exceed u16::MAX + 1");
if len == 0 {
return self;
}
len -= 1;
self.0
.extend_from_slice(&[mode.into(), (len & 0xFF) as u8, ((len >> 8) & 0xFF) as u8]);
self.0.extend_from_slice(&data);
self
}
/// Clock data in.
///
/// This will clock in bytes on TDO/DI.
/// No data is clocked out of the device on TDI/DO.
///
/// # Arguments
///
/// * `mode` - Data clocking mode.
/// * `len` - Number of bytes to clock in.
/// This will panic for values greater than `u16::MAX + 1`.
pub fn clock_data_in(mut self, mode: ClockDataIn, mut len: usize) -> Self {
assert!(len <= 65536, "data length cannot exceed u16::MAX + 1");
if len == 0 {
return self;
}
len -= 1;
self.0
.extend_from_slice(&[mode.into(), (len & 0xFF) as u8, ((len >> 8) & 0xFF) as u8]);
self
}
/// Clock data in and out simultaneously.
///
/// This will panic for data lengths greater than `u16::MAX + 1`.
pub fn clock_data(mut self, mode: ClockData, data: &[u8]) -> Self {
let mut len = data.len();
assert!(len <= 65536, "data length cannot exceed u16::MAX + 1");
if len == 0 {
return self;
}
len -= 1;
self.0
.extend_from_slice(&[mode.into(), (len & 0xFF) as u8, ((len >> 8) & 0xFF) as u8]);
self.0.extend_from_slice(&data);
self
}
/// Clock data bits out.
///
/// # Arguments
///
/// * `mode` - Bit clocking mode.
/// * `data` - Data bits.
/// * `len` - Number of bits to clock out.
/// This will panic for values greater than 8.
pub fn clock_bits_out(mut self, mode: ClockBitsOut, data: u8, mut len: u8) -> Self {
assert!(len <= 8, "data length cannot exceed 8");
if len == 0 {
return self;
}
len -= 1;
self.0.extend_from_slice(&[mode.into(), len, data]);
self
}
/// Clock data bits in.
///
/// # Arguments
///
/// * `mode` - Bit clocking mode.
/// * `len` - Number of bits to clock in.
/// This will panic for values greater than 8.
pub fn clock_bits_in(mut self, mode: ClockBitsIn, mut len: u8) -> Self {
assert!(len <= 8, "data length cannot exceed 8");
if len == 0 {
return self;
}
len -= 1;
self.0.extend_from_slice(&[mode.into(), len]);
self
}
    /// Clock data bits in and out simultaneously.
    ///
    /// # Arguments
    ///
    /// * `mode` - Bit clocking mode.
    /// * `data` - Data bits to clock out.
    /// * `len` - Number of bits to clock in and out.
    /// This will panic for values greater than 8.
    pub fn clock_bits(mut self, mode: ClockBits, data: u8, mut len: u8) -> Self {
        assert!(len <= 8, "data length cannot exceed 8");
        if len == 0 {
            return self;
        }
        // The length field encodes the bit count minus one.
        len -= 1;
        self.0.extend_from_slice(&[mode.into(), len, data]);
        self
    }
}
|
// Submodule declarations.
pub mod networks;
pub mod transaction_types;
// Re-export the primary types at this level so callers can use shorter paths.
pub use self::networks::Network;
pub use self::transaction_types::TransactionType;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.