text stringlengths 8 4.13M |
|---|
// Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate colored;
use crate::grin_util as util;
use crate::grin_util::secp::key::{PublicKey, SecretKey};
use crate::grin_util::secp::pedersen::Commitment;
use crate::grin_util::secp::{pedersen, ContextFlag, Secp256k1, Signature};
use crate::proof::crypto::Hex;
use super::crypto;
use super::message::EncryptedMessage;
use super::proofaddress::ProvableAddress;
use crate::error::{Error, ErrorKind};
use crate::signature::Signature as otherSignature;
use crate::slate_versions::VersionedSlate;
use crate::Slate;
use ed25519_dalek::Verifier;
use std::collections::HashMap;
use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use std::{fs, path};
use util::Mutex;
use crate::grin_core::core::amount_to_hr_string;
use crate::grin_core::core::Committed;
use crate::grin_core::global;
use colored::*;
use std::collections::HashSet;
/// Name of the wallet-data subdirectory where transaction proof files are stored.
pub const TX_PROOF_SAVE_DIR: &'static str = "saved_proofs";
lazy_static! {
    /// Global in-memory proof storage, keyed by slate UUID. Proofs are pushed
    /// here while a transaction is being built and popped at Finalize time.
    static ref SLATE_PROOFS: Mutex< HashMap<uuid::Uuid, TxProof> > = Mutex::new(HashMap::new());
}
/// Store a proof in the global in-memory map under the slate UUID.
pub fn push_proof_for_slate(uuid: &uuid::Uuid, proof: TxProof) {
    let mut proofs = SLATE_PROOFS.lock();
    proofs.insert(uuid.clone(), proof);
}
/// Remove and return the proof stored for the given slate UUID, if any.
/// Expected to be called at Finalize time.
pub fn pop_proof_for_slate(uuid: &uuid::Uuid) -> Option<TxProof> {
    let mut proofs = SLATE_PROOFS.lock();
    proofs.remove(uuid)
}
/// Tx Proof - the mwc713 based proof that can be made for any address that is a public key.
/// we would like to generalize mwc713 proof implementation to be used in mwc-wallet proof framework with changing
/// of the message to generate signature in receiver wallet.
/// in mwc713 proof signature is generated using json string of slate; and after upgrade
/// it is generated using three factors: amount,sender address and commitment sum.
#[derive(Debug, Serialize, Deserialize)]
pub struct TxProof {
    /// Receiver (proof) address the signature is verified against.
    pub address: ProvableAddress,
    /// Legacy proofs: JSON of the EncryptedMessage carrying the slate.
    /// Newer proofs keep the encrypted slate in `slate_message` instead.
    pub message: String,
    /// Challenge string; message + challenge is the signed payload.
    pub challenge: String,
    /// Secp signature over message + challenge (legacy / "version2" proofs).
    pub signature: Option<Signature>,
    /// Symmetric key used to decrypt the encrypted slate message.
    pub key: [u8; 32],
    /// Placeholder, set to 0 by the constructors in this file.
    pub amount: u64,
    /// Placeholder, set to 0 by the constructors in this file.
    pub fee: u64,
    /// Placeholder; commitments excluded from verification in `verify_tx_proof`.
    pub inputs: Vec<Commitment>,
    /// Placeholder; commitments excluded from verification in `verify_tx_proof`.
    pub outputs: Vec<Commitment>,
    /// added to support the new proof implementation but be backward compatible.
    /// None = legacy, "version2" = new secp proof, "tor" = Dalek-signed proof.
    pub version: Option<String>,
    /// this is the encrypted slate message (newer proof versions only)
    pub slate_message: Option<String>,
    /// Tor (Dalek ed25519) signature
    pub tor_proof_signature: Option<String>,
    /// Tor Sender address
    pub tor_sender_address: Option<String>,
}
impl TxProof {
    /// Verify this Proof's signature and extract the sender address plus the
    /// decrypted slate it carries.
    ///
    /// Two verification paths exist:
    /// * "tor" proofs: ed25519 (Dalek) signature checked against the receiver's tor key;
    /// * otherwise: secp signature checked against the receiver's public key.
    ///
    /// `expected_destination` - when given, the sender address embedded in the
    /// encrypted message must match it.
    pub fn verify_extract(
        &self,
        expected_destination: Option<&ProvableAddress>,
    ) -> Result<(ProvableAddress, Slate), ErrorKind> {
        // The signed payload is the concatenation of message and challenge.
        let mut challenge = String::new();
        challenge.push_str(self.message.as_str());
        challenge.push_str(self.challenge.as_str());
        // A version tag of "tor" selects the ed25519 verification path.
        let mut tor_proof = false;
        if let Some(version) = &self.version {
            if version.eq("tor") {
                tor_proof = true;
            }
        }
        if tor_proof {
            // NOTE(review): a tor proof with a missing `tor_proof_signature`
            // skips signature verification entirely - confirm this is intended.
            if let Some(signature) = &self.tor_proof_signature {
                let dalek_sig_vec = util::from_hex(&signature).map_err(|e| {
                    ErrorKind::TxProofGenericError(format!(
                        "Unable to deserialize tor payment proof signature, {}",
                        e
                    ))
                })?;
                let dalek_sig = ed25519_dalek::Signature::from_bytes(dalek_sig_vec.as_ref())
                    .map_err(|e| {
                        ErrorKind::TxProofGenericError(format!(
                            "Unable to deserialize tor payment proof receiver signature, {}",
                            e
                        ))
                    })?;
                let receiver_dalek_pub_key = self.address.tor_public_key().map_err(|e| {
                    ErrorKind::TxProofGenericError(format!(
                        "Unable to deserialize tor payment proof receiver address, {}",
                        e
                    ))
                })?;
                if let Err(e) = receiver_dalek_pub_key.verify(&challenge.as_bytes(), &dalek_sig) {
                    return Err(ErrorKind::PaymentProof(format!(
                        "Invalid proof signature, {}",
                        e
                    )))?;
                };
            }
        } else {
            // Legacy path: secp signature over the challenge, verified against
            // the public key derived from the receiver address.
            let public_key = self.address.public_key().map_err(|e| {
                ErrorKind::TxProofGenericError(format!(
                    "Unable to build public key from address {}, {}",
                    self.address, e
                ))
            })?;
            if let Some(signature) = &self.signature {
                crypto::verify_signature(&challenge, &signature, &public_key)
                    .map_err(|e| ErrorKind::TxProofVerifySignature(format!("{}", e)))?;
            } else {
                return Err(ErrorKind::TxProofVerifySignature(format!(
                    "empty proof signature!"
                )));
            }
        }
        // Newer (versioned) proofs keep the encrypted slate in `slate_message`;
        // legacy proofs keep it directly in `message`.
        let encrypted_message: EncryptedMessage;
        if let Some(_version) = &self.version {
            //this is the newer version tx_proof
            encrypted_message = serde_json::from_str(&self.slate_message.clone().unwrap())
                .map_err(|e| {
                    ErrorKind::TxProofGenericError(format!(
                        "Fail to convert Json to EncryptedMessage {}, {}",
                        self.message, e
                    ))
                })?;
        } else {
            encrypted_message = serde_json::from_str(&self.message.clone()).map_err(|e| {
                ErrorKind::TxProofGenericError(format!(
                    "Fail to convert proof message Json to EncryptedMessage {}, {}",
                    self.message, e
                ))
            })?;
        }
        // TODO: at some point, make this check required
        let destination = &encrypted_message.destination; //sender address
        if expected_destination.is_some()
            && destination.public_key != expected_destination.clone().unwrap().public_key
        {
            return Err(ErrorKind::TxProofVerifyDestination(
                expected_destination.unwrap().public_key.clone(),
                destination.public_key.clone(),
            ));
        }
        let mut decrypted_message = encrypted_message.decrypt_with_key(&self.key).map_err(|e| {
            ErrorKind::TxProofGenericError(format!("Unable to decrypt message, {}", e))
        })?;
        //the decrypted_message cloud have been appended with the _<torkey>tor
        // Suffix layout: 56-char onion key + "tor" marker = 59 trailing chars.
        let mut tor_key = "tor".to_string();
        if decrypted_message.ends_with("tor") {
            let leng = decrypted_message.len();
            if leng <= 59 {
                return Err(ErrorKind::TxProofGenericError(format!(
                    "Unable to build Slate form proof message"
                )));
            }
            // Take the trailing 59 chars, then drop the "tor" marker so only
            // the 56-char onion key remains.
            tor_key = decrypted_message.clone()[leng - 59..].to_string();
            tor_key.truncate(56);
            decrypted_message.truncate(leng - 59); //remove the "tor" and tor_key from the elnd
        }
        let slate = Slate::deserialize_upgrade_plain(&decrypted_message).map_err(|e| {
            ErrorKind::TxProofGenericError(format!(
                "Unable to build Slate form proof message, {}",
                e
            ))
        })?;
        //for mwc713 display purpose. the destination needs to be onion address
        if let Some(onion_addr) = self.tor_sender_address.clone() {
            // The onion key embedded in the message must match the recorded sender.
            if tor_key != "tor" && tor_key != onion_addr {
                return Err(ErrorKind::TxProofVerifySender(
                    tor_key.to_string(),
                    onion_addr,
                ));
            }
            let tor_sender = ProvableAddress::from_str(&onion_addr).map_err(|e| {
                ErrorKind::TxProofGenericError(format!(
                    "Unable to create sender onion address, {}",
                    e
                ))
            })?;
            Ok((tor_sender, slate))
        } else {
            Ok((destination.clone(), slate))
        }
    }
    /// Build proof data. message suppose to be slate.
    ///
    /// `message` is expected to be the JSON of an `EncryptedMessage` carrying
    /// the slate; `signature` is the receiver's DER signature over
    /// message + challenge, hex encoded. The resulting legacy (version = None)
    /// proof is verified via `verify_extract` before it is returned.
    pub fn from_response(
        from: &ProvableAddress,
        message: String,
        challenge: String,
        signature: String,
        secret_key: &SecretKey,
        expected_destination: &ProvableAddress,
    ) -> Result<(Slate, TxProof), ErrorKind> {
        let address = from;
        // Hex string -> DER bytes -> secp signature.
        let signature = util::from_hex(&signature).map_err(|e| {
            ErrorKind::TxProofGenericError(format!(
                "Unable to build signature from HEX {}, {}",
                signature, e
            ))
        })?;
        let signature = Signature::from_der(&signature).map_err(|e| {
            ErrorKind::TxProofGenericError(format!("Unable to build signature, {}", e))
        })?;
        let public_key = address.public_key().map_err(|e| {
            ErrorKind::TxProofGenericError(format!(
                "Unable to build public key for address {}, {}",
                address, e
            ))
        })?;
        let encrypted_message: EncryptedMessage = serde_json::from_str(&message).map_err(|e| {
            ErrorKind::TxProofGenericError(format!(
                "Unable to build message fom HEX {}, {}",
                message, e
            ))
        })?;
        // Derive the decryption key from the sender public key and our secret key.
        let key = encrypted_message
            .key(&public_key, secret_key)
            .map_err(|e| {
                ErrorKind::TxProofGenericError(format!("Unable to build a signature, {}", e))
            })?;
        let proof = TxProof {
            address: address.clone(),
            message,
            challenge,
            signature: Some(signature),
            key,
            amount: 0,
            fee: 0,
            inputs: vec![],
            outputs: vec![],
            version: None,
            slate_message: None,
            tor_proof_signature: None,
            tor_sender_address: None,
        };
        // Self-check: the proof must verify and the embedded sender must match.
        let (_, slate) = proof.verify_extract(Some(expected_destination))?;
        Ok((slate, proof))
    }
/// Build proof data from slate
pub fn from_slate(
message: String,
slate: &Slate,
secret_key: &SecretKey,
expected_destination: &ProvableAddress, //sender address
tor_destination: Option<String>, //tor onion address
) -> Result<TxProof, ErrorKind> {
if let Some(p) = slate.payment_proof.clone() {
if let Some(signature) = p.receiver_signature {
//build the signature from signature string:
if p.receiver_address.public_key.len() == 56 {
let address = p.receiver_address;
let _public_key = address.tor_public_key().map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build dalek public key for address {}, {}",
address, e
))
})?;
//build the encrypted message from the slate
//and generate the key.
let version = slate.lowest_version();
let slate = VersionedSlate::into_version_plain(slate.clone(), version)
.map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Slate serialization error, {}",
e
))
})?;
let mut slate_json_with_tor = serde_json::to_string(&slate).map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?;
if let Some(tor_des) = tor_destination.clone() {
slate_json_with_tor = slate_json_with_tor + &tor_des + "tor";
}
let encrypted_message = EncryptedMessage::new(
slate_json_with_tor,
expected_destination, //this is the sender address
&expected_destination.public_key().map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?,
&secret_key,
)
.map_err(|e| ErrorKind::GenericError(format!("Unable encrypt slate, {}", e)))?;
let message_ser = &serde_json::to_string(&encrypted_message).map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?;
let key = encrypted_message
.key(&expected_destination.public_key().unwrap(), secret_key)
.map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build a signature, {}",
e
))
})?;
//create the tor address for the sender wallet.
let proof = TxProof {
address: address.clone(),
message,
challenge: "".to_string(),
signature: None,
key,
amount: 0,
fee: 0,
inputs: vec![],
outputs: vec![],
version: Some("tor".to_string()),
slate_message: Some(message_ser.to_string()),
tor_proof_signature: Some(signature),
tor_sender_address: tor_destination,
};
proof.verify_extract(Some(expected_destination))?;
Ok(proof)
} else {
let address = p.receiver_address;
let signature = util::from_hex(&signature).map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build signature from HEX {}, {}",
signature, e
))
})?;
let signature = Signature::from_der(&signature).map_err(|e| {
ErrorKind::TxProofGenericError(format!("Unable to build signature, {}", e))
})?;
let _public_key = address.public_key().map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?;
//build the encrypted message from the slate
//and generate the key.
let version = slate.lowest_version();
let slate = VersionedSlate::into_version_plain(slate.clone(), version)
.map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Slate serialization error, {}",
e
))
})?;
let encrypted_message = EncryptedMessage::new(
serde_json::to_string(&slate).map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?,
expected_destination, //this is the sender address when receiver wallet sends the slate back
&expected_destination.public_key().map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?,
&secret_key,
)
.map_err(|e| ErrorKind::GenericError(format!("Unable encrypt slate, {}", e)))?;
let message_ser = &serde_json::to_string(&encrypted_message).map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build public key for address {}, {}",
address, e
))
})?;
let key = encrypted_message
.key(&expected_destination.public_key().unwrap(), secret_key)
.map_err(|e| {
ErrorKind::TxProofGenericError(format!(
"Unable to build a signature, {}",
e
))
})?;
let proof = TxProof {
address: address.clone(),
message,
challenge: "".to_string(),
signature: Some(signature),
key,
amount: 0,
fee: 0,
inputs: vec![],
outputs: vec![],
version: Some("version2".to_string()),
slate_message: Some(message_ser.to_string()),
tor_proof_signature: None,
tor_sender_address: None,
};
proof.verify_extract(Some(expected_destination))?;
Ok(proof)
}
} else {
return Err(ErrorKind::TxProofGenericError(
"No receiver signature in payment proof in slate".to_string(),
));
}
} else {
return Err(ErrorKind::TxProofGenericError(
"No pyament proof in slate".to_string(),
));
}
}
/// Init proff files storage
pub fn init_proof_backend(data_file_dir: &str) -> Result<(), Error> {
let stored_tx_proof_path = path::Path::new(data_file_dir).join(TX_PROOF_SAVE_DIR);
fs::create_dir_all(&stored_tx_proof_path)
.expect("Couldn't create wallet backend tx proof storage directory!");
Ok(())
}
/// Check if Proofs are here
pub fn has_stored_tx_proof(data_file_dir: &str, uuid: &str) -> Result<bool, Error> {
let filename = format!("{}.proof", uuid);
let path = path::Path::new(data_file_dir)
.join(TX_PROOF_SAVE_DIR)
.join(filename);
let tx_proof_file = Path::new(&path).to_path_buf();
Ok(tx_proof_file.exists())
}
/// Read stored proof file. data_file_dir
pub fn get_stored_tx_proof(data_file_dir: &str, uuid: &str) -> Result<TxProof, Error> {
let filename = format!("{}.proof", uuid);
let path = path::Path::new(data_file_dir)
.join(TX_PROOF_SAVE_DIR)
.join(filename);
let tx_proof_file = Path::new(&path).to_path_buf();
if !tx_proof_file.exists() {
return Err(ErrorKind::TransactionHasNoProof(
tx_proof_file.to_str().unwrap_or(&"UNKNOWN").to_string(),
)
.into());
}
let mut tx_proof_f = File::open(tx_proof_file)?;
let mut content = String::new();
tx_proof_f.read_to_string(&mut content)?;
Ok(serde_json::from_str(&content).map_err(|e| {
ErrorKind::TxProofGenericError(format!("Unable to Build TxProof from Json, {}", e))
})?)
}
/// Store tx proof at the file.
pub fn store_tx_proof(&self, data_file_dir: &str, uuid: &str) -> Result<(), Error> {
let filename = format!("{}.proof", uuid);
let path = path::Path::new(data_file_dir)
.join(TX_PROOF_SAVE_DIR)
.join(filename);
let path_buf = Path::new(&path).to_path_buf();
let mut stored_tx = File::create(path_buf)?;
let proof_ser = serde_json::to_string(self).map_err(|e| {
ErrorKind::TxProofGenericError(format!("Unable to conver TxProof to Json, {}", e))
})?;
stored_tx.write_all(&proof_ser.as_bytes())?;
stored_tx.sync_all()?;
Ok(())
}
}
///support mwc713 payment proof message
///
/// Pretty-print a human-readable summary of a verified proof: amount,
/// receiver, optional sender, output commitments and kernel with block
/// explorer links (mainnet vs floonet chosen at runtime).
pub fn proof_ok(
    sender: Option<String>,
    receiver: String,
    amount: u64,
    outputs: Vec<String>,
    kernel: String,
) {
    // Plain-address form of the sender suffix (empty when sender is unknown).
    let sender_message = sender
        .as_ref()
        .map(|s| format!(" from [{}]", s.bright_green()))
        .unwrap_or(String::new());
    // Onion-URL form of the sender suffix, used for tor receivers.
    let tor_sender_message = sender
        .as_ref()
        .map(|s| {
            format!(
                " from [{}{}{}]",
                "http://".bright_green(),
                s.bright_green(),
                ".onion".bright_green()
            )
        })
        .unwrap_or(String::new());
    // A 56-character receiver is a tor onion key - display it as an onion URL.
    if receiver.len() == 56 {
        println!(
            "this file proves that [{}] MWCs was sent to [{}]{}",
            amount_to_hr_string(amount, false).bright_green(),
            format!(
                "{}{}{}",
                "http://".bright_green(),
                receiver.bright_green(),
                ".onion".bright_green()
            ),
            tor_sender_message
        );
    } else {
        println!(
            "this file proves that [{}] MWCs was sent to [{}]{}",
            amount_to_hr_string(amount, false).bright_green(),
            receiver.bright_green(),
            sender_message
        );
    }
    if sender.is_none() {
        println!(
            "{}: this proof does not prove which address sent the funds, only which received it",
            "WARNING".bright_yellow()
        );
    }
    println!("\noutputs:");
    // Explorer base URL differs between mainnet and floonet.
    if global::is_mainnet() {
        for output in outputs {
            println!(
                " {}: https://explorer.mwc.mw/#o{}",
                output.bright_magenta(),
                output
            );
        }
        println!("kernel:");
        println!(
            " {}: https://explorer.mwc.mw/#k{}",
            kernel.bright_magenta(),
            kernel
        );
    } else {
        for output in outputs {
            println!(
                " {}: https://explorer.floonet.mwc.mw/#o{}",
                output.bright_magenta(),
                output
            );
        }
        println!("kernel:");
        println!(
            " {}: https://explorer.floonet.mwc.mw/#k{}",
            kernel.bright_magenta(),
            kernel
        );
    }
    println!("\n{}: this proof should only be considered valid if the kernel is actually on-chain with sufficient confirmations", "WARNING".bright_yellow());
    println!("please use a mwc block explorer to verify this is the case.");
}
//to support mwc713 payment proof verification
//
// Fully verify a TxProof: signature check + slate decryption via
// `verify_extract`, then a balance check that the slate's kernel excess
// matches the sum of participants' public blind excesses.
// Returns (sender address, receiver address, amount, output commitments,
// excess commitment).
fn verify_tx_proof(
    tx_proof: &TxProof,
) -> Result<
    (
        Option<ProvableAddress>,
        ProvableAddress,
        u64,
        Vec<pedersen::Commitment>,
        pedersen::Commitment,
    ),
    Error,
> {
    let secp = &Secp256k1::with_caps(ContextFlag::Commit);
    let (destination, slate) = tx_proof.verify_extract(None).map_err(|e| {
        ErrorKind::TxProofGenericError(format!("Unable to extract destination and slate, {}", e))
    })?;
    // Exclude commitments already recorded on the proof itself.
    let inputs_ex = tx_proof.inputs.iter().collect::<HashSet<_>>();
    let mut inputs: Vec<pedersen::Commitment> = slate
        .tx
        .inputs_committed()
        .iter()
        .filter(|c| !inputs_ex.contains(c))
        .map(|c| c.clone())
        .collect();
    let outputs_ex = tx_proof.outputs.iter().collect::<HashSet<_>>();
    let outputs: Vec<pedersen::Commitment> = slate
        .tx
        .outputs()
        .iter()
        .map(|o| o.commitment())
        .filter(|c| !outputs_ex.contains(c))
        .collect();
    // Combine all participants' public blind excesses; this must equal the
    // commitment-sum computed below for the proof to be consistent.
    let excess_parts: Vec<&PublicKey> = slate
        .participant_data
        .iter()
        .map(|p| &p.public_blind_excess)
        .collect();
    let excess_sum = PublicKey::from_combination(excess_parts).map_err(|e| {
        ErrorKind::TxProofGenericError(format!("Unable to combine public keys, {}", e))
    })?;
    let commit_amount = secp.commit_value(tx_proof.amount)?;
    // NOTE(review): `inputs` (with the amount commitment pushed) is built but
    // never used below - confirm whether this filtered list is still needed.
    inputs.push(commit_amount);
    // Balance check: (outputs + fee) - (inputs + offset*G) == combined excess.
    let mut input_com: Vec<pedersen::Commitment> = slate.tx.inputs_committed();
    let mut output_com: Vec<pedersen::Commitment> = slate.tx.outputs_committed();
    input_com.push(secp.commit(0, slate.tx.offset.secret_key()?)?);
    output_com.push(secp.commit_value(slate.fee)?);
    let excess_sum_com = Secp256k1::commit_sum(output_com, input_com)?;
    if excess_sum_com.to_pubkey()? != excess_sum {
        return Err(ErrorKind::TxProofGenericError("Excess sum mismatch".to_string()).into());
    }
    return Ok((
        Some(destination),
        tx_proof.address.clone(),
        tx_proof.amount,
        outputs,
        excess_sum_com,
    ));
}
///to support mwc713 payment proof verification
///
/// Verify the proof and map the result into display-friendly strings:
/// (sender public key, receiver public key, amount, hex outputs, hex excess).
pub fn verify_tx_proof_wrapper(
    tx_proof: &TxProof,
) -> Result<(Option<String>, String, u64, Vec<String>, String), Error> {
    let (sender, receiver, amount, outputs, excess_sum) = verify_tx_proof(tx_proof)?;
    let output_strings: Vec<String> = outputs
        .iter()
        .map(|commit| crate::grin_util::to_hex(&commit.0))
        .collect();
    let sender_key = sender.map(|addr| addr.public_key.clone());
    Ok((
        sender_key,
        receiver.public_key.clone(),
        amount,
        output_strings,
        excess_sum.to_hex(),
    ))
}
|
use std::io::{Write, Result as IOResult};
use ansi_term::ANSIStrings;
use fs::File;
use output::file_name::{FileName, FileStyle};
use style::Colours;
/// The lines view literally just displays each file, line-by-line.
pub struct Render<'a> {
    /// Files to render, in display order.
    pub files: Vec<File<'a>>,
    /// Colour palette used when painting file names.
    pub colours: &'a Colours,
    /// Styling rules applied to each file's name.
    pub style: &'a FileStyle,
}
impl<'a> Render<'a> {
    /// Write every file's painted name to `w`, one name per line.
    pub fn render<W: Write>(&self, w: &mut W) -> IOResult<()> {
        for file in self.files.iter() {
            let painted = self.render_file(file).paint();
            writeln!(w, "{}", ANSIStrings(&painted))?;
        }
        Ok(())
    }
    /// Build the styled file-name cell for a single file, with link paths shown.
    fn render_file<'f>(&self, file: &'f File<'a>) -> FileName<'f, 'a, Colours> {
        let name = self.style.for_file(file, self.colours);
        name.with_link_paths()
    }
}
|
#![feature(asm)]
#![feature(thread_local)]
use std::thread;
// Thread-local with a non-zero initializer (presumably exercising the TLS
// .tdata segment - confirm against the target's TLS layout).
#[thread_local]
static mut TLS_DATA: usize = 1;
// Thread-local with a zero initializer (presumably the TLS .tbss segment).
#[thread_local]
static mut TLS_BSS: usize = 0;
fn main() {
    // "xchg bx, bx" is commonly used as the Bochs magic breakpoint.
    // NOTE(review): this is the pre-2020 nightly `asm!` syntax (feature(asm));
    // it will not compile on current rustc without migrating to the new asm! form.
    unsafe { asm!("xchg bx, bx" : : : "memory" : "intel", "volatile") };
    unsafe {
        TLS_DATA += 1;
        TLS_BSS += 1;
        println!("PARENT: DATA {}==2 BSS {}==1", TLS_DATA, TLS_BSS);
    }
    // A spawned thread gets its own fresh TLS copies, so it should observe
    // the same initial values (1 and 0) and print the same numbers.
    thread::spawn(|| {
        unsafe {
            TLS_DATA += 1;
            TLS_BSS += 1;
            println!("CHILD: DATA {}==2 BSS {}==1", TLS_DATA, TLS_BSS);
        }
    }).join().unwrap();
}
|
mod fixes;
mod problem;
mod inspection_impl;
// Registry handed to inspections at registration time (no fields yet).
struct InspectionRegistrar {
}
// An inspection registers itself with the registrar; the method takes no
// receiver, so no instance of the implementing type is required.
trait Inspection {
    fn register(holder: InspectionRegistrar);
}
/// Minimal example type with a single method.
struct A {}
impl A {
    /// Always yields the constant 32.
    fn foo(&mut self) -> i32 {
        32
    }
}
fn main() {
let a = A {};
let c = A::foo;
} |
use std::panic;
use rocket::{self, http::{ContentType, Header, Status, StatusClass}, local::Client};
use diesel::connection::SimpleConnection;
use horus_server::{self, routes::dist::*};
use test::{run_test, sql::*};
// The bare /dist/version route (no platform) should answer with a redirect
// (presumably to a platform-specific route - confirm against the handler).
#[test]
fn legacy_redirect()
{
    run(|| {
        let client = get_client();
        let req = client.get("/dist/version");
        let response = req.dispatch();
        assert_eq!(response.status().class(), StatusClass::Redirection);
    });
}
// /dist/version/<platform> returns 200 OK for known platforms.
#[test]
fn version()
{
    run(|| {
        let client = get_client();
        let req = client.get("/dist/version/win64");
        let response = req.dispatch();
        assert_eq!(response.status(), Status::Ok);
        let req = client.get("/dist/version/linux");
        let response = req.dispatch();
        assert_eq!(response.status(), Status::Ok);
    });
}
// An unknown platform yields 400 with a hint about the platform name.
#[test]
fn version_can_fail()
{
    run(|| {
        let client = get_client();
        let req = client.get("/dist/version/notaversion");
        let mut response = req.dispatch();
        assert_eq!(response.status(), Status::BadRequest);
        assert!(
            response
                .body_string()
                .unwrap()
                .contains("platform is correct?")
        );
    });
}
// Authorized request for the latest linux build redirects to the artifact.
#[test]
fn get_latest()
{
    run(|| {
        let client = get_client();
        let req = client.get("/dist/latest/linux").header(auth_header());
        let response = req.dispatch();
        assert_eq!(response.status().class(), StatusClass::Redirection);
    });
}
// Requesting the latest build of an unknown platform yields 404.
#[test]
fn get_latest_can_fail()
{
    run(|| {
        let client = get_client();
        let req = client.get("/dist/latest/failure").header(auth_header());
        let response = req.dispatch();
        assert_eq!(response.status(), Status::NotFound);
    });
}
// Deploying a new build with api key + deployment key is accepted (202) and
// the body confirms that the job was queued.
#[test]
fn deploy()
{
    run(|| {
        let client = get_client();
        let body = "test_body";
        let req = client
            .post("/dist/deploy/new/linux/99.99.99")
            .header(Header::new("content-type", "application/octet-stream"))
            .header(api_key_header())
            .header(depkey_header())
            .body(body);
        let mut response = req.dispatch();
        assert_eq!(response.status(), Status::Accepted);
        assert!(
            response
                .body_string()
                .unwrap()
                .contains("queued for processing.")
        );
    });
}
// Run `test` between database setup and teardown (see run_test for ordering).
fn run<T>(test: T) -> ()
where
    T: FnOnce() -> () + panic::UnwindSafe,
{
    run_test(test, setup_db, unsetup_db);
}
/// Seed the database with the user, license, and deployment-key fixtures.
fn setup_db()
{
    let conn = horus_server::dbtools::get_db_conn_requestless().unwrap();
    let setup_sql = format!(
        "{}{}{}",
        sql_insert_user(),
        sql_insert_license(),
        sql_insert_depkey()
    );
    conn.batch_execute(&setup_sql).unwrap();
}
/// Remove the fixtures inserted by `setup_db`, plus any jobs the test created.
fn unsetup_db()
{
    let conn = horus_server::dbtools::get_db_conn_requestless().unwrap();
    let mut cleanup_sql = format!("DELETE FROM horus_jobs WHERE owner = {};", USER_ID);
    cleanup_sql.push_str(&sql_delete_user());
    conn.batch_execute(&cleanup_sql).unwrap();
}
// Build a Rocket instance with the /dist routes, the template fairing and the
// DB pool, wrapped in a local test client.
fn get_client() -> Client
{
    use rocket_contrib::Template;
    let rocket = rocket::ignite()
        .attach(Template::fairing())
        .mount(
            "/dist",
            routes![
                version_legacy,
                get_version,
                get_latest,
                enable_deployment,
                deploy
            ],
        )
        .manage(horus_server::dbtools::init_pool());
    Client::new(rocket).expect("valid rocket instance")
}
|
extern crate time;
extern crate serde;
extern crate serde_json;
extern crate sha2;
use sha2::{Sha256, Digest};
use std::fmt::Write;
#[debug(Serialize, Clone, Debug)]
struct Transaction {
sender: String,
reciever: String,
amount: f32,
}
#[debug(Serialize, Debug)]
pub struct Blockheader {
timestamp: i64,
nonce: u32,
pre_hash: String,
merkle: String,
difficulty: u32,
}
/// A mined block: header plus the transactions it contains.
/// Derives Debug because `generate_block` prints the block with `{:?}`.
#[derive(Debug)]
pub struct Block {
    header: Blockheader,
    // Number of transactions in `transactions`.
    count: u32,
    transactions: Vec<Transaction>,
}
/// The blockchain state: mined blocks, pending transactions and mining settings.
pub struct Chain {
    // All mined blocks, oldest first.
    chain: Vec<Block>,
    // Transactions waiting to be included in the next block.
    current_transaction: Vec<Transaction>,
    // Address credited with the mining reward.
    miner_address: String,
    // Reward paid to the miner per generated block.
    reward: f32,
    // Mining difficulty (see Blockheader::difficulty).
    difficulty: u32,
}
impl Chain {
    /// Create a chain with the given miner address and difficulty, and mine
    /// the genesis block immediately.
    pub fn new(miner_address: String, difficulty: u32) -> Chain {
        let mut chain = Chain {
            chain : Vec::new(),
            current_transaction: Vec::new(),
            difficulty,
            miner_address,
            // Fixed initial mining reward.
            reward: 100.0,
        };
        //generation of new block
        chain.generate_block();
        //return the newly generated chain
        chain
    }
pub fn new_transaction(&mut self, sender: String, receiver: String, amount: f32) -> bool { //as we want to change the instance
self.current_transaction.push(Transaction{ //prevents the original instance from being used when function is called
sender,
receiver,
amount,
});
//returns true if a new transaction has occured
true
}
pub fn last_hash(&self) -> String {
let block = self.chain.last() {
Some(block) => block,
None => return String::from_utf8(vec![48; 64]).unwrap();
};
Chain::hash(&block.header)
}
pub fb update_difficulty(&mut self, difficulty: u32) -> bool {
self.difficulty = difficulty;
true
}
pub fb update_reward(&mut self, reward: u32) -> bool {
self.reward = reward;
true
}
pub fn generate_block(&mut self) -> bool {
let header = Blockheader{
timestamp: time::now().to_timespec().sec,//converting in secconds
nonce: 0,
pre_hash: self.last_hash(),
merkle: String::new();
difficulty: self.difficulty,
}
let reward_transaction = Transaction {
sender: String::from("Root"),
reciever: self.miner_address.clone();
amount: self.reward;
}
let mut block = Block {
header,//created above
count:0 ,// as no current transactions are occuring
transactions: vec![]//empty transcation
}
block.transactions.push(reward_transaction);
block.transactions.append(current_transaction);
block.count = block.transactions.len() as u32;
block.header.merkle = Chain::get_merkle(block.transactions.clone());
Chain::proof_of_work(&mut block.header);
println!("{:?}", &block);
self.chain.push(block);
true
}
fn get_merkle(current_transaction: Vec<Transaction>) -> String {
let mut merkle = Vec::new();
for i in ¤t_transaction {
let hash = Chain::hash(i);
merkle.push(hash);
}
//odd merkle length
if merkle.len() % 2 == 1 {
let last_one = merkle.last().cloned().unwrap();//cloning the last hash
}
if merkle.len() > 1{
//removing the last 2 hashes
let mut hl = merkle.remove(0);
let mut hll = merkle.remove(0);
hl.push_str(&mut h2);//pushing 2nd hash to first
let new_hash = Chain::hash(&hl);
merkle.push(new_hash);
}
merkle.pop().unwrap();
}
pub fn proof_of_work(header: &mut Blockheader) {
loop {
let hash = Chain::hash(header);
let slice = &hash[..header.difficulty as usize];
match slice.parse::<u32>() {
Ok(val) => {
if val = 0 {
println!("hash {}", hash);
break;
} else {
hash.nonce += 1;
}
},
Err(_) => {
header.nonce += 1;
continue;
}
}
};
}
pub fn hash<T: serde::Serialize>(item: &T) -> String {
let input_hash = serde_json::to_string(&item).unwrap();//json to string
let mut hasher = Sha256::default();
hasher.input(input.as_bytes());//converting to bytes
let res = hasher.result();
let res_vector = result.to_vec();//returns as hex
Chain::hex_to_string(vec_res.as_slice());
}
pub fn hex_to_string(vec_res: &[u8]) -> String { //bytes to string
let mut str = String::new();
for i in res_vec {
write!(&mut str, "{:x}", i).expect("unable to write");
}
str
}
}
|
pub mod vector {
    use std::ops::{Add, Sub, Mul, BitXor};
    use std::f64::EPSILON as F_EPSILON;
    /// A 3-component f64 vector.
    ///
    /// Clone is so we have the clone() method available for our struct;
    /// Copy is so every time we attempt to give away ownership we clone() instead.
    #[derive(Debug, Clone, Copy)]
    pub struct Vector {
        pub x: f64,
        pub y: f64,
        pub z: f64,
    }
    impl Vector {
        /// Build a vector from its three components.
        pub fn new(x: f64, y: f64, z: f64) -> Self {
            // Field-init shorthand replaces the redundant `x: x` form.
            Self { x, y, z }
        }
        /// True iff every component is exactly 0.0.
        pub fn is_zero(&self) -> bool {
            (self.x == 0_f64) && (self.y == 0_f64) && (self.z == 0_f64)
        }
        /// Euclidean length of the vector.
        pub fn norm(&self) -> f64 {
            ((self.x * self.x) + (self.y * self.y) + (self.z * self.z)).sqrt()
        }
    }
    impl PartialEq for Vector {
        /// Approximate equality: the L1 distance between the vectors must be
        /// below three machine epsilons (one per component).
        fn eq(&self, rhs: &Self) -> bool {
            ((self.x - rhs.x).abs() +
             (self.y - rhs.y).abs() +
             (self.z - rhs.z).abs()) < (3_f64 * F_EPSILON)
        }
    }
    /// Component-wise addition.
    impl Add for Vector {
        type Output = Self;
        fn add(self, rhs: Self) -> Self {
            Self {
                x: self.x + rhs.x,
                y: self.y + rhs.y,
                z: self.z + rhs.z,
            }
        }
    }
    /// Component-wise subtraction.
    impl Sub for Vector {
        type Output = Self;
        fn sub(self, rhs: Self) -> Self {
            Self {
                x: self.x - rhs.x,
                y: self.y - rhs.y,
                z: self.z - rhs.z,
            }
        }
    }
    /// Scalar * vector scaling.
    impl Mul<Vector> for f64 {
        type Output = Vector;
        fn mul(self, rhs: Vector) -> Vector {
            Vector {
                x: self * rhs.x,
                y: self * rhs.y,
                z: self * rhs.z,
            }
        }
    }
    /// Dot product via `*`.
    impl Mul<Vector> for Vector {
        type Output = f64;
        fn mul(self, rhs: Self) -> f64 {
            (self.x * rhs.x) + (self.y * rhs.y) + (self.z * rhs.z)
        }
    }
    /// Cross product via `^`.
    impl BitXor for Vector {
        type Output = Self;
        fn bitxor(self, rhs: Self) -> Self {
            Self {
                x: ((self.y * rhs.z) - (self.z * rhs.y)),
                y: ((self.z * rhs.x) - (self.x * rhs.z)),
                z: ((self.x * rhs.y) - (self.y * rhs.x)),
            }
        }
    }
}
#![feature(option_filter)]
use std::io::{self, Read};
mod lex;
mod parse;
use lex::Lexer;
use lex::Token;
use parse::parse_object;
/// Read JSON from stdin, lex it, strip whitespace tokens, and print the
/// parsed object (or the lexing error).
fn main() {
    let mut buffer = String::new();
    match io::stdin().read_to_string(&mut buffer) {
        Ok(_) => {
            let mut lexer = Lexer {
                chars: buffer.chars().peekable(),
            };
            match lexer.lex() {
                Ok(mut tokens) => {
                    // The parser does not expect whitespace tokens; drop them.
                    tokens.retain(|token| token != &Token::Whitespace);
                    println!("{:?}", parse_object(&mut tokens.iter().peekable()));
                }
                Err(err) => println!("{:?}", err),
            }
        }
        Err(_) => panic!("Error reading input."),
    }
}
|
use itertools::Itertools;
/// Types that can measure a distance of type `T` to another value of Self.
trait Distance<T> {
    fn manhattan_distance(self, t: Self) -> T;
}
/// A grid position as (x, y).
type Coordinate = (i32, i32);
impl Distance<i32> for Coordinate {
    /// Taxicab metric: sum of absolute per-axis differences.
    fn manhattan_distance(self, to: Coordinate) -> i32 {
        let dx = (self.0 - to.0).abs();
        let dy = (self.1 - to.1).abs();
        dx + dy
    }
}
/// Direction from one asteroid to another, reduced to lowest terms by
/// `Slope::from` so that collinear targets compare equal.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
struct Slope {
    // Horizontal step component (reduced by the gcd).
    dx: i32,
    // Vertical step component (reduced by the gcd).
    dy: i32,
}
fn parse_input(input: &str) -> Vec<Coordinate> {
input
.lines()
.enumerate()
.flat_map(|(row, line)| {
line
.trim()
.chars()
.enumerate()
.filter_map(|(col, c)| {
if c == '#' {
Some((col as i32, row as i32))
} else {
None
}
})
.collect_vec()
})
.collect_vec()
}
/// Euclidean greatest common divisor, always non-negative.
/// `gcd(0, n)` yields `|n|`.
fn gcd(mut m: i32, mut n: i32) -> i32 {
    while m != 0 {
        let remainder = n % m;
        n = m;
        m = remainder;
    }
    n.abs()
}
impl Slope {
    /// Direction from `from` to `to`, reduced by the gcd so that all
    /// collinear asteroids share the same Slope. The zero slope (same point)
    /// is returned unreduced.
    fn from(from: Coordinate, to: Coordinate) -> Slope {
        let dx = to.0 - from.0;
        let dy = to.1 - from.1;
        if dx == 0 && dy == 0 {
            return Slope { dx, dy };
        }
        let divisor = gcd(dx, dy);
        Slope {
            dx: dx / divisor,
            dy: dy / divisor,
        }
    }
    /// Angle in degrees, remapped so that "straight up" (-90) is the minimum
    /// and the laser sweep order is ascending. The zero slope maps to infinity
    /// so the station itself sorts last.
    fn angle(self) -> f32 {
        use std::f32;
        if self.dx == 0 && self.dy == 0 {
            return f32::INFINITY;
        }
        let fx = self.dx as f32;
        let fy = self.dy as f32;
        let degrees = fy.atan2(fx).to_degrees();
        // Shift the (-180, -90) quadrant past +180 so -90 starts the sweep.
        if degrees < -90. {
            360. + degrees
        } else {
            degrees
        }
    }
}
// Part 1: for every asteroid, count the distinct slopes to all others
// (collinear asteroids share a slope, so only the nearest is visible);
// subtract 1 for the asteroid's own zero slope. Return the best count and
// its location.
fn solve_01(asteroids: &[Coordinate]) -> (usize, Coordinate) {
    asteroids
        .iter()
        .map(|from| {
            (
                asteroids
                    .iter()
                    .map(|to| Slope::from(*from, *to))
                    .unique()
                    .count()
                    - 1,
                *from,
            )
        })
        .max()
        .unwrap()
}
// Part 2: simulate the rotating laser. Group asteroids by slope from the
// station; within a group sort by distance. The i-th asteroid of a group is
// hit on the i-th rotation, encoded by adding i*360 degrees to the slope's
// angle. Sorting by that key yields the global vaporization order.
fn solve_02(coord: Coordinate, asteroids: &[Coordinate]) -> Vec<(f32, Coordinate)> {
    let mut map = asteroids
        .iter()
        .map(|ast| {
            let sl = Slope::from(coord, *ast);
            (sl, *ast)
        })
        .into_group_map();
    let mut res = map
        .iter_mut()
        .flat_map(|(s, points)| {
            let ang = s.angle();
            // Closest asteroid on this slope is vaporized first.
            points.sort_by_key(|k| k.manhattan_distance(coord));
            points
                .iter()
                .enumerate()
                .map(|(i, k)| (ang + (i as f32) * 360., *k))
                .collect_vec()
        })
        .collect_vec();
    res.sort_by(|a, b| a.partial_cmp(b).unwrap());
    res
}
/// Solve both parts of the puzzle and print the answers via dbg!.
pub fn solve(input: &str) {
    let asteroids = parse_input(input);
    let (part_01, station) = solve_01(&asteroids);
    dbg!(part_01);
    let vaporized = solve_02(station, &asteroids);
    // The 200th vaporized asteroid (index 199) answers part two.
    let (_, part_02) = vaporized[199];
    dbg!(part_02);
}
#[cfg(test)]
mod tests {
    use super::*;
    // 5x5 example map from the AoC day 10 description.
    #[test]
    fn small() {
        let asteroids = parse_input(
            ".#..#
.....
#####
....#
...##",
        );
        let (count, coord) = solve_01(&asteroids);
        assert_eq!(count, 8);
        assert_eq!(coord, (3, 4));
    }
    // 10x10 example map: best station sees 33 asteroids.
    #[test]
    fn medium() {
        let asteroids = parse_input(
            "......#.#.
#..#.#....
..#######.
.#.#.###..
.#..#.....
..#....#.#
#..#....#.
.##.#..###
##...#..#.
.#....####",
        );
        let (count, coord) = solve_01(&asteroids);
        assert_eq!(count, 33);
        assert_eq!(coord, (5, 8));
    }
    // 20x20 example map; also checks the part-2 vaporization order
    // (the 200th asteroid destroyed is (8, 2)).
    #[test]
    fn large() {
        let asteroids = parse_input(
            ".#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##",
        );
        let (count, coord) = solve_01(&asteroids);
        assert_eq!(count, 210);
        assert_eq!(coord, (11, 13));
        let res = solve_02(coord, &asteroids);
        res
            .iter()
            .enumerate()
            .for_each(|(i, c)| println!("{}, {:?}", i, c));
        assert_eq!(res[199].1, (8, 2));
    }
}
|
use crate::algorithms::human::Human;
use crate::algorithms::monte_carlo::MonteCarlo;
use crate::algorithms::random::Random;
use crate::game::Game;
use crate::utils::plural;
use itertools::Itertools;
mod action;
mod algorithms;
mod card;
mod game;
mod player;
mod power;
mod resources;
mod table;
mod utils;
mod wonder;
/// Plays one game between a human, a Monte Carlo bot and a random bot, then
/// prints the standings, best score first.
fn main() {
    // Fixed three-player lineup: one human seat plus two AI opponents.
    let mut game = Game::new(vec![
        Box::new(Human {}),
        Box::new(MonteCarlo {}),
        Box::new(Random {}),
    ]);
    let final_scores = game.play();
    // Rank players best-first; ascending sort then reverse keeps the
    // original ordering for tied scores.
    let standings: Vec<(usize, i32)> = final_scores
        .into_iter()
        .enumerate()
        .sorted_by_key(|&(_, score)| score)
        .rev()
        .collect();
    // TODO: deal with draws
    let (winner_seat, _) = standings[0];
    println!("Player {} wins!", winner_seat + 1);
    println!();
    for (seat, score) in standings {
        println!("Player {}: {}", seat + 1, plural(score, "point"));
    }
}
|
#![deny(clippy::all)]
//! Riddle crate containing common utilities and types needed by platform implementations
//! and which other crates can use to interact with a platform service without needing to
//! know or genericize for the platform system type being used.
//!
//! Most types in here are either consumed or reexported through the concrete platform
//! crate (`riddle_platform_winit`). There should be very few cases where this crate will
//! need to be depended on directly.
mod dimensions;
mod event;
mod mouse;
mod scancode;
mod virtualkey;
mod window_id;
#[doc(hidden)]
pub mod doctest;
pub mod traits;
pub use dimensions::*;
pub use event::*;
pub use mouse::*;
pub use scancode::*;
pub use virtualkey::*;
pub use window_id::*;
|
pub use self::arch::*;
#[cfg(target_arch = "x86")]
#[path="x86/tss.rs"]
mod arch;
#[cfg(target_arch = "x86_64")]
#[path="x86_64/tss.rs"]
mod arch;
|
mod ast;
mod builtin;
mod common;
mod error;
mod parse;
mod pattern;
use common::*;
use std::io::Read;
/// Read-eval-print loop: reads lines (accumulating multi-line input until it
/// parses, or until a blank line forces a parse), evaluates each statement
/// against the built-in environment, and persists readline history across
/// sessions.
fn repl(verbose: bool) {
    // Counter labelling each interactive input source "<interactive:N>".
    let mut i = 0;
    // Accumulator for multi-line input that hasn't parsed on its own yet.
    let mut buf = String::new();
    let mut env = builtin::totals();
    // I copied this from clap: https://kbknapp.github.io/clap-rs/src/clap/macros.rs.html#632-640
    macro_rules! crate_version {
        () => {
            format!(
                "{}.{}.{}{}",
                env!("CARGO_PKG_VERSION_MAJOR"),
                env!("CARGO_PKG_VERSION_MINOR"),
                env!("CARGO_PKG_VERSION_PATCH"),
                option_env!("CARGO_PKG_VERSION_PRE").unwrap_or("")
            )
        };
    };
    println!("Fractal {}", crate_version!());
    let config = rustyline::Config::builder()
        .auto_add_history(true)
        .tab_stop(2)
        .build();
    let mut rl = rustyline::Editor::<()>::with_config(config);
    let app_info = app_dirs2::AppInfo {
        name: "fractal",
        author: "Lorxu",
    };
    // Use the current directory if we can't get a app-specific one for some reason
    let mut history_file = app_dirs2::app_root(app_dirs2::AppDataType::UserData, &app_info)
        .unwrap_or_else(|_| {
            let p = std::path::PathBuf::from("./.fractal");
            if !p.exists() {
                std::fs::create_dir_all(&p).unwrap_or_else(|_| {
                    println!("Failed to create history directory in the current directory")
                });
            }
            p
        });
    history_file.push("repl_history");
    if history_file.exists() {
        rl.load_history(&history_file).unwrap();
    }
    loop {
        // Prompt ">> " for fresh input, ">| " while continuing an entry.
        let s = if buf.is_empty() {
            rl.readline(">> ")
        } else {
            rl.readline(">| ")
        };
        // A readline error (e.g. Ctrl-D) is treated the same as "exit".
        if s.as_ref().map(|x| x.trim() == "exit").unwrap_or(true) {
            rl.save_history(&history_file)
                .unwrap_or_else(|e| println!("Failed to write history file, error {:?}", e));
            println!("Goodbye");
            break;
        }
        let s = s.unwrap();
        // If we can parse this line, do that;
        // If not, assume it's multiple lines and stop when they give us a blank line
        let result = if buf.is_empty() {
            match parse::parse_str(format!("<interactive:{}>", i), &s) {
                r @ Ok(_) => r,
                Err(_) => {
                    buf.push('\n');
                    buf.push_str(&s);
                    continue;
                }
            }
        } else if s.trim().is_empty() {
            parse::parse_str(format!("<interactive:{}>", i), buf)
        } else {
            buf.push('\n');
            buf.push_str(&s);
            continue;
        };
        // `buf` was moved into parse_str in the blank-line branch; reset it
        // for the next multi-line entry either way.
        buf = String::new();
        i += 1;
        match result {
            Ok(result) => {
                for i in result {
                    if verbose {
                        println!("{}", i);
                    }
                    use pattern::HasTotal;
                    // Evaluate the statement, then simplify before printing.
                    match i.total(&mut env, false) {
                        Ok(mut x) => {
                            x.simplify_mut(&mut env);
                            println!("=> {}", x);
                        }
                        Err(e) => {
                            println!("Match error {:?}", e);
                            for i in e.error() {
                                i.write().unwrap();
                            }
                        }
                    }
                }
            }
            Err(e) => e.write().unwrap(),
        }
    }
}
/// CLI entry point: with a `.fl` file argument, runs the script; otherwise
/// starts the REPL. `-v`/`--verbose` echoes parsed terms before evaluation.
fn main() {
    let args: Vec<String> = std::env::args().collect();
    // The first argument containing ".fl" is treated as the input script.
    let in_file = args.iter().find(|x| x.rfind(".fl").is_some());
    let verbose = args
        .iter()
        .any(|x| x.trim() == "-v" || x.trim() == "--verbose");
    let mut env = builtin::totals();
    if let Some(f) = in_file {
        let mut s = String::new();
        std::fs::File::open(f)
            .expect("Couldn't open file")
            .read_to_string(&mut s)
            .unwrap();
        match parse::parse_str(f, s) {
            Ok(result) => {
                // Wrap the parsed statements into a single block term so the
                // script evaluates as one unit.
                let b = Node::new_raw(ast::Term::Block(result));
                if verbose {
                    println!("{}", b);
                }
                use pattern::HasTotal;
                match b.total(&mut env, false) {
                    Ok(mut x) => {
                        x.simplify_mut(&mut env);
                        // We only print the result of a script with -v
                        if verbose {
                            println!("=> {}", x);
                        }
                    }
                    Err(e) => {
                        println!("Match error {:?}", e);
                        for i in e.error() {
                            i.write().unwrap();
                        }
                    }
                }
            }
            Err(e) => e.write().unwrap(),
        }
    } else {
        repl(verbose)
    }
}
|
use std::collections::HashSet;
use std::io::{self, BufRead};
/// AoC 2018 day 1, part 2: cycle through the frequency changes on stdin and
/// print the first running total seen twice.
fn main() {
    let changes: Vec<i32> = io::stdin()
        .lock()
        .lines()
        .map(|line| line.unwrap().parse().unwrap())
        .collect();
    let mut visited = HashSet::new();
    let mut total = 0;
    // `insert` returns false when the value was already present — that is
    // the first repeated frequency.
    for delta in changes.iter().cycle() {
        if !visited.insert(total) {
            break;
        }
        total += delta;
    }
    println!("{}", total);
}
|
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::net::{SocketAddr, IpAddr, Ipv4Addr};
use futures::executor::block_on;
use tokio::io::{AsyncRead, AsyncWrite};
use tokio::net::{TcpStream, TcpListener};
use async_trait::async_trait;
use super::{AsyncConnect, AsyncAccept, IOStream};
use crate::dns;
use crate::utils::{self, CommonAddr};
#[cfg(unix)]
use std::os::unix::io::{AsRawFd, RawFd};
#[cfg(unix)]
use tokio::net::{UnixStream, UnixListener};
/// A plain (unencrypted) byte stream: TCP everywhere, plus Unix domain
/// sockets on Unix.
#[allow(clippy::upper_case_acronyms)]
pub enum PlainStream {
    TCP(TcpStream),
    #[cfg(unix)]
    UDS(UnixStream),
}
/// Read half of a split [`PlainStream`]; see `PlainStream::split`.
pub struct ReadHalf<'a>(&'a PlainStream);
/// Write half of a split [`PlainStream`]; see `PlainStream::split`.
pub struct WriteHalf<'a>(&'a PlainStream);
impl IOStream for PlainStream {}
#[cfg(unix)]
impl AsRawFd for PlainStream {
    fn as_raw_fd(&self) -> RawFd {
        match self {
            Self::TCP(x) => x.as_raw_fd(),
            // The inner cfg is redundant (the whole impl is unix-only) but
            // harmless.
            #[cfg(unix)]
            Self::UDS(x) => x.as_raw_fd(),
        }
    }
}
impl AsRef<PlainStream> for ReadHalf<'_> {
    fn as_ref(&self) -> &PlainStream { self.0 }
}
impl AsRef<PlainStream> for WriteHalf<'_> {
    fn as_ref(&self) -> &PlainStream { self.0 }
}
impl PlainStream {
    /// Enables/disables Nagle's algorithm for the TCP variant; a no-op for
    /// anything else. (On non-unix targets the TCP arm is exhaustive, so
    /// the cfg'd wildcard arm disappears cleanly.)
    pub fn set_no_delay(&self, nodelay: bool) -> io::Result<()> {
        match self {
            Self::TCP(x) => x.set_nodelay(nodelay),
            #[cfg(unix)]
            _ => Ok(()),
        }
    }
    /// Splits into read/write halves; both halves hold *shared* borrows of
    /// the same stream (see the const_cast note on the half impls below).
    pub fn split(&mut self) -> (ReadHalf<'_>, WriteHalf<'_>) {
        (ReadHalf(&*self), WriteHalf(&*self))
    }
}
impl AsyncRead for PlainStream {
    // Delegates to whichever socket type is inside.
    #[inline]
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        match &mut self.get_mut() {
            Self::TCP(x) => Pin::new(x).poll_read(cx, buf),
            #[cfg(unix)]
            Self::UDS(x) => Pin::new(x).poll_read(cx, buf),
        }
    }
}
impl AsyncRead for ReadHalf<'_> {
    #[inline]
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        // NOTE(review): `utils::const_cast` appears to turn the shared
        // `&PlainStream` into a `&mut`; creating an aliasing `&mut` is
        // undefined behavior in Rust unless the data is in an UnsafeCell.
        // Worth auditing const_cast / considering tokio's own split halves.
        Pin::new(unsafe { utils::const_cast(self.get_mut().0) })
            .poll_read(cx, buf)
    }
}
impl AsyncWrite for PlainStream {
    // All three poll methods simply dispatch on the variant.
    #[inline]
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<Result<usize, io::Error>> {
        match &mut self.get_mut() {
            Self::TCP(x) => Pin::new(x).poll_write(cx, buf),
            #[cfg(unix)]
            Self::UDS(x) => Pin::new(x).poll_write(cx, buf),
        }
    }
    #[inline]
    fn poll_flush(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), io::Error>> {
        match &mut self.get_mut() {
            Self::TCP(x) => Pin::new(x).poll_flush(cx),
            #[cfg(unix)]
            Self::UDS(x) => Pin::new(x).poll_flush(cx),
        }
    }
    #[inline]
    fn poll_shutdown(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), io::Error>> {
        match &mut self.get_mut() {
            Self::TCP(x) => Pin::new(x).poll_shutdown(cx),
            #[cfg(unix)]
            Self::UDS(x) => Pin::new(x).poll_shutdown(cx),
        }
    }
}
impl AsyncWrite for WriteHalf<'_> {
    // NOTE(review): like ReadHalf, these obtain a `&mut PlainStream` from a
    // shared reference via `utils::const_cast` — potential aliasing UB;
    // confirm the cast's soundness argument.
    #[inline]
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<Result<usize, io::Error>> {
        Pin::new(unsafe { utils::const_cast(self.get_mut().0) })
            .poll_write(cx, buf)
    }
    #[inline]
    fn poll_flush(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), io::Error>> {
        Pin::new(unsafe { utils::const_cast(self.get_mut().0) }).poll_flush(cx)
    }
    #[inline]
    fn poll_shutdown(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), io::Error>> {
        Pin::new(unsafe { utils::const_cast(self.get_mut().0) })
            .poll_shutdown(cx)
    }
}
// Plain Connector
/// Dials `addr` and produces a [`PlainStream`] (TCP with nodelay enabled,
/// or a Unix-domain socket).
#[derive(Clone)]
pub struct Connector {
    addr: CommonAddr,
}
impl Connector {
    pub fn new(addr: CommonAddr) -> Self { Connector { addr } }
}
#[async_trait]
impl AsyncConnect for Connector {
    type IO = PlainStream;
    fn addr(&self) -> &CommonAddr { &self.addr }
    async fn connect(&self) -> io::Result<Self::IO> {
        let stream = match &self.addr {
            // Domain names are resolved asynchronously before dialing.
            CommonAddr::DomainName(addr, port) => {
                let ip = dns::resolve_async(addr).await?;
                let sockaddr = SocketAddr::new(ip, *port);
                PlainStream::TCP(TcpStream::connect(sockaddr).await?)
            }
            CommonAddr::SocketAddr(sockaddr) => {
                PlainStream::TCP(TcpStream::connect(sockaddr).await?)
            }
            #[cfg(unix)]
            CommonAddr::UnixSocketPath(path) => {
                PlainStream::UDS(UnixStream::connect(path).await?)
            }
        };
        // Proxy traffic is latency-sensitive: disable Nagle right away
        // (a no-op for Unix sockets).
        stream.set_no_delay(true)?;
        Ok(stream)
    }
}
// Plain Acceptor
/// Listening socket for plain streams: TCP, or a Unix socket on Unix.
#[allow(clippy::upper_case_acronyms)]
pub enum PlainListener {
    TCP(TcpListener),
    #[cfg(unix)]
    UDS(UnixListener),
}
impl PlainListener {
    /// Binds a listener for the given address.
    ///
    /// Panics (`unreachable!`) on a `DomainName` address — callers are
    /// expected to resolve names before binding. `block_on` is used because
    /// this is called from synchronous setup code.
    pub fn bind(addr: &CommonAddr) -> io::Result<PlainListener> {
        Ok(match addr {
            CommonAddr::SocketAddr(sockaddr) => {
                PlainListener::TCP(block_on(TcpListener::bind(sockaddr))?)
            }
            #[cfg(unix)]
            CommonAddr::UnixSocketPath(path) => {
                PlainListener::UDS(UnixListener::bind(path)?)
            }
            _ => unreachable!(),
        })
    }
    /// Accepts one connection, enabling nodelay for TCP. Unix sockets have
    /// no peer IP, so a dummy 0.0.0.0:0 address is reported instead.
    pub async fn accept_plain(&self) -> io::Result<(PlainStream, SocketAddr)> {
        Ok(match self {
            PlainListener::TCP(x) => {
                let (stream, sockaddr) = x.accept().await?;
                stream.set_nodelay(true)?;
                (PlainStream::TCP(stream), sockaddr)
            }
            #[cfg(unix)]
            PlainListener::UDS(x) => {
                let (stream, _) = x.accept().await?;
                let sockaddr =
                    SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0);
                (PlainStream::UDS(stream), sockaddr)
            }
        })
    }
}
/// Pass-through acceptor: the listener has already produced the stream, so
/// `accept` just hands it back unchanged.
#[derive(Clone)]
pub struct Acceptor {
    addr: CommonAddr,
}
impl Acceptor {
    pub fn new(addr: CommonAddr) -> Self { Acceptor { addr } }
}
#[async_trait]
impl AsyncAccept for Acceptor {
    type IO = PlainStream;
    fn addr(&self) -> &CommonAddr { &self.addr }
    async fn accept(
        &self,
        res: (PlainStream, SocketAddr),
    ) -> io::Result<(Self::IO, SocketAddr)> {
        // fake accept
        Ok(res)
    }
}
|
use crate::*;
/// Registers the `Struct` class on `Object` and installs its `new` class
/// method; returns the class value.
pub fn init_struct(globals: &mut Globals) -> Value {
    let name_id = globals.get_ident_id("Struct");
    let class_ref = ClassRef::from(name_id, globals.builtins.object);
    let struct_class = Value::class(globals, class_ref);
    globals.add_builtin_class_method(struct_class, "new", struct_new);
    struct_class
}
/// `Struct.new([name,] *symbols, &block)` — creates a new struct class with
/// accessors for each symbol, optionally named and configured by a block.
///
/// Returns the new class value, or a name/type error for a non-constant
/// name or non-symbol member.
fn struct_new(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    vm.check_args_min(args.len(), 1)?;
    let mut i = 0;
    // A leading string argument names the struct; Ruby requires it to look
    // like a constant (uppercase first letter).
    let name = match args[0].as_string() {
        None => None,
        Some(s) => {
            // `next()` is the idiomatic (and cheaper) form of `nth(0)`.
            match s.chars().next() {
                Some(c) if c.is_ascii_uppercase() => {}
                _ => return Err(vm.error_name(format!("Identifier `{}` needs to be constant.", s))),
            };
            i = 1;
            let s = vm.globals.get_ident_id(format!("Struct:{}", s));
            Some(s)
        }
    };
    let mut val = Value::class_from(&mut vm.globals, name, self_val);
    let class = val.as_class();
    vm.globals
        .add_builtin_instance_method(class, "initialize", initialize);
    vm.globals
        .add_builtin_instance_method(class, "inspect", inspect);
    vm.globals
        .add_builtin_class_method(val, "[]", builtin::class::new);
    vm.globals
        .add_builtin_class_method(val, "new", builtin::class::new);
    // The remaining arguments must all be symbols: the member names.
    let mut attr_args = Args::new(args.len() - i);
    let mut vec = vec![];
    for index in i..args.len() {
        let v = args[index];
        if v.as_symbol().is_none() {
            let n = vm.val_inspect(v);
            return Err(vm.error_type(format!("{} is not a symbol.", n)));
        };
        vec.push(v);
        attr_args[index - i] = v;
    }
    // Record the member list for `initialize`/`inspect` to consult later.
    val.set_var(
        vm.globals.get_ident_id("_members"),
        Value::array_from(&vm.globals, vec),
    );
    builtin::module::attr_accessor(vm, val, &attr_args)?;
    // An optional block is evaluated in the context of the new class.
    if let Some(method) = args.block {
        vm.class_push(val);
        let arg = Args::new1(val);
        vm.eval_method(method, val, Some(vm.context()), &arg)?;
        vm.class_pop();
    }
    Ok(val)
}
/// Struct `initialize`: assigns each positional argument to the instance
/// variable named after the corresponding member symbol.
fn initialize(vm: &mut VM, mut self_val: Value, args: &Args) -> VMResult {
    let class = self_val.get_class_object(&vm.globals);
    // `_members` was stored on the class by `struct_new`.
    let members = class
        .get_var(vm.globals.get_ident_id("_members"))
        .unwrap()
        .as_array()
        .unwrap();
    // More arguments than members is an error; fewer is fine (the missing
    // members simply stay unset).
    if members.elements.len() < args.len() {
        return Err(vm.error_argument("Struct size differs."));
    };
    for (i, arg) in args.iter().enumerate() {
        // Member symbol `:name` becomes instance variable `@name`.
        let id = members.elements[i].as_symbol().unwrap();
        let var = format!("@{}", vm.globals.get_ident_name(id));
        self_val.set_var(vm.globals.get_ident_id(var), *arg);
    }
    Ok(Value::nil())
}
/// Struct `inspect`: renders `#<struct: Name @a=1 @b=2>`-style output from
/// the class's `_members` list and the instance's variables.
fn inspect(vm: &mut VM, self_val: Value, _args: &Args) -> VMResult {
    let members = match self_val
        .get_class_object(&vm.globals)
        .get_var(vm.globals.get_ident_id("_members"))
    {
        Some(v) => match v.as_array() {
            Some(aref) => aref,
            None => return Err(vm.error_internal("Illegal _members value.")),
        },
        None => return Err(vm.error_internal("No _members.")),
    };
    // Map each member symbol `:name` to its `@name` instance-variable id.
    let attrs: Vec<IdentId> = members
        .elements
        .iter()
        .map(|x| {
            let id = x.as_symbol().unwrap();
            let name = format!("@{}", vm.globals.get_ident_name(id));
            vm.globals.get_ident_id(name)
        })
        .collect();
    let mut attr_str = String::new();
    for id in attrs {
        // Unset members render as "<>".
        let val = match self_val.get_var(id) {
            Some(v) => vm.val_inspect(v),
            None => "<>".to_string(),
        };
        let name = vm.globals.get_ident_name(id);
        attr_str = format!("{} {}={}", attr_str, name, val);
    }
    // Anonymous structs render with an empty name.
    let class_name = match self_val.get_class_object(&vm.globals).as_class().name {
        Some(id) => vm.globals.get_ident_name(id),
        None => "",
    };
    let inspect = format!("#<struct: {}{}>", class_name, attr_str);
    Ok(Value::string(&vm.globals, inspect))
}
#[cfg(test)]
mod tests {
use crate::test::*;
#[test]
fn struct_test() {
let program = r#"
Customer = Struct.new(:name, :address) do
def greeting
"Hello #{name}!"
end
end
assert "Hello Dave!", Customer.new("Dave", "123 Main").greeting
assert "Hello Gave!", Customer["Gave", "456 Sub"].greeting
"#;
assert_script(program);
}
}
|
#[cfg(test)]
mod tests {
    use compiled_uuid::uuid;
    use uuid::Uuid;
    // Compile-time checks: the macro accepts both hyphenated and simple
    // (32-digit) forms. The commented-out line contains an invalid hex
    // digit ('G') and would fail to compile if enabled.
    // const _: Uuid = uuid!("F9168C5E-CEB2-4FAA-B6BF-329BF39FA1G4");
    const _: Uuid = uuid!("F9168C5E-CEB2-4FAA-B6BF-329BF39FA1E4");
    const _: Uuid = uuid!("F9168C5ECEB24FAAB6BF329BF39FA1E4");
    const _: Uuid = uuid!("550e8400-e29b-41d4-a716-446655440000");
}
|
//! Additional, experimental, strategies.
//!
//! Some more strategies are available here, if the `experimental-strategies` feature flag is
//! enabled.
//!
//! Note that these strategies **are not part of the API stability guarantees** and may be changed,
//! renamed or removed at any point in time. They are also not necessarily as mature. They are here
//! to allow experimentation.
//!
//! If they are deemed good enough, they might mature and be moved from here to some better place.
//! You're welcome to try them out, see if they fit better and send feedback about them. It is also
//! possible to add some more strategies in here. But it might not be as good an idea to use them in
//! production just yet.
use crate::gen_lock::PrivateUnsharded;
pub use crate::gen_lock::Shard;
use crate::strategy::gen_lock::GenLockStrategy;
/// A generation lock.
///
/// The generation lock is the fallback strategy of the default hybrid one. This variant has single
/// shard inside each separate [`ArcSwap`], therefore they don't contend each other. As it doesn't
/// support the borrowing mode and creates a fully-featured [`Arc`] behind the scenes, it is
/// potentially slower than the default.
///
/// However, it is simpler and can turn out to be faster in certain corner case situations. It is
/// suitable when:
///
/// * A lot of guards would be held or loading the full [`Arc`]s is desirable most of the times,
/// therefore the advantage of the default strategy is not in play. In that case skipping the
/// fast path that is nevertheless not used is beneficial.
/// * The accesses are mostly across different [`ArcSwap`] instances.
///
/// Note that each instance has its own generation lock. This means that they are fully independent
/// of each other, but is larger (each [`ArcSwap`] then is 4 words large instead of 1).
///
/// Readers are [wait-free], writers are not even [lock-free].
///
/// [`ArcSwap`]: crate::ArcSwap
/// [`Arc`]: std::sync::Arc
/// [lock-free]: https://en.wikipedia.org/wiki/Non-blocking_algorithm#Lock-freedom
/// [wait-free]: https://en.wikipedia.org/wiki/Non-blocking_algorithm#Wait-freedom
pub type SimpleGenLock = GenLockStrategy<PrivateUnsharded>;
|
/// Prints a greeting that exercises both ASCII and multi-byte UTF-8 output.
fn main() {
    let message = "Hello, Rust world! - 日本語はどうだ!";
    println!("{}", message);
}
|
use std::collections::HashMap;
use crate::util::time;
/// AoC 2022 day 21 entry point: times and prints both parts.
pub fn day21() {
    println!("== Day 21 ==");
    let input = "src/day21/input.txt";
    time(part_a, input, "A");
    time(part_b, input, "B");
}
/// Arithmetic operation a monkey performs. `Eql` never appears in the
/// input; it is substituted for "root"'s operator in part B.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)]
enum Operation {
    Add,
    Sub,
    Div,
    Mul,
    Eql,
}
impl From<&str> for Operation {
fn from(str: &str) -> Self {
match str {
"+" => Operation::Add,
"-" => Operation::Sub,
"/" => Operation::Div,
"*" => Operation::Mul,
"=" => Operation::Eql,
&_ => panic!("Can't parse {}", str)
}
}
}
impl Operation {
fn solve(&self, lhs: isize, rhs: isize) -> isize {
match self {
Operation::Add => lhs + rhs,
Operation::Sub => lhs - rhs,
Operation::Div => lhs / rhs,
Operation::Mul => lhs * rhs,
Operation::Eql => (lhs == rhs) as isize,
}
}
}
/// What a monkey yells: either a literal number or an equation referring to
/// two other monkeys.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)]
enum MonkeyValue {
    Value(isize),
    Equation(MonkeyId, Operation, MonkeyId),
}
impl MonkeyValue {
    /// True if this is an unevaluated equation.
    // `matches!` replaces the verbose boolean match (clippy:
    // match_like_matches_macro).
    #[allow(dead_code)]
    fn is_equation(&self) -> bool {
        matches!(self, MonkeyValue::Equation(_, _, _))
    }
    /// True if this is a resolved literal value.
    fn is_value(&self) -> bool {
        matches!(self, MonkeyValue::Value(_))
    }
    /// Left operand id; panics on a literal value.
    fn lhs(&self) -> MonkeyId {
        match self {
            MonkeyValue::Value(_) => panic!("Can't get id of value"),
            MonkeyValue::Equation(lhs, _, _) => *lhs,
        }
    }
    /// Right operand id; panics on a literal value.
    fn rhs(&self) -> MonkeyId {
        match self {
            MonkeyValue::Value(_) => panic!("Can't get id of value"),
            MonkeyValue::Equation(_, _, rhs) => *rhs,
        }
    }
    /// Whether `monkey_id` appears as either operand of this equation.
    #[allow(dead_code)]
    fn contains(&self, monkey_id: &MonkeyId) -> bool {
        match self {
            MonkeyValue::Value(_) => false,
            MonkeyValue::Equation(lhs, _, rhs) => lhs == monkey_id || rhs == monkey_id,
        }
    }
    /// The `(lhs, op, rhs)` triple, or `None` for a literal value.
    fn equation(&self) -> Option<(MonkeyId, Operation, MonkeyId)> {
        match self {
            MonkeyValue::Value(_) => None,
            MonkeyValue::Equation(lhs, op, rhs) => Some((*lhs, *op, *rhs)),
        }
    }
    /// The literal value, or `None` for an equation.
    fn value(&self) -> Option<isize> {
        match self {
            MonkeyValue::Value(i) => Some(*i),
            MonkeyValue::Equation(_, _, _) => None,
        }
    }
}
/// A named monkey together with its (possibly unevaluated) value.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)]
struct Monkey {
    id: MonkeyId,
    value: MonkeyValue,
}
/// Four-letter monkey name stored as a char tuple so the id stays `Copy`
/// and hashable without owning a `String`.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)]
struct MonkeyId {
    id: (char, char, char, char), // Because Rust and Strings are effing annoying.
}
impl From<&str> for MonkeyId {
    /// Builds an id from the first four characters; panics (index out of
    /// bounds) on a shorter string.
    fn from(str: &str) -> Self {
        let letters: Vec<char> = str.chars().collect();
        Self {
            id: (letters[0], letters[1], letters[2], letters[3]),
        }
    }
}
/// The full set of monkeys, indexed by name.
struct MonkeyMath {
    monkeys: HashMap<MonkeyId, Monkey>,
}
impl MonkeyMath {
    /// Parses one monkey per line: either `name: <number>` or
    /// `name: <lhs> <op> <rhs>`.
    fn parse(input: &str) -> Self {
        let mut monkeys = HashMap::new();
        for line in input.lines() {
            let split = line.split(": ").collect::<Vec<&str>>();
            let id = MonkeyId::from(split[0]);
            // If the payload isn't a plain number, it must be an equation.
            let result = split[1].parse::<isize>();
            let value = match result {
                Ok(int) => { MonkeyValue::Value(int) }
                Err(_) => {
                    let spl = split[1].split(" ").collect::<Vec<&str>>();
                    MonkeyValue::Equation(MonkeyId::from(spl[0]), Operation::from(spl[1]), MonkeyId::from(spl[2]))
                }
            };
            let monkey = Monkey { id, value };
            monkeys.insert(id, monkey);
        }
        Self { monkeys }
    }
    /// Recursively evaluates the monkey `start`; panics if a referenced
    /// monkey is missing from the map.
    fn solve(&self, start: &MonkeyId) -> isize {
        let monkeh = self.monkeys.get(start).unwrap();
        match monkeh.value {
            MonkeyValue::Value(v) => { v }
            MonkeyValue::Equation(lhs, op, rhs) => {
                op.solve(self.solve(&lhs), self.solve(&rhs))
            }
        }
    }
    /// Walks parent links from `path_for` up to `top`; assumes each monkey
    /// is referenced by exactly one parent (panics otherwise).
    #[allow(dead_code)]
    fn find_path_for(&self, path_for: &MonkeyId, top: &MonkeyId) -> Vec<MonkeyId> {
        let mut path: Vec<MonkeyId> = Vec::new();
        path.push(*path_for);
        let parents = self.monkeys.iter()
            .filter(|(_id, monkey)| monkey.value.is_equation())
            .filter(|(_id, monkey)| monkey.value.contains(&path_for))
            .map(|(id, _)| *id)
            .collect::<Vec<MonkeyId>>();
        if parents.len() > 1 {
            panic!("Need to handle more parents! {}", parents.len());
        }
        let parent = parents.first().unwrap();
        if parent == top {
            path.push(*parent);
        } else {
            let mut vec = self.find_path_for(parent, top);
            path.append(vec.as_mut());
        }
        path
    }
    /// Like `solve`, but returns `None` as soon as a referenced monkey is
    /// absent from `monkeys` (used after "humn" has been removed).
    fn solve_static(monkeys: &HashMap<MonkeyId, Monkey>, start: &MonkeyId) -> Option<isize> {
        let monkeh = monkeys.get(start);
        match monkeh {
            None => None,
            Some(val) => {
                match val.value {
                    MonkeyValue::Value(v) => { Some(v) }
                    MonkeyValue::Equation(lhs, op, rhs) => {
                        let lhs_o = MonkeyMath::solve_static(monkeys, &lhs);
                        let rhs_o = MonkeyMath::solve_static(monkeys, &rhs);
                        if lhs_o.is_some() && rhs_o.is_some() {
                            return Some(op.solve(lhs_o.unwrap(), rhs_o.unwrap()));
                        }
                        None
                    }
                }
            }
        }
    }
    /// Replaces every fully-solvable monkey with its literal value; monkeys
    /// that (transitively) depend on a removed monkey stay as equations.
    fn reduce(monkeys: &HashMap<MonkeyId, Monkey>) -> HashMap<MonkeyId, Monkey> {
        let mut map = monkeys.clone();
        for (id, _m) in monkeys.iter() {
            let option = MonkeyMath::solve_static(&map, id);
            if option.is_some() {
                map.insert(*id, Monkey { id: *id, value: MonkeyValue::Value(option.unwrap()) });
            }
        }
        map
    }
    /// Part B: find the value `change` ("humn") must yell so that both
    /// operands of `should_equal` ("root") compare equal.
    fn find_value_for(&self, should_equal: &MonkeyId, change: &MonkeyId) -> isize {
        let mut cmc = self.monkeys.clone();
        // Remove "humn" and change "root" to be equals
        cmc.remove(change);
        if let Some(m) = cmc.get_mut(should_equal) {
            m.value = MonkeyValue::Equation(m.value.lhs(), Operation::Eql, m.value.rhs());
        }
        // Solve as much as possible so we can get the actual numbers
        let reduced = MonkeyMath::reduce(&cmc);
        let numbers: HashMap<MonkeyId, MonkeyValue> = reduced.values()
            .filter(|v| v.value.is_value())
            .map(|v| (v.id, v.value))
            .collect();
        // Massive block of code, what does it do?
        // We start a loop where we want to find "change" (humn)
        // This value has been removed from the map so the equations that need that value
        // are unresolved. However, we have resolved "should_equal" (root). We know that
        // we need to find what ever value the other leg for "should_equal" has. This means
        // we have a target value. All our unresolved equations must result in that value.
        // We start backwards then. "find X for blabla". If the equation says we should do
        // addition, we do subtraction. If we should do subtraction of A-B, we do B-A.
        // If multiplication we divide.
        // If division is A / B we do B/A.
        // At the end we have the result that "change" must be for the unresolved equations to
        // produce the correct result.
        let mut search = *should_equal;
        let mut result = 0;
        while search != *change {
            let x = reduced.get(&search).unwrap();
            let (lhs, op, rhs) = x.value.equation().unwrap();
            // In each arm, the `None` side is the branch containing "humn";
            // invert the operation to propagate the target value downwards.
            (search, result) = match (op, numbers.get(&lhs), numbers.get(&rhs)) {
                (Operation::Eql, None, Some(m)) => (lhs, m.value().unwrap()),
                (Operation::Eql, Some(m), None) => (rhs, m.value().unwrap()),
                (Operation::Add, None, Some(m)) => (lhs, result - m.value().unwrap()),
                (Operation::Add, Some(m), None) => (rhs, result - m.value().unwrap()),
                (Operation::Sub, None, Some(m)) => (lhs, result + m.value().unwrap()),
                (Operation::Sub, Some(m), None) => (rhs, m.value().unwrap() - result),
                (Operation::Mul, None, Some(m)) => (lhs, result / m.value().unwrap()),
                (Operation::Mul, Some(m), None) => (rhs, result / m.value().unwrap()),
                (Operation::Div, None, Some(m)) => (lhs, result * m.value().unwrap()),
                (Operation::Div, Some(m), None) => (rhs, m.value().unwrap() / result),
                _ => panic!()
            }
        }
        result
    }
}
/// Part A: fully evaluate the expression tree rooted at "root".
fn part_a(input: &str) -> isize {
    // `read_to_string` accepts any `AsRef<Path>`, so pass the `&str`
    // directly — `input.to_string()` was a needless allocation.
    let open = std::fs::read_to_string(input).expect("Could not read file");
    let math = MonkeyMath::parse(open.as_str());
    math.solve(&MonkeyId::from("root"))
}
/// Part B: solve backwards for the value "humn" must yell so that both of
/// "root"'s operands are equal.
fn part_b(input: &str) -> isize {
    // As in part_a, avoid the needless `input.to_string()` allocation.
    let open = std::fs::read_to_string(input).expect("Could not read file");
    let math = MonkeyMath::parse(open.as_str());
    math.find_value_for(&MonkeyId::from("root"), &MonkeyId::from("humn"))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Tests against the real puzzle input are #[ignore]d since the input
    // file is not committed/available everywhere.
    #[ignore]
    #[test]
    fn runday() {
        day21();
    }
    #[ignore]
    #[test]
    fn real_a() {
        let input = "src/day21/input.txt";
        assert_eq!(379578518396784, part_a(input));
    }
    #[ignore]
    #[test]
    fn real_b() {
        let input = "src/day21/input.txt";
        assert_eq!(3353687996514, part_b(input));
    }
    // The example from the puzzle statement.
    #[test]
    fn part_a_test_input() {
        let input = "src/day21/test-input.txt";
        let result = part_a(input);
        assert_eq!(152, result);
    }
    #[test]
    fn part_b_test_input() {
        let input = "src/day21/test-input.txt";
        let result = part_b(input);
        assert_eq!(301, result);
    }
}
extern crate marafet_parser as parser;
extern crate marafet_util as util;
use std::io::{Write, Result};
use std::collections::HashMap;
use parser::{Ast, Block};
use parser::css::{Rule, Selector};
use util::join;
/// Caller-supplied configuration for CSS generation.
pub struct Settings<'a> {
    /// Block class name appended to every selector (BEM-style scoping).
    pub block_name: &'a String,
    /// Variable overrides applied on top of the CSS block's own defaults.
    pub vars: &'a HashMap<String, String>,
}
/// Per-rule CSS emitter carrying the resolved settings and output sink.
struct Generator<'a, W: 'a> {
    block_name: &'a String,
    // NOTE(review): `vars` is not read by any method visible here — it may
    // be reserved for variable substitution or simply dead; verify against
    // the rest of the crate before removing.
    vars: &'a HashMap<&'a String, &'a String>,
    buf: &'a mut W,
}
impl<'a, W:Write+'a> Generator<'a, W> {
    /// Renders one selector as `element.block.cls1.cls2:state`, always
    /// inserting the block class after the optional element name.
    fn selector_to_string(&self, sel: &Selector) -> String {
        let mut buf = Vec::new();
        if let Some(ref element) = sel.element {
            write!(&mut buf, "{}", element).unwrap();
        }
        write!(&mut buf, ".{}", self.block_name).unwrap();
        for cls in sel.classes.iter() {
            write!(&mut buf, ".{}", cls).unwrap();
        }
        if let Some(ref state) = sel.state {
            write!(&mut buf, ":{}", state).unwrap();
        }
        return String::from_utf8(buf).unwrap();
    }
    /// Writes one rule: comma-joined selectors, then `prop: value;` lines.
    /// (`try!` is kept — this crate predates the `?` operator.)
    fn output_rule(&mut self, rule: &Rule) -> Result<()>
    {
        let selectors = join(rule.selectors.iter()
            .map(|x| self.selector_to_string(x)), ", ");
        try!(write!(self.buf, "{} {{\n", selectors));
        for &(ref k, ref v) in rule.properties.iter() {
            try!(write!(self.buf, "  {}: {};\n", k, v));
        }
        try!(write!(self.buf, "}}\n\n"));
        Ok(())
    }
}
/// Emits CSS for every `Css` block in the AST: each block's parameter
/// defaults are collected first, then overridden by `settings.vars`, and
/// every rule is written through a fresh `Generator`.
pub fn generate<W>(buf: &mut W, ast: &Ast, settings: &Settings) -> Result<()>
    where W: Write
{
    for block in ast.blocks.iter() {
        if let &Block::Css(ref params, ref rules) = block {
            let mut vars = HashMap::new();
            // Parameter default values seed the variable map...
            for param in params.iter() {
                if let Some(ref val) = param.default_value {
                    vars.insert(&param.name, val);
                }
            }
            // ...and caller-supplied settings take precedence.
            for (key, val) in settings.vars.iter() {
                vars.insert(key, val);
            }
            for rule in rules.iter() {
                let mut gen = Generator {
                    block_name: settings.block_name,
                    vars: &vars,
                    buf: buf,
                };
                try!(gen.output_rule(rule));
            }
        }
    }
    Ok(())
}
|
#![deny(missing_docs)]
use std::{ops, slice, u32};
use std::default::Default;
use std::time::Duration;
/// The number of buckets in a latency histogram.
pub const NUM_BUCKETS: usize = 26;
/// The maximum value (inclusive) for each latency bucket in
/// tenths of a millisecond.
pub const BUCKET_BOUNDS: [Latency; NUM_BUCKETS] = [
// The controller telemetry server creates 5 sets of 5 linear buckets
// each:
// TODO: it would be nice if we didn't have to hard-code each
// individual bucket and could use Rust ranges or something.
// However, because we're using a raw fixed size array rather
// than a vector (as we don't ever expect to grow this array
// and thus don't _need_ a vector) we can't concatenate it
// from smaller arrays, making it difficult to construct
// programmatically...
// in the controller:
// prometheus.LinearBuckets(1, 1, 5),
Latency(10),
Latency(20),
Latency(30),
Latency(40),
Latency(50),
// prometheus.LinearBuckets(10, 10, 5),
Latency(100),
Latency(200),
Latency(300),
Latency(400),
Latency(500),
// prometheus.LinearBuckets(100, 100, 5),
Latency(1_000),
Latency(2_000),
Latency(3_000),
Latency(4_000),
Latency(5_000),
// prometheus.LinearBuckets(1000, 1000, 5),
Latency(10_000),
Latency(20_000),
Latency(30_000),
Latency(40_000),
Latency(50_000),
// prometheus.LinearBuckets(10000, 10000, 5),
Latency(100_000),
Latency(200_000),
Latency(300_000),
Latency(400_000),
Latency(500_000),
// Prometheus implicitly creates a max bucket for everything that
// falls outside of the highest-valued bucket, but we need to
// create it explicitly.
Latency(u32::MAX),
];
/// A series of latency values and counts.
///
/// Slot `i` counts observations that were <= `BUCKET_BOUNDS[i]` but greater
/// than the previous bound.
#[derive(Debug)]
pub struct Histogram([u32; NUM_BUCKETS]);
/// A latency in tenths of a millisecond.
#[derive(Debug, Default, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash)]
pub struct Latency(u32);
// ===== impl Histogram =====
impl Histogram {
/// Observe a measurement
pub fn observe<I>(&mut self, measurement: I)
where
I: Into<Latency>,
{
let measurement = measurement.into();
let i = BUCKET_BOUNDS.iter()
.position(|max| &measurement <= max)
.expect("latency value greater than u32::MAX; this shouldn't be \
possible.");
self.0[i] += 1;
}
/// Construct a new, empty `Histogram`.
pub fn new() -> Self {
Histogram([0; NUM_BUCKETS])
}
}
// `histogram += latency` is sugar for `observe`.
impl<I> ops::AddAssign<I> for Histogram
where
    I: Into<Latency>
{
    #[inline]
    fn add_assign(&mut self, measurement: I) {
        self.observe(measurement)
    }
}
// Iterate over the per-bucket counts, in bound order.
impl<'a> IntoIterator for &'a Histogram {
    type Item = &'a u32;
    type IntoIter = slice::Iter<'a, u32>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}
impl Default for Histogram {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
// ===== impl Latency =====
// Conversion ratios for expressing a `Duration` in tenths of a millisecond.
const SEC_TO_MS: u32 = 1_000;
const SEC_TO_TENTHS_OF_A_MS: u32 = SEC_TO_MS * 10;
// Declared before MS_TO_NS, which is fine: const item order is irrelevant.
const TENTHS_OF_MS_TO_NS: u32 = MS_TO_NS / 10;
/// Conversion ratio from milliseconds to nanoseconds.
pub const MS_TO_NS: u32 = 1_000_000;
impl From<Duration> for Latency {
    /// Converts a `Duration` to tenths of a millisecond, saturating at
    /// `u32::MAX` (with a debug log) on overflow.
    fn from(dur: Duration) -> Self {
        let secs = dur.as_secs();
        // checked conversion from u64 -> u32.
        let secs =
            if secs >= u64::from(u32::MAX) {
                None
            } else {
                Some(secs as u32)
            };
        // represent the duration as tenths of a ms.
        let tenths_of_ms = {
            let t = secs.and_then(|as_secs|
                // convert the number of seconds to tenths of a ms, or
                // None on overflow.
                as_secs.checked_mul(SEC_TO_TENTHS_OF_A_MS)
            );
            let t = t.and_then(|as_tenths_ms| {
                // convert the subsecond part of the duration (in ns) to
                // tenths of a millisecond.
                let subsec_tenths_ms = dur.subsec_nanos() / TENTHS_OF_MS_TO_NS;
                as_tenths_ms.checked_add(subsec_tenths_ms)
            });
            // Any overflow along the way saturates to the max bucket.
            t.unwrap_or_else(|| {
                debug!(
                    "{:?} too large to represent as tenths of a \
                        millisecond!",
                    dur
                );
                u32::MAX
            })
        };
        Latency(tenths_of_ms)
    }
}
// A raw u32 is interpreted directly as tenths of a millisecond.
impl From<u32> for Latency {
    #[inline]
    fn from(value: u32) -> Self {
        Latency(value)
    }
}
impl Into<u32> for Latency {
fn into(self) -> u32 {
self.0
}
} |
use blake2::{Blake2b, Digest};
use std::error::Error;
use std::fs;
use std::io;
use std::path::PathBuf;
/// File-system helpers for duplicate detection: content hashing and
/// recursive, size-filtered directory listing.
pub(crate) trait PathUtilities {
    /// Returns a file's Blake2 hash.
    fn blake2(&self) -> io::Result<String>;
    /// Returns all files within a directory, optionally of certain `sizes`.
    fn files_within(&self, sizes: Option<&[u64]>) -> io::Result<Vec<PathBuf>>;
}
impl PathUtilities for PathBuf {
    /// Hashes the whole file contents in one read and renders the digest
    /// as lowercase hex.
    fn blake2(&self) -> io::Result<String> {
        let bytes = fs::read(self.as_path())?;
        let mut hasher = Blake2b::new();
        hasher.input(&bytes);
        Ok(format!("{:x}", hasher.result()))
    }
    /// Recursively collects files under `self`; when `sizes` is given, only
    /// files whose length appears in the slice are kept.
    fn files_within(&self, sizes: Option<&[u64]>) -> io::Result<Vec<PathBuf>> {
        let read_dir = try_with_path!(self.read_dir(), self);
        let mut files = Vec::new();
        for entry in read_dir {
            let entry = try_with_path!(entry, self);
            let entry_path = entry.path();
            if entry_path.is_file() {
                let metadata = try_with_path!(entry_path.metadata(), entry_path);
                let size = metadata.len();
                // No filter means every file matches. Checking the slice
                // directly avoids the previous per-call `Vec::from` copy.
                if sizes.map_or(true, |wanted| wanted.contains(&size)) {
                    files.push(entry_path);
                }
            } else if entry_path.is_dir() {
                // Recurse with the same filter.
                let mut subdir_files = entry_path.files_within(sizes)?;
                files.append(&mut subdir_files);
            }
        }
        Ok(files)
    }
}
|
// USB endpoint direction bits (bit 7 of bEndpointAddress).
pub const ENDPOINT_IN: u8 = 0x80;
pub const ENDPOINT_OUT: u8 = 0x00;
// ST-Link bulk endpoints: device-to-host on EP1, host-to-device on EP2,
// trace data on EP3.
pub const STLINK_RX_EP: u8 = 1 | ENDPOINT_IN;
pub const STLINK_TX_EP: u8 = 2 | ENDPOINT_OUT;
pub const STLINK_TRACE_EP: u8 = 3 | ENDPOINT_IN;
// Fixed command-buffer size for the v2 protocol.
pub const STLINK_CMD_SIZE_V2: usize = 16;
// Top-level command bytes.
pub const STLINK_GET_VERSION: u8 = 0xF1;
pub const STLINK_DEBUG_COMMAND: u8 = 0xF2;
pub const STLINK_GET_CURRENT_MODE: u8 = 0xF5;
pub const STLINK_GET_TARGET_VOLTAGE: u8 = 0xF7;
// Sub-command (APIv2) used under STLINK_DEBUG_COMMAND: system reset.
pub const STLINK_DEBUG_APIV2_RESETSYS: u8 = 0x32;
// USB vendor ID (STMicroelectronics) and the product IDs of the
// various ST-Link probe hardware revisions.
pub const STLINK_VID: u16 = 0x0483;
pub const STLINK_V1_PID: u16 = 0x3744;
pub const STLINK_V2_PID: u16 = 0x3748;
pub const STLINK_V2_1_PID: u16 = 0x374B;
pub const STLINK_V2_1_NO_MSD_PID: u16 = 0x3752;
pub const STLINK_V3_USBLOADER_PID: u16 = 0x374D;
pub const STLINK_V3E_PID: u16 = 0x374E;
pub const STLINK_V3S_PID: u16 = 0x374F;
pub const STLINK_V3_2VCP_PID: u16 = 0x3753;
|
//! Pieces pertaining to the HTTP message protocol.
use std::borrow::Cow;
use std::fmt;
use bytes::BytesMut;
use header::{Connection, ConnectionOption, Expect};
use header::Headers;
use method::Method;
use status::StatusCode;
use uri::Uri;
use version::HttpVersion;
use version::HttpVersion::{Http10, Http11};
pub use self::conn::{Conn, KeepAlive, KA};
pub use self::body::{Body, TokioBody};
pub use self::chunk::Chunk;
mod body;
mod chunk;
mod conn;
mod io;
mod h1;
//mod h2;
pub mod request;
pub mod response;
/// An Incoming Message head. Includes request/status line, and headers.
///
/// `S` is the subject type: `RequestLine` for requests, `RawStatus`
/// for responses (see the aliases below).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct MessageHead<S> {
    /// HTTP version of the message.
    pub version: HttpVersion,
    /// Subject (request line or status line) of Incoming message.
    pub subject: S,
    /// Headers of the Incoming message.
    pub headers: Headers
}
/// An incoming request message.
pub type RequestHead = MessageHead<RequestLine>;
/// A request line: method plus target URI.
#[derive(Debug, Default, PartialEq)]
pub struct RequestLine(pub Method, pub Uri);
impl fmt::Display for RequestLine {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", self.0, self.1)
}
}
/// An incoming response message.
pub type ResponseHead = MessageHead<RawStatus>;
impl<S> MessageHead<S> {
    /// Whether the connection should stay open after this message,
    /// based on its HTTP version and `Connection` header.
    pub fn should_keep_alive(&self) -> bool {
        should_keep_alive(self.version, &self.headers)
    }
    /// Whether the peer expects a `100 Continue` before the body is sent.
    pub fn expecting_continue(&self) -> bool {
        expecting_continue(self.version, &self.headers)
    }
}
impl ResponseHead {
    /// Converts this head's RawStatus into a StatusCode.
    ///
    /// # Panics
    /// Panics if the raw code is not a valid status code
    /// (delegates to `RawStatus::status`, which unwraps).
    #[inline]
    pub fn status(&self) -> StatusCode {
        self.subject.status()
    }
}
/// The raw status code and reason-phrase.
#[derive(Clone, PartialEq, Debug)]
pub struct RawStatus(pub u16, pub Cow<'static, str>);
impl RawStatus {
    /// Converts this into a StatusCode.
    ///
    /// # Panics
    /// Panics if the stored `u16` is outside the range `StatusCode` accepts.
    #[inline]
    pub fn status(&self) -> StatusCode {
        StatusCode::try_from(self.0).unwrap()
    }
}
impl fmt::Display for RawStatus {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", self.0, self.1)
}
}
impl From<StatusCode> for RawStatus {
    /// Builds a raw status from a typed status code, borrowing the
    /// canonical reason-phrase (empty string when none is defined).
    fn from(status: StatusCode) -> RawStatus {
        let reason = status.canonical_reason().unwrap_or("");
        RawStatus(status.into(), Cow::Borrowed(reason))
    }
}
impl Default for RawStatus {
    /// Defaults to `200 OK`.
    fn default() -> RawStatus {
        RawStatus(200, Cow::Borrowed("OK"))
    }
}
/// Converts a typed-status head into a raw-status head, preserving the
/// version and headers unchanged.
impl From<MessageHead<::StatusCode>> for MessageHead<RawStatus> {
    fn from(head: MessageHead<::StatusCode>) -> MessageHead<RawStatus> {
        MessageHead {
            subject: head.subject.into(),
            version: head.version,
            headers: head.headers,
        }
    }
}
/// Checks if a connection should be kept alive.
///
/// HTTP/1.0 defaults to close (opt-in via `Connection: keep-alive`);
/// HTTP/1.1 defaults to keep-alive (opt-out via `Connection: close`).
#[inline]
pub fn should_keep_alive(version: HttpVersion, headers: &Headers) -> bool {
    let ret = match (version, headers.get::<Connection>()) {
        (Http10, None) => false,
        (Http10, Some(conn)) => conn.contains(&ConnectionOption::KeepAlive),
        (Http11, Some(conn)) => !conn.contains(&ConnectionOption::Close),
        _ => true
    };
    trace!("should_keep_alive(version={:?}, header={:?}) = {:?}", version, headers.get::<Connection>(), ret);
    ret
}
/// Checks if a connection is expecting a `100 Continue` before sending its body.
///
/// Only an HTTP/1.1 message carrying `Expect: 100-continue` qualifies.
#[inline]
pub fn expecting_continue(version: HttpVersion, headers: &Headers) -> bool {
    let ret = if let (Http11, Some(&Expect::Continue)) = (version, headers.get::<Expect>()) {
        true
    } else {
        false
    };
    trace!("expecting_continue(version={:?}, header={:?}) = {:?}", version, headers.get::<Expect>(), ret);
    ret
}
/// Uninhabited marker type for the server side of an HTTP/1 exchange.
#[derive(Debug)]
pub enum ServerTransaction {}
/// Uninhabited marker type for the client side of an HTTP/1 exchange.
#[derive(Debug)]
pub enum ClientTransaction {}
/// One direction of an HTTP/1 exchange: parsing of incoming heads and
/// encoding of outgoing ones.
pub trait Http1Transaction {
    /// Head subject type parsed from the wire.
    type Incoming;
    /// Head subject type written to the wire.
    type Outgoing: Default;
    /// Parses a message head from `bytes`. `Ok(None)` presumably means
    /// "need more data" — confirm at the impl sites.
    fn parse(bytes: &mut BytesMut) -> ParseResult<Self::Incoming>;
    /// Chooses the body decoder for a parsed head.
    fn decoder(head: &MessageHead<Self::Incoming>, method: &mut Option<::Method>) -> ::Result<h1::Decoder>;
    /// Serializes `head` into `dst`, returning the body encoder to use.
    fn encode(head: MessageHead<Self::Outgoing>, has_body: bool, method: &mut Option<Method>, dst: &mut Vec<u8>) -> h1::Encoder;
}
/// Result of parsing: the head plus the number of bytes consumed, or
/// `None` when the input was incomplete.
pub type ParseResult<T> = ::Result<Option<(MessageHead<T>, usize)>>;
#[test]
fn test_should_keep_alive() {
    let mut headers = Headers::new();
    // Defaults with no Connection header: 1.0 closes, 1.1 keeps alive.
    assert!(!should_keep_alive(Http10, &headers));
    assert!(should_keep_alive(Http11, &headers));
    // `Connection: close` forces close on both versions.
    headers.set(Connection::close());
    assert!(!should_keep_alive(Http10, &headers));
    assert!(!should_keep_alive(Http11, &headers));
    // `Connection: keep-alive` forces keep-alive on both versions.
    headers.set(Connection::keep_alive());
    assert!(should_keep_alive(Http10, &headers));
    assert!(should_keep_alive(Http11, &headers));
}
#[test]
fn test_expecting_continue() {
    let mut headers = Headers::new();
    // Without an Expect header nothing expects a continue.
    assert!(!expecting_continue(Http10, &headers));
    assert!(!expecting_continue(Http11, &headers));
    // `Expect: 100-continue` only applies to HTTP/1.1.
    headers.set(Expect::Continue);
    assert!(!expecting_continue(Http10, &headers));
    assert!(expecting_continue(Http11, &headers));
}
|
use std::error::Error as StdError;
use std::fmt;
use std::marker::PhantomData;
pub use err::Error;
pub use hyper::Error as HttpError;
pub use hyper::error::Result as HttpResult;
use hyper::status::StatusCode;
/// The phase of a request/response cycle at which an error occurred.
#[derive(Debug)]
pub enum ErrorTiming {
    /// Error at the network layer.
    AtNetwork,
    /// Error while handling the request.
    AtRequest,
    /// Error while handling the response.
    AtResponse,
}
/// An error paired with the phase at which it occurred.
#[derive(Debug)]
pub struct ApiError {
    /// The underlying error (boxed trait object so any `Error` fits).
    pub error: Box<Error + Send>,
    /// When in the request/response cycle the error happened.
    pub timing: ErrorTiming,
}
impl ApiError {
    /// Wraps any error together with the phase at which it occurred.
    pub fn new<E: Error>(e: E, timing: ErrorTiming) -> ApiError {
        ApiError {
            error: Box::new(e),
            // Field-init shorthand replaces the redundant `timing: timing`.
            timing,
        }
    }
}
impl fmt::Display for ApiError {
    /// Forwards to the wrapped error's `Display` output.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{}", self.error)
    }
}
impl StdError for ApiError {
    /// Delegates to the wrapped error's description.
    /// (Pre-2018-style API; `description` is deprecated in modern Rust.)
    fn description(&self) -> &str {
        self.error.description()
    }
    /// Delegates to the wrapped error's cause chain.
    fn cause(&self) -> Option<&StdError> {
        self.error.cause()
    }
}
/// Errors raised while validating a received response.
#[derive(Debug)]
pub enum ResponseError {
    /// The response carried a status code the caller did not accept.
    UnacceptableStatusCode(StatusCode),
}
impl fmt::Display for ResponseError {
    /// Human-readable form: names the offending status code.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // Single-variant enum, so an irrefutable `let` destructure suffices.
        let ResponseError::UnacceptableStatusCode(ref code) = *self;
        write!(f, "Unacceptable Status Code: {}", code)
    }
}
impl StdError for ResponseError {
    /// Static description of the error kind.
    fn description(&self) -> &str {
        match *self {
            // `_` instead of binding `code`: the old by-value binding was
            // unused (compiler warning) and needlessly required Copy.
            ResponseError::UnacceptableStatusCode(_) => "Unacceptable Status code",
        }
    }
    /// This error has no underlying cause.
    fn cause(&self) -> Option<&StdError> {
        None
    }
}
|
use crate::completions::{Completer, CompletionOptions};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
};
use reedline::Suggestion;
use std::sync::Arc;
/// Completes variable names: built-ins (`$nu`, `$env`, …) plus variables
/// found in the working set and the permanent engine scopes.
#[derive(Clone)]
pub struct VariableCompletion {
    // Shared engine state, queried for variables in committed scopes.
    engine_state: Arc<EngineState>,
}
impl VariableCompletion {
pub fn new(engine_state: Arc<EngineState>) -> Self {
Self { engine_state }
}
}
impl Completer for VariableCompletion {
    /// Collects variable-name suggestions matching `prefix`, searching the
    /// built-ins, the working-set delta scopes, and the engine-state scopes.
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        prefix: Vec<u8>,
        span: Span,
        offset: usize,
        _: usize,
    ) -> (Vec<Suggestion>, CompletionOptions) {
        // All suggestions share the same replacement span (relative to the
        // line being completed). Factored out of three previously duplicated
        // struct literals.
        // NOTE(review): `span.start - offset` underflows (panics in debug)
        // if `offset` ever exceeds the span start — presumed caller
        // invariant; confirm upstream.
        let make_suggestion = |value: String| Suggestion {
            value,
            description: None,
            extra: None,
            span: reedline::Span {
                start: span.start - offset,
                end: span.end - offset,
            },
        };
        let mut output = vec![];
        // Built-in variables exist regardless of scope.
        let builtins = ["$nu", "$in", "$config", "$env", "$nothing"];
        for builtin in builtins {
            if builtin.as_bytes().starts_with(&prefix) {
                output.push(make_suggestion(builtin.to_string()));
            }
        }
        // Variables in the working set's delta (not yet merged into the
        // permanent engine state).
        for scope in &working_set.delta.scope {
            for v in &scope.vars {
                if v.0.starts_with(&prefix) {
                    output.push(make_suggestion(String::from_utf8_lossy(v.0).to_string()));
                }
            }
        }
        // Variables already committed to the engine state.
        for scope in &self.engine_state.scope {
            for v in &scope.vars {
                if v.0.starts_with(&prefix) {
                    output.push(make_suggestion(String::from_utf8_lossy(v.0).to_string()));
                }
            }
        }
        // NOTE(review): `dedup` only collapses *adjacent* duplicates and the
        // list is not sorted, so equal names from different scopes may both
        // survive. Preserved as-is to keep behavior unchanged.
        output.dedup();
        (output, CompletionOptions::default())
    }
}
|
// Puzzle input: the inclusive candidate-password range to search
// (Advent of Code 2019, day 4 — see the link below).
const MINIMUM_VALUE: u32 = 206_938;
const MAXIMUM_VALUE: u32 = 679_128;
// I'm not _unhappy_ with this implementation, but it's simplistic.
// You really want to look at AxlLind's:
// https://github.com/AxlLind/AdventOfCode2019/blob/master/src/bin/04.rs
// It's a thing of beauty, and runs in a fraction of the time of this code.
// I'm not going to work on this further as I'd just be copying his.
fn main() {
    let start_time = std::time::Instant::now();
    // Evaluate every candidate once; unzip the paired validity flags into
    // separate part-1 / part-2 vectors.
    let (part_1_valid, part_2_valid): (Vec<bool>, Vec<bool>) = (MINIMUM_VALUE..=MAXIMUM_VALUE)
        .map(evaluate_password)
        .unzip();
    // A valid password is one whose flag is `true`.
    let count_valid = |flags: &[bool]| flags.iter().filter(|&&ok| ok).count();
    println!(
        "Part 1: {}\nPart 2: {}\nTime: {}ms",
        count_valid(&part_1_valid),
        count_valid(&part_2_valid),
        start_time.elapsed().as_millis()
    );
}
// Returns two bools - the first is whether the password is valid by part 1 rules,
// and the second by part 2 rules.
fn evaluate_password(password: u32) -> (bool, bool) {
let pass_str = password.to_string();
let mut chars = pass_str.chars();
let mut previous_digit = chars.next().unwrap();
let mut at_least_double_digit = false;
let mut double_digit = false;
let mut digit_repetition = 1; // How many times have we seen the current digit consecutively?
for digit in chars {
if digit < previous_digit {
return (false, false); // Digits may never decrease
}
if digit == previous_digit {
digit_repetition += 1;
} else {
if digit_repetition >= 2 {
at_least_double_digit = true;
}
if digit_repetition == 2 {
double_digit = true;
}
digit_repetition = 1;
}
previous_digit = digit;
}
if digit_repetition >= 2 {
at_least_double_digit = true;
}
if digit_repetition == 2 {
double_digit = true;
}
(at_least_double_digit, double_digit)
}
|
// svd2rust-generated accessors for the read-only MID ("magic ID") register.
#[doc = "Register `MID` reader"]
pub type R = crate::R<MID_SPEC>;
#[doc = "Field `MID` reader - Magic ID"]
pub type MID_R = crate::FieldReader<u32>;
impl R {
    // The MID field occupies the full 32-bit register, so the raw bits
    // are handed to the field reader unchanged.
    #[doc = "Bits 0:31 - Magic ID"]
    #[inline(always)]
    pub fn mid(&self) -> MID_R {
        MID_R::new(self.bits)
    }
}
// Register spec: 32-bit, readable only, reset value 0xa3c5_dd01.
#[doc = "magic ID\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mid::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MID_SPEC;
impl crate::RegisterSpec for MID_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mid::R`](R) reader structure"]
impl crate::Readable for MID_SPEC {}
#[doc = "`reset()` method sets MID to value 0xa3c5_dd01"]
impl crate::Resettable for MID_SPEC {
    const RESET_VALUE: Self::Ux = 0xa3c5_dd01;
}
|
/// Trial division: `curr_n` is prime iff no known prime up to
/// ceil(sqrt(curr_n)) divides it. `primes` must hold all primes below
/// `curr_n` in ascending order.
///
/// Takes `&[u32]` instead of `&Vec<u32>` (clippy `ptr_arg`); existing
/// call sites passing `&primes` still compile via deref coercion.
fn is_prime(curr_n: u32, primes: &[u32]) -> bool {
    let limit = (curr_n as f64).sqrt().ceil() as u32;
    primes
        .iter()
        .take_while(|&&p| p <= limit)
        .all(|&p| curr_n % p != 0)
}
/// Returns the n-th prime (1-indexed), or `None` when `n == 0`.
pub fn nth(n: u32) -> Option<u32> {
    if n == 0 {
        return None;
    }
    // Seed with 2, then test only odd candidates — evens > 2 can't be prime.
    let mut primes: Vec<u32> = vec![2];
    let mut candidate = 3;
    while (primes.len() as u32) < n {
        if is_prime(candidate, &primes) {
            primes.push(candidate);
        }
        candidate += 2;
    }
    primes.last().copied()
}
#[cfg(test)]
mod test {
    use super::*;
    // The first few primes pin down the 1-indexed contract.
    #[test]
    fn test_nth() {
        assert_eq!(Some(2), nth(1));
        assert_eq!(Some(3), nth(2));
        assert_eq!(Some(5), nth(3));
        assert_eq!(Some(7), nth(4));
        assert_eq!(Some(11), nth(5));
    }
    // A larger value (the 10,001st prime) exercises the sieve at scale.
    #[test]
    fn test_big() {
        assert_eq!(Some(104743), nth(10001));
    }
}
|
mod day1;
mod day2;
mod day3;
fn main() {
    // Earlier days are gated behind `if false`: they stay compiled (so
    // breakage is still caught) without being executed on each run.
    if false {
        day1::main();
        day2::main();
    }
    day3::main();
}
|
pub use crate::common::{Const, Id, Op2};
/// Expression AST for a small typed functional language, parameterized
/// over the type annotation `Ty` carried by binders.
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum Expr<Ty> {
    /// Variable reference.
    Var(Id),
    /// Literal constant.
    Const(Const),
    /// Binary operation applied to two sub-expressions.
    Op2(Op2, Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// Lambda: parameter name, its annotated type, and the body.
    Fun(Id, Ty, Box<Expr<Ty>>),
    /// Function application: function and argument.
    App(Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// Conditional: condition, then-branch, else-branch.
    If(Box<Expr<Ty>>, Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// Let binding: name, bound expression, body.
    Let(Id, Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// Recursive (fixpoint) binding: name, annotated type, body.
    Fix(Id, Ty, Box<Expr<Ty>>),
    /// Array construction — presumably (length, initial element);
    /// confirm against the evaluator.
    MkArray(Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// Array read — presumably (array, index); confirm against the evaluator.
    GetArray(Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// Array write — presumably (array, index, value); confirm against
    /// the evaluator.
    SetArray(Box<Expr<Ty>>, Box<Expr<Ty>>, Box<Expr<Ty>>),
    /// NOTE(review): meaning not evident from this file — confirm.
    V,
    /// NOTE(review): meaning not evident from this file — confirm.
    Star,
}
|
use std::collections::HashSet;
use color_eyre::eyre::{eyre, WrapErr};
use color_eyre::Result;
use cursive::{CbSink, Cursive, CursiveExt};
use cursive::direction::Orientation::Vertical;
use cursive::traits::{Nameable, Resizable, Scrollable};
use cursive::views::{Dialog, EditView, LinearLayout, ListView, TextView};
use tokio::sync::{mpsc, oneshot};
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::task::JoinHandle;
use crate::messages::{Command, CommandResult};
use cursive::view::ScrollStrategy;
use cursive::event::EventResult;
/// Handles to a running terminal UI session.
pub struct TerminalUI {
    /// Commands submitted by the user through the UI.
    pub command_source: Receiver<Command>,
    /// Channel on which results are sent back for display.
    pub result_sink: Sender<CommandResult>,
    /// Resolves once both the UI thread and the display task have finished.
    pub join_handle: JoinHandle<Result<()>>,
}
/// Spawns the cursive UI on a dedicated OS thread (cursive's event loop is
/// blocking) plus an async task that forwards results into the UI, and
/// returns the channel endpoints the rest of the app talks to.
pub fn create_session() -> TerminalUI {
    // NOTE(review): currently unused — quitting is handled by the 'q'
    // global callback below; confirm whether this can be removed.
    let _quit_commands = create_quit_commands();
    // reader_*: commands typed by the user; writer_*: results to display.
    let (reader_tx, reader_rx) = mpsc::channel(32);
    let (writer_tx, writer_rx) = mpsc::channel(32);
    // One-shot channel to hand the cursive callback sink out of the UI thread.
    let (cb_sink_tx, cb_sink_rx) = oneshot::channel();
    let reader: std::thread::JoinHandle<Result<()>> = std::thread::spawn(move || {
        let mut siv = Cursive::new();
        siv.add_global_callback('q', |s| s.quit());
        let submit_reader_tx = reader_tx.clone();
        // Results pane: scrollable list that sticks to the bottom while the
        // user hasn't scrolled away.
        let results_list = ListView::new().with_name("results-list")
            .scrollable()
            .scroll_strategy(ScrollStrategy::StickToBottom)
            .on_scroll_inner(|scroll_view, _rect| {
                // Re-arm stick-to-bottom once the user scrolls back down.
                if scroll_view.is_at_bottom() {
                    scroll_view.set_scroll_strategy(ScrollStrategy::StickToBottom);
                    EventResult::Consumed(None)
                } else {
                    EventResult::Ignored
                }
            });
        // Create a dialog with an edit text and a button.
        // The user can either hit the <Ok> button,
        // or press Enter on the edit text.
        siv.add_layer(
            LinearLayout::new(Vertical)
                .child(
                    Dialog::new()
                        .title("Results")
                        .padding_lrtb(1, 1, 1, 0)
                        .content(results_list)
                        .full_height(),
                )
                .child(
                    Dialog::new()
                        .title("Enter URLs")
                        .padding_lrtb(1, 1, 1, 1)
                        .content(EditView::new().on_submit(move |cursive, line| {
                            send_command(cursive, submit_reader_tx.clone(), line)
                        })),
                )
                .full_screen()
        );
        // Hand the callback sink to the async display task before blocking
        // in the UI event loop.
        cb_sink_tx
            .send(siv.cb_sink().clone())
            .map_err(|e| eyre!("Error sending cb_sink on initialization: {:?}", e))?;
        siv.run();
        Ok(())
    });
    let writer = tokio::spawn(display_results(cb_sink_rx, writer_rx));
    let join_handle = tokio::spawn(async {
        // I think it's okay if the join() blocks this task since it should
        // always be the first thing that shuts down in the application. In theory we shouldn't
        // block though since it might permanently block a single-threaded executor. Maybe one
        // of the threads in our executor is permanently blocked because of this? Probably good to
        // avoid this kind of infinite blocking in a more serious application.
        match reader.join() {
            Err(e) => return Err(eyre!("Shutdown of reader thread failed: {:?}", e)),
            Ok(r) => r?,
        };
        writer.await??;
        Ok(())
    });
    TerminalUI {
        command_source: reader_rx,
        result_sink: writer_tx,
        join_handle,
    }
}
/// Forwards every received `CommandResult` into the UI thread via the
/// cursive callback sink, appending it to the results list.
/// Terminates when the result channel closes.
async fn display_results(
    cb_sink_rx: oneshot::Receiver<CbSink>,
    mut result_rx: Receiver<CommandResult>,
) -> Result<()> {
    // The UI thread sends its callback sink exactly once at startup.
    let cb_sink: CbSink = cb_sink_rx.await.wrap_err("Error getting cb_sink")?;
    while let Some(result) = result_rx.recv().await {
        // UI mutations must run on the cursive thread, so wrap each update
        // in a callback closure.
        cb_sink
            .send(Box::new(move |siv: &mut Cursive| {
                siv.call_on_name("results-list", |view: &mut ListView| {
                    add_result(view, result);
                });
            }))
            .map_err(|e| eyre!("Error sending UI update to cb_sink: {:?}", e))?;
    }
    Ok(())
}
/// Appends one command result to the results list, labeled by its URL.
fn add_result(view: &mut ListView, result: CommandResult) {
    let label = result.url;
    view.add_child(label.as_str(), TextView::new(result.output));
}
/// Parses the submitted line into a set of URLs and sends it as a Command;
/// shows an info dialog when the line is empty.
fn send_command(s: &mut Cursive, reader_tx: Sender<Command>, line: &str) {
    if line.is_empty() {
        // Try again as many times as we need!
        s.add_layer(Dialog::info("Please enter URLs"));
    } else {
        // split_whitespace (rather than split(" ")) so repeated, leading or
        // trailing spaces don't insert empty-string "URLs" into the set.
        let mut urls = HashSet::new();
        for url in line.split_whitespace() {
            urls.insert(url.into());
        }
        let command = Command { urls };
        reader_tx
            .blocking_send(command)
            .wrap_err("Error sending Command")
            .unwrap();
    }
}
/// The set of textual commands that quit the UI.
fn create_quit_commands() -> HashSet<&'static str> {
    ["q", "quit"].iter().copied().collect()
}
|
use std::collections::HashMap;
use std::error::Error;
use rusoto_core::credential::{ChainProvider, ProvideAwsCredentials};
use rusoto_core::param::{Params, ServiceParams};
use rusoto_core::Region;
use rusoto_signature::SignedRequest;
use serde_json::json;
/// The authentication options to be passed into the main auth function
#[derive(Debug)]
pub struct Parameters {
    /// Optionally defined Vault IAM Server ID value to be attached
    /// as a header to the authentication request
    /// (sent as `X-Vault-AWS-IAM-Server-ID`).
    pub iam_server_id: Option<String>,
    /// The mount path of the AWS authentication engine in Vault
    /// (interpolated into `/v1/auth/<mount_path>/login`).
    pub mount_path: String,
    /// The role in Vault to authenticate as under the AWS engine
    pub role: String,
    /// The full Vault server address and port to send the request
    /// (scheme included, no trailing slash).
    pub vault_address: String,
}
/// Builds the authentication request payload from the credentials
/// found in the provider chain and sends it to the designated
/// Vault server to attempt a login for the argued role
///
/// Returns Vault's JSON response body verbatim (success and error bodies
/// alike — the status code is not checked here).
///
/// # Errors
/// Fails if the AWS credential lookup, request serialization, or the HTTP
/// round-trip / JSON decode fails.
pub async fn authenticate(params: &Parameters) -> Result<serde_json::Value, Box<dyn Error>> {
    let payload = new_iam_payload(&params.role, &params.iam_server_id).await?;
    // Vault login endpoint for the AWS auth engine at the given mount path.
    let url = format!(
        "{}/v1/auth/{}/login",
        params.vault_address, params.mount_path
    );
    let client = reqwest::Client::new();
    let res = client
        .post(url)
        .header("Accept", "application/json")
        .json(&payload)
        .send()
        .await?
        .json::<serde_json::Value>()
        .await?;
    Ok(res)
}
/// Creates the AWS4 signed request headers and the authentication
/// payload that will be sent to Vault in the login attempt
///
/// The payload embeds a SigV4-signed `sts:GetCallerIdentity` request that
/// Vault replays to verify the caller's AWS identity.
async fn new_iam_payload(
    role: &str,
    iam_server_id: &Option<String>,
) -> Result<serde_json::Value, Box<dyn Error>> {
    // Resolve credentials from the default chain (env vars, profile, etc.).
    let credentials = ChainProvider::new().credentials().await?;
    let signed_request = {
        let mut req = SignedRequest::new("POST", "sts", &Region::UsEast1, "/");
        // Vault's optional replay-protection header must be part of the
        // signed request so it is covered by the signature.
        if let Some(id) = iam_server_id {
            req.add_header("X-Vault-AWS-IAM-Server-ID", id);
        }
        let mut params = Params::new();
        params.put("Action", "GetCallerIdentity");
        params.put("Version", "2011-06-15");
        req.set_payload(Some(serde_urlencoded::to_string(&params)?));
        req.set_content_type(String::from("application/x-www-form-urlencoded"));
        req.sign(&credentials);
        req
    };
    // Flatten the signed headers into string values and serialize as JSON,
    // since Vault expects them base64-encoded in the login payload.
    let signed_headers = {
        let mut headers = HashMap::<String, Vec<String>>::new();
        for (key, values) in signed_request.headers() {
            let entries = values
                .iter()
                .map(|v| String::from_utf8(v.to_owned()).unwrap())
                .collect::<Vec<String>>();
            headers.insert(key.to_owned(), entries);
        }
        serde_json::to_string(&headers)?
    };
    // Field layout follows Vault's AWS IAM login API.
    Ok(json!({
        "iam_http_request_method": "POST",
        "iam_request_url": base64::encode(b"https://sts.amazonaws.com/"),
        "iam_request_headers": base64::encode(signed_headers.as_bytes()),
        "iam_request_body": base64::encode(b"Action=GetCallerIdentity&Version=2011-06-15"),
        "role": role
    }))
}
|
use ansi_colors::*;
/// Demo: prints a string styled white + bold + hidden via ansi_colors.
fn main() {
    let mut styled = ColouredStr::new("ansi_coloring");
    styled.white();
    styled.bold();
    styled.hidden();
    println!("{}", styled);
}
|
use iced::{
button, executor, Application, Button, Column, Command, Element, Row, Subscription, Text,
};
use crate::jsonrpc::{self, JsonRpc};
use crate::statuses::{self, Statuses};
/// Top-level iced application state.
pub struct App {
    // One button per status name supplied via flags; pressing one records it.
    record_status_buttons: Vec<RecordStatusButton>,
    // State for the "Export" button.
    button: button::State,
    // The collection of recorded statuses and their edit state.
    statuses: Statuses,
    // JSON-RPC bridge used for import/export requests.
    jsonrpc: JsonRpc,
}
/// Application messages driving `App::update`.
#[derive(Debug, Clone)]
pub enum Message {
    /// A record-status button was pressed; payload is the status name.
    AddStatus(String),
    /// The "Export" button was pressed.
    Export,
    /// A message forwarded from the statuses sub-view.
    StatusesMessage(statuses::Message),
    /// A JSON-RPC response (or error) arrived.
    JsonRpc(jsonrpc::Receive),
}
impl Application for App {
    type Executor = executor::Default;
    type Message = Message;
    // Flags carry the status names to create buttons for.
    type Flags = Vec<String>;
    /// Builds the initial state: one button per flagged status name.
    fn new(flags: Self::Flags) -> (App, Command<Self::Message>) {
        let statuses = Statuses::new();
        (
            App {
                record_status_buttons: flags
                    .into_iter()
                    .map(|name| RecordStatusButton::new(name))
                    .collect(),
                button: button::State::default(),
                statuses,
                jsonrpc: JsonRpc::new(),
            },
            Command::none(),
        )
    }
    /// Window title.
    fn title(&self) -> String {
        String::from("Record Stuff")
    }
    /// Dispatches each message to the statuses model / JSON-RPC bridge.
    fn update(&mut self, message: Self::Message) -> Command<Self::Message> {
        match message {
            Message::AddStatus(name) => {
                self.statuses.get_status(name, &mut self.jsonrpc);
                Command::none()
            }
            Message::Export => {
                self.statuses.export(&mut self.jsonrpc);
                Command::none()
            }
            Message::StatusesMessage(statuses::Message::SetName(index, name)) => {
                self.statuses.set_status_name(index, name);
                Command::none()
            }
            Message::JsonRpc(jsonrpc::Receive { id, response }) => {
                use jsonrpc::{Response, ResponseResult};
                match response {
                    ResponseResult::Response(Response::ImportStatus(import_status)) => {
                        self.statuses.set_status_value(id, import_status);
                    }
                    // Error code 1 removes the pending status; other errors
                    // are silently ignored.
                    ResponseResult::Error {
                        code: 1,
                        message: _message,
                    } => {
                        self.statuses.remove_status(id);
                    }
                    _ => {}
                }
                Command::none()
            }
        }
    }
    /// Streams incoming JSON-RPC responses as `Message::JsonRpc`.
    fn subscription(&self) -> Subscription<Self::Message> {
        self.jsonrpc.receive().map(Message::JsonRpc)
    }
    /// Layout: a column with the Export button and one button per status,
    /// next to the statuses sub-view.
    fn view(&mut self) -> Element<Self::Message> {
        let row = Row::new().padding(20).spacing(20);
        let col = Column::new()
            .spacing(20)
            .push(Button::new(&mut self.button, Text::new("Export")).on_press(Message::Export));
        let col = self
            .record_status_buttons
            .iter_mut()
            .fold(col, |column, button| {
                column.push(button.view().map(|name| Message::AddStatus(name)))
            });
        row.push(col)
            .push(self.statuses.view().map(Message::StatusesMessage))
            .into()
    }
}
/// A button that, when pressed, emits its status name as the message.
struct RecordStatusButton {
    // The status name shown as the button label and emitted on press.
    name: String,
    // iced widget state for the button.
    button: button::State,
}
impl RecordStatusButton {
    /// Creates a button for the given status name.
    fn new(name: String) -> Self {
        Self {
            name,
            button: button::State::default(),
        }
    }
    /// Renders the button; pressing it emits the status name.
    fn view(&mut self) -> Element<String> {
        let label = Text::new(&self.name);
        Button::new(&mut self.button, label)
            .on_press(self.name.clone())
            .into()
    }
}
|
/// Demonstrates vector sorting: prints the values in ascending and
/// descending order, the smallest and largest element, and each value.
pub fn run() {
    let mut numbers: Vec<i32> = vec![10, 7, 100, 87, 99, 65, 2, 1, 34, 0];
    // sort_unstable: faster than sort, and fine for plain integers where
    // no equal-key ordering needs preserving.
    numbers.sort_unstable();
    // Fixed misspelling in the output ("Assending" -> "Ascending").
    println!("Ascending order sort: {:?}", numbers);
    // After an ascending sort the first element is the minimum.
    println!("Smallest Number: {}", numbers[0]);
    numbers.reverse();
    // Fixed misspelling in the output ("Desending" -> "Descending").
    println!("Descending order sort: {:?}", numbers);
    // After reversing, the first element is the maximum.
    println!("Largest Number: {}", numbers[0]);
    // Loop through vector values
    for x in numbers.iter() {
        println!("Number: {}", x);
    }
}
// svd2rust-generated accessors for the RTC TAFCR register.
// TAMP1E: single-bit enable for RTC_TAMPx input detection (bit true => Enabled).
#[doc = "Register `TAFCR` reader"]
pub type R = crate::R<TAFCR_SPEC>;
#[doc = "Register `TAFCR` writer"]
pub type W = crate::W<TAFCR_SPEC>;
#[doc = "Field `TAMP1E` reader - Tamper 1 detection enable"]
pub type TAMP1E_R = crate::BitReader<TAMP1E_A>;
#[doc = "Tamper 1 detection enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TAMP1E_A {
    #[doc = "0: RTC_TAMPx input detection disabled"]
    Disabled = 0,
    #[doc = "1: RTC_TAMPx input detection enabled"]
    Enabled = 1,
}
impl From<TAMP1E_A> for bool {
    #[inline(always)]
    fn from(variant: TAMP1E_A) -> Self {
        variant as u8 != 0
    }
}
impl TAMP1E_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TAMP1E_A {
        match self.bits {
            false => TAMP1E_A::Disabled,
            true => TAMP1E_A::Enabled,
        }
    }
    #[doc = "RTC_TAMPx input detection disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == TAMP1E_A::Disabled
    }
    #[doc = "RTC_TAMPx input detection enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == TAMP1E_A::Enabled
    }
}
#[doc = "Field `TAMP1E` writer - Tamper 1 detection enable"]
pub type TAMP1E_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TAMP1E_A>;
impl<'a, REG, const O: u8> TAMP1E_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "RTC_TAMPx input detection disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(TAMP1E_A::Disabled)
    }
    #[doc = "RTC_TAMPx input detection enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(TAMP1E_A::Enabled)
    }
}
// TAMP1TRG: selects the active level/edge for tamper 1; the meaning of each
// value depends on the TAMPFLT filter setting (see the generated docs).
#[doc = "Field `TAMP1TRG` reader - Active level for tamper 1"]
pub type TAMP1TRG_R = crate::BitReader<TAMP1TRG_A>;
#[doc = "Active level for tamper 1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TAMP1TRG_A {
    #[doc = "0: If TAMPFLT = 00: RTC_TAMPx input rising edge triggers a tamper detection event. If TAMPFLT =\u{338} 00: RTC_TAMPx input staying low triggers a tamper detection event."]
    RisingEdge = 0,
    #[doc = "1: If TAMPFLT = 00: RTC_TAMPx input staying high triggers a tamper detection event. If TAMPFLT =\u{338} 00: RTC_TAMPx input falling edge triggers a tamper detection event"]
    FallingEdge = 1,
}
impl From<TAMP1TRG_A> for bool {
    #[inline(always)]
    fn from(variant: TAMP1TRG_A) -> Self {
        variant as u8 != 0
    }
}
impl TAMP1TRG_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TAMP1TRG_A {
        match self.bits {
            false => TAMP1TRG_A::RisingEdge,
            true => TAMP1TRG_A::FallingEdge,
        }
    }
    #[doc = "If TAMPFLT = 00: RTC_TAMPx input rising edge triggers a tamper detection event. If TAMPFLT =\u{338} 00: RTC_TAMPx input staying low triggers a tamper detection event."]
    #[inline(always)]
    pub fn is_rising_edge(&self) -> bool {
        *self == TAMP1TRG_A::RisingEdge
    }
    #[doc = "If TAMPFLT = 00: RTC_TAMPx input staying high triggers a tamper detection event. If TAMPFLT =\u{338} 00: RTC_TAMPx input falling edge triggers a tamper detection event"]
    #[inline(always)]
    pub fn is_falling_edge(&self) -> bool {
        *self == TAMP1TRG_A::FallingEdge
    }
}
#[doc = "Field `TAMP1TRG` writer - Active level for tamper 1"]
pub type TAMP1TRG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TAMP1TRG_A>;
impl<'a, REG, const O: u8> TAMP1TRG_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "If TAMPFLT = 00: RTC_TAMPx input rising edge triggers a tamper detection event. If TAMPFLT =\u{338} 00: RTC_TAMPx input staying low triggers a tamper detection event."]
    #[inline(always)]
    pub fn rising_edge(self) -> &'a mut crate::W<REG> {
        self.variant(TAMP1TRG_A::RisingEdge)
    }
    #[doc = "If TAMPFLT = 00: RTC_TAMPx input staying high triggers a tamper detection event. If TAMPFLT =\u{338} 00: RTC_TAMPx input falling edge triggers a tamper detection event"]
    #[inline(always)]
    pub fn falling_edge(self) -> &'a mut crate::W<REG> {
        self.variant(TAMP1TRG_A::FallingEdge)
    }
}
// TAMPIE: single-bit tamper interrupt enable (bit true => Enabled).
#[doc = "Field `TAMPIE` reader - Tamper interrupt enable"]
pub type TAMPIE_R = crate::BitReader<TAMPIE_A>;
#[doc = "Tamper interrupt enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TAMPIE_A {
    #[doc = "0: Tamper interrupt disabled"]
    Disabled = 0,
    #[doc = "1: Tamper interrupt enabled"]
    Enabled = 1,
}
impl From<TAMPIE_A> for bool {
    #[inline(always)]
    fn from(variant: TAMPIE_A) -> Self {
        variant as u8 != 0
    }
}
impl TAMPIE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TAMPIE_A {
        match self.bits {
            false => TAMPIE_A::Disabled,
            true => TAMPIE_A::Enabled,
        }
    }
    #[doc = "Tamper interrupt disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == TAMPIE_A::Disabled
    }
    #[doc = "Tamper interrupt enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == TAMPIE_A::Enabled
    }
}
#[doc = "Field `TAMPIE` writer - Tamper interrupt enable"]
pub type TAMPIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TAMPIE_A>;
impl<'a, REG, const O: u8> TAMPIE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Tamper interrupt disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(TAMPIE_A::Disabled)
    }
    #[doc = "Tamper interrupt enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(TAMPIE_A::Enabled)
    }
}
// Tamper 2 fields have identical semantics, so the generator re-exports
// the tamper 1 reader/writer types under the TAMP2* names.
#[doc = "Field `TAMP2E` reader - Tamper 2 detection enable"]
pub use TAMP1E_R as TAMP2E_R;
#[doc = "Field `TAMP2E` writer - Tamper 2 detection enable"]
pub use TAMP1E_W as TAMP2E_W;
#[doc = "Field `TAMP2TRG` reader - Active level for tamper 2"]
pub use TAMP1TRG_R as TAMP2TRG_R;
#[doc = "Field `TAMP2TRG` writer - Active level for tamper 2"]
pub use TAMP1TRG_W as TAMP2TRG_W;
// TAMPTS: single-bit control for saving a timestamp on tamper detection.
#[doc = "Field `TAMPTS` reader - Activate timestamp on tamper detection event"]
pub type TAMPTS_R = crate::BitReader<TAMPTS_A>;
#[doc = "Activate timestamp on tamper detection event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TAMPTS_A {
    #[doc = "0: Tamper detection event does not cause a timestamp to be saved"]
    NoSave = 0,
    #[doc = "1: Save timestamp on tamper detection event"]
    Save = 1,
}
impl From<TAMPTS_A> for bool {
    #[inline(always)]
    fn from(variant: TAMPTS_A) -> Self {
        variant as u8 != 0
    }
}
impl TAMPTS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TAMPTS_A {
        match self.bits {
            false => TAMPTS_A::NoSave,
            true => TAMPTS_A::Save,
        }
    }
    #[doc = "Tamper detection event does not cause a timestamp to be saved"]
    #[inline(always)]
    pub fn is_no_save(&self) -> bool {
        *self == TAMPTS_A::NoSave
    }
    #[doc = "Save timestamp on tamper detection event"]
    #[inline(always)]
    pub fn is_save(&self) -> bool {
        *self == TAMPTS_A::Save
    }
}
#[doc = "Field `TAMPTS` writer - Activate timestamp on tamper detection event"]
pub type TAMPTS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TAMPTS_A>;
impl<'a, REG, const O: u8> TAMPTS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Tamper detection event does not cause a timestamp to be saved"]
    #[inline(always)]
    pub fn no_save(self) -> &'a mut crate::W<REG> {
        self.variant(TAMPTS_A::NoSave)
    }
    #[doc = "Save timestamp on tamper detection event"]
    #[inline(always)]
    pub fn save(self) -> &'a mut crate::W<REG> {
        self.variant(TAMPTS_A::Save)
    }
}
// TAMPFREQ: 3-bit tamper sampling-frequency divider (RTCCLK / 2^(15 - value)).
#[doc = "Field `TAMPFREQ` reader - Tamper sampling frequency"]
pub type TAMPFREQ_R = crate::FieldReader<TAMPFREQ_A>;
#[doc = "Tamper sampling frequency\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum TAMPFREQ_A {
    #[doc = "0: RTCCLK / 32768 (1 Hz when RTCCLK = 32768 Hz)"]
    Div32768 = 0,
    #[doc = "1: RTCCLK / 16384 (2 Hz when RTCCLK = 32768 Hz)"]
    Div16384 = 1,
    #[doc = "2: RTCCLK / 8192 (4 Hz when RTCCLK = 32768 Hz)"]
    Div8192 = 2,
    #[doc = "3: RTCCLK / 4096 (8 Hz when RTCCLK = 32768 Hz)"]
    Div4096 = 3,
    #[doc = "4: RTCCLK / 2048 (16 Hz when RTCCLK = 32768 Hz)"]
    Div2048 = 4,
    #[doc = "5: RTCCLK / 1024 (32 Hz when RTCCLK = 32768 Hz)"]
    Div1024 = 5,
    #[doc = "6: RTCCLK / 512 (64 Hz when RTCCLK = 32768 Hz)"]
    Div512 = 6,
    #[doc = "7: RTCCLK / 256 (128 Hz when RTCCLK = 32768 Hz)"]
    Div256 = 7,
}
impl From<TAMPFREQ_A> for u8 {
    #[inline(always)]
    fn from(variant: TAMPFREQ_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for TAMPFREQ_A {
    type Ux = u8;
}
impl TAMPFREQ_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TAMPFREQ_A {
        // All eight 3-bit values are covered, so the catch-all is unreachable.
        match self.bits {
            0 => TAMPFREQ_A::Div32768,
            1 => TAMPFREQ_A::Div16384,
            2 => TAMPFREQ_A::Div8192,
            3 => TAMPFREQ_A::Div4096,
            4 => TAMPFREQ_A::Div2048,
            5 => TAMPFREQ_A::Div1024,
            6 => TAMPFREQ_A::Div512,
            7 => TAMPFREQ_A::Div256,
            _ => unreachable!(),
        }
    }
    #[doc = "RTCCLK / 32768 (1 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div32768(&self) -> bool {
        *self == TAMPFREQ_A::Div32768
    }
    #[doc = "RTCCLK / 16384 (2 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div16384(&self) -> bool {
        *self == TAMPFREQ_A::Div16384
    }
    #[doc = "RTCCLK / 8192 (4 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div8192(&self) -> bool {
        *self == TAMPFREQ_A::Div8192
    }
    #[doc = "RTCCLK / 4096 (8 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div4096(&self) -> bool {
        *self == TAMPFREQ_A::Div4096
    }
    #[doc = "RTCCLK / 2048 (16 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div2048(&self) -> bool {
        *self == TAMPFREQ_A::Div2048
    }
    #[doc = "RTCCLK / 1024 (32 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div1024(&self) -> bool {
        *self == TAMPFREQ_A::Div1024
    }
    #[doc = "RTCCLK / 512 (64 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div512(&self) -> bool {
        *self == TAMPFREQ_A::Div512
    }
    #[doc = "RTCCLK / 256 (128 Hz when RTCCLK = 32768 Hz)"]
    #[inline(always)]
    pub fn is_div256(&self) -> bool {
        *self == TAMPFREQ_A::Div256
    }
}
#[doc = "Field `TAMPFREQ` writer - Tamper sampling frequency"]
pub type TAMPFREQ_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O, TAMPFREQ_A>;
impl<'a, REG, const O: u8> TAMPFREQ_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "RTCCLK / 32768 (1 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div32768(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div32768)
}
#[doc = "RTCCLK / 16384 (2 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div16384(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div16384)
}
#[doc = "RTCCLK / 8192 (4 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div8192(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div8192)
}
#[doc = "RTCCLK / 4096 (8 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div4096(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div4096)
}
#[doc = "RTCCLK / 2048 (16 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div2048(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div2048)
}
#[doc = "RTCCLK / 1024 (32 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div1024(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div1024)
}
#[doc = "RTCCLK / 512 (64 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div512(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div512)
}
#[doc = "RTCCLK / 256 (128 Hz when RTCCLK = 32768 Hz)"]
#[inline(always)]
pub fn div256(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFREQ_A::Div256)
}
}
// svd2rust-generated accessors for the 2-bit TAMPFLT field (number of
// consecutive samples required before a tamper event fires).
#[doc = "Field `TAMPFLT` reader - Tamper filter count"]
pub type TAMPFLT_R = crate::FieldReader<TAMPFLT_A>;
#[doc = "Tamper filter count\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum TAMPFLT_A {
#[doc = "0: Tamper event is activated on edge of RTC_TAMPx input transitions to the active level (no internal pull-up on RTC_TAMPx input)"]
Immediate = 0,
#[doc = "1: Tamper event is activated after 2 consecutive samples at the active level"]
Samples2 = 1,
#[doc = "2: Tamper event is activated after 4 consecutive samples at the active level"]
Samples4 = 2,
#[doc = "3: Tamper event is activated after 8 consecutive samples at the active level"]
Samples8 = 3,
}
impl From<TAMPFLT_A> for u8 {
#[inline(always)]
fn from(variant: TAMPFLT_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for TAMPFLT_A {
type Ux = u8;
}
impl TAMPFLT_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TAMPFLT_A {
match self.bits {
0 => TAMPFLT_A::Immediate,
1 => TAMPFLT_A::Samples2,
2 => TAMPFLT_A::Samples4,
3 => TAMPFLT_A::Samples8,
// Only the low 2 bits reach this reader (`& 3` mask in `R::tampflt`),
// so 0..=3 is exhaustive.
_ => unreachable!(),
}
}
#[doc = "Tamper event is activated on edge of RTC_TAMPx input transitions to the active level (no internal pull-up on RTC_TAMPx input)"]
#[inline(always)]
pub fn is_immediate(&self) -> bool {
*self == TAMPFLT_A::Immediate
}
#[doc = "Tamper event is activated after 2 consecutive samples at the active level"]
#[inline(always)]
pub fn is_samples2(&self) -> bool {
*self == TAMPFLT_A::Samples2
}
#[doc = "Tamper event is activated after 4 consecutive samples at the active level"]
#[inline(always)]
pub fn is_samples4(&self) -> bool {
*self == TAMPFLT_A::Samples4
}
#[doc = "Tamper event is activated after 8 consecutive samples at the active level"]
#[inline(always)]
pub fn is_samples8(&self) -> bool {
*self == TAMPFLT_A::Samples8
}
}
#[doc = "Field `TAMPFLT` writer - Tamper filter count"]
pub type TAMPFLT_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, TAMPFLT_A>;
impl<'a, REG, const O: u8> TAMPFLT_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "Tamper event is activated on edge of RTC_TAMPx input transitions to the active level (no internal pull-up on RTC_TAMPx input)"]
#[inline(always)]
pub fn immediate(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFLT_A::Immediate)
}
#[doc = "Tamper event is activated after 2 consecutive samples at the active level"]
#[inline(always)]
pub fn samples2(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFLT_A::Samples2)
}
#[doc = "Tamper event is activated after 4 consecutive samples at the active level"]
#[inline(always)]
pub fn samples4(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFLT_A::Samples4)
}
#[doc = "Tamper event is activated after 8 consecutive samples at the active level"]
#[inline(always)]
pub fn samples8(self) -> &'a mut crate::W<REG> {
self.variant(TAMPFLT_A::Samples8)
}
}
// svd2rust-generated accessors for the 2-bit TAMPPRCH field (tamper precharge
// duration, in RTCCLK cycles).
#[doc = "Field `TAMPPRCH` reader - Tamper precharge duration"]
pub type TAMPPRCH_R = crate::FieldReader<TAMPPRCH_A>;
#[doc = "Tamper precharge duration\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum TAMPPRCH_A {
#[doc = "0: 1 RTCCLK cycle"]
Cycles1 = 0,
#[doc = "1: 2 RTCCLK cycles"]
Cycles2 = 1,
#[doc = "2: 4 RTCCLK cycles"]
Cycles4 = 2,
#[doc = "3: 8 RTCCLK cycles"]
Cycles8 = 3,
}
impl From<TAMPPRCH_A> for u8 {
#[inline(always)]
fn from(variant: TAMPPRCH_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for TAMPPRCH_A {
type Ux = u8;
}
impl TAMPPRCH_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TAMPPRCH_A {
match self.bits {
0 => TAMPPRCH_A::Cycles1,
1 => TAMPPRCH_A::Cycles2,
2 => TAMPPRCH_A::Cycles4,
3 => TAMPPRCH_A::Cycles8,
// Only the low 2 bits reach this reader (`& 3` mask in `R::tampprch`),
// so 0..=3 is exhaustive.
_ => unreachable!(),
}
}
#[doc = "1 RTCCLK cycle"]
#[inline(always)]
pub fn is_cycles1(&self) -> bool {
*self == TAMPPRCH_A::Cycles1
}
#[doc = "2 RTCCLK cycles"]
#[inline(always)]
pub fn is_cycles2(&self) -> bool {
*self == TAMPPRCH_A::Cycles2
}
#[doc = "4 RTCCLK cycles"]
#[inline(always)]
pub fn is_cycles4(&self) -> bool {
*self == TAMPPRCH_A::Cycles4
}
#[doc = "8 RTCCLK cycles"]
#[inline(always)]
pub fn is_cycles8(&self) -> bool {
*self == TAMPPRCH_A::Cycles8
}
}
#[doc = "Field `TAMPPRCH` writer - Tamper precharge duration"]
pub type TAMPPRCH_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, TAMPPRCH_A>;
impl<'a, REG, const O: u8> TAMPPRCH_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "1 RTCCLK cycle"]
#[inline(always)]
pub fn cycles1(self) -> &'a mut crate::W<REG> {
self.variant(TAMPPRCH_A::Cycles1)
}
#[doc = "2 RTCCLK cycles"]
#[inline(always)]
pub fn cycles2(self) -> &'a mut crate::W<REG> {
self.variant(TAMPPRCH_A::Cycles2)
}
#[doc = "4 RTCCLK cycles"]
#[inline(always)]
pub fn cycles4(self) -> &'a mut crate::W<REG> {
self.variant(TAMPPRCH_A::Cycles4)
}
#[doc = "8 RTCCLK cycles"]
#[inline(always)]
pub fn cycles8(self) -> &'a mut crate::W<REG> {
self.variant(TAMPPRCH_A::Cycles8)
}
}
// svd2rust-generated accessors for the single-bit TAMPPUDIS field
// (disables the internal pull-up/precharge on the RTC_TAMPx pins).
#[doc = "Field `TAMPPUDIS` reader - TAMPER pull-up disable"]
pub type TAMPPUDIS_R = crate::BitReader<TAMPPUDIS_A>;
#[doc = "TAMPER pull-up disable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TAMPPUDIS_A {
#[doc = "0: Precharge RTC_TAMPx pins before sampling (enable internal pull-up)"]
Enabled = 0,
#[doc = "1: Disable precharge of RTC_TAMPx pins"]
Disabled = 1,
}
impl From<TAMPPUDIS_A> for bool {
#[inline(always)]
fn from(variant: TAMPPUDIS_A) -> Self {
variant as u8 != 0
}
}
impl TAMPPUDIS_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TAMPPUDIS_A {
// Bit field: the bool maps 1:1 onto the two enum values.
match self.bits {
false => TAMPPUDIS_A::Enabled,
true => TAMPPUDIS_A::Disabled,
}
}
#[doc = "Precharge RTC_TAMPx pins before sampling (enable internal pull-up)"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == TAMPPUDIS_A::Enabled
}
#[doc = "Disable precharge of RTC_TAMPx pins"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == TAMPPUDIS_A::Disabled
}
}
#[doc = "Field `TAMPPUDIS` writer - TAMPER pull-up disable"]
pub type TAMPPUDIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TAMPPUDIS_A>;
impl<'a, REG, const O: u8> TAMPPUDIS_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Precharge RTC_TAMPx pins before sampling (enable internal pull-up)"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(TAMPPUDIS_A::Enabled)
}
#[doc = "Disable precharge of RTC_TAMPx pins"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(TAMPPUDIS_A::Disabled)
}
}
// svd2rust-generated accessors for the single-bit PC13VALUE field (output
// level forced on PC13 when the LSE is disabled and PC13MODE = 1).
#[doc = "Field `PC13VALUE` reader - PC13 value"]
pub type PC13VALUE_R = crate::BitReader<PC13VALUE_A>;
#[doc = "PC13 value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PC13VALUE_A {
#[doc = "0: If the LSE is disabled and PCxMODE = 1, set PCxVALUE to logic low"]
Low = 0,
#[doc = "1: If the LSE is disabled and PCxMODE = 1, set PCxVALUE to logic high"]
High = 1,
}
impl From<PC13VALUE_A> for bool {
#[inline(always)]
fn from(variant: PC13VALUE_A) -> Self {
variant as u8 != 0
}
}
impl PC13VALUE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PC13VALUE_A {
match self.bits {
false => PC13VALUE_A::Low,
true => PC13VALUE_A::High,
}
}
#[doc = "If the LSE is disabled and PCxMODE = 1, set PCxVALUE to logic low"]
#[inline(always)]
pub fn is_low(&self) -> bool {
*self == PC13VALUE_A::Low
}
#[doc = "If the LSE is disabled and PCxMODE = 1, set PCxVALUE to logic high"]
#[inline(always)]
pub fn is_high(&self) -> bool {
*self == PC13VALUE_A::High
}
}
#[doc = "Field `PC13VALUE` writer - PC13 value"]
pub type PC13VALUE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PC13VALUE_A>;
impl<'a, REG, const O: u8> PC13VALUE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "If the LSE is disabled and PCxMODE = 1, set PCxVALUE to logic low"]
#[inline(always)]
pub fn low(self) -> &'a mut crate::W<REG> {
self.variant(PC13VALUE_A::Low)
}
#[doc = "If the LSE is disabled and PCxMODE = 1, set PCxVALUE to logic high"]
#[inline(always)]
pub fn high(self) -> &'a mut crate::W<REG> {
self.variant(PC13VALUE_A::High)
}
}
// svd2rust-generated accessors for the single-bit PC13MODE field (whether
// PC13 follows the GPIO configuration or is forced push-pull when LSE is off).
#[doc = "Field `PC13MODE` reader - PC13 mode"]
pub type PC13MODE_R = crate::BitReader<PC13MODE_A>;
#[doc = "PC13 mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PC13MODE_A {
#[doc = "0: PCx is controlled by the GPIO configuration Register. Consequently PC15 is floating in Standby mode"]
Floating = 0,
#[doc = "1: PCx is forced to push-pull output if LSE is disabled"]
PushPull = 1,
}
impl From<PC13MODE_A> for bool {
#[inline(always)]
fn from(variant: PC13MODE_A) -> Self {
variant as u8 != 0
}
}
impl PC13MODE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PC13MODE_A {
match self.bits {
false => PC13MODE_A::Floating,
true => PC13MODE_A::PushPull,
}
}
#[doc = "PCx is controlled by the GPIO configuration Register. Consequently PC15 is floating in Standby mode"]
#[inline(always)]
pub fn is_floating(&self) -> bool {
*self == PC13MODE_A::Floating
}
#[doc = "PCx is forced to push-pull output if LSE is disabled"]
#[inline(always)]
pub fn is_push_pull(&self) -> bool {
*self == PC13MODE_A::PushPull
}
}
#[doc = "Field `PC13MODE` writer - PC13 mode"]
pub type PC13MODE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PC13MODE_A>;
impl<'a, REG, const O: u8> PC13MODE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "PCx is controlled by the GPIO configuration Register. Consequently PC15 is floating in Standby mode"]
#[inline(always)]
pub fn floating(self) -> &'a mut crate::W<REG> {
self.variant(PC13MODE_A::Floating)
}
#[doc = "PCx is forced to push-pull output if LSE is disabled"]
#[inline(always)]
pub fn push_pull(self) -> &'a mut crate::W<REG> {
self.variant(PC13MODE_A::PushPull)
}
}
// PC14/PC15 have the same single-bit MODE/VALUE layout as PC13, so their
// reader/writer types are aliases of the PC13 types rather than duplicates.
#[doc = "Field `PC14MODE` reader - PC 14 mode"]
pub use PC13MODE_R as PC14MODE_R;
#[doc = "Field `PC15MODE` reader - PC15 mode"]
pub use PC13MODE_R as PC15MODE_R;
#[doc = "Field `PC14MODE` writer - PC 14 mode"]
pub use PC13MODE_W as PC14MODE_W;
#[doc = "Field `PC15MODE` writer - PC15 mode"]
pub use PC13MODE_W as PC15MODE_W;
#[doc = "Field `PC14VALUE` reader - PC14 value"]
pub use PC13VALUE_R as PC14VALUE_R;
#[doc = "Field `PC15VALUE` reader - PC15 value"]
pub use PC13VALUE_R as PC15VALUE_R;
#[doc = "Field `PC14VALUE` writer - PC14 value"]
pub use PC13VALUE_W as PC14VALUE_W;
#[doc = "Field `PC15VALUE` writer - PC15 value"]
pub use PC13VALUE_W as PC15VALUE_W;
// Read API: each method shifts/masks the captured TAFCR value (`self.bits`)
// down to one field and wraps it in the field's typed reader.
impl R {
#[doc = "Bit 0 - Tamper 1 detection enable"]
#[inline(always)]
pub fn tamp1e(&self) -> TAMP1E_R {
TAMP1E_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - Active level for tamper 1"]
#[inline(always)]
pub fn tamp1trg(&self) -> TAMP1TRG_R {
TAMP1TRG_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 2 - Tamper interrupt enable"]
#[inline(always)]
pub fn tampie(&self) -> TAMPIE_R {
TAMPIE_R::new(((self.bits >> 2) & 1) != 0)
}
#[doc = "Bit 3 - Tamper 2 detection enable"]
#[inline(always)]
pub fn tamp2e(&self) -> TAMP2E_R {
TAMP2E_R::new(((self.bits >> 3) & 1) != 0)
}
#[doc = "Bit 4 - Active level for tamper 2"]
#[inline(always)]
pub fn tamp2trg(&self) -> TAMP2TRG_R {
TAMP2TRG_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bit 7 - Activate timestamp on tamper detection event"]
#[inline(always)]
pub fn tampts(&self) -> TAMPTS_R {
TAMPTS_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bits 8:10 - Tamper sampling frequency"]
#[inline(always)]
pub fn tampfreq(&self) -> TAMPFREQ_R {
TAMPFREQ_R::new(((self.bits >> 8) & 7) as u8)
}
#[doc = "Bits 11:12 - Tamper filter count"]
#[inline(always)]
pub fn tampflt(&self) -> TAMPFLT_R {
TAMPFLT_R::new(((self.bits >> 11) & 3) as u8)
}
#[doc = "Bits 13:14 - Tamper precharge duration"]
#[inline(always)]
pub fn tampprch(&self) -> TAMPPRCH_R {
TAMPPRCH_R::new(((self.bits >> 13) & 3) as u8)
}
#[doc = "Bit 15 - TAMPER pull-up disable"]
#[inline(always)]
pub fn tamppudis(&self) -> TAMPPUDIS_R {
TAMPPUDIS_R::new(((self.bits >> 15) & 1) != 0)
}
#[doc = "Bit 18 - PC13 value"]
#[inline(always)]
pub fn pc13value(&self) -> PC13VALUE_R {
PC13VALUE_R::new(((self.bits >> 18) & 1) != 0)
}
#[doc = "Bit 19 - PC13 mode"]
#[inline(always)]
pub fn pc13mode(&self) -> PC13MODE_R {
PC13MODE_R::new(((self.bits >> 19) & 1) != 0)
}
#[doc = "Bit 20 - PC14 value"]
#[inline(always)]
pub fn pc14value(&self) -> PC14VALUE_R {
PC14VALUE_R::new(((self.bits >> 20) & 1) != 0)
}
#[doc = "Bit 21 - PC 14 mode"]
#[inline(always)]
pub fn pc14mode(&self) -> PC14MODE_R {
PC14MODE_R::new(((self.bits >> 21) & 1) != 0)
}
#[doc = "Bit 22 - PC15 value"]
#[inline(always)]
pub fn pc15value(&self) -> PC15VALUE_R {
PC15VALUE_R::new(((self.bits >> 22) & 1) != 0)
}
#[doc = "Bit 23 - PC15 mode"]
#[inline(always)]
pub fn pc15mode(&self) -> PC15MODE_R {
PC15MODE_R::new(((self.bits >> 23) & 1) != 0)
}
}
// Write API: each method returns a typed writer proxy; the const generic on
// the writer type is the field's bit offset within TAFCR.
impl W {
#[doc = "Bit 0 - Tamper 1 detection enable"]
#[inline(always)]
#[must_use]
pub fn tamp1e(&mut self) -> TAMP1E_W<TAFCR_SPEC, 0> {
TAMP1E_W::new(self)
}
#[doc = "Bit 1 - Active level for tamper 1"]
#[inline(always)]
#[must_use]
pub fn tamp1trg(&mut self) -> TAMP1TRG_W<TAFCR_SPEC, 1> {
TAMP1TRG_W::new(self)
}
#[doc = "Bit 2 - Tamper interrupt enable"]
#[inline(always)]
#[must_use]
pub fn tampie(&mut self) -> TAMPIE_W<TAFCR_SPEC, 2> {
TAMPIE_W::new(self)
}
#[doc = "Bit 3 - Tamper 2 detection enable"]
#[inline(always)]
#[must_use]
pub fn tamp2e(&mut self) -> TAMP2E_W<TAFCR_SPEC, 3> {
TAMP2E_W::new(self)
}
#[doc = "Bit 4 - Active level for tamper 2"]
#[inline(always)]
#[must_use]
pub fn tamp2trg(&mut self) -> TAMP2TRG_W<TAFCR_SPEC, 4> {
TAMP2TRG_W::new(self)
}
#[doc = "Bit 7 - Activate timestamp on tamper detection event"]
#[inline(always)]
#[must_use]
pub fn tampts(&mut self) -> TAMPTS_W<TAFCR_SPEC, 7> {
TAMPTS_W::new(self)
}
#[doc = "Bits 8:10 - Tamper sampling frequency"]
#[inline(always)]
#[must_use]
pub fn tampfreq(&mut self) -> TAMPFREQ_W<TAFCR_SPEC, 8> {
TAMPFREQ_W::new(self)
}
#[doc = "Bits 11:12 - Tamper filter count"]
#[inline(always)]
#[must_use]
pub fn tampflt(&mut self) -> TAMPFLT_W<TAFCR_SPEC, 11> {
TAMPFLT_W::new(self)
}
#[doc = "Bits 13:14 - Tamper precharge duration"]
#[inline(always)]
#[must_use]
pub fn tampprch(&mut self) -> TAMPPRCH_W<TAFCR_SPEC, 13> {
TAMPPRCH_W::new(self)
}
#[doc = "Bit 15 - TAMPER pull-up disable"]
#[inline(always)]
#[must_use]
pub fn tamppudis(&mut self) -> TAMPPUDIS_W<TAFCR_SPEC, 15> {
TAMPPUDIS_W::new(self)
}
#[doc = "Bit 18 - PC13 value"]
#[inline(always)]
#[must_use]
pub fn pc13value(&mut self) -> PC13VALUE_W<TAFCR_SPEC, 18> {
PC13VALUE_W::new(self)
}
#[doc = "Bit 19 - PC13 mode"]
#[inline(always)]
#[must_use]
pub fn pc13mode(&mut self) -> PC13MODE_W<TAFCR_SPEC, 19> {
PC13MODE_W::new(self)
}
#[doc = "Bit 20 - PC14 value"]
#[inline(always)]
#[must_use]
pub fn pc14value(&mut self) -> PC14VALUE_W<TAFCR_SPEC, 20> {
PC14VALUE_W::new(self)
}
#[doc = "Bit 21 - PC 14 mode"]
#[inline(always)]
#[must_use]
pub fn pc14mode(&mut self) -> PC14MODE_W<TAFCR_SPEC, 21> {
PC14MODE_W::new(self)
}
#[doc = "Bit 22 - PC15 value"]
#[inline(always)]
#[must_use]
pub fn pc15value(&mut self) -> PC15VALUE_W<TAFCR_SPEC, 22> {
PC15VALUE_W::new(self)
}
#[doc = "Bit 23 - PC15 mode"]
#[inline(always)]
#[must_use]
pub fn pc15mode(&mut self) -> PC15MODE_W<TAFCR_SPEC, 23> {
PC15MODE_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// Unsafe because raw writes bypass the typed field writers above; the
// caller is responsible for supplying a valid register value.
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "tamper and alternate function configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tafcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tafcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TAFCR_SPEC;
impl crate::RegisterSpec for TAFCR_SPEC {
// TAFCR is a 32-bit register.
type Ux = u32;
}
#[doc = "`read()` method returns [`tafcr::R`](R) reader structure"]
impl crate::Readable for TAFCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`tafcr::W`](W) writer structure"]
impl crate::Writable for TAFCR_SPEC {
// No fields in this register are cleared/set by writing 1 or 0 during
// `modify`, hence both bitmaps are zero.
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets TAFCR to value 0"]
impl crate::Resettable for TAFCR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
// Crate root: local modules plus third-party crates (checksec / goblin /
// ropr) re-exported so downstream users get them from a single dependency.
pub mod error;
pub mod process;
pub mod util;
pub use checksec;
pub use goblin;
pub use ropr;
|
// Pre-1.0 Rust feature gates required to build a freestanding (no_std) kernel
// on this toolchain vintage.
#![feature(no_std)]
#![feature(lang_items)]
#![feature(core)]
#![feature(asm)]
#![no_std]
use prelude::*;
#[macro_use]
extern crate core;
// Fake `std` facade: re-export the `core` modules that built-in macros and
// derives expect to find under `std::` when the real standard library is absent.
mod std {
pub use core::fmt;
pub use core::cmp;
pub use core::ops;
pub use core::iter;
pub use core::option;
pub use core::marker;
}
mod prelude;
#[macro_use]
mod macros;
// Platform code lives outside this source directory; explicit #[path] wires it in.
#[path="../port/mod.rs"]
pub mod port;
#[path="../pc/mod.rs"]
pub mod pc;
// Kernel entry point (registered as the `start` lang item, unmangled so the
// bootstrap code can call it). Logs a banner plus whatever multiboot
// information the bootloader provided.
#[lang="start"]
#[no_mangle]
pub extern "C" fn main()
{
let mb = ::pc::multiboot::multibootptr;
log!("Plan 9");
log!("multiboot flags = {:032b}", mb.flags);
// flags bit 0: memory-map information is present
if (mb.flags & 1 << 0) != 0 {
log!("have memory map");
} else {
log!("no memory map");
}
// flags bit 1: the BIOS boot-device bytes are valid
if (mb.flags & 1 << 1) != 0 {
let b = mb.boot_device;
log!("boot device: {} {} {} {}", b[0], b[1], b[2], b[3]);
} else {
log!("no boot device");
}
log!("cli arguments: {}", cmdline());
log!("bootloader: {}", bootloader());
}
// Turn a NUL-terminated C string pointer into a byte slice. Uses the pre-1.0
// `core::raw::Slice` repr + transmute (the era's equivalent of
// `slice::from_raw_parts`). Always returns `Some` today; the `Option` return
// leaves room for validation later.
fn c2str(c_str: *const u8) -> Option<&'static [u8]>
{
unsafe {
// SAFETY(review): assumes `c_str` points at valid, NUL-terminated memory
// that lives for 'static — confirm at the call sites (multiboot data).
let mut ptr = c_str;
while *ptr != 0 {
ptr = ptr.offset(1);
}
Some(::core::mem::transmute(::core::raw::Slice{data: c_str, len: ptr as usize - c_str as usize}))
}
}
// Bootloader name from the multiboot info block, or "" when the field is
// absent (flags bit 9 unset) / "invalid" when the bytes are not UTF-8.
fn bootloader() -> &'static str
{
let mb = ::pc::multiboot::multibootptr;
if (mb.flags & 1 << 9) == 0 {
return "";
}
let paddr = mb.boot_loader_name as usize;
// Physical -> virtual address: 0xFFFFFFFF_80000000 is presumably the
// kernel's higher-half mapping base — confirm against the paging setup.
let cptr = (paddr + 0xFFFFFFFF_80000000) as *const u8;
if let Some(s) = c2str(cptr) {
::core::str::from_utf8(s).ok().unwrap_or("invalid")
} else {
""
}
}
// Kernel command line from the multiboot info block, or "" when absent
// (flags bit 2 unset) / "invalid" when the bytes are not UTF-8.
fn cmdline() -> &'static str
{
let mb = ::pc::multiboot::multibootptr;
if (mb.flags & 1 << 2) == 0 {
return "";
}
let paddr = mb.cmdline as usize;
// Same physical -> higher-half virtual translation as in bootloader().
let cptr = (paddr + 0xFFFFFFFF_80000000) as *const u8;
if let Some(s) = c2str(cptr) {
::core::str::from_utf8(s).ok().unwrap_or("invalid")
} else {
""
}
}
// vim: ft=rust
|
extern crate std;
use app;
/// Create the directory at `directory`.
///
/// Propagates any `io::Error` from the filesystem (e.g. the directory already
/// exists or the parent path is missing).
pub fn new_directory(directory: &str) -> std::io::Result<()> {
    std::fs::create_dir(directory)
}
/// Create (or truncate) an empty file at `file_path`.
///
/// Propagates any `io::Error` from the write.
pub fn new_file(file_path: &str) -> std::io::Result<()> {
    std::fs::write(file_path, b"")
}
/// Create an empty scratch file named after the current Unix timestamp and
/// return its path.
///
/// NOTE(review): the hard-coded `/temp` directory (not `/tmp`) and the
/// `unwrap()`s mean this panics if that directory does not exist — confirm
/// the intended location.
pub fn temp_file() -> String {
    let now = std::time::SystemTime::now();
    let timestamp = now
        .duration_since(std::time::SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    let file_path = format!("/temp/file_{}", timestamp);
    new_file(&file_path).unwrap();
    file_path
}
/// Read the entire file at `file_path` as raw bytes.
///
/// Panics (unwrap) if the file cannot be read.
pub fn file_get_contents(file_path: &str) -> std::vec::Vec<u8> {
    std::fs::read(file_path).unwrap()
}
/// Write `content` to `file_path`, replacing any previous contents.
///
/// Always returns `true`; panics (unwrap) if the write fails.
pub fn file_put_contents(file_path: &str, content: &str) -> bool {
    std::fs::write(file_path, content).unwrap();
    true
}
/// Print a `kind - content` line to stdout, but only when the application's
/// DEBUG flag is enabled.
pub fn log(kind: &str, content: &str) {
    if !app::config::DEBUG {
        return;
    }
    println!("{:} - {:}", kind, content);
}
|
use std::fmt::Debug;
use bitflags::{ bitflags, __bitflags, __impl_bitflags };
use crate::prelude::*;
use crate::scene::Scene;
use crate::interaction::{ Interactions, BaseInteraction, Sample };
use crate::sampler::Sampler;
use crate::math::*;
use crate::math::Transform;
mod point;
pub use self::point::PointLight;
// Classification flags for light sources; kept inside bitflags! verbatim
// since the macro generates the impls from this exact token structure.
// Names follow pbrt's LightFlags — presumably Area = surface emitter,
// Infinite = environment-style light; confirm against the implementors.
bitflags! {
pub struct LightType: u8 {
/// The light uses a Delta Function for its position.
/// This means that it cannot be intersected by chance.
#[cfg_attr(feature = "cargo-clippy", allow(identity_op))]
const DeltaPosition = 1 << 0;
const DeltaDirection = 1 << 1;
const Area = 1 << 2;
const Infinite = 1 << 3;
}
}
pub trait Light: Debug {
fn ty(&self) -> LightType;
fn is_delta_light(&self) -> bool {
self.ty().intersects(LightType::DeltaPosition | LightType::DeltaDirection)
}
fn num_samples(&self) -> u32 {
1
}
fn medium_interface(&self) -> Option<()>;
fn light_to_world(&self) -> &Transform;
fn world_to_light(&self) -> &Transform;
fn preprocess(&mut self, _: &Scene) {
}
fn le(&self, ray: &Ray) -> Spectrum;
/// Returns the radiance arriving at the `isect` point and time,
/// assuming there are no occluding objects between them.
/// The `VisibilityTester` is not returned if the radiance is black,
/// as in this case, visibility is irrelevant.
fn sample_li(&self, isect: &Interactions<'a>, sample: Point2f) -> (Sample, Option<VisibilityTester>);
fn power(&self) -> Spectrum;
}
/// A pair of scene points whose mutual visibility can be tested with a
/// shadow ray — typically `p0` is the shading point and `p1` the sampled
/// point on a light.
#[derive(Debug)]
pub struct VisibilityTester {
pub p0: BaseInteraction,
pub p1: BaseInteraction,
}
impl VisibilityTester {
/// Build a tester for the segment `p0` -> `p1`.
pub fn new(p0: BaseInteraction, p1: BaseInteraction) -> Self {
Self { p0, p1 }
}
// Shadow ray from p0 towards p1 (p1 is cloned because spawn_ray_to
// takes it by value).
#[inline(always)]
fn ray(&self) -> Ray {
self.p0.spawn_ray_to(self.p1.clone())
}
// True when nothing in the scene blocks the segment p0 -> p1.
#[inline(always)]
pub fn unoccluded(&self, scene: &Scene) -> bool {
!scene.intersect_p(&self.ray())
}
// Transmittance along p0 -> p1, skipping material-less (invisible)
// surfaces. NOTE(review): `tr` is never updated because medium
// transmittance is still a todo below, so this currently returns
// either 1.0 (visible) or 0.0 (occluded).
pub fn tr(&self, scene: &Scene, _sampler: &dyn Sampler) -> Spectrum {
let mut ray = self.ray();
let tr = Spectrum::new(1.0);
loop {
let isect = scene.intersect(&mut ray);
// if the ray intersects something
// and that something has a material
// then the ray is occluded
if let Some(isect) = &isect {
if isect.primitive.map_or(false, |p| p.get_material().is_some()) {
return Spectrum::new(0.0);
}
}
// todo - transmittance for current ray segment in medium
if let Some(_medium) = ray.medium {
// tr *= ray.medium.tr(ray, sampler)
}
// if no intersection is found then the ray has got to
// p1!
// else - we've hit an invisible surface, so start
// tracing again from that surface -> p1
match &isect {
Some(isect) => ray = isect.spawn_ray_to(self.p1.clone()),
None => break,
}
}
tr
}
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// UsageIndexedSpansResponse : A response containing indexed spans usage.
/// UsageIndexedSpansResponse : A response containing indexed spans usage.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageIndexedSpansResponse {
/// Array with the number of hourly traces indexed for a given organization.
// `skip_serializing_if` drops the field entirely from the JSON when None.
#[serde(rename = "usage", skip_serializing_if = "Option::is_none")]
pub usage: Option<Vec<crate::models::UsageIndexedSpansHour>>,
}
impl UsageIndexedSpansResponse {
    /// A response containing indexed spans usage.
    pub fn new() -> UsageIndexedSpansResponse {
        UsageIndexedSpansResponse {
            usage: None,
        }
    }
}
/// `Default` mirrors `new()` so the type composes with `..Default::default()`
/// and generic code requiring `Default` (addresses clippy::new_without_default).
impl Default for UsageIndexedSpansResponse {
    fn default() -> Self {
        Self::new()
    }
}
|
use specs::{Component, VecStorage};
/// ECS component holding the raw data of a single note event.
#[derive(Debug, Component)]
#[storage(VecStorage)]
pub struct Note {
// Vertical row of the note — presumably 0-based grid layer; confirm
// against the map format this is loaded from.
pub line_layer: i32,
// Horizontal column of the note.
pub line_index: i32,
// Raw note-kind discriminant from the source data; meaning not evident here.
pub note_type: i32,
// Event time — units (seconds vs. beats) not evident here; confirm.
pub time: f32,
// Raw direction code from the source data.
pub direction: i32,
}
|
use uuid::Uuid;
use serde::{Serialize, Deserialize, };
use std::collections::HashMap;
/// Boolean expression tree evaluated against a state.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
enum Predicate {
AND(Vec<Predicate>),
OR(Vec<Predicate>),
Not(Box<Predicate>),
TRUE,
FALSE,
EQ(SPValue, SPValue), // use SPValue::ID to fetch the value from the state
NEQ(SPValue, SPValue),
// Membership test: first value must be one of the listed values.
INDOMAIN(SPValue, Vec<SPValue>)
}
/// Tagged value type covering the primitive, time and container values a
/// state variable can hold.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
enum SPValue {
Bool(bool),
//byte(u8), deprecated
//char(char), deprecated
Float32(f32),
Float64(f64),
Int8(i8),
Uint8(u8),
Int16(i16),
Uint16(u16),
Int32(i32),
Uint32(u32),
Int64(i64),
Uint64(u64),
String(String),
Time(u32),
Duration(u32),
ID(Uuid), // use to also find the value in a state of variable with id
Array(Vec<SPValue>),
Map(HashMap<String, SPValue>)
}
/// A state snapshot: string keys (presumably variable names/paths — confirm
/// with the runner) mapped to their current values.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
struct SPState {
s: HashMap<String, SPValue>
}
//#[derive(Debug, PartialEq)]
// It was hard to define something that stores general functions
// struct StateTransformation {
// tf: Fn(SPState) -> SPState, // will be used by the runner
// to_action: Fn(SPState -> Vec<Action>) // will be used for converting to a formal model
// }
/// Assignment of a computed value to the variable identified by `var`.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
struct Action {
var: Uuid,
value: Compute
}
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
/// Used in actions to compute a new SPValue
enum Compute {
Value(SPValue),
// Read the current value of the variable with this id.
Get(Uuid),
// TakeNext/TakeBefore: pick the neighbouring element of the given list
// relative to the variable's current value — presumably cyclic; confirm
// with the runner implementation.
TakeNext(Uuid, Vec<SPValue>),
TakeBefore(Uuid, Vec<SPValue>),
Add(Box<Compute>, Box<Compute>),
Sub(Box<Compute>, Box<Compute>),
// Join: combine two computed values — semantics defined by the runner.
Join(Box<Compute>, Box<Compute>),
}
#[cfg(test)]
mod testing_predicates {
use super::Predicate::*;
use super::*;
// Smoke test: serializes a predicate tree into SPAttributes and builds an
// Action; asserts nothing beyond "does not panic".
#[test]
fn testing() {
let test = AND(vec!(OR(vec!(TRUE))));
let kalle = SPAttributes::make(test).unwrap();
kalle.test();
let id = uuid::Uuid::new_v4();
let a = Action{var: id, value: Compute::Get(id)};
println!("{:?}", a)
}
}
/// Aliases over serde_json so the JSON backend can be swapped in one place.
type SPJson = serde_json::Value;
type SPJsonError = serde_json::Error;
/// Free-form JSON attributes attached to model objects.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
struct SPAttributes {
attr: SPJson
}
impl SPAttributes {
    /// Debug-print the wrapped JSON value.
    fn test(&self) {
        println!("{:?}", &self.attr)
    }
    /// Serialize any `Serialize` value into an `SPAttributes` wrapper,
    /// propagating serde_json conversion errors.
    pub fn make<T>(value: T) -> Result<SPAttributes, SPJsonError> where T: Serialize {
        match serde_json::to_value(value) {
            Ok(json) => Ok(SPAttributes { attr: json }),
            Err(e) => Err(e),
        }
    }
}
/// Base data shared by identifiable model objects: id, name and free-form
/// JSON attributes.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct SPObject {
id: Uuid,
name: String,
attributes: SPAttributes,
}
/// Accessors for objects that carry SPObject-style identity data.
trait IDAble {
fn id(&self) -> Uuid;
fn name(&self) -> String;
fn attributes(&self) -> SPAttributes; // use serde Value enum
}
/// A state variable: identity plus its allowed value domain and initial value.
#[derive(Debug, PartialEq)]
struct SPVariable {
sp: SPObject,
domain: Vec<SPValue>,
init: SPValue
}
#[cfg(test)]
mod mytest {
use super::Predicate::*;
use super::*;
// Duplicate of testing_predicates::testing minus the Action part;
// candidate for removal once consolidated.
#[test]
fn testing() {
let test = AND(vec!(OR(vec!(TRUE))));
let kalle = SPAttributes::make(test).unwrap();
kalle.test()
}
}
//! Common utilities
use rand::Rng;
// Public types
// Flat f32 arrays mirroring glMatrix's JS typed arrays — element ordering
// (presumably column-major, as in glMatrix) should be confirmed at call sites.
/// 2x2 Matrix
pub type Mat2 = [f32; 4];
/// 2x3 Matrix
pub type Mat2d = [f32; 6];
/// 3x3 Matrix
pub type Mat3 = [f32; 9];
/// 4x4 Matrix
pub type Mat4 = [f32; 16];
/// Quaternion
pub type Quat = [f32; 4];
/// Dual Quaternion
pub type Quat2 = [f32; 8];
/// 2 Dimensional Vector
pub type Vec2 = [f32; 2];
/// 3 Dimensional Vector
pub type Vec3 = [f32; 3];
/// 4 Dimensional Vector
pub type Vec4 = [f32; 4];
// Configuration Constants
/// Archimedes' constant (π).
///
/// [core::f32::consts::PI](https://doc.rust-lang.org/core/f32/consts/constant.PI.html)
pub static PI: f32 = core::f32::consts::PI;
/// This is the difference between 1.0 and the next largest representable number.
///
/// our value is ```0.000001```.
pub static EPSILON: f32 = 0.000001;
/// Infinity (∞).
///
/// [f32::INFINITY](https://doc.rust-lang.org/std/primitive.f32.html#associatedconstant.INFINITY)
// Fix: was `1.0_f32 / 0.0_f32`, producing infinity via float division by
// zero; the standard constant is the same value, stated directly.
pub static INFINITY: f32 = f32::INFINITY;
/// Negative infinity (-∞).
///
/// [f32::NEG_INFINITY](https://doc.rust-lang.org/std/primitive.f32.html#associatedconstant.NEG_INFINITY)
pub static NEG_INFINITY: f32 = f32::NEG_INFINITY;
// We don't have a set_matrix_array_type
// we only support f32
/// Multiplier converting degrees to radians (π / 180).
static DEGREE: f32 = PI / 180.0_f32;
/// Convert Degree to Radian.
///
/// [glMatrix Documentation](http://glmatrix.net/docs/module-glMatrix.html)
pub fn to_radian(a: f32) -> f32 {
    // Same constant expression as the module-level DEGREE (π / 180), inlined.
    a * (core::f32::consts::PI / 180.0_f32)
}
/// Tests whether or not the arguments have approximately the same value, within an absolute
/// or relative tolerance of common::EPSILON (an absolute tolerance is used for values less
/// than or equal to 1.0, and a relative tolerance is used for larger values).
///
/// [glMatrix Documentation](http://glmatrix.net/docs/module-glMatrix.html)
pub fn equals(a: f32, b: f32) -> bool {
    let diff = (a - b).abs();
    let scale = 1.0_f32.max(a.abs().max(b.abs()));
    diff <= EPSILON * scale
}
/// The hypot function returns the square root of the sum of squares of the array elements.
///
/// Returns `0.0` for an empty slice.
///
/// [MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/hypot)
pub fn hypot(arguments: &[f32]) -> f32 {
    // Iterator form of the original index loop: `x * x` equals `x.powi(2)`
    // exactly, and the left-to-right summation order is preserved, so the
    // result is bit-identical while avoiding per-element bounds checks.
    arguments.iter().map(|x| x * x).sum::<f32>().sqrt()
}
/// A random f32 between 0-1.
pub fn random_f32() -> f32 {
let mut rng = rand::thread_rng();
// f64 gives a uniform distribution over 0-1
// f32 gives random numbers over the entire f32 space
// however we want a f32 between 0-1
// NOTE(review): rand's Standard distribution already yields f32 in [0, 1),
// so the f64 detour is harmless but likely unnecessary — confirm the rand
// version this was written against.
let r_f32: f64 = rng.gen();
// convert the f64 to f32 so we can use it
let r_f32 = r_f32 as f32;
r_f32
}
#[cfg(test)]
mod tests {
use super::*;
// 80° == 1.3962634 rad (80 * π / 180 in f32).
#[test]
fn degrees_to_radian() {
let deg = 80_f32;
let rad = to_radian(deg);
assert_eq!(1.3962634, rad);
}
// Difference of 1e-5 exceeds the relative tolerance around 1.0.
#[test]
fn epsilon_equals_false() {
let a = 1.00001_f32;
let b = 1_f32;
assert!(!equals(a, b));
}
// Difference of 1e-6 is exactly at the tolerance, so it passes.
#[test]
fn epsilon_equals_true() {
let a = 1.000001_f32;
let b = 1_f32;
assert!(equals(a, b));
}
// Our slice-based hypot must agree with std's two-argument f32::hypot.
#[test]
fn get_hypot() {
let x = 2.0_f32;
let y = 3.0_f32;
let vec2: [f32; 2] = [x, y];
assert_eq!(x.hypot(y), hypot(&vec2));
}
// Sanity check on the documented [0, 1] range (single sample only).
#[test]
fn random_f32_between_zero_and_one() {
let r = random_f32();
assert!(r >= 0_f32 && r <= 1_f32);
}
}
|
extern crate clap;
use clap::{App, Arg, ArgMatches};
extern crate reqwest;
use reqwest::{Client, Response, Url};
#[macro_use] extern crate hyper;
use hyper::header::Headers;
extern crate serde_json;
use std::collections::BTreeMap;
use std::error::Error;
use std::fmt;
use std::fs::{File, OpenOptions};
use std::io;
use std::io::prelude::*;
use std::str;
use std::vec::Vec;
#[cfg(test)]
mod tests {
    // Placeholder: keeps the test harness wired up even though no behavior
    // is exercised yet.
    #[test]
    fn it_works() {
    }
}
/// Build the `gloss` command-line interface and parse the process arguments.
///
/// One positional argument (the word to define) plus flags for column count,
/// definitions-only / etymologies-only output, batch definition from a file,
/// listing defined / undefined stored words, and removing a stored word.
/// NOTE: the declaration order below is user-visible in `--help` output.
pub fn new_app<'a>() -> ArgMatches<'a> {
    App::new("gloss").
        version("0.1").
        // Positional argument: the word to look up.
        arg(Arg::with_name("headword").
            takes_value(true).
            index(1).
            help("word to define")).
        arg(Arg::with_name("columns").
            short("c").
            long("columns").
            default_value("1").
            help("amount of columns when listing lexemes")).
        arg(Arg::with_name("definitions").
            short("d").
            long("definitions").
            help("just show the definitions part of the JSON")).
        arg(Arg::with_name("etym").
            short("e").
            long("etym").
            help("just show the etymologies part of the JSON")).
        arg(Arg::with_name("file").
            short("f").
            long("file").
            takes_value(true).
            help("file containing words to define, one word per line")).
        arg(Arg::with_name("lexemes").
            short("l").
            long("lexemes").
            help("list stored words which have definitions")).
        arg(Arg::with_name("nonlexemes").
            short("n").
            long("nonlexemes").
            help("list stored words which do not have definitions")).
        arg(Arg::with_name("remove").
            takes_value(true).
            short("r").
            long("remove").
            help("erase any data stored for a word")).
        get_matches()
}
/// Application-level error carrying a human-readable message.
#[derive(Debug)]
struct GlossError {
    // The message describing what went wrong; exposed via `Error::description`.
    err_string: String
}
impl fmt::Display for GlossError {
    /// Formats the error including its stored message.
    ///
    /// Previously this printed only the literal type name "GlossError",
    /// discarding `err_string`, so `{}` output was never actionable.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "GlossError: {}", self.err_string)
    }
}
impl Error for GlossError {
    // NOTE(review): `description` is deprecated in modern Rust in favour of
    // `Display`; kept because callers may still rely on it.
    fn description(&self) -> &str {
        &self.err_string[..]
    }
}
// Read the entire contents of `filename` into a String, propagating any
// open/read error to the caller.
fn read_file(filename: &str) -> Result<String, io::Error> {
    let mut contents = String::new();
    File::open(filename)?.read_to_string(&mut contents)?;
    Ok(contents)
}
// Split a `key=value` line into `("key=", "value")`.  The '=' is kept
// attached to the key so later lookups can use keys like `"base_url="`.
fn parse_key_line(key_line: &str) -> Result<(&str, &str), &'static str> {
    match key_line.find('=') {
        Some(idx) => Ok(key_line.split_at(idx + 1)),
        None => Err("Failed to parse keys file"),
    }
}
// Parse all `key=value` lines of a keys file into a map.  Keys retain their
// trailing '=' (see `parse_key_line`).
fn parse_keys(key_string: &str) -> Result<BTreeMap<&str, &str>, &'static str> {
    // Collecting an iterator of `Result` pairs directly into
    // `Result<BTreeMap, _>` short-circuits on the first malformed line,
    // replacing the old intermediate Vec + manual loop.
    key_string.lines().map(parse_key_line).collect()
}
// Submit API request and return response.
//
// Reads `base_url`, `app_key` and `app_id` from `keys.txt` (note the keys in
// the map include their trailing '='), sends a GET to `base_url + word` with
// the credentials as custom `app_key` / `app_id` headers.
fn get_response(word: &str) -> Result<Response, Box<Error>> {
    let key_string = read_file("keys.txt")?;
    let keys = parse_keys(&key_string[..])?;
    let base_url = keys.get("base_url=").ok_or("Missing base_url key")?;
    let app_key = keys.get("app_key=").ok_or("Missing app_key key")?;
    let app_id = keys.get("app_id=").ok_or("Missing app_id key")?;
    let url = (String::from(*base_url) + word).parse::<Url>().unwrap();
    // hyper's header! macro declares strongly-typed header structs for the
    // two custom authentication headers.
    header! { (Appkey, "app_key") => [String] }
    header! { (Appid, "app_id") => [String] }
    let mut heads = Headers::new();
    heads.set(Appkey(String::from(*app_key)));
    heads.set(Appid(String::from(*app_id)));
    let resp = Client::new()?.
        get(url)?.
        headers(heads).
        send();
    match resp {
        Err(e) => Err(Box::new(e)),
        Ok(a) => Ok(a),
    }
}
// Read map of glosses from serialised string.
//
// A string that fails to deserialize (e.g. the empty contents of a freshly
// created glosses file) yields an empty map rather than an error.
fn read_glosses(text: &str) ->
    Result<BTreeMap<String, Option<String>>, Box<Error>> {
    Ok(serde_json::from_str(text).unwrap_or_default())
}
// Write map of glosses to file.
//
// Serialises the whole map as JSON and truncates/overwrites the "glosses"
// file in the current working directory.
fn save_glosses(glosses: BTreeMap<String, Option<String>>) ->
    Result<(), Box<Error>> {
    let serial = serde_json::to_string(&glosses)?;
    let mut gloss_file = File::create("glosses")?;
    gloss_file.write_all(serial.as_bytes())?;
    Ok(())
}
// Request gloss and insert into map.
fn get_new_gloss<'a>(word: String,
glosses: &'a mut BTreeMap<String, Option<String>>) ->
Result<String, Box<Error>> {
// Used even though we know a gloss exists to satisfy types.
let impossible_error =
Box::new(GlossError {err_string: String::from("Expected gloss in map.")});
let mut resp = get_response(&word[..])?;
if resp.status().is_success() {
let mut content = String::new();
resp.read_to_string(&mut content)?;
let new_entry = Some(content);
glosses.insert(word, new_entry.clone());
new_entry.ok_or(impossible_error)
} else {
glosses.insert(word, None);
Ok(String::from("Not defined"))
}
}
// Return the contents of the "glosses" file, creating an empty one first if
// it does not exist.
fn potentially_create_glossfile() -> Result<String, Box<Error>> {
    // `or_else` keeps the fallback lazy: the file is only created/opened
    // when the read fails.  The previous `.or({ ... })` form evaluated its
    // argument — and therefore opened the file — on every single call.
    read_file("glosses").or_else(|_| {
        OpenOptions::new().append(true).create(true).open("glosses")?;
        Ok(String::new())
    })
}
// If non is true then print undefined words; otherwise print defined ones.
//
// Words are printed in `columns` tab-separated columns (defaulting to 1).
pub fn list_lexemes(non: bool, amt_columns: Option<&str>) ->
    Result<(), Box<Error>> {
    // Propagate file errors instead of panicking (was `.unwrap()`).
    let glosses_unwrapped = potentially_create_glossfile()?;
    let glossmap = read_glosses(&glosses_unwrapped[..])?;
    let amt_str = amt_columns.unwrap_or("1");
    let amt_int: usize = amt_str.trim().parse()?;
    // Clamp to at least one column; `i % 0` below would panic.
    let columns = if amt_int > 1 { amt_int } else { 1 };
    let mut i = 0;
    for (word, def) in glossmap {
        // `non == true` selects words WITHOUT a stored definition,
        // otherwise words WITH one.  A single loop replaces the two
        // previously duplicated branches.
        if def.is_none() == non {
            print!("{:15}\t", word);
            i = i + 1;
            if i % columns == 0 {
                println!();
            }
        }
    }
    Ok(())
}
/// Erase any stored data for `word` and persist the updated map.
///
/// Errors if no entry was stored for the word or if the gloss file cannot be
/// read or written.
pub fn remove_lexeme(word: &str) -> Result<(), Box<Error>> {
    // Propagate file errors instead of panicking (was `.unwrap()`).
    let glosses_unwrapped = potentially_create_glossfile()?;
    let mut glossmap = read_glosses(&glosses_unwrapped[..])?;
    glossmap.remove(word).ok_or("No data was stored for that word.")?;
    save_glosses(glossmap)?;
    Ok(())
}
// Extract either the etymologies or the sense definitions from an API
// response, rendered one "* item" per line.
//
// `needle == "etymologies"` selects the etymology list; any other value
// selects the definitions.  Entries lacking the requested field are skipped.
fn list_from_json<'a>(j: &'a serde_json::Value, needle: String) ->
    Result<String, &'static str> {
    let lex_entries = j.pointer("/results/0/lexicalEntries").
        ok_or("Not defined")?;
    let empty_vec = Vec::new();
    let entries = (*lex_entries).as_array().unwrap_or(&empty_vec);
    // The two original loops differed only in the JSON pointer they walked,
    // so select the path up front and keep a single loop.
    let path = if needle == "etymologies" {
        "/entries/0/etymologies"
    } else {
        "/entries/0/senses/0/definitions"
    };
    let mut s = String::from("");
    for entry in entries {
        if let Some(items) = entry.pointer(path) {
            for item in (*items).as_array().unwrap_or(&empty_vec) {
                let formed = format!("* {}\n", item.as_str().unwrap_or(""));
                s.push_str(&formed[..]);
            }
        }
    }
    Ok(s)
}
/// Print the gloss for a single word, fetching and caching it when it is not
/// already stored, then persist the (possibly updated) map.
///
/// The `-d` / `-e` flags restrict output to the definitions or etymologies
/// extracted from the stored JSON.
pub fn define_one(word: &str, matches: &ArgMatches) -> Result<(), Box<Error>> {
    // Propagate file errors instead of panicking (was `.unwrap()`).
    let glosses_unwrapped = potentially_create_glossfile()?;
    let mut glossmap = read_glosses(&glosses_unwrapped[..])?;
    // Snapshot for lookups while `glossmap` stays mutable for insertion.
    let cloned = glossmap.clone();
    {
        let resp: String = match cloned.get(word) {
            Some(entry) => match entry {
                &Some(ref def) => def.clone(),
                &None => String::from("Not defined.")
            },
            None => get_new_gloss(word.to_string(), &mut glossmap)?
        };
        if matches.is_present("definitions") {
            let j = serde_json::from_str(&resp[..]).or(Err("Not defined."))?;
            let definitions = list_from_json(&j, String::from("definitions"))?;
            println!("{}", definitions);
        } else if matches.is_present("etym") {
            let j = serde_json::from_str(&resp[..]).or(Err("Not defined."))?;
            let etym = list_from_json(&j, String::from("etymologies"))?;
            println!("{}", etym);
        } else {
            println!("{}", resp);
        }
    }
    save_glosses(glossmap)?;
    Ok(())
}
pub fn define_list(filename: &str) -> Result<(), Box<Error>> {
let glosses_result = potentially_create_glossfile();
let glosses_unwrapped = glosses_result.unwrap();
let mut glossmap = read_glosses(&glosses_unwrapped[..])?;
let cloned = glossmap.clone();
let wordfile = read_file(filename)?;
let wordlist : str::Lines = wordfile.lines();
for word in wordlist {
let def_opt = cloned.get(word);
match def_opt {
Some(_) => String::from("Already defined"),
None => get_new_gloss(word.to_string(), &mut glossmap)?
};
}
save_glosses(glossmap)?;
Ok(())
}
|
extern crate quux;
fn main() {
    // `y` is captured by shared reference in the closure below, so it cannot
    // be mutated until that borrow ends.
    let mut y = 2;
    {
        // Closure borrows `y`; the inner block scopes that borrow.
        let x = || {
            7 + y
        };
        let retval = quux::quux00(x);
        println!("retval: {:?}", retval);
    }
    // The borrow ended with the block above, so mutation is legal again.
    y = 5;
    println!("y : {:?}", y);
}
|
/*
Copyright 2019-2023 Didier Plaindoux
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Unit tests for the celma character parsers.  Each test folds the parse
// response into a bool: success branch checks the parsed value, failure
// branch reports whether failure was expected.
#[cfg(test)]
mod tests_char {
    use celma_core::parser::char::alpha;
    use celma_core::parser::char::alpha_lower;
    use celma_core::parser::char::alpha_upper;
    use celma_core::parser::char::char;
    use celma_core::parser::char::digit;
    use celma_core::parser::char::not_char;
    use celma_core::parser::parser::Parse;
    use celma_core::stream::char_stream::CharStream;
    // `assert!(cond)` replaces the old `assert_eq!(cond, true)` pattern
    // throughout (clippy: bool_assert_comparison); behavior is unchanged.
    #[test]
    fn it_parse_a_specific_character() {
        let response = char('a').parse(CharStream::new("a"));
        assert!(response.fold(|v, _, _| v == 'a', |_, _| false));
    }
    #[test]
    fn it_cannot_parse_a_specific_character() {
        let response = char('a').parse(CharStream::new("b"));
        assert!(response.fold(|_, _, _| false, |_, _| true));
    }
    #[test]
    fn it_parse_another_specific_character() {
        let response = not_char('b').parse(CharStream::new("a"));
        assert!(response.fold(|v, _, _| v == 'a', |_, _| false));
    }
    #[test]
    fn it_cannot_parse_another_specific_character() {
        let response = not_char('a').parse(CharStream::new("a"));
        assert!(response.fold(|_, _, _| false, |_, _| true));
    }
    #[test]
    fn it_can_parse_an_integer() {
        let response = digit().parse(CharStream::new("1"));
        assert!(response.fold(|v, _, _| v == '1', |_, _| false));
    }
    #[test]
    fn it_cannot_parse_an_integer() {
        let response = digit().parse(CharStream::new("a"));
        assert!(response.fold(|_, _, _| false, |_, _| true));
    }
    #[test]
    fn it_can_parse_a_lowercase_alpha() {
        let response = alpha_lower().parse(CharStream::new("a"));
        assert!(response.fold(|v, _, _| v == 'a', |_, _| false));
    }
    #[test]
    fn it_cannot_parse_a_lowercase_alpha() {
        let response = alpha_lower().parse(CharStream::new("A"));
        assert!(response.fold(|_, _, _| false, |_, _| true));
    }
    #[test]
    fn it_can_parse_a_uppercase_alpha() {
        let response = alpha_upper().parse(CharStream::new("A"));
        assert!(response.fold(|v, _, _| v == 'A', |_, _| false));
    }
    #[test]
    fn it_cannot_parse_a_uppercase_alpha() {
        let response = alpha_upper().parse(CharStream::new("a"));
        assert!(response.fold(|_, _, _| false, |_, _| true));
    }
    #[test]
    fn it_can_parse_an_alpha() {
        let response = alpha().parse(CharStream::new("a"));
        assert!(response.fold(|v, _, _| v == 'a', |_, _| false));
    }
    #[test]
    fn it_can_parse_another_alpha() {
        let response = alpha().parse(CharStream::new("A"));
        assert!(response.fold(|v, _, _| v == 'A', |_, _| false));
    }
    #[test]
    fn it_cannot_parse_an_alpha() {
        let response = alpha().parse(CharStream::new("0"));
        assert!(response.fold(|_, _, _| false, |_, _| true));
    }
}
// -------------------------------------------------------------------------------------------------
|
// Copyright 2019. The Tari Project
// SPDX-License-Identifier: BSD-3-Clause
use crate::{
keys::{PublicKey, SecretKey},
ristretto::{RistrettoPublicKey, RistrettoSecretKey},
};
/// Generate a fresh random Ristretto keypair using the thread-local RNG.
pub(crate) fn get_keypair() -> (RistrettoSecretKey, RistrettoPublicKey) {
    let mut rng = rand::thread_rng();
    // The public key is derived deterministically from the random secret key.
    let k = RistrettoSecretKey::random(&mut rng);
    let pk = RistrettoPublicKey::from_secret_key(&k);
    (k, pk)
}
|
/// Computes the `Address` of an `Account / Template`.
///
/// The algorithm must be deterministic.
pub trait ComputeAddress<T> {
    /// The concrete address type produced by an implementation.
    type Address;
    /// Derive the address for `item`; must yield the same value for the
    /// same input on every call.
    fn compute(item: &T) -> Self::Address;
}
|
use util::mem::{read_u16, read_u32, write_u16, write_u32};
use super::{PAL_MASK, PAL_SIZE};
/// Palette RAM backing store: a flat `PAL_SIZE`-byte array.  Background
/// entries live in the low half; object (sprite) entries start at byte
/// offset 0x200 (see `get_obj256`).
pub struct Palette {
    pub(crate) data: [u8; PAL_SIZE as usize],
}
impl Default for Palette {
    /// A palette with every byte zeroed.
    fn default() -> Self {
        Palette {
            data: [0; PAL_SIZE as usize],
        }
    }
}
impl Palette {
    /// 16-bit background color for `entry` in 256-color mode (each entry
    /// occupies two bytes).
    pub fn get_bg256(&self, entry: u8) -> u16 {
        self.view16((entry as u32) * 2)
    }
    /// 16-bit object color for `entry` in 256-color mode; object entries
    /// start at byte offset 0x200.
    pub fn get_obj256(&self, entry: u8) -> u16 {
        let addr = (entry as u32) * 2 + 0x200;
        self.view16(addr)
    }
    /// Background color in 16-color mode: 16 palettes of 16 entries each.
    /// NOTE(review): `palette * 16 + entry` is u8 arithmetic — assumes
    /// palette < 16 and entry < 16, otherwise it overflows (panics in debug).
    pub fn get_bg16(&self, palette: u8, entry: u8) -> u16 {
        self.get_bg256(palette * 16 + entry)
    }
    /// Object color in 16-color mode (same indexing caveat as `get_bg16`).
    pub fn get_obj16(&self, palette: u8, entry: u8) -> u16 {
        self.get_obj256(palette * 16 + entry)
    }
    /// 32-bit bus read; the address is wrapped into palette RAM by PAL_MASK.
    pub fn load32(&self, address: u32) -> u32 {
        read_u32(&self.data, (address & PAL_MASK) as usize)
    }
    /// 16-bit bus read (address wrapped by PAL_MASK).
    pub fn load16(&self, address: u32) -> u16 {
        read_u16(&self.data, (address & PAL_MASK) as usize)
    }
    /// 8-bit bus read (address wrapped by PAL_MASK).
    pub fn load8(&self, address: u32) -> u8 {
        self.data[(address & PAL_MASK) as usize]
    }
    /// 32-bit bus write (address wrapped by PAL_MASK).
    pub fn store32(&mut self, address: u32, value: u32) {
        write_u32(&mut self.data, (address & PAL_MASK) as usize, value);
    }
    /// 16-bit bus write (address wrapped by PAL_MASK).
    pub fn store16(&mut self, address: u32, value: u16) {
        write_u16(&mut self.data, (address & PAL_MASK) as usize, value);
    }
    pub fn store8(&mut self, address: u32, value: u8) {
        // 8bit writes to PAL write the 8bit value to both the lower and upper byte of
        // the addressed halfword.
        let address = ((address & !0x1) & PAL_MASK) as usize;
        self.data[address] = value;
        self.data[address + 1] = value;
    }
    /// Side-effect-free 32-bit read, e.g. for debuggers/renderers.
    pub fn view32(&self, address: u32) -> u32 {
        read_u32(&self.data, (address & PAL_MASK) as usize)
    }
    /// Side-effect-free 16-bit read.
    pub fn view16(&self, address: u32) -> u16 {
        read_u16(&self.data, (address & PAL_MASK) as usize)
    }
    /// Side-effect-free 8-bit read.
    pub fn view8(&self, address: u32) -> u8 {
        self.data[(address & PAL_MASK) as usize]
    }
}
|
use std::path::{Path, PathBuf};
use std::process::{self, Command};
use std::{env, fs, io};
use crate::{redoxer_dir, status_error, target};
//TODO: Rewrite with hyper or reqwest, tar-rs, sha2, and some gzip crate?
// Download `url` to `path` by shelling out to curl (HTTPS only, TLS >= 1.2,
// non-2xx responses fail via --fail); any failure surfaces as an io::Error.
fn download<P: AsRef<Path>>(url: &str, path: P) -> io::Result<()> {
    Command::new("curl")
        .arg("--proto")
        .arg("=https")
        .arg("--tlsv1.2")
        .arg("--fail")
        .arg("--output")
        .arg(path.as_ref())
        .arg(url)
        .status()
        .and_then(status_error)
}
//TODO: Rewrite with hyper or reqwest, tar-rs, sha2, and some gzip crate?
// Verify the checksums listed in the SHA256SUM file at `path` by shelling
// out to sha256sum, run from the file's parent directory so the relative
// file names in the sum file resolve.  Returns Ok(false) on mismatch.
fn shasum<P: AsRef<Path>>(path: P) -> io::Result<bool> {
    let parent = path.as_ref().parent().ok_or(io::Error::new(
        io::ErrorKind::Other,
        "shasum path had no parent",
    ))?;
    Command::new("sha256sum")
        .arg("--check")
        .arg("--ignore-missing")
        .arg("--quiet")
        .arg(path.as_ref())
        .current_dir(parent)
        .status()
        .map(|status| status.success())
}
//TODO: Rewrite with hyper or reqwest, tar-rs, sha2, and some gzip crate?
// Return the path to the Redox toolchain, downloading and unpacking it on
// first use.  The REDOXER_TOOLCHAIN env var overrides everything.
pub fn toolchain() -> io::Result<PathBuf> {
    if let Ok(redoxer_toolchain) = env::var("REDOXER_TOOLCHAIN") {
        return Ok(PathBuf::from(redoxer_toolchain));
    }
    let url = format!("https://static.redox-os.org/toolchain/{}", target());
    let toolchain_dir = redoxer_dir().join("toolchain");
    if !toolchain_dir.is_dir() {
        println!("redoxer: building toolchain");
        // Work in a ".partial" directory and rename at the end, so an
        // interrupted download never leaves a half-built toolchain dir.
        let toolchain_partial = redoxer_dir().join("toolchain.partial");
        if toolchain_partial.is_dir() {
            fs::remove_dir_all(&toolchain_partial)?;
        }
        fs::create_dir_all(&toolchain_partial)?;
        let shasum_file = toolchain_partial.join("SHA256SUM");
        download(&format!("{}/SHA256SUM", url), &shasum_file)?;
        let prefix_tar = toolchain_partial.join("rust-install.tar.gz");
        download(&format!("{}/rust-install.tar.gz", url), &prefix_tar)?;
        // Verify the tarball against the published checksums before unpacking.
        if !shasum(&shasum_file)? {
            return Err(io::Error::new(io::ErrorKind::Other, "shasum invalid"));
        }
        Command::new("tar")
            .arg("--extract")
            .arg("--file")
            .arg(&prefix_tar)
            .arg("-C")
            .arg(&toolchain_partial)
            .status()
            .and_then(status_error)?;
        // Atomic publish: only a fully verified, unpacked tree gets the
        // final directory name.
        fs::rename(&toolchain_partial, &toolchain_dir)?;
    }
    Ok(toolchain_dir)
}
// CLI entry point for `redoxer toolchain`: ensure the toolchain exists and
// exit 0 on success, 1 (with a message on stderr) on failure.
pub fn main(_args: &[String]) {
    match toolchain() {
        Ok(_) => {
            process::exit(0);
        }
        Err(err) => {
            eprintln!("redoxer toolchain: {}", err);
            process::exit(1);
        }
    }
}
|
fn main() {
    // vertex shader
    // GLSL 1.10 source held as a plain string literal; this program only
    // prints it with Debug formatting (escapes included) — it never compiles
    // or runs the shader.
    let a =
    " #version 110
    uniform mat4 matrix;
    attribute vec2 position;
    attribute vec3 color;
    varying vec3 v_color;
    void main() {
        gl_Position = vec4(position, 0.0, 1.0) * matrix;
        v_color = color;
    }
    ";
    println!("{:?}", a);
}
|
use crate::types::linalg::dimension::Dimension;
use sdl2::event::{Event, WindowEvent};
use sdl2::keyboard::Scancode;
use sdl2::video::{GLContext, Window};
use sdl2::Sdl;
use std::ffi::c_void;
/// Bundle of SDL handles needed to run a demo window.
pub struct Demo {
    // SDL context, needed later to create the event pump.
    sdl: Sdl,
    window: Window,
    _gl_context: GLContext, // Make sure that current gl_context isn't dropped
}
/// Create a resizable SDL window with an OpenGL 4.6 core-profile context,
/// load the GL function pointers and set the initial viewport.
///
/// Panics if SDL, the window, or the GL context cannot be created.
pub fn initialize_demo(title: &str, dimensions: Dimension) -> Demo {
    let sdl = sdl2::init().unwrap();
    let video_subsystem = sdl.video().unwrap();
    let gl_attr = video_subsystem.gl_attr();
    gl_attr.set_context_profile(sdl2::video::GLProfile::Core);
    gl_attr.set_context_version(4, 6);
    // NOTE(review): `rows` is used as width and `columns` as height here —
    // confirm this matches `Dimension`'s intended orientation.
    let window = video_subsystem
        .window(title, dimensions.rows as u32, dimensions.columns as u32)
        .opengl()
        .resizable()
        .build()
        .unwrap();
    let gl_context = window.gl_create_context().unwrap();
    window.gl_set_context_to_current().unwrap();
    // Resolve GL entry points through SDL's loader.
    gl::load_with(|s| video_subsystem.gl_get_proc_address(s) as *const c_void);
    unsafe {
        gl::Viewport(0, 0, dimensions.rows as i32, dimensions.columns as i32);
    }
    Demo {
        sdl,
        window,
        _gl_context: gl_context,
    }
}
/// Run the demo's main loop: drain events, call `render_loop` once per
/// frame, then swap buffers.
///
/// Built-in bindings: Escape / window close quits; Space toggles wireframe
/// rendering.  All other events are passed to `event_soaker` and then to
/// `housekeeping` (which handles window resizes).
pub fn quick_demo<F, G>(demo: Demo, mut render_loop: F, event_soaker: G)
where
    F: FnMut(),
    G: Fn(Event),
{
    let sdl = demo.sdl;
    let window = demo.window;
    let mut event_pump = sdl.event_pump().unwrap();
    let mut wireframe = false;
    'main: loop {
        for event in event_pump.poll_iter() {
            match event {
                Event::Quit { .. }
                | Event::KeyDown {
                    scancode: Some(Scancode::Escape),
                    ..
                } => break 'main,
                Event::KeyDown {
                    scancode: Some(Scancode::Space),
                    ..
                } => {
                    // Toggle between filled and wireframe polygon rendering.
                    wireframe = !wireframe;
                    unsafe {
                        gl::PolygonMode(
                            gl::FRONT_AND_BACK,
                            if wireframe { gl::LINE } else { gl::FILL },
                        );
                    }
                }
                _ => {
                    event_soaker(event.clone());
                    housekeeping(event);
                }
            }
        }
        render_loop();
        window.gl_swap_window();
    }
}
/// Default window-event handling: keep the GL viewport in sync with the
/// window size on resize events.  All other events are ignored.
pub fn housekeeping(event: Event) {
    if let Event::Window { win_event, .. } = event {
        match win_event {
            WindowEvent::Resized(new_x, new_y) | WindowEvent::SizeChanged(new_x, new_y) => {
                unsafe { gl::Viewport(0, 0, new_x, new_y) };
            }
            _ => {}
        }
    }
}
|
fn main() {
    // The outer binding never changes; the countdown below works on its own
    // copy, so the final println still sees the original value.
    let x: u32 = 5;
    println!("our number is {}", x);
    // Count down from x to 1 (the original shadowed `x`, decremented in a
    // loop and broke at zero — same printed output).
    for current in (1..=x).rev() {
        println!("and now {}", current);
    }
    println!("but our number is fine {}", x);
}
|
use std::collections::hash_map::{Entry, HashMap};
use actix::prelude::*;
use futures::{future, Future};
#[cfg(feature = "python")]
use cpython::{PyDict, Python, ToPyObject};
use crate::opentracing::tags::{IkrellnTags, KnownTag, OpenTracingTag};
/// Actor that converts the spans of a finished trace into a `TestResult`.
#[derive(Default)]
pub struct TraceParser;
impl Actor for TraceParser {
    type Context = Context<Self>;
}
// Supervised + SystemService: a single instance is managed by the actix
// system registry and restarted if it fails.
impl actix::Supervised for TraceParser {}
impl actix::SystemService for TraceParser {
    fn service_started(&mut self, _ctx: &mut Context<Self>) {}
}
/// Message signalling that the trace with the given trace id is complete.
#[derive(Message)]
pub struct TraceDone(pub String);
impl Handler<TraceDone> for TraceParser {
    type Result = ();
    // Load up to 1000 spans of the finished trace, try to assemble a
    // TestResult from them, and forward it for saving.  Traces missing a
    // required tag are logged and dropped.
    fn handle(&mut self, msg: TraceDone, _ctx: &mut Context<Self>) -> Self::Result {
        Arbiter::spawn(
            crate::DB_READ_EXECUTOR_POOL
                .send(crate::db::read::span::GetSpans(
                    crate::db::read::span::SpanQuery::default()
                        .with_trace_id(msg.0)
                        .with_limit(1000),
                ))
                .map(|spans| {
                    let te = TestResult::try_from(&spans);
                    match te {
                        Ok(te) => Some(te),
                        Err(tag) => {
                            warn!(
                                "missing / invalid tag {:?} in trace for spans {:?}",
                                tag, spans
                            );
                            None
                        }
                    }
                })
                .then(|test_exec| {
                    // Hand the parsed result to this actor's save pipeline.
                    if let Ok(Some(test_exec)) = test_exec {
                        actix::System::current()
                            .registry()
                            .get::<super::test_result::TraceParser>()
                            .do_send(TestExecutionToSave(test_exec));
                    }
                    future::result(Ok(()))
                }),
        )
    }
}
/// Message carrying a parsed `TestResult` that should be persisted.
#[derive(Message, Debug)]
pub struct TestExecutionToSave(TestResult);
impl Handler<TestExecutionToSave> for TraceParser {
    type Result = ();
    // Persist the result via the DB pool, then fan it out to the streaming
    // and reporting services.
    fn handle(&mut self, msg: TestExecutionToSave, _ctx: &mut Context<Self>) -> Self::Result {
        Arbiter::spawn(
            crate::DB_EXECUTOR_POOL
                .send(msg.0.clone())
                .then(|test_result| {
                    if let Ok(test_result) = test_result {
                        actix::System::current()
                            .registry()
                            .get::<crate::engine::streams::Streamer>()
                            .do_send(crate::engine::streams::Test(test_result.clone()));
                        actix::System::current()
                            .registry()
                            .get::<crate::engine::report::Reporter>()
                            .do_send(crate::engine::report::ComputeReportsForResult(test_result));
                    }
                    future::result(Ok(()))
                }),
        )
    }
}
/// Outcome of a test execution.  `Any` acts as a wildcard value (integer
/// code 3; see `as_i32`).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Hash)]
pub enum TestStatus {
    Success,
    Failure,
    Skipped,
    Any,
}
// PartialEq is derived and total for this field-less enum, so Eq is sound.
impl Eq for TestStatus {}
impl TestStatus {
    /// Parse a status from its (case-insensitive) textual tag value;
    /// anything unrecognised is reported as a bad `Result` tag.
    fn try_from(s: &str) -> Result<Self, KnownTag> {
        match s.to_lowercase().as_ref() {
            "success" => Ok(TestStatus::Success),
            "failure" => Ok(TestStatus::Failure),
            "skipped" => Ok(TestStatus::Skipped),
            _ => Err(IkrellnTags::Result.into()),
        }
    }
}
impl From<i32> for TestStatus {
fn from(v: i32) -> Self {
match v {
0 => crate::engine::test_result::TestStatus::Success,
1 => crate::engine::test_result::TestStatus::Failure,
2 => crate::engine::test_result::TestStatus::Skipped,
_ => crate::engine::test_result::TestStatus::Failure,
}
}
}
impl TestStatus {
pub fn as_i32(&self) -> i32 {
match self {
crate::engine::test_result::TestStatus::Success => 0,
crate::engine::test_result::TestStatus::Failure => 1,
crate::engine::test_result::TestStatus::Skipped => 2,
crate::engine::test_result::TestStatus::Any => 3,
}
}
pub fn as_str(&self) -> &'static str {
match self {
crate::engine::test_result::TestStatus::Success => "Success",
crate::engine::test_result::TestStatus::Failure => "Failure",
crate::engine::test_result::TestStatus::Skipped => "Skipped",
crate::engine::test_result::TestStatus::Any => "Any",
}
}
}
// Implementing `From` (rather than hand-written `Into`) is the idiomatic
// direction: the std blanket impl derives `Into` from it, so every existing
// `.into()` call site keeps compiling unchanged.
impl From<TestStatus> for i32 {
    fn from(status: TestStatus) -> i32 {
        status.as_i32()
    }
}
impl From<TestStatus> for &'static str {
    fn from(status: TestStatus) -> &'static str {
        status.as_str()
    }
}
/// A test execution reconstructed from the spans of one trace
/// (see `TestResult::try_from`).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct TestResult {
    pub test_id: String,
    // Grouping path; built as [suite, class] by `try_from`.
    pub path: Vec<String>,
    pub name: String,
    pub trace_id: String,
    // Taken from the root span's timestamp.
    pub date: i64,
    pub status: TestStatus,
    // Taken from the root span's duration.
    pub duration: i64,
    pub environment: Option<String>,
    // Remote service name -> number of spans that called it.
    pub components_called: HashMap<String, i32>,
    pub nb_spans: i32,
    // Root span of the trace; omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub main_span: Option<crate::opentracing::Span>,
}
#[cfg(feature = "python")]
impl ToPyObject for TestResult {
    type ObjectType = PyDict;
    // Expose the result as a Python dict for scripting hooks.  Only fields
    // set below are exported; `environment` and `main_span` appear only
    // when present.
    fn to_py_object(&self, py: Python) -> Self::ObjectType {
        let object = PyDict::new(py);
        object
            .set_item(py, "test_id", self.test_id.clone())
            .unwrap();
        object.set_item(py, "path", self.path.clone()).unwrap();
        object.set_item(py, "name", self.name.clone()).unwrap();
        object
            .set_item(py, "trace_id", self.trace_id.clone())
            .unwrap();
        object.set_item(py, "date", self.date).unwrap();
        // Status is exported by name, not by integer code.
        object.set_item(py, "status", self.status.as_str()).unwrap();
        object.set_item(py, "duration", self.duration).unwrap();
        if let Some(environment) = self.environment.clone() {
            object.set_item(py, "environment", environment).unwrap();
        }
        if let Some(main_span) = self.main_span.clone() {
            object.set_item(py, "main_span", main_span).unwrap();
        }
        object
    }
}
impl TestResult {
    /// Look up `tag` in a span's tag map, returning an owned copy of the
    /// value, or the tag itself (as `KnownTag`) when absent.
    fn value_from_tag<T>(tags: &HashMap<String, String>, tag: T) -> Result<String, KnownTag>
    where
        T: Clone,
        KnownTag: From<T>,
        &'static str: From<T>,
    {
        tags.get(tag.clone().into())
            .ok_or_else(|| tag.into())
            .map(std::string::ToString::to_string)
    }
    /// Like `value_from_tag`, but falls back to `f(span)` when the tag is
    /// missing; the original tag error is kept if the fallback also fails.
    fn value_from_tag_or(
        span: &crate::opentracing::Span,
        tag: IkrellnTags,
        f: fn(&crate::opentracing::Span) -> Option<String>,
    ) -> Result<String, KnownTag> {
        match span
            .tags
            .get(tag.clone().into())
            .ok_or_else(|| tag.into())
            .map(std::string::ToString::to_string)
        {
            Ok(value) => Ok(value),
            Err(err) => f(span).ok_or(err),
        }
    }
    /// Assemble a `TestResult` from all spans of one trace.
    ///
    /// The root span (the one with no parent) supplies suite/class/name,
    /// timestamp, duration and result status; the full span list is used to
    /// count calls per remote service.
    fn try_from(spans: &[crate::opentracing::Span]) -> Result<Self, KnownTag> {
        let main_span = match spans.iter().find(|span| span.parent_id.is_none()) {
            Some(span) => span,
            None => return Err(IkrellnTags::StepType.into()),
        };
        // Suite tag, falling back to the root span's local service name.
        let suite = Self::value_from_tag_or(main_span, IkrellnTags::Suite, |span| {
            span.local_endpoint.clone().and_then(|ep| ep.service_name)
        })?;
        let class = Self::value_from_tag(&main_span.tags, IkrellnTags::Class)?;
        // Count how many spans called each remote service.
        let remote_services: Vec<String> = spans
            .iter()
            .filter_map(|span| span.clone().remote_endpoint.and_then(|ep| ep.service_name))
            .collect();
        let mut call_by_remote_endpoint = HashMap::new();
        for token in remote_services {
            let item = call_by_remote_endpoint.entry(token);
            match item {
                Entry::Occupied(mut entry) => {
                    *entry.get_mut() = entry.get() + 1;
                }
                Entry::Vacant(entry) => {
                    entry.insert(1);
                }
            }
        }
        Ok(TestResult {
            test_id: "n/a".to_string(),
            path: vec![suite, class],
            name: Self::value_from_tag_or(main_span, IkrellnTags::Name, |span| span.name.clone())?,
            trace_id: main_span.trace_id.clone(),
            date: main_span.timestamp.ok_or(KnownTag {
                tag: "ts".to_string(),
            })?,
            // Result tag, falling back to the OpenTracing "error" tag;
            // error == "true" is normalised to "failure".
            status: TestStatus::try_from(&Self::value_from_tag_or(
                main_span,
                IkrellnTags::Result,
                |span| {
                    Self::value_from_tag(&span.tags, OpenTracingTag::Error)
                        .ok()
                        .map(|v| match v.to_lowercase().as_ref() {
                            "true" => "failure".to_string(),
                            other => other.to_string(),
                        })
                },
            )?)?,
            duration: main_span.duration.ok_or(KnownTag {
                tag: "duration".to_string(),
            })?,
            environment: Self::value_from_tag(&main_span.tags, IkrellnTags::Environment).ok(),
            components_called: call_by_remote_endpoint,
            nb_spans: spans.len() as i32,
            main_span: Some(main_span.clone()),
        })
    }
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use uuid;
    use crate::opentracing::span::Kind;
    use crate::opentracing::tags::IkrellnTags;
    use crate::opentracing::Span;
    use super::*;
    // A single root span carrying suite/class/result tags plus timestamp
    // and duration is the minimum input `try_from` accepts.
    #[test]
    fn can_get_test_result_from_span() {
        let trace_id = uuid::Uuid::new_v4().to_string();
        let mut tags: HashMap<String, String> = HashMap::new();
        tags.insert(
            String::from({
                let tag: &str = IkrellnTags::Suite.into();
                tag
            }),
            "test_suite".to_string(),
        );
        tags.insert(
            String::from({
                let tag: &str = IkrellnTags::Class.into();
                tag
            }),
            "test_class".to_string(),
        );
        tags.insert(
            String::from({
                let tag: &str = IkrellnTags::Result.into();
                tag
            }),
            "success".to_string(),
        );
        // parent_id: None makes this the root span.
        let spans = vec![Span {
            trace_id: trace_id.to_string(),
            id: trace_id.clone(),
            parent_id: None,
            name: Some("span_name".to_string()),
            kind: Some(Kind::CLIENT),
            duration: Some(25),
            timestamp: Some(50),
            debug: false,
            shared: false,
            local_endpoint: None,
            remote_endpoint: None,
            annotations: vec![],
            tags,
            binary_annotations: vec![],
        }];
        let tr = TestResult::try_from(&spans);
        assert!(tr.is_ok());
    }
}
|
mod r#async;
mod sync;
use std::sync::atomic::AtomicU64;
pub use r#async::BufferUnorderedBatchedAsync;
use serde_derive::{Deserialize, Serialize};
pub use sync::BufferUnorderedBatchedSync;
/// Runtime counters for a batched, bounded-parallelism message buffer.
/// All fields are atomics so they can be read while the poller runs.
#[derive(Debug)]
pub struct BufferUnorderedBatchedStats {
    pub buffer: AtomicU64,
    pub buffer_total: AtomicU64,
    pub parallel: AtomicU64,
    pub parallel_total: AtomicU64,
    pub batch: AtomicU64,
    pub batch_size: AtomicU64,
}
/// Tuning knobs for the batched buffer (see the poller macro below:
/// `batch_size` messages are grouped per dispatch and at most
/// `max_parallel` batch tasks run concurrently).
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
pub struct BufferUnorderedBatchedConfig {
    pub buffer_size: usize,
    pub max_parallel: usize,
    pub batch_size: usize,
    pub when_ready: bool,
}
impl Default for BufferUnorderedBatchedConfig {
    /// Defaults: 8-message buffer and batches, 2 concurrent batch tasks,
    /// `when_ready` off.
    fn default() -> Self {
        Self {
            buffer_size: 8,
            max_parallel: 2,
            batch_size: 8,
            when_ready: false,
        }
    }
}
// Generates the async poller for a batched handler.  $t: handler type,
// $h: handler trait, $st1: expression spawning the batch-handling task,
// $st2: expression running the synchronization handler.
#[macro_export]
macro_rules! buffer_unordered_batch_poller_macro {
    ($t: tt, $h: tt, $st1: expr, $st2: expr) => {
        // Drains the request channel, groups messages into batches of
        // `cfg.batch_size` and dispatches them with at most
        // `cfg.max_parallel` batch tasks in flight.
        async fn buffer_unordered_batch_poller<$t, M, R>(
            mut rx: mpsc::UnboundedReceiver<Request<M>>,
            bus: Bus,
            ut: Untyped,
            _stats: Arc<BufferUnorderedBatchedStats>,
            cfg: BufferUnorderedBatchedConfig,
            stx: mpsc::UnboundedSender<Event<R, $t::Error>>,
        ) where
            $t: $h<M, Response = R> + 'static,
            M: Message,
            R: Message,
        {
            let ut = ut.downcast::<$t>().unwrap();
            // The semaphore caps the number of concurrently running batch
            // tasks; one permit is held per in-flight batch.
            let semaphore = Arc::new(tokio::sync::Semaphore::new(cfg.max_parallel));
            let mut buffer_mid = Vec::with_capacity(cfg.batch_size);
            let mut buffer = Vec::with_capacity(cfg.batch_size);
            while let Some(msg) = rx.recv().await {
                let bus = bus.clone();
                let ut = ut.clone();
                let semaphore = semaphore.clone();
                let stx = stx.clone();
                match msg {
                    Request::Request(mid, msg, req) => {
                        buffer_mid.push((mid, req));
                        buffer.push(msg);
                        // A full batch: hand it off to a worker task,
                        // blocking here only for a free permit.
                        if buffer_mid.len() >= cfg.batch_size {
                            let task_permit = semaphore.acquire_owned().await;
                            let buffer_mid_clone = buffer_mid.drain(..).collect::<Vec<_>>();
                            let buffer_clone = buffer.drain(..).collect();
                            #[allow(clippy::redundant_closure_call)]
                            let _ =
                                ($st1)(buffer_mid_clone, buffer_clone, bus, ut, task_permit, stx);
                        }
                    }
                    Request::Action(Action::Init(..)) => {
                        stx.send(Event::Ready).unwrap();
                    }
                    Request::Action(Action::Close) => {
                        rx.close();
                    }
                    Request::Action(Action::Flush) => {
                        let stx_clone = stx.clone();
                        // Dispatch any partial batch first...
                        if !buffer_mid.is_empty() {
                            let buffer_mid_clone = buffer_mid.drain(..).collect::<Vec<_>>();
                            let buffer_clone = buffer.drain(..).collect();
                            let task_permit = semaphore.clone().acquire_owned().await;
                            #[allow(clippy::redundant_closure_call)]
                            let _ =
                                ($st1)(buffer_mid_clone, buffer_clone, bus, ut, task_permit, stx);
                        }
                        // ...then wait until every permit is free, i.e. all
                        // in-flight batches finished, before reporting.
                        let _ = semaphore.acquire_many(cfg.max_parallel as _).await;
                        stx_clone.send(Event::Flushed).unwrap();
                    }
                    Request::Action(Action::Sync) => {
                        // Exclusive section: hold all permits while the
                        // sync handler runs so no batch task overlaps it.
                        let lock = semaphore.acquire_many(cfg.max_parallel as _).await;
                        #[allow(clippy::redundant_closure_call)]
                        let resp = ($st2)(bus.clone(), ut.clone()).await;
                        drop(lock);
                        stx.send(Event::Synchronized(resp.map_err(Error::Other)))
                            .unwrap();
                    }
                    _ => unimplemented!(),
                }
            }
        }
    };
}
|
#![cfg_attr(feature = "clippy", allow(unstable_features))]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![cfg_attr(feature = "clippy", deny(warnings))]
use std::borrow::Borrow;
use std::hash::{BuildHasher,Hash};
use std::collections;
/// Minimal map abstraction over `HashMap` and `BTreeMap`.
///
/// The `Q` bound requires `Hash + Eq + Ord` so the same signature can serve
/// both hashed and ordered map implementations.
pub trait MapLike<'a, K: 'a, V: 'a> {
    /// Borrowing iterator over `(&K, &V)` pairs.
    type Iter: Iterator<Item=(&'a K, &'a V)>;
    fn get<Q: ?Sized>(&'a self, k: &Q)
        -> Option<&'a V>
        where K: Borrow<Q>,
              Q: Hash + Eq + Ord;
    fn insert(&'a mut self, k: K, v: V) -> Option<V>;
    fn iter(&'a self) -> Self::Iter;
}
// Delegating impl: every method forwards to the inherent HashMap method.
impl<'a, K: 'a, V: 'a, S> MapLike<'a, K, V> for collections::HashMap<K, V, S>
    where S: BuildHasher, K: Hash + Eq {
    type Iter = collections::hash_map::Iter<'a, K, V>;
    fn get<Q: ?Sized>(&'a self, k: &Q)
        -> Option<&'a V>
        where K: Borrow<Q>,
              Q: Hash + Eq + Ord
    { collections::HashMap::get(self, k) }
    fn insert(&'a mut self, k: K, v: V) -> Option<V> { collections::HashMap::insert(self, k, v) }
    fn iter(&'a self) -> Self::Iter { collections::HashMap::iter(self) }
}
// Delegating impl: every method forwards to the inherent BTreeMap method.
impl<'a, K: 'a, V: 'a> MapLike<'a, K, V> for collections::BTreeMap<K, V>
    where K: Ord {
    type Iter = collections::btree_map::Iter<'a, K, V>;
    fn get<Q: ?Sized>(&'a self, k: &Q)
        -> Option<&'a V>
        where K: Borrow<Q>,
              Q: Hash + Eq + Ord
    { collections::BTreeMap::get(self, k) }
    fn insert(&'a mut self, k: K, v: V) -> Option<V> { collections::BTreeMap::insert(self, k, v) }
    fn iter(&'a self) -> Self::Iter { collections::BTreeMap::iter(self) }
}
/// View of a map whose values are collections of `V`.
pub trait MultiMap<'a, K, V> {
    /// Return the value for `k` only when exactly one is stored.
    fn get_only<Q: ?Sized>(&'a self, k: &Q)
        -> Option<&'a V>
        where K: Borrow<Q>,
              Q: Hash + Eq + Ord;
}
// Blanket impl: any MapLike whose values are Vec<V> is a MultiMap.
impl<'a, K: 'a, V: 'a, T: MapLike<'a, K, Vec<V>>> MultiMap<'a, K, V> for T {
    fn get_only<Q: ?Sized>(&'a self, k: &Q)
        -> Option<&'a V>
        where K: Borrow<Q>,
              Q: Hash + Eq + Ord {
        // None both when the key is absent and when more than one value is
        // stored under it.
        self.get(k)
            .and_then(|v| if v.len() == 1 { Some(&v[0]) } else { None })
    }
}
// End-to-end check: a HashMap<String, Vec<usize>> used through both the
// MapLike and MultiMap abstractions.  "foobar" has exactly one value so
// get_only returns it; "baz" has three so get_only yields None.
#[test]
fn basic_usage() {
    fn prepare<'a, T: MapLike<'a, String, Vec<usize>>>(t: &'a mut T) {
        t.insert("foobar".to_owned(), vec![3]);
    }
    fn foo<'a, T: MultiMap<'a, String, usize>>(t: &'a T) {
        assert_eq!(Some(&3), t.get_only("foobar"));
        assert!(t.get_only("baz").is_none());
    }
    let mut t = collections::HashMap::new();
    prepare(&mut t);
    t.insert("baz".to_owned(), vec![1,2,3]);
    foo(&t);
}
|
use cpal::{SampleRate, Stream};
use ringbuf::{Consumer, RingBuffer};
use srt_media::{
source::{
filters::{AudioFilter, AudioFormat},
SrtSource, StreamDescriptor,
},
FrameData, NextFrameResult,
};
use stainless_ffmpeg::prelude::*;
const SAMPLE_RATE: SampleRate = SampleRate(48_000);
// Play the first audio stream of an SRT source (URL taken from the last CLI
// argument) through the default audio output, converting s32 samples to f32
// and pushing them through a ring buffer to the cpal callback.
fn main() {
    pretty_env_logger::init();
    let url = std::env::args().last().unwrap();
    let mut srt_source = SrtSource::new(&url);
    let nb_stream = srt_source.format_context.lock().unwrap().get_nb_streams();
    // Find the first audio stream among all streams of the source.
    let mut first_audio_stream = None;
    for i in 0..nb_stream {
        let stream_type = srt_source
            .format_context
            .lock()
            .unwrap()
            .get_stream_type(i as isize);
        log::info!("Stream {}: {:?}", i, stream_type);
        if stream_type == AVMediaType::AVMEDIA_TYPE_AUDIO {
            first_audio_stream = Some(i);
        }
    }
    let first_audio_stream = first_audio_stream.unwrap();
    // Filter chain normalises the stream to stereo s32 @ 48 kHz.
    let channel_layouts = vec!["stereo".to_string()];
    let sample_formats = vec!["s32".to_string()];
    let sample_rates = vec![48000];
    let audio_filters = vec![AudioFilter::Format(AudioFormat {
        sample_rates,
        channel_layouts,
        sample_formats,
    })];
    let selection = vec![StreamDescriptor::new_audio(
        first_audio_stream as usize,
        audio_filters,
    )];
    srt_source.select_streams(selection).unwrap();
    // Ring buffer decouples the decode loop (producer) from the audio
    // callback (consumer).
    let (mut producer, consumer) = RingBuffer::<f32>::new(1024 * 1024).split();
    let _stream = audio_player(consumer);
    loop {
        let next_frame = srt_source.next_frame().unwrap();
        match &next_frame {
            NextFrameResult::Nothing | NextFrameResult::WaitMore => {
                std::thread::sleep(std::time::Duration::from_millis(10));
            }
            NextFrameResult::Frame { frame, .. } => {
                if let FrameData::AudioVideo(av_frame) = frame {
                    unsafe {
                        let av_frame = av_frame.frame;
                        let size = ((*av_frame).channels * (*av_frame).nb_samples) as usize;
                        log::info!(
                            "Frame {} samples, {} channels, {} bytes // {} bytes",
                            (*av_frame).nb_samples,
                            (*av_frame).channels,
                            (*av_frame).linesize[0],
                            size,
                        );
                        // SAFETY-critical trick: build a Vec view over the
                        // FFmpeg-owned sample buffer; mem::forget below
                        // stops the Vec from freeing memory it doesn't own.
                        let samples: Vec<i32> = Vec::from_raw_parts((*av_frame).data[0] as _, size, size);
                        // Scale s32 samples into the f32 [-1, 1] range.
                        let float_samples: Vec<f32> = samples
                            .iter()
                            .map(|value| (*value as f32) / i32::MAX as f32)
                            .collect();
                        producer.push_slice(&float_samples);
                        std::mem::forget(samples);
                    }
                }
            }
            _ => {}
        }
    }
}
/// Builds and returns a CPAL output stream that plays 48 kHz stereo f32
/// samples pulled from `consumer`. Whenever the ring buffer holds less than a
/// full callback's worth of data, silence is played instead.
///
/// Panics if no output device or no matching stereo/48 kHz configuration is
/// available.
fn audio_player(mut consumer: Consumer<f32>) -> Stream {
    use cpal::traits::{DeviceTrait, HostTrait};
    let host = cpal::default_host();
    let device = host
        .default_output_device()
        .expect("no output device available");
    let mut supported_configs_range = device
        .supported_output_configs()
        .expect("error while querying configs");
    // Pick the first stereo configuration whose supported rate range covers
    // SAMPLE_RATE, then pin it to exactly that rate.
    let supported_config = supported_configs_range
        .find(|config| {
            config.channels() == 2
                && SAMPLE_RATE >= config.min_sample_rate()
                && SAMPLE_RATE <= config.max_sample_rate()
        })
        .expect("no supported config?!")
        .with_sample_rate(SAMPLE_RATE);
    let config = supported_config.into();
    device
        .build_output_stream(
            &config,
            move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
                // Pre-fill with silence so an under-filled buffer never
                // replays stale samples. (`fill` replaces the manual index
                // loop: same effect, no per-element bounds checks.)
                data.fill(0.0);
                // Only drain when a full callback's worth is available.
                if consumer.len() > data.len() {
                    consumer.pop_slice(data);
                }
            },
            move |err| log::error!("CPAL error: {:?}", err),
        )
        .unwrap()
}
|
use std::iter;
use std::str::Chars;
use peekmore::PeekMore;
use crate::token::*;
/// Error produced when the scanner meets input it cannot tokenize.
#[derive(Debug)]
pub struct ScannerError {
    /// 1-based source line on which the error occurred.
    pub line: usize,
    /// Human-readable description of the problem.
    pub desc: String,
}
/// Hand-written lexer over a character stream; yields one token (or one
/// `ScannerError`) per call to `Iterator::next`.
#[derive(Debug)]
pub struct Scanner<'a> {
    /// Multi-peek cursor over the source characters.
    iter: peekmore::PeekMoreIterator<Chars<'a>>,
    /// Accumulates the characters of the literal currently being scanned.
    buff: Vec<char>,
    /// Current 1-based line number, advanced on '\n'.
    line: usize,
}
impl<'a> Iterator for Scanner<'a> {
    type Item = Result<Token, ScannerError>;
    //type Item = Token;
    /// Scans and returns the next token.
    ///
    /// Returns `None` at end of input, `Some(Err(..))` for an unterminated
    /// string or an unrecognized character, `Some(Ok(..))` otherwise.
    /// Whitespace is skipped; a bare '\n' advances the line counter.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(c) = self.iter.next() {
            match c {
                // Single-character punctuation / operators map directly to a
                // token with no literal value.
                '{' => {
                    return self.empty_token(TokenType::LeftBrace, c.to_string());
                }
                '}' => {
                    return self.empty_token(TokenType::RightBrace, c.to_string());
                }
                '(' => {
                    return self.empty_token(TokenType::LeftParen, c.to_string());
                }
                ')' => {
                    return self.empty_token(TokenType::RightParen, c.to_string());
                }
                '+' => {
                    return self.empty_token(TokenType::Plus, c.to_string());
                }
                ',' => {
                    return self.empty_token(TokenType::Comma, c.to_string());
                }
                '.' => {
                    return self.empty_token(TokenType::Dot, c.to_string());
                }
                '-' => {
                    return self.empty_token(TokenType::Minus, c.to_string());
                }
                ';' => {
                    return self.empty_token(TokenType::Semicolon, c.to_string());
                }
                '*' => {
                    return self.empty_token(TokenType::Star, c.to_string());
                }
                // Maybe-two-character operators: peek at the following char to
                // decide, and consume it only when it completes the operator.
                '=' => match self.iter.peek() {
                    Some('=') => {
                        self.iter.next();
                        return self.empty_token(TokenType::EqualEqual, "==".to_string());
                    }
                    _ => {
                        // Built inline rather than via `empty_token`; the
                        // result is the same shape of token.
                        return Some(Ok(Token {
                            kind: TokenType::Equal,
                            lexeme: c.to_string(),
                            line: self.line,
                            value: None,
                        }))
                    }
                },
                '!' => match self.iter.peek() {
                    Some('=') => {
                        self.iter.next();
                        return self.empty_token(TokenType::BangEqual, "!=".to_string());
                    }
                    _ => {
                        return self.empty_token(TokenType::Bang, c.to_string());
                    }
                },
                '<' => match self.iter.peek() {
                    Some('=') => {
                        self.iter.next();
                        return self.empty_token(TokenType::LessEqual, "<=".to_string());
                    }
                    _ => return self.empty_token(TokenType::Less, c.to_string()),
                },
                '>' => match self.iter.peek() {
                    Some('=') => {
                        self.iter.next();
                        return self.empty_token(TokenType::GreaterEqual, ">=".to_string());
                    }
                    _ => return self.empty_token(TokenType::Greater, c.to_string()),
                },
                '/' => match self.iter.peek() {
                    // "//" starts a line comment: swallow everything up to
                    // (but not including) the newline, then keep scanning.
                    Some('/') => {
                        while let Some(nchar) = self.iter.peek() {
                            if *nchar == '\n' {
                                break;
                            }
                            self.iter.next();
                        }
                    }
                    _ => {
                        return self.empty_token(TokenType::Slash, c.to_string());
                    }
                },
                // String literal: buffer characters until the closing quote.
                // The lexeme is the contents without the surrounding quotes.
                // NOTE(review): a '\n' inside a string is buffered but does
                // NOT advance `self.line`, so line numbers drift after
                // multi-line strings — confirm whether this is intended.
                '"' => loop {
                    let cnext = self.iter.peek();
                    match cnext {
                        Some('"') => {
                            let ret =
                                self.token(TokenType::String, self.buff.iter().collect::<String>());
                            self.iter.next();
                            self.buff.clear();
                            return ret;
                        }
                        Some(c) => {
                            self.buff.push(*c);
                            self.iter.next();
                        }
                        None => return self.scan_err("Unterminated string literal"),
                    }
                },
                // Insignificant whitespace.
                '\t' => continue,
                '\r' => continue,
                ' ' => continue,
                '\n' => self.line += 1,
                _ => return self.scan_err("invalid character"),
            };
        }
        // Input exhausted.
        None
    }
}
impl<'a> Scanner<'a> {
    /// Creates a scanner over `iter`, starting at line 1 with an empty
    /// lexeme buffer.
    pub fn new(iter: Chars<'a>) -> Self {
        Scanner {
            iter: iter.peekmore(),
            buff: vec![],
            line: 1,
        }
    }
    /// Builds a token that carries no literal value (punctuation, operators),
    /// tagged with the current line.
    fn empty_token(&self, kind: TokenType, lexeme: String) -> Option<Result<Token, ScannerError>> {
        Some(Ok(Token {
            kind,
            lexeme,
            line: self.line,
            value: None,
        }))
    }
    /// Builds a token for a literal.
    ///
    /// NOTE(review): the body was a byte-for-byte duplicate of `empty_token`
    /// — the literal's `value` field is never populated (string contents only
    /// survive in `lexeme`). It now delegates so the two cannot drift apart;
    /// populate `value` here once literal values are implemented.
    fn token(&self, kind: TokenType, lexeme: String) -> Option<Result<Token, ScannerError>> {
        self.empty_token(kind, lexeme)
    }
    /// Wraps `message` in a `ScannerError` tagged with the current line.
    fn scan_err(&self, message: &str) -> Option<Result<Token, ScannerError>> {
        Some(Err(ScannerError {
            line: self.line,
            desc: message.to_string(),
        }))
    }
}
|
use serde::{Deserialize, Serialize};
/// Static metadata about this service; every `None` field is omitted from the
/// JSON output entirely rather than serialized as `null`.
/// NOTE(review): field names match the Battlesnake `GET /` response — confirm.
#[derive(Serialize, Debug)]
pub struct Info {
    /// API version string; always serialized.
    pub apiversion: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub author: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub color: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub head: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tail: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}
/// A single turn's decision: the direction to move plus an optional shout.
/// The direction is (de)serialized under the JSON key "move" (`move` is a
/// Rust keyword, hence the rename).
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
pub struct Move {
    #[serde(rename = "move")]
    movement: Movement,
    #[serde(skip_serializing_if = "Option::is_none")]
    shout: Option<String>,
}
impl Move {
pub fn new(movement: Movement) -> Move {
Move {
movement,
shout: None,
}
}
}
/// The four cardinal directions, (de)serialized in lowercase
/// ("right", "left", "up", "down").
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
#[serde(rename_all(serialize = "lowercase", deserialize = "lowercase"))]
pub enum Movement {
    Right,
    Left,
    Up,
    Down,
}
|
//! Defines the `MoveGenerator` trait.
use std::mem::uninitialized;
use std::cmp::max;
use uci::SetOption;
use board::*;
use moves::*;
use value::*;
use evaluator::Evaluator;
use bitsets::*;
use utils::BoardGeometry;
/// A trait for move generators.
///
/// A `MoveGenerator` holds a chess position and can:
///
/// * Calculate Zobrist hashes.
///
/// * Find which pieces and pawns attack a given square.
///
/// * Find if the side to move is in check.
///
/// * Generate all legal moves, or a subset of all legal moves in the
/// current position.
///
/// * Generate a null move.
///
/// * Verify if a random move digest represents a proper move.
///
/// * Play a selected move and take it back.
///
/// * Provide a static evaluator bound to the current position.
///
/// * Perform static exchange evaluation for the generated moves.
///
/// **Important note:** `MoveGenerator` is unaware of repeating
/// positions and rule-50.
pub trait MoveGenerator: Clone + SetOption + Send + 'static {
    /// The type of static evaluator that the implementation works
    /// with.
    type Evaluator: Evaluator;
    /// Creates a new instance, consuming the supplied `Board`
    /// instance.
    ///
    /// Returns `Err(IllegalBoard)` if the position is illegal.
    fn from_board(board: Board) -> Result<Self, IllegalBoard>;
    /// Returns a reference to the underlying `Board` instance.
    fn board(&self) -> &Board;
    /// Returns the Zobrist hash value for the underlying `Board`
    /// instance.
    ///
    /// Zobrist hashing is a technique to transform a board position
    /// into a number of a fixed length, with an equal distribution
    /// over all possible numbers, invented by Albert Zobrist. The key
    /// property of this method is that two similar positions generate
    /// entirely different hash numbers.
    ///
    /// **Important note:** This method will be relatively slow if the
    /// implementation calculates the hash value "from
    /// scratch". Inspect the implementation before using `hash` in
    /// time-critical paths. (See `do_move`.)
    fn hash(&self) -> u64;
    /// Returns a bitboard with all pieces and pawns that attack a
    /// given square.
    fn attacks_to(&self, square: Square) -> Bitboard;
    /// Returns if the side to move is in check.
    fn is_check(&self) -> bool;
    /// Generates all legal moves, possibly including some
    /// pseudo-legal moves too.
    ///
    /// The moves are added to `moves`. All generated moves with
    /// pieces other than the king will be legal. Some of the
    /// generated king's moves may be illegal because the destination
    /// square is under attack. This arrangement has two important
    /// advantages:
    ///
    /// * `do_move` can do its work without knowing the set of
    ///   checkers and pinned pieces, so there is no need to keep
    ///   those around.
    ///
    /// * A beta cut-off may make the verification that king's
    ///   destination square is not under attack unnecessary, thus
    ///   saving time.
    ///
    /// The initial move score for the generated moves is
    /// *unspecified*.
    ///
    /// **Note:** A pseudo-legal move is a move that is otherwise
    /// legal, except it might leave the king in check.
    fn generate_all<T: AddMove>(&self, moves: &mut T);
    /// Generates moves for the quiescence search.
    ///
    /// The moves are added to `moves`. This method always generates a
    /// **subset** of the moves generated by `generate_all`:
    ///
    /// * If the king is in check, all legal moves are included.
    ///
    /// * Captures and pawn promotions to queen are always included.
    ///
    /// * If `generate_checks` is `true`, moves that give check are
    ///   included too. Discovered checks and checks given by castling
    ///   can be omitted for speed.
    fn generate_forcing<T: AddMove>(&self, generate_checks: bool, moves: &mut T);
    /// Returns a null move.
    ///
    /// "Null move" is a pseudo-move that changes only the side to
    /// move. It is sometimes useful to include a speculative null
    /// move in the search tree so as to achieve more aggressive
    /// pruning. Null moves are represented as king's moves for which
    /// the origin and destination squares are the same.
    fn null_move(&self) -> Move;
    /// Verifies if the supplied move digest represents a proper move.
    ///
    /// If a move `m` exists that would be generated by `generate_all`
    /// if called for the current position on the board, and for that
    /// move `m.digest()` equals the supplied move digest, this method
    /// will return `Some(m)`. Otherwise it will return `None`. This
    /// is useful when playing moves from the transposition table,
    /// without calling `generate_all`.
    fn try_move_digest(&self, move_digest: MoveDigest) -> Option<Move>;
    /// Plays a move on the board.
    ///
    /// It the move leaves the king in check, `None` is returned
    /// without updating the board. Otherwise, the board is updated
    /// and an `u64` value is returned, which should be XOR-ed with
    /// board's old Zobrist hash value to obtain board's new Zobrist
    /// hash value. The move passed to this method **must** have been
    /// generated by `generate_all`, `generate_forcing`,
    /// `try_move_digest`, or `null_move` methods for the current
    /// position on the board.
    fn do_move(&mut self, m: Move) -> Option<u64>;
    /// Takes back last played move.
    ///
    /// The move passed to this method **must** be the last move passed
    /// to `do_move`.
    fn undo_move(&mut self, m: Move);
    /// Returns a reference to a static evaluator bound to the current
    /// position.
    fn evaluator(&self) -> &Self::Evaluator;
    /// Returns the likely evaluation change (material) to be lost or
    /// gained as a result of a given move.
    ///
    /// This method performs static exchange evaluation (SEE). It
    /// examines the consequence of a series of exchanges on the
    /// destination square after a given move. A positive returned
    /// value indicates a "winning" move. For example, "PxQ" will
    /// always be a win, since the pawn side can choose to stop the
    /// exchange after its pawn is recaptured, and still be ahead. SEE
    /// is just an evaluation calculated without actually trying moves
    /// on the board, and therefore the returned value might be
    /// incorrect.
    ///
    /// The move passed to this method must have been generated by
    /// `generate_all`, `generate_forcing`, `try_move_digest`, or
    /// `null_move` methods for the current position on the board.
    fn evaluate_move(&self, m: Move) -> Value {
        debug_assert!(m.played_piece() < PIECE_NONE);
        debug_assert!(m.captured_piece() <= PIECE_NONE);
        const PIECE_VALUES: [Value; 8] = [10000, 975, 500, 325, 325, 100, 0, 0];
        unsafe {
            let mut piece = m.played_piece();
            let captured_piece = m.captured_piece();
            // Try not to waste CPU cycles when the played piece is
            // less valuable than the captured piece.
            if piece > captured_piece {
                return *PIECE_VALUES.get_unchecked(captured_piece);
            }
            // This is the square on which all the action takes place.
            let exchange_square = m.dest_square();
            let color: &[Bitboard; 2] = &self.board().pieces.color;
            let piece_type: &[Bitboard; 6] = &self.board().pieces.piece_type;
            let file_sliders = piece_type[QUEEN] | piece_type[ROOK];
            let diag_sliders = piece_type[QUEEN] | piece_type[BISHOP];
            let geometry = BoardGeometry::get();
            let behind_blocker: &[Bitboard; 64] = geometry
                .squares_behind_blocker
                .get_unchecked(exchange_square);
            // These variables (along with `piece`) will be updated on each capture:
            let mut us = self.board().to_move;
            let mut depth = 0;
            let mut orig_square_bb = 1 << m.orig_square();
            let mut attackers_and_defenders = self.attacks_to(exchange_square);
            // The `gain` array will hold the total material gained at
            // each `depth`, from the viewpoint of the side that made the
            // last capture (`us`).
            //
            // Zero-initialized: the previous `mem::uninitialized()` is
            // deprecated and undefined behavior in modern Rust. Slots
            // past the final `depth` are never read by the negamax
            // below, so zero-filling preserves the algorithm's result.
            let mut gain: [Value; 34] = [0; 34];
            gain[0] = if m.move_type() == MOVE_PROMOTION {
                piece = Move::piece_from_aux_data(m.aux_data());
                PIECE_VALUES[captured_piece] + PIECE_VALUES[piece] - PIECE_VALUES[PAWN]
            } else {
                *PIECE_VALUES.get_unchecked(captured_piece)
            };
            // Examine the possible exchanges, fill the `gain` array.
            'exchange: while orig_square_bb != 0 {
                let current_gain = *gain.get_unchecked(depth);
                // Store a speculative value that will be used if the
                // captured piece happens to be defended.
                let speculative_gain: &mut Value = gain.get_unchecked_mut(depth + 1);
                *speculative_gain = *PIECE_VALUES.get_unchecked(piece) - current_gain;
                if max(-current_gain, *speculative_gain) < 0 {
                    // The side that made the last capture wins even if
                    // the captured piece happens to be defended. So, we
                    // stop here to save precious CPU cycles. Note that
                    // here we may happen to return an incorrect SEE
                    // value, but the sign will be correct, which is by
                    // far the most important information.
                    break;
                }
                // Register that capturing piece's origin square is now vacant.
                attackers_and_defenders &= !orig_square_bb;
                // Consider adding new attackers/defenders, now that
                // capturing piece's origin square is vacant.
                let behind = self.board().occupied &
                             *behind_blocker.get_unchecked(bsf(orig_square_bb));
                if behind & (file_sliders | diag_sliders) != 0 && piece != KING {
                    attackers_and_defenders |=
                        match behind & file_sliders &
                              geometry.attacks_from_unsafe(ROOK, exchange_square, behind) {
                            0 => {
                                // Not a file slider, possibly a diagonal slider.
                                behind & diag_sliders &
                                geometry.attacks_from_unsafe(BISHOP, exchange_square, behind)
                            }
                            bb => {
                                // A file slider.
                                bb
                            }
                        };
                }
                // Change the side to move.
                us ^= 1;
                // Find the next piece to enter the exchange. (The least
                // valuable piece belonging to the side to move.)
                let candidates = attackers_and_defenders & *color.get_unchecked(us);
                if candidates != 0 {
                    for p in (KING..PIECE_NONE).rev() {
                        let bb = candidates & piece_type[p];
                        if bb != 0 {
                            depth += 1;
                            piece = p;
                            orig_square_bb = lsb(bb);
                            continue 'exchange;
                        }
                    }
                }
                break 'exchange;
            }
            // Negamax the `gain` array for the final static exchange
            // evaluation. (The `gain` array actually represents an unary
            // tree, at each node of which the player can either continue
            // the exchange or back off.)
            while depth > 0 {
                *gain.get_unchecked_mut(depth - 1) = -max(-*gain.get_unchecked(depth - 1),
                                                          *gain.get_unchecked(depth));
                depth -= 1;
            }
            gain[0]
        }
    }
}
|
/// Returns the canonical greeting.
// &'static is a "lifetime specifier", something you'll learn more about later;
// string literals live in the program binary itself, hence the 'static lifetime.
pub fn hello() -> &'static str {
    let greeting = "Hello, World!";
    greeting
}
// This stub file contains items which aren't used yet; feel free to remove this module attribute
// to enable stricter warnings.
#![allow(unused)]
/// Number of minutes the cake is expected to spend in the oven.
pub fn expected_minutes_in_oven() -> i32 {
    const BAKE_TIME_MINUTES: i32 = 40;
    BAKE_TIME_MINUTES
}
/// Minutes of baking left, given how long the cake has already been in.
pub fn remaining_minutes_in_oven(actual_minutes_in_oven: i32) -> i32 {
    let total = expected_minutes_in_oven();
    total - actual_minutes_in_oven
}
/// Preparation time: two minutes of work per layer.
pub fn preparation_time_in_minutes(number_of_layers: i32) -> i32 {
    2 * number_of_layers
}
/// Total elapsed time so far: preparation for all layers plus oven time.
pub fn elapsed_time_in_minutes(number_of_layers: i32, actual_minutes_in_oven: i32) -> i32 {
    let prep = preparation_time_in_minutes(number_of_layers);
    prep + actual_minutes_in_oven
}
// This stub file contains items which aren't used yet; feel free to remove this module attribute
// to enable stricter warnings.
#![allow(unused)]
/// Base hourly output of the assembly line at speed 1.
// Renamed from `production`: private consts use SCREAMING_SNAKE_CASE; the old
// lowercase name triggered a non_upper_case_globals warning. The const is
// private and only used below, so the rename is invisible to callers.
const PRODUCTION_PER_HOUR: u8 = 221;
/// Hourly production at the given line `speed`, discounted by the success
/// rate: 77% at speeds 9+, 90% at speeds 5-8, 100% below that.
pub fn production_rate_per_hour(speed: u8) -> f64 {
    // Left-to-right multiplication order preserved from the original, so the
    // floating-point results are bit-identical.
    let base = PRODUCTION_PER_HOUR as f64 * speed as f64;
    if speed >= 9 {
        base * 0.77
    } else if speed >= 5 {
        base * 0.9
    } else {
        base
    }
}
/// Whole items produced per minute at the given `speed` (truncating division).
pub fn working_items_per_minute(speed: u8) -> u32 {
    production_rate_per_hour(speed) as u32 / 60
}
/// Prints the greeting returned by `hello` to stdout.
fn main() {
    let greeting = hello();
    println!("{}", greeting);
}
|
#![feature(plugin)]
#![feature(proc_macro)]
#![plugin(rocket_codegen)]
extern crate maud;
extern crate rocket;
use maud::{html, Markup};
use std::borrow::Cow;
/// Handler for `GET /<name>`: renders a small greeting page.
///
/// `Cow<str>` lets Rocket pass the path segment either borrowed or owned
/// (e.g. when percent-decoding required an allocation).
#[get("/<name>")]
fn hello<'a>(name: Cow<'a, str>) -> Markup {
    html! {
        h1 { "Hello, " (name) "!" }
        // Braced body for consistency with the surrounding elements; the bare
        // `p "..."` form was the odd one out in this template.
        p { "Nice to meet you!" }
        p {"🥁Hi, " (name) "!"}
    }
}
/// Entry point: mounts the `hello` route at the site root and launches the
/// Rocket server (blocks until shutdown).
fn main() {
    rocket::ignite().mount("/", routes![hello]).launch();
} |
extern crate iron;
extern crate router;
use iron::prelude::*;
use router::router;
mod handlers;
/// Address (host:port) the HTTP server binds to.
static SERVER_ADDR: &str = "localhost:3000";
/// Wires up the two routes and starts the Iron HTTP server on `SERVER_ADDR`.
fn main() {
    let routes = router!(
        index: get "/" => handlers::index,
        index_name: get "/:name" => handlers::index_name
    );
    // `http` returns once the listener is up; keep the handle alive.
    let _server = Iron::new(routes).http(SERVER_ADDR).unwrap();
    println!("Starting server on http://{}", SERVER_ADDR);
}
|
// Copyright 2019 The Tari Project
// SPDX-License-Identifier: BSD-3-Clause
//! A commitment is like a sealed envelope. You put some information inside the envelope, and then seal (commit) it.
//! You can't change what you've said, but also, no-one knows what you've said until you're ready to open (open) the
//! envelope and reveal its contents. Also it's a special envelope that can only be opened by a special opener that
//! you keep safe in your drawer.
use core::{
cmp::Ordering,
convert::TryFrom,
hash::{Hash, Hasher},
ops::{Add, Mul, Sub},
};
use tari_utilities::{ByteArray, ByteArrayError};
use crate::{
alloc::string::ToString,
errors::CommitmentError,
keys::{PublicKey, SecretKey},
};
/// There are also different types of commitments that vary in their security guarantees, but all of them are
/// represented by binary data; so [HomomorphicCommitment](trait.HomomorphicCommitment.html) implements
/// [ByteArray](trait.ByteArray.html).
///
/// The Homomorphic part means, more or less, that commitments follow some of the standard rules of
/// arithmetic. Adding two commitments is the same as committing to the sum of their parts:
/// $$ \begin{aligned}
/// C_1 &= v_1.H + k_1.G \\\\
/// C_2 &= v_2.H + k_2.G \\\\
/// \therefore C_1 + C_2 &= (v_1 + v_2)H + (k_1 + k_2)G
/// \end{aligned} $$
///
/// The single crate-visible field is the commitment itself, represented as a
/// public-key point of type `P`.
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct HomomorphicCommitment<P>(pub(crate) P);
#[cfg(feature = "borsh")]
impl<P: borsh::BorshDeserialize> borsh::BorshDeserialize for HomomorphicCommitment<P> {
    /// Deserializes the inner point and wraps it as a commitment.
    fn deserialize_reader<R>(reader: &mut R) -> Result<Self, borsh::maybestd::io::Error>
    where R: borsh::maybestd::io::Read {
        P::deserialize_reader(reader).map(Self)
    }
}
#[cfg(feature = "borsh")]
impl<P: borsh::BorshSerialize> borsh::BorshSerialize for HomomorphicCommitment<P> {
    /// Serializes the commitment as the borsh encoding of its inner point.
    fn serialize<W: borsh::maybestd::io::Write>(&self, writer: &mut W) -> borsh::maybestd::io::Result<()> {
        self.0.serialize(writer)
    }
}
impl<P> HomomorphicCommitment<P>
where P: PublicKey
{
    /// Borrows the commitment as its underlying public-key point.
    pub fn as_public_key(&self) -> &P {
        &self.0
    }
    /// Wraps a copy of the given public key as a commitment.
    pub fn from_public_key(p: &P) -> HomomorphicCommitment<P> {
        Self(p.clone())
    }
}
impl<P> ByteArray for HomomorphicCommitment<P>
where P: PublicKey
{
    /// Parses a commitment from the byte encoding of its inner point.
    fn from_bytes(bytes: &[u8]) -> Result<Self, ByteArrayError> {
        P::from_bytes(bytes).map(Self)
    }
    /// Canonical byte view of the inner point.
    fn as_bytes(&self) -> &[u8] {
        self.0.as_bytes()
    }
}
impl<P> PartialOrd for HomomorphicCommitment<P>
where P: PublicKey
{
    /// Consistent with `Ord`: always `Some(self.cmp(other))`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<P> Ord for HomomorphicCommitment<P>
where P: PublicKey
{
    /// Total ordering delegated to the underlying public-key point.
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0)
    }
}
/// Add two commitments together. Note! There is no check that the bases are equal.
/// Add two commitments together. Note! There is no check that the bases are equal.
impl<'b, P> Add for &'b HomomorphicCommitment<P>
where
    P: PublicKey,
    &'b P: Add<&'b P, Output = P>,
{
    type Output = HomomorphicCommitment<P>;
    fn add(self, rhs: &'b HomomorphicCommitment<P>) -> Self::Output {
        let sum = &self.0 + &rhs.0;
        HomomorphicCommitment(sum)
    }
}
/// Add a public key to a commitment. Note! There is no check that the bases are equal.
/// Add a public key to a commitment. Note! There is no check that the bases are equal.
impl<'b, P> Add<&'b P> for &'b HomomorphicCommitment<P>
where
    P: PublicKey,
    &'b P: Add<&'b P, Output = P>,
{
    type Output = HomomorphicCommitment<P>;
    fn add(self, rhs: &'b P) -> Self::Output {
        let sum = &self.0 + rhs;
        HomomorphicCommitment(sum)
    }
}
/// Subtracts the left commitment from the right commitment. Note! There is no check that the bases are equal.
/// Subtracts the left commitment from the right commitment. Note! There is no check that the bases are equal.
impl<'b, P> Sub for &'b HomomorphicCommitment<P>
where
    P: PublicKey,
    &'b P: Sub<&'b P, Output = P>,
{
    type Output = HomomorphicCommitment<P>;
    fn sub(self, rhs: &'b HomomorphicCommitment<P>) -> Self::Output {
        let difference = &self.0 - &rhs.0;
        HomomorphicCommitment(difference)
    }
}
/// Multiply the commitment with a private key
/// Multiply the commitment with a private key
impl<'a, 'b, P, K> Mul<&'b K> for &'a HomomorphicCommitment<P>
where
    P: PublicKey<K = K>,
    K: SecretKey,
    &'b K: Mul<&'a P, Output = P>,
{
    type Output = HomomorphicCommitment<P>;
    fn mul(self, rhs: &'b K) -> HomomorphicCommitment<P> {
        let scaled = rhs * &self.0;
        HomomorphicCommitment::<P>::from_public_key(&scaled)
    }
}
impl<P: PublicKey> Hash for HomomorphicCommitment<P> {
    /// Hashes the canonical byte encoding of the commitment point.
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write(self.as_bytes())
    }
}
impl<P: PublicKey> PartialEq for HomomorphicCommitment<P> {
    /// Two commitments are equal iff their underlying points are equal.
    fn eq(&self, other: &Self) -> bool {
        self.as_public_key().eq(other.as_public_key())
    }
}
// Marker: equality on commitments is an equivalence relation, inherited from `P`.
impl<P: PublicKey> Eq for HomomorphicCommitment<P> {}
/// A trait for creating commitments
pub trait HomomorphicCommitmentFactory {
    /// The type of public key that the underlying commitment will be based on
    type P: PublicKey;
    /// Create a new commitment with the blinding factor _k_ and value _v_ provided. The implementing type will provide
    /// the base values
    fn commit(&self, k: &<Self::P as PublicKey>::K, v: &<Self::P as PublicKey>::K) -> HomomorphicCommitment<Self::P>;
    /// Return an identity point for addition using the specified base point. This is a commitment to zero with a zero
    /// blinding factor on the base point
    fn zero(&self) -> HomomorphicCommitment<Self::P>;
    /// Test whether the given blinding factor _k_ and value _v_ open the given commitment
    /// (i.e. whether committing to them reproduces `commitment`)
    fn open(
        &self,
        k: &<Self::P as PublicKey>::K,
        v: &<Self::P as PublicKey>::K,
        commitment: &HomomorphicCommitment<Self::P>,
    ) -> bool;
    /// Create a commitment from a blinding factor _k_ and an integer value
    fn commit_value(&self, k: &<Self::P as PublicKey>::K, value: u64) -> HomomorphicCommitment<Self::P>;
    /// Test whether the given private key and value open the given commitment
    fn open_value(&self, k: &<Self::P as PublicKey>::K, v: u64, commitment: &HomomorphicCommitment<Self::P>) -> bool;
}
/// A trait for creating extended commitments that are based on a public key
///
/// Extended commitments use a vector of blinding factors (one per base point)
/// instead of a single one; operations return `Result` because the vector's
/// length must match the factory's extension degree.
pub trait ExtendedHomomorphicCommitmentFactory {
    /// The type of public key that the underlying commitment will be based on
    type P: PublicKey;
    /// Create a new commitment with the blinding factor vector **k** and value _v_ provided. The implementing type will
    /// provide the base values
    fn commit_extended(
        &self,
        k_vec: &[<Self::P as PublicKey>::K],
        v: &<Self::P as PublicKey>::K,
    ) -> Result<HomomorphicCommitment<Self::P>, CommitmentError>;
    /// Return an identity point for addition using the specified base points. This is a commitment to zero with a zero
    /// blinding factor vector on the base points
    fn zero_extended(&self) -> HomomorphicCommitment<Self::P>;
    /// Test whether the given blinding factor vector **k** and value _v_ open the given commitment
    fn open_extended(
        &self,
        k_vec: &[<Self::P as PublicKey>::K],
        v: &<Self::P as PublicKey>::K,
        commitment: &HomomorphicCommitment<Self::P>,
    ) -> Result<bool, CommitmentError>;
    /// Create a commitment from a blinding factor vector **k** and an integer value
    fn commit_value_extended(
        &self,
        k_vec: &[<Self::P as PublicKey>::K],
        value: u64,
    ) -> Result<HomomorphicCommitment<Self::P>, CommitmentError>;
    /// Test whether the given private keys and value open the given commitment
    fn open_value_extended(
        &self,
        k_vec: &[<Self::P as PublicKey>::K],
        v: u64,
        commitment: &HomomorphicCommitment<Self::P>,
    ) -> Result<bool, CommitmentError>;
}
/// The extension degree for extended Pedersen commitments. Currently this is limited to adding 5 base points to the
/// default Pedersen commitment, but in theory it could be arbitrarily long, although practically, very few if any
/// test cases will need to add more than 2 base points.
///
/// The numeric discriminant equals the total number of blinding-factor base
/// points (see `try_from_size`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ExtensionDegree {
    /// Default Pedersen commitment (`C = v.H + sum(k_i.G_i)|i=1`)
    DefaultPedersen = 1,
    /// Pedersen commitment extended with one degree (`C = v.H + sum(k_i.G_i)|i=1..2`)
    AddOneBasePoint = 2,
    /// Pedersen commitment extended with two degrees (`C = v.H + sum(k_i.G_i)|i=1..3`)
    AddTwoBasePoints = 3,
    /// Pedersen commitment extended with three degrees (`C = v.H + sum(k_i.G_i)|i=1..4`)
    AddThreeBasePoints = 4,
    /// Pedersen commitment extended with four degrees (`C = v.H + sum(k_i.G_i)|i=1..5`)
    AddFourBasePoints = 5,
    /// Pedersen commitment extended with five degrees (`C = v.H + sum(k_i.G_i)|i=1..6`)
    AddFiveBasePoints = 6,
}
impl ExtensionDegree {
    /// Converts a blinding-vector size (1..=6) into its extension degree;
    /// any other size is rejected with a `CommitmentExtensionDegree` error.
    pub fn try_from_size(size: usize) -> Result<ExtensionDegree, CommitmentError> {
        let degree = match size {
            1 => ExtensionDegree::DefaultPedersen,
            2 => ExtensionDegree::AddOneBasePoint,
            3 => ExtensionDegree::AddTwoBasePoints,
            4 => ExtensionDegree::AddThreeBasePoints,
            5 => ExtensionDegree::AddFourBasePoints,
            6 => ExtensionDegree::AddFiveBasePoints,
            _ => {
                return Err(CommitmentError::CommitmentExtensionDegree {
                    reason: "Extension degree not valid".to_string(),
                })
            },
        };
        Ok(degree)
    }
}
impl TryFrom<usize> for ExtensionDegree {
    type Error = CommitmentError;
    /// Delegates to [`ExtensionDegree::try_from_size`].
    fn try_from(value: usize) -> Result<Self, Self::Error> {
        Self::try_from_size(value)
    }
}
|
use crate::lib::error::DfxError;
use ring::error::Unspecified;
use std::boxed::Box;
use std::path::PathBuf;
use thiserror::Error;
/// Errors that can occur while creating, renaming, reading or removing
/// identities; each variant carries the context needed to report the failure.
#[derive(Error, Debug)]
pub enum IdentityError {
    #[error("Identity already exists.")]
    IdentityAlreadyExists(),
    #[error("Identity {0} does not exist at '{1}'.")]
    IdentityDoesNotExist(String, PathBuf),
    #[error("Cannot generate key pair.")]
    CannotGenerateKeyPair(Unspecified),
    #[error("Cannot create identity directory at '{0}': {1}")]
    CannotCreateIdentityDirectory(PathBuf, Box<DfxError>),
    #[error("Cannot rename identity directory from '{0}' to '{1}': {2}")]
    CannotRenameIdentityDirectory(PathBuf, PathBuf, Box<DfxError>),
    #[error("Cannot delete the default identity.")]
    CannotDeleteDefaultIdentity(),
    #[error("Cannot create an anonymous identity.")]
    CannotCreateAnonymousIdentity(),
    #[error("Cannot find home directory.")]
    CannotFindHomeDirectory(),
    #[error("Cannot read identity file at '{0}': {1}")]
    CannotReadIdentityFile(PathBuf, Box<DfxError>),
}
|
use std::collections::HashMap;
use serde::ser::{Error, SerializeMap};
use serde::{Serialize, Serializer};
use crate::{Element, Value};
use crate::error::TychoError;
use crate::ident::ValueIdent;
use crate::into::ident::Ident;
use crate::serde::ser::TychoSerializer;
use std::fmt;
/// Serde map serializer that collects key/value pairs into a homogeneous
/// Tycho map element (all keys must share one `ValueIdent`).
pub struct MapSerializer {
    /// Collected key/value pairs.
    content: HashMap<Value, Element>,
    /// Ident shared by every key; `ValueIdent::Null` until the first key fixes it.
    map_type: ValueIdent,
    /// Key awaiting its value between `serialize_key` and `serialize_value`.
    key: Option<Value>
}
impl MapSerializer {
    /// Creates a serializer whose key type is inferred from the first key
    /// (`ValueIdent::Null` acts as the "not yet known" marker).
    pub fn new() -> Self {
        // Delegate so the two constructors cannot drift apart.
        Self::typed(ValueIdent::Null)
    }
    /// Creates a serializer that only accepts keys whose ident equals `x`.
    pub fn typed(x: ValueIdent) -> Self {
        Self {
            content: HashMap::new(),
            map_type: x,
            key: None
        }
    }
}
impl SerializeMap for MapSerializer {
    type Ok = Element;
    type Error = TychoError;
    /// Serializes a key, enforcing that every key in the map shares one ident.
    ///
    /// The first key fixes `map_type`; later keys must match it. Non-value
    /// elements are rejected as keys. The accepted key is parked in `self.key`
    /// until the matching `serialize_value` call.
    fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error> where
        T: Serialize {
        let data = key.serialize(TychoSerializer)?;
        if let Element::Value(value) = data {
            if self.map_type == ValueIdent::Null {
                // First key encountered fixes the map's key type.
                self.map_type = value.ident();
            } else if value.ident() != self.map_type {
                return Err(TychoError::MismatchedType { found: value.ident(), expected: self.map_type.clone() })
            }
            self.key = Some(value);
        } else {
            return Err(TychoError::InvalidKeyType { found: data.ident() })
        }
        Ok(())
    }
    /// Serializes the value for the key parked by the preceding
    /// `serialize_key` call, consuming that key.
    fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where
        T: Serialize {
        let value = value.serialize(TychoSerializer)?;
        // `take()` replaces the previous `mem::replace(&mut self.key, None)`:
        // identical semantics, clearer intent.
        if let Some(k) = self.key.take() {
            self.content.insert(k, value);
        } else {
            // `serialize_value` without a preceding successful `serialize_key`.
            return Err(TychoError::custom("Invalid key state."));
        }
        Ok(())
    }
    /// Finishes the map, producing the final map element.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(Element::Map(self.map_type, self.content))
    }
}
impl Serializer for MapSerializer {
type Ok = Element;
type Error = TychoError;
type SerializeSeq = serde::ser::Impossible<Element, TychoError>;
type SerializeTuple = serde::ser::Impossible<Element, TychoError>;
type SerializeTupleStruct = serde::ser::Impossible<Element, TychoError>;
type SerializeTupleVariant = serde::ser::Impossible<Element, TychoError>;
type SerializeMap = Self;
type SerializeStruct = serde::ser::Impossible<Element, TychoError>;
type SerializeStructVariant = serde::ser::Impossible<Element, TychoError>;
// This `Serializer` impl is deliberately map-only: every entry point in
// this run of methods rejects with the same uniform error.
// NOTE(review): only `serialize_map` (below) succeeds — presumably this
// serializer exists solely to walk map entries; confirm against callers.
fn serialize_bool(self, _v: bool) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// Signed integers: rejected.
fn serialize_i8(self, _v: i8) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_i16(self, _v: i16) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_i32(self, _v: i32) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_i64(self, _v: i64) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// Unsigned integers: rejected.
fn serialize_u8(self, _v: u8) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_u16(self, _v: u16) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_u32(self, _v: u32) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_u64(self, _v: u64) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// Floats, chars, strings and raw bytes: rejected.
fn serialize_f32(self, _v: f32) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_f64(self, _v: f64) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_char(self, _v: char) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_str(self, _v: &str) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_bytes(self, _v: &[u8]) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// `Option` and unit-like values: rejected.
fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error> where
T: Serialize {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_unit_variant(self, _name: &'static str, _variant_index: u32, _variant: &'static str) -> Result<Self::Ok, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// Newtype wrappers: rejected.
fn serialize_newtype_struct<T: ?Sized>(self, _name: &'static str, _value: &T) -> Result<Self::Ok, Self::Error> where
T: Serialize {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_newtype_variant<T: ?Sized>(self, _name: &'static str, _variant_index: u32, _variant: &'static str, _value: &T) -> Result<Self::Ok, Self::Error> where
T: Serialize {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// Sequence- and tuple-shaped containers: rejected.
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_tuple_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeTupleStruct, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_tuple_variant(self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeTupleVariant, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
// The one supported entry point: serializing a map hands back `self`
// (so `Self::SerializeMap` is this same type), which then receives the
// per-entry key/value callbacks.
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
    Ok(self)
}
// Structs, struct variants and `Display` collection are rejected like the
// scalar entry points — only `serialize_map` is supported by this type.
fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn serialize_struct_variant(self, _name: &'static str, _variant_index: u32, _variant: &'static str, _len: usize) -> Result<Self::SerializeStructVariant, Self::Error> {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
fn collect_str<T: ?Sized>(self, _value: &T) -> Result<Self::Ok, Self::Error> where
T: fmt::Display {
    Err(TychoError::custom("called serialize on invalid serializer"))
}
} |
// Copyright 2020 The MWC Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::swap::fsm::state::{Input, State, StateEtaInfo, StateId, StateProcessRespond};
use crate::swap::types::SwapTransactionsConfirmations;
use crate::swap::{Context, ErrorKind, Swap};
use std::collections::HashMap;
/// Swap State machine: owns every reachable state of an atomic-swap trade,
/// keyed by its `StateId`, and drives transitions between them.
pub struct StateMachine<'a> {
    /// Available States, indexed by their own `StateId`.
    state_map: HashMap<StateId, Box<dyn State + 'a>>,
}
impl<'a> StateMachine<'a> {
    /// Build the machine from the full list of reachable states.
    ///
    /// # Panics
    /// In debug builds, panics if any state's prev/next link points at a
    /// state that was not registered.
    pub fn new(states: Vec<Box<dyn State + 'a>>) -> Self {
        let mut state_map: HashMap<StateId, Box<dyn State + 'a>> = HashMap::new();
        for st in states {
            let _ = state_map.insert(st.get_state_id(), st);
        }
        // Debug-build sanity check of the transition graph. The original
        // attribute `#[cfg(build = "debug")]` matches no built-in cfg, so
        // this validation silently never ran; `debug_assertions` is the
        // correct predicate for debug builds.
        #[cfg(debug_assertions)]
        for st in state_map.values() {
            assert!(state_map.contains_key(&st.get_state_id()));
            if let Some(state) = st.get_prev_swap_state() {
                assert!(state_map.contains_key(&state));
            }
            if let Some(state) = st.get_next_swap_state() {
                assert!(state_map.contains_key(&state));
            }
        }
        StateMachine { state_map }
    }
    /// Check if this trade can be cancelled from its current state.
    pub fn is_cancellable(&self, swap: &Swap) -> Result<bool, ErrorKind> {
        // `ok_or_else` defers building the error string to the failure path.
        let state = self.state_map.get(&swap.state).ok_or_else(|| {
            ErrorKind::SwapStateMachineError(format!("Unknown state {:?}", swap.state))
        })?;
        Ok(state.is_cancellable())
    }
    /// Verify if the state is valid for this machine
    pub fn has_state(&self, state: &StateId) -> bool {
        self.state_map.contains_key(state)
    }
    /// Process one input, then keep feeding `Input::Check` to each new
    /// state until the state id reaches a fixed point (stops changing).
    pub fn process(
        &mut self,
        input: Input,
        swap: &mut Swap,
        context: &Context,
        tx_conf: &SwapTransactionsConfirmations,
    ) -> Result<StateProcessRespond, ErrorKind> {
        debug!(
            "Swap {} processing state {:?} for Input {:?}",
            swap.id, swap.state, input
        );
        let state = self.state_map.get_mut(&swap.state).ok_or_else(|| {
            ErrorKind::SwapStateMachineError(format!("Unknown state {:?}", swap.state))
        })?;
        let mut respond = state.process(input, swap, context, tx_conf)?;
        while respond.next_state_id != swap.state {
            // Advance first so the log line shows the state we are entering;
            // the original logged `swap.state` *before* the assignment and
            // therefore printed the old state as "New state".
            swap.state = respond.next_state_id.clone();
            debug!("New state: {:?}", swap.state);
            let state = self.state_map.get_mut(&swap.state).ok_or_else(|| {
                ErrorKind::SwapStateMachineError(format!("Unknown state {:?}", swap.state))
            })?;
            respond = state.process(Input::Check, swap, context, tx_conf)?;
        }
        respond.journal = swap.journal.clone();
        debug!("Responding with {:?}", respond);
        Ok(respond)
    }
    /// Build a roadmap for the swap process: ETA entries for all past
    /// states, the current state (marked active) and all future states.
    pub fn get_swap_roadmap(&self, swap: &Swap) -> Result<Vec<StateEtaInfo>, ErrorKind> {
        let state = self.state_map.get(&swap.state).ok_or_else(|| {
            ErrorKind::SwapStateMachineError(format!("Unknown state {:?}", swap.state))
        })?;
        let mut result: Vec<StateEtaInfo> = Vec::new();
        // Walk backwards to the first state, prepending ETA entries.
        let mut prev_state_id = state.get_prev_swap_state();
        while let Some(psid) = prev_state_id {
            let prev_state = self.state_map.get(&psid).ok_or_else(|| {
                ErrorKind::SwapStateMachineError(format!("Unknown state {:?}", psid))
            })?;
            if let Some(info) = prev_state.get_eta(swap) {
                result.insert(0, info);
            }
            prev_state_id = prev_state.get_prev_swap_state();
        }
        // Current state.
        if let Some(info) = state.get_eta(swap) {
            result.push(info.active());
        }
        // Walk forwards to the final state.
        let mut next_state_id = state.get_next_swap_state();
        while let Some(nsid) = next_state_id {
            let next_state = self.state_map.get(&nsid).ok_or_else(|| {
                ErrorKind::SwapStateMachineError(format!("Unknown state {:?}", nsid))
            })?;
            if let Some(info) = next_state.get_eta(swap) {
                result.push(info);
            }
            next_state_id = next_state.get_next_swap_state();
        }
        Ok(result)
    }
}
|
use dors::DorsError;
use dors::{all_tasks, run, run_with_args};
#[test]
fn test_workspace_only() {
    // Every workspace-level task must succeed when run from the root.
    let tasks = [
        "check",
        "should-be-on-member",
        "should-run-before-only-once",
        "should-run-after-only-once",
        "should-not-run-befores-on-members",
    ];
    for task in &tasks {
        assert!(run(task, "./tests/workspace_only").unwrap().success());
    }
}
#[test]
fn test_workspace_failures() {
    // Failing tasks exit with the sentinel status code 55.
    for task in &["should-fail", "should-fail-in-multiline"] {
        assert_eq!(
            run(task, "./tests/workspace_only").unwrap().code().unwrap(),
            55
        );
    }
}
#[test]
fn test_workspace_failures_from_member() {
    // The same failures must be reported when invoked from inside a member.
    let tasks = [
        "should-fail",
        "should-fail-in-multiline",
        "fail-if-not-on-root",
    ];
    for task in &tasks {
        let code = run(task, "./tests/workspace_only/member1")
            .unwrap()
            .code()
            .unwrap();
        assert_eq!(code, 55);
    }
}
#[test]
fn test_member_only() {
    // Tasks defined only on the member must all succeed from its directory.
    let tasks = [
        "should-be-here",
        "should-be-here-explicit",
        "should-be-in-workspace",
        "should-be-in-tests",
        "should-be-one",
        "should-be-one-at-root",
        "should-have-default-env",
    ];
    for task in &tasks {
        assert!(run(task, "./tests/workspace_member_only/member1")
            .unwrap()
            .success());
    }
}
#[test]
fn test_workspace_all() {
    // Inheritance/override semantics across the whole workspace.
    let tasks = [
        "should-not-overwrite",
        "should-overwrite-members",
        "nested-works-with-run-variants",
        "should-not-run-before-or-after-on-member",
        "only-member1",
        "only-member2",
        "should-inherit-envs",
        "should-have-no-args",
    ];
    for task in &tasks {
        assert!(run(task, "./tests/workspace_all").unwrap().success());
    }
}
#[test]
fn test_workspace_all_args() {
    // Forward two extra arguments; the task itself verifies it got them.
    let args = ["".to_string(), "2".to_string()];
    let status = run_with_args("should-pass-args", "tests/workspace_all", &args).unwrap();
    assert!(status.success());
}
#[test]
fn test_workspace_all_failures() {
    // All of these exit with the sentinel status code 55 from the root...
    for task in &["should-overwrite", "should-fail", "should-pass-args"] {
        assert_eq!(
            run(task, "./tests/workspace_all").unwrap().code().unwrap(),
            55
        );
    }
    // ...and "should-overwrite" also fails when run from member2 directly.
    assert_eq!(
        run("should-overwrite", "./tests/workspace_all/member2")
            .unwrap()
            .code()
            .unwrap(),
        55
    );
}
#[test]
fn test_workspace_all_member1() {
    // member1's override of "should-overwrite" succeeds, unlike member2's.
    assert!(run("should-overwrite", "./tests/workspace_all/member1")
        .unwrap()
        .success());
}
#[test]
fn test_list_workspace_all() {
    // Sort so the comparison is independent of discovery order.
    let mut tasks = all_tasks("./tests/workspace_all").unwrap();
    tasks.sort();
    let expected = [
        "check",
        "nested-works-with-run-variants",
        "only-member1",
        "only-member2",
        "should-fail",
        "should-have-no-args",
        "should-inherit-envs",
        "should-not-overwrite",
        "should-not-run-before-or-after-on-member",
        "should-overwrite",
        "should-overwrite-members",
        "should-pass-args",
    ];
    assert_eq!(tasks, expected);
}
#[test]
fn test_list_member_only() {
    // member1 defines exactly seven tasks.
    let tasks = all_tasks("./tests/workspace_member_only/member1").unwrap();
    assert_eq!(tasks.len(), 7);
}
#[test]
fn test_no_task() {
    // An unknown task must be reported as NoTask carrying the requested name.
    let err = run("fake-task", "tests/workspace_all").unwrap_err();
    let is_no_task = matches!(
        err.kind(),
        DorsError::NoTask(task_name) if task_name == "fake-task"
    );
    assert!(is_no_task);
}
#[test]
fn test_workspace_only_from_member() {
    // Workspace-level tasks remain runnable from a member directory.
    for task in &["should-be-on-member", "should-run-before-only-once"] {
        assert!(run(task, "./tests/workspace_only/member1")
            .unwrap()
            .success());
    }
}
#[test]
fn test_no_dorsfiles() {
    // since writing is occurring, careful not to use this dir outside this test!
    let tmp_file = "tests/no_dorsfiles/Dorsfile.toml";
    // Missing Dorsfile at the root and at a member are distinct errors.
    assert!(matches!(
        all_tasks("./tests/no_dorsfiles").unwrap_err().kind(),
        DorsError::NoDorsfile
    ));
    assert!(matches!(
        run("", "tests/no_dorsfiles/member1").unwrap_err().kind(),
        DorsError::NoMemberDorsfile
    ));
    // Syntactically invalid content must be reported as unparseable...
    std::fs::write(tmp_file, b"invalid-syntax").unwrap();
    assert!(matches!(
        all_tasks("tests/no_dorsfiles").unwrap_err().kind(),
        DorsError::CouldNotParseDorsfile(_)
    ));
    // ...and so must valid TOML with an unexpected field.
    std::fs::write(tmp_file, b"[task.a]\nunexpected-field=1").unwrap();
    assert!(matches!(
        all_tasks("tests/no_dorsfiles").unwrap_err().kind(),
        DorsError::CouldNotParseDorsfile(_)
    ));
    std::fs::remove_file(tmp_file).unwrap();
}
|
// implements the voxel volume submodule
/// A single voxel of the volume.
// NOTE(review): stub — no fields or methods are defined yet.
pub struct Voxel {
}
impl Voxel {
}
#![deny(clippy::pedantic)]
#![allow(clippy::used_underscore_binding)]
mod otp;
use clap::Clap;
use colored::Colorize;
use itertools::Itertools;
use onep_backend_api as api;
use onep_backend_op as backend;
use std::{collections::BTreeMap, convert::TryFrom};
use term_table::{
row::Row,
table_cell::{Alignment, TableCell},
Table, TableStyle,
};
/// CLI-level errors surfaced to the user (backend errors are wrapped by
/// `anyhow` at the call sites instead).
#[derive(thiserror::Error, Debug)]
enum Error {
    /// `show` was asked for a uuid the backend could not resolve.
    #[error("Couldn't find the requested item.")]
    NotFound,
}
#[derive(Clap, Debug)]
#[clap(author, version)]
/// 1password cli for humans
// NOTE: the `///` doc comments below are emitted verbatim into the
// generated `--help` output by clap — editing them changes the CLI.
enum Opt {
    /// List all items
    #[clap(alias = "ls")]
    List {
        #[clap(long, short = 'u')]
        show_uuids: bool,
        #[clap(long, short = 'n')]
        show_account_names: bool,
    },
    /// Search for an item
    Search {
        #[clap(long, short = 'u')]
        show_uuids: bool,
        #[clap(long, short = 'n')]
        show_account_names: bool,
        terms: String,
    },
    /// Show existing password and optionally put it on the clipboard
    #[clap(alias = "get")]
    Show { uuid: String },
    /// Generates a new password and stores it in your password store
    #[clap(alias = "gen")]
    Generate {
        /// Name of the login to create
        name: String,
        /// Username to associate with the login
        #[clap(long, short = 'n')]
        username: Option<String>,
        /// URL to associate with the login
        #[clap(long, short = 'u')]
        url: Option<String>,
        /// Comma-separated list of tags to associate with the login
        #[clap(long, short = 't')]
        tags: Option<String>,
    },
}
/// Entry point: run the CLI against the `op` backend; print any error to
/// stderr and exit with status 1.
// NOTE(review): `core_threads` is the tokio 0.2/0.3 runtime option name
// (tokio 1.x renamed it to `worker_threads`) — keep in sync with the
// tokio version pinned in Cargo.toml.
#[tokio::main(core_threads = 1)]
async fn main() {
    if let Err(e) = run(&backend::OpBackend {}).await {
        eprintln!("{}", e);
        std::process::exit(1);
    }
}
/// Parse the CLI arguments and dispatch to the matching action on `backend`.
///
/// # Errors
/// Propagates backend errors, plus `Error::NotFound` when `show` is given
/// a uuid the backend cannot resolve.
async fn run<T: api::Backend>(backend: &T) -> anyhow::Result<()>
where
    T::Error: 'static + std::error::Error + Send + Sync,
{
    match Opt::parse() {
        // A plain listing is just a search with no terms.
        Opt::List {
            show_uuids,
            show_account_names,
        } => search(backend, None, show_uuids, show_account_names).await?,
        Opt::Search {
            terms,
            show_uuids,
            show_account_names,
        } => search(backend, Some(terms), show_uuids, show_account_names).await?,
        Opt::Show { uuid } => {
            let result = backend.get(&uuid).await?.ok_or(Error::NotFound)?;
            show(result);
        }
        Opt::Generate {
            name,
            username,
            url,
            tags,
        } => {
            // Create the login, then render it like `show` does.
            let result = backend
                .generate(&name, username.as_deref(), url.as_deref(), tags.as_deref())
                .await?;
            show(result);
        }
    }
    Ok(())
}
#[allow(clippy::non_ascii_literal)]
/// Print the account header followed by a tree of vaults and matching items.
///
/// `terms == None` lists everything. When `terms` exactly matches a vault
/// name (case-insensitive), that vault's contents are fetched explicitly
/// and replace the grouped results for its uuid.
///
/// # Errors
/// Propagates any backend failure from the account/vault/search calls.
async fn search<T: api::Backend>(
    backend: &T,
    terms: Option<String>,
    show_uuids: bool,
    show_account_names: bool,
) -> anyhow::Result<()>
where
    T::Error: 'static + std::error::Error + Send + Sync,
{
    // Fetch account info, vault list and search results concurrently.
    let (account, vaults, results) = tokio::try_join!(
        backend.account(),
        backend.vaults(),
        backend.search(terms.as_deref())
    )?;
    // Group the results per vault uuid (BTreeMap keeps key order stable).
    let mut results_grouped: BTreeMap<_, Vec<_>> = BTreeMap::new();
    for (key, group) in &results.into_iter().group_by(|v| v.vault_uuid.clone()) {
        results_grouped.insert(key, group.collect());
    }
    // slow path for when vault is an exact match
    if let Some(terms) = terms {
        if let Some(vault) = vaults
            .iter()
            .find(|v| v.name.to_lowercase() == terms.to_lowercase())
        {
            results_grouped.insert(vault.uuid.clone(), backend.search(Some(&vault.uuid)).await?);
        }
    }
    println!("{} ({})", account.name, account.domain);
    // `saturating_sub` guards the no-results case: `len() - 1` underflowed
    // usize and panicked when the search matched nothing.
    let vault_count = results_grouped.len().saturating_sub(1);
    for (current_vault_index, (vault, group)) in results_grouped.into_iter().enumerate() {
        // Resolve the vault uuid to a display name where possible.
        let vault = vaults
            .iter()
            .find(|v| v.uuid == vault)
            .map_or_else(|| format!("Unknown Vault ({})", vault), |v| v.name.clone());
        println!(
            "{} {}",
            if current_vault_index < vault_count {
                "├──"
            } else {
                "└──"
            },
            vault.blue()
        );
        let line_start = if current_vault_index < vault_count {
            "│"
        } else {
            " "
        };
        // Same underflow guard: an explicitly fetched vault may be empty.
        let item_count = group.len().saturating_sub(1);
        for (current_item_index, result) in group.into_iter().enumerate() {
            println!(
                "{} {} {}",
                line_start,
                if current_item_index < item_count {
                    "├──"
                } else {
                    "└──"
                },
                result.title.trim()
            );
            let prefix = if current_item_index < item_count {
                "│ "
            } else {
                " "
            };
            if show_account_names && !result.account_info.trim().is_empty() {
                println!(
                    "{} {} {}",
                    line_start,
                    prefix,
                    result.account_info.trim().green()
                );
            }
            if show_uuids {
                println!("{} {} {}", line_start, prefix, result.uuid.yellow());
            }
        }
    }
    Ok(())
}
/// Render a single item as extended-style tables: one table for the
/// top-level fields, then one per non-empty section.
fn show(item: api::Item) {
    // Title row spans both columns; each field is name | right-aligned value.
    let mut main_table = Table::new();
    main_table.style = TableStyle::extended();
    main_table.add_row(Row::new(vec![TableCell::new_with_alignment(
        item.title,
        2,
        Alignment::Center,
    )]));
    for field in item.fields {
        main_table.add_row(Row::new(vec![
            TableCell::new(field.name),
            TableCell::new_with_alignment(field.value, 1, Alignment::Right),
        ]));
    }
    println!("{}", main_table.render());
    for section in item.sections {
        // Sections without fields are not rendered at all.
        if section.fields.is_empty() {
            continue;
        }
        let mut section_table = Table::new();
        section_table.style = TableStyle::extended();
        if !section.name.is_empty() {
            section_table.add_row(Row::new(vec![TableCell::new_with_alignment(
                section.name,
                2,
                Alignment::Center,
            )]));
        }
        for field in section.fields {
            let mut value = field.value;
            // A stored TOTP secret is replaced by a freshly generated code
            // whenever it parses; otherwise the raw value is shown.
            if field.field_type == api::ItemFieldType::Totp {
                if let Ok(tfa) = otp::TwoFactorAuth::try_from(value.as_ref()) {
                    value = tfa.generate().value;
                }
            }
            section_table.add_row(Row::new(vec![
                TableCell::new(field.name),
                TableCell::new_with_alignment(value, 1, Alignment::Right),
            ]));
        }
        println!("{}", section_table.render());
    }
}
|
pub mod peer;
pub use peer::Peer; |
#[doc = r"Register block"]
// svd2rust-style generated layout: `#[repr(C)]` plus the `_reserved*`
// padding arrays pin every register at the offset named in its doc
// attribute, so field order and padding must not be changed.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - TIM14 control register 1"]
    pub cr1: CR1,
    _reserved1: [u8; 0x0a],
    #[doc = "0x0c - TIM14 Interrupt enable register"]
    pub dier: DIER,
    _reserved2: [u8; 0x02],
    #[doc = "0x10 - TIM14 status register"]
    pub sr: SR,
    _reserved3: [u8; 0x02],
    #[doc = "0x14 - TIM14 event generation register"]
    pub egr: EGR,
    _reserved4: [u8; 0x02],
    // Backing storage for the ccmr1 register at 0x18; exposed via the
    // `ccmr1_output`/`ccmr1_input` accessor methods, which are alternate
    // typed views of this same storage.
    _reserved_4_ccmr1: [u8; 0x04],
    _reserved5: [u8; 0x04],
    #[doc = "0x20 - TIM14 capture/compare enable register"]
    pub ccer: CCER,
    _reserved6: [u8; 0x02],
    #[doc = "0x24 - TIM14 counter"]
    pub cnt: CNT,
    #[doc = "0x28 - TIM14 prescaler"]
    pub psc: PSC,
    _reserved8: [u8; 0x02],
    #[doc = "0x2c - TIM14 auto-reload register"]
    pub arr: ARR,
    _reserved9: [u8; 0x06],
    #[doc = "0x34 - TIM14 capture/compare register 1"]
    pub ccr1: CCR1,
    _reserved10: [u8; 0x32],
    #[doc = "0x68 - TIM14 timer input selection register"]
    pub tisel: TISEL,
}
impl RegisterBlock {
    #[doc = "0x18 - TIM14 capture/compare mode register 1 \\[alternate\\]"]
    #[inline(always)]
    pub const fn ccmr1_output(&self) -> &CCMR1_OUTPUT {
        // SAFETY: offset 24 (0x18) lies inside this `#[repr(C)]` block
        // (backed by `_reserved_4_ccmr1`), and the returned reference
        // borrows `self`, so the storage is valid for its lifetime.
        unsafe { &*(self as *const Self).cast::<u8>().add(24usize).cast() }
    }
    #[doc = "0x18 - TIM14 capture/compare mode register 1 \\[alternate\\]"]
    #[inline(always)]
    pub const fn ccmr1_input(&self) -> &CCMR1_INPUT {
        // SAFETY: same storage and reasoning as `ccmr1_output` above; the
        // two accessors are alternate typed views of the register at 0x18.
        unsafe { &*(self as *const Self).cast::<u8>().add(24usize).cast() }
    }
}
#[doc = "CR1 (rw) register accessor: TIM14 control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1`]
module"]
pub type CR1 = crate::Reg<cr1::CR1_SPEC>;
#[doc = "TIM14 control register 1"]
pub mod cr1;
#[doc = "DIER (rw) register accessor: TIM14 Interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dier`]
module"]
pub type DIER = crate::Reg<dier::DIER_SPEC>;
#[doc = "TIM14 Interrupt enable register"]
pub mod dier;
#[doc = "SR (rw) register accessor: TIM14 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`]
module"]
pub type SR = crate::Reg<sr::SR_SPEC>;
#[doc = "TIM14 status register"]
pub mod sr;
#[doc = "EGR (w) register accessor: TIM14 event generation register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`egr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`egr`]
module"]
pub type EGR = crate::Reg<egr::EGR_SPEC>;
#[doc = "TIM14 event generation register"]
pub mod egr;
#[doc = "CCMR1_input (rw) register accessor: TIM14 capture/compare mode register 1 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccmr1_input::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccmr1_input::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccmr1_input`]
module"]
pub type CCMR1_INPUT = crate::Reg<ccmr1_input::CCMR1_INPUT_SPEC>;
#[doc = "TIM14 capture/compare mode register 1 \\[alternate\\]"]
pub mod ccmr1_input;
#[doc = "CCMR1_output (rw) register accessor: TIM14 capture/compare mode register 1 \\[alternate\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccmr1_output::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccmr1_output::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccmr1_output`]
module"]
pub type CCMR1_OUTPUT = crate::Reg<ccmr1_output::CCMR1_OUTPUT_SPEC>;
#[doc = "TIM14 capture/compare mode register 1 \\[alternate\\]"]
pub mod ccmr1_output;
#[doc = "CCER (rw) register accessor: TIM14 capture/compare enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccer::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccer::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccer`]
module"]
pub type CCER = crate::Reg<ccer::CCER_SPEC>;
#[doc = "TIM14 capture/compare enable register"]
pub mod ccer;
#[doc = "CNT (rw) register accessor: TIM14 counter\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cnt::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cnt::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cnt`]
module"]
pub type CNT = crate::Reg<cnt::CNT_SPEC>;
#[doc = "TIM14 counter"]
pub mod cnt;
#[doc = "PSC (rw) register accessor: TIM14 prescaler\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`psc::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`psc::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`psc`]
module"]
pub type PSC = crate::Reg<psc::PSC_SPEC>;
#[doc = "TIM14 prescaler"]
pub mod psc;
#[doc = "ARR (rw) register accessor: TIM14 auto-reload register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`arr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`arr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`arr`]
module"]
pub type ARR = crate::Reg<arr::ARR_SPEC>;
#[doc = "TIM14 auto-reload register"]
pub mod arr;
#[doc = "CCR1 (rw) register accessor: TIM14 capture/compare register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccr1`]
module"]
pub type CCR1 = crate::Reg<ccr1::CCR1_SPEC>;
#[doc = "TIM14 capture/compare register 1"]
pub mod ccr1;
#[doc = "TISEL (rw) register accessor: TIM14 timer input selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tisel::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tisel::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tisel`]
module"]
pub type TISEL = crate::Reg<tisel::TISEL_SPEC>;
#[doc = "TIM14 timer input selection register"]
pub mod tisel;
|
use sensor_value::SensorValue;
use telegram_bot::types::refs::UserId;
use actix_files as axtix_fs;
use actix_identity::{CookieIdentityPolicy, IdentityService};
use actix_rt::System;
use actix_web::web::{Bytes, Data};
use actix_web::HttpRequest;
use actix_web::HttpResponse;
use actix_web::{web, App, HttpServer, Responder};
use std::collections::HashMap;
use std::sync::{atomic::AtomicUsize, Arc, Mutex, RwLock};
use std::thread;
use crate::controller::Event;
use crate::input;
use crate::input::bot;
pub mod database;
pub mod login;
pub mod login_redirect;
use super::{wakeup, commands, music};
pub use database::PasswordDatabase;
pub use login::{login_get_and_check, login_page, logout, make_random_cookie_key};
pub use login_redirect::CheckLogin;
/// Per-login session data stored in `State::sessions`; currently empty.
pub struct Session {} //TODO deprecate
/// Shared application state; cloned into every actix worker via `Data`.
#[derive(Clone)]
pub struct State {
    /// Channel into the controller event loop (`Event` messages).
    pub controller_addr: crossbeam_channel::Sender<Event>,
    pub jobs: input::jobs::Jobs,
    pub wakeup: input::jobs::WakeUp,
    /// Credential store used by the login endpoints.
    pub passw_db: PasswordDatabase,
    /// Active sessions keyed by a numeric session id.
    pub sessions: Arc<RwLock<HashMap<u16, Arc<Mutex<Session>>>>>,
    /// Monotonic counter handing out fresh session ids.
    pub free_session_ids: Arc<AtomicUsize>,
    pub youtube_dl: input::YoutubeDownloader,
    /// Telegram bot token; also doubles as the webhook route path.
    pub bot_token: String,
    // NOTE(review): presumably the allow-list of telegram users for the
    // bot — verify against the bot handler.
    pub valid_ids: Vec<UserId>,
}
impl State {
    /// Bundle everything the request handlers need into one shared state.
    ///
    /// `valid_ids` arrives as raw `i64` telegram ids and is converted to
    /// `UserId` once here so handlers can compare directly.
    pub fn new(
        passw_db: PasswordDatabase,
        controller_tx: crossbeam_channel::Sender<Event>,
        jobs: input::jobs::Jobs,
        wakeup: input::jobs::WakeUp,
        youtube_dl: input::YoutubeDownloader,
        bot_token: String,
        valid_ids: Vec<i64>,
    ) -> Self {
        let free_session_ids = Arc::new(AtomicUsize::new(0));
        let sessions = Arc::new(RwLock::new(HashMap::new()));
        // Pass `UserId::from` directly — the closure wrapper was redundant
        // (clippy::redundant_closure).
        let valid_ids = valid_ids.into_iter().map(UserId::from).collect();
        State {
            controller_addr: controller_tx,
            jobs,
            wakeup,
            passw_db,
            youtube_dl,
            sessions,
            free_session_ids,
            bot_token,
            valid_ids,
        }
    }
}
/// Root handler (mounted behind `CheckLogin` in `start_webserver`);
/// always replies with a static greeting.
pub async fn index(_req: HttpRequest) -> impl Responder {
    "Hello world!"
}
pub fn start_webserver(
state: State,
port: u16,
domain: String,
ha_key: String,
) -> actix_web::dev::Server {
let cookie_key = make_random_cookie_key();
let (tx, rx) = crossbeam_channel::unbounded();
thread::spawn(move || {
let sys = System::new("HttpServer");
let web_server = HttpServer::new(move || {
// data the webservers functions have access to
let data = actix_web::web::Data::new(state.clone());
App::new()
.app_data(data)
.wrap(IdentityService::new(
CookieIdentityPolicy::new(&cookie_key[..])
.domain(&domain)
.name("auth-cookie")
.path("/")
.secure(true),
))
.service(
web::scope("/login").service(
web::resource(r"/{path}")
.route(web::post().to(login_get_and_check))
.route(web::get().to(login_page)),
),
)
.service(web::resource("/commands/lamps/toggle").to(commands::toggle))
.service(web::resource("/commands/lamps/evening").to(commands::evening))
.service(web::resource("/commands/lamps/night").to(commands::night))
.service(web::resource("/commands/lamps/day").to(commands::normal))
.service(web::resource("/commands/lamps/dimmest").to(commands::dimmest))
.service(web::resource("/commands/lamps/dim").to(commands::dim))
.service(web::resource("/commands/lightloop").to(commands::lightloop))
.service(web::resource("/alarm/tomorrow").to(wakeup::tomorrow))
.service(web::resource("/alarm/usually").to(wakeup::usually))
.service(web::resource(&format!("/{}", &state.bot_token)).to(bot::handle_webhook))
.service(
web::resource(&format!("/{}", ha_key)).route(web::post().to(handle_sensor)),
)
.service(
web::scope("/")
.wrap(CheckLogin {})
.service(web::resource("").to(index))
.service(web::resource("logout/").to(logout))
.service(web::resource("add_song").to(music::add_song_from_url))
//for all other urls we try to resolve to static files in the "web" dir
.service(axtix_fs::Files::new("", "./web/")),
)
})
.bind(&format!("127.0.0.1:{}", port)) // SEC: disallow connections from the outside
.unwrap()
.shutdown_timeout(5) // shut down 5 seconds after getting the signal to shut down
.run();
let _ = tx.send(web_server.clone());
sys.run()
});
let web_handle = rx.recv().unwrap();
web_handle
}
/// Ingest endpoint for the `ha_key` route: the body is a bincode-encoded
/// `SensorValue`, forwarded to the controller as an `Event::Sensor`.
/// Replies 200 OK either way; decode failures are only logged.
pub fn handle_sensor(body: Bytes, state: Data<State>) -> HttpResponse {
    match bincode::deserialize::<SensorValue>(&body[..]) {
        Ok(event) => state.controller_addr.send(Event::Sensor(event)).unwrap(),
        Err(err) => error!("deserialize sensorval failed: {:?}", err),
    }
    HttpResponse::Ok().finish()
}
|
pub mod config;
pub mod devices;
pub mod healthcheck;
pub mod index;
pub mod integrations;
pub mod notifications;
pub mod sessions;
pub mod settings;
|
#[doc = "Reader of register HSEM_C2IER"]
pub type R = crate::R<u32, super::HSEM_C2IER>;
#[doc = "Writer for register HSEM_C2IER"]
pub type W = crate::W<u32, super::HSEM_C2IER>;
#[doc = "Register HSEM_C2IER `reset()`'s with value 0"]
impl crate::ResetValue for super::HSEM_C2IER {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Reset state: every interrupt-enable bit cleared.
        0
    }
}
#[doc = "Reader of field `ISE0`"]
pub type ISE0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE0`"]
pub struct ISE0_W<'a> {
w: &'a mut W,
}
impl<'a> ISE0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Reader of field `ISE1`"]
pub type ISE1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE1`"]
pub struct ISE1_W<'a> {
w: &'a mut W,
}
impl<'a> ISE1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `ISE2`"]
pub type ISE2_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE2`"]
pub struct ISE2_W<'a> {
w: &'a mut W,
}
impl<'a> ISE2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `ISE3`"]
pub type ISE3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE3`"]
pub struct ISE3_W<'a> {
w: &'a mut W,
}
impl<'a> ISE3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `ISE4`"]
pub type ISE4_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE4`"]
pub struct ISE4_W<'a> {
w: &'a mut W,
}
impl<'a> ISE4_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `ISE5`"]
pub type ISE5_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE5`"]
pub struct ISE5_W<'a> {
w: &'a mut W,
}
impl<'a> ISE5_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `ISE6`"]
pub type ISE6_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE6`"]
pub struct ISE6_W<'a> {
w: &'a mut W,
}
impl<'a> ISE6_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `ISE7`"]
pub type ISE7_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE7`"]
pub struct ISE7_W<'a> {
w: &'a mut W,
}
impl<'a> ISE7_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "Reader of field `ISE8`"]
pub type ISE8_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE8`"]
pub struct ISE8_W<'a> {
w: &'a mut W,
}
impl<'a> ISE8_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `ISE9`"]
pub type ISE9_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE9`"]
pub struct ISE9_W<'a> {
w: &'a mut W,
}
impl<'a> ISE9_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Reader of field `ISE10`"]
pub type ISE10_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE10`"]
pub struct ISE10_W<'a> {
w: &'a mut W,
}
impl<'a> ISE10_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `ISE11`"]
pub type ISE11_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE11`"]
pub struct ISE11_W<'a> {
w: &'a mut W,
}
impl<'a> ISE11_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "Reader of field `ISE12`"]
pub type ISE12_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE12`"]
pub struct ISE12_W<'a> {
w: &'a mut W,
}
impl<'a> ISE12_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "Reader of field `ISE13`"]
pub type ISE13_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE13`"]
pub struct ISE13_W<'a> {
w: &'a mut W,
}
impl<'a> ISE13_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Reader of field `ISE14`"]
pub type ISE14_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE14`"]
pub struct ISE14_W<'a> {
w: &'a mut W,
}
impl<'a> ISE14_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
self.w
}
}
#[doc = "Reader of field `ISE15`"]
pub type ISE15_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE15`"]
pub struct ISE15_W<'a> {
w: &'a mut W,
}
impl<'a> ISE15_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
#[doc = "Reader of field `ISE16`"]
pub type ISE16_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE16`"]
pub struct ISE16_W<'a> {
w: &'a mut W,
}
impl<'a> ISE16_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "Reader of field `ISE17`"]
pub type ISE17_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE17`"]
pub struct ISE17_W<'a> {
w: &'a mut W,
}
impl<'a> ISE17_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "Reader of field `ISE18`"]
pub type ISE18_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE18`"]
pub struct ISE18_W<'a> {
w: &'a mut W,
}
impl<'a> ISE18_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
#[doc = "Reader of field `ISE19`"]
pub type ISE19_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE19`"]
pub struct ISE19_W<'a> {
w: &'a mut W,
}
impl<'a> ISE19_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
self.w
}
}
#[doc = "Reader of field `ISE20`"]
pub type ISE20_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE20`"]
pub struct ISE20_W<'a> {
w: &'a mut W,
}
impl<'a> ISE20_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
#[doc = "Reader of field `ISE21`"]
pub type ISE21_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE21`"]
pub struct ISE21_W<'a> {
w: &'a mut W,
}
impl<'a> ISE21_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
self.w
}
}
#[doc = "Reader of field `ISE22`"]
pub type ISE22_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE22`"]
pub struct ISE22_W<'a> {
w: &'a mut W,
}
impl<'a> ISE22_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
self.w
}
}
#[doc = "Reader of field `ISE23`"]
pub type ISE23_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE23`"]
pub struct ISE23_W<'a> {
w: &'a mut W,
}
impl<'a> ISE23_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
self.w
}
}
#[doc = "Reader of field `ISE24`"]
pub type ISE24_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE24`"]
pub struct ISE24_W<'a> {
w: &'a mut W,
}
impl<'a> ISE24_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
self.w
}
}
#[doc = "Reader of field `ISE25`"]
pub type ISE25_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE25`"]
pub struct ISE25_W<'a> {
w: &'a mut W,
}
impl<'a> ISE25_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
self.w
}
}
#[doc = "Reader of field `ISE26`"]
pub type ISE26_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE26`"]
pub struct ISE26_W<'a> {
w: &'a mut W,
}
impl<'a> ISE26_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
self.w
}
}
#[doc = "Reader of field `ISE27`"]
pub type ISE27_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE27`"]
pub struct ISE27_W<'a> {
w: &'a mut W,
}
impl<'a> ISE27_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27);
self.w
}
}
#[doc = "Reader of field `ISE28`"]
pub type ISE28_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE28`"]
pub struct ISE28_W<'a> {
w: &'a mut W,
}
impl<'a> ISE28_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
self.w
}
}
#[doc = "Reader of field `ISE29`"]
pub type ISE29_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE29`"]
pub struct ISE29_W<'a> {
w: &'a mut W,
}
impl<'a> ISE29_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
#[doc = "Reader of field `ISE30`"]
pub type ISE30_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE30`"]
pub struct ISE30_W<'a> {
w: &'a mut W,
}
impl<'a> ISE30_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
self.w
}
}
#[doc = "Reader of field `ISE31`"]
pub type ISE31_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISE31`"]
pub struct ISE31_W<'a> {
w: &'a mut W,
}
impl<'a> ISE31_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
self.w
}
}
// Generate one single-bit reader accessor per `ISEn` field; each reads bit
// `offset` of the register value (for offset 0, `bits >> 0` equals the
// original unshifted form).
macro_rules! ise_reader {
    ($fn_name:ident, $reader:ident, $offset:expr) => {
        /// Interrupt semaphore enable bit for the corresponding semaphore.
        #[inline(always)]
        pub fn $fn_name(&self) -> $reader {
            $reader::new(((self.bits >> $offset) & 0x01) != 0)
        }
    };
}
impl R {
    ise_reader!(ise0, ISE0_R, 0);
    ise_reader!(ise1, ISE1_R, 1);
    ise_reader!(ise2, ISE2_R, 2);
    ise_reader!(ise3, ISE3_R, 3);
    ise_reader!(ise4, ISE4_R, 4);
    ise_reader!(ise5, ISE5_R, 5);
    ise_reader!(ise6, ISE6_R, 6);
    ise_reader!(ise7, ISE7_R, 7);
    ise_reader!(ise8, ISE8_R, 8);
    ise_reader!(ise9, ISE9_R, 9);
    ise_reader!(ise10, ISE10_R, 10);
    ise_reader!(ise11, ISE11_R, 11);
    ise_reader!(ise12, ISE12_R, 12);
    ise_reader!(ise13, ISE13_R, 13);
    ise_reader!(ise14, ISE14_R, 14);
    ise_reader!(ise15, ISE15_R, 15);
    ise_reader!(ise16, ISE16_R, 16);
    ise_reader!(ise17, ISE17_R, 17);
    ise_reader!(ise18, ISE18_R, 18);
    ise_reader!(ise19, ISE19_R, 19);
    ise_reader!(ise20, ISE20_R, 20);
    ise_reader!(ise21, ISE21_R, 21);
    ise_reader!(ise22, ISE22_R, 22);
    ise_reader!(ise23, ISE23_R, 23);
    ise_reader!(ise24, ISE24_R, 24);
    ise_reader!(ise25, ISE25_R, 25);
    ise_reader!(ise26, ISE26_R, 26);
    ise_reader!(ise27, ISE27_R, 27);
    ise_reader!(ise28, ISE28_R, 28);
    ise_reader!(ise29, ISE29_R, 29);
    ise_reader!(ise30, ISE30_R, 30);
    ise_reader!(ise31, ISE31_R, 31);
}
// Generate one write-proxy accessor per `ISEn` field; each simply hands the
// writer out wrapped in the field-specific proxy type.
macro_rules! ise_writer {
    ($fn_name:ident, $writer:ident) => {
        /// Returns the write proxy for the corresponding `ISEn` field.
        #[inline(always)]
        pub fn $fn_name(&mut self) -> $writer {
            $writer { w: self }
        }
    };
}
impl W {
    ise_writer!(ise0, ISE0_W);
    ise_writer!(ise1, ISE1_W);
    ise_writer!(ise2, ISE2_W);
    ise_writer!(ise3, ISE3_W);
    ise_writer!(ise4, ISE4_W);
    ise_writer!(ise5, ISE5_W);
    ise_writer!(ise6, ISE6_W);
    ise_writer!(ise7, ISE7_W);
    ise_writer!(ise8, ISE8_W);
    ise_writer!(ise9, ISE9_W);
    ise_writer!(ise10, ISE10_W);
    ise_writer!(ise11, ISE11_W);
    ise_writer!(ise12, ISE12_W);
    ise_writer!(ise13, ISE13_W);
    ise_writer!(ise14, ISE14_W);
    ise_writer!(ise15, ISE15_W);
    ise_writer!(ise16, ISE16_W);
    ise_writer!(ise17, ISE17_W);
    ise_writer!(ise18, ISE18_W);
    ise_writer!(ise19, ISE19_W);
    ise_writer!(ise20, ISE20_W);
    ise_writer!(ise21, ISE21_W);
    ise_writer!(ise22, ISE22_W);
    ise_writer!(ise23, ISE23_W);
    ise_writer!(ise24, ISE24_W);
    ise_writer!(ise25, ISE25_W);
    ise_writer!(ise26, ISE26_W);
    ise_writer!(ise27, ISE27_W);
    ise_writer!(ise28, ISE28_W);
    ise_writer!(ise29, ISE29_W);
    ise_writer!(ise30, ISE30_W);
    ise_writer!(ise31, ISE31_W);
}
|
use hello_tf::InferRequest;
use std::fs;
use std::slice;
use tensorflow::Tensor;
use hello_tf::infer_client::InferClient;
use tokio::runtime::Builder;
/// Sends one inference request to the local server and prints the result.
fn main() {
    // A single-threaded runtime is enough for one request/response exchange.
    let runtime = Builder::new_current_thread().enable_all().build().unwrap();
    runtime.block_on(async {
        // Connect to the inference server on this host.
        let mut client = InferClient::connect("http://localhost:5000").await.unwrap();
        // Batch of one 224x224 RGB image, values loaded from disk.
        let request = InferRequest {
            shape: vec![1, 224, 224, 3],
            data: read_data(),
        };
        let response = client.infer(request).await.unwrap().into_inner();
        let tensor = Tensor::new(&response.shape)
            .with_values(&response.data)
            .unwrap();
        println!("{:?}", tensor.to_vec());
    });
}
/// Reads the request payload from `pys/request` and reinterprets it as
/// native-endian `f32` values.
///
/// The previous implementation cast the `Vec<u8>` pointer to `*const f32`
/// with `slice::from_raw_parts`, which is undefined behavior when the byte
/// buffer is not 4-byte aligned (a `Vec<u8>` allocation gives no such
/// guarantee). Decoding chunk-by-chunk is safe and produces the same values.
/// Trailing bytes that do not form a whole `f32` are ignored, matching the
/// old `len / 4` truncation.
fn read_data() -> Vec<f32> {
    let data = fs::read("pys/request").unwrap();
    data.chunks_exact(4)
        .map(|chunk| f32::from_ne_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]))
        .collect()
}
|
/// Prints a greeting that depends on a (fixed) age threshold.
fn main() {
    let age: u32 = 1;
    // Choose the message as an expression instead of branching on println!.
    let message = if age > 12 { "Hello world" } else { "Go out side" };
    println!("{}", message);
}
|
use core::marker::Copy;
use core::ops::{Div, Rem};
/// Returns the quotient and remainder of `x / y` as a pair, for any copyable
/// type supporting both operators (truncating semantics for built-in ints).
pub fn div_rem<T>(x: T, y: T) -> (T, T)
where
    T: Copy + Div<T, Output = T> + Rem<T, Output = T>,
{
    let quotient = x / y;
    let remainder = x % y;
    (quotient, remainder)
}
|
use crate::isa;
/// Opcode decoder: maps a raw instruction byte to its `isa::Opcode` through a
/// 256-entry lookup table (one slot for every possible `u8` value).
pub struct Decoder {
    // table[b] holds the opcode whose encoding is `b`, or None if unassigned.
    table: [Option<isa::Opcode>; 256],
}
impl Decoder {
    /// Creates a decoder with its lookup table pre-populated from the ISA's
    /// opcode list.
    pub fn new() -> Self {
        Self {
            table: build_opcode_table(),
        }
    }
    /// Returns the opcode encoded by `code`, or `None` for unassigned bytes.
    pub fn opcode(&self, code: u8) -> Option<isa::Opcode> {
        // A u8 index is always within the 256-entry table, so no panic path.
        self.table[code as usize]
    }
}
/// Builds the 256-entry dispatch table, indexed by each opcode's `u8`
/// encoding; unassigned encodings stay `None`.
fn build_opcode_table() -> [Option<isa::Opcode>; 256] {
    let mut table = [None; 256];
    for op in isa::opcode_list() {
        table[op.code as usize] = Some(op);
    }
    table
}
|
// SPDX-License-Identifier: Apache-2.0
use mmap::Protections;
use sgx_types::page::Flags;
/// Convert `Protections` to `Flags`
///
/// Each mmap protection bit maps one-to-one onto its SGX page-flag
/// counterpart (READ->R, WRITE->W, EXEC->X); unset bits contribute nothing.
pub fn p2f(prot: Protections) -> Flags {
    let mapping = [
        (Protections::READ, Flags::R),
        (Protections::WRITE, Flags::W),
        (Protections::EXEC, Flags::X),
    ];
    let mut flags = Flags::empty();
    for &(protection, flag) in mapping.iter() {
        if prot.contains(protection) {
            flags |= flag;
        }
    }
    flags
}
/// Convert `Flags` to `Protections`
///
/// Inverse of `p2f`: each SGX page flag maps back onto its mmap protection
/// bit (R->READ, W->WRITE, X->EXEC).
pub fn f2p(flags: Flags) -> Protections {
    let mapping = [
        (Flags::R, Protections::READ),
        (Flags::W, Protections::WRITE),
        (Flags::X, Protections::EXEC),
    ];
    let mut prot = Protections::empty();
    for &(flag, protection) in mapping.iter() {
        if flags.contains(flag) {
            prot |= protection;
        }
    }
    prot
}
|
use crate::error::RPCError;
use ckb_jsonrpc_types::Alert;
use ckb_logger::error;
use ckb_network::NetworkController;
use ckb_network_alert::{notifier::Notifier as AlertNotifier, verifier::Verifier as AlertVerifier};
use ckb_sync::NetworkProtocol;
use ckb_types::{packed, prelude::*};
use ckb_util::Mutex;
use jsonrpc_core::Result;
use jsonrpc_derive::rpc;
use std::sync::Arc;
#[rpc(server)]
pub trait AlertRpc {
    // curl -d '{"id": 2, "jsonrpc": "2.0", "method":"send_alert","params": [{}]}' -H 'content-type:application/json' 'http://localhost:8114'
    /// Verifies a network alert and, on success, relays it to connected peers
    /// and records it on this node.
    #[rpc(name = "send_alert")]
    fn send_alert(&self, _alert: Alert) -> Result<()>;
}
/// Server-side implementation of the `send_alert` RPC.
pub(crate) struct AlertRpcImpl {
    // Handle used to broadcast verified alerts to peers.
    network_controller: NetworkController,
    // Validates alert signatures before anything is relayed.
    verifier: Arc<AlertVerifier>,
    // Records alerts accepted by this node (shared, so behind a mutex).
    notifier: Arc<Mutex<AlertNotifier>>,
}
impl AlertRpcImpl {
pub fn new(
verifier: Arc<AlertVerifier>,
notifier: Arc<Mutex<AlertNotifier>>,
network_controller: NetworkController,
) -> Self {
AlertRpcImpl {
network_controller,
verifier,
notifier,
}
}
}
impl AlertRpc for AlertRpcImpl {
    // Validates, broadcasts and locally records a network alert.
    fn send_alert(&self, alert: Alert) -> Result<()> {
        // Convert the JSON-RPC representation into the packed wire format.
        let alert: packed::Alert = alert.into();
        let now_ms = faketime::unix_time_as_millis();
        let notice_until: u64 = alert.raw().notice_until().unpack();
        // Reject alerts whose notice window has already passed.
        if notice_until < now_ms {
            return Err(RPCError::custom(
                RPCError::Invalid,
                format!(
                    "expired alert, notice_until: {} server: {}",
                    notice_until, now_ms
                ),
            ));
        }
        // Only alerts carrying valid signatures may be relayed.
        let result = self.verifier.verify_signatures(&alert);
        match result {
            Ok(()) => {
                // A failed broadcast is logged but does not fail the RPC:
                // the alert is still recorded locally below.
                if let Err(err) = self
                    .network_controller
                    .broadcast(NetworkProtocol::ALERT.into(), alert.as_slice().into())
                {
                    error!("Broadcast alert failed: {:?}", err);
                }
                // set self node notifier
                self.notifier.lock().add(&alert);
                Ok(())
            }
            Err(e) => Err(RPCError::custom(RPCError::Invalid, format!("{:#}", e))),
        }
    }
}
|
use clap::Clap;
use itertools::Itertools;
/// Largest palindrome product
///
/// A palindromic number reads the same both ways. The
/// largest palindrome made from the product of two
/// 2-digit numbers is 9009 = 91 × 99.
///
/// Find the largest palindrome made from the product
/// of two 3-digit numbers.
#[derive(Clap)]
pub struct Solution {
    // Exclusive upper bound for both factors; the default of 1000 covers all
    // numbers up to three digits. (The outer /// comment is clap help text.)
    #[clap(short, long, default_value = "1000")]
    limit: u64,
}
impl Solution {
    /// Returns the largest palindromic product `a * b` with
    /// `1 <= a <= b < limit`, or `0` when `limit <= 1`.
    pub fn run(&self) -> u64 {
        (1..self.limit)
            // `a..self.limit` (not `a + 1..`) so equal factors such as
            // 3 * 3 are considered; the previous `permutations(2)` skipped
            // every pair of equal factors (missing palindromic squares) and
            // visited each unordered pair twice.
            .flat_map(|a| (a..self.limit).map(move |b| a * b))
            .filter(is_palindrome)
            .max()
            .unwrap_or(0)
    }
}
// allowed in order to match signature for .filter(...)
#[allow(clippy::trivially_copy_pass_by_ref)]
/// Returns `true` when the decimal representation of `num` reads the same
/// forwards and backwards.
fn is_palindrome(num: &u64) -> bool {
    let digits = num.to_string();
    digits.chars().eq(digits.chars().rev())
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_is_palindrome() {
        // Palindromic values of every parity of digit count.
        for n in [1u64, 11, 121, 1221, 12321, 123321].iter() {
            assert!(is_palindrome(n));
        }
        // Non-palindromes.
        for n in [12u64, 123].iter() {
            assert!(!is_palindrome(n));
        }
    }
    #[test]
    fn test_provided() {
        // The largest palindrome made from two 2-digit factors is 91 * 99.
        let solution = Solution { limit: 100 };
        assert_eq!(solution.run(), 9009);
    }
}
|
use crate::{
rels::Dir2D,
symmetry::{Rot, DEG_0, DEG_180},
};
use std::{
collections::{HashMap, HashSet},
hash::Hash,
};
/// Per-tile description: how many rotational symmetries the tile has and,
/// for each side type, the rotations at which that side type appears.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct TileDesc<T: Eq + Hash> {
    // How many symmetries are there?
    pub cardinality: usize,
    // Side type -> Rotations
    pub desc: HashMap<T, Vec<Rot>>,
}
impl<T> TileDesc<T>
where
    T: Eq + Hash,
{
    /// Builds a [`TileDesc`] from a symmetry count and an iterator of
    /// `(side type, rotations)` pairs.
    // The method-level `where T: Hash + Eq` bound was dropped: it merely
    // repeated the bound already present on this `impl` block.
    pub fn new(cardinality: usize, desc: impl IntoIterator<Item = (T, Vec<Rot>)>) -> Self {
        Self {
            cardinality,
            desc: desc.into_iter().collect(),
        }
    }
}
/// A full tile-set description mapping each tile name to its [`TileDesc`].
pub struct Desc<K: Hash + Copy + Eq, T: Hash + Copy + Eq> {
    // tile name -> tile description
    pub tiles: HashMap<K, TileDesc<T>>,
}
impl<K, T> Desc<K, T>
where
    K: Hash + Copy + Eq,
    T: Hash + Copy + Eq,
{
    /// returns all possible tile configurations for this description
    pub fn items(&self) -> impl Iterator<Item = (K, Rot)> + '_ {
        self
            .tiles
            .iter()
            .flat_map(|(&name, desc)| Rot::up_to(desc.cardinality).map(move |r| (name, r)))
    }
    /// Computes the adjacency relation: for every oriented tile `(K, Rot)`,
    /// the set of oriented tiles that may sit next to it and in which
    /// direction. Entries are recorded symmetrically — if `a` accepts `b`
    /// in direction `d`, then `b` accepts `a` in `d.opp()`.
    pub fn rels(&self) -> HashMap<(K, Rot), HashSet<((K, Rot), Dir2D)>> {
        let mut out = HashMap::new();
        // Compare every tile against every tile (including itself).
        self.tiles.iter().for_each(|(&a, desc)| {
            let a_card = desc.cardinality;
            let a_desc = &desc.desc;
            self.tiles.iter().for_each(|(&b, desc)| {
                let b_card = desc.cardinality;
                let b_desc = &desc.desc;
                // Tiles can only relate through a side type both declare.
                a_desc.iter().for_each(|(a_side, a_rots)| {
                    if let Some(b_rots) = b_desc.get(a_side) {
                        a_rots.iter().copied().for_each(|a_rot| {
                            b_rots.iter().copied().for_each(|b_rot| {
                                // align a to face right
                                let a_rot_dest = a_rot.to(DEG_0, a_card);
                                // align to face left
                                let b_rot_dest = b_rot.to(DEG_180, b_card);
                                use Dir2D::*;
                                // Rotate the matched pair through all four
                                // directions, registering both orientations.
                                [Right, Up, Left, Down]
                                    .iter()
                                    .copied()
                                    .enumerate()
                                    .for_each(|(i, dir)| {
                                        let a_side = (a, a_rot_dest.rot_90_n(a_card, i));
                                        let b_side = (b, b_rot_dest.rot_90_n(b_card, i));
                                        out
                                            .entry(a_side)
                                            .or_insert_with(HashSet::new)
                                            .insert((b_side, dir));
                                        out
                                            .entry(b_side)
                                            .or_insert_with(HashSet::new)
                                            .insert((a_side, dir.opp()));
                                    })
                            });
                        })
                    }
                });
            });
        });
        out
    }
}
|
extern crate tch;
use tch::{Tensor, no_grad};
#[macro_use(c)]
extern crate cute;
// Removes the last dimension of `t` by narrowing it to its first slice and
// reshaping the result to the remaining dimensions.
// NOTE(review): only index 0 along the final dimension survives — confirm
// this is the intent at any future call sites (currently unused, hence `_`).
fn _drop_rightmost_dim(t: Tensor) -> Tensor {
    let t = t.narrow(t.ld() as i64, 0, 1);
    t.reshape(&t.size().as_slice()[..t.ld()])
}
/// Small indexing helpers shared by the distribution implementations.
pub trait TensorUtil {
    /// Last dimension
    fn ld(&self) -> usize;
    /// Shape of the trailing `i` dimensions ("event shape").
    fn eshape(&self, i: usize) -> Vec<i64>;
}
impl TensorUtil for Tensor {
    fn ld(&self) -> usize {
        self.size().len() - 1
    }
    fn eshape(&self, i: usize) -> Vec<i64> {
        // Keep only the last `i` entries of the size vector.
        self.size()[&self.dim() - i ..].to_vec()
    }
}
/// A probability distribution, mirroring the `torch.distributions` API.
pub trait Distribution {
    /// Generates a sample_shape shaped sample or sample_shape shaped batch of
    /// samples if the distribution parameters are batched.
    /// Gradients are suppressed; use `rsample` for a differentiable sample.
    fn sample(&self, sample_shape: &[i64]) -> Tensor {
        no_grad(|| {
            return self.rsample(sample_shape)
        })
    }
    /// Generates a sample_shape shaped reparameterized sample or sample_shape
    /// shaped batch of reparameterized samples if the distribution parameters
    /// are batched.
    fn rsample(&self, sample_shape: &[i64]) -> Tensor;
    /// Generates n samples or n batches of samples if the distribution
    /// parameters are batched. (Unimplemented by default.)
    fn sample_n(&self, _n: i64) -> Tensor { unimplemented!(); }
    /// Returns a new distribution instance (or populates an existing instance
    /// provided by a derived implementation) with batch dimensions expanded
    /// to `batch_shape`. This is expected to call `Tensor::expand` on the
    /// distribution's parameters and so does not allocate new memory for the
    /// expanded instance. Argument checking and parameter broadcasting done
    /// at construction time are not repeated.
    ///
    /// Args:
    ///     batch_shape: the desired expanded size.
    ///     _instance: new instance provided by implementors that
    ///         need to override `expand`.
    ///
    /// Returns:
    ///     New distribution instance with batch dimensions expanded to
    ///     `batch_shape`.
    fn expand(&self, batch_shape: &[i64], _instance: bool) -> Self;
    /// Returns the log of the probability density/mass function evaluated at
    /// `value`.
    ///
    /// Args:
    ///     value (Tensor):
    fn log_prob(&self, value: &Tensor) -> Tensor;
    /// Returns the cumulative density/mass function evaluated at
    /// `value`. (Unimplemented by default.)
    ///
    /// Args:
    ///     value (Tensor):
    fn cdf(&self, _value: &Tensor) -> Tensor { unimplemented!(); }
    /// Returns the inverse cumulative density/mass function evaluated at
    /// `value`. (Unimplemented by default.)
    ///
    /// Args:
    ///     value (Tensor):
    fn icdf(&self, _value: &Tensor) -> Tensor { unimplemented!(); }
    /// Returns tensor containing all values supported by a discrete
    /// distribution. The result will enumerate over dimension 0, so the shape
    /// of the result will be `(cardinality,) + batch_shape + event_shape`
    /// (where `event_shape = ()` for univariate distributions).
    ///
    /// Note that this enumerates over all batched tensors in lock-step
    /// `[[0, 0], [1, 1], ...]`. With `expand = false`, enumeration happens
    /// along dim 0, but with the remaining batch dimensions being
    /// singleton dimensions, `[[0], [1], ..`.
    ///
    /// Args:
    ///     expand (bool): whether to expand the support over the
    ///         batch dims to match the distribution's `batch_shape`.
    ///
    /// Returns:
    ///     Tensor iterating over dimension 0.
    fn enumerate_support(&self, _expand: bool) -> Tensor { unimplemented!(); }
    /// Returns entropy of distribution, batched over batch_shape.
    ///
    /// Returns:
    ///     Tensor of shape batch_shape.
    fn entropy(&self) -> Tensor { unimplemented!(); }
    /// Returns perplexity of distribution, batched over batch_shape.
    /// Defined as `exp(entropy)`.
    ///
    /// Returns:
    ///     Tensor of shape batch_shape.
    fn perplexity(&self) -> Tensor { Tensor::exp(&self.entropy()) }
}
/// Evaluates `d.log_prob` on the Cartesian product of `xs` and `ys` and
/// returns the values as a flat `Vec<f32>` (row-major: `ys` outer, `xs`
/// inner), suitable for plotting a 2-D grid.
pub fn log_prob_2d(d: &impl Distribution, xs: &[f32], ys: &[f32])
-> Vec<f32> {
    // Interleaved (x, y) pairs: [x0, y0, x1, y0, ...]. The unused `nx`/`ny`
    // locals from the original version are gone; the capacity is reserved
    // up front instead of growing piecemeal.
    let mut xys = Vec::with_capacity(xs.len() * ys.len() * 2);
    for y in ys {
        for x in xs {
            xys.push(*x);
            xys.push(*y);
        }
    }
    // Shape (-1, 2): one row per (x, y) pair.
    let xys = Tensor::from(xys.as_slice()).reshape(&[-1, 2]);
    let lps = d.log_prob(&xys);
    Vec::<f32>::from(&lps)
}
pub mod multivariate_normal;
pub mod plotly_evcxr;
|
// This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Types that should only be used for testing!
use crate::crypto::KeyTypeId;
#[cfg(feature = "std")]
use crate::{
crypto::{CryptoTypePublicPair, Pair, Public},
ecdsa, ed25519, sr25519,
traits::Error,
vrf::{make_transcript, VRFSignature, VRFTranscriptData},
};
#[cfg(feature = "std")]
use std::collections::HashSet;
/// Key type for generic Ed25519 key.
pub const ED25519: KeyTypeId = KeyTypeId(*b"ed25");
/// Key type for generic Sr 25519 key.
pub const SR25519: KeyTypeId = KeyTypeId(*b"sr25");
/// Key type for generic Sr 25519 key.
pub const ECDSA: KeyTypeId = KeyTypeId(*b"ecds");
/// A keystore implementation usable in tests.
///
/// Keys are stored as their seed/suri strings; pairs are re-derived on
/// every lookup. Not suitable for production use.
#[cfg(feature = "std")]
#[derive(Default)]
pub struct KeyStore {
    /// `KeyTypeId` maps to public keys and public keys map to private keys.
    keys: std::collections::HashMap<KeyTypeId, std::collections::HashMap<Vec<u8>, String>>,
}
#[cfg(feature = "std")]
impl KeyStore {
    /// Creates a new instance of `Self`.
    pub fn new() -> crate::traits::BareCryptoStorePtr {
        std::sync::Arc::new(parking_lot::RwLock::new(Self::default()))
    }
    /// Re-derives the `sr25519` pair stored for `pub_key` under `id`, if any.
    fn sr25519_key_pair(&self, id: KeyTypeId, pub_key: &sr25519::Public) -> Option<sr25519::Pair> {
        let seed = self.keys.get(&id)?.get(pub_key.as_slice())?;
        Some(sr25519::Pair::from_string(seed, None).expect("`sr25519` seed slice is valid"))
    }
    /// Re-derives the `ed25519` pair stored for `pub_key` under `id`, if any.
    fn ed25519_key_pair(&self, id: KeyTypeId, pub_key: &ed25519::Public) -> Option<ed25519::Pair> {
        let seed = self.keys.get(&id)?.get(pub_key.as_slice())?;
        Some(ed25519::Pair::from_string(seed, None).expect("`ed25519` seed slice is valid"))
    }
    /// Re-derives the `ecdsa` pair stored for `pub_key` under `id`, if any.
    fn ecdsa_key_pair(&self, id: KeyTypeId, pub_key: &ecdsa::Public) -> Option<ecdsa::Pair> {
        let seed = self.keys.get(&id)?.get(pub_key.as_slice())?;
        Some(ecdsa::Pair::from_string(seed, None).expect("`ecdsa` seed slice is valid"))
    }
}
#[cfg(feature = "std")]
impl crate::traits::BareCryptoStore for KeyStore {
    // Lists every stored public key under `id` once per supported scheme;
    // the store does not record which scheme a raw key belongs to, so each
    // key is reported for sr25519, ed25519 and ecdsa alike.
    fn keys(&self, id: KeyTypeId) -> Result<Vec<CryptoTypePublicPair>, Error> {
        self.keys
            .get(&id)
            .map(|map| {
                Ok(map.keys().fold(Vec::new(), |mut v, k| {
                    v.push(CryptoTypePublicPair(sr25519::CRYPTO_ID, k.clone()));
                    v.push(CryptoTypePublicPair(ed25519::CRYPTO_ID, k.clone()));
                    v.push(CryptoTypePublicPair(ecdsa::CRYPTO_ID, k.clone()));
                    v
                }))
            })
            .unwrap_or_else(|| Ok(vec![]))
    }
    // Re-derives every stored seed under `id` as an sr25519 pair and returns
    // the public halves. Panics if a stored seed is malformed.
    fn sr25519_public_keys(&self, id: KeyTypeId) -> Vec<sr25519::Public> {
        self.keys
            .get(&id)
            .map(|keys| {
                keys.values()
                    .map(|s| {
                        sr25519::Pair::from_string(s, None).expect("`sr25519` seed slice is valid")
                    })
                    .map(|p| p.public())
                    .collect()
            })
            .unwrap_or_default()
    }
    // Generates (or derives from `seed`) a new sr25519 pair and stores its
    // seed phrase under `id`, keyed by the raw public key.
    fn sr25519_generate_new(
        &mut self,
        id: KeyTypeId,
        seed: Option<&str>,
    ) -> Result<sr25519::Public, Error> {
        match seed {
            Some(seed) => {
                let pair = sr25519::Pair::from_string(seed, None).map_err(|_| {
                    Error::ValidationError("Generates an `sr25519` pair.".to_owned())
                })?;
                self.keys.entry(id).or_default().insert(pair.public().to_raw_vec(), seed.into());
                Ok(pair.public())
            },
            None => {
                // No seed supplied: generate a fresh pair and remember its phrase.
                let (pair, phrase, _) = sr25519::Pair::generate_with_phrase(None);
                self.keys.entry(id).or_default().insert(pair.public().to_raw_vec(), phrase);
                Ok(pair.public())
            },
        }
    }
    // ed25519 analogue of `sr25519_public_keys`.
    fn ed25519_public_keys(&self, id: KeyTypeId) -> Vec<ed25519::Public> {
        self.keys
            .get(&id)
            .map(|keys| {
                keys.values()
                    .map(|s| {
                        ed25519::Pair::from_string(s, None).expect("`ed25519` seed slice is valid")
                    })
                    .map(|p| p.public())
                    .collect()
            })
            .unwrap_or_default()
    }
    // ed25519 analogue of `sr25519_generate_new`.
    fn ed25519_generate_new(
        &mut self,
        id: KeyTypeId,
        seed: Option<&str>,
    ) -> Result<ed25519::Public, Error> {
        match seed {
            Some(seed) => {
                let pair = ed25519::Pair::from_string(seed, None).map_err(|_| {
                    Error::ValidationError("Generates an `ed25519` pair.".to_owned())
                })?;
                self.keys.entry(id).or_default().insert(pair.public().to_raw_vec(), seed.into());
                Ok(pair.public())
            },
            None => {
                let (pair, phrase, _) = ed25519::Pair::generate_with_phrase(None);
                self.keys.entry(id).or_default().insert(pair.public().to_raw_vec(), phrase);
                Ok(pair.public())
            },
        }
    }
    // ecdsa analogue of `sr25519_public_keys`.
    fn ecdsa_public_keys(&self, id: KeyTypeId) -> Vec<ecdsa::Public> {
        self.keys
            .get(&id)
            .map(|keys| {
                keys.values()
                    .map(|s| {
                        ecdsa::Pair::from_string(s, None).expect("`ecdsa` seed slice is valid")
                    })
                    .map(|p| p.public())
                    .collect()
            })
            .unwrap_or_default()
    }
    // ecdsa analogue of `sr25519_generate_new`.
    fn ecdsa_generate_new(
        &mut self,
        id: KeyTypeId,
        seed: Option<&str>,
    ) -> Result<ecdsa::Public, Error> {
        match seed {
            Some(seed) => {
                let pair = ecdsa::Pair::from_string(seed, None)
                    .map_err(|_| Error::ValidationError("Generates an `ecdsa` pair.".to_owned()))?;
                self.keys.entry(id).or_default().insert(pair.public().to_raw_vec(), seed.into());
                Ok(pair.public())
            },
            None => {
                let (pair, phrase, _) = ecdsa::Pair::generate_with_phrase(None);
                self.keys.entry(id).or_default().insert(pair.public().to_raw_vec(), phrase);
                Ok(pair.public())
            },
        }
    }
    // Stores an arbitrary suri for `public` without validating the scheme.
    fn insert_unknown(&mut self, id: KeyTypeId, suri: &str, public: &[u8]) -> Result<(), ()> {
        self.keys.entry(id).or_default().insert(public.to_owned(), suri.to_string());
        Ok(())
    }
    // The test keystore never uses a password.
    fn password(&self) -> Option<&str> {
        None
    }
    // True only if every `(public key, key type)` pair is present.
    fn has_keys(&self, public_keys: &[(Vec<u8>, KeyTypeId)]) -> bool {
        public_keys.iter().all(|(k, t)| self.keys.get(&t).and_then(|s| s.get(k)).is_some())
    }
    // Intersects the caller-provided keys with what the store actually holds.
    fn supported_keys(
        &self,
        id: KeyTypeId,
        keys: Vec<CryptoTypePublicPair>,
    ) -> std::result::Result<Vec<CryptoTypePublicPair>, Error> {
        let provided_keys = keys.into_iter().collect::<HashSet<_>>();
        let all_keys = self.keys(id)?.into_iter().collect::<HashSet<_>>();
        Ok(provided_keys.intersection(&all_keys).cloned().collect())
    }
    // Signs `msg` with the pair matching `key`, dispatching on the crypto id.
    // The signature is returned SCALE-encoded.
    fn sign_with(
        &self,
        id: KeyTypeId,
        key: &CryptoTypePublicPair,
        msg: &[u8],
    ) -> Result<Vec<u8>, Error> {
        use codec::Encode;
        match key.0 {
            ed25519::CRYPTO_ID => {
                let key_pair: ed25519::Pair = self
                    .ed25519_key_pair(id, &ed25519::Public::from_slice(key.1.as_slice()))
                    .ok_or_else(|| Error::PairNotFound("ed25519".to_owned()))?;
                return Ok(key_pair.sign(msg).encode())
            },
            sr25519::CRYPTO_ID => {
                let key_pair: sr25519::Pair = self
                    .sr25519_key_pair(id, &sr25519::Public::from_slice(key.1.as_slice()))
                    .ok_or_else(|| Error::PairNotFound("sr25519".to_owned()))?;
                return Ok(key_pair.sign(msg).encode())
            },
            ecdsa::CRYPTO_ID => {
                let key_pair: ecdsa::Pair = self
                    .ecdsa_key_pair(id, &ecdsa::Public::from_slice(key.1.as_slice()))
                    .ok_or_else(|| Error::PairNotFound("ecdsa".to_owned()))?;
                return Ok(key_pair.sign(msg).encode())
            },
            _ => Err(Error::KeyNotSupported(id)),
        }
    }
    // Produces a VRF signature over the given transcript with the sr25519
    // pair stored for `public`. Errors when the key is not in the store.
    fn sr25519_vrf_sign(
        &self,
        key_type: KeyTypeId,
        public: &sr25519::Public,
        transcript_data: VRFTranscriptData,
    ) -> Result<VRFSignature, Error> {
        let transcript = make_transcript(transcript_data);
        let pair = self
            .sr25519_key_pair(key_type, public)
            .ok_or_else(|| Error::PairNotFound("Not found".to_owned()))?;
        let (inout, proof, _) = pair.as_ref().vrf_sign(transcript);
        Ok(VRFSignature { output: inout.to_output(), proof })
    }
}
/// Macro for exporting functions from wasm in with the expected signature for using it with the
/// wasm executor. This is useful for tests where you need to call a function in wasm.
///
/// The input parameters are expected to be SCALE encoded and will be automatically decoded for you.
/// The output value is also SCALE encoded when returned back to the host.
///
/// The functions are feature-gated with `#[cfg(not(feature = "std"))]`, so they are only available
/// from within wasm.
///
/// # Example
///
/// ```
/// # use sp_core::wasm_export_functions;
///
/// wasm_export_functions! {
///     fn test_in_wasm(value: bool, another_value: Vec<u8>) -> bool {
///         value && another_value.is_empty()
///     }
///
///     fn without_return_value() {
///         // do something
///     }
/// }
/// ```
#[macro_export]
macro_rules! wasm_export_functions {
    // Entry arm: re-dispatches each listed function to an `@IMPL` arm below.
    (
        $(
            fn $name:ident (
                $( $arg_name:ident: $arg_ty:ty ),* $(,)?
            ) $( -> $ret_ty:ty )? { $( $fn_impl:tt )* }
        )*
    ) => {
        $(
            $crate::wasm_export_functions! {
                @IMPL
                fn $name (
                    $( $arg_name: $arg_ty ),*
                ) $( -> $ret_ty )? { $( $fn_impl )* }
            }
        )*
    };
    // Implementation arm for functions WITHOUT a return type: the wasm
    // export always returns the SCALE encoding of `()`.
    (@IMPL
        fn $name:ident (
            $( $arg_name:ident: $arg_ty:ty ),*
        ) { $( $fn_impl:tt )* }
    ) => {
        #[no_mangle]
        #[allow(unreachable_code)]
        #[cfg(not(feature = "std"))]
        pub fn $name(input_data: *mut u8, input_len: usize) -> u64 {
            // View the caller-provided wasm memory as a byte slice.
            let input: &[u8] = if input_len == 0 {
                &[0u8; 0]
            } else {
                unsafe {
                    $crate::sp_std::slice::from_raw_parts(input_data, input_len)
                }
            };
            {
                // Decode the argument tuple from the SCALE-encoded input.
                let ($( $arg_name ),*) : ($( $arg_ty ),*) = $crate::Decode::decode(
                    &mut &input[..],
                ).expect("Input data is correctly encoded");
                $( $fn_impl )*
            }
            $crate::to_substrate_wasm_fn_return_value(&())
        }
    };
    // Implementation arm for functions WITH a return type: the body's value
    // is SCALE-encoded and handed back to the host.
    (@IMPL
        fn $name:ident (
            $( $arg_name:ident: $arg_ty:ty ),*
        ) $( -> $ret_ty:ty )? { $( $fn_impl:tt )* }
    ) => {
        #[no_mangle]
        #[allow(unreachable_code)]
        #[cfg(not(feature = "std"))]
        pub fn $name(input_data: *mut u8, input_len: usize) -> u64 {
            let input: &[u8] = if input_len == 0 {
                &[0u8; 0]
            } else {
                unsafe {
                    $crate::sp_std::slice::from_raw_parts(input_data, input_len)
                }
            };
            let output $( : $ret_ty )? = {
                let ($( $arg_name ),*) : ($( $arg_ty ),*) = $crate::Decode::decode(
                    &mut &input[..],
                ).expect("Input data is correctly encoded");
                $( $fn_impl )*
            };
            $crate::to_substrate_wasm_fn_return_value(&output)
        }
    };
}
/// A task executor that can be used in tests.
///
/// Internally this just wraps a `ThreadPool` with a pool size of `8`. This
/// should ensure that we have enough threads in tests for spawning blocking futures.
#[cfg(feature = "std")]
#[derive(Clone)]
pub struct TaskExecutor(futures::executor::ThreadPool);
#[cfg(feature = "std")]
impl TaskExecutor {
    /// Create a new instance of `Self`.
    pub fn new() -> Self {
        let pool = futures::executor::ThreadPoolBuilder::new()
            .pool_size(8)
            .create()
            .expect("Failed to create thread pool");
        Self(pool)
    }
}
#[cfg(feature = "std")]
impl crate::traits::SpawnNamed for TaskExecutor {
    // The test pool does not distinguish blocking work: both methods simply
    // hand the future to the thread pool, ignoring the task name.
    fn spawn_blocking(&self, _: &'static str, future: futures::future::BoxFuture<'static, ()>) {
        self.0.spawn_ok(future);
    }
    fn spawn(&self, _: &'static str, future: futures::future::BoxFuture<'static, ()>) {
        self.0.spawn_ok(future);
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::sr25519;
    use crate::testing::{ED25519, SR25519};
    use crate::vrf::VRFTranscriptValue;
    // A freshly generated key must be listed by `keys`.
    #[test]
    fn store_key_and_extract() {
        let store = KeyStore::new();
        let public = store.write().ed25519_generate_new(ED25519, None).expect("Generates key");
        let public_keys = store.read().keys(ED25519).unwrap();
        assert!(public_keys.contains(&public.into()));
    }
    // A key inserted via `insert_unknown` must also be listed by `keys`.
    #[test]
    fn store_unknown_and_extract_it() {
        let store = KeyStore::new();
        let secret_uri = "//Alice";
        let key_pair = sr25519::Pair::from_string(secret_uri, None).expect("Generates key pair");
        store
            .write()
            .insert_unknown(SR25519, secret_uri, key_pair.public().as_ref())
            .expect("Inserts unknown key");
        let public_keys = store.read().keys(SR25519).unwrap();
        assert!(public_keys.contains(&key_pair.public().into()));
    }
    // VRF signing fails for a missing key and succeeds once it is inserted.
    #[test]
    fn vrf_sign() {
        let store = KeyStore::new();
        let secret_uri = "//Alice";
        let key_pair = sr25519::Pair::from_string(secret_uri, None).expect("Generates key pair");
        let transcript_data = VRFTranscriptData {
            label: b"Test",
            items: vec![
                ("one", VRFTranscriptValue::U64(1)),
                ("two", VRFTranscriptValue::U64(2)),
                ("three", VRFTranscriptValue::Bytes("test".as_bytes())),
            ],
        };
        let result =
            store.read().sr25519_vrf_sign(SR25519, &key_pair.public(), transcript_data.clone());
        assert!(result.is_err());
        store
            .write()
            .insert_unknown(SR25519, secret_uri, key_pair.public().as_ref())
            .expect("Inserts unknown key");
        let result = store.read().sr25519_vrf_sign(SR25519, &key_pair.public(), transcript_data);
        assert!(result.is_ok());
    }
}
|
#[macro_use]
extern crate log;
/// Minimal `env_logger` demo: emits one `info` and one `warn` record.
fn main() {
    // RUST_LOG=info cargo run --bin output-log
    // Initialise the global logger from the RUST_LOG environment variable.
    env_logger::init();
    info!("starting up");
    warn!("oops, nothing implemented!");
}
|
mod curve25519;
// mod dh_group_sha1;
pub use self::curve25519::Curve25519;
// pub use self::dh_group_sha1::DhGroupSha1;
use connection::Connection;
use packet::Packet;
/// Outcome of feeding one packet to a key-exchange algorithm.
pub enum KexResult {
    // Exchange still in progress; carries the packet to send next.
    Ok(Packet),
    // Exchange complete; carries the final packet to send.
    Done(Packet),
    // The packet could not be processed.
    Error,
}
/// Common interface for key-exchange algorithms (e.g. [`Curve25519`]).
// The explicit `<'a>` lifetimes on the accessor methods were needless —
// elision produces the identical signatures, so implementors are unaffected.
pub trait KeyExchange {
    /// Advances the exchange with one incoming packet.
    fn process(&mut self, conn: &mut Connection, packet: Packet) -> KexResult;
    /// The negotiated shared secret, `None` until the exchange completes.
    fn shared_secret(&self) -> Option<&[u8]>;
    /// The exchange hash, `None` until the exchange completes.
    fn exchange_hash(&self) -> Option<&[u8]>;
    /// Digest of the given byte slices with this algorithm's hash function.
    fn hash(&self, data: &[&[u8]]) -> Vec<u8>;
}
|
extern crate rand;
use std::env;
mod vec3;
use crate::rand::Rng;
use rayon::prelude::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
use vec3::Vec3;
/// A ray with an origin and a (not necessarily normalised) direction.
#[derive(Copy, Clone, Debug)]
struct Ray {
    origin: Vec3,
    direction: Vec3,
}
impl Ray {
    fn new(origin: Vec3, direction: Vec3) -> Ray {
        Ray { origin, direction }
    }
    /// Point reached after travelling `t` units of `direction` from `origin`.
    fn point_at_parameter(&self, t: f64) -> Vec3 {
        self.origin + t * self.direction
    }
}
/// A sphere described by its centre and radius.
#[derive(Clone, Copy, Debug)]
struct Sphere {
    center: Vec3,
    radius: f64,
}
impl Sphere {
    fn new(center: Vec3, radius: f64) -> Sphere {
        Sphere { center, radius }
    }
}
/// Details of a ray-object intersection.
#[derive(Copy, Clone, Debug)]
struct HitRecord {
    // Ray parameter at the hit point.
    t: f64,
    // The hit point itself.
    p: Vec3,
    // Surface normal at the hit point (unit length for spheres).
    normal: Vec3,
}
/// Anything a ray can intersect within the parameter window `[t_min, t_max]`.
trait Hit {
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;
}
impl Hit for Sphere {
    /// Ray-sphere intersection via the quadratic formula; returns the
    /// nearest root inside `(t_min, t_max)`, if any.
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let oc = ray.origin - self.center;
        // Quadratic coefficients with the conventional factor of 2 folded
        // out of `b` (it cancels in the discriminant and the roots).
        let a = ray.direction.dot(&ray.direction);
        let b = oc.dot(&ray.direction);
        let c = oc.dot(&oc) - self.radius * self.radius;
        let discriminant = b * b - a * c;
        if discriminant < 0.0 {
            return None;
        }
        // Take the square root once; the original recomputed it per root.
        let sqrt_d = discriminant.sqrt();
        // Try the nearer root first, then the farther one.
        for &temp in &[(-b - sqrt_d) / a, (-b + sqrt_d) / a] {
            if temp < t_max && temp > t_min {
                let point = ray.point_at_parameter(temp);
                return Some(HitRecord {
                    t: temp,
                    p: point,
                    normal: (point - self.center) / self.radius,
                });
            }
        }
        None
    }
}
/// Closed set of renderable shapes; dispatch is by `match` rather than
/// trait objects.
enum Hittable {
    Sphere(Sphere),
}
impl Hittable {
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        match self {
            Hittable::Sphere(sphere) => sphere.hit(ray, t_min, t_max),
        }
    }
}
/// A scene: a flat list of hittable objects queried as a unit.
struct HittableList {
    list: Vec<Hittable>,
}
// The unused `<'a>` lifetime parameter on this impl was removed; nothing in
// the block referenced it.
impl HittableList {
    /// Returns the hit record of the closest intersection in `(t_min, t_max)`,
    /// or `None` when no object is hit.
    fn hit_all(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let mut closest_so_far = t_max;
        let mut possible_hit_record: Option<HitRecord> = None;
        self.list.iter().for_each(|hittable| {
            // Shrink the search window so later objects must be closer to win.
            if let Some(record) = hittable.hit(ray, t_min, closest_so_far) {
                closest_so_far = record.t;
                possible_hit_record = Some(record)
            }
        });
        possible_hit_record
    }
}
/// A fixed axis-aligned camera with a 2:1 viewport centred on -z.
#[derive(Clone, Copy, Debug)]
struct Camera {
    lower_left_corner: Vec3,
    horizontal: Vec3,
    vertical: Vec3,
    origin: Vec3,
}
impl Camera {
    fn new() -> Camera {
        Camera {
            lower_left_corner: Vec3::new(-2.0, -1.0, -1.0),
            horizontal: Vec3::new(4.0, 0.0, 0.0),
            vertical: Vec3::new(0.0, 2.0, 0.0),
            origin: Vec3::new(0.0, 0.0, 0.0),
        }
    }
    /// Ray through normalised viewport coordinates `(u, v)` in `[0, 1]^2`.
    fn get_ray(&self, u: f64, v: f64) -> Ray {
        Ray::new(
            self.origin,
            self.lower_left_corner + u * self.horizontal + v * self.vertical - self.origin,
        )
    }
}
/// Returns a uniformly distributed random point strictly inside the unit
/// sphere, via rejection sampling over the cube [-1, 1)^3.
fn random_in_unit_sphere() -> Vec3 {
    let mut rng = rand::thread_rng();
    let mut p: Vec3;
    loop {
        p =
            2.0 * Vec3 {
                x: rng.gen(),
                y: rng.gen(),
                z: rng.gen(),
            } - Vec3::new(1.0, 1.0, 1.0);
        // Accept only points inside the sphere. The original comparison
        // (`>= 1.0`) was inverted and returned points *outside* the unit
        // sphere, biasing the diffuse bounce directions.
        if p.squared_length() < 1.0 {
            return p;
        }
    }
}
/// Colour seen along `ray`: a diffuse bounce on a hit (recursing with 50%
/// attenuation), otherwise a white-to-blue sky gradient.
fn color(ray: &Ray, world: &HittableList) -> Vec3 {
    // t_min of 0.001 avoids "shadow acne" from re-hitting the same surface.
    if let Some(hit_record) = world.hit_all(&ray, 0.001, std::f64::MAX) {
        let target = hit_record.p + hit_record.normal + random_in_unit_sphere();
        // NOTE(review): recursion depth is unbounded; the 0.5 attenuation
        // makes deep paths negligible, but a depth cap would be safer.
        0.5 * color(
            &Ray {
                origin: hit_record.p,
                direction: target - hit_record.p,
            },
            world,
        )
    } else {
        let unit_direction = ray.direction.unit();
        // Map y from [-1, 1] to [0, 1] and lerp white -> light blue.
        let t = (unit_direction.y + 1.0) * 0.5;
        (1.0 - t) * Vec3::new(1.0, 1.0, 1.0) + t * Vec3::new(0.5, 0.7, 1.0)
    }
}
/// Output image dimensions and per-pixel sample count.
#[derive(Debug, Clone, Copy)]
struct ImageParams {
    width: u32,
    height: u32,
    samples: u32,
}
/// Parses `<height> <samples>` from the command line; the image width is
/// fixed at twice the height (2:1 aspect ratio).
///
/// # Panics
/// Panics with a usage message when the argument count is wrong or an
/// argument is not a non-negative integer (the original panicked with the
/// uninformative "No idea!" / bare `unwrap`).
fn parse_args() -> ImageParams {
    let args: Vec<String> = env::args().collect();
    if args.len() != 3 {
        panic!("expected exactly two arguments: <height> <samples>");
    }
    let height: u32 = args[1].parse().expect("<height> must be a non-negative integer");
    let samples: u32 = args[2].parse().expect("<samples> must be a non-negative integer");
    ImageParams {
        height,
        width: height * 2,
        samples,
    }
}
/// Renders the two-sphere scene to stdout in plain PPM (P3) format,
/// parallelising over image rows with rayon.
fn main() {
    let image_params = parse_args();
    // PPM header: magic, dimensions, max channel value.
    println!("P3");
    println!("{} {}", image_params.width, image_params.height);
    println!("255");
    let camera = Camera::new();
    // Small sphere in front of the camera plus a huge "ground" sphere.
    let world = &HittableList {
        list: vec![
            Hittable::Sphere(Sphere::new(Vec3::new(0.0, 0.0, -1.0), 0.5)),
            Hittable::Sphere(Sphere::new(Vec3::new(0.0, -100.5, -1.0), 100.0)),
        ],
    };
    // Rows are traversed top-to-bottom (`rev`) to match PPM ordering.
    let colors: Vec<Vec3> = (0..image_params.height)
        .into_par_iter()
        .rev()
        .flat_map(|j| {
            (0..image_params.width).into_par_iter().map(move |i| {
                let mut rng = rand::thread_rng();
                let mut col = Vec3::new(0.0, 0.0, 0.0);
                // Jittered supersampling: average `samples` rays per pixel.
                for _ in 0..image_params.samples {
                    let u = (f64::from(i) + rng.gen::<f64>()) / f64::from(image_params.width);
                    let v = (f64::from(j) + rng.gen::<f64>()) / f64::from(image_params.height);
                    let ray = camera.get_ray(u, v);
                    col += color(&ray, world);
                }
                col = col / f64::from(image_params.samples);
                // sqrt applies gamma-2 correction before output.
                Vec3::new(col.x.sqrt(), col.y.sqrt(), col.z.sqrt())
            })
        })
        .collect();
    for color in colors {
        println!("{}", color.as_color_string())
    }
}
|
use crate::pool::TxPool;
use ckb_types::core::TransactionView;
use ckb_types::packed::ProposalShortId;
use futures::future::Future;
use tokio::prelude::{Async, Poll};
use tokio::sync::lock::Lock;
/// Future that resolves to a transaction looked up in the shared tx-pool.
pub struct FetchTxRPCProcess {
    pub tx_pool: Lock<TxPool>,
    // Consumed on completion; `None` after the future has resolved.
    pub proposal_id: Option<ProposalShortId>,
}
impl FetchTxRPCProcess {
    /// Creates a lookup future for `proposal_id`.
    pub fn new(tx_pool: Lock<TxPool>, proposal_id: ProposalShortId) -> FetchTxRPCProcess {
        FetchTxRPCProcess {
            tx_pool,
            proposal_id: Some(proposal_id),
        }
    }
}
impl Future for FetchTxRPCProcess {
    /// `Some((in_proposed, tx))`: the bool is `true` when the transaction
    /// was found in the proposed set, `false` when it was found via
    /// `get_tx_without_conflict`.
    type Item = Option<(bool, TransactionView)>;
    type Error = ();
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.tx_pool.poll_lock() {
            Async::Ready(guard) => {
                // `take` enforces single completion: polling again panics.
                let id = self.proposal_id.take().expect("cannot poll twice");
                let ret = guard
                    .proposed()
                    .get(&id)
                    .map(|entry| (true, entry.transaction.clone()))
                    .or_else(|| guard.get_tx_without_conflict(&id).map(|tx| (false, tx)));
                Ok(Async::Ready(ret))
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
|
use auto_impl::auto_impl;
// Marker super-trait used to exercise `auto_impl` with supertraits.
trait Supi {}
// NOTE(review): compile-test fixture — `#[auto_impl(Fn)]` is expected to
// generate a blanket impl of `Foo` for matching closures; confirm against
// the auto_impl docs for the supertrait interaction.
#[auto_impl(Fn)]
trait Foo: Supi {
    fn foo(&self, x: u32) -> String;
}
fn main() {}
|
use iron::Handler;
use std::io::Read;
use serde_json;
use iron::Iron;
use iron::IronResult;
use iron::IronError;
use iron::Request;
use iron::Response;
use iron::status;
use router::Router;
/// Iron handler for POST /draw; stateless.
struct DrawHandler {
}
impl DrawHandler {
    pub fn new() -> DrawHandler {
        DrawHandler {}
    }
}
/// A single 2-D coordinate in a draw request.
#[derive(Debug, Serialize, Deserialize)]
pub struct Point {
    pub x: i16,
    pub y: i16,
}
/// JSON body of POST /draw: the list of points to draw.
#[derive(Debug, Serialize, Deserialize)]
struct DrawRequest {
    pub points: Vec<Point>,
}
impl Handler for DrawHandler {
fn handle(&self, request: &mut Request) -> IronResult<Response> {
let mut body = String::new();
let _ = request.body.read_to_string(&mut body)
.map_err(|e| IronError::new(e, "Read error occurred"))?;
println!("Request Body: {}", body);
let draw_request : DrawRequest = serde_json::from_str(&body)
.map_err(|e| IronError::new(e, "Parsing error occurred"))?;
println!("Request Json: {:?}", draw_request);
let mut response = Response::new();
Ok(response)
}
}
/// Starts the HTTP server on localhost:8888 with routes
/// `GET /` (index) and `POST /draw`. Blocks the calling thread.
pub fn start_http_server() {
    let mut router = Router::new();
    router.get("/", |_: &mut Request| {
        Ok(Response::with((status::Ok, "Index")))
    }, "index");
    router.post("/draw", DrawHandler::new(), "draw");
    // TODO: Parameters for port and hostname
    let host = "localhost:8888";
    println!("Launching server on: {}", host);
    Iron::new(router).http(host).expect("Server could not start");
}
|
use lazy_static::lazy_static;
use super::*;
// Generates a `#[test]` that runs `$fun` over `$given` with a "# " comment
// prefix and asserts the result equals `$expected`.
macro_rules! matchtest {
    ($name:ident, $fun:expr, $given:expr, $expected:expr) => {
        #[test]
        fn $name() {
            let prefix = "# ";
            // Pattern capturing leading whitespace (`head`) and the text
            // after the comment prefix (`tail`).
            let prefix_pattern= Regex::new(&format!(r"^(?P<head>\s*){}(?P<tail>.*?)$", prefix)).unwrap();
            assert_eq!($fun(&prefix_pattern, prefix, $given), $expected);
        }
    };
}
// Single-line comment/uncomment/toggle cases: commenting an already
// commented line is a no-op, toggling flips the commented state.
matchtest!(comment_case1, comment_line, "# simple case one", "# simple case one");
matchtest!(uncomment_case1, uncomment_line, "# simple case one", "simple case one");
matchtest!(toggle_case1, toggle_line, "# simple case one", "simple case one");
matchtest!(comment_case2, comment_line, "simple_case = 2", "# simple_case = 2");
matchtest!(uncomment_case2, uncomment_line, "simple_case = 2", "simple_case = 2");
matchtest!(toggle_case2, toggle_line, "simple_case = 2", "# simple_case = 2");
// A block with any uncommented line gets commented as a whole, so existing
// "#" prefixes are stacked rather than stripped.
#[test]
fn toggle_initial_uncomment() {
    let example = vec![
        "a = 1",
        "b = 2",
        "#c = 3",
        "d = 4",
    ];
    let expected = vec![
        "# a = 1",
        "# b = 2",
        "# #c = 3",
        "# d = 4",
    ];
    let actual = comment_block(&CommentingMode::Toggle, "# ", &example);
    assert_eq!(actual, expected);
}
// Same stacking behaviour when the block starts with a commented line.
#[test]
fn toggle_initial_comment() {
    let example = vec![
        "# a = 1",
        "b = 2",
        "# c = 3",
        "d = 4"
    ];
    let expected = vec![
        "# # a = 1",
        "# b = 2",
        "# # c = 3",
        "# d = 4",
    ];
    let actual = comment_block(&CommentingMode::Toggle, "# ", &example);
    assert_eq!(actual, expected);
}
// Blank (whitespace-only) lines are left untouched when commenting a block.
#[test]
fn toggle_comment_initial_blank() {
    let example = vec![
        " ",
        " def foo(self, bar):",
        " # NOTE: choose better names",
        " return bar",
    ];
    let expected = vec![
        " ",
        "# def foo(self, bar):",
        "# # NOTE: choose better names",
        "# return bar",
    ];
    let actual = comment_block(&CommentingMode::Toggle, "# ", &example);
    assert_eq!(actual, expected);
}
// A single-line address yields one match group per input line.
// NOTE(review): `len() == 3` here vs `len() == 2` in the range tests below —
// confirm `get_matches` grouping semantics (per-line groups vs match runs).
#[test]
fn line_address_only_matches_one() {
    let pattern = AddressPattern::new_single(Line(2));
    let lines = vec![
        "one",
        "two",
        "three",
    ];
    let matches = get_matches(&pattern, &lines, EMPTY_STATE.unchanged());
    assert_eq!(matches.len(), 3);
    // Group 1 (line 2) is the only matching group.
    assert_eq!(matches[1], (true, vec!["two"]));
}
// A line-range address groups the covered lines into a single matching block.
#[test]
fn line_range_address_matches_block() {
    let pattern = AddressPattern::new_range(Line(2), Line(4));
    let lines = vec![
        "one",
        "two",
        "three",
        "four"
    ];
    let matches = get_matches(&pattern, &lines, EMPTY_STATE.unchanged());
    assert_eq!(matches.len(), 2);
    assert_eq!(matches[1], (true, vec!["two", "three", "four"]));
}
// A regex left endpoint behaves like the line it first matches on.
#[test]
fn regex_range_address_matches_block() {
    let re = Regex::new("two").unwrap();
    let pattern = AddressPattern::new_range(RegexPattern(re), Line(4));
    let lines = vec![
        "one",
        "two",
        "three",
        "four"
    ];
    let matches = get_matches(&pattern, &lines, EMPTY_STATE.unchanged());
    assert_eq!(matches.len(), 2);
    assert_eq!(matches[1], (true, vec!["two", "three", "four"]));
}
lazy_static! {
    // Shared "# " comment-prefix pattern used by the `will_comment` tests.
    static ref PREFIX: Regex = Regex::new(r"^(?P<head>\s*)# (?P<tail>.*?)$").unwrap();
}
// The toggle direction is "comment" as soon as one line is uncommented.
#[test]
fn not_all_lines_commented_should_comment() {
    let example = vec![
        "# not all lines commented should comment",
        "abc = 123",
    ];
    assert!(will_comment(&PREFIX, &example));
}
// When every line is already commented the toggle direction is "uncomment".
#[test]
fn all_lines_commented_should_uncomment() {
    let example = vec![
        "# all lines commented should uncomment",
        "# abc = 123",
    ];
    assert!(!will_comment(&PREFIX, &example));
}
// Blank lines are ignored when deciding the toggle direction.
#[test]
fn blanks_do_not_affect_will_comment() {
    let example1 = vec![
        "all lines uncommented or blank should comment",
        "",
    ];
    assert!(will_comment(&PREFIX, &example1));
    let example2 = vec![
        "# all lines commented or blank should uncomment",
        "",
    ];
    assert!(!will_comment(&PREFIX, &example2));
}
// A block of only blank lines is reported as "uncomment" and toggling it
// leaves it unchanged.
#[test]
fn all_blank_lines_are_unchanged() {
    let expected = vec![
        "",
        "",
    ];
    assert!(!will_comment(&PREFIX, &expected));
    let prefix = "# ";
    let actual = comment_block(&CommentingMode::Toggle, prefix, &expected);
    assert_eq!(actual, expected);
}
// Toggling twice restores the original block exactly.
#[test]
fn round_trip() {
    let example = vec![
        "# not all lines commented",
        "abc = 123",
    ];
    let expected = vec![
        "# # not all lines commented",
        "# abc = 123",
    ];
    let actual = comment_block(&CommentingMode::Toggle, "# ", &example);
    assert_eq!(actual, expected);
    assert_eq!(comment_block(&CommentingMode::Toggle, "# ", &actual), example);
}
// With the zero address the whole file toggles as one block, so the
// already-commented middle line gains a second prefix.
#[test]
fn zero_address_toggles_whole_file_not_individual_lines() {
    let example = vec![
        "a = 1",
        "# b = 2",
        "c = 3",
    ].join("\n");
    let expected = vec![
        "# a = 1",
        "# # b = 2",
        "# c = 3",
    ];
    let pattern = AddressPattern::new_zero();
    let actual: Vec<String> = body(example.lines(), EMPTY_STATE.unchanged(), &pattern, "# ", &CommentingMode::Toggle);
    assert_eq!(actual, expected);
}
use {Address::AddressRange, AddressComponent::*};
// Builds an `AddressPattern`; `negated` defaults to `false` in the one-arg
// form. The trailing `;` inside each expansion was removed: these macros are
// used in expression position (`let addr = address_range!(..)`), where a
// trailing semicolon in the expansion is ignored-with-a-warning at best
// (`semicolon_in_expressions_from_macros`) and slated to become an error.
macro_rules! address_range {
    ($range:expr) => { AddressPattern { pattern: $range, negated: false } };
    ($range:expr, $negated:expr) => { AddressPattern { pattern: $range, negated: $negated } };
}
// Assert that `$addr` matches (resp. does not match) each listed line
// number, using empty line text and the shared EMPTY_STATE.
macro_rules! assert_matches_lines { ($addr:expr, $( $l:expr ),*) => { $( assert!($addr.matches($l, "", &EMPTY_STATE).0); )* }; }
macro_rules! assert_not_matches_lines { ($addr:expr, $( $l:expr ),*) => { $( assert!(!$addr.matches($l, "", &EMPTY_STATE).0); )* }; }
#[test]
fn zero_address_always_matches() {
let addr = address_range!(Address::ZeroAddress);
assert_matches_lines!(addr, 1, 2, 3, 4, 5);
// TODO: quickcheck/predicate tests
}
#[test]
fn zero_address_invert_never_matches() {
let addr = address_range!(Address::ZeroAddress, true);
assert_not_matches_lines!(addr, 1, 2, 3, 4, 5);
}
#[test]
fn one_address_matches_one_line() {
    // A single-line address matches exactly that line and nothing else.
    let addr = address_range!(Address::OneAddress(Line(3)));
    assert_matches_lines!(addr, 3);
    assert_not_matches_lines!(addr, 1, 2, 4, 5);
}
#[test]
fn one_address_inverted_matches_but_one_line() {
    // Negated single-line address matches every line except the named one.
    let addr = address_range!(Address::OneAddress(Line(3)), true);
    assert_not_matches_lines!(addr, 3);
    assert_matches_lines!(addr, 1, 2, 4, 5);
}
#[test]
fn matches_range_lines() {
    // Absolute range 3-5 is inclusive at both ends.
    let addr = address_range!(AddressRange(Line(3), Line(5)));
    assert_matches_lines!(addr, 3, 4, 5);
    assert_not_matches_lines!(addr, 2, 9);
}
#[test]
fn matches_range_lines_invert() {
    // Negated absolute range: matches only outside 3-5.
    let addr = address_range!(AddressRange(Line(3), Line(5)), true);
    assert_not_matches_lines!(addr, 3, 4, 5);
    assert_matches_lines!(addr, 2, 9);
}
#[test]
fn matches_range_relative_lines() {
    // Relative end: range starts at line 3 and runs 5 more lines (3..=8).
    let addr = address_range!(AddressRange(Line(3), Relative(5)));
    assert_matches_lines!(addr, 3, 8);
    assert_not_matches_lines!(addr, 2, 9);
}
#[test]
fn matches_range_relative_lines_invert() {
    // Negated relative range: matches only outside 3..=8.
    let addr = address_range!(AddressRange(Line(3), Relative(5)), true);
    assert_not_matches_lines!(addr, 3, 8);
    assert_matches_lines!(addr, 2, 9);
}
#[test]
fn matches_regex_relative_range() {
    let re = Regex::new("foo").unwrap();
    // Range opens at the first line matching /foo/ and stays open for 3 more lines.
    let addr = address_range!(AddressRange(RegexPattern(re), Relative(3)));
    // Line 1 matches because it triggers the opening regex itself.
    assert!( addr.matches(1, "foo", &EMPTY_STATE).0);
    // With the left side recorded at line 1, lines 2-4 fall inside the
    // relative window (1 + 3) regardless of their text; line 5 is past it.
    let state = MatchState { left_match: Some(1), right_match: None };
    assert!( addr.matches(2, "match", &state).0);
    assert!( addr.matches(3, "match", &state).0);
    assert!( addr.matches(4, "match", &state).0);
    assert!(!addr.matches(5, "un-match", &state).0);
}
#[test]
fn matches_regex_absolute_range() {
    let re = Regex::new("foo").unwrap();
    // Range opens at the first /foo/ match and closes at absolute line 4.
    let addr = address_range!(AddressRange(RegexPattern(re), Line(4)));
    assert!( addr.matches(1, "foo", &EMPTY_STATE).0);
    // Left side matched at line 1: lines 2-4 are in range, line 5 is not.
    let state = MatchState { left_match: Some(1), right_match: None };
    assert!( addr.matches(2, "match", &state).0);
    assert!( addr.matches(3, "match", &state).0);
    assert!( addr.matches(4, "match", &state).0);
    assert!(!addr.matches(5, "un-match", &state).0);
}
#[test]
fn matches_regex_empty_absolute_range() {
    let re = Regex::new("foo").unwrap();
    // End line (2) precedes the line where /foo/ first matches (3), so the
    // range is effectively empty: only the opening line itself matches.
    let addr = address_range!(AddressRange(RegexPattern(re), Line(2)));
    assert!(!addr.matches(1, "un-match", &EMPTY_STATE).0);
    assert!(!addr.matches(2, "un-match", &EMPTY_STATE).0);
    assert!( addr.matches(3, "foo", &EMPTY_STATE).0);
    // After the open at line 3, the end line is already behind us.
    let state = MatchState { left_match: Some(3), right_match: None };
    assert!(!addr.matches(4, "un-match", &state).0);
    assert!(!addr.matches(5, "un-match", &state).0);
}
#[test]
fn matches_absolute_regex_end_range() {
    let re = Regex::new("foo").unwrap();
    // Range opens at absolute line 2 and closes at the first /foo/ match.
    let addr = address_range!(AddressRange(Line(2), RegexPattern(re)));
    assert!(!addr.matches(1, "un-match", &EMPTY_STATE).0);
    assert!( addr.matches(2, "match", &EMPTY_STATE).0);
    assert!( addr.matches(3, "match", &EMPTY_STATE).0);
    // Line 4 matches /foo/: it is the (inclusive) closing line.
    assert!( addr.matches(4, "foo", &EMPTY_STATE).0);
    // Once the right side is recorded, following lines are out of range.
    let state = MatchState { left_match: None, right_match: Some(4) };
    assert!(!addr.matches(5, "un-match", &state).0);
}
#[test]
fn matches_double_regex_range() {
    let re1 = Regex::new("foo").unwrap();
    let re2 = Regex::new("bar").unwrap();
    // /foo/,/bar/ style range: opens on /foo/, closes (inclusively) on /bar/.
    let addr = address_range!(AddressRange(RegexPattern(re1), RegexPattern(re2)));
    assert!(!addr.matches(1, "un-match", &EMPTY_STATE).0);
    assert!( addr.matches(2, "foo", &EMPTY_STATE).0);
    // Open at line 2: interior and closing lines match.
    let state = MatchState { left_match: Some(2), right_match: None };
    assert!( addr.matches(3, "match", &state).0);
    assert!( addr.matches(4, "bar", &state).0);
    // Closed at line 4: nothing after matches.
    let state = MatchState { left_match: Some(2), right_match: Some(4) };
    assert!(!addr.matches(5, "un-match", &state).0);
}
#[test]
fn matches_double_regex_range_update() {
    // Same /foo/,/bar/ range as `matches_double_regex_range`, but here the
    // state returned by each `matches` call is threaded into the next call
    // instead of being constructed by hand.
    let start = Regex::new("foo").unwrap();
    let end = Regex::new("bar").unwrap();
    let addr = address_range!(AddressRange(RegexPattern(start), RegexPattern(end)));
    let (matched, mut state) = addr.matches(1, "un-match", &EMPTY_STATE);
    assert!(!matched);
    // (line, text, expected-match) triples, applied in order.
    for &(line, text, expected) in [
        (2, "foo", true),
        (3, "match", true),
        (4, "bar", true),
        (5, "un-match", false),
    ]
    .iter()
    {
        let (matched, next_state) = addr.matches(line, text, &state);
        assert_eq!(matched, expected, "line {} failed", line);
        state = next_state;
    }
}
#[test]
fn matches_double_regex_range_with_multiple_matches_on_same_line() {
    let re1 = Regex::new("foo").unwrap();
    let re2 = Regex::new("bar").unwrap();
    let addr = address_range!(AddressRange(RegexPattern(re1), RegexPattern(re2)));
    // First /foo/../bar/ span: opens at line 1, closes at line 2.
    let (is_match, state) = addr.matches(1, "foo", &EMPTY_STATE);
    assert!(is_match, "line 1 failed");
    let (is_match, state) = addr.matches(2, "bar", &state);
    assert!(is_match, "line 2 failed");
    // Range is closed, so a second "bar" no longer matches.
    let (is_match, state) = addr.matches(3, "bar", &state);
    assert!(!is_match, "line 3 failed");
    // Line 4 contains BOTH regexes: it re-opens the range, and the range must
    // NOT close on the same line it opened — line 5 is still inside it.
    let (is_match, state) = addr.matches(4, "foo bar", &state);
    assert!(is_match, "line 4 failed");
    let (is_match, _state) = addr.matches(5, "match", &state);
    assert!(is_match, "line 5 failed");
}
|
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
extern crate libc;
use std::ffi::CString;
use std::os::raw::c_char;
/// History QoS policy: how many samples to retain.
pub enum History {
    /// Keep only the most recent `n` samples.
    KeepLast {n: u32},
    /// Keep every sample.
    KeepAll
}
/// Durability QoS policy variants, mirroring the DDS durability kinds.
/// NOTE(review): this enum is declared but not yet consumed anywhere in this
/// file — presumably a `QoS::durability` setter is planned.
pub enum Durability {
    Volatile,
    TransientLocal,
    Transient,
    Persistent
}
/// Owning wrapper around a raw `dds_qos_t` allocation.
/// Created via `dds_create_qos` and freed in `Drop`.
pub struct QoS {
    // Raw pointer owned exclusively by this struct.
    qos: *mut dds_qos_t
}
impl QoS {
    /// Allocate a fresh, default-initialized QoS object via the DDS C API.
    fn new() -> QoS {
        QoS { qos: unsafe { dds_create_qos() } }
    }

    /// Reset all policies on this QoS back to their defaults.
    fn reset(&mut self) {
        unsafe { dds_qos_reset(self.qos) }
    }

    /// Set the history policy: KEEP_LAST with depth `n`, or KEEP_ALL.
    fn history(&mut self, h: &History) {
        match h {
            History::KeepLast { n } => {
                // The C API takes an i32 depth; depths above i32::MAX would wrap.
                unsafe { dds_qset_history(self.qos, dds_history_kind_DDS_HISTORY_KEEP_LAST, *n as i32) }
            },
            History::KeepAll => {
                unsafe { dds_qset_history(self.qos, dds_history_kind_DDS_HISTORY_KEEP_ALL, 0) }
            }
        }
    }

    /// Set the partition policy from a list of partition names.
    ///
    /// Bug fix: the previous version mapped each name to
    /// `CString::new(..).unwrap().as_ptr()`, taking a pointer to a temporary
    /// `CString` that was dropped at the end of each closure call — every
    /// pointer handed to `dds_qset_partition` was dangling (use-after-free).
    /// The owned `CString`s must outlive the FFI call.
    fn partitions(&mut self, ps: &[String]) {
        // Own the NUL-terminated copies for the duration of the call.
        let cstrings: Vec<CString> = ps
            .iter()
            .map(|s| CString::new(s.as_str()).expect("partition name contains interior NUL"))
            .collect();
        let mut ptrs: Vec<*const c_char> = cstrings.iter().map(|c| c.as_ptr()).collect();
        // SAFETY: every pointer in `ptrs` refers to a NUL-terminated buffer
        // owned by `cstrings`, which lives until the end of this function —
        // i.e. past the `dds_qset_partition` call.
        unsafe { dds_qset_partition(self.qos, ps.len() as u32, ptrs.as_mut_ptr()) }
    }
}
impl PartialEq for QoS {
    /// Equality is delegated to the C API (`dds_qos_equal`): two wrappers are
    /// equal when their policy sets are equal, not when their pointers match.
    fn eq(&self, other: &Self) -> bool {
        // SAFETY-NOTE: both pointers are live — created in `new`/`clone` and
        // only freed in `Drop`.
        unsafe { dds_qos_equal(self.qos, other.qos) }
    }
}
// NOTE(review): `Eq` presumes `dds_qos_equal` is a full equivalence relation —
// plausible for policy comparison, but not verified here.
impl Eq for QoS {}
impl Clone for QoS {
    /// Deep-copies the policy set into a freshly allocated C-side object, so
    /// the clone owns an independent `dds_qos_t`.
    fn clone(&self) -> Self {
        let dst = QoS { qos : unsafe { dds_create_qos()} };
        // NOTE(review): `dds_copy_qos` returns a status code that is ignored here.
        unsafe { dds_copy_qos(dst.qos, self.qos as *const dds_qos_t) };
        dst
    }
}
impl Drop for QoS {
    fn drop(&mut self) {
        // Frees the C-side allocation made in `new`/`clone`.
        // NOTE(review): the binding is called `dds_qos_delete`; confirm it
        // exists in the generated bindings (the public Cyclone DDS API names
        // this function `dds_delete_qos`).
        unsafe { dds_qos_delete(self.qos) };
    }
}
/// Owning wrapper around a DDS domain-participant entity handle.
pub struct Participant {
    // Entity handle returned by `dds_create_participant`; deleted in `Drop`.
    entity: dds_entity_t
}
impl Drop for Participant {
    fn drop(&mut self) {
        // Deleting the participant entity releases it (and, per DDS semantics,
        // presumably its children) — return status is ignored here.
        unsafe { dds_delete(self.entity) };
    }
}
impl Participant {
    /// Create a participant on domain `d` with default QoS and no listener
    /// (both passed as NULL to the C API).
    pub fn new(d: dds_domainid_t) -> Participant {
        // NOTE(review): a negative entity handle signals an error in DDS;
        // that case is not checked here.
        let e = unsafe { dds_create_participant(d, std::ptr::null(), std::ptr::null()) };
        Participant { entity: e }
    }
}
|
pub use crate::ast::expressions::Expression;
use crate::ast::parser;
use crate::ast::stack;
/// Wrap an expression value as `Some(Box<dyn Expression>)` — shorthand for
/// filling optional AST slots in tests.
#[macro_export]
macro_rules! sexp {
    ($e: expr) => {
        Some(Box::new($e) as Box<dyn Expression>)
    };
}
/// Wrap an expression value as a boxed trait object (`Box<dyn Expression>`).
#[macro_export]
macro_rules! exp {
    ($e: expr) => {
        Box::new($e) as Box<dyn Expression>
    };
}
/// Run the supplied parse function over `source_code` and return the stack it
/// produced. Panics if the parser still holds unconsumed tokens afterwards.
#[allow(dead_code)]
pub fn parse_string<F>(source_code: &str, func: F) -> stack::Stack
where
    F: Fn(&mut parser::Parser, &mut stack::Stack) -> bool,
{
    let mut token_parser = parser::Parser::new(source_code.to_string());
    let mut result_stack = stack::Stack::default();
    func(&mut token_parser, &mut result_stack);
    // `func` must have consumed every token.
    assert!(token_parser.peek().is_none(), "Parser contains tokens after parsing");
    result_stack
}
|
// Register/field proxy type aliases in svd2rust-generated style: `*_R` aliases
// decode a field from a read value, `*_W` aliases encode a field into a write
// value at const bit offset `O`. Hand edits would be lost on regeneration.
#[doc = "Register `PMCR` reader"]
pub type R = crate::R<PMCR_SPEC>;
#[doc = "Register `PMCR` writer"]
pub type W = crate::W<PMCR_SPEC>;
#[doc = "Field `I2C1FMP` reader - I2C1 Fm+"]
pub type I2C1FMP_R = crate::BitReader;
#[doc = "Field `I2C1FMP` writer - I2C1 Fm+"]
pub type I2C1FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C2FMP` reader - I2C2 Fm+"]
pub type I2C2FMP_R = crate::BitReader;
#[doc = "Field `I2C2FMP` writer - I2C2 Fm+"]
pub type I2C2FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C3FMP` reader - I2C3 Fm+"]
pub type I2C3FMP_R = crate::BitReader;
#[doc = "Field `I2C3FMP` writer - I2C3 Fm+"]
pub type I2C3FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C4FMP` reader - I2C4 Fm+"]
pub type I2C4FMP_R = crate::BitReader;
#[doc = "Field `I2C4FMP` writer - I2C4 Fm+"]
pub type I2C4FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB6FMP` reader - PB(6) Fm+"]
pub type PB6FMP_R = crate::BitReader;
#[doc = "Field `PB6FMP` writer - PB(6) Fm+"]
pub type PB6FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB7FMP` reader - PB(7) Fast Mode Plus"]
pub type PB7FMP_R = crate::BitReader;
#[doc = "Field `PB7FMP` writer - PB(7) Fast Mode Plus"]
pub type PB7FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB8FMP` reader - PB(8) Fast Mode Plus"]
pub type PB8FMP_R = crate::BitReader;
#[doc = "Field `PB8FMP` writer - PB(8) Fast Mode Plus"]
pub type PB8FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PB9FMP` reader - PB(9) Fm+"]
pub type PB9FMP_R = crate::BitReader;
#[doc = "Field `PB9FMP` writer - PB(9) Fm+"]
pub type PB9FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BOOSTE` reader - Booster Enable"]
pub type BOOSTE_R = crate::BitReader;
#[doc = "Field `BOOSTE` writer - Booster Enable"]
pub type BOOSTE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BOOSTVDDSEL` reader - Analog switch supply voltage selection"]
pub type BOOSTVDDSEL_R = crate::BitReader;
#[doc = "Field `BOOSTVDDSEL` writer - Analog switch supply voltage selection"]
pub type BOOSTVDDSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C5FMP` reader - I2C5 Fm+"]
pub type I2C5FMP_R = crate::BitReader;
#[doc = "Field `I2C5FMP` writer - I2C5 Fm+"]
pub type I2C5FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// EPIS is a 3-bit field, hence FieldReader/FieldWriter rather than Bit*.
#[doc = "Field `EPIS` reader - Ethernet PHY Interface Selection"]
pub type EPIS_R = crate::FieldReader;
#[doc = "Field `EPIS` writer - Ethernet PHY Interface Selection"]
pub type EPIS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `PA0SO` reader - PA0 Switch Open"]
pub type PA0SO_R = crate::BitReader;
#[doc = "Field `PA0SO` writer - PA0 Switch Open"]
pub type PA0SO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PA1SO` reader - PA1 Switch Open"]
pub type PA1SO_R = crate::BitReader;
#[doc = "Field `PA1SO` writer - PA1 Switch Open"]
pub type PA1SO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PC2SO` reader - PC2 Switch Open"]
pub type PC2SO_R = crate::BitReader;
#[doc = "Field `PC2SO` writer - PC2 Switch Open"]
pub type PC2SO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PC3SO` reader - PC3 Switch Open"]
pub type PC3SO_R = crate::BitReader;
#[doc = "Field `PC3SO` writer - PC3 Switch Open"]
pub type PC3SO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Generated read accessors: each extracts one field from the cached register
// value (`self.bits`) by shift-and-mask. Bits 11-20 and 28-31 have no
// accessor here (absent from the SVD field list).
impl R {
    #[doc = "Bit 0 - I2C1 Fm+"]
    #[inline(always)]
    pub fn i2c1fmp(&self) -> I2C1FMP_R {
        I2C1FMP_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - I2C2 Fm+"]
    #[inline(always)]
    pub fn i2c2fmp(&self) -> I2C2FMP_R {
        I2C2FMP_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - I2C3 Fm+"]
    #[inline(always)]
    pub fn i2c3fmp(&self) -> I2C3FMP_R {
        I2C3FMP_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - I2C4 Fm+"]
    #[inline(always)]
    pub fn i2c4fmp(&self) -> I2C4FMP_R {
        I2C4FMP_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - PB(6) Fm+"]
    #[inline(always)]
    pub fn pb6fmp(&self) -> PB6FMP_R {
        PB6FMP_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - PB(7) Fast Mode Plus"]
    #[inline(always)]
    pub fn pb7fmp(&self) -> PB7FMP_R {
        PB7FMP_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - PB(8) Fast Mode Plus"]
    #[inline(always)]
    pub fn pb8fmp(&self) -> PB8FMP_R {
        PB8FMP_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - PB(9) Fm+"]
    #[inline(always)]
    pub fn pb9fmp(&self) -> PB9FMP_R {
        PB9FMP_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Booster Enable"]
    #[inline(always)]
    pub fn booste(&self) -> BOOSTE_R {
        BOOSTE_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Analog switch supply voltage selection"]
    #[inline(always)]
    pub fn boostvddsel(&self) -> BOOSTVDDSEL_R {
        BOOSTVDDSEL_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - I2C5 Fm+"]
    #[inline(always)]
    pub fn i2c5fmp(&self) -> I2C5FMP_R {
        I2C5FMP_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bits 21:23 - Ethernet PHY Interface Selection"]
    #[inline(always)]
    pub fn epis(&self) -> EPIS_R {
        // 3-bit field: mask 0b111 after shifting to bit 0.
        EPIS_R::new(((self.bits >> 21) & 7) as u8)
    }
    #[doc = "Bit 24 - PA0 Switch Open"]
    #[inline(always)]
    pub fn pa0so(&self) -> PA0SO_R {
        PA0SO_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - PA1 Switch Open"]
    #[inline(always)]
    pub fn pa1so(&self) -> PA1SO_R {
        PA1SO_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - PC2 Switch Open"]
    #[inline(always)]
    pub fn pc2so(&self) -> PC2SO_R {
        PC2SO_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - PC3 Switch Open"]
    #[inline(always)]
    pub fn pc3so(&self) -> PC3SO_R {
        PC3SO_R::new(((self.bits >> 27) & 1) != 0)
    }
}
// Generated write accessors: each returns a field-writer proxy positioned at
// the field's bit offset (the const generic on `*_W`). The proxies mutate the
// pending write value; `bits` sets the whole register raw.
impl W {
    #[doc = "Bit 0 - I2C1 Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1fmp(&mut self) -> I2C1FMP_W<PMCR_SPEC, 0> {
        I2C1FMP_W::new(self)
    }
    #[doc = "Bit 1 - I2C2 Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn i2c2fmp(&mut self) -> I2C2FMP_W<PMCR_SPEC, 1> {
        I2C2FMP_W::new(self)
    }
    #[doc = "Bit 2 - I2C3 Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn i2c3fmp(&mut self) -> I2C3FMP_W<PMCR_SPEC, 2> {
        I2C3FMP_W::new(self)
    }
    #[doc = "Bit 3 - I2C4 Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn i2c4fmp(&mut self) -> I2C4FMP_W<PMCR_SPEC, 3> {
        I2C4FMP_W::new(self)
    }
    #[doc = "Bit 4 - PB(6) Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn pb6fmp(&mut self) -> PB6FMP_W<PMCR_SPEC, 4> {
        PB6FMP_W::new(self)
    }
    #[doc = "Bit 5 - PB(7) Fast Mode Plus"]
    #[inline(always)]
    #[must_use]
    pub fn pb7fmp(&mut self) -> PB7FMP_W<PMCR_SPEC, 5> {
        PB7FMP_W::new(self)
    }
    #[doc = "Bit 6 - PB(8) Fast Mode Plus"]
    #[inline(always)]
    #[must_use]
    pub fn pb8fmp(&mut self) -> PB8FMP_W<PMCR_SPEC, 6> {
        PB8FMP_W::new(self)
    }
    #[doc = "Bit 7 - PB(9) Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn pb9fmp(&mut self) -> PB9FMP_W<PMCR_SPEC, 7> {
        PB9FMP_W::new(self)
    }
    #[doc = "Bit 8 - Booster Enable"]
    #[inline(always)]
    #[must_use]
    pub fn booste(&mut self) -> BOOSTE_W<PMCR_SPEC, 8> {
        BOOSTE_W::new(self)
    }
    #[doc = "Bit 9 - Analog switch supply voltage selection"]
    #[inline(always)]
    #[must_use]
    pub fn boostvddsel(&mut self) -> BOOSTVDDSEL_W<PMCR_SPEC, 9> {
        BOOSTVDDSEL_W::new(self)
    }
    #[doc = "Bit 10 - I2C5 Fm+"]
    #[inline(always)]
    #[must_use]
    pub fn i2c5fmp(&mut self) -> I2C5FMP_W<PMCR_SPEC, 10> {
        I2C5FMP_W::new(self)
    }
    #[doc = "Bits 21:23 - Ethernet PHY Interface Selection"]
    #[inline(always)]
    #[must_use]
    pub fn epis(&mut self) -> EPIS_W<PMCR_SPEC, 21> {
        EPIS_W::new(self)
    }
    #[doc = "Bit 24 - PA0 Switch Open"]
    #[inline(always)]
    #[must_use]
    pub fn pa0so(&mut self) -> PA0SO_W<PMCR_SPEC, 24> {
        PA0SO_W::new(self)
    }
    #[doc = "Bit 25 - PA1 Switch Open"]
    #[inline(always)]
    #[must_use]
    pub fn pa1so(&mut self) -> PA1SO_W<PMCR_SPEC, 25> {
        PA1SO_W::new(self)
    }
    #[doc = "Bit 26 - PC2 Switch Open"]
    #[inline(always)]
    #[must_use]
    pub fn pc2so(&mut self) -> PC2SO_W<PMCR_SPEC, 26> {
        PC2SO_W::new(self)
    }
    #[doc = "Bit 27 - PC3 Switch Open"]
    #[inline(always)]
    #[must_use]
    pub fn pc3so(&mut self) -> PC3SO_W<PMCR_SPEC, 27> {
        PC3SO_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY contract (caller's): raw bits bypass field-level encoding.
        self.bits = bits;
        self
    }
}
// Marker type tying the PMCR register to its 32-bit backing word.
#[doc = "peripheral mode configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pmcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pmcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PMCR_SPEC;
impl crate::RegisterSpec for PMCR_SPEC {
    type Ux = u32;
}
// Capability impls: PMCR is readable, writable (no write-1/0-to-modify field
// quirks — both bitmaps are 0), and resets to 0.
#[doc = "`read()` method returns [`pmcr::R`](R) reader structure"]
impl crate::Readable for PMCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pmcr::W`](W) writer structure"]
impl crate::Writable for PMCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PMCR to value 0"]
impl crate::Resettable for PMCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate libcub;
extern crate rusqlite;
use libcub::{list_notes, Limit, SortOrder};
use rusqlite::{params, Connection};
/// Bootstraps a test db with a table with a similar schema to the Bear notes db
/// and some notes: one fully-populated row and one with NULL subtitle/text.
fn bootstrap(conn: &Connection) {
    // Schema first, then the two seed rows — executed strictly in order.
    let statements = [
        "CREATE TABLE ZSFNOTE (
Z_PK INTEGER PRIMARY KEY,
ZARCHIVED INTEGER,
ZTITLE VARCHAR,
ZSUBTITLE VARCHAR,
ZTEXT VARCHAR,
ZLASTEDITINGDEVICE VARCHAR,
ZCREATIONDATE TIMESTAMP,
ZMODIFICATIONDATE TIMESTAMP,
ZTRASHED INTEGER)",
        "INSERT INTO ZSFNOTE (
Z_PK, ZARCHIVED, ZTITLE, ZSUBTITLE, ZTEXT, ZLASTEDITINGDEVICE,
ZCREATIONDATE, ZMODIFICATIONDATE, ZTRASHED
) VALUES (
1, 0, 'title', 'subtitle', 'text body', 'device', 0, 0, 0
)",
        "INSERT INTO ZSFNOTE (
Z_PK, ZARCHIVED, ZTITLE, ZSUBTITLE, ZTEXT, ZLASTEDITINGDEVICE,
ZCREATIONDATE, ZMODIFICATIONDATE, ZTRASHED
) VALUES (
2, 0, 'title', NULL, NULL, 'device', 0, 0, 0
)",
    ];
    for &sql in statements.iter() {
        conn.execute(sql, params![]).unwrap();
    }
}
/// With no filters, `list_notes` should return every row bootstrapped above.
#[test]
fn test_list_notes() {
    let conn = Connection::open_in_memory().unwrap();
    bootstrap(&conn);
    // NOTE(review): the two `&[]` arguments are presumably filter lists —
    // confirm against libcub's `list_notes` signature.
    let notes = list_notes(&conn, &[], &SortOrder::Title, &[], &Limit::INFINITE).unwrap();
    assert_eq!(notes.len(), 2);
}
|
use super::ast::{Expr, ExprKind, Symbol};
use super::partial_types::PartialType::Unknown;
use super::parser::parse_expr;
use super::pretty_print::*;
use super::type_inference::*;
/// Literal expressions survive a parse → pretty-print round trip; the printer
/// normalises radix (0b111 -> 7, 0xff -> 255) and float formatting, and keeps
/// the type suffixes (`L` for i64, `F` for f32).
#[test]
fn parse_and_print_literal_expressions() {
    // (input source, expected pretty-printed form)
    let tests = vec![
        // i32 literal expressions
        ("23", "23"),
        ("0b111", "7"),
        ("0xff", "255"),
        // i64 literal expressions
        ("23L", "23L"),
        ("7L", "7L"),
        ("0xffL", "255L"),
        // f64 literal expressions
        ("23.0", "23.0"),
        ("23.5", "23.5"),
        ("23e5", "2300000.0"),
        ("23.5e5", "2350000.0"),
        // f32 literal expressions
        ("23.0f", "23.0F"),
        ("23.5f", "23.5F"),
        ("23e5f", "2300000.0F"),
        ("23.5e5f", "2350000.0F"),
        // bool literal expressions
        ("true", "true"),
        ("false", "false"),
    ];
    for test in tests {
        let e = parse_expr(test.0).unwrap();
        assert_eq!(print_expr(&e).as_str(), test.1);
    }
    // Test overflow of integer types
    assert!(parse_expr("999999999999999").is_err()); // i32 literal too big
    assert!(parse_expr("999999999999999L").is_ok());
    assert!(parse_expr("999999999999999999999999999999L").is_err()); // i64 literal too big
}
/// Compound expressions round-trip through the printer, which fully
/// parenthesises operators (showing left-associativity of `-`) and prints
/// lets, vectors, lambdas and `for` loops in canonical form.
#[test]
fn parse_and_print_simple_expressions() {
    let e = parse_expr("23 + 32").unwrap();
    assert_eq!(print_expr(&e).as_str(), "(23+32)");
    // `-` is left-associative: 2 - 3 - 4 parses as ((2-3)-4).
    let e = parse_expr("2 - 3 - 4").unwrap();
    assert_eq!(print_expr(&e).as_str(), "((2-3)-4)");
    let e = parse_expr("2 - (3 - 4)").unwrap();
    assert_eq!(print_expr(&e).as_str(), "(2-(3-4))");
    let e = parse_expr("a").unwrap();
    assert_eq!(print_expr(&e).as_str(), "a");
    let e = parse_expr("let a = 2; a").unwrap();
    assert_eq!(print_expr(&e).as_str(), "(let a=(2);a)");
    let e = parse_expr("let a = 2.0; a").unwrap();
    assert_eq!(print_expr(&e).as_str(), "(let a=(2.0);a)");
    let e = parse_expr("[1, 2, 3]").unwrap();
    assert_eq!(print_expr(&e).as_str(), "[1,2,3]");
    let e = parse_expr("[1.0, 2.0, 3.0]").unwrap();
    assert_eq!(print_expr(&e).as_str(), "[1.0,2.0,3.0]");
    let e = parse_expr("|a, b| a + b").unwrap();
    assert_eq!(print_expr(&e).as_str(), "|a,b|(a+b)");
    // Untyped builder: printed with a `?` placeholder (appender[?]).
    let e = parse_expr("for(d, appender, |e| e+1)").unwrap();
    assert_eq!(print_expr(&e).as_str(), "for(d,appender[?],|e|(e+1))");
}
/// Typed pretty-printing: unknown types print as `?`, symbols with a nonzero
/// id print as `name#id`, and `infer_types` fills in concrete types that then
/// appear in the printed output.
#[test]
fn parse_and_print_typed_expressions() {
    let e = parse_expr("a").unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "a:?");
    // A hand-built identifier with id 1 prints the id after `#`.
    let e = Expr {
        kind: ExprKind::Ident(Symbol{name: "a".to_string(), id: 1}),
        ty: Unknown
    };
    assert_eq!(print_typed_expr(&e).as_str(), "a#1:?");
    let e = parse_expr("a:i32").unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "a:i32");
    // Inference resolves the `?`s of a let-binding.
    let mut e = parse_expr("let a = 2; a").unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "(let a:?=(2);a:?)");
    infer_types(&mut e).unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "(let a:i32=(2);a:i32)");
    // Shadowing: the inner `a` rebinds to bool.
    let mut e = parse_expr("let a = 2; let a = false; a").unwrap();
    infer_types(&mut e).unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "(let a:i32=(2);(let a:bool=(false);a:bool))");
    // Types should propagate from function parameters to body
    let mut e = parse_expr("|a:i32, b:i32| a + b").unwrap();
    infer_types(&mut e).unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "|a:i32,b:i32|(a:i32+b:i32)");
    let mut e = parse_expr("|a:f32, b:f32| a + b").unwrap();
    infer_types(&mut e).unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "|a:f32,b:f32|(a:f32+b:f32)");
    let mut e = parse_expr("let a = [1, 2, 3]; 1").unwrap();
    infer_types(&mut e).unwrap();
    assert_eq!(print_typed_expr(&e).as_str(), "(let a:vec[i32]=([1,2,3]);1)");
    // Mismatched types in MakeVector
    let mut e = parse_expr("[1, true]").unwrap();
    assert!(infer_types(&mut e).is_err());
    // The builder placeholder `appender[?]` is also resolved by inference.
    let mut e = parse_expr("for([1],appender[?],|b,x|merge(b,x))").unwrap();
    infer_types(&mut e).unwrap();
    assert_eq!(print_typed_expr(&e).as_str(),
    "for([1],appender[i32],|b:appender[i32],x:i32|merge(b:appender[i32],x:i32))");
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.