text stringlengths 8 4.13M |
|---|
use crate::endpoint::{Endpoint, EndpointRequest, EndpointResponse};
use crate::verification::StorageProofVerifier;
use codec::{Decode, Encode, FullCodec, MaxEncodedLen};
use frame_support::storage::generator::StorageMap;
use frame_support::{log, Identity};
use scale_info::TypeInfo;
use serde::{Deserialize, Serialize};
use sp_core::storage::StorageKey;
use sp_domains::DomainId;
use sp_runtime::app_crypto::sp_core::U256;
use sp_runtime::traits::CheckedAdd;
use sp_runtime::{sp_std, DispatchError};
use sp_std::marker::PhantomData;
use sp_std::vec::Vec;
use sp_trie::StorageProof;
/// Channel identity.
pub type ChannelId = U256;
/// Nonce used as an identifier and ordering of messages within a channel.
/// Nonce is always increasing.
pub type Nonce = U256;
/// Unique Id of a message between two chains.
pub type MessageId = (ChannelId, Nonce);
/// Execution Fee to execute a send or receive request.
///
/// Both components are denominated in the chain's `Balance` type.
#[derive(Default, Debug, Encode, Decode, Clone, Copy, Eq, PartialEq, TypeInfo)]
pub struct ExecutionFee<Balance> {
    /// Fee paid to the relayer pool for the execution.
    pub relayer_pool_fee: Balance,
    /// Fee paid to the network for computation.
    pub compute_fee: Balance,
}
/// Fee model to send a request and receive a response from another chain.
/// A user of the endpoint will pay
/// - outbox_fee on src_chain
/// - inbox_fee on dst_chain
/// The reward is distributed to
/// - src_chain relayer pool when the response is received
/// - dst_chain relayer pool when the response acknowledgement is received from src_chain.
#[derive(Default, Debug, Encode, Decode, Clone, Copy, Eq, PartialEq, TypeInfo)]
pub struct FeeModel<Balance> {
    /// Fee paid by the endpoint user for any outgoing message.
    pub outbox_fee: ExecutionFee<Balance>,
    /// Fee paid by the endpoint user for any incoming message.
    pub inbox_fee: ExecutionFee<Balance>,
}
// TODO: `compute_fee` and `relayer_pool_fee` should be distributed separately, where
// `compute_fee` should be distributed to executor and `relayer_pool_fee` should be
// distributed to relayer.
impl<Balance: CheckedAdd> FeeModel<Balance> {
pub fn outbox_fee(&self) -> Option<Balance> {
self.outbox_fee
.compute_fee
.checked_add(&self.outbox_fee.relayer_pool_fee)
}
pub fn inbox_fee(&self) -> Option<Balance> {
self.inbox_fee
.compute_fee
.checked_add(&self.inbox_fee.relayer_pool_fee)
}
}
/// Parameters for a new channel between two chains.
#[derive(Default, Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo, Copy)]
pub struct InitiateChannelParams<Balance> {
    /// Maximum number of messages the channel's outbox may hold at once.
    pub max_outgoing_messages: u32,
    /// Fee model applied to messages sent over this channel.
    pub fee_model: FeeModel<Balance>,
}
/// Defines protocol requests performed on chains.
///
/// Carried between chains inside `Payload::Protocol`.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub enum ProtocolMessageRequest<Balance> {
    /// Request to open a channel with foreign chain.
    ChannelOpen(InitiateChannelParams<Balance>),
    /// Request to close an open channel with foreign chain.
    ChannelClose,
}
/// Defines protocol requests performed on chains.
pub type ProtocolMessageResponse = Result<(), DispatchError>;
/// Protocol message that encompasses request or its response.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub enum RequestResponse<Request, Response> {
    /// An outgoing request awaiting a response.
    Request(Request),
    /// The response to a previously sent request.
    Response(Response),
}
/// Payload of the message
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub enum Payload<Balance> {
    /// Protocol message (channel open/close handshakes).
    Protocol(RequestResponse<ProtocolMessageRequest<Balance>, ProtocolMessageResponse>),
    /// Endpoint message (application-level traffic).
    Endpoint(RequestResponse<EndpointRequest, EndpointResponse>),
}
/// Versioned message payload
///
/// New payload formats should be added as new variants so old messages stay decodable.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub enum VersionedPayload<Balance> {
    /// Initial payload format.
    V0(Payload<Balance>),
}
/// Message weight tag used to indicate the consumed weight when handling the message
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo, Default)]
pub enum MessageWeightTag {
    /// Weight of handling a channel-open protocol request.
    ProtocolChannelOpen,
    /// Weight of handling a channel-close protocol request.
    ProtocolChannelClose,
    /// Weight of dispatching a request to the given destination endpoint.
    EndpointRequest(Endpoint),
    /// Weight of handling a response from the given endpoint.
    EndpointResponse(Endpoint),
    /// No specific weight tag; the default.
    #[default]
    None,
}
impl MessageWeightTag {
    /// Construct the weight tag for an outbox message based on the outbox payload.
    ///
    /// Anything other than a protocol request or an endpoint request maps to
    /// `MessageWeightTag::None`.
    pub fn outbox<Balance>(outbox_payload: &VersionedPayload<Balance>) -> Self {
        match outbox_payload {
            VersionedPayload::V0(Payload::Protocol(RequestResponse::Request(
                ProtocolMessageRequest::ChannelOpen(_),
            ))) => MessageWeightTag::ProtocolChannelOpen,
            VersionedPayload::V0(Payload::Protocol(RequestResponse::Request(
                ProtocolMessageRequest::ChannelClose,
            ))) => MessageWeightTag::ProtocolChannelClose,
            VersionedPayload::V0(Payload::Endpoint(RequestResponse::Request(endpoint_req))) => {
                MessageWeightTag::EndpointRequest(endpoint_req.dst_endpoint.clone())
            }
            _ => MessageWeightTag::None,
        }
    }

    /// Construct the weight tag for an inbox response based on the weight tag of the
    /// request message and the response payload.
    ///
    /// Note: the local parameter was previously misspelled `req_tyep`.
    pub fn inbox_response<Balance>(
        req_tag: MessageWeightTag,
        resp_payload: &VersionedPayload<Balance>,
    ) -> Self {
        match (req_tag, resp_payload) {
            // A successful channel-open response is weighed like the open itself.
            (
                MessageWeightTag::ProtocolChannelOpen,
                VersionedPayload::V0(Payload::Protocol(RequestResponse::Response(Ok(_)))),
            ) => MessageWeightTag::ProtocolChannelOpen,
            // An endpoint response inherits the endpoint of the original request.
            (
                MessageWeightTag::EndpointRequest(endpoint),
                VersionedPayload::V0(Payload::Endpoint(RequestResponse::Response(_))),
            ) => MessageWeightTag::EndpointResponse(endpoint),
            _ => MessageWeightTag::None,
        }
    }
}
/// Identifier of a chain.
#[derive(
    Clone,
    Copy,
    Debug,
    Hash,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Encode,
    Decode,
    TypeInfo,
    Serialize,
    Deserialize,
    MaxEncodedLen,
)]
pub enum ChainId {
    /// The consensus chain.
    Consensus,
    /// A domain chain, identified by its `DomainId`.
    Domain(DomainId),
}
impl ChainId {
    /// Returns the identifier of the consensus chain.
    #[inline]
    pub fn consensus_chain_id() -> Self {
        Self::Consensus
    }

    /// Returns `true` if this identifier refers to the consensus chain.
    #[inline]
    pub fn is_consensus_chain(&self) -> bool {
        matches!(self, ChainId::Consensus)
    }
}
impl From<u32> for ChainId {
    /// Converts a raw `u32` into a *domain* chain id.
    ///
    /// NOTE(review): this never yields `ChainId::Consensus` — confirm callers
    /// expect plain integers to always denote domains.
    #[inline]
    fn from(x: u32) -> Self {
        Self::Domain(DomainId::new(x))
    }
}
impl From<DomainId> for ChainId {
    /// Wraps a `DomainId` in the corresponding chain identifier.
    #[inline]
    fn from(x: DomainId) -> Self {
        Self::Domain(x)
    }
}
/// Message contains information to be sent to or received from another chain.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct Message<Balance> {
    /// Chain which initiated this message.
    pub src_chain_id: ChainId,
    /// Chain this message is intended for.
    pub dst_chain_id: ChainId,
    /// ChannelId the message was sent through.
    pub channel_id: ChannelId,
    /// Message nonce within the channel.
    pub nonce: Nonce,
    /// Payload of the message
    pub payload: VersionedPayload<Balance>,
    /// Last delivered message response nonce on src_chain.
    /// `None` when no response has been delivered yet.
    pub last_delivered_message_response_nonce: Option<Nonce>,
}
/// Block info used as part of the Cross chain message proof.
///
/// A (number, hash) pair pinning a specific block of some chain.
#[derive(Default, Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct BlockInfo<Number, Hash> {
    /// Block number of the chain.
    pub block_number: Number,
    /// Block hash of the chain.
    pub block_hash: Hash,
}
/// Proof combines the storage proofs to validate messages.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct Proof<BlockNumber, BlockHash, StateRoot> {
    /// Consensus chain block info when proof was constructed
    pub consensus_chain_block_info: BlockInfo<BlockNumber, BlockHash>,
    /// State root of Consensus chain at above number and block hash.
    /// This is the used to extract the message from proof.
    pub consensus_chain_state_root: StateRoot,
    /// Storage proof that src chain state_root is registered on Consensus chain.
    /// This is optional when the src_chain is Consensus.
    /// BlockNumber and BlockHash is used with storage proof to validate and fetch its state root.
    pub domain_proof: Option<(BlockInfo<BlockNumber, BlockHash>, StorageProof)>,
    /// Storage proof that message is processed on src_chain.
    pub message_proof: StorageProof,
}
impl<BlockNumber: Default, BlockHash: Default, StateRoot: Default>
    Proof<BlockNumber, BlockHash, StateRoot>
{
    /// Placeholder proof for benchmarking: default block info and state root,
    /// no domain proof, and an empty message storage proof.
    #[cfg(feature = "runtime-benchmarks")]
    pub fn dummy() -> Self {
        Proof {
            consensus_chain_block_info: BlockInfo {
                block_number: Default::default(),
                block_hash: Default::default(),
            },
            consensus_chain_state_root: Default::default(),
            domain_proof: None,
            message_proof: StorageProof::empty(),
        }
    }
}
/// Holds the Block info and state roots from which a proof was constructed.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct ExtractedStateRootsFromProof<BlockNumber, BlockHash, StateRoot> {
    /// Consensus chain block info when proof was constructed
    pub consensus_chain_block_info: BlockInfo<BlockNumber, BlockHash>,
    /// State root of Consensus chain at above number and block hash.
    pub consensus_chain_state_root: StateRoot,
    /// Storage proof that src chain state_root is registered on Consensus chain.
    /// This is optional when the src_chain is the consensus chain.
    /// BlockNumber and BlockHash is used with storage proof to validate and fetch its state root.
    pub domain_info: Option<(DomainId, BlockInfo<BlockNumber, BlockHash>, StateRoot)>,
}
/// Cross Domain message contains Message and its proof on src_chain.
#[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct CrossDomainMessage<BlockNumber, BlockHash, StateRoot> {
    /// Chain which initiated this message.
    pub src_chain_id: ChainId,
    /// Chain this message is intended for.
    pub dst_chain_id: ChainId,
    /// ChannelId the message was sent through.
    pub channel_id: ChannelId,
    /// Message nonce within the channel.
    pub nonce: Nonce,
    /// Proof of message processed on src_chain.
    pub proof: Proof<BlockNumber, BlockHash, StateRoot>,
    /// The message weight tag
    pub weight_tag: MessageWeightTag,
}
impl<BlockNumber, BlockHash, StateRoot> CrossDomainMessage<BlockNumber, BlockHash, StateRoot> {
/// Extracts state roots.
/// If the chain proof is present, then then we construct the trie and extract chain state root.
pub fn extract_state_roots_from_proof<Hashing>(
&self,
) -> Option<ExtractedStateRootsFromProof<BlockNumber, BlockHash, StateRoot>>
where
Hashing: hash_db::Hasher,
StateRoot: Clone + Decode + Into<Hashing::Out> + FullCodec + TypeInfo + 'static,
BlockNumber: Clone + FullCodec + TypeInfo + 'static,
BlockHash: Clone + FullCodec + TypeInfo + 'static,
{
let xdm_proof = self.proof.clone();
let consensus_chain_state_root = xdm_proof.consensus_chain_state_root.clone();
let mut extracted_state_roots = ExtractedStateRootsFromProof {
consensus_chain_block_info: xdm_proof.consensus_chain_block_info,
consensus_chain_state_root: xdm_proof.consensus_chain_state_root,
domain_info: None,
};
// verify intermediate domain proof and retrieve state root of the message.
let domain_proof = xdm_proof.domain_proof;
match self.src_chain_id {
// if the src_chain is a consensus chain, return the state root as is since message is on consensus runtime
ChainId::Consensus if domain_proof.is_none() => Some(extracted_state_roots),
// if the src_chain is a domain, then return the state root of the domain by verifying the domain proof.
ChainId::Domain(domain_id) if domain_proof.is_some() => {
let (domain_info, domain_state_root_proof) =
domain_proof.expect("checked for existence value above");
let domain_state_root_key = DomainStateRootStorage::<_, _, StateRoot>::storage_key(
domain_id,
domain_info.block_number.clone(),
domain_info.block_hash.clone(),
);
let domain_state_root =
match StorageProofVerifier::<Hashing>::verify_and_get_value::<StateRoot>(
&consensus_chain_state_root.into(),
domain_state_root_proof,
domain_state_root_key,
) {
Ok(result) => result,
Err(err) => {
log::error!(
target: "runtime::messenger",
"Failed to verify Domain proof: {:?}",
err
);
return None;
}
};
extracted_state_roots.domain_info =
Some((domain_id, domain_info, domain_state_root));
Some(extracted_state_roots)
}
_ => None,
}
}
}
/// Relayer message with storage key to generate storage proof using the backend.
#[derive(Debug, Encode, Decode, TypeInfo, Clone, Eq, PartialEq)]
pub struct RelayerMessageWithStorageKey {
    /// Chain which initiated this message.
    pub src_chain_id: ChainId,
    /// Chain this message is intended for.
    pub dst_chain_id: ChainId,
    /// ChannelId the message was sent through.
    pub channel_id: ChannelId,
    /// Message nonce within the channel.
    pub nonce: Nonce,
    /// Storage key to generate proof for using proof backend.
    pub storage_key: Vec<u8>,
    /// The message weight tag
    pub weight_tag: MessageWeightTag,
}
/// Set of messages with storage keys to be relayed by a given relayer.
#[derive(Default, Debug, Encode, Decode, TypeInfo, Clone, Eq, PartialEq)]
pub struct RelayerMessagesWithStorageKey {
    /// Outgoing messages awaiting relay.
    pub outbox: Vec<RelayerMessageWithStorageKey>,
    /// Responses to inbox messages awaiting relay.
    pub inbox_responses: Vec<RelayerMessageWithStorageKey>,
}
impl<BlockNumber, BlockHash, StateRoot> CrossDomainMessage<BlockNumber, BlockHash, StateRoot> {
pub fn from_relayer_msg_with_proof(
r_msg: RelayerMessageWithStorageKey,
proof: Proof<BlockNumber, BlockHash, StateRoot>,
) -> Self {
CrossDomainMessage {
src_chain_id: r_msg.src_chain_id,
dst_chain_id: r_msg.dst_chain_id,
channel_id: r_msg.channel_id,
nonce: r_msg.nonce,
proof,
weight_tag: r_msg.weight_tag,
}
}
}
/// This is a representation of actual StateRoots storage in pallet-domains.
/// Any change in key or value there should be changed here accordingly.
///
/// Zero-sized: `PhantomData` only records the key/value type parameters.
pub struct DomainStateRootStorage<Number, Hash, StateRoot>(PhantomData<(Number, Hash, StateRoot)>);
impl<Number, Hash, StateRoot> StorageMap<(DomainId, Number, Hash), StateRoot>
    for DomainStateRootStorage<Number, Hash, StateRoot>
where
    Number: FullCodec + TypeInfo + 'static,
    Hash: FullCodec + TypeInfo + 'static,
    StateRoot: FullCodec + TypeInfo + 'static,
{
    type Query = Option<StateRoot>;
    // Must stay in sync with the hasher used by pallet-domains' `StateRoots` map.
    type Hasher = Identity;

    /// Pallet name prefix ("Domains") of the mirrored storage item.
    fn module_prefix() -> &'static [u8] {
        b"Domains"
    }

    /// Storage item prefix ("StateRoots") of the mirrored storage item.
    fn storage_prefix() -> &'static [u8] {
        b"StateRoots"
    }

    fn from_optional_value_to_query(v: Option<StateRoot>) -> Self::Query {
        v
    }

    fn from_query_to_optional_value(v: Self::Query) -> Option<StateRoot> {
        v
    }
}
impl<Number, Hash, StateRoot> DomainStateRootStorage<Number, Hash, StateRoot>
where
    Number: FullCodec + TypeInfo + 'static,
    Hash: FullCodec + TypeInfo + 'static,
    StateRoot: FullCodec + TypeInfo + 'static,
{
    /// Computes the final storage key of `Domains::StateRoots[(domain_id, number, hash)]`,
    /// suitable for storage-proof lookups against consensus chain state.
    pub fn storage_key(domain_id: DomainId, number: Number, hash: Hash) -> StorageKey {
        StorageKey(Self::storage_map_final_key::<(DomainId, Number, Hash)>((
            domain_id, number, hash,
        )))
    }
}
|
use std::collections::BTreeMap;
/// Builds a stock report: for each requested category (identified by its first
/// letter) the total quantity of all articles whose code starts with that letter.
///
/// `list_art` holds entries like `"BBAR 150"` (code, space, quantity);
/// `list_cat` holds the categories to report. Returns an empty string when
/// there is no stock at all; otherwise segments like `"(A : 0) - (B : 1290)"`.
fn stock_list(list_art: Vec<&str>, list_cat: Vec<&str>) -> String {
    // Total quantities keyed by the initial letter of the article code.
    let mut totals: BTreeMap<char, i32> = BTreeMap::new();
    for entry in &list_art {
        // `split_whitespace` tolerates repeated spaces, unlike `split(" ")`.
        let mut parts = entry.split_whitespace();
        let code = parts.next().unwrap();
        let qty: i32 = parts.next().unwrap().parse().unwrap();
        *totals.entry(code.chars().next().unwrap()).or_insert(0) += qty;
    }
    if totals.is_empty() {
        return String::new();
    }
    // Join directly — the previous `into_iter().collect::<Vec<String>>()` on an
    // already-built `Vec<String>` was a redundant copy.
    list_cat
        .iter()
        .map(|cat| {
            let initial = cat.chars().next().unwrap();
            // Categories with no matching stock report 0.
            format!("({} : {})", initial, totals.get(&initial).copied().unwrap_or(0))
        })
        .collect::<Vec<_>>()
        .join(" - ")
}
#[test]
fn test0() {
    // A requested category with no matching stock codes reports 0.
    let a = vec!["BBAR 150", "CDXE 515", "BKWR 250", "BTSQ 890", "DRTY 600"];
    let b = vec!["A"] ;
    assert_eq!( stock_list(a,b), "(A : 0)" );
}
#[test]
fn test1() {
    // Category B sums all B-prefixed entries: 150 + 250 + 890 = 1290.
    let a = vec!["BBAR 150", "CDXE 515", "BKWR 250", "BTSQ 890", "DRTY 600"];
    let b = vec!["A", "B"] ;
    assert_eq!( stock_list(a,b), "(A : 0) - (B : 1290)" );
}
#[test]
fn test2() {
    // Adds category C, which matches the single CDXE entry.
    let a = vec!["BBAR 150", "CDXE 515", "BKWR 250", "BTSQ 890", "DRTY 600"];
    let b = vec!["A", "B", "C"] ;
    assert_eq!( stock_list(a,b), "(A : 0) - (B : 1290) - (C : 515)" );
}
#[test]
fn test3() {
    // All four categories; segments are joined by " - " in category order.
    let a = vec!["BBAR 150", "CDXE 515", "BKWR 250", "BTSQ 890", "DRTY 600"];
    let b = vec!["A", "B", "C", "D"] ;
    assert_eq!( stock_list(a,b), "(A : 0) - (B : 1290) - (C : 515) - (D : 600)" );
}
#[test]
fn test4() {
    // Uniform quantities: three B entries accumulate to 150.
    let a = vec!["BBAR 50", "CDXE 50", "BKWR 50", "BTSQ 50", "DRTY 50"];
    let b = vec!["A", "B", "C", "D"] ;
    assert_eq!( stock_list(a,b), "(A : 0) - (B : 150) - (C : 50) - (D : 50)" );
}
#[test]
fn test5() {
    // Every entry starts with A; the other categories report 0.
    let a = vec!["AZB 650", "AXE 545", "AXZ 250", "AXEA 190", "AXI 800"];
    let b = vec!["A", "B", "C", "D"] ;
    assert_eq!( stock_list(a,b), "(A : 2435) - (B : 0) - (C : 0) - (D : 0)" );
}
#[test]
fn test6() {
    // Only the first letter of the code matters when bucketing quantities.
    let a = vec!["ABAR 150", "ADXE 515", "AKWR 250", "ATSQ 890", "ARTY 600"];
    let b = vec!["A", "B", "C", "D"] ;
    assert_eq!( stock_list(a,b), "(A : 2405) - (B : 0) - (C : 0) - (D : 0)" );
}
#[test]
fn test7() {
    // Larger quantities: B sums 2500 + 8900 = 11400.
    let a = vec!["ABAR 1500", "CDXE 5150", "BKWR 2500", "BTSQ 8900", "DRTY 6000"];
    let b = vec!["A", "B", "C", "D"] ;
    assert_eq!( stock_list(a,b), "(A : 1500) - (B : 11400) - (C : 5150) - (D : 6000)" );
}
#[test]
fn test8() {
    // Empty stock list yields an empty report regardless of categories.
    let a = vec![];
    let b = vec!["A", "B", "C", "D"] ;
    assert_eq!( stock_list(a,b), "" );
}
#[test]
fn test9() {
    // Empty stock list with a single category still yields an empty string.
    let a = vec![];
    let b = vec!["A"] ;
    assert_eq!( stock_list(a,b), "" );
}
fn main() {
    // Intentionally empty: behavior is exercised by the #[test] functions above.
}
|
//#![allow(dead_code, unused_must_use, unused_imports, unstable)]
// Temporary warning removal until old_io is updated et al.
#![feature(io, collections, core)]
#[macro_use] extern crate log;
use factory::{Record, RecordResult};
use std::string::ToString;
pub mod controller;
pub mod factory;
mod utils;
/// A text-file backed record database handle.
struct Txtdb {
    // Produces/parses records; not yet used by the stub methods below.
    factory: factory::Factory,
}
impl Txtdb {
    /// Wraps the given record factory in a new database handle.
    pub fn new(factory: factory::Factory) -> Txtdb {
        Txtdb {
            factory: factory,
        }
    }
    // NOTE(review): `add` is a stub — it always reports id 1 and persists nothing.
    #[allow(dead_code, unused_must_use, unused_variables)]
    fn add<T: ToString>(&mut self, record: T) -> RecordResult<u64, String> {
        //! Add a new record to the database. Returns the id of the record added.
        Ok(1)
    }
    #[allow(dead_code, unused_must_use, unused_variables)]
    fn remove_id(&mut self, id: u64) -> RecordResult<Record, String> {
        //! Removes a record with the id provided if it exists.
        //! Returns a `RecordResult` of the record removed.
        Err("Not implemented yet".to_string())
    }
    #[allow(dead_code, unused_must_use, unused_variables)]
    fn remove(&mut self, record: Record) -> RecordResult<u64, String> {
        //! Finds and removes the first instance of a record that matches the one provided.
        //! Returns the id of the record it removes.
        Err("Not implemented yet".to_string())
    }
    // NOTE(review): unlike the other finders, `find_id` takes no `self`
    // receiver — confirm whether it is meant to be an associated function.
    #[allow(dead_code, unused_must_use, unused_variables)]
    fn find_id(id: u64) -> RecordResult<Record, String> {
        //! Searches for a record with the id provided.
        //! Returns a copy of the record.
        // 1. Read each line?
        // 2. Check if the ID matches
        // 3. Return
        Err("Not implemented yet".to_string())
    }
    #[allow(dead_code, unused_must_use, unused_variables)]
    fn find(&self, record: Record) -> RecordResult<u64, String> {
        //! Searches for the first instance of a record that matches the one provided.
        //! Returns the id of the record in the database.
        // TODO, how do you create a `Record` if you don't know the id?
        // Since we aren't using it, should we document not having the id in there?
        //
        // 1. Base64 encode the Record
        // 2. Read each line to find the match encoded value
        // 3. Return id
        Err("Not implemented yet".to_string())
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate diesel;
extern crate rand;
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
extern crate rocket_cors;
extern crate reqwest;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate toml;
pub mod db;
pub mod login;
pub mod model;
pub mod schema;
pub mod state;
use db::DatabaseConn;
use rocket::http::Method;
use rocket_cors::{AllowedHeaders, AllowedOrigins};
use state::global_config::GlobalConfig;
/// Health-check route: plain greeting served at the site root.
#[get("/")]
fn index() -> &'static str {
    "Hello Rocket !"
}
/// Entry point: loads the global config, configures CORS and launches the
/// Rocket server with the index, login and API routes.
fn main() {
    // Load config
    println!("Loading config ...");
    let config: GlobalConfig = GlobalConfig::load();
    println!("Config loaded successfully !");
    // Setup CORS Options
    // NOTE(review): `failed_origins` (origins that could not be parsed) is
    // silently ignored — confirm all origins listed here are valid.
    let (allowed_origins, failed_origins) = AllowedOrigins::some(&[
        "http://192.168.1.1:8080",
        "http://192.168.1.1:8000",
        "http://localhost",
    ]);
    let _ = failed_origins;
    let cors_options = rocket_cors::Cors {
        // Field-init shorthand: `allowed_origins: allowed_origins` was redundant.
        allowed_origins,
        allowed_methods: vec![Method::Get].into_iter().map(From::from).collect(),
        allowed_headers: AllowedHeaders::some(&["Authorization", "Accept"]),
        allow_credentials: true,
        ..Default::default()
    };
    println!("Launching the server ...");
    rocket::ignite()
        .manage(config)
        .attach(DatabaseConn::fairing())
        .attach(cors_options)
        .mount("/", routes![index])
        .mount(
            "/login",
            routes![
                login::github::cb_login_github,
                login::gitlab::cb_login_gitlab
            ],
        )
        .mount("/api", routes![model::user::get_username])
        .launch();
}
|
use std::collections::{BTreeMap, HashMap, HashSet};
/// A food item: its listed ingredients and the allergens it declares.
#[derive(Debug, Clone)]
struct Food {
    ingredients: Vec<String>,
    allergens: Vec<String>,
}

impl std::str::FromStr for Food {
    type Err = ();

    /// Parses a line of the form `"a b c (contains x, y)"`.
    /// Panics (via `unwrap`) if the `(contains ...)` section is missing.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let trimmed = s.trim_end_matches(')');
        let mut halves = trimmed.split(" (contains ");
        let ingredients: Vec<String> = halves
            .next()
            .unwrap()
            .split(' ')
            .map(str::to_owned)
            .collect();
        let allergens: Vec<String> = halves
            .next()
            .unwrap()
            .split(", ")
            .map(str::to_owned)
            .collect();
        Ok(Self { ingredients, allergens })
    }
}
/// Reads the puzzle input from the file `input` (one food per line) and runs
/// both parts.
fn main() {
    let foods: Vec<Food> = std::fs::read_to_string("input")
        .unwrap()
        .lines()
        .map(|l| l.parse().unwrap())
        .collect();
    // Sample input from the puzzle statement, kept for quick manual testing:
    // let foods: Vec<Food> = "mxmxvkd kfcds sqjhc nhms (contains dairy, fish)
    // trh fvjkl sbzzf mxmxvkd (contains dairy)
    // sqjhc fvjkl (contains soy)
    // sqjhc mxmxvkd sbzzf (contains fish)".lines().map(|l| l.parse().unwrap()).collect();
    part1(&foods);
    part2(&foods);
}
/// Tallies, for every allergen, how often each ingredient co-occurs with it,
/// and collects the set of ingredients that can be ruled out as allergen
/// carriers ("anti-candidates").
///
/// After pruning, each allergen's map retains only the ingredients with the
/// maximal co-occurrence count (i.e. appearing in every food listing that
/// allergen); those candidates are removed from the anti-candidate set.
fn analyze_foods(foods: &[Food]) -> (BTreeMap<&String, HashMap<&String, u32>>, HashSet<&String>) {
    let mut allergen_counts: BTreeMap<&String, HashMap<&String, u32>> = BTreeMap::new();
    let mut anti_candidates = HashSet::new();
    for Food { ingredients, allergens } in foods.iter() {
        for allergen in allergens.iter() {
            let counts= allergen_counts.entry(allergen).or_default();
            for ingredient in ingredients.iter() {
                *counts.entry(ingredient).or_default() += 1;
            }
        }
        // Start by assuming every ingredient is allergen-free; candidates are
        // removed again below.
        anti_candidates.extend(ingredients);
    }
    for (_, counts) in allergen_counts.iter_mut() {
        // An ingredient carrying this allergen must appear in every food that
        // lists it, i.e. with the maximal co-occurrence count.
        let (_, max) = counts.iter().max_by_key(|(_, count)| **count).unwrap();
        let mut remove = Vec::new();
        for (ingredient, count) in counts.iter() {
            if count == max {
                anti_candidates.remove(ingredient);
            } else {
                remove.push(*ingredient);
            }
        }
        // Prune non-maximal ingredients from this allergen's candidate map.
        for ingredient in remove.iter() {
            counts.remove(ingredient);
        }
    }
    (allergen_counts, anti_candidates)
}
/// Part 1: counts how many times an allergen-free ingredient appears across
/// all food listings and prints the total.
fn part1(foods: &[Food]) {
    // `foods` is already a slice; the previous `&foods` was a needless re-borrow.
    let (_, anti_candidates) = analyze_foods(foods);
    let total = foods
        .iter()
        .flat_map(|Food { ingredients, .. }| ingredients.iter())
        .filter(|ingredient| anti_candidates.contains(*ingredient))
        .count();
    println!("{}", total);
}
/// Part 2: resolves which ingredient carries which allergen by iteratively
/// fixing allergens whose candidate set has narrowed to one ingredient, then
/// prints the ingredients joined by commas, ordered by allergen name.
fn part2(foods: &[Food]) {
    let (mut allergen_counts, anti_candidates) = analyze_foods(&foods);
    let mut all_ingredients: HashSet<&String> = HashSet::new();
    for (_, counts) in allergen_counts.iter_mut() {
        // Drop ingredients already proven allergen-free.
        for ac in anti_candidates.iter() {
            counts.remove(ac);
        }
        all_ingredients.extend(counts.keys());
    }
    let mut ingredients: BTreeMap<&String, &String> = BTreeMap::new();
    // Constraint propagation: assign allergens with exactly one remaining
    // candidate, then remove that ingredient from every other candidate map;
    // repeat until every suspect ingredient is assigned.
    while ingredients.len() != all_ingredients.len() {
        for (allergen, counts) in allergen_counts.iter() {
            if counts.len() == 1 && !ingredients.contains_key(allergen) {
                ingredients.insert(allergen, counts.keys().next().unwrap());
            }
        }
        for (_, counts) in allergen_counts.iter_mut() {
            for ingredient in ingredients.values() {
                counts.remove(ingredient);
            }
        }
    }
    // BTreeMap iterates keys (allergens) in sorted order, as required.
    println!("{}", ingredients.into_values().cloned().collect::<Vec<_>>().join(","));
}
|
use std::sync::{Arc, Mutex, Once, ONCE_INIT};
use std::mem;
use std::thread;
use render::render_system::RenderSystem;
use resource::resource_system::ResourcSystem;
use common;
use glutin;
use whi;
use gfx;
use gfx_window_glutin;
use state;
use specs::*;
use render::types::*;
use config;
use common::timer::*;
use std::time::{Duration, Instant};
/// Frame timing bookkeeping: FPS counting plus per-frame and fixed-step durations.
pub struct FrameTime {
    // Timestamp of the last FPS-counter rollover (units come from
    // `common::timer::current_time_ns` — presumably nanoseconds; the `>= 1000`
    // threshold in `advance_frame` looks like milliseconds — TODO confirm).
    frame_last_fps: u64,
    // Timestamp captured at the start of the previous frame.
    last_frame_time: u64,
    // Frames rendered since the last FPS rollover.
    frame_count: i32,
    // Frames counted in the most recent full interval.
    frame_rate: i32,
    // Wall time the previous frame took (set from the stopwatch in `run`).
    delta_time: Duration,
    // Fixed simulation step: 16_666_666 ns ≈ 1/60 s.
    fixed_step: Duration,
}
impl FrameTime {
    /// Creates a zeroed frame-time tracker with a fixed step of
    /// 16_666_666 ns (approximately one 60th of a second).
    pub fn new() -> Self {
        FrameTime {
            frame_last_fps: 0,
            last_frame_time: 0,
            frame_count: 0,
            frame_rate: 0,
            delta_time: Duration::new(0, 0),
            fixed_step: Duration::new(0, 16_666_666),
        }
    }
}
pub struct Game {
timer: Stopwatch,
frame_time: FrameTime,
window: glutin::Window,
render_system: RenderSystem,
resource_system: ResourcSystem,
planner: Planner<()>,
pipe: Pipeline;
state: State,
}
impl Game {
    /// Builds the window, ECS world/planner and render/resource systems.
    ///
    /// NOTE(review): this constructor does not initialize the `pipe` and
    /// `state` fields declared on `Game`, and the `config` parameter is
    /// unused — as written this cannot compile. TODO: fix before use.
    pub fn new(config: config::Config) -> Self{
        let mut wb = whi::dxgi::window::init();
        let mut world = World::new();
        use num_cpus;
        // Size the ECS planner's thread pool to the machine's CPU count.
        let num = num_cpus::get();
        let mut planner = Planner::new(world, num);
        let (window, mut device, mut factory, main_color, mut main_depth) =
            gfx_window_glutin::init::<ColorFormat, DepthFormat>(wb);
        let mut render_system = RenderSystem::new(device, factory, main_color, main_depth);
        let mut rescource_system = ResourcSystem::new();
        let mut frame_time = FrameTime::new();
        Game {
            timer: Stopwatch::new(),
            frame_time: frame_time,
            window: window,
            render_system: render_system,
            resource_system: rescource_system,
            planner: planner,
        }
    }
    // TODO for different kinds of window
    // fn init_window(&mut self){
    // }
    /// Per-frame game logic update; currently a no-op stub.
    #[allow(dead_code)]
    fn update(&mut self, elapsed_time:u64) {
        // Statement has no effect; placeholder until update logic exists.
        elapsed_time;
    }
    /// Forwards the elapsed time to the render system.
    fn render(&mut self, elapsed_time:u64) {
        self.render_system.render(elapsed_time)
    }
    /// Current time from the common timer (nanoseconds per its name).
    pub fn get_game_time() -> u64 {
        common::timer::current_time_ns()
    }
    /// Main loop: runs frames while the state machine is running, timing each
    /// frame with the stopwatch.
    ///
    /// NOTE(review): uses `self.states`, but the struct declares `state` —
    /// this does not compile as written; confirm intended field name.
    pub fn run(&mut self) {
        let world = &mut self.planner.mut_world();
        let resource_system = &mut self.resource_system;
        let pipe = &mut self.pipe;
        self.states.start(world, resource_system, pipe);
        while self.states.is_running() {
            self.timer.restart();
            self.advance_frame();
            self.timer.stop();
            self.frame_time.delta_time = self.timer.elapsed();
        }
    }
    /// Runs a single frame: window events, ECS dispatch, update, render and
    /// FPS accounting.
    fn advance_frame(&mut self) {
        for event in self.window.poll_events() {
            match event {
                // NOTE(review): `self.states.S` is an incomplete expression —
                // the close handler was never finished.
                glutin::Event::Closed => self.states.S ,
                _ => ()
            }
        }
        // let events = self.gfx_device.poll_events();
        let world = &mut self.planner.mut_world();
        // let assets = &mut self.assets;
        // let pipe = &mut self.pipe;
        // self.states.handle_events(events.as_ref(), world, assets, pipe);
        // if self.last_fixed_update.elapsed() >= self.fixed_step {
        //     self.states.fixed_update(world, assets, pipe);
        //     self.last_fixed_update += self.fixed_step;
        // }
        // self.states.update(world, assets, pipe);
        self.planner.dispatch(());
        self.planner.wait();
        // First frame: seed the timestamp so elapsed_time starts at zero.
        if self.frame_time.last_frame_time == 0u64 {
            self.frame_time.last_frame_time = Game::get_game_time();
        }
        let frame_time = Game::get_game_time();
        let elapsed_time = frame_time - self.frame_time.last_frame_time;
        self.frame_time.last_frame_time = frame_time;
        self.update(elapsed_time);
        self.render(elapsed_time);
        self.frame_time.frame_count += 1;
        // Roll the FPS counter over once per interval (threshold 1000 — see
        // the units note on `FrameTime::frame_last_fps`).
        if (Game::get_game_time() - self.frame_time.frame_last_fps) >= 1000 {
            self.frame_time.frame_rate = self.frame_time.frame_count;
            self.frame_time.frame_count = 0;
            self.frame_time.frame_last_fps = Game::get_game_time();
        }
    }
    /// Requests shutdown; not yet implemented.
    pub fn exit(&mut self){
        // tell thread to exit
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Placeholder smoke test.
    ///
    /// BUG FIX: the `#[test]` attribute was missing, so the harness never ran
    /// this function.
    #[test]
    fn test_init() {
    }
}
|
use postgres::{Client, NoTls};
use std::env::var;
pub mod community;
pub mod user;
pub mod member;
pub mod pin;
pub mod review;
pub mod restaurant;
pub fn get_client() -> Client {
let url = var("DATABASE_URL").unwrap_or("localhost".to_string());
let client = Client::connect(
format!("postgresql://postgres:root@{url}/goyotashi", url=url).as_str(),
NoTls
).unwrap();
return client;
} |
#![allow(unused_imports)]
use {
crate::{
config::CONFIG,
layout::LayoutTag,
models::{rect::*, window_type::WindowType, windowwrapper::*, HandleState, WindowState},
state::State,
wm,
xlibwrapper::action,
xlibwrapper::core::*,
xlibwrapper::masks::*,
xlibwrapper::util::*,
xlibwrapper::xlibmodels::*,
},
reducer::*,
std::cell::RefCell,
std::rc::Rc,
};
impl Reducer<action::MotionNotify> for State {
    /// Handles pointer motion: keeps `current_monitor` in sync with the
    /// pointer position and, when Super+Button1 is held on a floating or
    /// transient window, drags that window (re-homing it to the monitor under
    /// the pointer).
    fn reduce(&mut self, action: action::MotionNotify) {
        let actual_mon = wm::get_monitor_by_point(self, action.x_root, action.y_root);
        let old_mon = self.current_monitor;
        // Pointer crossed onto a different monitor: unfocus the old monitor's
        // focused client and mark the new monitor for focus.
        if self.current_monitor != actual_mon {
            if let Some(mon) = self.monitors.get_mut(&old_mon) {
                mon.mouse_follow.replace(false);
                if let Some(client) = mon.get_client(self.focus_w) {
                    client.handle_state.replace(HandleState::Unfocus.into());
                }
            }
            self.current_monitor = actual_mon;
            let mon = self
                .monitors
                .get_mut(&self.current_monitor)
                .expect("MotionNotify - monitor - get_mut - change handle state");
            mon.handle_state.replace(HandleState::Focus);
            mon.mouse_follow.replace(false);
        }
        let layout = self
            .monitors
            .get(&self.current_monitor)
            .expect("MotionNotify - monitor - get - check layout")
            .get_current_layout()
            .expect("MotionNotify - monitor - get_current_layout");
        // Bail out silently when the event's window is not a managed client.
        let is_trans = match self
            .monitors
            .get(&self.current_monitor)
            .expect("MotionNotify - monitor - get - action.win is_trans")
            .get_client(action.win)
        {
            Some(client) => client.is_trans,
            None => return,
        };
        // Dragging is only allowed in the floating layout or for transient windows.
        if layout != LayoutTag::Floating && !is_trans {
            return;
        }
        let new_pos = calculcate_destination(self, &action);
        // Drag gesture: Super key + left button held during the motion.
        if (action.state & (Button1Mask | CONFIG.super_key)) == Button1Mask | CONFIG.super_key {
            if action.win == self.lib.get_root() {
                return;
            }
            // Re-home the window from the monitor it was on to the monitor
            // currently under the pointer before moving it.
            if action.win != self.lib.get_root() {
                let ww = self
                    .monitors
                    .get_mut(&old_mon)
                    .expect("MotionNotify - old_mon - get_mut")
                    .remove_window(action.win)
                    .expect("Trying to remove window in motion_notify");
                self.monitors
                    .get_mut(&actual_mon)
                    .expect("MotionNotify - old_mon - get_mut")
                    .add_window(action.win, ww);
            }
            let (pos, _) = self
                .monitors
                .get_mut(&self.current_monitor)
                .expect("MotionNotify - monitor - get_mut")
                .move_window(action.win, new_pos.x, new_pos.y);
            let w = self
                .monitors
                .get_mut(&self.current_monitor)
                .expect("MotionNotify - monitor - get_mut")
                .get_client_mut(action.win)
                .expect("motion_notify some window");
            // Monocle windows keep their position; everything else follows the drag.
            if w.current_state != WindowState::Monocle {
                w.set_position(pos);
                w.handle_state = HandleState::Move.into();
            }
            return;
        }
    }
}
/// Computes where a dragged window should end up: the frame's position when
/// the drag started, offset by how far the pointer has moved since then.
/// (Function name kept as-is — (sic) — to match existing call sites.)
fn calculcate_destination(state: &State, action: &action::MotionNotify) -> Position {
    let delta_x = action.x_root - state.drag_start_pos.0;
    let delta_y = action.y_root - state.drag_start_pos.1;
    Position {
        x: state.drag_start_frame_pos.0 + delta_x,
        y: state.drag_start_frame_pos.1 + delta_y,
    }
}
|
use crate::types::Vector2;
/// A single unit of keyboard input captured during a frame.
#[derive(Clone, Debug)]
pub enum InputCharacter {
    /// A printable character.
    Char(char),
    /// A non-printable key press together with its modifier state.
    ControlCode {
        key_code: crate::input_handler::KeyCode,
        modifier_shift: bool,
        modifier_ctrl: bool
    },
}
/// Per-frame snapshot of mouse and keyboard input state.
#[derive(Default, Clone)]
pub struct Input {
    pub mouse_position: Vector2,
    // Held state of the mouse button.
    pub is_mouse_down: bool,
    // Edge-triggered press/release flags; cleared each frame by `reset`.
    pub click_down: bool,
    pub click_up: bool,
    pub mouse_wheel: Vector2,
    // Characters and control codes received since the last `reset`.
    pub input_buffer: Vec<InputCharacter>,
    // While set, the mouse accessors below report inactive.
    pub cursor_grabbed: bool,
}
impl Input {
    /// True while a mouse button is held, unless the cursor is grabbed.
    pub fn is_mouse_down(&self) -> bool {
        // `!x` instead of `x == false` (clippy::bool_comparison).
        self.is_mouse_down && !self.cursor_grabbed
    }
    /// True on the frame the button was pressed, unless the cursor is grabbed.
    pub fn click_down(&self) -> bool {
        self.click_down && !self.cursor_grabbed
    }
    /// True on the frame the button was released, unless the cursor is grabbed.
    pub fn click_up(&self) -> bool {
        self.click_up && !self.cursor_grabbed
    }
    /// Clears the per-frame state (click edges, wheel delta, typed input).
    pub fn reset(&mut self) {
        self.click_down = false;
        self.click_up = false;
        self.mouse_wheel = Vector2::new(0., 0.);
        // `clear` keeps the buffer's allocation for reuse next frame,
        // unlike the previous `= vec![]` which dropped it every frame.
        self.input_buffer.clear();
    }
}
|
#[macro_use]
extern crate bencher;
extern crate regex;
use rex_regex as rex;
use bencher::Bencher;
/// Benchmarks `match_re_str`, which compiles the pattern and matches it on
/// every iteration (compilation cost is included in the measurement).
fn bench_simple_re(b: &mut Bencher) {
    b.iter(|| {
        assert!(
            rex::match_re_str("^(Hello)? [Ww]orld!?$", "Hello world")
                .unwrap()
                .0
        );
    });
}
/// Same pattern as `bench_simple_re`, but compiled once outside the loop so
/// only the matching cost is measured.
fn bench_simple_precompile(b: &mut Bencher) {
    let re = rex::compile("^(Hello)? [Ww]orld!?$").unwrap();
    b.iter(|| {
        assert!(rex::match_re(&re, "Hello world").0);
    });
}
/// Matching baseline: a fully anchored literal pattern, precompiled.
fn bench_simplest_precompile(b: &mut Bencher) {
    let re = rex::compile("^Hello world$").unwrap();
    b.iter(|| {
        assert!(rex::match_re(&re, "Hello world").0);
    });
}
/// Pathological nested-repetition pattern `(x+x+)+y`, known to cause
/// exponential backtracking in naive engines.
fn bench_notorious(b: &mut Bencher) {
    let re = rex::compile("(x+x+)+y").unwrap();
    b.iter(|| {
        assert!(rex::match_re(&re, "xxxxxxxxxxy").0);
    });
}
/// Same input as `bench_notorious`, but with the equivalent non-pathological
/// pattern `(x+)y` for comparison.
fn bench_notorious_but_sane(b: &mut Bencher) {
    let re = rex::compile("(x+)y").unwrap();
    b.iter(|| {
        assert!(rex::match_re(&re, "xxxxxxxxxxy").0);
    });
}
/// The pathological pattern run under the `regex` crate (note the longer
/// input string than the `rex` variant uses).
fn bench_notorious_regex_crate(b: &mut Bencher) {
    let re = regex::Regex::new("(x+x+)+y").unwrap();
    b.iter(|| {
        assert!(re.is_match("xxxxxxxxxxxxxxxxxxxxxxy"));
    });
}
/// The simple pattern run under the `regex` crate as a cross-engine baseline.
fn bench_regex_crate(b: &mut Bencher) {
    let re = regex::Regex::new("^(Hello)? [Ww]orld!?$").unwrap();
    b.iter(|| {
        assert!(re.is_match("Hello World"));
    });
}
// Register all benchmarks in a single group and emit the harness `main`.
benchmark_group!(
    benchs,
    bench_simple_re,
    bench_simple_precompile,
    bench_notorious,
    bench_notorious_but_sane,
    bench_notorious_regex_crate,
    bench_regex_crate,
    bench_simplest_precompile
);
benchmark_main!(benchs);
|
mod cli;
mod config;
mod download;
mod install;
mod status;
mod uninstall;
use anyhow::{Context, Result};
use cli::{Args, ConfigPath, Platform};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use structopt::StructOpt;
use pahkat_client::{Config, PackageStore};
/// Resolve the configuration directory: prefer the path supplied by `holder`,
/// otherwise fall back to the platform config dir joined with "Pahkat".
#[inline]
#[cfg(any(windows, target_os = "macos", target_os = "linux"))]
fn config_path(holder: &dyn ConfigPath) -> Result<PathBuf> {
    let resolved = holder
        .config_path()
        .map(Path::to_owned)
        .or_else(|| directories::BaseDirs::new().map(|dirs| dirs.config_dir().join("Pahkat")));
    resolved.with_context(|| "No default config path could be found")
}
/// Open the Windows package store, loading config from `config_path` when
/// given, otherwise from the default location.
#[inline(always)]
#[cfg(feature = "windows")]
async fn store(config_path: Option<&Path>) -> anyhow::Result<Arc<dyn PackageStore>> {
    let config = match config_path {
        // NOTE(review): `.0` discards the second element of `Config::load`'s
        // return value — presumably load errors; confirm ignoring them here is
        // intentional (the macOS variant propagates with `?` instead).
        Some(v) => pahkat_client::Config::load(&v, pahkat_client::Permission::ReadWrite).0,
        None => pahkat_client::Config::load_default()?,
    };
    let store = Arc::new(pahkat_client::WindowsPackageStore::new(config).await);
    // A store with no repositories is usable but cannot resolve any packages.
    if store.config().read().unwrap().repos().is_empty() {
        println!("WARNING: There are no repositories in the given config.");
    }
    Ok(store)
}
/// Open an existing prefix package store rooted at `config_path` (required).
#[inline(always)]
#[cfg(feature = "prefix")]
async fn store(config_path: Option<&Path>) -> anyhow::Result<Arc<dyn PackageStore>> {
    let config_path = config_path.ok_or_else(|| anyhow::anyhow!("No prefix path specified"))?;
    let store = Arc::new(pahkat_client::PrefixPackageStore::open(config_path).await?);
    // A store with no repositories is usable but cannot resolve any packages.
    if store.config().read().unwrap().repos().is_empty() {
        println!("WARNING: There are no repositories in the given config.");
    }
    Ok(store)
}
/// Create a brand-new prefix package store at `config_path` (required).
/// Unlike `store`, this initializes the prefix instead of opening it.
#[inline(always)]
#[cfg(feature = "prefix")]
async fn create_store(config_path: Option<&Path>) -> anyhow::Result<Arc<dyn PackageStore>> {
    let config_path = config_path.ok_or_else(|| anyhow::anyhow!("No prefix path specified"))?;
    let store = Arc::new(pahkat_client::PrefixPackageStore::create(config_path).await?);
    // A freshly created store typically has no repositories configured yet.
    if store.config().read().unwrap().repos().is_empty() {
        println!("WARNING: There are no repositories in the given config.");
    }
    Ok(store)
}
/// Open the macOS package store, loading config from `config_path` when
/// given, otherwise from the default location. Load errors are propagated.
#[inline(always)]
#[cfg(feature = "macos")]
async fn store(config_path: Option<&Path>) -> anyhow::Result<Arc<dyn PackageStore>> {
    let config = match config_path {
        Some(v) => pahkat_client::Config::load(&v, pahkat_client::Permission::ReadWrite)?,
        None => pahkat_client::Config::load_default()?,
    };
    let store = Arc::new(pahkat_client::MacOSPackageStore::new(config).await);
    // A store with no repositories is usable but cannot resolve any packages.
    if store.config().read().unwrap().repos().is_empty() {
        println!("WARNING: There are no repositories in the given config.");
    }
    Ok(store)
}
/// CLI entry point: parse arguments, open (or create) the package store for
/// the active platform feature, and dispatch to the chosen subcommand.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    env_logger::init();
    let args = Args::from_args();
    match &args {
        // TODO: init should only be built for prefix builds.
        // The payload is unused (config path comes from `args`), so bind `_`
        // to avoid an unused-variable warning on non-prefix builds.
        cli::Args::Init(_) => {
            #[cfg(feature = "prefix")]
            create_store(args.config_path()).await?;
        }
        cli::Args::Download(a) => {
            let store = store(args.config_path()).await?;
            download::download(
                store,
                &a.packages,
                // Fall back to the current working directory when no output
                // path was supplied.
                &a.output_path
                    .clone()
                    .unwrap_or_else(|| std::env::current_dir().unwrap()),
            )
            .await?
        }
        cli::Args::Status(a) => {
            let store = store(args.config_path()).await?;
            status::status(&*store, &a.packages, Default::default())?
        }
        cli::Args::Uninstall(a) => {
            let store = store(args.config_path()).await?;
            uninstall::uninstall(&*store, &a.packages, Default::default())?
        }
        cli::Args::Install(a) => {
            let store = store(args.config_path()).await?;
            install::install(store, &a.packages, Default::default(), &args).await?
        }
        cli::Args::Config(a) => {
            let store = store(args.config_path()).await?;
            config::config(store, a, Default::default(), &args).await?
        }
    }
    Ok(())
}
|
/// Field accessors for the 32-bit memory-mapped register at 0x40029000.
/// NOTE(review): the 0x4002_9xxx address range and field layout match the
/// STM32 Ethernet DMA bus mode register (ETH_DMABMR) — confirm against the
/// device reference manual.
/// All setters are non-atomic read-modify-write sequences; callers must
/// ensure exclusive access (no concurrent or interrupt-context writers).
pub mod dmabmr {
    /// sr: bit 0.
    pub mod sr {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029000u32 as *const u32) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// da: bit 1.
    pub mod da {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 1) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// dsl: bits 2..=6 (5-bit field, mask 0x1F).
    pub mod dsl {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 2) & 0x1F }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFFFFFF83u32;
                reg |= (val & 0x1F) << 2;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// pbl: bits 8..=13 (6-bit field, mask 0x3F).
    pub mod pbl {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 8) & 0x3F }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFFFFC0FFu32;
                reg |= (val & 0x3F) << 8;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// rtpr: bits 14..=15 (2-bit field, mask 0x3).
    pub mod rtpr {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 14) & 0x3 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFFFF3FFFu32;
                reg |= (val & 0x3) << 14;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// fb: bit 16.
    pub mod fb {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 16) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFFFEFFFFu32;
                reg |= (val & 0x1) << 16;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// rdp: bits 17..=22 (6-bit field, mask 0x3F).
    pub mod rdp {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 17) & 0x3F }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFF81FFFFu32;
                reg |= (val & 0x3F) << 17;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// usp: bit 23.
    pub mod usp {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 23) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFF7FFFFFu32;
                reg |= (val & 0x1) << 23;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// fpm: bit 24.
    pub mod fpm {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 24) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFEFFFFFFu32;
                reg |= (val & 0x1) << 24;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
    /// aab: bit 25.
    pub mod aab {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029000u32 as *const u32) >> 25) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029000u32 as *const u32);
                reg &= 0xFDFFFFFFu32;
                reg |= (val & 0x1) << 25;
                core::ptr::write_volatile(0x40029000u32 as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the full-width 32-bit register at 0x40029004.
/// NOTE(review): appears to be the Ethernet DMA transmit poll demand
/// register — confirm; if so, the read in `set` is unnecessary (any write
/// triggers a poll) but is preserved as-is here.
pub mod dmatpdr {
    /// tpd: bits 0..=31 (entire register).
    pub mod tpd {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029004u32 as *const u32) & 0xFFFFFFFF }
        }
        pub fn set(val: u32) {
            unsafe {
                // Read-modify-write of a full-width field: the masking below
                // clears all bits, then writes `val` unchanged.
                let mut reg = core::ptr::read_volatile(0x40029004u32 as *const u32);
                reg &= 0x0u32;
                reg |= val & 0xFFFFFFFF;
                core::ptr::write_volatile(0x40029004u32 as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the full-width 32-bit register at 0x40029008.
/// NOTE(review): appears to be the Ethernet DMA receive poll demand
/// register — confirm against the device reference manual.
pub mod dmarpdr {
    /// rpd: bits 0..=31 (entire register).
    pub mod rpd {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029008u32 as *const u32) & 0xFFFFFFFF }
        }
        pub fn set(val: u32) {
            unsafe {
                // Full-width field: the mask clears everything, then `val`
                // is written unchanged.
                let mut reg = core::ptr::read_volatile(0x40029008u32 as *const u32);
                reg &= 0x0u32;
                reg |= val & 0xFFFFFFFF;
                core::ptr::write_volatile(0x40029008u32 as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the full-width 32-bit register at 0x4002900C.
/// NOTE(review): appears to be the Ethernet DMA receive descriptor list
/// address register — confirm against the device reference manual.
pub mod dmardlar {
    /// srl: bits 0..=31 (entire register).
    pub mod srl {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x4002900Cu32 as *const u32) & 0xFFFFFFFF }
        }
        pub fn set(val: u32) {
            unsafe {
                // Full-width field: the mask clears everything, then `val`
                // is written unchanged.
                let mut reg = core::ptr::read_volatile(0x4002900Cu32 as *const u32);
                reg &= 0x0u32;
                reg |= val & 0xFFFFFFFF;
                core::ptr::write_volatile(0x4002900Cu32 as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the full-width 32-bit register at 0x40029010.
/// NOTE(review): appears to be the Ethernet DMA transmit descriptor list
/// address register — confirm against the device reference manual.
pub mod dmatdlar {
    /// stl: bits 0..=31 (entire register).
    pub mod stl {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029010u32 as *const u32) & 0xFFFFFFFF }
        }
        pub fn set(val: u32) {
            unsafe {
                // Full-width field: the mask clears everything, then `val`
                // is written unchanged.
                let mut reg = core::ptr::read_volatile(0x40029010u32 as *const u32);
                reg &= 0x0u32;
                reg |= val & 0xFFFFFFFF;
                core::ptr::write_volatile(0x40029010u32 as *mut u32, reg);
            }
        }
    }
}
/// Field accessors for the 32-bit register at 0x40029014.
/// NOTE(review): layout matches the STM32 Ethernet DMA status register
/// (ETH_DMASR). If so, many bits are rc_w1 (write 1 to clear), which means
/// the read-modify-write in each `set` can unintentionally clear *other*
/// pending status flags — confirm against the reference manual before use.
/// All setters are non-atomic read-modify-write sequences.
pub mod dmasr {
    /// ts: bit 0.
    pub mod ts {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029014u32 as *const u32) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// tpss: bit 1.
    pub mod tpss {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 1) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// tbus: bit 2.
    pub mod tbus {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 2) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFFBu32;
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// tjts: bit 3.
    pub mod tjts {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 3) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFF7u32;
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// ros: bit 4.
    pub mod ros {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 4) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFEFu32;
                reg |= (val & 0x1) << 4;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// tus: bit 5.
    pub mod tus {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 5) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFDFu32;
                reg |= (val & 0x1) << 5;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// rs: bit 6.
    pub mod rs {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 6) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFFBFu32;
                reg |= (val & 0x1) << 6;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// rbus: bit 7.
    pub mod rbus {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 7) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFF7Fu32;
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// rpss: bit 8.
    pub mod rpss {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 8) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFEFFu32;
                reg |= (val & 0x1) << 8;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// pwts: bit 9.
    pub mod pwts {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 9) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFDFFu32;
                reg |= (val & 0x1) << 9;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// ets: bit 10.
    pub mod ets {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 10) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFFBFFu32;
                reg |= (val & 0x1) << 10;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// fbes: bit 13 (bits 11..=12 are not exposed by this module).
    pub mod fbes {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 13) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFDFFFu32;
                reg |= (val & 0x1) << 13;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// ers: bit 14.
    pub mod ers {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 14) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFFBFFFu32;
                reg |= (val & 0x1) << 14;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// ais: bit 15.
    pub mod ais {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 15) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFF7FFFu32;
                reg |= (val & 0x1) << 15;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// nis: bit 16.
    pub mod nis {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 16) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFFEFFFFu32;
                reg |= (val & 0x1) << 16;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// rps: bits 17..=19 (3-bit field, mask 0x7).
    pub mod rps {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 17) & 0x7 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFFF1FFFFu32;
                reg |= (val & 0x7) << 17;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// tps: bits 20..=22 (3-bit field, mask 0x7).
    pub mod tps {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 20) & 0x7 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFF8FFFFFu32;
                reg |= (val & 0x7) << 20;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// ebs: bits 23..=25 (3-bit field, mask 0x7).
    pub mod ebs {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 23) & 0x7 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xFC7FFFFFu32;
                reg |= (val & 0x7) << 23;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// mmcs: bit 27.
    pub mod mmcs {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 27) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xF7FFFFFFu32;
                reg |= (val & 0x1) << 27;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// pmts: bit 28.
    pub mod pmts {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 28) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xEFFFFFFFu32;
                reg |= (val & 0x1) << 28;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
    /// tsts: bit 29.
    pub mod tsts {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029014u32 as *const u32) >> 29) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029014u32 as *const u32);
                reg &= 0xDFFFFFFFu32;
                reg |= (val & 0x1) << 29;
                core::ptr::write_volatile(0x40029014u32 as *mut u32, reg);
            }
        }
    }
}
/// Field accessors for the 32-bit register at 0x40029018.
/// NOTE(review): layout matches the STM32 Ethernet DMA operation mode
/// register (ETH_DMAOMR) — confirm against the device reference manual.
/// All setters are non-atomic read-modify-write sequences.
pub mod dmaomr {
    /// sr: bit 1 (note: this field starts at bit 1, not bit 0).
    pub mod sr {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 1) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// osf: bit 2.
    pub mod osf {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 2) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFFFFFBu32;
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// rtc: bits 3..=4 (2-bit field, mask 0x3).
    pub mod rtc {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 3) & 0x3 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFFFFE7u32;
                reg |= (val & 0x3) << 3;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// fugf: bit 6.
    pub mod fugf {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 6) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFFFFBFu32;
                reg |= (val & 0x1) << 6;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// fef: bit 7.
    pub mod fef {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 7) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFFFF7Fu32;
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// st: bit 13.
    pub mod st {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 13) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFFDFFFu32;
                reg |= (val & 0x1) << 13;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// ttc: bits 14..=16 (3-bit field, mask 0x7).
    pub mod ttc {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 14) & 0x7 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFFE3FFFu32;
                reg |= (val & 0x7) << 14;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// ftf: bit 20.
    pub mod ftf {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 20) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFEFFFFFu32;
                reg |= (val & 0x1) << 20;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// tsf: bit 21.
    pub mod tsf {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 21) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFFDFFFFFu32;
                reg |= (val & 0x1) << 21;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// dfrf: bit 24.
    pub mod dfrf {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 24) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFEFFFFFFu32;
                reg |= (val & 0x1) << 24;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// rsf: bit 25.
    pub mod rsf {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 25) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFDFFFFFFu32;
                reg |= (val & 0x1) << 25;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
    /// dtcefd: bit 26.
    pub mod dtcefd {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029018u32 as *const u32) >> 26) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40029018u32 as *const u32);
                reg &= 0xFBFFFFFFu32;
                reg |= (val & 0x1) << 26;
                core::ptr::write_volatile(0x40029018u32 as *mut u32, reg);
            }
        }
    }
}
/// Field accessors for the 32-bit register at 0x4002901C.
/// NOTE(review): layout matches the STM32 Ethernet DMA interrupt enable
/// register (ETH_DMAIER) — confirm against the device reference manual.
/// All setters are non-atomic read-modify-write sequences; callers must
/// ensure exclusive access (no concurrent or interrupt-context writers).
pub mod dmaier {
    /// tie: bit 0.
    pub mod tie {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x4002901Cu32 as *const u32) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// tpsie: bit 1.
    pub mod tpsie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 1) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// tbuie: bit 2.
    pub mod tbuie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 2) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFFBu32;
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// tjtie: bit 3.
    pub mod tjtie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 3) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFF7u32;
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// roie: bit 4.
    pub mod roie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 4) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFEFu32;
                reg |= (val & 0x1) << 4;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// tuie: bit 5.
    pub mod tuie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 5) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFDFu32;
                reg |= (val & 0x1) << 5;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// rie: bit 6.
    pub mod rie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 6) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFFBFu32;
                reg |= (val & 0x1) << 6;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// rbuie: bit 7.
    pub mod rbuie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 7) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFF7Fu32;
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// rpsie: bit 8.
    pub mod rpsie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 8) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFEFFu32;
                reg |= (val & 0x1) << 8;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// rwtie: bit 9.
    pub mod rwtie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 9) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFDFFu32;
                reg |= (val & 0x1) << 9;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// etie: bit 10.
    pub mod etie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 10) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFFBFFu32;
                reg |= (val & 0x1) << 10;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// fbeie: bit 13 (bits 11..=12 are not exposed by this module).
    pub mod fbeie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 13) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFDFFFu32;
                reg |= (val & 0x1) << 13;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// erie: bit 14.
    pub mod erie {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 14) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFFBFFFu32;
                reg |= (val & 0x1) << 14;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// aise: bit 15.
    pub mod aise {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 15) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFF7FFFu32;
                reg |= (val & 0x1) << 15;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
    /// nise: bit 16.
    pub mod nise {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x4002901Cu32 as *const u32) >> 16) & 0x1 }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4002901Cu32 as *const u32);
                reg &= 0xFFFEFFFFu32;
                reg |= (val & 0x1) << 16;
                core::ptr::write_volatile(0x4002901Cu32 as *mut u32, reg);
            }
        }
    }
}
/// Read-only field accessors for the 32-bit register at 0x40029020.
/// NOTE(review): layout matches the STM32 Ethernet DMA missed frame and
/// buffer overflow counter register (ETH_DMAMFBOCR) — confirm against the
/// device reference manual. No setters: counters are hardware-maintained.
pub mod dmamfbocr {
    /// mfc: bits 0..=15 (16-bit counter).
    pub mod mfc {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029020u32 as *const u32) & 0xFFFF }
        }
    }
    /// omfc: bit 16 (overflow flag for the counter in bits 0..=15).
    pub mod omfc {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029020u32 as *const u32) >> 16) & 0x1 }
        }
    }
    /// mfa: bits 17..=27 (11-bit counter, mask 0x7FF).
    pub mod mfa {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029020u32 as *const u32) >> 17) & 0x7FF }
        }
    }
    /// ofoc: bit 28 (overflow flag for the counter in bits 17..=27).
    pub mod ofoc {
        pub fn get() -> u32 {
            unsafe { (core::ptr::read_volatile(0x40029020u32 as *const u32) >> 28) & 0x1 }
        }
    }
}
/// Read-only accessor for the 32-bit register at 0x40029048.
/// NOTE(review): appears to be the Ethernet DMA current host transmit
/// descriptor register — confirm against the device reference manual.
pub mod dmachtdr {
    /// htdap: bits 0..=31 (entire register).
    pub mod htdap {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029048u32 as *const u32) & 0xFFFFFFFF }
        }
    }
}
/// Read-only accessor for the 32-bit register at 0x4002904C.
/// NOTE(review): appears to be the Ethernet DMA current host receive
/// descriptor register — confirm against the device reference manual.
pub mod dmachrdr {
    /// hrdap: bits 0..=31 (entire register).
    pub mod hrdap {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x4002904Cu32 as *const u32) & 0xFFFFFFFF }
        }
    }
}
/// Read-only accessor for the 32-bit register at 0x40029050.
/// NOTE(review): appears to be the Ethernet DMA current host transmit
/// buffer address register — confirm against the device reference manual.
pub mod dmachtbar {
    /// htbap: bits 0..=31 (entire register).
    pub mod htbap {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029050u32 as *const u32) & 0xFFFFFFFF }
        }
    }
}
/// Read-only accessor for the 32-bit register at 0x40029054.
/// NOTE(review): appears to be the Ethernet DMA current host receive
/// buffer address register — confirm against the device reference manual.
pub mod dmachrbar {
    /// hrbap: bits 0..=31 (entire register).
    pub mod hrbap {
        pub fn get() -> u32 {
            unsafe { core::ptr::read_volatile(0x40029054u32 as *const u32) & 0xFFFFFFFF }
        }
    }
}
|
use std::path::PathBuf;
use axum::{routing::get, Router, extract::State, Json};
use chrono::{Utc, Duration};
use shared::models::clicks::Clicks;
use shuttle_persist::PersistInstance;
use tower_http::services::ServeDir;
static CLICKS_KEY: &str = "clicks";
/// GET /api/hello — trivial endpoint returning a static greeting.
async fn hello_world() -> &'static str {
    "Hello, world!"
}
/// GET /api/clicks — return the persisted click counter.
/// Falls back to `Clicks::default()` when nothing is stored or the load fails.
async fn get_clicks(state: State<AppState>) -> Json<Clicks> {
    let stored = state
        .persist
        .load::<Clicks>(CLICKS_KEY)
        .unwrap_or_default();
    Json(stored)
}
/// POST /api/clicks — increment the click counter, rate-limited to one
/// click per 5 seconds. Returns the updated counter, or an error string
/// when called within the cooldown window.
async fn add_clicks(state: State<AppState>) -> Result<Json<Clicks>, String> {
    let current = state
        .persist
        .load::<Clicks>(CLICKS_KEY)
        .unwrap_or_default();
    let now = Utc::now();
    // Guard clause: still inside the 5-second cooldown window.
    if now <= current.last_click_ts + Duration::seconds(5) {
        return Err("Clicks need to wait 5s".to_string());
    }
    let updated = Clicks {
        amount: current.amount + 1,
        last_click_ts: now,
    };
    // NOTE(review): a failed save is silently ignored (matches the original
    // behavior); the response still reports the incremented value.
    let _ = state.persist.save(CLICKS_KEY, &updated);
    Ok(Json(updated))
}
/// Shared application state handed to every axum handler via `State`.
#[derive(Clone)]
struct AppState {
    // Shuttle persistence handle used to load/save the click counter.
    persist: PersistInstance,
}
/// Shuttle entry point: serves the static site from `public/` at the root
/// and exposes the JSON API under /api.
#[shuttle_runtime::main]
async fn axum(
    #[shuttle_static_folder::StaticFolder(folder = "public")] public_folder: PathBuf,
    #[shuttle_persist::Persist] persist: PersistInstance,
) -> shuttle_axum::ShuttleAxum {
    let state = AppState {
        persist,
    };
    let router = Router::new()
        // Static files at "/"; serves .gz variants when a precompressed file exists.
        .nest_service("/", ServeDir::new(public_folder).precompressed_gzip())
        .route("/api/hello", get(hello_world))
        .route("/api/clicks", get(get_clicks).post(add_clicks))
        .with_state(state);
    Ok(router.into())
}
|
use std::rc::Rc;
use std::cell::RefCell;
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};
use std::any::Any;
use super::{Env, eval, apply, StepState};
use {Value, AresResult, AresError, parse, stdlib, Environment, ForeignFunction};
use intern::SymbolIntern;
use stdlib::core::macroexpand;
/// An interpreter context: global environment plus symbol interner.
/// `S` is the user-state type that foreign functions may access once the
/// context is loaded (see `Context::load`); it is not stored here.
pub struct Context<S: State + ?Sized> {
    // Global (outermost) environment, shared via Rc<RefCell<..>>.
    env: Env,
    // Maps names to interned symbols and back.
    interner: SymbolIntern,
    // Ties this context to a single user-state type without owning one.
    _state: PhantomData<S>,
}
/// A `Context` temporarily paired with mutable user state; this is the
/// handle evaluation runs through. Created by `Context::load`.
pub struct LoadedContext<'a, S: State + ?Sized> {
    // The underlying context (also reachable via Deref/DerefMut).
    ctx: &'a mut Context<S>,
    // User state accessible to foreign functions during evaluation.
    state: &'a mut S,
    // Environments pushed by `with_other_env`; the top overrides `ctx.env`.
    pub env_stack: Vec<Env>,
    // Evaluation step stack used by the stepping evaluator.
    pub stack: Vec<StepState>,
}
/// Marker trait for user-state types usable with `Context`.
/// Blanket-implemented for every `'static` type via `Any`.
pub trait State: Any {}
impl <T: Any> State for T {}
impl <S: State + ?Sized> Context<S> {
pub fn new() -> Context<S> {
let env = Rc::new(RefCell::new(Environment::new()));
let mut ctx = Context {
env: env,
interner: SymbolIntern::new(),
_state: PhantomData,
};
stdlib::load_all(&mut ctx);
ctx
}
pub fn new_empty() -> Context<S> {
Context {
env: Rc::new(RefCell::new(Environment::new())),
interner: SymbolIntern::new(),
_state: PhantomData,
}
}
pub fn with_debug(mut self) -> Context<S> {
stdlib::load_debug(&mut self);
self
}
pub fn format_value(&self, value: &Value) -> String {
::stdlib::types::to_string_helper(value, self.interner())
}
pub fn load<'a>(&'a mut self, state: &'a mut S) -> LoadedContext<'a, S> {
LoadedContext {
ctx: self,
state: state,
env_stack: vec![],
stack: vec![],
}
}
pub fn get<N: ?Sized + AsRef<str>>(&self, name: &N) -> Option<Value> {
if let Some(symbol) = self.interner.symbol_for_name(name) {
self.env.borrow_mut().get(symbol)
} else {
None
}
}
pub fn set<N: AsRef<str> + Into<String>>(&mut self, name: N, value: Value) -> Option<Value> {
let ret = self.env.borrow_mut().insert_here(self.interner.intern(name), value);
ret
}
pub fn set_fn<N: AsRef<str> + Into<String>>(&mut self,
name: N,
f: ForeignFunction<S>)
-> Option<Value> {
self.set(name, Value::ForeignFn(f.erase()))
}
pub fn env(&self) -> &Env {
&self.env
}
pub fn env_mut(&mut self) -> &mut Env {
&mut self.env
}
pub fn interner(&self) -> &SymbolIntern {
&self.interner
}
pub fn interner_mut(&mut self) -> &mut SymbolIntern {
&mut self.interner
}
}
impl <'a, S: State + ?Sized> LoadedContext<'a, S> {
    /// Run `f` with `env` temporarily installed as the active environment
    /// (pushed on `env_stack`, popped afterwards). Returns the popped env
    /// together with `f`'s result.
    pub fn with_other_env<F, R>(&mut self, env: Env, f: F) -> (Env, R)
        where F: FnOnce(&mut LoadedContext<'a, S>) -> R
    {
        self.env_stack.push(env.clone());
        let r = f(self);
        (self.env_stack.pop().unwrap(), r)
    }
    /// Run `f` with `state` temporarily swapped in as the user state.
    pub fn with_other_state<F, R>(&mut self, state: &mut S, f: F) -> R
        where F: FnOnce(&mut LoadedContext<'a, S>) -> R
    {
        use std::mem::{swap, transmute};
        // SAFETY: the transmute only extends the borrow's lifetime to 'a so
        // it can be stored in `self.state`. The original state is swapped
        // back before this function returns, so the shortened-lifetime
        // reference never outlives the caller's borrow.
        let mut state: &'a mut S = unsafe { transmute(state) };
        swap(&mut self.state, &mut state);
        let r = f(self);
        swap(&mut self.state, &mut state);
        r
    }
    /// The active environment: the top of `env_stack`, or the context's
    /// global environment when the stack is empty.
    pub fn env(&self) -> &Env {
        let &LoadedContext { ref ctx, ref env_stack, ..} = self;
        env_stack.last().unwrap_or(&ctx.env)
    }
    /// Mutable counterpart of `env`.
    pub fn env_mut(&mut self) -> &mut Env {
        let &mut LoadedContext { ref mut ctx, ref mut env_stack, ..} = self;
        env_stack.last_mut().unwrap_or(&mut ctx.env)
    }
    /// Mutable access to the loaded user state.
    pub fn state(&mut self) -> &mut S {
        self.state
    }
    /// Evaluate a single (already macro-expanded) value.
    pub fn eval(&mut self, value: &Value) -> AresResult<Value> {
        eval(value, self)
    }
    /// Recursively expand macros in `value`.
    pub fn macroexpand(&mut self, value: Value) -> AresResult<Value> {
        macroexpand(&[value], self)
    }
    /// Parse, macro-expand and evaluate `program`, returning the value of
    /// the last form. Errors with `NoProgram` when the source is empty.
    pub fn eval_str(&mut self, program: &str) -> AresResult<Value> {
        let trees = try!(parse(program, &mut self.interner));
        let mut last = None;
        for tree in trees {
            let tree = try!(self.macroexpand(tree));
            last = Some(try!(self.eval(&tree)))
        }
        match last {
            Some(v) => Ok(v),
            None => Err(AresError::NoProgram),
        }
    }
    /// Apply `func` to `args`.
    pub fn call(&mut self, func: &Value, args: &[Value]) -> AresResult<Value> {
        // FIXME
        let args: Vec<_> = args.iter().cloned().collect();
        apply(func, args, self)
    }
    /// Look up a function by name and apply it to `args`.
    /// Errors with `UndefinedName` when the name has no binding.
    pub fn call_named<N: ?Sized + AsRef<str>>(&mut self,
                                              named_fn: &N,
                                              args: &[Value])
                                              -> AresResult<Value> {
        let func = self.get(named_fn);
        match func {
            Some(v) => self.call(&v, args),
            None => Err(AresError::UndefinedName(named_fn.as_ref().into())),
        }
    }
    /// Explicitly end this loaded session (drops the borrow of the context).
    pub fn unload(self) {
    }
}
impl <'a, S: State + ?Sized> Deref for LoadedContext<'a, S> {
    type Target = Context<S>;
    /// Forward shared access to the wrapped `Context`.
    fn deref(&self) -> &Context<S> {
        &*self.ctx
    }
}
impl <'a, S: State + ?Sized> DerefMut for LoadedContext<'a, S> {
    /// Forward mutable access to the wrapped `Context`.
    fn deref_mut(&mut self) -> &mut Context<S> {
        &mut *self.ctx
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use async_raft::NodeId;
use common_sled_store::SledSerde;
use serde::Deserialize;
use serde::Serialize;
/// A slot is a virtual and intermediate allocation unit in a distributed storage.
/// The key of an object is mapped to a slot by some hashing algo.
/// A slot is assigned to several physical servers(normally 3 for durability).
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
pub struct Slot {
    // Ids of the nodes this slot is assigned to (its replicas).
    pub node_ids: Vec<NodeId>,
}
/// A physical server in the cluster: a human-readable name plus the address
/// it is reachable at. Displayed as `name=address`.
#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq)]
pub struct Node {
    pub name: String,
    pub address: String,
}
impl fmt::Display for Node {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}={}", self.name, self.address)
}
}
/// For Node to be able to be stored in sled::Tree as a value.
/// Relies entirely on the trait's default methods — no overrides needed.
impl SledSerde for Node {}
|
extern crate gcc;
/// Build script: compiles the Objective-C exception shim with
/// (Obj-)C exception support enabled and links it as `libexception.a`.
fn main() {
    let mut build = gcc::Config::new();
    build.flag("-fexceptions");
    build.flag("-fobjc-exceptions");
    build.file("extern/exception.m");
    build.compile("libexception.a");
}
|
/*!
```rudra-poc
[target]
crate = "buttplug"
version = "0.10.0"
indexed_version = "0.4.0"
[[target.peer]]
crate = "futures"
version = "0.3.8"
[report]
issue_url = "https://github.com/buttplugio/buttplug-rs/issues/225"
issue_date = 2020-12-18
rustsec_url = "https://github.com/RustSec/advisory-db/pull/592"
rustsec_id = "RUSTSEC-2020-0112"
[[bugs]]
analyzer = "SendSyncVariance"
bug_class = "SendSyncVariance"
bug_count = 2
rudra_report_locations = ["src/util/future.rs:90:1: 91:2", "src/util/future.rs:88:1: 89:2"]
```
!*/
#![forbid(unsafe_code)]
use buttplug::util::future::ButtplugFuture;
use futures::executor;
use std::cell::Cell;
use std::sync::Arc;
use std::thread;
// A Copy enum that can hold either a reference or a plain integer;
// the PoC below races these two variants to forge a pointer.
#[derive(Debug, Clone, Copy)]
enum RefOrInt<'a> {
    Ref(&'a u64),
    Int(u64),
}
static X: u64 = 0;
// Proof-of-concept for RUSTSEC-2020-0112 (see header): ButtplugFuture's
// shared state can carry a non-Sync `Cell` across threads, creating a
// data race that is exploited below to segfault safe code.
fn main() {
    let future = ButtplugFuture::default();
    let shared = future.get_state_clone();
    thread::spawn(move || {
        let shared = shared;
        // `Cell` is not Sync; smuggling it into another thread via the
        // future's shared state is what the advisory is about.
        let cell = Arc::new(Cell::new(RefOrInt::Int(0xdeadbeef)));
        shared.set_reply(Arc::clone(&cell));
        // Keep racing the cell between the Int and Ref variants.
        loop {
            cell.set(RefOrInt::Int(0xdeadbeef));
            cell.set(RefOrInt::Ref(&X))
        }
    });
    // Resolving the future hands the raced Cell to this thread.
    let smuggled_cell: Arc<Cell<RefOrInt>> = executor::block_on(future);
    println!("Future resolved");
    loop {
        if let RefOrInt::Ref(addr) = smuggled_cell.get() {
            if addr as *const _ as usize != 0xdeadbeef {
                continue;
            }
            // Due to the data race, obtaining Ref(0xdeadbeef) is possible
            println!("Pointer is now: {:p}", addr);
            println!("Dereferencing addr will now segfault: {}", *addr);
        }
    }
}
|
// svd2rust-generated reader/writer aliases for the MDMA_C26BNDTR register:
// BNDT is a 17-bit field, BRC a 12-bit field, BRSUM/BRDUM single bits.
#[doc = "Register `MDMA_C26BNDTR` reader"]
pub type R = crate::R<MDMA_C26BNDTR_SPEC>;
#[doc = "Register `MDMA_C26BNDTR` writer"]
pub type W = crate::W<MDMA_C26BNDTR_SPEC>;
#[doc = "Field `BNDT` reader - BNDT"]
pub type BNDT_R = crate::FieldReader<u32>;
#[doc = "Field `BNDT` writer - BNDT"]
pub type BNDT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 17, O, u32>;
#[doc = "Field `BRSUM` reader - BRSUM"]
pub type BRSUM_R = crate::BitReader;
#[doc = "Field `BRSUM` writer - BRSUM"]
pub type BRSUM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BRDUM` reader - BRDUM"]
pub type BRDUM_R = crate::BitReader;
#[doc = "Field `BRDUM` writer - BRDUM"]
pub type BRDUM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BRC` reader - BRC"]
pub type BRC_R = crate::FieldReader<u16>;
#[doc = "Field `BRC` writer - BRC"]
pub type BRC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>;
impl R {
    #[doc = "Bits 0:16 - BNDT"]
    #[inline(always)]
    pub fn bndt(&self) -> BNDT_R {
        // Block transfer length occupies the low 17 bits.
        BNDT_R::new(self.bits & 0x1_ffff)
    }
    #[doc = "Bit 18 - BRSUM"]
    #[inline(always)]
    pub fn brsum(&self) -> BRSUM_R {
        BRSUM_R::new((self.bits >> 18) & 1 == 1)
    }
    #[doc = "Bit 19 - BRDUM"]
    #[inline(always)]
    pub fn brdum(&self) -> BRDUM_R {
        BRDUM_R::new((self.bits >> 19) & 1 == 1)
    }
    #[doc = "Bits 20:31 - BRC"]
    #[inline(always)]
    pub fn brc(&self) -> BRC_R {
        BRC_R::new(((self.bits >> 20) & 0xfff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:16 - BNDT"]
    #[inline(always)]
    #[must_use]
    pub fn bndt(&mut self) -> BNDT_W<MDMA_C26BNDTR_SPEC, 0> {
        BNDT_W::new(self)
    }
    #[doc = "Bit 18 - BRSUM"]
    #[inline(always)]
    #[must_use]
    pub fn brsum(&mut self) -> BRSUM_W<MDMA_C26BNDTR_SPEC, 18> {
        BRSUM_W::new(self)
    }
    #[doc = "Bit 19 - BRDUM"]
    #[inline(always)]
    #[must_use]
    pub fn brdum(&mut self) -> BRDUM_W<MDMA_C26BNDTR_SPEC, 19> {
        BRDUM_W::new(self)
    }
    #[doc = "Bits 20:31 - BRC"]
    #[inline(always)]
    #[must_use]
    pub fn brc(&mut self) -> BRC_W<MDMA_C26BNDTR_SPEC, 20> {
        BRC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY (caller contract): bypasses the typed field writers, so the caller
    // must ensure the raw value is valid for this register's layout.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "In Linked List mode, at the end of a block (single or last block in repeated block transfer mode), this register will be loaded from memory (from address given by current LAR\\[31:0\\]
+ 0x04).\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdma_c26bndtr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mdma_c26bndtr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MDMA_C26BNDTR_SPEC;
// 32-bit wide register.
impl crate::RegisterSpec for MDMA_C26BNDTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mdma_c26bndtr::R`](R) reader structure"]
impl crate::Readable for MDMA_C26BNDTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mdma_c26bndtr::W`](W) writer structure"]
impl crate::Writable for MDMA_C26BNDTR_SPEC {
    // Both bitmaps zero: no fields here use write-0/write-1-to-modify semantics.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MDMA_C26BNDTR to value 0"]
impl crate::Resettable for MDMA_C26BNDTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! RDRAND and RDSEED instructions.
/// Returns a hardware generated 64-bit random value
///
/// Yields `None` when the hardware reports failure (intrinsic returns 0).
#[inline]
#[target_feature(enable = "rdrand")]
pub unsafe fn _rdrand64_step() -> Option<u64> {
    let mut value: u64 = 0;
    // The underlying intrinsic signals success with a return value of 1.
    match ::arch::_rdrand64_step(&mut value) {
        1 => Some(value),
        _ => None,
    }
}
/// Returns a 64-bit NIST SP800-90B and SP800-90C compliant random value.
///
/// Yields `None` when the hardware reports failure (intrinsic returns 0).
#[inline]
#[target_feature(enable = "rdseed")]
pub unsafe fn _rdseed64_step() -> Option<u64> {
    let mut value: u64 = 0;
    // The underlying intrinsic signals success with a return value of 1.
    match ::arch::_rdseed64_step(&mut value) {
        1 => Some(value),
        _ => None,
    }
}
|
pub mod authn;
pub mod authz;
use serde::{Deserialize, Serialize};
/// Identity and role set of an authenticated user.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct UserDetails {
    pub user_id: String,
    pub roles: Vec<String>,
}
impl UserDetails {
    /// True when the user carries the `drogue-admin` role.
    pub fn is_admin(&self) -> bool {
        self.roles
            .iter()
            .any(|role| role.as_str() == "drogue-admin")
    }
}
/// Result of request authentication: either a known user or anonymous access.
#[derive(Clone, Debug)]
pub enum UserInformation {
    Authenticated(UserDetails),
    Anonymous,
}
/// Shared empty role list handed out for anonymous users.
static EMPTY_ROLES: Vec<String> = vec![];
impl UserInformation {
    /// The authenticated user's id, or `None` for anonymous access.
    pub fn user_id(&self) -> Option<&str> {
        if let Self::Authenticated(details) = self {
            Some(&details.user_id)
        } else {
            None
        }
    }
    /// The user's roles; anonymous users have none.
    pub fn roles(&self) -> &Vec<String> {
        match self {
            Self::Anonymous => &EMPTY_ROLES,
            Self::Authenticated(details) => &details.roles,
        }
    }
    /// Whether the user is an admin; anonymous users never are.
    pub fn is_admin(&self) -> bool {
        match self {
            Self::Anonymous => false,
            Self::Authenticated(details) => details.is_admin(),
        }
    }
}
#[cfg(feature = "with_actix")]
impl actix_web::FromRequest for UserInformation {
    type Config = ();
    type Error = actix_web::Error;
    type Future = core::future::Ready<Result<Self, Self::Error>>;
    /// Extracts the `UserInformation` previously stored in the request
    /// extensions, falling back to `Anonymous` when none is present.
    fn from_request(
        req: &actix_web::HttpRequest,
        _: &mut actix_web::dev::Payload<actix_web::dev::PayloadStream>,
    ) -> Self::Future {
        let user = req
            .extensions()
            .get::<UserInformation>()
            .cloned()
            .unwrap_or(UserInformation::Anonymous);
        core::future::ready(Ok(user))
    }
}
|
extern crate byteorder;
extern crate ethereum_types;
extern crate parity_bytes;
extern crate common_types;
extern crate wasm;
extern crate vm;
extern crate machine;
extern crate trace;
pub mod state_provider;
pub mod stateless_ext;
pub mod stateless_vm;
pub mod transaction;
|
use crate::scrabbleutils::{Board, PlayerTrait, Move, Direction::*, Hand};
use crate::scrabbleutils::bonuses::{WordBonus, LetterBonus};
use std::io::stdin;
// I don't like designing ui, please feel free to improve it.
/// Renders the 15x15 board to stdout as an ASCII grid: a column-number
/// header, then for each board row one line of letters / bonus markers
/// followed by one line of tile point values.
pub fn print_board(board : &Board) {
    // Top border.
    print!(" ");
    for _ in 0..15 {
        print!("-----");
    }
    // Column-number header row.
    print!("-\n |");
    for i in 1..16 {
        print!(" {:>2} |", i);
    }
    // Empty spacer row under the header.
    print!("\n |");
    for _ in 1..16 {
        print!(" |");
    }
    println!("");
    for _ in 0..16 {
        print!("-----");
    }
    println!("-");
    // For each board row
    for y in 0..15 {
        print!("|");
        for x in 0..16 {
            if x == 0 {
                // Print the row number
                print!(" {:>2} |", y + 1);
            }
            else {
                // Board coordinates are 0-based, hence `x - 1`.
                let letter = board.get_letter(x - 1, y);
                match letter {
                    Some(x) => {
                        print!(" {} |", x);
                    }
                    None => {
                        // Check letter and word bonus
                        let (lb, wb) = board.get_bonuses(x - 1, y);
                        // Word bonuses take display priority over letter bonuses.
                        match wb {
                            WordBonus::Triple => {
                                print!(" W3 |");
                                continue;
                            }
                            WordBonus::Double => {
                                print!(" W2 |");
                                continue;
                            }
                            _ => {}
                        }
                        match lb {
                            LetterBonus::Triple => {
                                print!(" L3 |");
                            }
                            LetterBonus::Double => {
                                print!(" L2 |");
                            }
                            _ => {
                                print!(" |");
                            }
                        }
                    }
                }
            }
        }
        // Second line of the row: point values of the placed tiles.
        print!("\n|");
        for x in 0..16 {
            if x == 0{
                print!(" |");
            }
            else {
                let tile = board.get_tile(x - 1, y);
                if tile == None {
                    print!(" |");
                }
                else {
                    print!(" {:>2}|", tile.unwrap().points());
                }
            }
        }
        println!("");
        for _ in 0..16 {
            print!("-----");
        }
        println!("-");
    }
    println!("");
}
/// Prints the player's hand as a small two-row table:
/// tile letters on the first row, point values on the second.
pub fn print_hand(hand : &Hand) {
    let tiles = hand.get();
    // Top border.
    print!("{}", "-----".repeat(tiles.len()));
    print!("-\n|");
    // Letter row.
    for tile in tiles.iter() {
        print!(" {} |", tile.letter());
    }
    print!("\n|");
    // Point-value row.
    for tile in tiles.iter() {
        print!(" {:>2}|", tile.points());
    }
    println!("");
    // Bottom border.
    print!("{}", "-----".repeat(tiles.len()));
    print!("-\n");
}
/// A human-driven player whose moves are read interactively from stdin.
pub struct SimplePlayer {
    name : String,
}
impl SimplePlayer {
    /// Creates a player with the given display name.
    pub fn new(name : String) -> SimplePlayer {
        SimplePlayer { name }
    }
}
impl PlayerTrait for SimplePlayer {
    /// The player's display name.
    fn name(&self) -> &str {
        &self.name
    }
    /// Interactively asks the user (via stdin) for a word, a position and a
    /// direction, looping with an error message until the input is
    /// syntactically valid. Legality of the move is checked elsewhere.
    fn play(&self, board : &Board, hand : &Hand) -> Move {
        let mv : Move;
        let mut error_msg : Option<&str> = None;
        loop {
            print_board(board);
            print_hand(hand);
            if let Some(msg) = error_msg {
                eprintln!("{}", msg);
            }
            let word;
            println!("What do you want to play ?");
            let mut line = String::new();
            stdin().read_line(&mut line).expect("This is not a correct string");
            let words : Vec<&str> = line.split_whitespace().collect();
            if words.len() != 1 {
                error_msg = Some("You should give exactly one word");
                continue;
            }
            else {
                word = words[0].to_owned();
            }
            println!("At what position do you want to play it ?\n\tex : 1 15");
            let mut line = String::new();
            stdin().read_line(&mut line).expect("This is not a correct string");
            let positions : Vec<&str> = line.split_whitespace().collect();
            if positions.len() != 2 {
                error_msg = Some("You should give two positions");
                continue;
            }
            let positions : Vec<Result<u8, std::num::ParseIntError>> = positions.iter().map(|e| e.parse::<u8>()).collect();
            if positions.iter().any(|e| e.is_err()) {
                error_msg = Some("Positions should be numbers, the first is on the x coordinate from 1 to 15, the second in the descending y coordinate from 1 to 15.");
                continue;
            }
            // Every parse succeeded above, so `unwrap` cannot panic here.
            // (The former `.clone().to_owned()` on a `u8` was redundant.)
            let positions : Vec<u8> = positions.into_iter().map(|e| e.unwrap()).collect();
            if positions.iter().any(|e| !(1..=15).contains(e)) {
                error_msg = Some("Positions should be between 1 and 15");
                continue;
            }
            println!("Choose your direction (H/V) :");
            let mut line = String::new();
            stdin().read_line(&mut line).expect("This is not a correct string");
            let line : Vec<&str> = line.split_whitespace().collect();
            if line.is_empty() {
                error_msg = Some("Please provide at least the first character of a direction");
                continue;
            }
            let direction_char = line.get(0).unwrap().chars().next().unwrap();
            let direction = match direction_char {
                'H' | 'h' => Horizontal,
                'V' | 'v' => Vertical,
                _ => {
                    error_msg = Some("Could not get the direction properly, write `V` for vertical and `H` for horizontal");
                    continue;
                }
            };
            // Convert to 0-based board coordinates.
            mv = Move::new(positions[0] - 1, positions[1] - 1, word, direction);
            break;
        }
        mv
    }
    /// Reports the score of the last move to the player.
    fn move_score(&self, score : u32) {
        println!("Your move made {} points!", score);
    }
    /// Reports the running total to the player.
    fn total_score(&self, score : u32) {
        println!("You have a total of {} points!", score);
    }
}
/// Prints an error message for the player on stderr.
pub fn handle_error(error : &str) {
    eprintln!("Error : {}", error);
}
|
pub fn reorder_list(head: &mut Option<Box<ListNode>>) {
if let Some(mut head) = head.as_mut() {
fn split_node(mut node: Box<ListNode>, nodes: &mut Vec<Option<Box<ListNode>>>) {
if let Some(next) = node.next.take() {
split_node(next, nodes);
}
nodes.push(Some(node));
}
let mut nodes = vec![];
if let Some(next) = head.next.take() {
split_node(next, &mut nodes);
}
// println!("{:?}", nodes);
let mut head_index = 0;
let mut tail_index = nodes.len();
let mut pick_from_head = true;
println!("{}, {}", head_index, tail_index);
while tail_index >= head_index + 1 {
let node = if pick_from_head {
head_index += 1;
nodes[head_index-1].take()
} else {
tail_index -= 1;
nodes[tail_index].take()
};
pick_from_head = !pick_from_head;
head.next = node;
head = head.next.as_mut().unwrap();
}
}
} |
use {
crate::assert_eq_trimmed, littlebigint::BigUint, num_bigint::BigUint as NumBigUint,
proptest::prelude::*,
};
#[test]
fn basic_multiplication() {
    // 42 == 6 * 7, all fitting in a single limb (byte).
    let mut a_array = [42];
    let a = BigUint::from_slice(&mut a_array);
    let mut b_array = [6];
    let b = BigUint::from_slice(&mut b_array);
    let mut c_array = [7];
    let c = BigUint::from_slice(&mut c_array);
    // One byte of scratch space is enough: the product fits in one limb.
    let mut buf = [0u8];
    assert_eq!(a, b.mul(&c, &mut buf));
}
#[test]
fn basic_carry_multiplication() {
    // Multi-limb case that exercises carry propagation:
    // 412897704603 == 6887253 * 59951 (little-endian byte limbs below).
    // 412897704603
    let mut a_array = [155, 250, 158, 34, 96];
    let a = BigUint::from_slice(&mut a_array);
    // 6887253
    let mut b_array = [85, 23, 105, 0, 0];
    let b = BigUint::from_slice(&mut b_array);
    // 59951
    let mut c_array = [47, 234];
    let c = BigUint::from_slice(&mut c_array);
    let mut buf = [0u8; 6];
    assert_eq!(a, b.mul(&c, &mut buf));
}
proptest! {
    // Property: single-byte multiplication matches native u16 arithmetic.
    #[test]
    fn small_multiplication(a: u8, b: u8) {
        let mut a_array = [a, 0];
        let a_bigint = BigUint::from_slice(&mut a_array);
        let mut b_array = [b];
        let b_bigint = BigUint::from_slice(&mut b_array);
        let mut buf = [0, 0];
        assert_eq!((a as u16 * b as u16).to_le_bytes(), a_bigint.mul(&b_bigint, &mut buf).into_slice());
    }
    // Property: arbitrary-length multiplication matches num-bigint.
    #[test]
    fn big_multiplication(mut a: Vec<u8>, mut b: Vec<u8>) {
        // Ignore zero-length inputs and prevent panicking due to overflow
        if a.len() < b.len() || a.is_empty() {
            return Ok(());
        }
        let mut correct = (NumBigUint::from_bytes_le(&a) * NumBigUint::from_bytes_le(&b)).to_bytes_le();
        // Pad `a` with zero limbs so the in-place product cannot overflow.
        for _ in 0..=b.len() {
            a.push(0);
        }
        let len = a.len() + b.len();
        let a = BigUint::from_slice(&mut a);
        let b = BigUint::from_slice(&mut b);
        let mut buf = vec![0u8; len];
        assert_eq_trimmed(correct.as_mut_slice(), (a.mul(&b, &mut buf)).into_slice());
    }
}
|
#![allow(dead_code)]
extern crate clap;
extern crate colored;
extern crate ring;
extern crate futures;
extern crate grpcio;
#[macro_use]
extern crate lazy_static;
extern crate protobuf;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate num;
extern crate rand;
extern crate rusqlite;
#[path = "../bc/mod.rs"]
mod bc;
#[path = "../cli/mod.rs"]
mod cli;
#[path = "../ctx/mod.rs"]
mod ctx;
#[path = "../proto/mod.rs"]
mod proto;
use bc::block;
use clap::{App, SubCommand};
use colored::*;
use grpcio::{ChannelBuilder, EnvBuilder};
use num::bigint::BigUint;
use num::bigint::RandBigInt;
use proto::byzan::{Block, BlockId, BlockIdx, BlockKey, BlockRange, BlockTill, Empty, NewBlock};
use proto::byzan_grpc::BlockChainClient;
use std::sync::Arc;
/// Connection settings for the byzd server.
struct Config {
    connect_host: String,
    connect_port: String,
}
impl Config {
    /// Default configuration: localhost on port 10799.
    fn new() -> Config {
        Config {
            connect_host: "127.0.0.1".to_string(),
            connect_port: "10799".to_string(),
        }
    }
}
/// Generates a random 256-bit identifier rendered as lowercase hex.
fn create_id() -> String {
    let n: BigUint = rand::thread_rng().gen_biguint(256);
    format!("{:x}", n)
}
/// Builds a gRPC client connected to the configured byzd endpoint.
fn client(config: &Config) -> BlockChainClient {
    let env = Arc::new(EnvBuilder::new().build());
    let ch = ChannelBuilder::new(env)
        .connect(&format!("{}:{}", config.connect_host, config.connect_port));
    // Return directly; the former trailing `let client … client` was a
    // needless binding.
    BlockChainClient::new(ch)
}
/// Creates or updates a block from the `--key`/`--value` CLI arguments,
/// assigning it a fresh random id.
fn upsert(config: &Config, matches: &clap::ArgMatches) -> () {
    let mut new_block = NewBlock::new();
    new_block.set_id(create_id());
    new_block.set_key(matches.value_of("key").unwrap().to_string());
    new_block.set_value(matches.value_of("value").unwrap().to_string());
    match client(config).upsert(&new_block) {
        Ok(response) => handle_block(response.get_block().clone()),
        Err(err) => handle_error(&err),
    }
}
/// Pushes a fully-specified block (index, id, key, value and both hashes).
fn push(config: &Config, matches: &clap::ArgMatches) -> () {
    // `value_of` already yields `&str`, so parse directly — the former
    // `.to_string()` before `.parse()` allocated needlessly.
    let idx = matches.value_of("idx").unwrap().parse::<u32>().unwrap();
    let id = matches.value_of("id").unwrap().to_string();
    let key = matches.value_of("key").unwrap().to_string();
    let value = matches.value_of("value").unwrap().to_string();
    let prev_hash = matches.value_of("prev_hash").unwrap().to_string();
    let self_hash = matches.value_of("self_hash").unwrap().to_string();
    let mut block = Block::new();
    block.set_idx(idx);
    block.set_id(id);
    block.set_key(key);
    block.set_value(value);
    block.set_prev_hash(prev_hash);
    block.set_self_hash(self_hash);
    match client(config).push(&block) {
        Ok(b) => {
            handle_block(b.get_block().clone());
        }
        Err(e) => {
            handle_error(&e);
        }
    }
}
/// Fetches a single block by `--key`, `--idx` or `--id`, checked in that
/// order; prints usage help when none is given.
fn get(config: &Config, matches: &clap::ArgMatches) -> () {
    if let Some(key) = matches.value_of("key") {
        let mut block_key = BlockKey::new();
        block_key.set_key(key.to_string());
        match client(config).get_by_key(&block_key) {
            Ok(b) => {
                handle_block(b.get_block().clone());
            }
            Err(e) => {
                handle_error(&e);
            }
        }
        return;
    }
    if let Some(idx) = matches.value_of("idx") {
        let mut block_idx = BlockIdx::new();
        // `idx` is already `&str`; the former `.to_string()` before
        // `.parse()` allocated needlessly.
        block_idx.set_idx(idx.parse::<u32>().unwrap());
        match client(config).get_by_idx(&block_idx) {
            Ok(b) => {
                handle_block(b.get_block().clone());
            }
            Err(e) => {
                handle_error(&e);
            }
        }
        return;
    }
    if let Some(id) = matches.value_of("id") {
        let mut block_id = BlockId::new();
        block_id.set_id(id.to_string());
        match client(config).get_by_id(&block_id) {
            Ok(b) => {
                handle_block(b.get_block().clone());
            }
            Err(e) => {
                handle_error(&e);
            }
        }
        return;
    }
    println!("please specify key (--key), idx (--idx) or id (--id)");
}
/// Prints the full history of blocks recorded under `--key`.
fn history(config: &Config, matches: &clap::ArgMatches) -> () {
    let mut block_key = BlockKey::new();
    block_key.set_key(matches.value_of("key").unwrap().to_string());
    match client(config).history(&block_key) {
        Ok(h) => handle_blocks(h.get_blocks()),
        Err(e) => handle_error(&e),
    }
}
/// Prints the most recent block on the chain.
fn last(config: &Config) -> () {
    match client(config).last(&Empty::new()) {
        Ok(b) => handle_block(b.get_block().clone()),
        Err(e) => handle_error(&e),
    }
}
/// Prints the blockchain length as a JSON object.
fn len(config: &Config) -> () {
    match client(config).len(&Empty::new()) {
        Ok(response) => println!("{}", json!({ "len": response.get_len() })),
        Err(e) => handle_error(&e),
    }
}
/// Prints the blocks whose indices fall within `[--first, --last]`.
fn range(config: &Config, matches: &clap::ArgMatches) -> () {
    // `value_of` already yields `&str`; the former `.to_string()` before
    // `.parse()` allocated needlessly.
    let first = matches.value_of("first").unwrap().parse::<u32>().unwrap();
    let last = matches.value_of("last").unwrap().parse::<u32>().unwrap();
    let mut block_range = BlockRange::new();
    block_range.set_first(first);
    block_range.set_last(last);
    match client(config).range(&block_range) {
        Ok(blocks) => {
            handle_blocks(blocks.get_blocks());
        }
        Err(e) => {
            handle_error(&e);
        }
    }
}
/// Prints all blocks starting from index `--first`.
fn till(config: &Config, matches: &clap::ArgMatches) -> () {
    // `value_of` already yields `&str`; the former `.to_string()` before
    // `.parse()` allocated needlessly.
    let first = matches.value_of("first").unwrap().parse::<u32>().unwrap();
    let mut block_till = BlockTill::new();
    block_till.set_first(first);
    match client(config).till(&block_till) {
        Ok(blocks) => {
            handle_blocks(blocks.get_blocks());
        }
        Err(e) => {
            handle_error(&e);
        }
    }
}
fn handle_error(e: &grpcio::Error) {
match e {
grpcio::Error::RpcFailure(e) => {
println!(
"\n {}\n",
"An error has been occurred for the request.".red().bold(),
);
if let Some(reason) = &e.details {
println!(" the reason is follwing : {}\n", reason.red().bold());
} else {
println!(" the reason is {}\n", "unknown".red());
}
}
_ => {
println!("\n unknown error: {:?}\n", e);
}
}
}
/// Converts a protobuf block into the domain type and pretty-prints it as JSON.
fn handle_block(b: Block) {
    let domain_block = block::Block::from(b);
    match serde_json::to_string_pretty(&domain_block) {
        Ok(rendered) => println!("{}", rendered),
        Err(e) => println!("failed to parse the block: {}", e),
    }
}
/// Pretty-prints a slice of protobuf blocks as a JSON array.
fn handle_blocks(blocks: &[Block]) {
    // Iterate the slice directly: the previous version cloned the whole
    // slice into a Vec (`to_vec()`) and then cloned every element again.
    let blocks: Vec<block::Block> = blocks
        .iter()
        .map(|b| block::Block::from(b.clone()))
        .collect();
    match serde_json::to_string_pretty(&blocks) {
        Ok(b_json) => {
            println!("{}", b_json);
        }
        Err(e) => {
            println!("failed to parse the blocks: {}", e);
        }
    }
}
/// CLI entry point: builds the clap command tree, resolves the connection
/// config from the global flags, and dispatches to the subcommand handlers.
fn main() {
    let matches = App::new("byz")
        .version("0.1")
        .author("tbrand (Taichiro Suzuki)")
        .about("command line client for byzan server (byzd)")
        .subcommand(
            SubCommand::with_name("upsert")
                .about("create or update new block with key and value")
                .arg(cli::arg_key(true))
                .arg(cli::arg_value()),
        ).subcommand(
            SubCommand::with_name("push")
                .about("push a new block")
                .arg(cli::arg_idx(true))
                .arg(cli::arg_id(true))
                .arg(cli::arg_key(true))
                .arg(cli::arg_value())
                .arg(cli::arg_prev_hash())
                .arg(cli::arg_self_hash()),
        ).subcommand(
            SubCommand::with_name("get")
                .about("get a block for the arg")
                .arg(cli::arg_key(false))
                .arg(cli::arg_id(false))
                .arg(cli::arg_idx(false)),
        ).subcommand(
            SubCommand::with_name("history")
                .about("get a history for the key")
                .arg(cli::arg_key(true)),
        ).subcommand(SubCommand::with_name("last").about("get a last block"))
        .subcommand(SubCommand::with_name("len").about("get a length of the blockchain"))
        .subcommand(
            SubCommand::with_name("range")
                .about("get a subset of the blockchain for the specified range")
                .arg(cli::arg_first())
                .arg(cli::arg_last()),
        ).subcommand(
            SubCommand::with_name("till")
                .about("get a subset of the blockchain from the specified index")
                .arg(cli::arg_first()),
        ).arg(cli::arg_connect_host())
        .arg(cli::arg_connect_port())
        .get_matches();
    // Global connection flags override the defaults.
    let mut config = Config::new();
    if let Some(connect_host) = matches.value_of("connect_host") {
        config.connect_host = connect_host.to_string();
    }
    if let Some(connect_port) = matches.value_of("connect_port") {
        config.connect_port = connect_port.to_string();
    }
    // Dispatch; unknown/missing subcommand prints the usage text.
    match matches.subcommand() {
        ("upsert", Some(matches)) => upsert(&config, &matches),
        ("push", Some(matches)) => push(&config, &matches),
        ("get", Some(matches)) => get(&config, &matches),
        ("history", Some(matches)) => history(&config, &matches),
        ("last", Some(_)) => last(&config),
        ("len", Some(_)) => len(&config),
        ("range", Some(matches)) => range(&config, &matches),
        ("till", Some(matches)) => till(&config, &matches),
        _ => {
            println!("{}", matches.usage());
        }
    }
}
|
//! Configuration for line inputs
use crate::bitmask::BitMask;
use crate::EnableDisable;
/// Raw contents of the line-input control register.
#[derive(Debug, Copy, Clone)]
pub struct LineIn {
    pub(crate) data: u16,
}
/// Write handle for the volume field: `index` is the field's bit offset
/// within the register, `bitmask` applies the masked update.
pub struct Volume<'a> {
    index: u16,
    bitmask: BitMask<'a>,
}
impl<'a> Volume<'a> {
    /// Line input volume in half-dB steps.
    ///
    /// The `half_dBs` parameter value must be twice the desired dB gain; for example:
    /// * for a 1.5dB gain, call `half_dB_steps(3)`
    /// * for 9dB attenuation, call `half_dB_steps(-18)`
    ///
    /// # Panics
    ///
    /// Panics if the `half_dBs` parameter is out of range (below -34.5dB or above 12dB), or if the
    /// `half_dBs` parameter does not correspond to an exact multiple of 1.5dB.
    #[allow(non_snake_case)]
    fn half_dB_steps(&mut self, half_dBs: i16) {
        // The WM8731 supports -34.5dB up to 12dB. Make sure the input is in range:
        assert!((-69..=24).contains(&half_dBs));
        // Bias to a 0-based register value (-69 half-dBs maps to 0).
        let offset = half_dBs + 69;
        // and make sure the input lines up on the 1.5dB steps:
        assert_eq!(offset % 3, 0);
        // 5-bit volume field: one count per 1.5dB (3 half-dB) step.
        self.bitmask.apply(self.index, 5, (offset / 3) as u16);
    }
    /// Set line input volume to nearest representable value
    ///
    /// Set the line input volume to the nearest gain available. The WM8731 only supports 1.5dB
    /// steps, so the actual results may be rounded by ±0.5dB. For example:
    /// * 0dB, 3dB, etc. are exact multiples of 1.5dB, and will not be rounded.
    /// * 1dB will get rounded up to 1.5dB
    /// * 2dB will get rounded down to 1.5dB
    ///
    /// # Panics
    ///
    /// Panics if `dB_gain` is out of range (below -35 or above 12, covering the hardware's
    /// capability of -34.5dB to 12dB).
    #[allow(non_snake_case)]
    pub fn nearest_dB(&mut self, dB_gain: i16) {
        let half_dBs = dB_gain * 2;
        // Round to the nearest multiple of 3 half-dBs (1.5dB step).
        let rounded_to_multiple_of_three = match half_dBs.rem_euclid(3) {
            0 => half_dBs,
            1 => half_dBs - 1,
            2 => half_dBs + 1,
            x => panic!("{} cannot possibly be the result of .rem_euclid(3)", x),
        };
        self.half_dB_steps(rounded_to_multiple_of_three);
    }
}
impl LineIn {
    pub fn new() -> Self {
        LineIn {
            // Default register value: volume field = 0b10111 (0dB per the
            // half_dB_steps mapping) with bit 7 (the mute bit below) set —
            // presumably the chip's documented power-on default; verify
            // against the WM8731 datasheet.
            data: 0b0_1001_0111,
        }
    }
    /// Line input volume
    pub fn volume(&mut self) -> Volume {
        Volume {
            index: 0,
            bitmask: BitMask::new(&mut self.data),
        }
    }
    /// Line input mute to ADC
    pub fn mute(&mut self) -> EnableDisable {
        EnableDisable::new(7, &mut self.data)
    }
    /// Left to right channel line input volume and mute data load
    /// When enabled, left and right channels will have same volume and mute values
    pub fn both(&mut self) -> EnableDisable {
        EnableDisable::new(8, &mut self.data)
    }
}
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(untagged_unions)]
#![allow(dead_code)]
#![warn(clippy::expl_impl_clone_on_copy)]
use std::hash::{Hash, Hasher};
// NOTE(review): this is a clippy lint-test fixture (see the
// `#![warn(clippy::expl_impl_clone_on_copy)]` at the top); the "bad" impls
// below are intentional inputs for the lint and must not be "fixed".
#[derive(PartialEq, Hash)]
struct Foo;
impl PartialEq<u64> for Foo {
    fn eq(&self, _: &u64) -> bool {
        true
    }
}
#[derive(Hash)]
struct Bar;
impl PartialEq for Bar {
    fn eq(&self, _: &Bar) -> bool {
        true
    }
}
#[derive(Hash)]
struct Baz;
impl PartialEq<Baz> for Baz {
    fn eq(&self, _: &Baz) -> bool {
        true
    }
}
#[derive(PartialEq)]
struct Bah;
impl Hash for Bah {
    fn hash<H: Hasher>(&self, _: &mut H) {}
}
// Explicit Clone on a Copy type — the case the warned lint should flag.
#[derive(Copy)]
struct Qux;
impl Clone for Qux {
    fn clone(&self) -> Self {
        Qux
    }
}
// looks like unions don't support deriving Clone for now
#[derive(Copy)]
union Union {
    a: u8,
}
impl Clone for Union {
    fn clone(&self) -> Self {
        Union { a: 42 }
    }
}
// See #666
#[derive(Copy)]
struct Lt<'a> {
    a: &'a u8,
}
impl<'a> Clone for Lt<'a> {
    fn clone(&self) -> Self {
        unimplemented!()
    }
}
// Ok, `Clone` cannot be derived because of the big array
#[derive(Copy)]
struct BigArray {
    a: [u8; 65],
}
impl Clone for BigArray {
    fn clone(&self) -> Self {
        unimplemented!()
    }
}
// Ok, function pointers are not always Clone
#[derive(Copy)]
struct FnPtr {
    a: fn() -> !,
}
impl Clone for FnPtr {
    fn clone(&self) -> Self {
        unimplemented!()
    }
}
// Ok, generics
#[derive(Copy)]
struct Generic<T> {
    a: T,
}
impl<T> Clone for Generic<T> {
    fn clone(&self) -> Self {
        unimplemented!()
    }
}
fn main() {}
|
use itertools::Itertools;
/// Applies one "look-and-say" step: each maximal run of identical characters
/// becomes `<run length><character>` (e.g. "111221" -> "312211").
///
/// Rewritten without the needless `n.to_string()` allocation and without
/// itertools' `group_by` — a `Peekable` run-length loop does the same job
/// with the standard library only.
fn look_and_say(n: &str) -> String {
    let mut out = String::with_capacity(n.len() * 2);
    let mut chars = n.chars().peekable();
    while let Some(digit) = chars.next() {
        // Count the run of characters equal to `digit`.
        let mut run = 1usize;
        while chars.peek() == Some(&digit) {
            chars.next();
            run += 1;
        }
        out.push_str(&run.to_string());
        out.push(digit);
    }
    out
}
/// Part 1: applies 40 look-and-say iterations to the puzzle input and
/// returns the resulting sequence length as a string.
/// (The former `res.to_string()` cloned the whole string before counting.)
pub fn run() -> String {
    let mut res = "3113322113".to_string();
    for _ in 0..40 {
        res = look_and_say(&res);
    }
    res.chars().count().to_string()
}
/// Part 2: same as `run`, but with 50 iterations.
/// (The former `res.to_string()` cloned the whole string before counting.)
pub fn run_pt2() -> String {
    let mut res = "3113322113".to_string();
    for _ in 0..50 {
        res = look_and_say(&res);
    }
    res.chars().count().to_string()
}
// Canonical look-and-say examples from the puzzle statement.
#[test]
fn test_run() {
    assert_eq!(look_and_say("1"), "11");
    assert_eq!(look_and_say("11"), "21");
    assert_eq!(look_and_say("21"), "1211");
    assert_eq!(look_and_say("111221"), "312211");
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use super::notify;
use super::pubsub;
use super::TxnSubscribers;
use actix::AsyncContext;
use futures::channel::mpsc;
use starcoin_txpool_api::TxnStatusFullEvent;
/// Actor that forwards txpool transaction-status events to pubsub subscribers.
pub struct TransactionSubscriptionActor {
    // Wrapped in `Option` so it can be moved out in `Actor::started`.
    txn_receiver: Option<mpsc::UnboundedReceiver<TxnStatusFullEvent>>,
    subscribers: TxnSubscribers,
}
impl TransactionSubscriptionActor {
pub fn new(
subscribers: TxnSubscribers,
txn_receiver: mpsc::UnboundedReceiver<TxnStatusFullEvent>,
) -> Self {
Self {
subscribers,
txn_receiver: Some(txn_receiver),
}
}
}
impl actix::Actor for TransactionSubscriptionActor {
    type Context = actix::Context<Self>;
    fn started(&mut self, ctx: &mut Self::Context) {
        // Move the receiver into the actix stream. `unwrap` is safe the
        // first time the actor starts; `started` is not expected to run twice.
        ctx.add_stream(self.txn_receiver.take().unwrap());
    }
}
impl actix::StreamHandler<TxnStatusFullEvent> for TransactionSubscriptionActor {
    /// Broadcasts the hashes from each status event to every subscriber.
    fn handle(&mut self, item: TxnStatusFullEvent, _ctx: &mut Self::Context) {
        let mut hashes = Vec::new();
        for (hash, _) in item.as_ref().iter() {
            hashes.push(*hash);
        }
        for subscriber in self.subscribers.read().values() {
            notify::notify(subscriber, pubsub::Result::TransactionHash(hashes.clone()));
        }
    }
}
|
// svd2rust-generated reader/writer aliases for the IER1 (interrupt enable)
// register: every `*IE` field is a single enable bit.
#[doc = "Register `IER1` reader"]
pub type R = crate::R<IER1_SPEC>;
#[doc = "Register `IER1` writer"]
pub type W = crate::W<IER1_SPEC>;
#[doc = "Field `TIM2IE` reader - TIM2IE"]
pub type TIM2IE_R = crate::BitReader;
#[doc = "Field `TIM2IE` writer - TIM2IE"]
pub type TIM2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM3IE` reader - TIM3IE"]
pub type TIM3IE_R = crate::BitReader;
#[doc = "Field `TIM3IE` writer - TIM3IE"]
pub type TIM3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM4IE` reader - TIM4IE"]
pub type TIM4IE_R = crate::BitReader;
#[doc = "Field `TIM4IE` writer - TIM4IE"]
pub type TIM4IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM5IE` reader - TIM5IE"]
pub type TIM5IE_R = crate::BitReader;
#[doc = "Field `TIM5IE` writer - TIM5IE"]
pub type TIM5IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM6IE` reader - TIM6IE"]
pub type TIM6IE_R = crate::BitReader;
#[doc = "Field `TIM6IE` writer - TIM6IE"]
pub type TIM6IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM7IE` reader - TIM7IE"]
pub type TIM7IE_R = crate::BitReader;
#[doc = "Field `TIM7IE` writer - TIM7IE"]
pub type TIM7IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WWDGIE` reader - WWDGIE"]
pub type WWDGIE_R = crate::BitReader;
#[doc = "Field `WWDGIE` writer - WWDGIE"]
pub type WWDGIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IWDGIE` reader - IWDGIE"]
pub type IWDGIE_R = crate::BitReader;
#[doc = "Field `IWDGIE` writer - IWDGIE"]
pub type IWDGIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI2IE` reader - SPI2IE"]
pub type SPI2IE_R = crate::BitReader;
#[doc = "Field `SPI2IE` writer - SPI2IE"]
pub type SPI2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI3IE` reader - SPI3IE"]
pub type SPI3IE_R = crate::BitReader;
#[doc = "Field `SPI3IE` writer - SPI3IE"]
pub type SPI3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART2IE` reader - USART2IE"]
pub type USART2IE_R = crate::BitReader;
#[doc = "Field `USART2IE` writer - USART2IE"]
pub type USART2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART3IE` reader - USART3IE"]
pub type USART3IE_R = crate::BitReader;
#[doc = "Field `USART3IE` writer - USART3IE"]
pub type USART3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UART4IE` reader - UART4IE"]
pub type UART4IE_R = crate::BitReader;
#[doc = "Field `UART4IE` writer - UART4IE"]
pub type UART4IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UART5IE` reader - UART5IE"]
pub type UART5IE_R = crate::BitReader;
#[doc = "Field `UART5IE` writer - UART5IE"]
pub type UART5IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C1IE` reader - I2C1IE"]
pub type I2C1IE_R = crate::BitReader;
#[doc = "Field `I2C1IE` writer - I2C1IE"]
pub type I2C1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C2IE` reader - I2C2IE"]
pub type I2C2IE_R = crate::BitReader;
#[doc = "Field `I2C2IE` writer - I2C2IE"]
pub type I2C2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C3IE` reader - I2C3IE"]
pub type I2C3IE_R = crate::BitReader;
#[doc = "Field `I2C3IE` writer - I2C3IE"]
pub type I2C3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CRSIE` reader - CRSIE"]
pub type CRSIE_R = crate::BitReader;
#[doc = "Field `CRSIE` writer - CRSIE"]
pub type CRSIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DACIE` reader - DACIE"]
pub type DACIE_R = crate::BitReader;
#[doc = "Field `DACIE` writer - DACIE"]
pub type DACIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OPAMPIE` reader - OPAMPIE"]
pub type OPAMPIE_R = crate::BitReader;
#[doc = "Field `OPAMPIE` writer - OPAMPIE"]
pub type OPAMPIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM1IE` reader - LPTIM1IE"]
pub type LPTIM1IE_R = crate::BitReader;
#[doc = "Field `LPTIM1IE` writer - LPTIM1IE"]
pub type LPTIM1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPUART1IE` reader - LPUART1IE"]
pub type LPUART1IE_R = crate::BitReader;
#[doc = "Field `LPUART1IE` writer - LPUART1IE"]
pub type LPUART1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C4IE` reader - I2C4IE"]
pub type I2C4IE_R = crate::BitReader;
#[doc = "Field `I2C4IE` writer - I2C4IE"]
pub type I2C4IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM2IE` reader - LPTIM2IE"]
pub type LPTIM2IE_R = crate::BitReader;
#[doc = "Field `LPTIM2IE` writer - LPTIM2IE"]
pub type LPTIM2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM3IE` reader - LPTIM3IE"]
pub type LPTIM3IE_R = crate::BitReader;
#[doc = "Field `LPTIM3IE` writer - LPTIM3IE"]
pub type LPTIM3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FDCAN1IE` reader - FDCAN1IE"]
pub type FDCAN1IE_R = crate::BitReader;
#[doc = "Field `FDCAN1IE` writer - FDCAN1IE"]
pub type FDCAN1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USBFSIE` reader - USBFSIE"]
pub type USBFSIE_R = crate::BitReader;
#[doc = "Field `USBFSIE` writer - USBFSIE"]
pub type USBFSIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UCPD1IE` reader - UCPD1IE"]
pub type UCPD1IE_R = crate::BitReader;
#[doc = "Field `UCPD1IE` writer - UCPD1IE"]
pub type UCPD1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VREFBUFIE` reader - VREFBUFIE"]
pub type VREFBUFIE_R = crate::BitReader;
#[doc = "Field `VREFBUFIE` writer - VREFBUFIE"]
pub type VREFBUFIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `COMPIE` reader - COMPIE"]
pub type COMPIE_R = crate::BitReader;
#[doc = "Field `COMPIE` writer - COMPIE"]
pub type COMPIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM1IE` reader - TIM1IE"]
pub type TIM1IE_R = crate::BitReader;
#[doc = "Field `TIM1IE` writer - TIM1IE"]
pub type TIM1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI1IE` reader - SPI1IE"]
pub type SPI1IE_R = crate::BitReader;
#[doc = "Field `SPI1IE` writer - SPI1IE"]
pub type SPI1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// svd2rust-generated read accessors for IER1: one getter per interrupt-enable
// bit, each extracting the bit at the position named in its doc attribute.
// Do not edit by hand; regenerate from the SVD instead.
impl R {
    #[doc = "Bit 0 - TIM2IE"]
    #[inline(always)]
    pub fn tim2ie(&self) -> TIM2IE_R {
        TIM2IE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TIM3IE"]
    #[inline(always)]
    pub fn tim3ie(&self) -> TIM3IE_R {
        TIM3IE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TIM4IE"]
    #[inline(always)]
    pub fn tim4ie(&self) -> TIM4IE_R {
        TIM4IE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - TIM5IE"]
    #[inline(always)]
    pub fn tim5ie(&self) -> TIM5IE_R {
        TIM5IE_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - TIM6IE"]
    #[inline(always)]
    pub fn tim6ie(&self) -> TIM6IE_R {
        TIM6IE_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - TIM7IE"]
    #[inline(always)]
    pub fn tim7ie(&self) -> TIM7IE_R {
        TIM7IE_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - WWDGIE"]
    #[inline(always)]
    pub fn wwdgie(&self) -> WWDGIE_R {
        WWDGIE_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - IWDGIE"]
    #[inline(always)]
    pub fn iwdgie(&self) -> IWDGIE_R {
        IWDGIE_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - SPI2IE"]
    #[inline(always)]
    pub fn spi2ie(&self) -> SPI2IE_R {
        SPI2IE_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - SPI3IE"]
    #[inline(always)]
    pub fn spi3ie(&self) -> SPI3IE_R {
        SPI3IE_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - USART2IE"]
    #[inline(always)]
    pub fn usart2ie(&self) -> USART2IE_R {
        USART2IE_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - USART3IE"]
    #[inline(always)]
    pub fn usart3ie(&self) -> USART3IE_R {
        USART3IE_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - UART4IE"]
    #[inline(always)]
    pub fn uart4ie(&self) -> UART4IE_R {
        UART4IE_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - UART5IE"]
    #[inline(always)]
    pub fn uart5ie(&self) -> UART5IE_R {
        UART5IE_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - I2C1IE"]
    #[inline(always)]
    pub fn i2c1ie(&self) -> I2C1IE_R {
        I2C1IE_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - I2C2IE"]
    #[inline(always)]
    pub fn i2c2ie(&self) -> I2C2IE_R {
        I2C2IE_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - I2C3IE"]
    #[inline(always)]
    pub fn i2c3ie(&self) -> I2C3IE_R {
        I2C3IE_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - CRSIE"]
    #[inline(always)]
    pub fn crsie(&self) -> CRSIE_R {
        CRSIE_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - DACIE"]
    #[inline(always)]
    pub fn dacie(&self) -> DACIE_R {
        DACIE_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - OPAMPIE"]
    #[inline(always)]
    pub fn opampie(&self) -> OPAMPIE_R {
        OPAMPIE_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - LPTIM1IE"]
    #[inline(always)]
    pub fn lptim1ie(&self) -> LPTIM1IE_R {
        LPTIM1IE_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - LPUART1IE"]
    #[inline(always)]
    pub fn lpuart1ie(&self) -> LPUART1IE_R {
        LPUART1IE_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - I2C4IE"]
    #[inline(always)]
    pub fn i2c4ie(&self) -> I2C4IE_R {
        I2C4IE_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - LPTIM2IE"]
    #[inline(always)]
    pub fn lptim2ie(&self) -> LPTIM2IE_R {
        LPTIM2IE_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - LPTIM3IE"]
    #[inline(always)]
    pub fn lptim3ie(&self) -> LPTIM3IE_R {
        LPTIM3IE_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - FDCAN1IE"]
    #[inline(always)]
    pub fn fdcan1ie(&self) -> FDCAN1IE_R {
        FDCAN1IE_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - USBFSIE"]
    #[inline(always)]
    pub fn usbfsie(&self) -> USBFSIE_R {
        USBFSIE_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - UCPD1IE"]
    #[inline(always)]
    pub fn ucpd1ie(&self) -> UCPD1IE_R {
        UCPD1IE_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - VREFBUFIE"]
    #[inline(always)]
    pub fn vrefbufie(&self) -> VREFBUFIE_R {
        VREFBUFIE_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - COMPIE"]
    #[inline(always)]
    pub fn compie(&self) -> COMPIE_R {
        COMPIE_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - TIM1IE"]
    #[inline(always)]
    pub fn tim1ie(&self) -> TIM1IE_R {
        TIM1IE_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - SPI1IE"]
    #[inline(always)]
    pub fn spi1ie(&self) -> SPI1IE_R {
        SPI1IE_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// svd2rust-generated write accessors for IER1: one builder method per
// interrupt-enable bit (the bit offset is the second const generic argument),
// plus the raw `bits` escape hatch. Do not edit by hand; regenerate instead.
impl W {
    #[doc = "Bit 0 - TIM2IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim2ie(&mut self) -> TIM2IE_W<IER1_SPEC, 0> {
        TIM2IE_W::new(self)
    }
    #[doc = "Bit 1 - TIM3IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim3ie(&mut self) -> TIM3IE_W<IER1_SPEC, 1> {
        TIM3IE_W::new(self)
    }
    #[doc = "Bit 2 - TIM4IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim4ie(&mut self) -> TIM4IE_W<IER1_SPEC, 2> {
        TIM4IE_W::new(self)
    }
    #[doc = "Bit 3 - TIM5IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim5ie(&mut self) -> TIM5IE_W<IER1_SPEC, 3> {
        TIM5IE_W::new(self)
    }
    #[doc = "Bit 4 - TIM6IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim6ie(&mut self) -> TIM6IE_W<IER1_SPEC, 4> {
        TIM6IE_W::new(self)
    }
    #[doc = "Bit 5 - TIM7IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim7ie(&mut self) -> TIM7IE_W<IER1_SPEC, 5> {
        TIM7IE_W::new(self)
    }
    #[doc = "Bit 6 - WWDGIE"]
    #[inline(always)]
    #[must_use]
    pub fn wwdgie(&mut self) -> WWDGIE_W<IER1_SPEC, 6> {
        WWDGIE_W::new(self)
    }
    #[doc = "Bit 7 - IWDGIE"]
    #[inline(always)]
    #[must_use]
    pub fn iwdgie(&mut self) -> IWDGIE_W<IER1_SPEC, 7> {
        IWDGIE_W::new(self)
    }
    #[doc = "Bit 8 - SPI2IE"]
    #[inline(always)]
    #[must_use]
    pub fn spi2ie(&mut self) -> SPI2IE_W<IER1_SPEC, 8> {
        SPI2IE_W::new(self)
    }
    #[doc = "Bit 9 - SPI3IE"]
    #[inline(always)]
    #[must_use]
    pub fn spi3ie(&mut self) -> SPI3IE_W<IER1_SPEC, 9> {
        SPI3IE_W::new(self)
    }
    #[doc = "Bit 10 - USART2IE"]
    #[inline(always)]
    #[must_use]
    pub fn usart2ie(&mut self) -> USART2IE_W<IER1_SPEC, 10> {
        USART2IE_W::new(self)
    }
    #[doc = "Bit 11 - USART3IE"]
    #[inline(always)]
    #[must_use]
    pub fn usart3ie(&mut self) -> USART3IE_W<IER1_SPEC, 11> {
        USART3IE_W::new(self)
    }
    #[doc = "Bit 12 - UART4IE"]
    #[inline(always)]
    #[must_use]
    pub fn uart4ie(&mut self) -> UART4IE_W<IER1_SPEC, 12> {
        UART4IE_W::new(self)
    }
    #[doc = "Bit 13 - UART5IE"]
    #[inline(always)]
    #[must_use]
    pub fn uart5ie(&mut self) -> UART5IE_W<IER1_SPEC, 13> {
        UART5IE_W::new(self)
    }
    #[doc = "Bit 14 - I2C1IE"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1ie(&mut self) -> I2C1IE_W<IER1_SPEC, 14> {
        I2C1IE_W::new(self)
    }
    #[doc = "Bit 15 - I2C2IE"]
    #[inline(always)]
    #[must_use]
    pub fn i2c2ie(&mut self) -> I2C2IE_W<IER1_SPEC, 15> {
        I2C2IE_W::new(self)
    }
    #[doc = "Bit 16 - I2C3IE"]
    #[inline(always)]
    #[must_use]
    pub fn i2c3ie(&mut self) -> I2C3IE_W<IER1_SPEC, 16> {
        I2C3IE_W::new(self)
    }
    #[doc = "Bit 17 - CRSIE"]
    #[inline(always)]
    #[must_use]
    pub fn crsie(&mut self) -> CRSIE_W<IER1_SPEC, 17> {
        CRSIE_W::new(self)
    }
    #[doc = "Bit 18 - DACIE"]
    #[inline(always)]
    #[must_use]
    pub fn dacie(&mut self) -> DACIE_W<IER1_SPEC, 18> {
        DACIE_W::new(self)
    }
    #[doc = "Bit 19 - OPAMPIE"]
    #[inline(always)]
    #[must_use]
    pub fn opampie(&mut self) -> OPAMPIE_W<IER1_SPEC, 19> {
        OPAMPIE_W::new(self)
    }
    #[doc = "Bit 20 - LPTIM1IE"]
    #[inline(always)]
    #[must_use]
    pub fn lptim1ie(&mut self) -> LPTIM1IE_W<IER1_SPEC, 20> {
        LPTIM1IE_W::new(self)
    }
    #[doc = "Bit 21 - LPUART1IE"]
    #[inline(always)]
    #[must_use]
    pub fn lpuart1ie(&mut self) -> LPUART1IE_W<IER1_SPEC, 21> {
        LPUART1IE_W::new(self)
    }
    #[doc = "Bit 22 - I2C4IE"]
    #[inline(always)]
    #[must_use]
    pub fn i2c4ie(&mut self) -> I2C4IE_W<IER1_SPEC, 22> {
        I2C4IE_W::new(self)
    }
    #[doc = "Bit 23 - LPTIM2IE"]
    #[inline(always)]
    #[must_use]
    pub fn lptim2ie(&mut self) -> LPTIM2IE_W<IER1_SPEC, 23> {
        LPTIM2IE_W::new(self)
    }
    #[doc = "Bit 24 - LPTIM3IE"]
    #[inline(always)]
    #[must_use]
    pub fn lptim3ie(&mut self) -> LPTIM3IE_W<IER1_SPEC, 24> {
        LPTIM3IE_W::new(self)
    }
    #[doc = "Bit 25 - FDCAN1IE"]
    #[inline(always)]
    #[must_use]
    pub fn fdcan1ie(&mut self) -> FDCAN1IE_W<IER1_SPEC, 25> {
        FDCAN1IE_W::new(self)
    }
    #[doc = "Bit 26 - USBFSIE"]
    #[inline(always)]
    #[must_use]
    pub fn usbfsie(&mut self) -> USBFSIE_W<IER1_SPEC, 26> {
        USBFSIE_W::new(self)
    }
    #[doc = "Bit 27 - UCPD1IE"]
    #[inline(always)]
    #[must_use]
    pub fn ucpd1ie(&mut self) -> UCPD1IE_W<IER1_SPEC, 27> {
        UCPD1IE_W::new(self)
    }
    #[doc = "Bit 28 - VREFBUFIE"]
    #[inline(always)]
    #[must_use]
    pub fn vrefbufie(&mut self) -> VREFBUFIE_W<IER1_SPEC, 28> {
        VREFBUFIE_W::new(self)
    }
    #[doc = "Bit 29 - COMPIE"]
    #[inline(always)]
    #[must_use]
    pub fn compie(&mut self) -> COMPIE_W<IER1_SPEC, 29> {
        COMPIE_W::new(self)
    }
    #[doc = "Bit 30 - TIM1IE"]
    #[inline(always)]
    #[must_use]
    pub fn tim1ie(&mut self) -> TIM1IE_W<IER1_SPEC, 30> {
        TIM1IE_W::new(self)
    }
    #[doc = "Bit 31 - SPI1IE"]
    #[inline(always)]
    #[must_use]
    pub fn spi1ie(&mut self) -> SPI1IE_W<IER1_SPEC, 31> {
        SPI1IE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TZIC interrupt enable register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ier1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ier1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IER1_SPEC;
// Marker type tying the generic register machinery to this register's
// 32-bit storage.
impl crate::RegisterSpec for IER1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ier1::R`](R) reader structure"]
impl crate::Readable for IER1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ier1::W`](W) writer structure"]
impl crate::Writable for IER1_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IER1 to value 0"]
impl crate::Resettable for IER1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// The set of geometric primitives a model element can be.
///
/// NOTE(review): the serde derive below requires every payload type
/// (`Pyramid`, `Box`, ..., `Line`) to implement `Serialize`/`Deserialize`
/// as well when the "serde" feature is enabled — confirm those derives exist.
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum PrimShapes {
    PyramidShape(Pyramid),
    BoxShape(Box), // the local `Box` struct, not `std::boxed::Box`
    RectangularTorusShape(RectangularTorus),
    CircularTorusShape(CircularTorus),
    EllipticalDishShape(EllipticalDish), // 5
    SphericalDishShape(SphericalDish),
    SnoutShape(Snout),
    CylinderShape(Cylinder),
    SphereShape(Sphere),
    LineShape(Line),
}
// Payload structs for `PrimShapes`.
//
// `PrimShapes` derives serde support under the "serde" feature; without the
// matching `cfg_attr` derives below that feature fails to compile, because
// the enum's derive requires every payload type to be (de)serializable.

// Pyramid shape - 1
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Pyramid {
    pub bottom: [f32; 2],
    pub top: [f32; 2],
    pub offset: [f32; 2],
    pub height: f32,
}
// Axis-aligned box - 2
// NOTE(review): this type shadows `std::boxed::Box` in this module; renaming
// would break the public API, so it is kept as-is.
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Box {
    pub lengths: [f32; 3],
}
// Rectangular torus (ring with rectangular cross-section) - 3
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct RectangularTorus {
    pub inner_radius: f32,
    pub outer_radius: f32,
    pub height: f32,
    pub angle: f32,
}
// Circular torus (ring) - 4
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct CircularTorus {
    pub offset: f32,
    pub radius: f32,
    pub angle: f32,
}
// Elliptical dish - 5
// Field name kept camelCase for API (and serialized-form) compatibility.
#[allow(non_snake_case)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EllipticalDish {
    pub baseRadius: f32,
    pub height: f32,
}
// Spherical dish - 6
#[allow(non_snake_case)]
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SphericalDish {
    pub baseRadius: f32,
    pub height: f32,
}
// Snout (truncated, possibly sheared cone) - 7
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Snout {
    pub offset: [f32; 2],
    pub bshear: [f32; 2],
    pub tshear: [f32; 2],
    pub radius_b: f32,
    pub radius_t: f32,
    pub height: f32,
}
// Cylinder - 8
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Cylinder {
    pub radius: f32,
    pub height: f32,
}
// Sphere - 9
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Sphere {
    pub diameter: f32,
}
// Line segment - 10
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Line {
    pub a: f32,
    pub b: f32,
}
|
use apllodb_immutable_schema_engine_domain::version::id::VersionId;
use apllodb_storage_engine_interface::TableName;
use serde::{Deserialize, Serialize};
/// Name of the SQLite table backing a single version of an apllodb table.
/// Values built by `new` have the form `{table_name}__v{version_number}`.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)]
pub(in crate::sqlite::transaction::sqlite_tx) struct SqliteTableNameForVersion(String);
impl<S: Into<String>> From<S> for SqliteTableNameForVersion {
fn from(s: S) -> Self {
Self(s.into())
}
}
impl SqliteTableNameForVersion {
    /// Builds the version-specific table name `{table}__v{version_number}`
    /// from the version's table name and number.
    pub(in crate::sqlite::transaction::sqlite_tx) fn new(version_id: &VersionId) -> Self {
        let s = format!(
            "{}__v{}",
            version_id.vtable_id().table_name().as_str(),
            version_id.version_number().to_u64(),
        );
        Self(s)
    }
    /// Converts the stored string back into a `TableName`.
    ///
    /// # Panics
    /// `unwrap`s if the stored string is not a valid `TableName` — assumed
    /// impossible for names produced by `new`; NOTE(review): values built via
    /// the blanket `From<S>` impl are not validated, confirm callers.
    pub(in crate::sqlite::transaction::sqlite_tx) fn to_full_table_name(&self) -> TableName {
        TableName::new(self.0.clone()).unwrap()
    }
}
|
use crate::password::password::{Password, Passwords};
use sled::open;
use sled::IVec;
/// Serializes `value` with bincode and stores it under `key` in the sled
/// database at `$HOME/.cerostore`, returning the previous value if any.
///
/// # Panics
/// Panics if `HOME` is unset, the database cannot be opened, or
/// serialization fails. I/O errors from the insert itself are returned.
pub fn add_to_local_store<K: std::convert::AsRef<[u8]>, V: serde::Serialize>(
    key: K,
    value: V,
) -> Result<Option<IVec>, sled::Error> {
    let home_dir = std::env::var("HOME").expect("HOME environment variable is not set");
    // NOTE(review): the database is re-opened on every call; consider caching
    // the handle if these helpers become hot.
    let local_store = open(format!("{}/.cerostore", home_dir)).expect("failed to open ~/.cerostore");
    let serialized_data = bincode::serialize(&value).expect("failed to serialize value");
    local_store.insert(key, serialized_data)
}
/// Loads and deserializes the `Password` stored under `key` in
/// `$HOME/.cerostore`.
///
/// # Panics
/// Panics if `HOME` is unset, the store cannot be opened, the key is absent,
/// or the stored bytes do not deserialize as a `Password`.
pub fn read_from_local_store<K: std::convert::AsRef<[u8]>>(key: K) -> Password {
    let home_dir = std::env::var("HOME").expect("HOME environment variable is not set");
    let local_store = open(format!("{}/.cerostore", home_dir)).expect("failed to open ~/.cerostore");
    // `IVec` derefs to `[u8]`, so deserialize from it directly instead of
    // copying through `to_vec()`.
    let serialized_data = local_store
        .get(key)
        .expect("failed to read from ~/.cerostore")
        .expect("no entry for the given key");
    let password: Password =
        bincode::deserialize(&serialized_data).expect("stored entry is not a valid Password");
    password
}
/// Removes `key` from the sled database at `$HOME/.cerostore`.
///
/// # Panics
/// Panics if `HOME` is unset, the store cannot be opened, or the removal
/// fails with an I/O error. Removing an absent key is not an error.
pub fn delete_from_local_store<K: AsRef<[u8]>>(key: K) {
    let home_dir = std::env::var("HOME").expect("HOME environment variable is not set");
    let local_store = open(format!("{}/.cerostore", home_dir)).expect("failed to open ~/.cerostore");
    // was: the `Result` from `remove` was silently dropped; surface I/O
    // errors instead of ignoring them.
    local_store
        .remove(key)
        .expect("failed to remove key from ~/.cerostore");
}
/// Reads and deserializes every entry in `$HOME/.cerostore` into a
/// `Passwords` collection (iteration order is the store's key order).
///
/// # Panics
/// Panics if `HOME` is unset, the database cannot be opened, an entry cannot
/// be read, or any stored value fails to deserialize as a `Password`.
pub fn read_all_the_data() -> Passwords {
    let home_dir = std::env::var("HOME").expect("HOME environment variable is not set");
    let mut passwords = Vec::<Password>::new();
    let local_store = open(format!("{}/.cerostore", home_dir)).expect("failed to open ~/.cerostore");
    for v in local_store.iter() {
        let (_, v) = v.expect("failed to read entry from ~/.cerostore");
        // `IVec` derefs to `[u8]`; no intermediate `to_vec()` copy needed.
        let password: Password =
            bincode::deserialize(&v).expect("stored entry is not a valid Password");
        passwords.push(password);
    }
    Passwords::new(passwords)
}
|
use std::any::Any;
use vec::Vec2;
use view::{Selector, View};
use printer::Printer;
use event::{Event, EventResult};
/// Generic wrapper around a view.
///
/// Default implementation forwards all calls to the child view.
/// Overrides some methods as desired.
pub trait ViewWrapper {
    /// Get an immutable reference to the wrapped view, so that we can forward some calls to it.
    fn get_view(&self) -> &View;
    /// Get a mutable reference to the wrapped view, for the mutable methods.
    fn get_view_mut(&mut self) -> &mut View;
    /// Wraps the draw method.
    fn wrap_draw(&mut self, printer: &Printer) {
        self.get_view_mut().draw(printer);
    }
    /// Wraps the get_min_size method.
    fn wrap_get_min_size(&mut self, req: Vec2) -> Vec2 {
        self.get_view_mut().get_min_size(req)
    }
    /// Wraps the on_event method.
    fn wrap_on_event(&mut self, ch: Event) -> EventResult {
        self.get_view_mut().on_event(ch)
    }
    /// Wraps the layout method
    fn wrap_layout(&mut self, size: Vec2) {
        self.get_view_mut().layout(size);
    }
    /// Wraps the take_focus method
    fn wrap_take_focus(&mut self) -> bool {
        self.get_view_mut().take_focus()
    }
    /// Wraps the find method, forwarding selector lookups to the child view.
    fn wrap_find(&mut self, selector: &Selector) -> Option<&mut Any> {
        self.get_view_mut().find(selector)
    }
}
// Blanket impl: any `ViewWrapper` automatically is a `View`, with every
// `View` method delegating to the corresponding `wrap_*` hook so wrappers
// only override the hooks they care about.
impl<T: ViewWrapper> View for T {
    fn draw(&mut self, printer: &Printer) {
        self.wrap_draw(printer);
    }
    fn get_min_size(&mut self, req: Vec2) -> Vec2 {
        self.wrap_get_min_size(req)
    }
    fn on_event(&mut self, ch: Event) -> EventResult {
        self.wrap_on_event(ch)
    }
    fn layout(&mut self, size: Vec2) {
        self.wrap_layout(size);
    }
    fn take_focus(&mut self) -> bool {
        self.wrap_take_focus()
    }
    fn find(&mut self, selector: &Selector) -> Option<&mut Any> {
        self.wrap_find(selector)
    }
}
/// Convenient macro to implement to two methods required for the ViewWrapper trait.
///
/// # Examples
///
/// If the wrapped view is in a box, just name it in the macro:
///
/// ```no_run
/// # #[macro_use] extern crate cursive;
/// # use cursive::view::{View,ViewWrapper};
/// struct BoxFooView {
/// content: Box<View>,
/// }
///
/// impl ViewWrapper for BoxFooView {
/// wrap_impl!(self.content);
/// }
/// # fn main() { }
/// ```
///
/// If the content is directly a view, reference it:
///
/// ```no_run
/// # #[macro_use] extern crate cursive;
/// # use cursive::view::{View,ViewWrapper};
/// struct FooView<T: View> {
/// view: T,
/// }
///
/// impl <T: View> ViewWrapper for FooView<T> {
/// wrap_impl!(&self.view);
/// }
/// # fn main() { }
/// ```
#[macro_export]
macro_rules! wrap_impl {
    // Variant for a field that is directly a view: borrow it.
    (&self.$v:ident) => {
        fn get_view(&self) -> &View {
            &self.$v
        }
        fn get_view_mut(&mut self) -> &mut View {
            &mut self.$v
        }
    };
    // Variant for a boxed view field: deref through the box first.
    (self.$v:ident) => {
        fn get_view(&self) -> &View {
            &*self.$v
        }
        fn get_view_mut(&mut self) -> &mut View {
            &mut *self.$v
        }
    };
}
|
extern crate anyhow;
#[macro_use]
extern crate log;
extern crate getopts;
extern crate libc;
extern crate nix;
extern crate pretty_env_logger;
extern crate rte;
use std::cmp;
use std::convert::TryFrom;
use std::env;
use std::fmt;
use std::io;
use std::io::prelude::*;
use std::mem;
use std::path::Path;
use std::process;
use std::ptr;
use std::result;
use std::str::FromStr;
use anyhow::Result;
use nix::sys::signal;
use rte::ethdev::EthDevice;
use rte::ffi::{RTE_ETHER_MAX_LEN, RTE_MAX_ETHPORTS, RTE_PKTMBUF_HEADROOM};
use rte::lcore::RTE_MAX_LCORE;
use rte::*;
/// Exit code passed to `eal::exit` on fatal setup errors.
const EXIT_FAILURE: i32 = -1;
/// Max size of a single packet
const MAX_PACKET_SZ: u32 = 2048;
/// Size of the data buffer in each mbuf
const MBUF_DATA_SZ: u32 = MAX_PACKET_SZ + RTE_PKTMBUF_HEADROOM;
/// Number of mbufs in mempool that is created
const NB_MBUF: u32 = 8192;
/// How many packets to attempt to read from NIC in one go
const PKT_BURST_SZ: u32 = 32;
/// How many objects (mbufs) to keep in per-lcore mempool cache
const MEMPOOL_CACHE_SZ: u32 = PKT_BURST_SZ;
/// Number of RX ring descriptors
const NB_RXD: u16 = 128;
/// Number of TX ring descriptors
const NB_TXD: u16 = 512;
/// Total octets in ethernet header
const KNI_ENET_HEADER_SIZE: u32 = 14;
/// Total octets in the FCS
const KNI_ENET_FCS_SIZE: u32 = 4;
/// Maximum kernel threads (and so KNI devices) per port; sizes `lcore_k`/`kni`.
const KNI_MAX_KTHREAD: usize = 32;
/// Per-port KNI configuration, shared with the C data path
/// (`kni_port_params_array`), hence `#[repr(C)]` — field order and layout
/// must match the C side.
#[repr(C)]
#[derive(Clone, Debug)]
struct kni_port_params {
    /// Port ID
    port_id: u8,
    /// lcore ID for RX
    lcore_rx: libc::c_uint,
    /// lcore ID for TX
    lcore_tx: libc::c_uint,
    /// Number of lcores for KNI multi kernel threads
    nb_lcore_k: u32,
    /// Number of KNI devices to be created
    nb_kni: u32,
    /// lcore ID list for kthreads
    lcore_k: [libc::c_uint; KNI_MAX_KTHREAD],
    /// KNI context pointers
    kni: [kni::RawKniDevicePtr; KNI_MAX_KTHREAD],
}
/// Application configuration parsed from the command line.
struct Conf {
    /// mask of enabled ports (bit N set = port N enabled)
    enabled_port_mask: u32,
    /// enable promiscuous mode on every configured port
    promiscuous_on: bool,
    /// per-port KNI parameters, indexed by port id; `None` = unconfigured
    port_params: [Option<kni_port_params>; RTE_MAX_ETHPORTS as usize],
}
impl fmt::Debug for Conf {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for conf in self.port_params.iter().flatten() {
try!(write!(f, "Port ID: {}\n", conf.port_id));
try!(write!(
f,
" Rx lcore ID: {}, Tx lcore ID: {}\n",
conf.lcore_rx, conf.lcore_tx
));
for lcore_id in &conf.lcore_k[..conf.nb_lcore_k as usize] {
try!(write!(f, " Kernel thread lcore ID: {}\n", lcore_id));
}
}
Ok(())
}
}
impl Conf {
fn new() -> Conf {
unsafe { mem::zeroed() }
}
fn parse_config(&mut self, arg: &str) -> result::Result<(), String> {
let mut fields = arg
.split(',')
.map(|s| u32::from_str(s).expect("Invalid config parameters"));
let port_id = try!(fields.next().ok_or("Invalid config parameter, missed port_id field"));
if port_id > RTE_MAX_ETHPORTS {
return Err(format!(
"Port ID {} could not exceed the maximum {}",
port_id, RTE_MAX_ETHPORTS
));
}
if self.port_params[port_id as usize].is_some() {
return Err(format!("Port {} has been configured", port_id));
}
let mut param: kni_port_params = unsafe { mem::zeroed() };
param.port_id = port_id as u8;
param.lcore_rx = try!(fields.next().ok_or("Invalid config parameter, missed lcore_rx field"));
param.lcore_tx = try!(fields.next().ok_or("Invalid config parameter, missed lcore_tx field"));
if param.lcore_rx >= RTE_MAX_LCORE || param.lcore_tx >= RTE_MAX_LCORE {
return Err(format!(
"lcore_rx {} or lcore_tx {} ID could not exceed the maximum {}",
param.lcore_rx, param.lcore_tx, RTE_MAX_LCORE
));
}
let lcores: Vec<u32> = fields.collect();
unsafe {
ptr::copy_nonoverlapping(lcores.as_ptr(), param.lcore_k.as_mut_ptr(), lcores.len());
}
param.nb_lcore_k = lcores.len() as u32;
self.port_params[port_id as usize] = Some(param);
Ok(())
}
}
/// C signal handler: SIGUSR1 prints stats, SIGUSR2 resets them, and
/// SIGINT/SIGTERM raise the C-side `kni_stop` flag so the worker loops exit.
extern "C" fn handle_sigint(sig: libc::c_int) {
    match signal::Signal::try_from(sig).unwrap() {
        // When we receive a USR1 signal, print stats
        signal::SIGUSR1 => unsafe {
            kni_print_stats();
        },
        // When we receive a USR2 signal, reset stats
        // NOTE(review): zeroing the C `kni_stats` static from a signal
        // handler races with the data-path threads — confirm acceptable.
        signal::SIGUSR2 => {
            unsafe {
                kni_stats = mem::zeroed();
            }
            println!("**Statistics have been reset**");
        }
        // When we receive a TERM or SIGINT signal, stop kni processing
        signal::SIGINT | signal::SIGTERM => {
            unsafe {
                kni_stop = 1;
            }
            println!("SIGINT or SIGTERM is received, and the KNI processing is going to stop\n");
        }
        _ => info!("unexpect signo: {}", sig),
    }
}
/// Associate the `handle_sigint` handler with USR1/USR2/INT/TERM signals.
fn handle_signals() -> nix::Result<()> {
    let sig_action = signal::SigAction::new(
        signal::SigHandler::Handler(handle_sigint),
        signal::SaFlags::empty(),
        signal::SigSet::empty(),
    );
    // SAFETY: `sigaction` installs a process-global handler; see
    // `handle_sigint` for what the handler touches.
    // was: `try!(...)` — deprecated, replaced by the `?` operator.
    unsafe {
        signal::sigaction(signal::SIGUSR1, &sig_action)?;
        signal::sigaction(signal::SIGUSR2, &sig_action)?;
        signal::sigaction(signal::SIGINT, &sig_action)?;
        signal::sigaction(signal::SIGTERM, &sig_action)?;
    }
    Ok(())
}
/// Splits argv at the `--` separator into (EAL args, application args).
/// The `--` slot itself is replaced by the bare program name so the
/// application half looks like a normal argv. Without a `--`, the EAL gets
/// only the program name and the application gets the full argv.
fn prepare_args(args: &mut Vec<String>) -> (Vec<String>, Vec<String>) {
    let program = String::from(Path::new(&args[0]).file_name().unwrap().to_str().unwrap());
    match args.iter().position(|arg| arg == "--") {
        Some(pos) => {
            let (eal_args, opt_args) = args.split_at_mut(pos);
            opt_args[0] = program;
            (eal_args.to_vec(), opt_args.to_vec())
        }
        None => (args[..1].to_vec(), args.clone()),
    }
}
// display usage and terminate the process (never returns)
fn print_usage(program: &String, opts: getopts::Options) -> ! {
    let brief = format!("Usage: {} [EAL options] -- [options]", program);
    print!("{}", opts.usage(&brief));
    process::exit(-1);
}
// Parse the argument given in the command line of the application
// (the part after the EAL `--` separator) into a `Conf`.
// Exits the process via `print_usage` on invalid options, `-h`, or a bad
// portmask; config-item errors are returned as `Err`.
fn parse_args(args: &Vec<String>) -> result::Result<Conf, String> {
    let mut opts = getopts::Options::new();
    let program = args[0].clone();
    opts.optflag("h", "help", "print this help menu");
    opts.optopt("p", "", "hexadecimal bitmask of ports to configure", "PORTMASK");
    opts.optflag("P", "", "enable promiscuous mode");
    opts.optmulti(
        "c",
        "config",
        "port and lcore configurations",
        "port,lcore_rx,lcore_tx,lcore_kthread...",
    );
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        Err(err) => {
            println!("Invalid option specified, {}", err);
            print_usage(&program, opts);
        }
    };
    if matches.opt_present("h") {
        print_usage(&program, opts);
    }
    let mut conf = Conf::new();
    if let Some(arg) = matches.opt_str("p") {
        match u32::from_str_radix(arg.as_str(), 16) {
            Ok(mask) if mask != 0 => conf.enabled_port_mask = mask,
            _ => {
                println!("invalid portmask, {}", arg);
                print_usage(&program, opts);
            }
        }
    }
    conf.promiscuous_on = matches.opt_present("P");
    for arg in matches.opt_strs("c") {
        // was: `try!(...)` — deprecated, replaced by the `?` operator.
        conf.parse_config(&arg)?;
    }
    debug!("{:?}", conf);
    Ok(conf)
}
// Initialize KNI subsystem: preallocate one KNI interface per configured
// kernel thread (at least one per configured port).
fn init_kni(conf: &Conf) -> Result<()> {
    let num_of_kni_ports = conf
        .port_params
        .iter()
        .flatten()
        .fold(0, |acc, param| acc + cmp::max(param.nb_lcore_k, 1));
    // Invoke rte KNI init to preallocate the ports
    kni::init(num_of_kni_ports as usize)
}
// Initialise a single port on an Ethernet device: configure it with one RX
// and one TX queue, start it, and optionally enable promiscuous mode.
// Panics on any device-setup failure (setup errors are fatal here).
fn init_port(conf: &Conf, dev: ethdev::PortId, port_conf: &ethdev::EthConf, pktmbuf_pool: &mut mempool::MemoryPool) {
    let portid = dev.portid();
    // Initialise device and RX/TX queues
    info!("Initialising port {} ...", portid);
    // was: `.expect(&format!(..))` — that builds the panic message even on
    // success; the closure form only formats on failure.
    dev.configure(1, 1, &port_conf)
        .unwrap_or_else(|err| panic!("fail to configure device: port={}, {:?}", portid, err));
    // init one RX queue
    dev.rx_queue_setup(0, NB_RXD, None, pktmbuf_pool)
        .unwrap_or_else(|err| panic!("fail to setup device rx queue: port={}, {:?}", portid, err));
    // init one TX queue on each port
    dev.tx_queue_setup(0, NB_TXD, None)
        .unwrap_or_else(|err| panic!("fail to setup device tx queue: port={}, {:?}", portid, err));
    // Start device
    dev.start()
        .unwrap_or_else(|err| panic!("fail to start device: port={}, {:?}", portid, err));
    info!("Done: ");
    if conf.promiscuous_on {
        dev.promiscuous_enable();
    }
}
/// KNI callback: the kernel asks us to change the MTU of `port_id`.
/// When the new MTU needs oversized frames, the port is stopped,
/// reconfigured with a larger max RX packet length, and restarted.
extern "C" fn kni_change_mtu(port_id: u16, new_mtu: libc::c_uint) -> libc::c_int {
    debug!("port {} change MTU to {}", port_id, new_mtu);
    let nb_sys_ports = ethdev::count();
    // was: `>` — valid port ids are 0..count (exclusive), so `port_id ==
    // count` (or == RTE_MAX_ETHPORTS) must be rejected too.
    if port_id >= nb_sys_ports || port_id as u32 >= RTE_MAX_ETHPORTS {
        error!("Invalid port id {}", port_id);
        return -libc::EINVAL;
    }
    if new_mtu > RTE_ETHER_MAX_LEN {
        let dev = port_id as ethdev::PortId;
        dev.stop();
        // Set new MTU: grow the max RX frame to payload + Ethernet header + FCS.
        let mut port_conf = ethdev::EthConf::default();
        let mut rxmode: ethdev::EthRxMode = Default::default();
        rxmode.max_rx_pkt_len = new_mtu + KNI_ENET_HEADER_SIZE + KNI_ENET_FCS_SIZE;
        port_conf.rxmode = Some(rxmode);
        if let Err(err) = dev.configure(1, 1, &port_conf) {
            error!("Fail to reconfigure port {}, {}", port_id, err);
            if let Some(&RteError(errno)) = err.downcast_ref::<RteError>() {
                return errno;
            }
        }
        if let Err(err) = dev.start() {
            error!("Failed to start port {}, {}", port_id, err);
            if let Some(&RteError(errno)) = err.downcast_ref::<RteError>() {
                return errno;
            }
        }
    }
    0
}
/// KNI callback: bring `port_id` up or down in response to `ifconfig`-style
/// requests from the kernel (`if_up != 0` means "up").
extern "C" fn kni_config_network_interface(port_id: u16, if_up: u8) -> libc::c_int {
    debug!(
        "port {} change status to {}",
        port_id,
        if if_up != 0 { "up" } else { "down" }
    );
    let nb_sys_ports = ethdev::count();
    // was: `>` — valid port ids are 0..count (exclusive), so `port_id ==
    // count` (or == RTE_MAX_ETHPORTS) must be rejected too.
    if port_id >= nb_sys_ports || port_id as u32 >= RTE_MAX_ETHPORTS {
        error!("Invalid port id {}", port_id);
        return -libc::EINVAL;
    }
    let dev = port_id as ethdev::PortId;
    dev.stop();
    if if_up != 0 {
        if let Err(err) = dev.start() {
            error!("Failed to start port {}, {}", port_id, err);
            if let Some(&RteError(errno)) = err.downcast_ref::<RteError>() {
                return errno;
            }
        }
    }
    0
}
/// KNI callback: the kernel requests a MAC address change. Only logged —
/// the address is not actually applied to the device here.
extern "C" fn kni_config_mac_address(port_id: u16, mac_addr: *mut u8) -> libc::c_int {
    debug!(
        "port {} change mac address to {}",
        port_id,
        ether::EtherAddr::from(mac_addr)
    );
    0
}
/// KNI callback: promiscuity toggle request from the kernel. Only logged —
/// the setting is not actually applied to the device here.
extern "C" fn kni_config_promiscusity(port_id: u16, on: u8) -> libc::c_int {
    let state = match on {
        0 => "off",
        _ => "on",
    };
    debug!("port {} change promiscusity to {}", port_id, state);
    0
}
/// KNI callback: all-multicast toggle request from the kernel. Only logged —
/// the setting is not actually applied to the device here.
extern "C" fn kni_config_allmulticast(port_id: u16, on: u8) -> libc::c_int {
    let state = match on {
        0 => "off",
        _ => "on",
    };
    debug!("port {} change allmulticast to {}", port_id, state);
    0
}
/// Allocates the KNI virtual devices for one port: one per configured kernel
/// thread (`nb_lcore_k`), or a single one when none were configured. The
/// first device of a port carries the control callbacks (MTU, link, MAC, ...).
fn kni_alloc(conf: &mut Conf, dev: ethdev::PortId, pktmbuf_pool: &mut mempool::MemoryPool) {
    let portid = dev.portid();
    if let Some(ref mut param) = conf.port_params[portid as usize] {
        param.nb_kni = cmp::max(param.nb_lcore_k, 1);
        for i in 0..param.nb_kni {
            // Multi-kthread ports get per-thread names (vEthN_i), single-
            // thread ports just vEthN.
            let name = if param.nb_lcore_k > 0 {
                format!("vEth{}_{}", portid, i)
            } else {
                format!("vEth{}", portid)
            };
            let mut conf = kni::KniDeviceConf::default();
            conf.name = name.as_str();
            conf.group_id = portid as u16;
            conf.mbuf_size = MAX_PACKET_SZ;
            let mut kni = (if i == 0 {
                // The first KNI device associated to a port is the master,
                // for multiple kernel thread environment.
                // let dev_info = dev.info();
                // let dev = dev_info
                //     .dev()
                //     .expect(&format!("port {} haven't PCI dev info", dev.portid()));
                // conf.pci_addr = dev.addr;
                // conf.pci_id = dev.id;
                let ops = kni::KniDeviceOps {
                    port_id: portid,
                    change_mtu: Some(kni_change_mtu),
                    config_network_if: Some(kni_config_network_interface),
                    config_mac_address: Some(kni_config_mac_address),
                    config_promiscusity: Some(kni_config_promiscusity),
                    config_allmulticast: Some(kni_config_allmulticast),
                };
                kni::alloc(pktmbuf_pool, &conf, Some(&ops))
            } else {
                kni::alloc(pktmbuf_pool, &conf, None)
            })
            .expect(&format!("Fail to create kni for port: {}", portid));
            // Ownership of the device passes to the C side as a raw pointer;
            // it is reclaimed in `kni_free_kni`.
            param.kni[i as usize] = kni.into_raw();
            debug!(
                "allocated kni device `{}` @{:p} for port #{}",
                conf.name, param.kni[i as usize], portid
            );
        }
    }
}
/// Releases the KNI devices of one port by reconstituting the owned
/// `KniDevice`s from the raw pointers stored in `kni_alloc` (dropping them
/// frees the devices), then stops the port.
fn kni_free_kni(conf: &Conf, dev: ethdev::PortId) {
    if let Some(ref param) = conf.port_params[dev as usize] {
        for kni in &param.kni[..param.nb_kni as usize] {
            let _ = kni::KniDevice::from_raw(*kni);
        }
        dev.stop();
    }
}
// Check the link status of all ports in up to 9s (90 polls of 100ms), and
// print them finally. Aborts early if every link is up or kni_stop is set.
fn check_all_ports_link_status(enabled_devices: &Vec<ethdev::PortId>) {
    print!("Checking link status");
    const CHECK_INTERVAL: u32 = 100;
    const MAX_CHECK_TIME: usize = 90;
    for _ in 0..MAX_CHECK_TIME {
        if unsafe { kni_stop != 0 } {
            break;
        }
        if enabled_devices.iter().all(|dev| dev.link_nowait().up) {
            break;
        }
        delay_ms(CHECK_INTERVAL);
        print!(".");
        io::stdout().flush().unwrap();
    }
    println!("Done:");
    for dev in enabled_devices {
        let link = dev.link();
        if link.up {
            println!(
                " Port {} Link Up - speed {} Mbps - {}",
                dev.portid(),
                link.speed,
                if link.duplex { "full-duplex" } else { "half-duplex" }
            )
        } else {
            println!(" Port {} Link Down", dev.portid());
        }
    }
}
/// Per-port packet counters, shared with the C data path (`kni_stats`);
/// `#[repr(C)]` keeps the layout identical to the C definition.
// was: fields typed `libc::uint64_t` — that alias is deprecated (and removed
// from modern libc); it is defined as `u64`, so using `u64` directly is
// layout-identical and keeps the FFI contract.
#[repr(C)]
struct Struct_kni_interface_stats {
    /// number of pkts received from NIC, and sent to KNI
    rx_packets: u64,
    /// number of pkts received from NIC, but failed to send to KNI
    rx_dropped: u64,
    /// number of pkts received from KNI, and sent to NIC
    tx_packets: u64,
    /// number of pkts received from KNI, but failed to send to NIC
    tx_dropped: u64,
}
// FFI surface of the C helper library `kni_core`, which implements the
// data path: a shared stop flag, the per-port parameter array, statistics,
// and the RX/TX worker loops.
#[link(name = "kni_core")]
extern "C" {
    // set to non-zero (from the signal handler) to stop the worker loops
    static mut kni_stop: libc::c_int;
    // points at `Conf::port_params`; populated in `main`
    static mut kni_port_params_array: *const *mut kni_port_params;
    // per-port counters, printed by SIGUSR1 and reset by SIGUSR2
    static mut kni_stats: [Struct_kni_interface_stats; RTE_MAX_ETHPORTS as usize];
    fn kni_print_stats();
    fn kni_ingress(param: *const kni_port_params) -> libc::c_int;
    fn kni_egress(param: *const kni_port_params) -> libc::c_int;
}
fn main_loop(conf: Option<&Conf>) -> i32 {
enum LcoreType<'a> {
Rx(&'a kni_port_params),
Tx(&'a kni_port_params),
};
let lcore_id = lcore::current().unwrap();
let mut lcore_type: Option<LcoreType> = None;
for portid in ethdev::devices() {
if let Some(ref param) = conf.unwrap().port_params[portid as usize] {
if lcore_id == param.lcore_rx {
lcore_type = Some(LcoreType::Rx(param));
break;
}
if lcore_id == param.lcore_tx {
lcore_type = Some(LcoreType::Tx(param));
break;
}
}
}
match lcore_type {
Some(LcoreType::Rx(param)) => {
info!("Lcore {} is reading from port {}", param.lcore_rx, param.port_id);
unsafe { kni_ingress(param) }
}
Some(LcoreType::Tx(param)) => {
info!("Lcore {} is writing from port {}", param.lcore_tx, param.port_id);
unsafe { kni_egress(param) }
}
_ => {
info!("Lcore {} has nothing to do", lcore_id);
0
}
}
}
/// Program entry point: initialises EAL, parses the application arguments,
/// creates the mbuf pool and KNI devices, starts the enabled ports, launches
/// `main_loop` on every lcore, and tears everything down on exit.
fn main() {
    pretty_env_logger::init();
    handle_signals().expect("fail to handle signals");
    let mut args: Vec<String> = env::args().collect();
    let (eal_args, opt_args) = prepare_args(&mut args);
    debug!("eal args: {:?}, l2fwd args: {:?}", eal_args, opt_args);
    // Initialise EAL
    eal::init(&eal_args).expect("Cannot init EAL");
    // Parse application arguments (after the EAL ones)
    let mut conf = parse_args(&opt_args).expect("Could not parse input parameters");
    // Expose the port parameters to the C data path.
    // NOTE(review): this casts `&[Option<kni_port_params>]` to the C type
    // `*const *mut kni_port_params`, i.e. it relies on `Option<T>` having the
    // same layout the C side expects — confirm against the C definition.
    unsafe {
        kni_port_params_array = conf.port_params.as_ptr() as *const _;
    }
    // create the mbuf pool
    let mut pktmbuf_pool = mbuf::pool_create(
        "mbuf_pool",
        NB_MBUF,
        MEMPOOL_CACHE_SZ,
        0,
        MBUF_DATA_SZ as u16,
        rte::socket_id() as i32,
    )
    .expect("fail to initial mbuf pool");
    let enabled_devices: Vec<ethdev::PortId> = ethdev::devices()
        .filter(|dev| ((1 << dev.portid()) & conf.enabled_port_mask) != 0)
        .collect();
    if enabled_devices.is_empty() {
        eal::exit(EXIT_FAILURE, "All available ports are disabled. Please set portmask.\n");
    }
    // Initialize KNI subsystem
    init_kni(&conf).expect("initial KNI");
    // Initialise each port
    let port_conf = ethdev::EthConf::default();
    for dev in &enabled_devices {
        init_port(&conf, dev.portid(), &port_conf, &mut pktmbuf_pool);
        kni_alloc(&mut conf, dev.portid(), &mut pktmbuf_pool);
    }
    check_all_ports_link_status(&enabled_devices);
    // launch per-lcore init on every lcore
    launch::mp_remote_launch(main_loop, Some(&conf), false).unwrap();
    launch::mp_wait_lcore();
    // Release resources
    for dev in &enabled_devices {
        kni_free_kni(&conf, dev.portid());
    }
    kni::close();
}
|
use flate;
use chrono;
use {Infraction, Ticket, RulesOK};
/// Scores one chat message against the spam rules, updating the per-user
/// `state`. Returns `Infraction` (whole message is bad), `Ticket` (only a
/// span is bad), or `RulesOK`.
pub fn check(msg: &str, state: &mut ::UserState) -> ::RulesCheckResult {
    let time_since_last = chrono::UTC::now() - state.last_message_time;
    debug!("Scoring message: {}", msg);
    // todo: these could be cached somewhere
    let regexes = [
        regex!(r"(?i)k[a@e3][e3p]p[ao@]"), // kappas
        regex!(r"(?i)^doge$"), // nice meme
        regex!(r"(?i)lenny[ ]?face"),
        regex!(r"BibleThump"),
        regex!(r"blis donate"),
        regex!(r"(?i)or riot"),
        regex!(r"(?i)donger"),
        regex!(r"(?i)ez skins ez l[iy]fe"),
        regex!(r"(?i)clutch or kick"),
        // regex!(r"(organner)|(aimware)|(aimjunkies)"),
        regex!(r"pl[sz] no .*erino"), // pls no spammerino
    ];
    // A match covering the entire message is an infraction; a partial match
    // only earns a ticket pointing at the offending span.
    for re in regexes.iter() {
        match re.find(msg) {
            Some((start, end)) if start == 0 && end == msg.len() => {
                return Infraction("This isn't Twitch chat.");
            },
            Some((start, end)) => {
                return Ticket((start, end), "This isn't Twitch chat.")
            },
            None => ()
        }
    }
    // single word messages are bad: no interior whitespace (zero matches of
    // the pattern) and sent within 3 seconds of the previous message
    if regex!(r"\s+[^$]").find_iter(msg.trim_chars(' ')).count() == 0 && time_since_last.num_seconds() <= 3 {
        state.simple_msg_count += 1;
        // three rapid one-word messages in a row trips the rule
        if state.simple_msg_count >= 3 {
            state.simple_msg_count = 0;
            return Infraction("Please use longer sentences, instead of many short ones")
        }
    } else {
        state.simple_msg_count = 0;
    }
    // exact repeat of a non-trivial message within 2 seconds
    if msg.len() > 6 && msg == state.last_message.as_slice()
        && (chrono::UTC::now() - state.last_message_time).num_seconds() < 2 {
        return Infraction("Is there an echo in here?")
    }
    if complexity_test(msg) {
        Infraction("Stop spamming.")
    } else {
        RulesOK
    }
}
/// Heuristic spam detector: a message that deflate-compresses too well is
/// considered repetitive and therefore spammy.
///
/// Messages shorter than 16 bytes are never flagged; the acceptance
/// threshold grows slowly with message length so long messages are allowed
/// a somewhat higher ratio.
pub fn complexity_test(msg: &str) -> bool {
    // uses compression ratio w/ zlib as a proxy for complexity.
    if msg.len() < 16 {
        return false;
    }
    let msg_bytes = msg.as_bytes();
    match flate::deflate_bytes(msg_bytes) {
        Some(compressed) => {
            // ratio > 1 means the message compressed; the larger the ratio,
            // the more repetitive the input.
            let ratio = msg_bytes.len() as f32 / compressed.len() as f32;
            debug!("Compression ratio of {} is {}", msg, ratio);
            let threshold = 2.0 + (0.15 * msg_bytes.len() as f32 / 10.0);
            ratio > threshold
        },
        // deflate_bytes returning None is unexpected; fail open (not spam).
        None => { warn!("No compression?"); false }
    }
}
|
use criterion::BenchmarkId;
use criterion::Criterion;
use criterion::{criterion_group, criterion_main};
use lc_render::{BandScale, BarsValues, Chart, HorizontalBarView, LinearScale};
const SIZE: i32 = 800;
const MARGIN: i32 = 40;
/// Builds (and immediately discards) a horizontal bar chart containing
/// `values_count` constant-valued bars, so chart construction cost can be
/// measured in isolation.
fn create_horizontal_bar_chart(values_count: usize) {
    let categories: Vec<String> = (0..values_count).map(|v| v.to_string()).collect();
    let x_scale = LinearScale::new(0.0, 100.0, SIZE - MARGIN, 0);
    let y_scale = BandScale::new(categories, 0, SIZE - MARGIN);
    let bars = vec![BarsValues::new(vec![24_f32; values_count])];
    let view = HorizontalBarView::new(x_scale.clone(), y_scale.clone())
        .set_data(&bars)
        .expect("unable to set data");
    Chart::new()
        .set_width(SIZE)
        .set_height(SIZE)
        .set_margin_top(MARGIN)
        .set_margin_bottom(MARGIN)
        .set_margin_left(MARGIN)
        .set_margin_right(MARGIN)
        .set_axis_bottom_linear(x_scale)
        .set_axis_left_band(y_scale)
        .set_axis_bottom_label("Values")
        .set_axis_left_label("Categories")
        .set_title("Horizontal Bar Chart")
        .add_view(&view);
}
/// Benchmarks chart construction across several data sizes
/// (100 up to 1,000,000 bars).
fn horizontal_bar_chart(c: &mut Criterion) {
    let mut group = c.benchmark_group("horizontal_bar_chart");
    let sizes: [usize; 5] = [100, 1_000, 10_000, 100_000, 1_000_000];
    for size in &sizes {
        group.bench_with_input(BenchmarkId::from_parameter(size), size, |b, &n| {
            b.iter(|| create_horizontal_bar_chart(n));
        });
    }
    group.finish();
}
// Register the benchmark with criterion's test harness and generate `main`.
criterion_group!(benches, horizontal_bar_chart);
criterion_main!(benches);
|
use async_trait::async_trait;
use common::result::Result;
use crate::domain::author::AuthorId;
use crate::domain::category::CategoryId;
use crate::domain::publication::{Publication, PublicationId};
/// Persistence boundary for `Publication` aggregates.
///
/// Implementations must be shareable across threads (`Sync + Send`).
#[async_trait]
pub trait PublicationRepository: Sync + Send {
    /// Produces a fresh, unused `PublicationId`.
    async fn next_id(&self) -> Result<PublicationId>;
    /// Returns every stored publication.
    async fn find_all(&self) -> Result<Vec<Publication>>;
    /// Looks up a single publication by id.
    async fn find_by_id(&self, id: &PublicationId) -> Result<Publication>;
    /// Returns all publications written by the given author.
    async fn find_by_author_id(&self, author_id: &AuthorId) -> Result<Vec<Publication>>;
    /// Returns all publications belonging to the given category.
    async fn find_by_category_id(&self, category_id: &CategoryId) -> Result<Vec<Publication>>;
    /// Returns all publications whose status matches `status`.
    /// NOTE(review): status is stringly-typed — confirm the valid values.
    async fn find_by_status(&self, status: &str) -> Result<Vec<Publication>>;
    /// Full-text search over publications.
    async fn search(&self, text: &str) -> Result<Vec<Publication>>;
    /// Persists the publication (insert or update).
    async fn save(&self, publication: &mut Publication) -> Result<()>;
}
|
// Auto-generated (svd2rust-style) accessors for NVIC_IPR4.
// Each IP_n field holds the 2-bit priority of interrupt n in the top two
// bits of its byte lane (bits 7:6, 15:14, 23:22, 31:30 of the word).
#[doc = "Reader of register NVIC_IPR4"]
pub type R = crate::R<u32, super::NVIC_IPR4>;
#[doc = "Writer for register NVIC_IPR4"]
pub type W = crate::W<u32, super::NVIC_IPR4>;
#[doc = "Register NVIC_IPR4 `reset()`'s with value 0"]
impl crate::ResetValue for super::NVIC_IPR4 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `IP_19`"]
pub type IP_19_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_19`"]
pub struct IP_19_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_19_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Mask-and-insert the 2-bit value at bits 31:30.
        self.w.bits = (self.w.bits & !(0x03 << 30)) | (((value as u32) & 0x03) << 30);
        self.w
    }
}
#[doc = "Reader of field `IP_18`"]
pub type IP_18_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_18`"]
pub struct IP_18_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_18_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Mask-and-insert the 2-bit value at bits 23:22.
        self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);
        self.w
    }
}
#[doc = "Reader of field `IP_17`"]
pub type IP_17_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_17`"]
pub struct IP_17_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_17_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Mask-and-insert the 2-bit value at bits 15:14.
        self.w.bits = (self.w.bits & !(0x03 << 14)) | (((value as u32) & 0x03) << 14);
        self.w
    }
}
#[doc = "Reader of field `IP_16`"]
pub type IP_16_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `IP_16`"]
pub struct IP_16_W<'a> {
    w: &'a mut W,
}
impl<'a> IP_16_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Mask-and-insert the 2-bit value at bits 7:6.
        self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);
        self.w
    }
}
// Read accessors: each priority is the top two bits of its byte lane,
// shifted down and masked to 2 bits.
impl R {
    #[doc = "Bits 30:31 - Priority of interrupt 19"]
    #[inline(always)]
    pub fn ip_19(&self) -> IP_19_R {
        IP_19_R::new(((self.bits >> 30) & 0x03) as u8)
    }
    #[doc = "Bits 22:23 - Priority of interrupt 18"]
    #[inline(always)]
    pub fn ip_18(&self) -> IP_18_R {
        IP_18_R::new(((self.bits >> 22) & 0x03) as u8)
    }
    #[doc = "Bits 14:15 - Priority of interrupt 17"]
    #[inline(always)]
    pub fn ip_17(&self) -> IP_17_R {
        IP_17_R::new(((self.bits >> 14) & 0x03) as u8)
    }
    #[doc = "Bits 6:7 - Priority of interrupt 16"]
    #[inline(always)]
    pub fn ip_16(&self) -> IP_16_R {
        IP_16_R::new(((self.bits >> 6) & 0x03) as u8)
    }
}
// Write accessors: each returns a field proxy whose `bits()` inserts the
// value at the field's position (see the IP_*_W impls above).
impl W {
    #[doc = "Bits 30:31 - Priority of interrupt 19"]
    #[inline(always)]
    pub fn ip_19(&mut self) -> IP_19_W {
        IP_19_W { w: self }
    }
    #[doc = "Bits 22:23 - Priority of interrupt 18"]
    #[inline(always)]
    pub fn ip_18(&mut self) -> IP_18_W {
        IP_18_W { w: self }
    }
    #[doc = "Bits 14:15 - Priority of interrupt 17"]
    #[inline(always)]
    pub fn ip_17(&mut self) -> IP_17_W {
        IP_17_W { w: self }
    }
    #[doc = "Bits 6:7 - Priority of interrupt 16"]
    #[inline(always)]
    pub fn ip_16(&mut self) -> IP_16_W {
        IP_16_W { w: self }
    }
}
|
fn main() {
    let mut v = Vec::new();
    // `v.push(v.len())` needs `&mut v` for the call while borrowing `v`
    // immutably for the argument; this compiles thanks to two-phase borrows.
    // NOTE(review): this file looks like a minimal borrowck test case —
    // its exact shape is probably the point; do not "clean it up".
    v.push(v.len());
}
use anyhow::{anyhow, Result};
use crate::{Attribute, check_len, DATATYPE, toDATATYPE};
use std::ops::Deref;
use std::cmp::Ordering;
use std::fs::read;
/// An ordered list of attribute values for one record.
#[derive(Debug)]
pub struct Data_item{
    // Each entry pairs an attribute value with a flag; `encode` only writes
    // attributes whose flag is `true`, but every attribute still occupies
    // its slot in the binary layout (see `impl Attribute for Data_item`).
    pub attributes: Vec<(DATATYPE,bool)>
}
impl Clone for Data_item{
    /// Deep-copies the attribute list.
    fn clone(&self) -> Self {
        Data_item {
            attributes: self.attributes.clone(),
        }
    }
}
impl PartialEq for Data_item{
    /// Two items are equal when they have the same number of attributes and
    /// every attribute value compares equal, position by position.
    /// The `bool` flag in each pair is intentionally not compared.
    fn eq(&self, other: &Self) -> bool {
        // Guard on length first: indexing `other.attributes[index]`
        // unconditionally (as before) panics when `other` is shorter and
        // wrongly reports equality when `other` is longer.
        self.attributes.len() == other.attributes.len()
            && self
                .attributes
                .iter()
                .zip(other.attributes.iter())
                .all(|(a, b)| a.0.eq(&b.0))
    }
}
impl PartialOrd for Data_item{
    /// Lexicographic comparison over attribute values in declaration order;
    /// the first non-equal attribute decides. Incomparable attribute pairs
    /// (`partial_cmp` == None) are skipped rather than propagated.
    ///
    /// NOTE(review): `other.attributes[index]` is indexed unconditionally,
    /// so this panics when `other` has fewer attributes, and an item that is
    /// a strict prefix of another compares as `Equal`. This assumes both
    /// items share the same schema — confirm that invariant at call sites.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        for (index, attribute) in self.attributes.iter().enumerate() {
            match &attribute.0.partial_cmp(&other.attributes[index].0 ){
                None => {}
                Some(res) => {
                    match res {
                        Ordering::Less => return Some(Ordering::Less),
                        Ordering::Equal => {}
                        Ordering::Greater => return Some(Ordering::Greater),
                    }
                }
            }
        }
        Some(Ordering::Equal)
    }
}
impl Data_item{
    // pub fn decode_by_name(&self,str: &str,buf: &[u8]) -> Option<Box<Attribute>>{
    //     let mut bin_size = 0;
    //     for attribute in self.attributes.iter(){
    //         if attribute.1 == str {
    //             return Some(Box::new(attribute.0.deref().bin_size()))
    //         }
    //         bin_size += attribute.0.deref().bin_size();
    //     }
    //     None
    // }
    /// Builds a new `Data_item` whose attribute values are parsed from `vec`,
    /// using `self` as the schema/template.
    ///
    /// `vec` must contain exactly `self.bin_size()` bytes (asserted).
    /// Each attribute consumes its own `bin_size()` bytes from the front:
    /// `split_off` leaves the current attribute's bytes in `vec` (passed to
    /// `toDATATYPE`) and hands the remainder over for the next iteration.
    pub fn from_vec(&self, mut vec: Vec<u8>) -> Data_item {
        assert_eq!(vec.len(), self.bin_size());
        let mut res = self.clone();
        let mut bin_size = 0;
        let mut i = 0;
        while i < self.attributes.len() {
            let vec_temp = vec.split_off(res.attributes[i].0.bin_size());
            toDATATYPE(vec, &mut res.attributes[i].0);
            vec = vec_temp;
            i = i + 1;
        }
        res
    }
}
impl Attribute for Data_item{
    /// Total encoded size: the sum of every attribute's binary size.
    fn bin_size(&self) -> usize {
        self.attributes
            .iter()
            .map(|attribute| attribute.0.bin_size())
            .sum()
    }
    /// Encodes every flagged attribute into `buf` at its fixed offset.
    ///
    /// Attributes whose flag is `false` are skipped, but still advance the
    /// write offset so each attribute keeps its position in the layout.
    /// (The previous `&attribute.0.encode(..);` statements created useless
    /// references and compiler warnings; the `&` is dropped here.)
    fn encode(&self, buf: &mut [u8]) {
        check_len(buf, self.bin_size());
        let mut offset = 0;
        for attribute in self.attributes.iter() {
            if attribute.1 {
                attribute.0.encode(&mut buf[offset..]);
            }
            offset += attribute.0.bin_size();
        }
    }
    /// Decodes every attribute from `buf` in layout order and concatenates
    /// the decoded bytes into one vector.
    fn decode(&self, buf: &[u8]) -> Vec<u8> {
        let mut decoded = Vec::new();
        let mut offset = 0;
        for attribute in self.attributes.iter() {
            decoded.append(&mut attribute.0.decode(&buf[offset..]));
            offset += attribute.0.bin_size();
        }
        decoded
    }
}
// #[derive(Debug)]
// pub struct Update_data_item{
// pub attributes: Vec<(DATATYPE,bool)>
// }
//
// impl Attribute for Update_data_item{
// fn bin_size(&self) -> usize {
// let mut bin_size = 0;
// for attribute in self.attributes.iter() {
// bin_size += &attribute.0.bin_size();
// }
// bin_size
// }
//
// fn encode(&self, buf: &mut [u8]) {
// check_len(buf, self.bin_size());
// let mut bin_size = 0;
// for attribute in self.attributes.iter() {
// if attribute.1 {
// &attribute.0.encode(&mut buf[bin_size..]);
// }
// bin_size += &attribute.0.bin_size();
// }
// }
//
// fn decode(&self, buf: &[u8]) -> Vec<u8> {
// let mut res = Vec::new();
// let mut bin_size = 0;
// for attribute in self.attributes.iter() {
// res.append(&mut attribute.0.decode(&buf[bin_size..]));
// bin_size += &attribute.0.bin_size();
// }
// res
// }
// }
/// Schema description plus working values for a record: each attribute has
/// a value, a name, and a "has been set" flag.
#[derive(Debug)]
pub struct Data_item_info{
    // (value, attribute name, set-flag); the flag is raised by `set` and
    // cleared by `clear_value`.
    pub attributes: Vec<(DATATYPE,String,bool)>
}
impl Clone for Data_item_info{
    /// Deep-copies the attribute list.
    fn clone(&self) -> Self {
        Data_item_info {
            attributes: self.attributes.clone(),
        }
    }
}
impl Data_item_info{
pub fn new() -> Data_item_info{
Data_item_info{
attributes: Vec::new()
}
}
pub fn add(&mut self, attribute: (DATATYPE, String)){
self.attributes.push((attribute.0,attribute.1,false));
}
pub fn set(&mut self, value: DATATYPE, attributename: &str) -> bool {
for mut attribute in &mut self.attributes {
// println!("attribute:{:?}",attribute);
if attribute.1 == attributename {
if attribute.0.type_equal(&value) {
attribute.0 = value;
attribute.2 = true;
return true;
} else {
return false;}
}
}
// println!("555");
false
}
pub fn get_data_item(&self) -> Data_item{
let mut attributes = Vec::new();
for attribute in &self.attributes {
attributes.push((attribute.0.clone(),attribute.2));
}
Data_item{
attributes
}
}
// pub fn get_update_data_item(&self) -> Update_data_item{
// let mut attributes = Vec::new();
// for attribute in &self.attributes {
// attributes.push((attribute.0.clone(),attribute.2));
// }
// Update_data_item{
// attributes
// }
// }
pub fn clear_value(&mut self) -> &mut Data_item_info {
for attribute in &mut self.attributes {
attribute.2 = false;
}
self
}
} |
use crate::particle::Particle;
use nannou::prelude::*;
/// A rectangular region with an associated constant force.
/// NOTE(review): `apply_force` applies unconditionally — containment
/// checks against `rect` are presumably done by the caller; confirm.
pub struct ForceField {
    pub rect: Rect,
    pub force: Vec2,
}
impl ForceField {
    /// Creates a field covering `rect` that applies `force`.
    pub fn new(rect: Rect, force: Vec2) -> ForceField {
        ForceField { rect, force }
    }
    /// Draws the field's bounding box, then its force vector on top.
    pub fn draw(&self, draw: &Draw) {
        draw.rect()
            .wh(self.rect.wh())
            .xy(self.rect.xy())
            .stroke_weight(1.0)
            .stroke_color(BLACK)
            .color(WHITESMOKE);
        self.draw_force_vector(draw)
    }
    /// Draws the force as a line from the field's centre plus a dot marker.
    fn draw_force_vector(&self, draw: &Draw) {
        // arrow line
        let color = BLACK;
        let start = self.rect.xy();
        let end = start + self.force * 25.0; // 25 is magic
        draw.line()
            .start(start)
            .end(end)
            .stroke_weight(3.0)
            .color(color);
        // arrow tip
        // NOTE(review): labelled "arrow tip" but the ellipse is drawn at
        // `start` (the tail), not `end` — confirm whether this is intended.
        let tip_size = self.rect.wh() / 12.0; // 12 is magic
        draw.ellipse().xy(start).wh(tip_size).color(color);
    }
    /// Adds a scaled-down (×0.01) copy of the force to the particle's
    /// acceleration.
    pub fn apply_force(&self, particle: &mut Particle) {
        particle.acceleration += self.force * 0.01;
    }
}
|
use ImVec2;
/// A style variable that can be pushed onto the ImGui style stack, paired
/// with the value to apply. Variant names mirror the `ImGuiStyleVar_*`
/// constants: scalar variables carry `f32`, two-component ones `ImVec2`.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum StyleVar {
    Alpha(f32),
    WindowPadding(ImVec2),
    WindowRounding(f32),
    WindowMinSize(ImVec2),
    ChildWindowRounding(f32),
    FramePadding(ImVec2),
    FrameRounding(f32),
    ItemSpacing(ImVec2),
    ItemInnerSpacing(ImVec2),
    IndentSpacing(f32),
    GrabMinSize(f32),
    ButtonTextAlign(ImVec2),
}
|
use config::Config;
use api::TellerClient;
use api::inform::{Incomings, GetIncomings};
use cli::arg_types::{AccountType, OutputFormat, Interval, Timeframe};
use command::representations::represent_list_amounts;
use command::timeframe_to_date_range;
/// Renders fetched incomings with the shared "amounts list" formatter.
fn represent_list_incomings(hac: &Incomings, output: &OutputFormat) {
    represent_list_amounts("incoming", &hac, &output)
}
/// Fetches the incoming amounts for the configured account over the given
/// timeframe/interval and prints them in `output` format.
///
/// Returns a process exit code: 0 on success, 1 on failure.
pub fn list_incomings_command(teller: &TellerClient,
                              config: &Config,
                              account: &AccountType,
                              interval: &Interval,
                              timeframe: &Timeframe,
                              output: &OutputFormat)
                              -> i32 {
    info!("Calling the list incomings command");
    let account_id = config.get_account_id(&account);
    let (from, to) = timeframe_to_date_range(&timeframe);
    match teller.get_incomings(&account_id, &interval, &from, &to) {
        Ok(incomings) => {
            represent_list_incomings(&incomings, &output);
            0
        }
        Err(err) => {
            error!("Unable to list incomings: {}", err);
            1
        }
    }
}
|
// Auto-generated (svd2rust-style) accessors for DU_SEL.
// Field layout: TR0/TR1/TR2 selectors are 4-bit fields at bits 3:0, 11:8,
// 19:16; DATA0/DATA1 selectors are 2-bit fields at bits 25:24, 29:28.
#[doc = "Reader of register DU_SEL"]
pub type R = crate::R<u32, super::DU_SEL>;
#[doc = "Writer for register DU_SEL"]
pub type W = crate::W<u32, super::DU_SEL>;
#[doc = "Register DU_SEL `reset()`'s with value 0"]
impl crate::ResetValue for super::DU_SEL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DU_TR0_SEL`"]
pub type DU_TR0_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DU_TR0_SEL`"]
pub struct DU_TR0_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DU_TR0_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 3:0.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `DU_TR1_SEL`"]
pub type DU_TR1_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DU_TR1_SEL`"]
pub struct DU_TR1_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DU_TR1_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 11:8.
        self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);
        self.w
    }
}
#[doc = "Reader of field `DU_TR2_SEL`"]
pub type DU_TR2_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DU_TR2_SEL`"]
pub struct DU_TR2_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DU_TR2_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bits 19:16.
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
#[doc = "Reader of field `DU_DATA0_SEL`"]
pub type DU_DATA0_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DU_DATA0_SEL`"]
pub struct DU_DATA0_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DU_DATA0_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 25:24.
        self.w.bits = (self.w.bits & !(0x03 << 24)) | (((value as u32) & 0x03) << 24);
        self.w
    }
}
#[doc = "Reader of field `DU_DATA1_SEL`"]
pub type DU_DATA1_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DU_DATA1_SEL`"]
pub struct DU_DATA1_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DU_DATA1_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 29:28.
        self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28);
        self.w
    }
}
// Read accessors: each selector is shifted down to bit 0 and masked to its
// field width (see the field layout note above the proxies).
impl R {
    #[doc = "Bits 0:3 - Data unit input signal 'tr0_in' source selection: '0': Constant '0'. '1': Constant '1'. '2': Data unit output. '10-3': LUT 7 - 0 outputs. Otherwise: Undefined."]
    #[inline(always)]
    pub fn du_tr0_sel(&self) -> DU_TR0_SEL_R {
        DU_TR0_SEL_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - Data unit input signal 'tr1_in' source selection. Encoding is the same as for DU_TR0_SEL."]
    #[inline(always)]
    pub fn du_tr1_sel(&self) -> DU_TR1_SEL_R {
        DU_TR1_SEL_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - Data unit input signal 'tr2_in' source selection. Encoding is the same as for DU_TR0_SEL."]
    #[inline(always)]
    pub fn du_tr2_sel(&self) -> DU_TR2_SEL_R {
        DU_TR2_SEL_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 24:25 - Data unit input data 'data0_in' source selection: '0': Constant '0'. '1': chip_data\\[7:0\\]. '2': io_data_in\\[7:0\\]. '3': DATA.DATA MMIO register field."]
    #[inline(always)]
    pub fn du_data0_sel(&self) -> DU_DATA0_SEL_R {
        DU_DATA0_SEL_R::new(((self.bits >> 24) & 0x03) as u8)
    }
    #[doc = "Bits 28:29 - Data unit input data 'data1_in' source selection. Encoding is the same as for DU_DATA0_SEL."]
    #[inline(always)]
    pub fn du_data1_sel(&self) -> DU_DATA1_SEL_R {
        DU_DATA1_SEL_R::new(((self.bits >> 28) & 0x03) as u8)
    }
}
// Write accessors: each returns a field proxy whose `bits()` inserts the
// value at the field's position (see the DU_*_W impls above).
impl W {
    #[doc = "Bits 0:3 - Data unit input signal 'tr0_in' source selection: '0': Constant '0'. '1': Constant '1'. '2': Data unit output. '10-3': LUT 7 - 0 outputs. Otherwise: Undefined."]
    #[inline(always)]
    pub fn du_tr0_sel(&mut self) -> DU_TR0_SEL_W {
        DU_TR0_SEL_W { w: self }
    }
    #[doc = "Bits 8:11 - Data unit input signal 'tr1_in' source selection. Encoding is the same as for DU_TR0_SEL."]
    #[inline(always)]
    pub fn du_tr1_sel(&mut self) -> DU_TR1_SEL_W {
        DU_TR1_SEL_W { w: self }
    }
    #[doc = "Bits 16:19 - Data unit input signal 'tr2_in' source selection. Encoding is the same as for DU_TR0_SEL."]
    #[inline(always)]
    pub fn du_tr2_sel(&mut self) -> DU_TR2_SEL_W {
        DU_TR2_SEL_W { w: self }
    }
    #[doc = "Bits 24:25 - Data unit input data 'data0_in' source selection: '0': Constant '0'. '1': chip_data\\[7:0\\]. '2': io_data_in\\[7:0\\]. '3': DATA.DATA MMIO register field."]
    #[inline(always)]
    pub fn du_data0_sel(&mut self) -> DU_DATA0_SEL_W {
        DU_DATA0_SEL_W { w: self }
    }
    #[doc = "Bits 28:29 - Data unit input data 'data1_in' source selection. Encoding is the same as for DU_DATA0_SEL."]
    #[inline(always)]
    pub fn du_data1_sel(&mut self) -> DU_DATA1_SEL_W {
        DU_DATA1_SEL_W { w: self }
    }
}
|
/// Returns true if `num`'s digits can be split into an additive sequence:
/// at least three numbers where each number (from the third on) is the sum
/// of the two preceding ones. Numbers may not have leading zeros, except
/// for "0" itself.
pub fn is_additive_number(num: String) -> bool {
    let digits: Vec<u64> = num.chars().map(|c| (c as u8 - b'0') as u64).collect();
    let len = digits.len();
    if len < 3 {
        return false;
    }
    // Folds a digit slice into its numeric value.
    let to_value = |s: &[u64]| s.iter().fold(0u64, |acc, &d| acc * 10 + d);
    // If the decimal representation of `target` is a prefix of `rest`,
    // returns how many digits it spans; otherwise None.
    fn prefix_len(mut target: u64, rest: &[u64]) -> Option<usize> {
        if target == 0 {
            // Zero must match exactly one leading '0' digit.
            return if rest[0] == 0 { Some(1) } else { None };
        }
        let mut reversed = Vec::new();
        while target > 0 {
            reversed.push(target % 10);
            target /= 10;
        }
        if reversed.len() > rest.len() {
            return None;
        }
        for (k, &d) in reversed.iter().rev().enumerate() {
            if rest[k] != d {
                return None;
            }
        }
        Some(reversed.len())
    }
    // Try every split of the first two numbers, then greedily verify the rest.
    for first_len in 1..len - 1 {
        // No leading zero on the first number (unless it is exactly "0").
        if digits[0] == 0 && first_len > 1 {
            continue;
        }
        let first_value = to_value(&digits[..first_len]);
        for second_len in 1..len - first_len {
            // No leading zero on the second number either.
            if digits[first_len] == 0 && second_len > 1 {
                continue;
            }
            let mut a = first_value;
            let mut b = to_value(&digits[first_len..first_len + second_len]);
            let mut pos = first_len + second_len;
            while pos < len {
                let sum = a + b;
                match prefix_len(sum, &digits[pos..]) {
                    Some(consumed) => {
                        pos += consumed;
                        a = b;
                        b = sum;
                    }
                    None => break,
                }
                if pos == len {
                    return true;
                }
            }
        }
    }
    false
}
use super::{Trader, Order, Action};
use crate::indicators::{Value, Indicator};
use crate::economy::Monetary;
/// Which side of the trading pair matches the protected currency `SAFE`.
enum Safe {
    // Base is protected: the stop fires a Buy when price rises to the stop.
    Base,
    // Quote is protected: the stop fires a Sell when price falls to the stop.
    Quote,
    // Neither side matches; stop-loss logic never triggers.
    None
}
/// Wraps an inner [`Trader`] with stop-loss protection.
///
/// Const parameters: `SAFE` names the protected currency, `STOP` is the
/// multiplier used to derive the stop level from an order price, `BACKOFF`
/// is how many evaluations to suppress after a stop fires.
/// NOTE(review): `const STOP: Monetary` relies on unstable const-generic
/// support for non-integral types — confirm the toolchain/feature gates.
pub struct StopLoss<T, const SAFE: &'static str, const STOP: Monetary, const BACKOFF: usize>
where
    T: Trader
{
    // the wrapped strategy
    trader: T,
    // which side of the pair is protected (resolved once in `initialize`)
    safe: Safe,
    // currently armed stop level, if any
    stop: Option<Monetary>,
    // evaluations left to skip after a stop fired
    backoff: usize
}
impl<T, const SAFE: &'static str, const STOP: Monetary, const BACKOFF: usize> Trader for StopLoss<T, SAFE, STOP, BACKOFF>
where
    T: Trader
{
    // The inner trader's indicators, prefixed with the price `Value` this
    // wrapper watches for the stop condition.
    type Indicators = (Value, T::Indicators);
    /// Builds the wrapped trader and records which side of the pair (if any)
    /// matches the protected currency `SAFE`.
    fn initialize(base: &str, quote: &str) -> StopLoss<T, SAFE, STOP, BACKOFF> {
        StopLoss {
            trader: T::initialize(base, quote),
            safe: if base == SAFE {
                Safe::Base
            } else
            if quote == SAFE {
                Safe::Quote
            } else {
                Safe::None
            },
            stop: None,
            backoff: 0,
        }
    }
    /// Delegates to the inner trader unless an armed stop is hit first.
    ///
    /// If `value` crosses the armed stop (>= for `Safe::Base` → Buy,
    /// <= for `Safe::Quote` → Sell), a full-size (1.0) limit order is
    /// emitted, the stop is cleared, and trading is suppressed for
    /// `BACKOFF` evaluations. Otherwise the inner trader's order (if any)
    /// re-arms the stop at `value * STOP` (after a Buy, Quote-protected)
    /// or `value / STOP` (after a Sell, Base-protected), keeping the more
    /// conservative level when one is already armed.
    fn evaluate(&mut self, (value, output): <Self::Indicators as Indicator>::Output) -> Option<Order> {
        if self.backoff == 0 {
            if let Some(stop) = self.stop {
                match self.safe {
                    Safe::Base => {
                        if value >= stop {
                            self.stop = None;
                            self.backoff = BACKOFF;
                            return Some(Order::Limit(Action::Buy, 1.0, value));
                        }
                    },
                    Safe::Quote => {
                        if value <= stop {
                            self.stop = None;
                            self.backoff = BACKOFF;
                            return Some(Order::Limit(Action::Sell, 1.0, value));
                        }
                    }
                    Safe::None => {}
                }
            }
            let order = self.trader.evaluate(output);
            if let Some(order) = &order {
                match (&self.safe, order) {
                    (Safe::Quote, Order::Limit(Action::Buy, _, value)) => {
                        self.stop = Some(if let Some(stop) = self.stop {
                            stop.min(*value * STOP)
                        } else {
                            *value * STOP
                        });
                    },
                    (Safe::Base, Order::Limit(Action::Sell, _, value)) => {
                        self.stop = Some(if let Some(stop) = self.stop {
                            stop.max(*value / STOP)
                        } else {
                            *value / STOP
                        });
                    },
                    _ => {}
                }
            }
            order
        } else {
            // Still backing off after a stop fired: no trading this round.
            self.backoff -= 1;
            None
        }
    }
}
use crate::utils::lines_from_file;
use std::time::Instant;
/// Day 11 entry point: runs the sample-input regression asserts, then
/// solves part 2 on the real input and prints the elapsed time.
pub fn main() {
    let start = Instant::now();
    assert_eq!(part_1_test(), 37);
    // println!("part_1 {:?}", part_1());
    // `mini_test` returns the seats visible from a fixed position in the
    // mini fixture; the expected answer there is "none visible".
    assert_eq!(mini_test().len(), 0);
    assert_eq!(part_2_test(), 26);
    println!("part_2 {:?}", part_2());
    let duration = start.elapsed();
    println!("Finished after {:?}", duration);
}
/// Solves part 2 on the real puzzle input (line-of-sight rule, threshold 5).
fn part_2() -> u32 {
    let entries = lines_from_file("src/day_11/input.txt");
    count_final_occupied_seats(&entries, get_values_around_position_part_2, 5)
}
/// Debug helper: the seats visible from (3,3) in the mini fixture
/// (expected to be empty — see the assert in `main`).
fn mini_test() -> Vec<char> {
    let entries = lines_from_file("src/day_11/mini-test.txt");
    let seats: Vec<Vec<char>> = entries
        .iter()
        .map(|entry| entry.chars().collect())
        .collect();
    get_values_around_position_part_2(
        &seats,
        Position {
            row_index: 3,
            column_index: 3,
        },
    )
}
/// Solves part 2 on the sample input (expected: 26).
fn part_2_test() -> u32 {
    let entries = lines_from_file("src/day_11/input-test.txt");
    count_final_occupied_seats(&entries, get_values_around_position_part_2, 5)
}
/// Solves part 1 on the real puzzle input (adjacency rule, threshold 4);
/// currently not invoked by `main`.
fn part_1() -> u32 {
    let entries = lines_from_file("src/day_11/input.txt");
    count_final_occupied_seats(&entries, get_values_around_position, 4)
}
/// Solves part 1 on the sample input (expected: 37).
fn part_1_test() -> u32 {
    let entries = lines_from_file("src/day_11/input-test.txt");
    count_final_occupied_seats(&entries, get_values_around_position, 4)
}
/// A (row, column) cell coordinate in the seat grid.
#[derive(Debug, PartialEq, Eq)]
struct Position {
    row_index: usize,
    column_index: usize,
}
/// Runs the seating cellular automaton until it stabilises and returns the
/// number of occupied (`'#'`) seats in the fixed point.
///
/// `get_values` selects the neighbourhood rule (adjacent cells for part 1,
/// line-of-sight for part 2); `occupied_threshold` is how many occupied
/// neighbours make an occupied seat empty out.
fn count_final_occupied_seats(
    entries: &Vec<String>,
    get_values: fn(seats: &Vec<Vec<char>>, position: Position) -> Vec<char>,
    occupied_threshold: u8,
) -> u32 {
    let seats: Vec<Vec<char>> = entries
        .iter()
        .map(|entry| entry.chars().collect())
        .collect();
    let row_count = entries.len();
    let column_count = entries.first().unwrap().len();
    let mut current_round = seats.clone();
    // Subtle invariant: after the first iteration `current_round` is always
    // a clone of `next_round`, so branches below that do NOT write a cell
    // leave the correct current value in place. On the very first iteration
    // `next_round` is all '.', which is safe only because no seat is
    // occupied yet: every 'L' flips to '#' and floor is copied by `_`.
    let mut next_round = vec![vec!['.'; column_count]; row_count];
    let last_round = loop {
        let mut has_changed = false;
        for row_index in 0..row_count {
            for column_index in 0..column_count {
                let values = get_values(
                    &current_round,
                    Position {
                        row_index,
                        column_index,
                    },
                );
                match current_round[row_index][column_index] {
                    'L' => {
                        // Empty seat with no occupied neighbour → occupied.
                        if !values.contains(&'#') {
                            next_round[row_index][column_index] = '#';
                            has_changed = true;
                        }
                    }
                    '#' => {
                        // Occupied seat with too many occupied neighbours → empty.
                        if values.iter().filter(|val| **val == '#').count()
                            >= occupied_threshold as usize
                        {
                            next_round[row_index][column_index] = 'L';
                            has_changed = true;
                        }
                    }
                    _ => {
                        // Floor ('.') never changes; copy it through.
                        next_round[row_index][column_index] =
                            current_round[row_index][column_index];
                    }
                }
            }
        }
        // A round with no flips is the fixed point.
        if !has_changed {
            break next_round;
        }
        current_round = next_round.clone();
    };
    last_round
        .iter()
        .map(|row| {
            row.iter().fold(0u32, |count, seat| {
                if seat == &'#' {
                    return count + 1;
                }
                count
            })
        })
        .sum()
}
/// Part 2 rule: from `position`, look outward along each of the 8 compass
/// directions and record the first non-floor cell ('L' or '#') seen,
/// stopping at the grid edge.
///
/// The degenerate (0,0) "direction" terminates immediately: the shifts stay
/// zero, so `seat.eq(&position)` breaks on the first step.
fn get_values_around_position_part_2(seats: &Vec<Vec<char>>, position: Position) -> Vec<char> {
    let mut values: Vec<char> = vec![];
    for i in -1i32..=1 {
        for j in -1i32..=1 {
            let mut row_shift = 0;
            let mut column_shift = 0;
            loop {
                // Step one more cell along direction (i, j).
                row_shift += i;
                column_shift += j;
                let seat_row = position.row_index as i32 + row_shift;
                let seat_column = position.column_index as i32 + column_shift;
                // Walked off the top/left edge.
                if seat_row < 0 || seat_column < 0 {
                    break;
                }
                let seat = Position {
                    row_index: seat_row as usize,
                    column_index: seat_column as usize,
                };
                // Walked off the bottom/right edge (or degenerate direction).
                if !position_exists(seats, &seat) || seat.eq(&position) {
                    break;
                }
                let value = seats[seat.row_index][seat.column_index];
                // First seat (not floor) blocks the line of sight.
                if value != '.' {
                    values.push(value);
                    break;
                }
            }
        }
    }
    values
}
/// Part 1 rule: the values of the up-to-8 cells directly adjacent to
/// `position`.
fn get_values_around_position(seats: &Vec<Vec<char>>, position: Position) -> Vec<char> {
    get_seats_to_check_around_position(seats, position)
        .into_iter()
        .map(|seat| seats[seat.row_index][seat.column_index])
        .collect()
}
/// Collects the (up to 8) in-bounds neighbour positions of `position`.
///
/// Fix: the original cast row/column indices to `i8`, which overflows for
/// grids with more than 127 rows or columns (panic in debug, wrap-around in
/// release); `isize` is safe for any realistic grid size.
fn get_seats_to_check_around_position(seats: &Vec<Vec<char>>, position: Position) -> Vec<Position> {
    let mut seats_to_check: Vec<Position> = vec![];
    for i in -1isize..=1 {
        for j in -1isize..=1 {
            // Skip the centre cell itself.
            if i == 0 && j == 0 {
                continue;
            }
            let seat_row = position.row_index as isize + i;
            let seat_column = position.column_index as isize + j;
            if seat_row >= 0 && seat_column >= 0 {
                let seat = Position {
                    row_index: seat_row as usize,
                    column_index: seat_column as usize,
                };
                if position_exists(seats, &seat) {
                    seats_to_check.push(seat);
                }
            }
        }
    }
    seats_to_check
}
/// True when `position` addresses a cell inside the grid bounds.
fn position_exists(seats: &Vec<Vec<char>>, position: &Position) -> bool {
    seats
        .get(position.row_index)
        .map_or(false, |row| row.get(position.column_index).is_some())
}
|
use std::fmt;
use std::io::ErrorKind;
use std::ptr;
use std::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
use std::sync::Arc;
use std::time::Duration;
use crate::cancel::Cancel;
use crate::coroutine_impl::{co_cancel_data, run_coroutine, CoroutineImpl, EventSource};
use crate::scheduler::get_scheduler;
use crate::sync::atomic_dur::AtomicDuration;
use crate::sync::AtomicOption;
use crate::timeout_list::TimeoutHandle;
use crate::yield_now::{get_co_para, yield_now, yield_with};
/// Why a `park_timeout` call returned with an error.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ParkError {
    /// The waiting coroutine was canceled.
    Canceled,
    /// The configured timeout elapsed before an unpark.
    Timeout,
}
/// RAII guard that keeps the owning [`Park`]'s `wait_kernel` flag set for
/// the duration of a kernel-side operation; cleared again on drop.
pub struct DropGuard<'a>(&'a Park);
pub struct Park {
    // the coroutine that is waiting on this park instance
    wait_co: Arc<AtomicOption<CoroutineImpl>>,
    // when true means the Park has no need to block
    state: AtomicBool,
    // controls how to deal with cancellation; usually initialized one time
    check_cancel: AtomicBool,
    // timeout setting; None means park forever
    timeout: AtomicDuration,
    // timer handle stored as a raw pointer; null when no timer is armed
    timeout_handle: AtomicPtr<TimeoutHandle<Arc<AtomicOption<CoroutineImpl>>>>,
    // flag set while kernel-side registration is in progress
    wait_kernel: AtomicBool,
}
impl Default for Park {
    /// Equivalent to [`Park::new`].
    fn default() -> Self {
        Park::new()
    }
}
// this is the park resource type (spmc style)
impl Park {
    /// Creates a Park in the "must block" state with no timeout armed.
    pub fn new() -> Self {
        Park {
            wait_co: Arc::new(AtomicOption::none()),
            state: AtomicBool::new(false),
            check_cancel: AtomicBool::new(true),
            timeout: AtomicDuration::new(None),
            timeout_handle: AtomicPtr::new(ptr::null_mut()),
            wait_kernel: AtomicBool::new(false),
        }
    }
    // ignore cancel: if true, the caller has to do the cancel check instead
    pub fn ignore_cancel(&self, ignore: bool) {
        self.check_cancel.store(!ignore, Ordering::Relaxed);
    }
    /// Swaps in a new timer handle and returns the previous one, if any.
    /// Ownership moves through the raw pointer: `into_ptr` leaks the new
    /// handle into the atomic, `from_ptr` re-materialises the old one.
    #[inline]
    fn set_timeout_handle(
        &self,
        handle: Option<TimeoutHandle<Arc<AtomicOption<CoroutineImpl>>>>,
    ) -> Option<TimeoutHandle<Arc<AtomicOption<CoroutineImpl>>>> {
        let ptr = match handle {
            None => ptr::null_mut(),
            Some(h) => h.into_ptr(),
        };
        let old_ptr = self.timeout_handle.swap(ptr, Ordering::Relaxed);
        if old_ptr.is_null() {
            None
        } else {
            Some(unsafe { TimeoutHandle::from_ptr(old_ptr) })
        }
    }
    // return true if need park the coroutine
    // when the state is true, we clear it and indicate not to block
    // when the state is false, means we need real park
    #[inline]
    fn check_park(&self) -> bool {
        // fast check, since only one consumer to park
        if self.state.load(Ordering::Acquire) {
            self.state.store(false, Ordering::Release);
            return false;
        }
        // slow path: atomically consume a concurrent unpark that may have
        // landed between the load and here
        !self.state.swap(false, Ordering::AcqRel)
    }
    // unpark the underlying coroutine if any
    // only the transition false -> true performs the wake-up, so concurrent
    // unparks wake the waiter at most once
    #[inline]
    pub(crate) fn unpark_impl(&self, b_sync: bool) {
        if !self.state.swap(true, Ordering::AcqRel) {
            self.wake_up(b_sync);
        }
    }
    // unpark the underlying coroutine if any, push to the ready task queue
    #[inline]
    pub fn unpark(&self) {
        self.unpark_impl(false);
    }
    // remove the timeout handle after return back to user space
    #[inline]
    fn remove_timeout_handle(&self) {
        if let Some(h) = self.set_timeout_handle(None) {
            if h.is_link() {
                get_scheduler().del_timer(h);
            }
            // when timeout the node is unlinked
            // just drop it to release memory
        }
    }
    /// Resumes the parked coroutine: inline when `b_sync`, otherwise via
    /// the scheduler's ready queue.
    #[inline]
    fn wake_up(&self, b_sync: bool) {
        if let Some(co) = self.wait_co.take() {
            if b_sync {
                run_coroutine(co);
            } else {
                get_scheduler().schedule(co);
            }
        }
    }
    /// Like `wake_up(true)`: always runs the coroutine inline.
    #[inline]
    fn fast_wake_up(&self) {
        if let Some(co) = self.wait_co.take() {
            run_coroutine(co);
        }
    }
    /// park current coroutine with specified timeout
    /// if timeout happens, return Err(ParkError::Timeout)
    /// if cancellation detected, return Err(ParkError::Canceled)
    pub fn park_timeout(&self, dur: Option<Duration>) -> Result<(), ParkError> {
        // an unpark already arrived: consume it and return without blocking
        if !self.check_park() {
            return Ok(());
        }
        // before a new yield wait the kernel done
        while self.wait_kernel.load(Ordering::Acquire) {
            yield_now();
        }
        self.timeout.store(dur);
        // what if the state is set before yield?
        // the subscribe would re-check it
        yield_with(self);
        // clear the trigger state
        self.check_park();
        // remove timer handle
        self.remove_timeout_handle();
        // let _gen = self.state.load(Ordering::Acquire);
        // println!("unparked gen={}, self={:p}", gen, self);
        // the scheduler reports timeout/cancel through the coroutine-local
        // error parameter rather than a return value
        if let Some(err) = get_co_para() {
            match err.kind() {
                ErrorKind::TimedOut => return Err(ParkError::Timeout),
                ErrorKind::Other => return Err(ParkError::Canceled),
                _ => unreachable!("unexpected return error kind"),
            }
        }
        Ok(())
    }
    /// Marks the park as "inside kernel" until the returned guard drops.
    fn delay_drop(&self) -> DropGuard {
        self.wait_kernel.store(true, Ordering::Release);
        DropGuard(self)
    }
}
impl<'a> Drop for DropGuard<'a> {
    // clear the "inside kernel" flag set by `Park::delay_drop`
    fn drop(&mut self) {
        self.0.wait_kernel.store(false, Ordering::Release);
    }
}
impl Drop for Park {
    fn drop(&mut self) {
        // wait the kernel finish
        // spin-yield until any in-flight `subscribe` releases its DropGuard,
        // so the registration code never touches a freed Park
        while self.wait_kernel.load(Ordering::Acquire) {
            yield_now();
        }
        // reclaim and drop any leaked timer handle
        self.set_timeout_handle(None);
    }
}
impl EventSource for Park {
    // register the coroutine to the park
    fn subscribe(&mut self, co: CoroutineImpl) {
        let cancel = co_cancel_data(&co);
        // if we share the same park, the previous timer may falsely wake it
        // up if we did not delete the timer in time
        let timeout_handle = self
            .timeout
            .take()
            .map(|dur| get_scheduler().add_timer(dur, self.wait_co.clone()));
        self.set_timeout_handle(timeout_handle);
        // keep `wait_kernel` set until this function returns, so
        // `park_timeout`/`drop` cannot race with the registration below
        let _g = self.delay_drop();
        // register the coroutine
        unsafe { self.wait_co.unsync_store(co) };
        // re-check the state, only clear once after resume
        if self.state.load(Ordering::Acquire) {
            // here may have recursive call for subscribe
            // normally the recursion depth is not too deep
            return self.fast_wake_up();
        }
        // register the cancel data
        cancel.set_co(self.wait_co.clone());
        // re-check the cancel status
        if cancel.is_canceled() {
            unsafe { cancel.cancel() };
        }
    }
    // when the cancel is true we check the panic or do nothing
    fn yield_back(&self, cancel: &'static Cancel) {
        if self.check_cancel.load(Ordering::Relaxed) {
            cancel.check_cancel();
        }
    }
}
impl fmt::Debug for Park {
    // only `state` is shown; the other fields are synchronisation internals
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Park").field("state", &self.state).finish()
    }
}
|
use regex::Regex;
extern crate chrono;
use chrono::prelude::*;
/// Formats a duration given in milliseconds as `MM:SS.mmm`
/// (minutes and seconds zero-padded to 2 digits, milliseconds to 3).
pub fn format_time(time0: u128) -> String {
    let total_seconds = time0 / 1000;
    let millis = time0 % 1000;
    let minutes = total_seconds / 60;
    let seconds = total_seconds % 60;
    // Fix: the original appended the raw millisecond value, so 5 ms rendered
    // as ".5" and was indistinguishable from 500 ms; zero-pad to 3 digits
    // (minutes/seconds were already padded via the push('0') calls).
    format!("{:02}:{:02}.{:03}", minutes, seconds, millis)
}
/// Current UTC wall-clock time as `HH:MM:SS`, each component zero-padded
/// to two digits.
pub fn get_time() -> String {
    let utc: DateTime<Utc> = Utc::now();
    format!("{:02}:{:02}:{:02}", utc.hour(), utc.minute(), utc.second())
}
pub fn check_all(
name : String,
surname : String,
country : String,
city : String,
address : String,
phone : String,
email : String) -> u8
{
if name.is_empty() || surname.is_empty() || country.is_empty() || city.is_empty() || address.is_empty() || phone.is_empty() || email.is_empty(){
return 1;
}
let re = Regex::new(r"^\d{3}-\d{3}-\d{3}$").unwrap();
if !re.is_match(&phone) {
return 2;
}
0
} |
use sha2::{Digest, Sha256};
use std::{error, fmt, result, time};
/// Default cap on concurrent connections per remote endpoint.
pub const MAX_CONNS: usize = 4;
#[derive(Clone, Debug)]
/// Configuration parameters for Client.
pub struct Config {
    /// A previously fetched round serving as a verification checkpoint.
    ///
    /// * if `determinism` is true and check_point is None, Round-1 acts
    ///   as the check_point round.
    /// * if `determinism` is false, latest round is assumed as verified
    ///   round and treated as `check_point`.
    /// * if `secure` is false, every beacon round is assumed as verified
    ///   round.
    /// * if `secure` is true, every new round is verified with
    ///   `check_point` round.
    ///
    /// Default: None
    pub check_point: Option<Random>,
    /// Ensure all rounds from check_point to the latest round are valid.
    ///
    /// Default: false
    pub determinism: bool,
    /// Ensure all future rounds from the latest round are verified.
    ///
    /// Default: false
    pub secure: bool,
    /// Maximum number of concurrent connections allowed per remote.
    ///
    /// Default: MAX_CONNS
    pub max_conns: usize,
}
impl Default for Config {
    /// All verification switches off, no checkpoint, and the connection
    /// limit set to `MAX_CONNS`.
    fn default() -> Self {
        Self {
            check_point: None,
            determinism: false,
            secure: false,
            max_conns: MAX_CONNS,
        }
    }
}
impl Config {
    /// Set (or clear) the verification checkpoint round.
    pub fn set_check_point(&mut self, check_point: Option<Random>) -> &mut Self {
        self.check_point = check_point;
        self
    }
    /// Toggle validation of all rounds from the checkpoint to the latest.
    pub fn set_determinism(&mut self, determinism: bool) -> &mut Self {
        self.determinism = determinism;
        self
    }
    /// Toggle verification of every future round against the checkpoint.
    pub fn set_secure(&mut self, secure: bool) -> &mut Self {
        self.secure = secure;
        self
    }
    /// Set the per-remote concurrent connection cap.
    pub fn set_max_conns(&mut self, max_conns: usize) -> &mut Self {
        self.max_conns = max_conns;
        self
    }
}
/// Type alias for Result return type, used by this package.
pub type Result<T> = result::Result<T, Error>;
/// Error variants that can be returned by this package's API.
///
/// Each variant carries a prefix, typically identifying the
/// error location, followed by a human-readable message.
pub enum Error {
    /// Unrecoverable failure.
    Fatal(String, String),
    /// A shared lock was poisoned by a panicking holder.
    PoisonedLock(String, String),
    /// The requested security guarantee could not be met.
    NotSecure(String, String),
    /// Input or state failed validation.
    Invalid(String, String),
    /// Underlying I/O failure.
    IOError(String, String),
    /// JSON payload could not be parsed.
    JsonParse(String, String),
    /// String conversion/parsing failure.
    StringParse(String, String),
    /// Hexadecimal decoding failure.
    HexParse(String, String),
}
impl fmt::Display for Error {
    /// Renders as `<prefix> <Variant>: <message>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        use Error::*;
        // Destructure once into (prefix, variant-name, message) and format
        // all variants through a single write.
        let (prefix, kind, msg) = match self {
            Fatal(p, m) => (p, "Fatal", m),
            PoisonedLock(p, m) => (p, "PoisonedLock", m),
            NotSecure(p, m) => (p, "NotSecure", m),
            Invalid(p, m) => (p, "Invalid", m),
            IOError(p, m) => (p, "IOError", m),
            JsonParse(p, m) => (p, "JsonParse", m),
            StringParse(p, m) => (p, "StringParse", m),
            HexParse(p, m) => (p, "HexParse", m),
        };
        write!(f, "{} {}: {}", prefix, kind, msg)
    }
}
impl fmt::Debug for Error {
    /// Debug mirrors the `Display` rendering.
    fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        fmt::Display::fmt(self, f)
    }
}
impl error::Error for Error {}
// TODO: Is there any way to use info.hash to validate the first round of
// randomness.
/// Type captures the drand-group's hash-info.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Info {
    /// Distributed public key of the drand group.
    pub public_key: Vec<u8>,
    /// Time in seconds between randomness beacon rounds.
    pub period: time::Duration,
    /// Time in seconds since the Unix Epoch that the group began generating
    /// randomness.
    pub genesis_time: time::SystemTime,
    /// Chain-hash, which uniquely identifies the drand chain. It is used as
    /// a root of trust for validation of the first round of randomness.
    pub hash: Vec<u8>,
    /// Used as previous_signature to validate the first round of randomness.
    pub group_hash: Vec<u8>,
}
impl Default for Info {
fn default() -> Self {
Info {
public_key: Vec::default(),
period: time::Duration::default(),
genesis_time: time::UNIX_EPOCH,
hash: Vec::default(),
group_hash: Vec::default(),
}
}
}
/// Type captures randomness from drand-group for a single round.
///
/// This randomness can be verified at the client side using root-of-trust
/// and the group's hash-info.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Random {
    /// Sequentially increasing integer - the randomness round index.
    /// NOTE(review): stored as `u128`; the drand wire format uses a 64-bit
    /// round — confirm the widening is intentional (see `to_digest`).
    pub round: u128,
    /// SHA-256 hash of the signature.
    pub randomness: Vec<u8>,
    /// Boneh-Lynn-Shacham (BLS) signature for this round of randomness.
    pub signature: Vec<u8>,
    /// Signature of the previous round of randomness.
    pub previous_signature: Vec<u8>,
}
impl fmt::Display for Random {
    /// Short log form, e.g. `Random<1234>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        f.write_fmt(format_args!("Random<{}>", self.round))
    }
}
impl Random {
    /// Recompute the digest that this round's BLS signature signs:
    /// SHA-256 over `previous_signature || round` (big-endian).
    ///
    /// NOTE(review): `round` is a `u128`, so `to_be_bytes()` feeds 16 bytes
    /// into the hash. The drand chained-beacon scheme hashes the round as an
    /// 8-byte big-endian `u64` — confirm against the verification code that
    /// consumes this digest before relying on it.
    pub fn to_digest(&self) -> Result<Vec<u8>> {
        let mut hasher = Sha256::default();
        hasher.update(&self.previous_signature);
        hasher.update(self.round.to_be_bytes());
        Ok(hasher.finalize().to_vec())
    }
}
|
// Each new term in the Fibonacci sequence is generated by adding the previous two terms.
// By starting with 1 and 2, the first 10 terms will be:
//
//     1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
//
// By considering the terms in the Fibonacci sequence whose values do not exceed four million,
// find the sum of the even-valued terms.

/// Sum of the even-valued Fibonacci terms (sequence seeded with 1, 2)
/// whose values do not exceed `limit`.
fn sum_even_fibs(limit: u64) -> u64 {
    let (mut prev, mut curr) = (1u64, 2u64);
    let mut sum = 0;
    // Bug fix: the original generated the next term while `curr < limit`
    // and then summed it if even — so an even term just above the limit
    // would have been counted. Checking `curr <= limit` before
    // accumulating keeps every summed term in range.
    while curr <= limit {
        if curr % 2 == 0 {
            sum += curr;
        }
        let next = prev + curr;
        prev = curr;
        curr = next;
    }
    sum
}

fn main() {
    println!("Sum of even fibs under {} is: {}", 4_000_000, sum_even_fibs(4_000_000));
}
|
extern crate core;
extern crate hex;
#[macro_use]
extern crate crunchy;
pub mod constants;
pub mod helpers;
pub mod keccak;
use constants::*;
/// Keccak sponge state plus absorption bookkeeping.
pub struct Cryptonid {
    /// The 25-lane (u64) Keccak permutation state.
    a: [u64; STATE_SIZE],
    /// Byte offset within the current rate block (partial absorption).
    offset: usize,
    /// Rate in bytes: how much input is absorbed per permutation call.
    rate: usize,
    /// Domain-separation/padding delimiter byte (0x06 for SHA-3, see
    /// `impl_constructor!` below).
    delim: u8
}
// Generates a named constructor plus a one-shot hashing alias for a given
// bit size and padding delimiter.
macro_rules! impl_constructor {
    ($name: ident, $alias: ident, $bits: expr, $delim: expr) => {
        /// Construct a sponge with rate = 200 - bits/4 bytes and the
        /// given padding delimiter.
        pub fn $name() -> Cryptonid {
            Cryptonid::new(200 - $bits/4, $delim)
        }
        /// One-shot convenience wrapper.
        ///
        /// NOTE(review): `_result` is never written — this only absorbs
        /// `data` and then drops the sponge without padding or squeezing,
        /// so callers get their buffer back unchanged. Confirm whether a
        /// finalize/squeeze step is missing here.
        pub fn $alias(data: &[u8], _result: &mut [u8]) {
            let mut keccak = Cryptonid::$name();
            keccak.update(data);
        }
    }
}
// Generates a free function that returns the digest by value, delegating
// to the same-named associated function on `Cryptonid`.
macro_rules! impl_global_alias {
    ($alias: ident, $size: expr) => {
        /// One-shot hash of `data`, returning a `$size`-bit digest array.
        pub fn $alias(data: &[u8]) -> [u8; $size / 8] {
            let mut result = [0u8; $size / 8];
            Cryptonid::$alias(data, &mut result);
            result
        }
    }
}
impl_global_alias!(sha3_256, 256);
impl Cryptonid {
    /// Create a sponge with byte rate `rate_nb` and padding delimiter
    /// `delim_padding`; the state starts zeroed.
    pub fn new(rate_nb: usize, delim_padding: u8) -> Cryptonid {
        Cryptonid {
            a: [0; STATE_SIZE],
            offset: 0,
            rate: rate_nb,
            delim: delim_padding
        }
    }
    // Sha3 256 parameters
    impl_constructor!(new_sha3_256, sha3_256, 256, 0x06);
    // Parsing 64bits to 8bits
    /// View the u64 lane state as a flat byte array for XOR-absorption.
    fn a_mut_bytes(&mut self) -> &mut [u8; STATE_SIZE * 8] {
        // SAFETY: reinterprets `[u64; STATE_SIZE]` as `[u8; STATE_SIZE * 8]`;
        // the sizes match exactly and alignment only decreases.
        // NOTE(review): lane byte order is host endianness — presumably a
        // little-endian target is assumed, as usual for Keccak; confirm.
        unsafe { &mut *(&mut self.a as *mut [u64; STATE_SIZE] as *mut [u8; STATE_SIZE * 8]) }
    }
    /// Absorb more input into the sponge; may be called repeatedly.
    pub fn update(&mut self, input: &[u8]) {
        self.absorb(input);
    }
    // Apply keccak in a Sha3 way from a max length of 128 characters to a 64 bits state
    /// Absorb `message`, apply padding, run the permutation once and return
    /// the resulting lane state (consumes the sponge).
    pub fn compute_message(mut self, message: &str) -> [u64; STATE_SIZE] {
        let message_u8: Vec<u8> = From::from(message);
        self.absorb(&message_u8);
        self.pad();
        keccak::f1600(&mut self.a);
        self.a
    }
    // Make and apply Sha3 on a State in u64 from a message based on a message and a
    // previous_sponge, message are split to a random value on 32 characters and any value on 30
    // characters
    /// Chain-hash `rand || data` on top of `previous_sponge`.
    pub fn compute_cryptonauth(mut self, rand: &str , data: &str, previous_sponge: [u64; FSC_SIZE]) -> [u64; STATE_SIZE] {
        // TODO: Need Guardclass for rand & data length
        // Parse string to u8 vector for absorbtion
        let pass_u8: Vec<u8> = From::from(rand);
        let data_u8: Vec<u8> = From::from(data);
        let message: Vec<u8> = self::helpers::concatenate_arrays(&pass_u8, &data_u8);
        // TODO: To refacto to absorb()
        //first foldp
        let mut ip = 0;
        let mut l = message.len();
        let mut rate = self.rate - self.offset;
        let mut offset = self.offset;
        // Absorb all full rate-sized blocks, permuting after each.
        while l >= rate {
            keccak::xorin(&mut self.a_mut_bytes()[offset..][..rate], &message[ip..]);
            keccak::f1600(&mut self.a);
            ip += rate;
            l -= rate;
            rate = self.rate;
            offset = 0;
        }
        // Seed the state as h0 || previous_sponge before the final block.
        // NOTE(review): this overwrites `self.a` wholesale, discarding any
        // state produced by the full-block loop above — confirm that inputs
        // are always shorter than one rate block, or that this is intended.
        let h0: [u64; HN_SIZE] = [0; HN_SIZE];
        let mut state_64: [u64; STATE_SIZE] = [0;STATE_SIZE];
        let state_h_fsc = self::helpers::concatenate_arrays(&h0.to_vec(), &previous_sponge);
        state_64.copy_from_slice(&state_h_fsc);
        self.a = state_64;
        // Xor in the last block
        keccak::xorin(&mut self.a_mut_bytes()[offset..][..l], &message[ip..]);
        self.offset = offset + l;
        self.pad();
        keccak::f1600(&mut self.a);
        self.a
    }
    // Absorb input
    /// Core sponge absorption: XOR full blocks into the state (permuting
    /// after each), then XOR the trailing partial block and remember the
    /// offset so a later call can continue where this one stopped.
    fn absorb(&mut self, input: &[u8]) {
        //first foldp
        let mut ip = 0;
        let mut l = input.len();
        let mut rate = self.rate - self.offset;
        let mut offset = self.offset;
        while l >= rate {
            keccak::xorin(&mut self.a_mut_bytes()[offset..][..rate], &input[ip..]);
            keccak::f1600(&mut self.a);
            ip += rate;
            l -= rate;
            rate = self.rate;
            offset = 0;
        }
        // Xor in the last block
        keccak::xorin(&mut self.a_mut_bytes()[offset..][..l], &input[ip..]);
        self.offset = offset + l;
    }
    // To apply official Keccak padding parameters
    /// XOR the delimiter at the current offset and 0x80 into the final
    /// byte of the rate block.
    fn pad(&mut self) {
        let offset = self.offset;
        let rate = self.rate;
        let delim = self.delim;
        let aa = self.a_mut_bytes();
        aa[offset] ^= delim;
        aa[rate - 1] ^= 0x80;
    }
}
|
use deen::{Any, Optional, Tag, U16be, U32be, U32le, U8};
use deen_proc::deen;
use try_from_primitive::TryFromPrimitive;
/// Decoded protocol header used to exercise the `deen!` codec below.
#[derive(Debug, PartialEq)]
pub struct Header {
    /// Protocol version; selects the tag/endianness branch in the codec.
    version: u8,
    /// Length field; gates whether `foo` is present.
    length: u16,
    /// Optional trailing enum value.
    foo: Option<Foo>,
}
/// Two-valued marker decoded from a 32-bit field (see the codec below).
#[repr(u16)]
#[derive(Debug, PartialEq, Copy, Clone, TryFromPrimitive)]
enum Foo {
    Hello = 0xff00,
    World = 0x00ff,
}
// Declarative codec: `Encoder` (parameterized by a `magic` word) encodes and
// decodes `Header` as: 4-byte BE magic tag, version byte, one ignored byte,
// BE length, then a version-dependent optional `Foo` tail.
deen! {
    #[derive(Debug)]
    pub struct Encoder(magic: u32) for Header {
        Tag::new(U32be, magic),
        version ~ U8,
        // A throwaway byte between version and length.
        Any::new(U8),
        length ~ U16be,
        // Tail layout depends on the already-(de)coded `version`:
        //  >2: 0xff marker then BE u32 `Foo`, present when length > 1
        //  >1: 0x00 marker then LE u32 `Foo`, present when length > 1
        //  else: no marker, LE u32 `Foo`, present when length > 0
        foo ~ if version > 2 {
            Tag::new(U8, 0xff);
            Optional::<Foo>::wrap(U32be).decode_when(|| length > 1)
        } else if version > 1{
            Tag::new(U8, 0x00);
            Optional::<Foo>::wrap(U32le).decode_when(|| length > 1)
        } else {
            Optional::<Foo>::wrap(U32le).decode_when(|| length > 0)
        }
    }
}
#[cfg(test)]
impl Header {
    /// Test-only convenience constructor.
    fn new(version: u8, length: u16, foo: Option<Foo>) -> Self {
        Self {
            version,
            length,
            foo,
        }
    }
}
#[test]
fn encode() {
    let mut buf = Vec::new();
    // version 3 takes the `> 2` branch: 0xff marker + BE u32 Foo::Hello.
    Encoder { magic: 0xcafebabe }
        .encode(&Header::new(3, 0x542, Some(Foo::Hello)), &mut buf)
        .unwrap();
    assert_eq!(
        &buf,
        // magic | version | pad byte | BE length | tag | Foo::Hello as BE u32
        &[0xca, 0xfe, 0xba, 0xbe, 0x03, 0x00, 0x05, 0x42, 0xff, 0x00, 0x00, 0xff, 0x00]
    );
}
#[test]
fn decode_valid() {
    // Mirror of `encode`: the same byte stream must round-trip back into
    // the same `Header`.
    let buf = vec![
        0xca, 0xfe, 0xba, 0xbe, 0x03, 0x00, 0x05, 0x42, 0xff, 0x00, 0x00, 0xff, 0x00,
    ];
    let t = Encoder { magic: 0xcafebabe }
        .decode(&mut buf.as_slice())
        .unwrap();
    assert_eq!(t, Header::new(3, 0x542, Some(Foo::Hello)));
}
#[test]
fn decode_invalid() {
    // version 3 requires a 0xff tag byte after `length`; this buffer has
    // 0x00 there, so decoding must fail with InvalidData.
    let buf = vec![
        0xca, 0xfe, 0xba, 0xbe, 0x03, 0x00, 0x05, 0x42, 0x00, 0x00, 0x09, 0x45,
    ];
    let err = Encoder { magic: 0xcafebabe }
        .decode(&mut buf.as_slice())
        .unwrap_err();
    // Fix: this file never imports `std::io`, so the bare `io::ErrorKind`
    // path did not resolve — use the fully qualified path.
    assert_eq!(err.kind(), std::io::ErrorKind::InvalidData);
}
|
use crate::{
core::Context,
custom_client::TwitchVideo,
database::MinimizedPp,
embeds::{osu, Author, EmbedBuilder, EmbedData, Footer},
error::PpError,
util::{
constants::{AVATAR_URL, TWITCH_BASE},
datetime::{how_long_ago_dynamic, HowLongAgoFormatterDynamic},
matcher::highlight_funny_numeral,
numbers::{round, with_comma_int},
osu::{grade_completion_mods, prepare_beatmap_file},
Emote, ScoreExt,
},
BotResult,
};
use chrono::{DateTime, Utc};
use rosu_pp::{
Beatmap as Map, BeatmapExt, CatchPP, DifficultyAttributes, ManiaPP, OsuPP,
PerformanceAttributes, TaikoPP,
};
use rosu_v2::prelude::{BeatmapUserScore, GameMode, Grade, Score, User};
use std::{borrow::Cow, fmt::Write};
/// Prepared data for a "recent score" Discord embed; built once in
/// `RecentEmbed::new` and rendered by the `EmbedData` impl below.
pub struct RecentEmbed {
    description: String,
    title: String,
    url: String,
    author: Author,
    footer: Footer,
    timestamp: DateTime<Utc>,
    thumbnail: String,
    stars: f32,
    // Combined grade/completion/mods summary shown in the "Grade" field.
    grade_completion_mods: Cow<'static, str>,
    score: String,
    acc: f32,
    ago: HowLongAgoFormatterDynamic,
    pp: Option<f32>,
    max_pp: Option<f32>,
    combo: String,
    hits: String,
    // (pp, rounded acc, hits string) for the hypothetical full combo.
    if_fc: Option<(f32, f32, String)>,
    map_info: String,
    mapset_cover: String,
    twitch_vod: Option<TwitchVideo>,
    minimized_pp: MinimizedPp,
}
impl RecentEmbed {
    /// Build the embed data for a user's recent score.
    ///
    /// Computes (or recomputes) pp and star values with `rosu_pp`, derives
    /// hypothetical-full-combo numbers, and assembles all display strings.
    /// `personal` / `map_score` are only used to detect whether the score
    /// is a personal best or a global top-50 placement.
    ///
    /// # Panics
    /// Expects `score.map` and `score.mapset` to be populated.
    pub async fn new(
        user: &User,
        score: &Score,
        personal: Option<&[Score]>,
        map_score: Option<&BeatmapUserScore>,
        twitch_vod: Option<TwitchVideo>,
        minimized_pp: MinimizedPp,
        ctx: &Context,
    ) -> BotResult<Self> {
        let map = score.map.as_ref().unwrap();
        let mapset = score.mapset.as_ref().unwrap();
        // Parse the locally cached .osu file for difficulty/pp calculation.
        let map_path = prepare_beatmap_file(ctx, map.map_id).await?;
        let rosu_map = Map::from_path(map_path).await.map_err(PpError::from)?;
        let mods = score.mods.bits();
        let max_result = rosu_map.max_pp(mods);
        let mut attributes = max_result.difficulty_attributes();
        // For an SS outside mania, the score's own pp already equals the max.
        let max_pp = score
            .pp
            .filter(|pp| {
                score.grade.eq_letter(Grade::X) && score.mode != GameMode::MNA && *pp > 0.0
            })
            .unwrap_or(max_result.pp() as f32);
        let stars = round(attributes.stars() as f32);
        let pp = if let Some(pp) = score.pp {
            pp
        } else if score.grade == Grade::F {
            // Failed score: compute pp over only the objects actually passed.
            let hits = score.total_hits() as usize;
            match map.mode {
                GameMode::STD => {
                    OsuPP::new(&rosu_map)
                        .mods(mods)
                        .combo(score.max_combo as usize)
                        .n300(score.statistics.count_300 as usize)
                        .n100(score.statistics.count_100 as usize)
                        .n50(score.statistics.count_50 as usize)
                        .misses(score.statistics.count_miss as usize)
                        .passed_objects(hits)
                        .calculate()
                        .pp as f32
                }
                GameMode::MNA => {
                    ManiaPP::new(&rosu_map)
                        .mods(mods)
                        .score(score.score)
                        .passed_objects(hits)
                        .calculate()
                        .pp as f32
                }
                GameMode::CTB => {
                    CatchPP::new(&rosu_map)
                        .mods(mods)
                        .combo(score.max_combo as usize)
                        .fruits(score.statistics.count_300 as usize)
                        .droplets(score.statistics.count_100 as usize)
                        .misses(score.statistics.count_miss as usize)
                        .passed_objects(hits - score.statistics.count_katu as usize)
                        .accuracy(score.accuracy as f64)
                        .calculate()
                        .pp as f32
                }
                GameMode::TKO => {
                    TaikoPP::new(&rosu_map)
                        .combo(score.max_combo as usize)
                        .mods(mods)
                        .passed_objects(hits)
                        .accuracy(score.accuracy as f64)
                        .calculate()
                        .pp as f32
                }
            }
        } else {
            // Score has no pp from the API (e.g. unranked): recompute it
            // from the full hit statistics, reusing the difficulty
            // attributes computed above.
            let pp_result: PerformanceAttributes = match map.mode {
                GameMode::STD => OsuPP::new(&rosu_map)
                    .attributes(attributes)
                    .mods(mods)
                    .combo(score.max_combo as usize)
                    .n300(score.statistics.count_300 as usize)
                    .n100(score.statistics.count_100 as usize)
                    .n50(score.statistics.count_50 as usize)
                    .misses(score.statistics.count_miss as usize)
                    .calculate()
                    .into(),
                GameMode::MNA => ManiaPP::new(&rosu_map)
                    .attributes(attributes)
                    .mods(mods)
                    .score(score.score)
                    .calculate()
                    .into(),
                GameMode::CTB => CatchPP::new(&rosu_map)
                    .attributes(attributes)
                    .mods(mods)
                    .combo(score.max_combo as usize)
                    .fruits(score.statistics.count_300 as usize)
                    .droplets(score.statistics.count_100 as usize)
                    .misses(score.statistics.count_miss as usize)
                    .accuracy(score.accuracy as f64)
                    .calculate()
                    .into(),
                GameMode::TKO => TaikoPP::new(&rosu_map)
                    .attributes(attributes)
                    .combo(score.max_combo as usize)
                    .mods(mods)
                    .misses(score.statistics.count_miss as usize)
                    .accuracy(score.accuracy as f64)
                    .calculate()
                    .into(),
            };
            let pp = pp_result.pp();
            // Keep the recomputed difficulty attributes for the FC estimate.
            attributes = pp_result.into();
            pp as f32
        };
        let (if_fc, _) = if_fc_struct(score, &rosu_map, attributes, mods);
        let pp = Some(pp);
        let max_pp = Some(max_pp);
        let hits = score.hits_string(map.mode);
        let grade_completion_mods = grade_completion_mods(score, map);
        // Mania shows a geki/300 ratio next to the combo and prefixes the
        // title with the key count.
        let (combo, title) = if map.mode == GameMode::MNA {
            let mut ratio = score.statistics.count_geki as f32;
            if score.statistics.count_300 > 0 {
                ratio /= score.statistics.count_300 as f32
            }
            let combo = format!("**{}x** / {:.2}", &score.max_combo, ratio);
            let title = format!(
                "{} {} - {} [{}]",
                osu::get_keys(score.mods, map),
                mapset.artist,
                mapset.title,
                map.version
            );
            (combo, title)
        } else {
            (
                osu::get_combo(score, map),
                format!("{} - {} [{}]", mapset.artist, mapset.title, map.version),
            )
        };
        // Render the hypothetical-FC hit counts as "{n300/n100[/n50]/0}".
        let if_fc = if_fc.map(|if_fc| {
            let mut hits = String::from("{");
            let _ = write!(hits, "{}/{}/", if_fc.n300, if_fc.n100);
            if let Some(n50) = if_fc.n50 {
                let _ = write!(hits, "{n50}/");
            }
            let _ = write!(hits, "0}}");
            (if_fc.pp, round(if_fc.acc), hits)
        });
        let footer = Footer::new(format!(
            "{:?} map by {} | played",
            map.status, mapset.creator_name
        ))
        .icon_url(format!("{AVATAR_URL}{}", mapset.creator_id));
        let personal_idx = personal.and_then(|personal| personal.iter().position(|s| s == score));
        // Only advertise a global placement within the top 50.
        let global_idx = map_score
            .and_then(|s| (&s.score == score).then(|| s.pos))
            .filter(|&p| p <= 50);
        let description = if personal_idx.is_some() || global_idx.is_some() {
            let mut description = String::with_capacity(25);
            description.push_str("__**");
            if let Some(idx) = personal_idx {
                let _ = write!(description, "Personal Best #{}", idx + 1);
                if global_idx.is_some() {
                    description.reserve(19);
                    description.push_str(" and ");
                }
            }
            if let Some(idx) = global_idx {
                let _ = write!(description, "Global Top #{idx}");
            }
            description.push_str("**__");
            description
        } else {
            String::new()
        };
        Ok(Self {
            description,
            title,
            url: map.url.to_owned(),
            author: author!(user),
            footer,
            timestamp: score.created_at,
            thumbnail: mapset.covers.list.to_owned(),
            grade_completion_mods,
            stars,
            score: with_comma_int(score.score).to_string(),
            acc: round(score.accuracy),
            ago: how_long_ago_dynamic(&score.created_at),
            pp,
            max_pp,
            combo,
            hits,
            map_info: osu::get_map_info(map, score.mods, stars),
            if_fc,
            mapset_cover: mapset.covers.cover.to_owned(),
            twitch_vod,
            minimized_pp,
        })
    }
}
impl EmbedData for RecentEmbed {
    /// Full (maximized) embed: one field per statistic, plus optional
    /// if-FC and twitch sections.
    fn as_builder(&self) -> EmbedBuilder {
        let score = highlight_funny_numeral(&self.score).into_owned();
        let acc = highlight_funny_numeral(&format!("{}%", self.acc)).into_owned();
        let pp = osu::get_pp(self.pp, self.max_pp);
        let pp = highlight_funny_numeral(&pp).into_owned();
        let mut fields = vec![
            field!(
                "Grade",
                self.grade_completion_mods.as_ref().to_owned(),
                true
            ),
            field!("Score", score, true),
            field!("Acc", acc, true),
            field!("PP", pp, true),
        ];
        // Reserve for the fixed fields plus the optional if-FC (3) and
        // twitch (2) sections.
        fields.reserve(
            3 + (self.if_fc.is_some() as usize) * 3 + (self.twitch_vod.is_some()) as usize * 2,
        );
        // A mania hits string has six slash-separated components.
        let mania = self.hits.chars().filter(|&c| c == '/').count() == 5;
        let combo = highlight_funny_numeral(&self.combo).into_owned();
        let hits = highlight_funny_numeral(&self.hits).into_owned();
        let name = if mania { "Combo / Ratio" } else { "Combo" };
        fields.push(field!(name, combo, true));
        fields.push(field!("Hits", hits, true));
        if let Some((pp, acc, hits)) = &self.if_fc {
            let pp = osu::get_pp(Some(*pp), self.max_pp);
            fields.push(field!("**If FC**: PP", pp, true));
            fields.push(field!("Acc", format!("{acc}%"), true));
            fields.push(field!("Hits", hits.clone(), true));
        }
        fields.push(field!("Map Info".to_owned(), self.map_info.clone(), false));
        if let Some(ref vod) = self.twitch_vod {
            let twitch_channel = format!(
                "[**{name}**]({base}{name})",
                base = TWITCH_BASE,
                name = vod.username
            );
            fields.push(field!("Live on twitch", twitch_channel, true));
            let vod_hyperlink = format!("[**VOD**]({})", vod.url);
            fields.push(field!("Liveplay of this score", vod_hyperlink, true));
        }
        EmbedBuilder::new()
            .author(&self.author)
            .description(&self.description)
            .fields(fields)
            .footer(&self.footer)
            .image(&self.mapset_cover)
            .timestamp(self.timestamp)
            .title(&self.title)
            .url(&self.url)
    }
    /// Minimized embed: everything condensed into a single field.
    fn into_builder(mut self) -> EmbedBuilder {
        let name = format!(
            "{}\t{}\t({}%)\t{}",
            self.grade_completion_mods, self.score, self.acc, self.ago
        );
        let pp = match self.minimized_pp {
            MinimizedPp::IfFc => {
                let mut result = String::with_capacity(17);
                result.push_str("**");
                if let Some(pp) = self.pp {
                    let _ = write!(result, "{:.2}", pp);
                } else {
                    result.push('-');
                }
                match self.if_fc {
                    Some((if_fc, ..)) => {
                        let _ = write!(result, "pp** ~~({if_fc:.2}pp)~~");
                    }
                    None => {
                        result.push_str("**/");
                        if let Some(max) = self.max_pp {
                            // NOTE(review): displays max(pp, max_pp) as the
                            // right-hand value — confirm intended over plain max_pp.
                            let pp = self.pp.map(|pp| pp.max(max)).unwrap_or(max);
                            let _ = write!(result, "{:.2}", pp);
                        } else {
                            result.push('-');
                        }
                        result.push_str("PP");
                    }
                }
                result
            }
            MinimizedPp::Max => osu::get_pp(self.pp, self.max_pp),
        };
        let value = format!("{pp} [ {} ] {}", self.combo, self.hits);
        let mut title = self.title;
        let _ = write!(title, " [{}★]", self.stars);
        let fields = vec![field!(name, value, false)];
        if let Some(ref vod) = self.twitch_vod {
            let _ = write!(
                self.description,
                " {} [Liveplay on twitch]({})",
                Emote::Twitch.text(),
                vod.url
            );
        }
        EmbedBuilder::new()
            .author(self.author)
            .description(self.description)
            .fields(fields)
            .thumbnail(self.thumbnail)
            .title(title)
            .url(self.url)
    }
}
/// Hypothetical full-combo statistics for a score.
pub struct IfFC {
    pub n300: usize,
    pub n100: usize,
    /// `None` for taiko, which has no third hit-value bucket.
    pub n50: Option<usize>,
    pub pp: f32,
    pub acc: f32,
}
/// Estimate the pp/accuracy a score would have achieved as a full combo.
///
/// Returns `Some(IfFC)` together with the (re-derived) difficulty
/// attributes when the score is "not an FC" for its mode, otherwise
/// `(None, attributes)` unchanged. Mania has no branch here and always
/// falls through to `None`.
pub fn if_fc_struct(
    score: &Score,
    map: &Map,
    attributes: DifficultyAttributes,
    mods: u32,
) -> (Option<IfFC>, DifficultyAttributes) {
    match attributes {
        DifficultyAttributes::Osu(attributes)
            if score.statistics.count_miss > 0
                || score.max_combo
                // Allowing one missed sliderend per 500 combo
                < (attributes.max_combo - (attributes.max_combo / 500).max(5)) as u32 =>
        {
            let total_objects = (map.n_circles + map.n_sliders + map.n_spinners) as usize;
            let passed_objects = (score.statistics.count_300
                + score.statistics.count_100
                + score.statistics.count_50
                + score.statistics.count_miss) as usize;
            // Treat every unreached object as a 300, then redistribute the
            // misses into 300s/100s proportionally to the observed hit ratio.
            let mut count300 =
                score.statistics.count_300 as usize + total_objects.saturating_sub(passed_objects);
            let count_hits = total_objects - score.statistics.count_miss as usize;
            let ratio = 1.0 - (count300 as f32 / count_hits as f32);
            let new100s = (ratio * score.statistics.count_miss as f32).ceil() as u32;
            count300 += score.statistics.count_miss.saturating_sub(new100s) as usize;
            let count100 = (score.statistics.count_100 + new100s) as usize;
            let count50 = score.statistics.count_50 as usize;
            let pp_result = OsuPP::new(map)
                .attributes(attributes)
                .mods(mods)
                .n300(count300)
                .n100(count100)
                .n50(count50)
                .calculate();
            // Standard osu! accuracy weighting: 300 -> 6, 100 -> 2, 50 -> 1.
            let acc =
                100.0 * (6 * count300 + 2 * count100 + count50) as f32 / (6 * total_objects) as f32;
            let if_fc = IfFC {
                n300: count300,
                n100: count100,
                n50: Some(count50),
                pp: pp_result.pp as f32,
                acc,
            };
            (Some(if_fc), pp_result.difficulty.into())
        }
        DifficultyAttributes::Catch(attributes)
            if score.max_combo != attributes.max_combo() as u32 =>
        {
            let total_objects = attributes.max_combo();
            let passed_objects = (score.statistics.count_300
                + score.statistics.count_100
                + score.statistics.count_miss) as usize;
            let missing = total_objects - passed_objects;
            // Split the unreached objects into fruits vs droplets based on
            // how many droplets remain uncaught.
            let missing_fruits = missing.saturating_sub(
                attributes
                    .n_droplets
                    .saturating_sub(score.statistics.count_100 as usize),
            );
            let missing_droplets = missing - missing_fruits;
            let n_fruits = score.statistics.count_300 as usize + missing_fruits;
            let n_droplets = score.statistics.count_100 as usize + missing_droplets;
            let n_tiny_droplet_misses = score.statistics.count_katu as usize;
            let n_tiny_droplets = attributes
                .n_tiny_droplets
                .saturating_sub(n_tiny_droplet_misses);
            let pp_result = CatchPP::new(map)
                .attributes(attributes)
                .mods(mods)
                .fruits(n_fruits)
                .droplets(n_droplets)
                .tiny_droplets(n_tiny_droplets)
                .tiny_droplet_misses(n_tiny_droplet_misses)
                .calculate();
            let hits = n_fruits + n_droplets + n_tiny_droplets;
            let total = hits + n_tiny_droplet_misses;
            let acc = if total == 0 {
                0.0
            } else {
                100.0 * hits as f32 / total as f32
            };
            let if_fc = IfFC {
                n300: n_fruits,
                n100: n_droplets,
                n50: Some(n_tiny_droplets),
                pp: pp_result.pp as f32,
                acc,
            };
            (Some(if_fc), pp_result.difficulty.into())
        }
        DifficultyAttributes::Taiko(attributes)
            if score.grade == Grade::F || score.statistics.count_miss > 0 =>
        {
            let total_objects = map.n_circles as usize;
            let passed_objects = score.total_hits() as usize;
            // Same redistribution idea as the osu! branch, but taiko only
            // has 300s and 100s.
            let mut count300 =
                score.statistics.count_300 as usize + total_objects.saturating_sub(passed_objects);
            let count_hits = total_objects - score.statistics.count_miss as usize;
            let ratio = 1.0 - (count300 as f32 / count_hits as f32);
            let new100s = (ratio * score.statistics.count_miss as f32).ceil() as u32;
            count300 += score.statistics.count_miss.saturating_sub(new100s) as usize;
            let count100 = (score.statistics.count_100 + new100s) as usize;
            let acc = 100.0 * (2 * count300 + count100) as f32 / (2 * total_objects) as f32;
            let pp_result = TaikoPP::new(map)
                .attributes(attributes)
                .mods(mods)
                .accuracy(acc as f64)
                .calculate();
            let if_fc = IfFC {
                n300: count300,
                n100: count100,
                n50: None,
                pp: pp_result.pp as f32,
                acc,
            };
            (Some(if_fc), pp_result.difficulty.into())
        }
        _ => (None, attributes),
    }
}
|
mod expull;
mod memory_arena;
mod murmurhash2;
mod term_hashmap;
pub use self::expull::ExpUnrolledLinkedList;
pub use self::memory_arena::{Addr, ArenaStorable, MemoryArena};
use self::murmurhash2::murmurhash2;
pub use self::term_hashmap::{compute_table_size, TermHashMap};
|
/*!
```rudra-poc
[target]
crate = "rdiff"
version = "0.1.2"
[report]
issue_url = "https://github.com/dyule/rdiff/issues/3"
issue_date = 2021-02-03
rustsec_url = "https://github.com/RustSec/advisory-db/pull/862"
rustsec_id = "RUSTSEC-2021-0094"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "HigherOrderInvariant"
rudra_report_locations = ["src/window.rs:8:5: 27:6"]
```
!*/
#![forbid(unsafe_code)]
use rdiff::BlockHashes;
use std::io::{Cursor, Read};
/// Adversarial reader for the rdiff PoC: its `Read` impl reports having
/// read more bytes than the buffer can hold.
struct MyRead {
    // Set once the first (lying) read has been served.
    first: bool,
}
impl MyRead {
pub fn new() -> Self {
MyRead { first: false }
}
}
impl Read for MyRead {
    /// Deliberately violates the `Read` contract: the first call claims
    /// 256 bytes were read without writing any, which an unsound consumer
    /// (here: rdiff, see the rudra-poc header) may use to read beyond the
    /// initialized portion of its buffer.
    fn read(&mut self, _buf: &mut [u8]) -> std::io::Result<usize> {
        if !self.first {
            self.first = true;
            // First iteration: return more than the buffer size
            Ok(256)
        } else {
            // Second iteration: indicate that we are done
            Ok(0)
        }
    }
}
/// Proof-of-concept driver: feed the contract-violating reader into
/// `BlockHashes::diff_and_update` to trigger the reported bug
/// (RUSTSEC-2021-0094).
fn main() {
    let mut hashes = BlockHashes::new(Cursor::new("Hello"), 32).unwrap();
    let diff = hashes.diff_and_update(MyRead::new()).unwrap();
    for insert in diff.inserts() {
        println!("{:?}", insert);
    }
}
|
use std::ops::Deref;
use std::rc::Rc;
use std::cell::Cell;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use servo::script::dom::document::Document;
use servo::script::dom::bindings::str::DOMString;
use servo::script::dom::eventtarget::EventTarget;
use servo::script::dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use servo::script::dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use servo::script::dom::bindings::inheritance::Castable;
use servo::script::dom::node::Node;
use servo::script::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use servo::script::dom::bindings::root::DomRoot;
use servo::script::dom::bindings::codegen::Bindings::DocumentBinding::ElementCreationOptions;
use servo::script::dom::element::Element;
use servo::script::script_thread::ION_APPLICATION_FRAME_CALLBACK;
pub use servo::script::dom::eventtarget::RustEventHandler;
thread_local!(pub static RENDER: Cell<Option<fn()->Option<HtmlElement>>> = Cell::new(None));
/// Shorthand: convert any `ToString` value into a servo `DOMString`.
fn ds<T>(value: T) -> DOMString where T: ToString { DOMString::from_string(value.to_string()) }
/// Lightweight virtual-DOM node: a declarative description of an element
/// that `make_tree` reconciles into the real servo DOM.
pub struct HtmlElement {
    // Either "unique_key_<k>" or a generated counter id (see `new`/`gen_id`).
    id: String,
    tag: String,
    text: String,
    class: String,
    style: String,
    // Event name -> handler; re-attached on every render (see `make_tree`).
    listeners: HashMap<String, RustEventHandler>,
    children: Vec<HtmlElement>
}
impl fmt::Debug for HtmlElement {
    /// Debug output as an `(id, tag)` pair.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Fix: borrow the fields instead of cloning both Strings just to
        // format them — `Debug` for `&String` renders identically.
        (&self.id, &self.tag).fmt(f)
    }
}
impl HtmlElement {
    /// Next value of a thread-local monotonically increasing id counter,
    /// used when the caller supplies no unique key.
    fn gen_id() -> u32 {
        thread_local!(static ID_COUNTER: RefCell<u32> = RefCell::new(0));
        ID_COUNTER.with(|root| {
            let val = *root.borrow() + 1;
            *root.borrow_mut() = val;
            val
        })
    }
    /// Read the current value of the textarea with the given DOM id.
    ///
    /// # Panics
    /// Panics if the id is absent or the element is not a `<textarea>`.
    pub fn get_dom_element_value(id: &String, doc: &Document) -> String {
        use servo::script::dom::htmltextareaelement::HTMLTextAreaElement;
        use servo::script::dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
        let elem_ptr = doc.GetElementById(ds(id)).unwrap();
        elem_ptr.deref().downcast::<HTMLTextAreaElement>()
            .expect("Cannot get element value on non-textarea element")
            .Value().to_string()
    }
    /// Set the value of the element with the given DOM id, silently doing
    /// nothing when it is not a `<textarea>`.
    ///
    /// # Panics
    /// Panics if no element with that id exists.
    pub fn try_set_dom_element_value(id: &String, doc: &Document, value: String) {
        use servo::script::dom::htmltextareaelement::HTMLTextAreaElement;
        use servo::script::dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
        let elem_ptr = doc.GetElementById(ds(id)).unwrap();
        if let Some(elem) = elem_ptr.deref().downcast::<HTMLTextAreaElement>() {
            elem.SetValue(ds(value))
        }
    }
    /// Build a virtual node. A `Some(unique_key)` produces a stable
    /// "unique_key_<k>" id (enabling DOM reuse across renders); `None`
    /// falls back to a fresh counter id.
    pub fn new<T: ToString, U: ToString, V: ToString, W: ToString, X: ToString>(unique_key: Option<T>, tag: U, text: V, class: W, style: X,
               listeners: HashMap<String, RustEventHandler>,
               children: Vec<HtmlElement>) -> HtmlElement {
        let id = match unique_key {
            Some(k) => format!("unique_key_{}", k.to_string()),
            _ => Self::gen_id().to_string(),
        };
        HtmlElement {
            id,
            tag: tag.to_string(),
            text: text.to_string(),
            class: class.to_string(),
            style: style.to_string(),
            listeners,
            children,
        }
    }
    /// Materialize this tree and install it as the sole child of `<body>`,
    /// replacing whatever was rendered previously.
    fn render_to_dom_as_root(&self, doc: &Document) {
        let body_collection = doc.GetElementsByTagName(ds("body"));
        let body_ptr = body_collection.elements_iter().last().unwrap();
        {
            let body_node: &Node = body_ptr.deref().upcast::<Node>();
            let new_node = &DomRoot::upcast(self.make_tree(doc));
            match body_node.GetFirstChild() {
                Some(ref child) => body_node.ReplaceChild(new_node, child).unwrap(),
                None => body_node.AppendChild(new_node).unwrap(),
            };
        }
    }
    /// Reconcile this virtual node into a real DOM element: reuse the
    /// existing element when id and tag match (clearing its listeners),
    /// otherwise create a fresh one; then (re)apply attributes, text,
    /// listeners and children recursively.
    fn make_tree(&self, doc: &Document) -> DomRoot<Element> {
        let has_valid_elem = match doc.GetElementById(ds(self.id.clone())) {
            Some(ref dom_elem) if dom_elem.deref().TagName().to_string() == self.tag.to_uppercase() => true,
            _ => false
        };
        let dom_elem: DomRoot<Element> = if has_valid_elem {
            let elem_ptr = doc.GetElementById(ds(self.id.clone())).unwrap();
            Self::try_set_dom_element_value(&self.id, doc, self.text.clone());
            {
                // Listeners are re-added below, so drop the stale ones first.
                let node: &EventTarget = elem_ptr.upcast::<EventTarget>();
                node.remove_all_listeners();
            }
            elem_ptr
        } else {
            doc.CreateElement(DOMString::from_string(self.tag.clone()),
                              unsafe { &ElementCreationOptions::empty(doc.window().get_cx()) }).unwrap()
        };
        dom_elem.deref().SetId(ds(self.id.clone()));
        dom_elem.deref().SetAttribute(ds("style"), ds(self.style.clone())).unwrap();
        dom_elem.deref().SetClassName(ds(self.class.clone()));
        dom_elem.deref().upcast::<Node>().SetTextContent(Some(ds(self.text.clone())));
        for (event, listener) in &self.listeners {
            let node: &EventTarget = dom_elem.upcast::<EventTarget>();
            node.add_event_handler_rust(ds(event), listener.clone());
        }
        for child in &self.children {
            let dom_child = child.make_tree(doc);
            dom_elem.upcast::<Node>().AppendChild(&DomRoot::upcast(dom_child)).unwrap();
        }
        dom_elem
    }
    /// The node's DOM id.
    pub fn get_id(&self) -> String {
        self.id.clone()
    }
    /// Register the same handler for each of the given event names.
    pub fn add_listener<T: ToString>(&mut self, event: Vec<T>, listener: RustEventHandler) {
        for e in event {
            self.listeners.insert(e.to_string(), listener.clone());
        }
    }
}
/// Per-frame hook: invoke the registered render function and, when it
/// yields a new tree, reconcile it into the document body.
fn frame_callback(doc: &Document) {
    RENDER.with(|root| {
        let render = root
            .get()
            .expect("Frame callback should not be set before html::RENDER is");
        // Idiom fix: `if let` replaces a `match` with an empty `None` arm.
        if let Some(elem) = render() {
            elem.render_to_dom_as_root(doc);
        }
    });
}
/// Entry-point wiring: on the window's `load` event, install
/// `frame_callback` as the application frame callback.
///
/// # Panics
/// Asserts that no frame callback was installed before.
pub fn app_main(doc: &Document) {
    let window = doc.window();
    window.deref().upcast::<EventTarget>().add_event_handler_rust(ds("load"), RustEventHandler {
        handler: Rc::new( |_, _| {
            ION_APPLICATION_FRAME_CALLBACK.with(|root| {
                assert!(root.get().is_none());
                root.set(Some(frame_callback))
            });
        })
    });
}
/// Generates the app bootstrap: a private "render if state changed"
/// probe plus a public setup function that installs it into
/// `html::RENDER`. Must be invoked at most once per file.
#[macro_export]
macro_rules! make_app_setup {
    (pub fn $app_setup_name:ident() app_thread_state = $app_state_thread_local_name:ident, render = $render:ident) => {
        // Returns Some(tree) only when the app state was marked changed;
        // also clears the dirty flag as a side effect.
        fn _do_not_use_make_app_setup_twice_in_one_file() -> Option<HtmlElement> {
            let has_changed = $app_state_thread_local_name.with(|root| {
                let val = root.borrow().has_changed;
                root.borrow_mut().has_changed = false;
                val
            });
            if !has_changed { return None };
            Some($app_state_thread_local_name.with(|state| $render(&*state.borrow())))
        }
        pub fn $app_setup_name() {
            use html::RENDER;
            RENDER.with(|root| {
                // Installing twice is a programming error.
                assert!(root.get().is_none());
                root.set(Some(_do_not_use_make_app_setup_twice_in_one_file))
            });
        }
    }
}
mod error;
mod utils;
mod x509;
use crate::{
endpoints::params::DeleteParams, service::error::PostgresManagementServiceError, utils::epoch,
};
use actix_web::ResponseError;
use async_trait::async_trait;
use chrono::Utc;
use core::pin::Pin;
use deadpool_postgres::{Pool, Transaction};
use drogue_client::{registry, Translator};
use drogue_cloud_database_common::{
auth::{ensure, ensure_with},
error::ServiceError,
models::{
self,
app::{ApplicationAccessor, PostgresApplicationAccessor},
device::{DeviceAccessor, PostgresDeviceAccessor},
diff::diff_paths,
outbox::PostgresOutboxAccessor,
Generation, Lock, TypedAlias,
},
Client, DatabaseService,
};
use drogue_cloud_registry_events::{Event, EventSender, EventSenderError, SendEvent};
use drogue_cloud_service_api::{
auth::user::UserInformation,
health::{HealthCheckError, HealthChecked},
labels::LabelSelector,
};
use futures::{future, Stream, TryStreamExt};
use serde::Deserialize;
use serde_json::json;
use std::collections::HashSet;
use tokio_postgres::{error::SqlState, NoTls};
use uuid::Uuid;
/// Management API over applications and devices in the device registry.
///
/// Implementations persist the entities and publish change events. Every
/// operation is authorized against the calling `identity`.
#[async_trait]
pub trait ManagementService: Clone {
    /// Error type surfaced to the HTTP layer.
    type Error: ResponseError;
    /// Create a new application.
    async fn create_app(
        &self,
        identity: &UserInformation,
        data: registry::v1::Application,
    ) -> Result<(), Self::Error>;
    /// Look up an application by name; `Ok(None)` when it does not exist.
    async fn get_app(
        &self,
        identity: &UserInformation,
        name: &str,
    ) -> Result<Option<registry::v1::Application>, Self::Error>;
    /// Stream all applications visible to `identity`, filtered by `labels`,
    /// with optional paging via `limit`/`offset`.
    async fn list_apps(
        &self,
        identity: UserInformation,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Application, Self::Error>> + Send>>,
        Self::Error,
    >;
    /// Update an existing application (optimistic-locked via resource version).
    async fn update_app(
        &self,
        identity: &UserInformation,
        data: registry::v1::Application,
    ) -> Result<(), Self::Error>;
    /// Delete an application, honoring the preconditions in `params`.
    async fn delete_app(
        &self,
        identity: &UserInformation,
        name: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error>;
    /// Create a new device inside its application.
    async fn create_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error>;
    /// Look up a device by application and name; `Ok(None)` when absent.
    async fn get_device(
        &self,
        identity: &UserInformation,
        app: &str,
        name: &str,
    ) -> Result<Option<registry::v1::Device>, Self::Error>;
    /// Stream the devices of one application, filtered by `labels`, with
    /// optional paging via `limit`/`offset`.
    async fn list_devices(
        &self,
        identity: UserInformation,
        app: &str,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Device, Self::Error>> + Send>>,
        Self::Error,
    >;
    /// Update an existing device (optimistic-locked via resource version).
    async fn update_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error>;
    /// Delete a device, honoring the preconditions in `params`.
    async fn delete_device(
        &self,
        identity: &UserInformation,
        app: &str,
        name: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error>;
}
/// Configuration for [`PostgresManagementService`].
#[derive(Clone, Debug, Deserialize)]
pub struct PostgresManagementServiceConfig {
    /// Connection-pool configuration of the backing Postgres database.
    pub pg: deadpool_postgres::Config,
    /// Identifier of this service instance; stamped on every emitted event.
    pub instance: String,
}
// Expose the connection pool so the shared `DatabaseService` helpers
// (e.g. the readiness check used below) can run against this database.
impl<S> DatabaseService for PostgresManagementService<S>
where
    S: EventSender + Clone,
{
    fn pool(&self) -> &Pool {
        &self.pool
    }
}
// Health endpoint integration: readiness is delegated to the database pool.
#[async_trait::async_trait]
impl<S> HealthChecked for PostgresManagementService<S>
where
    S: EventSender + Clone,
{
    /// Readiness probe: succeeds when the database connection pool is usable.
    async fn is_ready(&self) -> Result<(), HealthCheckError> {
        // Map the database error directly instead of the original's
        // redundant `Ok(expr.map_err(..)?)` round-trip (clippy: needless_question_mark).
        DatabaseService::is_ready(self)
            .await
            .map_err(HealthCheckError::from)
    }
}
/// Postgres-backed implementation of [`ManagementService`].
///
/// Persists entities in Postgres and publishes change events through `S`,
/// using a transactional outbox for reliability.
#[derive(Clone)]
pub struct PostgresManagementService<S>
where
    S: EventSender + Clone,
{
    // Database connection pool (shared; `Clone` is cheap).
    pool: Pool,
    // Sink for change events, flushed after a successful commit.
    sender: S,
    // Instance identifier attached to every emitted event.
    instance: String,
}
impl<S> PostgresManagementService<S>
where
S: EventSender + Clone,
{
pub fn new(config: PostgresManagementServiceConfig, sender: S) -> anyhow::Result<Self> {
Ok(Self {
pool: config.pg.create_pool(NoTls)?,
instance: config.instance,
sender,
})
}
fn app_to_entity(
mut app: registry::v1::Application,
) -> Result<
(models::app::Application, HashSet<TypedAlias>),
PostgresManagementServiceError<S::Error>,
> {
// extract aliases
let mut aliases = HashSet::with_capacity(1);
aliases.insert(TypedAlias("name".into(), app.metadata.name.clone()));
// extract trust anchors
match app.section::<registry::v1::ApplicationSpecTrustAnchors>() {
Some(Ok(anchors)) => {
log::debug!("Anchors: {:?}", anchors);
let status = x509::process_anchors(anchors)?;
// add aliases
aliases.extend(status.1);
// inject status section
app.status.insert(
"trustAnchors".into(),
serde_json::to_value(status.0)
.map_err(|err| ServiceError::BadRequest(err.to_string()))?,
);
}
r => log::debug!("No-anchors: {:?}", r),
}
// convert payload
let app = models::app::Application {
name: app.metadata.name,
uid: Uuid::nil(), // will be set internally
labels: app.metadata.labels,
annotations: app.metadata.annotations,
generation: 0, // will be set internally
creation_timestamp: epoch(), // will be set internally
resource_version: Uuid::nil(), // will be set internally
deletion_timestamp: None, // will be set internally
finalizers: app.metadata.finalizers,
owner: None, // will be set internally
data: json!({
"spec": app.spec,
"status": app.status,
}),
};
// return result
Ok((app, aliases))
}
fn device_to_entity(
device: registry::v1::Device,
) -> Result<
(models::device::Device, HashSet<TypedAlias>),
PostgresManagementServiceError<S::Error>,
> {
// extract aliases
let mut aliases = HashSet::new();
aliases.insert(TypedAlias("name".into(), device.metadata.name.clone()));
if let Some(Ok(credentials)) = device.section::<registry::v1::DeviceSpecCredentials>() {
for credential in credentials.credentials {
match credential {
registry::v1::Credential::UsernamePassword {
username, unique, ..
} if unique => {
aliases.insert(TypedAlias("username".into(), username));
}
_ => {}
}
}
}
// convert payload
let device = models::device::Device {
name: device.metadata.name,
uid: Uuid::nil(), // will be set internally
application: device.metadata.application,
labels: device.metadata.labels,
annotations: device.metadata.annotations,
creation_timestamp: epoch(), // will be set internally
generation: 0, // will be set internally
resource_version: Uuid::nil(), // will be set internally
deletion_timestamp: None, // will be set internally
finalizers: device.metadata.finalizers,
data: json!({
"spec": device.spec,
"status": device.status,
}),
};
// return result
Ok((device, aliases))
}
/// Perform the operation of updating an application
async fn perform_update_app<S1, S2>(
&self,
t: &Transaction<'_>,
identity: Option<&UserInformation>,
mut app: models::app::Application,
aliases: Option<HashSet<TypedAlias>>,
expected_uid: S1,
expected_resource_version: S2,
) -> Result<Vec<Event>, PostgresManagementServiceError<S::Error>>
where
S1: AsRef<str>,
S2: AsRef<str>,
{
let accessor = PostgresApplicationAccessor::new(t);
// get current state for diffing
let current = match accessor.get(&app.name, Lock::ForUpdate).await? {
Some(app) => Ok(app),
None => Err(ServiceError::NotFound),
}?;
if let Some(identity) = identity {
ensure(¤t, identity)?;
}
utils::check_versions(expected_uid, expected_resource_version, ¤t)?;
// we simply copy over the deletion timestamp
app.deletion_timestamp = current.deletion_timestamp;
if app.deletion_timestamp.is_some() && app.finalizers.is_empty() {
// delete, but don't send any event
accessor.delete(&app.name).await?;
Ok(vec![])
} else {
// check which paths changed
let paths = diff_paths(¤t, &app);
if paths.is_empty() {
// there was no change
return Ok(vec![]);
}
// next generation
let generation = app.next_generation(¤t)?;
let name = app.name.clone();
let uid = app.uid;
// update
accessor
.update(app, aliases)
.await
.map_err(|err| match err.sql_state() {
Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
ServiceError::Conflict("Unique key violation".to_string())
}
_ => err,
})?;
// send change event
Ok(Event::new_app(
self.instance.clone(),
name,
uid,
generation,
paths,
))
}
}
/// Called when a device was deleted, so check if the application can be garbage collected.
async fn check_clean_app(
&self,
t: &Transaction<'_>,
app_id: &str,
) -> Result<(), PostgresManagementServiceError<S::Error>> {
let app = PostgresApplicationAccessor::new(t)
.get(app_id, Lock::ForUpdate)
.await?;
let mut app = if let Some(app) = app {
app
} else {
// device without an app, shouldn't happen, but don't need to do anything anyways.
return Ok(());
};
if app.deletion_timestamp.is_none() {
// device got deleted, but the app is not
return Ok(());
}
// check how many devices remain
let count = PostgresDeviceAccessor::new(t).count_devices(app_id).await?;
if count > 0 {
// there are still devices left.
return Ok(());
}
// we removed the last of the devices blocking the deletion
app.finalizers.retain(|f| f != "has-devices");
self.perform_update_app(t, None, app, None, "", "").await?;
// done
Ok(())
}
fn outbox_err<E>(err: EventSenderError<ServiceError>) -> PostgresManagementServiceError<E>
where
E: std::error::Error + std::fmt::Debug + 'static,
{
match err {
EventSenderError::Sender(err) => PostgresManagementServiceError::Service(err),
EventSenderError::CloudEvent(err) => {
PostgresManagementServiceError::EventSender(EventSenderError::CloudEvent(err))
}
EventSenderError::Event(err) => {
PostgresManagementServiceError::EventSender(EventSenderError::Event(err))
}
}
}
async fn send_to_outbox<'c, C: Client, E>(
client: &C,
events: &[Event],
) -> Result<(), PostgresManagementServiceError<E>>
where
E: std::error::Error + std::fmt::Debug + 'static,
{
// send events to outbox
events
.to_vec()
.send_with(&PostgresOutboxAccessor::new(client))
.await
.map_err(Self::outbox_err)
}
}
// NOTE: several `&current` / `&params` borrows in this impl were
// mojibake-corrupted in the original (`¤t`, `¶ms.` — an HTML-entity
// decoding artifact) and are restored below.
#[async_trait]
impl<S> ManagementService for PostgresManagementService<S>
where
    S: EventSender + Clone,
{
    type Error = PostgresManagementServiceError<S::Error>;
    /// Creates a new application owned by `identity`, assigning a fresh UID.
    async fn create_app(
        &self,
        identity: &UserInformation,
        application: registry::v1::Application,
    ) -> Result<(), Self::Error> {
        let (mut app, aliases) = Self::app_to_entity(application)?;
        let generation = app.generation;
        let name = app.name.clone();
        // assign a new UID
        let uid = Uuid::new_v4();
        app.uid = uid;
        app.owner = identity.user_id().map(Into::into);
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        PostgresApplicationAccessor::new(&t)
            .create(app, aliases)
            .await
            .map_err(|err| match err.sql_state() {
                // duplicate name/alias -> 409 instead of a raw database error
                Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
                    ServiceError::Conflict("Unique key violation".to_string())
                }
                _ => err,
            })?;
        let events = Event::new_app(self.instance.clone(), name, uid, generation, vec![]);
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        // commit
        t.commit().await?;
        // send change events
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
    /// Fetches one application by name; `Ok(None)` when absent.
    async fn get_app(
        &self,
        identity: &UserInformation,
        name: &str,
    ) -> Result<Option<registry::v1::Application>, Self::Error> {
        let c = self.pool.get().await?;
        let app = PostgresApplicationAccessor::new(&c)
            .get(name, Lock::None)
            .await?;
        if let Some(app) = &app {
            ensure(app, identity)?;
        }
        Ok(app.map(Into::into))
    }
    /// Streams the applications visible to `identity`.
    async fn list_apps(
        &self,
        identity: UserInformation,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Application, Self::Error>> + Send>>,
        Self::Error,
    > {
        let c = self.pool.get().await?;
        Ok(Box::pin(
            PostgresApplicationAccessor::new(&c)
                .list(None, labels, limit, offset, Some(&identity), Lock::None)
                .await?
                .try_filter_map(move |app| {
                    // Using ensure call here is just a safeguard! The list operation must only return
                    // entries the user has access to. Otherwise the limit/offset functionality
                    // won't work
                    let result = match ensure(&app, &identity) {
                        Ok(_) => Some(app.into()),
                        Err(_) => None,
                    };
                    future::ready(Ok(result))
                })
                .map_err(PostgresManagementServiceError::Service)
                .into_stream(),
        ))
    }
    /// Updates an application, emitting change events on success.
    async fn update_app(
        &self,
        identity: &UserInformation,
        application: registry::v1::Application,
    ) -> Result<(), Self::Error> {
        let expected_uid = application.metadata.uid.clone();
        let expected_resource_version = application.metadata.resource_version.clone();
        let (app, aliases) = Self::app_to_entity(application)?;
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let events = self
            .perform_update_app(
                &t,
                Some(identity),
                app,
                Some(aliases),
                expected_uid,
                expected_resource_version,
            )
            .await?;
        Self::send_to_outbox(&t, &events).await?;
        t.commit().await?;
        // send events
        events.send_with(&self.sender).await?;
        Ok(())
    }
    /// Deletes an application; devices with finalizers keep it soft-deleted
    /// (via the `has-devices` finalizer) until they are all gone.
    async fn delete_app(
        &self,
        identity: &UserInformation,
        id: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // get current state for diffing
        let mut current = match accessor.get(id, Lock::ForUpdate).await? {
            Some(app) => Ok(app),
            None => Err(ServiceError::NotFound),
        }?;
        // already soft-deleted applications report as "not found"
        if current.deletion_timestamp.is_some() {
            return Err(ServiceError::NotFound.into());
        }
        //
        ensure(&current, identity)?;
        utils::check_preconditions(&params.preconditions, &current)?;
        // there is no need to use the provided constraints, as we locked the entry "for update"
        // next, we need to delete the application
        // first, delete all devices ...
        let remaining_devices = PostgresDeviceAccessor::new(&t).delete_app(id).await?;
        // ... and count the ones we can only soft-delete
        if remaining_devices > 0 {
            // we have pending device deletions, so add the finalizer
            current.finalizers.push("has-devices".into());
        }
        // next generation
        let generation = current.set_next_generation()?;
        let uid = current.uid;
        // if there are no finalizers ...
        let paths = if current.finalizers.is_empty() {
            // ... delete the application
            accessor.delete(id).await?;
            // notify an object change
            vec![]
        } else {
            // ... otherwise, mark the application deleted
            log::debug!("Pending finalizers: {:?}", current.finalizers);
            // update deleted timestamp
            current.deletion_timestamp = Some(Utc::now());
            // update the record
            accessor.update(current, None).await?;
            // notify a resource change
            vec![".metadata".into()]
        };
        // create events
        let events = Event::new_app(self.instance.clone(), id, uid, generation, paths);
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        // commit
        t.commit().await?;
        // send change event
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
    /// Creates a device inside its application, assigning a fresh UID.
    async fn create_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error> {
        let (mut device, aliases) = Self::device_to_entity(device)?;
        let generation = device.generation;
        let application = device.application.clone();
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        // lock the application `FOR SHARE` so it cannot be deleted concurrently
        let app = PostgresApplicationAccessor::new(&t)
            .get(&application, Lock::ForShare)
            .await?;
        // if there is no entry, or it is marked for deletion, we don't allow adding a new device
        let app = match app {
            Some(app) if app.deletion_timestamp.is_none() => app,
            _ => return Err(ServiceError::ReferenceNotFound.into()),
        };
        // ensure we have access to the application, but don't confirm the device if we don't
        ensure_with(&app, identity, || ServiceError::ReferenceNotFound)?;
        let name = device.name.clone();
        // assign a new UID
        let uid = Uuid::new_v4();
        device.uid = uid;
        // create the device
        PostgresDeviceAccessor::new(&t)
            .create(device, aliases)
            .await
            .map_err(|err| match err.sql_state() {
                Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
                    ServiceError::Conflict("Unique key violation".to_string())
                }
                Some(state) if state == &SqlState::FOREIGN_KEY_VIOLATION => {
                    ServiceError::ReferenceNotFound
                }
                _ => err,
            })?;
        // create and persist events
        let events = Event::new_device(
            self.instance.clone(),
            application,
            name,
            uid,
            generation,
            vec![],
        );
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        t.commit().await?;
        // send change events
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
    /// Fetches one device; access to the application is required.
    async fn get_device(
        &self,
        identity: &UserInformation,
        app_id: &str,
        device_id: &str,
    ) -> Result<Option<registry::v1::Device>, Self::Error> {
        let c = self.pool.get().await?;
        let app = PostgresApplicationAccessor::new(&c)
            .get(app_id, Lock::None)
            .await?
            .ok_or(ServiceError::NotFound)?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&app, identity, || ServiceError::NotFound)?;
        let device = PostgresDeviceAccessor::new(&c)
            .get(app_id, device_id, Lock::None)
            .await?;
        Ok(device.map(Into::into))
    }
    /// Streams the devices of one application.
    async fn list_devices(
        &self,
        identity: UserInformation,
        app_id: &str,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Device, Self::Error>> + Send>>,
        Self::Error,
    > {
        let c = self.pool.get().await?;
        let app = PostgresApplicationAccessor::new(&c)
            .get(app_id, Lock::None)
            .await?
            .ok_or(ServiceError::NotFound)?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&app, &identity, || ServiceError::NotFound)?;
        Ok(Box::pin(
            PostgresDeviceAccessor::new(&c)
                .list(app_id, None, labels, limit, offset, Lock::None)
                .await?
                .map_ok(|device| device.into())
                .map_err(PostgresManagementServiceError::Service)
                .into_stream(),
        ))
    }
    /// Updates a device; removing the last finalizer of a soft-deleted device
    /// hard-deletes it and may garbage-collect its application.
    async fn update_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error> {
        let expected_resource_version = device.metadata.resource_version.clone();
        let expected_uid = device.metadata.uid.clone();
        let (mut device, aliases) = Self::device_to_entity(device)?;
        let application = device.application.clone();
        let name = device.name.clone();
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        let current = match accessor.get(&application, Lock::None).await? {
            Some(app) => Ok(app),
            None => Err(ServiceError::NotFound),
        }?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&current, identity, || ServiceError::NotFound)?;
        let accessor = PostgresDeviceAccessor::new(&t);
        // get current state for diffing
        let current = match accessor.get(&application, &name, Lock::ForUpdate).await? {
            Some(device) => Ok(device),
            None => Err(ServiceError::NotFound),
        }?;
        // pre-check versions
        utils::check_versions(expected_uid, expected_resource_version, &current)?;
        // we simply copy over the deletion timestamp
        device.deletion_timestamp = current.deletion_timestamp;
        if device.deletion_timestamp.is_some() && device.finalizers.is_empty() {
            // delete, but don't send any event
            accessor.delete(&application, &name).await?;
            // check with the application
            self.check_clean_app(&t, &application).await?;
            t.commit().await?;
        } else {
            // check which paths changed
            let paths = diff_paths(&current, &device);
            if paths.is_empty() {
                // there was no change; dropping `t` rolls back the (read-only) transaction
                return Ok(());
            }
            let generation = device.next_generation(&current)?;
            let uid = current.uid;
            accessor
                .update(device, Some(aliases))
                .await
                .map_err(|err| match err.sql_state() {
                    Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
                        ServiceError::Conflict("Unique key violation".to_string())
                    }
                    _ => err,
                })?;
            // create events
            let events = Event::new_device(
                self.instance.clone(),
                application,
                name,
                uid,
                generation,
                paths,
            );
            // send events to outbox
            Self::send_to_outbox(&t, &events).await?;
            // commit
            t.commit().await?;
            // send change event
            events.send_with(&self.sender).await?;
        }
        // done
        Ok(())
    }
    /// Deletes a device (soft-deleting while finalizers remain).
    async fn delete_device(
        &self,
        identity: &UserInformation,
        application: &str,
        device: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresDeviceAccessor::new(&t);
        // get current state for diffing
        let mut current = match accessor.get(application, device, Lock::ForUpdate).await? {
            Some(device) => Ok(device),
            None => Err(ServiceError::NotFound),
        }?;
        if current.deletion_timestamp.is_some() {
            return Err(ServiceError::NotFound.into());
        }
        // check if the user has access to the device, we can do this after some initial checks
        // that would return "not found" anyway.
        // Instead of "no access" we return "not found" here, as we don't want users that don't
        // have access to application to probe for devices.
        let app = PostgresApplicationAccessor::new(&t)
            .get(application, Lock::None)
            .await?
            .ok_or(ServiceError::NotFound)?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&app, identity, || ServiceError::NotFound)?;
        // check the preconditions
        utils::check_preconditions(&params.preconditions, &current)?;
        // there is no need to use the provided constraints, as we locked the entry "for update"
        // next generation
        let generation = current.set_next_generation()?;
        let uid = current.uid;
        // if there are no finalizers ...
        let path = if current.finalizers.is_empty() {
            // ... we can directly delete
            accessor.delete(application, device).await?;
            vec![]
        } else {
            // ... otherwise, mark the device deleted
            log::debug!("Pending finalizers: {:?}", current.finalizers);
            // update deleted timestamp
            current.deletion_timestamp = Some(Utc::now());
            // update the record
            accessor.update(current, None).await?;
            vec![".metadata".into()]
        };
        // create events
        let events = Event::new_device(
            self.instance.clone(),
            application,
            device,
            uid,
            generation,
            path,
        );
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        // commit
        t.commit().await?;
        // send change events
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
}
|
//! Benchmark for downloading files over localhost.
use criterion::*;
use utilities::server;
static DATA: [u8; 0x10000] = [1; 0x10000]; // 64K
/// Benchmarks downloading a 64 KiB body over localhost with three HTTP
/// clients (curl, chttp, reqwest). Each case spawns its own local server;
/// client construction happens in the setup closure so only the request
/// itself is measured.
fn benchmark(c: &mut Criterion) {
    c.bench_function("download 64K: curl", move |b| {
        let server = server::spawn(|_| server::static_response(&DATA));
        let endpoint = server.endpoint();
        b.iter_batched(
            // setup (unmeasured): a fresh curl handle pointed at the server
            || {
                let mut easy = curl::easy::Easy::new();
                easy.url(&endpoint).unwrap();
                easy
            },
            // measured: perform the transfer, collecting the body in memory
            |mut easy| {
                let mut body = Vec::new();
                let mut transfer = easy.transfer();
                transfer
                    .write_function(|bytes| {
                        body.extend_from_slice(bytes);
                        Ok(bytes.len())
                    })
                    .unwrap();
                transfer.perform().unwrap();
            },
            BatchSize::SmallInput,
        )
    });
    c.bench_function("download 64K: chttp", move |b| {
        use std::io::Read;
        let server = server::spawn(|_| server::static_response(&DATA));
        let endpoint = server.endpoint();
        b.iter_batched(
            // setup (unmeasured): client construction
            || chttp::Client::new(),
            // measured: GET plus draining the response body
            |client| {
                let mut body = Vec::new();
                let mut response = client.get(&endpoint).unwrap();
                response.body_mut().read_to_end(&mut body).unwrap();
            },
            BatchSize::SmallInput,
        )
    });
    c.bench_function("download 64K: reqwest", move |b| {
        let server = server::spawn(|_| server::static_response(&DATA));
        let endpoint = server.endpoint();
        b.iter_batched(
            // setup (unmeasured): client construction
            || reqwest::Client::new(),
            // measured: GET plus copying the body into a buffer
            |client| {
                let mut body = Vec::new();
                client
                    .get(&endpoint)
                    .send()
                    .unwrap()
                    .copy_to(&mut body)
                    .unwrap();
            },
            BatchSize::SmallInput,
        )
    });
}
// Register the benchmark group and generate the harness `main`.
criterion_group!(benches, benchmark);
criterion_main!(benches);
|
use std::collections::HashSet;
use aoc_runner_derive::*;
use Instruction::*;
/// One hand-held-console instruction (AoC 2020, day 8).
#[derive(Copy, Clone)]
enum Instruction {
    // Add the operand to the accumulator, then advance to the next instruction.
    ACC(i32),
    // Jump relative to the current instruction by the (signed) operand.
    JMP(i32),
    // Do nothing; the operand is kept because part 2 flips NOP <-> JMP.
    NOP(i32),
}
/// Parses the puzzle input: one `<op> <signed integer>` instruction per line.
///
/// Panics on malformed input (acceptable for a puzzle generator).
#[aoc_generator(day8)]
fn generate(input: &str) -> Vec<Instruction> {
    input
        .lines()
        .map(|line| {
            let mut parts = line.split(' ');
            let op = parts.next().unwrap();
            // `str::parse` replaces the original `i32::from_str_radix(_, 10)`
            // (clippy: from_str_radix_10) — identical behavior for base 10,
            // including the leading `+`/`-` sign.
            let arg: i32 = parts.next().unwrap().parse().unwrap();
            match op {
                "acc" => ACC(arg),
                "jmp" => JMP(arg),
                "nop" => NOP(arg),
                _ => unreachable!(),
            }
        })
        .collect()
}
// Result of running the program: `.0` is true when execution terminated
// normally (ran off the end), false when a loop was detected; `.1` is the
// accumulator value at that point.
struct Helper(bool, i32);
impl std::fmt::Display for Helper {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "({}, {})", self.0, self.1)
    }
}
/// Runs the program until it either loops (revisits an instruction) or runs
/// off the end, returning `(terminated, accumulator)`.
#[aoc(day8, part1)]
fn solve_part1(input: &[Instruction]) -> Helper {
    let mut acc = 0;
    let mut i = 0;
    let mut encountered = HashSet::new();
    while i < input.len() {
        // `insert` returns false when `i` was already present — this replaces
        // the original's separate `contains` + `insert` double lookup.
        if !encountered.insert(i) {
            return Helper(false, acc);
        }
        match input[i] {
            ACC(num) => {
                acc += num;
                i += 1;
            }
            NOP(_) => i += 1,
            // NOTE(review): a negative jump past 0 wraps to a huge usize and
            // exits the loop as "terminated" — assumed unreachable for valid
            // puzzle input; confirm if reused elsewhere.
            JMP(num) => i = (i as i32 + num) as usize,
        }
    }
    Helper(true, acc)
}
/// Finds the single JMP<->NOP swap that makes the program terminate, and
/// returns the accumulator after that fixed program runs.
#[aoc(day8, part2)]
fn solve_part2(input: &[Instruction]) -> i32 {
    let mut copy = input.to_vec();
    for i in 0..copy.len() {
        let prev = copy[i];
        // try flipping this instruction; ACC is never the culprit
        match prev {
            ACC(_) => continue,
            JMP(num) => copy[i] = NOP(num),
            NOP(num) => copy[i] = JMP(num),
        }
        // fixed: the original call was mojibake-corrupted (`©` for `&copy`)
        if let Helper(true, acc) = solve_part1(&copy) {
            return acc;
        }
        // revert and try the next candidate
        copy[i] = prev;
    }
    unreachable!()
}
|
// main.rs
// Rust client for the NeoWiFi project
use reqwest::{StatusCode};
use std::collections::HashMap;
use std::time::Duration;
use std::{thread, time};
use rand::Rng;
// One RGBW pixel in the strip.
#[derive(Copy, Clone)]
struct Pixel {
    r: u32,
    g: u32,
    b: u32,
    w: u32,
}
impl Pixel {
    // A pixel that is switched off (all channels zero).
    pub fn new() -> Pixel {
        Pixel { r: 0, g: 0, b: 0, w: 0 }
    }
    // Overwrite all four color channels at once.
    pub fn set_color(&mut self, r: u32, g: u32, b: u32, w: u32) {
        *self = Pixel { r, g, b, w };
    }
    // Represent the pixel as channel-name -> value pairs for JSON serialization.
    pub fn to_json(&self) -> HashMap<&str, u32> {
        [("r", self.r), ("g", self.g), ("b", self.b), ("w", self.w)]
            .iter()
            .cloned()
            .collect()
    }
}
// The NeoWiFi controller
struct NeoWiFi {
pixel_count: usize, // number of pixels in the strip
pixels: Vec<Pixel>, // vector of pixels
host: String, // IP address or hostname of NeoWiFi server
}
impl NeoWiFi {
pub fn new(pixel_count: usize, host: String) -> NeoWiFi {
NeoWiFi {
pixel_count,
pixels: vec!(Pixel::new(); pixel_count),
host,
}
}
// Set the color of a single pixel
pub fn set_pixel_color(&mut self, index: usize, r: u32, g: u32, b: u32, w: u32) {
// Return if invalid index
if index > self.pixel_count {
return;
}
// Find and set the pixel
self.pixels[index].set_color(r, g, b, w);
}
// Set the color of the entire strip
pub fn set_color(&mut self, r: u32, g: u32, b: u32, w: u32) {
for i in 0 .. self.pixel_count {
self.set_pixel_color(i, r, g, b, w);
}
}
// Convert all pixels in the strip to a JSON consumable format
pub fn to_json(&self) -> Vec<HashMap<&str, u32>> {
let mut json = Vec::new();
for i in 0 .. self.pixel_count {
let mut this_pixel = self.pixels[i].to_json();
// Pixels don't know where they are in the strip. Need to insert index.
this_pixel.insert("index", i as u32);
json.push(this_pixel);
}
return json;
}
// Send updated state to server
pub fn send(&self) -> Result<(StatusCode), reqwest::Error> {
// Construct body from pixel states
let body = self.to_json();
// Construct the request and send
let url = format!("http://{}/write", self.host);
// Construct a client
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(1))
.build().unwrap();
// Send the request
let resp = client.post(&url)
.json(&body)
.send()?;
return Ok(resp.status());
}
}
// An animation to run on a NeoWiFi strip.
// NOTE(review): `strip` is a raw pointer with no ownership or lifetime
// guarantee, and nothing in this file constructs an `Animation` — this looks
// like unfinished work; confirm before relying on it.
struct Animation {
    strip: *mut NeoWiFi, // raw, unowned pointer to the strip being animated
    state: Vec<Pixel>,   // per-pixel working state of the animation
}
// Called asynchronously to progress an animation forward one step
trait AnimationHandler {
    // Advance the animation by one step; the error type matches the HTTP client's.
    fn handler(&self) -> Result<(), reqwest::Error>;
}
// Demo: chase a single randomly-colored pixel around the strip forever.
fn main() {
    let pixel_count = 29;
    let host = "192.168.1.27".to_owned();
    let mut strip = NeoWiFi::new(pixel_count, host);
    let mut rng = rand::thread_rng();
    // Index of the single lit pixel; advances one position per frame.
    let mut lit: usize = 0;
    loop {
        for idx in 0..pixel_count {
            let (r, g, b) = if idx == lit {
                // The lit pixel gets a fresh random color each frame.
                (
                    rng.gen_range(0, 255),
                    rng.gen_range(0, 255),
                    rng.gen_range(0, 255),
                )
            } else {
                (0, 0, 0)
            };
            strip.set_pixel_color(idx, r, g, b, 0);
        }
        // Push the frame to the strip; report (but don't abort on) errors.
        if let Err(e) = strip.send() {
            println!("{}", e);
        }
        // Wrap back to the start of the strip.
        lit = (lit + 1) % pixel_count;
    }
}
|
use crate::*;
use std::collections::HashMap;
use std::hash::Hash;
use std::ops::Deref;
/// Backing storage for a hash object.
///
/// `Map` keys compare by value equality (via [`HashKey`]'s `Hash`/`PartialEq`),
/// while `IdentMap` keys compare by underlying value representation (via
/// [`IdentKey`]) — presumably the `compare_by_identity` mode; confirm against
/// the VM's hash built-ins.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum HashInfo {
    Map(HashMap<HashKey, Value>),
    IdentMap(HashMap<IdentKey, Value>),
}
/// Newtype over `Value` giving hash keys value-equality semantics
/// (`Object#eql?`-style comparison).
#[derive(Debug, Clone, Copy)]
pub struct HashKey(pub Value);
impl Deref for HashKey {
    type Target = Value;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Hash for HashKey {
    // Hash by the *contents* of the referenced object, so that two distinct
    // objects with equal contents land in the same bucket. Must stay
    // consistent with the `PartialEq` impl below.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        match self.as_rvalue() {
            // not a heap object: hash the raw value representation
            None => self.0.hash(state),
            Some(lhs) => match &lhs.kind {
                ObjKind::Integer(lhs) => lhs.hash(state),
                // floats are truncated to u64 for hashing; this loses the
                // fraction, so e.g. 1.25 and 1.75 collide (still sound,
                // since equal floats hash equally)
                ObjKind::Float(lhs) => (*lhs as u64).hash(state),
                ObjKind::String(lhs) => lhs.hash(state),
                ObjKind::Array(lhs) => lhs.elements.hash(state),
                ObjKind::Range(lhs) => lhs.hash(state),
                // hash entries in iteration order
                // NOTE(review): HashMap iteration order is unspecified, so two
                // equal hashes could feed entries to the hasher in different
                // orders — confirm `Hash` here is order-insensitive in practice.
                ObjKind::Hash(lhs) => {
                    for (key, val) in lhs.iter() {
                        key.hash(state);
                        val.hash(state);
                    }
                }
                ObjKind::Method(lhs) => lhs.inner().hash(state),
                // other object kinds fall back to the raw value representation
                _ => self.0.hash(state),
            },
        }
    }
}
impl PartialEq for HashKey {
    // Object#eql?()
    // This type of equality is used for comparison for keys of Hash.
    fn eq(&self, other: &Self) -> bool {
        // fast path: identical object id implies equality
        if self.0.id() == other.0.id() {
            return true;
        }
        match (self.as_rvalue(), other.as_rvalue()) {
            (None, None) => self.0 == other.0,
            (Some(lhs), Some(rhs)) => match (&lhs.kind, &rhs.kind) {
                (ObjKind::Integer(lhs), ObjKind::Integer(rhs)) => *lhs == *rhs,
                (ObjKind::Float(lhs), ObjKind::Float(rhs)) => *lhs == *rhs,
                (ObjKind::String(lhs), ObjKind::String(rhs)) => *lhs == *rhs,
                (ObjKind::Array(lhs), ObjKind::Array(rhs)) => lhs.elements == rhs.elements,
                (ObjKind::Range(lhs), ObjKind::Range(rhs)) => *lhs == *rhs,
                (ObjKind::Hash(lhs), ObjKind::Hash(rhs)) => lhs.inner() == rhs.inner(),
                (ObjKind::Method(lhs), ObjKind::Method(rhs)) => *lhs.inner() == *rhs.inner(),
                // mixed kinds: fall back to structural kind comparison
                _ => lhs.kind == rhs.kind,
            },
            // exactly one side is a heap object: never equal
            _ => false,
        }
    }
}
impl Eq for HashKey {}
/// Newtype over `Value` giving hash keys identity semantics: two keys are the
/// same only when their raw value representations are equal.
#[derive(Debug, Clone, Copy)]
pub struct IdentKey(pub Value);
impl Deref for IdentKey {
    type Target = Value;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Hash for IdentKey {
    // Hash the dereferenced raw value — no structural inspection of the
    // referenced object, unlike `HashKey`.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        (*self.0).hash(state);
    }
}
impl PartialEq for IdentKey {
    // Object#eql?()
    // This type of equality is used for comparison for keys of Hash.
    fn eq(&self, other: &Self) -> bool {
        *self.0 == *other.0
    }
}
impl Eq for IdentKey {}
use std::collections::hash_map;
/// Consuming iterator over a [`HashInfo`], unifying both map variants.
pub enum IntoIter {
    Map(hash_map::IntoIter<HashKey, Value>),
    IdentMap(hash_map::IntoIter<IdentKey, Value>),
}
impl IntoIter {
    // Consume the hash and wrap the matching variant's iterator.
    fn new(hash: HashInfo) -> IntoIter {
        match hash {
            HashInfo::Map(map) => IntoIter::Map(map.into_iter()),
            HashInfo::IdentMap(map) => IntoIter::IdentMap(map.into_iter()),
        }
    }
}
impl Iterator for IntoIter {
    type Item = (Value, Value);
    /// Yields `(key, value)` pairs, unwrapping the key newtype back to a
    /// plain `Value`.
    fn next(&mut self) -> Option<Self::Item> {
        // `Option::map` replaces the original manual `match Some/None`
        // re-wrapping (clippy: manual_map); behavior is identical.
        match self {
            IntoIter::Map(map) => map.next().map(|(k, v)| (k.0, v)),
            IntoIter::IdentMap(map) => map.next().map(|(k, v)| (k.0, v)),
        }
    }
}
// Generates a borrowing iterator enum (`Iter` / `IterMut`) with one variant
// per underlying map type, mirroring the shape of `IntoIter` above.
macro_rules! define_iter {
    ($trait:ident) => {
        pub enum $trait<'a> {
            Map(hash_map::$trait<'a, HashKey, Value>),
            IdentMap(hash_map::$trait<'a, IdentKey, Value>),
        }
    };
}
define_iter!(Iter);
define_iter!(IterMut);
// Generates the `new` constructor for an iterator enum: dispatch on the
// `HashInfo` variant and call the given `HashMap` method (`iter`/`iter_mut`).
macro_rules! define_iter_new {
    ($ty1: ident, $ty2: ty, $method: ident) => {
        impl<'a> $ty1<'a> {
            fn new(hash: $ty2) -> $ty1 {
                match hash {
                    HashInfo::Map(map) => $ty1::Map(map.$method()),
                    HashInfo::IdentMap(map) => $ty1::IdentMap(map.$method()),
                }
            }
        }
    };
}
define_iter_new!(Iter, &HashInfo, iter);
define_iter_new!(IterMut, &mut HashInfo, iter_mut);
// Generates the `Iterator` impl for a borrowing iterator enum. Items are
// `(Value, Value)` by copy: the key newtype is unwrapped and the value deref'd.
macro_rules! define_iterator {
    ($ty2:ident) => {
        impl<'a> Iterator for $ty2<'a> {
            type Item = (Value, Value);
            fn next(&mut self) -> Option<Self::Item> {
                match self {
                    $ty2::Map(map) => match map.next() {
                        Some((k, v)) => Some((k.0, *v)),
                        None => None,
                    },
                    $ty2::IdentMap(map) => match map.next() {
                        Some((k, v)) => Some((k.0, *v)),
                        None => None,
                    },
                }
            }
        }
    };
}
define_iterator!(Iter);
define_iterator!(IterMut);
// Generates `IntoIterator` for `&HashInfo` / `&mut HashInfo`, delegating to
// the matching borrowing iterator's constructor.
macro_rules! define_into_iterator {
    ($ty1:ty, $ty2:ident) => {
        impl<'a> IntoIterator for $ty1 {
            type Item = (Value, Value);
            type IntoIter = $ty2<'a>;
            fn into_iter(self) -> $ty2<'a> {
                $ty2::new(self)
            }
        }
    };
}
define_into_iterator!(&'a HashInfo, Iter);
define_into_iterator!(&'a mut HashInfo, IterMut);
// By-value iteration consumes the hash and its maps.
impl IntoIterator for HashInfo {
    type Item = (Value, Value);
    type IntoIter = IntoIter;
    fn into_iter(self) -> IntoIter {
        IntoIter::new(self)
    }
}
impl HashInfo {
    /// Wraps a value-keyed map as the default `Map` variant.
    pub fn new(map: HashMap<HashKey, Value>) -> Self {
        HashInfo::Map(map)
    }
    /// Borrowing iterator over `(key, value)` pairs.
    pub fn iter(&self) -> Iter {
        Iter::new(self)
    }
    /// Mutably-borrowing iterator over `(key, value)` pairs.
    pub fn iter_mut(&mut self) -> IterMut {
        IterMut::new(self)
    }
    /// Looks up `v` using the key semantics of the active map variant.
    pub fn get(&self, v: &Value) -> Option<&Value> {
        match self {
            HashInfo::Map(map) => map.get(&HashKey(*v)),
            HashInfo::IdentMap(map) => map.get(&IdentKey(*v)),
        }
    }
    /// Number of entries.
    pub fn len(&self) -> usize {
        match self {
            HashInfo::Map(map) => map.len(),
            HashInfo::IdentMap(map) => map.len(),
        }
    }
    /// True when there are no entries (companion to `len`;
    /// clippy: len_without_is_empty).
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Removes all entries, keeping the variant.
    pub fn clear(&mut self) {
        match self {
            HashInfo::Map(map) => map.clear(),
            HashInfo::IdentMap(map) => map.clear(),
        }
    }
    /// Inserts or overwrites the entry for `k`.
    pub fn insert(&mut self, k: Value, v: Value) {
        match self {
            HashInfo::Map(map) => map.insert(HashKey(k), v),
            HashInfo::IdentMap(map) => map.insert(IdentKey(k), v),
        };
    }
    /// Removes the entry for `k`, returning its value if present.
    pub fn remove(&mut self, k: Value) -> Option<Value> {
        match self {
            HashInfo::Map(map) => map.remove(&HashKey(k)),
            HashInfo::IdentMap(map) => map.remove(&IdentKey(k)),
        }
    }
    /// Whether an entry for `k` exists.
    pub fn contains_key(&self, k: Value) -> bool {
        match self {
            HashInfo::Map(map) => map.contains_key(&HashKey(k)),
            HashInfo::IdentMap(map) => map.contains_key(&IdentKey(k)),
        }
    }
    /// All keys, unwrapped to plain `Value`s (unspecified order).
    pub fn keys(&self) -> Vec<Value> {
        match self {
            HashInfo::Map(map) => map.keys().map(|x| x.0).collect(),
            HashInfo::IdentMap(map) => map.keys().map(|x| x.0).collect(),
        }
    }
    /// All values (unspecified order).
    pub fn values(&self) -> Vec<Value> {
        match self {
            HashInfo::Map(map) => map.values().cloned().collect(),
            HashInfo::IdentMap(map) => map.values().cloned().collect(),
        }
    }
    /// Renders the hash as `{k=>v, k=>v}` using the VM's `inspect` for each
    /// key and value.
    pub fn to_s(&self, vm: &mut VM) -> String {
        if self.is_empty() {
            return "{}".to_string();
        }
        // Build the output incrementally. The original re-`format!`ed the
        // whole accumulator once per entry, which is O(n²) in output length.
        let mut result = String::from("{");
        let mut first = true;
        for (k, v) in self.iter() {
            if !first {
                result.push_str(", ");
            }
            result.push_str(&vm.val_inspect(k));
            result.push_str("=>");
            result.push_str(&vm.val_inspect(v));
            first = false;
        }
        result.push('}');
        result
    }
}
/// Reference-counted handle to a `HashInfo`.
pub type HashRef = Ref<HashInfo>;
impl HashRef {
    // NOTE(review): an inherent `from` shadows `From::from` for this type at
    // call sites like `HashRef::from(..)`; consider an `impl From<HashMap<..>>`
    // instead — confirm no callers rely on the trait method.
    pub fn from(map: HashMap<HashKey, Value>) -> Self {
        HashRef::new(HashInfo::new(map))
    }
}
|
use actix_web::ResponseError;
use async_trait::async_trait;
use deadpool_postgres::Pool;
use drogue_cloud_database_common::{
auth::authorize, error::ServiceError, models::app::*, DatabaseService,
};
use drogue_cloud_service_api::auth::user::{UserDetails, UserInformation};
use drogue_cloud_service_api::{
auth::user::authz::{AuthorizationRequest, Outcome},
health::{HealthCheckError, HealthChecked},
};
use serde::Deserialize;
use tokio_postgres::NoTls;
/// A service that decides whether a request's user may perform an operation.
#[async_trait]
pub trait AuthorizationService: Clone {
    type Error: ResponseError;
    /// Evaluates `request`, returning an allow/deny outcome or a service error.
    async fn authorize(&self, request: AuthorizationRequest) -> Result<Outcome, Self::Error>;
}
/// Configuration for the Postgres-backed authorization service.
#[derive(Clone, Debug, Deserialize)]
pub struct AuthorizationServiceConfig {
    // Connection-pool settings, deserialized from the service configuration.
    pub pg: deadpool_postgres::Config,
}
impl DatabaseService for PostgresAuthorizationService {
    /// Exposes the connection pool to the shared database plumbing.
    fn pool(&self) -> &Pool {
        &self.pool
    }
}
#[async_trait::async_trait]
impl HealthChecked for PostgresAuthorizationService {
    /// Readiness probe: delegates to the database-level check, converting the
    /// error type. The old body wrapped the value as `Ok(expr?)`, a needless
    /// `?` round-trip (clippy `needless_question_mark`) — the mapped result is
    /// already the exact return type.
    async fn is_ready(&self) -> Result<(), HealthCheckError> {
        DatabaseService::is_ready(self)
            .await
            .map_err(HealthCheckError::from)
    }
}
/// Authorization service backed by a Postgres connection pool.
#[derive(Clone)]
pub struct PostgresAuthorizationService {
    pool: Pool,
}
impl PostgresAuthorizationService {
    /// Builds the service, creating a connection pool (without TLS) from the
    /// provided configuration.
    pub fn new(config: AuthorizationServiceConfig) -> anyhow::Result<Self> {
        Ok(Self {
            pool: config.pg.create_pool(NoTls)?,
        })
    }
}
/// Adapter turning an incoming request into the `UserInformation` that the
/// shared `authorize` helper expects.
struct Context(pub AuthorizationRequest);
impl From<Context> for UserInformation {
    fn from(ctx: Context) -> Self {
        // The request carries an already-authenticated user's id and roles.
        Self::Authenticated(UserDetails {
            user_id: ctx.0.user_id,
            roles: ctx.0.roles,
        })
    }
}
#[async_trait]
impl AuthorizationService for PostgresAuthorizationService {
    type Error = ServiceError;
    /// Authorizes `request` against the application stored in the database.
    /// A missing application yields `Outcome::Deny` rather than an error.
    async fn authorize(&self, request: AuthorizationRequest) -> Result<Outcome, Self::Error> {
        let c = self.pool.get().await?;
        // lookup the application
        let application = PostgresApplicationAccessor::new(&c);
        let application = match application.lookup(&request.application).await? {
            Some(application) => application,
            None => {
                return Ok(Outcome::Deny);
            }
        };
        log::debug!("Found application: {:?}", application.name);
        // Delegate the actual policy decision to the shared helper.
        let outcome = authorize(&application, &Context(request).into());
        log::debug!("Authorization outcome: {:?}", outcome);
        Ok(outcome)
    }
}
|
#![recursion_limit = "256"]
extern crate proc_macro;
use proc_macro2::{Ident, Literal};
use std::collections::HashMap;
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{parse_macro_input, Error, LitStr};
use quote::quote;
/// Parsed plugin metadata. Each tuple holds the byte length (including the
/// trailing NUL) and the byte-string literal emitted into the generated
/// `#[no_mangle]` statics.
struct WeechatPluginInfo {
    plugin: syn::Ident,
    name: (usize, Literal),
    author: (usize, Literal),
    description: (usize, Literal),
    version: (usize, Literal),
    license: (usize, Literal),
}
/// One `key: "value"` metadata entry accepted by the `weechat_plugin!` macro.
enum WeechatVariable {
    Name(syn::LitStr),
    Author(syn::LitStr),
    Description(syn::LitStr),
    Version(syn::LitStr),
    License(syn::LitStr),
}
impl WeechatVariable {
    /// Converts a string literal into a NUL-terminated byte-string literal
    /// plus its total length — the layout Weechat reads as a C string.
    fn litstr_to_pair(string: &LitStr) -> (usize, Literal) {
        let mut bytes = string.value().into_bytes();
        // Append the terminating NUL expected by the C side.
        bytes.push(0);
        let len = bytes.len();
        (len, Literal::byte_string(&bytes))
    }
    /// Length/literal pair for this entry. Every variant carries a `LitStr`,
    /// so a single or-pattern replaces the five previously duplicated,
    /// byte-identical match arms.
    fn as_pair(&self) -> (usize, Literal) {
        match self {
            WeechatVariable::Name(string)
            | WeechatVariable::Author(string)
            | WeechatVariable::Description(string)
            | WeechatVariable::Version(string)
            | WeechatVariable::License(string) => WeechatVariable::litstr_to_pair(string),
        }
    }
}
impl Parse for WeechatVariable {
    /// Parses a `key: "value"` pair, matching the key case-insensitively.
    fn parse(input: ParseStream) -> Result<Self> {
        let key: Ident = input.parse()?;
        input.parse::<syn::Token![:]>()?;
        let value = input.parse()?;
        match key.to_string().to_lowercase().as_ref() {
            "name" => Ok(WeechatVariable::Name(value)),
            "author" => Ok(WeechatVariable::Author(value)),
            "description" => Ok(WeechatVariable::Description(value)),
            "version" => Ok(WeechatVariable::Version(value)),
            "license" => Ok(WeechatVariable::License(value)),
            // NOTE(review): placeholder error text — should list the accepted keys.
            _ => Err(Error::new(key.span(), "expected one of bla")),
        }
    }
}
impl Parse for WeechatPluginInfo {
    /// Parses `PluginType, key: "value", ...`. All five metadata keys are
    /// required; the `unwrap`s below panic (at macro-expansion time in the
    /// user's build) if one is missing.
    fn parse(input: ParseStream) -> Result<Self> {
        let plugin: syn::Ident = input.parse()?;
        input.parse::<syn::Token![,]>()?;
        let args: Punctuated<WeechatVariable, syn::Token![,]> =
            input.parse_terminated(WeechatVariable::parse)?;
        // Collect by key; the last occurrence of a duplicated key wins.
        let mut variables = HashMap::new();
        for arg in args.pairs() {
            let variable = arg.value();
            // NOTE(review): `*variable` dereferences the borrow handed out by
            // `pairs()` — verify this iteration yields owned values, otherwise
            // this is a move out of a shared reference.
            match variable {
                WeechatVariable::Name(_) => variables.insert("name", *variable),
                WeechatVariable::Author(_) => variables.insert("author", *variable),
                WeechatVariable::Description(_) => variables.insert("description", *variable),
                WeechatVariable::Version(_) => variables.insert("version", *variable),
                WeechatVariable::License(_) => variables.insert("license", *variable),
            };
        }
        Ok(WeechatPluginInfo {
            plugin,
            name: variables.remove("name").unwrap().as_pair(),
            author: variables.remove("author").unwrap().as_pair(),
            description: variables.remove("description").unwrap().as_pair(),
            version: variables.remove("version").unwrap().as_pair(),
            license: variables.remove("license").unwrap().as_pair(),
        })
    }
}
/// Procedural macro that emits the C ABI surface Weechat expects from a
/// plugin: the metadata byte-string statics plus `weechat_plugin_init` /
/// `weechat_plugin_end` entry points that construct and drop the user's
/// plugin type.
#[proc_macro]
pub fn weechat_plugin(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let WeechatPluginInfo {
        plugin,
        name,
        author,
        description,
        version,
        license,
    } = parse_macro_input!(input as WeechatPluginInfo);
    // Split each (length, literal) pair; the length becomes the static's
    // array size and the literal its initializer.
    let (name_len, name) = name;
    let (author_len, author) = author;
    let (description_len, description) = description;
    let (license_len, license) = license;
    let (version_len, version) = version;
    let result = quote! {
        #[no_mangle]
        pub static weechat_plugin_api_version: [u8; weechat_sys::WEECHAT_PLUGIN_API_VERSION_LENGTH] = *weechat_sys::WEECHAT_PLUGIN_API_VERSION;
        #[no_mangle]
        pub static weechat_plugin_name: [u8; #name_len] = *#name;
        #[no_mangle]
        pub static weechat_plugin_author: [u8; #author_len] = *#author;
        #[no_mangle]
        pub static weechat_plugin_description: [u8; #description_len] = *#description;
        #[no_mangle]
        pub static weechat_plugin_version: [u8; #version_len] = *#version;
        #[no_mangle]
        pub static weechat_plugin_license: [u8; #license_len] = *#license;
        // The live plugin instance; owned by the generated init/end pair.
        static mut __PLUGIN: Option<#plugin> = None;
        #[no_mangle]
        pub extern "C" fn weechat_plugin_init(
            plugin: *mut weechat_sys::t_weechat_plugin,
            argc: libc::c_int,
            argv: *mut *mut ::libc::c_char,
        ) -> libc::c_int {
            let plugin = Weechat::from_ptr(plugin);
            let args = ArgsWeechat::new(argc, argv);
            match <#plugin as ::weechat::WeechatPlugin>::init(plugin, args) {
                Ok(p) => {
                    unsafe { __PLUGIN = Some(p) }
                    return weechat_sys::WEECHAT_RC_OK;
                }
                Err(_e) => {
                    return weechat_sys::WEECHAT_RC_ERROR;
                }
            }
        }
        #[no_mangle]
        pub extern "C" fn weechat_plugin_end(_plugin: *mut weechat_sys::t_weechat_plugin) -> ::libc::c_int {
            // Dropping the instance runs the plugin's cleanup.
            unsafe {
                __PLUGIN = None;
            }
            weechat_sys::WEECHAT_RC_OK
        }
    };
    result.into()
}
|
use std::{fs::File, io::{BufRead, BufReader}};
fn main() {
let file = File::open("inputs/input-12.txt").unwrap();
let lines: Vec<String> = BufReader::new(file).lines().map(|l| l.unwrap()).collect();
part_one(&lines);
part_two(&lines);
}
/// Part 1: the ship itself moves; `L`/`R` rotate its heading in place and
/// `F` advances along the current heading.
///
/// Takes `&[String]` instead of `&Vec<String>` (the idiomatic slice form);
/// existing `part_one(&lines)` callers still compile via deref coercion.
fn part_one(lines: &[String]) {
    // (east/west, north/south) position; heading in degrees, 90 = east.
    let mut position: (isize, isize) = (0, 0);
    let mut degrees: isize = 90;
    for line in lines {
        let command = &line[0..1];
        let value: isize = line[1..].parse().unwrap();
        match command {
            "N" => position = (position.0, position.1 + value),
            "S" => position = (position.0, position.1 - value),
            "E" => position = (position.0 + value, position.1),
            "W" => position = (position.0 - value, position.1),
            // `rem_euclid` normalizes into [0, 360) in one step, replacing the
            // old `% 360` / `+ 360` / `% 360` dance (identical result).
            "R" => degrees = (degrees + value).rem_euclid(360),
            "L" => degrees = (degrees - value).rem_euclid(360),
            "F" => {
                // Heading is always a multiple of 90 after normalization.
                match degrees {
                    0 => position = (position.0, position.1 + value),
                    90 => position = (position.0 + value, position.1),
                    180 => position = (position.0, position.1 - value),
                    270 => position = (position.0 - value, position.1),
                    _ => unreachable!(),
                }
            }
            _ => unreachable!(),
        }
    }
    // Answer is the Manhattan distance from the origin.
    println!("first {:?}", position.0.abs() + position.1.abs());
}
/// Part 2: commands steer a waypoint relative to the ship; `F` moves the
/// ship toward the waypoint `value` times.
///
/// Takes `&[String]` instead of `&Vec<String>` (the idiomatic slice form);
/// existing `part_two(&lines)` callers still compile via deref coercion.
fn part_two(lines: &[String]) {
    // Ship position: x = north/south, y = east/west.
    let mut x = 0;
    let mut y = 0;
    // Waypoint offset, starting 1 north / 10 east of the ship.
    let mut wx = 1;
    let mut wy = 10;
    for line in lines {
        let command = &line[0..1];
        let value: isize = line[1..].parse().unwrap();
        match command {
            "N" => wx += value,
            "S" => wx -= value,
            "E" => wy += value,
            "W" => wy -= value,
            "R" => {
                let (a, b) = rot(wx, wy, value);
                wx = a;
                wy = b;
            }
            // A left turn is the complementary right turn.
            "L" => {
                let (a, b) = rot(wx, wy, 360 - value);
                wx = a;
                wy = b;
            }
            "F" => {
                x += wx * value;
                y += wy * value;
            }
            _ => unreachable!(),
        }
    }
    // Answer is the Manhattan distance from the origin.
    println!("second {:?}", x.abs() + y.abs());
}
/// Rotates the waypoint vector `(x, y)` by `d` degrees (same sense as the
/// original 90/180/270 cases).
///
/// Generalized: `d` is first normalized into [0, 360) with `rem_euclid`, so
/// 0, 360, and negative multiples of 90 are now valid instead of panicking.
/// Behavior for the previously supported inputs (90/180/270) is unchanged.
fn rot(x: isize, y: isize, d: isize) -> (isize, isize) {
    match d.rem_euclid(360) {
        0 => (x, y),
        90 => (-y, x),
        180 => (-x, -y),
        270 => (y, -x),
        _ => unreachable!("rotation must be a multiple of 90 degrees"),
    }
}
|
// Copyright 2020 Yuchen Wong
use opencv::core::{ CV_8UC3, CV_32FC1, Mat, Point, Scalar, Size, Vec3b, BORDER_DEFAULT, NORM_MINMAX };
use opencv::imgproc::{ gaussian_blur, spatial_gradient, COLOR_BGR2GRAY };
use opencv::prelude::{ MatTrait, Vector };
use std::error::Error;
use std::vec::Vec;
#[path = "../base/opencv_utils.rs"] mod opencv_utils;
#[path = "../base/math_utils.rs"] mod math_utils;
use opencv_utils::{ get_pixel };
/// Harris corner detector.
///
/// Computes the Harris response R = det(M) - k*(trace(M))^2 over `src`,
/// normalizes R into [0, 255], and returns the pixels whose response exceeds
/// `threshold` and is a local maximum in its 8-neighbourhood.
///
/// * `block_size` — Gaussian window used to accumulate the structure tensor
///   (passed to `gaussian_blur`; OpenCV expects odd sizes — confirm callers).
/// * `k` — Harris sensitivity constant.
/// * `threshold` — cutoff on the *normalized* (0–255) response.
/// * `cut_edge` — when true, an 8-pixel border is excluded from the scan.
pub fn harris_detect_corner(src: &Mat,
    block_size: i32,
    k: f64,
    threshold: f32,
    cut_edge: bool) -> Result<Vec<Point>, Box<dyn Error>> {
    let mut buffer: Mat = Mat::default()?;
    let mut gray_image: Mat = Mat::default()?;
    opencv::imgproc::cvt_color(src, &mut gray_image, COLOR_BGR2GRAY, 0).unwrap();
    // Step1: Compute Ix, Iy
    let mut ix: Mat = Mat::default()?;
    let mut iy: Mat = Mat::default()?;
    let mut buffer_x = Mat::default()?;
    let mut buffer_y = Mat::default()?;
    spatial_gradient(&gray_image, &mut buffer_x, &mut buffer_y, 3, BORDER_DEFAULT).unwrap();
    // Widen the gradients to 32-bit float so the products below don't clip.
    buffer_x.convert_to(&mut ix, CV_32FC1, 1.0, 0.0).unwrap();
    buffer_y.convert_to(&mut iy, CV_32FC1, 1.0, 0.0).unwrap();
    // Step2: Compute Ix^2 Iy^2 IxIy
    let mut ix2: Mat = Mat::default()?;
    opencv::core::multiply(&ix, &ix, &mut ix2, 1.0, -1).unwrap();
    let mut iy2: Mat = Mat::default()?;
    opencv::core::multiply(&iy, &iy, &mut iy2, 1.0, -1).unwrap();
    let mut ixiy: Mat = Mat::default()?;
    opencv::core::multiply(&ix, &iy, &mut ixiy, 1.0, -1).unwrap();
    // Step3: Gaussian filter on ix2, iy2, ixiy
    let mut sx2: Mat = Mat::default()?;
    let mut sy2: Mat = Mat::default()?;
    let mut sxsy: Mat = Mat::default()?;
    gaussian_blur(&ix2, &mut sx2, Size::new(block_size, block_size),
        1.0, 0.0, BORDER_DEFAULT).unwrap();
    gaussian_blur(&iy2, &mut sy2, Size::new(block_size, block_size),
        1.0, 0.0, BORDER_DEFAULT).unwrap();
    gaussian_blur(&ixiy, &mut sxsy, Size::new(block_size, block_size),
        1.0, 0.0, BORDER_DEFAULT).unwrap();
    // Step4: Now that M = [sx2, sxsy]
    //                     [sxsy, sy2]
    // We compute the R = det(M) - k * (trace(M))^2
    let mut sx2sy2 = Mat::default()?;
    opencv::core::multiply(&sx2, &sy2, &mut sx2sy2, 1.0, -1).unwrap();
    let mut sxsy2 = Mat::default()?;
    opencv::core::multiply(&sxsy, &sxsy, &mut sxsy2, 1.0, -1).unwrap();
    // det(M) = sx2*sy2 - sxsy^2
    let mut det = Mat::default()?;
    opencv::core::add_weighted(&sx2sy2, 1.0, &sxsy2, -1.0, 0.0, &mut det, -1).unwrap();
    // trace(M) = sx2 + sy2, squared into `trace` below.
    opencv::core::add_weighted(&sx2, 1.0, &sy2, 1.0, 0.0, &mut buffer, -1).unwrap();
    let mut trace = Mat::default()?;
    opencv::core::multiply(&buffer, &buffer, &mut trace, 1.0, -1).unwrap();
    let mut R = Mat::default()?;
    // Raw response lands in `buffer`; `R` is its 0-255 normalization, so the
    // `threshold` parameter is interpreted on that normalized scale.
    opencv::core::add_weighted(&det, 1.0, &trace, -k, 0.0, &mut buffer, -1).unwrap();
    opencv::core::normalize(&buffer, &mut R, 0.0, 255.0,
        NORM_MINMAX, -1, &Mat::default()?).unwrap();
    // Now we do the output
    let rows = src.rows();
    let cols = src.cols();
    // 8-neighbourhood offsets used for the non-maximum suppression below.
    let mx = [-1, -1, -1, 0, 0, 1, 1, 1];
    let my = [-1, 0, 1, 1, -1, 0, 1, -1];
    let mut feature_num: i32 = 0;
    let mut out_feature: Vec<Point> = Vec::new();
    let mut thresh: i32 = 0;
    if cut_edge == true {
        thresh = 8;
    }
    for i in thresh..rows-thresh {
        for j in thresh..cols-thresh {
            let pixel = get_pixel::<f32>(&R, i, j);
            if pixel > threshold {
                // Keep only strict local maxima (ties in favour of this pixel).
                let mut is_local_maximum = true;
                for k in 0..8 {
                    let rr = i + mx[k];
                    let cc = j + my[k];
                    if rr >=0 && rr < rows && cc >=0 && cc < cols {
                        if get_pixel::<f32>(&R, rr, cc) > pixel {
                            is_local_maximum = false;
                            break;
                        }
                    }
                }
                if is_local_maximum == true {
                    feature_num += 1;
                    // Point is (x, y) = (column, row).
                    out_feature.push(Point::new(j, i));
                }
            }
        }
    }
    log::trace!("Detected features: {}.", feature_num);
    Ok(out_feature)
}
|
use lazy_static::lazy_static;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt::Display;
use std::path::PathBuf;
use std::str::FromStr;
use url::Url;
/// CIDv0 hash of the canonical empty UnixFS directory.
pub const EMPTY_FOLDER_HASH: &str = "QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn";
lazy_static! {
    /// `/ipfs/` path form of [`EMPTY_FOLDER_HASH`].
    pub static ref EMPTY_FOLDER_PATH: Path =
        Path::from_str("/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn").unwrap();
}
lazy_static! {
    // Base URL of the public IPFS HTTP gateway.
    static ref IPFS_PUBLIC_API_URL: Url = Url::parse("https://ipfs.io/").unwrap();
}
// Assuming that the sha256 hash is for a Qmhash
/// Builds a CIDv0 from a hex-encoded sha2-256 digest, or `None` when the
/// input is not valid hex or does not decode to exactly 32 bytes.
pub fn sha256_to_cid(codec: cid::Codec, sha256_str: &str) -> Option<cid::Cid> {
    hex::decode(sha256_str).ok().and_then(|digest| {
        if digest.len() != 32 {
            None
        } else {
            // Multihash layout: <hash-fn code><digest length><32 digest bytes>.
            let mut mh = [0u8; 34];
            mh[0] = multihash::Hash::SHA2256.code();
            mh[1] = multihash::Hash::SHA2256.size();
            // Bulk copy replaces the old element-by-element loop; lengths
            // match because the 32-byte check above just passed.
            mh[2..].copy_from_slice(&digest);
            Some(cid::Cid::new(codec, cid::Version::V0, &mh))
        }
    })
}
/// Errors produced while parsing an IPFS/IPNS path or DNSLink root.
#[derive(Display, Debug, Eq, PartialEq)]
pub enum PathParseError {
    #[display(fmt = "unable to parse cid: {}", _0)]
    CidError(cid::Error),
    #[display(fmt = "invalid domain: {}", _0)]
    DnsLinkDomainInvalid(String),
    #[display(fmt = "errors during UTS#46 processing: {}", _0)]
    DnsLinkUnicodeError(String),
    #[display(fmt = "unable to parse suffix: {}", _0)]
    SuffixError(std::string::ParseError),
    #[display(fmt = "suffix is not absolute: {}", _0)]
    SuffixNotAbsolute(String),
    #[display(fmt = "unexpected prefix: {} (must be /ipfs/ or /ipns/)", _0)]
    UnknownPrefix(String),
    #[display(fmt = "expected cid, got dnslink record")]
    ExpectedCid,
}
/// The addressing root of a path: an IPFS CID, an IPNS key, or a DNSLink
/// domain (which also lives under the `/ipns/` namespace).
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Root {
    Ipfs(cid::Cid),
    Ipns(cid::Cid),
    DnsLink(publicsuffix::Domain),
}
impl Display for Root {
    /// Canonical `/ipfs/…` or `/ipns/…` rendering; DNSLink domains share the
    /// `/ipns/` namespace with IPNS keys.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Root::Ipfs(cid) => {
                f.write_str("/ipfs/")?;
                write!(f, "{}", cid)
            }
            Root::Ipns(cid) => {
                f.write_str("/ipns/")?;
                write!(f, "{}", cid)
            }
            Root::DnsLink(domain) => {
                f.write_str("/ipns/")?;
                write!(f, "{}", domain)
            }
        }
    }
}
lazy_static! {
    // NOTE(review): `fetch()` downloads the public-suffix list on first access
    // and `unwrap` panics on failure — a hidden network dependency inside a
    // global initializer; consider a bundled list.
    static ref PUBLIC_SUFFIX_LIST: publicsuffix::List = publicsuffix::List::fetch().unwrap();
}
impl FromStr for Root {
    type Err = PathParseError;
    /// Parses `/ipfs/<cid>` or `/ipns/<cid-or-domain>`. For `/ipns/`, a CID
    /// parse is attempted first and a DNSLink domain is the fallback.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        use cid::ToCid;
        // Split the 6-byte prefix ("/ipfs/" / "/ipns/") from the payload;
        // `get` keeps this panic-free on short or non-ASCII-boundary input.
        match (s.get(0..6), s.get(6..)) {
            (Some("/ipfs/"), Some(s)) => {
                s.to_cid().map(Root::Ipfs).map_err(PathParseError::CidError)
            }
            (Some("/ipns/"), Some(s)) => s
                .to_cid()
                .map(Root::Ipns)
                .map_err(PathParseError::CidError)
                .or_else(|_| {
                    // Not a CID — try to parse it as a DNSLink domain.
                    PUBLIC_SUFFIX_LIST
                        .parse_domain(s)
                        .map(Root::DnsLink)
                        .map_err(|e| {
                            use publicsuffix::errors::ErrorKind;
                            match e.0 {
                                ErrorKind::Uts46(errs) => {
                                    PathParseError::DnsLinkUnicodeError(format!("{:?}", errs))
                                }
                                ErrorKind::InvalidDomain(domain) => {
                                    PathParseError::DnsLinkDomainInvalid(domain)
                                }
                                _ => panic!("unhandled publicsuffix error"),
                            }
                        })
                }),
            (other, _) => Err(PathParseError::UnknownPrefix(
                other.unwrap_or_default().to_string(),
            )),
        }
    }
}
/// A parsed IPFS path: a [`Root`] plus an optional absolute, cleaned suffix.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Path {
    pub root: Root,
    // `None` when the path addresses the root itself.
    pub suffix: Option<PathBuf>,
}
impl Path {
    /// Convenience constructor for a bare `/ipfs/<cid>` path with no suffix.
    pub fn ipfs(cid: cid::Cid) -> Self {
        Self {
            root: Root::Ipfs(cid),
            suffix: None,
        }
    }
}
impl FromStr for Path {
    type Err = PathParseError;
    /// Splits the string into root (`/ipfs/<x>` or `/ipns/<x>`) and suffix at
    /// the third `/`, cleans the suffix (`..`/`.` resolution), and drops a
    /// suffix that cleans down to the bare root.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        use path_clean::PathClean;
        // Index of the third '/', i.e. where the suffix starts; a missing
        // third slash means the whole string is the root.
        let root_end = s
            .match_indices('/')
            .nth(2)
            .map(|(x, _)| x)
            .unwrap_or_else(|| s.len());
        let root = Root::from_str(s.get(0..root_end).unwrap_or_default())?;
        let suffix = s
            .get(root_end..)
            .and_then(|x| if x.is_empty() { None } else { Some(x) })
            .map(PathBuf::from_str)
            .map(|res| {
                // Clean the suffix, then require it to still be absolute.
                res.map(|x| x.clean())
                    .map_err(PathParseError::SuffixError)
                    .and_then(|x| {
                        if x.is_absolute() {
                            Ok(x)
                        } else {
                            Err(PathParseError::SuffixNotAbsolute(
                                x.to_string_lossy().to_string(),
                            ))
                        }
                    })
            })
            .transpose()
            .map(|x| {
                // A cleaned suffix with no parent is just "/": treat as none.
                if let Some(x) = x {
                    if x.parent().is_none() {
                        None
                    } else {
                        Some(x)
                    }
                } else {
                    None
                }
            })?;
        Ok(Self { root, suffix })
    }
}
impl Display for Path {
    /// Renders as root plus the optional cleaned suffix, e.g.
    /// `/ipfs/Qm…/sub/path`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.root)?;
        match &self.suffix {
            Some(suffix) => write!(f, "{}", suffix.to_string_lossy()),
            None => Ok(()),
        }
    }
}
impl Into<String> for &Path {
fn into(self) -> String {
format!("{}", self)
}
}
impl Serialize for Path {
    /// Serializes as the `Display` string (e.g. `/ipfs/Qm…/sub/path`).
    /// `collect_str` streams the formatter output straight into the
    /// serializer, avoiding the intermediate owned `String` the old code
    /// built via `Into`.
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_str(self)
    }
}
impl<'de> Deserialize<'de> for Path {
    /// Deserializes from a string via [`Path::from_str`]; parse failures are
    /// reported as custom serde errors.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        use serde::de;
        use std::fmt;
        // Visitor that accepts any string and funnels it through FromStr.
        struct PathVisitor;
        impl<'de> de::Visitor<'de> for PathVisitor {
            type Value = Path;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                write!(formatter, "a string that can be converted to a Path")
            }
            fn visit_str<E>(self, path_str: &str) -> std::result::Result<Self::Value, E>
            where
                E: de::Error,
            {
                Path::from_str(path_str).map_err(de::Error::custom)
            }
        }
        deserializer.deserialize_string(PathVisitor)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Round-trip and error-path tests for Root and Path parsing/printing.
    #[test]
    fn root_ipfs_ok() {
        let ipfs_root_str = format!("/ipfs/{}", EMPTY_FOLDER_HASH);
        assert_eq!(
            ipfs_root_str,
            Root::from_str(&ipfs_root_str).unwrap().to_string()
        );
    }
    #[test]
    fn root_dnslink_ok() {
        let dnslink_root_str = "/ipns/bootstrap.libp2p.io";
        assert_eq!(
            dnslink_root_str,
            Root::from_str(dnslink_root_str).unwrap().to_string()
        );
    }
    #[test]
    fn root_dnslink_with_invalid_domain_err() {
        let dnslink_root_str = "/ipns/notadomain.123$$$%@";
        assert_eq!(
            PathParseError::DnsLinkDomainInvalid("notadomain.123$$$%@".to_string()),
            Root::from_str(dnslink_root_str).unwrap_err()
        );
    }
    #[test]
    fn root_dnslink_with_non_uts46_conformant_err() {
        // "Ⅎ" fails UTS#46 normalization, surfacing as an invalid domain.
        let dnslink_root_str = "/ipns/Ⅎ.com";
        assert_eq!(
            PathParseError::DnsLinkDomainInvalid("Ⅎ.com".to_string()),
            Root::from_str(dnslink_root_str).unwrap_err()
        );
    }
    #[test]
    fn root_ipns_ok() {
        let ipns_root_str = "/ipns/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN";
        assert_eq!(
            ipns_root_str,
            Root::from_str(ipns_root_str).unwrap().to_string()
        );
    }
    #[test]
    fn ipfs_path_without_suffix_ok() {
        let path_string = format!("/ipfs/{}", EMPTY_FOLDER_HASH);
        assert_eq!(
            path_string,
            Path::from_str(&path_string).unwrap().to_string()
        );
    }
    #[test]
    fn ipfs_path_with_suffix_ok() {
        let path_string = "/ipfs/QmXGuztteR8h7TKDsw61yCrwYzrw8kcfQMfG8dXd3Y2ZkC/spec/ipfs.rs";
        assert_eq!(
            path_string,
            Path::from_str(&path_string).unwrap().to_string()
        );
    }
    #[test]
    fn ipfs_path_with_dot_dot_to_no_suffix_ok() {
        // `..` components that climb past the root leave a bare root path.
        let path_string =
            "/ipfs/QmXGuztteR8h7TKDsw61yCrwYzrw8kcfQMfG8dXd3Y2ZkC/../spec/ipfs.rs/../../../../../";
        assert_eq!(
            "/ipfs/QmXGuztteR8h7TKDsw61yCrwYzrw8kcfQMfG8dXd3Y2ZkC",
            Path::from_str(&path_string).unwrap().to_string(),
        );
    }
    #[test]
    fn ipfs_path_with_invalid_cid_err() {
        let path_string = "/ipfs/QmSomeHash";
        assert_eq!(
            PathParseError::CidError(cid::Error::ParsingError),
            Path::from_str(&path_string).unwrap_err(),
        );
    }
    #[test]
    fn ipfs_path_with_dot_dot_to_some_suffix_ok() {
        // A trailing `..` removes just the last suffix component.
        let path_string = "/ipfs/QmXGuztteR8h7TKDsw61yCrwYzrw8kcfQMfG8dXd3Y2ZkC/spec/ipfs.rs/../";
        assert_eq!(
            "/ipfs/QmXGuztteR8h7TKDsw61yCrwYzrw8kcfQMfG8dXd3Y2ZkC/spec",
            Path::from_str(&path_string).unwrap().to_string(),
        );
    }
}
|
// Hash-function implementations, re-exported flat at this module's root.
mod adler32;
mod murmur3;
mod sip;
pub use adler32::*;
pub use murmur3::*;
pub use sip::*;
|
use rand::{thread_rng, Rng};
use std::error::Error;
use std::io::{Error as ioError, ErrorKind};
use crate::player::Player;
use crate::board::{self, Board};
use crate::coin::Coin;
use crate::checks::{is_vertical_win, is_horizontal_win, is_diagonal_win, is_draw};
use crate::renderer;
/// Terminal result of a game: a winning player, or a full-board draw.
pub enum GameOutcome {
    Win(Player),
    Draw
}
/// Connect-four game state: the two players, the board, and whose turn it is.
pub struct Game {
    player_1: Player,
    player_2: Player,
    board: Board,
    current_player: Player
}
impl Game {
    /// Fresh game: red (human) vs. yellow (computer); red moves first.
    pub fn new_game() -> Game {
        Game {
            player_1: Player::Red,
            player_2: Player::Yellow,
            current_player: Player::Red,
            board: board::new_board(),
        }
    }
    /// Swaps the current player and returns `&self` for chaining.
    pub fn next_player(&mut self) -> &Self {
        self.current_player = if self.current_player == self.player_1 {
            self.player_2
        } else {
            self.player_1
        };
        self
    }
    /// Runs the game loop until a win or draw, then announces the result.
    pub fn start(&mut self) {
        renderer::render_board(&self.board);
        // game loop
        let outcome: Option<Player> = loop {
            if self.current_player == self.player_1 {
                // Invalid human input (parse failure or < 1) retries the turn
                // without switching players.
                match human_move(&self.board) {
                    Ok(col) => board::drop_coin(&mut self.board, Coin::Player(self.player_1), col.into()),
                    Err(_) => continue,
                };
            } else {
                let computer_move = computer_move(&self.board);
                board::drop_coin(&mut self.board, Coin::Player(self.player_2), computer_move.into());
                // render out the board with the new move
                renderer::render_board(&self.board);
            }
            // run win check.
            match self.check_for_winner() {
                Some(GameOutcome::Win(player)) => break Some(player),
                Some(GameOutcome::Draw) => break None,
                None => self.next_player(),
            };
        };
        if let Some(player) = outcome {
            // announce winner
            renderer::render_board(&self.board);
            println!("{:?} HAS WON!", player);
        } else {
            // draw
            renderer::render_board(&self.board);
            println!("THIS GAME IS A DRAW!");
        }
    }
    /// Checks each player for any vertical/horizontal/diagonal win, then
    /// falls back to a draw check on a full board.
    fn check_for_winner(&self) -> Option<GameOutcome> {
        let checks_for_player = |player| {
            let vertical = is_vertical_win(&self.board, player);
            let horizontal = is_horizontal_win(&self.board, player);
            let diagonal = is_diagonal_win(&self.board, player);
            // Any one winning line is enough.
            match (vertical, horizontal, diagonal) {
                (Some(_),_,_) |
                (_,Some(_),_) |
                (_,_,Some(_)) => Some(GameOutcome::Win(player)),
                _ => None
            }
        };
        let is_p1_win = checks_for_player(self.player_1);
        if is_p1_win.is_some() {
            return is_p1_win
        }
        let is_p2_win = checks_for_player(self.player_2);
        if is_p2_win.is_some() {
            return is_p2_win
        }
        if is_draw(&self.board) { Some(GameOutcome::Draw) } else { None }
    }
}
/// Reads a 1-based column choice from the player and converts it to the
/// 0-based column index used by the board.
fn human_move(_board: &Board) -> Result<u8, Box<dyn Error>> {
    let raw = renderer::read()?;
    match raw.trim().parse::<u8>() {
        // Zero is the only u8 below 1; reject it explicitly.
        Ok(0) => Err(Box::new(ioError::new(
            ErrorKind::InvalidInput,
            "Input cannot be less than 1",
        ))),
        Ok(column) => Ok(column - 1),
        Err(parse_err) => Err(Box::new(parse_err)),
    }
}
/// Chooses the computer's column.
fn computer_move(_board: &Board) -> u8 {
    // For now pick a uniformly random column 0-6; the `_board` parameter is
    // reserved for a future minimax implementation.
    let mut rng = thread_rng();
    rng.gen_range(0..=6)
} |
extern crate discord;
extern crate time;
extern crate chrono_humanize;
extern crate rand;
use arg;
use std::collections::{HashMap, HashSet};
use std::fs::{File, OpenOptions};
use std::io::Write;
use bot_utility::{extract_preceding_arg, remove_non_command_characters, extract_first_word,
BasicServerInfo, MessageRecipient};
use emojistats::{CustomEmoji, Database, Emoji};
use self::chrono_humanize::HumanTime;
use self::discord::model::{Event, Channel, ChannelId, ChannelType, Game, GameType, LiveServer,
Message, MessageType, OnlineStatus, PossibleServer, PrivateChannel,
PublicChannel, Server, ServerId, ServerInfo, User, UserId};
use self::rand::{Rng, thread_rng};
use self::time::{Timespec, get_time};
const RESPONSE_STATS_ERR: &str = "\
Sorry! An error occurred while retrieving the statistics. :(";
const RESPONSE_USE_COMMAND_IN_PUBLIC_CHANNEL: &str = "\
Please use this command in a public channel. :shrug:";
/// Fatal startup failures (discriminants 101/102 — presumably used as exit
/// codes by the caller; confirm).
#[derive(Debug)]
pub enum BotError {
    FailedToAuthenticate = 101,
    FailedToConnect = 102,
}
/// What the caller should do after `run` returns.
#[derive(Debug)]
pub enum BotDisposition {
    Quit,
    Restart,
}
/// Per-iteration verdict of the main event loop.
#[derive(Debug)]
enum BotLoopDisposition {
    Continue,
    Quit,
    Restart,
}
/// The emoji-statistics Discord bot: connection handles, cached server and
/// channel state, admin bookkeeping, and the statistics database.
pub struct Bot {
    discord: discord::Discord,
    discord_conn: discord::Connection,
    // Set at construction; used for uptime reporting.
    online_since: Timespec,
    bot_user_id: UserId,
    bot_admin_password: String,
    bot_admins: HashMap<UserId, User>,
    about_text: Option<String>,
    help_text: Option<String>,
    feedback_file: Option<File>,
    servers: HashMap<ServerId, BasicServerInfo>,
    public_text_channels: HashMap<ChannelId, PublicChannel>,
    private_channels: HashMap<ChannelId, PrivateChannel>,
    // Channels that stayed unknown after a server refresh; cached so we don't
    // re-query on every message.
    unknown_public_text_channels: HashSet<ChannelId>,
    db: Database,
    emoji: HashSet<Emoji>,
}
impl Bot {
    /// Authenticates with Discord, opens the websocket connection, and seeds
    /// the admin list with the application owner when available.
    pub fn new(bot_token: &str, bot_admin_password: &str, db: Database) -> Result<Bot, BotError> {
        let discord = match discord::Discord::from_bot_token(bot_token) {
            Ok(discord) => discord,
            Err(reason) => {
                error!("Failed to authenticate with Discord: {}", reason);
                return Err(BotError::FailedToAuthenticate);
            }
        };
        let (discord_conn, ready_event) = match discord.connect() {
            Ok((discord_conn, ready_event)) => (discord_conn, ready_event),
            Err(reason) => {
                error!("Failed to create websocket connection to Discord: {}",
                       reason);
                return Err(BotError::FailedToConnect);
            }
        };
        let bot_user_id = ready_event.user.id;
        let bot_admin_password = bot_admin_password.to_string();
        let mut bot_admins = HashMap::new();
        // The application owner is always an admin; failure to fetch the
        // application info is non-fatal.
        match discord.get_application_info() {
            Ok(application_info) => {
                debug!("Application owner = {}#{} ({})",
                       application_info.owner.name,
                       application_info.owner.discriminator,
                       application_info.owner.id);
                bot_admins.insert(application_info.owner.id, application_info.owner);
            }
            Err(_) => {
                debug!("No application info available");
            }
        }
        Ok(Bot {
            discord,
            discord_conn,
            online_since: get_time(),
            bot_user_id,
            bot_admin_password,
            bot_admins,
            about_text: None,
            help_text: None,
            feedback_file: None,
            servers: HashMap::new(),
            public_text_channels: HashMap::new(),
            private_channels: HashMap::new(),
            unknown_public_text_channels: HashSet::new(),
            db,
            emoji: HashSet::new(),
        })
    }
    /// Sets the text returned by the "about" command.
    pub fn set_about_text<S>(&mut self, text: S)
        where S: Into<String>
    {
        self.about_text = Some(text.into());
    }
    /// Sets the text returned by the "help" command.
    pub fn set_help_text<S>(&mut self, text: S)
        where S: Into<String>
    {
        self.help_text = Some(text.into());
    }
    /// Opens (append/create) the file that user feedback is logged to.
    /// Failure to open is logged and leaves feedback logging disabled.
    pub fn set_feedback_file<S>(&mut self, filename: S)
        where S: Into<String>
    {
        let filename = filename.into();
        match OpenOptions::new().append(true).create(true).open(&filename) {
            Ok(file) => {
                self.feedback_file = Some(file);
                info!("Logging feedback to file: <{}>", filename);
            }
            Err(reason) => {
                warn!("Unable to open file for logging feedback <{}>: {}",
                      filename,
                      reason);
            }
        }
    }
    /// Registers a Unicode emoji in the database (no owning server) and in
    /// the in-memory set; database failures are logged but non-fatal.
    pub fn add_unicode_emoji(&mut self, emoji: String) {
        let emoji = Emoji::Unicode(emoji);
        match self.db.add_emoji(&emoji, None) {
            Ok(_) => {}
            Err(reason) => {
                warn!("Error adding Unicode emoji <{:?}> to database: {}",
                      emoji,
                      reason);
            }
        }
        self.emoji.insert(emoji);
    }
    /// Main event loop: sets the bot's presence, then dispatches Discord
    /// events until a processed message requests quit or restart. Consumes
    /// the bot and shuts the connection down before returning.
    pub fn run(mut self) -> BotDisposition {
        self.set_game(format!("{} version {}",
                              env!("CARGO_PKG_NAME"),
                              env!("CARGO_PKG_VERSION")));
        let mut bot_loop_disposition = BotLoopDisposition::Continue;
        // Main loop
        let bot_disposition = loop {
            match self.discord_conn.recv_event() {
                // Only message processing can change the loop disposition;
                // every other event just updates cached state.
                Ok(Event::MessageCreate(message)) => {
                    bot_loop_disposition = self.process_message(message);
                }
                Ok(Event::ServerCreate(server)) => {
                    match server {
                        PossibleServer::Online(server) => {
                            self.add_emoji_list(server.id, server.emojis.clone());
                            self.add_live_server(server);
                        }
                        PossibleServer::Offline(_) => {}
                    }
                }
                Ok(Event::ServerUpdate(server)) => {
                    self.update_server(server);
                }
                Ok(Event::ServerDelete(possible_server)) => {
                    match possible_server {
                        PossibleServer::Online(server) => {
                            self.remove_server_id(&server.id);
                        }
                        PossibleServer::Offline(server_id) => {
                            self.remove_server_id(&server_id);
                        }
                    }
                }
                Ok(Event::ChannelCreate(channel)) => {
                    self.add_channel(channel);
                }
                Ok(Event::ChannelDelete(channel)) => {
                    self.remove_channel(&channel);
                }
                Ok(Event::ChannelUpdate(channel)) => {
                    self.update_channel(channel);
                }
                Ok(Event::ChannelRecipientAdd(_, user)) => {
                    self.add_user(&user);
                }
                Ok(Event::ServerMemberUpdate { user, .. }) => {
                    self.add_user(&user);
                }
                Ok(Event::ServerEmojisUpdate(server_id, emoji_list)) => {
                    self.add_emoji_list(server_id, emoji_list);
                }
                // Unknown events and receive errors are ignored.
                _ => {}
            }
            match bot_loop_disposition {
                BotLoopDisposition::Continue => {}
                BotLoopDisposition::Quit => {
                    break BotDisposition::Quit;
                }
                BotLoopDisposition::Restart => {
                    break BotDisposition::Restart;
                }
            }
        };
        let _ = self.discord_conn.shutdown();
        bot_disposition
    }
    /// Fetches the current server list and adds any servers not yet cached;
    /// fetch failures are silently ignored (best effort).
    fn check_for_new_servers(&mut self) {
        if let Ok(servers) = self.discord.get_servers() {
            for server_info in servers {
                if !self.servers.contains_key(&server_info.id) {
                    self.add_server_info(server_info);
                }
            }
        }
    }
    /// Caches a live server: its channels, its members, then the server
    /// summary itself (inserting unconditionally, so updates overwrite).
    fn add_live_server(&mut self, server: LiveServer) {
        if !self.servers.contains_key(&server.id) {
            debug!("Adding new server {} ({})", server.name, server.id);
        }
        for channel in &server.channels {
            self.add_channel(Channel::Public(channel.clone()));
        }
        for member in &server.members {
            self.add_user(&member.user);
        }
        self.servers
            .insert(server.id, BasicServerInfo::from(server));
    }
    /// Caches a server known only by summary info, fetching its channel list
    /// separately (fetch failures leave the channels uncached).
    fn add_server_info(&mut self, server: ServerInfo) {
        if !self.servers.contains_key(&server.id) {
            debug!("Adding new server {} ({})", server.name, server.id);
        }
        if let Ok(channels) = self.discord.get_server_channels(server.id) {
            for public_channel in channels {
                self.add_channel(Channel::Public(public_channel));
            }
        }
        self.servers
            .insert(server.id, BasicServerInfo::from(server));
    }
    /// Drops a server and every cached public text channel belonging to it.
    fn remove_server_id(&mut self, server_id: &ServerId) {
        if self.servers.contains_key(server_id) {
            debug!("Removing server {} and all associated channels", server_id);
            self.servers.remove(server_id);
            self.public_text_channels
                .retain(|_, c| c.server_id != *server_id);
        }
    }
    /// Refreshes a cached server's emoji, name, and icon.
    // NOTE(review): unlike `update_channel`, an unknown server is NOT added
    // here — the update is silently dropped; confirm that's intended.
    fn update_server(&mut self, new_server_info: Server) {
        self.add_emoji_list(new_server_info.id, new_server_info.emojis);
        if let Some(server) = self.servers.get_mut(&new_server_info.id) {
            debug!("Updating server info: {} -> {} ({})",
                   server.name,
                   new_server_info.name,
                   server.id);
            server.name = new_server_info.name;
            server.icon = new_server_info.icon;
            return;
        }
    }
    /// Caches a channel. Public non-text channels and group channels are
    /// ignored; public text channels are also persisted to the database.
    fn add_channel(&mut self, channel: Channel) {
        match channel {
            Channel::Public(channel) => {
                if channel.kind != ChannelType::Text {
                    return;
                }
                if !self.public_text_channels.contains_key(&channel.id) {
                    debug!("Adding new public text channel #{} ({})",
                           channel.name,
                           channel.id);
                }
                // Database failure is logged but the channel is still cached.
                if let Err(reason) = self.db.add_channel(&channel) {
                    warn!("Error adding channel ({}) to database: {}",
                          channel.id,
                          reason);
                }
                self.public_text_channels.insert(channel.id, channel);
            }
            Channel::Private(channel) => {
                if !self.private_channels.contains_key(&channel.id) {
                    debug!("Adding new private channel with {}#{} ({})",
                           channel.recipient.name,
                           channel.recipient.discriminator,
                           channel.id);
                }
                self.private_channels.insert(channel.id, channel);
            }
            Channel::Group(_) => {}
        }
    }
    /// Evicts a channel from the in-memory caches (group channels ignored;
    /// the database record is left untouched).
    fn remove_channel(&mut self, channel: &Channel) {
        match *channel {
            Channel::Public(ref channel) => {
                debug!("Removing public text channel #{} ({})",
                       channel.name,
                       channel.id);
                self.public_text_channels.remove(&channel.id);
            }
            Channel::Private(ref channel) => {
                debug!("Removing private channel with {}#{} ({})",
                       channel.recipient.name,
                       channel.recipient.discriminator,
                       channel.id);
                self.private_channels.remove(&channel.id);
            }
            Channel::Group(_) => {}
        }
    }
    /// Renames a cached public text channel, or caches it as new when it
    /// wasn't known yet. Private and group channel updates are ignored.
    fn update_channel(&mut self, channel: Channel) {
        match channel {
            Channel::Public(new_channel_info) => {
                if let Some(channel) = self.public_text_channels.get_mut(&new_channel_info.id) {
                    debug!("Updating existing public text channel #{} -> #{} ({})",
                           channel.name,
                           new_channel_info.name,
                           channel.id);
                    channel.name = new_channel_info.name;
                    return;
                }
                // Unknown channel: fall through to the normal add path.
                self.add_channel(Channel::Public(new_channel_info));
            }
            Channel::Private(_) => {}
            Channel::Group(_) => {}
        }
    }
    /// Persists a user to the database; failures are logged but non-fatal.
    fn add_user(&mut self, user: &User) {
        if let Err(reason) = self.db.add_user(user) {
            warn!("Error adding user {}#{} ({}) to database: {}",
                  user.name,
                  user.discriminator,
                  user.id,
                  reason);
        }
    }
    /// Tries to learn about an unknown channel: first adds any servers the
    /// bot didn't know, and — if the channel is still unknown — refreshes the
    /// channel lists of the servers it already knew about.
    fn resolve_unknown_channel(&mut self, channel_id: &ChannelId) {
        // Get an updated list of servers
        if let Ok(servers) = self.discord.get_servers() {
            let mut new_servers = HashSet::new();
            for server_info in &servers {
                // If this is a new server ID, add the information for the server
                if !self.servers.contains_key(&server_info.id) {
                    new_servers.insert(server_info.id);
                    self.add_server_info(server_info.clone());
                }
            }
            // If the channel ID is still unknown,
            if !self.public_text_channels.contains_key(channel_id) {
                // get updated data on all servers the bot already knew about
                for server_info in servers {
                    if !new_servers.contains(&server_info.id) {
                        self.add_server_info(server_info);
                    }
                }
            }
        }
    }
    /// Registers a server's custom emoji in the database and the in-memory
    /// set; per-emoji database failures are logged and skipped.
    fn add_emoji_list(&mut self, server_id: ServerId, emoji_list: Vec<discord::model::Emoji>) {
        for emoji in emoji_list {
            let custom_emoji = Emoji::Custom(CustomEmoji::new(server_id, emoji.id, emoji.name));
            match self.db.add_emoji(&custom_emoji, Some(&server_id)) {
                Ok(_) => {
                    debug!("Added custom emoji on server ({}): <{:?}>",
                           server_id,
                           custom_emoji);
                }
                Err(reason) => {
                    warn!("Error adding custom emoji <{:?}> to database: {}",
                          custom_emoji,
                          reason);
                }
            }
            self.emoji.insert(custom_emoji);
        }
    }
fn set_game<S>(&mut self, game_name: S)
where S: Into<String>
{
self.discord_conn
.set_presence(Some(Game {
name: game_name.into(),
kind: GameType::Playing,
url: None,
}),
OnlineStatus::Online,
false);
}
    /// Routes one incoming message and returns the main loop's next
    /// disposition.
    ///
    /// Order matters: unknown channels are resolved first (so later lookups
    /// see fresh data), then non-regular and bot-authored messages are
    /// dropped, then commands are dispatched (messages that start with a
    /// mention of the bot, or any message in a private channel), and finally
    /// everything else is scanned for emoji usage.
    fn process_message(&mut self, message: Message) -> BotLoopDisposition {
        // If the channel is unknown, try and get information on it by refreshing the server list
        //
        // If the channel is still unknown after refreshing the server list, add it to a list of
        // "unknown channels" so that we don't keep trying to get information on it
        if !self.public_text_channels.contains_key(&message.channel_id) &&
           !self.private_channels.contains_key(&message.channel_id) &&
           !self.unknown_public_text_channels
                .contains(&message.channel_id) {
            self.resolve_unknown_channel(&message.channel_id);
            if !self.public_text_channels.contains_key(&message.channel_id) {
                self.unknown_public_text_channels.insert(message.channel_id);
            }
        }
        // Ignore all messages except regular, text-based messages
        match message.kind {
            MessageType::Regular => {}
            _ => {
                return BotLoopDisposition::Continue;
            }
        }
        // Ignore messages sent by other bots
        if message.author.bot {
            return BotLoopDisposition::Continue;
        }
        // If the message begins with a user id,
        if let (Some(arg::Type::UserId(user_id)), command) =
            extract_preceding_arg(&message.content) {
            // And that user ID is the bot user ID, this message is a command
            if user_id == self.bot_user_id {
                return self.process_command(&message, command);
            }
        }
        // If the message was sent in a private channel to the bot, the entire message is a command
        if self.private_channels.contains_key(&message.channel_id) {
            return self.process_command(&message, &message.content);
        }
        // This is not a command; log the emoji and continue to the next event
        self.log_emoji_usage(&message);
        BotLoopDisposition::Continue
    }
fn log_emoji_usage(&self, message: &Message) {
match self.db.message_exists(&message.id) {
Ok(message_exists) => {
if message_exists {
return;
}
}
Err(reason) => {
warn!("Unable to determine whether message {} exists in database: {}",
message.id,
reason);
return;
}
}
let mut total_emoji_used = 0;
for emoji in &self.emoji {
let pattern = match *emoji {
Emoji::Custom(ref custom_emoji) => &custom_emoji.pattern,
Emoji::Unicode(ref emoji) => emoji,
};
let count = message.content.matches(pattern).count() as i32;
if count > 0 {
total_emoji_used += count;
match self.db
.record_emoji_usage(&message.channel_id,
&message.author.id,
emoji,
count) {
Ok(_) => {}
Err(reason) => warn!("Error recording emoji usage: {}", reason),
}
}
}
match self.db
.record_message_stats(&message.id,
&message.channel_id,
&message.author.id,
total_emoji_used) {
Ok(_) => {}
Err(reason) => {
warn!("Error recording statistics for message {}: {}",
message.id,
reason);
}
}
}
    /// Dispatches a command string: the first word (case-insensitive) selects
    /// the handler, and the handler's disposition is returned directly.
    ///
    /// Unrecognised input falls back to: user stats for a leading mention,
    /// channel stats for a leading #channel, emoji stats when the word matches
    /// a known emoji pattern, and the help text otherwise.
    fn process_command(&mut self, message: &Message, command: &str) -> BotLoopDisposition {
        let command = remove_non_command_characters(command);
        match extract_first_word(command) {
            (command, args) if !command.is_empty() => {
                // Commands are case-insensitive
                match command.to_lowercase().as_ref() {
                    "auth" => self.attempt_auth(message, args),
                    "botinfo" => self.bot_info(message),
                    "quit" => self.quit(message),
                    "restart" => self.restart(message),
                    "feedback" => self.feedback(message, args),
                    "about" | "info" => self.about(message),
                    "help" | "commands" => self.help(message),
                    "g" | "global" => self.stats_global(message),
                    "s" | "server" => self.stats_server(message),
                    "c" | "channel" => self.stats_channel(message, None),
                    "m" | "me" => self.stats_user(message, None),
                    _ => {
                        // Something else
                        // Did the user begin the message with a #channel or mention a user?
                        match arg::get_type(command) {
                            arg::Type::UserId(user_id) => {
                                self.stats_user(message, Some(&user_id));
                            }
                            arg::Type::ChannelId(channel_id) => {
                                self.stats_channel(message, Some(&channel_id));
                            }
                            _ => {
                                // Last resort: treat the word as an emoji pattern.
                                let mut matches =
                                    self.emoji.iter().filter(|e| e.pattern() == command);
                                if let Some(emoji) = matches.next() {
                                    self.stats_emoji(message, &emoji);
                                } else {
                                    self.help(message);
                                }
                            }
                        }
                        BotLoopDisposition::Continue
                    }
                }
            }
            _ => {
                // No command was provided
                self.help(message);
                BotLoopDisposition::Continue
            }
        }
    }
    /// Sends `text` to any message target (channel, private channel, message)
    /// through the Discord API; delivery errors are handled by the recipient impl.
    fn send_message(&self, recipient: &MessageRecipient, text: &str) {
        recipient.send_message(&self.discord, text);
    }
    /// Replies to `message`, prefixing the text with a mention of its author.
    fn send_response(&self, message: &Message, text: &str) {
        self.send_message(message, &format!("<@{}>: {}", message.author.id, text));
    }
fn attempt_auth(&mut self, message: &Message, password_attempt: &str) -> BotLoopDisposition {
if self.bot_admins.contains_key(&message.author.id) {
self.send_response(message,
"You are already authenticated as a bot administrator. :unlock:");
} else if !self.private_channels.contains_key(&message.channel_id) {
self.send_response(message,
"Please use this command in a private message. :lock:");
} else {
if password_attempt.is_empty() {
self.send_response(message,
"Please enter the bot administration password. :lock:");
} else if password_attempt == self.bot_admin_password {
self.send_response(message, "Authenticated successfully. :white_check_mark:");
self.bot_admins
.insert(message.author.id, message.author.clone());
} else {
self.send_response(message, "Unable to authenticate. :x:");
}
}
BotLoopDisposition::Continue
}
    /// Handles the `botinfo` command (administrators only): refreshes the
    /// server list and replies with version, uptime, and server/channel counts.
    fn bot_info(&mut self, message: &Message) -> BotLoopDisposition {
        if self.bot_admins.contains_key(&message.author.id) {
            self.check_for_new_servers();
            // NOTE(review): `online_since - get_time()` is a negative duration;
            // presumably HumanTime renders it as elapsed time — confirm.
            let online_time = HumanTime::from(self.online_since - get_time());
            self.send_response(message,
                               &format!("**{} version {}**\n\
                                        Online since {} on {} server{} comprising \
                                        {} text channel{}. :clock2:",
                                       env!("CARGO_PKG_NAME"),
                                       env!("CARGO_PKG_VERSION"),
                                       online_time,
                                       self.servers.len(),
                                       if self.servers.len() == 1 { "" } else { "s" },
                                       self.public_text_channels.len(),
                                       if self.public_text_channels.len() == 1 {
                                           ""
                                       } else {
                                           "s"
                                       }));
        } else {
            self.respond_auth_required(message);
        }
        BotLoopDisposition::Continue
    }
fn quit(&self, message: &Message) -> BotLoopDisposition {
if self.bot_admins.contains_key(&message.author.id) {
self.send_response(message, "Quitting. :octagonal_sign:");
info!("Quit command issued by {}.", message.author.name);
BotLoopDisposition::Quit
} else {
self.respond_auth_required(message);
BotLoopDisposition::Continue
}
}
fn restart(&self, message: &Message) -> BotLoopDisposition {
if self.bot_admins.contains_key(&message.author.id) {
self.send_response(message, "Restarting. :repeat:");
info!("Restart command issued by {}.", message.author.name);
BotLoopDisposition::Restart
} else {
self.respond_auth_required(message);
BotLoopDisposition::Continue
}
}
fn feedback(&self, message: &Message, feedback: &str) -> BotLoopDisposition {
self.send_response(message,
"Thanks. Your feedback has been logged for review. :smiley:");
// Write the feedback to log files
// If the the feedback spans multiple lines, indent the subsequent lines
let log_feedback =
feedback.replace("\n",
&format!("\n {}#{}> ",
message.author.name,
message.author.discriminator));
let log_feedback = format!("Feedback from {}#{}: {}\n",
message.author.name,
message.author.discriminator,
log_feedback);
info!("{}", log_feedback);
if self.feedback_file.is_some() {
match self.feedback_file
.as_ref()
.unwrap()
.write(log_feedback.as_bytes()) {
Ok(_) => {}
Err(reason) => {
warn!("Error writing feedback \"{}\" to log: {}", feedback, reason);
}
}
}
// Send the feedback to administrators
let feedback = format!("Feedback from {}#{}:\n```\n{}```",
message.author.name,
message.author.discriminator,
feedback);
for (user_id, user) in &self.bot_admins {
let mut num_channels_sent_to = 0;
// Look for an existing private channel for each administrator
for (channel_id, _) in self.private_channels
.iter()
.filter(|&(_, c)| c.recipient.id == *user_id) {
num_channels_sent_to += 1;
self.send_message(channel_id, &feedback);
}
// If there wasn't an existing private channel, create one
if num_channels_sent_to == 0 {
if let Ok(private_channel) = self.discord.create_private_channel(*user_id) {
self.send_message(&private_channel.id, &feedback);
} else {
warn!("Unable to create private channel to send feedback to bot administrator \
{}#{}.",
user.name,
user.discriminator);
}
}
}
BotLoopDisposition::Continue
}
fn about(&self, message: &Message) -> BotLoopDisposition {
if self.about_text.is_some() {
self.send_response(message, self.about_text.as_ref().unwrap());
}
BotLoopDisposition::Continue
}
fn help(&self, message: &Message) -> BotLoopDisposition {
if self.help_text.is_some() {
self.send_response(message, self.help_text.as_ref().unwrap());
}
BotLoopDisposition::Continue
}
fn stats_global(&self, message: &Message) -> BotLoopDisposition {
let top_emoji = match self.db.get_global_top_emoji() {
Ok(results) => results,
Err(reason) => {
warn!("Unable to retrieve global top used emoji: {}", reason);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
if top_emoji.len() == 0 {
self.send_response(message, "I've never seen anyone use any emoji. :shrug:");
} else {
let stats = create_emoji_usage_line(top_emoji);
let earth_emoji_list = [":earth_africa:", ":earth_americas:", ":earth_asia:"];
let earth = thread_rng().choose(&earth_emoji_list).unwrap();
let _ = self.discord
.send_embed(message.channel_id,
&format!("<@{}>", message.author.id),
|e| {
e.fields(|f| {
f.field(&format!("Top used emoji globally {}", earth),
&stats,
false)
})
});
}
BotLoopDisposition::Continue
}
fn stats_server(&self, message: &Message) -> BotLoopDisposition {
if self.private_channels.contains_key(&message.channel_id) {
self.send_response(message, RESPONSE_USE_COMMAND_IN_PUBLIC_CHANNEL);
return BotLoopDisposition::Continue;
}
let server_id = match self.public_text_channels.get(&message.channel_id) {
Some(channel) => channel.server_id,
None => {
warn!("Unknown public text channel ({})", message.channel_id);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
let top_emoji = match self.db.get_server_top_emoji(&server_id) {
Ok(results) => results,
Err(reason) => {
warn!("Unable to retrieve top used emoji on server ({}): {}",
server_id,
reason);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
if top_emoji.len() == 0 {
self.send_response(message,
"I've never seen anyone use any emoji on this server. :shrug:");
} else {
let top_users = match self.db.get_server_top_users(&server_id) {
Ok(results) => results,
Err(reason) => {
warn!("Unable to retrieve top users on server ({}): {}",
server_id,
reason);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
let user_stats = create_top_users_line(top_users);
let emoji_stats = create_emoji_usage_line(top_emoji);
let _ = self.discord
.send_embed(message.channel_id,
&format!("<@{}>", message.author.id),
|e| {
e.title("Statistics for this server :chart_with_upwards_trend:")
.fields(|f| {
f.field("Top emoji", &emoji_stats, true)
.field("Top users", &user_stats, true)
})
});
}
BotLoopDisposition::Continue
}
fn stats_channel(&self,
message: &Message,
channel_id: Option<&ChannelId>)
-> BotLoopDisposition {
if self.private_channels.contains_key(&message.channel_id) {
self.send_response(message, RESPONSE_USE_COMMAND_IN_PUBLIC_CHANNEL);
return BotLoopDisposition::Continue;
}
let channel_id = channel_id.unwrap_or(&message.channel_id);
let stats_description = match self.public_text_channels.get(&channel_id) {
Some(channel) => {
format!("Statistics for #{} :chart_with_upwards_trend:",
channel.name)
}
None => "Channel statistics :chart_with_upwards_trend:".to_string(),
};
let top_emoji = match self.db.get_channel_top_emoji(&channel_id) {
Ok(results) => results,
Err(reason) => {
warn!("Unable to retrieve top used emoji on channel ({}): {}",
message.channel_id,
reason);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
if top_emoji.len() == 0 {
self.send_response(message,
"I've never seen anyone use any emoji in that channel. :shrug:");
} else {
let top_users = match self.db.get_channel_top_users(&channel_id) {
Ok(results) => results,
Err(reason) => {
warn!("Unable to retrieve top users in channel ({}): {}",
channel_id,
reason);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
let user_stats = create_top_users_line(top_users);
let emoji_stats = create_emoji_usage_line(top_emoji);
let _ = self.discord
.send_embed(message.channel_id,
&format!("<@{}>", message.author.id),
|e| {
e.title(&stats_description)
.fields(|f| {
f.field("Top emoji", &emoji_stats, true)
.field("Top users", &user_stats, true)
})
});
}
BotLoopDisposition::Continue
}
fn stats_user(&self, message: &Message, user_id: Option<&UserId>) -> BotLoopDisposition {
let user_id = user_id.unwrap_or(&message.author.id);
if *user_id == self.bot_user_id {
self.send_response(message, "You're so silly! :smile:");
return BotLoopDisposition::Continue;
}
// If the bot knows which server is associated with the public text channel, get statistics
// for both Unicode emoji and custom emoji on the same server
// Otherwise, just get statistics for Unicode emoji
let server = match self.public_text_channels.get(&message.channel_id) {
Some(channel) => Some(&channel.server_id),
None => None,
};
let user_name = match self.db.get_user_name(user_id) {
Ok(maybe_user_name) => {
match maybe_user_name {
Some(user_name) => user_name,
None => "(Unknown user)".to_string(),
}
}
Err(reason) => {
debug!("Error retrieving user name for user ({}) from database: {}",
user_id,
reason);
"(Unknown user)".to_string()
}
};
let stats_description = if *user_id == message.author.id {
"Your favourite emoji :two_hearts:".to_string()
} else {
format!("{}'s favourite emoji :two_hearts:", user_name)
};
let top_emoji = match self.db.get_user_top_emoji(&user_id, server) {
Ok(results) => results,
Err(reason) => {
warn!("Unable to retrieve top emoji used by user {} ({}): {}",
user_name,
user_id,
reason);
self.send_response(message, RESPONSE_STATS_ERR);
return BotLoopDisposition::Continue;
}
};
if top_emoji.len() == 0 {
self.send_response(message,
&format!("I've never seen <@{}> use any emoji. :shrug:", user_id));
} else {
let stats = create_emoji_usage_line(top_emoji);
let _ = self.discord
.send_embed(message.channel_id,
&format!("<@{}>", message.author.id),
|e| e.fields(|f| f.field(&stats_description, &stats, false)));
}
BotLoopDisposition::Continue
}
    /// Replies with the total recorded usage count for a single emoji.
    /// A missing record or a count of zero both get the "never seen" reply;
    /// database errors are logged and reported generically.
    fn stats_emoji(&self, message: &Message, emoji: &Emoji) {
        match self.db.get_emoji_usage(emoji) {
            Ok(maybe_count) => {
                match maybe_count {
                    Some(count) if count > 0 => {
                        self.send_response(message,
                                           &format!("{} has been used {} time{}.",
                                                    emoji.pattern(),
                                                    count,
                                                    if count == 1 { "" } else { "s" }));
                    }
                    _ => {
                        self.send_response(message,
                                           &format!("I've never seen anyone use {}.",
                                                    emoji.pattern()));
                    }
                }
            }
            Err(reason) => {
                warn!("Error obtaining emoji usage stats for emoji {}: {}",
                      emoji.pattern(),
                      reason);
                self.send_response(message, RESPONSE_STATS_ERR);
            }
        }
    }
    /// Standard reply for admin-only commands issued by a non-authenticated user.
    fn respond_auth_required(&self, message: &Message) {
        self.send_response(message, "Please authenticate first. :lock:");
    }
}
/// Formats per-emoji usage counts, one "<emoji> used N time(s)" line each.
fn create_emoji_usage_line(emoji_usage: Vec<(Emoji, i64)>) -> String {
    use std::fmt::Write;
    let mut stats = String::new();
    for (emoji, count) in emoji_usage {
        // Both variants render identically, so resolve the display text once
        // instead of duplicating the format string in each match arm.
        let text = match emoji {
            Emoji::Custom(emoji) => emoji.pattern,
            Emoji::Unicode(emoji) => emoji,
        };
        // Writing into the existing buffer avoids a temporary String per row;
        // formatting into a String cannot fail, so the Result is ignored.
        let _ = write!(stats,
                       "{} used {} time{}\n",
                       text,
                       count,
                       if count == 1 { "" } else { "s" });
    }
    stats
}
/// Formats per-user emoji totals, one "<name> used N emoji" line each.
fn create_top_users_line(emoji_usage: Vec<(String, i64)>) -> String {
    use std::fmt::Write;
    let mut stats = String::new();
    for (user_name, count) in emoji_usage {
        // Writing into the existing buffer avoids a temporary String per row;
        // formatting into a String cannot fail, so the Result is ignored.
        let _ = writeln!(stats, "{} used {} emoji", user_name, count);
    }
    stats
}
|
// Generated (svd2rust-style) accessor plumbing for the RTC_DATE register:
// `R` wraps a read snapshot of the 32-bit value, `W` a pending write value.
#[doc = "Reader of register RTC_DATE"]
pub type R = crate::R<u32, super::RTC_DATE>;
#[doc = "Writer for register RTC_DATE"]
pub type W = crate::W<u32, super::RTC_DATE>;
#[doc = "Register RTC_DATE `reset()`'s with value 0x0101"]
impl crate::ResetValue for super::RTC_DATE {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Day field = 0x01, month field = 0x01, year field = 0x00.
        0x0101
    }
}
#[doc = "Reader of field `RTC_DATE`"]
pub type RTC_DATE_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RTC_DATE`"]
pub struct RTC_DATE_W<'a> {
    // Borrow of the whole-register writer; field writes merge into `w.bits`.
    w: &'a mut W,
}
impl<'a> RTC_DATE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Marked `unsafe` by the generator: the raw value is masked but not
    // otherwise validated against the field's legal (BCD) range.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0..=5 (mask 0x3f).
        self.w.bits = (self.w.bits & !0x3f) | ((value as u32) & 0x3f);
        self.w
    }
}
#[doc = "Reader of field `RTC_MON`"]
pub type RTC_MON_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RTC_MON`"]
pub struct RTC_MON_W<'a> {
    // Borrow of the whole-register writer; field writes merge into `w.bits`.
    w: &'a mut W,
}
impl<'a> RTC_MON_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Marked `unsafe` by the generator: the raw value is masked but not
    // otherwise validated against the field's legal (BCD) range.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 8..=12 (mask 0x1f shifted by 8).
        self.w.bits = (self.w.bits & !(0x1f << 8)) | (((value as u32) & 0x1f) << 8);
        self.w
    }
}
#[doc = "Reader of field `RTC_YEAR`"]
pub type RTC_YEAR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RTC_YEAR`"]
pub struct RTC_YEAR_W<'a> {
    // Borrow of the whole-register writer; field writes merge into `w.bits`.
    w: &'a mut W,
}
impl<'a> RTC_YEAR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Marked `unsafe` by the generator: the raw value is masked but not
    // otherwise validated against the field's legal (BCD) range.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 16..=23 (mask 0xff shifted by 16).
        self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);
        self.w
    }
}
// Read-side accessors: each extracts its field from the captured register
// value. Returned values are raw BCD bit-fields, not decoded integers.
impl R {
    #[doc = "Bits 0:5 - Calendar Day of the Month in BCD, 1-31 Automatic Leap Year Correction"]
    #[inline(always)]
    pub fn rtc_date(&self) -> RTC_DATE_R {
        RTC_DATE_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bits 8:12 - Calendar Month in BCD, 1-12"]
    #[inline(always)]
    pub fn rtc_mon(&self) -> RTC_MON_R {
        RTC_MON_R::new(((self.bits >> 8) & 0x1f) as u8)
    }
    #[doc = "Bits 16:23 - Calendar year in BCD, 0-99"]
    #[inline(always)]
    pub fn rtc_year(&self) -> RTC_YEAR_R {
        RTC_YEAR_R::new(((self.bits >> 16) & 0xff) as u8)
    }
}
// Write-side accessors: each returns a proxy whose `bits()` merges the field
// into the pending register value.
impl W {
    #[doc = "Bits 0:5 - Calendar Day of the Month in BCD, 1-31 Automatic Leap Year Correction"]
    #[inline(always)]
    pub fn rtc_date(&mut self) -> RTC_DATE_W {
        RTC_DATE_W { w: self }
    }
    #[doc = "Bits 8:12 - Calendar Month in BCD, 1-12"]
    #[inline(always)]
    pub fn rtc_mon(&mut self) -> RTC_MON_W {
        RTC_MON_W { w: self }
    }
    #[doc = "Bits 16:23 - Calendar year in BCD, 0-99"]
    #[inline(always)]
    pub fn rtc_year(&mut self) -> RTC_YEAR_W {
        RTC_YEAR_W { w: self }
    }
}
|
//! This example demonstrates using the [`Padding::colorize()`] function in several ways
//! to give a [`Table`] display a vibrant aesthetic.
//!
//! * 🚩 This example requires the `color` feature.
//!
//! * Note how the [`Color`] [setting](tabled::settings) is used to simplify creating
//! reusable themes for text, backgrounds, padded whitespace, and borders.
//!
//! * Note how a unique color can be set for each direction.
use std::convert::TryFrom;
use owo_colors::OwoColorize;
use tabled::{
grid::{
config::{ColoredConfig, Entity},
dimension::SpannedGridDimension,
records::{
vec_records::{Cell, VecRecords},
ExactRecords, PeekableRecords, Records,
},
util::string::string_width_multiline,
},
settings::{
object::{Columns, Object, Rows, Segment},
Alignment, CellOption, Color, Format, Margin, Modify, Padding, Style,
},
Table, Tabled,
};
#[derive(Tabled)]
#[tabled(rename_all = "PascalCase")]
/// One table row: a physical constant's name, symbol, value, and unit.
struct Fundamental {
    quantity: &'static str,
    symbol: &'static str,
    value: &'static str,
    unit: &'static str,
}
impl Fundamental {
fn new(
quantity: &'static str,
symbol: &'static str,
value: &'static str,
unit: &'static str,
) -> Self {
Self {
quantity,
symbol,
value,
unit,
}
}
}
/// Builds a table of physical constants and prints it with a layered color
/// theme: a grey outer margin, colored header padding, tinted data cells,
/// bold symbols, and italic units.
fn main() {
    // data source: https://www.britannica.com/science/physical-constant
    let data = [
        Fundamental::new(
            "constant of gravitation",
            "G",
            "6.67384 × 10⁻¹¹",
            "cubic metre per second squared per kilogram",
        ),
        Fundamental::new(
            "speed of light (in a vacuum)",
            "c",
            "2.99792458 × 10⁻⁸",
            "metres per second",
        ),
        Fundamental::new(
            "Planck's constant",
            "h",
            "6.626070040 × 10⁻³⁴",
            "joule second",
        ),
        Fundamental::new(
            "Boltzmann constant",
            "k",
            "1.38064852 × 10⁻²³",
            "joule per kelvin",
        ),
        Fundamental::new(
            "Faraday constant",
            "F",
            "9.648533289 × 10⁴",
            "coulombs per mole",
        ),
    ];
    // Theme colors built from owo-colors escape sequences: a light grey pane
    // plus a blue-grey tone reused for borders (bold) and data backgrounds.
    let pane_color = Color::try_from(' '.bg_rgb::<220, 220, 220>().to_string()).unwrap();
    let border_color = Color::try_from(' '.bg_rgb::<200, 200, 220>().bold().to_string()).unwrap();
    let data_color = Color::try_from(' '.bg_rgb::<200, 200, 220>().to_string()).unwrap();
    // Header row: a distinct padding color on each side, centered content.
    let header_settings = Modify::new(Rows::first())
        .with(Padding::new(1, 1, 2, 2).colorize(
            Color::BG_GREEN,
            Color::BG_YELLOW,
            Color::BG_MAGENTA,
            Color::BG_CYAN,
        ))
        .with(MakeMaxPadding)
        .with(Format::content(|s| s.on_black().white().to_string()));
    // Data rows (everything except the header): left-aligned, tinted padding.
    let data_settings = Modify::new(Rows::first().inverse())
        .with(Alignment::left())
        .with(MakeMaxPadding)
        .with(Padding::new(1, 1, 0, 0).colorize(
            Color::default(),
            Color::default(),
            data_color.clone(),
            data_color.clone(),
        ));
    let symbol_settings = Modify::new(Columns::single(1).not(Rows::first()))
        .with(Format::content(|s| s.bold().to_string()));
    let unit_settings = Modify::new(Columns::single(3).not(Rows::first()))
        .with(Format::content(|s| s.italic().to_string()));
    let table = Table::new(data)
        .with(Style::rounded())
        .with(Margin::new(1, 2, 1, 1).colorize(
            pane_color.clone(),
            pane_color.clone(),
            pane_color.clone(),
            pane_color,
        ))
        .with(border_color)
        .with(Modify::new(Segment::all()).with(data_color))
        .with(header_settings)
        .with(data_settings)
        .with(symbol_settings)
        .with(unit_settings)
        .to_string();
    println!("\n\n{table}\n\n");
}
#[derive(Debug, Clone)]
/// Cell setting that converts a column's spare width into left/right padding,
/// visually centering the cell content.
struct MakeMaxPadding;
impl<T> CellOption<VecRecords<T>, ColoredConfig> for MakeMaxPadding
where
    T: Cell + AsRef<str>,
{
    /// For each targeted cell, splits the column's unused width into left and
    /// right padding (left gets the smaller half when the split is odd),
    /// preserving any existing top/bottom padding.
    fn change(self, records: &mut VecRecords<T>, cfg: &mut ColoredConfig, entity: Entity) {
        // Column widths as the grid will actually render them.
        let widths = SpannedGridDimension::width(&*records, cfg);
        let count_rows = records.count_rows();
        let count_cols = records.count_columns();
        for (row, col) in entity.iter(count_rows, count_cols) {
            let column_width = widths[col];
            let text = records.get_text((row, col));
            // Width of the widest line of a (possibly multi-line) cell.
            let width = string_width_multiline(text);
            if width < column_width {
                let available_width = column_width - width;
                let left = available_width / 2;
                let right = available_width - left;
                let pos = (row, col).into();
                let mut pad = cfg.get_padding(pos);
                pad.left.size = left;
                pad.right.size = right;
                cfg.set_padding(pos, pad);
            }
        }
    }
}
|
use mysql;
use r2d2;
#[derive(Debug, Fail)]
/// Top-level error type for database operations (failure-crate based).
pub enum Error {
    /// Error bubbled up from the MySQL driver.
    #[fail(display = "MySqlError: {:#?}", _0)]
    MySqlError(mysql::Error),
    /// Error from the r2d2 connection pool.
    #[fail(display = "R2D2Error: {:#?}", _0)]
    R2D2Error(r2d2::Error),
    /// A required schema name was missing; the payload explains which.
    #[fail(display = "You must provide a schema: {}", _0)]
    SchemaError(String),
}
// These `From` impls let `?` convert driver and pool errors into `Error`.
impl From<mysql::Error> for Error {
    fn from(err: mysql::Error) -> Error {
        Error::MySqlError(err)
    }
}
impl From<r2d2::Error> for Error {
    fn from(err: r2d2::Error) -> Error {
        Error::R2D2Error(err)
    }
}
|
// Copyright 2016 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Storage of core types using RocksDB.
#![deny(non_upper_case_globals)]
#![deny(non_camel_case_types)]
#![deny(non_snake_case)]
#![deny(unused_mut)]
#![warn(missing_docs)]
extern crate grin_core as core;
extern crate rocksdb;
use std::sync::RwLock;
use core::ser;
use rocksdb::{DB, Options, Writable, DBCompactionStyle};
/// Main error type for this crate.
#[derive(Debug)]
pub enum Error {
    /// Wraps an error originating from RocksDB (which unfortunately returns
    /// string errors).
    RocksDbErr(String),
    /// Wraps a serialization error for Writeable or Readable
    SerErr(ser::Error),
}
// Lets RocksDB's String errors be converted implicitly by `try!`.
impl From<String> for Error {
    fn from(s: String) -> Error {
        Error::RocksDbErr(s)
    }
}
/// Thread-safe rocksdb wrapper
pub struct Store {
    // All access goes through this lock: writes/deletes take `write()`,
    // reads take `read()`.
    rdb: RwLock<DB>,
}
// SAFETY: NOTE(review) — these assert that the wrapped `DB` handle may be
// shared and sent across threads. That soundness depends entirely on the
// rocksdb crate's guarantees; confirm the handle is actually thread-safe
// before relying on these impls.
unsafe impl Sync for Store {}
unsafe impl Send for Store {}
impl Store {
    /// Opens a new RocksDB at the specified location.
    pub fn open(path: &str) -> Result<Store, Error> {
        let mut opts = Options::new();
        opts.create_if_missing(true);
        opts.set_compaction_style(DBCompactionStyle::DBUniversalCompaction);
        opts.set_max_open_files(256);
        // Durability traded for throughput: the OS decides when to flush.
        opts.set_use_fsync(false);
        let db = try!(DB::open(&opts, &path));
        Ok(Store { rdb: RwLock::new(db) })
    }
    /// Writes a single key/value pair to the db
    pub fn put(&self, key: &[u8], value: Vec<u8>) -> Result<(), Error> {
        let db = self.rdb.write().unwrap();
        db.put(key, &value[..]).map_err(Error::RocksDbErr)
    }
    /// Writes a single key and its `Writeable` value to the db. Encapsulates
    /// serialization.
    pub fn put_ser(&self, key: &[u8], value: &ser::Writeable) -> Result<(), Error> {
        let ser_value = ser::ser_vec(value);
        match ser_value {
            Ok(data) => self.put(key, data),
            Err(err) => Err(Error::SerErr(err)),
        }
    }
    /// Gets a value from the db, provided its key
    pub fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Error> {
        let db = self.rdb.read().unwrap();
        db.get(key).map(|r| r.map(|o| o.to_vec())).map_err(Error::RocksDbErr)
    }
    /// Gets a `Readable` value from the db, provided its key. Encapsulates
    /// serialization.
    pub fn get_ser<T: ser::Readable<T>>(&self, key: &[u8]) -> Result<Option<T>, Error> {
        // A limit of 0 means "deserialize from the full stored value".
        self.get_ser_limited(key, 0)
    }
    /// Gets a `Readable` value from the db, provided its key, allowing to
    /// extract only partial data. The underlying Readable size must align
    /// accordingly. Encapsulates serialization.
    pub fn get_ser_limited<T: ser::Readable<T>>(&self,
                                                key: &[u8],
                                                len: usize)
                                                -> Result<Option<T>, Error> {
        let data = try!(self.get(key));
        match data {
            Some(val) => {
                // `lval` must be `mut` because `deserialize` advances the slice.
                let mut lval = if len > 0 { &val[..len] } else { &val[..] };
                let r = try!(ser::deserialize(&mut lval).map_err(Error::SerErr));
                Ok(Some(r))
            }
            None => Ok(None),
        }
    }
    /// Deletes a key/value pair from the db
    pub fn delete(&self, key: &[u8]) -> Result<(), Error> {
        let db = self.rdb.write().unwrap();
        db.delete(key).map_err(Error::RocksDbErr)
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Paged list of dashboards with an optional link to the next page.
pub struct DashboardListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Dashboard>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// One dashboard part (tile): its grid position plus optional metadata.
pub struct DashboardParts {
    pub position: dashboard_parts::Position,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<DashboardPartMetadata>,
}
pub mod dashboard_parts {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    /// Grid placement of a dashboard part: origin plus row/column span.
    pub struct Position {
        pub x: i32,
        pub y: i32,
        #[serde(rename = "rowSpan")]
        pub row_span: i32,
        #[serde(rename = "colSpan")]
        pub col_span: i32,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub metadata: Option<serde_json::Value>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Base part metadata: only the discriminating `type` field.
pub struct DashboardPartMetadata {
    #[serde(rename = "type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Markdown-tile metadata, flattened on top of the base metadata.
pub struct MarkdownPartMetadata {
    #[serde(flatten)]
    pub dashboard_part_metadata: DashboardPartMetadata,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub inputs: Vec<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub settings: Option<markdown_part_metadata::Settings>,
}
pub mod markdown_part_metadata {
    use super::*;
    // The settings/content/settings nesting mirrors the REST payload shape.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Settings {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub content: Option<settings::Content>,
    }
    pub mod settings {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct Content {
            #[serde(default, skip_serializing_if = "Option::is_none")]
            pub settings: Option<content::Settings>,
        }
        pub mod content {
            use super::*;
            #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
            /// The markdown tile's actual display settings.
            pub struct Settings {
                #[serde(default, skip_serializing_if = "Option::is_none")]
                pub content: Option<String>,
                #[serde(default, skip_serializing_if = "Option::is_none")]
                pub title: Option<String>,
                #[serde(default, skip_serializing_if = "Option::is_none")]
                pub subtitle: Option<String>,
                #[serde(rename = "markdownSource", default, skip_serializing_if = "Option::is_none")]
                pub markdown_source: Option<i32>,
                #[serde(rename = "markdownUri", default, skip_serializing_if = "Option::is_none")]
                pub markdown_uri: Option<String>,
            }
        }
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// An ordered group ("lens") of dashboard parts.
pub struct DashboardLens {
    pub order: i32,
    pub parts: Vec<DashboardParts>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Dashboard payload: its lenses and free-form metadata.
pub struct DashboardProperties {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub lenses: Vec<DashboardLens>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// A dashboard ARM resource; `location` is the only required field.
pub struct Dashboard {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DashboardProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// PATCH body for a dashboard: only properties and tags may change.
pub struct PatchableDashboard {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DashboardProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Paged list of resource-provider operations.
pub struct ResourceProviderOperationList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ResourceProviderOperation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// One operation exposed by the resource provider, with display strings.
pub struct ResourceProviderOperation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<resource_provider_operation::Display>,
}
pub mod resource_provider_operation {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    /// Human-readable description of an operation.
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDefinition>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDefinition {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<ErrorDefinition>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConfigurationList {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Configuration>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Configuration {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ConfigurationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConfigurationProperties {
#[serde(rename = "enforcePrivateMarkdownStorage", default, skip_serializing_if = "Option::is_none")]
pub enforce_private_markdown_storage: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ViolationsList {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Violation>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Violation {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(rename = "userId", default, skip_serializing_if = "Option::is_none")]
pub user_id: Option<String>,
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
#[serde(flatten)]
pub resource: Resource,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
|
use anyhow::Result;
use itertools::Itertools;
use std::{collections::VecDeque, fs};
/// Returns the first value in `input` (one number per line) that is NOT the
/// sum of two numbers at distinct positions among the `preamble_length`
/// values immediately before it (the Advent of Code 2020 day 9 rule — the
/// unit test below pins the worked example from that puzzle).
///
/// Improvements over the previous version: no third-party `itertools`
/// dependency (plain nested pair scan over the window), and `checked_add`
/// so a pair whose sum overflows `u32` cannot panic in debug builds — an
/// overflowing sum exceeds `u32::MAX` and therefore can never equal `n`.
///
/// Panics if a line fails to parse as `u32` or if no such value exists.
fn find_abberation(input: &str, preamble_length: usize) -> u32 {
    let mut ring = VecDeque::<u32>::new();
    input
        .lines()
        .enumerate()
        .find_map(|(index, line)| {
            let n: u32 = line.parse().unwrap();
            if index < preamble_length {
                // Still filling the sliding window; nothing to validate yet.
                ring.push_back(n);
                return None;
            }
            // Does any unordered pair of distinct window positions sum to n?
            let has_pair = ring.iter().enumerate().any(|(i, &a)| {
                ring.iter().skip(i + 1).any(|&b| a.checked_add(b) == Some(n))
            });
            if has_pair {
                // Valid number: slide the window forward by one.
                ring.push_back(n);
                ring.pop_front();
                None
            } else {
                Some(n)
            }
        })
        .expect("input contains no aberration")
}
/// Entry point: reads the puzzle input from `input.txt` and prints the first
/// number violating the 25-value preamble rule.
fn main() -> Result<()> {
    let puzzle = fs::read_to_string("input.txt")?;
    let answer = find_abberation(&puzzle, 25);
    println!("{:?}", answer);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Worked example from the puzzle statement: with a preamble of 5,
    // 127 is the first value that is not a sum of two of the previous five.
    #[test]
    fn first() {
        let input = "35
20
15
25
47
40
62
55
65
95
102
117
150
182
127
219
299
277
309
576";
        assert_eq!(127, find_abberation(&input, 5));
    }
}
|
// Replace the default system allocator with mimalloc for the whole process.
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
use std::borrow::Cow;
use std::future::Future;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use bytes::Bytes;
use futures::future::{ok, FutureExt};
use futures::stream::StreamExt;
use ntex::http::body::Body;
use ntex::http::header::{HeaderValue, CONTENT_TYPE, SERVER};
use ntex::http::{HttpService, KeepAlive, Request, Response, StatusCode};
use ntex::service::{Service, ServiceFactory};
use ntex::web::Error;
use sailfish::TemplateOnce;
use smallvec::{smallvec, SmallVec};
use tokio_postgres::{connect, Client, NoTls, Statement};
/// One fortune row: database id plus the message text. `Cow` lets the
/// hard-coded extra fortune avoid an allocation.
struct Fortune {
    id: i32,
    message: Cow<'static, str>,
}
/// Sailfish template (`fortune.stpl`) rendering the fortunes list.
#[derive(TemplateOnce)]
#[template(path = "fortune.stpl", rm_whitespace = true)]
struct Fortunes {
    // Inline storage for up to 32 rows before spilling to the heap.
    items: SmallVec<[Fortune; 32]>,
}
/// Per-service state: pre-built response headers plus a dedicated
/// PostgreSQL client and its prepared fortunes query.
struct App {
    hdr_srv: HeaderValue,
    hdr_cthtml: HeaderValue,
    db: Client,
    fortune: Statement,
}
impl Service for App {
    type Request = Request;
    type Response = Response;
    type Error = Error;
    type Future = Pin<Box<dyn Future<Output = Result<Response, Error>>>>;
    #[inline]
    fn poll_ready(&self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Always ready: each request starts its own DB query future.
        Poll::Ready(Ok(()))
    }
    /// Routes by path: `/fortunes` renders the fortunes page; everything
    /// else returns 404.
    fn call(&self, req: Request) -> Self::Future {
        let path = req.path();
        match path {
            "/fortunes" => {
                let h_srv = self.hdr_srv.clone();
                let h_ct = self.hdr_cthtml.clone();
                // Start the query before moving into the async block so no
                // borrow of `self` has to live inside the future.
                let fut = self.db.query_raw(&self.fortune, &[]);
                Box::pin(async move {
                    let mut stream = fut.await.map_err(|e| {
                        io::Error::new(io::ErrorKind::Other, format!("{:?}", e))
                    })?;
                    // Seed with the extra fortune added at request time.
                    let mut items: SmallVec<[_; 32]> = smallvec![Fortune {
                        id: 0,
                        message: Cow::Borrowed(
                            "Additional fortune added at request time."
                        ),
                    }];
                    while let Some(row) = stream.next().await {
                        let row = row.map_err(|e| {
                            io::Error::new(io::ErrorKind::Other, format!("{:?}", e))
                        })?;
                        items.push(Fortune {
                            id: row.get(0),
                            message: Cow::Owned(row.get(1)),
                        });
                    }
                    // Sort by message text before rendering.
                    items.sort_by(|it, next| it.message.cmp(&next.message));
                    let body = match (Fortunes { items }).render_once() {
                        Ok(body) => Ok(Bytes::from(body)),
                        Err(e) => {
                            Err(io::Error::new(io::ErrorKind::Other, e.to_string()))
                        }
                    }?;
                    let mut res = Response::with_body(StatusCode::OK, Body::Bytes(body));
                    let hdrs = res.headers_mut();
                    hdrs.insert(SERVER, h_srv);
                    hdrs.insert(CONTENT_TYPE, h_ct);
                    Ok(res)
                })
            }
            // FIX: use the `StatusCode` imported from `ntex::http` (as the OK
            // branch above does) instead of reaching into the external `http`
            // crate path — `Response::new` expects ntex's status type.
            _ => Box::pin(ok(Response::new(StatusCode::NOT_FOUND))),
        }
    }
}
/// Zero-sized factory producing one `App` service per worker/connection.
#[derive(Clone)]
struct AppFactory;
impl ServiceFactory for AppFactory {
    type Config = ();
    type Request = Request;
    type Response = Response;
    type Error = Error;
    type Service = App;
    type InitError = ();
    type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>;
    /// Builds one `App`: connects to the benchmark database, drives the
    /// connection on a spawned task, and prepares the fortunes query.
    fn new_service(&self, _: ()) -> Self::Future {
        // Fixed benchmark DSN (the "tfb-database" host and credentials are
        // part of the public TechEmpower benchmark setup, not secrets).
        const DB_URL: &str =
            "postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world";
        Box::pin(async move {
            let (db, conn) = connect(DB_URL, NoTls)
                .await
                .expect("can not connect to postgresql");
            // The connection future must be polled for queries to progress,
            // so detach it onto the runtime.
            ntex::rt::spawn(conn.map(|_| ()));
            let fortune = db.prepare("SELECT * FROM fortune").await.unwrap();
            Ok(App {
                hdr_srv: HeaderValue::from_static("N"),
                hdr_cthtml: HeaderValue::from_static("text/html; charset=utf-8"),
                db,
                fortune,
            })
        })
    }
}
/// Starts the HTTP/1 server on all interfaces, port 8080.
#[ntex::main]
async fn main() -> std::io::Result<()> {
    // NOTE(review): the log line says 127.0.0.1 but the bind below is
    // 0.0.0.0:8080 (all interfaces) — the message understates the exposure.
    println!("Starting http server: 127.0.0.1:8080");
    ntex::server::build()
        .backlog(1024)
        .bind("techempower", "0.0.0.0:8080", || {
            HttpService::build()
                // OS-level keep-alive, no client timeout: benchmark settings.
                .keep_alive(KeepAlive::Os)
                .client_timeout(0)
                .h1(AppFactory)
                .tcp()
        })?
        .start()
        .await
}
|
use crate::block::Content;
use crate::blockchain::BlockChain;
use crate::blockdb::BlockDatabase;
use crate::crypto::hash::{Hashable, H256};
use crate::experiment::performance_counter::PERFORMANCE_COUNTER;
use crate::transaction::{CoinId, Output, Transaction};
use crate::utxodb::UtxoDatabase;
use crate::wallet::Wallet;
use crossbeam::channel;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use std::thread;
/// Drives ledger confirmation: pulls ledger updates from the blockchain,
/// dispatches the resulting transactions to UTXO workers with hazard
/// ordering, and forwards coin diffs to the wallet.
pub struct LedgerManager {
    // Block store used to fetch confirmed/deconfirmed block contents.
    blockdb: Arc<BlockDatabase>,
    chain: Arc<BlockChain>,
    utxodb: Arc<UtxoDatabase>,
    wallet: Arc<Wallet>,
}
impl LedgerManager {
    /// Creates a manager holding shared handles to the block store,
    /// blockchain, UTXO database, and wallet.
    pub fn new(
        blockdb: &Arc<BlockDatabase>,
        chain: &Arc<BlockChain>,
        utxodb: &Arc<UtxoDatabase>,
        wallet: &Arc<Wallet>,
    ) -> Self {
        Self {
            blockdb: Arc::clone(&blockdb),
            chain: Arc::clone(&chain),
            utxodb: Arc::clone(&utxodb),
            wallet: Arc::clone(&wallet),
        }
    }
    /// Spawns the ledger pipeline and returns immediately; the spawned
    /// threads run forever. Pipeline stages: (1) a sequencer thread producing
    /// (added, removed) transaction diffs, (2) a dispatcher thread enforcing
    /// hazard ordering via a scoreboard, (3) `num_workers` UTXO workers, and
    /// (4) a wallet-update thread consuming coin diffs.
    pub fn start(self, buffer_size: usize, num_workers: usize) {
        // start thread that updates transaction sequence
        let blockdb = Arc::clone(&self.blockdb);
        let chain = Arc::clone(&self.chain);
        let (tx_diff_tx, tx_diff_rx) = channel::bounded(buffer_size);
        thread::spawn(move || loop {
            let tx_diff = update_transaction_sequence(&blockdb, &chain);
            tx_diff_tx.send(tx_diff).unwrap();
        });
        // start thread that dispatches jobs to utxo manager
        // NOTE(review): this clone is bound to `_utxodb` and never used — it
        // only bumps the Arc refcount for no visible purpose.
        let _utxodb = Arc::clone(&self.utxodb);
        // Scoreboard notes the transaction ID of the coins that is being looked up, may be added,
        // or may be deleted. Before dispatching a transaction, we first check whether the input
        // and output are used by transactions being processed. If no, we will dispatch this
        // transaction. Otherwise, we will wait until this situation clears. This prevents Read
        // After Write (must ins/del then check), Write After Read (must check then ins/del), and
        // Write After Write (must ins then del) hazards. We can also do this at CoinId level, but
        // doing this at transaction hash level should be pretty sufficient.
        let mut scoreboard: HashSet<H256> = HashSet::new();
        // Transaction coins keeps the mapping between transaction ID and the entries in the
        // scoreboard that this transaction is responsible for.
        let mut transaction_coins: HashMap<H256, Vec<H256>> = HashMap::new();
        let (transaction_tx, transaction_rx) = channel::bounded(buffer_size * num_workers);
        let (notification_tx, notification_rx) = channel::unbounded();
        let (coin_diff_tx, coin_diff_rx) = channel::unbounded();
        // Dispatcher thread. NOTE(review): the removed-tx and added-tx loop
        // bodies below are intentionally symmetric; they differ only in the
        // drain order (removed is replayed in reverse) and in the bool flag
        // sent to the workers (false = remove, true = add).
        thread::spawn(move || {
            loop {
                // get the diff
                let (mut added_tx, mut removed_tx) = tx_diff_rx.recv().unwrap();
                // dispatch transactions
                for (t, h) in removed_tx.drain(..).rev() {
                    // drain the notification channel so that we mark all finished transaction as
                    // finished
                    for processed in notification_rx.try_iter() {
                        let finished_coins = transaction_coins.remove(&processed).unwrap();
                        for hash in &finished_coins {
                            scoreboard.remove(&hash);
                        }
                    }
                    // collect the tx hash of all coins this tx will touch
                    let mut touched_coin_transaction_hash: HashSet<H256> = HashSet::new();
                    touched_coin_transaction_hash.insert(h); // the transaction hash of all output coins
                    for input in &t.input {
                        touched_coin_transaction_hash.insert(input.coin.hash); // tx hash of input coin
                    }
                    // wait until we are not touching hot coins
                    while !scoreboard.is_disjoint(&touched_coin_transaction_hash) {
                        let processed = notification_rx.recv().unwrap();
                        let finished_coins = transaction_coins.remove(&processed).unwrap();
                        for hash in &finished_coins {
                            scoreboard.remove(&hash);
                        }
                    }
                    // mark the coins that we will be touching as hot
                    let mut touched: Vec<H256> = vec![];
                    for hash in touched_coin_transaction_hash.drain() {
                        touched.push(hash);
                        scoreboard.insert(hash);
                    }
                    transaction_coins.insert(h, touched);
                    transaction_tx.send((false, t, h)).unwrap();
                }
                for (t, h) in added_tx.drain(..) {
                    // drain the notification channel so that we mark all finished transaction as
                    // finished
                    for processed in notification_rx.try_iter() {
                        let finished_coins = transaction_coins.remove(&processed).unwrap();
                        for hash in &finished_coins {
                            scoreboard.remove(&hash);
                        }
                    }
                    // collect the tx hash of all coins this tx will touch
                    let mut touched_coin_transaction_hash: HashSet<H256> = HashSet::new();
                    touched_coin_transaction_hash.insert(h); // the transaction hash of all output coins
                    for input in &t.input {
                        touched_coin_transaction_hash.insert(input.coin.hash); // tx hash of input coin
                    }
                    // wait until we are not touching hot coins
                    while !scoreboard.is_disjoint(&touched_coin_transaction_hash) {
                        let processed = notification_rx.recv().unwrap();
                        let finished_coins = transaction_coins.remove(&processed).unwrap();
                        for hash in &finished_coins {
                            scoreboard.remove(&hash);
                        }
                    }
                    // mark the coins that we will be touching as hot
                    let mut touched: Vec<H256> = vec![];
                    for hash in touched_coin_transaction_hash.drain() {
                        touched.push(hash);
                        scoreboard.insert(hash);
                    }
                    transaction_coins.insert(h, touched);
                    transaction_tx.send((true, t, h)).unwrap();
                }
            }
        });
        // start utxo manager
        let utxo_manager = UtxoManager {
            utxodb: Arc::clone(&self.utxodb),
            transaction_chan: transaction_rx,
            coin_chan: coin_diff_tx,
            notification_chan: notification_tx,
        };
        utxo_manager.start(num_workers);
        // start thread that writes to wallet
        let wallet = Arc::clone(&self.wallet);
        thread::spawn(move || loop {
            let coin_diff = coin_diff_rx.recv().unwrap();
            wallet.apply_diff(&coin_diff.0, &coin_diff.1).unwrap();
        });
    }
}
/// Worker handle for applying transactions to the UTXO database. Cloned once
/// per worker thread; all clones share the same channels and database.
#[derive(Clone)]
struct UtxoManager {
    utxodb: Arc<UtxoDatabase>,
    /// Channel for dispatching jobs (add/delete, transaction, hash of transaction).
    transaction_chan: channel::Receiver<(bool, Transaction, H256)>,
    /// Channel for returning added and removed coins.
    coin_chan: channel::Sender<(Vec<(CoinId, Output)>, Vec<CoinId>)>,
    /// Channel for notifying the dispatcher about the completion of processing this transaction.
    notification_chan: channel::Sender<H256>,
}
impl UtxoManager {
    /// Spawns `num_workers` threads, each running `worker_loop` on a clone of
    /// this manager (clones share the channels and the UTXO database).
    fn start(self, num_workers: usize) {
        for _ in 0..num_workers {
            let worker = self.clone();
            thread::spawn(move || worker.worker_loop());
        }
    }
    /// Processes dispatched jobs forever: applies (or reverts) each
    /// transaction against the UTXO database, forwards the resulting coin
    /// diff, then notifies the dispatcher that this transaction is done.
    fn worker_loop(&self) {
        loop {
            let (is_add, tx, tx_hash) = self.transaction_chan.recv().unwrap();
            let diff = if is_add {
                self.utxodb.add_transaction(&tx, tx_hash).unwrap()
            } else {
                self.utxodb.remove_transaction(&tx, tx_hash).unwrap()
            };
            self.coin_chan.send(diff).unwrap();
            self.notification_chan.send(tx_hash).unwrap();
        }
    }
}
/// Pulls the latest ledger update from `chain` and expands it into the
/// transactions to apply (first element) and to roll back (second element),
/// each paired with its precomputed hash.
///
/// `diff.0` holds newly confirmed block hashes; `diff.1` holds deconfirmed
/// ones (it drives the deconfirm counter below).
fn update_transaction_sequence(
    blockdb: &BlockDatabase,
    chain: &BlockChain,
) -> (Vec<(Transaction, H256)>, Vec<(Transaction, H256)>) {
    let diff = chain.update_ledger().unwrap();
    PERFORMANCE_COUNTER.record_deconfirm_transaction_blocks(diff.1.len());
    // gather the transaction diff
    let mut add: Vec<(Transaction, H256)> = vec![];
    let mut remove: Vec<(Transaction, H256)> = vec![];
    for hash in diff.0 {
        let block = blockdb.get(&hash).unwrap().unwrap();
        PERFORMANCE_COUNTER.record_confirm_transaction_block(&block);
        // Only transaction blocks are expected in a ledger diff.
        let content = match block.content {
            Content::Transaction(data) => data,
            _ => unreachable!(),
        };
        let mut transactions = content
            .transactions
            .iter()
            .map(|t| (t.clone(), t.hash()))
            .collect();
        // TODO: precompute the hash here. Note that although lazy-eval for tx hash, and we could have
        // just called hash() here without storing the results (the results will be cached in the struct),
        // such function call will be optimized away by LLVM. As a result, we have to manually pass the hash
        // here. The same for added transactions below. This is a very ugly hack.
        add.append(&mut transactions);
    }
    for hash in diff.1 {
        let block = blockdb.get(&hash).unwrap().unwrap();
        let content = match block.content {
            Content::Transaction(data) => data,
            _ => unreachable!(),
        };
        let mut transactions = content
            .transactions
            .iter()
            .map(|t| (t.clone(), t.hash()))
            .collect();
        remove.append(&mut transactions);
    }
    (add, remove)
}
|
// NOTE(review): this looks like svd2rust-generated register access code for
// CR1; each `*_W` proxy performs a read-modify-write on the cached `bits`
// value (mask the bit out, then OR in the new value at its position).
// Prefer regenerating from the SVD over hand-editing.
#[doc = "Reader of register CR1"]
pub type R = crate::R<u32, super::CR1>;
#[doc = "Writer for register CR1"]
pub type W = crate::W<u32, super::CR1>;
#[doc = "Register CR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::CR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `IOLOCK`"]
pub type IOLOCK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IOLOCK`"]
pub struct IOLOCK_W<'a> {
    w: &'a mut W,
}
impl<'a> IOLOCK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 16.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `TCRCINI`"]
pub type TCRCINI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TCRCINI`"]
pub struct TCRCINI_W<'a> {
    w: &'a mut W,
}
impl<'a> TCRCINI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 15.
        self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
        self.w
    }
}
#[doc = "Reader of field `RCRCINI`"]
pub type RCRCINI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RCRCINI`"]
pub struct RCRCINI_W<'a> {
    w: &'a mut W,
}
impl<'a> RCRCINI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 14.
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
#[doc = "Reader of field `CRC33_17`"]
pub type CRC33_17_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CRC33_17`"]
pub struct CRC33_17_W<'a> {
    w: &'a mut W,
}
impl<'a> CRC33_17_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 13.
        self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
#[doc = "Reader of field `SSI`"]
pub type SSI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SSI`"]
pub struct SSI_W<'a> {
    w: &'a mut W,
}
impl<'a> SSI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 12.
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
#[doc = "Reader of field `HDDIR`"]
pub type HDDIR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HDDIR`"]
pub struct HDDIR_W<'a> {
    w: &'a mut W,
}
impl<'a> HDDIR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 11.
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
#[doc = "Reader of field `CSUSP`"]
pub type CSUSP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CSUSP`"]
pub struct CSUSP_W<'a> {
    w: &'a mut W,
}
impl<'a> CSUSP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 10.
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `CSTART`"]
pub type CSTART_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CSTART`"]
pub struct CSTART_W<'a> {
    w: &'a mut W,
}
impl<'a> CSTART_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 9.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
#[doc = "Reader of field `MASRX`"]
pub type MASRX_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MASRX`"]
pub struct MASRX_W<'a> {
    w: &'a mut W,
}
impl<'a> MASRX_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 8.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `SPE`"]
pub type SPE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SPE`"]
pub struct SPE_W<'a> {
    w: &'a mut W,
}
impl<'a> SPE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0 (no shift needed).
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// Field readers: each extracts one bit of CR1 at the position matching the
// corresponding `*_W` proxy above.
impl R {
    #[doc = "Bit 16 - IOLOCK"]
    #[inline(always)]
    pub fn iolock(&self) -> IOLOCK_R {
        IOLOCK_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 15 - TCRCINI"]
    #[inline(always)]
    pub fn tcrcini(&self) -> TCRCINI_R {
        TCRCINI_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 14 - RCRCINI"]
    #[inline(always)]
    pub fn rcrcini(&self) -> RCRCINI_R {
        RCRCINI_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 13 - CRC33_17"]
    #[inline(always)]
    pub fn crc33_17(&self) -> CRC33_17_R {
        CRC33_17_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 12 - SSI"]
    #[inline(always)]
    pub fn ssi(&self) -> SSI_R {
        SSI_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 11 - HDDIR"]
    #[inline(always)]
    pub fn hddir(&self) -> HDDIR_R {
        HDDIR_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 10 - CSUSP"]
    #[inline(always)]
    pub fn csusp(&self) -> CSUSP_R {
        CSUSP_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 9 - CSTART"]
    #[inline(always)]
    pub fn cstart(&self) -> CSTART_R {
        CSTART_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 8 - MASRX"]
    #[inline(always)]
    pub fn masrx(&self) -> MASRX_R {
        MASRX_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 0 - SPE"]
    #[inline(always)]
    pub fn spe(&self) -> SPE_R {
        SPE_R::new((self.bits & 0x01) != 0)
    }
}
// Field writers: each returns the matching single-bit write proxy borrowing
// this writer.
impl W {
    #[doc = "Bit 16 - IOLOCK"]
    #[inline(always)]
    pub fn iolock(&mut self) -> IOLOCK_W {
        IOLOCK_W { w: self }
    }
    #[doc = "Bit 15 - TCRCINI"]
    #[inline(always)]
    pub fn tcrcini(&mut self) -> TCRCINI_W {
        TCRCINI_W { w: self }
    }
    #[doc = "Bit 14 - RCRCINI"]
    #[inline(always)]
    pub fn rcrcini(&mut self) -> RCRCINI_W {
        RCRCINI_W { w: self }
    }
    #[doc = "Bit 13 - CRC33_17"]
    #[inline(always)]
    pub fn crc33_17(&mut self) -> CRC33_17_W {
        CRC33_17_W { w: self }
    }
    #[doc = "Bit 12 - SSI"]
    #[inline(always)]
    pub fn ssi(&mut self) -> SSI_W {
        SSI_W { w: self }
    }
    #[doc = "Bit 11 - HDDIR"]
    #[inline(always)]
    pub fn hddir(&mut self) -> HDDIR_W {
        HDDIR_W { w: self }
    }
    #[doc = "Bit 10 - CSUSP"]
    #[inline(always)]
    pub fn csusp(&mut self) -> CSUSP_W {
        CSUSP_W { w: self }
    }
    #[doc = "Bit 9 - CSTART"]
    #[inline(always)]
    pub fn cstart(&mut self) -> CSTART_W {
        CSTART_W { w: self }
    }
    #[doc = "Bit 8 - MASRX"]
    #[inline(always)]
    pub fn masrx(&mut self) -> MASRX_W {
        MASRX_W { w: self }
    }
    #[doc = "Bit 0 - SPE"]
    #[inline(always)]
    pub fn spe(&mut self) -> SPE_W {
        SPE_W { w: self }
    }
}
|
// NOTE(review): svd2rust-style generated reader types for the read-only SR
// (tamper status) register; identical flag shapes are shared via `pub use`
// aliases. Prefer regenerating from the SVD over hand-editing.
#[doc = "Register `SR` reader"]
pub type R = crate::R<SR_SPEC>;
#[doc = "Field `TAMP1F` reader - TAMP1F"]
pub type TAMP1F_R = crate::BitReader<TAMP1F_A>;
#[doc = "TAMP1F\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TAMP1F_A {
    #[doc = "0: No tamper detected"]
    Idle = 0,
    #[doc = "1: Tamper detected"]
    Tamper = 1,
}
impl From<TAMP1F_A> for bool {
    #[inline(always)]
    fn from(variant: TAMP1F_A) -> Self {
        variant as u8 != 0
    }
}
impl TAMP1F_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TAMP1F_A {
        match self.bits {
            false => TAMP1F_A::Idle,
            true => TAMP1F_A::Tamper,
        }
    }
    #[doc = "No tamper detected"]
    #[inline(always)]
    pub fn is_idle(&self) -> bool {
        *self == TAMP1F_A::Idle
    }
    #[doc = "Tamper detected"]
    #[inline(always)]
    pub fn is_tamper(&self) -> bool {
        *self == TAMP1F_A::Tamper
    }
}
// TAMP2F/TAMP3F share TAMP1F's reader shape.
#[doc = "Field `TAMP2F` reader - TAMP2F"]
pub use TAMP1F_R as TAMP2F_R;
#[doc = "Field `TAMP3F` reader - TAMP3F"]
pub use TAMP1F_R as TAMP3F_R;
#[doc = "Field `ITAMP3F` reader - ITAMP3F"]
pub type ITAMP3F_R = crate::BitReader<ITAMP3F_A>;
#[doc = "ITAMP3F\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ITAMP3F_A {
    #[doc = "0: No tamper detected"]
    Idle = 0,
    #[doc = "1: Internal tamper detected"]
    Tamper = 1,
}
impl From<ITAMP3F_A> for bool {
    #[inline(always)]
    fn from(variant: ITAMP3F_A) -> Self {
        variant as u8 != 0
    }
}
impl ITAMP3F_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ITAMP3F_A {
        match self.bits {
            false => ITAMP3F_A::Idle,
            true => ITAMP3F_A::Tamper,
        }
    }
    #[doc = "No tamper detected"]
    #[inline(always)]
    pub fn is_idle(&self) -> bool {
        *self == ITAMP3F_A::Idle
    }
    #[doc = "Internal tamper detected"]
    #[inline(always)]
    pub fn is_tamper(&self) -> bool {
        *self == ITAMP3F_A::Tamper
    }
}
// Remaining internal-tamper flags share ITAMP3F's reader shape.
#[doc = "Field `ITAMP5F` reader - ITAMP5F"]
pub use ITAMP3F_R as ITAMP5F_R;
#[doc = "Field `ITAMP6F` reader - ITAMP6F"]
pub use ITAMP3F_R as ITAMP6F_R;
#[doc = "Field `ITAMP8F` reader - ITAMP8F"]
pub use ITAMP3F_R as ITAMP8F_R;
// Bit accessors for SR; positions follow the `Bit N` doc on each method.
impl R {
    #[doc = "Bit 0 - TAMP1F"]
    #[inline(always)]
    pub fn tamp1f(&self) -> TAMP1F_R {
        TAMP1F_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TAMP2F"]
    #[inline(always)]
    pub fn tamp2f(&self) -> TAMP2F_R {
        TAMP2F_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TAMP3F"]
    #[inline(always)]
    pub fn tamp3f(&self) -> TAMP3F_R {
        TAMP3F_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 18 - ITAMP3F"]
    #[inline(always)]
    pub fn itamp3f(&self) -> ITAMP3F_R {
        ITAMP3F_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 20 - ITAMP5F"]
    #[inline(always)]
    pub fn itamp5f(&self) -> ITAMP5F_R {
        ITAMP5F_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - ITAMP6F"]
    #[inline(always)]
    pub fn itamp6f(&self) -> ITAMP6F_R {
        ITAMP6F_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 23 - ITAMP8F"]
    #[inline(always)]
    pub fn itamp8f(&self) -> ITAMP8F_R {
        ITAMP8F_R::new(((self.bits >> 23) & 1) != 0)
    }
}
#[doc = "TAMP status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR_SPEC;
// 32-bit register word.
impl crate::RegisterSpec for SR_SPEC {
    type Ux = u32;
}
// Read-only: no `Writable` impl is generated for SR.
#[doc = "`read()` method returns [`sr::R`](R) reader structure"]
impl crate::Readable for SR_SPEC {}
#[doc = "`reset()` method sets SR to value 0"]
impl crate::Resettable for SR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! # Ark OTA
//! Library for binary application.
// `#[macro_use]` externs must precede the module declarations so the macros
// from `failure`, `log`, and `serde_derive` are visible inside `cli`/`core`.
#[macro_use]
extern crate failure;
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
pub mod cli;
pub mod core;
use std::cmp::Ordering;
//consiste en ganar metodos de la estructura, de forma que derive
#[derive(Copy, Clone)]
struct Punto
{
x: i32,
y: i32
}
/// An axis-aligned rectangle anchored at `origen`, `ancho` wide, `alto` tall.
struct Rectangulo {
    origen: Punto,
    ancho: i32,
    alto: i32,
}
// Inherent methods of `Rectangulo`. (Original Spanish comment, translated:
// "this implements a method on the rectangle struct"; `&self` borrows the
// struct, and `pub` makes the method public.)
impl Rectangulo {
    /// Returns the rectangle's area: `ancho` × `alto`.
    pub fn area(&self) -> i32 {
        self.alto * self.ancho
    }
}
impl PartialEq for Rectangulo {
    /// Two rectangles are considered equal when their areas match.
    fn eq(&self, other: &Rectangulo) -> bool {
        let (lhs, rhs) = (self.area(), other.area());
        lhs == rhs
    }
}
impl PartialOrd for Rectangulo {
    /// Orders rectangles by area, delegating to `i32`'s total order (always
    /// `Some(_)`), which yields exactly the Equal/Greater/Less branches the
    /// original explicit comparison chain produced.
    fn partial_cmp(&self, other: &Rectangulo) -> Option<Ordering> {
        self.area().partial_cmp(&other.area())
    }
}
impl std::fmt::Display for Rectangulo {
    // Renders the origin and area for user-facing output. (Original Spanish
    // comment, translated: "this way I can display all the struct's data".)
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Origen: ({}, {}) - Area: {}", self.origen.x, self.origen.y, self.area())
    }
}
/// Demo: builds two rectangles and reports whether they have equal areas.
fn main() {
    let p = Punto { x: 50, y: 50 };
    println!("Punto X: {}", p.x);
    let r1 = Rectangulo { origen: p, ancho: 20, alto: 20 };
    println!("{}", r1);
    let r2 = Rectangulo { origen: Punto { x: 3, y: 4 }, ancho: 30, alto: 30 };
    println!("{}", r2);
    // BUG FIX: the equality branch previously printed "r2 es más grande que
    // r1" ("r2 is bigger than r1"), which contradicts the `==` test it sits
    // under (PartialEq compares areas). Report equality accurately.
    if r1 == r2 {
        println!("r1 y r2 son iguales");
    } else {
        println!("r1 y r2 no son iguales");
    }
}
|
//! This crate provides wrappers and convenience functions to make Hyper and
//! Serde work hand in hand.
//!
//! The supported types are:
//!
//! * `cookie::Cookie`
//! * `hyper::header::ContentType`
//! * `hyper::header::Headers`
//! * `hyper::http::RawStatus`
//! * `hyper::method::Method`
//! * `mime::Mime`
//! * `time::Tm`
//!
//! # How do I use a data type with a `Headers` member with Serde?
//!
//! Use the serde attributes `deserialize_with` and `serialize_with`.
//!
//! ```
//! struct MyStruct {
//! #[serde(deserialize_with = "hyper_serde::deserialize",
//! serialize_with = "hyper_serde::serialize")]
//! headers: Headers,
//! }
//! ```
//!
//! # How do I encode a `Headers` value with `serde_json::to_string`?
//!
//! Use the `Ser` wrapper.
//!
//! ```
//! serde_json::to_string(&Ser::new(&headers))
//! ```
//!
//! # How do I decode a `Method` value with `serde_json::from_str`?
//!
//! Use the `De` wrapper.
//!
//! ```
//! serde_json::from_str::<De<Method>>("\"PUT\"").map(De::into_inner)
//! ```
//!
//! # How do I send `Cookie` values as part of an IPC channel?
//!
//! Use the `Serde` wrapper. It implements `Deref` and `DerefMut` for
//! convenience.
//!
//! ```
//! ipc::channel::<Serde<Cookie>>()
//! ```
//!
//!
#![deny(missing_docs)]
#![deny(unsafe_code)]
extern crate cookie;
extern crate hyper;
extern crate mime;
extern crate serde;
extern crate serde_bytes;
extern crate time;
use cookie::Cookie;
use hyper::header::{ContentType, Headers};
use hyper::http::RawStatus;
use hyper::method::Method;
use mime::Mime;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_bytes::{ByteBuf, Bytes};
use serde::de::{self, MapAccess, SeqAccess, Visitor};
use serde::ser::{SerializeMap, SerializeSeq};
use std::cmp;
use std::fmt;
use std::ops::{Deref, DerefMut};
use std::str;
use time::{Tm, strptime};
/// Deserialises a `T` value with a given deserializer.
///
/// This is useful to deserialize Hyper types used in structure fields or
/// tuple members with `#[serde(deserialize_with = "hyper_serde::deserialize")]`.
#[inline(always)]
pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where D: Deserializer<'de>,
          De<T>: Deserialize<'de>,
{
    // Deserialize through the `De` wrapper, then unwrap the inner value.
    let wrapper: De<T> = De::deserialize(deserializer)?;
    Ok(wrapper.into_inner())
}
/// Serialises `value` with a given serializer.
///
/// This is useful to serialize Hyper types used in structure fields or
/// tuple members with `#[serde(serialize_with = "hyper_serde::serialize")]`.
#[inline(always)]
pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
    where S: Serializer,
          for<'a> Ser<'a, T>: Serialize,
{
    // Borrow `value` through the non-pretty `Ser` wrapper and delegate.
    let wrapper = Ser::new(value);
    wrapper.serialize(serializer)
}
/// Serialises `value` with a given serializer in a pretty way.
///
/// This does the same job as `serialize` but with a prettier format
/// for some combinations of types and serialisers.
///
/// For now, the only change from `serialize` is when serialising `Headers`,
/// where the items in the header values get serialised as strings instead
/// of sequences of bytes, if they represent UTF-8 text.
#[inline(always)]
pub fn serialize_pretty<T, S>(value: &T,
                              serializer: S)
                              -> Result<S::Ok, S::Error>
    where S: Serializer,
          for<'a> Ser<'a, T>: Serialize,
{
    // Same as `serialize`, but through the pretty-mode wrapper.
    let wrapper = Ser::new_pretty(value);
    wrapper.serialize(serializer)
}
/// A wrapper to deserialize Hyper types.
///
/// This is useful with functions such as `serde_json::from_str`.
///
/// Values of this type can only be obtained through
/// the `serde::Deserialize` trait.
#[derive(Debug)]
pub struct De<T> {
    // The deserialized inner value; extracted via `into_inner`.
    v: T,
}
impl<T> De<T> {
    // Internal constructor used by this crate's `Deserialize` impls.
    fn new(v: T) -> Self {
        De { v: v }
    }
}
impl<'de, T> De<T>
    where De<T>: Deserialize<'de>,
{
    /// Consumes this wrapper, returning the deserialized value.
    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.v
    }
}
/// A wrapper used to serialize Hyper types.
///
/// Handy with functions such as `serde_json::to_string`. Values of this
/// type can only be passed to the `serde::Serialize` trait.
#[derive(Debug)]
pub struct Ser<'a, T: 'a> {
    v: &'a T,
    // When true, some impls (notably Headers) emit a prettier encoding.
    pretty: bool,
}

impl<'a, T> Ser<'a, T>
where
    Ser<'a, T>: serde::Serialize,
{
    /// Returns a new `Ser` wrapper.
    #[inline(always)]
    pub fn new(value: &'a T) -> Self {
        Ser { v: value, pretty: false }
    }

    /// Returns a new `Ser` wrapper, in pretty mode.
    ///
    /// See `serialize_pretty`.
    #[inline(always)]
    pub fn new_pretty(value: &'a T) -> Self {
        Ser { v: value, pretty: true }
    }
}
/// A convenience wrapper to be used as a type parameter, for example when
/// a `Vec<T>` needs to be handed to serde.
#[derive(Clone, PartialEq)]
pub struct Serde<T>(pub T)
where
    for<'de> De<T>: Deserialize<'de>,
    for<'a> Ser<'a, T>: Serialize;

impl<T> Serde<T>
where
    for<'de> De<T>: Deserialize<'de>,
    for<'a> Ser<'a, T>: Serialize,
{
    /// Consumes this wrapper, returning the inner value.
    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0
    }
}
impl<T> fmt::Debug for Serde<T>
where
    T: fmt::Debug,
    for<'de> De<T>: Deserialize<'de>,
    for<'a> Ser<'a, T>: Serialize,
{
    /// Forwards formatting straight to the wrapped value.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt::Debug::fmt(&self.0, formatter)
    }
}
impl<T> Deref for Serde<T>
where for<'de> De<T>: Deserialize<'de>,
for<'a> Ser<'a, T>: Serialize,
{
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T> DerefMut for Serde<T>
where for<'de> De<T>: Deserialize<'de>,
for<'a> Ser<'a, T>: Serialize,
{
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl<T: PartialEq> PartialEq<T> for Serde<T>
where
    for<'de> De<T>: Deserialize<'de>,
    for<'a> Ser<'a, T>: Serialize,
{
    /// Compares the wrapped value directly against a bare `T`.
    fn eq(&self, other: &T) -> bool {
        self.0.eq(other)
    }
}
impl<'b, T> Deserialize<'b> for Serde<T>
where for<'de> De<T>: Deserialize<'de>,
for<'a> Ser<'a, T>: Serialize,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'b>,
{
De::deserialize(deserializer).map(De::into_inner).map(Serde)
}
}
impl<T> Serialize for Serde<T>
where for<'de> De<T>: Deserialize<'de>,
for<'a> Ser<'a, T>: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,
{
Ser::new(&self.0).serialize(serializer)
}
}
impl<'de> Deserialize<'de> for De<ContentType> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>,
{
deserialize(deserializer).map(ContentType).map(De::new)
}
}
impl<'a> Serialize for Ser<'a, ContentType> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,
{
serialize(&self.v.0, serializer)
}
}
impl<'de> Deserialize<'de> for De<Cookie<'static>> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>,
{
struct CookieVisitor;
impl<'de> Visitor<'de> for CookieVisitor {
type Value = De<Cookie<'static>>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "an HTTP cookie header value")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: de::Error,
{
Cookie::parse(v)
.map(Cookie::into_owned)
.map(De::new)
.map_err(|e| E::custom(format!("{:?}", e)))
}
}
deserializer.deserialize_string(CookieVisitor)
}
}
impl<'a, 'cookie> Serialize for Ser<'a, Cookie<'cookie>> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,
{
serializer.serialize_str(&self.v.to_string())
}
}
impl<'de> Deserialize<'de> for De<Headers> {
    // Headers deserialize from a map of header name -> list of raw values,
    // where each raw value may arrive as a string or as a byte sequence.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where D: Deserializer<'de>,
    {
        // Visitor for the outer map (header name -> raw values).
        struct HeadersVisitor;
        impl<'de> Visitor<'de> for HeadersVisitor {
            type Value = De<Headers>;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                write!(formatter, "a map from header names to header values")
            }
            // A unit/null input yields an empty header map.
            fn visit_unit<E>(self) -> Result<Self::Value, E>
            where E: de::Error,
            {
                Ok(De::new(Headers::new()))
            }
            fn visit_map<V>(self,
                            mut visitor: V)
                            -> Result<Self::Value, V::Error>
            where V: MapAccess<'de>,
            {
                let mut headers = Headers::new();
                // Each entry: header name plus its raw byte values.
                while let Some((k, v)) = visitor.next_entry::<String, Value>()? {
                    headers.set_raw(k, v.0);
                }
                Ok(De::new(headers))
            }
        }
        // One raw header value: a list of byte strings.
        struct Value(Vec<Vec<u8>>);
        impl<'de> Deserialize<'de> for Value {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where D: Deserializer<'de>,
            {
                deserializer.deserialize_seq(ValueVisitor)
            }
        }
        struct ValueVisitor;
        impl<'de> Visitor<'de> for ValueVisitor {
            type Value = Value;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                write!(formatter, "an array of strings and sequences of bytes")
            }
            // A unit/null input yields an empty value list.
            fn visit_unit<E>(self) -> Result<Value, E>
            where E: de::Error,
            {
                Ok(Value(vec![]))
            }
            fn visit_seq<V>(self, mut visitor: V) -> Result<Value, V::Error>
            where V: SeqAccess<'de>,
            {
                // Clamp to not OOM on rogue values.
                let capacity = cmp::min(visitor.size_hint().unwrap_or(0), 64);
                let mut values = Vec::with_capacity(capacity);
                // ByteBuf accepts both strings and byte sequences.
                while let Some(v) = visitor.next_element::<ByteBuf>()? {
                    values.push(v.into());
                }
                Ok(Value(values))
            }
        }
        deserializer.deserialize_map(HeadersVisitor)
    }
}
impl<'a> Serialize for Ser<'a, Headers> {
    // Headers serialize as a map of header name -> list of raw values.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: Serializer,
    {
        // One raw header value: a list of byte strings. The bool carries the
        // pretty flag: when set, values that are valid UTF-8 are emitted as
        // strings instead of byte sequences.
        struct Value<'headers>(&'headers [Vec<u8>], bool);
        impl<'headers> Serialize for Value<'headers> {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where S: Serializer,
            {
                let mut serializer =
                    serializer.serialize_seq(Some(self.0.len()))?;
                for v in self.0 {
                    if self.1 {
                        // Pretty mode: prefer a string when the bytes decode.
                        if let Ok(v) = str::from_utf8(v) {
                            serializer.serialize_element(v)?;
                            continue;
                        }
                    }
                    serializer.serialize_element(&Bytes::new(v))?;
                }
                serializer.end()
            }
        }
        let mut serializer = serializer.serialize_map(Some(self.v.len()))?;
        for header in self.v.iter() {
            let name = header.name();
            // `name` came from iterating `self.v`, so the lookup can't miss.
            let value = self.v.get_raw(name).unwrap();
            serializer.serialize_entry(name, &Value(value, self.pretty))?;
        }
        serializer.end()
    }
}
impl<'de> Deserialize<'de> for De<Method> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>,
{
struct MethodVisitor;
impl<'de> Visitor<'de> for MethodVisitor {
type Value = De<Method>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "an HTTP method")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: de::Error,
{
v.parse::<Method>().map(De::new).map_err(E::custom)
}
}
deserializer.deserialize_string(MethodVisitor)
}
}
impl<'a> Serialize for Ser<'a, Method> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,
{
Serialize::serialize(self.v.as_ref(), serializer)
}
}
impl<'de> Deserialize<'de> for De<Mime> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>,
{
struct MimeVisitor;
impl<'de> Visitor<'de> for MimeVisitor {
type Value = De<Mime>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "a mime type")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: de::Error,
{
v.parse::<Mime>().map(De::new).map_err(|()| {
E::custom("could not parse mime type")
})
}
}
deserializer.deserialize_string(MimeVisitor)
}
}
impl<'a> Serialize for Ser<'a, Mime> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,
{
serializer.serialize_str(&self.v.to_string())
}
}
impl<'de> Deserialize<'de> for De<RawStatus> {
    /// A raw status round-trips as a `(code, reason)` pair.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer)
            .map(|(code, reason)| De::new(RawStatus(code, reason)))
    }
}

impl<'a> Serialize for Ser<'a, RawStatus> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let pair = (self.v.0, &self.v.1);
        pair.serialize(serializer)
    }
}
impl<'de> Deserialize<'de> for De<Tm> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>,
{
struct TmVisitor;
impl<'de> Visitor<'de> for TmVisitor {
type Value = De<Tm>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "a date and time according to RFC 3339")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: de::Error,
{
strptime(v, "%Y-%m-%dT%H:%M:%SZ").map(De::new).map_err(|e| {
E::custom(e.to_string())
})
}
}
deserializer.deserialize_string(TmVisitor)
}
}
impl<'a> Serialize for Ser<'a, Tm> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer,
{
serializer.serialize_str(&self.v.rfc3339().to_string())
}
}
|
use crate::util::computer::Computer;
use std::{collections::BTreeMap, slice::Iter};
/// What the droid reports standing on a given map cell.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum Tile {
    Wall = 0,
    Empty,
    Oxygen,
}

impl From<i64> for Tile {
    /// Maps an intcode status reply onto a tile; panics on any other value.
    fn from(value: i64) -> Tile {
        if value == 0 {
            Tile::Wall
        } else if value == 1 {
            Tile::Empty
        } else if value == 2 {
            Tile::Oxygen
        } else {
            panic!("found invalid tile: {}", value)
        }
    }
}
/// Movement commands understood by the droid; the discriminants (1..=4)
/// are the intcode input values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum Direction {
    North = 1,
    South,
    West,
    East,
}

impl Direction {
    /// Coordinate one step away from `pos` in this direction (y grows south).
    #[inline]
    fn offset(self, pos: (i64, i64)) -> (i64, i64) {
        let (dx, dy) = match self {
            Direction::North => (0, -1),
            Direction::South => (0, 1),
            Direction::West => (-1, 0),
            Direction::East => (1, 0),
        };
        (pos.0 + dx, pos.1 + dy)
    }

    /// The direction pointing the opposite way.
    #[inline]
    fn opposite(self) -> Direction {
        match self {
            Direction::North => Direction::South,
            Direction::South => Direction::North,
            Direction::West => Direction::East,
            Direction::East => Direction::West,
        }
    }

    /// Iterates over all four directions.
    #[inline]
    fn iter() -> Iter<'static, Direction> {
        const ALL: [Direction; 4] = [
            Direction::North,
            Direction::East,
            Direction::South,
            Direction::West,
        ];
        ALL.iter()
    }
}
/// Renders the explored map as ASCII art: `#` wall, `.` empty, `o` oxygen,
/// `D` for the droid at `pos` (given as `(x, y)`), `X` for the origin.
///
/// NOTE(review): `map` is keyed `(y, x)` — see `explore`, which inserts
/// swapped coordinates — so `BTreeMap` iteration is row-major. The map is
/// never mutated here; the `&mut` looks stronger than needed (confirm
/// before relaxing, as callers pass `&mut`).
fn map_to_string(map: &mut BTreeMap<(i64, i64), Tile>, pos: (i64, i64)) -> String {
    // Left edge of the bounding box: smallest x (second key component) seen.
    let min_x = map
        .keys()
        .map(|&(_, x)| x)
        .fold(i64::max_value(), |min, x| if x < min { x } else { min });
    let mut last_y = i64::min_value();
    let mut last_x = min_x;
    let mut res = String::new();
    for (&(y, x), tile) in map.iter() {
        if last_y != y {
            // New row: break the line, then pad from the left edge.
            res.push('\n');
            for _ in min_x..x {
                res.push(' ');
            }
        } else {
            // Same row: pad the gap since the previous tile in this row.
            for _ in (last_x + 1)..x {
                res.push(' ');
            }
        }
        if x == pos.0 && y == pos.1 {
            res.push('D');
        } else if x == 0 && y == 0 {
            res.push('X');
        } else {
            res.push(match tile {
                Tile::Empty => '.',
                Tile::Wall => '#',
                Tile::Oxygen => 'o',
            })
        }
        last_y = y;
        last_x = x;
    }
    res
}
/// Depth-first exploration of the whole maze by driving the intcode droid.
/// Fills `map` (keyed `(y, x)`, i.e. swapped) with every reachable tile.
///
/// Returns `Some(steps)` — the shortest distance from `pos` to the oxygen
/// system through the part of the maze first discovered from here — or
/// `None` if the oxygen tile is not reachable via unvisited tiles.
fn explore(
    program: &mut Computer,
    map: &mut BTreeMap<(i64, i64), Tile>,
    pos: (i64, i64),
) -> Option<usize> {
    let mut steps: Option<usize> = None;
    for &direction in Direction::iter() {
        let to = direction.offset(pos);
        // Only try tiles we have not recorded yet (note the (y, x) swap).
        if map.get(&(to.1, to.0)).is_none() {
            // Send the move command; the reply tells us what lies there.
            let tile = Tile::from(program.resume_get(Some(direction as i64)).unwrap());
            map.insert((to.1, to.0), tile);
            if tile == Tile::Oxygen {
                steps = Some(0);
            }
            if tile == Tile::Empty || tile == Tile::Oxygen {
                // The droid actually moved; recurse and keep the shortest
                // oxygen distance found among all branches.
                if let Some(nsteps) = explore(program, map, to) {
                    steps = match steps {
                        Some(s) if nsteps < s => Some(nsteps),
                        None => Some(nsteps),
                        _ => steps,
                    };
                }
                // move back to original pos
                program
                    .resume_get(Some(direction.opposite() as i64))
                    .unwrap();
            }
        }
    }
    // Account for the step from `pos` to the neighbour where oxygen was seen.
    steps.map(|s| s + 1)
}
/// One minute of oxygen propagation: every `Empty` tile adjacent to an
/// `Oxygen` tile becomes `Oxygen`. Returns how many tiles were converted.
fn spread_oxygen(map: &mut BTreeMap<(i64, i64), Tile>) -> usize {
    // Snapshot the current oxygen front so this round's fills don't cascade.
    let front: Vec<(i64, i64)> = map
        .iter()
        .filter_map(|(&pos, &t)| if t == Tile::Oxygen { Some(pos) } else { None })
        .collect();
    let mut converted = 0;
    for pos in front {
        for &dir in Direction::iter() {
            let neighbour = dir.offset(pos);
            match map.get_mut(&neighbour) {
                Some(tile) if *tile == Tile::Empty => {
                    *tile = Tile::Oxygen;
                    converted += 1;
                }
                _ => {}
            }
        }
    }
    converted
}
/// Spreads oxygen round after round until nothing changes; returns the
/// number of minutes (rounds) that made progress.
fn fill_with_oxygen(map: &mut BTreeMap<(i64, i64), Tile>) -> usize {
    let mut minutes = 0;
    loop {
        if spread_oxygen(map) == 0 {
            break minutes;
        }
        minutes += 1;
    }
}
/// Parses the puzzle input: a comma-separated list of intcode integers.
#[aoc_generator(day15)]
pub fn day15_gen(input: &str) -> Vec<i64> {
    // Trim surrounding whitespace first so a trailing newline in the input
    // file doesn't make `parse` panic on the last element.
    input.trim().split(',').map(|n| n.parse().unwrap()).collect()
}
const MEMORY_SIZE: usize = 4096;
/// Part 1: shortest number of moves from the start to the oxygen system.
#[aoc(day15, part1)]
pub fn day15_part1(code: &[i64]) -> Option<usize> {
    let mut map = BTreeMap::new();
    // The starting tile is known traversable.
    map.insert((0, 0), Tile::Empty);
    let mut program = Computer::new(code, Some(MEMORY_SIZE));
    let shortest = explore(&mut program, &mut map, (0, 0));
    println!("{}", map_to_string(&mut map, (0, 0)));
    shortest
}
#[aoc(day15, part2)]
pub fn day15_part2(code: &[i64]) -> usize {
let mut program = Computer::new(code, Some(MEMORY_SIZE));
let mut map = BTreeMap::new();
explore(&mut program, &mut map, (0, 0));
let minutes = fill_with_oxygen(&mut map);
println!("{}", map_to_string(&mut map, (0, 0)));
minutes
}
|
extern crate kmod;
#[macro_use]
extern crate log;
extern crate env_logger;
use std::env;
/// Loads a kernel module: first CLI argument is the `.ko` path, any
/// remaining arguments are passed to the module as parameters.
fn main() {
    env_logger::init();
    let ctx = kmod::Context::new().expect("kmod ctx failed");

    let mut args: Vec<String> = env::args().skip(1).collect();
    // Idiomatic emptiness check (clippy: len_zero) instead of `len() < 1`.
    if args.is_empty() {
        panic!("missing argument");
    }
    let filename = args.remove(0);

    let module = ctx
        .module_new_from_path(&filename)
        .expect("new_from_path failed");
    info!("got module: {:?}", module.name());
    // Flags 0; the leftover args become the module's parameter string.
    module.insert_module(0, args).expect("insert_module failed");
}
|
use std::num::Wrapping;
/// Bob Jenkins-style 96-bit mix step: reversibly stirs the three
/// accumulators so every input bit influences every output bit.
/// The exact operation order and shift amounts are part of the hash
/// definition — do not reorder or "simplify".
fn mix(a: &mut Wrapping<u32>, b: &mut Wrapping<u32>, c: &mut Wrapping<u32>) {
    *a -= *b; *a -= *c; *a ^= *c >> 13;
    *b -= *c; *b -= *a; *b ^= *a << 8;
    *c -= *a; *c -= *b; *c ^= *b >> 13;
    *a -= *b; *a -= *c; *a ^= *c >> 12;
    *b -= *c; *b -= *a; *b ^= *a << 16;
    *c -= *a; *c -= *b; *c ^= *b >> 5;
    *a -= *b; *a -= *c; *a ^= *c >> 3;
    *b -= *c; *b -= *a; *b ^= *a << 10;
    *c -= *a; *c -= *b; *c ^= *b >> 15;
}
/// Hashes a slice of values convertible to `u32`, seeding with 31.
pub fn hash<T: Copy + Into<u32>>(arr: &[T]) -> u32 {
    let lift = |u: &T| Wrapping((*u).into());
    hash_with(arr, lift, Wrapping(31))
}
/// Jenkins-style hash over `arr`: each element is lifted to a
/// `Wrapping<u32>` by `h`, mixed three at a time, with `init_value`
/// folded into `a` as a seed. The final `c` accumulator is the hash.
pub fn hash_with<T, H: Fn(&T) -> Wrapping<u32>>(arr: &[T], h: H, init_value: Wrapping<u32>) -> u32 {
    // Golden-ratio constant initialisation, as in Jenkins' lookup2.
    let mut a: Wrapping<u32> = Wrapping(0x9e3779b9);
    let mut b = a;
    let mut c: Wrapping<u32> = Wrapping(11);
    let mut n = arr.len();
    match n {
        1 => {
            a += init_value;
            // NOTE(review): plain assignment here (`b =`), unlike the `+=`
            // used in every other arm — presumably intentional for the
            // single-element case, but worth confirming against the
            // reference implementation before touching.
            b = h(&arr[0]);
            mix(&mut a, &mut b, &mut c); c.0 },
        2 => {
            a += init_value;
            b += h(&arr[0]);
            c += h(&arr[1]);
            mix(&mut a, &mut b, &mut c); c.0 },
        3 => {
            a += h(&arr[0]);
            b += h(&arr[1]);
            c += h(&arr[2]);
            mix(&mut a, &mut b, &mut c);
            a += init_value;
            mix(&mut a, &mut b, &mut c); c.0 },
        _ => {
            // Consume elements from the back of the slice, three per mix.
            while n >= 3 {
                n -= 1; a += h(&arr[n]);
                n -= 1; b += h(&arr[n]);
                n -= 1; c += h(&arr[n]);
                mix(&mut a, &mut b, &mut c);
            }
            a += init_value;
            // Fold in the 0..=2 leftover elements at the front.
            match n {
                2 => { b += h(&arr[1]); c += h(&arr[0]); },
                1 => c += h(&arr[0]),
                _ => ()
            };
            mix(&mut a, &mut b, &mut c); c.0 }
    }
}
|
use serde::de::{self, Deserialize as _, Deserializer, Unexpected};
/// Returns the last `::`-separated segment of `T`'s type name
/// (e.g. `"String"` for `std::string::String`).
///
/// Note the split is purely textual: for a generic like
/// `Vec<alloc::string::String>` the last segment is `"String>"`,
/// which is fine for the simple types this is used with.
pub fn type_name<T>() -> &'static str {
    let name = std::any::type_name::<T>();
    // `rsplit` finds the last segment without scanning the whole string;
    // `split`/`rsplit` always yield at least one item, so the fallback
    // never actually fires (the old `unwrap_or_else(|| name)` was
    // unnecessarily lazy — clippy: unnecessary_lazy_evaluations).
    name.rsplit("::").next().unwrap_or(name)
}
/// Constant `1.0`, usable as a serde `default = "one_please"` helper.
pub const fn one_please() -> f64 {
    1.0_f64
}
pub fn bool_from_int<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
D: Deserializer<'de>,
{
match u8::deserialize(deserializer)? {
0 => Ok(false),
1 => Ok(true),
other => Err(de::Error::invalid_value(
Unexpected::Unsigned(other as u64),
&"zero or one",
)),
}
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Common ARM tracked-resource envelope: id/name/type plus a required
/// `location` and optional `tags`. (Generated model — keep serde
/// attributes in sync with the REST API spec.)
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// ARM proxy resource: id/name/type only — no location or tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyOnlyResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Managed disk resource model (flattens the common `Resource` fields).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Disk {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
    pub managed_by: Option<String>,
    #[serde(rename = "managedByExtended", default, skip_serializing_if = "Vec::is_empty")]
    pub managed_by_extended: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<DiskSku>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
    #[serde(rename = "extendedLocation", default, skip_serializing_if = "Option::is_none")]
    pub extended_location: Option<ExtendedLocation>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DiskProperties>,
}
/// PATCH body for updating a disk; all fields optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DiskUpdateProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<DiskSku>,
}
/// Paged list of disks; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskList {
    pub value: Vec<Disk>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// SKU of a managed disk (name plus service-populated tier).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<disk_sku::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
}
pub mod disk_sku {
    use super::*;
    /// Disk SKU names, wire values use the `*_LRS` REST spelling.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "Standard_LRS")]
        StandardLrs,
        #[serde(rename = "Premium_LRS")]
        PremiumLrs,
        #[serde(rename = "StandardSSD_LRS")]
        StandardSsdLrs,
        #[serde(rename = "UltraSSD_LRS")]
        UltraSsdLrs,
    }
}
/// SKU of a snapshot (name plus service-populated tier).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<snapshot_sku::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
}
pub mod snapshot_sku {
    use super::*;
    /// Snapshot SKU names; note ZRS is allowed here but not for disks.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "Standard_LRS")]
        StandardLrs,
        #[serde(rename = "Premium_LRS")]
        PremiumLrs,
        #[serde(rename = "Standard_ZRS")]
        StandardZrs,
    }
}
/// Disk properties; only `creation_data` is required by the API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskProperties {
    #[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
    pub time_created: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<disk_properties::OsType>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<disk_properties::HyperVGeneration>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<PurchasePlan>,
    #[serde(rename = "creationData")]
    pub creation_data: CreationData,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "diskSizeBytes", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_bytes: Option<i64>,
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "diskIOPSReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_write: Option<i64>,
    #[serde(rename = "diskMBpsReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_write: Option<i64>,
    #[serde(rename = "diskIOPSReadOnly", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_only: Option<i64>,
    #[serde(rename = "diskMBpsReadOnly", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_only: Option<i64>,
    #[serde(rename = "diskState", default, skip_serializing_if = "Option::is_none")]
    pub disk_state: Option<DiskState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "maxShares", default, skip_serializing_if = "Option::is_none")]
    pub max_shares: Option<i32>,
    #[serde(rename = "shareInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub share_info: Vec<ShareInfoElement>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(rename = "burstingEnabled", default, skip_serializing_if = "Option::is_none")]
    pub bursting_enabled: Option<bool>,
}
pub mod disk_properties {
    use super::*;
    /// Operating system type of the disk image.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
    /// Hyper-V generation of the VM the disk supports.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HyperVGeneration {
        V1,
        V2,
    }
}
/// Snapshot properties; only `creation_data` is required by the API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotProperties {
    #[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
    pub time_created: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<snapshot_properties::OsType>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<snapshot_properties::HyperVGeneration>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<PurchasePlan>,
    #[serde(rename = "creationData")]
    pub creation_data: CreationData,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "diskSizeBytes", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_bytes: Option<i64>,
    #[serde(rename = "diskState", default, skip_serializing_if = "Option::is_none")]
    pub disk_state: Option<DiskState>,
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub incremental: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
}
pub mod snapshot_properties {
    use super::*;
    /// Operating system type of the snapshotted disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
    /// Hyper-V generation of the VM the snapshot supports.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HyperVGeneration {
        V1,
        V2,
    }
}
/// One VM currently attached to a shared disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShareInfoElement {
    #[serde(rename = "vmUri", default, skip_serializing_if = "Option::is_none")]
    pub vm_uri: Option<String>,
}
/// Properties of a disk encryption set (active and rotated-out keys).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSetProperties {
    #[serde(rename = "encryptionType", default, skip_serializing_if = "Option::is_none")]
    pub encryption_type: Option<DiskEncryptionSetType>,
    #[serde(rename = "activeKey", default, skip_serializing_if = "Option::is_none")]
    pub active_key: Option<KeyForDiskEncryptionSet>,
    #[serde(rename = "previousKeys", default, skip_serializing_if = "Vec::is_empty")]
    pub previous_keys: Vec<KeyForDiskEncryptionSet>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
}
/// Azure Disk Encryption settings for a disk/snapshot.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSettingsCollection {
    pub enabled: bool,
    #[serde(rename = "encryptionSettings", default, skip_serializing_if = "Vec::is_empty")]
    pub encryption_settings: Vec<EncryptionSettingsElement>,
    #[serde(rename = "encryptionSettingsVersion", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_version: Option<String>,
}
/// One encryption settings entry: disk encryption key + optional KEK.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSettingsElement {
    #[serde(rename = "diskEncryptionKey", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_key: Option<KeyVaultAndSecretReference>,
    #[serde(rename = "keyEncryptionKey", default, skip_serializing_if = "Option::is_none")]
    pub key_encryption_key: Option<KeyVaultAndKeyReference>,
}
/// Key Vault secret reference (vault + secret URL), both required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultAndSecretReference {
    #[serde(rename = "sourceVault")]
    pub source_vault: SourceVault,
    #[serde(rename = "secretUrl")]
    pub secret_url: String,
}
/// Key Vault key reference (vault + key URL), both required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultAndKeyReference {
    #[serde(rename = "sourceVault")]
    pub source_vault: SourceVault,
    #[serde(rename = "keyUrl")]
    pub key_url: String,
}
/// Key Vault key used by a disk encryption set; the vault is optional here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyForDiskEncryptionSet {
    #[serde(rename = "sourceVault", default, skip_serializing_if = "Option::is_none")]
    pub source_vault: Option<SourceVault>,
    #[serde(rename = "keyUrl")]
    pub key_url: String,
}
/// Resource id of a Key Vault.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SourceVault {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Server-side encryption type applied to a disk/snapshot.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EncryptionType {
    EncryptionAtRestWithPlatformKey,
    EncryptionAtRestWithCustomerKey,
    EncryptionAtRestWithPlatformAndCustomerKeys,
}
/// Encryption types valid for a disk encryption set (no platform-only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiskEncryptionSetType {
    EncryptionAtRestWithCustomerKey,
    EncryptionAtRestWithPlatformAndCustomerKeys,
}
/// At-rest encryption settings: encryption set id plus encryption type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Encryption {
    #[serde(rename = "diskEncryptionSetId", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_set_id: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<EncryptionType>,
}
/// Network policy for exporting/uploading disk data.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum NetworkAccessPolicy {
    AllowAll,
    AllowPrivate,
    DenyAll,
}
/// Updatable disk properties (PATCH body payload); all fields optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskUpdateProperties {
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<disk_update_properties::OsType>,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(rename = "diskIOPSReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_write: Option<i64>,
    #[serde(rename = "diskMBpsReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_write: Option<i64>,
    #[serde(rename = "diskIOPSReadOnly", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_only: Option<i64>,
    #[serde(rename = "diskMBpsReadOnly", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_only: Option<i64>,
    #[serde(rename = "maxShares", default, skip_serializing_if = "Option::is_none")]
    pub max_shares: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(rename = "burstingEnabled", default, skip_serializing_if = "Option::is_none")]
    pub bursting_enabled: Option<bool>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<PurchasePlan>,
}
pub mod disk_update_properties {
    use super::*;
    /// Operating system type of the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
/// Updatable snapshot properties (PATCH body payload); all optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotUpdateProperties {
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<snapshot_update_properties::OsType>,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
}
pub mod snapshot_update_properties {
    use super::*;
    /// Operating system type of the snapshotted disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
/// Updatable properties of a disk encryption set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetUpdateProperties {
    #[serde(rename = "encryptionType", default, skip_serializing_if = "Option::is_none")]
    pub encryption_type: Option<DiskEncryptionSetType>,
    #[serde(rename = "activeKey", default, skip_serializing_if = "Option::is_none")]
    pub active_key: Option<KeyForDiskEncryptionSet>,
}
/// Lifecycle state of a disk as reported by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiskState {
    Unattached,
    Attached,
    Reserved,
    #[serde(rename = "ActiveSAS")]
    ActiveSas,
    ReadyToUpload,
    ActiveUpload,
}
/// How a disk/snapshot gets its data; which source fields are relevant
/// depends on `create_option` (e.g. `sourceUri` for Import, `sourceResourceId`
/// for Copy, `imageReference` for FromImage).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreationData {
    #[serde(rename = "createOption")]
    pub create_option: creation_data::CreateOption,
    #[serde(rename = "storageAccountId", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_id: Option<String>,
    #[serde(rename = "imageReference", default, skip_serializing_if = "Option::is_none")]
    pub image_reference: Option<ImageDiskReference>,
    #[serde(rename = "galleryImageReference", default, skip_serializing_if = "Option::is_none")]
    pub gallery_image_reference: Option<ImageDiskReference>,
    #[serde(rename = "sourceUri", default, skip_serializing_if = "Option::is_none")]
    pub source_uri: Option<String>,
    #[serde(rename = "sourceResourceId", default, skip_serializing_if = "Option::is_none")]
    pub source_resource_id: Option<String>,
    #[serde(rename = "sourceUniqueId", default, skip_serializing_if = "Option::is_none")]
    pub source_unique_id: Option<String>,
    #[serde(rename = "uploadSizeBytes", default, skip_serializing_if = "Option::is_none")]
    pub upload_size_bytes: Option<i64>,
    #[serde(rename = "logicalSectorSize", default, skip_serializing_if = "Option::is_none")]
    pub logical_sector_size: Option<i32>,
}
pub mod creation_data {
    use super::*;
    /// Possible disk/snapshot creation sources.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreateOption {
        Empty,
        Attach,
        FromImage,
        Import,
        Copy,
        Restore,
        Upload,
    }
}
/// Reference to a platform/gallery image; `lun` selects a data disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageDiskReference {
    pub id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub lun: Option<i32>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PurchasePlan {
pub name: String,
pub publisher: String,
pub product: String,
#[serde(rename = "promotionCode", default, skip_serializing_if = "Option::is_none")]
pub promotion_code: Option<String>,
}
/// Request body for granting access: the access level and how long it lasts, in seconds.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GrantAccessData {
pub access: grant_access_data::Access,
#[serde(rename = "durationInSeconds")]
pub duration_in_seconds: i32,
}
/// Enum(s) scoped to [`GrantAccessData`].
pub mod grant_access_data {
use super::*;
/// Requested access level; variant names are serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Access {
None,
Read,
Write,
}
}
/// Response carrying the SAS URI produced by a grant-access operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccessUri {
/// Note the all-caps wire name `accessSAS`.
#[serde(rename = "accessSAS", default, skip_serializing_if = "Option::is_none")]
pub access_sas: Option<String>,
}
/// Snapshot resource. The common ARM fields (id/name/location/tags) come from the
/// flattened `resource`, so on the wire this is a single flat JSON object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Snapshot {
#[serde(flatten)]
pub resource: Resource,
#[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
pub managed_by: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<SnapshotSku>,
#[serde(rename = "extendedLocation", default, skip_serializing_if = "Option::is_none")]
pub extended_location: Option<ExtendedLocation>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<SnapshotProperties>,
}
/// Update (PATCH) payload for a snapshot; every field is optional so only the
/// values being changed are serialized.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotUpdate {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<SnapshotUpdateProperties>,
/// Free-form tag map, kept as raw JSON.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<SnapshotSku>,
}
/// One page of snapshots; `next_link`, when present, is the URI of the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotList {
pub value: Vec<Snapshot>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Managed identity attached to a disk encryption set. `principal_id` and
/// `tenant_id` are read-only values returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSetIdentity {
/// `type_` avoids the `type` keyword; serialized as `type`.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<encryption_set_identity::Type>,
#[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
pub principal_id: Option<String>,
#[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
pub tenant_id: Option<String>,
}
/// Enum(s) scoped to [`EncryptionSetIdentity`].
pub mod encryption_set_identity {
use super::*;
/// Identity kind; variant names are serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
SystemAssigned,
None,
}
}
/// Disk encryption set resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSet {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub identity: Option<EncryptionSetIdentity>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<EncryptionSetProperties>,
}
/// Update (PATCH) payload for a disk encryption set; only supplied values are serialized.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetUpdate {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<DiskEncryptionSetUpdateProperties>,
/// Free-form tag map, kept as raw JSON.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
/// One page of disk encryption sets; `next_link` is the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetList {
pub value: Vec<DiskEncryptionSet>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of resource URIs (plain strings); `next_link` is the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceUriList {
pub value: Vec<String>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Minimal private-endpoint reference: just the resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpoint {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
}
/// Approval state of a private-endpoint connection; variant names are serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointServiceConnectionStatus {
Pending,
Approved,
Rejected,
}
/// Provisioning state of a private-endpoint connection; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointConnectionProvisioningState {
Succeeded,
Creating,
Deleting,
Failed,
}
/// Approval/rejection state of a private-link connection, plus human-readable detail.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionState {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<PrivateEndpointServiceConnectionStatus>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
pub actions_required: Option<String>,
}
/// Properties of a private-endpoint connection. The connection state is the only
/// required field; endpoint and provisioning state may be absent.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
#[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
pub private_endpoint: Option<PrivateEndpoint>,
#[serde(rename = "privateLinkServiceConnectionState")]
pub private_link_service_connection_state: PrivateLinkServiceConnectionState,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<PrivateEndpointConnectionProvisioningState>,
}
/// A private-endpoint connection with its own id/name/type envelope (not a full ARM `Resource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PrivateEndpointConnectionProperties>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// `type_` avoids the `type` keyword; serialized as `type`.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
/// Properties of a disk access resource. `private_endpoint_connections` defaults
/// to empty on deserialization and is omitted from output when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccessProperties {
#[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
/// Creation timestamp as the raw string returned by the service.
#[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
pub time_created: Option<String>,
}
/// Disk access resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccess {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<DiskAccessProperties>,
}
/// One page of disk access resources; `next_link` is the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccessList {
pub value: Vec<DiskAccess>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Update (PATCH) payload for a disk access resource; only tags can be changed here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccessUpdate {
/// Free-form tag map, kept as raw JSON.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
/// A private-link resource with its own id/name/type envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PrivateLinkResourceProperties>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// `type_` avoids the `type` keyword; serialized as `type`.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
/// Properties of a private-link resource; the two `Vec` fields default to empty
/// and are omitted from output when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
#[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
pub group_id: Option<String>,
#[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
pub required_members: Vec<String>,
#[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
pub required_zone_names: Vec<String>,
}
/// Disk restore point; id/name/type come from the flattened `proxy_only_resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskRestorePoint {
#[serde(flatten)]
pub proxy_only_resource: ProxyOnlyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<DiskRestorePointProperties>,
}
/// One page of disk restore points; `next_link` is the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskRestorePointList {
pub value: Vec<DiskRestorePoint>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Properties of a disk restore point; all fields are optional, service-populated values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskRestorePointProperties {
/// Creation timestamp as the raw string returned by the service.
#[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
pub time_created: Option<String>,
#[serde(rename = "sourceResourceId", default, skip_serializing_if = "Option::is_none")]
pub source_resource_id: Option<String>,
#[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
pub os_type: Option<disk_restore_point_properties::OsType>,
#[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
pub hyper_v_generation: Option<disk_restore_point_properties::HyperVGeneration>,
#[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
pub purchase_plan: Option<PurchasePlan>,
#[serde(rename = "familyId", default, skip_serializing_if = "Option::is_none")]
pub family_id: Option<String>,
#[serde(rename = "sourceUniqueId", default, skip_serializing_if = "Option::is_none")]
pub source_unique_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub encryption: Option<Encryption>,
}
/// Enums scoped to [`DiskRestorePointProperties`].
pub mod disk_restore_point_properties {
use super::*;
/// Operating system type; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
Windows,
Linux,
}
/// Hypervisor generation; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HyperVGeneration {
V1,
V2,
}
}
/// Unpaged list of private-link resources (no `nextLink` field in this result type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PrivateLinkResource>,
}
/// One page of private-endpoint connections; `next_link` is the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PrivateEndpointConnection>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Top-level error envelope: wraps the actual [`ApiError`] under an `error` key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ApiError>,
}
/// API error with optional nested detail list and inner error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiError {
/// Sub-errors contributing to this error; empty list is omitted on output.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<ApiErrorBase>,
/// Note the wire name is the single word `innererror`.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub innererror: Option<InnerError>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
}
/// A single error entry (code/target/message) as used in [`ApiError::details`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiErrorBase {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
}
/// Inner error details; field names match the lowercase wire names exactly.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InnerError {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub exceptiontype: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub errordetail: Option<String>,
}
/// Kind of extended location; currently the only variant is `EdgeZone`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ExtendedLocationType {
EdgeZone,
}
/// An extended location (name plus [`ExtendedLocationType`]).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExtendedLocation {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// `type_` avoids the `type` keyword; serialized as `type`.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<ExtendedLocationType>,
}
/// Gallery resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Gallery {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryProperties>,
}
/// Update (PATCH) payload for a gallery; envelope fields come from the flattened
/// `update_resource_definition`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryProperties>,
}
/// Properties of a gallery; `provisioning_state` is service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub identifier: Option<GalleryIdentifier>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_properties::ProvisioningState>,
#[serde(rename = "sharingProfile", default, skip_serializing_if = "Option::is_none")]
pub sharing_profile: Option<SharingProfile>,
}
/// Enum(s) scoped to [`GalleryProperties`].
pub mod gallery_properties {
use super::*;
/// Provisioning state; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// Gallery identifier carrying the service-generated unique name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryIdentifier {
#[serde(rename = "uniqueName", default, skip_serializing_if = "Option::is_none")]
pub unique_name: Option<String>,
}
/// Gallery sharing settings: permission mode plus the groups shared with.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharingProfile {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub permissions: Option<sharing_profile::Permissions>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub groups: Vec<SharingProfileGroup>,
}
/// Enum(s) scoped to [`SharingProfile`].
pub mod sharing_profile {
use super::*;
/// Sharing permission mode; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Permissions {
Private,
Groups,
}
}
/// A group a gallery is shared with: a group kind plus the member ids.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharingProfileGroup {
/// `type_` avoids the `type` keyword; serialized as `type`.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<sharing_profile_group::Type>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub ids: Vec<String>,
}
/// Enum(s) scoped to [`SharingProfileGroup`].
pub mod sharing_profile_group {
use super::*;
/// Group kind; `AadTenants` is renamed to match the wire value `AADTenants`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
Subscriptions,
#[serde(rename = "AADTenants")]
AadTenants,
}
}
/// Gallery application resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplication {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationProperties>,
}
/// Update (PATCH) payload for a gallery application.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationProperties>,
}
/// Properties of a gallery application; `supported_os_type` is the only required field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub eula: Option<String>,
#[serde(rename = "privacyStatementUri", default, skip_serializing_if = "Option::is_none")]
pub privacy_statement_uri: Option<String>,
#[serde(rename = "releaseNoteUri", default, skip_serializing_if = "Option::is_none")]
pub release_note_uri: Option<String>,
/// End-of-life timestamp as the raw string used on the wire.
#[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
pub end_of_life_date: Option<String>,
/// Note the wire name `supportedOSType` (capital OS).
#[serde(rename = "supportedOSType")]
pub supported_os_type: gallery_application_properties::SupportedOsType,
}
/// Enum(s) scoped to [`GalleryApplicationProperties`].
pub mod gallery_application_properties {
use super::*;
/// Supported operating system type; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SupportedOsType {
Windows,
Linux,
}
}
/// Gallery application version resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersion {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationVersionProperties>,
}
/// Update (PATCH) payload for a gallery application version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationVersionProperties>,
}
/// Properties of a gallery application version; the publishing profile is required,
/// the state/status fields are service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionProperties {
#[serde(rename = "publishingProfile")]
pub publishing_profile: GalleryApplicationVersionPublishingProfile,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_application_version_properties::ProvisioningState>,
#[serde(rename = "replicationStatus", default, skip_serializing_if = "Option::is_none")]
pub replication_status: Option<ReplicationStatus>,
}
/// Enum(s) scoped to [`GalleryApplicationVersionProperties`].
pub mod gallery_application_version_properties {
use super::*;
/// Provisioning state; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// Publishing profile for a gallery application version: the common base profile
/// (flattened) plus the required artifact `source`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionPublishingProfile {
#[serde(flatten)]
pub gallery_artifact_publishing_profile_base: GalleryArtifactPublishingProfileBase,
pub source: UserArtifactSource,
#[serde(rename = "manageActions", default, skip_serializing_if = "Option::is_none")]
pub manage_actions: Option<UserArtifactManage>,
#[serde(rename = "enableHealthCheck", default, skip_serializing_if = "Option::is_none")]
pub enable_health_check: Option<bool>,
}
/// Source of a user artifact: a required media link plus an optional default configuration link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserArtifactSource {
#[serde(rename = "mediaLink")]
pub media_link: String,
#[serde(rename = "defaultConfigurationLink", default, skip_serializing_if = "Option::is_none")]
pub default_configuration_link: Option<String>,
}
/// Manage actions for a user artifact: install and remove are required, update is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserArtifactManage {
pub install: String,
pub remove: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub update: Option<String>,
}
/// Gallery image definition resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImage {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageProperties>,
}
/// Update (PATCH) payload for a gallery image definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageProperties>,
}
/// Properties of a gallery image definition. Required fields: `os_type`, `os_state`,
/// and `identifier`; everything else is optional or service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub eula: Option<String>,
#[serde(rename = "privacyStatementUri", default, skip_serializing_if = "Option::is_none")]
pub privacy_statement_uri: Option<String>,
#[serde(rename = "releaseNoteUri", default, skip_serializing_if = "Option::is_none")]
pub release_note_uri: Option<String>,
#[serde(rename = "osType")]
pub os_type: gallery_image_properties::OsType,
#[serde(rename = "osState")]
pub os_state: gallery_image_properties::OsState,
#[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
pub hyper_v_generation: Option<gallery_image_properties::HyperVGeneration>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub features: Vec<GalleryImageFeature>,
/// End-of-life timestamp as the raw string used on the wire.
#[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
pub end_of_life_date: Option<String>,
/// Publisher/offer/sku triple identifying the image (required).
pub identifier: GalleryImageIdentifier,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub recommended: Option<RecommendedMachineConfiguration>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub disallowed: Option<Disallowed>,
#[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
pub purchase_plan: Option<ImagePurchasePlan>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_image_properties::ProvisioningState>,
}
/// Enums scoped to [`GalleryImageProperties`].
pub mod gallery_image_properties {
use super::*;
/// Operating system type; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
Windows,
Linux,
}
/// OS state of the image; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsState {
Generalized,
Specialized,
}
/// Hypervisor generation; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HyperVGeneration {
V1,
V2,
}
/// Provisioning state; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// A name/value feature flag attached to a gallery image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageFeature {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub value: Option<String>,
}
/// Publisher/offer/sku triple identifying a gallery image; all three are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageIdentifier {
pub publisher: String,
pub offer: String,
pub sku: String,
}
/// Recommended machine configuration: vCPU and memory ranges.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecommendedMachineConfiguration {
/// Generated name for the wire field `vCPUs` — awkward but its rename is correct.
#[serde(rename = "vCPUs", default, skip_serializing_if = "Option::is_none")]
pub v_cp_us: Option<ResourceRange>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub memory: Option<ResourceRange>,
}
/// An inclusive-looking min/max pair; both bounds optional. Exact bound semantics
/// are defined by the service API — confirm there if it matters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceRange {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub min: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub max: Option<i32>,
}
/// Disallowed configuration: the disk types that may not be used.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Disallowed {
#[serde(rename = "diskTypes", default, skip_serializing_if = "Vec::is_empty")]
pub disk_types: Vec<String>,
}
/// Purchase-plan info for an image; unlike [`PurchasePlan`], all fields are optional here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImagePurchasePlan {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub publisher: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub product: Option<String>,
}
/// Gallery image version resource; common ARM fields come from the flattened `resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersion {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageVersionProperties>,
}
/// Update (PATCH) payload for a gallery image version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageVersionProperties>,
}
/// Properties of a gallery image version; `storage_profile` is required,
/// the state/status fields are service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionProperties {
#[serde(rename = "publishingProfile", default, skip_serializing_if = "Option::is_none")]
pub publishing_profile: Option<GalleryImageVersionPublishingProfile>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_image_version_properties::ProvisioningState>,
#[serde(rename = "storageProfile")]
pub storage_profile: GalleryImageVersionStorageProfile,
#[serde(rename = "replicationStatus", default, skip_serializing_if = "Option::is_none")]
pub replication_status: Option<ReplicationStatus>,
}
/// Enum(s) scoped to [`GalleryImageVersionProperties`].
pub mod gallery_image_version_properties {
use super::*;
/// Provisioning state; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// Common publishing-profile fields shared by image and application version
/// publishing profiles (flattened into both).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryArtifactPublishingProfileBase {
#[serde(rename = "targetRegions", default, skip_serializing_if = "Vec::is_empty")]
pub target_regions: Vec<TargetRegion>,
#[serde(rename = "replicaCount", default, skip_serializing_if = "Option::is_none")]
pub replica_count: Option<i32>,
#[serde(rename = "excludeFromLatest", default, skip_serializing_if = "Option::is_none")]
pub exclude_from_latest: Option<bool>,
#[serde(rename = "publishedDate", default, skip_serializing_if = "Option::is_none")]
pub published_date: Option<String>,
#[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
pub end_of_life_date: Option<String>,
#[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
pub storage_account_type: Option<gallery_artifact_publishing_profile_base::StorageAccountType>,
}
/// Enum(s) scoped to [`GalleryArtifactPublishingProfileBase`].
pub mod gallery_artifact_publishing_profile_base {
use super::*;
/// Storage account type; variants are renamed to the underscore wire values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum StorageAccountType {
#[serde(rename = "Standard_LRS")]
StandardLrs,
#[serde(rename = "Standard_ZRS")]
StandardZrs,
#[serde(rename = "Premium_LRS")]
PremiumLrs,
}
}
/// A replication target region; only the region `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TargetRegion {
pub name: String,
#[serde(rename = "regionalReplicaCount", default, skip_serializing_if = "Option::is_none")]
pub regional_replica_count: Option<i32>,
#[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
pub storage_account_type: Option<target_region::StorageAccountType>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub encryption: Option<EncryptionImages>,
}
/// Enum(s) scoped to [`TargetRegion`].
pub mod target_region {
use super::*;
/// Storage account type; duplicates the variants of
/// [`gallery_artifact_publishing_profile_base::StorageAccountType`] (generated per-scope).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum StorageAccountType {
#[serde(rename = "Standard_LRS")]
StandardLrs,
#[serde(rename = "Standard_ZRS")]
StandardZrs,
#[serde(rename = "Premium_LRS")]
PremiumLrs,
}
}
/// Encryption settings for the OS disk image and each data disk image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionImages {
#[serde(rename = "osDiskImage", default, skip_serializing_if = "Option::is_none")]
pub os_disk_image: Option<OsDiskImageEncryption>,
#[serde(rename = "dataDiskImages", default, skip_serializing_if = "Vec::is_empty")]
pub data_disk_images: Vec<DataDiskImageEncryption>,
}
/// OS-disk image encryption: just the common [`DiskImageEncryption`] fields, flattened.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OsDiskImageEncryption {
#[serde(flatten)]
pub disk_image_encryption: DiskImageEncryption,
}
/// Data-disk image encryption: the common fields plus the required data-disk `lun`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataDiskImageEncryption {
#[serde(flatten)]
pub disk_image_encryption: DiskImageEncryption,
pub lun: i32,
}
/// Common disk-image encryption field: the disk encryption set resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskImageEncryption {
#[serde(rename = "diskEncryptionSetId", default, skip_serializing_if = "Option::is_none")]
pub disk_encryption_set_id: Option<String>,
}
/// Source of a gallery artifact: a required managed-image reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryArtifactSource {
#[serde(rename = "managedImage")]
pub managed_image: ManagedArtifact,
}
/// A managed artifact reference: just the required resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedArtifact {
pub id: String,
}
/// Publishing profile for a gallery image version; adds nothing beyond the
/// flattened common base profile.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionPublishingProfile {
#[serde(flatten)]
pub gallery_artifact_publishing_profile_base: GalleryArtifactPublishingProfileBase,
}
/// Storage profile of a gallery image version: source, OS disk image, and data disk images.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionStorageProfile {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub source: Option<GalleryArtifactVersionSource>,
#[serde(rename = "osDiskImage", default, skip_serializing_if = "Option::is_none")]
pub os_disk_image: Option<GalleryOsDiskImage>,
#[serde(rename = "dataDiskImages", default, skip_serializing_if = "Vec::is_empty")]
pub data_disk_images: Vec<GalleryDataDiskImage>,
}
/// Source of a gallery artifact version, given either as a resource id or a URI.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryArtifactVersionSource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub uri: Option<String>,
}
/// OS disk image of a gallery image version: just the common [`GalleryDiskImage`] fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryOsDiskImage {
#[serde(flatten)]
pub gallery_disk_image: GalleryDiskImage,
}
/// Data disk image of a gallery image version: common fields plus the required `lun`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryDataDiskImage {
#[serde(flatten)]
pub gallery_disk_image: GalleryDiskImage,
pub lun: i32,
}
/// Common disk-image fields: size, host caching mode, and source.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryDiskImage {
/// Note the wire name `sizeInGB` (capital GB).
#[serde(rename = "sizeInGB", default, skip_serializing_if = "Option::is_none")]
pub size_in_gb: Option<i32>,
#[serde(rename = "hostCaching", default, skip_serializing_if = "Option::is_none")]
pub host_caching: Option<gallery_disk_image::HostCaching>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub source: Option<GalleryArtifactVersionSource>,
}
/// Enum(s) scoped to [`GalleryDiskImage`].
pub mod gallery_disk_image {
use super::*;
/// Host caching mode; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HostCaching {
None,
ReadOnly,
ReadWrite,
}
}
/// Replication status: an aggregated state plus per-region summaries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReplicationStatus {
#[serde(rename = "aggregatedState", default, skip_serializing_if = "Option::is_none")]
pub aggregated_state: Option<replication_status::AggregatedState>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub summary: Vec<RegionalReplicationStatus>,
}
/// Enum(s) scoped to [`ReplicationStatus`].
pub mod replication_status {
use super::*;
/// Aggregated replication state; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AggregatedState {
Unknown,
InProgress,
Completed,
Failed,
}
}
/// Replication status for one region, including a progress value (an integer;
/// presumably a percentage — confirm against the service API docs).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegionalReplicationStatus {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub region: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub state: Option<regional_replication_status::State>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub details: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub progress: Option<i32>,
}
/// Enum(s) scoped to [`RegionalReplicationStatus`].
pub mod regional_replication_status {
use super::*;
/// Per-region replication state; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum State {
Unknown,
Replicating,
Completed,
Failed,
}
}
/// One page of galleries; `next_link` is the URI of the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryList {
pub value: Vec<Gallery>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of gallery image definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageList {
pub value: Vec<GalleryImage>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of gallery image versions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionList {
pub value: Vec<GalleryImageVersion>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of gallery applications.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationList {
pub value: Vec<GalleryApplication>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of gallery application versions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionList {
pub value: Vec<GalleryApplicationVersion>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Common envelope for update (PATCH) payloads: id/name/type plus tags;
/// flattened into the concrete `*Update` types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateResourceDefinition {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// `type_` avoids the `type` keyword; serialized as `type`.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
/// Free-form tag map, kept as raw JSON.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
/// Request to change gallery sharing: an operation plus the groups it applies to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharingUpdate {
#[serde(rename = "operationType")]
pub operation_type: sharing_update::OperationType,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub groups: Vec<SharingProfileGroup>,
}
/// Enum(s) scoped to [`SharingUpdate`].
pub mod sharing_update {
use super::*;
/// Sharing operation; serialized verbatim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OperationType {
Add,
Remove,
Reset,
}
}
/// Minimal resource envelope (name + location) shared by PIR resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PirResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
/// PIR resource extended with a shared-gallery identifier; base fields are
/// flattened into the same JSON object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PirSharedGalleryResource {
    #[serde(flatten)]
    pub pir_resource: PirResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identifier: Option<SharedGalleryIdentifier>,
}
/// Identifier of a shared gallery.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryIdentifier {
    /// The gallery's unique id.
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
}
/// Paged list of [`SharedGallery`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryList {
    pub value: Vec<SharedGallery>,
    /// URI of the next page of results, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A gallery shared with the caller; currently just the flattened base resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGallery {
    #[serde(flatten)]
    pub pir_shared_gallery_resource: PirSharedGalleryResource,
}
/// Paged list of [`SharedGalleryImage`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryImageList {
    pub value: Vec<SharedGalleryImage>,
    /// URI of the next page of results, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// An image definition in a shared gallery: flattened base resource plus
/// optional image properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryImage {
    #[serde(flatten)]
    pub pir_shared_gallery_resource: PirSharedGalleryResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SharedGalleryImageProperties>,
}
/// Properties of a shared gallery image definition. `os_type`, `os_state`
/// and `identifier` are required by the wire format; everything else is
/// optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryImageProperties {
    #[serde(rename = "osType")]
    pub os_type: shared_gallery_image_properties::OsType,
    #[serde(rename = "osState")]
    pub os_state: shared_gallery_image_properties::OsState,
    /// End-of-life date as a string timestamp.
    #[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
    pub end_of_life_date: Option<String>,
    pub identifier: GalleryImageIdentifier,
    /// Recommended VM sizing for this image.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub recommended: Option<RecommendedMachineConfiguration>,
    /// Disk types this image cannot use.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disallowed: Option<Disallowed>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<shared_gallery_image_properties::HyperVGeneration>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub features: Vec<GalleryImageFeature>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<ImagePurchasePlan>,
}
pub mod shared_gallery_image_properties {
    use super::*;
    /// Operating system family of the image.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
    /// Whether the image was generalized (sysprep'd) or captured as-is.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsState {
        Generalized,
        Specialized,
    }
    /// Hyper-V VM generation the image targets.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HyperVGeneration {
        V1,
        V2,
    }
}
/// Paged list of [`SharedGalleryImageVersion`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryImageVersionList {
    pub value: Vec<SharedGalleryImageVersion>,
    /// URI of the next page of results, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A specific version of a shared gallery image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryImageVersion {
    #[serde(flatten)]
    pub pir_shared_gallery_resource: PirSharedGalleryResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SharedGalleryImageVersionProperties>,
}
/// Lifecycle metadata of a shared gallery image version; dates are string
/// timestamps.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SharedGalleryImageVersionProperties {
    #[serde(rename = "publishedDate", default, skip_serializing_if = "Option::is_none")]
    pub published_date: Option<String>,
    #[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
    pub end_of_life_date: Option<String>,
}
|
extern crate lazy_static;
pub use lazy_static::lazy_static;
use std::env;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::fmt::{Display};
/// Open the file at `filename` and return a buffered iterator over its lines.
///
/// # Errors
/// Returns the I/O error from opening the file.
pub fn read_lines<P: AsRef<Path>>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> {
    File::open(filename).map(|handle| io::BufReader::new(handle).lines())
}
/// Read the whole file at `filename` into a `Vec` of lines (newlines stripped).
///
/// # Panics
/// Panics with a descriptive message (including the path) if the file cannot
/// be opened, or if a line cannot be read (e.g. invalid UTF-8). The original
/// bare `unwrap()` gave no hint about which file failed.
pub fn read_lines2<P>(filename: P) -> Vec<String>
where P: AsRef<Path>, {
    let path = filename.as_ref();
    let file = File::open(path)
        .unwrap_or_else(|e| panic!("failed to open {}: {}", path.display(), e));
    io::BufReader::new(file)
        .lines()
        .map(|line| line.expect("failed to read line"))
        .collect()
}
/// Read the file named by the first command-line argument into lines.
///
/// # Panics
/// Panics if no argument was supplied (index 1 is out of bounds) or if the
/// file cannot be read (see `read_lines2`).
pub fn lines_arg1() -> Vec<String> {
    read_lines2(&argv()[1])
}
/// Read the entire file named by the first command-line argument into a `String`.
///
/// # Panics
/// Panics if no argument was supplied or if the file cannot be read.
pub fn contents_arg1() -> String {
    std::fs::read_to_string(&argv()[1]).unwrap()
}
/// Collect the process's command-line arguments — including the program name
/// at index 0 — into a `Vec`.
///
/// The original bound the collected vector to a local only to return it on
/// the next line; the collect expression is returned directly instead.
pub fn argv() -> Vec<String> {
    env::args().collect()
}
/// A dense, fixed-size 2-D grid stored row-major in a flat buffer.
#[derive(Debug)]
pub struct Array2d<T>
where T: Copy + Display {
    /// Flat backing storage; element (col, row) lives at `row * cols + col`.
    buf: Vec<T>,
    cols: usize,
    rows: usize,
}

/// Map a (col, row) coordinate to its flat index, asserting it is in bounds.
fn index_of(col: usize, row: usize, cols: usize, rows: usize) -> usize {
    assert!(col < cols);
    assert!(row < rows);
    row * cols + col
}

impl<T> Array2d<T>
where T: Copy + Display {
    /// Create a `cols` x `rows` grid with every cell initialized to `value`.
    ///
    /// # Panics
    /// Panics if either dimension is zero.
    pub fn new(cols: usize, rows: usize, value: T) -> Array2d<T> {
        assert!(cols > 0);
        assert!(rows > 0);
        Array2d {
            // `vec!` allocates the exact size once; `cols * rows` is already
            // `usize`, so the original `as usize` cast was redundant.
            buf: vec![value; cols * rows],
            cols,
            rows,
        }
    }

    /// Value at (col, row); panics when out of bounds.
    pub fn get(&self, col: usize, row: usize) -> T {
        self.buf[index_of(col, row, self.cols, self.rows)]
    }

    /// Value at (col, row) with both coordinates wrapped modulo the grid
    /// dimensions, so any coordinate is valid (toroidal access).
    pub fn get_wrapped(&self, col: usize, row: usize) -> T {
        let wrapped_col = col % self.cols;
        let wrapped_row = row % self.rows;
        self.buf[index_of(wrapped_col, wrapped_row, self.cols, self.rows)]
    }

    /// Store `value` at (col, row); panics when out of bounds.
    pub fn set(&mut self, col: usize, row: usize, value: T) {
        self.buf[index_of(col, row, self.cols, self.rows)] = value
    }

    /// Number of rows.
    pub fn rows(&self) -> usize {
        self.rows
    }

    /// Number of columns.
    pub fn cols(&self) -> usize {
        self.cols
    }

    /// Element-wise transform into a new grid of the same shape.
    ///
    /// Generalized from a bare `fn(&T) -> T2` pointer to any `Fn(&T) -> T2`,
    /// which stays backward compatible because plain function pointers and
    /// non-capturing closures both implement `Fn`.
    pub fn map_to<T2, F>(&self, transform: F) -> Array2d<T2>
    where
        T2: Copy + Display,
        F: Fn(&T) -> T2,
    {
        Array2d {
            buf: self.buf.iter().map(transform).collect(),
            cols: self.cols,
            rows: self.rows,
        }
    }

    /// Print the grid row by row using each element's `Display` impl.
    pub fn dump(&self) {
        for row in 0..self.rows {
            for col in 0..self.cols {
                print!("{}", self.get(col, row));
            }
            println!()
        }
    }
}
use bytes::BytesMut;
use libp2p::core::ProtocolName;
use libp2p::core::{Negotiated, UpgradeInfo};
use tokio::prelude::{AsyncRead, AsyncWrite};
use libp2p::{InboundUpgrade, OutboundUpgrade};
use futures::future::FutureResult;
use tokio::codec::Framed;
use unsigned_varint::codec::UviBytes;
use crate::message::Message;
use futures::{Stream, Sink};
/// Protocol name advertised during multistream-select negotiation.
#[derive(Clone)]
pub struct Name;
impl ProtocolName for Name {
    fn protocol_name(&self) -> &[u8] {
        b"/ackintosh/pbft/1.0.0"
    }
}
/// Connection-upgrade configuration for the PBFT protocol; carries no state,
/// it only selects the framing and (de)serialization applied to substreams.
#[derive(Clone, Debug)]
pub struct PbftProtocolConfig;
impl UpgradeInfo for PbftProtocolConfig {
    type Info = Name;
    type InfoIter = std::iter::Once<Self::Info>;
    /// Advertise the single protocol name this upgrade understands.
    fn protocol_info(&self) -> Self::InfoIter {
        // NOTE(review): debug tracing via println!; consider a real logger.
        println!("Pbft::protocol_info()");
        std::iter::once(Name{})
    }
}
/// Upgrade an inbound substream into a sink of raw byte payloads and a
/// stream of decoded [`Message`]s, framed with an unsigned-varint length
/// prefix (futures 0.1 combinator style).
impl<TSubstream> InboundUpgrade<TSubstream> for PbftProtocolConfig
where
    TSubstream: AsyncRead + AsyncWrite
{
    type Output = PbftInStreamSink<Negotiated<TSubstream>>;
    type Error = std::io::Error;
    type Future = FutureResult<Self::Output, std::io::Error>;
    fn upgrade_inbound(
        self,
        socket: Negotiated<TSubstream>,
        _info: Self::Info,
    ) -> Self::Future {
        println!("PbftProtocolConfig::upgrade_inbound");
        // Length-prefixed framing so message boundaries survive the transport.
        let codec = UviBytes::default();
        // TODO: Protocol Buffers
        futures::future::ok(
            Framed::new(socket, codec)
                .from_err()
                // Sink side: outgoing byte payloads pass through unchanged.
                .with::<_, fn(_) -> _, _>(|response| {
                    println!("[PbftProtocolConfig::upgrade_inbound] [with] response: {:?}", response);
                    Ok(response)
                })
                // Stream side: decode each received frame into a Message.
                .and_then::<fn(_) -> _, _>(|bytes| {
                    println!("[PbftProtocolConfig::upgrade_inbound] [and_then]");
                    Ok(bytes_to_message(&bytes))
                })
        )
    }
}
/// Upgrade an outbound substream into a sink of [`Message`]s (serialized to
/// JSON bytes before framing) and a stream of raw received bytes.
impl<TSubstream> OutboundUpgrade<TSubstream> for PbftProtocolConfig
where
    TSubstream: AsyncRead + AsyncWrite
{
    type Output = PbftOutStreamSink<Negotiated<TSubstream>>;
    // Reuse the inbound error type so both directions fail uniformly.
    type Error = <Self as InboundUpgrade<TSubstream>>::Error;
    type Future = FutureResult<Self::Output, std::io::Error>;
    fn upgrade_outbound(
        self,
        socket: Negotiated<TSubstream>,
        _info: Self::Info,
    ) -> Self::Future {
        println!("[PbftProtocolConfig::upgrade_outbound]");
        // Length-prefixed framing, mirroring the inbound side.
        let codec = UviBytes::default();
        // TODO: Protocol Buffers
        futures::future::ok(
            Framed::new(socket, codec)
                .from_err()
                // Sink side: serialize each outbound Message to JSON bytes.
                .with::<_, fn(_) -> _, _>(|outbound_message| {
                    println!("[PbftProtocolConfig::upgrade_outbound] [with] outbound_message : {:?}", outbound_message);
                    Ok(message_to_json(&outbound_message).into_bytes())
                })
                // Stream side: hand received frames to the caller as raw bytes.
                .and_then::<fn(_) -> _, _>(|bytes| {
                    println!("[PbftProtocolConfig::upgrade_outbound] [and_then]");
                    Ok(bytes.to_vec())
                })
        )
    }
}
/// Inbound substream type: sink of raw bytes, stream of decoded `Message`s.
pub type PbftInStreamSink<S> = PbftStreamSink<S, Vec<u8>, Message>;
/// Outbound substream type: sink of `Message`s, stream of raw bytes.
pub type PbftOutStreamSink<S> = PbftStreamSink<S, Message, Vec<u8>>;
/// Concrete type produced by the `with`/`and_then` adapter chain in the
/// upgrade methods: `A` is what the sink accepts, `B` is what the stream
/// yields; both directions go through varint-length-prefixed framing.
pub type PbftStreamSink<S, A, B> = futures::stream::AndThen<
    futures::sink::With<
        futures::stream::FromErr<Framed<S, UviBytes<Vec<u8>>>, std::io::Error>,
        A,
        fn(A) -> Result<Vec<u8>, std::io::Error>,
        Result<Vec<u8>, std::io::Error>
    >,
    fn(BytesMut) -> Result<B, std::io::Error>,
    Result<B, std::io::Error>,
>;
/// Serialize a consensus [`Message`] into its JSON wire representation.
///
/// Only protocol messages (`PrePrepare`, `Prepare`, `Commit`) are ever sent
/// over a substream; a `ClientRequest` reaching this point is a logic error.
fn message_to_json(message: &Message) -> String {
    let json = match message {
        Message::PrePrepare(_) | Message::Prepare(_) | Message::Commit(_) => {
            message.to_string()
        }
        // Client requests are handled locally and never serialized for peers.
        Message::ClientRequest(_) => unreachable!()
    };
    println!("[protocol_config::message_to_json] json: {:?}", json);
    // Tail expression instead of the original explicit `return json;`.
    json
}
fn bytes_to_message(bytes: &BytesMut) -> Message {
let message = bytes.to_vec().into();
println!("[protocol_config::bytes_to_message] message: {:?}", message);
return message;
} |
// N-API build support is compiled in only when the `node` feature is enabled.
#[cfg(feature = "node")]
extern crate napi_build;
/// Build-script entry point: runs napi's build setup when the `node` feature
/// is active; otherwise the build script is a no-op.
fn main() {
    #[cfg(feature = "node")]
    napi_build::setup();
}
|
use std::fmt;
/// A reply to the FTP client
#[derive(Clone, PartialEq, Eq)]
pub enum Reply {
    /// Send nothing back to the client.
    None,
    /// A single-line reply: code plus one message.
    CodeAndMsg { code: ReplyCode, msg: String },
    /// A multi-line reply: code plus one message per line.
    MultiLine { code: ReplyCode, lines: Vec<String> },
}
// A custom debug implementation to avoid spamming the log with a large amount of data
impl fmt::Debug for Reply {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Reply::None => write!(f, "None"),
            Reply::CodeAndMsg { code, msg } => write!(f, "CodeAndMsg {{ code: {:?}, msg: {:?} }}", code, msg),
            Reply::MultiLine { code, lines } => {
                // Long multi-line replies are summarized as a count plus the
                // first line; short ones are printed in full.
                if lines.len() > 1 {
                    write!(f, "MultiLine {{ code: {:?}, {} lines ({}...) }}", code, lines.len(), lines[0])
                } else {
                    write!(f, "MultiLine {{ code: {:?}, line: {:?} }}", code, lines)
                }
            }
        }
    }
}
/// The reply codes according to RFC 959.
//
// From: https://cr.yp.to/ftp/request.html#response
//
// The three digits form a code. Codes between 100 and 199 indicate marks; codes between 200
// and 399 indicate acceptance; codes between 400 and 599 indicate rejection.
//
// RFC 959 prohibited all codes other than 110, 120, 125, 150, 200, 202, 211, 212, 213, 214, 215,
// 220, 221, 225, 226, 227, 230, 250, 257, 331, 332, 350, 421, 425, 426, 450, 451, 452, 500, 501,
// 502, 503, 504, 530, 532, 550, 551, 552, and 553.
//
// Typically the second digit is:
// - 0 for a syntax error
// - 1 for a human-oriented help message,
// - 2 for a hello/goodbye message
// - 3 for an accounting message
// - 5 for a filesystem-related message.
//
// However, clients cannot take this list seriously; the IETF adds new codes at its whim. I
// recommend that clients avoid looking past the first digit of the code,
// either 1, 2, 3, 4, or 5. The other two digits, and all other portions of the response,
// are primarily for human consumption. (Exceptions: Greetings, responses with code 227,
// and responses with code 257 have a special format.)
//
// Servers must not send marks except where they are explicitly allowed. Many clients cannot
// handle unusual marks. Typical requests do not permit any marks.
//
// The server can reject any request with code
// - 421 if the server is about to close the connection;
// - 500, 501, 502, or 504 for unacceptable syntax; or
// - 530 if permission is denied.
/// FTP reply codes (RFC 959 plus common extensions).
///
/// `GroupPreliminaryReply`/`GroupPositiveCompletion` are single-digit
/// placeholders; every other variant carries its three-digit wire value.
/// Fix: `Resp533` was appended out of numeric order after 553 — it now sits
/// in sequence between 532 and 534 (the discriminant is unchanged).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
#[allow(dead_code)]
pub enum ReplyCode {
    NoReply = 0,
    GroupPreliminaryReply = 1,
    GroupPositiveCompletion = 2,
    // 1xx: positive preliminary replies (marks).
    RestartMarker = 110,
    InNMinutes = 120,
    ConnectionAlreadyOpen = 125,
    FileStatusOkay = 150,
    // 2xx: positive completion.
    CommandOkay = 200,
    CommandOkayNotImplemented = 202,
    SystemStatus = 211,
    DirectoryStatus = 212,
    FileStatus = 213,
    HelpMessage = 214,
    SystemType = 215,
    ServiceReady = 220,
    ClosingControlConnection = 221,
    DataConnectionOpen = 225,
    ClosingDataConnection = 226,
    EnteringPassiveMode = 227,
    EnteringExtendedPassiveMode = 229,
    UserLoggedIn = 230,
    UserLoggedInViaCert = 232,
    AuthOkayNoDataNeeded = 234,
    FileActionOkay = 250,
    DirCreated = 257,
    // 3xx: positive intermediate.
    NeedPassword = 331,
    NeedAccount = 332,
    FileActionPending = 350,
    // 4xx: transient negative completion.
    ServiceNotAvailable = 421,
    CantOpenDataConnection = 425,
    ConnectionClosed = 426,
    TransientFileError = 450,
    LocalError = 451,
    OutOfSpace = 452,
    // 5xx: permanent negative completion.
    CommandSyntaxError = 500,
    ParameterSyntaxError = 501,
    CommandNotImplemented = 502,
    BadCommandSequence = 503,
    CommandNotImplementedForParameter = 504,
    NotLoggedIn = 530,
    NeedAccountToStore = 532,
    Resp533 = 533,
    FtpsRequired = 534, // Could Not Connect to Server - Policy Requires SSL
    FileError = 550,
    PageTypeUnknown = 551,
    ExceededStorageAllocation = 552,
    BadFileName = 553,
}
impl Reply {
    /// Build a single-line reply from a string slice.
    pub fn new(code: ReplyCode, message: &str) -> Self {
        Reply::CodeAndMsg {
            code,
            msg: message.to_string(),
        }
    }

    /// Build a single-line reply, taking ownership of the message.
    pub fn new_with_string(code: ReplyCode, msg: String) -> Self {
        Reply::CodeAndMsg { code, msg }
    }

    /// Build a multi-line reply; every item is rendered via its `Display` impl.
    pub fn new_multiline<I>(code: ReplyCode, lines: I) -> Self
    where
        I: IntoIterator,
        I::Item: std::fmt::Display,
    {
        let rendered: Vec<String> = lines.into_iter().map(|item| item.to_string()).collect();
        Reply::MultiLine { code, lines: rendered }
    }

    /// A reply that sends nothing back to the client.
    pub fn none() -> Self {
        Reply::None
    }
}
|
use super::*;
#[cfg(feature = "asynchronous")]
use crate::LruAsyncCache;
use crate::LruCache;
use serde_json::{self, Value};
#[cfg(feature = "asynchronous")]
use std::sync::Arc;
use std::{rc::Rc, thread, time::Duration};
#[cfg(feature = "asynchronous")]
use tokio::time::delay_for;
/// A capacity of zero is invalid: the constructor must panic.
#[test]
#[should_panic]
fn test_create_cache_with_cap_0() {
    LruCache::<usize, ()>::new(0, 60);
}
/// Getting from a freshly created (empty) cache returns `None`.
#[test]
fn test_get_on_empty_cache() {
    let mut cache = LruCache::<(), usize>::new(1, 60);
    assert!(cache.is_empty());
    assert_eq!(cache.get(&()), None);
}
/// A miss on a populated cache returns `None` and leaves the single stored
/// entry untouched: alone in slab 0 with no list neighbours.
#[test]
fn test_get_uncached_key() {
    let mut cache = LruCache::<usize, usize>::new(1, 60);
    cache.put(1, 1);
    assert_eq!(cache.get(&2), None);
    // Inspect internal storage: exactly one node, unlinked on both sides.
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev().is_null()
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// Putting the same key twice reuses the existing slot: the old value is
/// returned and the cache still holds exactly one entry.
#[test]
fn test_reuse_single_entry() {
    let mut cache = LruCache::<&str, Rc<Value>>::new(1, 60);
    let val_1: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":1}"#).unwrap());
    let val_2: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":2}"#).unwrap());
    let old_value = cache.put("1", val_1.clone());
    assert_eq!(old_value, None);
    // Second put with the same key must yield the displaced first value.
    let old_value = cache.put("1", val_2.clone());
    assert!(if let Some(value) = old_value {
        value == val_1
    } else {
        false
    });
    assert_eq!(cache.len(), 1);
    // Internal layout: the single node still occupies slab 0, pos 0.
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev().is_null()
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// Inserting after the TTL (1s) has elapsed reuses the expired entry's slot
/// instead of growing the cache: the expired value is returned as displaced.
#[test]
fn test_reuse_expire_entry() {
    let mut cache = LruCache::<usize, Rc<Value>>::new(2, 1);
    let val_1: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":1}"#).unwrap());
    let val_2: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":2}"#).unwrap());
    let old_value = cache.put(1, val_1.clone());
    assert_eq!(old_value, None);
    // Let the first entry expire (TTL is 1 second).
    thread::sleep(Duration::from_secs(1));
    let old_value = cache.put(2, val_2.clone());
    assert!(if let Some(value) = old_value {
        value == val_1
    } else {
        false
    });
    assert_eq!(cache.len(), 1);
    // The new entry took over the expired entry's slot (slab 0, pos 0).
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev().is_null()
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// With both slots filled and expired, a new put reuses the least-recently
/// used slot (the first insertion) and links the two remaining nodes.
#[test]
fn test_reuse_last_expire_entry() {
    let mut cache = LruCache::<usize, Rc<Value>>::new(2, 1);
    let val_1: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":1}"#).unwrap());
    let val_2: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":2}"#).unwrap());
    let val_3: Rc<Value> = Rc::new(serde_json::from_str(r#"{"id":3}"#).unwrap());
    let old_value = cache.put(1, val_1.clone());
    assert_eq!(old_value, None);
    let old_value = cache.put(2, val_2.clone());
    assert_eq!(old_value, None);
    // Expire both entries, then insert a third.
    thread::sleep(Duration::from_secs(1));
    let old_value = cache.put(3, val_3.clone());
    // The LRU (first) entry is the one displaced.
    assert!(if let Some(value) = old_value {
        value == val_1
    } else {
        false
    });
    assert_eq!(cache.len(), 2);
    // Layout: pos 0 (new head) -> pos 1, doubly linked.
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev().is_null()
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 1 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 1 }
            && item.prev() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// Getting the entry that is already at the head of the LRU list keeps the
/// list order unchanged (most recent first).
#[test]
fn test_get_head_entry() {
    let mut cache = LruCache::<usize, &str>::new(2, 60);
    cache.put(1, "one");
    cache.put(2, "two");
    let cache_head = cache.get(&2);
    assert_eq!(cache_head, Some(&"two"));
    // Head is pos 1 (key 2), followed by pos 0 (key 1).
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 1 }
            && item.prev().is_null()
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 0 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev() == Pointer::InternalPointer { slab: 0, pos: 1 }
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// Getting the least-recently-used entry promotes it to the head of the list.
#[test]
fn test_get_least_entry() {
    let mut cache = LruCache::<usize, &str>::new(3, 60);
    cache.put(1, "one");
    cache.put(2, "two");
    cache.put(3, "three");
    let cache_head = cache.get(&1);
    assert_eq!(cache_head, Some(&"one"));
    // Expected order after promotion: pos 0 (key 1) -> pos 2 -> pos 1.
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev().is_null()
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 2 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 2 }
            && item.prev() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 1 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 1 }
            && item.prev() == Pointer::InternalPointer { slab: 0, pos: 2 }
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// Getting a middle entry moves it to the head and re-links its neighbours.
#[test]
fn test_get_middle_entry() {
    let mut cache = LruCache::<usize, &str>::new(3, 60);
    cache.put(1, "one");
    cache.put(2, "two");
    cache.put(3, "three");
    let cache_head = cache.get(&2);
    assert_eq!(cache_head, Some(&"two"));
    // Expected order after promotion: pos 1 (key 2) -> pos 2 -> pos 0.
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 1 }
            && item.prev().is_null()
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 2 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 2 }
            && item.prev() == Pointer::InternalPointer { slab: 0, pos: 1 }
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 0 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev() == Pointer::InternalPointer { slab: 0, pos: 2 }
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
}
/// An expired entry reads as `None`; the node stays in storage (len is
/// unchanged) and capacity has grown to a second slab after the overflow put.
#[test]
fn test_get_expire_entry() {
    let mut cache = LruCache::<usize, &str>::new(2, 1);
    cache.put(1, "one");
    cache.put(2, "two");
    // Third put overflows the initial capacity of 2, growing storage.
    cache.put(3, "three");
    let cache_head = cache.get(&2);
    assert_eq!(cache_head, Some(&"two"));
    // Let everything expire (TTL is 1 second).
    thread::sleep(Duration::from_secs(1));
    assert_eq!(cache.get(&2), None);
    // Remaining list spans two slabs: slab 1 pos 0 -> slab 0 pos 0.
    let mut iter = cache.storage.iter();
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 1, pos: 0 }
            && item.prev().is_null()
            && item.next() == Pointer::InternalPointer { slab: 0, pos: 0 }
    } else {
        false
    });
    assert!(if let Some(item) = iter.next() {
        item.ptr() == Pointer::InternalPointer { slab: 0, pos: 0 }
            && item.prev() == Pointer::InternalPointer { slab: 1, pos: 0 }
            && item.next().is_null()
    } else {
        false
    });
    assert!(iter.next().is_none());
    assert_eq!(cache.len(), 2);
    // Capacity doubled from 2 to 4 when the second slab was allocated.
    assert_eq!(cache.capacity(), 4);
}
/// Async variant: after the TTL passes, the async cache reports itself empty
/// with zero capacity (expired storage reclaimed).
#[cfg(feature = "asynchronous")]
#[tokio::test]
async fn test_get_expire_entry_async() {
    let cache = LruAsyncCache::<usize, Arc<Value>>::new(2, 1);
    let val_1: Arc<Value> = Arc::new(serde_json::from_str(r#"{"id":1}"#).unwrap());
    let val_2: Arc<Value> = Arc::new(serde_json::from_str(r#"{"id":2}"#).unwrap());
    let val_3: Arc<Value> = Arc::new(serde_json::from_str(r#"{"id":3}"#).unwrap());
    cache.put(1, val_1.clone()).await;
    cache.put(2, val_2.clone()).await;
    cache.put(3, val_3.clone()).await;
    assert!(if let Some(value) = cache.get(&2).await {
        value == val_2
    } else {
        false
    });
    // Wait past the 1-second TTL before re-checking.
    delay_for(Duration::from_millis(1500)).await;
    assert_eq!(cache.len().await, 0);
    assert_eq!(cache.capacity().await, 0);
}
|
use bevy::prelude::*;
/// Resource holding the id of the gamepad currently assigned to the player.
pub struct GameController(Gamepad);
/// System that tracks gamepad hot-plugging: adopts the first connected pad as
/// the player's [`GameController`] and drops the resource when that pad
/// disconnects.
pub fn gamepad_connections(
    mut commands: Commands,
    my_gamepad: Option<Res<GameController>>,
    mut gamepad_evr: EventReader<GamepadEvent>,
) {
    for GamepadEvent(id, kind) in gamepad_evr.iter() {
        match kind {
            GamepadEventType::Connected => {
                println!("New gamepad connected with ID: {:?}", id);
                // if we don't have any gamepad yet, use this one
                if my_gamepad.is_none() {
                    commands.insert_resource(GameController(*id));
                }
            }
            GamepadEventType::Disconnected => {
                println!("Lost gamepad connection with ID: {:?}", id);
                // if it's the one we previously associated with the player,
                // disassociate it:
                if let Some(GameController(old_id)) = my_gamepad.as_deref() {
                    if old_id == id {
                        commands.remove_resource::<GameController>();
                    }
                }
            }
            // other events are irrelevant
            _ => {}
        }
    }
}
/// Logical game inputs, decoupled from the physical controls that produce them.
#[derive(PartialEq)]
pub enum GameButton {
    Up, Down, Left, Right, Action, Switch, Nothing, Start
}
pub fn get_pressed_buttons(
axes: &Res<Axis<GamepadAxis>>,
buttons: &Res<Input<GamepadButton>>,
gamepad: Option<Res<GameController>>,
) -> Vec::<GameButton> {
let mut pressed_buttons = vec!();
let gamepad = if let Some(gp) = gamepad {
// a gamepad is connected, we have the id
gp.0
} else {
// no gamepad is connected
return pressed_buttons;
};
// The joysticks are represented using a separate axis for X and Y
let axis_lx = GamepadAxis(gamepad, GamepadAxisType::LeftStickX);
let axis_ly = GamepadAxis(gamepad, GamepadAxisType::LeftStickY);
if let (Some(x), Some(y)) = (axes.get(axis_lx), axes.get(axis_ly)) {
// combine X and Y into one vector
let left_stick_pos = Vec2::new(x, y);
// implement a dead-zone to ignore small inputs
if left_stick_pos.length() > 0.1 {
// do something with the position of the left stick
if x > 0.0 {
pressed_buttons.push(GameButton::Right);
}
if x < 0.0 {
pressed_buttons.push(GameButton::Left);
}
if y > 0.0 {
pressed_buttons.push(GameButton::Up);
}
if y < 0.0 {
pressed_buttons.push(GameButton::Down);
}
}
}
let axis_dx = GamepadAxis(gamepad, GamepadAxisType::DPadX);
let axis_dy = GamepadAxis(gamepad, GamepadAxisType::DPadY);
if let (Some(x), Some(y)) = (axes.get(axis_dx), axes.get(axis_dy)) {
// combine X and Y into one vector
let left_stick_pos = Vec2::new(x, y);
// implement a dead-zone to ignore small inputs
if left_stick_pos.length() > 0.1 {
// do something with the position of the left stick
if x > 0.0 {
pressed_buttons.push(GameButton::Right);
}
if x < 0.0 {
pressed_buttons.push(GameButton::Left);
}
if y > 0.0 {
pressed_buttons.push(GameButton::Up);
}
if y < 0.0 {
pressed_buttons.push(GameButton::Down);
}
}
}
let dpad_up = GamepadButton(gamepad, GamepadButtonType::DPadUp);
let dpad_down = GamepadButton(gamepad, GamepadButtonType::DPadDown);
let dpad_left = GamepadButton(gamepad, GamepadButtonType::DPadLeft);
let dpad_right = GamepadButton(gamepad, GamepadButtonType::DPadRight);
if buttons.pressed(dpad_up) {
pressed_buttons.push(GameButton::Up);
}
if buttons.pressed(dpad_down) {
pressed_buttons.push(GameButton::Down);
}
if buttons.pressed(dpad_left) {
pressed_buttons.push(GameButton::Left);
}
if buttons.pressed(dpad_right) {
pressed_buttons.push(GameButton::Right);
}
let action_1 = GamepadButton(gamepad, GamepadButtonType::South);
let action_2 = GamepadButton(gamepad, GamepadButtonType::East);
if buttons.just_pressed(action_1) {
pressed_buttons.push(GameButton::Action);
}
if buttons.just_pressed(action_2) {
pressed_buttons.push(GameButton::Switch);
}
let start_button = GamepadButton(gamepad, GamepadButtonType::Start);
if buttons.just_pressed(start_button) {
pressed_buttons.push(GameButton::Start);
}
pressed_buttons
}
|
//! Advanced Vector Extensions (AVX)
use mem::transmute;
use simd::i64x4;
/// Copy `a` to result, and insert the 64-bit integer `i` into result
/// at the location specified by `index`.
///
/// The underlying `_mm256_insert_epi64` intrinsic requires a compile-time
/// lane index, so the runtime `index` is dispatched through a `match` with
/// one arm per lane. `index` must be in `0..=3`; any other value reaches
/// `unreachable!()` and panics.
///
/// # Safety
/// The caller must ensure the CPU supports AVX (this function is compiled
/// with `#[target_feature(enable = "avx")]`).
#[inline]
#[target_feature(enable = "avx")]
pub unsafe fn _mm256_insert_epi64(a: i64x4, i: i64, index: i32) -> i64x4 {
    match index {
        0 => transmute(::arch::_mm256_insert_epi64(transmute(a), i, 0)),
        1 => transmute(::arch::_mm256_insert_epi64(transmute(a), i, 1)),
        2 => transmute(::arch::_mm256_insert_epi64(transmute(a), i, 2)),
        3 => transmute(::arch::_mm256_insert_epi64(transmute(a), i, 3)),
        _ => unreachable!(),
    }
}
|
use crate::error::*;
use crate::*;
use ::zip;
/// Marker type selecting the zip-archive installation strategy.
pub struct Zip;
/// Installer that deploys a unity module from a zip archive to a destination.
pub type ModuleZipInstaller = Installer<UnityModule, Zip, InstallerWithDestination>;
impl<V, I> Installer<V, Zip, I> {
    /// Extract the zip archive at `installer` into `destination` without
    /// renaming any entries.
    pub fn deploy_zip(&self, installer: &Path, destination: &Path) -> Result<()> {
        self.deploy_zip_with_rename(installer, destination, |p| p.to_path_buf())
    }

    /// Extract every entry of the archive at `installer` into `destination`.
    ///
    /// `rename_handler` maps each would-be output path to its final location,
    /// letting callers relocate entries (e.g. strip a top-level directory).
    /// Directory entries are created recursively; on unix the stored file
    /// mode is restored after extraction.
    ///
    /// # Errors
    /// Returns an error if the archive cannot be opened or read (including a
    /// corrupt entry — previously `by_index(i).unwrap()` would panic instead
    /// of propagating), or on any filesystem failure while writing.
    fn deploy_zip_with_rename<F>(
        &self,
        installer: &Path,
        destination: &Path,
        rename_handler: F,
    ) -> Result<()>
    where
        F: Fn(&Path) -> PathBuf,
    {
        let file = File::open(installer)?;
        let mut archive = zip::ZipArchive::new(file)?;
        for i in 0..archive.len() {
            // Propagate corrupt-entry errors instead of panicking mid-extract.
            let mut file = archive.by_index(i)?;
            // `mangled_name` sanitizes the entry path against traversal.
            let outpath = rename_handler(&destination.join(file.mangled_name()));
            {
                let comment = file.comment();
                if !comment.is_empty() {
                    trace!("File {} comment: {}", i, comment);
                }
            }
            // Zip marks directory entries with a trailing slash in the name.
            if file.name().ends_with('/') {
                debug!(
                    "File {} extracted to \"{}\"",
                    i,
                    outpath.as_path().display()
                );
                std::fs::DirBuilder::new()
                    .recursive(true)
                    .create(&outpath)?;
            } else {
                debug!(
                    "File {} extracted to \"{}\" ({} bytes)",
                    i,
                    outpath.as_path().display(),
                    file.size()
                );
                // Ensure the parent directory exists before creating the file.
                if let Some(p) = outpath.parent() {
                    if !p.exists() {
                        std::fs::DirBuilder::new().recursive(true).create(&p)?;
                    }
                }
                let mut outfile = fs::File::create(&outpath)?;
                io::copy(&mut file, &mut outfile)?;
            }
            // Restore the unix permission bits stored in the archive, if any.
            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                if let Some(mode) = file.unix_mode() {
                    fs::set_permissions(&outpath, fs::Permissions::from_mode(mode))?;
                }
            }
        }
        Ok(())
    }
}
impl InstallHandler for ModuleZipInstaller {
    /// Install a unity module from a zip archive into the destination
    /// directory, optionally re-rooting entries according to `rename()`.
    fn install_handler(&self) -> Result<()> {
        let rename = self.rename();
        // NOTE(review): the `unwrap()` assumes every entry path starts with
        // `from` whenever a rename mapping is configured — confirm archives
        // always satisfy this, otherwise extraction panics mid-install.
        let rename_handler = |path: &Path| match rename {
            Some((from, to)) => path.strip_prefix(from).map(|p| to.join(p)).unwrap(),
            None => path.to_path_buf(),
        };
        let installer = self.installer();
        let destination = self.destination();
        debug!(
            "install module from zip archive {} to {}",
            installer.display(),
            destination.display()
        );
        self.deploy_zip_with_rename(installer, destination, rename_handler)
    }
    /// On failure, best-effort cleanup of the partially written destination.
    fn error_handler(&self) {
        self.cleanup_directory_failable(self.destination());
    }
}
|
#![no_std]
#![no_main]
extern crate panic_halt;
use cortex_m_rt::entry;
use hal::gpio::GpioExt;
use hal::flash::FlashExt;
use hal::rcc::RccExt;
use hd44780_driver::{Cursor, CursorBlink, Display, DisplayMode, HD44780};
// Connections:
// VSS: GND
// VDD: 5V
// V0: 10k poti between 5V and GND
// RS: PD1
// RW: GND
// E: PD2
// D4-D7: PD4-PD7
// A: 5V
// K: GND
#[entry]
fn main() -> ! {
    // Take ownership of the core and device peripherals (panics if taken twice).
    let cp = cortex_m::Peripherals::take().unwrap();
    let dp = hal::stm32f30x::Peripherals::take().unwrap();
    // Configure clocks; SYST-based delay is needed for the LCD timing.
    let mut flash = dp.FLASH.constrain();
    let mut rcc = dp.RCC.constrain();
    let mut gpiod = dp.GPIOD.split(&mut rcc.ahb);
    let clocks = rcc.cfgr.freeze(&mut flash.acr);
    let delay = hal::delay::Delay::new(cp.SYST, clocks);
    // LCD control pins (see the wiring table above): RS=PD1, E=PD2.
    let rs = gpiod.pd1.into_push_pull_output(&mut gpiod.moder, &mut gpiod.otyper);
    let en = gpiod.pd2.into_push_pull_output(&mut gpiod.moder, &mut gpiod.otyper);
    // 4-bit data bus on PD4..PD7.
    let b4 = gpiod.pd4.into_push_pull_output(&mut gpiod.moder, &mut gpiod.otyper);
    let b5 = gpiod.pd5.into_push_pull_output(&mut gpiod.moder, &mut gpiod.otyper);
    let b6 = gpiod.pd6.into_push_pull_output(&mut gpiod.moder, &mut gpiod.otyper);
    let b7 = gpiod.pd7.into_push_pull_output(&mut gpiod.moder, &mut gpiod.otyper);
    let mut lcd = HD44780::new_4bit(rs, en, b4, b5, b6, b7, delay);
    // Initialize the display: reset, clear, then enable a blinking cursor.
    lcd.reset();
    lcd.clear();
    lcd.set_display_mode(
        DisplayMode {
            display: Display::On,
            cursor_visibility: Cursor::Visible,
            cursor_blink: CursorBlink::On,
        }
    );
    lcd.write_str("Hello, world!");
    // Embedded entry points never return; park the CPU in a busy loop.
    loop {}
}
|
//! Bridge to provide a client implementation for the `hyper` crate.
//!
//! # Examples
//!
//! Refer to the documentation for [`KitsuRequester`].
//!
//! [`KitsuRequester`]: trait.KitsuRequester.html
use futures::future::{self, Future};
use futures::Stream;
use hyper::client::{Client as HyperClient, Connect};
use hyper::error::Error as HyperError;
use hyper::Uri;
use serde_json;
use std::str::FromStr;
use ::builder::Search;
use ::model::*;
use ::{API_URL, Error};
/// Parse `$uri` into a [`Uri`], or short-circuit the enclosing function by
/// returning a boxed erroring future when the string is not a valid URI.
macro_rules! try_uri {
    ($uri:ident) => {
        match Uri::from_str($uri) {
            Ok(v) => v,
            // Early-return keeps the requester methods free of URI error
            // plumbing; the parse failure is wrapped as `Error::Uri`.
            Err(why) => return Box::new(future::err(Error::Uri(why))),
        }
    }
}
/// Trait which defines the methods necessary to interact with the service.
///
/// # Examples
///
/// To bring in the implemenation for the `hyper` Client, simply use the
/// trait:
///
/// ```rust,no_run
/// use kitsu::KitsuHyperRequester;
/// ```
///
/// At this point, the methods will be on your Hyper Client.
pub trait KitsuRequester {
/// Gets an anime using its id.
///
/// # Examples
///
/// Get an anime with the id of 1:
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
/// .connector(connector)
/// .build(&core.handle());
///
/// let anime_id = 1;
///
/// let runner = client.get_anime(anime_id)
/// .map(|anime| {
/// println!(
/// "The anime's name is '{}'",
/// anime.data.attributes.canonical_title,
/// );
/// })
/// .map_err(|why| {
/// println!("Error with the request: {:?}", why);
/// });
///
/// core.run(runner)?;
/// ```
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn get_anime(&self, id: u64)
-> Box<Future<Item = Response<Anime>, Error = Error>>;
/// Gets a character using its id.
fn get_character(&self, id: u64)
-> Box<Future<Item = Response<Character>, Error = Error>>;
/// Gets a manga using its id.
///
/// Returns a boxed future that resolves to a `Response<Manga>`, or an
/// `Error` if the HTTP request or the JSON deserialization of the body
/// fails.
///
/// # Examples
///
/// Get a manga with the id of 1:
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
///     .connector(connector)
///     .build(&core.handle());
///
/// let manga_id = 1;
///
/// let runner = client.get_manga(manga_id)
///     .map(|manga| {
///         println!(
///             "The manga's name is '{}'",
///             manga.data.attributes.canonical_title,
///         );
///     })
///     .map_err(|why| {
///         println!("Error with the request: {:?}", why);
///     });
///
/// core.run(runner)?;
/// ```
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn get_manga(&self, id: u64)
    -> Box<Future<Item = Response<Manga>, Error = Error>>;
/// Gets a producer using their id.
///
/// Returns a boxed future that resolves to a `Response<Producer>`, or an
/// `Error` if the HTTP request or the JSON deserialization of the body
/// fails.
///
/// # Examples
///
/// Get a producer with the id of 1:
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
///     .connector(connector)
///     .build(&core.handle());
///
/// let producer_id = 1;
///
/// let runner = client.get_producer(producer_id)
///     .map(|producer| {
///         println!(
///             "The producer's name is '{}'",
///             producer.data.attributes.name,
///         );
///     })
///     .map_err(|why| {
///         println!("Error with the request: {:?}", why);
///     });
///
/// core.run(runner)?;
/// ```
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn get_producer(&self, id: u64)
    -> Box<Future<Item = Response<Producer>, Error = Error>>;
/// Gets a user using their id.
///
/// Returns a boxed future that resolves to a `Response<User>`, or an
/// `Error` if the HTTP request or the JSON deserialization of the body
/// fails.
///
/// # Examples
///
/// Get a user with the id of 1:
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
///     .connector(connector)
///     .build(&core.handle());
///
/// let user_id = 1;
///
/// let runner = client.get_user(user_id)
///     .map(|user| {
///         println!(
///             "The user's name is '{}'",
///             user.data.attributes.name,
///         );
///     })
///     .map_err(|why| {
///         println!("Error with the request: {:?}", why);
///     });
///
/// core.run(runner)?;
/// ```
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn get_user(&self, id: u64)
    -> Box<Future<Item = Response<User>, Error = Error>>;
/// Searches for an anime using the passed [Search] builder.
///
/// The closure receives a default [Search] and returns the configured
/// builder; its parameters become the request's query string.
///
/// # Examples
///
/// Search for an anime with the name "Beyond the Boundary":
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
///     .connector(connector)
///     .build(&core.handle());
///
/// let anime_name = "Beyond the Boundary";
///
/// let runner = client.search_anime(|f| f.filter("text", anime_name))
///     .map(|resp| {
///         println!(
///             "There are {} results",
///             resp.data.len(),
///         );
///     })
///     .map_err(|why| {
///         println!("Error with the request: {:?}", why);
///     });
///
/// core.run(runner)?;
/// ```
///
/// [Search]: ../builder/struct.Search.html
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn search_anime<F: FnOnce(Search) -> Search>(&self, f: F)
    -> Box<Future<Item = Response<Vec<Anime>>, Error = Error>>;
/// Searches for a character using the passed [Search] builder.
///
/// Usage mirrors `search_anime`; see that method's example.
///
/// [Search]: ../builder/struct.Search.html
fn search_characters<F: FnOnce(Search) -> Search>(&self, f: F)
    -> Box<Future<Item = Response<Vec<Character>>, Error = Error>>;
/// Searches for a manga using the passed [Search] builder.
///
/// The closure receives a default [Search] and returns the configured
/// builder; its parameters become the request's query string.
///
/// # Examples
///
/// Search for a manga with the name "Orange":
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
///     .connector(connector)
///     .build(&core.handle());
///
/// let manga_name = "Orange";
///
/// let runner = client.search_manga(|f| f.filter("text", manga_name))
///     .map(|resp| {
///         println!(
///             "There are {} results",
///             resp.data.len(),
///         );
///     })
///     .map_err(|why| {
///         println!("Error with the request: {:?}", why);
///     });
///
/// core.run(runner)?;
/// ```
///
/// [Search]: ../builder/struct.Search.html
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn search_manga<F: FnOnce(Search) -> Search>(&self, f: F)
    -> Box<Future<Item = Response<Vec<Manga>>, Error = Error>>;
/// Searches for a user using the passed [`Search`] builder.
///
/// The closure receives a default [`Search`] and returns the configured
/// builder; its parameters become the request's query string.
///
/// # Examples
///
/// Search for a user with the name "Bob":
///
/// ```rust,ignore
/// extern crate hyper;
/// extern crate hyper_tls;
/// extern crate kitsu;
/// extern crate tokio_core;
///
/// use hyper_tls::HttpsConnector;
/// use kitsu::KitsuHyperRequester;
/// use hyper::Client;
/// use std::env;
/// use tokio_core::reactor::Core;
///
/// let mut core = Core::new()?;
///
/// let connector = HttpsConnector::new(1, &core.handle())?;
/// let client = Client::configure()
///     .connector(connector)
///     .build(&core.handle());
///
/// let user_name = "Bob";
///
/// let runner = client.search_users(|f| f.filter("name", user_name))
///     .map(|resp| {
///         println!(
///             "There are {} results",
///             resp.data.len(),
///         );
///     })
///     .map_err(|why| {
///         println!("Error with the request: {:?}", why);
///     });
///
/// core.run(runner)?;
/// ```
///
/// [`Search`]: ../builder/struct.Search.html
///
// Note: This doc example can not be tested due to the reliance on
// tokio_core. Instead, this is taken from example `02_hyper` and should
// roughly match it to ensure accuracy.
fn search_users<F: FnOnce(Search) -> Search>(&self, f: F)
    -> Box<Future<Item = Response<Vec<User>>, Error = Error>>;
}
// Every method below follows the same shape: build the endpoint URL,
// parse it into a `Uri` (`try_uri!` early-returns an error future on a
// malformed URL), issue the GET, buffer the whole response body, and
// deserialize it as JSON into the typed `Response` wrapper.
impl<B, C: Connect> KitsuRequester for HyperClient<C, B>
    where B: Stream<Error = HyperError> + 'static, B::Item: AsRef<[u8]> {
    fn get_anime(&self, id: u64)
        -> Box<Future<Item = Response<Anime>, Error = Error>> {
        let url = format!("{}/anime/{}", API_URL, id);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn get_character(&self, id: u64)
        -> Box<Future<Item = Response<Character>, Error = Error>> {
        let url = format!("{}/characters/{}", API_URL, id);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn get_manga(&self, id: u64)
        -> Box<Future<Item = Response<Manga>, Error = Error>> {
        let url = format!("{}/manga/{}", API_URL, id);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn get_producer(&self, id: u64)
        -> Box<Future<Item = Response<Producer>, Error = Error>> {
        // Kitsu exposes producers under the plural `/producers` resource,
        // matching the `/characters` and `/users` endpoints above; the
        // singular `/producer` used previously is not a documented endpoint.
        let url = format!("{}/producers/{}", API_URL, id);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn get_user(&self, id: u64)
        -> Box<Future<Item = Response<User>, Error = Error>> {
        let url = format!("{}/users/{}", API_URL, id);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn search_anime<F: FnOnce(Search) -> Search>(&self, f: F)
        -> Box<Future<Item = Response<Vec<Anime>>, Error = Error>> {
        // The builder's accumulated parameters become the query string.
        let params = f(Search::default()).0;
        let url = format!("{}/anime?{}", API_URL, params);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn search_characters<F: FnOnce(Search) -> Search>(&self, f: F)
        -> Box<Future<Item = Response<Vec<Character>>, Error = Error>> {
        let params = f(Search::default()).0;
        let url = format!("{}/characters?{}", API_URL, params);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn search_manga<F: FnOnce(Search) -> Search>(&self, f: F)
        -> Box<Future<Item = Response<Vec<Manga>>, Error = Error>> {
        let params = f(Search::default()).0;
        let url = format!("{}/manga?{}", API_URL, params);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }

    fn search_users<F: FnOnce(Search) -> Search>(&self, f: F)
        -> Box<Future<Item = Response<Vec<User>>, Error = Error>> {
        let params = f(Search::default()).0;
        let url = format!("{}/users?{}", API_URL, params);
        let c = &url;
        let uri = try_uri!(c);

        Box::new(self.get(uri)
            .and_then(|res| res.body().concat2())
            .map_err(From::from)
            .and_then(|body| serde_json::from_slice(&body).map_err(From::from)))
    }
}
|
use std::str::FromStr;
use crate::db::models::Grade;
use crate::db::repository::{
GradeRepository, PostgrSQLGradeRepository, PostgrSQLUserRepository, UserRepository,
};
use crate::errors::UserError;
use crate::Role;
/// Records `grade` for the user identified by `user_id`, backed by the
/// production PostgreSQL grade repository.
///
/// Returns `UserError::FailedToInsertGrade` if the storage layer rejects
/// the insert.
pub fn insert_grade(user_id: i32, grade: f32) -> Result<(), UserError> {
    _insert_grade(user_id, grade, &PostgrSQLGradeRepository {})
}
/// Fetches every grade recorded for `user_id`, backed by the production
/// PostgreSQL user repository.
pub fn get_grades(user_id: i32) -> Result<Vec<Grade>, UserError> {
    _get_grades(user_id, &PostgrSQLUserRepository {})
}
/// Inserts a grade through the injected `repository`, translating any
/// storage-level failure into the domain-level
/// `UserError::FailedToInsertGrade`.
///
/// Kept separate from `insert_grade` so tests can substitute a mock
/// repository.
fn _insert_grade(
    user_id: i32,
    grade: f32,
    repository: &dyn GradeRepository,
) -> Result<(), UserError> {
    // `map_err` replaces the original `if let Err(_) { return Err(..) }`
    // dance with the idiomatic single-expression form (same behavior).
    repository
        .insert_grade(user_id, grade)
        .map_err(|_| UserError::FailedToInsertGrade)
}
/// Fetches the grades of `user_id` through the injected `repository`.
///
/// # Errors
///
/// - `UserError::StudentNotFound` if the user lookup fails.
/// - `UserError::TeacherCantHaveGrades` if the user's role is `Teacher`.
///
/// A failure while reading the grades themselves is deliberately treated
/// as "no grades" and yields `Ok(vec![])`, preserving the original
/// best-effort behavior.
fn _get_grades(user_id: i32, repository: &dyn UserRepository) -> Result<Vec<Grade>, UserError> {
    let user = repository
        .get_user_by_id(user_id)
        .map_err(|_| UserError::StudentNotFound)?;

    // NOTE(review): like the original code, this panics if the stored role
    // string doesn't parse into a `Role`; consider a dedicated error.
    if Role::from_str(&user.role).unwrap() == Role::Teacher {
        return Err(UserError::TeacherCantHaveGrades);
    }

    Ok(repository.get_grades(user_id).unwrap_or_default())
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::db::models::{Grade, User};
    use crate::db::repository::{MockPostgrSQLGradeRepository, MockPostgrSQLUserRepository};
    use crate::errors::{DBError, UserError};
    use crate::utils;

    /// Two-grade fixture shared by the "student with grades" test; a fresh
    /// vector is built on every call so both the mock and the assertion get
    /// independent copies.
    fn sample_grades() -> Vec<Grade> {
        vec![
            Grade {
                id: 1,
                grade: 4.5,
                student_id: 1,
            },
            Grade {
                id: 2,
                grade: 5.1,
                student_id: 1,
            },
        ]
    }

    #[test]
    fn test_failed_grade_insert() {
        // A storage failure must surface as FailedToInsertGrade.
        let mut grade_repo = MockPostgrSQLGradeRepository::new();
        grade_repo
            .expect_insert_grade()
            .returning(|_, _| Err(DBError::FailToInsertGrade));

        assert_eq!(
            Err(UserError::FailedToInsertGrade),
            _insert_grade(5, 6.0, &grade_repo)
        );
    }

    #[test]
    fn test_succesfull_grade_insert() {
        let mut grade_repo = MockPostgrSQLGradeRepository::new();
        grade_repo.expect_insert_grade().returning(|_, _| Ok(()));

        assert_eq!(Ok(()), _insert_grade(1, 6.0, &grade_repo));
    }

    #[test]
    fn test_get_grade_with_unknown_user() {
        // An unknown user id maps to StudentNotFound.
        let mut user_repo = MockPostgrSQLUserRepository::new();
        user_repo
            .expect_get_user_by_id()
            .returning(|_| Err(DBError::UserNotFound));

        assert_eq!(Err(UserError::StudentNotFound), _get_grades(1, &user_repo));
    }

    #[test]
    fn test_get_grade_with_teacher() {
        // Teachers never carry grades; requesting them is a domain error.
        let mut user_repo = MockPostgrSQLUserRepository::new();
        let hashed = utils::hash("password");
        user_repo
            .expect_get_user_by_id()
            .returning(move |_| Ok(User::new("bob", &hashed, "Teacher")));

        assert_eq!(
            Err(UserError::TeacherCantHaveGrades),
            _get_grades(1, &user_repo)
        );
    }

    #[test]
    fn test_get_grade_with_student_without_grades() {
        // A storage error while reading grades degrades to an empty list.
        let mut user_repo = MockPostgrSQLUserRepository::new();
        let hashed = utils::hash("password");
        user_repo
            .expect_get_user_by_id()
            .returning(move |_| Ok(User::new("bob", &hashed, "Student")));
        user_repo
            .expect_get_grades()
            .returning(|_| Err(DBError::UserNotFound));

        assert_eq!(Ok(Vec::new()), _get_grades(1, &user_repo));
    }

    #[test]
    fn test_get_grade_with_student_with_grades() {
        let mut user_repo = MockPostgrSQLUserRepository::new();
        let hashed = utils::hash("password");
        user_repo
            .expect_get_user_by_id()
            .returning(move |_| Ok(User::new("bob", &hashed, "Student")));
        user_repo
            .expect_get_grades()
            .returning(|_| Ok(sample_grades()));

        assert_eq!(Ok(sample_grades()), _get_grades(1, &user_repo));
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.