text stringlengths 8 4.13M |
|---|
#![cfg_attr(not(feature = "std"), no_std)]
#![feature(proc_macro_hygiene)] // for tests in a separate file
use ink_lang as ink;
#[ink::contract]
mod ddc {
use ink_prelude::string::String;
use ink_prelude::vec::Vec;
use ink_storage::{
collections::HashMap as StorageHashMap,
lazy::Lazy,
traits::{PackedLayout, SpreadLayout},
};
use scale::{Decode, Encode};
// ---- Storage ----
#[ink(storage)]
pub struct Ddc {
    // -- Admin --
    /// Owner of Contract.
    owner: Lazy<AccountId>,
    /// Pause flag; state-changing messages guarded by `only_active` require it to be false.
    pause: bool,
    // -- Tiers --
    /// Configured service tiers, keyed by tier id.
    service_tiers: StorageHashMap<u64, ServiceTier>,
    // -- App Subscriptions --
    /// Mapping from an app account to its subscription record.
    subscriptions: StorageHashMap<AccountId, AppSubscription>,
    // -- Admin: Inspectors --
    /// Accounts allowed to report metrics and DDN statuses (set encoded as map to unit).
    inspectors: StorageHashMap<AccountId, ()>,
    /// Per-inspector start of the next metric period, in ms (set by `finalize_metric_period`).
    current_period_ms: StorageHashMap<AccountId, u64>,
    // -- DDC Node managers --
    /// Accounts allowed to add/remove DDC nodes (the owner is always allowed as well).
    ddn_managers: StorageHashMap<AccountId, ()>,
    // -- DDC Nodes --
    /// Registered DDC nodes, keyed by their p2p id.
    ddc_nodes: StorageHashMap<String, DDCNode>,
    // -- Statuses of DDC Nodes--
    /// Connectivity status tracked per (inspector, node) pair.
    ddn_statuses: StorageHashMap<DDNStatusKey, DDNStatus>,
    // -- Metrics Reporting --
    /// App metrics per (inspector, app, day-of-period).
    pub metrics: StorageHashMap<MetricKey, MetricValue>,
    /// DDC-node metrics per (inspector, p2p id, day-of-period).
    pub metrics_ddn: StorageHashMap<MetricKeyDDN, MetricValue>,
    /// Revenue consumed from subscriptions so far (see `actualize_subscription*`).
    pub total_ddc_balance: Balance,
}
impl Ddc {
    /// Constructor that initializes the contract.
    ///
    /// The instantiating account becomes the owner; every map starts empty,
    /// the contract starts unpaused and with a zero accumulated DDC balance.
    #[ink(constructor)]
    pub fn new() -> Self {
        Self {
            owner: Lazy::new(Self::env().caller()),
            pause: false,
            total_ddc_balance: 0,
            service_tiers: StorageHashMap::new(),
            subscriptions: StorageHashMap::new(),
            inspectors: StorageHashMap::new(),
            current_period_ms: StorageHashMap::new(),
            ddn_managers: StorageHashMap::new(),
            ddc_nodes: StorageHashMap::new(),
            ddn_statuses: StorageHashMap::new(),
            metrics: StorageHashMap::new(),
            metrics_ddn: StorageHashMap::new(),
        }
    }
}
// ---- Admin ----
impl Ddc {
/// Check if account is the owner of this contract
fn only_owner(&self) -> Result<()> {
let caller = self.env().caller();
if *self.owner == caller {
Ok(())
} else {
Err(Error::OnlyOwner)
}
}
/// Transfer the contract admin to the accoung provided
#[ink(message)]
pub fn transfer_ownership(&mut self, to: AccountId) -> Result<()> {
self.only_active()?;
self.only_owner()?;
*self.owner = to;
Ok(())
}
}
// ---- Admin: Funds ----
impl Ddc {
// This seems to be the endowment you give to the contract upon initializing it
// Official recommendation is 1000
/// Return the total balance held in this contract
#[ink(message)]
pub fn balance_of_contract(&self) -> Balance {
self.env().balance()
}
/// As owner, withdraw tokens to the given account. The destination account can be the same
/// as the contract owner. Some balance must be left in the contract as subsistence deposit.
#[ink(message)]
pub fn withdraw(&mut self, destination: AccountId, amount: Balance) -> Result<()> {
self.only_owner()?;
if destination == AccountId::default() {
return Err(Error::InvalidAccount);
}
// Check that the amount requested is *strictly* less than the contract balance.
// If it is exactly the same, it is probably an error because then the contract
// will not have any deposit left for its subsistence.
if self.env().balance() <= amount {
return Err(Error::InsufficientBalance);
}
match self.env().transfer(destination, amount) {
Err(_e) => Err(Error::TransferFailed),
Ok(_v) => Ok(()),
}
}
}
// ---- Admin: Pausable ----
impl Ddc {
    /// Return true while the contract is paused.
    #[ink(message)]
    pub fn paused_or_not(&self) -> bool {
        self.pause
    }

    /// Guard: succeed only while the contract is not paused.
    ///
    /// Returns `Error::ContractPaused` otherwise.
    fn only_active(&self) -> Result<()> {
        // Idiomatic boolean test instead of `self.pause == false`.
        if !self.pause {
            Ok(())
        } else {
            Err(Error::ContractPaused)
        }
    }

    /// Flip the status of the contract: pause it if it is live,
    /// unpause it if it was paused before.
    ///
    /// Only the contract owner can call this function.
    #[ink(message)]
    pub fn flip_contract_status(&mut self) -> Result<()> {
        self.only_owner()?;
        self.pause = !self.pause;
        Ok(())
    }
}
// ---- Admin: Tiers ----
#[derive(scale::Encode, Clone, scale::Decode, SpreadLayout, PackedLayout)]
#[cfg_attr(
    feature = "std",
    derive(
        Debug,
        PartialEq,
        Eq,
        scale_info::TypeInfo,
        ink_storage::traits::StorageLayout
    )
)]
/// A service tier: the fee charged per period plus the resource limits
/// granted to an app subscribed to this tier.
pub struct ServiceTier {
    /// Unique tier identifier (assigned by `calculate_new_tier_id`).
    tier_id: u64,
    /// Fee charged per `PERIOD_MS` for this tier (0 marks the free tier).
    tier_fee: Balance,
    /// Storage allowance, in bytes.
    storage_bytes: u64,
    /// Write capacity units allowed per minute.
    wcu_per_minute: u64,
    /// Read capacity units allowed per minute.
    rcu_per_minute: u64,
}
impl ServiceTier {
pub fn new(
tier_id: u64,
tier_fee: Balance,
storage_bytes: u64,
wcu_per_minute: u64,
rcu_per_minute: u64,
) -> ServiceTier {
ServiceTier {
tier_id,
tier_fee,
storage_bytes,
wcu_per_minute,
rcu_per_minute,
}
}
}
/// Emitted when the owner adds a new service tier (`add_tier`).
#[ink(event)]
pub struct TierAdded {
    tier_id: u64,
    tier_fee: Balance,
    storage_bytes: u64,
    wcu_per_minute: u64,
    rcu_per_minute: u64,
}
impl Ddc {
    /// Next free tier id: one more than the largest existing id
    /// (ids therefore start at 1).
    fn calculate_new_tier_id(&self) -> u64 {
        let max_id = self
            .service_tiers
            .values()
            .map(|tier| tier.tier_id)
            .max()
            .unwrap_or(0);
        max_id + 1
    }

    /// Owner-only: create a new tier with the given fee and limits,
    /// emit `TierAdded`, and return the assigned tier id.
    #[ink(message)]
    pub fn add_tier(
        &mut self,
        tier_fee: Balance,
        storage_bytes: u64,
        wcu_per_minute: u64,
        rcu_per_minute: u64,
    ) -> Result<u64> {
        self.only_owner()?;
        let tier_id = self.calculate_new_tier_id();
        let tier = ServiceTier {
            tier_id,
            tier_fee,
            storage_bytes,
            wcu_per_minute,
            rcu_per_minute,
        };
        self.service_tiers.insert(tier_id, tier);
        Self::env().emit_event(TierAdded {
            tier_id,
            tier_fee,
            storage_bytes,
            wcu_per_minute,
            rcu_per_minute,
        });
        Ok(tier_id)
    }

    /// Return the fee required for `tier_id`, or 0 for an unknown tier.
    #[ink(message)]
    pub fn tier_deposit(&self, tier_id: u64) -> Balance {
        // `tier_fee` already is a `Balance`; no cast needed.
        match self.service_tiers.get(&tier_id) {
            Some(tier) => tier.tier_fee,
            None => 0,
        }
    }

    /// Return all configured tiers.
    #[ink(message)]
    pub fn get_all_tiers(&self) -> Vec<ServiceTier> {
        self.service_tiers.values().cloned().collect()
    }

    /// Ok when `tier_id` refers to an existing tier,
    /// `Error::TidOutOfBound` otherwise.
    fn tid_in_bound(&self, tier_id: u64) -> Result<()> {
        if self.service_tiers.contains_key(&tier_id) {
            Ok(())
        } else {
            Err(Error::TidOutOfBound)
        }
    }

    /// Change the tier fee given the tier id and new fee.
    /// The new fee must differ from the current one.
    /// Must be the contract admin to call this function.
    #[ink(message)]
    pub fn change_tier_fee(&mut self, tier_id: u64, new_fee: Balance) -> Result<()> {
        self.tid_in_bound(tier_id)?;
        self.only_active()?;
        self.only_owner()?;
        self.diff_deposit(tier_id, new_fee)?;
        // Mutating through the `&mut` reference; no `mut` binding needed.
        let tier = self.service_tiers.get_mut(&tier_id).unwrap();
        tier.tier_fee = new_fee;
        Ok(())
    }

    /// Change tier limits given a tier id and the three new limits.
    /// Must be contract admin to call this function.
    #[ink(message)]
    pub fn change_tier_limit(
        &mut self,
        tier_id: u64,
        new_storage_bytes_limit: u64,
        new_wcu_limit: u64,
        new_rcu_limit: u64,
    ) -> Result<()> {
        self.tid_in_bound(tier_id)?;
        self.only_active()?;
        self.only_owner()?;
        let tier = self.service_tiers.get_mut(&tier_id).unwrap();
        tier.storage_bytes = new_storage_bytes_limit;
        tier.wcu_per_minute = new_wcu_limit;
        tier.rcu_per_minute = new_rcu_limit;
        Ok(())
    }

    /// Check if the new fee is the same as the old fee.
    /// Return `Error::SameDepositValue` if they are the same.
    fn diff_deposit(&self, tier_id: u64, new_value: Balance) -> Result<()> {
        self.tid_in_bound(tier_id)?;
        let tier = self.service_tiers.get(&tier_id).unwrap();
        if tier.tier_fee != new_value {
            Ok(())
        } else {
            Err(Error::SameDepositValue)
        }
    }

    /// Return a copy of the tier limits; panics on an unknown tier id
    /// (internal callers are expected to validate the id first).
    fn get_tier_limit(&self, tier_id: u64) -> ServiceTier {
        self.service_tiers
            .get(&tier_id)
            .expect("tier_id out of bounds")
            .clone()
    }
}
// ---- App Subscriptions ----
/// Event emitted when a deposit is made (see `subscribe`).
#[ink(event)]
pub struct Deposit {
    #[ink(topic)]
    from: Option<AccountId>,
    #[ink(topic)]
    value: Balance,
}
/// One app's subscription: when it started, which tier it is on, the
/// prepaid balance left, and when that balance was last charged.
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct AppSubscription {
    /// Subscription creation time (ms).
    start_date_ms: u64,
    /// Id of the subscribed `ServiceTier`.
    tier_id: u64,
    /// Remaining prepaid balance.
    balance: Balance,
    last_update_ms: u64, // initially creation time; advanced on each actualization
}
/// Read-only view returned to callers: the subscription plus the computed
/// time (ms) at which its prepaid balance runs out.
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct AppSubscriptionDetails {
    subscription: AppSubscription,
    end_date_ms: u64,
}
/// Resource limits currently granted to an app (mirrors the limit fields
/// of `ServiceTier`).
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct AppSubscriptionLimit {
    storage_bytes: u64,
    wcu_per_minute: u64,
    rcu_per_minute: u64,
}
impl AppSubscriptionLimit {
pub fn new(
storage_bytes: u64,
wcu_per_minute: u64,
rcu_per_minute: u64,
) -> AppSubscriptionLimit {
AppSubscriptionLimit {
storage_bytes,
wcu_per_minute,
rcu_per_minute,
}
}
}
impl Ddc {
    /// Returns the account balance for the specified `account`.
    /// Returns `0` if the account is non-existent.
    #[ink(message)]
    pub fn balance_of(&self, owner: AccountId) -> Balance {
        let subscription_opt = self.subscriptions.get(&owner);
        if subscription_opt.is_none() {
            return 0;
        }
        let subscription = subscription_opt.unwrap();
        subscription.balance
    }

    // TODO: Add tests in case if subscription is empty
    /// Return the tier id corresponding to the account.
    ///
    /// NOTE(review): delegates to `get_tier_id`, which unwraps the
    /// subscription lookup — an account without a subscription panics here.
    #[ink(message)]
    pub fn tier_id_of(&self, acct: AccountId) -> u64 {
        self.get_tier_id(&acct)
    }

    /// Return the tier limit corresponding the account.
    #[ink(message)]
    pub fn tier_limit_of(&self, acct: AccountId) -> ServiceTier {
        let tier_id = self.get_tier_id(&acct);
        self.get_tier_limit(tier_id)
    }

    /// Return the subscription together with its computed end date for `acct`.
    #[ink(message)]
    pub fn get_subscription_details_of(
        &self,
        acct: AccountId,
    ) -> Result<AppSubscriptionDetails> {
        let subscription = match self.subscriptions.get(&acct) {
            None => return Err(Error::NoSubscription),
            Some(v) => v,
        };
        Ok(AppSubscriptionDetails {
            subscription: subscription.clone(),
            end_date_ms: self.get_end_date_ms(subscription),
        })
    }

    /// Return tier id given an account (panics when no subscription exists).
    fn get_tier_id(&self, owner: &AccountId) -> u64 {
        let subscription = self.subscriptions.get(owner).unwrap();
        subscription.tier_id
    }

    /// Timestamp (ms) at which the prepaid balance runs out:
    /// `last_update_ms + balance * PERIOD_MS / tier_fee`.
    ///
    /// NOTE(review): divides by the tier fee — a zero-fee (free) tier would
    /// panic here; confirm subscriptions are never created on a free tier.
    fn get_end_date_ms(&self, subscription: &AppSubscription) -> u64 {
        let tier_id = subscription.tier_id;
        let tier = self.service_tiers.get(&tier_id).unwrap();
        let price = tier.tier_fee; // get tier fee
        let prepaid_time_ms = subscription.balance * PERIOD_MS as u128 / price;
        subscription.last_update_ms + prepaid_time_ms as u64
    }

    /// Balance consumed between `subscription.last_update_ms` and `now_ms`,
    /// pro-rated against the tier fee per `PERIOD_MS`.
    fn get_consumed_balance_at_time(
        now_ms: u64,
        subscription: &AppSubscription,
        subscription_tier: &ServiceTier,
    ) -> Balance {
        let duration_consumed = now_ms - subscription.last_update_ms;
        duration_consumed as u128 * subscription_tier.tier_fee as u128 / PERIOD_MS as u128
    }

    /// Charge the subscription up to `now_ms`: deduct the consumed amount
    /// (capped at the remaining balance), stamp `last_update_ms`, and
    /// return how much was actually deducted.
    fn actualize_subscription_at_time(
        now_ms: u64,
        subscription: &mut AppSubscription,
        subscription_tier: &ServiceTier,
    ) -> Balance {
        let consumed =
            Self::get_consumed_balance_at_time(now_ms, subscription, subscription_tier);
        let actually_consumed;
        if consumed > subscription.balance {
            // Not enough prepaid balance — consume whatever is left.
            actually_consumed = subscription.balance;
            subscription.balance = 0;
        } else {
            subscription.balance -= consumed;
            actually_consumed = consumed;
        }
        subscription.last_update_ms = now_ms;
        actually_consumed
    }

    /// Charge the subscription up to the current block timestamp.
    /// The returned consumed amount must be accounted for by the caller
    /// (hence `#[must_use]`).
    #[must_use]
    fn actualize_subscription(
        subscription: &mut AppSubscription,
        subscription_tier: &ServiceTier,
    ) -> Balance {
        let now_ms = Self::env().block_timestamp();
        Self::actualize_subscription_at_time(now_ms, subscription, subscription_tier)
    }

    /// Owner-only: charge every subscription up to now, accumulating the
    /// consumed funds into `total_ddc_balance`.
    #[ink(message)]
    pub fn actualize_subscriptions(&mut self) -> Result<()> {
        self.only_owner()?;
        for (_, subscription) in self.subscriptions.iter_mut() {
            let subscription_tier = match self.service_tiers.get(&subscription.tier_id) {
                None => return Err(Error::TidOutOfBound),
                Some(v) => v,
            };
            self.total_ddc_balance +=
                Self::actualize_subscription(subscription, subscription_tier);
        }
        Ok(())
    }

    /// Revenue consumed from subscriptions so far.
    pub fn get_total_ddc_balance(&self) -> Balance {
        self.total_ddc_balance
    }

    /// Switch `subscription` to `new_tier_id`, first charging it under the
    /// old tier so the tier change takes effect from "now".
    fn set_tier(&mut self, subscription: &mut AppSubscription, new_tier_id: u64) -> Result<()> {
        let subscription_tier = match self.service_tiers.get(&subscription.tier_id) {
            None => return Err(Error::TidOutOfBound),
            Some(v) => v,
        };
        self.total_ddc_balance += Self::actualize_subscription(subscription, subscription_tier);
        subscription.tier_id = new_tier_id;
        Ok(())
    }

    /// Resource limits currently in effect for `app` (at the current block time).
    #[ink(message)]
    pub fn get_app_limit(&self, app: AccountId) -> Result<AppSubscriptionLimit> {
        let now_ms = Self::env().block_timestamp() as u64;
        self.get_app_limit_at_time(app, now_ms)
    }

    /// Resource limits in effect for `app` at `now_ms`: the subscribed
    /// tier's limits while paid up, otherwise the free tier's limits.
    pub fn get_app_limit_at_time(
        &self,
        app: AccountId,
        now_ms: u64,
    ) -> Result<AppSubscriptionLimit> {
        let subscription_opt = self.subscriptions.get(&app);
        if subscription_opt.is_none() {
            return Err(Error::NoSubscription);
        }
        let subscription = subscription_opt.unwrap();
        // A dangling tier id yields all-zero limits rather than an error.
        if self.tid_in_bound(subscription.tier_id).is_err() {
            return Ok(AppSubscriptionLimit::new(0, 0, 0));
        }
        let current_tier = self.service_tiers.get(&subscription.tier_id).unwrap();
        // actual
        if self.get_end_date_ms(subscription) >= now_ms {
            Ok(AppSubscriptionLimit::new(
                current_tier.storage_bytes,
                current_tier.wcu_per_minute,
                current_tier.rcu_per_minute,
            ))
        } else {
            // expired
            let free_tier = self.get_free_tier()?;
            Ok(AppSubscriptionLimit::new(
                free_tier.storage_bytes,
                free_tier.wcu_per_minute,
                free_tier.rcu_per_minute,
            ))
        }
    }

    /// First tier with a zero fee, or `Error::NoFreeTier` when none exists.
    pub fn get_free_tier(&self) -> Result<ServiceTier> {
        for tier_key in self.service_tiers.keys() {
            let current_tier = self.service_tiers.get(tier_key).unwrap();
            if current_tier.tier_fee == 0 {
                return Ok(current_tier.clone());
            }
        }
        Err(Error::NoFreeTier)
    }

    /// Receive payment from the participating DDC node
    /// Store payment into users balance map
    /// Initialize user metrics map
    #[ink(message, payable)]
    pub fn subscribe(&mut self, tier_id: u64) -> Result<()> {
        self.tid_in_bound(tier_id)?;
        self.only_active()?;
        let payer = self.env().caller();
        let value = self.env().transferred_balance();
        let fee_value = value;
        let service_v = self.service_tiers.get(&tier_id).unwrap();
        // The transferred value must cover at least one period's fee.
        if service_v.tier_fee > fee_value {
            //TODO: We probably need to summarize the existing balance with provided, in case app wants to deposit more than monthly amount
            return Err(Error::InsufficientDeposit);
        }
        let subscription_opt = self.subscriptions.get(&payer);
        let now = Self::env().block_timestamp();
        let mut subscription: AppSubscription;
        // An expired (or missing) subscription starts over; a live one is
        // topped up and possibly moved to the requested tier.
        if subscription_opt.is_none() || self.get_end_date_ms(subscription_opt.unwrap()) < now {
            subscription = AppSubscription {
                start_date_ms: now,
                tier_id,
                last_update_ms: now,
                balance: value,
            };
        } else {
            subscription = subscription_opt.unwrap().clone();
            subscription.balance += value;
            if subscription.tier_id != tier_id {
                self.set_tier(&mut subscription, tier_id)?;
            }
        }
        self.subscriptions.insert(payer, subscription);
        self.env().emit_event(Deposit {
            from: Some(payer),
            value,
        });
        Ok(())
    }

    /// Return the caller's unconsumed balance and zero out the subscription.
    ///
    /// NOTE(review): a failed transfer panics instead of returning
    /// `Error::TransferFailed` — confirm this is intentional.
    #[ink(message)]
    pub fn refund(&mut self) -> Result<()> {
        let caller = self.env().caller();
        let subscription = match self.subscriptions.get_mut(&caller) {
            None => return Err(Error::NoSubscription),
            Some(v) => v,
        };
        let subscription_tier = match self.service_tiers.get(&subscription.tier_id) {
            None => return Err(Error::TidOutOfBound),
            Some(v) => v,
        };
        // Charge for the time already used before refunding the remainder.
        self.total_ddc_balance += Self::actualize_subscription(subscription, subscription_tier);
        let to_refund = subscription.balance;
        subscription.balance = 0;
        if to_refund == 0 {
            return Ok(());
        }
        match self.env().transfer(caller, to_refund) {
            Err(_e) => panic!("Transfer has failed!"),
            Ok(_) => Ok(()),
        }
    }
}
// ---- Admin: Inspectors ----
/// Emitted when the owner registers a new inspector.
#[ink(event)]
pub struct InspectorAdded {
    #[ink(topic)]
    inspector: AccountId,
}

/// Emitted when the owner removes an inspector.
#[ink(event)]
pub struct InspectorRemoved {
    #[ink(topic)]
    inspector: AccountId,
}

/// Emitted when a non-inspector calls an inspector-only message.
#[ink(event)]
pub struct ErrorOnlyInspector {}
impl Ddc {
/// Check if account is an approved inspector.
fn only_inspector(&self) -> Result<()> {
let caller = self.env().caller();
if self.is_inspector(caller) {
Ok(())
} else {
self.env().emit_event(ErrorOnlyInspector {});
Err(Error::OnlyInspector)
}
}
#[ink(message)]
pub fn is_inspector(&self, inspector: AccountId) -> bool {
self.inspectors.contains_key(&inspector)
}
#[ink(message)]
pub fn add_inspector(&mut self, inspector: AccountId) -> Result<()> {
self.only_owner()?;
self.inspectors.insert(inspector, ());
Self::env().emit_event(InspectorAdded { inspector });
Ok(())
}
#[ink(message)]
pub fn remove_inspector(&mut self, inspector: AccountId) -> Result<()> {
self.only_owner()?;
self.inspectors.take(&inspector);
Self::env().emit_event(InspectorRemoved { inspector });
Ok(())
}
}
// ---- DDC Node managers ----
/// Emitted when the owner registers a new DDC node manager.
#[ink(event)]
pub struct DDNManagerAdded {
    #[ink(topic)]
    ddn_manager: AccountId,
}

/// Emitted when the owner removes a DDC node manager.
#[ink(event)]
pub struct DDNManagerRemoved {
    #[ink(topic)]
    ddn_manager: AccountId,
}

/// Emitted when an unauthorized account calls a manager-only message.
#[ink(event)]
pub struct ErrorOnlyDDNManager {}
impl Ddc {
    /// Succeed only when the caller is the owner or a registered DDC node
    /// manager; otherwise emit `ErrorOnlyDDNManager` and fail.
    fn only_ddn_manager(&self) -> Result<()> {
        let caller = self.env().caller();
        let allowed = *self.owner == caller || self.is_ddn_manager(caller);
        if allowed {
            Ok(())
        } else {
            self.env().emit_event(ErrorOnlyDDNManager {});
            Err(Error::OnlyDDNManager)
        }
    }

    /// True when `ddn_manager` has been registered by the owner.
    #[ink(message)]
    pub fn is_ddn_manager(&self, ddn_manager: AccountId) -> bool {
        self.ddn_managers.contains_key(&ddn_manager)
    }

    /// Owner-only: register `ddn_manager` and emit `DDNManagerAdded`.
    #[ink(message)]
    pub fn add_ddn_manager(&mut self, ddn_manager: AccountId) -> Result<()> {
        self.only_owner()?;
        self.ddn_managers.insert(ddn_manager, ());
        Self::env().emit_event(DDNManagerAdded { ddn_manager });
        Ok(())
    }

    /// Owner-only: unregister `ddn_manager` and emit `DDNManagerRemoved`.
    #[ink(message)]
    pub fn remove_ddn_manager(&mut self, ddn_manager: AccountId) -> Result<()> {
        self.only_owner()?;
        self.ddn_managers.take(&ddn_manager);
        Self::env().emit_event(DDNManagerRemoved { ddn_manager });
        Ok(())
    }
}
// ---- DDC nodes ----
/// A registered DDC node and how to reach it.
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct DDCNode {
    /// Node's p2p identifier; also the key in the `ddc_nodes` registry.
    p2p_id: String,
    /// Node's p2p address.
    p2p_addr: String,
    /// Node URL.
    url: String,
    /// There is only one known permission for trusted nodes:
    ///
    /// is_trusted = (permissions & 1) != 0
    permissions: u64,
}
/// Emitted when a DDC node is added to (or overwritten in) the registry.
#[ink(event)]
pub struct DDCNodeAdded {
    #[ink(topic)]
    p2p_id: String,
    p2p_addr: String,
    url: String,
    permissions: u64,
}

/// Emitted when a DDC node is removed from the registry.
#[ink(event)]
pub struct DDCNodeRemoved {
    #[ink(topic)]
    p2p_id: String,
    p2p_addr: String,
}
impl Ddc {
    /// Return the list of all DDC nodes
    #[ink(message)]
    pub fn get_all_ddc_nodes(&self) -> Vec<DDCNode> {
        self.ddc_nodes.values().cloned().collect()
    }

    /// Add DDC node to the list.
    ///
    /// If the node already exists based on p2p_id, update all fields.
    ///
    /// Use permissions 1 for a trusted node, otherwise 0.
    #[ink(message)]
    pub fn add_ddc_node(
        &mut self,
        p2p_id: String,
        p2p_addr: String,
        url: String,
        permissions: u64,
    ) -> Result<()> {
        // Owner or a registered DDN manager only.
        self.only_ddn_manager()?;
        self.ddc_nodes.insert(
            p2p_id.clone(),
            DDCNode {
                p2p_id: p2p_id.clone(),
                p2p_addr: p2p_addr.clone(),
                url: url.clone(),
                permissions,
            },
        );
        Self::env().emit_event(DDCNodeAdded {
            p2p_id,
            p2p_addr,
            url,
            permissions,
        });
        Ok(())
    }

    /// Check if DDC node is in the list
    #[ink(message)]
    pub fn is_ddc_node(&self, p2p_id: String) -> bool {
        self.ddc_nodes.contains_key(&p2p_id)
    }

    /// Removes DDC node from the list.
    ///
    /// Emits `DDCNodeRemoved` and also drops every inspector's status
    /// entry for the node. Fails with `DDNNotFound` for unknown ids.
    #[ink(message)]
    pub fn remove_ddc_node(&mut self, p2p_id: String) -> Result<()> {
        self.only_ddn_manager()?;
        // Remove DDN if exists
        let removed_node = self.ddc_nodes.take(&p2p_id).ok_or(Error::DDNNotFound)?;
        Self::env().emit_event(DDCNodeRemoved {
            p2p_id: p2p_id.clone(),
            p2p_addr: removed_node.p2p_addr,
        });
        // Remove DDN status entries from all inspectors
        for &inspector in self.inspectors.keys() {
            self.ddn_statuses.take(&DDNStatusKey {
                inspector,
                p2p_id: p2p_id.clone(),
            });
        }
        Ok(())
    }
}
// ---- DDN Statuses ----
/// Connectivity bookkeeping for one (inspector, node) pair.
#[derive(Default, Copy, Clone, PartialEq, Encode, Decode, SpreadLayout, PackedLayout)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct DDNStatus {
    /// Most recently reported state.
    is_online: bool,
    /// Accumulated offline time (ms) since `reference_timestamp`.
    total_downtime: u64,
    /// Timestamp at which tracking started for this pair.
    reference_timestamp: u64,
    /// Timestamp of the most recent report.
    last_timestamp: u64,
}
// ---- DDN Status Key ----
/// Key for `ddn_statuses`: statuses are tracked per inspector per node.
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct DDNStatusKey {
    inspector: AccountId,
    p2p_id: String,
}
impl Ddc {
    /// Update DDC node connectivity status (online/offline)
    /// Called by OCW to set DDN offline status if fetching of node metrics failed
    /// Called by SC to set online status when metrics is reported
    ///
    /// Downtime accounting: when the previously recorded state was offline,
    /// the time since the last report is added to `total_downtime`.
    #[ink(message)]
    pub fn report_ddn_status(&mut self, p2p_id: String, is_online: bool) -> Result<()> {
        let inspector = self.env().caller();
        self.only_inspector()?;
        if !self.ddc_nodes.contains_key(&p2p_id) {
            return Err(Error::DDNNotFound);
        }
        let now = Self::env().block_timestamp();
        let key = DDNStatusKey { inspector, p2p_id };
        // Add new DDN status if not exists
        if !self.ddn_statuses.contains_key(&key) {
            let new_ddn_status = DDNStatus {
                is_online,
                total_downtime: 0,
                reference_timestamp: now,
                last_timestamp: now,
            };
            self.ddn_statuses.insert(key.clone(), new_ddn_status);
        }
        let ddn_status = self.ddn_statuses.get_mut(&key).unwrap();
        // Timestamps must never move backwards.
        if now < ddn_status.last_timestamp || now < ddn_status.reference_timestamp {
            return Err(Error::UnexpectedTimestamp);
        }
        // Update total downtime
        if !ddn_status.is_online {
            let last_downtime = now - ddn_status.last_timestamp;
            ddn_status.total_downtime += last_downtime;
        }
        ddn_status.is_online = is_online;
        ddn_status.last_timestamp = now;
        Ok(())
    }

    /// Get DDC node status.
    ///
    /// Aggregates all inspectors' reports and returns the one with the
    /// median `total_downtime`; `DDNNoStatus` when nobody has reported yet.
    #[ink(message)]
    pub fn get_ddn_status(&self, p2p_id: String) -> Result<DDNStatus> {
        if !self.ddc_nodes.contains_key(&p2p_id) {
            return Err(Error::DDNNotFound);
        }
        let mut ddn_statuses: Vec<&DDNStatus> = Vec::new();
        // Collect DDN statuses from all inspectors
        for &inspector in self.inspectors.keys() {
            let key = DDNStatusKey {
                inspector,
                p2p_id: p2p_id.clone(),
            };
            if let Some(ddn_status) = self.ddn_statuses.get(&key) {
                ddn_statuses.push(ddn_status);
            }
        }
        // Get DDN status by using median value of total downtime
        get_median_by_key(ddn_statuses, |item| item.total_downtime)
            .cloned()
            .ok_or(Error::DDNNoStatus)
    }
}
// ---- Metrics Reporting ----
/// Key for app metrics: one slot per inspector, app and day of the
/// `PERIOD_DAYS`-day ring buffer (`day_of_period = day % PERIOD_DAYS`).
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct MetricKey {
    inspector: AccountId,
    app_id: AccountId,
    day_of_period: u64,
}
// ---- Metric per DDN ----
/// Key for per-node metrics: one slot per inspector, node p2p id and day
/// of the `PERIOD_DAYS`-day ring buffer.
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct MetricKeyDDN {
    inspector: AccountId,
    p2p_id: String,
    day_of_period: u64,
}
/// Metrics reported for one day.
#[derive(
    Default, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, SpreadLayout, PackedLayout,
)]
#[cfg_attr(feature = "std", derive(Debug, scale_info::TypeInfo))]
pub struct MetricValue {
    /// Start of the day (ms) this sample belongs to; used to detect stale
    /// entries left in the ring buffer from a previous period.
    start_ms: u64,
    storage_bytes: u64,
    wcu_used: u64,
    rcu_used: u64,
}
impl MetricValue {
    /// Accumulate another sample's usage counters into this one.
    /// `start_ms` is deliberately left untouched (callers rely on that).
    pub fn add_assign(&mut self, other: Self) {
        let Self {
            storage_bytes,
            wcu_used,
            rcu_used,
            ..
        } = other;
        self.storage_bytes += storage_bytes;
        self.wcu_used += wcu_used;
        self.rcu_used += rcu_used;
    }
}
/// Emitted on every successful `report_metrics` call.
#[ink(event)]
pub struct MetricReported {
    #[ink(topic)]
    inspector: AccountId,
    #[ink(topic)]
    key: MetricKey,
    metrics: MetricValue,
}

/// Emitted on every successful `report_metrics_ddn` call.
#[ink(event)]
pub struct MetricDDNReported {
    #[ink(topic)]
    inspector: AccountId,
    #[ink(topic)]
    key: MetricKeyDDN,
    metrics: MetricValue,
}

/// Emitted when an inspector finalizes the metric period starting at `start_ms`.
#[ink(event)]
pub struct MetricPeriodFinalized {
    #[ink(topic)]
    inspector: AccountId,
    start_ms: u64,
}
/// Return the median element of `source`, or `None` when it is empty.
///
/// For an even number of elements the lower of the two middle elements
/// is returned.
fn get_median<T: Clone + Ord>(mut source: Vec<T>) -> Option<T> {
    if source.is_empty() {
        return None;
    }
    // Unstable sort is faster; the order of equal elements is irrelevant here.
    source.sort_unstable();
    let mid = (source.len() - 1) / 2;
    source.get(mid).cloned()
}
/// Median of `source` ordered by the key extracted with `f`; `None` when
/// `source` is empty. Even-length inputs yield the element with the lower
/// of the two middle keys.
fn get_median_by_key<T, F, K>(mut source: Vec<T>, f: F) -> Option<T>
where
    T: Clone,
    F: FnMut(&T) -> K,
    K: Ord,
{
    if source.is_empty() {
        return None;
    }
    // Unstable sort is faster; the order of equal keys is irrelevant here.
    source.sort_unstable_by_key(f);
    let mid = (source.len() - 1) / 2;
    source.get(mid).cloned()
}
impl Ddc {
    /// Median metrics for `app_id` aggregated over the current period,
    /// counted from the subscription's start date up to now.
    #[ink(message)]
    pub fn metrics_since_subscription(&self, app_id: AccountId) -> Result<MetricValue> {
        let subscription = self
            .subscriptions
            .get(&app_id)
            .ok_or(Error::NoSubscription)?;
        let now_ms = Self::env().block_timestamp() as u64;
        let metrics = self.metrics_for_period(app_id, subscription.start_date_ms, now_ms);
        Ok(metrics)
    }

    /// Sum, over each day of the current period, the per-day median
    /// (across inspectors) of the reported app metrics.
    #[ink(message)]
    pub fn metrics_for_period(
        &self,
        app_id: AccountId,
        subscription_start_ms: u64,
        now_ms: u64,
    ) -> MetricValue {
        // The start date may be several months away. When did the current period start?
        let (period_start_days, now_days) =
            get_current_period_days(subscription_start_ms, now_ms);
        let mut period_metrics = MetricValue {
            start_ms: period_start_days * MS_PER_DAY,
            storage_bytes: 0,
            wcu_used: 0,
            rcu_used: 0,
        };
        for day in period_start_days..=now_days {
            // Collect every inspector's report for this day, then take
            // the median of each counter independently.
            let mut day_storage_bytes: Vec<u64> = Vec::new();
            let mut day_wcu_used: Vec<u64> = Vec::new();
            let mut day_rcu_used: Vec<u64> = Vec::new();
            for inspector in self.inspectors.keys() {
                let inspector_day_metric = self.metrics_for_day(*inspector, app_id, day);
                if let Some(inspector_day_metric) = inspector_day_metric {
                    day_storage_bytes.push(inspector_day_metric.storage_bytes);
                    day_wcu_used.push(inspector_day_metric.wcu_used);
                    day_rcu_used.push(inspector_day_metric.rcu_used);
                }
            }
            period_metrics.add_assign(MetricValue {
                storage_bytes: get_median(day_storage_bytes).unwrap_or(0),
                wcu_used: get_median(day_wcu_used).unwrap_or(0),
                rcu_used: get_median(day_rcu_used).unwrap_or(0),
                start_ms: 0, // Ignored by add_assign, but required by type
            });
        }
        period_metrics
    }

    /// One inspector's metrics for an absolute `day`, if present and not a
    /// stale entry from a previous period (metrics live in a ring buffer of
    /// `PERIOD_DAYS` slots keyed by `day % PERIOD_DAYS`).
    fn metrics_for_day(
        &self,
        inspector: AccountId,
        app_id: AccountId,
        day: u64,
    ) -> Option<&MetricValue> {
        let day_of_period = day % PERIOD_DAYS;
        let day_key = MetricKey {
            inspector,
            app_id,
            day_of_period,
        };
        self.metrics.get(&day_key).and_then(|day_metrics| {
            // Ignore out-of-date metrics from a previous period
            if day_metrics.start_ms != day * MS_PER_DAY {
                None
            } else {
                Some(day_metrics)
            }
        })
    }

    /// Per-day median metrics for a DDC node over the trailing period, up to now.
    #[ink(message)]
    pub fn metrics_for_ddn(&self, p2p_id: String) -> Vec<MetricValue> {
        let now_ms = Self::env().block_timestamp() as u64;
        self.metrics_for_ddn_at_time(p2p_id, now_ms)
    }

    /// Per-day median metrics (across inspectors) for a DDC node covering
    /// the `PERIOD_DAYS` days ending at `now_ms` (fewer near epoch start).
    pub fn metrics_for_ddn_at_time(&self, p2p_id: String, now_ms: u64) -> Vec<MetricValue> {
        let mut period_metrics: Vec<MetricValue> = Vec::with_capacity(PERIOD_DAYS as usize);
        let last_day = now_ms / MS_PER_DAY + 1; // non-inclusive.
        let first_day = if last_day >= PERIOD_DAYS {
            last_day - PERIOD_DAYS
        } else {
            0
        };
        for day in first_day..last_day {
            let mut day_storage_bytes: Vec<u64> = Vec::new();
            let mut day_wcu_used: Vec<u64> = Vec::new();
            let mut day_rcu_used: Vec<u64> = Vec::new();
            for inspector in self.inspectors.keys() {
                let day_metric = self.metrics_for_ddn_day(*inspector, p2p_id.clone(), day);
                if let Some(day_metric) = day_metric {
                    day_storage_bytes.push(day_metric.storage_bytes);
                    day_wcu_used.push(day_metric.wcu_used);
                    day_rcu_used.push(day_metric.rcu_used);
                }
            }
            period_metrics.push(MetricValue {
                storage_bytes: get_median(day_storage_bytes).unwrap_or(0),
                wcu_used: get_median(day_wcu_used).unwrap_or(0),
                rcu_used: get_median(day_rcu_used).unwrap_or(0),
                start_ms: day * MS_PER_DAY,
            });
        }
        period_metrics
    }

    /// One inspector's DDN metrics for an absolute `day`; stale ring-buffer
    /// entries from earlier periods are filtered out via `start_ms`.
    fn metrics_for_ddn_day(
        &self,
        inspector: AccountId,
        p2p_id: String,
        day: u64,
    ) -> Option<MetricValue> {
        let day_of_period = day % PERIOD_DAYS;
        let day_key = MetricKeyDDN {
            inspector,
            p2p_id,
            day_of_period,
        };
        self.metrics_ddn
            .get(&day_key)
            .and_then(|metric| {
                // Ignore out-of-date metrics from a previous period
                if metric.start_ms != day * MS_PER_DAY {
                    None
                } else {
                    Some(metric)
                }
            })
            .cloned()
    }

    /// Inspector-only: store app metrics for the day starting at
    /// `day_start_ms` (must fall on a day boundary) and emit `MetricReported`.
    /// Overwrites this inspector's previous report for the same slot.
    #[ink(message)]
    pub fn report_metrics(
        &mut self,
        app_id: AccountId,
        day_start_ms: u64,
        storage_bytes: u64,
        wcu_used: u64,
        rcu_used: u64,
    ) -> Result<()> {
        let inspector = self.env().caller();
        self.only_inspector()?;
        enforce_time_is_start_of_day(day_start_ms)?;
        let day = day_start_ms / MS_PER_DAY;
        let day_of_period = day % PERIOD_DAYS;
        let key = MetricKey {
            inspector,
            app_id,
            day_of_period,
        };
        let metrics = MetricValue {
            start_ms: day_start_ms,
            storage_bytes,
            wcu_used,
            rcu_used,
        };
        self.metrics.insert(key.clone(), metrics.clone());
        self.env().emit_event(MetricReported {
            inspector,
            key,
            metrics,
        });
        Ok(())
    }

    /// Reports DDC node metrics
    /// Called by OCW if node metrics is successfully fetched
    /// Updates DDC node connectivity status to online
    ///
    /// NOTE(review): `report_ddn_status(...).unwrap()` panics when `p2p_id`
    /// is not a registered node — confirm callers only report known nodes.
    #[ink(message)]
    pub fn report_metrics_ddn(
        &mut self,
        p2p_id: String,
        day_start_ms: u64,
        storage_bytes: u64,
        wcu_used: u64,
        rcu_used: u64,
    ) -> Result<()> {
        let inspector = self.env().caller();
        self.only_inspector()?;
        enforce_time_is_start_of_day(day_start_ms)?;
        let day = day_start_ms / MS_PER_DAY;
        let day_of_period = day % PERIOD_DAYS;
        let key = MetricKeyDDN {
            inspector,
            p2p_id: p2p_id.clone(),
            day_of_period,
        };
        let metrics = MetricValue {
            start_ms: day_start_ms,
            storage_bytes,
            wcu_used,
            rcu_used,
        };
        self.metrics_ddn.insert(key.clone(), metrics.clone());
        self.report_ddn_status(p2p_id, true).unwrap();
        self.env().emit_event(MetricDDNReported {
            inspector,
            key,
            metrics,
        });
        Ok(())
    }

    /// Inspector-only: close the period that started at `start_ms` (must be
    /// a day boundary); the caller's next period begins one day later.
    #[ink(message)]
    pub fn finalize_metric_period(&mut self, start_ms: u64) -> Result<()> {
        let inspector = self.env().caller();
        self.only_inspector()?;
        enforce_time_is_start_of_day(start_ms)?;
        let next_period_ms = start_ms + MS_PER_DAY;
        self.current_period_ms.insert(inspector, next_period_ms);
        self.env().emit_event(MetricPeriodFinalized {
            inspector,
            start_ms,
        });
        Ok(())
    }

    /// Start of the caller's current metric period, in ms.
    #[ink(message)]
    pub fn get_current_period_ms(&self) -> u64 {
        let caller = self.env().caller();
        self.get_current_period_ms_of(caller)
    }

    /// Start of `inspector_id`'s current metric period, in ms; defaults to
    /// the beginning of today when nothing has been finalized yet.
    #[ink(message)]
    pub fn get_current_period_ms_of(&self, inspector_id: AccountId) -> u64 {
        let current_period_ms = self.current_period_ms.get(&inspector_id);
        match current_period_ms {
            None => {
                let now: u64 = Self::env().block_timestamp(); // epoch time in milliseconds
                let today_ms = now - now % MS_PER_DAY; // The beginning of today
                today_ms
            }
            Some(current_period_ms) => *current_period_ms,
        }
    }
}
// ---- Utils ----
/// Error codes returned by the contract messages.
#[derive(Debug, PartialEq, Eq, scale::Encode)]
#[cfg_attr(feature = "std", derive(scale_info::TypeInfo))]
pub enum Error {
    /// Caller is not the contract owner.
    OnlyOwner,
    /// Caller is not a registered inspector.
    OnlyInspector,
    /// Caller is neither the owner nor a registered DDN manager.
    OnlyDDNManager,
    /// New tier fee equals the current one.
    SameDepositValue,
    NoPermission,
    /// Transferred value does not cover the tier fee.
    InsufficientDeposit,
    /// Native token transfer failed.
    TransferFailed,
    ZeroBalance,
    /// Withdrawal would leave no subsistence deposit in the contract.
    InsufficientBalance,
    /// Destination account is the default (zero) account.
    InvalidAccount,
    OverLimit,
    /// Unknown tier id.
    TidOutOfBound,
    /// Operation requires an unpaused contract.
    ContractPaused,
    ContractActive,
    /// Timestamp is not a day boundary, or moves backwards.
    UnexpectedTimestamp,
    /// Account has no subscription.
    NoSubscription,
    /// No zero-fee tier is configured.
    NoFreeTier,
    /// Unknown DDC node id.
    DDNNotFound,
    /// No inspector has reported a status for the node.
    DDNNoStatus,
}
/// Contract-wide result alias: all messages report failures via `Error`.
pub type Result<T> = core::result::Result<T, Error>;
/// Milliseconds in one day.
const MS_PER_DAY: u64 = 24 * 3600 * 1000;
/// Length of a metrics/billing period, in days.
const PERIOD_DAYS: u64 = 31;
/// Length of a metrics/billing period, in milliseconds.
const PERIOD_MS: u64 = PERIOD_DAYS * MS_PER_DAY;
/// Map a subscription start and the current time onto whole days and
/// return `(first day of the current period, current day)`.
fn get_current_period_days(subscription_start_ms: u64, now_ms: u64) -> (u64, u64) {
    let start_days = subscription_start_ms / MS_PER_DAY;
    let now_days = now_ms / MS_PER_DAY;
    // How many days of the current (incomplete) period have elapsed.
    let elapsed_in_period = (now_days - start_days) % PERIOD_DAYS;
    (now_days - elapsed_in_period, now_days)
}
fn enforce_time_is_start_of_day(ms: u64) -> Result<()> {
if ms % MS_PER_DAY == 0 {
Ok(())
} else {
Err(Error::UnexpectedTimestamp)
}
}
#[cfg(test)]
mod tests;
}
|
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use grid::Grid;
use direction::Direction;
use direction::Direction::*;
use maze::Maze;
use posn::Posn;
use tile::Tile;
/// Per-cell bookkeeping for the search: the best known cost to reach the
/// cell (if visited) and the direction pointing back toward the cell we
/// arrived from.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Node {
    cost: Option<i32>,
    from: Direction,
}

impl Node {
    /// A fresh, unvisited node: no cost yet, `from` is a placeholder.
    fn new() -> Node {
        Node {
            from: Direction::North,
            cost: None,
        }
    }
}
/// Priority-queue entry for the search frontier: the cost accumulated so
/// far, the cell it refers to, and the direction back toward its predecessor.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct HeapEntry {
    cost: i32,
    pos: Posn,
    from: Direction,
}
impl PartialOrd for HeapEntry {
    /// Delegate to `Ord` (required so `HeapEntry` can live in a `BinaryHeap`).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // `other` is already a reference — no need to borrow it again.
        Some(self.cmp(other))
    }
}
impl Ord for HeapEntry {
    // Comparison is intentionally inverted (`other` vs `self`) so that
    // `BinaryHeap`, a max-heap, pops the entry with the *lowest* cost first.
    fn cmp(&self, other: &Self) -> Ordering {
        other.cost.cmp(&self.cost)
    }
}
/// Dijkstra-style search (uniform edge cost 1, so effectively BFS) from
/// `player` to the first `Tile::Exit` reached; returns the path as a list
/// of positions leading to the exit, or an empty vector when no exit is
/// reachable.
pub fn pathfind(maze: &Maze, player: Posn) -> Vec<Posn> {
    // One search node per maze cell, all initially unvisited.
    let mut search_nodes = Grid::new(maze.map
        .iter()
        .map(|row| {
            row.iter()
                .map(|_| Node::new())
                .collect()
        })
        .collect());
    let mut heap = BinaryHeap::new();
    // NOTE(review): redundant — `Node::new()` already sets cost to None.
    // It must stay None (not Some(0)): a `Some(0)` here would make the
    // cost check below skip the start cell when it is first popped.
    search_nodes[player].cost = None;
    heap.push(HeapEntry {
        cost: 0,
        pos: player,
        from: North,
    });
    let mut exit_pos: Option<Posn> = None;
    while let Some(HeapEntry { cost, pos, from }) = heap.pop() {
        trace!("Considering cell {:?}", pos);
        assert!(maze.in_bounds(&pos));
        if maze[&pos] == Tile::Exit {
            trace!("Found the exit at {:?}", pos);
            search_nodes[pos].from = from;
            exit_pos = Some(pos);
            break;
        }
        // Skip if this cell was already reached at least as cheaply.
        let node = search_nodes[pos];
        if let Some(old_cost) = node.cost {
            if old_cost <= cost {
                trace!("Old cost of {:?} was better than new cost {:?}",
                       old_cost,
                       cost);
                continue;
            }
        }
        // Record the best known cost and the link back to the predecessor.
        search_nodes[pos].cost = Some(cost);
        search_nodes[pos].from = from;
        for dir in &[North, East, South, West] {
            let next_pos = pos + dir.numeric();
            // Also handles out of bounds.
            if maze[&next_pos] == Tile::Wall {
                continue;
            }
            assert!(maze.in_bounds(&next_pos));
            heap.push(HeapEntry {
                pos: next_pos,
                cost: cost + 1,
                // Flipped so `from` points back toward the cell we came from.
                from: dir.flip(),
            });
        }
    }
    if exit_pos == None {
        warn!("Couldn't find a path");
        return vec![];
    }
    // Walk the `from` links from the exit back to the player, then reverse
    // to obtain the forward path.
    let mut path = vec![];
    let mut curr_pos = exit_pos.unwrap();
    trace!("Path:");
    while curr_pos != player {
        path.push(curr_pos);
        trace!("{:?}", curr_pos);
        let next_dir = search_nodes[curr_pos].from;
        trace!("Stepping to the {:?}", next_dir);
        curr_pos = curr_pos + next_dir.numeric();
    }
    path.reverse();
    path
}
// fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
// // dist[node] = current shortest distance from `start` to `node`
// let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
// // We're at `start`, with a zero cost
// dist[start] = 0;
// heap.push(State { cost: 0, position: start });
// // Examine the frontier with lower cost nodes first (min-heap)
// while let Some(State { cost, position }) = heap.pop() {
// // Alternatively we could have continued to find all shortest paths
// if position == goal { return Some(cost); }
// // Important as we may have already found a better way
// if cost > dist[position] { continue; }
// // For each node we can reach, see if we can find a way with
// // a lower cost going through this node
// for edge in &adj_list[position] {
// let next = State { cost: cost + edge.cost, position: edge.node };
// // If so, add it to the frontier and continue
// if next.cost < dist[next.position] {
// heap.push(next);
// // Relaxation, we have now found a better way
// dist[next.position] = next.cost;
// }
// }
// }
// // Goal not reachable
// None
// }
|
use collections::BTreeSet;
use constants::*;
use kernel_std::*;
use segment::Segment;
/// Virtual-memory layout: the set of mapped segments plus an allocator
/// tracking which virtual regions are free.
pub struct Layout {
    /// All currently mapped segments, ordered by the Segment ordering.
    map: BTreeSet<Segment>,
    /// Free-region allocator for unmapped virtual address space.
    free: Allocator,
}
impl Layout {
    /// Creates an empty layout: no mapped segments, fresh allocator.
    pub fn new() -> Layout {
        Layout {
            map: BTreeSet::new(),
            free: Allocator::new()
        }
    }
    /// Allocates a free virtual region of `size` bytes with `align` alignment.
    #[inline]
    pub fn allocate(&mut self, size: u64, align: u64) -> Option<Region> {
        self.free.allocate(size, align)
    }
    /// Registers `region` with the free-region allocator.
    #[inline]
    pub fn register(&mut self, region: Region) -> bool {
        self.free.register(region)
    }
    /// Forgets `region` from the allocator, but only if no segment is
    /// currently mapped inside it.
    #[inline]
    pub fn forget(&mut self, region: Region) -> bool {
        let dummy = Segment::dummy_range(region.base(), region.size());
        // only forget a region if no segments are mapped in it
        if !self.map.contains(&dummy) {
            self.free.forget(region)
        } else {
            false
        }
    }
    /// Iterates over all mapped segments.
    #[inline]
    pub fn segments(&self) -> ::collections::btree_set::Iter<Segment> {
        self.map.iter()
    }
    /// Inserts a segment mapping; marks its virtual region as used.
    /// Returns false if an equivalent segment was already mapped.
    pub fn insert(&mut self, segment: Segment) -> bool {
        trace!("Inserting segment {:?}", &segment);
        if self.map.insert(segment.clone()) {
            let region = Region::new(segment.virtual_base(), segment.size());
            // may or may not already be allocated
            self.free.set_used(region);
            true
        } else {
            false
        }
    }
    /// Removes a segment mapping and releases its virtual region.
    pub fn remove(&mut self, segment: Segment) -> bool {
        let region = Region::new(segment.virtual_base(), segment.size());
        // Short-circuits: the map entry is only removed if the release
        // succeeds (same evaluation order as before; the redundant
        // `if .. { true } else { false }` wrapper is gone).
        self.free.release(region) && self.map.remove(&segment)
    }
    /// Translates a virtual address to its physical address, if mapped.
    pub fn to_physical(&self, addr: u64) -> Option<u64> {
        let dummy = Segment::dummy(addr);
        if let Some(segment) = self.map.get(&dummy) {
            // address has a mapping; mask off the non-canonical high bits
            // before offsetting into the segment
            Some((addr & ((1 << CANONICAL_BITS) - 1)) - segment.virtual_base() + segment.physical_base())
        } else {
            // no mapping
            None
        }
    }
    /// Translates a physical address back to a virtual address by linear
    /// scan over all segments (O(n)); returns the first match.
    ///
    /// NOTE(review): the upper bound uses `<=`, so `addr == base + size`
    /// (one past the end) is treated as inside the segment — confirm
    /// whether that inclusivity is intended.
    pub fn to_virtual(&self, addr: u64) -> Option<u64> {
        // naive implementation
        for segment in self.map.iter() {
            if segment.physical_base() <= addr && addr <= segment.physical_base() + segment.size() {
                return Some(addr - segment.physical_base() + segment.virtual_base());
            }
        }
        None
    }
}
|
// id number A unique identifier for each domain record.
// type string The type of the DNS record (ex: A, CNAME,
// TXT, ...).
// name string The name to use for the DNS record.
// data string The value to use for the DNS record.
// priority nullable number The priority for SRV and MX records.
// port nullable number The port for SRV records.
// weight nullable number The weight for SRV records.
use std::borrow::Cow;
use std::fmt;
use response::NamedResponse;
use response;
/// A single DNS record as returned by the API (see the field notes above).
#[derive(Deserialize)]
pub struct DnsRecord {
    /// Unique identifier for the domain record.
    pub id: f64,
    /// Record type (e.g. A, CNAME, TXT); renamed because `type` is a keyword.
    #[serde(rename="type")]
    pub rec_type: String,
    /// Name used for the DNS record.
    pub name: String,
    /// Value of the DNS record.
    pub data: String,
    /// Priority — nullable; used for SRV and MX records.
    pub priority: Option<f64>,
    /// Port — nullable; used for SRV records.
    pub port: Option<f64>,
    /// Weight — nullable; used for SRV records.
    pub weight: Option<f64>,
}
// Marker impl (trait defined in the `response` module).
impl response::NotArray for DnsRecord {}
/// Convenience alias for endpoints returning multiple records.
pub type DnsRecords = Vec<DnsRecord>;
impl NamedResponse for DnsRecord {
    /// JSON key under which a single record is nested in API responses.
    fn name<'a>() -> Cow<'a, str> {
        Cow::Borrowed("domain_record")
    }
}
impl fmt::Display for DnsRecord {
    /// Placeholder display: always writes the literal `dns_record`,
    /// regardless of the record's contents.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Write the literal directly instead of the redundant
        // `write!(f, "{}", "dns_record")` formatting pass.
        f.write_str("dns_record")
    }
}
|
#[macro_use]
extern crate failure;
use failure::Error;
use std::process::Command;
use std::process::Output;
/// Represents a failed command (non-zero status)
#[derive(Fail, Debug)]
#[fail(display = "Command failed: {}, output: {:?}", command, output)]
pub struct CommandFail {
    /// The shell command line that was executed.
    command: String,
    /// Captured status, stdout and stderr of the failed run.
    output: Output,
}
impl CommandFail {
    /// The command line that failed.
    pub fn command(&self) -> &str {
        &self.command
    }
    /// The captured output (status, stdout, stderr) of the failed command.
    pub fn output(&self) -> &Output {
        &self.output
    }
}
/// Crate-local alias: any `failure`-compatible error.
type Result<T> = std::result::Result<T, Error>;
/// Runs `cmd` through the platform shell and returns its stdout as a string.
///
/// Returns a `CommandFail` for a non-zero exit status, or lower-level errors
/// (spawn failure, non-UTF-8 output) via `failure`.
pub fn run_shell_command(cmd: &str) -> Result<String> {
    // `cmd /C` on Windows (untested), `sh -c` everywhere else.
    let shell_output = if cfg!(target_os = "windows") {
        Command::new("cmd").arg("/C").arg(cmd).output()?
    } else {
        Command::new("sh").arg("-c").arg(cmd).output()?
    };
    if !shell_output.status.success() {
        return Err(CommandFail {
            command: cmd.into(),
            output: shell_output,
        }
        .into());
    }
    Ok(String::from_utf8(shell_output.stdout)?)
}
|
// Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use parser_rust as parser;
use ocamlpool_rust::{
caml_raise,
ocamlvalue::*,
utils::{block_field, bool_field, str_field},
};
use oxidized::{file_info, global_options::GlobalOptions, relative_path::RelativePath};
use parser::{
indexed_source_text::IndexedSourceText,
positioned_syntax::{PositionedSyntax, PositionedValue},
positioned_token::PositionedToken,
source_text::SourceText,
};
use syntax_tree::SyntaxTree;
use lowerer::{Env as LowererEnv, Lowerer};
use std::default::Default;
/// Zero-sized lowerer; relies entirely on the `Lowerer` trait's default methods.
struct PositionedSyntaxLowerer {}
impl<'a> Lowerer<'a, PositionedToken, PositionedValue> for PositionedSyntaxLowerer {}
// C entry points from ocamlpool. NOTE(review): presumably enter/leave bracket
// a region in which OCaml values may be allocated safely — confirm against
// the ocamlpool documentation before reordering calls.
extern "C" {
    fn ocamlpool_enter();
    fn ocamlpool_leave();
}
// OCaml-callable entry point: lowers a positioned syntax tree to the oxidized
// AST and returns it as an OCaml value. The `caml_raise!` macro (from
// ocamlpool_rust) supplies the FFI wrapper and exception plumbing.
caml_raise!(lower, |ocaml_env, ocaml_source_text, ocaml_tree|, <res>, {
    let ocaml_source_text_value = ocaml_source_text.0;
    // Field 0 of the OCaml SourceText record: its relative path.
    let relative_path_raw = block_field(&ocaml_source_text, 0);
    let relative_path = RelativePath::from_ocamlvalue(&relative_path_raw);
    // Field 2: the file contents.
    let content = str_field(&ocaml_source_text, 2);
    let source_text = SourceText::make_with_raw(&relative_path, &content.data(), ocaml_source_text_value);
    let indexed_source_text = IndexedSourceText::new(source_text.clone());
    // Reinterpret the opaque OCaml pointer as a SyntaxTree reference.
    let tree = match <SyntaxTree<PositionedSyntax, ()>>::ffi_pointer_as_ref(ocaml_tree.usize_val(), &source_text) {
        Ok(t) => t,
        // NOTE(review): `panic!(msg)` with a non-literal is deprecated
        // (a hard error in edition 2021); should be `panic!("{}", msg)`.
        Err(msg) => panic!(msg),
    };
    let parser_options = GlobalOptions::default();
    // Selected fields of the OCaml env record; indices must match the OCaml
    // side's record layout — verify when that type changes.
    let codegen = bool_field(&ocaml_env, 1);
    let elaborate_namespaces = bool_field(&ocaml_env, 3);
    let quick_mode = bool_field(&ocaml_env, 6);
    let mode = file_info::Mode::from_ocamlvalue(block_field(&ocaml_env, 11));
    let mut env = LowererEnv::make(
        codegen,
        elaborate_namespaces,
        quick_mode,
        mode,
        &indexed_source_text,
        &parser_options,
    );
    // Allocate the resulting OCaml value inside an ocamlpool enter/leave pair.
    ocamlpool_enter();
    let r = PositionedSyntaxLowerer::lower(&mut env, tree.root()).ocamlvalue();
    ocamlpool_leave();
    res = ocaml::Value::new(r);
} -> res );
|
use std::{fmt::Write, sync::Arc};
use prometheus::core::Collector;
use twilight_model::{
application::{
command::CommandOptionChoice,
component::{
button::ButtonStyle, select_menu::SelectMenuOption, ActionRow, Button, Component,
SelectMenu,
},
interaction::{
application_command::CommandOptionValue, ApplicationCommand,
ApplicationCommandAutocomplete, MessageComponentInteraction,
},
},
channel::embed::EmbedField,
http::interaction::{InteractionResponse, InteractionResponseData, InteractionResponseType},
};
use crate::{
commands::{MyCommand, MyCommandOption, MyCommandOptionKind, SLASH_COMMANDS},
core::Context,
embeds::{EmbedBuilder, Footer},
error::{Error, InvalidHelpState},
util::{
constants::{common_literals::HELP, BATHBOT_GITHUB, BATHBOT_WORKSHOP, INVITE_LINK, KOFI},
datetime::how_long_ago_dynamic,
levenshtein_distance,
numbers::with_comma_int,
CowUtils, MessageBuilder, MessageExt,
},
BotResult,
};
use super::failed_message_;
/// (display-ready parts, requires-authority flag) or an invalid-state error.
type PartResult = Result<(Parts, bool), InvalidHelpState>;
/// Flattened, display-ready pieces of a command or subcommand.
struct Parts {
    /// Command (or option) name.
    name: &'static str,
    /// Help text; falls back to the description at construction time.
    help: &'static str,
    /// True only for a top-level command (fed to `back_button` as `disabled`).
    root: bool,
    /// Nested options: subcommands for groups, parameters for leaves.
    options: Vec<MyCommandOption>,
}
impl From<MyCommand> for Parts {
fn from(command: MyCommand) -> Self {
Self {
name: command.name,
help: command.help.unwrap_or(command.description),
root: true,
options: command.options,
}
}
}
impl From<MyCommandOption> for Parts {
fn from(option: MyCommandOption) -> Self {
let options = match option.kind {
MyCommandOptionKind::SubCommand { options }
| MyCommandOptionKind::SubCommandGroup { options } => options,
MyCommandOptionKind::String { .. }
| MyCommandOptionKind::Integer { .. }
| MyCommandOptionKind::Number { .. }
| MyCommandOptionKind::Boolean { .. }
| MyCommandOptionKind::User { .. }
| MyCommandOptionKind::Channel { .. }
| MyCommandOptionKind::Role { .. }
| MyCommandOptionKind::Mentionable { .. } => Vec::new(),
};
Self {
name: option.name,
help: option.help.unwrap_or(option.description),
root: false,
options,
}
}
}
impl From<EitherCommand> for Parts {
fn from(either: EitherCommand) -> Self {
match either {
EitherCommand::Base(command) => command.into(),
EitherCommand::Option(option) => option.into(),
}
}
}
impl From<CommandIter> for Parts {
fn from(iter: CommandIter) -> Self {
match iter.next {
Some(option) => option.into(),
None => iter.curr.into(),
}
}
}
/// Either a top-level command or one of its (sub)options.
enum EitherCommand {
    Base(MyCommand),
    Option(MyCommandOption),
}
/// Cursor used to walk a command tree by subcommand name.
struct CommandIter {
    /// The node most recently descended past.
    curr: EitherCommand,
    /// Candidate for the next descent, if any.
    next: Option<MyCommandOption>,
}
impl From<MyCommand> for CommandIter {
fn from(command: MyCommand) -> Self {
Self {
curr: EitherCommand::Base(command),
next: None,
}
}
}
impl CommandIter {
    /// Attempts to descend into the child option called `name`.
    ///
    /// Returns `true` on FAILURE (current node is a leaf, or no child
    /// matches) and `false` when the descent succeeded.
    fn next(&mut self, name: &str) -> bool {
        // Pick the option list of the node the cursor currently sits on.
        let options = match &mut self.next {
            Some(option) => match &mut option.kind {
                MyCommandOptionKind::SubCommand { options }
                | MyCommandOptionKind::SubCommandGroup { options } => options,
                // Leaf kinds cannot be descended into.
                _ => return true,
            },
            None => match &mut self.curr {
                EitherCommand::Base(command) => &mut command.options,
                EitherCommand::Option(option) => match &mut option.kind {
                    MyCommandOptionKind::SubCommand { options }
                    | MyCommandOptionKind::SubCommandGroup { options } => options,
                    _ => return true,
                },
            },
        };
        // NOTE(review): `drain(..)` empties `options` even though `find`
        // short-circuits — the remaining elements are removed when the Drain
        // guard is dropped. Presumably fine because the iterator only ever
        // walks forward; confirm before reusing this type elsewhere.
        let next = match options.drain(..).find(|option| option.name == name) {
            Some(option) => option,
            None => return true,
        };
        // Shift the cursor: the previous `next` becomes `curr`.
        if let Some(curr) = self.next.replace(next) {
            self.curr = EitherCommand::Option(curr);
        }
        false
    }
}
/// Footer text shown on commands that require authority status.
const AUTHORITY_STATUS: &str = "Requires authority status (check the /authorities command)";
/// Descends one level deeper into the command tree described by `title`,
/// appending the selected subcommand `name` to the title on success.
fn continue_subcommand(title: &mut String, name: &str) -> PartResult {
    let mut names = title.split(' ');
    // First word of the embed title is the base command name.
    let base = names.next().ok_or(InvalidHelpState::MissingTitle)?;
    let command = SLASH_COMMANDS
        .command(base)
        .ok_or(InvalidHelpState::UnknownCommand)?;
    let authority = command.authority;
    let mut iter = CommandIter::from(command);
    // Re-walk the path that is already shown in the title...
    for name in names {
        if iter.next(name) {
            return Err(InvalidHelpState::UnknownCommand);
        }
    }
    // ...then descend into the newly selected subcommand.
    if iter.next(name) {
        return Err(InvalidHelpState::UnknownCommand);
    }
    let command = Parts::from(iter);
    // Extend the displayed title; write into a String cannot fail.
    let _ = write!(title, " {}", command.name);
    Ok((command, authority))
}
/// Steps one level back up the command tree described by `title`,
/// removing the last path component from the title.
fn backtrack_subcommand(title: &mut String) -> PartResult {
    // Space count == component count minus one, i.e. the path length after
    // dropping the final component.
    let index = title.chars().filter(char::is_ascii_whitespace).count();
    let mut names = title.split(' ').take(index);
    let base = names.next().ok_or(InvalidHelpState::MissingTitle)?;
    let command = SLASH_COMMANDS
        .command(base)
        .ok_or(InvalidHelpState::UnknownCommand)?;
    let authority = command.authority;
    let mut iter = CommandIter::from(command);
    // Walk all components except the last one.
    for name in names {
        if iter.next(name) {
            return Err(InvalidHelpState::UnknownCommand);
        }
    }
    // Drop the last component from the displayed title.
    if let Some(pos) = title.rfind(' ') {
        title.truncate(pos);
    }
    Ok((iter.into(), authority))
}
/// Handles an interaction with the help select menu or its back button,
/// updating the existing help message in place.
pub async fn handle_menu_select(
    ctx: &Context,
    mut component: MessageComponentInteraction,
) -> BotResult<()> {
    // Parse given component: the current command path lives in the embed title.
    let mut title = component
        .message
        .embeds
        .pop()
        .ok_or(InvalidHelpState::MissingEmbed)?
        .title
        .ok_or(InvalidHelpState::MissingTitle)?;
    // If value is None, back button was pressed; otherwise subcommand was picked
    let (command, authority) = match component.data.values.pop() {
        Some(name) => continue_subcommand(&mut title, &name)?,
        None => backtrack_subcommand(&mut title)?,
    };
    // Prepare embed and components
    let mut embed_builder = EmbedBuilder::new()
        .title(title)
        .description(command.help)
        .fields(option_fields(&command.options));
    if authority {
        embed_builder = embed_builder.footer(Footer::new(AUTHORITY_STATUS));
    }
    // Reuse the subcommand menu when there is one; always append a button row.
    let mut components = parse_select_menu(&command.options);
    let menu_content = components.get_or_insert_with(|| Vec::with_capacity(1));
    let button_row = ActionRow {
        // Back button is disabled at the root (nothing to go back to).
        components: vec![back_button(command.root)],
    };
    menu_content.push(Component::ActionRow(button_row));
    let data = InteractionResponseData {
        components,
        embeds: Some(vec![embed_builder.build()]),
        ..Default::default()
    };
    let response = InteractionResponse {
        kind: InteractionResponseType::UpdateMessage,
        data: Some(data),
    };
    ctx.interaction()
        .create_response(component.id, &component.token, &response)
        .exec()
        .await?;
    Ok(())
}
/// Builds the red "Back" button row entry; `disabled` is true at the root
/// of the command tree where there is nothing to go back to.
fn back_button(disabled: bool) -> Component {
    Component::Button(Button {
        custom_id: Some("help_back".to_owned()),
        disabled,
        emoji: None,
        label: Some("Back".to_owned()),
        style: ButtonStyle::Danger,
        url: None,
    })
}
/// Renders each leaf option of a command as an embed field.
///
/// Subcommand (group) kinds are skipped — they are shown via the select
/// menu instead. Required options get a " (required)" suffix, and short
/// help texts (<= 40 chars) render inline.
fn option_fields(children: &[MyCommandOption]) -> Vec<EmbedField> {
    let mut fields = Vec::new();

    for child in children {
        let required = match &child.kind {
            MyCommandOptionKind::SubCommand { .. }
            | MyCommandOptionKind::SubCommandGroup { .. } => continue,
            MyCommandOptionKind::String { required, .. }
            | MyCommandOptionKind::Integer { required, .. }
            | MyCommandOptionKind::Number { required, .. }
            | MyCommandOptionKind::Boolean { required }
            | MyCommandOptionKind::User { required }
            | MyCommandOptionKind::Channel { required }
            | MyCommandOptionKind::Role { required }
            | MyCommandOptionKind::Mentionable { required } => *required,
        };

        let mut name = child.name.to_owned();

        if required {
            name.push_str(" (required)");
        }

        let help = child.help.unwrap_or(child.description);

        fields.push(EmbedField {
            inline: help.len() <= 40,
            name,
            value: help.to_owned(),
        });
    }

    fields
}
/// Builds the subcommand select menu for a command, or `None` when the
/// command has no subcommand (group) options at all.
fn parse_select_menu(options: &[MyCommandOption]) -> Option<Vec<Component>> {
    if options.is_empty() {
        return None;
    }

    let mut menu_options = Vec::new();

    // Only subcommands and subcommand groups become menu entries.
    for option in options {
        let is_subcommand = matches!(
            option.kind,
            MyCommandOptionKind::SubCommand { .. }
                | MyCommandOptionKind::SubCommandGroup { .. }
        );

        if is_subcommand {
            menu_options.push(SelectMenuOption {
                default: false,
                description: Some(option.description.to_owned()),
                emoji: None,
                label: option.name.to_owned(),
                value: option.name.to_owned(),
            });
        }
    }

    if menu_options.is_empty() {
        return None;
    }

    let select_menu = SelectMenu {
        custom_id: "help_menu".to_owned(),
        disabled: false,
        max_values: None,
        min_values: None,
        options: menu_options,
        placeholder: Some("Select a subcommand".to_owned()),
    };

    let row = ActionRow {
        components: vec![Component::SelectMenu(select_menu)],
    };

    Some(vec![Component::ActionRow(row)])
}
/// Sends the help embed (plus subcommand menu) for one specific command.
async fn help_slash_command(
    ctx: &Context,
    command: ApplicationCommand,
    cmd: MyCommand,
) -> BotResult<()> {
    let MyCommand {
        name,
        description,
        help,
        authority,
        options,
    } = cmd;
    // Prefer the long help text over the short description when present.
    let description = help.unwrap_or(description);
    // Easter egg: the owner command gets a canned reply instead of real help.
    if name == "owner" {
        let description =
            "This command can only be used by the owner of the bot.\nQuit snooping around :^)";
        let embed_builder = EmbedBuilder::new().title(name).description(description);
        let builder = MessageBuilder::new().embed(embed_builder);
        command.create_message(ctx, builder).await?;
        return Ok(());
    }
    let mut embed_builder = EmbedBuilder::new()
        .title(name)
        .description(description)
        .fields(option_fields(&options));
    if authority {
        let footer = Footer::new(AUTHORITY_STATUS);
        embed_builder = embed_builder.footer(footer);
    }
    // Attach the subcommand select menu, if the command has subcommands.
    let menu = parse_select_menu(&options);
    let builder = MessageBuilder::new()
        .embed(embed_builder)
        .components(menu.as_deref().unwrap_or_default());
    command.create_message(ctx, builder).await?;
    Ok(())
}
/// Serves autocomplete suggestions for the `command` option of `/help`.
pub async fn handle_autocomplete(
    ctx: Arc<Context>,
    command: ApplicationCommandAutocomplete,
) -> BotResult<()> {
    let mut cmd_name = None;
    let mut focus = None;
    // The only accepted option is `command`; anything else is malformed.
    if let Some(option) = command.data.options.first() {
        match option.value {
            Some(ref value) if option.name == "command" => {
                cmd_name = Some(value);
                focus = Some(option.focused);
            }
            _ => return Err(Error::InvalidCommandOptions),
        }
    }
    let name = cmd_name.map(|name| name.cow_to_ascii_lowercase());
    // Only suggest while the option is focused and the input is non-empty.
    let choices = match (name, focus) {
        (Some(name), Some(true)) => {
            let arg = name.trim();
            match (arg, SLASH_COMMANDS.descendants(arg)) {
                ("", _) | (_, None) => Vec::new(),
                (_, Some(cmds)) => cmds
                    .map(|cmd| CommandOptionChoice::String {
                        name: cmd.to_owned(),
                        value: cmd.to_owned(),
                    })
                    .collect(),
            }
        }
        _ => Vec::new(),
    };
    let data = InteractionResponseData {
        choices: Some(choices),
        ..Default::default()
    };
    let response = InteractionResponse {
        kind: InteractionResponseType::ApplicationCommandAutocompleteResult,
        data: Some(data),
    };
    ctx.interaction()
        .create_response(command.id, &command.token, &response)
        .exec()
        .await?;
    Ok(())
}
/// Entry point for the `/help` slash command: dispatches to general help or
/// to the help page of a specific command.
pub async fn slash_help(ctx: Arc<Context>, command: ApplicationCommand) -> BotResult<()> {
    let mut cmd_name = None;
    if let Some(option) = command.data.options.first() {
        // Only a string `command` option is accepted.
        let option = (option.name == "command").then(|| match &option.value {
            CommandOptionValue::String(value) => Some(value),
            _ => None,
        });
        match option.flatten() {
            Some(value) => cmd_name = Some(value),
            None => return Err(Error::InvalidCommandOptions),
        }
    }
    let name = cmd_name.map(|name| name.cow_to_ascii_lowercase());
    match name {
        Some(name) => {
            let arg = name.as_ref();
            match SLASH_COMMANDS.command(arg) {
                Some(cmd) => help_slash_command(&ctx, command, cmd).await,
                None => {
                    // Unknown command: suggest close names (edit distance < 5).
                    let dists = SLASH_COMMANDS
                        .names()
                        .map(|name| (levenshtein_distance(arg, name).0, name))
                        .filter(|(dist, _)| *dist < 5)
                        .collect();
                    failed_message_(&ctx, command.into(), dists).await
                }
            }
        }
        None => basic_help(&ctx, command).await,
    }
}
/// Builds and sends the general help embed (bot overview, links, stats).
async fn basic_help(ctx: &Context, command: ApplicationCommand) -> BotResult<()> {
    let id = ctx
        .cache
        .current_user()
        .expect("missing CurrentUser in cache")
        .id;
    let mention = format!("<@{id}>");
    let description = format!(
        "{mention} is a discord bot written by [Badewanne3](https://osu.ppy.sh/u/2211396) all around osu!"
    );
    let join_server = EmbedField {
        inline: false,
        name: "Got a question, suggestion, bug, or are interested in the development?".to_owned(),
        value: format!("Feel free to join the [discord server]({BATHBOT_WORKSHOP})"),
    };
    let command_help = EmbedField {
        inline: false,
        name: "Want to learn more about a command?".to_owned(),
        value: "Try specifying the command name on the `help` command: `/help command:_`"
            .to_owned(),
    };
    let invite = EmbedField {
        inline: false,
        name: "Want to invite the bot to your server?".to_owned(),
        value: format!("Try using this [**invite link**]({INVITE_LINK})"),
    };
    let servers = EmbedField {
        inline: true,
        name: "Servers".to_owned(),
        value: with_comma_int(ctx.cache.stats().guilds()).to_string(),
    };
    let boot_time = ctx.stats.start_time;
    let boot_up = EmbedField {
        inline: true,
        name: "Boot-up".to_owned(),
        value: how_long_ago_dynamic(&boot_time).to_string(),
    };
    let github = EmbedField {
        inline: false,
        name: "Interested in the code?".to_owned(),
        value: format!("The source code can be found over at [github]({BATHBOT_GITHUB})"),
    };
    // Sum the prometheus counters over all label combinations.
    let commands_used: usize = ctx.stats.command_counts.message_commands.collect()[0]
        .get_metric()
        .iter()
        .map(|metrics| metrics.get_counter().get_value() as usize)
        .sum();
    let commands_used = EmbedField {
        inline: true,
        name: "Commands used".to_owned(),
        value: with_comma_int(commands_used).to_string(),
    };
    let osu_requests: usize = ctx.stats.osu_metrics.rosu.collect()[0]
        .get_metric()
        .iter()
        .map(|metric| metric.get_counter().get_value() as usize)
        .sum();
    let osu_requests = EmbedField {
        inline: true,
        name: "osu!api requests".to_owned(),
        value: with_comma_int(osu_requests).to_string(),
    };
    let kofi = EmbedField {
        inline: false,
        name: "Feel like supporting the bot's development & maintenance?".to_owned(),
        value: format!("Donations through [Ko-fi]({KOFI}) are very much appreciated <3"),
    };
    // Field order here is the display order in the embed.
    let fields = vec![
        join_server,
        command_help,
        invite,
        servers,
        boot_up,
        github,
        commands_used,
        osu_requests,
        kofi,
    ];
    let builder = EmbedBuilder::new()
        .description(description)
        .fields(fields)
        .build()
        .into();
    command.create_message(ctx, builder).await?;
    Ok(())
}
/// Declares the `/help` slash command with its single optional
/// autocompleted `command` option.
pub fn define_help() -> MyCommand {
    let command_option_help = "Specify a command **base** name.\n\
        Once the help for that command is displayed, you can use the menu to navigate \
        to specific subcommands you want to know more about.";

    let command_option = MyCommandOption::builder("command", "Specify a command base name")
        .help(command_option_help)
        .autocomplete()
        .string(Vec::new(), false);

    let help_text = "If no command name is specified, it will show general help for the bot.\n\
        Otherwise it'll show a help menu for the specific command.";

    MyCommand::new(HELP, "Display general help or help for a specific command")
        .help(help_text)
        .options(vec![command_option])
}
|
/// Returns the sum of the two inputs.
fn som(x: i64, y: i64) -> i64 {
    y + x
}
/// Returns the larger of the two inputs.
fn maxi(x: i64, y: i64) -> i64 {
    x.max(y)
}
/// Returns the smaller of the two inputs.
fn mini(x: i64, y: i64) -> i64 {
    x.min(y)
}
/// Greatest common divisor of `m` and `n`, computed on absolute values.
///
/// Iterative Euclidean algorithm. Unlike the previous recursive version,
/// this does not panic with a division by zero when an argument is 0:
/// `gcdi(x, 0) == |x|`. Results are unchanged for all nonzero inputs.
fn gcdi(m: i64, n: i64) -> i64 {
    let (mut a, mut b) = (m.abs(), n.abs());
    // Euclid: gcd(a, b) == gcd(b, a mod b); terminates when b reaches 0.
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }
    a
}
/// Least common multiple (non-negative) of `a` and `b`.
///
/// Divides before multiplying (`a / gcd * b` instead of `a * b / gcd`) so
/// the intermediate value stays smaller, reducing overflow risk for large
/// inputs. Result is identical whenever the original did not overflow.
fn lcmu(a: i64, b: i64) -> i64 {
    // `a` is always divisible by gcd(a, b), so the division is exact.
    (a / gcdi(a, b) * b).abs()
}
// first parameter: dots have to be replaced by function of two variables
// first parameter: dots have to be replaced by function of two variables
/// Folds `f` over `a` starting from `init`, collecting every intermediate
/// accumulator value (a running "scan" of the operation).
fn oper_array(f: fn(i64, i64) -> i64, a: &[i64], init: i64) -> Vec<i64> {
    a.iter()
        .scan(init, |acc, &x| {
            *acc = f(*acc, x);
            Some(*acc)
        })
        .collect()
}
// Sanity checks for the elementary helpers.
#[test]
fn test_basics() {
    assert_eq!(som(1, 1), 2, "som should be the sum of the two inputs");
    assert_eq!(maxi(1, 2), 2, "maxi should be the highest of the two inputs");
    assert_eq!(mini(1, 2), 1, "mini should be the lowest of the two inputs");
    assert_eq!(gcdi(24, -54), 6, "gcdi should be the greatest common divisor of the two args");
    assert_eq!(lcmu(4, 6), 12, "lcmu should be the least common multiple of the two args");
}
// Running-sum example: each element is the cumulative sum so far.
#[test]
fn test_oper_array() {
    assert_eq!(oper_array(som, &[2, 4, 6, 8, 10, 20], 0), vec![2, 6, 12, 20, 30, 50]);
}
|
mod media;
use media::Playable;
/// An audio track: (title, artist).
struct Audio(String, String);
/// A video: (title, creator).
struct Video(String, String);
impl Playable for Audio {
fn play(&self) {
println!("Now Playing: {} by {}", self.0, self.1);
}
}
impl Playable for Video {
fn play(&self) {
println!("Now Playing: {} by {}", self.0, self.1);
}
}
/// Demo: plays one audio track and one video through the `Playable` trait.
fn main() {
    println!("Super Player!");

    let track = Audio(
        "ambient_music.mp3".to_string(),
        "falala guy".to_string(),
    );
    let film = Video(
        "ufo_documentary.mkv".to_string(),
        "aliens_guy".to_string(),
    );

    track.play();
    film.play();
}
|
extern crate kmod;
#[macro_use]
extern crate log;
extern crate env_logger;
use std::env;
use std::fs;
/// Removes a kernel module, identified either by a path to a .ko file or by
/// its module name (first CLI argument).
fn main() {
    env_logger::init();
    let ctx = kmod::Context::new().expect("kmod ctx failed");
    let filename = env::args().nth(1).expect("missing argument");
    // If the argument names an existing file, load by path; otherwise by name.
    let module = match fs::metadata(&filename) {
        Ok(_) => ctx
            .module_new_from_path(&filename)
            .expect("new_from_path failed"),
        Err(_) => ctx
            .module_new_from_name(&filename)
            .expect("new_from_name failed"),
    };
    info!("got module: {:?}", module.name());
    // 0 = no removal flags; NOTE(review): confirm flag semantics against the
    // kmod crate docs before changing.
    module.remove_module(0).expect("remove_module failed");
}
|
#![feature(plugin, custom_derive)]
#![plugin(rocket_codegen)]
extern crate bindgen;
extern crate rocket;
use bindgen::builder;
use rocket::request::Form;
use rocket::response::NamedFile;
use std::path::{Path, PathBuf};
/// Serves the landing page.
#[get("/")]
fn index() -> Option<NamedFile> {
    NamedFile::open(Path::new("static/index.html")).ok()
}
/// Serves files from ./static by URL path.
///
/// NOTE(review): this relies on Rocket's `PathBuf` segment guard to reject
/// `..` traversal segments — confirm for the Rocket version in use.
#[get("/<file..>")]
fn static_files(file: PathBuf) -> Option<NamedFile> {
    NamedFile::open(Path::new("static/").join(file)).ok()
}
/// Form payload for the /bindgen endpoint.
#[derive(FromForm)]
struct BindgenInput {
    /// C header source to run through bindgen.
    source: String,
}
/// Runs bindgen on the submitted header source and returns the generated
/// Rust bindings as a string, or a generic error message on failure.
#[post("/bindgen", data = "<input>")]
fn api_bindgen(input: Form<BindgenInput>) -> Result<String, String> {
    let form = input.get();
    builder().header_contents("input.h", &form.source)
        .generate()
        .map(|b| b.to_string())
        //TODO: get error messages out of bindgen?
        // map_err defers building the error String until it is needed,
        // instead of allocating it eagerly on every request via `.or(Err(..))`.
        .map_err(|_| "Failed to generate bindings".to_owned())
}
/// Mounts the API and static-file routes, then launches the Rocket server.
fn main() {
    rocket::ignite()
        .mount("/api", routes![api_bindgen])
        .mount("/", routes![index, static_files])
        .launch();
}
|
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoInterruptiblePipelineData, PipelineData, Signature, Span, Spanned,
SyntaxShape, Value,
};
/// The `window` filter command: yields sliding windows over pipeline input.
#[derive(Clone)]
pub struct Window;
impl Command for Window {
    /// Command name as typed in the shell.
    fn name(&self) -> &str {
        "window"
    }
    /// `window <window_size> [--stride n]`.
    fn signature(&self) -> Signature {
        Signature::build("window")
            .required("window_size", SyntaxShape::Int, "the size of each window")
            .named(
                "stride",
                SyntaxShape::Int,
                "the number of rows to slide over between windows",
                Some('s'),
            )
            .category(Category::Filters)
    }
    fn usage(&self) -> &str {
        "Creates a sliding window of `window_size` that slide by n rows/elements across input."
    }
    /// Two examples: default stride of 1 (overlapping windows) and an
    /// explicit stride of 3 (windows skip elements between them).
    fn examples(&self) -> Vec<Example> {
        // Expected output of `[1 2 3 4] | window 2`: [[1 2] [2 3] [3 4]].
        let stream_test_1 = vec![
            Value::List {
                vals: vec![
                    Value::Int {
                        val: 1,
                        span: Span::test_data(),
                    },
                    Value::Int {
                        val: 2,
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            },
            Value::List {
                vals: vec![
                    Value::Int {
                        val: 2,
                        span: Span::test_data(),
                    },
                    Value::Int {
                        val: 3,
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            },
            Value::List {
                vals: vec![
                    Value::Int {
                        val: 3,
                        span: Span::test_data(),
                    },
                    Value::Int {
                        val: 4,
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            },
        ];
        // Expected output with stride 3: [[1 2] [4 5] [7 8]].
        let stream_test_2 = vec![
            Value::List {
                vals: vec![
                    Value::Int {
                        val: 1,
                        span: Span::test_data(),
                    },
                    Value::Int {
                        val: 2,
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            },
            Value::List {
                vals: vec![
                    Value::Int {
                        val: 4,
                        span: Span::test_data(),
                    },
                    Value::Int {
                        val: 5,
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            },
            Value::List {
                vals: vec![
                    Value::Int {
                        val: 7,
                        span: Span::test_data(),
                    },
                    Value::Int {
                        val: 8,
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            },
        ];
        vec![
            Example {
                example: "echo [1 2 3 4] | window 2",
                description: "A sliding window of two elements",
                result: Some(Value::List {
                    vals: stream_test_1,
                    span: Span::test_data(),
                }),
            },
            Example {
                example: "[1, 2, 3, 4, 5, 6, 7, 8] | window 2 --stride 3",
                description: "A sliding window of two elements, with a stride of 3",
                result: Some(Value::List {
                    vals: stream_test_2,
                    span: Span::test_data(),
                }),
            },
        ]
    }
    /// Wraps the input stream in an `EachWindowIterator` configured from the
    /// positional window size and the optional `--stride` flag (default 1).
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
        let ctrlc = engine_state.ctrlc.clone();
        let metadata = input.metadata();
        let stride: Option<usize> = call.get_flag(engine_state, stack, "stride")?;
        let stride = stride.unwrap_or(1);
        //FIXME: add in support for external redirection when engine-q supports it generally
        let each_group_iterator = EachWindowIterator {
            group_size: group_size.item,
            input: Box::new(input.into_iter()),
            span: call.head,
            previous: vec![],
            stride,
        };
        Ok(each_group_iterator
            .into_pipeline_data(ctrlc)
            .set_metadata(metadata))
    }
}
/// Streaming iterator producing sliding windows over `input`.
struct EachWindowIterator {
    /// Number of elements per emitted window.
    group_size: usize,
    /// The upstream value stream.
    input: Box<dyn Iterator<Item = Value> + Send>,
    /// Span attached to every emitted list value.
    span: Span,
    /// The previously emitted window, used as the base for the next one.
    previous: Vec<Value>,
    /// How many elements each window advances past the previous one.
    stride: usize,
}
impl Iterator for EachWindowIterator {
    type Item = Value;
    /// Yields the next window as a `Value::List`.
    ///
    /// The first call fills a whole window of `group_size` items; every
    /// later call pulls `stride` new items and drops the same number of
    /// old ones. Returns `None` as soon as the input cannot supply enough
    /// items to complete a window/stride.
    fn next(&mut self) -> Option<Self::Item> {
        let mut group = self.previous.clone();
        let mut current_count = 0;
        if group.is_empty() {
            // First window: take `group_size` items, or bail if input runs dry.
            loop {
                let item = self.input.next();
                match item {
                    Some(v) => {
                        group.push(v);
                        current_count += 1;
                        if current_count >= self.group_size {
                            break;
                        }
                    }
                    None => return None,
                }
            }
        } else {
            // our historic buffer is already full, so stride instead
            loop {
                let item = self.input.next();
                match item {
                    Some(v) => {
                        group.push(v);
                        current_count += 1;
                        if current_count >= self.stride {
                            break;
                        }
                    }
                    None => return None,
                }
            }
            // Drop the oldest `stride` items so the window keeps its size.
            // NOTE(review): `remove(0)` in a loop shifts the whole buffer
            // each time (O(stride * len)); a `drain(..current_count)` or a
            // VecDeque would avoid the repeated shifts.
            for _ in 0..current_count {
                let _ = group.remove(0);
            }
        }
        if group.is_empty() || current_count == 0 {
            return None;
        }
        // Remember this window so the next call can stride from it.
        self.previous = group.clone();
        Some(Value::List {
            vals: group,
            span: self.span,
        })
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Runs the declared examples through the shared example-test harness.
    #[test]
    fn test_examples() {
        use crate::test_examples;
        test_examples(Window {})
    }
}
|
/// A collection of runs as returned by the API.
pub type Runs = Vec<Run>;
// only define the structs for the data I want to pull out
/// One run with its status, result and the user who triggered it.
#[derive(Debug,Serialize, Deserialize)]
pub struct Run {
    pub status: String,
    pub result: String,
    pub user: User,
}
/// User wrapper; only the nested metadata is of interest.
#[derive(Debug,Serialize, Deserialize)]
pub struct User {
    pub meta: Meta,
}
/// User metadata; only the username is extracted.
#[derive(Debug,Serialize, Deserialize)]
pub struct Meta {
    pub username: String
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Properties of a Canonical support plan resource (generated model).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CanonicalSupportPlanProperties {
    /// Current provisioning state; omitted from JSON when absent.
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<canonical_support_plan_properties::ProvisioningState>,
}
pub mod canonical_support_plan_properties {
    use super::*;
    /// Lifecycle states a support plan can be in.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Succeeded,
        Failed,
        Cancelled,
        Purchasing,
        Downgrading,
        Cancelling,
        Upgrading,
    }
}
/// ARM response envelope around the support plan properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CanonicalSupportPlanResponseEnvelope {
    /// Fully qualified resource id.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// Resource name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Resource type; renamed because `type` is a Rust keyword.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub properties: CanonicalSupportPlanProperties,
}
/// Untyped status payload (generated as raw JSON values).
pub type CanonicalSupportPlanStatus = Vec<serde_json::Value>;
/// Wrapper holding an optional operation list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListValue {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<OperationList>,
}
/// List of operations supported by the provider.
pub type OperationList = Vec<OperationsDefinition>;
/// One REST operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationsDisplayDefinition>,
}
/// Human-readable description of an operation for portal display.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsDisplayDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Error payload returned by the service: message plus machine-readable code.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDefinition {
    pub message: String,
    pub code: String,
}
|
//! rocket_auth provides a ready-to-use backend agnostic API for authentication management.
//! It supports connections for SQLite and Postgresql. It lets you create, delete, and authenticate users.
//! The available features are:
//! * `sqlite-db`: for interacting with a SQLite database using [`sqlx`].
//! * `postgres-db`: for interacting with a Postgresql database with [`sqlx`].
//! * `tokio-postgres-db`: for interacting with a Postgresql database with [`tokio_postgres`].
//! * `redis-session`: for storing sessions on a redis server using [`redis`].
//!
//! By default this crate stores sessions on a concurrent hashmap.
//! As a result, sessions will only be stored as long as the rocket application runs uninterrupted.
//! In order to store persistent sessions, it is recommended to connect the [`Users`](`Users::open_redis`) instance to a [redis server](https://redis.io/) .
//! This requires the `redis-session` feature to be enabled.
//!
//! `rocket_auth` uses private cookies to store session data.
//! This means that in order for cookies to be properly decrypted between launches, a `secret_key` must be set.
//! For more information visit rocket's [configuration guide](https://rocket.rs/v0.4/guide/configuration/).
//!
//!
//!
//!
//!
//! To use `rocket_auth` include it as a dependency in your Cargo.toml file:
//! ```ini
//! [dependencies.rocket_auth]
//! version = "0.3.0"
//! features = ["sqlite-db"]
//! ```
//! # Quick overview
//! This crate provides three guards:
//! * [`Auth`]: manages authentication.
//! * [`Session`]: retrieves session data from client cookies.
//! * [`User`]: restricts content, so it can be viewed by authenticated clients only.
//!
//! It also includes two structs to be parsed from forms and json data:
//! * [`Signup`]: used to create new users.
//! * [`Login`]: used to authenticate users.
//!
//! Finally it has two structures for queries:
//! * [`Users`]: it allows to query users to the database.
//! * [`User`]: it is the response of a query.
//!
//! The [`Auth`] guard allows to log in, log out, sign up, modify, and delete the currently (un)authenticated user.
//! For more information see [`Auth`]. Because of rust's ownership rules, you may not retrieve both `rocket::http::Cookies` and the [`Auth`] guard
//! simultaneously. However, retrieving cookies is not needed since `Auth` stores them in the public field [`Auth::cookies`].
//! A working example:
//! ```rust,no_run
//! use rocket::{get, post, form::Form, routes};
//! use rocket_auth::{Users, Error, Auth, Signup, Login};
//!
//! #[post("/signup", data="<form>")]
//! async fn signup(form: Form<Signup>, mut auth: Auth<'_>) -> Result<&'static str, Error> {
//! auth.signup(&form).await?;
//!     auth.login(&form.into()).await?;
//! Ok("You signed up.")
//! }
//!
//! #[post("/login", data="<form>")]
//! async fn login(form: Form<Login>, mut auth: Auth<'_>) -> Result<&'static str, Error>{
//! auth.login(&form).await?;
//! Ok("You're logged in.")
//! }
//!
//! #[get("/logout")]
//! fn logout(mut auth: Auth<'_>) {
//! auth.logout();
//! }
//! #[tokio::main]
//! async fn main() -> Result<(), Error>{
//! let users = Users::open_sqlite("mydb.db").await?;
//!
//! rocket::build()
//! .mount("/", routes![signup, login, logout])
//! .manage(users)
//! .launch();
//! Ok(())
//! }
//! ```
//!
//! ## Users struct
//! The [`Users`] struct administers interactions with the database.
//! It lets you query, create, modify and delete users.
//! Unlike the [`Auth`] guard, a [`Users`] instance can manage any user in the database.
//! Note that the [`Auth`] guards includes a `Users` instance stored on the public `users` field.
//! So it is not necessary to retrieve Users when using `Auth`.
//! A simple example of how to query a user with the [`Users`] struct:
//!
//! ```rust
//! # use rocket::{get, State};
//! # use serde_json::json;
//! use rocket_auth::Users;
//!
//! #[get("/see-user/<id>")]
//! async fn see_user(id: i32, users: &State<Users>) -> String {
//! let user = users.get_by_id(id).await.unwrap();
//! format!("{}", json!(user))
//! }
//! # fn main() {}
//! ```
//!
//! A [`Users`] instance can be constructed by connecting it to the database with the methods [`open_sqlite`](Users::open_sqlite),
//! [`open_postgres`](Users::open_postgres). Furthermore, it can be constructed from a working connection.
//!
//!
//! ## User guard
//! The `User` guard can be used to restrict content so it can only be viewed by authenticated users.
//! Additionally, you can use it to render special content depending on whether the client is authenticated.
//! ```rust
//! # use rocket::*;
//! # use rocket_auth::User;
//! #[get("/private-content")]
//! fn private_content(user: User) -> &'static str {
//! "If you can see this, you are logged in."
//! }
//!
//! #[get("/special-content")]
//! fn special_content(option: Option<User>) -> String {
//! if let Some(user) = option {
//! format!("hello, {}.", user.email())
//! } else {
//! "hello, anonymous user".into()
//! }
//! }
//! ```
//!
mod cookies;
mod db;
mod error;
mod forms;
pub mod prelude;
mod session;
mod user;
use prelude::*;
use rocket::FromForm;
pub use crate::user::auth::Auth;
pub use cookies::Session;
pub use error::Error;
/// The `User` guard can be used to restrict content so it can only be viewed by authenticated users.
///
/// ```rust
/// #
/// # use rocket::get;
/// # use rocket_auth::User;
/// #[get("/private-content")]
/// fn private_content(user: User) -> &'static str {
///     "If you can see this, you are logged in."
/// }
/// # fn main() {}
/// ```
#[derive(PartialEq, Eq, Clone)]
pub struct User {
    // Database id of the user.
    id: i32,
    // Email address used to log in.
    email: String,
    // Stored password — presumably a hash produced upstream; confirm before documenting as such.
    password: String,
    // Whether the user has administrative privileges.
    is_admin: bool,
}
/// The `Users` struct is used to query users from the database, as well as to create, modify and delete them.
pub struct Users {
    // Backend-agnostic database connection (SQLite/Postgres per crate features).
    conn: Box<dyn DBConnection>,
    // Session store; in-memory by default, redis with the `redis-session` feature.
    sess: Box<dyn SessionManager>,
}
/// The `Login` form is used along with the [`Auth`] guard to authenticate users.
#[derive(FromForm, Deserialize, Debug, Clone)]
pub struct Login {
    pub email: String,
    // Kept private so the plaintext password never leaks through field access.
    password: String,
}
/// The `Signup` form is used along with the [`Auth`] guard to create new users.
#[derive(FromForm, Deserialize, Debug, Clone)]
pub struct Signup {
    pub email: String,
    // Kept private so the plaintext password never leaks through field access.
    password: String,
}
|
use std::fmt;
/// A user account with basic state for the example program.
#[derive(Debug)]
struct User {
    username: String,
    email: String,
    sign_in_count: u64,
    active: bool,
}

impl fmt::Display for User {
    /// Renders the user as `(username, email, sign_in_count, active)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "({}, {}, {}, {})", self.username, self.email, self.sign_in_count, self.active)
    }
}

/// Builds a new, active `User` with a sign-in count of 1.
fn build_user(email: String, username: String) -> User {
    // Field-init shorthand replaces the redundant `username: username` form.
    User { username, email, sign_in_count: 1, active: true }
}
/// Demonstrates consts, mutable bindings, and the `User` builder.
fn main() {
    // Constants require an explicit type and are always immutable.
    const MAX_POINTS: u32 = 100_000;
    let mut x = 5;
    println!("The value of x is: {}", x);
    x = 6;
    println!("The value of x is: {}", x);
    // Fixed typo in the output message: "constatn" -> "constant".
    println!("The constant of MAX_POINTS is : {}", MAX_POINTS);
    println!("Build User : {}", build_user(String::from("roach@naver.com"), String::from("username")))
}
|
// This code is quite a bit different than the code you might be used to. Don't fear! I believe
// in you!
//
// Please take a look at the accompanying README.md located in this directory for setup/test
// instructions.
// Here we import the `qrcode` crate (like a Ruby gem). It is also declared in the `./Cargo.toml`.
extern crate qrcode;
// Import specific functionality from the `qrcode` crate.
use qrcode::{render::svg, QrCode, types::QrError};
// From the standard library we'll also import some functionality to let us work with raw C
// style strings.
use std::os::raw::c_char;
use std::ffi::CString;
/// Generate a QR code from the respective data. Returns a string containing the SVG string
/// appropriate to be saved to a file or rendered to a DOM tree.
pub fn qrcode<T>(data: T, width: u32, height: u32) -> Result<String, QrError>
where
    T: AsRef<[u8]>,
{
    // Build the code first, propagating any encoding error with `?`.
    let code = QrCode::with_error_correction_level(data.as_ref(), qrcode::EcLevel::L)?;
    // Pinning max == min forces the rendered SVG to the exact requested size.
    let markup = code
        .render::<svg::Color>()
        .max_dimensions(width, height)
        .min_dimensions(width, height)
        .build();
    Ok(markup)
}
/// Generate a qrcode from the C string starting at the pointer provided. It is essentially an
/// unsafe proxy to `qrcode(data: &[u8])` that handles FFI concerns.
///
/// The input is a raw pointer to memory and the string located there will be freed during the
/// function execution. Use `alloc()` to get an appropriate region in memory.
///
/// Returns a new pointer to a new location in memory where the SVG code for the qrcode is located.
/// You **must** pass this pointer back to the `free()` function below.
///
/// # Safety
/// `region` must be a valid, nul-terminated C string previously produced by `alloc()`
/// (or `CString::into_raw`); ownership of that allocation transfers to this function.
#[no_mangle]
pub unsafe extern "C" fn qrcode_ffi(region: *mut c_char, width: u32, height: u32) -> *mut c_char {
    // Reclaim ownership of the caller's buffer; it is dropped (freed) at the
    // end of the function.
    let arg = CString::from_raw(region).into_bytes();
    let qr_code = match qrcode(arg, width, height) {
        Ok(v) => v,
        // Since we're on an FFI boundary we can't return strongly typed errors. Instead if we get
        // an error from the qrcode generation we return the error string.
        Err(e) => format!("{}", e),
    };
    // Output the generated string as a raw C string, or (if it fails to do this) output a
    // meaningful error. `CString::new` fails only when the payload contains NULs.
    match CString::new(qr_code).map(|v| v.into_raw()) {
        Ok(v) => v,
        Err(_e) => CString::new("Generated SVG contains NULL bytes.").unwrap().into_raw(),
    }
}
/// Allocates a zero'd region in memory for the caller.
///
/// Writing past this region will result in undefined behaivor.
///
/// You **must** free the region returned either via the `free()` function or the `qrcode_ffi()`
/// function.
///
/// # Safety
/// The returned pointer owns a heap allocation; leaking it or freeing it by any
/// other mechanism than `free()`/`qrcode_ffi()` is unsound.
#[no_mangle]
pub unsafe extern "C" fn alloc(size: usize) -> *mut c_char {
    let buffer = vec![0; size];
    // This buffer has `\0` bytes in it, but we are going to go let JS fill it in. So it's ok!
    // (`from_vec_unchecked` also appends its own terminating NUL.)
    let wrapped = CString::from_vec_unchecked(buffer);
    // When we cast into a raw pointer Rust no longer bothers with managing the related memory.
    wrapped.into_raw()
}
/// Frees the provided region in memory from the first byte of data until the first `\0` byte.
///
/// It is only appropriate to call this on a value returned from `alloc()` or `qrcode_ffi()`.
///
/// # Safety
/// Calling this on any other pointer, or calling it twice on the same pointer,
/// is undefined behavior (double free / invalid free).
#[no_mangle]
pub unsafe extern "C" fn free(region: *mut c_char) {
    // Reconstituting the CString takes ownership; dropping it releases the memory.
    let _ = CString::from_raw(region);
}
|
use crate::event::Event;
use std::collections::VecDeque;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct EventQueue(VecDeque<Event>);
impl EventQueue {
pub fn new() -> Self {
EventQueue(VecDeque::new())
}
pub fn enqueu(&mut self, event: Event) {
self.0.push_front(event);
}
pub fn dequeue(&mut self) -> Option<Event> {
self.0.pop_back()
}
} |
/// Computes `n!` (with `0! == 1`).
///
/// Implemented iteratively with `Iterator::product` instead of recursion:
/// the empty range for `n == 0` yields 1, matching the recursive base case,
/// and the widening to `u64` happens once instead of per recursion level.
/// Note: the result overflows `u64` for `n > 20` (panics in debug builds).
pub fn factorial(n: u16) -> u64 {
    (1..=u64::from(n)).product()
}
|
use crate::character::{
AbilityBoostChoice, AbilityBoostChoiceSet, AbilityScore, AbilityScoreSet, AbilityScoreType,
Ancestry, Background, Class, Health,
};
/// Creature size categories, ordered smallest to largest
/// (the `Ord` derive relies on this declaration order).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum Size {
    Tiny,
    Small,
    Medium,
    Large,
    Huge,
    Gargantuan,
}
/// A player character assembled from a class, ancestry, and background,
/// each borrowed for the character's lifetime.
pub struct Character<'class, 'ancestry, 'background> {
    name: String,
    description: String,
    // Borrowed building blocks; the character never owns these definitions.
    class: &'class Class,
    ancestry: &'ancestry Ancestry,
    background: &'background Background,
    level: u32,
    hp: Health,
    // Movement speed, taken from the ancestry at creation.
    speed: u32,
    size: Size,
    // Final ability scores after all boosts are applied.
    ability_scores: AbilityScoreSet,
}
impl<'class, 'ancestry, 'background> Character<'class, 'ancestry, 'background> {
    /// Creates a level-1 character.
    ///
    /// Ability scores are built from four boost groups: the class key ability,
    /// the ancestry boosts (resolved with `ancestry_boost_choices`), the
    /// background boosts (resolved with `background_boost_choices`), and four
    /// free boosts (resolved with `extra_boost_choices`).
    ///
    /// Returns an error when any choice list fails to satisfy its boost set.
    pub fn new(
        name: &str,
        class: &'class Class,
        ancestry: &'ancestry Ancestry,
        ancestry_boost_choices: &Vec<AbilityScoreType>,
        background: &'background Background,
        background_boost_choices: &Vec<AbilityScoreType>,
        extra_boost_choices: &Vec<AbilityScoreType>,
    ) -> Result<Character<'class, 'ancestry, 'background>, String> {
        // Every character gets four free boosts at level 1.
        let extra_boosts: Vec<AbilityBoostChoice> = vec![
            AbilityBoostChoice::free(),
            AbilityBoostChoice::free(),
            AbilityBoostChoice::free(),
            AbilityBoostChoice::free(),
        ];
        Ok(Character {
            name: String::from(name),
            description: "".to_string(),
            class,
            ancestry,
            background,
            level: 1,
            // Starting HP is the ancestry base plus one class increment.
            hp: Health::new(class.hp_increment() + ancestry.base_hp()),
            speed: ancestry.speed(),
            size: ancestry.size(),
            ability_scores: AbilityScoreSet::with_boosts(&vec![
                &hashset![class.key_ability()],
                &ancestry
                    .ability_boosts()
                    .apply_choices(ancestry_boost_choices)?,
                &background
                    .ability_boosts()
                    .apply_choices(background_boost_choices)?,
                &extra_boosts.apply_choices(extra_boost_choices)?,
            ]),
        })
    }

    // ============================= Accessors =============================

    pub fn name(&self) -> &str {
        &self.name
    }

    pub fn description(&self) -> &str {
        &self.description
    }

    pub fn set_description(&mut self, description: String) {
        self.description = description;
    }

    pub fn class(&self) -> &Class {
        self.class
    }

    pub fn ancestry(&self) -> &Ancestry {
        self.ancestry
    }

    pub fn background(&self) -> &Background {
        self.background
    }

    pub fn level(&self) -> u32 {
        self.level
    }

    pub fn hp(&self) -> &Health {
        &self.hp
    }

    pub fn speed(&self) -> u32 {
        self.speed
    }

    pub fn size(&self) -> Size {
        self.size
    }

    /// Correctly spelled accessor for the character's ability scores.
    pub fn ability_scores(&self) -> &AbilityScoreSet {
        &self.ability_scores
    }

    /// Misspelled legacy accessor kept for backward compatibility;
    /// prefer [`Character::ability_scores`].
    pub fn abliity_scores(&self) -> &AbilityScoreSet {
        self.ability_scores()
    }

    pub fn strength(&self) -> AbilityScore {
        self.ability_scores.get(AbilityScoreType::Strength)
    }

    pub fn dexterity(&self) -> AbilityScore {
        self.ability_scores.get(AbilityScoreType::Dexterity)
    }

    pub fn constitution(&self) -> AbilityScore {
        self.ability_scores.get(AbilityScoreType::Constitution)
    }

    pub fn intelligence(&self) -> AbilityScore {
        self.ability_scores.get(AbilityScoreType::Intelligence)
    }

    pub fn wisdom(&self) -> AbilityScore {
        self.ability_scores.get(AbilityScoreType::Wisdom)
    }

    pub fn charisma(&self) -> AbilityScore {
        self.ability_scores.get(AbilityScoreType::Charisma)
    }

    // ============================= Logic =============================

    /// Advances the character one level, adding one class HP increment.
    pub fn level_up(&mut self) {
        self.level += 1;
        self.hp.increase_max(self.class.hp_increment());
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::character::AbilityBoostChoice;

    // With an empty ancestry/background, only the class key ability and the
    // four extra boosts should move scores off the base of 10.
    #[test]
    fn class_stats() {
        let class = Class::new("Bob".to_string(), AbilityScoreType::Strength, 10);
        let ancestry = Ancestry::new("Bob".to_string(), 0, Size::Medium, 30, vec![]);
        let background = Background::new("Bob".to_string(), "".to_string(), vec![]);
        let character = Character::new(
            "Bob",
            &class,
            &ancestry,
            &vec![],
            &background,
            &vec![],
            &vec![
                AbilityScoreType::Strength,
                AbilityScoreType::Constitution,
                AbilityScoreType::Dexterity,
                AbilityScoreType::Wisdom,
            ],
        )
        .unwrap();
        assert_eq!(character.name(), "Bob");
        assert_eq!(character.level(), 1);
        assert_eq!(character.hp().max(), 10);
        assert_eq!(character.strength().value(), 14);
        assert_eq!(character.dexterity().value(), 12);
        assert_eq!(character.constitution().value(), 12);
        assert_eq!(character.intelligence().value(), 10);
        assert_eq!(character.wisdom().value(), 12);
        assert_eq!(character.charisma().value(), 10);
    }

    // Ancestry boosts (two predetermined, one free resolved to Dexterity)
    // stack on top of the class/extra boosts; speed and size come from the
    // ancestry unchanged.
    #[test]
    fn ancestry_stats() {
        let class = Class::new("Bob".to_string(), AbilityScoreType::Strength, 10);
        let ancestry = Ancestry::new(
            "Bob".to_string(),
            8,
            Size::Medium,
            30,
            vec![
                AbilityBoostChoice::predetermined(AbilityScoreType::Strength),
                AbilityBoostChoice::predetermined(AbilityScoreType::Constitution),
                AbilityBoostChoice::free(),
            ],
        );
        let background = Background::new("Bob".to_string(), "".to_string(), vec![]);
        let character = Character::new(
            "Bob",
            &class,
            &ancestry,
            &vec![AbilityScoreType::Dexterity],
            &background,
            &vec![],
            &vec![
                AbilityScoreType::Strength,
                AbilityScoreType::Constitution,
                AbilityScoreType::Dexterity,
                AbilityScoreType::Wisdom,
            ],
        )
        .unwrap();
        assert_eq!(character.speed(), ancestry.speed());
        assert_eq!(character.size(), ancestry.size());
        assert_eq!(character.strength().value(), 16);
        assert_eq!(character.dexterity().value(), 14);
        assert_eq!(character.constitution().value(), 14);
        assert_eq!(character.intelligence().value(), 10);
        assert_eq!(character.wisdom().value(), 12);
        assert_eq!(character.charisma().value(), 10);
    }

    // Background boosts (one restricted to STR/CON, one free) stack on top
    // of the class, ancestry, and extra boosts.
    #[test]
    fn background_stats() {
        let class = Class::new("Bob".to_string(), AbilityScoreType::Strength, 10);
        let ancestry = Ancestry::new(
            "Bob".to_string(),
            8,
            Size::Medium,
            30,
            vec![
                AbilityBoostChoice::predetermined(AbilityScoreType::Strength),
                AbilityBoostChoice::predetermined(AbilityScoreType::Constitution),
                AbilityBoostChoice::free(),
            ],
        );
        let background = Background::new(
            "Bob".to_string(),
            "".to_string(),
            vec![
                AbilityBoostChoice::restricted(hashset![
                    AbilityScoreType::Strength,
                    AbilityScoreType::Constitution,
                ]),
                AbilityBoostChoice::free(),
            ],
        );
        let character = Character::new(
            "Bob",
            &class,
            &ancestry,
            &vec![AbilityScoreType::Dexterity],
            &background,
            &vec![AbilityScoreType::Strength, AbilityScoreType::Constitution],
            &vec![
                AbilityScoreType::Strength,
                AbilityScoreType::Constitution,
                AbilityScoreType::Dexterity,
                AbilityScoreType::Wisdom,
            ],
        )
        .unwrap();
        assert_eq!(character.strength().value(), 18);
        assert_eq!(character.dexterity().value(), 14);
        assert_eq!(character.constitution().value(), 16);
        assert_eq!(character.intelligence().value(), 10);
        assert_eq!(character.wisdom().value(), 12);
        assert_eq!(character.charisma().value(), 10);
    }

    // Leveling adds one class hp_increment to max HP: 8 + 10 + 10 = 28.
    #[test]
    fn level_up() {
        let class = Class::new("Bob".to_string(), AbilityScoreType::Strength, 10);
        let ancestry = Ancestry::new(
            "Bob".to_string(),
            8,
            Size::Medium,
            30,
            vec![
                AbilityBoostChoice::predetermined(AbilityScoreType::Strength),
                AbilityBoostChoice::predetermined(AbilityScoreType::Constitution),
                AbilityBoostChoice::free(),
            ],
        );
        let background = Background::new(
            "Bob".to_string(),
            "".to_string(),
            vec![
                AbilityBoostChoice::restricted(hashset![
                    AbilityScoreType::Strength,
                    AbilityScoreType::Constitution,
                ]),
                AbilityBoostChoice::free(),
            ],
        );
        let mut character = Character::new(
            "Bob",
            &class,
            &ancestry,
            &vec![AbilityScoreType::Dexterity],
            &background,
            &vec![AbilityScoreType::Strength, AbilityScoreType::Constitution],
            &vec![
                AbilityScoreType::Strength,
                AbilityScoreType::Constitution,
                AbilityScoreType::Dexterity,
                AbilityScoreType::Wisdom,
            ],
        )
        .unwrap();
        character.level_up();
        assert_eq!(character.level(), 2);
        assert_eq!(character.hp().max(), 28);
    }
}
|
/// Errors produced by checked integer division.
#[derive(Debug, PartialEq, Eq)]
pub enum DivisionError {
    NotDivisible(NotDivisibleError),
    DivideByZero,
}

/// Details of a division that would leave a remainder.
#[derive(Debug, PartialEq, Eq)]
pub struct NotDivisibleError {
    dividend: i32,
    divisor: i32,
}

// This function should calculate `a` divided by `b` if `a` is
// evenly divisible by b.
// Otherwise, it should return a suitable error.
pub fn divide(a: i32, b: i32) -> Result<i32, DivisionError> {
    // Single match with guards instead of an if/else-if chain.
    match b {
        0 => Err(DivisionError::DivideByZero),
        divisor if a % divisor == 0 => Ok(a / divisor),
        divisor => Err(DivisionError::NotDivisible(NotDivisibleError {
            dividend: a,
            divisor,
        })),
    }
}
/// Divides each sample number by 27 and prints the results.
fn main() {
    let numbers = vec![27, 297, 38502, 81];
    // `map` is lazy and the `Map` adaptor has no `iter()` method (the
    // original code did not compile); collect into a Vec so the results
    // can be printed and iterated again.
    let division_results: Vec<_> = numbers.into_iter().map(|n| divide(n, 27)).collect();
    println!("{:?}", division_results);
    let x = division_results.iter();
    println!("{}", format!("{:?}", x));
}
|
//! Exposes the ``name!()`` macro, which can be used to assign type names to values. This can be
//! used to implement in part the "Ghosts of Departed Proofs" pattern. Be very careful using this
//! crate, and see the README for some caveats.
use std::marker::PhantomData;
#[macro_use]
extern crate derivative;
/// An owned value which has name Name. This type
/// is unique to this Named, so it can be used to
/// enforce compile-time coherency
#[derive(Derivative)]
#[derivative(Debug(bound="T: std::fmt::Debug"), PartialEq(bound="T: PartialEq"), Eq(bound="T: Eq"), Hash(bound="T: std::hash::Hash"), PartialOrd(bound="T: PartialOrd"), Ord(bound="T: Ord"))]
pub struct Named<T, Name> {
    // The wrapped value.
    inner: T,
    // Zero-sized marker carrying the name type; never stored at runtime.
    _phantom: PhantomData<Name>,
}
/// Create a named value. You probably don't want to use
/// this, but instead want to use the name!() macro that
/// calls this.
///
/// # Safety
/// The caller must ensure `Name` is used to tag exactly one value; the
/// compile-time coherency guarantees of [`Named`] rely on that uniqueness
/// (the `name!()` macro upholds this by minting a fresh type per invocation).
pub unsafe fn name<Name, T>(val: T) -> Named<T, Name> {
    Named {
        inner: val,
        _phantom: PhantomData,
    }
}
impl<T, Name> Named<T, Name> {
    /// Consumes the wrapper and returns the inner value.
    pub fn unname(self) -> T {
        self.inner
    }

    /// Borrows the inner value without removing the name.
    pub fn unname_ref(&self) -> &T {
        &self.inner
    }

    /// # Safety
    /// Must uphold whatever invariant the Named protects
    pub unsafe fn unname_ref_mut(&mut self) -> &mut T {
        &mut self.inner
    }
}
/// Create a ``Named`` with a given value. This will create an anonymous type that's unique to the
/// macro invokation. Note that since this gives a type that's impossible to name properly, use
/// wildcards and generics when passing them around.
///
/// # Examples
///
/// ```
/// # use type_name_value::*;
/// let x: Named<u32, _> = name!(5);
/// ```
///
/// ```
/// # use type_name_value::*;
/// fn is_five<Name>(val: Named<u32, Name>) -> bool {
///     *val.unname_ref() == 5
/// }
/// ```
#[macro_export]
macro_rules! name {
    ($val:expr) => {{
        // A fresh struct per expansion: this is the anonymous "name" type.
        struct UniqueName {};
        unsafe {
            // Nothing else is named UniqueName because
            // we just defined it
            name::<UniqueName, _>($val)
        }
    }}
}
|
use token::{Token, Type};
/// A syntax-tree node: an optional token value plus ordered children.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct Ast {
    // `None` marks a "null" placeholder node (see `Ast::new_null`).
    pub node_val: Option<Token>,
    child_nodes: Vec<Ast>
}
/// Child-manipulation operations on an [`Ast`] node.
pub trait AstTrait {
    /// Appends a child node at the end.
    fn push_child(&mut self, ast: Ast);
    /// Removes and returns the last child, if any.
    fn pop_child(&mut self) -> Option<Ast>;
    /// Removes and returns the child at `index` (extracts, does not borrow).
    fn get_child(&mut self, index: usize) -> Option<Ast>;
    /// Inserts `child` at `index`; returns whether the insert happened.
    fn insert_child(&mut self, child: Ast, index: usize) -> bool;
    /// Number of direct children.
    fn child_count(&self) -> usize;
    /// Removes and returns all children, leaving the node childless.
    fn dump_children(&mut self) -> Vec<Ast>;
    /// Returns a clone of all children, leaving the node unchanged.
    fn clone_children(&self) -> Vec<Ast>;
}
impl AstTrait for Ast {
    /// Appends `ast` as the last child.
    fn push_child(&mut self, ast: Ast) {
        self.child_nodes.push(ast);
    }

    /// Removes and returns the last child, if any.
    fn pop_child(&mut self) -> Option<Ast> {
        self.child_nodes.pop()
    }

    /// Removes and returns the child at `index`, or `None` when out of bounds.
    fn get_child(&mut self, index: usize) -> Option<Ast> {
        // Plain if/else instead of matching on a bool.
        if index < self.child_nodes.len() {
            Some(self.child_nodes.remove(index))
        } else {
            None
        }
    }

    /// Inserts `child` at `index`; returns `false` when `index` is past the end.
    fn insert_child(&mut self, child: Ast, index: usize) -> bool {
        if index <= self.child_nodes.len() {
            self.child_nodes.insert(index, child);
            true
        } else {
            false
        }
    }

    fn child_count(&self) -> usize {
        self.child_nodes.len()
    }

    /// Moves all children out without cloning (`mem::take` leaves an empty
    /// Vec behind), replacing the previous clone-then-clear implementation.
    fn dump_children(&mut self) -> Vec<Ast> {
        std::mem::take(&mut self.child_nodes)
    }

    fn clone_children(&self) -> Vec<Ast> {
        self.child_nodes.clone()
    }
}
impl Ast {
    /// Creates a node holding `node` with no children.
    pub fn new(node: Token) -> Self {
        Ast {
            node_val: Some(node),
            child_nodes: Vec::new()
        }
    }

    /// Creates an empty ("null") node with no token and no children.
    pub fn new_null() -> Self {
        Ast {
            node_val: None,
            child_nodes: Vec::new()
        }
    }

    /// Returns true when the first child holds a `Func` token.
    ///
    /// The previous implementation removed child 0 and re-inserted it,
    /// shifting the whole sibling vector twice; inspecting it in place is
    /// equivalent and avoids both shifts. The `&mut self` receiver is kept
    /// for backward compatibility with existing callers.
    pub fn is_function(&mut self) -> bool {
        match self.child_nodes.first() {
            Some(child) => match child.node_val {
                Some(ref tok) => matches!(tok.tok_type, Type::Func),
                None => false,
            },
            None => false,
        }
    }
}
|
use kuragecc::ast::visualize_ast;
use kuragecc::codegen::CodeGenerator;
use kuragecc::error::VisualizeError;
use kuragecc::lexer::Lexer;
use kuragecc::parser::Parser;
use kuragecc::semantics::SemanticAnalyzer;
use std::fs;
/// Entry point: compiles each enabled example file in turn.
/// Paths are kept commented out so individual fixtures can be
/// toggled on while developing.
fn main() {
    let paths = vec![
        // "example/main0.tmpc",
        // "example/main1.tmpc",
        // "example/main2.tmpc",
        // "example/main3.tmpc",
        // "example/main4.tmpc",
        // "example/main5.tmpc",
        // "example/main6.tmpc",
        // "example/main7.tmpc",
        // "example/main8.tmpc",
        // "example/main9.tmpc",
        // "example/main10.tmpc",
        // "example/main11.tmpc",
        "example/main12.tmpc",
        // "example/lexer_error.tmpc",
        // "example/parser_error0.tmpc",
        // "example/parser_error1.tmpc",
        // "example/parser_error2.tmpc",
        // "example/parser_error3.tmpc",
        // "example/parser_error4.tmpc",
        // "example/parser_error5.tmpc",
        // "example/parser_error6.tmpc",
        // "example/parser_error7.tmpc",
        // "example/parser_error8.tmpc",
        // "example/parser_error9.tmpc",
        // "example/parser_error10.tmpc",
        // "example/parser_error11.tmpc",
        // "example/parser_error12.tmpc",
        // "example/parser_error13.tmpc",
        // "example/parser_error14.tmpc",
        // "example/parser_error15.tmpc",
        // "example/parser_error16.tmpc",
        // "example/parser_error17.tmpc",
        // "example/semantics_error0.tmpc",
        // "example/semantics_error1.tmpc",
        // "example/semantics_error2.tmpc",
        // "example/semantics_error3.tmpc",
    ];
    for path in paths {
        println!("=> {}\n", path);
        compile(path);
    }
}
fn compile(path: &str) {
let code = fs::read_to_string(path).expect("File Input Error");
println!("```\n{}```\n", code);
// Lexer
let mut lexer = Lexer::new(&code);
let tokens = match lexer.tokenize() {
Ok(tokens) => {
if !tokens.is_empty() {
tokens
} else {
return;
}
}
Err(errors) => {
for e in errors {
e.visualize_error(code.as_str());
}
return;
}
};
if std::env::var("RUST_BACKTRACE").is_ok() {
for token in tokens.clone() {
println!("{}", token);
}
println!();
}
// Parser
let mut parser = Parser::new(tokens.clone());
let ast = match parser.parse() {
Ok(ast) => ast,
Err(error) => {
error.visualize_error(code.as_str());
return;
}
};
if std::env::var("RUST_BACKTRACE").is_ok() {
println!("=== After parse ===\n");
visualize_ast(ast.clone());
println!();
}
// Semantic Analyzer
let mut analyzer = SemanticAnalyzer::new();
let ast = match analyzer.semantic_analyze(ast.clone()) {
Ok(new_ast) => new_ast,
Err(errors) => {
for e in errors {
e.visualize_error(code.as_str());
}
return;
}
};
if std::env::var("RUST_BACKTRACE").is_ok() {
println!("=== After semantic-analyze ===\n");
visualize_ast(ast.clone());
println!();
}
// Code Generator
let mut generator = CodeGenerator::new(ast);
let asm = generator.gen_assembly();
if std::env::var("RUST_BACKTRACE").is_ok() {
println!("{}", asm);
}
println!("{}", asm.to_assembly());
}
|
use crate::engine::basic_types::*;
use crate::engine::*;
/// Plain transform/state data for an element; cloneable so components can
/// return a modified copy from their update hook.
#[derive(Clone, Debug)]
pub struct ElementData {
    // Inactive elements are skipped by draw/update.
    pub active: bool,
    pub position: Rect,
    pub rotation: f64,
}
/// A scene element: shared data plus an ordered list of behavior components.
pub struct Element {
    pub data: ElementData,
    // Components are invoked in insertion order during draw/update.
    pub components: Vec<Box<dyn Component>>,
}
impl Element {
    /// Creates an element at (x, y) with the given size and rotation.
    /// Elements start inactive; callers must activate them explicitly.
    pub fn new(x: i32, y: i32, size_x: u32, size_y: u32, rotation: f64) -> Element {
        Element {
            data: ElementData {
                active: false,
                position: Rect::new(x, y, size_x, size_y),
                // Field-init shorthand replaces the redundant `rotation: rotation`.
                rotation,
            },
            components: Vec::new(),
        }
    }

    /// Attaches a behavior component; it runs after any existing components.
    pub fn add_component(&mut self, component: Box<dyn Component>) {
        self.components.push(component);
    }
}
impl Drawable for Element {
    /// Draws every component in order; inactive elements are skipped and the
    /// first component error aborts the pass.
    fn draw(&self, renderer: &mut dyn Renderer) -> Result<(), String> {
        // Idiomatic negation instead of `== false`.
        if !self.data.active {
            return Ok(());
        }
        for component in self.components.iter() {
            component.on_draw(self, renderer)?;
        }
        Ok(())
    }
}
impl Updatable for Element {
    /// Updates every component in order; a component may return replacement
    /// element data, which overwrites `self.data` before the next component
    /// runs. Inactive elements are skipped.
    fn update(&mut self, events: &Vec<Event>) -> Result<(), String> {
        // Idiomatic negation instead of `== false`.
        if !self.data.active {
            return Ok(());
        }
        for component in self.components.iter() {
            // `if let` replaces the match with an empty catch-all arm.
            if let Some(data) = component.on_update(self, events)? {
                self.data = data;
            }
        }
        Ok(())
    }
}
|
use postgres::{Client, NoTls};
use serde::{Deserialize, Serialize};
/// Token claims carried in auth tokens (JWT-style `exp`/`sub` fields).
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    // Identifier embedded in the token — presumably a user or session id; confirm against the issuer.
    pub id: String,
    // Expiration time — per JWT convention this is seconds since epoch; TODO confirm units.
    pub exp: usize,
    // Subject of the token.
    pub sub: String,
}
/// Opens a synchronous, non-TLS connection to the event-store database.
///
/// The connection string is looked up in the global `SECRETS` map; when the
/// key is missing an empty string is passed, which will surface as a connect
/// error rather than a panic.
pub fn get_event_store_db_connection() -> Result<Client, postgres::Error> {
    Client::connect(
        crate::SECRETS
            .get("event_store_connection_string")
            .map_or(&"", |s| &s),
        NoTls,
    )
}
/// Opens a synchronous, non-TLS connection to the order-query database.
///
/// Mirrors `get_event_store_db_connection`: a missing `SECRETS` key falls
/// back to an empty connection string and fails at connect time.
pub fn get_order_query_db_connection() -> Result<Client, postgres::Error> {
    Client::connect(
        crate::SECRETS
            .get("order_query_connection_string")
            .map_or(&"", |s| &s),
        NoTls,
    )
}
|
// Copyright 2015 The Rust-Windows Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Expands to an early-return handler for a single window message: each arm
/// matches one `WM_*` symbol, decodes the WPARAM/LPARAM payload, and forwards
/// it to the corresponding `on_*` method on `$self_`. The `ANY` arm delegates
/// to a catch-all `on_message` hook.
#[macro_export]
macro_rules! wnd_proc_thunk(
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_CREATE) => (
        if $msg == 0x0001 { // WM_CREATE
            // LPARAM carries a pointer to the CREATESTRUCTW for this window.
            let cs = unsafe {
                let pcs = ::std::mem::transmute::<::winapi::LPARAM,
                                                  *const ::winapi::CREATESTRUCTW>($l);
                &(*pcs)
            };
            let ret = $self_.on_create(cs);
            // Returning -1 from WM_CREATE aborts window creation.
            if ret {
                return 0 as ::winapi::LRESULT;
            } else {
                return -1 as ::winapi::LRESULT;
            }
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_DESTROY) => (
        if $msg == 0x0002 { // WM_DESTROY
            $self_.on_destroy();
            return 0 as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_SIZE) => (
        if $msg == 0x0005 { // WM_SIZE
            // LOWORD = new width, HIWORD = new height.
            let l = $l as u32;
            let width = (l & 0xFFFF) as isize;
            let height = (l >> 16) as isize;
            $self_.on_size(width, height);
            return 0 as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_SETFOCUS) => (
        if $msg == 0x0007 { // WM_SETFOCUS
            // WPARAM is the HWND of the window losing focus.
            let w = ::windows::window::Window { wnd: $w as ::winapi::HWND };
            $self_.on_focus(w);
            return 0 as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_PAINT) => (
        if $msg == 0x000F { // WM_PAINT
            $self_.on_paint();
            return 0 as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_LBUTTONDOWN) => (
        if $msg == 0x0201 { // WM_LBUTTONDOWN
            // LOWORD = cursor x, HIWORD = cursor y; WPARAM holds key-state flags.
            let l = $l as u32;
            let x = (l & 0xFFFF) as isize;
            let y = (l >> 16) as isize;
            let flags = $w as u32;
            $self_.on_left_button_down(x, y, flags);
            return 0 as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_LBUTTONUP) => (
        if $msg == 0x0202 { // WM_LBUTTONUP
            let l = $l as u32;
            let x = (l & 0xFFFF) as isize;
            let y = (l >> 16) as isize;
            let flags = $w as u32;
            $self_.on_left_button_up(x, y, flags);
            return 0 as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_KEYDOWN) => (
        if $msg == 0x0100 { // WM_KEYDOWN
            return $self_.on_key_down($w as u8, $l as u32) as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_KEYUP) => (
        if $msg == 0x0101 { // WM_KEYUP
            return $self_.on_key_up($w as u8, $l as u32) as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, WM_ERASEBKGND) => (
        if $msg == 0x0014 { // WM_ERASEBKGND
            // Returning 1 means that the background no longer needs erasing.
            return $self_.on_erase_background() as ::winapi::LRESULT;
        }
    );
    ($self_:ident, $msg:ident, $w:ident, $l:ident, ANY) => (
        if let Some(result) = $self_.on_message($msg, $w, $l) {
            return result;
        }
    );
);
/// Implements `::windows::window::WindowImpl` for `$wnd`, dispatching each
/// listed `WM_*` message through `wnd_proc_thunk!` and falling back to the
/// default window procedure for anything unhandled.
#[macro_export]
macro_rules! wnd_proc(
    ($wnd:ident, $win:ident, $($msg:ident),+) => (
        impl ::windows::window::WindowImpl for $wnd {
            fn wnd<'a>(&'a self) -> &'a ::windows::window::Window {
                &self.$win
            }
            fn wnd_mut<'a>(&'a mut self) -> &'a mut ::windows::window::Window {
                &mut self.$win
            }
            fn wnd_proc(&self, msg: ::winapi::UINT, w: ::winapi::WPARAM,
                        l: ::winapi::LPARAM) -> ::winapi::LRESULT {
                // Each thunk early-returns when it handles `msg`.
                $(
                    wnd_proc_thunk!(self, msg, w, l, $msg);
                )+
                ::windows::def_window_proc(self.wnd().wnd, msg, w, l)
            }
        }
    )
);
|
use crate::computer::*;
use crate::human::*;
use crate::player::Player;
use rand::seq::SliceRandom;
pub const CARDS: [u8; 13] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10];
/// Game state: one human player versus three computer opponents.
pub struct GameControl {
    human: HumanPlayer,
    computers: [ComputerPlayer; 3],
}
impl GameControl {
    /// Builds a game with the named human and three computer opponents.
    pub fn create_game(human_name: &str) -> Self {
        let comps = [
            ComputerPlayer::new("Player 1"),
            ComputerPlayer::new("Player 2"),
            ComputerPlayer::new("Player 3"),
        ];
        GameControl {
            human: HumanPlayer::new(human_name),
            computers: comps,
        }
    }

    /// Deals one visible and one hidden card to every player.
    pub fn deal(&mut self) {
        self.human.take_visible_card(next_card());
        self.human.take_hidden_card(next_card());
        // Iterate directly instead of indexing `0..3`.
        for computer in self.computers.iter_mut() {
            computer.take_visible_card(next_card());
            computer.take_hidden_card(next_card());
        }
    }
}
/// Draws a random card value from `CARDS` (with replacement — the const
/// array is never consumed, so this does not model a finite deck).
pub fn next_card() -> u8 {
    // `choose` on a non-empty const array never returns None.
    *CARDS.choose(&mut rand::thread_rng()).unwrap()
}
pub fn run_game() {
println!("Weeeeelcome to the game of 21,");
println!("You'll play against 3 other players (computers). ");
println!("Try to get as close to 21 as possible, without going over.");
println!("what is your name? > ");
let mut name = String::new();
let _ = std::io::stdin()
.read_line(&mut name)
.unwrap();
let mut game = GameControl::create_game(&name);
game.deal();
} |
use std::sync::atomic::compiler_fence;
use std::sync::atomic::Ordering;
#[link(name = "dmb")]
extern "C" {
    /// Barrier routine from the external `dmb` library — presumably emits a
    /// hardware data-memory-barrier instruction (e.g. ARM `DMB`); confirm
    /// against the linked library.
    pub fn __dmb();
}
/// Full data memory barrier.
///
/// The surrounding `compiler_fence(SeqCst)` calls stop the compiler from
/// reordering memory accesses across the barrier; the `__dmb()` call
/// provides the hardware-level ordering. Inlined always so the fences stay
/// adjacent to the caller's accesses.
#[inline(always)]
pub fn dmb() {
    compiler_fence(Ordering::SeqCst);
    // SAFETY: `__dmb` takes no arguments and returns nothing; it is assumed
    // to only emit a barrier instruction — TODO confirm its contract.
    unsafe {
        __dmb();
    }
    compiler_fence(Ordering::SeqCst);
}
|
#![cfg(test)]
use super::*;
use crate::physics::single_chain::test::Parameters;
mod base
{
    //! Construction tests: every parameter handed to `WLC::init` must be
    //! stored unchanged on the returned model.
    use super::*;
    use rand::Rng;
    /// `init` with the reference parameters must not panic.
    #[test]
    fn init()
    {
        let parameters = Parameters::default();
        let _ = WLC::init(parameters.number_of_links_minimum, parameters.link_length_reference, parameters.hinge_mass_reference, parameters.persistance_length_reference);
    }
    /// A randomly drawn number of links is stored verbatim.
    #[test]
    fn number_of_links()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            assert_eq!(number_of_links, WLC::init(number_of_links, parameters.link_length_reference, parameters.hinge_mass_reference, parameters.persistance_length_reference).number_of_links);
        }
    }
    /// A randomly drawn link length is stored verbatim (exact f64 equality).
    #[test]
    fn link_length()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            assert_eq!(link_length, WLC::init(parameters.number_of_links_minimum, link_length, parameters.hinge_mass_reference, parameters.persistance_length_reference).link_length);
        }
    }
    /// A randomly drawn hinge mass is stored verbatim (exact f64 equality).
    #[test]
    fn hinge_mass()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            assert_eq!(hinge_mass, WLC::init(parameters.number_of_links_minimum, parameters.link_length_reference, hinge_mass, parameters.persistance_length_reference).hinge_mass);
        }
    }
    /// A randomly drawn persistance length is stored verbatim.
    #[test]
    fn persistance_length()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            assert_eq!(persistance_length, WLC::init(parameters.number_of_links_minimum, parameters.link_length_reference, parameters.hinge_mass_reference, persistance_length).persistance_length);
        }
    }
    /// All four parameters randomized at once are each stored verbatim.
    #[test]
    fn all_parameters()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            assert_eq!(number_of_links, model.number_of_links);
            assert_eq!(link_length, model.link_length);
            assert_eq!(hinge_mass, model.hinge_mass);
            assert_eq!(persistance_length, model.persistance_length);
        }
    }
}
mod nondimensional
{
    //! Consistency between dimensional free energies and their
    //! nondimensional counterparts: the dimensional value divided by
    //! k_B*T must equal the nondimensional one, within tolerances.
    use super::*;
    use rand::Rng;
    /// Gibbs free energy / (k_B*T) matches its nondimensional form.
    #[test]
    fn gibbs_free_energy()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_gibbs_free_energy = model.nondimensional_gibbs_free_energy(&nondimensional_end_to_end_length_per_link, &temperature);
            // Dimensional end-to-end length corresponding to the same
            // nondimensional per-link value.
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let gibbs_free_energy = model.gibbs_free_energy(&end_to_end_length, &temperature);
            let residual_abs = &gibbs_free_energy/BOLTZMANN_CONSTANT/temperature - &nondimensional_gibbs_free_energy;
            let residual_rel = &residual_abs/&nondimensional_gibbs_free_energy;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Per-link Gibbs free energy / (k_B*T) matches its nondimensional form.
    #[test]
    fn gibbs_free_energy_per_link()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_gibbs_free_energy_per_link = model.nondimensional_gibbs_free_energy_per_link(&nondimensional_end_to_end_length_per_link, &temperature);
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let gibbs_free_energy_per_link = model.gibbs_free_energy_per_link(&end_to_end_length, &temperature);
            let residual_abs = &gibbs_free_energy_per_link/BOLTZMANN_CONSTANT/temperature - &nondimensional_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&nondimensional_gibbs_free_energy_per_link;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Relative Gibbs free energy / (k_B*T) matches its nondimensional form.
    #[test]
    fn relative_gibbs_free_energy()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_relative_gibbs_free_energy = model.nondimensional_relative_gibbs_free_energy(&nondimensional_end_to_end_length_per_link);
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let relative_gibbs_free_energy = model.relative_gibbs_free_energy(&end_to_end_length, &temperature);
            let residual_abs = &relative_gibbs_free_energy/BOLTZMANN_CONSTANT/temperature - &nondimensional_relative_gibbs_free_energy;
            let residual_rel = &residual_abs/&nondimensional_relative_gibbs_free_energy;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Per-link relative Gibbs free energy / (k_B*T) matches its
    /// nondimensional form.
    #[test]
    fn relative_gibbs_free_energy_per_link()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_relative_gibbs_free_energy_per_link = model.nondimensional_relative_gibbs_free_energy_per_link(&nondimensional_end_to_end_length_per_link);
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let relative_gibbs_free_energy_per_link = model.relative_gibbs_free_energy_per_link(&end_to_end_length, &temperature);
            let residual_abs = &relative_gibbs_free_energy_per_link/BOLTZMANN_CONSTANT/temperature - &nondimensional_relative_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&nondimensional_relative_gibbs_free_energy_per_link;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
}
mod per_link
{
    //! Consistency between totals and per-link quantities: each total
    //! free energy divided by the number of links must equal the
    //! corresponding per-link method, within tolerances.
    use super::*;
    use rand::Rng;
    /// Total Gibbs free energy / N matches the per-link method.
    #[test]
    fn gibbs_free_energy()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let gibbs_free_energy = model.gibbs_free_energy(&end_to_end_length, &temperature);
            let gibbs_free_energy_per_link = model.gibbs_free_energy_per_link(&end_to_end_length, &temperature);
            let residual_abs = &gibbs_free_energy/(number_of_links as f64) - &gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&gibbs_free_energy_per_link;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Total relative Gibbs free energy / N matches the per-link method.
    #[test]
    fn relative_gibbs_free_energy()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let relative_gibbs_free_energy = model.relative_gibbs_free_energy(&end_to_end_length, &temperature);
            let relative_gibbs_free_energy_per_link = model.relative_gibbs_free_energy_per_link(&end_to_end_length, &temperature);
            let residual_abs = &relative_gibbs_free_energy/(number_of_links as f64) - &relative_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&relative_gibbs_free_energy_per_link;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Nondimensional total / N matches the nondimensional per-link method.
    #[test]
    fn nondimensional_gibbs_free_energy()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_gibbs_free_energy = model.nondimensional_gibbs_free_energy(&nondimensional_end_to_end_length_per_link, &temperature);
            let nondimensional_gibbs_free_energy_per_link = model.nondimensional_gibbs_free_energy_per_link(&nondimensional_end_to_end_length_per_link, &temperature);
            let residual_abs = &nondimensional_gibbs_free_energy/(number_of_links as f64) - &nondimensional_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&nondimensional_gibbs_free_energy_per_link;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Nondimensional relative total / N matches the per-link method.
    #[test]
    fn nondimensional_relative_gibbs_free_energy()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_relative_gibbs_free_energy = model.nondimensional_relative_gibbs_free_energy(&nondimensional_end_to_end_length_per_link);
            let nondimensional_relative_gibbs_free_energy_per_link = model.nondimensional_relative_gibbs_free_energy_per_link(&nondimensional_end_to_end_length_per_link);
            let residual_abs = &nondimensional_relative_gibbs_free_energy/(number_of_links as f64) - &nondimensional_relative_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&nondimensional_relative_gibbs_free_energy_per_link;
            assert!(residual_abs.abs() <= parameters.abs_tol);
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
}
mod relative
{
    //! Consistency of the "relative" methods: f(x) - f(x0) evaluated via
    //! the absolute methods must equal the dedicated relative method,
    //! where x0 is the near-zero reference length `ZERO`.
    use super::*;
    use rand::Rng;
    use crate::physics::single_chain::ZERO;
    /// gibbs(x) - gibbs(~0) equals relative_gibbs(x).
    #[test]
    fn gibbs_free_energy()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let gibbs_free_energy = model.gibbs_free_energy(&end_to_end_length, &temperature);
            let gibbs_free_energy_0 = model.gibbs_free_energy(&(ZERO*(number_of_links as f64)*link_length), &temperature);
            let relative_gibbs_free_energy = model.relative_gibbs_free_energy(&end_to_end_length, &temperature);
            let residual_abs = &gibbs_free_energy - &gibbs_free_energy_0 - &relative_gibbs_free_energy;
            // Only the relative residual is checked here; the absolute
            // residual scales with the (large) reference free energy.
            let residual_rel = &residual_abs/&gibbs_free_energy_0;
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Per-link variant of the identity above.
    #[test]
    fn gibbs_free_energy_per_link()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let end_to_end_length = nondimensional_end_to_end_length_per_link*(number_of_links as f64)*link_length;
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let gibbs_free_energy_per_link = model.gibbs_free_energy_per_link(&end_to_end_length, &temperature);
            let gibbs_free_energy_per_link_0 = model.gibbs_free_energy_per_link(&(ZERO*(number_of_links as f64)*link_length), &temperature);
            let relative_gibbs_free_energy_per_link = model.relative_gibbs_free_energy_per_link(&end_to_end_length, &temperature);
            let residual_abs = &gibbs_free_energy_per_link - &gibbs_free_energy_per_link_0 - &relative_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&gibbs_free_energy_per_link_0;
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Nondimensional variant of the identity above.
    #[test]
    fn nondimensional_gibbs_free_energy()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_gibbs_free_energy = model.nondimensional_gibbs_free_energy(&nondimensional_end_to_end_length_per_link, &temperature);
            let nondimensional_gibbs_free_energy_0 = model.nondimensional_gibbs_free_energy(&ZERO, &temperature);
            let nondimensional_relative_gibbs_free_energy = model.nondimensional_relative_gibbs_free_energy(&nondimensional_end_to_end_length_per_link);
            let residual_abs = &nondimensional_gibbs_free_energy - &nondimensional_gibbs_free_energy_0 - &nondimensional_relative_gibbs_free_energy;
            let residual_rel = &residual_abs/&nondimensional_gibbs_free_energy_0;
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
    /// Nondimensional per-link variant of the identity above.
    #[test]
    fn nondimensional_gibbs_free_energy_per_link()
    {
        let parameters = Parameters::default();
        let mut rng = rand::thread_rng();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_end_to_end_length_per_link = parameters.nondimensional_end_to_end_length_per_link_reference + parameters.nondimensional_end_to_end_length_per_link_scale*(0.5 - rng.gen::<f64>());
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let nondimensional_gibbs_free_energy_per_link = model.nondimensional_gibbs_free_energy_per_link(&nondimensional_end_to_end_length_per_link, &temperature);
            let nondimensional_gibbs_free_energy_per_link_0 = model.nondimensional_gibbs_free_energy_per_link(&ZERO, &temperature);
            let nondimensional_relative_gibbs_free_energy_per_link = model.nondimensional_relative_gibbs_free_energy_per_link(&nondimensional_end_to_end_length_per_link);
            let residual_abs = &nondimensional_gibbs_free_energy_per_link - &nondimensional_gibbs_free_energy_per_link_0 - &nondimensional_relative_gibbs_free_energy_per_link;
            let residual_rel = &residual_abs/&nondimensional_gibbs_free_energy_per_link_0;
            assert!(residual_rel.abs() <= parameters.rel_tol);
        }
    }
}
mod zero
{
    //! Limit tests: each relative free energy must vanish (to within a
    //! `ZERO`-scaled bound) when evaluated at the reference length.
    use super::*;
    use rand::Rng;
    use crate::physics::single_chain::ZERO;
    /// relative_gibbs at the reference length is ~0.
    #[test]
    fn relative_gibbs_free_energy()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let relative_gibbs_free_energy_0 = model.relative_gibbs_free_energy(&(ZERO*(number_of_links as f64)*link_length), &temperature);
            // Bound scales with kT and the chain size, matching the
            // dimensional units of the quantity under test.
            assert!(relative_gibbs_free_energy_0.abs() <= BOLTZMANN_CONSTANT*temperature*(number_of_links as f64)*ZERO);
        }
    }
    /// Per-link relative_gibbs at the reference length is ~0.
    #[test]
    fn relative_gibbs_free_energy_per_link()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>());
            let relative_gibbs_free_energy_per_link_0 = model.relative_gibbs_free_energy_per_link(&(ZERO*(number_of_links as f64)*link_length), &temperature);
            assert!(relative_gibbs_free_energy_per_link_0.abs() <= BOLTZMANN_CONSTANT*temperature*ZERO);
        }
    }
    /// Nondimensional relative_gibbs at ZERO is ~0.
    #[test]
    fn nondimensional_relative_gibbs_free_energy()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_relative_gibbs_free_energy_0 = model.nondimensional_relative_gibbs_free_energy(&ZERO);
            assert!(nondimensional_relative_gibbs_free_energy_0.abs() <= (number_of_links as f64)*ZERO);
        }
    }
    /// Nondimensional per-link relative_gibbs at ZERO is ~0.
    #[test]
    fn nondimensional_relative_gibbs_free_energy_per_link()
    {
        let mut rng = rand::thread_rng();
        let parameters = Parameters::default();
        for _ in 0..parameters.number_of_loops
        {
            let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum);
            let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>());
            let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>());
            let persistance_length = parameters.persistance_length_reference + parameters.persistance_length_scale*(0.5 - rng.gen::<f64>());
            let model = WLC::init(number_of_links, link_length, hinge_mass, persistance_length);
            let nondimensional_relative_gibbs_free_energy_per_link_0 = model.nondimensional_relative_gibbs_free_energy_per_link(&ZERO);
            assert!(nondimensional_relative_gibbs_free_energy_per_link_0.abs() <= ZERO);
        }
    }
}
// run simulate_nodes and main app to debug
#![allow(unused_variables)]
#[macro_use]
extern crate diesel;
use std::collections::{HashMap};
use mosquitto_client::{Mosquitto};
use std::io::{Error, ErrorKind};
use std::time::Duration;
use std::thread;
use std::cell::RefCell;
use env_logger::Env;
use log::{debug, info, warn};
#[path = "../config.rs"]
pub mod config;
#[path = "../helper.rs"]
pub mod helper;
#[path = "../zone.rs"]
pub mod zone;
#[path = "../repository.rs"]
pub mod repository;
#[path = "../schema.rs"]
pub mod schema;
use crate::config::{load_config, ControlNodes, Settings};
/// Publish a zone's current temperature reading to the broker.
/// Returns `true` on success, `false` (after logging) on failure.
fn send_temperature(client: &Mosquitto, namespace: &str, name: &str, pin: u8, value: f32) -> bool
{
    let topic = format!("{namespace}/nodes/{name}/current/temperature/{pin}", namespace=namespace, name=name, pin=pin);
    let payload = format!("{}", value);
    // QoS 1, non-retained — matches every other publish in this tool.
    match client.publish(&topic, payload.as_bytes(), 1, false) {
        Ok(_) => {
            debug!("Sent temperature: {} {}", topic, value);
            true
        }
        Err(v) => {
            warn!("Unable to send temperature to {} {:?}", name, v);
            false
        }
    }
}
/// Publish an analog pin state to the broker.
/// Returns `true` on success, `false` (after logging) on failure.
fn send_pin(client: &Mosquitto, namespace: &str, name: &str, pin: u8, value: &u16) -> bool
{
    let topic = format!("{namespace}/nodes/{name}/current/analog/{pin}", namespace=namespace, name=name, pin=pin);
    let payload = format!("{}", value);
    // QoS 1, non-retained — matches every other publish in this tool.
    match client.publish(&topic, payload.as_bytes(), 1, false) {
        Ok(_) => {
            debug!("Sent pin: {} {}", topic, value);
            true
        }
        Err(v) => {
            warn!("Unable to send data to {} {} {} {:?}", name, topic, value, v);
            false
        }
    }
}
/// Publish the simulated temperature and the recorded pin states for
/// every zone and control pin of every node.
fn send_zones(client: &Mosquitto, config_name: &str, control_nodes: &ControlNodes, pin_states: &HashMap<String, u16>, temperature: f32)
{
    for (node_name, control_node) in control_nodes {
        for (zone_name, zone) in &control_node.zones {
            send_temperature(client, config_name, zone_name, zone.sensor_pin, temperature);
            // Pin states are keyed "<node>_<pin>"; unknown pins report 0.
            let zone_state = *pin_states.get(&format!("{}_{}", node_name, zone.control_pin)).unwrap_or(&0);
            send_pin(client, config_name, node_name, zone.control_pin, &zone_state);
        }
        // A control pin of 0 means the node has no master control pin.
        if control_node.control_pin > 0 {
            let node_state = *pin_states.get(&format!("{}_{}", node_name, control_node.control_pin)).unwrap_or(&0);
            send_pin(client, config_name, node_name, control_node.control_pin, &node_state);
        }
    }
}
/// Simulator entry point: connects to the MQTT broker, subscribes to
/// every node/zone "set" topic, then loops forever publishing simulated
/// pin states and a temperature that bounces between min and max.
fn main() -> Result<(), Error>
{
    // Load configuration before initialising the logger so a config
    // error is reported immediately.
    let (config, control_nodes) = load_config("src/config.yml", 0)?;
    env_logger::from_env(Env::default().default_filter_or("debug")).init();
    let config = Settings::new(config);
    let client = Mosquitto::new("test1");
    client.connect(&config.host(), 1883)
        .map_err(|e| Error::new(ErrorKind::NotConnected, format!("Unable to connect to host: {} {:?}", config.host(), e)))?;
    // Subscribe to each zone's and each node's "set" topic so the main
    // application can drive the simulated state.
    for (node_name, control_node) in &control_nodes {
        for (zone_name, _zone) in &control_node.zones {
            let topic = format!("{main}/nodes/{name}/set/json", main=config.name(), name=zone_name);
            client.subscribe(&topic, 0)
                .map(|a| { info!("Listening to: {}", topic); a })
                .map_err(|e| Error::new(ErrorKind::NotConnected, format!("Unable to subscribe: {} {}", zone_name, e)))?;
        }
        let topic = format!("{main}/nodes/{name}/set/json", main=config.name(), name=node_name);
        client.subscribe(&topic, 0)
            .map(|a| { info!("Listening to: {}", topic); a })
            .map_err(|e| Error::new(ErrorKind::NotConnected, format!("Unable to subscribe: {} {:?}", node_name, e)))?;
    }
    // Latest pin states received from the broker, keyed "<node>_<pin>".
    let states = RefCell::new(HashMap::new());
    let mut m = client.callbacks(());
    m.on_message(|_, msg| {
        debug!("Received: {:?} {}", msg, msg.text());
        // NOTE(review): malformed JSON or missing "pin"/"set" keys will
        // panic here; tolerable for a debug-only simulator, but worth
        // hardening if it is ever run unattended.
        let j = json::parse(msg.text()).unwrap();
        let mut paths: Vec<&str> = msg.topic().split("/").collect();
        // Drop "set"/"json"; the next segment is the node/zone name.
        paths.pop(); paths.pop();
        states.borrow_mut().insert(format!("{}_{}", paths.pop().unwrap_or("none"), j["pin"].as_u8().unwrap()), j["set"].as_u16().unwrap());
    });
    let mut count = 0;
    let max_temp = 22.0;
    let min_temp = 19.0;
    let mut temperature = 19.0;
    let mut increasing = true;
    loop {
        send_zones(&client, &config.name(), &control_nodes, &states.borrow(), temperature);
        println!("{:?}", states.borrow());
        // Pump the MQTT loop ~100 times (about 50 s), reconnecting on
        // any connection error.
        for _ in 0..100 {
            let conn_result = client.do_loop(-1)
                .map_err(|e| Error::new(ErrorKind::NotConnected, format!("Mqtt error {}", e)));
            // BUGFIX: was `!conn_result.is_ok()` — use `is_err()` for
            // clarity (clippy::nonminimal_bool).
            if conn_result.is_err() {
                println!("{:?}", conn_result);
                client.reconnect()
                    .map_err(|e| Error::new(ErrorKind::NotConnected, format!("Mqtt can not reconnect {}", e)))?;
            }
            thread::sleep(Duration::from_millis(500));
        }
        count += 1;
        // Every 50th publish cycle, step the simulated temperature one
        // degree, bouncing between min_temp and max_temp.
        if count % 50 == 0 {
            if temperature >= max_temp {
                increasing = false;
            } else if temperature <= min_temp {
                increasing = true;
            }
            if increasing {
                temperature += 1.0;
            } else {
                temperature -= 1.0;
            }
        }
    }
}
|
mod justification;
mod keyring;
mod mock;
use crate::chain::{Chain, OpaqueExtrinsic};
use crate::grandpa::{verify_justification, AuthoritySet, Error, GrandpaJustification};
use crate::{
initialize, validate_finalized_block, ChainTip, CurrentAuthoritySet, Error as ErrorP,
InitializationData, OldestKnownParent, ValidationCheckPoint,
};
use codec::Encode;
use frame_support::dispatch::DispatchResult;
use frame_support::{assert_err, assert_ok};
use justification::*;
use keyring::*;
use mock::{run_test, ChainId, TestRuntime};
use sp_consensus_grandpa::{ConsensusLog, ScheduledChange, GRANDPA_ENGINE_ID};
use sp_core::Hasher as HasherT;
use sp_runtime::generic::SignedBlock;
use sp_runtime::traits::{BlakeTwo256, Hash, Header};
use sp_runtime::{generic, Digest, DigestItem, DispatchError};
/// Header type used throughout these tests: u32 block numbers hashed
/// with BlakeTwo256.
type TestHeader = generic::Header<u32, BlakeTwo256>;
/// Minimal `Chain` implementation describing the feed chain under test.
struct TestFeedChain;
impl Chain for TestFeedChain {
    type BlockNumber = u32;
    type Hash = <BlakeTwo256 as HasherT>::Out;
    type Header = generic::Header<u32, BlakeTwo256>;
    type Hasher = BlakeTwo256;
}
/// A justification signed by all four authorities over a forked ancestry
/// must verify, and the generator must emit exactly one precommit per
/// authority and one vote per requested ancestor.
#[test]
fn valid_justification_accepted() {
    let authorities = vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1), (DAVE, 1)];
    let params = JustificationGeneratorParams {
        header: test_header(1),
        round: TEST_GRANDPA_ROUND,
        set_id: TEST_GRANDPA_SET_ID,
        authorities: authorities.clone(),
        ancestors: 7,
        forks: 3,
    };
    // Read the one scalar needed after generation so `params` can be
    // moved instead of cloned (the original cloned the whole struct).
    let expected_ancestors = params.ancestors as usize;
    let justification = make_justification_for_header::<TestHeader>(params);
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(1),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &justification,
        ),
        Ok(()),
    );
    assert_eq!(justification.commit.precommits.len(), authorities.len());
    assert_eq!(justification.votes_ancestries.len(), expected_ancestors);
}
/// Five authorities voting along a single ancestry chain must also
/// produce a verifiable justification.
#[test]
fn valid_justification_accepted_with_single_fork() {
    let generator_params = JustificationGeneratorParams {
        forks: 1,
        ancestors: 5,
        authorities: vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1), (DAVE, 1), (EVE, 1)],
        set_id: TEST_GRANDPA_SET_ID,
        round: TEST_GRANDPA_ROUND,
        header: test_header(1),
    };
    let justification = make_justification_for_header::<TestHeader>(generator_params);
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(1),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &justification
        ),
        Ok(()),
    );
}
#[test]
fn valid_justification_accepted_with_arbitrary_number_of_authorities() {
use finality_grandpa::voter_set::VoterSet;
use sp_consensus_grandpa::AuthorityId;
let n = 15;
let authorities = accounts(n).iter().map(|k| (*k, 1)).collect::<Vec<_>>();
let params = JustificationGeneratorParams {
header: test_header(1),
round: TEST_GRANDPA_ROUND,
set_id: TEST_GRANDPA_SET_ID,
authorities: authorities.clone(),
ancestors: n.into(),
forks: n.into(),
};
let authorities = authorities
.iter()
.map(|(id, w)| (AuthorityId::from(*id), *w))
.collect::<Vec<(AuthorityId, _)>>();
let voter_set = VoterSet::new(authorities).unwrap();
assert_eq!(
verify_justification::<TestHeader>(
header_id::<TestHeader>(1),
TEST_GRANDPA_SET_ID,
&voter_set,
&make_justification_for_header::<TestHeader>(params)
),
Ok(()),
);
}
// A justification for header 1 must not verify against header 2.
#[test]
fn justification_with_invalid_target_rejected() {
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(2),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &make_default_justification::<TestHeader>(&test_header(1)),
        ),
        Err(Error::InvalidJustificationTarget),
    );
}
// Emptying the precommits leaves the ancestry votes unexplained, so the
// verifier reports them as extra headers.
#[test]
fn justification_with_invalid_commit_rejected() {
    let mut justification = make_default_justification::<TestHeader>(&test_header(1));
    justification.commit.precommits.clear();
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(1),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &justification,
        ),
        Err(Error::ExtraHeadersInVotesAncestries),
    );
}
// Corrupting one precommit signature must fail signature verification.
#[test]
fn justification_with_invalid_authority_signature_rejected() {
    let mut justification = make_default_justification::<TestHeader>(&test_header(1));
    justification.commit.precommits[0].signature =
        sp_core::crypto::UncheckedFrom::unchecked_from([1u8; 64]);
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(1),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &justification,
        ),
        Err(Error::InvalidAuthoritySignature),
    );
}
// An ancestry header that no precommit descends from must be rejected.
#[test]
fn justification_with_invalid_precommit_ancestry() {
    let mut justification = make_default_justification::<TestHeader>(&test_header(1));
    justification.votes_ancestries.push(test_header(10));
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(1),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &justification,
        ),
        Err(Error::ExtraHeadersInVotesAncestries),
    );
}
#[test]
fn justification_is_invalid_if_we_dont_meet_threshold() {
    // Need at least three authorities to sign off or else the voter set threshold can't be reached
    let authorities = vec![(ALICE, 1), (BOB, 1)];
    let params = JustificationGeneratorParams {
        header: test_header(1),
        round: TEST_GRANDPA_ROUND,
        set_id: TEST_GRANDPA_SET_ID,
        authorities: authorities.clone(),
        ancestors: 2 * authorities.len() as u32,
        forks: 2,
    };
    assert_eq!(
        verify_justification::<TestHeader>(
            header_id::<TestHeader>(1),
            TEST_GRANDPA_SET_ID,
            &voter_set(),
            &make_justification_for_header::<TestHeader>(params)
        ),
        Err(Error::TooLowCumulativeWeight),
    );
}
/// Digest items announcing an immediate (zero-delay) scheduled change to the
/// default authority list; used to seed a chain's initial authority set.
fn valid_digests() -> Vec<DigestItem> {
    let scheduled = ScheduledChange {
        next_authorities: authority_list(),
        delay: 0,
    };
    let log = ConsensusLog::ScheduledChange::<u32>(scheduled);
    vec![DigestItem::Consensus(GRANDPA_ENGINE_ID, log.encode())]
}
/// Initializes `chain_id` with header `number` as the finalized checkpoint
/// (carrying a valid scheduled-change digest), then imports that same block
/// with a default justification. Returns the checkpoint header.
fn init_with_origin(chain_id: ChainId, number: u32) -> Result<TestHeader, DispatchError> {
    let mut best_finalized = test_header::<TestHeader>(number);
    // Attach the authority-set digest so initialization can extract the set.
    valid_digests()
        .into_iter()
        .for_each(|digest| best_finalized.digest_mut().push(digest));
    let init_data = InitializationData {
        best_known_finalized_header: best_finalized.encode(),
        set_id: 1,
    };
    initialize::<TestRuntime, TestFeedChain>(chain_id, init_data.encode().as_slice())?;
    // import block
    assert_ok!(submit_finality_proof(
        chain_id,
        best_finalized.clone(),
        Some(make_default_justification(&best_finalized))
    ));
    Ok(best_finalized)
}
// One opaque extrinsic made of bytes 0..254.
// NOTE(review): validity of extrinsic contents is decided by TestRuntime;
// presumably bytes >= 128 mark the invalid variant below — confirm in mock.
fn valid_extrinsics() -> Vec<OpaqueExtrinsic> {
    vec![(0..255).collect()]
}
// One opaque extrinsic made of bytes 128..254, rejected by block validation.
fn invalid_extrinsics() -> Vec<OpaqueExtrinsic> {
    vec![(128..255).collect()]
}
// Trie root of the valid extrinsics, for headers that commit to them.
fn valid_extrinsics_root<H: Header>() -> H::Hash {
    H::Hashing::ordered_trie_root(
        valid_extrinsics().iter().map(Encode::encode).collect(),
        sp_runtime::StateVersion::V0,
    )
}
/// Builds header `header`, signs it with the default justification and
/// submits it; returns the header on success.
fn submit_valid_finality_proof(chain_id: ChainId, header: u8) -> Result<TestHeader, DispatchError> {
    let header = test_header::<TestHeader>(header.into());
    let justification = make_default_justification(&header);
    submit_finality_proof(chain_id, header.clone(), Some(justification))?;
    Ok(header)
}
/// Wraps `header` (plus the standard valid extrinsics and an optional GRANDPA
/// justification) into a `SignedBlock` and feeds its SCALE encoding to
/// `validate_finalized_block`.
fn submit_finality_proof(
    chain_id: ChainId,
    header: TestHeader,
    maybe_justification: Option<GrandpaJustification<TestHeader>>,
) -> DispatchResult {
    // Justifications are keyed by consensus engine id in the block.
    let justification =
        maybe_justification.map(|justification| (GRANDPA_ENGINE_ID, justification.encode()).into());
    let block = SignedBlock {
        block: generic::Block::<TestHeader, OpaqueExtrinsic> {
            header,
            extrinsics: valid_extrinsics(),
        },
        justifications: justification,
    };
    validate_finalized_block::<TestRuntime, TestFeedChain>(chain_id, block.encode().as_slice())?;
    Ok(())
}
// Initializing at block 0 must set checkpoint, tip and oldest-known-parent
// storage; the parent of genesis is the all-zero hash.
#[test]
fn test_init_storage_entries_are_correctly_initialized_with_genesis() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        assert_eq!(
            CurrentAuthoritySet::<TestRuntime>::get(chain_id).authorities,
            authority_list()
        );
        assert_eq!(
            <ValidationCheckPoint<TestRuntime>>::get(chain_id),
            (0u32.encode(), validation_header.encode())
        );
        assert_eq!(
            <ChainTip<TestRuntime>>::get(chain_id),
            (0u32.encode(), validation_header.hash().encode())
        );
        // since the block 0 is imported already
        // the oldest know parent will be 00000.... with so that block 0 is not imported again
        assert_eq!(
            <OldestKnownParent<TestRuntime>>::get(chain_id),
            (0u32.encode(), [0u8; 32].encode())
        );
    })
}
// Initializing at a non-genesis block (10) must point oldest-known-parent at
// block 9 so that history can still be imported backwards.
#[test]
fn test_init_storage_entries_are_correctly_initialized() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 10).unwrap();
        assert_eq!(
            CurrentAuthoritySet::<TestRuntime>::get(chain_id).authorities,
            authority_list()
        );
        assert_eq!(
            <ValidationCheckPoint<TestRuntime>>::get(chain_id),
            (10u32.encode(), validation_header.encode())
        );
        assert_eq!(
            <OldestKnownParent<TestRuntime>>::get(chain_id),
            (9u32.encode(), test_header::<TestHeader>(9).hash().encode())
        );
        assert_eq!(
            <ChainTip<TestRuntime>>::get(chain_id),
            (10u32.encode(), validation_header.hash().encode())
        );
    })
}
// Blocks 1..9 chained onto genesis import in order, advancing the tip each
// time; re-importing an already-known block is rejected.
#[test]
fn successfully_imports_header_in_forward_direction() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).expect("must not fail to init chain");
        // cannot import block 0 twice
        assert_err!(
            submit_finality_proof(
                chain_id,
                validation_header.clone(),
                Some(make_default_justification(&validation_header))
            ),
            ErrorP::<TestRuntime>::InvalidBlock
        );
        let mut parent_header = validation_header;
        for tip in 1..10 {
            let mut header = test_header::<TestHeader>(tip);
            header.set_parent_hash(parent_header.hash());
            assert_ok!(submit_finality_proof(
                chain_id,
                header.clone(),
                Some(make_default_justification(&header))
            ));
            // Tip must follow every imported block.
            assert_eq!(
                <ChainTip<TestRuntime>>::get(chain_id),
                (tip.encode(), header.hash().encode())
            );
            parent_header = header;
        }
    })
}
// Starting from a mid-chain checkpoint (5), ancestors 4..1 import in reverse
// (moving oldest-known-parent back without touching the tip), then descendants
// 6..9 import forward as usual.
#[test]
fn successfully_imports_parent_headers_in_reverse_and_forward() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 5).expect("must not fail to init chain");
        for parent in (1..=4).rev() {
            assert_ok!(submit_valid_finality_proof(chain_id, parent));
            assert_eq!(
                <OldestKnownParent<TestRuntime>>::get(chain_id),
                (
                    ((parent - 1) as u32).encode(),
                    test_header::<TestHeader>((parent - 1) as u32)
                        .hash()
                        .encode()
                )
            );
            // Reverse imports must leave the tip at the checkpoint.
            assert_eq!(
                <ChainTip<TestRuntime>>::get(chain_id),
                (5u32.encode(), validation_header.hash().encode())
            );
        }
        // import block 0
        assert_ok!(submit_valid_finality_proof(chain_id, 0));
        // cannot import block 0 twice
        assert_err!(
            submit_valid_finality_proof(chain_id, 0),
            ErrorP::<TestRuntime>::InvalidBlock
        );
        let mut parent_header = validation_header;
        for tip in 6..10 {
            let mut header = test_header::<TestHeader>(tip);
            header.set_parent_hash(parent_header.hash());
            assert_ok!(submit_finality_proof(
                chain_id,
                header.clone(),
                Some(make_default_justification(&header))
            ));
            assert_eq!(
                <ChainTip<TestRuntime>>::get(chain_id),
                (tip.encode(), header.hash().encode())
            );
            parent_header = header;
        }
    })
}
// A justification produced under set id 2 while the chain is still on set 1
// must be rejected.
#[test]
fn rejects_justification_that_skips_authority_set_transition() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(validation_header.hash());
        let params = JustificationGeneratorParams::<TestHeader> {
            set_id: 2,
            ..Default::default()
        };
        let justification = make_justification_for_header(params);
        assert_err!(
            submit_finality_proof(chain_id, header, Some(justification)),
            <ErrorP<TestRuntime>>::InvalidJustification
        );
    })
}
// Tampering with the round number invalidates the justification.
#[test]
fn does_not_import_header_with_invalid_finality_proof() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(validation_header.hash());
        let mut justification = make_default_justification(&header);
        justification.round = 42;
        assert_err!(
            submit_finality_proof(chain_id, header, Some(justification)),
            <ErrorP<TestRuntime>>::InvalidJustification
        );
    })
}
// A well-justified block whose body fails extrinsic validation is rejected
// as an invalid block.
#[test]
fn does_not_import_header_with_invalid_extrinsics() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(validation_header.hash());
        let block = SignedBlock {
            block: generic::Block::<TestHeader, OpaqueExtrinsic> {
                header: header.clone(),
                extrinsics: invalid_extrinsics(),
            },
            justifications: Some(
                (
                    GRANDPA_ENGINE_ID,
                    make_default_justification(&header).encode(),
                )
                .into(),
            ),
        };
        assert_err!(
            validate_finalized_block::<TestRuntime, TestFeedChain>(
                chain_id,
                block.encode().as_slice(),
            ),
            <ErrorP<TestRuntime>>::InvalidBlock
        );
    })
}
// An authority set whose weights sum past u64::MAX cannot form a voter set;
// any later finality proof must fail with InvalidAuthoritySet.
#[test]
fn disallows_invalid_authority_set() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let invalid_authority_list = vec![(ALICE.into(), u64::MAX), (BOB.into(), u64::MAX)];
        let mut genesis = test_header::<TestHeader>(0);
        let mut digest: Digest = Default::default();
        digest.push(DigestItem::Consensus(
            GRANDPA_ENGINE_ID,
            ConsensusLog::ScheduledChange::<u32>(ScheduledChange {
                next_authorities: invalid_authority_list,
                delay: 0,
            })
            .encode(),
        ));
        genesis.digest = digest;
        let init_data = InitializationData {
            best_known_finalized_header: genesis.encode(),
            set_id: 1,
        };
        // Initialization itself accepts the header; the set is only used later.
        assert_ok!(initialize::<TestRuntime, TestFeedChain>(
            chain_id,
            init_data.encode().as_slice()
        ));
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(genesis.hash());
        let justification = make_default_justification(&header);
        assert_err!(
            submit_finality_proof(chain_id, header, Some(justification)),
            <ErrorP<TestRuntime>>::InvalidAuthoritySet
        );
    })
}
/// Digest carrying a GRANDPA scheduled-change log that hands authority to
/// ALICE and BOB after `delay` blocks.
fn change_log(delay: u32) -> Digest {
    let scheduled = ScheduledChange {
        next_authorities: vec![(ALICE.into(), 1), (BOB.into(), 1)],
        delay,
    };
    let item = DigestItem::Consensus(
        GRANDPA_ENGINE_ID,
        ConsensusLog::<u32>::ScheduledChange(scheduled).encode(),
    );
    Digest { logs: vec![item] }
}
// Importing a block carrying a zero-delay scheduled change must both advance
// the tip and switch the stored authority set to the new one.
#[test]
fn importing_header_enacts_new_authority_set() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        let next_set_id = 2;
        let next_authorities = vec![(ALICE.into(), 1), (BOB.into(), 1)];
        // Need to update the header digest to indicate that our header signals an authority set
        // change. The change will be enacted when we import our header.
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(validation_header.hash());
        header.digest = change_log(0);
        // Create a valid justification for the header
        let justification = make_default_justification(&header);
        // Let's import our test header
        assert_ok!(submit_finality_proof(
            chain_id,
            header.clone(),
            Some(justification)
        ));
        // Make sure that our header is the best finalized
        assert_eq!(
            <ChainTip<TestRuntime>>::get(chain_id),
            (1u32.encode(), header.hash().encode())
        );
        // Make sure that the authority set actually changed upon importing our header
        assert_eq!(
            <CurrentAuthoritySet<TestRuntime>>::get(chain_id),
            AuthoritySet {
                authorities: next_authorities,
                set_id: next_set_id
            },
        );
    })
}
// Delayed scheduled changes are not supported: a digest with delay > 0 must
// prevent the block from being imported.
#[test]
fn importing_header_rejects_header_with_scheduled_change_delay() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        // Need to update the header digest to indicate that our header signals an authority set
        // change. However, the change doesn't happen until the next block.
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(validation_header.hash());
        header.digest = change_log(1);
        // Create a valid justification for the header
        let justification = make_default_justification(&header);
        // Should not be allowed to import this header
        assert_err!(
            submit_finality_proof(chain_id, header, Some(justification)),
            <ErrorP<TestRuntime>>::UnsupportedScheduledChange
        );
    })
}
/// Digest carrying a GRANDPA *forced* authority-set change (ALICE and BOB)
/// with the given `delay` used both as the median last-finalized number and
/// the change delay.
fn forced_change_log(delay: u32) -> Digest {
    let scheduled = ScheduledChange {
        next_authorities: vec![(ALICE.into(), 1), (BOB.into(), 1)],
        delay,
    };
    let item = DigestItem::Consensus(
        GRANDPA_ENGINE_ID,
        ConsensusLog::<u32>::ForcedChange(delay, scheduled).encode(),
    );
    Digest { logs: vec![item] }
}
// Forced authority-set changes are unsupported and must block the import.
#[test]
fn importing_header_rejects_header_with_forced_changes() {
    run_test(|| {
        let chain_id: ChainId = 1;
        let validation_header = init_with_origin(chain_id, 0).unwrap();
        // Need to update the header digest to indicate that it signals a forced authority set
        // change.
        let mut header = test_header::<TestHeader>(1);
        header.set_parent_hash(validation_header.hash());
        header.digest = forced_change_log(0);
        // Create a valid justification for the header
        let justification = make_default_justification(&header);
        // Should not be allowed to import this header
        assert_err!(
            submit_finality_proof(chain_id, header, Some(justification)),
            <ErrorP<TestRuntime>>::UnsupportedScheduledChange
        );
    })
}
|
use serde_json::{Value};
use crate::ldtab_2_ofn::axiom_translation as axiom_translation;
use crate::ldtab_2_ofn::annotation_translation as annotation_translation;
use crate::util::parser as parser;
/// Given an LDTab ThickTriple (encoded as a string),
/// return its corresponding OFN S-expression encoded as a serde_json::value::Value
///
/// # Examples
///
/// ```
/// use serde_json::{Value};
/// use wiring_rs::ldtab_2_ofn::translation as translation;
/// let thick_triple_string = r#"{"subject": "obo:IAO_0000120",
///                               "predicate": "rdfs:subClassOf",
///                               "object": {"owl:someValuesFrom": [{"object": "obo:OBI_0500000",
///                                                                  "datatype":"_iri",
///                                                                  "meta":null}],
///                                          "rdf:type": [{"object": "owl:Restriction",
///                                                        "datatype":"_iri",
///                                                        "meta":null}],
///                                          "owl:onProperty": [{"object": "obo:BFO_0000050",
///                                                              "datatype":"_iri",
///                                                              "meta":null}]},
///                               "annotation": null,
///                               "assertion":"1",
///                               "graph":"graph",
///                               "retraction":"0",
///                               "datatype":"_iri"}"#;
///
/// let thick_triple = serde_json::from_str(thick_triple_string).unwrap();
///
/// let ofn = translation::thick_triple_2_ofn(&thick_triple);
///
/// let ofn_expected_string =r#"["SubClassOf","obo:IAO_0000120",["SomeValuesFrom","obo:BFO_0000050","obo:OBI_0500000"]]"#;
/// let ofn_expected : Value = serde_json::from_str(ofn_expected_string).unwrap();
///
/// assert_eq!(ofn, ofn_expected);
/// ```
pub fn thick_triple_2_ofn(thick_triple : &Value) -> Value {
    // Translate subject, predicate, object into an OFN S-expression.
    let subject = thick_triple["subject"].to_string();
    let predicate = thick_triple["predicate"].to_string();
    let object = thick_triple["object"].to_string();
    let owl = translate_triple(&subject, &predicate, &object);
    // Translate the triple's annotation (may be empty).
    let annotations = annotation_translation::translate(&thick_triple["annotation"]);
    // Splice annotations between the OFN operator and its operands:
    // [op, annot..., operand...].
    let owl_parts = owl.as_array().unwrap();
    let mut merged = Vec::with_capacity(owl_parts.len() + annotations.len());
    merged.push(owl_parts[0].clone());
    merged.extend(annotations);
    merged.extend(owl_parts[1..].iter().cloned());
    Value::Array(merged)
}
/// Dispatches a parsed (subject, predicate, object) triple to the axiom
/// translation matching its predicate; anything unrecognized is treated as a
/// thin triple and translated verbatim.
fn translate_triple(subject: &str, predicate: &str, object: &str) -> Value {
    // Subject and object may be nested ThickTriple objects; the predicate
    // is always a plain string.
    let subject_json = parser::parse_thick_triple_object(subject);
    let predicate_json = parser::parse_string(predicate); //Assumption: this is a string
    let object_json = parser::parse_thick_triple_object(object);
    match predicate_json.as_str() {
        "rdfs:subClassOf" => axiom_translation::translate_subclass_of_axiom(&subject_json, &object_json),
        "owl:equivalentClass" => axiom_translation::translate_equivalent_class(&subject_json, &object_json),
        "owl:AllDisjointClasses" => axiom_translation::translate_disjoint_classes(&object_json),
        "owl:disjointUnionOf" => axiom_translation::translate_disjoint_union(&subject_json,&object_json),
        "owl:disjointWith" => axiom_translation::translate_disjoint_with(&subject_json, &object_json),
        "rdf:type" => axiom_translation::translate_rdf_type(&subject_json, &object_json),
        "rdfs:domain" => axiom_translation::translate_domain(&subject_json, &object_json),
        "rdfs:range" => axiom_translation::translate_range(&subject_json, &object_json),
        "owl:inverseOf" => axiom_translation:: translate_inverse_object_properties(&subject_json, &object_json),
        "owl:equivalentProperty" => axiom_translation::translate_equivalent_properties(&subject_json, &object_json),
        "owl:propertyDisjointWith" => axiom_translation::translate_property_disjoint_with(&subject_json, &object_json),
        "owl:AllDisjointProperties" => axiom_translation::translate_all_disjoint_properties(&subject_json, &object_json),
        "rdfs:subPropertyOf" => axiom_translation::translate_sub_property_of(&subject_json, &object_json),
        "owl:AllDifferent" => axiom_translation::translate_all_different(&subject_json, &object_json),
        "owl:differentFrom" => axiom_translation::translate_different_from(&subject_json, &object_json),
        "owl:sameAs" => axiom_translation::translate_same_as(&subject_json, &object_json),
        "owl:AllSameAs" => axiom_translation::translate_all_same_as(&subject_json, &object_json),
        "owl:propertyChainAxiom" => axiom_translation::translate_property_chain(&subject_json, &object_json),
        "owl:NegativePropertyAssertion" => axiom_translation::translate_negative_property_assertion(&subject_json, &object_json),
        "owl:hasKey" => axiom_translation::translate_has_key(&subject_json, &object_json),
        "owl:imports" => axiom_translation::translate_import(&subject_json, &object_json),
        // Fall back to the raw (unparsed) strings for thin triples.
        _ => axiom_translation::translate_thin_triple(subject, predicate, object),
    }
}
|
use std::borrow::Cow;
use std::convert::TryFrom;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt::Result as FmtResult;
use std::fmt::Write;
use pest::error::Error as PestError;
use pest::Parser as PestParser;
use super::Arg;
use crate::parser::Parser;
use crate::parser::Rule;
/// A mnemonic with its arguments, e.g. `jnz %rax, start`.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Instruction<'a> {
    /// Operation name; borrows from the parsed source when possible.
    mnemonic: Cow<'a, str>,
    /// Operands in source order.
    args: Vec<Arg<'a>>,
}
impl<'a> Instruction<'a> {
    /// Creates an instruction with the given mnemonic and no arguments.
    pub fn new<M>(mnemonic: M) -> Self
    where
        M: Into<Cow<'a, str>>,
    {
        Self {
            mnemonic: mnemonic.into(),
            args: Vec::new(),
        }
    }

    /// Creates an instruction with the given mnemonic and arguments.
    pub fn with_arguments<M, A>(mnemonic: M, args: A) -> Self
    where
        M: Into<Cow<'a, str>>,
        A: IntoIterator<Item = Arg<'a>>,
    {
        Self {
            mnemonic: mnemonic.into(),
            args: args.into_iter().collect(),
        }
    }

    /// Appends a single argument; returns `&mut self` so calls can be chained.
    pub fn add_argument<A>(&mut self, arg: A) -> &mut Self
    where
        A: Into<Arg<'a>>,
    {
        self.args.push(arg.into());
        self
    }

    /// Returns the mnemonic.
    pub fn mnemonic(&self) -> &str {
        &self.mnemonic
    }

    /// Replaces the mnemonic, leaving the arguments untouched.
    pub fn set_mnemonic<M>(&mut self, mnemonic: M)
    where
        M: Into<Cow<'a, str>>,
    {
        self.mnemonic = mnemonic.into();
    }

    /// Returns the argument list in source order.
    pub fn arguments(&self) -> &Vec<Arg<'a>> {
        &self.args
    }
}
impl<'a> Display for Instruction<'a> {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
f.write_str(&self.mnemonic)?;
let mut args = self.args.iter().peekable();
if args.peek().is_some() {
f.write_char('\t')?;
}
while let Some(arg) = args.next() {
arg.fmt(f)?;
if args.peek().is_some() {
f.write_str(", ")?;
}
}
Ok(())
}
}
impl<'a> TryFrom<&'a str> for Instruction<'a> {
    type Error = PestError<Rule>;
    /// Parses one instruction from `s` using the pest grammar; fails if the
    /// text does not parse or leaves unconsumed input.
    fn try_from(s: &'a str) -> Result<Self, PestError<Rule>> {
        Parser::parse(Rule::instruction, s).and_then(|mut pairs| {
            let pair = pairs.next().unwrap();
            // check_complete! (defined elsewhere in this crate) rejects
            // trailing input after the parsed instruction.
            check_complete!(pair, s);
            let mut inner = pair.into_inner();
            // First inner pair is the mnemonic; the rest are arguments.
            let op = inner.next().unwrap().as_str();
            let mut args = Vec::new();
            for pair in inner {
                args.push(Arg::try_from(pair.as_str())?);
            }
            Ok(Self::with_arguments(op, args))
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ast::Register;
    // Round-trip parsing: a two-register move parses into the expected value,
    // and trailing junk (`;`) is rejected.
    #[test]
    fn parse() {
        let mut ins = Instruction::new("mov");
        ins.add_argument(Register::new("rax"));
        ins.add_argument(Register::new("rbx"));
        assert_eq!(Instruction::try_from("mov %rax, %rbx"), Ok(ins));
        assert!(Instruction::try_from("mov;").is_err());
    }
    // Display formatting with zero, one and two arguments.
    #[test]
    fn to_string() {
        let mut ins = Instruction::new("nop");
        assert_eq!(&ins.to_string(), "nop");
        ins.set_mnemonic("inc");
        ins.add_argument(Register::new("rax"));
        assert_eq!(&ins.to_string(), "inc\t%rax");
        ins.set_mnemonic("mov");
        ins.add_argument(Register::new("rbx"));
        assert_eq!(&ins.to_string(), "mov\t%rax, %rbx");
    }
}
|
/// Builds a 1001x1001 number spiral and prints the sum of both diagonals
/// (Project Euler problem 28).
fn main() {
    let size: usize = 1001;
    let spiral = generate_spiral(size);
    // Uncomment to inspect the generated grid:
    // print_spiral(&spiral, size);
    let diagonal_sum = calculate_diagonal_sum(&spiral, size);
    println!("Diagonal sum: {}", diagonal_sum);
}
/// Fills a `size` x `size` grid with 1..=size² arranged as an outward
/// clockwise spiral starting from the center cell.
///
/// The walk sets the current cell, then probes the neighbouring cell in the
/// "turn" direction: if it is still 0 the walk turns, otherwise it continues
/// straight. NOTE(review): the probe indices stay in bounds for odd `size`
/// (the spiral ends exactly at a corner); even sizes are not used by this
/// program and are not guaranteed — confirm before generalizing.
///
/// # Panics
/// Panics if `size == 0`.
fn generate_spiral(size: usize) -> std::vec::Vec<std::vec::Vec<i32>> {
    assert!(size > 0);
    let mut spiral_numbers: std::vec::Vec<std::vec::Vec<i32>> = vec![vec![0; size]; size];
    let mut current_position = ((size - 1) / 2, (size - 1) / 2);
    let mut last_direction = 'n';
    let total = size * size;
    for i in 1..=total {
        // Set value
        let (x, y) = current_position;
        spiral_numbers[x][y] = i as i32;
        // After the final value there is no next cell; probing past the last
        // corner would index out of bounds (e.g. generate_spiral(1) panicked
        // on spiral_numbers[1][0] before this guard).
        if i == total {
            break;
        }
        // Choose the next cell: turn clockwise if the turn target is free,
        // otherwise keep moving in the current direction.
        match last_direction {
            'n' => {
                if spiral_numbers[x + 1][y] == 0 {
                    current_position = (x + 1, y);
                    last_direction = 'e';
                } else {
                    current_position = (x, y - 1);
                }
            }
            'e' => {
                if spiral_numbers[x][y + 1] == 0 {
                    current_position = (x, y + 1);
                    last_direction = 's';
                } else {
                    current_position = (x + 1, y);
                }
            }
            's' => {
                if spiral_numbers[x - 1][y] == 0 {
                    current_position = (x - 1, y);
                    last_direction = 'w';
                } else {
                    current_position = (x, y + 1);
                }
            }
            _ => {
                // 'w'
                if spiral_numbers[x][y - 1] == 0 {
                    current_position = (x, y - 1);
                    last_direction = 'n';
                } else {
                    current_position = (x - 1, y);
                }
            }
        }
    }
    spiral_numbers
}
/// Sums both diagonals of the grid, counting the shared center cell once.
///
/// # Panics
/// Panics if `size` is even (the center would be ambiguous).
fn calculate_diagonal_sum(spiral_numbers: &std::vec::Vec<std::vec::Vec<i32>>, size: usize) -> i64 {
    assert!(size % 2 == 1);
    // Main (descending) diagonal.
    let descending: i64 = (0..size).map(|i| spiral_numbers[i][i] as i64).sum();
    // Anti (ascending) diagonal.
    let ascending: i64 = (0..size)
        .map(|i| spiral_numbers[i][size - 1 - i] as i64)
        .sum();
    // The center lies on both diagonals, so it was added twice.
    let center = spiral_numbers[(size - 1) / 2][(size - 1) / 2] as i64;
    descending + ascending - center
}
/// Prints the grid one line per second index, writing `spiral_numbers[x][y]`
/// with a trailing space after every value (i.e. the transpose of row-major
/// order, matching the original output exactly).
fn print_spiral(spiral_numbers: &std::vec::Vec<std::vec::Vec<i32>>, size: usize) {
    for col in 0..size {
        let line: String = (0..size)
            .map(|row| format!("{} ", spiral_numbers[row][col]))
            .collect();
        println!("{}", line);
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Page of REST operations supported by the Microsoft.Support provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsListResult {
    // Omitted from JSON when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
/// A single REST operation and its display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
pub mod operation {
    use super::*;
    /// Human-readable description of an operation; all fields optional.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
    }
}
/// Page of Azure services available for support tickets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServicesListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Service>,
}
/// An Azure service resource (ARM id/name/type envelope plus properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Service {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the rename to `type_`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ServiceProperties>,
}
/// Properties of a service: display name and its resource types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "resourceTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub resource_types: Vec<String>,
}
/// Page of problem classifications for a service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProblemClassificationsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ProblemClassification>,
}
/// A problem classification resource (ARM envelope plus properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProblemClassification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ProblemClassificationProperties>,
}
/// Properties of a problem classification.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProblemClassificationProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Request body for a name-availability check. Both fields are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameAvailabilityInput {
    pub name: String,
    #[serde(rename = "type")]
    pub type_: check_name_availability_input::Type,
}
pub mod check_name_availability_input {
    use super::*;
    /// Resource type whose name is being checked.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        #[serde(rename = "Microsoft.Support/supportTickets")]
        MicrosoftSupportSupportTickets,
        #[serde(rename = "Microsoft.Support/communications")]
        MicrosoftSupportCommunications,
    }
}
/// Response of a name-availability check.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameAvailabilityOutput {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Page of support tickets; `next_link` points at the following page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SupportTicketsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<SupportTicketDetails>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A support ticket resource (ARM envelope plus properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SupportTicketDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SupportTicketDetailsProperties>,
}
/// Page of communications on a ticket; `next_link` for pagination.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<CommunicationDetails>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A communication resource (ARM envelope plus properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CommunicationDetailsProperties>,
}
/// Properties of one communication; `subject` and `body` are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommunicationDetailsProperties {
    #[serde(rename = "communicationType", default, skip_serializing_if = "Option::is_none")]
    pub communication_type: Option<communication_details_properties::CommunicationType>,
    #[serde(rename = "communicationDirection", default, skip_serializing_if = "Option::is_none")]
    pub communication_direction: Option<communication_details_properties::CommunicationDirection>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sender: Option<String>,
    pub subject: String,
    pub body: String,
    #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")]
    pub created_date: Option<String>,
}
pub mod communication_details_properties {
    use super::*;
    /// Channel a communication arrived through.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CommunicationType {
        #[serde(rename = "web")]
        Web,
        #[serde(rename = "phone")]
        Phone,
    }
    /// Whether the communication was sent to or by the customer.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CommunicationDirection {
        #[serde(rename = "inbound")]
        Inbound,
        #[serde(rename = "outbound")]
        Outbound,
    }
}
/// Full set of ticket properties. Required fields: `description`,
/// `problem_classification_id`, `severity`, `contact_details`, `title`,
/// `service_id`; everything else is optional/service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SupportTicketDetailsProperties {
    #[serde(rename = "supportTicketId", default, skip_serializing_if = "Option::is_none")]
    pub support_ticket_id: Option<String>,
    pub description: String,
    #[serde(rename = "problemClassificationId")]
    pub problem_classification_id: String,
    #[serde(rename = "problemClassificationDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub problem_classification_display_name: Option<String>,
    pub severity: support_ticket_details_properties::Severity,
    #[serde(rename = "enrollmentId", default, skip_serializing_if = "Option::is_none")]
    pub enrollment_id: Option<String>,
    #[serde(rename = "require24X7Response", default, skip_serializing_if = "Option::is_none")]
    pub require24_x7_response: Option<bool>,
    #[serde(rename = "contactDetails")]
    pub contact_details: ContactProfile,
    #[serde(rename = "serviceLevelAgreement", default, skip_serializing_if = "Option::is_none")]
    pub service_level_agreement: Option<ServiceLevelAgreement>,
    #[serde(rename = "supportEngineer", default, skip_serializing_if = "Option::is_none")]
    pub support_engineer: Option<SupportEngineer>,
    #[serde(rename = "supportPlanType", default, skip_serializing_if = "Option::is_none")]
    pub support_plan_type: Option<String>,
    pub title: String,
    #[serde(rename = "problemStartTime", default, skip_serializing_if = "Option::is_none")]
    pub problem_start_time: Option<String>,
    #[serde(rename = "serviceId")]
    pub service_id: String,
    #[serde(rename = "serviceDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub service_display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "createdDate", default, skip_serializing_if = "Option::is_none")]
    pub created_date: Option<String>,
    #[serde(rename = "modifiedDate", default, skip_serializing_if = "Option::is_none")]
    pub modified_date: Option<String>,
    #[serde(rename = "technicalTicketDetails", default, skip_serializing_if = "Option::is_none")]
    pub technical_ticket_details: Option<TechnicalTicketDetails>,
    #[serde(rename = "quotaTicketDetails", default, skip_serializing_if = "Option::is_none")]
    pub quota_ticket_details: Option<QuotaTicketDetails>,
}
pub mod support_ticket_details_properties {
    use super::*;
    /// Ticket severity as serialized by the service (lowercase strings).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Severity {
        #[serde(rename = "minimal")]
        Minimal,
        #[serde(rename = "moderate")]
        Moderate,
        #[serde(rename = "critical")]
        Critical,
        #[serde(rename = "highestcriticalimpact")]
        Highestcriticalimpact,
    }
}
/// Service-level-agreement window for a ticket; every field is optional on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceLevelAgreement {
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "expirationTime", default, skip_serializing_if = "Option::is_none")]
    pub expiration_time: Option<String>,
    #[serde(rename = "slaMinutes", default, skip_serializing_if = "Option::is_none")]
    pub sla_minutes: Option<i32>,
}
/// Support engineer assigned to the ticket (only an email address is carried).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SupportEngineer {
    #[serde(rename = "emailAddress", default, skip_serializing_if = "Option::is_none")]
    pub email_address: Option<String>,
}
/// Top-level error envelope wrapping a `ServiceError`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExceptionResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ServiceError>,
}
/// Error body: code/message/target plus nested detail entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    // Empty detail lists are omitted from the serialized form.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ServiceErrorDetail>,
}
/// One nested detail entry inside `ServiceError`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
/// Contact information attached to a ticket. Unlike `UpdateContactProfile`,
/// most fields here are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContactProfile {
    #[serde(rename = "firstName")]
    pub first_name: String,
    #[serde(rename = "lastName")]
    pub last_name: String,
    #[serde(rename = "preferredContactMethod")]
    pub preferred_contact_method: contact_profile::PreferredContactMethod,
    #[serde(rename = "primaryEmailAddress")]
    pub primary_email_address: String,
    #[serde(rename = "additionalEmailAddresses", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_email_addresses: Vec<String>,
    #[serde(rename = "phoneNumber", default, skip_serializing_if = "Option::is_none")]
    pub phone_number: Option<String>,
    #[serde(rename = "preferredTimeZone")]
    pub preferred_time_zone: String,
    pub country: String,
    #[serde(rename = "preferredSupportLanguage")]
    pub preferred_support_language: String,
}
/// Enums scoped to `ContactProfile`.
pub mod contact_profile {
    use super::*;
    /// How the contact prefers to be reached; lowercase wire names.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PreferredContactMethod {
        #[serde(rename = "email")]
        Email,
        #[serde(rename = "phone")]
        Phone,
    }
}
/// Patch-style variant of `ContactProfile`: every field optional so callers
/// can send only the properties they want to change.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateContactProfile {
    #[serde(rename = "firstName", default, skip_serializing_if = "Option::is_none")]
    pub first_name: Option<String>,
    #[serde(rename = "lastName", default, skip_serializing_if = "Option::is_none")]
    pub last_name: Option<String>,
    #[serde(rename = "preferredContactMethod", default, skip_serializing_if = "Option::is_none")]
    pub preferred_contact_method: Option<update_contact_profile::PreferredContactMethod>,
    #[serde(rename = "primaryEmailAddress", default, skip_serializing_if = "Option::is_none")]
    pub primary_email_address: Option<String>,
    #[serde(rename = "additionalEmailAddresses", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_email_addresses: Vec<String>,
    #[serde(rename = "phoneNumber", default, skip_serializing_if = "Option::is_none")]
    pub phone_number: Option<String>,
    #[serde(rename = "preferredTimeZone", default, skip_serializing_if = "Option::is_none")]
    pub preferred_time_zone: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub country: Option<String>,
    #[serde(rename = "preferredSupportLanguage", default, skip_serializing_if = "Option::is_none")]
    pub preferred_support_language: Option<String>,
}
/// Enums scoped to `UpdateContactProfile` (mirrors `contact_profile`).
pub mod update_contact_profile {
    use super::*;
    /// How the contact prefers to be reached; lowercase wire names.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PreferredContactMethod {
        #[serde(rename = "email")]
        Email,
        #[serde(rename = "phone")]
        Phone,
    }
}
/// Extra payload for technical tickets: the resource the ticket is about.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TechnicalTicketDetails {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
}
/// Extra payload for quota-change tickets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct QuotaTicketDetails {
    #[serde(rename = "quotaChangeRequestSubType", default, skip_serializing_if = "Option::is_none")]
    pub quota_change_request_sub_type: Option<String>,
    #[serde(rename = "quotaChangeRequestVersion", default, skip_serializing_if = "Option::is_none")]
    pub quota_change_request_version: Option<String>,
    #[serde(rename = "quotaChangeRequests", default, skip_serializing_if = "Vec::is_empty")]
    pub quota_change_requests: Vec<QuotaChangeRequest>,
}
/// A single per-region quota change; `payload` is an opaque string
/// (NOTE(review): presumably service-specific JSON — confirm against the API).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct QuotaChangeRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub payload: Option<String>,
}
/// Patch body for an existing ticket: severity, status, and/or contact details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateSupportTicket {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub severity: Option<update_support_ticket::Severity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<update_support_ticket::Status>,
    #[serde(rename = "contactDetails", default, skip_serializing_if = "Option::is_none")]
    pub contact_details: Option<UpdateContactProfile>,
}
/// Enums scoped to `UpdateSupportTicket`.
pub mod update_support_ticket {
    use super::*;
    /// Ticket severity; same wire names as the read-side `Severity`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Severity {
        #[serde(rename = "minimal")]
        Minimal,
        #[serde(rename = "moderate")]
        Moderate,
        #[serde(rename = "critical")]
        Critical,
        #[serde(rename = "highestcriticalimpact")]
        Highestcriticalimpact,
    }
    /// Ticket lifecycle state a caller may set.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "open")]
        Open,
        #[serde(rename = "closed")]
        Closed,
    }
}
|
use std::env;
use std::thread::{self, JoinHandle};
/// Spawns a new thread with the given code, or defers it for direct execution if the
/// environment variable `PERSEUS_CLI_SEQUENTIAL` is set to any valid (Unicode) value.
/// Multithreading is the default.
pub fn spawn_thread<F, T>(f: F) -> ThreadHandle<F, T>
where
    F: FnOnce() -> T,
    F: Send + 'static,
    T: Send + 'static,
{
    match env::var("PERSEUS_CLI_SEQUENTIAL") {
        // Sequential mode: stash the closure; it runs when `join` is called.
        Ok(_) => ThreadHandle {
            join_handle: None,
            f: Some(f),
        },
        // Default mode: hand the closure to a real OS thread right away.
        Err(_) => ThreadHandle {
            join_handle: Some(thread::spawn(f)),
            f: None,
        },
    }
}
/// An abstraction over a `JoinHandle` in a multithreaded case, or just a similar interface
/// that will immediately return if otherwise. This allows the interfaces for multithreading
/// and single-threading to be basically identical.
pub struct ThreadHandle<F, T>
where
    F: FnOnce() -> T,
    F: Send + 'static,
    T: Send + 'static,
{
    /// If multithreaded, this is the join handle.
    join_handle: Option<JoinHandle<T>>,
    // If single-threaded, this is the deferred closure; it is executed when
    // `join` is called, not before.
    f: Option<F>,
}
impl<F, T> ThreadHandle<F, T>
where
    F: FnOnce() -> T,
    F: Send + 'static,
    T: Send + 'static,
{
    /// Waits for the 'thread' to complete, properly if it's multithreaded, or by direct
    /// execution of the stored closure if it's single-threaded.
    pub fn join(
        self,
    ) -> Result<T, std::boxed::Box<(dyn std::any::Any + std::marker::Send + 'static)>> {
        match (self.join_handle, self.f) {
            (Some(handle), _) => handle.join(),
            (None, Some(func)) => Ok(func()),
            // `spawn_thread` always populates exactly one of the two fields.
            (None, None) => unreachable!(),
        }
    }
}
|
use crate::avrcore::Avrcore;
use std::fs;
use regex::Regex;
use std::str;
// Capture-group index within the Intel HEX record regex used by
// `split_ihex_line`; the discriminant equals the group number.
enum FieldNumber {
    BCField = 1,   // byte count (2 hex digits)
    AddrField = 2, // load address (4 hex digits)
    RTField = 3,   // record type (2 hex digits)
    DatField = 4,  // optional data payload
    CSField = 5    // checksum (2 hex digits)
}
impl FieldNumber {
fn from_usize(value: usize) -> FieldNumber {
match value {
1 => FieldNumber::BCField,
2 => FieldNumber::AddrField,
3 => FieldNumber::RTField,
4 => FieldNumber::DatField,
5 => FieldNumber::CSField,
_ => panic!("Unknown FieldNumber: {}", value)
}
}
}
/// One parsed record of an Intel HEX file.
#[derive(Debug)]
struct IhexLine {
    byte_count: u8,
    address: u16,
    record_type: u8,
    data: Vec<u8>,
    checksum: u8,
}
/// Flat word image built from a hex dump, plus a read cursor.
pub struct IhexDump {
    indexer: usize,
    data: Vec<u16>,
}
impl IhexDump {
    /// Returns the word under the cursor and advances it, or an error once
    /// the dump is exhausted.
    pub fn get_next_word(&mut self) -> Result<u16, &str> {
        match self.data.get(self.indexer) {
            Some(&word) => {
                self.indexer += 1;
                Ok(word)
            }
            None => Err("End of hexdump"),
        }
    }
    /// Current cursor position expressed as a byte offset (two bytes per word).
    pub fn getIndex(&self) -> usize {
        2 * self.indexer
    }
}
/// Parses one Intel HEX record (":BBAAAATT[DD..]CC") into an `IhexLine`.
///
/// The data payload is optional (e.g. EOF records carry none); all other
/// fields are mandatory. Panics — exactly like the original `unwrap`s — when
/// the line does not start with ':' or does not match the record layout.
fn split_ihex_line(line: &str) -> IhexLine {
    if !line.starts_with(":") {
        panic!("Encountered {}, but line does not start with ':'", line);
    }
    // Field layout: byte count (2 hex digits), address (4), record type (2),
    // optional data payload (greedy, leaving the last 2 digits), checksum (2).
    // Named groups replace the old index -> FieldNumber -> "None"-sentinel
    // round-trip.
    let re = Regex::new(r":(?P<byte_count>[[:xdigit:]]{2})(?P<address>[[:xdigit:]]{4})(?P<record_type>[[:xdigit:]]{2})(?P<data>[[:xdigit:]]+)?(?P<check_sum>[[:xdigit:]]{2})").unwrap();
    let caps = re.captures(line).unwrap();
    // Decode the payload two hex digits at a time; absent group => empty Vec,
    // matching the old behavior of leaving `data` untouched.
    let data: Vec<u8> = caps
        .name("data")
        .map(|m| {
            m.as_str()
                .as_bytes()
                .chunks(2)
                .map(|pair| u8::from_str_radix(str::from_utf8(pair).unwrap(), 16).unwrap())
                .collect()
        })
        .unwrap_or_default();
    IhexLine {
        byte_count: u8::from_str_radix(&caps["byte_count"], 16).unwrap(),
        address: u16::from_str_radix(&caps["address"], 16).unwrap(),
        record_type: u8::from_str_radix(&caps["record_type"], 16).unwrap(),
        data,
        checksum: u8::from_str_radix(&caps["check_sum"], 16).unwrap(),
    }
}
/// Reads an Intel HEX file from `path` and flattens every record's data bytes
/// into 16-bit words, low byte first.
///
/// Panics if the file cannot be read or a line is malformed.
/// NOTE(review): assumes each record carries an even number of data bytes —
/// an odd-length payload would panic on `bytes[1]`; confirm for the target
/// hex files. Record types are not distinguished here, so extended-address
/// records' payloads would also be packed as words — verify that is intended.
pub fn ihex_to_dump(path: &str) -> IhexDump {
    let mut flash: Vec<u16> = Vec::new();
    let data = fs::read_to_string(path).expect("Cannot read file");
    for line in data.lines() {
        let ihex = split_ihex_line(line);
        for bytes in ihex.data.chunks(2) {
            // Pack two consecutive bytes into one word, low byte first.
            flash.push((bytes[1] as u16) << 8 | (bytes[0] as u16))
        }
    }
    // Cursor starts at the beginning of the assembled image.
    IhexDump {
        indexer: 0,
        data: flash
    }
}
|
use error::*;
use futures::future;
use futures::{Future, Stream};
use mime;
use multipart;
use hyper;
use hyper_tls::HttpsConnector;
use std::path::PathBuf;
use hyper::server::{self, Service};
use hyper::header::{Header, ContentDisposition, ContentType, DispositionParam};
use hyper::{Request, Response};
use multipart::client::lazy;
use hyper::header::Location;
use hyper::{Uri, StatusCode};
use std::io::prelude::*;
use tokio_core::reactor::Handle;
/// Builds an HTTPS-capable connector bound to the given reactor handle,
/// with 4 worker threads for DNS resolution.
pub fn https_connector(handle: &Handle) -> HttpsConnector {
    HttpsConnector::new(4, handle)
}
/// Builder-style body setter for server responses.
pub trait ServerResponseExt {
    /// Consumes the response and returns it with `body` installed.
    fn with_body<T: Into<hyper::Body>>(self, body: T) -> Self;
}
impl ServerResponseExt for server::Response {
    fn with_body<T: Into<hyper::Body>>(self, body: T) -> Self {
        let mut res = self;
        res.set_body(body.into());
        res
    }
}
/// Body and content-type helpers for incoming server requests.
pub trait ServerRequestExt {
    /// Collects the streamed request body into one `Vec<u8>`.
    fn get_body_bytes(self) -> Box<Future<Item = Vec<u8>, Error = Error>>;
    /// True when the request's `Content-Type` matches `mime` on the top-level
    /// and sub-level parts; parameters (e.g. charset) are ignored.
    fn has_content_type(&self, mime: mime::Mime) -> bool;
}
impl ServerRequestExt for server::Request {
    fn get_body_bytes(self) -> Box<Future<Item = Vec<u8>, Error = Error>> {
        // Fold every body chunk into a single growing buffer.
        Box::new(self.body()
            .map_err(Error::from)
            .fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&chunk);
                future::ok::<_, Error>(acc)
            }))
    }
    fn has_content_type(&self, mime: mime::Mime) -> bool {
        use mime::Mime;
        let content_type = self.headers().get::<ContentType>().cloned();
        let Mime(top_level, sub_level, _) = mime;
        // Compare only type/subtype; a missing header never matches.
        if let Some(ContentType(Mime(found_top_level, found_sub_level, _))) = content_type {
            found_top_level == top_level && found_sub_level == sub_level
        } else {
            false
        }
    }
}
/// Read-side helpers for client responses.
pub trait ResponseExt {
    /// Extracts a filename from the `Content-Disposition` header, if any.
    fn filename(&self) -> Option<String>;
    /// Collects the streamed response body into one `Vec<u8>` (unbounded).
    fn get_body_bytes(self) -> Box<Future<Item = Vec<u8>, Error = Error>>;
    /// Try to populate a vector with the contents of the response body, but stop after `limit`
    /// bytes with an error.
    fn get_body_bytes_with_limit(self, limit: u32) -> Box<Future<Item = Vec<u8>, Error = Error>>;
}
impl ResponseExt for Response {
    fn get_body_bytes_with_limit(self, limit: u32) -> Box<Future<Item = Vec<u8>, Error = Error>> {
        Box::new(self.body()
            .from_err()
            .fold(Vec::<u8>::new(), move |mut acc, chunk| {
                // Reject before copying once the next chunk would exceed `limit`.
                if (acc.len() + chunk.len()) > limit as usize {
                    return future::err(ErrorKind::UnprocessableEntity.into());
                }
                acc.extend_from_slice(&chunk);
                future::ok::<_, Error>(acc)
            }))
    }
    fn filename(&self) -> Option<String> {
        match self.headers().get::<ContentDisposition>() {
            Some(&ContentDisposition { parameters: ref params, .. }) => {
                // Take the first Filename parameter; the disposition type is
                // deliberately ignored (S3 may omit it — see tests below).
                params
                    .iter()
                    .find(|param| match **param {
                        DispositionParam::Filename(_, _, _) => true,
                        _ => false,
                    })
                    .and_then(|param| if let DispositionParam::Filename(_, _, ref bytes) = *param {
                        // Non-UTF-8 filenames yield None rather than an error.
                        String::from_utf8(bytes.to_owned()).ok()
                    } else {
                        None
                    })
            }
            _ => None,
        }
    }
    fn get_body_bytes(self) -> Box<Future<Item = Vec<u8>, Error = Error>> {
        Box::new(self.body()
            .map_err(Error::from)
            .fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&chunk);
                future::ok::<_, Error>(acc)
            }))
    }
}
/// Builder-style helpers for outgoing client requests.
pub trait RequestExt {
    fn with_header<T: Header>(self, header: T) -> Self;
    fn with_body(self, body: hyper::Body) -> Self;
}
impl RequestExt for Request {
    /// Sets `header` on the request and hands the request back.
    fn with_header<T: Header>(mut self, header: T) -> Self {
        self.headers_mut().set(header);
        self
    }
    /// Installs `body` on the request and hands the request back.
    fn with_body(mut self, body: hyper::Body) -> Self {
        self.set_body(body);
        self
    }
}
/// Adds redirect-following GET on top of any hyper `Service`.
pub trait ClientExt {
    /// Issues a GET to `uri`, transparently following redirects until a
    /// non-redirect response arrives.
    fn get_follow_redirect(self, uri: &Uri) -> Box<Future<Item = Response, Error = Error>>;
}
impl<S> ClientExt for S
    where S: Service<Request = Request, Response = Response, Error = hyper::Error> + 'static
{
    fn get_follow_redirect(self, uri: &Uri) -> Box<Future<Item = Response, Error = Error>> {
        // `loop_fn` re-issues the GET against each Location returned by
        // `determine_get_result` until it yields a final response.
        // NOTE(review): there is no cap on the number of redirects, so a
        // redirect cycle would loop forever — confirm this is acceptable.
        Box::new(future::loop_fn(uri.clone(), move |uri| {
            let request = Request::new(hyper::Method::Get, uri);
            self.call(request)
                .map_err(Error::from)
                .and_then(|res| match determine_get_result(res) {
                    Ok(GetResult::Redirect(redirect_uri)) => {
                        Ok(future::Loop::Continue(redirect_uri))
                    }
                    Ok(GetResult::Ok(res)) => Ok(future::Loop::Break(res)),
                    Err(err) => Err(err),
                })
        }))
    }
}
/// Outcome of inspecting a GET response: final, or follow a redirect.
enum GetResult {
    Ok(Response),
    Redirect(Uri),
}
/// Classifies a response: 307/308 become `Redirect` carrying the parsed
/// `Location`; everything else passes through as `Ok`.
/// NOTE(review): 301/302/303 are not treated as redirects here — confirm
/// that callers expect only the method-preserving redirect codes.
fn determine_get_result(res: Response) -> Result<GetResult> {
    match res.status() {
        StatusCode::TemporaryRedirect |
        StatusCode::PermanentRedirect => {
            match res.headers().get::<Location>() {
                Some(location) => Ok(GetResult::Redirect(location.parse()?)),
                // A redirect status without a target is an error.
                None => Err("Redirect without Location header".into()),
            }
        }
        _ => Ok(GetResult::Ok(res)),
    }
}
/// Wraps `request` in a `multipart/form-data` body containing the file at
/// `path` under the field name "file", and sets the matching boundary header.
pub fn multipart_request_with_file(request: Request,
                                   path: PathBuf)
                                   -> ::std::result::Result<Request, Error> {
    let mut fields = lazy::Multipart::new()
        .add_file("file", path)
        // Threshold just below u64::MAX keeps the prepared body in memory.
        .prepare_threshold(Some(u64::max_value() - 1))
        .map_err(|_| "Failed to prepare multipart body")?;
    let mut bytes: Vec<u8> = Vec::new();
    fields.read_to_end(&mut bytes)?;
    Ok(request
        .with_body(bytes.into())
        .with_header(ContentType(mime!(Multipart/FormData; Boundary=(fields.boundary())))))
}
/// Wraps `request` in a `multipart/form-data` body carrying the rendered
/// error text under the field name "error".
pub fn multipart_request_with_error(request: Request, error: &Error) -> Result<Request> {
    let mut fields = lazy::Multipart::new()
        .add_text("error", format!("{}", error))
        .prepare()
        .map_err(|_| "Failed to prepare multipart body")?;
    let mut bytes: Vec<u8> = Vec::new();
    fields.read_to_end(&mut bytes)?;
    Ok(request
        .with_body(bytes.into())
        .with_header(ContentType(mime!(Multipart/FormData; Boundary=(fields.boundary())))))
}
/// Minimal owned request (headers + raw body bytes) adapted to the
/// `multipart` crate's server-side parser.
#[derive(Debug)]
pub struct MultipartRequest(pub hyper::Headers, pub Vec<u8>);
impl multipart::server::HttpRequest for MultipartRequest {
    type Body = ::std::io::Cursor<Vec<u8>>;
    /// Returns the boundary string from the Content-Type header when the
    /// request is `multipart/form-data`; otherwise `None`.
    fn multipart_boundary(&self) -> Option<&str> {
        let content_type = self.0.get::<ContentType>();
        match content_type {
            Some(&ContentType(mime::Mime(mime::TopLevel::Multipart,
                                         mime::SubLevel::FormData,
                                         ref params))) => {
                // param is (attr, value)
                params
                    .iter()
                    .find(|param| param.0.as_str() == "boundary")
                    .map(|param| param.1.as_str())
            }
            _ => None,
        }
    }
    fn body(self) -> Self::Body {
        ::std::io::Cursor::new(self.1)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use hyper;
    use hyper::server::Service;
    use futures::future;
    /// Test double serving /assets/logo.png with a configurable
    /// Content-Disposition header; anything else is 404.
    #[derive(Debug, Clone)]
    struct MockServer {
        response_to_logo_png: hyper::header::ContentDisposition,
    }
    impl MockServer {
        fn respond_to_logo_png_with(content_disposition: hyper::header::ContentDisposition)
                                    -> MockServer {
            MockServer { response_to_logo_png: content_disposition }
        }
    }
    impl Service for MockServer {
        type Request = server::Request;
        type Response = server::Response;
        type Error = hyper::Error;
        type Future = Box<Future<Item = server::Response, Error = hyper::Error>>;
        fn call(&self, req: Self::Request) -> Self::Future {
            let res = match req.path() {
                "/assets/logo.png" => {
                    server::Response::new()
                        .with_body(b"54321" as &[u8])
                        .with_header(self.response_to_logo_png.clone())
                }
                _ => server::Response::new().with_status(hyper::StatusCode::NotFound),
            };
            Box::new(future::ok(res))
        }
    }
    #[test]
    fn test_filename_prefers_content_disposition() {
        let response_header = hyper::header::ContentDisposition {
            disposition: hyper::header::DispositionType::Attachment,
            parameters: vec![hyper::header::DispositionParam::Filename(
                hyper::header::Charset::Ext("UTF-8".to_string()),
                None,
                b"this_should_be_the_filename.png".to_vec())],
        };
        let server = MockServer::respond_to_logo_png_with(response_header);
        let request: hyper::client::Request<hyper::Body> =
            Request::new(hyper::Method::Get,
                         "http://127.0.0.1:8738/assets/logo.png".parse().unwrap());
        let response = server.call(request).wait().unwrap();
        assert_eq!(response.filename(),
                   Some("this_should_be_the_filename.png".to_string()))
    }
    #[test]
    fn test_filename_works_with_content_disposition_inline() {
        let response_header = hyper::header::ContentDisposition {
            disposition: hyper::header::DispositionType::Inline,
            parameters: vec![hyper::header::DispositionParam::Filename(
                hyper::header::Charset::Ext("UTF-8".to_string()),
                None,
                b"this_should_be_the_filename.png".to_vec())],
        };
        let server = MockServer::respond_to_logo_png_with(response_header);
        let request: hyper::client::Request<hyper::Body> =
            Request::new(hyper::Method::Get,
                         "http://127.0.0.1:8738/assets/logo.png".parse().unwrap());
        let response = server.call(request).wait().unwrap();
        assert_eq!(response.filename(),
                   Some("this_should_be_the_filename.png".to_string()))
    }
    // S3 returns Content-Disposition without disposition (just filename)
    #[test]
    fn test_content_disposition_works_without_disposition() {
        let server = MockServer::respond_to_logo_png_with(hyper::header::ContentDisposition {
            disposition: hyper::header::DispositionType::Ext("".to_string()),
            parameters: vec![hyper::header::DispositionParam::Filename(
                hyper::header::Charset::Ext("UTF-8".to_string()),
                None,
                b"this_should_be_the_filename.png".to_vec(),
            )],
        });
        let request: hyper::client::Request<hyper::Body> =
            Request::new(hyper::Method::Get,
                         "http://127.0.0.1:8740/assets/logo.png".parse().unwrap());
        let response = server.call(request).wait().unwrap();
        assert_eq!(response.filename(),
                   Some("this_should_be_the_filename.png".to_string()))
    }
    /// Test double streaming a fixed 3000-byte body for any path.
    struct MockFileServer;
    impl Service for MockFileServer {
        type Request = server::Request;
        type Response = server::Response;
        type Error = hyper::Error;
        type Future = Box<Future<Item = server::Response, Error = hyper::Error>>;
        fn call(&self, _: Self::Request) -> Self::Future {
            let mut response_body: Vec<u8> = Vec::with_capacity(3000);
            for n in 0..3000 {
                response_body.push((n / 250) as u8);
            }
            let res = server::Response::new().with_body(response_body);
            Box::new(future::ok(res))
        }
    }
    #[test]
    fn test_get_body_bytes_with_limit_is_enforced() {
        let request = Request::new(hyper::Method::Get, "/".parse().unwrap());
        let response = MockFileServer.call(request).wait().unwrap();
        // 2000 < 3000 body bytes, so the fold must bail out.
        let result = response.get_body_bytes_with_limit(2000).wait();
        match result {
            Err(Error(ErrorKind::UnprocessableEntity, _)) => (),
            other => panic!("Wrong result to get_body_bytes_max_size: {:?}", other),
        }
    }
    #[test]
    fn test_get_body_bytes_with_limit_is_not_excessively_zealous() {
        let request = Request::new(hyper::Method::Get, "/".parse().unwrap());
        let response = MockFileServer.call(request).wait().unwrap();
        // Exactly at the limit must still succeed.
        let result = response.get_body_bytes_with_limit(3000).wait();
        match result {
            Ok(_) => (),
            other => panic!("Wrong result to get_body_bytes_max_size: {:?}", other),
        }
    }
}
|
use super::super::prelude::{
HINSTANCE , CursorService , IconService , Text , Cursor , Icon
};
pub type Application = HINSTANCE;
pub trait ResourceFunctionInApplication {
fn loadCursor(&self , iconName : Text) -> Cursor;
fn loadIcon(&self , iconName : Text) -> Icon;
}
impl ResourceFunctionInApplication for Application {
fn loadCursor(&self , cursorName : Text) -> Cursor {
CursorService::loadCursor(Some(*self) , cursorName)
}
fn loadIcon(&self , iconName : Text) -> Icon {
IconService::loadIcon(Some(*self) , iconName)
}
} |
#[cfg(test)]
mod tests {
    use casper_engine_test_support::{Code, Hash, SessionBuilder, TestContextBuilder};
    use casper_types::{
        account::AccountHash, runtime_args, PublicKey, RuntimeArgs, SecretKey, U512,
    };
    /// End-to-end flow: deploy the manager contract, create one counter
    /// through it, increment that counter three times, then verify the value.
    #[test]
    fn should_store_hello_world() {
        // Prepare the account (deterministic fixed-seed ed25519 key).
        let public_key: PublicKey = SecretKey::ed25519([7u8; 32]).into();
        let account_addr = AccountHash::from(&public_key);
        let mut context = TestContextBuilder::new()
            .with_public_key(public_key, U512::from(500_000_000_000_000_000u64))
            .build();
        // Deploy the main contract.
        let session_code = Code::from("managed_counter.wasm");
        let session = SessionBuilder::new(session_code, RuntimeArgs::new())
            .with_address(account_addr)
            .with_authorization_keys(&[account_addr])
            .build();
        context.run(session);
        // Call the manager contract to create a new counter contract.
        let session_code = Code::NamedKey(String::from("manager"), String::from("new_counter"));
        let session_args = runtime_args! {
            "name" => String::from("counter_one")
        };
        let session = SessionBuilder::new(session_code, session_args)
            .with_address(account_addr)
            .with_authorization_keys(&[account_addr])
            .build();
        context.run(session);
        // Ready counter_one's hash.
        let counter_one_hash: Hash = context
            .query(
                account_addr,
                &[String::from("manager"), String::from("counter_one_hash")],
            )
            .unwrap()
            .into_t()
            .unwrap();
        // Increment counter 3 times.
        for _ in 0..3 {
            let session_code = Code::Hash(counter_one_hash, String::from("increment"));
            let session = SessionBuilder::new(session_code, RuntimeArgs::new())
                .with_address(account_addr)
                .with_authorization_keys(&[account_addr])
                .build();
            context.run(session);
        }
        // Read counter_one's value.
        let counter_one_value: u32 = context
            .query(
                account_addr,
                &[
                    String::from("manager"),
                    String::from("counter_one"),
                    String::from("counter_value"),
                ],
            )
            .unwrap()
            .into_t()
            .unwrap();
        // Expect the counter to be incremented.
        assert_eq!(counter_one_value, 3);
    }
}
// Entry point required by the binary target; the real logic lives in the
// test module above, so running the binary directly is deliberately fatal.
fn main() {
    panic!("The main should not be used here");
}
// https://doc.rust-lang.ru/book/ch03-05-control-flow.html
// конвертер температур из единиц Фаренгейта в единицы Цельсия
use std::io;
/// Converts a temperature from degrees Fahrenheit to degrees Celsius
/// using C = (F - 32) * 5/9.
fn converter(f: f32) -> f32 {
    // Keep the original two-step evaluation (factor first, then multiply) so
    // float rounding is unchanged; the redundant `as f32` casts are dropped —
    // both operands are already f32.
    let ratio = 5.0_f32 / 9.0;
    (f - 32.0) * ratio
}
/// Interactive loop: reads a Fahrenheit value from stdin and prints the
/// Celsius equivalent. Runs until the process is terminated; input that
/// fails to parse simply re-prompts. (User-facing strings are in Russian.)
fn main() {
    loop {
        // Prompt: "Enter the temperature in degrees Fahrenheit."
        println!("\nВведите температуру в единицах Фаренгейта.");
        let mut frgt = String::new();
        io::stdin()
            .read_line(&mut frgt)
            .expect("Failed to read line");
        // Silently retry on anything that does not parse as f32.
        let frgt: f32 = match frgt.trim().parse() {
            Ok(num) => num,
            Err(_) => continue,
        };
        let cl: f32 = converter(frgt);
        // Output: "Temperature in degrees Celsius: {}."
        println!("Температура в по единицах Цельсия: {}.", cl)
    }
}
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use uvll;
use raw::{Loop, Handle, Allocated, Raw};
use UvResult;
/// Thin wrapper around a raw libuv `uv_async_t` handle (pre-1.0 Rust code).
pub struct Async {
    handle: *mut uvll::uv_async_t,
}
impl Async {
    /// Create a new uv_async_t handle.
    ///
    /// This function is unsafe as a successful return value is not
    /// automatically deallocated.
    pub unsafe fn new(uv_loop: &Loop, cb: uvll::uv_async_cb) -> UvResult<Async> {
        let raw = Raw::new();
        // Register the handle (and its callback) with the given loop.
        try!(call!(uvll::uv_async_init(uv_loop.raw(), raw.get(), cb)));
        Ok(Async { handle: raw.unwrap() })
    }
    /// Signals the handle so the callback passed to `new` gets invoked by
    /// the event loop.
    pub fn send(&self) {
        unsafe { uvll::uv_async_send(self.handle) }
    }
}
impl Allocated for uvll::uv_async_t {
    // Allocation size of the underlying handle, queried from libuv itself.
    // (`uint` is the pre-1.0 spelling of `usize`.)
    fn size(_self: Option<uvll::uv_async_t>) -> uint {
        unsafe { uvll::uv_handle_size(uvll::UV_ASYNC) as uint }
    }
}
// Expose / reconstruct the raw handle pointer for the generic `Handle` API.
impl Handle<uvll::uv_async_t> for Async {
    fn raw(&self) -> *mut uvll::uv_async_t { self.handle }
    fn from_raw(t: *mut uvll::uv_async_t) -> Async { Async { handle: t } }
}
|
/*
https://projecteuler.net
It was proposed by Christian Goldbach that every odd composite number
can be written as the sum of a prime and twice a square.
9 = 7 + 2×1²
15 = 7 + 2×2²
21 = 3 + 2×3²
25 = 7 + 2×3²
27 = 19 + 2×2²
33 = 31 + 2×1²
It turns out that the conjecture was false.
What is the smallest odd composite that cannot be written as the sum
of a prime and twice a square?
NOTES:
*/
/// Scans odd numbers for the first odd composite that cannot be written as
/// prime + 2*square, growing the prime and 2n² tables on demand.
fn solve() -> u64 {
    // a vector to hold primes, preload it with the first value
    let mut primes = Vec::<u64>::new();
    sb::math::prime_next(&mut primes); // a function to add the next prime to the existing list
    // a vector of 2 x n^2, preload with the first value
    let mut squares = Vec::<u64>::new();
    squares.push(2);
    let mut n = 3;
    loop {
        // add to primes if it's too small
        if primes.last().unwrap() < &n {
            sb::math::prime_next( &mut primes );
        }
        // add to squares if it's too small
        if squares.last().unwrap() < &n {
            let i = squares.len() as u64 + 1;
            squares.push( 2 * i * i );
        }
        // we only look at composites, skip over primes
        // (binary_search is valid because `primes` is generated in order)
        if let Err(_) = primes.binary_search(&n) {
            let mut found = false;
            // For every "square"...
            for sq in &squares {
                // ...assuming the square is less than n...
                if sq >= &n {
                    break;
                }
                // ...find out what the prime value would need to be and search for it!
                let target = n - sq;
                if let Err(_) = primes.binary_search(&target) {
                    continue; // not prime, go to the next "square"
                }
                // We found the value, set our flag and break out of the loop
                found = true;
                break;
            }
            // if we didn't find a value, then we found the solution!
            if !found {
                return n;
            }
        }
        // go to the next odd number
        n += 2;
    }
}
/// Formats `n` with comma thousands separators, e.g. 1234567 -> "1,234,567".
/// Same digit-grouping algorithm as before, just extracted for clarity.
fn group_thousands(mut remain: u128) -> String {
    let mut s = String::new();
    if remain == 0 {
        s.insert(0, '0');
    }
    while remain > 0 {
        let temp = remain % 1000;
        remain /= 1000;
        if remain > 0 {
            // Interior groups are zero-padded to three digits.
            s = format!(",{:03}", temp) + &s;
        } else {
            // Leading group carries no padding.
            s = format!("{}", temp) + &s;
        }
    }
    s
}
/// Entry point: times `solve` and prints the answer plus the elapsed
/// microseconds with thousands separators.
fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    // Fixed the user-facing typo "Elasped" -> "Elapsed".
    println!("Elapsed time: {} us", group_thousands(elapsed));
}
|
pub use self::graphemes_struct::Graphemes;
/// Vector of graphemes
/// Vector of graphemes
mod graphemes_struct {
    extern crate unicode_segmentation;
    use unicode_segmentation::UnicodeSegmentation;
    use std::ops::{Deref, Index, IndexMut};
    use std::fmt::{Display, Formatter};
    use std::fmt;
    use len_trait::len::{Len, Empty, Clear};
    use push_trait::base::{Push, CanPush};
    use std::slice::SliceIndex;
    /// A vector of graphemes.
    /// Graphemes can vary in size which is why Vec<&str> is used.
    #[derive(Debug, Hash, Eq, PartialEq)]
    pub struct Graphemes<'a> {
        graphemes : Vec<&'a str>,
    }
    impl<'a> Graphemes<'a> {
        /// Creates an empty grapheme vector.
        pub fn new() -> Graphemes<'a> {
            Graphemes {
                graphemes: vec![]
            }
        }
        /// Segments `string` into grapheme clusters (the `true` flag selects
        /// extended clusters, per the unicode-segmentation crate).
        pub fn from(string : &'a str) -> Graphemes<'a> {
            let graphemes = UnicodeSegmentation::graphemes(string, true).collect::<Vec<&str>>();
            Graphemes {
                graphemes,
            }
        }
        /// Returns the grapheme at `index`, or `None` when out of bounds.
        pub fn get(&self, index : usize) -> Option<&&str> {
            self.graphemes.get(index)
        }
        /// Reverses the grapheme order in place.
        pub fn reverse(&mut self) {
            self.graphemes.reverse();
        }
        /// Copies the graphemes in `[start, end)` into a new instance.
        /// Panics if the range is out of bounds.
        pub fn slice(&self, start : usize, end : usize) -> Self {
            let graphemes = self.graphemes[start..end].to_vec();
            Graphemes { graphemes }
        }
        /// Moves all graphemes of `other` onto the end of `self`.
        pub fn append(&mut self, mut other : Graphemes<'a>) {
            self.graphemes.append(&mut other.graphemes);
        }
        /// Splits on graphemes equal to `splitter`; the separator itself is
        /// dropped (slice::split semantics).
        pub fn split(&self, splitter : &'a str) -> Vec<Graphemes> {
            self.graphemes.split(|character| *character == splitter).map(
                |str_arr| Graphemes { graphemes: str_arr.to_vec()} ).collect()
        }
    }
    impl<'a> Display for Graphemes<'a> {
        // Concatenates the graphemes back into their string form.
        fn fmt(&self, f: &mut Formatter) -> fmt::Result {
            write!(f, "{}", self.graphemes.concat())
        }
    }
    impl<'a> Deref for Graphemes<'a> {
        type Target = Vec<&'a str>;
        fn deref(&self) -> &Self::Target {
            &self.graphemes
        }
    }
    impl<'a> Empty for Graphemes<'a> {
        fn is_empty(&self) -> bool {
            self.graphemes.is_empty()
        }
    }
    impl<'a> Len for Graphemes<'a> {
        fn len(&self) -> usize {
            self.graphemes.len()
        }
    }
    impl<'a, T : 'a + SliceIndex<[&'a str]>> Index<T> for Graphemes<'a> {
        type Output = T::Output;
        fn index(&self, index: T) -> &Self::Output {
            // Index through the Deref target (the inner slice).
            Index::index(&***self, index)
        }
    }
    impl<'a, T : 'a + SliceIndex<[&'a str]>> IndexMut<T> for Graphemes<'a> {
        fn index_mut(&mut self, index: T) -> &mut Self::Output {
            IndexMut::index_mut(&mut *self.graphemes, index)
        }
    }
    impl<'a> Clear for Graphemes<'a> {
        fn clear(&mut self) {
            self.graphemes.clear();
        }
    }
    impl<'a> CanPush<&'a str> for Graphemes<'a> {
        type PushedOut = ();
    }
    impl<'a> Push<&'a str> for Graphemes<'a> {
        /// Appends one grapheme; always reports `Some(())` (nothing is
        /// ever pushed out).
        fn push(&mut self, val: &'a str) -> Option<Self::PushedOut> {
            self.graphemes.push(val);
            Some(())
        }
    }
    impl<'a> Default for Graphemes<'a> {
        fn default() -> Self {
            Graphemes::new()
        }
    }
}
#[cfg(test)]
mod test_cases {
    use super::graphemes_struct::Graphemes;
    #[test]
    fn graphemes_split_test() {
        // Splitting on the space grapheme drops the separator itself.
        assert_eq!(Graphemes::from("hello world").split(" "), vec![Graphemes::from("hello"), Graphemes::from("world")])
    }
}
use std::sync::mpsc;
use std::sync::mpsc::{Receiver, Sender};
use std::sync::{Once, ONCE_INIT};
use std::thread;
use std::time::{Duration, Instant};
use nats::Client;
use config;
/// Queues a message for asynchronous publication on the shared NATS pipe.
/// A no-op when NATS is disabled in the configuration (the lazily created
/// pipe is `None` in that case).
///
/// # Panics
/// Panics when the background sender thread has gone away and the channel
/// is disconnected — identical to the original `unwrap`.
pub fn publish(subject: String, msg: String) {
    // `if let` instead of `Option::map` for a side effect
    // (clippy::option_map_unit_fn).
    if let Some(pipe) = Pipe::get().as_ref() {
        pipe.sender.send((subject, msg)).unwrap();
    }
}
/// Subject/message pair queued for publication.
type PublishPair = (String, String);
static ONCE: Once = ONCE_INIT;
// Lazily initialized singleton; written exactly once inside `ONCE.call_once`
// and only read afterwards (see `Pipe::get`).
static mut PIPE: *mut Option<Pipe> = 0_usize as *mut _;
/// How long to drop messages after a publish failure before trying again.
const DISCARD_MODE_SECONDS: u64 = 5;
/// Publishing state: forwarding normally, or discarding since some instant.
enum Mode {
    Publish,
    Discard(Instant),
}
/// Background NATS publisher: the worker thread plus the channel into it.
struct Pipe {
    thread: thread::JoinHandle<()>,
    sender: Sender<PublishPair>,
}
impl Pipe {
    /// Returns the process-wide pipe singleton, creating it on first call.
    /// Yields `None` when NATS is disabled in the configuration.
    fn get<'a>() -> &'a Option<Self> {
        // SAFETY(review): PIPE is written exactly once inside `call_once` and
        // only read after initialization, so the shared reference never
        // races a write — confirm no other code touches PIPE directly.
        unsafe {
            ONCE.call_once(|| {
                let pipe = if config::config().nats().enabled() {
                    let (sender, receiver) = mpsc::channel();
                    let thread = thread::Builder::new()
                        .name("NATS sender".to_string())
                        .spawn(|| Pipe::work(receiver))
                        .unwrap();
                    Some(Pipe { thread, sender })
                } else {
                    None
                };
                // Deliberate leak: the singleton lives for the whole process.
                PIPE = Box::into_raw(Box::new(pipe));
            });
            &*PIPE
        }
    }
    /// Worker loop: drains the channel and publishes each message, switching
    /// to a timed discard mode after a publish failure.
    fn work(receiver: Receiver<PublishPair>) {
        let mut client = match Client::new(config::config().nats().addresses()) {
            Ok(client) => client,
            Err(e) => {
                // Without a client there is nothing to do; the thread exits.
                warn!("Error when creating NATS client: {}", e);
                return;
            }
        };
        let mut mode = Mode::Publish;
        let discard_duration = Duration::from_secs(DISCARD_MODE_SECONDS);
        let mut process_pair = |pair: PublishPair| match mode {
            Mode::Publish => match client.publish(&pair.0, pair.1.as_bytes()) {
                Ok(_) => info!("success published"),
                Err(e) => {
                    warn!("{:?}", e);
                    warn!("Discarding messages for {} seconds.", DISCARD_MODE_SECONDS);
                    mode = Mode::Discard(Instant::now());
                }
            },
            // Inside the discard window: the message is silently dropped.
            Mode::Discard(begin) if begin.elapsed() < discard_duration => (),
            // NOTE(review): the message that ends the discard window is also
            // dropped (mode is reset but the pair is not published) — confirm
            // this is intended.
            Mode::Discard(_) => {
                info!("Accepting messages again.");
                mode = Mode::Publish;
            }
        };
        // Runs until every Sender is dropped and the channel disconnects.
        for pair in receiver {
            process_pair(pair);
        }
    }
}
|
extern crate phrases;
/// Demo entry point: prints a greeting and a farewell from the `phrases`
/// crate's `english` and `japanese` modules.
fn main() {
    println!("{}", phrases::english::hello());
    println!("{}", phrases::japanese::goodbye());
}
|
use std::sync::Arc;
use async_trait::async_trait;
use common::error::Error;
use common::result::Result;
use identity::domain::user::{UserId, UserRepository};
use publishing::domain::content_manager::{
ContentManager, ContentManagerId, ContentManagerRepository,
};
pub struct ContentManagerTranslator {
user_repo: Arc<dyn UserRepository>,
}
impl ContentManagerTranslator {
    /// Builds a translator backed by the given user repository.
    pub fn new(user_repo: Arc<dyn UserRepository>) -> Self {
        Self { user_repo }
    }
}
#[async_trait]
impl ContentManagerRepository for ContentManagerTranslator {
    /// Derives the next content-manager id from the next user id.
    async fn next_id(&self) -> Result<ContentManagerId> {
        let user_id = self.user_repo.next_id().await?;
        let id = ContentManagerId::new(user_id.value())?;
        Ok(id)
    }

    /// Resolves the user behind `id` and exposes it as a content manager,
    /// provided the user carries an authorized role.
    async fn find_by_id(&self, id: &ContentManagerId) -> Result<ContentManager> {
        let user_id = UserId::new(id.value())?;
        let user = self.user_repo.find_by_id(&user_id).await?;

        let authorized = user.role().is("admin") || user.role().is("content-manager");
        if !authorized {
            return Err(Error::new("user", "unauthorized"));
        }

        let manager_id = ContentManagerId::new(user.base().id().value())?;
        Ok(ContentManager::new(manager_id)?)
    }

    /// Persistence lives in the identity context, so saving here is a no-op.
    async fn save(&self, _author: &mut ContentManager) -> Result<()> {
        Ok(())
    }
}
|
// Configuration submodules exposed by this crate.
pub mod database_config;
pub mod github;
pub mod gitlab;
pub mod global_config;
|
// This crate builds without the standard library.
#![no_std]
// Heap allocation support for no_std builds.
extern crate alloc;
// Re-export the plugin map and registry APIs at the crate root.
pub use plugin_map::*;
pub use registry::*;
pub mod info;
mod plugin_map;
mod registry;
mod plugin;
|
#[doc = "Register `PFCR` reader"]
pub type R = crate::R<PFCR_SPEC>;
#[doc = "Register `PFCR` writer"]
pub type W = crate::W<PFCR_SPEC>;
#[doc = "Field `PF` reader - Pixel Format"]
pub type PF_R = crate::FieldReader<PF_A>;
// Enumerated values for the 3-bit PF (pixel format) field; the `repr(u8)`
// discriminants are exactly the raw register field values.
#[doc = "Pixel Format\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum PF_A {
    #[doc = "0: ARGB8888"]
    Argb8888 = 0,
    #[doc = "1: RGB888"]
    Rgb888 = 1,
    #[doc = "2: RGB565"]
    Rgb565 = 2,
    #[doc = "3: ARGB1555"]
    Argb1555 = 3,
    #[doc = "4: ARGB4444"]
    Argb4444 = 4,
    #[doc = "5: L8 (8-bit luminance)"]
    L8 = 5,
    #[doc = "6: AL44 (4-bit alpha, 4-bit luminance)"]
    Al44 = 6,
    #[doc = "7: AL88 (8-bit alpha, 8-bit luminance)"]
    Al88 = 7,
}
impl From<PF_A> for u8 {
    /// Converts the enumerated pixel format into its raw 3-bit field value.
    #[inline(always)]
    fn from(variant: PF_A) -> Self {
        // `PF_A` is `#[repr(u8)]`, so the discriminant IS the field value.
        variant as u8
    }
}
// Raw backing type of the PF field is `u8`.
impl crate::FieldSpec for PF_A {
    type Ux = u8;
}
// Typed accessors over the raw 3-bit PF field value.
impl PF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PF_A {
        // The reader only ever receives bits 0:2 of the register (see
        // `R::pf`, which masks with 7), so all raw values 0..=7 are covered
        // and the fall-through arm cannot fire.
        match self.bits {
            0 => PF_A::Argb8888,
            1 => PF_A::Rgb888,
            2 => PF_A::Rgb565,
            3 => PF_A::Argb1555,
            4 => PF_A::Argb4444,
            5 => PF_A::L8,
            6 => PF_A::Al44,
            7 => PF_A::Al88,
            _ => unreachable!(),
        }
    }
    #[doc = "ARGB8888"]
    #[inline(always)]
    pub fn is_argb8888(&self) -> bool {
        *self == PF_A::Argb8888
    }
    #[doc = "RGB888"]
    #[inline(always)]
    pub fn is_rgb888(&self) -> bool {
        *self == PF_A::Rgb888
    }
    #[doc = "RGB565"]
    #[inline(always)]
    pub fn is_rgb565(&self) -> bool {
        *self == PF_A::Rgb565
    }
    #[doc = "ARGB1555"]
    #[inline(always)]
    pub fn is_argb1555(&self) -> bool {
        *self == PF_A::Argb1555
    }
    #[doc = "ARGB4444"]
    #[inline(always)]
    pub fn is_argb4444(&self) -> bool {
        *self == PF_A::Argb4444
    }
    #[doc = "L8 (8-bit luminance)"]
    #[inline(always)]
    pub fn is_l8(&self) -> bool {
        *self == PF_A::L8
    }
    #[doc = "AL44 (4-bit alpha, 4-bit luminance)"]
    #[inline(always)]
    pub fn is_al44(&self) -> bool {
        *self == PF_A::Al44
    }
    #[doc = "AL88 (8-bit alpha, 8-bit luminance)"]
    #[inline(always)]
    pub fn is_al88(&self) -> bool {
        *self == PF_A::Al88
    }
}
#[doc = "Field `PF` writer - Pixel Format"]
pub type PF_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O, PF_A>;
// One setter per enumerated value; each delegates to the generic `variant()`
// writer supplied by `FieldWriterSafe`.
impl<'a, REG, const O: u8> PF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "ARGB8888"]
    #[inline(always)]
    pub fn argb8888(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Argb8888)
    }
    #[doc = "RGB888"]
    #[inline(always)]
    pub fn rgb888(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Rgb888)
    }
    #[doc = "RGB565"]
    #[inline(always)]
    pub fn rgb565(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Rgb565)
    }
    #[doc = "ARGB1555"]
    #[inline(always)]
    pub fn argb1555(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Argb1555)
    }
    #[doc = "ARGB4444"]
    #[inline(always)]
    pub fn argb4444(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Argb4444)
    }
    #[doc = "L8 (8-bit luminance)"]
    #[inline(always)]
    pub fn l8(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::L8)
    }
    #[doc = "AL44 (4-bit alpha, 4-bit luminance)"]
    #[inline(always)]
    pub fn al44(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Al44)
    }
    #[doc = "AL88 (8-bit alpha, 8-bit luminance)"]
    #[inline(always)]
    pub fn al88(self) -> &'a mut crate::W<REG> {
        self.variant(PF_A::Al88)
    }
}
impl R {
    #[doc = "Bits 0:2 - Pixel Format"]
    #[inline(always)]
    pub fn pf(&self) -> PF_R {
        // Extract the low three bits that hold the pixel-format field.
        PF_R::new((self.bits & 7) as u8)
    }
}
impl W {
    #[doc = "Bits 0:2 - Pixel Format"]
    #[inline(always)]
    #[must_use]
    pub fn pf(&mut self) -> PF_W<PFCR_SPEC, 0> {
        PF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY contract: the caller must supply a bit pattern that is valid for
    // this register; prefer the typed field writer above.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Layerx Pixel Format Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pfcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pfcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PFCR_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for PFCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`pfcr::R`](R) reader structure"]
impl crate::Readable for PFCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pfcr::W`](W) writer structure"]
impl crate::Writable for PFCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PFCR to value 0"]
impl crate::Resettable for PFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use seed::prelude::{Node, Orders};
/// A component that both renders a model `A` and handles its own messages `M`.
// NOTE(review): the `M: 'static` bound is presumably required by seed's
// message/orders machinery — confirm against the seed API.
pub trait ActionComponent<'a, A, M: 'static>
{
    /// Builds the component's DOM tree from the model state.
    fn view(model: &A) -> Node<M>;
    /// Handles `msg`, mutating the model and issuing follow-up commands
    /// through the orders handle.
    fn handle(msg: M, model: &'a mut A, _: &'a mut impl Orders<M>);
}
/// A purely presentational component: renders model `A` into a DOM node
/// producing messages of type `M`.
pub trait Component<A, M>
{
    /// Builds the component's DOM tree from the model state.
    fn view(model: &A) -> Node<M>;
}
|
use crate::error::Error;
/// A single significant byte from the input together with the position at
/// which it appeared (1-based row; column of the byte on that row).
#[derive(Debug)]
pub struct Char {
    pub code: u8,
    pub row: u32,
    pub col: u16,
}
/// Iterator adapter that strips spaces and comments from a fallible byte
/// stream, yielding only significant bytes with position information.
pub struct Chars<I: Iterator<Item = Result<u8, Error>>> {
    source: I,
    // Current 1-based line number.
    row: u32,
    // Column of the most recently consumed byte on the current line.
    col: u16,
    // True while inside a `;` line comment (cleared by the next newline).
    single_comment: bool,
    // True while inside a `(` ... `)` comment.
    multi_comment: bool,
}
impl<I: Iterator<Item = Result<u8, Error>>> From<I> for Chars<I> {
    /// Wraps a byte source, starting at row 1, column 0, outside any comment.
    fn from(source: I) -> Self {
        Self {
            // Field-init shorthand (was `source: source`, flagged by clippy's
            // `redundant_field_names`).
            source,
            row: 1,
            col: 0,
            single_comment: false,
            multi_comment: false,
        }
    }
}
impl<I: Iterator<Item = Result<u8, Error>>> Iterator for Chars<I> {
    type Item = Result<Char, Error>;
    /// Pulls bytes from the source until one survives the space/comment
    /// filter, then yields it with its position; source errors are forwarded
    /// unchanged, and `None` ends the stream.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            match self.source.next() {
                // Newline: next row, column reset, `;` comment terminated.
                Some(Ok(b'\n')) => {
                    self.row += 1;
                    self.col = 0;
                    self.single_comment = false;
                }
                // Carriage return only resets the column (pairs with `\n`).
                Some(Ok(b'\r')) => {
                    self.col = 0;
                }
                // `(` opens a comment unless already inside one.
                // NOTE(review): a second `(` while multi_comment is set falls
                // through to the generic branch below, so nesting is NOT
                // tracked — confirm that this is the intended grammar.
                Some(Ok(b'(')) if !self.single_comment && !self.multi_comment => {
                    self.col += 1;
                    self.multi_comment = true;
                }
                // `)` closes an open multi-byte comment.
                Some(Ok(b')')) if self.multi_comment => {
                    self.col += 1;
                    self.multi_comment = false;
                }
                // `;` starts a line comment.
                // NOTE(review): this branch does not advance `col`; harmless
                // as written because nothing is emitted before the next
                // newline resets the column — confirm if accuracy matters.
                Some(Ok(b';')) if !self.single_comment && !self.multi_comment => {
                    self.single_comment = true;
                }
                // Any other byte: count it, emit it unless it is a space or
                // we are inside a comment.
                Some(Ok(code)) => {
                    self.col += 1;
                    if code != b' ' && !self.single_comment && !self.multi_comment {
                        return Some(Ok(Char {
                            code: code,
                            row: self.row,
                            col: self.col,
                        }));
                    }
                }
                Some(Err(err)) => break Some(Err(err)),
                None => break None,
            };
        }
    }
}
|
// This file was generated
mod barrier_wait_result_private {
    pub trait Sealed {}
}

/// Negating extension trait for [`BarrierWaitResult`](std::sync::BarrierWaitResult).
pub trait IsntBarrierWaitResultExt: barrier_wait_result_private::Sealed {
    /// Returns `true` exactly when
    /// [`is_leader`](std::sync::BarrierWaitResult::is_leader) returns `false`.
    #[must_use]
    fn is_not_leader(&self) -> bool;
}

impl barrier_wait_result_private::Sealed for std::sync::BarrierWaitResult {}

impl IsntBarrierWaitResultExt for std::sync::BarrierWaitResult {
    #[inline]
    fn is_not_leader(&self) -> bool {
        let leader = self.is_leader();
        !leader
    }
}
mod mutex_private {
    pub trait Sealed<T: ?Sized> {}
}

/// Negating extension trait for [`Mutex`](std::sync::Mutex).
pub trait IsntMutexExt<T: ?Sized>: mutex_private::Sealed<T> {
    /// Returns `true` exactly when
    /// [`is_poisoned`](std::sync::Mutex::is_poisoned) returns `false`.
    #[must_use]
    fn is_not_poisoned(&self) -> bool;
}

impl<T: ?Sized> mutex_private::Sealed<T> for std::sync::Mutex<T> {}

impl<T: ?Sized> IsntMutexExt<T> for std::sync::Mutex<T> {
    #[inline]
    fn is_not_poisoned(&self) -> bool {
        let poisoned = self.is_poisoned();
        !poisoned
    }
}
mod once_private {
    pub trait Sealed {}
}

/// Negating extension trait for [`Once`](std::sync::Once).
pub trait IsntOnceExt: once_private::Sealed {
    /// Returns `true` exactly when
    /// [`is_completed`](std::sync::Once::is_completed) returns `false`.
    #[must_use]
    fn is_not_completed(&self) -> bool;
}

impl once_private::Sealed for std::sync::Once {}

impl IsntOnceExt for std::sync::Once {
    #[inline]
    fn is_not_completed(&self) -> bool {
        let completed = self.is_completed();
        !completed
    }
}
mod rw_lock_private {
    pub trait Sealed<T: ?Sized> {}
}

/// Negating extension trait for [`RwLock`](std::sync::RwLock).
pub trait IsntRwLockExt<T: ?Sized>: rw_lock_private::Sealed<T> {
    /// Returns `true` exactly when
    /// [`is_poisoned`](std::sync::RwLock::is_poisoned) returns `false`.
    #[must_use]
    fn is_not_poisoned(&self) -> bool;
}

impl<T: ?Sized> rw_lock_private::Sealed<T> for std::sync::RwLock<T> {}

impl<T: ?Sized> IsntRwLockExt<T> for std::sync::RwLock<T> {
    #[inline]
    fn is_not_poisoned(&self) -> bool {
        let poisoned = self.is_poisoned();
        !poisoned
    }
}
mod wait_timeout_result_private {
    pub trait Sealed {}
}

/// Negating extension trait for [`WaitTimeoutResult`](std::sync::WaitTimeoutResult).
pub trait IsntWaitTimeoutResultExt: wait_timeout_result_private::Sealed {
    /// Returns `true` exactly when
    /// [`timed_out`](std::sync::WaitTimeoutResult::timed_out) returns `false`.
    #[must_use]
    fn not_timed_out(&self) -> bool;
}

impl wait_timeout_result_private::Sealed for std::sync::WaitTimeoutResult {}

impl IsntWaitTimeoutResultExt for std::sync::WaitTimeoutResult {
    #[inline]
    fn not_timed_out(&self) -> bool {
        let timed_out = self.timed_out();
        !timed_out
    }
}
mod weak_private {
    pub trait Sealed<T: ?Sized> {}
}

/// Negating extension trait for [`Weak`](std::sync::Weak).
pub trait IsntWeakExt<T: ?Sized>: weak_private::Sealed<T> {
    /// Returns `true` exactly when
    /// [`ptr_eq`](std::sync::Weak::ptr_eq) returns `false`.
    #[must_use]
    fn not_ptr_eq(&self, other: &std::sync::Weak<T>) -> bool;
}

impl<T: ?Sized> weak_private::Sealed<T> for std::sync::Weak<T> {}

impl<T: ?Sized> IsntWeakExt<T> for std::sync::Weak<T> {
    #[inline]
    fn not_ptr_eq(&self, other: &std::sync::Weak<T>) -> bool {
        let same_allocation = self.ptr_eq(other);
        !same_allocation
    }
}
|
extern crate ralloc;
mod util;
use std::thread;
/// Spawns a worker that grows a vector under allocator stress (`util::acid`)
/// and then verifies the first 0xFFF elements.
fn make_thread() -> thread::JoinHandle<()> {
    thread::spawn(|| {
        let mut values = Vec::new();
        for index in 0..0xFFF {
            util::acid(|| {
                values.push(0);
                values[index] = index;
            });
        }
        for index in 0..0xFFF {
            assert_eq!(values[index], index);
        }
    })
}
/// Stress test: spawn many worker threads under allocator pressure and join
/// every one of them.
#[test]
#[ignore]
fn multithread_join_handle_vec() {
    util::multiply(|| {
        let mut handles = Vec::new();
        for _ in 0..20 {
            util::acid(|| {
                handles.push(make_thread());
            });
        }
        for handle in handles {
            handle.join().unwrap();
        }
    });
}
|
extern crate rand;
use std::error::Error;
use crate::lib::core::*;
use crate::lib::feature::StaticFeature;
use crate::lib::node;
use crate::lib::star::StaticStarSecret;
// Identity (secret) the test uses to author changes.
const MY_STAR_SECRET: StaticStarSecret = StaticStarSecret {};
// Feature coordinate ("Star"/"Alias") the test writes and reads back.
const FEATURE_STAR_ALIAS: StaticFeature = StaticFeature { facet: "Star", point: "Alias" };
/// End-to-end check: push an alias change into a fresh node and read it back
/// through a view as an unconfirmed value.
#[test]
fn it_works() -> Result<(), Box<dyn Error>> {
    // Each run gets its own scratch directory so parallel tests cannot collide.
    let data_dir = tools::unique_tmp_dir("relic-tests-it-works")?;
    let node = node::start(&data_dir)?;

    let changes = vec![
        ChangeOrder::Add(&MY_STAR_SECRET, &FEATURE_STAR_ALIAS, "Bartholomew"),
    ];
    node.push(&changes);

    let view = node.view();
    let star_alias = view.unconfirmed_value(&MY_STAR_SECRET, &FEATURE_STAR_ALIAS);
    assert_eq!(Some("Bartholomew".to_string()), star_alias);
    Ok(())
}
mod tools {
use std::error::Error;
use std::path::PathBuf;
pub fn unique_tmp_dir(prefix: &str) -> Result<PathBuf, Box<dyn Error>> {
let mut path = std::env::temp_dir();
path.push(format!("{}-{}", prefix, rand::random::<u32>()));
std::fs::create_dir_all(&path)?;
Ok(path)
}
} |
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
//! Basic polynomial operations.
//!
//! This module provides a set of functions for basic polynomial operations, including:
//! - Polynomial evaluation using Horner method.
//! - Polynomial interpolation using Lagrange method.
//! - Polynomial addition, subtraction, multiplication, and division.
//! - Synthetic polynomial division for efficient division by polynomials of the form
//! `x`^`a` - `b`.
//!
//! In the context of this module any slice of field elements is considered to be a polynomial
//! in reverse coefficient form. A few examples:
//!
//! ```
//! # use winter_math::{fields::{f128::BaseElement}, FieldElement};
//! // p(x) = 2 * x + 1
//! let p = vec![BaseElement::new(1), BaseElement::new(2)];
//!
//! // p(x) = 4 * x^2 + 3
//! let p = [BaseElement::new(3), BaseElement::ZERO, BaseElement::new(4)];
//! ```
use crate::{field::FieldElement, utils::batch_inversion};
use core::mem;
use utils::{collections::Vec, group_vector_elements};
#[cfg(test)]
mod tests;
// POLYNOMIAL EVALUATION
// ================================================================================================
/// Evaluates a polynomial at a single point and returns the result.
///
/// Evaluates polynomial `p` at coordinate `x` using
/// [Horner's method](https://en.wikipedia.org/wiki/Horner%27s_method).
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // define polynomial: f(x) = 3 * x^2 + 2 * x + 1
/// let p = (1u32..4).map(BaseElement::from).collect::<Vec<_>>();
///
/// // evaluate the polynomial at point 4
/// let x = BaseElement::new(4);
/// assert_eq!(BaseElement::new(57), eval(&p, x));
/// ```
pub fn eval<B, E>(p: &[B], x: E) -> E
where
    B: FieldElement,
    E: FieldElement + From<B>,
{
    // Horner's rule: start from the highest coefficient and repeatedly
    // multiply by x and add the next-lower coefficient.
    let mut acc = E::ZERO;
    for &coeff in p.iter().rev() {
        acc = acc * x + E::from(coeff);
    }
    acc
}
/// Evaluates a polynomial at multiple points and returns a vector of results.
///
/// Evaluates polynomial `p` at all coordinates in `xs` slice by repeatedly invoking
/// `polynom::eval()` function.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // define polynomial: f(x) = 3 * x^2 + 2 * x + 1
/// let p = (1_u32..4).map(BaseElement::from).collect::<Vec<_>>();
/// let xs = (3_u32..6).map(BaseElement::from).collect::<Vec<_>>();
///
/// let expected = xs.iter().map(|x| eval(&p, *x)).collect::<Vec<_>>();
/// assert_eq!(expected, eval_many(&p, &xs));
/// ```
pub fn eval_many<B, E>(p: &[B], xs: &[E]) -> Vec<E>
where
    B: FieldElement,
    E: FieldElement + From<B>,
{
    // Evaluate the same polynomial independently at every coordinate.
    let mut result = Vec::with_capacity(xs.len());
    for &x in xs {
        result.push(eval(p, x));
    }
    result
}
// POLYNOMIAL INTERPOLATION
// ================================================================================================
/// Returns a polynomial in coefficient form interpolated from a set of X and Y coordinates.
///
/// Uses [Lagrange interpolation](https://en.wikipedia.org/wiki/Lagrange_polynomial) to build a
/// polynomial from X and Y coordinates. If `remove_leading_zeros = true`, all leading coefficients
/// which are ZEROs will be truncated; otherwise, the length of result will be equal to the number
/// of X coordinates.
///
/// # Panics
/// Panics if number of X and Y coordinates is not the same.
///
/// # Example
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// let xs = BaseElement::prng_vector([0; 32], 16);
/// let ys = BaseElement::prng_vector([1; 32], 16);
///
/// let p = interpolate(&xs, &ys, false);
/// assert_eq!(ys, eval_many(&p, &xs));
/// ```
pub fn interpolate<E>(xs: &[E], ys: &[E], remove_leading_zeros: bool) -> Vec<E>
where
    E: FieldElement,
{
    debug_assert!(
        xs.len() == ys.len(),
        "number of X and Y coordinates must be the same"
    );
    // Lagrange basis numerators: l_i(x) = prod_{j != i} (x - x_j), obtained by
    // dividing the full zero polynomial by (x - x_i).
    let roots = get_zero_roots(xs);
    let numerators: Vec<Vec<E>> = xs.iter().map(|&x| syn_div(&roots, 1, x)).collect();
    // Denominators l_i(x_i), inverted in a single batch inversion.
    let denominators: Vec<E> = numerators
        .iter()
        .zip(xs)
        .map(|(numerator, &x)| eval(numerator, x))
        .collect();
    let inv_denominators = batch_inversion(&denominators);
    // Accumulate y_i * l_i(x) / l_i(x_i) into the result polynomial.
    let mut result = E::zeroed_vector(xs.len());
    for ((numerator, &y), &inv_d) in numerators.iter().zip(ys).zip(inv_denominators.iter()) {
        let scale = y * inv_d;
        for (res, &coeff) in result.iter_mut().zip(numerator.iter()) {
            *res += coeff * scale;
        }
    }
    if remove_leading_zeros {
        crate::polynom::remove_leading_zeros(&result)
    } else {
        result
    }
}
/// Returns a vector of polynomials interpolated from the provided X and Y coordinate batches.
///
/// Uses [Lagrange interpolation](https://en.wikipedia.org/wiki/Lagrange_polynomial) to build a
/// vector of polynomial from X and Y coordinate batches (one polynomial per batch).
///
/// When the number of batches is larger, this function is significantly faster than using
/// `polynom::interpolate()` function individually for each batch of coordinates. The speed-up
/// is primarily due to computing all inversions as a single batch inversion across all
/// coordinate batches.
///
/// # Panics
/// Panics if the number of X coordinate batches and Y coordinate batches is not the same.
///
/// # Examples
/// ```
/// # use core::convert::TryInto;
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// let x_batches: Vec<[BaseElement; 8]> = vec![
/// BaseElement::prng_vector([0; 32], 8).try_into().unwrap(),
/// BaseElement::prng_vector([1; 32], 8).try_into().unwrap(),
/// ];
/// let y_batches: Vec<[BaseElement; 8]> = vec![
/// BaseElement::prng_vector([2; 32], 8).try_into().unwrap(),
/// BaseElement::prng_vector([3; 32], 8).try_into().unwrap(),
/// ];
///
/// let polys = interpolate_batch(&x_batches, &y_batches);
/// for ((p, xs), ys) in polys.iter().zip(x_batches).zip(y_batches) {
/// assert_eq!(ys.to_vec(), eval_many(p, &xs));
/// }
/// ```
pub fn interpolate_batch<E, const N: usize>(xs: &[[E; N]], ys: &[[E; N]]) -> Vec<[E; N]>
where
    E: FieldElement,
{
    debug_assert!(
        xs.len() == ys.len(),
        "number of X coordinate batches and Y coordinate batches must be the same"
    );
    let n = xs.len();
    // Flat storage for the N Lagrange-basis numerators of each batch, plus the
    // denominators l_i(x_i), which are inverted in one shot further below.
    let mut equations = group_vector_elements(E::zeroed_vector(n * N * N));
    let mut inverses = E::zeroed_vector(n * N);
    // TODO: converting this to an array results in about 5% speed-up, but unfortunately, complex
    // generic constraints are not yet supported: https://github.com/rust-lang/rust/issues/76560
    let mut roots = vec![E::ZERO; N + 1];
    for (i, xs) in xs.iter().enumerate() {
        // zero polynomial of this batch: prod_j (x - xs[j])
        fill_zero_roots(xs, &mut roots);
        for (j, &x) in xs.iter().enumerate() {
            let equation = &mut equations[i * N + j];
            // optimized synthetic division for this context
            equation[N - 1] = roots[N];
            for k in (0..N - 1).rev() {
                equation[k] = roots[k + 1] + equation[k + 1] * x;
            }
            // evaluating a numerator at its own x gives the denominator l_j(x_j)
            inverses[i * N + j] = eval(equation, x);
        }
    }
    let equations = group_vector_elements::<[E; N], N>(equations);
    // single batch inversion across ALL coordinate batches — the main speed-up
    // over calling `interpolate()` per batch
    let inverses = group_vector_elements::<E, N>(batch_inversion(&inverses));
    let mut result = group_vector_elements(E::zeroed_vector(n * N));
    for (i, poly) in result.iter_mut().enumerate() {
        for j in 0..N {
            let inv_y = ys[i][j] * inverses[i][j];
            for (res_coeff, &eq_coeff) in poly.iter_mut().zip(equations[i][j].iter()) {
                *res_coeff += eq_coeff * inv_y;
            }
        }
    }
    result
}
// POLYNOMIAL MATH OPERATIONS
// ================================================================================================
/// Returns a polynomial resulting from adding two polynomials together.
///
/// Polynomials `a` and `b` are expected to be in the coefficient form, and the returned
/// polynomial will be in the coefficient form as well. The length of the returned vector
/// will be max(a.len(), b.len()).
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // p1(x) = 4 * x^2 + 3 * x + 2
/// let p1 = (2_u32..5).map(BaseElement::from).collect::<Vec<_>>();
/// // p2(x) = 2 * x + 1
/// let p2 = (1_u32..3).map(BaseElement::from).collect::<Vec<_>>();
///
/// // expected result = 4 * x^2 + 5 * x + 3
/// let expected = vec![
/// BaseElement::new(3),
/// BaseElement::new(5),
/// BaseElement::new(4),
/// ];
/// assert_eq!(expected, add(&p1, &p2));
/// ```
pub fn add<E>(a: &[E], b: &[E]) -> Vec<E>
where
    E: FieldElement,
{
    // The shorter polynomial is padded with implicit ZERO coefficients.
    let result_len = core::cmp::max(a.len(), b.len());
    (0..result_len)
        .map(|i| {
            let c1 = a.get(i).copied().unwrap_or(E::ZERO);
            let c2 = b.get(i).copied().unwrap_or(E::ZERO);
            c1 + c2
        })
        .collect()
}
/// Returns a polynomial resulting from subtracting one polynomial from another.
///
/// Specifically, subtracts polynomial `b` from polynomial `a` and returns the result. Both
/// polynomials are expected to be in the coefficient form, and the returned polynomial will
/// be in the coefficient form as well. The length of the returned vector will be
/// max(a.len(), b.len()).
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // p1(x) = 4 * x^2 + 3 * x + 2
/// let p1 = (2_u32..5).map(BaseElement::from).collect::<Vec<_>>();
/// // p2(x) = 2 * x + 1
/// let p2 = (1_u32..3).map(BaseElement::from).collect::<Vec<_>>();
///
/// // expected result = 4 * x^2 + x + 1
/// let expected = vec![
/// BaseElement::new(1),
/// BaseElement::new(1),
/// BaseElement::new(4),
/// ];
/// assert_eq!(expected, sub(&p1, &p2));
/// ```
pub fn sub<E>(a: &[E], b: &[E]) -> Vec<E>
where
    E: FieldElement,
{
    // The shorter polynomial is padded with implicit ZERO coefficients.
    let result_len = core::cmp::max(a.len(), b.len());
    (0..result_len)
        .map(|i| {
            let c1 = a.get(i).copied().unwrap_or(E::ZERO);
            let c2 = b.get(i).copied().unwrap_or(E::ZERO);
            c1 - c2
        })
        .collect()
}
/// Returns a polynomial resulting from multiplying two polynomials together.
///
/// Polynomials `a` and `b` are expected to be in the coefficient form, and the returned
/// polynomial will be in the coefficient form as well. The length of the returned vector
/// will be a.len() + b.len() - 1.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // p1(x) = x + 1
/// let p1 = [BaseElement::ONE, BaseElement::ONE];
/// // p2(x) = x^2 + 2
/// let p2 = [BaseElement::new(2), BaseElement::ZERO, BaseElement::ONE];
///
/// // expected result = x^3 + x^2 + 2 * x + 2
/// let expected = vec![
/// BaseElement::new(2),
/// BaseElement::new(2),
/// BaseElement::new(1),
/// BaseElement::new(1),
/// ];
/// assert_eq!(expected, mul(&p1, &p2));
/// ```
pub fn mul<E>(a: &[E], b: &[E]) -> Vec<E>
where
    E: FieldElement,
{
    // Schoolbook multiplication: the product of terms of degree i and j
    // contributes to the coefficient of degree i + j.
    let mut result = E::zeroed_vector(a.len() + b.len() - 1);
    for (i, &ca) in a.iter().enumerate() {
        for (j, &cb) in b.iter().enumerate() {
            result[i + j] += ca * cb;
        }
    }
    result
}
/// Returns a polynomial resulting from multiplying a given polynomial by a scalar value.
///
/// Specifically, multiplies every coefficient of polynomial `p` by constant `k` and returns
/// the resulting vector.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// let p = [
/// BaseElement::new(1),
/// BaseElement::new(2),
/// BaseElement::new(3),
/// ];
/// let k = BaseElement::new(2);
///
/// let expected = vec![
/// BaseElement::new(2),
/// BaseElement::new(4),
/// BaseElement::new(6),
/// ];
/// assert_eq!(expected, mul_by_scalar(&p, k));
/// ```
pub fn mul_by_scalar<E>(p: &[E], k: E) -> Vec<E>
where
    E: FieldElement,
{
    // Scale every coefficient by the constant `k`.
    p.iter().map(|&coeff| coeff * k).collect()
}
/// Returns a polynomial resulting from dividing one polynomial by another.
///
/// Specifically, divides polynomial `a` by polynomial `b` and returns the result. If the
/// polynomials don't divide evenly, the remainder is ignored. Both polynomials are expected to
/// be in the coefficient form, and the returned polynomial will be in the coefficient form as
/// well. The length of the returned vector will be a.len() - b.len() + 1.
///
/// # Panics
/// Panics if:
/// * Polynomial `b` is empty.
/// * Degree of polynomial `b` is zero and the constant coefficient is ZERO.
/// * The degree of polynomial `b` is greater than the degree of polynomial `a`.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // p1(x) = x^3 + x^2 + 2 * x + 2
/// let p1 = [
/// BaseElement::new(2),
/// BaseElement::new(2),
/// BaseElement::new(1),
/// BaseElement::new(1),
/// ];
/// // p2(x) = x^2 + 2
/// let p2 = [BaseElement::new(2), BaseElement::ZERO, BaseElement::ONE];
///
/// // expected result = x + 1
/// let expected = vec![BaseElement::ONE, BaseElement::ONE];
/// assert_eq!(expected, div(&p1, &p2));
/// ```
pub fn div<E>(a: &[E], b: &[E]) -> Vec<E>
where
    E: FieldElement,
{
    // `apos` tracks the index of the current leading coefficient of the
    // (shrinking) dividend/remainder; `bpos` is the divisor's degree.
    let mut apos = degree_of(a);
    let mut a = a.to_vec();
    let bpos = degree_of(b);
    assert!(apos >= bpos, "cannot divide by polynomial of higher degree");
    if bpos == 0 {
        assert!(!b.is_empty(), "cannot divide by empty polynomial");
        assert!(b[0] != E::ZERO, "cannot divide polynomial by zero");
    }
    // Classic long division, producing quotient coefficients high-to-low.
    let mut result = E::zeroed_vector(apos - bpos + 1);
    for i in (0..result.len()).rev() {
        let quot = a[apos] / b[bpos];
        result[i] = quot;
        // Subtract quot * b(x) * x^i from the remainder; the leading term is
        // cancelled implicitly by stepping `apos` down instead of zeroing it.
        for j in (0..bpos).rev() {
            a[i + j] -= b[j] * quot;
        }
        // `wrapping_sub` lets `apos` pass 0 on the last iteration without a
        // debug-build panic; the value is never read again after that point.
        apos = apos.wrapping_sub(1);
    }
    result
}
/// Returns a polynomial resulting from dividing a polynomial by a polynomial of special form.
///
/// Specifically, divides polynomial `p` by polynomial (x^`a` - `b`) using
/// [synthetic division](https://en.wikipedia.org/wiki/Synthetic_division) method; if the
/// polynomials don't divide evenly, the remainder is ignored. Polynomial `p` is expected
/// to be in the coefficient form, and the result will be in the coefficient form as well.
/// The length of the resulting polynomial will be equal to `p.len()`.
///
/// This function is significantly faster than the generic `polynom::div()` function.
///
/// # Panics
/// Panics if:
/// * `a` is zero;
/// * `b` is zero;
/// * `p.len()` is smaller than or equal to `a`.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // p(x) = x^3 + x^2 + 2 * x + 2
/// let p = [
/// BaseElement::new(2),
/// BaseElement::new(2),
/// BaseElement::new(1),
/// BaseElement::new(1),
/// ];
///
/// // expected result = x^2 + 2
/// let expected = vec![
/// BaseElement::new(2),
/// BaseElement::ZERO,
/// BaseElement::new(1),
/// BaseElement::ZERO,
/// ];
///
/// // divide by x + 1
/// assert_eq!(expected, syn_div(&p, 1, -BaseElement::ONE));
/// ```
pub fn syn_div<E>(p: &[E], a: usize, b: E) -> Vec<E>
where
    E: FieldElement,
{
    // Copy the dividend, then reuse the in-place synthetic division routine.
    let mut quotient = p.to_vec();
    syn_div_in_place(&mut quotient, a, b);
    quotient
}
/// Divides a polynomial by a polynomial of special form and saves the result into the original
/// polynomial.
///
/// Specifically, divides polynomial `p` by polynomial (x^`a` - `b`) using
/// [synthetic division](https://en.wikipedia.org/wiki/Synthetic_division) method and saves the
/// result into `p`. If the polynomials don't divide evenly, the remainder is ignored. Polynomial
/// `p` is expected to be in the coefficient form, and the result will be in coefficient form as
/// well.
///
/// This function is significantly faster than the generic `polynom::div()` function, and as
/// compared to `polynom::syn_div()` function, this function does not allocate any additional
/// memory.
///
/// # Panics
/// Panics if:
/// * `a` is zero;
/// * `b` is zero;
/// * `p.len()` is smaller than or equal to `a`.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// // p(x) = x^3 + x^2 + 2 * x + 2
/// let mut p = [
/// BaseElement::new(2),
/// BaseElement::new(2),
/// BaseElement::new(1),
/// BaseElement::new(1),
/// ];
///
/// // divide by x + 1
/// syn_div_in_place(&mut p, 1, -BaseElement::ONE);
///
/// // expected result = x^2 + 2
/// let expected = [
/// BaseElement::new(2),
/// BaseElement::ZERO,
/// BaseElement::new(1),
/// BaseElement::ZERO,
/// ];
///
/// assert_eq!(expected, p);
/// ```
pub fn syn_div_in_place<E>(p: &mut [E], a: usize, b: E)
where
    E: FieldElement,
{
    assert!(a != 0, "divisor degree cannot be zero");
    assert!(b != E::ZERO, "constant cannot be zero");
    assert!(
        p.len() > a,
        "divisor degree cannot be greater than dividend size"
    );
    if a == 1 {
        // if we are dividing by (x - `b`), we can use a single variable to keep track
        // of the remainder; this way, we can avoid shifting the values in the slice later
        let mut c = E::ZERO;
        for coeff in p.iter_mut().rev() {
            // fold the carried remainder into this coefficient, then carry the
            // (pre-update) quotient coefficient downward via the swap
            *coeff += b * c;
            mem::swap(coeff, &mut c);
        }
    } else {
        // if we are dividing by a polynomial of higher power, we need to keep track of the
        // full remainder. we do that in place, but then need to shift the values at the end
        // to discard the remainder
        let degree_offset = p.len() - a;
        if b == E::ONE {
            // if `b` is 1, no need to multiply by `b` in every iteration of the loop
            for i in (0..degree_offset).rev() {
                p[i] += p[i + a];
            }
        } else {
            for i in (0..degree_offset).rev() {
                p[i] += p[i + a] * b;
            }
        }
        // discard the remainder
        p.copy_within(a.., 0);
        // zero-pad the tail so the result keeps the original length
        p[degree_offset..].fill(E::ZERO);
    }
}
// DEGREE INFERENCE
// ================================================================================================
/// Returns the degree of the provided polynomial.
///
/// If the size of the provided slice is much larger than the degree of the polynomial (i.e.,
/// a large number of leading coefficients is ZERO), this operation can be quite inefficient.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// assert_eq!(0, degree_of::<BaseElement>(&[]));
/// assert_eq!(0, degree_of(&[BaseElement::ONE]));
/// assert_eq!(1, degree_of(&[BaseElement::ONE, BaseElement::new(2)]));
/// assert_eq!(1, degree_of(&[BaseElement::ONE, BaseElement::new(2), BaseElement::ZERO]));
/// assert_eq!(2, degree_of(&[BaseElement::ONE, BaseElement::new(2), BaseElement::new(3)]));
/// assert_eq!(
/// 2,
/// degree_of(&[
/// BaseElement::ONE,
/// BaseElement::new(2),
/// BaseElement::new(3),
/// BaseElement::ZERO
/// ])
/// );
/// ```
pub fn degree_of<E>(poly: &[E]) -> usize
where
    E: FieldElement,
{
    // Index of the last non-zero coefficient; an all-zero (or empty) slice is
    // reported as degree 0.
    poly.iter()
        .rposition(|&coeff| coeff != E::ZERO)
        .unwrap_or(0)
}
/// Returns a polynomial with all leading ZERO coefficients removed.
///
/// # Examples
/// ```
/// # use winter_math::polynom::*;
/// # use winter_math::{fields::{f128::BaseElement}, FieldElement};
/// let a = vec![1u128, 2, 3, 4, 5, 6, 0, 0]
/// .into_iter()
/// .map(BaseElement::new)
/// .collect::<Vec<_>>();
/// let b = remove_leading_zeros(&a);
/// assert_eq!(6, b.len());
/// assert_eq!(a[..6], b);
///
/// let a = vec![0u128, 0, 0, 0]
/// .into_iter()
/// .map(BaseElement::new)
/// .collect::<Vec<_>>();
/// let b = remove_leading_zeros(&a);
/// assert_eq!(0, b.len());
/// ```
pub fn remove_leading_zeros<E>(values: &[E]) -> Vec<E>
where
    E: FieldElement,
{
    // Keep everything up to and including the last non-zero coefficient; an
    // all-zero input yields an empty vector.
    match values.iter().rposition(|&value| value != E::ZERO) {
        Some(last) => values[..=last].to_vec(),
        None => vec![],
    }
}
// HELPER FUNCTIONS
// ================================================================================================
/// Returns the coefficients of prod_i (x - xs[i]); the result has
/// `xs.len() + 1` entries with a leading coefficient of ONE.
fn get_zero_roots<E: FieldElement>(xs: &[E]) -> Vec<E> {
    // Starting from uninitialized memory is sound here: `fill_zero_roots`
    // writes every index (it walks its cursor from the top down to 0) before
    // the vector is read.
    let mut result = unsafe { utils::uninit_vector(xs.len() + 1) };
    fill_zero_roots(xs, &mut result);
    result
}
/// Fills `result` with the coefficients of prod_i (x - xs[i]) in reverse
/// (low-to-high) coefficient form; `result.len()` must be `xs.len() + 1`.
fn fill_zero_roots<E: FieldElement>(xs: &[E], result: &mut [E]) {
    let mut n = result.len();
    n -= 1;
    // the leading coefficient of a monic product is always ONE
    result[n] = E::ONE;
    for i in 0..xs.len() {
        // multiply the accumulated polynomial by (x - xs[i]): introduce a new
        // lowest coefficient, then fold -xs[i] times the shifted coefficients
        // into the affected positions
        n -= 1;
        result[n] = E::ZERO;
        #[allow(clippy::assign_op_pattern)]
        for j in n..xs.len() {
            result[j] = result[j] - result[j + 1] * xs[i];
        }
    }
}
|
use nu_engine::CallExt;
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoInterruptiblePipelineData, PipelineData, ShellError, Signature, Span,
SyntaxShape, Value,
};
/// The `sort-by` filter command: sorts pipeline input by the given columns.
#[derive(Clone)]
pub struct SortBy;
impl Command for SortBy {
fn name(&self) -> &str {
"sort-by"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("sort-by")
.rest("columns", SyntaxShape::Any, "the column(s) to sort by")
.switch("reverse", "Sort in reverse order", Some('r'))
.switch(
"insensitive",
"Sort string-based columns case-insensitively",
Some('i'),
)
.switch(
"natural",
"Sort alphanumeric string-based columns naturally",
Some('n'),
)
.category(Category::Filters)
}
fn usage(&self) -> &str {
"Sort by the given columns, in increasing order."
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
example: "[2 0 1] | sort-by",
description: "sort the list by increasing value",
result: Some(Value::List {
vals: vec![Value::test_int(0), Value::test_int(1), Value::test_int(2)],
span: Span::test_data(),
}),
},
Example {
example: "[2 0 1] | sort-by -r",
description: "sort the list by decreasing value",
result: Some(Value::List {
vals: vec![Value::test_int(2), Value::test_int(1), Value::test_int(0)],
span: Span::test_data(),
}),
},
Example {
example: "[betty amy sarah] | sort-by",
description: "sort a list of strings",
result: Some(Value::List {
vals: vec![
Value::test_string("amy"),
Value::test_string("betty"),
Value::test_string("sarah"),
],
span: Span::test_data(),
}),
},
Example {
example: "[betty amy sarah] | sort-by -r",
description: "sort a list of strings in reverse",
result: Some(Value::List {
vals: vec![
Value::test_string("sarah"),
Value::test_string("betty"),
Value::test_string("amy"),
],
span: Span::test_data(),
}),
},
Example {
example: "[test1 test11 test2] | sort-by -n",
description: "sort a list of alphanumeric strings naturally",
result: Some(Value::List {
vals: vec![
Value::test_string("test1"),
Value::test_string("test2"),
Value::test_string("test11"),
],
span: Span::test_data(),
}),
},
Example {
description: "Sort strings (case-insensitive)",
example: "echo [airplane Truck Car] | sort-by -i",
result: Some(Value::List {
vals: vec![
Value::test_string("airplane"),
Value::test_string("Car"),
Value::test_string("Truck"),
],
span: Span::test_data(),
}),
},
Example {
description: "Sort strings (reversed case-insensitive)",
example: "echo [airplane Truck Car] | sort-by -i -r",
result: Some(Value::List {
vals: vec![
Value::test_string("Truck"),
Value::test_string("Car"),
Value::test_string("airplane"),
],
span: Span::test_data(),
}),
},
Example {
description: "Sort a table by its column (reversed order)",
example: "[[fruit count]; [apple 9] [pear 3] [orange 7]] | sort-by fruit -r",
result: Some(Value::List {
vals: vec![
Value::test_record(
vec!["fruit", "count"],
vec![Value::test_string("pear"), Value::test_int(3)],
),
Value::test_record(
vec!["fruit", "count"],
vec![Value::test_string("orange"), Value::test_int(7)],
),
Value::test_record(
vec!["fruit", "count"],
vec![Value::test_string("apple"), Value::test_int(9)],
),
],
span: Span::test_data(),
}),
},
]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let columns: Vec<String> = call.rest(engine_state, stack, 0)?;
let reverse = call.has_flag("reverse");
let insensitive = call.has_flag("insensitive");
let natural = call.has_flag("natural");
let metadata = &input.metadata();
let mut vec: Vec<_> = input.into_iter().collect();
crate::sort(&mut vec, columns, call.head, insensitive, natural)?;
if reverse {
vec.reverse()
}
let iter = vec.into_iter();
match metadata {
Some(m) => {
Ok(iter.into_pipeline_data_with_metadata(m.clone(), engine_state.ctrlc.clone()))
}
None => Ok(iter.into_pipeline_data(engine_state.ctrlc.clone())),
}
}
}
#[cfg(test)]
mod test {
    use super::*;

    /// Runs every example declared in `SortBy::examples` through the engine
    /// and checks each declared `result`.
    #[test]
    fn test_examples() {
        use crate::test_examples;
        test_examples(SortBy {})
    }
}
|
#![feature(no_std, plugin)]
#![no_std]
#![plugin(arm_rt_macro)]
extern crate stm32;
extern crate rlibc;
use stm32::stm32f4::*;
/// Alternately blinks the board's green (PG13) and red (PG14) LEDs every
/// 300 ms, using TIM2 as a 1 ms time base. Never returns.
#[entry_point]
fn main() -> ! {
    // enable LED GPIOs (PG13 = green, PG14 = red)
    RCC.ahb1enr.set_gpiog_en(true);
    // set LED pins to "output"
    GPIOG.moder.set_mode(13, stm32::gpio::GPIO_moder_mode::Output)
        .set_mode(14, stm32::gpio::GPIO_moder_mode::Output);
    // enable TIM2
    RCC.apb1enr.set_tim2_en(true);
    // configure TIM2. System clock at boot is 16 MHz and we haven't
    // changed it, so setting prescaler to 15999 will yield a 1 ms tick.
    // CEN bit switches the timer on.
    TIM2.psc.set_psc(15999);
    TIM2.cr1.set_cen(stm32::timer::GPTIM32_cr1_cen::Enable);
    // apply configuration changes to TIM2
    TIM2.egr.set_ug(true);
    loop {
        // Red on, Green off (br = reset pin, bs = set pin)
        GPIOG.bsrr.set_br(13, true)
            .set_bs(14, true);
        wait_ms(300);
        // Green on, Red off
        GPIOG.bsrr.set_bs(13, true)
            .set_br(14, true);
        wait_ms(300);
    }
}
/// Busy-waits for approximately `ms` milliseconds by polling TIM2's counter
/// (configured in `main` for a 1 ms tick).
///
/// Uses wrapping subtraction so the elapsed-time computation stays correct —
/// and does not panic in debug builds — when the 32-bit counter wraps past
/// `start`.
fn wait_ms(ms : u32) {
    let start = TIM2.cnt.cnt();
    while TIM2.cnt.cnt().wrapping_sub(start) < ms
    {
        // just spin
    }
}
|
use std::*;
/// Reads one line from stdin and returns it (including the trailing newline).
fn read_line() -> String {
    let mut buf = String::new();
    io::stdin().read_line(&mut buf).unwrap();
    buf
}
/// Demonstrates the three `for`-loop borrowing forms over a `Vec`, then reads
/// an integer and prints a recursively computed sum.
fn main() {
    let mut a = vec![1, 2, 3];
    // This would be `for i in a.into_iter()`, which moves `a`, so the
    // `println!` on the following line would no longer compile:
    // for i in a {
    //     println!("i is {}", i);
    // }
    // println!("sum is {}", a.iter().sum::<i32>());
    // Equivalent to `for i in a.iter()` — borrows, so `a` remains usable
    for i in &a {
        println!("i is {}", i);
    }
    println!("sum is {}", a.iter().sum::<i32>());
    // Equivalent to `for i in a.iter_mut()` — mutable borrow
    for i in &mut a {
        *i += 10;
        println!("i is {}", i);
    }
    println!("sum is {}", a.iter().sum::<i32>());
    // Recursive sum 1 + 2 + ... + x.
    // NOTE(review): only terminates for x >= 1; smaller inputs recurse until
    // the stack overflows.
    fn gao(x: i32) -> i32 {
        if x == 1 {
            return 1;
        }
        x + gao(x - 1)
    }
    let a: i32 = read_line().trim().parse().unwrap();
    println!("a is {}", gao(a));
}
|
//! Chain specification for the evm domain.
use evm_domain_test_runtime::{AccountId as AccountId20, GenesisConfig, Precompiles, Signature};
use sp_core::{ecdsa, Pair, Public};
use sp_domains::DomainId;
use sp_runtime::traits::{IdentifyAccount, Verify};
use subspace_runtime_primitives::SSC;
type AccountPublic = <Signature as Verify>::Signer;
/// Helper function to generate an account ID from seed.
///
/// Panics if the derived dev seed (`//<seed>`) is not a valid key-derivation
/// string — acceptable here because the seeds are static test values.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId20
where
    AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
    // Derive the key pair from the dev seed, then convert its public key into
    // an on-chain account id.
    let pair = TPublic::Pair::from_string(&format!("//{seed}"), None)
        .expect("static values are valid; qed");
    AccountPublic::from(pair.public()).into_account()
}
/// The well-known development accounts endowed at genesis: one primary and one
/// `//stash` account per seed, primaries first.
fn endowed_accounts() -> Vec<AccountId20> {
    const SEEDS: [&str; 6] = ["Alice", "Bob", "Charlie", "Dave", "Eve", "Ferdie"];
    SEEDS
        .iter()
        .map(|seed| get_account_id_from_seed::<ecdsa::Public>(seed))
        .chain(
            SEEDS
                .iter()
                .map(|seed| get_account_id_from_seed::<ecdsa::Public>(&format!("{seed}//stash"))),
        )
        .collect()
}
/// Get the genesis config of the evm domain
///
/// Endows the well-known dev accounts, makes Alice the sudo key and relayer,
/// and pre-deploys a minimal revert stub at every precompile address.
pub fn testnet_evm_genesis() -> GenesisConfig {
    // This is the simplest bytecode to revert without returning any data.
    // We will pre-deploy it under all of our precompiles to ensure they can be called from
    // within contracts.
    // (PUSH1 0x00 PUSH1 0x00 REVERT)
    let revert_bytecode = vec![0x60, 0x00, 0x60, 0x00, 0xFD];
    let alice = get_account_id_from_seed::<ecdsa::Public>("Alice");
    evm_domain_test_runtime::GenesisConfig {
        system: evm_domain_test_runtime::SystemConfig {
            // Panics when the runtime was built without its WASM artifact.
            code: evm_domain_test_runtime::WASM_BINARY
                .expect("WASM binary was not build, please build it!")
                .to_vec(),
        },
        transaction_payment: Default::default(),
        balances: evm_domain_test_runtime::BalancesConfig {
            // Endow every dev account with 2,000,000 SSC.
            balances: endowed_accounts()
                .iter()
                .cloned()
                .map(|k| (k, 2_000_000 * SSC))
                .collect(),
        },
        messenger: evm_domain_test_runtime::MessengerConfig {
            relayers: vec![(alice, alice)],
        },
        sudo: evm_domain_test_runtime::SudoConfig { key: Some(alice) },
        evm_chain_id: evm_domain_test_runtime::EVMChainIdConfig { chain_id: 100 },
        evm: evm_domain_test_runtime::EVMConfig {
            // We need _some_ code inserted at the precompile address so that
            // the evm will actually call the address.
            accounts: Precompiles::used_addresses()
                .into_iter()
                .map(|addr| {
                    (
                        addr,
                        fp_evm::GenesisAccount {
                            nonce: Default::default(),
                            balance: Default::default(),
                            storage: Default::default(),
                            code: revert_bytecode.clone(),
                        },
                    )
                })
                .collect(),
        },
        ethereum: Default::default(),
        base_fee: Default::default(),
        self_domain_id: evm_domain_test_runtime::SelfDomainIdConfig {
            // Id of the genesis domain
            domain_id: Some(DomainId::new(0)),
        },
    }
}
|
use syntax::tokens::{
TokenType,
Token,
BinOp,
};
/* A sadly OOP approach on a lexer.
 * Potentially improved using a peekable iterator.
 */
pub struct Lexer {
    tokens: Vec<Token>, // tokens produced by `tokenize`
    lines: u32,         // 1-based number of the line currently being scanned
    start: usize,       // column where the current lexeme begins
    pos: usize,         // column just past the character being examined
    top: usize,         // cursor into `tokens` used by the parser helpers
}
impl Lexer {
    /// Creates an empty lexer.
    pub fn new() -> Lexer {
        Lexer {
            tokens: Vec::new(),
            lines: 0,
            start: 0,
            pos: 0,
            top: 0,
        }
    }

    /// Creates a lexer pre-populated with an existing token stream.
    pub fn from(tokens: Vec<Token>) -> Lexer {
        Lexer {
            tokens: tokens,
            lines: 0,
            start: 0,
            pos: 0,
            top: 0,
        }
    }

    /// Read-only access to the token stream produced by `tokenize`.
    pub fn get_tokens(&self) -> &Vec<Token> {
        &self.tokens
    }

    /// Clears all tokens and resets every cursor to its initial state.
    pub fn reset(&mut self) {
        self.tokens = Vec::new();
        self.lines = 0;
        self.start = 0;
        self.pos = 0;
        self.top = 0;
    }

    /// Pushes `line[start..pos]` as a token of the given type and advances
    /// `start` to `pos`.
    fn push_token(&mut self, token_type: TokenType, line: &str) {
        self.tokens.push(Token::new(
            token_type,
            String::from(&line[self.start .. self.pos]),
            self.lines,
            self.pos as u32,
        ));
        self.start = self.pos;
    }

    /// Returns the character at `pos + offset`, or a space when out of range.
    // NOTE(review): returning ' ' past end-of-line means an unterminated
    // string literal makes the scan loop in `tokenize` spin forever, since the
    // closing delimiter is never seen.
    fn look(&self, line: &str, offset: usize) -> char {
        match line.chars().nth(self.pos + offset) {
            Some(v) => v,
            None => ' ',
        }
    }

    /// Advances both cursors past consecutive spaces, never beyond the last
    /// column of the line.
    fn skip_whitespace(&mut self, line: &str) {
        while self.look(line, 0) == ' ' && self.pos < line.len() - 1 {
            self.pos += 1;
            self.start += 1;
        }
    }

    /// Maps an operator lexeme to its binary operator and precedence
    /// (lower number = tighter binding).
    // NOTE(review): "<=" maps to GtEqual and ">=" maps to LtEqual — these two
    // look swapped relative to their names; confirm against the parser's use
    // of BinOp before changing either side.
    pub fn bin_op(v: &str) -> Option<(BinOp, u8)> {
        match v {
            "*" => Some((BinOp::Mul, 1)),
            "/" => Some((BinOp::Div, 1)),
            "+" => Some((BinOp::Plus, 2)),
            "-" => Some((BinOp::Minus, 2)),
            "==" => Some((BinOp::Equal, 4)),
            "~=" => Some((BinOp::NotEqual, 4)),
            "<" => Some((BinOp::Lt, 4)),
            ">" => Some((BinOp::Gt, 4)),
            "<=" => Some((BinOp::GtEqual, 4)),
            ">=" => Some((BinOp::LtEqual, 4)),
            _ => None,
        }
    }

    /// Returns the keyword token for `line[start..pos]`, or `None` when the
    /// slice is an ordinary identifier.
    fn keyword(&mut self, line: &str) -> Option<TokenType> {
        match &line[self.start .. self.pos] {
            "define" => Some(TokenType::Definition),
            "lambda" => Some(TokenType::Lambda),
            "if" => Some(TokenType::If),
            "else" => Some(TokenType::Else),
            "return" => Some(TokenType::Return),
            "true" => Some(TokenType::True),
            "false" => Some(TokenType::False),
            "do" => Some(TokenType::Do),
            "end" => Some(TokenType::End),
            _ => None
        }
    }

    /// Checks whether the text at the cursor starts a binary operator, trying
    /// the two-character form before the one-character form.
    ///
    /// Side effect: on a match of width `w`, `pos` is advanced by `w - 1`; the
    /// caller's final `pos += 1` completes the consumption. With no match,
    /// `pos` is unchanged (the offset has counted down to 0).
    // NOTE(review): the shrink loop below requires `pos + offset < line.len()`,
    // one column stricter than slicing needs, so a one-character operator in
    // the final column of a line is never recognized.
    fn is_bin_op(&mut self, line: &str) -> bool {
        let mut is_bin_op = false;
        let mut offset = 2;
        // Shrink the probe width so the slice below stays inside the line.
        while self.pos + offset >= line.len() {
            offset -= 1;
        }
        while offset > 0 && !is_bin_op {
            match Lexer::bin_op(&line[self.start .. self.pos + offset]) {
                Some(_) => is_bin_op = true,
                None => ()
            }
            offset -= 1;
        }
        self.pos += offset;
        is_bin_op
    }

    /// Advances the parse cursor to the next token; returns false at the end.
    pub fn next_token(&mut self) -> bool {
        if self.top < self.tokens.len() {
            self.top += 1;
            return true
        }
        false
    }

    /// Moves the parse cursor back one token; returns false at the start.
    pub fn previous_token(&mut self) -> bool {
        if self.top != 0 {
            self.top -= 1;
            return true
        }
        false
    }

    /// Number of tokens between the cursor and the end of the stream.
    pub fn tokens_remaining(&self) -> usize {
        self.tokens.len() - self.top
    }

    /// Returns the token at the cursor, clamped to the last token.
    // NOTE(review): `self.tokens.len() - 1` underflows (panics) when the
    // token stream is empty.
    pub fn current_token(&self) -> &Token {
        if self.top > self.tokens.len() - 1 {
            return &self.tokens[self.tokens.len() - 1]
        }
        &self.tokens[self.top]
    }

    /// Owned copy of the current token's text.
    pub fn current_token_content(&self) -> String {
        self.current_token().content.clone()
    }

    /// Returns the current token when it has the expected type, otherwise an
    /// error describing the mismatch.
    pub fn match_current_token(&self, t: TokenType) -> Result<&Token, String> {
        match self.current_token().token_type == t {
            true => Ok(self.current_token()),
            false => Err(format!(
                "expected {:?} but found {:?}", t, self.current_token()
            ))
        }
    }

    /// Consumes exactly one character and pushes it as a token of type `t`.
    fn push_move(&mut self, t: TokenType, line: &str) {
        self.pos += 1;
        self.push_token(t, line);
    }

    /// Splits `source` into tokens line by line, appending to `self.tokens`.
    ///
    /// Panics on a character that matches no token rule (see the final match
    /// arm below).
    pub fn tokenize(&mut self, source: String) -> Result<(), String> {
        // Characters allowed after the first letter of an identifier.
        fn identifier_valid(c: char) -> bool {
            c.is_alphabetic() || c == '_'
                || c == '?'
                || c == '!'
                || c.is_digit(10)
        }
        for line in source.lines() {
            self.lines += 1;
            self.start = 0;
            self.pos = 0;
            while self.pos < line.len() {
                self.skip_whitespace(line);
                let chr = self.look(line, 0);
                // String literal: everything between matching quotes.
                if chr == '"' || chr == '\'' {
                    let del = chr;
                    self.start += 1;
                    self.pos += 1;
                    while self.look(line, 0) != del {
                        self.pos += 1;
                    }
                    self.push_token(TokenType::Text, line);
                    self.start += 1;
                    self.pos += 1;
                    continue
                }
                // Identifier or keyword: starts with a letter.
                if chr.is_alphabetic() {
                    while identifier_valid(self.look(line, 0)) {
                        self.pos += 1;
                    }
                    match self.keyword(line) {
                        Some(t) => self.push_token(t, line),
                        None => self.push_token(TokenType::Identifier, line),
                    }
                    continue
                }
                let peek = self.look(line, 1);
                // Number literal: digits, optionally signed or with a leading
                // '.', optionally with a fractional part.
                if chr.is_digit(10) ||
                    chr == '.' && peek.is_digit(10) ||
                    chr == '-' && peek.is_digit(10) {
                    if chr == '-' {
                        self.pos += 1;
                    }
                    while self.look(line, 0).is_digit(10) {
                        self.pos += 1;
                    }
                    if self.look(line, 0) == '.' && self.look(line, 1).is_digit(10) {
                        self.pos += 1;
                        while self.look(line, 0).is_digit(10) {
                            self.pos += 1;
                        }
                        self.push_token(TokenType::Float, line);
                        continue;
                    }
                    self.push_token(TokenType::Integer, line);
                    continue;
                }
                // "->" arrow (checked before the '-' binary operator).
                if chr == '-' && self.look(line, 1) == '>' {
                    self.pos += 2;
                    self.push_token(TokenType::Arrow, line);
                    continue
                }
                if self.is_bin_op(line) {
                    self.pos += 1;
                    self.push_token(TokenType::BinOp, line);
                    continue
                }
                // Single-character punctuation.
                match chr {
                    '=' => {
                        self.push_move(TokenType::Assign, line);
                        continue
                    }
                    '(' => {
                        self.push_move(TokenType::LParen, line);
                        continue
                    }
                    ')' => {
                        self.push_move(TokenType::RParen, line);
                        continue
                    }
                    '[' => {
                        self.push_move(TokenType::LBracket, line);
                        continue
                    }
                    ']' => {
                        self.push_move(TokenType::RBracket, line);
                        continue
                    }
                    '{' => {
                        self.push_move(TokenType::LBrace, line);
                        continue
                    }
                    '}' => {
                        self.push_move(TokenType::RBrace, line);
                        continue
                    }
                    ':' => {
                        self.push_move(TokenType::Colon, line);
                        continue
                    }
                    ',' => {
                        self.push_move(TokenType::Comma, line);
                        continue
                    }
                    '.' => {
                        self.push_move(TokenType::Period, line);
                        continue
                    }
                    ';' => {
                        self.push_move(TokenType::Semicolon, line);
                        continue
                    }
                    ' ' => break,
                    '\0' => break,
                    '\n' => break,
                    _ => {
                        panic!("fucked symbol: {}, line: {} col: {}",
                               &line[self.start .. line.len()],
                               self.lines, self.start)
                    },
                }
            }
        }
        Ok(())
    }
}
/// runs shell commands
///
/// Expands to an expression that spawns the command through the crate's
/// internal tokio builder, waits for it to finish, and evaluates to its
/// output. Must be used inside an `async` function that returns a `Result`
/// (the expansion uses `.await` and `?`).
#[macro_export]
macro_rules! async_shell {
    () => {};
    ($($t:tt)*) => {
        {
            $crate::__tokio_internal_builder!()
                .arg(
                    $crate::__internal_command_builder!($($t)*)
                ).spawn()?.wait_with_output().await?
        }
    };
}
/// creates a detached process
///
/// Expands to an expression that spawns the command and evaluates to the
/// child-process handle without waiting on it. Must be used inside a function
/// that returns a `Result` (the expansion uses `?`).
#[macro_export]
macro_rules! async_detach {
    () => {};
    ($($t:tt)*) => {
        {
            $crate::__tokio_internal_builder!()
                .arg(
                    $crate::__internal_command_builder!($($t)*)
                )
                .spawn()?
        }
    };
}
/// returns a string of the stdout of the process
///
/// Runs the command to completion and evaluates to its stdout decoded with
/// `from_utf8_lossy` (invalid UTF-8 becomes replacement characters). Must be
/// used inside an `async` function that returns a `Result`.
#[macro_export]
macro_rules! async_execute {
    () => {};
    ($($t:tt)*) => {
        {
            let p = $crate::__tokio_internal_builder!()
                .arg($crate::__internal_command_builder!($($t)*))
                .output().await?;
            std::string::String::from_utf8_lossy(
                p.stdout.as_slice()
            ).to_string()
        }
    };
}
/// returns a bool indicating if the process was successful
///
/// Runs the command to completion and evaluates to `status.success()`. Must
/// be used inside an `async` function that returns a `Result`.
#[macro_export]
macro_rules! async_exec {
    () => {{}};
    ($($t:tt)*) => {
        $crate::__tokio_internal_builder!()
            .arg($crate::__internal_command_builder!($($t)*))
            .output()
            .await?
            .status
            .success()
    };
}
|
use criterion::{criterion_group, criterion_main, Criterion};
use once_cell::sync::Lazy;
use rusttype::*;
// Benchmark fonts, embedded into the binary with `include_bytes!` and parsed
// lazily on first use.
static DEJA_VU_MONO: Lazy<Font<'static>> = Lazy::new(|| {
    Font::try_from_bytes(include_bytes!("../fonts/dejavu/DejaVuSansMono.ttf") as &[u8]).unwrap()
});
static OPEN_SANS_ITALIC: Lazy<Font<'static>> = Lazy::new(|| {
    Font::try_from_bytes(include_bytes!("../fonts/opensans/OpenSans-Italic.ttf") as &[u8]).unwrap()
});
// The same typeface in OTF and TTF form, to compare outline handling.
static EXO2_OTF: Lazy<Font<'static>> =
    Lazy::new(|| Font::try_from_bytes(include_bytes!("../fonts/Exo2-Light.otf") as &[u8]).unwrap());
static EXO2_TTF: Lazy<Font<'static>> =
    Lazy::new(|| Font::try_from_bytes(include_bytes!("../fonts/Exo2-Light.ttf") as &[u8]).unwrap());
/// Benchmarks rasterizing a large (600 px) '☣' glyph from DejaVu Sans Mono.
fn draw_big_biohazard(c: &mut Criterion) {
    const WIDTH: usize = 294;
    const HEIGHT: usize = 269;
    let glyph = DEJA_VU_MONO
        .glyph('☣')
        .scaled(Scale::uniform(600.0))
        .positioned(point(0.0, 0.0));
    // The target buffer dimensions must match the glyph's pixel bounds.
    let bounds = glyph.pixel_bounding_box().unwrap();
    assert_eq!(
        (bounds.width() as usize, bounds.height() as usize),
        (WIDTH, HEIGHT)
    );
    let mut pixels = [0u8; WIDTH * HEIGHT];
    c.bench_function("draw_big_biohazard", |b| {
        b.iter(|| {
            glyph.draw(|x, y, alpha| {
                pixels[x as usize + y as usize * WIDTH] = (alpha * 255.0) as u8;
            })
        });
    });
}
/// Benchmarks rasterizing a small (16 px) 'w' glyph from DejaVu Sans Mono.
fn draw_w(c: &mut Criterion) {
    const WIDTH: usize = 9;
    const HEIGHT: usize = 8;
    let glyph = DEJA_VU_MONO
        .glyph('w')
        .scaled(Scale::uniform(16.0))
        .positioned(point(0.0, 0.0));
    // The target buffer dimensions must match the glyph's pixel bounds.
    let bounds = glyph.pixel_bounding_box().unwrap();
    assert_eq!(
        (bounds.width() as usize, bounds.height() as usize),
        (WIDTH, HEIGHT)
    );
    let mut pixels = [0u8; WIDTH * HEIGHT];
    c.bench_function("draw_w", |b| {
        b.iter(|| {
            glyph.draw(|x, y, alpha| {
                pixels[x as usize + y as usize * WIDTH] = (alpha * 255.0) as u8;
            })
        });
    });
}
/// Benchmarks rasterizing a 60 px 'ΐ' glyph from Open Sans Italic.
fn draw_iota(c: &mut Criterion) {
    const WIDTH: usize = 14;
    const HEIGHT: usize = 38;
    let glyph = OPEN_SANS_ITALIC
        .glyph('ΐ')
        .scaled(Scale::uniform(60.0))
        .positioned(point(0.0, 0.0));
    // The target buffer dimensions must match the glyph's pixel bounds.
    let bounds = glyph.pixel_bounding_box().unwrap();
    assert_eq!(
        (bounds.width() as usize, bounds.height() as usize),
        (WIDTH, HEIGHT)
    );
    let mut pixels = [0u8; WIDTH * HEIGHT];
    c.bench_function("draw_iota", |b| {
        b.iter(|| {
            glyph.draw(|x, y, alpha| {
                pixels[x as usize + y as usize * WIDTH] = (alpha * 255.0) as u8;
            })
        });
    });
}
/// Benchmarks rasterizing a 300 px 'ę' glyph from the OTF build of Exo2.
fn draw_otf_tailed_e(c: &mut Criterion) {
    const WIDTH: usize = 106;
    const HEIGHT: usize = 183;
    let glyph = EXO2_OTF
        .glyph('ę')
        .scaled(Scale::uniform(300.0))
        .positioned(point(0.0, 0.0));
    // The target buffer dimensions must match the glyph's pixel bounds.
    let bounds = glyph.pixel_bounding_box().unwrap();
    assert_eq!(
        (bounds.width() as usize, bounds.height() as usize),
        (WIDTH, HEIGHT)
    );
    let mut pixels = [0u8; WIDTH * HEIGHT];
    c.bench_function("draw_otf_tailed_e", |b| {
        b.iter(|| {
            glyph.draw(|x, y, alpha| {
                pixels[x as usize + y as usize * WIDTH] = (alpha * 255.0) as u8;
            })
        });
    });
}
/// Benchmarks rasterizing a 300 px 'ę' glyph from the TTF build of Exo2.
fn draw_ttf_tailed_e(c: &mut Criterion) {
    const WIDTH: usize = 106;
    const HEIGHT: usize = 177;
    let glyph = EXO2_TTF
        .glyph('ę')
        .scaled(Scale::uniform(300.0))
        .positioned(point(0.0, 0.0));
    // The target buffer dimensions must match the glyph's pixel bounds.
    let bounds = glyph.pixel_bounding_box().unwrap();
    assert_eq!(
        (bounds.width() as usize, bounds.height() as usize),
        (WIDTH, HEIGHT)
    );
    let mut pixels = [0u8; WIDTH * HEIGHT];
    c.bench_function("draw_ttf_tailed_e", |b| {
        b.iter(|| {
            glyph.draw(|x, y, alpha| {
                pixels[x as usize + y as usize * WIDTH] = (alpha * 255.0) as u8;
            })
        });
    });
}
// Registers all glyph-drawing benchmarks and generates the harness `main`.
criterion_group!(
    draw_benches,
    draw_big_biohazard,
    draw_w,
    draw_iota,
    draw_otf_tailed_e,
    draw_ttf_tailed_e,
);
criterion_main!(draw_benches);
|
extern crate clap;
use clap::{App, Arg};
use std::process;
// imports the library crate that has a public API available to test!
use minigrep::Config;
/// Parses CLI arguments with clap, builds a `Config`, and runs the minigrep
/// search, exiting with status 1 on configuration or runtime errors.
fn main() {
    let matches = App::new("minigrep")
        .version("1.0")
        .author("James M. <jmstudyacc@gmail.com>")
        .about(
            "Searches a file for a target query string and prints any line containing the query.",
        )
        .arg(
            Arg::with_name("QUERY")
                .short("q")
                .long("query")
                .help("Provides a Slice to be used as query")
                .required(true)
                .takes_value(true),
        )
        .arg(
            Arg::with_name("FILE")
                .short("f")
                .long("file")
                .help("Defines the file that will be searched")
                .required(true)
                .takes_value(true),
        )
        .arg(
            Arg::with_name("CASE")
                .short("c")
                .long("case-sensitive")
                .multiple(false)
                .takes_value(false)
                .help("Sets if search is case-sensitive or case insensitive [default=false]")
                .required(false),
        )
        .get_matches();
    /* config now creates a new instance of the Config struct
    main() needs to handle the Result value return */
    let config = Config::new(
        String::from(matches.value_of("QUERY").unwrap()),
        String::from(matches.value_of("FILE").unwrap()),
        matches.is_present("CASE"),
    )
    .unwrap_or_else(|err| {
        /* unwrap_or_else() requires you to define custom, non-panic! error handling
        if the return is Ok() it acts like unwrap() otherwise it calls the code in the closure
        an anonymous function defined and passed as an argument to unwrap_or_else()
        eprintln! macro prints errors to stderr and not stdout, provided by the standard library.
        */
        // NOTE(review): "passing" below is likely a typo for "parsing" in the
        // user-facing error message.
        eprintln!("Problem passing arguments: {}", err);
        process::exit(1);
    });
    /* if run returns an error we return the error value and exit the code
    we return () if no error so we do not need to unwrap anything - we only care about errors */
    if let Err(e) = minigrep::run(config) {
        eprintln!("Application error: {}", e);
        process::exit(1);
        // print the error and exit
    }
}
// e.g. `cargo run -- --query <word> --file poem.txt > output.txt`
// this will print any errors to stderr - you'll see them on the command line
// successful runs of this will send the output to output.txt
|
use std::{
collections::VecDeque,
io::{stdout, Write},
time::{Duration, Instant},
};
use futures::{future::FutureExt, select, StreamExt};
use futures_timer::Delay;
use crossterm::{
cursor,
event::{Event, EventStream, KeyCode},
execute,
queue,
terminal
};
/// The four cardinal directions the snake can head in.
enum Direction {
    Up, Right, Down, Left
}
impl Direction {
    /// Returns the (dx, dy) step for one move in this direction; the y axis
    /// grows downward, following terminal coordinates.
    pub fn get_delta(&self) -> (i16, i16) {
        match self {
            Direction::Up => (0, -1),
            Direction::Right => (1, 0),
            Direction::Down => (0, 1),
            Direction::Left => (-1, 0)
        }
    }
}
/// A cell position in terminal coordinates (column, row).
struct Point {
    x: u16,
    y: u16
}
/// Error returned when the snake's next step would leave the playing field.
#[derive(Debug, Clone)]
struct OutOfBounds;
/// The snake: its body segments (head at the front of the deque) and its
/// current heading.
struct Snake {
    body: VecDeque<Point>,
    direction: Direction
}
impl Snake {
    /// Creates a three-segment snake near the bottom-center of a
    /// `room_w` x `room_h` room, heading up.
    pub fn new(room_w: u16, room_h: u16) -> Snake {
        let room_mid = room_w / 2;
        let mut body = VecDeque::new();
        // The last push lands at the front of the deque, i.e. the head.
        body.push_front(Point{ x: room_mid, y: room_h - 2 });
        body.push_front(Point{ x: room_mid, y: room_h - 3 });
        body.push_front(Point{ x: room_mid, y: room_h - 4 });
        let direction = Direction::Up;
        Snake {
            body,
            direction
        }
    }

    /// Changes the snake's heading for subsequent steps.
    pub fn turn(&mut self, dir: Direction) {
        self.direction = dir;
    }

    /// Draws each body segment as an "x" at its terminal position.
    // NOTE(review): stdout is flushed once per segment; a single flush after
    // the loop would suffice.
    pub fn draw(&self) {
        let mut stdout = stdout();
        for point in self.body.iter() {
            let Point{x, y} = point;
            queue!(stdout, cursor::MoveTo(*x, *y)).unwrap();
            print!("x");
            stdout.flush().unwrap();
        }
    }

    /// Advances the snake one cell: drops the tail segment and prepends the
    /// new head position, or returns `OutOfBounds` when the move is illegal.
    pub fn step(&mut self) -> Result<(), OutOfBounds> {
        match self.get_next_point() {
            Some(point) => {
                let _removed_point = self.body.pop_back().unwrap();
                self.body.push_front(point);
                Ok(())
            },
            None => Err(OutOfBounds)
        }
    }

    /// Computes the next head position, or `None` when the move would cross
    /// the top or left edge.
    // NOTE(review): only the zero boundaries are checked — nothing stops the
    // snake from moving past the right or bottom edge of the terminal.
    fn get_next_point(&self) -> Option<Point> {
        let head = self.body.front().unwrap();
        let (dx, dy) = self.direction.get_delta();
        if head.x == 0 && dx < 0 { return None }
        if head.y == 0 && dy < 0 { return None }
        return Some(Point {
            x: (head.x as i16 + dx) as u16,
            y: (head.y as i16 + dy) as u16
        })
    }
}
/// Main async game loop: advances the snake on a fixed tick, and handles
/// keyboard input that arrives between ticks.
///
/// Controls: `u` = up, `n` = left, `i` = right, `e` = down, Esc = quit.
/// The loop ends when the snake steps out of bounds, Esc is pressed, or the
/// event stream closes.
async fn game_loop() {
    let mut reader = EventStream::new();
    let (w, h) = terminal::size().unwrap();
    let mut snake = Snake::new(w, h);
    // Milliseconds between snake steps.
    let target_delay = 1_000;
    let mut planned_delay: u64 = target_delay;
    loop {
        let mut delay = Delay::new(Duration::from_millis(planned_delay)).fuse();
        let mut event = reader.next().fuse();
        let cycle_started_at = Instant::now();
        select! {
            // Tick fired: clear the screen, advance the snake, redraw.
            _ = delay => {
                let mut stdout = stdout();
                queue!(stdout, terminal::Clear(terminal::ClearType::All)).unwrap();
                // End the game when the snake runs out of bounds.
                if snake.step().is_err() {
                    break;
                }
                snake.draw();
                planned_delay = target_delay;
            },
            // Input arrived before the tick: handle it, then shorten the next
            // delay by the time already spent so the tick stays on schedule.
            maybe_event = event => {
                match maybe_event {
                    Some(Ok(event)) => {
                        if let Event::Key(key) = event {
                            match key.code {
                                KeyCode::Esc => break,
                                KeyCode::Char('u') => snake.turn(Direction::Up),
                                KeyCode::Char('n') => snake.turn(Direction::Left),
                                KeyCode::Char('i') => snake.turn(Direction::Right),
                                KeyCode::Char('e') => snake.turn(Direction::Down),
                                _ => {}
                            }
                        }
                    }
                    Some(Err(e)) => println!("Error: {:?}\r", e),
                    None => break,
                }
                let time_passed = cycle_started_at.elapsed();
                // BUG FIX: the original computed `target_delay - elapsed`,
                // which underflows (panics in debug, wraps to an enormous
                // delay in release) whenever event handling happens more than
                // one tick after the cycle started. Saturate at zero instead.
                planned_delay = target_delay.saturating_sub(time_passed.as_millis() as u64);
            }
        };
    }
}
/// Puts the terminal into raw mode with a cleared screen and hidden cursor,
/// runs the game loop to completion, then restores the cursor and cooked mode.
fn main() -> crossterm::Result<()> {
    terminal::enable_raw_mode()?;
    let mut stdout = stdout();
    execute!(stdout, terminal::Clear(terminal::ClearType::All))?;
    execute!(stdout, cursor::Hide)?;
    async_std::task::block_on(game_loop());
    execute!(stdout, cursor::Show)?;
    terminal::disable_raw_mode()
}
|
#![allow(dead_code)]
use std::marker::PhantomData;
use std::string::String;
/// Entities
struct User {
    user_id: u64,
    full_name: String,
    email: String,
}
struct Post<S> {
    post_id: u64,
    user: User,
    title: String,
    body: String,
    // Zero-sized marker carrying the post's lifecycle state at the type level.
    state: PhantomData<S>,
}
/// Lifecycle states (type-level markers)
struct New;
struct Unmoderated;
struct Published;
struct Deleted;
impl Default for User {
    /// Default user: the anonymous "Guest" account.
    fn default() -> User {
        User {
            user_id: 0u64,
            full_name: "Guest".to_string(),
            email: "".to_string(),
        }
    }
}
impl<S> Default for Post<S> {
    /// Default post: empty content owned by the default (guest) user.
    fn default() -> Post<S> {
        Post {
            post_id: 0,
            user: Default::default(),
            title: "".to_string(),
            body: "".to_string(),
            state: PhantomData,
        }
    }
}
/// This variant is based on `From` conversions with `PhantomData` state
/// markers: each impl below defines one legal state transition, copying the
/// post's data and changing only the type-level state.
/// New -- Unmoderated
impl From<Post<New>> for Post<Unmoderated> {
    fn from(_val: Post<New>) -> Post<Unmoderated> {
        Post {
            post_id: _val.post_id,
            user: _val.user,
            title: _val.title,
            body: _val.body,
            state: PhantomData,
        }
    }
}
/// Unmoderated -- Published
impl From<Post<Unmoderated>> for Post<Published> {
    fn from(_val: Post<Unmoderated>) -> Post<Published> {
        Post {
            post_id: _val.post_id,
            user: _val.user,
            title: _val.title,
            body: _val.body,
            state: PhantomData,
        }
    }
}
/// Unmoderated -- Deleted
impl From<Post<Unmoderated>> for Post<Deleted> {
    fn from(_val: Post<Unmoderated>) -> Post<Deleted> {
        Post {
            post_id: _val.post_id,
            user: _val.user,
            title: _val.title,
            body: _val.body,
            state: PhantomData,
        }
    }
}
/// Published -- Deleted
impl From<Post<Published>> for Post<Deleted> {
    fn from(_val: Post<Published>) -> Post<Deleted> {
        Post {
            post_id: _val.post_id,
            user: _val.user,
            title: _val.title,
            body: _val.body,
            state: PhantomData,
        }
    }
}
/// Create new Post
/// state New
fn new(user: User, title: String, body: String) -> Post<New> {
Post {
user: user,
post_id: 1u64,
..Default::default()
}
}
/// New -> Unmoderated: submit a freshly created post for moderation.
fn publish(post: Post<New>) -> Post<Unmoderated> {
    println!("New -- \"publish()\" --> Unmoderated");
    post.into()
}
/// Unmoderated -> Published: a moderator approves the post.
fn allow(post: Post<Unmoderated>) -> Post<Published> {
    println!("Unmoderated -- \"allow()\" --> Published");
    post.into()
}
/// Unmoderated -> Deleted: a moderator rejects the post.
fn deny(post: Post<Unmoderated>) -> Post<Deleted> {
    println!("Unmoderated -- \"deny()\" --> Deleted");
    post.into()
}
/// Published -> Deleted: a published post is taken down.
fn delete(post: Post<Published>) -> Post<Deleted> {
    println!("Published -- \"delete()\" --> Deleted");
    post.into()
}
/// Walks one post through the full lifecycle:
/// New -> Unmoderated -> Published -> Deleted.
fn main() {
    let user = User {
        user_id: 1u64,
        ..Default::default()
    };
    let post_new = new(user, String::from("title"), String::from("body"));
    let post_unmoderated = publish(post_new);
    let post_published = allow(post_unmoderated);
    let _post_delete = delete(post_published);
}
|
//! Azure iot_hub crate for the unofficial Microsoft Azure SDK for Rust. This crate is part of a collection of crates: for more information please refer to [https://github.com/azure/azure-sdk-for-rust](https://github.com/azure/azure-sdk-for-rust).
#![deny(missing_docs)]
//! The IoT Hub crate contains a client that can be used to manage the IoT Hub.
/// The service module contains the IoT Hub Service Client that can be used to manage the IoT Hub.
pub mod service;
/// A specialized Result type.
pub type Result<T> = std::result::Result<T, Error>;
/// A general error having to do with the IoTHub.
// Per-variant docs are intentionally omitted (`allow(missing_docs)`): each
// variant is described by its `#[error]` message and wrapped source type.
#[allow(missing_docs)]
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("etag is not set")]
    EtagNotSet,
    #[error("From connection string error: {0}")]
    FromConnectionStringError(#[from] service::FromConnectionStringError),
    #[error("Generate SAS token error: {0}")]
    GenerateSasTokenError(#[from] service::GenerateSasTokenError),
    #[error("core error: {0}")]
    CoreError(#[from] azure_core::Error),
    #[error("core http error: {0}")]
    CoreHttpError(#[from] azure_core::HttpError),
    #[error("json error: {0}")]
    JsonError(#[from] serde_json::Error),
    #[error("http error: {0}")]
    HttpError(#[from] http::Error),
}
|
use std::io;
/// Reads a line of whitespace-separated unsigned integers (pancake sizes)
/// from stdin and repeatedly flips out-of-order prefixes, printing the
/// intermediate state.
///
/// BUG FIX: the original iterated over `(pancakes.len()..0).rev()`, which is
/// an empty range, so the loop body never executed and the program only
/// echoed its input. The loop now runs from the last index down to 1 (index 0
/// has no left neighbour to compare against). The unused `max_idx` binding
/// was removed.
fn main() {
    let mut buffer: String = String::new();
    io::stdin().read_line(&mut buffer)
        .expect("failed to read line");
    let mut pancakes: Vec<u32> = buffer
        .trim()
        .split_whitespace()
        .map(|x: &str| x.parse::<u32>().unwrap())
        .collect();
    println!("{:?}", pancakes);
    for i in (1..pancakes.len()).rev() {
        println!("{:?}", i);
        // When position i is out of order with its left neighbour, flip the
        // whole prefix [0..=i].
        if pancakes[i] < pancakes[i - 1] {
            let mut slice: Vec<u32> = pancakes[..i + 1].to_vec();
            slice.reverse();
            pancakes[..i + 1].clone_from_slice(&slice);
            println!("{:?}", slice);
        }
    }
}
|
use std::env;
use std::fs;
use std::io;
// When true, also count the fuel needed to carry the fuel itself (part 2).
static PART2: bool = true;

/// Reads module masses from the file named by the first CLI argument and
/// prints the total fuel requirement.
fn main() {
    let input: &String = &env::args().nth(1).unwrap();
    let input_data = read_input(&input).unwrap();
    let result = calc_fuel(&input_data);
    println!("Total fuel: {}", result);
}
/// Reads whitespace-separated integers (module masses) from the file at
/// `path`. Panics if any token is not a valid `i32`.
fn read_input(path: &str) -> io::Result<Vec<i32>> {
    let contents = fs::read_to_string(path)?;
    Ok(contents
        .split_whitespace()
        .map(|token| token.parse::<i32>().unwrap())
        .collect())
}
/// Fuel required to launch a module of the given mass: floor(mass / 3) - 2.
fn calc_module_fuel(input: i32) -> i32 {
    input / 3 - 2
}
/// Total fuel across all modules; when `PART2` is set, each module's fuel
/// also includes the fuel needed to carry that fuel.
fn calc_fuel(input_data: &Vec<i32>) -> i32 {
    input_data
        .iter()
        .map(|&mass| {
            let fuel = calc_module_fuel(mass);
            if PART2 {
                fuel + calc_additional_fuel(fuel)
            } else {
                fuel
            }
        })
        .sum()
}
fn calc_additional_fuel(input: i32) -> i32 {
let mut total_fuel = 0;
let mut additional_fuel = calc_module_fuel(input);
while additional_fuel > 0 {
total_fuel = total_fuel + additional_fuel;
additional_fuel = calc_module_fuel(additional_fuel);
}
total_fuel
} |
pub mod control_rov;
pub mod port_select;
use ::errors::*;
/// Transition a screen requests from the main loop after an update.
pub enum Trans {
    Quit,
    None,
    Switch(Box<Screen>),
}
/// A screen driven by the main loop: initialized once, then updated and
/// rendered each frame with the elapsed time `delta`.
pub trait Screen {
    /// Initializes the screen with engine resources.
    fn init(&mut self, engine: &mut Engine) -> Result<()>;
    /// Advances the screen state; returns the transition to perform.
    fn update(&mut self, engine: &mut Engine, delta: f64) -> Result<Trans>;
    /// Draws the screen.
    fn render(&mut self, engine: &mut Engine, delta: f64) -> Result<()>;
}
use sdl2::EventPump;
use sdl2::render::{WindowCanvas, Texture};
use rusttype::{Font, PositionedGlyph};
use rusttype::gpu_cache::Cache;
use gilrs;
use config::Config;
/// Shared engine state handed to every screen.
pub struct Engine<'app> {
    pub event_pump: EventPump,              // SDL event source
    pub controllers: gilrs::Gilrs,          // game controller input
    pub canvas: WindowCanvas,               // render target
    pub rfont: Font<'app>,                  // font used for text rendering
    pub cache: Cache,                       // glyph rasterization cache
    pub glyphs: Vec<PositionedGlyph<'app>>, // glyphs queued by `queue_text`
    pub cache_texture: Texture<'app>,       // texture backing the glyph cache
    pub config: Config,
}
use rusttype::Scale;
impl<'a> Engine<'a> {
    /// Lays out `text` starting at (x, y) at the given scale and appends the
    /// positioned glyphs to the pending queue, drawn later by `render_text`.
    pub fn queue_text(&mut self, x: f32, y: f32, scale: Scale, text: &str) {
        use unicode_normalization::UnicodeNormalization;
        use rusttype::Point;
        // Shift the baseline down by one line height so (x, y) names the
        // top-left corner of the text rather than its baseline.
        let y = y + scale.y;
        let mut caret = Point {x, y};
        // Iterate over NFC-normalized characters.
        for c in text.nfc() {
            let base_glyph = if let Some(glyph) = self.rfont.glyph(c) {
                glyph
            } else {
                // Skip characters the font has no glyph for.
                continue;
            };
            let glyph = base_glyph.scaled(scale).positioned(caret);
            // Advance by the glyph's advance width; no pair kerning is applied.
            caret.x += glyph.unpositioned().h_metrics().advance_width;
            self.glyphs.push(glyph.standalone());
        }
    }

    /// Rasterizes all queued glyphs into the cache texture, copies each glyph
    /// rectangle to the canvas, and clears the queue.
    pub fn render_text(&mut self) {
        use sdl2::rect::Rect as SdlRect;
        // Stage every pending glyph for rasterization.
        for glyph in &self.glyphs {
            self.cache.queue_glyph(0, glyph.clone());
        }
        let mut to_cache = vec![];
        self.cache.cache_queued(|rect,data| {
            let rect = SdlRect::new(rect.min.x as i32,
                                    rect.min.y as i32,
                                    rect.width(),
                                    rect.height());
            let mut pixel_data = vec![];
            // Assuming the cache texture is in RGBA8888
            for p in data {
                // `data` holds one coverage byte per pixel; store it as alpha
                // and saturate the color channels.
                let fill = 0xFF;
                pixel_data.push(*p);
                pixel_data.push(fill);
                pixel_data.push(fill);
                pixel_data.push(fill);
            }
            to_cache.push((rect, pixel_data));
        }).expect("render_text queue character");
        // This for loop is used to avoid double borrowing in the closure
        for (rect, pixel_data) in to_cache {
            self.cache_texture.update(
                Some(rect),
                &pixel_data,
                rect.width() as usize * 4)
                .expect("Error updating font cache");
        }
        let (cache_width, cache_height) = self.cache.dimensions();
        let (cache_width, cache_height) = (cache_width as f32, cache_height as f32);
        for glyph in &self.glyphs {
            let cache_result = self.cache.rect_for(0, glyph)
                .expect("Glyph not in cache");
            if let Some((src, dest)) = cache_result {
                // `src` is in normalized [0, 1] texture coordinates; scale it
                // back to texel units for SDL.
                let cache_rect = SdlRect::new(
                    (src.min.x * cache_width) as i32,
                    (src.min.y * cache_height) as i32,
                    (src.width() * cache_width) as u32,
                    (src.height() * cache_height) as u32);
                let dest_rect = SdlRect::new(
                    dest.min.x as i32,
                    dest.min.y as i32,
                    dest.width() as u32,
                    dest.height() as u32);
                self.canvas.copy(
                    &self.cache_texture,
                    Some(cache_rect),
                    Some(dest_rect))
                    .expect("Error rendering glyph to screen");
            }
        }
        self.glyphs.clear();
    }
}
|
use super::core::element::*;
use super::core::level::Level;
use std::fs;
use unicode_segmentation::UnicodeSegmentation;
pub fn element_to_unicode(element: Option<&Element>) -> &str {
match element {
Some(Element::Object(Object::FERRIS)) => return "🦀",
Some(Element::Object(Object::ROCKET)) => return "🚀",
Some(Element::Object(Object::FLAG)) => return "🚩",
Some(Element::Object(Object::WALL)) => return "🧱",
Some(Element::Object(Object::WATER)) => return "🌊",
Some(Element::Object(Object::LAVA)) => return "🔥",
Some(Element::Object(Object::KEY)) => return "🔑",
Some(Element::Object(Object::DOOR)) => return "🚪",
Some(Element::Object(Object::MOON)) => return "🌙",
Some(Element::Object(Object::STAR)) => return "⭐",
Some(Element::Object(Object::BAT)) => return "🦇",
Some(Element::Object(Object::HAND)) => return "🤚",
Some(Element::Object(Object::FUNGUS)) => return "🍄",
Some(Element::Object(Object::KEKE)) => return "👽",
Some(Element::Object(Object::ICE)) => return "❄️ ",
Some(Element::Object(Object::SKULL)) => return "💀",
Some(Element::Object(Object::LOVE)) => return "❤️ ",
Some(Element::Object(Object::BOX)) => return "📦",
Some(Element::Object(Object::CLIFF)) => return "⛰️ ",
Some(Element::Object(Object::GHOST)) => return "👻",
Some(Element::Object(Object::CLOUD)) => return "☁️",
Some(Element::Object(Object::ME)) => return "🙂",
Some(Element::Object(Object::FENCE)) => return "🚧",
Some(Element::Object(Object::STATUE)) => return "🗿",
Some(Element::Object(Object::ROCK)) => return "💎",
Some(Element::Object(Object::GRASS)) => return "🌿",
Some(Element::Object(Object::FLOWER)) => return "🌼",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FERRIS)))) => return "Fe",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ROCKET)))) => return "Ro",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FLAG)))) => return "Fg",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::WALL)))) => return "Wa",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::WATER)))) => return "Wt",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::TEXT)))) => return "Tx",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::LAVA)))) => return "La",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::KEY)))) => return "Ke",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::DOOR)))) => return "Do",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::MOON)))) => return "Mo",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::STAR)))) => return "Sr",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::BAT)))) => return "Ba",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::HAND)))) => return "Hd",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FUNGUS)))) => return "Fu",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::KEKE)))) => return "ET",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ICE)))) => return "Ic",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::SKULL)))) => return "Sk",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::LOVE)))) => return "Lv",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::BOX)))) => return "Bx",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::CLIFF)))) => return "Cf",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::GHOST)))) => return "Gh",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::CLOUD)))) => return "Cd",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ME)))) => return "M€",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FENCE)))) => return "Fn",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::STATUE)))) => return "Su",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ROCK)))) => return "Rc",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::GRASS)))) => return "Gr",
Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FLOWER)))) => return "Fw",
Some(Element::Text(Text::Verb(Verb::IS))) => return "==",
Some(Element::Text(Text::Verb(Verb::HAS))) => return "Ha",
Some(Element::Text(Text::Misc(Misc::AND))) => return "&&",
Some(Element::Text(Text::Misc(Misc::ON))) => return "On",
Some(Element::Text(Text::Misc(Misc::NEAR))) => return "Nr",
Some(Element::Text(Text::Misc(Misc::FACING))) => return "Fc",
Some(Element::Text(Text::Misc(Misc::LONELY))) => return "Lo",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::YOU)))) => return "U ",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::WIN)))) => return "Wi",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::STOP)))) => return "St",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::PUSH)))) => return "Pu",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::SINK)))) => return "Si",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::FLOAT)))) => return "Fl",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::MOVE)))) => return "Mv",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::DEFEAT)))) => return "Df",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::HOT)))) => return "Ho",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::MELT)))) => return "Me",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::SHIFT)))) => return "Sh",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::OPEN)))) => return "Op",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::SHUT)))) => return "Cl",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::WEAK)))) => return "We",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::PULL)))) => return "Pl",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::TELE)))) => return "Te",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::SWAP)))) => return "Sw",
Some(Element::Text(Text::Nominal(Nominal::Adjective(Adjective::FALL)))) => return "Fa",
None => return "..",
};
}
// Kind of reverse function of the function above; try to de-duplicate that, but without dropping the match (that will detect conflicting definitions)
pub fn unicode_to_element(chars: &str) -> Option<Element> {
match chars {
"🦀" => Some(Element::Object(Object::FERRIS)),
"🚀" => Some(Element::Object(Object::ROCKET)),
"🚩" => Some(Element::Object(Object::FLAG)),
"🧱" => Some(Element::Object(Object::WALL)),
"🌊" => Some(Element::Object(Object::WATER)),
"🔥" => Some(Element::Object(Object::LAVA)),
"🔑" => Some(Element::Object(Object::KEY)),
"🚪" => Some(Element::Object(Object::DOOR)),
"🌙" => Some(Element::Object(Object::MOON)),
"⭐" => Some(Element::Object(Object::STAR)),
"🦇" => Some(Element::Object(Object::BAT)),
"🤚" => Some(Element::Object(Object::HAND)),
"🍄" => Some(Element::Object(Object::FUNGUS)),
"👽" => Some(Element::Object(Object::KEKE)),
"❄️" => Some(Element::Object(Object::ICE)),
"💀" => Some(Element::Object(Object::SKULL)),
"❤️" => Some(Element::Object(Object::LOVE)),
"📦" => Some(Element::Object(Object::BOX)),
"⛰️" => Some(Element::Object(Object::CLIFF)),
"👻" => Some(Element::Object(Object::GHOST)),
"☁️" => Some(Element::Object(Object::CLOUD)),
"🙂" => Some(Element::Object(Object::ME)),
"🚧" => Some(Element::Object(Object::FENCE)),
"🗿" => Some(Element::Object(Object::STATUE)),
"💎" => Some(Element::Object(Object::ROCK)),
"🌿" => Some(Element::Object(Object::GRASS)),
"🌼" => Some(Element::Object(Object::FLOWER)),
"Fe" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FERRIS)))),
"Ro" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ROCKET)))),
"Fg" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FLAG)))),
"Wa" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::WALL)))),
"Wt" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::WATER)))),
"Tx" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::TEXT)))),
"La" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::LAVA)))),
"Ke" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::KEY)))),
"Do" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::DOOR)))),
"Mo" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::MOON)))),
"Sr" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::STAR)))),
"Ba" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::BAT)))),
"Hd" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::HAND)))),
"Fu" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FUNGUS)))),
"ET" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::KEKE)))),
"Ic" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ICE)))),
"Sk" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::SKULL)))),
"Lv" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::LOVE)))),
"Bx" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::BOX)))),
"Cf" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::CLIFF)))),
"Gh" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::GHOST)))),
"Cd" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::CLOUD)))),
"M€" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ME)))),
"Fn" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FENCE)))),
"Su" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::STATUE)))),
"Rc" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::ROCK)))),
"Gr" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::GRASS)))),
"Fw" => Some(Element::Text(Text::Nominal(Nominal::Noun(Noun::FLOWER)))),
"==" => Some(Element::Text(Text::Verb(Verb::IS))),
"Ha" => Some(Element::Text(Text::Verb(Verb::HAS))),
"&&" => Some(Element::Text(Text::Misc(Misc::AND))),
"On" => Some(Element::Text(Text::Misc(Misc::ON))),
"Nr" => Some(Element::Text(Text::Misc(Misc::NEAR))),
"Fc" => Some(Element::Text(Text::Misc(Misc::FACING))),
"Lo" => Some(Element::Text(Text::Misc(Misc::LONELY))),
"U " => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::YOU,
)))),
"Wi" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::WIN,
)))),
"St" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::STOP,
)))),
"Pu" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::PUSH,
)))),
"Si" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::SINK,
)))),
"Fl" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::FLOAT,
)))),
"Mv" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::MOVE,
)))),
"Df" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::DEFEAT,
)))),
"Ho" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::HOT,
)))),
"Me" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::MELT,
)))),
"Sh" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::SHIFT,
)))),
"Op" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::OPEN,
)))),
"Cl" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::SHUT,
)))),
"We" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::WEAK,
)))),
"Pl" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::PULL,
)))),
"Te" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::TELE,
)))),
"Sw" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::SWAP,
)))),
"Fa" => Some(Element::Text(Text::Nominal(Nominal::Adjective(
Adjective::FALL,
)))),
".." => None,
_ => panic!("Unknown character {}", chars),
}
}
/// Parses a textual level into a `Level`. Lines beginning with '#' are
/// comments. Each cell is two ASCII columns wide: a pair of ASCII
/// graphemes ("Fe", "==", ".."), or a single wide (non-ASCII) grapheme
/// such as an emoji. Panics if rows decode to differing widths.
pub fn build_level_from_lines(lines: std::str::Lines, seed: Option<[u8; 32]>) -> Level {
let mut width = 0;
let mut height = 0;
// Elements are collected first because width/height are only known
// after the whole input has been scanned.
let mut elements_to_add: Vec<(usize, usize, Element)> = Vec::new();
for line in lines {
if line.starts_with("#") {
continue;
}
// Width is tracked in single ASCII columns; one cell == two columns.
let mut local_width = 0;
let graphemes = UnicodeSegmentation::graphemes(line, true).collect::<Vec<&str>>();
let mut previous_grapheme = "";
for grapheme in graphemes {
if grapheme.is_ascii() {
local_width += 1;
// ASCII cells arrive as pairs: decode on every second column,
// stash the first half otherwise.
if (local_width % 2) == 0 {
if let Some(element) =
unicode_to_element(&(previous_grapheme.to_owned() + grapheme))
{
elements_to_add.push((local_width / 2 - 1, height, element))
}
} else {
previous_grapheme = grapheme;
}
} else {
// A non-ASCII grapheme occupies a whole two-column cell by itself.
local_width += 2;
if let Some(element) = unicode_to_element(grapheme) {
elements_to_add.push((local_width / 2 - 1, height, element));
}
}
}
// Every row must be exactly as wide as the first one.
if width > 0 && local_width != width * 2 {
panic!(
"The width of the line {} is inconsistent with the one of previous line(s)!",
&line
);
}
height += 1;
width = local_width / 2;
}
let mut level = Level::new(width, height, seed);
for (x, y, element) in elements_to_add {
level.add_element(x, y, element);
}
level
}
/// Reads a level description from `file_path` and parses it with
/// `build_level_from_lines`.
///
/// Generalized to accept anything path-like (`&str`, `String`, `&Path`,
/// `PathBuf`); existing callers passing a `String` keep working.
/// Panics with the offending path if the file cannot be read.
pub fn build_level_from_file(file_path: impl AsRef<std::path::Path>, seed: Option<[u8; 32]>) -> Level {
    let path = file_path.as_ref();
    let file_content = fs::read_to_string(path)
        .unwrap_or_else(|err| panic!("Could not open file {:?}: {}", path, err));
    build_level_from_lines(file_content.lines(), seed)
}
/// Serializes `level` back to its textual form, one `String` per row.
/// Only the first element of each cell is drawn; empty cells become "..".
pub fn get_level_lines(level: &Level) -> Vec<String> {
    let mut rows: Vec<String> = Vec::with_capacity(level.height);
    for row in 0..level.height {
        // Each cell is at least two bytes wide, so this capacity is only
        // a lower bound; the String grows as needed.
        let mut rendered = String::with_capacity(level.width);
        for col in 0..level.width {
            let cell = level.get_elements(col, row);
            rendered.push_str(element_to_unicode(cell.get(0)));
        }
        rows.push(rendered);
    }
    rows
}
|
mod support;
use self::support::*;
#[test]
fn outbound_http1() {
    let _ = env_logger::try_init();
    // Plain HTTP/1 backend reached through the proxy's outbound side,
    // resolved via the controller's destination entry.
    let server = server::http1().route("/", "hello h1").run();
    let control = controller::new()
        .destination("transparency.test.svc.cluster.local", server.addr)
        .run();
    let proxy = proxy::new()
        .controller(control)
        .outbound(server)
        .run();
    let client = client::http1(proxy.outbound, "transparency.test.svc.cluster.local");
    assert_eq!(client.get("/"), "hello h1");
}
#[test]
fn inbound_http1() {
    let _ = env_logger::try_init();
    // Plain HTTP/1 server sitting behind the proxy's inbound listener.
    let server = server::http1().route("/", "hello h1").run();
    let control = controller::new().run();
    let proxy = proxy::new().controller(control).inbound(server).run();
    let client = client::http1(proxy.inbound, "transparency.test.svc.cluster.local");
    assert_eq!(client.get("/"), "hello h1");
}
#[test]
fn http1_connect_not_supported() {
    let _ = env_logger::try_init();
    let server = server::tcp().run();
    let control = controller::new().run();
    let proxy = proxy::new().controller(control).inbound(server).run();
    // Issue a raw CONNECT over TCP; the proxy does not support tunneling
    // and must answer with a 502 itself.
    let client = client::tcp(proxy.inbound);
    let conn = client.connect();
    conn.write("CONNECT foo.bar:443 HTTP/1.1\r\nHost: foo.bar:443\r\n\r\n");
    let expected = "HTTP/1.1 502 Bad Gateway\r\n";
    assert_eq!(s(&conn.read()[..expected.len()]), expected);
}
#[test]
fn http1_removes_connection_headers() {
let _ = env_logger::try_init();
// The server must never see "x-foo-bar": the client lists it in its
// Connection header, so the proxy has to strip it on the way in.
let srv = server::http1()
.route_fn("/", |req| {
assert!(!req.headers().contains_key("x-foo-bar"));
Response::builder()
.header("x-server-quux", "lorem ipsum")
.header("connection", "close, x-server-quux")
.body("".into())
.unwrap()
})
.run();
let ctrl = controller::new().run();
let proxy = proxy::new()
.controller(ctrl)
.inbound(srv)
.run();
let client = client::http1(proxy.inbound, "transparency.test.svc.cluster.local");
let res = client.request(client.request_builder("/")
.header("x-foo-bar", "baz")
.header("connection", "x-foo-bar, close"));
assert_eq!(res.status(), http::StatusCode::OK);
// Likewise, the server's "x-server-quux" is connection-scoped and must
// not reach the client.
assert!(!res.headers().contains_key("x-server-quux"));
}
#[test]
fn http10_with_host() {
let _ = env_logger::try_init();
let host = "transparency.test.svc.cluster.local";
// The server asserts that both the HTTP/1.0 version and the explicit
// Host header survive the trip through the proxy.
let srv = server::http1()
.route_fn("/", move |req| {
assert_eq!(req.version(), http::Version::HTTP_10);
assert_eq!(req.headers().get("host").unwrap(), host);
Response::builder()
.version(http::Version::HTTP_10)
.body("".into())
.unwrap()
})
.run();
let ctrl = controller::new().run();
let proxy = proxy::new()
.controller(ctrl)
.inbound(srv)
.run();
let client = client::http1(proxy.inbound, host);
let res = client.request(client.request_builder("/")
.version(http::Version::HTTP_10)
.header("host", host));
assert_eq!(res.status(), http::StatusCode::OK);
// The response must come back as HTTP/1.0 as well.
assert_eq!(res.version(), http::Version::HTTP_10);
}
#[test]
fn http10_without_host() {
let _ = env_logger::try_init();
// HTTP/1.0 allows requests with an empty Host; the outbound proxy then
// resolves by the server's socket address registered below.
let srv = server::http1()
.route_fn("/", move |req| {
assert_eq!(req.version(), http::Version::HTTP_10);
Response::builder()
.version(http::Version::HTTP_10)
.body("".into())
.unwrap()
})
.run();
let ctrl = controller::new()
.destination(&srv.addr.to_string(), srv.addr)
.run();
let proxy = proxy::new()
.controller(ctrl)
.outbound(srv)
.run();
let client = client::http1(proxy.outbound, "foo.bar");
// Note: the Host header is deliberately empty.
let res = client.request(client.request_builder("/")
.version(http::Version::HTTP_10)
.header("host", ""));
assert_eq!(res.status(), http::StatusCode::OK);
assert_eq!(res.version(), http::Version::HTTP_10);
}
#[test]
fn http11_absolute_uri_differs_from_host() {
let _ = env_logger::try_init();
let host = "transparency.test.svc.cluster.local";
// The client sends absolute-form URIs; the server checks that routing
// (and the Host it observes) follows the URI authority, not the
// conflicting Host header set below.
let srv = server::http1()
.route_fn("/", move |req| {
assert_eq!(req.version(), http::Version::HTTP_11);
assert_eq!(req.headers().get("host").unwrap(), host);
Response::builder()
.body("".into())
.unwrap()
})
.run();
let ctrl = controller::new().run();
let proxy = proxy::new()
.controller(ctrl)
.inbound(srv)
.run();
let client = client::http1_absolute_uris(proxy.inbound, host);
let res = client.request(client.request_builder("/")
.version(http::Version::HTTP_11)
.header("host", "foo.bar"));
assert_eq!(res.status(), http::StatusCode::OK);
assert_eq!(res.version(), http::Version::HTTP_11);
}
#[test]
fn outbound_tcp() {
    let _ = env_logger::try_init();
    let greeting = "custom tcp hello";
    let farewell = "custom tcp bye";
    // TCP server that verifies the bytes it received before replying.
    let server = server::tcp()
        .accept(move |read| {
            assert_eq!(read, greeting.as_bytes());
            farewell
        })
        .run();
    let control = controller::new().run();
    let proxy = proxy::new().controller(control).outbound(server).run();
    let client = client::tcp(proxy.outbound);
    let conn = client.connect();
    conn.write(greeting);
    assert_eq!(conn.read(), farewell.as_bytes());
}
#[test]
fn inbound_tcp() {
    let _ = env_logger::try_init();
    let greeting = "custom tcp hello";
    let farewell = "custom tcp bye";
    // Same round-trip as outbound_tcp, but through the inbound listener.
    let server = server::tcp()
        .accept(move |read| {
            assert_eq!(read, greeting.as_bytes());
            farewell
        })
        .run();
    let control = controller::new().run();
    let proxy = proxy::new().controller(control).inbound(server).run();
    let client = client::tcp(proxy.inbound);
    let conn = client.connect();
    conn.write(greeting);
    assert_eq!(conn.read(), farewell.as_bytes());
}
#[test]
fn tcp_with_no_orig_dst() {
    let _ = env_logger::try_init();
    let server = server::tcp().accept(move |_| "don't read me").run();
    let control = controller::new().run();
    let proxy = proxy::new().controller(control).inbound(server).run();
    // No outbound was configured on the proxy, so a connection to its
    // outbound port should produce no data at all.
    let client = client::tcp(proxy.outbound);
    let conn = client.connect();
    conn.write("custom tcp hello");
    // try_read may fail outright or return an empty buffer; both count
    // as "nothing was forwarded".
    let read = conn.try_read().unwrap_or_default();
    assert_eq!(read, b"");
}
#[test]
fn http11_upgrade_not_supported() {
let _ = env_logger::try_init();
// our h1 proxy will strip the Connection header
// and headers it mentions
let msg1 = "\
GET /chat HTTP/1.1\r\n\
Host: foo.bar\r\n\
Connection: Upgrade\r\n\
Upgrade: websocket\r\n\
\r\n\
";
// but let's pretend the server tries to upgrade
// anyways
let msg2 = "\
HTTP/1.1 101 Switching Protocols\r\n\
Upgrade: websocket\r\n\
Connection: Upgrade\r\n\
\r\n\
";
// The server double-checks that the Upgrade header was stripped before
// answering with the (bogus) 101.
let srv = server::tcp()
.accept(move |read| {
let head = s(&read);
assert!(!head.contains("Upgrade: websocket"));
msg2
})
.run();
let ctrl = controller::new().run();
let proxy = proxy::new()
.controller(ctrl)
.inbound(srv)
.run();
let client = client::tcp(proxy.inbound);
let tcp_client = client.connect();
tcp_client.write(msg1);
// The proxy cannot complete the upgrade, so the client sees a 500.
let expected = "HTTP/1.1 500 ";
assert_eq!(s(&tcp_client.read()[..expected.len()]), expected);
}
#[test]
fn http1_get_doesnt_add_transfer_encoding() {
    let _ = env_logger::try_init();
    // A GET has no body, so the proxy must not invent a transfer-encoding
    // header while forwarding it.
    let server = server::http1()
        .route_fn("/", |req| {
            assert!(!req.headers().contains_key("transfer-encoding"));
            Response::new("hello h1".into())
        })
        .run();
    let control = controller::new().run();
    let proxy = proxy::new().controller(control).inbound(server).run();
    let client = client::http1(proxy.inbound, "transparency.test.svc.cluster.local");
    assert_eq!(client.get("/"), "hello h1");
}
|
use {ContextRef, ValueRef, TypeRef, AtomicOrdering, SynchronizationScope,
IntegerPredicateKind, FloatPredicateKind, AtomicBinaryOp};
use libc;
// FFI shims over LLVM's C++ instruction API via the `cpp!` macro: each
// `pub fn` below pairs a Rust signature with a C++ body that the build
// step extracts verbatim. (`//` comments are valid in both languages.)
cpp! {
#include "ffi_helpers.h"
#include "llvm/IR/Instructions.h"
pub fn LLVMRustInstructionInsertAfter(inst: ValueRef as "llvm::Value*",
after: ValueRef as "llvm::Value*") {
support::cast<llvm::Instruction>(inst)->insertAfter(
support::cast<llvm::Instruction>(after));
}
pub fn LLVMRustInstructionAppend(inst: ValueRef as "llvm::Value*",
block: ValueRef as "llvm::Value*") {
support::cast<llvm::BasicBlock>(block)->getInstList().push_back(
support::cast<llvm::Instruction>(inst)
);
}
pub fn LLVMRustCreateReturnInst(context: ContextRef as "llvm::LLVMContext*",
ret_val: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::ReturnInst::Create(*context, ret_val);
}
pub fn LLVMRustCreateBranchInst(on_true: ValueRef as "llvm::Value*",
on_false: ValueRef as "llvm::Value*",
condition: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::BranchInst::Create(
support::cast<llvm::BasicBlock>(on_true), support::cast<llvm::BasicBlock>(on_false),
condition);
}
// FIXME: add bundle support
pub fn LLVMRustCreateCallInst(func: ValueRef as "llvm::Value*",
args: &[ValueRef] as "support::Slice<llvm::Value*>")
-> ValueRef as "llvm::Value*" {
auto bundles = llvm::None;
return llvm::CallInst::Create(support::cast<llvm::Function>(func), args.ref(), bundles);
}
pub fn LLVMRustCreateAllocaInst(ty: TypeRef as "llvm::Type*",
array_size: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return new llvm::AllocaInst(ty, array_size);
}
pub fn LLVMRustCreateStoreInst(value: ValueRef as "llvm::Value*",
ptr: ValueRef as "llvm::Value*",
is_volatile: bool as "bool",
align: libc::c_uint as "unsigned",
atomic_ordering: AtomicOrdering as "llvm::AtomicOrdering",
sync_scope: SynchronizationScope as "llvm::SynchronizationScope")
-> ValueRef as "llvm::Value*" {
return new llvm::StoreInst(value, ptr, is_volatile, align, atomic_ordering, sync_scope);
}
pub fn LLVMRustCreateLoadInst(ptr: ValueRef as "llvm::Value*",
is_volatile: bool as "bool",
align: libc::c_uint as "unsigned",
atomic_ordering: AtomicOrdering as "llvm::AtomicOrdering",
sync_scope: SynchronizationScope as "llvm::SynchronizationScope")
-> ValueRef as "llvm::Value*" {
return new llvm::LoadInst(ptr, llvm::Twine(), is_volatile, align, atomic_ordering, sync_scope);
}
pub fn LLVMRustCreateGetElementPtrInst(pointee_ty: TypeRef as "llvm::Type*",
ptr: ValueRef as "llvm::Value*",
indices: &[ValueRef] as "support::Slice<llvm::Value*>",
in_bounds: bool as "bool")
-> ValueRef as "llvm::Value*" {
if (in_bounds)
return llvm::GetElementPtrInst::CreateInBounds(pointee_ty, ptr, indices.ref());
else
return llvm::GetElementPtrInst::Create(pointee_ty, ptr, indices.ref());
}
pub fn LLVMRustCreateExtractElementInst(vector: ValueRef as "llvm::Value*",
index: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::ExtractElementInst::Create(vector, index);
}
pub fn LLVMRustCreateExtractValueInst(aggregate: ValueRef as "llvm::Value*",
indices: &[libc::c_uint] as "support::Slice<unsigned>")
-> ValueRef as "llvm::Value*" {
return llvm::ExtractValueInst::Create(aggregate, indices.ref());
}
pub fn LLVMRustCreateInsertElementInst(vector: ValueRef as "llvm::Value*",
new_element: ValueRef as "llvm::Value*",
index: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::InsertElementInst::Create(vector, new_element, index);
}
pub fn LLVMRustCreateInsertValueInst(aggregate: ValueRef as "llvm::Value*",
new_value: ValueRef as "llvm::Value*",
indices: &[libc::c_uint] as "support::Slice<unsigned>")
-> ValueRef as "llvm::Value*" {
return llvm::InsertValueInst::Create(aggregate, new_value, indices.ref());
}
pub fn LLVMRustCreateUnreachableInst(context: ContextRef as "llvm::LLVMContext*")
-> ValueRef as "llvm::Value*" {
return new llvm::UnreachableInst(*context);
}
pub fn LLVMRustCreateSelectInst(condition: ValueRef as "llvm::Value*",
on_true: ValueRef as "llvm::Value*",
on_false: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::SelectInst::Create(condition, on_true, on_false);
}
pub fn LLVMRustCreateAddrSpaceCastInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::AddrSpaceCastInst(value, ty);
}
pub fn LLVMRustCreateFenceInst(context: ContextRef as "llvm::LLVMContext*",
ordering: AtomicOrdering as "unsigned",
sync_scope: SynchronizationScope as "unsigned")
-> ValueRef as "llvm::Value*" {
return new llvm::FenceInst(*context, (llvm::AtomicOrdering)ordering,
(llvm::SynchronizationScope)sync_scope);
}
pub fn LLVMRustCreateBinaryOperator(opcode: libc::c_uint as "unsigned",
lhs: ValueRef as "llvm::Value*",
rhs: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::BinaryOperator::Create((llvm::Instruction::BinaryOps)opcode, lhs, rhs);
}
pub fn LLVMRustCreateBinaryOperatorNSW(opcode: libc::c_uint as "unsigned",
lhs: ValueRef as "llvm::Value*",
rhs: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::BinaryOperator::CreateNSW((llvm::Instruction::BinaryOps)opcode, lhs, rhs);
}
pub fn LLVMRustCreateBinaryOperatorNUW(opcode: libc::c_uint as "unsigned",
lhs: ValueRef as "llvm::Value*",
rhs: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::BinaryOperator::CreateNUW((llvm::Instruction::BinaryOps)opcode, lhs, rhs);
}
pub fn LLVMRustCreateBinaryOperatorExact(opcode: libc::c_uint as "unsigned",
lhs: ValueRef as "llvm::Value*",
rhs: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::BinaryOperator::CreateExact((llvm::Instruction::BinaryOps)opcode, lhs, rhs);
}
pub fn LLVMRustCreatePtrToIntInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::PtrToIntInst(value, ty);
}
pub fn LLVMRustCreateIntToPtrInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::IntToPtrInst(value, ty);
}
// FIXME: add bundle support
pub fn LLVMRustCreateInvokeInst(func: ValueRef as "llvm::Value*",
args: &[ValueRef] as "support::Slice<llvm::Value*>",
on_success: ValueRef as "llvm::Value*",
on_error: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
auto bundles = llvm::None;
return llvm::InvokeInst::Create(
support::cast<llvm::Function>(func),
support::cast<llvm::BasicBlock>(on_success), support::cast<llvm::BasicBlock>(on_error),
args.ref(), bundles);
}
pub fn LLVMRustCreateTruncInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::TruncInst(value, ty);
}
pub fn LLVMRustCreateFPTruncInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::FPTruncInst(value, ty);
}
pub fn LLVMRustCreateZExtInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::ZExtInst(value, ty);
}
pub fn LLVMRustCreateSExtInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::SExtInst(value, ty);
}
pub fn LLVMRustCreateFPExtInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::FPExtInst(value, ty);
}
pub fn LLVMRustCreateBitCastInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::BitCastInst(value, ty);
}
pub fn LLVMRustCreateFPToSIInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::FPToSIInst(value, ty);
}
pub fn LLVMRustCreateFPToUIInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::FPToUIInst(value, ty);
}
pub fn LLVMRustCreateSIToFPInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::SIToFPInst(value, ty);
}
pub fn LLVMRustCreateUIToFPInst(value: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::UIToFPInst(value, ty);
}
pub fn LLVMRustCreateICmpInst(predicate_kind: IntegerPredicateKind as "unsigned",
lhs: ValueRef as "llvm::Value*",
rhs: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return new llvm::ICmpInst(nullptr, (llvm::CmpInst::Predicate)predicate_kind,
lhs, rhs);
}
pub fn LLVMRustCreateFCmpInst(predicate_kind: FloatPredicateKind as "unsigned",
lhs: ValueRef as "llvm::Value*",
rhs: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return new llvm::FCmpInst(nullptr, (llvm::CmpInst::Predicate)predicate_kind,
lhs, rhs);
}
pub fn LLVMRustCreateIndirectBrInst(address: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::IndirectBrInst::Create(address, 0);
}
pub fn LLVMRustIndirectBrInstAddDestination(indirect_br: ValueRef as "llvm::Value*",
block: ValueRef as "llvm::Value*") {
support::cast<llvm::IndirectBrInst>(indirect_br)->addDestination(
support::cast<llvm::BasicBlock>(block));
}
pub fn LLVMRustCreateResumeInst(exception: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::ResumeInst::Create(exception);
}
pub fn LLVMRustCreateCatchPadInst(catch_switch: ValueRef as "llvm::Value*",
arguments: &[ValueRef] as "support::Slice<llvm::Value*>")
-> ValueRef as "llvm::Value*" {
return llvm::CatchPadInst::Create(support::cast<llvm::CatchSwitchInst>(catch_switch),
arguments.ref());
}
pub fn LLVMRustCreateCleanupPadInst(parent_pad: ValueRef as "llvm::Value*",
arguments: &[ValueRef] as "support::Slice<llvm::Value*>")
-> ValueRef as "llvm::Value*" {
return llvm::CleanupPadInst::Create(parent_pad, arguments.ref());
}
pub fn LLVMRustCreateVAArgInst(list: ValueRef as "llvm::Value*",
ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return new llvm::VAArgInst(list, ty);
}
pub fn LLVMRustCreateSwitchInst(value: ValueRef as "llvm::Value*",
default_block: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::SwitchInst::Create(value, support::cast<llvm::BasicBlock>(default_block), 0);
}
pub fn LLVMRustSwitchInstAddCase(switch_inst: ValueRef as "llvm::Value*",
constant_int: ValueRef as "llvm::Value*",
dest: ValueRef as "llvm::Value*") {
support::cast<llvm::SwitchInst>(switch_inst)->addCase(
support::cast<llvm::ConstantInt>(constant_int),
support::cast<llvm::BasicBlock>(dest));
}
pub fn LLVMRustCreateLandingPadInst(ret_ty: TypeRef as "llvm::Type*")
-> ValueRef as "llvm::Value*" {
return llvm::LandingPadInst::Create(ret_ty, 0);
}
pub fn LLVMRustCreateCatchReturnInst(catch_pad: ValueRef as "llvm::Value*",
block: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::CatchReturnInst::Create(support::cast<llvm::CatchPadInst>(catch_pad),
support::cast<llvm::BasicBlock>(block));
}
// NOTE(review): CatchSwitchInst::Create takes the parent pad as a plain
// llvm::Value* (it may be the "none" token); the CatchPadInst cast below
// would assert in that case — confirm whether it is needed at all.
pub fn LLVMRustCreateCatchSwitchInst(parent_pad: ValueRef as "llvm::Value*",
unwind_dest: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return llvm::CatchSwitchInst::Create(support::cast<llvm::CatchPadInst>(parent_pad),
support::cast<llvm::BasicBlock>(unwind_dest), 0);
}
pub fn LLVMRustCatchSwitchInstAddHandler(catch_switch: ValueRef as "llvm::Value*",
dest_block: ValueRef as "llvm::Value*") {
support::cast<llvm::CatchSwitchInst>(catch_switch)->addHandler(
support::cast<llvm::BasicBlock>(dest_block));
}
pub fn LLVMRustCreateCleanupReturnInst(cleanup_pad: ValueRef as "llvm::Value*",
unwind_block: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
// BUGFIX: a cleanupret's pad is a CleanupPadInst; the previous cast to
// CatchPadInst would fail llvm::cast's assertion at runtime.
return llvm::CleanupReturnInst::Create(support::cast<llvm::CleanupPadInst>(cleanup_pad),
support::cast<llvm::BasicBlock>(unwind_block));
}
pub fn LLVMRustCreateAtomicCmpXchgInst(pointer: ValueRef as "llvm::Value*",
cmp: ValueRef as "llvm::Value*",
new_value: ValueRef as "llvm::Value*",
success_ordering: AtomicOrdering as "unsigned",
failure_ordering: AtomicOrdering as "unsigned",
sync_scope: SynchronizationScope as "unsigned")
-> ValueRef as "llvm::Value*" {
return new llvm::AtomicCmpXchgInst(pointer, cmp, new_value, (llvm::AtomicOrdering)success_ordering,
(llvm::AtomicOrdering)failure_ordering, (llvm::SynchronizationScope)sync_scope);
}
pub fn LLVMRustCreateAtomicRMWInst(op: AtomicBinaryOp as "unsigned",
ptr: ValueRef as "llvm::Value*",
value: ValueRef as "llvm::Value*",
ordering: AtomicOrdering as "unsigned",
sync_scope: SynchronizationScope as "unsigned")
-> ValueRef as "llvm::Value*" {
return new llvm::AtomicRMWInst((llvm::AtomicRMWInst::BinOp)op, ptr, value,
(llvm::AtomicOrdering)ordering, (llvm::SynchronizationScope)sync_scope);
}
pub fn LLVMRustCreateShuffleVectorInst(v1: ValueRef as "llvm::Value*",
v2: ValueRef as "llvm::Value*",
mask: ValueRef as "llvm::Value*")
-> ValueRef as "llvm::Value*" {
return new llvm::ShuffleVectorInst(v1, v2, mask);
}
}
|
/// Sums the even-valued Fibonacci terms strictly below `limit`
/// (Project Euler #2 uses `limit = 4_000_000`).
///
/// Unlike the original Vec-based loop, this keeps only the last two terms
/// (O(1) memory) and never adds a term that is >= `limit`: the old code
/// checked the bound *before* pushing the next term, so an even term just
/// above the limit would have been (incorrectly) included.
fn sum_even_fibs(limit: u64) -> u64 {
    let (mut a, mut b) = (1u64, 2u64);
    let mut sum = 0;
    while a < limit {
        if a % 2 == 0 {
            sum += a;
        }
        let next = a + b;
        a = b;
        b = next;
    }
    sum
}

fn main() {
    println!("{}", sum_even_fibs(4_000_000));
}
|
extern crate crypto;
extern crate image_base64;
use crypto::digest::Digest;
use crypto::md5::Md5;
use std::fs;
use std::fs::File;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::MAIN_SEPARATOR;
use std::str;
use std::string::String;
// Base name of every fixture image (e.g. res/nyan.png, output/nyan.png).
static FILE_NAME: &'static str = "nyan";
// One smoke test per supported image format; each delegates to
// `image_to_base64` below, which compares the encoder output against the
// pre-computed res/<ext>_data fixture.
#[test]
fn jpg_to_base64() {
    image_to_base64("jpg");
}
#[test]
fn gif_to_base64() {
    image_to_base64("gif");
}
#[test]
fn png_to_base64() {
    image_to_base64("png");
}
#[test]
fn webp_to_base64() {
    image_to_base64("webp");
}
#[test]
fn ico_to_base64() {
    image_to_base64("ico");
}
#[test]
fn tiff_to_base64() {
    image_to_base64("tiff");
}
#[test]
fn bmp_to_base64() {
    image_to_base64("bmp");
}
#[test]
fn exr_to_base64() {
    image_to_base64("exr");
}
#[test]
fn hdr_to_base64() {
    image_to_base64("hdr");
}
#[test]
fn flif_to_base64() {
    image_to_base64("flif");
}
#[test]
fn pbm_to_base64() {
    image_to_base64("pbm");
}
#[test]
fn pgm_to_base64() {
    image_to_base64("pgm");
}
#[test]
fn ppm_to_base64() {
    image_to_base64("ppm");
}
#[test]
fn ras_to_base64() {
    image_to_base64("ras");
}
#[test]
fn xbm_to_base64() {
    image_to_base64("xbm");
}
/// Encodes res/nyan.<extension> with `image_base64::to_base64` and asserts
/// the result equals the expected text stored in res/<extension>_data.
fn image_to_base64(extension: &str) {
    let path = format!("res{}{}_data", MAIN_SEPARATOR, extension);
    let mut buffer = String::new();
    File::open(Path::new(&path))
        .unwrap_or_else(|why| panic!("couldn't open {}: {}", &path, why))
        .read_to_string(&mut buffer)
        .unwrap_or_else(|why| panic!("couldn't read {}", why));
    let image_path = format!("res{}{}.{}", MAIN_SEPARATOR, FILE_NAME, extension);
    let base64 = image_base64::to_base64(&image_path).unwrap();
    assert_eq!(base64, buffer);
}
// Decoding round-trip per format: write output/nyan.<ext> from the base64
// fixture, then `validate` compares size and hash against the original.
#[test]
fn base64_to_jpg() {
    base64_to_image("jpg");
    validate("jpg");
}
#[test]
fn base64_to_gif() {
    base64_to_image("gif");
    validate("gif");
}
#[test]
fn base64_to_png() {
    base64_to_image("png");
    validate("png");
}
#[test]
fn base64_to_webp() {
    base64_to_image("webp");
    validate("webp");
}
#[test]
fn base64_to_ico() {
    base64_to_image("ico");
    validate("ico");
}
#[test]
fn base64_to_tiff() {
    base64_to_image("tiff");
    validate("tiff");
}
#[test]
fn base64_to_bmp() {
    base64_to_image("bmp");
    validate("bmp");
}
#[test]
fn base64_to_exr() {
    base64_to_image("exr");
    validate("exr");
}
#[test]
fn base64_to_hdr() {
    base64_to_image("hdr");
    validate("hdr");
}
#[test]
fn base64_to_flif() {
    base64_to_image("flif");
    validate("flif");
}
#[test]
fn base64_to_pbm() {
    base64_to_image("pbm");
    validate("pbm");
}
#[test]
fn base64_to_pgm() {
    base64_to_image("pgm");
    validate("pgm");
}
#[test]
fn base64_to_ppm() {
    base64_to_image("ppm");
    validate("ppm");
}
#[test]
fn base64_to_ras() {
    base64_to_image("ras");
    validate("ras");
}
#[test]
fn base64_to_xbm() {
    base64_to_image("xbm");
    validate("xbm");
}
/// Reads the base64 fixture res/<extension>_data, decodes it, and writes the
/// raw image bytes to output/nyan.<extension>.
fn base64_to_image(extension: &str) {
    let data_path = format!("res{}{}_data", MAIN_SEPARATOR, extension);
    let mut base64 = String::new();
    match File::open(&data_path) {
        Ok(mut original) => {
            if let Err(why) = original.read_to_string(&mut base64) {
                panic!("couldn't read {}", why)
            }
        }
        Err(why) => panic!("couldn't open {}", why),
    }
    let img = image_base64::from_base64(base64);
    let out_path = format!("output{}{}.{}", MAIN_SEPARATOR, FILE_NAME, extension);
    File::create(&Path::new(&out_path))
        .unwrap()
        .write_all(img.as_slice())
        .unwrap();
}
/// Asserts that the decoded file in output/ matches the original in res/ by
/// both byte length and content hash.
fn validate(extension: &str) {
    let (reference, produced) = ("res", "output");
    assert_eq!(
        get_file_size(reference, extension),
        get_file_size(produced, extension)
    );
    assert_eq!(
        get_hash(reference, extension),
        get_hash(produced, extension)
    );
}
/// Returns the MD5 hex digest of <dir>/nyan.<extension>.
///
/// # Panics
/// Panics if the file cannot be opened or read.
fn get_hash(dir: &str, extension: &str) -> String {
    let mut hasher = Md5::new();
    let mut file = match File::open(&format!(
        "{}{}{}.{}",
        dir, MAIN_SEPARATOR, FILE_NAME, extension
    )) {
        Err(why) => panic!("couldn't open {}", why),
        Ok(file) => file,
    };
    let mut file_vec = Vec::new();
    match file.read_to_end(&mut file_vec) {
        Err(why) => panic!("couldn't read {}", why),
        Ok(_) => {}
    }
    // BUG FIX: hash the *entire* file. Previously the contents were truncated
    // to the first 4 bytes via `vector_as_u8_4_array`, so `validate` was only
    // comparing the leading 4 bytes of each file — files differing anywhere
    // past byte 3 would still "match".
    hasher.input(&file_vec);
    hasher.result_str()
}
/// Returns the size in bytes of <dir>/nyan.<extension>.
///
/// # Panics
/// Panics if the file's metadata cannot be read.
fn get_file_size(dir: &str, extension: &str) -> u64 {
    let path = format!("{}{}{}.{}", dir, MAIN_SEPARATOR, FILE_NAME, extension);
    fs::metadata(&path)
        .unwrap_or_else(|why| panic!("couldn't read {}", why))
        .len()
}
/// Copies at most the first 4 bytes of `vector` into a fixed `[u8; 4]`,
/// zero-padding when the input is shorter.
///
/// NOTE(review): callers hashing file contents should not use this — it
/// silently discards everything past byte 3.
fn vector_as_u8_4_array(vector: Vec<u8>) -> [u8; 4] {
    let mut arr = [0u8; 4];
    for (i, &byte) in vector.iter().take(4).enumerate() {
        arr[i] = byte;
    }
    arr
}
|
extern crate regex;
extern crate rustc_serialize;
use std::env;
use std::io::{self, Read, Write};
use std::fs::File;
use regex::Regex;
use rustc_serialize::base64::{ToBase64, STANDARD};
// Rewrites a CSS `background-image: url('...');` line, replacing the image
// path with an inline `data:` URI containing the file's base64 contents.
// Returns the line unchanged (after logging) if the asset cannot be read.
//
// NOTE(review): `env::args` is re-read and the Regex recompiled on every
// call; both are loop-invariant and could be hoisted by the caller.
fn replace_contents(line: &str) -> String {
    // First CLI argument is the assets directory the url() paths are relative to.
    let args: Vec<String> = env::args().collect();
    let assets_dir = args[1].clone();
    let re_path = Regex::new(r"url\('(?P<path>.+?)'\);").unwrap();
    // Panics if the line has no url('...'); callers pre-filter on
    // "background-image: url" so this is expected to match.
    let cap = re_path.captures(line).unwrap();
    let path = cap.name("path").unwrap();
    let mut src_path = assets_dir;
    // The MIME subtype is guessed from the file extension.
    let extension = path.split('.').last().unwrap();
    src_path.push_str(path);
    let mut image_file = match File::open(&src_path) {
        Ok(f) => {f},
        Err(_) => {
            // Missing asset: warn on stderr and keep the original line.
            let mut stderr = std::io::stderr();
            writeln!(&mut stderr, "Cannot open {} Skipping.", src_path).unwrap();
            return line.to_string();
        }
    };
    let mut file_contents = Vec::new();
    match image_file.read_to_end(&mut file_contents) {
        Ok(_) => {},
        Err(_) => {
            println!("Cannot read {}. Skipping.", src_path);
            return line.to_string();
        }
    }
    // Build "data:image/<ext>;base64,<payload>" and splice it over the path.
    let mut new_property = String::new();
    new_property.push_str("data:image/");
    new_property.push_str(extension);
    new_property.push_str(";base64,");
    new_property.push_str(&file_contents.to_base64(STANDARD));
    let new_line: String;
    new_line = line.replace(path, &new_property);
    return new_line;
}
/// Reads CSS from stdin and echoes it to stdout, inlining the image of every
/// `background-image: url(...)` declaration via `replace_contents`.
fn main() {
    let mut contents = String::new();
    if io::stdin().read_to_string(&mut contents).is_err() {
        panic!("Cannot read stdin, what");
    }
    for line in contents.split('\n') {
        let output = if line.contains("background-image: url") {
            replace_contents(line)
        } else {
            line.to_string()
        };
        println!("{}", output);
    }
}
|
extern crate amqp;
extern crate env_logger;
use serde_json;
use ofborg::worker;
use ofborg::stats;
use amqp::protocol::basic::{Deliver, BasicProperties};
/// AMQP worker that decodes incoming stat events and records them.
pub struct StatCollectorWorker<E> {
    // Sink used to report meta-events about the worker itself
    // (legacy/bogus message counters).
    events: E,
    // Destination for the decoded per-sender metrics.
    collector: stats::MetricCollector,
}
impl<E: stats::SysEvents + 'static> StatCollectorWorker<E> {
    /// Builds a worker around an event sink and a metric collector.
    pub fn new(events: E, collector: stats::MetricCollector) -> StatCollectorWorker<E> {
        // Field-init shorthand: the parameter names already match the fields.
        StatCollectorWorker { events, collector }
    }
}
impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorker<E> {
    type J = stats::EventMessage;
    /// Decodes an AMQP message body into an `EventMessage`.
    ///
    /// Falls back to a legacy format: if the body is not valid JSON, it is
    /// wrapped in double quotes and re-parsed as a bare JSON string (a single
    /// event name with no sender). Undecodable messages are counted and
    /// rejected.
    fn msg_to_job(
        &mut self,
        _: &Deliver,
        _: &BasicProperties,
        body: &Vec<u8>,
    ) -> Result<Self::J, String> {
        return match serde_json::from_slice(body) {
            Ok(e) => Ok(e),
            Err(_) => {
                // Legacy path: surround the raw body with '"' so it parses
                // as a JSON string literal.
                let mut modified_body: Vec<u8> = vec!["\"".as_bytes()[0]];
                modified_body.append(&mut body.clone());
                modified_body.push("\"".as_bytes()[0]);
                match serde_json::from_slice(&modified_body) {
                    Ok(e) => {
                        self.events.notify(stats::Event::StatCollectorLegacyEvent(stats::event_metric_name(&e)));
                        Ok(stats::EventMessage {
                            // Legacy messages carry no sender information.
                            sender: "".to_owned(),
                            events: vec![e],
                        })
                    },
                    Err(e) => {
                        self.events.notify(stats::Event::StatCollectorBogusEvent);
                        error!(
                            "Failed to decode message: {:?}, Err: {:?}",
                            String::from_utf8(body.clone()),
                            e
                        );
                        Err("Failed to decode message".to_owned())
                    }
                }
            }
        };
    }
    /// Records every event in the message under its sender, then acks.
    fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions {
        let sender = job.sender.clone();
        for event in job.events.iter() {
            self.collector.record(sender.clone(), event.clone());
        }
        return vec![worker::Action::Ack];
    }
}
|
use super::*;
use crate::components::LogEntry;
use crate::indices::EntityTime;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// Ordered table keyed by `Id`, backed by a `BTreeMap` so iteration yields
/// rows in ascending id order.
#[derive(Default, Debug, Serialize, Deserialize)]
pub struct BTreeTable<Id, Row>
where
    Id: TableId,
    Row: TableRow,
{
    data: BTreeMap<Id, Row>,
}
impl<Id, Row> BTreeTable<Id, Row>
where
    Id: TableId,
    Row: TableRow,
{
    /// Creates an empty table.
    pub fn new() -> Self {
        Self {
            data: BTreeMap::new(),
        }
    }
    /// Iterates `(id, &row)` pairs in ascending id order.
    pub fn iter(&self) -> impl TableIterator<Id, &Row> {
        self.data.iter().map(|(id, row)| (*id, row))
    }
    /// Iterates `(id, &mut row)` pairs in ascending id order.
    pub fn iter_mut(&mut self) -> impl TableIterator<Id, &mut Row> {
        self.data.iter_mut().map(|(id, row)| (*id, row))
    }
    /// Returns the row with the given id, if present.
    pub fn get(&self, id: Id) -> Option<&Row> {
        self.data.get(&id)
    }
    /// Returns a mutable reference to the row with the given id, if present.
    pub fn get_by_id_mut(&mut self, id: Id) -> Option<&mut Row> {
        self.data.get_mut(&id)
    }
    /// Returns all rows whose id appears in `ids`, in table (ascending id)
    /// order — not in the order of `ids`.
    pub fn get_by_ids(&self, ids: &[Id]) -> Vec<(Id, &Row)> {
        self.data
            .iter()
            .filter(move |(i, _)| ids.iter().any(|x| *i == x))
            .map(move |(i, v)| (*i, v))
            .collect()
    }
    /// Returns `true` if a row with `id` exists.
    pub fn contains(&self, id: Id) -> bool {
        // `contains_key` states the intent directly
        // (previously `get(&id).is_some()`).
        self.data.contains_key(&id)
    }
    /// Inserts a row, returning the previous row stored under `id`, if any.
    pub fn insert(&mut self, id: Id, row: Row) -> Option<Row> {
        self.data.insert(id, row)
    }
    /// Number of rows in the table.
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// Returns `true` if the table holds no rows.
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Removes every row.
    pub fn clear(&mut self) {
        self.data.clear();
    }
}
impl<Id, Row> Table for BTreeTable<Id, Row>
where
    Id: TableId,
    Row: TableRow,
{
    type Id = Id;
    type Row = Row;
    /// Removes and returns the row stored under `id`, if any.
    fn delete(&mut self, id: Id) -> Option<Row> {
        self.data.remove(&id)
    }
    /// Delegates to the inherent `BTreeTable::get` (same behavior).
    fn get(&self, id: Id) -> Option<&Row> {
        BTreeTable::get(self, id)
    }
}
impl LogTable for BTreeTable<EntityTime, LogEntry> {
    /// Returns clones of every log entry whose key's time component
    /// (`EntityTime.1`) equals `time`. Linear scan over all entries.
    fn get_logs_by_time(&self, time: u64) -> Vec<(EntityTime, LogEntry)> {
        self.data
            .iter()
            .filter(|(t, _)| t.1 == time)
            .map(|(k, v)| (*k, v.clone()))
            .collect()
    }
}
|
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate rocket;
extern crate kafka;
use std::time::Duration;
use kafka::producer::{Producer, Record, RequiredAcks};
use kafka::error::Error as KafkaError;
fn send_message(message: String) -> Result<(), KafkaError>
{
let kafka_server = "rocket-kafka_kafka:9092";
let topic = "topic";
let mut producer = try!(Producer::from_hosts(vec![kafka_server.to_owned()])
.with_ack_timeout(Duration::from_secs(5))
.with_required_acks(RequiredAcks::One)
.create());
try!(producer.send(&Record::from_value(topic, message)));
Ok(())
}
#[post("/message/<message>")]
fn message<'r>(message: String) -> &'r str {
if let Err(error) = send_message(message) {
println!("{}", error);
}
"OK"
}
/// Mounts the message route under /api/1 and starts the Rocket server.
fn main() {
    rocket::ignite().mount(
        "/api/1",
        routes![message]
    ).launch();
}
|
// Comparison helpers live in a private submodule; re-export its entire
// public surface so callers need not know the internal layout.
mod cmp;
pub use self::cmp::*;
|
use axum_websockets::{
    configuration::get_configuration,
    telemetry::{get_subscriber, init_subscriber},
    Application,
};
/// Entry point: installs tracing, loads configuration, builds the
/// application and runs it until shutdown.
#[tokio::main]
async fn main() -> Result<(), hyper::Error> {
    // BUG FIX: the telemetry service name previously read "actix_websockets"
    // — a copy-paste leftover that mislabelled every emitted log/trace for
    // this axum-based crate.
    let subscriber = get_subscriber("axum_websockets".into(), "info".into(), std::io::stdout);
    init_subscriber(subscriber);
    let configuration = get_configuration().expect("Failed to read configuration.");
    let application = Application::build(configuration).expect("Failed to build application.");
    application.run_until_stopped().await?;
    Ok(())
}
|
/// Executes the handheld-console program until some instruction is about to
/// run a second time, and returns the accumulator value at that moment
/// (Advent of Code 2020 day 8, part 1).
///
/// `code` is a list of `(opcode, argument)` pairs where opcode is one of
/// "acc", "jmp", "nop".
fn run_until_loop(code: &[(String, isize)]) -> isize {
    let mut visited = vec![false; code.len()];
    let mut accumulator = 0;
    let mut instruction_ptr = 0usize;
    loop {
        if visited[instruction_ptr] {
            // About to re-execute an instruction: infinite loop detected.
            return accumulator;
        }
        visited[instruction_ptr] = true;
        let (opcode, arg) = &code[instruction_ptr];
        match opcode.as_str() {
            "acc" => {
                accumulator += arg;
                instruction_ptr += 1;
            }
            "jmp" => instruction_ptr = (instruction_ptr as isize + arg) as usize,
            "nop" => instruction_ptr += 1,
            _ => unreachable!(),
        }
    }
}

fn main() {
    // Parse "op ±n" lines from the puzzle input into (opcode, argument) pairs.
    let code = String::from_utf8(std::fs::read("input/day8").unwrap())
        .unwrap()
        .split_terminator("\n")
        .map(|instruction| {
            let mut instruction = instruction.split_whitespace();
            (
                instruction.next().unwrap().to_owned(),
                instruction.next().unwrap().parse().unwrap(),
            )
        })
        .collect::<Vec<(_, isize)>>();
    println!("{}", run_until_loop(&code));
}
|
use crate::database::values::dsl::{ExprDb, SelectDb};
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type, Value,
};
/// `into nu` command: converts a db expression into a plain nu value.
#[derive(Clone)]
pub struct ExprAsNu;
impl Command for ExprAsNu {
    /// Command name as typed in the shell.
    fn name(&self) -> &str {
        "into nu"
    }
    fn usage(&self) -> &str {
        "Convert a db expression into a nu value for access and exploration"
    }
    /// Accepts a custom "db-expression" input and may return any value type.
    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .input_type(Type::Custom("db-expression".into()))
            .output_type(Type::Any)
            .category(Category::Custom("db-expression".into()))
    }
    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Convert a col expression into a nushell value",
            example: "field name_1 | into nu",
            result: Some(Value::Record {
                cols: vec!["value".into(), "quoted_style".into()],
                vals: vec![
                    Value::String {
                        val: "name_1".into(),
                        span: Span::test_data(),
                    },
                    Value::String {
                        val: "None".into(),
                        span: Span::test_data(),
                    },
                ],
                span: Span::test_data(),
            }),
        }]
    }
    /// Tries to interpret the input as an `ExprDb` first; if that fails,
    /// falls back to `SelectDb` (propagating its conversion error).
    fn run(
        &self,
        _engine_state: &EngineState,
        _stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let value = input.into_value(call.head);
        if let Ok(expr) = ExprDb::try_from_value(&value) {
            Ok(expr.to_value(call.head).into_pipeline_data())
        } else {
            let select = SelectDb::try_from_value(&value)?;
            Ok(select.to_value(call.head).into_pipeline_data())
        }
    }
}
#[cfg(test)]
mod test {
    use super::super::FieldExpr;
    use super::*;
    use crate::database::test_database::test_database;
    // Runs the command's declared examples against the test database harness.
    #[test]
    fn test_examples() {
        test_database(vec![Box::new(ExprAsNu {}), Box::new(FieldExpr {})])
    }
}
|
use crate::*;
use futures::Future;
use std::fmt;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Bundle of a device and its metadata handed to test callbacks.
pub struct TestDevice {
    pub device_info: DeviceInfo,
    pub device: Device<TestDeviceTransport>,
}
/// Runs `_f` once per available test device.
/// Currently a stub: always panics via `unimplemented!()`.
pub fn test_with_devices<FN, F>(_f: FN)
where
    FN: Fn(TestDevice) -> F,
    F: Future<Output = Result<(), Error>> + Send + 'static,
{
    unimplemented!()
}
/// Zero-sized transport stub used by the test harness.
/// `#[derive(Default)]` replaces the hand-written `impl Default` that just
/// returned `Self` — identical behavior, less code.
#[derive(Default)]
pub struct TestDeviceTransport;
impl Transport for TestDeviceTransport {
    type Error = TestDeviceTransportError;
    type Output = TestDeviceTransportOutput;
    type Body = TestDeviceTransportBody;
    type Chunk = Vec<u8>;
    /// Stub: never performs a real request; always panics.
    fn roundtrip(&self, _request: http::Request<Vec<u8>>) -> Self::Output {
        unimplemented!()
    }
}
/// Zero-sized error type for the test transport.
#[derive(Debug)]
pub struct TestDeviceTransportError;
impl std::error::Error for TestDeviceTransportError {}
impl fmt::Display for TestDeviceTransportError {
    /// Writes nothing: the error carries no information to display.
    fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {
        Ok(())
    }
}
/// Future returned by the stub transport's `roundtrip`.
pub struct TestDeviceTransportOutput;
impl Future for TestDeviceTransportOutput {
    type Output = Result<http::Response<TestDeviceTransportBody>, TestDeviceTransportError>;
    /// Stub: polling always panics.
    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
        unimplemented!()
    }
}
/// Response-body stream for the stub transport.
pub struct TestDeviceTransportBody;
impl futures::Stream for TestDeviceTransportBody {
    type Item = Result<Vec<u8>, TestDeviceTransportError>;
    /// Stub: polling always panics.
    fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        unimplemented!()
    }
}
|
//! The [`Binder`] translates a [`Token`] stream into bound expressions
//! that can then be translated into machine code.
use prelude::*;
use prelude::parse::*;
use builder::Builder;
use core::syscore;
use expr::{BuiltIns, Unit};
use input::{Input, InputFile};
use intrinsics::import_intrinsics;
use typesystem::{Member, MemberTree};
use std::fmt::{self, Debug, Formatter};
use std::io::{self, Write};
use std::mem;
use futures::prelude::*;
/// Parent of a `Binder`.
/// - If the `Binder` is a root binder, the parent is a `VirtualMachine`,
/// and some fields are only declared for the root.
/// - If the `Binder` is a child binder, the parent is another `Binder`.
///
/// # Safety
/// The parent `Binder` is pointed to using a pointer (instead of a reference)
/// since Rust has some doesn't allow direct parent-child relationships (except
/// through smart pointers).
/// However, the root `Binder` cannot be dropped until all inner `Binder`s are done,
/// which means that no parent can be dropped during execution. All is thus safe.
pub(crate) enum BinderParent<'cx, 'vm: 'cx> {
    /// This binder is the root: it owns the VM handle, context and file.
    Root(RootBinder<'cx, 'vm>),
    /// Raw pointer to the parent binder (see the safety note above).
    Binder(*mut Binder<'cx, 'vm>)
}
/// Represents a root `Binder`.
/// Represents a root `Binder`.
pub(crate) struct RootBinder<'cx, 'vm: 'cx> {
    // Handle to the owning virtual machine.
    vm: Vm<'vm>,
    // Root context all child binders ultimately resolve members against.
    context: Context<'cx>,
    // The input file this root binder is binding.
    file: InputFile
}
impl<'cx, 'vm> RootBinder<'cx, 'vm> {
    /// Builds a root binder with a brand-new context for `vm`.
    pub fn new(vm: Vm<'vm>, file: InputFile) -> Self {
        let context = Context::new(vm);
        RootBinder { vm, file, context }
    }
    /// Builds a root binder whose context is a child of an existing context.
    pub fn _new_in_context<'pcx: 'cx>(vm: Vm<'vm>, file: InputFile, context: &'cx Context<'pcx>) -> Self {
        let context = Context::child(context);
        RootBinder { vm, file, context }
    }
}
/// State of a `Binder` after executing some work.
pub enum BinderState {
/// Completely processed its input.
Done,
/// Stalled, and unable to finish parsing its input.
Stalled,
/// Awaiting a new symbol.
Pending
}
/// Represents a structure that dispatches parsers in a token stream.
pub struct Binder<'cx, 'vm: 'cx> {
pub(crate) parent: BinderParent<'cx, 'vm>,
#[allow(dead_code)]
symbol: Sym,
children: usize,
parse_groups: Vec<ParseGroup>,
imported_trees: Vec<&'cx MemberTree<'cx>>,
syntaxes: Vec<Syntax<'cx>>,
pending_symbols: Vec<Spanned<Sym>>,
expressions: Vec<Expr<'cx>>,
tokens: Vec<Token>
}
impl<'cx, 'vm: 'cx> Binder<'cx, 'vm> {
    /// Creates a new root `Binder` belonging to the specified virtual machine and binding the specified
    /// file, and returns it.
    ///
    /// # Errors
    /// If producing a token stream is impossible, `Err` will be returned.
    pub fn new(vm: Vm<'vm>, file: InputFile, diagnostics: Diags) -> Result<Self, InputFile> {
        let tokens = Lexer::new(file.source()).tokenize(diagnostics);
        if tokens.is_err() {
            // Hand the file back so the caller can still report on it.
            return Err(file)
        }
        let tokens = tokens.unwrap();
        if tokens.is_empty() {
            // An empty token stream is treated the same as a lexing failure.
            return Err(file)
        }
        let mut binder = Binder {
            tokens,
            symbol: Sym::default(),
            parent: BinderParent::Root(RootBinder::new(vm, file)),
            imported_trees: Vec::new(),
            syntaxes: Vec::new(),
            parse_groups: Vec::new(),
            pending_symbols: Vec::new(),
            expressions: Vec::new(),
            children: 0
        };
        // Core members and intrinsics must be registered before grouping tokens.
        syscore::initialize(&mut binder, vm);
        import_intrinsics(&mut binder);
        Self::initialize(&mut binder, diagnostics);
        Ok(binder)
    }
    /// Creates a new empty `Binder`, with no diagnostics nor input.
    pub fn empty(vm: Vm<'vm>) -> Self {
        let mut binder = Binder {
            tokens: Vec::new(),
            symbol: Sym::default(),
            parent: BinderParent::Root(RootBinder::new(vm, InputFile::new(String::new(), String::new(), 0))),
            imported_trees: Vec::new(),
            syntaxes: Vec::new(),
            parse_groups: Vec::new(),
            pending_symbols: Vec::new(),
            expressions: Vec::new(),
            children: 0
        };
        syscore::initialize(&mut binder, vm);
        import_intrinsics(&mut binder);
        binder
    }
    /// Creates a new `Binder` whose context is given by another binder, in order
    /// to bind a given file.
    ///
    /// # Errors
    /// Returns `Err(file)` if lexing fails or yields no tokens, leaving
    /// `binder`'s previous token stream untouched in the error case? —
    /// NOTE(review): on the error paths the existing `binder.tokens` are
    /// indeed left as-is; only on success are they replaced.
    pub fn resume<'a>(binder: &'a mut Binder<'cx, 'vm>, file: InputFile, diagnostics: Diags) -> Result<&'a mut Self, InputFile> {
        let tokens = Lexer::new(file.source()).tokenize(diagnostics);
        if tokens.is_err() {
            return Err(file)
        }
        let tokens = tokens.unwrap();
        if tokens.is_empty() {
            return Err(file)
        }
        binder.tokens = tokens;
        Self::initialize(binder, diagnostics);
        Ok(binder)
    }
    /// Creates a `Binder` that acts as a "child" of the current binder,
    /// but having a smaller scope and a specific set of tokens.
    pub fn child<'c>(&'c mut self, symbol: Sym, tokens: Vec<Token>, diagnostics: Diags) -> Binder<'c, 'vm> {
        self.children += 1;
        let mut binder = Binder {
            tokens, symbol,
            // SAFETY(review): stores `self` as a raw parent pointer; relies on
            // the module-level invariant that parents outlive children.
            parent: BinderParent::Binder(unsafe { mem::transmute(self as *mut _) }),
            imported_trees: Vec::new(),
            syntaxes: Vec::new(),
            parse_groups: Vec::new(),
            pending_symbols: Vec::new(),
            expressions: Vec::new(),
            children: 0
        };
        Self::initialize(&mut binder, diagnostics);
        binder
    }
    /// Initializes the `Binder`, populating its sub-parsers.
    fn initialize<'a, 'lcx>(binder: &'a mut Binder<'lcx, 'vm>, diagnostics: Diags) {
        // We're dividing tokens into different groups in order to parse groups in any order.
        let mut size = 0;
        let mut depth = 0;
        let mut index = 0;
        let tokens = &binder.tokens;
        let mut groups = Vec::new();
        /// Groups multiple tokens together into a `ParseGroup`.
        macro_rules! group {
            () => ({
                // Adds a new parser to the parsers vector, populating it with every token
                // encountered till now
                let nth = groups.len();
                groups.push(ParseGroup { from: index, to: index + size, position: 0, index: nth });
                index += size;
                size = 0;
                // The previous assignments raise warnings for no reason
                // FIX: https://github.com/rust-lang/rust/issues/24580
                move || assert!(index != size); // Anonymous closure is never actually called
            });
        }
        /// Gets the `Token` at the current position, performing a few checks beforehand.
        macro_rules! token {
            () => (match tokens.get(size) {
                Some(token) => {
                    size += 1;
                    token
                },
                None => {
                    if depth != 0 {
                        // Invalid end of statement.
                        let last_token = unsafe {
                            // safe to do, since depth is 0 when size is 0
                            tokens.get_unchecked(size - 1)
                        };
                        diagnostics.report(Diagnostic::unexpected_eof(last_token.span()))
                    } else if size != 0 {
                        // Non-empty statement, regroup everything
                        group!();
                    }
                    break
                }
            });
        }
        // Collect tokens into different groups
        loop {
            match token!() {
                // End of statement
                &Token::Semicolon(_) => group!(),
                // Start of a bracketed scope, increase depth and start parsing inner
                &Token::LBracket(_) => {
                    depth += 1;
                    loop {
                        match token!() {
                            &Token::LBracket(_) => depth += 1,
                            &Token::RBracket(_) => {
                                depth -= 1;
                                if depth == 0 {
                                    group!();
                                    continue
                                }
                            },
                            _ => ()
                        }
                    }
                },
                _ => ()
            }
        }
        // Set parsers
        // NOTE(review): the value returned by `mem::replace` is dropped; a
        // plain assignment `binder.parse_groups = groups;` would express
        // this more clearly.
        mem::replace(&mut binder.parse_groups, groups);
    }
    /// Parses, binds and executes the content given to this `Binder`.
    ///
    /// If `quote` is `true`, then the content will not be executed,
    /// and an expression will be returned instead.
    pub fn process(&mut self, quote: bool, diagnostics: Diags) -> BinderState {
        let mut stalled = 0;
        let mut i = 0;
        // Making a reference to the tokens that the borrow checker won't care about,
        // since once tokens have been produced, they are NEVER modified.
        let tokens = dup!(self.tokens.as_slice());
        // Take care of every parser, one by one, starting at the first one
        'outer: loop {
            let parsed = loop {
                // Some strange scopes to make the borrow checker happy...
                let reason = {
                    let mut parser = match self.parse_groups.get(i).cloned() {
                        Some(group) => Parser::new(self, &tokens, group),
                        // No group left at index i: everything is processed.
                        None => return BinderState::Done
                    };
                    match parser.parse_expression(diagnostics) {
                        Ok(parsed) => break parsed,
                        Err(reason) => reason
                    }
                };
                if let Failure::Match(span) = reason {
                    stalled += 1;
                    // Every remaining group failed to match: give up.
                    if stalled == self.parse_groups.len() {
                        diagnostics.report(Diagnostic::no_match(span));
                        return BinderState::Stalled
                    }
                } else {
                    self.parse_groups.remove(i);
                }
                continue 'outer
            };
            if diagnostics.has_error() {
                i += 1;
                continue
            }
            // We got this far, which means that the parsed expression was parsed successfully.
            // We can add it to the parsed expressions, or execute it.
            let group = self.parse_groups.remove(i);
            if quote {
                self.expressions.insert(group.index, parsed)
            } else {
                self.compute(&parsed, diagnostics)
            }
        }
    }
    /// Compiles and executes a single bound expression via the builder.
    fn compute(&mut self, expr: &Expr<'cx>, diagnostics: Diags) {
        let mut builder = Builder::new::<_, String>(
            self.ctx(),
            ::arch::Architecture::current().unwrap(),
            expr.span(),
            diagnostics, "<tmp>", &[]);
        expr.build(&mut builder);
    }
    /// Returns a reference to the [`Context`] to which this binder belongs.
    pub fn context(&self) -> &Context<'cx> {
        &self.root().context
    }
    /// Returns a mutable reference to the [`Context`] to which this binder belongs.
    pub fn ctx<'a>(&'a mut self) -> &'cx mut Context<'cx> {
        // SAFETY(review): extends the borrow to 'cx through a raw pointer;
        // sound only while no other mutable access to the root context
        // overlaps — TODO confirm.
        unsafe {
            &mut *(&mut self.mut_root().context as *mut _)
        }
    }
    /// Returns a [`Vm`] object that represents the virtual machine that created this binder.
    pub fn vm<'s>(&'s self) -> Vm<'vm> {
        self.root().vm
    }
    /// Walks parent pointers up to the root binder.
    pub(crate) fn root(&self) -> &RootBinder<'cx, 'vm> {
        match &self.parent {
            &BinderParent::Root(ref root) => root,
            &BinderParent::Binder(binder) => unsafe { (*binder).root() }
        }
    }
    /// Walks parent pointers up to the root binder, mutably.
    pub(crate) fn mut_root(&mut self) -> &mut RootBinder<'cx, 'vm> {
        match &mut self.parent {
            &mut BinderParent::Root(ref mut root) => root,
            &mut BinderParent::Binder(binder) => unsafe { (*binder).mut_root() }
        }
    }
    /// Returns a reference to the parent binder of this binder, if it has one.
    pub fn parent(&self) -> Option<&Self> {
        match self.parent {
            BinderParent::Root(_) => None,
            BinderParent::Binder(binder) => unsafe { binder.as_ref() }
        }
    }
    /// Returns a mutable reference to the parent binder of this binder, if it has one.
    pub fn mut_parent(&mut self) -> Option<&mut Self> {
        match self.parent {
            BinderParent::Root(_) => None,
            BinderParent::Binder(binder) => unsafe { binder.as_mut() }
        }
    }
    /// Returns whether or not this binder is pending, which means that it has jobs pending.
    /// NOTE(review): returns `true` when `parse_groups` is *empty* — verify
    /// this condition is not inverted relative to the doc above.
    pub fn pending(&self) -> bool {
        self.parse_groups.is_empty() || self.children > 0
    }
    /// Returns the file to which this binder corresponds.
    pub fn file(&self) -> &InputFile {
        &self.root().file
    }
}
// SAFETY(review): `Binder` holds a raw `*mut Binder` parent pointer, so these
// impls assert that cross-thread use is sound. That relies on the documented
// invariant that parents outlive their children and are not mutated
// concurrently — TODO confirm before sharing binders across threads.
unsafe impl<'cx, 'vm: 'cx> Send for Binder<'cx, 'vm> { }
unsafe impl<'cx, 'vm: 'cx> Sync for Binder<'cx, 'vm> { }
impl<'cx, 'vm: 'cx> Binder<'cx, 'vm> {
    /// Returns whether the given operand is a unary prefix operator.
    pub fn is_prefix(&self, op: &str) -> bool {
        self.vm().read().unary_operators.contains_key(op)
    }
    /// Returns the precedence of the given binary operator.
    ///
    /// # Errors
    /// If the specified operator is unknown, a `Diagnostic` will be returned.
    pub fn get_precedence(&self, span: Span, op: &str, diagnostics: Diags) -> Precedence {
        match self.vm().read().binary_operators.get(op) {
            Some(op) => *op,
            None => {
                diagnostics.report(Diagnostic::undefined_binary_op(span, op));
                Precedence::default()
            }
        }
    }
    /// Looks up the type matching the given name.
    ///
    /// # Errors
    /// If the type cannot be found, a `Diagnostic` will be reported, and [`Ty::unknown`] will be
    /// returned.
    pub fn lookup_type(&self, span: Span, name: &str, diagnostics: Diags) -> &'cx Ty<'cx> {
        let sym = Sym::from(name);
        // First try the local context; remember the would-be diagnostic.
        let diag = match self.context().members().get(&sym) {
            Some(&Member::Type(ty)) => return ty,
            Some(_) => Diagnostic::not_a_type(span),
            None => Diagnostic::undefined_type(span, name)
        };
        // Fall back to the VM-global context before reporting.
        if let Some(&Member::Type(ty)) = self.vm().read().context().members().get(&sym) {
            // SAFETY(review): lifetime-extending transmute of a VM-owned type
            // reference to 'cx — relies on the VM outliving the context.
            return unsafe { mem::transmute(ty) }
        }
        diagnostics.report(diag);
        Ty::unknown()
    }
    /// Looks up the function matching the given name and arguments.
    ///
    /// # Errors
    /// If the function cannot be found, a `Diagnostic` will be reported, and [`Fun::unknown`] will be
    /// returned.
    pub fn lookup_function(&self, span: Span, name: &str, args: &[&Expr<'cx>], diagnostics: Diags) -> &'cx Fun<'cx> {
        // Scores how far `fun`'s parameters are from `args`; 0 == perfect match.
        fn closeness<'cx>(args: &[&Expr<'cx>], fun: &'cx Fun<'cx>) -> usize {
            let diff = (args.len() as isize) - (fun.parameters().len() as isize);
            // ensure that both functions have the same number of parameters
            if diff != 0 {
                // parameter count mismatch, return large difference
                return diff.abs() as usize * 100
            }
            let mut diff = 0;
            for (arg, param) in args.iter().zip(fun.parameters()) {
                // ensuring the type corresponds:
                // type equality, or expression type
                if arg.ty() == param.ty() {
                    continue
                }
                if let Some(ty) = param.ty().generic_parameters().get(0) {
                    if ty == arg.ty() && param.ty() == Ty::expression() {
                        continue
                    }
                }
                diff += 1;
            }
            diff
        }
        // Collects every function named `symbol` in `tree` with its score.
        fn lookup_tree<'cx>(symbol: &Sym, args: &[&Expr<'cx>], tree: &MemberTree<'cx>, matches: &mut Vec<(&'cx Fun<'cx>, usize)>) {
            let lasthash = symbol.parts()[symbol.parts().len() - 1];
            let node = match tree.lookup(symbol) {
                Some(node) => node,
                None => return
            };
            for item in node.siblings() {
                if let Some(&Member::Function(fun)) = item.value() {
                    if item.hash() == lasthash {
                        let closeness = closeness(args, fun);
                        matches.push((fun, closeness));
                    }
                }
            }
        }
        // find all functions that bear the given name, finding how much their parameters match the
        // given arguments
        let mut matches = Vec::new();
        let sym = Sym::from(name);
        for tree in &self.imported_trees {
            lookup_tree(&sym, args, tree, &mut matches);
        }
        lookup_tree(&sym, args, self.context().members(), &mut matches);
        unsafe {
            // SAFETY(review): same lifetime-extending transmute as in
            // `lookup_type` — relies on the VM outliving the context.
            lookup_tree(&sym, args, mem::transmute(self.vm().read().context().members()), &mut matches);
        }
        // no function bears the given name, report this and return
        if matches.is_empty() {
            diagnostics.report(Diagnostic::undefined_function(span, name));
            return Fun::unknown()
        }
        // find the closest match
        // note that the closer 'closest' is to 0, the closer the actual match;
        // 'closest' == 0 <=> perfect match
        let (mut target, mut closest) = matches.swap_remove(0);
        for (fun, closeness) in matches.drain(..) {
            if closeness >= closest {
                if closest == 0 && closeness == 0 && fun != target {
                    // ambiguous perfect match
                    diagnostics.report(Diagnostic::ambiguous_match(span));
                    return Fun::unknown()
                }
                // equal (or lower) match; we keep the previous function, because its
                // importing scope is closer to the call
                continue
            }
            // greater match, update target
            target = fun;
            closest = closeness;
        }
        if closest == 0 {
            // perfect match, return it
            target
        } else {
            // no match; report the error, displaying the closest match
            diagnostics.report(Diagnostic::signature_mismatch(span));
            Fun::unknown()
        }
    }
    /// Returns a slice containing all parsers in this scope.
    pub fn syntaxes(&self) -> &[Syntax<'cx>] {
        &self.syntaxes
    }
    /// Declares a member at the context level.
    #[allow(needless_pass_by_value)] // Probably will need this later on
    pub fn declare(&self, _member: Member<'cx>) {
        unimplemented!()
    }
    /// Imports the specified tree in this scope, making all of its direct members
    /// available in this binder.
    pub fn import_tree<'s>(&'s mut self, node: &'cx MemberTree<'cx>) {
        let nodes = match node.child() {
            Some(child) => child.siblings(),
            None => return
        };
        for sibl in nodes {
            self.imported_trees.push(sibl)
        }
    }
    /// Imports the given syntax in this context.
    pub fn import_syntax(&mut self, syntax: Syntax<'cx>) {
        self.syntaxes.push(syntax);
    }
}
/// A `Future` that represents a group of parsers and binders
/// running on an input.
/// A `Future` that represents a group of parsers and binders
/// running on an input.
pub struct Bind<'cx, 'vm: 'cx> {
    // Virtual machine shared by all root binders.
    vm: Vm<'vm>,
    // Diagnostics accumulated since the last yielded item.
    diagnostics: DiagnosticBag,
    // Stream of input files still being read.
    input_stream: Input,
    // One root binder per input file currently being processed.
    binders: Vec<Binder<'cx, 'vm>>,
    // NOTE(review): only read by the Debug impl in this file — presumably
    // symbols awaited across binders; confirm against other call sites.
    pending_symbols: Vec<Sym>,
    // Once set, every remaining binder is drained via the failure path.
    failing: bool
}
impl<'cx, 'vm> Bind<'cx, 'vm> {
    /// Creates a new binding stream that returns
    /// expressions as they are parsed.
    pub(crate) fn new(vm: Vm<'vm>, input: Input) -> Self {
        // The capacity must be read before `input` is moved into the struct.
        let binders = Vec::with_capacity(input.len());
        Bind {
            vm,
            binders,
            input_stream: input,
            diagnostics: DiagnosticBag::default(),
            pending_symbols: Vec::new(),
            failing: false,
        }
    }
}
impl<'cx, 'vm> Stream for Bind<'cx, 'vm> {
    type Item = Bound<'cx>;
    type Error = io::Error;
    /// Drives all binders forward, yielding one `Bound` per finished (or
    /// failed) file, `NotReady` while work remains, and `None` when done.
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        // Swaps the accumulated diagnostics out, leaving a fresh bag.
        macro_rules! take_diagnostics {
            () => ( mem::replace(&mut self.diagnostics, DiagnosticBag::default()) );
        }
        // take care of unfinished file reads
        if let Async::Ready(Some(file)) = self.input_stream.poll()? {
            let hash = file.hash();
            match Binder::<'cx, 'vm>::new(self.vm, file, self.diagnostics.set_active_file(hash)) {
                Ok(binder) => self.binders.push(binder),
                Err(file) => {
                    // Lexing failed: yield an empty (unit) bound result for it.
                    let span = file.span();
                    return Ok(Async::Ready(Some(Bound {
                        succeeded: false, file,
                        expression: Expr::native(Unit::new(), span, BuiltIns::unit()),
                        diagnostics: take_diagnostics!(),
                        missing_symbols: Vec::new()
                    })));
                }
            }
        }
        // process current tasks
        let binders = &mut self.binders;
        if binders.is_empty() {
            return Ok(Async::Ready(None))
        }
        // Drains the first remaining binder as a failed `Bound`.
        macro_rules! fail {
            () => {{
                let Binder { parent, pending_symbols, mut expressions, .. } = binders.swap_remove(0);
                let file = match parent {
                    BinderParent::Root(RootBinder { file, .. }) => file,
                    _ => panic!("Not possible")
                };
                let expression = if expressions.len() == 1 {
                    expressions.swap_remove(0)
                } else if expressions.len() == 0 {
                    Expr::native(Unit::new(), file.span(), BuiltIns::unit())
                } else {
                    Expr::block(expressions, file.span())
                };
                return Ok(Async::Ready(Some(Bound {
                    succeeded: false,
                    expression, file,
                    diagnostics: take_diagnostics!(),
                    missing_symbols: pending_symbols
                })))
            }};
        }
        if self.failing {
            fail!()
        }
        let mut pending = 0;
        let mut i = 0;
        while i < binders.len() {
            let file = binders[i].file().hash();
            let succeeded = match binders[i].process(false, self.diagnostics.set_active_file(file)) {
                BinderState::Pending => {
                    pending += 1;
                    i += 1;
                    continue
                },
                BinderState::Stalled => false,
                BinderState::Done => true
            };
            // Done or stalled: extract this binder's results and yield them.
            let Binder { parent, pending_symbols, mut expressions, .. } = binders.swap_remove(i);
            let file = match parent {
                BinderParent::Root(RootBinder { file, .. }) => file,
                _ => panic!("Non-root binder cannot be returned.")
            };
            let expression = if expressions.len() == 1 {
                expressions.swap_remove(0)
            } else {
                Expr::block(expressions, file.span())
            };
            let bound = Bound {
                expression, file,
                diagnostics: take_diagnostics!(),
                missing_symbols: pending_symbols,
                succeeded
            };
            return Ok(Async::Ready(Some(bound)))
        }
        // return if we're simply waiting for more binders to finish their work
        if pending != binders.len() {
            return Ok(Async::NotReady)
        }
        // ugh, it failed
        self.failing = true;
        fail!()
    }
}
impl<'cx, 'vm> Debug for Bind<'cx, 'vm> {
    /// Summarizes the binding stream by its pending-symbol and diagnostic counts.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let pending = self.pending_symbols.len();
        let diagnostics = self.diagnostics.len();
        let mut builder = f.debug_struct("Bind");
        builder.field("pending", &pending);
        builder.field("diagnostics", &diagnostics);
        builder.finish()
    }
}
/// A bound `Expr`.
///
/// The `expression` field will always return a big `Expr` that contains
/// every expression declared in the file in an AST.
#[derive(Debug)]
pub struct Bound<'cx> {
    /// The bound expression.
    ///
    /// NOTE(review): this field is not optional — when the binding phase
    /// fails, it holds a unit placeholder (or partial block) expression
    /// rather than `None`.
    pub expression: Expr<'cx>,
    /// The diagnostics reported during the parsing of this file.
    pub diagnostics: DiagnosticBag,
    /// The symbols that were missing.
    pub missing_symbols: Vec<Spanned<Sym>>,
    /// The file whose content was parsed and bound.
    pub file: InputFile,
    /// Whether or not the binding operation was entirely successful.
    pub succeeded: bool
}
impl<'a, 'cx, 'vm> Bound<'cx> where 'vm: 'cx, 'vm: 'a {
    /// Creates a new `Bound` structure, given the binder that contains the data,
    /// and the diagnostics gotten thus far.
    pub fn new(binder: &'a mut Binder<'cx, 'vm>, diagnostics: DiagnosticBag) -> Bound<'cx> {
        // Move the input file out of the root binder, leaving an empty one behind.
        let file = match &mut binder.parent {
            &mut BinderParent::Root(RootBinder { ref mut file, .. }) => mem::replace(file, InputFile::empty()),
            _ => panic!("Cannot get bound node from child binder")
        };
        // Take ownership of the collected expressions: a single expression is
        // returned as-is, otherwise they are wrapped in one block expression.
        let mut exprs: Vec<Expr<'cx>> = mem::replace(&mut binder.expressions, Vec::new());
        let expression = if exprs.len() == 1 {
            exprs.remove(0)
        } else {
            Expr::block(exprs, file.span())
        };
        let missing_symbols = mem::replace(&mut binder.pending_symbols, Vec::new());
        // The operation only counts as successful if no error was reported.
        let succeeded = !diagnostics.has_error();
        Bound { succeeded, expression, diagnostics, file, missing_symbols }
    }
}
impl<'cx> Bound<'cx> {
    /// Generates a `Diagnostic` for every missing symbol.
    pub fn generate_missing_symbols_diagnostics(&mut self) {
        for symbol in &self.missing_symbols {
            self.diagnostics.report(Diagnostic::undefined_symbol(symbol.span(), symbol.full_name()));
        }
    }
    /// Prints the diagnostics encountered during the binding operation to the given stream.
    ///
    /// The `errors` / `warnings` / `infos` flags select which severities are
    /// printed; `Fatal` diagnostics are always printed.
    pub fn print_diagnostics(&self, out: &mut Write, errors: bool, warnings: bool, infos: bool) {
        use diagnostics::{DiagnosticPrinter, DiagnosticSeverity};
        let mut printer = DiagnosticPrinter::new(out, &self.file);
        // Keep only the severities enabled by the caller's flags.
        let diagnostics = self.diagnostics.as_ref().iter().filter(|diag| match diag.severity() {
            DiagnosticSeverity::Fatal => true,
            DiagnosticSeverity::Error if errors => true,
            DiagnosticSeverity::Warning if warnings => true,
            DiagnosticSeverity::Info if infos => true,
            _ => false
        });
        printer.print_many(diagnostics);
    }
}
|
use std::net::{ TcpStream, Shutdown, SocketAddr };
use std::time::Duration;
/// Sequentially probes every TCP port on 192.168.0.1 and reports the open ones.
///
/// Each connection attempt uses a 1.5 s timeout; successfully opened
/// connections are shut down immediately.
fn main() {
    let mut open_ports = vec![];
    for port in 1..=65535u16 {
        // Build the address directly instead of formatting and re-parsing a
        // string (two heap allocations + a fallible `parse`) on every iteration.
        let socket = SocketAddr::from(([192, 168, 0, 1], port));
        match TcpStream::connect_timeout(&socket, Duration::from_millis(1500)) {
            Ok(stream) => {
                println!(
                    "Open {}", &socket
                );
                if let Err(e) = stream.shutdown(Shutdown::Both) {
                    println!("Shutdown stream error {}", &e);
                }
                open_ports.push(socket);
            }
            Err(e) => {
                // Closed or filtered port: ignore (string kept for debugging).
                let _error_string = e.to_string();
                // println!("{}", _error_string);
            }
        }
    }
    println!("\tOpen ports: ");
    for addr in open_ports {
        println!("Open: {:?}", addr);
    }
}
/*
Port Scanner
Works 100% locally, however,
on internet, or networks in general, it gets stuck on port
that is not open. This behaviour is only present when scanning a
remote host.
*/ |
use glam::Vec3;
/// A ray in 3-D space, defined by an origin point and a direction vector.
#[derive(Clone, Copy)]
pub struct Ray {
    /// Starting point of the ray.
    pub origin: Vec3,
    /// Direction of the ray (not necessarily normalized — confirm at call sites).
    pub direction: Vec3,
}
impl Ray {
pub fn new(origin: Vec3, direction: Vec3) -> Self {
Ray { origin, direction }
}
}
|
use crate::base::id;
use crate::base::BOOL;
use crate::foundation::NSString;
use crate::foundation::NSURL;
// https://developer.apple.com/documentation/appkit/nsimage
/// Thin wrapper around an Objective-C `NSImage` object pointer.
#[derive(Clone, Copy, Debug)]
#[repr(C)]
pub struct NSImage(id);
impl NSImage {
    /// Sends `alloc` to the `NSImage` class and wraps the uninitialized object.
    pub fn alloc() -> Self {
        Self(unsafe { msg_send!(class!(NSImage), alloc) })
    }
    /// Initializes the allocated image with the contents of `filename`
    /// (`initWithContentsOfFile:`).
    ///
    /// NOTE(review): the underlying initializer can return `nil` when the file
    /// cannot be loaded — callers should verify the wrapped pointer.
    pub fn with_contents<T: Into<NSString>>(self, filename: T) -> Self {
        Self(unsafe { msg_send!(self.0, initWithContentsOfFile: filename.into()) })
    }
    /// Initializes the allocated image from a URL (`initWithContentsOfURL:`).
    ///
    /// NOTE(review): may likewise produce a `nil`-backed wrapper on failure.
    pub fn with_url<T: Into<NSURL>>(self, url: T) -> Self {
        Self(unsafe { msg_send!(self.0, initWithContentsOfURL: url.into()) })
    }
    /// Marks the image as a template image (`setTemplate:`), returning `self`
    /// for chaining.
    pub fn template(self, flag: BOOL) -> Self {
        unsafe {
            let _: () = msg_send!(self.0, setTemplate: flag);
        }
        self
    }
}
|
use rand::Rng;
use std::f32::consts;
/// Builds a tiny 2-1-2 network with random parameters and runs one forward pass.
fn main() {
    let net = Network::new(vec![2,1,2]);
    println!("{:?}", net);
    // `feedforward` takes ownership of the network's biases and weights.
    let result = feedforward(net.biases, net.weights, vec![0.1, 0.5]);
    println!("{:?}", result);
}
/// A fully connected feed-forward neural network.
#[derive(Debug)]
struct Network {
    // Number of layers, including the input layer.
    num_layer: u8,
    // Number of neurons in each layer.
    sizes: Vec<i16>,
    // biases[i][j]: bias of neuron j in layer i+1 (the input layer has none).
    biases: Vec<Vec<f32>>,
    // weights[i][j][k]: weight of element j of layer i+1 w.r.t. output k of layer i.
    weights: Vec<Vec<Vec<f32>>>,
}
impl Network {
    /// Creates a network with randomly initialized parameters.
    ///
    /// `sizes[i]` is the number of neurons in layer `i`; biases are sampled
    /// uniformly from [0, 1) and weights from [-1, 1).
    fn new(sizes: Vec<i16>) -> Network {
        let num_layer: u8 = sizes.len() as u8;
        let sizes: Vec<i16> = sizes;
        let mut biases = Vec::new();
        let mut weights = Vec::new();
        // x: "input" side of each weight matrix; y: the "output" side.
        let x = &sizes[..sizes.len()-1];
        let y = &sizes[1..];
        let mut rng = rand::thread_rng();
        // Fill 'biases' with random values:
        // one vector per non-input layer, sized to that layer.
        for i in 0..y.len() {
            let layer = (0..y[i]).map(|_| rng.gen_range::<f32>(0.0,1.0)).collect();
            biases.push(layer)
        }
        // Fill 'weights': a list of matrices.
        // weights[i][j][k] = weight of element j of layer i+1 w.r.t. output k of layer i
        for i in 0..y.len() {
            let mut linha = Vec::new();
            for _ in 0..y[i] {
                let coluna: Vec<f32> = (0..x[i]).map(|_| rng.gen_range::<f32>(-1.0,1.0)).collect();
                linha.push(coluna);
            }
            weights.push(linha);
        }
        Network { num_layer, sizes, biases, weights }
    }
}
/// The logistic sigmoid: 1 / (1 + e^(-x)).
fn sigmoid(x: f32) -> f32 {
    // `exp` is clearer and better-conditioned than `powf(consts::E, -x)`.
    1.0 / (1.0 + (-x).exp())
}
fn vec_sigmoid(v: Vec<f32>) -> Vec<f32> {
let mut result: Vec<f32> = vec![0.0; v.len()];
for i in 0..v.len() {
result[i] = sigmoid(v[i])
}
result
}
/// Runs one forward pass through the network, consuming its biases and
/// weights, and returns the activations of the final layer.
fn feedforward(biases: Vec<Vec<f32>>, weights: Vec<Vec<Vec<f32>>>, a: Vec<f32>) -> Vec<f32> {
    let mut result = a;
    for i in 0..biases.len() {
        println!("Camada {}", i);
        let b = &biases[i];
        let w = &weights[i];
        let mut layer_output: Vec<f32> = Vec::new();
        layer_output.resize(w.len(), 0.0);
        // Matrix-vector product: layer_output[j] = sum_k w[j][k] * result[k].
        for j in 0..w.len() {
            for k in 0..w[j].len() {
                layer_output[j] += w[j][k] * result[k];
            }
            // NOTE(review): the bias is SUBTRACTED here; the conventional
            // formulation is sigmoid(w·a + b). Confirm this is intentional.
            layer_output[j] -= b[j];
        }
        println!("Camada finalizada");
        result = vec_sigmoid(layer_output);
    }
    result
}
|
use astroplant_mqtt::{KitRpc, ServerRpcRequest};
/// Connects to the MQTT broker, fires a couple of kit RPC queries from a
/// background thread, and answers incoming server RPC requests until the
/// connection drops.
fn main() {
    let (receiver, kits_rpc) = astroplant_mqtt::run(
        "mqtt.ops".to_owned(),
        1883,
        "server".to_owned(),
        "abcdef".to_owned(),
    );
    // Query the development kit in the background.
    std::thread::spawn(move || {
        println!("Querying kit with serial 'k_develop'");
        let kit_rpc: KitRpc = kits_rpc.kit_rpc("k_develop".to_owned());
        let version = futures::executor::block_on(kit_rpc.version());
        println!("Version response: {:?}", version);
        let uptime = futures::executor::block_on(kit_rpc.uptime());
        println!("Uptime response: {:?}", uptime);
    });
    // Answer only Version requests; everything else is ignored.
    while let Ok(message) = receiver.recv() {
        println!("Received request: {:?}", message);
        if let astroplant_mqtt::MqttApiMessage::ServerRpcRequest(
            ServerRpcRequest::Version { response },
        ) = message
        {
            response
                .send("astroplant-mqtt-bin-tester".to_owned())
                .unwrap();
        }
    }
    println!("Disconnected")
}
|
#![cfg(test)]
mod util;
mod connect_obvious_tests;
mod prune_tests;
mod reg_tests;
mod zero_area_loop_tests;
mod graph_stitch_tests;
|
pub mod udp_server;
pub mod data_structs;
extern crate csv;
extern crate chrono;
extern crate byteorder;
/// Entry point: starts the UDP server.
///
/// The number of header lines to skip may be supplied as the first
/// command-line argument; it defaults to 4 (the previously hard-coded value),
/// so running with no arguments behaves exactly as before.
fn main() {
    let lines_to_skip: usize = std::env::args()
        .nth(1)
        .and_then(|arg| arg.parse().ok())
        .unwrap_or(4);
    udp_server::socket_response("0.0.0.0", 13389, lines_to_skip);
}
|
#[macro_use]
extern crate criterion;
use criterion::{BatchSize, Criterion};
use hacspec_p256::*;
/// Criterion benchmark for a single P-256 scalar-point multiplication (ECDH).
fn benchmark(c: &mut Criterion) {
    // TODO: allow key generation and make these random
    c.bench_function("P256 ECDH", |b| {
        b.iter_batched(
            // Setup closure (excluded from timing): a fixed scalar and point.
            || {
                let k = P256Scalar::from_hex(
                    "0612465c89a023ab17855b0a6bcebfd3febb53aef84138647b5352e02c10c346",
                );
                let p = (
                    P256FieldElement::from_hex(
                        "62d5bd3372af75fe85a040715d0f502428e07046868b0bfdfa61d731afe44f26",
                    ),
                    P256FieldElement::from_hex(
                        "ac333a93a9e70a81cd5a95b5bf8d13990eb741c8c38872b4a07d275a014e30cf",
                    ),
                );
                (k, p)
            },
            // Timed routine: the scalar-point multiplication itself.
            |(k, p)| {
                let _r = p256_point_mul(k, p);
            },
            BatchSize::SmallInput,
        )
    });
}
criterion_group!(benches, benchmark);
criterion_main!(benches);
|
extern crate tree_child;
use tree_child::network::TcNet;
use tree_child::newick;
use tree_child::tree_child_sequence;
use tree_child::tree::TreeBuilder;
/// Test that we correctly construct a network if all trees are the same
#[test]
fn trivial_network() {
    let mut builder = TreeBuilder::new();
    // Three identical trees: the network should need no reticulations.
    let tree_newick = "(((a,b),(c,d)),(e,f));\n(((a,b),(c,d)),(e,f));\n(((a,b),(c,d)),(e,f));\n";
    newick::parse_forest(&mut builder, tree_newick).unwrap();
    let trees = builder.trees();
    // NOTE(review): the meaning of the `32, Some(1), true, true` arguments is
    // not visible here — confirm against `tree_child_sequence`'s signature.
    let seq = tree_child_sequence::tree_child_sequence(
        trees.clone(), 32, Some(1), true, true);
    let network = TcNet::from_seq(seq);
    let network_newick = newick::format_network(&network).unwrap();
    // The network degenerates to the common input tree.
    assert_eq!(network_newick, "(((a,b),(c,d)),(e,f));");
}
/// Test that we correctly construct a simple network with one reticulation
#[test]
fn simple_network_one_reticulation() {
    let mut builder = TreeBuilder::new();
    // Two conflicting placements of `b` force a single reticulation.
    let tree_newick = "((a,b),c);\n(a,(b,c));\n";
    newick::parse_forest(&mut builder, tree_newick).unwrap();
    let trees = builder.trees();
    let seq = tree_child_sequence::tree_child_sequence(
        trees.clone(), 32, Some(1), true, true);
    let network = TcNet::from_seq(seq);
    let network_newick = newick::format_network(&network).unwrap();
    // `#H0` marks the single reticulation (hybrid) node in extended Newick.
    assert_eq!(network_newick, "((a)#H0,((#H0,b),c));");
}
/// Test that we correctly construct a more complex network with three reticulations
#[test]
fn complex_network_three_reticulations() {
    let mut builder = TreeBuilder::new();
    let tree_newick =
        "(((a,c),d),((b,f),((g,e),h)));\n((a,(d,c)),(((e,g),(h,f)),b));\n((((h,(f,((g,e),c))),b),a),d);\n";
    newick::parse_forest(&mut builder, tree_newick).unwrap();
    let trees = builder.trees();
    let seq = tree_child_sequence::tree_child_sequence(
        trees.clone(), 1, None, true, true);
    let network = TcNet::from_seq(seq);
    let network_newick = newick::format_network(&network).unwrap();
    // Three hybrid nodes (#H0, #H1, #H2) are expected in the output.
    assert_eq!(network_newick, "((#H0,(((#H0,c))#H1,d)),((a)#H0,((#H2,b),((#H2,(#H1,(g,e))),((f)#H2,h)))));");
}
|
use proconio::{fastout, input};
#[fastout]
/// Prints the product of all `a_i`, or "-1" if it exceeds 10^18
/// (and "0" if any factor is zero).
fn main() {
    input! {
        n: i64,
        mut a_list: [i128; n],
    }
    // Any zero factor makes the whole product zero; check this first, since a
    // zero product can never overflow.
    for a in a_list.iter() {
        if a == &0 {
            println!("0");
            return;
        }
    }
    // Bug fix: the old code printed "0" whenever some a_i >= 10^18, which is
    // wrong — a product exceeding 10^18 (with no zero factor) must print "-1",
    // and a_i == 10^18 with all other factors 1 is a valid product. The
    // division guard below already handles both cases, so that loop is gone.
    let mut ans: i128 = 1;
    for a in a_list {
        // Overflow-safe check for `ans * a <= 10^18`.
        if a <= 1000000000000000000 / ans {
            ans *= a;
        } else {
            println!("-1");
            return;
        }
    }
    println!("{}", ans);
}
|
use crate::error::Error;
use crate::error::ErrorKind;
use crate::kind::Kind;
use crate::class::Class;
use crate::dnssec;
use crate::edns;
use crate::ser::Serializer;
use crate::ser::Serialize;
use crate::de::Deserializer;
use crate::de::Deserialize;
use base64;
use std::io;
use std::net::IpAddr;
use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
/// An opaque digest (byte string), wrapped to provide hex/base64 formatting.
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct Digest<T: AsRef<[u8]>> {
    // Any byte container (e.g. `Vec<u8>`, `[u8; N]`, `&[u8]`).
    inner: T
}
impl<T: AsRef<[u8]>> Digest<T> {
    /// Wraps a byte container in a `Digest`.
    #[inline]
    pub fn new(inner: T) -> Self {
        Self { inner }
    }
    /// Returns the digest as a byte slice.
    #[inline]
    pub fn as_bytes(&self) -> &[u8] {
        self.inner.as_ref()
    }
    /// Consumes the digest, returning the underlying container.
    #[inline]
    pub fn into_inner(self) -> T {
        self.inner
    }
    /// Length of the digest in bytes.
    #[inline]
    pub fn len(&self) -> usize {
        self.as_bytes().len()
    }
    /// Whether the digest is empty (companion to `len`, per Rust convention;
    /// fixes clippy `len_without_is_empty`).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.as_bytes().is_empty()
    }
    /// Returns the digest encoded as a base64 string.
    #[inline]
    pub fn base64(&self) -> String {
        base64::encode(self.as_bytes())
    }
}
impl<T: AsRef<[u8]>> AsRef<[u8]> for Digest<T> {
    /// Delegates directly to the wrapped container's byte view.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.inner.as_ref()
    }
}
impl<T: AsRef<[u8]>> std::fmt::Debug for Digest<T> {
    /// Debug-prints the digest as a byte slice, e.g. `[222, 173, 190, 239]`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let bytes = self.as_bytes();
        write!(f, "{:?}", bytes)
    }
}
impl<T: AsRef<[u8]>> std::fmt::LowerHex for Digest<T> {
    /// Writes the digest as lowercase hex; the alternate flag (`{:#x}`)
    /// prepends `0x`. Empty digests produce no output at all.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let data = self.as_bytes();
        // `!is_empty()` over `len() > 0` (clippy::len_zero).
        if !data.is_empty() {
            if f.alternate() {
                f.write_str("0x")?;
            }
            for n in data.iter() {
                write!(f, "{:02x}", n)?;
            }
        }
        Ok(())
    }
}
impl<T: AsRef<[u8]>> std::fmt::UpperHex for Digest<T> {
    /// Writes the digest as uppercase hex; the alternate flag (`{:#X}`)
    /// prepends `0x`. Empty digests produce no output at all.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let data = self.as_bytes();
        // `!is_empty()` over `len() > 0` (clippy::len_zero).
        if !data.is_empty() {
            if f.alternate() {
                f.write_str("0x")?;
            }
            for n in data.iter() {
                write!(f, "{:02X}", n)?;
            }
        }
        Ok(())
    }
}
// 4.1.3. Resource record format
// https://tools.ietf.org/html/rfc1035#section-4.1.3
//
// The answer, authority, and additional sections all share the same
// format: a variable number of resource records, where the number of
// records is specified in the corresponding count field in the header.
// Each resource record has the following format:
// 1 1 1 1 1 1
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | |
// / /
// / NAME /
// | |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | TYPE |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | CLASS |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | TTL |
// | |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | RDLENGTH |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
// / RDATA /
// / /
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
//
// where:
//
// NAME a domain name to which this resource record pertains.
//
// TYPE two octets containing one of the RR type codes. This
// field specifies the meaning of the data in the RDATA
// field.
//
// CLASS two octets which specify the class of the data in the
// RDATA field.
//
// TTL a 32 bit unsigned integer that specifies the time
// interval (in seconds) that the resource record may be
// cached before it should be discarded. Zero values are
// interpreted to mean that the RR can only be used for the
// transaction in progress, and should not be cached.
//
// RDLENGTH an unsigned 16 bit integer that specifies the length in
// octets of the RDATA field.
//
// RDATA a variable length string of octets that describes the
// resource. The format of this information varies
// according to the TYPE and CLASS of the resource record.
// For example, the if the TYPE is A and the CLASS is IN,
// the RDATA field is a 4 octet ARPA Internet address.
//
// Extension Mechanisms for DNS (EDNS(0))
//
// 6.1. OPT Record Definition
// https://tools.ietf.org/html/rfc6891#section-6.1
//
// An OPT RR has a fixed part and a variable set of options expressed as
// {attribute, value} pairs. The fixed part holds some DNS metadata,
// and also a small collection of basic extension elements that we
// expect to be so popular that it would be a waste of wire space to
// encode them as {attribute, value} pairs.
//
// The fixed part of an OPT RR is structured as follows:
//
// +------------+--------------+------------------------------+
// | Field Name | Field Type | Description |
// +------------+--------------+------------------------------+
// | NAME | domain name | MUST be 0 (root domain) |
// | TYPE | u_int16_t | OPT (41) |
// | CLASS | u_int16_t | requestor's UDP payload size |
// | TTL | u_int32_t | extended RCODE and flags |
// | RDLEN | u_int16_t | length of all RDATA |
// | RDATA | octet stream | {attribute,value} pairs |
// +------------+--------------+------------------------------+
//
// OPT RR Format
//
//
// 6.1.3. OPT Record TTL Field Use
//
// The extended RCODE and flags, which OPT stores in the RR Time to Live
// (TTL) field, are structured as follows:
//
// +0 (MSB) +1 (LSB)
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 0: | EXTENDED-RCODE | VERSION |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 2: | DO| Z |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
//
// EXTENDED-RCODE
// Forms the upper 8 bits of extended 12-bit RCODE (together with the
// 4 bits defined in [RFC1035]. Note that EXTENDED-RCODE value 0
// indicates that an unextended RCODE is in use (values 0 through
// 15).
//
// VERSION
// Indicates the implementation level of the setter. Full
// conformance with this specification is indicated by version '0'.
// Requestors are encouraged to set this to the lowest implemented
// level capable of expressing a transaction, to minimise the
// responder and network load of discovering the greatest common
// implementation level between requestor and responder. A
// requestor's version numbering strategy MAY ideally be a run-time
// configuration option.
// If a responder does not implement the VERSION level of the
// request, then it MUST respond with RCODE=BADVERS. All responses
// MUST be limited in format to the VERSION level of the request, but
// the VERSION of each response SHOULD be the highest implementation
// level of the responder. In this way, a requestor will learn the
// implementation level of a responder as a side effect of every
// response, including error responses and including RCODE=BADVERS.
//
// 6.1.4. Flags
//
// DO
// DNSSEC OK bit as defined by [RFC3225].
//
// Z
// Set to zero by senders and ignored by receivers, unless modified
// in a subsequent specification.
//
//
// The variable part of an OPT RR may contain zero or more options in
// the RDATA. Each option MUST be treated as a bit field. Each option
// is encoded as:
//
// +0 (MSB) +1 (LSB)
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 0: | OPTION-CODE |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 2: | OPTION-LENGTH |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 4: | |
// / OPTION-DATA /
// / /
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
//
// OPTION-CODE
// Assigned by the Expert Review process as defined by the DNSEXT
// working group and the IESG.
//
// OPTION-LENGTH
// Size (in octets) of OPTION-DATA.
//
// OPTION-DATA
// Varies per OPTION-CODE. MUST be treated as a bit field.
//
// The order of appearance of option tuples is not defined. If one
// option modifies the behaviour of another or multiple options are
// related to one another in some way, they have the same effect
// regardless of ordering in the RDATA wire encoding.
//
// Any OPTION-CODE values not understood by a responder or requestor
// MUST be ignored. Specifications of such options might wish to
// include some kind of signaled acknowledgement. For example, an
// option specification might say that if a responder sees and supports
// option XYZ, it MUST include option XYZ in its response.
//
//
//
// Client Subnet in DNS Queries
//
// 6. Option Format
// https://tools.ietf.org/html/rfc7871#section-6
//
// +0 (MSB) +1 (LSB)
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 0: | OPTION-CODE |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 2: | OPTION-LENGTH |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 4: | FAMILY |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 6: | SOURCE PREFIX-LENGTH | SCOPE PREFIX-LENGTH |
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
// 8: | ADDRESS... /
// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
//
// The format of the address part depends on the value of FAMILY. This
// document only defines the format for FAMILY 1 (IPv4) and FAMILY 2 (IPv6).
//
// Defines a resource-record struct: the common NAME/CLASS/TTL fields plus the
// listed RDATA fields, a `kind()` accessor, and a `PartialEq` impl.
//
// NOTE: the generated `PartialEq` compares `name`, `class` and the RDATA
// fields but NOT `ttl` — two records differing only in TTL compare equal.
macro_rules! rr {
    ($name:ident, $($element: ident: $ty: ty),*) => {
        #[derive(Debug, Eq, Hash, Clone)]
        pub struct $name {
            pub name: String,
            pub class: Class,
            pub ttl: u32,
            $(pub $element: $ty),* // RDATA
        }
        impl $name {
            pub const KIND: Kind = Kind::$name;
            #[inline]
            pub const fn kind(&self) -> Kind {
                Self::KIND
            }
        }
        impl PartialEq for $name {
            fn eq(&self, other: &Self) -> bool {
                if self.name.as_str() != other.name.as_str() {
                    return false;
                }
                if self.class != other.class {
                    return false;
                }
                // Expands to `a == b && c == d && ...` over the RDATA fields.
                $(self.$element == other.$element) && *
            }
        }
    };
}
// 3.4.1. A RDATA format (RFC 1035).
rr! { A,
    value: Ipv4Addr
}
// 2.2 AAAA data format (RFC 3596).
rr! { AAAA,
    value: Ipv6Addr
}
rr! { NS,
    value: String
}
rr! { CNAME,
    value: String
}
rr! { DNAME,
    value: String
}
rr! { PTR,
    value: String
}
rr! { TXT,
    value: String
}
rr! { MX,
    // NOTE(review): RFC 1035 defines PREFERENCE as a 16-bit unsigned
    // integer; `i16` is used here — confirm this is intended.
    preference: i16,
    exchange: String
}
rr! { SOA,
    mname: String,
    rname: String,
    serial: u32,
    refresh: i32,
    retry: i32,
    expire: i32,
    minimum: u32
}
// https://tools.ietf.org/html/rfc2782
rr! { SRV,
    priority: u16,
    weight: u16,
    port: u16,
    // The domain name of the target host.
    // A Target of "." means that the service is decidedly not
    // available at this domain.
    target: String
}
// 3.3.2. HINFO RDATA format
// https://tools.ietf.org/html/rfc1035#section-3.3.2
rr! { HINFO,
    // https://tools.ietf.org/html/rfc8482#section-4.2
    // RFC 8482 says the `cpu` field should be set to "RFC8482",
    // and the `os` field should be set to NULL.
    cpu: String,
    os: String
}
// ===== DNSSEC ======
// 2. The DNSKEY Resource Record (RFC 4034).
rr! { DNSKEY,
    flags: dnssec::DNSKEYFlags, // 16 bits
    protocol: dnssec::DNSKEYProtocol, // 8 bits
    algorithm: dnssec::Algorithm, // 8 bits
    public_key: Digest<Vec<u8>>
}
// 3. The RRSIG Resource Record (RFC 4034).
rr! { RRSIG,
    type_covered: Kind,
    algorithm: dnssec::Algorithm, // 8 bits
    labels: u8,
    original_ttl: u32,
    signature_expiration: u32,
    signature_inception: u32,
    key_tag: u16,
    signer_name: String,
    signature: Digest<Vec<u8>>
}
// 4. The NSEC Resource Record (RFC 4034).
rr! { NSEC,
    next_domain_name: String,
    type_bit_maps: Vec<Kind>
}
// 3. The NSEC3 Resource Record (RFC 5155).
rr! { NSEC3,
    // NOTE(review): NSEC3 hash algorithms come from a different IANA
    // registry than DNSKEY algorithms — confirm reusing `dnssec::Algorithm`
    // here is intended.
    hash_algorithm: dnssec::Algorithm, // 8 bits
    flags: dnssec::NSEC3Flags, // 8 bits
    iterations: u16,
    // salt_length: u8,
    salt: Digest<Vec<u8>>,
    // hash_length: u8,
    next_hashed_owner_name: Digest<Vec<u8>>, // It is the unmodified binary hash value.
    type_bit_maps: Vec<Kind>
}
// 4. The NSEC3PARAM Resource Record (RFC 5155).
rr! { NSEC3PARAM,
    hash_algorithm: dnssec::Algorithm, // 8 bits
    flags: u8,
    iterations: u16,
    // salt_length: u8,
    salt: Digest<Vec<u8>>
}
// 5. The DS Resource Record (RFC 4034).
rr! { DS,
    key_tag: u16,
    algorithm: dnssec::Algorithm,
    digest_type: dnssec::DigestKind,
    digest: Digest<Vec<u8>>
}
// 5.1.1. Canonical Presentation Format
// https://tools.ietf.org/html/rfc6844#section-5.1.1
rr! { CAA,
    // Is an unsigned integer between 0 and 255.
    //
    // 7.3. Certification Authority Restriction Flags
    // https://tools.ietf.org/html/rfc6844#section-7.3
    //
    // Flag Meaning Reference
    // ----------- ---------------------------------- ---------
    // 0 Issuer Critical Flag [RFC6844]
    // 1-7 Reserved [RFC6844]
    flags: u8,
    // Is a non-zero sequence of US-ASCII letters and numbers in lower case.
    //
    // 7.2. Certification Authority Restriction Properties
    // https://tools.ietf.org/html/rfc6844#section-7.2
    //
    // Tag Meaning Reference
    // ----------- -------------------------------------- ---------
    // issue Authorization Entry by Domain [RFC6844]
    // issuewild Authorization Entry by Wildcard Domain [RFC6844]
    // iodef Report incident by IODEF report [RFC6844]
    // auth Reserved [HB2011]
    // path Reserved [HB2011]
    // policy Reserved [HB2011]
    tag: String,
    // Is the <character-string> encoding of the value field as specified in [RFC1035], Section 5.1.
    value: String
}
// 4.5. URI RDATA Wire Format
// https://tools.ietf.org/html/rfc7553#section-4.5
rr!{ URI,
    priority: u16,
    weight: u16,
    target: String
}
/// EDNS Client Subnet option data (RFC 7871, Section 6).
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct ClientSubnet {
    /// SOURCE PREFIX-LENGTH.
    pub src_prefix_len: u8,
    /// SCOPE PREFIX-LENGTH.
    pub scope_prefix_len: u8,
    /// ADDRESS (IPv4 for FAMILY 1, IPv6 for FAMILY 2).
    pub address: IpAddr,
}
/// A single EDNS option ({attribute, value} pair) carried in an OPT record.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub enum OptAttr {
    ECS(ClientSubnet),
}
// ======= pseudo resource records ========
/// EDNS(0) OPT pseudo-record (RFC 6891, Section 6.1).
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct OPT {
    pub name: String, // MUST be 0 (root domain)
    pub udp_size: u16, // requestor's UDP payload size
    pub rcode: u8, // extended RCODE: the high 8 bits; the low 4 bits live in the DNS message header.
    pub version: u8, // version
    pub flags: edns::EDNSFlags, // flags
    pub attrs: Vec<OptAttr>, // RDATA
}
impl OPT {
pub const KIND: Kind = Kind::OPT;
#[inline]
pub const fn kind(&self) -> Kind {
Self::KIND
}
}
/// A DNS resource record of any supported type.
#[derive(PartialEq, Eq, Hash, Clone)]
pub enum Record {
    // 3.4.1. A RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.4.1
    A(A),
    // 2.2 AAAA data format
    // https://tools.ietf.org/html/rfc3596#section-2.2
    AAAA(AAAA),
    // 3.3.11. NS RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.11
    NS(NS),
    // 3.3.1. CNAME RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.1
    CNAME(CNAME),
    // DNAME Redirection in the DNS
    // https://tools.ietf.org/html/rfc6672
    DNAME(DNAME),
    // 3.3.14. TXT RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.14
    //
    // Using the Domain Name System To Store Arbitrary String Attributes
    // https://tools.ietf.org/html/rfc1464
    TXT(TXT),
    // 3.3.9. MX RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.9
    MX(MX),
    // 3.3.13. SOA RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.13
    SOA(SOA),
    // 3.3.12. PTR RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.12
    PTR(PTR),
    // https://tools.ietf.org/html/rfc2782
    SRV(SRV),
    // 4.2. Answer with a Synthesized HINFO RRset
    // https://tools.ietf.org/html/rfc8482#section-4.2
    //
    // The CPU field of the HINFO RDATA SHOULD be set to "RFC8482".
    // The OS field of the HINFO RDATA SHOULD be set to the null string to minimize the size of the response.
    //
    // Note: when a client issues a Class=ANY query, the DNS service may choose to:
    // 1. return all matching RRs,
    // 2. return a single HINFO RR, or
    // 3. return a similar list of RRs.
    //
    // 3.3.2. HINFO RDATA format
    // https://tools.ietf.org/html/rfc1035#section-3.3.2
    HINFO(HINFO),
    // https://tools.ietf.org/html/rfc4034
    DNSKEY(DNSKEY),
    // 3. The RRSIG Resource Record
    // https://tools.ietf.org/html/rfc4034#section-3
    RRSIG(RRSIG),
    // 4. The NSEC Resource Record
    // https://tools.ietf.org/html/rfc4034#page-12
    NSEC(NSEC),
    // 3. The NSEC3 Resource Record
    // https://tools.ietf.org/html/rfc5155#section-3.2
    NSEC3(NSEC3),
    // 4.2. NSEC3PARAM RDATA Wire Format
    // https://tools.ietf.org/html/rfc5155#page-13
    NSEC3PARAM(NSEC3PARAM),
    // 5.1. DS RDATA Wire Format
    // https://tools.ietf.org/html/rfc4034#section-5.1
    DS(DS),
    // 5.1.1. Canonical Presentation Format
    // https://tools.ietf.org/html/rfc6844#section-5.1.1
    CAA(CAA),
    // 6.1. OPT Record Definition
    // https://tools.ietf.org/html/rfc6891#section-6.1
    OPT(OPT),
    // ALL(ALL),
    // AXFR,
    // IXFR
    // 4.5. URI RDATA Wire Format
    // https://tools.ietf.org/html/rfc7553#section-4.5
    URI(URI),
    // NOTE: these record types are no longer in common use; supporting them
    // might still be worthwhile:
    // SSHFP
    // SPF
    //
    // 3.3.10. NULL RDATA format (EXPERIMENTAL)
    // https://tools.ietf.org/html/rfc1035#section-3.3.10
    // NULL
}
impl Record {
    /// The owner name (NAME field) this record pertains to.
    pub fn name(&self) -> &str {
        match self {
            Self::A(inner) => &inner.name,
            Self::AAAA(inner) => &inner.name,
            Self::NS(inner) => &inner.name,
            Self::CNAME(inner) => &inner.name,
            Self::DNAME(inner) => &inner.name,
            Self::TXT(inner) => &inner.name,
            Self::MX(inner) => &inner.name,
            Self::SOA(inner) => &inner.name,
            Self::PTR(inner) => &inner.name,
            Self::SRV(inner) => &inner.name,
            Self::HINFO(inner) => &inner.name,
            Self::DNSKEY(inner) => &inner.name,
            Self::RRSIG(inner) => &inner.name,
            Self::NSEC(inner) => &inner.name,
            Self::NSEC3(inner) => &inner.name,
            Self::NSEC3PARAM(inner) => &inner.name,
            Self::DS(inner) => &inner.name,
            Self::CAA(inner) => &inner.name,
            Self::OPT(inner) => &inner.name,
            Self::URI(inner) => &inner.name,
        }
    }
    /// The record type code (TYPE field).
    pub fn kind(&self) -> Kind {
        match self {
            Self::A(inner) => inner.kind(),
            Self::AAAA(inner) => inner.kind(),
            Self::NS(inner) => inner.kind(),
            Self::CNAME(inner) => inner.kind(),
            Self::DNAME(inner) => inner.kind(),
            Self::TXT(inner) => inner.kind(),
            Self::MX(inner) => inner.kind(),
            Self::SOA(inner) => inner.kind(),
            Self::PTR(inner) => inner.kind(),
            Self::SRV(inner) => inner.kind(),
            Self::HINFO(inner) => inner.kind(),
            Self::DNSKEY(inner) => inner.kind(),
            Self::RRSIG(inner) => inner.kind(),
            Self::NSEC(inner) => inner.kind(),
            Self::NSEC3(inner) => inner.kind(),
            Self::NSEC3PARAM(inner) => inner.kind(),
            Self::DS(inner) => inner.kind(),
            Self::CAA(inner) => inner.kind(),
            Self::OPT(inner) => inner.kind(),
            Self::URI(inner) => inner.kind(),
        }
    }
    /// The CLASS field.
    ///
    /// For OPT pseudo-records the wire CLASS field carries the requestor's
    /// UDP payload size (RFC 6891), so it is synthesized from `udp_size`.
    pub fn class(&self) -> Class {
        match self {
            Self::A(inner) => inner.class,
            Self::AAAA(inner) => inner.class,
            Self::NS(inner) => inner.class,
            Self::CNAME(inner) => inner.class,
            Self::DNAME(inner) => inner.class,
            Self::TXT(inner) => inner.class,
            Self::MX(inner) => inner.class,
            Self::SOA(inner) => inner.class,
            Self::PTR(inner) => inner.class,
            Self::SRV(inner) => inner.class,
            Self::HINFO(inner) => inner.class,
            Self::DNSKEY(inner) => inner.class,
            Self::RRSIG(inner) => inner.class,
            Self::NSEC(inner) => inner.class,
            Self::NSEC3(inner) => inner.class,
            Self::NSEC3PARAM(inner) => inner.class,
            Self::DS(inner) => inner.class,
            Self::CAA(inner) => inner.class,
            // OPT has no real CLASS; the field is reused for the UDP payload size.
            Self::OPT(inner) => Class(inner.udp_size),
            Self::URI(inner) => inner.class,
        }
    }
    /// The TTL field. OPT records are uncacheable, so their TTL is reported as 0.
    pub fn ttl(&self) -> u32 {
        match self {
            Self::A(inner) => inner.ttl,
            Self::AAAA(inner) => inner.ttl,
            Self::NS(inner) => inner.ttl,
            Self::CNAME(inner) => inner.ttl,
            Self::DNAME(inner) => inner.ttl,
            Self::TXT(inner) => inner.ttl,
            Self::MX(inner) => inner.ttl,
            Self::SOA(inner) => inner.ttl,
            Self::PTR(inner) => inner.ttl,
            Self::SRV(inner) => inner.ttl,
            Self::HINFO(inner) => inner.ttl,
            Self::DNSKEY(inner) => inner.ttl,
            Self::RRSIG(inner) => inner.ttl,
            Self::NSEC(inner) => inner.ttl,
            Self::NSEC3(inner) => inner.ttl,
            Self::NSEC3PARAM(inner) => inner.ttl,
            Self::DS(inner) => inner.ttl,
            Self::CAA(inner) => inner.ttl,
            // uncacheable
            Self::OPT(_) => 0,
            Self::URI(inner) => inner.ttl,
        }
    }
    /// Whether this is a pseudo-record (currently only OPT).
    pub fn is_pseudo_record(&self) -> bool {
        match self {
            Self::OPT(_) => true,
            _ => false,
        }
    }
}
impl std::fmt::Debug for Record {
    /// Forwards to the inner record's `Debug` impl, so the output shows the
    /// record struct itself without the enum-variant wrapper.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::A(inner) => std::fmt::Debug::fmt(inner, f),
            Self::AAAA(inner) => std::fmt::Debug::fmt(inner, f),
            Self::NS(inner) => std::fmt::Debug::fmt(inner, f),
            Self::CNAME(inner) => std::fmt::Debug::fmt(inner, f),
            Self::DNAME(inner) => std::fmt::Debug::fmt(inner, f),
            Self::TXT(inner) => std::fmt::Debug::fmt(inner, f),
            Self::MX(inner) => std::fmt::Debug::fmt(inner, f),
            Self::SOA(inner) => std::fmt::Debug::fmt(inner, f),
            Self::PTR(inner) => std::fmt::Debug::fmt(inner, f),
            Self::SRV(inner) => std::fmt::Debug::fmt(inner, f),
            Self::HINFO(inner) => std::fmt::Debug::fmt(inner, f),
            Self::DNSKEY(inner) => std::fmt::Debug::fmt(inner, f),
            Self::RRSIG(inner) => std::fmt::Debug::fmt(inner, f),
            Self::NSEC(inner) => std::fmt::Debug::fmt(inner, f),
            Self::NSEC3(inner) => std::fmt::Debug::fmt(inner, f),
            Self::NSEC3PARAM(inner) => std::fmt::Debug::fmt(inner, f),
            Self::DS(inner) => std::fmt::Debug::fmt(inner, f),
            Self::CAA(inner) => std::fmt::Debug::fmt(inner, f),
            Self::OPT(inner) => std::fmt::Debug::fmt(inner, f),
            Self::URI(inner) => std::fmt::Debug::fmt(inner, f),
        }
    }
}
impl Deserialize for Record {
    /// Decode one resource record from the wire format
    /// (RFC 1035 §4.1.3 plus the EDNS0 and DNSSEC extensions handled below).
    ///
    /// Returns `UnexpectedEof` when RDATA claims more bytes than the buffer
    /// holds, `InvalidData` for malformed EDNS payloads, and panics via
    /// `unimplemented!()` for record kinds without a decoder yet.
    fn deserialize(deserializer: &mut Deserializer) -> Result<Self, io::Error> {
        // NAME and TYPE are common to every record kind.
        let name = String::deserialize(deserializer)?;
        let kind = Kind(u16::deserialize(deserializer)?);
        // Reads the fixed CLASS/TTL/RDLENGTH header shared by all non-OPT
        // records. (OPT overloads CLASS as the requestor's UDP payload size,
        // so it reads its header inline instead of calling this.)
        #[inline]
        fn deserialize_normal_rr(deserializer: &mut Deserializer) -> Result<(Class, u32, u16), io::Error> {
            let class = Class(u16::deserialize(deserializer)?);
            let ttl = u32::deserialize(deserializer)?;
            let rdlen = u16::deserialize(deserializer)?;
            Ok((class, ttl, rdlen))
        }
        match kind {
            Kind::A => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let value = std::net::Ipv4Addr::deserialize(deserializer)?;
                Ok(Record::A(A { name, class, ttl, value }))
            },
            Kind::AAAA => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let value = std::net::Ipv6Addr::deserialize(deserializer)?;
                Ok(Record::AAAA(AAAA { name, class, ttl, value }))
            },
            Kind::NS => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let value = String::deserialize(deserializer)?;
                Ok(Record::NS(NS { name, class, ttl, value }))
            },
            Kind::CNAME => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let value = String::deserialize(deserializer)?;
                Ok(Record::CNAME(CNAME { name, class, ttl, value }))
            },
            Kind::DNAME => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let value = String::deserialize(deserializer)?;
                Ok(Record::DNAME(DNAME { name, class, ttl, value }))
            },
            Kind::TXT => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let buffer = deserializer.get_ref();
                let start = deserializer.position();
                let end = start + rdlen as usize;
                match buffer.get(start..end) {
                    Some(rdata) => {
                        // `*b as char` maps each byte 1:1 (Latin-1 style);
                        // the TXT payload is treated as opaque bytes, not UTF-8.
                        let value = (&rdata).iter().map(|b| *b as char).collect::<String>();
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::TXT(TXT { name, class, ttl, value }))
                    },
                    None => {
                        return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                    }
                }
            },
            Kind::MX => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let preference = i16::deserialize(deserializer)?;
                let exchange = String::deserialize(deserializer)?;
                Ok(Record::MX(MX { name, class, ttl, preference, exchange }))
            },
            Kind::SOA => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let mname = String::deserialize(deserializer)?;
                let rname = String::deserialize(deserializer)?;
                let serial = u32::deserialize(deserializer)?;
                let refresh = i32::deserialize(deserializer)?;
                let retry = i32::deserialize(deserializer)?;
                let expire = i32::deserialize(deserializer)?;
                let minimum = u32::deserialize(deserializer)?;
                Ok(Record::SOA(SOA { name, class, ttl, mname, rname, serial, refresh, retry, expire, minimum }))
            },
            Kind::PTR => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let value = String::deserialize(deserializer)?;
                Ok(Record::PTR(PTR { name, class, ttl, value }))
            },
            Kind::SRV => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let priority = u16::deserialize(deserializer)?;
                let weight = u16::deserialize(deserializer)?;
                let port = u16::deserialize(deserializer)?;
                let target = String::deserialize(deserializer)?;
                Ok(Record::SRV(SRV { name, class, ttl, priority, weight, port, target }))
            },
            Kind::HINFO => {
                // RFC 1035 §5.1 defines <character-string> encoding; in
                // practice HINFO mostly appears as the RFC 8482 minimal
                // answer, where `cpu` is set to "RFC8482" and `os` is NULL.
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                // RFC 8482 §4.2: the `cpu` field should be "RFC8482",
                // and the `os` field should be NULL.
                let buffer = deserializer.get_ref();
                let start = deserializer.position();
                let end = start + rdlen as usize;
                match buffer.get(start..end) {
                    Some(rdata) => {
                        debug!("HINFO RDATA: {:?}", rdata);
                        let mut cpu = (&rdata).iter().map(|b| *b as char).collect::<String>(); // "RFC8482"
                        // Drop the trailing NUL that terminates the os field, if present.
                        if let Some(last) = cpu.as_bytes().last() {
                            if last == &b'\0' {
                                cpu.pop();
                            }
                        }
                        debug!("HINFO RDATA cpu field: {:?}", cpu);
                        // os is always reported as empty (see RFC 8482 note above).
                        let os = String::new();
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::HINFO(HINFO { name, class, ttl, cpu, os }))
                    },
                    None => {
                        return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                    }
                }
            },
            Kind::OPT => {
                // EDNS0 pseudo-record (RFC 6891): CLASS carries the UDP
                // payload size, TTL is split into rcode/version/flags.
                let class = Class(u16::deserialize(deserializer)?);
                let udp_size = class.0;
                let rcode = u8::deserialize(deserializer)?;
                let version = u8::deserialize(deserializer)?;
                if version != edns::EDNS_V0 {
                    return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid EDNS version(must be 0)."));
                }
                let flags = edns::EDNSFlags::new_unchecked(u16::deserialize(deserializer)?);
                let mut rdlen = u16::deserialize(deserializer)?;
                let mut attrs: Vec<OptAttr> = Vec::new();
                // NOTE(review): `rdlen -= opt_len` below never subtracts the
                // 4-byte option header (code + length), so with two or more
                // options this loop may read past the RDATA — confirm.
                while rdlen > 4 {
                    let opt_code = edns::OptionCode(u16::deserialize(deserializer)?);
                    let opt_len = u16::deserialize(deserializer)?;
                    if opt_code == edns::OptionCode::EDNS_CLIENT_SUBNET {
                        let address_family = edns::AddressFamily(u16::deserialize(deserializer)?);
                        let src_prefix_len = u8::deserialize(deserializer)?;
                        let scope_prefix_len = u8::deserialize(deserializer)?;
                        // NOTE(review): reads a full 4/16-byte address; ECS
                        // (RFC 7871) permits truncated addresses — confirm.
                        let address = if address_family == edns::AddressFamily::IPV4 {
                            std::net::IpAddr::V4(std::net::Ipv4Addr::deserialize(deserializer)?)
                        } else if address_family == edns::AddressFamily::IPV6 {
                            std::net::IpAddr::V6(std::net::Ipv6Addr::deserialize(deserializer)?)
                        } else {
                            return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid EDNS Client Subnet AddressFamily."));
                        };
                        attrs.push(OptAttr::ECS(ClientSubnet { src_prefix_len, scope_prefix_len, address }));
                    } else {
                        // Unknown options are skipped, not rejected.
                        debug!("EDNS Attribute is droped. OptCode={:?} OptLen={:?}", opt_code, opt_len);
                        deserializer.set_position(deserializer.position() + opt_len as usize);
                    }
                    rdlen -= opt_len;
                }
                Ok(Record::OPT(OPT { name, udp_size, rcode, version, flags, attrs }))
            },
            // ===== DNSSEC ======
            Kind::DNSKEY => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let flags = dnssec::DNSKEYFlags::new_unchecked(u16::deserialize(deserializer)?);
                let protocol = dnssec::DNSKEYProtocol(u8::deserialize(deserializer)?);
                let algorithm = dnssec::Algorithm(u8::deserialize(deserializer)?);
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                // NOTE(review): the public key occupies rdlen minus the 4
                // fixed bytes already read; `start + rdlen` overshoots by 4
                // and relies on trailing buffer bytes being present — confirm.
                let end = start + rdlen as usize;
                match buf.get(start..end) {
                    Some(rdata) => {
                        let public_key = Digest::new(rdata.to_vec());
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::DNSKEY(DNSKEY { name, class, ttl, flags, protocol, algorithm, public_key }))
                    },
                    None => {
                        Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"))
                    }
                }
            },
            Kind::RRSIG => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let type_covered = Kind(u16::deserialize(deserializer)?);
                let algorithm = dnssec::Algorithm(u8::deserialize(deserializer)?);
                let labels = u8::deserialize(deserializer)?;
                let original_ttl = u32::deserialize(deserializer)?;
                let signature_expiration = u32::deserialize(deserializer)?;
                let signature_inception = u32::deserialize(deserializer)?;
                let key_tag = u16::deserialize(deserializer)?;
                let signer_name = String::deserialize(deserializer)?;
                // The signature is whatever remains of RDATA after the
                // variable-length signer name, hence end is anchored at
                // rdata_pos rather than the current position.
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                let end = rdata_pos + rdlen as usize;
                match buf.get(start..end) {
                    Some(rdata) => {
                        let signature = Digest::new(rdata.to_vec());
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::RRSIG(RRSIG {
                            name,
                            class,
                            ttl,
                            type_covered,
                            algorithm,
                            labels,
                            original_ttl,
                            signature_expiration,
                            signature_inception,
                            key_tag,
                            signer_name,
                            signature,
                        }))
                    },
                    None => {
                        dbg!(start, end);
                        Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"))
                    }
                }
            },
            Kind::NSEC => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let name_pos = deserializer.position();
                let next_domain_name = String::deserialize(deserializer)?;
                // Bytes consumed by the next-domain-name field; the type
                // bitmap is whatever remains of RDATA.
                let name_amt = deserializer.position() - name_pos;
                match (rdlen as usize).checked_sub(name_amt) {
                    Some(len) => {
                        let type_bit_maps = crate::de::read_type_bit_maps(deserializer, len)?;
                        let end = name_pos + rdlen as usize; // NOTE(review): unused binding
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::NSEC(NSEC { name, class, ttl, next_domain_name, type_bit_maps, }))
                    },
                    None => {
                        Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"))
                    },
                }
            },
            Kind::NSEC3 => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let hash_algorithm = dnssec::Algorithm(u8::deserialize(deserializer)?);
                let flags = dnssec::NSEC3Flags::new_unchecked(u8::deserialize(deserializer)?);
                let iterations = u16::deserialize(deserializer)?;
                // Salt: length-prefixed byte string.
                let salt_length = u8::deserialize(deserializer)?;
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                let end = start + salt_length as usize;
                let salt_data = buf.get(start..end);
                if salt_data.is_none() {
                    return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                }
                let salt = Digest::new(salt_data.unwrap().to_vec());
                deserializer.set_position(start + salt_length as usize);
                // Next hashed owner name: length-prefixed byte string.
                let hash_length = u8::deserialize(deserializer)?;
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                let end = start + hash_length as usize;
                let hash_data = buf.get(start..end);
                if hash_data.is_none() {
                    return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                }
                let next_hashed_owner_name = Digest::new(hash_data.unwrap().to_vec());
                deserializer.set_position(start + hash_length as usize);
                // Type bitmap fills the remainder of RDATA.
                let amt = deserializer.position() - rdata_pos;
                match (rdlen as usize).checked_sub(amt) {
                    Some(len) => {
                        let type_bit_maps = crate::de::read_type_bit_maps(deserializer, len)?;
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::NSEC3(NSEC3 {
                            name,
                            class,
                            ttl,
                            hash_algorithm,
                            flags,
                            iterations,
                            salt,
                            next_hashed_owner_name,
                            type_bit_maps,
                        }))
                    },
                    None => {
                        Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"))
                    }
                }
            },
            Kind::NSEC3PARAM => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let hash_algorithm = dnssec::Algorithm(u8::deserialize(deserializer)?);
                let flags = u8::deserialize(deserializer)?;
                let iterations = u16::deserialize(deserializer)?;
                let salt_length = u8::deserialize(deserializer)?;
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                let end = start + salt_length as usize;
                let salt_data = buf.get(start..end);
                if salt_data.is_none() {
                    return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                }
                let salt = Digest::new(salt_data.unwrap().to_vec());
                deserializer.set_position(rdata_pos + rdlen as usize);
                Ok(Record::NSEC3PARAM(NSEC3PARAM { name, class, ttl, hash_algorithm, flags, iterations, salt, }))
            },
            Kind::DS => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let key_tag = u16::deserialize(deserializer)?;
                let algorithm = dnssec::Algorithm(u8::deserialize(deserializer)?);
                let digest_type = dnssec::DigestKind(u8::deserialize(deserializer)?);
                // The digest fills whatever remains of RDATA.
                let amt = deserializer.position() - rdata_pos;
                match (rdlen as usize).checked_sub(amt) {
                    Some(len) => {
                        let buf = deserializer.get_ref();
                        let start = deserializer.position();
                        let end = start + len as usize;
                        let digest_data = buf.get(start..end);
                        if digest_data.is_none() {
                            return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                        }
                        let digest = Digest::new(digest_data.unwrap().to_vec());
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::DS(DS {
                            name,
                            class,
                            ttl,
                            key_tag,
                            algorithm,
                            digest_type,
                            digest,
                        }))
                    },
                    None => {
                        Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"))
                    }
                }
            },
            Kind::CAA => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let flags = u8::deserialize(deserializer)?;
                // Tag: length-prefixed ASCII string.
                let tag_len = u8::deserialize(deserializer)?;
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                let end = start + tag_len as usize;
                let tag_data = buf.get(start..end);
                if tag_data.is_none() {
                    return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                }
                let tag = tag_data.unwrap().iter().map(|b| *b as char).collect::<String>();
                deserializer.set_position(start + tag_len as usize);
                let buf = deserializer.get_ref();
                let start = deserializer.position();
                // Value: remaining RDATA after the 2 fixed bytes and the tag.
                match (rdlen as usize).checked_sub(2 + tag_len as usize) {
                    Some(len) => {
                        let end = start + len;
                        let value_data = buf.get(start..end);
                        if value_data.is_none() {
                            return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                        }
                        let value = value_data.unwrap().iter().map(|b| *b as char).collect::<String>();
                        deserializer.set_position(rdata_pos + rdlen as usize);
                        Ok(Record::CAA(CAA { name, class, ttl, flags, tag, value, }))
                    },
                    None => {
                        return Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
                    }
                }
            },
            Kind::URI => {
                let (class, ttl, rdlen) = deserialize_normal_rr(deserializer)?;
                let rdata_pos = deserializer.position();
                let priority = u16::deserialize(deserializer)?;
                let weight = u16::deserialize(deserializer)?;
                // The Target field contains the URI as a sequence of octets (without the
                // enclosing double-quote characters used in the presentation format).
                // The length of the Target field MUST be greater than zero.
                let buffer = deserializer.get_ref();
                let start = deserializer.position();
                // NOTE(review): `rdlen as usize - 4` underflows (panics) when
                // rdlen < 4, and the direct slice index below panics instead of
                // returning UnexpectedEof on a short buffer — confirm inputs
                // are pre-validated upstream.
                let end = start + rdlen as usize - 4;
                if end - start < 1 {
                    return Err(io::Error::new(io::ErrorKind::InvalidData, "The length of the URI.Target field MUST be greater than zero."));
                }
                let target = &buffer[start..end];
                let target = (&target).iter().map(|b| *b as char).collect::<String>();
                deserializer.set_position(rdata_pos + rdlen as usize);
                Ok(Record::URI(URI { name, class, ttl, priority, weight, target, }))
            },
            _ => {
                unimplemented!()
            }
        }
    }
}
/// Back-patch a big-endian `u16` length field that was written as a
/// placeholder at `rdlen_pos`: the patched value is the number of bytes
/// serialized *after* the 2-byte field itself. Also reused for the EDNS
/// per-option length field, which has the same layout.
fn write_rdlen(serializer: &mut Serializer, rdlen_pos: usize) -> Result<(), io::Error> {
    let len = serializer.position() - rdlen_pos - 2;
    if len > usize::from(std::u16::MAX) {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid rdlen."));
    }
    let be = (len as u16).to_be_bytes();
    let out = serializer.get_mut();
    out[rdlen_pos..rdlen_pos + 2].copy_from_slice(&be);
    Ok(())
}
impl Serialize for Record {
    /// Encode this resource record into the wire format. RDLENGTH is not
    /// known up front, so a zero placeholder is written and back-patched by
    /// `write_rdlen` once the RDATA has been serialized.
    fn serialize(&self, serializer: &mut Serializer) -> Result<(), io::Error> {
        // Emits the common NAME/TYPE/CLASS/TTL header, a placeholder RDLEN,
        // the caller-supplied RDATA statement(s), then patches RDLEN.
        macro_rules! serialize_normal_rr {
            ($rr:ident, $stmt:stmt) => {
                $rr.name.serialize(serializer)?;
                $rr.kind().0.serialize(serializer)?;
                $rr.class.0.serialize(serializer)?;
                $rr.ttl.serialize(serializer)?;
                let rdlen_pos = serializer.position();
                0u16.serialize(serializer)?; // RDLEN
                $stmt
                write_rdlen(serializer, rdlen_pos)?;
            }
        }
        match self {
            &Record::A(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.value.serialize(serializer)?;
                });
            },
            &Record::AAAA(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.value.serialize(serializer)?;
                });
            },
            &Record::NS(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.value.serialize(serializer)?;
                });
            },
            &Record::CNAME(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.value.serialize(serializer)?;
                });
            },
            &Record::DNAME(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.value.serialize(serializer)?;
                });
            },
            &Record::TXT(ref rr) => {
                serialize_normal_rr!(rr, {
                    // TXT text was decoded byte-per-char, so a non-ASCII
                    // string could not round-trip; reject it.
                    if !rr.value.is_ascii() {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid txt string(must be ASCII)."));
                    }
                    rr.value.as_bytes().serialize(serializer)?;
                });
            },
            &Record::MX(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.preference.serialize(serializer)?;
                    rr.exchange.serialize(serializer)?;
                });
            },
            &Record::SOA(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.mname.serialize(serializer)?;
                    rr.rname.serialize(serializer)?;
                    rr.serial.serialize(serializer)?;
                    rr.refresh.serialize(serializer)?;
                    rr.retry.serialize(serializer)?;
                    rr.expire.serialize(serializer)?;
                    rr.minimum.serialize(serializer)?;
                });
            },
            &Record::PTR(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.value.serialize(serializer)?;
                });
            },
            &Record::SRV(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.priority.serialize(serializer)?;
                    rr.weight.serialize(serializer)?;
                    rr.port.serialize(serializer)?;
                    rr.target.serialize(serializer)?;
                });
            },
            &Record::HINFO(ref rr) => {
                serialize_normal_rr!(rr, {
                    if !rr.cpu.is_ascii() {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid HINFO cpu string(must be ASCII)."));
                    }
                    rr.cpu.as_bytes().serialize(serializer)?;
                    // The os field is always written as NULL to save space,
                    // per RFC 8482 §4.2.
                    b'\0'.serialize(serializer)?
                });
            },
            &Record::OPT(ref rr) => {
                // EDNS0 pseudo-record: must sit at the root name, and its
                // header fields differ from normal records, so it is encoded
                // by hand instead of through the macro.
                if !rr.name.is_empty() {
                    return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid DNS name(ROOT Name must be empty)."));
                }
                rr.name.serialize(serializer)?;
                Kind::OPT.0.serialize(serializer)?;
                rr.udp_size.serialize(serializer)?;
                rr.rcode.serialize(serializer)?;
                rr.version.serialize(serializer)?;
                rr.flags.bits().serialize(serializer)?;
                let rdlen_pos = serializer.position();
                0u16.serialize(serializer)?; // RDLEN
                for attr in rr.attrs.iter() {
                    match attr {
                        OptAttr::ECS(ecs) => {
                            edns::OptionCode::EDNS_CLIENT_SUBNET.0.serialize(serializer)?; // OptCode
                            let opt_len_pos = serializer.position();
                            0u16.serialize(serializer)?; // OptLen
                            match ecs.address {
                                std::net::IpAddr::V4(_) => {
                                    edns::AddressFamily::IPV4.0.serialize(serializer)?;
                                },
                                std::net::IpAddr::V6(_) => {
                                    edns::AddressFamily::IPV6.0.serialize(serializer)?;
                                },
                            }
                            ecs.src_prefix_len.serialize(serializer)?;
                            ecs.scope_prefix_len.serialize(serializer)?;
                            ecs.address.serialize(serializer)?;
                            // write_rdlen's (pos - field_pos - 2) math fits
                            // the 2-byte OptLen field as well.
                            write_rdlen(serializer, opt_len_pos)?;
                        }
                    }
                }
                write_rdlen(serializer, rdlen_pos)?;
            },
            // ===== DNSSEC ======
            &Record::DNSKEY(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.flags.bits().serialize(serializer)?;
                    rr.protocol.0.serialize(serializer)?;
                    rr.algorithm.0.serialize(serializer)?;
                    rr.public_key.as_ref().serialize(serializer)?;
                });
            },
            &Record::RRSIG(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.type_covered.0.serialize(serializer)?;
                    rr.algorithm.0.serialize(serializer)?;
                    rr.labels.serialize(serializer)?;
                    rr.original_ttl.serialize(serializer)?;
                    rr.signature_expiration.serialize(serializer)?;
                    rr.signature_inception.serialize(serializer)?;
                    rr.key_tag.serialize(serializer)?;
                    rr.signer_name.serialize(serializer)?;
                    rr.signature.as_ref().serialize(serializer)?;
                });
            },
            &Record::NSEC(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.next_domain_name.serialize(serializer)?;
                    rr.type_bit_maps.as_slice().serialize(serializer)?;
                });
            },
            &Record::NSEC3(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.hash_algorithm.0.serialize(serializer)?;
                    rr.flags.bits().serialize(serializer)?;
                    rr.iterations.serialize(serializer)?;
                    // Salt and next-hashed-owner-name are u8-length-prefixed.
                    let salt_len = rr.salt.len();
                    if salt_len > std::u8::MAX as usize {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid salt length."));
                    }
                    (salt_len as u8).serialize(serializer)?;
                    rr.salt.as_ref().serialize(serializer)?;
                    let hash_len = rr.next_hashed_owner_name.len();
                    if hash_len > std::u8::MAX as usize {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid next hashed owner name length."));
                    }
                    (hash_len as u8).serialize(serializer)?;
                    rr.next_hashed_owner_name.as_ref().serialize(serializer)?;
                    rr.type_bit_maps.as_slice().serialize(serializer)?;
                });
            },
            &Record::NSEC3PARAM(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.hash_algorithm.0.serialize(serializer)?;
                    rr.flags.serialize(serializer)?;
                    rr.iterations.serialize(serializer)?;
                    let salt_len = rr.salt.len();
                    if salt_len > std::u8::MAX as usize {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid salt length."));
                    }
                    (salt_len as u8).serialize(serializer)?;
                    rr.salt.as_ref().serialize(serializer)?;
                });
            },
            &Record::DS(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.key_tag.serialize(serializer)?;
                    rr.algorithm.0.serialize(serializer)?;
                    rr.digest_type.0.serialize(serializer)?;
                    rr.digest.as_ref().serialize(serializer)?;
                });
            },
            &Record::CAA(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.flags.serialize(serializer)?;
                    let tag_len = rr.tag.len();
                    if tag_len > std::u8::MAX as usize {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid tag length."));
                    }
                    (tag_len as u8).serialize(serializer)?;
                    rr.tag.as_bytes().serialize(serializer)?;
                    rr.value.as_bytes().serialize(serializer)?;
                });
            },
            &Record::URI(ref rr) => {
                serialize_normal_rr!(rr, {
                    rr.priority.serialize(serializer)?;
                    rr.weight.serialize(serializer)?;
                    if !rr.target.is_ascii() {
                        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid URI target string(must be ASCII)."));
                    }
                    rr.target.as_bytes().serialize(serializer)?;
                });
            },
            #[allow(unreachable_patterns)]
            _ => {
                unimplemented!()
            },
        }
        Ok(())
    }
}
/// RRSIG presentation-format timestamp layout: "YYYYMMDDHHMMSS" (UTC).
const DNS_DATETIME_FORMAT: &str = "%Y%m%d%H%M%S";

/// Render a unix timestamp in the DNS presentation format above,
/// e.g. 1571029200 -> "20191014050000".
pub fn timestamp_to_datetime(timestamp: u32) -> String {
    let naive = chrono::NaiveDateTime::from_timestamp(i64::from(timestamp), 0);
    let utc = chrono::DateTime::<chrono::Utc>::from_utc(naive, chrono::Utc);
    utc.format(DNS_DATETIME_FORMAT).to_string()
}
// "20191026050000" --> 1573557530
pub fn datetime_to_timestamp(s: &str) -> Result<u32, Error> {
let timestamp: i64 = chrono::TimeZone::datetime_from_str(&chrono::Utc, s, DNS_DATETIME_FORMAT)
.map_err(|_| Error::from(ErrorKind::FormatError))?
.timestamp();
if timestamp < 0 || timestamp > std::u32::MAX as i64 {
return Err(Error::from(ErrorKind::FormatError));
}
let timestamp = timestamp as u32;
Ok(timestamp)
}
pub fn decode_hex(s: &str) -> Result<Vec<u8>, Error> {
if s.len() % 2 != 0 {
return Err(Error::new(ErrorKind::FormatError, "invalid hex sequence length"));
}
fn val(c: u8) -> Result<u8, Error> {
match c {
b'A'..=b'F' => Ok(c - b'A' + 10),
b'a'..=b'f' => Ok(c - b'a' + 10),
b'0'..=b'9' => Ok(c - b'0'),
_ => Err(Error::new(ErrorKind::FormatError, "invalid hex character")),
}
}
let mut out = Vec::with_capacity(s.len() / 2);
for chunk in s.as_bytes().chunks(2) {
let a = chunk[0];
let b = chunk[1];
let v = val(a)? << 4 | val(b)?;
out.push(v);
}
Ok(out)
}
impl std::str::FromStr for Record {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
// 支持解析的 Kinds
// ['A', 'AAAA', 'DNSKEY', 'DS', 'NS', 'NSEC', 'RRSIG', 'SOA']
//
// https://tools.ietf.org/html/rfc1035#section-5.1
// 两种文本格式:
// 1. DOMAIN_NAME [<TTL>] [<class>] <type> <RDATA>
// 2. DOMAIN_NAME [<class>] [<TTL>] <type> <RDATA>
// 这里支持的是第一种格式.
//
let bytes = s.as_bytes();
let mut name: Option<String> = None;
let mut kind: Option<Kind> = None;
let mut class: Option<Class> = None;
let mut ttl: Option<u32> = None;
let mut rdata: Option<&str> = None;
let mut offset = 0usize;
let mut idx = 0usize;
while offset < bytes.len() {
let ch = bytes[offset];
if ch != b'\t' {
let start = offset;
offset += 1;
while offset < bytes.len() {
if bytes[offset] == b'\t' {
break;
} else {
offset += 1;
}
}
assert!(offset == bytes.len() || bytes[offset] == b'\t');
let end = offset;
let data = &s[start..end];
if idx == 0 {
// Domain Name
let mut domain_name = s[start..end].to_lowercase();
if !domain_name.ends_with('.') {
return Err(Error::new(ErrorKind::FormatError, format!("Invalid DNS Name field: {:?} ", domain_name)));
}
domain_name.pop();
name = Some(domain_name);
} else if idx == 1 {
// TTL
match data.parse::<u32>() {
Ok(n) => {
ttl = Some(n);
},
Err(_) => {
return Err(Error::new(ErrorKind::FormatError, format!("Invalid DNS TTL field: {:?} ", data)));
}
}
} else if idx == 2 {
// Class
match data.parse::<Class>() {
Ok(v) => {
class = Some(v);
},
Err(_) => {
return Err(Error::new(ErrorKind::FormatError, format!("Invalid DNS Class field: {:?} ", data)));
}
}
} else if idx == 3 {
// Kind (Type)
match data.parse::<Kind>() {
Ok(v) => {
kind = Some(v);
},
Err(_) => {
return Err(Error::new(ErrorKind::FormatError, format!("Invalid DNS Type field: {:?} ", data)));
}
}
} else if idx == 4 {
// Data
rdata = Some(data);
} else {
unreachable!();
}
idx += 1;
} else {
offset += 1;
}
}
let (name, kind, class, ttl, rdata) = match (name, kind, class, ttl, rdata) {
(Some(name), Some(kind), Some(class), Some(ttl), Some(rdata)) => (name, kind, class, ttl, rdata),
_ => return Err(Error::new(ErrorKind::FormatError, "Invalid DNS header field.")),
};
match kind {
Kind::A => {
let v = rdata.parse::<Ipv4Addr>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
Ok(Record::A(A { name, class, ttl, value: v }))
},
Kind::AAAA => {
let v = rdata.parse::<Ipv6Addr>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
Ok(Record::AAAA(AAAA { name, class, ttl, value: v }))
},
Kind::NS => {
let mut v = rdata.to_string();
if v.ends_with('.') {
v.pop();
}
Ok(Record::NS(NS { name, class, ttl, value: v }))
},
Kind::DS => {
// "40387 8 2 F2A6E4458136145067FCA10141180BAC9FD4CA768908707D98E5E2412039A1E3"
let mut tmp = rdata.split(' ');
let key_tag = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u16>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let algorithm = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u8>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let algorithm = dnssec::Algorithm(algorithm);
let digest_type = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u8>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let digest_type = dnssec::DigestKind(digest_type);
let digest = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?;
let digest = decode_hex(digest)?;
let digest = Digest::new(digest);
Ok(Record::DS(DS { name, class, ttl, key_tag, algorithm, digest_type, digest, }))
},
Kind::DNSKEY => {
// "256 3 8 AwEAAbPwrxwtOMENWvblQbUFwBllR7ZtXsu9rg="
let mut tmp = rdata.split(' ');
let flags = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u16>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let flags = dnssec::DNSKEYFlags::new_unchecked(flags);
let protocol = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u8>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let protocol = dnssec::DNSKEYProtocol(protocol);
let algorithm = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u8>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let algorithm = dnssec::Algorithm(algorithm);
let public_key = tmp.next().ok_or(Error::from(ErrorKind::FormatError))?;
let public_key = base64::decode(public_key).map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let public_key = Digest::new(public_key);
Ok(Record::DNSKEY(DNSKEY { name, class, ttl, flags, protocol, algorithm, public_key, }))
},
Kind::NSEC => {
// "ye. NS DS RRSIG NSEC"
let mut tmp = rdata.split(' ');
let mut next_domain_name = tmp.next().ok_or(Error::from(ErrorKind::FormatError))?.to_string();
if next_domain_name.ends_with('.') {
next_domain_name.pop();
}
let mut type_bit_maps = Vec::new();
for kind in tmp {
if let Ok(kind) = kind.parse::<Kind>() {
type_bit_maps.push(kind);
}
}
type_bit_maps.sort();
Ok(Record::NSEC(NSEC { name, class, ttl, next_domain_name, type_bit_maps, }))
},
Kind::RRSIG => {
// zw. 86400 IN RRSIG
// NSEC 8 1 86400 20191026050000 20191013040000 22545 . EGhf+lJQq8egDzxVATTj8CdW4p6fPZIjr2Y4bLZ1hEx
let mut tmp = rdata.split(' ');
let type_covered = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<Kind>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let algorithm = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u8>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let algorithm = dnssec::Algorithm(algorithm);
let labels = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u8>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let original_ttl = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u32>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
// 20191026050000
let signature_expiration = tmp.next().ok_or(Error::from(ErrorKind::FormatError))?;
let signature_expiration = datetime_to_timestamp(signature_expiration)?;
// 20191013040000
let signature_inception = tmp.next().ok_or(Error::from(ErrorKind::FormatError))?;
let signature_inception = datetime_to_timestamp(signature_inception)?;
let key_tag = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u16>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let mut signer_name = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.to_string();
if signer_name.ends_with('.') {
signer_name.pop();
}
let signature = tmp.next().ok_or(Error::from(ErrorKind::FormatError))?;
let signature = base64::decode(signature)
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let signature = Digest::new(signature);
Ok(Record::RRSIG(RRSIG {
name, class, ttl,
type_covered,
algorithm,
labels,
original_ttl,
signature_expiration,
signature_inception,
key_tag,
signer_name,
signature,
}))
},
Kind::SOA => {
// . 86400 IN SOA
// a.root-servers.net. nstld.verisign-grs.com. 2019101300 1800 900 604800 86400
let mut tmp = rdata.split(' ');
let mut mname = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.to_string();
let mut rname = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.to_string();
let serial = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u32>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let refresh = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<i32>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let retry = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<i32>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let expire = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<i32>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
let minimum = tmp.next()
.ok_or(Error::from(ErrorKind::FormatError))?
.parse::<u32>()
.map_err(|e| Error::new(ErrorKind::FormatError, e))?;
if mname.ends_with('.') {
mname.pop();
}
if rname.ends_with('.') {
rname.pop();
}
Ok(Record::SOA(SOA { name, class, ttl, mname, rname, serial, refresh, retry, expire, minimum, }))
},
_ => {
debug!("Record from str not implemented: Name={:?} Kind={} Class={} TTL={} RDATA={:?}", name, kind, class, ttl, rdata);
Err(Error::from(ErrorKind::NotImplemented))
},
}
}
}
/// Every line of the bundled root zone snapshot must parse.
/// Note: `assert!(cond, expr)` with a non-literal message is a hard error
/// in edition 2021; the message must go through a format string.
#[test]
fn test_parse_root_zone() {
    let data = include_str!("../../data/root.zone");
    for line in data.lines() {
        assert!(line.parse::<Record>().is_ok(), "{}", line);
    }
}
use std::error::Error;
use std::fmt::{self, Formatter};
/// Error returned when a mandatory FIX field is absent from a message.
#[derive(Debug)]
pub struct FieldNotPresentError;
impl fmt::Display for FieldNotPresentError {
    /// Human-readable form of the error; the text is fixed.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.write_str("Mandatory field not present")
    }
}
impl Error for FieldNotPresentError {
fn description(&self) -> &str {
"Mandatory field not present"
}
}
/// A session-level FIX reject: the reason plus an optional underlying cause
/// (exposed through `Error::source`).
#[derive(Debug)]
pub struct SessionLevelRejectErr {
    pub kind: SessionLevelRejectReason,
    pub source: Option<Box<dyn Error>>,
}
impl SessionLevelRejectErr {
pub fn invalid_value_for_tag_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::InvalidValueForTag,
source: None,
}
}
pub fn invalid_tag_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::InvalidTag,
source: None,
}
}
pub fn required_tag_missing_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::RequiredTagMissing,
source: None,
}
}
pub fn undefined_tag_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::UndefinedTag,
source: None,
}
}
pub fn tag_without_value_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::TagSpecifiedWithoutValue,
source: None,
}
}
pub fn value_out_of_range_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::ValueOutOfRange,
source: None,
}
}
pub fn incorrect_data_format_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::IncorrectDataFormat,
source: None,
}
}
pub fn decryption_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::DecryptionProblem,
source: None,
}
}
pub fn signature_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::SignatureProblem,
source: None,
}
}
pub fn comp_id_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::CompIdProblem,
source: None,
}
}
pub fn sending_time_accuracy_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::SendingTimeAccuracyProblem,
source: None,
}
}
pub fn invalid_msg_type_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::InvalidMessageType,
source: None,
}
}
pub fn invalid_body_len_err() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::InvalidBodyLength,
source: None,
}
}
pub fn invalid_checksum() -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::InvalidChecksum,
source: None,
}
}
pub fn parse_err(err: Option<Box<dyn Error>>) -> Self {
SessionLevelRejectErr {
kind: SessionLevelRejectReason::ParseError,
source: err,
}
}
}
/// Why a FIX message was rejected at the session level.
/// NOTE(review): these appear to mirror the standard FIX
/// SessionRejectReason (tag 373) values — confirm against the spec version
/// this engine targets.
#[derive(Debug)]
pub enum SessionLevelRejectReason {
    InvalidValueForTag,
    InvalidTag,
    RequiredTagMissing,
    UndefinedTag,
    TagSpecifiedWithoutValue,
    ValueOutOfRange,
    IncorrectDataFormat,
    DecryptionProblem,
    SignatureProblem,
    CompIdProblem,
    SendingTimeAccuracyProblem,
    InvalidMessageType,
    ParseError,
    InvalidBodyLength,
    InvalidChecksum,
}
impl fmt::Display for SessionLevelRejectErr {
/// Human-readable rendering: the `Debug` form of the reject reason,
/// prefixed with "Fix Error".
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "Fix Error {:?}", self.kind)
}
}
impl Error for SessionLevelRejectErr {
    /// Returns the underlying cause of this reject, if one was attached.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        // `as_deref` already yields `None` when no source is present, so
        // the explicit `is_some()` branch of the original was redundant.
        self.source.as_deref()
    }
}
|
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
use std::io;
use std::str::FromStr;
use crate::base::Part;
/// Solves part one: returns the name of the tower's bottom program.
pub fn part1(r: &mut dyn io::Read) -> Result<String, String> {
solve(r, Part::One)
}
/// Solves part two: returns the corrected weight of the mis-weighted program.
pub fn part2(r: &mut dyn io::Read) -> Result<String, String> {
solve(r, Part::Two)
}
/// Reads the puzzle input from `r` and solves the requested part.
///
/// Part one yields the name of the bottom program; part two yields the
/// corrected weight of the single mis-weighted program.
///
/// # Errors
/// Returns an `Err` if reading the input fails, or (part two) if the
/// tower is already balanced and no corrected weight exists.
fn solve(r: &mut dyn io::Read, part: Part) -> Result<String, String> {
    let mut input = String::new();
    r.read_to_string(&mut input).map_err(|e| e.to_string())?;
    let programs = parse_input(&input);
    let tower = construct_tower(&programs);
    let bottom_program = find_bottom_program(&tower);
    if part == Part::One {
        Ok(bottom_program.name.clone())
    } else {
        let tower_weights = calculate_tower_weights(&tower, &bottom_program);
        // Report a proper error instead of panicking via `unwrap()` when
        // no imbalance is found.
        find_correct_weight(&tower, &tower_weights, &bottom_program)
            .map(|w| w.to_string())
            .ok_or_else(|| "tower is already balanced".to_string())
    }
}
/// One program (disc) in the tower.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct Program {
// Unique program name.
name: String,
// Weight of this program alone, excluding anything it holds up.
weight: u64,
// Names of the programs directly on top of this one, if any.
holding_up: Option<Vec<String>>,
// Name of the program directly below; `None` for the tower's bottom.
held_up_by: Option<String>,
}
impl FromStr for Program {
    type Err = String;

    /// Parses a line such as `fwft (72) -> ktlj, cntj, xhth`.
    ///
    /// The ` -> children` suffix is optional.
    ///
    /// # Errors
    /// Returns a descriptive `Err` instead of panicking when the line
    /// does not match `name (weight)` or the weight is not a `u64`
    /// (the original unwrapped these despite declaring `Err = String`).
    fn from_str(s: &str) -> Result<Program, String> {
        lazy_static! {
            static ref NAME_AND_WEIGHT: Regex =
                Regex::new(r"(?P<name>\w+) \((?P<weight>\d+)\)").unwrap();
        }
        // Split off the optional list of held-up programs.
        let mut parts = s.splitn(2, " -> ");
        let name_and_weight = parts
            .next()
            .ok_or_else(|| format!("empty program line: {:?}", s))?;
        let programs = parts.next();
        let caps = NAME_AND_WEIGHT
            .captures(name_and_weight)
            .ok_or_else(|| format!("malformed program line: {:?}", s))?;
        let name = caps["name"].to_string();
        let weight: u64 = caps["weight"]
            .parse()
            .map_err(|e| format!("bad weight in {:?}: {}", s, e))?;
        let holding_up =
            programs.map(|program_str| program_str.split(", ").map(String::from).collect());
        Ok(Program {
            name,
            weight,
            holding_up,
            held_up_by: None,
        })
    }
}
/// Parses every input line into a `Program`, keyed by program name.
/// Panics (via `unwrap`) on a malformed line, matching the puzzle's
/// trusted-input convention.
fn parse_input(input: &str) -> HashMap<String, Program> {
    let mut programs = HashMap::new();
    for line in input.lines() {
        let prog = Program::from_str(line).unwrap();
        programs.insert(prog.name.clone(), prog);
    }
    programs
}
/// Returns a copy of `programs` with every `held_up_by` back-pointer
/// filled in from the `holding_up` lists.
fn construct_tower(programs: &HashMap<String, Program>) -> HashMap<String, Program> {
    let mut tower = programs.clone();
    // Iterate the borrowed input map so we can mutate `tower` without
    // first cloning each holding program out of it (the original cloned
    // every parent `Program` just to read its child list).
    for (parent_name, parent) in programs {
        if let Some(children) = &parent.holding_up {
            for child in children {
                tower
                    .get_mut(child)
                    .expect("child program missing from input")
                    .held_up_by = Some(parent_name.clone());
            }
        }
    }
    tower
}
/// The bottom program is the only one that nothing holds up
/// (its `held_up_by` back-pointer is `None`).
fn find_bottom_program(tower: &HashMap<String, Program>) -> Program {
    let mut bottoms = tower.values().filter(|prog| prog.held_up_by.is_none());
    bottoms.next().unwrap().clone()
}
/// Recursively computes, for `root` and every program above it, the total
/// weight of the sub-tower rooted there (the program's own weight plus
/// everything it holds up). Returns a map from program name to that
/// sub-tower weight.
fn calculate_tower_weights(
tower: &HashMap<String, Program>,
root: &Program,
) -> HashMap<String, u64> {
// Leaf: the sub-tower weight is just the program's own weight.
if root.holding_up.is_none() {
let mut map = HashMap::new();
map.insert(root.name.clone(), root.weight);
return map;
}
let held_up_progs = root.holding_up.clone().unwrap();
let mut map = HashMap::new();
let mut weight = 0;
for prog in &held_up_progs {
// Merge the child's sub-tower weights into ours; the child's own
// total was just inserted under its name, so `map[prog]` is valid.
map.extend(calculate_tower_weights(tower, tower.get(prog).unwrap()));
weight += map[prog];
}
weight += root.weight;
map.insert(root.name.clone(), weight);
map
}
/// Finds the corrected weight for the single program whose wrong weight
/// unbalances the tower, searching the sub-tower under `root`.
/// Returns `None` when everything under `root` is balanced.
#[allow(clippy::question_mark)]
fn find_correct_weight(
tower: &HashMap<String, Program>,
tower_weights: &HashMap<String, u64>,
root: &Program,
) -> Option<u64> {
// A leaf holds nothing up, so nothing under it can be unbalanced.
if root.holding_up.is_none() {
return None;
}
// Group the direct children by their sub-tower weight.
let mut map: HashMap<u64, Vec<String>> = HashMap::new();
for held_up_prog in &root.holding_up.clone().unwrap() {
// Recurse first: if the imbalance lies deeper in the tower, that
// fix takes precedence over anything found at this level.
let correct_weight =
find_correct_weight(tower, tower_weights, tower.get(held_up_prog).unwrap());
if correct_weight.is_some() {
return correct_weight;
}
map.entry(*tower_weights.get(held_up_prog).unwrap())
.or_insert_with(Vec::new)
.push(held_up_prog.clone());
}
if map.len() == 1 {
// Every child sub-tower weighs the same: this level is balanced.
None
} else {
// The offending program is the sole member of its weight group;
// the desired weight is the one shared by the other siblings.
let offending_prog_name = &map.iter().find(|&(_, progs)| progs.len() == 1).unwrap().1[0];
let offending_prog_weight = tower.get(offending_prog_name).unwrap().weight;
let offending_subtower_weight = tower_weights.get(offending_prog_name).unwrap();
let desired_subtower_weight = map.iter().find(|&(_, progs)| progs.len() > 1).unwrap().0;
// Adjust the program's own weight by the sub-tower delta; branch so
// the unsigned subtraction can never underflow.
if offending_subtower_weight > desired_subtower_weight {
Some(offending_prog_weight - (offending_subtower_weight - desired_subtower_weight))
} else {
Some(offending_prog_weight + (desired_subtower_weight - offending_subtower_weight))
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test;
// Unit tests for the `Program` line parser.
mod parse_tests {
use super::*;
#[test]
fn program_not_holding_up() {
let input = "pbga (66)";
let program = Program::from_str(input).unwrap();
assert_eq!("pbga", &program.name);
assert_eq!(66, program.weight);
}
#[test]
fn program_holding_up() {
let input = "fwft (72) -> ktlj, cntj, xhth";
let program = Program::from_str(input).unwrap();
assert_eq!("fwft", &program.name);
assert_eq!(72, program.weight);
assert_eq!(&["ktlj", "cntj", "xhth"], &program.holding_up.unwrap()[..]);
}
}
// End-to-end fixtures via the project's `test!` macro: each case reads
// an input file and compares against the known answer.
mod part1 {
use super::*;
test!(example, file "testdata/day07/ex", "tknk", part1);
test!(actual, file "../../../inputs/2017/07", "bpvhwhh", part1);
}
mod part2 {
use super::*;
test!(example, file "testdata/day07/ex", "60", part2);
test!(actual, file "../../../inputs/2017/07", "256", part2);
}
}
|
use std::collections::{HashMap, HashSet};
use std::env;
use std::fs;
use std::path::Path;
/// Reads the entire instructions file into a `String`.
///
/// Relative paths are resolved against the current working directory;
/// an absolute `filename` is used as-is, because `Path::join` replaces
/// the base when handed an absolute path.
///
/// # Panics
/// Panics if the current directory cannot be determined or the file
/// cannot be read.
fn read_instructions(filename: &str) -> String {
    // `join` accepts &str directly, so the original's `Path::new` plus
    // `into_boxed_path()` round-trip allocated a Box for nothing.
    let abspath = env::current_dir()
        .expect("could not determine the current working directory")
        .join(filename);
    fs::read_to_string(abspath)
        .expect("Error occurred while reading the file!")
}
/// Counts the distinct houses visited when a single Santa follows the
/// `^v<>` movement instructions, starting at the origin (which counts
/// as visited even with no moves).
///
/// # Panics
/// Panics on any character other than `^`, `v`, `<`, `>`.
fn deliver_presents(content: &str) -> usize {
    let (mut x, mut y) = (0i32, 0i32);
    // Only house identity matters, never the visit counts (the original
    // stored counts in a HashMap but read only its length), so a set
    // states the intent directly.
    let mut visited: HashSet<(i32, i32)> = HashSet::new();
    visited.insert((x, y));
    for c in content.chars() {
        match c {
            '^' => y += 1,
            'v' => y -= 1,
            '<' => x -= 1,
            '>' => x += 1,
            _ => panic!("unexpected instruction: {:?}", c),
        }
        visited.insert((x, y));
    }
    visited.len()
}
/// Counts the distinct houses visited when Santa and Robo-Santa take
/// turns following the `^v<>` instructions, both starting at the origin.
///
/// # Panics
/// Panics on any character other than `^`, `v`, `<`, `>`.
fn deliver_presents_duo(content: &str) -> usize {
    // positions[0] is Santa, positions[1] is Robo-Santa; indexing by
    // `i % 2` replaces the original's two duplicated match arms.
    let mut positions = [(0i32, 0i32); 2];
    let mut visited: HashSet<(i32, i32)> = HashSet::new();
    visited.insert((0, 0));
    for (i, c) in content.chars().enumerate() {
        let (x, y) = &mut positions[i % 2];
        match c {
            '^' => *y += 1,
            'v' => *y -= 1,
            '<' => *x -= 1,
            '>' => *x += 1,
            _ => panic!("unexpected instruction: {:?}", c),
        }
        visited.insert(positions[i % 2]);
    }
    visited.len()
}
pub fn run(){
let content = read_instructions("inputs/day-03.txt");
let santa_run = deliver_presents(&content);
let duo_run = deliver_presents_duo(&content);
println!("\n-- AoC 2015: Day 3: Perfectly Spherical Houses in a Vacuum --");
println!("\nSolo Run: {} \nDuo Run with Santa Bot: {}", santa_run, duo_run );
println!("\n-- DONE --\n");
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.