text stringlengths 8 4.13M |
|---|
use std::collections::{HashMap, BTreeMap};
use time;
use uuid;
use crate::data::ArcDataSlice;
use crate::object;
use crate::paxos;
use crate::store;
use crate::transaction;
/// Outcome of handling a paxos prepare message at a single store.
#[derive(Debug, Copy, Clone)]
pub enum PrepareResult {
    // Prepare rejected. NOTE(review): presumably the proposal id already
    // promised to a competing proposer — confirm against the paxos module.
    Nack(paxos::ProposalId),
    // Prepare accepted. NOTE(review): payload looks like the last accepted
    // (proposal id, value) pair, if any — confirm against the paxos module.
    Promise(Option<(paxos::ProposalId, bool)>)
}
/// Outcome of handling a paxos accept message at a single store.
#[derive(Debug, Copy, Clone)]
pub enum AcceptResult {
    // Accept rejected. NOTE(review): presumably the promised proposal id
    // that outranks this one — confirm against the paxos module.
    Nack(paxos::ProposalId),
    // Accept succeeded; carries the accepted boolean value.
    Accepted(bool)
}
/// An object rebuild piggybacked onto a transaction Prepare message.
pub struct PreTransactionOpportunisticRebuild {
    // Object to be rebuilt.
    pub object_id: object::Id,
    // NOTE(review): metadata the rebuilt object is required to carry or
    // match — confirm semantics against the rebuild handler.
    pub required_metadata: object::Metadata,
    // Replacement content for the object.
    pub data: ArcDataSlice
}
/// Paxos prepare request for a transaction, carrying the full transaction
/// description plus the per-object update payloads.
pub struct Prepare {
    // Destination store.
    pub to: store::Id,
    // Originating store.
    pub from: store::Id,
    pub proposal_id: paxos::ProposalId,
    // Full description of the transaction being proposed.
    pub txd: transaction::TransactionDescription,
    // New content for each object touched by the transaction.
    pub object_updates: HashMap<object::Id, ArcDataSlice>,
    // Opportunistic object rebuilds to apply before the transaction.
    pub pre_tx_rebuilds: Vec<PreTransactionOpportunisticRebuild>,
}
/// Reply to a [`Prepare`] message.
#[derive(Clone)]
pub struct PrepareResponse {
    // Destination store.
    pub to: store::Id,
    // Responding store.
    pub from: store::Id,
    pub txid: transaction::Id,
    // The proposal this response refers to.
    pub proposal_id: paxos::ProposalId,
    // Paxos-level outcome (promise or nack).
    pub response: PrepareResult,
    // The responding store's commit/abort disposition for the transaction.
    pub disposition: transaction::Disposition
}
/// Paxos accept request asking a store to accept `value` for the transaction.
pub struct Accept {
    // Destination store.
    pub to: store::Id,
    // Originating store.
    pub from: store::Id,
    pub txid: transaction::Id,
    pub proposal_id: paxos::ProposalId,
    // Proposed resolution value (commit = true / abort = false, presumably;
    // NOTE(review): confirm the boolean's meaning against the resolver).
    pub value: bool
}
/// Reply to an [`Accept`] message.
pub struct AcceptResponse {
    // Destination store.
    pub to: store::Id,
    // Responding store.
    pub from: store::Id,
    pub txid: transaction::Id,
    pub proposal_id: paxos::ProposalId,
    // Paxos-level outcome (accepted or nack).
    pub response: AcceptResult
}
/// Notification that the transaction's outcome has been resolved.
pub struct Resolved {
    pub to: store::Id,
    pub from: store::Id,
    pub txid: transaction::Id,
    // Resolved value for the transaction.
    pub value: bool
}
/// Notification that a store has applied the committed transaction.
pub struct Committed {
    pub to: store::Id,
    pub from: store::Id,
    pub txid: transaction::Id,
    /// List of object UUIDs that could not be committed due to transaction requirement errors
    pub object_commit_errors: Vec<object::Id>
}
/// Notification that the transaction has been finalized and its state may
/// be discarded.
pub struct Finalized {
    pub to: store::Id,
    pub from: store::Id,
    pub txid: transaction::Id,
    // Final value for the transaction.
    pub value: bool
}
/// Liveness signal for an in-progress transaction.
pub struct Heartbeat {
    pub to: store::Id,
    pub from: store::Id,
    pub txid: transaction::Id,
}
/// Query for a transaction's current status.
pub struct StatusRequest {
    pub to: store::Id,
    pub from: store::Id,
    pub txid: transaction::Id,
    // Correlates the eventual StatusResponse with this request.
    pub request_uuid: uuid::Uuid
}
/// Reply to a [`StatusRequest`].
pub struct StatusResponse {
    pub to: store::Id,
    pub from: store::Id,
    pub txid: transaction::Id,
    // Echo of the uuid from the originating StatusRequest.
    pub request_uuid: uuid::Uuid,
    // Current status of the transaction at the responding store.
    pub status: transaction::Status,
    // Whether the transaction has been finalized at the responding store.
    pub finalized: bool
}
/// Union of all transaction-protocol messages exchanged between stores.
pub enum Message {
    Prepare(Prepare),
    PrepareResponse(PrepareResponse),
    Accept(Accept),
    AcceptResponse(AcceptResponse),
    Resolved(Resolved),
    Committed(Committed),
    Finalized(Finalized),
    Heartbeat(Heartbeat),
    StatusRequest(StatusRequest),
    StatusResponse(StatusResponse)
}
impl Message {
    /// Returns the id of the store this message is addressed to.
    pub fn to_store(&self) -> store::Id {
        match self {
            Self::Prepare(m) => m.to,
            Self::PrepareResponse(m) => m.to,
            Self::Accept(m) => m.to,
            Self::AcceptResponse(m) => m.to,
            Self::Resolved(m) => m.to,
            Self::Committed(m) => m.to,
            Self::Finalized(m) => m.to,
            Self::Heartbeat(m) => m.to,
            Self::StatusRequest(m) => m.to,
            Self::StatusResponse(m) => m.to,
        }
    }

    /// Returns the id of the transaction this message belongs to.
    pub fn get_txid(&self) -> transaction::Id {
        match self {
            // Prepare is the one variant that carries the id inside its
            // transaction description rather than as a direct field.
            Self::Prepare(m) => m.txd.id,
            Self::PrepareResponse(m) => m.txid,
            Self::Accept(m) => m.txid,
            Self::AcceptResponse(m) => m.txid,
            Self::Resolved(m) => m.txid,
            Self::Committed(m) => m.txid,
            Self::Finalized(m) => m.txid,
            Self::Heartbeat(m) => m.txid,
            Self::StatusRequest(m) => m.txid,
            Self::StatusResponse(m) => m.txid,
        }
    }
}
/// Fixed-capacity message cache with oldest-first eviction.
pub struct MessageCache<T> {
    // Insertion-time index, used to locate the oldest entry for eviction.
    btmap: BTreeMap<time::SteadyTime, transaction::Id>,
    // Primary storage: the message plus its insertion timestamp.
    hmap: HashMap<transaction::Id, (T, time::SteadyTime)>,
    // Capacity at which insertion starts evicting the oldest entry.
    max_entries: usize,
    // Number of currently cached entries.
    num_entries: usize
}
impl<T> MessageCache<T> {
pub fn new(max_entries: usize) -> MessageCache<T> {
MessageCache {
btmap: BTreeMap::new(),
hmap: HashMap::new(),
max_entries,
num_entries: 0
}
}
pub fn cache(&mut self, txid: transaction::Id, msg: T) {
let now = time::SteadyTime::now();
self.hmap.insert(txid, (msg, now.clone()));
self.btmap.insert(now.clone(), txid);
if self.num_entries == self.max_entries {
let (ts, txid) = self.btmap.iter().next().unwrap();
let ts = ts.clone();
let txid = txid.clone();
self.btmap.remove(&ts);
self.hmap.remove(&txid);
self.num_entries -= 1;
}
self.num_entries += 1;
}
pub fn get(&mut self, txid: &transaction::Id) -> Option<T> {
match self.hmap.remove(txid) {
None => None,
Some((msg, ts)) => {
self.btmap.remove(&ts);
Some(msg)
}
}
}
} |
use std::io::BufRead;
use quick_xml as qx;
use crate::template::writer::TemplateWriter;
/// Extract templates from a stream and pass them to a TemplateWriter.
///
/// Scans an XML dump (MediaWiki-style, by the look of the element names)
/// for `<page>` elements whose `<title>` starts with `"Template:"` and
/// forwards each matching title/text pair to `writer`.
pub fn extract_templates<R: BufRead>(stream: R, writer: &TemplateWriter) {
    use self::qx::events::Event;
    let mut reader = qx::Reader::from_reader(stream);
    // `buf` holds raw XML events; `text_buf` holds decoded element text.
    let (mut buf, mut text_buf) = (Vec::new(), Vec::new());
    let mut page = String::new();
    let mut title = String::new();
    let mut in_page = false; // currently inside a <page> element
    let mut in_template = false; // current page's title starts with "Template:"
    loop {
        match reader.read_event(&mut buf) {
            Ok(Event::Start(ref tag)) => match tag.name() {
                b"page" => in_page = true,
                b"title" => {
                    if in_page {
                        // NOTE(review): unwrap panics on malformed/undecodable
                        // text — confirm inputs are trusted dump files.
                        title = reader.read_text(b"title", &mut text_buf).unwrap();
                        if title.starts_with("Template:") {
                            in_template = true;
                        }
                    }
                }
                b"text" => {
                    if in_template {
                        page = reader.read_text(b"text", &mut text_buf).unwrap();
                    }
                }
                _ => (),
            },
            Ok(Event::End(ref tag)) => {
                if let b"page" = tag.name() {
                    in_page = false;
                    if in_template {
                        // title/page are moved into the writer; start fresh
                        // for the next page.
                        writer.write_template(title, page).unwrap();
                        title = String::new();
                        page = String::new();
                    }
                    in_template = false;
                }
            }
            Ok(Event::Eof) => break,
            Ok(_) => (),
            // NOTE(review): an XML parse error silently ends extraction;
            // consider surfacing or logging it.
            Err(_) => break,
        }
        // quick_xml requires the event buffer to be cleared between reads.
        buf.clear();
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// Downtime : Downtiming gives you greater control over monitor notifications by allowing you to globally exclude scopes from alerting. Downtime settings, which can be scheduled with start and end times, prevent all alerting related to specified Datadog tags.
// Generated model (openapi-generator); field docs come from the API spec.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Downtime {
    /// If a scheduled downtime currently exists.
    #[serde(rename = "active", skip_serializing_if = "Option::is_none")]
    pub active: Option<bool>,
    /// If a scheduled downtime is canceled.
    #[serde(rename = "canceled", skip_serializing_if = "Option::is_none")]
    pub canceled: Option<i64>,
    /// User ID of the downtime creator.
    #[serde(rename = "creator_id", skip_serializing_if = "Option::is_none")]
    pub creator_id: Option<i32>,
    /// If a downtime has been disabled.
    #[serde(rename = "disabled", skip_serializing_if = "Option::is_none")]
    pub disabled: Option<bool>,
    /// `0` for a downtime applied on `*` or all, `1` when the downtime is only scoped to hosts, or `2` when the downtime is scoped to anything but hosts.
    #[serde(rename = "downtime_type", skip_serializing_if = "Option::is_none")]
    pub downtime_type: Option<i32>,
    /// POSIX timestamp to end the downtime. If not provided, the downtime is in effect indefinitely until you cancel it.
    #[serde(rename = "end", skip_serializing_if = "Option::is_none")]
    pub end: Option<i64>,
    /// The downtime ID.
    #[serde(rename = "id", skip_serializing_if = "Option::is_none")]
    pub id: Option<i64>,
    /// A message to include with notifications for this downtime. Email notifications can be sent to specific users by using the same `@username` notation as events.
    #[serde(rename = "message", skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    /// A single monitor to which the downtime applies. If not provided, the downtime applies to all monitors.
    #[serde(rename = "monitor_id", skip_serializing_if = "Option::is_none")]
    pub monitor_id: Option<i64>,
    /// A comma-separated list of monitor tags. For example, tags that are applied directly to monitors, not tags that are used in monitor queries (which are filtered by the scope parameter), to which the downtime applies. The resulting downtime applies to monitors that match ALL provided monitor tags. For example, `service:postgres` **AND** `team:frontend`.
    #[serde(rename = "monitor_tags", skip_serializing_if = "Option::is_none")]
    pub monitor_tags: Option<Vec<String>>,
    /// ID of the parent Downtime.
    #[serde(rename = "parent_id", skip_serializing_if = "Option::is_none")]
    pub parent_id: Option<i64>,
    // Boxed to keep this struct small; recurrence is optional and nested.
    #[serde(rename = "recurrence", skip_serializing_if = "Option::is_none")]
    pub recurrence: Option<Box<crate::models::DowntimeRecurrence>>,
    /// The scope(s) to which the downtime applies. For example, `host:app2`. Provide multiple scopes as a comma-separated list like `env:dev,env:prod`. The resulting downtime applies to sources that matches ALL provided scopes (`env:dev` **AND** `env:prod`).
    #[serde(rename = "scope", skip_serializing_if = "Option::is_none")]
    pub scope: Option<Vec<String>>,
    /// POSIX timestamp to start the downtime. If not provided, the downtime starts the moment it is created.
    #[serde(rename = "start", skip_serializing_if = "Option::is_none")]
    pub start: Option<i64>,
    /// The timezone in which to display the downtime's start and end times in Datadog applications.
    #[serde(rename = "timezone", skip_serializing_if = "Option::is_none")]
    pub timezone: Option<String>,
    /// ID of the last user that updated the downtime.
    #[serde(rename = "updater_id", skip_serializing_if = "Option::is_none")]
    pub updater_id: Option<i32>,
}
impl Downtime {
    /// Downtiming gives you greater control over monitor notifications by allowing you to globally exclude scopes from alerting. Downtime settings, which can be scheduled with start and end times, prevent all alerting related to specified Datadog tags.
    ///
    /// Returns a `Downtime` with every field unset (`None`); populate the
    /// fields you need before sending it to the API.
    pub fn new() -> Downtime {
        Downtime {
            active: None,
            canceled: None,
            creator_id: None,
            disabled: None,
            downtime_type: None,
            end: None,
            id: None,
            message: None,
            monitor_id: None,
            monitor_tags: None,
            parent_id: None,
            recurrence: None,
            scope: None,
            start: None,
            timezone: None,
            updater_id: None,
        }
    }
}
|
use std::collections::HashMap;
use crate::mcc::agent::agent_genome::AgentGenome;
use crate::neatns::network::activation::Activation;
use crate::neatns::network::node::NodeRef;
use crate::neatns::network::order;
/// One step of the flattened forward pass, operating on `values` indices.
#[derive(Clone, Debug)]
pub enum Action {
    Link(usize, usize, f64),
    // from, to, weight
    Activation(usize, f64, Activation), // node, bias, activation
}
/// A genome compiled into a flat, executable network.
pub struct NeuralNetwork {
    // One slot per node: inputs first, then hidden nodes, then outputs.
    values: Vec<f64>,
    // For each input slot, the source index into the caller's input vector.
    inputs: Vec<usize>,
    // Indices into `values` where the output node values live.
    outputs: Vec<usize>,
    // Forward-pass program, executed in order by `activate`.
    actions: Vec<Action>,
}
impl NeuralNetwork {
    /// Compiles `genome` into a flat network.
    ///
    /// Node values are laid out in a single vector: inputs first, then
    /// hidden nodes, then outputs.
    pub fn new(genome: &AgentGenome) -> NeuralNetwork {
        let input_length = genome.inputs.len();
        let cumulative_hidden_length = input_length + genome.hidden_nodes.len(); // Length of input and hidden
        let cumulative_output_length = cumulative_hidden_length + genome.outputs.len(); // Length of input, hidden and output
        // Sort input/output keys so their slot assignment is deterministic.
        let mut input_keys: Vec<NodeRef> = genome.inputs.keys().cloned().collect();
        input_keys.sort();
        let mut output_keys: Vec<NodeRef> = genome.outputs.keys().cloned().collect();
        output_keys.sort();
        // Maps every genome node to its slot index in `values`.
        // NOTE(review): hidden_nodes.keys() iterates in arbitrary HashMap
        // order; this is self-consistent within one construction, but hidden
        // slot order is not stable across runs — confirm that is acceptable.
        let node_mapper: HashMap<NodeRef, usize> = input_keys
            .iter()
            .enumerate()
            .map(|(i, node_ref)| (*node_ref, i))
            .chain(
                genome
                    .hidden_nodes
                    .keys()
                    .enumerate()
                    .map(|(i, node_ref)| (*node_ref, i + input_length)),
            )
            .chain(
                output_keys
                    .iter()
                    .enumerate()
                    .map(|(i, node_ref)| (*node_ref, i + cumulative_hidden_length)),
            )
            .collect();
        // Translate the genome's evaluation order into slot-indexed actions.
        let actions = genome
            .order
            .iter()
            .map(|action| match action {
                order::Action::Link(from, to) => Action::Link(
                    *node_mapper.get(from).unwrap(),
                    *node_mapper.get(to).unwrap(),
                    genome.links.get(&(*from, *to)).unwrap().weight,
                ),
                order::Action::Activation(node) => Action::Activation(
                    *node_mapper.get(node).unwrap(),
                    genome.get_bias(node),
                    genome.get_activation(node),
                ),
            })
            .collect();
        NeuralNetwork {
            values: vec![0.0; cumulative_output_length],
            inputs: input_keys.iter().map(|node| node.id() as usize).collect(),
            // NOTE(review): output slots are computed as node.id() +
            // cumulative_hidden_length, while node_mapper assigned outputs
            // by sorted position; these agree only if output node ids are
            // exactly 0..n — verify against how the genome numbers outputs.
            outputs: output_keys
                .iter()
                .map(|node| node.id() as usize + cumulative_hidden_length)
                .collect(),
            actions,
        }
    }

    /// Evaluate network, takes input node values, returns output node values
    pub fn activate(&mut self, inputs: &Vec<f64>) -> Vec<f64> {
        // Reset all node values before the pass.
        for i in 0..self.values.len() {
            self.values[i] = 0.0;
        }
        // Copy inputs into values
        // NOTE(review): slot i receives inputs[self.inputs[i]], i.e. the
        // caller's vector is indexed by input-node id — confirm callers
        // supply inputs in that order.
        for (i, index) in self.inputs.iter().enumerate() {
            self.values[i] = inputs[*index];
        }
        // Do forward pass
        for action in self.actions.iter() {
            match action {
                Action::Link(from, to, weight) => {
                    self.values[*to] += self.values[*from] * weight;
                }
                Action::Activation(node, bias, activation) => {
                    self.values[*node] = activation.activate(self.values[*node] + *bias)
                }
            }
        }
        // Collect output
        // Non-finite results (NaN/inf) are clamped to 0.0.
        self.outputs
            .iter()
            .map(|o| {
                if self.values[*o].is_finite() {
                    self.values[*o]
                } else {
                    0.0
                }
            })
            .collect()
    }
}
|
use avl_tree::node::Node;
use avl_tree::tree;
use entry::Entry;
use std::ops::{Index, IndexMut};
/// An ordered map implemented using an avl tree.
///
/// An avl tree is a self-balancing binary search tree that maintains the invariant that the
/// heights of two child subtrees of any node differ by at most one.
///
/// # Examples
/// ```
/// use extended_collections::avl_tree::AvlMap;
///
/// let mut map = AvlMap::new();
/// map.insert(0, 1);
/// map.insert(3, 4);
///
/// assert_eq!(map[&0], 1);
/// assert_eq!(map.get(&1), None);
/// assert_eq!(map.len(), 2);
///
/// assert_eq!(map.min(), Some(&0));
/// assert_eq!(map.ceil(&2), Some(&3));
///
/// map[&0] = 2;
/// assert_eq!(map.remove(&0), Some((0, 2)));
/// assert_eq!(map.remove(&1), None);
/// ```
pub struct AvlMap<T, U> {
    // Root of the AVL tree (None when the map is empty).
    tree: tree::Tree<T, U>,
    // Cached element count; kept in sync by insert/remove/clear.
    len: usize,
}
impl<T, U> AvlMap<T, U>
where
    T: Ord,
{
    /// Constructs a new, empty `AvlMap<T, U>`.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let map: AvlMap<u32, u32> = AvlMap::new();
    /// ```
    pub fn new() -> Self {
        AvlMap {
            tree: None,
            len: 0,
        }
    }

    /// Inserts a key-value pair into the map. If the key already exists in the map, it will return
    /// and replace the old key-value pair.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// assert_eq!(map.insert(1, 1), None);
    /// assert_eq!(map.get(&1), Some(&1));
    /// assert_eq!(map.insert(1, 2), Some((1, 1)));
    /// assert_eq!(map.get(&1), Some(&2));
    /// ```
    pub fn insert(&mut self, key: T, value: U) -> Option<(T, U)> {
        let AvlMap { ref mut tree, ref mut len } = self;
        let new_node = Node::new(key, value);
        *len += 1;
        // `map` instead of `and_then(.. Some(..))` (clippy: bind_instead_of_map).
        tree::insert(tree, new_node).map(|entry| {
            let Entry { key, value } = entry;
            // An existing key was replaced, so the length did not grow.
            *len -= 1;
            (key, value)
        })
    }

    /// Removes a key-value pair from the map. If the key exists in the map, it will return the
    /// associated key-value pair. Otherwise it will return `None`.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// assert_eq!(map.remove(&1), Some((1, 1)));
    /// assert_eq!(map.remove(&1), None);
    /// ```
    pub fn remove(&mut self, key: &T) -> Option<(T, U)> {
        let AvlMap { ref mut tree, ref mut len } = self;
        // `map` instead of `and_then(.. Some(..))` (clippy: bind_instead_of_map).
        tree::remove(tree, key).map(|entry| {
            let Entry { key, value } = entry;
            *len -= 1;
            (key, value)
        })
    }

    /// Checks if a key exists in the map.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// assert!(!map.contains_key(&0));
    /// assert!(map.contains_key(&1));
    /// ```
    pub fn contains_key(&self, key: &T) -> bool {
        self.get(key).is_some()
    }

    /// Returns an immutable reference to the value associated with a particular key. It will
    /// return `None` if the key does not exist in the map.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// assert_eq!(map.get(&0), None);
    /// assert_eq!(map.get(&1), Some(&1));
    /// ```
    pub fn get(&self, key: &T) -> Option<&U> {
        tree::get(&self.tree, key).map(|entry| &entry.value)
    }

    /// Returns a mutable reference to the value associated with a particular key. Returns `None`
    /// if such a key does not exist.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// *map.get_mut(&1).unwrap() = 2;
    /// assert_eq!(map.get(&1), Some(&2));
    /// ```
    pub fn get_mut(&mut self, key: &T) -> Option<&mut U> {
        tree::get_mut(&mut self.tree, key).map(|entry| &mut entry.value)
    }

    /// Returns the number of elements in the map.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// assert_eq!(map.len(), 1);
    /// ```
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the map is empty.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let map: AvlMap<u32, u32> = AvlMap::new();
    /// assert!(map.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears the map, removing all values.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// map.insert(2, 2);
    /// map.clear();
    /// assert_eq!(map.is_empty(), true);
    /// ```
    pub fn clear(&mut self) {
        self.tree = None;
        self.len = 0;
    }

    /// Returns a key in the map that is less than or equal to a particular key. Returns `None` if
    /// such a key does not exist.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// assert_eq!(map.floor(&0), None);
    /// assert_eq!(map.floor(&2), Some(&1));
    /// ```
    pub fn floor(&self, key: &T) -> Option<&T> {
        tree::floor(&self.tree, key).map(|entry| &entry.key)
    }

    /// Returns a key in the map that is greater than or equal to a particular key. Returns `None`
    /// if such a key does not exist.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// assert_eq!(map.ceil(&0), Some(&1));
    /// assert_eq!(map.ceil(&2), None);
    /// ```
    pub fn ceil(&self, key: &T) -> Option<&T> {
        tree::ceil(&self.tree, key).map(|entry| &entry.key)
    }

    /// Returns the minimum key of the map. Returns `None` if the map is empty.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// map.insert(3, 3);
    /// assert_eq!(map.min(), Some(&1));
    /// ```
    pub fn min(&self) -> Option<&T> {
        tree::min(&self.tree).map(|entry| &entry.key)
    }

    /// Returns the maximum key of the map. Returns `None` if the map is empty.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// map.insert(3, 3);
    /// assert_eq!(map.max(), Some(&3));
    /// ```
    pub fn max(&self) -> Option<&T> {
        tree::max(&self.tree).map(|entry| &entry.key)
    }

    /// Returns an iterator over the map. The iterator will yield key-value pairs using in-order
    /// traversal.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// map.insert(2, 2);
    ///
    /// let mut iterator = map.iter();
    /// assert_eq!(iterator.next(), Some((&1, &1)));
    /// assert_eq!(iterator.next(), Some((&2, &2)));
    /// assert_eq!(iterator.next(), None);
    /// ```
    pub fn iter(&self) -> AvlMapIter<T, U> {
        AvlMapIter {
            current: &self.tree,
            stack: Vec::new(),
        }
    }

    /// Returns a mutable iterator over the map. The iterator will yield key-value pairs using
    /// in-order traversal.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::avl_tree::AvlMap;
    ///
    /// let mut map = AvlMap::new();
    /// map.insert(1, 1);
    /// map.insert(2, 2);
    ///
    /// for (key, value) in &mut map {
    ///     *value += 1;
    /// }
    ///
    /// let mut iterator = map.iter_mut();
    /// assert_eq!(iterator.next(), Some((&1, &mut 2)));
    /// assert_eq!(iterator.next(), Some((&2, &mut 3)));
    /// assert_eq!(iterator.next(), None);
    /// ```
    pub fn iter_mut(&mut self) -> AvlMapIterMut<T, U> {
        AvlMapIterMut {
            current: self.tree.as_mut().map(|node| &mut **node),
            stack: Vec::new(),
        }
    }
}
impl<T, U> IntoIterator for AvlMap<T, U>
where
    T: Ord,
{
    type Item = (T, U);
    type IntoIter = AvlMapIntoIter<T, U>;

    /// Consumes the map, yielding owned key-value pairs in ascending key order.
    fn into_iter(self) -> Self::IntoIter {
        Self::IntoIter {
            current: self.tree,
            stack: Vec::new(),
        }
    }
}
impl<'a, T, U> IntoIterator for &'a AvlMap<T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a U);
    type IntoIter = AvlMapIter<'a, T, U>;

    /// Enables `for (k, v) in &map`, borrowing each entry immutably.
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T, U> IntoIterator for &'a mut AvlMap<T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a mut U);
    type IntoIter = AvlMapIterMut<'a, T, U>;

    /// Enables `for (k, v) in &mut map`, borrowing each value mutably.
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
/// An owning iterator for `AvlMap<T, U>`.
///
/// This iterator traverses the elements of the map in-order and yields owned entries.
pub struct AvlMapIntoIter<T, U> {
    // Subtree still to be visited (its left spine has not been pushed yet).
    current: tree::Tree<T, U>,
    // In-order traversal stack of detached nodes.
    stack: Vec<Node<T, U>>,
}
impl<T, U> Iterator for AvlMapIntoIter<T, U>
where
    T: Ord,
{
    type Item = (T, U);

    /// Yields owned entries in ascending key order.
    fn next(&mut self) -> Option<Self::Item> {
        // Push the left spine of the pending subtree onto the stack.
        let mut subtree = self.current.take();
        while let Some(mut node) = subtree {
            subtree = node.left.take();
            self.stack.push(*node);
        }
        // The next in-order element is the most recently pushed node.
        let Node {
            entry: Entry { key, value },
            right,
            ..
        } = self.stack.pop()?;
        // Continue with its right subtree on the following call.
        self.current = right;
        Some((key, value))
    }
}
/// An iterator for `AvlMap<T, U>`.
///
/// This iterator traverses the elements of the map in-order and yields immutable references.
pub struct AvlMapIter<'a, T, U>
where
    T: 'a,
    U: 'a,
{
    // Subtree still to be visited (its left spine has not been pushed yet).
    current: &'a tree::Tree<T, U>,
    // In-order traversal stack of borrowed nodes.
    stack: Vec<&'a Node<T, U>>,
}
impl<'a, T, U> Iterator for AvlMapIter<'a, T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a U);

    /// Yields borrowed `(key, value)` pairs in ascending key order.
    fn next(&mut self) -> Option<Self::Item> {
        // Walk down the left spine, remembering each node on the stack.
        while let Some(ref node) = self.current {
            self.current = &node.left;
            self.stack.push(node);
        }
        self.stack.pop().map(|node| {
            let Node {
                entry: Entry { ref key, ref value },
                ref right,
                ..
            } = node;
            // Resume from the popped node's right subtree on the next call.
            self.current = right;
            (key, value)
        })
    }
}
// Stack frame for the mutable iterator: a node's entry together with its
// (already-detached) right subtree.
type BorrowedIterEntryMut<'a, T, U> = Option<(&'a mut Entry<T, U>, BorrowedTreeMut<'a, T, U>)>;
// A mutably borrowed subtree root, if any.
type BorrowedTreeMut<'a, T, U> = Option<&'a mut Node<T, U>>;
/// A mutable iterator for `AvlMap<T, U>`.
///
/// This iterator traverses the elements of the map in-order and yields mutable references.
pub struct AvlMapIterMut<'a, T, U>
where
    T: 'a,
    U: 'a,
{
    // Subtree still to be visited (its left spine has not been pushed yet).
    current: Option<&'a mut Node<T, U>>,
    // In-order stack of (entry, right-subtree) frames; the split keeps the
    // mutable borrows of entry and right child disjoint.
    stack: Vec<BorrowedIterEntryMut<'a, T, U>>,
}
impl<'a, T, U> Iterator for AvlMapIterMut<'a, T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a mut U);

    /// Yields `(key, &mut value)` pairs in ascending key order.
    fn next(&mut self) -> Option<Self::Item> {
        let AvlMapIterMut { ref mut current, ref mut stack } = self;
        // Descend the left spine, splitting each node into its entry and its
        // right child so both mutable borrows can be held at once.
        while current.is_some() {
            stack.push(current.take().map(|node| {
                *current = node.left.as_mut().map(|node| &mut **node);
                (&mut node.entry, node.right.as_mut().map(|node| &mut **node))
            }));
        }
        stack.pop().and_then(|pair_opt| {
            match pair_opt {
                Some(pair) => {
                    let (entry, right) = pair;
                    let Entry { ref key, ref mut value } = entry;
                    // Visit the right subtree after this entry.
                    *current = right;
                    Some((key, value))
                },
                None => None,
            }
        })
    }
}
impl<T, U> Default for AvlMap<T, U>
where
    T: Ord,
{
    /// Returns an empty map, equivalent to [`AvlMap::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<'a, T, U> Index<&'a T> for AvlMap<T, U>
where
    T: Ord,
{
    type Output = U;

    /// Returns a reference to the value for `key`.
    ///
    /// # Panics
    /// Panics if the key is not present in the map.
    fn index(&self, key: &T) -> &Self::Output {
        self.get(key).expect("Key does not exist.")
    }
}
impl<'a, T, U> IndexMut<&'a T> for AvlMap<T, U>
where
    T: Ord,
{
    /// Returns a mutable reference to the value for `key`.
    ///
    /// # Panics
    /// Panics if the key is not present in the map.
    fn index_mut(&mut self, key: &T) -> &mut Self::Output {
        self.get_mut(key).expect("Key does not exist.")
    }
}
#[cfg(test)]
mod tests {
    // Unit tests covering AvlMap construction, CRUD operations, ordering
    // queries (min/max/floor/ceil), and all three iterator flavors.
    use super::AvlMap;

    #[test]
    fn test_len_empty() {
        let map: AvlMap<u32, u32> = AvlMap::new();
        assert_eq!(map.len(), 0);
    }

    #[test]
    fn test_is_empty() {
        let map: AvlMap<u32, u32> = AvlMap::new();
        assert!(map.is_empty());
    }

    #[test]
    fn test_min_max_empty() {
        let map: AvlMap<u32, u32> = AvlMap::new();
        assert_eq!(map.min(), None);
        assert_eq!(map.max(), None);
    }

    #[test]
    fn test_insert() {
        let mut map = AvlMap::new();
        assert_eq!(map.insert(1, 1), None);
        assert!(map.contains_key(&1));
        assert_eq!(map.get(&1), Some(&1));
    }

    #[test]
    fn test_insert_replace() {
        // Re-inserting a key returns the displaced pair.
        let mut map = AvlMap::new();
        assert_eq!(map.insert(1, 1), None);
        assert_eq!(map.insert(1, 3), Some((1, 1)));
        assert_eq!(map.get(&1), Some(&3));
    }

    #[test]
    fn test_remove() {
        let mut map = AvlMap::new();
        map.insert(1, 1);
        assert_eq!(map.remove(&1), Some((1, 1)));
        assert!(!map.contains_key(&1));
    }

    #[test]
    fn test_min_max() {
        let mut map = AvlMap::new();
        map.insert(1, 1);
        map.insert(3, 3);
        map.insert(5, 5);
        assert_eq!(map.min(), Some(&1));
        assert_eq!(map.max(), Some(&5));
    }

    #[test]
    fn test_get_mut() {
        let mut map = AvlMap::new();
        map.insert(1, 1);
        {
            let value = map.get_mut(&1);
            *value.unwrap() = 3;
        }
        assert_eq!(map.get(&1), Some(&3));
    }

    #[test]
    fn test_floor_ceil() {
        // floor = greatest key <= probe; ceil = smallest key >= probe.
        let mut map = AvlMap::new();
        map.insert(1, 1);
        map.insert(3, 3);
        map.insert(5, 5);
        assert_eq!(map.floor(&0), None);
        assert_eq!(map.floor(&2), Some(&1));
        assert_eq!(map.floor(&4), Some(&3));
        assert_eq!(map.floor(&6), Some(&5));
        assert_eq!(map.ceil(&0), Some(&1));
        assert_eq!(map.ceil(&2), Some(&3));
        assert_eq!(map.ceil(&4), Some(&5));
        assert_eq!(map.ceil(&6), None);
    }

    #[test]
    fn test_into_iter() {
        // Iteration is in key order regardless of insertion order.
        let mut map = AvlMap::new();
        map.insert(1, 2);
        map.insert(5, 6);
        map.insert(3, 4);
        assert_eq!(
            map.into_iter().collect::<Vec<(u32, u32)>>(),
            vec![(1, 2), (3, 4), (5, 6)],
        );
    }

    #[test]
    fn test_iter() {
        let mut map = AvlMap::new();
        map.insert(1, 2);
        map.insert(5, 6);
        map.insert(3, 4);
        assert_eq!(
            map.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &2), (&3, &4), (&5, &6)],
        );
    }

    #[test]
    fn test_iter_mut() {
        let mut map = AvlMap::new();
        map.insert(1, 2);
        map.insert(5, 6);
        map.insert(3, 4);
        for (_, value) in &mut map {
            *value += 1;
        }
        assert_eq!(
            map.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &3), (&3, &5), (&5, &7)],
        );
    }
}
|
use std::collections::hash_map::HashMap;
use std::collections::binary_heap::BinaryHeap;
/// Sentinel distance meaning "not reachable / not yet relaxed".
pub const INFINITY: u32 = ::std::u32::MAX;
/// A graph vertex identified by its OSM id, with geographic coordinates.
pub struct Node {
    pub osm_id: i64,
    pub lon: f64,
    pub lat: f64,
    // Adjacency: neighbor node index -> edge index into Graph::edges.
    pub adj: HashMap<usize, usize>
}
/// A graph edge with precomputed routing attributes.
pub struct Edge {
    pub osm_id: i64,
    pub length: u32,
    pub max_speed: u8,
    // Traversal cost used as the edge weight by Dijkstra.
    pub driving_time: u32
}
/// An undirected routing graph with OSM-id lookup tables.
pub struct Graph {
    pub nodes: Vec<Node>,
    pub edges: Vec<Edge>,
    // OSM node id -> index into `nodes`.
    pub nodes_idx: HashMap<i64, usize>,
    // OSM way/edge id -> index into `edges`.
    pub edges_idx: HashMap<i64, usize>
}
impl Graph {
    /// Creates an empty graph.
    pub fn new() -> Graph {
        Graph {
            nodes: Vec::new(),
            edges: Vec::new(),
            nodes_idx: HashMap::new(),
            edges_idx: HashMap::new()
        }
    }

    /// Registers `n`, indexing it by its OSM id.
    pub fn add_node(&mut self, n: Node) {
        self.nodes_idx.insert(n.osm_id, self.nodes.len());
        self.nodes.push(n);
    }

    /// Adds an undirected edge between two previously added nodes.
    /// Silently ignores the edge when either endpoint is unknown.
    pub fn add_edge(&mut self, n1_id: i64, n2_id: i64, e: Edge) {
        // Resolve both endpoints with a single lookup each (the original
        // did contains_key followed by get().unwrap(), hashing each id twice).
        let (n1_idx, n2_idx) = match (self.nodes_idx.get(&n1_id), self.nodes_idx.get(&n2_id)) {
            (Some(&a), Some(&b)) => (a, b),
            _ => return,
        };
        // add edge to graph
        let edge_idx = self.edges.len();
        self.edges_idx.insert(e.osm_id, edge_idx);
        self.edges.push(e);
        // link up adjacents; both directions share the same edge record
        // (undirected graph).
        self.nodes[n1_idx].adj.insert(n2_idx, edge_idx);
        self.nodes[n2_idx].adj.insert(n1_idx, edge_idx);
    }

    /// Releases excess capacity once loading is complete.
    pub fn finalize(&mut self) {
        self.nodes.shrink_to_fit();
        self.edges.shrink_to_fit();
    }

    /// Dumps the graph in a DOT-like edge-list format for debugging.
    pub fn print(&self) {
        println!("Nodes ({}):", self.nodes.len());
        for n in self.nodes.iter() {
            println!("(osm: {}, id: {})", n.osm_id, self.nodes_idx[&n.osm_id]);
        }
        println!("Edges ({}):\n", self.edges.len());
        for n in self.nodes.iter() {
            for (to, edge) in n.adj.iter() {
                println!("N{} -> N{} [label={}];", n.osm_id, self.nodes[*to].osm_id, self.edges[*edge].driving_time);
            }
        }
        // println!() instead of println!("") (clippy: println_empty_string).
        println!();
    }
}
/// Single-source shortest-path state over a borrowed [`Graph`].
pub struct DijkstraGraph<'a> {
    pub graph: &'a Graph,
    // Pending frontier; NodeState's Ord makes this behave as a min-heap.
    pub queue: BinaryHeap<NodeState>,
    // Best known driving time from the start node, indexed by node.
    pub dist: Vec<u32>,
    // Predecessor node index on the best path, indexed by node.
    pub parents: Vec<usize>
}
impl <'a> DijkstraGraph<'a> {
    /// Allocates Dijkstra state sized for `graph`; call [`Self::dijkstra`]
    /// to populate `dist` and `parents`.
    pub fn from_graph(graph: &Graph) -> DijkstraGraph {
        DijkstraGraph {
            graph,
            queue: BinaryHeap::new(),
            dist: vec![0u32; graph.nodes.len()],
            parents: vec![0usize; graph.nodes.len()]
        }
    }

    /// Computes shortest driving times from `start_node` to all reachable
    /// nodes, filling `self.dist` (INFINITY = unreachable) and
    /// `self.parents` (usize::MAX = no parent).
    pub fn dijkstra(&mut self, start_node: usize) {
        for i in 0..self.graph.nodes.len() {
            self.dist[i] = INFINITY;
            // Fix: `parents` is a Vec<usize>, so the original `-1` sentinel
            // does not type-check; use usize::MAX as the "no parent" marker.
            self.parents[i] = ::std::usize::MAX;
        }
        self.dist[start_node] = 0;
        self.queue.clear(); // allow reuse of this state across runs
        self.queue.push(NodeState { idx: start_node, dist: 0});
        while let Some(NodeState { idx, dist }) = self.queue.pop() {
            // Skip stale queue entries superseded by a better path.
            if dist <= self.dist[idx] {
                let cur = &self.graph.nodes[idx];
                // relax all adjacent edges
                for (node, edge) in cur.adj.iter() {
                    let node_idx = *node;
                    let edge_idx = *edge;
                    let w_cur = self.dist[node_idx];
                    let w_new = self.dist[idx] + self.graph.edges[edge_idx].driving_time;
                    if w_new < w_cur {
                        self.dist[node_idx] = w_new;
                        self.parents[node_idx] = idx;
                        self.queue.push(NodeState { idx: node_idx, dist: w_new })
                    }
                }
            }
        }
    }
}
/// Great-circle distance between two nodes in meters, via the haversine
/// formula (mean earth radius 6,367,000 m).
pub fn haversine_length(n1: &Node, n2: &Node) -> f64 {
    let lat1 = n1.lat.to_radians();
    let lat2 = n2.lat.to_radians();
    let half_dlat = (lat2 - lat1) / 2_f64;
    let half_dlon = (n2.lon.to_radians() - n1.lon.to_radians()) / 2_f64;
    // Haversine term: sin²(Δlat/2) + cos(lat1)·cos(lat2)·sin²(Δlon/2)
    let a = half_dlat.sin().powi(2) + lat1.cos() * lat2.cos() * half_dlon.sin().powi(2);
    2_f64 * a.sqrt().asin() * 6367000_f64 // distance in m
}
/// A (node index, tentative distance) pair queued during Dijkstra.
#[derive(Clone, Copy, Eq, PartialEq)]
pub struct NodeState {
    idx: usize,
    dist: u32
}
impl Ord for NodeState {
    /// Compares by distance with the ordering inverted, so that the
    /// max-heap `BinaryHeap` pops the smallest distance first.
    fn cmp(&self, other: &NodeState) -> ::std::cmp::Ordering {
        self.dist.cmp(&other.dist).reverse()
    }
}
impl PartialOrd for NodeState {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &NodeState) -> Option<::std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
|
//pub use crate::fallback::vec3::*;
pub use crate::avx::vec3::*;
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw register contents captured at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw bits staged to be written back to the register.
    bits: u32,
}
impl super::GLOBEN {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        let bits = self.register.get();
        // Read-modify-write: seed W with the current bits so fields the
        // closure does not touch keep their values.
        // (Field-init shorthand; clippy: redundant_field_names.)
        let r = R { bits };
        let mut w = W { bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value, not the current contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `ENB7`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB7R {
    #[doc = "Use local enable. value."]
    LCO,
    #[doc = "Disable CTIMER. value."]
    DIS,
}
impl ENB7R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // LCO encodes as 1, DIS as 0.
        *self == ENB7R::LCO
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB7R {
        if value { ENB7R::LCO } else { ENB7R::DIS }
    }
    #[doc = "Checks if the value of the field is `LCO`"]
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
#[doc = "Possible values of the field `ENA7`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA7R {
    #[doc = "Use local enable. value."]
    LCO,
    #[doc = "Disable CTIMER. value."]
    DIS,
}
impl ENA7R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // LCO encodes as 1, DIS as 0.
        *self == ENA7R::LCO
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA7R {
        if value { ENA7R::LCO } else { ENA7R::DIS }
    }
    #[doc = "Checks if the value of the field is `LCO`"]
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
#[doc = "Possible values of the field `ENB6`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB6R {
    #[doc = "Use local enable. value."]
    LCO,
    #[doc = "Disable CTIMER. value."]
    DIS,
}
impl ENB6R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // LCO encodes as 1, DIS as 0.
        *self == ENB6R::LCO
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB6R {
        if value { ENB6R::LCO } else { ENB6R::DIS }
    }
    #[doc = "Checks if the value of the field is `LCO`"]
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA6`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA6R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA6R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA6R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA6R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA6R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA6R {
        if value { ENA6R::LCO } else { ENA6R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENB5`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB5R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB5R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENB5R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENB5R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENB5R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB5R {
        if value { ENB5R::LCO } else { ENB5R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA5`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA5R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA5R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA5R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA5R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA5R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA5R {
        if value { ENA5R::LCO } else { ENA5R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENB4`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB4R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB4R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENB4R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENB4R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENB4R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB4R {
        if value { ENB4R::LCO } else { ENB4R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA4`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA4R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA4R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA4R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA4R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA4R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA4R {
        if value { ENA4R::LCO } else { ENA4R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENB3`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB3R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB3R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENB3R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENB3R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENB3R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB3R {
        if value { ENB3R::LCO } else { ENB3R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA3`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA3R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA3R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA3R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA3R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA3R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA3R {
        if value { ENA3R::LCO } else { ENA3R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENB2`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB2R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB2R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENB2R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENB2R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENB2R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB2R {
        if value { ENB2R::LCO } else { ENB2R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA2`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA2R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA2R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA2R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA2R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA2R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA2R {
        if value { ENA2R::LCO } else { ENA2R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENB1`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB1R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB1R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENB1R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENB1R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENB1R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB1R {
        if value { ENB1R::LCO } else { ENB1R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA1`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA1R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA1R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA1R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA1R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA1R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA1R {
        if value { ENA1R::LCO } else { ENA1R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENB0`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENB0R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB0R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENB0R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENB0R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENB0R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENB0R {
        if value { ENB0R::LCO } else { ENB0R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Possible values of the field `ENA0`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ENA0R {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA0R {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        matches!(*self, ENA0R::DIS)
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        matches!(*self, ENA0R::LCO)
    }
    /// Value of the field as raw bits (`LCO` = 1, `DIS` = 0)
    #[inline]
    pub fn bit(&self) -> bool {
        *self == ENA0R::LCO
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> ENA0R {
        if value { ENA0R::LCO } else { ENA0R::DIS }
    }
    /// Checks if the value of the field is `LCO`
    #[inline]
    pub fn is_lco(&self) -> bool {
        self.bit()
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
}
/// Values that can be written to the field `ENB7`
pub enum ENB7W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB7W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB7W::LCO)
    }
}
/// Proxy for writing the `ENB7` field
pub struct _ENB7W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB7W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB7W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB7W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB7W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 15 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 15;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA7`
pub enum ENA7W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA7W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA7W::LCO)
    }
}
/// Proxy for writing the `ENA7` field
pub struct _ENA7W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA7W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA7W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA7W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA7W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 14 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 14;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB6`
pub enum ENB6W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB6W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB6W::LCO)
    }
}
/// Proxy for writing the `ENB6` field
pub struct _ENB6W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB6W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB6W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB6W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB6W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 13 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 13;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA6`
pub enum ENA6W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA6W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA6W::LCO)
    }
}
/// Proxy for writing the `ENA6` field
pub struct _ENA6W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA6W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA6W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA6W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA6W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 12 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 12;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB5`
pub enum ENB5W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB5W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB5W::LCO)
    }
}
/// Proxy for writing the `ENB5` field
pub struct _ENB5W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB5W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB5W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB5W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB5W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 11 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 11;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA5`
pub enum ENA5W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA5W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA5W::LCO)
    }
}
/// Proxy for writing the `ENA5` field
pub struct _ENA5W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA5W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA5W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA5W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA5W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 10 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 10;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB4`
pub enum ENB4W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB4W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB4W::LCO)
    }
}
/// Proxy for writing the `ENB4` field
pub struct _ENB4W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB4W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB4W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB4W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB4W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 9 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 9;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA4`
pub enum ENA4W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA4W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA4W::LCO)
    }
}
/// Proxy for writing the `ENA4` field
pub struct _ENA4W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA4W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA4W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA4W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA4W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 8 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 8;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB3`
pub enum ENB3W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB3W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB3W::LCO)
    }
}
/// Proxy for writing the `ENB3` field
pub struct _ENB3W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB3W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB3W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB3W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB3W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 7 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 7;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA3`
pub enum ENA3W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA3W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA3W::LCO)
    }
}
/// Proxy for writing the `ENA3` field
pub struct _ENA3W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA3W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA3W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA3W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA3W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 6 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 6;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB2`
pub enum ENB2W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB2W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB2W::LCO)
    }
}
/// Proxy for writing the `ENB2` field
pub struct _ENB2W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB2W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB2W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB2W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB2W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 5 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 5;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA2`
pub enum ENA2W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA2W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA2W::LCO)
    }
}
/// Proxy for writing the `ENA2` field
pub struct _ENA2W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA2W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA2W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA2W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA2W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 4 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 4;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB1`
pub enum ENB1W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB1W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB1W::LCO)
    }
}
/// Proxy for writing the `ENB1` field
pub struct _ENB1W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB1W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB1W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB1W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB1W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 3 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 3;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA1`
pub enum ENA1W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA1W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA1W::LCO)
    }
}
/// Proxy for writing the `ENA1` field
pub struct _ENA1W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA1W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA1W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA1W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA1W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 2 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 2;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENB0`
pub enum ENB0W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENB0W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENB0W::LCO)
    }
}
/// Proxy for writing the `ENB0` field
pub struct _ENB0W<'a> {
    w: &'a mut W,
}
impl<'a> _ENB0W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENB0W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENB0W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENB0W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 1 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 1;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `ENA0`
pub enum ENA0W {
    /// Use local enable. value.
    LCO,
    /// Disable CTIMER. value.
    DIS,
}
impl ENA0W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, ENA0W::LCO)
    }
}
/// Proxy for writing the `ENA0` field
pub struct _ENA0W<'a> {
    w: &'a mut W,
}
impl<'a> _ENA0W<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: ENA0W) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Use local enable. value.
    #[inline]
    pub fn lco(self) -> &'a mut W {
        self.variant(ENA0W::LCO)
    }
    /// Disable CTIMER. value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(ENA0W::DIS)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 0 of the register)
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u8 = 0;
        // Clear the target bit, then OR in the new value.
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    /// Returns `true` when the single-bit field at `offset` is set.
    /// (Shared helper for all the per-bit readers below.)
    #[inline]
    fn field_bit(&self, offset: u8) -> bool {
        ((self.bits >> offset) & 1) != 0
    }
    #[doc = "Bit 15 - Alternate enable for B7."]
    #[inline]
    pub fn enb7(&self) -> ENB7R {
        ENB7R::_from(self.field_bit(15))
    }
    #[doc = "Bit 14 - Alternate enable for A7"]
    #[inline]
    pub fn ena7(&self) -> ENA7R {
        ENA7R::_from(self.field_bit(14))
    }
    #[doc = "Bit 13 - Alternate enable for B6"]
    #[inline]
    pub fn enb6(&self) -> ENB6R {
        ENB6R::_from(self.field_bit(13))
    }
    #[doc = "Bit 12 - Alternate enable for A6"]
    #[inline]
    pub fn ena6(&self) -> ENA6R {
        ENA6R::_from(self.field_bit(12))
    }
    #[doc = "Bit 11 - Alternate enable for B5"]
    #[inline]
    pub fn enb5(&self) -> ENB5R {
        ENB5R::_from(self.field_bit(11))
    }
    #[doc = "Bit 10 - Alternate enable for A5"]
    #[inline]
    pub fn ena5(&self) -> ENA5R {
        ENA5R::_from(self.field_bit(10))
    }
    #[doc = "Bit 9 - Alternate enable for B4"]
    #[inline]
    pub fn enb4(&self) -> ENB4R {
        ENB4R::_from(self.field_bit(9))
    }
    #[doc = "Bit 8 - Alternate enable for A4"]
    #[inline]
    pub fn ena4(&self) -> ENA4R {
        ENA4R::_from(self.field_bit(8))
    }
    #[doc = "Bit 7 - Alternate enable for B3."]
    #[inline]
    pub fn enb3(&self) -> ENB3R {
        ENB3R::_from(self.field_bit(7))
    }
    #[doc = "Bit 6 - Alternate enable for A3"]
    #[inline]
    pub fn ena3(&self) -> ENA3R {
        ENA3R::_from(self.field_bit(6))
    }
    #[doc = "Bit 5 - Alternate enable for B2"]
    #[inline]
    pub fn enb2(&self) -> ENB2R {
        ENB2R::_from(self.field_bit(5))
    }
    #[doc = "Bit 4 - Alternate enable for A2"]
    #[inline]
    pub fn ena2(&self) -> ENA2R {
        ENA2R::_from(self.field_bit(4))
    }
    #[doc = "Bit 3 - Alternate enable for B1"]
    #[inline]
    pub fn enb1(&self) -> ENB1R {
        ENB1R::_from(self.field_bit(3))
    }
    #[doc = "Bit 2 - Alternate enable for A1"]
    #[inline]
    pub fn ena1(&self) -> ENA1R {
        ENA1R::_from(self.field_bit(2))
    }
    #[doc = "Bit 1 - Alternate enable for B0"]
    #[inline]
    pub fn enb0(&self) -> ENB0R {
        ENB0R::_from(self.field_bit(1))
    }
    #[doc = "Bit 0 - Alternate enable for A0"]
    #[inline]
    pub fn ena0(&self) -> ENA0R {
        ENA0R::_from(self.field_bit(0))
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // All sixteen enable bits are set out of reset.
        W { bits: 0xffff }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 15 - Alternate enable for B7."]
    #[inline]
    pub fn enb7(&mut self) -> _ENB7W {
        _ENB7W { w: self }
    }
    #[doc = "Bit 14 - Alternate enable for A7"]
    #[inline]
    pub fn ena7(&mut self) -> _ENA7W {
        _ENA7W { w: self }
    }
    #[doc = "Bit 13 - Alternate enable for B6"]
    #[inline]
    pub fn enb6(&mut self) -> _ENB6W {
        _ENB6W { w: self }
    }
    #[doc = "Bit 12 - Alternate enable for A6"]
    #[inline]
    pub fn ena6(&mut self) -> _ENA6W {
        _ENA6W { w: self }
    }
    #[doc = "Bit 11 - Alternate enable for B5"]
    #[inline]
    pub fn enb5(&mut self) -> _ENB5W {
        _ENB5W { w: self }
    }
    #[doc = "Bit 10 - Alternate enable for A5"]
    #[inline]
    pub fn ena5(&mut self) -> _ENA5W {
        _ENA5W { w: self }
    }
    #[doc = "Bit 9 - Alternate enable for B4"]
    #[inline]
    pub fn enb4(&mut self) -> _ENB4W {
        _ENB4W { w: self }
    }
    #[doc = "Bit 8 - Alternate enable for A4"]
    #[inline]
    pub fn ena4(&mut self) -> _ENA4W {
        _ENA4W { w: self }
    }
    #[doc = "Bit 7 - Alternate enable for B3."]
    #[inline]
    pub fn enb3(&mut self) -> _ENB3W {
        _ENB3W { w: self }
    }
    #[doc = "Bit 6 - Alternate enable for A3"]
    #[inline]
    pub fn ena3(&mut self) -> _ENA3W {
        _ENA3W { w: self }
    }
    #[doc = "Bit 5 - Alternate enable for B2"]
    #[inline]
    pub fn enb2(&mut self) -> _ENB2W {
        _ENB2W { w: self }
    }
    #[doc = "Bit 4 - Alternate enable for A2"]
    #[inline]
    pub fn ena2(&mut self) -> _ENA2W {
        _ENA2W { w: self }
    }
    #[doc = "Bit 3 - Alternate enable for B1"]
    #[inline]
    pub fn enb1(&mut self) -> _ENB1W {
        _ENB1W { w: self }
    }
    #[doc = "Bit 2 - Alternate enable for A1"]
    #[inline]
    pub fn ena1(&mut self) -> _ENA1W {
        _ENA1W { w: self }
    }
    #[doc = "Bit 1 - Alternate enable for B0"]
    #[inline]
    pub fn enb0(&mut self) -> _ENB0W {
        _ENB0W { w: self }
    }
    #[doc = "Bit 0 - Alternate enable for A0"]
    #[inline]
    pub fn ena0(&mut self) -> _ENA0W {
        _ENA0W { w: self }
    }
}
|
/// A 2-D point with integer coordinates.
#[derive(Debug)]
struct Point {
    /// Horizontal coordinate.
    x: i32,
    /// Vertical coordinate.
    y: i32,
}
/// Builds a `Point` and prints its `Debug` representation.
fn main() {
    let point = Point { x: 44, y: 66 };
    println!("{:?}", point);
}
// Example code that deserializes and serializes the model.
// extern crate serde;
// #[macro_use]
// extern crate serde_derive;
// extern crate serde_json;
//
// use generated_module::GitHubPushEvent;
//
// fn main() {
// let json = r#"{"answer": 42}"#;
// let model: GitHubPushEvent = serde_json::from_str(&json).unwrap();
// }
// TODO(zoidbergwill): Trim some of these fields, since we only need some of them.
// https://github.com/serde-rs/serde/pull/201
extern crate serde_json;
/// Payload of a GitHub `push` webhook event (quicktype-generated model).
#[derive(Debug, Serialize, Deserialize)]
pub struct GitHubPushEvent {
// `ref` is a Rust keyword, hence the rename.
#[serde(rename = "ref")]
pub git_hub_push_event_ref: String,
pub before: String,
pub after: String,
pub created: bool,
pub deleted: bool,
pub forced: bool,
pub base_ref: Option<serde_json::Value>,
pub compare: String,
pub commits: Vec<Commit>,
pub head_commit: Option<Commit>,
pub repository: Repository,
pub pusher: Pusher,
pub sender: Sender,
}
/// A single commit carried by a push event.
#[derive(Debug, Serialize, Deserialize)]
pub struct Commit {
pub id: String,
pub tree_id: String,
pub distinct: bool,
pub message: String,
pub timestamp: String,
pub url: String,
pub author: Author,
pub committer: Author,
pub added: Vec<String>,
// NOTE(review): typed as Value because the sample payload had no entries;
// presumably these are file-path strings like `added` — confirm.
pub removed: Vec<Option<serde_json::Value>>,
pub modified: Vec<Option<serde_json::Value>>,
}
/// Commit author/committer identity.
#[derive(Debug, Serialize, Deserialize)]
pub struct Author {
pub name: String,
pub email: String,
pub username: Name,
}
/// The user who performed the push.
#[derive(Debug, Serialize, Deserialize)]
pub struct Pusher {
pub name: Name,
pub email: String,
}
/// Repository object as embedded in push events.
///
/// Note: here `created_at`/`pushed_at` are integers (epoch seconds in the
/// sample payload), unlike [`Repo`] where they are strings.
#[derive(Debug, Serialize, Deserialize)]
pub struct Repository {
pub id: i64,
pub node_id: String,
pub name: String,
pub full_name: String,
pub owner: Sender,
pub private: bool,
pub html_url: String,
pub description: Option<serde_json::Value>,
pub fork: bool,
pub url: String,
pub forks_url: String,
pub keys_url: String,
pub collaborators_url: String,
pub teams_url: String,
pub hooks_url: String,
pub issue_events_url: String,
pub events_url: String,
pub assignees_url: String,
pub branches_url: String,
pub tags_url: String,
pub blobs_url: String,
pub git_tags_url: String,
pub git_refs_url: String,
pub trees_url: String,
pub statuses_url: String,
pub languages_url: String,
pub stargazers_url: String,
pub contributors_url: String,
pub subscribers_url: String,
pub subscription_url: String,
pub commits_url: String,
pub git_commits_url: String,
pub comments_url: String,
pub issue_comment_url: String,
pub contents_url: String,
pub compare_url: String,
pub merges_url: String,
pub archive_url: String,
pub downloads_url: String,
pub issues_url: String,
pub pulls_url: String,
pub milestones_url: String,
pub notifications_url: String,
pub labels_url: String,
pub releases_url: String,
pub deployments_url: String,
pub created_at: i64,
pub updated_at: String,
pub pushed_at: i64,
pub git_url: String,
pub ssh_url: String,
pub clone_url: String,
pub svn_url: String,
pub homepage: Option<String>,
pub size: i64,
pub stargazers_count: i64,
pub watchers_count: i64,
pub language: Option<String>,
pub has_issues: bool,
pub has_projects: bool,
pub has_downloads: bool,
pub has_wiki: bool,
pub has_pages: bool,
pub forks_count: i64,
pub mirror_url: Option<serde_json::Value>,
pub archived: bool,
pub open_issues_count: i64,
pub license: Option<serde_json::Value>,
pub forks: i64,
pub open_issues: i64,
pub watchers: i64,
pub default_branch: String,
pub stargazers: i64,
pub master_branch: String,
}
/// A GitHub user/account reference (sender, owner, author, ...).
///
/// NOTE(review): several fields (`login`, `node_id`, URL fields) were pinned
/// by quicktype to enums of the literal values in the sample payloads; they
/// will fail to deserialize for any other user — consider widening to String.
#[derive(Debug, Serialize, Deserialize)]
pub struct Sender {
pub name: Option<Name>,
pub email: Option<String>,
pub login: Name,
pub id: i64,
pub node_id: NodeId,
pub avatar_url: String,
pub gravatar_id: String,
pub url: String,
pub html_url: String,
pub followers_url: String,
pub following_url: FollowingUrl,
pub gists_url: GistsUrl,
pub starred_url: StarredUrl,
pub subscriptions_url: String,
pub organizations_url: String,
pub repos_url: String,
pub events_url: EventsUrl,
pub received_events_url: String,
// `type` is a Rust keyword, hence the rename.
#[serde(rename = "type")]
pub sender_type: Type,
pub site_admin: bool,
}
/// Payload of a GitHub `issues` webhook event.
#[derive(Debug, Serialize, Deserialize)]
pub struct GitHubIssuesEvent {
pub action: String,
pub issue: Issue,
pub changes: Changes,
pub repository: Repo,
pub sender: Sender,
}
/// Empty in the sample payload; GitHub populates it for `edited` actions.
#[derive(Debug, Serialize, Deserialize)]
pub struct Changes {}
/// The issue the event refers to.
#[derive(Debug, Serialize, Deserialize)]
pub struct Issue {
pub url: String,
pub repository_url: String,
pub labels_url: String,
pub comments_url: String,
pub events_url: String,
pub html_url: String,
pub id: i64,
pub node_id: String,
pub number: i64,
pub title: String,
pub user: Sender,
pub labels: Vec<Label>,
pub state: String,
pub locked: bool,
pub assignee: Option<serde_json::Value>,
pub assignees: Vec<Option<serde_json::Value>>,
pub milestone: Option<serde_json::Value>,
pub comments: i64,
pub created_at: String,
pub updated_at: String,
pub closed_at: Option<serde_json::Value>,
pub author_association: String,
pub body: String,
}
/// An issue label.
#[derive(Debug, Serialize, Deserialize)]
pub struct Label {
pub id: i64,
pub node_id: String,
pub url: String,
pub name: String,
// Hex color without the leading `#`.
pub color: String,
// `default` is a Rust keyword, hence the rename.
#[serde(rename = "default")]
pub label_default: bool,
}
/// Repository object as embedded in issues/pull-request events.
///
/// Note: here `created_at`/`updated_at`/`pushed_at` are ISO-8601 strings,
/// unlike [`Repository`] where two of them are integers.
#[derive(Debug, Serialize, Deserialize)]
pub struct Repo {
pub id: i64,
pub node_id: String,
pub name: String,
pub full_name: String,
pub owner: Sender,
pub private: bool,
pub html_url: String,
pub description: Option<serde_json::Value>,
pub fork: bool,
pub url: String,
pub forks_url: String,
pub keys_url: String,
pub collaborators_url: String,
pub teams_url: String,
pub hooks_url: String,
pub issue_events_url: String,
pub events_url: String,
pub assignees_url: String,
pub branches_url: String,
pub tags_url: String,
pub blobs_url: String,
pub git_tags_url: String,
pub git_refs_url: String,
pub trees_url: String,
pub statuses_url: String,
pub languages_url: String,
pub stargazers_url: String,
pub contributors_url: String,
pub subscribers_url: String,
pub subscription_url: String,
pub commits_url: String,
pub git_commits_url: String,
pub comments_url: String,
pub issue_comment_url: String,
pub contents_url: String,
pub compare_url: String,
pub merges_url: String,
pub archive_url: String,
pub downloads_url: String,
pub issues_url: String,
pub pulls_url: String,
pub milestones_url: String,
pub notifications_url: String,
pub labels_url: String,
pub releases_url: String,
pub deployments_url: String,
pub created_at: String,
pub updated_at: String,
pub pushed_at: String,
pub git_url: String,
pub ssh_url: String,
pub clone_url: String,
pub svn_url: String,
pub homepage: Option<serde_json::Value>,
pub size: i64,
pub stargazers_count: i64,
pub watchers_count: i64,
pub language: Option<serde_json::Value>,
pub has_issues: bool,
pub has_projects: bool,
pub has_downloads: bool,
pub has_wiki: bool,
pub has_pages: bool,
pub forks_count: i64,
pub mirror_url: Option<serde_json::Value>,
pub archived: bool,
pub open_issues_count: i64,
pub license: Option<serde_json::Value>,
pub forks: i64,
pub open_issues: i64,
pub watchers: i64,
pub default_branch: String,
}
/// Payload of a GitHub `pull_request` webhook event.
#[derive(Debug, Serialize, Deserialize)]
pub struct GitHubPullRequestEvent {
pub action: String,
pub number: i64,
pub pull_request: PullRequest,
pub repository: Repo,
pub sender: Sender,
}
/// The pull request the event refers to.
#[derive(Debug, Serialize, Deserialize)]
pub struct PullRequest {
pub url: String,
pub id: i64,
pub node_id: String,
pub html_url: String,
pub diff_url: String,
pub patch_url: String,
pub issue_url: String,
pub number: i64,
pub state: String,
pub locked: bool,
pub title: String,
pub user: Sender,
pub body: String,
pub created_at: String,
pub updated_at: String,
// NOTE(review): non-optional String — deserialization will fail for open
// PRs where `closed_at` is null; consider Option. Confirm against payloads.
pub closed_at: String,
pub merged_at: Option<serde_json::Value>,
pub merge_commit_sha: String,
pub assignee: Option<serde_json::Value>,
pub assignees: Vec<Option<serde_json::Value>>,
pub requested_reviewers: Vec<Option<serde_json::Value>>,
pub requested_teams: Vec<Option<serde_json::Value>>,
pub labels: Vec<Option<serde_json::Value>>,
pub milestone: Option<serde_json::Value>,
pub commits_url: String,
pub review_comments_url: String,
pub review_comment_url: String,
pub comments_url: String,
pub statuses_url: String,
pub head: Base,
pub base: Base,
#[serde(rename = "_links")]
pub links: Links,
pub author_association: String,
pub merged: bool,
pub mergeable: bool,
pub rebaseable: bool,
pub mergeable_state: String,
pub merged_by: Option<serde_json::Value>,
pub comments: i64,
pub review_comments: i64,
pub maintainer_can_modify: bool,
pub commits: i64,
pub additions: i64,
pub deletions: i64,
pub changed_files: i64,
}
/// Head/base branch reference of a pull request.
#[derive(Debug, Serialize, Deserialize)]
pub struct Base {
pub label: String,
// `ref` is a Rust keyword, hence the rename.
#[serde(rename = "ref")]
pub base_ref: String,
pub sha: String,
pub user: Sender,
pub repo: Repo,
}
/// Hypermedia links attached to a pull request (`_links`).
#[derive(Debug, Serialize, Deserialize)]
pub struct Links {
// `self` is a Rust keyword, hence the rename.
#[serde(rename = "self")]
pub links_self: Comments,
pub html: Comments,
pub issue: Comments,
pub comments: Comments,
pub review_comments: Comments,
pub review_comment: Comments,
pub commits: Comments,
pub statuses: Comments,
}
/// A single hypermedia link.
#[derive(Debug, Serialize, Deserialize)]
pub struct Comments {
pub href: String,
}
// NOTE(review): the enums below were generated by quicktype from the literal
// values found in the sample payloads; they only accept those exact users and
// URLs. Widen to String if these models must handle arbitrary accounts.
#[derive(Debug, Serialize, Deserialize)]
pub enum Name {
Codertocat,
#[serde(rename = "zoidbergwill")]
Zoidbergwill,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum EventsUrl {
#[serde(rename = "https://api.github.com/users/Codertocat/events{/privacy}")]
HttpsApiGithubComUsersCodertocatEventsPrivacy,
#[serde(rename = "https://api.github.com/users/zoidbergwill/events{/privacy}")]
HttpsApiGithubComUsersZoidbergwillEventsPrivacy,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum FollowingUrl {
#[serde(rename = "https://api.github.com/users/Codertocat/following{/other_user}")]
HttpsApiGithubComUsersCodertocatFollowingOtherUser,
#[serde(rename = "https://api.github.com/users/zoidbergwill/following{/other_user}")]
HttpsApiGithubComUsersZoidbergwillFollowingOtherUser,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum GistsUrl {
#[serde(rename = "https://api.github.com/users/Codertocat/gists{/gist_id}")]
HttpsApiGithubComUsersCodertocatGistsGistId,
#[serde(rename = "https://api.github.com/users/zoidbergwill/gists{/gist_id}")]
HttpsApiGithubComUsersZoidbergwillGistsGistId,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum NodeId {
#[serde(rename = "MDQ6VXNlcjI1NzI0OTM=")]
Mdq6VxNlcjI1NzI0Otm,
#[serde(rename = "MDQ6VXNlcjIxMDMxMDY3")]
Mdq6VxNlcjIxMdMxMdy3,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum Type {
User,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum StarredUrl {
#[serde(rename = "https://api.github.com/users/Codertocat/starred{/owner}{/repo}")]
HttpsApiGithubComUsersCodertocatStarredOwnerRepo,
#[serde(rename = "https://api.github.com/users/zoidbergwill/starred{/owner}{/repo}")]
HttpsApiGithubComUsersZoidbergwillStarredOwnerRepo,
}
|
//! Foundation traits for creating Domain abstractions
//! using [the `Aggregate` pattern](https://martinfowler.com/bliki/DDD_Aggregate.html).
use std::fmt::Debug;
use std::ops::Deref;
use async_trait::async_trait;
#[cfg(feature = "serde")]
use serde::Serialize;
use crate::versioning::Versioned;
/// A short extractor type for the [`Aggregate`] [`Id`](Aggregate::Id).
pub type AggregateId<A> = <A as Aggregate>::Id;
/// An [`Aggregate`] manages a domain entity [`State`](Aggregate::State), acting
/// as a _transaction boundary_.
///
/// It allows **state mutations** through the use of
/// [`Command`](Aggregate::Command)s, which the Aggregate instance handles and
/// emits a number of Domain [`Event`](Aggregate::Event)s.
#[async_trait]
pub trait Aggregate {
/// Aggregate identifier: this should represent an unique identifier to
/// refer to a unique Aggregate instance.
///
/// `Eq` is required so roots can be compared by identity.
type Id: Eq;
/// State of the Aggregate: this should represent the Domain Entity data
/// structure.
///
/// `Default` provides the empty state a brand-new Aggregate starts from.
type State: Default;
/// Represents a specific, domain-related change to the Aggregate
/// [`State`](Aggregate::State).
type Event;
/// Commands are all the possible operations available on an Aggregate.
/// Use Commands to model business use-cases or [`State`](Aggregate::State)
/// mutations.
type Command;
/// Possible failures while [`apply`](Aggregate::apply)ing
/// [`Event`](Aggregate::Event)s or handling
/// [`Command`](Aggregate::Command)s.
type Error;
/// Applies an [`Event`](Aggregate::Event) to the current Aggregate
/// [`State`](Aggregate::State).
///
/// To enforce immutability, this method takes ownership of the previous
/// [`State`](Aggregate::State) and the current
/// [`Event`](Aggregate::Event) to apply, and returns the new version of the
/// [`State`](Aggregate::State) or an error.
fn apply(state: Self::State, event: Self::Event) -> Result<Self::State, Self::Error>;
/// Handles the requested [`Command`](Aggregate::Command) and returns a list
/// of [`Event`](Aggregate::Event)s to apply the
/// [`State`](Aggregate::State) mutation based on the current representation
/// of the State.
async fn handle(
&self,
id: &Self::Id,
state: &Self::State,
command: Self::Command,
) -> Result<Vec<Self::Event>, Self::Error>;
}
/// Extension trait with some handy methods to use with [`Aggregate`]s.
pub trait AggregateExt: Aggregate {
    /// Rebuilds an Aggregate [`State`](Aggregate::State) by applying, in
    /// order, every [`Event`](Aggregate::Event) yielded by the iterator.
    ///
    /// Useful to recreate the [`State`](Aggregate::State) of an Aggregate when
    /// the [`Event`](Aggregate::Event)s are located in-memory. Stops at (and
    /// returns) the first application error.
    #[inline]
    fn fold<I>(state: Self::State, events: I) -> Result<Self::State, Self::Error>
    where
        I: Iterator<Item = Self::Event>,
    {
        let mut current = state;
        for event in events {
            current = Self::apply(current, event)?;
        }
        Ok(current)
    }
}
impl<T> AggregateExt for T where T: Aggregate {}
/// Factory type for new [`AggregateRoot`] instances.
#[derive(Clone)]
pub struct AggregateRootFactory<T>
where
T: Aggregate,
{
// Prototype Aggregate, cloned into every root this factory builds.
aggregate: T,
}
/// Builds a factory directly from an [`Aggregate`] instance.
impl<T> From<T> for AggregateRootFactory<T>
where
T: Aggregate,
{
#[inline]
fn from(aggregate: T) -> Self {
Self { aggregate }
}
}
impl<T> AggregateRootFactory<T>
where
    T: Aggregate + Clone,
{
    /// Builds a new [`AggregateRoot`] instance for the specified [`Aggregate`]
    /// [`Id`](Aggregate::Id), starting from version 0 and the default State.
    #[inline]
    pub fn build(&self, id: T::Id) -> AggregateRoot<T> {
        self.build_with_state(id, 0, T::State::default())
    }
    /// Builds a new [`AggregateRoot`] instance for the specified Aggregate
    /// with an explicit version and [`State`](Aggregate::State) value.
    #[inline]
    pub fn build_with_state(&self, id: T::Id, version: u32, state: T::State) -> AggregateRoot<T> {
        AggregateRoot {
            id,
            state,
            version,
            aggregate: self.aggregate.clone(),
            to_commit: Vec::new(),
        }
    }
}
/// An [`AggregateRoot`] represents an handler to the [`Aggregate`] it's
/// managing, such as:
///
/// * Owning its [`State`](Aggregate::State), [`Id`](Aggregate::Id) and version,
/// * Proxying [`Command`](Aggregate::Command)s to the [`Aggregate`] using the
/// current [`State`](Aggregate::State),
/// * Keeping a list of [`Event`](Aggregate::Event)s to commit after
/// [`Command`](Aggregate::Command) execution.
///
/// ## Initialize
///
/// An [`AggregateRoot`] can only be initialized using the
/// [`AggregateRootFactory`].
///
/// Check [`AggregateRootFactory::build`] for more information.
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(Serialize))]
pub struct AggregateRoot<T>
where
T: Aggregate + 'static,
{
// Identity of the wrapped entity; the only field used for equality.
id: T::Id,
// Version counter; presumably advanced as events are persisted — see
// `with_version`.
version: u32,
#[cfg_attr(feature = "serde", serde(flatten))]
state: T::State,
#[cfg_attr(feature = "serde", serde(skip_serializing))]
aggregate: T,
// Events produced by handled commands and not yet committed.
#[cfg_attr(feature = "serde", serde(skip_serializing))]
to_commit: Vec<T::Event>,
}
impl<T> PartialEq for AggregateRoot<T>
where
T: Aggregate,
{
/// Two roots are equal when they wrap the same Aggregate id; version and
/// state are not compared.
#[inline]
fn eq(&self, other: &Self) -> bool {
self.id() == other.id()
}
}
impl<T> Versioned for AggregateRoot<T>
where
T: Aggregate,
{
/// Returns the root's current version counter.
#[inline]
fn version(&self) -> u32 {
self.version
}
}
impl<T> AggregateRoot<T>
where
T: Aggregate,
{
/// Returns a reference to the Aggregate [`Id`](Aggregate::Id) that
/// represents the entity wrapped by this [`AggregateRoot`] instance.
#[inline]
pub fn id(&self) -> &T::Id {
&self.id
}
/// Returns a reference to the current Aggregate
/// [`State`](Aggregate::State).
#[inline]
pub fn state(&self) -> &T::State {
&self.state
}
/// Takes the list of events to commit from the current instance,
/// resetting it to an empty list.
#[inline]
pub(crate) fn take_events_to_commit(&mut self) -> Vec<T::Event> {
std::mem::take(&mut self.to_commit)
}
/// Returns a new [`AggregateRoot`] having the specified version.
#[inline]
pub(crate) fn with_version(mut self, version: u32) -> Self {
self.version = version;
self
}
}
impl<T> Deref for AggregateRoot<T>
where
    T: Aggregate,
{
    type Target = T::State;
    /// Dereferences to the current Aggregate State, so State accessors can be
    /// called directly on the root.
    fn deref(&self) -> &Self::Target {
        &self.state
    }
}
impl<T> AggregateRoot<T>
where
    T: Aggregate,
    T::Event: Clone,
    T::State: Clone,
    T::Command: Debug,
{
    /// Handles the submitted [`Command`](Aggregate::Command) using the
    /// [`Aggregate::handle`] method and updates the Aggregate
    /// [`State`](Aggregate::State).
    ///
    /// Returns a `&mut self` reference to allow for _method chaining_.
    #[cfg_attr(
        feature = "with-tracing",
        tracing::instrument(level = "debug", name = "AggregateRoot::handle", skip(self))
    )]
    pub async fn handle(&mut self, command: T::Command) -> Result<&mut Self, T::Error> {
        let mut events = self
            .aggregate
            .handle(self.id(), self.state(), command)
            .await?;
        // Only apply (and pay for cloning the state) when the command
        // actually produced events; the previous code cloned and folded
        // unconditionally, which its own comment said it should not.
        if !events.is_empty() {
            self.state = T::fold(self.state.clone(), events.iter().cloned())?;
            self.to_commit.append(&mut events);
        }
        Ok(self)
    }
}
|
use std::cell::RefCell;
use std::rc::Rc;
use bincode::options;
use serde::de::DeserializeSeed;
use serde::Serialize;
use super::magic_buffer::MagicBuffer;
use crate::buffer_pool::BufferPool;
use crate::protocol::serialization::KvsRequestDeserializer;
use crate::protocol::KvsRequest;
/// Test helper: asserts that two requests are the same variant with equal
/// payloads, panicking on any mismatch (including a variant mismatch).
fn assert_eq_req<const SIZE: usize>(r1: &KvsRequest<SIZE>, r2: &KvsRequest<SIZE>) {
match (r1, r2) {
(KvsRequest::Gossip { map: map1 }, KvsRequest::Gossip { map: map2 }) => {
assert_eq!(map1, map2);
}
(KvsRequest::Delete { key: key1 }, KvsRequest::Delete { key: key2 }) => {
assert_eq!(key1, key2);
}
(KvsRequest::Get { key: key1 }, KvsRequest::Get { key: key2 }) => {
assert_eq!(key1, key2);
}
(
KvsRequest::Put {
key: key1,
value: value1,
},
KvsRequest::Put {
key: key2,
value: value2,
},
) => {
assert_eq!(key1, key2);
// Values live behind interior mutability; compare the borrowed contents.
assert_eq!(*value1.inner.borrow(), *value2.inner.borrow());
}
// The two requests are different variants.
_ => panic!(),
}
}
/// Serializes `req` with `serializer`, then reads it back out of
/// `deserializer`, returning the reconstructed request.
fn serialize_deserialize<'de, S, D, const SIZE: usize>(
    req: &KvsRequest<SIZE>,
    buffer_pool: &Rc<RefCell<BufferPool<SIZE>>>,
    mut serializer: S,
    mut deserializer: D,
) -> KvsRequest<SIZE>
where
    for<'a> &'a mut S: serde::Serializer,
    for<'b> &'b mut D: serde::Deserializer<'de>,
{
    Serialize::serialize(req, &mut serializer).unwrap();
    // Deserialization is seeded so reconstructed values draw from the pool.
    let seed = KvsRequestDeserializer {
        collector: Rc::clone(buffer_pool),
    };
    seed.deserialize(&mut deserializer).unwrap()
}
/// Round-trips `req` through every supported wire format (bincode and
/// serde_json) and asserts each reconstruction equals the original.
pub fn check_all<const SIZE: usize>(
    buffer_pool: &Rc<RefCell<BufferPool<SIZE>>>,
    req: &KvsRequest<SIZE>,
) {
    // bincode round-trip.
    {
        let wire = MagicBuffer::default();
        let restored = serialize_deserialize(
            req,
            buffer_pool,
            bincode::Serializer::new(wire.clone(), options()),
            bincode::Deserializer::with_reader(wire.clone(), options()),
        );
        assert_eq_req(req, &restored);
    }
    // serde_json round-trip.
    {
        let wire = MagicBuffer::default();
        let restored = serialize_deserialize(
            req,
            buffer_pool,
            serde_json::Serializer::new(wire.clone()),
            serde_json::Deserializer::from_reader(wire.clone()),
        );
        assert_eq_req(req, &restored);
    }
}
|
use ckb_types::{core::BlockNumber, packed::Byte32};
/// The invoker should only rely on `block_median_time` function
/// the other functions only use to help the default `block_median_time`, and maybe unimplemented.
pub trait BlockMedianTimeContext {
    /// Number of ancestor blocks (including the starting one) to consider
    /// when computing the median time.
    fn median_block_count(&self) -> u64;
    /// Return timestamp and block_number of the corresponding block_hash, and hash of parent block
    ///
    /// Fake implementation:
    /// ```ignore
    /// let current_header = get_block_header(block_hash);
    /// let parent_header = current_header.timestamp_and_parent().header();
    /// return (parent_header.timestamp(), current_header.number(), parent_header.hash());
    /// ```
    fn timestamp_and_parent(&self, block_hash: &Byte32) -> (u64, BlockNumber, Byte32);
    /// Return past block median time, **including the timestamp of the given one**
    ///
    /// # Panics
    /// Panics if `median_block_count()` returns 0 (no timestamps collected).
    fn block_median_time(&self, block_hash: &Byte32) -> u64 {
        let median_time_span = self.median_block_count();
        let mut timestamps: Vec<u64> = Vec::with_capacity(median_time_span as usize);
        let mut block_hash = block_hash.clone();
        for _ in 0..median_time_span {
            let (timestamp, block_number, parent_hash) = self.timestamp_and_parent(&block_hash);
            timestamps.push(timestamp);
            block_hash = parent_hash;
            // Genesis reached: no more ancestors to walk.
            if block_number == 0 {
                break;
            }
        }
        // Sort ascending; `len >> 1` returns the greater of the two middle
        // elements when the count is even. `sort_unstable` is faster for
        // primitives and stability is irrelevant for plain u64 keys.
        timestamps.sort_unstable();
        timestamps[timestamps.len() >> 1]
    }
}
|
use std::hash::{Hash, Hasher};
use std::cmp::{Eq, PartialEq, Ord, PartialOrd, Ordering};
use hex2d::{Angle, Coordinate, Direction, ToCoordinate, Position, ToDirection};
use board::{Board, cube_to_offset};
use scoring::move_score;
/// Immutable description of one game: the starting board, the ordered unit
/// source (each inner Vec is the cell template of one unit), and the RNG seed.
pub struct Game {
pub board: Board,
pub source: Vec<Vec<Coordinate>>,
pub seed: u64
}
/// Serializable snapshot of a unit: pivot plus member cells, both in offset
/// (col, row) coordinates.
#[derive(RustcEncodable)]
struct UnitState {
pivot: (i32, i32),
cells: Vec<(i32, i32)>
}
/// Serializable snapshot of a whole position (presumably consumed by an
/// external viewer/logger — confirm).
#[derive(RustcEncodable)]
struct GameState {
pub board: Board,
pub unit: UnitState,
pub previous_move: String
}
/// One search-tree node: the board state plus the currently falling unit and
/// all bookkeeping needed for scoring.
#[derive(Clone)]
pub struct GamePosition<'a> {
pub game: &'a Game,
pub board: Board,
pub unit: Unit<'a>,
// Running total of the sizes of all units locked so far.
pub sum_unit_size: i32,
// Index into `game.source` of the next unit to spawn.
pub next_source: usize,
// Lines cleared by the previous locked unit (feeds the scoring formula).
pub cleared_lines_prev: i32,
pub score: i32,
pub previous_move: Option<Command>
}
impl<'a> GamePosition<'a> {
    /// Spawns the next unit from the source, or `None` when it is exhausted.
    pub fn next_unit(&self) -> Option<Unit<'a>> {
        self.game.source.get(self.next_source)
            .map(|u| self.board.place_new_unit(u))
    }
    /// Snapshot of this position in the serializable `GameState` form.
    pub fn to_state(&self) -> GameState {
        let cells: Vec<(i32, i32)> = self.unit.iter().collect();
        GameState {
            board: self.board.clone(),
            unit: UnitState {
                pivot: cube_to_offset(&self.unit.position.to_coordinate()),
                cells: cells
            },
            // `unwrap_or_default` replaces the eagerly-allocating
            // `unwrap_or("".to_string())`.
            previous_move: self.previous_move.map(|c| c.to_string()).unwrap_or_default()
        }
    }
    /// Initial position for game `g`: first source unit placed, nothing locked.
    pub fn start(g: &Game) -> GamePosition {
        GamePosition {
            game: g,
            board: g.board.clone(),
            unit: g.board.place_new_unit(&g.source[0]),
            sum_unit_size: 0,
            next_source: 1,
            cleared_lines_prev: 0,
            score: 0,
            previous_move: None
        }
    }
    /// Locks the current unit in place, scores the move and spawns the next
    /// unit. Returns `None` when the source is exhausted (game over).
    pub fn lock_current_unit(&self, c: Command) -> Option<GamePosition<'a>> {
        // No unit left to spawn after locking: the game ends here.
        if self.next_source >= self.game.source.len() {
            return None
        }
        let (board, cleared_lines) = self.board.lock_unit(&self.unit);
        let unit = self.board.place_new_unit(&self.game.source[self.next_source]);
        let sum_unit_size = self.sum_unit_size + self.unit.size();
        let new_score = self.score + move_score(self.unit.size(),
                                                cleared_lines,
                                                self.cleared_lines_prev);
        Some(GamePosition {
            game: self.game,
            board: board,
            unit: unit,
            sum_unit_size: sum_unit_size,
            next_source: self.next_source + 1,
            cleared_lines_prev: cleared_lines,
            score: new_score,
            previous_move: Some(c)
        })
    }
    /// Applies command `c`: moves the unit if the result is a legal position,
    /// otherwise locks the unit where it is.
    pub fn step(&self, c: Command) -> Option<GamePosition<'a>> {
        let unit = self.unit.apply(&c);
        if self.board.check_unit_position(&unit) {
            Some(GamePosition {
                unit: unit,
                board: self.board.clone(),
                previous_move: Some(c),
                ..*self
            })
        } else {
            // Illegal destination: the unit locks at its current position.
            self.lock_current_unit(c)
        }
    }
}
/// A single player action: translate the unit one cell, or rotate it.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Command {
Move(Direction),
Rotate(Angle)
}
// Implement `Display` instead of a manual `ToString`: the std blanket impl
// derives `to_string` from `Display` (identical output), and `Command` also
// becomes usable with `format!`/`{}` directly.
impl ::std::fmt::Display for Command {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        match *self {
            Command::Move(dir) => {
                // Hex-grid directions mapped to compass names.
                let dir_str = match dir {
                    Direction::YX => "West",
                    Direction::XY => "East",
                    Direction::ZY => "SE",
                    Direction::ZX => "SW",
                    _ => "UP"
                };
                write!(f, "Move: {}", dir_str)
            }
            Command::Rotate(ang) => {
                let ang_str = match ang {
                    Angle::Left => "CCW",
                    Angle::Right => "CW",
                    _ => "Unexpected"
                };
                write!(f, "Rotate: {}", ang_str)
            }
        }
    }
}
#[test]
fn to_string_test() {
    let s = ALL_COMMANDS[0].to_string();
    // ALL_COMMANDS[0] is Move(Direction::YX), which renders as West.
    // The original test printed the value but asserted nothing.
    assert_eq!(s, "Move: West");
    println!("{}", s);
}
/// Every legal command: the four permitted move directions plus both rotations.
pub static ALL_COMMANDS : [Command; 6] = [
Command::Move(Direction::YX), // West
Command::Move(Direction::XY), // East
Command::Move(Direction::ZX), // SW
Command::Move(Direction::ZY), // SE
Command::Rotate(Angle::Left), // CCW
Command::Rotate(Angle::Right) // CW
];
/// A game piece: a borrowed cell template plus the transform (translation and
/// orientation) currently applied to it.
#[derive(Clone, Debug)]
pub struct Unit<'a> {
// Template cells shared by all transformed copies of this unit.
cells: &'a Vec<Coordinate>,
pub position: Position,
}
impl<'a> Hash for Unit<'a> {
    /// Hashes only the position — consistent with `PartialEq`, which also
    /// ignores `cells`.
    fn hash<H>(&self, state: &mut H) where H: Hasher {
        self.position.hash(state)
    }
}
impl<'a> PartialEq for Unit<'a> {
    // Units compare by position alone; the cell template is not part of
    // identity.
    fn eq(&self, other: &Unit) -> bool {
        self.position == other.position
    }
}
impl<'a> Eq for Unit<'a> {}
impl<'a> PartialOrd for Unit<'a> {
    // Delegate to `Ord` so the two orderings can never disagree
    // (clippy: non_canonical_partial_ord_impl).
    fn partial_cmp(&self, other: &Unit) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<'a> Ord for Unit<'a> {
    fn cmp(&self, other: &Unit) -> Ordering {
        self.position.cmp(&other.position)
    }
}
impl<'a> Unit<'a> {
/// Creates a unit from a template, at the origin with default orientation.
pub fn new(cells: &'a Vec<Coordinate>) -> Unit<'a> {
Unit {
cells: cells,
position: Position::new((0, 0).to_coordinate(), Direction::from_int(0))
}
}
/// Maps a template cell into board space: rotate around the origin by the
/// unit's orientation, then translate by its offset.
fn apply_to_coord(p: &Position, c: &Coordinate) -> Coordinate {
let angle = p.to_direction() - Direction::from_int(0);
let shift = p.to_coordinate();
c.rotate_around_zero(angle) + shift
}
/// Iterates the unit's cells in offset (col, row) coordinates, with the
/// current transform applied.
pub fn iter<'b>(&'b self) -> Box<Iterator<Item=(i32, i32)> + 'b> {
let p = self.position;
let it = self.cells.iter().map(move |&c| {
cube_to_offset(&Unit::apply_to_coord(&p, &c))
});
Box::new(it)
}
/// Smallest occupied row. Panics if the unit has no cells.
pub fn border_top(&self) -> i32 {
self.iter().map(|(_x, y)| y).min().unwrap()
}
/// Smallest occupied column. Panics if the unit has no cells.
pub fn border_left(&self) -> i32 {
self.iter().map(|(x, _y)| x).min().unwrap()
}
/// Largest occupied column. Panics if the unit has no cells.
pub fn border_right(&self) -> i32 {
self.iter().map(|(x, _y)| x).max().unwrap()
}
/// Width of the unit's bounding box in columns (always >= 1).
pub fn width(&self) -> i32 {
let result = self.border_right() - self.border_left() + 1;
assert!(result > 0);
result
}
/// Number of cells in the unit.
pub fn size(&self) -> i32 {
self.cells.len() as i32
}
/// Returns a new unit with command `c` applied to the position; the cell
/// template is shared, not copied.
pub fn apply(&self, c: &Command) -> Unit<'a> {
match c {
&Command::Move(d) => {
assert!(d == Direction::YX || // West
d == Direction::XY || // East
d == Direction::ZY || // SE
d == Direction::ZX); // SW
let position = self.position + d.to_coordinate();
Unit { cells: self.cells, position: position}
},
&Command::Rotate(a) => {
// Read as clockwise and counterclockwise.
assert!(a == Angle::Right || a == Angle::Left);
let position = self.position + a;
Unit { cells: self.cells, position: position}
}
}
}
/// Translates the unit so its first (corner) cell lands on `to`.
/// Panics if the unit has no cells.
pub fn move_corner_to<C>(&self, to: C) -> Unit<'a> where C: ToCoordinate + Copy {
let cell = Unit::apply_to_coord(&self.position, self.cells.first().unwrap());
let diff = to.to_coordinate() - cell;
Unit {
cells: self.cells,
position: self.position + diff
}
}
/// Translates the unit so its position (pivot) lands on `target`.
pub fn move_to<C>(&self, target: C) -> Unit<'a>
where C: ToCoordinate + Copy
{
let diff = target.to_coordinate() - self.position.to_coordinate();
Unit {
cells: self.cells,
position: self.position + diff
}
}
}
|
mod base;
mod chain;
|
pub mod label;
pub mod progress_bar;
/// Something that can draw itself at a screen coordinate.
pub trait Renderable {
/// Draws the object at column `x`, row `y` (units/origin are up to the
/// implementor — confirm against `label`/`progress_bar`).
fn draw(&self, x : usize, y : usize);
}
#![allow(unknown_lints)] // for clippy
#![warn(fat_ptr_transmutes)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
// TODO #![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_results)]
#![warn(variant_size_differences)]
#[macro_use]
extern crate clap;
extern crate futures;
extern crate ipfs_client;
extern crate tokio_core;
extern crate mhash;
extern crate maddr;
mod util;
mod context;
mod subcommands;
use clap::{ App, AppSettings };
use context::Context;
/// Entry point: builds the clap CLI definition, parses argv, and dispatches
/// to the selected subcommand.
fn main() {
let matches = App::new("IPFS Daemon CLI")
.author(crate_authors!())
.version(crate_version!())
// Show help when invoked with no arguments; drop per-subcommand -V.
.settings(&[
AppSettings::ArgRequiredElseHelp,
AppSettings::VersionlessSubcommands,
])
.global_settings(&[
AppSettings::ColoredHelp,
AppSettings::DeriveDisplayOrder,
])
.subcommands(subcommands::subcommands())
.args(&*Context::args())
.get_matches();
subcommands::run(&mut Context::new(&matches), matches);
}
|
pub mod attrib;
pub mod auth_rule;
pub mod author_agreement;
pub mod cred_def;
pub mod node;
pub mod nym;
pub mod pool;
pub mod rev_reg;
pub mod rev_reg_def;
pub mod rich_schema;
pub mod schema;
pub mod txn;
pub mod validator_info;
pub use super::constants;
pub use super::identifiers;
pub use crate::common::did;
pub use crate::common::verkey;
pub use crate::pool::ProtocolVersion;
use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};
use serde;
use serde_json;
use crate::common::error::prelude::*;
use did::{DidValue, ShortDidValue};
/// Transaction Author Agreement acceptance data attached to a request.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct TxnAuthrAgrmtAcceptanceData {
    // Mechanism by which the agreement was accepted.
    pub mechanism: String,
    // Digest of the agreement text that was accepted.
    pub taa_digest: String,
    // Acceptance time -- presumably seconds since the UNIX epoch; TODO confirm against ledger spec.
    pub time: u64,
}
/// Generic ledger request envelope wrapping an operation payload `T`.
/// Optional fields are omitted from the serialized JSON when unset.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Request<T: serde::Serialize> {
    // Unique request identifier (typically produced by `get_request_id`).
    pub req_id: u64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub identifier: Option<ShortDidValue>,
    // The operation-specific payload.
    pub operation: T,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub protocol_version: Option<usize>,
    // Single signature, mutually exclusive in practice with `signatures`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub signature: Option<String>,
    // Multi-signature map, keyed by signer identifier.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub signatures: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub taa_acceptance: Option<TxnAuthrAgrmtAcceptanceData>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub endorser: Option<ShortDidValue>,
}
impl<T: serde::Serialize> Request<T> {
    /// Assemble a request with the mandatory fields; signatures, TAA
    /// acceptance and endorser all start out unset.
    pub fn new(
        req_id: u64,
        operation: T,
        identifier: Option<ShortDidValue>,
        protocol_version: Option<usize>,
    ) -> Request<T> {
        Request {
            req_id,
            identifier,
            operation,
            protocol_version,
            signature: None,
            signatures: None,
            taa_acceptance: None,
            endorser: None,
        }
    }
    /// Build a request and serialize it to a JSON value in one step.
    pub fn build_request(
        req_id: u64,
        operation: T,
        identifier: Option<&DidValue>,
        protocol_version: Option<usize>,
    ) -> VdrResult<serde_json::Value> {
        // FIXME - verify that qualified DID is using a known DID method
        let request = Request::new(
            req_id,
            operation,
            identifier.map(DidValue::to_short),
            protocol_version,
        );
        serde_json::to_value(&request).with_input_err("Cannot serialize request")
    }
}
/// Implemented by every concrete ledger request payload type.
pub trait RequestType: serde::Serialize {
    /// Ledger transaction type code for this request.
    fn get_txn_type<'a>() -> &'a str;
    /// State-proof key for this request, if it supports state proofs.
    /// Defaults to `None` (no state-proof lookup).
    fn get_sp_key(&self, _protocol_version: ProtocolVersion) -> VdrResult<Option<Vec<u8>>> {
        Ok(None)
    }
    /// (from, to) timestamps used for state-proof freshness checks;
    /// defaults to no constraint in either direction.
    fn get_sp_timestamps(&self) -> VdrResult<(Option<u64>, Option<u64>)> {
        Ok((None, None))
    }
}
/// Map a numeric state-proof marker `code` to its character form.
/// Protocol 1.3 used the raw byte; later protocols use the ASCII digit.
pub fn get_sp_key_marker(code: u8, protocol_version: ProtocolVersion) -> char {
    let marker = if protocol_version == ProtocolVersion::Node1_3 {
        code
    } else {
        code + 48 // shift into the ASCII digit range
    };
    marker as char
}
/// Generate a request identifier from the current wall-clock time,
/// expressed as nanoseconds since the UNIX epoch (truncated to u64).
pub fn get_request_id() -> u64 {
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("Time has gone backwards");
    since_epoch.as_nanos() as u64
}
|
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
use std::time::{Duration, Instant};
/// A simple countdown timer with one-second resolution.
pub struct Timer {
    // Instant of the most recent (re)start.
    start_time: Instant,
    // How long the timer runs before `timeout()` becomes true.
    duration: Duration,
}
impl Timer {
    /// Create a timer lasting `timer_duration` seconds, started immediately.
    pub fn new(timer_duration: u64) -> Self {
        // Field-init shorthand and expression return replace the old
        // `timer` temporary and trailing `return timer;`.
        Timer {
            start_time: Instant::now(),
            duration: Duration::new(timer_duration, 0),
        }
    }
    /// Restart the countdown from the current instant.
    pub fn start(&mut self) {
        self.start_time = Instant::now();
    }
    /// True once the configured duration has fully elapsed since the last start.
    pub fn timeout(&self) -> bool {
        Instant::now() > self.start_time + self.duration
    }
}
|
use std::process::Command;
use std::env;
/// Launch the embedded Ruby script (`main.rb`) via `ruby -e`, forwarding
/// every CLI argument except the program name itself.
fn main()
{
    let mut args = vec![
        "-e".to_string(),
        include_str!("main.rb").to_string()
    ];
    // `skip(1)` drops argv[0]; this replaces the old manual index counter.
    args.extend(env::args().skip(1));
    Command::new("ruby")
        .args(&args)
        .status()
        .expect("failed to execute process");
}
|
/// Print the value produced by `f2`.
fn main() {
    println!("{}", f2());
}
/// Produce the constant value printed by `main`.
fn f2() -> i32 {
    let answer = 3;
    answer
}
|
/// Axis-aligned rectangle: an origin position plus (width, height).
#[derive(Debug)]
pub struct Bounds {
    position: Vector2D,
    dimensions: (f64, f64)
}
impl Bounds {
    /// Build a rectangle from its origin and `(width, height)` pair.
    pub fn new(position : Vector2D, dimensions : (f64, f64)) -> Self {
        Self {
            position,
            dimensions
        }
    }
    /// X coordinate of the origin.
    pub fn x(&self) -> f64 {
        self.position.x()
    }
    /// Y coordinate of the origin.
    pub fn y(&self) -> f64 {
        self.position.y()
    }
    /// Rectangle width.
    pub fn width(&self) -> f64 {
        self.dimensions.0
    }
    /// Rectangle height.
    pub fn height(&self) -> f64 {
        self.dimensions.1
    }
    /// Right edge (x + width).
    pub fn x_bounds(&self) -> f64 {
        self.position.x() + self.width()
    }
    /// Bottom edge (y + height).
    pub fn y_bounds(&self) -> f64 {
        self.position.y() + self.height()
    }
    /// Rectangle as `[x, y, width, height]` for rendering APIs.
    pub fn get_entity_rect(&self) -> [f64; 4] {
        [self.x(), self.y(), self.width(), self.height()]
    }
    /// Borrow the current position. Fixed to take `&self`: the old `&mut self`
    /// receiver forced callers to hold a mutable borrow just to read; existing
    /// call sites still compile unchanged.
    pub fn get_position(&self) -> &Vector2D {
        &self.position
    }
    /// Replace the origin position.
    pub fn set_position(&mut self, new_position: Vector2D) {
        self.position = new_position;
    }
    /// True when `me`'s origin point lies strictly inside `other`'s rectangle.
    /// NOTE(review): despite the name, this tests only `me`'s origin, not full
    /// containment of `me`'s rectangle. (Previous comment claiming `other` is
    /// a 4-item array was wrong -- both arguments are `Bounds`.)
    pub fn contains(me: &Bounds, other: &Bounds) -> bool {
        me.position.x() > other.x()
            && me.position.x() < other.x_bounds()
            && me.position.y() > other.y()
            && me.position.y() < other.y_bounds()
    }
}
/// A simple 2-D vector with `f64` components.
#[derive(Debug)]
pub struct Vector2D {
    x: f64,
    y: f64,
}
impl Vector2D {
    /// Build a vector from its components.
    pub fn new(x: f64, y: f64) -> Self {
        Self { x, y }
    }
    /// Horizontal component.
    pub fn x(&self) -> f64 {
        self.x
    }
    /// Vertical component.
    pub fn y(&self) -> f64 {
        self.y
    }
    /// Component-wise sum of two vectors.
    pub fn add(vec1: &Vector2D, vec2: &Vector2D) -> Vector2D {
        Vector2D::new(vec1.x + vec2.x, vec1.y + vec2.y)
    }
    /// Overwrite the horizontal component.
    pub fn set_x(&mut self, x: f64) {
        self.x = x;
    }
    /// Overwrite the vertical component.
    pub fn set_y(&mut self, y: f64) {
        self.y = y;
    }
    /// Vector pointing in the opposite direction.
    pub fn reverse(vector: &Vector2D) -> Vector2D {
        Vector2D::new(vector.x * -1.0, vector.y * -1.0)
    }
}
// Human-readable rendering, e.g. "Vector2 Position: (1, 2)".
impl std::fmt::Display for Vector2D {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Vector2 Position: ({}, {})", self.x, self.y)
    }
}
use crate::arg;
use crate::{Message, MessageType};
use crate::message::MatchRule;
use crate::strings::{BusName, Path, Interface, Member};
/// Helper methods for structs representing a Signal
///
/// # Example
///
/// Listen to InterfacesRemoved signal from org.bluez.obex.
///
/// ```rust,no_run
/// use dbus::blocking::Connection;
/// use dbus::message::SignalArgs;
/// use dbus::blocking::stdintf::org_freedesktop_dbus::ObjectManagerInterfacesRemoved as IR;
/// use std::time::Duration;
///
/// let mut c = Connection::new_session().unwrap();
/// // Add a match for this signal
/// let mr = IR::match_rule(Some(&"org.bluez.obex".into()), None).static_clone();
/// c.add_match(mr, |ir: IR, _, _| {
/// println!("Interfaces {:?} have been removed from bluez on path {}.", ir.interfaces, ir.object);
/// true
/// });
///
/// // Wait for the signal to arrive.
/// loop { c.process(Duration::from_millis(1000)).unwrap(); }
/// ```
pub trait SignalArgs {
    /// D-Bus name of signal
    const NAME: &'static str;
    /// D-Bus name of interface this signal belongs to
    const INTERFACE: &'static str;
    /// Returns a message that emits the signal.
    fn to_emit_message(&self, path: &Path) -> Message where Self: arg::AppendAll {
        let mut m = Message::signal(path, &Interface::from(Self::INTERFACE), &Member::from(Self::NAME));
        // Serialize this struct's fields into the message body.
        arg::AppendAll::append(self, &mut arg::IterAppend::new(&mut m));
        m
    }
    /// If the message is a signal of the correct type, return its arguments, otherwise return None.
    ///
    /// This does not check sender and path of the message, which is likely relevant to you as well.
    #[allow(clippy::if_same_then_else)]
    fn from_message(m: &Message) -> Option<Self> where Self: Sized + arg::ReadAll {
        // Filter on message type, then interface, then member; only a full
        // match is decoded into Self.
        if m.msg_type() != MessageType::Signal { None }
        else if m.interface().as_ref().map(|x| &**x) != Some(Self::INTERFACE) { None }
        else if m.member().as_ref().map(|x| &**x) != Some(Self::NAME) { None }
        else {
            // A decoding failure (wrong argument types) also yields None.
            arg::ReadAll::read(&mut m.iter_init()).ok()
        }
    }
    /// Returns a match rule matching this signal.
    ///
    /// If sender and/or path is None, matches all senders and/or paths.
    fn match_rule<'a>(sender: Option<&'a BusName>, path: Option<&'a Path>) -> MatchRule<'a> {
        let mut m: MatchRule = Default::default();
        m.sender = sender.cloned();
        m.path = path.cloned();
        m.msg_type = Some(MessageType::Signal);
        m.interface = Some(Self::INTERFACE.into());
        m.member = Some(Self::NAME.into());
        m
    }
    /// Returns a string that can be sent to `Connection::add_match`.
    ///
    /// If sender and/or path is None, matches all senders and/or paths.
    fn match_str(sender: Option<&BusName>, path: Option<&Path>) -> String {
        Self::match_rule(sender, path).match_str()
    }
}
// Round-trip test: emit an InterfacesRemoved signal to ourselves and verify
// that the registered match callback decodes the same arguments back.
#[test]
fn intf_removed() {
    use crate::blocking::LocalConnection;
    use crate::blocking::stdintf::org_freedesktop_dbus::ObjectManagerInterfacesRemoved as IR;
    use std::{time::Duration, cell::Cell, rc::Rc};
    let mut c = LocalConnection::new_session().unwrap();
    // Match only signals sent by ourselves on the /hello path.
    let mr = IR::match_rule(Some(&c.unique_name().into()), Some(&"/hello".into())).static_clone();
    println!("Match: {:?}", mr);
    let ir = IR { object: "/hello".into(), interfaces: vec!("ABC.DEF".into(), "GHI.JKL".into()) };
    let ir_msg = ir.to_emit_message(&"/hello".into());
    // Shared flag flipped inside the callback so the pump loop can stop.
    let done = Rc::new(Cell::new(false));
    let done2 = done.clone();
    c.add_match(mr, move |ir2: IR, _, _| {
        assert_eq!(ir2.object, ir.object);
        assert_eq!(ir2.interfaces, ir.interfaces);
        done2.set(true);
        // Returning false removes the match after the first delivery.
        false
    }).unwrap();
    use crate::channel::Sender;
    c.send(ir_msg).expect("Failed to send message");
    // Pump the connection until the callback has run.
    while !done.get() { c.process(Duration::from_millis(1000)).unwrap(); }
}
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Simple bitset library like C++.
/// A fixed-size set of bits packed into 64-bit blocks.
pub struct BitSet {
    // Backing storage; bit i lives in bits[i / 64] at position i % 64.
    bits: Vec<u64>,
    // Logical number of bits the set holds.
    nbits: usize,
}
impl Default for BitSet {
    /// An empty bitset holding zero bits.
    #[inline]
    fn default() -> Self {
        BitSet {
            bits: Vec::new(),
            nbits: 0,
        }
    }
}
/// Population count: the number of `1` bits in `u`.
fn bit_count64(u: u64) -> u64 {
    // The previous hand-rolled SWAR popcount was equivalent to the
    // intrinsic-backed standard-library method; prefer the stdlib.
    u64::from(u.count_ones())
}
// Private functions
impl BitSet {
fn blocks(&self) -> usize {
if self.nbits % 64 == 0 {
self.nbits / 64
}
else {
self.nbits / 64 + 1
}
}
}
// Public functions
impl BitSet {
/// Create a new BitSet with *ZERO* bit.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::new();
/// ```
pub fn new() -> Self {
Self::default()
}
/// Create a new BitSet with `nbits` bits with all bit initialized by `0`.
///
/// # Arguments
///
/// * `nbits` - A integer, which value is the bits count `BitSet` will hold.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::with_capacity(100);
/// ```
pub fn with_capacity(nbits: usize) -> Self {
let mut bitset = BitSet {
bits: Vec::new(),
nbits: nbits
};
for _ in 0..bitset.blocks() {
bitset.bits.push(0);
}
bitset
}
/// Create a new BitSet from a `u64` value, and initialize all bits by `0`.
///
/// # Arguments
///
/// * `v` - A `u64` value.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::from_u64(2);
/// assert!(bs.test(0) == false);
/// assert!(bs.test(1) == true);
/// ```
pub fn from_u64(v: u64) -> Self {
BitSet {
bits: vec![v],
nbits: 64
}
}
/// Create a new BitSet from a `u64` `vec`, and initialize all bits by `0`.
///
/// # Arguments
///
/// * `vec` - A `u64` vector.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let vec = vec![u64::max_value(), 0, u64::max_value()];
/// let bs = BitSet::from_vec64(&vec);
/// assert!(bs.test(63) == true);
/// assert!(bs.test(64) == false);
/// ```
pub fn from_vec64(vec: &Vec<u64>) -> Self {
BitSet {
bits: vec.to_vec(),
nbits: vec.len() * 64
}
}
/// Return the actual bits count.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::with_capacity(100);
/// assert!(bs.size() == 100);
/// ```
pub fn size(&self) -> usize {
self.nbits
}
/// Return the count of `1`.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::with_capacity(100);
/// assert!(bs.count() == 0);
/// ```
pub fn count(&self) -> u64 {
(0..self.blocks()).map(|n| bit_count64(self.bits[n]))
.fold(0, |sum, i| sum + i)
}
/// Return if the given bit index has been set to `1`.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::with_capacity(100);
/// assert!(bs.test(99) == false);
/// ```
pub fn test(&self, bit_idx: usize) -> bool {
let (block_idx, mod_bit_idx) = (bit_idx / 64, bit_idx % 64);
let n: u64 = self.bits[block_idx];
(n >> mod_bit_idx) & 0x1 == 0x1
}
/// Return if there is one bit has been set to `1` in the whole bitset..
///
/// # Example
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::with_capacity(100);
/// assert!(bs.any() == false);
/// ```
pub fn any(&self) -> bool {
for i in 0..self.blocks() {
if self.bits[i] != 0 {
return true;
}
}
false
}
/// Return if all bits are set to `0`.
///
/// # Example
/// ```
/// use bitset::BitSet;
///
/// let bs = BitSet::with_capacity(100);
/// assert!(bs.none() == true);
/// ```
pub fn none(&self) -> bool {
!self.any()
}
// // bit vec operations
// fn union(&mut self, vec: &Vec<u64>) {
// //TODO
// }
// fn intersect(&mut self, vec: Vec<u64>) {
// //TODO
// }
// bit operations
/// Set the bit specified by `bit_idx` to `v`, which is `true` or `false`.
///
/// # Arguments
///
/// * `bit_idx` - the bit index we want to set.
/// * `v` - the value we want to set. `true` or `false`.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let mut bs = BitSet::with_capacity(100);
/// bs.set(99, true);
/// assert!(bs.test(99) == true);
/// ```
pub fn set(&mut self, bit_idx: usize, v: bool) {
let (block_idx, mod_bit_idx) = (bit_idx / 64, bit_idx % 64);
if let Some(n) = self.bits.get_mut(block_idx) {
if v {
*n |= 0x1 << mod_bit_idx;
}
else {
*n &= !(0x1 << mod_bit_idx);
}
}
}
/// Reset all bits to `0`.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let mut bs = BitSet::with_capacity(100);
/// bs.set(99, true);
/// assert!(bs.test(99) == true);
/// bs.reset();
/// assert!(bs.test(99) == false);
/// ```
pub fn reset(&mut self) {
for i in 0..self.blocks() {
self.bits[i] = 0;
}
}
/// Flip the bit specified by `bit_idx` to the reverse value.
/// If the bit value is `true`, then it will be flipped to `false`.
/// The other case is like the same.
///
/// # Arguments
///
/// `bit_idx` - the index of the bit we want to flip.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let mut bs = BitSet::with_capacity(100);
/// assert!(bs.test(99) == false);
/// bs.flip(99);
/// assert!(bs.test(99) == true);
/// ```
pub fn flip(&mut self, bit_idx: usize) {
let (block_idx, mod_bit_idx) = (bit_idx / 64, bit_idx % 64);
if let Some(n) = self.bits.get_mut(block_idx) {
if (*n >> mod_bit_idx) & 0x1 == 0x1 {
*n &= !(0x1 << mod_bit_idx);
}
else {
*n |= 0x1 << mod_bit_idx;
}
}
}
/// Flip all bits in the bitset. It may run time-costly.
///
/// # Example
///
/// ```
/// use bitset::BitSet;
///
/// let mut bs = BitSet::with_capacity(100);
/// bs.flip_all();
/// for i in 0..100 {
/// assert!(bs.test(i) == true);
/// }
/// bs.flip_all();
/// for i in 0..100 {
/// assert!(bs.test(i) == false);
/// }
/// ```
pub fn flip_all(&mut self) {
for i in 0..self.blocks() {
self.bits[i] = !self.bits[i];
}
}
}
// Unit tests covering popcount, construction, and all bit operations.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_bit_count64() {
        assert!(bit_count64(0) == 0);
        assert!(bit_count64(1) == 1);
        assert!(bit_count64(3) == 2);
        assert!(bit_count64(u64::max_value()) == 64);
    }
    #[test]
    fn test_new() {
        // A fresh bitset holds zero bits and therefore counts zero ones.
        let bitset = BitSet::new();
        assert!(bitset.count() == 0);
        assert!(bitset.size() == 0);
    }
    #[test]
    fn test_with_capacity() {
        let mut bitset = BitSet::with_capacity(100);
        // set/reset/test
        assert!(bitset.test(99) == false);
        bitset.set(0, true);
        assert!(bitset.test(0) == true);
        bitset.set(0, false);
        assert!(bitset.test(0) == false);
        bitset.set(99, true);
        assert!(bitset.test(99) == true);
        bitset.set(99, false);
        assert!(bitset.test(99) == false);
        bitset.reset();
        bitset.set(0, true);
        assert!(bitset.test(0) == true);
        // any/none
        bitset.reset();
        assert!(bitset.any() == false);
        assert!(bitset.none() == true);
        bitset.flip(0);
        assert!(bitset.any() == true);
        assert!(bitset.none() == false);
        // flip
        bitset.reset();
        bitset.set(99, true);
        assert!(bitset.test(99) == true);
        bitset.flip(99);
        assert!(bitset.test(99) == false);
        bitset.flip(99);
        assert!(bitset.test(99) == true);
        // flip_all
        bitset.reset();
        bitset.flip_all();
        for i in 0..100 {
            assert!(bitset.test(i) == true);
        }
        bitset.flip_all();
        for i in 0..100 {
            assert!(bitset.test(i) == false);
        }
    }
}
|
use std::cmp::Ordering;
use std::collections::HashMap;
use crate::solutions::Solution;
use crate::util::int;
/// Solver for Advent of Code 2016, day 4 (security through obscurity).
pub struct Day4 {}
impl Solution for Day4 {
    /// Part 1: sum the sector ids of rooms whose stored checksum matches
    /// the checksum computed from their name.
    fn part1(&self, input: String) {
        let mut total = 0;
        for room in input.split('\n').map(Room::new) {
            if room.calculate_checksum() == room.checksum {
                total += room.sector_id;
            }
        }
        println!("{}", total);
    }
    /// Part 2: decrypt each room name with its sector-id shift cipher and
    /// print the sector id of the north-pole storage room.
    fn part2(&self, input: String) {
        let found = input.split('\n').map(Room::new).find(|room| {
            let decrypted: String =
                room.name.chars().map(|c| shift(c, room.sector_id)).collect();
            decrypted == "northpole object storage"
        });
        if let Some(room) = found {
            println!("{}", room.sector_id);
        }
    }
}
/// Caesar-shift a lowercase letter forward by `by` positions (mod 26);
/// dashes decrypt to spaces.
fn shift(c: char, by: i32) -> char {
    if c == '-' {
        return ' ';
    }
    let index = c as i32 - 'a' as i32;
    let rotated = (index + by) % 26;
    ('a' as i32 + rotated) as u8 as char
}
/// A parsed room entry of the form `name-with-dashes-sectorid[checksum]`,
/// borrowing slices out of the input line.
struct Room<'a> {
    // Encrypted name (still contains the dash separators).
    name: &'a str,
    // The five-character checksum from inside the brackets.
    checksum: &'a str,
    sector_id: i32,
}
impl Room<'_> {
    /// Parse `"name-parts-sector[checksum]"` into a `Room`.
    /// `rsplitn(2, '-')` splits only at the *last* dash, so the name itself
    /// may contain dashes.
    fn new(room: &str) -> Room {
        let mut parts = room.rsplitn(2, '-');
        // `end` holds "sector[checksum" halves after splitting at '['.
        let mut end = parts.next().unwrap().split('[');
        let name = parts.next().unwrap();
        let sector_id = int(end.next().unwrap()).unwrap();
        let checksum = end.next().unwrap();
        // Drop the trailing ']'.
        let checksum = &checksum[..checksum.len() - 1];
        Room {
            name,
            checksum,
            sector_id,
        }
    }
    /// Compute the expected checksum: the five most frequent letters of the
    /// name, with ties broken alphabetically.
    fn calculate_checksum(&self) -> String {
        let mut counts = HashMap::new();
        for c in self.name.chars().filter(|&c| c != '-') {
            counts.insert(c, 1 + counts.get(&c).unwrap_or(&0));
        }
        // Invert to (count, letter) pairs so we can sort by count first.
        let mut counts: Vec<(&u32, &char)> = counts.iter().map(|(a, b)| (b, a)).collect();
        counts.sort_unstable_by(|(an, ac), (bn, bc)| {
            // Descending by count, then ascending alphabetically on ties.
            let num_cmp = bn.cmp(an);
            if let Ordering::Equal = num_cmp {
                ac.cmp(bc)
            } else {
                num_cmp
            }
        });
        counts[..5].iter().map(|(_, &letter)| letter).collect()
    }
}
|
use crate::schema::users;
use chrono::{NaiveDateTime, Local};
use diesel::ExpressionMethods;
/// Insertable subset of the `users` table used when creating a row.
/// Field names are camelCase to mirror the database column names.
#[derive(Insertable, Deserialize, AsChangeset)]
#[table_name="users"]
pub struct NewUser {
    pub firstName: String,
    pub lastName: String,
    pub playerNumber: String,
    // Row creation timestamp (naive local time).
    pub created: NaiveDateTime
}
impl NewUser {
    /// Insert this user into the database and return the created row.
    pub fn create(&self) -> Result<User, diesel::result::Error> {
        use diesel::RunQueryDsl;
        use crate::db_connection::establish_connection;
        let connection = establish_connection();
        // Clone the fields into an owned NewUser: the Insertable derive
        // expects owned `String`s rather than `&str` references.
        let new_user = NewUser {
            // BUG FIX: previously this copied `self.lastName` into
            // `firstName`, silently corrupting every created user.
            firstName: self.firstName.to_string(),
            lastName: self.lastName.to_string(),
            playerNumber: self.playerNumber.to_string(),
            created: Local::now().naive_local()
        };
        diesel::insert_into(users::table)
            .values(&new_user)
            .get_result(&connection)
    }
}
/// Newtype wrapper around a page of users, mainly for (de)serialization.
#[derive(Serialize, Deserialize)]
pub struct UserList(pub Vec<User>);
impl UserList {
    /// Load up to the first 10 users from the database.
    ///
    /// Panics if the query fails (connection or schema errors).
    pub fn list() -> Self {
        use diesel::RunQueryDsl;
        use diesel::QueryDsl;
        use crate::schema::users::dsl::*;
        use crate::db_connection::establish_connection;
        let connection = establish_connection();
        let loaded = users
            .limit(10)
            .load::<User>(&connection)
            .expect("Error loading users");
        UserList(loaded)
    }
}
use askama::Template;
/// Askama template rendering `templates/user.html` with a list of users.
#[derive(Template)]
#[template(path = "user.html")]
pub struct UsersTemplate {
    pub users: Vec<User>
}
impl UsersTemplate {
    /// Load the 10 most recently created users and wrap them in the template.
    ///
    /// Panics if the query fails (connection or schema errors).
    pub fn latest() -> Self {
        use diesel::RunQueryDsl;
        use diesel::QueryDsl;
        use crate::schema::users::dsl::*;
        use crate::db_connection::establish_connection;
        let connection = establish_connection();
        let result = users
            .limit(10)
            .order(created.desc())
            .load::<User>(&connection)
            .expect("Error loading users");
        // Expression return replaces the non-idiomatic trailing `return ...;`.
        UsersTemplate { users: result }
    }
}
/// Full row of the `users` table. CamelCase field names mirror the
/// database column names (hence the non-snake-case identifiers).
#[derive(Queryable, Serialize, Deserialize, AsChangeset, Insertable)]
pub struct User {
    // Primary key.
    pub Id: i32,
    pub playerNumber: String,
    pub firstName: String,
    pub lastName: String,
    pub tier: Option<i32>,
    pub address1: String,
    pub city: String,
    pub zip: Option<String>,
    pub country: Option<String>,
    pub email: Option<String>,
    pub id3: Option<String>,
    // Presumably a 0/1 ban flag stored as an int -- TODO confirm schema.
    pub isBanned: Option<i32>,
    pub latitude: Option<f64>,
    pub longitude: Option<f64>,
    pub gender: Option<i32>,
    pub created: NaiveDateTime
}
/// Incoming webhook payload: a model name plus the affected user row.
#[derive(Serialize, Deserialize)]
pub struct Webhook {
    // Name of the model the webhook refers to -- presumably "user";
    // confirm against the sending service.
    pub model: String,
    pub data: User,
}
impl User {
pub fn find(Id: &i32) -> Result<User, diesel::result::Error> {
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use crate::db_connection::establish_connection;
let connection = establish_connection();
users::table.find(Id).first(&connection)
}
pub fn destroy(Id: &i32) -> Result<(), diesel::result::Error> {
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use crate::schema::users::dsl;
use crate::db_connection::establish_connection;
let connection = establish_connection();
diesel::delete(dsl::users.find(Id)).execute(&connection)?;
Ok(())
}
pub fn update(Id: &i32, user: &User) -> Result<(), diesel::result::Error> {
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use crate::schema::users::dsl;
use crate::db_connection::establish_connection;
let connection = establish_connection();
diesel::update(dsl::users.find(Id))
.set(user)
.execute(&connection)?;
Ok(())
}
pub fn upsert(webhook: &Webhook) -> Result<(), diesel::result::Error> {
use diesel::RunQueryDsl;
use crate::db_connection::establish_connection;
let connection = establish_connection();
let user = &webhook.data;
diesel::insert_into(users::table)
.values(user)
.on_conflict(users::Id)
.do_update()
.set(user)
.execute(&connection)?;
Ok(())
}
} |
/// A lightweight entity handle identified only by a numeric id.
pub struct Entity {
    // Unique identifier for this entity.
    id: u32,
    // TODO(orglofch): Add optional debug information.
}
|
use error_chain::error_chain;
// Generate this crate's Error/ErrorKind/Result types, wrapping the listed
// foreign error types so `?` converts them automatically.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        // NOTE(review): "Tunstenite" looks like a typo for "Tungstenite",
        // but the variant name is public API -- renaming would break callers.
        Tunstenite(tungstenite::error::Error);
        Mpsc(futures::channel::mpsc::SendError);
    }
}
|
/// Run-length encode `source`: runs of length >= 2 become "<count><char>",
/// single characters are emitted as-is.
pub fn encode(source: &str) -> String {
    // Append a NUL sentinel so the pairwise scan flushes the final run.
    let with_sentinel = format!("{}{}", source, "\0");
    let mut run_len = 0;
    let mut encoded = String::new();
    // Walk adjacent pairs; `next` is the character *after* `current`
    // (the original misleadingly called it `prev`).
    for (current, next) in with_sentinel.chars().zip(with_sentinel.chars().skip(1)) {
        run_len += 1;
        if current == '\0' || current != next {
            if run_len <= 1 {
                encoded.push(current);
            } else {
                encoded += &format!("{}{}", run_len, current);
            }
            run_len = 0;
        }
    }
    encoded
}
/// Run-length decode: "<count><char>" expands to `count` copies of `char`;
/// characters without a preceding count are copied through unchanged.
pub fn decode(source: &str) -> String {
    let mut pending_digits = String::new();
    let mut decoded = String::new();
    for ch in source.chars() {
        if ch.is_digit(10) {
            // Accumulate multi-digit run lengths.
            pending_digits.push(ch);
        } else {
            match pending_digits.parse::<usize>() {
                Ok(run_len) => decoded.push_str(&ch.to_string().repeat(run_len)),
                // No digits pending (empty parse fails) -> literal char.
                Err(_) => decoded.push(ch),
            }
            pending_digits.clear();
        }
    }
    decoded
}
|
// svd2rust-generated reader/writer type aliases for the APBRSTR1
// (APB peripheral reset register 1) bit fields. Do not edit by hand.
#[doc = "Register `APBRSTR1` reader"]
pub type R = crate::R<APBRSTR1_SPEC>;
#[doc = "Register `APBRSTR1` writer"]
pub type W = crate::W<APBRSTR1_SPEC>;
#[doc = "Field `TIM2RST` reader - TIM2 timer reset"]
pub type TIM2RST_R = crate::BitReader;
#[doc = "Field `TIM2RST` writer - TIM2 timer reset"]
pub type TIM2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM3RST` reader - TIM3 timer reset"]
pub type TIM3RST_R = crate::BitReader;
#[doc = "Field `TIM3RST` writer - TIM3 timer reset"]
pub type TIM3RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM4RST` reader - TIM4 timer reset"]
pub type TIM4RST_R = crate::BitReader;
#[doc = "Field `TIM4RST` writer - TIM4 timer reset"]
pub type TIM4RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM6RST` reader - TIM6 timer reset"]
pub type TIM6RST_R = crate::BitReader;
#[doc = "Field `TIM6RST` writer - TIM6 timer reset"]
pub type TIM6RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM7RST` reader - TIM7 timer reset"]
pub type TIM7RST_R = crate::BitReader;
#[doc = "Field `TIM7RST` writer - TIM7 timer reset"]
pub type TIM7RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPUART2RST` reader - LPUART2RST"]
pub type LPUART2RST_R = crate::BitReader;
#[doc = "Field `LPUART2RST` writer - LPUART2RST"]
pub type LPUART2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART5RST` reader - USART5RST"]
pub type USART5RST_R = crate::BitReader;
#[doc = "Field `USART5RST` writer - USART5RST"]
pub type USART5RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART6RST` reader - USART6RST"]
pub type USART6RST_R = crate::BitReader;
#[doc = "Field `USART6RST` writer - USART6RST"]
pub type USART6RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FDCANRST` reader - FDCANRST"]
pub type FDCANRST_R = crate::BitReader;
#[doc = "Field `FDCANRST` writer - FDCANRST"]
pub type FDCANRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USBRST` reader - USBRST"]
pub type USBRST_R = crate::BitReader;
#[doc = "Field `USBRST` writer - USBRST"]
pub type USBRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI2RST` reader - SPI2 reset"]
pub type SPI2RST_R = crate::BitReader;
#[doc = "Field `SPI2RST` writer - SPI2 reset"]
pub type SPI2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI3RST` reader - SPI3 reset"]
pub type SPI3RST_R = crate::BitReader;
#[doc = "Field `SPI3RST` writer - SPI3 reset"]
pub type SPI3RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CRSRST` reader - CRSRST"]
pub type CRSRST_R = crate::BitReader;
#[doc = "Field `CRSRST` writer - CRSRST"]
pub type CRSRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART2RST` reader - USART2 reset"]
pub type USART2RST_R = crate::BitReader;
#[doc = "Field `USART2RST` writer - USART2 reset"]
pub type USART2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART3RST` reader - USART3 reset"]
pub type USART3RST_R = crate::BitReader;
#[doc = "Field `USART3RST` writer - USART3 reset"]
pub type USART3RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART4RST` reader - USART4 reset"]
pub type USART4RST_R = crate::BitReader;
#[doc = "Field `USART4RST` writer - USART4 reset"]
pub type USART4RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPUART1RST` reader - LPUART1 reset"]
pub type LPUART1RST_R = crate::BitReader;
#[doc = "Field `LPUART1RST` writer - LPUART1 reset"]
pub type LPUART1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C1RST` reader - I2C1 reset"]
pub type I2C1RST_R = crate::BitReader;
#[doc = "Field `I2C1RST` writer - I2C1 reset"]
pub type I2C1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C2RST` reader - I2C2 reset"]
pub type I2C2RST_R = crate::BitReader;
#[doc = "Field `I2C2RST` writer - I2C2 reset"]
pub type I2C2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C3RST` reader - I2C3RST reset"]
pub type I2C3RST_R = crate::BitReader;
#[doc = "Field `I2C3RST` writer - I2C3RST reset"]
pub type I2C3RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CECRST` reader - HDMI CEC reset"]
pub type CECRST_R = crate::BitReader;
#[doc = "Field `CECRST` writer - HDMI CEC reset"]
pub type CECRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UCPD1RST` reader - UCPD1 reset"]
pub type UCPD1RST_R = crate::BitReader;
#[doc = "Field `UCPD1RST` writer - UCPD1 reset"]
pub type UCPD1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UCPD2RST` reader - UCPD2 reset"]
pub type UCPD2RST_R = crate::BitReader;
#[doc = "Field `UCPD2RST` writer - UCPD2 reset"]
pub type UCPD2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBGRST` reader - Debug support reset"]
pub type DBGRST_R = crate::BitReader;
#[doc = "Field `DBGRST` writer - Debug support reset"]
pub type DBGRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PWRRST` reader - Power interface reset"]
pub type PWRRST_R = crate::BitReader;
#[doc = "Field `PWRRST` writer - Power interface reset"]
pub type PWRRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DAC1RST` reader - DAC1 interface reset"]
pub type DAC1RST_R = crate::BitReader;
#[doc = "Field `DAC1RST` writer - DAC1 interface reset"]
pub type DAC1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM2RST` reader - Low Power Timer 2 reset"]
pub type LPTIM2RST_R = crate::BitReader;
#[doc = "Field `LPTIM2RST` writer - Low Power Timer 2 reset"]
pub type LPTIM2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM1RST` reader - Low Power Timer 1 reset"]
pub type LPTIM1RST_R = crate::BitReader;
#[doc = "Field `LPTIM1RST` writer - Low Power Timer 1 reset"]
pub type LPTIM1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// svd2rust-generated read accessors: each method extracts one bit of the
// APBRSTR1 register value. Do not edit by hand.
impl R {
    #[doc = "Bit 0 - TIM2 timer reset"]
    #[inline(always)]
    pub fn tim2rst(&self) -> TIM2RST_R {
        TIM2RST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TIM3 timer reset"]
    #[inline(always)]
    pub fn tim3rst(&self) -> TIM3RST_R {
        TIM3RST_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TIM4 timer reset"]
    #[inline(always)]
    pub fn tim4rst(&self) -> TIM4RST_R {
        TIM4RST_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 4 - TIM6 timer reset"]
    #[inline(always)]
    pub fn tim6rst(&self) -> TIM6RST_R {
        TIM6RST_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - TIM7 timer reset"]
    #[inline(always)]
    pub fn tim7rst(&self) -> TIM7RST_R {
        TIM7RST_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 7 - LPUART2RST"]
    #[inline(always)]
    pub fn lpuart2rst(&self) -> LPUART2RST_R {
        LPUART2RST_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - USART5RST"]
    #[inline(always)]
    pub fn usart5rst(&self) -> USART5RST_R {
        USART5RST_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - USART6RST"]
    #[inline(always)]
    pub fn usart6rst(&self) -> USART6RST_R {
        USART6RST_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 12 - FDCANRST"]
    #[inline(always)]
    pub fn fdcanrst(&self) -> FDCANRST_R {
        FDCANRST_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - USBRST"]
    #[inline(always)]
    pub fn usbrst(&self) -> USBRST_R {
        USBRST_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - SPI2 reset"]
    #[inline(always)]
    pub fn spi2rst(&self) -> SPI2RST_R {
        SPI2RST_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - SPI3 reset"]
    #[inline(always)]
    pub fn spi3rst(&self) -> SPI3RST_R {
        SPI3RST_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - CRSRST"]
    #[inline(always)]
    pub fn crsrst(&self) -> CRSRST_R {
        CRSRST_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - USART2 reset"]
    #[inline(always)]
    pub fn usart2rst(&self) -> USART2RST_R {
        USART2RST_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - USART3 reset"]
    #[inline(always)]
    pub fn usart3rst(&self) -> USART3RST_R {
        USART3RST_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - USART4 reset"]
    #[inline(always)]
    pub fn usart4rst(&self) -> USART4RST_R {
        USART4RST_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - LPUART1 reset"]
    #[inline(always)]
    pub fn lpuart1rst(&self) -> LPUART1RST_R {
        LPUART1RST_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - I2C1 reset"]
    #[inline(always)]
    pub fn i2c1rst(&self) -> I2C1RST_R {
        I2C1RST_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - I2C2 reset"]
    #[inline(always)]
    pub fn i2c2rst(&self) -> I2C2RST_R {
        I2C2RST_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - I2C3RST reset"]
    #[inline(always)]
    pub fn i2c3rst(&self) -> I2C3RST_R {
        I2C3RST_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - HDMI CEC reset"]
    #[inline(always)]
    pub fn cecrst(&self) -> CECRST_R {
        CECRST_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - UCPD1 reset"]
    #[inline(always)]
    pub fn ucpd1rst(&self) -> UCPD1RST_R {
        UCPD1RST_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - UCPD2 reset"]
    #[inline(always)]
    pub fn ucpd2rst(&self) -> UCPD2RST_R {
        UCPD2RST_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - Debug support reset"]
    #[inline(always)]
    pub fn dbgrst(&self) -> DBGRST_R {
        DBGRST_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - Power interface reset"]
    #[inline(always)]
    pub fn pwrrst(&self) -> PWRRST_R {
        PWRRST_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - DAC1 interface reset"]
    #[inline(always)]
    pub fn dac1rst(&self) -> DAC1RST_R {
        DAC1RST_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Low Power Timer 2 reset"]
    #[inline(always)]
    pub fn lptim2rst(&self) -> LPTIM2RST_R {
        LPTIM2RST_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Low Power Timer 1 reset"]
    #[inline(always)]
    pub fn lptim1rst(&self) -> LPTIM1RST_R {
        LPTIM1RST_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// NOTE(review): svd2rust-generated writer proxies for APBRSTR1. Each method
// returns a typed field writer for one bit; the const generic is the bit
// offset. Kept byte-identical so the file remains regenerable from the SVD.
impl W {
    #[doc = "Bit 0 - TIM2 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim2rst(&mut self) -> TIM2RST_W<APBRSTR1_SPEC, 0> {
        TIM2RST_W::new(self)
    }
    #[doc = "Bit 1 - TIM3 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim3rst(&mut self) -> TIM3RST_W<APBRSTR1_SPEC, 1> {
        TIM3RST_W::new(self)
    }
    #[doc = "Bit 2 - TIM4 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim4rst(&mut self) -> TIM4RST_W<APBRSTR1_SPEC, 2> {
        TIM4RST_W::new(self)
    }
    #[doc = "Bit 4 - TIM6 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim6rst(&mut self) -> TIM6RST_W<APBRSTR1_SPEC, 4> {
        TIM6RST_W::new(self)
    }
    #[doc = "Bit 5 - TIM7 timer reset"]
    #[inline(always)]
    #[must_use]
    pub fn tim7rst(&mut self) -> TIM7RST_W<APBRSTR1_SPEC, 5> {
        TIM7RST_W::new(self)
    }
    #[doc = "Bit 7 - LPUART2RST"]
    #[inline(always)]
    #[must_use]
    pub fn lpuart2rst(&mut self) -> LPUART2RST_W<APBRSTR1_SPEC, 7> {
        LPUART2RST_W::new(self)
    }
    #[doc = "Bit 8 - USART5RST"]
    #[inline(always)]
    #[must_use]
    pub fn usart5rst(&mut self) -> USART5RST_W<APBRSTR1_SPEC, 8> {
        USART5RST_W::new(self)
    }
    #[doc = "Bit 9 - USART6RST"]
    #[inline(always)]
    #[must_use]
    pub fn usart6rst(&mut self) -> USART6RST_W<APBRSTR1_SPEC, 9> {
        USART6RST_W::new(self)
    }
    #[doc = "Bit 12 - FDCANRST"]
    #[inline(always)]
    #[must_use]
    pub fn fdcanrst(&mut self) -> FDCANRST_W<APBRSTR1_SPEC, 12> {
        FDCANRST_W::new(self)
    }
    #[doc = "Bit 13 - USBRST"]
    #[inline(always)]
    #[must_use]
    pub fn usbrst(&mut self) -> USBRST_W<APBRSTR1_SPEC, 13> {
        USBRST_W::new(self)
    }
    #[doc = "Bit 14 - SPI2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn spi2rst(&mut self) -> SPI2RST_W<APBRSTR1_SPEC, 14> {
        SPI2RST_W::new(self)
    }
    #[doc = "Bit 15 - SPI3 reset"]
    #[inline(always)]
    #[must_use]
    pub fn spi3rst(&mut self) -> SPI3RST_W<APBRSTR1_SPEC, 15> {
        SPI3RST_W::new(self)
    }
    #[doc = "Bit 16 - CRSRST"]
    #[inline(always)]
    #[must_use]
    pub fn crsrst(&mut self) -> CRSRST_W<APBRSTR1_SPEC, 16> {
        CRSRST_W::new(self)
    }
    #[doc = "Bit 17 - USART2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn usart2rst(&mut self) -> USART2RST_W<APBRSTR1_SPEC, 17> {
        USART2RST_W::new(self)
    }
    #[doc = "Bit 18 - USART3 reset"]
    #[inline(always)]
    #[must_use]
    pub fn usart3rst(&mut self) -> USART3RST_W<APBRSTR1_SPEC, 18> {
        USART3RST_W::new(self)
    }
    #[doc = "Bit 19 - USART4 reset"]
    #[inline(always)]
    #[must_use]
    pub fn usart4rst(&mut self) -> USART4RST_W<APBRSTR1_SPEC, 19> {
        USART4RST_W::new(self)
    }
    #[doc = "Bit 20 - LPUART1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lpuart1rst(&mut self) -> LPUART1RST_W<APBRSTR1_SPEC, 20> {
        LPUART1RST_W::new(self)
    }
    #[doc = "Bit 21 - I2C1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1rst(&mut self) -> I2C1RST_W<APBRSTR1_SPEC, 21> {
        I2C1RST_W::new(self)
    }
    #[doc = "Bit 22 - I2C2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn i2c2rst(&mut self) -> I2C2RST_W<APBRSTR1_SPEC, 22> {
        I2C2RST_W::new(self)
    }
    #[doc = "Bit 23 - I2C3RST reset"]
    #[inline(always)]
    #[must_use]
    pub fn i2c3rst(&mut self) -> I2C3RST_W<APBRSTR1_SPEC, 23> {
        I2C3RST_W::new(self)
    }
    #[doc = "Bit 24 - HDMI CEC reset"]
    #[inline(always)]
    #[must_use]
    pub fn cecrst(&mut self) -> CECRST_W<APBRSTR1_SPEC, 24> {
        CECRST_W::new(self)
    }
    #[doc = "Bit 25 - UCPD1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn ucpd1rst(&mut self) -> UCPD1RST_W<APBRSTR1_SPEC, 25> {
        UCPD1RST_W::new(self)
    }
    #[doc = "Bit 26 - UCPD2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn ucpd2rst(&mut self) -> UCPD2RST_W<APBRSTR1_SPEC, 26> {
        UCPD2RST_W::new(self)
    }
    #[doc = "Bit 27 - Debug support reset"]
    #[inline(always)]
    #[must_use]
    pub fn dbgrst(&mut self) -> DBGRST_W<APBRSTR1_SPEC, 27> {
        DBGRST_W::new(self)
    }
    #[doc = "Bit 28 - Power interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn pwrrst(&mut self) -> PWRRST_W<APBRSTR1_SPEC, 28> {
        PWRRST_W::new(self)
    }
    #[doc = "Bit 29 - DAC1 interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn dac1rst(&mut self) -> DAC1RST_W<APBRSTR1_SPEC, 29> {
        DAC1RST_W::new(self)
    }
    #[doc = "Bit 30 - Low Power Timer 2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lptim2rst(&mut self) -> LPTIM2RST_W<APBRSTR1_SPEC, 30> {
        LPTIM2RST_W::new(self)
    }
    #[doc = "Bit 31 - Low Power Timer 1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lptim1rst(&mut self) -> LPTIM1RST_W<APBRSTR1_SPEC, 31> {
        LPTIM1RST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Unsafe because a raw value can bypass the per-field writers; the caller
    // must ensure the value is valid for this register.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "APB peripheral reset register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apbrstr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apbrstr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APBRSTR1_SPEC;
// 32-bit register backing store.
impl crate::RegisterSpec for APBRSTR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`apbrstr1::R`](R) reader structure"]
impl crate::Readable for APBRSTR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apbrstr1::W`](W) writer structure"]
impl crate::Writable for APBRSTR1_SPEC {
    // No fields require writing 0/1 to leave them unmodified during `modify`.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APBRSTR1 to value 0"]
impl crate::Resettable for APBRSTR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// NOTE(review): svd2rust-generated (pre-0.18 style) reader for the
// CHAN_WORK[%s] register array; kept byte-identical for regenerability.
#[doc = "Reader of register CHAN_WORK[%s]"]
pub type R = crate::R<u32, super::CHAN_WORK>;
#[doc = "Reader of field `WORK`"]
pub type WORK_R = crate::R<u16, u16>;
#[doc = "Reader of field `CHAN_WORK_NEWVALUE_MIR`"]
pub type CHAN_WORK_NEWVALUE_MIR_R = crate::R<bool, bool>;
#[doc = "Reader of field `CHAN_WORK_UPDATED_MIR`"]
pub type CHAN_WORK_UPDATED_MIR_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bits 0:15 - SAR conversion working data of the channel. The data is written here right after sampling this channel."]
    #[inline(always)]
    pub fn work(&self) -> WORK_R {
        WORK_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bit 27 - mirror bit of corresponding bit in SAR_CHAN_WORK_NEWVALUE register"]
    #[inline(always)]
    pub fn chan_work_newvalue_mir(&self) -> CHAN_WORK_NEWVALUE_MIR_R {
        CHAN_WORK_NEWVALUE_MIR_R::new(((self.bits >> 27) & 0x01) != 0)
    }
    #[doc = "Bit 31 - mirror bit of corresponding bit in SAR_CHAN_WORK_UPDATED register"]
    #[inline(always)]
    pub fn chan_work_updated_mir(&self) -> CHAN_WORK_UPDATED_MIR_R {
        CHAN_WORK_UPDATED_MIR_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
|
use super::full_memory_dump::*;
use super::*;
use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::mem::size_of;
use std::mem::MaybeUninit;
use dataview::Pod;
use log::info;
use memflow::*;
use memflow_derive::*;
// "DU64" read as little-endian bytes; marks a valid 64-bit dump header.
pub const DUMP_VALID_DUMP64: u32 = 0x34365544;
// PE machine id for x86-64.
pub const IMAGE_FILE_MACHINE_AMD64: u32 = 0x8664;
/// A 64bit Microsoft Windows Coredump Header
///
/// Field offsets (hex, from the start of the header) are noted per field.
/// The layout is fixed at exactly 0x2000 bytes — see the const assertion
/// below the struct.
#[repr(C)]
#[derive(Copy, Clone, ByteSwap)]
pub struct CoreDumpHeader64 {
    pub signature: u32,                                       // 0x0000
    pub valid_dump: u32,                                      // 0x0004
    pub major_version: u32,                                   // 0x0008
    pub minor_version: u32,                                   // 0x000c
    pub directory_table_base: u64,                            // 0x0010
    pub pfn_data_base: u64,                                   // 0x0018
    pub ps_loaded_module_list: u64,                           // 0x0020
    pub ps_active_process_head: u64,                          // 0x0028
    pub machine_image_type: u32,                              // 0x0030
    pub number_processors: u32,                               // 0x0034
    pub bug_check_code: u32,                                  // 0x0038
    pub bug_check_parameter1: u64,                            // 0x0040
    pub bug_check_parameter2: u64,                            // 0x0048
    pub bug_check_parameter3: u64,                            // 0x0050
    pub bug_check_parameter4: u64,                            // 0x0058
    pub version_user: [u8; 32],                               // 0x0060
    pub kd_debugger_data_block: u64,                          // 0x0080
    pub physical_memory_block: PhysicalMemoryDescriptor<u64>, // 0x0088
    pub pad0: [u8; 176],                                      // 0x0344
    pub context_record: [u8; 3000],                           // 0x0348
    pub exception_record: [u8; 152],                          // EXCEPTION_RECORD64 - 0x0F00
    pub dump_type: u32,                                       // 0x0F98
    pub required_dump_space: u64,                             // 0x0FA0
    pub system_time: u64,                                     // 0x0FA8
    pub comment: [i8; 0x80],                                  // 0x0FB0 May not be present.
    pub system_up_time: u64,                                  // 0x1030
    pub mini_dump_fields: u32,                                // 0x1038
    pub secondary_data_state: u32,                            // 0x103c
    pub product_type: u32,                                    // 0x1040
    pub suite_mask: u32,                                      // 0x1044
    pub writer_status: u32,                                   // 0x1048
    pub unused0: u8,                                          // 0x104c
    pub kd_secondary_version: u8,                             // 0x104d only on W2K3 SP1 and up
    pub unused1: [u8; 2],                                     // 0x104e
    pub reserved0: [u8; 4016],                                // 0x1050
}
// Compile-time check that the header is exactly 0x2000 bytes; a size mismatch
// fails to compile instead of silently misreading dumps.
const _: [(); std::mem::size_of::<CoreDumpHeader64>()] = [(); 0x2000];
impl CoreDumpHeader64 {
    /// Creates a zero-initialized header, intended to be overwritten by a
    /// subsequent `read_exact` into its raw bytes.
    ///
    /// The previous implementation used `MaybeUninit::uninit().assume_init()`,
    /// which is undefined behavior even for plain-old-data fields; the
    /// all-zero bit pattern is valid for every field here, so `zeroed` gives
    /// the same "placeholder to be filled" semantics soundly.
    pub fn uninit() -> Self {
        // SAFETY: CoreDumpHeader64 is #[repr(C)] and contains only integers
        // and integer arrays, for which all-zero bytes are a valid value.
        unsafe { MaybeUninit::zeroed().assume_init() }
    }
}
// SAFETY: CoreDumpHeader64 is #[repr(C)] and every field is a plain integer
// or integer array, so reinterpreting its bytes is sound for this type.
unsafe impl Pod for CoreDumpHeader64 {}
/// Tries to parse a file handle as a Microsoft Windows 64bit Coredump.
///
/// # Errors
///
/// Returns a `Connector` error if the header cannot be read, if the
/// signature/valid-dump/machine-type fields do not identify an AMD64 crash
/// dump, or if the dump type is neither a full nor a bitmap dump.
pub fn parse_coredump64(file: &mut File) -> Result<MemoryMap<(Address, usize)>> {
    let mut header = CoreDumpHeader64::uninit();
    file.seek(SeekFrom::Start(0))
        .map_err(|_| Error::Connector("unable to seek to coredump 64 header"))?;
    // Fill the header struct directly from the file's leading bytes.
    file.read_exact(header.as_bytes_mut())
        .map_err(|_| Error::Connector("unable to read coredump 64 header"))?;
    // On-disk layout is little-endian; swap fields on big-endian hosts.
    if cfg!(target_endian = "big") {
        header.byte_swap();
    }
    if header.signature != DUMP_SIGNATURE {
        return Err(Error::Connector("header signature is not valid"));
    }
    if header.valid_dump != DUMP_VALID_DUMP64 {
        return Err(Error::Connector("header dump flag is not valid"));
    }
    if header.machine_image_type != IMAGE_FILE_MACHINE_AMD64 {
        return Err(Error::Connector("invalid machine image type"));
    }
    info!("64-bit Microsoft Crash Dump verified");
    match header.dump_type {
        // Full dumps lay physical memory out right after the fixed header.
        dump_type::FULL => full_memory_dump::parse_full_dump(
            header.physical_memory_block,
            size_of::<CoreDumpHeader64>(),
        ),
        dump_type::BIT_MAP => bitmap_dump::parse_bitmap_dump(file),
        _ => Err(Error::Connector(
            "invalid dump type, only full and bitmap dumps are supported",
        )),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::mem::size_of;
    // Guards against accidental layout drift of the on-disk structures.
    #[test]
    fn test_struct_sizes_x64() {
        assert_eq!(size_of::<PhysicalMemoryRun<u64>>(), 0x10);
        assert_eq!(size_of::<PhysicalMemoryDescriptor<u64>>(), 0x210);
        assert_eq!(size_of::<CoreDumpHeader64>(), 0x2000);
    }
    // Verifies selected field offsets match the documented header layout by
    // comparing field addresses against the struct's base address.
    #[test]
    fn test_struct_members_x64() {
        let header = CoreDumpHeader64::uninit();
        assert_eq!(
            &header.version_user as *const _ as usize - &header as *const _ as usize,
            0x60
        );
        assert_eq!(
            &header.kd_debugger_data_block as *const _ as usize - &header as *const _ as usize,
            0x80
        );
        assert_eq!(
            &header.physical_memory_block as *const _ as usize - &header as *const _ as usize,
            0x88
        );
        assert_eq!(
            &header.context_record as *const _ as usize - &header as *const _ as usize,
            0x348
        );
        assert_eq!(
            &header.exception_record as *const _ as usize - &header as *const _ as usize,
            0xf00
        );
        assert_eq!(
            &header.dump_type as *const _ as usize - &header as *const _ as usize,
            0xf98
        );
    }
}
|
use crate::{core::ClassHash, state::CompressedContract};
#[allow(unused)]
use anyhow::Context;
use rusqlite::{OptionalExtension, Transaction};
use stark_hash::{OverflowError, StarkHash};
/// Backfills contract class definitions missed by earlier sync logic.
///
/// Determines the chain from the genesis block hash, then scans every stored
/// transaction from a chain-specific starting block, and for each DECLARE
/// whose class is absent from `contract_code`, downloads the class from the
/// sequencer, compresses it, and inserts it — using a small three-stage
/// thread pipeline (download → extract/compress → insert on this thread).
///
/// No-op on an empty database; fails on an unknown genesis hash.
pub(crate) fn migrate(transaction: &Transaction<'_>) -> anyhow::Result<()> {
    let genesis = transaction
        .query_row(
            "SELECT hash FROM starknet_blocks WHERE number = 0",
            [],
            |r| Ok(StarkHash::from_be_slice(r.get_ref_unwrap(0).as_blob()?)),
        )
        .optional()?;
    // Map genesis hash -> (first block that can contain a missed declare,
    // chain identity used to pick the sequencer endpoint).
    let (minimum_block, chain) = match genesis {
        Some(Ok(x)) if x == crate::consts::GOERLI_GENESIS_HASH.0 => {
            (231_579, crate::core::Chain::Goerli)
        }
        Some(Ok(x)) if x == crate::consts::MAINNET_GENESIS_HASH.0 => {
            (2700, crate::core::Chain::Mainnet)
        }
        Some(Ok(y)) => anyhow::bail!("Unknown genesis block hash: {}", y),
        Some(Err(err @ OverflowError)) => {
            return Err(anyhow::Error::new(err).context("Failed to read genesis block hash"))
        }
        // Empty database: nothing to migrate.
        None => return Ok(()),
    };
    let latest_block_number =
        transaction.query_row("SELECT max(number) FROM starknet_blocks", [], |r| {
            Ok(r.get_ref(0)?.as_i64())
        })??;
    // Capacity-1 channels give backpressure between the pipeline stages.
    let (work_tx, work_rx) = std::sync::mpsc::sync_channel(1);
    let (downloaded_tx, downloaded_rx) = std::sync::mpsc::sync_channel(1);
    let (ready_tx, ready_rx) = std::sync::mpsc::channel();
    let handle = tokio::runtime::Handle::current();
    // Stage 1: fetch class definitions from the sequencer by hash.
    let downloader = std::thread::spawn(move || {
        use crate::sequencer::ClientApi;
        let client = crate::sequencer::Client::new(chain).unwrap();
        for class_hash in work_rx.iter() {
            let class = handle.block_on(client.class_by_hash(class_hash)).unwrap();
            downloaded_tx.send(class).unwrap();
        }
    });
    // Stage 2: extract ABI/code and zstd-compress everything for storage.
    let extract_compress = std::thread::spawn(move || {
        let mut compressor = zstd::bulk::Compressor::new(10).unwrap();
        for class in downloaded_rx.iter() {
            let (abi, code, hash) =
                crate::state::class_hash::extract_abi_code_hash(&class).unwrap();
            let definition = compressor.compress(&class).unwrap();
            let abi = compressor.compress(&abi).unwrap();
            let bytecode = compressor.compress(&code).unwrap();
            ready_tx
                .send(CompressedContract {
                    abi,
                    bytecode,
                    definition,
                    hash,
                })
                .unwrap();
        }
    });
    let mut class_query = transaction.prepare("SELECT 1 FROM contract_code WHERE hash = ?")?;
    let mut tx_query = transaction.prepare(
        "SELECT tx
     FROM starknet_transactions txs
     JOIN starknet_blocks blocks ON (txs.block_hash = blocks.hash)
     WHERE blocks.number >= ?",
    )?;
    let mut tx_rows = tx_query.query([minimum_block])?;
    let mut buffer = Vec::new();
    // Tracks hashes already queued so each missing class is fetched once.
    let mut already_processing = std::collections::HashSet::new();
    let mut last_report = std::time::Instant::now();
    let mut processed = 0;
    tracing::info!(
        "Processing transactions from blocks {minimum_block}..={latest_block_number} for missed declare transactions. This can take a while..."
    );
    while let Some(tx_row) = tx_rows.next()? {
        processed += 1;
        // this is probably quite slow with this fast loops, don't copy paste it around without
        // consideration. this is quite small migration after all so not going for anything more
        // complex.
        if last_report.elapsed() >= std::time::Duration::from_secs(5) {
            tracing::info!(
                processed,
                missing = already_processing.len(),
                "Continuing to process transactions"
            );
            last_report = std::time::Instant::now();
        }
        // Stored transactions are zstd-compressed JSON; reuse one buffer.
        buffer.clear();
        zstd::stream::copy_decode(tx_row.get_ref_unwrap(0).as_blob()?, &mut buffer)?;
        let tx = serde_json::from_slice::<SlimTransaction>(&buffer)?;
        let class_hash = match tx.r#type {
            TransactionType::Declare => tx
                .class_hash
                .expect("should had found a class hash in declare transaction"),
            _ => continue,
        };
        // Skip classes we already have stored or already queued this run.
        if class_query.exists(&[class_hash.0.as_be_bytes()])? {
            continue;
        }
        if !already_processing.insert(class_hash) {
            continue;
        }
        work_tx
            .send(class_hash)
            .context("Failed to send, some of tasks failed")?;
    }
    // Closing the work channel lets the pipeline threads drain and exit.
    drop(work_tx);
    tracing::info!(classes = already_processing.len(), "Saving missed classes");
    drop(already_processing);
    for cc in ready_rx.iter() {
        crate::storage::ContractCodeTable::insert_compressed(transaction, &cc)
            .with_context(|| format!("Failed to save class {}", cc.hash.0))?;
    }
    downloader.join().unwrap();
    extract_compress.join().unwrap();
    Ok(())
}
// Transaction kinds found in the stored gateway JSON; only DECLARE is acted
// on by this migration — the others are skipped.
#[derive(serde::Deserialize)]
#[serde(deny_unknown_fields)]
pub enum TransactionType {
    #[serde(rename = "DEPLOY")]
    Deploy,
    #[serde(rename = "INVOKE_FUNCTION")]
    InvokeFunction,
    #[serde(rename = "DECLARE")]
    Declare,
}
// Minimal projection of a stored transaction: only the fields this migration
// needs, so unrelated JSON keys are ignored.
#[derive(serde::Deserialize)]
struct SlimTransaction {
    r#type: TransactionType,
    // Only present on DECLARE transactions; defaults to None otherwise.
    #[serde(default)]
    class_hash: Option<ClassHash>,
}
|
#![allow(non_snake_case)]
use std::io;
// Reads one line from stdin and parses its whitespace-separated tokens into a
// tuple of the requested types. Panics on read or parse failure — acceptable
// for a contest one-shot program. Note a single type still yields a 1-tuple.
macro_rules! parse_line {
    ($($t: ty),+) => ({
        let mut a_str = String::new();
        io::stdin().read_line(&mut a_str).expect("read error");
        let mut a_iter = a_str.split_whitespace();
        (
            $(
                a_iter.next().unwrap().parse::<$t>().expect("parse error"),
            )+
        )
    })
}
// One ride request from the problem input.
struct Ride {
    a: i32,     // start cell, first coordinate
    b: i32,     // start cell, second coordinate
    x: i32,     // end cell, first coordinate
    y: i32,     // end cell, second coordinate
    s: i32,     // earliest start tick
    f: i32,     // latest finish tick
    id: usize,  // original input index (used for the output)
    len: i32,   // precomputed Manhattan length of the ride
    done: bool, // already assigned to a car
}
// Mutable per-vehicle state during the simulation.
struct Car {
    x: i32, // current cell, first coordinate
    y: i32, // current cell, second coordinate
    t: i32, // tick at which the car becomes free again
}
/// Manhattan distance between cells `(a, b)` and `(x, y)`.
fn dist(a: i32, b: i32, x: i32, y: i32) -> i32 {
    (a - x).abs() + (b - y).abs()
}
fn main() {
let (R, C, F, N, B, T) = parse_line!(i32, i32, usize, usize, i32, i32);
let mut rides = Vec::new();
for i in 0usize..N {
let (a, b, x, y, s, f) = parse_line!(i32, i32, i32, i32, i32, i32);
rides.push(Ride { a: a, b: b, x: x, y: y, s: s, f: f, id: i,
len: dist(a, b, x, y), done: false });
}
rides.sort_by(|a, b| {
let x = a.s;
let y = b.s;
x.cmp(&y)
});
/*for i in 0usize..N {
println!("{} {}", rides[i].id, rides[i].s);
}*/
let mut res = vec![vec![0; 0]; F];
let mut cars = Vec::new();
for _i in 0usize..F {
cars.push(Car { x: 0, y: 0, t: 0});
}
for _it in 0usize..N {
let mut bst_f_ind: i32 = -1;
let mut bst_n_ind: i32 = -1;
let mut bst_cost: i32 = 0;
for i in 0usize..N {
if rides[i].done {
continue;
}
for j in 0usize..F {
let len = dist(cars[j].x, cars[j].y, rides[i].a, rides[i].b);
let earliest_f = cars[j].t + len + rides[i].len;
if earliest_f <= rides[i].f {
let earliest_s = cars[j].t + len;
/*
// time too early for ride.s --> you have to wait
let cost1 = std::cmp::max(0, rides[i].s - earliest_s);
// distance car to ride begin
let cost2 = len;
// time too late for ride.s
let cost3 = std::cmp::max(0, earliest_s - rides[i].s);
// ride length
let cost4 = rides[i].len;
// weigh time you have to wait high for C, D
// weigh time you are too late high for B, E
let mut cost = 5 * cost1 + 2 * cost2 + 10000 * cost3 + 0 * cost4;
if cost3 == 0 {
cost -= B
}
if bst_f_ind == -1 || cost < bst_cost {
bst_f_ind = j as i32;
bst_n_ind = i as i32;
bst_cost = cost;
}
*/
// code for D high score
// just take earliest starting ride
// if not late, penalize ride taking you far
let mut take_time = std::cmp::max(earliest_s, rides[i].s);
if earliest_f <= (T as f64 * 0.98) as i32 {
take_time += rides[i].len / 15;
}
if bst_f_ind == -1 || take_time < bst_cost {
bst_f_ind = j as i32;
bst_n_ind = i as i32;
bst_cost = take_time;
}
}
}
}
if bst_f_ind != -1 {
let j = bst_f_ind as usize;
let i = bst_n_ind as usize;
res[j].push(rides[i].id);
let len = dist(cars[j].x, cars[j].y, rides[i].a, rides[i].b);
cars[j].t = std::cmp::max(cars[j].t + len + rides[i].len,
rides[i].s + rides[i].len);
cars[j].x = rides[i].x;
cars[j].y = rides[i].y;
rides[i].done = true;
}
}
for i in 0usize..F {
print!("{}", res[i].len());
for j in &res[i] {
print!(" {}", j);
}
print!("\n");
}
}
|
use crate::core::prelude::*;
use crate::queue::clients::QueueAccountClient;
use crate::queue::responses::*;
use crate::queue::HasStorageClient;
use azure_core::errors::AzureError;
use azure_core::prelude::*;
use hyper::StatusCode;
use std::convert::TryInto;
/// Builder for the List Queues operation against a storage account.
#[derive(Debug, Clone)]
pub struct ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    queue_service: &'a QueueAccountClient<C>, // account client issuing the call
    prefix: Option<&'a str>,                  // only list queues with this prefix
    next_marker: Option<&'a str>,             // continuation marker for paging
    max_results: Option<u32>,                 // cap on results per page
    include_metadata: bool,                   // ask the service for queue metadata
    timeout: Option<u64>,                     // server-side timeout — units per Azure API, TODO confirm
    client_request_id: Option<&'a str>,       // correlation id sent as a header
}
impl<'a, C> ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Starts a fresh builder for the given queue service: every optional
    /// parameter unset and metadata inclusion disabled.
    pub(crate) fn new(queue_service: &'a QueueAccountClient<C>) -> Self {
        Self {
            queue_service,
            include_metadata: false,
            prefix: None,
            next_marker: None,
            max_results: None,
            timeout: None,
            client_request_id: None,
        }
    }
}
// Option-trait getters: these expose each builder field so the shared
// `append_to_url`/`add_header` helpers on the traits can read them.
impl<'a, C> PrefixOption<'a> for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    fn prefix(&self) -> Option<&'a str> {
        self.prefix
    }
}
impl<'a, C> NextMarkerOption<'a> for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    fn next_marker(&self) -> Option<&'a str> {
        self.next_marker
    }
}
impl<'a, C> MaxResultsOption for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    fn max_results(&self) -> Option<u32> {
        self.max_results
    }
}
impl<'a, C> IncludeMetadataOption for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    fn include_metadata(&self) -> bool {
        self.include_metadata
    }
}
impl<'a, C> TimeoutOption for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    fn timeout(&self) -> Option<u64> {
        self.timeout
    }
}
impl<'a, C> ClientRequestIdOption<'a> for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    fn client_request_id(&self) -> Option<&'a str> {
        self.client_request_id
    }
}
// Builder "with_*" setters. Each consumes the builder and returns it with
// exactly one field replaced; struct-update syntax (`..self`) carries every
// other field over verbatim — identical behavior to spelling out each field.
impl<'a, C> PrefixSupport<'a> for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    fn with_prefix(self, prefix: &'a str) -> Self::O {
        Self {
            prefix: Some(prefix),
            ..self
        }
    }
}
impl<'a, C> NextMarkerSupport<'a> for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    fn with_next_marker(self, next_marker: &'a str) -> Self::O {
        Self {
            next_marker: Some(next_marker),
            ..self
        }
    }
}
impl<'a, C> MaxResultsSupport for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    fn with_max_results(self, max_results: u32) -> Self::O {
        Self {
            max_results: Some(max_results),
            ..self
        }
    }
}
impl<'a, C> IncludeMetadataSupport for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    fn with_include_metadata(self) -> Self::O {
        Self {
            include_metadata: true,
            ..self
        }
    }
}
impl<'a, C> TimeoutSupport for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    fn with_timeout(self, timeout: u64) -> Self::O {
        Self {
            timeout: Some(timeout),
            ..self
        }
    }
}
impl<'a, C> ClientRequestIdSupport<'a> for ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    type O = Self;
    fn with_client_request_id(self, client_request_id: &'a str) -> Self::O {
        Self {
            client_request_id: Some(client_request_id),
            ..self
        }
    }
}
// methods callable regardless
impl<'a, C> ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Returns the queue service client this builder was created from.
    pub fn queue_service(&self) -> &'a QueueAccountClient<C> {
        self.queue_service
    }
}
// methods callable only when every mandatory field has been filled
impl<'a, C> ListQueuesBuilder<'a, C>
where
    C: Client + Clone,
{
    /// Performs the List Queues call and parses the service response.
    pub async fn execute(self) -> Result<ListQueuesResponse, AzureError> {
        let mut url = url::Url::parse(self.queue_service.storage_client().queue_uri())?;
        // `comp=list` selects the List Queues operation on the account URI.
        url.query_pairs_mut().append_pair("comp", "list");
        // Each configured option appends its own query parameter (no-op when
        // the option is unset).
        IncludeMetadataOption::append_to_url(&self, &mut url);
        TimeoutOption::append_to_url(&self, &mut url);
        MaxResultsOption::append_to_url(&self, &mut url);
        NextMarkerOption::append_to_url(&self, &mut url);
        PrefixOption::append_to_url(&self, &mut url);
        debug!("url == {}", url);
        let perform_request_response = self.queue_service.storage_client().perform_request(
            url.as_str(),
            &http::Method::GET,
            &|mut request| {
                // Only the optional client request id travels as a header.
                request = ClientRequestIdOption::add_header(&self, request);
                request
            },
            Some(&[]),
        )?;
        // Any status other than 200 OK surfaces as an AzureError here.
        let (headers, body) = perform_request_response
            .check_status_extract_headers_and_body(StatusCode::OK)
            .await?;
        // Headers + body are converted into the typed response.
        (&headers, &body as &[u8]).try_into()
    }
}
|
use std::sync::Arc;
use catalogue::container::Container as CatalogueContainer;
use catalogue::infrastructure::persistence::inmem::InMemCatalogueRepository;
use catalogue::infrastructure::service::{SyncCollectionService, SyncPublicationService};
use common::event::EventSubscriber;
use common::infrastructure::event::{InMemEventBus, InMemEventRepository};
use common::result::Result;
use identity::container::Container as IdentityContainer;
use identity::infrastructure::persistence::inmem::{
InMemRoleRepository, InMemTokenRepository, InMemUserRepository,
};
use identity::infrastructure::service::{BcryptHasher, JWTEncoder};
use publishing::container::Container as PublishingContainer;
use publishing::infrastructure::persistence::inmem::{
InMemCategoryRepository, InMemCollectionRepository, InMemInteractionRepository,
InMemPublicationRepository,
};
use crate::development::EventLogger;
use crate::infrastructure::publishing::{
AuthorTranslator, ContentManagerTranslator, ReaderTranslator,
};
/// In-memory dependency-injection container wiring every bounded context
/// against a shared in-memory event bus (development configuration).
pub struct Container {
    // Shared infrastructure used by all contexts below.
    pub event_bus: Arc<InMemEventBus>,
    pub event_repo: Arc<InMemEventRepository>,
    // Bounded contexts, each parameterized by the in-memory bus.
    pub identity: IdentityContainer<InMemEventBus>,
    pub publishing: PublishingContainer<InMemEventBus>,
    pub catalogue: CatalogueContainer<InMemEventBus>,
}
impl Container {
    /// Builds the whole object graph with in-memory implementations.
    ///
    /// Note: `Arc`s that are shared across contexts (e.g. `user_repo`,
    /// `category_repo`) are cloned where needed before the final moves into
    /// the context constructors — keep that ordering when editing.
    pub async fn new() -> Self {
        // Common
        let event_bus = Arc::new(InMemEventBus::new());
        let event_repo = Arc::new(InMemEventRepository::new());
        // Identity
        let role_repo = Arc::new(InMemRoleRepository::new());
        let token_repo = Arc::new(InMemTokenRepository::new());
        let user_repo = Arc::new(InMemUserRepository::new());
        let password_hasher = Arc::new(BcryptHasher::new());
        let token_enc = Arc::new(JWTEncoder::new());
        // Publishing
        let category_repo = Arc::new(InMemCategoryRepository::new());
        let collection_repo = Arc::new(InMemCollectionRepository::new());
        let interaction_repo = Arc::new(InMemInteractionRepository::new());
        let publication_repo = Arc::new(InMemPublicationRepository::new());
        // Translators adapt identity/publishing repositories to the roles
        // other contexts expect.
        let author_repo = Arc::new(AuthorTranslator::new(
            publication_repo.clone(),
            user_repo.clone(),
        ));
        let content_manager_repo = Arc::new(ContentManagerTranslator::new(user_repo.clone()));
        let reader_repo = Arc::new(ReaderTranslator::new(user_repo.clone()));
        // Catalogue
        let catalogue_repo = Arc::new(InMemCatalogueRepository::new());
        let collection_serv = Arc::new(SyncCollectionService::new(
            author_repo.clone(),
            category_repo.clone(),
            collection_repo.clone(),
        ));
        let publication_serv = Arc::new(SyncPublicationService::new(
            author_repo.clone(),
            category_repo.clone(),
            publication_repo.clone(),
        ));
        let identity = IdentityContainer::new(
            event_bus.clone(),
            role_repo,
            token_repo,
            user_repo,
            password_hasher,
            token_enc,
        );
        let publishing = PublishingContainer::new(
            event_bus.clone(),
            author_repo,
            category_repo,
            collection_repo,
            content_manager_repo,
            interaction_repo,
            publication_repo,
            reader_repo,
        );
        let catalogue = CatalogueContainer::new(
            event_bus.clone(),
            catalogue_repo,
            collection_serv,
            publication_serv,
        );
        Container {
            event_bus,
            event_repo,
            identity,
            publishing,
            catalogue,
        }
    }
    /// Registers event handlers: an event logger plus the catalogue's own
    /// subscriptions on the shared bus.
    pub async fn subscribe(&self) -> Result<()> {
        let event_logger = EventLogger::new(self.event_repo.clone());
        self.event_bus.subscribe(Box::new(event_logger)).await?;
        self.catalogue.subscribe(self.event_bus.as_ref()).await?;
        Ok(())
    }
    /// Borrows the shared event bus.
    pub fn event_bus(&self) -> &InMemEventBus {
        &self.event_bus
    }
    /// Borrows the shared event repository.
    pub fn event_repo(&self) -> &InMemEventRepository {
        &self.event_repo
    }
}
|
use git_sys::git_version_string;
use std::ffi::CStr;
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Once;
use thiserror::Error;
// Failure modes of `init`; display strings come from thiserror's #[error].
#[derive(Debug, Error)]
pub enum InitError {
    #[error("this process has already initialized git")]
    Initialized,
    #[error("the current working directory is not a git repository")]
    NonGit,
}
/// Handle over libgit's global repository state, obtained via [`init`].
#[derive(Debug)]
pub struct Git {
    // Raw pointers into C globals set up by `init`; being raw pointers they
    // also make `Git` automatically !Send and !Sync.
    repo: *mut git_sys::repository,
    startup_info: *mut git_sys::startup_info,
}
/// The Current Working Directory **Must** be inside a repository.
///
/// Initializes libgit's global state at most once per process and returns a
/// handle wrapping the resulting global pointers.
///
/// # Errors
///
/// * [`InitError::Initialized`] if this process already called `init`.
/// * [`InitError::NonGit`] if the working directory is not a git repository.
pub fn init() -> Result<Git, InitError> {
    static INITIALIZED: AtomicBool = AtomicBool::new(false);
    // swap(true) makes check-and-set atomic: only the first caller proceeds,
    // all later callers observe `true` and bail out.
    if INITIALIZED.swap(true, Ordering::SeqCst) {
        Err(InitError::Initialized)
    } else {
        let mut nongit_ok = 0;
        let (repository, startup_info) = unsafe {
            // initializes the global the_repository variable
            git_sys::initialize_the_repository();
            // sets up some globals with information about the git directory
            git_sys::setup_git_directory_gently(&mut nongit_ok);
            git_sys::validate_cache_entries((*git_sys::the_repository).index);
            // pass back unsafe globals to the safe block
            (git_sys::the_repository, git_sys::startup_info)
        };
        // we dont have a use for being in a non-git directory
        // NOTE(review): INITIALIZED stays true on this error path, so a later
        // retry from a valid directory reports Initialized — confirm intended.
        if nongit_ok != 0 {
            return Err(InitError::NonGit);
        }
        Ok(Git {
            repo: repository,
            startup_info,
        })
    }
}
/// Revision-walk state tied to the lifetime of a [`Git`] handle.
pub struct Rev<'repo> {
    pub rev_info: git_sys::rev_info,
    // Ties this walker's lifetime to the Git handle without storing it.
    _marker: PhantomData<&'repo Git>,
}
impl<'repo> Rev<'repo> {
    /// Builds a revision walker for the given repository.
    pub fn new(git: &Git) -> Rev {
        let mut rev_info = MaybeUninit::<git_sys::rev_info>::uninit();
        let rev_info = unsafe {
            // SAFETY: relies on repo_init_revisions fully initializing the
            // rev_info struct before assume_init — TODO confirm against the C
            // implementation that no field is left uninitialized.
            git_sys::repo_init_revisions(
                git.repo,
                rev_info.as_mut_ptr(),
                (*git.startup_info).prefix,
            );
            rev_info.assume_init()
        };
        Rev {
            rev_info,
            _marker: Default::default(),
        }
    }
    /// Adds HEAD to the set of pending objects for the revision walk.
    pub fn add_head_to_pending(&mut self) {
        unsafe { git_sys::add_head_to_pending(&mut self.rev_info) }
    }
}
static INIT_VERSION: Once = Once::new();
static mut VERSION: &str = "";
pub fn version() -> &'static str {
unsafe {
INIT_VERSION.call_once(|| {
VERSION = CStr::from_ptr(git_version_string.as_ptr())
.to_str()
.expect("invalid utf-8 found in git version string")
});
VERSION
}
}
|
use std::io;
use crate::board::Board;
use crate::coin::Coin;
/// Reads one line from standard input, including its trailing newline.
///
/// # Errors
///
/// Propagates any I/O error produced while reading from stdin.
pub fn read() -> io::Result<String> {
    let mut buffer = String::new();
    // `?` propagates the error directly instead of the old match that
    // manually re-wrapped both the Ok and Err arms.
    io::stdin().read_line(&mut buffer)?;
    Ok(buffer)
}
pub fn render_board(board: &Board) {
for row_idx in (0..6).rev() {
let mut line = String::from("|");
for col_idx in 0..board.len() {
match &board[col_idx][row_idx] {
Coin::Player(player) => {
let s = format!("{}|", player);
line.push_str(&s);
},
Coin::Empty => line.push_str("x|"),
}
}
println!("{}", line);
}
print!("\n");
} |
//! The gRPC generator
use std::{
convert::TryFrom,
num::{NonZeroU32, NonZeroUsize},
time::Duration,
};
use bytes::{Buf, BufMut, Bytes};
use http::{uri::PathAndQuery, Uri};
use metrics::{counter, gauge, register_counter};
use rand::rngs::StdRng;
use rand::SeedableRng;
use serde::Deserialize;
use tonic::{
codec::{DecodeBuf, Decoder, EncodeBuf, Encoder},
Request, Response, Status,
};
use tracing::{debug, info};
use crate::{
block::{self, chunk_bytes, construct_block_cache, Block},
payload,
signals::Shutdown,
throttle::{self, Throttle},
};
/// Errors produced by [`Grpc`]
// Every variant carries `#[from]`, so `?` converts the underlying errors
// automatically at call sites.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// The remote RPC endpoint returned an error.
    #[error("RPC endpoint error: {0}")]
    Rpc(#[from] tonic::Status),
    /// gRPC transport error
    #[error("gRPC transport error: {0}")]
    Transport(#[from] tonic::transport::Error),
    /// Creation of payload blocks failed.
    #[error("Block creation error: {0}")]
    Block(#[from] block::Error),
}
/// Config for [`Grpc`]
// Deserialized from the generator's configuration; `PartialEq` supports
// config comparison in tests.
#[derive(Debug, Deserialize, PartialEq)]
pub struct Config {
    /// The gRPC URI. Looks like http://host/service.path/endpoint
    pub target_uri: String,
    /// The seed for random operations against this target
    pub seed: [u8; 32],
    /// The payload variant. This should be protobuf encoded for typical gRPC
    /// endpoints.
    pub variant: payload::Config,
    /// The bytes per second to send or receive from the target
    pub bytes_per_second: byte_unit::Byte,
    /// The block sizes for messages to this target
    pub block_sizes: Option<Vec<byte_unit::Byte>>,
    /// The maximum size in bytes of the cache of prebuilt messages
    pub maximum_prebuild_cache_size_bytes: byte_unit::Byte,
    /// The total number of parallel connections to maintain
    pub parallel_connections: u16,
    /// The load throttle configuration
    // Missing in the config file -> throttle::Config::default().
    #[serde(default)]
    pub throttle: throttle::Config,
}
/// No-op tonic codec. Sends raw bytes and returns the number of bytes received.
// Unit struct: all behavior lives in the Codec/Encoder impls below.
#[derive(Debug, Clone, Default, Copy)]
pub struct NoopCodec;
impl tonic::codec::Codec for NoopCodec {
    // Outgoing messages are raw bytes; incoming messages collapse to their
    // byte count (see CountingDecoder).
    type Encode = Bytes;
    type Decode = usize;
    type Encoder = Self;
    type Decoder = CountingDecoder;
    fn encoder(&mut self) -> Self::Encoder {
        Self
    }
    fn decoder(&mut self) -> Self::Decoder {
        CountingDecoder
    }
}
impl Encoder for NoopCodec {
    type Item = Bytes;
    type Error = Status;
    // Copies the prebuilt payload straight into tonic's buffer without any
    // protobuf re-encoding.
    fn encode(&mut self, item: Self::Item, buf: &mut EncodeBuf<'_>) -> Result<(), Self::Error> {
        buf.put(item);
        Ok(())
    }
}
/// This decoder returns the number of bytes received
// The response payload itself is discarded; only its size matters here.
#[derive(Debug, Clone, Default, Copy)]
pub struct CountingDecoder;
impl Decoder for CountingDecoder {
    type Item = usize;
    type Error = Status;
    /// Reports how many response bytes were available, consuming them all.
    fn decode(&mut self, buf: &mut DecodeBuf<'_>) -> Result<Option<usize>, Self::Error> {
        let response_bytes = buf.remaining();
        // Consume the provided response buffer. If this isn't done, tonic will
        // throw an unexpected EOF error while processing the response.
        buf.advance(response_bytes);
        Ok(Some(response_bytes))
    }
}
/// The gRPC generator.
///
/// This generator is able to connect to targets via gRPC.
#[derive(Debug)]
pub struct Grpc {
    config: Config,
    // Full target URI, including the RPC path component.
    target_uri: Uri,
    // Path-and-query portion of `target_uri`, passed per-request to tonic.
    rpc_path: PathAndQuery,
    // Signals when the generator should stop sending.
    shutdown: Shutdown,
    // Rate-limits outgoing bytes to `config.bytes_per_second`.
    throttle: Throttle,
    // Pre-built request payloads, cycled through endlessly at send time.
    block_cache: Vec<Block>,
    // Labels attached to every metric this generator emits.
    metric_labels: Vec<(String, String)>,
}
impl Grpc {
    /// Create a new [`Grpc`] instance.
    ///
    /// Seeds an RNG from `config.seed`, pre-builds the payload block cache,
    /// parses the target URI and installs the byte-rate throttle. No network
    /// I/O happens here; connections are established in [`Grpc::spin`].
    ///
    /// # Errors
    ///
    /// Creation will fail if the underlying governor capacity exceeds u32.
    ///
    /// # Panics
    ///
    /// Function will panic if user has passed zero values for any byte
    /// values. Sharp corners.
    #[allow(clippy::cast_possible_truncation)]
    pub fn new(config: Config, shutdown: Shutdown) -> Result<Self, Error> {
        use byte_unit::{Byte, ByteUnit};
        let mut rng = StdRng::from_seed(config.seed);
        // Default block sizes when none are configured: 1/32 MB up to 4 MB.
        let block_sizes: Vec<NonZeroUsize> = config
            .block_sizes
            .clone()
            .unwrap_or_else(|| {
                vec![
                    Byte::from_unit(1.0 / 32.0, ByteUnit::MB).unwrap(),
                    Byte::from_unit(1.0 / 16.0, ByteUnit::MB).unwrap(),
                    Byte::from_unit(1.0 / 8.0, ByteUnit::MB).unwrap(),
                    Byte::from_unit(1.0 / 4.0, ByteUnit::MB).unwrap(),
                    Byte::from_unit(1.0 / 2.0, ByteUnit::MB).unwrap(),
                    Byte::from_unit(1_f64, ByteUnit::MB).unwrap(),
                    Byte::from_unit(2_f64, ByteUnit::MB).unwrap(),
                    Byte::from_unit(4_f64, ByteUnit::MB).unwrap(),
                ]
            })
            .iter()
            .map(|sz| NonZeroUsize::new(sz.get_bytes() as usize).expect("bytes must be non-zero"))
            .collect();
        let labels = vec![
            ("component".to_string(), "generator".to_string()),
            ("component_name".to_string(), "grpc".to_string()),
        ];
        // NOTE(review): `get_bytes() as u32` truncates rates above u32::MAX
        // bytes/sec, and `unwrap` panics on zero — see the Panics note above.
        let bytes_per_second = NonZeroU32::new(config.bytes_per_second.get_bytes() as u32).unwrap();
        gauge!(
            "bytes_per_second",
            f64::from(bytes_per_second.get()),
            &labels
        );
        // Carve the prebuild cache budget into chunks of the chosen block sizes.
        let block_chunks = chunk_bytes(
            &mut rng,
            NonZeroUsize::new(config.maximum_prebuild_cache_size_bytes.get_bytes() as usize)
                .expect("bytes must be non-zero"),
            &block_sizes,
        )?;
        let block_cache = construct_block_cache(&mut rng, &config.variant, &block_chunks, &labels);
        let target_uri =
            http::uri::Uri::try_from(config.target_uri.clone()).expect("target_uri must be valid");
        // The path component of the URI names the RPC to invoke.
        let rpc_path = target_uri
            .path_and_query()
            .cloned()
            .expect("target_uri should have an RPC path");
        let throttle = Throttle::new_with_config(config.throttle, bytes_per_second);
        Ok(Self {
            target_uri,
            rpc_path,
            config,
            shutdown,
            block_cache,
            throttle,
            metric_labels: labels,
        })
    }
    /// Establish a connection with the configured RPC server
    ///
    /// The channel targets only the URI's authority; the RPC path is supplied
    /// per-request in [`Grpc::req`].
    async fn connect(&self) -> Result<tonic::client::Grpc<tonic::transport::Channel>, Error> {
        let mut parts = self.target_uri.clone().into_parts();
        // Drop the RPC path: the transport channel connects to the bare host.
        parts.path_and_query = Some(PathAndQuery::from_static(""));
        let uri = Uri::from_parts(parts).unwrap();
        let endpoint = tonic::transport::Endpoint::new(uri)?;
        let endpoint = endpoint.concurrency_limit(self.config.parallel_connections as usize);
        let endpoint = endpoint.connect_timeout(Duration::from_secs(1));
        let conn = endpoint.connect().await?;
        let conn = tonic::client::Grpc::new(conn);
        debug!("gRPC generator connected");
        Ok(conn)
    }
    /// Send one RPC request
    ///
    /// Waits for the client to become ready, then issues a unary call carrying
    /// raw request bytes; the response decodes to its byte count via
    /// [`NoopCodec`]/[`CountingDecoder`].
    async fn req(
        client: &mut tonic::client::Grpc<tonic::transport::Channel>,
        rpc_path: http::uri::PathAndQuery,
        request: Bytes,
    ) -> Result<Response<usize>, tonic::Status> {
        client.ready().await.map_err(|e| {
            tonic::Status::new(tonic::Code::Unknown, format!("Service was not ready: {e}"))
        })?;
        let res = client
            .unary(Request::new(request), rpc_path, NoopCodec::default())
            .await?;
        Ok(res)
    }
    /// Run to completion or until a shutdown signal is received.
    ///
    /// # Errors
    ///
    /// Function will return an error when the RPC connection cannot be
    /// established.
    ///
    /// # Panics
    ///
    /// Function will panic if underlying byte capacity is not available.
    pub async fn spin(mut self) -> Result<(), Error> {
        // Retry the initial connection forever, sleeping 100ms between
        // attempts; only a shutdown (below) or success leaves this state.
        let mut client = loop {
            match self.connect().await {
                Ok(c) => break c,
                Err(e) => debug!("Failed to connect gRPC generator (will retry): {}", e),
            }
            tokio::time::sleep(Duration::from_millis(100)).await;
        };
        // Endlessly cycle through the prebuilt payload blocks.
        let mut blocks = self.block_cache.iter().cycle().peekable();
        let rpc_path = self.rpc_path;
        let requests_sent = register_counter!("requests_sent", &self.metric_labels);
        let bytes_written = register_counter!("bytes_written", &self.metric_labels);
        let request_ok = register_counter!("request_ok", &self.metric_labels);
        let response_bytes = register_counter!("response_bytes", &self.metric_labels);
        loop {
            // Peek first so the throttle wait is sized before consuming the block.
            let blk = blocks.peek().unwrap();
            let total_bytes = blk.total_bytes;
            tokio::select! {
                _ = self.throttle.wait_for(total_bytes) => {
                    let block_length = blk.bytes.len();
                    requests_sent.increment(1);
                    let blk = blocks.next().unwrap(); // actually advance through the blocks
                    let res = Self::req(
                        &mut client,
                        rpc_path.clone(),
                        Bytes::copy_from_slice(&blk.bytes),
                    )
                    .await;
                    match res {
                        Ok(res) => {
                            bytes_written.increment(block_length as u64);
                            request_ok.increment(1);
                            response_bytes.increment(res.into_inner() as u64);
                        }
                        Err(err) => {
                            // Failures are counted (with the error text as a
                            // label) but do not stop the generator.
                            let mut error_labels = self.metric_labels.clone();
                            error_labels.push(("error".to_string(), err.to_string()));
                            counter!("request_failure", 1, &error_labels);
                        }
                    }
                },
                _ = self.shutdown.recv() => {
                    info!("shutdown signal received");
                    break;
                },
            }
        }
        Ok(())
    }
}
|
struct Solution {}
impl Solution {
    /// Returns the longest palindromic substring of `s`.
    ///
    /// Fixes the previous implementation, which XOR-ed the input bytes,
    /// printed debug output, always returned the literal `"abc"`, and
    /// panicked on an empty input.
    ///
    /// Uses expand-around-center: O(n^2) time, O(1) extra space. Comparison
    /// is byte-wise, which is correct for ASCII input.
    pub fn longest_palindrome(s: String) -> String {
        // Widest palindrome centered at (l, r); returns (start, length).
        // `l == r` probes odd-length centers, `r == l + 1` even-length ones.
        fn expand(b: &[u8], mut l: usize, mut r: usize) -> (usize, usize) {
            loop {
                if b[l] != b[r] {
                    // Undo the last (failed) expansion step. `r >= l + 1`
                    // whenever a mismatch is possible, so no underflow.
                    return (l + 1, r - l - 1);
                }
                if l == 0 || r == b.len() - 1 {
                    return (l, r - l + 1);
                }
                l -= 1;
                r += 1;
            }
        }
        let b = s.as_bytes();
        let n = b.len();
        if n == 0 {
            return String::new();
        }
        // Best palindrome found so far; a single byte is always a palindrome.
        let (mut start, mut len) = (0usize, 1usize);
        for i in 0..n {
            let (s1, l1) = expand(b, i, i);
            if l1 > len {
                start = s1;
                len = l1;
            }
            if i + 1 < n {
                let (s2, l2) = expand(b, i, i + 1);
                if l2 > len {
                    start = s2;
                    len = l2;
                }
            }
        }
        s[start..start + len].to_string()
    }
}
/// Entry point: prints the longest palindromic substring of "bab".
fn main() {
    let answer = Solution::longest_palindrome("bab".to_string());
    println!("{}", answer);
}
|
use crate::{GuillotineAllocator, ShelfAllocator, Rectangle, Size};
use crate::tiled::SlabAllocatorRegion;
use crate::{point2, size2};
use crate::free_list::*;
/// Identifies a single allocation in the atlas, returned to the caller so the
/// allocation can later be released via `ArrayAtlasAllocator::deallocate`.
#[derive(Copy, Clone, Debug)]
pub struct ArrayAllocId {
    // Serialized id from the per-region allocator (see `alloc.id.serialize()`).
    id: u32,
    // Which allocator family (guillotine / shelf / tiled) owns this allocation.
    allocator_kind: AllocatorKind,
    // Handle of the owning allocator within its family's free list.
    allocator_idx: FreeListHandle,
    // Index of the first atlas region used by the owning allocator.
    region_idx: RegionIndex,
}
/// Result of a successful atlas allocation.
pub struct ArrayAllocation {
    /// Allocated rectangle, in atlas pixel coordinates.
    pub rectangle: Rectangle,
    /// Texture array layer the rectangle lives in.
    pub layer: u16,
    /// Identifier used to deallocate this allocation later.
    pub id: ArrayAllocId,
}
/// Bookkeeping stored alongside each per-region allocator.
struct RegionInfo {
    // The region's rectangle, used to translate region-local allocations
    // into atlas coordinates.
    rectangle: Rectangle,
    // Texture array layer the region lives in.
    layer: u16,
    // Index of the region in `Regions::regions`.
    region_idx: RegionIndex,
}
// NOTE(review): this alias appears unused in this file — candidate for removal.
type AllocatorIndex = usize;
/// Texture-array atlas allocator that dispatches between several region
/// allocator families depending on the requested size.
pub struct ArrayAtlasAllocator {
    // Large-region allocators for arbitrary rectangles.
    guillotines: FreeList<(GuillotineAllocator, RegionInfo)>,
    horizontal_shelves: FreeList<(ShelfAllocator, RegionInfo)>,
    // 16 32 64 256
    tiles: FreeList<(SlabAllocatorRegion, RegionInfo)>,
    // Size of one layer in device pixels.
    size: Size,
    // Width and height of the regions in device pixels.
    region_size: u16,
    // Occupancy tracking for the region grid across all layers.
    regions: Regions,
}
/// Which allocation strategy serves a given request.
#[derive(Copy, Clone, Debug, PartialEq)]
enum AllocatorKind {
    Guillotine,
    HorizontalShelf,
    // Fixed-size tiles; the payload is the tile edge length in pixels.
    Tiled(u8),
}
/// Whether an allocator occupies a single region or a large group of regions
/// (see `ArrayAtlasAllocator::add_allocator`).
#[derive(Copy, Clone, Debug, PartialEq)]
enum RegionSize {
    Small,
    Large,
}
impl ArrayAtlasAllocator {
    /// Create an allocator for a texture array with layers of `size` pixels.
    ///
    /// NOTE(review): the layout is hard-coded to a 2x2 region grid per layer
    /// and `region_size` derives from `size.width` only — confirm that
    /// non-square layer sizes are intentionally unsupported here.
    pub fn new(size: Size) -> Self {
        ArrayAtlasAllocator {
            regions: Regions {
                regions: Vec::new(),
                layout: RegionLayout {
                    width: 2,
                    height: 2,
                    large_region_size: 2,
                },
            },
            region_size: size.width as u16 / 2,
            guillotines: FreeList::new(),
            horizontal_shelves: FreeList::new(),
            tiles: FreeList::new(),
            size,
        }
    }
    /// Allocate a rectangle in the atlas.
    ///
    /// Picks an allocator family for `size`, tries every existing region of
    /// that family, then falls back to creating a new region. Returns `None`
    /// when no region can be added. Only the guillotine path is implemented.
    pub fn allocate(&mut self, size: Size) -> Option<ArrayAllocation> {
        let kind = self.select_allocator_kind(size);
        match kind {
            AllocatorKind::Guillotine => {
                // Try existing guillotine regions first.
                for (allocator_idx, (ref mut allocator, ref mut region)) in self.guillotines.iter_mut_with_handles() {
                    if let Some(alloc) = allocator.allocate(size) {
                        return Some(ArrayAllocation {
                            id: ArrayAllocId {
                                id: alloc.id.serialize(),
                                region_idx: region.region_idx,
                                allocator_idx,
                                allocator_kind: kind,
                            },
                            layer: region.layer,
                            // Translate region-local coordinates into atlas space.
                            rectangle: alloc.rectangle.translate(region.rectangle.min.to_vector()),
                        });
                    }
                }
                // No room anywhere: add a fresh region and allocate from it.
                let allocator_idx = self.add_allocator(kind)?;
                let (ref mut allocator, ref mut region) = self.guillotines[allocator_idx];
                return allocator.allocate(size).map(|alloc| ArrayAllocation {
                    id: ArrayAllocId {
                        id: alloc.id.serialize(),
                        region_idx: region.region_idx,
                        allocator_idx,
                        allocator_kind: kind,
                    },
                    layer: region.layer,
                    rectangle: alloc.rectangle.translate(region.rectangle.min.to_vector()),
                });
            }
            AllocatorKind::Tiled(tile_size) => {
                //for (allocator_idx, (ref mut allocator, ref mut region)) in self.tiles.iter_mut_with_handles() {
                // if allocator.
                //}
                unimplemented!();
            }
            _ => {
                unimplemented!();
            }
        }
    }
    /// Deallocate a rectangle in the atlas.
    pub fn deallocate(&mut self, id: ArrayAllocId) {
        unimplemented!();
    }
    // Decide which allocator family should serve a request of `size`.
    // NOTE(review): `next_pow2` is computed but unused; the selection logic
    // is still unimplemented.
    fn select_allocator_kind(&self, size: Size) -> AllocatorKind {
        let max = size.width.max(size.height);
        let next_pow2 = (max as u32).next_power_of_two();
        unimplemented!();
    }
    // Create a new per-region allocator of `kind`, reserving one region (or a
    // large group for guillotine allocators) and recording its placement.
    // Returns `None` when no suitable run of regions is free.
    fn add_allocator(&mut self, kind: AllocatorKind) -> Option<FreeListHandle> {
        let region_size = match kind {
            AllocatorKind::Guillotine => RegionSize::Large,
            _ => RegionSize::Small,
        };
        let large_region_size = self.regions.layout.large_region_size;
        let (n_regions, size) = match region_size {
            RegionSize::Small => (1, self.region_size),
            RegionSize::Large => ((large_region_size * large_region_size) as u8, self.region_size * large_region_size),
        };
        // Reserve the regions with a placeholder handle; the real handle is
        // written below via `set_allocator` once it exists.
        let region_idx = if let Some(idx) = self.regions.allocate_region(n_regions, FreeListHandle::NONE) {
            idx
        } else {
            return None;
        };
        let (x, y, layer) = self.regions.layout.position_for_region_idx(region_idx);
        // NOTE(review): (x, y) are region-grid coordinates but are used
        // directly as the rectangle origin while `size` is in pixels —
        // confirm whether they should be scaled by `region_size` first.
        let min = point2(x as i32, y as i32);
        let size = size2(size as i32, size as i32);
        let max = min + size.to_vector();
        let rectangle = Rectangle { min, max };
        let region_info = RegionInfo {
            rectangle,
            layer,
            region_idx,
        };
        let allocator_idx = match kind {
            AllocatorKind::Guillotine => {
                self.guillotines.add_with_value((GuillotineAllocator::new(size), region_info))
            }
            AllocatorKind::HorizontalShelf => {
                self.horizontal_shelves.add_with_value((ShelfAllocator::new(size), region_info))
            }
            AllocatorKind::Tiled(tile_size) => {
                // Number of tile slots per side, clamped to fit the id space.
                let size_in_slots = (self.region_size / tile_size as u16).min(255) as i32;
                self.tiles.add_with_value((
                    SlabAllocatorRegion::new(size2(size_in_slots, size_in_slots)),
                    region_info
                ))
            }
        };
        self.regions.set_allocator(region_idx, allocator_idx);
        Some(allocator_idx)
    }
}
/// Describes how atlas layers are divided into a grid of regions.
struct RegionLayout {
    // Number of regions per row of a layer.
    width: u16,
    // Number of regions per column of a layer.
    height: u16,
    // Edge length, in regions, of a "large" region group.
    large_region_size: u16,
}
impl RegionLayout {
    /// Number of regions contained in a single layer.
    fn regions_per_layer(&self) -> u16 {
        self.width * self.height
    }
    /// Map a global region index to `(x, y, layer)` grid coordinates.
    fn position_for_region_idx(&self, region_idx: RegionIndex) -> (u16, u16, u16) {
        let idx = region_idx.0;
        let per_layer = self.regions_per_layer();
        let layer = idx / per_layer;
        let index_in_layer = idx % per_layer;
        let large_regions_per_row = self.width / self.large_region_size;
        // NOTE(review): the large-region coordinates are derived from the
        // *global* index rather than `index_in_layer`; confirm this is
        // intentional for multi-layer atlases.
        let large_x = idx % large_regions_per_row;
        let large_y = idx / large_regions_per_row;
        let x = large_x + index_in_layer % self.large_region_size;
        let y = large_y + index_in_layer / self.large_region_size;
        (x, y, layer)
    }
}
/// A single grid cell of the atlas.
struct Region {
    // Handle of the allocator occupying this cell; `NONE` when free.
    allocator: FreeListHandle,
    // Number of regions in the group starting at this cell (1 by default;
    // set on the first cell of a group by `Regions::allocate_region`).
    size: u8,
}
/// Index of a region within `Regions::regions`.
#[derive(Copy, Clone, Debug, PartialEq)]
struct RegionIndex(u16);
/// Occupancy tracking for all atlas regions across layers.
pub struct Regions {
    regions: Vec<Region>,
    layout: RegionLayout,
}
impl Regions {
    /// Find `size` consecutive free regions (aligned to a multiple of `size`)
    /// and mark them as owned by `allocator`.
    ///
    /// Returns the index of the first region of the group, or `None` when no
    /// aligned run of free regions exists.
    fn allocate_region(&mut self, size: u8, allocator: FreeListHandle) -> Option<RegionIndex> {
        debug_assert!(size > 0, "group size must be non-zero (step_by(0) panics)");
        let step = size as usize;
        'outer: for i in (0..self.regions.len()).step_by(step) {
            // Fix: guard against a trailing partial group. When the region
            // count is not a multiple of `step`, the previous version indexed
            // past the end of `self.regions` and panicked.
            if i + step > self.regions.len() {
                break;
            }
            if self.regions[i].allocator.is_some() {
                continue;
            }
            for j in 1..step {
                if self.regions[i + j].allocator.is_some() {
                    continue 'outer;
                }
            }
            for j in 0..step {
                self.regions[i + j].allocator = allocator;
            }
            // Record the group size on its first cell only.
            self.regions[i].size = size;
            return Some(RegionIndex(i as u16));
        }
        None
    }
    /// Re-tag every region of the group starting at `region` with `allocator`.
    fn set_allocator(&mut self, region: RegionIndex, allocator: FreeListHandle) {
        let n = self.regions[region.0 as usize].size as usize;
        for i in 0..n {
            self.regions[region.0 as usize + i].allocator = allocator;
        }
    }
    /// Release the whole group starting at `index`, resetting each region to
    /// a free, size-1 state.
    fn deallocate_region(&mut self, index: usize) {
        // The range endpoint is computed once up front, so overwriting
        // `self.regions[index]` on the first iteration is safe.
        for i in 0..(self.regions[index].size as usize) {
            self.regions[index + i] = Region {
                allocator: FreeListHandle::NONE,
                size: 1,
            }
        }
    }
    /// Append `count` fresh, free regions.
    fn add_regions(&mut self, count: usize) {
        for _ in 0..count {
            self.regions.push(Region {
                allocator: FreeListHandle::NONE,
                size: 1,
            });
        }
    }
    /// Drop trailing free regions.
    fn shrink(&mut self) {
        while self.regions.last().map(|region| region.allocator.is_none()) == Some(true) {
            self.regions.pop();
        }
    }
    /// Shrink, then pad with free regions until the count is a power of two.
    fn shrink_to_power_of_two(&mut self) {
        self.shrink();
        // Fix: the previous version pushed at most one region (`if`), which
        // only reaches a power of two when the count was exactly one short.
        while !self.regions.len().is_power_of_two() {
            self.regions.push(Region {
                allocator: FreeListHandle::NONE,
                size: 1,
            });
        }
    }
    /// Shrink, then pad with free regions until the count is a multiple of
    /// `multiple`.
    fn shrink_to_multiple_of(&mut self, multiple: usize) {
        self.shrink();
        // Fix: same off-by-many as `shrink_to_power_of_two` — keep pushing
        // until the count is actually a multiple.
        while self.regions.len() % multiple != 0 {
            self.regions.push(Region {
                allocator: FreeListHandle::NONE,
                size: 1,
            });
        }
    }
}
|
// svd2rust-style generated reader/writer aliases for the CCIPR2 register.
#[doc = "Register `CCIPR2` reader"]
pub type R = crate::R<CCIPR2_SPEC>;
#[doc = "Register `CCIPR2` writer"]
pub type W = crate::W<CCIPR2_SPEC>;
#[doc = "Field `I2C4SEL` reader - I2C4 clock source selection"]
pub type I2C4SEL_R = crate::FieldReader<I2C4SEL_A>;
// Generated accessors for the 2-bit `I2C4SEL` field (bits 0:1). The value 3
// is reserved, hence the `Option` returned by `variant`.
#[doc = "I2C4 clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum I2C4SEL_A {
    #[doc = "0: PCLK clock selected"]
    Pclk = 0,
    #[doc = "1: SYSCLK clock selected"]
    Sysclk = 1,
    #[doc = "2: HSI16 clock selected"]
    Hsi16 = 2,
}
impl From<I2C4SEL_A> for u8 {
    #[inline(always)]
    fn from(variant: I2C4SEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for I2C4SEL_A {
    type Ux = u8;
}
impl I2C4SEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<I2C4SEL_A> {
        match self.bits {
            0 => Some(I2C4SEL_A::Pclk),
            1 => Some(I2C4SEL_A::Sysclk),
            2 => Some(I2C4SEL_A::Hsi16),
            _ => None,
        }
    }
    #[doc = "PCLK clock selected"]
    #[inline(always)]
    pub fn is_pclk(&self) -> bool {
        *self == I2C4SEL_A::Pclk
    }
    #[doc = "SYSCLK clock selected"]
    #[inline(always)]
    pub fn is_sysclk(&self) -> bool {
        *self == I2C4SEL_A::Sysclk
    }
    #[doc = "HSI16 clock selected"]
    #[inline(always)]
    pub fn is_hsi16(&self) -> bool {
        *self == I2C4SEL_A::Hsi16
    }
}
#[doc = "Field `I2C4SEL` writer - I2C4 clock source selection"]
pub type I2C4SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, I2C4SEL_A>;
impl<'a, REG, const O: u8> I2C4SEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "PCLK clock selected"]
    #[inline(always)]
    pub fn pclk(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::Pclk)
    }
    #[doc = "SYSCLK clock selected"]
    #[inline(always)]
    pub fn sysclk(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::Sysclk)
    }
    #[doc = "HSI16 clock selected"]
    #[inline(always)]
    pub fn hsi16(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::Hsi16)
    }
}
// Generated accessors for the single-bit `DFSDMSEL` field (bit 2).
#[doc = "Field `DFSDMSEL` reader - Digital filter for sigma delta modulator kernel clock source selection"]
pub type DFSDMSEL_R = crate::BitReader<DFSDMSEL_A>;
#[doc = "Digital filter for sigma delta modulator kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DFSDMSEL_A {
    #[doc = "0: APB2 clock (PCLK2) selected as DFSDM kernel clock"]
    Pclk2 = 0,
    #[doc = "1: System clock selected as DFSDM kernel clock"]
    Sysclk = 1,
}
impl From<DFSDMSEL_A> for bool {
    #[inline(always)]
    fn from(variant: DFSDMSEL_A) -> Self {
        variant as u8 != 0
    }
}
impl DFSDMSEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DFSDMSEL_A {
        match self.bits {
            false => DFSDMSEL_A::Pclk2,
            true => DFSDMSEL_A::Sysclk,
        }
    }
    #[doc = "APB2 clock (PCLK2) selected as DFSDM kernel clock"]
    #[inline(always)]
    pub fn is_pclk2(&self) -> bool {
        *self == DFSDMSEL_A::Pclk2
    }
    #[doc = "System clock selected as DFSDM kernel clock"]
    #[inline(always)]
    pub fn is_sysclk(&self) -> bool {
        *self == DFSDMSEL_A::Sysclk
    }
}
#[doc = "Field `DFSDMSEL` writer - Digital filter for sigma delta modulator kernel clock source selection"]
pub type DFSDMSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DFSDMSEL_A>;
impl<'a, REG, const O: u8> DFSDMSEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "APB2 clock (PCLK2) selected as DFSDM kernel clock"]
    #[inline(always)]
    pub fn pclk2(self) -> &'a mut crate::W<REG> {
        self.variant(DFSDMSEL_A::Pclk2)
    }
    #[doc = "System clock selected as DFSDM kernel clock"]
    #[inline(always)]
    pub fn sysclk(self) -> &'a mut crate::W<REG> {
        self.variant(DFSDMSEL_A::Sysclk)
    }
}
// Generated accessors for the 2-bit `ADFSDMSEL` field (bits 3:4); value 3 is
// reserved.
#[doc = "Field `ADFSDMSEL` reader - Digital filter for sigma delta modulator audio clock source selection"]
pub type ADFSDMSEL_R = crate::FieldReader<ADFSDMSEL_A>;
#[doc = "Digital filter for sigma delta modulator audio clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum ADFSDMSEL_A {
    #[doc = "0: SAI1clock selected as DFSDM audio clock"]
    Sai1 = 0,
    #[doc = "1: HSI clock selected as DFSDM audio clock"]
    Hsi = 1,
    #[doc = "2: MSI clock selected as DFSDM audio clock"]
    Msi = 2,
}
impl From<ADFSDMSEL_A> for u8 {
    #[inline(always)]
    fn from(variant: ADFSDMSEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for ADFSDMSEL_A {
    type Ux = u8;
}
impl ADFSDMSEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<ADFSDMSEL_A> {
        match self.bits {
            0 => Some(ADFSDMSEL_A::Sai1),
            1 => Some(ADFSDMSEL_A::Hsi),
            2 => Some(ADFSDMSEL_A::Msi),
            _ => None,
        }
    }
    #[doc = "SAI1clock selected as DFSDM audio clock"]
    #[inline(always)]
    pub fn is_sai1(&self) -> bool {
        *self == ADFSDMSEL_A::Sai1
    }
    #[doc = "HSI clock selected as DFSDM audio clock"]
    #[inline(always)]
    pub fn is_hsi(&self) -> bool {
        *self == ADFSDMSEL_A::Hsi
    }
    #[doc = "MSI clock selected as DFSDM audio clock"]
    #[inline(always)]
    pub fn is_msi(&self) -> bool {
        *self == ADFSDMSEL_A::Msi
    }
}
#[doc = "Field `ADFSDMSEL` writer - Digital filter for sigma delta modulator audio clock source selection"]
pub type ADFSDMSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, ADFSDMSEL_A>;
impl<'a, REG, const O: u8> ADFSDMSEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "SAI1clock selected as DFSDM audio clock"]
    #[inline(always)]
    pub fn sai1(self) -> &'a mut crate::W<REG> {
        self.variant(ADFSDMSEL_A::Sai1)
    }
    #[doc = "HSI clock selected as DFSDM audio clock"]
    #[inline(always)]
    pub fn hsi(self) -> &'a mut crate::W<REG> {
        self.variant(ADFSDMSEL_A::Hsi)
    }
    #[doc = "MSI clock selected as DFSDM audio clock"]
    #[inline(always)]
    pub fn msi(self) -> &'a mut crate::W<REG> {
        self.variant(ADFSDMSEL_A::Msi)
    }
}
// Generated accessors for the 3-bit `SAI1SEL` field (bits 5:7); values 5..=7
// are reserved. These types are shared with SAI2SEL via `pub use` below,
// which is why the external-clock variant is named `Sai2Extclk`.
#[doc = "Field `SAI1SEL` reader - SAI1 clock source selection"]
pub type SAI1SEL_R = crate::FieldReader<SAI1SEL_A>;
#[doc = "SAI1 clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SAI1SEL_A {
    #[doc = "0: PLLSAI1CLK clock is selected as SAIx clock"]
    Pllsai1clk = 0,
    #[doc = "1: PLLSAI2CLK clock is selected as SAIx clock"]
    Pllsai2clk = 1,
    #[doc = "2: PLLSAI3CLK clock is selected as SAIx clock"]
    Pllsai3clk = 2,
    #[doc = "3: External clock SAIx_EXTCLK clock selected as SAIx clock"]
    Sai2Extclk = 3,
    #[doc = "4: HSI clock selected as SAIx clock"]
    Hsi = 4,
}
impl From<SAI1SEL_A> for u8 {
    #[inline(always)]
    fn from(variant: SAI1SEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for SAI1SEL_A {
    type Ux = u8;
}
impl SAI1SEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SAI1SEL_A> {
        match self.bits {
            0 => Some(SAI1SEL_A::Pllsai1clk),
            1 => Some(SAI1SEL_A::Pllsai2clk),
            2 => Some(SAI1SEL_A::Pllsai3clk),
            3 => Some(SAI1SEL_A::Sai2Extclk),
            4 => Some(SAI1SEL_A::Hsi),
            _ => None,
        }
    }
    #[doc = "PLLSAI1CLK clock is selected as SAIx clock"]
    #[inline(always)]
    pub fn is_pllsai1clk(&self) -> bool {
        *self == SAI1SEL_A::Pllsai1clk
    }
    #[doc = "PLLSAI2CLK clock is selected as SAIx clock"]
    #[inline(always)]
    pub fn is_pllsai2clk(&self) -> bool {
        *self == SAI1SEL_A::Pllsai2clk
    }
    #[doc = "PLLSAI3CLK clock is selected as SAIx clock"]
    #[inline(always)]
    pub fn is_pllsai3clk(&self) -> bool {
        *self == SAI1SEL_A::Pllsai3clk
    }
    #[doc = "External clock SAIx_EXTCLK clock selected as SAIx clock"]
    #[inline(always)]
    pub fn is_sai2_extclk(&self) -> bool {
        *self == SAI1SEL_A::Sai2Extclk
    }
    #[doc = "HSI clock selected as SAIx clock"]
    #[inline(always)]
    pub fn is_hsi(&self) -> bool {
        *self == SAI1SEL_A::Hsi
    }
}
#[doc = "Field `SAI1SEL` writer - SAI1 clock source selection"]
pub type SAI1SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, SAI1SEL_A>;
impl<'a, REG, const O: u8> SAI1SEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "PLLSAI1CLK clock is selected as SAIx clock"]
    #[inline(always)]
    pub fn pllsai1clk(self) -> &'a mut crate::W<REG> {
        self.variant(SAI1SEL_A::Pllsai1clk)
    }
    #[doc = "PLLSAI2CLK clock is selected as SAIx clock"]
    #[inline(always)]
    pub fn pllsai2clk(self) -> &'a mut crate::W<REG> {
        self.variant(SAI1SEL_A::Pllsai2clk)
    }
    #[doc = "PLLSAI3CLK clock is selected as SAIx clock"]
    #[inline(always)]
    pub fn pllsai3clk(self) -> &'a mut crate::W<REG> {
        self.variant(SAI1SEL_A::Pllsai3clk)
    }
    #[doc = "External clock SAIx_EXTCLK clock selected as SAIx clock"]
    #[inline(always)]
    pub fn sai2_extclk(self) -> &'a mut crate::W<REG> {
        self.variant(SAI1SEL_A::Sai2Extclk)
    }
    #[doc = "HSI clock selected as SAIx clock"]
    #[inline(always)]
    pub fn hsi(self) -> &'a mut crate::W<REG> {
        self.variant(SAI1SEL_A::Hsi)
    }
}
// SAI2SEL (bits 8:10) has the same value set as SAI1SEL, so its reader and
// writer types are re-exports of the SAI1SEL ones.
#[doc = "Field `SAI2SEL` reader - SAI2 clock source selection"]
pub use SAI1SEL_R as SAI2SEL_R;
#[doc = "Field `SAI2SEL` writer - SAI2 clock source selection"]
pub use SAI1SEL_W as SAI2SEL_W;
// Generated accessors for the single-bit `DSISEL` field (bit 12).
#[doc = "Field `DSISEL` reader - clock selection"]
pub type DSISEL_R = crate::BitReader<DSISEL_A>;
#[doc = "clock selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DSISEL_A {
    #[doc = "0: DSI-PHY is selected as DSI byte lane clock source (usual case)"]
    Dsiphy = 0,
    #[doc = "1: PLLDSICLK is selected as DSI byte lane clock source, used in case DSI PLL and DSIPHY are off (low-power mode)"]
    Plldsiclk = 1,
}
impl From<DSISEL_A> for bool {
    #[inline(always)]
    fn from(variant: DSISEL_A) -> Self {
        variant as u8 != 0
    }
}
impl DSISEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DSISEL_A {
        match self.bits {
            false => DSISEL_A::Dsiphy,
            true => DSISEL_A::Plldsiclk,
        }
    }
    #[doc = "DSI-PHY is selected as DSI byte lane clock source (usual case)"]
    #[inline(always)]
    pub fn is_dsiphy(&self) -> bool {
        *self == DSISEL_A::Dsiphy
    }
    #[doc = "PLLDSICLK is selected as DSI byte lane clock source, used in case DSI PLL and DSIPHY are off (low-power mode)"]
    #[inline(always)]
    pub fn is_plldsiclk(&self) -> bool {
        *self == DSISEL_A::Plldsiclk
    }
}
#[doc = "Field `DSISEL` writer - clock selection"]
pub type DSISEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DSISEL_A>;
impl<'a, REG, const O: u8> DSISEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "DSI-PHY is selected as DSI byte lane clock source (usual case)"]
    #[inline(always)]
    pub fn dsiphy(self) -> &'a mut crate::W<REG> {
        self.variant(DSISEL_A::Dsiphy)
    }
    #[doc = "PLLDSICLK is selected as DSI byte lane clock source, used in case DSI PLL and DSIPHY are off (low-power mode)"]
    #[inline(always)]
    pub fn plldsiclk(self) -> &'a mut crate::W<REG> {
        self.variant(DSISEL_A::Plldsiclk)
    }
}
// Generated accessors for the single-bit `SDMMCSEL` field (bit 14).
#[doc = "Field `SDMMCSEL` reader - SDMMC clock selection"]
pub type SDMMCSEL_R = crate::BitReader<SDMMCSEL_A>;
#[doc = "SDMMC clock selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SDMMCSEL_A {
    #[doc = "0: 48 MHz clock is selected as SDMMC kernel clock"]
    Hsi48 = 0,
    #[doc = "1: PLLSAI3CLK is selected as SDMMC kernel clock, used in case higher frequency than 48MHz is needed (for SDR50 mode)"]
    Pllsai3clk = 1,
}
impl From<SDMMCSEL_A> for bool {
    #[inline(always)]
    fn from(variant: SDMMCSEL_A) -> Self {
        variant as u8 != 0
    }
}
impl SDMMCSEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SDMMCSEL_A {
        match self.bits {
            false => SDMMCSEL_A::Hsi48,
            true => SDMMCSEL_A::Pllsai3clk,
        }
    }
    #[doc = "48 MHz clock is selected as SDMMC kernel clock"]
    #[inline(always)]
    pub fn is_hsi48(&self) -> bool {
        *self == SDMMCSEL_A::Hsi48
    }
    #[doc = "PLLSAI3CLK is selected as SDMMC kernel clock, used in case higher frequency than 48MHz is needed (for SDR50 mode)"]
    #[inline(always)]
    pub fn is_pllsai3clk(&self) -> bool {
        *self == SDMMCSEL_A::Pllsai3clk
    }
}
#[doc = "Field `SDMMCSEL` writer - SDMMC clock selection"]
pub type SDMMCSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SDMMCSEL_A>;
impl<'a, REG, const O: u8> SDMMCSEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "48 MHz clock is selected as SDMMC kernel clock"]
    #[inline(always)]
    pub fn hsi48(self) -> &'a mut crate::W<REG> {
        self.variant(SDMMCSEL_A::Hsi48)
    }
    #[doc = "PLLSAI3CLK is selected as SDMMC kernel clock, used in case higher frequency than 48MHz is needed (for SDR50 mode)"]
    #[inline(always)]
    pub fn pllsai3clk(self) -> &'a mut crate::W<REG> {
        self.variant(SDMMCSEL_A::Pllsai3clk)
    }
}
// Generated accessors for the 2-bit `PLLSAI2DIVR` field (bits 16:17). All
// four values are defined, so `variant` is total (no `Option`) and the
// writer uses `FieldWriterSafe`.
#[doc = "Field `PLLSAI2DIVR` reader - division factor for LTDC clock"]
pub type PLLSAI2DIVR_R = crate::FieldReader<PLLSAI2DIVR_A>;
#[doc = "division factor for LTDC clock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum PLLSAI2DIVR_A {
    #[doc = "0: PLLSAI2DIVR = /2"]
    Div2 = 0,
    #[doc = "1: PLLSAI2DIVR = /4"]
    Div4 = 1,
    #[doc = "2: PLLSAI2DIVR = /8"]
    Div8 = 2,
    #[doc = "3: PLLSAI2DIVR = /16"]
    Div16 = 3,
}
impl From<PLLSAI2DIVR_A> for u8 {
    #[inline(always)]
    fn from(variant: PLLSAI2DIVR_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for PLLSAI2DIVR_A {
    type Ux = u8;
}
impl PLLSAI2DIVR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PLLSAI2DIVR_A {
        match self.bits {
            0 => PLLSAI2DIVR_A::Div2,
            1 => PLLSAI2DIVR_A::Div4,
            2 => PLLSAI2DIVR_A::Div8,
            3 => PLLSAI2DIVR_A::Div16,
            // The reader masks to 2 bits, so values above 3 cannot occur.
            _ => unreachable!(),
        }
    }
    #[doc = "PLLSAI2DIVR = /2"]
    #[inline(always)]
    pub fn is_div2(&self) -> bool {
        *self == PLLSAI2DIVR_A::Div2
    }
    #[doc = "PLLSAI2DIVR = /4"]
    #[inline(always)]
    pub fn is_div4(&self) -> bool {
        *self == PLLSAI2DIVR_A::Div4
    }
    #[doc = "PLLSAI2DIVR = /8"]
    #[inline(always)]
    pub fn is_div8(&self) -> bool {
        *self == PLLSAI2DIVR_A::Div8
    }
    #[doc = "PLLSAI2DIVR = /16"]
    #[inline(always)]
    pub fn is_div16(&self) -> bool {
        *self == PLLSAI2DIVR_A::Div16
    }
}
#[doc = "Field `PLLSAI2DIVR` writer - division factor for LTDC clock"]
pub type PLLSAI2DIVR_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, PLLSAI2DIVR_A>;
impl<'a, REG, const O: u8> PLLSAI2DIVR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "PLLSAI2DIVR = /2"]
    #[inline(always)]
    pub fn div2(self) -> &'a mut crate::W<REG> {
        self.variant(PLLSAI2DIVR_A::Div2)
    }
    #[doc = "PLLSAI2DIVR = /4"]
    #[inline(always)]
    pub fn div4(self) -> &'a mut crate::W<REG> {
        self.variant(PLLSAI2DIVR_A::Div4)
    }
    #[doc = "PLLSAI2DIVR = /8"]
    #[inline(always)]
    pub fn div8(self) -> &'a mut crate::W<REG> {
        self.variant(PLLSAI2DIVR_A::Div8)
    }
    #[doc = "PLLSAI2DIVR = /16"]
    #[inline(always)]
    pub fn div16(self) -> &'a mut crate::W<REG> {
        self.variant(PLLSAI2DIVR_A::Div16)
    }
}
// Generated accessors for the 2-bit `OSPISEL` field (bits 20:21); value 3 is
// reserved.
#[doc = "Field `OSPISEL` reader - Octospi clock source selection"]
pub type OSPISEL_R = crate::FieldReader<OSPISEL_A>;
#[doc = "Octospi clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum OSPISEL_A {
    #[doc = "0: System clock selected as OctoSPI kernel clock"]
    Sysclk = 0,
    #[doc = "1: MSI clock selected as OctoSPI kernel clock"]
    Msi = 1,
    #[doc = "2: PLL48M1CLK clock selected as OctoSPI kernel clock"]
    Pll48m1clk = 2,
}
impl From<OSPISEL_A> for u8 {
    #[inline(always)]
    fn from(variant: OSPISEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for OSPISEL_A {
    type Ux = u8;
}
impl OSPISEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<OSPISEL_A> {
        match self.bits {
            0 => Some(OSPISEL_A::Sysclk),
            1 => Some(OSPISEL_A::Msi),
            2 => Some(OSPISEL_A::Pll48m1clk),
            _ => None,
        }
    }
    #[doc = "System clock selected as OctoSPI kernel clock"]
    #[inline(always)]
    pub fn is_sysclk(&self) -> bool {
        *self == OSPISEL_A::Sysclk
    }
    #[doc = "MSI clock selected as OctoSPI kernel clock"]
    #[inline(always)]
    pub fn is_msi(&self) -> bool {
        *self == OSPISEL_A::Msi
    }
    #[doc = "PLL48M1CLK clock selected as OctoSPI kernel clock"]
    #[inline(always)]
    pub fn is_pll48m1clk(&self) -> bool {
        *self == OSPISEL_A::Pll48m1clk
    }
}
#[doc = "Field `OSPISEL` writer - Octospi clock source selection"]
pub type OSPISEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, OSPISEL_A>;
impl<'a, REG, const O: u8> OSPISEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "System clock selected as OctoSPI kernel clock"]
    #[inline(always)]
    pub fn sysclk(self) -> &'a mut crate::W<REG> {
        self.variant(OSPISEL_A::Sysclk)
    }
    #[doc = "MSI clock selected as OctoSPI kernel clock"]
    #[inline(always)]
    pub fn msi(self) -> &'a mut crate::W<REG> {
        self.variant(OSPISEL_A::Msi)
    }
    #[doc = "PLL48M1CLK clock selected as OctoSPI kernel clock"]
    #[inline(always)]
    pub fn pll48m1clk(self) -> &'a mut crate::W<REG> {
        self.variant(OSPISEL_A::Pll48m1clk)
    }
}
// Read-side accessors for CCIPR2: each method extracts its field by shifting
// to the documented bit offset and masking to the field width.
impl R {
    #[doc = "Bits 0:1 - I2C4 clock source selection"]
    #[inline(always)]
    pub fn i2c4sel(&self) -> I2C4SEL_R {
        I2C4SEL_R::new((self.bits & 3) as u8)
    }
    #[doc = "Bit 2 - Digital filter for sigma delta modulator kernel clock source selection"]
    #[inline(always)]
    pub fn dfsdmsel(&self) -> DFSDMSEL_R {
        DFSDMSEL_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bits 3:4 - Digital filter for sigma delta modulator audio clock source selection"]
    #[inline(always)]
    pub fn adfsdmsel(&self) -> ADFSDMSEL_R {
        ADFSDMSEL_R::new(((self.bits >> 3) & 3) as u8)
    }
    #[doc = "Bits 5:7 - SAI1 clock source selection"]
    #[inline(always)]
    pub fn sai1sel(&self) -> SAI1SEL_R {
        SAI1SEL_R::new(((self.bits >> 5) & 7) as u8)
    }
    #[doc = "Bits 8:10 - SAI2 clock source selection"]
    #[inline(always)]
    pub fn sai2sel(&self) -> SAI2SEL_R {
        SAI2SEL_R::new(((self.bits >> 8) & 7) as u8)
    }
    #[doc = "Bit 12 - clock selection"]
    #[inline(always)]
    pub fn dsisel(&self) -> DSISEL_R {
        DSISEL_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - SDMMC clock selection"]
    #[inline(always)]
    pub fn sdmmcsel(&self) -> SDMMCSEL_R {
        SDMMCSEL_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bits 16:17 - division factor for LTDC clock"]
    #[inline(always)]
    pub fn pllsai2divr(&self) -> PLLSAI2DIVR_R {
        PLLSAI2DIVR_R::new(((self.bits >> 16) & 3) as u8)
    }
    #[doc = "Bits 20:21 - Octospi clock source selection"]
    #[inline(always)]
    pub fn ospisel(&self) -> OSPISEL_R {
        OSPISEL_R::new(((self.bits >> 20) & 3) as u8)
    }
}
// Write-side accessors for CCIPR2: each method returns a field writer whose
// const parameter is the field's bit offset.
impl W {
    #[doc = "Bits 0:1 - I2C4 clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn i2c4sel(&mut self) -> I2C4SEL_W<CCIPR2_SPEC, 0> {
        I2C4SEL_W::new(self)
    }
    #[doc = "Bit 2 - Digital filter for sigma delta modulator kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn dfsdmsel(&mut self) -> DFSDMSEL_W<CCIPR2_SPEC, 2> {
        DFSDMSEL_W::new(self)
    }
    #[doc = "Bits 3:4 - Digital filter for sigma delta modulator audio clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn adfsdmsel(&mut self) -> ADFSDMSEL_W<CCIPR2_SPEC, 3> {
        ADFSDMSEL_W::new(self)
    }
    #[doc = "Bits 5:7 - SAI1 clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn sai1sel(&mut self) -> SAI1SEL_W<CCIPR2_SPEC, 5> {
        SAI1SEL_W::new(self)
    }
    #[doc = "Bits 8:10 - SAI2 clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn sai2sel(&mut self) -> SAI2SEL_W<CCIPR2_SPEC, 8> {
        SAI2SEL_W::new(self)
    }
    #[doc = "Bit 12 - clock selection"]
    #[inline(always)]
    #[must_use]
    pub fn dsisel(&mut self) -> DSISEL_W<CCIPR2_SPEC, 12> {
        DSISEL_W::new(self)
    }
    #[doc = "Bit 14 - SDMMC clock selection"]
    #[inline(always)]
    #[must_use]
    pub fn sdmmcsel(&mut self) -> SDMMCSEL_W<CCIPR2_SPEC, 14> {
        SDMMCSEL_W::new(self)
    }
    #[doc = "Bits 16:17 - division factor for LTDC clock"]
    #[inline(always)]
    #[must_use]
    pub fn pllsai2divr(&mut self) -> PLLSAI2DIVR_W<CCIPR2_SPEC, 16> {
        PLLSAI2DIVR_W::new(self)
    }
    #[doc = "Bits 20:21 - Octospi clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn ospisel(&mut self) -> OSPISEL_W<CCIPR2_SPEC, 20> {
        OSPISEL_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Peripherals independent clock configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccipr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccipr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CCIPR2_SPEC;
impl crate::RegisterSpec for CCIPR2_SPEC {
// CCIPR2 is a 32-bit register.
type Ux = u32;
}
#[doc = "`read()` method returns [`ccipr2::R`](R) reader structure"]
// Marker impl: enables `read()` on this register.
impl crate::Readable for CCIPR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ccipr2::W`](W) writer structure"]
// Marker impl: enables `write()`/`modify()`. Both field-modify bitmaps are
// zero for this register.
impl crate::Writable for CCIPR2_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CCIPR2 to value 0"]
// Marker impl: `reset()` restores the reset value, which is all bits clear.
impl crate::Resettable for CCIPR2_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
impl Solution {
    /// Returns the power set of `nums`.
    ///
    /// Built recursively: the subsets of `nums` are the subsets of its tail,
    /// followed by those same subsets with the head element appended. The
    /// output order is identical to the previous implementation.
    pub fn subsets(nums: Vec<i32>) -> Vec<Vec<i32>> {
        fn sets(xs: &[i32]) -> Vec<Vec<i32>> {
            if xs.is_empty() {
                // The only subset of the empty slice is the empty subset.
                return vec![vec![]];
            }
            let mut result = sets(&xs[1..]);
            // Clone each tail-subset once and append the head element,
            // instead of cloning the whole Vec<Vec<_>> and re-collecting
            // through a chained iterator.
            let with_head: Vec<Vec<i32>> = result
                .iter()
                .cloned()
                .map(|mut sub| {
                    sub.push(xs[0]);
                    sub
                })
                .collect();
            result.extend(with_head);
            result
        }
        sets(&nums)
    }
}
|
/// Returns true if `num` is an Armstrong (narcissistic) number: equal to the
/// sum of its base-10 digits each raised to the power of the digit count.
///
/// The sum is accumulated in `u64`. The previous `u32` accumulation could
/// overflow for 10-digit inputs (three 9s alone give 3 * 9^10 > u32::MAX),
/// panicking in debug builds; 10 * 9^10 fits comfortably in `u64`.
pub fn is_armstrong_number(num: u32) -> bool {
    // First pass: count the digits (0 counts as one digit).
    let mut digit_count = 1u32;
    let mut rest = num / 10;
    while rest > 0 {
        digit_count += 1;
        rest /= 10;
    }
    // Second pass: sum digit^digit_count without allocating a Vec.
    let mut sum: u64 = 0;
    let mut rest = num;
    loop {
        sum += u64::from(rest % 10).pow(digit_count);
        rest /= 10;
        if rest == 0 {
            break;
        }
    }
    u64::from(num) == sum
}
/// Splits `n` into its base-10 digits, most significant first.
/// `0` yields `[0]`.
fn number_to_vec(n: u32) -> Vec<u32> {
    let mut out = Vec::new();
    let mut rest = n;
    // Peel digits off the low end, then flip into most-significant-first order.
    loop {
        out.push(rest % 10);
        rest /= 10;
        if rest == 0 {
            break;
        }
    }
    out.reverse();
    out
}
|
//! Representation of LV2 ports.
use rayon::iter::{IntoParallelRefIterator, IterBridge, ParallelBridge};
use crate::bundle_model::{HasRelatedSet, NameRelation, ShortNameRelation, DocRelation, TypeRelation, LabelRelation, GenericRelation, IdentifiedBy, OptionallyIdentifiedBy};
use crate::bundle_model::impl_util::{KnownAndUnknownSet, DocumentedImpl, NamedImpl};
use crate::bundle_model::constants::{PortType, PortDesignation, PortChannel, PortProperty};
use crate::bundle_model::unknowns::{UnknownPortType, UnknownPortDesignation, UnknownPortProperty};
use crate::bundle_model::symbol::Symbol;
use crate::rdf_util::Literal;
use enumset::{EnumSet, EnumSetIter};
use std::collections::BTreeSet;
use ordered_float::OrderedFloat;
use num_bigint::BigUint;
/// Represents a scale point, i.e. a special marked value for a control port.
///
/// The value is stored as `OrderedFloat<f32>` so the type can derive `Eq`,
/// `Ord` and `Hash` despite holding a float.
#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub struct ScalePoint {
/// Labels to be displayed in association with the scale point.
labels: BTreeSet<Literal>,
/// Control port value for the scale point.
value: OrderedFloat<f32>
}
// Exposes the scale point's label set as a parallel iterator (label relation).
impl<'a> HasRelatedSet<'a, LabelRelation, Literal> for ScalePoint {
type BorrowedElt = &'a Literal;
type SetIter = <BTreeSet<Literal> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.labels.par_iter()
}
}
/// Representation of an LV2 port.
///
/// A port is always identifiable by its `index` (see the `IdentifiedBy<u32>`
/// impl below) and optionally by its `symbol`
/// (`OptionallyIdentifiedBy<Symbol>`). The remaining sets are exposed through
/// the `HasRelatedSet` impls below.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct PortInfo {
/// Set of LV2 port types to which the port belongs.
port_types: KnownAndUnknownSet<PortType, UnknownPortType>,
// TODO: Make a separate type for the port index so the IdentifiedBy impl is clearer?
/// Port index.
index: u32,
/// Port symbol.
symbol: Option<Symbol>,
/// Name and short name information.
named_impl: NamedImpl,
/// Documentation information.
documented_impl: DocumentedImpl,
/// Default value for the port.
default_value: Option<Literal>,
/// Soft maximum value for the port. Hosts *are* allowed to set the value higher.
max_value: Option<Literal>,
/// Soft minimum value for the port. Hosts *are* allowed to set the value lower.
min_value: Option<Literal>,
/// Scale points for the port, i.e. special marked values.
scale_pts: BTreeSet<ScalePoint>,
/// LV2 port properties that apply to the port.
port_props: KnownAndUnknownSet<PortProperty, UnknownPortProperty>,
/// Standard LV2 designations that apply to the port.
designations: EnumSet<PortDesignation>,
/// Standard LV2 channel designations that apply to the port.
channel_designations: EnumSet<PortChannel>,
/// Unknown LV2 designations (including channel designations) that apply to the port.
unknown_designations: BTreeSet<UnknownPortDesignation>,
/// Indicates how 'important' it is to display the port, for scenarios where not all ports can
/// be displayed. A higher value means higher priority.
display_priority: Option<BigUint>,
/// Indicates into how many evenly spaced steps the port's range should be divided, when using a
/// step-based controller such as arrow keys.
range_steps: Option<BigUint>,
/// Minimum allowed buffer size for the port, in bytes.
min_buffer_size: Option<BigUint>,
/// Can be used to specify that the port's buffer must be at least as large as the largest of
/// some other set of port buffer sizes. LV2 symbols are used to identify the ports in the set.
buffer_as_large_as: BTreeSet<Symbol>
}
// A port is always identifiable by its index.
impl IdentifiedBy<u32> for PortInfo {
fn id(&self) -> &u32 {
&self.index
}
}
// A port may additionally be identified by its LV2 symbol, when one is set.
impl OptionallyIdentifiedBy<Symbol> for PortInfo {
fn id(&self) -> Option<&Symbol> {
self.symbol.as_ref()
}
}
// Known (standard) port types, via a rayon bridge over the EnumSet iterator.
impl<'a> HasRelatedSet<'a, TypeRelation, PortType> for PortInfo {
type BorrowedElt = PortType;
type SetIter = IterBridge<EnumSetIter<PortType>>;
fn set_iter(&'a self) -> Self::SetIter {
self.port_types.knowns_iter()
}
}
// Port types that were not recognized as standard LV2 port types.
impl<'a> HasRelatedSet<'a, TypeRelation, UnknownPortType> for PortInfo {
type BorrowedElt = &'a UnknownPortType;
type SetIter = <BTreeSet<UnknownPortType> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.port_types.unknowns_iter()
}
}
// Name literals for the port, stored in the shared NamedImpl.
impl<'a> HasRelatedSet<'a, NameRelation, Literal> for PortInfo {
type BorrowedElt = &'a Literal;
type SetIter = <BTreeSet<Literal> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.named_impl.names.par_iter()
}
}
// Short-name literals for the port, stored in the shared NamedImpl.
impl<'a> HasRelatedSet<'a, ShortNameRelation, Literal> for PortInfo {
type BorrowedElt = &'a Literal;
type SetIter = <BTreeSet<Literal> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.named_impl.short_names.par_iter()
}
}
// Documentation literals for the port, stored in the shared DocumentedImpl.
impl<'a> HasRelatedSet<'a, DocRelation, Literal> for PortInfo {
type BorrowedElt = &'a Literal;
type SetIter = <BTreeSet<Literal> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.documented_impl.documentation.par_iter()
}
}
// Known standard designations applied to the port.
impl<'a> HasRelatedSet<'a, GenericRelation, PortDesignation> for PortInfo {
type BorrowedElt = PortDesignation;
type SetIter = IterBridge<EnumSetIter<PortDesignation>>;
fn set_iter(&'a self) -> Self::SetIter {
self.designations.iter().par_bridge()
}
}
// Known standard channel designations applied to the port.
impl<'a> HasRelatedSet<'a, GenericRelation, PortChannel> for PortInfo {
type BorrowedElt = PortChannel;
type SetIter = IterBridge<EnumSetIter<PortChannel>>;
fn set_iter(&'a self) -> Self::SetIter {
self.channel_designations.iter().par_bridge()
}
}
// Designations (including channel designations) not recognized as standard.
impl<'a> HasRelatedSet<'a, GenericRelation, UnknownPortDesignation> for PortInfo {
type BorrowedElt = &'a UnknownPortDesignation;
type SetIter = <BTreeSet<UnknownPortDesignation> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.unknown_designations.par_iter()
}
}
// Known LV2 port properties that apply to the port.
impl<'a> HasRelatedSet<'a, GenericRelation, PortProperty> for PortInfo {
type BorrowedElt = PortProperty;
type SetIter = IterBridge<EnumSetIter<PortProperty>>;
fn set_iter(&'a self) -> Self::SetIter {
self.port_props.knowns_iter()
}
}
// Port properties that were not recognized as standard LV2 port properties.
impl<'a> HasRelatedSet<'a, GenericRelation, UnknownPortProperty> for PortInfo {
type BorrowedElt = &'a UnknownPortProperty;
type SetIter = <BTreeSet<UnknownPortProperty> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.port_props.unknowns_iter()
}
}
// The port's scale points (special marked values).
impl<'a> HasRelatedSet<'a, GenericRelation, ScalePoint> for PortInfo {
type BorrowedElt = &'a ScalePoint;
type SetIter = <BTreeSet<ScalePoint> as IntoParallelRefIterator<'a>>::Iter;
fn set_iter(&'a self) -> Self::SetIter {
self.scale_pts.par_iter()
}
}
extern crate termion;
use termion::{clear, color, cursor, style};
use termion::event::Key;
use termion::input::TermRead;
use termion::raw::IntoRawMode;
use std::iter::FromIterator;
use std::collections::LinkedList;
use std::io::{Write, stdout, stdin};
use std::time::Duration;
use std::thread;
/// A single chat message: who sent it and its text.
struct Message {
sender: String,
data: String
}
/// Application state: the message history plus the characters typed so far.
struct Progression {
messages: LinkedList<Message>,
input_buffer: Vec<char>
}
impl Progression {
    /// Appends a message to the history.
    fn append_msg(&mut self, msg: Message) {
        self.messages.push_back(msg)
    }

    /// Handles one typed character: a newline commits the buffer as a message
    /// from "me" (empty input is discarded); anything else is appended to the
    /// buffer.
    fn input_char(&mut self, c: char) {
        match c {
            '\n' => {
                // Collect from a borrow instead of cloning the whole buffer.
                let data: String = self.input_buffer.iter().collect();
                if !data.is_empty() {
                    self.append_msg(Message { sender: "me".to_string(), data });
                }
                self.input_buffer.clear();
            },
            c => self.input_buffer.push(c)
        }
    }

    /// Removes the last typed character, if any.
    fn backspace(&mut self) {
        self.input_buffer.pop();
    }
}
/// Entry point: raw-mode key loop. Esc exits, Backspace deletes the last
/// character, any other character feeds the input buffer; the screen is
/// redrawn after every handled key.
fn main() {
    let mut progression = Progression { messages: LinkedList::new(), input_buffer: Vec::new() };
    let stdin = stdin();
    let mut stdout = stdout().into_raw_mode().unwrap();
    draw_progression(&mut stdout, &progression);
    for c in stdin.keys() {
        match c.unwrap() {
            Key::Esc => break,
            Key::Char(c) => progression.input_char(c),
            Key::Backspace => progression.backspace(),
            _ => continue
        };
        draw_progression(&mut stdout, &progression);
    }
    // Surface a failed final write instead of silently dropping the Result.
    writeln!(stdout, "Fin!\r").unwrap();
}
/// Clears the screen, then redraws the message history and the input line.
/// Uses the explicit `dyn Write` trait-object syntax (`&mut Write` is the
/// deprecated bare form).
fn draw_progression(w: &mut dyn Write, p: &Progression) {
    clear(w);
    draw_messages(w, p.messages.iter());
    draw_input_buffer(w, p.input_buffer.as_slice());
}
fn clear(w: &mut Write) {
write!(w, "{}\r", termion::clear::All);
w.flush().unwrap();
}
/// Writes each message as "sender: data" (sender in bold), starting from the
/// top-left corner of the screen.
fn draw_messages<'a, I>(w: &mut dyn Write, messages: I)
where
    I: Iterator<Item = &'a Message>,
{
    // Drawing is best-effort; ignored Results are made explicit with `let _ =`.
    let _ = write!(w, "{}", cursor::Goto(1, 1));
    for msg in messages {
        let _ = writeln!(
            w,
            "{bold}{}:{reset} {}\r",
            msg.sender,
            msg.data,
            bold = style::Bold,
            reset = style::Reset,
        );
    }
}
fn draw_input_buffer(w: &mut Write, chars: &[char]) {
writeln!(w, "{goto}{fg_black}{bg_white}Input buffer. Press [ESC] to exit...{reset}\r",
fg_black = color::Fg(color::Black),
bg_white = color::Bg(color::White),
goto = cursor::Goto(1, 30),
reset = style::Reset,
);
for c in chars {
write!(w, "{}", c);
}
w.flush().unwrap();
}
|
/// The kinds of section, stored as their numeric ids (0 through 11).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum SectionType {
    Named = 0,
    Type = 1,
    Import = 2,
    Function = 3,
    Table = 4,
    Memory = 5,
    Global = 6,
    Export = 7,
    Start = 8,
    Element = 9,
    Code = 10,
    Data = 11
}

impl SectionType {
    /// Decodes a raw byte into a section type; `None` for any value above 11.
    pub fn from_int(v: u8) -> Option<SectionType> {
        match v {
            0 => Some(SectionType::Named),
            1 => Some(SectionType::Type),
            2 => Some(SectionType::Import),
            3 => Some(SectionType::Function),
            4 => Some(SectionType::Table),
            5 => Some(SectionType::Memory),
            6 => Some(SectionType::Global),
            7 => Some(SectionType::Export),
            8 => Some(SectionType::Start),
            9 => Some(SectionType::Element),
            10 => Some(SectionType::Code),
            11 => Some(SectionType::Data),
            _ => None,
        }
    }
}
/// Value types, stored as their numeric ids (0x7c through 0x7f).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum ValueType {
    I32 = 0x7f,
    I64 = 0x7e,
    F32 = 0x7d,
    F64 = 0x7c,
}

impl ValueType {
    /// Decodes a raw byte into a value type; `None` outside 0x7c..=0x7f.
    pub fn from_int(v: u8) -> Option<ValueType> {
        match v {
            0x7f => Some(ValueType::I32),
            0x7e => Some(ValueType::I64),
            0x7d => Some(ValueType::F32),
            0x7c => Some(ValueType::F64),
            _ => None,
        }
    }
}
/// Kinds of importable/exportable entities, stored as their numeric ids.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum ExternalKind {
    Function = 0,
    Table = 1,
    Memory = 2,
    Global = 3,
}

impl ExternalKind {
    /// Decodes a raw byte into an external kind; `None` for values above 3.
    pub fn from_int(v: u8) -> Option<ExternalKind> {
        match v {
            0 => Some(ExternalKind::Function),
            1 => Some(ExternalKind::Table),
            2 => Some(ExternalKind::Memory),
            3 => Some(ExternalKind::Global),
            _ => None,
        }
    }
}
/// Inline signature types: the value types plus the empty marker (0x40).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum InlineSignatureType {
    I32 = 0x7f,
    I64 = 0x7e,
    F32 = 0x7d,
    F64 = 0x7c,
    Empty = 0x40,
}

impl InlineSignatureType {
    /// Decodes a raw byte; `None` for anything but 0x40 and 0x7c..=0x7f.
    pub fn from_int(v: u8) -> Option<InlineSignatureType> {
        match v {
            0x7f => Some(InlineSignatureType::I32),
            0x7e => Some(InlineSignatureType::I64),
            0x7d => Some(InlineSignatureType::F32),
            0x7c => Some(InlineSignatureType::F64),
            0x40 => Some(InlineSignatureType::Empty),
            _ => None,
        }
    }
}
/// Language types: value types plus function/anyfunc and the void marker.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum LanguageType {
    I32 = 0x7f,
    I64 = 0x7e,
    F32 = 0x7d,
    F64 = 0x7c,
    AnyFunc = 0x70,
    Func = 0x60,
    /// Empty block type.
    Void = 0x40,
}

impl LanguageType {
    /// Decodes a raw byte into a language type; `None` for unknown values.
    pub fn from_int(v: u8) -> Option<LanguageType> {
        match v {
            0x7f => Some(LanguageType::I32),
            0x7e => Some(LanguageType::I64),
            0x7d => Some(LanguageType::F32),
            0x7c => Some(LanguageType::F64),
            0x70 => Some(LanguageType::AnyFunc),
            0x60 => Some(LanguageType::Func),
            0x40 => Some(LanguageType::Void),
            _ => None,
        }
    }
}
|
/// Computes the gamma and epsilon rates for a diagnostic report.
///
/// For each bit position (MSB first), gamma receives a 1 when strictly more
/// than half of the numbers have a '1' there; otherwise epsilon receives the
/// 1 — ties therefore go to epsilon, exactly as in the original inline logic.
fn power_rates(report: &[&str]) -> (u64, u64) {
    let length = report.len();
    let digits = report[0].len();
    // ones_per_bit[i] = how many report entries have a '1' at position i.
    let mut ones_per_bit: Vec<usize> = vec![0; digits];
    for number in report {
        // `take(digits)` mirrors the original zip with 0..digits: extra
        // characters in a longer entry are ignored.
        for (bit, val) in number.chars().take(digits).enumerate() {
            if val == '1' {
                ones_per_bit[bit] += 1;
            }
        }
    }
    let mut gamma: u64 = 0;
    let mut epsilon: u64 = 0;
    for ones in ones_per_bit {
        gamma <<= 1;
        epsilon <<= 1;
        if ones > length / 2 {
            gamma += 1;
        } else {
            epsilon += 1;
        }
    }
    (gamma, epsilon)
}

/// Solves the Day 03 Part 1 puzzle with respect to the given input.
/// Prints the product of the gamma and epsilon rates.
pub fn part_1(input: String) {
    let report: Vec<&str> = input.split_whitespace().collect();
    let (gamma, epsilon) = power_rates(&report);
    println!("{}", gamma * epsilon);
}
/// Repeatedly filters `report` by a per-bit criterion to find the oxygen
/// generator and CO2 scrubber ratings, returned as the surviving binary
/// strings in that order.
///
/// `keep_zeros(zeros, ones)` decides, per bit position, whether candidates
/// with a '0' there are kept (otherwise those with a '1' are kept).
fn life_support_ratings<'a>(report: &[&'a str]) -> (&'a str, &'a str) {
    fn rating<'a>(report: &[&'a str], keep_zeros: fn(usize, usize) -> bool) -> &'a str {
        let mut candidates: Vec<&'a str> = report.to_vec();
        let mut digit = 0;
        while candidates.len() > 1 {
            let zeros = candidates
                .iter()
                .filter(|num| num.chars().nth(digit).unwrap() == '0')
                .count();
            let ones = candidates.len() - zeros;
            let keep = if keep_zeros(zeros, ones) { '0' } else { '1' };
            // In-place O(n) filter; replaces the old drain+filter+collect.
            candidates.retain(|num| num.chars().nth(digit).unwrap() == keep);
            digit += 1;
        }
        candidates[0]
    }
    // Oxygen: keep the most common bit, ties favouring '1'.
    // CO2: keep the least common bit, ties favouring '0'.
    (
        rating(report, |zeros, ones| ones < zeros),
        rating(report, |zeros, ones| ones >= zeros),
    )
}

/// Solves the Day 03 Part 2 puzzle with respect to the given input.
/// Prints the product of the oxygen generator and CO2 scrubber ratings.
pub fn part_2(input: String) {
    let report: Vec<&str> = input.split_whitespace().collect();
    let (oxy_str, co2_str) = life_support_ratings(&report);
    let oxy_int = usize::from_str_radix(oxy_str, 2).unwrap();
    let co2_int = usize::from_str_radix(co2_str, 2).unwrap();
    println!("{}", oxy_int * co2_int);
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Aggregated error type for every operation in this generated client. Each
// variant wraps (via #[from]) the error type of exactly one operation module,
// named OperationGroup_Operation to match the service's operation ids.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
AdvancedThreatProtection_Get(#[from] advanced_threat_protection::get::Error),
#[error(transparent)]
AdvancedThreatProtection_Create(#[from] advanced_threat_protection::create::Error),
#[error(transparent)]
AutoProvisioningSettings_List(#[from] auto_provisioning_settings::list::Error),
#[error(transparent)]
AutoProvisioningSettings_Get(#[from] auto_provisioning_settings::get::Error),
#[error(transparent)]
AutoProvisioningSettings_Create(#[from] auto_provisioning_settings::create::Error),
#[error(transparent)]
Compliances_List(#[from] compliances::list::Error),
#[error(transparent)]
Compliances_Get(#[from] compliances::get::Error),
#[error(transparent)]
DeviceSecurityGroups_List(#[from] device_security_groups::list::Error),
#[error(transparent)]
DeviceSecurityGroups_Get(#[from] device_security_groups::get::Error),
#[error(transparent)]
DeviceSecurityGroups_CreateOrUpdate(#[from] device_security_groups::create_or_update::Error),
#[error(transparent)]
DeviceSecurityGroups_Delete(#[from] device_security_groups::delete::Error),
#[error(transparent)]
InformationProtectionPolicies_Get(#[from] information_protection_policies::get::Error),
#[error(transparent)]
InformationProtectionPolicies_CreateOrUpdate(#[from] information_protection_policies::create_or_update::Error),
#[error(transparent)]
InformationProtectionPolicies_List(#[from] information_protection_policies::list::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalytics_GetAll(#[from] io_t_security_solutions_analytics::get_all::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalytics_GetDefault(#[from] io_t_security_solutions_analytics::get_default::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalyticsAggregatedAlerts_List(#[from] io_t_security_solutions_analytics_aggregated_alerts::list::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalyticsAggregatedAlert_Get(#[from] io_t_security_solutions_analytics_aggregated_alert::get::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalyticsAggregatedAlert_Dismiss(#[from] io_t_security_solutions_analytics_aggregated_alert::dismiss::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalyticsRecommendation_Get(#[from] io_t_security_solutions_analytics_recommendation::get::Error),
#[error(transparent)]
IoTSecuritySolutionsAnalyticsRecommendations_List(#[from] io_t_security_solutions_analytics_recommendations::list::Error),
#[error(transparent)]
IoTSecuritySolutions_List(#[from] io_t_security_solutions::list::Error),
#[error(transparent)]
IoTSecuritySolutionsResourceGroup_List(#[from] io_t_security_solutions_resource_group::list::Error),
#[error(transparent)]
IotSecuritySolution_Get(#[from] iot_security_solution::get::Error),
#[error(transparent)]
IotSecuritySolution_Create(#[from] iot_security_solution::create::Error),
#[error(transparent)]
IotSecuritySolution_Update(#[from] iot_security_solution::update::Error),
#[error(transparent)]
IotSecuritySolution_Delete(#[from] iot_security_solution::delete::Error),
#[error(transparent)]
Pricings_List(#[from] pricings::list::Error),
#[error(transparent)]
Pricings_ListByResourceGroup(#[from] pricings::list_by_resource_group::Error),
#[error(transparent)]
Pricings_GetSubscriptionPricing(#[from] pricings::get_subscription_pricing::Error),
#[error(transparent)]
Pricings_UpdateSubscriptionPricing(#[from] pricings::update_subscription_pricing::Error),
#[error(transparent)]
Pricings_GetResourceGroupPricing(#[from] pricings::get_resource_group_pricing::Error),
#[error(transparent)]
Pricings_CreateOrUpdateResourceGroupPricing(#[from] pricings::create_or_update_resource_group_pricing::Error),
#[error(transparent)]
SecurityContacts_List(#[from] security_contacts::list::Error),
#[error(transparent)]
SecurityContacts_Get(#[from] security_contacts::get::Error),
#[error(transparent)]
SecurityContacts_Create(#[from] security_contacts::create::Error),
#[error(transparent)]
SecurityContacts_Update(#[from] security_contacts::update::Error),
#[error(transparent)]
SecurityContacts_Delete(#[from] security_contacts::delete::Error),
#[error(transparent)]
Settings_List(#[from] settings::list::Error),
#[error(transparent)]
Settings_Get(#[from] settings::get::Error),
#[error(transparent)]
Settings_Update(#[from] settings::update::Error),
#[error(transparent)]
WorkspaceSettings_List(#[from] workspace_settings::list::Error),
#[error(transparent)]
WorkspaceSettings_Get(#[from] workspace_settings::get::Error),
#[error(transparent)]
WorkspaceSettings_Create(#[from] workspace_settings::create::Error),
#[error(transparent)]
WorkspaceSettings_Update(#[from] workspace_settings::update::Error),
#[error(transparent)]
WorkspaceSettings_Delete(#[from] workspace_settings::delete::Error),
}
// Generated client for the Microsoft.Security advancedThreatProtectionSettings
// resource: `get` (GET) and `create` (PUT) against an arbitrary resource id.
pub mod advanced_threat_protection {
use super::{models, API_VERSION};
// Fetches the advanced threat protection setting named `setting_name`
// attached to the resource identified by `resource_id`.
pub async fn get(
operation_config: &crate::OperationConfig,
resource_id: &str,
setting_name: &str,
) -> std::result::Result<models::AdvancedThreatProtectionSetting, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Security/advancedThreatProtectionSettings/{}",
operation_config.base_path(),
resource_id,
setting_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
// 200 deserializes to the setting model; any other status is deserialized
// as a CloudError and surfaced via DefaultResponse.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AdvancedThreatProtectionSetting =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `get`. DefaultResponse carries the service's CloudError
// payload for non-OK statuses; the other variants are client-side failures.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// Creates or updates the setting by PUT-ing the supplied model as JSON.
pub async fn create(
operation_config: &crate::OperationConfig,
resource_id: &str,
setting_name: &str,
advanced_threat_protection_setting: &models::AdvancedThreatProtectionSetting,
) -> std::result::Result<models::AdvancedThreatProtectionSetting, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Security/advancedThreatProtectionSettings/{}",
operation_config.base_path(),
resource_id,
setting_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(advanced_threat_protection_setting).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
// 200 deserializes to the setting model; anything else becomes DefaultResponse.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AdvancedThreatProtectionSetting =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `create`; same shape as `get::Error`.
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
// Generated client for Microsoft.Security autoProvisioningSettings at
// subscription scope: `list` (GET collection), `get` (GET) and `create` (PUT).
pub mod auto_provisioning_settings {
use super::{models, API_VERSION};
// Lists every auto provisioning setting in the subscription.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::AutoProvisioningSettingList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/autoProvisioningSettings",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
// 200 deserializes to the list model; anything else becomes DefaultResponse.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AutoProvisioningSettingList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `list`.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// Fetches a single auto provisioning setting by name.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
setting_name: &str,
) -> std::result::Result<models::AutoProvisioningSetting, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/autoProvisioningSettings/{}",
operation_config.base_path(),
subscription_id,
setting_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
// 200 deserializes to the setting model; anything else becomes DefaultResponse.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AutoProvisioningSetting =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `get`.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// Creates or updates a setting by PUT-ing `setting` as JSON.
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
setting_name: &str,
setting: &models::AutoProvisioningSetting,
) -> std::result::Result<models::AutoProvisioningSetting, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/autoProvisioningSettings/{}",
operation_config.base_path(),
subscription_id,
setting_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(setting).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
// 200 deserializes to the setting model; anything else becomes DefaultResponse.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AutoProvisioningSetting =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `create`.
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod compliances {
use super::{models, API_VERSION};
// Lists Compliance resources under `scope`; the scope string is spliced
// directly into the URL path between the base path and the provider segment.
pub async fn list(operation_config: &crate::OperationConfig, scope: &str) -> std::result::Result<models::ComplianceList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Security/compliances",
operation_config.base_path(),
scope
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
// 200 deserializes to the list model; anything else becomes DefaultResponse.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ComplianceList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
scope: &str,
compliance_name: &str,
) -> std::result::Result<models::Compliance, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Security/compliances/{}",
operation_config.base_path(),
scope,
compliance_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Compliance =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// CRUD operations on Microsoft.Security `deviceSecurityGroups`, addressed
/// relative to an arbitrary `resource_id` path segment inserted verbatim into
/// the URL (presumably a full ARM resource id — TODO confirm).
pub mod device_security_groups {
    use super::{models, API_VERSION};
    /// Lists device security groups under `resource_id`.
    ///
    /// Issues `GET {base_path}/{resource_id}/providers/Microsoft.Security/deviceSecurityGroups`.
    /// A `200` body deserializes to `models::DeviceSecurityGroupList`; any other
    /// status is surfaced as `list::Error::DefaultResponse` with the parsed CloudError.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        resource_id: &str,
    ) -> std::result::Result<models::DeviceSecurityGroupList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/deviceSecurityGroups",
            operation_config.base_path(),
            resource_id
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth only when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET: empty request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceSecurityGroupList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // Unused by `list` (empty request body); kept for the uniform shape.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches the device security group `device_security_group_name` under `resource_id`.
    ///
    /// `GET .../deviceSecurityGroups/{device_security_group_name}`; `200` →
    /// `models::DeviceSecurityGroup`, otherwise `get::Error::DefaultResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_id: &str,
        device_security_group_name: &str,
    ) -> std::result::Result<models::DeviceSecurityGroup, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/deviceSecurityGroups/{}",
            operation_config.base_path(),
            resource_id,
            device_security_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceSecurityGroup =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a device security group.
    ///
    /// Issues `PUT .../deviceSecurityGroups/{device_security_group_name}` with
    /// `device_security_group` serialized as the JSON body. `200` and `201` are
    /// both success and are distinguished by `create_or_update::Response`
    /// (`Ok200` for an update, `Created201` for a creation — as per the status
    /// codes the service returns).
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_id: &str,
        device_security_group_name: &str,
        device_security_group: &models::DeviceSecurityGroup,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/deviceSecurityGroups/{}",
            operation_config.base_path(),
            resource_id,
            device_security_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // PUT carries a JSON payload, unlike the GET/DELETE operations above.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(device_security_group).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceSecurityGroup = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceSecurityGroup = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Success and failure types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        // Both variants carry the resulting resource; they differ only in the
        // HTTP status the service answered with.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::DeviceSecurityGroup),
            Created201(models::DeviceSecurityGroup),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes the device security group `device_security_group_name`.
    ///
    /// `DELETE .../deviceSecurityGroups/{device_security_group_name}`. A `200`
    /// or `204` both count as success (`delete::Response::Ok200` /
    /// `NoContent204`); their bodies are not read.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_id: &str,
        device_security_group_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/deviceSecurityGroups/{}",
            operation_config.base_path(),
            resource_id,
            device_security_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Success and failure types for [`delete`].
    pub mod delete {
        use super::{models, API_VERSION};
        // Deletion succeeded (200) vs. nothing to delete / no body (204).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations on Microsoft.Security `informationProtectionPolicies`, rooted at
/// an arbitrary `scope` path segment inserted verbatim into the URL.
pub mod information_protection_policies {
    use super::{models, API_VERSION};
    /// Fetches the information protection policy `information_protection_policy_name`
    /// under `scope`.
    ///
    /// `GET {base_path}/{scope}/providers/Microsoft.Security/informationProtectionPolicies/{name}`;
    /// `200` → `models::InformationProtectionPolicy`, any other status →
    /// `get::Error::DefaultResponse` carrying the parsed CloudError.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        scope: &str,
        information_protection_policy_name: &str,
    ) -> std::result::Result<models::InformationProtectionPolicy, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/informationProtectionPolicies/{}",
            operation_config.base_path(),
            scope,
            information_protection_policy_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth only when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::InformationProtectionPolicy =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates an information protection policy.
    ///
    /// `PUT .../informationProtectionPolicies/{name}` with
    /// `information_protection_policy` serialized as the JSON body. `200` and
    /// `201` both succeed and are distinguished by `create_or_update::Response`.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        scope: &str,
        information_protection_policy_name: &str,
        information_protection_policy: &models::InformationProtectionPolicy,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/informationProtectionPolicies/{}",
            operation_config.base_path(),
            scope,
            information_protection_policy_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // PUT carries a JSON payload.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(information_protection_policy).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::InformationProtectionPolicy = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::InformationProtectionPolicy = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Success and failure types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        // Both variants carry the resulting policy; they differ only in the
        // HTTP status returned by the service.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::InformationProtectionPolicy),
            Created201(models::InformationProtectionPolicy),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists information protection policies under `scope`.
    ///
    /// `GET .../informationProtectionPolicies`; `200` →
    /// `models::InformationProtectionPolicyList`.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        scope: &str,
    ) -> std::result::Result<models::InformationProtectionPolicyList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Security/informationProtectionPolicies",
            operation_config.base_path(),
            scope
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::InformationProtectionPolicyList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Analytics-model operations for IoT security solutions, addressed by
/// subscription, resource group, and solution name.
pub mod io_t_security_solutions_analytics {
    use super::{models, API_VERSION};
    /// Lists all analytics models for the given IoT security solution.
    ///
    /// `GET {base_path}/subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.Security/iotSecuritySolutions/{solution}/analyticsModels`;
    /// `200` → `models::IoTSecuritySolutionAnalyticsModelList`, any other status
    /// → `get_all::Error::DefaultResponse` with the parsed CloudError.
    pub async fn get_all(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        solution_name: &str,
    ) -> std::result::Result<models::IoTSecuritySolutionAnalyticsModelList, get_all::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            solution_name
        );
        let mut url = url::Url::parse(url_str).map_err(get_all::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth only when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_all::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_all::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_all::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::IoTSecuritySolutionAnalyticsModelList =
                    serde_json::from_slice(rsp_body).map_err(|source| get_all::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| get_all::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get_all::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`get_all`].
    pub mod get_all {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches the `default` analytics model for the given IoT security solution.
    ///
    /// Same endpoint as [`get_all`] with a trailing `/default` path segment;
    /// `200` → `models::IoTSecuritySolutionAnalyticsModel`.
    pub async fn get_default(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        solution_name: &str,
    ) -> std::result::Result<models::IoTSecuritySolutionAnalyticsModel, get_default::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels/default",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            solution_name
        );
        let mut url = url::Url::parse(url_str).map_err(get_default::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_default::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_default::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_default::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::IoTSecuritySolutionAnalyticsModel =
                    serde_json::from_slice(rsp_body).map_err(|source| get_default::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| get_default::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get_default::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`get_default`].
    pub mod get_default {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Listing of aggregated alerts under the `default` analytics model of an IoT
/// security solution.
pub mod io_t_security_solutions_analytics_aggregated_alerts {
    use super::{models, API_VERSION};
    /// Lists aggregated alerts for the given IoT security solution.
    ///
    /// `GET .../iotSecuritySolutions/{solution}/analyticsModels/default/aggregatedAlerts`.
    /// `top`, when `Some`, is forwarded as the `$top` query parameter to cap the
    /// number of results. `200` → `models::IoTSecurityAggregatedAlertList`; any
    /// other status → `list::Error::DefaultResponse` with the parsed CloudError.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        solution_name: &str,
        top: Option<i64>,
    ) -> std::result::Result<models::IoTSecurityAggregatedAlertList, list::Error> {
        let http_client = operation_config.http_client();
        // NOTE: rustfmt collapsed this over-long generated format! onto one line.
        let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels/default/aggregatedAlerts" , operation_config . base_path () , subscription_id , resource_group_name , solution_name) ;
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth only when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // OData-style paging hint, only sent when the caller provided one.
        if let Some(top) = top {
            url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::IoTSecurityAggregatedAlertList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Failure modes of [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod io_t_security_solutions_analytics_aggregated_alert {
use super::{models, API_VERSION};
/// Fetches a single aggregated alert by name from the `default` analytics
/// model of the given IoT security solution.
///
/// `GET .../iotSecuritySolutions/{solution}/analyticsModels/default/aggregatedAlerts/{aggregated_alert_name}`
/// with the `api-version` query parameter; `200` →
/// `models::IoTSecurityAggregatedAlert`, any other status →
/// `get::Error::DefaultResponse` carrying the parsed CloudError.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    solution_name: &str,
    aggregated_alert_name: &str,
) -> std::result::Result<models::IoTSecurityAggregatedAlert, get::Error> {
    let http_client = operation_config.http_client();
    // NOTE: rustfmt collapsed this over-long generated format! onto one line.
    let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels/default/aggregatedAlerts/{}" , operation_config . base_path () , subscription_id , resource_group_name , solution_name , aggregated_alert_name) ;
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth only when a token credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET: empty request body; URI set after query pairs are appended.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::IoTSecurityAggregatedAlert =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::CloudError =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn dismiss(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
aggregated_alert_name: &str,
) -> std::result::Result<(), dismiss::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels/default/aggregatedAlerts/{}/dismiss" , operation_config . base_path () , subscription_id , resource_group_name , solution_name , aggregated_alert_name) ;
let mut url = url::Url::parse(url_str).map_err(dismiss::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(dismiss::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(dismiss::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(dismiss::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| dismiss::Error::DeserializeError(source, rsp_body.clone()))?;
Err(dismiss::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod dismiss {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operation for a single aggregated IoT security recommendation of a solution
/// (`.../iotSecuritySolutions/{solution}/analyticsModels/default/aggregatedRecommendations/{name}`).
pub mod io_t_security_solutions_analytics_recommendation {
use super::{models, API_VERSION};
/// Fetches one aggregated recommendation by name.
///
/// A 200 response is deserialized into
/// `models::IoTSecurityAggregatedRecommendation`; any other status becomes
/// `get::Error::DefaultResponse` carrying the parsed `CloudError`.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
aggregated_recommendation_name: &str,
) -> std::result::Result<models::IoTSecurityAggregatedRecommendation, get::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels/default/aggregatedRecommendations/{}" , operation_config . base_path () , subscription_id , resource_group_name , solution_name , aggregated_recommendation_name) ;
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecurityAggregatedRecommendation =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get`].
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Listing of aggregated IoT security recommendations for a solution.
pub mod io_t_security_solutions_analytics_recommendations {
use super::{models, API_VERSION};
/// Lists the aggregated recommendations of an IoT Security solution.
///
/// `top`, when given, is forwarded as the OData `$top` query parameter to cap
/// the number of results per page. A 200 response is deserialized into
/// `models::IoTSecurityAggregatedRecommendationList`; any other status becomes
/// `list::Error::DefaultResponse` carrying the parsed `CloudError`.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
top: Option<i64>,
) -> std::result::Result<models::IoTSecurityAggregatedRecommendationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}/analyticsModels/default/aggregatedRecommendations" , operation_config . base_path () , subscription_id , resource_group_name , solution_name) ;
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional page-size cap, serialized as `$top`.
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecurityAggregatedRecommendationList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list`].
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod io_t_security_solutions {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
filter: Option<&str>,
) -> std::result::Result<models::IoTSecuritySolutionsList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/iotSecuritySolutions",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecuritySolutionsList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Resource-group-scope listing of IoT Security solutions.
pub mod io_t_security_solutions_resource_group {
use super::{models, API_VERSION};
/// Lists the IoT Security solutions in a resource group, optionally narrowed
/// by an OData `$filter` expression.
///
/// A 200 response is deserialized into `models::IoTSecuritySolutionsList`;
/// any other status becomes `list::Error::DefaultResponse` carrying the
/// parsed `CloudError`.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::IoTSecuritySolutionsList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecuritySolutionsList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list`].
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// CRUD operations on a single IoT Security solution resource
/// (`.../resourceGroups/{rg}/providers/Microsoft.Security/iotSecuritySolutions/{solution}`).
pub mod iot_security_solution {
use super::{models, API_VERSION};
/// Fetches an IoT Security solution by name.
///
/// A 200 response is deserialized into `models::IoTSecuritySolutionModel`;
/// any other status becomes `get::Error::DefaultResponse` carrying the
/// parsed `CloudError`.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
) -> std::result::Result<models::IoTSecuritySolutionModel, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
solution_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecuritySolutionModel =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get`].
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates or replaces an IoT Security solution via PUT.
///
/// The service distinguishes an update (200) from a fresh creation (201);
/// both carry the resulting `models::IoTSecuritySolutionModel` and are
/// reported through `create::Response`. Any other status becomes
/// `create::Error::DefaultResponse`.
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
iot_security_solution_data: &models::IoTSecuritySolutionModel,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
solution_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// PUT carries the solution model as a JSON body.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(iot_security_solution_data).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecuritySolutionModel =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecuritySolutionModel =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Success and error types for [`create`].
pub mod create {
use super::{models, API_VERSION};
// Both variants carry the resulting solution model; the variant records
// whether the service answered 200 (updated) or 201 (created).
#[derive(Debug)]
pub enum Response {
Ok200(models::IoTSecuritySolutionModel),
Created201(models::IoTSecuritySolutionModel),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Applies a partial update to an IoT Security solution via PATCH.
///
/// A 200 response is deserialized into the updated
/// `models::IoTSecuritySolutionModel`; any other status becomes
/// `update::Error::DefaultResponse`.
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
update_iot_security_solution_data: &models::UpdateIotSecuritySolutionData,
) -> std::result::Result<models::IoTSecuritySolutionModel, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
solution_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// PATCH carries the partial-update payload as a JSON body.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(update_iot_security_solution_data).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IoTSecuritySolutionModel =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`update`].
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Deletes an IoT Security solution.
///
/// 200 means the solution was deleted; 204 means there was nothing to delete.
/// Both are reported through `delete::Response`; any other status becomes
/// `delete::Error::DefaultResponse`.
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
solution_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/iotSecuritySolutions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
solution_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Success and error types for [`delete`].
pub mod delete {
use super::{models, API_VERSION};
// Distinguishes an actual deletion (200) from a no-op on a missing resource (204).
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod pricings {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::PricingList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/pricings",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PricingList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list`].
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service answered with a non-200 status; `value` is the parsed error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists the Security Center pricing configurations scoped to one resource group.
///
/// A 200 response is deserialized into `models::PricingList`; any other
/// status becomes `list_by_resource_group::Error::DefaultResponse` carrying
/// the parsed `CloudError`.
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::PricingList, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/pricings",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PricingList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list_by_resource_group`].
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
// Service answered with a non-200 status; `value` is the parsed error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_subscription_pricing(
operation_config: &crate::OperationConfig,
subscription_id: &str,
pricing_name: &str,
) -> std::result::Result<models::Pricing, get_subscription_pricing::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/pricings/{}",
operation_config.base_path(),
subscription_id,
pricing_name
);
let mut url = url::Url::parse(url_str).map_err(get_subscription_pricing::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_subscription_pricing::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_subscription_pricing::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_subscription_pricing::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Pricing = serde_json::from_slice(rsp_body)
.map_err(|source| get_subscription_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| get_subscription_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_subscription_pricing::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_subscription_pricing {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update_subscription_pricing(
operation_config: &crate::OperationConfig,
subscription_id: &str,
pricing_name: &str,
pricing: &models::Pricing,
) -> std::result::Result<models::Pricing, update_subscription_pricing::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/pricings/{}",
operation_config.base_path(),
subscription_id,
pricing_name
);
let mut url = url::Url::parse(url_str).map_err(update_subscription_pricing::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_subscription_pricing::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(pricing).map_err(update_subscription_pricing::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_subscription_pricing::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_subscription_pricing::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Pricing = serde_json::from_slice(rsp_body)
.map_err(|source| update_subscription_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| update_subscription_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_subscription_pricing::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_subscription_pricing {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_resource_group_pricing(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
pricing_name: &str,
) -> std::result::Result<models::Pricing, get_resource_group_pricing::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/pricings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
pricing_name
);
let mut url = url::Url::parse(url_str).map_err(get_resource_group_pricing::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_resource_group_pricing::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_resource_group_pricing::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_resource_group_pricing::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Pricing = serde_json::from_slice(rsp_body)
.map_err(|source| get_resource_group_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| get_resource_group_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_resource_group_pricing::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_resource_group_pricing {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update_resource_group_pricing(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
pricing_name: &str,
pricing: &models::Pricing,
) -> std::result::Result<models::Pricing, create_or_update_resource_group_pricing::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Security/pricings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
pricing_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update_resource_group_pricing::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update_resource_group_pricing::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(pricing).map_err(create_or_update_resource_group_pricing::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(create_or_update_resource_group_pricing::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update_resource_group_pricing::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Pricing = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_resource_group_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update_resource_group_pricing::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update_resource_group_pricing::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update_resource_group_pricing {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod security_contacts {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::SecurityContactList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/securityContacts",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecurityContactList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
security_contact_name: &str,
) -> std::result::Result<models::SecurityContact, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/securityContacts/{}",
operation_config.base_path(),
subscription_id,
security_contact_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecurityContact =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
subscription_id: &str,
security_contact_name: &str,
security_contact: &models::SecurityContact,
) -> std::result::Result<models::SecurityContact, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/securityContacts/{}",
operation_config.base_path(),
subscription_id,
security_contact_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(security_contact).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecurityContact =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
security_contact_name: &str,
security_contact: &models::SecurityContact,
) -> std::result::Result<models::SecurityContact, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/securityContacts/{}",
operation_config.base_path(),
subscription_id,
security_contact_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(security_contact).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SecurityContact =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
security_contact_name: &str,
) -> std::result::Result<(), delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/securityContacts/{}",
operation_config.base_path(),
subscription_id,
security_contact_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod settings {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::SettingsList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/settings",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SettingsList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
setting_name: &str,
) -> std::result::Result<models::Setting, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/settings/{}",
operation_config.base_path(),
subscription_id,
setting_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Setting =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
setting_name: &str,
setting: &models::Setting,
) -> std::result::Result<models::Setting, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Security/settings/{}",
operation_config.base_path(),
subscription_id,
setting_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(setting).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Setting =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::CloudError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Client operations for the `Microsoft.Security/workspaceSettings` resource.
///
/// NOTE(review): this module looks machine-generated (AutoRust-style Azure SDK
/// code); prefer regenerating from the service spec over hand-editing.
pub mod workspace_settings {
    use super::{models, API_VERSION};
    /// Lists the workspace settings of the subscription (GET; 200 => `WorkspaceSettingList`).
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<models::WorkspaceSettingList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Security/workspaceSettings",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is only attached when a token credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::WorkspaceSettingList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status is decoded as a `CloudError` payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches a single workspace setting by name (GET; 200 => `WorkspaceSetting`).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        workspace_setting_name: &str,
    ) -> std::result::Result<models::WorkspaceSetting, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Security/workspaceSettings/{}",
            operation_config.base_path(),
            subscription_id,
            workspace_setting_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::WorkspaceSetting =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates a workspace setting (PUT with a JSON body; 200 => `WorkspaceSetting`).
    pub async fn create(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        workspace_setting_name: &str,
        workspace_setting: &models::WorkspaceSetting,
    ) -> std::result::Result<models::WorkspaceSetting, create::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Security/workspaceSettings/{}",
            operation_config.base_path(),
            subscription_id,
            workspace_setting_name
        );
        let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(workspace_setting).map_err(create::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::WorkspaceSetting =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`create`].
    pub mod create {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Updates a workspace setting (PATCH with a JSON body; 200 => `WorkspaceSetting`).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        workspace_setting_name: &str,
        workspace_setting: &models::WorkspaceSetting,
    ) -> std::result::Result<models::WorkspaceSetting, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Security/workspaceSettings/{}",
            operation_config.base_path(),
            subscription_id,
            workspace_setting_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(workspace_setting).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::WorkspaceSetting =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`update`].
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a workspace setting (DELETE; 204 => success with no body).
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        workspace_setting_name: &str,
    ) -> std::result::Result<(), delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Security/workspaceSettings/{}",
            operation_config.base_path(),
            subscription_id,
            workspace_setting_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            // NOTE(review): only 204 is treated as success; a 200 reply would be
            // decoded as `CloudError` below — confirm against the service spec.
            http::StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::CloudError =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`delete`].
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
|
#![recursion_limit = "1024"]
#[macro_use]
extern crate serde_derive;
extern crate yew_material_macro;
pub mod index;
pub mod theme;
use index::Index;
use wasm_bindgen::prelude::*;
use yew::prelude::*;
use yew::utils::document;
use yew_material_utils::log;
#[wasm_bindgen]
pub fn start() -> Result<(), JsValue> {
log::initialize();
theme::initialize();
App::<Index>::new().mount(document().get_element_by_id("root").unwrap());
Ok(())
}
|
use saigon_core::content::Content;
use saigon_core::{Command, HelpText, Plugin, PluginResponse, PluginResult};
use serde::Deserialize;
/// Chat-bot plugin that replies to the `cat fact` command with a random fact
/// fetched from catfact.ninja (see the `Plugin` impl below).
pub struct CatFact;

/// Wire format of a catfact.ninja response.
#[derive(Deserialize)]
struct CatFactJson {
    pub fact: String,
    // Deserialized from the API payload; not read by the visible code.
    pub length: i32,
}
impl Plugin for CatFact {
    /// Plugin name, taken from the crate name at compile time.
    fn name(&self) -> String {
        env!("CARGO_PKG_NAME").into()
    }
    /// Plugin version, taken from the crate version at compile time.
    fn version(&self) -> String {
        env!("CARGO_PKG_VERSION").into()
    }
    /// Help entry advertising the `cat fact` command.
    fn help(&self) -> Option<HelpText> {
        Some(HelpText {
            command: "cat fact".into(),
            text: "Returns a fun fact about cats".into(),
        })
    }
    /// Handles `cat fact` by fetching a fact and returning it as text;
    /// every other command is ignored.
    fn receive(&mut self, command: &Command) -> PluginResult {
        match command.value.as_ref() {
            "cat fact" => {
                // NOTE(review): both `.unwrap()`s panic on network or JSON
                // failure; consider mapping these into the error side of
                // `PluginResult` (its error type is declared in saigon_core).
                let res: CatFactJson = reqwest::get("https://catfact.ninja/fact")
                    .unwrap()
                    .json()
                    .unwrap();
                Ok(PluginResponse::Success(Content::Text(res.fact)))
            }
            _ => Ok(PluginResponse::Ignore),
        }
    }
}
|
// Copyright (C) 2019-2021 Parity Technologies (UK) Ltd.
// Copyright (C) 2021 Subspace Labs, Inc.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Consensus extension module tests for Spartan consensus.
use super::{Call, *};
use frame_support::storage::migration::{get_storage_value, put_storage_value};
use frame_support::{
assert_err, assert_noop, assert_ok, traits::OnFinalize, weights::GetDispatchInfo,
};
use frame_system::{EventRecord, Phase};
use mock::{Event, *};
use schnorrkel::Keypair;
use sp_consensus_poc::{digests::Solution, PoCEpochConfiguration, Slot};
use sp_core::Public;
use sp_runtime::traits::Header;
use sp_runtime::transaction_validity::{
InvalidTransaction, TransactionPriority, TransactionSource, TransactionValidity,
ValidTransaction,
};
use sp_runtime::DispatchError;
// Known-good randomness produced from an all-zero seed and no VRF outputs;
// pins the output of `compute_randomness` for the empty case.
const EMPTY_RANDOMNESS: [u8; 32] = [
    74, 25, 49, 128, 53, 97, 244, 49, 222, 202, 176, 2, 231, 66, 95, 10, 133, 49, 213, 228, 86,
    161, 164, 127, 217, 153, 138, 37, 48, 192, 248, 0,
];

/// `compute_randomness` over an empty input set matches the pinned constant.
#[test]
fn empty_randomness_is_correct() {
    let s = compute_randomness([0; RANDOMNESS_LENGTH], 0, std::iter::empty(), None);
    assert_eq!(s, EMPTY_RANDOMNESS);
}
/// The first initialized block fixes the genesis slot, starts epoch 0, and
/// emits the expected digest items on finalization.
#[test]
fn first_block_epoch_zero_start() {
    let mut ext = new_test_ext();
    ext.execute_with(|| {
        let genesis_slot = Slot::from(100);
        let solution = Solution::get_for_genesis();
        let por_randomness = sp_io::hashing::blake2_256(&solution.signature);
        let pre_digest = make_pre_digest(genesis_slot, solution);
        // Genesis slot is still unset (0) before the first block initializes.
        assert_eq!(Spartan::genesis_slot(), Slot::from(0));
        System::initialize(&1, &Default::default(), &pre_digest, Default::default());
        Spartan::do_initialize(1);
        // The first block's slot becomes the genesis slot of epoch 0.
        assert_eq!(Spartan::genesis_slot(), genesis_slot);
        assert_eq!(Spartan::current_slot(), genesis_slot);
        assert_eq!(Spartan::epoch_index(), 0);
        assert_eq!(Spartan::author_por_randomness(), Some(por_randomness));
        Spartan::on_finalize(1);
        let header = System::finalize();
        // PoR randomness is accumulated under segment 0; on-chain randomness
        // for the first epoch is still all zeros.
        assert_eq!(SegmentIndex::<Test>::get(), 0);
        assert_eq!(UnderConstruction::<Test>::get(0), vec![por_randomness]);
        assert_eq!(Spartan::randomness(), [0; 32]);
        assert_eq!(NextRandomness::<Test>::get(), [0; 32]);
        assert_eq!(header.digest.logs.len(), 4);
        assert_eq!(pre_digest.logs.len(), 1);
        assert_eq!(header.digest.logs[0], pre_digest.logs[0]);
        let consensus_log = sp_consensus_poc::ConsensusLog::NextEpochData(
            sp_consensus_poc::digests::NextEpochDescriptor {
                randomness: Spartan::randomness(),
            },
        );
        let consensus_digest = DigestItem::Consensus(POC_ENGINE_ID, consensus_log.encode());
        // first epoch descriptor has same info as last.
        assert_eq!(header.digest.logs[1], consensus_digest.clone())
    })
}
/// Author PoR randomness derived from the pre-digest solution survives block
/// finalization unchanged.
#[test]
fn author_por_output() {
    let mut ext = new_test_ext();
    ext.execute_with(|| {
        let genesis_slot = Slot::from(10);
        let solution = Solution::get_for_genesis();
        let por_randomness = sp_io::hashing::blake2_256(&solution.signature);
        let pre_digest = make_pre_digest(genesis_slot, solution);
        System::initialize(&1, &Default::default(), &pre_digest, Default::default());
        Spartan::do_initialize(1);
        assert_eq!(Spartan::author_por_randomness(), Some(por_randomness));
        Spartan::on_finalize(1);
        System::finalize();
        // Still available after the block is finalized.
        assert_eq!(Spartan::author_por_randomness(), Some(por_randomness));
    })
}
/// `next_expected_epoch_change` predicts the block of the next epoch boundary
/// from the current slot and the 3-slot epoch duration.
#[test]
fn can_predict_next_epoch_change() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        assert_eq!(<Test as Config>::EpochDuration::get(), 3);
        // this sets the genesis slot to 6;
        go_to_block(&keypair, 1, 6);
        assert_eq!(*Spartan::genesis_slot(), 6);
        assert_eq!(*Spartan::current_slot(), 6);
        assert_eq!(Spartan::epoch_index(), 0);
        progress_to_block(&keypair, 5);
        assert_eq!(Spartan::epoch_index(), 5 / 3);
        assert_eq!(*Spartan::current_slot(), 10);
        // next epoch change will be at
        assert_eq!(*Spartan::current_epoch_start(), 9); // next change will be 12, 2 slots from now
        assert_eq!(
            Spartan::next_expected_epoch_change(System::block_number()),
            Some(5 + 2)
        );
    })
}
/// The solution range is recalculated at era boundaries: it shrinks when
/// blocks arrive faster than expected and grows when they arrive slower.
#[test]
fn can_update_solution_range_on_era_change() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        assert_eq!(<Test as Config>::EraDuration::get(), 4);
        assert_eq!(
            <Test as Config>::InitialSolutionRange::get(),
            INITIAL_SOLUTION_RANGE
        );
        // There should be no solution range stored during first era
        assert_eq!(Spartan::solution_range(), None);
        // We produce blocks on every slot
        progress_to_block(&keypair, 4);
        // Still no solution range update
        assert_eq!(Spartan::solution_range(), None);
        progress_to_block(&keypair, 5);
        // Second era should have solution range updated
        assert!(Spartan::solution_range().is_some());
        // Because blocks were produced on every slot, apparent pledged space must increase and
        // solution range should decrease
        let last_solution_range = Spartan::solution_range().unwrap();
        assert!(last_solution_range < INITIAL_SOLUTION_RANGE);
        // Progress almost to era change
        progress_to_block(&keypair, 8);
        // Change era such that it takes more slots than expected
        go_to_block(
            &keypair,
            9,
            u64::from(Spartan::current_slot()) + (4 * SLOT_PROBABILITY.1 / SLOT_PROBABILITY.0 + 10),
        );
        // This should cause solution range to increase as apparent pledged space decreased
        assert!(Spartan::solution_range().unwrap() > last_solution_range);
    })
}
/// The salt increments exactly once per eon (5 blocks under the mock config)
/// and is stable everywhere in between.
#[test]
fn can_update_salt_on_eon_change() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        assert_eq!(<Test as Config>::EonDuration::get(), 5);
        // Initial salt equals to eon
        assert_eq!(Spartan::salt(), 0);
        // We produce blocks on every slot
        progress_to_block(&keypair, 5);
        // Still no salt update
        assert_eq!(Spartan::salt(), 0);
        progress_to_block(&keypair, 6);
        // Second eon should have salt updated
        assert_eq!(Spartan::salt(), 1);
        // We produce blocks on every slot
        progress_to_block(&keypair, 10);
        // Just before eon update, still the same salt as before
        assert_eq!(Spartan::salt(), 1);
        progress_to_block(&keypair, 11);
        // Third eon should have salt updated again
        assert_eq!(Spartan::salt(), 2);
    })
}
/// A planned config change is enacted at the next epoch boundary: the pending
/// config becomes current, and the change is announced in a consensus digest.
#[test]
fn can_enact_next_config() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        assert_eq!(<Test as Config>::EpochDuration::get(), 3);
        // this sets the genesis slot to 6;
        go_to_block(&keypair, 1, 6);
        assert_eq!(*Spartan::genesis_slot(), 6);
        assert_eq!(*Spartan::current_slot(), 6);
        assert_eq!(Spartan::epoch_index(), 0);
        go_to_block(&keypair, 2, 7);
        let current_config = PoCEpochConfiguration { c: (0, 4) };
        let next_config = PoCEpochConfiguration { c: (1, 4) };
        let next_next_config = PoCEpochConfiguration { c: (2, 4) };
        EpochConfig::<Test>::put(current_config);
        NextEpochConfig::<Test>::put(next_config.clone());
        assert_eq!(NextEpochConfig::<Test>::get(), Some(next_config.clone()));
        // Schedule yet another config change; requires root origin.
        Spartan::plan_config_change(
            Origin::root(),
            NextConfigDescriptor::V1 {
                c: next_next_config.c,
            },
        )
        .unwrap();
        progress_to_block(&keypair, 4);
        Spartan::on_finalize(9);
        let header = System::finalize();
        // The queued config rotated: next became current, planned became next.
        assert_eq!(EpochConfig::<Test>::get(), Some(next_config));
        assert_eq!(
            NextEpochConfig::<Test>::get(),
            Some(next_next_config.clone())
        );
        let consensus_log =
            sp_consensus_poc::ConsensusLog::NextConfigData(NextConfigDescriptor::V1 {
                c: next_next_config.c,
            });
        let consensus_digest = DigestItem::Consensus(POC_ENGINE_ID, consensus_log.encode());
        assert_eq!(header.digest.logs[4], consensus_digest.clone())
    });
}
/// `plan_config_change` rejects unsigned and signed origins with `BadOrigin`
/// (leaving storage untouched) and succeeds only for root.
#[test]
fn only_root_can_enact_config_change() {
    new_test_ext().execute_with(|| {
        let next_config = NextConfigDescriptor::V1 { c: (1, 4) };
        // Neither an unsigned nor an ordinary signed origin may plan a change.
        for bad_origin in [Origin::none(), Origin::signed(1)] {
            let res = Spartan::plan_config_change(bad_origin, next_config.clone());
            assert_noop!(res, DispatchError::BadOrigin);
        }
        // Root is allowed.
        assert!(Spartan::plan_config_change(Origin::root(), next_config).is_ok());
    });
}
/// Current- and next-epoch views expose consecutive indices/start slots and
/// distinct randomness values.
#[test]
fn can_fetch_current_and_next_epoch_data() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        EpochConfig::<Test>::put(PoCEpochConfiguration { c: (1, 4) });
        progress_to_block(&keypair, System::block_number() + 4);
        let current_epoch = Spartan::current_epoch();
        assert_eq!(current_epoch.epoch_index, 1);
        assert_eq!(*current_epoch.start_slot, 4);
        let next_epoch = Spartan::next_epoch();
        assert_eq!(next_epoch.epoch_index, 2);
        assert_eq!(*next_epoch.start_slot, 7);
        // the on-chain randomness should always change across epochs
        assert_ne!(current_epoch.randomness, next_epoch.randomness);
    });
}
/// `EpochStart` records the block numbers at which the previous and current
/// epochs began, and rolls forward on each epoch transition.
#[test]
fn tracks_block_numbers_when_current_and_previous_epoch_started() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        // an epoch is 3 slots therefore at block 8 we should be in epoch #3
        // with the previous epochs having the following blocks:
        // epoch 1 - [1, 2, 3]
        // epoch 2 - [4, 5, 6]
        // epoch 3 - [7, 8, 9]
        progress_to_block(&keypair, 8);
        let (last_epoch, current_epoch) = EpochStart::<Test>::get();
        assert_eq!(last_epoch, 4);
        assert_eq!(current_epoch, 7);
        // once we reach block 10 we switch to epoch #4
        progress_to_block(&keypair, 10);
        let (last_epoch, current_epoch) = EpochStart::<Test>::get();
        assert_eq!(last_epoch, 7);
        assert_eq!(current_epoch, 10);
    });
}
/// Reporting an equivocation for the current session adds the offending
/// farmer to the block list.
///
/// (Idiom fix: `assert_eq!(x, false)` / `assert_eq!(x, true)` replaced with
/// `assert!(!x)` / `assert!(x)` — clippy `bool_assert_comparison`.)
#[test]
fn report_equivocation_current_session_works() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        let farmer_id = FarmerId::from_slice(&keypair.public.to_bytes());
        // generate an equivocation proof. it creates two headers at the given
        // slot with different block hashes and signed by the given key
        let equivocation_proof = generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        assert!(!Spartan::is_in_block_list(&farmer_id));
        // report the equivocation
        Spartan::report_equivocation(Origin::none(), Box::new(equivocation_proof)).unwrap();
        progress_to_block(&keypair, 2);
        // check that farmer was added to block list
        assert!(Spartan::is_in_block_list(&farmer_id));
    });
}
/// A proof generated at an earlier slot can still be reported in a later
/// block and blocks the farmer.
///
/// (Idiom fix: boolean comparisons in `assert_eq!` rewritten as plain
/// `assert!` per clippy `bool_assert_comparison`.)
#[test]
fn report_equivocation_old_session_works() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        let farmer_id = FarmerId::from_slice(&keypair.public.to_bytes());
        // generate an equivocation proof at the current slot
        let equivocation_proof = generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        // create new block and report the equivocation
        // from the previous block
        progress_to_block(&keypair, 2);
        assert!(!Spartan::is_in_block_list(&farmer_id));
        // report the equivocation
        Spartan::report_equivocation(Origin::none(), Box::new(equivocation_proof)).unwrap();
        progress_to_block(&keypair, 3);
        // check that farmer was added to block list
        assert!(Spartan::is_in_block_list(&farmer_id));
    })
}
/// Every malformed variant of an equivocation proof (identical headers,
/// missing digests, mismatched slots, bad seal) is rejected with
/// `InvalidEquivocationProof`.
#[test]
fn report_equivocation_invalid_equivocation_proof() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        // Helper: reporting this proof must fail with InvalidEquivocationProof.
        let assert_invalid_equivocation = |equivocation_proof| {
            assert_err!(
                Spartan::report_equivocation(Origin::none(), Box::new(equivocation_proof),),
                Error::<Test>::InvalidEquivocationProof,
            )
        };
        // both headers have the same hash, no equivocation.
        let mut equivocation_proof =
            generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        equivocation_proof.second_header = equivocation_proof.first_header.clone();
        assert_invalid_equivocation(equivocation_proof);
        // missing preruntime digest from one header
        let mut equivocation_proof =
            generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        equivocation_proof.first_header.digest_mut().logs.remove(0);
        assert_invalid_equivocation(equivocation_proof);
        // missing seal from one header
        let mut equivocation_proof =
            generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        equivocation_proof.first_header.digest_mut().logs.remove(1);
        assert_invalid_equivocation(equivocation_proof);
        // invalid slot number in proof compared to runtime digest
        let mut equivocation_proof =
            generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        equivocation_proof.slot = Slot::from(0);
        assert_invalid_equivocation(equivocation_proof.clone());
        // different slot numbers in headers
        let h1 = equivocation_proof.first_header;
        let mut equivocation_proof =
            generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get() + 1);
        // use the header from the previous equivocation generated
        // at the previous slot
        equivocation_proof.first_header = h1.clone();
        assert_invalid_equivocation(equivocation_proof.clone());
        // invalid seal signature
        let mut equivocation_proof =
            generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get() + 1);
        // replace the seal digest with the digest from the
        // previous header at the previous slot
        equivocation_proof.first_header.digest_mut().pop();
        equivocation_proof
            .first_header
            .digest_mut()
            .push(h1.digest().logs().last().unwrap().clone());
        assert_invalid_equivocation(equivocation_proof.clone());
    })
}
/// Unsigned equivocation reports are only accepted from local sources, are
/// tagged by (farmer, slot) to deduplicate, and become `Stale` once applied.
#[test]
fn report_equivocation_validate_unsigned_prevents_duplicates() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        let farmer_id = FarmerId::from_slice(&keypair.public.to_bytes());
        let equivocation_proof = generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        let inner = Call::report_equivocation {
            equivocation_proof: Box::new(equivocation_proof.clone()),
        };
        // Only local/in block reports are allowed
        assert_eq!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::validate_unsigned(
                TransactionSource::External,
                &inner,
            ),
            InvalidTransaction::Call.into(),
        );
        // The transaction is valid when passed as local
        let tx_tag = (farmer_id, CurrentSlot::<Test>::get());
        assert_eq!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::validate_unsigned(
                TransactionSource::Local,
                &inner,
            ),
            TransactionValidity::Ok(ValidTransaction {
                priority: TransactionPriority::MAX - 1,
                requires: vec![],
                provides: vec![("PoCEquivocation", tx_tag).encode()],
                longevity: ReportLongevity::get(),
                propagate: false,
            })
        );
        // The pre dispatch checks should also pass
        assert_ok!(<Spartan as sp_runtime::traits::ValidateUnsigned>::pre_dispatch(&inner));
        // Submit the report
        Spartan::report_equivocation(Origin::none(), Box::new(equivocation_proof)).unwrap();
        // The report should now be considered stale and the transaction is invalid.
        // The check for staleness should be done on both `validate_unsigned` and on `pre_dispatch`
        assert_err!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::validate_unsigned(
                TransactionSource::Local,
                &inner,
            ),
            InvalidTransaction::Stale,
        );
        assert_err!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::pre_dispatch(&inner),
            InvalidTransaction::Stale,
        );
    });
}
/// The benchmark weight for `report_equivocation` is a constant (1) no matter
/// how often it is sampled.
#[test]
fn report_equivocation_has_valid_weight() {
    // the weight is always the same.
    assert!((1..=1000).all(|_| <Test as Config>::WeightInfo::report_equivocation() == 1));
}
/// A valid equivocation report has its fee waived (`Pays::No`), while a
/// duplicate report of the same proof pays the fee.
#[test]
fn valid_equivocation_reports_dont_pay_fees() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        // generate an equivocation proof.
        let equivocation_proof = generate_equivocation_proof(&keypair, CurrentSlot::<Test>::get());
        // check the dispatch info for the call.
        let info = Call::<Test>::report_equivocation {
            equivocation_proof: Box::new(equivocation_proof.clone()),
        }
        .get_dispatch_info();
        // it should have non-zero weight and the fee has to be paid.
        assert!(info.weight > 0);
        assert_eq!(info.pays_fee, Pays::Yes);
        // report the equivocation.
        let post_info =
            Spartan::report_equivocation(Origin::none(), Box::new(equivocation_proof.clone()))
                .unwrap();
        // the original weight should be kept, but given that the report
        // is valid the fee is waived.
        assert!(post_info.actual_weight.is_none());
        assert_eq!(post_info.pays_fee, Pays::No);
        // report the equivocation again which is invalid now since it is
        // duplicate.
        let post_info = Spartan::report_equivocation(Origin::none(), Box::new(equivocation_proof))
            .err()
            .unwrap()
            .post_info;
        // the fee is not waived and the original weight is kept.
        assert!(post_info.actual_weight.is_none());
        assert_eq!(post_info.pays_fee, Pays::Yes);
    })
}
/// Storing a root block succeeds without a fee and emits `RootBlockStored`.
#[test]
fn store_root_block_works() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        let root_block = create_root_block(0);
        let post_info = Spartan::store_root_block(Origin::none(), root_block).unwrap();
        // Root blocks don't require fee
        assert_eq!(post_info.pays_fee, Pays::No);
        assert_eq!(
            System::events(),
            vec![EventRecord {
                phase: Phase::Initialization,
                event: Event::Spartan(crate::Event::RootBlockStored(root_block)),
                topics: vec![],
            }]
        );
    });
}
/// Unsigned `store_root_block` calls are local-only, tagged by segment index
/// for deduplication, and become `Stale` once the root block is stored.
#[test]
fn store_root_block_validate_unsigned_prevents_duplicates() {
    new_test_ext().execute_with(|| {
        let keypair = Keypair::generate();
        progress_to_block(&keypair, 1);
        let segment_index = 0u64;
        let root_block = create_root_block(segment_index);
        let inner = Call::store_root_block { root_block };
        // Only local/in block reports are allowed
        assert_eq!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::validate_unsigned(
                TransactionSource::External,
                &inner,
            ),
            InvalidTransaction::Call.into(),
        );
        // The transaction is valid when passed as local
        assert_eq!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::validate_unsigned(
                TransactionSource::Local,
                &inner,
            ),
            TransactionValidity::Ok(ValidTransaction {
                priority: TransactionPriority::MAX,
                requires: vec![],
                provides: vec![("SubspaceRootBlock", segment_index).encode()],
                longevity: 0,
                propagate: false,
            })
        );
        // The pre dispatch checks should also pass
        assert_ok!(<Spartan as sp_runtime::traits::ValidateUnsigned>::pre_dispatch(&inner));
        // Submit the report
        Spartan::store_root_block(Origin::none(), root_block).unwrap();
        // The report should now be considered stale and the transaction is invalid.
        // The check for staleness should be done on both `validate_unsigned` and on `pre_dispatch`
        assert_err!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::validate_unsigned(
                TransactionSource::Local,
                &inner,
            ),
            InvalidTransaction::Stale,
        );
        assert_err!(
            <Spartan as sp_runtime::traits::ValidateUnsigned>::pre_dispatch(&inner),
            InvalidTransaction::Stale,
        );
    });
}
#[test]
fn store_root_block_has_valid_weight() {
    // The configured weight is constant: every query must return 1.
    for _ in 1..=1000 {
        assert_eq!(<Test as Config>::WeightInfo::store_root_block(), 1);
    }
}
#[test]
fn add_epoch_configurations_migration_works() {
    // The migration reads the legacy raw `NextEpochConfig` storage entry
    // (under the "Spartan" pallet prefix), clears it, and populates the new
    // `EpochConfig` / `PendingEpochConfigChange` storage items.
    impl crate::migrations::PoCPalletPrefix for Test {
        fn pallet_prefix() -> &'static str {
            "Spartan"
        }
    }
    new_test_ext().execute_with(|| {
        // Seed the raw legacy storage entry the migration is expected to consume.
        let next_config_descriptor = NextConfigDescriptor::V1 { c: (3, 4) };
        put_storage_value(
            b"Spartan",
            b"NextEpochConfig",
            &[],
            Some(next_config_descriptor.clone()),
        );
        assert!(get_storage_value::<Option<NextConfigDescriptor>>(
            b"Spartan",
            b"NextEpochConfig",
            &[],
        )
        .is_some());
        let current_epoch = PoCEpochConfiguration { c: (1, 4) };
        crate::migrations::add_epoch_configuration::<Test>(current_epoch.clone());
        // The raw entry must be gone after the migration ran.
        assert!(get_storage_value::<Option<NextConfigDescriptor>>(
            b"Spartan",
            b"NextEpochConfig",
            &[],
        )
        .is_none());
        // ...and its contents must have moved into the typed storage items.
        assert_eq!(EpochConfig::<Test>::get(), Some(current_epoch));
        assert_eq!(
            PendingEpochConfigChange::<Test>::get(),
            Some(next_config_descriptor)
        );
    });
}
|
//! Solutions to the challenges in Set 2.
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
use attacks;
use challenges::{ChallengeResults, ChallengeResultsBuilder};
use challenges::helpers;
use utils::block::{BlockCipher, Algorithms, OperationModes, PaddingSchemes};
use utils::data::Data;
use victims::block::{EcbOrCbc, EcbWithSuffix, EcbUserProfile, EcbWithAffixes, CbcCookie};
/// Run the solution to Set 2 Challenge 9 (Implement PKCS#7 padding)
///
/// # Outputs
///
/// `text_in` - The unpadded input as a plain text string.
///
/// `hex_in` - The unpadded input as a hexadecimal string.
///
/// `hex_out` - The padded output as a hexadecimal string.
pub fn challenge09() -> ChallengeResults {
    // The plain-text input to be padded.
    let text_in = "YELLOW SUBMARINE";
    let input = Data::from_text(text_in);
    let hex_in = input.to_hex();
    // Pad to a 20-byte boundary using PKCS#7 and report the result as hex.
    let padded = helpers::pkcs7_pad(&input, 20);
    let hex_out = padded.to_hex();
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(9)
        .description("Implement PKCS#7 padding")
        .output("text_in", text_in)
        .output("hex_in", &hex_in)
        .output("hex_out", &hex_out)
        .finalize()
}
/// Run the solution to Set 2 Challenge 10 (Implement CBC mode)
///
/// # Outputs
///
/// `base64_in` - The encrypted input as a base 64 string.
///
/// `text_key` - The key as a plain text string.
///
/// `text_out` - The decrypted output as a plain text string.
pub fn challenge10() -> ChallengeResults {
    // Read the base-64 ciphertext, concatenating every line of the file.
    let file = File::open(&Path::new("input/set2challenge10.txt")).unwrap();
    let base64_in: String = BufReader::new(file)
        .lines()
        .map(|line| line.unwrap())
        .collect();
    let data = Data::from_base64(&base64_in).unwrap();
    // The key is known in advance.
    let text_key = "YELLOW SUBMARINE";
    let key = Data::from_text(text_key);
    // Decrypt with AES-128-CBC, using an all-zero IV.
    let iv = Data::from_bytes(vec![0; 16]);
    let block = BlockCipher::new(Algorithms::Aes,
                                 OperationModes::Cbc(iv),
                                 PaddingSchemes::Pkcs7,
                                 &key)
        .unwrap();
    let text_out = block.decrypt(&data).unwrap().to_text();
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(10)
        .description("Implement CBC mode")
        .output("base64_in", &base64_in)
        .output("text_key", text_key)
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 2 Challenge 11 (An ECB/CBC detection oracle)
///
/// # Outputs
///
/// `success_rate` - The percentage with which we correctly guessed the encryption mode.
pub fn challenge11() -> ChallengeResults {
    // Black-box that encrypts under either ECB or CBC.
    let mut ecb_cbc_box = EcbOrCbc::new();
    // Over 1000 trials, guess the mode and count how often the guess is right.
    let mut correct = 0u32;
    for _ in 0..1000 {
        let guess = attacks::block::is_ecb_mode(&mut ecb_cbc_box);
        if ecb_cbc_box.check_answer(guess) {
            correct += 1;
        }
    }
    // 1000 trials, so dividing by 10 yields a percentage.
    let success_rate = f64::from(correct) / 10.0;
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(11)
        .description("An ECB/CBC detection oracle")
        .output("success_rate", &format!("{}%", success_rate))
        .finalize()
}
/// Run the solution to Set 2 Challenge 12 (Byte-at-a-time ECB decryption (Simple))
///
/// # Outputs
///
/// `success` - Whether the correct suffix was obtained.
///
/// `text_out` - The decrypted suffix as a plain text string.
pub fn challenge12() -> ChallengeResults {
    // The secret suffix the black-box appends before ECB-encrypting.
    let base64 = "Um9sbGluJyBpbiBteSA1LjAKV2l0aCBteSByYWctdG9wIGRvd24gc28gbXkgaGFpciBjYW4gYmxvdwpU\
                  aGUgZ2lybGllcyBvbiBzdGFuZGJ5IHdhdmluZyBqdXN0IHRvIHNheSBoaQpEaWQgeW91IHN0b3A/IE5v\
                  LCBJIGp1c3QgZHJvdmUgYnkK";
    let suffix = Data::from_base64(base64).unwrap();
    let ecb_suffix_box = EcbWithSuffix::new(suffix);
    // Recover the suffix byte-at-a-time through the oracle, without reading it.
    let recovered = attacks::block::find_ecb_suffix(&ecb_suffix_box);
    let success = ecb_suffix_box.check_answer(&recovered);
    let text_out = recovered.to_text();
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(12)
        .description("Byte-at-a-time ECB decryption (Simple)")
        .output("success", &success.to_string())
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 2 Challenge 13 (ECB cut-and-paste)
///
/// # Outputs
///
/// `success` - Whether the admin token was successfully created.
pub fn challenge13() -> ChallengeResults {
    // Black-box that builds encrypted user-profile tokens.
    let ecb_profile_box = EcbUserProfile::new();
    // Forge an admin token by cutting and pasting ECB ciphertext blocks.
    let admin_token = attacks::block::craft_ecb_admin_token(&ecb_profile_box);
    // The box itself judges whether the forged token grants admin.
    let success = ecb_profile_box.is_admin(&admin_token);
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(13)
        .description("ECB cut-and-paste")
        .output("success", &success.to_string())
        .finalize()
}
/// Run the solution to Set 2 Challenge 14 (Byte-at-a-time ECB decryption (Harder))
///
/// # Outputs
///
/// `success` - Whether the correct suffix was obtained.
///
/// `text_out` - The decrypted suffix as a plain text string.
pub fn challenge14() -> ChallengeResults {
    // The secret suffix the black-box appends (it also prepends a random prefix).
    let base64 = "Um9sbGluJyBpbiBteSA1LjAKV2l0aCBteSByYWctdG9wIGRvd24gc28gbXkgaGFpciBjYW4gYmxvdwpU\
                  aGUgZ2lybGllcyBvbiBzdGFuZGJ5IHdhdmluZyBqdXN0IHRvIHNheSBoaQpEaWQgeW91IHN0b3A/IE5v\
                  LCBJIGp1c3QgZHJvdmUgYnkK";
    let suffix = Data::from_base64(base64).unwrap();
    let ecb_affixes_box = EcbWithAffixes::new(suffix);
    // Recover the suffix byte-at-a-time, working around the unknown prefix.
    let recovered = attacks::block::find_ecb_suffix_with_prefix(&ecb_affixes_box);
    let success = ecb_affixes_box.check_answer(&recovered);
    let text_out = recovered.to_text();
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(14)
        .description("Byte-at-a-time ECB decryption (Harder)")
        .output("success", &success.to_string())
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 2 Challenge 15 (PKCS#7 padding validation)
///
/// # Outputs
///
/// `detect_valid` - Whether we correctly detect valid padding.
///
/// `detect_invalid` - Whether we correctly detect invalid padding.
pub fn challenge15() -> ChallengeResults {
    let text = "ICE ICE BABY";
    // Correctly padded inputs must be accepted...
    let padded_ok = [
        helpers::add_padding(text, &[4, 4, 4, 4]),
        helpers::add_padding(text, &[1]),
    ];
    let detect_valid = padded_ok.iter().all(|d| helpers::valid_pkcs7(d));
    // ...and incorrectly padded inputs must be rejected.
    let padded_bad = [
        helpers::add_padding(text, &[5, 5, 5, 5]),
        helpers::add_padding(text, &[1, 2, 3, 4]),
    ];
    let detect_invalid = padded_bad.iter().all(|d| !helpers::valid_pkcs7(d));
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(15)
        .description("PKCS#7 padding validation")
        .output("detect_valid", &detect_valid.to_string())
        .output("detect_invalid", &detect_invalid.to_string())
        .finalize()
}
/// Run the solution to Set 2 Challenge 16 (CBC bitflipping attacks)
///
/// # Outputs
///
/// `success` - Whether the admin token was successfully created.
pub fn challenge16() -> ChallengeResults {
    // Black-box producing CBC-encrypted cookies.
    let cbc_cookie_box = CbcCookie::new();
    // Forge an admin token by flipping bits in the CBC ciphertext.
    let admin_token = attacks::block::craft_cbc_admin_token(&cbc_cookie_box);
    // The box itself judges whether the forged token grants admin.
    let success = cbc_cookie_box.is_admin(&admin_token);
    // Package everything up for reporting.
    ChallengeResultsBuilder::new()
        .set(2)
        .challenge(16)
        .description("CBC bitflipping attacks")
        .output("success", &success.to_string())
        .finalize()
}
#[cfg(test)]
mod tests {
    // Each test runs the corresponding challenge and pins its reported
    // outputs. Long texts are checked by prefix only.
    #[test]
    fn challenge09() {
        let results = super::challenge09();
        results.check("text_in", "YELLOW SUBMARINE");
        results.check("hex_in", "59454c4c4f57205355424d4152494e45");
        // 16-byte input padded to 20 bytes: four 0x04 padding bytes.
        results.check("hex_out", "59454c4c4f57205355424d4152494e4504040404");
    }
    #[test]
    fn challenge10() {
        let results = super::challenge10();
        results.check_prefix("base64_in",
                             "CRIwqt4+szDbqkNY+I0qbNXPg1XLaCM5etQ5Bt9DRFV/xIN2k8Go7jtArLIyP605b071\
                              DL8C+FPYSHOXPkMMMFPAKm+Nsu0nCBMQVt9mlluHbVE/yl6VaBCjNuOGvHZ9WYvt51uR\
                              /lklZZ0ObqD5UaC1rupZwCEK4pIWf6JQ4pTyPjyiPtKXg54FNQvbVIHeotUG2kHEvH");
        results.check("text_key", "YELLOW SUBMARINE");
        results.check_prefix("text_out",
                             "I'm back and I'm ringin' the bell \n\
                              A rockin' on the mike while the fly girls yell \n\
                              In ecstasy in the back of me");
    }
    #[test]
    fn challenge11() {
        let results = super::challenge11();
        // The mode-detection oracle must be right on every one of the trials.
        results.check("success_rate", "100%");
    }
    #[test]
    fn challenge12() {
        let results = super::challenge12();
        results.check("success", "true");
        results.check("text_out",
                      "Rollin' in my 5.0\n\
                       With my rag-top down so my hair can blow\n\
                       The girlies on standby waving just to say hi\n\
                       Did you stop? No, I just drove by\n");
    }
    #[test]
    fn challenge13() {
        let results = super::challenge13();
        results.check("success", "true");
    }
    #[test]
    fn challenge14() {
        let results = super::challenge14();
        results.check("success", "true");
        results.check("text_out",
                      "Rollin' in my 5.0\n\
                       With my rag-top down so my hair can blow\n\
                       The girlies on standby waving just to say hi\n\
                       Did you stop? No, I just drove by\n");
    }
    #[test]
    fn challenge15() {
        let results = super::challenge15();
        results.check("detect_valid", "true");
        results.check("detect_invalid", "true");
    }
    #[test]
    fn challenge16() {
        let results = super::challenge16();
        results.check("success", "true");
    }
}
#![no_std]
#![no_main]
#![feature(trait_alias)]
#![feature(min_type_alias_impl_trait)]
#![feature(impl_trait_in_bindings)]
#![feature(type_alias_impl_trait)]
#![allow(incomplete_features)]
#[path = "../example_common.rs"]
mod example_common;
use embassy::executor::Executor;
use embassy::time::Clock;
use embassy::util::Forever;
use embassy_stm32::exti::ExtiInput;
use embassy_stm32::gpio::{Input, Pull};
use embassy_stm32::rcc;
use embassy_traits::gpio::{WaitForFallingEdge, WaitForRisingEdge};
use example_common::*;
use cortex_m_rt::entry;
#[embassy::task]
async fn main_task() {
    // Initialize the HAL and take ownership of the peripherals.
    let mut p = embassy_stm32::init(Default::default());
    let mut rcc = rcc::Rcc::new(p.RCC);
    rcc.enable_debug_wfe(&mut p.DBGMCU, true);
    // Enables SYSCFG
    let _ = rcc.enable_hsi48(&mut p.SYSCFG, p.CRS);
    // PB2 as a pulled-up input, wrapped in an EXTI channel so edges can be
    // awaited asynchronously.
    let button = Input::new(p.PB2, Pull::Up);
    let mut button = ExtiInput::new(button, p.EXTI2);
    info!("Press the USER button...");
    // Log presses (falling edge) and releases (rising edge) forever.
    loop {
        button.wait_for_falling_edge().await;
        info!("Pressed!");
        button.wait_for_rising_edge().await;
        info!("Released!");
    }
}
// Clock implementation that always reports time 0; installed via
// `embassy::time::set_clock` in `main`.
struct ZeroClock;
impl Clock for ZeroClock {
    fn now(&self) -> u64 {
        0
    }
}
// Executor storage; `Forever` provides a 'static slot written once at startup.
static EXECUTOR: Forever<Executor> = Forever::new();
#[entry]
fn main() -> ! {
    info!("Hello World!");
    // Install the dummy clock before the executor starts running tasks.
    unsafe { embassy::time::set_clock(&ZeroClock) };
    let executor = EXECUTOR.put(Executor::new());
    // `run` never returns; it spawns `main_task` and then polls forever.
    executor.run(|spawner| {
        unwrap!(spawner.spawn(main_task()));
    })
}
|
struct Solution;
impl Solution {
    /// LeetCode-style "count of smaller numbers after self": for each
    /// element of `nums`, counts how many elements to its right are
    /// strictly smaller.
    ///
    /// Uses coordinate compression plus a Fenwick (binary indexed) tree:
    /// scan from the right, query the prefix count of values below the
    /// current one, then record the current value. O(n log n) time,
    /// O(n) extra space.
    pub fn count_smaller(nums: Vec<i32>) -> Vec<i32> {
        // Sorted, deduplicated table of values (the compressed domain).
        let table = Self::discretization(&nums);
        // Fenwick tree over the compressed domain, 1-indexed.
        let mut tree = vec![0; table.len() + 1];
        let mut result = Vec::with_capacity(nums.len());
        for num in nums.iter().rev() {
            let id = Self::get_id(&table, num);
            // Count of already-seen (i.e. later in `nums`) values < *num.
            result.push(Self::query(&tree, id - 1));
            Self::update(&mut tree, id);
        }
        // Counts were produced right-to-left; restore original order.
        result.reverse();
        result
    }
    /// Returns the distinct values of `nums` in ascending order
    /// (the coordinate-compression table).
    fn discretization(nums: &[i32]) -> Vec<i32> {
        let mut a = nums.to_vec();
        a.sort_unstable();
        a.dedup();
        a
    }
    /// 1-based rank of `num` within the sorted, deduplicated table `a`.
    /// The lookup cannot fail for values drawn from `nums` itself.
    fn get_id(a: &[i32], num: &i32) -> isize {
        a.binary_search(num).expect("value must be in the table") as isize + 1
    }
    /// Prefix sum of occurrence counts over tree positions 1..=pos.
    fn query(c: &[i32], mut pos: isize) -> i32 {
        let mut ret = 0;
        while pos > 0 {
            ret += c[pos as usize];
            pos -= Self::lowbit(pos);
        }
        ret
    }
    /// Records one occurrence at position `pos` in the Fenwick tree.
    fn update(c: &mut [i32], mut pos: isize) {
        while pos < c.len() as isize {
            c[pos as usize] += 1;
            pos += Self::lowbit(pos);
        }
    }
    /// Lowest set bit of `n` — the Fenwick tree step size.
    fn lowbit(n: isize) -> isize {
        n & (-n)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Right of 5 there are two smaller values (2, 1); right of 2 one (1);
    // right of 6 one (1); right of 1 none.
    #[test]
    fn test_count_smaller() {
        let res = Solution::count_smaller(vec![5, 2, 6, 1]);
        assert_eq!(res, vec![2, 1, 1, 0]);
    }
}
|
use package::ParenchymaDeep;
use parenchyma::{Build, Result};
use parenchyma::opencl::OpenCLContext;
use parenchyma::utility::Uninitialized;
use super::Package;
impl Build<OpenCLContext<Uninitialized>> for ParenchymaDeep {
    /// Compiles the OpenCL kernel sources bundled with this crate and
    /// collects the resulting kernels into the `Package` carried by
    /// `ParenchymaDeep`.
    fn build(cx: &mut OpenCLContext<Uninitialized>) -> Result<ParenchymaDeep> {
        // All three source files are compiled into a single program.
        let program = cx.create_program(&[
            include_str!("source/activation.cl"),
            include_str!("source/activationBackward.cl"),
            include_str!("source/convolution.cl")
        ])?;
        // Each kernel is looked up by its name in the compiled program;
        // any missing kernel aborts the build via `?`.
        let cl_package = Package {
            tanh: program.create_kernel("tanh_float")?,
            sigmoid: program.create_kernel("sigmoid_float")?,
            relu: program.create_kernel("relu_float")?,
            elu: program.create_kernel("elu_float")?,
            tanh_backward: program.create_kernel("tanh_backward_float")?,
            sigmoid_backward: program.create_kernel("sigmoid_backward_float")?,
            relu_backward: program.create_kernel("relu_backward_float")?,
            elu_backward: program.create_kernel("elu_backward_float")?,
            convolution: program.create_kernel("convolve_ints")?,
            // The program is kept alive alongside its kernels.
            program,
        };
        Ok(ParenchymaDeep { cl: cl_package })
    }
}
use rstest::rstest;
use autorel_chlg::{BreakingInfo, Change, ChangeLog, ChangeType, SemverScope};
/// Parses `message` as a conventional commit and returns the semver scope
/// of a changelog containing just that single change.
fn semver_scope_of(message: &str) -> Option<SemverScope> {
    let change = Change::parse_conventional_commit(message).expect("Not a conventional commit");
    (ChangeLog::default() + change).semver_scope()
}
#[test]
fn can_parse_empty_commit_message() {
    // An empty message is not a conventional commit.
    let parsed = Change::parse_conventional_commit("");
    assert!(parsed.is_none())
}
#[rstest]
#[case("")]
#[case("Hello world")]
#[case("Hello world\n\nwith multiple lines")]
fn returns_none_for_non_conventional_commits(#[case] message: &str) {
    let parsed = Change::parse_conventional_commit(message);
    assert_eq!(None, parsed);
}
#[rstest]
#[case("feat!: Hello world")]
#[case("feat(withscope)!: Hello world")]
#[case("fix!: Hello world")]
#[case("other(withscope)!: Hello world")]
#[case("feat: Hello world\n\nBREAKING CHANGE: This is breaking")]
#[case("other: Hello world\n\nBREAKING CHANGE: This is breaking")]
fn recognize_breaking_changes(#[case] message: &str) {
    // A `!` marker or a BREAKING CHANGE footer yields the breaking scope.
    assert!(matches!(
        semver_scope_of(message),
        Some(SemverScope::Breaking)
    ));
}
#[rstest]
#[case("feat: Hello world")]
#[case("feat: a\n\n")]
#[case("feat(withscope): Hello world")]
#[case("feat: Hello world\n\nwith multiple lines")]
fn recognize_feature(#[case] message: &str) {
    // A non-breaking `feat` commit yields the feature scope.
    assert!(matches!(
        semver_scope_of(message),
        Some(SemverScope::Feature)
    ));
}
#[rstest]
#[case("fix: Hello world")]
#[case("fix(withscope): Hello world")]
#[case("fix: Hello world\n\nwith multiple lines")]
fn recognize_fix(#[case] message: &str) {
    // A non-breaking `fix` commit yields the fix scope.
    assert!(matches!(semver_scope_of(message), Some(SemverScope::Fix)));
}
#[rstest]
#[case("chore: Hello world")]
#[case("chore: Hello world!")]
#[case("chore: Hello !: world")]
#[case("featuring: a")]
#[case("tests(withscope): Hello world")]
#[case("refactor: Hello world\n\nwith multiple lines")]
fn recognize_internal_changes(#[case] message: &str) {
    // Non-feat/fix, non-breaking commits have no semver impact.
    assert_eq!(None, semver_scope_of(message));
}
#[rstest]
#[case("fix: coucou", ChangeType::Fix)]
#[case("fix!: coucou", ChangeType::Fix)]
#[case("fix(withscope): c'est moi", ChangeType::Fix)]
#[case("feat: c'est moi", ChangeType::Feature)]
#[case("feat(withscope)!: c'est moi", ChangeType::Feature)]
#[case("mytype(withscope)!: c'est moi", ChangeType::Custom("mytype"))]
fn retain_type(#[case] message: &str, #[case] expected_type: ChangeType) {
    let change = Change::parse_conventional_commit(message).expect("Failed to parse commit");
    assert_eq!(expected_type, change.type_)
}
#[rstest]
#[case("feat(hello): coucou", Some("hello"))]
#[case("fix(world)!: c'est moi", Some("world"))]
#[case("feat: Hello world!", None)]
fn retain_scope(#[case] message: &str, #[case] expected_scope: Option<&str>) {
    let change = Change::parse_conventional_commit(message).expect("Failed to parse commit");
    assert_eq!(expected_scope, change.scope)
}
#[rstest]
#[case("fix: coucou", "coucou")]
#[case("fix(withscope): c'est moi", "c'est moi")]
#[case("feat: this is it! \n\noops", "this is it!")]
fn retain_description(#[case] message: &str, #[case] expected_description: &str) {
    let change = Change::parse_conventional_commit(message).expect("Failed to parse commit");
    assert_eq!(expected_description, change.description)
}
#[rstest]
#[case("feat(hello): a description", None)]
#[case("feat!: coucou\n\nHello world!", Some("Hello world!"))]
#[case(
    "fix: desc\n\nA nice \nbody\n\nwith mutliple lines",
    Some("A nice \nbody\n\nwith mutliple lines")
)]
#[case(
    "fix(world)!: ...\n\nA nice \nbody\n\nwith mutliple lines\n\nbefore: footer",
    Some("A nice \nbody\n\nwith mutliple lines")
)]
fn retain_body(#[case] message: &str, #[case] expected_body: Option<&str>) {
    let change = Change::parse_conventional_commit(message).expect("Failed to parse commit");
    assert_eq!(expected_body, change.body)
}
#[rstest]
#[case("feat: hello", BreakingInfo::NotBreaking)]
#[case("feat!: hello", BreakingInfo::Breaking)]
#[case(
    "feat: hello\n\nBREAKING CHANGE: Because I had to...",
    BreakingInfo::BreakingWithDescriptions(vec!["Because I had to..."])
)]
#[case(
    "feat: hello\n\nwith a body\n\nBREAKING CHANGE #\nThis\n\nis\nlife...",
    BreakingInfo::BreakingWithDescriptions(vec!["This\n\nis\nlife..."])
)]
#[case(
    "feat: hello\n\nBREAKING CHANGE: one\nBREAKING CHANGE: two",
    BreakingInfo::BreakingWithDescriptions(vec!["one", "two"])
)]
fn retain_breaking_change_description(#[case] message: &str, #[case] expected: BreakingInfo) {
    let change = Change::parse_conventional_commit(message).expect("Failed to parse commit");
    assert_eq!(expected, change.breaking)
}
|
// Public submodules re-exposed by this crate.
// NOTE(review): module contents are not visible from this chunk — grouping
// inferred from names only; confirm before documenting further.
pub mod baron;
pub mod hero;
pub mod scenery;
pub mod thing;
|
#![warn(missing_docs)]
use sys;
use std::marker::PhantomData;
use std::ptr;
use {ImGuiColorEditFlags, ImVec2, ImVec4, Ui};
/// Mutable reference to an editable color value.
/// Mutable reference to an editable color value.
///
/// Borrows the caller's storage; typically obtained through the `From`
/// impls for `&mut [f32; 3]` / `&mut [f32; 4]`.
#[derive(Debug)]
pub enum EditableColor<'p> {
    /// Color value with three float components (e.g. RGB).
    Float3(&'p mut [f32; 3]),
    /// Color value with four float components (e.g. RGBA).
    Float4(&'p mut [f32; 4]),
}
impl<'p> EditableColor<'p> {
    /// Returns an unsafe mutable pointer to the color slice's buffer.
    fn as_mut_ptr(&mut self) -> *mut f32 {
        match self {
            EditableColor::Float3(value) => value.as_mut_ptr(),
            EditableColor::Float4(value) => value.as_mut_ptr(),
        }
    }
}
impl<'p> From<&'p mut [f32; 3]> for EditableColor<'p> {
    /// Wraps a three-component float array as an editable RGB value.
    fn from(value: &'p mut [f32; 3]) -> EditableColor<'p> {
        EditableColor::Float3(value)
    }
}
impl<'p> From<&'p mut [f32; 4]> for EditableColor<'p> {
    /// Wraps a four-component float array as an editable RGBA value.
    fn from(value: &'p mut [f32; 4]) -> EditableColor<'p> {
        EditableColor::Float4(value)
    }
}
/// Color editor mode.
///
/// Mapped onto `ImGuiColorEditFlags::{RGB, HSV, HEX}` by `ColorEdit::mode`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ColorEditMode {
    /// Edit as RGB(A).
    RGB,
    /// Edit as HSV(A).
    HSV,
    /// Edit as hex (e.g. #AABBCC(DD))
    HEX,
}
/// Color picker hue/saturation/value editor mode.
///
/// Mapped onto `ImGuiColorEditFlags::PickerHue{Bar,Wheel}` by `ColorPicker::mode`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ColorPickerMode {
    /// Use a bar for hue, rectangle for saturation/value.
    HueBar,
    /// Use a wheel for hue, triangle for saturation/value.
    HueWheel,
}
/// Color component formatting.
///
/// Mapped onto `ImGuiColorEditFlags::{Uint8, Float}` by the builders' `format`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ColorFormat {
    /// Display values formatted as 0..255.
    U8,
    /// Display values formatted as 0.0..1.0.
    Float,
}
/// Color editor preview style.
///
/// Mapped onto `ImGuiColorEditFlags::{AlphaPreviewHalf, AlphaPreview}` by
/// the builders' `preview`; `Opaque` clears both flags.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ColorPreview {
    /// Don't show the alpha component.
    Opaque,
    /// Half of the preview area shows the alpha component using a checkerboard pattern.
    HalfAlpha,
    /// Show the alpha component using a checkerboard pattern.
    Alpha,
}
/// Builder for a color editor widget.
#[must_use]
pub struct ColorEdit<'ui, 'p> {
    // Widget label; also forwarded to the tooltip and picker.
    label: &'p str,
    // Borrowed color storage the widget edits in place.
    value: EditableColor<'p>,
    // Accumulated ImGui flags, filled in by the builder methods.
    flags: ImGuiColorEditFlags,
    // Ties the builder to the `Ui` frame lifetime without storing it.
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> ColorEdit<'ui, 'p> {
    // Builder methods consume and return `self`; the struct is #[must_use],
    // so a chain should end in `build()`.
    /// Constructs a new color editor builder.
    pub fn new(_: &Ui<'ui>, label: &'p str, value: EditableColor<'p>) -> Self {
        ColorEdit {
            label,
            value,
            flags: ImGuiColorEditFlags::empty(),
            _phantom: PhantomData,
        }
    }
    /// Replaces all current settings with the given flags.
    #[inline]
    pub fn flags(mut self, flags: ImGuiColorEditFlags) -> Self {
        self.flags = flags;
        self
    }
    /// Enables/disables the use of the alpha component.
    // The underlying flag is negative (`NoAlpha`), so it is *set* when the
    // feature is disabled — hence `!value`. The same inversion applies to
    // every `No*` flag below.
    #[inline]
    pub fn alpha(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoAlpha, !value);
        self
    }
    /// Enables/disables the picker that appears when clicking on colored square.
    #[inline]
    pub fn picker(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoPicker, !value);
        self
    }
    /// Enables/disables toggling of the options menu when right-clicking on inputs or the small
    /// preview.
    #[inline]
    pub fn options(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoOptions, !value);
        self
    }
    /// Enables/disables the colored square preview next to the inputs.
    #[inline]
    pub fn small_preview(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoSmallPreview, !value);
        self
    }
    /// Enables/disables the input sliders/text widgets.
    #[inline]
    pub fn inputs(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoInputs, !value);
        self
    }
    /// Enables/disables the tooltip that appears when hovering the preview.
    #[inline]
    pub fn tooltip(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoTooltip, !value);
        self
    }
    /// Enables/disables display of the inline text label (the label is in any case forwarded to
    /// the tooltip and picker).
    #[inline]
    pub fn label(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoLabel, !value);
        self
    }
    /// Enables/disables the vertical alpha bar/gradient in the color picker.
    // Positive flag: set exactly when `value` is true.
    #[inline]
    pub fn alpha_bar(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::AlphaBar, value);
        self
    }
    /// Sets the preview style.
    // The three `ColorPreview` variants map onto two flags: `Opaque` clears
    // both, the other variants set exactly one.
    #[inline]
    pub fn preview(mut self, preview: ColorPreview) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::AlphaPreviewHalf,
            preview == ColorPreview::HalfAlpha,
        );
        self.flags.set(
            ImGuiColorEditFlags::AlphaPreview,
            preview == ColorPreview::Alpha,
        );
        self
    }
    /// (WIP) Currently only disables 0.0..1.0 limits in RGBA edition.
    ///
    /// Note: you probably want to use ColorFormat::Float as well.
    #[inline]
    pub fn hdr(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::HDR, value);
        self
    }
    /// Sets the color editor mode.
    // Exactly one of RGB/HSV/HEX ends up set; the other two are cleared.
    #[inline]
    pub fn mode(mut self, mode: ColorEditMode) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::RGB,
            mode == ColorEditMode::RGB,
        );
        self.flags.set(
            ImGuiColorEditFlags::HSV,
            mode == ColorEditMode::HSV,
        );
        self.flags.set(
            ImGuiColorEditFlags::HEX,
            mode == ColorEditMode::HEX,
        );
        self
    }
    /// Sets the formatting style of color components.
    #[inline]
    pub fn format(mut self, format: ColorFormat) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::Uint8,
            format == ColorFormat::U8,
        );
        self.flags.set(
            ImGuiColorEditFlags::Float,
            format == ColorFormat::Float,
        );
        self
    }
    /// Builds the color editor.
    ///
    /// Returns the `bool` reported by `igColorEdit3`/`igColorEdit4`.
    pub fn build(self) -> bool {
        // Dispatch on component count: 3 floats -> igColorEdit3, 4 -> igColorEdit4.
        match self.value {
            EditableColor::Float3(value) => unsafe {
                sys::igColorEdit3(sys::ImStr::from(self.label), value.as_mut_ptr(), self.flags)
            },
            EditableColor::Float4(value) => unsafe {
                sys::igColorEdit4(sys::ImStr::from(self.label), value.as_mut_ptr(), self.flags)
            },
        }
    }
}
/// Builder for a color picker widget.
#[must_use]
pub struct ColorPicker<'ui, 'p> {
    // Widget label; also forwarded to the tooltip.
    label: &'p str,
    // Borrowed color storage the widget edits in place.
    value: EditableColor<'p>,
    // Accumulated ImGui flags, filled in by the builder methods.
    flags: ImGuiColorEditFlags,
    // Optional reference color shown alongside the current one.
    ref_color: Option<&'p [f32; 4]>,
    // Ties the builder to the `Ui` frame lifetime without storing it.
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> ColorPicker<'ui, 'p> {
    // Builder methods consume and return `self`; the struct is #[must_use],
    // so a chain should end in `build()`.
    /// Constructs a new color picker builder.
    pub fn new(_: &Ui<'ui>, label: &'p str, value: EditableColor<'p>) -> Self {
        ColorPicker {
            label,
            value,
            flags: ImGuiColorEditFlags::empty(),
            ref_color: None,
            _phantom: PhantomData,
        }
    }
    /// Replaces all current settings with the given flags.
    #[inline]
    pub fn flags(mut self, flags: ImGuiColorEditFlags) -> Self {
        self.flags = flags;
        self
    }
    /// Enables/disables the use of the alpha component.
    // `No*` flags are negative: they are *set* when the feature is
    // disabled — hence `!value` here and in the toggles below.
    #[inline]
    pub fn alpha(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoAlpha, !value);
        self
    }
    /// Enables/disables the colored square preview next to the inputs.
    #[inline]
    pub fn small_preview(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoSmallPreview, !value);
        self
    }
    /// Enables/disables the input sliders/text widgets.
    #[inline]
    pub fn inputs(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoInputs, !value);
        self
    }
    /// Enables/disables the tooltip that appears when hovering the preview.
    #[inline]
    pub fn tooltip(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoTooltip, !value);
        self
    }
    /// Enables/disables display of the inline text label (the label is in any case forwarded to
    /// the tooltip and picker).
    #[inline]
    pub fn label(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoLabel, !value);
        self
    }
    /// Enables/disables the bigger color preview on the right side of the picker.
    #[inline]
    pub fn side_preview(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoSidePreview, !value);
        self
    }
    /// Enables/disables the vertical alpha bar/gradient in the color picker.
    // Positive flag: set exactly when `value` is true.
    #[inline]
    pub fn alpha_bar(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::AlphaBar, value);
        self
    }
    /// Sets the preview style.
    // `Opaque` clears both alpha-preview flags; the other variants set
    // exactly one.
    #[inline]
    pub fn preview(mut self, preview: ColorPreview) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::AlphaPreviewHalf,
            preview == ColorPreview::HalfAlpha,
        );
        self.flags.set(
            ImGuiColorEditFlags::AlphaPreview,
            preview == ColorPreview::Alpha,
        );
        self
    }
    /// Enables/disables the RGB inputs.
    #[inline]
    pub fn rgb(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::RGB, value);
        self
    }
    /// Enables/disables the HSV inputs.
    #[inline]
    pub fn hsv(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::HSV, value);
        self
    }
    /// Enables/disables the HEX input.
    #[inline]
    pub fn hex(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::HEX, value);
        self
    }
    /// Sets the hue/saturation/value editor mode.
    // Exactly one of the two picker-hue flags ends up set.
    #[inline]
    pub fn mode(mut self, mode: ColorPickerMode) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::PickerHueBar,
            mode == ColorPickerMode::HueBar,
        );
        self.flags.set(
            ImGuiColorEditFlags::PickerHueWheel,
            mode == ColorPickerMode::HueWheel,
        );
        self
    }
    /// Sets the formatting style of color components.
    #[inline]
    pub fn format(mut self, format: ColorFormat) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::Uint8,
            format == ColorFormat::U8,
        );
        self.flags.set(
            ImGuiColorEditFlags::Float,
            format == ColorFormat::Float,
        );
        self
    }
    /// Sets the shown reference color.
    #[inline]
    pub fn reference_color(mut self, ref_color: &'p [f32; 4]) -> Self {
        self.ref_color = Some(ref_color);
        self
    }
    /// Builds the color picker.
    ///
    /// Returns the `bool` reported by `igColorPicker4`.
    pub fn build(mut self) -> bool {
        // A three-component value has no alpha channel, so force NoAlpha.
        if let EditableColor::Float3(_) = self.value {
            self.flags.insert(ImGuiColorEditFlags::NoAlpha);
        }
        // Absent reference color is passed through as a null pointer.
        let ref_color = self.ref_color.map(|c| c.as_ptr()).unwrap_or(ptr::null());
        unsafe {
            sys::igColorPicker4(
                sys::ImStr::from(self.label),
                self.value.as_mut_ptr(),
                self.flags,
                ref_color,
            )
        }
    }
}
/// Builder for a color button widget.
#[must_use]
pub struct ColorButton<'ui, 'p> {
    // Identifier passed to ImGui for this button.
    desc_id: &'p str,
    // Color shown on the button (by value; the button does not edit it).
    color: ImVec4,
    // Accumulated ImGui flags, filled in by the builder methods.
    flags: ImGuiColorEditFlags,
    // Button size; zero components mean "use the default size".
    size: ImVec2,
    // Ties the builder to the `Ui` frame lifetime without storing it.
    _phantom: PhantomData<&'ui Ui<'ui>>,
}
impl<'ui, 'p> ColorButton<'ui, 'p> {
    /// Constructs a new color button builder.
    pub fn new(_: &Ui<'ui>, desc_id: &'p str, color: ImVec4) -> Self {
        ColorButton {
            desc_id,
            color,
            flags: ImGuiColorEditFlags::empty(),
            size: ImVec2::zero(),
            _phantom: PhantomData,
        }
    }
    /// Replaces all current settings with the given flags.
    #[inline]
    pub fn flags(mut self, flags: ImGuiColorEditFlags) -> Self {
        self.flags = flags;
        self
    }
    /// Enables/disables the use of the alpha component.
    // `NoAlpha` is a negative flag: set when alpha is disabled.
    #[inline]
    pub fn alpha(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoAlpha, !value);
        self
    }
    /// Enables/disables the tooltip that appears when hovering the preview.
    #[inline]
    pub fn tooltip(mut self, value: bool) -> Self {
        self.flags.set(ImGuiColorEditFlags::NoTooltip, !value);
        self
    }
    /// Sets the preview style.
    // `Opaque` clears both alpha-preview flags; the other variants set
    // exactly one.
    #[inline]
    pub fn preview(mut self, preview: ColorPreview) -> Self {
        self.flags.set(
            ImGuiColorEditFlags::AlphaPreviewHalf,
            preview == ColorPreview::HalfAlpha,
        );
        self.flags.set(
            ImGuiColorEditFlags::AlphaPreview,
            preview == ColorPreview::Alpha,
        );
        self
    }
    /// Sets the button size.
    ///
    /// Use 0.0 for width and/or height to use the default size.
    #[inline]
    pub fn size<S: Into<ImVec2>>(mut self, size: S) -> Self {
        self.size = size.into();
        self
    }
    /// Builds the color button.
    ///
    /// Returns the `bool` reported by `igColorButton`.
    pub fn build(self) -> bool {
        unsafe { sys::igColorButton(sys::ImStr::from(self.desc_id), self.color, self.flags, self.size) }
    }
}
|
# Heat shock RS with stress
# @input nostress stress
# @output hse hsp hsf hsf3 mfp hsf3:hse prot hsp:mfp hsp:hsf
# @initial hse prot hsp:hsf stress
# Only reachable graphs are constructed
hsf, hsp, hsf3
hsf hsp mfp, , hsf3
hsf3, hse hsp, hsf
hsf3 hsp mfp, hse, hsf
hsf3 hse, hsp, hsf3:hse
hsf3 hse hsp mfp, , hsf3:hse
hse, hsf3, hse
hse hsf3 hsp, mfp, hse
hsf3:hse, hsp, hsf3:hse hsp
hsf3:hse hsp mfp, , hsf3:hse hsp
hsp hsf, mfp, hsp:hsf
hsp:hsf stress, nostress, hsp hsf
hsp:hsf nostress, stress, hsp:hsf
hsp hsf3, mfp, hsp:hsf
hsp hsf3:hse, mfp, hsp:hsf hse
prot stress, nostress, prot mfp
prot nostress, stress, prot
hsp mfp, , hsp:mfp
mfp, hsp, mfp
hsp:mfp, , hsp prot
---
#hse prot hsp:hsf stress
stress
stress
stress
stress
stress
|
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
/// i32-valued Steam universe identifier; converted to/from raw integers
/// via `FromPrimitive`/`ToPrimitive`.
/// NOTE(review): variant semantics inferred from names — confirm against
/// the Steamworks `EUniverse` documentation.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EUniverse {
    Invalid = 0,
    Public = 1,
    Beta = 2,
    Internal = 3,
    Dev = 4,
}
/// i32-valued chat entry type; converted to/from raw integers via
/// `FromPrimitive`/`ToPrimitive`.
// NOTE(review): discriminants 4 and 5 are deliberately absent here —
// presumably retired upstream; confirm against the Steamworks enum.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatEntryType {
    Invalid = 0,
    ChatMsg = 1,
    Typing = 2,
    InviteGame = 3,
    LeftConversation = 6,
    Entered = 7,
    WasKicked = 8,
    WasBanned = 9,
    Disconnected = 10,
    HistoricalChat = 11,
    Reserved1 = 12,
    Reserved2 = 13,
    LinkBlocked = 14,
}
/// i32-valued persona (online presence) state; converted to/from raw
/// integers via `FromPrimitive`/`ToPrimitive`.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPersonaState {
    Offline = 0,
    Online = 1,
    Busy = 2,
    Away = 3,
    Snooze = 4,
    LookingToTrade = 5,
    LookingToPlay = 6,
    Invisible = 7,
}
/// i32-valued Steam account type; converted to/from raw integers via
/// `FromPrimitive`/`ToPrimitive`.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EAccountType {
    Invalid = 0,
    Individual = 1,
    Multiseat = 2,
    GameServer = 3,
    AnonGameServer = 4,
    Pending = 5,
    ContentServer = 6,
    Clan = 7,
    Chat = 8,
    ConsoleUser = 9,
    AnonUser = 10,
}
/// i32-valued friend-relationship state; converted to/from raw integers
/// via `FromPrimitive`/`ToPrimitive`.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EFriendRelationship {
    None = 0,
    Blocked = 1,
    RequestRecipient = 2,
    Friend = 3,
    RequestInitiator = 4,
    Ignored = 5,
    IgnoredFriend = 6,
}
bitflags! {
    /// Account flag bitfield; every constant is a distinct power of two
    /// (except `NormalUser`, which is the empty set).
    #[derive(Serialize, Deserialize)]
    #[allow(non_upper_case_globals)]
    pub struct EAccountFlags: i32 {
        const NormalUser = 0;
        const PersonaNameSet = 1;
        const Unbannable = 2;
        const PasswordSet = 4;
        const Support = 8;
        const Admin = 16;
        const Supervisor = 32;
        const AppEditor = 64;
        const HWIDSet = 128;
        const PersonalQASet = 256;
        const VacBeta = 512;
        const Debug = 1024;
        const Disabled = 2048;
        const LimitedUser = 4096;
        const LimitedUserForce = 8192;
        const EmailValidated = 16384;
        const MarketingTreatment = 32768;
        const OGGInviteOptOut = 65536;
        const ForcePasswordChange = 131072;
        const ForceEmailVerification = 262144;
        const LogonExtraSecurity = 524288;
        const LogonExtraSecurityDisabled = 1048576;
        const Steam2MigrationComplete = 2097152;
        const NeedLogs = 4194304;
        const Lockdown = 8388608;
        const MasterAppEditor = 16777216;
        const BannedFromWebAPI = 33554432;
        const ClansOnlyFromFriends = 67108864;
        const GlobalModerator = 134217728;
        const ParentalSettings = 268435456;
        const ThirdPartySupport = 536870912;
        const NeedsSSANextSteamLogon = 1073741824;
    }
}
// Clan (group) permission bit flags, including composite convenience masks.
// Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EClanPermission: i32 {
const Nobody = 0;
const Owner = 1;
const Officer = 2;
const OwnerAndOfficer = 3;
const Member = 4;
const Moderator = 8;
const OwnerOfficerModerator = Self::Owner.bits | Self::Officer.bits | Self::Moderator.bits;
const AllMembers = Self::Owner.bits | Self::Officer.bits | Self::Moderator.bits | Self::Member .bits;
const OGGGameOwner = 16;
const NonMember = 128;
const MemberAllowed = Self::NonMember.bits | Self::Member.bits;
const ModeratorAllowed = Self::NonMember.bits | Self::Member.bits | Self::Moderator.bits;
const OfficerAllowed = Self::NonMember.bits | Self::Member.bits | Self::Moderator.bits | Self::Officer.bits;
const OwnerAllowed = Self::NonMember.bits | Self::Member.bits | Self::Moderator.bits | Self::Officer.bits | Self::Owner.bits;
// NOTE: Anybody intentionally equals OwnerAllowed — this duplication exists upstream.
const Anybody = Self::NonMember.bits | Self::Member.bits | Self::Moderator.bits | Self::Officer.bits | Self::Owner.bits;
}
}
// Per-member chat-room permission bit flags, with composite defaults for
// each member rank. Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EChatPermission: i32 {
const Close = 1;
const Invite = 2;
const Talk = 8;
const Kick = 16;
const Mute = 32;
const SetMetadata = 64;
const ChangePermissions = 128;
const Ban = 256;
const ChangeAccess = 512;
// const EveryoneNotInClanDefault = Self::Talk;
const EveryoneDefault = Self::Talk.bits | Self::Invite.bits;
const MemberDefault = Self::Ban.bits | Self::Kick.bits | Self::Talk.bits | Self::Invite.bits;
const OfficerDefault = Self::Ban.bits | Self::Kick.bits | Self::Talk.bits | Self::Invite.bits;
const OwnerDefault = Self::ChangeAccess.bits | Self::Ban.bits | Self::SetMetadata.bits | Self::Mute.bits | Self::Kick.bits | Self::Talk.bits | Self::Invite.bits | Self::Close.bits;
// Mask = 1019 is the OR of every individual flag above.
const Mask = 1019;
}
}
// Filter bit flags used when enumerating friends-list entries.
// Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EFriendFlags: i32 {
const None = 0;
const Blocked = 1;
const FriendshipRequested = 2;
const Immediate = 4;
const ClanMember = 8;
const OnGameServer = 16;
const RequestingFriendship = 128;
const RequestingInfo = 256;
const Ignored = 512;
const IgnoredFriend = 1024;
const Suggested = 2048;
const ChatMember = 4096;
const FlagAll = 65535;
}
}
// Extra bit flags attached to a persona state (client type, rich presence,
// launch type). Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EPersonaStateFlag: i32 {
const HasRichPresence = 1;
const InJoinableGame = 2;
const Golden = 4;
const ClientTypeWeb = 256;
const ClientTypeMobile = 512;
const ClientTypeTenfoot = 1024;
const ClientTypeVR = 2048;
const LaunchTypeGamepad = 4096;
const LaunchTypeCompatTool = 8192;
}
}
// Bit flags selecting which persona-state fields a client requests/receives.
// Bit values are wire-protocol constants (note: 32 is unused upstream).
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EClientPersonaStateFlag: i32 {
const Status = 1;
const PlayerName = 2;
const QueryPort = 4;
const SourceID = 8;
const Presence = 16;
const LastSeen = 64;
const UserClanRank = 128;
const GameExtraInfo = 256;
const GameDataBlob = 512;
const ClanData = 1024;
const Facebook = 2048;
const RichPresence = 4096;
const Broadcast = 8192;
const Watching = 16384;
}
}
/// App usage telemetry event kinds. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EAppUsageEvent {
GameLaunch = 1,
GameLaunchTrial = 2,
Media = 3,
PreloadStart = 4,
PreloadFinish = 5,
MarketingMessageView = 6,
InGameAdViewed = 7,
GameLaunchFreeWeekend = 8,
}
// Status bit flags attached to an owned license. Bit values are
// wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct ELicenseFlags: i32 {
const None = 0;
const Renew = 0x01;
const RenewalFailed = 0x02;
const Pending = 0x04;
const Expired = 0x08;
const CancelledByUser = 0x10;
const CancelledByAdmin = 0x20;
const LowViolenceContent = 0x40;
const ImportedFromSteam2 = 0x80;
const ForceRunRestriction = 0x100;
const RegionRestrictionExpired = 0x200;
const CancelledByFriendlyFraudLock = 0x400;
const NotActivated = 0x800;
}
}
/// How a license was granted / is billed. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELicenseType {
NoLicense = 0,
SinglePurchase = 1,
SinglePurchaseLimitedUse = 2,
RecurringCharge = 3,
RecurringChargeLimitedUse = 4,
RecurringChargeLimitedUseWithOverages = 5,
RecurringOption = 6,
LimitedUseDelayedActivation = 7,
}
/// Payment method used for a purchase. Wire-protocol constants; numbering
/// gaps (e.g. 63, 80–127) exist upstream and are preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPaymentMethod {
None = 0,
ActivationCode = 1,
CreditCard = 2,
Giropay = 3,
PayPal = 4,
Ideal = 5,
PaySafeCard = 6,
Sofort = 7,
GuestPass = 8,
WebMoney = 9,
MoneyBookers = 10,
AliPay = 11,
Yandex = 12,
Kiosk = 13,
Qiwi = 14,
GameStop = 15,
HardwarePromo = 16,
MoPay = 17,
BoletoBancario = 18,
BoaCompraGold = 19,
BancoDoBrasilOnline = 20,
ItauOnline = 21,
BradescoOnline = 22,
Pagseguro = 23,
VisaBrazil = 24,
AmexBrazil = 25,
Aura = 26,
Hipercard = 27,
MastercardBrazil = 28,
DinersCardBrazil = 29,
AuthorizedDevice = 30,
MOLPoints = 31,
ClickAndBuy = 32,
Beeline = 33,
Konbini = 34,
EClubPoints = 35,
CreditCardJapan = 36,
BankTransferJapan = 37,
PayEasy = 38,
Zong = 39,
CultureVoucher = 40,
BookVoucher = 41,
HappymoneyVoucher = 42,
ConvenientStoreVoucher = 43,
GameVoucher = 44,
Multibanco = 45,
Payshop = 46,
MaestroBoaCompra = 47,
OXXO = 48,
ToditoCash = 49,
Carnet = 50,
SPEI = 51,
ThreePay = 52,
IsBank = 53,
Garanti = 54,
Akbank = 55,
YapiKredi = 56,
Halkbank = 57,
BankAsya = 58,
Finansbank = 59,
DenizBank = 60,
PTT = 61,
CashU = 62,
AutoGrant = 64,
WebMoneyJapan = 65,
OneCard = 66,
PSE = 67,
Exito = 68,
Efecty = 69,
Paloto = 70,
PinValidda = 71,
MangirKart = 72,
BancoCreditoDePeru = 73,
BBVAContinental = 74,
SafetyPay = 75,
PagoEfectivo = 76,
Trustly = 77,
UnionPay = 78,
BitCoin = 79,
Wallet = 128,
Valve = 129,
MasterComp = 130,
Promotional = 131,
OEMTicket = 256,
Split = 512,
Complimentary = 1024,
}
/// Detailed result code for a purchase attempt. Wire-protocol constants.
/// NOTE: the `PurchaseAmountNoSupportedByProvider` spelling (value 51) is an
/// upstream typo kept for compatibility.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPurchaseResultDetail {
NoDetail = 0,
AVSFailure = 1,
InsufficientFunds = 2,
ContactSupport = 3,
Timeout = 4,
InvalidPackage = 5,
InvalidPaymentMethod = 6,
InvalidData = 7,
OthersInProgress = 8,
AlreadyPurchased = 9,
WrongPrice = 10,
FraudCheckFailed = 11,
CancelledByUser = 12,
RestrictedCountry = 13,
BadActivationCode = 14,
DuplicateActivationCode = 15,
UseOtherPaymentMethod = 16,
UseOtherFunctionSource = 17,
InvalidShippingAddress = 18,
RegionNotSupported = 19,
AcctIsBlocked = 20,
AcctNotVerified = 21,
InvalidAccount = 22,
StoreBillingCountryMismatch = 23,
DoesNotOwnRequiredApp = 24,
CanceledByNewTransaction = 25,
ForceCanceledPending = 26,
FailCurrencyTransProvider = 27,
FailedCyberCafe = 28,
NeedsPreApproval = 29,
PreApprovalDenied = 30,
WalletCurrencyMismatch = 31,
EmailNotValidated = 32,
ExpiredCard = 33,
TransactionExpired = 34,
WouldExceedMaxWallet = 35,
MustLoginPS3AppForPurchase = 36,
CannotShipToPOBox = 37,
InsufficientInventory = 38,
CannotGiftShippedGoods = 39,
CannotShipInternationally = 40,
BillingAgreementCancelled = 41,
InvalidCoupon = 42,
ExpiredCoupon = 43,
AccountLocked = 44,
OtherAbortableInProgress = 45,
ExceededSteamLimit = 46,
OverlappingPackagesInCart = 47,
NoWallet = 48,
NoCachedPaymentMethod = 49,
CannotRedeemCodeFromClient = 50,
PurchaseAmountNoSupportedByProvider = 51,
OverlappingPackagesInPendingTransaction = 52,
RateLimited = 53,
OwnsExcludedApp = 54,
CreditCardBinMismatchesType = 55,
CartValueTooHigh = 56,
BillingAgreementAlreadyExists = 57,
POSACodeNotActivated = 58,
CannotShipToCountry = 59,
HungTransactionCancelled = 60,
PaypalInternalError = 61,
UnknownGlobalCollectError = 62,
InvalidTaxAddress = 63,
PhysicalProductLimitExceeded = 64,
PurchaseCannotBeReplayed = 65,
DelayedCompletion = 66,
BundleTypeCannotBeGifted = 67,
BlockedByUSGov = 68,
ItemsReservedForCommercialUse = 69,
GiftAlreadyOwned = 70,
GiftInvalidForRecipientRegion = 71,
GiftPricingImbalance = 72,
GiftRecipientNotSpecified = 73,
ItemsNotAllowedForCommercialUse = 74,
BusinessStoreCountryCodeMismatch = 75,
UserAssociatedWithManyCafes = 76,
UserNotAssociatedWithCafe = 77,
AddressInvalid = 78,
CreditCardNumberInvalid = 79,
CannotShipToMilitaryPostOffice = 80,
BillingNameInvalidResemblesCreditCard = 81,
PaymentMethodTemporarilyUnavailable = 82,
PaymentMethodNotSupportedForProduct = 83,
}
/// Purpose of a P2P introducer routing request. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EIntroducerRouting {
P2PVoiceChat = 1,
P2PNetworking = 2,
}
// Game-server attribute bit flags. Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EServerFlags: i32 {
const None = 0;
const Active = 1;
const Secure = 2;
const Dedicated = 4;
const Linux = 8;
const Passworded = 16;
const Private = 32;
}
}
/// Reason a game server denied a client connection. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EDenyReason {
InvalidVersion = 1,
Generic = 2,
NotLoggedOn = 3,
NoLicense = 4,
Cheater = 5,
LoggedInElseWhere = 6,
UnknownText = 7,
IncompatibleAnticheat = 8,
MemoryCorruption = 9,
IncompatibleSoftware = 10,
SteamConnectionLost = 11,
SteamConnectionError = 12,
SteamResponseTimedOut = 13,
SteamValidationStalled = 14,
SteamOwnerLeftGuestUser = 15,
}
/// Rank of a member within a clan. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EClanRank {
None = 0,
Owner = 1,
Officer = 2,
Member = 3,
Moderator = 4,
}
/// Relationship between the local user and a clan. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EClanRelationship {
None = 0,
Blocked = 1,
Invited = 2,
Member = 3,
Kicked = 4,
KickAcknowledged = 5,
PendingApproval = 6,
RequestDenied = 7,
}
/// Result of validating a user auth-session ticket. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EAuthSessionResponse {
OK = 0,
UserNotConnectedToSteam = 1,
NoLicenseOrExpired = 2,
VACBanned = 3,
LoggedInElseWhere = 4,
VACCheckTimedOut = 5,
AuthTicketCanceled = 6,
AuthTicketInvalidAlreadyUsed = 7,
AuthTicketInvalid = 8,
PublisherIssuedBan = 9,
}
/// Result of attempting to enter a chat room. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomEnterResponse {
Success = 1,
DoesntExist = 2,
NotAllowed = 3,
Full = 4,
Error = 5,
Banned = 6,
Limited = 7,
ClanDisabled = 8,
CommunityBan = 9,
MemberBlockedYou = 10,
YouBlockedMember = 11,
}
/// Kind of chat room. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomType {
Friend = 1,
MUC = 2,
Lobby = 3,
}
/// Kind of chat-room info notification. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatInfoType {
StateChange = 1,
InfoUpdate = 2,
MemberLimitChange = 3,
}
/// Moderation/administration action performed in a chat room. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatAction {
InviteChat = 1,
Kick = 2,
Ban = 3,
UnBan = 4,
StartVoiceSpeak = 5,
EndVoiceSpeak = 6,
LockChat = 7,
UnlockChat = 8,
CloseChat = 9,
SetJoinable = 10,
SetUnjoinable = 11,
SetOwner = 12,
SetInvisibleToFriends = 13,
SetVisibleToFriends = 14,
SetModerated = 15,
SetUnmoderated = 16,
}
/// Result of an `EChatAction` attempt. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatActionResult {
Success = 1,
Error = 2,
NotPermitted = 3,
NotAllowedOnClanMember = 4,
NotAllowedOnBannedUser = 5,
NotAllowedOnChatOwner = 6,
NotAllowedOnSelf = 7,
ChatDoesntExist = 8,
ChatFull = 9,
VoiceSlotsFull = 10,
}
/// Section of app-info metadata to request. Wire-protocol constants;
/// numbering gaps (8–9, 12) exist upstream and are preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EAppInfoSection {
Unknown = 0,
All = 1,
Common = 2,
Extended = 3,
Config = 4,
Stats = 5,
Install = 6,
Depots = 7,
UFS = 10,
OGG = 11,
Policies = 13,
SysReqs = 14,
Community = 15,
Store = 16,
Localization = 17,
Broadcastgamedata = 18,
Computed = 19,
Albummetadata = 20,
}
/// Source a content download was served from. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EContentDownloadSourceType {
Invalid = 0,
CS = 1,
CDN = 2,
LCS = 3,
ProxyCache = 4,
LANPeer = 5,
SLS = 6,
SteamCache = 7,
OpenCache = 8,
}
/// Coarse platform/architecture identifier. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPlatformType {
Unknown = 0,
Win32 = 1,
Win64 = 2,
Linux64 = 3,
OSX = 4,
PS3 = 5,
Linux32 = 6,
}
/// Operating-system identifier reported at logon. Wire-protocol constants:
/// negative ranges group non-Windows families (Web, iOS, Android, UMQ, PS3,
/// macOS, Linux); Windows versions count up from 0. Do not renumber.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EOSType {
Web = -700,
IOSUnknown = -600,
IOS1 = -599,
IOS2 = -598,
IOS3 = -597,
IOS4 = -596,
IOS5 = -595,
IOS6 = -594,
IOS6_1 = -593,
IOS7 = -592,
IOS7_1 = -591,
IOS8 = -590,
IOS8_1 = -589,
IOS8_2 = -588,
IOS8_3 = -587,
IOS8_4 = -586,
IOS9 = -585,
IOS9_1 = -584,
IOS9_2 = -583,
IOS9_3 = -582,
IOS10 = -581,
IOS10_1 = -580,
IOS10_2 = -579,
IOS10_3 = -578,
IOS11 = -577,
IOS11_1 = -576,
IOS11_2 = -575,
IOS11_3 = -574,
IOS11_4 = -573,
IOS12 = -572,
IOS12_1 = -571,
AndroidUnknown = -500,
Android6 = -499,
Android7 = -498,
Android8 = -497,
Android9 = -496,
UMQ = -400,
PS3 = -300,
MacOSUnknown = -102,
MacOS104 = -101,
MacOS105 = -100,
MacOS1058 = -99,
MacOS106 = -95,
MacOS1063 = -94,
MacOS1064_slgu = -93,
MacOS1067 = -92,
MacOS107 = -90,
MacOS108 = -89,
MacOS109 = -88,
MacOS1010 = -87,
MacOS1011 = -86,
MacOS1012 = -85,
Macos1013 = -84,
Macos1014 = -83,
LinuxUnknown = -203,
Linux22 = -202,
Linux24 = -201,
Linux26 = -200,
Linux32 = -199,
Linux35 = -198,
Linux36 = -197,
Linux310 = -196,
Linux316 = -195,
Linux318 = -194,
Linux3x = -193,
Linux4x = -192,
Linux41 = -191,
Linux44 = -190,
Linux49 = -189,
Linux414 = -188,
Linux419 = -187,
Linux5x = -186,
WinUnknown = 0,
Win311 = 1,
Win95 = 2,
Win98 = 3,
WinME = 4,
WinNT = 5,
Win2000 = 6,
WinXP = 7,
Win2003 = 8,
WinVista = 9,
Windows7 = 10,
Win2008 = 11,
Win2012 = 12,
Windows8 = 13,
Windows81 = 14,
Win2012R2 = 15,
Windows10 = 16,
Win2016 = 17,
WinMAX = 18,
}
/// Backend server role identifier. Wire-protocol constants; negative values
/// are special/pseudo roles and numbering gaps exist upstream — preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EServerType {
Util = -2,
Client = -3,
CServer = -4,
CEconBase = -5,
Invalid = -1,
Shell = 0,
GM = 1,
AM = 3,
BS = 4,
VS = 5,
ATS = 6,
CM = 7,
FBS = 8,
BoxMonitor = 9,
SS = 10,
DRMS = 11,
Console = 13,
PICS = 14,
ContentStats = 16,
DP = 17,
WG = 18,
SM = 19,
SLC = 20,
UFS = 21,
Community = 24,
AppInformation = 26,
Spare = 27,
FTS = 28,
SiteLicense = 29,
PS = 30,
IS = 31,
CCS = 32,
DFS = 33,
LBS = 34,
MDS = 35,
CS = 36,
GC = 37,
NS = 38,
OGS = 39,
WebAPI = 40,
UDS = 41,
MMS = 42,
GMS = 43,
KGS = 44,
UCM = 45,
RM = 46,
FS = 47,
Econ = 48,
Backpack = 49,
UGS = 50,
StoreFeature = 51,
MoneyStats = 52,
CRE = 53,
UMQ = 54,
Workshop = 55,
BRP = 56,
GCH = 57,
MPAS = 58,
Trade = 59,
Secrets = 60,
Logsink = 61,
Market = 62,
Quest = 63,
WDS = 64,
ACS = 65,
PNP = 66,
TaxForm = 67,
ExternalMonitor = 68,
Parental = 69,
PartnerUpload = 70,
Partner = 71,
ES = 72,
DepotWebContent = 73,
ExternalConfig = 74,
GameNotifications = 75,
MarketRepl = 76,
MarketSearch = 77,
Localization = 78,
Steam2Emulator = 79,
PublicTest = 80,
SolrMgr = 81,
BroadcastRelay = 82,
BroadcastDirectory = 83,
VideoManager = 84,
TradeOffer = 85,
BroadcastChat = 86,
Phone = 87,
AccountScore = 88,
Support = 89,
LogRequest = 90,
LogWorker = 91,
EmailDelivery = 92,
InventoryManagement = 93,
Auth = 94,
StoreCatalog = 95,
HLTVRelay = 96,
IDLS = 97,
Perf = 98,
ItemInventory = 99,
Watchdog = 100,
AccountHistory = 101,
Chat = 102,
Shader = 103,
AccountHardware = 104,
WebRTC = 105,
Giveaway = 106,
ChatRoom = 107,
VoiceChat = 108,
QMS = 109,
Trust = 110,
TimeMachine = 111,
VACDBMaster = 112,
ContentServerConfig = 113,
}
/// Billing model attached to a package/license. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EBillingType {
NoCost = 0,
BillOnceOnly = 1,
BillMonthly = 2,
ProofOfPrepurchaseOnly = 3,
GuestPass = 4,
HardwarePromo = 5,
Gift = 6,
AutoGrant = 7,
OEMTicket = 8,
RecurringOption = 9,
BillOnceOrCDKey = 10,
Repurchaseable = 11,
FreeOnDemand = 12,
Rental = 13,
CommercialLicense = 14,
FreeCommercialLicense = 15,
// Sentinel count, not a real billing type.
NumBillingTypes = 16,
}
/// Class of a product activation code. `repr(u32)` (unlike the i32 enums
/// here) so `Invalid = 4294967295` (0xFFFFFFFF) fits. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u32)]
pub enum EActivationCodeClass {
WonCDKey = 0,
ValveCDKey = 1,
Doom3CDKey = 2,
DBLookup = 3,
Steam2010Key = 4,
Test = 2147483647,
Invalid = 4294967295,
}
// Bit flags describing how a chat member's state changed.
// Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EChatMemberStateChange: i32 {
const Entered = 0x01;
const Left = 0x02;
const Disconnected = 0x04;
const Kicked = 0x08;
const Banned = 0x10;
const VoiceSpeaking = 0x1000;
const VoiceDoneSpeaking = 0x2000;
}
}
/// Geographic region code. `repr(u8)` — one byte on the wire; 0xFF = World.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u8)]
pub enum ERegionCode {
USEast = 0x00,
USWest = 0x01,
SouthAmerica = 0x02,
Europe = 0x03,
Asia = 0x04,
Australia = 0x05,
MiddleEast = 0x06,
Africa = 0x07,
World = 0xFF,
}
/// Wallet currency code. Wire-protocol constants; the gap at 33 exists
/// upstream and is preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ECurrencyCode {
Invalid = 0,
USD = 1,
GBP = 2,
EUR = 3,
CHF = 4,
RUB = 5,
PLN = 6,
BRL = 7,
JPY = 8,
NOK = 9,
IDR = 10,
MYR = 11,
PHP = 12,
SGD = 13,
THB = 14,
VND = 15,
KRW = 16,
TRY = 17,
UAH = 18,
MXN = 19,
CAD = 20,
AUD = 21,
NZD = 22,
CNY = 23,
INR = 24,
CLP = 25,
PEN = 26,
COP = 27,
ZAR = 28,
HKD = 29,
TWD = 30,
SAR = 31,
AED = 32,
ARS = 34,
ILS = 35,
BYN = 36,
KZT = 37,
KWD = 38,
QAR = 39,
CRC = 40,
UYU = 41,
}
// Per-file attribute bit flags in a depot manifest.
// Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EDepotFileFlag: i32 {
const UserConfig = 1;
const VersionedUserConfig = 2;
const Encrypted = 4;
const ReadOnly = 8;
const Hidden = 16;
const Executable = 32;
const Directory = 64;
const CustomExecutable = 128;
const InstallScript = 256;
const Symlink = 512;
}
}
/// Ordering/filter used when enumerating workshop content. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EWorkshopEnumerationType {
RankedByVote = 0,
Recent = 1,
Trending = 2,
FavoriteOfFriends = 3,
VotedByFriends = 4,
ContentByFriends = 5,
RecentFromFollowedUsers = 6,
}
/// Visibility of a published workshop file. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPublishedFileVisibility {
Public = 0,
FriendsOnly = 1,
Private = 2,
}
/// Kind of a published workshop file. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EWorkshopFileType {
Community = 0,
Microtransaction = 1,
Collection = 2,
Art = 3,
Video = 4,
Screenshot = 5,
Game = 6,
Software = 7,
Concept = 8,
WebGuide = 9,
IntegratedGuide = 10,
Merch = 11,
ControllerBinding = 12,
SteamworksAccessInvite = 13,
SteamVideo = 14,
GameManagedItem = 15,
}
/// User action recorded against a workshop file. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EWorkshopFileAction {
Played = 0,
Completed = 1,
}
/// Response to an economy trade request. Wire-protocol constants.
/// NOTE: upstream reuses value 16 (SchoolLabInitiator is commented out in
/// favor of SchoolLabTarget); gaps at 17, 19 and 34–49 are preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EEconTradeResponse {
Accepted = 0,
Declined = 1,
TradeBannedInitiator = 2,
TradeBannedTarget = 3,
TargetAlreadyTrading = 4,
Disabled = 5,
NotLoggedIn = 6,
Cancel = 7,
TooSoon = 8,
TooSoonPenalty = 9,
ConnectionFailed = 10,
AlreadyTrading = 11,
AlreadyHasTradeRequest = 12,
NoResponse = 13,
CyberCafeInitiator = 14,
CyberCafeTarget = 15,
// SchoolLabInitiator = 16,
SchoolLabTarget = 16,
InitiatorBlockedTarget = 18,
InitiatorNeedsVerifiedEmail = 20,
InitiatorNeedsSteamGuard = 21,
TargetAccountCannotTrade = 22,
InitiatorSteamGuardDuration = 23,
InitiatorPasswordResetProbation = 24,
InitiatorNewDeviceCooldown = 25,
InitiatorSentInvalidCookie = 26,
NeedsEmailConfirmation = 27,
InitiatorRecentEmailChange = 28,
NeedsMobileConfirmation = 29,
TradingHoldForClearedTradeOffersInitiator = 30,
WouldExceedMaxAssetCount = 31,
DisabledInRegion = 32,
DisabledInPartnerRegion = 33,
OKToDeliver = 50,
}
// Targeting bit flags for marketing messages, with a composite mask of all
// platform bits. Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EMarketingMessageFlags: i32 {
const None = 0;
const HighPriority = 1;
const PlatformWindows = 2;
const PlatformMac = 4;
const PlatformLinux = 8;
const PlatformRestrictions = Self::PlatformWindows.bits | Self::PlatformMac.bits | Self::PlatformLinux.bits;
}
}
/// Category of a news update message. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ENewsUpdateType {
AppNews = 0,
SteamAds = 1,
SteamNews = 2,
CDDBUpdate = 3,
ClientUpdate = 4,
}
/// Kind of system instant message shown to the user. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ESystemIMType {
RawText = 0,
InvalidCard = 1,
RecurringPurchaseFailed = 2,
CardWillExpire = 3,
SubscriptionExpired = 4,
GuestPassReceived = 5,
GuestPassGranted = 6,
GiftRevoked = 7,
SupportMessage = 8,
SupportMessageClearAlert = 9,
}
// Chat-room attribute bit flags. Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EChatFlags: i32 {
const Locked = 1;
const InvisibleToFriends = 2;
const Moderated = 4;
const Unjoinable = 8;
}
}
// Platform bit flags for remote-storage (cloud) file sync; All = -1 sets
// every bit. Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct ERemoteStoragePlatform: i32 {
const None = 0;
const Windows = 1;
const OSX = 2;
const PS3 = 4;
const Linux = 8;
const Switch = 16;
const Android = 32;
const IPhoneOS = 64;
const All = -1;
}
}
// DRM blob download mode bit flags; AllMask = 7 covers File|Parts|Compressed.
// Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EDRMBlobDownloadType: i32 {
const Error = 0;
const File = 1;
const Parts = 2;
const Compressed = 4;
const AllMask = 7;
const IsJob = 8;
const HighPriority = 16;
const AddTimestamp = 32;
const LowPriority = 64;
}
}
/// Detailed failure code for a DRM blob download; values >= 65536 encode a
/// target-lock range (TargetLocked_Base..=TargetLocked_Max). Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EDRMBlobDownloadErrorDetail {
None = 0,
DownloadFailed = 1,
TargetLocked = 2,
OpenZip = 3,
ReadZipDirectory = 4,
UnexpectedZipEntry = 5,
UnzipFullFile = 6,
UnknownBlobType = 7,
UnzipStrips = 8,
UnzipMergeGuid = 9,
UnzipSignature = 10,
ApplyStrips = 11,
ApplyMergeGuid = 12,
ApplySignature = 13,
AppIdMismatch = 14,
AppIdUnexpected = 15,
AppliedSignatureCorrupt = 16,
ApplyValveSignatureHeader = 17,
UnzipValveSignatureHeader = 18,
PathManipulationError = 19,
TargetLocked_Base = 65536,
TargetLocked_Max = 131071,
NextBase = 131072,
}
/// Client statistic being reported. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EClientStat {
P2PConnectionsUDP = 0,
P2PConnectionsRelay = 1,
P2PGameConnections = 2,
P2PVoiceConnections = 3,
BytesDownloaded = 4,
}
/// How a reported client statistic is aggregated server-side. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EClientStatAggregateMethod {
LatestOnly = 0,
Sum = 1,
Event = 2,
Scalar = 3,
}
/// Scope of a leaderboard entries request. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELeaderboardDataRequest {
Global = 0,
GlobalAroundUser = 1,
Friends = 2,
Users = 3,
}
/// Sort direction of a leaderboard. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELeaderboardSortMethod {
None = 0,
Ascending = 1,
Descending = 2,
}
/// How leaderboard scores are displayed. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELeaderboardDisplayType {
None = 0,
Numeric = 1,
TimeSeconds = 2,
TimeMilliSeconds = 3,
}
/// Policy for uploading a leaderboard score. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELeaderboardUploadScoreMethod {
None = 0,
KeepBest = 1,
ForceUpdate = 2,
}
// Privacy-state bit flags for user-created content files; Invalid = -1 sets
// every bit. Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EUCMFilePrivacyState: i32 {
const Invalid = -1;
const Private = 2;
const FriendsOnly = 4;
const Public = 8;
const All = Self::Public.bits | Self::FriendsOnly.bits | Self::Private.bits;
}
}
/// Ranking/query mode for published-file searches. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPublishedFileQueryType {
RankedByVote = 0,
RankedByPublicationDate = 1,
AcceptedForGameRankedByAcceptanceDate = 2,
RankedByTrend = 3,
FavoritedByFriendsRankedByPublicationDate = 4,
CreatedByFriendsRankedByPublicationDate = 5,
RankedByNumTimesReported = 6,
CreatedByFollowedUsersRankedByPublicationDate = 7,
NotYetRated = 8,
RankedByTotalUniqueSubscriptions = 9,
RankedByTotalVotesAsc = 10,
RankedByVotesUp = 11,
RankedByTextSearch = 12,
RankedByPlaytimeTrend = 13,
RankedByTotalPlaytime = 14,
RankedByAveragePlaytimeTrend = 15,
RankedByLifetimeAveragePlaytime = 16,
RankedByPlaytimeSessionsTrend = 17,
RankedByLifetimePlaytimeSessions = 18,
RankedByInappropriateContentRating = 19,
}
/// Provider that scanned a published file for inappropriate content. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPublishedFileInappropriateProvider {
Invalid = 0,
Google = 1,
Amazon = 2,
}
/// Likelihood score (0–100 scale) that a published file is inappropriate.
/// Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EPublishedFileInappropriateResult {
NotScanned = 0,
VeryUnlikely = 1,
Unlikely = 30,
Possible = 50,
Likely = 75,
VeryLikely = 100,
}
/// UI display status of an app/library item. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EDisplayStatus {
Invalid = 0,
Launching = 1,
Uninstalling = 2,
Installing = 3,
Running = 4,
Validating = 5,
Updating = 6,
Downloading = 7,
Synchronizing = 8,
ReadyToInstall = 9,
ReadyToPreload = 10,
ReadyToLaunch = 11,
RegionRestricted = 12,
PresaleOnly = 13,
InvalidPlatform = 14,
ParentalBlocked = 15,
PreloadOnly = 16,
BorrowerLocked = 17,
UpdatePaused = 18,
UpdateQueued = 19,
UpdateRequired = 20,
UpdateDisabled = 21,
DownloadPaused = 22,
DownloadQueued = 23,
DownloadRequired = 24,
DownloadDisabled = 25,
LicensePending = 26,
LicenseExpired = 27,
AvailForFree = 28,
AvailToBorrow = 29,
AvailGuestPass = 30,
Purchase = 31,
}
// App category bit flags. Bit values are wire-protocol constants.
// NOTE: `Deprected` (sic) is an upstream misspelling kept for compatibility;
// DepotOnly = -2147483648 is the sign bit (0x80000000).
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EAppType: i32 {
const Invalid = 0;
const Game = 1;
const Application = 2;
const Tool = 4;
const Demo = 8;
const Deprected = 16;
const DLC = 32;
const Guide = 64;
const Driver = 128;
const Config = 256;
const Hardware = 512;
const Franchise = 1024;
const Video = 2048;
const Plugin = 4096;
const Music = 8192;
const Series = 16384;
const Comic = 32768;
const Beta = 65536;
const Shortcut = 1073741824;
const DepotOnly = -2147483648;
}
}
/// Moderation model of a chat-room group. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomGroupType {
Default = 0,
Unmoderated = 1,
}
/// Notification level a user selected for a chat room. Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatroomNotificationLevel {
Invalid = 0,
None = 1,
MentionMe = 2,
MentionAll = 3,
AllMessages = 4,
}
/// Membership state change of a chat-room member. Wire-protocol constants;
/// gaps at 5–6 and 11 exist upstream and are preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomMemberStateChange {
Invalid = 0,
Joined = 1,
Parted = 2,
Kicked = 3,
Invited = 4,
RankChanged = 7,
InviteDismissed = 8,
Muted = 9,
Banned = 10,
RolesChanged = 12,
}
/// Kind of server-generated chat-room message. Wire-protocol constants;
/// gaps at 6–7 exist upstream and are preserved.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomServerMsg {
Invalid = 0,
RenameChatRoom = 1,
Joined = 2,
Parted = 3,
Kicked = 4,
Invited = 5,
InviteDismissed = 8,
ChatRoomTaglineChanged = 9,
ChatRoomAvatarChanged = 10,
AppCustom = 11,
}
/// Rank of a member within a chat-room group (values spaced to leave room
/// for intermediate ranks). Wire-protocol constants.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomGroupRank {
Default = 0,
Viewer = 10,
Guest = 15,
Member = 20,
Moderator = 30,
Officer = 40,
Owner = 50,
}
// Permission bit flags within a chat-room group.
// Bit values are wire-protocol constants.
bitflags! {
#[derive(Serialize, Deserialize)]
pub struct EChatRoomGroupPermissions: i32 {
const Default = 0;
const Valid = 1;
const CanInvite = 2;
const CanKick = 4;
const CanBan = 8;
const CanAdminChannel = 16;
}
}
/// Actions that can be performed (and permission-gated) in a chat-room group.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomGroupAction {
    Default = 0,
    CreateRenameDeleteChannel = 1,
    Kick = 2,
    Ban = 3,
    Invite = 4,
    ChangeTaglineAvatarName = 5,
    Chat = 6,
    ViewHistory = 7,
    ChangeGroupRoles = 8,
    ChangeUserRoles = 9,
    MentionAll = 10,
    SetWatchingBroadcast = 11,
}
/// Whether the local user has joined a given chat-room group.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EChatRoomJoinState {
    Default = 0,
    None = 1,
    Joined = 2,
}
/// Progression of a voice-call setup, from idle through WebRTC negotiation
/// to `Connected`.
/// NOTE(review): `InitatedWebRTCSession` is a likely typo for
/// `Initiated…`, but renaming breaks the public API — confirm before fixing.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum EVoiceCallState {
    None = 0,
    ScheduledInitiate = 1,
    RequestedMicAccess = 2,
    LocalMicOnly = 3,
    CreatePeerConnection = 4,
    InitatedWebRTCSession = 5,
    WebRTCConnectedWaitingOnIceConnected = 6,
    RequestedPermission = 7,
    NotifyingVoiceChatOfWebRTCSession = 8,
    Connected = 9,
}
/// Lifecycle states of a trade offer.
/// Note the discriminants start at 1 (`Invalid = 1`), not 0 — there is no
/// zero value, so `FromPrimitive` on 0 yields `None`.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ETradeOfferState {
    Invalid = 1,
    Active = 2,
    Accepted = 3,
    Countered = 4,
    Expired = 5,
    Canceled = 6,
    Declined = 7,
    InvalidItems = 8,
    CreatedNeedsConfirmation = 9,
    CanceledBySecondFactor = 10,
    InEscrow = 11,
}
/// How a trade offer is confirmed by the user.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ETradeOfferConfirmationMethod {
    Invalid = 0,
    Email = 1,
    MobileApp = 2,
}
/// Lobby visibility / access types.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELobbyType {
    Private = 0,
    FriendsOnly = 1,
    Public = 2,
    Invisible = 3,
    PrivateUnique = 4,
}
/// Kinds of filters that can be applied to a lobby-list request.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELobbyFilterType {
    String = 0,
    Numerical = 1,
    SlotsAvailable = 2,
    NearValue = 3,
    Distance = 4,
}
/// Comparison operators for lobby filters.
/// Discriminants are signed (−2 … 3) and are wire values — do not renumber.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELobbyComparison {
    EqualToOrLessThan = -2,
    LessThan = -1,
    Equal = 0,
    GreaterThan = 1,
    EqualToOrGreaterThan = 2,
    NotEqual = 3,
}
/// Geographic distance buckets for lobby search, from nearest to worldwide.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ELobbyDistanceFilter {
    Close = 0,
    Default = 1,
    Far = 2,
    Worldwide = 3,
}
/// Protocol used when probing IPv6 connectivity.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ESteamIPv6ConnectivityProtocol {
    Invalid = 0,
    Http = 1,
    Udp = 2,
}
/// Result of an IPv6 connectivity probe.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ESteamIPv6ConnectivityState {
    Unknown = 0,
    Good = 1,
    Bad = 2,
}
/// Which Steam realm (global vs. China) an account/connection belongs to.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[repr(i32)]
pub enum ESteamRealm {
    Unknown = 0,
    SteamGlobal = 1,
    SteamChina = 2,
}
/// General-purpose result/status code attached to protocol responses
/// (`OK = 1` is success; everything else is a failure/condition code).
/// Discriminants are wire values — do not renumber; note the gap at 4.
/// Serialized as the raw integer via `Serialize_repr`/`Deserialize_repr`.
#[derive(FromPrimitive, ToPrimitive, Clone, Debug, PartialEq, Eq, Serialize_repr, Deserialize_repr)]
#[repr(i32)]
pub enum EResult {
    Invalid = 0,
    OK = 1,
    Fail = 2,
    NoConnection = 3,
    InvalidPassword = 5,
    LoggedInElsewhere = 6,
    InvalidProtocolVer = 7,
    InvalidParam = 8,
    FileNotFound = 9,
    Busy = 10,
    InvalidState = 11,
    InvalidName = 12,
    InvalidEmail = 13,
    DuplicateName = 14,
    AccessDenied = 15,
    Timeout = 16,
    Banned = 17,
    AccountNotFound = 18,
    InvalidSteamID = 19,
    ServiceUnavailable = 20,
    NotLoggedOn = 21,
    Pending = 22,
    EncryptionFailure = 23,
    InsufficientPrivilege = 24,
    LimitExceeded = 25,
    Revoked = 26,
    Expired = 27,
    AlreadyRedeemed = 28,
    DuplicateRequest = 29,
    AlreadyOwned = 30,
    IPNotFound = 31,
    PersistFailed = 32,
    LockingFailed = 33,
    LogonSessionReplaced = 34,
    ConnectFailed = 35,
    HandshakeFailed = 36,
    IOFailure = 37,
    RemoteDisconnect = 38,
    ShoppingCartNotFound = 39,
    Blocked = 40,
    Ignored = 41,
    NoMatch = 42,
    AccountDisabled = 43,
    ServiceReadOnly = 44,
    AccountNotFeatured = 45,
    AdministratorOK = 46,
    ContentVersion = 47,
    TryAnotherCM = 48,
    PasswordRequiredToKickSession = 49,
    AlreadyLoggedInElsewhere = 50,
    Suspended = 51,
    Cancelled = 52,
    DataCorruption = 53,
    DiskFull = 54,
    RemoteCallFailed = 55,
    PasswordUnset = 56,
    ExternalAccountUnlinked = 57,
    PSNTicketInvalid = 58,
    ExternalAccountAlreadyLinked = 59,
    RemoteFileConflict = 60,
    IllegalPassword = 61,
    SameAsPreviousValue = 62,
    AccountLogonDenied = 63,
    CannotUseOldPassword = 64,
    InvalidLoginAuthCode = 65,
    AccountLogonDeniedNoMail = 66,
    HardwareNotCapableOfIPT = 67,
    IPTInitError = 68,
    ParentalControlRestricted = 69,
    FacebookQueryError = 70,
    ExpiredLoginAuthCode = 71,
    IPLoginRestrictionFailed = 72,
    AccountLockedDown = 73,
    AccountLogonDeniedVerifiedEmailRequired = 74,
    NoMatchingURL = 75,
    BadResponse = 76,
    RequirePasswordReEntry = 77,
    ValueOutOfRange = 78,
    UnexpectedError = 79,
    Disabled = 80,
    InvalidCEGSubmission = 81,
    RestrictedDevice = 82,
    RegionLocked = 83,
    RateLimitExceeded = 84,
    AccountLoginDeniedNeedTwoFactor = 85,
    ItemDeleted = 86,
    AccountLoginDeniedThrottle = 87,
    TwoFactorCodeMismatch = 88,
    TwoFactorActivationCodeMismatch = 89,
    AccountAssociatedToMultiplePartners = 90,
    NotModified = 91,
    NoMobileDevice = 92,
    TimeNotSynced = 93,
    SMSCodeFailed = 94,
    AccountLimitExceeded = 95,
    AccountActivityLimitExceeded = 96,
    PhoneActivityLimitExceeded = 97,
    RefundToWallet = 98,
    EmailSendFailure = 99,
    NotSettled = 100,
    NeedCaptcha = 101,
    GSLTDenied = 102,
    GSOwnerDenied = 103,
    InvalidItemType = 104,
    IPBanned = 105,
    GSLTExpired = 106,
    InsufficientFunds = 107,
    TooManyPending = 108,
    NoSiteLicensesFound = 109,
    WGNetworkSendExceeded = 110,
    AccountNotFriends = 111,
    LimitedUserAccount = 112,
    CantRemoveItem = 113,
}
/// Protocol message identifiers (one per wire message type).
/// Discriminants are wire values and are deliberately sparse and
/// non-monotonic — never renumber, fill gaps, or reorder-renumber.
/// Serialized as the raw integer via `Serialize_repr`/`Deserialize_repr`.
#[derive(FromPrimitive, ToPrimitive, Hash, Clone, Debug, PartialEq, Eq, Serialize_repr, Deserialize_repr)]
#[repr(i32)]
pub enum EMsg {
    Invalid = 0,
    Multi = 1,
    RemoteSysID = 128,
    FileXferRequest = 1200,
    FileXferResponse = 1201,
    FileXferData = 1202,
    FileXferEnd = 1203,
    FileXferDataAck = 1204,
    ChannelEncryptRequest = 1303,
    ChannelEncryptResponse = 1304,
    ChannelEncryptResult = 1305,
    ClientReportOverlayDetourFailure = 5517,
    ClientMMSGetLobbyData = 6611,
    ClientMMSLobbyData = 6612,
    ClientChatAction = 597,
    CSUserContentRequest = 652,
    ClientLogOn_Deprecated = 701,
    ClientAnonLogOn_Deprecated = 702,
    ClientHeartBeat = 703,
    ClientVACResponse = 704,
    ClientLogOff = 706,
    ClientNoUDPConnectivity = 707,
    ClientInformOfCreateAccount = 708,
    ClientAckVACBan = 709,
    ClientConnectionStats = 710,
    ClientInitPurchase = 711,
    ClientPingResponse = 712,
    ClientRemoveFriend = 714,
    ClientGamesPlayedNoDataBlob = 715,
    ClientChangeStatus = 716,
    ClientVacStatusResponse = 717,
    ClientFriendMsg = 718,
    ClientGetFinalPrice = 722,
    ClientSystemIM = 726,
    ClientSystemIMAck = 727,
    ClientGetLicenses = 728,
    ClientCancelLicense = 729,
    ClientGetLegacyGameKey = 730,
    ClientContentServerLogOn_Deprecated = 731,
    ClientAckVACBan2 = 732,
    ClientCompletePurchase = 733,
    ClientCancelPurchase = 734,
    ClientAckMessageByGID = 735,
    ClientGetPurchaseReceipts = 736,
    ClientAckPurchaseReceipt = 737,
    ClientSendGuestPass = 739,
    ClientAckGuestPass = 740,
    ClientRedeemGuestPass = 741,
    ClientGamesPlayed = 742,
    ClientRegisterKey = 743,
    ClientInviteUserToClan = 744,
    ClientAcknowledgeClanInvite = 745,
    ClientPurchaseWithMachineID = 746,
    ClientAppUsageEvent = 747,
    ClientGetGiftTargetList = 748,
    ClientGetGiftTargetListResponse = 749,
    ClientLogOnResponse = 751,
    ClientVACChallenge = 753,
    ClientSetHeartbeatRate = 755,
    ClientNotLoggedOnDeprecated = 756,
    ClientLoggedOff = 757,
    GSApprove = 758,
    GSDeny = 759,
    GSKick = 760,
    ClientCreateAcctResponse = 761,
    ClientPurchaseResponse = 763,
    ClientPing = 764,
    ClientNOP = 765,
    ClientPersonaState = 766,
    ClientFriendsList = 767,
    ClientAccountInfo = 768,
    ClientVacStatusQuery = 770,
    ClientNewsUpdate = 771,
    ClientGameConnectDeny = 773,
    GSStatusReply = 774,
    ClientGetFinalPriceResponse = 775,
    ClientGameConnectTokens = 779,
    ClientLicenseList = 780,
    ClientCancelLicenseResponse = 781,
    ClientVACBanStatus = 782,
    ClientCMList = 783,
    ClientEncryptPct = 784,
    ClientGetLegacyGameKeyResponse = 785,
    CSUserContentApprove = 787,
    CSUserContentDeny = 788,
    ClientInitPurchaseResponse = 789,
    ClientAddFriend = 791,
    ClientAddFriendResponse = 792,
    ClientInviteFriend = 793,
    ClientInviteFriendResponse = 794,
    ClientSendGuestPassResponse = 795,
    ClientAckGuestPassResponse = 796,
    ClientRedeemGuestPassResponse = 797,
    ClientUpdateGuestPassesList = 798,
    ClientChatMsg = 799,
    ClientChatInvite = 800,
    ClientJoinChat = 801,
    ClientChatMemberInfo = 802,
    ClientLogOnWithCredentials_Deprecated = 803,
    ClientPasswordChangeResponse = 805,
    ClientChatEnter = 807,
    ClientFriendRemovedFromSource = 808,
    ClientCreateChat = 809,
    ClientCreateChatResponse = 810,
    ClientUpdateChatMetadata = 811,
    ClientP2PIntroducerMessage = 813,
    ClientChatActionResult = 814,
    ClientRequestFriendData = 815,
    ClientGetUserStats = 818,
    ClientGetUserStatsResponse = 819,
    ClientStoreUserStats = 820,
    ClientStoreUserStatsResponse = 821,
    ClientClanState = 822,
    ClientServiceModule = 830,
    ClientServiceCall = 831,
    ClientServiceCallResponse = 832,
    ClientNatTraversalStatEvent = 839,
    ClientAppInfoRequest = 840,
    ClientAppInfoResponse = 841,
    ClientSteamUsageEvent = 842,
    ClientCheckPassword = 845,
    ClientResetPassword = 846,
    ClientCheckPasswordResponse = 848,
    ClientResetPasswordResponse = 849,
    ClientSessionToken = 850,
    ClientDRMProblemReport = 851,
    ClientSetIgnoreFriend = 855,
    ClientSetIgnoreFriendResponse = 856,
    ClientGetAppOwnershipTicket = 857,
    ClientGetAppOwnershipTicketResponse = 858,
    ClientGetLobbyListResponse = 860,
    ClientGetLobbyMetadata = 861,
    ClientGetLobbyMetadataResponse = 862,
    ClientVTTCert = 863,
    ClientAppInfoUpdate = 866,
    ClientAppInfoChanges = 867,
    ClientServerList = 880,
    ClientGetFriendsLobbies = 888,
    ClientGetFriendsLobbiesResponse = 889,
    ClientGetLobbyList = 890,
    ClientEmailChangeResponse = 891,
    ClientSecretQAChangeResponse = 892,
    ClientDRMBlobRequest = 896,
    ClientDRMBlobResponse = 897,
    ClientLookupKey = 898,
    ClientLookupKeyResponse = 899,
    GSDisconnectNotice = 901,
    GSStatus = 903,
    GSUserPlaying = 905,
    GSStatus2 = 906,
    GSStatusUpdate_Unused = 907,
    GSServerType = 908,
    GSPlayerList = 909,
    GSGetUserAchievementStatus = 910,
    GSGetUserAchievementStatusResponse = 911,
    GSGetPlayStats = 918,
    GSGetPlayStatsResponse = 919,
    GSGetUserGroupStatus = 920,
    GSGetUserGroupStatusResponse = 923,
    GSGetReputation = 936,
    GSGetReputationResponse = 937,
    ClientChatRoomInfo = 4026,
    ClientUFSUploadFileRequest = 5202,
    ClientUFSUploadFileResponse = 5203,
    ClientUFSUploadFileChunk = 5204,
    ClientUFSUploadFileFinished = 5205,
    ClientUFSGetFileListForApp = 5206,
    ClientUFSGetFileListForAppResponse = 5207,
    ClientUFSDownloadRequest = 5210,
    ClientUFSDownloadResponse = 5211,
    ClientUFSDownloadChunk = 5212,
    ClientUFSLoginRequest = 5213,
    ClientUFSLoginResponse = 5214,
    ClientUFSTransferHeartbeat = 5216,
    ClientUFSDeleteFileRequest = 5219,
    ClientUFSDeleteFileResponse = 5220,
    ClientUFSGetUGCDetails = 5226,
    ClientUFSGetUGCDetailsResponse = 5227,
    ClientUFSGetSingleFileInfo = 5230,
    ClientUFSGetSingleFileInfoResponse = 5231,
    ClientUFSShareFile = 5232,
    ClientUFSShareFileResponse = 5233,
    ClientRequestForgottenPasswordEmail = 5401,
    ClientRequestForgottenPasswordEmailResponse = 5402,
    ClientCreateAccountResponse = 5403,
    ClientResetForgottenPassword = 5404,
    ClientResetForgottenPasswordResponse = 5405,
    ClientCreateAccount2 = 5406,
    ClientInformOfResetForgottenPassword = 5407,
    ClientInformOfResetForgottenPasswordResponse = 5408,
    ClientAnonUserLogOn_Deprecated = 5409,
    ClientGamesPlayedWithDataBlob = 5410,
    ClientUpdateUserGameInfo = 5411,
    ClientFileToDownload = 5412,
    ClientFileToDownloadResponse = 5413,
    ClientLBSSetScore = 5414,
    ClientLBSSetScoreResponse = 5415,
    ClientLBSFindOrCreateLB = 5416,
    ClientLBSFindOrCreateLBResponse = 5417,
    ClientLBSGetLBEntries = 5418,
    ClientLBSGetLBEntriesResponse = 5419,
    ClientMarketingMessageUpdate = 5420,
    ClientChatDeclined = 5426,
    ClientFriendMsgIncoming = 5427,
    ClientAuthList_Deprecated = 5428,
    ClientTicketAuthComplete = 5429,
    ClientIsLimitedAccount = 5430,
    ClientAuthList = 5432,
    ClientStat = 5433,
    ClientP2PConnectionInfo = 5434,
    ClientP2PConnectionFailInfo = 5435,
    ClientGetNumberOfCurrentPlayers = 5436,
    ClientGetNumberOfCurrentPlayersResponse = 5437,
    ClientGetDepotDecryptionKey = 5438,
    ClientGetDepotDecryptionKeyResponse = 5439,
    GSPerformHardwareSurvey = 5440,
    ClientEnableTestLicense = 5443,
    ClientEnableTestLicenseResponse = 5444,
    ClientDisableTestLicense = 5445,
    ClientDisableTestLicenseResponse = 5446,
    ClientRequestValidationMail = 5448,
    ClientRequestValidationMailResponse = 5449,
    ClientToGC = 5452,
    ClientFromGC = 5453,
    ClientRequestChangeMail = 5454,
    ClientRequestChangeMailResponse = 5455,
    ClientEmailAddrInfo = 5456,
    ClientPasswordChange3 = 5457,
    ClientEmailChange3 = 5458,
    ClientPersonalQAChange3 = 5459,
    ClientResetForgottenPassword3 = 5460,
    ClientRequestForgottenPasswordEmail3 = 5461,
    ClientCreateAccount3 = 5462,
    ClientNewLoginKey = 5463,
    ClientNewLoginKeyAccepted = 5464,
    ClientLogOnWithHash_Deprecated = 5465,
    ClientStoreUserStats2 = 5466,
    ClientStatsUpdated = 5467,
    ClientActivateOEMLicense = 5468,
    ClientRequestedClientStats = 5480,
    ClientStat2Int32 = 5481,
    ClientStat2 = 5482,
    ClientVerifyPassword = 5483,
    ClientVerifyPasswordResponse = 5484,
    ClientDRMDownloadRequest = 5485,
    ClientDRMDownloadResponse = 5486,
    ClientDRMFinalResult = 5487,
    ClientGetFriendsWhoPlayGame = 5488,
    ClientGetFriendsWhoPlayGameResponse = 5489,
    ClientOGSBeginSession = 5490,
    ClientOGSBeginSessionResponse = 5491,
    ClientOGSEndSession = 5492,
    ClientOGSEndSessionResponse = 5493,
    ClientOGSWriteRow = 5494,
    ClientDRMTest = 5495,
    ClientDRMTestResult = 5496,
    ClientServerUnavailable = 5500,
    ClientServersAvailable = 5501,
    ClientRegisterAuthTicketWithCM = 5502,
    ClientGCMsgFailed = 5503,
    ClientMicroTxnAuthRequest = 5504,
    ClientMicroTxnAuthorize = 5505,
    ClientMicroTxnAuthorizeResponse = 5506,
    ClientAppMinutesPlayedData = 5507,
    ClientGetMicroTxnInfo = 5508,
    ClientGetMicroTxnInfoResponse = 5509,
    ClientMarketingMessageUpdate2 = 5510,
    ClientDeregisterWithServer = 5511,
    ClientSubscribeToPersonaFeed = 5512,
    ClientLogon = 5514,
    ClientGetClientDetails = 5515,
    ClientGetClientDetailsResponse = 5516,
    ClientGetClientAppList = 5518,
    ClientGetClientAppListResponse = 5519,
    ClientInstallClientApp = 5520,
    ClientInstallClientAppResponse = 5521,
    ClientUninstallClientApp = 5522,
    ClientUninstallClientAppResponse = 5523,
    ClientSetClientAppUpdateState = 5524,
    ClientSetClientAppUpdateStateResponse = 5525,
    ClientRequestEncryptedAppTicket = 5526,
    ClientRequestEncryptedAppTicketResponse = 5527,
    ClientWalletInfoUpdate = 5528,
    ClientLBSSetUGC = 5529,
    ClientLBSSetUGCResponse = 5530,
    ClientAMGetClanOfficers = 5531,
    ClientAMGetClanOfficersResponse = 5532,
    ClientCheckFileSignature = 5533,
    ClientCheckFileSignatureResponse = 5534,
    ClientFriendProfileInfo = 5535,
    ClientFriendProfileInfoResponse = 5536,
    ClientUpdateMachineAuth = 5537,
    ClientUpdateMachineAuthResponse = 5538,
    ClientReadMachineAuth = 5539,
    ClientReadMachineAuthResponse = 5540,
    ClientRequestMachineAuth = 5541,
    ClientRequestMachineAuthResponse = 5542,
    ClientScreenshotsChanged = 5543,
    ClientEmailChange4 = 5544,
    ClientEmailChangeResponse4 = 5545,
    ClientDFSAuthenticateRequest = 5605,
    ClientDFSAuthenticateResponse = 5606,
    ClientDFSEndSession = 5607,
    ClientDFSDownloadStatus = 5617,
    ClientMDSLoginRequest = 5801,
    ClientMDSLoginResponse = 5802,
    ClientMDSUploadManifestRequest = 5803,
    ClientMDSUploadManifestResponse = 5804,
    ClientMDSTransmitManifestDataChunk = 5805,
    ClientMDSHeartbeat = 5806,
    ClientMDSUploadDepotChunks = 5807,
    ClientMDSUploadDepotChunksResponse = 5808,
    ClientMDSInitDepotBuildRequest = 5809,
    ClientMDSInitDepotBuildResponse = 5810,
    ClientMDSGetDepotManifest = 5818,
    ClientMDSGetDepotManifestResponse = 5819,
    ClientMDSGetDepotManifestChunk = 5820,
    ClientMDSDownloadDepotChunksRequest = 5823,
    ClientMDSDownloadDepotChunksAsync = 5824,
    ClientMDSDownloadDepotChunksAck = 5825,
    ClientMMSCreateLobby = 6601,
    ClientMMSCreateLobbyResponse = 6602,
    ClientMMSJoinLobby = 6603,
    ClientMMSJoinLobbyResponse = 6604,
    ClientMMSLeaveLobby = 6605,
    ClientMMSLeaveLobbyResponse = 6606,
    ClientMMSGetLobbyList = 6607,
    ClientMMSGetLobbyListResponse = 6608,
    ClientMMSSetLobbyData = 6609,
    ClientMMSSetLobbyDataResponse = 6610,
    ClientMMSSendLobbyChatMsg = 6613,
    ClientMMSLobbyChatMsg = 6614,
    ClientMMSSetLobbyOwner = 6615,
    ClientMMSSetLobbyOwnerResponse = 6616,
    ClientMMSSetLobbyGameServer = 6617,
    ClientMMSLobbyGameServerSet = 6618,
    ClientMMSUserJoinedLobby = 6619,
    ClientMMSUserLeftLobby = 6620,
    ClientMMSInviteToLobby = 6621,
    ClientUDSP2PSessionStarted = 7001,
    ClientUDSP2PSessionEnded = 7002,
    ClientUDSInviteToGame = 7005,
    ClientUCMAddScreenshot = 7301,
    ClientUCMAddScreenshotResponse = 7302,
    ClientUCMGetScreenshotList = 7305,
    ClientUCMGetScreenshotListResponse = 7306,
    ClientUCMDeleteScreenshot = 7309,
    ClientUCMDeleteScreenshotResponse = 7310,
    ClientRichPresenceUpload = 7501,
    ClientRichPresenceRequest = 7502,
    ClientRichPresenceInfo = 7503,
}
|
mod game;
mod init;
mod ecs;
mod var;
fn it_works() {}
|
use rwl::{App};
use clap::{App as Clap, Arg};
/// Entry point: defines the `Worklog` command-line interface, parses the
/// process arguments, and hands the parsed matches to the application.
fn main() {
    // Build each subcommand as its own binding, then attach them to the
    // root command — behaviorally identical to one long builder chain.
    let log = Clap::new("log")
        .arg(Arg::new("message")
            .required(true)
            .index(1)
            .about("Message to log"))
        .arg(Arg::new("time")
            .short('t')
            .takes_value(true)
            .about("Custom message date and time (format: '10.01.2021 14:05')"))
        .about("Inserts new message");

    let ls = Clap::new("ls")
        .arg(Arg::new("date")
            .short('d')
            .takes_value(true)
            .about("Date to list (format: '10.01.2021')"))
        .arg(Arg::new("last")
            .short('l')
            .about("Last day containing messages"))
        .about("Lists messages");

    let set = Clap::new("set")
        .arg(Arg::new("id")
            .required(true)
            .index(1)
            .about("ID of record"))
        .arg(Arg::new("message")
            .index(2)
            .about("New message"))
        .arg(Arg::new("time")
            .short('t')
            .takes_value(true)
            .about("New date and time (format: '10.01.2021 14:05')"))
        .about("Updates the record by given record ID");

    let delete = Clap::new("delete")
        .arg(Arg::new("id")
            .required(true)
            .index(1)
            .about("ID of record"))
        .about("Deletes the record by given record ID");

    let matches = Clap::new("Worklog")
        .version("1.0")
        .author("Ota Klapka")
        .subcommand(log)
        .subcommand(ls)
        .subcommand(set)
        .subcommand(delete)
        .get_matches();

    // Run the worklog app against its SQLite database with the parsed CLI.
    App::new("database.sqlite").run(&matches);
}
|
/// Converts `num` to its Roman-numeral representation.
///
/// Valid for the conventional Roman range `1..=3999`. Out-of-range input
/// does not panic: values below 1 yield `""`, values above 3999 yield extra
/// leading `M`s. (The previous digit-decomposition version emitted literal
/// `'_'` placeholder characters for thousands digits >= 4; the greedy
/// subtraction table below produces identical output for every valid input
/// without that hazard.)
pub fn int_to_roman(num: i32) -> String {
    // Value/symbol pairs in descending order; the subtractive forms
    // (CM, CD, XC, XL, IX, IV) make the greedy scan emit canonical numerals.
    const TABLE: [(i32, &str); 13] = [
        (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
        (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
        (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I"),
    ];
    let mut remainder = num;
    let mut roman = String::new();
    for &(value, symbol) in TABLE.iter() {
        while remainder >= value {
            roman.push_str(symbol);
            remainder -= value;
        }
    }
    roman
}
#[test]
fn test_int_to_roman() {
    // Smallest value and single-digit subtractive forms.
    assert_eq!(int_to_roman(1), "I");
    assert_eq!(int_to_roman(4), "IV");
    assert_eq!(int_to_roman(9), "IX");
    // Mixed additive/subtractive digits.
    assert_eq!(int_to_roman(58), "LVIII");
    assert_eq!(int_to_roman(1994), "MCMXCIV");
    // Upper bound of the conventional Roman range.
    assert_eq!(int_to_roman(3999), "MMMCMXCIX");
}
use std::fs::File;
use std::path::Path;
use std::io::Read;
use crate::cpu::{ConditionFlags, Register};
use crate::cpu::utils::*;
use crate::cpu::instructions::*;
/// Software model of the Intel 8080 CPU state.
pub struct Intel8080 {
    /// Register file (includes the accumulator `a` plus b/c/d/e/h/l).
    pub regs: Register,
    /// Condition flags (carry, zero, sign, parity, aux_carry are used below).
    pub flags: ConditionFlags,
    /// Program counter — an index into `memory`.
    pub pc: usize,
    /// Stack pointer — an index into `memory`.
    pub sp: usize,
    /// Interrupt-enable flag; initialized to 0 by `new`.
    pub int_enable: u8,
    /// Flat address space; `new` allocates 0x10000 bytes (64 KiB).
    pub memory: Vec<u8>
}
impl Intel8080 {
/// Creates a CPU with zeroed registers, flags, PC and SP, interrupts
/// disabled, and a zero-filled address space.
pub fn new() -> Self {
    Intel8080 {
        regs: Register::new(),
        flags: ConditionFlags::new(),
        pc: 0_usize,
        sp: 0_usize,
        int_enable: 0,
        memory: vec![0_u8; 0x10000] // 64 KiB — the full 16-bit address space
    }
}
/// Loads a binary program file into emulator memory starting at address 0.
///
/// Reads in a loop until EOF or until memory is full: a single
/// `Read::read` call is permitted to return fewer bytes than requested,
/// so the previous one-shot `f.read(...)` could silently load only a
/// prefix of the program.
///
/// # Panics
/// Panics if the file cannot be opened or if a read fails (matching the
/// original open/unwrap error behavior).
pub fn load_program(&mut self, file_name: &str) {
    let mut f = match File::open(Path::new(file_name)) {
        Ok(file) => file,
        Err(e) => panic!("Could not open file - {}", e)
    };
    let mut loaded = 0_usize;
    while loaded < self.memory.len() {
        match f.read(&mut self.memory[loaded..]) {
            Ok(0) => break, // EOF reached
            Ok(n) => loaded += n,
            Err(e) => panic!("Could not read file - {}", e),
        }
    }
}
pub fn run(&mut self) {
while self.memory[self.pc] != 0x76 { // while opcode != HLT (0x76)
match self.memory[self.pc] {
0x00 => { self.pc += 1; } // NOP
0x01 => { lxi(self, 'B'); self.pc += 3; }
0x02 => { stax(self, 'B'); self.pc += 1; }
0x03 => { inx(self, 'B'); self.pc += 1; }
0x04 => { inr(self, 'B'); self.pc += 1; }
0x05 => { dcr(self, 'B'); self.pc += 1; }
0x06 => { mvi(self, 'B'); self.pc += 2; }
0x07 => {
// INSTRUCTION: RLC
// DESCRIPTION:
// The contents of the accumulator are rotated one bit position to
// the left, with the high-order bit being transferred to the
// low-order bit position of the accumulator.
// compute carry and use it to compute the new value to
// be assigned to the accumulator (A) register.
let carry = ((self.regs.a & 0x80) >> 7) as u8;
self.regs.a = ((self.regs.a << 1) | carry) as u8;
// The Carry bit is set equal to the high-order bit of the accumulator.
self.flags.carry = carry;
self.pc += 1;
}
0x08 => { self.pc += 1; }
0x09 => { dad(self, 'B'); self.pc += 1; }
0x0A => { ldax(self, 'B'); self.pc += 1; }
0x0B => { dcx(self, 'B'); self.pc += 1; }
0x0C => { inr(self, 'C'); self.pc += 1; }
0x0D => { dcr(self, 'C'); self.pc += 1; }
0x0E => { mvi(self, 'C'); self.pc += 2; }
0x0F => {
// INSTRUCTION: RRC
// DESCRIPTION:
// The contents of the accumulator are rotated one bit position to
// the right, with the low-order bit being transferred to the
// high-order bit position of the accumulator.
// compute carry and use it to compute the new value to
// be assigned to the accumulator (A) register.
let carry = (self.regs.a & 0x01) as u8;
self.regs.a = ((self.regs.a >> 1) | (carry << 7)) as u8;
// The Carry bit is set equal to the high-order bit of the accumulator.
self.flags.carry = carry;
self.pc += 1;
}
0x10 => { self.pc += 1; }
0x11 => { lxi(self, 'D'); self.pc += 3; }
0x12 => { stax(self, 'D'); self.pc += 1; }
0x13 => { inx(self, 'D'); self.pc += 1; }
0x14 => { inr(self, 'D'); self.pc += 1; }
0x15 => { dcr(self, 'D'); self.pc += 1; }
0x16 => { mvi(self, 'D'); self.pc += 2; }
0x17 => {
// INSTRUCTION: RAL
// DESCRIPTION:
// The contents of the accumulator are rotated one bit position to the left.
// The high-order bit of the accumulator replaces the Carry bit, while the
// Carry bit replaces the high-order bit of the accumulator.
let temp = self.flags.carry;
let carry = ((self.regs.a & 0x80) >> 7) as u8;
self.regs.a = ((self.regs.a << 1) | (temp << 7)) as u8;
// The Carry bit is set equal to the high-order bit of the accumulator.
self.flags.carry = carry;
self.pc += 1;
}
0x18 => { self.pc += 1; }
0x19 => { dad(self, 'D'); self.pc += 1; }
0x1A => { ldax(self, 'D'); self.pc += 1; }
0x1B => { dcx(self, 'D'); self.pc += 1; }
0x1C => { inr(self, 'E'); self.pc += 1; }
0x1D => { dcr(self, 'E'); self.pc += 1; }
0x1E => { mvi(self, 'E'); self.pc += 2; }
0x1F => {
// INSTRUCTION: RAR
// DESCRIPTION:
// The contents of the accumulator are rotated one bit position
// to the right. The low-order bit of the accumulator replaces the
// carry bit, while the carry bit replaces the high-order bit of
// the accumulator.
let temp = self.flags.carry;
let carry = (self.regs.a & 0x01) as u8;
self.regs.a = ((self.regs.a >> 1) | (temp << 7)) as u8;
// The Carry bit is set equal to the high-order bit of the accumulator.
self.flags.carry = carry;
self.pc += 1;
}
0x20 => { self.pc += 1; }
0x21 => { lxi(self, 'H'); self.pc += 3; }
0x22 => {
// INSTRUCTION: SHLD
// DESCRIPTION:
// The contents of the L register are stored at the memory address
// formed by concatenati ng HI AD 0 with LOW ADO. The contents of
// the H register are stored at the next higher memory address.
let mut addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.memory[addr] = self.regs.l; addr += 1;
self.memory[addr] = self.regs.h;
self.pc += 3;
}
0x23 => { inx(self, 'H'); self.pc += 1; }
0x24 => { inr(self, 'H'); self.pc += 1; }
0x25 => { dcr(self, 'H'); self.pc += 1; }
0x26 => { mvi(self, 'H'); self.pc += 2; }
0x27 => {
// INSTRUCTION: DAA
// DESCRIPTION:
// The DAA intruction adjusts the eight-bit value in the accumulator
// to form two four-bit binary coded decimal digits.
if (self.regs.a & 0x0f) > 9 || self.flags.aux_carry == 1 {
self.regs.a += 6;
self.flags.aux_carry = 1;
}
let mut ho_nibble = (self.regs.a & 0xf0) >> 4;
if ho_nibble > 9 || self.flags.carry == 1 {
ho_nibble += 6;
self.regs.a = (self.regs.a & 0x0f) | (ho_nibble << 4);
self.flags.carry = 1;
}
self.flags.zero = ((self.regs.a as u16 & 0xffff) == 0) as u8;
self.flags.sign = ((self.regs.a as u16 & 0x8000) != 0) as u8;
self.flags.parity = parity(self.regs.a as u16);
self.pc += 1;
}
0x28 => { self.pc += 1; }
0x29 => { dad(self, 'H'); self.pc += 1; }
0x2A => {
// INSTRUCTION: LHLD
// DESCRIPTION:
// The byte at the memory address formed by concatenating HI ADD
// with LOW ADD replaces the contents of the L register. The byte
// at the next higher memory address replaces the contents of the
// H register.
let mut addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.regs.l = self.memory[addr]; addr += 1;
self.regs.h = self.memory[addr];
self.pc += 3;
}
0x2B => { dcx(self, 'H'); self.pc += 1; }
0x2C => { inr(self, 'L'); self.pc += 1; }
0x2D => { dcr(self, 'L'); self.pc += 1; }
0x2E => { mvi(self, 'L'); self.pc += 2; }
0x2F => {
// INSTRUCTION: CMA
// DESCRIPTION:
// Each bit of the contents of the accumulator is complemented
// (producing the one's complement).
self.regs.a = !self.regs.a;
self.pc += 1;
}
0x30 => { self.pc += 1; }
0x31 => { lxi(self, 'S'); self.pc += 3; }
0x32 => {
// INSTRUCTION: STA
// DESCRIPTION:
// The contents of the accumulator replace the byte at the memory
// address formed by concatenating HI ADD with LOW ADD.
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.memory[addr] = self.regs.a;
self.pc += 3;
}
0x33 => { self.sp += 1; self.pc += 1; }
0x34 => { inr(self, 'M'); self.pc += 1; }
0x35 => { dcr(self, 'M'); self.pc += 1; }
0x36 => { mvi(self, 'M'); self.pc += 2; }
0x37 => { self.flags.carry = 1; self.pc += 1; }
0x38 => { self.pc += 1; }
0x39 => { dad(self, 'S'); self.pc += 1;}
0x3A => {
// INSTRUCTION: LDA
// DESCRIPTION:
// LDA load~ the accumulator with a copy of the byte at the location
// specified In bytes two and three of the LDA instruction.
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
println!("{:04x}: LDA {:04x}", self.pc, addr);
self.regs.a = self.memory[addr];
self.pc += 3;
}
0x3B => { self.sp -= 1; self.pc += 1; }
0x3C => { inr(self, 'A'); self.pc += 1; }
0x3D => { dcr(self, 'A'); self.pc += 1; }
0x3E => { mvi(self, 'A'); self.pc += 2; }
0x3F => { self.flags.carry = !self.flags.carry; self.pc += 1; }
0x40 => { self.pc += 1; }
0x41 => { self.regs.b = self.regs.c; self.pc += 1; }
0x42 => { self.regs.b = self.regs.d; self.pc += 1; }
0x43 => { self.regs.b = self.regs.e; self.pc += 1; }
0x44 => { self.regs.b = self.regs.h; self.pc += 1; }
0x45 => { self.regs.b = self.regs.l; self.pc += 1; }
0x46 => {
// INSTRUCTION: MOV B, M
// DESCRIPTION: move from memory into B
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.b = self.memory[addr];
self.pc += 1;
}
0x47 => { self.regs.b = self.regs.a; self.pc += 1; }
0x48 => { self.regs.c = self.regs.b; self.pc += 1; }
0x49 => { self.pc += 1; }
0x4A => { self.regs.c = self.regs.d; self.pc += 1; }
0x4B => { self.regs.c = self.regs.e; self.pc += 1; }
0x4C => { self.regs.c = self.regs.h; self.pc += 1; }
0x4D => { self.regs.c = self.regs.l; self.pc += 1; }
0x4E => {
// INSTRUCTION: MOV C, M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.c = self.memory[addr];
self.pc += 1;
}
0x4F => { self.regs.c = self.regs.a; self.pc += 1; }
0x50 => { self.regs.d = self.regs.b; self.pc += 1; }
0x51 => { self.regs.d = self.regs.c; self.pc += 1; }
0x52 => { self.pc += 1; }
0x53 => { self.regs.d = self.regs.e; self.pc += 1; }
0x54 => { self.regs.d = self.regs.h; self.pc += 1; }
0x55 => { self.regs.d = self.regs.l; self.pc += 1; }
0x56 => {
// INSTRUCTION: MOV D, M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.d = self.memory[addr];
self.pc += 1;
}
0x57 => { self.regs.d = self.regs.a; self.pc += 1; }
0x58 => { self.regs.e = self.regs.b; self.pc += 1; }
0x59 => { self.regs.e = self.regs.c; self.pc += 1; }
0x5A => { self.regs.e = self.regs.d; self.pc += 1; }
0x5B => { self.pc += 1; }
0x5C => { self.regs.e = self.regs.h; self.pc += 1; }
0x5D => { self.regs.e = self.regs.l; self.pc += 1; }
0x5E => {
// INSTRUCTION: MOV E, M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.e = self.memory[addr];
self.pc += 1;
}
0x5F => { self.regs.e = self.regs.a; self.pc += 1; }
0x60 => { self.regs.h = self.regs.b; self.pc += 1; }
0x61 => { self.regs.h = self.regs.c; self.pc += 1; }
0x62 => { self.regs.h = self.regs.d; self.pc += 1; }
0x63 => { self.regs.h = self.regs.e; self.pc += 1; }
0x64 => { self.pc += 1; }
0x65 => { self.regs.h = self.regs.l; self.pc += 1; }
0x66 => {
// INSTRUCTION: MOV H, M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.h = self.memory[addr];
self.pc += 1;
}
0x67 => { self.regs.h = self.regs.a; self.pc += 1; }
0x68 => { self.regs.l = self.regs.b; self.pc += 1; }
0x69 => { self.regs.l = self.regs.c; self.pc += 1; }
0x6A => { self.regs.l = self.regs.d; self.pc += 1; }
0x6B => { self.regs.l = self.regs.e; self.pc += 1; }
0x6C => { self.regs.l = self.regs.h; self.pc += 1; }
0x6D => { self.pc += 1; }
0x6E => {
// INSTRUCTION: MOV L, M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.l = self.memory[addr];
self.pc += 1;
}
0x6F => { self.regs.l = self.regs.a; self.pc += 1; }
0x70 => { mov_m(self, 'B'); self.pc += 1; }
0x71 => { mov_m(self, 'C'); self.pc += 1; }
0x72 => { mov_m(self, 'D'); self.pc += 1; }
0x73 => { mov_m(self, 'E'); self.pc += 1; }
0x74 => { mov_m(self, 'H'); self.pc += 1; }
0x75 => { mov_m(self, 'L'); self.pc += 1; }
0x76 => {}
0x77 => { mov_m(self, 'A'); self.pc += 1; }
0x78 => { self.regs.a = self.regs.b; self.pc += 1; }
0x79 => { self.regs.a = self.regs.c; self.pc += 1; }
0x7A => { self.regs.a = self.regs.d; self.pc += 1; }
0x7B => { self.regs.a = self.regs.e; self.pc += 1; }
0x7C => { self.regs.a = self.regs.h; self.pc += 1; }
0x7D => { self.regs.a = self.regs.l; self.pc += 1; }
0x7E => {
// INSTRUCTION: MOV A, M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.regs.a = self.memory[addr];
self.pc += 1;
}
0x7F => { self.pc += 1; }
0x80 => { add_to_accu(self, self.regs.b); self.pc += 1; }
0x81 => { add_to_accu(self, self.regs.c); self.pc += 1; }
0x82 => { add_to_accu(self, self.regs.d); self.pc += 1; }
0x83 => { add_to_accu(self, self.regs.e); self.pc += 1; }
0x84 => { add_to_accu(self, self.regs.h); self.pc += 1; }
0x85 => { add_to_accu(self, self.regs.l); self.pc += 1; }
0x86 => {
// INSTRUCTION: ADD M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
add_to_accu(self, self.memory[addr]);
self.pc += 1;
}
0x87 => { add_to_accu(self, self.regs.a); self.pc += 1; }
0x88 => { adc(self, self.regs.b); self.pc += 1; }
0x89 => { adc(self, self.regs.c); self.pc += 1; }
0x8A => { adc(self, self.regs.d); self.pc += 1; }
0x8B => { adc(self, self.regs.e); self.pc += 1; }
0x8C => { adc(self, self.regs.h); self.pc += 1; }
0x8D => { adc(self, self.regs.l); self.pc += 1; }
0x8E => {
// INSTRUCTION: ADC M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
adc(self, self.memory[addr]);
self.pc += 1;
}
0x8F => { adc(self, self.regs.a); self.pc += 1; }
0x90 => { sub_accu(self, self.regs.b); self.pc += 1; }
0x91 => { sub_accu(self, self.regs.c); self.pc += 1; }
0x92 => { sub_accu(self, self.regs.d); self.pc += 1; }
0x93 => { sub_accu(self, self.regs.e); self.pc += 1; }
0x94 => { sub_accu(self, self.regs.h); self.pc += 1; }
0x95 => { sub_accu(self, self.regs.l); self.pc += 1; }
0x96 => {
// INSTRUCTION: SUB M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
sub_accu(self, self.memory[addr]);
self.pc += 1;
}
0x97 => { sub_accu(self, self.regs.a); self.pc += 1; }
0x98 => { sbb(self, self.regs.b); self.pc += 1; }
0x99 => { sbb(self, self.regs.c); self.pc += 1; }
0x9A => { sbb(self, self.regs.d); self.pc += 1; }
0x9B => { sbb(self, self.regs.e); self.pc += 1; }
0x9C => { sbb(self, self.regs.h); self.pc += 1; }
0x9D => { sbb(self, self.regs.l); self.pc += 1; }
0x9E => {
// INSTRUCTION: SBB M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
sbb(self, self.memory[addr]);
self.pc += 1;
}
0x9F => { sbb(self, self.regs.a); self.pc += 1; }
0xA0 => { ana(self, self.regs.b); self.pc += 1; }
0xA1 => { ana(self, self.regs.c); self.pc += 1; }
0xA2 => { ana(self, self.regs.d); self.pc += 1; }
0xA3 => { ana(self, self.regs.e); self.pc += 1; }
0xA4 => { ana(self, self.regs.h); self.pc += 1; }
0xA5 => { ana(self, self.regs.l); self.pc += 1; }
0xA6 => {
// INSTRUCTION: ANA M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
ana(self, self.memory[addr]);
self.pc += 1;
}
0xA7 => { ana(self, self.regs.a); self.pc += 1; } // INSTRUCTION: ANA A (leftover per-instruction debug println removed; no sibling ANA arm traces)
0xA8 => { xra(self, self.regs.b); self.pc += 1; }
0xA9 => { xra(self, self.regs.c); self.pc += 1; }
0xAA => { xra(self, self.regs.d); self.pc += 1; }
0xAB => { xra(self, self.regs.e); self.pc += 1; }
0xAC => { xra(self, self.regs.h); self.pc += 1; }
0xAD => { xra(self, self.regs.l); self.pc += 1; }
0xAE => {
// INSTRUCTION: XRA M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
xra(self, self.memory[addr]);
self.pc += 1;
}
0xAF => { xra(self, self.regs.a); self.pc += 1; }
0xB0 => { ora(self, self.regs.b); self.pc += 1; }
0xB1 => { ora(self, self.regs.c); self.pc += 1; }
0xB2 => { ora(self, self.regs.d); self.pc += 1; }
0xB3 => { ora(self, self.regs.e); self.pc += 1; }
0xB4 => { ora(self, self.regs.h); self.pc += 1; }
0xB5 => { ora(self, self.regs.l); self.pc += 1; }
0xB6 => {
    // INSTRUCTION: ORA M — OR the accumulator with the byte addressed by HL.
    // (Comment previously said "ORA C", which is opcode 0xB1.)
    let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
    ora(self, self.memory[addr]);
    self.pc += 1;
}
0xB7 => { ora(self, self.regs.a); self.pc += 1; }
0xB8 => { cmp(self, self.regs.b); self.pc += 1; }
0xB9 => { cmp(self, self.regs.c); self.pc += 1; }
0xBA => { cmp(self, self.regs.d); self.pc += 1; }
0xBB => { cmp(self, self.regs.e); self.pc += 1; }
0xBC => { cmp(self, self.regs.h); self.pc += 1; }
0xBD => { cmp(self, self.regs.l); self.pc += 1; }
0xBE => {
// INSTRUCTION: CMP M
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
cmp(self, self.memory[addr]);
self.pc += 1;
}
0xBF => { cmp(self, self.regs.a); self.pc += 1; }
0xC0 => {
// INSTRUCTION: RNZ
if self.flags.zero == 0 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xC1 => { pop(self, 'B'); self.pc += 1; }
0xC2 => {
    // INSTRUCTION: JNZ — jump to the 16-bit operand when the zero flag is clear.
    // (Leftover per-jump debug println removed for consistency with the other
    // conditional-jump arms, none of which trace.)
    if self.flags.zero == 0 {
        let addr = (((self.memory[self.pc + 2] as u16) << 8) |
                    (self.memory[self.pc + 1] as u16)) as usize;
        self.pc = addr;
    } else {
        self.pc += 3;
    }
}
0xC3 => {
// INSTRUCTION: JMP
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
}
0xC4 => {
// INSTRUCTION: CNZ
if self.flags.zero == 0 {
let next_instr_addr = self.pc + 3; // Address of the next instruction
let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
let lsb = (next_instr_addr & 0x00ff) as u8;
self.memory[self.sp - 1] = msb;
self.memory[self.sp - 2] = lsb;
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
self.sp -= 2;
} else { self.pc += 3; }
}
0xC5 => { push(self, 'B'); self.pc += 1; }
0xC6 => {
// INSTRUCTION: ADI
let result = (self.regs.a as u16) + (self.memory[self.pc + 1] as u16);
self.flags.carry = (result > 0xff) as u8;
self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
self.flags.parity = parity(result);
self.regs.a = result as u8;
self.pc += 2;
}
0xC7 => { rst(self, 0); }
0xC8 => {
// INSTRUCTION: RZ
if self.flags.zero == 1 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xC9 => {
// INSTRUCTION: RET
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
}
0xCA => {
// INSTRUCTION: JZ
if self.flags.zero == 1 {
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
} else {
self.pc += 3;
}
}
0xCB => { self.pc += 1; }
0xCC => {
// INSTRUCTION: CZ
if self.flags.zero == 1 {
let next_instr_addr = self.pc + 3;
let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
let lsb = (next_instr_addr & 0x00ff) as u8;
self.memory[self.sp - 1] = msb;
self.memory[self.sp - 2] = lsb;
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
self.sp -= 2;
} else { self.pc += 3; }
}
0xCD => {
// INSTRUCTION: CALL
let next_instr_addr = self.pc + 3;
let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
let lsb = (next_instr_addr & 0x00ff) as u8;
self.memory[self.sp - 1] = msb;
self.memory[self.sp - 2] = lsb;
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
self.sp -= 2;
}
0xCE => {
// INSTRUCTION: ACI
let result = (self.regs.a as u16) + (self.memory[self.pc + 1] as u16 +
self.flags.carry as u16);
self.flags.carry = (result > 0xff) as u8;
self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
self.flags.parity = parity(result);
self.regs.a = result as u8;
self.pc += 2;
}
0xCF => { rst(self, 1); }
0xD0 => {
// INSTRUCTION: RNC
if self.flags.carry == 0 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xD1 => { pop(self, 'D'); self.pc += 1; }
0xD2 => {
// INSTRUCTION: JNC
if self.flags.carry == 0 {
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
} else {
self.pc += 3;
}
}
0xD3 => {
// INSTRUCTION: OUT exp
// DESCRIPTION:
// The contents of the accumulator are sent to output
// device number exp
self.pc += 1;
}
0xD4 => {
// INSTRUCTION: CNC
if self.flags.carry == 0 {
let next_instr_addr = self.pc + 3;
let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
let lsb = (next_instr_addr & 0x00ff) as u8;
self.memory[self.sp - 1] = msb;
self.memory[self.sp - 2] = lsb;
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
self.sp -= 2;
} else {
self.pc += 3;
}
}
0xD5 => { push(self, 'D'); self.pc += 1; }
0xD6 => {
    // INSTRUCTION: SUI — subtract the immediate byte from the accumulator.
    // BUG FIX: plain `-` on u16 panics in debug builds when the immediate
    // exceeds A. wrapping_sub matches the (working) release-mode semantics:
    // a borrow leaves bits 8+ set, which is exactly what the carry test reads.
    let result = (self.regs.a as u16).wrapping_sub(self.memory[self.pc + 1] as u16);
    self.flags.carry = (result > 0xff) as u8;
    self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
    self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
    self.flags.parity = parity(result);
    self.regs.a = result as u8;
    self.pc += 2;
}
0xD7 => { rst(self, 2); }
0xD8 => {
// INSTRUCTION: RC
if self.flags.carry == 1 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else { self.pc += 1; }
}
0xD9 => { self.pc += 1; }
0xDA => {
// INSTRUCTION: JC
if self.flags.carry == 1 {
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
} else {
self.pc += 3;
}
}
0xDB => {
// INSTRUCTION: IN exp
// DESCRIPTION:
// An eight-bit data byte is read from input device
// number exp and replaces the contents of the accumulator
self.pc += 1;
}
0xDC => {
// INSTRUCTION: CC
if self.flags.carry == 1 {
let next_instr_addr = self.pc + 3;
let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
let lsb = (next_instr_addr & 0x00ff) as u8;
self.memory[self.sp - 1] = msb;
self.memory[self.sp - 2] = lsb;
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
self.sp -= 2;
} else {
self.pc += 3;
}
}
0xDD => { self.pc += 1; }
0xDE => {
    // INSTRUCTION: SBI — subtract immediate plus the carry (borrow) bit.
    // BUG FIX: wrapping_sub avoids the debug-build overflow panic on borrow;
    // release-mode wrapping behaviour (and thus all flag results) is unchanged.
    let result = (self.regs.a as u16).wrapping_sub(self.memory[self.pc + 1] as u16 +
                                                   self.flags.carry as u16);
    self.flags.carry = (result > 0xff) as u8;
    self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
    self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
    self.flags.parity = parity(result);
    self.regs.a = result as u8;
    self.pc += 2;
}
0xDF => { rst(self, 3); }
0xE0 => {
// INSTRUCTION: RPO
if self.flags.parity == 0 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xE1 => { pop(self, 'H'); self.pc += 1; }
0xE2 => {
// INSTRUCTION: JPO
if self.flags.parity == 0 {
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
} else {
self.pc += 3;
}
}
0xE3 => {
// INSTRUCTION: XTHL
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
self.sp += 2;
self.memory[self.sp - 1] = self.regs.l;
self.memory[self.sp - 2] = self.regs.h;
self.sp -= 2;
self.regs.l = lsb;
self.regs.h = msb;
self.pc += 1;
}
0xE4 => {
    // INSTRUCTION: CPO — call if parity odd (parity flag == 0).
    // BUG FIX: the original advanced pc by 3 BEFORE reading the operand
    // bytes (so it read two bytes of the following instruction as the call
    // target), and pushed the return address with lsb/msb swapped relative
    // to CALL/CNZ/CZ/CNC/CC. Rewritten to follow the CALL arm exactly.
    if self.flags.parity == 0 {
        let addr = (((self.memory[self.pc + 2] as u16) << 8) |
                    (self.memory[self.pc + 1] as u16)) as usize;
        let next_instr_addr = self.pc + 3; // Address of the next instruction
        let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
        let lsb = (next_instr_addr & 0x00ff) as u8;
        self.memory[self.sp - 1] = msb;
        self.memory[self.sp - 2] = lsb;
        self.pc = addr;
        self.sp -= 2;
    } else {
        self.pc += 3;
    }
}
0xE5 => { push(self, 'H'); self.pc += 1; }
0xE6 => {
// INSTRUCTION: ANI
let result = (self.regs.a as u16) & (self.memory[self.pc + 1] as u16);
self.flags.carry = (result > 0xff) as u8;
self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
self.flags.parity = parity(result);
self.regs.a = result as u8;
self.pc += 2;
}
0xE7 => { rst(self, 4); }
0xE8 => {
// INSTRUCTION: RPE
if self.flags.parity == 1 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xE9 => {
// INSTRUCTION: PCHL
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.pc = addr;
}
0xEA => {
// INSTRUCTION: JPE
if self.flags.parity == 1 {
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
} else {
self.pc += 3;
}
}
0xEB => {
    // INSTRUCTION: XCHG — exchange register pairs DE and HL.
    // BUG FIX: the original read `e` from `self.regs.d` (copy-paste), so E
    // was clobbered with D's value instead of being swapped with L.
    let (d, e) = (self.regs.d, self.regs.e);
    self.regs.d = self.regs.h;
    self.regs.e = self.regs.l;
    self.regs.h = d;
    self.regs.l = e;
    self.pc += 1;
}
0xEC => {
    // INSTRUCTION: CPE — call if parity even (parity flag == 1).
    // BUG FIX: operand bytes must be read BEFORE advancing pc, and the
    // return address is pushed msb at sp-1 / lsb at sp-2, matching the
    // CALL/CNZ/CZ/CNC/CC arms (the original did both the other way round).
    if self.flags.parity == 1 {
        let addr = (((self.memory[self.pc + 2] as u16) << 8) |
                    (self.memory[self.pc + 1] as u16)) as usize;
        let next_instr_addr = self.pc + 3; // Address of the next instruction
        let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
        let lsb = (next_instr_addr & 0x00ff) as u8;
        self.memory[self.sp - 1] = msb;
        self.memory[self.sp - 2] = lsb;
        self.pc = addr;
        self.sp -= 2;
    } else {
        self.pc += 3;
    }
}
0xED => { self.pc += 1; }
0xEE => {
// INSTRUCTION: XRI
let result = (self.regs.a as u16) ^ (self.memory[self.pc + 1] as u16);
self.flags.carry = (result > 0xff) as u8;
self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
self.flags.parity = parity(result);
self.regs.a = result as u8;
self.pc += 2;
}
0xEF => { rst(self, 5); }
0xF0 => {
// INSTRUCTION: RP
if self.flags.sign == 0 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xF1 => { pop(self, 'P'); self.pc += 1; }
0xF2 => {
    // INSTRUCTION: JP — jump if plus (sign flag == 0).
    // BUG FIX: the original tested `sign == 1`, which is JM's (0xFA)
    // condition — JP and JM were both jumping on minus.
    if self.flags.sign == 0 {
        let addr = (((self.memory[self.pc + 2] as u16) << 8) |
                    (self.memory[self.pc + 1] as u16)) as usize;
        self.pc = addr;
    } else {
        self.pc += 3;
    }
}
0xF3 => {
// INSTRUCTION: DI
// disable interrupts
self.int_enable = 0;
self.pc += 1;
}
0xF4 => {
    // INSTRUCTION: CP — call if plus (sign flag == 0).
    // BUG FIXES (vs. the working CALL/CNZ arms): operands were read after
    // pc += 3, the pushed return-address bytes were swapped, and the stack
    // pointer was INCREMENTED (sp += 2) instead of decremented on the push.
    if self.flags.sign == 0 {
        let addr = (((self.memory[self.pc + 2] as u16) << 8) |
                    (self.memory[self.pc + 1] as u16)) as usize;
        let next_instr_addr = self.pc + 3; // Address of the next instruction
        let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
        let lsb = (next_instr_addr & 0x00ff) as u8;
        self.memory[self.sp - 1] = msb;
        self.memory[self.sp - 2] = lsb;
        self.pc = addr;
        self.sp -= 2;
    } else {
        self.pc += 3;
    }
}
0xF5 => { push(self, 'P'); self.pc += 1; }
0xF6 => {
// INSTRUCTION: ORI
let result = (self.regs.a as u16) | (self.memory[self.pc + 1] as u16);
self.flags.carry = (result > 0xff) as u8;
self.flags.zero = (((result as u8) & 0xff) == 0) as u8;
self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
self.flags.parity = parity(result);
self.regs.a = result as u8;
self.pc += 2;
}
0xF7 => { rst(self, 6); }
0xF8 => {
// INSTRUCTION: RM
if self.flags.sign == 1 {
let lsb = self.memory[self.sp];
let msb = self.memory[self.sp + 1];
let addr = (((msb as u16) << 8) | (lsb as u16)) as usize;
self.pc = addr;
self.sp += 2;
} else {
self.pc += 1;
}
}
0xF9 => {
// INSTRUCTION: SPHL
let addr = (((self.regs.h as u16) << 8) | (self.regs.l as u16)) as usize;
self.sp = addr;
self.pc += 1;
}
0xFA => {
// INSTRUCTION: JM
if self.flags.sign == 1 {
let addr = (((self.memory[self.pc + 2] as u16) << 8) |
(self.memory[self.pc + 1] as u16)) as usize;
self.pc = addr;
} else {
self.pc += 3;
}
}
0xFB => {
// INSTRUCTION: EI
// enable interrupts
self.int_enable = 1;
self.pc += 1;
}
0xFC => {
    // INSTRUCTION: CM — call if minus (sign flag == 1).
    // BUG FIXES (vs. the working CALL/CNZ arms): operands were read after
    // pc += 3, the pushed return-address bytes were swapped, and the stack
    // pointer was INCREMENTED (sp += 2) instead of decremented on the push.
    if self.flags.sign == 1 {
        let addr = (((self.memory[self.pc + 2] as u16) << 8) |
                    (self.memory[self.pc + 1] as u16)) as usize;
        let next_instr_addr = self.pc + 3; // Address of the next instruction
        let msb = ((next_instr_addr & 0xff00) >> 8) as u8;
        let lsb = (next_instr_addr & 0x00ff) as u8;
        self.memory[self.sp - 1] = msb;
        self.memory[self.sp - 2] = lsb;
        self.pc = addr;
        self.sp -= 2;
    } else {
        self.pc += 3;
    }
}
0xFD => { self.pc += 1; }
0xFE => {
// INSTRUCTION: CPI
let result = (self.regs.a as i16) - (self.memory[self.pc + 1] as i16);
self.flags.carry = (self.regs.a < self.memory[self.pc + 1]) as u8;
self.flags.zero = (result == 0) as u8;
self.flags.sign = (((result as u8) & 0x80) != 0) as u8;
self.flags.parity = parity(result as u16);
self.pc += 2;
}
0xFF => { rst(self, 7); }
}
}
}
} |
#![allow(unused_imports)]
use codec::{Encode, Decode};
use frame_system::Config;
use frame_support::weights::DispatchInfo;
use sp_runtime::{
traits::{SignedExtension, DispatchInfoOf, Dispatchable},
transaction_validity::TransactionValidityError,
};
use sp_std::marker::PhantomData;
use sp_core::H160;
/// A 160-bit application (tenant) identifier.
pub type AppTag = H160;
/// Application tag (tenant) transaction metadata.
///
/// Performs no validity checks of its own; the tag is only folded into the
/// signed payload via the `SignedExtension` impl below.
#[derive(Encode, Decode, Clone, Eq, PartialEq)]
pub struct TagApp<T: Config>(AppTag, PhantomData<T>);
impl<T: Config> TagApp<T> {
// utility constructor. Used only in client/factory code.
pub fn from(tag: AppTag) -> Self {
Self(tag, Default::default())
}
}
impl<T: Config> sp_std::fmt::Debug for TagApp<T> {
    /// With `std`, prints the wrapped tag as `TagApp(0x…)`.
    #[cfg(feature = "std")]
    fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
        write!(f, "TagApp({:?})", self.0)
    }
    /// In `no_std` runtimes formatting is a no-op to keep the runtime lean.
    #[cfg(not(feature = "std"))]
    fn fmt(&self, _: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
        Ok(())
    }
}
impl<T: Config + Send + Sync> SignedExtension for TagApp<T> where
    T::Call: Dispatchable<Info=DispatchInfo> + Send + Sync
{
    type AccountId = T::AccountId;
    type Call = T::Call;
    // The tag is part of the additional signed data, so a transaction is only
    // valid for the tag it was signed against.
    type AdditionalSigned = AppTag;
    type Pre = ();
    const IDENTIFIER: &'static str = "TagApp";
    /// Returns the tag to be mixed into the signature payload.
    fn additional_signed(&self) -> sp_std::result::Result<Self::AdditionalSigned, TransactionValidityError>
    {
        Ok(self.0.clone())
    }
    /// No pre-dispatch checks — this extension is pure metadata.
    fn pre_dispatch(
        self,
        _who: &Self::AccountId,
        _call: &Self::Call,
        _info: &DispatchInfoOf<Self::Call>,
        _len: usize,
    ) -> Result<(), TransactionValidityError> {
        Ok(())
    }
}
|
#![deny(clippy::pedantic)]
#![no_std]
#![allow(incomplete_features)]
#![feature(generic_associated_types)]
extern crate alloc;
#[macro_use]
extern crate contracts;
use core::num::NonZeroU32;
use necsim_core::{
lineage::MigratingLineage,
reporter::{boolean::Boolean, Reporter},
};
use necsim_core_bond::{NonNegativeF64, PositiveF64};
pub mod context;
pub mod iterator;
use context::ReporterContext;
#[allow(clippy::inline_always, clippy::inline_fn_without_body)]
#[contract_trait]
/// A (possibly distributed) partitioning scheme that can be turned into a
/// concrete [`LocalPartition`] for this process.
#[allow(clippy::inline_always, clippy::inline_fn_without_body)]
#[contract_trait]
pub trait Partitioning: Sized {
    /// The local-partition type produced by this scheme, generic over the
    /// event reporter (generic associated type).
    type LocalPartition<R: Reporter>: LocalPartition<R>;
    /// Scheme-specific auxiliary state consumed by `into_local_partition`.
    type Auxiliary;
    /// Returns `true` iff this partitioning consists of a single partition.
    fn is_monolithic(&self) -> bool;
    /// Returns `true` iff this partition is the root partition.
    #[debug_ensures(
        self.is_monolithic() -> ret,
        "monolithic partition is always root"
    )]
    fn is_root(&self) -> bool;
    /// Total number of partitions (always >= 1).
    #[debug_ensures(
        self.is_monolithic() == (ret.get() == 1),
        "there is only one monolithic partition"
    )]
    fn get_number_of_partitions(&self) -> NonZeroU32;
    /// This partition's rank, in `[0, number_of_partitions)`.
    #[debug_ensures(
        ret < self.get_number_of_partitions().get(),
        "rank is in range [0, number_of_partitions)"
    )]
    fn get_rank(&self) -> u32;
    /// Consumes the scheme, building the local partition with the given
    /// reporter context and auxiliary state.
    fn into_local_partition<R: Reporter, P: ReporterContext<Reporter = R>>(
        self,
        reporter_context: P,
        auxiliary: Self::Auxiliary,
    ) -> anyhow::Result<Self::LocalPartition<R>>;
}
/// Mode controlling how individual migration between partitions is handled
/// (consumed by [`LocalPartition::migrate_individuals`]).
///
/// Public enums should be debuggable and comparable, so `Debug`, `Eq`,
/// `PartialEq` and `Hash` are derived in addition to the original
/// `Copy`/`Clone`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum MigrationMode {
    Force,
    Default,
    Hold,
}
/// A single partition of a (possibly distributed) simulation, owning its
/// reporter and the collective-communication primitives it needs.
#[allow(clippy::inline_always, clippy::inline_fn_without_body)]
#[contract_trait]
pub trait LocalPartition<R: Reporter>: Sized {
    /// The reporter receiving this partition's simulation events.
    type Reporter: Reporter;
    /// Type-level flag (see `necsim_core::reporter::boolean::Boolean`).
    type IsLive: Boolean;
    /// Iterator over lineages immigrating into this partition.
    type ImmigrantIterator<'a>: Iterator<Item = MigratingLineage>;
    /// Mutable access to this partition's reporter.
    fn get_reporter(&mut self) -> &mut Self::Reporter;
    /// Returns `true` iff this partition is the root partition.
    fn is_root(&self) -> bool;
    /// This partition's rank, in `[0, number_of_partitions)`.
    #[debug_ensures(
        ret < self.get_number_of_partitions().get(),
        "partition rank is in range [0, self.get_number_of_partitions())"
    )]
    fn get_partition_rank(&self) -> u32;
    /// Total number of partitions (always >= 1).
    fn get_number_of_partitions(&self) -> NonZeroU32;
    /// Sends `emigrants` (tagged with their destination rank) and returns an
    /// iterator over the lineages that immigrated here, subject to the given
    /// emigration/immigration [`MigrationMode`]s.
    fn migrate_individuals<E: Iterator<Item = (u32, MigratingLineage)>>(
        &mut self,
        emigrants: &mut E,
        emigration_mode: MigrationMode,
        immigration_mode: MigrationMode,
    ) -> Self::ImmigrantIterator<'_>;
    /// Collective vote on whether the simulation should continue.
    fn reduce_vote_continue(&self, local_continue: bool) -> bool;
    /// Collective minimum over the partitions' local times; `Ok` if this
    /// partition supplied the minimum, `Err` with the winning time otherwise.
    fn reduce_vote_min_time(&self, local_time: PositiveF64) -> Result<PositiveF64, PositiveF64>;
    /// Blocks until all partitions agree on termination; `true` = keep going.
    fn wait_for_termination(&mut self) -> bool;
    /// Collective reduction of local (time, steps) into global totals.
    fn reduce_global_time_steps(
        &self,
        local_time: NonNegativeF64,
        local_steps: u64,
    ) -> (NonNegativeF64, u64);
    /// Synchronised progress report of the remaining work.
    fn report_progress_sync(&mut self, remaining: u64);
    /// Consumes the partition, flushing any outstanding reports.
    fn finalise_reporting(self);
}
|
// Auto-generated register-reader definitions (svd2rust output, per the
// register description below) — keep hand edits to comments only.
#[doc = "Register `DCOUNT` reader"]
pub type R = crate::R<DCOUNT_SPEC>;
#[doc = "Field `DATACOUNT` reader - Data count value When read, the number of remaining data bytes to be transferred is returned. Write has no effect."]
pub type DATACOUNT_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:24 - Data count value When read, the number of remaining data bytes to be transferred is returned. Write has no effect."]
    #[inline(always)]
    pub fn datacount(&self) -> DATACOUNT_R {
        // 25-bit field (bits 0..=24), hence the 0x01ff_ffff mask.
        DATACOUNT_R::new(self.bits & 0x01ff_ffff)
    }
}
#[doc = "The SDMMC_DCNTR register loads the value from the data length register (see SDMMC_DLENR) when the DPSM moves from the Idle state to the Wait_R or Wait_S state. As data is transferred, the counter decrements the value until it reaches 0. The DPSM then moves to the Idle state and when there has been no error, the data status end flag (DATAEND) is set.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dcount::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DCOUNT_SPEC;
impl crate::RegisterSpec for DCOUNT_SPEC {
    // 32-bit register storage type.
    type Ux = u32;
}
#[doc = "`read()` method returns [`dcount::R`](R) reader structure"]
impl crate::Readable for DCOUNT_SPEC {}
#[doc = "`reset()` method sets DCOUNT to value 0"]
impl crate::Resettable for DCOUNT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! copyright (c) 2020 by shaipe
//! 从rs实体文件中提取字段创建数据库表结构
//!
use lane::fs::{append_content, read_content};
use std::env;
use std::path::{Path, PathBuf};
mod mysql;
mod postgres;
/// 入口函数
/// Entry point: args are `[table-name prefix] [scan directory] [sql dialect]`,
/// defaulting to no prefix, the current directory, and "mysql".
fn main() {
    // Read command-line arguments
    let args: Vec<String> = env::args().collect();
    let prefix = if args.len() > 1 { &args[1] } else { "" };
    let dir_str = if args.len() > 2 { &args[2] } else { "./" };
    let sql_type = if args.len() > 3 { &args[3] } else { "mysql"};
    // Build the directory path and the output file (tables.sql inside it)
    let dir_path = Path::new(dir_str);
    let file_path = dir_path.join("./tables.sql");
    let file_path_str = format!("{}", file_path.display());
    println!("开始对 {:?} 目录进行处理, 将把结果写入 {:?} 文件中.", dir_path.display(), file_path.display());
    walk(PathBuf::from(dir_path), &file_path_str, sql_type, prefix);
    println!("代码生成已经完成");
}
/// 数据库接口
/// SQL-dialect abstraction implemented by the mysql and postgres back-ends.
pub trait Sql {
    /// Gets the dialect's column data type for a Rust type name.
    fn get_data_type(&self, type_str: &str) -> String;
    /// Gets the dialect's auto-increment clause.
    fn get_identifier(&self) -> String;
    /// Gets the SQL statement(s) generated from a Rust source file's content;
    /// `prefix` is presumably prepended to table names — confirm in the
    /// mysql/postgres implementations.
    fn get_sql(&self, content: String, prefix: &str) -> String;
}
/// 遍历整个目录对指定的rs文件进行处理
/// Recursively walks `dir_path`, generating SQL (dialect chosen by
/// `sql_type`) for every `.rs` file under a `src` directory and appending it
/// to `file_path`.
fn walk(dir_path: PathBuf, file_path: &str, sql_type: &str, prefix: &str) {
    if dir_path.exists() {
        // BUG FIX: the original `.expect("指定的路径 {} 不存在.")` contained a
        // `{}` placeholder that was never interpolated; report the real path
        // and error instead (closure form avoids formatting on success).
        let entries = dir_path
            .read_dir()
            .unwrap_or_else(|e| panic!("无法读取目录 {}: {}", dir_path.display(), e));
        for entry in entries {
            let p = entry.unwrap().path();
            if p.is_file() {
                // Only process files with the `rs` extension
                // (map_or replaces the `!is_none() && unwrap()` pattern).
                if p.extension().map_or(false, |ext| ext == "rs") {
                    let p_str = format!("{}", p.display());
                    // Only process files under a src directory
                    if p_str.contains(r"/src/") {
                        let content = read_content(&p_str);
                        let sql = if sql_type == "postgres" {
                            postgres::Postgres {}.get_sql(content, prefix)
                        } else {
                            mysql::MySql {}.get_sql(content, prefix)
                        };
                        append_content(file_path, sql.as_bytes());
                    }
                }
            } else {
                // Recurse into subdirectories
                walk(p, file_path, sql_type, prefix);
            }
        }
    } else {
        println!("指定的路径 {} 不存在.", dir_path.display());
    }
}
|
/// Stub macro that unconditionally panics.
/// NOTE(review): intended semantics ("not supported" site marker?) are not
/// evident from this chunk — confirm at call sites before changing.
macro_rules! rs_not_supported {
    () => { panic!() };
}
/// Stub macro that unconditionally panics.
/// NOTE(review): intended semantics are not evident from this chunk —
/// confirm at call sites before changing.
macro_rules! locked {
    () => { panic!() };
}
use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};
use futures::executor::block_on;
use memmap2::Mmap;
use rand::prelude::*;
use std::fs::OpenOptions;
use std::io::Write;
use std::num::{NonZeroU64, NonZeroUsize};
use std::time::Instant;
use std::{env, fs};
use subspace_archiving::archiver::Archiver;
use subspace_core_primitives::crypto::kzg;
use subspace_core_primitives::crypto::kzg::Kzg;
use subspace_core_primitives::{
HistorySize, PieceOffset, PublicKey, Record, RecordedHistorySegment, SectorId,
};
use subspace_erasure_coding::ErasureCoding;
use subspace_farmer_components::file_ext::FileExt;
use subspace_farmer_components::plotting::{plot_sector, PieceGetterRetryPolicy, PlottedSector};
use subspace_farmer_components::reading::read_piece;
use subspace_farmer_components::sector::{
sector_size, SectorContentsMap, SectorMetadata, SectorMetadataChecksummed,
};
use subspace_farmer_components::FarmerProtocolInfo;
use subspace_proof_of_space::chia::ChiaTable;
use subspace_proof_of_space::Table;
type PosTable = ChiaTable;
const MAX_PIECES_IN_SECTOR: u16 = 1000;
/// Benchmarks reading a single piece from a plotted sector, both from an
/// in-memory sector buffer and from a multi-sector plot file on disk.
///
/// Tunable via environment variables: `BASE_PATH`, `PIECES_IN_SECTOR`,
/// `PERSIST_SECTOR` (set to "1" to reuse a plotted sector file across runs),
/// and `SECTORS_COUNT` (number of sector copies in the on-disk plot).
pub fn criterion_benchmark(c: &mut Criterion) {
    println!("Initializing...");
    let base_path = env::var("BASE_PATH")
        .map(|base_path| base_path.parse().unwrap())
        .unwrap_or_else(|_error| env::temp_dir());
    // NOTE(review): the closure argument is named `base_path` (copy-paste
    // from above) but actually carries the PIECES_IN_SECTOR string.
    let pieces_in_sector = env::var("PIECES_IN_SECTOR")
        .map(|base_path| base_path.parse().unwrap())
        .unwrap_or_else(|_error| MAX_PIECES_IN_SECTOR);
    let persist_sector = env::var("PERSIST_SECTOR")
        .map(|persist_sector| persist_sector == "1")
        .unwrap_or_else(|_error| false);
    let sectors_count = env::var("SECTORS_COUNT")
        .map(|sectors_count| sectors_count.parse().unwrap())
        .unwrap_or(10);
    let public_key = PublicKey::default();
    let sector_index = 0;
    // Fill the history segment deterministically (fixed seed) so persisted
    // sectors from earlier runs stay compatible.
    let mut input = RecordedHistorySegment::new_boxed();
    StdRng::seed_from_u64(42).fill(AsMut::<[u8]>::as_mut(input.as_mut()));
    let kzg = Kzg::new(kzg::embedded_kzg_settings());
    let mut archiver = Archiver::new(kzg.clone()).unwrap();
    let erasure_coding = ErasureCoding::new(
        NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize).unwrap(),
    )
    .unwrap();
    let mut table_generator = PosTable::generator();
    // Archive one block and keep the first archived segment's pieces.
    let archived_history_segment = archiver
        .add_block(
            AsRef::<[u8]>::as_ref(input.as_ref()).to_vec(),
            Default::default(),
            true,
        )
        .into_iter()
        .next()
        .unwrap()
        .pieces;
    let farmer_protocol_info = FarmerProtocolInfo {
        history_size: HistorySize::from(NonZeroU64::new(1).unwrap()),
        max_pieces_in_sector: pieces_in_sector,
        recent_segments: HistorySize::from(NonZeroU64::new(5).unwrap()),
        recent_history_fraction: (
            HistorySize::from(NonZeroU64::new(1).unwrap()),
            HistorySize::from(NonZeroU64::new(10).unwrap()),
        ),
        min_sector_lifetime: HistorySize::from(NonZeroU64::new(4).unwrap()),
    };
    let sector_size = sector_size(pieces_in_sector);
    let persisted_sector = base_path.join(format!("subspace_bench_sector_{pieces_in_sector}.plot"));
    // Either reload a previously persisted sector or plot a fresh one
    // (plotting is the expensive part of initialization).
    let (plotted_sector, plotted_sector_bytes) = if persist_sector && persisted_sector.is_file() {
        println!(
            "Reading persisted sector from {}...",
            persisted_sector.display()
        );
        let plotted_sector_bytes = fs::read(&persisted_sector).unwrap();
        let sector_contents_map = SectorContentsMap::from_bytes(
            &plotted_sector_bytes[..SectorContentsMap::encoded_size(pieces_in_sector)],
            pieces_in_sector,
        )
        .unwrap();
        let sector_metadata = SectorMetadataChecksummed::from(SectorMetadata {
            sector_index,
            pieces_in_sector,
            s_bucket_sizes: sector_contents_map.s_bucket_sizes(),
            history_size: farmer_protocol_info.history_size,
        });
        (
            PlottedSector {
                sector_id: SectorId::new(public_key.hash(), sector_index),
                sector_index,
                sector_metadata,
                // Piece indexes are not needed by the read benchmarks below.
                piece_indexes: vec![],
            },
            plotted_sector_bytes,
        )
    } else {
        println!("Plotting one sector...");
        let mut plotted_sector_bytes = vec![0; sector_size];
        let mut plotted_sector_metadata_bytes = vec![0; SectorMetadataChecksummed::encoded_size()];
        let plotted_sector = block_on(plot_sector::<_, PosTable>(
            &public_key,
            sector_index,
            &archived_history_segment,
            PieceGetterRetryPolicy::default(),
            &farmer_protocol_info,
            &kzg,
            &erasure_coding,
            pieces_in_sector,
            &mut plotted_sector_bytes,
            &mut plotted_sector_metadata_bytes,
            &mut table_generator,
        ))
        .unwrap();
        (plotted_sector, plotted_sector_bytes)
    };
    assert_eq!(plotted_sector_bytes.len(), sector_size);
    // Persist the freshly plotted sector for reuse on the next run.
    if persist_sector && !persisted_sector.is_file() {
        println!(
            "Writing persisted sector into {}...",
            persisted_sector.display()
        );
        fs::write(persisted_sector, &plotted_sector_bytes).unwrap()
    }
    let piece_offset = PieceOffset::ZERO;
    let mut group = c.benchmark_group("reading");
    group.throughput(Throughput::Elements(1));
    // Benchmark 1: read one piece from the in-memory sector buffer.
    group.bench_function("piece/memory", |b| {
        b.iter(|| {
            read_piece::<PosTable>(
                black_box(piece_offset),
                black_box(&plotted_sector.sector_id),
                black_box(&plotted_sector.sector_metadata),
                black_box(&plotted_sector_bytes),
                black_box(&erasure_coding),
                black_box(&mut table_generator),
            )
            .unwrap();
        })
    });
    // Benchmark 2: read one piece per sector from a memory-mapped plot file
    // containing `sectors_count` copies of the sector.
    {
        println!("Writing {sectors_count} sectors to disk...");
        let plot_file_path = base_path.join("subspace_bench_plot.plot");
        let mut plot_file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .truncate(true)
            .open(&plot_file_path)
            .unwrap();
        plot_file
            .preallocate(sector_size as u64 * sectors_count)
            .unwrap();
        plot_file.advise_random_access().unwrap();
        for _i in 0..sectors_count {
            plot_file
                .write_all(plotted_sector_bytes.as_slice())
                .unwrap();
        }
        let plot_mmap = unsafe { Mmap::map(&plot_file).unwrap() };
        #[cfg(unix)]
        {
            plot_mmap.advise(memmap2::Advice::Random).unwrap();
        }
        group.throughput(Throughput::Elements(sectors_count));
        group.bench_function("piece/disk", move |b| {
            b.iter_custom(|iters| {
                let start = Instant::now();
                for _i in 0..iters {
                    for sector in plot_mmap.chunks_exact(sector_size) {
                        read_piece::<PosTable>(
                            black_box(piece_offset),
                            black_box(&plotted_sector.sector_id),
                            black_box(&plotted_sector.sector_metadata),
                            black_box(sector),
                            black_box(&erasure_coding),
                            black_box(&mut table_generator),
                        )
                        .unwrap();
                    }
                }
                start.elapsed()
            });
        });
        // Clean up the temporary plot file.
        drop(plot_file);
        fs::remove_file(plot_file_path).unwrap();
    }
    group.finish();
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
use Opcode::*;
use Mode::*;
use std::fmt::{Debug, Formatter, Error, Display};
use std::io::Write;
use std::collections::VecDeque;
use std::hint::unreachable_unchecked;
//const DBG: bool = true;
const DBG: bool = false;
/// An Intcode virtual machine (Advent of Code 2019 style).
#[derive(Debug)]
pub struct Computer {
    /// Program memory; grows (zero-filled) on out-of-range writes.
    pub mem: Vec<i64>,
    /// Instruction pointer.
    ptr: usize,
    /// Pending input values consumed by `Input` opcodes.
    input: VecDeque<i64>,
    /// Values produced by `Output` opcodes; drained via `recv`/`recv_all`.
    output: VecDeque<i64>,
    /// Offset adjusted by the `SetRelBase` opcode.
    rel_base: i64,
    /// True once the program has executed a `Halt` opcode.
    pub is_done: bool,
}
impl Computer {
    /// Runs the program until it halts or blocks waiting for input.
    ///
    /// `Opcode::calculate` signals control flow through `Err`: `Halt`
    /// terminates the run, failed jumps fall through to the normal pointer
    /// advance, and `Input` with an empty queue suspends execution (call
    /// `send` and then `compute` again to resume).
    pub fn compute(&mut self) {
        loop {
            if DBG { print!("{}: {} ", self.ptr, self.mem[self.ptr]); }
            let orig_ptr = self.ptr;
            let opcode = Opcode::from(&self);
            if DBG { print!(" {} ", opcode); }
            if let Err(code) = Opcode::calculate(&opcode, self) {
                match code {
                    Halt => {
                        if DBG { println!() }
                        self.is_done = true;
                        return;
                    }
                    JumpNZero(_, _) | JumpZero(_, _) => {}
                    Input(_) => return,
                    // _ => unreachable!("{:?} should not error in calculation", opcode)
                    _ => unsafe { unreachable_unchecked() },
                }
            }
            // Only advance past the instruction if it did not jump.
            if orig_ptr == self.ptr {
                self.ptr += opcode.nparams() + opcode.nwrites() + 1;
            }
            if DBG { println!(); }
        };
    }
    /// Reads the value addressed by `mode`; out-of-range reads yield 0.
    fn read(&self, mode: &Mode) -> i64 {
        *self.mem.get(mode.index()).unwrap_or(&0)
    }
    /// Writes `val` at the address given by `mode`, growing memory
    /// (zero-filled) as needed.
    fn write(&mut self, mode: &Mode, val: i64) {
        let index = mode.index();
        if index >= self.mem.len() {
            self.mem.resize(index + 1, 0);
        }
        self.mem[index] = val
    }
    /// Queues a single input value.
    pub fn send(&mut self, val: i64) {
        self.input.push_back(val);
    }
    /// Pops the oldest pending output value, if any.
    pub fn recv(&mut self) -> Option<i64> {
        self.output.pop_front()
    }
    /// Queues every value from `vals` as input.
    pub fn send_all(&mut self, vals: impl Iterator<Item=i64>) {
        vals.for_each(|n| self.send(n));
    }
    /// Drains and yields all pending output values.
    pub fn recv_all(&mut self) -> impl Iterator<Item=i64> + '_ {
        self.output.drain(..)
    }
    /// Creates a machine from a program image and initial input values.
    ///
    /// Generalized from `&Vec<i64>` to `&[i64]`; existing `&Vec` callers
    /// still work via deref coercion.
    pub fn init<I: IntoIterator<Item=i64>>(mem: &[i64], vals: I) -> Self {
        Computer {
            mem: mem.to_vec(),
            ptr: 0,
            input: vals.into_iter().collect(),
            output: Default::default(),
            rel_base: 0,
            is_done: false,
        }
    }
    /// Parses the first line of `mem` as comma-separated i64 program memory.
    ///
    /// # Panics
    /// Panics if any element fails to parse as `i64`.
    pub fn parse_mem(mem: &str) -> Vec<i64> {
        mem.lines()
            .take(1)
            .flat_map(|line| line.split(","))
            .map(|n| n.trim().parse().unwrap())
            .collect()
    }
}
/// A decoded Intcode instruction; each `Mode` is the addressing mode of one
/// parameter (write targets carry their own `Mode`).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum Opcode {
    Add(Mode, Mode, Mode),
    Mult(Mode, Mode, Mode),
    Input(Mode),
    Output(Mode),
    JumpNZero(Mode, Mode),
    JumpZero(Mode, Mode),
    Less(Mode, Mode, Mode),
    Equal(Mode, Mode, Mode),
    SetRelBase(Mode),
    Halt,
}
impl Opcode {
fn calculate(&self, com: &mut Computer) -> Result<(), Opcode> {
if DBG { std::io::stdout().flush().unwrap(); }
match self {
Add(a, b, w) => {
let res = com.read(a) + com.read(b);
if DBG { print!("{}+{}={} @{}", com.read(a), com.read(b), res, w.index()); }
com.write(w, res);
Ok(())
}
Mult(a, b, w) => {
let res = com.read(a) * com.read(b);
if DBG { print!("{}*{}={} @{}", com.read(a), com.read(b), res, w.index()); }
com.write(w, res);
Ok(())
}
Input(w) => {
let res = match com.input.pop_front() {
Some(inp) => inp,
None => return Err(Input(Mode::dummy()))
};
if DBG { print!("in={} @{}", res, w.index()); }
com.write(w, res);
Ok(())
}
Output(a) => {
let res = com.read(a);
if DBG { print!("out={}", res); }
com.output.push_back(res);
if com.output.len() % 3 == 0 && !com.output.is_empty() {
if com.output[com.output.len() - 3] == -1 {
println!("{:?}", com.output);
}
}
Ok(())
}
JumpNZero(a, j) => {
let a = com.read(a);
if a != 0 {
if DBG { print!("jnz({}!=0)->{}", a, com.read(j)); }
com.ptr = com.read(j) as usize;
Ok(())
} else {
if DBG { print!("!jnz({}==0)", a); }
Err(JumpNZero(Mode::dummy(), Mode::dummy()))
}
}
JumpZero(a, j) => {
let a = com.read(a);
if a == 0 {
if DBG { print!("jnz({}==0)->{}", a, com.read(j)); }
com.ptr = com.read(j) as usize;
Ok(())
} else {
if DBG { print!("!jnz({}!=0)", a); }
Err(JumpZero(Mode::dummy(), Mode::dummy()))
}
}
Less(a, b, w) => {
let res = if com.read(a) < com.read(b) { 1 } else { 0 };
if DBG { print!("{}<{}={} @{}", com.read(a), com.read(b), res, w.index()); }
com.write(w, res);
Ok(())
}
Equal(a, b, w) => {
let res = if com.read(a) == com.read(b) { 1 } else { 0 };
if DBG { print!("{}=={}={} @{}", com.read(a), com.read(b), res, w.index()); }
com.write(w, res);
Ok(())
}
SetRelBase(a) => {
let a = com.read(a);
if DBG { print!("rb={}+{}={}", com.rel_base, a, com.rel_base + a); }
com.rel_base += a;
Ok(())
}
Halt => Err(Halt),
}
}
fn nparams(&self) -> usize {
match self {
Add(_, _, _) => 2,
Mult(_, _, _) => 2,
Input(_) => 0,
Output(_) => 1,
JumpNZero(_, _) => 2,
JumpZero(_, _) => 2,
Less(_, _, _) => 2,
Equal(_, _, _) => 2,
SetRelBase(_) => 1,
Halt => 0,
}
}
fn nwrites(&self) -> usize {
match self {
Add(_, _, _) => 1,
Mult(_, _, _) => 1,
Input(_) => 1,
Output(_) => 0,
JumpNZero(_, _) => 0,
JumpZero(_, _) => 0,
Less(_, _, _) => 1,
Equal(_, _, _) => 1,
SetRelBase(_) => 0,
Halt => 0,
}
}
    /// Decodes the instruction at `com.ptr` into an `Opcode`, resolving each
    /// parameter's addressing mode via `Mode::from`.
    fn from(com: &Computer) -> Self {
        let ptr = com.ptr;
        let mem = &com.mem;
        let instr = com.mem[ptr];
        // The low two decimal digits select the operation; higher digits are
        // per-parameter mode flags (decoded in `Mode::from`).
        let code = instr % 100;
        match code {
            1 => {
                if DBG { print!("[{}, {}, {}]", mem[ptr + 1], mem[ptr + 2], mem[ptr + 3], ); }
                Add(Mode::from(com, 1), Mode::from(com, 2), Mode::from(com, 3))
            }
            2 => {
                if DBG { print!("[{}, {}, {}]", mem[ptr + 1], mem[ptr + 2], mem[ptr + 3], ); }
                Mult(Mode::from(com, 1), Mode::from(com, 2), Mode::from(com, 3))
            }
            3 => {
                if DBG { print!("[{}]", mem[ptr + 1]); }
                Input(Mode::from(com, 1))
            }
            4 => {
                if DBG { print!("[{}]", mem[ptr + 1]); }
                Output(Mode::from(com, 1))
            }
            5 => {
                if DBG { print!("[{}, {}]", mem[ptr + 1], mem[ptr + 2]); }
                JumpNZero(Mode::from(com, 1), Mode::from(com, 2))
            }
            6 => {
                if DBG { print!("[{}, {}]", mem[ptr + 1], mem[ptr + 2]); }
                JumpZero(Mode::from(com, 1), Mode::from(com, 2))
            }
            7 => {
                if DBG { print!("[{}, {}, {}]", mem[ptr + 1], mem[ptr + 2], mem[ptr + 3], ); }
                Less(Mode::from(com, 1), Mode::from(com, 2), Mode::from(com, 3))
            }
            8 => {
                if DBG { print!("[{}, {}, {}]", mem[ptr + 1], mem[ptr + 2], mem[ptr + 3], ); }
                Equal(Mode::from(com, 1), Mode::from(com, 2), Mode::from(com, 3))
            }
            9 => {
                if DBG { print!("[{}]", mem[ptr + 1]); }
                SetRelBase(Mode::from(com, 1))
            }
            99 => Halt,
            // SAFETY: assumes programs only ever contain opcodes 1-9/99; a
            // malformed program is UB here. The checked variant was
            // deliberately left commented out below (presumably for speed).
            _ => unsafe { unreachable_unchecked() },
            // _ => unreachable!("Opcode::from instr={}", instr),
        }
    }
}
impl Display for Opcode {
    /// Formats the opcode as its bare variant name (operands omitted).
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        let name = match self {
            Add(..) => "Add",
            Mult(..) => "Mult",
            Input(..) => "Input",
            Output(..) => "Output",
            JumpNZero(..) => "JumpNZero",
            JumpZero(..) => "JumpZero",
            Less(..) => "Less",
            Equal(..) => "Equal",
            SetRelBase(..) => "SetRelBase",
            Halt => "Halt",
        };
        f.write_str(name)
    }
}
/// A parameter's addressing mode, pre-resolved to an absolute memory index
/// (see `Mode::from`).
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum Mode {
    /// Position mode: the operand named this memory cell directly.
    Pos(usize),
    /// Immediate mode: holds the index of the operand cell itself.
    Imm(usize),
    /// Relative mode: operand plus the computer's relative base.
    Rel(usize),
}
impl Mode {
    /// Decodes the addressing mode of parameter `offset` (1-based) of the
    /// instruction at `computer.ptr`, resolving it to an absolute index.
    fn from(computer: &Computer, offset: usize) -> Self {
        let Computer { mem, ptr, input: _, output: _, rel_base, is_done: _ } = computer;
        let ptr = *ptr;
        let rel_base = *rel_base;
        let instr = mem[ptr];
        // The mode digit for parameter N sits at decimal place 10^(N+1):
        // e.g. in 21002, parameter 1 has mode 0, parameter 2 has mode 1,
        // and parameter 3 has mode 2.
        let key = (instr / 10_i64.pow((offset + 1) as u32)) % 10;
        match key {
            0 => Pos(mem[ptr + offset] as usize),
            1 => Imm(ptr + offset),
            2 => Rel((mem[ptr + offset] + rel_base) as usize),
            // fail => unreachable!("Mode::from, key={}", fail),
            // SAFETY: assumes mode digits are always 0, 1 or 2; malformed
            // programs are UB here (checked variant left commented above).
            _ => unsafe { unreachable_unchecked() },
        }
    }
    /// The absolute memory index this mode resolves to.
    fn index(&self) -> usize {
        match self {
            Pos(i) => *i,
            Imm(i) => *i,
            Rel(i) => *i,
        }
    }
    /// Placeholder operand used only as an error-variant payload.
    fn dummy() -> Self { // thicc
        Pos(0) // should be unreachable to actually get this value
    }
}
// Generated (svd2rust-style) register accessors for CONN_REQ_WORD10.
#[doc = "Reader of register CONN_REQ_WORD10"]
pub type R = crate::R<u32, super::CONN_REQ_WORD10>;
#[doc = "Writer for register CONN_REQ_WORD10"]
pub type W = crate::W<u32, super::CONN_REQ_WORD10>;
#[doc = "Register CONN_REQ_WORD10 `reset()`'s with value 0"]
impl crate::ResetValue for super::CONN_REQ_WORD10 {
    type Type = u32;
    // The register resets to all-zero.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DATA_CHANNELS_UPPER`"]
pub type DATA_CHANNELS_UPPER_R = crate::R<u8, u8>;
// Write proxy: holds a mutable borrow of the register writer `W` while the
// field value is merged in.
#[doc = "Write proxy for field `DATA_CHANNELS_UPPER`"]
pub struct DATA_CHANNELS_UPPER_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_CHANNELS_UPPER_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Masks `value` to the field's 5 bits (0:4) and merges it into the
    // register image. Marked `unsafe` by generated-API convention even
    // though the value is masked here.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x1f) | ((value as u32) & 0x1f);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:4 - This register field indicates which of the data channels are in use. This stores the information for the upper 5 (36:32) data channel indices. '1' indicates the corresponding data channel is used and '0' indicates the channel is unused."]
    #[inline(always)]
    pub fn data_channels_upper(&self) -> DATA_CHANNELS_UPPER_R {
        // Extract the low five bits of the register value.
        let field = (self.bits & 0x1f) as u8;
        DATA_CHANNELS_UPPER_R::new(field)
    }
}
impl W {
    #[doc = "Bits 0:4 - This register field indicates which of the data channels are in use. This stores the information for the upper 5 (36:32) data channel indices. '1' indicates the corresponding data channel is used and '0' indicates the channel is unused."]
    // Returns the write proxy for the DATA_CHANNELS_UPPER field.
    #[inline(always)]
    pub fn data_channels_upper(&mut self) -> DATA_CHANNELS_UPPER_W {
        DATA_CHANNELS_UPPER_W { w: self }
    }
}
|
use geo::Point;
/// A cardinal direction on a 2-D grid.
///
/// Derives the standard traits (`Copy`/`Clone`/`Eq`/`Hash`/`Debug`) so
/// directions can be compared, stored in sets/maps, and printed — the enum
/// is a plain fieldless value type, so this is free and backward-compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Dir {
    North,
    South,
    West,
    East
}
impl Dir {
pub fn right(&self) -> Dir {
use Dir::*;
match *self {
North => East,
East => South,
South => West,
West => North
}
}
pub fn left(&self) -> Dir {
use Dir::*;
match *self {
South => East,
West => South,
North => West,
East => North
}
}
pub fn offset(&self) -> (isize, isize) {
match self {
Dir::North => (0, -1),
Dir::East => (1, 0),
Dir::West => (-1, 0),
Dir::South => (0, 1)
}
}
pub fn next_pos(&self, pos: Point<usize>) -> Point<usize> {
let off = self.offset();
offset_pos(pos, off)
}
}
pub fn offset_pos(pt: Point<usize>, offset: (isize, isize)) -> Point<usize> {
let x = (pt.0.x as isize) + offset.0;
let y = (pt.0.y as isize) + offset.1;
Point::new(x as usize, y as usize)
} |
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// https://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The implementations of the `Uniform` distribution for other built-in types.
use core::char;
use {Rng};
use distributions::{Distribution, Uniform};
impl Distribution<char> for Uniform {
    /// Samples a uniformly distributed `char` over all valid scalar values.
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> char {
        // a char is 21 bits
        const CHAR_MASK: u32 = 0x001f_ffff;
        // Rejection sampling. About 0.2% of numbers with at most
        // 21-bits are invalid codepoints (surrogates), so this
        // will succeed first go almost every time.
        loop {
            if let Some(c) = char::from_u32(rng.next_u32() & CHAR_MASK) {
                return c;
            }
        }
    }
}
impl Distribution<bool> for Uniform {
    /// Samples `true` or `false` with equal probability.
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> bool {
        // Use the lowest bit of one random byte.
        let byte: u8 = rng.gen();
        byte & 1 == 1
    }
}
// Implements `Distribution<(A, B, ...)>` for `Uniform` by sampling each
// element independently; invoked once per tuple arity below.
macro_rules! tuple_impl {
    // use variables to indicate the arity of the tuple
    ($($tyvar:ident),* ) => {
        // the trailing commas are for the 1 tuple
        impl< $( $tyvar ),* >
            Distribution<( $( $tyvar ),* , )>
            for Uniform
            where $( Uniform: Distribution<$tyvar> ),*
        {
            #[inline]
            fn sample<R: Rng + ?Sized>(&self, _rng: &mut R) -> ( $( $tyvar ),* , ) {
                (
                    // use the $tyvar's to get the appropriate number of
                    // repeats (they're not actually needed)
                    $(
                        _rng.gen::<$tyvar>()
                    ),*
                    ,
                )
            }
        }
    }
}
// Zero-arity case: sampling the unit type is a no-op.
impl Distribution<()> for Uniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, _: &mut R) -> () { () }
}
// Tuple impls for arities 1 through 12.
tuple_impl!{A}
tuple_impl!{A, B}
tuple_impl!{A, B, C}
tuple_impl!{A, B, C, D}
tuple_impl!{A, B, C, D, E}
tuple_impl!{A, B, C, D, E, F}
tuple_impl!{A, B, C, D, E, F, G}
tuple_impl!{A, B, C, D, E, F, G, H}
tuple_impl!{A, B, C, D, E, F, G, H, I}
tuple_impl!{A, B, C, D, E, F, G, H, I, J}
tuple_impl!{A, B, C, D, E, F, G, H, I, J, K}
tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L}
// Implements `Distribution<[T; N]>` for N = 0..=32: each recursion step
// peels one type token off the list and emits the impl for the current N.
macro_rules! array_impl {
    // recursive, given at least one type parameter:
    {$n:expr, $t:ident, $($ts:ident,)*} => {
        // Recurse first so the smaller array sizes are covered too.
        array_impl!{($n - 1), $($ts,)*}
        impl<T> Distribution<[T; $n]> for Uniform where Uniform: Distribution<T> {
            #[inline]
            fn sample<R: Rng + ?Sized>(&self, _rng: &mut R) -> [T; $n] {
                [_rng.gen::<$t>(), $(_rng.gen::<$ts>()),*]
            }
        }
    };
    // empty case:
    {$n:expr,} => {
        impl<T> Distribution<[T; $n]> for Uniform {
            fn sample<R: Rng + ?Sized>(&self, _rng: &mut R) -> [T; $n] { [] }
        }
    };
}
array_impl!{32, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T, T,}
impl<T> Distribution<Option<T>> for Uniform where Uniform: Distribution<T> {
    /// Samples `Some(value)` or `None`, each with probability 1/2.
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Option<T> {
        // UFCS is needed here: https://github.com/rust-lang/rust/issues/24066
        match rng.gen::<bool>() {
            true => Some(rng.gen()),
            false => None,
        }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Postal address for a domain-registration contact.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Address {
    pub address1: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub address2: Option<String>,
    pub city: String,
    pub country: String,
    #[serde(rename = "postalCode")]
    pub postal_code: String,
    pub state: String,
}
/// Contact details (name, email, phone, optional mailing address) used for
/// the admin/billing/registrant/tech roles of a domain.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Contact {
    #[serde(rename = "addressMailing", default, skip_serializing_if = "Option::is_none")]
    pub address_mailing: Option<Address>,
    pub email: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fax: Option<String>,
    #[serde(rename = "jobTitle", default, skip_serializing_if = "Option::is_none")]
    pub job_title: Option<String>,
    #[serde(rename = "nameFirst")]
    pub name_first: String,
    #[serde(rename = "nameLast")]
    pub name_last: String,
    #[serde(rename = "nameMiddle", default, skip_serializing_if = "Option::is_none")]
    pub name_middle: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub organization: Option<String>,
    pub phone: String,
}
/// A domain resource: the `Resource` envelope (flattened into the same JSON
/// object) plus optional domain-specific `properties`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Domain {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<domain::Properties>,
}
pub mod domain {
    use super::*;
    /// Properties of a registered domain: required contacts, registration /
    /// provisioning state, name servers, DNS configuration and renewal info.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "contactAdmin")]
        pub contact_admin: Contact,
        #[serde(rename = "contactBilling")]
        pub contact_billing: Contact,
        #[serde(rename = "contactRegistrant")]
        pub contact_registrant: Contact,
        #[serde(rename = "contactTech")]
        pub contact_tech: Contact,
        #[serde(rename = "registrationStatus", default, skip_serializing_if = "Option::is_none")]
        pub registration_status: Option<properties::RegistrationStatus>,
        #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
        pub provisioning_state: Option<properties::ProvisioningState>,
        #[serde(rename = "nameServers", default, skip_serializing_if = "Vec::is_empty")]
        pub name_servers: Vec<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub privacy: Option<bool>,
        #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
        pub created_time: Option<String>,
        #[serde(rename = "expirationTime", default, skip_serializing_if = "Option::is_none")]
        pub expiration_time: Option<String>,
        #[serde(rename = "lastRenewedTime", default, skip_serializing_if = "Option::is_none")]
        pub last_renewed_time: Option<String>,
        #[serde(rename = "autoRenew", default, skip_serializing_if = "Option::is_none")]
        pub auto_renew: Option<bool>,
        #[serde(rename = "readyForDnsRecordManagement", default, skip_serializing_if = "Option::is_none")]
        pub ready_for_dns_record_management: Option<bool>,
        #[serde(rename = "managedHostNames", default, skip_serializing_if = "Vec::is_empty")]
        pub managed_host_names: Vec<HostName>,
        pub consent: DomainPurchaseConsent,
        #[serde(rename = "domainNotRenewableReasons", default, skip_serializing_if = "Vec::is_empty")]
        pub domain_not_renewable_reasons: Vec<String>,
        #[serde(rename = "dnsType", default, skip_serializing_if = "Option::is_none")]
        pub dns_type: Option<properties::DnsType>,
        #[serde(rename = "dnsZoneId", default, skip_serializing_if = "Option::is_none")]
        pub dns_zone_id: Option<String>,
        #[serde(rename = "targetDnsType", default, skip_serializing_if = "Option::is_none")]
        pub target_dns_type: Option<properties::TargetDnsType>,
        #[serde(rename = "authCode", default, skip_serializing_if = "Option::is_none")]
        pub auth_code: Option<String>,
    }
    pub mod properties {
        use super::*;
        /// Status of the domain registration.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum RegistrationStatus {
            Active,
            Awaiting,
            Cancelled,
            Confiscated,
            Disabled,
            Excluded,
            Expired,
            Failed,
            Held,
            Locked,
            Parked,
            Pending,
            Reserved,
            Reverted,
            Suspended,
            Transferred,
            Unknown,
            Unlocked,
            Unparked,
            Updated,
            JsonConverterFailed,
        }
        /// Provisioning state of the domain resource.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum ProvisioningState {
            Succeeded,
            Failed,
            Canceled,
            InProgress,
            Deleting,
        }
        /// Which DNS service currently hosts the domain.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum DnsType {
            AzureDns,
            DefaultDomainRegistrarDns,
        }
        /// Target DNS type when changing the domain's DNS hosting.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum TargetDnsType {
            AzureDns,
            DefaultDomainRegistrarDns,
        }
    }
}
/// Result of a domain-name availability check.
/// NOTE(review): "Availablility" is misspelled in this generated type name;
/// left as-is because renaming would break the public API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainAvailablilityCheckResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub available: Option<bool>,
    #[serde(rename = "domainType", default, skip_serializing_if = "Option::is_none")]
    pub domain_type: Option<domain_availablility_check_result::DomainType>,
}
pub mod domain_availablility_check_result {
    use super::*;
    /// Kind of domain the availability result refers to.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum DomainType {
        Regular,
        SoftDeleted,
    }
}
/// A page of `Domain` resources with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainCollection {
    pub value: Vec<Domain>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Single sign-on request parameters for the domain control center.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainControlCenterSsoRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    #[serde(rename = "postParameterKey", default, skip_serializing_if = "Option::is_none")]
    pub post_parameter_key: Option<String>,
    #[serde(rename = "postParameterValue", default, skip_serializing_if = "Option::is_none")]
    pub post_parameter_value: Option<String>,
}
/// Domain ownership identifier: proxy-only resource envelope plus properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainOwnershipIdentifier {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<domain_ownership_identifier::Properties>,
}
pub mod domain_ownership_identifier {
    use super::*;
    /// Payload of a `DomainOwnershipIdentifier`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "ownershipId", default, skip_serializing_if = "Option::is_none")]
        pub ownership_id: Option<String>,
    }
}
/// A page of `DomainOwnershipIdentifier` resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainOwnershipIdentifierCollection {
    pub value: Vec<DomainOwnershipIdentifier>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// PATCH (update) payload for a domain; proxy-only envelope plus properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainPatchResource {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<domain_patch_resource::Properties>,
}
pub mod domain_patch_resource {
    use super::*;
    /// NOTE(review): these properties (and the enums below) duplicate
    /// `domain::Properties` field-for-field — generated code; kept separate
    /// because the two JSON schemas are distinct in the service definition.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "contactAdmin")]
        pub contact_admin: Contact,
        #[serde(rename = "contactBilling")]
        pub contact_billing: Contact,
        #[serde(rename = "contactRegistrant")]
        pub contact_registrant: Contact,
        #[serde(rename = "contactTech")]
        pub contact_tech: Contact,
        #[serde(rename = "registrationStatus", default, skip_serializing_if = "Option::is_none")]
        pub registration_status: Option<properties::RegistrationStatus>,
        #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
        pub provisioning_state: Option<properties::ProvisioningState>,
        #[serde(rename = "nameServers", default, skip_serializing_if = "Vec::is_empty")]
        pub name_servers: Vec<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub privacy: Option<bool>,
        #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
        pub created_time: Option<String>,
        #[serde(rename = "expirationTime", default, skip_serializing_if = "Option::is_none")]
        pub expiration_time: Option<String>,
        #[serde(rename = "lastRenewedTime", default, skip_serializing_if = "Option::is_none")]
        pub last_renewed_time: Option<String>,
        #[serde(rename = "autoRenew", default, skip_serializing_if = "Option::is_none")]
        pub auto_renew: Option<bool>,
        #[serde(rename = "readyForDnsRecordManagement", default, skip_serializing_if = "Option::is_none")]
        pub ready_for_dns_record_management: Option<bool>,
        #[serde(rename = "managedHostNames", default, skip_serializing_if = "Vec::is_empty")]
        pub managed_host_names: Vec<HostName>,
        pub consent: DomainPurchaseConsent,
        #[serde(rename = "domainNotRenewableReasons", default, skip_serializing_if = "Vec::is_empty")]
        pub domain_not_renewable_reasons: Vec<String>,
        #[serde(rename = "dnsType", default, skip_serializing_if = "Option::is_none")]
        pub dns_type: Option<properties::DnsType>,
        #[serde(rename = "dnsZoneId", default, skip_serializing_if = "Option::is_none")]
        pub dns_zone_id: Option<String>,
        #[serde(rename = "targetDnsType", default, skip_serializing_if = "Option::is_none")]
        pub target_dns_type: Option<properties::TargetDnsType>,
        #[serde(rename = "authCode", default, skip_serializing_if = "Option::is_none")]
        pub auth_code: Option<String>,
    }
    pub mod properties {
        use super::*;
        /// Status of the domain registration.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum RegistrationStatus {
            Active,
            Awaiting,
            Cancelled,
            Confiscated,
            Disabled,
            Excluded,
            Expired,
            Failed,
            Held,
            Locked,
            Parked,
            Pending,
            Reserved,
            Reverted,
            Suspended,
            Transferred,
            Unknown,
            Unlocked,
            Unparked,
            Updated,
            JsonConverterFailed,
        }
        /// Provisioning state of the domain resource.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum ProvisioningState {
            Succeeded,
            Failed,
            Canceled,
            InProgress,
            Deleting,
        }
        /// Which DNS service currently hosts the domain.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum DnsType {
            AzureDns,
            DefaultDomainRegistrarDns,
        }
        /// Target DNS type when changing the domain's DNS hosting.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum TargetDnsType {
            AzureDns,
            DefaultDomainRegistrarDns,
        }
    }
}
/// Legal-agreement consent recorded when purchasing a domain.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainPurchaseConsent {
    #[serde(rename = "agreementKeys", default, skip_serializing_if = "Vec::is_empty")]
    pub agreement_keys: Vec<String>,
    #[serde(rename = "agreedBy", default, skip_serializing_if = "Option::is_none")]
    pub agreed_by: Option<String>,
    #[serde(rename = "agreedAt", default, skip_serializing_if = "Option::is_none")]
    pub agreed_at: Option<String>,
}
/// Search parameters for domain-name recommendations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DomainRecommendationSearchParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub keywords: Option<String>,
    #[serde(rename = "maxDomainRecommendations", default, skip_serializing_if = "Option::is_none")]
    pub max_domain_recommendations: Option<i32>,
}
/// A host name bound to a domain, plus the resource and DNS record that back it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HostName {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "siteNames", default, skip_serializing_if = "Vec::is_empty")]
    pub site_names: Vec<String>,
    #[serde(rename = "azureResourceName", default, skip_serializing_if = "Option::is_none")]
    pub azure_resource_name: Option<String>,
    #[serde(rename = "azureResourceType", default, skip_serializing_if = "Option::is_none")]
    pub azure_resource_type: Option<host_name::AzureResourceType>,
    #[serde(rename = "customHostNameDnsRecordType", default, skip_serializing_if = "Option::is_none")]
    pub custom_host_name_dns_record_type: Option<host_name::CustomHostNameDnsRecordType>,
    #[serde(rename = "hostNameType", default, skip_serializing_if = "Option::is_none")]
    pub host_name_type: Option<host_name::HostNameType>,
}
pub mod host_name {
    use super::*;
    /// Kind of resource the host name points at.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzureResourceType {
        Website,
        TrafficManager,
    }
    /// DNS record type used for the custom host name.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CustomHostNameDnsRecordType {
        CName,
        A,
    }
    /// Whether the host name is verified or managed.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HostNameType {
        Verified,
        Managed,
    }
}
/// A page of `NameIdentifier` values with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameIdentifierCollection {
    pub value: Vec<NameIdentifier>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Generic error body with an optional code and message.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// A legal agreement required for a top-level domain.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TldLegalAgreement {
    #[serde(rename = "agreementKey")]
    pub agreement_key: String,
    pub title: String,
    pub content: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
}
/// A page of `TldLegalAgreement` values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TldLegalAgreementCollection {
    pub value: Vec<TldLegalAgreement>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A purchasable top-level domain: proxy-only envelope plus properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TopLevelDomain {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<top_level_domain::Properties>,
}
pub mod top_level_domain {
    use super::*;
    /// Payload of a `TopLevelDomain`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub name: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub privacy: Option<bool>,
    }
}
/// Options controlling which TLD legal agreements are listed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TopLevelDomainAgreementOption {
    #[serde(rename = "includePrivacy", default, skip_serializing_if = "Option::is_none")]
    pub include_privacy: Option<bool>,
    #[serde(rename = "forTransfer", default, skip_serializing_if = "Option::is_none")]
    pub for_transfer: Option<bool>,
}
/// A page of `TopLevelDomain` resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TopLevelDomainCollection {
    pub value: Vec<TopLevelDomain>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A bare name wrapper used by name-availability style endpoints.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NameIdentifier {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// Common resource envelope: id, name, kind, required location, type, tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    pub location: String,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Resource envelope without location/tags (for proxy-only resources).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyOnlyResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// A page of `CsmOperationDescription` values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmOperationCollection {
    pub value: Vec<CsmOperationDescription>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Description of a supported management (CSM) operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmOperationDescription {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<CsmOperationDisplay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CsmOperationDescriptionProperties>,
}
/// Human-readable display strings for a CSM operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmOperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Extra properties of a CSM operation (currently its service specification).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CsmOperationDescriptionProperties {
    #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
    pub service_specification: Option<ServiceSpecification>,
}
/// Service specification: the metric definitions an operation exposes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceSpecification {
    #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_specifications: Vec<MetricSpecification>,
}
/// Definition of a single exposed metric (name, unit, aggregation, MDM
/// routing, dimensions and retention availabilities).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<String>,
    #[serde(rename = "supportsInstanceLevelAggregation", default, skip_serializing_if = "Option::is_none")]
    pub supports_instance_level_aggregation: Option<bool>,
    #[serde(rename = "enableRegionalMdmAccount", default, skip_serializing_if = "Option::is_none")]
    pub enable_regional_mdm_account: Option<bool>,
    #[serde(rename = "sourceMdmAccount", default, skip_serializing_if = "Option::is_none")]
    pub source_mdm_account: Option<String>,
    #[serde(rename = "sourceMdmNamespace", default, skip_serializing_if = "Option::is_none")]
    pub source_mdm_namespace: Option<String>,
    #[serde(rename = "metricFilterPattern", default, skip_serializing_if = "Option::is_none")]
    pub metric_filter_pattern: Option<String>,
    #[serde(rename = "fillGapWithZero", default, skip_serializing_if = "Option::is_none")]
    pub fill_gap_with_zero: Option<bool>,
    #[serde(rename = "isInternal", default, skip_serializing_if = "Option::is_none")]
    pub is_internal: Option<bool>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dimensions: Vec<Dimension>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub availabilities: Vec<MetricAvailability>,
}
/// A metric dimension (a queryable facet of a metric).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Dimension {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "internalName", default, skip_serializing_if = "Option::is_none")]
    pub internal_name: Option<String>,
    #[serde(rename = "toBeExportedForShoebox", default, skip_serializing_if = "Option::is_none")]
    pub to_be_exported_for_shoebox: Option<bool>,
}
/// Retention/granularity pairing available for a metric.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricAvailability {
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")]
    pub time_grain: Option<String>,
    #[serde(rename = "blobDuration", default, skip_serializing_if = "Option::is_none")]
    pub blob_duration: Option<String>,
}
|
use ::nalgebra::{
base::allocator::Allocator, base::dimension::DimName, DefaultAllocator, Dim, DimMin, U1,
};
use ::num_traits::float::Float;
/// Number of random samples drawn by the Monte Carlo default
/// implementations of `Distribution::mean` / `Distribution::variance`.
const STEPS: usize = 1_000;
/// The `Min` trait specifies that an object has a minimum value
pub trait Min<T> {
    /// Returns the minimum value in the domain of a given distribution
    /// if it exists, otherwise `None` (implementors without a minimum
    /// are expected to choose an `Option` for `T`).
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Min;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!(0.0, n.min());
    /// ```
    fn min(&self) -> T;
}
/// The `Max` trait specifies that an object has a maximum value
pub trait Max<T> {
    /// Returns the maximum value in the domain of a given distribution
    /// if it exists, otherwise `None` (implementors without a maximum
    /// are expected to choose an `Option` for `T`).
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Max;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!(1.0, n.max());
    /// ```
    fn max(&self) -> T;
}
/// Summary statistics for discrete distributions (those sampling `u64`).
/// Every default implementation returns `None` except `std_dev`, which is
/// derived from `variance`; implementors override where a closed form exists.
pub trait DiscreteDistribution<T: Float>: ::rand::distributions::Distribution<u64> {
    /// Returns the mean, if it exists.
    fn mean(&self) -> Option<T> {
        None
    }
    /// Returns the variance, if it exists.
    fn variance(&self) -> Option<T> {
        None
    }
    /// Returns the standard deviation, if it exists.
    /// Computed as the square root of `variance`.
    fn std_dev(&self) -> Option<T> {
        self.variance().map(|var| var.sqrt())
    }
    /// Returns the entropy, if it exists.
    fn entropy(&self) -> Option<T> {
        None
    }
    /// Returns the skewness, if it exists.
    fn skewness(&self) -> Option<T> {
        None
    }
}
/// Summary statistics for continuous distributions. `mean` and `variance`
/// fall back to Monte Carlo estimates over `STEPS` samples; the remaining
/// defaults return `None`.
pub trait Distribution<T: Float>: ::rand::distributions::Distribution<T> {
    /// Returns the mean, if it exists.
    /// The default implementation returns an estimation
    /// based on random samples. This is a crude estimate
    /// for when no further information is known about the
    /// distribution. More accurate statements about the
    /// mean can and should be given by overriding the
    /// default implementation.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Distribution;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!(0.5, n.mean().unwrap());
    /// ```
    fn mean(&self) -> Option<T> {
        // TODO: Does not need cryptographic rng
        let mut rng = ::rand::rngs::OsRng;
        let mut mean = T::zero();
        // Track the sample count in T to avoid a usize -> T conversion.
        let mut steps = T::zero();
        for _ in 0..STEPS {
            steps = steps + T::one();
            mean = mean + Self::sample(self, &mut rng);
        }
        // Plain sample average over STEPS draws.
        Some(mean / steps)
    }
    /// Returns the variance, if it exists.
    /// The default implementation returns an estimation
    /// based on random samples. This is a crude estimate
    /// for when no further information is known about the
    /// distribution. More accurate statements about the
    /// variance can and should be given by overriding the
    /// default implementation.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Distribution;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!(1.0 / 12.0, n.variance().unwrap());
    /// ```
    fn variance(&self) -> Option<T> {
        // TODO: Does not need cryptographic rng
        let mut rng = ::rand::rngs::OsRng;
        let mut mean = T::zero();
        let mut variance = T::zero();
        let mut steps = T::zero();
        // One-pass incremental (Welford-style) mean/variance update: the
        // sum of squared deviations is accumulated using the *pre-update*
        // mean, scaled by (n-1)/n, then the running mean is updated.
        for _ in 0..STEPS {
            steps = steps + T::one();
            let sample = Self::sample(self, &mut rng);
            variance = variance + (steps - T::one()) * (sample - mean) * (sample - mean) / steps;
            mean = mean + (sample - mean) / steps;
        }
        // Divide by n - 1 for the unbiased sample variance.
        steps = steps - T::one();
        Some(variance / steps)
    }
    /// Returns the standard deviation, if it exists.
    /// Computed as the square root of `variance`.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Distribution;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!((1f64 / 12f64).sqrt(), n.std_dev().unwrap());
    /// ```
    fn std_dev(&self) -> Option<T> {
        self.variance().map(|var| var.sqrt())
    }
    /// Returns the entropy, if it exists.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Distribution;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!(0.0, n.entropy().unwrap());
    /// ```
    fn entropy(&self) -> Option<T> {
        None
    }
    /// Returns the skewness, if it exists.
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::statistics::Distribution;
    /// use statrs::distribution::Uniform;
    ///
    /// let n = Uniform::new(0.0, 1.0).unwrap();
    /// assert_eq!(0.0, n.skewness().unwrap());
    /// ```
    fn skewness(&self) -> Option<T> {
        None
    }
}
/// The `MeanN` trait implements the calculation of a mean
/// (N-dimensional variant; the scalar mean lives on `Distribution`).
// TODO: Clarify the traits of multidimensional distributions
pub trait MeanN<T> {
fn mean(&self) -> Option<T>;
}
/// The `VarianceN` trait implements the calculation of a variance
/// (N-dimensional variant).
// TODO: Clarify the traits of multidimensional distributions
pub trait VarianceN<T> {
fn variance(&self) -> Option<T>;
}
/// The `Median` trait returns the median of the distribution.
pub trait Median<T> {
/// Returns the median.
///
/// Unlike `variance`/`entropy`, there is no sampled default: implementors
/// must provide a closed-form median.
///
/// # Examples
///
/// ```
/// use statrs::statistics::Median;
/// use statrs::distribution::Uniform;
///
/// let n = Uniform::new(0.0, 1.0).unwrap();
/// assert_eq!(0.5, n.median());
/// ```
fn median(&self) -> T;
}
/// The `Mode` trait specifies that an object has a closed form solution
/// for its mode(s)
pub trait Mode<T> {
/// Returns the mode, if one exists.
///
/// Note: `T` itself may be an `Option` (as in the example, where the
/// implementation is `Mode<Option<f64>>`), which is how "no mode" is
/// expressed despite the bare `-> T` return type.
///
/// # Examples
///
/// ```
/// use statrs::statistics::Mode;
/// use statrs::distribution::Uniform;
///
/// let n = Uniform::new(0.0, 1.0).unwrap();
/// assert_eq!(Some(0.5), n.mode());
/// ```
fn mode(&self) -> T;
}
|
use super::*;
/// Keccak-style sponge: `absorb` XORs rate-sized message blocks into the
/// state and permutes; `squeeze` reads output lanes back out.
/// NOTE(review): `rate` appears to be counted in bytes here (see the
/// `% self.rate` length check in `absorb`) — confirm against `Rate`'s
/// definition.
pub struct Sponge {
pub rate: Rate,
capacity: Capacity,
keccak_f: KeccakF,
}
impl Sponge {
/// Creates a sponge with the given rate/capacity and an inner Keccak-f
/// permutation of `width` bits.
pub fn new(rate: Rate, capacity: Capacity, width: StateBitsWidth) -> Sponge {
Sponge {
rate: rate,
capacity: capacity,
keccak_f: KeccakF::new(width),
}
}
/// Absorbs `message` into `state`, one rate-sized block at a time,
/// permuting after each block. The message length must be an exact
/// multiple of the rate (any padding is the caller's responsibility).
pub fn absorb(&self, mut state: &mut State, message: &BytesArr) {
assert!(
message.len() % self.rate == 0,
"Message is not divisible entirely by bytes rate"
);
let chunks_total = message.len() / self.rate;
let words: Vec<u64> = Sponge::bits_to_u64_words_le(message);
for chunk_i in 0..chunks_total {
// rate / 8 = number of 64-bit lanes XORed in per block.
let chunk_offset: usize = chunk_i * (self.rate / 8);
let mut x = 0;
let mut y = 0;
for i in 0..(self.rate / 8) {
let word = words[chunk_offset + i];
// Lanes fill with the first state index varying fastest.
state[x][y] ^= word;
if x < 5 - 1 {
x += 1;
} else {
y += 1;
x = 0;
}
}
// Apply the Keccak-f permutation after each absorbed block.
self.keccak_f.permutations(&mut state);
}
}
/// Squeezes `capacity / 2` bytes of output from `state`, reading lanes
/// with the first state index varying fastest (matching `absorb`'s fill
/// order) and truncating to the exact output length.
pub fn squeeze(&self, state: &mut State) -> BytesVec {
let mut output: Vec<u8> = vec![];
// Output-length convention used by this crate: half the capacity, in bytes.
let output_len: usize = self.capacity / 2;
let elems_total: usize = output_len / 8;
let mut counter: usize = 0;
'outer: for i in 0..5 {
for j in 0..5 {
output.append(&mut state[j][i].to_le_bytes().to_vec());
if counter == elems_total {
break 'outer;
}
counter += 1;
}
}
// resize() trims any surplus bytes from the last appended lane.
output.resize(output_len, 0);
output
}
/// Reinterprets the message bytes as little-endian u64 lanes.
/// NOTE(review): despite the name, this consumes *bytes* (8 per word),
/// not bits.
fn bits_to_u64_words_le(message: &BytesArr) -> Vec<u64> {
let words_total = message.len() / 8;
let mut words: Vec<u64> = vec![0; words_total];
for i in 0..words_total {
let mut word_bits: [u8; 8] = Default::default();
word_bits.copy_from_slice(&message[i * 8..i * 8 + 8]);
words[i] = u64::from_le_bytes(word_bits);
}
words
}
}
|
#![allow(clippy::op_ref, clippy::type_complexity)]
#![cfg(not(ci))]
use ark_ec::{CurveCycle, PairingEngine, PairingFriendlyCycle};
use ark_ed_on_mnt4_298::EdwardsParameters;
use ark_ff::{One, PrimeField};
use ark_marlin::constraints::snark::{MarlinSNARK, MarlinSNARKGadget};
use ark_marlin::fiat_shamir::constraints::FiatShamirAlgebraicSpongeRngVar;
use ark_marlin::fiat_shamir::poseidon::constraints::PoseidonSpongeVar;
use ark_marlin::fiat_shamir::poseidon::PoseidonSponge;
use ark_marlin::fiat_shamir::FiatShamirAlgebraicSpongeRng;
use ark_marlin::MarlinConfig;
use ark_mnt4_298::constraints::PairingVar as MNT4PairingVar;
use ark_mnt4_298::{Fq, Fr, MNT4_298};
use ark_mnt6_298::constraints::PairingVar as MNT6PairingVar;
use ark_mnt6_298::MNT6_298;
use ark_pcd::ec_cycle_pcd::{ECCyclePCD, ECCyclePCDConfig};
use ark_pcd::variable_length_crh::bowe_hopwood::constraints::VariableLengthBoweHopwoodCompressedCRHGadget;
use ark_pcd::variable_length_crh::bowe_hopwood::VariableLengthBoweHopwoodCompressedCRH;
use ark_pcd::{PCDPredicate, PCD};
use ark_poly::polynomial::univariate::DensePolynomial;
use ark_poly_commit::marlin_pc::{MarlinKZG10, MarlinKZG10Gadget};
use ark_r1cs_std::bits::boolean::Boolean;
use ark_r1cs_std::eq::EqGadget;
use ark_r1cs_std::fields::fp::FpVar;
use ark_relations::r1cs::ConstraintSystemRef;
use ark_relations::r1cs::SynthesisError;
use core::marker::PhantomData;
use rand_chacha::ChaChaRng;
// MNT4-298 / MNT6-298 form a pairing-friendly cycle of curves; the two
// structs below describe that cycle from each direction.
#[derive(Copy, Clone, Debug)]
pub struct Mnt46298Cycle;
impl CurveCycle for Mnt46298Cycle {
type E1 = <MNT4_298 as PairingEngine>::G1Affine;
type E2 = <MNT6_298 as PairingEngine>::G1Affine;
}
impl PairingFriendlyCycle for Mnt46298Cycle {
type Engine1 = MNT4_298;
type Engine2 = MNT6_298;
}
/// The same cycle viewed with MNT6-298 first.
#[derive(Copy, Clone, Debug)]
pub struct Mnt64298Cycle;
impl CurveCycle for Mnt64298Cycle {
type E1 = <MNT6_298 as PairingEngine>::G1Affine;
type E2 = <MNT4_298 as PairingEngine>::G1Affine;
}
impl PairingFriendlyCycle for Mnt64298Cycle {
type Engine1 = MNT6_298;
type Engine2 = MNT4_298;
}
// Fiat-Shamir sponge RNGs (FS*), polynomial-commitment gadgets (PCGadget*)
// and their constraint-system variants (FSG*) for each side of the cycle:
// the "4" aliases work over Fr, the "6" aliases over Fq.
type FS4 = FiatShamirAlgebraicSpongeRng<Fr, Fq, PoseidonSponge<Fq>>;
type FS6 = FiatShamirAlgebraicSpongeRng<Fq, Fr, PoseidonSponge<Fr>>;
type PCGadget4 = MarlinKZG10Gadget<Mnt64298Cycle, DensePolynomial<Fr>, MNT4PairingVar>;
type PCGadget6 = MarlinKZG10Gadget<Mnt46298Cycle, DensePolynomial<Fq>, MNT6PairingVar>;
type FSG4 = FiatShamirAlgebraicSpongeRngVar<Fr, Fq, PoseidonSponge<Fq>, PoseidonSpongeVar<Fq>>;
type FSG6 = FiatShamirAlgebraicSpongeRngVar<Fq, Fr, PoseidonSponge<Fr>, PoseidonSpongeVar<Fr>>;
/// Marlin configuration for these tests.
#[derive(Clone)]
pub struct TestMarlinConfig;
impl MarlinConfig for TestMarlinConfig {
// Recursion-friendly parameters are required when Marlin proofs are
// verified inside another circuit (as PCD does).
const FOR_RECURSION: bool = true;
}
/// Wiring of the EC-cycle PCD scheme: a Bowe-Hopwood CRH for message
/// compression plus Marlin SNARKs (and their gadgets) on both sides of the
/// MNT4/MNT6 cycle.
pub struct PCDMarlin;
impl ECCyclePCDConfig<Fr, Fq> for PCDMarlin {
type CRH = VariableLengthBoweHopwoodCompressedCRH<ChaChaRng, EdwardsParameters>;
type CRHGadget = VariableLengthBoweHopwoodCompressedCRHGadget<ChaChaRng, EdwardsParameters>;
type MainSNARK =
MarlinSNARK<Fr, Fq, MarlinKZG10<MNT4_298, DensePolynomial<Fr>>, FS4, TestMarlinConfig>;
type HelpSNARK =
MarlinSNARK<Fq, Fr, MarlinKZG10<MNT6_298, DensePolynomial<Fq>>, FS6, TestMarlinConfig>;
type MainSNARKGadget = MarlinSNARKGadget<
Fr,
Fq,
MarlinKZG10<MNT4_298, DensePolynomial<Fr>>,
FS4,
TestMarlinConfig,
PCGadget4,
FSG4,
>;
type HelpSNARKGadget = MarlinSNARKGadget<
Fq,
Fr,
MarlinKZG10<MNT6_298, DensePolynomial<Fq>>,
FS6,
TestMarlinConfig,
PCGadget6,
FSG6,
>;
}
/// Minimal PCD predicate used by these tests: a message is valid when it
/// equals the (single) prior message plus the local witness.
pub struct TestPredicate<F: PrimeField> {
pub field_phantom: PhantomData<F>,
}
impl<F: PrimeField> TestPredicate<F> {
fn new() -> Self {
Self {
field_phantom: PhantomData,
}
}
}
// Hand-written Clone: only the PhantomData is "cloned", with no reliance on
// a derived `F: Clone` bound.
impl<F: PrimeField> Clone for TestPredicate<F> {
fn clone(&self) -> Self {
Self {
field_phantom: PhantomData,
}
}
}
impl<F: PrimeField> PCDPredicate<F> for TestPredicate<F> {
type Message = F;
type MessageVar = FpVar<F>;
type LocalWitness = F;
type LocalWitnessVar = FpVar<F>;
// Each step consumes exactly one prior message.
const PRIOR_MSG_LEN: usize = 1;
fn generate_constraints(
&self,
_cs: ConstraintSystemRef<F>,
msg: &Self::MessageVar,
witness: &Self::LocalWitnessVar,
prior_msgs: &[Self::MessageVar],
_base_case: &Boolean<F>,
) -> Result<(), SynthesisError> {
// Enforce msg == prior_msgs[0] + witness.
let msg_supposed = &prior_msgs[0] + witness;
msg_supposed.enforce_equal(&msg)?;
Ok(())
}
}
type TestPCD = ECCyclePCD<Fr, Fq, PCDMarlin>;
/// End-to-end PCD chain with circuit-specific setup: prove 1 -> 2 -> 3
/// (each step adds the witness `val_1`), verify each step, and check that
/// a proof does NOT verify against the wrong claimed message.
#[test]
fn test_marlin_pcd() {
let val_1 = Fr::one();
let circ = TestPredicate::<Fr>::new();
let mut rng = ark_std::test_rng();
let (pk, vk) = TestPCD::circuit_specific_setup(&circ, &mut rng).unwrap();
// Base case: no prior messages or proofs.
let proof_1 = TestPCD::prove(&pk, &circ, &val_1, &val_1, &[], &[], &mut rng).unwrap();
assert!(TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_1, &proof_1).unwrap());
let val_2 = val_1 + &val_1;
let val_3 = val_1 + &val_2;
let proof_2 =
TestPCD::prove(&pk, &circ, &val_2, &val_1, &[val_1], &[proof_1], &mut rng).unwrap();
assert!(TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_2, &proof_2).unwrap());
let proof_3 =
TestPCD::prove(&pk, &circ, &val_3, &val_1, &[val_2], &[proof_2], &mut rng).unwrap();
assert!(TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_3, &proof_3).unwrap());
// Soundness spot-check: proof for val_3 must not verify against val_1.
assert!(!TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_1, &proof_3).unwrap());
}
/// Same 1 -> 2 -> 3 PCD chain as `test_marlin_pcd`, but using universal
/// setup: a size-bounded universal SRS is generated once, then the circuit
/// is indexed against it.
#[test]
fn test_marlin_universal_pcd() {
use ark_marlin::constraints::snark::MarlinBound;
use ark_pcd::UniversalSetupPCD;
use ark_snark::UniversalSetupSNARK;
let val_1 = Fr::one();
let val_2 = val_1 + &val_1;
let val_3 = val_1 + &val_2;
let circ = TestPredicate::<Fr>::new();
let mut rng = ark_std::test_rng();
// Bound on the supported computation size for the universal SRS.
let bound: <MarlinSNARK<
Fr,
Fq,
MarlinKZG10<MNT4_298, DensePolynomial<Fr>>,
FS4,
TestMarlinConfig,
> as UniversalSetupSNARK<Fr>>::ComputationBound = MarlinBound { max_degree: 10 };
let pp = TestPCD::universal_setup(&bound, &mut rng).unwrap();
let (pk, vk) = TestPCD::index(&pp, &circ, &mut rng).unwrap();
let proof_1 = TestPCD::prove(&pk, &circ, &val_1, &val_1, &[], &[], &mut rng).unwrap();
assert!(TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_1, &proof_1).unwrap());
let proof_2 =
TestPCD::prove(&pk, &circ, &val_2, &val_1, &[val_1], &[proof_1], &mut rng).unwrap();
assert!(TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_2, &proof_2).unwrap());
let proof_3 =
TestPCD::prove(&pk, &circ, &val_3, &val_1, &[val_2], &[proof_2], &mut rng).unwrap();
assert!(TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_3, &proof_3).unwrap());
// Soundness spot-check: proof for val_3 must not verify against val_1.
assert!(!TestPCD::verify::<TestPredicate<Fr>>(&vk, &val_1, &proof_3).unwrap());
}
|
/*!
```rudra-poc
[target]
crate = "messagepack-rs"
version = "0.8.0"
[report]
issue_url = "https://github.com/otake84/messagepack-rs/issues/2"
issue_date = 2021-01-26
rustsec_url = "https://github.com/RustSec/advisory-db/pull/835"
rustsec_id = "RUSTSEC-2021-0092"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "UninitExposure"
bug_count = 4
rudra_report_locations = [
"src/deserializable.rs:59:5: 64:6",
"src/deserializable.rs:66:5: 71:6",
"src/deserializable.rs:130:5: 135:6",
"src/deserializable.rs:82:9: 93:10",
]
```
!*/
#![forbid(unsafe_code)]
// Advisory placeholder: the upstream report (see the header metadata above)
// shipped no proof-of-concept, so this binary only records that fact.
fn main() {
panic!("This issue was reported without PoC");
}
|
/// Project Euler #9: finds the product `a * b * c` of the Pythagorean
/// triplet (`a^2 + b^2 = c^2`) with `a + b + c = 1000`.
///
/// Returns `Some(31_875_000)` — the triplet is (200, 375, 425); `None`
/// would only occur if no such triplet existed.
pub fn find() -> Option<u32> {
    (1..999)
        .flat_map(|a| (1..(999 - a)).map(move |b| (a, b, 1000 - a - b)))
        // `find` short-circuits on the first match; clearer than the old
        // `.filter(...).nth(0)` (clippy: iter_nth_zero).
        .find(|&(a, b, c)| a * a + b * b == c * c)
        .map(|(a, b, c)| a * b * c)
}
|
use sync_resolve::hosts::{host_file, load_hosts};
/// Loads the system host table and prints every host (and each of its
/// aliases) together with the address it resolves to. Load failures are
/// reported to stdout rather than panicking.
fn main() {
let path = host_file();
println!("Loading host table from {}", path.display());
// `println!()` prints a bare newline; `println!("")` is the non-idiomatic
// equivalent (clippy: println_empty_string).
println!();
let table = match load_hosts(&path) {
Ok(t) => t,
Err(e) => {
println!("Failed to load host table: {}", e);
return;
}
};
for host in &table.hosts {
println!(" {:<20} points to {}", host.name, host.address);
for alias in &host.aliases {
println!(" {:<20} points to {}", alias, host.address);
}
}
}
|
use anyhow::Result;
use ndarray::Array2;
use ndarray_stats::*;
use std::path::{Path, PathBuf};
// use ndarray::parallel::prelude::*;
use crate::graph::Graph;
use crate::io;
use crate::rank;
use crate::Rank;
/// Reads an expression matrix CSV, computes the Pearson correlation matrix,
/// ranks it, and writes an HRR- or MR-based co-expression network to CSV.
///
/// * `input` - path to the expression CSV (rows indexed by gene name)
/// * `output` - output path; defaults to `hrr_based_network.csv` /
///   `mr_based_network.csv` depending on `method`
/// * `method` - ranking method; defaults to `Rank::HRR`
/// * `log2` - when true, log2-transform the matrix after adding `psede_count`
/// * `psede_count` - pseudo-count added before the log2 transform
///   (NOTE(review): name looks like a typo for `pseudo_count`; kept as-is
///   for interface stability)
/// * `rank_cutoff` / `pcc_cutoff` - optional edge filters for the network
pub fn parse_args(
input: &Path,
output: Option<&PathBuf>,
method: Option<&Rank>,
log2: &bool,
psede_count: &f64,
rank_cutoff: Option<&usize>,
pcc_cutoff: Option<&f64>,
) -> Result<()> {
info!("--- start read {} ---", input.to_str().unwrap());
info!("log2 transform: {}, psede_count: {}", log2, psede_count);
// read csv and make ndarray::Array2
let mut index: Vec<String> = vec![];
let mut arr = io::read_exp_csv(input, &mut index)?;
if *log2 {
arr.par_mapv_inplace(|x| (x + psede_count).log2());
}
debug!("exp_matrix: \n{:?}", arr);
// calc correlation
let corr = arr.pearson_correlation()?;
debug!("{:?}", corr.shape());
debug!("corr_matrix: \n{:?}", corr);
// NOTE(review): this debug line indexes gene 4 and panics for inputs with
// fewer than 5 genes — confirm the minimum expected input size.
debug!(
"corr[0, 4]: {} {} : {:?}",
&index[0],
&index[4],
corr[[0, 4]]
);
// calc rank matrix
info!("calculate rank matrix...");
let array_size = index.len();
let rank_arr: Array2<usize> = rank::construct_rank_matrix_multithreading(&corr, array_size)?;
// construct hrr based network
info!("construct rank based network...");
let method = method.unwrap_or(&Rank::HRR);
match method {
Rank::HRR => {
info!("Method: HRR");
let mut g: Graph<usize> = Graph::new(&index);
g.construct_hrr_network(corr, rank_arr, rank_cutoff, pcc_cutoff);
let default_path = PathBuf::from("hrr_based_network.csv");
let out_path = output.unwrap_or(&default_path);
io::graph_to_csv(out_path.clone(), g)?;
}
Rank::MR => {
info!("Method: MR");
let mut g: Graph<f64> = Graph::new(&index);
// MR works on an f64 cutoff; Option::map replaces the previous
// hand-rolled Some/None match (clippy: manual_map).
let rank_cutoff = rank_cutoff.map(|&rc| rc as f64);
g.construct_mr_network(corr, rank_arr, rank_cutoff.as_ref(), pcc_cutoff);
let default_path = PathBuf::from("mr_based_network.csv");
let out_path = output.unwrap_or(&default_path);
io::graph_to_csv(out_path.clone(), g)?;
}
}
info!("Finish!");
Ok(())
}
|
// a crust is full of resources
pub mod start;
pub mod capalloc;
pub mod vspace;
// TODO: find a better place
/// Capability slot of the initial CNode, as assigned by the kernel.
pub const ROOT_SLOT: usize = ::mantle::kernel::CAP_INIT_CNODE;
/// Capability slot of the initial VSpace (root page directory).
pub const ROOT_PAGEDIR: usize = ::mantle::kernel::CAP_INIT_VSPACE;
pub const ROOT_BITS: usize = 64; // TODO: maybe this should be 32?
|
use ir::UnaryInst;
/// Common supertype for all LLVM cast instructions; a thin newtype over
/// `UnaryInst` (single operand: the value being cast).
pub struct CastInst<'ctx>(UnaryInst<'ctx>);
impl_subtype!(CastInst => UnaryInst);
/// Define a generic cast instruction.
///
/// Expands to a newtype `$name` over `CastInst`, a `new(value, ty)`
/// constructor that calls the FFI constructor `$crate::sys::$ctor`, and the
/// subtype-conversion impls via `impl_subtype!`.
macro_rules! define_cast_instruction {
($name:ident => $ctor:ident) => {
pub struct $name<'ctx>($crate::ir::CastInst<'ctx>);
impl<'ctx> $name<'ctx>
{
/// Creates a new instruction.
pub fn new(value: &$crate::ir::Value,
ty: &$crate::ir::Type) -> Self {
use $crate::SafeWrapper;
use $crate::ir::{User, Instruction, UnaryInst, CastInst};
unsafe {
// SAFETY-NOTE(review): relies on the FFI constructor returning a
// valid value reference; wrap_value! builds the wrapper chain.
let inner = $crate::sys::$ctor(value.inner(), ty.inner());
wrap_value!(inner => User => Instruction => UnaryInst => CastInst => $name)
}
}
}
impl_subtype!($name => CastInst);
}
}
// One wrapper type per LLVM cast opcode, each backed by the matching
// LLVMRustCreate* FFI constructor.
define_cast_instruction!(AddrSpaceCastInst => LLVMRustCreateAddrSpaceCastInst);
define_cast_instruction!(BitCastInst => LLVMRustCreateBitCastInst);
define_cast_instruction!(FPExtInst => LLVMRustCreateFPExtInst);
define_cast_instruction!(FPToSIInst => LLVMRustCreateFPToSIInst);
define_cast_instruction!(FPToUIInst => LLVMRustCreateFPToUIInst);
define_cast_instruction!(FPTruncInst => LLVMRustCreateFPTruncInst);
define_cast_instruction!(IntToPtrInst => LLVMRustCreateIntToPtrInst);
define_cast_instruction!(PtrToIntInst => LLVMRustCreatePtrToIntInst);
define_cast_instruction!(SExtInst => LLVMRustCreateSExtInst);
define_cast_instruction!(SIToFPInst => LLVMRustCreateSIToFPInst);
define_cast_instruction!(TruncInst => LLVMRustCreateTruncInst);
define_cast_instruction!(UIToFPInst => LLVMRustCreateUIToFPInst);
define_cast_instruction!(ZExtInst => LLVMRustCreateZExtInst);
|
// svd2rust-generated read-only accessors for the USB frame number register
// (FNR). Generated code — regenerate from the SVD rather than hand-editing.
#[doc = "Register `FNR` reader"]
pub type R = crate::R<FNR_SPEC>;
#[doc = "Field `FN` reader - FN"]
pub type FN_R = crate::FieldReader<u16>;
#[doc = "Field `LSOF` reader - LSOF"]
pub type LSOF_R = crate::FieldReader;
#[doc = "Field `LCK` reader - LCK"]
pub type LCK_R = crate::BitReader;
#[doc = "Field `RXDM` reader - RXDM"]
pub type RXDM_R = crate::BitReader;
#[doc = "Field `RXDP` reader - RXDP"]
pub type RXDP_R = crate::BitReader;
impl R {
#[doc = "Bits 0:10 - FN"]
#[inline(always)]
pub fn fn_(&self) -> FN_R {
FN_R::new((self.bits & 0x07ff) as u16)
}
#[doc = "Bits 11:12 - LSOF"]
#[inline(always)]
pub fn lsof(&self) -> LSOF_R {
LSOF_R::new(((self.bits >> 11) & 3) as u8)
}
#[doc = "Bit 13 - LCK"]
#[inline(always)]
pub fn lck(&self) -> LCK_R {
LCK_R::new(((self.bits >> 13) & 1) != 0)
}
#[doc = "Bit 14 - RXDM"]
#[inline(always)]
pub fn rxdm(&self) -> RXDM_R {
RXDM_R::new(((self.bits >> 14) & 1) != 0)
}
#[doc = "Bit 15 - RXDP"]
#[inline(always)]
pub fn rxdp(&self) -> RXDP_R {
RXDP_R::new(((self.bits >> 15) & 1) != 0)
}
}
#[doc = "USB frame number register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fnr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FNR_SPEC;
impl crate::RegisterSpec for FNR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`fnr::R`](R) reader structure"]
impl crate::Readable for FNR_SPEC {}
#[doc = "`reset()` method sets FNR to value 0"]
impl crate::Resettable for FNR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use std::{
fs::{File, remove_dir_all, remove_file},
path::{PathBuf},
io::Read,
};
use bincode::{serialize_into, deserialize_from};
use chrono::prelude::*;
use tera::{Tera, Context};
use toml;
use walkdir::WalkDir;
use project::{Project};
use error::{StateError, StateResult};
use website::{Website, Color};
use build::{IndexProject, Page};
use template::Template;
/// Serialized application state for one website project: where the content
/// lives (`source`), where the site is generated (`destination`), the site
/// model itself, and UI/build bookkeeping.
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SiteState {
pub source: PathBuf,
pub destination: PathBuf,
pub website: Website,
pub selected_project: Option<Project>,
pub last_built: Option<DateTime<Local>>,
pub template: String,
}
/// Pre-`template` layout of `SiteState`, kept so cache files written by
/// older versions still deserialize (see `SiteState::get`).
#[derive(Serialize, Deserialize, Debug, Default, Clone)]
#[serde(rename_all = "camelCase")]
pub struct OldSiteState {
pub source: PathBuf,
pub destination: PathBuf,
pub website: Website,
pub selected_project: Option<Project>,
pub last_built: Option<DateTime<Local>>,
}
/// Upgrades an `OldSiteState` cache entry to the current layout, filling the
/// missing `template` field with the default template name.
///
/// Implemented as `From` rather than a manual `Into`: the standard blanket
/// impl derives `Into<SiteState> for OldSiteState` automatically, so existing
/// `.into()` call sites keep working.
impl From<OldSiteState> for SiteState {
fn from(old: OldSiteState) -> SiteState {
SiteState {
template: default_template(),
source: old.source,
destination: old.destination,
website: old.website,
selected_project: old.selected_project,
last_built: old.last_built,
}
}
}
/// Name of the template assigned to site states that predate the
/// `template` field.
fn default_template() -> String {
String::from("Default")
}
impl SiteState {
/// Creates a new state rooted at `path` with a fresh `Website` titled
/// `title`; every other field takes its `Default` value.
pub fn new(title: &String, path: &PathBuf) -> SiteState {
SiteState {
source: path.clone(),
website: Website::new(title),
..SiteState::default()
}
}
/// Loads the cached state from `<path>/.site_builder`.
///
/// An empty (freshly created) cache file yields a default state rooted at
/// `path` with the source folders ensured on disk; otherwise the file is
/// deserialized, falling back to the legacy `OldSiteState` layout.
pub fn get(path: &PathBuf) -> Result<SiteState, StateError> {
let p = path.join(".site_builder");
let mut f = super::get_cache_file(&p)?;
// Zero-length file: the cache was just created — start from scratch.
if f.metadata()?.len() == 0 {
let s =
SiteState {
source: path.clone(),
destination: PathBuf::default(),
website: Website::default(),
selected_project: None,
last_built: None,
template: "Default".to_string(),
};
s.ensure_dir_defaults();
return Ok(s)
}
let mut buf: Vec<u8> = Vec::with_capacity(f.metadata()?.len() as usize);
f.read_to_end(&mut buf)?;
// Try the current layout first, then the pre-`template` layout.
if let Ok(ret) = deserialize_from(buf.as_slice()) {
Ok(ret)
} else {
let old_state: OldSiteState = deserialize_from(buf.as_slice())?;
Ok(old_state.into())
}
}
/// use the source property of a state instance to
/// get the current file structure/content
pub fn update_from_source(&mut self) -> StateResult {
// Only the immediate children of `source` are inspected.
for entry in WalkDir::new(&self.source).min_depth(1).max_depth(1) {
let entry = entry?;
let name = entry.file_name();
if name == "portfolio" {
self.website.update_projects_from_source(entry.path());
} else if name == "about.md" {
self.website.about = super::content(entry.path());
} else if name == "me.jpg" {
self.website.image = entry.path().to_path_buf();
}
}
Ok(String::from("Successfully refreshed site from folder"))
}
/// Adds a new project under `source` and persists the input tree to disk.
pub fn add_project(&mut self, name: String) -> StateResult {
self.website.add_project(&self.source, name);
match self.write_input() {
Ok(()) => Ok(String::from("Successfully added project")),
Err(e) => Err(StateError::new(&format!("Unable to create project, {:?}", e))),
}
}
/// Replaces the stored copy of `project` in the website model.
pub fn update_project(&mut self, project: Project) {
self.website.update_project(project)
}
/// save a copy of the state to a cache file
/// Serialization failures are logged to stdout, not propagated.
pub fn cache(&self) {
println!("cache_state");
if let Some(f) = self.cache_file() {
match serialize_into(&f, self) {
Ok(_) => (),
Err(e) => println!("{:?}", e),
}
} else {
println!("Error getting cache file")
}
}
/// Updates the website's title.
pub fn update_title(&mut self, title: String) {
self.website.update_title(title);
}
/// Selects the template used when building the site.
pub fn set_template(&mut self, template: String) {
self.template = template;
}
/// Attempt to get the cache file (`<source>/.site_builder`), creating it
/// if needed. Returns `None` when the file cannot be opened/created.
/// NOTE(review): an older comment mentioned `~/.website_builder`; the path
/// used here is inside `source` — confirm which is intended.
fn cache_file(&self) -> Option<File> {
// `.ok()` replaces the manual `if let Ok(..) { Some(..) } else { None }`
// (clippy: manual_ok / manual_map).
super::get_cache_file(&self.source.join(".site_builder")).ok()
}
/// Writes the editable input tree back to `source`: one folder per project
/// (content.md, meta.toml, img/), plus the top-level about.md.
pub fn write_input(&self) -> ::std::io::Result<()> {
for project in self.website.portfolio.iter() {
let path = &project.path;
super::ensure_folder(path)?;
super::write_file(&mut project.description.clone(), path.join("content.md"))?;
// meta.toml is best-effort: a serialization failure skips the file.
if let Ok(mut m) = toml::to_string(&project.meta) {
super::write_file(&mut m, path.join("meta.toml"))?;
}
super::ensure_folder(&path.join("img"))?;
}
super::write_file(&mut self.website.about.clone(), self.source.join("about.md"))?;
Ok(())
}
/// Ensure that all of the top level files and folders are
/// included in the source dir
/// Failures are logged to stdout and not propagated.
pub fn ensure_dir_defaults(&self) {
println!("Ensuring Fonts Folder");
if let Err(e) = super::ensure_folder(&self.source.join("fonts")) {
println!("Error ensuring folder: {:?}", e);
}
println!("Ensuring Portfolio Folder");
if let Err(e) = super::ensure_folder(&self.source.join("portfolio")) {
println!("Error ensuring folder: {:?}", e);
}
println!("Ensuring about.md");
if let Err(e) = super::write_file("", self.source.join("about.md")) {
println!("Error ensuring about.md: {:?}", e);
}
}
/// Delete and recreate the output directory
/// we don't want to have any old files laying around
/// so we want to wipe everything first
pub fn ensure_out_dir_defaults(&self) -> StateResult {
super::ensure_folder(&self.destination)?;
// Wipe every direct child of the destination, file or directory.
for entry in WalkDir::new(&self.destination).max_depth(1).min_depth(1) {
let entry = entry?;
if entry.file_type().is_dir() {
remove_dir_all(&entry.path())?;
} else {
remove_file(&entry.path())?;
}
}
super::ensure_folder(&self.destination.join("fonts"))?;
super::ensure_folder(&self.destination.join("portfolio"))?;
super::ensure_folder(&self.destination.join("contact"))?;
super::ensure_folder(&self.destination.join("about"))?;
Ok(String::from("Successfully removed and created output directories"))
}
/// Copies a font file into `<source>/fonts` and records it as the bold or
/// normal font, first removing any previously registered font of that kind.
pub fn add_font(&mut self, path: &PathBuf, bold: bool) -> StateResult {
self.remove_font(bold)?;
let msg = if bold {
let path = super::copy_file(&path, &self.source.join("fonts"))?;
self.website.fonts.bold = Some(path);
"Successfully added bold font"
} else {
let path = super::copy_file(&path, &self.source.join("fonts"))?;
println!("normal font: {:?}", &path);
self.website.fonts.normal = Some(path);
"Successfully added normal font"
};
Ok(msg.into())
}
pub fn remove_font(&mut self, bold: bool) -> StateResult {
if bold {
if let Some(ref p) = self.website.fonts.bold {
if p.exists() {
super::remove(p).map_err(|_| format!("Error removing old font {:?}", &self.website.fonts.bold))?;
}
}
self.website.fonts.bold = None;
} else {
if let Some(ref p) = self.website.fonts.normal {
if p.exists() {
super::remove(p).map_err(|_| format!("Error removing old font {:?}", &self.website.fonts.bold))?;
}
}
self.website.fonts.normal = None;
};
Ok(String::new())
}
/// Renders the whole site into `destination` using `template`: registers
/// the Tera templates, wipes/recreates the output tree, then builds the
/// fonts, index, contact, about, and portfolio pages in order.
pub fn build(&self, template: &Template) -> StateResult {
let mut t = Tera::default();
t.add_raw_templates(vec![
("base.html", template.base.as_str()),
("about.html", template.about.as_str()),
("contact.html", template.contact.as_str()),
("index.html", template.index.as_str()),
("page.html", template.page.as_str()),
])?;
self.ensure_out_dir_defaults()?;
self.move_fonts()?;
self.build_index(&t)?;
self.build_contact(&t)?;
self.build_about(&t)?;
self.build_portfolio(&t)?;
Ok(String::from("Successfully built site!"))
}
/// Copies whichever fonts are registered into `<destination>/fonts`.
fn move_fonts(&self,) -> StateResult {
if let Some(ref normal) = self.website.fonts.normal {
super::copy_file(normal, &self.destination.join("fonts"))?;
}
if let Some(ref bold) = self.website.fonts.bold {
super::copy_file(bold, &self.destination.join("fonts"))?;
}
Ok(String::from("Moved fonts"))
}
/// Renders index.html with a summary entry per portfolio project.
fn build_index(&self, t: &Tera) -> StateResult {
let mut ctx = self.get_context("index");
let pages: Vec<IndexProject> = self.website.portfolio.iter().map(IndexProject::from).collect();
ctx.add("pages", &pages);
let html = t.render("index.html", &ctx)?;
super::write_file(&html, self.destination.join("index.html"))?;
Ok(String::from("Successfully built index"))
}
/// Renders the contact page into `<destination>/contact/index.html`.
fn build_contact(&self, t: &Tera) -> StateResult {
let ctx = self.get_context("contact");
let html = t.render("contact.html", &ctx)?;
super::write_file(&html, self.destination.join("contact").join("index.html"))?;
Ok(String::from("Successfully built contact"))
}
/// Renders the about page: markdown -> HTML, plus the profile image copied
/// into the destination root.
fn build_about(&self, t: &Tera) -> StateResult {
let mut ctx = self.get_context("about");
let html = super::generate_html(&self.website.about);
ctx.add("content", &html);
let path = super::copy_file(&self.website.image, &self.destination)?;
let img = super::file_name(&path);
ctx.add("image", &img);
let body = t.render("about.html", &ctx)?;
super::write_file(&body, self.destination.join("about").join("index.html"))?;
Ok(String::from("Successfully Built about"))
}
/// Renders one page per portfolio project, copying each project's images
/// into its `img/` folder first.
fn build_portfolio(&self, t: &Tera) -> StateResult {
for proj in self.website.portfolio.iter() {
let page = Page::from(proj);
let project_dest = self.ensure_project_folder(&page.project_folder)?;
for img in proj.images.iter() {
super::copy_file(&img.path, &project_dest.join("img"))?;
}
self.build_portfolio_page(t, &page, &project_dest)?;
}
Ok(String::from("Successfully Built portfolio"))
}
/// Creates `<destination>/portfolio/<folder_name>/img` and returns the
/// project folder path.
fn ensure_project_folder(&self, folder_name: &String) -> Result<PathBuf, StateError> {
let project_path = self.destination.join("portfolio").join(folder_name);
let img_path = project_path.join("img");
super::ensure_folder(&img_path)?;
Ok(project_path)
}
/// Renders a single project page into `<dest>/index.html`.
fn build_portfolio_page(&self, t: &Tera, page: &Page, dest: &PathBuf) -> StateResult {
let mut ctx = self.get_context("portfolio");
ctx.add("page", &page);
let html = t.render("page.html", &ctx)?;
super::write_file(&html, dest.join("index.html"))?;
Ok(String::from("Successfully built project"))
}
/// Builds the Tera context shared by every page: current route, fonts,
/// accent color, and site title.
fn get_context(&self, route: &str) -> Context {
let mut ctx = Context::new();
ctx.add("route", route);
ctx.add("bold_font", &self.website.fonts.bold_file());
ctx.add("normal_font", &self.website.fonts.normal_file());
ctx.add("color", &self.website.accent_color);
ctx.add("title", &self.website.title);
ctx
}
/// Returns a mutable handle to the currently selected project, or an error
/// when nothing is selected.
pub fn selected_project(&mut self) -> Result<&mut Project, StateError> {
if let Some(ref mut p) = self.selected_project {
Ok(p)
} else {
Err(StateError::new("No project selected"))
}
}
/// Adds an image to the currently selected project (errors when none is
/// selected).
pub fn add_project_image(&mut self, path: &PathBuf) -> Result<(), StateError> {
let proj = self.selected_project()?;
proj.add_image(path)
}
/// Sets the site's accent color.
pub fn change_color(&mut self, color: &Color) -> StateResult {
self.website.accent_color = color.clone();
Ok(String::from("Successfully updated color"))
}
} |
/// Shorthand for `Default::default()` that reads better at call sites.
pub(crate) fn default<T: Default>() -> T {
T::default()
}
|
use std::collections::{HashMap, HashSet};
use crate::graphemes_struct::Graphemes;
use len_trait::len::Len;
use std::ops::Index;
use push_trait::base::Push;
use itertools::Itertools;
pub mod graphemes_struct;
pub mod metrics;
/// Matrix coordinate as `(row, col)`.
type Coordinate = (usize, usize);
/// Returns the backtraced path as a vector of coordinates (row, col) from the levenshtein distance cost matrix
/// starting at `(0, 0)`
///
/// # Arguments
/// * `graphemes1` - Graphemes to compare with `graphemes2`
/// * `graphemes2` - Graphemes to compare with `graphemes1`
/// * `sub_cost` - Cost of substituting a character with another
///
/// # Example
/// ```
/// use nlp::alignment_path;
/// use nlp::graphemes_struct::Graphemes;
///
/// alignment_path(&Graphemes::from("dog"), &Graphemes::from("woof"), 1);
/// // returns [(0, 0), (1, 1), (2, 2), (3, 3), (3, 4)]
/// ```
pub fn alignment_path<'a, T, U>(graphemes1 : &T, graphemes2: &T, sub_cost : usize) -> Vec<Coordinate>
where T : Len + Index<usize, Output = U>, U : PartialEq + 'a {
// Build the DP backtrace map, walk it from the bottom-right corner back
// to the origin, then reverse to obtain a forward (origin-first) path.
let mat = alignment_matrix(graphemes1, graphemes2, sub_cost);
let mut path = backtrace_alignment_matrix((graphemes1.len(), graphemes2.len()), mat);
path.reverse();
path
}
/// Returns an alignment of two strings as an array of two graphemes
/// # Arguments
/// * `graphemes1` - Graphemes to compare with `graphemes2`
/// * `graphemes2` - Graphemes to compare with `graphemes1`
/// * `sub_cost` - Cost of substituting a character with another
/// * `ins_del_char` - &str for indicating insertion/deletion
///
/// # Example
/// ```
/// use nlp::alignment_strings;
/// use nlp::graphemes_struct::Graphemes;
/// let intention = Graphemes::from("intention");
/// let execution = Graphemes::from("execution");
/// let strings = alignment_strings(&intention, &execution, 1, " ");
/// // strings contains
/// // 0. inten tion
/// // 1. ex ecution
/// ```
pub fn alignment_strings<'a, T, U>(graphemes1 : &T, graphemes2 : &T, sub_cost : usize, ins_del_char : U) -> [T; 2]
where T : 'a + Default + Len + Push<U> + Index<usize, Output = U>, U : PartialEq + Clone + 'a{
let path = alignment_path(graphemes1, graphemes2, sub_cost);
if path.is_empty() {
return [T::default(), T::default()];
}
let mut align_graphemes1 = T::default();
let mut align_graphemes2 = T::default();
let mut path_iter = path.iter();
let mut prev_coord = *path_iter.next().unwrap(); // handled by the if case
// Each step in the path is classified by its direction relative to the
// previous coordinate.
for &(row, col) in path_iter {
if row != 0 && row - 1 == prev_coord.0 && col != 0 && col - 1 == prev_coord.1 {
// Diagonal step: match or substitution — emit both graphemes.
align_graphemes1.push(graphemes1[row-1].clone());
align_graphemes2.push(graphemes2[col-1].clone());
} else if row == prev_coord.0 && col != 0 && col - 1 == prev_coord.1 {
// Horizontal step: insertion into graphemes1 (gap on the left).
align_graphemes1.push(ins_del_char.clone());
align_graphemes2.push(graphemes2[col-1].clone());
}
else if row != 0 && row - 1 == prev_coord.0 && col == prev_coord.1 {
// Vertical step: deletion from graphemes1 (gap on the right).
align_graphemes1.push(graphemes1[row-1].clone());
align_graphemes2.push(ins_del_char.clone());
} else {
// A backtrace path can only move by one cell per step.
panic!();
}
prev_coord = (row, col);
}
[align_graphemes1, align_graphemes2]
}
/// Segments a sentence with space using the max match algorithm
/// # Arguments
/// * `sentence` - Sentence composed of words unseperated to be segmented
/// * `dictionary` - HashSet containing words for matching possible words in the sentence for segmentation
///
/// # Example
/// ```
/// use nlp::max_match;
/// use nlp::graphemes_struct::Graphemes;
/// use std::collections::HashSet;
/// let mut dictionary : HashSet<Graphemes> = HashSet::new();
/// dictionary.insert(Graphemes::from("他"));
/// dictionary.insert(Graphemes::from("特别"));
/// dictionary.insert(Graphemes::from("喜欢"));
/// dictionary.insert(Graphemes::from("北京烤鸭"));
/// let sentence = max_match(&Graphemes::from("他特别喜欢北京烤鸭"), &dictionary);
/// assert_eq!(&sentence, &Graphemes::from("他 特别 喜欢 北京烤鸭"));
/// ```
pub fn max_match<'a>(sentence : &Graphemes<'a>, dictionary : &HashSet<Graphemes>) -> Graphemes<'a> {
if sentence.is_empty() {
return Graphemes::from("");
}
// Greedy: try the longest prefix first, shrinking until a dictionary word
// matches, then recurse on the remainder.
for i in (1..sentence.len()+1).rev() {
let mut first_word = sentence.slice(0,i);
let remainder = sentence.slice(i, sentence.len());
if dictionary.contains(&first_word) {
if !remainder.is_empty() {
first_word.push(" ");
}
first_word.append(max_match(&remainder, dictionary));
return first_word;
}
}
// No prefix matched: emit a single grapheme and continue on the rest.
let mut first_word = sentence.slice(0,1);
let remainder = sentence.slice(1, sentence.len());
if !remainder.is_empty() {
first_word.push(" ");
}
first_word.append(max_match(&remainder, dictionary));
return first_word;
}
/// Walks the `backtrace` map from `start_coord` back to a coordinate with
/// no recorded predecessor (the matrix origin), collecting every visited
/// coordinate. The returned path runs from `start_coord` back to the
/// origin; callers reverse it to get a forward path.
///
/// The unused `<'a>` lifetime parameter of the original signature has been
/// dropped; no borrow in this function needed it.
fn backtrace_alignment_matrix(start_coord : Coordinate, backtrace : HashMap<Coordinate, Coordinate>) -> Vec<Coordinate>{
let mut path = vec![];
let mut backtracing_coord = start_coord;
while let Some(&next_coord) = backtrace.get(&backtracing_coord) {
path.push(backtracing_coord);
backtracing_coord = next_coord;
}
// The origin has no predecessor entry; include it as the final element.
path.push(backtracing_coord);
path
}
/// Builds the Levenshtein DP table for `graphemes1` x `graphemes2` and
/// returns only the backtrace map (cell -> predecessor cell); the cost
/// values themselves are discarded once the table is filled.
/// Tie-breaking order at each cell: insertion, then deletion, then
/// substitution/match (strict `<` keeps the earlier candidate on ties).
fn alignment_matrix<'a, T, U>(graphemes1 : &T, graphemes2 : &T, sub_cost : usize) -> HashMap<Coordinate, Coordinate>
where T : Len + Index<usize, Output = U>, U : PartialEq + 'a {
let num_rows = graphemes1.len() + 1;
let num_cols = graphemes2.len() + 1;
let mut backtrace : HashMap<Coordinate, Coordinate> = HashMap::new();
let mut recurrence_matrix : Vec<Vec<usize>> = vec![vec![0; num_cols]; num_rows];
// graphemes1 → row
// graphemes2 → column
// First row/column: pure insertions/deletions from the empty string.
for row in 1..num_rows {
recurrence_matrix[row][0] = row;
backtrace.insert((row, 0), (row-1, 0));
}
for col in 1..num_cols {
recurrence_matrix[0][col] = col;
backtrace.insert((0, col), (0, col-1));
}
for (row, col) in (1..num_rows).cartesian_product(1..num_cols) {
// Candidate 1: insertion (step from the left neighbor).
let mut min_distance = recurrence_matrix[row][col-1] + 1;
let mut min_coordinate = (row, col-1);
// Candidate 2: deletion (step from the upper neighbor).
let current_del_cost = recurrence_matrix[row-1][col] + 1;
if current_del_cost < min_distance {
min_distance = current_del_cost;
min_coordinate = (row-1, col);
}
// Candidate 3: match (free) or substitution (costs `sub_cost`).
let current_sub_cost = recurrence_matrix[row-1][col-1] + if graphemes1[row-1] == graphemes2[col-1] {0} else {sub_cost};
if current_sub_cost < min_distance {
min_distance = current_sub_cost;
min_coordinate = (row-1, col-1);
}
recurrence_matrix[row][col] = min_distance;
backtrace.insert((row, col), min_coordinate);
}
backtrace
}
#[cfg(test)]
mod test_cases {
use super::*;
// Reconstructs the edit distance from the aligned strings: each gap
// character counts as one insertion/deletion, each mismatched pair as
// `sub_cost`. Used to sanity-check `alignment_strings` output.
fn calculate_edit_distance_from_alignment(graphemes1 : &Graphemes, graphemes2 : &Graphemes, sub_cost : usize, ins_del_char : &str) -> usize {
let alignments = alignment_strings(graphemes1, graphemes2, sub_cost, ins_del_char);
// Aligned strings must always come out the same length.
assert_eq!(alignments[0].len(), alignments[1].len());
let mut edit_distance = 0;
for i in 0..alignments[0].len() {
if alignments[0][i] == " " || alignments[1][i] == " " {
edit_distance += 1;
} else if alignments[0][i] != alignments[1][i] {
edit_distance += sub_cost;
}
}
edit_distance
}
#[test]
fn alignment_path_basic_test() {
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from(""), &Graphemes::from(""), 2, " "), 0);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from(""), &Graphemes::from("a"), 2, " "), 1);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("a"), &Graphemes::from(""), 2, " "), 1);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from(""), &Graphemes::from("aa"), 2, " "), 2);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("aa"), &Graphemes::from(""), 2, " "), 2);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("a"), &Graphemes::from("b"), 2, " "), 2);
}
#[test]
fn alignment_path_example_test() {
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("book"), &Graphemes::from("back"), 1, " "), 2);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("back"), &Graphemes::from("book"), 1, " "), 2);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("kitten"), &Graphemes::from("sitting"), 1, " "), 3);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("sitting"), &Graphemes::from("kitten"), 1, " "), 3);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("longstring"), &Graphemes::from("short"), 1, " "), 9);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("short"), &Graphemes::from("longstring"), 1, " "), 9);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("superman"), &Graphemes::from("batman"), 1, " "), 5);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("batman"), &Graphemes::from("superman"), 1, " "), 5);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from(""), &Graphemes::from("aaaaaaaaaaaaaaaaa"), 1, " "), 17);
assert_eq!(calculate_edit_distance_from_alignment(
&Graphemes::from("aaaaaaaaaaaaaaaaa"), &Graphemes::from(""), 1, " "), 17);
}
fn chinese_dictionary() -> HashSet<Graphemes<'static>> {
let mut dictionary : HashSet<Graphemes> = HashSet::new();
dictionary.insert(Graphemes::from("他"));
dictionary.insert(Graphemes::from("特别"));
dictionary.insert(Graphemes::from("喜欢"));
dictionary.insert(Graphemes::from("北京烤鸭"));
dictionary
}
fn english_dictionary() -> HashSet<Graphemes<'static>> {
let mut dictionary : HashSet<Graphemes> = HashSet::new();
dictionary.insert(Graphemes::from("we"));
dictionary.insert(Graphemes::from("canon"));
dictionary.insert(Graphemes::from("see"));
dictionary.insert(Graphemes::from("ash"));
dictionary.insert(Graphemes::from("ort"));
dictionary.insert(Graphemes::from("distance"));
dictionary.insert(Graphemes::from("ahead"));
dictionary
}
#[test]
fn max_match_test() {
let chinese_dictionary = chinese_dictionary();
let empty_sentence : Graphemes = max_match(&Graphemes::from(""), &chinese_dictionary);
assert!(empty_sentence.is_empty());
let sentence = max_match(&Graphemes::from("他特别喜欢北京烤鸭"), &chinese_dictionary);
assert_eq!(&sentence, &Graphemes::from("他 特别 喜欢 北京烤鸭"));
let another_sentence = max_match(&Graphemes::from("english"), &chinese_dictionary);
assert_eq!(&another_sentence, &Graphemes::from("e n g l i s h"));
let english_dictionary = english_dictionary();
let example_sentence = max_match(&Graphemes::from("wecanonlyseeashortdistanceahead"), &english_dictionary);
assert_eq!(&example_sentence, &Graphemes::from("we canon l y see ash ort distance ahead"));
}
} |
use crate::api::downloads::FileInfo;
use serde::{Deserialize, Serialize};
/// Metadata persisted for a mod file that exists on the local disk,
/// linking it back to its source (game / mod / file ids) and tracking
/// whether a newer version is available.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct LocalFile {
    // Game this file belongs to.
    pub game: String,
    // File name as downloaded.
    pub file_name: String,
    // Identifier of the mod this file is part of.
    pub mod_id: u32,
    // Identifier of this specific file within the mod.
    pub file_id: u64,
    // Freshness of this file relative to the remote update list.
    pub update_status: UpdateStatus,
}
impl LocalFile {
pub fn new(fi: FileInfo, update_status: UpdateStatus) -> Self {
LocalFile {
game: fi.game,
file_name: fi.file_name,
mod_id: fi.mod_id,
file_id: fi.file_id,
update_status,
}
}
}
/// Freshness of a local file relative to the mod's remote file list.
/// Every variant carries a timestamp (seconds — presumably Unix epoch,
/// TODO confirm against the update-check code).
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum UpdateStatus {
    UpToDate(u64),     // time of your newest file,
    HasNewFile(u64),   // time of your newest file
    OutOfDate(u64),    // time of your newest file
    IgnoredUntil(u64), // time of latest file in update list
}
impl UpdateStatus {
    /// Returns the timestamp carried by the variant, whichever it is.
    pub fn time(&self) -> u64 {
        match *self {
            Self::UpToDate(t)
            | Self::HasNewFile(t)
            | Self::OutOfDate(t)
            | Self::IgnoredUntil(t) => t,
        }
    }
}
|
use serde::Serialize;
use common::error::Error;
use common::result::Result;
use crate::application::dtos::RoleDto;
use crate::domain::role::RoleRepository;
use crate::domain::user::{UserId, UserRepository};
/// Serializable response payload for the "get all roles" use case.
#[derive(Serialize)]
pub struct GetAllResponse {
    // All roles known to the role repository, as DTOs.
    pub roles: Vec<RoleDto>,
}
/// Use case: list every role. Requires the caller to be an admin.
pub struct GetAll<'a> {
    // Source of role records.
    role_repo: &'a dyn RoleRepository,
    // Used to look up the caller and check their role.
    user_repo: &'a dyn UserRepository,
}
impl<'a> GetAll<'a> {
    /// Wires the use case to the repositories it reads from.
    pub fn new(role_repo: &'a dyn RoleRepository, user_repo: &'a dyn UserRepository) -> Self {
        Self { role_repo, user_repo }
    }
    /// Returns every role, provided the user identified by `auth_id` exists
    /// and holds the "admin" role; otherwise fails with an unauthorized error.
    pub async fn exec(&self, auth_id: String) -> Result<GetAllResponse> {
        let caller = self.user_repo.find_by_id(&UserId::new(auth_id)?).await?;
        if !caller.role().is("admin") {
            return Err(Error::unauthorized());
        }
        let roles = self.role_repo.find_all().await?;
        let roles = roles.iter().map(RoleDto::from).collect();
        Ok(GetAllResponse { roles })
    }
}
|
use super::{osgood, Exception, Isolate, Local, Valuable, V8};
pub use V8::Module;
/// Lifecycle states of an ES module, mirroring V8's module status progression
/// (uninstantiated → instantiating → instantiated → evaluating → evaluated,
/// or errored).
/// NOTE(review): no variant is constructed in the visible portion of this
/// file — confirm it is used elsewhere (or kept for FFI parity) before removal.
enum Status {
    Uninstantiated,
    Instantiating,
    Instantiated,
    Evaluating,
    Evaluated,
    Errored,
}
impl Module {
    /// Compiles `src` as an ES module named `name` on the current isolate.
    ///
    /// Returns the compiled module, or, when V8 reports an exception, the
    /// syntax-error stack trace as a `String`.
    pub fn compile(
        src: Local<V8::String>,
        name: Local<V8::String>,
    ) -> Result<Local<V8::Module>, String> {
        // The origin carries the module's name, used by V8 for stack traces.
        let origin = unsafe { osgood::create_module_origin(Isolate::raw(), name.into()) };
        let result = unsafe { osgood::compile_module(Isolate::raw(), origin, src.into()) };
        if result.is_exception {
            let mut exception: Exception = result.exception.into();
            Err(exception.syntax_error_stack())
        } else {
            Ok(result.ret_val.into())
        }
    }
    /// Throws `message` on the isolate and returns an empty `MaybeLocal`,
    /// suitable as the failure result of a module-resolution callback.
    pub fn empty_and_throw(message: &str) -> V8::MaybeLocal<V8::Module> {
        Isolate::throw_error(message);
        unsafe { osgood::empty_module() }
    }
}
impl Local<Module> {
    /// Instantiates the module in `ctx`, resolving its imports through
    /// `callback`. Returns a generic error string on failure (the specific
    /// V8 exception is not surfaced here).
    pub fn instantiate(
        &mut self,
        ctx: Local<V8::Context>,
        callback: V8::Module_ResolveCallback,
    ) -> Result<(), String> {
        let result = unsafe { osgood::instantiate_module(ctx.into(), (*self).into(), callback) };
        if result {
            Ok(())
        } else {
            Err("Failed to instantiate module".to_string())
        }
    }
    /// Evaluates the module in `ctx`. On success returns the completion
    /// value; on exception returns the exception's `stack` property rendered
    /// as a Rust string.
    pub fn evaluate(&mut self, ctx: Local<V8::Context>) -> Result<Local<V8::Value>, String> {
        let result = unsafe { osgood::evaluate_module(Isolate::raw(), ctx.into(), (*self).into()) };
        if result.is_exception {
            // On error, ret_val holds the thrown exception object.
            Err(Local::from(result.ret_val)
                .to_object()
                .get(ctx, "stack")
                .as_rust_string())
        } else {
            Ok(Local::from(result.ret_val))
        }
    }
    /// Returns V8's identity hash for the module (stable for its lifetime).
    pub fn get_hash(&mut self) -> i32 {
        unsafe { self.inner_mut().GetIdentityHash() }
    }
    /// Returns the module's namespace object (its exports). Fails if the
    /// namespace is not an object — e.g. presumably when the module has not
    /// been instantiated yet (TODO confirm against V8 semantics).
    pub fn get_exports(mut self, context: Local<V8::Context>) -> Result<Local<V8::Object>, String> {
        unsafe {
            let module = self.inner_mut();
            let mut exports: Local<V8::Value> = module.GetModuleNamespace().into();
            let exports = exports.inner_mut();
            if !exports.IsObject() {
                return Err(String::from("Module namespace was not an object"));
            }
            Ok(exports.ToObject(context.into()).to_local_checked().unwrap())
        }
    }
}
/// Wraps a concrete `Local<Module>` into V8's `MaybeLocal` representation
/// via the FFI helper.
impl From<Local<Module>> for V8::MaybeLocal<V8::Module> {
    fn from(wrapped: Local<Module>) -> V8::MaybeLocal<V8::Module> {
        unsafe { osgood::from_local_module(wrapped.into()) }
    }
}
|
use crate::material::*;
use crate::shared::*;
/// Information of a ray hit
pub struct HitRecord {
    // World-space position of the hit.
    pub point: Point3,
    // Surface normal at the hit, flipped to oppose the incoming ray.
    pub normal: Vec3,
    // Ray parameter at which the hit occurred.
    pub t: f32,
    // True when the ray hit the surface from the outside.
    pub front_face: bool,
    // Material of the surface that was hit.
    pub material: Arc<dyn Material>,
}
impl HitRecord {
    /// Builds a hit record at parameter `t` along `ray`, orienting the stored
    /// normal so it always points against the incoming ray direction.
    pub fn new(ray: Ray, t: f32, outward_normal: Vec3, material: Arc<dyn Material>) -> Self {
        // Front-facing when the ray arrives from outside the surface.
        let front_face = ray.direction.dot(outward_normal) < 0.0;
        let normal = if front_face { outward_normal } else { -outward_normal };
        let point = ray.at(t);
        HitRecord { point, normal, t, front_face, material }
    }
}
/// Bounds for RayHittable
#[derive(Copy, Clone)]
pub struct HittableBounds {
    // Axis-aligned bounding box of the hittable.
    aabb: AABB,
    // BVH node index, assigned by the BVH builder via `set_bh_node_index`.
    node_index: usize,
    // Index of the hittable this bounds entry refers to.
    pub hittable_index: usize,
}
/// Exposes the cached AABB to the BVH library.
impl Bounded for HittableBounds {
    fn aabb(&self) -> AABB {
        self.aabb
    }
}
/// Lets the BVH builder record and read back this shape's node index.
impl BHShape for HittableBounds {
    fn set_bh_node_index(&mut self, index: usize) {
        self.node_index = index;
    }
    fn bh_node_index(&self) -> usize {
        self.node_index
    }
}
/// An object in the scene which can be hit with a ray
pub trait RayHittable: Send + Sync {
    /// Intersect ray with object, returning the closest valid hit, if any.
    fn intersect(&self, query: RayQuery) -> Option<HitRecord>;
    /// Return bounds for this object, tagged with its scene index.
    fn compute_bounds(&self, index: usize) -> HittableBounds;
}
/// A sphere primitive defined by center and radius.
pub struct Sphere {
    pub center: Point3,
    pub radius: f32,
    pub material: Arc<dyn Material>,
    // Cached 1/radius, used to normalize hit normals without a divide.
    radius_rcp: f32,
    // Cached radius², used in the intersection quadratic.
    radius_sq: f32,
}
impl Sphere {
    /// Creates a sphere, pre-computing the reciprocal and square of the
    /// radius so the hot intersection path avoids divisions.
    pub fn new(center: Point3, radius: f32, material: &Arc<dyn Material>) -> Self {
        Sphere {
            center,
            radius,
            material: Arc::clone(material),
            radius_rcp: radius.recip(),
            radius_sq: radius * radius,
        }
    }
}
impl RayHittable for Sphere {
    /// Ray/sphere intersection using the half-b quadratic formulation.
    fn intersect(&self, query: RayQuery) -> Option<HitRecord> {
        let ray = query.ray;
        let center_to_origin = ray.origin - self.center;
        // Quadratic a·t² + 2·half_b·t + c = 0 in the ray parameter t.
        let a = ray.direction.length_squared();
        let half_b = center_to_origin.dot(ray.direction);
        let c = center_to_origin.length_squared() - self.radius_sq;
        let discriminant = half_b * half_b - a * c;
        if discriminant < 0.0 {
            // Ray misses the sphere entirely.
            return None;
        }
        let sqrtd = discriminant.sqrt();
        // Prefer the nearer root; fall back to the farther one when the
        // nearer lies outside the [t_min, t_max] window.
        let mut root = (-half_b - sqrtd) / a;
        if root < query.t_min || query.t_max < root {
            root = (-half_b + sqrtd) / a;
            if root < query.t_min || query.t_max < root {
                return None;
            }
        }
        // Dividing by the radius (via the cached reciprocal) normalizes the
        // outward normal.
        let outward_normal = (ray.at(root) - self.center) * self.radius_rcp;
        Some(HitRecord::new(ray, root, outward_normal, self.material.clone()))
    }
    /// Axis-aligned box centered on the sphere, extending one radius each way.
    fn compute_bounds(&self, hittable_index: usize) -> HittableBounds {
        let extent = Vec3::new(self.radius, self.radius, self.radius);
        HittableBounds {
            aabb: AABB::with_bounds(self.center - extent, self.center + extent),
            node_index: 0,
            hittable_index,
        }
    }
}
|
#[macro_use]
extern crate clap;
extern crate hex;
extern crate ton_block;
extern crate ton_node_old;
extern crate ton_types;
extern crate ton_vm as tvm;
use std::fs::File;
use std::io::Write;
use std::sync::Arc;
use clap::AppSettings;
use serde_json::Value;
use ton_block::Serializable;
use ton_client::abi::{Abi, CallSet, DeploySet, encode_message, FunctionHeader, ParamsOfEncodeMessage, Signer};
use ton_client::ClientContext;
use ton_client::crypto::KeyPair;
use ton_types::{
AccountId, BagOfCells
};
use ton_node_old::node_engine::StubReceiver as MsgCreator;
/// CLI that builds TON messages in BoC format: either a raw transfer between
/// two 32-byte hex addresses, or a contract deployment message encoded from
/// a TVC image, ABI, constructor params, and a signing keypair.
#[tokio::main]
async fn main() {
    let args = clap_app!(create_msg =>
        (version: &*format!("{}\nCOMMIT_ID: {}\nBUILD_DATE: {}\nCOMMIT_DATE: {}\nGIT_BRANCH: {}",
            env!("CARGO_PKG_VERSION"),
            env!("BUILD_GIT_COMMIT"),
            env!("BUILD_TIME") ,
            env!("BUILD_GIT_DATE"),
            env!("BUILD_GIT_BRANCH")
        ))
        (author: "TONLabs")
        (about: "Message creator in BoC format")
        (@subcommand transfer =>
            (about: "Transfer funds")
            (@arg SRC: +required +takes_value "Source account address")
            (@arg DST: +required +takes_value "Destination account address")
            (@arg VALUE: +required +takes_value "Value for transfer (nanotokens)")
            (@arg OUTPUT: +required +takes_value "Output file name")
        )
        (@subcommand deploy =>
            (@arg TVC: +required +takes_value "Compiled smart contract (tvc file)")
            (@arg PARAMS: +required +takes_value "Constructor arguments. Can be passed via a filename.")
            (@arg ABI: --abi +required +takes_value "Json file with contract ABI.")
            (@arg WC: --wc +takes_value "Workchain id of the smart contract (default 0).")
            (@arg SIGN: --sign +required +takes_value "Keypair used to sign 'constructor message'.")
            (@arg OUTPUT: +required +takes_value "Output file name")
        )
    )
    .setting(AppSettings::ArgRequiredElseHelp)
    .get_matches();
    let (subcommand, args) = args.subcommand();
    let args = args.expect("Specify SUBCOMMAND: transfer or deploy");
    // OUTPUT is required by both subcommands, so resolve it up front.
    let out = args.value_of("OUTPUT")
        .expect("Specify OUTPUT file name");
    match subcommand {
        "transfer" => {
            let src = args.value_of("SRC").unwrap();
            let dst = args.value_of("DST").unwrap();
            let value: u128 = args.value_of("VALUE").unwrap().parse().expect("Error parse value");
            // Addresses are raw 32-byte account ids, given as 64 hex chars.
            assert_eq!(src.len(), 64, "source address is invalid. Address can be only 32 byte hex string");
            assert_eq!(dst.len(), 64, "destination address is invalid. Address can be only 32 byte hex string");
            let source_vec = hex::decode(src)
                .expect("source address is invalid hex string");
            // Fixed: error message previously said "source" for the
            // destination address as well.
            let dest_vec = hex::decode(dst)
                .expect("destination address is invalid hex string");
            let message = MsgCreator::create_external_transfer_funds_message(
                0,
                AccountId::from(source_vec),
                AccountId::from(dest_vec),
                value,
                0);
            // Serialize the message into a bag of cells and write it out.
            let b = message.write_to_new_cell().expect("Error write message to tree of cells");
            let bag = BagOfCells::with_root(&b.into());
            let mut file = File::create(out).expect("Error create out file");
            bag.write_to(&mut file, false).expect("Error write message to file");
            file.flush().expect("Error flush out file");
            println!("BoC successfully saved: {}", out);
        },
        "deploy" => {
            // TVC image is shipped to the encoder base64-encoded.
            let tvc_file_name = args.value_of("TVC").unwrap();
            let tvc = std::fs::read(tvc_file_name)
                .expect("Error reading TVC file");
            let tvc = base64::encode(tvc);
            let abi_file_name = args.value_of("ABI").unwrap();
            let abi = std::fs::read_to_string(abi_file_name)
                .expect("Error reading ABI file");
            let abi = serde_json::from_str(&abi)
                .expect("ABI file parsing failed");
            let wc = args.value_of("WC")
                .map(|value| value.parse().expect("Workchain id must be valid integer value"))
                .unwrap_or(0);
            let params = serde_json::from_str(args.value_of("PARAMS")
                .expect("Expected params"))
                .expect("PARAMS must be valid JSON");
            let keypair_file_name = args.value_of("SIGN").unwrap();
            let keypair = std::fs::read_to_string(keypair_file_name)
                .expect("Error reading keypair file");
            let keypair: Value = serde_json::from_str(&keypair)
                .expect("Keypair file must be valid JSON file");
            // Encode a signed constructor message that never expires.
            let encoded = encode_message(
                Arc::new(ClientContext::new(Default::default()).unwrap()),
                ParamsOfEncodeMessage {
                    abi: Abi::Contract(abi),
                    deploy_set: Some(DeploySet {
                        tvc,
                        workchain_id: Some(wc),
                        ..Default::default()
                    }),
                    call_set: Some(CallSet {
                        function_name: "constructor".to_string(),
                        header: Some(FunctionHeader {
                            expire: Some(u32::max_value()),
                            ..Default::default()
                        }),
                        input: Some(params),
                        ..Default::default()
                    }),
                    signer: Signer::Keys {
                        keys: KeyPair::new(
                            keypair["public"].as_str().unwrap().to_string(),
                            keypair["secret"].as_str().unwrap().to_string()
                        )
                    },
                    ..Default::default()
                },
            ).await
            .expect("Error encoding message");
            println!("Address for deployment: {}", encoded.address);
            // Fixed typo: "deciding" → "decoding".
            let decoded = base64::decode(&encoded.message)
                .expect("Error decoding BASE64 message");
            std::fs::write(out, decoded)
                .expect("Error writing output file");
            println!("BoC successfully saved: {}", out);
        },
        _ => {
            println!("Invalid type option");
        }
    }
}
|
use std::{ env, fs, error };
use error::Error;
/// Parsed command-line configuration for a search run.
pub struct Config {
    // Pattern to search for.
    query: String,
    // File whose contents will be searched.
    filename: String,
    // True unless the CASE_INSENSITIVE env var is set.
    case_sensitive: bool,
}
impl Config {
    /// Builds a `Config` from an iterator of CLI arguments.
    ///
    /// The first item (conventionally the program name) is skipped; the next
    /// two are the query string and the file name. Accepting any
    /// `Iterator<Item = String>` rather than `env::Args` specifically is
    /// backward compatible (callers still pass `env::args()`) and makes the
    /// constructor testable.
    ///
    /// # Errors
    /// Returns a static message when the query or the file name is missing.
    pub fn new(mut args: impl Iterator<Item = String>) -> Result<Config, &'static str> {
        args.next(); // skip program name
        let query = match args.next() {
            Some(arg) => arg,
            None => return Err("Didn't get a query string"),
        };
        let filename = match args.next() {
            Some(arg) => arg,
            None => return Err("Didn't get a file name"),
        };
        // Search is case-sensitive unless CASE_INSENSITIVE is present.
        let case_sensitive = env::var("CASE_INSENSITIVE").is_err();
        Ok(Config { query, filename, case_sensitive })
    }
}
/// Reads the configured file and prints every line matching the query,
/// honoring the configured case sensitivity. Errors from reading the file
/// are propagated to the caller.
pub fn run(conf: Config) -> Result<(), Box<dyn Error>> {
    let contents = fs::read_to_string(conf.filename)?;
    let hits = if conf.case_sensitive {
        search(&conf.query, &contents)
    } else {
        search_insensitive(&conf.query, &contents)
    };
    hits.into_iter().for_each(|line| println!("{}", line));
    Ok(())
}
/// Returns every line of `contents` that contains `query` (case-sensitive).
pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    let mut matches = Vec::new();
    for line in contents.lines() {
        if line.contains(query) {
            matches.push(line);
        }
    }
    matches
}
/// Case-insensitive variant of `search`: returns every line of `contents`
/// containing `query`, ignoring case.
pub fn search_insensitive<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    // Lowercase the query once, outside the per-line closure, instead of
    // reallocating it for every line of the input.
    let query = query.to_lowercase();
    contents.lines()
        .filter(|line| line.to_lowercase().contains(&query))
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Case-sensitive search returns only the exactly-matching line.
    #[test]
    fn search_content() {
        let query = "Rust's";
        let contents = "\
Rust's speed, safety, single binary output,
and cross-plaform support make it an ideal language for creating command line tools,
so for our project, ...";
        assert_eq!(vec!["Rust's speed, safety, single binary output,"], search(&query, &contents));
    }
    // Case-insensitive search matches regardless of the query's casing.
    #[test]
    fn search_content_insensitive() {
        let query = "PrOj";
        let contents = "\
Rust's speed, safety, single binary output,
and cross-plaform support make it an ideal language for creating command line tools,
so for our project, ...";
        assert_eq!(vec!["so for our project, ..."], search_insensitive(&query, &contents));
    }
}
|
use entry::Entry;
use treap::{implicit_tree, tree};
/// A struct representing an internal node of a treap.
pub struct Node<T, U> {
    // Key/value pair stored at this node.
    pub entry: Entry<T, U>,
    // Heap priority; the treap maintains the max/min-heap property on this.
    pub priority: u32,
    // Cached size of the subtree rooted here (including this node).
    pub len: usize,
    pub left: tree::Tree<T, U>,
    pub right: tree::Tree<T, U>,
}
/// A struct representing an internal node of an implicit treap.
pub struct ImplicitNode<T> {
    // Value stored at this node; position in the sequence is implicit.
    pub value: T,
    // Heap priority for treap balancing.
    pub priority: u32,
    // Cached size of the subtree rooted here (including this node).
    pub len: usize,
    pub left: implicit_tree::Tree<T>,
    pub right: implicit_tree::Tree<T>,
}
impl<T, U> Node<T, U>
where
    T: Ord,
{
    /// Creates a leaf node holding `key`/`value` with the given heap priority.
    pub fn new(key: T, value: U, priority: u32) -> Self {
        Node {
            entry: Entry { key, value },
            priority,
            len: 1,
            left: None,
            right: None,
        }
    }
    /// Recomputes this node's cached subtree size from its children's
    /// cached sizes (call after any structural change below this node).
    pub fn update(&mut self) {
        let left_len = self.left.as_ref().map_or(0, |node| node.len);
        let right_len = self.right.as_ref().map_or(0, |node| node.len);
        self.len = 1 + left_len + right_len;
    }
}
impl<T> ImplicitNode<T> {
pub fn new(value: T, priority: u32) -> Self {
ImplicitNode {
value,
priority,
len: 1,
left: None,
right: None,
}
}
pub fn len(&self) -> usize {
self.len
}
pub fn update(&mut self) {
let ImplicitNode { ref mut len, ref left, ref right, .. } = self;
*len = 1;
if let Some(ref left_node) = left {
*len += left_node.len;
}
if let Some(ref right_node) = right {
*len += right_node.len;
}
}
pub fn get_implicit_key(&self) -> usize {
match self.left {
Some(ref left_node) => left_node.len() + 1,
None => 1,
}
}
}
|
use crate::translations::translations::{bytes_translation, packets_translation};
use crate::Language;
/// Enum representing the possible kind of chart displayed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ChartType {
    // Chart plots packet counts.
    Packets,
    // Chart plots byte counts.
    Bytes,
}
impl ChartType {
    /// Every selectable chart kind, in the order they are displayed.
    pub(crate) const ALL: [ChartType; 2] = [ChartType::Bytes, ChartType::Packets];
    /// Translated label shown for this chart type's radio button.
    pub fn get_radio_label(&self, language: Language) -> &str {
        match self {
            Self::Packets => packets_translation(language),
            Self::Bytes => bytes_translation(language),
        }
    }
}
|
use rand::prelude::*;
use rand::rngs::SmallRng;
use rtlib::vec::Vec3;
/// Probability density of a direction under uniform sampling of the unit
/// sphere: constant 1/(4π), independent of the direction itself.
fn pdf(_p: Vec3) -> f64 {
    let sphere_solid_angle = 4.0 * std::f64::consts::PI;
    sphere_solid_angle.recip()
}
// Number of Monte Carlo samples to draw.
const N: u64 = 1000000;
// Fixed RNG seed so runs are reproducible.
const SEED: u64 = 0;
/// Monte Carlo estimate of the integral of cos²θ over the unit sphere
/// (analytically 4π/3): sample directions uniformly and divide the integrand
/// by the constant sphere pdf.
fn main() -> Result<(), std::io::Error> {
    let mut rng = SmallRng::seed_from_u64(SEED);
    let mut accum = 0.0;
    for _ in 0..N {
        let dir = Vec3::random_unit_vector(&mut rng);
        // Integrand is the squared z-component, i.e. cos²θ.
        let cosine_squared = dir.z * dir.z;
        accum += cosine_squared / pdf(dir);
    }
    println!("I = {:.12} ({} iterations)", accum / (N as f64), N);
    Ok(())
}
|
mod client;
mod config;
mod data;
mod file;
mod mongo;
mod services;
mod utils;
use std::io::Write;
use actix_web::{
middleware::{Logger, NormalizePath, TrailingSlash},
web, App, HttpServer,
};
use chrono::Local as ChronoLocal;
use clap::{App as ClapApp, Arg};
use env_logger::Env;
use log::info;
use crate::config::Config;
use crate::data::Data;
use crate::file::local::Local;
use crate::file::s3::S3;
use crate::mongo::wrapper::MongoWrapper;
use crate::mongo::MongoDB;
/// URL path segment identifying the current API version (`/api/v1`).
const API_VERSION: &str = "v1";
/// PDFIller entry point: configures logging, parses the CLI, loads config,
/// wires up storage + MongoDB, and runs the actix-web server.
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    // Log format: timestamp [LEVEL] - module - message.
    env_logger::Builder::from_env(Env::default().default_filter_or("info"))
        .format(|buf, record| {
            writeln!(
                buf,
                "{} [{}] - {} - {}",
                ChronoLocal::now().format("%Y-%m-%dT%H:%M:%S"),
                record.level(),
                record.module_path().unwrap_or("main"),
                record.args()
            )
        })
        .init();
    let name = "PDFIller";
    let matches = ClapApp::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .name(name)
        .author("Dario Cancelliere <dario.cancelliere@facile.it>")
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .arg(
            Arg::with_name("path")
                .short("p")
                .long("path")
                .required(false)
                .takes_value(true)
                .default_value("config")
                .help("Base config file path"),
        )
        .get_matches();
    info!(
        "{} v{} by Dario Cancelliere",
        name,
        env!("CARGO_PKG_VERSION")
    );
    info!("{}", env!("CARGO_PKG_DESCRIPTION"));
    info!("");
    let config = Config::new(&format!(
        "{}/config.toml",
        matches.value_of("path").unwrap()
    ));
    // Fixed: the guard returned by sentry::init must stay alive for the whole
    // program; previously it was bound inside an `if let` block and dropped
    // immediately, shutting Sentry down before any event could be reported.
    let _sentry_guard = config.sentry.map(|sentry| sentry::init(sentry.dsn));
    // Pick the file backend from config, then wrap Mongo access.
    let data = Data::new(
        if config.service.filesystem == "local" {
            Box::new(Local::new(config.service.clone()))
        } else {
            Box::new(S3::new(config.service.clone()))
        },
        MongoWrapper::new(MongoDB::new(&config.mongo).await),
    );
    info!(
        "Starting PDFIller API server at http://{}:{}...",
        config.server.bind_address, config.server.bind_port
    );
    HttpServer::new(move || {
        App::new()
            .data(data.clone())
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .wrap(Logger::default())
            .service(web::scope(&format!("/api/{}", API_VERSION)).configure(services::config))
    })
    .bind(format!(
        "{}:{}",
        config.server.bind_address, config.server.bind_port
    ))?
    .run()
    .await
}
|
//! Tools to control view alignment
/// Specifies the alignment along both horizontal and vertical directions.
pub struct Align {
    /// Horizontal alignment.
    pub h: HAlign,
    /// Vertical alignment.
    pub v: VAlign,
}
impl Align {
    /// Creates a new Align object from the given horizontal and vertical alignments.
    pub fn new(h: HAlign, v: VAlign) -> Self {
        Align { h, v }
    }
    /// Creates a top-left alignment.
    pub fn top_left() -> Self {
        Align::new(HAlign::Left, VAlign::Top)
    }
    /// Creates a top-right alignment.
    pub fn top_right() -> Self {
        Align::new(HAlign::Right, VAlign::Top)
    }
    /// Creates a bottom-left alignment.
    pub fn bot_left() -> Self {
        Align::new(HAlign::Left, VAlign::Bottom)
    }
    /// Creates a bottom-right alignment.
    pub fn bot_right() -> Self {
        // Fixed: previously used VAlign::Top, which made this a duplicate of
        // top_right() instead of a bottom-right alignment.
        Align::new(HAlign::Right, VAlign::Bottom)
    }
    /// Creates an alignment centered both horizontally and vertically.
    pub fn center() -> Self {
        Align::new(HAlign::Center, VAlign::Center)
    }
}
/// Horizontal alignment
pub enum HAlign {
    Left,
    Center,
    Right,
}
/// Vertical alignment
pub enum VAlign {
    Top,
    Center,
    Bottom,
}
impl HAlign {
/// To draw a view with size `content` in a printer with size `container`, this returns the
/// offset to start printing the view at.
pub fn get_offset(&self, content: usize, container: usize) -> usize {
match *self {
HAlign::Left => 0,
HAlign::Center => (container - content) / 2,
HAlign::Right => (container - content),
}
}
}
impl VAlign {
/// To draw a view with size `content` in a printer with size `container`, this returns the
/// offset to start printing the view at.
pub fn get_offset(&self, content: usize, container: usize) -> usize {
match *self {
VAlign::Top => 0,
VAlign::Center => (container - content) / 2,
VAlign::Bottom => (container - content),
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.