text stringlengths 8 4.13M |
|---|
// Bloom filter Python library written in Rust
extern crate farmhash;
use farmhash::FarmHasher;
use std::hash::{Hash, Hasher};
use pyo3::prelude::*;
use pyo3::wrap_pymodule;
// A classic Bloom filter exposed to Python via pyo3: probabilistic set
// membership with no false negatives and a tunable false-positive rate.
#[pyclass]
struct BloomFilter {
    // Backing bit vector (one bool per bit).
    bv: Vec<bool>,
    // Number of hash functions (k) applied per inserted/queried element.
    hashes: u64,
}
/// Optimal number of bits `m` for a Bloom filter expected to hold
/// `capacity` items with false-positive probability `error_rate`:
/// `m = ceil(-n * ln(p) / (ln 2)^2)`.
///
/// BUG FIX: the original divided by `ln(1 / (ln 2)^2)` (~0.733) instead of
/// `(ln 2)^2` (~0.4805), so every filter was sized incorrectly.
#[inline]
fn num_of_bits_in_vec(capacity: usize, error_rate: f64) -> usize {
    let ln2_squared = std::f64::consts::LN_2.powi(2);
    ((-(capacity as f64) * error_rate.ln()) / ln2_squared).ceil() as usize
}
/// Optimal number of hash functions `k = (m / n) * ln 2`, rounded to the
/// nearest integer (`m` = bits in the filter, `n` = expected capacity).
#[inline]
fn num_of_hash_funcs(m: usize, capacity: usize) -> u64 {
    let bits_per_item = (m as f64) / (capacity as f64);
    (bits_per_item * std::f64::consts::LN_2).round().abs() as u64
}
#[pymethods]
impl BloomFilter {
    /// Create a filter sized for `capacity` expected items at the given
    /// target false-positive rate (must satisfy 0 < error_rate < 1).
    ///
    /// BUG FIX: the bit vector must contain `m` bits (the computed optimal
    /// size), not `capacity` entries — the original allocated `capacity`
    /// bools and never used `m`, breaking the error-rate guarantee.
    #[new]
    pub fn new(capacity: usize, error_rate: f64) -> PyResult<Self> {
        assert!((error_rate > 0.0 && error_rate < 1.0) && capacity > 0);
        // https://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives
        let m = num_of_bits_in_vec(capacity, error_rate);
        // https://en.wikipedia.org/wiki/Bloom_filter#Optimal_number_of_hash_functions
        let k = num_of_hash_funcs(m, capacity);
        Ok(BloomFilter {
            bv: vec![false; m],
            hashes: k,
        })
    }
    /// The i-th hash of `x`, reduced to an index into the bit vector.
    fn nth_hash(&self, x: &str, i: u64) -> usize {
        let mut hasher = FarmHasher::default();
        // Seed the hasher with the hash index so each of the k hashes differs.
        hasher.write(&i.to_be_bytes());
        x.hash(&mut hasher);
        // BUG FIX: reduce modulo len(), not capacity() — capacity may exceed
        // the number of initialized bits and would index out of bounds.
        (hasher.finish() % (self.bv.len() as u64)) as usize
    }
    /// Insert `value` by setting all k hash positions; always returns Ok(true).
    pub fn insert(&mut self, value: &str) -> PyResult<bool> {
        for i in 0..self.hashes {
            let index = self.nth_hash(value, i);
            self.bv[index] = true;
        }
        Ok(true)
    }
    /// Membership test: `false` means definitely absent; `true` may be a
    /// false positive with probability ~error_rate.
    pub fn has(&self, value: &str) -> PyResult<bool> {
        for i in 0..self.hashes {
            if !self.bv[self.nth_hash(value, i)] {
                return Ok(false);
            }
        }
        Ok(true)
    }
}
/// Python module `ubloom`: exposes the `BloomFilter` class.
#[pymodule]
fn ubloom(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_class::<BloomFilter>()?;
    Ok(())
}
/// Python module `ubloom_filter`: wraps `ubloom` as a nested submodule.
#[pymodule]
fn ubloom_filter(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_wrapped(wrap_pymodule!(ubloom))?;
    Ok(())
}
|
use crate::{
components::VotingPanel, icon, smmdb::Course2Response, smmdb::Difficulty, styles::*, AppState,
Message,
};
use iced::{
button, container, Align, Background, Button, Color, Column, Container, Element, Image, Length,
Row, Space, Text,
};
use iced_native::widget::image::Handle;
/// UI panel for a single SMMDB course: voting column, thumbnail,
/// title/description and a difficulty row.
#[derive(Debug)]
pub struct SmmdbCoursePanel {
    voting_panel: VotingPanel,
    // iced widget states that must persist between view() calls.
    panel_state: button::State,
    upvote_state: button::State,
    downvote_state: button::State,
    course: Course2Response,
    // Encoded thumbnail bytes once fetched; None until set_thumbnail().
    thumbnail: Option<Vec<u8>>,
}
impl SmmdbCoursePanel {
    /// Build a panel for `course`; widget states start fresh and the
    /// thumbnail is absent until `set_thumbnail` is called.
    pub fn new(course: Course2Response) -> SmmdbCoursePanel {
        SmmdbCoursePanel {
            voting_panel: VotingPanel::new(),
            panel_state: button::State::new(),
            upvote_state: button::State::new(),
            downvote_state: button::State::new(),
            course,
            thumbnail: None,
        }
    }
    /// Record the current user's vote on the wrapped course response.
    pub fn set_own_vote(&mut self, value: i32) {
        self.course.set_own_vote(value);
    }
    /// Render the panel. In `DownloadSelect` state the whole panel becomes a
    /// button that emits `Message::DownloadCourse`; otherwise it is a plain
    /// container.
    pub fn view(&mut self, state: &AppState, is_logged_in: bool) -> Element<Message> {
        let course = self.course.get_course();
        let course_header = course.get_header();
        // 240x135 image, or an equally-sized spacer so layout is stable
        // before the thumbnail arrives.
        let thumbnail: Element<Message> = if let Some(thumbnail) = &self.thumbnail {
            Image::new(Handle::from_memory(thumbnail.clone()))
                .width(Length::Units(240))
                .height(Length::Units(135))
                .into()
        } else {
            Space::new(Length::Units(240), Length::Units(135)).into()
        };
        // Difficulty row: icon + label per variant; `Unset` shows only the
        // "Difficulty:" prefix, and a missing difficulty collapses to a
        // zero-height spacer.
        let difficulty: Element<Message> = match self.course.get_difficulty() {
            Some(difficulty) => {
                let row = Row::new()
                    .align_items(Align::End)
                    .push(Text::new("Difficulty:").size(15))
                    .push(Space::with_width(Length::Units(4)));
                match difficulty {
                    Difficulty::Unset => row,
                    Difficulty::Easy => row
                        .push(Image::new(icon::EASY.clone()))
                        .push(Text::new("Easy").size(15)),
                    Difficulty::Normal => row
                        .push(Image::new(icon::NORMAL.clone()))
                        .push(Text::new("Normal").size(15)),
                    Difficulty::Expert => row
                        .push(Image::new(icon::EXPERT.clone()))
                        .push(Text::new("Expert").size(15)),
                    Difficulty::SuperExpert => row
                        .push(Image::new(icon::SUPER_EXPERT.clone()))
                        .push(Text::new("Super Expert").size(15)),
                }
                .into()
            }
            None => Space::with_height(Length::Shrink).into(),
        };
        let voting_content = self.voting_panel.view(
            self.course.get_id().clone(),
            self.course.get_votes(),
            self.course.get_own_vote(),
            is_logged_in,
        );
        // votes | thumbnail | description + difficulty, vertically centered.
        let inner_content = Row::new()
            .push(voting_content)
            .push(Space::with_width(Length::Units(10)))
            .push(Container::new(thumbnail).style(ThumbnailStyle))
            .push(Space::with_width(Length::Units(10)))
            .push(
                Column::new()
                    .push(Text::new(format!("{}", course_header.get_description())).size(15))
                    .push(Space::with_height(Length::Units(LIST_SPACING)))
                    .push(difficulty),
            )
            .align_items(Align::Center);
        let content = Column::new()
            .push(Text::new(format!("{}", course_header.get_title())).size(24))
            .push(Space::with_height(Length::Units(10)))
            .push(inner_content);
        match state {
            AppState::DownloadSelect(index) => Button::new(&mut self.panel_state, content)
                .style(SmmdbCoursePanelButtonStyle(state.clone()))
                .padding(12)
                .width(Length::Fill)
                .on_press(Message::DownloadCourse(
                    *index,
                    self.course.get_id().clone(),
                ))
                .into(),
            _ => Container::new(content)
                .style(SmmdbCoursePanelStyle)
                .padding(12)
                .width(Length::Fill)
                .into(),
        }
    }
    /// SMMDB id of the wrapped course.
    pub fn get_id(&self) -> &String {
        self.course.get_id()
    }
    /// Store the fetched thumbnail bytes for subsequent `view` calls.
    pub fn set_thumbnail(&mut self, thumbnail: Vec<u8>) {
        self.thumbnail = Some(thumbnail);
    }
}
/// Button styling for the course panel; the wrapped `AppState` selects the
/// "download select" palette when applicable.
struct SmmdbCoursePanelButtonStyle(AppState);

impl button::StyleSheet for SmmdbCoursePanelButtonStyle {
    fn active(&self) -> button::Style {
        let background = if let AppState::DownloadSelect(_) = self.0 {
            Some(PANEL_SELECT_ACTIVE)
        } else {
            Some(PANEL_ACTIVE)
        };
        button::Style {
            text_color: Color::BLACK,
            background,
            border_radius: 8,
            ..button::Style::default()
        }
    }

    fn hovered(&self) -> button::Style {
        let background = if let AppState::DownloadSelect(_) = self.0 {
            Some(PANEL_SELECT_HOVER)
        } else {
            Some(PANEL_ACTIVE)
        };
        button::Style {
            text_color: Color::BLACK,
            background,
            border_radius: 8,
            ..button::Style::default()
        }
    }
}
/// Container styling for the non-interactive form of the course panel.
struct SmmdbCoursePanelStyle;

impl container::StyleSheet for SmmdbCoursePanelStyle {
    fn style(&self) -> container::Style {
        let mut style = container::Style::default();
        style.background = Some(PANEL_ACTIVE);
        style.border_radius = 8;
        style
    }
}
/// Semi-transparent black backdrop drawn behind course thumbnails.
struct ThumbnailStyle;

impl container::StyleSheet for ThumbnailStyle {
    fn style(&self) -> container::Style {
        let backdrop = Color::from_rgba(0.0, 0.0, 0.0, 0.5);
        container::Style {
            background: Some(Background::Color(backdrop)),
            ..container::Style::default()
        }
    }
}
|
use {
super::{
concurrency::{Concurrency, DefaultConcurrency},
path::{Path, PathExtractor},
recognizer::Recognizer,
scope::{ScopeId, Scopes},
App, AppInner, ResourceData, RouteData, ScopeData, Uri,
},
crate::{
endpoint::Endpoint,
extractor::Extractor,
generic::Combine,
handler::ModifyHandler,
util::{Chain, Never},
},
http::Method,
indexmap::map::{Entry, IndexMap},
std::{error, fmt, marker::PhantomData, rc::Rc, sync::Arc},
};
/// A type alias of `Result<T, E>` whose error type is restricted to [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
/// An error type which will be thrown from `AppBuilder`.
///
/// Wraps a `failure::Error` through its `Compat` adapter so it can implement
/// `std::error::Error`.
#[derive(Debug)]
pub struct Error {
    cause: failure::Compat<failure::Error>,
}
impl From<Never> for Error {
    // `Never` is uninhabited, so this conversion can never be invoked;
    // the empty match proves that to the compiler.
    fn from(never: Never) -> Self {
        match never {}
    }
}
impl Error {
pub fn custom<E>(cause: E) -> Self
where
E: Into<failure::Error>,
{
Self {
cause: cause.into().compat(),
}
}
}
impl fmt::Display for Error {
    /// Delegate formatting to the wrapped cause.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.cause, f)
    }
}
impl error::Error for Error {
    // Expose the compat-wrapped failure error as the source for error chains.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        Some(&self.cause)
    }
}
impl<C> App<C>
where
    C: Concurrency,
{
    /// Construct an `App` using the provided function.
    ///
    /// The closure receives the root `Scope` (empty modifier, root prefix);
    /// everything it registers — resources, sub-scopes, fallbacks — ends up
    /// in the finished application, which is then frozen behind an `Arc`.
    pub fn build<F>(f: F) -> Result<Self>
    where
        F: FnOnce(Scope<'_, (), C>) -> Result<()>,
    {
        let mut app = AppInner {
            recognizer: Recognizer::default(),
            scopes: Scopes::new(ScopeData {
                prefix: Uri::root(),
                fallback: None,
            }),
        };
        f(Scope {
            app: &mut app,
            scope_id: ScopeId::root(),
            modifier: (),
            _marker: PhantomData,
        })?;
        Ok(Self {
            inner: Arc::new(app),
        })
    }
}
/// A type representing the "scope" in Web application.
#[derive(Debug)]
pub struct Scope<'a, M, C: Concurrency = DefaultConcurrency> {
    app: &'a mut AppInner<C>,
    // Stack of `ModifyHandler`s applied to every handler registered below.
    modifier: M,
    scope_id: ScopeId,
    // Rc makes this type !Send/!Sync: scopes are single-threaded builders.
    _marker: PhantomData<Rc<()>>,
}
/// The experimental API for the next version.
impl<'a, M, C> Scope<'a, M, C>
where
    C: Concurrency,
{
    /// Creates a resource that has the provided path.
    ///
    /// The path is parsed, joined onto this scope's prefix, and registered
    /// with the recognizer; the returned `Resource` borrows this scope's
    /// modifier stack.
    pub fn at<P>(&mut self, path: P) -> Result<Resource<'_, P, &M, C>>
    where
        P: Path,
    {
        let uri: Uri = path.as_str().parse().map_err(Error::custom)?;
        let uri = self.app.scopes[self.scope_id]
            .data
            .prefix
            .join(&uri)
            .map_err(Error::custom)?;
        let scope = &self.app.scopes[self.scope_id];
        let resource = self
            .app
            .recognizer
            .insert(
                uri.as_str(),
                Arc::new(ResourceData {
                    scope: scope.id(),
                    // Ancestor chain plus self, used for fallback resolution.
                    ancestors: scope
                        .ancestors()
                        .iter()
                        .cloned()
                        .chain(Some(scope.id()))
                        .collect(),
                    uri: uri.clone(),
                    routes: vec![],
                    default_route: None,
                    verbs: IndexMap::default(),
                }),
            )
            .map_err(Error::custom)?;
        Ok(Resource {
            // Safe during building: the Arc was created just above and has
            // not been cloned yet, so get_mut cannot fail here.
            resource: Arc::get_mut(resource).expect("the instance has already been shared"),
            modifier: &self.modifier,
            path,
        })
    }
    /// Registers the scope-level fallback handler onto the current scope.
    ///
    /// The fallback handler is called when there are no resources that exactly
    /// matches to the incoming request.
    pub fn fallback<T>(&mut self, endpoint: T) -> Result<()>
    where
        T: Endpoint<()>,
        M: ModifyHandler<EndpointHandler<(), T>>,
        M::Handler: Into<C::Handler>,
    {
        let handler = EndpointHandler::new(endpoint, ());
        let handler = self.modifier.modify(handler);
        self.app.scopes[self.scope_id].data.fallback = Some(handler.into());
        Ok(())
    }
    /// Creates a sub-scope onto the current scope.
    ///
    /// The sub-scope inherits this scope's prefix (joined with `prefix`) and
    /// borrows its modifier stack.
    pub fn mount<P>(&mut self, prefix: P) -> Result<Scope<'_, &M, C>>
    where
        P: AsRef<str>,
    {
        let prefix: Uri = prefix.as_ref().parse().map_err(Error::custom)?;
        let scope_id = self
            .app
            .scopes
            .add_node(self.scope_id, {
                let parent = &self.app.scopes[self.scope_id].data;
                ScopeData {
                    prefix: parent.prefix.join(&prefix).map_err(Error::custom)?,
                    fallback: None,
                }
            })
            .map_err(Error::custom)?;
        Ok(Scope {
            app: &mut *self.app,
            scope_id,
            modifier: &self.modifier,
            _marker: PhantomData,
        })
    }
    /// Adds the provided `ModifyHandler` to the stack and executes a configuration.
    ///
    /// Unlike `nest`, this method does not create a scope.
    pub fn with<M2>(&mut self, modifier: M2) -> Scope<'_, Chain<M2, &M>, C> {
        Scope {
            app: &mut *self.app,
            scope_id: self.scope_id,
            modifier: Chain::new(modifier, &self.modifier),
            _marker: PhantomData,
        }
    }
    /// Applies itself to the provided function.
    pub fn done<F, T>(self, f: F) -> Result<T>
    where
        F: FnOnce(Self) -> Result<T>,
    {
        f(self)
    }
}
/// A resource associated with a specific HTTP path.
#[derive(Debug)]
pub struct Resource<'s, P, M, C>
where
    P: Path,
    C: Concurrency,
{
    // Mutable view into the resource registered with the recognizer.
    resource: &'s mut ResourceData<C>,
    path: P,
    // Modifier stack inherited from the owning scope.
    modifier: M,
}
impl<'s, P, M, C> Resource<'s, P, M, C>
where
    P: Path,
    C: Concurrency,
{
    /// Creates a `Route` that matches to the specified HTTP methods.
    pub fn route(
        &mut self,
        methods: impl IntoIterator<Item = impl Into<Method>>,
    ) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route2(Some(methods.into_iter().map(Into::into).collect()))
    }
    // Shared constructor: `None` methods means "match any method".
    fn route2(&mut self, methods: Option<Vec<Method>>) -> Route<'_, PathExtractor<P>, &M, C> {
        Route {
            resource: &mut *self.resource,
            methods,
            modifier: &self.modifier,
            extractor: PathExtractor::<P>::new(),
        }
    }
    // Convenience per-method shortcuts (Option<Method> is an IntoIterator).
    pub fn get(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route(Some(Method::GET))
    }
    pub fn post(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route(Some(Method::POST))
    }
    pub fn put(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route(Some(Method::PUT))
    }
    pub fn head(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route(Some(Method::HEAD))
    }
    pub fn delete(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route(Some(Method::DELETE))
    }
    pub fn patch(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route(Some(Method::PATCH))
    }
    /// Start building of a `Route` that matches any HTTP method.
    pub fn any(&mut self) -> Route<'_, PathExtractor<P>, &M, C> {
        self.route2(None)
    }
    /// Shorthand for `self.any().to(endpoint)`.
    pub fn to<T>(&mut self, endpoint: T) -> Result<()>
    where
        T: Endpoint<P::Output>,
        M: ModifyHandler<EndpointHandler<PathExtractor<P>, T>>,
        M::Handler: Into<C::Handler>,
    {
        self.any().to(endpoint)
    }
    /// Appends a `ModifyHandler` to the stack applied to the all handlers on this resource.
    pub fn with<M2>(self, modifier: M2) -> Resource<'s, P, Chain<M2, M>, C> {
        Resource {
            resource: self.resource,
            path: self.path,
            modifier: Chain::new(modifier, self.modifier),
        }
    }
    /// Applies itself to the specified function.
    pub fn done<F, T>(self, f: F) -> Result<T>
    where
        F: FnOnce(Self) -> Result<T>,
    {
        f(self)
    }
}
/// Builder for a single route on a resource: an extractor chain, an optional
/// method set (`None` = any method), and a modifier stack.
#[allow(missing_debug_implementations)]
pub struct Route<'a, E, M, C>
where
    E: Extractor,
    C: Concurrency,
{
    resource: &'a mut ResourceData<C>,
    methods: Option<Vec<Method>>,
    extractor: E,
    modifier: M,
}
impl<'a, E, M, C> Route<'a, E, M, C>
where
    E: Extractor,
    C: Concurrency,
{
    /// Push a `ModifyHandler` onto this route's modifier stack.
    pub fn with<M2>(self, modifier: M2) -> Route<'a, E, Chain<M2, M>, C> {
        Route {
            resource: self.resource,
            methods: self.methods,
            modifier: Chain::new(modifier, self.modifier),
            extractor: self.extractor,
        }
    }
    /// Chain an additional extractor; its output is combined with the
    /// existing extractor's output.
    pub fn extract<E2>(self, extractor: E2) -> Route<'a, Chain<E, E2>, M, C>
    where
        E2: Extractor,
        E::Output: Combine<E2::Output>,
    {
        Route {
            resource: self.resource,
            methods: self.methods,
            modifier: self.modifier,
            extractor: Chain::new(self.extractor, extractor),
        }
    }
    /// Finish the route with an endpoint.
    ///
    /// With a method set, the route is appended and each method mapped to it
    /// in `verbs` (duplicates are an error). Without one, it becomes the
    /// resource's single default route (setting it twice is an error).
    pub fn to<T>(self, endpoint: T) -> Result<()>
    where
        T: Endpoint<E::Output>,
        M: ModifyHandler<EndpointHandler<E, T>>,
        M::Handler: Into<C::Handler>,
    {
        let handler = self
            .modifier
            .modify(EndpointHandler::new(endpoint, self.extractor));
        let route = RouteData {
            handler: handler.into(),
        };
        if let Some(methods) = self.methods {
            let index = self.resource.routes.len();
            self.resource.routes.push(route);
            for method in methods {
                match self.resource.verbs.entry(method) {
                    Entry::Occupied(..) => {
                        return Err(Error::custom(failure::format_err!("duplicated method")));
                    }
                    Entry::Vacant(entry) => {
                        entry.insert(index);
                    }
                }
            }
        } else {
            if self.resource.default_route.is_some() {
                return Err(Error::custom(failure::format_err!(
                    "the default route handler has already been set"
                )));
            }
            self.resource.default_route = Some(route);
        }
        Ok(())
    }
}
/// A `Handler` that uses on an endpoint tied to a specific HTTP path.
#[allow(missing_debug_implementations)]
pub struct EndpointHandler<E, T> {
    // Arc so each spawned handle can hold the endpoint while extraction runs.
    endpoint: Arc<T>,
    extractor: E,
}
impl<E, T> EndpointHandler<E, T>
where
    E: Extractor,
    T: Endpoint<E::Output>,
{
    /// Pair an endpoint with the extractor that feeds it.
    pub(crate) fn new(endpoint: T, extractor: E) -> Self {
        let endpoint = Arc::new(endpoint);
        Self { endpoint, extractor }
    }
}
mod handler {
    use {
        super::EndpointHandler,
        crate::{
            endpoint::Endpoint,
            error::Error,
            extractor::Extractor,
            future::{Poll, TryFuture},
            handler::Handler,
            input::Input,
        },
        std::sync::Arc,
    };
    impl<E, T> Handler for EndpointHandler<E, T>
    where
        E: Extractor,
        T: Endpoint<E::Output>,
    {
        type Output = T::Output;
        type Error = Error;
        type Handle = EndpointHandle<E, T>;
        // Each call starts a fresh extraction against a clone of the endpoint.
        fn handle(&self) -> Self::Handle {
            EndpointHandle {
                state: State::Init(self.endpoint.clone(), self.extractor.extract()),
            }
        }
    }
    #[doc(hidden)]
    #[allow(missing_debug_implementations)]
    pub struct EndpointHandle<E, T>
    where
        E: Extractor,
        T: Endpoint<E::Output>,
    {
        state: State<E, T>,
    }
    // Two-phase state machine: extract arguments, then drive the endpoint's
    // future to completion.
    #[allow(missing_debug_implementations)]
    enum State<E, T>
    where
        E: Extractor,
        T: Endpoint<E::Output>,
    {
        Init(Arc<T>, E::Extract),
        InFlight(T::Future),
    }
    impl<E, T> TryFuture for EndpointHandle<E, T>
    where
        E: Extractor,
        T: Endpoint<E::Output>,
    {
        type Ok = T::Output;
        type Error = Error;
        #[inline]
        fn poll_ready(&mut self, input: &mut Input<'_>) -> Poll<Self::Ok, Self::Error> {
            loop {
                // Loop so that finishing extraction immediately polls the
                // newly-created in-flight future in the same call.
                self.state = match self.state {
                    State::Init(ref endpoint, ref mut extract) => {
                        let args =
                            futures01::try_ready!(extract.poll_ready(input).map_err(Into::into));
                        State::InFlight(endpoint.apply(args))
                    }
                    State::InFlight(ref mut in_flight) => {
                        return in_flight.poll_ready(input).map_err(Into::into);
                    }
                };
            }
        }
    }
}
|
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote_spanned};
use syn::Ident;
use crate::codegen::unique::{CodeGenUnique, CodeGenUniqueNames};
use crate::validation::{
component::{Child, ChildType, Component},
AllComponents, AllUniques,
};
/// Generate the private `components` module: one import, one store atom and
/// one set of ECS trait impls per component in `all`.
pub fn gen_mod_components(ecs: &Ident, all: &AllComponents, uniques: &AllUniques) -> TokenStream {
    let span = ecs.span();
    let component_imports: Vec<TokenStream> = all.values().map(|c| c.gen_imports()).collect();
    let comp_store_atoms: Vec<TokenStream> = all
        .values()
        .map(|c| c.gen_store_atom(all, uniques))
        .collect();
    let comp_ecs_impls: Vec<TokenStream> = all.values().map(|c| c.gen_ecs_impl(ecs, all)).collect();
    quote_spanned! {span =>
        mod components {
            use core::convert::{TryFrom, TryInto};
            use rl_ecs::key::KeyExt;
            use rl_ecs::stores::Store;
            use rl_ecs::stores::{StoreExBasic, StoreExBasicMut,
                StoreExCreate,StoreExGetParent,StoreExSetParent,
                StoreExGetChild, StoreExPurge};
            use rl_ecs::slotmap::Key;
            use rl_ecs::arrayvec::ArrayVec;
            #(#component_imports)*
            #(#comp_store_atoms)*
            #(#comp_ecs_impls)*
        }
    }
}
/// Derived identifier names for a component's generated items
/// (store field, child struct, parent enum, key type, store struct).
pub trait CodeGenComponentNames {
    fn to_store_name(&self) -> Ident;
    fn to_child_struct_name(&self) -> Ident;
    fn to_parent_enum_key(&self) -> Ident;
    fn to_parent_enum(&self) -> Ident;
    fn to_key_struct_name(&self) -> Ident;
    fn to_store_struct_name(&self) -> Ident;
}
impl CodeGenComponentNames for Component {
    /// e.g. `Foo` -> `__foo_store` (field name in the generated ECS struct).
    fn to_store_name(&self) -> Ident {
        let span = self.r#type.span();
        let lowered = self.name.to_lowercase();
        format_ident!("__{}_store", lowered, span = span)
    }
    /// e.g. `Foo` -> `__FooChildren`.
    fn to_child_struct_name(&self) -> Ident {
        let span = self.r#type.span();
        format_ident!("__{}Children", &self.name, span = span)
    }
    /// e.g. `Foo` -> `__Foo` (variant name inside a parent enum).
    fn to_parent_enum_key(&self) -> Ident {
        let span = self.r#type.span();
        format_ident!("__{}", &self.name, span = span)
    }
    /// e.g. `Foo` -> `__FooParent`.
    fn to_parent_enum(&self) -> Ident {
        let span = self.r#type.span();
        format_ident!("__{}Parent", &self.name, span = span)
    }
    /// e.g. `Foo` -> `FooKey`.
    fn to_key_struct_name(&self) -> Ident {
        let span = self.r#type.span();
        format_ident!("{}Key", &self.name, span = span)
    }
    /// Lowercase the name, then capitalize only its first character:
    /// e.g. `FOO` -> `Foo` -> `__FooStore`.
    fn to_store_struct_name(&self) -> Ident {
        let span = self.r#type.span();
        let lowered = self.name.to_lowercase();
        let capitalized: String = lowered
            .char_indices()
            .map(|(i, ch)| {
                if i == 0 {
                    // Keep only the first char of the uppercase expansion,
                    // falling back to the original char when it has none.
                    ch.to_uppercase().next().unwrap_or(ch)
                } else {
                    ch
                }
            })
            .collect();
        format_ident!("__{}Store", capitalized, span = span)
    }
}
/// Per-component code generation entry points: the `use` line, the store
/// field, its initializer, and the slotmap key type.
pub trait CodeGenComponent {
    fn gen_imports(&self) -> TokenStream;
    fn gen_store(&self) -> TokenStream;
    fn gen_new(&self) -> TokenStream;
    fn gen_key(&self) -> TokenStream;
}
impl CodeGenComponent for Component {
    /// Emit `use super::<Type>;` bringing the component's type into the
    /// generated module.
    ///
    /// BUG FIX: the allow attribute previously named `unused_import`, which
    /// is not a rustc lint — the real lint is `unused_imports` (plural), so
    /// the attribute suppressed nothing and itself triggered an
    /// `unknown_lints` warning in the generated code.
    fn gen_imports(&self) -> TokenStream {
        let span = self.r#type.span();
        let path = &self.r#type;
        quote_spanned! {span =>
            #[allow(unused_imports)]
            use super::#path;
        }
    }
    /// Emit the store field declaration for the generated ECS struct.
    fn gen_store(&self) -> TokenStream {
        let span = self.r#type.span();
        let store_name = self.to_store_name();
        let store_struct_name = self.to_store_struct_name();
        quote_spanned! {span =>
            #store_name: #store_struct_name,
        }
    }
    /// Emit the store field initializer for the generated ECS constructor.
    fn gen_new(&self) -> TokenStream {
        let span = self.r#type.span();
        let store_name = self.to_store_name();
        let store_struct_name = self.to_store_struct_name();
        quote_spanned! {span =>
            #store_name: #store_struct_name::new(),
        }
    }
    /// Emit the slotmap key type plus its `KeyExt` impl (null-key based
    /// `is_some`/`is_none`).
    fn gen_key(&self) -> TokenStream {
        let span = self.r#type.span();
        let key = &self.to_key_struct_name();
        quote_spanned! {span =>
            new_key_type! { pub struct #key; }
            impl KeyExt for super::keys::#key {
                #[inline]
                fn is_some(&self) -> bool { !self.is_none() }
                #[inline]
                fn is_none(&self) -> bool { self.is_null() }
            }
        }
    }
}
// Internal generation steps: the per-component store atom and the trait
// impls wired onto the user's ECS type.
trait CodeGenComponentPriv {
    fn gen_store_atom(&self, all: &AllComponents, uniques: &AllUniques) -> TokenStream;
    fn gen_ecs_impl(&self, ecs: &Ident, all: &AllComponents) -> TokenStream;
    fn gen_ecs_purge_impl(&self, ecs: &Ident, all: &AllComponents) -> TokenStream;
    fn gen_parents_impl(&self, parent: &Ident) -> TokenStream;
    fn gen_parents_enum_impl(&self, parent: &Ident, parents: &[TokenStream]) -> TokenStream;
    fn gen_get_parents_impl(&self, parent_key: &Ident, span: Span) -> TokenStream;
}
/// Code generation for a parent component's child slots (single, fixed-size
/// array, or growable vec).
pub trait CodeGenChild {
    fn to_child_name(&self, all: &AllComponents) -> Ident;
    fn gen_store_entry(&self, all: &AllComponents) -> TokenStream;
    fn gen_new(&self, all: &AllComponents) -> TokenStream;
    fn gen_get_child_impl(
        &self,
        key: &Ident,
        store_name: &Ident,
        all: &AllComponents,
    ) -> TokenStream;
}
impl CodeGenComponentPriv for Component {
    /// Emit `From`/`TryFrom` conversions between this component's key type
    /// and a parent enum variant.
    fn gen_parents_impl(&self, parent: &Ident) -> TokenStream {
        let span = parent.span();
        let enum_key: Ident = self.to_parent_enum_key();
        let parent_key = self.to_key_struct_name();
        quote_spanned! {span =>
            impl From<super::keys::#parent_key> for #parent {
                fn from(k: super::keys::#parent_key) -> Self {
                    Self::#enum_key(k)
                }
            }
            impl TryFrom<#parent> for super::keys::#parent_key {
                type Error = ();
                fn try_from(p: #parent) -> Result<Self, Self::Error> {
                    match p {
                        #parent::#enum_key(k) => Ok(k),
                        _ => Err(())
                    }
                }
            }
        }
    }
    /// Emit the parent enum itself (one variant per possible parent plus
    /// `None`); empty when the component has no parents.
    fn gen_parents_enum_impl(&self, parent: &Ident, parents: &[TokenStream]) -> TokenStream {
        let span = self.r#type.span();
        if parents.is_empty() {
            quote_spanned! {span => }
        } else {
            quote_spanned! {span =>
                #[doc(hidden)]
                #[derive(Copy,Clone,Eq,PartialEq,Ord,PartialOrd,Hash,Debug)]
                pub(super) enum #parent {
                    None,
                    #(#parents)*
                }
                #[doc(hidden)]
                impl #parent {
                    #[doc(hidden)]
                    #[inline]
                    fn is_none(&self) -> bool {
                        if *self == #parent::None {true} else {false}
                    }
                }
                #[doc(hidden)]
                impl Default for #parent {
                    #[inline]
                    fn default() -> Self {
                        Self::None
                    }
                }
            }
        }
    }
    /// Emit this component's store atom: the children struct, the store
    /// newtype, its basic/create impls, and parent/child accessor impls.
    /// Parents are gathered from both regular components and uniques.
    fn gen_store_atom(&self, all: &AllComponents, uniques: &AllUniques) -> TokenStream {
        let span = self.r#type.span();
        let typ = &self.r#type;
        let key = &self.to_key_struct_name();
        let child_atom_name = self.to_child_struct_name();
        let store_struct_name = self.to_store_struct_name();
        let children: Vec<_> = self
            .children
            .iter()
            .map(|c| c.gen_store_entry(all))
            .collect();
        let children_new: Vec<_> = self.children.iter().map(|c| c.gen_new(all)).collect();
        let children_impl: Vec<_> = self
            .children
            .iter()
            .map(|c| c.gen_get_child_impl(key, &store_struct_name, all))
            .collect();
        let parent = self.to_parent_enum();
        // One enum variant per component (then unique) that lists us as a child.
        let mut parents: Vec<TokenStream> = all
            .values()
            .filter(|c| c.children.iter().any(|c| c.id == self.id))
            .map(|c| {
                let enum_key: Ident = c.to_parent_enum_key();
                let key = c.to_key_struct_name();
                quote_spanned! {span =>
                    #enum_key(super::keys::#key),
                }
            })
            .collect();
        parents.extend(
            uniques
                .values()
                .filter(|c| c.children.iter().any(|c| c.id == self.id))
                .map(|c| {
                    let enum_key: Ident = c.to_parent_enum_key();
                    let key = c.to_key_struct_name();
                    quote_spanned! {span =>
                        #enum_key(super::keys::#key),
                    }
                })
                .collect::<Vec<TokenStream>>(),
        );
        let parent_enum_impl = self.gen_parents_enum_impl(&parent, &parents);
        let mut parents_impl: Vec<TokenStream> = all
            .values()
            .filter(|c| c.children.iter().any(|c| c.id == self.id))
            .map(|c| c.gen_parents_impl(&parent))
            .collect();
        parents_impl.extend(
            uniques
                .values()
                .filter(|c| c.children.iter().any(|c| c.id == self.id))
                .map(|c| c.gen_parents_impl(&parent))
                .collect::<Vec<TokenStream>>(),
        );
        let mut get_parents_impl: Vec<TokenStream> = all
            .values()
            .filter(|c| c.children.iter().any(|c| c.id == self.id))
            .map(|c| {
                // Use the span of the matching child declaration for diagnostics.
                let span = c
                    .children
                    .iter()
                    .find_map(|c| (c.id == self.id).then(|| c.span))
                    .unwrap();
                self.gen_get_parents_impl(&c.to_key_struct_name(), span)
            })
            .collect();
        get_parents_impl.extend(
            uniques
                .values()
                .filter(|c| c.children.iter().any(|c| c.id == self.id))
                .map(|c| {
                    let span = c
                        .children
                        .iter()
                        .find_map(|c| (c.id == self.id).then(|| c.span))
                        .unwrap();
                    self.gen_get_parents_impl(&c.to_key_struct_name(), span)
                })
                .collect::<Vec<TokenStream>>(),
        );
        // Only emit the __parent field/initializer when a parent enum exists.
        let parent_enum_type = if parents.is_empty() {
            quote_spanned! {span => }
        } else {
            quote_spanned! {span =>
                __parent: #parent,
            }
        };
        let parent_enum_init = if parents.is_empty() {
            quote_spanned! {span => }
        } else {
            quote_spanned! {span =>
                __parent: #parent::None,
            }
        };
        quote_spanned! {span =>
            #parent_enum_impl
            #[doc(hidden)]
            pub(super) struct #child_atom_name {
                #parent_enum_type
                #(#children)*
            }
            impl #child_atom_name {
                #[inline]
                pub fn new() -> Self {
                    Self {
                        #parent_enum_init
                        #(#children_new)*
                    }
                }
            }
            #[doc(hidden)]
            pub(super) struct #store_struct_name(Store<#typ,#child_atom_name,super::keys::#key>);
            impl #store_struct_name {
                pub fn new() -> Self {
                    Self (
                        Store::new(),
                    )
                }
            }
            impl StoreExBasic<#typ, super::keys::#key> for #store_struct_name {
                fn get(&self, k: super::keys::#key) -> Option<&#typ> {
                    self.0.bin.get(k)
                }
                fn is_empty(&self) -> bool {
                    self.0.bin.is_empty()
                }
            }
            impl StoreExBasicMut<#typ, super::keys::#key> for #store_struct_name {
                fn get_mut(&mut self, k: super::keys::#key) -> Option<&mut #typ> {
                    self.0.bin.get_mut(k)
                }
            }
            impl StoreExCreate<#typ, super::keys::#key> for #store_struct_name {
                fn create(&mut self, t: #typ) -> super::keys::#key {
                    let key = self.0.bin.insert(t);
                    self.0.id.insert(key, #child_atom_name::new());
                    key
                }
                fn remove(&mut self, key: super::keys::#key) -> Option<()> {
                    self.0.id.remove(key);
                    self.0.bin.remove(key).map(|_| ())
                }
            }
            #(#parents_impl)*
            #(#children_impl)*
            #(#get_parents_impl)*
        }
    }
    /// Emit `StoreExPurge` for the ECS: detach from any parent, recursively
    /// purge all children, then remove the entity itself.
    fn gen_ecs_purge_impl(&self, ecs: &Ident, all: &AllComponents) -> TokenStream {
        let span = self.r#type.span();
        let key = &self.to_key_struct_name();
        let store_name = self.to_store_name();
        let clear_parents: Vec<TokenStream> = all
            .values()
            .filter(|c| c.children.iter().any(|c| c.id == self.id))
            .map(|c| {
                let parent_key = c.to_key_struct_name();
                quote_spanned! {span =>
                    let parent_key: Option<super::keys::#parent_key> = self.get_parent(key);
                    if let Some(parent_key) = parent_key {
                        self.clear_parent(key, parent_key);
                    }
                }
            })
            .collect();
        let clear_children: Vec<TokenStream> = self
            .children
            .iter()
            .map(|c| {
                let child_key = all.get(&c.id).unwrap().to_key_struct_name();
                quote_spanned! {span =>
                    let mut counter = 0;
                    loop {
                        let child: Option<super::keys::#child_key> =
                            self.get_children(key)
                                .map(|k| k.map(|k| *k).nth(counter))
                                .flatten();
                        if let Some(child_key) = child {
                            self.purge(child_key);
                            counter += 1;
                        }
                        else {break;}
                    }
                }
            })
            .collect();
        quote_spanned! {span =>
            impl StoreExPurge<super::keys::#key> for super::#ecs {
                fn purge(&mut self, key: super::keys::#key) {
                    #(#clear_parents)*
                    #(#clear_children)*
                    self.#store_name.remove(key);
                }
            }
        }
    }
    /// Emit the ECS-level trait impls for this component: basic get/get_mut,
    /// create/remove, child accessors, parent accessors, and purge — all
    /// delegating to the per-component store field.
    fn gen_ecs_impl(&self, ecs: &Ident, all: &AllComponents) -> TokenStream {
        let span = self.r#type.span();
        let typ = &self.r#type;
        let key = &self.to_key_struct_name();
        let store_name = self.to_store_name();
        let get_child_vec: Vec<_> = self
            .children
            .iter()
            .map(|c| {
                let child_key = all.get(&c.id).unwrap().to_key_struct_name();
                quote_spanned! {span =>
                    impl StoreExGetChild<super::keys::#key, super::keys::#child_key> for super::#ecs {
                        fn get_children(&self, parent: super::keys::#key)
                            -> Option<std::slice::Iter<super::keys::#child_key>> {
                            self.#store_name.get_children(parent)
                        }
                        fn set_child(&mut self, parent: super::keys::#key, child: super::keys::#child_key) -> bool {
                            self.#store_name.set_child(parent, child)
                        }
                        fn clear_child(&mut self, parent: super::keys::#key, child: super::keys::#child_key) -> bool {
                            self.#store_name.clear_child(parent, child)
                        }
                    }
                }
            })
            .collect();
        let parents_impl: Vec<TokenStream> = self
            .children
            .iter()
            .map(|c| {
                let child_key = all.get(&c.id).unwrap().to_key_struct_name();
                let c_store_name = all.get(&c.id).unwrap().to_store_name();
                quote_spanned! {span =>
                    impl StoreExGetParent<super::keys::#child_key, super::keys::#key> for super::#ecs {
                        #[inline]
                        fn get_parent(&self, child: super::keys::#child_key) -> Option<super::keys::#key> {
                            self.#c_store_name.get_parent(child)
                        }
                    }
                    #[doc(hidden)]
                    impl StoreExSetParent<super::keys::#child_key, super::keys::#key> for super::#ecs {
                        #[doc(hidden)]
                        #[inline]
                        fn clear_parent(&mut self, child: super::keys::#child_key, parent: super::keys::#key) -> bool {
                            self.#c_store_name.clear_parent(child, parent)
                        }
                        #[doc(hidden)]
                        #[inline]
                        fn set_parent(&mut self, child: super::keys::#child_key, parent: super::keys::#key) -> bool {
                            self.#c_store_name.set_parent(child, parent)
                        }
                    }
                }
            })
            .collect();
        let purge_impl = self.gen_ecs_purge_impl(ecs, all);
        quote_spanned! {span =>
            impl StoreExBasic<#typ, super::keys::#key> for super::#ecs {
                #[inline]
                fn get(&self, k: super::keys::#key) -> Option<&#typ> {
                    self.#store_name.get(k)
                }
                #[inline]
                fn is_empty(&self) -> bool { self.#store_name.is_empty() }
            }
            impl StoreExBasicMut<#typ, super::keys::#key> for super::#ecs {
                #[inline]
                fn get_mut(&mut self, k: super::keys::#key) -> Option<&mut #typ> {
                    self.#store_name.get_mut(k)
                }
            }
            impl StoreExCreate<#typ, super::keys::#key> for super::#ecs {
                #[inline]
                fn create(&mut self, t: #typ) -> super::keys::#key {
                    self.#store_name.create(t)
                }
                #[inline]
                fn remove(&mut self, key: super::keys::#key) -> Option<()> {
                    self.#store_name.remove(key)
                }
            }
            #(#parents_impl)*
            #(#get_child_vec)*
            #purge_impl
        }
    }
    /// Emit `StoreExGetParent`/`StoreExSetParent` on the component's own
    /// store, backed by the `__parent` field of the children atom.
    fn gen_get_parents_impl(&self, parent_key: &Ident, span: Span) -> TokenStream {
        let store_name = self.to_store_struct_name();
        let key = self.to_key_struct_name();
        quote_spanned! {span =>
            impl StoreExGetParent<super::keys::#key, super::keys::#parent_key> for #store_name {
                #[inline]
                fn get_parent(&self, child: super::keys::#key) -> Option<super::keys::#parent_key> {
                    self.0.id.get(child).map(|id| id.__parent.try_into().ok() ).flatten()
                }
            }
            #[doc(hidden)]
            impl StoreExSetParent<super::keys::#key, super::keys::#parent_key> for #store_name {
                #[inline]
                #[doc(hidden)]
                fn clear_parent(&mut self, child: super::keys::#key, parent: super::keys::#parent_key) -> bool {
                    self.0.id.get_mut(child).map(|id| {
                        if id.__parent == parent.into() {
                            id.__parent = Default::default();
                            true
                        }
                        else {false}
                    }).unwrap_or(false)
                }
                #[inline]
                #[doc(hidden)]
                fn set_parent(&mut self, child: super::keys::#key, parent: super::keys::#parent_key) -> bool {
                    self.0.id.get_mut(child).map(|id| {
                        if id.__parent.is_none() {
                            id.__parent = parent.into();
                            true
                        }
                        else {false}
                    }).unwrap_or(false)
                }
            }
        }
    }
}
impl CodeGenChild for Child {
    /// Field name for this child slot, e.g. component `Foo` -> `__foo`.
    fn to_child_name(&self, all: &AllComponents) -> Ident {
        let span = self.span;
        let name = &all.get(&self.id).unwrap().name;
        format_ident!("__{}", name.to_lowercase(), span = span)
    }
    /// Initializer for the child slot: null-key array, ArrayVec, or Vec
    /// depending on the declared child type.
    fn gen_new(&self, all: &AllComponents) -> TokenStream {
        let span = self.span;
        let name = self.to_child_name(all);
        match self.child_type {
            ChildType::Single => {
                let key = all.get(&self.id).unwrap().to_key_struct_name();
                quote_spanned! {span =>
                    #name: [super::keys::#key::null()],
                }
            }
            ChildType::Array(_) => {
                quote_spanned! {span =>
                    #name: ArrayVec::new(),
                }
            }
            ChildType::Vec => {
                quote_spanned! {span =>
                    #name: Vec::new(),
                }
            }
        }
    }
    /// Field declaration for the child slot inside the children atom struct.
    fn gen_store_entry(&self, all: &AllComponents) -> TokenStream {
        let span = self.span;
        let name = self.to_child_name(all);
        let key = all.get(&self.id).unwrap().to_key_struct_name();
        let typ = quote_spanned! {span => super::keys::#key};
        match self.child_type {
            ChildType::Single => {
                // 1-element array so all three variants iterate uniformly.
                quote_spanned! {span => #name: [super::keys::#key;1], }
            }
            ChildType::Array(sz) => {
                quote_spanned! {span => #name: ArrayVec::<#typ,#sz>, }
            }
            ChildType::Vec => {
                quote_spanned! {span => #name: Vec<#typ>, }
            }
        }
    }
    /// Emit `StoreExGetChild` on the parent's store for this child slot:
    /// get_children / set_child / clear_child, specialized per slot kind.
    fn gen_get_child_impl(
        &self,
        key: &Ident,
        store_name: &Ident,
        all: &AllComponents,
    ) -> TokenStream {
        let span = self.span;
        let child_key = all.get(&self.id).unwrap().to_key_struct_name();
        let cname = self.to_child_name(all);
        // Single: null key means empty; Array/Vec: emptiness check.
        let get_child_body = match self.child_type {
            ChildType::Single => quote_spanned! {span =>
                if cidt.#cname[0].is_null() {None} else { Some(cidt.#cname.iter())}
            },
            ChildType::Array(_) | ChildType::Vec => quote_spanned! {span =>
                if cidt.#cname.is_empty() {None} else { Some(cidt.#cname.iter())}
            },
        };
        // set_child refuses duplicates and full/occupied slots.
        let set_child_body = match self.child_type {
            ChildType::Single => quote_spanned! { span =>
                if id.#cname[0].is_null() { id.#cname[0] = child; true }
                else { false }
            },
            ChildType::Array(_) => quote_spanned! {span =>
                if id.#cname.contains(&child) || id.#cname.is_full() { false }
                else { id.#cname.push(child); true }
            },
            ChildType::Vec => quote_spanned! {span =>
                if id.#cname.contains(&child) { false }
                else { id.#cname.push(child); true }
            },
        };
        // clear_child: Single resets to the null key; Array/Vec swap_remove
        // the matching entry (order is not preserved).
        let clear_child_body = match self.child_type {
            ChildType::Single => quote_spanned! {span =>
                if id.#cname[0].is_null() { false }
                else { id.#cname[0] = super::keys::#child_key::null(); true }
            },
            ChildType::Array(_) | ChildType::Vec => quote_spanned! {span =>
                if id.#cname.contains(&_child) {
                    let idx = id.#cname.iter().enumerate().find(|(_i,_k)| **_k == _child).map(|(_i,_k)|_i);
                    if let Some(idx) = idx { id.#cname.swap_remove(idx); true }
                    else {false}
                } else { false }
            },
        };
        quote_spanned! {span =>
            impl StoreExGetChild<super::keys::#key, super::keys::#child_key> for #store_name {
                #[inline]
                fn get_children(&self, parent: super::keys::#key) -> Option<std::slice::Iter<super::keys::#child_key>> {
                    self.0.id.get(parent).map(|cidt|
                        #get_child_body
                    ).flatten()
                }
                #[inline]
                #[doc(hidden)]
                fn set_child(&mut self, parent: super::keys::#key, child: super::keys::#child_key) -> bool {
                    self.0.id.get_mut(parent).map(|id| {
                        #set_child_body
                    }).unwrap_or(false)
                }
                #[inline]
                #[doc(hidden)]
                fn clear_child(&mut self, parent: super::keys::#key, _child: super::keys::#child_key) -> bool {
                    self.0.id.get_mut(parent).map(|id| {
                        #clear_child_body
                    }).unwrap_or(false)
                }
            }
        }
    }
}
|
extern crate macrotis;
#[macro_use] extern crate clap;
use macrotis::r53;
use macrotis::state;
use macrotis::resource;
use macrotis::compare;
use macrotis::{MacrotisConfig};
use macrotis::resource::{Resource, ResHash};
use macrotis::tinydns;
use std::collections::HashMap;
//use macrotis::MacrotisRecord;
//use std::env;
use std::fs::{File, metadata};
use std::path::Path;
use std::io::{BufReader};
use clap::App;
// Main - Use Clap to build CLI, check options, etc.
// Entry point: parse the CLI, load config and local records, reconcile
// state with remote, and (for `execute`) push the resulting changes.
fn main() {
    let yaml = load_yaml!("cli.yml");
    let matches = App::from_yaml(yaml).version(clap::crate_version!()).get_matches();
    // Safe to simply unwrap this value since it's marked as 'required'
    let input = matches.value_of("input").unwrap();
    // If no config file was specified, default to 'macrotis.conf'
    let conffile = matches.value_of("config").unwrap_or("macrotis.conf");
    // Attempt to load the config file, exit on failure
    let config = match load_config(conffile) {
        Some(x) => x,
        None => {
            println!("Error loading config file {}. Bailing out.", conffile);
            std::process::exit(1);
        }
    };
    // Check subcommand and bail if none provided.
    // 0 = lint (validate input only), 1 = noop (diff, don't push),
    // 2 = execute (diff and push).
    let sub = match matches.subcommand_name() {
        Some("lint") => 0,
        Some("noop") => 1,
        Some("execute") => 2,
        _ => {
            println!("Missing subcommand. Use 'macrotis --help' for usage");
            std::process::exit(1);
        }
    };
    // Load up local records based on the 'input' argument provided.
    // Bail out on error
    let local_recs = match load_local(&input, &config) {
        Some(x) => x,
        None => {
            println!("Error processing input file(s)");
            std::process::exit(1);
        }
    };
    println!("Processed {} local records.", local_recs.0.len());
    // Exit now if 'lint' subcommand provided
    if sub == 0 {
        return;
    }
    // Load and parse statefile to populate 'state' - Note that state could
    // be empty if this is the first run!
    let st = match state::load_state(&config) {
        Some(x) => x,
        None => {
            println!("Error processing statefile, bailing out.");
            std::process::exit(1);
        }
    };
    println!("Statefile: {}", st);
    let mut state_recs = st.records;
    // Load and parse remote provider zones to populate 'remote' - Note that
    // these could also be empty! Bail out on errors.
    let remote_recs = match load_remote(&config) {
        Some(x) => x,
        None => {
            println!("Error downloading remote records, bailing out.");
            std::process::exit(1);
        }
    };
    println!("Got {} resources from remote", remote_recs.0.len());
    // Compare statefile records with remote records to ensure state accurately
    // reflects the 'source of truth'
    compare::state_remote(&mut state_recs, &remote_recs);
    // Compare local records with updated statefile records to see what changes
    // need to be sent to remote.
    let (mut new_recs, mut upd_recs, del_recs) = compare::local_state(&local_recs, &state_recs);
    // Compare the 'new' change set to the remote records, since it contains
    // records the statefile is unaware of but which might already exist
    // remotely.
    compare::new_remote(&mut new_recs, &mut upd_recs, &remote_recs);
    // Print out changes to be pushed
    output_changes(&new_recs, &upd_recs, &del_recs, &state_recs);
    // Exit now if 'noop' subcommand provided
    if sub != 2 {
        return;
    }
    // Turn those ResHashes into something a little more palatable -
    // simple &str,Vec<Resource> hashes where the &str part matches
    // an AWS action (CREATE, UPSERT, DELETE).
    let mut to_push: HashMap<&str, Vec<Resource>> = HashMap::new();
    to_push.insert("CREATE", resource::hash_to_vec(new_recs));
    to_push.insert("UPSERT", resource::hash_to_vec(upd_recs));
    to_push.insert("DELETE", resource::hash_to_vec(del_recs));
    // Finally, send the changes up to the remote provider
    match push_remote(&config, &to_push) {
        true => {
            println!("Successfully pushed changes.");
        },
        false => {
            println!("Error pushing changes, bailing out.");
            std::process::exit(1);
        }
    };
    // Make the current local into the new state and write the new statefile
    state::save_state(&config, local_recs);
}
// Load in a config file and deserialize it into a MacrotisConfig struct
// Load a config file and deserialize it into a MacrotisConfig struct.
// Prints a diagnostic and returns None if the file cannot be opened or
// does not parse as valid config JSON.
fn load_config(fname: &str) -> Option<MacrotisConfig> {
    // Open the file, reporting failures before bailing with None.
    let file = File::open(fname)
        .map_err(|e| println!("Error opening file {}: {}", fname, e))
        .ok()?;
    // Deserialize straight from a buffered reader.
    let parsed: MacrotisConfig = serde_json::from_reader(BufReader::new(file))
        .map_err(|e| println!("Error parsing config JSON: {}", e))
        .ok()?;
    Some(parsed)
}
// Load and parse input file(s)
// config is needed for TinyDNSRecord::find_zone_id
// Load and parse input file(s) into a ResHash.
//
// `fname` may be a single tinydns data file or a directory; for a
// directory, every `*.tiny` file directly inside it is parsed. `config`
// supplies the zone list used to match records to zones
// (TinyDNSRecord::find_zone_id).
//
// Returns None on any unrecoverable error. When reading a directory,
// per-entry failures set a flag and processing continues, so every broken
// file is reported in a single run; the function still returns None at
// the end if anything failed.
fn load_local(fname: &str, config: &MacrotisConfig) -> Option<ResHash> {
    // Check if input is a dir or a file using std::fs::metadata.
    // Fix: this used to call process::exit(1) directly, unlike every other
    // error path here; return None instead and let the caller decide how
    // to terminate (it already prints and exits on None).
    let meta = match metadata(fname) {
        Ok(x) => x,
        Err(e) => {
            println!("Error reading {}: {}", fname, e);
            return None;
        }
    };
    // Set when a directory entry fails; we keep going to surface all
    // problems, but return None at the end.
    let mut error_flag = false;
    // All TinyDNS records gathered from the input file(s).
    let mut tdns_vec = Vec::new();
    if meta.is_file() {
        println!("Processing {}", &fname);
        match tinydns::from_file(&fname) {
            Some(mut recs) => tdns_vec.append(&mut recs),
            None => {
                println!("Error processing input file {}", fname);
                return None;
            }
        }
    } else {
        // Walk the directory and collect records from every *.tiny file.
        let path = Path::new(&fname);
        let dir_iter = match std::fs::read_dir(&path) {
            Ok(x) => x,
            Err(_) => {
                println!("Error getting iterator for {}", path.display());
                return None;
            }
        };
        for entry in dir_iter {
            let f = match entry {
                Ok(f) => f,
                Err(_) => {
                    println!("Error getting entry from iterator");
                    error_flag = true;
                    continue;
                }
            };
            let fpath = f.path();
            // Sub-directories are skipped; the scan is not recursive.
            if fpath.is_dir() {
                continue;
            }
            let pathstring = match fpath.to_str() {
                Some(x) => x,
                None => {
                    println!("Error getting path string for {:?}", fpath);
                    error_flag = true;
                    continue;
                }
            };
            // Only files with a .tiny extension are treated as input.
            match fpath.extension() {
                Some(ext) if ext == "tiny" => {
                    println!("Processing {}...", &pathstring);
                    match tinydns::from_file(&pathstring) {
                        Some(mut recs) => tdns_vec.append(&mut recs),
                        None => {
                            println!("Error processing {}", pathstring);
                            error_flag = true;
                        }
                    }
                }
                _ => continue,
            }
        }
    }
    // Shared tail (previously duplicated in both branches): convert the
    // accumulated TinyDNS records into Resources and build the ResHash.
    println!("Converting TinyDNS records...");
    let converted = match resource::vec_from_tiny(&tdns_vec, &config.zones) {
        Some(x) => x,
        None => {
            println!("Error converting TDRs to Resources");
            return None;
        }
    };
    let retval = match resource::build_reshash(converted) {
        Some(x) => x,
        None => {
            println!("Error building ResHash");
            return None;
        }
    };
    if error_flag {
        None
    } else {
        Some(retval)
    }
}
// Load and parse remote records
// Fetch all records from the remote provider for every configured zone
// and assemble them into a ResHash. A zone with no records only produces
// a notice; failing to build the hash yields None.
fn load_remote(config: &MacrotisConfig) -> Option<ResHash> {
    let provider = &config.provider;
    let mut fetched = Vec::new();
    for zone in &config.zones {
        if let Some(mut recs) = r53::bulk_fetch(provider, &zone.id) {
            fetched.append(&mut recs);
        } else {
            println!("No records for zone {}", zone.name);
        }
    }
    match resource::build_reshash(fetched) {
        Some(hash) => Some(hash),
        None => {
            println!("Error building ResHash");
            None
        }
    }
}
// Push records up to remote
// 'resources' should be a HashMap where the key is an action to take
// (create, upsert, delete), and the values are Vecs of Resources
// Push record changes up to the remote provider.
//
// `resources` maps an action name ("CREATE", "UPSERT", "DELETE") to the
// Resources that action applies to. Changes are regrouped per hosted
// zone, because Route53 accepts a batch mixing change types as long as
// they all target a single HostedZone. Returns false if any zone's batch
// failed; remaining zones are still attempted.
fn push_remote(config: &MacrotisConfig, resources: &HashMap<&str, Vec<Resource>>) -> bool {
    let mut retval = true;
    let prov = &config.provider;
    // zone id -> all changes destined for that zone, across every action.
    let mut by_zone: HashMap<&str, Vec<rusoto_route53::Change>> = HashMap::new();
    for (action, res) in resources {
        for rec in res {
            // Borrow the zone id as &str; the borrow lives as long as
            // `resources` does, so no clone is needed (the old `.clone()`
            // on a `&str` was a no-op reference copy anyway).
            let z = &rec.zone_id[..];
            let chg = r53::resource_to_change(action, rec);
            by_zone.entry(z).or_insert_with(Vec::new).push(chg);
        }
    }
    // One bulk_put per hosted zone.
    for (zoneid, chgvec) in by_zone {
        match r53::bulk_put(prov, chgvec, zoneid) {
            Ok(x) => {
                println!("Change IDs: {}", x);
            }
            Err(e) => {
                println!("Error! {}", e);
                retval = false;
            }
        };
    }
    retval
}
// Iterate through the ResHashes of changes and print out what needs to
// be done to bring Remote in line with Local. Returns 'false' if there
// are no changes to push.
// Print what must be done to bring Remote in line with Local.
//
// `ne`/`up`/`de` are the records to add/update/delete; `st` is the
// current state, consulted so updates can show old -> new values.
// Returns false when there are no changes to push.
fn output_changes(ne: &ResHash, up: &ResHash, de: &ResHash, st: &ResHash) -> bool {
    for v in ne.0.values() {
        println!("[ADD] {} {}\t [ ] -> {:?}", &v.rtype, &v.name, &v.records);
    }
    for (k, v) in &up.0 {
        // A key missing from state means state is inconsistent; warn and
        // keep going rather than abort the whole report.
        let oldres = match st.0.get(k) {
            Some(x) => x,
            None => {
                println!("Failed to get value for key {} in state", k);
                continue;
            }
        };
        println!("[UPD] {} {}\t {:?} -> {:?}", &v.rtype, &v.name, &oldres.records, &v.records);
    }
    for v in de.0.values() {
        println!("[DEL] {} {}\t {:?} -> [ ]", &v.rtype, &v.name, &v.records);
    }
    // Idiomatic emptiness check (was `len() < 1` on each map).
    if ne.0.is_empty() && up.0.is_empty() && de.0.is_empty() {
        println!("No changes detected.");
        false
    } else {
        true
    }
}
|
use bitbuffer::{BitRead, BitWrite, BitWriteSized, BitWriteStream, LittleEndian};
use serde::{Deserialize, Serialize};
use crate::{ReadResult, Stream};
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
/// Voice-init message: announces the voice codec and quality in use.
pub struct VoiceInitMessage {
    // Codec identifier string, e.g. "vaudio_celt".
    codec: String,
    // Quality level; 255 marks the "v2" format with an explicit rate.
    quality: u8,
    // Sample rate; only present on the wire when quality == 255, otherwise
    // derived from the codec (see the BitRead impl below).
    sampling_rate: u16,
}
impl BitRead<'_, LittleEndian> for VoiceInitMessage {
    // Hand-written read: `sampling_rate` is conditional on the wire, so
    // this impl can't be derived.
    fn read(stream: &mut Stream) -> ReadResult<Self> {
        let codec = stream.read()?;
        let quality = stream.read()?;
        let sampling_rate = if quality == 255 {
            // v2 packets have variable rate
            stream.read()?
        } else if codec == "vaudio_celt" {
            // legacy sample rate for celt
            22050
        } else {
            // legacy sample rate for non-celt
            11025
        };
        Ok(VoiceInitMessage {
            codec,
            quality,
            sampling_rate,
        })
    }
}
impl BitWrite<LittleEndian> for VoiceInitMessage {
    // Mirror of the custom `BitRead`: the sample rate is only serialized
    // for quality-255 ("v2") packets; legacy rates are implied by the codec.
    fn write(&self, stream: &mut BitWriteStream<LittleEndian>) -> ReadResult<()> {
        self.codec.write(stream)?;
        self.quality.write(stream)?;
        if self.quality == 255 {
            self.sampling_rate.write(stream)?;
        }
        Ok(())
    }
}
#[test]
fn test_voice_init_roundtrip() {
    // Legacy packet: rate is implied by the codec, not serialized, so the
    // value here must match what `read` reconstructs (11025 for non-celt).
    crate::test_roundtrip_write(VoiceInitMessage {
        codec: "foo".into(),
        quality: 0,
        sampling_rate: 11025,
    });
    // v2 packet (quality 255): rate is serialized explicitly, so any value
    // round-trips.
    crate::test_roundtrip_write(VoiceInitMessage {
        codec: "foo".into(),
        quality: 255,
        sampling_rate: 12,
    });
}
/// Voice-data message: a chunk of compressed voice payload.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(BitRead, BitWrite, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[endianness = "LittleEndian"]
#[serde(bound(deserialize = "'a: 'static"))]
pub struct VoiceDataMessage<'a> {
    // NOTE(review): presumably the sending client's slot index — confirm
    // against the protocol documentation.
    client: u8,
    proximity: u8,
    // Payload length; drives the `#[size]` of `data` below.
    length: u16,
    #[size = "length"]
    data: Stream<'a>,
}
/// Parse-sounds message: one or more sound events packed into a substream.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(bound(deserialize = "'a: 'static"))]
pub struct ParseSoundsMessage<'a> {
    // Reliable packets always carry exactly one sound (see BitRead impl).
    pub reliable: bool,
    pub num: u8,
    // Length of `data` in bits; field width on the wire depends on
    // `reliable` (8 bits when reliable, 16 otherwise).
    pub length: u16,
    pub data: Stream<'a>,
}
impl<'a> BitRead<'a, LittleEndian> for ParseSoundsMessage<'a> {
    // Hand-written read: the field layout depends on the `reliable` flag,
    // so this impl can't be derived.
    fn read(stream: &mut Stream<'a>) -> ReadResult<Self> {
        let reliable = stream.read()?;
        // `num` is only on the wire for unreliable packets; reliable ones
        // implicitly carry a single sound.
        let num = if reliable { 1u8 } else { stream.read()? };
        // Reliable packets encode the length in 8 bits, unreliable ones in
        // the full 16.
        let length = if reliable {
            stream.read_sized::<u16>(8)?
        } else {
            stream.read()?
        };
        // `length` counts bits, not bytes (see the roundtrip test, which
        // uses `bit_len()`).
        let data = stream.read_sized(length as usize)?;
        Ok(ParseSoundsMessage {
            reliable,
            num,
            length,
            data,
        })
    }
}
impl<'a> BitWrite<LittleEndian> for ParseSoundsMessage<'a> {
    /// Serializes the message, mirroring the custom `BitRead`: `num` is
    /// only written for unreliable packets, and the length field is 8 bits
    /// wide for reliable packets versus the full 16 otherwise.
    fn write(&self, stream: &mut BitWriteStream<LittleEndian>) -> ReadResult<()> {
        self.reliable.write(stream)?;
        if self.reliable {
            self.length.write_sized(stream, 8)?;
        } else {
            self.num.write(stream)?;
            self.length.write(stream)?;
        }
        self.data.write(stream)?;
        Ok(())
    }
}
#[test]
fn test_parse_sounds_roundtrip() {
    use bitbuffer::BitReadBuffer;
    let inner = BitReadBuffer::new(&[1, 2, 3, 4, 5, 6], LittleEndian);
    // Unreliable path: `num` and a 16-bit length are on the wire.
    crate::test_roundtrip_write(ParseSoundsMessage {
        reliable: false,
        num: 0,
        length: inner.bit_len() as u16,
        data: inner.clone().into(),
    });
    // Reliable path: `num` is implicit (must be 1) and length is 8 bits.
    crate::test_roundtrip_write(ParseSoundsMessage {
        reliable: true,
        num: 1,
        length: inner.bit_len() as u16,
        data: inner.into(),
    });
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use borrow_check::borrow_set::BorrowData;
use borrow_check::error_reporting::UseSpans;
use borrow_check::nll::region_infer::Cause;
use borrow_check::{Context, MirBorrowckCtxt, WriteKind};
use rustc::ty::{self, Region, TyCtxt};
use rustc::mir::{FakeReadCause, Local, Location, Mir, Operand};
use rustc::mir::{Place, StatementKind, TerminatorKind};
use rustc_errors::DiagnosticBuilder;
use syntax_pos::Span;
mod find_use;
/// Structured explanation for *why* a borrow is still live at the point
/// of a borrowck error; rendered into the diagnostic by
/// `add_explanation_to_diagnostic`.
pub(in borrow_check) enum BorrowExplanation<'tcx> {
    /// The borrow is used again later, at the given span.
    UsedLater(LaterUseKind, Span),
    /// The borrow is used again in a later iteration of an enclosing loop.
    UsedLaterInLoop(LaterUseKind, Span),
    /// The borrow must still be live when a local is dropped.
    UsedLaterWhenDropped {
        drop_loc: Location,
        dropped_local: Local,
        // Whether to also note that values drop in reverse declaration order.
        should_note_order: bool,
    },
    /// The borrowed value must be valid for the given free region.
    MustBeValidFor(Region<'tcx>),
    /// No better explanation could be computed.
    Unexplained,
}
/// The kind of later use that keeps a borrow live; selects the wording of
/// the "borrow later used" diagnostic label.
#[derive(Clone, Copy)]
pub(in borrow_check) enum LaterUseKind {
    /// Captured by a closure.
    ClosureCapture,
    /// Used as an argument (or callee) of a call.
    Call,
    /// A `FakeRead` inserted for an optimized `let` pattern.
    FakeLetRead,
    /// Any other use.
    Other,
}
impl<'tcx> BorrowExplanation<'tcx> {
    /// Attaches this explanation to `err` as extra span labels and notes.
    ///
    /// `borrow_desc` is a short (possibly empty) prefix spliced in front of
    /// each generated message.
    pub(in borrow_check) fn add_explanation_to_diagnostic<'cx, 'gcx>(
        &self,
        tcx: TyCtxt<'cx, 'gcx, 'tcx>,
        mir: &Mir<'tcx>,
        err: &mut DiagnosticBuilder<'_>,
        borrow_desc: &str,
    ) {
        match *self {
            BorrowExplanation::UsedLater(later_use_kind, var_or_use_span) => {
                let message = match later_use_kind {
                    LaterUseKind::ClosureCapture => "borrow later captured here by closure",
                    LaterUseKind::Call => "borrow later used by call",
                    LaterUseKind::FakeLetRead => "borrow later stored here",
                    LaterUseKind::Other => "borrow later used here",
                };
                err.span_label(var_or_use_span, format!("{}{}", borrow_desc, message));
            },
            BorrowExplanation::UsedLaterInLoop(later_use_kind, var_or_use_span) => {
                let message = match later_use_kind {
                    LaterUseKind::ClosureCapture => {
                        "borrow captured here by closure, in later iteration of loop"
                    },
                    LaterUseKind::Call => "borrow used by call, in later iteration of loop",
                    LaterUseKind::FakeLetRead => "borrow later stored here",
                    LaterUseKind::Other => "borrow used here, in later iteration of loop",
                };
                err.span_label(var_or_use_span, format!("{}{}", borrow_desc, message));
            },
            BorrowExplanation::UsedLaterWhenDropped { drop_loc, dropped_local,
                                                     should_note_order } =>
            {
                let local_decl = &mir.local_decls[dropped_local];
                let (dtor_desc, type_desc) = match local_decl.ty.sty {
                    // If type is an ADT that implements Drop, then
                    // simplify output by reporting just the ADT name.
                    ty::Adt(adt, _substs) if adt.has_dtor(tcx) && !adt.is_box() =>
                        ("`Drop` code", format!("type `{}`", tcx.item_path_str(adt.did))),
                    // Otherwise, just report the whole type (and use
                    // the intentionally fuzzy phrase "destructor")
                    ty::Closure(..) =>
                        ("destructor", format!("closure")),
                    ty::Generator(..) =>
                        ("destructor", format!("generator")),
                    _ => ("destructor", format!("type `{}`", local_decl.ty)),
                };
                // Named locals get a direct "when `x` is dropped" label;
                // unnamed temporaries get a two-part label that first points
                // at where the temporary is created.
                match local_decl.name {
                    Some(local_name) => {
                        let message =
                            format!("{B}borrow might be used here, when `{LOC}` is dropped \
                                     and runs the {DTOR} for {TYPE}",
                                    B=borrow_desc, LOC=local_name, TYPE=type_desc, DTOR=dtor_desc);
                        err.span_label(mir.source_info(drop_loc).span, message);
                        if should_note_order {
                            err.note(
                                "values in a scope are dropped \
                                 in the opposite order they are defined",
                            );
                        }
                    }
                    None => {
                        err.span_label(local_decl.source_info.span,
                                       format!("a temporary with access to the {B}borrow \
                                                is created here ...",
                                               B=borrow_desc));
                        let message =
                            format!("... and the {B}borrow might be used here, \
                                     when that temporary is dropped \
                                     and runs the {DTOR} for {TYPE}",
                                    B=borrow_desc, TYPE=type_desc, DTOR=dtor_desc);
                        err.span_label(mir.source_info(drop_loc).span, message);
                        // For block-tail expressions, suggest how to make the
                        // temporary drop sooner.
                        if let Some(info) = &local_decl.is_block_tail {
                            // FIXME: use span_suggestion instead, highlighting the
                            // whole block tail expression.
                            let msg = if info.tail_result_is_ignored {
                                "The temporary is part of an expression at the end of a block. \
                                 Consider adding semicolon after the expression so its temporaries \
                                 are dropped sooner, before the local variables declared by the \
                                 block are dropped."
                            } else {
                                "The temporary is part of an expression at the end of a block. \
                                 Consider forcing this temporary to be dropped sooner, before \
                                 the block's local variables are dropped. \
                                 For example, you could save the expression's value in a new \
                                 local variable `x` and then make `x` be the expression \
                                 at the end of the block."
                            };
                            err.note(msg);
                        }
                    }
                }
            }
            BorrowExplanation::MustBeValidFor(region) => {
                tcx.note_and_explain_free_region(
                    err,
                    &format!("{}{}", borrow_desc, "borrowed value must be valid for "),
                    region,
                    "...",
                );
            },
            // Unexplained: nothing useful to add.
            _ => {},
        }
    }
}
impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
    /// Returns structured explanation for *why* the borrow contains the
    /// point from `context`. This is key for the "3-point errors"
    /// [described in the NLL RFC][d].
    ///
    /// # Parameters
    ///
    /// - `borrow`: the borrow in question
    /// - `context`: where the borrow occurs
    /// - `kind_place`: if Some, this describes the statement that triggered the error.
    ///   - first half is the kind of write, if any, being performed
    ///   - second half is the place being accessed
    ///
    /// [d]: https://rust-lang.github.io/rfcs/2094-nll.html#leveraging-intuition-framing-errors-in-terms-of-points
    pub(in borrow_check) fn explain_why_borrow_contains_point(
        &self,
        context: Context,
        borrow: &BorrowData<'tcx>,
        kind_place: Option<(WriteKind, &Place<'tcx>)>,
    ) -> BorrowExplanation<'tcx> {
        debug!(
            "explain_why_borrow_contains_point(context={:?}, borrow={:?}, kind_place={:?})",
            context, borrow, kind_place
        );
        let regioncx = &self.nonlexical_regioncx;
        let mir = self.mir;
        let tcx = self.infcx.tcx;
        // Map the borrow region to its inference vid, then find a region
        // that is live at the error point and forces the borrow to live.
        let borrow_region_vid = regioncx.to_region_vid(borrow.region);
        debug!(
            "explain_why_borrow_contains_point: borrow_region_vid={:?}",
            borrow_region_vid
        );
        let region_sub = regioncx.find_sub_region_live_at(borrow_region_vid, context.loc);
        debug!(
            "explain_why_borrow_contains_point: region_sub={:?}",
            region_sub
        );
        // Find the concrete cause keeping that region live and translate it
        // into a BorrowExplanation.
        match find_use::find(mir, regioncx, tcx, region_sub, context.loc) {
            Some(Cause::LiveVar(local, location)) => {
                let span = mir.source_info(location).span;
                let spans = self.move_spans(&Place::Local(local), location)
                    .or_else(|| self.borrow_spans(span, location));
                if self.is_borrow_location_in_loop(context.loc) {
                    let later_use = self.later_use_kind(spans, location);
                    BorrowExplanation::UsedLaterInLoop(later_use.0, later_use.1)
                } else {
                    // Check if the location represents a `FakeRead`, and adapt the error
                    // message to the `FakeReadCause` it is from: in particular,
                    // the ones inserted in optimized `let var = <expr>` patterns.
                    let later_use = self.later_use_kind(spans, location);
                    BorrowExplanation::UsedLater(later_use.0, later_use.1)
                }
            }
            Some(Cause::DropVar(local, location)) => {
                // Decide whether to additionally note drop order: only when a
                // named borrowed local is dropped in an enclosing (sub) scope
                // of the dropped local, and the two locals differ.
                let mut should_note_order = false;
                if mir.local_decls[local].name.is_some() {
                    if let Some((WriteKind::StorageDeadOrDrop, place)) = kind_place {
                        if let Place::Local(borrowed_local) = place {
                            let dropped_local_scope = mir.local_decls[local].visibility_scope;
                            let borrowed_local_scope =
                                mir.local_decls[*borrowed_local].visibility_scope;
                            if mir.is_sub_scope(borrowed_local_scope, dropped_local_scope)
                                && local != *borrowed_local
                            {
                                should_note_order = true;
                            }
                        }
                    }
                }
                BorrowExplanation::UsedLaterWhenDropped {
                    drop_loc: location,
                    dropped_local: local,
                    should_note_order,
                }
            }
            // No use found: fall back to a free-region explanation if one
            // exists, otherwise give up.
            None => if let Some(region) = regioncx.to_error_region(region_sub) {
                BorrowExplanation::MustBeValidFor(region)
            } else {
                BorrowExplanation::Unexplained
            },
        }
    }
    /// Check if a borrow location is within a loop.
    ///
    /// Worklist search over CFG successors starting at `borrow_location`;
    /// reports a loop if the search can come back around to the start.
    // NOTE(review): `visited_locations` is a Vec, so `contains` is a linear
    // scan — quadratic in the worst case; fine for diagnostics-only code.
    fn is_borrow_location_in_loop(
        &self,
        borrow_location: Location,
    ) -> bool {
        let mut visited_locations = Vec::new();
        let mut pending_locations = vec![ borrow_location ];
        debug!("is_in_loop: borrow_location={:?}", borrow_location);
        while let Some(location) = pending_locations.pop() {
            debug!("is_in_loop: location={:?} pending_locations={:?} visited_locations={:?}",
                   location, pending_locations, visited_locations);
            if location == borrow_location && visited_locations.contains(&borrow_location) {
                // We've managed to return to where we started (and this isn't the start of the
                // search).
                debug!("is_in_loop: found!");
                return true;
            }
            // Skip locations we've been.
            if visited_locations.contains(&location) { continue; }
            let block = &self.mir.basic_blocks()[location.block];
            if location.statement_index == block.statements.len() {
                // Add start location of the next blocks to pending locations.
                match block.terminator().kind {
                    TerminatorKind::Goto { target } => {
                        pending_locations.push(target.start_location());
                    },
                    TerminatorKind::SwitchInt { ref targets, .. } => {
                        for target in targets {
                            pending_locations.push(target.start_location());
                        }
                    },
                    TerminatorKind::Drop { target, unwind, .. } |
                    TerminatorKind::DropAndReplace { target, unwind, .. } |
                    TerminatorKind::Assert { target, cleanup: unwind, .. } |
                    TerminatorKind::Yield { resume: target, drop: unwind, .. } |
                    TerminatorKind::FalseUnwind { real_target: target, unwind, .. } => {
                        pending_locations.push(target.start_location());
                        if let Some(unwind) = unwind {
                            pending_locations.push(unwind.start_location());
                        }
                    },
                    TerminatorKind::Call { ref destination, cleanup, .. } => {
                        if let Some((_, destination)) = destination {
                            pending_locations.push(destination.start_location());
                        }
                        if let Some(cleanup) = cleanup {
                            pending_locations.push(cleanup.start_location());
                        }
                    },
                    TerminatorKind::FalseEdges { real_target, ref imaginary_targets, .. } => {
                        pending_locations.push(real_target.start_location());
                        for target in imaginary_targets {
                            pending_locations.push(target.start_location());
                        }
                    },
                    // Terminators with no successors (Return, Abort, etc.).
                    _ => {},
                }
            } else {
                // Add the next statement to pending locations.
                pending_locations.push(location.successor_within_block());
            }
            // Keep track of where we have visited.
            visited_locations.push(location);
        }
        false
    }
    /// Classifies the later use at `location` and picks the span to label;
    /// used to choose the wording of `UsedLater`/`UsedLaterInLoop`.
    fn later_use_kind(&self, use_spans: UseSpans, location: Location) -> (LaterUseKind, Span) {
        use self::LaterUseKind::*;
        let block = &self.mir.basic_blocks()[location.block];
        match use_spans {
            UseSpans::ClosureUse { var_span, .. } => (LaterUseKind::ClosureCapture, var_span),
            UseSpans::OtherUse(span) => {
                (if let Some(stmt) = block.statements.get(location.statement_index) {
                    match stmt.kind {
                        StatementKind::FakeRead(FakeReadCause::ForLet, _) => FakeLetRead,
                        _ => Other,
                    }
                } else {
                    // The use is the block's terminator.
                    assert_eq!(location.statement_index, block.statements.len());
                    match block.terminator().kind {
                        TerminatorKind::Call { ref func, from_hir_call: true, .. } => {
                            // Just point to the function, to reduce the chance
                            // of overlapping spans.
                            let function_span = match func {
                                Operand::Constant(c) => c.span,
                                Operand::Copy(Place::Local(l)) | Operand::Move(Place::Local(l)) => {
                                    let local_decl = &self.mir.local_decls[*l];
                                    if local_decl.name.is_none() {
                                        local_decl.source_info.span
                                    } else {
                                        span
                                    }
                                },
                                _ => span,
                            };
                            return (Call, function_span);
                        },
                        _ => Other,
                    }
                }, span)
            }
        }
    }
}
|
use pyo3::prelude::*;
/// Trivial function exposed to Python; always returns the constant 21.
#[pyfunction]
fn get_21() -> usize {
    21
}
/// Python extension module init: registers `get_21` in the module.
/// The function name must match the importable module name.
#[pymodule]
fn pyo3_mixed_include_exclude(_py: Python, m: &PyModule) -> PyResult<()> {
    m.add_wrapped(wrap_pyfunction!(get_21))?;
    Ok(())
}
|
use amethyst::renderer::{TextureMetadata,ScreenDimensions,Projection,Camera,PngFormat,Texture,MaterialTextureSet,Sprite,SpriteSheetHandle,TextureCoordinates,SpriteSheet};
use amethyst::assets::{AssetStorage,Loader};
use amethyst::prelude::*;
use amethyst::core::cgmath::{Vector3, Matrix4};
use amethyst::core::transform::{GlobalTransform};
/// Loads a PNG texture from `image_path` and registers the resulting
/// handle in the global `MaterialTextureSet` under `texture_id`.
fn load_texture_from_image(world: &mut World, image_path: &str, texture_id: u64) {
    // Load the texture first, keeping the read borrows scoped so they are
    // released before we take the write borrow below.
    let texture_handle = {
        let loader = world.read_resource::<Loader>();
        let storage = world.read_resource::<AssetStorage<Texture>>();
        loader.load(image_path, PngFormat, TextureMetadata::srgb(), (), &storage)
    };
    world
        .write_resource::<MaterialTextureSet>()
        .insert(texture_id, texture_handle);
}
/// Splits a texture into a uniform grid of sprites and registers it.
///
/// `image_size` and `sprite_size` are in pixels; the grid dimensions are
/// the truncated quotients, so any partial row/column at the image edge is
/// ignored. The texture is loaded and registered under `texture_id`, and a
/// handle to the resulting `SpriteSheet` is returned.
pub fn decompile_as_sprites(world: &mut World, image_path: &str, image_size: (f32, f32), sprite_size: (f32, f32), texture_id: u64) -> SpriteSheetHandle {
    // Number of whole sprites that fit in each dimension.
    let sprites_in_x = (image_size.0 / sprite_size.0).trunc();
    let sprites_in_y = (image_size.1 / sprite_size.1).trunc();
    // Size of one sprite in normalized (0..1) texture coordinates.
    let sprite_offset_x_in_image = 1.0 / sprites_in_x;
    let sprite_offset_y_in_image = 1.0 / sprites_in_y;
    // Pre-size the vec: the total sprite count is known up front.
    let mut sprites = Vec::with_capacity((sprites_in_x * sprites_in_y) as usize);
    for y in 0..sprites_in_y as u32 {
        for x in 0..sprites_in_x as u32 {
            let left = x as f32 * sprite_offset_x_in_image;
            let right = (x + 1) as f32 * sprite_offset_x_in_image;
            let top = (y + 1) as f32 * sprite_offset_y_in_image;
            let bottom = y as f32 * sprite_offset_y_in_image;
            let tex_coords = TextureCoordinates {
                left,
                right,
                bottom,
                top,
            };
            sprites.push(Sprite {
                width: sprite_size.0,
                height: sprite_size.1,
                // Sprites are anchored at the corner, not centered.
                offsets: [0.0, 0.0],
                tex_coords,
            });
        }
    }
    // Field-init shorthand (was `texture_id: texture_id, sprites: sprites`).
    let sprite_sheet = SpriteSheet {
        texture_id,
        sprites,
    };
    load_texture_from_image(world, image_path, texture_id);
    let sprite_sheet_handle = {
        let loader = world.read_resource::<Loader>();
        let sprite_sheet_storage = world.read_resource::<AssetStorage<SpriteSheet>>();
        loader.load_from_data(sprite_sheet, (), &sprite_sheet_storage)
    };
    sprite_sheet_handle
}
/// Creates a 2D orthographic camera entity sized to the current screen,
/// positioned at z = 1 looking at the XY plane.
pub fn initialise_camera(world: &mut World) {
    // Read the screen size in its own scope so the resource borrow is
    // dropped before we mutate the world below.
    let (screen_w, screen_h) = {
        let dims = world.read_resource::<ScreenDimensions>();
        (dims.width(), dims.height())
    };
    let camera = Camera::from(Projection::orthographic(
        0.0,
        screen_w,
        screen_h,
        0.0,
    ));
    let transform = GlobalTransform(
        Matrix4::from_translation(Vector3::new(0.0, 0.0, 1.0)).into(),
    );
    world.create_entity().with(camera).with(transform).build();
}
/// World resource holding the sprite-sheet handles shared across states.
pub struct Backpack {
    // Snake sprite sheet; None until loaded.
    pub snake_sheet: Option<SpriteSheetHandle>,
    // Food sprite sheet; None until loaded.
    pub food_sheet: Option<SpriteSheetHandle>,
}
impl Backpack {
    /// Creates a backpack with both sprite sheets already loaded.
    pub fn new(sheet: SpriteSheetHandle, food_sheet: SpriteSheetHandle) -> Self {
        Backpack {
            snake_sheet: Some(sheet),
            food_sheet: Some(food_sheet),
        }
    }
}
impl Default for Backpack {
fn default() -> Self {
Backpack {
snake_sheet: None,
food_sheet: None,
}
}
} |
//!
//! [`Device`](Device) and [`Surface`](Surface)
//! implementations using egl contexts and surfaces for efficient rendering.
//!
//! Usually this implementation's [`EglSurface`](::backend::drm::egl::EglSurface)s implementation
//! of [`GLGraphicsBackend`](::backend::graphics::gl::GLGraphicsBackend) will be used
//! to let your compositor render.
//! Take a look at `anvil`s source code for an example of this.
//!
use drm::control::{crtc, ResourceHandles, ResourceInfo};
use nix::libc::dev_t;
use std::os::unix::io::{AsRawFd, RawFd};
use std::rc::Rc;
#[cfg(feature = "native_lib")]
use wayland_server::Display;
use super::{Device, DeviceHandler, Surface};
use crate::backend::egl::context::GlAttributes;
use crate::backend::egl::error::Result as EGLResult;
use crate::backend::egl::native::{Backend, NativeDisplay, NativeSurface};
use crate::backend::egl::EGLContext;
#[cfg(feature = "native_lib")]
use crate::backend::egl::{EGLDisplay, EGLGraphicsBackend};
pub mod error;
use self::error::*;
mod surface;
pub use self::surface::*;
#[cfg(feature = "backend_session")]
pub mod session;
/// Representation of an egl device to create egl rendering surfaces
pub struct EglDevice<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    // EGL context wrapping the underlying device; `Rc` so that surfaces
    // created from this device can share the same context.
    dev: Rc<EGLContext<B, D>>,
    logger: ::slog::Logger,
}
impl<B, D> AsRawFd for EglDevice<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    // Delegates to the wrapped device so callers can poll its fd for events.
    fn as_raw_fd(&self) -> RawFd {
        self.dev.borrow().as_raw_fd()
    }
}
impl<B, D> EglDevice<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    /// Try to create a new [`EglDevice`] from an open device.
    ///
    /// Uses default [`GlAttributes`]: no fixed GL version/profile, debug
    /// contexts only in debug builds, and vsync enabled.
    ///
    /// Returns an error if the file is no valid device or context
    /// creation was not successful.
    pub fn new<L>(dev: D, logger: L) -> Result<Self>
    where
        L: Into<Option<::slog::Logger>>,
    {
        EglDevice::new_with_gl_attr(
            dev,
            GlAttributes {
                version: None,
                profile: None,
                debug: cfg!(debug_assertions),
                vsync: true,
            },
            logger,
        )
    }
    /// Create a new [`EglDevice`] from an open device and given [`GlAttributes`]
    ///
    /// Returns an error if the file is no valid device or context
    /// creation was not successful.
    pub fn new_with_gl_attr<L>(mut dev: D, attributes: GlAttributes, logger: L) -> Result<Self>
    where
        L: Into<Option<::slog::Logger>>,
    {
        let log = crate::slog_or_stdlog(logger).new(o!("smithay_module" => "backend_egl"));
        // Detach any handler installed on the raw device; events are routed
        // through InternalDeviceHandler once `set_handler` is called on us.
        dev.clear_handler();
        debug!(log, "Creating egl context from device");
        Ok(EglDevice {
            // Open the gbm device from the drm device and create a context based on that
            dev: Rc::new(
                EGLContext::new(dev, attributes, Default::default(), log.clone()).map_err(Error::from)?,
            ),
            logger: log,
        })
    }
}
/// Adapter installed on the underlying device `D` that forwards its events
/// to a user handler expecting `EglDevice<B, D>` events.
struct InternalDeviceHandler<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    // The user-supplied handler, boxed for type erasure.
    handler: Box<dyn DeviceHandler<Device = EglDevice<B, D>> + 'static>,
}
impl<B, D> DeviceHandler for InternalDeviceHandler<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    // This adapter is registered on the *underlying* device, hence `D`
    // (not `EglDevice<B, D>`).
    type Device = D;
    fn vblank(&mut self, crtc: crtc::Handle) {
        self.handler.vblank(crtc)
    }
    fn error(&mut self, error: <<D as Device>::Surface as Surface>::Error) {
        // Wrap the backend's error into this module's error chain before
        // handing it to the user handler.
        self.handler
            .error(ResultExt::<()>::chain_err(Err(error), || ErrorKind::UnderlyingBackendError).unwrap_err())
    }
}
impl<B, D> Device for EglDevice<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    type Surface = EglSurface<B, D>;
    // Most methods delegate to the wrapped device; errors are re-wrapped as
    // `UnderlyingBackendError`.
    fn device_id(&self) -> dev_t {
        self.dev.borrow().device_id()
    }
    fn set_handler(&mut self, handler: impl DeviceHandler<Device = Self> + 'static) {
        // Wrap the user handler so events from the inner device are
        // translated (see InternalDeviceHandler).
        self.dev.borrow_mut().set_handler(InternalDeviceHandler {
            handler: Box::new(handler),
        });
    }
    fn clear_handler(&mut self) {
        self.dev.borrow_mut().clear_handler()
    }
    fn create_surface(&mut self, crtc: crtc::Handle) -> Result<EglSurface<B, D>> {
        info!(self.logger, "Initializing EglSurface");
        let surface = self.dev.create_surface(crtc)?;
        // The surface keeps a clone of the Rc'd context alive.
        Ok(EglSurface {
            dev: self.dev.clone(),
            surface,
        })
    }
    fn process_events(&mut self) {
        self.dev.borrow_mut().process_events()
    }
    fn resource_info<T: ResourceInfo>(&self, handle: T::Handle) -> Result<T> {
        self.dev
            .borrow()
            .resource_info(handle)
            .chain_err(|| ErrorKind::UnderlyingBackendError)
    }
    fn resource_handles(&self) -> Result<ResourceHandles> {
        self.dev
            .borrow()
            .resource_handles()
            .chain_err(|| ErrorKind::UnderlyingBackendError)
    }
}
#[cfg(feature = "native_lib")]
impl<B, D> EGLGraphicsBackend for EglDevice<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    // Wayland integration: delegate binding the wl_display to the
    // underlying EGL context.
    fn bind_wl_display(&self, display: &Display) -> EGLResult<EGLDisplay> {
        self.dev.bind_wl_display(display)
    }
}
// On teardown, drop the installed handler first so no callback can fire
// into a partially destroyed device.
impl<B, D> Drop for EglDevice<B, D>
where
    B: Backend<Surface = <D as Device>::Surface> + 'static,
    D: Device + NativeDisplay<B, Arguments = crtc::Handle> + 'static,
    <D as Device>::Surface: NativeSurface,
{
    fn drop(&mut self) {
        self.clear_handler();
    }
}
|
use crate::lib::error::{DfxError, DfxResult};
use crate::{error_invalid_argument, error_invalid_data};
use indicatif::{ProgressBar, ProgressDrawTarget};
use libflate::gzip::Decoder;
use semver::Version;
use serde::{Deserialize, Deserializer};
use std::collections::BTreeMap;
use std::os::unix::fs::PermissionsExt;
use std::{env, fs};
use tar::Archive;
fn parse_semver<'de, D>(version: &str) -> Result<Version, D::Error>
where
D: Deserializer<'de>,
{
semver::Version::parse(&version)
.map_err(|e| serde::de::Error::custom(format!("invalid SemVer: {}", e)))
}
/// Deserializes a map of tag name -> SemVer string into parsed versions.
fn deserialize_tags<'de, D>(deserializer: D) -> Result<BTreeMap<String, Version>, D::Error>
where
    D: Deserializer<'de>,
{
    let raw: BTreeMap<String, String> = Deserialize::deserialize(deserializer)?;
    raw.into_iter()
        .map(|(tag, version)| Ok((tag, parse_semver::<D>(&version)?)))
        .collect()
}
/// Deserializes a list of SemVer strings into parsed versions,
/// short-circuiting on the first invalid entry.
fn deserialize_versions<'de, D>(deserializer: D) -> Result<Vec<Version>, D::Error>
where
    D: Deserializer<'de>,
{
    let raw: Vec<String> = Deserialize::deserialize(deserializer)?;
    raw.iter().map(|version| parse_semver::<D>(version)).collect()
}
// Parsed `manifest.json`: named tags (e.g. "latest") pointing at versions,
// plus the full list of published versions.
#[derive(Debug, PartialEq, Eq, Deserialize)]
pub struct Manifest {
    // Tag name -> version, e.g. "latest" -> 0.4.1.
    #[serde(deserialize_with = "deserialize_tags")]
    tags: BTreeMap<String, Version>,
    // All published versions, in manifest order.
    #[serde(deserialize_with = "deserialize_versions")]
    versions: Vec<Version>,
}
impl Manifest {
    /// Returns the names of all tags in the manifest.
    #[allow(dead_code)]
    pub fn get_tags(&self) -> Vec<&String> {
        let mut names = Vec::with_capacity(self.tags.len());
        names.extend(self.tags.keys());
        names
    }
    /// Returns a copy of every published version.
    pub fn get_versions(&self) -> Vec<Version> {
        self.versions.to_vec()
    }
    /// Looks up the version a tag points at, if the tag exists.
    pub fn get_tag_version(&self, tag: &str) -> Option<&Version> {
        self.tags.get(tag)
    }
}
/// Decides whether an upgrade should be offered: yes when no latest
/// version is known, or when `latest` is newer than a stable (non
/// pre-release) `current`.
pub fn is_upgrade_necessary(latest_version: Option<&Version>, current: &Version) -> bool {
    latest_version.map_or(true, |latest| latest > current && current.pre.is_empty())
}
/// Fetches `manifest.json` from `release_root` and returns the version
/// recorded under the `latest` tag.
///
/// Shows a spinner on stderr while the request is in flight. Fails on a
/// malformed URL, network error, non-success HTTP status, an unparsable
/// manifest, or a manifest without a `latest` tag.
pub fn get_latest_version(
    release_root: &str,
    timeout: Option<std::time::Duration>,
) -> DfxResult<Version> {
    let url = reqwest::Url::parse(release_root)
        .map_err(|e| error_invalid_argument!("invalid release root: {}", e))?;
    let manifest_url = url
        .join("manifest.json")
        .map_err(|e| error_invalid_argument!("invalid manifest URL: {}", e))?;
    println!("Fetching manifest {}", manifest_url);
    let b = ProgressBar::new_spinner();
    b.set_draw_target(ProgressDrawTarget::stderr());
    b.set_message("Checking for latest dfx version...");
    b.enable_steady_tick(80);
    // Only install a request timeout when the caller asked for one.
    let client = match timeout {
        Some(timeout) => reqwest::blocking::Client::builder().timeout(timeout),
        None => reqwest::blocking::Client::builder(),
    };
    let client = client.build()?;
    let response = client.get(manifest_url).send().map_err(DfxError::new)?;
    let status_code = response.status();
    b.finish_and_clear();
    if !status_code.is_success() {
        return Err(error_invalid_data!(
            "unable to fetch manifest: {}",
            status_code.canonical_reason().unwrap_or("unknown error"),
        ));
    }
    let manifest: Manifest = response
        .json()
        .map_err(|e| error_invalid_data!("invalid manifest: {}", e))?;
    manifest
        .tags
        .get("latest")
        .ok_or_else(|| error_invalid_data!("expected field 'latest' in 'tags'"))
        .cloned()
}
/// Downloads the dfx release tarball for `version`/`arch` from
/// `release_root`, unpacks it next to the running executable, and marks
/// the binary executable.
pub fn get_latest_release(release_root: &str, version: &Version, arch: &str) -> DfxResult<()> {
    let url = reqwest::Url::parse(&format!(
        "{0}/downloads/dfx/{1}/{2}/dfx-{1}.tar.gz",
        release_root, version, arch
    ))
    .map_err(|e| error_invalid_argument!("invalid release root: {}", e))?;
    let b = ProgressBar::new_spinner();
    b.set_draw_target(ProgressDrawTarget::stderr());
    b.set_message(format!("Downloading {}", url).as_str());
    b.enable_steady_tick(80);
    let mut response = reqwest::blocking::get(url).map_err(DfxError::new)?;
    // Stream-decompress the gzip body and untar it in place.
    let mut decoder = Decoder::new(&mut response)
        .map_err(|e| error_invalid_data!("unable to gunzip file: {}", e))?;
    let mut archive = Archive::new(&mut decoder);
    let current_exe_path = env::current_exe().map_err(DfxError::new)?;
    let current_exe_dir = current_exe_path.parent().unwrap(); // This should not fail
    b.set_message("Unpacking");
    // BUG FIX: the `&current_…` references below had been corrupted into
    // HTML-entity mojibake ("¤t_…"); restored to the intended identifiers.
    archive.unpack(&current_exe_dir)?;
    b.set_message("Setting permissions");
    let mut permissions = fs::metadata(&current_exe_path)?.permissions();
    permissions.set_mode(0o775); // FIXME Preserve existing permissions
    fs::set_permissions(&current_exe_path, permissions)?;
    b.finish_with_message("Done");
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Minimal manifest fixture: one tag and three versions.
    const MANIFEST: &str = r#"{
"tags": {
"latest": "0.4.1"
},
"versions": [
"0.3.1",
"0.4.0",
"0.4.1"
]
}"#;
    #[test]
    fn test_parse_manifest() {
        let manifest: Manifest = serde_json::from_str(&MANIFEST).unwrap();
        let mut tags = BTreeMap::new();
        tags.insert(
            "latest".to_string(),
            semver::Version::parse("0.4.1").unwrap(),
        );
        let versions: Vec<Version> = vec!["0.3.1", "0.4.0", "0.4.1"]
            .into_iter()
            .map(|v| semver::Version::parse(v).unwrap())
            .collect();
        // BUG FIX: the expected `tags` map was built but never asserted,
        // so a tag-parsing regression would have gone unnoticed.
        assert_eq!(manifest.tags, tags);
        assert_eq!(manifest.versions, versions);
    }
    #[test]
    fn test_get_latest_version() {
        let _ = env_logger::try_init();
        // Happy path: a valid manifest yields the "latest" tag's version.
        let _m = mockito::mock("GET", "/manifest.json")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(MANIFEST)
            .create();
        let latest_version = get_latest_version(&mockito::server_url(), None);
        assert_eq!(latest_version.unwrap(), Version::parse("0.4.1").unwrap());
        // Error path: a non-JSON body must surface as an error.
        let _m = mockito::mock("GET", "/manifest.json")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body("Not a valid JSON object")
            .create();
        let latest_version = get_latest_version(&mockito::server_url(), None);
        assert!(latest_version.is_err());
    }
}
|
use gtk::prelude::*;
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::HashMap;
use crate::state::State;
use glib::clone;
/// Wires value-changed handlers for the spin buttons: each handler writes
/// the new value into the shared state and queues a redraw.
pub fn setup_buttons_events(
    buttons: &HashMap<String, gtk::SpinButton>,
    state: &Rc<RefCell<State>>,
    drawing_area: &Rc<RefCell<gtk::DrawingArea>>,
) {
    // zoom button: floating-point zoom factor
    {
        let shared_state = Rc::clone(state);
        let canvas = Rc::clone(drawing_area);
        buttons.get("zoom").unwrap().connect_value_changed(move |spin| {
            shared_state.borrow_mut().zoom = spin.get_value();
            canvas.borrow().queue_draw();
        });
    }
    // parts buttons: integer part counts along each axis
    {
        let shared_state = Rc::clone(state);
        let canvas = Rc::clone(drawing_area);
        buttons.get("parts_ox").unwrap().connect_value_changed(move |spin| {
            shared_state.borrow_mut().parts_ox = spin.get_value_as_int();
            canvas.borrow().queue_draw();
        });
    }
    {
        let shared_state = Rc::clone(state);
        let canvas = Rc::clone(drawing_area);
        buttons.get("parts_oy").unwrap().connect_value_changed(move |spin| {
            shared_state.borrow_mut().parts_oy = spin.get_value_as_int();
            canvas.borrow().queue_draw();
        });
    }
}
|
use crate::client::Client;
use ureq::{Error, Request};
use serde::{Deserialize};
// One mobile network's pricing entry within a country.
#[derive(Deserialize)]
pub struct CountryNetwork {
    pub comment: String,
    pub features: Vec<String>,
    // Mobile country code.
    pub mcc: String,
    // Mobile network codes covered by this entry.
    pub mncs: Vec<String>,
    #[serde(rename = "networkName")]
    pub network_name: String,
    // Price per message — currency/unit not stated here; see API docs.
    pub price: f64,
}
// Pricing for a single country, grouped by network.
#[derive(Deserialize)]
pub struct CountryPricing {
    #[serde(rename = "countryCode")]
    pub country_code: String,
    #[serde(rename = "countryName")]
    pub country_name: String,
    #[serde(rename = "countryPrefix")]
    pub country_prefix: String,
    pub networks: Vec<CountryNetwork>,
}
// Top-level response of the pricing endpoint.
#[derive(Deserialize)]
pub struct PricingResponse {
    #[serde(rename = "countCountries")]
    pub count_countries: u32,
    #[serde(rename = "countNetworks")]
    pub count_networks: u32,
    pub countries: Vec<CountryPricing>,
}
// Optional filters for a pricing query.
pub struct PricingParams {
    // Restrict results to one country code when set.
    pub country: Option<String>,
}
// Thin wrapper binding the pricing endpoints to an API client.
pub struct Pricing {
    client: Client
}
impl Pricing {
pub fn new(client: Client) -> Self {
Pricing {
client,
}
}
pub fn get(&self, params: PricingParams, format: &str) -> Request {
let mut req = self.client.request("GET", "pricing").clone();
if params.country.is_some() {
req = req.query("country", &*params.country.unwrap_or_default().to_string());
}
req.query("format", format)
}
pub fn csv(&self, params: PricingParams) -> Result<String, Error> {
Ok(self.get(params, "csv").call()?.into_string()?)
}
pub fn json(&self, params: PricingParams) -> Result<PricingResponse, Error> {
Ok(self.get(params, "json").call()?.into_json::<PricingResponse>()?)
}
} |
/// Sanity check that collecting a range produces the expected vector.
pub fn it_vec() {
    let v = (0..5).collect::<Vec<i32>>();
    println!("{:?}", v);
    assert_eq!(v, [0, 1, 2, 3, 4]);
}
|
use duktape::error;
use std::io;
use std::str;
// Error types for this crate, generated by the `error_chain!` macro.
error_chain!{
    foreign_links {
        // Wrap std I/O and UTF-8 decoding errors transparently.
        Io(io::Error);
        Utf8(str::Utf8Error);
    }
    links {
        // Chain to duktape's own error-chain hierarchy.
        Duktape(error::Error, error::ErrorKind);
    }
    errors {
        // Raised when a module path cannot be resolved.
        Resolve(path:String) {
            description("ResolveError")
            display("could not resolve: '{}'",path)
        }
    }
}
|
use itertools::iproduct;
use lazy_static::lazy_static;
use scan_fmt::scan_fmt;
use std::{
cmp::Ordering,
collections::{HashMap, HashSet},
};
lazy_static! {
    // Puzzle input: one "x, y" pair per line, parsed lazily on first access.
    // Panics (via unwrap) on a malformed line.
    static ref COORDINATES: Vec<(i32, i32)> = include_str!("input.txt")
        .lines()
        .map(|line| {
            let (x, y) = scan_fmt!(line, "{d}, {d}", i32, i32);
            (x.unwrap(), y.unwrap())
        })
        .collect();
}
/// Manhattan (L1) distance between two integer grid points.
fn manhattan((x1, y1): (i32, i32), (x2, y2): (i32, i32)) -> i32 {
    let dx = x1 - x2;
    let dy = y1 - y2;
    dx.abs() + dy.abs()
}
// Extension methods used by part 1 to demand *unique* winners.
trait IteratorExt: Iterator {
    // The sole element of the iterator, or `None` if empty / more than one.
    fn single(self) -> Option<Self::Item>;
    // The element with the strictly smallest key, or `None` if the
    // iterator is empty or the minimum key is tied.
    fn single_min_by_key<B, F>(self, f: F) -> Option<Self::Item>
    where
        B: Ord,
        F: FnMut(&Self::Item) -> B;
}
impl<I: Iterator> IteratorExt for I {
    /// Returns the only element, or `None` when the iterator is empty or
    /// yields more than one element.
    fn single(mut self) -> Option<Self::Item> {
        self.next().and_then(|elem| {
            // A second element means the value is not unique.
            if self.next().is_none() {
                Some(elem)
            } else {
                None
            }
        })
    }
    /// Returns the element with the strictly smallest key; ties at the
    /// current minimum clear the candidate (a later strictly-smaller key
    /// can still produce a unique winner).
    fn single_min_by_key<B, F>(mut self, mut f: F) -> Option<Self::Item>
    where
        B: Ord,
        F: FnMut(&Self::Item) -> B,
    {
        let mut candidate = self.next();
        // The key is tracked separately so a cleared (tied) candidate still
        // remembers the minimum key seen so far.
        let mut candidate_key = candidate.as_ref().map(|item| f(item));
        for item in self {
            let key = f(&item);
            if let Some(ref mut candidate_key) = candidate_key {
                match key.cmp(&candidate_key) {
                    Ordering::Less => {
                        // Strictly smaller: new unique minimum.
                        *candidate_key = key;
                        candidate = Some(item);
                    }
                    Ordering::Equal => {
                        // Tie at the minimum: no unique winner for this key.
                        candidate = None;
                    }
                    Ordering::Greater => {}
                }
            }
        }
        candidate
    }
}
/// Part 1: size of the largest finite region of cells uniquely closest to
/// one coordinate. Regions touching the 500x500 bounding box edge are
/// treated as infinite and excluded.
fn part1() {
    let mut boundary = HashSet::new();
    let mut region_sizes = HashMap::new();
    for (x, y) in iproduct!(0..500, 0..500) {
        let nearest = COORDINATES
            .iter()
            .single_min_by_key(|&&point| manhattan(point, (x, y)));
        if let Some(nearest) = nearest {
            // A region reaching the window edge extends forever.
            if x == 0 || x == 499 || y == 0 || y == 499 {
                boundary.insert(nearest);
            }
            *region_sizes.entry(nearest).or_insert(0) += 1;
        }
    }
    let (_, area) = region_sizes
        .iter()
        .filter(|(&point, _)| !boundary.contains(point))
        .max_by_key(|(_, &area)| area)
        .unwrap();
    println!("{}", area);
}
/// Part 2: number of cells whose summed distance to all coordinates is
/// below 10000.
fn part2() {
    let mut safe_region_size = 0usize;
    for cell in iproduct!(0..500, 0..500) {
        let total: i32 = COORDINATES.iter().map(|&b| manhattan(cell, b)).sum();
        if total < 10000 {
            safe_region_size += 1;
        }
    }
    println!("{}", safe_region_size);
}
// Entry point: solve both puzzle parts over the shared input.
fn main() {
    part1();
    part2();
}
|
use crate::data::{Id, Item, Rating};
use crate::helpers::{ElementDataRef, QuerySelector};
use html5ever::{expanded_name, local_name, namespace_url, ns};
use kuchiki::NodeRef;
/// Extracts the numeric item id from an element whose `id` attribute has
/// the form `item_<number>`. Panics if the attribute is missing, lacks
/// the prefix, or the suffix is not a number.
fn get_item_id(elem: &ElementDataRef) -> Id {
    static ID_PREFIX: &str = "item_";
    let attrs = elem.attributes.borrow();
    let id = attrs.get(local_name!("id")).unwrap();
    // `strip_prefix` both validates the prefix and removes it, replacing
    // the previous `split_at` + assert which sliced by byte offset.
    id.strip_prefix(ID_PREFIX)
        .expect("element id must start with \"item_\"")
        .parse()
        .unwrap()
}
/// Reads the item title from the last child of the `h3` header, which is
/// expected to be either a `<small>` or an `<a>` element.
fn get_item_title(elem: &ElementDataRef) -> String {
    let title_node = elem.query_selector("h3>*:last-child").unwrap();
    let tag = title_node.name.expanded();
    assert!(tag == expanded_name!(html "small") || tag == expanded_name!(html "a"));
    title_node.text_contents()
}
/// Reads the 1-10 star rating from the `.starlight` element's
/// `stars<N>` class, or returns `None` when the item has no rating.
fn get_item_rating(elem: &ElementDataRef) -> Option<Rating> {
    static STARS_PREFIX: &str = "stars";
    let star_elem = elem.query_selector(".starlight")?;
    let attrs = star_elem.attributes.borrow();
    let classes = attrs.get(local_name!("class")).unwrap();
    let rating = classes
        .split_whitespace()
        .find_map(|class| class.strip_prefix(STARS_PREFIX))
        .map(|digits| digits.parse().unwrap())
        .unwrap();
    assert!((1..=10).contains(&rating));
    Some(rating)
}
/// Parses the item's tag list from the `.collectInfo>.tip` element, whose
/// text looks like "标签: tag1 tag2 …". Returns an empty vec when the
/// element is absent.
fn get_item_tags(elem: &ElementDataRef) -> Vec<String> {
    static TAGS_PREFIX: &str = "标签: ";
    if let Some(tags_elem) = elem.query_selector(".collectInfo>.tip") {
        let all_text = tags_elem.text_contents();
        let tag_text = all_text.trim();
        // `strip_prefix` validates and removes the label in one step,
        // replacing the assert + byte-offset slice (the prefix contains
        // multi-byte characters, so an offset slip would panic).
        let tag_list = tag_text
            .strip_prefix(TAGS_PREFIX)
            .expect("tag line must start with the tags label");
        tag_list
            .split(' ')
            .filter(|s| !s.is_empty())
            .map(str::to_string)
            .collect()
    } else {
        vec![]
    }
}
// Assembles one `Item` by running every field extractor over the element.
fn generate_item_from_node(elem: &ElementDataRef) -> Item {
    Item {
        id: get_item_id(elem),
        title: get_item_title(elem),
        rating: get_item_rating(elem),
        tags: get_item_tags(elem),
    }
}
/// Collects every `<li>` under `#browserItemList` into an `Item`.
pub fn get_all_items(html: NodeRef) -> Vec<Item> {
    let mut items = Vec::new();
    for elem in html.select("#browserItemList>li").unwrap() {
        items.push(generate_item_from_node(&elem));
    }
    items
}
|
use crate::construction::constraints::{RouteConstraintViolation, TourSizeModule};
use crate::helpers::construction::constraints::create_constraint_pipeline_with_module;
use crate::helpers::models::domain::create_empty_solution_context;
use crate::helpers::models::problem::{test_fleet, test_multi_job_with_locations, test_single_with_id};
use crate::helpers::models::solution::{create_route_context_with_activities, test_activity_with_location};
use crate::models::common::Location;
use crate::models::problem::Job;
use std::sync::Arc;
/// Shorthand for the hard-route violation (code 1) expected by the
/// failing cases below.
fn fail() -> Option<RouteConstraintViolation> {
    Some(RouteConstraintViolation { code: 1 })
}
// Each case: (activities already in the route, activities in the job,
// optional tour-size limit, expected violation).
parameterized_test! {can_limit_by_job_activities, (activities, job_size, limit, expected), {
    can_limit_by_job_activities_impl(activities, job_size, limit, expected);
}}
can_limit_by_job_activities! {
    case01: (3, 1, Some(3), fail()),
    case02: (3, 1, None, None),
    case03: (2, 1, Some(3), None),
    case04: (2, 2, Some(3), fail()),
    case05: (2, 2, None, None),
    case06: (1, 2, Some(3), None),
}
/// Builds a route with `activities` existing activities plus a single- or
/// multi-activity job, then evaluates `TourSizeModule` against `limit`.
fn can_limit_by_job_activities_impl(
    activities: usize,
    job_size: usize,
    limit: Option<usize>,
    expected: Option<RouteConstraintViolation>,
) {
    // A one-activity job is a Single; otherwise a Multi with one location
    // per sub-job.
    let job = if job_size == 1 {
        Job::Single(test_single_with_id("job1"))
    } else {
        Job::Multi(test_multi_job_with_locations((0..job_size).map(|idx| vec![Some(idx as Location)]).collect()))
    };
    let route_ctx = create_route_context_with_activities(
        &test_fleet(),
        "v1",
        (0..activities).map(|idx| test_activity_with_location(idx as Location)).collect(),
    );
    // The limit closure ignores its argument and returns the case's limit.
    let result = create_constraint_pipeline_with_module(Box::new(TourSizeModule::new(Arc::new(move |_| limit), 1)))
        .evaluate_hard_route(&create_empty_solution_context(), &route_ctx, &job);
    assert_eq!(result, expected);
}
|
pub mod controller;
pub mod repository;
|
#![allow(dead_code)]
mod telegram;
use std::error::Error;
use std::env;
use telegram::run_bot;
/// Entry point: reads the bot token from the environment and runs the bot.
///
/// Since `main` already returns `Result`, a missing token is reported as
/// an error instead of panicking via `expect`.
fn main() -> Result<(), Box<dyn Error>> {
    let token = env::var("TELEGRAM_BOT_TOKEN")
        .map_err(|_| "TELEGRAM_BOT_TOKEN not found")?;
    run_bot(token)
}
|
use std::{fmt, io::Write, num::NonZeroUsize, ops::Range};
use rand::{distributions::WeightedIndex, prelude::Distribution, seq::SliceRandom, Rng};
use serde::Deserialize;
use crate::payload::{Error, Serialize};
use self::{
common::tags, event::EventGenerator, metric::MetricGenerator,
service_check::ServiceCheckGenerator,
};
use super::{common::AsciiString, Generator};
mod common;
mod event;
mod metric;
mod service_check;
/// Serde default: at least one metric name per payload.
fn default_metric_names_minimum() -> NonZeroUsize {
    NonZeroUsize::new(1).expect("1 is nonzero")
}
/// Serde default: at most 64 metric names per payload.
fn default_metric_names_maximum() -> NonZeroUsize {
    NonZeroUsize::new(64).expect("64 is nonzero")
}
/// Serde default: tags may be entirely absent.
fn default_tag_keys_minimum() -> usize {
    0
}
/// Serde default: at most 64 tag keys.
fn default_tag_keys_maximum() -> usize {
    64
}
/// Weights for `DogStatsD` kinds: metrics, events, service checks
///
/// Defines the relative probability of each kind of `DogStatsD` datagram.
#[derive(Debug, Deserialize, Clone, Copy, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct KindWeights {
    metric: u8,
    event: u8,
    service_check: u8,
}
impl Default for KindWeights {
    // Metrics dominate by default, mirroring real DogStatsD traffic.
    fn default() -> Self {
        KindWeights {
            metric: 80,        // 80%
            event: 10,         // 10%
            service_check: 10, // 10%
        }
    }
}
/// Weights for `DogStatsD` metrics: gauges, counters, etc
#[derive(Debug, Deserialize, Clone, Copy, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct MetricWeights {
    count: u8,
    gauge: u8,
    timer: u8,
    distribution: u8,
    set: u8,
    histogram: u8,
}
impl Default for MetricWeights {
    fn default() -> Self {
        MetricWeights {
            count: 34,       // 34%
            gauge: 34,       // 34%
            timer: 5,        // 5%
            distribution: 1, // 1%
            set: 1,          // 1%
            histogram: 25,   // 25%
        }
    }
}
#[derive(Debug, Deserialize, Clone, Copy, PartialEq)]
pub struct Config {
    /// Defines the minimum number of metric names allowed in a payload.
    #[serde(default = "default_metric_names_minimum")]
    pub metric_names_minimum: NonZeroUsize,
    /// Defines the maximum number of metric names allowed in a
    /// payload. Must be greater or equal to minimum.
    #[serde(default = "default_metric_names_maximum")]
    pub metric_names_maximum: NonZeroUsize,
    /// Defines the minimum number of tag keys allowed in a payload.
    #[serde(default = "default_tag_keys_minimum")]
    pub tag_keys_minimum: usize,
    /// Defines the maximum number of tag keys allowed in a
    /// payload. Must be greater or equal to minimum.
    #[serde(default = "default_tag_keys_maximum")]
    pub tag_keys_maximum: usize,
    /// Defines the relative probability of each kind of DogStatsD kinds of
    /// payload.
    #[serde(default)]
    pub kind_weights: KindWeights,
    /// Defines the relative probability of each kind of DogStatsD metric.
    #[serde(default)]
    pub metric_weights: MetricWeights,
}
/// With probability 1/2, returns a clone of a uniformly chosen element of
/// `pool` (or `None` when the pool is empty); otherwise returns `None`.
fn choose_or_not<R, T>(mut rng: &mut R, pool: &[T]) -> Option<T>
where
    T: Clone,
    R: rand::Rng + ?Sized,
{
    let pick: bool = rng.gen();
    match pick {
        true => pool.choose(&mut rng).cloned(),
        false => None,
    }
}
// Generates one DogStatsD datagram at a time, choosing the kind by
// weighted sampling and delegating to the per-kind generators.
#[derive(Debug, Clone)]
struct MemberGenerator {
    // Distribution over member kinds; index order must match
    // `Generator<Member>::generate`.
    kind_weights: WeightedIndex<u8>,
    event_generator: EventGenerator,
    service_check_generator: ServiceCheckGenerator,
    metric_generator: MetricGenerator,
}
#[inline]
/// Generates a random number of ASCII strings (count drawn from
/// `min_max`), each at most `max_length` characters long.
fn random_strings_with_length<R>(min_max: Range<usize>, max_length: u16, rng: &mut R) -> Vec<String>
where
    R: Rng + ?Sized,
{
    let mut buf = Vec::with_capacity(min_max.end);
    let count = rng.gen_range(min_max);
    buf.extend((0..count).map(|_| AsciiString::with_maximum_length(max_length).generate(rng)));
    buf
}
impl MemberGenerator {
    /// Builds all sub-generators from shared string/tag pools drawn from
    /// `rng`, weighting member and metric kinds as configured.
    fn new<R>(
        metric_range: Range<NonZeroUsize>,
        key_range: Range<usize>,
        kind_weights: KindWeights,
        metric_weights: MetricWeights,
        mut rng: &mut R,
    ) -> Self
    where
        R: Rng + ?Sized,
    {
        let metric_range = metric_range.start.get()..metric_range.end.get();
        let titles = random_strings_with_length(metric_range, 64, &mut rng);
        let texts_or_messages = random_strings_with_length(4..128, 1024, &mut rng);
        let small_strings = random_strings_with_length(16..1024, 8, &mut rng);
        let total_tag_sets = 512;
        let max_values_per_tag_set = 512;
        let mut tags = Vec::with_capacity(total_tag_sets);
        let tags_generator = tags::Generator::new(key_range, max_values_per_tag_set);
        for _ in 0..total_tag_sets {
            tags.push(tags_generator.generate(&mut rng));
        }
        // The pools are shared across generators so generated members look
        // like they come from one coherent workload.
        let event_generator = EventGenerator {
            titles: titles.clone(),
            texts_or_messages: texts_or_messages.clone(),
            small_strings: small_strings.clone(),
            tags: tags.clone(),
        };
        let service_check_generator = ServiceCheckGenerator {
            names: titles.clone(),
            small_strings: small_strings.clone(),
            texts_or_messages,
            tags: tags.clone(),
        };
        // NOTE the ordering here of `metric_choices` is very important! If you
        // change it here you MUST also change it in `Generator<Metric> for
        // MetricGenerator`.
        let metric_choices = [
            metric_weights.count,
            metric_weights.gauge,
            metric_weights.timer,
            metric_weights.distribution,
            metric_weights.set,
            metric_weights.histogram,
        ];
        let metric_generator = MetricGenerator {
            metric_weights: WeightedIndex::new(&metric_choices).unwrap(),
            names: titles,
            container_ids: small_strings,
            tags,
        };
        // NOTE the ordering here of `member_choices` is very important! If you
        // change it here you MUST also change it in `Generator<Member> for
        // MemberGenerator`.
        // BUG FIX: the third slot previously repeated `kind_weights.event`,
        // so service checks (index 2 in `generate`) were sampled with the
        // event weight and the configured `service_check` weight was ignored.
        let member_choices = [
            kind_weights.metric,
            kind_weights.event,
            kind_weights.service_check,
        ];
        MemberGenerator {
            kind_weights: WeightedIndex::new(&member_choices).unwrap(),
            event_generator,
            service_check_generator,
            metric_generator,
        }
    }
}
impl Generator<Member> for MemberGenerator {
    // Samples a member kind and generates it. Indices here (0 = metric,
    // 1 = event, 2 = service check) MUST match the order of
    // `member_choices` in `MemberGenerator::new`.
    fn generate<R>(&self, rng: &mut R) -> Member
    where
        R: rand::Rng + ?Sized,
    {
        match self.kind_weights.sample(rng) {
            0 => Member::Metric(self.metric_generator.generate(rng)),
            1 => Member::Event(self.event_generator.generate(rng)),
            2 => Member::ServiceCheck(self.service_check_generator.generate(rng)),
            // WeightedIndex over three weights can only yield 0..=2.
            _ => unreachable!(),
        }
    }
}
// One DogStatsD datagram of any kind.
// https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/
enum Member {
    Metric(metric::Metric),
    Event(event::Event),
    ServiceCheck(service_check::ServiceCheck),
}
impl fmt::Display for Member {
    /// Renders the member in DogStatsD wire format by delegating to the
    /// inner payload's own `Display` implementation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Metric(metric) => write!(f, "{metric}"),
            Self::Event(event) => write!(f, "{event}"),
            Self::ServiceCheck(check) => write!(f, "{check}"),
        }
    }
}
// Payload generator producing streams of DogStatsD datagrams.
#[derive(Debug, Clone)]
#[allow(clippy::module_name_repetitions)]
pub(crate) struct DogStatsD {
    member_generator: MemberGenerator,
}
impl DogStatsD {
    // Builds the generator; all randomized pools are drawn up-front from
    // `rng` inside `MemberGenerator::new`.
    pub(crate) fn new<R>(
        metric_names_range: Range<NonZeroUsize>,
        tag_keys_range: Range<usize>,
        kind_weights: KindWeights,
        metric_weights: MetricWeights,
        rng: &mut R,
    ) -> Self
    where
        R: rand::Rng + ?Sized,
    {
        let member_generator = MemberGenerator::new(
            metric_names_range,
            tag_keys_range,
            kind_weights,
            metric_weights,
            rng,
        );
        Self { member_generator }
    }
}
impl Serialize for DogStatsD {
    /// Writes newline-delimited DogStatsD datagrams to `writer` without
    /// exceeding `max_bytes` in total.
    fn to_bytes<W, R>(&self, mut rng: R, max_bytes: usize, writer: &mut W) -> Result<(), Error>
    where
        R: Rng + Sized,
        W: Write,
    {
        let mut bytes_remaining = max_bytes;
        loop {
            let member: Member = self.member_generator.generate(&mut rng);
            let encoding = format!("{member}");
            let line_length = encoding.len() + 1; // add one for the newline
            // `checked_sub` doubles as the budget test: `None` means the
            // line would overflow `max_bytes`.
            match bytes_remaining.checked_sub(line_length) {
                Some(remainder) => {
                    writeln!(writer, "{encoding}")?;
                    bytes_remaining = remainder;
                }
                // The first member that does not fit ends the payload; it
                // is discarded rather than carried over to a later call.
                None => break,
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod test {
    use std::num::NonZeroUsize;
    use proptest::prelude::*;
    use rand::{rngs::SmallRng, SeedableRng};
    use crate::payload::{
        dogstatsd::{KindWeights, MetricWeights},
        DogStatsD, Serialize,
    };
    // We want to be sure that the serialized size of the payload does not
    // exceed `max_bytes`.
    proptest! {
        #[test]
        fn payload_not_exceed_max_bytes(seed: u64, max_bytes: u16) {
            let max_bytes = max_bytes as usize;
            // Deterministic RNG per proptest case for reproducibility.
            let mut rng = SmallRng::seed_from_u64(seed);
            let metric_names_range = NonZeroUsize::new(1).unwrap()..NonZeroUsize::new(64).unwrap();
            let tag_keys_range = 0..32;
            let kind_weights = KindWeights::default();
            let metric_weights = MetricWeights::default();
            let dogstatsd = DogStatsD::new(metric_names_range, tag_keys_range, kind_weights, metric_weights, &mut rng);
            let mut bytes = Vec::with_capacity(max_bytes);
            dogstatsd.to_bytes(rng, max_bytes, &mut bytes).unwrap();
            // On failure, show the offending payload for debugging.
            debug_assert!(
                bytes.len() <= max_bytes,
                "{:?}",
                std::str::from_utf8(&bytes).unwrap()
            );
        }
    }
}
|
use discorsd::commands::SlashCommandRaw;
use crate::Bot;
pub mod addme;
pub mod info;
pub mod ping;
pub mod rules;
pub mod stop;
pub mod uptime;
pub mod start;
pub mod system_info;
pub mod ll;
pub mod unpin;
pub mod test;
pub mod components;
pub mod start_game;
// Builds the list of slash commands registered with Discord.
// NOTE(review): many command modules are declared above but only
// addme/start/stop/components are returned here — confirm the rest are
// intentionally unregistered.
pub fn commands() -> Vec<Box<dyn SlashCommandRaw<Bot=Bot>>> {
    vec![
        Box::new(addme::AddMeCommand),
        Box::<start::StartCommand>::default(),
        Box::<stop::StopCommand>::default(),
        Box::new(components::ComponentsCommand),
    ]
}
#![feature(custom_attribute)]
use futures::Stream;
use tokio_core::reactor::Core;
use telegram_bot::*;
use std::collections::HashMap;
mod config;
// Long-polls Telegram for updates and dispatches messages from the
// configured admin user; everyone else is politely refused.
fn main() {
    let mut core = Core::new().unwrap();
    let handle = core.handle();
    let config = match config::Config::from_config() {
        Ok(c) => c,
        Err(e) => panic!("Cant load/parse config file: {}", e)
    };
    // Only this user id may run commands.
    let admin_user = telegram_bot::UserId::new(config.admin_user_id);
    let api = Api::configure(config.token.to_owned()).build(&handle).unwrap();
    // Fetch new updates via long poll method
    let future = api.stream().for_each(|update| {
        match update.kind {
            UpdateKind::Message(message) => {
                if message.from.id == admin_user {
                    println!("{:?}", message);
                    handle_message(api.clone(), message, &handle, &config)
                } else {
                    eprintln!("Unknown user tried to call the bot {:?}", message);
                    api.spawn(telegram_bot::SendMessage::new(message.to_source_chat(), "Sorry, I'm not allowed to talk to strangers."))
                }
            },
            UpdateKind::EditedMessage(message) => api.spawn(telegram_bot::SendMessage::new(message.to_source_chat(), "I don't support edited messages.")),
            _ => println!("I don't support this kind of message."),
        }
        Ok(())
    });
    // Drive the long-poll stream; runs until the stream ends or errors.
    core.run(future).unwrap();
}
/// Replies to `/start` with an overview of every configured command.
fn handle_start(api: Api, message: Message, _handle: &tokio_core::reactor::Handle, commands: &HashMap<String, config::Command>) {
    let mut available_commands = String::from("Overview of all configured commands");
    // Push literals directly; the previous `String::from(...)` wrappers
    // allocated an intermediate String for every append.
    available_commands.push_str("\n\n");
    available_commands.push_str("/start -> This view");
    for (command_string, command) in commands {
        available_commands.push('\n');
        available_commands.push_str(&format!("{} -> {}", command_string, command.name));
    }
    api.spawn(telegram_bot::SendMessage::new(message.to_source_chat(), available_commands))
}
fn handle_command(api: Api, message: Message, _handle: &tokio_core::reactor::Handle, command: &config::Command) {
println!("Command: {:?}", command);
match std::process::Command::new(&command.script).output() {
Ok(o) => {
let output_length = if o.stdout.len() > 4095 { 4095 } else { o.stdout.len()};
let output = String::from_utf8(o.stdout[0..output_length].to_vec()).expect("Not UTF-8");
println!("Output: {:?}", output );
api.spawn(telegram_bot::SendMessage::new(message.to_source_chat(), output))
},
Err(e) => {
println!("Error: {:?}", e);
api.spawn(telegram_bot::SendMessage::new(message.to_source_chat(), format!("Command {} returned an error: {}", e, e)))
}
}
}
// Routes an admin text message: "/start" lists the commands; any other
// text matching a configured command key runs that command; everything
// else (including non-text messages) is ignored.
fn handle_message(api: Api, message: Message, handle: &tokio_core::reactor::Handle, config: &config::Config) {
    match message.kind {
        MessageKind::Text {ref data, ..} => {
            match data.as_str() {
                "/start" => handle_start(api, message.to_owned(), handle, &config.commands),
                // NOTE(review): `.to_owned()` on the `&Command` here appears
                // to just copy the reference (via `&Command: Clone`) —
                // confirm `Command` itself is not `Clone`.
                s => if config.commands.contains_key(s) {
                    handle_command(api, message.to_owned(), handle, config.commands.get(s).unwrap().to_owned())
                },
            }
        }
        _ => return
    }
}
use wasm_bindgen::prelude::*;
use crate::{active_tab, goto_page};
/// Loads the Archviz Unity WebGL page and injects the three script
/// elements Unity needs: the progress bar, the loader, and the instance
/// bootstrap.
#[wasm_bindgen]
pub async fn archviz() {
    // Set active tab.
    active_tab("");
    // Go to the page.
    goto_page(
        "/projects/archviz",
        "/api/projects/archviz/archviz.html?ver=gIkkDibIHyE",
        "Archviz",
    )
    .await;
    let window = web_sys::window().expect("No global `window` exists");
    let document = window.document().expect("Should have a document on window");
    // Load Unity's JavaScript stuff.
    let loader = document
        .create_element("script")
        .expect("Could not create Unity Load script element.");
    loader
        .set_attribute(
            "src",
            "/api/projects/archviz/Build/UnityLoader.js?ver=d8tjExZ0_k8",
        )
        .expect("Could not set unity loader 'src' attribute.");
    loader
        .set_attribute("onload", "unityInitializer()")
        .expect("Could not set unity loader 'onload' attribute.");
    let progress = document
        .create_element("script")
        .expect("Could not create Unity Progress script element.");
    progress
        .set_attribute(
            "src",
            "/api/projects/archviz/TemplateData/UnityProgress.js?ver=ac6T--xi1Fs",
        )
        .expect("Could not set unity progress bar 'src' attribute.");
    let instance = document
        .create_element("script")
        .expect("Could not create Unity instance script element.");
    instance.set_inner_html(
        "var unityInstance;
function unityInitializer() {
unityInstance = UnityLoader.instantiate(
\"unityContainer\",
\"/api/projects/archviz/Build/WebGLBuild.json?ver=h0kqziNXTOM\",
{onProgress: UnityProgress});
}",
    );
    // Look up <head> once instead of three times; append in the original
    // order (progress, loader, instance) so `UnityProgress` and
    // `unityInitializer` are defined before the loader's onload can fire.
    if let Some(head) = document.get_elements_by_tag_name("head").item(0) {
        head.append_with_node_1(&progress)
            .expect("Could not append Unity Progress script to document");
        head.append_with_node_1(&loader)
            .expect("Could not append Unity Load script to document");
        head.append_with_node_1(&instance)
            .expect("Could not append Unity Instance script to document");
    }
}
|
// svd2rust-generated accessors for the HSEM core-2 interrupt clear register.
#[doc = "Register `HSEM_C2ICR` reader"]
pub type R = crate::R<HSEM_C2ICR_SPEC>;
#[doc = "Register `HSEM_C2ICR` writer"]
pub type W = crate::W<HSEM_C2ICR_SPEC>;
#[doc = "Field `ISC` reader - ISC"]
pub type ISC_R = crate::FieldReader<u32>;
#[doc = "Field `ISC` writer - ISC"]
pub type ISC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - ISC"]
    #[inline(always)]
    pub fn isc(&self) -> ISC_R {
        ISC_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - ISC"]
    #[inline(always)]
    #[must_use]
    pub fn isc(&mut self) -> ISC_W<HSEM_C2ICR_SPEC, 0> {
        ISC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "HSEM interrupt clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hsem_c2icr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hsem_c2icr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HSEM_C2ICR_SPEC;
impl crate::RegisterSpec for HSEM_C2ICR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`hsem_c2icr::R`](R) reader structure"]
impl crate::Readable for HSEM_C2ICR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hsem_c2icr::W`](W) writer structure"]
impl crate::Writable for HSEM_C2ICR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HSEM_C2ICR to value 0"]
impl crate::Resettable for HSEM_C2ICR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use pyo3::prelude::*;
use numpy::
{
IntoPyArray,
PyArrayDyn,
PyReadonlyArrayDyn
};
/// Registers the `modified_canonical` submodule (and its `asymptotic`
/// child) on `parent_module`, exposing the `FJC` class to Python.
pub fn register_module(py: Python<'_>, parent_module: &PyModule) -> PyResult<()>
{
    let modified_canonical = PyModule::new(py, "modified_canonical")?;
    // Register the nested asymptotic module before attaching this one.
    super::asymptotic::py::register_module(py, modified_canonical)?;
    parent_module.add_submodule(modified_canonical)?;
    modified_canonical.add_class::<FJC>()?;
    Ok(())
}
/// The freely-jointed chain (FJC) model thermodynamics in the modified canonical ensemble.
// All fields are exposed to Python as read-only attributes via #[pyo3(get)].
#[pyclass]
#[derive(Copy, Clone)]
pub struct FJC
{
    /// The mass of each hinge in the chain in units of kg/mol.
    #[pyo3(get)]
    pub hinge_mass: f64,
    /// The length of each link in the chain in units of nm.
    #[pyo3(get)]
    pub link_length: f64,
    /// The number of links in the chain.
    #[pyo3(get)]
    pub number_of_links: u8,
    /// The thermodynamic functions of the model in the isotensional ensemble approximated using an asymptotic approach.
    #[pyo3(get)]
    pub asymptotic: super::asymptotic::py::FJC
}
#[pymethods]
impl FJC
{
#[new]
// Python constructor: builds the model and its asymptotic-approximation
// companion from the same chain parameters.
pub fn init(number_of_links: u8, link_length: f64, hinge_mass: f64) -> Self
{
    FJC
    {
        hinge_mass,
        link_length,
        number_of_links,
        asymptotic: super::asymptotic::py::FJC::init(number_of_links, link_length, hinge_mass)
    }
}
/// The expected end-to-end length as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
///     potential_distance (numpy.ndarray): The potential distance.
///     potential_stiffness (float): The potential stiffness.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The end-to-end length :math:`\xi`.
///
pub fn end_to_end_length<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Broadcast the scalar model function over every entry of the array.
    potential_distance.as_array().mapv(|potential_distance: f64| super::end_to_end_length(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The expected end-to-end length per link as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The end-to-end length per link :math:`\xi/N_b=\ell_b\gamma`.
///
pub fn end_to_end_length_per_link<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::end_to_end_length_per_link(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The expected nondimensional end-to-end length as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional end-to-end length :math:`N_b\gamma=\xi/\ell_b`.
///
pub fn nondimensional_end_to_end_length<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_end_to_end_length(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
/// The expected nondimensional end-to-end length per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional end-to-end length :math:`\gamma\equiv\xi/N_b\ell_b`.
///
pub fn nondimensional_end_to_end_length_per_link<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_end_to_end_length_per_link(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
/// The expected force as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The force :math:`f`.
///
pub fn force<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::force(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The expected nondimensional force as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional force :math:`\eta\equiv\beta f\ell_b`.
///
pub fn nondimensional_force<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_force(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
/// The Helmholtz free energy as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The Helmholtz free energy :math:`\psi`.
///
pub fn helmholtz_free_energy<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::helmholtz_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The Helmholtz free energy per link as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The Helmholtz free energy per link :math:`\psi/N_b`.
///
pub fn helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::helmholtz_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The relative Helmholtz free energy as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The relative Helmholtz free energy :math:`\Delta\psi`.
///
pub fn relative_helmholtz_free_energy<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::relative_helmholtz_free_energy(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The relative Helmholtz free energy per link as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The relative Helmholtz free energy per link :math:`\Delta\psi/N_b`.
///
pub fn relative_helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::relative_helmholtz_free_energy_per_link(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The nondimensional Helmholtz free energy as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The nondimensional Helmholtz free energy :math:`\beta\psi=N_b\vartheta`.
///
pub fn nondimensional_helmholtz_free_energy<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_helmholtz_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, &nondimensional_potential_distance, &nondimensional_potential_stiffness, &temperature)).into_pyarray(py)
}
/// The nondimensional Helmholtz free energy per link as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The nondimensional Helmholtz free energy per link :math:`\vartheta\equiv\beta\psi/N_b`.
///
pub fn nondimensional_helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_helmholtz_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, &nondimensional_potential_distance, &nondimensional_potential_stiffness, &temperature)).into_pyarray(py)
}
/// The nondimensional relative Helmholtz free energy as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional relative Helmholtz free energy :math:`\beta\Delta\psi=N_b\Delta\vartheta`.
///
pub fn nondimensional_relative_helmholtz_free_energy<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_relative_helmholtz_free_energy(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
/// The nondimensional relative Helmholtz free energy per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional relative Helmholtz free energy per link :math:`\Delta\vartheta\equiv\beta\Delta\psi/N_b`.
///
pub fn nondimensional_relative_helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_relative_helmholtz_free_energy_per_link(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
/// The Gibbs free energy as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The Gibbs free energy :math:`\varphi`.
///
pub fn gibbs_free_energy<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::gibbs_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The Gibbs free energy epr link as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The Gibbs free energy per link :math:`\varphi/N_b`.
///
pub fn gibbs_free_energy_per_link<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::gibbs_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The relative Gibbs free energy as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The relative Gibbs free energy :math:`\Delta\varphi`.
///
pub fn relative_gibbs_free_energy<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::relative_gibbs_free_energy(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The relative Gibbs free energy per link as a function of the applied potential distance, potential stiffness, and temperature.
///
/// Args:
/// potential_distance (numpy.ndarray): The potential distance.
/// potential_stiffness (float): The potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The relative Gibbs free energy per link :math:`\Delta\varphi/N_b`.
///
pub fn relative_gibbs_free_energy_per_link<'py>(&self, py: Python<'py>, potential_distance: PyReadonlyArrayDyn<f64>, potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
potential_distance.as_array().mapv(|potential_distance: f64| super::relative_gibbs_free_energy_per_link(&self.number_of_links, &self.link_length, &potential_distance, &potential_stiffness, &temperature)).into_pyarray(py)
}
/// The nondimensional Gibbs free energy as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The nondimensional Gibbs free energy :math:`\beta\varphi=N_b\varrho`.
///
pub fn nondimensional_gibbs_free_energy<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_gibbs_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, &nondimensional_potential_distance, &nondimensional_potential_stiffness, &temperature)).into_pyarray(py)
}
/// The nondimensional Gibbs free energy per link as a function of the applied nondimensional potential distance, nondimensional potential stiffness, and temperature.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
/// temperature (float): The temperature :math:`T`.
///
/// Returns:
/// numpy.ndarray: The nondimensional Gibbs free energy per link :math:`\varrho\equiv\beta\varphi/N_b`.
///
pub fn nondimensional_gibbs_free_energy_per_link<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64, temperature: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_gibbs_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, &nondimensional_potential_distance, &nondimensional_potential_stiffness, &temperature)).into_pyarray(py)
}
/// The nondimensional relative Gibbs free energy as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional relative Gibbs free energy :math:`\beta\Delta\varphi=N_b\Delta\varrho`.
///
pub fn nondimensional_relative_gibbs_free_energy<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_relative_gibbs_free_energy(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
/// The nondimensional relative Gibbs free energy per link as a function of the applied nondimensional potential distance and nondimensional potential stiffness.
///
/// Args:
/// nondimensional_potential_distance (numpy.ndarray): The nondimensional potential distance.
/// nondimensional_potential_stiffness (float): The nondimensional potential stiffness.
///
/// Returns:
/// numpy.ndarray: The nondimensional relative Gibbs free energy per link :math:`\Delta\varrho\equiv\beta\Delta\varphi/N_b`.
///
pub fn nondimensional_relative_gibbs_free_energy_per_link<'py>(&self, py: Python<'py>, nondimensional_potential_distance: PyReadonlyArrayDyn<f64>, nondimensional_potential_stiffness: f64) -> &'py PyArrayDyn<f64>
{
nondimensional_potential_distance.as_array().mapv(|nondimensional_potential_distance: f64| super::nondimensional_relative_gibbs_free_energy_per_link(&self.number_of_links, &nondimensional_potential_distance, &nondimensional_potential_stiffness)).into_pyarray(py)
}
} |
// Check if the final remaining boards are complete.
//
// Input:
// board depth idx
// ...
//
// board: hex representation of bit-board
// depth: the depth of the board
// idx: move index (the index of return value of Board#next)
#[macro_use]
extern crate precomp;
use std::process;
use precomp::{In, Out};
use precomp::board::{Board, Result};
use precomp::board_collection::{BoardSet, BoardMap};
// A solved board entry loaded from the input.
struct Node {
    // Move index (the index into the return value of Board#next).
    idx: u8,
    // The depth of the board, as given in the input.
    depth: i32,
}
// Load all boards from the input, returning the node list plus a map
// from each board to its index in that list.
fn load() -> (Vec<Node>, BoardMap) {
    let mut map = BoardMap::new();
    let mut nodes = vec![];
    In::each(|b, depth, idx| {
        // Record this board's node index before pushing the node itself.
        map[b] = nodes.len() as i32;
        nodes.push(Node {
            idx: idx as u8,
            // Field-init shorthand (was the redundant `depth: depth`).
            depth,
        })
    });
    log!("board#: {}", nodes.len());
    (nodes, map)
}
// Log a formatted message (same arguments as `log!`) and terminate the
// process with a non-zero exit code.
macro_rules! error(
    ($($arg:tt)*) => { {
        log!($($arg)*);
        process::exit(1)
    } }
);
fn main() {
    let mut out = Out::new();
    let (nodes, map) = load();
    // DFS work list of (board, depth bound) pairs, seeded from the initial board.
    let mut boards = vec![];
    if let Result::Unknown(bs) = Board::init().next() {
        for b in bs { boards.push((b, 78)) }
    }
    let mut visited = BoardSet::new();
    // straightforward DFS
    while let Some((b, depth)) = boards.pop() {
        if visited.contains(b) { continue }
        visited.insert(b);
        out!(out, "{:015x}\n", b.0);
        if b.easy() { continue }
        if !map.contains(b) {
            error!("unknown board!: {:015x}", b.0);
        }
        // `let node = &…` instead of the deprecated-style `let ref node`.
        let node = &nodes[map[b] as usize];
        if node.depth >= depth {
            // Report the stored depth and the bound it should be below.
            // (The original printed `depth` for both placeholders, hiding
            // the offending stored value `node.depth`.)
            error!("error! board={:015x} depth={} (expected: <{})\n",
                   b.0, node.depth, depth);
        }
        match b.next() {
            Result::Win | Result::Lose => error!("error!"),
            Result::Unknown(bs) =>
                if let Result::Unknown(bs) = bs[node.idx as usize].next() {
                    for b in bs { boards.push((b, node.depth)) }
                }
        }
    }
    log!("OK!")
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Result of a list-operations request: a collection of supported operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
/// A supported REST API operation, with an optional display description.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
pub mod operation {
    use super::*;
    /// Human-readable description of an [`Operation`].
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
    }
}
/// Properties of a [`Solution`]: the linked workspace plus the resources it tracks.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SolutionProperties {
    /// Required: resource id of the workspace this solution is attached to.
    #[serde(rename = "workspaceResourceId")]
    pub workspace_resource_id: String,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "containedResources", default, skip_serializing_if = "Vec::is_empty")]
    pub contained_resources: Vec<String>,
    #[serde(rename = "referencedResources", default, skip_serializing_if = "Vec::is_empty")]
    pub referenced_resources: Vec<String>,
}
/// Properties of a [`ManagementAssociation`]: the associated application id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementAssociationProperties {
    #[serde(rename = "applicationId")]
    pub application_id: String,
}
/// Properties of a [`ManagementConfiguration`]: an ARM template and its parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementConfigurationProperties {
    #[serde(rename = "applicationId", default, skip_serializing_if = "Option::is_none")]
    pub application_id: Option<String>,
    #[serde(rename = "parentResourceType")]
    pub parent_resource_type: String,
    pub parameters: Vec<ArmTemplateParameter>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    /// Arbitrary JSON body of the template.
    pub template: serde_json::Value,
}
/// Collection of [`Solution`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SolutionPropertiesList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Solution>,
}
/// Collection of [`ManagementAssociation`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementAssociationPropertiesList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ManagementAssociation>,
}
/// Collection of [`ManagementConfiguration`] resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementConfigurationPropertiesList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ManagementConfiguration>,
}
/// Plan (offer) information for a [`Solution`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SolutionPlan {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(rename = "promotionCode", default, skip_serializing_if = "Option::is_none")]
    pub promotion_code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub product: Option<String>,
}
/// A Solution resource: standard ARM envelope (id/name/type/location/tags)
/// plus an optional plan and [`SolutionProperties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Solution {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<SolutionPlan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SolutionProperties>,
}
/// Patch body for a [`Solution`]: only tags may be updated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SolutionPatch {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// A ManagementAssociation resource: ARM envelope plus its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementAssociation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ManagementAssociationProperties>,
}
/// A ManagementConfiguration resource: ARM envelope plus its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ManagementConfigurationProperties>,
}
/// A name/value parameter passed to an ARM template deployment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ArmTemplateParameter {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// Error envelope returned by the service, wrapping a code/message pair.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CodeMessageError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<code_message_error::Error>,
}
pub mod code_message_error {
    use super::*;
    /// The code/message payload of a [`CodeMessageError`].
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
    }
}
|
use futures::{
sink::{Sink, SinkExt},
stream::{Stream, StreamExt},
};
use tokio::sync::mpsc;
use tracing::{debug, error};
/// Forwards a Stream to a tokio::sync::mpsc::Sender of the same item type.
///
/// Runs until the stream is exhausted or the channel is closed; a failed
/// send terminates the forwarding loop.
pub async fn stream_to_sender<Item, S>(mut stream: S, sender: mpsc::Sender<Item>)
where
    S: Stream<Item = Item> + Unpin,
{
    debug!("stream_to_sender starting");
    while let Some(item) = stream.next().await {
        match sender.send(item).await {
            Ok(()) => {}
            Err(err) => {
                // The receiving side is gone; stop forwarding.
                error!("Queue error: {}", err);
                break;
            }
        }
    }
    debug!("stream_to_sender closing");
}
/// Forwards a Stream to a Sink of the same item type.
///
/// Terminates when the stream ends or when the sink reports a write error.
/// Stopping on the first error mirrors `stream_to_sender` above; the
/// original kept looping after a failed `send`, which would log one error
/// per remaining item on a sink that can no longer accept writes.
pub async fn stream_to_sink<Item, S, K>(mut stream: S, mut sink: K)
where
    S: Stream<Item = Item> + Unpin,
    K: Sink<Item> + Unpin,
    <K as Sink<Item>>::Error: std::fmt::Display,
{
    debug!("stream_to_sink starting");
    while let Some(item) = stream.next().await {
        debug!("stream_to_sink got item");
        if let Err(err) = sink.send(item).await {
            error!("Write error: {}", err);
            break;
        }
    }
    debug!("stream_to_sink closing");
}
|
use core::convert::TryInto;
use embedded_time::{duration::*, Clock, Instant};
use heapless::ArrayLength;
use super::{Error, Result, RingBuffer, Socket, SocketHandle, SocketMeta};
/// A TCP socket ring buffer of raw octets, with capacity `N`.
pub type SocketBuffer<N> = RingBuffer<u8, N>;
/// Connection state of a socket (a reduced TCP state machine).
#[derive(Debug, PartialEq, Eq, Clone, Copy, defmt::Format)]
pub enum State {
    /// Freshly created, unsullied
    Created,
    /// TCP connected or UDP has an address
    Connected,
    /// Block all writes
    ShutdownForWrite,
}
impl Default for State {
    /// A socket starts out freshly created.
    fn default() -> Self {
        Self::Created
    }
}
/// A Transmission Control Protocol socket.
///
/// A TCP socket may passively listen for connections or actively connect to another endpoint.
/// Note that, for listening sockets, there is no "backlog"; to be able to simultaneously
/// accept several connections, as many sockets must be allocated, or any new connection
/// attempts will be reset.
pub struct TcpSocket<L: ArrayLength<u8>, CLK: Clock> {
    pub(crate) meta: SocketMeta,
    // Current position in the reduced TCP state machine; see `State`.
    state: State,
    // Minimum interval between refreshes of `available_data`;
    // see `should_update_available_data`.
    check_interval: Seconds<u32>,
    // Byte count last recorded via `set_available_data`.
    available_data: usize,
    // Received-but-not-yet-consumed octets.
    rx_buffer: SocketBuffer<L>,
    // Timestamp of the most recent available-data check, if any.
    last_check_time: Option<Instant<CLK>>,
}
impl<L: ArrayLength<u8>, CLK: Clock> TcpSocket<L, CLK> {
/// Create a socket using the given buffers.
pub fn new(socket_id: u8) -> TcpSocket<L, CLK> {
TcpSocket {
meta: SocketMeta {
handle: SocketHandle(socket_id),
},
state: State::default(),
rx_buffer: SocketBuffer::new(),
available_data: 0,
check_interval: Seconds(15),
last_check_time: None,
}
}
    /// Return the handle uniquely identifying this socket.
    #[inline]
    pub fn handle(&self) -> SocketHandle {
        self.meta.handle
    }
    /// Return the connection state, in terms of this socket's reduced TCP state machine.
    #[inline]
    pub fn state(&self) -> State {
        self.state
    }
    /// Return whether enough time has elapsed since the last check that the
    /// available-data count should be refreshed; on `true`, `ts` is recorded
    /// as the new last-check time.
    ///
    /// Returns `true` when there is no previous check, or when the elapsed
    /// duration since the last check is at least `check_interval`. Note that
    /// `unwrap_or(true)` also makes this `true` whenever the duration cannot
    /// be computed or converted to `Milliseconds` — the throttle is
    /// best-effort rather than strict.
    pub fn should_update_available_data(&mut self, ts: Instant<CLK>) -> bool
    where
        Generic<CLK::T>: TryInto<Milliseconds>,
    {
        let should_update = self
            .last_check_time
            .as_ref()
            .and_then(|last_check_time| ts.checked_duration_since(last_check_time))
            .and_then(|dur| dur.try_into().ok())
            .map(|dur: Milliseconds<u32>| dur >= self.check_interval)
            .unwrap_or(true);
        if should_update {
            // Start a new throttle window from `ts`.
            self.last_check_time.replace(ts);
        }
        should_update
    }
    /// Record the number of bytes available to ingress.
    pub fn set_available_data(&mut self, available_data: usize) {
        self.available_data = available_data;
    }
    /// Get the number of bytes available to ingress, as last recorded
    /// via [set_available_data](#method.set_available_data).
    pub fn get_available_data(&self) -> usize {
        self.available_data
    }
    /// Return whether a connection is active.
    ///
    /// This function returns true if the socket is actively exchanging packets
    /// with a remote endpoint. Note that this does not mean that it is possible
    /// to send or receive data through the socket; for that, use
    /// [can_send](#method.can_send) or [can_recv](#method.can_recv).
    ///
    /// In terms of this socket's reduced state machine, this is true only in
    /// the `Connected` state — not in `Created` or `ShutdownForWrite`.
    /// (The previous doc referenced `Closed`/`ShutdownForRead` states that do
    /// not exist in this enum.)
    #[inline]
    pub fn is_connected(&self) -> bool {
        matches!(self.state, State::Connected)
    }
/// Return whether the receive half of the full-duplex connection is open.
///
/// This function returns true if it's possible to receive data from the remote endpoint.
/// It will return true while there is data in the receive buffer, and if there isn't,
/// as long as the remote endpoint has not closed the connection.
///
/// In terms of the TCP state machine, the socket must be in the `Connected`,
/// `FIN-WAIT-1`, or `FIN-WAIT-2` state, or have data in the receive buffer instead.
#[inline]
pub fn may_recv(&self) -> bool {
match self.state {
State::Connected => true,
State::ShutdownForWrite => true,
// If we have something in the receive buffer, we can receive that.
_ if !self.rx_buffer.is_empty() => true,
_ => false,
}
}
/// Check whether the receive half of the full-duplex connection buffer is open
/// (see [may_recv](#method.may_recv), and the receive buffer is not full.
#[inline]
pub fn can_recv(&self) -> bool {
if !self.may_recv() {
return false;
}
!self.rx_buffer.is_full()
}
    // Shared receive path: verify the receive half is open, then run `f`
    // against the receive buffer. `f` returns how many octets it consumed
    // (unused here; the buffer primitives dequeue internally) plus a result
    // forwarded to the caller.
    fn recv_impl<'b, F, R>(&'b mut self, f: F) -> Result<R>
    where
        F: FnOnce(&'b mut SocketBuffer<L>) -> (usize, R),
    {
        // Refuse to dequeue once the receive half is closed and drained.
        if !self.may_recv() {
            return Err(Error::Illegal);
        }
        let (_size, result) = f(&mut self.rx_buffer);
        Ok(result)
    }
/// Call `f` with the largest contiguous slice of octets in the receive buffer,
/// and dequeue the amount of elements returned by `f`.
///
/// This function returns `Err(Error::Illegal)` if the receive half of
/// the connection is not open; see [may_recv](#method.may_recv).
pub fn recv<'b, F, R>(&'b mut self, f: F) -> Result<R>
where
    F: FnOnce(&'b mut [u8]) -> (usize, R),
{
    self.recv_impl(|rx_buffer| rx_buffer.dequeue_many_with(f))
}
/// Call `f` with a slice of octets in the receive buffer, and dequeue the
/// amount of elements returned by `f`.
///
/// If the buffer read wraps around, the second argument of `f` will be
/// `Some()` with the remainder of the buffer, such that the combined slice
/// of the two arguments, makes up the full buffer.
///
/// This function returns `Err(Error::Illegal)` if the receive half of the
/// connection is not open; see [may_recv](#method.may_recv).
pub fn recv_wrapping<'b, F>(&'b mut self, f: F) -> Result<usize>
where
    F: FnOnce(&'b [u8], Option<&'b [u8]>) -> usize,
{
    self.recv_impl(|rx_buffer| {
        // Adapter: the buffer primitive expects (bytes_consumed, result);
        // here both are the length reported by `f`.
        rx_buffer.dequeue_many_with_wrapping(|a, b| {
            let len = f(a, b);
            (len, len)
        })
    })
}
/// Dequeue a sequence of received octets, and fill a slice from it.
///
/// This function returns the amount of bytes actually dequeued, which is
/// limited by the amount of data queued in the receive buffer (and by
/// `data.len()`); down to zero.
///
/// See also [recv](#method.recv).
pub fn recv_slice(&mut self, data: &mut [u8]) -> Result<usize> {
    self.recv_impl(|rx_buffer| {
        let size = rx_buffer.dequeue_slice(data);
        (size, size)
    })
}
/// Peek at a sequence of received octets without removing them from
/// the receive buffer, and return a pointer to it.
///
/// This function otherwise behaves identically to [recv](#method.recv).
pub fn peek(&mut self, size: usize) -> Result<&[u8]> {
    // Same guard as recv_impl() above: no data may be exposed before the
    // connection is fully open.
    if !self.may_recv() {
        return Err(Error::Illegal);
    }
    Ok(self.rx_buffer.get_allocated(0, size))
}
/// Return the current receive window, as reported by the receive buffer.
pub fn rx_window(&self) -> usize {
    self.rx_buffer.window()
}
/// Peek at a sequence of received octets without removing them from
/// the receive buffer, and fill a slice from it.
///
/// This function otherwise behaves identically to [recv_slice](#method.recv_slice).
pub fn peek_slice(&mut self, data: &mut [u8]) -> Result<usize> {
    let available = self.peek(data.len())?;
    // `peek` may return fewer bytes than requested; copy only that many.
    let count = available.len();
    data[..count].copy_from_slice(available);
    Ok(count)
}
/// Enqueue octets directly into the receive buffer, returning the number
/// of octets actually enqueued (see `SocketBuffer::enqueue_slice` —
/// presumably capped by the remaining free space; confirm).
pub fn rx_enqueue_slice(&mut self, data: &[u8]) -> usize {
    self.rx_buffer.enqueue_slice(data)
}
/// Return the amount of octets queued in the receive buffer.
///
/// Note that the Berkeley sockets interface does not have an equivalent of this API.
pub fn recv_queue(&self) -> usize {
    self.rx_buffer.len()
}
/// Force the socket into `state`.
///
/// NOTE(review): this bypasses any transition checks — presumably intended
/// for the driver/ingress path only; confirm callers.
pub fn set_state(&mut self, state: State) {
    self.state = state
}
}
/// Wraps a TCP socket into the socket-set enum.
///
/// Implemented as `From` rather than `Into` (clippy `from_over_into`): the
/// standard library's blanket impl still provides `Into<Socket<L, CLK>>`
/// for `TcpSocket<L, CLK>`, so existing `.into()` call sites keep working.
impl<L: ArrayLength<u8>, CLK: Clock> From<TcpSocket<L, CLK>> for Socket<L, CLK> {
    fn from(socket: TcpSocket<L, CLK>) -> Self {
        Socket::Tcp(socket)
    }
}
|
use itertools::Itertools;
use num_integer::Integer;
use regex::Regex;
use std::{cmp::Ordering, collections::HashMap};
/// Parses moon positions of the form `<x=…, y=…, z=…>` into three parallel
/// per-axis coordinate vectors (the axes evolve independently).
#[aoc_generator(day12)]
pub fn day12_gen(input: &str) -> (Vec<i32>, Vec<i32>, Vec<i32>) {
    lazy_static! {
        static ref PATTERN: Regex =
            Regex::new(r"<x=(?P<x>-?\d+),\s*y=(?P<y>-?\d+),\s*z=(?P<z>-?\d+)>").unwrap();
    }
    let (mut xs, mut ys, mut zs) = (Vec::new(), Vec::new(), Vec::new());
    for caps in PATTERN.captures_iter(input) {
        xs.push(caps["x"].parse().unwrap());
        ys.push(caps["y"].parse().unwrap());
        zs.push(caps["z"].parse().unwrap());
    }
    (xs, ys, zs)
}
/// Advances one axis of the simulation by a single time step: applies
/// pairwise "gravity" (each moon's velocity moves one unit toward every
/// other moon on this axis), then adds the velocities to the positions.
///
/// Takes `&mut [i32]` instead of `&mut Vec<i32>` (clippy `ptr_arg`);
/// existing callers passing `&mut Vec<i32>` still work via deref coercion.
fn step_axis(positions: &mut [i32], velocities: &mut [i32]) {
    for (&p1, vel) in positions.iter().zip(velocities.iter_mut()) {
        for &p2 in positions.iter() {
            // +1 if p1 < p2, -1 if p1 > p2, 0 when coordinates coincide.
            *vel += match p1.cmp(&p2) {
                Ordering::Less => 1,
                Ordering::Equal => 0,
                Ordering::Greater => -1,
            }
        }
    }
    for (pos, vel) in positions.iter_mut().zip(velocities.iter()) {
        *pos += vel;
    }
}
/// Part 1: simulate 1000 steps and return the total energy of the system
/// (per moon: Manhattan potential energy × Manhattan kinetic energy).
#[aoc(day12, part1)]
pub fn day12_part1(input: &(Vec<i32>, Vec<i32>, Vec<i32>)) -> i32 {
    const STEPS: u64 = 1000;
    let (mut px, mut py, mut pz) = (input.0.clone(), input.1.clone(), input.2.clone());
    let (mut vx, mut vy, mut vz) = (
        vec![0; px.len()],
        vec![0; py.len()],
        vec![0; pz.len()],
    );
    for _ in 0..STEPS {
        step_axis(&mut px, &mut vx);
        step_axis(&mut py, &mut vy);
        step_axis(&mut pz, &mut vz);
    }
    (0..px.len())
        .map(|i| {
            let potential = px[i].abs() + py[i].abs() + pz[i].abs();
            let kinetic = vx[i].abs() + vy[i].abs() + vz[i].abs();
            potential * kinetic
        })
        .sum()
}
/// Finds the number of steps until one axis returns to its initial
/// positions with all velocities back at zero. Velocities start at zero,
/// so this is the first full cycle of that axis.
fn find_axis_cycle(initial_positions: &[i32]) -> u64 {
    let mut positions = initial_positions.to_vec();
    let mut velocities = vec![0; positions.len()];
    for step in 1u64.. {
        step_axis(&mut positions, &mut velocities);
        if velocities.iter().all(|&v| v == 0) && positions == initial_positions {
            return step;
        }
    }
    unreachable!("the axis simulation is periodic and always returns to its start state")
}
/// Part 2: the three axes evolve independently, so the period of the whole
/// system is the LCM of the per-axis cycle lengths.
#[aoc(day12, part2)]
pub fn day12_part2(input: &(Vec<i32>, Vec<i32>, Vec<i32>)) -> u64 {
    let x_cycle = find_axis_cycle(&input.0);
    let y_cycle = find_axis_cycle(&input.1);
    let z_cycle = find_axis_cycle(&input.2);
    x_cycle.lcm(&y_cycle).lcm(&z_cycle)
}
|
// svd2rust-style generated reader API for the BSEC_OTP_STATUS register:
// `R` wraps the raw 32-bit value and each field type reads a single bit.
#[doc = "Register `BSEC_OTP_STATUS` reader"]
pub type R = crate::R<BSEC_OTP_STATUS_SPEC>;
#[doc = "Field `SECURE` reader - SECURE"]
pub type SECURE_R = crate::BitReader;
#[doc = "Field `FULLDBG` reader - FULLDBG"]
pub type FULLDBG_R = crate::BitReader;
#[doc = "Field `INVALID` reader - INVALID"]
pub type INVALID_R = crate::BitReader;
#[doc = "Field `BUSY` reader - BUSY"]
pub type BUSY_R = crate::BitReader;
#[doc = "Field `PROGFAIL` reader - PROGFAIL"]
pub type PROGFAIL_R = crate::BitReader;
#[doc = "Field `PWRON` reader - PWRON"]
pub type PWRON_R = crate::BitReader;
#[doc = "Field `BIST1LOCK` reader - BIST1LOCK"]
pub type BIST1LOCK_R = crate::BitReader;
#[doc = "Field `BIST2LOCK` reader - BIST2LOCK"]
pub type BIST2LOCK_R = crate::BitReader;
impl R {
    /// Returns bit `n` of the raw register value.
    #[inline(always)]
    fn bit_at(&self, n: u8) -> bool {
        (self.bits >> n) & 1 != 0
    }
    #[doc = "Bit 0 - SECURE"]
    #[inline(always)]
    pub fn secure(&self) -> SECURE_R {
        SECURE_R::new(self.bit_at(0))
    }
    #[doc = "Bit 1 - FULLDBG"]
    #[inline(always)]
    pub fn fulldbg(&self) -> FULLDBG_R {
        FULLDBG_R::new(self.bit_at(1))
    }
    #[doc = "Bit 2 - INVALID"]
    #[inline(always)]
    pub fn invalid(&self) -> INVALID_R {
        INVALID_R::new(self.bit_at(2))
    }
    #[doc = "Bit 3 - BUSY"]
    #[inline(always)]
    pub fn busy(&self) -> BUSY_R {
        BUSY_R::new(self.bit_at(3))
    }
    #[doc = "Bit 4 - PROGFAIL"]
    #[inline(always)]
    pub fn progfail(&self) -> PROGFAIL_R {
        PROGFAIL_R::new(self.bit_at(4))
    }
    #[doc = "Bit 5 - PWRON"]
    #[inline(always)]
    pub fn pwron(&self) -> PWRON_R {
        PWRON_R::new(self.bit_at(5))
    }
    #[doc = "Bit 6 - BIST1LOCK"]
    #[inline(always)]
    pub fn bist1lock(&self) -> BIST1LOCK_R {
        BIST1LOCK_R::new(self.bit_at(6))
    }
    #[doc = "Bit 7 - BIST2LOCK"]
    #[inline(always)]
    pub fn bist2lock(&self) -> BIST2LOCK_R {
        BIST2LOCK_R::new(self.bit_at(7))
    }
}
#[doc = "BSEC OTP status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bsec_otp_status::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct BSEC_OTP_STATUS_SPEC;
// Marker impls: the register is 32 bits wide, read-only from this API's
// point of view (only `Readable` is implemented), and resets to 0.
impl crate::RegisterSpec for BSEC_OTP_STATUS_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`bsec_otp_status::R`](R) reader structure"]
impl crate::Readable for BSEC_OTP_STATUS_SPEC {}
#[doc = "`reset()` method sets BSEC_OTP_STATUS to value 0"]
impl crate::Resettable for BSEC_OTP_STATUS_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::CRC;
/// Compile-time-parameterized CRC-8 engine.
///
/// Type parameters mirror the Rocksoft model: `POLY` (non-reflected
/// polynomial), `INIT` (initial value), `XOROUT` (final xor),
/// `REFIN`/`REFOUT` (bit-reflection of input bytes / of the final value).
pub struct CRC8Impl<
    const POLY: u8,
    const INIT: u8,
    const XOROUT: u8,
    const REFIN: bool,
    const REFOUT: bool,
>;
// NOTE(review): earlier commented-out aliases here carried 16-bit
// parameters (0x1021 etc.) that cannot fit in u8 — they were CRC-16
// leftovers and have been dropped as misleading.
/// Standard CRC-8 parameter sets (checked against published CHECK values
/// in the tests below).
pub type CRC8 = CRC8Impl<0x07, 0x00, 0x00, false, false>;
pub type CRC8CDMA2000 = CRC8Impl<0x9B, 0xFF, 0x00, false, false>;
pub type CRC8DARC = CRC8Impl<0x39, 0x00, 0x00, true, true>;
pub type CRC8DVBS2 = CRC8Impl<0xD5, 0x00, 0x00, false, false>;
// Parameter sets not yet given aliases (poly, init, xorout, refin, refout):
// CRC-8/EBU 1D FF 00 True True
// CRC-8/I-CODE 1D FD 00 False False
// CRC-8/ITU 07 00 55 False False
// CRC-8/MAXIM 31 00 00 True True
// CRC-8/ROHC 07 FF 00 True True
// CRC-8/WCDMA 9B 00 00 True True
impl<
    const POLY: u8,
    const INIT: u8,
    const XOROUT: u8,
    const REFIN: bool,
    const REFOUT: bool,
> CRC8Impl<POLY, INIT, XOROUT, REFIN, REFOUT>
{
    /// Runs one byte through eight shift-and-conditional-xor rounds of the
    /// (non-reflected) polynomial.
    ///
    /// The two-element table `[0, POLY]` indexed by the top bit replaces a
    /// branch, and the loop is unrolled by hand — presumably to stay within
    /// the `const fn` restrictions of the toolchain this was written for.
    const fn gen_byte(b: u8) -> u8 {
        let crc = b;
        // unrolled `for i in 0..8`
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        let crc = (crc << 1) ^ [0, POLY][(crc >> 7) as usize];
        crc
    }
    /// Initial accumulator value of this CRC variant.
    const fn init() -> u8 {
        INIT
    }
    /// Folds one input byte into the running CRC, reflecting the byte
    /// first when `REFIN` is set.
    const fn next(crc: u8, c: u8) -> u8 {
        let c = if REFIN { c.reverse_bits() } else { c };
        let c = crc ^ c;
        Self::gen_byte(c)
    }
    /// Applies the final reflection (`REFOUT`) and xor (`XOROUT`).
    const fn finish(crc: u8) -> u8 {
        let crc = if REFOUT { crc.reverse_bits() } else { crc };
        crc ^ XOROUT
    }
    /// CRC of the ASCII string "123456789" (bytes `'1'..='9'`) — the
    /// conventional check value used to validate a parameter set.
    pub const CHECK: u8 = {
        let crc = Self::init();
        let crc = Self::next(crc, b'0' + 1);
        let crc = Self::next(crc, b'0' + 2);
        let crc = Self::next(crc, b'0' + 3);
        let crc = Self::next(crc, b'0' + 4);
        let crc = Self::next(crc, b'0' + 5);
        let crc = Self::next(crc, b'0' + 6);
        let crc = Self::next(crc, b'0' + 7);
        let crc = Self::next(crc, b'0' + 8);
        let crc = Self::next(crc, b'0' + 9);
        Self::finish(crc)
    };
}
impl<
    const POLY: u8,
    const INIT: u8,
    const XOROUT: u8,
    const REFIN: bool,
    const REFOUT: bool,
> CRC for CRC8Impl<POLY, INIT, XOROUT, REFIN, REFOUT>
{
    type Output = u8;
    /// Computes the CRC-8 of `bytes` by folding each byte through `next`
    /// and applying the final reflection/xor.
    fn calculate(bytes: &[u8]) -> u8 {
        // `.copied()` replaces the hand-rolled `.map(|&c| c)` (clippy `map_clone`).
        let crc = bytes.iter().copied().fold(Self::init(), Self::next);
        Self::finish(crc)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Each CHECK constant is the CRC of the ASCII string "123456789",
    // compared against the published check values for these variants.
    #[test]
    fn check() {
        assert_eq!(CRC8::CHECK, 0xF4);
        assert_eq!(CRC8CDMA2000::CHECK, 0xDA);
        assert_eq!(CRC8DARC::CHECK, 0x15);
        assert_eq!(CRC8DVBS2::CHECK, 0xBC)
    }
}
|
extern crate time;
extern crate byteorder;
extern crate getopts;
mod server;
mod client;
use client::test_client;
use server::test_server;
mod commands;
use std::io::{stdout, Write};
use getopts::Options;
use std::env;
#[macro_use]
extern crate log;
extern crate log4rs;
use std::default::Default;
// Default bind address (all interfaces) used when `-b` is not given.
static DEFAULT_HOST: &'static str = "0.0.0.0";
// Default port for both server (`-s`) and client (`-c`) modes.
const DEFAULT_PORT:u16 = 5001;
// Default duration of each throughput test, in seconds.
const DEFAULT_TIME:u64 = 10;
// Default number of ping round-trips to measure.
const DEFAULT_PINGS:u64 = 20;
// Crate version from Cargo metadata; `None` when built without cargo.
const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION");
fn main() {
log4rs::init_file("log.yaml", Default::default()).unwrap();
println!("Logging started using 'log4rs', see log.toml for configuration details");
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
if cfg!(debug_assertions) {
warn!("Running a not optimized version. Please use the --release build switch for any serious tests !!");
}
info!("Version: v{}", VERSION.unwrap_or("unknown"));
info!("Arguments: {:?}", args);
let mut opts = Options::new();
opts.optflag("h", "help", "Shows this text");
opts.optflag("s", "server", "Launches a server");
opts.optopt("c", "client", "connects to a server", "HOST");
opts.optopt("t", "time", &format!("time to test upload/download for in seconds (default: {})", DEFAULT_TIME), "TIME");
opts.optopt("n", "num_pings", &format!("number of pings to perform (default: {})", DEFAULT_PINGS), "PINGS");
opts.optopt("p", "port", &format!("the port to listen on and connect to (default: {})", DEFAULT_PORT), "PORT", );
opts.optopt("b", "bind", &format!("Server bind address (default: {})", DEFAULT_HOST.to_string()), "ADDR");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => {
error!("{}", f.to_string());
print_usage(&program, opts);
return;
}
};
if matches.opt_present("h") {
print_usage(&program, opts);
return;
}
let mut has_done_stuff = false;
if matches.opt_present("s") {
has_done_stuff = true;
let port = matches.opt_str("p").and_then(|p| p.parse::<u16>().ok()).unwrap_or(DEFAULT_PORT);
let bind = matches.opt_str("b").unwrap_or(DEFAULT_HOST.to_string());
let s = test_server::TestServer::new(port, &bind);
s.listen();
}
if matches.opt_present("c") {
has_done_stuff = true;
let host = &matches.opt_str("c").unwrap();
let port = matches.opt_str("p").and_then(|p| p.parse::<u16>().ok()).unwrap_or(DEFAULT_PORT);
let time = matches.opt_str("t").and_then(|p| p.parse::<u64>().ok()).unwrap_or(DEFAULT_TIME);
let pings = matches.opt_str("n").and_then(|n| n.parse::<u64>().ok()).unwrap_or(DEFAULT_PINGS);
match run_client(host, port, time, pings) {
Ok(_) => {}
Err(x) => {
error!("Error during client test: {}", x);
}
};
}
if !has_done_stuff {
print_usage(&program, opts);
}
}
/// Prints the getopts-generated usage text for `program` to stdout.
fn print_usage(program: &str, opts: Options) {
    print!("{}", opts.usage(&format!("Usage: {} [options]", program)));
}
fn run_client(host: &str, port: u16, time: u64, pings: u64) -> Result<(), ::std::io::Error> {
let mut c = try!(test_client::TestClient::new(host, port));
info!("Testing ping... ({} times) ", pings);
try!(stdout().flush());
let ping_time = try!(c.test_ping(pings));
info!("done, {:.*} ms", 2, ping_time);
info!("Testing download... ({} seconds) ", time);
try!(stdout().flush());
let download_speed = try!(c.test_downstream(time * 1_000u64));
info!("done, {}", format_speed(download_speed));
info!("Testing upload... ({} seconds) ", time);
try!(stdout().flush());
let upload_speed = try!(c.test_upstream(time * 1_000u64));
info!("done, {}", format_speed(upload_speed));
Ok(())
}
/// Formats a bit-rate as a human-readable string, scaling by factors of
/// 1024 up to Tbit/s (e.g. `2048.0` -> `"2.000 Kbit/s"`).
fn format_speed(speed: f64) -> String {
    const UNITS: [&str; 5] = ["bit/s", "Kbit/s", "Mbit/s", "Gbit/s", "Tbit/s"];
    let mut value = speed;
    let mut unit = 0;
    while value > 1024f64 && unit < UNITS.len() - 1 {
        value /= 1024f64;
        unit += 1;
    }
    format!("{:.3} {}", value, UNITS[unit])
}
|
/// Boyer–Moore majority vote: returns the element occurring more than
/// ⌊n/2⌋ times, assuming such a majority exists. For an empty input the
/// initial candidate `0` is returned (same as the original behavior).
pub fn majority_element(nums: Vec<i32>) -> i32 {
    let mut candidate = 0;
    let mut count = 0;
    for n in nums {
        if count == 0 {
            // Adopt a new candidate whenever the tally cancels out.
            candidate = n;
        }
        count += if n == candidate { 1 } else { -1 };
    }
    candidate
}
use super::Generate;
/// The seven tetromino shapes.
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Kind {
    I, T, O, J, L, S, Z,
}
/// A cell offset relative to a piece's rotation origin.
#[derive(Copy, Clone, Debug)]
pub struct DeltaPos {
    pub dx: isize,
    pub dy: isize,
}
/// A tetromino layout: four cell offsets plus the shape they belong to.
#[derive(Clone, Copy, Debug)]
pub struct Template(pub [DeltaPos; 4], pub Kind);
impl Generate for Template {
    /// Picks one of the seven piece templates uniformly at random.
    fn generate() -> Self {
        use rand;
        use rand::Rng;
        let mut rng = rand::thread_rng();
        static TEMPLATES: [Template; 7] = [I, T, O, J, L, S, Z];
        // NOTE(review): two-argument `gen_range(low, high)` is the pre-0.8
        // `rand` API — confirm the pinned crate version before upgrading.
        TEMPLATES[rng.gen_range(0, 7)]
    }
}
impl Template {
    /// Returns a copy rotated 90° via (dx, dy) -> (-dy, dx); the symmetric
    /// O piece is returned unchanged.
    #[allow(dead_code)]
    pub fn rotate_left(&self) -> Self {
        // `Template` is `Copy`, so this is a plain bitwise copy.
        let mut rotated = *self;
        if self.1 != Kind::O {
            for (cell, src) in rotated.0.iter_mut().zip(self.0.iter()) {
                cell.dx = -src.dy;
                cell.dy = src.dx;
            }
        }
        rotated
    }
    /// Returns a copy rotated 90° the opposite way, via (dx, dy) -> (dy, -dx);
    /// the symmetric O piece is returned unchanged.
    pub fn rotate_right(&self) -> Self {
        let mut rotated = *self;
        if self.1 != Kind::O {
            for (cell, src) in rotated.0.iter_mut().zip(self.0.iter()) {
                cell.dx = src.dy;
                cell.dy = -src.dx;
            }
        }
        rotated
    }
}
// Spawn layouts: each template lists its four cells as offsets around the
// rotation origin (0, 0), which the rotation methods pivot about.
pub const I: Template = Template([DeltaPos { dx: -1, dy: 0 }, DeltaPos { dx: 0, dy: 0 }, DeltaPos { dx: 1, dy: 0 }, DeltaPos { dx: 2, dy: 0 }], Kind::I);
pub const T: Template = Template([DeltaPos { dx: 0, dy: -1 }, DeltaPos { dx: -1, dy: 0 }, DeltaPos { dx: 0, dy: 0 }, DeltaPos { dx: 1, dy: 0 }], Kind::T);
pub const O: Template = Template([DeltaPos { dx: 0, dy: -1 }, DeltaPos { dx: 1, dy: -1 }, DeltaPos { dx: 0, dy: 0 }, DeltaPos { dx: 1, dy: 0 }], Kind::O);
pub const J: Template = Template([DeltaPos { dx: -1, dy: -1 }, DeltaPos { dx: -1, dy: 0 }, DeltaPos { dx: 0, dy: 0 }, DeltaPos { dx: 1, dy: 0 }], Kind::J);
pub const L: Template = Template([DeltaPos { dx: 1, dy: -1 }, DeltaPos { dx: -1, dy: 0 }, DeltaPos { dx: 0, dy: 0 }, DeltaPos { dx: 1, dy: 0 }], Kind::L);
pub const S: Template = Template([DeltaPos { dx: 0, dy: -1 }, DeltaPos { dx: 1, dy: -1 }, DeltaPos { dx: -1, dy: 0 }, DeltaPos { dx: 0, dy: 0 }], Kind::S);
pub const Z: Template = Template([DeltaPos { dx: -1, dy: -1 }, DeltaPos { dx: 0, dy: -1 }, DeltaPos { dx: 0, dy: 0 }, DeltaPos { dx: 1, dy: 0 }], Kind::Z);
|
use std::fmt;
use chrono::NaiveDate;
use serde::{Deserialize, Serialize};
use url::Url;
use crate::error::Result;
use crate::serialization as ser;
use crate::urls;
/// Custom type used for [`Movie`](./struct.Movie.html) ids.
#[derive(
    Clone, Copy, Debug, Default, Hash, PartialEq, PartialOrd, Ord, Eq, Deserialize, Serialize,
)]
pub struct MovieID(pub u32);
impl fmt::Display for MovieID {
    // Renders the bare numeric id.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl From<u32> for MovieID {
    // Convenience conversion so plain integers can be used as ids.
    fn from(i: u32) -> Self {
        MovieID(i)
    }
}
/// Movie data returned by [`Client::movie`].
///
/// `#[non_exhaustive]`: new fields may be added without a semver break, so
/// construct/match this type non-exhaustively.
///
/// [`Client::movie`]: ../client/struct.Client.html#method.movie
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct Movie {
    /// ID of the movie.
    pub id: MovieID,
    /// Movie URL.
    pub url: String,
    /// Movie runtime, in minutes.
    pub runtime: u16,
    /// Movie genres.
    pub genres: Vec<Genre>,
    /// Movie translations.
    pub translations: Vec<Translation>,
    /// Movie release dates.
    pub release_dates: Vec<ReleaseDate>,
    /// Movie artworks.
    pub artworks: Vec<Artwork>,
    /// Movie trailers.
    pub trailers: Vec<Trailer>,
    /// Movie IDs on other websites.
    pub remoteids: Vec<RemoteID>,
    /// Movie people (actors, writers, directors, producers).
    pub people: People,
}
/// Movie genre data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[cfg_attr(test, derive(Default))]
#[non_exhaustive]
pub struct Genre {
    /// Genre path.
    ///
    /// For the full URL use [`full_url`](#method.full_url).
    pub url: String,
    /// Genre name.
    pub name: String,
    /// Genre ID.
    pub id: u16,
}
impl Genre {
    /// Returns the full URL of the genre page.
    ///
    /// Delegates to `crate::urls`, which resolves the stored path against
    /// the site's base URL.
    pub fn full_url(&self) -> Result<Url> {
        urls::genre_page(&self.url)
    }
}
/// Movie translation data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct Translation {
    /// Translation language code.
    pub language_code: String,
    /// Movie name in this language.
    pub name: String,
    /// Movie overview in this language.
    ///
    /// NOTE(review): `ser::optional_string` presumably maps empty strings
    /// to `None` — confirm in `crate::serialization`.
    #[serde(deserialize_with = "ser::optional_string")]
    pub overview: Option<String>,
    /// Whether this is the movie's primary translation.
    pub is_primary: bool,
    /// Movie tagline in this language.
    #[serde(deserialize_with = "ser::optional_string")]
    pub tagline: Option<String>,
}
/// Movie release date data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct ReleaseDate {
    /// Type of release date.
    ///
    /// In the API this field is named `type`.
    // `type` is a Rust keyword, hence the serde rename.
    #[serde(rename = "type")]
    pub kind: String,
    /// The release date.
    pub date: NaiveDate,
    /// Country or location release date applies to.
    pub country: String,
}
/// Movie artwork image data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[cfg_attr(test, derive(Default))]
#[non_exhaustive]
pub struct Artwork {
    /// Artwork's ID.
    pub id: String,
    /// Artwork's type.
    pub artwork_type: String,
    /// Artwork's path.
    ///
    /// For the full URL use [`full_url`](#method.full_url).
    pub url: String,
    /// Artwork thumbnail path.
    ///
    /// For the full URL use [`full_thumb_url`](#method.full_thumb_url).
    pub thumb_url: String,
    /// Artwork's tags.
    #[serde(deserialize_with = "ser::optional_string")]
    pub tags: Option<String>,
    /// Whether this is the primary artwork of the movie.
    pub is_primary: bool,
    /// Artwork's width.
    pub width: u16,
    /// Artwork's height.
    pub height: u16,
}
impl Artwork {
    /// Returns the full URL of the artwork image.
    pub fn full_url(&self) -> Result<Url> {
        urls::image(&self.url)
    }
    /// Returns the full URL of the artwork thumbnail.
    pub fn full_thumb_url(&self) -> Result<Url> {
        urls::image(&self.thumb_url)
    }
}
/// Movie trailer data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct Trailer {
    /// Trailer full URL.
    pub url: String,
    /// Trailer title.
    pub name: String,
}
/// Movie remote ID data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct RemoteID {
    /// The ID.
    pub id: String,
    /// ID of the remote source.
    pub source_id: u32,
    /// Name of the remote source.
    pub source_name: String,
    /// Remote movie webpage URL.
    pub url: String,
}
/// Movie people data.
///
/// Each list defaults to empty (`#[serde(default)]`) so responses that omit
/// a category still deserialize.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct People {
    /// List of movie's actors.
    #[serde(default)]
    pub actors: Vec<Person>,
    /// List of movie's directors.
    #[serde(default)]
    pub directors: Vec<Person>,
    /// List of movie's producers.
    #[serde(default)]
    pub producers: Vec<Person>,
    /// List of movie's writers.
    #[serde(default)]
    pub writers: Vec<Person>,
}
/// Movie person (actor, director, etc.) data.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[cfg_attr(test, derive(Default))]
#[non_exhaustive]
pub struct Person {
    /// Person ID for this movie.
    pub id: String,
    /// Person's name.
    pub name: String,
    /// Person's role in this movie.
    #[serde(deserialize_with = "ser::optional_string")]
    pub role: Option<String>,
    /// Person's image path.
    #[serde(deserialize_with = "ser::optional_string")]
    pub people_image: Option<String>,
    /// Person's movie role image path.
    #[serde(deserialize_with = "ser::optional_string")]
    pub role_image: Option<String>,
    /// Whether this person is featured for this movie.
    pub is_featured: bool,
    /// ID of the person.
    // Distinct from `id` above, which is scoped to this movie.
    pub people_id: String,
    /// Person's IMDb ID.
    #[serde(deserialize_with = "ser::optional_string")]
    pub imdb_id: Option<String>,
    /// Person's Twitter.
    #[serde(deserialize_with = "ser::optional_string")]
    pub people_twitter: Option<String>,
    /// Person's Facebook.
    #[serde(deserialize_with = "ser::optional_string")]
    pub people_facebook: Option<String>,
    /// Person's Instagram.
    #[serde(deserialize_with = "ser::optional_string")]
    pub people_instagram: Option<String>,
}
impl Person {
    /// Returns the full URL of the person's image.
    pub fn people_image_url(&self) -> Result<Url> {
        urls::opt_image(&self.people_image)
    }
    /// Returns the full URL of the person's role image.
    pub fn role_image_url(&self) -> Result<Url> {
        urls::opt_image(&self.role_image)
    }
}
/// List of updated movies as returned by [`Client::movie_updates`].
///
/// Only ids are returned; full data must be fetched per movie.
///
/// [`Client::movie_updates`]: ../client/struct.Client.html#method.movie_updates
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[non_exhaustive]
pub struct MovieUpdates {
    /// IDs of updated movies.
    pub movies: Vec<MovieID>,
}
|
#![allow(clippy::type_complexity)]
use crate::{
components::player::Player,
resources::{
globals::{GamePhase, Globals},
turn::TurnManager,
},
};
use oxygengine::prelude::*;
/// ECS system driving the top-level game state machine
/// (start screen -> playing -> end screen -> restart).
pub struct GameSystem;
impl<'s> System<'s> for GameSystem {
    type SystemData = (
        Write<'s, Globals>,
        Write<'s, TurnManager>,
        Read<'s, InputController>,
        ReadStorage<'s, Player>,
    );
    fn run(&mut self, (mut globals, mut turns, input, players): Self::SystemData) {
        match globals.phase {
            GamePhase::Start => {
                // Wait for the fire trigger, then hand out the first turn.
                if input.trigger_or_default("fire").is_pressed() {
                    globals.phase = GamePhase::Game;
                    turns.select_nth(0);
                }
            }
            GamePhase::Game => {
                // The round ends once at most one player entity remains.
                let alive: Vec<_> = players.join().map(|p| p.0).collect();
                if alive.is_empty() {
                    globals.phase = GamePhase::End(None);
                } else if alive.len() == 1 {
                    globals.phase = GamePhase::End(Some(alive[0]));
                }
            }
            GamePhase::End(_) => {
                // Fire again to restart.
                if input.trigger_or_default("fire").is_pressed() {
                    globals.phase = GamePhase::Restart;
                }
            }
            _ => {}
        }
    }
}
|
mod beatmap;
mod configs;
mod map_tags;
mod osu_users;
mod tracking;
pub use self::{
beatmap::{DBBeatmap, DBBeatmapset},
configs::{
Authorities, EmbedsSize, GuildConfig, MinimizedPp, OsuData, Prefix, Prefixes, UserConfig,
},
map_tags::{MapsetTagWrapper, TagRow},
osu_users::{UserStatsColumn, UserValueRaw},
tracking::TrackingUser,
};
|
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use crate::{files::*, internal::*};
use maplit::hashmap;
use std::collections::HashMap;
// Rust target triple used when building the server binary; macOS cannot
// target musl, so it falls back to the native Darwin triple.
#[cfg(target_os = "macos")]
const DEFAULT_SERVER_RUST_TARGET: &str = "x86_64-apple-darwin";
#[cfg(not(target_os = "macos"))]
const DEFAULT_SERVER_RUST_TARGET: &str = "x86_64-unknown-linux-musl";
// Target triple for example backends (glibc-based triple on Linux).
#[cfg(target_os = "macos")]
const DEFAULT_EXAMPLE_BACKEND_RUST_TARGET: &str = "x86_64-apple-darwin";
#[cfg(not(target_os = "macos"))]
const DEFAULT_EXAMPLE_BACKEND_RUST_TARGET: &str = "x86_64-unknown-linux-gnu";
// Sentinel values for the client-variant CLI option.
pub const ALL_CLIENTS: &str = "all";
pub const NO_CLIENTS: &str = "none";
/// One example as described by its TOML manifest file
/// (`deny_unknown_fields`: typos in manifests fail deserialization).
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct Example {
    // Example name; matched against the `--example-name` CLI filter.
    name: String,
    #[serde(default)]
    server: ExampleServer,
    // Optional backend executables, keyed by name.
    #[serde(default)]
    backends: HashMap<String, Executable>,
    // Applications keyed by variant name (see `application_variant` options).
    applications: HashMap<String, Application>,
    // Client executables, keyed by name.
    clients: HashMap<String, Executable>,
}
impl Example {
    /// Returns true if any application of this example is an Oak Classic
    /// application.
    fn has_classic_app(&self) -> bool {
        // `matches!` replaces the match-to-bool boilerplate.
        self.applications
            .values()
            .any(|app| matches!(app, Application::Classic(_)))
    }
    /// Returns true if any application of this example is an Oak Functions
    /// application.
    fn has_functions_app(&self) -> bool {
        self.applications
            .values()
            .any(|app| matches!(app, Application::Functions(_)))
    }
}
/// A construct representing either an Oak Classic or an Oak Functions application.
///
/// Deserialized from the manifest entry's `type` tag. Because this is an
/// enum, each application is exactly one of the two kinds by construction —
/// no runtime "only one set" check is needed. (The previous doc describing
/// panics on empty/both predates this enum design.)
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
#[serde(tag = "type")]
enum Application {
    Classic(ApplicationClassic),
    Functions(ApplicationFunctions),
}
/// Build description of an Oak Classic application: its manifest, output
/// path, and the Wasm modules it is assembled from.
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct ApplicationClassic {
    manifest: String,
    out: String,
    // Wasm modules keyed by module name.
    modules: HashMap<String, Target>,
}
/// Build description of an Oak Functions application: a single Wasm module.
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct ApplicationFunctions {
    wasm_path: String,
    target: Target,
}
/// Per-example server settings; both lists default to empty.
#[derive(serde::Deserialize, Debug, Default)]
#[serde(deny_unknown_fields)]
struct ExampleServer {
    #[serde(default)]
    additional_args: Vec<String>,
    #[serde(default)]
    required_features: Vec<String>,
}
/// How to build a component: via Bazel, Cargo, npm, or an arbitrary shell
/// script.
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub enum Target {
    Bazel {
        bazel_target: String,
        #[serde(default)]
        config: String,
    },
    Cargo {
        cargo_manifest: String,
        #[serde(default)]
        additional_build_args: Vec<String>,
    },
    Npm {
        package_directory: String,
    },
    Shell {
        script: String,
    },
}
/// A runnable component: a build target plus extra run-time arguments.
/// `#[serde(flatten)]` inlines the `Target` fields into this table.
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct Executable {
    #[serde(flatten)]
    target: Target,
    #[serde(default)]
    additional_args: Vec<String>,
}
impl ApplicationClassic {
    /// Builds every Wasm module declared by the application, then assembles
    /// the application bundle itself.
    fn construct_application_build_steps(&self, example_name: &str) -> Vec<Step> {
        vec![
            Step::Multiple {
                name: "build wasm modules".to_string(),
                steps: self
                    .modules
                    .iter()
                    .map(|(name, target)| build_wasm_module(name, target, example_name))
                    .collect(),
            },
            Step::Single {
                name: "build application".to_string(),
                command: build_application(self),
            },
        ]
    }
    /// Wraps `run_clients` with this application's server: when clients are
    /// requested the server runs in the background behind them, otherwise
    /// the server runs alone in the foreground.
    fn construct_example_server_run_step(
        &self,
        example: &ClassicExample,
        run_clients: Step,
    ) -> Step {
        let opt = &example.options;
        let run_server = run_example_server(
            &opt.build_server,
            &example.example.server,
            opt.server_additional_args.clone(),
            &self.out,
            &opt.permissions_file,
        );
        if opt.build_client.client_variant == NO_CLIENTS {
            Step::Single {
                name: "run server".to_string(),
                command: run_server,
            }
        } else {
            Step::WithBackground {
                name: "background server".to_string(),
                background: run_server,
                foreground: Box::new(run_clients),
            }
        }
    }
}
impl ApplicationFunctions {
    /// Builds the application's single Wasm module (Oak Functions apps have
    /// no separate bundle step, unlike Classic apps).
    fn construct_application_build_steps(&self, example_name: &str) -> Vec<Step> {
        vec![build_wasm_module(example_name, &self.target, example_name)]
    }
    /// Wraps `run_clients` with the functions server: backgrounded behind
    /// the clients when any are requested, otherwise run alone.
    fn construct_example_server_run_step(
        &self,
        example: &FunctionsExample,
        run_clients: Step,
    ) -> Step {
        let opt = &example.options;
        let run_server = run_functions_example_server(&example.example.server, self);
        if opt.build_client.client_variant == NO_CLIENTS {
            Step::Single {
                name: "run server".to_string(),
                command: run_server,
            }
        } else {
            Step::WithBackground {
                name: "background server".to_string(),
                background: run_server,
                foreground: Box::new(run_clients),
            }
        }
    }
}
/// Common behavior shared by Classic and Functions example runners; the
/// default methods derive backend build/run steps from the accessors.
trait OakExample {
    fn get_backends(&self) -> &HashMap<String, Executable>;
    fn get_build_client(&self) -> &BuildClient;
    /// Constructs application build steps.
    fn construct_application_build_steps(&self) -> Vec<Step>;
    /// Constructs run step for the example server.
    fn construct_example_server_run_step(&self, run_clients: Step) -> Step;
    /// Constructs build steps for the backends.
    fn construct_backend_build_steps(&self) -> Vec<Step> {
        self.get_backends()
            .iter()
            .map(move |(name, backend)| Step::Single {
                name: name.to_string(),
                command: build(&backend.target, self.get_build_client()),
            })
            .collect()
    }
    /// Recursively constructs run steps for the backends.
    fn construct_backend_run_steps(&self, run_server_clients: Step) -> Step {
        // Folding nests each backend one level deeper, so every backend
        // ends up running in the background of the server/client step.
        self.get_backends()
            .iter()
            // First iteration includes `run_server_clients` as a foreground step.
            .fold(run_server_clients, |backend_steps, (name, backend)| {
                Step::WithBackground {
                    name: name.to_string(),
                    // Each `backend` is included as background step.
                    background: run(backend, self.get_build_client(), Vec::new()),
                    foreground: Box::new(backend_steps),
                }
            })
    }
}
/// A view over an [`Example`] restricted to its Oak Classic applications,
/// keyed by application variant name.
pub struct ClassicExample<'a> {
    example: &'a Example,
    // NOTE(review): `Box` around a `HashMap` adds indirection for no gain
    // (clippy `box_collection`) — candidate for simplification.
    applications: Box<HashMap<String, &'a ApplicationClassic>>,
    options: RunExamples,
}
impl<'a> ClassicExample<'a> {
fn new(example: &'a Example, options: RunExamples) -> Self {
let applications = Box::new(example.applications.iter().fold(
hashmap! {},
|mut apps, app| match app {
(name, Application::Classic(ref app)) => {
apps.insert(name.clone(), app);
apps
}
(_name, Application::Functions(_app)) => apps,
},
));
ClassicExample {
example,
applications,
options,
}
}
}
impl OakExample for ClassicExample<'_> {
    fn get_backends(&self) -> &HashMap<String, Executable> {
        &self.example.backends
    }
    fn get_build_client(&self) -> &BuildClient {
        &self.options.build_client
    }
    /// Builds the selected application variant, or nothing when this example
    /// does not declare that variant.
    fn construct_application_build_steps(&self) -> Vec<Step> {
        self.applications
            .get(self.options.application_variant.as_str())
            .map(|app| app.construct_application_build_steps(&self.example.name))
            .unwrap_or_default()
    }
    /// Wraps `run_clients` with the server for the selected variant; with no
    /// matching variant the clients run on their own.
    fn construct_example_server_run_step(&self, run_clients: Step) -> Step {
        match self.applications.get(self.options.application_variant.as_str()) {
            Some(app) => app.construct_example_server_run_step(self, run_clients),
            None => run_clients,
        }
    }
}
/// A view over an [`Example`] restricted to its Oak Functions applications,
/// keyed by application variant name.
pub struct FunctionsExample<'a> {
    example: &'a Example,
    // NOTE(review): as with `ClassicExample`, the `Box` is unnecessary
    // indirection (clippy `box_collection`).
    applications: Box<HashMap<String, &'a ApplicationFunctions>>,
    options: RunFunctionsExamples,
}
impl<'a> FunctionsExample<'a> {
fn new(example: &'a Example, options: RunFunctionsExamples) -> Self {
let applications = Box::new(example.applications.iter().fold(
hashmap! {},
|mut apps, app| match app {
(_name, Application::Classic(_app)) => apps,
(name, Application::Functions(ref app)) => {
apps.insert(name.clone(), app);
apps
}
},
));
FunctionsExample {
example,
applications,
options,
}
}
}
impl OakExample for FunctionsExample<'_> {
    fn get_backends(&self) -> &HashMap<String, Executable> {
        &self.example.backends
    }
    fn get_build_client(&self) -> &BuildClient {
        &self.options.build_client
    }
    /// Builds the selected application variant, or nothing when this example
    /// does not declare that variant.
    fn construct_application_build_steps(&self) -> Vec<Step> {
        self.applications
            .get(self.options.application_variant.as_str())
            .map(|app| app.construct_application_build_steps(&self.example.name))
            .unwrap_or_default()
    }
    /// Wraps `run_clients` with the server for the selected variant; with no
    /// matching variant the clients run on their own.
    fn construct_example_server_run_step(&self, run_clients: Step) -> Step {
        match self.applications.get(self.options.application_variant.as_str()) {
            Some(app) => app.construct_example_server_run_step(self, run_clients),
            None => run_clients,
        }
    }
}
pub fn run_examples(opt: &RunExamples) -> Step {
let examples: Vec<Example> = example_toml_files(&opt.commits)
.map(|path| {
toml::from_str(&read_file(&path)).unwrap_or_else(|err| {
panic!("could not parse example manifest file {:?}: {}", path, err)
})
})
.filter(|example: &Example| !example.has_functions_app())
.collect();
Step::Multiple {
name: "examples".to_string(),
/// TODO(#396): Check that all the example folders are covered by an entry here, or
/// explicitly ignored. This will probably require pulling out the `Vec<Example>` to a
/// top-level method first.
steps: examples
.iter()
.filter(|example| match &opt.example_name {
Some(example_name) => &example.name == example_name,
None => true,
})
.filter(|example| {
example.applications.is_empty()
|| example.applications.get(&opt.application_variant).is_some()
})
.map(|example| ClassicExample::new(example, opt.clone()))
.map(|example| run_example(&example))
.collect(),
}
}
/// Constructs the step that builds and runs every pure Oak Functions example (those
/// with a functions application and no classic application), restricted by
/// `opt.example_name` when it is set.
///
/// Panics if any example manifest file fails to parse.
pub fn run_functions_examples(opt: &RunFunctionsExamples) -> Step {
    let examples: Vec<Example> = example_toml_files(&opt.commits)
        .map(|path| {
            let contents = read_file(&path);
            toml::from_str(&contents).unwrap_or_else(|err| {
                panic!("could not parse example manifest file {:?}: {}", path, err)
            })
        })
        .filter(|example: &Example| example.has_functions_app() && !example.has_classic_app())
        .collect();
    let steps = examples
        .iter()
        .filter(|example| {
            // An unset `example_name` selects every example.
            opt.example_name
                .as_ref()
                .map_or(true, |example_name| &example.name == example_name)
        })
        .map(|example| FunctionsExample::new(example, opt.clone()))
        .map(|example| run_functions_example(&example))
        .collect();
    Step::Multiple {
        name: "oak-functions examples".to_string(),
        steps,
    }
}
/// Constructs the steps that build the classic Oak Runtime server (`oak_loader`):
/// create the output folder, build the introspection browser client for the server
/// variants that embed it, then build the loader binary itself.
pub fn build_server(opt: &BuildServer, additional_features: Vec<String>) -> Step {
    Step::Multiple {
        name: "server".to_string(),
        steps: vec![
            vec![Step::Single {
                name: "create bin folder".to_string(),
                command: Cmd::new(
                    "mkdir",
                    vec!["-p".to_string(), "oak_loader/bin".to_string()],
                ),
            }],
            // Only these variants ship the introspection browser client.
            match opt.server_variant {
                ServerVariant::Unsafe | ServerVariant::Coverage | ServerVariant::Experimental => vec![Step::Single {
                    name: "build introspection browser client".to_string(),
                    command: Cmd::new("npm",
                        vec![
                            "--prefix",
                            "oak_runtime/introspection_browser_client",
                            "run",
                            "build",
                        ])
                }],
                _ => vec![]
            },
            vec![
                // Coverage builds need the grcov-specific environment variables.
                build_rust_binary("oak_loader", opt, additional_features,
                    &if opt.server_variant == ServerVariant::Coverage {
                        hashmap! {
                            // Build the Runtime server in coverage mode, as per https://github.com/mozilla/grcov
                            "CARGO_INCREMENTAL".to_string() => "0".to_string(),
                            "RUSTDOCFLAGS".to_string() => "-Cpanic=abort".to_string(),
                            // grcov instructions suggest also including `-Cpanic=abort` in RUSTFLAGS, but this causes our build.rs scripts to fail.
                            "RUSTFLAGS".to_string() => "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic-abort_tests".to_string(),
                        }
                    } else {
                        hashmap! {}
                    },)
            ],
        ].into_iter()
        .flatten()
        .collect::<Vec<_>>()
    }
}
/// Constructs the steps that build the Oak Functions loader binary, first creating
/// the `oak_functions/loader/bin` output folder.
pub fn build_functions_server(
    opt: &BuildFunctionsServer,
    additional_features: Vec<String>,
) -> Step {
    // Output folder must exist before `--out-dir` writes into it.
    let make_bin_dir = Step::Single {
        name: "create bin folder".to_string(),
        command: Cmd::new(
            "mkdir",
            vec!["-p".to_string(), "oak_functions/loader/bin".to_string()],
        ),
    };
    let build_loader = build_rust_binary(
        "oak_functions/loader",
        opt,
        additional_features,
        &hashmap! {},
    );
    Step::Multiple {
        name: "server".to_string(),
        steps: vec![make_bin_dir, build_loader],
    }
}
/// Builds and runs a single classic example: application build, optional server and
/// docker-image builds, backend builds, then the combined run step.
fn run_example(example: &ClassicExample) -> Step {
    let opt = &example.options;
    let run_clients = run_clients(
        example.example,
        &opt.build_client,
        opt.client_additional_args.clone(),
    );
    // Build the run steps (if any) according to the provided flags.
    //
    // If `run-server` is enabled, then run the server as well as a potential backend, both in the
    // background.
    //
    // If `client-variant` is not 'none', then run the server and backend in the background, and the
    // clients in the foreground.
    #[allow(clippy::collapsible_if)]
    let run_backend_server_clients: Step = if opt.run_server.unwrap_or(true) {
        let run_server_clients = example.construct_example_server_run_step(run_clients);
        example.construct_backend_run_steps(run_server_clients)
    } else {
        run_clients
    };
    Step::Multiple {
        name: example.example.name.to_string(),
        steps: vec![
            example.construct_application_build_steps(),
            if opt.run_server.unwrap_or(true) {
                // Build the server first so that when running it in the next step it will start up
                // faster.
                vec![build_server(
                    &opt.build_server,
                    example.example.server.required_features.clone(),
                )]
            } else {
                vec![]
            },
            if opt.build_docker {
                vec![build_docker(example.example)]
            } else {
                vec![]
            },
            example.construct_backend_build_steps(),
            vec![Step::Multiple {
                name: "run".to_string(),
                steps: vec![run_backend_server_clients],
            }],
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>(),
    }
}
/// Builds and runs a single Oak Functions example; mirrors `run_example` but builds
/// the functions server instead of the classic Runtime server.
fn run_functions_example(example: &FunctionsExample) -> Step {
    let opt = &example.options;
    // Build steps for running clients
    let run_clients = run_clients(
        example.example,
        &opt.build_client,
        opt.client_additional_args.clone(),
    );
    // Build any backend server
    #[allow(clippy::collapsible_if)]
    let run_backend_server_clients: Step = if opt.run_server.unwrap_or(true) {
        let run_server_clients = example.construct_example_server_run_step(run_clients);
        example.construct_backend_run_steps(run_server_clients)
    } else {
        run_clients
    };
    Step::Multiple {
        name: example.example.name.to_string(),
        steps: vec![
            example.construct_application_build_steps(),
            if opt.run_server.unwrap_or(true) {
                // Build the server first so that when running it in the next step it will start up
                // faster.
                vec![build_functions_server(
                    &opt.build_server,
                    example.example.server.required_features.clone(),
                )]
            } else {
                vec![]
            },
            if opt.build_docker {
                vec![build_docker(example.example)]
            } else {
                vec![]
            },
            example.construct_backend_build_steps(),
            vec![Step::Multiple {
                name: "run".to_string(),
                steps: vec![run_backend_server_clients],
            }],
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>(),
    }
}
/// Builds (without running) the named Oak Functions example: application, server,
/// optional docker image, backends, and clients.
///
/// Panics when `--example-name` is not given, when a manifest fails to parse, or
/// when no functions example with that name exists.
pub fn build_functions_example(opt: &RunFunctionsExamples) -> Step {
    let example_name = &opt
        .example_name
        .as_ref()
        .expect("--example-name must be specified")
        .clone();
    let example: Example = example_toml_files(&Commits::default())
        .map(|path| {
            toml::from_str(&read_file(&path)).unwrap_or_else(|err| {
                panic!("could not parse example manifest file {:?}: {}", path, err)
            })
        })
        .find(|example: &Example| &example.name == example_name)
        .filter(|example| example.has_functions_app())
        .expect("could not find the specified functions example")
    // Build steps for building clients
    let build_client = Step::Multiple {
        name: "build clients".to_string(),
        steps: example
            .clients
            .iter()
            .filter(|(name, _)| match opt.build_client.client_variant.as_str() {
                ALL_CLIENTS => true,
                client => *name == client,
            })
            .map(|(name, client)| Step::Single {
                // NOTE(review): no separator between "build" and the client name
                // ("buildrust" etc.); possibly intended "build:{}" — verify.
                name: format!("build{}", name),
                command: build(&client.target, &opt.build_client),
            })
            .collect(),
    };
    let functions_example = FunctionsExample::new(&example, opt.clone());
    Step::Multiple {
        name: example.name.to_string(),
        steps: vec![
            functions_example.construct_application_build_steps(),
            // Build the server first so that when running it in the next step it will start up
            // faster.
            vec![build_functions_server(
                &opt.build_server,
                example.server.required_features.clone(),
            )],
            if opt.build_docker {
                vec![build_docker(&example)]
            } else {
                vec![]
            },
            functions_example.construct_backend_build_steps(),
            vec![build_client],
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>(),
    }
}
/// Constructs the step(s) that compile one Wasm module for an example, dispatching
/// on the target kind (Cargo or Bazel; Npm and Shell are not yet supported).
pub fn build_wasm_module(name: &str, target: &Target, example_name: &str) -> Step {
    match target {
        Target::Cargo {
            cargo_manifest,
            additional_build_args,
        } => {
            // Metadata gives the workspace-relative target and output directories.
            let metadata = cargo_metadata::MetadataCommand::new()
                .manifest_path(cargo_manifest)
                .exec()
                .unwrap();
            Step::Single {
                name: format!("wasm:{}:{}", name, cargo_manifest.to_string()),
                command: Cmd::new(
                    "cargo",
                    // Keep this in sync with `/oak_functions/sdk/test/utils/src/lib.rs`.
                    // Keep this in sync with `/sdk/rust/oak_tests/src/lib.rs`.
                    spread![
                        // `--out-dir` is unstable and requires `-Zunstable-options`.
                        "-Zunstable-options".to_string(),
                        "build".to_string(),
                        "--target=wasm32-unknown-unknown".to_string(),
                        // Use a fixed `--target-dir`, because it influences the SHA256 hash of the
                        // Wasm module.
                        //
                        // This directory is separate from `examples/target` because it is used by
                        // `cargo test`, which also executes [`oak_tests::compile_rust_wasm`] and
                        // thus runs `cargo build` inside it. It may lead to errors, since
                        // dependencies may be recompiled by `cargo build` and `cargo test` will
                        // fail to continue.
                        format!("--target-dir={}/wasm", metadata.target_directory),
                        format!("--manifest-path={}", cargo_manifest),
                        format!("--out-dir={}/bin", metadata.workspace_root),
                        "--release".to_string(),
                        ...additional_build_args
                    ],
                ),
            }
        }
        Target::Bazel {
            bazel_target,
            config,
        } => Step::Multiple {
            name: "wasm".to_string(),
            steps: vec![
                Step::Single {
                    name: format!("wasm:{}:{}", name, bazel_target.to_string()),
                    command: Cmd::new(
                        "bazel",
                        vec![
                            "build".to_string(),
                            format!("--config={}", config),
                            bazel_target.to_string(),
                        ],
                    ),
                },
                Step::Single {
                    name: "create bin folder".to_string(),
                    command: Cmd::new(
                        "mkdir",
                        vec!["-p".to_string(), format!("examples/{}/bin", example_name)],
                    ),
                },
                Step::Single {
                    name: "copy wasm module".to_string(),
                    command: Cmd::new(
                        "cp",
                        vec![
                            "-f".to_string(),
                            // The Bazel output directory name depends on the config used.
                            format!(
                                "bazel-{}-bin/{}",
                                match config.as_ref() {
                                    "emscripten" => "emscripten",
                                    "wasm32" => "wasm",
                                    _ => panic!("unsupported Bazel config: {}", config),
                                },
                                bazel_target.replace("//", "").replace(":", "/")
                            ),
                            format!("examples/{}/bin", example_name),
                        ],
                    ),
                },
            ],
        },
        Target::Npm { .. } => todo!(),
        Target::Shell { .. } => todo!(),
    }
}
/// Constructs the command that runs the classic Oak Runtime server (`oak_loader`)
/// for an example, with TLS material from `examples/certs/local` and the
/// variant-dependent permissions/root-certificate flags.
fn run_example_server(
    opt: &BuildServer,
    example_server: &ExampleServer,
    server_additional_args: Vec<String>,
    application_file: &str,
    permissions_file: &str,
) -> Box<dyn Runnable> {
    Cmd::new_with_env(
        "oak_loader/bin/oak_loader",
        spread![
            "--grpc-tls-certificate=./examples/certs/local/local.pem".to_string(),
            "--grpc-tls-private-key=./examples/certs/local/local.key".to_string(),
            "--http-tls-certificate=./examples/certs/local/local.pem".to_string(),
            "--http-tls-private-key=./examples/certs/local/local.key".to_string(),
            // TODO(#396): Add `--oidc-client` support.
            format!("--application={}", application_file),
            ...match opt.server_variant {
                // server variants that don't have `oak-unsafe` require a `permissions` file
                ServerVariant::Base => vec![format!("--permissions={}", permissions_file)],
                // server variants that have `oak-unsafe` need to specify `root-tls-certificate`
                _ => vec!["--root-tls-certificate=./examples/certs/local/ca.pem".to_string()],
            },
            ...example_server.additional_args.clone(),
            ...server_additional_args,
        ],
        // Coverage builds need the grcov-specific environment variables.
        &if opt.server_variant == ServerVariant::Coverage {
            hashmap! {
                // Build the Runtime server in coverage mode, as per https://github.com/mozilla/grcov
                "CARGO_INCREMENTAL".to_string() => "0".to_string(),
                "RUSTDOCFLAGS".to_string() => "-Cpanic=abort".to_string(),
                // grcov instructions suggest also including `-Cpanic=abort` in RUSTFLAGS, but this causes our build.rs scripts to fail.
                "RUSTFLAGS".to_string() => "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic-abort_tests".to_string(),
            }
        } else {
            hashmap! {}
        },
    )
}
/// Constructs the command that runs the Oak Functions loader for an example,
/// pointed at the application's Wasm module plus any server-specific extra args.
fn run_functions_example_server(
    example_server: &ExampleServer,
    application: &ApplicationFunctions,
) -> Box<dyn Runnable> {
    Cmd::new_with_env(
        "oak_functions/loader/bin/oak_functions_loader",
        spread![
            format!("--wasm-path={}", application.wasm_path),
            ...example_server.additional_args.clone(),
        ],
        // No extra environment variables are needed.
        &hashmap! {},
    )
}
fn run_clients(
example: &Example,
build_client: &BuildClient,
client_additional_args: Vec<String>,
) -> Step {
Step::Multiple {
name: "run clients".to_string(),
steps: example
.clients
.iter()
.filter(|(name, _)| match build_client.client_variant.as_str() {
ALL_CLIENTS => true,
client => *name == client,
})
.map(|(name, client)| {
run_client(name, client, build_client, client_additional_args.clone())
})
.collect(),
}
}
/// Builds a single client executable and then runs it, as two named sub-steps.
fn run_client(
    name: &str,
    executable: &Executable,
    opt: &BuildClient,
    additional_args: Vec<String>,
) -> Step {
    let build_step = Step::Single {
        name: "build".to_string(),
        command: build(&executable.target, opt),
    };
    let run_step = Step::Single {
        name: "run".to_string(),
        command: run(executable, opt, additional_args),
    };
    Step::Multiple {
        name: name.to_string(),
        steps: vec![build_step, run_step],
    }
}
/// Constructs the command that bundles a classic Oak application via the
/// `oak_app_build` SDK tool, pointed at the application's own manifest.
fn build_application(application: &ApplicationClassic) -> Box<dyn Runnable> {
    let args = vec![
        "run".to_string(),
        "--manifest-path=sdk/rust/oak_app_build/Cargo.toml".to_string(),
        "--".to_string(),
        format!("--manifest-path={}", application.manifest),
    ];
    Cmd::new("cargo", args)
}
/// Constructs the steps that build the server and example docker images and save
/// the example image as a tarball under the example's `bin` folder.
fn build_docker(example: &Example) -> Step {
    Step::Multiple {
        name: "docker".to_string(),
        steps: vec![
            Step::Single {
                name: "build server image".to_string(),
                command: Cmd::new(
                    "docker",
                    &[
                        "build",
                        "--tag=oak_docker",
                        "--file=./oak_loader/Dockerfile",
                        "./oak_loader",
                    ],
                ),
            },
            Step::Single {
                name: "build example image".to_string(),
                command: Cmd::new(
                    "docker",
                    &[
                        "build",
                        &format!("--tag={}", example.name),
                        "--file=./examples/Dockerfile",
                        // An example may have more than one application, and the applications may
                        // have arbitrary names, so this is an approximation of the expected
                        // application file name of one of them.
                        &format!("--build-arg=application_file_name={}.oak", example.name),
                        &format!("./examples/{}", example.name),
                    ],
                ),
            },
            Step::Single {
                name: "save example image".to_string(),
                command: Cmd::new(
                    "docker",
                    &[
                        "save",
                        &example.name,
                        &format!(
                            "--output=./examples/{}/bin/{}.tar",
                            example.name, example.name
                        ),
                    ],
                ),
            },
        ],
    }
}
/// Constructs the build command for a client target, dispatching on its kind
/// (Cargo release build, Bazel build, `npm ci`, or a shell script).
fn build(target: &Target, opt: &BuildClient) -> Box<dyn Runnable> {
    match target {
        Target::Cargo {
            cargo_manifest,
            additional_build_args,
        } => Cmd::new(
            "cargo",
            spread![
                "build".to_string(),
                "--release".to_string(),
                // Cross-compile for the configured client target, falling back to the default.
                format!(
                    "--target={}",
                    opt.client_rust_target
                        .as_deref()
                        .unwrap_or(DEFAULT_EXAMPLE_BACKEND_RUST_TARGET)
                ),
                format!("--manifest-path={}", cargo_manifest),
                ...additional_build_args,
            ],
        ),
        Target::Bazel {
            bazel_target,
            config,
        } => Cmd::new(
            "bazel",
            spread![
                "build".to_string(),
                // Pass `--config` only when one is set.
                ...if config.is_empty() {
                    vec![]
                } else {
                    vec![format!("--config={}", config)]
                },
                bazel_target.to_string(),
            ],
        ),
        Target::Npm { package_directory } => Cmd::new(
            "npm",
            vec!["ci".to_string(), format!("--prefix={}", package_directory)],
        ),
        Target::Shell { script } => Cmd::new("bash", &[script]),
    }
}
/// Constructs the run command for a client executable, dispatching on its target
/// kind and appending the executable's own args plus any extra args.
fn run(
    executable: &Executable,
    opt: &BuildClient,
    additional_args: Vec<String>,
) -> Box<dyn Runnable> {
    match &executable.target {
        Target::Cargo {
            cargo_manifest,
            additional_build_args,
        } => Cmd::new(
            "cargo",
            spread![
                "run".to_string(),
                "--release".to_string(),
                format!("--target={}", opt.client_rust_target.as_deref().unwrap_or(DEFAULT_EXAMPLE_BACKEND_RUST_TARGET)),
                format!("--manifest-path={}", cargo_manifest),
                ...additional_build_args,
                // Everything after `--` goes to the client binary, not cargo.
                "--".to_string(),
                ...executable.additional_args.clone(),
                ...additional_args,
            ],
        ),
        Target::Bazel {
            bazel_target,
            config,
        } => Cmd::new(
            "bazel",
            spread![
                "run".to_string(),
                ...if config.is_empty() {
                    vec![]
                } else {
                    vec![format!("--config={}", config)]
                },
                "--".to_string(),
                bazel_target.to_string(),
                // Path is relative to the Bazel runfiles directory of the client.
                "--ca_cert_path=../../../../../../../../../examples/certs/local/ca.pem".to_string(),
                ...executable.additional_args.clone(),
                ...additional_args,
            ],
        ),
        Target::Npm { package_directory } => Cmd::new(
            "npm",
            vec![
                "start".to_string(),
                format!("--prefix={}", package_directory),
            ],
        ),
        Target::Shell { script } => Cmd::new("bash", &[script]),
    }
}
/// Constructs the step that builds a Rust binary from `manifest_dir` into
/// `manifest_dir/bin`, combining `additional_features` with the option-derived
/// feature set, optionally under a pinned toolchain and in release mode.
fn build_rust_binary<T: RustBinaryOptions>(
    manifest_dir: &str,
    opt: &T,
    additional_features: Vec<String>,
    env: &HashMap<String, String>,
) -> Step {
    // Merge caller-supplied features with the server-variant features.
    let mut features = additional_features;
    let mut server_variant_features = opt.features().iter().map(|s| s.to_string()).collect();
    features.append(&mut server_variant_features);
    let features = if !features.is_empty() {
        features.join(",")
    } else {
        "".to_string()
    };
    Step::Single {
        name: "build rust binary".to_string(),
        command: Cmd::new_with_env(
            "cargo",
            spread![
                ...match opt.server_rust_toolchain() {
                    // This overrides the toolchain used by `rustup` to invoke the actual
                    // `cargo` binary.
                    // See https://github.com/rust-lang/rustup#toolchain-override-shorthand
                    Some(server_rust_toolchain) => vec![format!("+{}", server_rust_toolchain)],
                    None => vec![],
                },
                "build".to_string(),
                format!("--manifest-path={}/Cargo.toml", manifest_dir),
                format!("--out-dir={}/bin", manifest_dir),
                // `--out-dir` is unstable and requires `-Zunstable-options`.
                "-Zunstable-options".to_string(),
                ...if !features.is_empty() {
                    vec![format!("--features={}", features)]
                } else {
                    vec![]
                },
                // Release builds also cross-compile for the configured server target.
                ...if opt.build_release() {
                    vec![format!("--target={}", opt.server_rust_target().as_deref().unwrap_or(DEFAULT_SERVER_RUST_TARGET)),
                    "--release".to_string() ]} else {vec![]},
            ],
            env,
        ),
    }
}
|
//////////////////////////////////////////////////
// General notes
//
// - When heap objects fall out of scope, the drop
// function is called. The author can put code
// into this function to return the allocated
// memory.
// - Rust has a special annotation called the
// Copy trait. If a type implements this, an
// older variable is still usable after
// assignment. Rust will not allow this
// annotation to be used if the type implements
// the Drop trait.
// - All integer/boolean/floating point/char types
// implement Copy.
// - Tuples also implement Copy, but only if the
// types they contain implement Copy, e.g.
// (i32, i32)
// - The opposite of referencing by using & is
// dereferencing using *.
// - Rust prevents dangling references by ensuring
// data will not go out of scope before its
// reference
// - String literals are string slices
// some_string goes out of scope at the end of the function and 'drop' is called
//
// Takes ownership of its `String` argument; the moved-in value is dropped (and
// its heap buffer freed) when this function returns.
fn takes_ownership(some_string: String) {
    let owned = some_string;
    println!("takes_ownership: {}", owned);
}
// Creates a `String` locally and moves it out to the caller, which becomes the
// new owner.
fn gives_ownership() -> String {
    String::from("goodbye")
}
// Takes ownership of `a_string` and immediately moves it back out to the caller.
fn takes_and_gives_back(a_string: String) -> String {
    let returned = a_string;
    returned
}
// `i32` implements `Copy`, so the caller's value remains usable after this call;
// nothing special happens when the parameter goes out of scope.
fn makes_copy(some_integer: i32) {
    let n = some_integer;
    println!("makes_copy: {}", n);
}
// This function borrows s (via reference), instead of owning it.
// s will *not* be dropped at the end of the function.
//
// Generalized from `&String` to `&str`: existing call sites passing `&String`
// still compile via deref coercion, and the function now also accepts string
// literals and slices.
fn calculate_length(s: &str) -> usize {
    // Length in bytes (not chars).
    s.len()
}
// An example of borrowing by mutable reference: the string is appended to in
// place, visible to the caller.
fn change(some_string: &mut String) {
    *some_string += " has been changed";
}
// fn dangle() -> &String {
//     let s = String::from("dangler"):
//     &s
// } // s goes out of scope and is dropped. &s is no longer valid
// Returning the owned `String` itself (rather than a reference to a local) moves
// it to the caller, so no dangling reference can occur.
fn no_dangle() -> String {
    String::from("no dangle")
}
// Example using string slices: returns everything before the first space, or the
// whole string if it contains no space.
fn first_word(s: &String) -> &str {
    match s.as_bytes().iter().position(|&b| b == b' ') {
        Some(i) => &s[..i],
        None => &s[..],
    }
}
// Improved version that borrows a string slice as a parameter, which makes this
// function more general and useful: it accepts `&String` (via deref coercion),
// literals, and sub-slices alike. Returns everything before the first space, or
// the whole input when no space is present.
fn first_word_improved(s: &str) -> &str {
    match s.find(' ') {
        Some(i) => &s[..i],
        None => s,
    }
}
// Walkthrough of Rust ownership: moves, clones, Copy types, immutable and mutable
// borrows, and slices. The statement order matters — several lines deliberately
// demonstrate values becoming invalid after a move.
fn main() {
    let mut s = String::from("hello");
    s.push_str(", world!");
    println!("s: {}", s);
    // `s` is moved into the callee and dropped there.
    takes_ownership(s);
    // println!("{}", s); // This will fail
    let s1 = gives_ownership();
    println!("gives_ownership(): {}", s1);
    let s2 = takes_and_gives_back(s1);
    println!("takes_and_gives_back(s1): {}", s2);
    // Use a reference to s2 when calling this function
    //
    let len = calculate_length(&s2);
    println!("len of s2: {}", len);
    let mut s3 = String::from("My string");
    change(&mut s3);
    println!("change(&s3): {}", s3);
    // NOTE: You can only have one mutable reference to a
    //       particular piece of data in a particular
    //       scope. This is to prevent data races.
    {
        let mut _s = String::from("hello");
        let _r1 = &mut _s;
        // let _r2 = &mut _s; // this will fail
        // println!("{}{}", _r1, _r2);
    }
    // Multiple immutable references are okay, since readers
    // will not impact other readers.
    //
    // However, you can't have a mutable reference with an
    // immutable reference in the same scope, since the
    // owner of the mutable reference could change data
    // while immutable references are being read...
    //
    {
        let mut _s = String::from("hello");
        let _r1 = &_s; // okay
        let _r2 = &_s; // also okay
        // let _r3 = &mut _s; // NOT okay
        // println!("{}{}{}", _r1, _r2, _r3); // fails here
    }
    let s = String::from("hello world");
    let word = first_word(&s);
    // s.clear(); // error, since this is a mutable borrow...
    println!("first_word(&s): {}", word);
    let word = first_word_improved(&s[..]);
    println!("first_word_improved(&s[..]): {}", word);
    let x = 5;
    // `x` stays valid afterwards because `i32` is `Copy`.
    makes_copy(x);
    {
        // The following snippet shows _s1 being
        // moved into _s2, which invalidates _s1.
        //
        let _s1 = String::from("hello");
        let _s2 = _s1;
        // println!("{}", _s1); // invalid
    }
    {
        // Clone will allow a deep-copy of s1 to
        // s2.
        //
        let s1 = String::from("hello");
        let s2 = s1.clone();
        println!("{} {}", s1, s2);
    }
    {
        // Stack variables are copied, since it
        // is an inexpensive operation. No need
        // to clone.
        //
        let x = 5;
        let y = x;
        println!("{} {}", x, y);
    }
    // Other types of slices
    //
    {
        let a = [1, 2, 3, 4, 5];
        let slice = &a[1..3];
        assert_eq!(slice, &[2, 3]);
    }
}
|
use std::io;
use std::io::{File, FileMode, fs, stdio};
mod plutomain {
// NOTE(review): this module is a line-by-line transliteration of pluto's C
// `plutomain.c`; several constructs below (`int` type, `static mut String`
// initialized from a `&str` literal) are not valid Rust — verify intent.
static mut pluto_name: String = ""; /* name (path) we were invoked with */
// Base path for pluto's control socket and lock file.
static ctlbase: String = "/var/run/pluto";
static mut pluto_listen: String = "";
// Whether pluto should daemonize (fork) on startup.
static fork_desired: bool = true;
/* pulled from main for show_setup_plutomain() */
//static const struct lsw_conf_options *oco;
static mut coredir: String = "";
// Number of crypto helper processes; -1 means "auto".
static nhelpers: int = -1;
//libreswan_passert_fail_t libreswan_passert_fail = passert_fail;
///////////////////////////////////////////////////////////////////////////////////////////////
// Frees the option strings allocated during startup.
// NOTE(review): `pluto_stats_binary` and `pluto_vendorid` are not declared
// anywhere in this module — presumably carried over from the C original; verify.
pub fn free_pluto_main() {
    /* Some values can be NULL if not specified as pluto argument */
    pfree(coredir);
    pfreeany(pluto_stats_binary);
    pfreeany(pluto_listen);
    pfree(pluto_vendorid);
}
/*
* invocation_fail - print diagnostic and usage hint message and exit
*
* @param mess String - diagnostic message to print
*/
pub fn invocation_fail(mess: &str) {
if mess != Nil {
stderr.write_str(mess);
}
let usage: String = format!("For usage information: {} --help\n Libreswan {}\n" +
pluto_name +
ipsec_version_code());
stderr.write_str(usage);
/* not exit_pluto because we are not initialized yet */
exit(1);
}
/* string naming compile-time options that have interop implications */
// NOTE(review): the C original assembled this from `#ifdef`-guarded fragments; the
// `+` concatenation of `&str` literals below is not valid Rust (use `concat!` or a
// single literal) — verify which fragments should actually be enabled.
static compile_time_interop_options: &str =
    //#ifdef NETKEY_SUPPORT
    " XFRM(netkey)" +
    //#ifdef KLIPS
    " KLIPS" +
    //#ifdef KLIPSMAST
    " MAST" +
    //#ifdef HAVE_NO_FORK
    " NO_FORK" +
    //#ifdef HAVE_BROKEN_POPEN
    " BROKEN_POPEN" +
    " NSS" +
    //#ifdef DNSSEC
    " DNSSEC" +
    //#ifdef FIPS_CHECK
    " FIPS_CHECK" +
    //#ifdef HAVE_LABELED_IPSEC
    " LABELED_IPSEC" +
    //#ifdef HAVE_LIBCAP_NG
    " LIBCAP_NG" +
    //#ifdef USE_LINUX_AUDIT
    " LINUX_AUDIT" +
    //#ifdef XAUTH_HAVE_PAM
    " XAUTH_PAM" +
    //#ifdef HAVE_NM
    " NETWORKMANAGER" +
    //#ifdef KLIPS_MAST
    " KLIPS_MAST" +
    //#ifdef LIBCURL
    " CURL(non-NSS)" +
    //#ifdef LDAP_VER
    " LDAP(non-NSS)";
/*
 * lock file support
 * - provides convenient way for scripts to find Pluto's pid
 * - prevents multiple Plutos competing for the same port
 * - same basename as unix domain control socket
 * NOTE: will not take account of sharing LOCK_DIR with other systems.
 */
//static char pluto_lock[sizeof(ctl_addr.sun_path)] = DEFAULT_CTLBASE LOCK_SUFFIX;
// Set once the lock file has been created, so delete_lock() knows to remove it.
static pluto_lock_created: bool = false;
/** create lockfile, or die in the attempt */
// NOTE(review): partially ported — the EEXIST/retry handling from the C original
// is still inside the block comment below and `pluto_lock` is never declared;
// the function as written will not compile. Verify before use.
fn create_lock() -> int {
    let ctlbase = Path::new(".."); // TODO: set ctlbase path
    if File::mkdir(ctlbase, /*0755*/ io::UserDir) != 0 {
        if errno != EEXIST {
            println!(stderr,
                "pluto: FATAL: unable to create lock dir: {}: {}\n",
                ctlbase,
                strerror(errno));
            exit_pluto(10);
        }
    }
    let fd = File::open_mode(&pluto_lock, Open, Write);
    /*O_WRONLY | O_CREAT | O_EXCL | O_TRUNC, S_IRUSR | S_IRGRP | S_IROTH */
    match fd {
        // TODO: write analyse error creating lock file
        Ok(()) => println!(""),
        Err(e) => fail!("pluto: FATAL: unable to create lock file {} ({})",pluto_lock, e)
    }
    /*
    if (fd < 0) {
        if (errno == EEXIST) {
            // if we did not fork, then we don't really need the pid to control, so wipe it
            if (!fork_desired) {
                if (unlink(pluto_lock) == -1) {
                    fprintf(stderr,
                        "pluto: FATAL: lock file \"%s\" already exists and could not be removed (%d %s)\n",
                        pluto_lock, errno,
                        strerror(errno));
                    exit_pluto(10);
                } else {
                    // lock file removed, try creating it again
                    return create_lock();
                }
            } else {
                fprintf(stderr,
                    "pluto: FATAL: lock file \"%s\" already exists\n",
                    pluto_lock);
                exit_pluto(10);
            }
        } else {
            fprintf(stderr,
                "pluto: FATAL: unable to create lock file \"%s\" (%d %s)\n",
                pluto_lock, errno, strerror(errno));
            exit_pluto(1);
        }
    }
    // TODO avoid boolean variables
    pluto_lock_created = TRUE;
    */
    fd
}
/*
* fill_lock - Populate the lock file with pluto's PID
*
* @param lockfd File Descriptor for the lock file
* @param pid PID (pid_t struct) to be put into the lock file
* @return bool True if successful
*/
pub fn fill_lock(lockfd: int, pid: pid_t) -> bool {
let buf: &str = format!("{}",pid); /* holds "<pid>\n" */
lockfd.write_str(buf);
lockfd.close();
}
/*
 * delete_lock - Delete the lock file
 */
// Also removes the control socket; a no-op unless create_lock() succeeded
// (i.e. `pluto_lock_created` was set).
pub fn delete_lock() {
    if (pluto_lock_created) {
        delete_ctl_socket();
        unlink(pluto_lock); /* is noting failure useful? */
    }
}
/*
 * parser.l and keywords.c need these global variables
 * FIXME: move them to confread_load() parameters
 */
static verbose: int = 0;
static warningsarefatal: int = 0;
/* Read config file. exit() on error. */
// Opaque placeholder for the C `struct starter_config`; the full layout is kept
// below for reference.
struct starter_config;
/*struct starter_config {
    struct {
        ksf strings;
        knf options;
        str_set strings_set;
        int_set options_set;
        // derived types
        char **interfaces;
    } setup;
    // conn %default
    struct starter_conn conn_default;
    struct starter_conn conn_oedefault;
    bool got_oedefault;
    char *ctlbase; // location of pluto control socket
    // connections list (without %default)
    TAILQ_HEAD(, starter_conn) conns;
}; */
// Loads and parses `configfile` via confread_load(); on failure, reports the
// parse error through invocation_fail() (which exits).
pub fn read_cfg_file(configfile: &str) -> starter_config {
    let mut cfg: starter_config = Nil;
    let mut err: err_t = Nil;
    cfg = confread_load(configfile, &err, FALSE, NULL, TRUE);
    if cfg == Nil {
        invocation_fail(err);
    }
    cfg
}
/* Helper function for config file mapper: set string option value */
// NOTE(review): assigning through `*target` requires a mutable pointer/reference,
// not `&str` — signature carried over from the C original; verify.
pub fn set_cfg_string(target: &str, value: &str) {
    /* Do nothing if value is unset. */
    if value == Nil || *value == "\0" {
        return;
    }
    /* Don't free previous target, it might be statically set. */
    *target = strdup_uniq(value);
}
/* TODO: Check the status of crypto libs, and use it in the code
pub fn pluto_init_nss(confddir: &str) {
SECStatus nss_init_status;
loglog(RC_LOG_SERIOUS, "nss directory plutomain: %s", confddir);
nss_init_status = NSS_Init(confddir);
if (nss_init_status != SECSuccess) {
loglog(RC_LOG_SERIOUS, "FATAL: NSS readonly initialization (\"%s\") failed (err %d)\n",
confddir, PR_GetError());
exit_pluto(10);
} else {
libreswan_log("NSS Initialized");
PK11_SetPasswordFunc(getNSSPassword);
}
}
*/
/* by default the CRL policy is lenient */
static mut strict_crl_policy: bool = false;
/* 0 is special and default: do not check crls dynamically */
//deltatime_t crl_check_interval = { 0 };
/* by default pluto sends no cookies in ikev2 or ikev1 aggrmode */
static force_busy: bool = false;
/* whether or not to use klips */
//enum kernel_interface kern_interface = USE_NETKEY; /* new default */
//#ifdef HAVE_LABELED_IPSEC
// IPsec security-context attribute value (labeled IPsec builds only).
static secctx_attr_value: u16 = SECCTX;
/*
* Table of Pluto command-line options.
*
 * For getopt_long(3), but with twists.
*
* We never find that letting getopt set an option makes sense
* so flag is always NULL.
*
* Trick: we split the "name" string with a '\0'.
* Before it is the option name, as seen by getopt_long.
* After it is meta-information:
* - _ means: obsolete due to _ in name: replace _ with -
* - > means: obsolete spelling; use spelling from rest of string
* - ! means: obsolete and ignored (no replacement)
* - anything else is a description of the options argument (printed by --help)
* If it starts with ^, that means start a newline in the --help output.
*
* The table should be ordered to maximize the clarity of --help.
*
* val values free due to removal of options: '1', '3', '4', 'G'
*/
//#define DBG_OFFSET 256
// One command-line option entry, mirroring C's `struct option` for
// getopt_long(3): name (with the '\0'-separated meta-information described
// above), argument requirement, flag pointer (always NULL here), and the
// short-option value returned by getopt.
// NOTE(review): shadows `std::option::Option`; consider renaming.
struct Option {
    name: &str,
    has_arg: has_arg,
    flag: int,
    val: &str
}
//#define D(name, code) { "debug-" name, no_argument, NULL, (code) + DBG_OFFSET }
// Closure standing in for the C `D(name, code)` macro used by the --debug-* rows.
static debug = |name: &str, code:int| -> Option { {"debug-" + name, no_argument, NULL, code + DBG_OFFSET} }
// Table of pluto's long command-line options (see the format notes above: the
// part of `name` after '\0' is meta-information for --help, not the option
// itself). The --debug-*/--impair-* rows from the C original are still inside
// the trailing block comment.
static long_opts: &[Option] = [
    /* name, has_arg, flag, val */
    { "help\0"; no_argument; NULL; 'h' },
    { "version\0"; no_argument; NULL; 'v' },
    { "config\0<filename>"; required_argument; NULL; 'z' },
    { "nofork\0"; no_argument; NULL; 'd' },
    { "stderrlog\0"; no_argument; NULL; 'e' },
    { "logfile\0<filename>"; required_argument; NULL; 'g' },
    { "plutostderrlogtime\0"; no_argument; NULL; 't' },
    { "force_busy\0_"; no_argument; NULL; 'D' }, /* _ */
    { "force-busy\0"; no_argument; NULL; 'D' },
    { "strictcrlpolicy\0"; no_argument; NULL; 'r' },
    { "crlcheckinterval\0<seconds>"; required_argument; NULL; 'x' },
    { "uniqueids\0"; no_argument; NULL; 'u' },
    { "noklips\0>use-nostack"; no_argument; NULL; 'n' }, /* redundant spelling */
    { "use-nostack\0"; no_argument; NULL; 'n' },
    { "use-none\0>use-nostack"; no_argument; NULL; 'n' }, /* redundant spelling */
    { "useklips\0>use-klips"; no_argument; NULL; 'k' }, /* redundant spelling */
    { "use-klips\0"; no_argument; NULL; 'k' },
    { "use-auto\0>use-netkey"; no_argument; NULL; 'K' }, /* redundant spelling (sort of) */
    { "usenetkey\0>use-netkey"; no_argument; NULL; 'K' }, /* redundant spelling */
    { "use-netkey\0"; no_argument; NULL; 'K' },
    { "use-mast\0"; no_argument; NULL; 'M' },
    { "use-mastklips\0"; no_argument; NULL; 'M' },
    { "use-bsdkame\0"; no_argument; NULL; 'F' },
    { "interface\0<ifname|ifaddr>"; required_argument; NULL; 'i' },
    { "listen\0<ifaddr>"; required_argument; NULL; 'L' },
    { "ikeport\0<port-number>"; required_argument; NULL; 'p' },
    { "natikeport\0<port-number>"; required_argument; NULL; 'q' },
    { "ctlbase\0<path>"; required_argument; NULL; 'b' },
    { "secretsfile\0<secrets-file>"; required_argument; NULL; 's' },
    { "perpeerlogbase\0<path>"; required_argument; NULL; 'P' },
    { "perpeerlog\0"; no_argument; NULL; 'l' },
    { "noretransmits\0"; no_argument; NULL; 'R' },
    { "coredir\0>dumpdir"; required_argument; NULL; 'C' }, /* redundant spelling */
    { "dumpdir\0<dirname>"; required_argument; NULL; 'C' },
    { "statsbin\0<filename>"; required_argument; NULL; 'S' },
    { "ipsecdir\0<ipsec-dir>"; required_argument; NULL; 'f' },
    { "ipsec_dir\0>ipsecdir"; required_argument; NULL; 'f' }, /* redundant spelling; _ */
    { "foodgroupsdir\0>ipsecdir"; required_argument; NULL; 'f' }, /* redundant spelling */
    { "adns\0<pathname>"; required_argument; NULL; 'a' },
    { "nat_traversal\0!"; no_argument; NULL; 'h' }, /* obsolete; _ */
    { "keep_alive\0_"; required_argument; NULL; '2' }, /* _ */
    { "keep-alive\0<delay_secs>"; required_argument; NULL; '2' },
    { "force_keepalive\0!"; no_argument; NULL; 'h' }, /* obsolete; _ */
    { "disable_port_floating\0!"; no_argument; NULL; 'h' }, /* obsolete; _ */
    { "virtual_private\0_"; required_argument; NULL; '6' }, /* _ */
    { "virtual-private\0<network_list>"; required_argument; NULL; '6' },
    { "nhelpers\0<number>"; required_argument; NULL; 'j' },
    //#ifdef HAVE_LABELED_IPSEC
    { "secctx_attr_value\0_"; required_argument; NULL; 'w' }, /* _ */
    { "secctx-attr-value\0<number>"; required_argument; NULL; 'w' },
    //#endif
    { "vendorid\0<vendorid>"; required_argument; NULL; 'V' },
    { "leak-detective\0"; no_argument; NULL; 'X' },
    { "debug-nat_t\0>debug-nattraversal"; no_argument; NULL; '5' }, /* redundant spelling; _ */
    { "debug-nat-t\0>debug-nattraversal"; no_argument; NULL; '5' }, /* redundant spelling */
    { "debug-nattraversal\0"; no_argument; NULL; '5' },
    { "debug-none\0^"; no_argument; NULL; 'N' },
    { "debug-all\0"; no_argument; NULL; 'A' }
    /* --debug-* options (using D for shorthand)
    #define D(name, code) { "debug-" name, no_argument, NULL, (code) + DBG_OFFSET }
    D("raw\0", DBG_RAW_IX),
    D("crypt\0", DBG_CRYPT_IX),
    D("crypto\0>crypt", DBG_CRYPT_IX), // redundant spelling
    D("parsing\0", DBG_PARSING_IX),
    D("emitting\0", DBG_EMITTING_IX),
    D("control\0", DBG_CONTROL_IX),
    D("lifecycle\0", DBG_LIFECYCLE_IX),
    D("kernel\0", DBG_KERNEL_IX),
    D("klips\0>kernel", DBG_KERNEL_IX), // redundant spelling
    D("netkey\0>kernel", DBG_KERNEL_IX), // redundant spelling
    D("dns\0", DBG_DNS_IX),
    D("oppo\0", DBG_OPPO_IX),
    D("oppoinfo\0", DBG_OPPOINFO_IX),
    D("controlmore\0", DBG_CONTROLMORE_IX),
    D("dpd\0", DBG_DPD_IX),
    D("x509\0", DBG_X509_IX),
    D("private\0", DBG_PRIVATE_IX),
    D("pfkey\0", DBG_PFKEY_IX),
    #undef D
    // --impair-* options (using I for shorthand)
    #define I(name, code) { "impair-" name, no_argument, NULL, (code) + DBG_OFFSET }
    I("delay-adns-key-answer\0^", IMPAIR_DELAY_ADNS_KEY_ANSWER_IX),
    I("delay-adns-txt-answer\0", IMPAIR_DELAY_ADNS_TXT_ANSWER_IX),
    I("bust-mi2\0", IMPAIR_BUST_MI2_IX),
    I("bust-mr2\0", IMPAIR_BUST_MR2_IX),
    I("sa-creation\0", IMPAIR_SA_CREATION_IX),
    I("die-oninfo\0", IMPAIR_DIE_ONINFO_IX),
    I("jacob-two-two\0", IMPAIR_JACOB_TWO_TWO_IX),
    I("major-version-bump\0", IMPAIR_MAJOR_VERSION_BUMP_IX),
    I("minor-version-bump\0", IMPAIR_MINOR_VERSION_BUMP_IX),
    I("retransmits\0", IMPAIR_RETRANSMITS_IX),
    I("send-bogus-isakmp-flag\0", IMPAIR_SEND_BOGUS_ISAKMP_FLAG_IX),
    I("send-ikev2-ke\0", IMPAIR_SEND_IKEv2_KE_IX),
    I("send-key-size-check\0", IMPAIR_SEND_KEY_SIZE_CHECK_IX),
    #undef I
    { 0, 0, 0, 0 } */
];
/* print full usage (from long_opts[]) */
/// Print the full usage banner (derived from `long_opts[]`) to stderr, then exit.
///
/// NOTE(review): this is a partial machine translation of the C `usage()`;
/// the live Rust statements do not compile yet (`println` is missing the `!`,
/// `line` is read before initialization, `option`/`Nil`/`size_t` are not Rust
/// types) and the real line-wrapping loop is still the commented-out C body
/// below. TODO: finish the port.
pub fn usage() {
    let mut opt: option = Nil;
    let mut line: String;
    let lw: size_t;
    // BUG(review): should be `println!`; `pluto_name` is a global from the C code.
    println("Usage: {}", pluto_name);
    // BUG(review): `line` is never initialized before this read.
    lw = strlen(line);
    /*
    for (opt = long_opts; opt->name != NULL; opt++) {
    const char *nm = opt->name;
    const char *meta = nm + strlen(nm) + 1;
    bool force_nl = FALSE;
    char chunk[sizeof(line) - 1];
    int cw;
    switch (*meta) {
    case '_':
    case '>':
    case '!':
    // ignore these entries
    break;
    case '^':
    force_nl = TRUE;
    meta++; // eat ^
    // fall through
    default:
    if (*meta == '\0')
    snprintf(chunk, sizeof(chunk), "[--%s]", nm);
    else
    snprintf(chunk, sizeof(chunk), "[--%s %s]", nm, meta);
    cw = strlen(chunk);
    if (force_nl || lw + cw + 2 >= sizeof(line)) {
    fprintf(stderr, "%s\n", line);
    line[0] = '\t';
    lw = 1;
    } else {
    line[lw++] = ' ';
    }
    passert(lw + cw + 1 < sizeof(line));
    strcpy(&line[lw], chunk);
    lw += cw;
    }
    }
    stderr.write_str("{}\n", line);
    stderr.write_str("Libreswan {}\n", ipsec_version_code());
    // not exit_pluto because we are not initialized yet
    */
    exit(0);
}
/*
* leave pluto, with status.
* Once child is launched, parent must not exit this way because
* the lock would be released.
*
* 0 OK
* 1 general discomfort
* 10 lock file exists
*/
/// Leave pluto with `status`, releasing every global resource first.
///
/// Exit codes (per the comment above): 0 OK, 1 general discomfort,
/// 10 lock file exists. Once the child is launched, the parent must not
/// exit this way because the lock would be released.
///
/// NOTE(review): partially machine-translated from C — `int` is not a Rust
/// type and the callees are C globals; this does not compile as-is.
pub fn exit_pluto(status:int) {
    /* needed because we may be called in odd state */
    reset_globals();
    free_preshared_secrets();
    free_remembered_public_keys();
    delete_every_connection();
    /*
     * free memory allocated by initialization routines. Please don't
     * forget to do this.
     */
    //#if defined(LIBCURL) || defined(LDAP_VER)
    free_crl_fetch(); /* free chain of crl fetch requests */
    //#endif
    /* free chain of X.509 authority certificates */
    free_authcerts();
    free_crls(); /* free chain of X.509 CRLs */
    lsw_conf_free_oco(); /* free global_oco containing path names */
    free_myFQDN(); /* free myid FQDN */
    free_ifaces(); /* free interface list from memory */
    stop_adns(); /* Stop async DNS process (if running) */
    free_md_pool(); /* free the md pool */
    NSS_Shutdown();
    delete_lock(); /* delete any lock files */
    free_virtual_ip(); /* virtual_private= */
    free_pluto_main(); /* our static chars */
    /* report memory leaks now, after all free()s */
    if leak_detective {
        report_leaks();
    }
    close_log(); /* close the logfiles */
    exit(status); /* exit, with our error code */
}
/// Report the `config setup` options in effect via `whack_log`.
///
/// NOTE(review): partially machine-translated from C — the format strings are
/// still C `printf` style (`%s`, `%d`, `%lu`) and the `match` arms on
/// `pluto_stats_binary` / `pluto_listen` match non-boolean values against
/// `Nil`/`true`/`false`; this does not compile as-is. TODO: finish the port.
pub fn show_setup_plutomain() {
    whack_log(RC_COMMENT, "config setup options:"); /* spacer */
    whack_log(RC_COMMENT, " "); /* spacer */
    whack_log(RC_COMMENT,
        "configdir=%s, configfile=%s, secrets=%s, ipsecdir=%s, dumpdir=%s, statsbin=%s",
        oco.confdir,
        oco.conffile,
        pluto_shared_secrets_file,
        oco.confddir,
        coredir,
        // "unset" when no stats binary was configured.
        match pluto_stats_binary {
            Nil => "unset",
            _ => pluto_stats_binary
        }
    );
    whack_log(RC_COMMENT, "sbindir=%s, libexecdir=%s",
        IPSEC_SBINDIR,
        IPSEC_EXECDIR
    );
    whack_log(RC_COMMENT, "pluto_version=%s, pluto_vendorid=%s",
        ipsec_version_code(),
        pluto_vendorid
    );
    whack_log(RC_COMMENT,
        "nhelpers=%d, uniqueids=%s, retransmits=%s, force-busy=%s",
        nhelpers,
        match uniqueIDs { true => "yes", false => "no"},
        // note the inversion: `no_retransmits == true` means retransmits=no
        match no_retransmits { true => "no", false => "yes"},
        match force_busy { true => "yes", false => "no"}
    );
    whack_log(RC_COMMENT,
        "ikeport=%d, strictcrlpolicy=%s, crlcheckinterval=%lu, listen=%s",
        pluto_port,
        match strict_crl_policy { true => "yes", false => "no"},
        deltasecs(crl_check_interval),
        // NOTE(review): matches a (presumably) address value against booleans —
        // clearly a translation artifact of C's `pluto_listen != NULL` test.
        match pluto_listen { true => pluto_listen, false => "<any>"}
    );
    //#ifdef HAVE_LABELED_IPSEC
    whack_log(RC_COMMENT, "secctx-attr-value=%d", secctx_attr_value);
    //#else
    whack_log(RC_COMMENT, "secctx-attr-value=<unsupported>");
    //#endif
}
} // end mod plutomain
fn main() {
} |
extern crate cfg_if;
extern crate gif;
extern crate wasm_bindgen;
use cfg_if::cfg_if;
use gif::Encoder;
use wasm_bindgen::prelude::*;
mod utils;
cfg_if! {
    // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
    // allocator. (`wee_alloc` is typically chosen to shrink the size of the
    // produced wasm binary; without the feature the default allocator is used.)
    if #[cfg(feature = "wee_alloc")] {
        extern crate wee_alloc;
        #[global_allocator]
        static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
    }
}
pub fn frames_array2frames(width: u16, height: u16, frames_array: Vec<u8>) -> Vec<Vec<u8>> {
let chunk_size = width as usize * height as usize * 4;
let mut res = Vec::new();
for v in frames_array.chunks(chunk_size) {
res.push(rgba2rgb(v))
}
res
}
#[wasm_bindgen]
/// Converts an RGBA byte buffer to RGB by dropping every fourth (alpha) byte.
///
/// The output is preallocated (3 output bytes per 4 input bytes). A trailing
/// partial pixel (fewer than 4 bytes) is ignored — the previous
/// `chunks(4)`-based version would panic indexing `v[1]`/`v[2]` on it.
pub fn rgba2rgb(pixels: &[u8]) -> Vec<u8> {
    // Each 4-byte RGBA pixel yields 3 RGB bytes; reserve once up front.
    let mut rgb_pixels: Vec<u8> = Vec::with_capacity(pixels.len() / 4 * 3);
    for v in pixels.chunks_exact(4) {
        rgb_pixels.extend_from_slice(&v[..3]);
    }
    rgb_pixels
}
#[wasm_bindgen]
/// Encodes a flat RGBA frame stream of `width * height` images into an
/// in-memory GIF and returns the encoded bytes.
pub fn encode_gif(width: u16, height: u16, frames_array: Vec<u8>) -> Vec<u8> {
    let mut buffer = Vec::new();
    {
        // The encoder mutably borrows the output buffer; keep it in an inner
        // scope so the borrow ends before the buffer is returned.
        let mut encoder = Encoder::new(&mut buffer, width, height, &[]).unwrap();
        for rgb_frame in frames_array2frames(width, height, frames_array) {
            let frame = gif::Frame::from_rgb(width, height, &rgb_frame);
            encoder.write_frame(&frame).unwrap();
        }
    }
    buffer
}
|
//! The module implements [`HashMap`].
use super::ebr::{Arc, AtomicArc, Barrier, Tag};
use super::hash_table::cell::Locker;
use super::hash_table::cell_array::CellArray;
use super::hash_table::HashTable;
use std::borrow::Borrow;
use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hash};
use std::sync::atomic::Ordering::{Acquire, Relaxed};
use std::sync::atomic::{AtomicU8, AtomicUsize};
/// A scalable concurrent hash map data structure.
///
/// [`HashMap`] is a concurrent hash map data structure that is targeted at a highly concurrent
/// workload. The use of an epoch-based reclamation technique enables the data structure to
/// implement non-blocking resizing and fine-granular locking. It has a single array of entry
/// metadata, and each entry of the array, called `Cell`, manages a fixed size key-value pair
/// array. Each `Cell` has a customized 8-byte read-write mutex to protect the data structure,
/// and a linked list for resolving hash collisions.
///
/// ## The key features of [`HashMap`]
///
/// * Non-sharded: the data is managed by a single entry metadata array.
/// * Automatic resizing: it automatically grows or shrinks.
/// * Non-blocking resizing: resizing does not block other threads.
/// * Incremental resizing: each access to the data structure is mandated to rehash a fixed
/// number of key-value pairs.
/// * Optimized resizing: key-value pairs managed by a single `Cell` are guaranteed to be
/// relocated to consecutive `Cell` instances.
/// * No busy waiting: the customized mutex never spins.
///
/// ## The key statistics for [`HashMap`]
///
/// * The expected size of metadata for a single key-value pair: 2-byte.
/// * The expected number of atomic operations required for an operation on a single key: 2.
/// * The expected number of atomic variables accessed during a single key operation: 1.
/// * The number of entries managed by a single metadata `Cell` without a linked list: 32.
/// * The expected maximum linked list length when resize is triggered: log(capacity) / 8.
pub struct HashMap<K, V, H = RandomState>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
    H: BuildHasher,
{
    /// The current `Cell` array; atomically replaced when the map resizes.
    array: AtomicArc<CellArray<K, V, false>>,
    /// The capacity fixed at construction; the map does not shrink below it.
    minimum_capacity: usize,
    /// Extra minimum capacity reserved by outstanding [`Ticket`]s.
    additional_capacity: AtomicUsize,
    /// One-byte flag used by the resize logic (exposed via `HashTable::resize_mutex`).
    resize_mutex: AtomicU8,
    /// The hasher factory used to hash keys.
    build_hasher: H,
}
impl<K, V, H> HashMap<K, V, H>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
    H: BuildHasher,
{
    /// Creates an empty [`HashMap`] with the given capacity and [`BuildHasher`].
    ///
    /// The actual capacity is equal to or greater than the given capacity.
    ///
    /// # Panics
    ///
    /// Panics if memory allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    /// use std::collections::hash_map::RandomState;
    ///
    /// let hashmap: HashMap<u64, u32, RandomState> = HashMap::new(1000, RandomState::new());
    ///
    /// let result = hashmap.capacity();
    /// assert_eq!(result, 1024);
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    /// let result = hashmap.capacity();
    /// assert_eq!(result, 64);
    /// ```
    pub fn new(capacity: usize, build_hasher: H) -> HashMap<K, V, H> {
        // Never allocate less than the default capacity.
        let initial_capacity = capacity.max(Self::default_capacity());
        let array = Arc::new(CellArray::<K, V, false>::new(
            initial_capacity,
            AtomicArc::null(),
        ));
        // The array may round the capacity up; record what was actually allocated
        // as the minimum so the map never shrinks below it.
        let current_capacity = array.num_entries();
        HashMap {
            array: AtomicArc::from(array),
            minimum_capacity: current_capacity,
            additional_capacity: AtomicUsize::new(0),
            resize_mutex: AtomicU8::new(0),
            build_hasher,
        }
    }
    /// Temporarily increases the minimum capacity of the [`HashMap`].
    ///
    /// The reserved space is not exclusively owned by the [`Ticket`], thus can be overtaken.
    /// Unused space is immediately reclaimed when the [`Ticket`] is dropped.
    ///
    /// # Errors
    ///
    /// Returns `None` if a too large number is given.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    /// use std::collections::hash_map::RandomState;
    ///
    /// let hashmap: HashMap<usize, usize, RandomState> = HashMap::new(1000, RandomState::new());
    /// assert_eq!(hashmap.capacity(), 1024);
    ///
    /// let ticket = hashmap.reserve(10000);
    /// assert!(ticket.is_some());
    /// assert_eq!(hashmap.capacity(), 16384);
    /// for i in 0..16 {
    ///     assert!(hashmap.insert(i, i).is_ok());
    /// }
    /// drop(ticket);
    ///
    /// assert_eq!(hashmap.capacity(), 1024);
    /// ```
    pub fn reserve(&self, capacity: usize) -> Option<Ticket<K, V, H>> {
        let mut current_additional_capacity = self.additional_capacity.load(Relaxed);
        // CAS loop: add `capacity` to the reserved amount, retrying when another
        // thread changes `additional_capacity` concurrently.
        loop {
            if usize::MAX - self.minimum_capacity - current_additional_capacity <= capacity {
                // The given value is too large.
                return None;
            }
            match self.additional_capacity.compare_exchange(
                current_additional_capacity,
                current_additional_capacity + capacity,
                Relaxed,
                Relaxed,
            ) {
                Ok(_) => {
                    // Grow eagerly so the reservation takes effect immediately.
                    self.resize(&Barrier::new());
                    return Some(Ticket {
                        hash_map: self,
                        increment: capacity,
                    });
                }
                Err(current) => current_additional_capacity = current,
            }
        }
    }
    /// Inserts a key-value pair into the [`HashMap`].
    ///
    /// # Errors
    ///
    /// Returns an error along with the supplied key-value pair if the key exists.
    ///
    /// # Panics
    ///
    /// Panics if memory allocation fails, or the number of entries in the target cell reaches
    /// `u32::MAX`.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert_eq!(hashmap.insert(1, 1).unwrap_err(), (1, 1));
    /// ```
    #[inline]
    pub fn insert(&self, key: K, val: V) -> Result<(), (K, V)> {
        self.insert_entry(key, val)
    }
    /// Updates an existing key-value pair.
    ///
    /// It returns `None` if the key does not exist.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.update(&1, |_, _| true).is_none());
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert_eq!(hashmap.update(&1, |_, v| { *v = 2; *v }).unwrap(), 2);
    /// assert_eq!(hashmap.read(&1, |_, v| *v).unwrap(), 2);
    /// ```
    #[inline]
    pub fn update<Q, F, R>(&self, key_ref: &Q, updater: F) -> Option<R>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
        F: FnOnce(&K, &mut V) -> R,
    {
        let (hash, partial_hash) = self.hash(key_ref);
        let barrier = Barrier::new();
        // `acquire` locks the cell owning the key and returns an iterator
        // positioned at the entry when the key is present.
        let (_, _, iterator) = self.acquire(key_ref, hash, partial_hash, &barrier);
        if let Some(iterator) = iterator {
            if let Some((k, v)) = iterator.get() {
                // The presence of `locker` prevents the entry from being modified outside it.
                #[allow(clippy::cast_ref_to_mut)]
                return Some(updater(k, unsafe { &mut *(v as *const V as *mut V) }));
            }
        }
        None
    }
    /// Constructs the value in-place, or modifies an existing value corresponding to the key.
    ///
    /// # Panics
    ///
    /// Panics if memory allocation fails, or the number of entries in the target cell is
    /// reached `u32::MAX`.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// hashmap.upsert(1, || 2, |_, v| *v = 2);
    /// assert_eq!(hashmap.read(&1, |_, v| *v).unwrap(), 2);
    /// hashmap.upsert(1, || 2, |_, v| *v = 3);
    /// assert_eq!(hashmap.read(&1, |_, v| *v).unwrap(), 3);
    /// ```
    #[inline]
    pub fn upsert<FI: FnOnce() -> V, FU: FnOnce(&K, &mut V)>(
        &self,
        key: K,
        constructor: FI,
        updater: FU,
    ) {
        let (hash, partial_hash) = self.hash(&key);
        let barrier = Barrier::new();
        let (_, locker, iterator) = self.acquire(&key, hash, partial_hash, &barrier);
        if let Some(iterator) = iterator {
            if let Some((k, v)) = iterator.get() {
                // The presence of `locker` prevents the entry from being modified outside it.
                #[allow(clippy::cast_ref_to_mut)]
                updater(k, unsafe { &mut *(v as *const V as *mut V) });
                return;
            }
        }
        // Key not present: construct the value while still holding the cell lock.
        locker.insert(key, constructor(), partial_hash, &barrier);
    }
    /// Removes a key-value pair if the key exists.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.remove(&1).is_none());
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert_eq!(hashmap.remove(&1).unwrap(), (1, 0));
    /// ```
    #[inline]
    pub fn remove<Q>(&self, key_ref: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        // Unconditional removal is a special case of `remove_if`.
        self.remove_if(key_ref, |_| true)
    }
    /// Removes a key-value pair if the key exists and the given condition is met.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert!(hashmap.remove_if(&1, |v| *v == 1).is_none());
    /// assert_eq!(hashmap.remove_if(&1, |v| *v == 0).unwrap(), (1, 0));
    /// ```
    #[inline]
    pub fn remove_if<Q, F: FnOnce(&V) -> bool>(&self, key_ref: &Q, condition: F) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        self.remove_entry(key_ref, condition).0
    }
    /// Reads a key-value pair.
    ///
    /// It returns `None` if the key does not exist.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.read(&1, |_, v| *v).is_none());
    /// assert!(hashmap.insert(1, 10).is_ok());
    /// assert_eq!(hashmap.read(&1, |_, v| *v).unwrap(), 10);
    /// ```
    #[inline]
    pub fn read<Q, R, F: FnOnce(&K, &V) -> R>(&self, key_ref: &Q, reader: F) -> Option<R>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        // A short-lived barrier confines the borrow to this call.
        let barrier = Barrier::new();
        self.read_with(key_ref, reader, &barrier)
    }
    /// Reads a key-value pair using the supplied [`Barrier`].
    ///
    /// It enables the caller to use the value reference outside the method. It returns `None`
    /// if the key does not exist.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::ebr::Barrier;
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 10).is_ok());
    ///
    /// let barrier = Barrier::new();
    /// let value_ref = hashmap.read_with(&1, |k, v| v, &barrier).unwrap();
    /// assert_eq!(*value_ref, 10);
    /// ```
    #[inline]
    pub fn read_with<'b, Q, R, F: FnOnce(&'b K, &'b V) -> R>(
        &self,
        key_ref: &Q,
        reader: F,
        barrier: &'b Barrier,
    ) -> Option<R>
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        self.read_entry(key_ref, reader, barrier)
    }
    /// Checks if the key exists.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(!hashmap.contains(&1));
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert!(hashmap.contains(&1));
    /// ```
    #[inline]
    pub fn contains<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Eq + Hash + ?Sized,
    {
        self.read(key, |_, _| ()).is_some()
    }
    /// Iterates over all the entries in the [`HashMap`].
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert!(hashmap.insert(2, 1).is_ok());
    ///
    /// let mut acc = 0;
    /// hashmap.for_each(|k, v| { acc += *k; *v = 2; });
    /// assert_eq!(acc, 3);
    /// assert_eq!(hashmap.read(&1, |_, v| *v).unwrap(), 2);
    /// assert_eq!(hashmap.read(&2, |_, v| *v).unwrap(), 2);
    /// ```
    #[inline]
    pub fn for_each<F: FnMut(&K, &mut V)>(&self, mut f: F) {
        // Implemented as a `retain` that keeps every entry.
        self.retain(|k, v| {
            f(k, v);
            true
        });
    }
    /// Retains key-value pairs that satisfy the given predicate.
    ///
    /// It returns the number of entries remaining and removed.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert!(hashmap.insert(2, 1).is_ok());
    /// assert!(hashmap.insert(3, 2).is_ok());
    ///
    /// assert_eq!(hashmap.retain(|k, v| *k == 1 && *v == 0), (1, 2));
    /// ```
    pub fn retain<F: FnMut(&K, &mut V) -> bool>(&self, mut filter: F) -> (usize, usize) {
        let mut retained_entries = 0;
        let mut removed_entries = 0;
        let barrier = Barrier::new();
        // An acquire fence is required to correctly load the contents of the array.
        let mut current_array_ptr = self.array.load(Acquire, &barrier);
        while let Some(current_array_ref) = current_array_ptr.as_ref() {
            // While an old array exists, help finish the in-progress rehash
            // before scanning so no entry is visited in two arrays.
            if !current_array_ref.old_array(&barrier).is_null() {
                current_array_ref.partial_rehash(|key| self.hash(key), |_, _| None, &barrier);
                current_array_ptr = self.array.load(Acquire, &barrier);
                continue;
            }
            // Lock and scan each cell in turn.
            for cell_index in 0..current_array_ref.num_cells() {
                if let Some(locker) = Locker::lock(current_array_ref.cell(cell_index), &barrier) {
                    let mut iterator = locker.cell_ref().iter(&barrier);
                    while iterator.next().is_some() {
                        let retain = if let Some((k, v)) = iterator.get() {
                            #[allow(clippy::cast_ref_to_mut)]
                            filter(k, unsafe { &mut *(v as *const V as *mut V) })
                        } else {
                            true
                        };
                        if retain {
                            retained_entries += 1;
                        } else {
                            locker.erase(&mut iterator);
                            removed_entries += 1;
                        }
                        if retained_entries == usize::MAX || removed_entries == usize::MAX {
                            // Gives up iteration on an integer overflow.
                            return (retained_entries, removed_entries);
                        }
                    }
                }
            }
            let new_current_array_ptr = self.array.load(Acquire, &barrier);
            if current_array_ptr == new_current_array_ptr {
                break;
            }
            // The array was replaced during the scan: restart the count on the
            // new array (removed entries are already gone either way).
            retained_entries = 0;
            current_array_ptr = new_current_array_ptr;
        }
        // Shrink the map if at least as many entries were removed as retained.
        if removed_entries >= retained_entries {
            self.resize(&barrier);
        }
        (retained_entries, removed_entries)
    }
    /// Clears all the key-value pairs.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert_eq!(hashmap.clear(), 1);
    /// ```
    #[inline]
    pub fn clear(&self) -> usize {
        // `retain` with an always-false predicate removes everything; `.1` is
        // the removed-entry count.
        self.retain(|_, _| false).1
    }
    /// Returns the number of entries in the [`HashMap`].
    ///
    /// It scans the entire array to calculate the number of valid entries, making its time
    /// complexity `O(N)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.insert(1, 0).is_ok());
    /// assert_eq!(hashmap.len(), 1);
    /// ```
    #[inline]
    pub fn len(&self) -> usize {
        self.num_entries(&Barrier::new())
    }
    /// Returns `true` if the [`HashMap`] is empty.
    ///
    /// It scans the entire array to calculate the number of valid entries, making its time
    /// complexity `O(N)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// assert!(hashmap.is_empty());
    /// ```
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Returns the capacity of the [`HashMap`].
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    /// use std::collections::hash_map::RandomState;
    ///
    /// let hashmap: HashMap<u64, u32, RandomState> = HashMap::new(1000000, RandomState::new());
    /// assert_eq!(hashmap.capacity(), 1048576);
    /// ```
    #[inline]
    pub fn capacity(&self) -> usize {
        self.num_slots(&Barrier::new())
    }
}
impl<K, V> Default for HashMap<K, V, RandomState>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
{
    /// Creates a [`HashMap`] with the default parameters.
    ///
    /// The default hash builder is [`RandomState`], and the default capacity is `64`.
    ///
    /// # Panics
    ///
    /// Panics if memory allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// use scc::HashMap;
    ///
    /// let hashmap: HashMap<u64, u32> = HashMap::default();
    ///
    /// let result = hashmap.capacity();
    /// assert_eq!(result, 64);
    /// ```
    fn default() -> Self {
        HashMap {
            // A freshly allocated array sized to the default capacity.
            array: AtomicArc::new(CellArray::<K, V, false>::new(
                Self::default_capacity(),
                AtomicArc::null(),
            )),
            minimum_capacity: Self::default_capacity(),
            additional_capacity: AtomicUsize::new(0),
            resize_mutex: AtomicU8::new(0),
            build_hasher: RandomState::new(),
        }
    }
}
impl<K, V, H> Drop for HashMap<K, V, H>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
    H: BuildHasher,
{
    fn drop(&mut self) {
        // Remove every entry first so their destructors run.
        self.clear();
        let barrier = Barrier::new();
        let current_array_ptr = self.array.load(Acquire, &barrier);
        if let Some(current_array_ref) = current_array_ptr.as_ref() {
            // Release any old array left over from an unfinished resize, then
            // detach the current array and hand it to the EBR collector.
            current_array_ref.drop_old_array(&barrier);
            if let Some(current_array) = self.array.swap((None, Tag::None), Relaxed) {
                barrier.reclaim(current_array);
            }
        }
    }
}
// Wires [`HashMap`] into the shared `HashTable` machinery that implements
// hashing, lookup, insertion, and resizing.
impl<K, V, H> HashTable<K, V, H, false> for HashMap<K, V, H>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
    H: BuildHasher,
{
    fn hasher(&self) -> &H {
        &self.build_hasher
    }
    // Always `None`: entries are moved, not duplicated, during a resize.
    fn copier(_: &K, _: &V) -> Option<(K, V)> {
        None
    }
    fn cell_array(&self) -> &AtomicArc<CellArray<K, V, false>> {
        &self.array
    }
    // The effective floor: construction-time minimum plus active reservations.
    fn minimum_capacity(&self) -> usize {
        self.minimum_capacity + self.additional_capacity.load(Relaxed)
    }
    fn resize_mutex(&self) -> &AtomicU8 {
        &self.resize_mutex
    }
}
/// [`Ticket`] keeps the increased minimum capacity of the [`HashMap`] during its lifetime.
///
/// The minimum capacity is lowered when the [`Ticket`] is dropped, thereby allowing unused
/// memory to be reclaimed.
pub struct Ticket<'h, K, V, H>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
    H: BuildHasher,
{
    /// The map whose minimum capacity this ticket inflates.
    hash_map: &'h HashMap<K, V, H>,
    /// Amount added to `additional_capacity`; subtracted back on drop.
    increment: usize,
}
impl<'h, K, V, H> Drop for Ticket<'h, K, V, H>
where
    K: 'static + Eq + Hash + Sync,
    V: 'static + Sync,
    H: BuildHasher,
{
    fn drop(&mut self) {
        // Return the reserved capacity to the map.
        let result = self
            .hash_map
            .additional_capacity
            .fetch_sub(self.increment, Relaxed);
        // Let the map shrink now that the minimum capacity has dropped.
        self.hash_map.resize(&Barrier::new());
        // `fetch_sub` returns the previous value, which must have included
        // this ticket's increment.
        debug_assert!(result >= self.increment);
    }
}
|
// Error hierarchy generated by the `error_chain` crate's macro: produces the
// `Error`, `ErrorKind`, `Result`, and `ResultExt` items for this module, with
// one custom variant wrapping SDL2 error strings.
error_chain! {
    errors {
        #[doc = "An error message from the SDL2 crate."]
        SdlMsg(msg: ::std::string::String) {
            description("sdl error")
            display("{}", msg)
        }
    }
}
|
use crate::{backend::SchemaBuilder, prepare::*, types::*, SchemaStatementBuilder};
/// Rename a table
///
/// # Examples
///
/// ```
/// use sea_query::{*, tests_cfg::*};
///
/// let table = Table::rename()
/// .table(Font::Table, Alias::new("font_new"))
/// .to_owned();
///
/// assert_eq!(
/// table.to_string(MysqlQueryBuilder),
/// r#"RENAME TABLE `font` TO `font_new`"#
/// );
/// assert_eq!(
/// table.to_string(PostgresQueryBuilder),
/// r#"ALTER TABLE "font" RENAME TO "font_new""#
/// );
/// assert_eq!(
/// table.to_string(SqliteQueryBuilder),
/// r#"ALTER TABLE `font` RENAME TO `font_new`"#
/// );
/// ```
#[derive(Debug, Clone)]
pub struct TableRenameStatement {
    /// The current table name (`None` until [`Self::table`] is called).
    pub(crate) from_name: Option<DynIden>,
    /// The new table name (`None` until [`Self::table`] is called).
    pub(crate) to_name: Option<DynIden>,
}
impl Default for TableRenameStatement {
    /// Equivalent to [`TableRenameStatement::new`]: both names unset.
    fn default() -> Self {
        Self::new()
    }
}
impl TableRenameStatement {
/// Construct rename table statement
pub fn new() -> Self {
Self {
from_name: None,
to_name: None,
}
}
/// Set old and new table name
pub fn table<T: 'static, R: 'static>(mut self, from_name: T, to_name: R) -> Self
where
T: Iden,
R: Iden,
{
self.from_name = Some(SeaRc::new(from_name));
self.to_name = Some(SeaRc::new(to_name));
self
}
}
impl SchemaStatementBuilder for TableRenameStatement {
    /// Render this statement as SQL using a statically dispatched builder.
    fn build<T: SchemaBuilder>(&self, schema_builder: T) -> String {
        let mut writer = SqlWriter::new();
        schema_builder.prepare_table_rename_statement(self, &mut writer);
        writer.result()
    }
    /// Render this statement as SQL using a dynamically dispatched builder.
    fn build_any(&self, schema_builder: &dyn SchemaBuilder) -> String {
        let mut writer = SqlWriter::new();
        schema_builder.prepare_table_rename_statement(self, &mut writer);
        writer.result()
    }
}
|
pub mod kill_all_kruskal;
use rand::distributions::{IndependentSample, Range};
use std::ops::Mul;
use std::hash::Hash;
use typenum;
mod hall;
pub use self::hall::create_hall;
#[derive(Serialize, Deserialize, Clone)]
pub enum Level {
    /// 2D variant of the "kill all" Kruskal-maze level configuration.
    KillAllKruskal2D(kill_all_kruskal::Conf2D),
    /// 3D variant of the "kill all" Kruskal-maze level configuration.
    KillAllKruskal3D(kill_all_kruskal::Conf3D),
}
impl Level {
    /// Populate `world` by delegating to the configuration carried by the
    /// selected variant.
    pub fn create(&self, world: &mut ::specs::World) {
        match self {
            Level::KillAllKruskal2D(conf) => conf.create(world),
            Level::KillAllKruskal3D(conf) => conf.create(world),
        }
    }
}
/// A Kruskal-generated maze plus the placements chosen by [`KruskalDecorated::new`]:
/// start/end cells with their openings, entity cells, and turret cells.
pub struct KruskalDecorated<D>
where
    D: ::na::Dim + ::na::DimName + Hash,
    D::Value: Mul<typenum::UInt<typenum::UTerm, typenum::B1>, Output = D::Value>
        + ::generic_array::ArrayLength<isize> + ::generic_array::ArrayLength<f32>,
{
    /// The generated maze itself.
    maze: ::maze::Maze<D>,
    /// Cell dug out for the start, and the opening leading into it.
    start_cell: ::na::VectorN<isize, D>,
    start_opening: ::na::VectorN<isize, D>,
    /// Cell dug out for the end, and the opening leading into it.
    end_cell: ::na::VectorN<isize, D>,
    end_opening: ::na::VectorN<isize, D>,
    /// Cells selected for entity spawns.
    entity_cells: Vec<::na::VectorN<isize, D>>,
    /// Cells selected for turret placement (at most one per inner room).
    turret_cells: Vec<::na::VectorN<isize, D>>,
}
impl<D> KruskalDecorated<D>
where
    D: ::na::Dim + ::na::DimName + Hash,
    D::Value: Mul<typenum::UInt<typenum::UTerm, typenum::B1>, Output = D::Value>
        + ::generic_array::ArrayLength<isize> + ::generic_array::ArrayLength<f32>,
{
    /// Generates a decorated Kruskal maze:
    /// digs a start cell and an end cell, places at most one turret per inner
    /// room (except near the start/end cells and their openings), then scatters
    /// up to `entities` entities over the remaining free cells.
    ///
    /// The whole generation is retried from scratch (the outer `loop`) whenever
    /// digging a start or end cell fails.
    pub fn new(size: ::na::VectorN<isize, D>, percent: f64, bug: ::na::VectorN<isize, D>, turrets: usize, entities: usize) -> Self {
        let mut rng = ::rand::thread_rng();
        loop {
            // Generate general maze
            let mut maze = ::maze::Maze::kruskal(size.clone(), percent, bug.clone(), 1.0);
            maze.reduce(1);
            maze.circle();
            maze.fill_smallests();
            while maze.fill_dead_corridors() {}
            maze.extend(1);
            maze.circle();
            // Start: dig one cell anywhere; retry the whole generation on failure.
            let mut dig_start = maze.dig_cells(1, |_| true);
            if dig_start.first().is_none() { continue }
            let (start_cell, start_opening) = dig_start.remove(0);
            // End: same procedure.
            let mut dig_end = maze.dig_cells(1, |_| true);
            if dig_end.first().is_none() { continue }
            let (end_cell, end_opening) = dig_end.remove(0);
            // Put turrets: pick one random eligible wall-adjacent cell per inner
            // room, excluding cells within distance 5 of the start and the
            // start/end cells and openings themselves.
            let cells = maze.compute_inner_room_zones()
                .iter()
                .cloned()
                .filter_map(|mut room| {
                    room.retain(|cell| {
                        // squared euclidean distance from the start must exceed 5^2
                        (start_cell.clone() - cell.clone()).iter().fold(0, |acc, c| acc + c.pow(2)) > 5_isize.pow(2)
                            && *cell != start_cell
                            && *cell != start_opening
                            && *cell != end_cell
                            && *cell != end_opening
                            && maze.is_neighbouring_wall(cell)
                    });
                    if room.is_empty() {
                        None
                    } else {
                        // Uniformly pick one remaining cell of the room.
                        let cell = room.iter()
                            .skip(Range::new(0, room.len()).ind_sample(&mut rng))
                            .next()
                            .unwrap()
                            .clone();
                        Some(cell)
                    }
                })
                .collect::<Vec<_>>();
            // Keep at most `turrets` of the candidate cells.
            let mut turret_cells = vec![];
            for (_, cell) in (0..turrets).zip(cells) {
                turret_cells.push(cell);
            }
            // Put entities on free cells away from the start, avoiding the
            // start/end cells, their openings, and turret cells.
            let mut cells = maze.iterate_maze();
            cells.retain(|cell| {
                !maze.walls.contains(&cell)
                    && (start_cell.clone() - cell.clone()).iter().fold(0, |acc, c| acc + c.pow(2)) > 5_isize.pow(2)
                    && *cell != start_cell
                    && *cell != start_opening
                    && *cell != end_cell
                    && *cell != end_opening
                    && !turret_cells.contains(cell)
            });
            let mut entity_cells = vec![];
            for _ in 0..entities {
                if cells.is_empty() {
                    break
                }
                // Sample without replacement; `swap_remove` keeps this O(1).
                let index = Range::new(0, cells.len()).ind_sample(&mut rng);
                let cell = cells.swap_remove(index);
                entity_cells.push(cell);
            }
            break KruskalDecorated {
                maze,
                start_cell,
                start_opening,
                end_cell,
                end_opening,
                entity_cells,
                turret_cells,
            }
        }
    }
}
|
/// Returns the length of the longest substring of `s` without repeating
/// characters (LeetCode #3).
///
/// Uses a single-pass sliding window over char indices: `start` marks the
/// window's left edge and `last_seen` records the most recent index of each
/// character, so the window jumps past a duplicate in O(1) instead of popping
/// a deque one element at a time. O(n) time, O(k) space for k distinct chars.
pub fn length_of_longest_substring(s: String) -> i32 {
    use std::collections::HashMap;
    let mut last_seen: HashMap<char, usize> = HashMap::new();
    let mut start = 0; // left edge of the current duplicate-free window
    let mut longest = 0;
    for (i, c) in s.chars().enumerate() {
        if let Some(&prev) = last_seen.get(&c) {
            // Only a duplicate inside the current window shrinks it.
            if prev >= start {
                start = prev + 1;
            }
        }
        last_seen.insert(c, i);
        longest = longest.max(i - start + 1);
    }
    longest as i32
}
fn main() {
assert!(length_of_longest_substring("abcabcbb".to_string()) == 3);
assert!(length_of_longest_substring("bbb".to_string()) == 1);
assert!(length_of_longest_substring("pwwkew".to_string()) == 3);
}
|
use crate::{
bet::Bet,
bet_database::{BetId, BetOrProp, BetState, CancelReason},
};
use anyhow::anyhow;
use bdk::{bitcoin::OutPoint, blockchain::UtxoExists};
use super::Party;
/// Applies a partial state transition to one bet in the bet database.
///
/// The caller supplies match arm(s) (`$($tt)+`) mapping the current
/// `BetState` to its successor; any state not covered by those arms is left
/// unchanged (the generated `_ => old_state` arm). The trailing `?`
/// propagates database errors, so the macro may only be used inside
/// functions returning `Result`.
macro_rules! update_bet {
    ($self:expr, $bet_id:expr, $($tt:tt)+) => {
        $self.bet_db.update_bets(&[$bet_id], |old_state, _, _| {
            Ok(match old_state {
                $($tt)+,
                _ => old_state
            })
        })?;
    }
}
impl<D> Party<bdk::blockchain::EsploraBlockchain, D>
where
D: bdk::database::BatchDatabase,
{
/// Returns the reason a bet was cancelled if any of `inputs` has been spent,
/// or `Ok(None)` when every input is still unspent.
///
/// If our own wallet holds a confirmed transaction spending the input, we
/// cancelled it ourselves (`ICancelled`); otherwise the counterparty did
/// (`TheyCancelled`).
fn check_cancelled(&self, inputs: &[OutPoint]) -> anyhow::Result<Option<CancelReason>> {
    for input in inputs {
        if !self.wallet.client().utxo_exists(*input)? {
            // Look for a confirmed wallet transaction spending this input.
            let tx = self.wallet.list_transactions(true)?.into_iter().find(|tx| {
                tx.transaction
                    .as_ref()
                    // NOTE(review): assumes the raw transaction is always
                    // present when listed with `include_raw = true` — confirm.
                    .unwrap()
                    .input
                    .iter()
                    // idiom: `any` instead of `find(..).is_some()`
                    .any(|txin| txin.previous_output == *input)
                    && tx.confirmation_time.is_some()
            });
            return Ok(Some(match tx {
                Some(tx) => CancelReason::ICancelled {
                    spent: *input,
                    my_cancelling_tx: tx.txid,
                },
                None => CancelReason::TheyCancelled { spent: *input },
            }));
        }
    }
    Ok(None)
}
/// Look at current state and see if we can progress it.
///
/// The `try_learn_outcome` flag exists so tests can turn off outcome lookup
/// and avoid contacting a non-existent oracle.
/// TODO: fix this with an oracle trait that can be mocked in tests.
pub fn take_next_action(&self, bet_id: BetId, try_learn_outcome: bool) -> anyhow::Result<()> {
    let bet_state = self
        .bet_db
        .get_entity(bet_id)?
        // FIX: the message previously had a `{}` placeholder with no argument;
        // also build the error lazily so it only allocates on the miss.
        .ok_or_else(|| anyhow!("Bet {} does not exist", bet_id))?;
    match bet_state {
        // Terminal states: nothing to do.
        BetState::Claimed { .. } | BetState::Cancelled { .. } | BetState::Lost { .. } => {}
        BetState::Cancelling { bet_or_prop, .. } => {
            // success cancelling
            if let Some(reason) = self.check_cancelled(&bet_or_prop.inputs())? {
                update_bet! {
                    self, bet_id,
                    BetState::Cancelling { bet_or_prop, .. } => BetState::Cancelled {
                        bet_or_prop,
                        reason: reason.clone()
                    }
                };
            }
            // failed to cancel: the bet transaction confirmed first.
            if let BetOrProp::Bet(bet) = bet_or_prop {
                if let Some(height) =
                    self.is_confirmed(bet.tx().txid(), bet.joint_output.wallet_descriptor())?
                {
                    update_bet! { self, bet_id,
                        BetState::Cancelling { .. } => BetState::Confirmed { bet: bet.clone(), height }
                    }
                }
            }
        }
        BetState::Proposed { local_proposal } => {
            // A proposal is cancelled by spending one of its inputs.
            if let Some(reason) = self.check_cancelled(&local_proposal.proposal.inputs)? {
                update_bet! { self, bet_id,
                    BetState::Proposed { local_proposal } => BetState::Cancelled {
                        bet_or_prop: BetOrProp::Proposal(local_proposal),
                        reason: reason.clone()
                    }
                };
            }
        }
        BetState::Offered { bet, .. } => {
            let txid = bet.tx().txid();
            if let Some(height) =
                self.is_confirmed(txid, bet.joint_output.wallet_descriptor())?
            {
                update_bet! { self, bet_id,
                    BetState::Offered { bet, .. } => BetState::Confirmed { bet, height }
                };
                // Re-enter to progress the now-confirmed bet immediately.
                self.take_next_action(bet_id, try_learn_outcome)?;
            }
            let inputs_to_check_for_cancellation = bet
                .tx()
                .input
                .iter()
                .map(|x| x.previous_output)
                .collect::<Vec<_>>();
            if let Some(reason) = self.check_cancelled(&inputs_to_check_for_cancellation)? {
                update_bet! { self, bet_id,
                    BetState::Offered { bet, .. } => BetState::Cancelled {
                        bet_or_prop: BetOrProp::Bet(bet),
                        reason: reason.clone()
                    }
                };
            }
        }
        BetState::Unconfirmed { bet } => {
            let txid = bet.tx().txid();
            if let Some(height) =
                self.is_confirmed(txid, bet.joint_output.wallet_descriptor())?
            {
                update_bet! { self, bet_id,
                    BetState::Unconfirmed { bet, .. } => BetState::Confirmed { bet, height }
                };
                // Re-enter to progress the now-confirmed bet immediately.
                self.take_next_action(bet_id, try_learn_outcome)?;
            } else {
                let inputs_to_check_for_cancellation = bet
                    .tx()
                    .input
                    .iter()
                    .map(|x| x.previous_output)
                    .collect::<Vec<_>>();
                if let Some(reason) = self.check_cancelled(&inputs_to_check_for_cancellation)? {
                    update_bet! { self, bet_id,
                        BetState::Unconfirmed { bet, .. } => {
                            BetState::Cancelled {
                                bet_or_prop: BetOrProp::Bet(bet),
                                reason: reason.clone()
                            }
                        }
                    };
                }
            }
        }
        BetState::Confirmed { bet, height: _ } => {
            if try_learn_outcome {
                self.try_get_outcome(bet_id, bet)?;
            }
        }
        BetState::Won { bet, .. } => {
            // It should never happen that you go from "Won" to "Claimed" without going through
            // claiming but just in case someone steals your keys somehow we handle it.
            if let Some(tx_that_claimed) =
                self.get_spending_tx(bet.outpoint(), bet.joint_output.wallet_descriptor())?
            {
                update_bet! {
                    self, bet_id,
                    BetState::Won { bet, .. } => BetState::Claimed { bet, expecting: None, txid: tx_that_claimed }
                }
            }
        }
        BetState::Claiming { bet, .. } => {
            if let Some(tx_that_claimed) =
                self.get_spending_tx(bet.outpoint(), bet.joint_output.wallet_descriptor())?
            {
                update_bet! {
                    self, bet_id,
                    BetState::Claiming { bet, claim_txid, .. } => BetState::Claimed { bet, expecting: Some(claim_txid), txid: tx_that_claimed }
                }
            }
        }
    }
    Ok(())
}
fn try_get_outcome(&self, bet_id: BetId, bet: Bet) -> anyhow::Result<()> {
let event_id = bet.oracle_event.event.id;
let event_url = reqwest::Url::parse(&format!("https://{}{}", bet.oracle_id, event_id))?;
let event_response = self
.client
.get(event_url)
.send()?
.error_for_status()?
.json::<crate::EventResponse>()?;
if let Some(attestation) = event_response.attestation {
self.learn_outcome(bet_id, attestation)?;
}
Ok(())
}
}
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw register contents captured at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw bits accumulated by the field proxies before being written back.
    bits: u32,
}
impl super::PADREGG {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read once, let the closure transform a writable copy, then write back.
        let bits = self.register.get();
        let r = R { bits };
        let mut w = W { bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        let bits = self.register.get();
        R { bits }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Writes start from the reset value, not from the current hardware state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `PAD27RSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD27RSELR {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD27RSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        // Variants are declared in raw-value order, so the discriminant is the value.
        *self as u8
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD27RSELR {
        match value {
            0 => Self::PULL1_5K,
            1 => Self::PULL6K,
            2 => Self::PULL12K,
            3 => Self::PULL24K,
            // Callers pass the masked 2-bit field, so 0..=3 is exhaustive.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `PULL1_5K`"]
    #[inline]
    pub fn is_pull1_5k(&self) -> bool {
        matches!(*self, Self::PULL1_5K)
    }
    #[doc = "Checks if the value of the field is `PULL6K`"]
    #[inline]
    pub fn is_pull6k(&self) -> bool {
        matches!(*self, Self::PULL6K)
    }
    #[doc = "Checks if the value of the field is `PULL12K`"]
    #[inline]
    pub fn is_pull12k(&self) -> bool {
        matches!(*self, Self::PULL12K)
    }
    #[doc = "Checks if the value of the field is `PULL24K`"]
    #[inline]
    pub fn is_pull24k(&self) -> bool {
        matches!(*self, Self::PULL24K)
    }
}
#[doc = "Possible values of the field `PAD27FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD27FNCSELR {
    #[doc = "Configure as UART0 RX input signal value."]
    UART0RX,
    #[doc = "IOM/MSPI nCE group 27 value."]
    NCE27,
    #[doc = "CTIMER connection 5 value."]
    CT5,
    #[doc = "Configure as GPIO27 value."]
    GPIO27,
    #[doc = "Configure as I2C clock I/O signal from IOMSTR2 value."]
    M2SCL,
    #[doc = "Configure as SPI clock output signal from IOMSTR2 value."]
    M2SCK,
    #[doc = r" Reserved"]
    _Reserved(u8),
}
impl PAD27FNCSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        match *self {
            Self::UART0RX => 0,
            Self::NCE27 => 1,
            Self::CT5 => 2,
            Self::GPIO27 => 3,
            Self::M2SCL => 4,
            Self::M2SCK => 5,
            // Reserved values round-trip unchanged.
            Self::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD27FNCSELR {
        match value {
            0 => Self::UART0RX,
            1 => Self::NCE27,
            2 => Self::CT5,
            3 => Self::GPIO27,
            4 => Self::M2SCL,
            5 => Self::M2SCK,
            other => Self::_Reserved(other),
        }
    }
    #[doc = "Checks if the value of the field is `UART0RX`"]
    #[inline]
    pub fn is_uart0rx(&self) -> bool {
        matches!(*self, Self::UART0RX)
    }
    #[doc = "Checks if the value of the field is `NCE27`"]
    #[inline]
    pub fn is_nce27(&self) -> bool {
        matches!(*self, Self::NCE27)
    }
    #[doc = "Checks if the value of the field is `CT5`"]
    #[inline]
    pub fn is_ct5(&self) -> bool {
        matches!(*self, Self::CT5)
    }
    #[doc = "Checks if the value of the field is `GPIO27`"]
    #[inline]
    pub fn is_gpio27(&self) -> bool {
        matches!(*self, Self::GPIO27)
    }
    #[doc = "Checks if the value of the field is `M2SCL`"]
    #[inline]
    pub fn is_m2scl(&self) -> bool {
        matches!(*self, Self::M2SCL)
    }
    #[doc = "Checks if the value of the field is `M2SCK`"]
    #[inline]
    pub fn is_m2sck(&self) -> bool {
        matches!(*self, Self::M2SCK)
    }
}
#[doc = "Possible values of the field `PAD27STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD27STRNGR {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD27STRNGR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // HIGH is the set (1) state, LOW the cleared (0) state.
        matches!(*self, Self::HIGH)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD27STRNGR {
        if value {
            Self::HIGH
        } else {
            Self::LOW
        }
    }
    #[doc = "Checks if the value of the field is `LOW`"]
    #[inline]
    pub fn is_low(&self) -> bool {
        matches!(*self, Self::LOW)
    }
    #[doc = "Checks if the value of the field is `HIGH`"]
    #[inline]
    pub fn is_high(&self) -> bool {
        matches!(*self, Self::HIGH)
    }
}
#[doc = "Possible values of the field `PAD27INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD27INPENR {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD27INPENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD27INPENR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD27PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD27PULLR {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD27PULLR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD27PULLR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD26FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD26FNCSELR {
    #[doc = "Configure as the external HFRC oscillator input value."]
    EXTHF,
    #[doc = "IOM/MSPI nCE group 26 value."]
    NCE26,
    #[doc = "CTIMER connection 3 value."]
    CT3,
    #[doc = "Configure as GPIO26 value."]
    GPIO26,
    #[doc = "SCARD reset output value."]
    SCCRST,
    #[doc = "MSPI data connection 1 value."]
    MSPI1,
    #[doc = "Configure as UART0 TX output signal value."]
    UART0TX,
    #[doc = "Configure as UART1 CTS input signal value."]
    UA1CTS,
}
impl PAD26FNCSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        // Variants are declared in raw-value order, so the discriminant is the value.
        *self as u8
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD26FNCSELR {
        match value {
            0 => Self::EXTHF,
            1 => Self::NCE26,
            2 => Self::CT3,
            3 => Self::GPIO26,
            4 => Self::SCCRST,
            5 => Self::MSPI1,
            6 => Self::UART0TX,
            7 => Self::UA1CTS,
            // Callers pass the masked 3-bit field, so 0..=7 is exhaustive.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `EXTHF`"]
    #[inline]
    pub fn is_exthf(&self) -> bool {
        matches!(*self, Self::EXTHF)
    }
    #[doc = "Checks if the value of the field is `NCE26`"]
    #[inline]
    pub fn is_nce26(&self) -> bool {
        matches!(*self, Self::NCE26)
    }
    #[doc = "Checks if the value of the field is `CT3`"]
    #[inline]
    pub fn is_ct3(&self) -> bool {
        matches!(*self, Self::CT3)
    }
    #[doc = "Checks if the value of the field is `GPIO26`"]
    #[inline]
    pub fn is_gpio26(&self) -> bool {
        matches!(*self, Self::GPIO26)
    }
    #[doc = "Checks if the value of the field is `SCCRST`"]
    #[inline]
    pub fn is_sccrst(&self) -> bool {
        matches!(*self, Self::SCCRST)
    }
    #[doc = "Checks if the value of the field is `MSPI1`"]
    #[inline]
    pub fn is_mspi1(&self) -> bool {
        matches!(*self, Self::MSPI1)
    }
    #[doc = "Checks if the value of the field is `UART0TX`"]
    #[inline]
    pub fn is_uart0tx(&self) -> bool {
        matches!(*self, Self::UART0TX)
    }
    #[doc = "Checks if the value of the field is `UA1CTS`"]
    #[inline]
    pub fn is_ua1cts(&self) -> bool {
        matches!(*self, Self::UA1CTS)
    }
}
#[doc = "Possible values of the field `PAD26STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD26STRNGR {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD26STRNGR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // HIGH is the set (1) state, LOW the cleared (0) state.
        matches!(*self, Self::HIGH)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD26STRNGR {
        if value {
            Self::HIGH
        } else {
            Self::LOW
        }
    }
    #[doc = "Checks if the value of the field is `LOW`"]
    #[inline]
    pub fn is_low(&self) -> bool {
        matches!(*self, Self::LOW)
    }
    #[doc = "Checks if the value of the field is `HIGH`"]
    #[inline]
    pub fn is_high(&self) -> bool {
        matches!(*self, Self::HIGH)
    }
}
#[doc = "Possible values of the field `PAD26INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD26INPENR {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD26INPENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD26INPENR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD26PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD26PULLR {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD26PULLR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD26PULLR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD25RSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD25RSELR {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD25RSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        // Variants are declared in raw-value order, so the discriminant is the value.
        *self as u8
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD25RSELR {
        match value {
            0 => Self::PULL1_5K,
            1 => Self::PULL6K,
            2 => Self::PULL12K,
            3 => Self::PULL24K,
            // Callers pass the masked 2-bit field, so 0..=3 is exhaustive.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `PULL1_5K`"]
    #[inline]
    pub fn is_pull1_5k(&self) -> bool {
        matches!(*self, Self::PULL1_5K)
    }
    #[doc = "Checks if the value of the field is `PULL6K`"]
    #[inline]
    pub fn is_pull6k(&self) -> bool {
        matches!(*self, Self::PULL6K)
    }
    #[doc = "Checks if the value of the field is `PULL12K`"]
    #[inline]
    pub fn is_pull12k(&self) -> bool {
        matches!(*self, Self::PULL12K)
    }
    #[doc = "Checks if the value of the field is `PULL24K`"]
    #[inline]
    pub fn is_pull24k(&self) -> bool {
        matches!(*self, Self::PULL24K)
    }
}
#[doc = "Possible values of the field `PAD25FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD25FNCSELR {
    #[doc = "Configure as UART1 RX input signal value."]
    UART1RX,
    #[doc = "IOM/MSPI nCE group 25 value."]
    NCE25,
    #[doc = "CTIMER connection 1 value."]
    CT1,
    #[doc = "Configure as GPIO25 value."]
    GPIO25,
    #[doc = "Configure as the IOMSTR2 I2C SDA or SPI WIR3 signal value."]
    M2SDAWIR3,
    #[doc = "Configure as the IOMSTR2 SPI MISO input signal value."]
    M2MISO,
    #[doc = r" Reserved"]
    _Reserved(u8),
}
impl PAD25FNCSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        match *self {
            Self::UART1RX => 0,
            Self::NCE25 => 1,
            Self::CT1 => 2,
            Self::GPIO25 => 3,
            Self::M2SDAWIR3 => 4,
            Self::M2MISO => 5,
            // Reserved values round-trip unchanged.
            Self::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD25FNCSELR {
        match value {
            0 => Self::UART1RX,
            1 => Self::NCE25,
            2 => Self::CT1,
            3 => Self::GPIO25,
            4 => Self::M2SDAWIR3,
            5 => Self::M2MISO,
            other => Self::_Reserved(other),
        }
    }
    #[doc = "Checks if the value of the field is `UART1RX`"]
    #[inline]
    pub fn is_uart1rx(&self) -> bool {
        matches!(*self, Self::UART1RX)
    }
    #[doc = "Checks if the value of the field is `NCE25`"]
    #[inline]
    pub fn is_nce25(&self) -> bool {
        matches!(*self, Self::NCE25)
    }
    #[doc = "Checks if the value of the field is `CT1`"]
    #[inline]
    pub fn is_ct1(&self) -> bool {
        matches!(*self, Self::CT1)
    }
    #[doc = "Checks if the value of the field is `GPIO25`"]
    #[inline]
    pub fn is_gpio25(&self) -> bool {
        matches!(*self, Self::GPIO25)
    }
    #[doc = "Checks if the value of the field is `M2SDAWIR3`"]
    #[inline]
    pub fn is_m2sdawir3(&self) -> bool {
        matches!(*self, Self::M2SDAWIR3)
    }
    #[doc = "Checks if the value of the field is `M2MISO`"]
    #[inline]
    pub fn is_m2miso(&self) -> bool {
        matches!(*self, Self::M2MISO)
    }
}
#[doc = "Possible values of the field `PAD25STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD25STRNGR {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD25STRNGR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // HIGH is the set (1) state, LOW the cleared (0) state.
        matches!(*self, Self::HIGH)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD25STRNGR {
        if value {
            Self::HIGH
        } else {
            Self::LOW
        }
    }
    #[doc = "Checks if the value of the field is `LOW`"]
    #[inline]
    pub fn is_low(&self) -> bool {
        matches!(*self, Self::LOW)
    }
    #[doc = "Checks if the value of the field is `HIGH`"]
    #[inline]
    pub fn is_high(&self) -> bool {
        matches!(*self, Self::HIGH)
    }
}
#[doc = "Possible values of the field `PAD25INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD25INPENR {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD25INPENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD25INPENR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD25PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD25PULLR {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD25PULLR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD25PULLR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD24FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD24FNCSELR {
    #[doc = "Configure as UART1 TX output signal value."]
    UART1TX,
    #[doc = "IOM/MSPI nCE group 24 value."]
    NCE24,
    #[doc = "MSPI data connection 8 value."]
    MSPI8,
    #[doc = "Configure as GPIO24 value."]
    GPIO24,
    #[doc = "Configure as UART0 CTS input signal value."]
    UA0CTS,
    #[doc = "CTIMER connection 21 value."]
    CT21,
    #[doc = "Configure as the 32kHz crystal output signal value."]
    _32KHZXT,
    #[doc = "Configure as the serial trace data output signal value."]
    SWO,
}
impl PAD24FNCSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        // Variants are declared in raw-value order, so the discriminant is the value.
        *self as u8
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD24FNCSELR {
        match value {
            0 => Self::UART1TX,
            1 => Self::NCE24,
            2 => Self::MSPI8,
            3 => Self::GPIO24,
            4 => Self::UA0CTS,
            5 => Self::CT21,
            6 => Self::_32KHZXT,
            7 => Self::SWO,
            // Callers pass the masked 3-bit field, so 0..=7 is exhaustive.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `UART1TX`"]
    #[inline]
    pub fn is_uart1tx(&self) -> bool {
        matches!(*self, Self::UART1TX)
    }
    #[doc = "Checks if the value of the field is `NCE24`"]
    #[inline]
    pub fn is_nce24(&self) -> bool {
        matches!(*self, Self::NCE24)
    }
    #[doc = "Checks if the value of the field is `MSPI8`"]
    #[inline]
    pub fn is_mspi8(&self) -> bool {
        matches!(*self, Self::MSPI8)
    }
    #[doc = "Checks if the value of the field is `GPIO24`"]
    #[inline]
    pub fn is_gpio24(&self) -> bool {
        matches!(*self, Self::GPIO24)
    }
    #[doc = "Checks if the value of the field is `UA0CTS`"]
    #[inline]
    pub fn is_ua0cts(&self) -> bool {
        matches!(*self, Self::UA0CTS)
    }
    #[doc = "Checks if the value of the field is `CT21`"]
    #[inline]
    pub fn is_ct21(&self) -> bool {
        matches!(*self, Self::CT21)
    }
    #[doc = "Checks if the value of the field is `_32KHZXT`"]
    #[inline]
    pub fn is_32k_hz_xt(&self) -> bool {
        matches!(*self, Self::_32KHZXT)
    }
    #[doc = "Checks if the value of the field is `SWO`"]
    #[inline]
    pub fn is_swo(&self) -> bool {
        matches!(*self, Self::SWO)
    }
}
#[doc = "Possible values of the field `PAD24STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD24STRNGR {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD24STRNGR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // HIGH is the set (1) state, LOW the cleared (0) state.
        matches!(*self, Self::HIGH)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD24STRNGR {
        if value {
            Self::HIGH
        } else {
            Self::LOW
        }
    }
    #[doc = "Checks if the value of the field is `LOW`"]
    #[inline]
    pub fn is_low(&self) -> bool {
        matches!(*self, Self::LOW)
    }
    #[doc = "Checks if the value of the field is `HIGH`"]
    #[inline]
    pub fn is_high(&self) -> bool {
        matches!(*self, Self::HIGH)
    }
}
#[doc = "Possible values of the field `PAD24INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD24INPENR {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD24INPENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD24INPENR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Possible values of the field `PAD24PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD24PULLR {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD24PULLR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // EN is the set (1) state, DIS the cleared (0) state.
        matches!(*self, Self::EN)
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD24PULLR {
        if value {
            Self::EN
        } else {
            Self::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, Self::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, Self::EN)
    }
}
#[doc = "Values that can be written to the field `PAD27RSEL`"]
pub enum PAD27RSELW {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD27RSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw field encoding for each variant.
        match *self {
            Self::PULL1_5K => 0,
            Self::PULL6K => 1,
            Self::PULL12K => 2,
            Self::PULL24K => 3,
        }
    }
}
#[doc = r" Proxy"]
pub struct _PAD27RSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD27RSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD27RSELW) -> &'a mut W {
        self.bits(variant._bits())
    }
    #[doc = "Pullup is ~1.5 KOhms value."]
    #[inline]
    pub fn pull1_5k(self) -> &'a mut W {
        self.variant(PAD27RSELW::PULL1_5K)
    }
    #[doc = "Pullup is ~6 KOhms value."]
    #[inline]
    pub fn pull6k(self) -> &'a mut W {
        self.variant(PAD27RSELW::PULL6K)
    }
    #[doc = "Pullup is ~12 KOhms value."]
    #[inline]
    pub fn pull12k(self) -> &'a mut W {
        self.variant(PAD27RSELW::PULL12K)
    }
    #[doc = "Pullup is ~24 KOhms value."]
    #[inline]
    pub fn pull24k(self) -> &'a mut W {
        self.variant(PAD27RSELW::PULL24K)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits [31:30]: clear it, then OR in the masked value.
        const MASK: u8 = 3;
        const OFFSET: u8 = 30;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD27FNCSEL`"]
pub enum PAD27FNCSELW {
    #[doc = "Configure as UART0 RX input signal value."]
    UART0RX,
    #[doc = "IOM/MSPI nCE group 27 value."]
    NCE27,
    #[doc = "CTIMER connection 5 value."]
    CT5,
    #[doc = "Configure as GPIO27 value."]
    GPIO27,
    #[doc = "Configure as I2C clock I/O signal from IOMSTR2 value."]
    M2SCL,
    #[doc = "Configure as SPI clock output signal from IOMSTR2 value."]
    M2SCK,
}
impl PAD27FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw field encoding for each variant.
        match *self {
            Self::UART0RX => 0,
            Self::NCE27 => 1,
            Self::CT5 => 2,
            Self::GPIO27 => 3,
            Self::M2SCL => 4,
            Self::M2SCK => 5,
        }
    }
}
#[doc = r" Proxy"]
pub struct _PAD27FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD27FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD27FNCSELW) -> &'a mut W {
        // SAFETY: every enum variant encodes to a valid value for this field.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Configure as UART0 RX input signal value."]
    #[inline]
    pub fn uart0rx(self) -> &'a mut W {
        self.variant(PAD27FNCSELW::UART0RX)
    }
    #[doc = "IOM/MSPI nCE group 27 value."]
    #[inline]
    pub fn nce27(self) -> &'a mut W {
        self.variant(PAD27FNCSELW::NCE27)
    }
    #[doc = "CTIMER connection 5 value."]
    #[inline]
    pub fn ct5(self) -> &'a mut W {
        self.variant(PAD27FNCSELW::CT5)
    }
    #[doc = "Configure as GPIO27 value."]
    #[inline]
    pub fn gpio27(self) -> &'a mut W {
        self.variant(PAD27FNCSELW::GPIO27)
    }
    #[doc = "Configure as I2C clock I/O signal from IOMSTR2 value."]
    #[inline]
    pub fn m2scl(self) -> &'a mut W {
        self.variant(PAD27FNCSELW::M2SCL)
    }
    #[doc = "Configure as SPI clock output signal from IOMSTR2 value."]
    #[inline]
    pub fn m2sck(self) -> &'a mut W {
        self.variant(PAD27FNCSELW::M2SCK)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits [29:27]: clear it, then OR in the masked value.
        const MASK: u8 = 7;
        const OFFSET: u8 = 27;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD27STRNG`"]
pub enum PAD27STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD27STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as 1, LOW as 0.
        matches!(*self, Self::HIGH)
    }
}
#[doc = r" Proxy"]
pub struct _PAD27STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD27STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD27STRNGW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD27STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD27STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 26: clear it, then OR in the value.
        const MASK: bool = true;
        const OFFSET: u8 = 26;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD27INPEN`"]
pub enum PAD27INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD27INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        matches!(*self, Self::EN)
    }
}
#[doc = r" Proxy"]
pub struct _PAD27INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD27INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD27INPENW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD27INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD27INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 25: clear it, then OR in the value.
        const MASK: bool = true;
        const OFFSET: u8 = 25;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD27PULL`"]
pub enum PAD27PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD27PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        matches!(*self, Self::EN)
    }
}
#[doc = r" Proxy"]
pub struct _PAD27PULLW<'a> {
w: &'a mut W,
}
impl<'a> _PAD27PULLW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PAD27PULLW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pullup disabled value."]
#[inline]
pub fn dis(self) -> &'a mut W {
self.variant(PAD27PULLW::DIS)
}
#[doc = "Pullup enabled value."]
#[inline]
pub fn en(self) -> &'a mut W {
self.variant(PAD27PULLW::EN)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
// ---- PAD26FNCSEL (bits 19:21) write-side accessors; svd2rust-generated ----
// All 8 possible 3-bit patterns have a named variant, so `bits` is safe.
#[doc = "Values that can be written to the field `PAD26FNCSEL`"]
pub enum PAD26FNCSELW {
    #[doc = "Configure as the external HFRC oscillator input value."]
    EXTHF,
    #[doc = "IOM/MSPI nCE group 26 value."]
    NCE26,
    #[doc = "CTIMER connection 3 value."]
    CT3,
    #[doc = "Configure as GPIO26 value."]
    GPIO26,
    #[doc = "SCARD reset output value."]
    SCCRST,
    #[doc = "MSPI data connection 1 value."]
    MSPI1,
    #[doc = "Configure as UART0 TX output signal value."]
    UART0TX,
    #[doc = "Configure as UART1 CTS input signal value."]
    UA1CTS,
}
impl PAD26FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw 3-bit field value.
    pub fn _bits(&self) -> u8 {
        match *self {
            PAD26FNCSELW::EXTHF => 0,
            PAD26FNCSELW::NCE26 => 1,
            PAD26FNCSELW::CT3 => 2,
            PAD26FNCSELW::GPIO26 => 3,
            PAD26FNCSELW::SCCRST => 4,
            PAD26FNCSELW::MSPI1 => 5,
            PAD26FNCSELW::UART0TX => 6,
            PAD26FNCSELW::UA1CTS => 7,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD26FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD26FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD26FNCSELW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Configure as the external HFRC oscillator input value."]
    #[inline]
    pub fn exthf(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::EXTHF)
    }
    #[doc = "IOM/MSPI nCE group 26 value."]
    #[inline]
    pub fn nce26(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::NCE26)
    }
    #[doc = "CTIMER connection 3 value."]
    #[inline]
    pub fn ct3(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::CT3)
    }
    #[doc = "Configure as GPIO26 value."]
    #[inline]
    pub fn gpio26(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::GPIO26)
    }
    #[doc = "SCARD reset output value."]
    #[inline]
    pub fn sccrst(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::SCCRST)
    }
    #[doc = "MSPI data connection 1 value."]
    #[inline]
    pub fn mspi1(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::MSPI1)
    }
    #[doc = "Configure as UART0 TX output signal value."]
    #[inline]
    pub fn uart0tx(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::UART0TX)
    }
    #[doc = "Configure as UART1 CTS input signal value."]
    #[inline]
    pub fn ua1cts(self) -> &'a mut W {
        self.variant(PAD26FNCSELW::UA1CTS)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the 3-bit field at OFFSET in the staged `W.bits`.
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 19;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD26STRNG (bit 18) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD26STRNG`"]
pub enum PAD26STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD26STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD26STRNGW::LOW => false,
            PAD26STRNGW::HIGH => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD26STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD26STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD26STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD26STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD26STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 18;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD26INPEN (bit 17) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD26INPEN`"]
pub enum PAD26INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD26INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD26INPENW::DIS => false,
            PAD26INPENW::EN => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD26INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD26INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD26INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD26INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD26INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 17;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD26PULL (bit 16) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD26PULL`"]
pub enum PAD26PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD26PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD26PULLW::DIS => false,
            PAD26PULLW::EN => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD26PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD26PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD26PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD26PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD26PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 16;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD25RSEL (bits 14:15) write-side accessors; svd2rust-generated ----
// All 4 possible 2-bit patterns have a named variant, so `bits` is safe.
#[doc = "Values that can be written to the field `PAD25RSEL`"]
pub enum PAD25RSELW {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD25RSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw 2-bit field value.
    pub fn _bits(&self) -> u8 {
        match *self {
            PAD25RSELW::PULL1_5K => 0,
            PAD25RSELW::PULL6K => 1,
            PAD25RSELW::PULL12K => 2,
            PAD25RSELW::PULL24K => 3,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD25RSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD25RSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD25RSELW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Pullup is ~1.5 KOhms value."]
    #[inline]
    pub fn pull1_5k(self) -> &'a mut W {
        self.variant(PAD25RSELW::PULL1_5K)
    }
    #[doc = "Pullup is ~6 KOhms value."]
    #[inline]
    pub fn pull6k(self) -> &'a mut W {
        self.variant(PAD25RSELW::PULL6K)
    }
    #[doc = "Pullup is ~12 KOhms value."]
    #[inline]
    pub fn pull12k(self) -> &'a mut W {
        self.variant(PAD25RSELW::PULL12K)
    }
    #[doc = "Pullup is ~24 KOhms value."]
    #[inline]
    pub fn pull24k(self) -> &'a mut W {
        self.variant(PAD25RSELW::PULL24K)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the 2-bit field at OFFSET in the staged `W.bits`.
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 3;
        const OFFSET: u8 = 14;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD25FNCSEL (bits 11:13) write-side accessors; svd2rust-generated ----
// Only 6 of the 8 possible 3-bit patterns have a named variant, so `bits`
// is `unsafe`: the caller must guarantee a valid pattern. `variant` is safe
// because it can only produce the enumerated values.
#[doc = "Values that can be written to the field `PAD25FNCSEL`"]
pub enum PAD25FNCSELW {
    #[doc = "Configure as UART1 RX input signal value."]
    UART1RX,
    #[doc = "IOM/MSPI nCE group 25 value."]
    NCE25,
    #[doc = "CTIMER connection 1 value."]
    CT1,
    #[doc = "Configure as GPIO25 value."]
    GPIO25,
    #[doc = "Configure as the IOMSTR2 I2C SDA or SPI WIR3 signal value."]
    M2SDAWIR3,
    #[doc = "Configure as the IOMSTR2 SPI MISO input signal value."]
    M2MISO,
}
impl PAD25FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw 3-bit field value (6 and 7 are unused).
    pub fn _bits(&self) -> u8 {
        match *self {
            PAD25FNCSELW::UART1RX => 0,
            PAD25FNCSELW::NCE25 => 1,
            PAD25FNCSELW::CT1 => 2,
            PAD25FNCSELW::GPIO25 => 3,
            PAD25FNCSELW::M2SDAWIR3 => 4,
            PAD25FNCSELW::M2MISO => 5,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD25FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD25FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD25FNCSELW) -> &'a mut W {
        // SAFETY: `_bits` only yields the enumerated, valid field values.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Configure as UART1 RX input signal value."]
    #[inline]
    pub fn uart1rx(self) -> &'a mut W {
        self.variant(PAD25FNCSELW::UART1RX)
    }
    #[doc = "IOM/MSPI nCE group 25 value."]
    #[inline]
    pub fn nce25(self) -> &'a mut W {
        self.variant(PAD25FNCSELW::NCE25)
    }
    #[doc = "CTIMER connection 1 value."]
    #[inline]
    pub fn ct1(self) -> &'a mut W {
        self.variant(PAD25FNCSELW::CT1)
    }
    #[doc = "Configure as GPIO25 value."]
    #[inline]
    pub fn gpio25(self) -> &'a mut W {
        self.variant(PAD25FNCSELW::GPIO25)
    }
    #[doc = "Configure as the IOMSTR2 I2C SDA or SPI WIR3 signal value."]
    #[inline]
    pub fn m2sdawir3(self) -> &'a mut W {
        self.variant(PAD25FNCSELW::M2SDAWIR3)
    }
    #[doc = "Configure as the IOMSTR2 SPI MISO input signal value."]
    #[inline]
    pub fn m2miso(self) -> &'a mut W {
        self.variant(PAD25FNCSELW::M2MISO)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the 3-bit field at OFFSET in the staged `W.bits`.
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 11;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD25STRNG (bit 10) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD25STRNG`"]
pub enum PAD25STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD25STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD25STRNGW::LOW => false,
            PAD25STRNGW::HIGH => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD25STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD25STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD25STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD25STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD25STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 10;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD25INPEN (bit 9) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD25INPEN`"]
pub enum PAD25INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD25INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD25INPENW::DIS => false,
            PAD25INPENW::EN => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD25INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD25INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD25INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD25INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD25INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 9;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD25PULL (bit 8) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD25PULL`"]
pub enum PAD25PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD25PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD25PULLW::DIS => false,
            PAD25PULLW::EN => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD25PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD25PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD25PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD25PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD25PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 8;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD24FNCSEL (bits 3:5) write-side accessors; svd2rust-generated ----
// All 8 possible 3-bit patterns have a named variant, so `bits` is safe.
#[doc = "Values that can be written to the field `PAD24FNCSEL`"]
pub enum PAD24FNCSELW {
    #[doc = "Configure as UART1 TX output signal value."]
    UART1TX,
    #[doc = "IOM/MSPI nCE group 24 value."]
    NCE24,
    #[doc = "MSPI data connection 8 value."]
    MSPI8,
    #[doc = "Configure as GPIO24 value."]
    GPIO24,
    #[doc = "Configure as UART0 CTS input signal value."]
    UA0CTS,
    #[doc = "CTIMER connection 21 value."]
    CT21,
    #[doc = "Configure as the 32kHz crystal output signal value."]
    _32KHZXT,
    #[doc = "Configure as the serial trace data output signal value."]
    SWO,
}
impl PAD24FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw 3-bit field value.
    pub fn _bits(&self) -> u8 {
        match *self {
            PAD24FNCSELW::UART1TX => 0,
            PAD24FNCSELW::NCE24 => 1,
            PAD24FNCSELW::MSPI8 => 2,
            PAD24FNCSELW::GPIO24 => 3,
            PAD24FNCSELW::UA0CTS => 4,
            PAD24FNCSELW::CT21 => 5,
            PAD24FNCSELW::_32KHZXT => 6,
            PAD24FNCSELW::SWO => 7,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD24FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD24FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD24FNCSELW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Configure as UART1 TX output signal value."]
    #[inline]
    pub fn uart1tx(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::UART1TX)
    }
    #[doc = "IOM/MSPI nCE group 24 value."]
    #[inline]
    pub fn nce24(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::NCE24)
    }
    #[doc = "MSPI data connection 8 value."]
    #[inline]
    pub fn mspi8(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::MSPI8)
    }
    #[doc = "Configure as GPIO24 value."]
    #[inline]
    pub fn gpio24(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::GPIO24)
    }
    #[doc = "Configure as UART0 CTS input signal value."]
    #[inline]
    pub fn ua0cts(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::UA0CTS)
    }
    #[doc = "CTIMER connection 21 value."]
    #[inline]
    pub fn ct21(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::CT21)
    }
    #[doc = "Configure as the 32kHz crystal output signal value."]
    #[inline]
    pub fn _32k_hz_xt(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::_32KHZXT)
    }
    #[doc = "Configure as the serial trace data output signal value."]
    #[inline]
    pub fn swo(self) -> &'a mut W {
        self.variant(PAD24FNCSELW::SWO)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the 3-bit field at OFFSET in the staged `W.bits`.
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 3;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD24STRNG (bit 2) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD24STRNG`"]
pub enum PAD24STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD24STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD24STRNGW::LOW => false,
            PAD24STRNGW::HIGH => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD24STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD24STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD24STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD24STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD24STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 2;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD24INPEN (bit 1) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD24INPEN`"]
pub enum PAD24INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD24INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD24INPENW::DIS => false,
            PAD24INPENW::EN => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD24INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD24INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD24INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD24INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD24INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 1;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// ---- PAD24PULL (bit 0) write-side accessors; svd2rust-generated ----
#[doc = "Values that can be written to the field `PAD24PULL`"]
pub enum PAD24PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD24PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    // Maps each variant to the raw bit value written into the register.
    pub fn _bits(&self) -> bool {
        match *self {
            PAD24PULLW::DIS => false,
            PAD24PULLW::EN => true,
        }
    }
}
#[doc = r" Proxy"]
// Borrows the register writer `W`; methods return `&'a mut W` for chaining.
pub struct _PAD24PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD24PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD24PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD24PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD24PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // Read-modify-write of the single bit at OFFSET in the staged `W.bits`.
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// Read-side accessors for the register: each method extracts one field
// from the cached `bits` snapshot. svd2rust-generated; keep token-stable.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 30:31 - Pad 27 pullup resistor selection."]
    #[inline]
    pub fn pad27rsel(&self) -> PAD27RSELR {
        PAD27RSELR::_from({
            const MASK: u8 = 3;
            const OFFSET: u8 = 30;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bits 27:29 - Pad 27 function select"]
    #[inline]
    pub fn pad27fncsel(&self) -> PAD27FNCSELR {
        PAD27FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 27;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 26 - Pad 27 drive strength"]
    #[inline]
    pub fn pad27strng(&self) -> PAD27STRNGR {
        PAD27STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 26;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 25 - Pad 27 input enable"]
    #[inline]
    pub fn pad27inpen(&self) -> PAD27INPENR {
        PAD27INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 25;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 24 - Pad 27 pullup enable"]
    #[inline]
    pub fn pad27pull(&self) -> PAD27PULLR {
        PAD27PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 24;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 19:21 - Pad 26 function select"]
    #[inline]
    pub fn pad26fncsel(&self) -> PAD26FNCSELR {
        PAD26FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 19;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 18 - Pad 26 drive strength"]
    #[inline]
    pub fn pad26strng(&self) -> PAD26STRNGR {
        PAD26STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 18;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 17 - Pad 26 input enable"]
    #[inline]
    pub fn pad26inpen(&self) -> PAD26INPENR {
        PAD26INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 17;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 16 - Pad 26 pullup enable"]
    #[inline]
    pub fn pad26pull(&self) -> PAD26PULLR {
        PAD26PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 16;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 14:15 - Pad 25 pullup resistor selection."]
    #[inline]
    pub fn pad25rsel(&self) -> PAD25RSELR {
        PAD25RSELR::_from({
            const MASK: u8 = 3;
            const OFFSET: u8 = 14;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bits 11:13 - Pad 25 function select"]
    #[inline]
    pub fn pad25fncsel(&self) -> PAD25FNCSELR {
        PAD25FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 11;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 10 - Pad 25 drive strength"]
    #[inline]
    pub fn pad25strng(&self) -> PAD25STRNGR {
        PAD25STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 10;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 9 - Pad 25 input enable"]
    #[inline]
    pub fn pad25inpen(&self) -> PAD25INPENR {
        PAD25INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 9;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 8 - Pad 25 pullup enable"]
    #[inline]
    pub fn pad25pull(&self) -> PAD25PULLR {
        PAD25PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 8;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 3:5 - Pad 24 function select"]
    #[inline]
    pub fn pad24fncsel(&self) -> PAD24FNCSELR {
        PAD24FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 3;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 2 - Pad 24 drive strength"]
    #[inline]
    pub fn pad24strng(&self) -> PAD24STRNGR {
        PAD24STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 2;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 1 - Pad 24 input enable"]
    #[inline]
    pub fn pad24inpen(&self) -> PAD24INPENR {
        PAD24INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 0 - Pad 24 pullup enable"]
    #[inline]
    pub fn pad24pull(&self) -> PAD24PULLR {
        PAD24PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
}
// Write-side accessors for the register: each method returns a field proxy
// that mutates the staged `bits` value. svd2rust-generated; keep token-stable.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // 404232216 == 0x1818_0018: the hardware reset state of this register.
        W { bits: 404232216 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 30:31 - Pad 27 pullup resistor selection."]
    #[inline]
    pub fn pad27rsel(&mut self) -> _PAD27RSELW {
        _PAD27RSELW { w: self }
    }
    #[doc = "Bits 27:29 - Pad 27 function select"]
    #[inline]
    pub fn pad27fncsel(&mut self) -> _PAD27FNCSELW {
        _PAD27FNCSELW { w: self }
    }
    #[doc = "Bit 26 - Pad 27 drive strength"]
    #[inline]
    pub fn pad27strng(&mut self) -> _PAD27STRNGW {
        _PAD27STRNGW { w: self }
    }
    #[doc = "Bit 25 - Pad 27 input enable"]
    #[inline]
    pub fn pad27inpen(&mut self) -> _PAD27INPENW {
        _PAD27INPENW { w: self }
    }
    #[doc = "Bit 24 - Pad 27 pullup enable"]
    #[inline]
    pub fn pad27pull(&mut self) -> _PAD27PULLW {
        _PAD27PULLW { w: self }
    }
    #[doc = "Bits 19:21 - Pad 26 function select"]
    #[inline]
    pub fn pad26fncsel(&mut self) -> _PAD26FNCSELW {
        _PAD26FNCSELW { w: self }
    }
    #[doc = "Bit 18 - Pad 26 drive strength"]
    #[inline]
    pub fn pad26strng(&mut self) -> _PAD26STRNGW {
        _PAD26STRNGW { w: self }
    }
    #[doc = "Bit 17 - Pad 26 input enable"]
    #[inline]
    pub fn pad26inpen(&mut self) -> _PAD26INPENW {
        _PAD26INPENW { w: self }
    }
    #[doc = "Bit 16 - Pad 26 pullup enable"]
    #[inline]
    pub fn pad26pull(&mut self) -> _PAD26PULLW {
        _PAD26PULLW { w: self }
    }
    #[doc = "Bits 14:15 - Pad 25 pullup resistor selection."]
    #[inline]
    pub fn pad25rsel(&mut self) -> _PAD25RSELW {
        _PAD25RSELW { w: self }
    }
    #[doc = "Bits 11:13 - Pad 25 function select"]
    #[inline]
    pub fn pad25fncsel(&mut self) -> _PAD25FNCSELW {
        _PAD25FNCSELW { w: self }
    }
    #[doc = "Bit 10 - Pad 25 drive strength"]
    #[inline]
    pub fn pad25strng(&mut self) -> _PAD25STRNGW {
        _PAD25STRNGW { w: self }
    }
    #[doc = "Bit 9 - Pad 25 input enable"]
    #[inline]
    pub fn pad25inpen(&mut self) -> _PAD25INPENW {
        _PAD25INPENW { w: self }
    }
    #[doc = "Bit 8 - Pad 25 pullup enable"]
    #[inline]
    pub fn pad25pull(&mut self) -> _PAD25PULLW {
        _PAD25PULLW { w: self }
    }
    #[doc = "Bits 3:5 - Pad 24 function select"]
    #[inline]
    pub fn pad24fncsel(&mut self) -> _PAD24FNCSELW {
        _PAD24FNCSELW { w: self }
    }
    #[doc = "Bit 2 - Pad 24 drive strength"]
    #[inline]
    pub fn pad24strng(&mut self) -> _PAD24STRNGW {
        _PAD24STRNGW { w: self }
    }
    #[doc = "Bit 1 - Pad 24 input enable"]
    #[inline]
    pub fn pad24inpen(&mut self) -> _PAD24INPENW {
        _PAD24INPENW { w: self }
    }
    #[doc = "Bit 0 - Pad 24 pullup enable"]
    #[inline]
    pub fn pad24pull(&mut self) -> _PAD24PULLW {
        _PAD24PULLW { w: self }
    }
}
|
// Vicfred
// https://atcoder.jp/contests/abc160/tasks/abc160_a
// implementation
use std::io;
/// ABC160 A ("Coffee"): a 6-character string is accepted when its 3rd and 4th
/// characters are equal and its 5th and 6th characters are equal.
/// Reads one line from stdin and prints "Yes" or "No".
fn main() {
    let mut s = String::new();
    io::stdin().read_line(&mut s).unwrap();
    // Compare raw bytes: the input is ASCII, so byte equality is character
    // equality and we avoid collecting into a Vec<char>. Also fixes the
    // clippy::op_ref lint (`&s[2] == &s[3]`).
    let s = s.trim().as_bytes();
    // 0-indexed positions 2/3 and 4/5 are the 3rd/4th and 5th/6th characters.
    if s[2] == s[3] && s[4] == s[5] {
        println!("Yes");
    } else {
        println!("No");
    }
}
|
use crate::{
nla::{NlaBuffer, NlasIterator},
DecodeError, Index, Rest,
};
// Byte offset of the rtgen family field within the NSID message header.
const RTGEN_FAMILY: Index = 0;
// const PADDING: Field = 1..4;
// Attributes (NLAs) start after the 4-byte header (family byte + 3 padding bytes).
const ATTRIBUTES: Rest = 4..;
// Minimum valid buffer length: everything before the attribute section.
pub const NSID_HEADER_LEN: usize = ATTRIBUTES.start;
/// Wrapper over a raw byte buffer that provides structured access to an
/// NSID netlink message (header byte plus attribute payload).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct NsIdBuffer<T> {
    buffer: T,
}
impl<T: AsRef<[u8]>> NsIdBuffer<T> {
pub fn new(buffer: T) -> NsIdBuffer<T> {
NsIdBuffer { buffer }
}
/// Consume the packet, returning the underlying buffer.
pub fn into_inner(self) -> T {
self.buffer
}
pub fn new_checked(buffer: T) -> Result<NsIdBuffer<T>, DecodeError> {
let packet = Self::new(buffer);
packet.check_buffer_length()?;
Ok(packet)
}
fn check_buffer_length(&self) -> Result<(), DecodeError> {
let len = self.buffer.as_ref().len();
if len < NSID_HEADER_LEN {
Err(format!(
"invalid NsIdBuffer: length is {} but NsIdBuffer are at least {} bytes",
len, NSID_HEADER_LEN
)
.into())
} else {
Ok(())
}
}
/// Return the rtgen family field
pub fn rtgen_family(&self) -> u8 {
let data = self.buffer.as_ref();
data[RTGEN_FAMILY]
}
}
impl<'a, T: AsRef<[u8]> + ?Sized> NsIdBuffer<&'a T> {
    /// Return a pointer to the payload.
    pub fn payload(&self) -> &'a [u8] {
        &self.buffer.as_ref()[ATTRIBUTES]
    }
    /// Iterate over the netlink attributes carried in the payload.
    pub fn nlas(&self) -> impl Iterator<Item = Result<NlaBuffer<&'a [u8]>, DecodeError>> {
        NlasIterator::new(self.payload())
    }
}
impl<'a, T: AsRef<[u8]> + AsMut<[u8]> + ?Sized> NsIdBuffer<&'a mut T> {
    /// Return a mutable pointer to the payload.
    pub fn payload_mut(&mut self) -> &mut [u8] {
        &mut self.buffer.as_mut()[ATTRIBUTES]
    }
}
impl<T: AsRef<[u8]> + AsMut<[u8]>> NsIdBuffer<T> {
    /// set the rtgen family field
    pub fn set_rtgen_family(&mut self, value: u8) {
        self.buffer.as_mut()[RTGEN_FAMILY] = value;
    }
}
|
use super::archive_schema::Archive;
use super::paths::to_absolute;
use super::setup_archive::setup_archive;
use super::utils::Arguments;
use std::ffi::OsStr;
use std::fs::create_dir_all;
use std::io::{
Error as ioError,
Result as ioResult,
};
use std::path::Path;
use std::path::PathBuf;
/// Return `true` only for the exact string `"true"`; any other input
/// (including `"True"` or `"1"`) is `false`.
#[inline]
fn string_to_bool(input: &str) -> bool {
    // `matches!` against a single literal is just equality; also drops the
    // non-idiomatic trailing `return` (clippy::needless_return).
    input == "true"
}
/// Helper function to make code more clean
#[inline]
fn process_paths<T: AsRef<Path>>(val: T) -> ioResult<PathBuf> {
    // Resolve `val` against the current working directory.
    let cwd = std::env::current_dir()?;
    to_absolute(cwd.as_path(), val.as_ref())
}
/// Process input to useable PathBuf for temporary directory
///
/// Falls back to the system temp dir when no value is given. If the resolved
/// path exists but is a file, its parent directory is used instead. Paths too
/// close to the filesystem root get a dedicated `ytdl-rust` subdirectory
/// (created here) so later cleanup cannot touch unrelated files.
fn get_tmp_path(val: Option<&OsStr>) -> ioResult<PathBuf> {
    // `match` instead of block-wrapped if/else; trailing `return`s dropped
    // (clippy::needless_return).
    let mut ret_path = process_paths(match val {
        Some(path) => PathBuf::from(path),
        None => std::env::temp_dir(),
    })?;
    if ret_path.exists() && !ret_path.is_dir() {
        debug!("Temporary path exists, but is not an directory");
        ret_path.pop();
    }
    // its "3" because "/" is an ancestor and "tmp" is an ancestor
    if ret_path.ancestors().count() < 3 {
        debug!(
            "Adding another directory to YTDL_TMP, original: \"{}\"",
            ret_path.display()
        );
        ret_path = ret_path.join("ytdl-rust");
        create_dir_all(&ret_path)?;
    }
    Ok(ret_path)
}
/// Process input to useable Archive
///
/// Falls back to `<config_dir>/ytdl_archive.json` when no value is given.
///
/// # Panics
/// Panics when no default config directory can be determined for the platform
/// and no explicit path was supplied.
fn get_config_path(val: Option<&OsStr>) -> ioResult<Option<Archive>> {
    // `match` instead of block-wrapped if/else; trailing `return` dropped
    // (clippy::needless_return).
    let archive_path = process_paths(match val {
        Some(path) => PathBuf::from(path),
        None => dirs_next::config_dir()
            .expect("Could not find an Default Config Directory")
            .join("ytdl_archive.json"),
    })?;
    Ok(setup_archive(archive_path))
}
/// Process input to useable PathBuf for Output
///
/// Falls back to `<download_dir>/ytdl-out` (or `./ytdl-out` when no download
/// directory exists, e.g. in CI). If the resolved path exists but is a file,
/// its parent directory is used instead.
fn get_output_path(val: Option<&OsStr>) -> ioResult<PathBuf> {
    // Dropped the `return` inside the closure and the trailing `return`
    // (clippy::needless_return); `match` instead of block-wrapped if/else.
    let mut ret_path = process_paths(match val {
        Some(path) => PathBuf::from(path),
        None => dirs_next::download_dir()
            .unwrap_or_else(|| PathBuf::from("."))
            .join("ytdl-out"),
    })?;
    if ret_path.exists() && !ret_path.is_dir() {
        debug!("Output path exists, but is not an directory");
        ret_path.pop();
    }
    Ok(ret_path)
}
/// Setup clap-arguments
///
/// Builds the runtime `Arguments` from parsed CLI matches: resolves the
/// output, temporary and archive paths, copies flag states, and collects
/// everything after `--` as extra arguments for the downloader.
///
/// Exits the process with code 2 when no URL was provided.
pub fn setup_args(cli_matches: &clap::ArgMatches) -> Result<Arguments, ioError> {
    let mut args = Arguments {
        out: get_output_path(cli_matches.value_of_os("out"))?,
        tmp: get_tmp_path(cli_matches.value_of_os("tmp"))?,
        // Empty string sentinel; checked (and rejected) below.
        url: cli_matches.value_of("URL").unwrap_or("").to_owned(),
        archive: get_config_path(cli_matches.value_of_os("archive"))?,
        audio_only: cli_matches.is_present("audio_only"),
        debug: cli_matches.is_present("debug"),
        disable_cleanup: cli_matches.is_present("disablecleanup"),
        disable_re_thumbnail: cli_matches.is_present("disableeditorthumbnail"),
        // NOTE(review): these unwraps assume clap declares defaults for
        // "askedit" and "editor" in cli.yml — otherwise they panic; confirm.
        askedit: string_to_bool(cli_matches.value_of("askedit").unwrap()),
        editor: cli_matches.value_of("editor").unwrap().to_owned(),
        extra_args: cli_matches
            .values_of("ytdlargs") // get all values after "--"
            .map(|v| return v.collect::<Vec<&str>>()) // because "clap::Values" is an iterator, collect it all as Vec<&str>
            .unwrap_or_default() // unwrap the Option<Vec<&str>> or create a new Vec
            .iter() // Convert the Vec<&str> to an iterator
            .map(|v| return String::from(*v)) // Map every value to String (de-referencing because otherwise it would be "&&str")
            .collect(), // Collect it again as Vec<String>
    };
    if args.url.is_empty() {
        println!("URL is required!");
        std::process::exit(2);
    }
    // Always request the thumbnail so it can be embedded later.
    args.extra_args.push("--write-thumbnail".to_owned());
    return Ok(args);
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    // TODO: Enable this test when upgrading to clap 3.x
    #[ignore = "https://github.com/clap-rs/clap/issues/2491"]
    // Verifies the defaults produced by `setup_args` when only a URL is given.
    fn test_everything_default() {
        let args = vec!["bin", "SomeURL"];
        let yml = clap::load_yaml!("../cli.yml");
        let cli_matches = clap::App::from_yaml(yml).get_matches_from(args);
        let arguments = setup_args(&cli_matches).unwrap();
        let download_dir = dirs_next::download_dir();
        // this is because when used on desktop sessions, there is a download dir, while in something like ci there is not
        if let Some(path) = download_dir {
            assert_eq!(path.join("ytdl-out"), arguments.out);
        } else {
            assert_eq!(std::env::current_dir().unwrap().join("ytdl-out"), arguments.out);
        }
        assert_eq!(PathBuf::from("/tmp/ytdl-rust"), arguments.tmp);
        assert_eq!("SomeURL", arguments.url);
        assert!(arguments.extra_args.is_empty());
        assert!(!arguments.audio_only);
        assert!(!arguments.debug);
        assert!(!arguments.disable_cleanup);
        assert!(!arguments.disable_re_thumbnail);
        assert!(arguments.archive.is_some());
        assert!(arguments.askedit);
        assert!(arguments.editor.is_empty());
    }
    #[test]
    // "/tmp" has only 2 ancestors, so get_tmp_path must append "ytdl-rust".
    fn test_arguments_tmp_add_ancestor() {
        let args = vec!["bin", "--tmp", "/tmp", "SomeURL"];
        let yml = clap::load_yaml!("../cli.yml");
        let cli_matches = clap::App::from_yaml(yml).get_matches_from(args);
        let archive = setup_args(&cli_matches).unwrap();
        assert_eq!(PathBuf::from("/tmp/ytdl-rust"), archive.tmp);
    }
}
|
use crate::entities::aggregation::NewAggregationStrategy;
use chrono::{DateTime, Utc};
/// A bare measurement value as persisted in storage (no timestamp attached;
/// presumably the time is tracked by the storage layer — TODO confirm).
#[derive(Serialize, Deserialize, PartialEq, Debug)]
pub struct StoragePoint {
    pub value: f64,
}
/// Data at a specific time — GraphQL *output* object.
#[derive(Serialize, Deserialize, PartialEq, Debug, GraphQLObject)]
#[graphql(description = "Data at a specific time")]
pub struct Point {
    pub time: DateTime<Utc>,
    pub value: f64,
}
/// Data at a specific time — GraphQL *input* object (same shape as [`Point`]).
#[derive(Serialize, Deserialize, PartialEq, Debug, GraphQLInputObject)]
#[graphql(description = "Data at a specific time")]
pub struct NewPoint {
    pub time: DateTime<Utc>,
    pub value: f64,
}
/// Options for point queries: optional `since`/`until` time bounds and an
/// optional aggregation strategy. The `Default` is all-`None`.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone, GraphQLInputObject, Default)]
pub struct QueryOptions {
    pub since: Option<DateTime<Utc>>,
    pub until: Option<DateTime<Utc>>,
    pub aggregate: Option<NewAggregationStrategy>,
}
impl QueryOptions {
pub fn with<F>(setter: F) -> QueryOptions
where
F: FnOnce(&mut QueryOptions) -> (),
{
let mut options: QueryOptions = Default::default();
setter(&mut options);
options
}
}
|
/* Redis Glue provides abstractions over single and cluster mode Redis interactions
* Copyright 2021 Aravinth Manivannan <realaravinth@batsense.net>
*
* Licensed under the Apache License, Version 2.0 (the "License") or MIT
*/
//! Redis Client/Connection manager that can handle both single and clustered Redis Instances
use std::cell::RefCell;
use std::rc::Rc;
use redis::cluster::ClusterClient;
use redis::Client;
use redis::FromRedisValue;
use redis::RedisResult;
use redis::{aio::Connection, cluster::ClusterConnection};
pub use redis;
/// Client configuration
#[derive(Clone)]
pub enum RedisConfig {
    /// Redis server URL, e.g. `redis://127.0.0.1`
    Single(String),
    /// List of URL of Redis nodes in cluster mode
    Cluster(Vec<String>),
}
impl RedisConfig {
    /// Create Redis connection
    ///
    /// # Panics
    /// Both arms `unwrap()` the client `open` result, so this panics when the
    /// configured URL(s) cannot be parsed into a client.
    pub fn connect(&self) -> RedisClient {
        match self {
            Self::Single(url) => {
                let client = Client::open(url.as_str()).unwrap();
                RedisClient::Single(client)
            }
            Self::Cluster(nodes) => {
                // ClusterClient::open takes ownership of the node list
                let cluster_client = ClusterClient::open(nodes.to_owned()).unwrap();
                RedisClient::Cluster(cluster_client)
            }
        }
    }
}
/// Redis connection - manages both single and clustered deployments
///
/// NOTE: the connection is wrapped in `Rc<RefCell<..>>`, so this type is
/// single-threaded (`Rc` is not `Send`); clones share the same connection.
#[derive(Clone)]
pub enum RedisConnection {
    Single(Rc<RefCell<Connection>>),
    Cluster(Rc<RefCell<ClusterConnection>>),
}
impl RedisConnection {
    #[inline]
    /// Get client. Uses interior mutability, so lookout for panics
    /// (a `RefCell` borrow while another borrow is live panics at runtime)
    pub fn get_client(&self) -> Self {
        match self {
            Self::Single(con) => Self::Single(Rc::clone(&con)),
            Self::Cluster(con) => Self::Cluster(Rc::clone(&con)),
        }
    }
    #[inline]
    /// execute a redis command against a [Self]
    ///
    /// NOTE(review): the cluster arm uses the synchronous `query`, so it can
    /// block the async executor while the command is in flight.
    pub async fn exec<T: FromRedisValue>(&self, cmd: &mut redis::Cmd) -> redis::RedisResult<T> {
        match self {
            RedisConnection::Single(con) => cmd.query_async(&mut *con.borrow_mut()).await,
            RedisConnection::Cluster(con) => cmd.query(&mut *con.borrow_mut()),
        }
    }
    /// `PING` the server and check for the expected `PONG` status reply.
    pub async fn ping(&self) -> bool {
        if let Ok(redis::Value::Status(v)) = self.exec(&mut redis::cmd("PING")).await {
            v == "PONG"
        } else {
            false
        }
    }
}
#[derive(Clone)]
/// Client Configuration that can be used to get new connection should [RedisConnection] fail
pub enum RedisClient {
    Single(Client),
    Cluster(ClusterClient),
}
/// A Redis Client Object that encapsulates [RedisClient] and [RedisConnection].
/// Use this when you need a Redis Client
#[derive(Clone)]
pub struct Redis {
    // kept so a fresh connection could be created if the current one fails
    _client: RedisClient,
    // the live connection, shared via Rc (single-threaded)
    connection: RedisConnection,
}
impl Redis {
    /// create new [Redis]. Will try to connect to Redis instance specified in [RedisConfig]
    pub async fn new(redis: RedisConfig) -> RedisResult<Self> {
        let (_client, connection) = Self::connect(redis).await?;
        let master = Self {
            _client,
            connection,
        };
        Ok(master)
    }
    /// Get client to do interact with Redis server.
    ///
    /// Uses Interior mutability so look out for panics
    pub fn get_client(&self) -> RedisConnection {
        self.connection.get_client()
    }
    /// Open the underlying connection: async for single-node, blocking for
    /// cluster (only a synchronous cluster connection is used here).
    async fn connect(redis: RedisConfig) -> RedisResult<(RedisClient, RedisConnection)> {
        let redis = redis.connect();
        let client = match &redis {
            RedisClient::Single(c) => {
                let con = c.get_async_connection().await?;
                RedisConnection::Single(Rc::new(RefCell::new(con)))
            }
            RedisClient::Cluster(c) => {
                let con = c.get_connection()?;
                RedisConnection::Cluster(Rc::new(RefCell::new(con)))
            }
        };
        Ok((redis, client))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE: these tests require a live Redis server reachable at
    // redis://127.0.0.1 — they are integration tests, not unit tests.
    #[actix_rt::test]
    async fn ping_works() {
        let r = Redis::new(RedisConfig::Single("redis://127.0.0.1".into()))
            .await
            .unwrap();
        assert!(r.get_client().ping().await);
    }
    #[actix_rt::test]
    async fn exec_works() {
        // (key, value) pair used for the SET/GET round-trip
        const VAR: (&str, &str) = ("testval", "4");
        let r = Redis::new(RedisConfig::Single("redis://127.0.0.1".into()))
            .await
            .unwrap();
        let _set: () = r
            .get_client()
            .exec(redis::cmd("SET").arg(&[VAR.0, VAR.1]))
            .await
            .unwrap();
        let get: String = r
            .get_client()
            .exec(redis::cmd("GET").arg(&[VAR.0]))
            .await
            .unwrap();
        assert_eq!(&get, VAR.1);
    }
}
|
//! A re-implementation of the "Datetime" parsing utility from the Taskwarrior
//! source.
// TODO: this module is not yet implemented
pub(crate) struct DateTime {}
impl DateTime {
    /// Parse a datestamp from a prefix of input and return the number of bytes consumed in the
    /// input
    pub(crate) fn parse<S: AsRef<str>>(
        input: S,
        format: &'static str,
    ) -> Option<(DateTime, usize)> {
        // Stub lookup table of recognized datestamps (placeholder until real
        // format-driven parsing is implemented).
        const RECOGNIZED: &[&str] = &[
            "2015",
            "2015-",
            "9th",
            "10th",
            "2015-W01",
            "2015-02-17",
            "2013-11-29T22:58:00Z",
            "315532800",
            "20131129T225800Z",
            "today",
        ];
        let input = input.as_ref();
        // Prefer the longest matching prefix: try full length first, then
        // shrink one byte at a time (skipping non-char-boundary slices).
        (1..=input.len())
            .rev()
            .find(|&len| {
                input
                    .get(..len)
                    .map_or(false, |prefix| RECOGNIZED.contains(&prefix))
            })
            .map(|len| (DateTime {}, len))
    }
}
|
mod farm;
mod plot;
pub(crate) use farm::farm;
pub(crate) use plot::plot;
|
mod test_conds;
mod test_vec2;
mod vec2;
mod conds;
// #[macro_export]
// macro_rules! {
// () => {};
// } |
#[macro_use]
extern crate rustacuda;
use rustacuda::prelude::*;
use rustacuda::memory::DeviceBox;
use std::error::Error;
use std::ffi::CString;
/// Minimal RustaCUDA example: launches the `sum` kernel from
/// `resources/add.ptx` to add two floats on the GPU and prints the result.
fn main() -> Result<(), Box<dyn Error>> {
    // Initialize the CUDA API
    rustacuda::init(CudaFlags::empty())?;
    // Get the first device
    let device = Device::get_device(0)?;
    // Create a context associated to this device
    let _context = Context::create_and_push(
        ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?;
    // Load the module containing the function we want to call
    let module_data = CString::new(include_str!("../resources/add.ptx"))?;
    let module = Module::load_from_string(&module_data)?;
    // Create a stream to submit work to
    let stream = Stream::new(StreamFlags::NON_BLOCKING, None)?;
    // Allocate space on the device and copy numbers to it.
    let mut x = DeviceBox::new(&10.0f32)?;
    let mut y = DeviceBox::new(&20.0f32)?;
    let mut result = DeviceBox::new(&0.0f32)?;
    // Launching kernels is unsafe since Rust can't enforce safety - think of kernel launches
    // as a foreign-function call. In this case, it is - this kernel is written in CUDA C.
    unsafe {
        // Launch the `sum` function with one block containing one thread on the given stream.
        launch!(module.sum<<<1, 1, 0, stream>>>(
            x.as_device_ptr(),
            y.as_device_ptr(),
            result.as_device_ptr(),
            1 // Length
        ))?;
    }
    // The kernel launch is asynchronous, so we wait for the kernel to finish executing
    stream.synchronize()?;
    // Copy the result back to the host
    let mut result_host = 0.0f32;
    result.copy_to(&mut result_host)?;
    println!("Sum is {}", result_host);
    Ok(())
}
|
use super::Code;
/// One parsed unit of a documentation block.
#[derive(Debug)]
pub enum Paragraph {
    /// A plain paragraph of (possibly styled) text.
    Text(Text),
    /// An ordered or unordered list; one `Text` per item.
    List(Vec<Text>),
    /// A code example.
    Code(Code),
    /// A code example marked with the `ignore` class (not expected to compile).
    InvalidCode(Code),
    /// A nested section produced by a heading inside the docblock.
    SubSection(Box<Section>),
}
/// A titled chunk of documentation containing paragraphs and subsections.
#[derive(Debug)]
pub struct Section {
    // `None` for the implicit leading section before the first heading
    title: Option<Text>,
    content: Vec<Paragraph>,
}
/// A run of text with uniform styling (code/italic/bold/link).
#[derive(Debug, Clone)]
pub struct TextComponent {
    text: String,
    code: bool,
    italic: bool,
    bold: bool,
    // `href` of the innermost enclosing `<a>`, if any
    link: Option<String>,
}
/// Styled text: an ordered sequence of uniformly-styled components.
#[derive(Debug)]
pub struct Text {
    components: Vec<TextComponent>,
}
impl Section {
    /// The section heading, if any.
    pub fn title(&self) -> Option<&Text> {
        self.title.as_ref()
    }
    /// The paragraphs (and nested subsections) of this section.
    pub fn content(&self) -> &[Paragraph] {
        &self.content
    }
}
impl TextComponent {
    /// The raw text of this run.
    pub fn text(&self) -> &str {
        &self.text
    }
    /// True when the run is inside at least one `<code>` element.
    pub fn is_code(&self) -> bool {
        self.code
    }
    /// True when the run is inside at least one `<em>` element.
    pub fn is_italic(&self) -> bool {
        self.italic
    }
    /// True when the run is inside at least one `<strong>` element.
    pub fn is_bold(&self) -> bool {
        self.bold
    }
    /// The innermost enclosing link target, if any.
    pub fn link(&self) -> Option<&str> {
        self.link.as_deref()
    }
}
impl Text {
    /// The styled runs making up this text, in document order.
    pub fn components(&self) -> &[TextComponent] {
        &self.components
    }
}
use kuchiki::{NodeRef, NodeData, iter::NodeEdge};
use html5ever::local_name;
/// Flatten an HTML subtree into styled [`Text`].
///
/// Walks the subtree with `traverse`, keeping nesting counters for
/// `<code>`/`<strong>`/`<em>` (a text node gets the style iff the matching
/// counter is positive) and a stack of `<a>` hrefs (innermost link wins).
pub(crate) fn parse_text(node: &NodeRef) -> Text {
    let mut code_stack = 0;
    let mut italic_stack = 0;
    let mut bold_stack = 0;
    let mut link_stack = Vec::new();
    let mut components = Vec::new();
    for edge in node.traverse() {
        match edge {
            NodeEdge::Start(node_start) => match node_start.data() {
                NodeData::Element(element) => match element.name.local {
                    local_name!("code") => code_stack += 1,
                    local_name!("strong") => bold_stack += 1,
                    local_name!("em") => italic_stack += 1,
                    local_name!("a") => {
                        // an <a> without href pushes `None`, shadowing outer links
                        link_stack.push(element.attributes.borrow().get("href").map(str::to_owned))
                    }
                    _ => {}
                },
                NodeData::Text(text) => {
                    components.push(TextComponent {
                        text: text.borrow().clone(),
                        code: code_stack > 0,
                        italic: italic_stack > 0,
                        bold: bold_stack > 0,
                        link: link_stack.last().cloned().flatten(),
                    });
                }
                _ => {}
            },
            NodeEdge::End(node_end) => {
                // closing tags undo the bookkeeping done at the matching Start
                if let Some(element) = node_end.as_element() {
                    match element.name.local {
                        local_name!("code") => code_stack -= 1,
                        local_name!("strong") => bold_stack -= 1,
                        local_name!("em") => italic_stack -= 1,
                        local_name!("a") => {
                            link_stack.pop();
                        }
                        _ => {}
                    }
                }
            }
        }
    }
    Text { components }
}
/// Shared handling for `<h2>`..`<h6>` headings in `parse_docblock`:
/// closes any open sections that are at the same or deeper nesting level
/// than the new heading (folding them into their parents on `$stack`),
/// then starts a fresh section titled with the heading's text.
macro_rules! handle_heading_depth {
    ($depth:literal, $stack:ident, $doc_node:ident, $title:ident, $content:ident) => {{
        // heading depth is $depth - 2, therefore save current state
        // and work on a new state
        if $stack.len() < $depth - 1 {
            $stack.push(Section {
                title: $title,
                content: $content,
            });
        } else if $stack.len() >= $depth - 1 {
            // current section is a sibling or uncle: close it into its parent,
            // then keep folding parents until the stack matches the new depth
            let mut previous_stage = $stack.pop().unwrap();
            previous_stage
                .content
                .push(Paragraph::SubSection(Box::new(Section {
                    title: $title,
                    content: $content,
                })));
            while $stack.len() >= $depth - 1 {
                let mut parent_stage = $stack.pop().unwrap();
                parent_stage
                    .content
                    .push(Paragraph::SubSection(Box::new(previous_stage)));
                previous_stage = parent_stage;
            }
            $stack.push(previous_stage);
        }
        // begin the new section titled by this heading
        $title = Some(parse_text(&$doc_node));
        $content = Vec::new();
    }};
}
/// Parse a rustdoc "docblock" HTML node into a list of top-level [`Section`]s.
///
/// Headings open new sections: `<h1>` closes everything back to the top
/// level, `<h2>`..`<h6>` are handled by `handle_heading_depth!` with a stack
/// of open parent sections. Returns `None` when a `<div>` code block is
/// missing an expected child node or attribute (propagated via `?`).
pub(crate) fn parse_docblock(docblock: &NodeRef) -> Option<Vec<Section>> {
    let mut stack = Vec::new();
    let mut sections = Vec::new();
    let mut content = Vec::new();
    let mut title = None;
    for doc_node in docblock.children() {
        let element = match doc_node.as_element() {
            Some(element) => element,
            None => continue,
        };
        match element.name.local {
            local_name!("p") => {
                let text = parse_text(&doc_node);
                content.push(Paragraph::Text(text));
            }
            local_name!("ul") | local_name!("ol") => {
                // one Text per <li>; empty items are dropped
                let list = doc_node
                    .children()
                    .map(|child| parse_text(&child))
                    .filter(|text| !text.components.is_empty())
                    .collect();
                content.push(Paragraph::List(list));
            }
            local_name!("pre") => {
                let code = doc_node.text_contents();
                content.push(Paragraph::Code(code));
            }
            local_name!("div") => {
                // code examples wrapped in a div; the "ignore" class marks
                // examples that are not expected to compile
                let pre = doc_node.first_child()?;
                let code = pre.text_contents();
                if pre
                    .as_element()?
                    .attributes
                    .borrow()
                    .get("class")?
                    .contains("ignore")
                {
                    content.push(Paragraph::InvalidCode(code));
                } else {
                    content.push(Paragraph::Code(code));
                }
            }
            local_name!("h1") => {
                // a top-level heading: fold all open subsections into their
                // parents and emit the finished section
                if !stack.is_empty() {
                    let mut previous_stage: Section = stack.pop().unwrap();
                    previous_stage
                        .content
                        .push(Paragraph::SubSection(Box::new(Section { title, content })));
                    while !stack.is_empty() {
                        let mut parent_stage = stack.pop().unwrap();
                        parent_stage
                            .content
                            .push(Paragraph::SubSection(Box::new(previous_stage)));
                        previous_stage = parent_stage;
                    }
                    sections.push(previous_stage);
                } else {
                    sections.push(Section { title, content });
                }
                title = Some(parse_text(&doc_node));
                content = Vec::new();
            }
            local_name!("h2") => handle_heading_depth!(2, stack, doc_node, title, content),
            local_name!("h3") => handle_heading_depth!(3, stack, doc_node, title, content),
            local_name!("h4") => handle_heading_depth!(4, stack, doc_node, title, content),
            local_name!("h5") => handle_heading_depth!(5, stack, doc_node, title, content),
            local_name!("h6") => handle_heading_depth!(6, stack, doc_node, title, content),
            _ => {}
        }
    }
    // flush whatever section is still open at the end of input
    if !stack.is_empty() {
        let mut previous_stage = stack.pop().unwrap();
        previous_stage
            .content
            .push(Paragraph::SubSection(Box::new(Section { title, content })));
        while !stack.is_empty() {
            let mut parent_stage = stack.pop().unwrap();
            parent_stage
                .content
                .push(Paragraph::SubSection(Box::new(previous_stage)));
            previous_stage = parent_stage;
        }
        sections.push(previous_stage);
    } else {
        sections.push(Section { title, content });
    }
    Some(sections)
}
#[doc = "Reader of register CH_AL3_READ_ADDR_TRIG"]
pub type R = crate::R<u32, super::CH_AL3_READ_ADDR_TRIG>;
// No field accessors: this register is only read as a raw 32-bit value.
impl R {}
|
/// ViewState: view model and interactions.
// rendering is done in view.rs
pub struct ViewState {
    /// Metric the view is currently sorted by.
    pub sort_by: Metric,
    /// Current sort direction.
    pub sort_dir: Dir,
    /// One-off message shown to the user, if any.
    pub alert: Option<String>,
}
impl Default for ViewState {
    /// Initial state: sorted by CPU, descending, with no alert.
    fn default() -> Self {
        Self {
            sort_by: Metric::Cpu,
            sort_dir: Dir::Desc,
            alert: None,
        }
    }
}
/// Columns the view can be sorted by.
#[derive(Copy, Clone, PartialEq)]
pub enum Metric {
    Pid, // not really a "metric"... rename this?
    Cpu,
    Mem,
    DiskRead,
    DiskWrite,
    DiskTotal,
}
/// Sort direction.
pub enum Dir {
    Asc,
    Desc,
}
impl Dir {
    /// Toggle the direction in place (ascending <-> descending).
    pub fn flip(&mut self) {
        *self = match *self {
            Dir::Asc => Dir::Desc,
            Dir::Desc => Dir::Asc,
        };
    }
}
|
use std::fs::File;
use std::io::prelude::*;
/// Read the entire file at `filename` into a `String`.
///
/// # Panics
/// Panics (with the file name and OS error in the message) when the file
/// cannot be opened or read, or when its content is not valid UTF-8.
pub fn file2str(filename: &str) -> String {
    // `read_to_string` sizes the buffer from file metadata and replaces the
    // manual open-then-read dance; the panic message now names the file.
    std::fs::read_to_string(filename)
        .unwrap_or_else(|err| panic!("error reading file {:?}: {}", filename, err))
}
/// Convert a string of decimal digits into a vector of their numeric values.
///
/// # Panics
/// Panics when `input` contains a character that is not a decimal digit.
pub fn str2vec_u32(input: &str) -> Vec<u32> {
    // iterate the &str directly — the previous `String::from(input)` copy
    // was a needless allocation
    input
        .chars()
        .map(|c| c.to_digit(10).expect("input must contain only decimal digits"))
        .collect()
}
/// Parse a CRLF-separated, space-delimited block of numbers into rows of `u32`.
pub fn str2vecinception(input: &str) -> Vec<Vec<u32>> {
    lines2vecvec(str2linevec(input))
}
/// Split a string into lines on Windows-style `\r\n` separators.
pub fn str2linevec(string: &str) -> Vec<&str> {
    string.split("\r\n").collect()
}
/// Parse each line of space-separated integers into a `Vec<u32>` row.
///
/// Panics (via `unwrap`) on any token that is not a valid `u32`.
pub fn lines2vecvec(lines: Vec<&str>) -> Vec<Vec<u32>> {
    lines
        .into_iter()
        .map(|line| line.split(' ').map(|tok| tok.parse().unwrap()).collect())
        .collect()
}
#[cfg(test)]
mod str2vec_test {
    use super::*;
    /// Helper: parse `input` and compare against the expected digit vector.
    fn str2vec_assert(input: &str, answer: Vec<u32>) {
        let result = str2vec_u32(&input);
        assert_eq!(result, answer)
    }
    #[test]
    fn given_empty_return_empty() {
        str2vec_assert("", vec![])
    }
    #[test]
    fn assert_one() {
        str2vec_assert("1", vec![1])
    }
    #[test]
    fn assert_oneone() {
        str2vec_assert("11", vec![1, 1])
    }
    #[test]
    fn assert_stuff() {
        str2vec_assert("192837465", vec![1, 9, 2, 8, 3, 7, 4, 6, 5])
    }
}
#[cfg(test)]
mod str2vecinception_test {
    use super::*;
    // Space-separated values, CRLF ("\r\n") separated rows
    // (the old comment claimed tab-separated, which the code never was)
    #[test]
    fn assert_1x1() {
        let string = "525";
        let result = str2vecinception(string);
        let expected = [[525]];
        assert_eq!(result, expected);
    }
    #[test]
    fn assert_1x2() {
        let string = "111 222\r\n333 444";
        let result = str2vecinception(string);
        let expected = [[111, 222], [333, 444]];
        assert_eq!(result, expected);
    }
    // was missing `#[test]`, so this case silently never ran; note the name
    // also undersells it — the input is a 3-row jagged matrix
    #[test]
    fn assert_3x3() {
        let string = "1 2\r\n3\r\n4 5";
        let result = str2vecinception(string);
        let expected = vec![vec![1, 2], vec![3], vec![4, 5]];
        assert_eq!(result, expected);
    }
}
|
// Auto-generated (svd2rust-style) reader/writer type aliases for the bit
// fields of the Debug MCU APB1 freeze register (APB1_FZ); each field gets a
// `*_R` single-bit reader and a `*_W` single-bit writer at offset `O`.
#[doc = "Register `APB1_FZ` reader"]
pub type R = crate::R<APB1_FZ_SPEC>;
#[doc = "Register `APB1_FZ` writer"]
pub type W = crate::W<APB1_FZ_SPEC>;
#[doc = "Field `DBG_TIM2_STOP` reader - DBG_TIM2_STOP"]
pub type DBG_TIM2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM2_STOP` writer - DBG_TIM2_STOP"]
pub type DBG_TIM2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM3_STOP` reader - DBG_TIM3_STOP"]
pub type DBG_TIM3_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM3_STOP` writer - DBG_TIM3_STOP"]
pub type DBG_TIM3_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM4_STOP` reader - DBG_TIM4_STOP"]
pub type DBG_TIM4_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM4_STOP` writer - DBG_TIM4_STOP"]
pub type DBG_TIM4_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM5_STOP` reader - DBG_TIM5_STOP"]
pub type DBG_TIM5_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM5_STOP` writer - DBG_TIM5_STOP"]
pub type DBG_TIM5_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM6_STOP` reader - DBG_TIM6_STOP"]
pub type DBG_TIM6_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM6_STOP` writer - DBG_TIM6_STOP"]
pub type DBG_TIM6_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM7_STOP` reader - DBG_TIM7_STOP"]
pub type DBG_TIM7_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM7_STOP` writer - DBG_TIM7_STOP"]
pub type DBG_TIM7_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM12_STOP` reader - DBG_TIM12_STOP"]
pub type DBG_TIM12_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM12_STOP` writer - DBG_TIM12_STOP"]
pub type DBG_TIM12_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM13_STOP` reader - DBG_TIM13_STOP"]
pub type DBG_TIM13_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM13_STOP` writer - DBG_TIM13_STOP"]
pub type DBG_TIM13_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_TIM14_STOP` reader - DBG_TIM14_STOP"]
pub type DBG_TIM14_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM14_STOP` writer - DBG_TIM14_STOP"]
pub type DBG_TIM14_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_LPTIM1_STOP` reader - DBG_LPTIM1_STOP"]
pub type DBG_LPTIM1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_LPTIM1_STOP` writer - DBG_LPTIM1_STOP"]
pub type DBG_LPTIM1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_RTC_STOP` reader - DBG_RTC_STOP"]
pub type DBG_RTC_STOP_R = crate::BitReader;
#[doc = "Field `DBG_RTC_STOP` writer - DBG_RTC_STOP"]
pub type DBG_RTC_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_WWDG_STOP` reader - DBG_WWDG_STOP"]
pub type DBG_WWDG_STOP_R = crate::BitReader;
#[doc = "Field `DBG_WWDG_STOP` writer - DBG_WWDG_STOP"]
pub type DBG_WWDG_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_IWDG_STOP` reader - DBG_IWDG_STOP"]
pub type DBG_IWDG_STOP_R = crate::BitReader;
#[doc = "Field `DBG_IWDG_STOP` writer - DBG_IWDG_STOP"]
pub type DBG_IWDG_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_CAN3_STOP` reader - DBG_CAN3_STOP"]
pub type DBG_CAN3_STOP_R = crate::BitReader;
#[doc = "Field `DBG_CAN3_STOP` writer - DBG_CAN3_STOP"]
pub type DBG_CAN3_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C1_SMBUS_TIMEOUT` reader - DBG_I2C1_SMBUS_TIMEOUT"]
pub type DBG_I2C1_SMBUS_TIMEOUT_R = crate::BitReader;
#[doc = "Field `DBG_I2C1_SMBUS_TIMEOUT` writer - DBG_I2C1_SMBUS_TIMEOUT"]
pub type DBG_I2C1_SMBUS_TIMEOUT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C2_SMBUS_TIMEOUT` reader - DBG_I2C2_SMBUS_TIMEOUT"]
pub type DBG_I2C2_SMBUS_TIMEOUT_R = crate::BitReader;
#[doc = "Field `DBG_I2C2_SMBUS_TIMEOUT` writer - DBG_I2C2_SMBUS_TIMEOUT"]
pub type DBG_I2C2_SMBUS_TIMEOUT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C3_SMBUS_TIMEOUT` reader - DBG_I2C3_SMBUS_TIMEOUT"]
pub type DBG_I2C3_SMBUS_TIMEOUT_R = crate::BitReader;
#[doc = "Field `DBG_I2C3_SMBUS_TIMEOUT` writer - DBG_I2C3_SMBUS_TIMEOUT"]
pub type DBG_I2C3_SMBUS_TIMEOUT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C4_SMBUS_TIMEOUT` reader - DBG_I2C4SMBUS_TIMEOUT"]
pub type DBG_I2C4_SMBUS_TIMEOUT_R = crate::BitReader;
#[doc = "Field `DBG_I2C4_SMBUS_TIMEOUT` writer - DBG_I2C4SMBUS_TIMEOUT"]
pub type DBG_I2C4_SMBUS_TIMEOUT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_CAN1_STOP` reader - DBG_CAN1_STOP"]
pub type DBG_CAN1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_CAN1_STOP` writer - DBG_CAN1_STOP"]
pub type DBG_CAN1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_CAN2_STOP` reader - DBG_CAN2_STOP"]
pub type DBG_CAN2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_CAN2_STOP` writer - DBG_CAN2_STOP"]
pub type DBG_CAN2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: one per single-bit field. Bits 14-20 have no accessor
// (presumably reserved in this register layout — per the device SVD).
impl R {
    #[doc = "Bit 0 - DBG_TIM2_STOP"]
    #[inline(always)]
    pub fn dbg_tim2_stop(&self) -> DBG_TIM2_STOP_R {
        DBG_TIM2_STOP_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - DBG_TIM3_STOP"]
    #[inline(always)]
    pub fn dbg_tim3_stop(&self) -> DBG_TIM3_STOP_R {
        DBG_TIM3_STOP_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - DBG_TIM4_STOP"]
    #[inline(always)]
    pub fn dbg_tim4_stop(&self) -> DBG_TIM4_STOP_R {
        DBG_TIM4_STOP_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - DBG_TIM5_STOP"]
    #[inline(always)]
    pub fn dbg_tim5_stop(&self) -> DBG_TIM5_STOP_R {
        DBG_TIM5_STOP_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - DBG_TIM6_STOP"]
    #[inline(always)]
    pub fn dbg_tim6_stop(&self) -> DBG_TIM6_STOP_R {
        DBG_TIM6_STOP_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - DBG_TIM7_STOP"]
    #[inline(always)]
    pub fn dbg_tim7_stop(&self) -> DBG_TIM7_STOP_R {
        DBG_TIM7_STOP_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - DBG_TIM12_STOP"]
    #[inline(always)]
    pub fn dbg_tim12_stop(&self) -> DBG_TIM12_STOP_R {
        DBG_TIM12_STOP_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - DBG_TIM13_STOP"]
    #[inline(always)]
    pub fn dbg_tim13_stop(&self) -> DBG_TIM13_STOP_R {
        DBG_TIM13_STOP_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - DBG_TIM14_STOP"]
    #[inline(always)]
    pub fn dbg_tim14_stop(&self) -> DBG_TIM14_STOP_R {
        DBG_TIM14_STOP_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - DBG_LPTIM1_STOP"]
    #[inline(always)]
    pub fn dbg_lptim1_stop(&self) -> DBG_LPTIM1_STOP_R {
        DBG_LPTIM1_STOP_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - DBG_RTC_STOP"]
    #[inline(always)]
    pub fn dbg_rtc_stop(&self) -> DBG_RTC_STOP_R {
        DBG_RTC_STOP_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - DBG_WWDG_STOP"]
    #[inline(always)]
    pub fn dbg_wwdg_stop(&self) -> DBG_WWDG_STOP_R {
        DBG_WWDG_STOP_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - DBG_IWDG_STOP"]
    #[inline(always)]
    pub fn dbg_iwdg_stop(&self) -> DBG_IWDG_STOP_R {
        DBG_IWDG_STOP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - DBG_CAN3_STOP"]
    #[inline(always)]
    pub fn dbg_can3_stop(&self) -> DBG_CAN3_STOP_R {
        DBG_CAN3_STOP_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 21 - DBG_I2C1_SMBUS_TIMEOUT"]
    #[inline(always)]
    pub fn dbg_i2c1_smbus_timeout(&self) -> DBG_I2C1_SMBUS_TIMEOUT_R {
        DBG_I2C1_SMBUS_TIMEOUT_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - DBG_I2C2_SMBUS_TIMEOUT"]
    #[inline(always)]
    pub fn dbg_i2c2_smbus_timeout(&self) -> DBG_I2C2_SMBUS_TIMEOUT_R {
        DBG_I2C2_SMBUS_TIMEOUT_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - DBG_I2C3_SMBUS_TIMEOUT"]
    #[inline(always)]
    pub fn dbg_i2c3_smbus_timeout(&self) -> DBG_I2C3_SMBUS_TIMEOUT_R {
        DBG_I2C3_SMBUS_TIMEOUT_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - DBG_I2C4SMBUS_TIMEOUT"]
    #[inline(always)]
    pub fn dbg_i2c4_smbus_timeout(&self) -> DBG_I2C4_SMBUS_TIMEOUT_R {
        DBG_I2C4_SMBUS_TIMEOUT_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - DBG_CAN1_STOP"]
    #[inline(always)]
    pub fn dbg_can1_stop(&self) -> DBG_CAN1_STOP_R {
        DBG_CAN1_STOP_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - DBG_CAN2_STOP"]
    #[inline(always)]
    pub fn dbg_can2_stop(&self) -> DBG_CAN2_STOP_R {
        DBG_CAN2_STOP_R::new(((self.bits >> 26) & 1) != 0)
    }
}
// Write accessors: one single-bit writer per field (const parameter = bit
// offset), plus a raw `bits` escape hatch at the end.
impl W {
    #[doc = "Bit 0 - DBG_TIM2_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim2_stop(&mut self) -> DBG_TIM2_STOP_W<APB1_FZ_SPEC, 0> {
        DBG_TIM2_STOP_W::new(self)
    }
    #[doc = "Bit 1 - DBG_TIM3_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim3_stop(&mut self) -> DBG_TIM3_STOP_W<APB1_FZ_SPEC, 1> {
        DBG_TIM3_STOP_W::new(self)
    }
    #[doc = "Bit 2 - DBG_TIM4_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim4_stop(&mut self) -> DBG_TIM4_STOP_W<APB1_FZ_SPEC, 2> {
        DBG_TIM4_STOP_W::new(self)
    }
    #[doc = "Bit 3 - DBG_TIM5_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim5_stop(&mut self) -> DBG_TIM5_STOP_W<APB1_FZ_SPEC, 3> {
        DBG_TIM5_STOP_W::new(self)
    }
    #[doc = "Bit 4 - DBG_TIM6_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim6_stop(&mut self) -> DBG_TIM6_STOP_W<APB1_FZ_SPEC, 4> {
        DBG_TIM6_STOP_W::new(self)
    }
    #[doc = "Bit 5 - DBG_TIM7_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim7_stop(&mut self) -> DBG_TIM7_STOP_W<APB1_FZ_SPEC, 5> {
        DBG_TIM7_STOP_W::new(self)
    }
    #[doc = "Bit 6 - DBG_TIM12_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim12_stop(&mut self) -> DBG_TIM12_STOP_W<APB1_FZ_SPEC, 6> {
        DBG_TIM12_STOP_W::new(self)
    }
    #[doc = "Bit 7 - DBG_TIM13_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim13_stop(&mut self) -> DBG_TIM13_STOP_W<APB1_FZ_SPEC, 7> {
        DBG_TIM13_STOP_W::new(self)
    }
    #[doc = "Bit 8 - DBG_TIM14_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim14_stop(&mut self) -> DBG_TIM14_STOP_W<APB1_FZ_SPEC, 8> {
        DBG_TIM14_STOP_W::new(self)
    }
    #[doc = "Bit 9 - DBG_LPTIM1_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_lptim1_stop(&mut self) -> DBG_LPTIM1_STOP_W<APB1_FZ_SPEC, 9> {
        DBG_LPTIM1_STOP_W::new(self)
    }
    #[doc = "Bit 10 - DBG_RTC_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_rtc_stop(&mut self) -> DBG_RTC_STOP_W<APB1_FZ_SPEC, 10> {
        DBG_RTC_STOP_W::new(self)
    }
    #[doc = "Bit 11 - DBG_WWDG_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_wwdg_stop(&mut self) -> DBG_WWDG_STOP_W<APB1_FZ_SPEC, 11> {
        DBG_WWDG_STOP_W::new(self)
    }
    #[doc = "Bit 12 - DBG_IWDG_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_iwdg_stop(&mut self) -> DBG_IWDG_STOP_W<APB1_FZ_SPEC, 12> {
        DBG_IWDG_STOP_W::new(self)
    }
    #[doc = "Bit 13 - DBG_CAN3_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_can3_stop(&mut self) -> DBG_CAN3_STOP_W<APB1_FZ_SPEC, 13> {
        DBG_CAN3_STOP_W::new(self)
    }
    #[doc = "Bit 21 - DBG_I2C1_SMBUS_TIMEOUT"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c1_smbus_timeout(&mut self) -> DBG_I2C1_SMBUS_TIMEOUT_W<APB1_FZ_SPEC, 21> {
        DBG_I2C1_SMBUS_TIMEOUT_W::new(self)
    }
    #[doc = "Bit 22 - DBG_I2C2_SMBUS_TIMEOUT"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c2_smbus_timeout(&mut self) -> DBG_I2C2_SMBUS_TIMEOUT_W<APB1_FZ_SPEC, 22> {
        DBG_I2C2_SMBUS_TIMEOUT_W::new(self)
    }
    #[doc = "Bit 23 - DBG_I2C3_SMBUS_TIMEOUT"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c3_smbus_timeout(&mut self) -> DBG_I2C3_SMBUS_TIMEOUT_W<APB1_FZ_SPEC, 23> {
        DBG_I2C3_SMBUS_TIMEOUT_W::new(self)
    }
    #[doc = "Bit 24 - DBG_I2C4SMBUS_TIMEOUT"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c4_smbus_timeout(&mut self) -> DBG_I2C4_SMBUS_TIMEOUT_W<APB1_FZ_SPEC, 24> {
        DBG_I2C4_SMBUS_TIMEOUT_W::new(self)
    }
    #[doc = "Bit 25 - DBG_CAN1_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_can1_stop(&mut self) -> DBG_CAN1_STOP_W<APB1_FZ_SPEC, 25> {
        DBG_CAN1_STOP_W::new(self)
    }
    #[doc = "Bit 26 - DBG_CAN2_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_can2_stop(&mut self) -> DBG_CAN2_STOP_W<APB1_FZ_SPEC, 26> {
        DBG_CAN2_STOP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Debug MCU APB1 Freeze register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1_fz::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1_fz::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB1_FZ_SPEC;
// The register is accessed as a single 32-bit word.
impl crate::RegisterSpec for APB1_FZ_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb1_fz::R`](R) reader structure"]
impl crate::Readable for APB1_FZ_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb1_fz::W`](W) writer structure"]
impl crate::Writable for APB1_FZ_SPEC {
    // no fields need a fixed 0/1 written to stay unmodified
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB1_FZ to value 0"]
impl crate::Resettable for APB1_FZ_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use azure_core::prelude::*;
use azure_identity::device_code_flow::{self, DeviceCodeResponse};
use azure_identity::refresh_token;
use azure_storage::core::prelude::*;
use futures::stream::StreamExt;
use oauth2::ClientId;
use std::env;
use std::error::Error;
/// Device-code-flow example: signs in interactively, lists the blob
/// containers of the storage account given as first CLI argument, then
/// exchanges the refresh token for a fresh access token.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    let client_id =
        ClientId::new(env::var("CLIENT_ID").expect("Missing CLIENT_ID environment variable."));
    let tenant_id = env::var("TENANT_ID").expect("Missing TENANT_ID environment variable.");
    let storage_account_name = std::env::args()
        .nth(1)
        .expect("please specify the storage account name as first command line parameter");
    let client = reqwest::Client::new();
    // the process requires two steps. The first is to ask for
    // the code to show to the user. This is done with the following
    // function. Notice you can pass as many scopes as you want.
    // Since we are asking for the "offline_access" scope we will
    // receive the refresh token as well.
    // We are requesting access to the storage account passed as parameter.
    let device_code_flow = device_code_flow::start(
        &client,
        &tenant_id,
        &client_id,
        &[
            &format!(
                "https://{}.blob.core.windows.net/user_impersonation",
                storage_account_name
            ),
            "offline_access",
        ],
    )
    .await?;
    // now we must show the user the authentication message. It
    // will point the user to the login page and show the code
    // they have to specify.
    println!("{}", device_code_flow.message());
    // now we poll the auth endpoint until the user
    // completes the authentication. The following stream can
    // return, besides errors, a success meaning either
    // Success or Pending. The loop will continue until we
    // get either a Success or an error.
    let mut stream = Box::pin(device_code_flow.stream());
    let mut authorization = None;
    while let Some(resp) = stream.next().await {
        println!("{:?}", resp);
        // if we have the authorization, let's store it for later use.
        if let DeviceCodeResponse::AuthorizationSucceded(auth) = resp? {
            authorization = Some(auth);
        }
    }
    // remove the option (this is safe since we
    // unwrapped the errors before).
    let authorization = authorization.unwrap();
    println!(
        "\nReceived valid bearer token: {}",
        &authorization.access_token().secret()
    );
    // the refresh token is only present because "offline_access" was requested
    if let Some(refresh_token) = authorization.refresh_token().as_ref() {
        println!("Received valid refresh token: {}", &refresh_token.secret());
    }
    // we can now spend the access token in other crates. In
    // this example we are creating an Azure Storage client
    // using the access token.
    let http_client = new_http_client();
    let storage_client = StorageAccountClient::new_bearer_token(
        http_client.clone(),
        &storage_account_name,
        authorization.access_token().secret() as &str,
    )
    .as_storage_client();
    // now we enumerate the containers in the
    // specified storage account.
    let containers = storage_client.list_containers().execute().await?;
    println!("\nList containers completed succesfully: {:?}", containers);
    // now let's refresh the token, if available
    if let Some(refresh_token) = authorization.refresh_token() {
        let refreshed_token =
            refresh_token::exchange(&client, &tenant_id, &client_id, None, refresh_token).await?;
        println!("refreshed token == {:#?}", refreshed_token);
    }
    Ok(())
}
|
use super::super::prelude::{
LONG
};
#[repr(C)]
pub struct Point {
pub x : LONG ,
pub y : LONG ,
}
pub type POINT = Point;
impl Point {
pub fn new(nx : LONG , ny : LONG) -> Point {
Point {
x : nx ,
y : ny ,
}
}
} |
/// Entry point: prints a greeting to stdout.
fn main() {
    let greeting = "Hola madre, ya se programar en rust!";
    println!("{}", greeting);
}
|
// Copyright 2019, 2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use hash_db::Hasher;
use reference_trie::{
ExtensionLayout, NoExtensionLayout,
};
use trie_db::{
DBValue, TrieDB, TrieDBMut, TrieLayout, TrieMut,
proof::{generate_proof, verify_proof, VerifyError}, Trie,
};
type MemoryDB<H> = memory_db::MemoryDB<H, memory_db::HashKey<H>, DBValue>;
/// Key/value pairs used to build the reference trie in every proof test.
/// The mix deliberately covers inline leaves, hash-referenced leaves,
/// branch-node values, and an extension node.
fn test_entries() -> Vec<(&'static [u8], &'static [u8])> {
    let entries: [(&'static [u8], &'static [u8]); 7] = [
        // "alfa" is at a hash-referenced leaf node.
        (b"alfa", &[0; 32]),
        // "bravo" is at an inline leaf node.
        (b"bravo", b"bravo"),
        // "do" is at a hash-referenced branch node.
        (b"do", b"verb"),
        // "dog" is at a hash-referenced branch node.
        (b"dog", b"puppy"),
        // "doge" is at a hash-referenced leaf node.
        (b"doge", &[0; 32]),
        // extension node "o" (plus nibble) to next branch.
        (b"horse", b"stallion"),
        (b"house", b"building"),
    ];
    entries.to_vec()
}
/// Builds a trie from `entries` and generates a proof for `keys`.
///
/// Returns the trie root, the proof nodes, and for each requested key the
/// value the trie actually holds (`None` when absent) — exactly the triple
/// `verify_proof` expects.
fn test_generate_proof<L: TrieLayout>(
    entries: Vec<(&'static [u8], &'static [u8])>,
    keys: Vec<&'static [u8]>,
) -> (<L::Hash as Hasher>::Out, Vec<Vec<u8>>, Vec<(&'static [u8], Option<DBValue>)>)
{
    // Populate DB with full trie from entries.
    let (db, root) = {
        let mut db = <MemoryDB<L::Hash>>::default();
        let mut root = Default::default();
        {
            // Scope the mutable trie so it is dropped (committing to
            // `db`/`root`) before we read from the database.
            let mut trie = <TrieDBMut<L>>::new(&mut db, &mut root);
            for (key, value) in entries.iter() {
                trie.insert(key, value).unwrap();
            }
        }
        (db, root)
    };
    // Generate proof for the given keys..
    let trie = <TrieDB<L>>::new(&db, &root).unwrap();
    let proof = generate_proof::<_, L, _, _>(&trie, keys.iter()).unwrap();
    // Pair each key with the value actually stored in the trie.
    let items = keys.into_iter()
        .map(|key| (key, trie.get(key).unwrap()))
        .collect();
    (root, proof, items)
}
// Round-trip: a proof generated over present and absent keys verifies
// against the layout that uses extension nodes.
#[test]
fn trie_proof_works_with_ext() {
    let (root, proof, items) = test_generate_proof::<ExtensionLayout>(
        test_entries(),
        vec![
            b"do",
            b"dog",
            b"doge",
            b"bravo",
            b"alfabet", // None, not found under leaf node
            b"d", // None, witness is extension node with omitted child
            b"do\x10", // None, empty branch child
            b"halp", // None, witness is extension node with non-omitted child
        ],
    );
    verify_proof::<ExtensionLayout, _, _, _>(&root, &proof, items.iter()).unwrap();
}
// Same round-trip against the layout without extension nodes.
#[test]
fn trie_proof_works_without_ext() {
    let (root, proof, items) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![
            b"do",
            b"dog",
            b"doge",
            b"bravo",
            b"alfabet", // None, not found under leaf node
            b"d", // None, witness is extension node with omitted child
            b"do\x10", // None, empty branch child
            b"halp", // None, witness is extension node with non-omitted child
        ],
    );
    verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()).unwrap();
}
// A proof over an empty trie must still verify (all keys absent).
#[test]
fn trie_proof_works_for_empty_trie() {
    let (root, proof, items) = test_generate_proof::<NoExtensionLayout>(
        vec![],
        vec![
            b"alpha",
            b"bravo",
            b"\x42\x42",
        ],
    );
    verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()).unwrap();
}
// Presenting the same key twice must be rejected as DuplicateKey.
#[test]
fn test_verify_duplicate_keys() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"bravo"],
    );
    let items = vec![
        (b"bravo", Some(b"bravo")),
        (b"bravo", Some(b"bravo")),
    ];
    assert_eq!(
        verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()),
        Err(VerifyError::DuplicateKey(b"bravo".to_vec()))
    );
}
// A proof generated for two keys but checked against one has a leftover
// node and must fail with ExtraneousNode.
#[test]
fn test_verify_extraneous_node() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"bravo", b"do"],
    );
    let items = vec![
        (b"bravo", Some(b"bravo")),
    ];
    assert_eq!(
        verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()),
        Err(VerifyError::ExtraneousNode)
    );
}
// Supplying a value ("do") whose key was not part of the proof must fail
// with ExtraneousValue.
#[test]
fn test_verify_extraneous_value() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"doge"],
    );
    let items = vec![
        (&b"do"[..], Some(&b"verb"[..])),
        (&b"doge"[..], Some(&[0; 32][..])),
    ];
    assert_eq!(
        verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()),
        Err(VerifyError::ExtraneousValue(b"do".to_vec()))
    );
}
// A hash reference for a node ("alfa") the proof does not need must fail
// with ExtraneousHashReference.
#[test]
fn test_verify_extraneous_hash_reference() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"do"],
    );
    let items = vec![
        (&b"alfa"[..], Some(&[0; 32][..])),
        (&b"do"[..], Some(&b"verb"[..])),
    ];
    match verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()) {
        Err(VerifyError::ExtraneousHashReference(_)) => {}
        result => panic!("expected VerifyError::ExtraneousHashReference, got {:?}", result),
    }
}
#[test]
fn test_verify_invalid_child_reference() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"bravo"],
    );
    // InvalidChildReference because "bravo" is in an inline leaf node and a 32-byte value cannot
    // fit in an inline leaf.
    let items = vec![
        (b"bravo", Some([0; 32])),
    ];
    match verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()) {
        Err(VerifyError::InvalidChildReference(_)) => {}
        result => panic!("expected VerifyError::InvalidChildReference, got {:?}", result),
    }
}
// Claiming a value for "halp" (absent from the trie) must fail with
// ValueMismatch for that key.
#[test]
fn test_verify_value_mismatch_some_to_none() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"horse"],
    );
    let items = vec![
        (&b"horse"[..], Some(&b"stallion"[..])),
        (&b"halp"[..], Some(&b"plz"[..])),
    ];
    assert_eq!(
        verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()),
        Err(VerifyError::ValueMismatch(b"halp".to_vec()))
    );
}
// Claiming "bravo" is absent when the trie holds it must also fail with
// ValueMismatch.
#[test]
fn test_verify_value_mismatch_none_to_some() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"alfa", b"bravo"],
    );
    let items = vec![
        (&b"alfa"[..], Some(&[0; 32][..])),
        (&b"bravo"[..], None),
    ];
    assert_eq!(
        verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()),
        Err(VerifyError::ValueMismatch(b"bravo".to_vec()))
    );
}
// Dropping the last proof node leaves the proof incomplete.
#[test]
fn test_verify_incomplete_proof() {
    let (root, mut proof, items) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"alfa"],
    );
    proof.pop();
    assert_eq!(
        verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()),
        Err(VerifyError::IncompleteProof)
    );
}
// A wrong claimed value changes the recomputed root, so verification must
// fail with RootMismatch.
#[test]
fn test_verify_root_mismatch() {
    let (root, proof, _) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"bravo"],
    );
    let items = vec![
        (b"bravo", Some("incorrect")),
    ];
    match verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()) {
        Err(VerifyError::RootMismatch(_)) => {}
        result => panic!("expected VerifyError::RootMismatch, got {:?}", result),
    }
}
// Injecting garbage that cannot be decoded as a trie node must surface as
// DecodeError.
#[test]
fn test_verify_decode_error() {
    let (root, mut proof, items) = test_generate_proof::<NoExtensionLayout>(
        test_entries(),
        vec![b"bravo"],
    );
    proof.insert(0, b"this is not a trie node".to_vec());
    match verify_proof::<NoExtensionLayout, _, _, _>(&root, &proof, items.iter()) {
        Err(VerifyError::DecodeError(_)) => {}
        result => panic!("expected VerifyError::DecodeError, got {:?}", result),
    }
}
|
pub mod html_parser;
pub mod markdown_generator;
|
use std::io::{self, BufRead};
/// Advent of Code 2020, day 13: reads the earliest departure time (line 1)
/// and a comma-separated bus schedule with "x" gaps (line 2) from stdin,
/// then prints answers for both parts.
fn main() {
    let lines : Vec<String> = io::stdin().lock().lines().map(|l| l.unwrap()).collect();
    let ready_at = lines[0].parse::<usize>().unwrap();
    let possibly_times : Vec<&str> = lines[1].split(',').collect();
    // Bus ids that are actually scheduled (skip the "x" placeholders).
    let known_times = possibly_times.iter()
        .filter(|t| **t != "x")
        .map(|t| t.parse::<usize>().unwrap());
    // Part 1: picks the bus with the largest `ready_at % t` remainder.
    // NOTE(review): this maximizes the remainder rather than minimizing the
    // wait `t - ready_at % t`; presumably it happened to work on the
    // author's input — verify before reusing.
    let (_, shortest_wait_bus) = known_times.map(|t| (ready_at % t, t)).max().unwrap();
    // Round `ready_at` up to the next multiple of the chosen bus id.
    let shortest_wait_bus_arrives_at = (ready_at + shortest_wait_bus - 1) / shortest_wait_bus * shortest_wait_bus;
    println!("{}, {}, {}", ready_at, shortest_wait_bus_arrives_at, shortest_wait_bus);
    println!("{}", (shortest_wait_bus_arrives_at - ready_at) * shortest_wait_bus);
    // Part 2: (offset-in-schedule, bus id) pairs for the scheduled buses.
    let offset_factors : Vec<(u128, u128)> = possibly_times.iter().enumerate()
        .filter(|(_, factor)| **factor != "x")
        .map(|(offset, factor)| (offset as u128, factor.parse::<u128>().unwrap()))
        .collect();
    // Process the largest bus ids first so the search jump grows fastest.
    let mut decreasing_factor_offset_factors = offset_factors.clone();
    decreasing_factor_offset_factors.sort_by(|(_, factor1), (_, factor2)| factor2.partial_cmp(factor1).unwrap());
    let biggest_factor_offset : u128 = decreasing_factor_offset_factors[0].0;
    let biggest_factor : u128 = decreasing_factor_offset_factors[0].1;
    // CRT-style sieve: satisfy the constraints one at a time. For the first
    // `lastn + 1` constraints we find two consecutive solutions; their gap
    // is the combined period, which becomes the jump size for the next pass.
    let mut start = 0;
    let mut jump = 1;
    let mut lastn = 0;
    while lastn < decreasing_factor_offset_factors.len() {
        let mut i = start;
        let mut n0 = 0;
        loop {
            // Check whether `i` satisfies every constraint considered so far.
            let mut all = true;
            for (offset, factor) in &decreasing_factor_offset_factors[0..(lastn+1)] {
                if (i + offset) % factor != 0 {
                    all = false;
                }
            }
            if all {
                if n0 == 0 {
                    // First solution of this pass — remember it.
                    n0 = i;
                } else {
                    // Second solution — restart from the first one and jump
                    // by the period when adding the next constraint.
                    start = n0;
                    jump = i - n0;
                    break;
                }
            }
            i += jump;
        }
        lastn += 1;
    }
    println!("{}", start);
}
/// Returns the smallest `n >= 1` such that
/// `(start_point + n * multiplier + offset)` is divisible by `factor`.
///
/// Like the original loop, this never terminates if no such `n` exists
/// (e.g. `multiplier == 0` with an unsatisfiable residue).
fn n_with_no_distance_from(multiplier: u128, offset: u128, factor: u128, start_point: u128) -> u128 {
    (1u128..)
        .find(|&candidate| (start_point + candidate * multiplier + offset) % factor == 0)
        .expect("unbounded range always yields a candidate")
}
/*
[src/main.rs:39] differences = [
320,
13,
11,
26,
15,
10,
3,
10,
]
*/
|
use crate::gui::UiNode;
use crate::interaction::InteractionModeTrait;
use crate::scene::commands::{ChangeSelectionCommand, SceneCommand};
use crate::scene::{EditorScene, GraphSelection, Selection};
use crate::settings::Settings;
use crate::{GameEngine, Message};
use rg3d::core::algebra::Vector2;
use rg3d::core::math::aabb::AxisAlignedBoundingBox;
use rg3d::core::pool::Handle;
use rg3d::gui::message::{MessageDirection, WidgetMessage};
use rg3d::scene::node::Node;
use std::sync::mpsc::Sender;
/// Editor interaction mode that lets the user drag a rubber-band rectangle
/// over the scene preview and select the nodes that fall inside it.
pub struct SelectInteractionMode {
    // Handle of the scene preview widget; used to compute frame bounds
    // relative to the preview.
    preview: Handle<UiNode>,
    // Handle of the rubber-band selection rectangle widget.
    selection_frame: Handle<UiNode>,
    // Channel for sending editor messages (e.g. selection-change commands).
    message_sender: Sender<Message>,
    // Scratch stack reused for iterative scene-graph traversal.
    stack: Vec<Handle<Node>>,
    // Screen position where the left mouse button was pressed.
    click_pos: Vector2<f32>,
}
impl SelectInteractionMode {
    /// Creates the mode for the given preview widget, selection-frame widget
    /// and editor message channel. Traversal stack and click position start
    /// empty/zeroed.
    pub fn new(
        preview: Handle<UiNode>,
        selection_frame: Handle<UiNode>,
        message_sender: Sender<Message>,
    ) -> Self {
        Self {
            preview,
            selection_frame,
            message_sender,
            stack: Vec::new(),
            click_pos: Vector2::default(),
        }
    }
}
impl InteractionModeTrait for SelectInteractionMode {
    /// Starts the rubber-band selection: remembers the press position and
    /// shows the selection frame there with zero size.
    fn on_left_mouse_button_down(
        &mut self,
        _editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        mouse_pos: Vector2<f32>,
        _frame_size: Vector2<f32>,
    ) {
        self.click_pos = mouse_pos;
        let ui = &mut engine.user_interface;
        ui.send_message(WidgetMessage::visibility(
            self.selection_frame,
            MessageDirection::ToWidget,
            true,
        ));
        ui.send_message(WidgetMessage::desired_position(
            self.selection_frame,
            MessageDirection::ToWidget,
            mouse_pos,
        ));
        ui.send_message(WidgetMessage::width(
            self.selection_frame,
            MessageDirection::ToWidget,
            0.0,
        ));
        ui.send_message(WidgetMessage::height(
            self.selection_frame,
            MessageDirection::ToWidget,
            0.0,
        ));
    }
    /// Finishes the selection: walks the scene graph, selects every node
    /// with at least one AABB corner projecting inside the frame rectangle,
    /// emits a `ChangeSelection` command if the selection changed, and hides
    /// the frame.
    fn on_left_mouse_button_up(
        &mut self,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        _mouse_pos: Vector2<f32>,
        frame_size: Vector2<f32>,
    ) {
        let scene = &engine.scenes[editor_scene.scene];
        let camera = scene.graph[editor_scene.camera_controller.camera].as_camera();
        let preview_screen_bounds = engine.user_interface.node(self.preview).screen_bounds();
        let frame_screen_bounds = engine
            .user_interface
            .node(self.selection_frame)
            .screen_bounds();
        // Frame bounds expressed relative to the preview widget's origin.
        let relative_bounds = frame_screen_bounds.translate(-preview_screen_bounds.position);
        self.stack.clear();
        self.stack.push(scene.graph.get_root());
        let mut graph_selection = GraphSelection::default();
        // Iterative depth-first traversal via the reusable stack.
        while let Some(handle) = self.stack.pop() {
            let node = &scene.graph[handle];
            // Skip the editor's own root (and thereby its subtree).
            if handle == editor_scene.root {
                continue;
            }
            // The graph root is not selectable; just descend into children.
            if handle == scene.graph.get_root() {
                self.stack.extend_from_slice(node.children());
                continue;
            }
            // Nodes without a real bounding volume fall back to a unit AABB.
            let aabb = match node {
                Node::Base(_) => AxisAlignedBoundingBox::unit(),
                Node::Light(_) => AxisAlignedBoundingBox::unit(),
                Node::Camera(_) => AxisAlignedBoundingBox::unit(),
                Node::Mesh(mesh) => mesh.bounding_box(),
                Node::Sprite(_) => AxisAlignedBoundingBox::unit(),
                Node::Decal(_) => AxisAlignedBoundingBox::unit(),
                Node::ParticleSystem(_) => AxisAlignedBoundingBox::unit(),
                Node::Terrain(ref terrain) => terrain.bounding_box(),
            };
            // Select the node as soon as any projected AABB corner lands
            // inside the rubber-band rectangle.
            for screen_corner in aabb
                .corners()
                .iter()
                .filter_map(|&p| camera.project(p + node.global_position(), frame_size))
            {
                if relative_bounds.contains(screen_corner) {
                    graph_selection.insert_or_exclude(handle);
                    break;
                }
            }
            self.stack.extend_from_slice(node.children());
        }
        let new_selection = Selection::Graph(graph_selection);
        // Only emit a command when the selection actually changed.
        if new_selection != editor_scene.selection {
            self.message_sender
                .send(Message::DoSceneCommand(SceneCommand::ChangeSelection(
                    ChangeSelectionCommand::new(new_selection, editor_scene.selection.clone()),
                )))
                .unwrap();
        }
        engine
            .user_interface
            .send_message(WidgetMessage::visibility(
                self.selection_frame,
                MessageDirection::ToWidget,
                false,
            ));
    }
    /// While dragging, keeps the selection frame spanning from the press
    /// position to the current cursor position (handles all four drag
    /// directions by moving the frame's origin as needed).
    fn on_mouse_move(
        &mut self,
        _mouse_offset: Vector2<f32>,
        mouse_position: Vector2<f32>,
        _camera: Handle<Node>,
        _editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        _frame_size: Vector2<f32>,
        _settings: &Settings,
    ) {
        let ui = &mut engine.user_interface;
        let width = mouse_position.x - self.click_pos.x;
        let height = mouse_position.y - self.click_pos.y;
        // The frame's top-left corner is whichever of the two points is
        // smaller on each axis.
        let position = Vector2::new(
            if width < 0.0 {
                mouse_position.x
            } else {
                self.click_pos.x
            },
            if height < 0.0 {
                mouse_position.y
            } else {
                self.click_pos.y
            },
        );
        ui.send_message(WidgetMessage::desired_position(
            self.selection_frame,
            MessageDirection::ToWidget,
            position,
        ));
        ui.send_message(WidgetMessage::width(
            self.selection_frame,
            MessageDirection::ToWidget,
            width.abs(),
        ));
        ui.send_message(WidgetMessage::height(
            self.selection_frame,
            MessageDirection::ToWidget,
            height.abs(),
        ));
    }
    /// No per-frame work needed for this mode.
    fn update(
        &mut self,
        _editor_scene: &mut EditorScene,
        _camera: Handle<Node>,
        _engine: &mut GameEngine,
    ) {
    }
    /// Nothing to clean up on deactivation.
    fn deactivate(&mut self, _editor_scene: &EditorScene, _engine: &mut GameEngine) {}
}
|
///// chapter 4 "structuring data and matching patterns"
///// program section:
//
/// Prints the letters of "merlin" in sorted order, space-separated.
fn main() {
    let magician = "merlin";
    let mut letters: Vec<char> = magician.chars().collect();
    letters.sort();
    for letter in &letters {
        print!("{} ", letter);
    }
}
///// output should be:
/*
eilmnr
*/// end of output
|
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate exonum;
extern crate exonum_configuration;
extern crate router;
extern crate bodyparser;
extern crate iron;
use exonum_configuration::ConfigurationService;
use exonum::helpers::fabric::NodeBuilder;
pub mod cryptocurrency;
use cryptocurrency::CurrencyService;
/// Boots an Exonum node with the configuration and cryptocurrency services
/// registered, after initializing logging and cryptography.
fn main() {
    exonum::helpers::init_logger().unwrap();
    exonum::crypto::init();
    // Create Keys
    //let (consensus_public_key, consensus_secret_key) = exonum::crypto::gen_keypair();
    //let (service_public_key, service_secret_key) = exonum::crypto::gen_keypair();
    //let mut node = Node::new(blockchain, node_cfg);
    //node.run().unwrap();
    // Build the node with both services and run it.
    NodeBuilder::new()
        .with_service::<ConfigurationService>()
        .with_service::<CurrencyService>()
        .run();
}
|
use proc_macro2::TokenStream;
/// Emits a token stream computing `#leading_bits / 8` — the byte index a bit
/// offset falls into, rounded down.
pub fn bits_to_byte_floor(leading_bits : &TokenStream) -> TokenStream
{
    quote!{ (#leading_bits / 8) }
}
/// Emits a token stream computing `(#leading_bits + 7) / 8` — the number of
/// bytes needed to hold a bit count, rounded up.
pub fn bits_to_byte_ceiling(leading_bits : &TokenStream) -> TokenStream
{
    quote!{ ((#leading_bits + 7) / 8) }
}
/// Emits a token stream producing the `u8` bitmask selecting `#size_in_bits`
/// bits within a byte, shifted past the bits already consumed in that byte.
pub fn get_bitmask(size_in_bits : &TokenStream, bits_consumed_inside_byte : &TokenStream) -> TokenStream
{
    //create the bitmask for this byte // shift the mask by the number of bits already read in this byte
    quote!{ ((1u8 << (#size_in_bits % 8)) - 1u8) << ((8 - (#size_in_bits + #bits_consumed_inside_byte) ) as u8) }
}
//this gets byte index of a var inside an array
//i.e. [1..2]
/// Emits a token stream slicing `#byte_token` at the byte range covered by a
/// value of type `derivable`, starting `total_bits_consumed` bits in.
/// NOTE(review): the end index uses `bits_to_byte_floor`, so a size that is
/// not byte-aligned truncates the range — confirm callers only use this for
/// byte-aligned types.
pub fn get_byte_indices(derivable : &syn::Type, total_bits_consumed : &TokenStream, byte_token : TokenStream) -> TokenStream
{
    //the byte where the number of bits already read lands
    let array_start = bits_to_byte_floor(total_bits_consumed);
    let size_in_bits = quote!{ (<#derivable>::SIZE_IN_BITS) };
    //the byte where the number of bits already read + the size of this var lands
    let array_end = bits_to_byte_floor(&(quote!{ (#total_bits_consumed + #size_in_bits) }));
    quote!{ & #byte_token[#array_start .. #array_end] }
}
/// Emits a token stream that splits `SIZE_IN_BYTES` bytes off the front of a
/// `slice` variable (advancing it) and yields the fixed-size head.
/// NOTE(review): `total_bits_consumed` is unused here, and the emitted guard
/// compares `buf.len()` against `self.len()` while the body operates on
/// `slice` — verify these identifiers exist in the expansion context.
pub fn get_slice_indices(derivable : &syn::Type, total_bits_consumed : &TokenStream) -> TokenStream
{
    quote!{
        if buf.len() > self.len() { return Err(Error::new(std::io::ErrorKind::UnexpectedEof, "Slice wasn't long enough!")); }
        //cut the slice at <derivable>::size_in_bytes
        let (a, b) = slice.split_at(<#derivable>::SIZE_IN_BYTES);
        //update slice len
        *slice = b;
        a.try_into().expect("Packattack Internal Error : Slice with incorrect length! This should be unreachable due to check. Please open an issue.")
    }
}
//This finds the byte that a var is inside
//indexes it inside the current array using that byte
/// Convenience wrapper over [`get_bit_indices`]: locates the byte containing
/// the bit offset and indexes `array_name` at it.
pub fn get_bit_indices_from_array(derivable : &syn::Type,
                                total_bits_consumed : &TokenStream,
                                array_name : syn::Ident) -> TokenStream
{
    //find the byte I'm in
    let byte = bits_to_byte_floor(total_bits_consumed);
    get_bit_indices(derivable, total_bits_consumed, quote!{ #array_name[#byte] })
}
//This returns the bits masked from the #byte_token byte and shifted back to the front of the byte
//ready for big endian reading
/// Emits a token stream that masks the target bits out of `#byte_token` and
/// right-shifts them to the least-significant position.
pub fn get_bit_indices(derivable : &syn::Type,
                    total_bits_consumed : &TokenStream,
                    byte_token : TokenStream) -> TokenStream
{
    //this is the number of bits already consumed within the current byte
    let bits_consumed_inside_byte = quote!{ (#total_bits_consumed % 8) };
    let size_in_bits = quote!{ (<#derivable>::SIZE_IN_BITS) };
    //byte bitmask
    let bitmask = get_bitmask(&size_in_bits, &bits_consumed_inside_byte);
    // read the byte, mask it for the bits we want,
    //and bit shift them back to the beginning of the u8
    //finally pass that value into from_u8
    quote!{ (#byte_token & #bitmask) >> (8 - (#size_in_bits + #bits_consumed_inside_byte)) }
}
///////////////////////////
/*
pub fn get_read_clause(derivable : &syn::Type, preceeding_bits : &TokenStream, field_data_type : FieldDataType,
parent_data_type : ParentDataType, array_count : usize) -> (TokenStream, TokenStream)
{
match parent_data_type
{
//were in from reader which means we have `reader` and `array_1`, `array_2`... and so on
ParentDataType::FromReader => handle_from_reader_parent(derivable, preceeding_bits, field_data_type, array_count),
//were in from_bytes which means we only have the `bytes` array to read from
ParentDataType::FromBytes =>
{
match field_data_type
{
FieldDataType::FromReader => unimplemented!("Can't read a from_reader type inside a from_bytes type!"),
FieldDataType::FromBytes(from_bytes_type) =>
{
match from_bytes_type
{
FromBytesType::WithLength(len) => unimplemented!("TODO: Implement copy slice into BUFFER starting from preceeding_bits"),
FromBytesType::SizeInBytes =>
{
let size_in_bytes = bits_to_byte_ceiling("e!{ <#derivable>::SIZE_IN_BITS });
(quote!{
let (a, b) = bytes.split_at(#size_in_bytes);
*bytes = b;
<#derivable>::from_bytes(a.try_into().expect("Packattack Internal Error: Slice was the wrong size for type"))
}, quote!{ 0 })
}
}
},
FieldDataType::FromBits =>
{
let (address, size) = get_bit_indices_from_array(derivable, preceeding_bits, ident_from_str("bytes"));
(quote!{ <#derivable>::from_bytes((#address).to_be_bytes())? }, size)
},
FieldDataType::Payload => unimplemented!("TODO : let the payload take the remaining bytes")
}
}
}
}
fn handle_from_reader_parent(derivable : &syn::Type, preceeding_bits : &TokenStream, field_data_type : FieldDataType, array_count : usize) -> (TokenStream, TokenStream)
{
//Now we know we're in from_reader, what kind of field are we reading?
match field_data_type
{
FieldDataType::FromReader =>
{
//from_reader is variable only
(quote!{ <#derivable>::from_reader(reader).await? }, quote!{ 0 })
},
FieldDataType::FromBytes(from_bytes_type) =>
{
//from_bytes can be #[length], or SIZE_IN_BYTES large
match from_bytes_type
{
//there's a length so find the slice
FromBytesType::WithLength(len) => unimplemented!(),
/*{
(quote!{{let buffer = vec![0; #len];
reader.read_exact(buffer.as_mut_slice()).await?;
<#derivable>::from_reader(buffer.as_slice()).await?
}},
//TODO: fix this length to assign to a local variable outside this scope
quote!{ (#len * 8) })
},*/
//there's no length so there's a fixed size_in_bytes
//NOT BROKEN
FromBytesType::SizeInBytes =>
{
//create a new array with the count + "array_" as the identifier
let array_num = format!{"array_{}", array_count};
let array_name : syn::Ident = ident_from_str(array_num.as_str());
let (address, size) = get_byte_indices(derivable, preceeding_bits, quote!{ #array_name });
(quote!{ <#derivable>::from_bytes((#address).try_into()?)? }, size)
}
}
},
FieldDataType::FromBits =>
{
//create a new array with the count + "array_" as the identifier
let array_num = format!{"array_{}", array_count};
let array_name : syn::Ident = ident_from_str(array_num.as_str());
let (address, size) = get_bit_indices_from_array(&derivable, &preceeding_bits, array_name);
(quote!{ <#derivable>::from_bytes((#address).to_be_bytes())? }, size)
},
FieldDataType::Payload =>
{
//pass in the reader here because if we're a payload in from reader
//then size_hint has already been defined which means reader is &mut &[u8]
( quote!{ <#derivable>::from_bytes(reader)? }, quote!{ 0 })
}
}
}*/ |
// Register accessors for CSR49 (context swap register). This file follows the
// svd2rust register-access pattern referenced in the doc strings below —
// presumably machine-generated; avoid hand edits if so.
#[doc = "Register `CSR49` reader"]
pub type R = crate::R<CSR49_SPEC>;
#[doc = "Register `CSR49` writer"]
pub type W = crate::W<CSR49_SPEC>;
#[doc = "Field `CSR49` reader - CSR49"]
pub type CSR49_R = crate::FieldReader<u32>;
#[doc = "Field `CSR49` writer - CSR49"]
pub type CSR49_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - CSR49"]
    #[inline(always)]
    pub fn csr49(&self) -> CSR49_R {
        CSR49_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - CSR49"]
    #[inline(always)]
    #[must_use]
    pub fn csr49(&mut self) -> CSR49_W<CSR49_SPEC, 0> {
        CSR49_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "context swap registers\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr49::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr49::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CSR49_SPEC;
impl crate::RegisterSpec for CSR49_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`csr49::R`](R) reader structure"]
impl crate::Readable for CSR49_SPEC {}
#[doc = "`write(|w| ..)` method takes [`csr49::W`](W) writer structure"]
impl crate::Writable for CSR49_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CSR49 to value 0"]
impl crate::Resettable for CSR49_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright (C) 2021 Subspace Labs, Inc.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Light client substrate primitives for Subspace.
#![forbid(unsafe_code)]
#![warn(rust_2018_idioms, missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use codec::{Decode, Encode};
use scale_info::TypeInfo;
use sp_arithmetic::traits::{CheckedAdd, CheckedSub, One, Zero};
use sp_consensus_slots::Slot;
use sp_consensus_subspace::consensus::verify_solution;
use sp_consensus_subspace::digests::{
extract_pre_digest, extract_subspace_digest_items, verify_next_digests, CompatibleDigestItem,
Error as DigestError, ErrorDigestType, NextDigestsVerificationParams, PreDigest,
SubspaceDigestItems,
};
use sp_consensus_subspace::{FarmerPublicKey, FarmerSignature};
use sp_runtime::traits::Header as HeaderT;
use sp_runtime::ArithmeticError;
use sp_std::cmp::Ordering;
use sp_std::collections::btree_map::BTreeMap;
use sp_std::marker::PhantomData;
use sp_std::num::NonZeroU64;
use subspace_core_primitives::{
ArchivedHistorySegment, BlockWeight, HistorySize, PublicKey, Randomness, RewardSignature,
SectorId, SegmentCommitment, SegmentIndex, SolutionRange,
};
use subspace_solving::REWARD_SIGNING_CONTEXT;
use subspace_verification::{
calculate_block_weight, check_reward_signature, PieceCheckParams, VerifySolutionParams,
};
#[cfg(test)]
mod tests;
#[cfg(test)]
mod mock;
/// Chain constants.
#[derive(Debug, Encode, Decode, Clone, TypeInfo)]
pub struct ChainConstants<Header: HeaderT> {
    /// K Depth at which we finalize the heads.
    pub k_depth: NumberOf<Header>,
    /// Genesis digest items at the start of the chain since the genesis block will not have any
    /// digests to verify the Block #1 digests.
    pub genesis_digest_items: NextDigestItems,
    /// Genesis block segment commitments to verify the Block #1 and other block solutions until
    /// Block #1 is finalized.
    /// When Block #1 is finalized, the segment commitments present in Block #1 are stored in
    /// the storage.
    pub genesis_segment_commitments: BTreeMap<SegmentIndex, SegmentCommitment>,
    /// Defines interval at which randomness is updated.
    pub global_randomness_interval: NumberOf<Header>,
    /// Era duration at which solution range is updated.
    pub era_duration: NumberOf<Header>,
    /// Slot probability.
    pub slot_probability: (u64, u64),
    /// Storage bound for the light client store.
    pub storage_bound: StorageBound<NumberOf<Header>>,
    /// Number of latest archived segments that are considered "recent history".
    pub recent_segments: HistorySize,
    /// Fraction of pieces from the "recent history" (`recent_segments`) in each sector.
    pub recent_history_fraction: (HistorySize, HistorySize),
    /// Minimum lifetime of a plotted sector, measured in archived segments.
    pub min_sector_lifetime: HistorySize,
}
/// Defines the storage bound for the light client store.
#[derive(Default, Debug, Encode, Decode, TypeInfo, Clone)]
pub enum StorageBound<Number> {
    /// Keeps all the headers in the storage.
    #[default]
    Unbounded,
    /// Keeps only the given number of headers beyond K depth.
    NumberOfHeaderToKeepBeyondKDepth(Number),
}
/// HeaderExt describes an extended block chain header at a specific height along with some computed
/// values.
#[derive(Default, Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub struct HeaderExt<Header> {
    /// Actual header of the subspace block chain at a specific number.
    pub header: Header,
    /// Cumulative weight of chain until this header.
    pub total_weight: BlockWeight,
    /// Slot at which current era started.
    pub era_start_slot: Slot,
    /// Should adjust solution range on era change.
    pub should_adjust_solution_range: bool,
    /// Solution range override for the current era.
    pub maybe_current_solution_range_override: Option<SolutionRange>,
    /// Solution range override for the next era.
    pub maybe_next_solution_range_override: Option<SolutionRange>,
    /// Restrict block authoring to this public key.
    pub maybe_root_plot_public_key: Option<FarmerPublicKey>,
    // Test-only hook for overriding digest-derived values (see `mock`).
    #[cfg(test)]
    test_overrides: mock::TestOverrides,
}
/// Type to hold next digest items present in parent header that are used to verify the immediate
/// descendant.
#[derive(Default, Debug, Encode, Decode, Clone, TypeInfo)]
pub struct NextDigestItems {
    // Randomness expected in the child header.
    next_global_randomness: Randomness,
    // Solution range expected in the child header.
    next_solution_range: SolutionRange,
}
impl NextDigestItems {
    /// Constructs self with provided next digest items.
    pub fn new(next_global_randomness: Randomness, next_solution_range: SolutionRange) -> Self {
        Self {
            next_global_randomness,
            next_solution_range,
        }
    }
}
impl<Header: HeaderT> HeaderExt<Header> {
    /// Extracts the next digest items (randomness and solution range) present in the header.
    /// If next digests are not present, then we fall back to the current ones.
    fn extract_next_digest_items(&self) -> Result<NextDigestItems, ImportError<Header>> {
        let SubspaceDigestItems {
            global_randomness,
            solution_range,
            next_global_randomness,
            next_solution_range,
            ..
        } = extract_subspace_digest_items::<_, FarmerPublicKey, FarmerPublicKey, FarmerSignature>(
            &self.header,
        )?;
        // If there is an override of the solution range for the current era, prefer it.
        let solution_range = self
            .maybe_current_solution_range_override
            .unwrap_or(solution_range);
        // In tests, `test_overrides` may replace the (next) solution range.
        // `unwrap_or`/`or` express the original `is_some()` + `unwrap()`
        // branches idiomatically without changing behavior.
        #[cfg(test)]
        let solution_range = self.test_overrides.solution_range.unwrap_or(solution_range);
        #[cfg(test)]
        let next_solution_range = self.test_overrides.next_solution_range.or(next_solution_range);
        Ok(NextDigestItems {
            next_global_randomness: next_global_randomness.unwrap_or(global_randomness),
            next_solution_range: next_solution_range.unwrap_or(solution_range),
        })
    }
}
// Shorthand for a header's hash type.
type HashOf<T> = <T as HeaderT>::Hash;
// Shorthand for a header's block-number type.
type NumberOf<T> = <T as HeaderT>::Number;
/// Storage responsible for storing headers.
pub trait Storage<Header: HeaderT> {
    /// Returns the chain constants.
    fn chain_constants(&self) -> ChainConstants<Header>;
    /// Queries a header at a specific block number or block hash.
    fn header(&self, hash: HashOf<Header>) -> Option<HeaderExt<Header>>;
    /// Stores the extended header.
    /// `as_best_header` signifies whether the header we are importing is considered best.
    fn store_header(&mut self, header_ext: HeaderExt<Header>, as_best_header: bool);
    /// Returns the best known tip of the chain.
    fn best_header(&self) -> HeaderExt<Header>;
    /// Returns headers at a given number.
    fn headers_at_number(&self, number: NumberOf<Header>) -> Vec<HeaderExt<Header>>;
    /// Prunes header with hash.
    fn prune_header(&mut self, hash: HashOf<Header>);
    /// Marks a given header with hash as finalized.
    fn finalize_header(&mut self, hash: HashOf<Header>);
    /// Returns the latest finalized header.
    fn finalized_header(&self) -> HeaderExt<Header>;
    /// Stores segment commitments for fast retrieval by segment index at or below finalized header.
    fn store_segment_commitments(
        &mut self,
        segment_commitments: BTreeMap<SegmentIndex, SegmentCommitment>,
    );
    /// Returns a segment commitment for a given segment index.
    fn segment_commitment(&self, segment_index: SegmentIndex) -> Option<SegmentCommitment>;
    /// Returns the stored segment count.
    // TODO: Ideally should use `HistorySize` instead of `u64`
    fn number_of_segments(&self) -> u64;
    /// How many pieces one sector is supposed to contain (max).
    fn max_pieces_in_sector(&self) -> u16;
}
/// Error type that holds the current finalized number and the header number we are trying to import.
#[derive(Debug, PartialEq, Eq)]
pub struct HeaderBelowArchivingDepthError<Header: HeaderT> {
    /// Number of the latest finalized header at the time of the failed import.
    current_finalized_number: NumberOf<Header>,
    /// Number of the header whose import was rejected.
    header_number: NumberOf<Header>,
}
/// Error during the header import.
#[derive(Debug, PartialEq, Eq)]
pub enum ImportError<Header: HeaderT> {
    /// Header already imported.
    HeaderAlreadyImported,
    /// Missing parent header.
    MissingParent(HashOf<Header>),
    /// Missing header associated with hash.
    MissingHeader(HashOf<Header>),
    /// Missing ancestor header at the number.
    MissingAncestorHeader(HashOf<Header>, NumberOf<Header>),
    /// Error while extracting digests from header.
    DigestError(DigestError),
    /// Invalid digest in the header.
    InvalidDigest(ErrorDigestType),
    /// Invalid slot when compared with parent header.
    InvalidSlot,
    /// Block signature is invalid.
    InvalidBlockSignature,
    /// Solution present in the header is invalid.
    InvalidSolution(String),
    /// Arithmetic error.
    ArithmeticError(ArithmeticError),
    /// Switched to different fork beyond archiving depth.
    SwitchedToForkBelowArchivingDepth,
    /// Header being imported is below the archiving depth.
    HeaderIsBelowArchivingDepth(HeaderBelowArchivingDepthError<Header>),
    /// Missing segment commitment for a given segment index.
    MissingSegmentCommitment(SegmentIndex),
    /// Segment commitment history is empty.
    EmptySegmentCommitmentHistory,
    /// Invalid history size.
    InvalidHistorySize,
}
impl<Header: HeaderT> From<DigestError> for ImportError<Header> {
    /// Wraps a digest-extraction failure so callers can propagate it with `?`.
    #[inline]
    fn from(error: DigestError) -> Self {
        Self::DigestError(error)
    }
}
/// Verifies and import headers.
#[derive(Debug)]
pub struct HeaderImporter<Header: HeaderT, Store: Storage<Header>> {
    // Backing storage for headers, segment commitments, and chain constants.
    store: Store,
    // Ties the `Header` type parameter to the struct without storing one.
    _phantom: PhantomData<Header>,
}
impl<Header: HeaderT, Store: Storage<Header>> HeaderImporter<Header, Store> {
    /// Returns a new instance of HeaderImporter with provided Storage impls
    pub fn new(store: Store) -> Self {
        HeaderImporter {
            store,
            _phantom: Default::default(),
        }
    }

    /// Verifies header, computes consensus values for block progress and stores the HeaderExt.
    ///
    /// Verification order: duplicate check, archiving-depth check, parent lookup, digest
    /// checks against the parent, next-digest checks, slot monotonicity, block signature,
    /// and finally the solution itself. On success the header is stored and, if it became
    /// the new best header, finalization and storage-bound pruning are run.
    ///
    /// # Errors
    /// Returns the [`ImportError`] corresponding to the first verification step that failed.
    pub fn import_header(&mut self, mut header: Header) -> Result<(), ImportError<Header>> {
        // check if the header is already imported
        match self.store.header(header.hash()) {
            Some(_) => Err(ImportError::HeaderAlreadyImported),
            None => Ok(()),
        }?;
        // only try and import headers above the finalized number
        let current_finalized_number = *self.store.finalized_header().header.number();
        if *header.number() <= current_finalized_number {
            return Err(ImportError::HeaderIsBelowArchivingDepth(
                HeaderBelowArchivingDepthError {
                    current_finalized_number,
                    header_number: *header.number(),
                },
            ));
        }
        // fetch parent header
        let parent_header = self
            .store
            .header(*header.parent_hash())
            .ok_or_else(|| ImportError::MissingParent(header.hash()))?;
        // verify global randomness and solution range from the parent header
        let header_digests = self.verify_header_digest_with_parent(&parent_header, &header)?;
        // verify next digest items
        let constants = self.store.chain_constants();
        let mut maybe_root_plot_public_key = parent_header.maybe_root_plot_public_key;
        // if a root plot public key is enforced, only that key may author blocks
        if let Some(root_plot_public_key) = &maybe_root_plot_public_key {
            if root_plot_public_key != &header_digests.pre_digest.solution.public_key {
                return Err(ImportError::IncorrectBlockAuthor(
                    header_digests.pre_digest.solution.public_key,
                ));
            }
        }
        let mut should_adjust_solution_range = parent_header.should_adjust_solution_range;
        let mut maybe_next_solution_range_override =
            parent_header.maybe_next_solution_range_override;
        verify_next_digests::<Header>(NextDigestsVerificationParams {
            number: *header.number(),
            header_digests: &header_digests,
            global_randomness_interval: constants.global_randomness_interval,
            era_duration: constants.era_duration,
            slot_probability: constants.slot_probability,
            era_start_slot: parent_header.era_start_slot,
            should_adjust_solution_range: &mut should_adjust_solution_range,
            maybe_next_solution_range_override: &mut maybe_next_solution_range_override,
            maybe_root_plot_public_key: &mut maybe_root_plot_public_key,
        })?;
        // slot must be strictly increasing from the parent header
        Self::verify_slot(&parent_header.header, &header_digests.pre_digest)?;
        // verify block signature (temporarily pops and restores the seal digest,
        // hence the `&mut header`; see `verify_block_signature`)
        Self::verify_block_signature(&mut header, &header_digests.pre_digest.solution.public_key)?;
        // verify solution
        let sector_id = SectorId::new(
            PublicKey::from(&header_digests.pre_digest.solution.public_key).hash(),
            header_digests.pre_digest.solution.sector_index,
        );
        let max_pieces_in_sector = self.store.max_pieces_in_sector();
        let segment_index = sector_id
            .derive_piece_index(
                header_digests.pre_digest.solution.piece_offset,
                header_digests.pre_digest.solution.history_size,
                max_pieces_in_sector,
                constants.recent_segments,
                constants.recent_history_fraction,
            )
            .segment_index();
        let segment_commitment = self
            .find_segment_commitment_for_segment_index(segment_index, parent_header.header.hash())?
            .ok_or(ImportError::MissingSegmentCommitment(segment_index))?;
        let current_history_size = HistorySize::new(
            NonZeroU64::try_from(self.store.number_of_segments())
                .map_err(|_error| ImportError::EmptySegmentCommitmentHistory)?,
        );
        let sector_expiration_check_segment_commitment = self
            .find_segment_commitment_for_segment_index(
                header_digests
                    .pre_digest
                    .solution
                    .history_size
                    .sector_expiration_check(constants.min_sector_lifetime)
                    .ok_or(ImportError::InvalidHistorySize)?
                    .segment_index(),
                parent_header.header.hash(),
            )?;
        verify_solution(
            (&header_digests.pre_digest.solution).into(),
            header_digests.pre_digest.slot.into(),
            (&VerifySolutionParams {
                global_randomness: header_digests.global_randomness,
                solution_range: header_digests.solution_range,
                piece_check_params: Some(PieceCheckParams {
                    max_pieces_in_sector,
                    segment_commitment,
                    recent_segments: constants.recent_segments,
                    recent_history_fraction: constants.recent_history_fraction,
                    min_sector_lifetime: constants.min_sector_lifetime,
                    current_history_size,
                    sector_expiration_check_segment_commitment,
                }),
            })
            .into(),
        )
        .map_err(ImportError::InvalidSolution)?;
        let added_weight = calculate_block_weight(header_digests.solution_range);
        let total_weight = parent_header.total_weight + added_weight;
        // last best header should ideally be parent header. if not check for forks and pick the best chain
        let last_best_header = self.store.best_header();
        let last_best_weight = last_best_header.total_weight;
        let is_best_header = total_weight > last_best_weight;
        // check if era has changed
        let era_start_slot = if Self::has_era_changed(&header, constants.era_duration) {
            header_digests.pre_digest.slot
        } else {
            parent_header.era_start_slot
        };
        // check if we should update current solution range override
        let mut maybe_current_solution_range_override =
            parent_header.maybe_current_solution_range_override;
        // if there is override of solution range in this header, use it
        if let Some(current_solution_range_override) =
            header_digests.enable_solution_range_adjustment_and_override
        {
            maybe_current_solution_range_override = current_solution_range_override;
        }
        // check if the era has changed and there is a current solution range override, reset it
        if maybe_current_solution_range_override.is_some()
            && Self::has_era_changed(&header, constants.era_duration)
        {
            maybe_current_solution_range_override = None
        }
        // store header
        let header_ext = HeaderExt {
            header,
            total_weight,
            era_start_slot,
            should_adjust_solution_range,
            maybe_current_solution_range_override,
            maybe_next_solution_range_override,
            maybe_root_plot_public_key,
            // test-only escape hatch; compiled out of production builds
            #[cfg(test)]
            test_overrides: Default::default(),
        };
        self.store.store_header(header_ext, is_best_header);
        // finalize, prune forks, and ensure storage is bounded if the chain has progressed
        if is_best_header {
            self.finalize_header_at_k_depth()?;
            self.ensure_storage_bound();
        }
        Ok(())
    }

    /// Returns `true` when importing `header` begins a new era.
    fn has_era_changed(header: &Header, era_duration: NumberOf<Header>) -> bool {
        // special case when the current header is one, then first era begins
        // or
        // era duration interval has reached, so era has changed
        header.number().is_one() || *header.number() % era_duration == Zero::zero()
    }

    /// Verifies if the header digests matches with logs from the parent header.
    ///
    /// # Errors
    /// Fails if digest extraction fails, or if the header's global randomness or
    /// solution range disagree with the parent's "next" digest items.
    fn verify_header_digest_with_parent(
        &self,
        parent_header: &HeaderExt<Header>,
        header: &Header,
    ) -> Result<
        SubspaceDigestItems<FarmerPublicKey, FarmerPublicKey, FarmerSignature>,
        ImportError<Header>,
    > {
        // extract digest items from the header
        let pre_digest_items = extract_subspace_digest_items(header)?;
        // extract next digest items from the parent header
        let next_digest_items = {
            // if the header we are verifying is #1, then parent header, genesis, wont have the next digests
            // instead fetch them from the constants provided by the store
            if header.number() == &One::one() {
                self.store.chain_constants().genesis_digest_items
            } else {
                parent_header.extract_next_digest_items()?
            }
        };
        // check the digest items against the next digest items from parent header
        if pre_digest_items.global_randomness != next_digest_items.next_global_randomness {
            return Err(ImportError::InvalidDigest(
                ErrorDigestType::GlobalRandomness,
            ));
        }
        if pre_digest_items.solution_range != next_digest_items.next_solution_range {
            return Err(ImportError::InvalidDigest(ErrorDigestType::SolutionRange));
        }
        Ok(pre_digest_items)
    }

    /// Verifies that slot present in the header is strictly increasing from the slot in the parent.
    fn verify_slot(
        parent_header: &Header,
        pre_digest: &PreDigest<FarmerPublicKey, FarmerPublicKey>,
    ) -> Result<(), ImportError<Header>> {
        let parent_pre_digest = extract_pre_digest(parent_header)?;
        // equal slots are rejected too — the slot must strictly advance
        if pre_digest.slot <= parent_pre_digest.slot {
            return Err(ImportError::InvalidSlot);
        }
        Ok(())
    }

    /// Verifies the block signature present in the last digest log.
    ///
    /// Takes `&mut Header` because the seal digest is popped before computing the
    /// pre-hash (the seal is not part of what was signed) and pushed back before
    /// returning on success. On error the seal stays popped, but the header is
    /// discarded by the failed import anyway.
    fn verify_block_signature(
        header: &mut Header,
        public_key: &FarmerPublicKey,
    ) -> Result<(), ImportError<Header>> {
        let seal =
            header
                .digest_mut()
                .pop()
                .ok_or(ImportError::DigestError(DigestError::Missing(
                    ErrorDigestType::Seal,
                )))?;
        let signature = seal
            .as_subspace_seal()
            .ok_or(ImportError::InvalidDigest(ErrorDigestType::Seal))?;
        // the pre-hash of the header doesn't include the seal and that's what we sign
        let pre_hash = header.hash();
        // verify that block is signed properly
        check_reward_signature(
            pre_hash.as_ref(),
            &RewardSignature::from(&signature),
            &PublicKey::from(public_key),
            &schnorrkel::context::signing_context(REWARD_SIGNING_CONTEXT),
        )
        .map_err(|_| ImportError::InvalidBlockSignature)?;
        // push the seal back into the header
        header.digest_mut().push(seal);
        Ok(())
    }

    /// Returns the ancestor of the header at number.
    ///
    /// Returns `None` if the header is unknown, if `ancestor_number` is above the
    /// header's own number, or if an ancestor link is missing from the store.
    fn find_ancestor_of_header_at_number(
        &self,
        hash: HashOf<Header>,
        ancestor_number: NumberOf<Header>,
    ) -> Option<HeaderExt<Header>> {
        let header = self.store.header(hash)?;
        // header number must be greater than the ancestor number
        if *header.header.number() < ancestor_number {
            return None;
        }
        let headers_at_ancestor_number = self.store.headers_at_number(ancestor_number);
        // short circuit if there are no fork headers at the ancestor number
        if headers_at_ancestor_number.len() == 1 {
            return headers_at_ancestor_number.into_iter().next();
        }
        // start tree route till the ancestor
        let mut header = header;
        while *header.header.number() > ancestor_number {
            header = self.store.header(*header.header.parent_hash())?;
        }
        Some(header)
    }

    /// Prunes header and its descendant header chain(s).
    ///
    /// Works breadth-first by block number: at each successive number, every header
    /// whose parent was pruned at the previous number is pruned in turn.
    fn prune_header_and_its_descendants(
        &mut self,
        header: HeaderExt<Header>,
    ) -> Result<(), ImportError<Header>> {
        // prune the header
        self.store.prune_header(header.header.hash());
        // start pruning all the descendant headers from the current header
        //        header(at number n)
        //        /                \
        //   descendant-1     descendant-2
        //       /
        //  descendant-3
        let mut pruned_parent_hashes = vec![header.header.hash()];
        let mut current_number = *header.header.number();
        while !pruned_parent_hashes.is_empty() {
            current_number = current_number
                .checked_add(&One::one())
                .ok_or(ImportError::ArithmeticError(ArithmeticError::Overflow))?;
            // get headers at the current number and filter the headers descended from the pruned parents
            let descendant_header_hashes = self
                .store
                .headers_at_number(current_number)
                .into_iter()
                .filter(|descendant_header| {
                    pruned_parent_hashes.contains(descendant_header.header.parent_hash())
                })
                .map(|header| header.header.hash())
                .collect::<Vec<HashOf<Header>>>();
            // prune the descendant headers
            descendant_header_hashes
                .iter()
                .for_each(|hash| self.store.prune_header(*hash));
            pruned_parent_hashes = descendant_header_hashes;
        }
        Ok(())
    }

    /// Returns the total pieces on chain where chain_tip is the hash of the tip of the chain.
    /// We count the total segments to calculate total pieces as follows,
    /// - Fetch the segment count from the store.
    /// - Count the segments from each header that is not finalized.
    // TODO: This function will become useful in the future for verifying sector expiration
    #[allow(dead_code)]
    fn total_pieces(&self, chain_tip: HashOf<Header>) -> Result<u64, ImportError<Header>> {
        // fetch the segment count from the store
        let segment_commitments_count_till_finalized_header = self.store.number_of_segments();
        let finalized_header = self.store.finalized_header();
        let mut segment_commitments_count = segment_commitments_count_till_finalized_header;
        // special case when Block #1 is not finalized yet, then include the genesis segment count
        if finalized_header.header.number().is_zero() {
            segment_commitments_count += self
                .store
                .chain_constants()
                .genesis_segment_commitments
                .len() as u64;
        }
        // calculate segment count present in each header from header till finalized header
        let mut header = self
            .store
            .header(chain_tip)
            .ok_or(ImportError::MissingHeader(chain_tip))?;
        while header.header.hash() != finalized_header.header.hash() {
            let digest_items = extract_subspace_digest_items::<
                _,
                FarmerPublicKey,
                FarmerPublicKey,
                FarmerSignature,
            >(&header.header)?;
            segment_commitments_count += digest_items.segment_commitments.len() as u64;
            header = self
                .store
                .header(*header.header.parent_hash())
                .ok_or_else(|| ImportError::MissingParent(header.header.hash()))?;
        }
        Ok(segment_commitments_count * ArchivedHistorySegment::NUM_PIECES as u64)
    }

    /// Finds a segment commitment mapped against a segment index in the chain with chain_tip as the
    /// tip of the chain.
    /// We try to find the segment commitment as follows:
    /// - Find segment commitment from the store and return if found.
    /// - Find segment commitment from the genesis segment commitment and return if found.
    /// - Find the segment commitment present in the non finalized headers.
    fn find_segment_commitment_for_segment_index(
        &self,
        segment_index: SegmentIndex,
        chain_tip: HashOf<Header>,
    ) -> Result<Option<SegmentCommitment>, ImportError<Header>> {
        // check if the segment commitment is already in the store
        if let Some(segment_commitment) = self.store.segment_commitment(segment_index) {
            return Ok(Some(segment_commitment));
        };
        // special case: check the genesis segment commitments if the Block #1 is not finalized yet
        if let Some(segment_commitment) = self
            .store
            .chain_constants()
            .genesis_segment_commitments
            .get(&segment_index)
        {
            return Ok(Some(*segment_commitment));
        }
        // find the segment commitment from the headers which are not finalized yet.
        // walk parent links from chain_tip back to the finalized header
        let finalized_header = self.store.finalized_header();
        let mut header = self
            .store
            .header(chain_tip)
            .ok_or(ImportError::MissingHeader(chain_tip))?;
        while header.header.hash() != finalized_header.header.hash() {
            let digest_items = extract_subspace_digest_items::<
                _,
                FarmerPublicKey,
                FarmerPublicKey,
                FarmerSignature,
            >(&header.header)?;
            if let Some(segment_commitment) = digest_items.segment_commitments.get(&segment_index) {
                return Ok(Some(*segment_commitment));
            }
            header = self
                .store
                .header(*header.header.parent_hash())
                .ok_or_else(|| ImportError::MissingParent(header.header.hash()))?;
        }
        Ok(None)
    }

    /// Stores finalized header and segment commitments present in the header.
    fn store_finalized_header_and_segment_commitments(
        &mut self,
        header: &Header,
    ) -> Result<(), ImportError<Header>> {
        let digests_items =
            extract_subspace_digest_items::<_, FarmerPublicKey, FarmerPublicKey, FarmerSignature>(
                header,
            )?;
        // mark header as finalized
        self.store.finalize_header(header.hash());
        // store the segment commitments present in the header digests
        self.store
            .store_segment_commitments(digests_items.segment_commitments);
        Ok(())
    }

    /// Finalize the header at K-depth from the best block and prune remaining forks at that number.
    /// We want to finalize the header from the current finalized header until the K-depth number of the best.
    /// 1. In an ideal scenario, the current finalized head is one number less than number to be finalized.
    /// 2. If there was a re-org to longer chain when new header was imported, we do not want to miss
    ///    pruning fork headers between current and to be finalized number. So we go number by number and prune fork headers.
    /// 3. If there was a re-org to a shorter chain and to be finalized header was below the current finalized head,
    ///    fail and let user know.
    fn finalize_header_at_k_depth(&mut self) -> Result<(), ImportError<Header>> {
        let k_depth = self.store.chain_constants().k_depth;
        let current_finalized_header = self.store.finalized_header();
        // ensure we have imported at least K-depth number of headers
        let number_to_finalize = match self
            .store
            .best_header()
            .header
            .number()
            .checked_sub(&k_depth)
        {
            // we have not progressed that far to finalize yet
            None => {
                // if the chain re-org happened to smaller chain and if there was any finalized heads,
                // fail and let the user decide what to do
                if *current_finalized_header.header.number() > Zero::zero() {
                    return Err(ImportError::SwitchedToForkBelowArchivingDepth);
                }
                return Ok(());
            }
            Some(number) => number,
        };
        match number_to_finalize.cmp(current_finalized_header.header.number()) {
            Ordering::Less => Err(ImportError::SwitchedToForkBelowArchivingDepth),
            // nothing to do as we finalized the header already
            Ordering::Equal => Ok(()),
            // finalize heads one after the other and prune any forks
            Ordering::Greater => {
                let mut current_finalized_number = *current_finalized_header.header.number();
                while current_finalized_number < number_to_finalize {
                    current_finalized_number = current_finalized_number
                        .checked_add(&One::one())
                        .ok_or(ImportError::ArithmeticError(ArithmeticError::Overflow))?;
                    // find the headers at the number to be finalized
                    let headers_at_number_to_be_finalized =
                        self.store.headers_at_number(current_finalized_number);
                    // if there is just one header at that number, we mark that header as finalized and move one
                    if headers_at_number_to_be_finalized.len() == 1 {
                        let header_to_finalize = headers_at_number_to_be_finalized
                            .first()
                            .expect("First item must exist as the len is 1.");
                        self.store_finalized_header_and_segment_commitments(
                            &header_to_finalize.header,
                        )?
                    } else {
                        // there are multiple headers at the number to be finalized.
                        // find the correct ancestor header of the current best header.
                        // finalize it and prune all the remaining fork headers.
                        let current_best_header = self.store.best_header();
                        let (current_best_hash, current_best_number) = (
                            current_best_header.header.hash(),
                            *current_best_header.header.number(),
                        );
                        let header_to_finalize = self
                            .find_ancestor_of_header_at_number(
                                current_best_hash,
                                current_finalized_number,
                            )
                            .ok_or(ImportError::MissingAncestorHeader(
                                current_best_hash,
                                current_best_number,
                            ))?;
                        // filter fork headers and prune them
                        let headers_to_prune = headers_at_number_to_be_finalized
                            .into_iter()
                            .filter(|header| {
                                header.header.hash() != header_to_finalize.header.hash()
                            })
                            .collect::<Vec<HeaderExt<Header>>>();
                        for header_to_prune in headers_to_prune {
                            self.prune_header_and_its_descendants(header_to_prune)?;
                        }
                        // mark the header as finalized
                        self.store_finalized_header_and_segment_commitments(
                            &header_to_finalize.header,
                        )?
                    }
                }
                Ok(())
            }
        }
    }

    /// Ensure light client storage is bounded by the defined storage bound constant.
    /// If unbounded, we keep all the finalized headers in the store.
    /// If bounded, we fetch the finalized head and then prune all the headers
    /// beyond K depth as per bounded value.
    /// If finalized head is at x and storage is bounded to keep y headers beyond, then
    /// prune all headers at and below (x - y - 1)
    fn ensure_storage_bound(&mut self) {
        let storage_bound = self.store.chain_constants().storage_bound;
        let number_of_headers_to_keep_beyond_k_depth = match storage_bound {
            // unbounded storage, so return
            StorageBound::Unbounded => return,
            // bounded storage, keep only # number of headers beyond K depth
            StorageBound::NumberOfHeaderToKeepBeyondKDepth(number_of_headers_to_keep) => {
                number_of_headers_to_keep
            }
        };
        let finalized_head_number = *self.store.finalized_header().header.number();
        // (finalized_number - bound_value - 1); checked_sub returns None near genesis,
        // in which case there is nothing to prune yet
        let mut maybe_prune_headers_from_number = finalized_head_number
            .checked_sub(&number_of_headers_to_keep_beyond_k_depth)
            .and_then(|number| number.checked_sub(&One::one()));
        let mut headers_to_prune = maybe_prune_headers_from_number
            .map(|number| self.store.headers_at_number(number))
            .unwrap_or_default();
        while !headers_to_prune.is_empty() {
            // loop and prune even though there should be only 1 head beyond finalized head
            for header in headers_to_prune {
                self.store.prune_header(header.header.hash())
            }
            maybe_prune_headers_from_number =
                maybe_prune_headers_from_number.and_then(|number| number.checked_sub(&One::one()));
            headers_to_prune = maybe_prune_headers_from_number
                .map(|number| self.store.headers_at_number(number))
                .unwrap_or_default();
        }
    }
}
|
use std::path::Path;
use futures::sink::SinkExt;
use futures::stream::{Stream, StreamExt};
use tokio::net::UnixStream;
use tokio_util::codec::{Framed, LinesCodec};
use persist_core::error::Error;
use persist_core::protocol::*;
/// Client half of the persist daemon protocol: a request/response connection
/// over a Unix domain socket carrying one JSON message per line.
pub struct DaemonClient {
    // Framed transport: `LinesCodec` delimits each JSON message by newline.
    socket: Framed<UnixStream, LinesCodec>,
}
impl DaemonClient {
pub async fn new(socket_path: impl AsRef<Path>) -> Result<DaemonClient, Error> {
let socket = UnixStream::connect(socket_path).await?;
let framed = Framed::new(socket, LinesCodec::new());
Ok(DaemonClient { socket: framed })
}
pub async fn kill(&mut self) -> Result<(), Error> {
let request = Request::Kill;
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
Ok(())
}
pub async fn version(&mut self) -> Result<VersionResponse, Error> {
let request = Request::Version;
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let response = match response {
Response::Version(response) => response,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(response)
}
pub async fn list(&mut self, request: ListRequest) -> Result<Vec<ListResponse>, Error> {
let request = Request::List(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let responses = match response {
Response::List(responses) => responses,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(responses)
}
pub async fn start(&mut self, request: StartRequest) -> Result<StartResponse, Error> {
let request = Request::Start(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let response = match response {
Response::Start(response) => response,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(response)
}
pub async fn stop(&mut self, request: StopRequest) -> Result<Vec<StopResponse>, Error> {
let request = Request::Stop(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let responses = match response {
Response::Stop(responses) => responses,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(responses)
}
pub async fn restart(
&mut self,
request: RestartRequest,
) -> Result<Vec<RestartResponse>, Error> {
let request = Request::Restart(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let responses = match response {
Response::Restart(responses) => responses,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(responses)
}
pub async fn delete(&mut self, request: DeleteRequest) -> Result<Vec<DeleteResponse>, Error> {
let request = Request::Delete(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let responses = match response {
Response::Delete(responses) => responses,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(responses)
}
pub async fn info(&mut self, request: InfoRequest) -> Result<InfoResponse, Error> {
let request = Request::Info(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let response = match response {
Response::Info(response) => response,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(response)
}
pub async fn logs<'a>(
&'a mut self,
request: LogsRequest,
) -> Result<impl Stream<Item = Result<LogsResponse, Error>> + 'a, Error> {
let request = Request::Logs(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
match response {
Response::Logs(LogsResponse::Subscribed) => {}
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
let stream = self.socket.by_ref().map(|item| {
let line = item?;
let response = json::from_str::<Response>(line.as_str())?;
let response = match response {
Response::Logs(response) => response,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(response)
});
Ok(stream)
}
pub async fn dump(&mut self, request: DumpRequest) -> Result<Vec<DumpResponse>, Error> {
let request = Request::Dump(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let responses = match response {
Response::Dump(responses) => responses,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(responses)
}
pub async fn restore(
&mut self,
request: RestoreRequest,
) -> Result<Vec<RestoreResponse>, Error> {
let request = Request::Restore(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let responses = match response {
Response::Restore(responses) => responses,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(responses)
}
pub async fn prune(&mut self, request: PruneRequest) -> Result<PruneResponse, Error> {
let request = Request::Prune(request);
let serialized = json::to_string(&request)?;
self.socket.send(serialized).await?;
let response = if let Some(response) = self.socket.next().await {
let response = response?;
json::from_str::<Response>(response.as_str())?
} else {
return Err(Error::from(String::from(
"daemon closed connection without responding",
)));
};
let response = match response {
Response::Prune(response) => response,
Response::Error(err) => return Err(Error::from(err)),
_ => return Err(Error::from(String::from("unexpected response from daemon"))),
};
Ok(response)
}
}
|
// Auto-generated register accessor types (svd2rust-style; see the register
// description attached to `DDRCTRL_CRCPARCTL0_SPEC` below for the full
// read/modify/write API).
#[doc = "Register `DDRCTRL_CRCPARCTL0` reader"]
pub type R = crate::R<DDRCTRL_CRCPARCTL0_SPEC>;
#[doc = "Register `DDRCTRL_CRCPARCTL0` writer"]
pub type W = crate::W<DDRCTRL_CRCPARCTL0_SPEC>;
#[doc = "Field `DFI_ALERT_ERR_INT_EN` reader - DFI_ALERT_ERR_INT_EN"]
pub type DFI_ALERT_ERR_INT_EN_R = crate::BitReader;
#[doc = "Field `DFI_ALERT_ERR_INT_EN` writer - DFI_ALERT_ERR_INT_EN"]
pub type DFI_ALERT_ERR_INT_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DFI_ALERT_ERR_INT_CLR` reader - DFI_ALERT_ERR_INT_CLR"]
pub type DFI_ALERT_ERR_INT_CLR_R = crate::BitReader;
#[doc = "Field `DFI_ALERT_ERR_INT_CLR` writer - DFI_ALERT_ERR_INT_CLR"]
pub type DFI_ALERT_ERR_INT_CLR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DFI_ALERT_ERR_CNT_CLR` reader - DFI_ALERT_ERR_CNT_CLR"]
pub type DFI_ALERT_ERR_CNT_CLR_R = crate::BitReader;
#[doc = "Field `DFI_ALERT_ERR_CNT_CLR` writer - DFI_ALERT_ERR_CNT_CLR"]
pub type DFI_ALERT_ERR_CNT_CLR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each method extracts one bit of the captured register value.
impl R {
    #[doc = "Bit 0 - DFI_ALERT_ERR_INT_EN"]
    #[inline(always)]
    pub fn dfi_alert_err_int_en(&self) -> DFI_ALERT_ERR_INT_EN_R {
        DFI_ALERT_ERR_INT_EN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - DFI_ALERT_ERR_INT_CLR"]
    #[inline(always)]
    pub fn dfi_alert_err_int_clr(&self) -> DFI_ALERT_ERR_INT_CLR_R {
        DFI_ALERT_ERR_INT_CLR_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - DFI_ALERT_ERR_CNT_CLR"]
    #[inline(always)]
    pub fn dfi_alert_err_cnt_clr(&self) -> DFI_ALERT_ERR_CNT_CLR_R {
        DFI_ALERT_ERR_CNT_CLR_R::new(((self.bits >> 2) & 1) != 0)
    }
}
// Write accessors: each returns a bit-writer proxy positioned at the field's
// bit offset (the const generic parameter).
impl W {
    #[doc = "Bit 0 - DFI_ALERT_ERR_INT_EN"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_alert_err_int_en(&mut self) -> DFI_ALERT_ERR_INT_EN_W<DDRCTRL_CRCPARCTL0_SPEC, 0> {
        DFI_ALERT_ERR_INT_EN_W::new(self)
    }
    #[doc = "Bit 1 - DFI_ALERT_ERR_INT_CLR"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_alert_err_int_clr(&mut self) -> DFI_ALERT_ERR_INT_CLR_W<DDRCTRL_CRCPARCTL0_SPEC, 1> {
        DFI_ALERT_ERR_INT_CLR_W::new(self)
    }
    #[doc = "Bit 2 - DFI_ALERT_ERR_CNT_CLR"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_alert_err_cnt_clr(&mut self) -> DFI_ALERT_ERR_CNT_CLR_W<DDRCTRL_CRCPARCTL0_SPEC, 2> {
        DFI_ALERT_ERR_CNT_CLR_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Marked unsafe by the generator: raw writes bypass the per-field
    // proxies, so the caller is responsible for a valid bit pattern.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Register marker type plus the trait impls that enable read/write/reset.
#[doc = "DDRCTRL CRC parity control register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrctrl_crcparctl0::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ddrctrl_crcparctl0::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRCTRL_CRCPARCTL0_SPEC;
impl crate::RegisterSpec for DDRCTRL_CRCPARCTL0_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrctrl_crcparctl0::R`](R) reader structure"]
impl crate::Readable for DDRCTRL_CRCPARCTL0_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ddrctrl_crcparctl0::W`](W) writer structure"]
impl crate::Writable for DDRCTRL_CRCPARCTL0_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DDRCTRL_CRCPARCTL0 to value 0"]
impl crate::Resettable for DDRCTRL_CRCPARCTL0_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// svd2rust-generated reader plumbing for ITLINE30 (read-only status register).
#[doc = "Register `ITLINE30` reader"]
pub type R = crate::R<ITLINE30_SPEC>;
// NOTE(review): the field is named USART2 but the description from the
// vendor SVD reads "CEC" — likely a documentation slip in the source SVD;
// confirm against the reference manual.
#[doc = "Field `USART2` reader - CEC"]
pub type USART2_R = crate::BitReader;
// Read accessor for the single defined bit of the status register.
impl R {
    #[doc = "Bit 0 - CEC"]
    #[inline(always)]
    pub fn usart2(&self) -> USART2_R {
        USART2_R::new((self.bits & 1) != 0)
    }
}
#[doc = "interrupt line 30 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`itline30::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ITLINE30_SPEC;
impl crate::RegisterSpec for ITLINE30_SPEC {
    type Ux = u32;
}
// Read-only register: only `Readable` is implemented (no `Writable` impl
// and no writer type are generated).
#[doc = "`read()` method returns [`itline30::R`](R) reader structure"]
impl crate::Readable for ITLINE30_SPEC {}
#[doc = "`reset()` method sets ITLINE30 to value 0"]
impl crate::Resettable for ITLINE30_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Older svd2rust style: R/W are parameterized by the raw integer type and
// the register marker type from the parent module.
#[doc = "Reader of register DATA_CHANNELS_H1"]
pub type R = crate::R<u32, super::DATA_CHANNELS_H1>;
#[doc = "Writer for register DATA_CHANNELS_H1"]
pub type W = crate::W<u32, super::DATA_CHANNELS_H1>;
// The register resets to all zeroes.
#[doc = "Register DATA_CHANNELS_H1 `reset()`'s with value 0"]
impl crate::ResetValue for super::DATA_CHANNELS_H1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DATA_CHANNELS_H1`"]
pub type DATA_CHANNELS_H1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_CHANNELS_H1`"]
// Holds a mutable borrow of the register writer; `bits` below merges the
// field value into the register.
pub struct DATA_CHANNELS_H1_W<'a> {
    w: &'a mut W,
}
// Write proxy implementation for the 5-bit field occupying bits 0..=4.
impl<'a> DATA_CHANNELS_H1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Marked unsafe by the generator; the 0x1f mask confines the write to
    // bits 0..=4, leaving the other register bits untouched.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x1f) | ((value as u32) & 0x1f);
        self.w
    }
}
// Read accessor for the 5-bit channel-usage bitmap (bits 0..=4).
impl R {
    #[doc = "Bits 0:4 - This register field indicates which of the data channels are in use. This stores the information for the upper 5 data channel indices. '1' indicates the corresponding data channel is used and '0' indicates the channel is unused. Note: The Data channel map 0 and data channel map 1 are two sets of channel maps stored, common for all the connections. At any given time, only two maps can be maintained and the connections will use one of the two sets as indicated by the channel map index field in the CE_CNFG_STS registers specific to the link. Firmware must also manage to update this field along with the map."]
    #[inline(always)]
    pub fn data_channels_h1(&self) -> DATA_CHANNELS_H1_R {
        DATA_CHANNELS_H1_R::new((self.bits & 0x1f) as u8)
    }
}
// Write accessor returning the field's write proxy.
impl W {
    #[doc = "Bits 0:4 - This register field indicates which of the data channels are in use. This stores the information for the upper 5 data channel indices. '1' indicates the corresponding data channel is used and '0' indicates the channel is unused. Note: The Data channel map 0 and data channel map 1 are two sets of channel maps stored, common for all the connections. At any given time, only two maps can be maintained and the connections will use one of the two sets as indicated by the channel map index field in the CE_CNFG_STS registers specific to the link. Firmware must also manage to update this field along with the map."]
    #[inline(always)]
    pub fn data_channels_h1(&mut self) -> DATA_CHANNELS_H1_W {
        DATA_CHANNELS_H1_W { w: self }
    }
}
|
//! Define an Orientation and associated methods.
use vec::Vec2;
/// Describes a vertical or horizontal orientation for a view.
#[derive(Clone,Copy,Debug,PartialEq)]
pub enum Orientation {
    /// Horizontal orientation (along the x axis).
    Horizontal,
    /// Vertical orientation (along the y axis).
    Vertical,
}
impl Orientation {
    /// Returns the component of `v` along this orientation:
    /// `v.x` for `Horizontal`, `v.y` for `Vertical`.
    pub fn get(&self, v: &Vec2) -> usize {
        if let Orientation::Horizontal = *self {
            v.x
        } else {
            v.y
        }
    }
    /// Returns the perpendicular orientation.
    pub fn swap(&self) -> Self {
        match *self {
            Orientation::Vertical => Orientation::Horizontal,
            Orientation::Horizontal => Orientation::Vertical,
        }
    }
    /// Returns a mutable reference into `v` for the component selected by
    /// this orientation.
    pub fn get_ref<'a, 'b>(&'a self, v: &'b mut Vec2) -> &'b mut usize {
        if let Orientation::Horizontal = *self {
            &mut v.x
        } else {
            &mut v.y
        }
    }
    /// Stacks the given sizes along this orientation and returns the size
    /// of the required bounding box.
    ///
    /// For a horizontal view, returns (Sum(x), Max(y));
    /// for a vertical view, returns (Max(x), Sum(y)).
    pub fn stack<'a, T: Iterator<Item = &'a Vec2>>(&self, iter: T) -> Vec2 {
        match *self {
            Orientation::Horizontal => {
                iter.fold(Vec2::zero(), |acc, size| acc.stack_horizontal(size))
            }
            Orientation::Vertical => {
                iter.fold(Vec2::zero(), |acc, size| acc.stack_vertical(size))
            }
        }
    }
}
|
#![allow(clippy::unused_unit)]
use once_cell::sync::Lazy;
use std::cmp::Reverse;
use wasm_bindgen::prelude::*;
mod filter;
mod storage;
use crate::filter::{Id, PostFilters, Score, XorfProxy};
use crate::storage::Storage;
// Use wee_alloc as the global allocator to keep the wasm binary small.
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
// Pre-built per-post search filters, lazily deserialized once from the
// storage blob that the site generator embeds at compile time.
static FILTERS: Lazy<PostFilters> = Lazy::new(|| {
    let bytes = include_bytes!("../../../.generated/fulltext-search/storage");
    Storage::from_bytes(bytes).unwrap().filters
});
/// Splits `query` into lowercase search tokens.
///
/// Tokens come from `split_whitespace`, which never yields empty or
/// all-whitespace items, so the previous
/// `.filter(|token| !token.trim().is_empty())` step was dead code and has
/// been removed.
fn tokenize(query: &str) -> Vec<String> {
    query
        .to_lowercase()
        .split_whitespace()
        .map(String::from)
        .collect()
}
/// Scores `title` against the search terms, combining an exact-token title
/// score with the fulltext-filter score for the post body.
///
/// The previous version mapped each matching term through `String::from`
/// before counting, allocating a `String` per match only to discard it;
/// counting the filtered items directly is equivalent and allocation-free.
fn score(title: &str, search_terms: &[String], filter: &XorfProxy) -> usize {
    let tokens = tokenize(title);
    // Number of search terms that appear as whole tokens in the title.
    let title_score = search_terms
        .iter()
        .filter(|term| tokens.contains(term))
        .count();
    // Weight title matches more heavily than content-body matches.
    title_score * 5 + filter.score(search_terms)
}
#[wasm_bindgen]
/// Runs a fulltext search over all indexed posts and returns (as JSON)
/// the ids of the top `per_page` results, highest score first.
pub fn search(query: String, per_page: usize) -> JsValue {
    let terms = tokenize(&query);
    // Score every indexed post, keeping only those with a positive score.
    let mut scored: Vec<(&Id, usize)> = FILTERS
        .iter()
        .filter_map(|(id, filter)| {
            let s = score(&id.0, &terms, filter);
            if s > 0 {
                Some((id, s))
            } else {
                None
            }
        })
        .collect();
    // Stable sort by descending score, so ties keep their original order.
    scored.sort_by_key(|entry| Reverse(entry.1));
    let page: Vec<&Id> = scored
        .into_iter()
        .take(per_page)
        .map(|(id, _)| id)
        .collect();
    JsValue::from_serde(&page).unwrap()
}
|
//! Functions having to do with primes for Project Euler.
// credit goes to github.com/roycrippen/euler_rust for some of these, his style is great
/// Returns the number of decimal digits in `n`.
///
/// Note: `len_int(0)` returns 0, preserving the behavior of the original
/// iterator-based implementation.
///
/// Bug fix: the previous version computed `10u32.pow(l)` with a growing
/// exponent; once `l` reached 10 — i.e. for any 10-digit input such as
/// `4_000_000_000` — the power overflowed `u32`, panicking in debug builds.
/// Repeated division avoids the overflow entirely.
fn len_int(n: u32) -> u32 {
    let mut remaining = n;
    let mut digits = 0;
    while remaining != 0 {
        remaining /= 10;
        digits += 1;
    }
    digits
}
/// Iterator over Fibonacci numbers; seeded with `a = 1, b = 1` it yields
/// 2, 3, 5, 8, 13, ...
#[derive(Debug)]
pub struct Fibonacci {
    // Most recently produced value.
    a: usize,
    // The value produced before `a`.
    b: usize,
}
impl Iterator for Fibonacci {
    type Item = usize;
    /// Advances the state `(a, b) -> (a + b, a)` and yields the new `a`.
    ///
    /// Bug fix: the previous implementation did
    /// `std::mem::replace(&mut (self.a, self.b), next)`, but
    /// `(self.a, self.b)` is a *tuple expression* — it copies the fields
    /// into a temporary, so `replace` overwrote that temporary and the
    /// iterator's state never advanced (it yielded `1` forever).
    ///
    /// Note: the sum will overflow `usize` after enough iterations
    /// (panicking in debug builds), as with any unchecked Fibonacci.
    fn next(&mut self) -> Option<usize> {
        let sum = self.a + self.b;
        self.b = self.a;
        self.a = sum;
        Some(self.a)
    }
}
impl PartialEq for Fibonacci {
    /// Two `Fibonacci` iterators are equal when their internal state matches.
    fn eq(&self, other: &Fibonacci) -> bool {
        self.a == other.a && self.b == other.b
    }
    // The hand-written `ne` was removed: `PartialEq::ne` has a default
    // implementation (`!self.eq(other)`) that did exactly the same thing.
}
/// Constructs a `Fibonacci` iterator seeded with `a = 1, b = 1`.
pub fn fibonacci() -> Fibonacci {
    Fibonacci { a: 1, b: 1 }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    // Sanity check: a freshly constructed iterator has the expected seed state.
    #[test]
    fn fib_works() {
        assert_eq!(fibonacci(), Fibonacci { a: 1, b: 1 });
    }
}
|
// svd2rust-generated reader/writer plumbing for OTG_FS HCCHAR0 (host
// channel-0 characteristics). Multi-bit fields use FieldReader/FieldWriter
// with an explicit bit width; single-bit fields use the Bit types.
#[doc = "Register `HCCHAR0` reader"]
pub type R = crate::R<HCCHAR0_SPEC>;
#[doc = "Register `HCCHAR0` writer"]
pub type W = crate::W<HCCHAR0_SPEC>;
#[doc = "Field `MPSIZ` reader - Maximum packet size"]
pub type MPSIZ_R = crate::FieldReader<u16>;
#[doc = "Field `MPSIZ` writer - Maximum packet size"]
pub type MPSIZ_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 11, O, u16>;
#[doc = "Field `EPNUM` reader - Endpoint number"]
pub type EPNUM_R = crate::FieldReader;
#[doc = "Field `EPNUM` writer - Endpoint number"]
pub type EPNUM_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `EPDIR` reader - Endpoint direction"]
pub type EPDIR_R = crate::BitReader;
#[doc = "Field `EPDIR` writer - Endpoint direction"]
pub type EPDIR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LSDEV` reader - Low-speed device"]
pub type LSDEV_R = crate::BitReader;
#[doc = "Field `LSDEV` writer - Low-speed device"]
pub type LSDEV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EPTYP` reader - Endpoint type"]
pub type EPTYP_R = crate::FieldReader;
#[doc = "Field `EPTYP` writer - Endpoint type"]
pub type EPTYP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `MCNT` reader - Multicount"]
pub type MCNT_R = crate::FieldReader;
#[doc = "Field `MCNT` writer - Multicount"]
pub type MCNT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `DAD` reader - Device address"]
pub type DAD_R = crate::FieldReader;
#[doc = "Field `DAD` writer - Device address"]
pub type DAD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `ODDFRM` reader - Odd frame"]
pub type ODDFRM_R = crate::BitReader;
#[doc = "Field `ODDFRM` writer - Odd frame"]
pub type ODDFRM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CHDIS` reader - Channel disable"]
pub type CHDIS_R = crate::BitReader;
#[doc = "Field `CHDIS` writer - Channel disable"]
pub type CHDIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CHENA` reader - Channel enable"]
pub type CHENA_R = crate::BitReader;
#[doc = "Field `CHENA` writer - Channel enable"]
pub type CHENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors, one per field, each shifting and masking the cached
// register value. Bit 16 has no accessor: no field is defined between
// EPDIR (bit 15) and LSDEV (bit 17).
impl R {
    #[doc = "Bits 0:10 - Maximum packet size"]
    #[inline(always)]
    pub fn mpsiz(&self) -> MPSIZ_R {
        MPSIZ_R::new((self.bits & 0x07ff) as u16)
    }
    #[doc = "Bits 11:14 - Endpoint number"]
    #[inline(always)]
    pub fn epnum(&self) -> EPNUM_R {
        EPNUM_R::new(((self.bits >> 11) & 0x0f) as u8)
    }
    #[doc = "Bit 15 - Endpoint direction"]
    #[inline(always)]
    pub fn epdir(&self) -> EPDIR_R {
        EPDIR_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 17 - Low-speed device"]
    #[inline(always)]
    pub fn lsdev(&self) -> LSDEV_R {
        LSDEV_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bits 18:19 - Endpoint type"]
    #[inline(always)]
    pub fn eptyp(&self) -> EPTYP_R {
        EPTYP_R::new(((self.bits >> 18) & 3) as u8)
    }
    #[doc = "Bits 20:21 - Multicount"]
    #[inline(always)]
    pub fn mcnt(&self) -> MCNT_R {
        MCNT_R::new(((self.bits >> 20) & 3) as u8)
    }
    #[doc = "Bits 22:28 - Device address"]
    #[inline(always)]
    pub fn dad(&self) -> DAD_R {
        DAD_R::new(((self.bits >> 22) & 0x7f) as u8)
    }
    #[doc = "Bit 29 - Odd frame"]
    #[inline(always)]
    pub fn oddfrm(&self) -> ODDFRM_R {
        ODDFRM_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Channel disable"]
    #[inline(always)]
    pub fn chdis(&self) -> CHDIS_R {
        CHDIS_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Channel enable"]
    #[inline(always)]
    pub fn chena(&self) -> CHENA_R {
        CHENA_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write accessors, one per field, each returning a writer proxy positioned
// at the field's bit offset (the const generic parameter).
impl W {
    #[doc = "Bits 0:10 - Maximum packet size"]
    #[inline(always)]
    #[must_use]
    pub fn mpsiz(&mut self) -> MPSIZ_W<HCCHAR0_SPEC, 0> {
        MPSIZ_W::new(self)
    }
    #[doc = "Bits 11:14 - Endpoint number"]
    #[inline(always)]
    #[must_use]
    pub fn epnum(&mut self) -> EPNUM_W<HCCHAR0_SPEC, 11> {
        EPNUM_W::new(self)
    }
    #[doc = "Bit 15 - Endpoint direction"]
    #[inline(always)]
    #[must_use]
    pub fn epdir(&mut self) -> EPDIR_W<HCCHAR0_SPEC, 15> {
        EPDIR_W::new(self)
    }
    #[doc = "Bit 17 - Low-speed device"]
    #[inline(always)]
    #[must_use]
    pub fn lsdev(&mut self) -> LSDEV_W<HCCHAR0_SPEC, 17> {
        LSDEV_W::new(self)
    }
    #[doc = "Bits 18:19 - Endpoint type"]
    #[inline(always)]
    #[must_use]
    pub fn eptyp(&mut self) -> EPTYP_W<HCCHAR0_SPEC, 18> {
        EPTYP_W::new(self)
    }
    #[doc = "Bits 20:21 - Multicount"]
    #[inline(always)]
    #[must_use]
    pub fn mcnt(&mut self) -> MCNT_W<HCCHAR0_SPEC, 20> {
        MCNT_W::new(self)
    }
    #[doc = "Bits 22:28 - Device address"]
    #[inline(always)]
    #[must_use]
    pub fn dad(&mut self) -> DAD_W<HCCHAR0_SPEC, 22> {
        DAD_W::new(self)
    }
    #[doc = "Bit 29 - Odd frame"]
    #[inline(always)]
    #[must_use]
    pub fn oddfrm(&mut self) -> ODDFRM_W<HCCHAR0_SPEC, 29> {
        ODDFRM_W::new(self)
    }
    #[doc = "Bit 30 - Channel disable"]
    #[inline(always)]
    #[must_use]
    pub fn chdis(&mut self) -> CHDIS_W<HCCHAR0_SPEC, 30> {
        CHDIS_W::new(self)
    }
    #[doc = "Bit 31 - Channel enable"]
    #[inline(always)]
    #[must_use]
    pub fn chena(&mut self) -> CHENA_W<HCCHAR0_SPEC, 31> {
        CHENA_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Marked unsafe by the generator: raw writes bypass the per-field
    // proxies, so the caller is responsible for a valid bit pattern.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Register marker type plus the trait impls that enable read/write/reset.
#[doc = "OTG_FS host channel-0 characteristics register (OTG_FS_HCCHAR0)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hcchar0::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hcchar0::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HCCHAR0_SPEC;
impl crate::RegisterSpec for HCCHAR0_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`hcchar0::R`](R) reader structure"]
impl crate::Readable for HCCHAR0_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hcchar0::W`](W) writer structure"]
impl crate::Writable for HCCHAR0_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HCCHAR0 to value 0"]
impl crate::Resettable for HCCHAR0_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::{api::get_post_view, api::get_posts, msg::Msg, state::State};
use anyhow::{bail, Result};
use futures::future::BoxFuture;
use termion::event::Key;
use tui::widgets::ListState;
/// Single Elm-style update step: applies `msg` against the state stack,
/// mutating it in place. Returns `Ok(Some(future))` when the message kicks
/// off asynchronous work (the future resolves to the follow-up `Msg`),
/// `Ok(None)` for purely synchronous transitions, and `Err` to terminate
/// the event loop (quit keys and propagated errors).
pub fn update(
    msg: Msg,
    state_stack: &mut Vec<State>,
) -> Result<Option<BoxFuture<'static, Result<Msg>>>> {
    // Index of the stack top, captured before the slice borrow below.
    // NOTE(review): this underflows if the stack is ever empty — the caller
    // appears to guarantee at least one state; confirm.
    let last_state_index = state_stack.len() - 1;
    match (state_stack.as_mut_slice(), msg) {
        // Propagate any reported error, tearing down the event loop.
        (_, Msg::Error(e)) => {
            bail!(e);
        }
        // Show a loading screen and start fetching the subreddit listing.
        (_, Msg::FetchSubreddit(sub)) => {
            state_stack.push(State::Loading);
            return Ok(Some(Box::pin(async move {
                let posts = get_posts(sub.as_deref()).await?;
                Ok(Msg::SubredditResponse(posts, sub))
            })));
        }
        // Replace the loading screen with the fetched post list, selecting
        // the first post when the list is non-empty.
        ([.., State::Loading], Msg::SubredditResponse(posts, sub)) => {
            let mut list_state = ListState::default();
            if !posts.is_empty() {
                list_state.select(Some(0));
            }
            state_stack[last_state_index] = State::SubList(posts, list_state, sub);
        }
        // 'r': refresh the current subreddit.
        ([.., State::SubList(_, _, sub)], Msg::Input(Key::Char('r'))) => {
            return update(Msg::FetchSubreddit(sub.clone()), state_stack);
        }
        // 'j': move the post-list selection down, clamped to the last post.
        ([.., State::SubList(posts, ref mut list_state, _)], Msg::Input(Key::Char('j'))) => {
            list_state.select(list_state.selected().map(|s| {
                if s < posts.len() - 1 {
                    s + 1
                } else {
                    s
                }
            }));
        }
        // 'j': move the comment-view selection down.
        // NOTE(review): the bound allows `s == comments.len()`, one past the
        // last comment — presumably the rendered list has an extra row for
        // the post itself; confirm against the view code.
        ([.., State::PostView(post_view, ref mut list_state)], Msg::Input(Key::Char('j'))) => {
            list_state.select(list_state.selected().map(|s| {
                if s < post_view.comments.len() {
                    s + 1
                } else {
                    s
                }
            }));
        }
        // 'k': move the selection up, clamped at the top.
        ([.., State::SubList(_posts, ref mut list_state, _)], Msg::Input(Key::Char('k'))) => {
            list_state.select(list_state.selected().map(|s| if s > 0 { s - 1 } else { s }));
        }
        ([.., State::PostView(_post_view, ref mut list_state)], Msg::Input(Key::Char('k'))) => {
            list_state.select(list_state.selected().map(|s| if s > 0 { s - 1 } else { s }));
        }
        // 'h': leave the comment view, back to the post list.
        ([.., State::PostView(_, _)], Msg::Input(Key::Char('h'))) => {
            state_stack.pop();
        }
        // Enter: open the selected post's URL in the system browser. The
        // stored URL is wrapped in literal quote characters (apparently raw
        // JSON string data), so both quotes are stripped first.
        ([.., State::SubList(posts, list_state, _)], Msg::Input(Key::Char('\n'))) => {
            if let Some(url) = list_state
                .selected()
                .and_then(|i| posts.get(i))
                .map(|post| post.url.as_str())
                .and_then(|s| s.strip_prefix("\""))
                .and_then(|s| s.strip_suffix("\""))
            {
                webbrowser::open(url)?;
            }
        }
        // 'l': fetch and show the comments of the selected post (its
        // permalink is quote-wrapped like the URL above).
        ([.., State::SubList(posts, list_state, _)], Msg::Input(Key::Char('l'))) => {
            if let Some(permalink) =
                list_state
                    .selected()
                    .and_then(|i| posts.get(i))
                    .and_then(|post| {
                        Some(
                            post.permalink
                                .strip_suffix("\"")?
                                .strip_prefix("\"")?
                                .to_string(),
                        )
                    })
            {
                state_stack.push(State::Loading);
                return Ok(Some(Box::pin(async move {
                    let comments = get_post_view(permalink.as_str()).await?;
                    Ok(Msg::CommentsResponse(comments))
                })));
            }
        }
        // Replace the loading screen with the fetched comment view.
        ([.., State::Loading], Msg::CommentsResponse(post_view)) => {
            let mut list_state = ListState::default();
            if !post_view.comments.is_empty() {
                list_state.select(Some(0));
            }
            state_stack[last_state_index] = State::PostView(post_view, list_state);
        }
        // Subreddit prompt: Enter submits, Esc cancels, Backspace deletes,
        // any other character is appended.
        ([.., State::SelectSubreddit(prompt)], Msg::Input(Key::Char('\n'))) => {
            return update(Msg::FetchSubreddit(Some(prompt.clone())), state_stack);
        }
        ([.., State::SelectSubreddit(_)], Msg::Input(Key::Esc)) => {
            state_stack.pop();
        }
        ([.., State::SelectSubreddit(ref mut prompt)], Msg::Input(Key::Backspace)) => {
            prompt.pop();
        }
        ([.., State::SelectSubreddit(ref mut prompt)], Msg::Input(Key::Char(c))) => {
            prompt.push(c);
        }
        // 'q' / Ctrl-C: quit by propagating an error up the event loop.
        (_, Msg::Input(Key::Char('q'))) => {
            bail!("Quitting"); // TODO: Better quiting path
        }
        (_, Msg::Input(Key::Ctrl('c'))) => {
            bail!("Quitting"); // TODO: Better quiting path
        }
        // '/': open the subreddit-selection prompt.
        (_, Msg::Input(Key::Char('/'))) => {
            state_stack.push(State::SelectSubreddit(String::new()));
        }
        (_, _) => {}
    }
    Ok(None)
}
|
use crossbeam::atomic::AtomicCell;
use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use std::{
any::TypeId,
collections::{hash_map::DefaultHasher, HashMap},
hash::Hasher,
sync::Arc,
};
use crate::{app::App, universe::Node};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum WindowRoot {
    /// The primary application window.
    Main,
    /// A secondary window, distinguished by `id`.
    Auxiliary { id: usize },
}
/// Configuration for an egui window: a factory closure that builds the
/// `egui::Window` value (invoked on every `show_*` call), plus its egui id.
struct EguiWindowCfg {
    builder: Box<dyn Fn() -> egui::Window<'static>>,
    id: egui::Id,
}
impl EguiWindowCfg {
    /// Builds the configured window and shows it, with `open` both
    /// controlling and receiving its open/closed state.
    fn show_with_open<F>(&self, ctx: &egui::CtxRef, open: &mut bool, render: F)
    where
        F: FnOnce(&mut egui::Ui),
    {
        (self.builder)().open(open).show(ctx, render);
    }
    /// Like `show_with_open`, but the open flag lives in an `AtomicCell`:
    /// the value is copied out, handed to egui, then stored back.
    fn show_with_atomic_open<F>(
        &self,
        ctx: &egui::CtxRef,
        cell: &AtomicCell<bool>,
        render: F,
    ) where
        F: FnOnce(&mut egui::Ui),
    {
        let mut is_open = cell.load();
        (self.builder)().open(&mut is_open).show(ctx, render);
        cell.store(is_open);
    }
}
// impl std::default::Default
/// Pairing of a window title with its egui window configuration.
/// NOTE(review): looks partially built out — the commented `root` field
/// suggests per-root window placement was planned.
pub struct WindowCfg {
    // root: WindowRoot,
    title: String,
    egui_window_cfg: EguiWindowCfg,
}
/// Type-erased window: an egui id, a title, and the render callback that is
/// invoked with the app state and the current node set.
pub struct WrapWin {
    id: egui::Id,
    title: String,
    show: Box<dyn FnMut(&App, &mut egui::Ui, &[Node]) + Send + Sync>,
}
/*
pub trait AnySendSync: std::any::Any + Send + Sync + 'static {}
impl<T: std::any::Any + Send + Sync + 'static> AnySendSync for T {}
pub struct WrapWinT<T: AnySendSync> {
id: egui::Id,
title: String,
type_id: TypeId,
show: Box<dyn Fn(&mut egui::Ui, &mut Box<T>) + Send + Sync>,
window_state: Box<T>,
}
pub struct WrapWin_ {
id: egui::Id,
title: String,
type_id: TypeId,
show: Box<dyn Fn(&mut egui::Ui, &mut Box<dyn AnySendSync>) + Send + Sync>,
window_state: Box<dyn AnySendSync>,
// show: Box<dyn FnMut(&mut egui::Ui, &mut Box<dyn std::any::Any + Send + Sync>) + Send + Sync>,
}
impl<T: AnySendSync> WrapWinT<T> {
pub fn new<F>(title: &str, show: F, state: T) -> Self
where
F: Fn(&mut egui::Ui, &mut T) + Send + Sync,
{
unimplemented!();
}
}
*/
/// Opaque GUI identifier derived from hashing an arbitrary value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct GuiId(u64);
impl GuiId {
    /// Builds an id by feeding `id` through the std `DefaultHasher`.
    pub fn new(id: impl std::hash::Hash) -> Self {
        let mut state = DefaultHasher::new();
        id.hash(&mut state);
        Self(state.finish())
    }
}
/*
pub struct GuiChn<T: std::any::Any + Send + Sync + 'static> {
id: GuiId,
type_id: TypeId,
_type: std::marker::PhantomData<T>,
tx: crossbeam::channel::Sender<T>,
rx: crossbeam::channel::Receiver<T>,
}
*/
/// Handle returned by `GuiChannels::add_channel`: both endpoints of the
/// newly created channel plus its id and the element `TypeId`.
#[derive(Debug, Clone)]
pub struct GuiChnInfo<T: std::any::Any + Send + Sync + 'static> {
    id: GuiId,
    // `TypeId` of `T`, matching the entry recorded in `GuiChannels`.
    type_id: TypeId,
    _type: std::marker::PhantomData<T>,
    tx: crossbeam::channel::Sender<T>,
    rx: crossbeam::channel::Receiver<T>,
}
/// Registry of type-erased crossbeam channels keyed by `GuiId`.
/// `channel_types` records each channel's element `TypeId` so lookups can
/// be type-checked before downcasting the boxed endpoints.
pub struct GuiChannels {
    channel_types: FxHashMap<GuiId, TypeId>,
    tx_channels: FxHashMap<GuiId, Box<dyn std::any::Any>>,
    rx_channels: FxHashMap<GuiId, Box<dyn std::any::Any>>,
    // clone_tx: FxHashMap<GuiId, Arc<dyn Fn() -> Box<dyn std::any::Any>>>,
    // clone_rx: FxHashMap<GuiId, Arc<dyn Fn() -> Box<dyn std::any::Any>>>,
}
impl GuiChannels {
    /// Creates an empty channel registry.
    pub fn new() -> Self {
        Self {
            channel_types: FxHashMap::default(),
            tx_channels: FxHashMap::default(),
            rx_channels: FxHashMap::default(),
        }
    }
    /// Returns the receiver registered under `id`, or `None` when no
    /// channel exists for `id` or its element type is not `T`.
    pub fn get_rx<T>(
        &self,
        id: GuiId,
    ) -> Option<&crossbeam::channel::Receiver<T>>
    where
        T: std::any::Any + Send + Sync + 'static,
    {
        if *self.channel_types.get(&id)? != TypeId::of::<T>() {
            return None;
        }
        self.rx_channels.get(&id)?.downcast_ref()
    }
    /// Returns the sender registered under `id`, or `None` when no channel
    /// exists for `id` or its element type is not `T`.
    pub fn get_tx<T>(&self, id: GuiId) -> Option<&crossbeam::channel::Sender<T>>
    where
        T: std::any::Any + Send + Sync + 'static,
    {
        if *self.channel_types.get(&id)? != TypeId::of::<T>() {
            return None;
        }
        self.tx_channels.get(&id)?.downcast_ref()
    }
    /// True when a channel with element type `T` is registered under `id`.
    pub fn has_channel<T>(&self, id: GuiId) -> bool
    where
        T: std::any::Any + Send + Sync + 'static,
    {
        match self.channel_types.get(&id) {
            Some(type_id) => *type_id == TypeId::of::<T>(),
            None => false,
        }
    }
    /// Registers a new channel of element type `T` under `id` and returns a
    /// handle carrying both endpoints. Returns `Err(TypeId)` — the type of
    /// the existing channel — when `id` is already taken.
    pub fn add_channel<T>(
        &mut self,
        id: GuiId,
        bounded: Option<usize>,
    ) -> std::result::Result<GuiChnInfo<T>, TypeId>
    where
        T: std::any::Any + Send + Sync + 'static,
    {
        if let Some(existing) = self.channel_types.get(&id) {
            return Err(*existing);
        }
        let (tx, rx) = match bounded {
            Some(cap) => crossbeam::channel::bounded::<T>(cap),
            None => crossbeam::channel::unbounded::<T>(),
        };
        let t_type_id = TypeId::of::<T>();
        self.channel_types.insert(id, t_type_id);
        // Endpoints are cheaply clonable; store boxed copies and hand the
        // originals back to the caller.
        self.tx_channels.insert(id, Box::new(tx.clone()) as _);
        self.rx_channels.insert(id, Box::new(rx.clone()) as _);
        Ok(GuiChnInfo {
            id,
            type_id: t_type_id,
            tx,
            rx,
            _type: std::marker::PhantomData,
        })
    }
}
/// Collection of registered windows and their open/closed flags.
#[derive(Default)]
pub struct GuiWindows {
    // Render callbacks, shared and locked so windows can be shown through &self.
    windows: FxHashMap<GuiId, Arc<Mutex<WrapWin>>>,
    // Open flags, togglable without &mut self via AtomicCell.
    open_windows: FxHashMap<GuiId, Arc<AtomicCell<bool>>>,
}
impl GuiWindows {
pub fn open_windows(&self) -> Vec<GuiId> {
self.open_windows
.iter()
.filter_map(|(id, v)| if v.load() { Some(*id) } else { None })
.collect()
}
// pub fn show_in(&self, id: GuiId, ui: &mut Ui) -> Option<()> {
pub fn show_in_window(
&self,
app: &App,
ctx: &egui::CtxRef,
nodes: &[Node],
id: GuiId,
window: egui::Window,
) -> Option<()> {
let cell = self.open_windows.get(&id)?;
let mut open = cell.load();
let w = self.windows.get(&id)?;
{
let mut lock = w.lock();
window.open(&mut open).show(ctx, |ui| {
(lock.show)(app, ui, nodes);
});
}
cell.store(open);
Some(())
}
pub fn add_window<F>(&mut self, id: GuiId, title: &str, f: F)
where
F: FnMut(&App, &mut egui::Ui, &[Node]) + Send + Sync + 'static,
{
let wrap_win = WrapWin {
id: egui::Id::new(id),
title: title.to_string(),
show: Box::new(f) as _,
};
self.windows.insert(id, Arc::new(Mutex::new(wrap_win)));
self.open_windows.insert(id, Arc::new(false.into()));
}
pub fn is_open(&self, id: GuiId) -> bool {
self.open_windows
.get(&id)
.map(|c| c.load())
.unwrap_or(false)
}
pub fn get_open_arc(&self, id: GuiId) -> Option<&Arc<AtomicCell<bool>>> {
self.open_windows.get(&id)
}
pub fn set_open(&self, id: GuiId, open: bool) {
if let Some(o) = self.open_windows.get(&id) {
o.store(open);
}
}
pub fn toggle_open(&self, id: GuiId) {
if let Some(o) = self.open_windows.get(&id) {
o.fetch_xor(true);
}
}
}
/*
pub struct AppViewState {
settings: SettingsWindow,
fps: ViewStateChannel<FrameRate, FrameRateMsg>,
graph_stats: ViewStateChannel<GraphStats, GraphStatsMsg>,
node_list: ViewStateChannel<NodeList, NodeListMsg>,
node_details: ViewStateChannel<NodeDetails, NodeDetailsMsg>,
path_list: ViewStateChannel<PathList, PathListMsg>,
path_details: ViewStateChannel<PathDetails, ()>,
// theme_editor: ThemeEditor,
// theme_list: ThemeList,
overlay_creator: ViewStateChannel<OverlayCreator, OverlayCreatorMsg>,
overlay_list: ViewStateChannel<OverlayList, OverlayListMsg>,
}
*/
// impl GuiWindows {
// pub fn (
// }
/*
*/
|
mod board;
mod search;
/// UCI (Universal Chess Interface) command loop, driven line-by-line from
/// stdin. Supports `uci`, `isready`, `position startpos [moves ...]`, `go`
/// (with wtime/btime clock handling), and a non-standard `perft` command.
fn main() {
    use std::io;
    use std::io::prelude::*;
    let mut board = board::Board::initial_position();
    let stdin = io::stdin();
    for line in stdin.lock().lines() {
        let command = line.expect("failed to read line from stdin");
        // `split_whitespace` is already an iterator; the previous redundant
        // `.into_iter()` call was dropped.
        let mut words = command.split_whitespace();
        match words.next() {
            Some("uci") => {
                println!("id name RustChess");
                println!("id author Håkon Sandsmark");
                println!("uciok");
            }
            Some("isready") => println!("readyok"),
            // "position startpos [moves e2e4 ...]": reset, then replay moves.
            Some("position") => {
                if words.next() == Some("startpos") {
                    board = board::Board::initial_position();
                    if words.next() == Some("moves") {
                        for m in words {
                            assert!(board.make_move(m));
                        }
                    }
                }
            }
            Some("go") => {
                // Fallback think budget when the GUI sends no clock info.
                let mut time_left = std::time::Duration::from_secs(10);
                while let Some(part) = words.next() {
                    // Only the clock of the side to move matters.
                    if (part == "wtime" && board.turn() == board::Color::White)
                        || (part == "btime" && board.turn() == board::Color::Black)
                    {
                        let millis = words.next().expect("time").parse::<u64>().expect("millis");
                        time_left = std::time::Duration::from_millis(millis);
                    }
                }
                // Spend roughly 1/15th of the remaining clock on this move.
                let time_budget = time_left / 15;
                // Score and depth are unused here; underscore-bind them to
                // silence the unused-variable warnings.
                let (_score, moves, _depth) =
                    search::negamax_iterative_deepening(&board, time_budget);
                // Format the best move once (it was previously formatted
                // twice: once for the assert, once for the println).
                let best_move = format!("{}", moves.at(0));
                assert!(board.make_move(&best_move));
                println!("bestmove {}", best_move);
            }
            Some("perft") => {
                let depth = words.next().unwrap_or("6").parse::<u32>().expect("depth");
                let start = std::time::Instant::now();
                let perft = search::perft(&board, depth, false);
                let time = start.elapsed();
                println!("perft({}) = {} took {:?}", depth, perft, time);
            }
            _ => (),
        };
    }
}
|
use std::path::PathBuf;
use actix_files::NamedFile;
use actix_web::middleware::Logger;
use actix_web::{get, App, HttpRequest, HttpServer, Responder};
use anyhow::bail;
use anyhow::{Context, Result};
use openssl::ssl::{SslAcceptor, SslFiletype, SslMethod};
// Root endpoint: plain-text landing/liveness response.
#[get("/")]
async fn index(_req: HttpRequest) -> impl Responder {
    "Hello World"
}
// Serves the static index page. The unused `req` binding was renamed to
// `_req` to silence the unused-variable warning, and the dead commented-out
// response was removed.
#[get("/html")]
async fn html(_req: HttpRequest) -> impl Responder {
    // `NamedFile::open` returns an io::Result<NamedFile>, which is returned
    // directly as the responder (errors become an HTTP error response
    // rather than a panic).
    let path = "public/index.html";
    NamedFile::open(path)
}
/// Entry point: configures TLS from `key.pem`/`cert.pem` and serves the
/// registered handlers over HTTPS on 127.0.0.1:8443.
#[actix_web::main]
async fn main() -> Result<(), std::io::Error> {
    env_logger::init();
    // TLS setup: fail fast with a descriptive message if the key/cert files
    // are missing or malformed (the bare `unwrap()`s gave no hint why
    // startup failed). The server cannot run without them, so panicking
    // here is intentional.
    let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls())
        .expect("failed to create SSL acceptor");
    builder
        .set_private_key_file("key.pem", SslFiletype::PEM)
        .expect("failed to load private key from key.pem");
    builder
        .set_certificate_chain_file("cert.pem")
        .expect("failed to load certificate chain from cert.pem");
    HttpServer::new(|| {
        App::new()
            // Request logging for every service below.
            .wrap(Logger::default())
            .service(index)
            .service(html)
    })
    .bind_openssl("127.0.0.1:8443", builder)?
    .run()
    .await
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Generated model: common envelope for an Azure resource.
/// `location` is the only field that is always present on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // Renamed: `type` is a Rust keyword.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    // Arbitrary key/value tags; kept as raw JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Generated model: top-level error wrapper returned by the Azure API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<CloudErrorBody>,
}
/// Generated model: error details; `details` may nest further errors.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudErrorBody {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<CloudErrorBody>,
}
/// Generated model: marketplace purchase plan attached to a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PurchasePlan {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub product: Option<String>,
    #[serde(rename = "promotionCode", default, skip_serializing_if = "Option::is_none")]
    pub promotion_code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
}
/// Generated model: router profile of an OpenShift cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftRouterProfile {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "publicSubdomain", default, skip_serializing_if = "Option::is_none")]
    pub public_subdomain: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fqdn: Option<String>,
}
/// Generated model: virtual-network settings for the cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkProfile {
    #[serde(rename = "vnetCidr", default, skip_serializing_if = "Option::is_none")]
    pub vnet_cidr: Option<String>,
    #[serde(rename = "peerVnetId", default, skip_serializing_if = "Option::is_none")]
    pub peer_vnet_id: Option<String>,
    #[serde(rename = "vnetId", default, skip_serializing_if = "Option::is_none")]
    pub vnet_id: Option<String>,
}
/// Generated model: operating system of a node pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
    Linux,
    Windows,
}
/// Generated model: allowed VM SKUs for OpenShift node pools, serialized
/// using Azure's `Standard_*` names via per-variant renames.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OpenShiftContainerServiceVmSize {
    #[serde(rename = "Standard_D2s_v3")]
    StandardD2sV3,
    #[serde(rename = "Standard_D4s_v3")]
    StandardD4sV3,
    #[serde(rename = "Standard_D8s_v3")]
    StandardD8sV3,
    #[serde(rename = "Standard_D16s_v3")]
    StandardD16sV3,
    #[serde(rename = "Standard_D32s_v3")]
    StandardD32sV3,
    #[serde(rename = "Standard_D64s_v3")]
    StandardD64sV3,
    #[serde(rename = "Standard_DS4_v2")]
    StandardDs4V2,
    #[serde(rename = "Standard_DS5_v2")]
    StandardDs5V2,
    #[serde(rename = "Standard_F8s_v2")]
    StandardF8sV2,
    #[serde(rename = "Standard_F16s_v2")]
    StandardF16sV2,
    #[serde(rename = "Standard_F32s_v2")]
    StandardF32sV2,
    #[serde(rename = "Standard_F64s_v2")]
    StandardF64sV2,
    #[serde(rename = "Standard_F72s_v2")]
    StandardF72sV2,
    #[serde(rename = "Standard_F8s")]
    StandardF8s,
    #[serde(rename = "Standard_F16s")]
    StandardF16s,
    #[serde(rename = "Standard_E4s_v3")]
    StandardE4sV3,
    #[serde(rename = "Standard_E8s_v3")]
    StandardE8sV3,
    #[serde(rename = "Standard_E16s_v3")]
    StandardE16sV3,
    #[serde(rename = "Standard_E20s_v3")]
    StandardE20sV3,
    #[serde(rename = "Standard_E32s_v3")]
    StandardE32sV3,
    #[serde(rename = "Standard_E64s_v3")]
    StandardE64sV3,
    #[serde(rename = "Standard_GS2")]
    StandardGs2,
    #[serde(rename = "Standard_GS3")]
    StandardGs3,
    #[serde(rename = "Standard_GS4")]
    StandardGs4,
    #[serde(rename = "Standard_GS5")]
    StandardGs5,
    #[serde(rename = "Standard_DS12_v2")]
    StandardDs12V2,
    #[serde(rename = "Standard_DS13_v2")]
    StandardDs13V2,
    #[serde(rename = "Standard_DS14_v2")]
    StandardDs14V2,
    #[serde(rename = "Standard_DS15_v2")]
    StandardDs15V2,
    #[serde(rename = "Standard_L4s")]
    StandardL4s,
    #[serde(rename = "Standard_L8s")]
    StandardL8s,
    #[serde(rename = "Standard_L16s")]
    StandardL16s,
    #[serde(rename = "Standard_L32s")]
    StandardL32s,
}
/// Generated model: role of an agent pool (serialized lowercase).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OpenShiftAgentPoolProfileRole {
    #[serde(rename = "compute")]
    Compute,
    #[serde(rename = "infra")]
    Infra,
}
/// Generated model: master node pool; `count` and `vmSize` are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterMasterPoolProfile {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub count: i32,
    #[serde(rename = "vmSize")]
    pub vm_size: OpenShiftContainerServiceVmSize,
    #[serde(rename = "subnetCidr", default, skip_serializing_if = "Option::is_none")]
    pub subnet_cidr: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<OsType>,
}
/// Generated model: agent node pool; unlike the master pool, `name` is
/// mandatory and a `role` may be set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterAgentPoolProfile {
    pub name: String,
    pub count: i32,
    #[serde(rename = "vmSize")]
    pub vm_size: OpenShiftContainerServiceVmSize,
    #[serde(rename = "subnetCidr", default, skip_serializing_if = "Option::is_none")]
    pub subnet_cidr: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<OsType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub role: Option<OpenShiftAgentPoolProfileRole>,
}
/// Generated model: a named identity provider entry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterIdentityProvider {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<OpenShiftManagedClusterBaseIdentityProvider>,
}
/// Generated model: authentication configuration of the cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterAuthProfile {
    #[serde(rename = "identityProviders", default, skip_serializing_if = "Vec::is_empty")]
    pub identity_providers: Vec<OpenShiftManagedClusterIdentityProvider>,
}
/// Generated model: monitoring (Log Analytics workspace) settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterMonitorProfile {
    #[serde(rename = "workspaceResourceID", default, skip_serializing_if = "Option::is_none")]
    pub workspace_resource_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
}
/// Generated model: full property bag of a managed OpenShift cluster;
/// only `openShiftVersion` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "openShiftVersion")]
    pub open_shift_version: String,
    #[serde(rename = "clusterVersion", default, skip_serializing_if = "Option::is_none")]
    pub cluster_version: Option<String>,
    #[serde(rename = "publicHostname", default, skip_serializing_if = "Option::is_none")]
    pub public_hostname: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fqdn: Option<String>,
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<NetworkProfile>,
    #[serde(rename = "routerProfiles", default, skip_serializing_if = "Vec::is_empty")]
    pub router_profiles: Vec<OpenShiftRouterProfile>,
    #[serde(rename = "masterPoolProfile", default, skip_serializing_if = "Option::is_none")]
    pub master_pool_profile: Option<OpenShiftManagedClusterMasterPoolProfile>,
    #[serde(rename = "agentPoolProfiles", default, skip_serializing_if = "Vec::is_empty")]
    pub agent_pool_profiles: Vec<OpenShiftManagedClusterAgentPoolProfile>,
    #[serde(rename = "authProfile", default, skip_serializing_if = "Option::is_none")]
    pub auth_profile: Option<OpenShiftManagedClusterAuthProfile>,
    #[serde(rename = "monitorProfile", default, skip_serializing_if = "Option::is_none")]
    pub monitor_profile: Option<OpenShiftManagedClusterMonitorProfile>,
}
/// Generated model: a managed cluster — the common `Resource` envelope
/// flattened together with any remaining fields captured as raw JSON.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedCluster {
    #[serde(flatten)]
    pub resource: Resource,
    // Catch-all for properties not modelled explicitly.
    #[serde(flatten)]
    pub serde_json_value: serde_json::Value,
}
/// Generated model: base identity provider; `kind` discriminates subtypes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterBaseIdentityProvider {
    pub kind: String,
}
/// Generated model: AAD identity provider — base provider flattened with
/// any extra fields captured as raw JSON.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterAadIdentityProvider {
    #[serde(flatten)]
    pub open_shift_managed_cluster_base_identity_provider: OpenShiftManagedClusterBaseIdentityProvider,
    #[serde(flatten)]
    pub serde_json_value: serde_json::Value,
}
/// Generated model: request body for tag-only (PATCH) updates.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagsObject {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Generated model: one page of clusters; `nextLink` points to the next
/// page when present.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenShiftManagedClusterListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OpenShiftManagedCluster>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
|
use std::fs;
/// Splits an "LxWxH" line into its three numeric sides, sorted ascending.
fn get_dimensions(line: &str) -> Vec<u32> {
    let mut dims = Vec::new();
    for field in line.split('x') {
        dims.push(field.parse::<u32>().unwrap());
    }
    // Ascending order lets callers treat dims[0] and dims[1] as the two
    // smallest sides.
    dims.sort();
    dims
}
/// Wrapping paper needed: each box's full surface area plus slack equal to
/// the area of its smallest face, summed over all lines of `input`.
fn part_1(input: &str) -> u32 {
    let mut total = 0;
    for line in input.lines() {
        let dim = get_dimensions(line);
        let (a, b, c) = (dim[0], dim[1], dim[2]);
        // dims come back sorted ascending, so a * b is the smallest face.
        total += 2 * (a * b + a * c + b * c) + a * b;
    }
    total
}
/// Ribbon needed: each box's shortest perimeter plus its volume for the bow,
/// summed over all lines of `input`.
fn part_2(input: &str) -> u32 {
    let mut total = 0;
    for line in input.lines() {
        let dim = get_dimensions(line);
        let (a, b, c) = (dim[0], dim[1], dim[2]);
        // a and b are the two smallest sides, so 2a + 2b is the shortest
        // perimeter; a * b * c is the volume.
        total += 2 * (a + b) + a * b * c;
    }
    total
}
/// Reads the puzzle input from a file named `input` and prints both answers.
fn main() {
    let raw = fs::read_to_string("input").expect("file not found");
    // Drop the trailing newline so the last line parses cleanly.
    let puzzle = raw.trim();
    println!("First puzzle: {}", part_1(puzzle));
    println!("Second puzzle: {}", part_2(puzzle));
}
#[cfg(test)]
mod day02 {
    use super::*;
    // Worked examples — presumably from the day-2 puzzle statement; the
    // expected values match the formulas in part_1/part_2 above.
    #[test]
    fn test_part_1() {
        println!("Running test for part 1");
        assert_eq!(58, part_1("2x3x4"));
        assert_eq!(43, part_1("1x1x10"));
    }
    #[test]
    fn test_part_2() {
        println!("Running test for part 2");
        assert_eq!(34, part_2("2x3x4"));
        assert_eq!(14, part_2("1x1x10"));
    }
}
|
use ComponentBitField;
use chunk::Chunk;
use commands::Commands;
use component_group::{Read, Write};
use entity::Entity;
use entity_collection::{Entities, EntityCollection};
use entity_template::EntityTemplate;
use shared_resources::SharedResources;
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use system::System;
use worker::schema::{Command, Component, GeneratedSchema, GlobalComponentDataInterface};
use worker::{Authority, CommandStatus, ComponentId, Connection, Dispatcher, EntityId, LogLevel,
RequestId};
/// Possible errors which can be thrown by the `World`.
// Currently the only failure mode surfaced by `World::process`.
pub enum WorldError {
    /// We tried to perform an operation which required a connection to SpatialOS, but
    /// the connection to SpatialOS is closed.
    ConnectionLost,
}
#[doc(hidden)]
#[derive(Default, Clone)]
pub struct WorldTime {
    // Logical tick counter; incremented each time `get_time` is called.
    timestamp: u64,
}
impl WorldTime {
    /// Creates a timeline starting at tick zero.
    pub fn new() -> WorldTime {
        WorldTime { timestamp: 0 }
    }
    // True if a happened after b
    pub fn occured_after(&self, b: &WorldTime) -> bool {
        b.timestamp < self.timestamp
    }
    /// Returns whichever of the two times is the more recent one.
    pub fn max_time<'a>(&'a self, b: &'a WorldTime) -> &'a WorldTime {
        if b.timestamp < self.timestamp {
            self
        } else {
            b
        }
    }
    /// Hands out the current time and advances the internal counter by one.
    pub fn get_time(&mut self) -> WorldTime {
        let current = WorldTime {
            timestamp: self.timestamp,
        };
        self.timestamp += 1;
        current
    }
}
/// An entity whose add-ops are still being collected during a critical
/// section; promoted into the `EntityCollection` when the section closes.
pub struct PartialEntity<S: GeneratedSchema> {
    pub entity_id: EntityId,
    // Which components this entity has (set as add-component ops arrive).
    pub bit_field: S::ComponentBitField,
    pub component_data: HashMap<ComponentId, S::ComponentData>,
    pub write_authority: HashMap<ComponentId, Authority>,
}
/// A registered system together with the last `WorldTime` at which its
/// `on_update` completed (used to filter entities it has already seen).
pub struct SystemData<S> {
    system: Box<System<S>>,
    last_update: WorldTime,
}
/// The `World` is the worker's view into the SpatialOS world.
///
/// It can be used to
/// * Query local entity data
/// * Send and receive commands
/// * Manage shared resources
/// * Create and delete entities
///
/// The `World` is also responsible for processing each system and each
/// SpatialOS operation. To tick the worker, you must call `process` for each tick.
///
/// ## Shared resources
///
/// You can set and retrieve shared objects of any type using `get_shared_resource`
/// and `set_shared_resource`. These objects can be used to store global information
/// or information which must be shared between systems.
pub struct World<S: GeneratedSchema> {
    // Live link to SpatialOS; ops and replication go through it.
    connection: Connection,
    // Chunked storage of fully-added entities.
    entities: EntityCollection<S>,
    // Entities received during the current critical section; committed to
    // `entities` when the section ends (see `on_critical_section`).
    added_this_cs: HashMap<EntityId, PartialEntity<S>>,
    // EntityId -> live entity handle, for id-based lookups.
    entity_ids: HashMap<EntityId, Rc<RefCell<Entity<S>>>>,
    systems: Vec<SystemData<S>>,
    // Logical clock used to stamp system updates and component changes.
    world_time: WorldTime,
    commands: Commands<S>,
    shared_resources: SharedResources,
}
impl<S: 'static + GeneratedSchema> World<S> {
    /// Construct a new `World` given an existing `Connection` to SpatialOS.
    // NOTE(review): returns a Box — presumably so the World lives at a stable
    // heap address for the raw-pointer aliasing used in `process`; confirm.
    pub fn new(connection: Connection) -> Box<World<S>> {
        let manager = Box::new(World::<S> {
            connection,
            entities: EntityCollection::new(),
            added_this_cs: HashMap::new(),
            entity_ids: HashMap::new(),
            systems: Vec::new(),
            world_time: WorldTime::new(),
            commands: Commands::new(),
            shared_resources: SharedResources::new(),
        });
        manager
    }
    /// Runs a world tick. This does the following in order:
    ///
    /// * Checks if the connection is still active.
    /// * Get's the list of ops from SpatialOS.
    /// * Processes each of these ops. This will in turn update component data and
    /// trigger command callbacks and handlers.
    /// * Calls each registered system's `on_update` method.
    /// * Sends any updates to components which were changed by a system.
    pub fn process(&mut self, timeout_millis: u32) -> Result<(), WorldError> {
        if !self.connection.is_connected() {
            return Result::Err(WorldError::ConnectionLost);
        }
        // Raw alias of `self` so that individual fields can be borrowed while
        // `self` is simultaneously handed to callbacks/systems below.
        let world_ptr = self as *mut World<S>;
        unsafe {
            // SAFETY(review): relies on single-threaded processing — the op
            // list is fetched through the alias, then consumed via `self`.
            let op_list = (*world_ptr).connection.get_op_list(timeout_millis);
            self.process_op_list(op_list);
        }
        unsafe {
            // SAFETY(review): `entities_view` borrows `entities` through the
            // raw pointer while `self` is passed to `on_update`; systems must
            // not invalidate that view through `self` during the call. Confirm.
            for system in (*world_ptr).systems.iter_mut() {
                {
                    // Only expose entities changed since this system last ran.
                    let mut entities_view = Entities::entities_from_time(
                        &mut (*world_ptr).entities,
                        &system.last_update,
                    );
                    system.system.on_update(self, &mut entities_view);
                }
                // Stamp the system so the next tick filters on a newer time.
                system.last_update = self.world_time.get_time();
            }
        }
        // Push dirty component data to SpatialOS, then drop per-frame state.
        self.entities.replicate(&mut self.connection);
        self.entities.cleanup_after_frame();
        Result::Ok(())
    }
    /// Registers a system to the World. The system's `on_ready` method will be
    /// called during this method.
    pub fn register<A: 'static + System<S> + Sized>(&mut self, mut system: A) {
        {
            system.on_ready(self);
        }
        self.systems.push(SystemData::<S> {
            system: Box::new(system),
            // Start the clock now: the system will see changes from this point.
            last_update: self.world_time.get_time(),
        });
    }
    /// Sends a log message to SpatialOS, as well as logging it to `stdout`.
    ///
    /// It is invalid to call this method if the connection is no longer active.
    pub fn log(&mut self, level: LogLevel, logger_name: &str, message: &str) {
        println!("{:?} [{}] {}", level, logger_name, message);
        self.connection
            .send_log_message(level, String::from(logger_name), String::from(message));
    }
    /// Get's the shared resource of type `R`, if it exists.
    pub fn get_shared_resource<R: 'static>(&mut self) -> Option<&mut R> {
        self.shared_resources.get::<R>()
    }
    /// Sets or replaces the shared resource of type `R`.
    pub fn set_shared_resource<R: 'static>(&mut self, resource: R) {
        self.shared_resources.add(resource)
    }
    /// Gets an immutable reference to the component data of the given `EntityId` for component `C`.
    ///
    /// Returns `None` if the entity is unknown or it lacks component `C`.
    pub fn get_component<C: 'static + Component<S>>(
        &mut self,
        entity_id: EntityId,
    ) -> Option<Read<S, C>> {
        if let Some(entity) = self.entity_ids.get(&entity_id) {
            let entity = entity.borrow();
            self.entities
                .get_chunk_for_entity(&entity)
                .get_component_storage::<C>()
                .map(|storage| {
                    Read::new(&storage.get_component_data_entry(entity.index_in_chunk).data)
                })
        } else {
            None
        }
    }
    /// Gets a mutable reference to the component data of the given `EntityId` for component `C`.
    ///
    /// Any changes made to the component will be replicated over the network at the end of the
    /// current tick.
    ///
    /// Returns `None` if the entity/component is unknown or this worker is
    /// not authoritative over `C` for that entity.
    pub fn get_mut_component<C: 'static + Component<S>>(
        &mut self,
        entity_id: EntityId,
    ) -> Option<Write<S, C>> {
        if let Some(entity) = self.entity_ids.get(&entity_id) {
            let entity = entity.borrow();
            let chunk = self.entities.get_chunk_for_entity(&entity);
            // NOTE(review): the storage is marked dirty before the authority
            // check below, so a denied access still flags it — confirm
            // `replicate` tolerates dirty-but-unchanged storages.
            chunk.mark_component_storage_as_dirty::<C>();
            chunk.get_component_storage::<C>().and_then(|storage| {
                match storage.get_authority(entity.index_in_chunk) {
                    Authority::NotAuthoritative => None,
                    _ => Some(Write::new(
                        &mut storage.get_component_data_entry(entity.index_in_chunk).data,
                    )),
                }
            })
        } else {
            None
        }
    }
    /// Gets the current authority of component `C` for the given `EntityId`.
    pub fn get_authority<C: 'static + Component<S>>(
        &mut self,
        entity_id: EntityId,
    ) -> Option<Authority> {
        if let Some(entity) = self.entity_ids.get(&entity_id) {
            let entity = entity.borrow();
            self.entities
                .get_chunk_for_entity(&entity)
                .get_component_storage::<C>()
                .map(|storage| storage.get_authority(entity.index_in_chunk))
        } else {
            None
        }
    }
    /// Registers a command handler for command `C`. The handler takes as arguments:
    ///
    /// * A reference to this `World`.
    /// * The `EntityId` of the entity which is receiving this command.
    /// * The command request object itself.
    ///
    /// The handler must return a response which will be sent back to the calling entity.
    ///
    /// These handlers are called in a single threaded environment, outside of any system
    /// update call.
    ///
    /// ## Example
    ///
    /// ```
    /// world.register_command_handler(
    ///     Transform::example_command(),
    ///     |world, entity_id, request| {
    ///         println!("Got request: {} {:?}", entity_id, request);
    ///
    ///         ExampleResponse { reply: 0.1 }
    ///     },
    /// );
    /// ```
    // `_command` is only used to pin the command type `C` at the call site.
    pub fn register_command_handler<C: 'static + Command<S>, H: 'static>(
        &mut self,
        _command: C,
        handler: H,
    ) where
        H: Fn(&mut World<S>, EntityId, &C::Request) -> C::Response,
    {
        self.commands.register_handler::<C, H>(handler);
    }
    /// Sends a command of type `C` to the given `EntityId`. Two closures must also be given
    /// to handle the success and failure of the command.
    ///
    /// `success` is triggered if the command response was successfully received, and it takes as arguments:
    /// * A reference to this `World`.
    /// * The `EntityId` of the entity which this command was sent to.
    /// * The command response object.
    ///
    /// `failure` is triggered if there was an error sending the command, and it takes as arguments:
    /// * A reference to this `World`.
    /// * The failure code.
    /// * The failure error message.
    ///
    /// Short circuiting is enabled for this command.
    ///
    /// ## Example
    ///
    /// ```
    /// world.send_command(
    ///     Transform::example_command(),
    ///     100,
    ///     ExampleRequest { param: 0.5 },
    ///     |_world, entity_id, response| {
    ///         println!("Command succeeded: {} {:?}", entity_id, response.reply);
    ///     },
    ///     |_world, status, message| {
    ///         println!("Command failed: {:?} {}", status, message);
    ///     },
    /// );
    /// ```
    pub fn send_command<C: 'static + Command<S>, A: 'static, F: 'static>(
        &mut self,
        _command: C,
        entity_id: EntityId,
        request: C::Request,
        success: A,
        failure: F,
    ) where
        A: FnOnce(&mut World<S>, EntityId, &C::Response),
        F: FnOnce(&mut World<S>, CommandStatus, String),
    {
        self.commands.send_command::<C, A, F>(
            &mut self.connection,
            entity_id,
            request,
            success,
            failure,
        );
    }
    /// Creates a new SpatialOS entity. Two closures must also be given
    /// to handle the success and failure of this creation.
    ///
    /// `success` is triggered if the entity was successfully created, and it takes as arguments:
    /// * A reference to this `World`.
    /// * The `EntityId` of the created entity.
    ///
    /// `failure` is triggered if there was an error creating the entity, and it takes as arguments:
    /// * A reference to this `World`.
    /// * The failure code.
    /// * The failure error message.
    ///
    /// ## Example
    ///
    /// ```
    /// world.create_entity(
    ///     EntityBuilder::new(vec![Worker::Type("visual"), Worker::Type("physics")])
    ///         .with_component(
    ///             Worker::Type("physics"),
    ///             Position {
    ///                 coords: Coordinates { x: 0.1, y: 0.2, z: 0.3 },
    ///             },
    ///         )
    ///         .with_component(
    ///             Worker::Specific(client_worker_id),
    ///             Character {
    ///                 health: 50,
    ///             },
    ///         ),
    ///     |_world, entity_id| {
    ///         println!("Created entity: {}", entity_id);
    ///     },
    ///     |_world, status, message| {
    ///         println!("Failure creating entity: {:?} {}", status, message);
    ///     },
    /// );
    /// ```
    pub fn create_entity<A: 'static, F: 'static>(
        &mut self,
        entity_template: EntityTemplate,
        success: A,
        failure: F,
    ) where
        A: FnOnce(&mut World<S>, EntityId),
        F: FnOnce(&mut World<S>, CommandStatus, String),
    {
        self.commands
            .create_entity(&mut self.connection, entity_template, success, failure);
    }
    /// Deletes an existing SpatialOS entity. Two closures must also be given
    /// to handle the success and failure of this deletion.
    ///
    /// `success` is triggered if the entity was successfully deleted, and it takes as arguments:
    /// * A reference to this `World`.
    /// * The `EntityId` of the deleted entity.
    ///
    /// `failure` is triggered if there was an error deleting the entity, and it takes as arguments:
    /// * A reference to this `World`.
    /// * The failure code.
    /// * The failure error message.
    ///
    /// ## Example
    ///
    /// ```
    /// world.delete_entity(
    ///     entity_to_delete,
    ///     |_world, entity_id| {
    ///         println!("Entity {} successfully deleted.", entity_id);
    ///     },
    ///     |_world, status, message| {
    ///         println!("Failure deleting entity: {:?} {}", status, message);
    ///     },
    /// );
    /// ```
    pub fn delete_entity<A: 'static, F: 'static>(
        &mut self,
        entity_id: EntityId,
        success: A,
        failure: F,
    ) where
        A: FnOnce(&mut World<S>, EntityId),
        F: FnOnce(&mut World<S>, CommandStatus, String),
    {
        self.commands
            .delete_entity(&mut self.connection, entity_id, success, failure);
    }
}
// Receives the raw SpatialOS ops and applies them to the local view.
impl<S: 'static + GeneratedSchema> Dispatcher<S> for World<S> {
    // Entities added during a critical section are buffered in
    // `added_this_cs`; when the section ends they are committed to chunks.
    fn on_critical_section(&mut self, in_critical_section: bool) {
        if !in_critical_section {
            for (entity_id, entity) in self.added_this_cs.drain() {
                let chunk: &mut Chunk<S> =
                    self.entities
                        .get_free_chunk(&entity.bit_field, &mut self.world_time, &entity);
                let new_entity = chunk.add_entity(&mut self.world_time, entity);
                self.entity_ids.insert(entity_id, new_entity);
            }
        }
    }
    // Starts buffering a new entity; its components arrive in later ops.
    fn on_add_entity(&mut self, entity_id: EntityId) {
        self.added_this_cs.insert(
            entity_id,
            PartialEntity {
                entity_id,
                bit_field: ComponentBitField::new(),
                component_data: HashMap::new(),
                write_authority: HashMap::new(),
            },
        );
    }
    // NOTE(review): indexes `entity_ids` directly — panics if SpatialOS
    // removes an entity this worker never finished adding. Confirm intended.
    fn on_remove_entity(&mut self, entity_id: EntityId) {
        {
            let entity = self.entity_ids[&entity_id].borrow();
            let chunk = self.entities.get_chunk_for_entity(&entity);
            chunk.remove_entity(&entity);
        }
        self.entity_ids.remove(&entity_id);
    }
    // Only valid for entities still buffered in the current critical section.
    fn on_add_component(
        &mut self,
        entity_id: EntityId,
        component_id: ComponentId,
        data: S::ComponentData,
    ) {
        let entity: &mut PartialEntity<S> = self.added_this_cs.get_mut(&entity_id).unwrap();
        if entity.bit_field.add_component(component_id) {
            // We have this component
            entity.component_data.insert(component_id, data);
        }
    }
    // Applies an update either to a still-buffered entity or to live chunk data.
    fn on_component_update(
        &mut self,
        entity_id: EntityId,
        component_id: ComponentId,
        update: S::ComponentUpdate,
    ) {
        if let Some(ref mut entity) = self.added_this_cs.get_mut(&entity_id) {
            let data: &mut S::ComponentData =
                &mut entity.component_data.get_mut(&component_id).unwrap();
            data.apply_update(&update);
        } else {
            let entity = self.entity_ids[&entity_id].borrow();
            let chunk = self.entities.get_chunk_for_entity(&entity);
            chunk.apply_component_update(component_id, &mut self.world_time, &entity, update);
        }
    }
    // Same buffered-vs-live split as component updates.
    fn on_authority_change(
        &mut self,
        entity_id: EntityId,
        component_id: ComponentId,
        authority: Authority,
    ) {
        if let Some(ref mut entity) = self.added_this_cs.get_mut(&entity_id) {
            entity.write_authority.insert(component_id, authority);
        } else {
            let entity = self.entity_ids[&entity_id].borrow();
            let chunk = self.entities.get_chunk_for_entity(&entity);
            chunk.apply_authority(component_id, &entity, authority);
        }
    }
    fn on_create_entity_response(
        &mut self,
        request_id: RequestId,
        entity_id: EntityId,
        status_code: CommandStatus,
        message: &str,
    ) {
        // Give responder mutable access to World as all command responses
        // happen in a single threaded environment.
        // SAFETY(review): `self` and `world` alias; `commands` must not be
        // re-entered through `world` while this call is on the stack.
        let world_ptr = self as *mut World<S>;
        let world = unsafe { &mut (*world_ptr) };
        self.commands
            .on_create_entity_response(world, request_id, entity_id, status_code, message);
    }
    fn on_delete_entity_response(
        &mut self,
        request_id: RequestId,
        entity_id: EntityId,
        status_code: CommandStatus,
        message: &str,
    ) {
        // Give responder mutable access to World as all command responses
        // happen in a single threaded environment.
        let world_ptr = self as *mut World<S>;
        let world = unsafe { &mut (*world_ptr) };
        self.commands
            .on_delete_entity_response(world, request_id, entity_id, status_code, message);
    }
    fn on_command_request(
        &mut self,
        request_id: RequestId,
        entity_id: EntityId,
        component_id: ComponentId,
        command_id: u32,
        request: Box<Any>,
    ) {
        // Give handler mutable access to World as all command handlers
        // happen in a single threaded environment.
        let world_ptr = self as *mut World<S>;
        let world = unsafe { &mut (*world_ptr) };
        self.commands.on_command_request(
            world,
            &mut self.connection,
            request_id,
            entity_id,
            component_id,
            command_id,
            request,
        );
    }
    fn on_command_response(
        &mut self,
        request_id: RequestId,
        entity_id: EntityId,
        response: Option<Box<Any>>,
        status_code: CommandStatus,
        message: &str,
    ) {
        // Give responder mutable access to World as all command responses
        // happen in a single threaded environment.
        let world_ptr = self as *mut World<S>;
        let world = unsafe { &mut (*world_ptr) };
        self.commands.on_command_response(
            world,
            request_id,
            entity_id,
            response,
            status_code,
            message,
        );
    }
}
|
use super::GetFlag;
use super::{Gender, GetFlag::*, Results, SpoilerLevel};
use serde::Deserialize;
/// All valid flags for get character method
// One entry per optional field group in `GetCharacterResponse`.
pub const CHARACTER_FLAGS: [GetFlag; 7] =
    [Basic, Details, Measures, Traits, Vns, Voiced, Instances];
/// Results returned from get character method
#[derive(Deserialize, Debug, PartialEq)]
pub struct GetCharacterResults {
    // Pagination/metadata shared by all "get" responses.
    #[serde(flatten)]
    pub results: Results,
    pub items: Vec<GetCharacterResponse>,
}
/// All fields returned by get character method
/// fields are either Some or None depending on GetFlag param passed to get function
#[derive(Deserialize, Debug, PartialEq)]
pub struct GetCharacterResponse {
    pub id: usize,
    pub name: Option<String>,
    #[serde(rename = "original")]
    pub original_name: Option<String>,
    pub gender: Option<Gender>,
    #[serde(rename = "bloodt")]
    pub blood_type: Option<BloodType>,
    // TODO Deserialize to struct
    // NOTE(review): inner Options suggest partially-known dates — confirm
    // element order/meaning against the VNDB API docs.
    pub birthday: Option<Vec<Option<u8>>>,
    // TODO List of alternative names, separated by a newline
    pub aliases: Option<String>,
    pub description: Option<String>,
    #[serde(rename = "image")]
    pub image_url: Option<String>,
    // Body measurements — units presumably cm/kg; TODO confirm.
    pub bust: Option<usize>,
    pub waist: Option<usize>,
    pub hip: Option<usize>,
    pub height: Option<usize>,
    pub weight: Option<usize>,
    // TODO Deserialize to struct
    pub traits: Option<Vec<Vec<usize>>>,
    // TODO Deserialize to struct
    pub vns: Option<Vec<(usize, usize, SpoilerLevel, String)>>,
    pub voiced: Option<Vec<VA>>,
    pub instances: Option<Vec<Instances>>,
}
/// Blood type, "a", "b", "ab" or "o"
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum BloodType {
    A,
    B,
    Ab,
    O,
}
/// Character role in vn
// Serialized lowercase ("main", "primary", ...).
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    Main,
    Primary,
    Side,
    Appears,
}
/// Voice actresses (staff) that voiced this character, per VN.
#[derive(Deserialize, Debug, PartialEq)]
pub struct VA {
    // Staff id.
    id: usize,
    #[serde(rename = "aid")]
    alias_id: usize,
    #[serde(rename = "vid")]
    vn_id: usize,
    note: Option<String>,
}
/// Instances of this character (excluding the character entry itself).
#[derive(Deserialize, Debug, PartialEq)]
pub struct Instances {
    id: usize,
    spoiler: SpoilerLevel,
    name: String,
    #[serde(rename = "original")]
    original_name: String,
}
|
use std::collections::VecDeque;
use std::convert::{TryFrom, TryInto};
use std::str::FromStr;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// An Intcode virtual machine: program memory plus FIFO input/output queues.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct IntcodeDevice {
    // Instruction pointer (index into `memory`).
    ip: usize,
    // Base for Relative-mode addressing, adjusted by opcode 9.
    relative_offset: isize,
    pub memory: Vec<i64>,
    pub input: VecDeque<i64>,
    pub output: VecDeque<i64>
}
impl IntcodeDevice {
pub fn execute(&mut self) -> DeviceStatus {
loop {
let opcode = {
let opcode = self.memory.get(self.ip).cloned().expect("No more instructions");
if opcode > 0 { opcode as usize } else { panic!("Negative opcode"); }
};
use ParamMode::*;
match Self::parse_opcode(opcode) {
(99, [Position, Position, Position]) => return DeviceStatus::Halt,
(1, [mode1, mode2, mode3]) => {
let value1 = self.get_value(self.get_param(1), mode1);
let value2 = self.get_value(self.get_param(2), mode2);
let result_addr = self.get_address(self.get_param(3), mode3);
self.memory[result_addr] = value1 + value2;
self.ip += 4;
},
(2, [mode1, mode2, mode3]) => {
let value1 = self.get_value(self.get_param(1), mode1);
let value2 = self.get_value(self.get_param(2), mode2);
let result_addr = self.get_address(self.get_param(3), mode3);
self.memory[result_addr] = value1 * value2;
self.ip += 4;
},
(3, [mode1, Position, Position]) => {
let result_addr = self.get_address(self.get_param(1), mode1);
if let Some(input_value) = self.input.pop_front() {
self.memory[result_addr] = input_value;
self.ip += 2;
} else {
return DeviceStatus::WaitingInput
}
},
(4, [mode1, Position, Position]) => {
let value1 = self.get_value(self.get_param(1), mode1);
self.output.push_back(value1);
self.ip += 2;
},
(5, [mode1, mode2, Position]) => {
let value1 = self.get_value(self.get_param(1), mode1);
let addr = self.get_value(self.get_param(2), mode2);
if value1 != 0 {
self.ip = self.check_ip(addr);
} else {
self.ip += 3;
}
},
(6, [mode1, mode2, Position]) => {
let value1 = self.get_value(self.get_param(1), mode1);
let addr = self.get_value(self.get_param(2), mode2);
if value1 == 0 {
self.ip = self.check_ip(addr);
} else {
self.ip += 3;
}
},
(7, [mode1, mode2, mode3]) => {
let value1 = self.get_value(self.get_param(1), mode1);
let value2 = self.get_value(self.get_param(2), mode2);
let result_addr = self.get_address(self.get_param(3), mode3);
self.memory[result_addr] = if value1 < value2 { 1 } else { 0 };
self.ip += 4;
},
(8, [mode1, mode2, mode3]) => {
let value1 = self.get_value(self.get_param(1), mode1);
let value2 = self.get_value(self.get_param(2), mode2);
let result_addr = self.get_address(self.get_param(3), mode3);
self.memory[result_addr] = if value1 == value2 { 1 } else { 0 };
self.ip += 4;
},
(9, [mode1, Position, Position]) => {
let value1 = self.get_value(self.get_param(1), mode1);
self.relative_offset += value1 as isize;
self.ip += 2;
},
_ => panic!("Invalid pair of opcode and modes")
}
}
}
fn parse_opcode(opcode: usize) -> (usize, [ParamMode ; 3]) {
(
opcode % 100,
[
(opcode / 100 % 10).try_into().expect("Invalid parameter mode"),
(opcode / 1000 % 10).try_into().expect("Invalid parameter mode"),
(opcode / 10000 % 10).try_into().expect("Invalid parameter mode")
]
)
}
fn check_ip(&self, ip: i64) -> usize {
ip.try_into().ok()
.filter(|&ip| ip < self.memory.len())
.expect("Cannot set ip out of bounds")
}
fn get_param(&self, param_pos: usize) -> i64 {
self.memory.get(self.ip + param_pos).cloned().expect("Can't fetch parameter")
}
fn get_address(&mut self, address: i64, mode: ParamMode) -> usize {
let real_address = match mode {
ParamMode::Position => address,
ParamMode::Relative => address + self.relative_offset as i64,
ParamMode::Immediate => panic!("Cannot use Immediate mode for indexing"),
}.try_into().expect("Cannot write to negative address");
self.require_memory_set(real_address);
real_address
}
fn get_value(&mut self, param: i64, mode: ParamMode) -> i64 {
match mode {
ParamMode::Immediate => param,
ParamMode::Relative | ParamMode::Position => {
let addr = self.get_address(param, mode);
self.require_memory_set(addr);
self.memory[addr]
}
}
}
/// Grow memory with zeros so that `address` is a valid index.
fn require_memory_set(&mut self, address: usize) {
    // `resize` reserves once instead of pushing zero-by-zero; the guard
    // ensures we never shrink memory that is already large enough.
    if self.memory.len() <= address {
        self.memory.resize(address + 1, 0);
    }
}
/// Restore this device to the exact state of `base` — instruction pointer,
/// relative base, memory image and both I/O queues — reusing this
/// device's existing allocations where possible.
pub fn reset(&mut self, base: &IntcodeDevice) {
    self.ip = base.ip;
    self.relative_offset = base.relative_offset;
    self.memory.clone_from(&base.memory);
    self.input.clone_from(&base.input);
    self.output.clone_from(&base.output);
}
/// Queue every byte of `string` on the input channel as its ASCII code.
pub fn write_str(&mut self, string: &str) {
    for byte in string.bytes() {
        self.input.push_back(i64::from(byte));
    }
}
}
impl FromStr for IntcodeDevice {
    type Err = String;

    /// Build a device from a comma-separated program listing, e.g. "1,0,0,99".
    ///
    /// The device starts with empty I/O queues, `ip = 0` and a zero relative
    /// offset. Returns an error naming the first token that is not a valid
    /// `i64`.
    fn from_str(input: &str) -> std::result::Result<Self, Self::Err> {
        Ok(Self {
            ip: 0,
            relative_offset: 0,
            input: VecDeque::new(),
            output: VecDeque::new(),
            memory: input
                .trim()
                .split(',')
                .map(|n| n.parse::<i64>()
                    // The old message said "positive integer", but the parser
                    // accepts any i64 — negative values are legal Intcode
                    // (e.g. relative-base offsets).
                    .map_err(|_| format!("Cannot parse {} as an integer", n))
                )
                .collect::<std::result::Result<_, _>>()?
        })
    }
}
/// Addressing mode of a single instruction parameter.
///
/// `Copy`/`PartialEq` are derived so modes can be passed around and
/// compared freely without `clone()` noise.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ParamMode {
    // Operand is the value itself.
    Immediate,
    // Operand is an absolute memory address.
    Position,
    // Operand is an offset from the relative base.
    Relative,
}
impl TryFrom<usize> for ParamMode {
    type Error = Box<dyn std::error::Error>;

    /// Map a single opcode digit to its addressing mode (0 = Position,
    /// 1 = Immediate, 2 = Relative); any other digit is an error.
    ///
    /// NOTE(review): the `Result<Self>` return relies on a crate-level
    /// `Result` alias defaulting the error type — confirm it matches
    /// `Self::Error`.
    fn try_from(mode: usize) -> Result<Self> {
        match mode {
            0 => Ok(Self::Position),
            1 => Ok(Self::Immediate),
            2 => Ok(Self::Relative),
            _ => Err("Invalid parameter mode in opcode".into())
        }
    }
}
/// Why execution stopped: the program halted, or it is blocked waiting for
/// more input to be queued.
#[derive(PartialEq, Eq)]
pub enum DeviceStatus { Halt, WaitingInput }
|
pub use crate::*;
/// Enumerate every full minterm over the variable set of `initial` that is
/// NOT covered by any implicant in `initial`, returned as new implicants.
///
/// Assumes `initial` is non-empty and that all of its implicants share the
/// same term count (indexes `initial[0]` directly otherwise it panics).
pub fn generate_implicants(initial: Vec<Implicant>) -> Vec<Implicant> {
    let mut generated = vec![];
    let terms_count = initial[0].terms.len();
    // One candidate per assignment of the `terms_count` boolean variables.
    for i in 0..2u32 << (terms_count - 1) {
        // Render the counter in fixed-width binary ("0b" + 30 digits) and
        // keep only the low `terms_count` digits.
        let bits = format!("{:#032b}", i);
        let bits = &bits[bits.len() - terms_count..];
        let terms: Vec<_> = bits
            .chars()
            .map(|c| match c {
                '0' => Some(false),
                '1' => Some(true),
                _ => unreachable!(),
            })
            .collect();
        let imp = Implicant {
            terms,
            naming: initial[0].naming.clone(),
        };
        // A candidate is covered when some initial implicant matches it on
        // every term; a `None` term acts as a wildcard.
        let covered = initial.iter().any(|x| {
            x.terms
                .iter()
                .zip(imp.terms.iter())
                .all(|(&x, &y)| x == y || x.is_none())
        });
        if !covered {
            generated.push(imp);
        }
    }
    generated
}
|
// An amalgamation of the oauth2 library's GitHub, Wunderlist, and Microsoft examples
use oauth2::basic::{BasicErrorResponse, BasicTokenType};
use oauth2::helpers;
use oauth2::TokenType;
use std::time::Duration;
use oauth2::reqwest::http_client;
use oauth2::{
AuthUrl, AuthType, AuthorizationCode, ClientId, ClientSecret, CsrfToken, RedirectUrl, Scope,
TokenResponse, TokenUrl, PkceCodeChallenge, AccessToken, Client,
EmptyExtraTokenFields, ExtraTokenFields, RefreshToken,
};
// use std::env;
use std::io::{BufRead, BufReader, Write};
use std::net::TcpListener;
use url::Url;
use serde::{Deserialize, Serialize};
// Token response whose `token_type` may be absent (this provider omits it).
type SpecialTokenResponse = NonStandardTokenResponse<EmptyExtraTokenFields>;
// OAuth2 client wired to the lenient token response above.
type SpecialClient = Client<BasicErrorResponse, SpecialTokenResponse, BasicTokenType>;
/// serde default used when the provider omits `token_type`.
/// RFC 6749 §5.1 requires the field, so `Bearer` is assumed.
fn default_token_type() -> Option<BasicTokenType> {
    Some(BasicTokenType::Bearer)
}
///
/// Non Standard OAuth2 token response.
///
/// This struct includes the fields defined in
/// [Section 5.1 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-5.1), as well as
/// extensions defined by the `EF` type parameter.
/// In this particular example token_type is optional to showcase how to deal with a non
/// compliant provider.
///
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct NonStandardTokenResponse<EF: ExtraTokenFields> {
    access_token: AccessToken,
    // basecamp does not follow the RFC specs and doesn't return the
    // token_type. `NonStandardTokenResponse` makes the `token_type` optional.
    #[serde(default = "default_token_type")]
    token_type: Option<BasicTokenType>,
    // Lifetime of the access token in seconds, if the server reports one.
    #[serde(skip_serializing_if = "Option::is_none")]
    expires_in: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    refresh_token: Option<RefreshToken>,
    // Space-delimited scope string, split into individual scopes by serde.
    #[serde(rename = "scope")]
    #[serde(deserialize_with = "helpers::deserialize_space_delimited_vec")]
    #[serde(serialize_with = "helpers::serialize_space_delimited_vec")]
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(default)]
    scopes: Option<Vec<Scope>>,
    // Provider-specific extra fields are flattened into the same JSON object.
    #[serde(bound = "EF: ExtraTokenFields")]
    #[serde(flatten)]
    extra_fields: EF,
}
impl<EF> TokenResponse<BasicTokenType> for NonStandardTokenResponse<EF>
where
    EF: ExtraTokenFields,
    BasicTokenType: TokenType,
{
    /// REQUIRED. The access token issued by the authorization server.
    fn access_token(&self) -> &AccessToken {
        &self.access_token
    }

    /// REQUIRED per
    /// [Section 7.1](https://tools.ietf.org/html/rfc6749#section-7.1), but
    /// this non-compliant provider may omit it — fall back to `Bearer`.
    fn token_type(&self) -> &BasicTokenType {
        self.token_type.as_ref().unwrap_or(&BasicTokenType::Bearer)
    }

    /// RECOMMENDED. The lifetime in seconds of the access token. If omitted,
    /// the authorization server SHOULD provide the expiration time via other
    /// means or document the default value.
    fn expires_in(&self) -> Option<Duration> {
        self.expires_in.map(Duration::from_secs)
    }

    /// OPTIONAL. The refresh token, usable under the same authorization
    /// grant as described in
    /// [Section 6](https://tools.ietf.org/html/rfc6749#section-6).
    fn refresh_token(&self) -> Option<&RefreshToken> {
        self.refresh_token.as_ref()
    }

    /// OPTIONAL, if identical to the scope requested by the client;
    /// otherwise, REQUIRED. The scope of the access token as described by
    /// [Section 3.3](https://tools.ietf.org/html/rfc6749#section-3.3).
    /// Parsed from the space-delimited response field; `None` when omitted.
    fn scopes(&self) -> Option<&Vec<Scope>> {
        self.scopes.as_ref()
    }
}
// TODO, I might still be able to do better with this return type, but I like it better than the String
/// Run the OAuth2 authorization-code + PKCE flow against the 37signals
/// Launchpad endpoints and return the token response.
///
/// Prints an authorization URL for the user to open in a browser, then
/// blocks on a one-shot HTTP listener at 127.0.0.1:8080 waiting for the
/// redirect that carries the `code` and `state` query parameters.
/// All failure cases are reported as plain `String` errors or panics
/// (`unwrap` on the socket/parse steps — this is example-grade code).
pub fn get_auth_token(c_id: String, c_secret: String) -> Result<SpecialTokenResponse, String> {
    // DO NOT ADD TO VCS
    let client_id = ClientId::new(c_id);
    let client_secret = ClientSecret::new(c_secret);
    let auth_url = AuthUrl::new("https://launchpad.37signals.com/authorization/new?type=web_server".to_string())
        .expect("Invalid authorization endpoint URL");
    let token_url = TokenUrl::new("https://launchpad.37signals.com/authorization/token?type=web_server".to_string())
        .expect("Invalid token endpoint URL");
    // Set up the config for the Github OAuth2 process.
    // let client = BasicClient::new(
    let client = SpecialClient::new(
        client_id,
        Some(client_secret),
        auth_url,
        Some(token_url),
    )
    // Credentials are sent in the request body (`AuthType::RequestBody`).
    .set_auth_type(AuthType::RequestBody)
    // This example will be running its own server at localhost:8080.
    // See below for the server implementation.
    .set_redirect_url(
        RedirectUrl::new("http://localhost:8080".to_string()).expect("Invalid redirect URL"),
    );
    // Generate a PKCE challenge.
    let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
    // Generate the authorization URL to which we'll redirect the user.
    let (authorize_url, csrf_state) = client
        .authorize_url(CsrfToken::new_random)
        // This example is requesting access to the user's public repos and email.
        // .add_scope(Scope::new("read".to_string()))
        .set_pkce_challenge(pkce_challenge)
        .url();
    println!(
        "Open this URL in your browser:\n{}\n",
        authorize_url.to_string()
    );
    // A very naive implementation of the redirect server.
    let listener = TcpListener::bind("127.0.0.1:8080").unwrap();
    for stream in listener.incoming() {
        if let Ok(mut stream) = stream {
            let code;
            let state;
            {
                // Only the request line is read; the query string on it
                // carries everything we need.
                let mut reader = BufReader::new(&stream);
                let mut request_line = String::new();
                reader.read_line(&mut request_line).unwrap();
                let redirect_url = request_line.split_whitespace().nth(1).unwrap();
                let url = Url::parse(&("http://localhost".to_string() + redirect_url)).unwrap();
                let code_pair = url
                    .query_pairs()
                    .find(|pair| {
                        let &(ref key, _) = pair;
                        key == "code"
                    })
                    .unwrap();
                let (_, value) = code_pair;
                code = AuthorizationCode::new(value.into_owned());
                let state_pair = url
                    .query_pairs()
                    .find(|pair| {
                        let &(ref key, _) = pair;
                        key == "state"
                    })
                    .unwrap();
                let (_, value) = state_pair;
                state = CsrfToken::new(value.into_owned());
            }
            // Tell the browser we're done before talking to the token endpoint.
            let message = "Go back to your terminal :)";
            let response = format!(
                "HTTP/1.1 200 OK\r\ncontent-length: {}\r\n\r\n{}",
                message.len(),
                message
            );
            stream.write_all(response.as_bytes()).unwrap();
            // println!(
            //     "Basecamp returned the following state:\n{} (expected `{}`)\n",
            //     state.secret(),
            //     csrf_state.secret()
            // );
            // Exchange the code with a token.
            // NOTE(review): the CSRF state is only compared AFTER the code is
            // exchanged (below); consider verifying `state` first.
            let token_res = client.exchange_code(code).set_pkce_verifier(pkce_verifier).request(http_client);
            // println!("Basecamp returned the following token:\n{:?}\n", token_res);
            if let Ok(token) = token_res {
                // println!("access token {:?}", token.access_token().secret());
                // NB: Github returns a single comma-separated "scope" parameter instead of multiple
                // space-separated scopes. Github-specific clients can parse this scope into
                // multiple scopes by splitting at the commas. Note that it's not safe for the
                // library to do this by default because RFC 6749 allows scopes to contain commas.
                let _scopes = if let Some(scopes_vec) = token.scopes() {
                    scopes_vec
                        .iter()
                        .map(|comma_separated| comma_separated.split(','))
                        .flatten()
                        .collect::<Vec<_>>()
                } else {
                    Vec::new()
                };
                // println!("Basecamp returned the following scopes:\n{:?}\n", scopes);
                if state.secret() == csrf_state.secret() {
                    return Ok(token);
                } else {
                    return Err(String::from("Issue with issuing token"));
                }
            }
            // The server will terminate itself after collecting the first code.
            break;
        }
    }
    Err(String::from("Failed the test!"))
}
use crate::context::CommandRegistry;
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::evaluate::{evaluate_baseline_expr, Scope};
use crate::parser::{hir, Operator};
use crate::prelude::*;
use crate::Text;
use chrono::{DateTime, Utc};
use chrono_humanize::Humanize;
use derive_new::new;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::path::PathBuf;
use std::time::SystemTime;
mod serde_bigint {
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;
pub fn serialize<S>(big_int: &super::BigInt, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serde::Serialize::serialize(
&big_int
.to_i64()
.ok_or(serde::ser::Error::custom("expected a i64-sized bignum"))?,
serializer,
)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<super::BigInt, D::Error>
where
D: serde::Deserializer<'de>,
{
let x: i64 = serde::Deserialize::deserialize(deserializer)?;
Ok(super::BigInt::from_i64(x)
.ok_or(serde::de::Error::custom("expected a i64-sized bignum"))?)
}
}
mod serde_bigdecimal {
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;
pub fn serialize<S>(big_decimal: &super::BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serde::Serialize::serialize(
&big_decimal
.to_f64()
.ok_or(serde::ser::Error::custom("expected a f64-sized bignum"))?,
serializer,
)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<super::BigDecimal, D::Error>
where
D: serde::Deserializer<'de>,
{
let x: f64 = serde::Deserialize::deserialize(deserializer)?;
Ok(super::BigDecimal::from_f64(x)
.ok_or(serde::de::Error::custom("expected a f64-sized bigdecimal"))?)
}
}
/// A leaf (non-structured) value in the data model.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Deserialize, Serialize)]
pub enum Primitive {
    // Absence of a value.
    Nothing,
    // Arbitrary-precision integer, serialized through i64.
    #[serde(with = "serde_bigint")]
    Int(BigInt),
    // Arbitrary-precision decimal, serialized through f64.
    #[serde(with = "serde_bigdecimal")]
    Decimal(BigDecimal),
    // A size in bytes.
    Bytes(u64),
    String(String),
    // A glob-like pattern, kept distinct from String for type reporting.
    Pattern(String),
    Boolean(bool),
    Date(DateTime<Utc>),
    Path(PathBuf),
    #[serde(with = "serde_bytes")]
    Binary(Vec<u8>),
    // Stream markers (used as bookend markers rather than actual values)
    BeginningOfStream,
    EndOfStream,
}
/// Wrap an arbitrary-precision decimal as a `Primitive::Decimal`.
impl From<BigDecimal> for Primitive {
    fn from(value: BigDecimal) -> Primitive {
        Primitive::Decimal(value)
    }
}
/// Convert an `f64` into an arbitrary-precision decimal primitive.
///
/// Panics if the float has no `BigDecimal` representation; callers are
/// expected to pass finite values.
impl From<f64> for Primitive {
    fn from(float: f64) -> Primitive {
        Primitive::Decimal(
            // `expect` states the invariant instead of a bare `unwrap`.
            BigDecimal::from_f64(float).expect("finite f64 should convert to BigDecimal"),
        )
    }
}
impl Primitive {
    /// Human-readable type name, used in type errors and descriptors.
    pub(crate) fn type_name(&self) -> String {
        use Primitive::*;

        match self {
            Nothing => "nothing",
            BeginningOfStream => "beginning-of-stream",
            EndOfStream => "end-of-stream",
            Path(_) => "path",
            Int(_) => "int",
            Decimal(_) => "decimal",
            Bytes(_) => "bytes",
            Pattern(_) => "pattern",
            String(_) => "string",
            Boolean(_) => "boolean",
            Date(_) => "date",
            Binary(_) => "binary",
        }
        .to_string()
    }

    /// Developer-facing rendering used by the `Debug` plumbing of `Value`.
    pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use Primitive::*;

        match self {
            Nothing => write!(f, "Nothing"),
            BeginningOfStream => write!(f, "BeginningOfStream"),
            EndOfStream => write!(f, "EndOfStream"),
            Int(int) => write!(f, "{}", int),
            Path(path) => write!(f, "{}", path.display()),
            Decimal(decimal) => write!(f, "{}", decimal),
            Bytes(bytes) => write!(f, "{}", bytes),
            Pattern(string) => write!(f, "{:?}", string),
            String(string) => write!(f, "{:?}", string),
            Boolean(boolean) => write!(f, "{}", boolean),
            Date(date) => write!(f, "{}", date),
            Binary(binary) => write!(f, "{:?}", binary),
        }
    }

    /// Build an `Int` or `Decimal` primitive from any numeric wrapper.
    pub fn number(number: impl Into<Number>) -> Primitive {
        let number = number.into();
        match number {
            Number::Int(int) => Primitive::Int(int),
            Number::Decimal(decimal) => Primitive::Decimal(decimal),
        }
    }

    /// User-facing rendering of the primitive.
    ///
    /// `field_name` only influences booleans: under a non-empty column name a
    /// `true` cell shows the column name and a `false` cell renders blank.
    pub fn format(&self, field_name: Option<&String>) -> String {
        // `format!` with constant text / a lone `{}` was replaced with
        // `to_string()`/`String::new()` (clippy `useless_format`).
        match self {
            Primitive::Nothing => String::new(),
            Primitive::BeginningOfStream => String::new(),
            Primitive::EndOfStream => String::new(),
            Primitive::Path(p) => format!("{}", p.display()),
            Primitive::Bytes(b) => {
                let byte = byte_unit::Byte::from_bytes(*b as u128);
                if byte.get_bytes() == 0u128 {
                    return "—".to_string();
                }
                let byte = byte.get_appropriate_unit(false);
                match byte.get_unit() {
                    // Pad plain bytes so single digits line up with "KB"-style units.
                    byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
                    _ => format!("{}", byte.format(1)),
                }
            }
            Primitive::Int(i) => i.to_string(),
            Primitive::Decimal(decimal) => decimal.to_string(),
            Primitive::Pattern(s) => s.to_string(),
            Primitive::String(s) => s.to_string(),
            Primitive::Boolean(b) => match (b, field_name) {
                (true, None) => "Yes".to_string(),
                (false, None) => "No".to_string(),
                (true, Some(s)) if !s.is_empty() => s.to_string(),
                (false, Some(s)) if !s.is_empty() => String::new(),
                (true, Some(_)) => "Yes".to_string(),
                (false, Some(_)) => "No".to_string(),
            },
            Primitive::Binary(_) => "<binary>".to_string(),
            Primitive::Date(d) => format!("{}", d.humanize()),
        }
    }

    /// Table-cell alignment hint: numbers right-align and the zero-bytes
    /// "missing" marker centres; everything else uses the default.
    pub fn style(&self) -> &'static str {
        match self {
            Primitive::Bytes(0) => "c", // centre 'missing' indicator
            Primitive::Int(_) | Primitive::Bytes(_) | Primitive::Decimal(_) => "r",
            _ => "",
        }
    }
}
/// A binary comparison expression: `left operator right`.
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, new, Serialize)]
pub struct Operation {
    pub(crate) left: Value,
    pub(crate) operator: Operator,
    pub(crate) right: Value,
}
/// A block of expressions together with the source text and tag they came from.
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, Serialize, Deserialize, new)]
pub struct Block {
    pub(crate) expressions: Vec<hir::Expression>,
    // Original source text; expressions index into it for display.
    pub(crate) source: Text,
    pub(crate) tag: Tag,
}
impl Block {
    /// Evaluate every expression in the block against `value` and return the
    /// result of the last one. An empty block evaluates to `Nothing`.
    pub fn invoke(&self, value: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
        let scope = Scope::new(value.clone());

        // Idiomatic emptiness check (`is_empty` over `len() == 0`).
        if self.expressions.is_empty() {
            return Ok(Value::nothing().tagged(self.tag));
        }

        let mut last = None;

        for expr in self.expressions.iter() {
            last = Some(evaluate_baseline_expr(
                &expr,
                &CommandRegistry::empty(),
                &scope,
                &self.source,
            )?)
        }

        // Safe: the empty case returned above, so at least one expression ran.
        Ok(last.unwrap())
    }
}
/// Any value flowing through the pipeline: a scalar, a row, a table of
/// tagged values, or an unevaluated block.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize)]
pub enum Value {
    Primitive(Primitive),
    Row(crate::data::Dictionary),
    Table(Vec<Tagged<Value>>),
    Block(Block),
}
/// Prefer `From` over a hand-written `Into`: the standard blanket impl
/// derives `Into<Value> for Number` from this automatically, so existing
/// `.into()` call sites keep working.
impl From<Number> for Value {
    fn from(number: Number) -> Value {
        match number {
            Number::Int(int) => Value::int(int),
            Number::Decimal(decimal) => Value::decimal(decimal),
        }
    }
}
/// Borrowed counterpart of the owned conversion; clones the inner number.
/// Written as `From` so the blanket impl supplies `Into<Value> for &Number`.
impl From<&Number> for Value {
    fn from(number: &Number) -> Value {
        match number {
            Number::Int(int) => Value::int(int.clone()),
            Number::Decimal(decimal) => Value::decimal(decimal.clone()),
        }
    }
}
/// Wrap a list of tagged values in a `Debug`-printable adapter.
pub fn debug_list(values: &Vec<Tagged<Value>>) -> ValuesDebug<'_> {
    ValuesDebug { values }
}
/// `Debug` adapter over a borrowed list of tagged values.
pub struct ValuesDebug<'a> {
    values: &'a Vec<Tagged<Value>>,
}
impl fmt::Debug for ValuesDebug<'_> {
    // Renders as a list where each element uses its own debug adapter.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_list()
            .entries(self.values.iter().map(|i| i.debug()))
            .finish()
    }
}
/// `Debug` adapter over a single borrowed tagged value.
pub struct ValueDebug<'a> {
    value: &'a Tagged<Value>,
}
impl fmt::Debug for ValueDebug<'_> {
    // Dispatches to the shape-specific debug rendering.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.value.item() {
            Value::Primitive(p) => p.debug(f),
            Value::Row(o) => o.debug(f),
            Value::Table(l) => debug_list(l).fmt(f),
            Value::Block(_) => write!(f, "[[block]]"),
        }
    }
}
impl Tagged<Value> {
    /// The value's type name, carrying this value's own tag.
    pub(crate) fn tagged_type_name(&self) -> Tagged<String> {
        Tagged::from_item(self.type_name(), self.tag())
    }
}
/// Extract the inner `Block`, or a type error naming the actual type.
impl std::convert::TryFrom<&Tagged<Value>> for Block {
    type Error = ShellError;

    fn try_from(value: &Tagged<Value>) -> Result<Block, ShellError> {
        match value.item() {
            Value::Block(block) => Ok(block.clone()),
            v => Err(ShellError::type_error(
                "Block",
                value.copy_tag(v.type_name()),
            )),
        }
    }
}
/// Narrow an `Int` primitive to `i64`; any other shape is a type error.
impl std::convert::TryFrom<&Tagged<Value>> for i64 {
    type Error = ShellError;

    fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
        match value.item() {
            Value::Primitive(Primitive::Int(int)) => {
                // coerce_into performs the BigInt → i64 narrowing
                // (presumably erroring on overflow — see its definition).
                int.tagged(value.tag).coerce_into("converting to i64")
            }
            v => Err(ShellError::type_error(
                "Integer",
                value.copy_tag(v.type_name()),
            )),
        }
    }
}
/// Clone out a `String` primitive; any other shape is a type error.
impl std::convert::TryFrom<&Tagged<Value>> for String {
    type Error = ShellError;

    fn try_from(value: &Tagged<Value>) -> Result<String, ShellError> {
        match value.item() {
            Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
            v => Err(ShellError::type_error(
                "String",
                value.copy_tag(v.type_name()),
            )),
        }
    }
}
/// Clone out a `Binary` primitive's bytes; any other shape is a type error.
impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
    type Error = ShellError;

    fn try_from(value: &Tagged<Value>) -> Result<Vec<u8>, ShellError> {
        match value.item() {
            Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
            v => Err(ShellError::type_error(
                "Binary",
                value.copy_tag(v.type_name()),
            )),
        }
    }
}
/// Borrow the row dictionary; any other shape is a type error.
impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionary {
    type Error = ShellError;

    fn try_from(value: &'a Tagged<Value>) -> Result<&'a crate::data::Dictionary, ShellError> {
        match value.item() {
            Value::Row(d) => Ok(d),
            v => Err(ShellError::type_error(
                "Dictionary",
                value.copy_tag(v.type_name()),
            )),
        }
    }
}
/// Presence of a boolean command-line flag.
#[derive(Serialize, Deserialize)]
pub enum Switch {
    Present,
    Absent,
}
/// Interpret an optional flag value: absent → `Absent`, boolean `true` →
/// `Present`.
///
/// NOTE(review): a literal boolean `false` is rejected as a type error
/// rather than mapping to `Absent` — confirm that is intended.
impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
    type Error = ShellError;

    fn try_from(value: Option<&Tagged<Value>>) -> Result<Switch, ShellError> {
        match value {
            None => Ok(Switch::Absent),
            Some(value) => match value.item() {
                Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
                v => Err(ShellError::type_error(
                    "Boolean",
                    value.copy_tag(v.type_name()),
                )),
            },
        }
    }
}
impl Tagged<Value> {
    /// Wrap this value in its `Debug`-printable adapter.
    pub(crate) fn debug(&self) -> ValueDebug<'_> {
        ValueDebug { value: self }
    }
}
impl Value {
pub(crate) fn type_name(&self) -> String {
match self {
Value::Primitive(p) => p.type_name(),
Value::Row(_) => format!("object"),
Value::Table(_) => format!("list"),
Value::Block(_) => format!("block"),
}
}
// TODO: This is basically a legacy construct, I think
pub fn data_descriptors(&self) -> Vec<String> {
match self {
Value::Primitive(_) => vec![],
Value::Row(o) => o
.entries
.keys()
.into_iter()
.map(|x| x.to_string())
.collect(),
Value::Block(_) => vec![],
Value::Table(_) => vec![],
}
}
pub(crate) fn get_data_by_key(&self, name: &str) -> Option<&Tagged<Value>> {
match self {
Value::Row(o) => o.get_data_by_key(name),
Value::Table(l) => {
for item in l {
match item {
Tagged {
item: Value::Row(o),
..
} => match o.get_data_by_key(name) {
Some(v) => return Some(v),
None => {}
},
_ => {}
}
}
None
}
_ => None,
}
}
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
let mut current = self;
for p in path.split(".") {
match current.get_data_by_key(p) {
Some(v) => current = v,
None => return None,
}
}
Some(Tagged::from_item(current, tag))
}
pub fn insert_data_at_path(
&self,
tag: Tag,
path: &str,
new_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
let split_path: Vec<_> = path.split(".").collect();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
if split_path.len() == 1 {
// Special case for inserting at the top level
current
.entries
.insert(path.to_string(), Tagged::from_item(new_value, tag));
return Some(Tagged::from_item(new_obj, tag));
}
for idx in 0..split_path.len() {
match current.entries.get_mut(split_path[idx]) {
Some(next) => {
if idx == (split_path.len() - 2) {
match &mut next.item {
Value::Row(o) => {
o.entries.insert(
split_path[idx + 1].to_string(),
Tagged::from_item(new_value, tag),
);
}
_ => {}
}
return Some(Tagged::from_item(new_obj, tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
}
_ => return None,
}
}
}
None
}
pub fn replace_data_at_path(
&self,
tag: Tag,
path: &str,
replaced_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
let split_path: Vec<_> = path.split(".").collect();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
for idx in 0..split_path.len() {
match current.entries.get_mut(split_path[idx]) {
Some(next) => {
if idx == (split_path.len() - 1) {
*next = Tagged::from_item(replaced_value, tag);
return Some(Tagged::from_item(new_obj, tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
}
_ => return None,
}
}
}
None
}
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
match self {
p @ Value::Primitive(_) => MaybeOwned::Borrowed(p),
Value::Row(o) => o.get_data(desc),
Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
}
}
pub(crate) fn format_leaf(&self, desc: Option<&String>) -> String {
match self {
Value::Primitive(p) => p.format(desc),
Value::Block(b) => itertools::join(
b.expressions
.iter()
.map(|e| e.source(&b.source).to_string()),
"; ",
),
Value::Row(_) => format!("[table: 1 row]"),
Value::Table(l) => format!(
"[table: {} {}]",
l.len(),
if l.len() == 1 { "row" } else { "rows" }
),
}
}
pub(crate) fn style_leaf(&self) -> &'static str {
match self {
Value::Primitive(p) => p.style(),
_ => "",
}
}
pub(crate) fn compare(
&self,
operator: &Operator,
other: &Value,
) -> Result<bool, (String, String)> {
match operator {
_ => {
let coerced = coerce_compare(self, other)?;
let ordering = coerced.compare();
use std::cmp::Ordering;
let result = match (operator, ordering) {
(Operator::Equal, Ordering::Equal) => true,
(Operator::NotEqual, Ordering::Less)
| (Operator::NotEqual, Ordering::Greater) => true,
(Operator::LessThan, Ordering::Less) => true,
(Operator::GreaterThan, Ordering::Greater) => true,
(Operator::GreaterThanOrEqual, Ordering::Greater)
| (Operator::GreaterThanOrEqual, Ordering::Equal) => true,
(Operator::LessThanOrEqual, Ordering::Less)
| (Operator::LessThanOrEqual, Ordering::Equal) => true,
_ => false,
};
Ok(result)
}
}
}
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
match self {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::string(format!(
"Expected string, got {:?}",
other
))),
}
}
pub(crate) fn is_true(&self) -> bool {
match self {
Value::Primitive(Primitive::Boolean(true)) => true,
_ => false,
}
}
pub fn string(s: impl Into<String>) -> Value {
Value::Primitive(Primitive::String(s.into()))
}
pub fn pattern(s: impl Into<String>) -> Value {
Value::Primitive(Primitive::String(s.into()))
}
pub fn path(s: impl Into<PathBuf>) -> Value {
Value::Primitive(Primitive::Path(s.into()))
}
pub fn bytes(s: impl Into<u64>) -> Value {
Value::Primitive(Primitive::Bytes(s.into()))
}
pub fn int(s: impl Into<BigInt>) -> Value {
Value::Primitive(Primitive::Int(s.into()))
}
pub fn decimal(s: impl Into<BigDecimal>) -> Value {
Value::Primitive(Primitive::Decimal(s.into()))
}
pub fn binary(binary: Vec<u8>) -> Value {
Value::Primitive(Primitive::Binary(binary))
}
pub fn number(s: impl Into<Number>) -> Value {
let num = s.into();
match num {
Number::Int(int) => Value::int(int),
Number::Decimal(decimal) => Value::decimal(decimal),
}
}
pub fn boolean(s: impl Into<bool>) -> Value {
Value::Primitive(Primitive::Boolean(s.into()))
}
pub fn system_date(s: SystemTime) -> Value {
Value::Primitive(Primitive::Date(s.into()))
}
pub fn date_from_str(s: &str) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s)
.map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?;
let date = date.with_timezone(&chrono::offset::Utc);
Ok(Value::Primitive(Primitive::Date(date)))
}
pub fn nothing() -> Value {
Value::Primitive(Primitive::Nothing)
}
}
impl Tagged<Value> {
    /// Extract the inner `PathBuf`, or a type error carrying this value's tag.
    pub(crate) fn as_path(&self) -> Result<PathBuf, ShellError> {
        match self.item() {
            Value::Primitive(Primitive::Path(path)) => Ok(path.clone()),
            other => Err(ShellError::type_error(
                "Path",
                other.type_name().tagged(self.tag()),
            )),
        }
    }
}
/// Build a new row containing exactly `fields`, in order; fields missing
/// from `obj` are filled with `Nothing` so the output shape stays stable.
pub(crate) fn select_fields(obj: &Value, fields: &[String], tag: impl Into<Tag>) -> Tagged<Value> {
    let mut out = TaggedDictBuilder::new(tag);
    let descs = obj.data_descriptors();
    for field in fields {
        match descs.iter().find(|d| *d == field) {
            None => out.insert(field, Value::nothing()),
            Some(desc) => out.insert(desc.clone(), obj.get_data(desc).borrow().clone()),
        }
    }
    out.into_tagged_value()
}
/// Build a new row with every descriptor of `obj` except those in `fields`.
pub(crate) fn reject_fields(obj: &Value, fields: &[String], tag: impl Into<Tag>) -> Tagged<Value> {
    let mut out = TaggedDictBuilder::new(tag);
    for desc in obj.data_descriptors() {
        if !fields.contains(&desc) {
            out.insert(desc.clone(), obj.get_data(&desc).borrow().clone());
        }
    }
    out.into_tagged_value()
}
/// A pair of values already coerced to a common comparable representation.
enum CompareValues {
    Ints(BigInt, BigInt),
    Decimals(BigDecimal, BigDecimal),
    String(String, String),
}
impl CompareValues {
    /// Total ordering of the coerced pair.
    fn compare(&self) -> std::cmp::Ordering {
        match self {
            CompareValues::Ints(left, right) => left.cmp(right),
            CompareValues::Decimals(left, right) => left.cmp(right),
            CompareValues::String(left, right) => left.cmp(right),
        }
    }
}
/// Lift two values into a comparable pair; only primitives can be compared,
/// anything else errors with both type names.
fn coerce_compare(left: &Value, right: &Value) -> Result<CompareValues, (String, String)> {
    if let (Value::Primitive(l), Value::Primitive(r)) = (left, right) {
        coerce_compare_primitive(l, r)
    } else {
        Err((left.type_name(), right.type_name()))
    }
}
/// Coerce two primitives to a common comparable representation.
///
/// Ints/Bytes compare as `BigInt`; any mix involving a Decimal compares as
/// `BigDecimal`; strings compare as strings. Every other combination is an
/// error carrying both type names.
fn coerce_compare_primitive(
    left: &Primitive,
    right: &Primitive,
) -> Result<CompareValues, (String, String)> {
    use Primitive::*;
    Ok(match (left, right) {
        (Int(left), Int(right)) => CompareValues::Ints(left.clone(), right.clone()),
        // `BigDecimal::zero() + left` widens the BigInt to a BigDecimal.
        (Int(left), Decimal(right)) => {
            CompareValues::Decimals(BigDecimal::zero() + left, right.clone())
        }
        (Int(left), Bytes(right)) => CompareValues::Ints(left.clone(), BigInt::from(*right)),
        (Decimal(left), Decimal(right)) => CompareValues::Decimals(left.clone(), right.clone()),
        (Decimal(left), Int(right)) => {
            CompareValues::Decimals(left.clone(), BigDecimal::zero() + right)
        }
        (Decimal(left), Bytes(right)) => {
            CompareValues::Decimals(left.clone(), BigDecimal::from(*right))
        }
        (Bytes(left), Int(right)) => CompareValues::Ints(BigInt::from(*left), right.clone()),
        (Bytes(left), Decimal(right)) => {
            CompareValues::Decimals(BigDecimal::from(*left), right.clone())
        }
        (String(left), String(right)) => CompareValues::String(left.clone(), right.clone()),
        _ => return Err((left.type_name(), right.type_name())),
    })
}
|
use super::types::{DiffOptions, Only};
use crate::cmd::CmdRunner;
use crate::data::{Entry, Item, Status};
use crate::files;
use crate::index::Indexer;
use crate::path_str;
use crate::prompt::Prompt;
use anyhow::{bail, Result};
use crossterm::style::Stylize;
use inquire::MultiSelect;
use std::fmt;
use std::path::{Path, PathBuf};
/// Flags controlling how a sync run behaves.
#[derive(Debug)]
pub struct SyncOptions {
    // Do not execute any file operations.
    pub dryrun: bool,
    // Ask user, by using the prompt field, to confirm each copy.
    pub confirm: bool,
    // When copying to home, create a backup file if it already exists.
    pub backup: bool,
    // Show a diff before the confirmation prompt for changed files.
    pub show_diff: bool,
    // How the diff command is built/invoked.
    pub diff_options: DiffOptions,
    // Creates a git commit message.
    pub git_commit: Option<String>,
    // Run git push after committing.
    pub git_push: bool,
    // Let the user pick interactively which entries to sync.
    pub interactive: bool,
}
impl Default for SyncOptions {
    /// Conservative defaults: confirm and back up before writing, show no
    /// diff, touch nothing git-related, and run non-interactively.
    fn default() -> Self {
        Self {
            dryrun: false,
            confirm: true,
            backup: true,
            show_diff: false,
            diff_options: DiffOptions::default(),
            git_commit: None,
            git_push: false,
            interactive: false,
        }
    }
}
/// Orchestrates indexing, prompting and copying for one sync run.
pub struct SyncHandler {
    indexer: Indexer,
    prompt: Box<dyn Prompt>,
    items: Vec<Item>,
    options: SyncOptions,
    // Executes external commands (git) inside the repository.
    runner: CmdRunner,
}
// Public methods.
impl SyncHandler {
pub fn new(
prompt: Box<dyn Prompt>,
home: PathBuf,
repository: PathBuf,
items: Vec<Item>,
options: SyncOptions,
only: Option<Only>,
) -> Self {
let runner = CmdRunner::new(repository.clone());
let indexer = Indexer::new(home, repository, only);
Self {
options,
prompt,
indexer,
items,
runner,
}
}
pub fn copy_to_home(&self) -> Result<()> {
self.copy(Target::Home)
}
pub fn copy_to_repo(&self) -> Result<()> {
self.copy(Target::Repo)?;
if let Some(msg) = &self.options.git_commit {
log::info!("Creating git commit with message: {msg}");
self.runner.run("git", to_strings(&["add", "."]))?;
self.runner
.run("git", to_strings(&["commit", "-m", msg.as_str()]))?;
if self.options.git_push {
log::info!("Running git push");
self.runner.run("git", to_strings(&["push"]))?;
}
}
Ok(())
}
fn copy(&self, target: Target) -> Result<()> {
let map = self.indexer.index(&self.items)?;
let entries: Vec<&Entry> = map
.iter()
.flat_map(|(_name, es)| es)
.filter(|entry| entry.is_ok())
.collect();
let entries = if self.options.interactive {
let files: Vec<String> = entries
.iter()
.filter_map(|e| match e {
Entry::Ok {
relpath, status, ..
} => match status {
Status::Ok => None,
Status::Diff => Some(relpath.to_string()),
Status::MissingHome if target.is_home() => Some(relpath.to_string()),
Status::MissingRepo if !target.is_home() => Some(relpath.to_string()),
_ => None,
},
Entry::Err(_) => None,
})
.collect();
let selected = MultiSelect::new("Select entries to sync", files).prompt()?;
let mut matched = Vec::new();
for entry in entries {
let relpath = entry.get_relpath().to_string();
if selected.contains(&relpath) {
matched.push(entry);
}
}
matched
} else {
entries
};
for entry in entries {
match entry {
Entry::Ok {
relpath,
status,
home_path,
repo_path,
} => self.make_copy(&target, relpath, status, home_path, repo_path)?,
Entry::Err(reason) => bail!("invalid entry: {}", reason),
}
}
Ok(())
}
/// Copy a single entry in the direction given by `target`, honoring the
/// dry-run, confirmation, diff-display, and backup options.
///
/// Entries that are already in sync, or whose missing side is irrelevant
/// for this sync direction, are skipped.
fn make_copy(
    &self,
    target: &Target,
    relpath: &str,
    status: &Status,
    home_path: &Path,
    repo_path: &Path,
) -> Result<()> {
    // Nothing to do when the entry is up to date or does not apply
    // to this direction.
    match status {
        Status::Ok => {
            log::info!("{} ok", relpath);
            return Ok(());
        }
        Status::MissingHome if !target.is_home() => return Ok(()),
        Status::MissingRepo if target.is_home() => return Ok(()),
        _ => {}
    }
    let exec = !self.options.dryrun;
    // Pick source, destination, and a user-facing name from the direction.
    let (display_name, src, dst) = match target {
        Target::Home => {
            let s = format!("~/{}", relpath);
            (s, repo_path, home_path)
        }
        Target::Repo => {
            let s = path_str!(repo_path);
            (s, home_path, repo_path)
        }
    };
    let src_str = path_str!(src);
    let dst_str = path_str!(dst);
    if self.options.confirm {
        // Optionally show a diff before prompting, but only when the
        // files actually differ.
        if self.options.show_diff && matches!(status, Status::Diff) {
            let mut cmd = self.options.diff_options.to_cmd(&src_str, &dst_str)?;
            cmd.status()?;
        }
        let msg = format!("Write {}?", &display_name.blue());
        if !self.prompt.confirm(&msg, false)? {
            log::info!("Skipping {}", src_str);
            return Ok(());
        }
    }
    let dir = match dst.parent() {
        Some(parent) => parent,
        None => bail!("failed to get parent directory of {}", dst_str),
    };
    if exec {
        if !dir.exists() {
            log::info!("Creating directory: {:?}", dir);
            files::create_dirs(dir)?;
        }
        if target.is_home() && dst.exists() && self.options.backup {
            // BUG FIX: the backup name previously discarded the computed
            // filename and used one fixed literal, so successive backups
            // of different files collided on the same path.
            let filename = path_str!(dst.file_name().unwrap());
            let filename = format!(".{}.backup", filename);
            let mut backup = PathBuf::from(&dst);
            backup.set_file_name(filename);
            files::copy(dst, &backup)?;
            log::debug!("Created backup of {}", dst_str);
        }
        files::copy(src, dst)?;
    }
    println!(" {} {}", "".green(), &relpath);
    Ok(())
}
}
/// Sync direction: copy into the home directory or into the repository.
enum Target {
    Home,
    Repo,
}

impl Target {
    /// Returns `true` when this target is the home directory.
    fn is_home(&self) -> bool {
        matches!(self, Target::Home)
    }
}

impl fmt::Display for Target {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Target::Home => "home",
            Target::Repo => "repo",
        };
        write!(f, "{}", label)
    }
}
/// Convert a slice of string slices into owned `String`s.
///
/// The previous version built a throwaway `Vec<&str>` via `to_vec()`
/// before mapping; iterating the slice directly avoids that allocation.
fn to_strings(v: &[&str]) -> Vec<String> {
    v.iter().map(|s| s.to_string()).collect()
}
|
use tools;
use std::path::Path;
/// # Thumbnail
///
/// Holds all information about a thumbnail and provides a function to
/// create one. This structure is part of a media item and should not be
/// used on its own.
///
/// # To-Do
/// Add the creation function once FFMPEG can be used directly.
#[derive(Clone)]
pub struct Thumbnail {
    /// ID of the media item this thumbnail belongs to (0 after `new()`).
    pub item_id: u64,
    /// Path to the thumbnail image on disk; empty when none exists yet.
    pub file_path: String,
    /// Size of the thumbnail file in bytes.
    pub file_size: u64,
    /// MIME type of the thumbnail image.
    pub mime_type: String,
    /// Thumbnail width in pixels.
    pub width: u16,
    /// Thumbnail height in pixels.
    pub height: u16,
}
impl Thumbnail {
    /// Create a new and empty Thumbnail with zeroed numeric fields and
    /// empty strings.
    pub fn new() -> Thumbnail {
        Thumbnail {
            item_id: 0,
            file_path: String::new(),
            file_size: 0,
            mime_type: String::new(),
            width: 0,
            height: 0,
        }
    }

    /// Get a list of NameValue pairs representing the structure's attributes.
    pub fn get_name_value_pairs(&self) -> Vec<tools::NameValuePair> {
        // Returned directly instead of through a needless intermediate binding.
        vec![
            tools::NameValuePair::new("itemId", &self.item_id.to_string()),
            tools::NameValuePair::new("path", &self.file_path),
            tools::NameValuePair::new("mimeType", &self.mime_type),
            tools::NameValuePair::new("size", &self.file_size.to_string()),
            tools::NameValuePair::new("width", &self.width.to_string()),
            tools::NameValuePair::new("height", &self.height.to_string()),
        ]
    }

    /// Check whether a thumbnail path is set and the file exists on disk.
    pub fn is_available(&self) -> bool {
        !self.file_path.is_empty() && Path::new(&self.file_path).exists()
    }
}
|
extern crate actix;
extern crate actix_web;
extern crate crypto_hash;
extern crate env_logger;
extern crate failure;
extern crate futures;
extern crate lettre;
extern crate lettre_email;
extern crate openssl;
extern crate rand;
extern crate serde_json;
extern crate time;
extern crate toml;
extern crate uuid;
#[macro_use]
extern crate log;
use actix_web::fs::NamedFile;
use actix_web::http::{Method, StatusCode};
use actix_web::middleware::Logger;
use actix_web::Binary;
use actix_web::{
http, server, App, AsyncResponder, HttpMessage, HttpRequest, HttpResponse, Responder, Result,
};
use openssl::ssl::{SslAcceptor, SslFiletype, SslMethod};
#[macro_use]
extern crate serde_derive;
use failure::Error;
use futures::future::Future;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
mod process_json;
mod util;
use process_json::{process_public_json, PublicMessage, ServerResponse};
/// Serve a file from the `static/` directory, resolved from the `tail`
/// path segment of the request.
fn files(req: &HttpRequest) -> Result<NamedFile> {
    let tail: PathBuf = req.match_info().query("tail")?;
    info!("files: {:?}", tail);
    let full_path = Path::new("static/").join(tail);
    Ok(NamedFile::open(full_path)?)
}
/// Serve the site favicon from the static directory.
///
/// The request parameter is required by the actix-web handler signature
/// but unused, so it is named `_req` to silence the unused warning.
fn favicon(_req: &HttpRequest) -> Result<NamedFile> {
    let stpath = Path::new("static/favicon.ico");
    Ok(NamedFile::open(stpath)?)
}
/// Serve the sitemap from the static directory.
///
/// The request parameter is required by the actix-web handler signature
/// but unused, so it is named `_req` to silence the unused warning.
fn sitemap(_req: &HttpRequest) -> Result<NamedFile> {
    let stpath = Path::new("static/sitemap.txt");
    Ok(NamedFile::open(stpath)?)
}
/// simple index handler
fn mainpage(req: &HttpRequest) -> Result<HttpResponse> {
    info!(
        "remote ip: {:?}, request:{:?}",
        req.connection_info().remote(),
        req
    );
    // Read the static index page and wrap it in an HTML response,
    // converting any load error into an actix error.
    util::load_string("static/index.html")
        .map(|page| {
            HttpResponse::build(StatusCode::OK)
                .content_type("text/html; charset=utf-8")
                .body(page)
        })
        .map_err(|e| e.into())
}
/// Handle POST /public: parse the request body as a `PublicMessage`,
/// dispatch it through `process_json::process_public_json`, and return
/// the JSON-serialized result as the response body.
///
/// Processing errors are reported to the client as a JSON "server error"
/// `ServerResponse` rather than an HTTP error status.
fn public(req: &HttpRequest) -> Box<Future<Item = String, Error = Error>> {
    // Clone connection info up front so it can move into the closure.
    let ci = req.connection_info().clone();
    req
        .json()
        .from_err()
        .and_then(move |msg: PublicMessage| {
            Ok(
                match process_json::process_public_json(&(ci.remote()), msg) {
                    // Success: serialize the response; if serialization
                    // itself fails, fall back to the error's text.
                    Ok(sr) => match serde_json::to_string(&sr) {
                        Ok(s) => s,
                        Err(e) => e.to_string(),
                    },
                    Err(e) => {
                        error!("uh oh, 'public' err: {:?}", e);
                        // Wrap the processing error so the client still
                        // receives well-formed JSON.
                        let se = ServerResponse {
                            what: "server error".to_string(),
                            content: serde_json::Value::String(e.to_string()),
                        };
                        match serde_json::to_string(&se) {
                            Ok(s) => s,
                            Err(e) => e.to_string(),
                        }
                    }
                },
            )
        })
        .responder()
}
/// Server configuration, deserialized from `config.toml`.
#[derive(Deserialize, Debug)]
struct Config {
    /// IP address the server binds to.
    ip: String,
    /// Port the server binds to.
    port: u16,
    /// Optional user id — not referenced in this file; presumably
    /// consumed elsewhere (TODO confirm against `process_json`/`util`).
    uid: Option<String>,
    /// Optional password — see note on `uid`.
    pwd: Option<String>,
    /// Path to a PEM private key; TLS is enabled only when both
    /// `tlskey` and `tlscerts` are present.
    tlskey: Option<String>,
    /// Path to a PEM certificate chain file.
    tlscerts: Option<String>,
    /// Optional port for the plain-HTTP redirect server; redirecting
    /// requires both `redirectport` and `redirectdomain`.
    redirectport: Option<u16>,
    /// Target prefix used when building redirect Location URLs.
    redirectdomain: Option<String>,
}
/// Fallback configuration used when `config.toml` is missing or invalid:
/// plain HTTP on 127.0.0.1:8000, no credentials, no TLS, no redirect.
fn defcon() -> Config {
    Config {
        ip: String::from("127.0.0.1"),
        port: 8000,
        uid: None,
        pwd: None,
        tlskey: None,
        tlscerts: None,
        redirectport: None,
        redirectdomain: None,
    }
}
fn load_config() -> Config {
match util::load_string("config.toml") {
Err(e) => {
error!("error loading config.toml: {:?}", e);
defcon()
}
Ok(config_str) => match toml::from_str(config_str.as_str()) {
Ok(c) => c,
Err(e) => {
error!("error loading config.toml: {:?}", e);
defcon()
}
},
}
}
/// Entry point: initialize logging, load configuration, and start the
/// actix-web server (optionally with TLS and an HTTP redirect server).
fn main() {
    // BUG FIX: the logger must be installed before load_config() runs,
    // otherwise the error! calls inside load_config are silently dropped.
    env_logger::init();
    let config = load_config();
    info!("server init!");
    // load ssl keys; TLS is used only when both key and cert paths are set,
    // otherwise the server falls back to plain HTTP.
    let optbuilder = match (config.tlskey, config.tlscerts) {
        (Some(key), Some(certs)) => {
            let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();
            builder.set_private_key_file(key, SslFiletype::PEM).unwrap();
            builder.set_certificate_chain_file(certs).unwrap();
            Some(builder)
        }
        _ => {
            warn!("tlskey, tlscerts not found; no tls for you!");
            None
        }
    };
    let sys = actix::System::new("schelme-bots");
    {
        let s = server::new(move || {
            App::new()
                .resource("/public", |r| r.method(Method::POST).f(public))
                .resource(r"/static/{tail:.*}", |r| r.method(Method::GET).f(files))
                .resource("/favicon.ico", |r| r.method(Method::GET).f(favicon))
                .resource("/sitemap.txt", |r| r.method(Method::GET).f(sitemap))
                .resource(r"/{tail:.*}", |r| r.method(Method::GET).f(mainpage))
        });
        match optbuilder {
            Some(builder) => s.bind_ssl(format!("{}:{}", config.ip, config.port), builder),
            None => s.bind(format!("{}:{}", config.ip, config.port)),
        }
    }
    .expect(format!("Can not bind to port {}", config.port).as_str())
    .start();
    // Optionally run a second plain-HTTP server that redirects every
    // request to `redirectdomain`, preserving path and query string.
    let _ = match (config.redirectport, config.redirectdomain) {
        (Some(port), Some(domain)) => Some(
            server::new(move || {
                App::with_state(domain.clone())
                    .resource("{all:.*}", |r| {
                        r.f(|r| {
                            info!("redirect!{}", r.path());
                            HttpResponse::Found()
                                .header(
                                    http::header::LOCATION,
                                    format!("{}{}?{}", r.state(), r.path(), r.query_string()),
                                )
                                .finish()
                        })
                    })
                    .middleware(Logger::default())
                    .middleware(Logger::new("REDIRECTOR: %a %{User-Agent}i"))
            })
            .bind(format!("{}:{}", config.ip, port))
            .expect(format!("Can not bind to port {}", port).as_str())
            .start(),
        ),
        _ => None,
    };
    sys.run();
}
|
#![no_main]
#[macro_use]extern crate lazy_static;
#[macro_use]extern crate kiss_ui;
extern crate winapi;
extern crate user32;
use winapi::{c_int,HWND,HINSTANCE,LPSTR};
mod ffi;
mod appsettings;
mod window;
mod helpers;
mod stsclient;
mod translator;
mod apphandler;
use appsettings::*;
/// Win32 entry point: builds the app settings, creates the background
/// window with its tray icon and global hotkey, runs the message pump
/// until WM_QUIT, then tears the tray icon and hotkey back down.
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn WinMain(_: HINSTANCE, _: HINSTANCE, _: LPSTR, _: c_int) -> c_int {
    // NOTE(review): client_id/client_secret look like placeholders —
    // confirm real credentials are supplied before shipping.
    let settings = AppSettings {
        sts_url: "https://datamarket.accesscontrol.windows.net/v2/OAuth2-13",
        scope: "http://api.microsofttranslator.com",
        client_id: "client_id",
        client_secret: "client_secret",
        translator_url: "http://api.microsofttranslator.com/v2/Http.svc/Translate",
        source_lang: "en",
        target_lang: "uk"
    };
    // Renamed from the misspelled `hanlder`.
    let handler = apphandler::init(settings);
    let wnd_handle: HWND = window::create_background("HandyTranslator", handler);
    helpers::add_tray_icon(wnd_handle, "HandyTranslator");
    helpers::register_apphotkey(wnd_handle);
    let mut msg = window::create_window_msg();
    // Standard Win32 message pump: GetMessageW returns 0 on WM_QUIT and
    // a negative value on error; both end the loop.
    unsafe {
        while user32::GetMessageW(&mut msg, 0 as HWND, 0, 0) > 0 {
            user32::TranslateMessage(&mut msg);
            user32::DispatchMessageW(&mut msg);
        }
    }
    helpers::remove_tray_icon(wnd_handle);
    helpers::unregister_apphotkey(wnd_handle);
    0
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.