// Generated code: cosmos.slashing.v1beta1 protobuf message and gRPC definitions (prost/tonic).
/// ValidatorSigningInfo defines a validator's signing info for monitoring their
/// liveness activity.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ValidatorSigningInfo {
/// bech32-style validator address string (exact prefix not visible here — confirm against the proto)
#[prost(string, tag = "1")]
pub address: std::string::String,
/// height at which validator was first a candidate OR was unjailed
#[prost(int64, tag = "2")]
pub start_height: i64,
/// index offset into signed block bit array
#[prost(int64, tag = "3")]
pub index_offset: i64,
/// timestamp validator cannot be unjailed until
#[prost(message, optional, tag = "4")]
pub jailed_until: ::std::option::Option<::prost_types::Timestamp>,
/// whether or not a validator has been tombstoned (killed out of validator
/// set)
#[prost(bool, tag = "5")]
pub tombstoned: bool,
/// missed blocks counter (to avoid scanning the array every time)
#[prost(int64, tag = "6")]
pub missed_blocks_counter: i64,
}
/// Params represents the parameters used by the slashing module.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Params {
/// number of recent blocks considered when evaluating liveness
#[prost(int64, tag = "1")]
pub signed_blocks_window: i64,
/// minimum fraction of blocks that must be signed within the window
/// NOTE(review): bytes-encoded decimal (presumably sdk.Dec) — confirm against the proto definition
#[prost(bytes, tag = "2")]
pub min_signed_per_window: std::vec::Vec<u8>,
/// how long a validator remains jailed after a downtime infraction
#[prost(message, optional, tag = "3")]
pub downtime_jail_duration: ::std::option::Option<::prost_types::Duration>,
/// slash fraction for equivocation — bytes-encoded decimal, see note above tag 2
#[prost(bytes, tag = "4")]
pub slash_fraction_double_sign: std::vec::Vec<u8>,
/// slash fraction for downtime — bytes-encoded decimal, see note above tag 2
#[prost(bytes, tag = "5")]
pub slash_fraction_downtime: std::vec::Vec<u8>,
}
/// GenesisState defines the slashing module's genesis state.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GenesisState {
/// params defines all the parameters of the slashing module.
#[prost(message, optional, tag = "1")]
pub params: ::std::option::Option<Params>,
/// signing_infos represents a map between validator addresses and their
/// signing infos.
#[prost(message, repeated, tag = "2")]
pub signing_infos: ::std::vec::Vec<SigningInfo>,
/// missed_blocks represents a map between validator addresses and their
/// missed blocks.
#[prost(message, repeated, tag = "3")]
pub missed_blocks: ::std::vec::Vec<ValidatorMissedBlocks>,
}
/// SigningInfo stores validator signing info of corresponding address.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SigningInfo {
/// address is the validator address.
#[prost(string, tag = "1")]
pub address: std::string::String,
/// validator_signing_info represents the signing info of this validator.
/// Optional because proto3 message fields are always optional on the wire.
#[prost(message, optional, tag = "2")]
pub validator_signing_info: ::std::option::Option<ValidatorSigningInfo>,
}
/// ValidatorMissedBlocks contains array of missed blocks of corresponding
/// address.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ValidatorMissedBlocks {
/// address is the validator address.
#[prost(string, tag = "1")]
pub address: std::string::String,
/// missed_blocks is an array of missed blocks by the validator.
#[prost(message, repeated, tag = "2")]
pub missed_blocks: ::std::vec::Vec<MissedBlock>,
}
/// MissedBlock contains height and missed status as boolean.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MissedBlock {
/// index is the height at which the block was missed.
#[prost(int64, tag = "1")]
pub index: i64,
/// missed is the missed status.
#[prost(bool, tag = "2")]
pub missed: bool,
}
/// QueryParamsRequest is the request type for the Query/Params RPC method
// Empty message: the Params query takes no arguments.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryParamsRequest {}
/// QueryParamsResponse is the response type for the Query/Params RPC method
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryParamsResponse {
/// current slashing module parameters
#[prost(message, optional, tag = "1")]
pub params: ::std::option::Option<Params>,
}
/// QuerySigningInfoRequest is the request type for the Query/SigningInfo RPC
/// method
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QuerySigningInfoRequest {
/// cons_address is the address to query signing info of
#[prost(string, tag = "1")]
pub cons_address: std::string::String,
}
/// QuerySigningInfoResponse is the response type for the Query/SigningInfo RPC
/// method
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QuerySigningInfoResponse {
/// val_signing_info is the signing info of requested val cons address
#[prost(message, optional, tag = "1")]
pub val_signing_info: ::std::option::Option<ValidatorSigningInfo>,
}
/// QuerySigningInfosRequest is the request type for the Query/SigningInfos RPC
/// method
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QuerySigningInfosRequest {
/// optional pagination parameters for the result set
#[prost(message, optional, tag = "1")]
pub pagination: ::std::option::Option<super::super::base::query::v1beta1::PageRequest>,
}
/// QuerySigningInfosResponse is the response type for the Query/SigningInfos RPC
/// method
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QuerySigningInfosResponse {
/// info is the signing info of all validators
#[prost(message, repeated, tag = "1")]
pub info: ::std::vec::Vec<ValidatorSigningInfo>,
/// pagination response echoing the request's paging state
#[prost(message, optional, tag = "2")]
pub pagination: ::std::option::Option<super::super::base::query::v1beta1::PageResponse>,
}
#[doc = r" Generated client implementations."]
pub mod query_client {
#![allow(unused_variables, dead_code, missing_docs)]
use tonic::codegen::*;
#[doc = " Query provides defines the gRPC querier service"]
pub struct QueryClient<T> {
inner: tonic::client::Grpc<T>,
}
impl QueryClient<tonic::transport::Channel> {
#[doc = r" Attempt to create a new client by connecting to a given endpoint."]
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: std::convert::TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
// Establish the transport channel up front; `connect` fails fast on a bad endpoint.
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> QueryClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + HttpBody + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as HttpBody>::Error: Into<StdError> + Send,
{
// Wrap an already-constructed service (channel, mock, etc.) in the client.
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
// Like `new`, but every outgoing request passes through `interceptor` first.
pub fn with_interceptor(inner: T, interceptor: impl Into<tonic::Interceptor>) -> Self {
let inner = tonic::client::Grpc::with_interceptor(inner, interceptor);
Self { inner }
}
#[doc = " Params queries the parameters of slashing module"]
pub async fn params(
&mut self,
request: impl tonic::IntoRequest<super::QueryParamsRequest>,
) -> Result<tonic::Response<super::QueryParamsResponse>, tonic::Status> {
// Wait for the underlying service to be ready; readiness failures are
// surfaced as a gRPC `Unknown` status rather than a transport error.
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
// Fully-qualified gRPC method path: /<package>.<Service>/<Method>.
let path =
http::uri::PathAndQuery::from_static("/cosmos.slashing.v1beta1.Query/Params");
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " SigningInfo queries the signing info of given cons address"]
pub async fn signing_info(
&mut self,
request: impl tonic::IntoRequest<super::QuerySigningInfoRequest>,
) -> Result<tonic::Response<super::QuerySigningInfoResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path =
http::uri::PathAndQuery::from_static("/cosmos.slashing.v1beta1.Query/SigningInfo");
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " SigningInfos queries signing info of all validators"]
pub async fn signing_infos(
&mut self,
request: impl tonic::IntoRequest<super::QuerySigningInfosRequest>,
) -> Result<tonic::Response<super::QuerySigningInfosResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path =
http::uri::PathAndQuery::from_static("/cosmos.slashing.v1beta1.Query/SigningInfos");
self.inner.unary(request.into_request(), path, codec).await
}
}
// Clone only requires the wrapped service to be Clone (channels are cheaply clonable).
impl<T: Clone> Clone for QueryClient<T> {
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
}
// Opaque Debug: the inner service is not required to implement Debug.
impl<T> std::fmt::Debug for QueryClient<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "QueryClient {{ ... }}")
}
}
}
#[doc = r" Generated server implementations."]
pub mod query_server {
#![allow(unused_variables, dead_code, missing_docs)]
use tonic::codegen::*;
#[doc = "Generated trait containing gRPC methods that should be implemented for use with QueryServer."]
#[async_trait]
pub trait Query: Send + Sync + 'static {
#[doc = " Params queries the parameters of slashing module"]
async fn params(
&self,
request: tonic::Request<super::QueryParamsRequest>,
) -> Result<tonic::Response<super::QueryParamsResponse>, tonic::Status>;
#[doc = " SigningInfo queries the signing info of given cons address"]
async fn signing_info(
&self,
request: tonic::Request<super::QuerySigningInfoRequest>,
) -> Result<tonic::Response<super::QuerySigningInfoResponse>, tonic::Status>;
#[doc = " SigningInfos queries signing info of all validators"]
async fn signing_infos(
&self,
request: tonic::Request<super::QuerySigningInfosRequest>,
) -> Result<tonic::Response<super::QuerySigningInfosResponse>, tonic::Status>;
}
#[doc = " Query provides defines the gRPC querier service"]
#[derive(Debug)]
pub struct QueryServer<T: Query> {
inner: _Inner<T>,
}
// Shared service instance plus an optional per-call interceptor.
struct _Inner<T>(Arc<T>, Option<tonic::Interceptor>);
impl<T: Query> QueryServer<T> {
pub fn new(inner: T) -> Self {
let inner = Arc::new(inner);
let inner = _Inner(inner, None);
Self { inner }
}
pub fn with_interceptor(inner: T, interceptor: impl Into<tonic::Interceptor>) -> Self {
let inner = Arc::new(inner);
let inner = _Inner(inner, Some(interceptor.into()));
Self { inner }
}
}
impl<T, B> Service<http::Request<B>> for QueryServer<T>
where
T: Query,
B: HttpBody + Send + Sync + 'static,
B::Error: Into<StdError> + Send + 'static,
{
type Response = http::Response<tonic::body::BoxBody>;
type Error = Never;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
// Route each incoming HTTP request by its gRPC method path to the matching
// trait method; unknown paths get grpc-status 12 (UNIMPLEMENTED).
fn call(&mut self, req: http::Request<B>) -> Self::Future {
let inner = self.inner.clone();
match req.uri().path() {
"/cosmos.slashing.v1beta1.Query/Params" => {
#[allow(non_camel_case_types)]
struct ParamsSvc<T: Query>(pub Arc<T>);
impl<T: Query> tonic::server::UnaryService<super::QueryParamsRequest> for ParamsSvc<T> {
type Response = super::QueryParamsResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::QueryParamsRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { (*inner).params(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = ParamsSvc(inner);
let codec = tonic::codec::ProstCodec::default();
// Apply the configured interceptor (if any) to this call.
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/cosmos.slashing.v1beta1.Query/SigningInfo" => {
#[allow(non_camel_case_types)]
struct SigningInfoSvc<T: Query>(pub Arc<T>);
impl<T: Query> tonic::server::UnaryService<super::QuerySigningInfoRequest> for SigningInfoSvc<T> {
type Response = super::QuerySigningInfoResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::QuerySigningInfoRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { (*inner).signing_info(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = SigningInfoSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/cosmos.slashing.v1beta1.Query/SigningInfos" => {
#[allow(non_camel_case_types)]
struct SigningInfosSvc<T: Query>(pub Arc<T>);
impl<T: Query> tonic::server::UnaryService<super::QuerySigningInfosRequest> for SigningInfosSvc<T> {
type Response = super::QuerySigningInfosResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::QuerySigningInfosRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { (*inner).signing_infos(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = SigningInfosSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
// Unknown method path: HTTP 200 with grpc-status 12 (UNIMPLEMENTED), empty body.
_ => Box::pin(async move {
Ok(http::Response::builder()
.status(200)
.header("grpc-status", "12")
.body(tonic::body::BoxBody::empty())
.unwrap())
}),
}
}
}
impl<T: Query> Clone for QueryServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self { inner }
}
}
// Cloning bumps the Arc refcount and copies the optional interceptor handle.
impl<T: Query> Clone for _Inner<T> {
fn clone(&self) -> Self {
Self(self.0.clone(), self.1.clone())
}
}
impl<T: std::fmt::Debug> std::fmt::Debug for _Inner<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.0)
}
}
// Service name used by tonic's router when this server is added to a transport.
impl<T: Query> tonic::transport::NamedService for QueryServer<T> {
const NAME: &'static str = "cosmos.slashing.v1beta1.Query";
}
}
/// MsgUnjail defines the Msg/Unjail request type
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MsgUnjail {
/// address of the jailed validator requesting to be unjailed
#[prost(string, tag = "1")]
pub validator_addr: std::string::String,
}
/// MsgUnjailResponse defines the Msg/Unjail response type
// Empty message: success is conveyed by the absence of an error status.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MsgUnjailResponse {}
#[doc = r" Generated client implementations."]
pub mod msg_client {
#![allow(unused_variables, dead_code, missing_docs)]
use tonic::codegen::*;
#[doc = " Msg defines the slashing Msg service."]
pub struct MsgClient<T> {
inner: tonic::client::Grpc<T>,
}
impl MsgClient<tonic::transport::Channel> {
#[doc = r" Attempt to create a new client by connecting to a given endpoint."]
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: std::convert::TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
// Establish the transport channel up front; fails fast on a bad endpoint.
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> MsgClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + HttpBody + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as HttpBody>::Error: Into<StdError> + Send,
{
// Wrap an already-constructed service (channel, mock, etc.) in the client.
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
// Like `new`, but every outgoing request passes through `interceptor` first.
pub fn with_interceptor(inner: T, interceptor: impl Into<tonic::Interceptor>) -> Self {
let inner = tonic::client::Grpc::with_interceptor(inner, interceptor);
Self { inner }
}
#[doc = " Unjail defines a method for unjailing a jailed validator, thus returning"]
#[doc = " them into the bonded validator set, so they can begin receiving provisions"]
#[doc = " and rewards again."]
pub async fn unjail(
&mut self,
request: impl tonic::IntoRequest<super::MsgUnjail>,
) -> Result<tonic::Response<super::MsgUnjailResponse>, tonic::Status> {
// Readiness failures are surfaced as a gRPC `Unknown` status.
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static("/cosmos.slashing.v1beta1.Msg/Unjail");
self.inner.unary(request.into_request(), path, codec).await
}
}
impl<T: Clone> Clone for MsgClient<T> {
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
}
// Opaque Debug: the inner service is not required to implement Debug.
impl<T> std::fmt::Debug for MsgClient<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "MsgClient {{ ... }}")
}
}
}
#[doc = r" Generated server implementations."]
pub mod msg_server {
#![allow(unused_variables, dead_code, missing_docs)]
use tonic::codegen::*;
#[doc = "Generated trait containing gRPC methods that should be implemented for use with MsgServer."]
#[async_trait]
pub trait Msg: Send + Sync + 'static {
#[doc = " Unjail defines a method for unjailing a jailed validator, thus returning"]
#[doc = " them into the bonded validator set, so they can begin receiving provisions"]
#[doc = " and rewards again."]
async fn unjail(
&self,
request: tonic::Request<super::MsgUnjail>,
) -> Result<tonic::Response<super::MsgUnjailResponse>, tonic::Status>;
}
#[doc = " Msg defines the slashing Msg service."]
#[derive(Debug)]
pub struct MsgServer<T: Msg> {
inner: _Inner<T>,
}
// Shared service instance plus an optional per-call interceptor.
struct _Inner<T>(Arc<T>, Option<tonic::Interceptor>);
impl<T: Msg> MsgServer<T> {
pub fn new(inner: T) -> Self {
let inner = Arc::new(inner);
let inner = _Inner(inner, None);
Self { inner }
}
pub fn with_interceptor(inner: T, interceptor: impl Into<tonic::Interceptor>) -> Self {
let inner = Arc::new(inner);
let inner = _Inner(inner, Some(interceptor.into()));
Self { inner }
}
}
impl<T, B> Service<http::Request<B>> for MsgServer<T>
where
T: Msg,
B: HttpBody + Send + Sync + 'static,
B::Error: Into<StdError> + Send + 'static,
{
type Response = http::Response<tonic::body::BoxBody>;
type Error = Never;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
// Route each incoming HTTP request by its gRPC method path; unknown paths
// get grpc-status 12 (UNIMPLEMENTED).
fn call(&mut self, req: http::Request<B>) -> Self::Future {
let inner = self.inner.clone();
match req.uri().path() {
"/cosmos.slashing.v1beta1.Msg/Unjail" => {
#[allow(non_camel_case_types)]
struct UnjailSvc<T: Msg>(pub Arc<T>);
impl<T: Msg> tonic::server::UnaryService<super::MsgUnjail> for UnjailSvc<T> {
type Response = super::MsgUnjailResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::MsgUnjail>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { (*inner).unjail(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = UnjailSvc(inner);
let codec = tonic::codec::ProstCodec::default();
// Apply the configured interceptor (if any) to this call.
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
// Unknown method path: HTTP 200 with grpc-status 12 (UNIMPLEMENTED), empty body.
_ => Box::pin(async move {
Ok(http::Response::builder()
.status(200)
.header("grpc-status", "12")
.body(tonic::body::BoxBody::empty())
.unwrap())
}),
}
}
}
impl<T: Msg> Clone for MsgServer<T> {
fn clone(&self) -> Self {
let inner = self.inner.clone();
Self { inner }
}
}
// Cloning bumps the Arc refcount and copies the optional interceptor handle.
impl<T: Msg> Clone for _Inner<T> {
fn clone(&self) -> Self {
Self(self.0.clone(), self.1.clone())
}
}
impl<T: std::fmt::Debug> std::fmt::Debug for _Inner<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.0)
}
}
// Service name used by tonic's router when this server is added to a transport.
impl<T: Msg> tonic::transport::NamedService for MsgServer<T> {
const NAME: &'static str = "cosmos.slashing.v1beta1.Msg";
}
}
// ---- Generated register accessors for COMP6_CSR (svd2rust output) ----
#[doc = "Reader of register COMP6_CSR"]
pub type R = crate::R<u32, super::COMP6_CSR>;
#[doc = "Writer for register COMP6_CSR"]
pub type W = crate::W<u32, super::COMP6_CSR>;
#[doc = "Register COMP6_CSR `reset()`'s with value 0"]
impl crate::ResetValue for super::COMP6_CSR {
type Type = u32;
#[inline(always)]
// The whole 32-bit register resets to 0.
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Comparator 6 enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP6EN_A {
    #[doc = "0: Comparator disabled"]
    DISABLED = 0,
    #[doc = "1: Comparator enabled"]
    ENABLED = 1,
}
impl From<COMP6EN_A> for bool {
    #[inline(always)]
    fn from(v: COMP6EN_A) -> Self {
        // Exhaustive mapping of the one-bit field onto `bool`.
        match v {
            COMP6EN_A::DISABLED => false,
            COMP6EN_A::ENABLED => true,
        }
    }
}
#[doc = "Reader of field `COMP6EN`"]
pub type COMP6EN_R = crate::R<bool, COMP6EN_A>;
impl COMP6EN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> COMP6EN_A {
// Single-bit field: both values map exhaustively onto the enum.
match self.bits {
false => COMP6EN_A::DISABLED,
true => COMP6EN_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
// Uses the crate-provided PartialEq between the reader and its enum.
*self == COMP6EN_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == COMP6EN_A::ENABLED
}
}
#[doc = "Write proxy for field `COMP6EN`"]
pub struct COMP6EN_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6EN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP6EN_A) -> &'a mut W {
        self.bit(bool::from(variant))
    }
    #[doc = "Comparator disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(COMP6EN_A::DISABLED)
    }
    #[doc = "Comparator enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(COMP6EN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0: clear it, then OR in the new value.
        let cleared = self.w.bits & !0x01;
        self.w.bits = cleared | (u32::from(value) & 0x01);
        self.w
    }
}
#[doc = "Comparator 6 inverting input selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum COMP6INMSEL_A {
    #[doc = "0: 1/4 of VRefint"]
    ONEQUARTERVREF = 0,
    #[doc = "1: 1/2 of VRefint"]
    ONEHALFVREF = 1,
    #[doc = "2: 3/4 of VRefint"]
    THREEQUARTERVREF = 2,
    #[doc = "3: VRefint"]
    VREF = 3,
    #[doc = "4: PA4 or DAC1_CH1 output if enabled"]
    PA4_DAC1_CH1 = 4,
    #[doc = "5: DAC1_CH2"]
    DAC1_CH2 = 5,
    #[doc = "7: PB15"]
    PB15 = 7,
}
impl From<COMP6INMSEL_A> for u8 {
    #[inline(always)]
    fn from(v: COMP6INMSEL_A) -> Self {
        // `#[repr(u8)]` pins each discriminant to its hardware field value.
        v as u8
    }
}
#[doc = "Reader of field `COMP6INMSEL`"]
pub type COMP6INMSEL_R = crate::R<u8, COMP6INMSEL_A>;
impl COMP6INMSEL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, COMP6INMSEL_A> {
use crate::Variant::*;
// Value 6 (and anything else without a variant) is reported as reserved.
match self.bits {
0 => Val(COMP6INMSEL_A::ONEQUARTERVREF),
1 => Val(COMP6INMSEL_A::ONEHALFVREF),
2 => Val(COMP6INMSEL_A::THREEQUARTERVREF),
3 => Val(COMP6INMSEL_A::VREF),
4 => Val(COMP6INMSEL_A::PA4_DAC1_CH1),
5 => Val(COMP6INMSEL_A::DAC1_CH2),
7 => Val(COMP6INMSEL_A::PB15),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `ONEQUARTERVREF`"]
#[inline(always)]
pub fn is_one_quarter_vref(&self) -> bool {
// Uses the crate-provided PartialEq between the reader and its enum.
*self == COMP6INMSEL_A::ONEQUARTERVREF
}
#[doc = "Checks if the value of the field is `ONEHALFVREF`"]
#[inline(always)]
pub fn is_one_half_vref(&self) -> bool {
*self == COMP6INMSEL_A::ONEHALFVREF
}
#[doc = "Checks if the value of the field is `THREEQUARTERVREF`"]
#[inline(always)]
pub fn is_three_quarter_vref(&self) -> bool {
*self == COMP6INMSEL_A::THREEQUARTERVREF
}
#[doc = "Checks if the value of the field is `VREF`"]
#[inline(always)]
pub fn is_vref(&self) -> bool {
*self == COMP6INMSEL_A::VREF
}
#[doc = "Checks if the value of the field is `PA4_DAC1_CH1`"]
#[inline(always)]
pub fn is_pa4_dac1_ch1(&self) -> bool {
*self == COMP6INMSEL_A::PA4_DAC1_CH1
}
#[doc = "Checks if the value of the field is `DAC1_CH2`"]
#[inline(always)]
pub fn is_dac1_ch2(&self) -> bool {
*self == COMP6INMSEL_A::DAC1_CH2
}
#[doc = "Checks if the value of the field is `PB15`"]
#[inline(always)]
pub fn is_pb15(&self) -> bool {
*self == COMP6INMSEL_A::PB15
}
}
#[doc = "Write proxy for field `COMP6INMSEL`"]
pub struct COMP6INMSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6INMSEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP6INMSEL_A) -> &'a mut W {
        // Safe: every enum discriminant fits within the field's 3-bit mask.
        unsafe { self.bits(u8::from(variant)) }
    }
    #[doc = "1/4 of VRefint"]
    #[inline(always)]
    pub fn one_quarter_vref(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::ONEQUARTERVREF)
    }
    #[doc = "1/2 of VRefint"]
    #[inline(always)]
    pub fn one_half_vref(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::ONEHALFVREF)
    }
    #[doc = "3/4 of VRefint"]
    #[inline(always)]
    pub fn three_quarter_vref(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::THREEQUARTERVREF)
    }
    #[doc = "VRefint"]
    #[inline(always)]
    pub fn vref(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::VREF)
    }
    #[doc = "PA4 or DAC1_CH1 output if enabled"]
    #[inline(always)]
    pub fn pa4_dac1_ch1(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::PA4_DAC1_CH1)
    }
    #[doc = "DAC1_CH2"]
    #[inline(always)]
    pub fn dac1_ch2(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::DAC1_CH2)
    }
    #[doc = "PB15"]
    #[inline(always)]
    pub fn pb15(self) -> &'a mut W {
        self.variant(COMP6INMSEL_A::PB15)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bit offset 4: clear the slot, then OR in the value.
        const MASK: u32 = 0x07;
        const OFFSET: u32 = 4;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Comparator 6 output selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum COMP6OUTSEL_A {
    #[doc = "0: No selection"]
    NOSELECTION = 0,
    #[doc = "1: Timer 1 break input"]
    TIMER1BREAKINPUT = 1,
    #[doc = "2: Timer 1 break input 2"]
    TIMER1BREAKINPUT2 = 2,
    #[doc = "6: Timer 2 input capture 2"]
    TIMER2INPUTCAPTURE2 = 6,
    #[doc = "8: Timer 2 OCREF_CLR input"]
    TIMER2OCREFCLEARINPUT = 8,
    #[doc = "9: Timer 16 OCREF_CLR input"]
    TIMER16OCREFCLEARINPUT = 9,
    #[doc = "10: Timer 16 input capture 1"]
    TIMER16INPUTCAPTURE1 = 10,
}
impl From<COMP6OUTSEL_A> for u8 {
    #[inline(always)]
    fn from(v: COMP6OUTSEL_A) -> Self {
        // `#[repr(u8)]` pins each discriminant to its hardware field value.
        v as u8
    }
}
#[doc = "Reader of field `COMP6OUTSEL`"]
pub type COMP6OUTSEL_R = crate::R<u8, COMP6OUTSEL_A>;
impl COMP6OUTSEL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, COMP6OUTSEL_A> {
use crate::Variant::*;
// Values 3-5, 7, and 11+ have no variant and are reported as reserved.
match self.bits {
0 => Val(COMP6OUTSEL_A::NOSELECTION),
1 => Val(COMP6OUTSEL_A::TIMER1BREAKINPUT),
2 => Val(COMP6OUTSEL_A::TIMER1BREAKINPUT2),
6 => Val(COMP6OUTSEL_A::TIMER2INPUTCAPTURE2),
8 => Val(COMP6OUTSEL_A::TIMER2OCREFCLEARINPUT),
9 => Val(COMP6OUTSEL_A::TIMER16OCREFCLEARINPUT),
10 => Val(COMP6OUTSEL_A::TIMER16INPUTCAPTURE1),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `NOSELECTION`"]
#[inline(always)]
pub fn is_no_selection(&self) -> bool {
// Uses the crate-provided PartialEq between the reader and its enum.
*self == COMP6OUTSEL_A::NOSELECTION
}
#[doc = "Checks if the value of the field is `TIMER1BREAKINPUT`"]
#[inline(always)]
pub fn is_timer1break_input(&self) -> bool {
*self == COMP6OUTSEL_A::TIMER1BREAKINPUT
}
#[doc = "Checks if the value of the field is `TIMER1BREAKINPUT2`"]
#[inline(always)]
pub fn is_timer1break_input2(&self) -> bool {
*self == COMP6OUTSEL_A::TIMER1BREAKINPUT2
}
#[doc = "Checks if the value of the field is `TIMER2INPUTCAPTURE2`"]
#[inline(always)]
pub fn is_timer2input_capture2(&self) -> bool {
*self == COMP6OUTSEL_A::TIMER2INPUTCAPTURE2
}
#[doc = "Checks if the value of the field is `TIMER2OCREFCLEARINPUT`"]
#[inline(always)]
pub fn is_timer2ocref_clear_input(&self) -> bool {
*self == COMP6OUTSEL_A::TIMER2OCREFCLEARINPUT
}
#[doc = "Checks if the value of the field is `TIMER16OCREFCLEARINPUT`"]
#[inline(always)]
pub fn is_timer16ocref_clear_input(&self) -> bool {
*self == COMP6OUTSEL_A::TIMER16OCREFCLEARINPUT
}
#[doc = "Checks if the value of the field is `TIMER16INPUTCAPTURE1`"]
#[inline(always)]
pub fn is_timer16input_capture1(&self) -> bool {
*self == COMP6OUTSEL_A::TIMER16INPUTCAPTURE1
}
}
#[doc = "Write proxy for field `COMP6OUTSEL`"]
pub struct COMP6OUTSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6OUTSEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP6OUTSEL_A) -> &'a mut W {
        // Safe: every enum discriminant fits within the field's 4-bit mask.
        unsafe { self.bits(u8::from(variant)) }
    }
    #[doc = "No selection"]
    #[inline(always)]
    pub fn no_selection(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::NOSELECTION)
    }
    #[doc = "Timer 1 break input"]
    #[inline(always)]
    pub fn timer1break_input(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::TIMER1BREAKINPUT)
    }
    #[doc = "Timer 1 break input 2"]
    #[inline(always)]
    pub fn timer1break_input2(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::TIMER1BREAKINPUT2)
    }
    #[doc = "Timer 2 input capture 2"]
    #[inline(always)]
    pub fn timer2input_capture2(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::TIMER2INPUTCAPTURE2)
    }
    #[doc = "Timer 2 OCREF_CLR input"]
    #[inline(always)]
    pub fn timer2ocref_clear_input(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::TIMER2OCREFCLEARINPUT)
    }
    #[doc = "Timer 16 OCREF_CLR input"]
    #[inline(always)]
    pub fn timer16ocref_clear_input(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::TIMER16OCREFCLEARINPUT)
    }
    #[doc = "Timer 16 input capture 1"]
    #[inline(always)]
    pub fn timer16input_capture1(self) -> &'a mut W {
        self.variant(COMP6OUTSEL_A::TIMER16INPUTCAPTURE1)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bit offset 10: clear the slot, then OR in the value.
        const MASK: u32 = 0x0f;
        const OFFSET: u32 = 10;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Comparator 6 output polarity\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP6POL_A {
    #[doc = "0: Output is not inverted"]
    NOTINVERTED = 0,
    #[doc = "1: Output is inverted"]
    INVERTED = 1,
}
impl From<COMP6POL_A> for bool {
    #[inline(always)]
    fn from(v: COMP6POL_A) -> Self {
        // Exhaustive mapping of the one-bit field onto `bool`.
        match v {
            COMP6POL_A::NOTINVERTED => false,
            COMP6POL_A::INVERTED => true,
        }
    }
}
#[doc = "Reader of field `COMP6POL`"]
pub type COMP6POL_R = crate::R<bool, COMP6POL_A>;
impl COMP6POL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> COMP6POL_A {
// Single-bit field: both values map exhaustively onto the enum.
match self.bits {
false => COMP6POL_A::NOTINVERTED,
true => COMP6POL_A::INVERTED,
}
}
#[doc = "Checks if the value of the field is `NOTINVERTED`"]
#[inline(always)]
pub fn is_not_inverted(&self) -> bool {
// Uses the crate-provided PartialEq between the reader and its enum.
*self == COMP6POL_A::NOTINVERTED
}
#[doc = "Checks if the value of the field is `INVERTED`"]
#[inline(always)]
pub fn is_inverted(&self) -> bool {
*self == COMP6POL_A::INVERTED
}
}
#[doc = "Write proxy for field `COMP6POL`"]
pub struct COMP6POL_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6POL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP6POL_A) -> &'a mut W {
        self.bit(bool::from(variant))
    }
    #[doc = "Output is not inverted"]
    #[inline(always)]
    pub fn not_inverted(self) -> &'a mut W {
        self.variant(COMP6POL_A::NOTINVERTED)
    }
    #[doc = "Output is inverted"]
    #[inline(always)]
    pub fn inverted(self) -> &'a mut W {
        self.variant(COMP6POL_A::INVERTED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 15: clear it, then OR in the new value.
        let cleared = self.w.bits & !(0x01 << 15);
        self.w.bits = cleared | ((u32::from(value) & 0x01) << 15);
        self.w
    }
}
#[doc = "Comparator 6 blanking source\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum COMP6_BLANKING_A {
    #[doc = "0: No blanking"]
    NOBLANKING = 0,
    #[doc = "3: TIM2 OC4 selected as blanking source"]
    TIM2OC4 = 3,
    #[doc = "4: TIM15 OC2 selected as blanking source"]
    TIM15OC2 = 4,
}
impl From<COMP6_BLANKING_A> for u8 {
    #[inline(always)]
    fn from(v: COMP6_BLANKING_A) -> Self {
        // `#[repr(u8)]` pins each discriminant to its hardware field value.
        v as u8
    }
}
#[doc = "Reader of field `COMP6_BLANKING`"]
pub type COMP6_BLANKING_R = crate::R<u8, COMP6_BLANKING_A>;
impl COMP6_BLANKING_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, COMP6_BLANKING_A> {
use crate::Variant::*;
// Only 0, 3 and 4 are defined; any other value is reported as reserved.
match self.bits {
0 => Val(COMP6_BLANKING_A::NOBLANKING),
3 => Val(COMP6_BLANKING_A::TIM2OC4),
4 => Val(COMP6_BLANKING_A::TIM15OC2),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `NOBLANKING`"]
#[inline(always)]
pub fn is_no_blanking(&self) -> bool {
// Uses the crate-provided PartialEq between the reader and its enum.
*self == COMP6_BLANKING_A::NOBLANKING
}
#[doc = "Checks if the value of the field is `TIM2OC4`"]
#[inline(always)]
pub fn is_tim2oc4(&self) -> bool {
*self == COMP6_BLANKING_A::TIM2OC4
}
#[doc = "Checks if the value of the field is `TIM15OC2`"]
#[inline(always)]
pub fn is_tim15oc2(&self) -> bool {
*self == COMP6_BLANKING_A::TIM15OC2
}
}
/// Write proxy for field `COMP6_BLANKING`
pub struct COMP6_BLANKING_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6_BLANKING_W<'a> {
    /// Bit position of the field within the register.
    const OFFSET: u32 = 18;
    /// Mask of the 3-bit field (before shifting).
    const MASK: u32 = 0x07;
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: COMP6_BLANKING_A) -> &'a mut W {
        // Every enum discriminant (0, 3, 4) fits inside the 3-bit mask.
        unsafe { self.bits(variant.into()) }
    }
    /// No blanking
    #[inline(always)]
    pub fn no_blanking(self) -> &'a mut W {
        self.variant(COMP6_BLANKING_A::NOBLANKING)
    }
    /// TIM2 OC4 selected as blanking source
    #[inline(always)]
    pub fn tim2oc4(self) -> &'a mut W {
        self.variant(COMP6_BLANKING_A::TIM2OC4)
    }
    /// TIM15 OC2 selected as blanking source
    #[inline(always)]
    pub fn tim15oc2(self) -> &'a mut W {
        self.variant(COMP6_BLANKING_A::TIM15OC2)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(Self::MASK << Self::OFFSET)) | (((value as u32) & Self::MASK) << Self::OFFSET);
        self.w
    }
}
/// Comparator 6 output
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP6OUT_A {
    /// 0: Non-inverting input below inverting input
    LOW = 0,
    /// 1: Non-inverting input above inverting input
    HIGH = 1,
}
impl From<COMP6OUT_A> for bool {
    #[inline(always)]
    fn from(variant: COMP6OUT_A) -> Self {
        variant == COMP6OUT_A::HIGH
    }
}
/// Reader of field `COMP6OUT`
pub type COMP6OUT_R = crate::R<bool, COMP6OUT_A>;
impl COMP6OUT_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> COMP6OUT_A {
        if self.bits {
            COMP6OUT_A::HIGH
        } else {
            COMP6OUT_A::LOW
        }
    }
    /// Checks if the value of the field is `LOW`
    #[inline(always)]
    pub fn is_low(&self) -> bool {
        self.variant() == COMP6OUT_A::LOW
    }
    /// Checks if the value of the field is `HIGH`
    #[inline(always)]
    pub fn is_high(&self) -> bool {
        self.variant() == COMP6OUT_A::HIGH
    }
}
/// Comparator 6 lock
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP6LOCK_A {
    /// 0: Comparator CSR bits are read-write
    UNLOCKED = 0,
    /// 1: Comparator CSR bits are read-only
    LOCKED = 1,
}
impl From<COMP6LOCK_A> for bool {
    #[inline(always)]
    fn from(variant: COMP6LOCK_A) -> Self {
        variant == COMP6LOCK_A::LOCKED
    }
}
/// Reader of field `COMP6LOCK`
pub type COMP6LOCK_R = crate::R<bool, COMP6LOCK_A>;
impl COMP6LOCK_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> COMP6LOCK_A {
        if self.bits {
            COMP6LOCK_A::LOCKED
        } else {
            COMP6LOCK_A::UNLOCKED
        }
    }
    /// Checks if the value of the field is `UNLOCKED`
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        self.variant() == COMP6LOCK_A::UNLOCKED
    }
    /// Checks if the value of the field is `LOCKED`
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        self.variant() == COMP6LOCK_A::LOCKED
    }
}
/// Write proxy for field `COMP6LOCK`
pub struct COMP6LOCK_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6LOCK_W<'a> {
    /// Bit position of COMP6LOCK within the register.
    const OFFSET: u32 = 31;
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: COMP6LOCK_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Comparator CSR bits are read-write
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut W {
        self.variant(COMP6LOCK_A::UNLOCKED)
    }
    /// Comparator CSR bits are read-only
    #[inline(always)]
    pub fn locked(self) -> &'a mut W {
        self.variant(COMP6LOCK_A::LOCKED)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field, leaving the other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << Self::OFFSET)) | (((value as u32) & 0x01) << Self::OFFSET);
        self.w
    }
}
/// Reader of field `COMP6INMSEL3`
pub type COMP6INMSEL3_R = crate::R<bool, bool>;
/// Write proxy for field `COMP6INMSEL3`
pub struct COMP6INMSEL3_W<'a> {
    w: &'a mut W,
}
impl<'a> COMP6INMSEL3_W<'a> {
    /// Bit position of COMP6INMSEL3 within the register.
    const OFFSET: u32 = 22;
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field, leaving the other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << Self::OFFSET)) | (((value as u32) & 0x01) << Self::OFFSET);
        self.w
    }
}
impl R {
    /// Extracts the single bit at `offset`.
    #[inline(always)]
    fn bit_at(&self, offset: u32) -> bool {
        (self.bits >> offset) & 0x01 != 0
    }
    /// Extracts the field at `offset`, masked by `mask`, as `u8`.
    #[inline(always)]
    fn field_at(&self, offset: u32, mask: u32) -> u8 {
        ((self.bits >> offset) & mask) as u8
    }
    /// Bit 0 - Comparator 6 enable
    #[inline(always)]
    pub fn comp6en(&self) -> COMP6EN_R {
        COMP6EN_R::new(self.bit_at(0))
    }
    /// Bits 4:6 - Comparator 6 inverting input selection
    #[inline(always)]
    pub fn comp6inmsel(&self) -> COMP6INMSEL_R {
        COMP6INMSEL_R::new(self.field_at(4, 0x07))
    }
    /// Bits 10:13 - Comparator 6 output selection
    #[inline(always)]
    pub fn comp6outsel(&self) -> COMP6OUTSEL_R {
        COMP6OUTSEL_R::new(self.field_at(10, 0x0f))
    }
    /// Bit 15 - Comparator 6 output polarity
    #[inline(always)]
    pub fn comp6pol(&self) -> COMP6POL_R {
        COMP6POL_R::new(self.bit_at(15))
    }
    /// Bits 18:20 - Comparator 6 blanking source
    #[inline(always)]
    pub fn comp6_blanking(&self) -> COMP6_BLANKING_R {
        COMP6_BLANKING_R::new(self.field_at(18, 0x07))
    }
    /// Bit 30 - Comparator 6 output
    #[inline(always)]
    pub fn comp6out(&self) -> COMP6OUT_R {
        COMP6OUT_R::new(self.bit_at(30))
    }
    /// Bit 31 - Comparator 6 lock
    #[inline(always)]
    pub fn comp6lock(&self) -> COMP6LOCK_R {
        COMP6LOCK_R::new(self.bit_at(31))
    }
    /// Bit 22 - Comparator 6 inverting input selection
    #[inline(always)]
    pub fn comp6inmsel3(&self) -> COMP6INMSEL3_R {
        COMP6INMSEL3_R::new(self.bit_at(22))
    }
}
impl W {
    /// Bit 0 - Comparator 6 enable
    #[inline(always)]
    pub fn comp6en(&mut self) -> COMP6EN_W {
        COMP6EN_W { w: self }
    }
    /// Bits 4:6 - Comparator 6 inverting input selection
    #[inline(always)]
    pub fn comp6inmsel(&mut self) -> COMP6INMSEL_W {
        COMP6INMSEL_W { w: self }
    }
    /// Bits 10:13 - Comparator 6 output selection
    #[inline(always)]
    pub fn comp6outsel(&mut self) -> COMP6OUTSEL_W {
        COMP6OUTSEL_W { w: self }
    }
    /// Bit 15 - Comparator 6 output polarity
    #[inline(always)]
    pub fn comp6pol(&mut self) -> COMP6POL_W {
        COMP6POL_W { w: self }
    }
    /// Bits 18:20 - Comparator 6 blanking source
    #[inline(always)]
    pub fn comp6_blanking(&mut self) -> COMP6_BLANKING_W {
        COMP6_BLANKING_W { w: self }
    }
    /// Bit 31 - Comparator 6 lock
    #[inline(always)]
    pub fn comp6lock(&mut self) -> COMP6LOCK_W {
        COMP6LOCK_W { w: self }
    }
    /// Bit 22 - Comparator 6 inverting input selection
    /// (COMP6OUT at bit 30 is read-only, so it has no write proxy here.)
    #[inline(always)]
    pub fn comp6inmsel3(&mut self) -> COMP6INMSEL3_W {
        COMP6INMSEL3_W { w: self }
    }
}
|
use winapi::um::{
winuser::{WS_VISIBLE, WS_DISABLED, WS_GROUP, WS_TABSTOP},
wingdi::DeleteObject
};
use winapi::shared::windef::HBRUSH;
use crate::win32::window_helper as wh;
use crate::win32::base_helper::check_hwnd;
use crate::{Font, NwgError, RawEventHandler, unbind_raw_event_handler};
use super::{ControlBase, ControlHandle};
use std::cell::RefCell;
// Panic messages passed to `check_hwnd` when the control is unbound or wraps a non-HWND handle.
const NOT_BOUND: &'static str = "RadioButton is not yet bound to a winapi object";
const BAD_HANDLE: &'static str = "INTERNAL ERROR: RadioButton handle is not HWND!";
bitflags! {
    /**
        The radio button flags

        * VISIBLE: The radio button is immediately visible after creation
        * DISABLED: The radio button cannot be interacted with by the user. It also has a grayed out look.
        * TAB_STOP: The radio button can be selected using tab navigation
        * GROUP: Creates a new radio button group
    */
    pub struct RadioButtonFlags: u32 {
        const VISIBLE = WS_VISIBLE;
        const DISABLED = WS_DISABLED;
        const TAB_STOP = WS_TABSTOP;
        const GROUP = WS_GROUP;
    }
}
/// Represents the check status of a radio button
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum RadioButtonState {
    /// The radio button is selected.
    Checked,
    /// The radio button is not selected.
    Unchecked
}
/**
A radio button (also called option button) consists of a round button and an application-defined label,
icon, or bitmap that indicates a choice the user can make by selecting the button. An application typically uses radio buttons in a group box to enable the user to choose one of a set of related but mutually exclusive options.
RadioButton is not gated behind any cargo feature.
Note: Internally, radio buttons are `Button` and as such, they trigger the same events
**Builder parameters:**
* `parent`: **Required.** The radio button parent container.
* `text`: The radio button text.
* `size`: The radio button size.
* `position`: The radio button position.
* `enabled`: If the radio button can be used by the user. It also has a grayed out look if disabled.
* `flags`: A combination of the RadioButtonFlags values.
* `ex_flags`: A combination of win32 window extended flags. Unlike `flags`, ex_flags must be used straight from winapi
* `font`: The font used for the radio button text
* `background_color`: The background color of the radio button. Defaults to the default window background (light gray)
* `check_state`: The default check state
**Control events:**
* `OnButtonClick`: When the radio button is clicked once by the user
* `OnButtonDoubleClick`: When the radio button is clicked twice rapidly by the user
* `MousePress(_)`: Generic mouse press events on the radio button
* `OnMouseMove`: Generic mouse move event
* `OnMouseWheel`: Generic mouse wheel event
```rust
use native_windows_gui as nwg;
/// Build two group of checkboxes on the same parent with the GROUP flags
fn build_radio_groups(radios: &mut [nwg::RadioButton], parent: &nwg::Window) {
use nwg::RadioButtonFlags as RadioF;
// Group 1
nwg::RadioButton::builder()
.flags(RadioF::VISIBLE | RadioF::GROUP)
.parent(parent)
.build(&mut radios[0]);
nwg::RadioButton::builder()
.parent(parent)
.build(&mut radios[1]);
// Group 2
nwg::RadioButton::builder()
.flags(RadioF::VISIBLE | RadioF::GROUP)
.parent(parent)
.build(&mut radios[2]);
nwg::RadioButton::builder()
.parent(parent)
.build(&mut radios[3]);
}
```
```rust
use native_windows_gui as nwg;
fn build_radio(radio: &mut nwg::RadioButton, window: &nwg::Window, font: &nwg::Font) {
nwg::RadioButton::builder()
.text("Hello")
.flags(nwg::RadioButtonFlags::VISIBLE)
.font(Some(font))
.parent(window)
.build(radio);
}
```
*/
#[derive(Default)]
pub struct RadioButton {
    pub handle: ControlHandle,
    // GDI brush created by `hook_background_color`; released in `Drop` via `DeleteObject`.
    background_brush: Option<HBRUSH>,
    // Raw event handler installed for background painting; unbound in `Drop`.
    handler0: RefCell<Option<RawEventHandler>>,
}
impl RadioButton {
    /// Returns a builder initialized with default values.
    pub fn builder<'a>() -> RadioButtonBuilder<'a> {
        RadioButtonBuilder {
            text: "A radio button",
            size: (100, 25),
            position: (0, 0),
            focus: false,
            background_color: None,
            check_state: RadioButtonState::Unchecked,
            flags: None,
            ex_flags: 0,
            font: None,
            parent: None
        }
    }
    /// Return the check state of the radio button
    pub fn check_state(&self) -> RadioButtonState {
        use winapi::um::winuser::{BM_GETCHECK, BST_CHECKED, BST_UNCHECKED};
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        // BM_GETCHECK yields BST_CHECKED or BST_UNCHECKED for a radio button;
        // BST_INDETERMINATE applies only to 3-state checkboxes, hence unreachable.
        match wh::send_message(handle, BM_GETCHECK, 0, 0) as usize {
            BST_UNCHECKED => RadioButtonState::Unchecked,
            BST_CHECKED => RadioButtonState::Checked,
            _ => unreachable!()
        }
    }
    /// Sets the check state of the radio button
    pub fn set_check_state(&self, state: RadioButtonState) {
        use winapi::um::winuser::{BM_SETCHECK, BST_CHECKED, BST_UNCHECKED};
        use winapi::shared::minwindef::WPARAM;
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        let x = match state {
            RadioButtonState::Unchecked => BST_UNCHECKED,
            RadioButtonState::Checked => BST_CHECKED,
        };
        wh::send_message(handle, BM_SETCHECK, x as WPARAM, 0);
    }
    /// Return the font of the control
    pub fn font(&self) -> Option<Font> {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        let font_handle = wh::get_window_font(handle);
        if font_handle.is_null() {
            None
        } else {
            Some(Font { handle: font_handle })
        }
    }
    /// Set the font of the control
    pub fn set_font(&self, font: Option<&Font>) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_window_font(handle, font.map(|f| f.handle), true); }
    }
    /// Return true if the control currently has the keyboard focus
    pub fn focus(&self) -> bool {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::get_focus(handle) }
    }
    /// Set the keyboard focus on the button.
    pub fn set_focus(&self) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_focus(handle); }
    }
    /// Return true if the control user can interact with the control, return false otherwise
    pub fn enabled(&self) -> bool {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::get_window_enabled(handle) }
    }
    /// Enable or disable the control
    pub fn set_enabled(&self, v: bool) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_window_enabled(handle, v) }
    }
    /// Return true if the control is visible to the user. Will return true even if the
    /// control is outside of the parent client view (ex: at the position (10000, 10000))
    pub fn visible(&self) -> bool {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::get_window_visibility(handle) }
    }
    /// Show or hide the control to the user
    pub fn set_visible(&self, v: bool) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_window_visibility(handle, v) }
    }
    /// Return the size of the radio button in the parent window
    pub fn size(&self) -> (u32, u32) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::get_window_size(handle) }
    }
    /// Set the size of the radio button in the parent window
    pub fn set_size(&self, x: u32, y: u32) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_window_size(handle, x, y, false) }
    }
    /// Return the position of the radio button in the parent window
    pub fn position(&self) -> (i32, i32) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::get_window_position(handle) }
    }
    /// Set the position of the radio button in the parent window
    pub fn set_position(&self, x: i32, y: i32) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_window_position(handle, x, y) }
    }
    /// Return the radio button label
    pub fn text(&self) -> String {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::get_window_text(handle) }
    }
    /// Set the radio button label
    pub fn set_text<'a>(&self, v: &'a str) {
        let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
        unsafe { wh::set_window_text(handle, v) }
    }
    /// Winapi class name used during control creation
    pub fn class_name(&self) -> &'static str {
        "BUTTON"
    }
    /// Winapi base flags used during window creation
    pub fn flags(&self) -> u32 {
        WS_VISIBLE
    }
    /// Winapi flags required by the control
    pub fn forced_flags(&self) -> u32 {
        use winapi::um::winuser::{BS_NOTIFY, WS_CHILD, BS_AUTORADIOBUTTON};
        BS_NOTIFY | WS_CHILD | BS_AUTORADIOBUTTON
    }
    /// Change the radio button background color.
    // NOTE(review): `handler.unwrap()` below panics if binding the raw event
    // handler fails — confirm that is the intended failure mode.
    fn hook_background_color(&mut self, c: [u8; 3]) {
        use crate::bind_raw_event_handler_inner;
        use winapi::um::winuser::{WM_CTLCOLORSTATIC};
        use winapi::shared::{basetsd::UINT_PTR, windef::{HWND}, minwindef::LRESULT};
        use winapi::um::wingdi::{CreateSolidBrush, RGB};
        if self.handle.blank() { panic!("{}", NOT_BOUND); }
        let handle = self.handle.hwnd().expect(BAD_HANDLE);
        let parent_handle = ControlHandle::Hwnd(wh::get_window_parent(handle));
        // The brush is owned by this control and released in `Drop`.
        let brush = unsafe { CreateSolidBrush(RGB(c[0], c[1], c[2])) };
        self.background_brush = Some(brush);
        // Intercept WM_CTLCOLORSTATIC on the parent and answer with our brush
        // whenever the message targets this control's window.
        let handler = bind_raw_event_handler_inner(&parent_handle, handle as UINT_PTR, move |_hwnd, msg, _w, l| {
            match msg {
                WM_CTLCOLORSTATIC => {
                    let child = l as HWND;
                    if child == handle {
                        return Some(brush as LRESULT);
                    }
                },
                _ => {}
            }
            None
        });
        *self.handler0.borrow_mut() = Some(handler.unwrap());
    }
}
impl Drop for RadioButton {
    fn drop(&mut self) {
        // Unbind the background-color raw event handler, if one was installed.
        // The unbind result is deliberately discarded: a failure during
        // teardown is not actionable here.
        let handler = self.handler0.borrow();
        if let Some(h) = handler.as_ref() {
            drop(unbind_raw_event_handler(h));
        }
        // Release the GDI brush created by `hook_background_color`.
        if let Some(bg) = self.background_brush {
            unsafe { DeleteObject(bg as _); }
        }
        self.handle.destroy();
    }
}
/// Builder for [`RadioButton`]; obtain one through `RadioButton::builder`.
pub struct RadioButtonBuilder<'a> {
    text: &'a str,
    size: (i32, i32),
    position: (i32, i32),
    // When true, the control receives keyboard focus after creation.
    focus: bool,
    // RGB background color; `None` keeps the default window background.
    background_color: Option<[u8; 3]>,
    check_state: RadioButtonState,
    // `None` falls back to the control's default flags at build time.
    flags: Option<RadioButtonFlags>,
    ex_flags: u32,
    font: Option<&'a Font>,
    parent: Option<ControlHandle>
}
impl<'a> RadioButtonBuilder<'a> {
    /// Sets the window style flags applied at creation time.
    pub fn flags(mut self, flags: RadioButtonFlags) -> RadioButtonBuilder<'a> {
        self.flags = Some(flags);
        self
    }
    /// Sets raw win32 extended window flags (used as-is).
    pub fn ex_flags(mut self, flags: u32) -> RadioButtonBuilder<'a> {
        self.ex_flags = flags;
        self
    }
    /// Sets the radio button label.
    pub fn text(mut self, text: &'a str) -> RadioButtonBuilder<'a> {
        self.text = text;
        self
    }
    /// Sets the control size, in pixels.
    pub fn size(mut self, size: (i32, i32)) -> RadioButtonBuilder<'a> {
        self.size = size;
        self
    }
    /// Sets the control position in its parent.
    pub fn position(mut self, pos: (i32, i32)) -> RadioButtonBuilder<'a> {
        self.position = pos;
        self
    }
    /// Gives the control keyboard focus after creation when `true`.
    pub fn focus(mut self, focus: bool) -> RadioButtonBuilder<'a> {
        self.focus = focus;
        self
    }
    /// Sets the initial check state.
    pub fn check_state(mut self, check: RadioButtonState) -> RadioButtonBuilder<'a> {
        self.check_state = check;
        self
    }
    /// Sets the background color; `None` keeps the default window background.
    pub fn background_color(mut self, color: Option<[u8;3]>) -> RadioButtonBuilder<'a> {
        self.background_color = color;
        self
    }
    /// Sets the text font; `None` falls back to the global default font.
    pub fn font(mut self, font: Option<&'a Font>) -> RadioButtonBuilder<'a> {
        self.font = font;
        self
    }
    /// Sets the parent container. **Required.**
    pub fn parent<C: Into<ControlHandle>>(mut self, p: C) -> RadioButtonBuilder<'a> {
        self.parent = Some(p.into());
        self
    }
    /// Builds the radio button into `out`.
    ///
    /// # Errors
    /// Returns `NwgError` when no parent was set or when window creation fails.
    pub fn build(self, out: &mut RadioButton) -> Result<(), NwgError> {
        let flags = self.flags.map(|f| f.bits()).unwrap_or_else(|| out.flags());
        // A parent is mandatory: a radio button is always a child control.
        let parent = self.parent.ok_or_else(|| NwgError::no_parent("RadioButton"))?;
        // Drop any previously bound handle/handlers before rebuilding.
        *out = Default::default();
        out.handle = ControlBase::build_hwnd()
            .class_name(out.class_name())
            .forced_flags(out.forced_flags())
            .flags(flags)
            .ex_flags(self.ex_flags)
            .size(self.size)
            .position(self.position)
            .text(self.text)
            .parent(Some(parent))
            .build()?;
        match self.font {
            Some(font) => out.set_font(Some(font)),
            None => out.set_font(Font::global_default().as_ref()),
        }
        // `if let` replaces the original `is_some()` + `unwrap()` pair.
        if let Some(color) = self.background_color {
            out.hook_background_color(color);
        }
        if self.focus {
            out.set_focus();
        }
        out.set_check_state(self.check_state);
        Ok(())
    }
}
impl PartialEq for RadioButton {
    /// Two radio buttons compare equal when they wrap the same control handle.
    fn eq(&self, other: &Self) -> bool {
        self.handle.eq(&other.handle)
    }
}
|
// NOTE(review): `test_stdout!` is defined elsewhere; it presumably runs a
// fixture and compares its captured stdout to the expected string — confirm.
// Eleven "false" lines: one per probe performed by the fixture.
test_stdout!(
    without_binary_returns_false,
    "false\nfalse\nfalse\nfalse\nfalse\nfalse\nfalse\nfalse\nfalse\nfalse\nfalse\n"
);
test_stdout!(with_binary_returns_true, "true\n");
|
use std::error::Error;
use wayland_client::{
protocol::{wl_compositor::WlCompositor, wl_seat::WlSeat, wl_shm::WlShm},
Display, GlobalManager,
};
use wayland_protocols::xdg_shell::client::xdg_wm_base;
mod debug_callbacks;
mod painter;
mod seat;
mod shared_memory;
mod surface;
mod buffer;
/// Connects to the Wayland compositor, binds the globals needed for a
/// shared-memory-rendered window and runs the event loop until the surface closes.
fn main() -> Result<(), Box<dyn Error>> {
    let display = Display::connect_to_env()?;
    let mut event_queue = display.create_event_queue();
    let token = event_queue.token();
    let attached = display.attach(token);
    let global = GlobalManager::new_with_cb(&attached, debug_callbacks::print_global_event);
    // One roundtrip so the registry is populated before instantiating globals.
    // All registry events are routed to the GlobalManager callback above,
    // so the fallback closure must never be invoked.
    event_queue.sync_roundtrip(&mut (), |_, _, _| {
        unreachable!();
    })?;
    // Globals
    let compositor = global.instantiate_exact::<WlCompositor>(4)?;
    let xdg_wm_base = global.instantiate_exact::<xdg_wm_base::XdgWmBase>(1)?;
    let shm = global.instantiate_exact::<WlShm>(1)?;
    let seat = global.instantiate_exact::<WlSeat>(5)?;
    // Answer compositor liveness pings, otherwise the client is deemed unresponsive.
    xdg_wm_base.quick_assign(|obj, event, _| match event {
        xdg_wm_base::Event::Ping { serial } => obj.pong(serial),
        _ => ()
    });
    let surface_state = surface::setup(&compositor, &xdg_wm_base, &shm);
    seat::handle(&seat);
    // Blocking dispatch loop; `is_closed` presumably flips when the xdg
    // toplevel receives a close event inside `surface::setup` — confirm.
    while !surface_state.borrow().is_closed() {
        event_queue.dispatch(&mut (), |_, _, _| {})?;
    }
    Ok(())
}
|
extern crate failure;
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate dotenv;
#[macro_use]
extern crate clap;
#[macro_use]
extern crate serde_derive;
extern crate toml;
mod config;
use std::path::Path;
use failure::Error;
use clap::{App, Arg, AppSettings, SubCommand};
use self::config::Config;
const SUBCOMMAND_ACTION: &str = "action";
/// Entry point: loads `.env`, initializes logging, parses CLI arguments,
/// reads (or creates) the config file and dispatches the chosen subcommand.
fn main() -> Result<(), Error> {
    // Load environment variables from a `.env` file when present; absence is fine.
    dotenv::dotenv().ok();
    env_logger::init();
    let matches = App::new(crate_name!())
        .version(crate_version!())
        .author(crate_authors!())
        .about(crate_description!())
        .setting(AppSettings::SubcommandRequired)
        .arg(Arg::with_name("config")
            .short("c")
            .long("config")
            .value_name("FILE")
            .help("Sets a custom config file")
            .takes_value(true))
        .subcommand(SubCommand::with_name(SUBCOMMAND_ACTION).about("start an action"))
        .get_matches();
    let config = matches.value_of("config").unwrap_or("config.toml");
    let path = Path::new(config);
    let config = Config::read_or_create(path)?;
    match matches.subcommand() {
        (SUBCOMMAND_ACTION, _) => {
            action(&config)?;
        }
        _ => {
            // BUG FIX: `matches.usage()` returns the usage string without
            // printing it; the original discarded the value, so nothing was
            // shown. Print it to stderr. (With `SubcommandRequired` set, clap
            // normally exits before reaching this arm.)
            eprintln!("{}", matches.usage());
        }
    }
    Ok(())
}
/// Runs the `action` subcommand: logs the configured message at info level.
fn action(config: &Config) -> Result<(), Error> {
    info!("Message: {}", config.message);
    Ok(())
}
|
#[doc = "Reader of register DMAMUX_HWCFGR1"]
pub type R = crate::R<u32, super::DMAMUX_HWCFGR1>;
// Each HWCFGR1 field is an 8-bit hardware-configuration count packed into one
// byte of the 32-bit register; all four share the plain-u8 reader type.
#[doc = "Reader of field `NUM_DMA_STREAMS`"]
pub type NUM_DMA_STREAMS_R = crate::R<u8, u8>;
#[doc = "Reader of field `NUM_DMA_PERIPH_REQ`"]
pub type NUM_DMA_PERIPH_REQ_R = crate::R<u8, u8>;
#[doc = "Reader of field `NUM_DMA_TRIG`"]
pub type NUM_DMA_TRIG_R = crate::R<u8, u8>;
#[doc = "Reader of field `NUM_DMA_REQGEN`"]
pub type NUM_DMA_REQGEN_R = crate::R<u8, u8>;
impl R {
    /// Extracts the byte at bit `offset` (every HWCFGR1 field is 8 bits wide).
    #[inline(always)]
    fn byte_at(&self, offset: u32) -> u8 {
        ((self.bits >> offset) & 0xff) as u8
    }
    /// Bits 0:7 - number of DMA request line multiplexer (output) channels
    #[inline(always)]
    pub fn num_dma_streams(&self) -> NUM_DMA_STREAMS_R {
        NUM_DMA_STREAMS_R::new(self.byte_at(0))
    }
    /// Bits 8:15 - number of DMA request lines from peripherals
    #[inline(always)]
    pub fn num_dma_periph_req(&self) -> NUM_DMA_PERIPH_REQ_R {
        NUM_DMA_PERIPH_REQ_R::new(self.byte_at(8))
    }
    /// Bits 16:23 - number of synchronization inputs
    #[inline(always)]
    pub fn num_dma_trig(&self) -> NUM_DMA_TRIG_R {
        NUM_DMA_TRIG_R::new(self.byte_at(16))
    }
    /// Bits 24:31 - number of DMA request generator channels
    #[inline(always)]
    pub fn num_dma_reqgen(&self) -> NUM_DMA_REQGEN_R {
        NUM_DMA_REQGEN_R::new(self.byte_at(24))
    }
}
|
#[doc = r"Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot taken from the hardware register.
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    // Raw 32-bit value staged before being written back to the register.
    bits: u32,
}
impl super::TPIO {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the current hardware value seeds both the read
        // and write views, so fields the closure does not touch are preserved.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, `write` starts from the reset value: any field the
        // closure does not set is written back in its reset state.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
/// Read view of the EN0 bit.
pub struct HIB_TPIO_EN0R {
    bits: bool,
}
impl HIB_TPIO_EN0R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the EN0 bit.
pub struct _HIB_TPIO_EN0W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_EN0W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 0;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the LEV0 bit.
pub struct HIB_TPIO_LEV0R {
    bits: bool,
}
impl HIB_TPIO_LEV0R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the LEV0 bit.
pub struct _HIB_TPIO_LEV0W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_LEV0W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 1;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the PUEN0 bit.
pub struct HIB_TPIO_PUEN0R {
    bits: bool,
}
impl HIB_TPIO_PUEN0R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the PUEN0 bit.
pub struct _HIB_TPIO_PUEN0W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_PUEN0W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 2;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the GFLTR0 bit.
pub struct HIB_TPIO_GFLTR0R {
    bits: bool,
}
impl HIB_TPIO_GFLTR0R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the GFLTR0 bit.
pub struct _HIB_TPIO_GFLTR0W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_GFLTR0W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 3;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the EN1 bit.
pub struct HIB_TPIO_EN1R {
    bits: bool,
}
impl HIB_TPIO_EN1R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the EN1 bit.
pub struct _HIB_TPIO_EN1W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_EN1W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 8;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the LEV1 bit.
pub struct HIB_TPIO_LEV1R {
    bits: bool,
}
impl HIB_TPIO_LEV1R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the LEV1 bit.
pub struct _HIB_TPIO_LEV1W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_LEV1W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 9;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the PUEN1 bit.
pub struct HIB_TPIO_PUEN1R {
    bits: bool,
}
impl HIB_TPIO_PUEN1R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the PUEN1 bit.
pub struct _HIB_TPIO_PUEN1W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_PUEN1W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 10;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the GFLTR1 bit.
pub struct HIB_TPIO_GFLTR1R {
    bits: bool,
}
impl HIB_TPIO_GFLTR1R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the GFLTR1 bit.
pub struct _HIB_TPIO_GFLTR1W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_GFLTR1W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 11;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the EN2 bit.
pub struct HIB_TPIO_EN2R {
    bits: bool,
}
impl HIB_TPIO_EN2R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the EN2 bit.
pub struct _HIB_TPIO_EN2W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_EN2W<'a> {
    /// Bit position of this field within the register.
    const OFFSET: u32 = 16;
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes a raw bit to the field, leaving other register bits untouched.
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(1 << Self::OFFSET)) | (((value as u32) & 1) << Self::OFFSET);
        self.w
    }
}
/// Read view of the LEV2 bit.
pub struct HIB_TPIO_LEV2R {
    bits: bool,
}
impl HIB_TPIO_LEV2R {
    /// Raw bit value of the field.
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` if the bit is clear (0).
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` if the bit is set (1).
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_LEV2W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_LEV2W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 17, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 17)) | (((value as u32) & 1) << 17);
        self.w
    }
}
/// Value of the field
pub struct HIB_TPIO_PUEN2R {
    bits: bool,
}
impl HIB_TPIO_PUEN2R {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_PUEN2W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_PUEN2W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 18, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 18)) | (((value as u32) & 1) << 18);
        self.w
    }
}
/// Value of the field
pub struct HIB_TPIO_GFLTR2R {
    bits: bool,
}
impl HIB_TPIO_GFLTR2R {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_GFLTR2W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_GFLTR2W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 19, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 19)) | (((value as u32) & 1) << 19);
        self.w
    }
}
/// Value of the field
pub struct HIB_TPIO_EN3R {
    bits: bool,
}
impl HIB_TPIO_EN3R {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_EN3W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_EN3W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 24, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 24)) | (((value as u32) & 1) << 24);
        self.w
    }
}
/// Value of the field
pub struct HIB_TPIO_LEV3R {
    bits: bool,
}
impl HIB_TPIO_LEV3R {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_LEV3W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_LEV3W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 25, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 25)) | (((value as u32) & 1) << 25);
        self.w
    }
}
/// Value of the field
pub struct HIB_TPIO_PUEN3R {
    bits: bool,
}
impl HIB_TPIO_PUEN3R {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_PUEN3W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_PUEN3W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 26, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 26)) | (((value as u32) & 1) << 26);
        self.w
    }
}
/// Value of the field
pub struct HIB_TPIO_GFLTR3R {
    bits: bool,
}
impl HIB_TPIO_GFLTR3R {
    /// Value of the field as raw bits
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Proxy for writing the field
pub struct _HIB_TPIO_GFLTR3W<'a> {
    w: &'a mut W,
}
impl<'a> _HIB_TPIO_GFLTR3W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 27, then OR in the masked new value.
        self.w.bits = (self.w.bits & !(1 << 27)) | (((value as u32) & 1) << 27);
        self.w
    }
}
impl R {
    /// Value of the register as raw bits
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    /// Bit 0 - TMPR0 Enable
    #[inline(always)]
    pub fn hib_tpio_en0(&self) -> HIB_TPIO_EN0R {
        HIB_TPIO_EN0R { bits: (self.bits & (1 << 0)) != 0 }
    }
    /// Bit 1 - TMPR0 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev0(&self) -> HIB_TPIO_LEV0R {
        HIB_TPIO_LEV0R { bits: (self.bits & (1 << 1)) != 0 }
    }
    /// Bit 2 - TMPR0 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen0(&self) -> HIB_TPIO_PUEN0R {
        HIB_TPIO_PUEN0R { bits: (self.bits & (1 << 2)) != 0 }
    }
    /// Bit 3 - TMPR0 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr0(&self) -> HIB_TPIO_GFLTR0R {
        HIB_TPIO_GFLTR0R { bits: (self.bits & (1 << 3)) != 0 }
    }
    /// Bit 8 - TMPR1 Enable
    #[inline(always)]
    pub fn hib_tpio_en1(&self) -> HIB_TPIO_EN1R {
        HIB_TPIO_EN1R { bits: (self.bits & (1 << 8)) != 0 }
    }
    /// Bit 9 - TMPR1 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev1(&self) -> HIB_TPIO_LEV1R {
        HIB_TPIO_LEV1R { bits: (self.bits & (1 << 9)) != 0 }
    }
    /// Bit 10 - TMPR1 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen1(&self) -> HIB_TPIO_PUEN1R {
        HIB_TPIO_PUEN1R { bits: (self.bits & (1 << 10)) != 0 }
    }
    /// Bit 11 - TMPR1 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr1(&self) -> HIB_TPIO_GFLTR1R {
        HIB_TPIO_GFLTR1R { bits: (self.bits & (1 << 11)) != 0 }
    }
    /// Bit 16 - TMPR2 Enable
    #[inline(always)]
    pub fn hib_tpio_en2(&self) -> HIB_TPIO_EN2R {
        HIB_TPIO_EN2R { bits: (self.bits & (1 << 16)) != 0 }
    }
    /// Bit 17 - TMPR2 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev2(&self) -> HIB_TPIO_LEV2R {
        HIB_TPIO_LEV2R { bits: (self.bits & (1 << 17)) != 0 }
    }
    /// Bit 18 - TMPR2 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen2(&self) -> HIB_TPIO_PUEN2R {
        HIB_TPIO_PUEN2R { bits: (self.bits & (1 << 18)) != 0 }
    }
    /// Bit 19 - TMPR2 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr2(&self) -> HIB_TPIO_GFLTR2R {
        HIB_TPIO_GFLTR2R { bits: (self.bits & (1 << 19)) != 0 }
    }
    /// Bit 24 - TMPR3 Enable
    #[inline(always)]
    pub fn hib_tpio_en3(&self) -> HIB_TPIO_EN3R {
        HIB_TPIO_EN3R { bits: (self.bits & (1 << 24)) != 0 }
    }
    /// Bit 25 - TMPR3 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev3(&self) -> HIB_TPIO_LEV3R {
        HIB_TPIO_LEV3R { bits: (self.bits & (1 << 25)) != 0 }
    }
    /// Bit 26 - TMPR3 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen3(&self) -> HIB_TPIO_PUEN3R {
        HIB_TPIO_PUEN3R { bits: (self.bits & (1 << 26)) != 0 }
    }
    /// Bit 27 - TMPR3 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr3(&self) -> HIB_TPIO_GFLTR3R {
        HIB_TPIO_GFLTR3R { bits: (self.bits & (1 << 27)) != 0 }
    }
}
impl W {
    /// Writes raw bits to the register
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    /// Bit 0 - TMPR0 Enable
    #[inline(always)]
    pub fn hib_tpio_en0(&mut self) -> _HIB_TPIO_EN0W {
        _HIB_TPIO_EN0W { w: self }
    }
    /// Bit 1 - TMPR0 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev0(&mut self) -> _HIB_TPIO_LEV0W {
        _HIB_TPIO_LEV0W { w: self }
    }
    /// Bit 2 - TMPR0 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen0(&mut self) -> _HIB_TPIO_PUEN0W {
        _HIB_TPIO_PUEN0W { w: self }
    }
    /// Bit 3 - TMPR0 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr0(&mut self) -> _HIB_TPIO_GFLTR0W {
        _HIB_TPIO_GFLTR0W { w: self }
    }
    /// Bit 8 - TMPR1 Enable
    #[inline(always)]
    pub fn hib_tpio_en1(&mut self) -> _HIB_TPIO_EN1W {
        _HIB_TPIO_EN1W { w: self }
    }
    /// Bit 9 - TMPR1 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev1(&mut self) -> _HIB_TPIO_LEV1W {
        _HIB_TPIO_LEV1W { w: self }
    }
    /// Bit 10 - TMPR1 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen1(&mut self) -> _HIB_TPIO_PUEN1W {
        _HIB_TPIO_PUEN1W { w: self }
    }
    /// Bit 11 - TMPR1 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr1(&mut self) -> _HIB_TPIO_GFLTR1W {
        _HIB_TPIO_GFLTR1W { w: self }
    }
    /// Bit 16 - TMPR2 Enable
    #[inline(always)]
    pub fn hib_tpio_en2(&mut self) -> _HIB_TPIO_EN2W {
        _HIB_TPIO_EN2W { w: self }
    }
    /// Bit 17 - TMPR2 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev2(&mut self) -> _HIB_TPIO_LEV2W {
        _HIB_TPIO_LEV2W { w: self }
    }
    /// Bit 18 - TMPR2 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen2(&mut self) -> _HIB_TPIO_PUEN2W {
        _HIB_TPIO_PUEN2W { w: self }
    }
    /// Bit 19 - TMPR2 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr2(&mut self) -> _HIB_TPIO_GFLTR2W {
        _HIB_TPIO_GFLTR2W { w: self }
    }
    /// Bit 24 - TMPR3 Enable
    #[inline(always)]
    pub fn hib_tpio_en3(&mut self) -> _HIB_TPIO_EN3W {
        _HIB_TPIO_EN3W { w: self }
    }
    /// Bit 25 - TMPR3 Trigger Level
    #[inline(always)]
    pub fn hib_tpio_lev3(&mut self) -> _HIB_TPIO_LEV3W {
        _HIB_TPIO_LEV3W { w: self }
    }
    /// Bit 26 - TMPR3 Internal Weak Pull-up Enable
    #[inline(always)]
    pub fn hib_tpio_puen3(&mut self) -> _HIB_TPIO_PUEN3W {
        _HIB_TPIO_PUEN3W { w: self }
    }
    /// Bit 27 - TMPR3 Glitch Filtering
    #[inline(always)]
    pub fn hib_tpio_gfltr3(&mut self) -> _HIB_TPIO_GFLTR3W {
        _HIB_TPIO_GFLTR3W { w: self }
    }
}
|
use fileutil;
use std::collections::HashMap;
/// One kind of log entry; the payload is the guard id for shift starts,
/// and the minute-of-hour for sleep/wake events.
#[derive(Debug)]
enum EventType {
    BeginShift(i32),
    FallAsleep(i32),
    WakeUp(i32)
}

/// A parsed log line: its raw timestamp (used for chronological sorting)
/// plus the decoded event.
#[derive(Debug)]
struct Event<'a> {
    timestamp: &'a str,
    event_type: EventType
}

/// Parses one log line of the form
/// `[1518-11-01 00:00] Guard #10 begins shift`,
/// `[...] falls asleep`, or `[...] wakes up`.
fn parse_line(line: &str) -> Event {
    // Fixed-width prefix: chars 1..17 are "YYYY-MM-DD HH:MM".
    let timestamp: &str = &line[1..17];
    let mins: i32 = line[15..17].parse().unwrap();
    let body = &line[19..];
    let event_type = if body.starts_with("Guard") {
        // Second whitespace token is "#<id>"; drop the '#' and parse.
        let id_token = body.split(' ').nth(1).unwrap();
        EventType::BeginShift(id_token[1..].parse().unwrap())
    } else if body.starts_with("falls") {
        EventType::FallAsleep(mins)
    } else {
        EventType::WakeUp(mins)
    };
    Event { timestamp, event_type }
}
/// Accumulates, for each minute 0-59, how many of the given sleep
/// intervals `[start, end)` cover that minute.
///
/// NOTE(review): despite the name, this counts minutes spent *asleep*;
/// the `awake` naming is historical. Accepts any slice now (was
/// `&Vec<_>`; call sites coerce, so this is backward-compatible).
fn tabulate_awake_mins(sleeps: &[(i32, i32)], awake_count: &mut [u32; 60]) {
    for &(start, end) in sleeps {
        for min in start..end {
            awake_count[min as usize] += 1;
        }
    }
}
/// Given a per-minute counter array, returns `(minute with the highest
/// count, that count)`. Ties resolve to the earliest minute; an all-zero
/// array yields `(0, 0)`.
fn calc_sleepiest_min(awake_count: &[u32; 60]) -> (usize, u32) {
    awake_count
        .iter()
        .enumerate()
        // Strict `>` keeps the first maximum, matching the original loop.
        .fold((0, 0), |best, (i, &count)| {
            if count > best.1 { (i, count) } else { best }
        })
}
/// Part 1: finds the guard with the most total minutes asleep, then the
/// minute on which that guard was most often asleep. Prints both results.
///
/// Panics if no guard has any recorded sleep (all totals zero leaves
/// `curr_max_id` as `None`).
fn part1(sleeps_by_id: &HashMap<i32, Vec<(i32, i32)>>) {
    println!("part 1");
    // Task 1: find the guard who sleeps the most
    let mut curr_max_id = None;
    let mut curr_max_mins = 0;
    for (id, sleeps) in sleeps_by_id {
        let sum: i32 = sleeps.iter()
            .map(|pair| pair.1 - pair.0)
            .sum();
        if sum > curr_max_mins {
            curr_max_mins = sum;
            curr_max_id = Option::Some(id);
        }
    }
    println!("guard {} slept the most ({} mins)", curr_max_id.unwrap(), curr_max_mins);
    let max_id_sleeps: &Vec<(i32, i32)> =
        (sleeps_by_id).get(curr_max_id.unwrap()).unwrap();
    // Task 2: find the minute they sleep the most
    let mut awake_count: [u32; 60] = [0; 60];
    tabulate_awake_mins(max_id_sleeps, &mut awake_count);
    // Only the minute is reported; the count is deliberately discarded
    // (the binding was previously unused and triggered a warning).
    let (sleepiest_min, _times_asleep) = calc_sleepiest_min(&awake_count);
    println!("max min asleep was {}", sleepiest_min);
}
/// Part 2: across all guards, finds the (guard, minute) pair with the
/// highest asleep-on-that-minute count and prints it.
///
/// Panics if no guard has any recorded sleep.
fn part2(sleeps_by_id: &HashMap<i32, Vec<(i32, i32)>>) {
    // Strategy 2: Of all guards, which guard is most frequently asleep on the same minute?
    println!("part 2");
    // One (guard id, per-minute asleep counters) entry per guard.
    let mut awake_list: Vec<(i32, [u32; 60])> = Vec::new();
    for (&id, sleeps) in sleeps_by_id {
        let mut awake_mins = [0u32; 60];
        tabulate_awake_mins(sleeps, &mut awake_mins);
        awake_list.push((id, awake_mins));
    }
    // Global maximum of how many times any guard has been asleep on any minute.
    let mut sleepiest_min_count = 0u32;
    let mut sleepiest_guard_min: Option<(i32, usize)> = None;
    for (id, awake_mins) in &awake_list {
        let (sleepiest_min, mins_count) = calc_sleepiest_min(awake_mins);
        if mins_count > sleepiest_min_count {
            sleepiest_min_count = mins_count;
            sleepiest_guard_min = Some((*id, sleepiest_min));
        }
    }
    let (id, sleepiest_min) = sleepiest_guard_min.unwrap();
    println!(
        "guard {} was asleep the most on minute {} ({} times)",
        id, sleepiest_min, sleepiest_min_count
    );
}
/// Entry point: reads the puzzle input, reconstructs each guard's sleep
/// intervals from the chronological event log, then runs both parts.
///
/// Panics if the input file is missing or an event sequence is malformed
/// (e.g. a wake-up with no preceding shift start / fall-asleep).
pub fn run() -> () {
    match fileutil::read_lines("./data/2018/04.txt") {
        Ok(lines) => {
            // The `[YYYY-MM-DD HH:MM]` timestamp prefix sorts
            // lexicographically in chronological order, so a plain string
            // sort puts events in time order.
            let mut chronological_events: Vec<Event> = (&lines).iter()
                .map(|line| parse_line(&line))
                .collect::<Vec<Event>>();
            chronological_events.sort_by_key(|e| e.timestamp);
            // map of form (guard id -> list of pairs of begin/end sleep mins)
            let mut sleeps_by_id: HashMap<i32, Vec<(i32, i32)>> = HashMap::new();
            // State carried across events: the guard currently on shift and
            // the minute they last fell asleep.
            let mut curr_id = Option::None;
            let mut begin_sleep = Option::None;
            for e in chronological_events {
                match e.event_type {
                    EventType::BeginShift(id) => {
                        curr_id = Option::Some(id);
                    }
                    EventType::FallAsleep(sleep_min) => {
                        begin_sleep = Option::Some(sleep_min);
                    }
                    EventType::WakeUp(wake_min) => {
                        // Close the half-open interval [fell_asleep, woke_up)
                        // for the guard currently on shift.
                        sleeps_by_id.entry(curr_id.unwrap())
                            .or_insert(Vec::<(i32, i32)>::new())
                            .push((begin_sleep.unwrap(), wake_min));
                        begin_sleep = Option::None;
                    }
                }
            }
            part1(&sleeps_by_id);
            part2(&sleeps_by_id);
        }
        // NOTE(review): `panic!(e)` with a non-string payload is deprecated
        // and a hard error in edition 2021; prefer `panic!("{:?}", e)`.
        Err(e) => panic!(e)
    }
}
|
use std::borrow::{Borrow, BorrowMut};
fn main() {
    let mut vec0 = Vec::new();
    // Lend vec0 mutably; fill_vec clones it, so vec0 itself stays empty.
    let mut vec1 = fill_vec(&mut vec0);
    // Do not change the following line!
    println!("{} has length {} content `{:?}`", "vec0", vec0.len(), vec0);
    vec1.push(88);
    println!("{} has length {} content `{:?}`", "vec1", vec1.len(), vec1);
}
/// Returns a copy of `vec` with 22, 44 and 66 appended. The vector
/// behind the borrow is left untouched.
fn fill_vec(vec: &mut Vec<i32>) -> Vec<i32> {
    let mut filled = vec.clone();
    filled.extend([22, 44, 66].iter().copied());
    filled
}
|
extern crate core;
use self::core::ptr;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use super::buffer::{Buffer, value_ptr};
use super::concurrent_queue::ConcurrentQueue;
/// A bounded queue allowing a single producer and multiple consumers.
pub struct SpmcConcurrentQueue<T> {
    // Ring storage plus the shared head/tail indices (see `Buffer`).
    buffer: Buffer<T>,
    // 120 bytes of padding between `buffer` and `tail_cache` —
    // presumably to keep them on separate cache lines (false-sharing
    // avoidance); TODO confirm the cache-line math for the target arch.
    _pad: [u64; 15],
    /// This is separate from the `head` index, which is expected to
    /// be highly contented, in the hope it stays in a Shared cache
    /// line that's rarely invalidated.
    tail_cache: AtomicUsize
}
impl<T> SpmcConcurrentQueue<T> {
    /// Creates a single producer multiple consumer queue with the specified
    /// capacity.
    ///
    /// Returned behind an `Arc` so the producer and the consumers can share
    /// ownership across threads.
    ///
    /// # Examples
    ///
    /// ```
    /// use cosmo::collection::{ConcurrentQueue, SpmcConcurrentQueue};
    /// let q = SpmcConcurrentQueue::<u64>::with_capacity(1024);
    /// assert_eq!(1024, q.capacity());
    /// ```
    pub fn with_capacity(initial_capacity: usize) -> Arc<SpmcConcurrentQueue<T>> {
        Arc::new(SpmcConcurrentQueue {
            buffer: Buffer::with_capacity(initial_capacity),
            tail_cache: AtomicUsize::new(0),
            _pad: [0; 15]
        })
    }
}
impl<T> ConcurrentQueue<T> for SpmcConcurrentQueue<T> {
    /// Puts an item in the queue. This method only reads and modifies the
    /// `tail` index, thus avoiding cache line ping-ponging.
    ///
    /// Returns `Some(val)` (handing the value back) when the queue is full,
    /// `None` on success.
    ///
    /// # Examples
    ///
    /// ```
    /// use cosmo::collection::{ConcurrentQueue, SpmcConcurrentQueue};
    /// let q = SpmcConcurrentQueue::<u64>::with_capacity(1024);
    /// assert_eq!(None, q.offer(10));
    /// ```
    fn offer(&self, val: T) -> Option<T> {
        // Single producer: only this thread ever stores `tail`, so a
        // Relaxed load reads back its own last store.
        let index = self.buffer.tail.load(Ordering::Relaxed);
        unsafe {
            let item = self.buffer.item(index);
            // Slot still holds an unconsumed value -> queue is full.
            if item.is_defined.load(Ordering::Acquire) {
                return Some(val)
            }
            ptr::write(value_ptr(item), val);
            // NOTE(review): `is_defined` is stored Relaxed; the Release
            // store of `tail` below appears to be what publishes the write
            // to consumers — confirm against `Buffer::item`'s indexing,
            // which is outside this file.
            item.is_defined.store(true, Ordering::Relaxed);
            self.buffer.tail.store(index + 1, Ordering::Release);
            None
        }
    }
    /// Takes an item from the queue. This method uses the `head` index to
    /// synchronize multiple consumers. The tail position is read from the
    /// `tail_cache` to avoid cache line ping-ponging between the producer
    /// and the consumers.
    ///
    /// # Example
    ///
    /// ```
    /// use cosmo::collection::{ConcurrentQueue, SpmcConcurrentQueue};
    /// let q = SpmcConcurrentQueue::<u64>::with_capacity(1024);
    /// q.offer(10);
    /// assert_eq!(Some(10), q.poll());
    /// ```
    fn poll(&self) -> Option<T> {
        loop {
            let index = self.buffer.head.load(Ordering::Acquire);
            // We do a relaxed load, which means a speculative load of
            // `self.buffer.tail` is allowed and harmless.
            let current_tail_cache = self.tail_cache.load(Ordering::Relaxed);
            if index >= current_tail_cache {
                // The cache says "empty": re-read the producer's real tail
                // before concluding the queue is actually drained.
                let current_tail = self.buffer.tail.load(Ordering::Relaxed);
                if index >= current_tail {
                    return None
                }
                self.tail_cache.store(current_tail, Ordering::Relaxed);
            }
            // Winning the CAS claims slot `index` exclusively for this
            // consumer; losing means another consumer took it — retry.
            if cas_head(self, index, Ordering::Release) {
                unsafe {
                    let item = self.buffer.item(index);
                    let res = ptr::read(value_ptr(item));
                    // Mark the slot free so the producer may reuse it.
                    item.is_defined.store(false, Ordering::Release);
                    return Some(res)
                }
            }
        }
    }
    /// Returns the capacity of the queue.
    fn capacity(&self) -> usize {
        self.buffer.capacity
    }
    /// Returns how many items are in the queue.
    fn size(&self) -> usize {
        self.buffer.size()
    }
}
/// Attempts to advance the queue's `head` index from `head` to `head + 1`.
/// Returns `true` when this thread won the race (i.e. `head` was still
/// current).
#[inline]
fn cas_head<T>(q: &SpmcConcurrentQueue<T>, head: usize, order: Ordering) -> bool {
    // `compare_and_swap` has been deprecated since Rust 1.50. For the
    // `Release` success ordering used by the caller (`poll`), the documented
    // equivalent is `compare_exchange` with a `Relaxed` failure ordering.
    q.buffer
        .head
        .compare_exchange(head, head + 1, order, Ordering::Relaxed)
        .is_ok()
}
#[cfg(test)]
mod test {
    use super::core::mem;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::{Arc, Barrier};
    use std::thread;
    use super::SpmcConcurrentQueue;
    use super::super::concurrent_queue::ConcurrentQueue;
    // Pins the capacity rounding performed by `Buffer::with_capacity`
    // (10 -> 16, i.e. presumably next power of two — behavior defined
    // outside this file).
    #[test]
    fn capacity_is_next_power_of_two() {
        assert_eq!(16, SpmcConcurrentQueue::<i32>::with_capacity(10).capacity());
    }
    #[test]
    fn adds_and_removes_a_value() {
        let q = SpmcConcurrentQueue::<i32>::with_capacity(2);
        assert_eq!(None, q.offer(34));
        assert_eq!(Some(34), q.poll());
    }
    // A full queue must hand the rejected value back to the caller.
    #[test]
    fn gets_full() {
        let q = SpmcConcurrentQueue::<i32>::with_capacity(2);
        assert_eq!(None, q.offer(1));
        assert_eq!(None, q.offer(2));
        assert_eq!(Some(3), q.offer(3));
        assert!(q.is_full());
    }
    #[test]
    fn gets_empty() {
        let q = SpmcConcurrentQueue::<i32>::with_capacity(2);
        assert_eq!(None, q.poll());
        assert!(q.is_empty());
    }
    // Drop-tracking payload: flips its flag when dropped, so the tests
    // below can assert exactly when a value's destructor runs.
    #[derive(Debug)]
    struct Payload {
        value: u64,
        dropped: Arc<AtomicBool>
    }
    impl Drop for Payload {
        fn drop(&mut self) {
            self.dropped.store(true, Ordering::Relaxed);
        }
    }
    impl PartialEq<Payload> for Payload {
        fn eq(&self, other: &Payload) -> bool {
            self.value == other.value
        }
    }
    // offer/poll must move values (no hidden copy left in the buffer
    // whose drop would fire early).
    #[test]
    fn items_are_moved() {
        let q = SpmcConcurrentQueue::<Payload>::with_capacity(2);
        let dropped = Arc::new(AtomicBool::new(false));
        let p1 = Payload { value: 67, dropped: dropped.clone() };
        assert!(q.is_empty());
        assert_eq!(None, q.offer(p1));
        let p2 = q.poll().unwrap();
        assert_eq!(67, p2.value);
        assert!(!dropped.load(Ordering::Relaxed));
        mem::drop(p2);
        assert!(dropped.load(Ordering::Relaxed));
    }
    // Dropping the queue must drop any items still inside it.
    #[test]
    fn lost_items_are_dropped() {
        let q = SpmcConcurrentQueue::<Payload>::with_capacity(2);
        let dropped = Arc::new(AtomicBool::new(false));
        let p = Payload { value: 67, dropped: dropped.clone() };
        assert_eq!(None, q.offer(p));
        assert_eq!(1, q.size());
        assert!(!dropped.load(Ordering::Relaxed));
        mem::drop(q);
        assert!(dropped.load(Ordering::Relaxed));
    }
    // Stress test: one producer, one consumer; the consumer asserts it
    // sees 0..REPETITIONS in order (FIFO, no loss, no duplication).
    #[test]
    fn two_threads_can_add_and_remove() {
        const REPETITIONS: u64 = 5 * 1000 * 1000;
        let q = SpmcConcurrentQueue::<u64>::with_capacity(1024);
        let barrier = Arc::new(Barrier::new(2));
        let cb = barrier.clone();
        let cq = q.clone();
        let c = thread::spawn(move|| {
            cb.wait();
            for i in 0..REPETITIONS {
                let mut opt: Option<u64>;
                // do/while idiom: spin (with yields) until a value arrives.
                while {
                    opt = cq.poll();
                    opt.is_none()
                } {
                    thread::yield_now();
                }
                assert_eq!(i, opt.unwrap());
            }
        });
        let pc = barrier.clone();
        let pq = q.clone();
        let p = thread::spawn(move|| {
            pc.wait();
            for i in 0..REPETITIONS {
                // Retry while the queue is full.
                while pq.offer(i).is_some() {
                    thread::yield_now();
                }
            }
        });
        c.join().unwrap();
        p.join().unwrap();
    }
    // Stress test with two competing consumers: merged output, once
    // sorted, must be exactly 0..REPETITIONS (each value consumed once).
    #[test]
    fn one_producer_two_consumers() {
        const REPETITIONS: usize = 5 * 1000 * 1000;
        let q = SpmcConcurrentQueue::<usize>::with_capacity(1024);
        let barrier = Arc::new(Barrier::new(3));
        let done = Arc::new(AtomicBool::new(false));
        let mut consumers = Vec::<thread::JoinHandle<Vec<usize>>>::with_capacity(2);
        for _ in 0..2 {
            let barrier = barrier.clone();
            let q = q.clone();
            let done= done.clone();
            consumers.push(thread::spawn(move|| {
                let mut vals = Vec::<usize>::with_capacity(REPETITIONS);
                barrier.wait();
                loop {
                    match q.poll() {
                        Some(val) => vals.push(val),
                        None => {
                            // Stop only when the producer is done AND the
                            // queue has been drained.
                            if done.load(Ordering::Acquire) && q.is_empty() {
                                break;
                            }
                            thread::yield_now();
                        }
                    }
                }
                vals
            }));
        }
        thread::spawn(move|| {
            barrier.wait();
            for i in 0..REPETITIONS {
                while q.offer(i).is_some() {
                    thread::yield_now();
                }
            }
            done.store(true, Ordering::Release);
        }).join().unwrap();
        let mut all_vals = Vec::<usize>::with_capacity(REPETITIONS);
        for c in consumers {
            all_vals.extend(&c.join().unwrap());
        }
        all_vals.sort();
        for i in 0..REPETITIONS {
            assert_eq!(i, *all_vals.get(i).unwrap());
        }
    }
}
|
extern crate diesel;
extern crate serde;
extern crate serde_json;
use serde_json::Value;
pub mod datasource;
pub mod deserializers;
pub mod modules;
pub mod nodes;
pub mod operations;
pub mod results;
pub mod rules;
pub mod schema;
pub mod visitor;
use decisionengine::datasource::DecisionDataset;
use decisionengine::modules::PassAllModule;
use decisionengine::schema::decision_strategy;
use decisionengine::visitor::DecisionTreeVisitor;
use diesel::pg::PgConnection;
use diesel::prelude::*;
use std::fs::File;
use std::io::prelude::*;
/// Outcome of evaluating a decision strategy: accept or reject the input.
#[derive(PartialEq, Clone, Deserialize, Serialize)]
pub enum EvalResult {
    Accept,
    Reject,
}
/// A decision-tree node that can be evaluated against a dataset and
/// traversed by a `DecisionTreeVisitor`.
pub trait Evaluatable {
    // Evaluates this node against `input`, producing Accept or Reject.
    fn eval(&mut self, input: &mut DecisionDataset) -> EvalResult;
    // Visitor-pattern hook over the decision tree.
    fn accept<V: DecisionTreeVisitor>(&mut self, visitor: &mut V);
}
/// A decision-strategy row loaded from the database (Diesel `Queryable`;
/// field order must match the `decision_strategy` table's column order).
#[derive(Queryable)]
pub struct DecisionStrategy {
    // Primary key of the row.
    decision_strategy_id: i32,
    // The strategy definition itself, stored as JSON.
    decision_strategy_json: serde_json::Value,
}
impl DecisionStrategy {
pub fn from_id(id: i32, connection: &PgConnection) -> Self {
use decisionengine::schema::decision_strategy::dsl::*;
let ds: DecisionStrategy = decision_strategy
.find(id)
.first::<DecisionStrategy>(connection)
.expect("Error loading decision strategy");
ds
}
pub fn decision_strategy_id(&self) -> i32 {
self.decision_strategy_id
}
pub fn get_module(&self) -> Box<PassAllModule> {
Box::from(self::modules::deserialize_module(
&self.decision_strategy_json,
))
}
pub fn create(json: Value, connection: &PgConnection) -> DecisionStrategy {
use decisionengine::schema::decision_strategy;
let new_decision_strategy = NewDecisionStrategy {
decision_strategy_json: json,
};
diesel::insert_into(decision_strategy::table)
.values(&new_decision_strategy)
.get_result(connection)
.expect("Error saving new post")
}
}
// Insert-side counterpart of `DecisionStrategy`: omits the primary key so
// the database can assign it.
#[derive(Insertable)]
#[table_name = "decision_strategy"]
struct NewDecisionStrategy {
    decision_strategy_json: serde_json::Value,
}
/// Loads decision strategies from serialized JSON files.
pub struct DecisionEngine {}
impl DecisionEngine {
    /// Reads a JSON decision strategy from `file` and deserializes its
    /// root module.
    ///
    /// Panics if the file cannot be read or contains malformed JSON.
    pub fn from_file(file: &mut File) -> Box<PassAllModule> {
        let mut serialized_decision_strategy = String::new();
        file.read_to_string(&mut serialized_decision_strategy)
            .expect("Something went wrong while reading the decision_strategy file");
        let decision_module_json: Value = match serde_json::from_str(&serialized_decision_strategy)
        {
            Ok(json) => json,
            // `panic!(format!(..))` allocated an intermediate String and is
            // flagged by clippy; `panic!` takes a format string directly.
            Err(error) => panic!("Malformed JSON: {}", error),
        };
        Box::from(self::modules::deserialize_module(&decision_module_json))
    }
}
pub fn create() {}
|
//! LEGO EV3 color sensor.
use super::{Sensor, SensorPort};
use crate::{sensor_mode, Attribute, Device, Driver, Ev3Error, Ev3Result};
/// LEGO EV3 color sensor.
#[derive(Debug, Clone, Device, Sensor)]
pub struct ColorSensor {
    // Underlying device driver handle; attribute reads/writes go through
    // this (the `Device`/`Sensor` derives generate the accessors).
    driver: Driver,
}
impl ColorSensor {
    // Private constructor; instances are obtained through the
    // `findable!`-generated lookup functions below.
    fn new(driver: Driver) -> Self {
        Self { driver }
    }
    findable!(
        "lego-sensor",
        ["lego-ev3-color"],
        SensorPort,
        "ColorSensor",
        "in"
    );
    // Each `sensor_mode!` invocation generates the mode-string constant
    // plus matching `set_mode_*` / `is_mode_*` helpers.
    sensor_mode!(
        "COL-REFLECT",
        MODE_COL_REFLECT,
        "Reflected light - sets LED color to red",
        set_mode_col_reflect,
        is_mode_col_reflect
    );
    sensor_mode!(
        "COL-AMBIENT",
        MODE_COL_AMBIENT,
        "Ambient light - sets LED color to blue (dimly lit)",
        set_mode_col_ambient,
        is_mode_col_ambient
    );
    sensor_mode!(
        "COL-COLOR",
        MODE_COL_COLOR,
        "Color - sets LED color to white (all LEDs rapidly cycling)",
        set_mode_col_color,
        is_mode_col_color
    );
    sensor_mode!(
        "REF-RAW",
        MODE_REF_RAW,
        "Raw Reflected - sets LED color to red",
        set_mode_ref_raw,
        is_mode_ref_raw
    );
    sensor_mode!(
        "RGB-RAW",
        MODE_RGB_RAW,
        "Raw Color Components - sets LED color to white (all LEDs rapidly cycling)",
        set_mode_rgb_raw,
        is_mode_rgb_raw
    );
    sensor_mode!(
        "COL-CAL",
        MODE_COL_CAL,
        "Calibration ??? - sets LED color to red, flashing every 4 seconds, then goes continuous",
        set_mode_col_cal,
        is_mode_col_cal
    );
    /// Get the color value for the modes `COL-REFLECT`, `COL-AMBIENT`, `COL-COLOR` and `REF-RAW`.
    pub fn get_color(&self) -> Ev3Result<i32> {
        self.get_value0()
    }
    /// Red component of the detected color, in the range 0-1020.
    ///
    /// NOTE(review): reads `value0`, the same attribute as `get_color` —
    /// meaningful only while the sensor is in `RGB-RAW` mode; confirm
    /// against the ev3dev sensor documentation.
    pub fn get_red(&self) -> Ev3Result<i32> {
        self.get_value0()
    }
    /// Green component of the detected color, in the range 0-1020.
    pub fn get_green(&self) -> Ev3Result<i32> {
        self.get_value1()
    }
    /// Blue component of the detected color, in the range 0-1020.
    pub fn get_blue(&self) -> Ev3Result<i32> {
        self.get_value2()
    }
    /// Red, green and blue components of the detected color, each in the range 0-1020
    pub fn get_rgb(&self) -> Ev3Result<(i32, i32, i32)> {
        let red = self.get_red()?;
        let green = self.get_green()?;
        let blue = self.get_blue()?;
        Ok((red, green, blue))
    }
}
|
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::ClientEvent;
use faas_api::{FunctionCall, ProtocolMessage};
use failure::_core::task::{Context, Poll};
use fluence_libp2p::generate_swarm_event_type;
use libp2p::core::connection::{ConnectedPoint, ConnectionId};
use libp2p::core::either::EitherOutput;
use libp2p::ping::{Ping, PingConfig, PingResult};
use libp2p::swarm::{
IntoProtocolsHandler, IntoProtocolsHandlerSelect, NetworkBehaviour, NetworkBehaviourAction,
NotifyHandler, OneShotHandler, PollParameters,
};
use libp2p::PeerId;
use parity_multiaddr::Multiaddr;
use std::collections::VecDeque;
use std::error::Error;
// Swarm event type for this behaviour, expanded by the
// `generate_swarm_event_type!` macro (defined in `fluence_libp2p`).
pub type SwarmEventType = generate_swarm_event_type!(ClientBehaviour);
/// Client-side network behaviour: queues outgoing events for the swarm
/// and keeps connections alive with a ping sub-behaviour.
pub struct ClientBehaviour {
    // Events waiting to be handed to the swarm on the next `poll`.
    events: VecDeque<SwarmEventType>,
    ping: Ping,
}
impl Default for ClientBehaviour {
    /// Builds a behaviour with an empty event queue and a keep-alive ping.
    fn default() -> Self {
        Self {
            events: VecDeque::new(),
            ping: Ping::new(PingConfig::new().with_keep_alive(true)),
        }
    }
}
impl ClientBehaviour {
    /// Queues a `FunctionCall` for delivery to `peer_id` through any
    /// available connection handler.
    pub fn call(&mut self, peer_id: PeerId, call: FunctionCall) {
        let notify = NetworkBehaviourAction::NotifyHandler {
            peer_id,
            handler: NotifyHandler::Any,
            event: EitherOutput::First(ProtocolMessage::FunctionCall(call)),
        };
        self.events.push_back(notify)
    }
}
impl NetworkBehaviour for ClientBehaviour {
    // One-shot handler for the ProtocolMessage exchange, combined with the
    // ping protocol's handler; their outputs arrive as `EitherOutput`.
    type ProtocolsHandler = IntoProtocolsHandlerSelect<
        OneShotHandler<ProtocolMessage, ProtocolMessage, ProtocolMessage>,
        <Ping as NetworkBehaviour>::ProtocolsHandler,
    >;
    type OutEvent = ClientEvent;
    fn new_handler(&mut self) -> Self::ProtocolsHandler {
        IntoProtocolsHandler::select(Default::default(), self.ping.new_handler())
    }
    // The client does not track addresses for peers.
    fn addresses_of_peer(&mut self, _: &PeerId) -> Vec<Multiaddr> {
        vec![]
    }
    fn inject_connected(&mut self, _: &PeerId) {}
    fn inject_disconnected(&mut self, _: &PeerId) {}
    fn inject_connection_established(
        &mut self,
        peer_id: &PeerId,
        _: &ConnectionId,
        cp: &ConnectedPoint,
    ) {
        // Pick the remote address: the dialed address when we initiated;
        // for an (unexpected) inbound connection, the sender's address.
        let multiaddr = match cp {
            ConnectedPoint::Dialer { address } => address,
            ConnectedPoint::Listener {
                send_back_addr,
                local_addr,
            } => {
                log::warn!(
                    "Someone connected to the client at {:?}. That's strange. {} @ {:?}",
                    local_addr,
                    peer_id,
                    send_back_addr
                );
                send_back_addr
            }
        };
        // Surface the new connection to the swarm's event consumer.
        self.events.push_back(NetworkBehaviourAction::GenerateEvent(
            ClientEvent::NewConnection {
                peer_id: peer_id.clone(),
                multiaddr: multiaddr.clone(),
            },
        ))
    }
    fn inject_connection_closed(
        &mut self,
        peer_id: &PeerId,
        _: &ConnectionId,
        cp: &ConnectedPoint,
    ) {
        match cp {
            // Outbound connection dropped: schedule a re-dial of the same
            // address.
            ConnectedPoint::Dialer { address } => {
                log::warn!(
                    "Disconnected from {} @ {:?}, reconnecting",
                    peer_id,
                    address
                );
                self.events.push_back(NetworkBehaviourAction::DialAddress {
                    address: address.clone(),
                });
            }
            // Inbound connections are not re-established.
            ConnectedPoint::Listener {
                send_back_addr,
                local_addr,
            } => {
                log::warn!(
                    "Peer {} @ {:?} disconnected, was connected to {:?}, won't reconnect",
                    peer_id,
                    send_back_addr,
                    local_addr
                );
            }
        }
    }
    fn inject_event(
        &mut self,
        peer_id: PeerId,
        cid: ConnectionId,
        event: EitherOutput<ProtocolMessage, PingResult>,
    ) {
        match event {
            // Incoming function call: forward it to the swarm consumer.
            EitherOutput::First(ProtocolMessage::FunctionCall(call)) => self.events.push_back(
                NetworkBehaviourAction::GenerateEvent(ClientEvent::FunctionCall {
                    call,
                    sender: peer_id,
                }),
            ),
            // Ping results are delegated to the inner ping behaviour.
            EitherOutput::Second(ping) => self.ping.inject_event(peer_id, cid, ping),
            // Upgrade messages are explicitly ignored here.
            EitherOutput::First(ProtocolMessage::Upgrade) => {}
        }
    }
    fn inject_addr_reach_failure(
        &mut self,
        _: Option<&PeerId>,
        addr: &Multiaddr,
        error: &dyn Error,
    ) {
        // Keep retrying the same address until the dial succeeds.
        log::warn!("Failed to connect to {:?}: {:?}, reconnecting", addr, error);
        self.events.push_back(NetworkBehaviourAction::DialAddress {
            address: addr.clone(),
        });
    }
    fn poll(
        &mut self,
        cx: &mut Context<'_>,
        params: &mut impl PollParameters,
    ) -> Poll<SwarmEventType> {
        // just polling it to the end
        while let Poll::Ready(_) = self.ping.poll(cx, params) {}
        // Drain queued events one per poll; Pending when there is nothing
        // left to report.
        if let Some(event) = self.events.pop_front() {
            return Poll::Ready(event);
        }
        Poll::Pending
    }
}
|
use crate::error::Error;
use crate::player::PlayerName;
use serde::Serialize;
/// Serializable envelope sent to clients; `#[serde(flatten)]` inlines the
/// tagged [`ResponseE`] payload at the top level of the serialized map.
#[derive(Serialize)]
pub struct Response {
    // Inner payload; constructed via `Response::message` or `From<Error>`.
    #[serde(flatten)]
    resp: ResponseE,
}
impl Response {
    /// Builds a message response; `src` is `None` when the message has no
    /// attributed player sender.
    pub fn message(src: Option<PlayerName>, text: String) -> Self {
        Self {
            resp: ResponseE::Message { from: src, text },
        }
    }
}
impl From<Error> for Response {
fn from(e: Error) -> Self {
Response { resp: e.into() }
}
}
/// Wire-level payload of a [`Response`]; serialized internally tagged with a
/// `"type"` field whose value is the variant name.
#[derive(Debug, Serialize)]
#[serde(tag = "type")]
pub enum ResponseE {
    /// A text message with an optional sender (see `Response::message`).
    Message {
        from: Option<PlayerName>,
        text: String,
    },
    /// An error forwarded to the client.
    Error(Error),
}
impl From<Error> for ResponseE {
fn from(e: Error) -> Self {
ResponseE::Error(e)
}
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
/// Opaque Host Compute Service callback handle, represented as a
/// pointer-sized integer (`#[repr(transparent)]` over `isize`); its
/// `Default` is the all-zero value.
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy, :: core :: fmt :: Debug, :: core :: cmp :: PartialEq, :: core :: cmp :: Eq)]
#[repr(transparent)]
pub struct HCS_CALLBACK(pub isize);
impl ::core::default::Default for HCS_CALLBACK {
fn default() -> Self {
unsafe { ::core::mem::zeroed() }
}
}
// Marker impls required by the `windows` crate; `type Abi = Self` states the
// value crosses the FFI boundary unchanged.
unsafe impl ::windows::core::Handle for HCS_CALLBACK {}
unsafe impl ::windows::core::Abi for HCS_CALLBACK {
    type Abi = Self;
}
|
// auto generated, do not modify.
// created: Mon Feb 22 23:57:02 2016
// src-file: /QtWidgets/qmenu.h
// dst-file: /src/widgets/qmenu.rs
//
// header block begin =>
#![feature(libc)]
#![feature(core)]
#![feature(collections)]
extern crate libc;
use self::libc::*;
// <= header block end
// main block begin =>
// <= main block end
// use block begin =>
use super::qwidget::*; // 773
use std::ops::Deref;
use super::qaction::*; // 773
use super::super::gui::qicon::*; // 771
use super::super::core::qstring::*; // 771
use super::super::core::qobjectdefs::*; // 771
// use super::qplatformmenu::*; // 775
use super::super::core::qpoint::*; // 771
use super::super::core::qobject::*; // 771
use super::super::gui::qkeysequence::*; // 771
use super::super::core::qrect::*; // 771
use super::super::core::qsize::*; // 771
// <= use block end
// ext block begin =>
// #[link(name = "Qt5Core")]
// #[link(name = "Qt5Gui")]
// #[link(name = "Qt5Widgets")]
// #[link(name = "QtInline")]
// C shims wrapping the C++ `QMenu` API.  Object handles (`qthis`, returned
// instances) travel as raw `u64` / `*mut c_void` values; ownership of the
// underlying Qt objects stays on the C++ side.
extern {
    fn QMenu_Class_Size() -> c_int;
    // proto: bool QMenu::isTearOffEnabled();
    fn C_ZNK5QMenu16isTearOffEnabledEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: bool QMenu::toolTipsVisible();
    fn C_ZNK5QMenu15toolTipsVisibleEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: QAction * QMenu::menuAction();
    fn C_ZNK5QMenu10menuActionEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAction * QMenu::addAction(const QIcon & icon, const QString & text);
    fn C_ZN5QMenu9addActionERK5QIconRK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void) -> *mut c_void;
    // proto: void QMenu::setTearOffEnabled(bool );
    fn C_ZN5QMenu17setTearOffEnabledEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
    // proto: QAction * QMenu::addSection(const QString & text);
    fn C_ZN5QMenu10addSectionERK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: const QMetaObject * QMenu::metaObject();
    fn C_ZNK5QMenu10metaObjectEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: void QMenu::clear();
    fn C_ZN5QMenu5clearEv(qthis: u64 /* *mut c_void*/);
    // proto: QAction * QMenu::insertMenu(QAction * before, QMenu * menu);
    fn C_ZN5QMenu10insertMenuEP7QActionPS_(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void) -> *mut c_void;
    // proto: QIcon QMenu::icon();
    fn C_ZNK5QMenu4iconEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAction * QMenu::insertSection(QAction * before, const QString & text);
    fn C_ZN5QMenu13insertSectionEP7QActionRK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void) -> *mut c_void;
    // proto: QPlatformMenu * QMenu::platformMenu();
    fn C_ZN5QMenu12platformMenuEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: void QMenu::setNoReplayFor(QWidget * widget);
    fn C_ZN5QMenu14setNoReplayForEP7QWidget(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
    // proto: void QMenu::setIcon(const QIcon & icon);
    fn C_ZN5QMenu7setIconERK5QIcon(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
    // proto: QAction * QMenu::exec(const QPoint & pos, QAction * at);
    fn C_ZN5QMenu4execERK6QPointP7QAction(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void) -> *mut c_void;
    // proto: bool QMenu::separatorsCollapsible();
    fn C_ZNK5QMenu21separatorsCollapsibleEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: QMenu * QMenu::addMenu(const QString & title);
    fn C_ZN5QMenu7addMenuERK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: QAction * QMenu::addSeparator();
    fn C_ZN5QMenu12addSeparatorEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: void QMenu::hideTearOffMenu();
    fn C_ZN5QMenu15hideTearOffMenuEv(qthis: u64 /* *mut c_void*/);
    // proto: QAction * QMenu::addAction(const QString & text, const QObject * receiver, const char * member, const QKeySequence & shortcut);
    fn C_ZN5QMenu9addActionERK7QStringPK7QObjectPKcRK12QKeySequence(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void, arg2: *mut c_char, arg3: *mut c_void) -> *mut c_void;
    // proto: void QMenu::QMenu(QWidget * parent);
    fn C_ZN5QMenuC2EP7QWidget(arg0: *mut c_void) -> u64;
    // proto: void QMenu::setActiveAction(QAction * act);
    fn C_ZN5QMenu15setActiveActionEP7QAction(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
    // proto: void QMenu::setSeparatorsCollapsible(bool collapse);
    fn C_ZN5QMenu24setSeparatorsCollapsibleEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
    // proto: QAction * QMenu::addAction(const QString & text);
    fn C_ZN5QMenu9addActionERK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: QAction * QMenu::activeAction();
    fn C_ZNK5QMenu12activeActionEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: bool QMenu::isEmpty();
    fn C_ZNK5QMenu7isEmptyEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: QAction * QMenu::addAction(const QIcon & icon, const QString & text, const QObject * receiver, const char * member, const QKeySequence & shortcut);
    fn C_ZN5QMenu9addActionERK5QIconRK7QStringPK7QObjectPKcRK12QKeySequence(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void, arg2: *mut c_void, arg3: *mut c_char, arg4: *mut c_void) -> *mut c_void;
    // proto: QRect QMenu::actionGeometry(QAction * );
    fn C_ZNK5QMenu14actionGeometryEP7QAction(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: void QMenu::QMenu(const QString & title, QWidget * parent);
    fn C_ZN5QMenuC2ERK7QStringP7QWidget(arg0: *mut c_void, arg1: *mut c_void) -> u64;
    // proto: QAction * QMenu::insertSeparator(QAction * before);
    fn C_ZN5QMenu15insertSeparatorEP7QAction(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: QAction * QMenu::addSection(const QIcon & icon, const QString & text);
    fn C_ZN5QMenu10addSectionERK5QIconRK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void) -> *mut c_void;
    // proto: bool QMenu::isTearOffMenuVisible();
    fn C_ZNK5QMenu20isTearOffMenuVisibleEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: void QMenu::~QMenu();
    fn C_ZN5QMenuD2Ev(qthis: u64 /* *mut c_void*/);
    // proto: QString QMenu::title();
    fn C_ZNK5QMenu5titleEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAction * QMenu::defaultAction();
    fn C_ZNK5QMenu13defaultActionEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAction * QMenu::addMenu(QMenu * menu);
    fn C_ZN5QMenu7addMenuEPS_(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: QSize QMenu::sizeHint();
    fn C_ZNK5QMenu8sizeHintEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: void QMenu::setDefaultAction(QAction * );
    fn C_ZN5QMenu16setDefaultActionEP7QAction(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
    // proto: QAction * QMenu::actionAt(const QPoint & );
    fn C_ZNK5QMenu8actionAtERK6QPoint(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
    // proto: QAction * QMenu::insertSection(QAction * before, const QIcon & icon, const QString & text);
    fn C_ZN5QMenu13insertSectionEP7QActionRK5QIconRK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void, arg2: *mut c_void) -> *mut c_void;
    // proto: void QMenu::popup(const QPoint & pos, QAction * at);
    fn C_ZN5QMenu5popupERK6QPointP7QAction(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void);
    // proto: void QMenu::setToolTipsVisible(bool visible);
    fn C_ZN5QMenu18setToolTipsVisibleEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
    // proto: void QMenu::setTitle(const QString & title);
    fn C_ZN5QMenu8setTitleERK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
    // proto: QMenu * QMenu::addMenu(const QIcon & icon, const QString & title);
    fn C_ZN5QMenu7addMenuERK5QIconRK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void) -> *mut c_void;
    // proto: QAction * QMenu::exec();
    fn C_ZN5QMenu4execEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // Signal->slot bridge shims: connect a Rust callback to the named Qt signal.
    fn QMenu_SlotProxy_connect__ZN5QMenu11aboutToShowEv(qthis: *mut c_void, ffifptr: *mut c_void, rsfptr: *mut c_void);
    fn QMenu_SlotProxy_connect__ZN5QMenu7hoveredEP7QAction(qthis: *mut c_void, ffifptr: *mut c_void, rsfptr: *mut c_void);
    fn QMenu_SlotProxy_connect__ZN5QMenu11aboutToHideEv(qthis: *mut c_void, ffifptr: *mut c_void, rsfptr: *mut c_void);
    fn QMenu_SlotProxy_connect__ZN5QMenu9triggeredEP7QAction(qthis: *mut c_void, ffifptr: *mut c_void, rsfptr: *mut c_void);
} // <= ext block end
// body block begin =>
// class sizeof(QMenu)=1
/// Rust-side wrapper for the C++ `QMenu` class.
#[derive(Default)]
pub struct QMenu {
    // Embedded base-class wrapper; exposed via the `Deref`/`AsRef` impls
    // below to emulate C++ inheritance from QWidget.
    qbase: QWidget,
    // Raw C++ `QMenu*` handle, stored as u64 and passed to every FFI shim.
    pub qclsinst: u64 /* *mut c_void*/,
    // Signal wrapper fields (types defined elsewhere in this module);
    // presumably paired with the `QMenu_SlotProxy_connect_*` shims — confirm.
    pub _aboutToShow: QMenu_aboutToShow_signal,
    pub _triggered: QMenu_triggered_signal,
    pub _hovered: QMenu_hovered_signal,
    pub _aboutToHide: QMenu_aboutToHide_signal,
}
impl QMenu {
    /// Wraps an existing C++ `QMenu*` handle without constructing a new object.
    pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QMenu {
        QMenu {
            qbase: QWidget::inheritFrom(qthis),
            qclsinst: qthis,
            ..Default::default()
        }
    }
}
impl Deref for QMenu {
    type Target = QWidget;

    /// Emulates C++ inheritance: a `QMenu` usable wherever `&QWidget` is expected.
    fn deref(&self) -> &QWidget {
        &self.qbase
    }
}
impl AsRef<QWidget> for QMenu {
    /// Borrows the embedded QWidget base.
    fn as_ref(&self) -> &QWidget {
        &self.qbase
    }
}
// proto: bool QMenu::isTearOffEnabled();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn isTearOffEnabled<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_isTearOffEnabled<RetType>,
    {
        overload_args.isTearOffEnabled(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::isTearOffEnabled`].
pub trait QMenu_isTearOffEnabled<RetType> {
    fn isTearOffEnabled(self , rsthis: & QMenu) -> RetType;
}
// proto: bool QMenu::isTearOffEnabled();
impl QMenu_isTearOffEnabled<i8> for () {
    /// FFI call to `QMenu::isTearOffEnabled()`; the C++ `bool` arrives as a `c_char`.
    fn isTearOffEnabled(self, rsthis: &QMenu) -> i8 {
        let raw = unsafe { C_ZNK5QMenu16isTearOffEnabledEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: bool QMenu::toolTipsVisible();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn toolTipsVisible<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_toolTipsVisible<RetType>,
    {
        overload_args.toolTipsVisible(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::toolTipsVisible`].
pub trait QMenu_toolTipsVisible<RetType> {
    fn toolTipsVisible(self , rsthis: & QMenu) -> RetType;
}
// proto: bool QMenu::toolTipsVisible();
impl QMenu_toolTipsVisible<i8> for () {
    /// FFI call to `QMenu::toolTipsVisible()`; the C++ `bool` arrives as a `c_char`.
    fn toolTipsVisible(self, rsthis: &QMenu) -> i8 {
        let raw = unsafe { C_ZNK5QMenu15toolTipsVisibleEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: QAction * QMenu::menuAction();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn menuAction<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_menuAction<RetType>,
    {
        overload_args.menuAction(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::menuAction`].
pub trait QMenu_menuAction<RetType> {
    fn menuAction(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::menuAction();
impl QMenu_menuAction<QAction> for () {
    /// FFI call to `QMenu::menuAction()`; wraps the returned `QAction*` handle.
    fn menuAction(self, rsthis: &QMenu) -> QAction {
        let handle = unsafe { C_ZNK5QMenu10menuActionEv(rsthis.qclsinst) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: QAction * QMenu::addAction(const QIcon & icon, const QString & text);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn addAction<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_addAction<RetType>,
    {
        overload_args.addAction(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::addAction`].
pub trait QMenu_addAction<RetType> {
    fn addAction(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::addAction(const QIcon & icon, const QString & text);
impl<'a> QMenu_addAction<QAction> for (&'a QIcon, &'a QString) {
    /// FFI call to `QMenu::addAction(icon, text)`.
    fn addAction(self, rsthis: &QMenu) -> QAction {
        let icon = self.0.qclsinst as *mut c_void;
        let text = self.1.qclsinst as *mut c_void;
        let handle =
            unsafe { C_ZN5QMenu9addActionERK5QIconRK7QString(rsthis.qclsinst, icon, text) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: void QMenu::setTearOffEnabled(bool );
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn setTearOffEnabled<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_setTearOffEnabled<RetType>,
    {
        overload_args.setTearOffEnabled(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::setTearOffEnabled`].
pub trait QMenu_setTearOffEnabled<RetType> {
    fn setTearOffEnabled(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setTearOffEnabled(bool );
impl<'a> /*trait*/ QMenu_setTearOffEnabled<()> for (i8) {
fn setTearOffEnabled(self , rsthis: & QMenu) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN5QMenu17setTearOffEnabledEb()};
let arg0 = self as c_char;
unsafe {C_ZN5QMenu17setTearOffEnabledEb(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: QAction * QMenu::addSection(const QString & text);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn addSection<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_addSection<RetType>,
    {
        overload_args.addSection(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::addSection`].
pub trait QMenu_addSection<RetType> {
    fn addSection(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::addSection(const QString & text);
impl<'a> QMenu_addSection<QAction> for (&'a QString) {
    /// FFI call to `QMenu::addSection(text)`.
    fn addSection(self, rsthis: &QMenu) -> QAction {
        let text = self.qclsinst as *mut c_void;
        let handle = unsafe { C_ZN5QMenu10addSectionERK7QString(rsthis.qclsinst, text) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: const QMetaObject * QMenu::metaObject();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn metaObject<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_metaObject<RetType>,
    {
        overload_args.metaObject(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::metaObject`].
pub trait QMenu_metaObject<RetType> {
    fn metaObject(self , rsthis: & QMenu) -> RetType;
}
// proto: const QMetaObject * QMenu::metaObject();
impl QMenu_metaObject<QMetaObject> for () {
    /// FFI call to `QMenu::metaObject()`; wraps the returned `QMetaObject*` handle.
    fn metaObject(self, rsthis: &QMenu) -> QMetaObject {
        let handle = unsafe { C_ZNK5QMenu10metaObjectEv(rsthis.qclsinst) };
        QMetaObject::inheritFrom(handle as u64)
    }
}
// proto: void QMenu::clear();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn clear<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_clear<RetType>,
    {
        overload_args.clear(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::clear`].
pub trait QMenu_clear<RetType> {
    fn clear(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::clear();
impl QMenu_clear<()> for () {
    /// FFI call to `QMenu::clear()`.
    fn clear(self, rsthis: &QMenu) {
        unsafe { C_ZN5QMenu5clearEv(rsthis.qclsinst) };
    }
}
// proto: QAction * QMenu::insertMenu(QAction * before, QMenu * menu);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn insertMenu<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_insertMenu<RetType>,
    {
        overload_args.insertMenu(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::insertMenu`].
pub trait QMenu_insertMenu<RetType> {
    fn insertMenu(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::insertMenu(QAction * before, QMenu * menu);
impl<'a> QMenu_insertMenu<QAction> for (&'a QAction, &'a QMenu) {
    /// FFI call to `QMenu::insertMenu(before, menu)`.
    fn insertMenu(self, rsthis: &QMenu) -> QAction {
        let before = self.0.qclsinst as *mut c_void;
        let menu = self.1.qclsinst as *mut c_void;
        let handle =
            unsafe { C_ZN5QMenu10insertMenuEP7QActionPS_(rsthis.qclsinst, before, menu) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: QIcon QMenu::icon();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn icon<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_icon<RetType>,
    {
        overload_args.icon(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::icon`].
pub trait QMenu_icon<RetType> {
    fn icon(self , rsthis: & QMenu) -> RetType;
}
// proto: QIcon QMenu::icon();
impl QMenu_icon<QIcon> for () {
    /// FFI call to `QMenu::icon()`; wraps the returned handle.
    fn icon(self, rsthis: &QMenu) -> QIcon {
        let handle = unsafe { C_ZNK5QMenu4iconEv(rsthis.qclsinst) };
        QIcon::inheritFrom(handle as u64)
    }
}
// proto: QAction * QMenu::insertSection(QAction * before, const QString & text);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn insertSection<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_insertSection<RetType>,
    {
        overload_args.insertSection(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::insertSection`].
pub trait QMenu_insertSection<RetType> {
    fn insertSection(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::insertSection(QAction * before, const QString & text);
impl<'a> QMenu_insertSection<QAction> for (&'a QAction, &'a QString) {
    /// FFI call to `QMenu::insertSection(before, text)`.
    fn insertSection(self, rsthis: &QMenu) -> QAction {
        let before = self.0.qclsinst as *mut c_void;
        let text = self.1.qclsinst as *mut c_void;
        let handle = unsafe {
            C_ZN5QMenu13insertSectionEP7QActionRK7QString(rsthis.qclsinst, before, text)
        };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: QPlatformMenu * QMenu::platformMenu();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn platformMenu<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_platformMenu<RetType>,
    {
        overload_args.platformMenu(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::platformMenu`].
pub trait QMenu_platformMenu<RetType> {
    fn platformMenu(self , rsthis: & QMenu) -> RetType;
}
// proto: QPlatformMenu * QMenu::platformMenu();
impl QMenu_platformMenu<u64> for () {
    /// FFI call to `QMenu::platformMenu()`; returned as a raw handle because
    /// no `QPlatformMenu` wrapper is imported here (its `use` is commented out).
    fn platformMenu(self, rsthis: &QMenu) -> u64 {
        let handle = unsafe { C_ZN5QMenu12platformMenuEv(rsthis.qclsinst) };
        handle as u64
    }
}
// proto: void QMenu::setNoReplayFor(QWidget * widget);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn setNoReplayFor<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_setNoReplayFor<RetType>,
    {
        overload_args.setNoReplayFor(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::setNoReplayFor`].
pub trait QMenu_setNoReplayFor<RetType> {
    fn setNoReplayFor(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setNoReplayFor(QWidget * widget);
impl<'a> QMenu_setNoReplayFor<()> for (&'a QWidget) {
    /// FFI call to `QMenu::setNoReplayFor(widget)`.
    fn setNoReplayFor(self, rsthis: &QMenu) {
        let widget = self.qclsinst as *mut c_void;
        unsafe { C_ZN5QMenu14setNoReplayForEP7QWidget(rsthis.qclsinst, widget) };
    }
}
// proto: void QMenu::setIcon(const QIcon & icon);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn setIcon<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_setIcon<RetType>,
    {
        overload_args.setIcon(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::setIcon`].
pub trait QMenu_setIcon<RetType> {
    fn setIcon(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setIcon(const QIcon & icon);
impl<'a> QMenu_setIcon<()> for (&'a QIcon) {
    /// FFI call to `QMenu::setIcon(icon)`.
    fn setIcon(self, rsthis: &QMenu) {
        let icon = self.qclsinst as *mut c_void;
        unsafe { C_ZN5QMenu7setIconERK5QIcon(rsthis.qclsinst, icon) };
    }
}
// proto: QAction * QMenu::exec(const QPoint & pos, QAction * at);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn exec<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_exec<RetType>,
    {
        overload_args.exec(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::exec`].
pub trait QMenu_exec<RetType> {
    fn exec(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::exec(const QPoint & pos, QAction * at);
impl<'a> QMenu_exec<QAction> for (&'a QPoint, Option<&'a QAction>) {
    /// FFI call to `QMenu::exec(pos, at)`; a missing `at` is a null `QAction*`.
    fn exec(self, rsthis: &QMenu) -> QAction {
        let pos = self.0.qclsinst as *mut c_void;
        let at = self.1.map_or(0, |a| a.qclsinst) as *mut c_void;
        let handle =
            unsafe { C_ZN5QMenu4execERK6QPointP7QAction(rsthis.qclsinst, pos, at) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: bool QMenu::separatorsCollapsible();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn separatorsCollapsible<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_separatorsCollapsible<RetType>,
    {
        overload_args.separatorsCollapsible(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::separatorsCollapsible`].
pub trait QMenu_separatorsCollapsible<RetType> {
    fn separatorsCollapsible(self , rsthis: & QMenu) -> RetType;
}
// proto: bool QMenu::separatorsCollapsible();
impl QMenu_separatorsCollapsible<i8> for () {
    /// FFI call to `QMenu::separatorsCollapsible()`; the C++ `bool` arrives as a `c_char`.
    fn separatorsCollapsible(self, rsthis: &QMenu) -> i8 {
        let raw = unsafe { C_ZNK5QMenu21separatorsCollapsibleEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: QMenu * QMenu::addMenu(const QString & title);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn addMenu<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_addMenu<RetType>,
    {
        overload_args.addMenu(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::addMenu`].
pub trait QMenu_addMenu<RetType> {
    fn addMenu(self , rsthis: & QMenu) -> RetType;
}
// proto: QMenu * QMenu::addMenu(const QString & title);
impl<'a> QMenu_addMenu<QMenu> for (&'a QString) {
    /// FFI call to `QMenu::addMenu(title)`; wraps the returned `QMenu*` handle.
    fn addMenu(self, rsthis: &QMenu) -> QMenu {
        let title = self.qclsinst as *mut c_void;
        let handle = unsafe { C_ZN5QMenu7addMenuERK7QString(rsthis.qclsinst, title) };
        QMenu::inheritFrom(handle as u64)
    }
}
// proto: QAction * QMenu::addSeparator();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn addSeparator<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_addSeparator<RetType>,
    {
        overload_args.addSeparator(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::addSeparator`].
pub trait QMenu_addSeparator<RetType> {
    fn addSeparator(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::addSeparator();
impl QMenu_addSeparator<QAction> for () {
    /// FFI call to `QMenu::addSeparator()`; wraps the returned `QAction*` handle.
    fn addSeparator(self, rsthis: &QMenu) -> QAction {
        let handle = unsafe { C_ZN5QMenu12addSeparatorEv(rsthis.qclsinst) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: void QMenu::hideTearOffMenu();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn hideTearOffMenu<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_hideTearOffMenu<RetType>,
    {
        overload_args.hideTearOffMenu(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::hideTearOffMenu`].
pub trait QMenu_hideTearOffMenu<RetType> {
    fn hideTearOffMenu(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::hideTearOffMenu();
impl QMenu_hideTearOffMenu<()> for () {
    /// FFI call to `QMenu::hideTearOffMenu()`.
    fn hideTearOffMenu(self, rsthis: &QMenu) {
        unsafe { C_ZN5QMenu15hideTearOffMenuEv(rsthis.qclsinst) };
    }
}
// proto: QAction * QMenu::addAction(const QString & text, const QObject * receiver, const char * member, const QKeySequence & shortcut);
impl<'a> QMenu_addAction<QAction> for (&'a QString, &'a QObject, &'a String, Option<&'a QKeySequence>) {
    /// FFI call to `QMenu::addAction(text, receiver, member, shortcut)`.
    fn addAction(self, rsthis: &QMenu) -> QAction {
        let text = self.0.qclsinst as *mut c_void;
        let receiver = self.1.qclsinst as *mut c_void;
        // NOTE(review): the raw `String` pointer is not guaranteed to be
        // NUL-terminated; callers presumably embed a trailing "\0" in the
        // member string — confirm before relying on this.
        let member = self.2.as_ptr() as *mut c_char;
        let shortcut = self.3.map_or(0, |k| k.qclsinst) as *mut c_void;
        let handle = unsafe {
            C_ZN5QMenu9addActionERK7QStringPK7QObjectPKcRK12QKeySequence(
                rsthis.qclsinst,
                text,
                receiver,
                member,
                shortcut,
            )
        };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: void QMenu::QMenu(QWidget * parent);
impl QMenu {
    /// Constructor entry point; the overload is selected by `value`'s type.
    pub fn new<T: QMenu_new>(value: T) -> QMenu {
        value.new()
    }
}
/// Overload-dispatch trait backing the [`QMenu::new`] constructors.
pub trait QMenu_new {
    fn new(self) -> QMenu;
}
// proto: void QMenu::QMenu(QWidget * parent);
impl<'a> QMenu_new for (Option<&'a QWidget>) {
    /// Constructs a C++ `QMenu` with an optional parent widget and wraps the
    /// returned handle.
    ///
    /// Fix: the generated code also `calloc`ed a `QMenu_Class_Size()`-sized
    /// placeholder buffer (`qthis_ph`) that was never used and never freed —
    /// a memory leak on every construction — so that allocation is removed.
    fn new(self) -> QMenu {
        // Null parent pointer when no parent is supplied.
        let parent = self.map_or(0, |w| w.qclsinst) as *mut c_void;
        let qthis: u64 = unsafe { C_ZN5QMenuC2EP7QWidget(parent) };
        QMenu {
            qbase: QWidget::inheritFrom(qthis),
            qclsinst: qthis,
            ..Default::default()
        }
    }
}
// proto: void QMenu::setActiveAction(QAction * act);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn setActiveAction<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_setActiveAction<RetType>,
    {
        overload_args.setActiveAction(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::setActiveAction`].
pub trait QMenu_setActiveAction<RetType> {
    fn setActiveAction(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setActiveAction(QAction * act);
impl<'a> QMenu_setActiveAction<()> for (&'a QAction) {
    /// FFI call to `QMenu::setActiveAction(act)`.
    fn setActiveAction(self, rsthis: &QMenu) {
        let act = self.qclsinst as *mut c_void;
        unsafe { C_ZN5QMenu15setActiveActionEP7QAction(rsthis.qclsinst, act) };
    }
}
// proto: void QMenu::setSeparatorsCollapsible(bool collapse);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn setSeparatorsCollapsible<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_setSeparatorsCollapsible<RetType>,
    {
        overload_args.setSeparatorsCollapsible(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::setSeparatorsCollapsible`].
pub trait QMenu_setSeparatorsCollapsible<RetType> {
    fn setSeparatorsCollapsible(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setSeparatorsCollapsible(bool collapse);
impl<'a> /*trait*/ QMenu_setSeparatorsCollapsible<()> for (i8) {
fn setSeparatorsCollapsible(self , rsthis: & QMenu) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN5QMenu24setSeparatorsCollapsibleEb()};
let arg0 = self as c_char;
unsafe {C_ZN5QMenu24setSeparatorsCollapsibleEb(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: QAction * QMenu::addAction(const QString & text);
impl<'a> QMenu_addAction<QAction> for (&'a QString) {
    /// FFI call to `QMenu::addAction(text)`.
    fn addAction(self, rsthis: &QMenu) -> QAction {
        let text = self.qclsinst as *mut c_void;
        let handle = unsafe { C_ZN5QMenu9addActionERK7QString(rsthis.qclsinst, text) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: QAction * QMenu::activeAction();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn activeAction<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_activeAction<RetType>,
    {
        overload_args.activeAction(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::activeAction`].
pub trait QMenu_activeAction<RetType> {
    fn activeAction(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::activeAction();
impl QMenu_activeAction<QAction> for () {
    /// FFI call to `QMenu::activeAction()`; wraps the returned `QAction*` handle.
    fn activeAction(self, rsthis: &QMenu) -> QAction {
        let handle = unsafe { C_ZNK5QMenu12activeActionEv(rsthis.qclsinst) };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: bool QMenu::isEmpty();
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn isEmpty<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_isEmpty<RetType>,
    {
        overload_args.isEmpty(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::isEmpty`].
pub trait QMenu_isEmpty<RetType> {
    fn isEmpty(self , rsthis: & QMenu) -> RetType;
}
// proto: bool QMenu::isEmpty();
impl QMenu_isEmpty<i8> for () {
    /// FFI call to `QMenu::isEmpty()`; the C++ `bool` arrives as a `c_char`.
    fn isEmpty(self, rsthis: &QMenu) -> i8 {
        let raw = unsafe { C_ZNK5QMenu7isEmptyEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: QAction * QMenu::addAction(const QIcon & icon, const QString & text, const QObject * receiver, const char * member, const QKeySequence & shortcut);
impl<'a> QMenu_addAction<QAction> for (&'a QIcon, &'a QString, &'a QObject, &'a String, Option<&'a QKeySequence>) {
    /// FFI call to `QMenu::addAction(icon, text, receiver, member, shortcut)`.
    fn addAction(self, rsthis: &QMenu) -> QAction {
        let icon = self.0.qclsinst as *mut c_void;
        let text = self.1.qclsinst as *mut c_void;
        let receiver = self.2.qclsinst as *mut c_void;
        // NOTE(review): the raw `String` pointer is not guaranteed to be
        // NUL-terminated; callers presumably embed a trailing "\0" in the
        // member string — confirm before relying on this.
        let member = self.3.as_ptr() as *mut c_char;
        let shortcut = self.4.map_or(0, |k| k.qclsinst) as *mut c_void;
        let handle = unsafe {
            C_ZN5QMenu9addActionERK5QIconRK7QStringPK7QObjectPKcRK12QKeySequence(
                rsthis.qclsinst,
                icon,
                text,
                receiver,
                member,
                shortcut,
            )
        };
        QAction::inheritFrom(handle as u64)
    }
}
// proto: QRect QMenu::actionGeometry(QAction * );
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn actionGeometry<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_actionGeometry<RetType>,
    {
        overload_args.actionGeometry(self)
    }
}
/// Overload-dispatch trait backing [`QMenu::actionGeometry`].
pub trait QMenu_actionGeometry<RetType> {
    fn actionGeometry(self , rsthis: & QMenu) -> RetType;
}
// proto: QRect QMenu::actionGeometry(QAction * );
impl<'a> QMenu_actionGeometry<QRect> for (&'a QAction) {
    /// FFI call to `QMenu::actionGeometry(action)`; wraps the returned `QRect`.
    fn actionGeometry(self, rsthis: &QMenu) -> QRect {
        let action = self.qclsinst as *mut c_void;
        let handle = unsafe { C_ZNK5QMenu14actionGeometryEP7QAction(rsthis.qclsinst, action) };
        QRect::inheritFrom(handle as u64)
    }
}
// proto: void QMenu::QMenu(const QString & title, QWidget * parent);
impl<'a> QMenu_new for (&'a QString, Option<&'a QWidget>) {
    /// Constructs a C++ `QMenu` with a title and an optional parent widget,
    /// wrapping the returned handle.
    ///
    /// Fix: the generated code also `calloc`ed a `QMenu_Class_Size()`-sized
    /// placeholder buffer (`qthis_ph`) that was never used and never freed —
    /// a memory leak on every construction — so that allocation is removed.
    fn new(self) -> QMenu {
        let title = self.0.qclsinst as *mut c_void;
        // Null parent pointer when no parent is supplied.
        let parent = self.1.map_or(0, |w| w.qclsinst) as *mut c_void;
        let qthis: u64 = unsafe { C_ZN5QMenuC2ERK7QStringP7QWidget(title, parent) };
        QMenu {
            qbase: QWidget::inheritFrom(qthis),
            qclsinst: qthis,
            ..Default::default()
        }
    }
}
// proto: QAction * QMenu::insertSeparator(QAction * before);
impl QMenu {
    /// Overload entry point; the concrete call is chosen by `overload_args`'s type.
    pub fn insertSeparator<RetType, T>(&self, overload_args: T) -> RetType
    where
        T: QMenu_insertSeparator<RetType>,
    {
        overload_args.insertSeparator(self)
    }
}
pub trait QMenu_insertSeparator<RetType> {
fn insertSeparator(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::insertSeparator(QAction * before);
impl<'a> /*trait*/ QMenu_insertSeparator<QAction> for (&'a QAction) {
fn insertSeparator(self , rsthis: & QMenu) -> QAction {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN5QMenu15insertSeparatorEP7QAction()};
let arg0 = self.qclsinst as *mut c_void;
let mut ret = unsafe {C_ZN5QMenu15insertSeparatorEP7QAction(rsthis.qclsinst, arg0)};
let mut ret1 = QAction::inheritFrom(ret as u64);
return ret1;
// return 1;
}
}
// proto: QAction * QMenu::addSection(const QIcon & icon, const QString & text);
impl<'a> /*trait*/ QMenu_addSection<QAction> for (&'a QIcon, &'a QString) {
fn addSection(self , rsthis: & QMenu) -> QAction {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN5QMenu10addSectionERK5QIconRK7QString()};
let arg0 = self.0.qclsinst as *mut c_void;
let arg1 = self.1.qclsinst as *mut c_void;
let mut ret = unsafe {C_ZN5QMenu10addSectionERK5QIconRK7QString(rsthis.qclsinst, arg0, arg1)};
let mut ret1 = QAction::inheritFrom(ret as u64);
return ret1;
// return 1;
}
}
// proto: bool QMenu::isTearOffMenuVisible();
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::isTearOffMenuVisible` to the overload impl for `T`.
    pub fn isTearOffMenuVisible<RetType, T: QMenu_isTearOffMenuVisible<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isTearOffMenuVisible(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::isTearOffMenuVisible`.
pub trait QMenu_isTearOffMenuVisible<RetType> {
    fn isTearOffMenuVisible(self , rsthis: & QMenu) -> RetType;
}
// proto: bool QMenu::isTearOffMenuVisible();
impl<'a> /*trait*/ QMenu_isTearOffMenuVisible<i8> for () {
    fn isTearOffMenuVisible(self , rsthis: & QMenu) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK5QMenu20isTearOffMenuVisibleEv()};
        let mut ret = unsafe {C_ZNK5QMenu20isTearOffMenuVisibleEv(rsthis.qclsinst)};
        // C++ bool surfaces as i8 in this binding generation.
        return ret as i8; // 1
        // return 1;
    }
}
// proto: void QMenu::~QMenu();
impl /*struct*/ QMenu {
    /// Dispatches the destructor (`~QMenu`) to the overload impl for `T`.
    pub fn free<RetType, T: QMenu_free<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.free(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::~QMenu`.
pub trait QMenu_free<RetType> {
    fn free(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::~QMenu();
impl<'a> /*trait*/ QMenu_free<()> for () {
    fn free(self , rsthis: & QMenu) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenuD2Ev()};
        // Runs the C++ destructor on the wrapped instance; the Rust handle is
        // not invalidated here — callers must not use it afterwards.
        unsafe {C_ZN5QMenuD2Ev(rsthis.qclsinst)};
        // return 1;
    }
}
// proto: QString QMenu::title();
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::title` to the overload impl matching `T`.
    pub fn title<RetType, T: QMenu_title<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.title(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::title`.
pub trait QMenu_title<RetType> {
    fn title(self , rsthis: & QMenu) -> RetType;
}
// proto: QString QMenu::title();
impl<'a> /*trait*/ QMenu_title<QString> for () {
    fn title(self , rsthis: & QMenu) -> QString {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK5QMenu5titleEv()};
        let mut ret = unsafe {C_ZNK5QMenu5titleEv(rsthis.qclsinst)};
        let mut ret1 = QString::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAction * QMenu::defaultAction();
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::defaultAction` to the overload impl matching `T`.
    pub fn defaultAction<RetType, T: QMenu_defaultAction<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.defaultAction(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::defaultAction`.
pub trait QMenu_defaultAction<RetType> {
    fn defaultAction(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::defaultAction();
impl<'a> /*trait*/ QMenu_defaultAction<QAction> for () {
    fn defaultAction(self , rsthis: & QMenu) -> QAction {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK5QMenu13defaultActionEv()};
        let mut ret = unsafe {C_ZNK5QMenu13defaultActionEv(rsthis.qclsinst)};
        let mut ret1 = QAction::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAction * QMenu::addMenu(QMenu * menu);
// Overload of `QMenu::addMenu` taking an existing submenu pointer.
impl<'a> /*trait*/ QMenu_addMenu<QAction> for (&'a QMenu) {
    fn addMenu(self , rsthis: & QMenu) -> QAction {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu7addMenuEPS_()};
        let arg0 = self.qclsinst as *mut c_void;
        let mut ret = unsafe {C_ZN5QMenu7addMenuEPS_(rsthis.qclsinst, arg0)};
        let mut ret1 = QAction::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QSize QMenu::sizeHint();
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::sizeHint` to the overload impl matching `T`.
    pub fn sizeHint<RetType, T: QMenu_sizeHint<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.sizeHint(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::sizeHint`.
pub trait QMenu_sizeHint<RetType> {
    fn sizeHint(self , rsthis: & QMenu) -> RetType;
}
// proto: QSize QMenu::sizeHint();
impl<'a> /*trait*/ QMenu_sizeHint<QSize> for () {
    fn sizeHint(self , rsthis: & QMenu) -> QSize {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK5QMenu8sizeHintEv()};
        let mut ret = unsafe {C_ZNK5QMenu8sizeHintEv(rsthis.qclsinst)};
        let mut ret1 = QSize::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: void QMenu::setDefaultAction(QAction * );
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::setDefaultAction` to the overload impl matching `T`.
    pub fn setDefaultAction<RetType, T: QMenu_setDefaultAction<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.setDefaultAction(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::setDefaultAction`.
pub trait QMenu_setDefaultAction<RetType> {
    fn setDefaultAction(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setDefaultAction(QAction * );
impl<'a> /*trait*/ QMenu_setDefaultAction<()> for (&'a QAction) {
    fn setDefaultAction(self , rsthis: & QMenu) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu16setDefaultActionEP7QAction()};
        let arg0 = self.qclsinst as *mut c_void;
        unsafe {C_ZN5QMenu16setDefaultActionEP7QAction(rsthis.qclsinst, arg0)};
        // return 1;
    }
}
// proto: QAction * QMenu::actionAt(const QPoint & );
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::actionAt` to the overload impl matching `T`.
    pub fn actionAt<RetType, T: QMenu_actionAt<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.actionAt(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::actionAt`.
pub trait QMenu_actionAt<RetType> {
    fn actionAt(self , rsthis: & QMenu) -> RetType;
}
// proto: QAction * QMenu::actionAt(const QPoint & );
impl<'a> /*trait*/ QMenu_actionAt<QAction> for (&'a QPoint) {
    fn actionAt(self , rsthis: & QMenu) -> QAction {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK5QMenu8actionAtERK6QPoint()};
        let arg0 = self.qclsinst as *mut c_void;
        let mut ret = unsafe {C_ZNK5QMenu8actionAtERK6QPoint(rsthis.qclsinst, arg0)};
        let mut ret1 = QAction::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAction * QMenu::insertSection(QAction * before, const QIcon & icon, const QString & text);
// Overload impl; the `insertSection` dispatcher lives elsewhere in this
// generated file.
impl<'a> /*trait*/ QMenu_insertSection<QAction> for (&'a QAction, &'a QIcon, &'a QString) {
    fn insertSection(self , rsthis: & QMenu) -> QAction {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu13insertSectionEP7QActionRK5QIconRK7QString()};
        let arg0 = self.0.qclsinst as *mut c_void;
        let arg1 = self.1.qclsinst as *mut c_void;
        let arg2 = self.2.qclsinst as *mut c_void;
        let mut ret = unsafe {C_ZN5QMenu13insertSectionEP7QActionRK5QIconRK7QString(rsthis.qclsinst, arg0, arg1, arg2)};
        let mut ret1 = QAction::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: void QMenu::popup(const QPoint & pos, QAction * at);
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::popup` to the overload impl matching `T`.
    pub fn popup<RetType, T: QMenu_popup<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.popup(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::popup`.
pub trait QMenu_popup<RetType> {
    fn popup(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::popup(const QPoint & pos, QAction * at);
impl<'a> /*trait*/ QMenu_popup<()> for (&'a QPoint, Option<&'a QAction>) {
    fn popup(self , rsthis: & QMenu) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu5popupERK6QPointP7QAction()};
        let arg0 = self.0.qclsinst as *mut c_void;
        // `None` maps to a null `QAction*` on the C++ side.
        let arg1 = (if self.1.is_none() {0} else {self.1.unwrap().qclsinst}) as *mut c_void;
        unsafe {C_ZN5QMenu5popupERK6QPointP7QAction(rsthis.qclsinst, arg0, arg1)};
        // return 1;
    }
}
// proto: void QMenu::setToolTipsVisible(bool visible);
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::setToolTipsVisible` to the overload impl for `T`.
    pub fn setToolTipsVisible<RetType, T: QMenu_setToolTipsVisible<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.setToolTipsVisible(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::setToolTipsVisible`.
pub trait QMenu_setToolTipsVisible<RetType> {
    fn setToolTipsVisible(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setToolTipsVisible(bool visible);
impl<'a> /*trait*/ QMenu_setToolTipsVisible<()> for (i8) {
    fn setToolTipsVisible(self , rsthis: & QMenu) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu18setToolTipsVisibleEb()};
        // C++ bool is marshalled as c_char in this binding generation.
        let arg0 = self as c_char;
        unsafe {C_ZN5QMenu18setToolTipsVisibleEb(rsthis.qclsinst, arg0)};
        // return 1;
    }
}
// proto: void QMenu::setTitle(const QString & title);
impl /*struct*/ QMenu {
    /// Dispatches `QMenu::setTitle` to the overload impl matching `T`.
    pub fn setTitle<RetType, T: QMenu_setTitle<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.setTitle(self);
        // return 1;
    }
}
/// Overload trait for `QMenu::setTitle`.
pub trait QMenu_setTitle<RetType> {
    fn setTitle(self , rsthis: & QMenu) -> RetType;
}
// proto: void QMenu::setTitle(const QString & title);
impl<'a> /*trait*/ QMenu_setTitle<()> for (&'a QString) {
    fn setTitle(self , rsthis: & QMenu) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu8setTitleERK7QString()};
        let arg0 = self.qclsinst as *mut c_void;
        unsafe {C_ZN5QMenu8setTitleERK7QString(rsthis.qclsinst, arg0)};
        // return 1;
    }
}
// proto: QMenu * QMenu::addMenu(const QIcon & icon, const QString & title);
// Overload of `QMenu::addMenu` that creates a new submenu from icon + title.
impl<'a> /*trait*/ QMenu_addMenu<QMenu> for (&'a QIcon, &'a QString) {
    fn addMenu(self , rsthis: & QMenu) -> QMenu {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu7addMenuERK5QIconRK7QString()};
        let arg0 = self.0.qclsinst as *mut c_void;
        let arg1 = self.1.qclsinst as *mut c_void;
        let mut ret = unsafe {C_ZN5QMenu7addMenuERK5QIconRK7QString(rsthis.qclsinst, arg0, arg1)};
        let mut ret1 = QMenu::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAction * QMenu::exec();
// Zero-argument overload impl; the `exec` dispatcher lives elsewhere in this
// generated file.
impl<'a> /*trait*/ QMenu_exec<QAction> for () {
    fn exec(self , rsthis: & QMenu) -> QAction {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN5QMenu4execEv()};
        let mut ret = unsafe {C_ZN5QMenu4execEv(rsthis.qclsinst)};
        let mut ret1 = QAction::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
#[derive(Default)] // for QMenu_aboutToShow
/// Handle for the `QMenu::aboutToShow()` signal; `poi` is the raw instance.
pub struct QMenu_aboutToShow_signal{poi:u64}
impl /* struct */ QMenu {
    /// Returns a connectable handle for the `aboutToShow()` signal.
    pub fn aboutToShow(&self) -> QMenu_aboutToShow_signal {
        return QMenu_aboutToShow_signal{poi:self.qclsinst};
    }
}
impl /* struct */ QMenu_aboutToShow_signal {
    /// Connects a slot (fn pointer or boxed closure) to `aboutToShow()`.
    pub fn connect<T: QMenu_aboutToShow_signal_connect>(self, overload_args: T) {
        overload_args.connect(self);
    }
}
/// Connect-overload trait for the `aboutToShow()` signal.
pub trait QMenu_aboutToShow_signal_connect {
    fn connect(self, sigthis: QMenu_aboutToShow_signal);
}
#[derive(Default)] // for QMenu_triggered
/// Handle for the `QMenu::triggered(QAction*)` signal.
pub struct QMenu_triggered_signal{poi:u64}
impl /* struct */ QMenu {
    /// Returns a connectable handle for the `triggered(QAction*)` signal.
    pub fn triggered(&self) -> QMenu_triggered_signal {
        return QMenu_triggered_signal{poi:self.qclsinst};
    }
}
impl /* struct */ QMenu_triggered_signal {
    /// Connects a slot (fn pointer or boxed closure) to `triggered(QAction*)`.
    pub fn connect<T: QMenu_triggered_signal_connect>(self, overload_args: T) {
        overload_args.connect(self);
    }
}
/// Connect-overload trait for the `triggered(QAction*)` signal.
pub trait QMenu_triggered_signal_connect {
    fn connect(self, sigthis: QMenu_triggered_signal);
}
#[derive(Default)] // for QMenu_hovered
/// Handle for the `QMenu::hovered(QAction*)` signal.
pub struct QMenu_hovered_signal{poi:u64}
impl /* struct */ QMenu {
    /// Returns a connectable handle for the `hovered(QAction*)` signal.
    pub fn hovered(&self) -> QMenu_hovered_signal {
        return QMenu_hovered_signal{poi:self.qclsinst};
    }
}
impl /* struct */ QMenu_hovered_signal {
    /// Connects a slot (fn pointer or boxed closure) to `hovered(QAction*)`.
    pub fn connect<T: QMenu_hovered_signal_connect>(self, overload_args: T) {
        overload_args.connect(self);
    }
}
/// Connect-overload trait for the `hovered(QAction*)` signal.
pub trait QMenu_hovered_signal_connect {
    fn connect(self, sigthis: QMenu_hovered_signal);
}
#[derive(Default)] // for QMenu_aboutToHide
/// Handle for the `QMenu::aboutToHide()` signal.
pub struct QMenu_aboutToHide_signal{poi:u64}
impl /* struct */ QMenu {
    /// Returns a connectable handle for the `aboutToHide()` signal.
    pub fn aboutToHide(&self) -> QMenu_aboutToHide_signal {
        return QMenu_aboutToHide_signal{poi:self.qclsinst};
    }
}
impl /* struct */ QMenu_aboutToHide_signal {
    /// Connects a slot (fn pointer or boxed closure) to `aboutToHide()`.
    pub fn connect<T: QMenu_aboutToHide_signal_connect>(self, overload_args: T) {
        overload_args.connect(self);
    }
}
/// Connect-overload trait for the `aboutToHide()` signal.
pub trait QMenu_aboutToHide_signal_connect {
    fn connect(self, sigthis: QMenu_aboutToHide_signal);
}
// aboutToShow()
// Trampoline invoked from the C++ slot proxy when the signal fires; `rsfptr`
// is the plain fn pointer that was registered via `connect`.
extern fn QMenu_aboutToShow_signal_connect_cb_0(rsfptr:fn(), ) {
    println!("{}:{}", file!(), line!());
    rsfptr();
}
/// Trampoline for boxed-closure slots of `aboutToShow()`; `rsfptr_raw` is the
/// pointer produced by `Box::into_raw` in the `Box<Fn()>` connect impl.
extern fn QMenu_aboutToShow_signal_connect_cb_box_0(rsfptr_raw:*mut Box<Fn()>, ) {
    println!("{}:{}", file!(), line!());
    // Fix: the old code rebuilt the Box with `Box::from_raw`, which takes
    // ownership and drops the closure when this trampoline returns — any later
    // emission of the signal would then call through a dangling pointer
    // (use-after-free). Call through the raw pointer without taking ownership
    // so the closure stays alive for the lifetime of the connection.
    unsafe { (*rsfptr_raw)() };
}
/// Connects a plain `fn()` to the `aboutToShow()` signal via the slot proxy.
impl /* trait */ QMenu_aboutToShow_signal_connect for fn() {
    fn connect(self, sigthis: QMenu_aboutToShow_signal) {
        // Fix: removed the two leftover no-op expression statements
        // (`self as *mut c_void as u64;` / `self as *mut c_void;`) — they had
        // no effect and only generated warnings.
        let arg0 = sigthis.poi as *mut c_void;
        // Trampoline that re-invokes the stored fn pointer on emission.
        let arg1 = QMenu_aboutToShow_signal_connect_cb_0 as *mut c_void;
        let arg2 = self as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu11aboutToShowEv(arg0, arg1, arg2)};
    }
}
/// Connects a boxed closure to the `aboutToShow()` signal.
impl /* trait */ QMenu_aboutToShow_signal_connect for Box<Fn()> {
    fn connect(self, sigthis: QMenu_aboutToShow_signal) {
        // Box::into_raw(self) as u64;
        // Box::into_raw(self) as *mut c_void;
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMenu_aboutToShow_signal_connect_cb_box_0 as *mut c_void;
        // The closure is double-boxed and leaked via `into_raw` so it can
        // outlive this call for the duration of the C++ connection.
        // NOTE(review): nothing visible here ever frees it; confirm intended
        // connection lifetime / cleanup strategy.
        let arg2 = Box::into_raw(Box::new(self)) as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu11aboutToShowEv(arg0, arg1, arg2)};
    }
}
// hovered(class QAction *)
// Trampoline for `hovered`: wraps the raw `QAction*` emitted by C++ into a
// Rust-side QAction before invoking the registered fn pointer.
extern fn QMenu_hovered_signal_connect_cb_1(rsfptr:fn(QAction), arg0: *mut c_void) {
    println!("{}:{}", file!(), line!());
    let rsarg0 = QAction::inheritFrom(arg0 as u64);
    rsfptr(rsarg0);
}
/// Trampoline for boxed-closure slots of `hovered(QAction*)`.
extern fn QMenu_hovered_signal_connect_cb_box_1(rsfptr_raw:*mut Box<Fn(QAction)>, arg0: *mut c_void) {
    println!("{}:{}", file!(), line!());
    let rsarg0 = QAction::inheritFrom(arg0 as u64);
    // Fix: the old code rebuilt the Box with `Box::from_raw`, taking ownership
    // and dropping the closure on return — later emissions would call through
    // a dangling pointer (use-after-free). Call through the raw pointer
    // without taking ownership instead.
    unsafe { (*rsfptr_raw)(rsarg0) };
}
/// Connects a plain `fn(QAction)` to the `hovered(QAction*)` signal.
impl /* trait */ QMenu_hovered_signal_connect for fn(QAction) {
    fn connect(self, sigthis: QMenu_hovered_signal) {
        // Fix: removed the two leftover no-op expression statements — they had
        // no effect and only generated warnings.
        let arg0 = sigthis.poi as *mut c_void;
        // Trampoline that wraps the emitted QAction* and calls the fn pointer.
        let arg1 = QMenu_hovered_signal_connect_cb_1 as *mut c_void;
        let arg2 = self as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu7hoveredEP7QAction(arg0, arg1, arg2)};
    }
}
/// Connects a boxed closure to the `hovered(QAction*)` signal.
impl /* trait */ QMenu_hovered_signal_connect for Box<Fn(QAction)> {
    fn connect(self, sigthis: QMenu_hovered_signal) {
        // Box::into_raw(self) as u64;
        // Box::into_raw(self) as *mut c_void;
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMenu_hovered_signal_connect_cb_box_1 as *mut c_void;
        // Double-boxed and leaked via `into_raw` so the closure outlives this
        // call for the C++ connection. NOTE(review): no visible cleanup path;
        // confirm intended lifetime.
        let arg2 = Box::into_raw(Box::new(self)) as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu7hoveredEP7QAction(arg0, arg1, arg2)};
    }
}
// aboutToHide()
// Trampoline invoked from the C++ slot proxy when `aboutToHide()` fires.
extern fn QMenu_aboutToHide_signal_connect_cb_2(rsfptr:fn(), ) {
    println!("{}:{}", file!(), line!());
    rsfptr();
}
/// Trampoline for boxed-closure slots of `aboutToHide()`.
extern fn QMenu_aboutToHide_signal_connect_cb_box_2(rsfptr_raw:*mut Box<Fn()>, ) {
    println!("{}:{}", file!(), line!());
    // Fix: `Box::from_raw` took ownership and dropped the closure on return,
    // leaving a dangling pointer for any later emission (use-after-free).
    // Call through the raw pointer without taking ownership.
    unsafe { (*rsfptr_raw)() };
}
/// Connects a plain `fn()` to the `aboutToHide()` signal.
impl /* trait */ QMenu_aboutToHide_signal_connect for fn() {
    fn connect(self, sigthis: QMenu_aboutToHide_signal) {
        // Fix: removed the two leftover no-op expression statements — they had
        // no effect and only generated warnings.
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMenu_aboutToHide_signal_connect_cb_2 as *mut c_void;
        let arg2 = self as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu11aboutToHideEv(arg0, arg1, arg2)};
    }
}
/// Connects a boxed closure to the `aboutToHide()` signal.
impl /* trait */ QMenu_aboutToHide_signal_connect for Box<Fn()> {
    fn connect(self, sigthis: QMenu_aboutToHide_signal) {
        // Box::into_raw(self) as u64;
        // Box::into_raw(self) as *mut c_void;
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMenu_aboutToHide_signal_connect_cb_box_2 as *mut c_void;
        // Double-boxed and leaked via `into_raw` so the closure outlives this
        // call for the C++ connection. NOTE(review): no visible cleanup path;
        // confirm intended lifetime.
        let arg2 = Box::into_raw(Box::new(self)) as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu11aboutToHideEv(arg0, arg1, arg2)};
    }
}
// triggered(class QAction *)
// Trampoline for `triggered`: wraps the raw `QAction*` emitted by C++ into a
// Rust-side QAction before invoking the registered fn pointer.
extern fn QMenu_triggered_signal_connect_cb_3(rsfptr:fn(QAction), arg0: *mut c_void) {
    println!("{}:{}", file!(), line!());
    let rsarg0 = QAction::inheritFrom(arg0 as u64);
    rsfptr(rsarg0);
}
/// Trampoline for boxed-closure slots of `triggered(QAction*)`.
extern fn QMenu_triggered_signal_connect_cb_box_3(rsfptr_raw:*mut Box<Fn(QAction)>, arg0: *mut c_void) {
    println!("{}:{}", file!(), line!());
    let rsarg0 = QAction::inheritFrom(arg0 as u64);
    // Fix: `Box::from_raw` took ownership and dropped the closure on return,
    // leaving a dangling pointer for any later emission (use-after-free).
    // Call through the raw pointer without taking ownership.
    unsafe { (*rsfptr_raw)(rsarg0) };
}
/// Connects a plain `fn(QAction)` to the `triggered(QAction*)` signal.
impl /* trait */ QMenu_triggered_signal_connect for fn(QAction) {
    fn connect(self, sigthis: QMenu_triggered_signal) {
        // Fix: removed the two leftover no-op expression statements — they had
        // no effect and only generated warnings.
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMenu_triggered_signal_connect_cb_3 as *mut c_void;
        let arg2 = self as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu9triggeredEP7QAction(arg0, arg1, arg2)};
    }
}
/// Connects a boxed closure to the `triggered(QAction*)` signal.
impl /* trait */ QMenu_triggered_signal_connect for Box<Fn(QAction)> {
    fn connect(self, sigthis: QMenu_triggered_signal) {
        // Box::into_raw(self) as u64;
        // Box::into_raw(self) as *mut c_void;
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMenu_triggered_signal_connect_cb_box_3 as *mut c_void;
        // Double-boxed and leaked via `into_raw` so the closure outlives this
        // call for the C++ connection. NOTE(review): no visible cleanup path;
        // confirm intended lifetime.
        let arg2 = Box::into_raw(Box::new(self)) as *mut c_void;
        unsafe {QMenu_SlotProxy_connect__ZN5QMenu9triggeredEP7QAction(arg0, arg1, arg2)};
    }
}
// <= body block end
|
use crate::models::TodoList;
use deadpool_postgres::Client;
use std::io;
use tokio_pg_mapper::FromTokioPostgresRow;
/// Fetches every row from the `todo_list` table and maps each row into a
/// `TodoList`.
///
/// # Errors
///
/// Returns an `io::Error` (kind `Other`) if preparing the statement, running
/// the query, or mapping a row fails.
pub async fn get_todos(db: &Client) -> Result<Vec<TodoList>, io::Error> {
    // Fix: the old body `unwrap()`/`expect()`ed database failures, panicking
    // the worker on any transient DB error even though the signature already
    // promises a Result. Convert each failure into an io::Error instead.
    let stmt = db
        .prepare("SELECT * FROM todo_list")
        .await
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
    let rows = db
        .query(&stmt, &[])
        .await
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
    // collect::<Result<_, _>> short-circuits on the first row that fails to map.
    rows.iter()
        .map(|row| {
            TodoList::from_row_ref(row)
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))
        })
        .collect()
}
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//! Useful filters for common operations.
pub use capture_bytes::CaptureBytesFactory;
pub use compress::CompressFactory;
pub use concatenate_bytes::ConcatBytesFactory;
pub use debug::DebugFactory;
pub use load_balancer::LoadBalancerFilterFactory;
pub use local_rate_limit::RateLimitFilterFactory;
pub use token_router::TokenRouterFactory;
mod capture_bytes;
mod compress;
mod concatenate_bytes;
mod debug;
mod load_balancer;
mod local_rate_limit;
mod token_router;
/// Shared metadata key (namespaced under `quilkin.dev`) for captured bytes —
/// presumably written by the `capture_bytes` filter and read by consumers such
/// as `token_router`; confirm in those modules.
pub const CAPTURED_BYTES: &str = "quilkin.dev/captured_bytes";
|
//! [<img alt="github" src="https://img.shields.io/badge/github-udoprog/unicycle-8da0cb?style=for-the-badge&logo=github" height="20">](https://github.com/udoprog/unicycle)
//! [<img alt="crates.io" src="https://img.shields.io/crates/v/unicycle.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/unicycle)
//! [<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-unicycle-66c2a5?style=for-the-badge&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/unicycle)
//!
//! A scheduler for driving a large number of futures.
//!
//! Unicycle provides a collection of [Unordered] types:
//!
//! * [FuturesUnordered]
//! * [StreamsUnordered]
//! * [IndexedStreamsUnordered]
//!
//! These are async abstractions that run a set of futures or streams which may
//! complete in any order, similarly to [FuturesUnordered][futures-rs] from the
//! [futures crate]. But we aim to provide a stronger guarantee of fairness
//! (see below), and better memory locality for the futures being polled.
//!
//! **Note:** This project is experimental. It involves some amount of unsafe and
//! possibly bad assumptions which needs to be either vetted or removed before you
//! should consider putting it in production.
//!
//! <br>
//!
//! ## Features
//!
//! * `parking-lot` - To enable locking using the [parking_lot] crate (default).
//! * `futures-rs` - Enable the use of the Stream type from [futures-rs].
//! This is required to get access to [StreamsUnordered] and
//! [IndexedStreamsUnordered] since these wrap over [futures-rs] types. (default)
//!
//! <br>
//!
//! ## Examples
//!
//! ```no_miri
//! use tokio::time;
//! use std::time::Duration;
//! use unicycle::FuturesUnordered;
//!
//! #[tokio::main]
//! async fn main() {
//! let mut futures = FuturesUnordered::new();
//!
//! futures.push(time::sleep(Duration::from_secs(2)));
//! futures.push(time::sleep(Duration::from_secs(3)));
//! futures.push(time::sleep(Duration::from_secs(1)));
//!
//! while let Some(_) = futures.next().await {
//! println!("tick");
//! }
//!
//! println!("done!");
//! }
//! ```
//!
//! [Unordered] types can be created from iterators:
//!
//! ```no_miri
//! use tokio::time;
//! use std::time::Duration;
//! use unicycle::FuturesUnordered;
//!
//! #[tokio::main]
//! async fn main() {
//! let mut futures = Vec::new();
//!
//! futures.push(time::sleep(Duration::from_secs(2)));
//! futures.push(time::sleep(Duration::from_secs(3)));
//! futures.push(time::sleep(Duration::from_secs(1)));
//!
//! let mut futures = futures.into_iter().collect::<FuturesUnordered<_>>();
//!
//! while let Some(_) = futures.next().await {
//! println!("tick");
//! }
//!
//! println!("done!");
//! }
//! ```
//!
//! <br>
//!
//! ## Fairness
//!
//! You can think of abstractions like Unicycle as schedulers. They are provided
//! a set of child tasks, and try to do their best to drive them to completion.
//! In this regard, it's interesting to talk about _fairness_ in how the tasks
//! are being driven.
//!
//! The current implementation of [FuturesUnordered][futures-rs] maintains a
//! queue of tasks interested in waking up. As a task is woken up it's added to
//! the head of this queue to signal its interest in being polled. When
//! [FuturesUnordered][futures-rs] works it drains this queue in a loop and
//! polls the associated task. This process has a side effect where tasks who
//! aggressively signal interest in waking up will receive priority and be
//! polled more frequently. Since there is a higher chance that while the queue
//! is being drained, their interest will be re-added at the head of the queue
//! immediately. This can lead to instances where a small number of tasks
//! can cause the polling loop of [FuturesUnordered][futures-rs] to [spin
//! abnormally]. This issue was [reported by Jon Gjengset] and is improved on by
//! [limiting the amount FuturesUnordered is allowed to spin].
//!
//! Unicycle addresses this by limiting how frequently a child task may be
//! polled per _polling cycle_. This is done by tracking polling interest in two
//! separate sets. Once we are polled, we swap out the active set then take the
//! swapped out set and use as a basis for what to poll in order while limiting
//! ourselves to only poll _once_ per child task. Additional wakeups are only
//! registered in the swapped in set which will be polled the next cycle.
//!
//! This way we hope to achieve a higher degree of fairness, never favoring the
//! behavior of one particular task.
//!
//! <br>
//!
//! ## Architecture
//!
//! The [Unordered] type stores all futures being polled in a [PinSlab]
//! (Inspired by the [slab] crate). A slab is capable of automatically reclaiming
//! storage at low cost, and will maintain decent memory locality. A [PinSlab]
//! is different from a [Slab] in how it allocates the memory regions it uses to
//! store objects. While a regular [Slab] is simply backed by a vector which
//! grows as appropriate, this approach is not viable for pinning, since it
//! would cause the objects to move while being reallocated. Instead [PinSlab]
//! maintains a growable collection of fixed-size memory regions, allowing it to
//! store and reference immovable objects through the [pin API]. Each future
//! inserted into the slab is assigned an _index_, which we will be using below.
//! We now call the inserted future a _task_, and you can think of this index as
//! a unique task identifier.
//!
//! Next to the slab we maintain two [BitSets][BitSet], one _active_ and one
//! _alternate_. When a task registers interest in waking up, the bit associated
//! with its index is set in the active set, and the latest waker passed into
//! [Unordered] is called to wake it up. Once [Unordered] is polled, it
//! atomically swaps the active and alternate [BitSets][BitSet], waits until it
//! has exclusive access to the now _alternate_ [BitSet], and drains it from all
//! the indexes which have been flagged to determine which tasks to poll. Each
//! task is then polled _once_ in order. If the task is [Ready], its result is
//! yielded. After we receive control again, we continue draining the alternate
//! set in this manner, until it is empty. When this is done we yield once, then
//! we start the cycle over again.
//!
//! [BitSet]: https://docs.rs/uniset/latest/uniset/struct.BitSet.html
//! [futures crate]: https://docs.rs/futures/latest/futures
//! [futures-rs]: https://crates.io/crates/futures
//! [futures-rs]: https://docs.rs/futures/latest/futures/stream/struct.FuturesUnordered.html
//! [FuturesUnordered]: https://docs.rs/unicycle/latest/unicycle/type.FuturesUnordered.html
//! [IndexedStreamsUnordered]: https://docs.rs/unicycle/latest/unicycle/type.IndexedStreamsUnordered.html
//! [limiting the amount FuturesUnordered is allowed to spin]: https://github.com/rust-lang/futures-rs/pull/2049
//! [parking_lot]: https://crates.io/crates/parking_lot
//! [pin API]: https://doc.rust-lang.org/std/pin/index.html
//! [PinSlab]: https://docs.rs/unicycle/latest/unicycle/pin_slab/struct.PinSlab.html
//! [Ready]: https://doc.rust-lang.org/std/task/enum.Poll.html
//! [reported by Jon Gjengset]: https://github.com/rust-lang/futures-rs/issues/2047
//! [Slab]: https://docs.rs/slab/latest/slab/struct.Slab.html
//! [slab]: https://github.com/carllerche/slab
//! [spin abnormally]: https://github.com/udoprog/unicycle/blob/main/tests/spinning_futures_unordered_test.rs
//! [StreamsUnordered]: https://docs.rs/unicycle/latest/unicycle/type.StreamsUnordered.html
//! [Unordered]: https://docs.rs/unicycle/latest/unicycle/struct.Unordered.html
#![deny(missing_docs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![deny(rustdoc::broken_intra_doc_links)]
#![allow(clippy::should_implement_trait)]
mod lock;
pub mod pin_slab;
#[doc(hidden)]
pub mod pin_vec;
mod wake_set;
mod waker;
use std::future::Future;
use std::iter;
use std::marker;
use std::mem;
use std::pin::Pin;
use std::ptr;
use std::sync::Arc;
use std::task::{Context, Poll};
use uniset::BitSet;
use waker::InternalWakers;
use self::pin_slab::PinSlab;
use self::wake_set::{SharedWakeSet, WakeSet};
use self::waker::SharedWaker;
#[cfg(feature = "futures-rs")]
use futures_core::{FusedFuture, FusedStream, Stream};
/// Our very own homemade `ready!` impl.
///
/// Unwraps `Poll::Ready(v)` to `v`, or early-returns `Poll::Pending` from the
/// enclosing function.
macro_rules! ready {
    ($expr:expr) => {
        match $expr {
            Poll::Ready(value) => value,
            Poll::Pending => return Poll::Pending,
        }
    };
}
/// A container for an unordered collection of [Future]s.
///
/// This is an alias for [Unordered] parameterized with the [Futures] sentinel.
///
/// # Examples
///
/// ```rust,no_run
/// use tokio::time;
/// use std::time::Duration;
///
/// #[tokio::main]
/// async fn main() {
///     let mut futures = unicycle::FuturesUnordered::new();
///
///     futures.push(time::sleep(Duration::from_secs(2)));
///     futures.push(time::sleep(Duration::from_secs(3)));
///     futures.push(time::sleep(Duration::from_secs(1)));
///
///     while let Some(_) = futures.next().await {
///         println!("tick");
///     }
///
///     println!("done!");
/// }
/// ```
pub type FuturesUnordered<T> = Unordered<T, Futures>;
/// Data that is shared across all sub-tasks.
///
/// Held behind an `Arc` by [Unordered] and referenced by the per-task wakers.
struct Shared {
    /// The currently registered parent waker.
    waker: SharedWaker,
    /// The currently registered wake set.
    wake_set: SharedWakeSet,
    /// The collection of all wakers currently or previously in use.
    all_wakers: InternalWakers,
}
impl Shared {
    /// Construct new shared data.
    fn new() -> Self {
        Self {
            waker: SharedWaker::new(),
            wake_set: SharedWakeSet::new(),
            all_wakers: InternalWakers::new(),
        }
    }
    /// Swap the active wake set with the alternate one.
    /// Also makes sure that the capacity of the active bitset is updated if the
    /// alternate one has.
    ///
    /// Returns `(true, set)` immediately — without swapping — when the
    /// alternate set still has pending indexes to drain; otherwise registers
    /// the parent waker and performs the swap.
    ///
    /// # Safety
    ///
    /// Caller must be assured that they are the only one who is attempting to
    /// swap out the wake sets.
    unsafe fn poll_swap_active<'a>(
        &self,
        cx: &Context<'_>,
        alternate: &mut *mut WakeSet,
    ) -> Poll<(bool, &'a mut BitSet)> {
        let non_empty = {
            let alternate = (**alternate).as_mut_set();
            let non_empty = !alternate.is_empty();
            // If the alternate set still has flagged indexes, keep draining it
            // before swapping in a new one.
            // NOTE(review): the original comment said this triggers "if the
            // capacity has changed", but the code returns early on *any*
            // non-empty alternate set — confirm intent.
            if non_empty {
                return Poll::Ready((true, alternate));
            }
            // Note: We must swap the waker before we swap the set.
            if !self.waker.swap(cx.waker()) {
                return Poll::Pending;
            }
            // Always `false` here — the `true` case returned early above.
            non_empty
        };
        let wake_set = self.swap_active(alternate);
        Poll::Ready((non_empty, wake_set))
    }
    /// Perform a swap of the active sets. This is safe, because we perform the
    /// appropriate locking while swapping the sets.
    ///
    /// # Safety
    ///
    /// We must ensure that we have unique access to the alternate set being
    /// swapped.
    unsafe fn swap_active<'a>(&self, alternate: &mut *mut WakeSet) -> &'a mut BitSet {
        // Unlock. At this position, if someone adds an element to the wake set
        // they are also bound to call wake, which will cause us to wake up.
        //
        // There is a race going on between locking and unlocking, and it's
        // beneficial for child tasks to observe the locked state of the wake
        // set so they refetch the other set instead of having to wait until
        // another wakeup.
        (**alternate).unlock_exclusive();
        let next = mem::replace(alternate, ptr::null_mut());
        debug_assert!(!next.is_null());
        *alternate = self.wake_set.swap(next);
        // Make sure no one else is using the alternate wake.
        //
        // Safety: We are the only one swapping alternate, so at
        // this point we know that we have access to the most recent
        // active set. We _must_ call lock_exclusive before we
        // can punt this into a mutable reference though, because at
        // this point inner futures will still have access to references
        // to it (under a lock!). We must wait for these to expire.
        //
        // We also unfortunately can't yield here, because we've swapped the
        // alternate set which could be used when pushing to the set.
        (**alternate).lock_exclusive();
        // Safety: While this is live we must _not_ mess with
        // `alternate` in any way.
        (**alternate).as_mut_set()
    }
}
// Sealed-trait pattern: prevents downstream crates from implementing
// `Sentinel` for their own types.
mod private {
    pub trait Sealed {}
    impl Sealed for super::Futures {}
    #[cfg(feature = "futures-rs")]
    impl Sealed for super::Streams {}
    #[cfg(feature = "futures-rs")]
    impl Sealed for super::IndexedStreams {}
}
/// Trait implemented by sentinels for the [Unordered] type.
///
/// Sealed: only the sentinels defined in this crate ([Futures], and with the
/// `futures-rs` feature, `Streams` and `IndexedStreams`) may implement it.
pub trait Sentinel: self::private::Sealed {}
/// Sentinel type for futures.
///
/// [Unordered] instances which handle futures have the signature
/// `Unordered<T, Futures>`, since it allows for a different implementation of
/// [Stream].
pub struct Futures(());
impl Sentinel for Futures {}
/// A container for an unordered collection of [Future]s or [Stream]s.
///
/// You should use one of the following type aliases to construct it:
/// * [FuturesUnordered]
/// * [StreamsUnordered]
/// * [IndexedStreamsUnordered]
///
/// # Examples
///
/// ```rust,no_run
/// use tokio::time;
/// use std::time::Duration;
///
/// #[tokio::main]
/// async fn main() {
/// let mut futures = unicycle::FuturesUnordered::new();
///
/// futures.push(time::sleep(Duration::from_secs(2)));
/// futures.push(time::sleep(Duration::from_secs(3)));
/// futures.push(time::sleep(Duration::from_secs(1)));
///
/// while let Some(_) = futures.next().await {
/// println!("tick");
/// }
///
/// println!("done!");
/// }
/// ```
pub struct Unordered<T, S>
where
S: Sentinel,
{
/// Slab of futures being polled.
/// They need to be pinned on the heap, since the slab might grow to
/// accomodate more futures.
slab: PinSlab<T>,
/// Shared parent waker.
/// Includes the current wake target. Each time we poll, we swap back and
/// forth between this and `alternate`.
shared: Arc<Shared>,
/// Alternate wake set, used for growing the existing set when futures are
/// added. This is then swapped out with the active set to receive polls.
///
/// Owned by this struct: allocated in `new_internal` and released in the
/// `Drop` impl via `Box::from_raw`.
alternate: *mut WakeSet,
/// Marker for the sentinel.
_marker: marker::PhantomData<S>,
}
// Safety: Unordered is ultimately a container of `T`, and is `Send` only if `T`
// themselves are `Send`.
unsafe impl<T, S> Send for Unordered<T, S>
where
T: Send,
S: Sentinel,
{
}
// Safety: Unordered is ultimately a container of `T`, and is `Sync` only if `T`
// themselves are `Sync`.
unsafe impl<T, S> Sync for Unordered<T, S>
where
T: Sync,
S: Sentinel,
{
}
// The container itself can be moved freely: its contents are pinned on the
// heap (see the `slab` field), not inside the struct itself.
impl<T, S> Unpin for Unordered<T, S> where S: Sentinel {}
impl<T, S> Unordered<T, S>
where
S: Sentinel,
Self: PollNext,
{
/// Creates a future that resolves to the next item in the unordered set.
///
/// Functions like [`StreamExt::next`] would from the [futures] crate, but
/// doesn't depend on the [Stream] trait.
///
/// [`StreamExt::next`]: https://docs.rs/futures/latest/futures/stream/trait.StreamExt.html#method.next
/// [futures]: https://crates.io/crates/futures
///
/// # Examples
///
/// ```rust
/// use tokio::time;
/// use std::time::Duration;
/// use unicycle::FuturesUnordered;
///
/// #[tokio::main]
/// async fn main() {
/// let mut futures = FuturesUnordered::new();
///
/// futures.push(time::sleep(Duration::from_secs(2)));
/// futures.push(time::sleep(Duration::from_secs(3)));
/// futures.push(time::sleep(Duration::from_secs(1)));
///
/// while let Some(_) = futures.next().await {
/// println!("tick");
/// }
///
/// println!("done!");
/// }
/// ```
pub fn next(&mut self) -> Next<'_, Self> {
// The returned future borrows the whole collection and delegates to
// its `PollNext` implementation (note the `Self: PollNext` bound).
Next(self)
}
}
/// Future returned by [Unordered::next].
///
/// Resolves to the next item produced by the borrowed collection, or `None`
/// once the collection reports the end of its items.
pub struct Next<'a, T>(&'a mut T);
impl<T> Future for Next<'_, T>
where
    T: Unpin + PollNext,
{
    type Output = Option<T::Item>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // `T: Unpin` implies `Next` is `Unpin`, so we may safely unwrap the
        // pin and re-pin the inner mutable reference before delegating.
        let this = self.get_mut();
        Pin::new(&mut *this.0).poll_next(cx)
    }
}
impl<T> FuturesUnordered<T> {
/// Construct a new, empty [FuturesUnordered].
///
/// # Examples
///
/// ```
/// use unicycle::FuturesUnordered;
///
/// let mut futures = FuturesUnordered::new();
/// assert!(futures.is_empty());
///
/// futures.push(async { 42 });
/// ```
pub fn new() -> Self {
// Shared constructor used by all sentinel flavors.
Self::new_internal()
}
}
/// Trait for providing a `poll_next` implementation for various unordered set
/// types.
///
/// This is like the lightweight unicycle version of the [Stream] trait, but we
/// provide it here so we can shim in our own generic [next] implementation.
///
/// Implemented for [FuturesUnordered], and — with the `futures-rs` feature —
/// for `StreamsUnordered` and `IndexedStreamsUnordered`.
///
/// [next]: Unordered::next
pub trait PollNext {
/// The output of the poll.
type Item;
/// Poll the stream for the next item.
///
/// Once this completes with `Poll::Ready(None)`, no more items are expected
/// and it should not be polled again.
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>>;
}
impl<T> PollNext for FuturesUnordered<T>
where
T: Future,
{
type Item = T::Output;
/// Internal poll function for `FuturesUnordered<T>`.
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<T::Output>> {
let Self {
ref mut slab,
ref shared,
ref mut alternate,
..
} = *self.as_mut();
if slab.is_empty() {
// Nothing to poll, nothing to add. End the stream since we don't have work to do.
return Poll::Ready(None);
}
// Safety: We have exclusive access to Unordered, which is the only
// implementation that is trying to swap the wake sets.
//
// `wake_last` is the drained snapshot of woken task indexes;
// `non_empty` signals that the swapped-in set already held entries, so
// another poll must be scheduled (see the bottom of this function).
let (non_empty, wake_last) = ready!(unsafe { shared.poll_swap_active(cx, alternate) });
for index in wake_last.drain() {
// NB: Since we defer pollables a little, a future might
// have been polled and subsequently removed from the slab.
// So we don't treat this as an error here.
// If on the other hand it was removed _and_ re-added, we have
// a case of a spurious poll. Luckily, that doesn't bother a
// future much.
let fut = match slab.get_pin_mut(index) {
Some(fut) => fut,
None => continue,
};
// Construct a new lightweight waker only capable of waking by
// reference, with referential access to `shared`.
let result = self::waker::poll_with_ref(shared, index, move |cx| fut.poll(cx));
if let Poll::Ready(result) = result {
// The future completed: free its slab slot and yield its output.
let removed = slab.remove(index);
debug_assert!(removed);
return Poll::Ready(Some(result));
}
}
if slab.is_empty() {
return Poll::Ready(None);
}
// We need to wake again to take care of the alternate set that was
// swapped in.
if non_empty {
cx.waker().wake_by_ref();
}
Poll::Pending
}
}
impl<T, S> Unordered<T, S>
where
S: Sentinel,
{
/// Construct an empty collection. Shared by all public constructors.
#[inline(always)]
fn new_internal() -> Self {
Self {
slab: PinSlab::new(),
shared: Arc::new(Shared::new()),
// The alternate set starts out locked: we hold unique access to it
// until it is swapped in (see `swap_active`).
alternate: Box::into_raw(Box::new(WakeSet::locked())),
_marker: marker::PhantomData,
}
}
/// Test if the collection of futures is empty.
///
/// # Examples
///
/// ```
/// use std::future::Ready;
/// use unicycle::FuturesUnordered;
///
/// let mut futures = FuturesUnordered::<Ready<()>>::new();
/// assert!(futures.is_empty());
/// ```
pub fn is_empty(&self) -> bool {
self.slab.is_empty()
}
/// Push the given future or stream to [Unordered] and return its task
/// index.
///
/// Newly added futures are guaranteed to be polled, but there is no
/// guarantee in which order this will happen.
///
/// Pushed tasks are pinned by the [Unordered] collection automatically.
///
/// # Examples
///
/// ```
/// use unicycle::FuturesUnordered;
///
/// let mut futures = FuturesUnordered::new();
/// assert!(futures.is_empty());
/// futures.push(async { 42 });
/// assert!(!futures.is_empty());
/// ```
pub fn push(&mut self, future: T) -> usize {
let index = self.slab.insert(future);
// Mark the new task in the alternate set and record whether doing so
// grew the set's capacity.
let (old, new) = {
// Safety: At this point we know we have exclusive access to the set.
let set = unsafe { (*self.alternate).as_mut_set() };
let old = set.capacity();
set.set(index);
let new = set.capacity();
(old, new)
};
// Fast Path: Did not grow the alternate set, so no need to grow the
// active set either.
if new <= old {
return index;
}
// Slow Path: Swap out the active set and grow it to accomodate the same
// number of elements as the now alternate set was grown to.
// This works out, because if it's non-empty, the next time we poll
// the unordered set it will be processed. It it's empty, it will be
// swapped out with the active set which now contains the newly added
// futures.
// Safety: We have unique access to the alternate set being modified.
unsafe {
self.shared.swap_active(&mut self.alternate).reserve(new);
}
index
}
/// Get a pinned mutable reference to the stream or future at the given
/// index.
///
/// # Examples
///
/// ```rust
/// use unicycle::FuturesUnordered;
/// use futures::future::poll_fn;
/// use std::future::Future as _;
///
/// #[tokio::main]
/// async fn main() {
/// let mut futures = FuturesUnordered::new();
/// let index = futures.push(async { 42 });
///
/// let result = poll_fn(|cx| {
/// futures.get_pin_mut(index).expect("expected future").poll(cx)
/// }).await;
///
/// assert_eq!(result, 42);
/// }
/// ```
pub fn get_pin_mut(&mut self, index: usize) -> Option<Pin<&mut T>> {
self.slab.get_pin_mut(index)
}
/// Get a mutable reference to the stream or future at the given index.
/// Requires that the stores stream or future is [Unpin].
///
/// # Examples
///
/// ```rust
/// use unicycle::FuturesUnordered;
/// use futures::future::{ready, poll_fn};
/// use std::{pin::Pin, future::Future as _};
///
/// #[tokio::main]
/// async fn main() {
/// let mut futures = FuturesUnordered::new();
/// let index = futures.push(ready(42));
///
/// let result = poll_fn(|cx| {
/// Pin::new(futures.get_mut(index).expect("expected future")).poll(cx)
/// }).await;
///
/// assert_eq!(result, 42);
/// }
/// ```
pub fn get_mut(&mut self, index: usize) -> Option<&mut T>
where
T: Unpin,
{
self.slab.get_mut(index)
}
}
// Equivalent to [FuturesUnordered::new].
impl<T> Default for Unordered<T, Futures> {
fn default() -> Self {
Self::new()
}
}
impl<T, S> Drop for Unordered<T, S>
where
S: Sentinel,
{
fn drop(&mut self) {
// Cancel all child futures in an attempt to prevent them from
// attempting to call wake on the shared wake set.
self.slab.clear();
// We intend to drop both wake sets. Therefore we need exclusive access
// to both wakers. Unfortunately that means that at this point, any call
// to wakes will have to serialize behind the shared wake set while the
// alternate set is being dropped.
let _write = self.shared.wake_set.prevent_drop_write();
// Safety: we uniquely own `alternate`, so we are responsible for
// dropping it. This is asserted when we swap it out during a poll by
// calling WakeSet::lock_exclusive. We are also the _only_ one
// swapping `wake_alternative`, so we know that can't happen here.
unsafe {
drop(Box::from_raw(self.alternate));
}
}
}
impl<T, S> iter::Extend<T> for Unordered<T, S>
where
    S: Sentinel,
{
    /// Push every item produced by `iter` into the collection, in order.
    fn extend<I>(&mut self, iter: I)
    where
        I: IntoIterator<Item = T>,
    {
        iter.into_iter().for_each(|item| {
            self.push(item);
        });
    }
}
impl<T> iter::FromIterator<T> for FuturesUnordered<T>
where
T: Future,
{
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
let mut futures = FuturesUnordered::new();
futures.extend(iter);
futures
}
}
// Wraps each item so that it is only compiled when the `futures-rs` feature
// is enabled, and is flagged as feature-gated in docs.rs documentation.
macro_rules! cfg_futures_rs {
($($item:item)*) => {
$(
#[cfg(feature = "futures-rs")]
#[cfg_attr(docsrs, doc(cfg(feature = "futures-rs")))]
$item
)*
}
}
cfg_futures_rs! {
/// Sentinel type for streams.
///
/// [Unordered] instances which handle streams have the signature
/// `Unordered<T, Streams>`, since it allows for a different implementation of
/// [Stream].
pub struct Streams(());
impl Sentinel for Streams {}
/// Sentinel type for streams which are indexed - for each value they yield,
/// they also yield the task identifier associated with them.
///
/// [Unordered] instances which handle streams have the signature
/// `Unordered<T, IndexedStreams>`, since it allows for a different
/// implementation of [Stream].
pub struct IndexedStreams(());
impl Sentinel for IndexedStreams {}
/// A container for an unordered collection of [Stream]s.
///
/// Yields each item produced by any of the contained streams, in no
/// particular order.
///
/// # Examples
///
/// ```rust,no_run
/// use tokio::{net::TcpListener, time};
/// use tokio_util::codec::{Framed, LengthDelimitedCodec};
/// use std::error::Error;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// let mut listener = TcpListener::bind("127.0.0.1:8080").await?;
/// let mut clients = unicycle::StreamsUnordered::new();
///
/// loop {
/// tokio::select! {
/// result = listener.accept() => {
/// let (stream, _) = result?;
/// clients.push(Framed::new(stream, LengthDelimitedCodec::new()));
/// },
/// Some(frame) = clients.next() => {
/// println!("received frame: {:?}", frame);
/// }
/// }
/// }
/// }
/// ```
pub type StreamsUnordered<T> = Unordered<T, Streams>;
/// A container for an unordered collection of [Stream]s, which also yields the
/// index that produced the next item.
///
/// # Examples
///
/// ```rust,no_run
/// use tokio::{net::TcpListener, time};
/// use tokio_util::codec::{Framed, LengthDelimitedCodec};
/// use std::error::Error;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn Error>> {
/// let mut listener = TcpListener::bind("127.0.0.1:8080").await?;
/// let mut clients = unicycle::IndexedStreamsUnordered::new();
///
/// loop {
/// tokio::select! {
/// result = listener.accept() => {
/// let (stream, _) = result?;
/// clients.push(Framed::new(stream, LengthDelimitedCodec::new()));
/// },
/// Some((index, frame)) = clients.next() => {
/// match frame {
/// Some(frame) => println!("{}: received frame: {:?}", index, frame),
/// None => println!("{}: client disconnected", index),
/// }
/// }
/// }
/// }
/// }
/// ```
pub type IndexedStreamsUnordered<T> = Unordered<T, IndexedStreams>;
impl<T> StreamsUnordered<T> {
/// Construct a new, empty [StreamsUnordered].
///
/// # Examples
///
/// ```rust
/// use tokio_stream::iter;
/// use unicycle::StreamsUnordered;
///
/// #[tokio::main]
/// async fn main() {
/// let mut streams = StreamsUnordered::new();
/// assert!(streams.is_empty());
///
/// streams.push(iter(vec![1, 2, 3, 4]));
/// streams.push(iter(vec![5, 6, 7, 8]));
///
/// let mut received = Vec::new();
///
/// while let Some(value) = streams.next().await {
/// received.push(value);
/// }
///
/// assert_eq!(vec![5, 1, 6, 2, 7, 3, 8, 4], received);
/// }
/// ```
pub fn new() -> Self {
// Shared constructor used by all sentinel flavors.
Self::new_internal()
}
}
impl<T> IndexedStreamsUnordered<T> {
/// Construct a new, empty [IndexedStreamsUnordered].
///
/// This is the same as [StreamsUnordered], except that it yields the index
/// of the stream whose value was just yielded, alongside the yielded
/// value.
///
/// # Examples
///
/// ```rust
/// use tokio_stream::iter;
/// use unicycle::IndexedStreamsUnordered;
///
/// #[tokio::main]
/// async fn main() {
/// let mut streams = IndexedStreamsUnordered::new();
/// assert!(streams.is_empty());
///
/// streams.push(iter(vec![1, 2]));
/// streams.push(iter(vec![5, 6]));
///
/// let mut received = Vec::new();
///
/// while let Some(value) = streams.next().await {
/// received.push(value);
/// }
///
/// assert_eq!(
/// vec![
/// (1, Some(5)),
/// (0, Some(1)),
/// (1, Some(6)),
/// (0, Some(2)),
/// (1, None),
/// (0, None)
/// ],
/// received
/// );
/// }
/// ```
pub fn new() -> Self {
// Shared constructor used by all sentinel flavors.
Self::new_internal()
}
}
/// Provide `Stream` implementation through `PollNext`.
impl<T, S> Stream for Unordered<T, S>
where
S: Sentinel,
Self: PollNext,
{
type Item = <Self as PollNext>::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
// Forward directly to the crate-local trait; the signatures match.
<Self as PollNext>::poll_next(self, cx)
}
}
/// Provide `FusedStream` implementation through `PollNext`.
///
/// The collection is terminated exactly when it is empty, matching the
/// `Poll::Ready(None)` conditions in the `PollNext` implementations.
impl<T, S> FusedStream for Unordered<T, S>
where
    S: Sentinel,
    Self: PollNext,
{
    fn is_terminated(&self) -> bool {
        self.is_empty()
    }
}
impl<T> PollNext for StreamsUnordered<T>
where
T: Stream,
{
type Item = T::Item;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let Self {
ref mut slab,
ref shared,
ref mut alternate,
..
} = *self.as_mut();
if slab.is_empty() {
// Nothing to poll, nothing to add. End the stream since we don't have work to do.
return Poll::Ready(None);
}
// Safety: We have exclusive access to Unordered, which is the only
// implementation that is trying to swap the wake sets.
let (non_empty, wake_last) = ready!(unsafe { shared.poll_swap_active(cx, alternate) });
for index in wake_last.drain() {
// NB: Since we defer pollables a little, a future might
// have been polled and subsequently removed from the slab.
// So we don't treat this as an error here.
// If on the other hand it was removed _and_ re-added, we have
// a case of a spurious poll. Luckily, that doesn't bother a
// future much.
let stream = match slab.get_pin_mut(index) {
Some(stream) => stream,
None => continue,
};
// Construct a new lightweight waker only capable of waking by
// reference, with referential access to `shared`.
let result = self::waker::poll_with_ref(shared, index, move |cx| stream.poll_next(cx));
if let Poll::Ready(result) = result {
match result {
Some(value) => {
// The stream yielded a value but may have more: mark its
// index in the wake set again and reschedule ourselves
// before handing the value to the caller.
cx.waker().wake_by_ref();
shared.wake_set.wake(index);
return Poll::Ready(Some(value));
}
None => {
// The stream ended: release its slot and keep draining.
let removed = slab.remove(index);
debug_assert!(removed);
}
}
}
}
// We have successfully polled the last snapshot.
// Yield and make sure that we are polled again.
if slab.is_empty() {
return Poll::Ready(None);
}
// We need to wake again to take care of the alternate set that was
// swapped in.
if non_empty {
cx.waker().wake_by_ref();
}
Poll::Pending
}
}
impl<T> PollNext for IndexedStreamsUnordered<T>
where
T: Stream,
{
type Item = (usize, Option<T::Item>);
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let Self {
ref mut slab,
ref shared,
ref mut alternate,
..
} = *self.as_mut();
if slab.is_empty() {
// Nothing to poll, nothing to add. End the stream since we don't have work to do.
return Poll::Ready(None);
}
// Safety: We have exclusive access to Unordered, which is the only
// implementation that is trying to swap the wake sets.
let (non_empty, wake_last) = ready!(unsafe { shared.poll_swap_active(cx, alternate) });
for index in wake_last.drain() {
// NB: Since we defer pollables a little, a future might
// have been polled and subsequently removed from the slab.
// So we don't treat this as an error here.
// If on the other hand it was removed _and_ re-added, we have
// a case of a spurious poll. Luckily, that doesn't bother a
// future much.
let stream = match slab.get_pin_mut(index) {
Some(stream) => stream,
None => continue,
};
// Construct a new lightweight waker only capable of waking by
// reference, with referential access to `shared`.
let result = self::waker::poll_with_ref(shared, index, move |cx| stream.poll_next(cx));
if let Poll::Ready(result) = result {
match result {
Some(value) => {
// The stream yielded a value but may have more: mark its
// index in the wake set again and reschedule ourselves
// before handing the value to the caller.
cx.waker().wake_by_ref();
shared.wake_set.wake(index);
return Poll::Ready(Some((index, Some(value))));
}
None => {
// Unlike `StreamsUnordered`, the end of a stream is itself
// an item `(index, None)`, so release the slot, reschedule,
// and report it to the caller.
cx.waker().wake_by_ref();
let removed = slab.remove(index);
debug_assert!(removed);
return Poll::Ready(Some((index, None)));
}
}
}
}
// We have successfully polled the last snapshot.
// Yield and make sure that we are polled again.
if slab.is_empty() {
return Poll::Ready(None);
}
// We need to wake again to take care of the alternate set that was
// swapped in.
if non_empty {
cx.waker().wake_by_ref();
}
Poll::Pending
}
}
impl<T> iter::FromIterator<T> for StreamsUnordered<T>
where
    T: Stream,
{
    /// Collect an iterator of streams into a fresh [StreamsUnordered].
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        // Start from an empty collection and push each stream in turn.
        let mut collection = Self::new();
        for stream in iter {
            collection.push(stream);
        }
        collection
    }
}
/// A [Next] future is terminated exactly when its underlying collection is.
impl<T> FusedFuture for Next<'_, T>
where
T: Unpin + PollNext + FusedStream,
{
fn is_terminated(&self) -> bool {
self.0.is_terminated()
}
}
}
|
use chrono::prelude::*;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
/// Identifier assigned to an artifact by the museum.
#[derive(Hash, Debug, PartialEq)]
struct MuseumNumber {
value: u32,
}
/// A museum artifact. Equality and hashing are determined solely by `id`
/// (see the manual trait impls below); `name` and `date` are ignored.
#[derive(Debug)]
struct Artifact {
id: MuseumNumber,
name: String,
date: DateTime<Utc>,
}
// Two artifacts are equal when their museum numbers match; `name` and `date`
// are intentionally not compared.
impl PartialEq for Artifact {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for Artifact {}
// Hash must agree with PartialEq, so only `id` contributes to the hash.
impl Hash for Artifact {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.id.hash(hasher);
}
}
#[test]
fn hash_trait_test() {
    // Two artifacts with the same museum number compare equal and hash
    // identically, even though their `date` fields differ — identity is
    // keyed on `id` alone.
    let new_bow = || Artifact {
        id: MuseumNumber { value: 32 },
        name: String::from("Indian bow"),
        date: Utc::now(),
    };
    let first: HashSet<_> = std::iter::once(new_bow()).collect();
    let second: HashSet<_> = std::iter::once(new_bow()).collect();
    assert_eq!(first, second);
}
|
//! Private module for selective re-export.
use crate::actor::{Actor, Id, Network};
use crate::{Representative, Rewrite, RewritePlan};
use std::fmt::Debug;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use super::timers::Timers;
/// Represents a snapshot in time for the entire actor system.
pub struct ActorModelState<A: Actor, H = ()> {
pub actor_states: Vec<Arc<A::State>>,
pub network: Network<A::Msg>,
pub timers_set: Vec<Timers<A::Timer>>,
pub crashed: Vec<bool>,
pub history: H,
}
// Manual implementation so serialization only requires `Serialize` on the
// associated types actually emitted.
impl<A, H> serde::Serialize for ActorModelState<A, H>
where
A: Actor,
A::State: serde::Serialize,
A::Msg: serde::Serialize,
A::Timer: serde::Serialize,
H: serde::Serialize,
{
fn serialize<Ser: serde::Serializer>(&self, ser: Ser) -> Result<Ser::Ok, Ser::Error> {
use serde::ser::SerializeStruct;
// NOTE(review): only 4 of the 5 fields are serialized — `crashed` is
// omitted — and `timers_set` is emitted under the legacy key
// "is_timer_set". The `Debug` impl does the same; confirm both are
// intentional (e.g. for output compatibility).
let mut out = ser.serialize_struct("ActorModelState", 4)?;
out.serialize_field("actor_states", &self.actor_states)?;
out.serialize_field("network", &self.network)?;
out.serialize_field("is_timer_set", &self.timers_set)?;
out.serialize_field("history", &self.history)?;
out.end()
}
}
// Manual implementation to avoid `Clone` constraint that `#derive(Clone)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Clone for ActorModelState<A, H>
where
    A: Actor,
    H: Clone,
{
    fn clone(&self) -> Self {
        // Field-by-field clone, listed in declaration order.
        Self {
            actor_states: self.actor_states.clone(),
            network: self.network.clone(),
            timers_set: self.timers_set.clone(),
            crashed: self.crashed.clone(),
            history: self.history.clone(),
        }
    }
}
// Manual implementation to avoid `Debug` constraint that `#derive(Debug)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Debug for ActorModelState<A, H>
where
A: Actor,
H: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let mut builder = f.debug_struct("ActorModelState");
builder.field("actor_states", &self.actor_states);
builder.field("history", &self.history);
// NOTE(review): `timers_set` is printed under the older name
// "is_timer_set" and `crashed` is not printed at all, matching the
// `Serialize` impl; confirm both are intentional.
builder.field("is_timer_set", &self.timers_set);
builder.field("network", &self.network);
builder.finish()
}
}
// Manual implementation to avoid `Eq` constraint that `#derive(Eq)` would introduce on
// `ActorModelState<A, H>` type parameters. Marker impl only; equality logic
// lives in the `PartialEq` impl below.
impl<A, H> Eq for ActorModelState<A, H>
where
A: Actor,
A::State: Eq,
H: Eq,
{
}
// Manual implementation to avoid `Hash` constraint that `#derive(Hash)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Hash for ActorModelState<A, H>
where
A: Actor,
H: Hash,
{
fn hash<Hash: Hasher>(&self, state: &mut Hash) {
// `crashed` is excluded from the hash, consistent with the `PartialEq`
// impl below, which also ignores it.
self.actor_states.hash(state);
self.history.hash(state);
self.timers_set.hash(state);
self.network.hash(state);
}
}
// Manual implementation to avoid `PartialEq` constraint that `#derive(PartialEq)` would
// introduce on `ActorModelState<A, H>` type parameters.
impl<A, H> PartialEq for ActorModelState<A, H>
where
A: Actor,
A::State: PartialEq,
H: PartialEq,
{
// NOTE(review): `crashed` is not compared (nor hashed above) — presumably
// intentional; confirm.
fn eq(&self, other: &Self) -> bool {
self.actor_states.eq(&other.actor_states)
&& self.history.eq(&other.history)
&& self.timers_set.eq(&other.timers_set)
&& self.network.eq(&other.network)
}
}
// Picks a canonical member of each equivalence class of states that differ
// only by a permutation of actor identities: actor states are sorted, and
// `Id`s embedded in messages, timers, crash flags, and history are rewritten
// to match the new positions.
impl<A, H> Representative for ActorModelState<A, H>
where
A: Actor,
A::Msg: Rewrite<Id>,
A::State: Ord + Rewrite<Id>,
H: Rewrite<Id>,
{
fn representative(&self) -> Self {
// The plan determines the sorted order of `actor_states` and is then
// applied uniformly to every per-actor vector and embedded `Id`.
let plan = RewritePlan::from_values_to_sort(&self.actor_states);
Self {
actor_states: plan.reindex(&self.actor_states),
network: self.network.rewrite(&plan),
timers_set: plan.reindex(&self.timers_set),
crashed: plan.reindex(&self.crashed),
history: self.history.rewrite(&plan),
}
}
}
#[cfg(test)]
mod test {
use crate::actor::timers::Timers;
use crate::actor::{Actor, ActorModelState, Envelope, Id, Network, Out};
use crate::{Representative, Rewrite, RewritePlan};
use std::sync::Arc;
// Builds a concrete three-actor snapshot and checks that `representative`
// produces the expected canonical permutation.
#[test]
fn can_find_representative_from_equivalence_class() {
let empty_timers = Timers::new();
let mut non_empty_timers = Timers::new();
non_empty_timers.set(());
#[rustfmt::skip]
let state = ActorModelState::<A, History> {
actor_states: vec![
Arc::new(ActorState { acks: vec![Id::from(1), Id::from(2)]}),
Arc::new(ActorState { acks: vec![]}),
Arc::new(ActorState { acks: vec![Id::from(1)]}),
],
network: Network::new_unordered_duplicating([
// Id(0) sends peers "Write(X)" and receives two acks.
Envelope { src: 0.into(), dst: 1.into(), msg: "Write(X)" },
Envelope { src: 0.into(), dst: 2.into(), msg: "Write(X)" },
Envelope { src: 1.into(), dst: 0.into(), msg: "Ack(X)" },
Envelope { src: 2.into(), dst: 0.into(), msg: "Ack(X)" },
// Id(2) sends peers "Write(Y)" and receives one ack.
Envelope { src: 2.into(), dst: 0.into(), msg: "Write(Y)" },
Envelope { src: 2.into(), dst: 1.into(), msg: "Write(Y)" },
Envelope { src: 1.into(), dst: 2.into(), msg: "Ack(Y)" },
]),
timers_set: vec![non_empty_timers.clone(), empty_timers.clone(), non_empty_timers.clone()],
crashed: vec![false; 3],
history: History {
send_sequence: vec![
// Id(0) sends two writes
0.into(), 0.into(),
// Id(2) sends two writes
2.into(), 2.into(),
// Id(2) gets two replies (although only one was delivered)
1.into(), 0.into(),
// Id(0) gets two replies
1.into(), 2.into(),
],
},
};
let representative_state = state.representative();
// The chosen rewrite plan is:
// - reindexing: x[0] <- x[1], x[1] <- x[2], x[2] <- x[0]
// - rewriting: Id(0) -> Id(2), Id(1) -> Id(0), Id(2) -> Id(1)
#[rustfmt::skip]
assert_eq!(representative_state, ActorModelState {
actor_states: vec![
Arc::new(ActorState { acks: vec![]}),
Arc::new(ActorState { acks: vec![Id::from(0)]}),
Arc::new(ActorState { acks: vec![Id::from(0), Id::from(1)]}),
],
network: Network::new_unordered_duplicating([
// Id(2) sends peers "Write(X)" and receives two acks.
Envelope { src: 2.into(), dst: 0.into(), msg: "Write(X)" },
Envelope { src: 2.into(), dst: 1.into(), msg: "Write(X)" },
Envelope { src: 0.into(), dst: 2.into(), msg: "Ack(X)" },
Envelope { src: 1.into(), dst: 2.into(), msg: "Ack(X)" },
// Id(1) sends peers "Write(Y)" and receives one ack.
Envelope { src: 1.into(), dst: 2.into(), msg: "Write(Y)" },
Envelope { src: 1.into(), dst: 0.into(), msg: "Write(Y)" },
Envelope { src: 0.into(), dst: 1.into(), msg: "Ack(Y)" },
]),
timers_set: vec![empty_timers, non_empty_timers.clone(), non_empty_timers.clone()],
crashed: vec![false; 3],
history: History {
send_sequence: vec![
// Id(2) sends two writes
2.into(), 2.into(),
// Id(1) sends two writes
1.into(), 1.into(),
// Id(1) gets two replies (although only one was delivered)
0.into(), 2.into(),
// Id(2) gets two replies
0.into(), 1.into(),
],
},
});
}
// Minimal actor type used only to instantiate `ActorModelState` above;
// `on_start` is never called in these tests.
struct A;
impl Actor for A {
type Msg = &'static str;
type State = ActorState;
type Timer = ();
fn on_start(&self, _id: Id, _o: &mut Out<Self>) -> Self::State {
unimplemented!();
}
}
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
struct ActorState {
acks: Vec<Id>,
}
impl Rewrite<Id> for ActorState {
fn rewrite<S>(&self, plan: &RewritePlan<Id, S>) -> Self {
Self {
acks: self.acks.rewrite(plan),
}
}
}
#[derive(Debug, PartialEq)]
struct History {
send_sequence: Vec<Id>,
}
impl Rewrite<Id> for History {
fn rewrite<S>(&self, plan: &RewritePlan<Id, S>) -> Self {
Self {
send_sequence: self.send_sequence.rewrite(plan),
}
}
}
}
|
//! https://github.com/lumen/otp/tree/lumen/lib/snmp/src/app
use super::*;
// Compile-tests for the modules under lib/snmp/src/app; the `snmp_app` test
// additionally pulls in the listed modules it imports.
test_compiles_lumen_otp!(snmp);
test_compiles_lumen_otp!(snmp_app imports "lib/kernel/src/application", "lib/snmp/src/app/snmp_app_sup", "lib/kernel/src/error_logger", "lib/stdlib/src/lists");
test_compiles_lumen_otp!(snmp_app_sup);
/// Include paths for this suite: the parent suite's includes plus the kernel
/// sources and the snmp misc directory.
fn includes() -> Vec<&'static str> {
    let mut paths = super::includes();
    paths.push("lib/kernel/src");
    paths.push("lib/snmp/src/misc");
    paths
}
/// Directory of these sources: the parent suite's directory joined with `app`.
fn relative_directory_path() -> PathBuf {
super::relative_directory_path().join("app")
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::sync::Arc;
use common_catalog::table::Table;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_meta_app::schema::TableMeta;
use common_storages_view::view_table::ViewTable;
use common_storages_view::view_table::QUERY;
/// The `information_schema.statistics` table, exposed as a VIEW.
pub struct StatisticsTable {}
impl StatisticsTable {
    /// Build the table as a VIEW over a query that projects NULL for every
    /// standard `statistics` column, since no index statistics are tracked.
    pub fn create(table_id: u64) -> Arc<dyn Table> {
        // Standard columns of the `statistics` schema, in order.
        const COLUMNS: [&str; 16] = [
            "table_catalog",
            "table_schema",
            "table_name",
            "non_unique",
            "index_schema",
            "index_name",
            "seq_in_index",
            "column_name",
            "collation",
            "cardinality",
            "sub_part",
            "packed",
            "nullable",
            "index_type",
            "comment",
            "index_comment",
        ];
        let projections: Vec<String> = COLUMNS
            .iter()
            .map(|name| format!("NULL as {}", name))
            .collect();
        let query = format!("SELECT {}", projections.join(", "));
        let mut options = BTreeMap::new();
        options.insert(QUERY.to_string(), query);
        let table_info = TableInfo {
            desc: "'information_schema'.'statistics'".to_string(),
            name: "statistics".to_string(),
            ident: TableIdent::new(table_id, 0),
            meta: TableMeta {
                options,
                engine: "VIEW".to_string(),
                ..Default::default()
            },
            ..Default::default()
        };
        ViewTable::create(table_info)
    }
}
|
use x86_64::{instructions::port::Port, structures::idt::InterruptStackFrame};
/// Driver for the 8042 PS/2 controller, accessed through its two I/O ports.
struct Driver {
/// Port 0x60: data bytes read from / written to the controller.
data_port: Port<u8>,
/// Port 0x64: read status byte, write command byte.
status_command_port: Port<u8>,
}
impl Driver {
    /// Creates a driver bound to the standard PS/2 controller ports:
    /// 0x60 (data) and 0x64 (status/command).
    const fn new() -> Self {
        Self {
            data_port: Port::new(0x60),
            status_command_port: Port::new(0x64),
        }
    }
    /// Follows the initialization sequence from [the osdev wiki](https://wiki.osdev.org/%228042%22_PS/2_Controller#Initialising_the_PS.2F2_Controller),
    /// skipping some steps.
    ///
    /// # Safety
    ///
    /// Performs raw port I/O; must not race with any other access to the
    /// PS/2 controller.
    unsafe fn initialize(&mut self) {
        // Disable devices to prevent incoming data from messing up the initialization sequence
        self.status_command_port.write(0xAD);
        self.status_command_port.write(0xA7);
        // Flush the data buffer: keep reading while bit 0 of the status byte
        // (output buffer full) is set, so no stale bytes survive.
        // (Previously only a single byte was drained.)
        while self.status_command_port.read() & 1 != 0 {
            self.data_port.read();
        }
        // Disable all IRQs and enable translation. The original code issued
        // command 0x20 manually, read the response, and then immediately
        // shadowed it with another `get_config()` call — requesting the
        // config byte twice and discarding the first response. A single
        // `get_config()` is sufficient.
        let mut config = self.get_config();
        config &= !0b00000011; // Clears bits 0 and 1, which are first/second port IRQs enable
        config |= 1 << 6; // Translation from scancode set 2 to set 1
        self.set_config(config);
        // Issue self test. 0x55 is success.
        self.status_command_port.write(0xAA);
        if self.read_data() != 0x55 {
            panic!("PS/2 controller failed self test");
        }
        // Test first port. 0x00 is success.
        self.status_command_port.write(0xAB);
        let response = self.read_data();
        if response != 0x00 {
            panic!(
                "PS/2 controller port failed test: response was {:#02X}",
                response
            );
        }
        // Set IRQ1 handler
        crate::idt::register_irq(0x20 + 1, default_handler);
        // Enable IRQ1 in the PIC
        crate::pic::enable_interrupt(1);
        // Enable first port interrupt (IRQ1)
        let mut config = self.get_config();
        config |= 0b1; // Sets bit 0, which is first port IRQ enable
        self.set_config(config);
        // Enable first port
        self.send_command(0xAE);
    }
    /// Reads the controller configuration byte (command 0x20).
    unsafe fn get_config(&mut self) -> u8 {
        self.status_command_port.write(0x20);
        self.read_data()
    }
    /// Writes the controller configuration byte (command 0x60).
    unsafe fn set_config(&mut self, config: u8) {
        self.status_command_port.write(0x60);
        self.write_data(config);
    }
    /// Blocks until the output buffer is full, then reads one data byte.
    unsafe fn read_data(&mut self) -> u8 {
        // While bit 0 (output buffer full) is not set, wait
        while self.status_command_port.read() & 1 == 0 {}
        self.data_port.read()
    }
    /// Blocks until the input buffer is empty, then writes one data byte.
    unsafe fn write_data(&mut self, data: u8) {
        // While bit 1 (input buffer full) is set, wait
        while self.status_command_port.read() & 0b10 != 0 {}
        self.data_port.write(data);
    }
    /// Sends a single command byte to the controller.
    unsafe fn send_command(&mut self, command: u8) {
        self.status_command_port.write(command);
    }
}
// Global driver instance, bound to the standard ports 0x60/0x64.
// NOTE(review): `static mut` access is unsynchronized; callers are expected
// to uphold the exclusivity documented on `initialize` — confirm no
// interrupt-vs-thread race is possible here.
static mut DRIVER: Driver = Driver {
data_port: Port::new(0x60),
status_command_port: Port::new(0x64),
};
/// Initializes the PS/2 controller.
///
/// # Safety
///
/// This should not be called if another call to this function has not yet returned.
///
/// The modules `printer` and `pic` must be initialized before this function is called.
pub unsafe fn initialize() {
DRIVER.initialize();
}
/// Writes a byte to the PS/2 data port, busy-waiting until the controller's
/// input buffer is empty.
///
/// # Safety
///
/// Accesses the global `DRIVER`; must not race with other users of it.
pub unsafe fn send_byte(byte: u8) {
    DRIVER.write_data(byte);
}
/// Reads a byte from the PS/2 data port, busy-waiting until one is available.
///
/// # Safety
///
/// Accesses the global `DRIVER`; must not race with other users of it.
pub unsafe fn get_byte() -> u8 {
    DRIVER.read_data()
}
/// Default IRQ1 handler: drains the pending scancode (so the controller can
/// raise further interrupts) and acknowledges the IRQ at the PIC.
extern "x86-interrupt" fn default_handler(_stack_frame: InterruptStackFrame) {
    println!("Default handler");
    // SAFETY: reading the data port and signalling EOI are the required
    // teardown for this interrupt; the handler does not re-enter itself.
    unsafe {
        DRIVER.read_data();
        crate::pic::send_eoi(1);
    }
}
|
use kv::Error as KVError;
use magic_crypt::MagicCryptError as MagicError;
use otp::Error as OTPStringError;
use std::error::Error as StdError;
use std::fmt::{
Display,
Formatter,
Result as FmtResult,
};
use std::io::Error as IoError;
use std::result::Result as StdResult;
/// Error type of the Notp project.
#[derive(Debug)]
pub(crate) enum NotpError {
    /// Standard error that is used widely in this project. As long as the
    /// error is not related to the other types, Generic would be the
    /// choice of error.
    Generic(String),
    /// I/O related errors. Most of the time, creating and deleting folders
    /// will return this error.
    Io(IoError),
    /// Key Value error. It is related to the KV Store. It is going to be
    /// deprecated or generalized with some sort of trait.
    Kv(KVError),
    /// Encryption related error. More specifically magic-crypt related errors.
    McError(MagicError),
    /// Errors that are thrown while generating OTP codes.
    OTPStringError(OTPStringError),
}
use NotpError::*;
impl Display for NotpError {
fn fmt(
&self,
f: &mut Formatter<'_>,
) -> FmtResult {
match self {
Generic(ref string) => write!(f, "Generic error: {}", string),
Kv(ref err) => write!(f, "SecretStore error: {}", err),
Io(ref err) => write!(f, "I/O error: {}", err),
OTPStringError(ref err) => write!(f, "OTP Error: {}", err),
McError(ref err) => {
write!(f, "Encryption/Decryption error: {}", err)
}
}
}
}
impl StdError for NotpError {
    /// Exposes the wrapped lower-level error; `Generic` carries no source.
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        match self {
            Generic(_) => None,
            Io(source) => Some(source),
            Kv(source) => Some(source),
            McError(source) => Some(source),
            OTPStringError(source) => Some(source),
        }
    }
}
impl From<KVError> for NotpError {
fn from(err: KVError) -> Self {
Kv(err)
}
}
impl From<IoError> for NotpError {
fn from(err: IoError) -> Self {
Io(err)
}
}
impl From<MagicError> for NotpError {
fn from(err: MagicError) -> Self {
McError(err)
}
}
impl From<String> for NotpError {
fn from(err: String) -> Self {
Generic(err)
}
}
impl From<OTPStringError> for NotpError {
fn from(err: OTPStringError) -> Self {
OTPStringError(err)
}
}
/// Convenience result alias: `Result<T, NotpError>`.
pub(crate) type NotpResult<T> = StdResult<T, NotpError>;
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
#[cfg(feature = "Security_Cryptography_Certificates")]
pub mod Certificates;
#[cfg(feature = "Security_Cryptography_Core")]
pub mod Core;
#[cfg(feature = "Security_Cryptography_DataProtection")]
pub mod DataProtection;
// Generated WinRT enum: modeled as a transparent newtype over i32 so unknown
// values still round-trip across the ABI.
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct BinaryStringEncoding(pub i32);
impl BinaryStringEncoding {
    pub const Utf8: BinaryStringEncoding = BinaryStringEncoding(0i32);
    pub const Utf16LE: BinaryStringEncoding = BinaryStringEncoding(1i32);
    pub const Utf16BE: BinaryStringEncoding = BinaryStringEncoding(2i32);
}
impl ::core::convert::From<i32> for BinaryStringEncoding {
fn from(value: i32) -> Self {
Self(value)
}
}
// SAFETY: `repr(transparent)` over i32 makes the type ABI-identical to i32.
unsafe impl ::windows::core::Abi for BinaryStringEncoding {
    type Abi = Self;
}
// WinRT type signature string, used when computing parameterized-interface GUIDs.
unsafe impl ::windows::core::RuntimeType for BinaryStringEncoding {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.Security.Cryptography.BinaryStringEncoding;i4)");
}
// Enums are their own default ABI type (no nullable wrapper needed).
impl ::windows::core::DefaultType for BinaryStringEncoding {
    type DefaultType = Self;
}
/// Static-only WinRT class `Windows.Security.Cryptography.CryptographicBuffer`;
/// all functionality is exposed via the associated functions in the impl below.
pub struct CryptographicBuffer {}
// Generated static-method wrappers: every call obtains the cached
// `ICryptographicBufferStatics` activation factory and dispatches through a
// raw vtable slot (the `.6`..`.16` indices below match the field order of
// `ICryptographicBufferStatics_abi`).
impl CryptographicBuffer {
    // Slot 6: compare two buffers for equality.
    #[cfg(feature = "Storage_Streams")]
    pub fn Compare<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::Streams::IBuffer>, Param1: ::windows::core::IntoParam<'a, super::super::Storage::Streams::IBuffer>>(object1: Param0, object2: Param1) -> ::windows::core::Result<bool> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: bool = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), object1.into_param().abi(), object2.into_param().abi(), &mut result__).from_abi::<bool>(result__)
        })
    }
    // Slot 7: buffer of `length` random bytes.
    #[cfg(feature = "Storage_Streams")]
    pub fn GenerateRandom(length: u32) -> ::windows::core::Result<super::super::Storage::Streams::IBuffer> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), length, &mut result__).from_abi::<super::super::Storage::Streams::IBuffer>(result__)
        })
    }
    // Slot 8: a single random u32.
    pub fn GenerateRandomNumber() -> ::windows::core::Result<u32> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: u32 = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<u32>(result__)
        })
    }
    // Slot 9: copy a byte slice into a new IBuffer.
    #[cfg(feature = "Storage_Streams")]
    pub fn CreateFromByteArray(value: &[<u8 as ::windows::core::DefaultType>::DefaultType]) -> ::windows::core::Result<super::super::Storage::Streams::IBuffer> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), value.len() as u32, ::core::mem::transmute(value.as_ptr()), &mut result__).from_abi::<super::super::Storage::Streams::IBuffer>(result__)
        })
    }
    // Slot 10: copy an IBuffer's contents into a caller-provided WinRT array.
    #[cfg(feature = "Storage_Streams")]
    pub fn CopyToByteArray<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::Streams::IBuffer>>(buffer: Param0, value: &mut ::windows::core::Array<u8>) -> ::windows::core::Result<()> {
        Self::ICryptographicBufferStatics(|this| unsafe { (::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), buffer.into_param().abi(), value.set_abi_len(), value as *mut _ as _).ok() })
    }
    // Slots 11/12: hex string decode/encode.
    #[cfg(feature = "Storage_Streams")]
    pub fn DecodeFromHexString<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(value: Param0) -> ::windows::core::Result<super::super::Storage::Streams::IBuffer> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).11)(::core::mem::transmute_copy(this), value.into_param().abi(), &mut result__).from_abi::<super::super::Storage::Streams::IBuffer>(result__)
        })
    }
    #[cfg(feature = "Storage_Streams")]
    pub fn EncodeToHexString<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::Streams::IBuffer>>(buffer: Param0) -> ::windows::core::Result<::windows::core::HSTRING> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).12)(::core::mem::transmute_copy(this), buffer.into_param().abi(), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        })
    }
    // Slots 13/14: base64 string decode/encode.
    #[cfg(feature = "Storage_Streams")]
    pub fn DecodeFromBase64String<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(value: Param0) -> ::windows::core::Result<super::super::Storage::Streams::IBuffer> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).13)(::core::mem::transmute_copy(this), value.into_param().abi(), &mut result__).from_abi::<super::super::Storage::Streams::IBuffer>(result__)
        })
    }
    #[cfg(feature = "Storage_Streams")]
    pub fn EncodeToBase64String<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::Streams::IBuffer>>(buffer: Param0) -> ::windows::core::Result<::windows::core::HSTRING> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).14)(::core::mem::transmute_copy(this), buffer.into_param().abi(), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        })
    }
    // Slots 15/16: string <-> binary conversion using a `BinaryStringEncoding`.
    #[cfg(feature = "Storage_Streams")]
    pub fn ConvertStringToBinary<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(value: Param0, encoding: BinaryStringEncoding) -> ::windows::core::Result<super::super::Storage::Streams::IBuffer> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).15)(::core::mem::transmute_copy(this), value.into_param().abi(), encoding, &mut result__).from_abi::<super::super::Storage::Streams::IBuffer>(result__)
        })
    }
    #[cfg(feature = "Storage_Streams")]
    pub fn ConvertBinaryToString<'a, Param1: ::windows::core::IntoParam<'a, super::super::Storage::Streams::IBuffer>>(encoding: BinaryStringEncoding, buffer: Param1) -> ::windows::core::Result<::windows::core::HSTRING> {
        Self::ICryptographicBufferStatics(|this| unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).16)(::core::mem::transmute_copy(this), encoding, buffer.into_param().abi(), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        })
    }
    // Activation-factory accessor; `static mut` factory caching is the
    // generated pattern for this windows crate version.
    pub fn ICryptographicBufferStatics<R, F: FnOnce(&ICryptographicBufferStatics) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<CryptographicBuffer, ICryptographicBufferStatics> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
// Fully-qualified runtime class name used for factory activation.
impl ::windows::core::RuntimeName for CryptographicBuffer {
    const NAME: &'static str = "Windows.Security.Cryptography.CryptographicBuffer";
}
// Statics interface: a transparent wrapper over an `IInspectable` pointer.
#[repr(transparent)]
#[doc(hidden)]
pub struct ICryptographicBufferStatics(pub ::windows::core::IInspectable);
// SAFETY: ties the interface to its vtable layout and its IID
// {320b7e22-3cb0-4cdf-8663-1d28910065eb}.
unsafe impl ::windows::core::Interface for ICryptographicBufferStatics {
    type Vtable = ICryptographicBufferStatics_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x320b7e22_3cb0_4cdf_8663_1d28910065eb);
}
// Raw vtable layout for `ICryptographicBufferStatics`. Field order is the ABI
// contract; feature-gated slots are padded with a `usize` so later slot
// indices stay stable when "Storage_Streams" is disabled.
#[repr(C)]
#[doc(hidden)]
pub struct ICryptographicBufferStatics_abi(
    // Slots 0-2: IUnknown (QueryInterface, AddRef, Release).
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    // Slots 3-5: IInspectable (GetIids, GetRuntimeClassName, GetTrustLevel).
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // Slot 6: Compare.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, object1: ::windows::core::RawPtr, object2: ::windows::core::RawPtr, result__: *mut bool) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 7: GenerateRandom.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, length: u32, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 8: GenerateRandomNumber (not feature-gated).
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut u32) -> ::windows::core::HRESULT,
    // Slot 9: CreateFromByteArray.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value_array_size: u32, value: *const u8, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 10: CopyToByteArray.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, buffer: ::windows::core::RawPtr, value_array_size: *mut u32, value: *mut *mut u8) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 11: DecodeFromHexString.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 12: EncodeToHexString.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, buffer: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 13: DecodeFromBase64String.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 14: EncodeToBase64String.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, buffer: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 15: ConvertStringToBinary.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, encoding: BinaryStringEncoding, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
    // Slot 16: ConvertBinaryToString.
    #[cfg(feature = "Storage_Streams")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, encoding: BinaryStringEncoding, buffer: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Storage_Streams"))] usize,
);
|
use tokio::io::AsyncRead;
// Import multer types.
use multer::Multipart;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Obtain an `AsyncRead` plus its multipart boundary (normally these would
    // come from an incoming request body).
    let (reader, boundary) = get_async_reader_from_somewhere().await;
    // Wrap the reader so we can walk the multipart fields.
    let mut multipart = Multipart::with_reader(reader, boundary);
    // `next_field()` yields each field in order until the stream is exhausted.
    while let Some(mut field) = multipart.next_field().await? {
        // Field name and (optional) filename from the Content-Disposition header.
        println!(
            "Name: {:?}, File Name: {:?}",
            field.name(),
            field.file_name()
        );
        // Consume the field chunk by chunk, tallying the total byte count.
        let mut total_bytes = 0;
        while let Some(chunk) = field.chunk().await? {
            total_bytes += chunk.len();
        }
        println!("Field Bytes Length: {:?}", total_bytes);
    }
    Ok(())
}
// Produces a canned multipart body and its boundary, standing in for a real
// request body.
async fn get_async_reader_from_somewhere() -> (impl AsyncRead, &'static str) {
    let boundary = "X-BOUNDARY";
    let body = "--X-BOUNDARY\r\nContent-Disposition: form-data; name=\"my_text_field\"\r\n\r\nabcd\r\n--X-BOUNDARY\r\nContent-Disposition: form-data; name=\"my_file_field\"; filename=\"a-text-file.txt\"\r\nContent-Type: text/plain\r\n\r\nHello world\nHello\r\nWorld\rAgain\r\n--X-BOUNDARY--\r\n";
    (body.as_bytes(), boundary)
}
|
//! <https://github.com/EOSIO/eosio.cdt/blob/796ff8bee9a0fc864f665a0a4d018e0ff18ac383/libraries/eosiolib/contracts/eosio/producer_schedule.hpp#L54-L69>
use crate::{AccountName, NumBytes, PublicKey, Read, Write};
use alloc::vec::Vec;
/// Maps producer with its signing key, used for producer schedule
/// <https://github.com/EOSIO/eosio.cdt/blob/796ff8bee9a0fc864f665a0a4d018e0ff18ac383/libraries/eosiolib/contracts/eosio/producer_schedule.hpp#L15-L45>
// NOTE(review): field order appears to drive the Read/Write/NumBytes derives,
// so it must match the C++ layout — confirm before reordering.
#[derive(Read, Write, NumBytes, Clone, Default, Debug)]
#[eosio(crate_path = "crate::bytes")]
pub struct ProducerKey {
    /// Name of the producer
    pub producer_name: AccountName,
    /// Block signing key used by this producer
    pub block_signing_key: PublicKey,
}
/// Defines both the order, account name, and signing keys of the active set
/// of producers.
#[derive(Read, Write, NumBytes, Clone, Default, Debug)]
#[eosio(crate_path = "crate::bytes")]
pub struct ProducerSchedule {
    /// Version number of the schedule. It is a sequentially incrementing
    /// version number.
    pub version: u32,
    /// List of producers for this schedule, including their signing keys
    pub producers: Vec<ProducerKey>,
}
/// Pairs a public key with an integer weight.
#[derive(Read, Write, NumBytes, Clone, Default, Debug)]
#[eosio(crate_path = "crate::bytes")]
pub struct KeyWeight {
    /// public key used in a weighted threshold multi-sig authority
    pub key: PublicKey,
    /// weight associated with a signature from the private key associated with the accompanying public key
    // NOTE(review): eosio.cdt declares key_weight.weight as uint16_t — confirm
    // that serializing this as u64 matches the intended wire format.
    pub weight: u64,
}
impl From<PublicKey> for KeyWeight {
    /// Wraps a key with the default weight of 1.
    // `#[inline]` added for consistency with the sibling
    // `From<PublicKey> for BlockSigningAuthority` impl below.
    #[inline]
    fn from(key: PublicKey) -> Self {
        Self { key, weight: 1 }
    }
}
/// Block signing authority version 0:
/// this authority allows for a weighted threshold multi-sig per-producer.
#[derive(Read, Write, NumBytes, Clone, Default, Debug)]
#[eosio(crate_path = "crate::bytes")]
pub struct BlockSigningAuthority {
    /// minimum threshold of accumulated weights from component keys that satisfies this authority
    pub threshold: u32,
    /// component keys and their associated weights
    pub keys: Vec<KeyWeight>,
}
impl From<PublicKey> for BlockSigningAuthority {
#[inline]
fn from(key: PublicKey) -> Self {
Self {
threshold: 1,
keys: vec![key.into()],
}
}
}
/// Maps a producer to its block signing authority, used for producer schedule.
#[derive(Read, Write, NumBytes, Clone, Default, Debug)]
#[eosio(crate_path = "crate::bytes")]
pub struct ProducerAuthority {
    /// Name of the producer
    pub producer_name: AccountName,
    /// The block signing authority used by this producer
    pub authority: BlockSigningAuthority,
}
|
use coi::{container, Inject};
// Test fixture: an injectable type holding a plain (non-Inject, non-Arc) field.
#[derive(Inject)]
#[coi(provides Impl1 with Impl1::new())]
struct Impl1 {
    // Plain `usize` field — exactly what the test below exercises.
    num: usize,
}
impl Impl1 {
fn new() -> Self {
Self { num: 0 }
}
}
#[test]
fn can_inject_struct_with_non_inject_non_arc_field() {
    // Register the generated provider under the key "impl1"...
    let container = container! {
        impl1 => Impl1Provider,
    };
    // ...then resolve it and touch the plain field to prove injection worked.
    let impl1 = container.resolve::<Impl1>("impl1").expect("Should exist");
    let _ = impl1.num;
}
|
use rocket;
use rocket_cors;
use crate::connection;
use crate::authorize;
use crate::conditions;
//use rocket::{Request, State, Data, Response};
use rocket::http::Method;
//use rocket::{get, routes};
use rocket::routes;
use rocket_cors::{AllowedHeaders, AllowedOrigins, Error};
pub fn create_routes() -> Result<(), Error>{
let allowed_origins = AllowedOrigins::all();
let cors = rocket_cors::CorsOptions {
allowed_origins,
allowed_methods: vec![Method::Get,Method::Put,Method::Options, Method::Post, Method::Delete].into_iter().map(From::from).collect(),
allowed_headers: AllowedHeaders::all(),
allow_credentials: true,
..Default::default()
}
.to_cors()?;
rocket::ignite()
.manage(connection::init_pool())
.mount("/authorize",
routes![authorize::handler::registeruser,
authorize::handler::all,
authorize::handler::loginuser,],)
.mount("/conditions",
routes![conditions::handler::conditions,],)
.register(catchers![authorize::handler::not_found])
.attach(cors)
.launch();
Ok(())
}
|
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::errors::{Error, Result};
use crate::job;
use crate::job::TargetProcess;
use crate::util::slurp_string;
use std::path::PathBuf;
use std::{collections::HashMap, fs, path::Path};
/// Shape of a test's `after.json`: a process to run once the test finishes.
#[derive(Deserialize, Debug)]
pub(crate) struct After {
    // NOTE(review): `run` is deserialized but not read by `spawn` — confirm
    // whether it is still needed.
    run: String,
    // Executable name, resolved on the PATH via `job::which`.
    cmd: String,
    // Arguments passed verbatim to the process.
    args: Vec<String>,
    // Extra environment variables; defaults to empty when absent.
    #[serde(default = "Default::default")]
    env: HashMap<String, String>,
}
impl After {
    /// Resolves `cmd` on the PATH, runs it to completion with the configured
    /// args/env, and returns the finished process handle for log collection.
    pub(crate) fn spawn(&self, _base: &Path) -> Result<Option<TargetProcess>> {
        let cmd = job::which(&self.cmd)?;
        // Capture stderr too so it can be tailed into the log files later.
        let mut process = job::TargetProcess::new_with_stderr(&cmd, &self.args, &self.env)?;
        // Block until the after-process exits before handing back the handle.
        process.wait_with_output()?;
        Ok(Some(process))
    }
}
pub(crate) fn load_after(path: &Path) -> Result<After> {
let mut tags_data = slurp_string(path)?;
match simd_json::from_str(&mut tags_data) {
Ok(s) => Ok(s),
Err(_not_well_formed) => Err(Error::from(format!(
"Unable to load `after.json` from path: {}",
path.display()
))),
}
}
/// Drives the optional after-process for a single test directory.
pub(crate) struct AfterController {
    // Root directory of the test (where `after.json` and the log files live).
    base: PathBuf,
}
impl AfterController {
    /// Creates a controller rooted at the given test directory.
    pub(crate) fn new(base: &Path) -> Self {
        Self {
            base: base.to_path_buf(),
        }
    }
    /// Runs the optional `after.json` process for this test directory and
    /// tails its output into `after.out.log` / `after.err.log`.
    pub(crate) fn spawn(&mut self) -> Result<()> {
        let root = &self.base;
        let after_path = root.join("after.json");
        // `after.json` is optional: silently skip when absent.
        // (Fixed a clippy::needless_borrow: `(&after_path).is_file()`.)
        if after_path.is_file() {
            let after_json = load_after(&after_path)?;
            let after_process = after_json.spawn(root)?;
            if let Some(mut process) = after_process {
                let after_out_file = root.join("after.out.log");
                let after_err_file = root.join("after.err.log");
                // Tail on a separate thread since `tail` blocks until the
                // process output is fully drained.
                let tail_handle = std::thread::spawn(move || {
                    if let Err(e) = process.tail(&after_out_file, &after_err_file) {
                        eprintln!("failed to tail tremor process: {}", e);
                    }
                });
                if tail_handle.join().is_err() {
                    return Err("Failed to join test after thread/process error".into());
                }
            }
        }
        Ok(())
    }
}
/// Collects the after-process stdout/stderr logs (when present) into the
/// evidence map under the keys "after: stdout" / "after: stderr".
pub(crate) fn update_evidence(root: &Path, evidence: &mut HashMap<String, String>) -> Result<()> {
    let after_out_file = root.join("after.out.log");
    let after_err_file = root.join("after.err.log");
    if let Ok(meta) = fs::metadata(&after_out_file) {
        if meta.is_file() {
            evidence.insert("after: stdout".to_string(), slurp_string(&after_out_file)?);
        }
    }
    if let Ok(meta) = fs::metadata(&after_err_file) {
        if meta.is_file() {
            // Fixed copy-paste bug: this previously stored the after-process
            // stderr under the key "before: stderr".
            evidence.insert("after: stderr".to_string(), slurp_string(&after_err_file)?);
        }
    }
    Ok(())
}
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(label_break_value)]
// These are forbidden occurences of label-break-value
fn labeled_unsafe() { // deliberate parse error: labels are rejected on `unsafe` blocks
    unsafe 'b: {} //~ ERROR expected one of `extern`, `fn`, or `{`
}
fn labeled_if() { // deliberate parse error: labels are rejected on `if` blocks
    if true 'b: {} //~ ERROR expected `{`, found `'b`
}
fn labeled_else() { // deliberate parse error: labels are rejected on `else` blocks
    if true {} else 'b: {} //~ ERROR expected `{`, found `'b`
}
fn labeled_match() { // deliberate parse error: labels are rejected on `match` scrutinees
    match false 'b: {} //~ ERROR expected one of `.`, `?`, `{`, or an operator
}
pub fn main() {} // intentionally empty; this file only exercises parse errors
|
use crate::ast;
use crate::parsing::{LexerMode, ResolveError, ResolveErrorKind};
use crate::shared::Description;
use crate::Spanned;
use runestick::SpannedError;
use thiserror::Error;
// The project `error!` macro appears to generate the ParseError wrapper
// (span + kind plus `Self::new` and the listed `From` impls) — see the usages
// below; confirm against the macro definition.
error! {
    /// An error raised during parsing.
    #[derive(Debug, Clone)]
    pub struct ParseError {
        kind: ParseErrorKind,
    }
    impl From<ResolveError>;
}
impl ParseError {
    /// Construct an expectation error from what was actually seen and what
    /// was expected.
    pub(crate) fn expected<A, E>(actual: A, expected: E) -> Self
    where
        A: Description + Spanned,
        E: Description,
    {
        let span = actual.span();
        let kind = ParseErrorKind::Expected {
            actual: actual.description(),
            expected: expected.description(),
        };
        Self::new(span, kind)
    }
    /// Construct an unsupported-construct error anchored at `actual`'s span.
    pub(crate) fn unsupported<T, E>(actual: T, what: E) -> Self
    where
        T: Spanned,
        E: Description,
    {
        let span = actual.span();
        let kind = ParseErrorKind::Unsupported {
            what: what.description(),
        };
        Self::new(span, kind)
    }
}
impl From<ParseError> for SpannedError {
    fn from(error: ParseError) -> Self {
        // `kind` appears to be boxed by the `error!` macro; deref-copy it out
        // (`ParseErrorKind` is `Copy`).
        SpannedError::new(error.span, *error.kind)
    }
}
/// Error when parsing.
// The `#[error("...")]` strings double as the user-facing messages rendered
// by `thiserror`'s Display impl — treat them as part of the public behavior.
#[derive(Debug, Clone, Copy, Error)]
#[allow(missing_docs)]
pub enum ParseErrorKind {
    #[error("{message}")]
    Custom { message: &'static str },
    #[error("{error}")]
    ResolveError { error: ResolveErrorKind },
    #[error("expected end of file, but got `{actual}`")]
    ExpectedEof { actual: ast::Kind },
    #[error("unexpected end of file")]
    UnexpectedEof,
    #[error("bad lexer mode `{actual}`, expected `{expected}`")]
    BadLexerMode {
        actual: LexerMode,
        expected: LexerMode,
    },
    #[error("expected {expected}, but got `{actual}`")]
    Expected {
        actual: &'static str,
        expected: &'static str,
    },
    #[error("{what} is not supported")]
    Unsupported { what: &'static str },
    #[error("expected escape sequence")]
    ExpectedEscape,
    #[error("unterminated string literal")]
    UnterminatedStrLit,
    #[error("unterminated byte string literal")]
    UnterminatedByteStrLit,
    #[error("unterminated character literal")]
    UnterminatedCharLit,
    #[error("unterminated byte literal")]
    UnterminatedByteLit,
    #[error("expected character literal to be closed")]
    ExpectedCharClose,
    #[error("expected label or character")]
    ExpectedCharOrLabel,
    #[error("expected byte literal to be closed")]
    ExpectedByteClose,
    #[error("unexpected character `{c}`")]
    UnexpectedChar { c: char },
    #[error("group required in expression to determine precedence")]
    PrecedenceGroupRequired,
    #[error("number literal out of bounds `-9223372036854775808` to `9223372036854775807`")]
    BadNumberOutOfBounds,
    #[error("unsupported field access")]
    BadFieldAccess,
    #[error("expected close delimiter `{expected}`, but got `{actual}`")]
    ExpectedMacroCloseDelimiter {
        expected: ast::Kind,
        actual: ast::Kind,
    },
    #[error("bad number literal")]
    BadNumber,
    #[error("can only specify one attribute named `{name}`")]
    MultipleMatchingAttributes { name: &'static str },
}
|
use procon_reader::ProconReader;
fn main() {
    // Read two reals `a` and `b`, then print a*b/100 (i.e. b percent of a).
    let stdin = std::io::stdin();
    let mut reader = ProconReader::new(stdin.lock());
    let a: f64 = reader.get();
    let b: f64 = reader.get();
    println!("{}", a * b / 100.0);
}
|
use crate::{
core::{
channel_list::{Channel, CHANNEL_HALF},
error::Error,
frame_buffer::Slice,
refptr::{OpaquePtr, Ref, RefMut},
},
deep::deep_image_level::{DeepImageLevelRef, DeepImageLevelRefMut},
};
use openexr_sys as sys;
/// Crate-local result alias defaulting the error type to [`Error`].
type Result<T, E = Error> = std::result::Result<T, E>;
/// Holds the per-pixel number of samples in a deep image.
///
/// Thin transparent wrapper over a raw pointer to the C++
/// `Imf::SampleCountChannel`.
#[repr(transparent)]
pub struct SampleCountChannel(pub(crate) *mut sys::Imf_SampleCountChannel_t);
// SAFETY: ties the wrapper to its FFI pointee so the Ref/RefMut helpers can
// reinterpret pointers between the two.
unsafe impl OpaquePtr for SampleCountChannel {
    type SysPointee = sys::Imf_SampleCountChannel_t;
    type Pointee = SampleCountChannel;
}
// Borrowed (shared / mutable) views over a SampleCountChannel owned elsewhere.
pub type SampleCountChannelRef<'a, P = SampleCountChannel> = Ref<'a, P>;
pub type SampleCountChannelRefMut<'a, P = SampleCountChannel> = RefMut<'a, P>;
// All accessors delegate to the C++ `Imf::SampleCountChannel` through the
// `openexr_sys` FFI; results come back through out-parameters.
impl SampleCountChannel {
    /// Get the [`Channel`] description for this channel
    ///
    pub fn channel(&self) -> Channel {
        let mut c = CHANNEL_HALF;
        unsafe {
            sys::Imf_SampleCountChannel_channel(self.0, &mut c);
        }
        c
    }
    /// Get the sampling rate in the `x` direction
    ///
    pub fn x_sampling(&self) -> i32 {
        let mut v = 0;
        unsafe {
            sys::Imf_SampleCountChannel_xSampling(self.0, &mut v);
            v
        }
    }
    /// Get the sampling rate in the `y` direction
    ///
    pub fn y_sampling(&self) -> i32 {
        let mut v = 0;
        unsafe {
            sys::Imf_SampleCountChannel_ySampling(self.0, &mut v);
            v
        }
    }
    /// Is this channel perceptually linear?
    ///
    pub fn p_linear(&self) -> bool {
        let mut v = false;
        unsafe {
            sys::Imf_SampleCountChannel_pLinear(self.0, &mut v);
            v
        }
    }
    /// How many pixels are there in each row of this channel
    ///
    pub fn pixels_per_row(&self) -> i32 {
        let mut v = 0;
        unsafe {
            sys::Imf_SampleCountChannel_pixelsPerRow(self.0, &mut v);
            v
        }
    }
    /// How many pixels are there in each column of this channel
    ///
    pub fn pixels_per_column(&self) -> i32 {
        let mut v = 0;
        unsafe {
            sys::Imf_SampleCountChannel_pixelsPerColumn(self.0, &mut v);
            v
        }
    }
    /// How many pixels are there total in this channel
    ///
    pub fn num_pixels(&self) -> usize {
        let mut v = 0;
        unsafe {
            sys::Imf_SampleCountChannel_numPixels(self.0, &mut v);
            v as usize
        }
    }
    /// Construct a [`Slice`] for this Channel
    ///
    pub fn slice(&self) -> Slice {
        let mut s = sys::Imf_Slice_t::default();
        unsafe {
            sys::Imf_SampleCountChannel_slice(self.0, &mut s);
        }
        Slice(s)
    }
    /// Get the level which this channel is a part of
    ///
    pub fn deep_level(&self) -> DeepImageLevelRef {
        let mut ptr = std::ptr::null();
        unsafe {
            sys::Imf_SampleCountChannel_deepLevel_const(self.0, &mut ptr);
        }
        DeepImageLevelRef::new(ptr)
    }
    /// Get the level which this channel is a part of
    ///
    pub fn deep_level_mut(&mut self) -> DeepImageLevelRefMut {
        let mut ptr = std::ptr::null_mut();
        unsafe {
            sys::Imf_SampleCountChannel_deepLevel(self.0, &mut ptr);
        }
        DeepImageLevelRefMut::new(ptr)
    }
    /// Get a reference to the pixel at `[x, y]`
    ///
    pub fn get(&self, x: i32, y: i32) -> Option<&i32> {
        let mut ptr = std::ptr::null();
        unsafe {
            // A non-OK status from the C API maps to `None`.
            sys::Imf_SampleCountChannel_at(self.0, &mut ptr, x, y)
                .into_result()
                .ok()
                .map(|_| &*(ptr as *const i32))
        }
    }
    /// Get row `r` of the channel as a slice
    ///
    pub fn row(&self, r: i32) -> Option<&[i32]> {
        // NOTE(review): only the upper bound is checked here — a negative `r`
        // is forwarded to the C API; confirm it is rejected there.
        if r >= self.pixels_per_column() {
            None
        } else {
            let mut ptr = std::ptr::null();
            unsafe {
                sys::Imf_SampleCountChannel_row(self.0, &mut ptr, r);
                if ptr.is_null() {
                    None
                } else {
                    Some(std::slice::from_raw_parts(
                        ptr as *const i32,
                        self.pixels_per_row() as usize,
                    ))
                }
            }
        }
    }
}
|
use config::Config;
use errors::*;
use clap::{App, Arg, ArgMatches, SubCommand};
/// Builds the `http` subcommand definition with its `--socket` and
/// `--workers` options.
pub fn setup<'a, 'b>() -> App<'a, 'b> {
    let socket = Arg::with_name("socket")
        .long("socket")
        .value_name("ADDR")
        .help("Changes the socket address")
        .takes_value(true);
    let workers = Arg::with_name("workers")
        .long("workers")
        .value_name("NUMBER")
        .help("Changes the number of worker threads")
        .takes_value(true);
    SubCommand::with_name("http")
        .about("Makes changes to the HTTP configuration")
        .arg(socket)
        .arg(workers)
}
/// Applies the `http` subcommand: loads the config file, overwrites the
/// socket and/or worker count when the corresponding flags are given, and
/// writes the config back.
pub fn call(args: &ArgMatches) -> Result<()> {
    // Load the config
    // NOTE(review): the unwrap assumes clap marks "config" as required — confirm.
    let config_file = args.value_of("config").unwrap();
    let mut config = Config::load(config_file)?;
    // Change socket, if requested
    if let Some(socket_str) = args.value_of("socket") {
        let socket = socket_str.parse()?; // TODO Chain the error!
        config.http.socket = socket;
    }
    // Change number of workers, if requested
    if let Some(workers_str) = args.value_of("workers") {
        let workers = workers_str.parse()?; // TODO Chain the error!
        config.http.workers = workers;
    }
    // Store the config
    config.store(config_file)?;
    Ok(())
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! WatcherConnection handles interaction with directory watchers as described in io.fidl.
use {
fidl_fuchsia_io::{
WatchedEvent, MAX_FILENAME, WATCH_EVENT_EXISTING, WATCH_EVENT_IDLE, WATCH_MASK_EXISTING,
WATCH_MASK_IDLE,
},
fuchsia_async::Channel,
fuchsia_zircon::MessageBuf,
futures::{task::Waker, Poll},
std::iter,
};
/// One directory-watcher client: the WATCH_MASK_* bits it subscribed with and
/// the channel its events are delivered over.
pub struct WatcherConnection {
    // Bitmask of WATCH_MASK_* event classes the watcher asked for.
    mask: u32,
    // Async channel the encoded WatchedEvent messages are written to.
    channel: Channel,
}
impl WatcherConnection {
pub fn new(mask: u32, channel: Channel) -> Self {
WatcherConnection { mask, channel }
}
/// A helper used by other send_event*() methods. Sends a collection of
/// fidl_fuchsia_io::WatchEvent instances over this watcher connection.
fn send_event_structs(
    &self,
    // `dyn` added: bare trait objects are deprecated since the 2018 edition.
    events: &mut dyn Iterator<Item = WatchedEvent>,
) -> Result<(), fidl::Error> {
    // Unfortunately, io.fidl currently does not provide encoding for the watcher events.
    // Seems to be due to
    //
    // https://fuchsia.atlassian.net/browse/ZX-2645
    //
    // As soon as that is fixed we should switch to the generated binding.
    //
    // For now this code duplicates what the C++ version is doing:
    //
    // https://fuchsia.googlesource.com/zircon/+/1dcb46aa1c4001e9d1d68b8ff5d8fae0c00fbb49/system/ulib/fs/watcher.cpp
    //
    // There is no Transaction wrapping the messages, as for the full blown FIDL events.
    let buffer = &mut vec![];
    let (bytes, handles) = (&mut vec![], &mut vec![]);
    for mut event in events {
        // Keep bytes and handles across loop iterations, to reduce reallocations.
        bytes.clear();
        handles.clear();
        fidl::encoding::Encoder::encode(bytes, handles, &mut event)?;
        if !handles.is_empty() {
            panic!("WatchedEvent struct is not expected to contain any handles")
        }
        // Flush before the batched buffer would exceed the maximum FIDL
        // message size.
        if buffer.len() + bytes.len() >= fidl_fuchsia_io::MAX_BUF as usize {
            self.channel
                .write(&*buffer, &mut vec![])
                .map_err(fidl::Error::ServerResponseWrite)?;
            buffer.clear();
        }
        buffer.append(bytes);
    }
    if !buffer.is_empty() {
        self.channel.write(&*buffer, &mut vec![]).map_err(fidl::Error::ServerResponseWrite)?;
    }
    Ok(())
}
/// Constructs and sends a fidl_fuchsia_io::WatchEvent instance over the watcher connection.
///
/// `event` is one of the WATCH_EVENT_* constants, with the values used to populate the `event`
/// field.
pub fn send_event(&self, event: u8, name: &str) -> Result<(), fidl::Error> {
// This assertion is never expected to trigger as the only caller of this interface is the
// [`PseudoDirectory`] instance that is expected to only pass entry names in here. And
// [`add_entry`] will not allow entries longer than [`MAX_FILENAME`].
assert!(
name.len() < MAX_FILENAME as usize,
"name argument should not contain more than {} bytes.\n\
Got: {}\n\
Content: '{}'",
MAX_FILENAME,
name.len(),
name
);
self.send_event_structs(&mut iter::once(WatchedEvent {
event,
len: name.len() as u8,
name: name.as_bytes().to_vec(),
}))
}
/// Constructs and sends a fidl_fuchsia_io::WatchEvent instance over the watcher connection,
/// skipping the operation if the watcher did not request this kind of events to be delivered -
/// filtered by the mask value.
pub fn send_event_check_mask(
&self,
mask: u32,
event: u8,
name: &str,
) -> Result<(), fidl::Error> {
if self.mask & mask == 0 {
return Ok(());
}
self.send_event(event, name)
}
/// Sends one fidl_fuchsia_io::WatchEvent instance of type WATCH_EVENT_EXISTING, for every name
/// in the list. If the watcher has requested this kind of events - similar to to
/// [`send_event_check_mask`] above, but with a predefined mask and event type.
pub fn send_events_existing(
&self,
names: &mut Iterator<Item = &str>,
) -> Result<(), fidl::Error> {
if self.mask & WATCH_MASK_EXISTING == 0 {
return Ok(());
}
self.send_event_structs(&mut names.map(|name| {
// This assertion is never expected to trigger as the only caller of this interface is
// the [`PseudoDirectory`] instance that is expected to only pass entry names in here.
// And [`add_entry`] will not allow entries longer than [`MAX_FILENAME`].
assert!(
name.len() < MAX_FILENAME as usize,
"name argument should not contain more than {} bytes.\n\
Got: {}\n\
Content: '{}'",
MAX_FILENAME,
name.len(),
name
);
WatchedEvent {
event: WATCH_EVENT_EXISTING,
len: name.len() as u8,
name: name.as_bytes().to_vec(),
}
}))
}
/// Sends one instance of fidl_fuchsia_io::WatchEvent of type WATCH_MASK_IDLE. If the watcher
/// has requested this kind of events - similar to to [`send_event_check_mask`] above, but with
/// the predefined mask and event type.
pub fn send_event_idle(&self) -> Result<(), fidl::Error> {
if self.mask & WATCH_MASK_IDLE == 0 {
return Ok(());
}
self.send_event(WATCH_EVENT_IDLE, "")
}
/// Checks if the watcher has closed the connection. And sets the waker to trigger when the
/// connection is closed if it was still opened during the call.
pub fn is_dead(&self, lw: &Waker) -> bool {
let channel = &self.channel;
if channel.is_closed() {
return true;
}
// Make sure we will be notified when the watcher has closed its connected or when any
// message is send.
//
// We are going to close the connection when we receive any message as this is currently an
// error. When we fix ZX-2645 and wrap the watcher connection with FIDL, it would be up to
// the binding code to fail on any unexpected messages. At that point we can switch to
// fuchsia_async::OnSignals and only monitor for the close event.
//
// We rely on [`Channel::recv_from()`] to invoke [`Channel::poll_read()`], which would call
// [`RWHandle::poll_read()`] that would set the signal mask to `READABLE | CLOSE`.
let mut msg = MessageBuf::new();
match channel.recv_from(&mut msg, lw) {
// We are not expecting any messages. Returning true would cause this watcher
// connection to be dropped and closed as a result.
Poll::Ready(_) => true,
// Poll::Pending is actually the only value we are expecting to see from a watcher that
// did not close it's side of the connection. And when the connection is closed, we
// are expecting Poll::Ready(Err(Status::PEER_CLOSED.into_raw())), but that is covered
// by the case above.
Poll::Pending => false,
}
}
}
|
use std::collections::HashSet;
/// Returns the subset of `possible_anagrams` that are anagrams of `word`.
///
/// A candidate counts as an anagram when it has the same byte length, the
/// same multiset of lowercased characters, and is not simply the same word
/// in a different case.
pub fn anagrams_for<'a>(word: &'a str, possible_anagrams: &'a [&str]) -> HashSet<&'a str> {
    let target_sorted = sort_str(word);
    let target_lower = word.to_lowercase();
    let mut found = HashSet::new();
    for &candidate in possible_anagrams {
        // Cheap length check first, then rule out the identical word, then
        // compare sorted character content.
        if candidate.len() == word.len()
            && candidate.to_lowercase() != target_lower
            && sort_str(candidate) == target_sorted
        {
            found.insert(candidate);
        }
    }
    found
}
/// Lowercases `s` and returns its characters in sorted order, so two strings
/// compare equal exactly when they are case-insensitive anagrams.
fn sort_str(s: &str) -> Vec<char> {
    let mut chars: Vec<char> = s.to_lowercase().chars().collect();
    chars.sort_unstable();
    chars
}
use chan;
use clap::ArgMatches;
use std::cmp;
use std::convert::From;
use std::fs::File;
use std::io;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Read;
use std::path::PathBuf;
use std::thread;
use tantivy;
use tantivy::merge_policy::NoMergePolicy;
use tantivy::Index;
use time::PreciseTime;
/// Parses CLI arguments for the indexing command and dispatches to `run_redis`.
///
/// Arguments read:
/// - `index`: path to the index directory (required),
/// - `file`: optional input file; stdin is used when absent,
/// - `nomerge`: disables segment merging,
/// - `num_threads`: worker count, clamped to at least 1,
/// - `memory_size`: total indexer buffer size, split evenly per thread.
///
/// Returns a human-readable error string on failure.
pub fn run_redis_cli(argmatch: &ArgMatches) -> Result<(), String> {
    let index_directory = PathBuf::from(argmatch.value_of("index").unwrap());
    let document_source = argmatch
        .value_of("file")
        .map(|path| DocumentSource::FromFile(PathBuf::from(path)))
        .unwrap_or(DocumentSource::FromPipe);
    let no_merge = argmatch.is_present("nomerge");
    // Clamp to at least one worker thread so the per-thread buffer split
    // below cannot divide by zero.
    let num_threads = value_t!(argmatch, "num_threads", usize)
        .map_err(|_| "Failed to read num_threads argument as an integer.".to_string())?
        .max(1);
    let buffer_size = value_t!(argmatch, "memory_size", usize)
        .map_err(|_| "Failed to read the buffer size argument as an integer.".to_string())?;
    let buffer_size_per_thread = buffer_size / num_threads;
    run_redis(
        index_directory,
        document_source,
        buffer_size_per_thread,
        num_threads,
        no_merge,
    )
    .map_err(|e| format!("Indexing failed : {:?}", e))
}
/// Opens the index at `directory` and starts a pipeline that reads lines from
/// `document_source` and parses them into documents on background threads.
///
/// Pipeline: one reader thread feeds raw lines into `line_sender`; up to
/// `num_threads / 4` (minimum 1) parser threads turn lines into documents and
/// push them into `doc_sender`.
///
/// NOTE(review): `doc_receiver`, `buffer_size_per_thread` and `no_merge` are
/// currently unused — the indexing stage that would consume parsed documents
/// is missing here, so this function returns immediately after spawning the
/// parser threads. Verify against the rest of the file/callers.
fn run_redis(
    directory: PathBuf,
    document_source: DocumentSource,
    buffer_size_per_thread: usize,
    num_threads: usize,
    no_merge: bool,
) -> tantivy::Result<()> {
    let index = Index::open_in_dir(&directory)?;
    let schema = index.schema();
    // Bounded channels provide backpressure between the stages.
    let (line_sender, line_receiver) = chan::sync(10_000);
    let (doc_sender, doc_receiver) = chan::sync(10_000);
    // Reader stage: pump raw lines from the source into the line channel.
    thread::spawn(move || {
        let articles = document_source.read().unwrap();
        for article_line_res in articles.lines() {
            let article_line = article_line_res.unwrap();
            line_sender.send(article_line);
        }
    });
    let num_threads_to_parse_json = cmp::max(1, num_threads / 4);
    info!("Using {} threads to parse json", num_threads_to_parse_json);
    // Parser stage: each worker pulls lines and parses them against the schema.
    for _ in 0..num_threads_to_parse_json {
        let schema_clone = schema.clone();
        let doc_sender_clone = doc_sender.clone();
        let line_receiver_clone = line_receiver.clone();
        thread::spawn(move || {
            for article_line in line_receiver_clone {
                match schema_clone.parse_document(&article_line) {
                    Ok(doc) => {
                        doc_sender_clone.send(doc);
                    }
                    Err(err) => {
                        // Malformed documents are reported and skipped.
                        println!("Failed to add document doc {:?}", err);
                    }
                }
            }
        });
    }
    Ok(())
}
/// Where the documents to index are read from.
enum DocumentSource {
    /// Read from standard input.
    FromPipe,
    /// Read from the given file path.
    FromFile(PathBuf),
}
impl DocumentSource {
    /// Opens the source and returns a buffered reader over its bytes.
    ///
    /// Fails only for the file variant, when the file cannot be opened.
    fn read(&self) -> io::Result<BufReader<Box<dyn Read>>> {
        let raw: Box<dyn Read> = match self {
            DocumentSource::FromPipe => Box::new(io::stdin()),
            DocumentSource::FromFile(filepath) => Box::new(File::open(filepath)?),
        };
        Ok(BufReader::new(raw))
    }
}
|
use crate::engine::Result;
use crate::thread_pool::ThreadPool;
/// Thread pool backed by the `ThreadPool` type from the
/// [`rayon`](https://docs.rs/rayon/1.5.0/rayon/) crate.
pub struct RayonThreadPool {
    /// The underlying rayon pool that executes submitted jobs.
    pool: rayon::ThreadPool,
}
impl ThreadPool for RayonThreadPool {
    /// Builds a rayon pool with `threads` worker threads, converting any
    /// build failure into the crate's error type.
    fn new(threads: u32) -> Result<Self>
    where
        Self: Sized,
    {
        let builder = rayon::ThreadPoolBuilder::new().num_threads(threads as usize);
        let pool = builder.build().map_err(|e| e.to_string())?;
        Ok(Self { pool })
    }
    /// Submits `job` for execution on the pool.
    fn spawn<F>(&self, job: F)
    where
        F: FnOnce() + Send + 'static,
    {
        self.pool.spawn(job)
    }
}
|
use std::collections::{HashMap, HashSet};
use std::env;
use std::fs;
use std::io::{self, BufReader, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::OnceLock;
use anyhow::bail;
use cargo_metadata::{Message, MetadataCommand};
use clap::Args;
use serde::Deserialize;
use walkdir::{DirEntry, WalkDir};
/// Minimal view of rustc's `--print target-spec-json` output: only the
/// LLVM target triple is extracted.
#[derive(Deserialize)]
struct TargetSpec {
    #[serde(rename = "llvm-target")]
    llvm_target: String,
}
/// Build configuration, populated from command-line flags and/or environment
/// variables (many of which are set by cargo-make, per the `CARGO_MAKE_*`
/// defaults below).
#[derive(Args)]
pub struct Config {
    /// The working directory for the build
    #[clap(hide(true), long, env("CARGO_MAKE_WORKING_DIRECTORY"))]
    cwd: Option<PathBuf>,
    /// The path to the root of your LLVM installation, e.g. ~/.local/share/llvm/
    ///
    /// The given path should contain include/ and lib/ directories
    #[clap(long("llvm"), alias("llvm-prefix"), env("LLVM_PREFIX"))]
    llvm_prefix: PathBuf,
    /// Enables more informational output during the build
    ///
    /// This is enabled by default in CI
    #[clap(short, long, env("VERBOSE"))]
    verbose: bool,
    /// When true, this build is being run under CI
    #[clap(hide(true), long, env("CI"))]
    ci: bool,
    /// Enables link-time optimization of the build
    #[clap(long, env("FIREFLY_BUILD_LTO"))]
    lto: bool,
    /// The cargo profile to build with
    #[clap(long, env("FIREFLY_BUILD_PROFILE"), default_value = "debug")]
    profile: String,
    // Either "static" or "dynamic"; consulted by link_static()/link_dynamic().
    #[clap(hide(true), long, env("FIREFLY_BUILD_TYPE"), default_value = "dynamic")]
    build_type: String,
    /// Whether this build should be statically linked
    #[clap(long("static"))]
    link_static: bool,
    /// Whether this build should be dynamically linked
    #[clap(long("dynamic"), conflicts_with("link-static"))]
    link_dynamic: bool,
    /// If provided, enables building the compiler with the given sanitizer
    #[clap(long, env("SANITIZER"))]
    sanitizer: Option<String>,
    /// The name of the cargo toolchain to use
    #[clap(long, env("CARGO_MAKE_TOOLCHAIN"), default_value = "nightly")]
    toolchain: String,
    /// The name of the target platform to build for
    #[clap(long, env("CARGO_MAKE_RUST_TARGET_TRIPLE"))]
    target_triple: String,
    /// The vendor value of the current Rust target
    #[clap(long, env("CARGO_MAKE_RUST_TARGET_VENDOR"))]
    target_vendor: Option<String>,
    /// The os value of the current Rust target
    #[clap(long, env("CARGO_MAKE_RUST_TARGET_OS"))]
    target_os: Option<String>,
    /// The directory in which cargo will produce its build output
    #[clap(long, env("CARGO_TARGET_DIR"))]
    target_dir: Option<PathBuf>,
    /// The location where the compiler binaries should be symlinked
    #[clap(long, env("FIREFLY_BIN_DIR"), default_value = "./bin")]
    bin_dir: PathBuf,
    /// The location where the compiler toolchain should be installed
    #[clap(long, env("FIREFLY_INSTALL_DIR"), default_value = "./_build")]
    install_dir: PathBuf,
}
impl Config {
    /// The build's working directory; falls back to the process's current
    /// directory when none was supplied.
    pub fn working_directory(&self) -> PathBuf {
        match self.cwd.as_ref() {
            Some(cwd) => cwd.clone(),
            None => std::env::current_dir().unwrap(),
        }
    }
    /// Root of the LLVM installation.
    pub fn llvm_prefix(&self) -> &Path {
        &self.llvm_prefix
    }
    /// Verbose output; always implied when running under CI.
    pub fn verbose(&self) -> bool {
        self.ci || self.verbose
    }
    /// Whether link-time optimization is enabled.
    pub fn lto(&self) -> bool {
        self.lto
    }
    /// The cargo profile being built.
    pub fn profile(&self) -> &str {
        &self.profile
    }
    /// Static linking is selected explicitly, via the build type, or as the
    /// default when dynamic linking was not requested.
    pub fn link_static(&self) -> bool {
        if self.link_static || self.build_type == "static" {
            return true;
        }
        !self.link_dynamic
    }
    /// Whether this build should be dynamically linked.
    pub fn link_dynamic(&self) -> bool {
        if self.link_dynamic {
            return true;
        }
        self.build_type == "dynamic"
    }
    /// The sanitizer to build with, if any.
    pub fn sanitizer(&self) -> Option<&str> {
        self.sanitizer.as_deref()
    }
    /// The rustup toolchain used for the build.
    pub fn toolchain(&self) -> &str {
        &self.toolchain
    }
    /// The Rust target triple being built for.
    pub fn rust_target(&self) -> &str {
        &self.target_triple
    }
    /// The LLVM target triple corresponding to the Rust target.
    pub fn llvm_target(&self) -> &str {
        get_llvm_target(self.toolchain(), self.rust_target())
    }
    /// True when targeting an Apple/macOS platform; inferred from the target
    /// vendor/os fields when present, otherwise from the triple itself.
    pub fn is_darwin(&self) -> bool {
        let apple = match self.target_vendor.as_ref() {
            Some(vendor) => vendor == "apple",
            None => self.rust_target().contains("apple"),
        };
        let macos = match self.target_os.as_ref() {
            Some(os) => os == "macos",
            None => self.rust_target().contains("macos"),
        };
        apple || macos
    }
    /// True when targeting Windows; inferred like [`Config::is_darwin`].
    pub fn is_windows(&self) -> bool {
        match self.target_os.as_ref() {
            Some(os) => os == "windows",
            None => self.rust_target().contains("windows"),
        }
    }
    /// Any target that is neither Darwin nor Windows is treated as Linux.
    pub fn is_linux(&self) -> bool {
        !(self.is_darwin() || self.is_windows())
    }
    /// Where compiler binaries are symlinked.
    pub fn bin_dir(&self) -> &Path {
        &self.bin_dir
    }
    /// Where the compiler toolchain is installed.
    pub fn install_dir(&self) -> &Path {
        &self.install_dir
    }
    /// The Rust sysroot of the host toolchain.
    pub fn sysroot(&self) -> &Path {
        get_rust_sysroot()
    }
    /// The `lib/rustlib/<target>` directory inside the sysroot.
    pub fn toolchain_target_dir(&self) -> PathBuf {
        self.sysroot().join("lib/rustlib").join(self.rust_target())
    }
}
/// Builds the `firefly` compiler via `cargo rustc` (run through rustup) and
/// installs the resulting executable plus runtime/LLVM libraries into the
/// configured install directory.
///
/// High-level phases:
///   1. assemble RUSTFLAGS / linker args from the configuration,
///   2. run the build, streaming cargo's JSON messages to the console and
///      recording workspace artifacts,
///   3. copy the compiler binary and runtime libraries into place and
///      create the expected symlinks.
pub fn run(config: &Config) -> anyhow::Result<()> {
    let cwd = config.working_directory();
    let target_dir = config
        .target_dir
        .clone()
        .unwrap_or_else(|| cwd.join("target"));
    // Linker args accumulate here and are joined into one `-Wl,...`-style
    // `-Clink-args` value below.
    let mut build_link_args = vec!["-Wl".to_owned()];
    let mut extra_cargo_flags = vec![];
    // Start from the ambient RUSTFLAGS so caller-provided flags are kept.
    let mut rustflags = env::var("RUSTFLAGS")
        .unwrap_or(String::new())
        .split(' ')
        .map(|flag| flag.to_string())
        .collect::<Vec<_>>();
    if let Ok(f) = env::var("CARGO_MAKE_CARGO_VERBOSE_FLAGS") {
        if !f.is_empty() {
            extra_cargo_flags.push(f.to_owned());
        }
    }
    if let Some(sanitizer) = config.sanitizer() {
        if !sanitizer.is_empty() {
            rustflags.push("-Z".to_owned());
            rustflags.push(format!("sanitizer={}", sanitizer));
        }
    }
    // Dynamic Linux builds find their shared libraries relative to the
    // executable via $ORIGIN.
    if config.is_linux() && config.link_dynamic() {
        build_link_args.push("-rpath".to_owned());
        build_link_args.push("$ORIGIN/../lib".to_owned());
    }
    // Map the cargo profile name to cargo's output subdirectory, adding the
    // matching flags as a side effect.
    let target_subdir = match config.profile() {
        "release" => {
            extra_cargo_flags.push("--release".to_owned());
            "release"
        }
        "dev" | _ => {
            rustflags.push("-C".to_owned());
            rustflags.push("opt-level=0".to_owned());
            rustflags.push("-C".to_owned());
            rustflags.push("debuginfo=2".to_owned());
            "debug"
        }
    };
    if config.link_static() {
        rustflags.push("-C".to_owned());
        rustflags.push("prefer-dynamic=no".to_owned());
    }
    // Leave room in Mach-O headers so install_name_tool can add an rpath later.
    if config.is_darwin() {
        build_link_args.push("-headerpad_max_install_names".to_owned());
    }
    if config.lto() {
        build_link_args.push("-flto=thin".to_owned());
        rustflags.push("-C".to_owned());
        rustflags.push("embed-bitcode=yes".to_owned());
        rustflags.push("-C".to_owned());
        rustflags.push("lto=thin".to_owned());
    }
    rustflags.push("-Z".to_owned());
    rustflags.push("remap-cwd-prefix=.".to_owned());
    // NOTE(review): "-v" is always passed to the linker, regardless of the
    // verbose setting — confirm this is intentional.
    build_link_args.push("-v".to_string());
    let link_args = build_link_args.join(",");
    let link_args_string = format!("-Clink-args={}", &link_args);
    let cargo_args = extra_cargo_flags.iter().collect::<Vec<_>>();
    let rustflags = rustflags.as_slice().join(" ");
    println!("Starting build..");
    let metadata = MetadataCommand::new().exec().unwrap();
    // Workspace membership decides which diagnostics/artifacts we surface.
    let workspace_members = metadata
        .workspace_members
        .iter()
        .cloned()
        .collect::<HashSet<_>>();
    // Put the LLVM tools first on PATH for the build.
    let path_var = env::var("PATH").unwrap();
    let path = format!("{}/bin:{}", config.llvm_prefix().display(), &path_var);
    let mut cargo_cmd = Command::new("rustup");
    let cargo_cmd = cargo_cmd
        .arg("run")
        .arg(config.toolchain())
        .args(&["cargo", "rustc"])
        .args(&["-p", "firefly"])
        .arg("--target")
        .arg(config.rust_target())
        .args(&["--message-format=json-diagnostic-rendered-ansi", "-vv"])
        .args(cargo_args.as_slice())
        .arg("--")
        .arg("--remap-path-prefix")
        .arg(&format!("{}=.", cwd.display()))
        .arg(link_args_string.as_str())
        .env("PATH", path.as_str())
        .env("RUSTFLAGS", rustflags.as_str());
    // Keep a printable copy of the command line for error reporting.
    let cmd = format!("{:?}", &cargo_cmd);
    // Print more verbose output when requested/in CI
    let verbose = config.verbose();
    cargo_cmd.stdout(Stdio::piped());
    if !verbose {
        cargo_cmd.stderr(Stdio::null());
    }
    let mut child = cargo_cmd.spawn().unwrap();
    // target name -> static library files (.a/.rlib) produced for it;
    // consumed later when installing the runtime libraries.
    let mut deps: HashMap<String, Vec<String>> = HashMap::new();
    {
        // Stream cargo's JSON messages, echoing relevant diagnostics and
        // progress, and collecting workspace artifacts into `deps`.
        let child_stdout = child.stdout.take().unwrap();
        let child_stdout_reader = BufReader::new(child_stdout);
        let stdout = io::stdout();
        let mut handle = stdout.lock();
        for message in Message::parse_stream(child_stdout_reader) {
            match message.unwrap() {
                Message::CompilerMessage(msg) => {
                    use cargo_metadata::diagnostic::DiagnosticLevel;
                    match msg.message.level {
                        DiagnosticLevel::Ice
                        | DiagnosticLevel::Error
                        | DiagnosticLevel::FailureNote => {
                            if let Some(msg) = msg.message.rendered.as_ref() {
                                handle.write_all(msg.as_bytes()).unwrap();
                            }
                        }
                        _ if workspace_members.contains(&msg.package_id) || verbose => {
                            // This message is relevant to one of our crates
                            if let Some(msg) = msg.message.rendered.as_ref() {
                                handle.write_all(msg.as_bytes()).unwrap();
                            }
                        }
                        _ => continue,
                    }
                }
                Message::CompilerArtifact(artifact)
                    if artifact.target.name == "build-script-build" =>
                {
                    let message = format!("Building {}\n", &artifact.package_id.repr);
                    handle.write_all(message.as_bytes()).unwrap();
                }
                Message::CompilerArtifact(mut artifact) => {
                    let message = format!("Compiled {}\n", &artifact.package_id.repr);
                    handle.write_all(message.as_bytes()).unwrap();
                    // Track the artifacts for workspace members as we need them later
                    if workspace_members.contains(&artifact.package_id) {
                        let files = artifact
                            .filenames
                            .drain_filter(|f| {
                                let p = f.as_path();
                                let ext = p.extension();
                                ext == Some("a") || ext == Some("rlib")
                            })
                            .map(|f| f.into_string())
                            .collect::<Vec<_>>();
                        if !files.is_empty() {
                            deps.insert(artifact.target.name.clone(), files);
                        }
                    }
                }
                Message::BuildScriptExecuted(_script) => {
                    continue;
                }
                Message::BuildFinished(result) if result.success => {
                    handle
                        .write_all(b"Build completed successfully!\n")
                        .unwrap();
                }
                Message::BuildFinished(_) => {
                    handle.write_all(b"Build finished with errors!\n").unwrap();
                }
                Message::TextLine(s) => {
                    // Unknown message content
                    handle.write_all(s.as_bytes()).unwrap();
                    handle.write_all(b"\n").unwrap();
                }
                // Unhandled message type (this enum is non-exhaustive)
                _ => continue,
            }
        }
    }
    println!("Build command completed, waiting for exit..");
    let output = child.wait().unwrap();
    if !output.success() {
        bail!(
            "command did not execute successfully: {}\n\
            expected success, got: {}",
            cmd,
            output
        );
    }
    // --- Install phase: lay out bin/ and lib/ directories ---
    let llvm_target = config.llvm_target();
    let install_dir = config.install_dir();
    let install_bin_dir = install_dir.join("bin");
    let install_host_lib_dir = install_dir.join("lib");
    let install_target_lib_dir = install_dir.join(&format!("lib/fireflylib/{}/lib", &llvm_target));
    println!("Preparing to install Firefly to {}", install_dir.display());
    if !install_bin_dir.exists() {
        fs::create_dir_all(&install_bin_dir).expect("failed to create install bin directory");
    }
    if !install_host_lib_dir.exists() {
        fs::create_dir_all(&install_host_lib_dir)
            .expect("failed to create install host libs directory");
    }
    if !install_target_lib_dir.exists() {
        fs::create_dir_all(&install_target_lib_dir)
            .expect("failed to create install target libs directory");
    }
    // Clear out any previously installed library files.
    let walker = WalkDir::new(&install_host_lib_dir).into_iter();
    for entry in walker.filter_entry(|e| is_dir_or_library_file(e)) {
        let entry = entry.unwrap();
        let ty = entry.file_type();
        if ty.is_dir() {
            continue;
        }
        fs::remove_file(entry.path()).unwrap();
    }
    println!("Installing Firefly..");
    let src_firefly_exe = target_dir
        .join(config.rust_target())
        .join(target_subdir)
        .join("firefly");
    if !src_firefly_exe.exists() {
        panic!(
            "Expected build to place Firefly executable at {}",
            src_firefly_exe.display()
        );
    }
    let firefly_exe = install_bin_dir.join("firefly");
    if firefly_exe.exists() {
        fs::remove_file(&firefly_exe).expect("failed to remove existing firefly executable");
    }
    fs::copy(src_firefly_exe, &firefly_exe).unwrap();
    symlink(&firefly_exe, config.bin_dir().join("firefly"));
    if config.is_darwin() {
        // On macOS, teach the binary to find its libraries next to itself.
        println!("Patching runtime path..");
        let mut install_name_tool_cmd = Command::new("install_name_tool");
        let install_name_tool_cmd = install_name_tool_cmd
            .args(&["-add_rpath", "@executable_path/../lib"])
            .arg(&format!("{}", firefly_exe.display()));
        let cmd = install_name_tool_cmd.stdin(Stdio::null()).output().unwrap();
        if !cmd.status.success() {
            io::stderr().write_all(&cmd.stderr).unwrap();
            io::stdout().write_all(&cmd.stdout).unwrap();
            panic!(
                "command did not execute successfully: {:?}\n\
                expected success, got: {}",
                install_name_tool_cmd, cmd.status
            );
        }
    }
    println!("Installing runtime dependencies..");
    // Copy the toolchain's panic runtimes into the target lib directory,
    // renamed without their metadata hash suffix.
    let rustlibs = &["libpanic_abort", "libpanic_unwind"];
    let walker = WalkDir::new(config.toolchain_target_dir().join("lib")).into_iter();
    for entry in walker.filter_entry(|e| is_dir_or_matching_rlib(e, rustlibs)) {
        let entry = entry.unwrap();
        let ty = entry.file_type();
        if ty.is_dir() {
            continue;
        }
        let path = entry.path().canonicalize().unwrap();
        let stem = path.file_stem().unwrap().to_str().unwrap();
        for lib in &rustlibs[..] {
            if stem.starts_with(lib) {
                fs::copy(&path, install_target_lib_dir.join(&format!("{}.rlib", lib))).unwrap();
            }
        }
    }
    println!("Installing runtime libraries..");
    // These come from the artifacts recorded in `deps` during the build.
    let firefly_libs = &["firefly_rt_tiny", "panic", "unwind"];
    for lib in firefly_libs.iter().copied() {
        if let Some(files) = deps.get(lib) {
            for file in files.iter() {
                let path = Path::new(file).canonicalize().unwrap();
                let extension = path.extension().unwrap().to_str().unwrap();
                let target_path = install_target_lib_dir.join(&format!("lib{}.{}", lib, extension));
                fs::copy(path, target_path).unwrap();
            }
        } else {
            panic!("Unable to find archive (.a/.rlib) for dependency: {}", lib);
        }
    }
    if config.link_dynamic() {
        match env::var_os("LLVM_LINK_LLVM_DYLIB") {
            Some(val) if val == "ON" => {
                // Mirror the LLVM/MLIR shared libraries (and their symlinks)
                // into the host lib directory, skipping unchanged files.
                let walker = WalkDir::new(config.llvm_prefix().join("lib")).into_iter();
                let mut symlinks = HashMap::new();
                for entry in walker.filter_entry(|e| is_dir_or_llvm_lib(e)) {
                    let entry = entry.unwrap();
                    let ty = entry.file_type();
                    if ty.is_dir() {
                        continue;
                    }
                    let path = entry.path();
                    let filename = path.file_name().unwrap().to_str().unwrap();
                    if entry.path_is_symlink() {
                        // Replicate symlink in target
                        let real_path = fs::read_link(&path).unwrap();
                        let real_name = real_path.file_name().unwrap().to_str().unwrap();
                        symlinks.insert(filename.to_owned(), real_name.to_owned());
                        continue;
                    }
                    let target_path = install_host_lib_dir.join(filename);
                    if !target_path.exists() {
                        fs::copy(&path, &install_host_lib_dir.join(filename)).unwrap();
                    } else {
                        let src_metadata = entry.metadata().unwrap();
                        let dst_metadata = target_path.metadata().unwrap();
                        let src_ctime = src_metadata.created().ok();
                        let dst_ctime = dst_metadata.created().ok();
                        // Skip unchanged files
                        if src_ctime.is_some() && dst_ctime.is_some() && src_ctime == dst_ctime {
                            continue;
                        }
                        fs::copy(&path, &install_host_lib_dir.join(filename)).unwrap();
                    }
                }
                for (link_name, file_name) in symlinks.iter() {
                    let src = install_host_lib_dir.join(file_name);
                    let dst = install_host_lib_dir.join(link_name);
                    symlink(&src, dst);
                }
            }
            _ => {}
        }
    }
    println!("Install complete!");
    Ok(())
}
static LLVM_TARGET: OnceLock<String> = OnceLock::new();
/// Resolves the LLVM target triple for `target` by asking rustc (via rustup)
/// for its JSON target spec.
///
/// The answer is computed once per process and cached; later calls return the
/// cached value and ignore the arguments. Panics when rustc fails or its
/// output cannot be parsed.
fn get_llvm_target(toolchain_name: &str, target: &str) -> &'static str {
    LLVM_TARGET
        .get_or_init(|| {
            let mut rustc_cmd = Command::new("rustup");
            rustc_cmd
                .arg("run")
                .arg(toolchain_name)
                .args(&["rustc"])
                .args(&["-Z", "unstable-options"])
                .args(&["--print", "target-spec-json", "--target"])
                .arg(target)
                .stdin(Stdio::null())
                .stdout(Stdio::piped())
                .stderr(Stdio::piped());
            let output = rustc_cmd.output().unwrap();
            if !output.status.success() {
                panic!(
                    "unable to determine llvm target triple!: {}",
                    String::from_utf8(output.stderr).unwrap()
                );
            }
            let spec: TargetSpec = serde_json::from_slice(&output.stdout).unwrap();
            spec.llvm_target
        })
        .as_str()
}
static RUST_SYSROOT: OnceLock<PathBuf> = OnceLock::new();
/// Queries `rustc --print sysroot` once and caches the resulting path for
/// the lifetime of the process. Panics when rustc cannot be run.
fn get_rust_sysroot() -> &'static Path {
    RUST_SYSROOT
        .get_or_init(|| {
            let output = Command::new("rustc")
                .args(&["--print", "sysroot"])
                .stdin(Stdio::null())
                .stdout(Stdio::piped())
                .stderr(Stdio::null())
                .output()
                .unwrap();
            if !output.status.success() {
                panic!("unable to determine rust sysroot!");
            }
            let stdout = String::from_utf8(output.stdout).unwrap();
            PathBuf::from(stdout.trim())
        })
        .as_path()
}
/// Walker filter that descends into every directory and accepts only the
/// MLIR/LLVM shared-library files (`libMLIR.*`, `libLLVM.*`, `libLLVM-*`).
fn is_dir_or_llvm_lib(entry: &DirEntry) -> bool {
    // Always recurse into subdirectories.
    if entry.file_type().is_dir() {
        return true;
    }
    let path = entry.path();
    let filename = path.file_name().unwrap().to_str().unwrap();
    ["libMLIR.", "libLLVM.", "libLLVM-"]
        .iter()
        .any(|prefix| filename.starts_with(prefix))
}
/// Walker filter that descends into every directory and accepts `.rlib`
/// files whose filename starts with one of the names in `libs`.
fn is_dir_or_matching_rlib(entry: &DirEntry, libs: &[&str]) -> bool {
    // Recurse into subdirectories
    if entry.file_type().is_dir() {
        return true;
    }
    let path = entry.path();
    match path.extension() {
        Some(ext) if ext == "rlib" => {
            let filename = path.file_name().unwrap().to_str().unwrap();
            libs.iter().any(|lib| filename.starts_with(lib))
        }
        _ => false,
    }
}
/// Walker filter that descends into every directory and accepts any
/// library artifact (`.dylib`, `.so`, `.a`, `.rlib`).
fn is_dir_or_library_file(entry: &DirEntry) -> bool {
    // Recurse into subdirectories
    if entry.file_type().is_dir() {
        return true;
    }
    entry.path().extension().map_or(false, |ext| {
        ext == "dylib" || ext == "so" || ext == "a" || ext == "rlib"
    })
}
#[cfg(unix)]
fn symlink(src: &Path, dst: PathBuf) {
    use std::os::unix;
    // Best-effort removal of any stale file or link at the destination.
    let _ = fs::remove_file(&dst);
    unix::fs::symlink(src, &dst).unwrap()
}
#[cfg(windows)]
fn symlink(src: &Path, dst: PathBuf) {
    use std::os::windows::fs::symlink_file;
    // Best-effort removal of any stale file or link at the destination.
    let _ = fs::remove_file(&dst);
    symlink_file(src, &dst).unwrap()
}
|
use crate::fd::{AsFd, BorrowedFd};
use bitflags::bitflags;
bitflags! {
    /// `POLL*` flags for use with [`poll`].
    ///
    /// [`poll`]: crate::io::poll
    // Each constant is the corresponding kernel `POLL*` value narrowed to
    // `u16`, matching the width of `pollfd::events`/`revents`.
    #[repr(transparent)]
    #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
    pub struct PollFlags: u16 {
        /// `POLLIN`
        const IN = linux_raw_sys::general::POLLIN as u16;
        /// `POLLPRI`
        const PRI = linux_raw_sys::general::POLLPRI as u16;
        /// `POLLOUT`
        const OUT = linux_raw_sys::general::POLLOUT as u16;
        /// `POLLRDNORM`
        const RDNORM = linux_raw_sys::general::POLLRDNORM as u16;
        /// `POLLWRNORM`
        const WRNORM = linux_raw_sys::general::POLLWRNORM as u16;
        /// `POLLRDBAND`
        const RDBAND = linux_raw_sys::general::POLLRDBAND as u16;
        /// `POLLWRBAND`
        const WRBAND = linux_raw_sys::general::POLLWRBAND as u16;
        /// `POLLERR`
        const ERR = linux_raw_sys::general::POLLERR as u16;
        /// `POLLHUP`
        const HUP = linux_raw_sys::general::POLLHUP as u16;
        /// `POLLNVAL`
        const NVAL = linux_raw_sys::general::POLLNVAL as u16;
        /// `POLLRDHUP`
        const RDHUP = linux_raw_sys::general::POLLRDHUP as u16;
    }
}
/// `struct pollfd`—File descriptor and flags for use with [`poll`].
///
/// [`poll`]: crate::event::poll
#[doc(alias = "pollfd")]
#[repr(C)]
#[derive(Debug, Clone)]
pub struct PollFd<'fd> {
    /// The file descriptor being polled.
    pub(crate) fd: BorrowedFd<'fd>,
    /// Requested events, as raw `POLL*` bits (see [`PollFlags`]).
    pub(crate) events: u16,
    /// Returned events; exposed via [`PollFd::revents`].
    pub(crate) revents: u16,
}
impl<'fd> PollFd<'fd> {
    /// Constructs a new `PollFd` holding `fd` and `events`.
    #[inline]
    pub fn new<Fd: AsFd>(fd: &'fd Fd, events: PollFlags) -> Self {
        Self::from_borrowed_fd(fd.as_fd(), events)
    }
    /// Sets the contained file descriptor to `fd`.
    #[inline]
    pub fn set_fd<Fd: AsFd>(&mut self, fd: &'fd Fd) {
        self.fd = fd.as_fd();
    }
    /// Clears the ready events.
    #[inline]
    pub fn clear_revents(&mut self) {
        self.revents = 0;
    }
    /// Constructs a new `PollFd` holding `fd` and `events`.
    ///
    /// This is the same as `new`, but can be used to avoid borrowing the
    /// `BorrowedFd`, which can be tricky in situations where the `BorrowedFd`
    /// is a temporary.
    #[inline]
    pub fn from_borrowed_fd(fd: BorrowedFd<'fd>, events: PollFlags) -> Self {
        Self {
            fd,
            events: events.bits(),
            revents: 0,
        }
    }
    /// Returns the ready events.
    #[inline]
    pub fn revents(&self) -> PollFlags {
        // In theory we know all the bits the OS might set here, but OS's
        // have added extensions in the past, so don't panic on unrecognized
        // bits — silently discard them instead.
        PollFlags::from_bits_truncate(self.revents)
    }
}
impl<'fd> AsFd for PollFd<'fd> {
    /// Borrows the contained file descriptor.
    #[inline]
    fn as_fd(&self) -> BorrowedFd<'_> {
        self.fd.as_fd()
    }
}
|
use crate::net::MacAddr;
use crate::packets::{CondRc, Header, Packet};
use crate::{Mbuf, Result, SizeOf};
use std::fmt;
use std::ptr::NonNull;
/* Ethernet Type II Frame
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Dst MAC | Src MAC |Typ| Payload |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +
| |
| |
| |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   Destination MAC      48-bit MAC address of the intended recipient of
                        the packet.
   Source MAC           48-bit MAC address of the originator of the
                        packet.
Ether Type 16-bit indicator. Identifies which protocol is
encapsulated in the payload of the frame.
*/
/// The protocol type in the ethernet packet payload.
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
#[repr(C, packed)]
pub struct EtherType(pub u16);
impl EtherType {
    /// Wraps a raw 16-bit ether type value.
    pub fn new(value: u16) -> Self {
        Self(value)
    }
}
/// Supported ethernet payload protocol types.
#[allow(non_snake_case)]
#[allow(non_upper_case_globals)]
pub mod EtherTypes {
    use super::EtherType;
    /// Internet Protocol version 4
    pub const Ipv4: EtherType = EtherType(0x0800);
    /// Internet Protocol version 6
    pub const Ipv6: EtherType = EtherType(0x86DD);
}
impl fmt::Display for EtherType {
    /// Renders known protocols by name and anything else as `0xNNNN`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match *self {
            EtherTypes::Ipv4 => "IPv4".to_string(),
            EtherTypes::Ipv6 => "IPv6".to_string(),
            _ => {
                // Copy out of the packed struct before formatting so we
                // never take a reference to an unaligned field.
                let raw = self.0;
                format!("0x{:04x}", raw)
            }
        };
        write!(f, "{}", label)
    }
}
/// Ethernet header.
///
/// Fixed `#[repr(C, packed)]` layout: destination MAC, source MAC, then the
/// ether type in network byte order (see the accessors on `Ethernet`).
#[derive(Clone, Copy, Debug, Default)]
#[repr(C, packed)]
pub struct EthernetHeader {
    // Destination MAC address.
    dst: MacAddr,
    // Source MAC address.
    src: MacAddr,
    // Payload protocol type, stored big-endian on the wire.
    ether_type: u16,
}
impl Header for EthernetHeader {}
/// Ethernet packet.
#[derive(Clone)]
pub struct Ethernet {
    // The message buffer this packet was parsed from or pushed onto.
    envelope: CondRc<Mbuf>,
    // Pointer to the ethernet header bytes inside the buffer.
    header: NonNull<EthernetHeader>,
    // Byte offset of the header within the buffer.
    offset: usize,
}
impl Ethernet {
    /// Returns the source MAC address.
    #[inline]
    pub fn src(&self) -> MacAddr {
        self.header().src
    }
    /// Sets the source MAC address.
    #[inline]
    pub fn set_src(&mut self, src: MacAddr) {
        self.header_mut().src = src
    }
    /// Returns the destination MAC address.
    #[inline]
    pub fn dst(&self) -> MacAddr {
        self.header().dst
    }
    /// Sets the destination MAC address.
    #[inline]
    pub fn set_dst(&mut self, dst: MacAddr) {
        self.header_mut().dst = dst
    }
    /// Returns the payload protocol type, converted from network byte order.
    #[inline]
    pub fn ether_type(&self) -> EtherType {
        let raw = self.header().ether_type;
        EtherType::new(u16::from_be(raw))
    }
    /// Sets the payload protocol type, stored in network byte order.
    #[inline]
    pub fn set_ether_type(&mut self, ether_type: EtherType) {
        self.header_mut().ether_type = u16::to_be(ether_type.0)
    }
    /// Exchanges the source and destination MAC addresses.
    #[inline]
    pub fn swap_addresses(&mut self) {
        let (old_src, old_dst) = (self.src(), self.dst());
        self.set_src(old_dst);
        self.set_dst(old_src);
    }
}
impl fmt::Debug for Ethernet {
    /// Formats the packet's addresses, ether type and buffer geometry.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut dbg = f.debug_struct("ethernet");
        dbg.field("src", &format!("{}", self.src()));
        dbg.field("dst", &format!("{}", self.dst()));
        dbg.field("ether_type", &format!("{}", self.ether_type()));
        dbg.field("$offset", &self.offset());
        dbg.field("$len", &self.len());
        dbg.field("$header_len", &self.header_len());
        dbg.finish()
    }
}
impl Packet for Ethernet {
    type Header = EthernetHeader;
    type Envelope = Mbuf;
    #[inline]
    fn envelope(&self) -> &Self::Envelope {
        &self.envelope
    }
    #[inline]
    fn envelope_mut(&mut self) -> &mut Self::Envelope {
        &mut self.envelope
    }
    #[doc(hidden)]
    #[inline]
    fn header(&self) -> &Self::Header {
        unsafe { self.header.as_ref() }
    }
    #[doc(hidden)]
    #[inline]
    fn header_mut(&mut self) -> &mut Self::Header {
        unsafe { self.header.as_mut() }
    }
    #[inline]
    fn offset(&self) -> usize {
        self.offset
    }
    #[doc(hidden)]
    #[inline]
    fn do_parse(envelope: Self::Envelope) -> Result<Self> {
        // Interpret the bytes at the envelope's payload offset as an
        // ethernet header, keeping the envelope alive via CondRc.
        let mbuf = envelope.mbuf();
        let offset = envelope.payload_offset();
        let header = mbuf.read_data(offset)?;
        Ok(Ethernet {
            envelope: CondRc::new(envelope),
            header,
            offset,
        })
    }
    #[doc(hidden)]
    #[inline]
    fn do_push(mut envelope: Self::Envelope) -> Result<Self> {
        // Grow the buffer by one header's worth of bytes at the payload
        // offset, then write a default header there.
        let offset = envelope.payload_offset();
        let mbuf = envelope.mbuf_mut();
        mbuf.extend(offset, Self::Header::size_of())?;
        let header = mbuf.write_data(offset, &Self::Header::default())?;
        Ok(Ethernet {
            envelope: CondRc::new(envelope),
            header,
            offset,
        })
    }
    #[inline]
    fn remove(mut self) -> Result<Self::Envelope> {
        // Shrink the header bytes out of the buffer and return the envelope.
        let offset = self.offset();
        let len = self.header_len();
        self.mbuf_mut().shrink(offset, len)?;
        Ok(self.envelope.into_owned())
    }
    #[inline]
    fn deparse(self) -> Self::Envelope {
        self.envelope.into_owned()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::packets::UDP_PACKET;
    // The header layout is packed, so its size must be exactly 6 + 6 + 2.
    #[test]
    fn size_of_ethernet_header() {
        assert_eq!(14, EthernetHeader::size_of());
    }
    #[test]
    fn ether_type_to_string() {
        assert_eq!("IPv4", EtherTypes::Ipv4.to_string());
        assert_eq!("IPv6", EtherTypes::Ipv6.to_string());
        // Unknown types render as zero-padded hex.
        assert_eq!("0x0000", EtherType::new(0).to_string());
    }
    // UDP_PACKET is a fixture whose outermost layer is an ethernet frame.
    #[nb2::test]
    fn parse_ethernet_packet() {
        let packet = Mbuf::from_bytes(&UDP_PACKET).unwrap();
        let ethernet = packet.parse::<Ethernet>().unwrap();
        assert_eq!("00:00:00:00:00:01", ethernet.dst().to_string());
        assert_eq!("00:00:00:00:00:02", ethernet.src().to_string());
        assert_eq!(EtherTypes::Ipv4, ethernet.ether_type());
    }
    #[nb2::test]
    fn swap_addresses() {
        let packet = Mbuf::from_bytes(&UDP_PACKET).unwrap();
        let mut ethernet = packet.parse::<Ethernet>().unwrap();
        ethernet.swap_addresses();
        assert_eq!("00:00:00:00:00:02", ethernet.dst().to_string());
        assert_eq!("00:00:00:00:00:01", ethernet.src().to_string());
    }
    #[nb2::test]
    fn push_ethernet_packet() {
        let packet = Mbuf::new().unwrap();
        let ethernet = packet.push::<Ethernet>().unwrap();
        // A freshly pushed packet consists of just the default header.
        assert_eq!(EthernetHeader::size_of(), ethernet.len());
    }
}
|
use super::diacritics;
use geocoder_abbreviations::{Token, TokenType};
use neon::prelude::*;
use regex::Regex;
use std::collections::HashMap;
#[derive(Debug, PartialEq, Clone)]
pub struct Tokens {
    // Plain single-word replacements: lowercased, diacritic-stripped token
    // -> canonical form (tokens that are neither regexes nor multi-word).
    tokens: HashMap<String, ParsedToken>,
    // Replacements whose key is a regex pattern (config entries with `regex: true`).
    regex_tokens: HashMap<String, ParsedToken>,
    // Replacements that span token boundaries (config entries with `spanBoundaries`).
    multi_tokens: HashMap<String, ParsedToken>,
}
impl Tokens {
    /// Build a token set directly from pre-parsed maps (used by tests).
    pub fn new(
        tokens: HashMap<String, ParsedToken>,
        regex_tokens: HashMap<String, ParsedToken>,
        multi_tokens: HashMap<String, ParsedToken>,
    ) -> Self {
        // Field-init shorthand instead of the redundant `tokens: tokens` form.
        Tokens {
            tokens,
            regex_tokens,
            multi_tokens,
        }
    }

    /// Load the abbreviation config for `languages` and split it into the
    /// three lookup maps used by `process`.
    pub fn generate(languages: Vec<String>) -> Self {
        let import: HashMap<String, Vec<Token>> =
            geocoder_abbreviations::config(languages).unwrap();
        let mut map: HashMap<String, ParsedToken> = HashMap::new();
        let mut regex_map: HashMap<String, ParsedToken> = HashMap::new();
        let mut multi_map: HashMap<String, ParsedToken> = HashMap::new();
        // regex_map contains all tokens with `regex: true`, regardless of the
        // spanBoundaries setting; multi_map contains all tokens with a
        // `spanBoundaries` property; map contains the remaining tokens that
        // neither span boundaries nor are regexes.
        // The priority when applying transformations on names for linking is:
        // 1) regex, 2) span boundaries, 3) all others.
        for groups in import.values() {
            // Iterate values directly instead of `keys()` + `get().unwrap()`.
            for group in groups {
                if group.regex {
                    for tk in &group.tokens {
                        regex_map.insert(
                            tk.to_lowercase(),
                            ParsedToken::new(
                                group.canonical.to_lowercase(),
                                group.token_type.to_owned(),
                            ),
                        );
                    }
                } else if group.span_boundaries.is_some() {
                    for tk in &group.tokens {
                        let token = &tk.to_lowercase();
                        let canonical = &group.canonical.to_lowercase();
                        // Skip identity replacements; they would be no-ops.
                        if token != canonical {
                            multi_map.insert(
                                diacritics(&tk.to_lowercase()),
                                ParsedToken::new(
                                    diacritics(&group.canonical.to_lowercase()),
                                    group.token_type.to_owned(),
                                ),
                            );
                        }
                    }
                } else {
                    for tk in &group.tokens {
                        map.insert(
                            diacritics(&tk.to_lowercase()),
                            ParsedToken::new(
                                diacritics(&group.canonical.to_lowercase()),
                                group.token_type.to_owned(),
                            ),
                        );
                    }
                }
            }
        }
        Tokens {
            tokens: map,
            regex_tokens: regex_map,
            multi_tokens: multi_map,
        }
    }

    /// Tokenize `text` and replace abbreviations with their canonical forms.
    /// Regex and multi-word replacements are skipped for countries in the
    /// skip list below and when no country is given.
    pub fn process(&self, text: &String, country: &String) -> Vec<Tokenized> {
        let mut tokens = self.tokenize(&text);
        let mut normalized_full_text = diacritics(&text.to_lowercase());
        // Countries using english tokens are listed here to get around
        // lookahead token replacement errors.
        let skip_regex_list = vec![String::from("US"), String::from("GB")];
        let mut tokenized: Vec<Tokenized> = Vec::with_capacity(tokens.len());
        if !country.is_empty() && !skip_regex_list.contains(&country) {
            for (regex_string, v) in self.regex_tokens.iter() {
                // The map key is already a complete regex pattern; the old
                // `format!(r"{}", ...)` wrapper was a no-op.
                let re = Regex::new(regex_string).unwrap();
                normalized_full_text = re
                    .replace_all(&normalized_full_text, v.canonical.as_str())
                    .to_string();
                tokens = self.tokenize(&normalized_full_text);
            }
            for (multi_string, v) in self.multi_tokens.iter() {
                // `replace` already yields a String; no extra `.to_string()`.
                normalized_full_text =
                    normalized_full_text.replace(multi_string, v.canonical.as_str());
                tokens = self.tokenize(&normalized_full_text);
            }
        }
        for token in &tokens {
            match self.tokens.get(token) {
                None => {
                    tokenized.push(Tokenized::new(token.to_owned(), None));
                }
                Some(t) => {
                    tokenized.push(Tokenized::new(
                        t.canonical.to_owned(),
                        t.token_type.to_owned(),
                    ));
                }
            }
        }
        // US-specific disambiguation of 'st' (Street vs Saint); the old
        // `country == &String::from("US")` allocated on every call.
        if country.as_str() == "US" {
            tokenized = type_us_st(&tokens, tokenized);
        }
        tokenized
    }
    ///
    /// Remove all diacritics, punctuation non-space whitespace
    /// returning a vector of component tokens
    ///
    fn tokenize(&self, text: &String) -> Vec<String> {
        let text = text.trim();
        lazy_static! {
            static ref UP: Regex = Regex::new(r"[\^]+").unwrap();
            // collapse curly apostrophes and periods
            static ref PERIOD_PUNC: Regex = Regex::new(r"[\u2018\u2019\u02BC\u02BB\uFF07\.]").unwrap();
            // collapse plain apostrophes
            static ref APOS_PUNC: Regex = Regex::new(r"'").unwrap();
            // split apostrophes if l' or d' followed by vowel ie. l'onze
            static ref APOSTROPHE: Regex = Regex::new(r"(l|d)'([aeiouhy][^ ]+)").unwrap();
            // all other ascii and unicode punctuation except '-' per
            // http://stackoverflow.com/questions/4328500 split terms
            static ref SPACEPUNC: Regex = Regex::new(r#"[\u2000-\u206F\u2E00-\u2E7F\\'!"$#%&()*+,./:;<=>?@\[\]^_`{|}~-]"#).unwrap();
            static ref SPACE: Regex = Regex::new(r"\s+").unwrap();
        }
        let mut normalized = diacritics(&text.to_lowercase());
        normalized = UP.replace_all(normalized.as_str(), "").to_string();
        normalized = PERIOD_PUNC.replace_all(normalized.as_str(), "").to_string();
        // APOSTROPHE must run before APOS_PUNC strips the remaining quotes.
        normalized = APOSTROPHE
            .replace_all(normalized.as_str(), "$1 $2")
            .to_string();
        normalized = APOS_PUNC.replace_all(normalized.as_str(), "").to_string();
        normalized = SPACEPUNC.replace_all(normalized.as_str(), " ").to_string();
        normalized = SPACE.replace_all(normalized.as_str(), " ").to_string();
        // Drop empty tokens caused by double spaces / untrimmed input.
        normalized
            .split(" ")
            .map(String::from)
            .filter(|token| !token.is_empty())
            .collect()
    }
}
/// Simplified struct from geocoder_abbreviations::Token
/// @TODO replace with geocoder_abbreviations::Token when additional traits are derived
#[derive(Debug, PartialEq, Clone)]
pub struct ParsedToken {
    // Canonical (replacement) form of the token, lowercased.
    canonical: String,
    // Semantic category of the token, if the config assigns one.
    token_type: Option<TokenType>,
}
impl ParsedToken {
pub fn new(canonical: String, token_type: Option<TokenType>) -> Self {
ParsedToken {
canonical,
token_type,
}
}
}
/// A single output token produced by `Tokens::process`.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
pub struct Tokenized {
    // The canonicalized token text.
    pub token: String,
    // Semantic category carried over from the matched replacement, if any.
    pub token_type: Option<TokenType>,
}
impl Tokenized {
pub fn new(token: String, token_type: Option<TokenType>) -> Self {
Tokenized { token, token_type }
}
}
///
/// Change 'st' token_type to TokenType::Way ('Street') or None ('Saint')
///
/// `tokens` is the raw (pre-replacement) token list; `tokenized` the
/// canonicalized output. Only the LAST 'st' may become a Way, and only when
/// no other Way token is present; earlier 'st's are assumed to be 'Saint'.
pub fn type_us_st(tokens: &Vec<String>, mut tokenized: Vec<Tokenized>) -> Vec<Tokenized> {
    // check if original name contained "st"
    // don't modify if "street" or "saint" has already been tokenized
    if tokens.contains(&String::from("st")) {
        let mut st_index = Vec::new();
        let mut way_tokens = false;
        for (i, tk) in tokenized.iter().enumerate() {
            if tk.token == "st" {
                st_index.push(i);
            }
            // if there are non-st ways
            else if tk.token_type == Some(TokenType::Way) {
                way_tokens = true;
            }
        }
        // Guard: replacements may have removed every literal "st" from the
        // canonicalized output even though the raw tokens contained one; the
        // previous unconditional `pop().unwrap()` would panic in that case.
        if let Some(last) = st_index.pop() {
            // all but the last 'st' are likely not ways
            for i in st_index {
                tokenized[i].token_type = None;
            }
            // if there are no other way tokens, st => street;
            // otherwise st => saint
            tokenized[last].token_type = if way_tokens {
                None
            } else {
                Some(TokenType::Way)
            };
        }
    }
    tokenized
}
/// Neon entry point: tokenize a name string using the context's token set.
/// Arguments: (name: string, context: InputContext). Returns the tokenized
/// list serialized back to a JS value.
pub fn tokenize_name(mut cx: FunctionContext) -> JsResult<JsValue> {
    let name = cx.argument::<JsString>(0)?.value();
    let raw_context = cx.argument::<JsValue>(1)?;
    let input_context: crate::types::InputContext = neon_serde::from_value(&mut cx, raw_context)?;
    let full_context = crate::Context::from(input_context);
    let tokenized = full_context.tokens.process(&name, &full_context.country);
    Ok(neon_serde::to_value(&mut cx, &tokenized)?)
}
#[cfg(test)]
mod tests {
use super::*;
fn tokenized_string(tokenized: Vec<Tokenized>) -> String {
let tokens: Vec<String> = tokenized
.into_iter()
.map(|x| String::from(x.token))
.collect();
let token_string = String::from(tokens.join(" ").trim());
token_string
}
#[test]
fn test_remove_diacritics() {
let tokens = Tokens::new(HashMap::new(), HashMap::new(), HashMap::new());
// diacritics are removed from latin text
assert_eq!(
tokenized_string(tokens.process(
&String::from("Hérê àrë søme wöřdš, including diacritics and puncatuation!"),
&String::from("")
)),
String::from("here are some words including diacritics and puncatuation")
);
// nothing happens to latin text
assert_eq!(
tokenized_string(tokens.process(
&String::from(
"Cranberries are low, creeping shrubs or vines up to 2 metres (7 ft)"
),
&String::from("")
)),
String::from("cranberries are low creeping shrubs or vines up to 2 metres 7 ft")
);
// nothing happens to Japanese text
assert_eq!(
tokenized_string(tokens.process(
&String::from("堪《たま》らん!」と片息《かたいき》になつて、喚《わめ》"),
&String::from("")
)),
String::from("堪《たま》らん!」と片息《かたいき》になつて、喚《わめ》")
);
// greek diacritics are removed and other characters stay the same
assert_eq!(
tokenized_string(tokens.process(
&String::from("άΆέΈήΉίΊόΌύΎ αΑεΕηΗιΙοΟυΥ"),
&String::from("")
)),
String::from("άάέέήήίίόόύύ ααεεηηιιοουυ")
);
// cyrillic diacritics are removed and other characters stay the same
assert_eq!(
tokenized_string(
tokens.process(&String::from("ўЎёЁѐЀґҐйЙ уУеЕеЕгГиИ"), &String::from(""))
),
String::from("ўўёёѐѐґґйй ууееееггии")
);
}
#[test]
fn test_tokenize() {
let tokens = Tokens::new(HashMap::new(), HashMap::new(), HashMap::new());
assert_eq!(
tokenized_string(tokens.process(&String::from(""), &String::from(""))),
String::from("")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo"), &String::from(""))),
String::from("foo")
);
assert_eq!(
tokenized_string(tokens.process(&String::from(" foo bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo bar "), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo-bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo+bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo_bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo:bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo;bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo|bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo}bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo{bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo[bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo]bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo(bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo)bar"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo b.a.r"), &String::from(""))),
String::from("foo bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("foo's bar"), &String::from(""))),
String::from("foos bar")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("San José"), &String::from(""))),
String::from("san jose")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("A Coruña"), &String::from(""))),
String::from("a coruna")
);
assert_eq!(
tokenized_string(
tokens.process(&String::from("Chamonix-Mont-Blanc"), &String::from(""))
),
String::from("chamonix mont blanc")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("Rue d'Argout"), &String::from(""))),
String::from("rue d argout")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("Hale’iwa Road"), &String::from(""))),
String::from("haleiwa road")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("москва"), &String::from(""))),
String::from("москва")
);
assert_eq!(
tokenized_string(tokens.process(&String::from("京都市"), &String::from(""))),
String::from("京都市")
);
assert_eq!(
tokenized_string(tokens.process(
&String::from("carrer de l'onze de setembre"),
&String::from("")
)),
String::from("carrer de l onze de setembre")
);
}
#[test]
fn test_replacement_tokens() {
let mut map: HashMap<String, ParsedToken> = HashMap::new();
let mut regex_map: HashMap<String, ParsedToken> = HashMap::new();
let mut multi_map: HashMap<String, ParsedToken> = HashMap::new();
map.insert(
String::from("barter"),
ParsedToken::new(String::from("foo"), None),
);
map.insert(
String::from("saint"),
ParsedToken::new(String::from("st"), None),
);
map.insert(
String::from("street"),
ParsedToken::new(String::from("st"), Some(TokenType::Way)),
);
let tokens = Tokens::new(map, regex_map, multi_map);
assert_eq!(
tokens.process(&String::from("Main Street"), &String::from("")),
vec![
Tokenized::new(String::from("main"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
assert_eq!(
tokens.process(&String::from("Main St"), &String::from("")),
vec![
Tokenized::new(String::from("main"), None),
Tokenized::new(String::from("st"), None)
]
);
assert_eq!(
tokens.process(&String::from("foobarter"), &String::from("")),
vec![Tokenized::new(String::from("foobarter"), None)]
);
assert_eq!(
tokens.process(&String::from("foo barter"), &String::from("")),
vec![
Tokenized::new(String::from("foo"), None),
Tokenized::new(String::from("foo"), None)
]
);
}
#[test]
fn test_de_replacement() {
let tokens = Tokens::generate(vec![String::from("de")]);
assert_eq!(
tokens.process(&String::from("Fresenbergstr"), &String::from("DE")),
vec![
Tokenized::new(String::from("fresenberg"), None),
Tokenized::new(String::from("str"), Some(TokenType::Way)),
]
);
}
#[test]
fn test_multi_word_tokens() {
let tokens = Tokens::generate(vec![String::from("es")]);
assert_eq!(
tokens.process(&String::from("GV Corts Catalanes"), &String::from("ES")),
vec![
Tokenized::new(String::from("gv"), None),
Tokenized::new(String::from("corts"), None),
Tokenized::new(String::from("catalanes"), None)
]
);
assert_eq!(
tokens.process(
&String::from("Gran Via De Les Corts Catalanes"),
&String::from("ES")
),
vec![
Tokenized::new(String::from("gv"), None),
Tokenized::new(String::from("de"), Some(TokenType::Determiner)),
Tokenized::new(String::from("les"), Some(TokenType::Determiner)),
Tokenized::new(String::from("corts"), None),
Tokenized::new(String::from("catalanes"), None)
]
);
assert_eq!(
tokens.process(
&String::from("Calle Gran Vía de Colón"),
&String::from("ES")
),
vec![
Tokenized::new(String::from("cl"), Some(TokenType::Way)),
Tokenized::new(String::from("gv"), None),
Tokenized::new(String::from("de"), Some(TokenType::Determiner)),
Tokenized::new(String::from("colon"), None)
]
);
assert_eq!(
tokens.process(
&String::from("carrer de l'onze de setembre"),
&String::from("ES")
),
vec![
Tokenized::new(String::from("cl"), Some(TokenType::Way)),
Tokenized::new(String::from("de"), Some(TokenType::Determiner)),
Tokenized::new(String::from("la"), Some(TokenType::Determiner)),
Tokenized::new(String::from("11"), Some(TokenType::Number)),
Tokenized::new(String::from("de"), Some(TokenType::Determiner)),
Tokenized::new(String::from("setembre"), None)
]
);
assert_eq!(
tokens.process(&String::from("cl onze de setembre"), &String::from("ES")),
vec![
Tokenized::new(String::from("cl"), Some(TokenType::Way)),
Tokenized::new(String::from("11"), Some(TokenType::Number)),
Tokenized::new(String::from("de"), Some(TokenType::Determiner)),
Tokenized::new(String::from("setembre"), None)
]
);
}
#[test]
fn test_generate_tokens() {
let tokens = Tokens::generate(vec![String::from("en")]);
assert_eq!(
tokens.process(&String::from("New Jersey Av NW"), &String::from("US")),
vec![
Tokenized::new(String::from("new"), None),
Tokenized::new(String::from("jersey"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way)),
Tokenized::new(String::from("nw"), Some(TokenType::Cardinal))
]
);
assert_eq!(
tokens.process(&String::from("New Jersey Ave NW"), &String::from("US")),
vec![
Tokenized::new(String::from("new"), None),
Tokenized::new(String::from("jersey"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way)),
Tokenized::new(String::from("nw"), Some(TokenType::Cardinal))
]
);
assert_eq!(
tokens.process(
&String::from("New Jersey Avenue Northwest"),
&String::from("US")
),
vec![
Tokenized::new(String::from("new"), None),
Tokenized::new(String::from("jersey"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way)),
Tokenized::new(String::from("nw"), Some(TokenType::Cardinal))
]
);
assert_eq!(
tokens.process(&String::from("Saint Peter Street"), &String::from("US")),
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
assert_eq!(
tokens.process(&String::from("St Peter St"), &String::from("US")),
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
}
#[test]
fn test_type_us_st() {
assert_eq!(
type_us_st(
&vec![String::from("")],
vec![Tokenized::new(String::from(""), None)]
),
vec![Tokenized::new(String::from(""), None)]
);
// main st
assert_eq!(
type_us_st(
&vec![String::from("main"), String::from("st")],
vec![
Tokenized::new(String::from("main"), None),
Tokenized::new(String::from("st"), None)
]
),
vec![
Tokenized::new(String::from("main"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
assert_eq!(
type_us_st(
&vec![String::from("main"), String::from("st")],
vec![
Tokenized::new(String::from("main"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
),
vec![
Tokenized::new(String::from("main"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
// st peter st
assert_eq!(
type_us_st(
&vec![
String::from("st"),
String::from("peter"),
String::from("st")
],
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("st"), None)
]
),
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
assert_eq!(
type_us_st(
&vec![
String::from("st"),
String::from("peter"),
String::from("st")
],
vec![
Tokenized::new(String::from("st"), Some(TokenType::Way)),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
),
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
// st peter
assert_eq!(
type_us_st(
&vec![String::from("st"), String::from("peter")],
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
]
),
vec![
Tokenized::new(String::from("st"), Some(TokenType::Way)),
Tokenized::new(String::from("peter"), None),
]
);
assert_eq!(
type_us_st(
&vec![String::from("st"), String::from("peter")],
vec![
Tokenized::new(String::from("st"), Some(TokenType::Way)),
Tokenized::new(String::from("peter"), None),
]
),
vec![
Tokenized::new(String::from("st"), Some(TokenType::Way)),
Tokenized::new(String::from("peter"), None),
]
);
// st peter av
assert_eq!(
type_us_st(
&vec![
String::from("st"),
String::from("peter"),
String::from("av")
],
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way))
]
),
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way))
]
);
assert_eq!(
type_us_st(
&vec![
String::from("st"),
String::from("peter"),
String::from("av")
],
vec![
Tokenized::new(String::from("st"), Some(TokenType::Way)),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way))
]
),
vec![
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("peter"), None),
Tokenized::new(String::from("av"), Some(TokenType::Way))
]
);
// rue st francois st
assert_eq!(
type_us_st(
&vec![
String::from("rue"),
String::from("st"),
String::from("francois"),
String::from("st")
],
vec![
Tokenized::new(String::from("rue"), None),
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("francois"), None),
Tokenized::new(String::from("st"), None)
]
),
vec![
Tokenized::new(String::from("rue"), None),
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("francois"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
assert_eq!(
type_us_st(
&vec![
String::from("rue"),
String::from("st"),
String::from("francois"),
String::from("st")
],
vec![
Tokenized::new(String::from("rue"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way)),
Tokenized::new(String::from("francois"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
),
vec![
Tokenized::new(String::from("rue"), None),
Tokenized::new(String::from("st"), None),
Tokenized::new(String::from("francois"), None),
Tokenized::new(String::from("st"), Some(TokenType::Way))
]
);
}
}
|
extern crate num;
use num::complex::Complex64;
extern crate piston_window;
use piston_window::*;
extern crate structopt;
#[macro_use]
extern crate structopt_derive;
use structopt::StructOpt;
extern crate image;
use image::{LumaA, Rgba};
extern crate imageproc;
use imageproc::drawing::draw_text_mut;
extern crate rusttype;
use rusttype::{FontCollection, Scale};
extern crate rayon;
use rayon::prelude::*;
extern crate itertools;
use itertools::Itertools;
use std::sync::{Arc, Mutex};
use std::ops::DerefMut;
// Grayscale-with-alpha image buffer parameterized over channel depth.
type Pic<T> = image::ImageBuffer<image::LumaA<T>, std::vec::Vec<T>>;
// Canvas plus render state shared between the compute and display threads.
type SharedData<'a> = Arc<Mutex<(&'a mut Pic<u16>, &'a mut State)>>;
/// Command-line options for the renderer (parsed via structopt).
#[derive(Debug, StructOpt)]
struct Config {
    #[structopt(short="w", long="width", help="Window width", default_value = "1280")]
    width: u32,
    #[structopt(short="h", long="height", help="Window height", default_value = "720")]
    height: u32,
    #[structopt(short="b", long="bounds", help="Minimum and maximum value for bounds", default_value = "0.6")]
    bounds: f64,
    #[structopt(short="p", long="power", help="Power to use in the Mandelbrot equation", default_value = "2.0")]
    power: f64,
    #[structopt(short="f", long="factor", help="Exponent used to determine brightness curve", default_value = "50.0")]
    factor: f64,
    // Scale factor from complex-plane units to pixels (see draw_point).
    #[structopt(short="z", long="zoom", default_value = "350")]
    zoom: u32,
    #[structopt(short="d", long="delta", help="Steps between each coordinate", default_value = "0.05")]
    delta: f64,
    #[structopt(short="l", long="loops", help="Number of iterations for each coordinate", default_value = "200")]
    loop_limit: u32,
    #[structopt(long="re", help="Real offset", default_value = "0.0")]
    off_real: f64,
    #[structopt(long="im", help="Imaginary offset", default_value = "0.0")]
    off_imaginary: f64,
}
/// Progress/render bookkeeping shared between the worker and display threads.
#[derive(Debug, Copy, Clone)]
struct State {
    // Number of coordinates processed so far.
    counter: usize,
    // Total number of coordinates to process.
    max: usize,
    // Set once `counter` first reaches `max`; suppresses redundant re-renders.
    just_finished: bool,
    // Frames since the buffer was last regenerated (re-render every 30th).
    render_count: u8,
}
/// Convert an HSV color (hue in degrees, sat/val in [0, 1]) to an RGBA pixel.
fn get_rgba(hue: f64, sat: f64, val: f64) -> Rgba<u8> {
    let sector = (hue / 60.0).floor() % 6.0;
    let f = (hue / 60.0) - (hue / 60.0).floor();
    let p = val * (1.0 - sat);
    let q = val * (1.0 - f * sat);
    let t = val * (1.0 - (1.0 - f) * sat);
    // Channel ordering depends on which 60-degree sector the hue falls in.
    let (r, g, b) = match sector as u8 {
        0 => (val, t, p),
        1 => (q, val, p),
        2 => (p, val, t),
        3 => (p, q, val),
        4 => (t, p, val),
        5 => (val, p, q),
        _ => return Rgba([0, 0, 0, 255]),
    };
    Rgba([
        (255.0 * r) as u8,
        (255.0 * g) as u8,
        (255.0 * b) as u8,
        255,
    ])
}
/// One Mandelbrot iteration step: z -> z^power + c, where c = x + y*i.
fn mandelbrot(z: Complex64, x: f64, y: f64, cfg: &Config) -> Complex64 {
    z.powf(cfg.power) + Complex64::new(x, y)
}
/// Map the complex point `z` to pixel coordinates and bump that pixel's hit
/// counter, ignoring points that fall outside the canvas.
fn draw_point(image: &mut Pic<u16>, z: Complex64, cfg: &Config) {
    // Screen-space position: the window center is the origin, with the
    // imaginary axis pointing up (hence the subtraction for y).
    let pos_x = (cfg.width as f64/2.0) + z.re * cfg.zoom as f64 - 0.5;
    let pos_y = (cfg.height as f64/2.0) - z.im * cfg.zoom as f64 + 0.5;
    // NOTE(review): the x test admits pos_x in [-1, 0), which the `as u32`
    // cast below collapses to column 0 — presumably intentional rounding at
    // the left edge; confirm the asymmetry with the y test is deliberate.
    if pos_x + 1.0 < 0.0 || pos_y < 0.0 {
        return;
    }
    let pos_x = pos_x as u32;
    let pos_y = pos_y as u32;
    if pos_x >= cfg.width || pos_y >= cfg.height {
        return;
    }
    let pixel = image.get_pixel_mut(pos_x as u32, pos_y as u32);
    // Saturating add: hit counters cap at u16::MAX instead of wrapping.
    pixel[0] = pixel[0].saturating_add(1);
}
/// Tone-map the u16 hit-count canvas into an 8-bit RGBA buffer using an
/// exponential brightness curve controlled by `factor`.
fn u16_to_u8(from: &Pic<u16>, to: &mut image::ImageBuffer<Rgba<u8>, Vec<u8>>, factor: f64) {
    let pairs = from.enumerate_pixels().zip(to.enumerate_pixels_mut());
    for ((_, _, src), (_, _, dst)) in pairs {
        let hits = src[0] as f64;
        // 1 - e^(-hits/factor): approaches 1 as the hit count grows.
        let brightness = 1.0 - ((-hits) / factor).exp();
        let channel = (brightness * 255.0) as u8;
        *dst = Rgba([channel, channel, channel, 255]);
    }
}
/// Iterate the Mandelbrot map for the coordinate (x, y), collect the orbit,
/// then draw every orbit point onto the shared canvas under the lock.
fn iterate_coordinate(x: f64, y: f64, p_data: SharedData, cfg: &Config) {
    let mut z = Complex64::new(cfg.off_real, cfg.off_imaginary);
    // Skip the first value to get rid of the square.
    z = mandelbrot(z, x, y, &cfg);
    // Compute the whole orbit before taking the lock, to keep the critical
    // section short.
    let points: Vec<_> = (0..cfg.loop_limit)
        .map(|_| {
            z = mandelbrot(z, x, y, &cfg);
            z
        })
        .collect();
    let mut guard = p_data.lock().expect("Lock failed");
    let &mut (ref mut canvas, ref mut state) = guard.deref_mut();
    for point in points {
        draw_point(canvas, point, &cfg);
    }
    state.counter += 1;
}
/// Debug helper: histogram the non-zero hit counts in 5000-wide buckets
/// (u16::MAX / 5000 = 13, hence 14 buckets) and pretty-print them.
fn output_buckets(canvas: &Pic<u16>) {
    const STEP: u16 = 5000;
    let mut buckets = vec![0; 14];
    for (_, _, px) in canvas.enumerate_pixels() {
        let count = px[0];
        if count > 0 {
            buckets[(count / STEP) as usize] += 1;
        }
    }
    println!("{:#?}", buckets);
}
/// Display loop: shows the shared canvas in a piston window, overlaying
/// progress text and the config. Right-click saves the buffer to out.png;
/// mouse wheel halves/doubles the brightness factor and forces a re-render.
fn display(cfg: &Config, shared_pic: SharedData) {
    let mut window: PistonWindow = WindowSettings::new("Pixi", (cfg.width, cfg.height))
        .exit_on_esc(true)
        .opengl(OpenGL::V4_5)
        .build()
        .expect("Error creating window");
    let mut buffer = image::ImageBuffer::new(cfg.width, cfg.height);
    let mut texture = Texture::from_image( &mut window.factory, &buffer, &TextureSettings::new()).expect("Error creating texture.");
    // Font for the progress/config overlay, bundled into the binary.
    let font = Vec::from(include_bytes!("SourceSansPro-Light.ttf") as &[u8]);
    let font = FontCollection::from_bytes(font).into_font().unwrap();
    let scale = Scale { x: 12.4 * 2.0, y: 12.4 };
    // Pretty-printed config, one (line-index, text) pair per overlay line.
    let cfg_string = format!("{:#?}", cfg);
    let cfg_string: Vec<_> = cfg_string.lines().enumerate().collect();
    let mut factor = cfg.factor;
    let now = std::time::Instant::now();
    let mut force_rerender = false;
    while let Some(e) = window.next() {
        if let Some(_) = e.render_args() {
            let mut pic_data = shared_pic.lock().expect("Lock failed");
            let &mut (ref canvas, ref mut state) = pic_data.deref_mut();
            // Regenerate the 8-bit buffer every 30th frame while work is
            // still in progress, or immediately when forced by a scroll.
            if force_rerender || (state.render_count == 30 && (!state.just_finished || state.counter != state.max)) {
                u16_to_u8(&canvas, &mut buffer, factor);
                // Progress percentage and elapsed seconds, top-left corner.
                draw_text_mut(&mut buffer, Rgba([255, 255, 255, 255]), 10, 10, scale, &font, &format!("{:.*}% - {}s", 2, (state.counter as f64)/(state.max as f64)*100.0, now.elapsed().as_secs()))
                for &(i, l) in cfg_string.iter() {
                    draw_text_mut(&mut buffer, Rgba([255, 255, 255, 255]), 10, 20 + i as u32*13, scale, &font, l);
                }
                state.just_finished = state.counter == state.max;
                state.render_count = 0;
                force_rerender = false;
                if state.just_finished {
                    //output_buckets(&canvas);
                }
            }
            state.render_count += 1;
            texture.update(&mut window.encoder, &buffer).expect("Error flipping buffer");
            window.draw_2d(&e, |c,g| {
                image(&texture, c.transform, g);
            });
        }
        if let Some(btn) = e.release_args() {
            match btn {
                // Right mouse button: dump the current frame to disk.
                Button::Mouse(MouseButton::Right) => buffer.save("out.png").unwrap(),
                _ => (),
            }
        }
        if let Some(scroll) = e.mouse_scroll_args() {
            // Scroll adjusts the brightness curve: down dims, up brightens.
            if scroll[1] == -1.0 {
                factor *= 2.0;
                force_rerender = true;
            } else if scroll[1] == 1.0 {
                factor /= 2.0;
                force_rerender = true;
            }
        }
    }
    // Horrid hack to make both threads exit.
    panic!("Exiting.");
}
/// Entry point: renders orbit points on rayon's thread pool while the
/// display loop shows the shared canvas from another thread.
fn main() {
    let cfg = Config::from_args();
    // Grayscale+alpha canvas; counts start at 0, alpha fully opaque.
    let mut canvas = image::ImageBuffer::from_pixel(cfg.width, cfg.height, LumaA([0,u16::max_value()]));
    // 1-D sample grid over [-bounds, bounds) with spacing `delta`.
    let coords: Vec<_> = (0_u32..).map(|x| -cfg.bounds + x as f64 * cfg.delta).take_while(|&x| x < cfg.bounds).collect();
    // Cartesian product: every (y, x) starting coordinate to iterate.
    let all_coords: Vec<_> = coords.iter().cartesian_product(coords.iter()).collect();
    let mut state = State {
        counter: 0,
        max: all_coords.len(),
        just_finished: false,
        render_count: 0,
    };
    let shared_pic = Arc::new(Mutex::new((&mut canvas, &mut state)));
    // display() never returns normally; it panics to tear both closures down.
    rayon::join(
        || all_coords.par_iter().for_each(|&(&y, &x)| iterate_coordinate(x, y, shared_pic.clone(), &cfg) ),
        || display(&cfg, shared_pic.clone()));
}
|
/// 0xRRGGBB color used for error messages (red).
pub static ERROR_COLOR: u32 = 0xff0033;
/// 0xRRGGBB color used for warnings (yellow).
pub static WARN_COLOR: u32 = 0xffe148;
/// 0xRRGGBB color used for success messages (green).
pub static SUCCESS_COLOR: u32 = 0x28a745;
|
use wabt;
use wasmi::*;
/// Exercises wasmi linear-memory access three ways:
/// 1) a module importing "env"."memory" seeded by the host resolver (123),
/// 2) a module exporting its own memory that the host writes into (1234),
/// 3) a precompiled Rust contract exporting memory (12345).
fn main() {
    let wasm_binary_import = wabt::wat2wasm(
        r#"
    (module
        (memory (import "env" "memory") 17)
        (func (export "get_first_i32") (result i32)
            (i32.load (i32.const 0))
        )
    )
    "#,
    )
    .expect("failed to parse wat 2");
    {
        // Fixed label: this is module 2 (the importing module), the old
        // message said "wasm 1".
        let module =
            wasmi::Module::from_buffer(&wasm_binary_import).expect("failed to load wasm 2");
        let imports = ImportsBuilder::new().with_resolver("env", &ResolveAll {});
        let instance = ModuleInstance::new(&module, &imports)
            .expect("failed to instantiate wasm module 2")
            .assert_no_start();
        // ResolveAll seeds the imported memory with 123 at address 0.
        println!(
            "import got {:?} wanted {:?}",
            instance
                .invoke_export("get_first_i32", &[], &mut wasmi::NopExternals)
                .expect("failed to execute get_first_i32 2"),
            Some(RuntimeValue::I32(123))
        );
    }
    let wasm_binary_export = wabt::wat2wasm(
        r#"
    (module
        (memory (;0;) 17)
        (export "memory" (memory 0))
        (func (export "get_first_i32") (result i32)
            (i32.load (i32.const 0))
        )
    )
    "#,
    )
    .expect("failed to parse wat 1");
    {
        let module =
            wasmi::Module::from_buffer(&wasm_binary_export).expect("failed to load wasm 1");
        let imports = ImportsBuilder::new().with_resolver("env", &ResolveAll {});
        let instance = ModuleInstance::new(&module, &imports)
            .expect("failed to instantiate wasm module 1")
            .assert_no_start();
        let mem = instance
            .export_by_name("memory")
            .expect("Module expected to have 'memory' export")
            .as_memory()
            .cloned()
            .expect("'memory' export should be a memory");
        // The host writes into the module's exported memory before calling it.
        mem.set_value(0, 1234 as i32)
            .expect("memory.set_value should not fail");
        // Fixed label: this is module 1, the old message said "... 2".
        println!(
            "export got {:?} wanted {:?}",
            instance
                .invoke_export("get_first_i32", &[], &mut wasmi::NopExternals)
                .expect("failed to execute get_first_i32 1"),
            Some(RuntimeValue::I32(1234))
        );
    }
    {
        let wasm_binary_rust: Vec<u8> = include_bytes!(
            "./rust_contract/target/wasm32-unknown-unknown/release/wasm_example.wasm"
        )
        .iter()
        .cloned()
        .collect();
        let module = wasmi::Module::from_buffer(&wasm_binary_rust).expect("failed to load wasm 3");
        let imports = ImportsBuilder::new().with_resolver("env", &ResolveAll {});
        let instance = ModuleInstance::new(&module, &imports)
            .expect("failed to instantiate wasm module 3")
            .assert_no_start();
        let mem = instance
            .export_by_name("memory")
            .expect("Module expected to have 'memory' export")
            .as_memory()
            .cloned()
            .expect("'memory' export should be a memory");
        // NOTE(review): written at offset 4, not 0 — presumably matches the
        // Rust contract's data layout; confirm against the contract source.
        mem.set_value(4, 12345 as i32)
            .expect("memory.set_value should not fail");
        println!(
            "rust got {:?} wanted {:?}",
            instance
                .invoke_export("get_first_i32", &[], &mut wasmi::NopExternals)
                .expect("failed to execute get_first_i32 3"),
            Some(RuntimeValue::I32(12345))
        );
    }
}
/// Import resolver that satisfies a module's "memory" import by allocating
/// a fresh memory instance and seeding its first word with 123.
struct ResolveAll {}
impl wasmi::ModuleImportResolver for ResolveAll {
    fn resolve_memory(
        &self,
        field_name: &str,
        descriptor: &MemoryDescriptor,
    ) -> Result<MemoryRef, wasmi::Error> {
        if field_name == "memory" {
            // Allocate with the module's requested initial/maximum page counts.
            let mem = MemoryInstance::alloc(
                memory_units::Pages(descriptor.initial() as usize),
                descriptor
                    .maximum()
                    .map(|x| memory_units::Pages(x as usize)),
            )?;
            // Seed address 0 so the importing module reads a known value.
            mem.set_value(0, 123 as i32)?;
            Ok(mem)
        } else {
            // Only the "memory" field name is supported.
            Err(wasmi::Error::Instantiation(
                "Memory imported under unknown name".to_owned(),
            ))
        }
    }
}
|
use std::ops::{Index, IndexMut};
/// CHIP-8 style register bank: sixteen 8-bit general purpose registers.
#[derive(Debug)]
pub struct Registers {
    // Register 0xF (VF) doubles as the carry/borrow flag (see `set_carry`).
    regs: [u8; 16],
}
/// Which nibble of a 16-bit opcode encodes a register index.
pub enum Position {
    /// Second 4 bits of the opcode (0X00)
    X,
    /// Third 4 bits of the opcode (00Y0)
    Y,
}
impl Registers {
    /// Creates a register bank with all sixteen registers zeroed.
    pub fn new() -> Registers {
        Registers { regs: [0; 16] }
    }
    /// Adds `value` to register `index`, wrapping on overflow.
    ///
    /// The carry flag is intentionally not touched here; the previous
    /// `overflowing_add` discarded the overflow flag anyway, so
    /// `wrapping_add` states the intent directly.
    pub fn add_immediate(&mut self, index: u8, value: u8) {
        self[index] = self[index].wrapping_add(value);
    }
    /// Extracts the register index encoded at `position` within `opcode`.
    pub fn get_index(opcode: u16, position: Position) -> u8 {
        match position {
            Position::X => ((opcode & 0x0F00) >> 8) as u8,
            Position::Y => ((opcode & 0x00F0) >> 4) as u8,
        }
    }
    /// Sets the VF flag register: 1 when `carried`, 0 otherwise.
    pub fn set_carry(&mut self, carried: bool) {
        // bool-to-int cast replaces the branchy if/else.
        self[0xF] = carried as u8;
    }
}
impl Index<u8> for Registers {
    type Output = u8;
    /// Reads register `index`; panics if `index` is not in 0..16.
    fn index(&self, index: u8) -> &Self::Output {
        &self.regs[index as usize]
    }
}
impl Index<(u16, Position)> for Registers {
    type Output = u8;
    /// Reads the register whose index is encoded in the opcode nibble
    /// selected by `Position` — convenience for `regs[get_index(op, pos)]`.
    fn index(&self, index: (u16, Position)) -> &Self::Output {
        let index = Registers::get_index(index.0, index.1);
        &self.regs[index as usize]
    }
}
impl IndexMut<(u16, Position)> for Registers {
    /// Mutable access to the register encoded in the opcode nibble
    /// selected by `Position`.
    fn index_mut(&mut self, index: (u16, Position)) -> &mut Self::Output {
        let index = Registers::get_index(index.0, index.1);
        &mut self.regs[index as usize]
    }
}
impl IndexMut<u8> for Registers {
    /// Mutable access to register `index`; panics if `index` is not in 0..16.
    fn index_mut(&mut self, index: u8) -> &mut Self::Output {
        &mut self.regs[index as usize]
    }
}
|
/// Marker type hosting the LeetCode 396 "Rotate Function" solution.
pub struct Solution;
impl Solution {
    /// Returns the maximum of F(k) = sum(i * b_k[i]) over all rotations b_k of `a`.
    ///
    /// Uses the O(n) recurrence: with `total = sum(a)` and `prefix(k) = a[0] + .. + a[k-1]`,
    /// F(k) = F(0) - k * total + n * prefix(k).
    pub fn max_rotate_function(a: Vec<i32>) -> i32 {
        let n = a.len() as i32;
        let total: i32 = a.iter().sum();
        let f0: i32 = a.iter().enumerate().map(|(i, &v)| i as i32 * v).sum();
        let mut best = f0;
        let mut prefix = 0;
        // Only rotations 1..n need evaluating; F(0) seeds `best`.
        // saturating_sub keeps the empty-input case from underflowing.
        for (k, &v) in a.iter().enumerate().take(a.len().saturating_sub(1)) {
            prefix += v;
            let k = k as i32 + 1;
            best = best.max(f0 - k * total + n * prefix);
        }
        best
    }
}
#[test]
fn test0396() {
    // LeetCode 396 example: the best rotation of [4, 3, 2, 6] scores 26.
    assert_eq!(Solution::max_rotate_function(vec![4, 3, 2, 6]), 26);
}
|
use std::ffi::c_void;
use std::ops::Deref;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use ash::extensions::khr;
use ash::vk;
use parking_lot::{
ReentrantMutex,
ReentrantMutexGuard,
};
use crate::queue::VkQueueInfo;
use crate::raw::RawVkInstance;
bitflags! {
    /// Optional Vulkan device capabilities detected at device creation and
    /// queried by the renderer (see `RawVkDevice::new`).
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub struct VkFeatures : u32 {
        const DESCRIPTOR_INDEXING = 0b1;
        const DEDICATED_ALLOCATION = 0b10;
        const DESCRIPTOR_TEMPLATE = 0b100;
        const RAY_TRACING = 0b1000;
        const ADVANCED_INDIRECT = 0b10000;
        const MIN_MAX_FILTER = 0b100000;
        const BARYCENTRICS = 0b1000000;
        const IMAGE_FORMAT_LIST = 0b10000000;
    }
}
/// Bundle of raw Vulkan/VMA handles, queue state and loaded extension
/// entry points, constructed once in `RawVkDevice::new`.
pub struct RawVkDevice {
    pub device: ash::Device,
    pub allocator: vma_sys::VmaAllocator,
    pub physical_device: vk::PhysicalDevice,
    pub instance: Arc<RawVkInstance>,
    /// Capabilities detected for this device (drives optional fields below).
    pub features: VkFeatures,
    pub graphics_queue_info: VkQueueInfo,
    pub compute_queue_info: Option<VkQueueInfo>,
    pub transfer_queue_info: Option<VkQueueInfo>,
    // Initialized to true in `new`; read/flipped by code outside this view.
    pub is_alive: AtomicBool,
    // Queues sit behind reentrant mutexes so submission can be serialized;
    // `wait_for_idle` locks all of them before waiting.
    pub graphics_queue: ReentrantMutex<vk::Queue>,
    pub compute_queue: Option<ReentrantMutex<vk::Queue>>,
    pub transfer_queue: Option<ReentrantMutex<vk::Queue>>,
    /// Ray-tracing extension entry points; Some only with `VkFeatures::RAY_TRACING`.
    pub rt: Option<RawVkRTEntries>,
    /// Some only with `VkFeatures::ADVANCED_INDIRECT`.
    pub indirect_count: Option<ash::extensions::khr::DrawIndirectCount>,
    /// Whether D24_UNORM_S8_UINT supports depth-stencil attachment (optimal tiling).
    pub supports_d24: bool,
    pub timeline_semaphores: ash::extensions::khr::TimelineSemaphore,
    pub synchronization2: ash::extensions::khr::Synchronization2,
    pub properties: vk::PhysicalDeviceProperties,
}
// NOTE(review): manual Send/Sync — assumes the raw vma_sys::VmaAllocator and
// Vulkan handles are safe to move/share across threads; confirm against the
// VMA and ash thread-safety documentation.
unsafe impl Send for RawVkDevice {}
unsafe impl Sync for RawVkDevice {}
/// Loaded entry points and properties for the ray-tracing extension family,
/// created only when `VkFeatures::RAY_TRACING` is present.
pub struct RawVkRTEntries {
    pub acceleration_structure: khr::AccelerationStructure,
    pub rt_pipelines: khr::RayTracingPipeline,
    pub deferred_operations: khr::DeferredHostOperations,
    pub bda: khr::BufferDeviceAddress,
    /// Filled via the PhysicalDeviceProperties2 p_next chain in `RawVkDevice::new`.
    pub rt_pipeline_properties: vk::PhysicalDeviceRayTracingPipelinePropertiesKHR,
}
// NOTE(review): manual Send/Sync — likely needed because
// PhysicalDeviceRayTracingPipelinePropertiesKHR embeds raw pointers; confirm
// that the struct is only read after `new` fills it.
unsafe impl Send for RawVkRTEntries {}
unsafe impl Sync for RawVkRTEntries {}
impl RawVkDevice {
    /// Assembles the device bundle: queries ray-tracing and format properties,
    /// loads extension entry points and wraps the queues in reentrant mutexes.
    pub fn new(
        device: ash::Device,
        allocator: vma_sys::VmaAllocator,
        physical_device: vk::PhysicalDevice,
        instance: Arc<RawVkInstance>,
        features: VkFeatures,
        graphics_queue_info: VkQueueInfo,
        compute_queue_info: Option<VkQueueInfo>,
        transfer_queue_info: Option<VkQueueInfo>,
        graphics_queue: vk::Queue,
        compute_queue: Option<vk::Queue>,
        transfer_queue: Option<vk::Queue>,
    ) -> Self {
        let mut rt_pipeline_properties =
            vk::PhysicalDeviceRayTracingPipelinePropertiesKHR::default();
        let mut properties: vk::PhysicalDeviceProperties2 = Default::default();
        if features.contains(VkFeatures::RAY_TRACING) {
            // Splice rt_pipeline_properties into the front of the properties
            // p_next chain: the previous head becomes rt_pipeline_properties'
            // own p_next, so the query below also fills the RT properties.
            rt_pipeline_properties.p_next = std::mem::replace(
                &mut properties.p_next,
                &mut rt_pipeline_properties
                    as *mut vk::PhysicalDeviceRayTracingPipelinePropertiesKHR
                    as *mut c_void,
            );
        }
        unsafe { instance.get_physical_device_properties2(physical_device, &mut properties) };
        let rt = if features.contains(VkFeatures::RAY_TRACING) {
            Some(RawVkRTEntries {
                acceleration_structure: khr::AccelerationStructure::new(&instance, &device),
                rt_pipelines: khr::RayTracingPipeline::new(&instance, &device),
                deferred_operations: khr::DeferredHostOperations::new(&instance, &device),
                bda: khr::BufferDeviceAddress::new(&instance, &device),
                rt_pipeline_properties,
            })
        } else {
            None
        };
        // Probe whether D24_UNORM_S8_UINT can be used as a depth-stencil
        // attachment with optimal tiling on this device.
        let mut d24_props = vk::FormatProperties2::default();
        unsafe {
            instance.get_physical_device_format_properties2(
                physical_device,
                vk::Format::D24_UNORM_S8_UINT,
                &mut d24_props,
            );
        }
        let supports_d24 = d24_props
            .format_properties
            .optimal_tiling_features
            .contains(vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT);
        let indirect_count = features
            .contains(VkFeatures::ADVANCED_INDIRECT)
            .then(|| ash::extensions::khr::DrawIndirectCount::new(&instance, &device));
        let timeline_semaphores = ash::extensions::khr::TimelineSemaphore::new(&instance, &device);
        let synchronization2 = ash::extensions::khr::Synchronization2::new(&instance, &device);
        Self {
            device,
            allocator,
            physical_device,
            instance,
            features,
            graphics_queue_info,
            compute_queue_info,
            transfer_queue_info,
            graphics_queue: ReentrantMutex::new(graphics_queue),
            compute_queue: compute_queue.map(ReentrantMutex::new),
            transfer_queue: transfer_queue.map(ReentrantMutex::new),
            is_alive: AtomicBool::new(true),
            rt,
            indirect_count,
            supports_d24,
            timeline_semaphores,
            synchronization2,
            properties: properties.properties,
        }
    }
    /// Locks and returns the graphics queue (reentrant: safe to nest).
    pub fn graphics_queue(&self) -> ReentrantMutexGuard<vk::Queue> {
        self.graphics_queue.lock()
    }
    /// Locks the dedicated compute queue, if the device has one.
    pub fn compute_queue(&self) -> Option<ReentrantMutexGuard<vk::Queue>> {
        self.compute_queue.as_ref().map(|queue| queue.lock())
    }
    /// Locks the dedicated transfer queue, if the device has one.
    pub fn transfer_queue(&self) -> Option<ReentrantMutexGuard<vk::Queue>> {
        self.transfer_queue.as_ref().map(|queue| queue.lock())
    }
    /// Blocks until the device is idle, holding all queue locks so no other
    /// thread can submit while waiting.
    pub fn wait_for_idle(&self) {
        let _graphics_queue_lock = self.graphics_queue();
        let _compute_queue_lock = self.compute_queue();
        let _transfer_queue_lock = self.transfer_queue();
        unsafe {
            self.device.device_wait_idle().unwrap();
        }
    }
}
impl Deref for RawVkDevice {
    type Target = ash::Device;
    /// Smart-pointer style access: lets callers invoke `ash::Device`
    /// methods directly on a `RawVkDevice`.
    fn deref(&self) -> &Self::Target {
        &self.device
    }
}
impl Drop for RawVkDevice {
    fn drop(&mut self) {
        // NOTE(review): destroys the VMA allocator before the device it was
        // created from; assumes all allocations/resources were released first
        // (e.g. after `wait_for_idle`) — confirm with callers.
        unsafe {
            vma_sys::vmaDestroyAllocator(self.allocator);
            self.device.destroy_device(None);
        }
    }
}
|
use crate::FileDialog;
use std::path::Path;
use std::{ops::DerefMut, path::PathBuf};
use objc::{class, msg_send, sel, sel_impl};
use objc_id::Id;
use super::super::utils::{INSURL, NSURL};
use objc::runtime::{Object, YES};
use objc::runtime::{BOOL, NO};
use objc_foundation::{INSArray, INSString, NSArray, NSString};
use super::super::{
utils::{FocusManager, PolicyManager},
AsModal,
};
extern "C" {
    /// Core Graphics function returning the shielding window level; used in
    /// `Panel::new` to raise the dialog above other windows.
    pub fn CGShieldingWindowLevel() -> i32;
}
/// Converts a Rust string slice into an owned Objective-C NSString.
fn make_nsstring(s: &str) -> Id<NSString> {
    NSString::from_str(s)
}
/// Wrapper around an AppKit NSOpenPanel/NSSavePanel instance.
pub struct Panel {
    pub(crate) panel: Id<Object>,
    // Kept alive for the panel's lifetime; presumably restore focus and the
    // app activation policy when dropped — see super::super::utils.
    _focus_manager: FocusManager,
    _policy_manager: PolicyManager,
}
impl AsModal for Panel {
    /// Exposes the raw panel object pointer for modal presentation.
    fn modal_ptr(&mut self) -> *mut Object {
        self.panel.deref_mut()
    }
}
impl Panel {
    /// Wraps a raw NSPanel pointer, raising its window level so the dialog
    /// shows above other windows, and installing focus/policy managers.
    pub fn new(panel: *mut Object) -> Self {
        let _policy_manager = PolicyManager::new();
        let _focus_manager = FocusManager::new();
        let _: () = unsafe { msg_send![panel, setLevel: CGShieldingWindowLevel()] };
        Self {
            panel: unsafe { Id::from_ptr(panel) },
            _focus_manager,
            _policy_manager,
        }
    }
    /// Creates an NSOpenPanel-backed panel (open/pick dialogs).
    pub fn open_panel() -> Self {
        Self::new(unsafe { msg_send![class!(NSOpenPanel), openPanel] })
    }
    /// Creates an NSSavePanel-backed panel (save dialogs).
    pub fn save_panel() -> Self {
        Self::new(unsafe { msg_send![class!(NSSavePanel), savePanel] })
    }
    /// Runs the panel modally; returns the AppKit modal response code.
    pub fn run_modal(&self) -> i32 {
        unsafe { msg_send![self.panel, runModal] }
    }
    /// Toggles whether directories may be selected.
    pub fn set_can_choose_directories(&self, v: BOOL) {
        let _: () = unsafe { msg_send![self.panel, setCanChooseDirectories: v] };
    }
    /// Toggles whether files may be selected.
    pub fn set_can_choose_files(&self, v: BOOL) {
        let _: () = unsafe { msg_send![self.panel, setCanChooseFiles: v] };
    }
    /// Toggles multi-selection in the panel.
    pub fn set_allows_multiple_selection(&self, v: BOOL) {
        let _: () = unsafe { msg_send![self.panel, setAllowsMultipleSelection: v] };
    }
    /// Flattens all filter extensions from `params` into one list and installs
    /// it as the panel's allowed file types.
    pub fn add_filters(&self, params: &FileDialog) {
        let mut exts: Vec<String> = Vec::new();
        for filter in params.filters.iter() {
            exts.append(&mut filter.extensions.to_vec());
        }
        unsafe {
            let f_raw: Vec<_> = exts.iter().map(|ext| make_nsstring(&ext)).collect();
            let array = NSArray::from_vec(f_raw);
            let _: () = msg_send![self.panel, setAllowedFileTypes: array];
        }
    }
    /// Sets the panel's starting directory URL; when `file_name` is given and
    /// `path` is a directory, the name is appended to form the target path.
    pub fn set_path(&self, path: &Path, file_name: Option<&str>) {
        // if file_name is some, and path is a dir
        let path = if let (Some(name), true) = (file_name, path.is_dir()) {
            let mut path = path.to_owned();
            // add a name to the end of path
            path.push(name);
            path
        } else {
            path.to_owned()
        };
        // Paths that are not valid UTF-8 are silently ignored.
        if let Some(path) = path.to_str() {
            unsafe {
                let url = NSURL::file_url_with_path(path, true);
                let () = msg_send![self.panel, setDirectoryURL: url];
            }
        }
    }
    /// Returns the single selected path (panel's URL property).
    pub fn get_result(&self) -> PathBuf {
        unsafe {
            let url = msg_send![self.panel, URL];
            let url: Id<NSURL> = Id::from_ptr(url);
            url.to_path_buf()
        }
    }
    /// Returns all selected paths (panel's URLs property).
    pub fn get_results(&self) -> Vec<PathBuf> {
        unsafe {
            let urls = msg_send![self.panel, URLs];
            let urls: Id<NSArray<NSURL>> = Id::from_ptr(urls);
            let mut res = Vec::new();
            for url in urls.to_vec() {
                res.push(url.to_path_buf());
            }
            res
        }
    }
}
impl Panel {
    /// Configures an open panel for picking a single file.
    pub fn build_pick_file(opt: &FileDialog) -> Self {
        let panel = Panel::open_panel();
        if !opt.filters.is_empty() {
            // `opt` is already a reference; `&opt` was a needless double borrow.
            panel.add_filters(opt);
        }
        if let Some(path) = &opt.starting_directory {
            panel.set_path(path, opt.file_name.as_deref());
        }
        panel.set_can_choose_directories(NO);
        panel.set_can_choose_files(YES);
        panel
    }
    /// Configures a save panel for choosing a destination file path.
    pub fn build_save_file(opt: &FileDialog) -> Self {
        let panel = Panel::save_panel();
        if !opt.filters.is_empty() {
            panel.add_filters(opt);
        }
        if let Some(path) = &opt.starting_directory {
            panel.set_path(path, opt.file_name.as_deref());
        }
        panel
    }
    /// Configures an open panel restricted to directories.
    pub fn build_pick_folder(opt: &FileDialog) -> Self {
        let panel = Panel::open_panel();
        if let Some(path) = &opt.starting_directory {
            panel.set_path(path, opt.file_name.as_deref());
        }
        panel.set_can_choose_directories(YES);
        panel.set_can_choose_files(NO);
        panel
    }
    /// Configures an open panel for picking multiple files.
    pub fn build_pick_files(opt: &FileDialog) -> Self {
        let panel = Panel::open_panel();
        if !opt.filters.is_empty() {
            panel.add_filters(opt);
        }
        if let Some(path) = &opt.starting_directory {
            panel.set_path(path, opt.file_name.as_deref());
        }
        panel.set_can_choose_directories(NO);
        panel.set_can_choose_files(YES);
        panel.set_allows_multiple_selection(YES);
        panel
    }
}
|
use crate::plan::ir::TagSet;
use crate::plan::var_ref::{field_type_to_var_ref_data_type, influx_type_to_var_ref_data_type};
use crate::plan::SchemaProvider;
use influxdb_influxql_parser::expression::VarRefDataType;
use schema::InfluxColumnType;
use std::collections::HashMap;
/// Maps a field name to its InfluxQL variable-reference data type.
pub(crate) type FieldTypeMap = HashMap<String, VarRefDataType>;
/// Splits the columns of measurement `name` into a field-type map and a tag set.
///
/// Returns `None` when the schema provider does not know the measurement.
/// Timestamp columns belong to neither set.
pub(crate) fn field_and_dimensions(
    s: &dyn SchemaProvider,
    name: &str,
) -> Option<(FieldTypeMap, TagSet)> {
    let schema = s.table_schema(name)?;
    let mut fields = FieldTypeMap::new();
    let mut tags = TagSet::new();
    for (column_type, field) in schema.iter() {
        match column_type {
            InfluxColumnType::Field(ft) => {
                fields.insert(field.name().to_owned(), field_type_to_var_ref_data_type(ft));
            }
            InfluxColumnType::Tag => {
                tags.insert(field.name().to_owned());
            }
            InfluxColumnType::Timestamp => {}
        }
    }
    Some((fields, tags))
}
/// Looks up the VarRef data type of `field` within `measurement_name`.
///
/// Returns `None` when either the measurement or the field is unknown.
pub(crate) fn map_type(
    s: &dyn SchemaProvider,
    measurement_name: &str,
    field: &str,
) -> Option<VarRefDataType> {
    let schema = s.table_schema(measurement_name)?;
    let (data_type, _) = schema.field_by_name(field)?;
    influx_type_to_var_ref_data_type(Some(data_type))
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::plan::test_utils::MockSchemaProvider;
    use assert_matches::assert_matches;
    /// Exercises both `field_and_dimensions` and `map_type` against the
    /// mock schema provider.
    #[test]
    fn test_schema_field_mapper() {
        let namespace = MockSchemaProvider::default();
        // Measurement exists
        let (field_set, tag_set) = field_and_dimensions(&namespace, "cpu").unwrap();
        assert_eq!(
            field_set,
            FieldTypeMap::from([
                ("usage_user".to_string(), VarRefDataType::Float),
                ("usage_system".to_string(), VarRefDataType::Float),
                ("usage_idle".to_string(), VarRefDataType::Float),
            ])
        );
        assert_eq!(
            tag_set,
            TagSet::from(["cpu".to_string(), "host".to_string(), "region".to_string()])
        );
        // Measurement does not exist
        assert!(field_and_dimensions(&namespace, "cpu2").is_none());
        // `map_type` API calls
        // Returns expected type
        assert_matches!(
            map_type(&namespace, "cpu", "usage_user"),
            Some(VarRefDataType::Float)
        );
        assert_matches!(
            map_type(&namespace, "cpu", "host"),
            Some(VarRefDataType::Tag)
        );
        assert_matches!(
            map_type(&namespace, "cpu", "time"),
            Some(VarRefDataType::Timestamp)
        );
        // Returns None for nonexistent field
        assert!(map_type(&namespace, "cpu", "nonexistent").is_none());
        // Returns None for nonexistent measurement
        assert!(map_type(&namespace, "nonexistent", "usage").is_none());
    }
}
|
// Copyright (c) 2018-2022 Ministerio de Fomento
// Instituto de Ciencias de la Construcción Eduardo Torroja (IETcc-CSIC)
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// Author(s): Rafael Villar Burke <pachi@ietcc.csic.es>,
// Daniel Jiménez González <dani@ietcc.csic.es>,
// Marta Sorribes Gil <msorribes@ietcc.csic.es>
/*!
Componentes energéticos
=======================
Define el tipo Components (lista de componentes + metadatos) y sus traits.
Los componentes modelizan el uso y producción de energía en el periodo de cálculo.
Hipótesis:
- Se completa automáticamente el consumo de energía procedente del medioambiente o termosolar con una producción
- El reparto de la electricidad generada es proporcional a los consumos eléctricos
*/
use std::{
collections::{HashMap, HashSet},
fmt, str,
};
use serde::{Deserialize, Serialize};
use crate::{
error::{EpbdError, Result},
types::{Carrier, EProd, Energy, HasValues, Meta, MetaVec, ProdSource, Service, BuildingNeeds},
vecops::{veclistsum, vecvecdif, vecvecsum},
};
/// List of component data bundled with its metadata
///
/// Example of the serialized form:
///
/// #META CTE_AREAREF: 100.5
/// 0, ELECTRICIDAD,CONSUMO,EPB,16.39,13.11,8.20,7.38,4.10,4.92,6.56,5.74,4.10,6.56,9.84,13.11
/// 0, ELECTRICIDAD,PRODUCCION,INSITU,8.20,6.56,4.10,3.69,2.05,2.46,3.28,2.87,2.05,3.28,4.92,6.56
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Components {
    /// Metadata
    pub meta: Vec<Meta>,
    /// Used or produced energy data
    pub data: Vec<Energy>,
    /// Building energy needs
    pub needs: BuildingNeeds,
}
impl MetaVec for Components {
    /// Read-only access to the metadata lines.
    fn get_metavec(&self) -> &Vec<Meta> {
        &self.meta
    }
    /// Mutable access to the metadata lines.
    fn get_mut_metavec(&mut self) -> &mut Vec<Meta> {
        &mut self.meta
    }
}
impl fmt::Display for Components {
    /// Serializes as all metadata lines followed by all data lines, one item
    /// per line — the same text format that `FromStr` parses.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `to_string()` is the idiomatic shorthand for `format!("{}", v)`.
        let metalines = self
            .meta
            .iter()
            .map(|v| v.to_string())
            .collect::<Vec<_>>()
            .join("\n");
        let datalines = self
            .data
            .iter()
            .map(|v| v.to_string())
            .collect::<Vec<_>>()
            .join("\n");
        write!(f, "{}\n{}", metalines, datalines)
    }
}
impl str::FromStr for Components {
    type Err = EpbdError;
    /// Parses the textual component list: `#META`/`#CTE_` lines become
    /// metadata; remaining non-comment, non-header lines become energy
    /// components or building needs, then the result is normalized.
    fn from_str(s: &str) -> std::result::Result<Components, Self::Err> {
        // Tolerate a UTF-8 BOM at the start of the input.
        let s_nobom = s.strip_prefix('\u{feff}').unwrap_or(s);
        let lines: Vec<&str> = s_nobom.lines().map(str::trim).collect();
        let metalines = lines
            .iter()
            .filter(|l| l.starts_with("#META") || l.starts_with("#CTE_"));
        let datalines = lines
            .iter()
            .filter(|l| !(l.starts_with('#') || l.starts_with("vector,") || l.is_empty()));
        let cmeta = metalines
            .map(|e| e.parse())
            .collect::<Result<Vec<Meta>>>()?;
        let mut cdata = Vec::new();
        let mut needs = BuildingNeeds::default();
        // Recognized component type tags
        let ctypes_tag_list = ["CONSUMO", "PRODUCCION", "AUX", "SALIDA", "DEMANDA"];
        for line in datalines {
            // The type tag is either the first field (no system id) or the
            // second one (after a numeric id).
            let tags: Vec<_> = line.splitn(3, ',').map(str::trim).take(2).collect();
            let tag1 = tags.first().unwrap_or(&"");
            let tag2 = tags.get(1).unwrap_or(&"");
            let tag = if ctypes_tag_list.contains(tag1) {
                tag1
            } else {
                tag2
            };
            match *tag {
                "CONSUMO" => cdata.push(Energy::Used(line.parse()?)),
                "PRODUCCION" => cdata.push(Energy::Prod(line.parse()?)),
                "AUX" => cdata.push(Energy::Aux(line.parse()?)),
                "SALIDA" => cdata.push(Energy::Out(line.parse()?)),
                "DEMANDA" => needs.add(line.parse()?)?,
                _ => {
                    return Err(EpbdError::ParseError(format!(
                        "ERROR: No se reconoce el componente de la línea: {} {}",
                        line, tag
                    )))
                }
            }
        }
        // Check that all used or produced energy components have an equal number of steps (data lengths)
        // TODO: Additional checks
        // - Move to check_components
        // - There are, at most, 3 building needs definitions (CAL, REF, ACS)
        // - Q_out (SALIDA) services include, at least, those included in E_in (CONSUMO). Think about interactive building of components and transient states
        // - Q_out (SALIDA) for ACS service with BIOMASA & BIOMASADENSIFICADA
        // - AUX components for systems with more than 1 service output need Q_out (SALIDA) components
        {
            let cdata_lengths: Vec<_> = cdata.iter().map(|e| e.num_steps()).collect();
            let start_num_steps = *cdata_lengths.first().unwrap_or(&12);
            if cdata_lengths.iter().any(|&clen| clen != start_num_steps) {
                return Err(EpbdError::ParseError(
                    "Componentes con distinto número de pasos de cálculo".into(),
                ));
            }
        }
        Components {
            meta: cmeta,
            data: cdata,
            needs,
        }
        .normalize()
    }
}
impl Components {
    /// Number of time steps of the first component (0 when there is no data)
    pub fn num_steps(&self) -> usize {
        self.data.get(0).map(|v| v.num_steps()).unwrap_or(0)
    }
    /// Set of energy carriers present in used or produced energy components
    pub fn available_carriers(&self) -> HashSet<Carrier> {
        self.data
            .iter()
            .filter(|c| c.is_used() || c.is_generated())
            .map(|e| e.carrier())
            .collect()
    }
    /// Fixes up use and production components
    ///
    /// - Ensures used EAMBIENTE energy has its matching production
    /// - Ensures used TERMOSOLAR energy has its matching production
    /// - Distributes auxiliary energy uses proportionally among services
    ///
    /// Metadata, services and carrier coherence are already enforced at parse time
    pub fn normalize(mut self) -> Result<Self> {
        // Balance declared uses not backed by production
        self.complete_produced_for_onsite_generated_use(Carrier::EAMBIENTE);
        self.complete_produced_for_onsite_generated_use(Carrier::TERMOSOLAR);
        self.assign_aux_nepb_to_epb_services()?;
        self.sort_by_id();
        Ok(self)
    }
    /// Balances declared on-site energy uses not covered by production
    ///
    /// Applies to the EAMBIENTE and TERMOSOLAR carriers,
    ///
    /// whenever the use of those carriers exceeds their production.
    /// Avoids having to declare EAMBIENTE/TERMOSOLAR production: declaring uses is enough.
    /// Balancing is done system by system, without transferring production between systems.
    ///
    /// That is, for each system (j=id):
    /// 1) the carrier use over all services is computed
    /// 2) the produced amounts of the carrier are computed
    /// 3) the existing production of that system is distributed
    /// 4) a production component is generated covering the amounts not met by declared production
    ///
    /// Production declared for a system that is not used is not transferred to other systems.
    fn complete_produced_for_onsite_generated_use(&mut self, carrier: Carrier) {
        let source = match carrier {
            Carrier::EAMBIENTE => ProdSource::EAMBIENTE,
            Carrier::TERMOSOLAR => ProdSource::TERMOSOLAR,
            _ => {
                panic!("Intento de compensación de vector distinto de EAMBIENTE o TERMOSOLAR")
            }
        };
        // Locate the components belonging to this carrier
        let envcomps: Vec<_> = self
            .data
            .iter()
            .cloned()
            .filter(|c| c.has_carrier(carrier))
            .collect();
        if envcomps.is_empty() {
            return;
        };
        let ids: HashSet<_> = envcomps.iter().map(|c| c.id()).collect();
        for id in ids {
            // Components for the given system
            let components_for_id = envcomps.iter().filter(|c| c.has_id(id));
            // Production components of this system
            let prod: Vec<_> = components_for_id
                .clone()
                .filter(|c| c.is_generated())
                .collect();
            // Use components
            let used: Vec<_> = components_for_id.clone().filter(|c| c.is_used()).collect();
            // Nothing to balance against production for this system
            if used.is_empty() {
                continue;
            };
            // Total declared use
            let total_use = veclistsum(&used.iter().map(|&v| v.values()).collect::<Vec<_>>());
            // Uses not covered by the existing production (clamped at zero)
            let unbalanced_use = if prod.is_empty() {
                total_use
            } else {
                let avail_prod = veclistsum(&prod.iter().map(|&v| v.values()).collect::<Vec<_>>());
                vecvecdif(&total_use, &avail_prod)
                    .iter()
                    .map(|&v| if v > 0.0 { v } else { 0.0 })
                    .collect()
            };
            // No imbalance: continue with the next system
            if unbalanced_use.iter().sum::<f32>() == 0.0 {
                continue;
            };
            // Otherwise add a production component covering the imbalance
            self.data.push(Energy::Prod(EProd {
                id,
                source,
                values: unbalanced_use,
                comment: "Equilibrado de consumo sin producción declarada".into(),
            }));
        }
    }
    /// Assigns EPB services to auxiliary energy components
    ///
    /// Auxiliary use components are initially loaded with the NEPB service
    /// but they represent EPB services only, so they must be reassigned.
    ///
    /// The assignment is done system by system:
    /// 1) with a single EPB service, the Aux use is assigned to that service
    /// 2) with more than one EPB service, one Aux use is generated per available
    ///    service, assigning each a share proportional to that service's
    ///    output energy relative to the total output over all EPB services.
    fn assign_aux_nepb_to_epb_services(&mut self) -> Result<()> {
        // ids with aux energy use
        let ids: HashSet<_> = self
            .data
            .iter()
            .filter(|c| c.is_aux())
            .map(Energy::id)
            .collect();
        for id in ids {
            let services_for_uses_with_id = self
                .data
                .iter()
                .filter_map(|c| match c {
                    Energy::Used(e) if e.id == id => Some(e.service),
                    _ => None,
                })
                .collect::<HashSet<_>>();
            // With a single service among the uses, assign the auxiliaries to it
            // without needing to look at delivered or absorbed energy
            if services_for_uses_with_id.len() == 1 {
                let service = *services_for_uses_with_id.iter().next().unwrap();
                for c in &mut self.data {
                    if let Energy::Aux(e) = c {
                        if e.id == id {
                            e.service = service
                        }
                    }
                }
                continue;
            }
            // With more than one service, split the auxiliary energy proportionally
            // to each service's output energy relative to the EPB total
            let aux_tot = veclistsum(
                &self
                    .data
                    .iter()
                    .filter_map(|c| match c {
                        Energy::Aux(e) if e.id == id => Some(e.values()),
                        _ => None,
                    })
                    .collect::<Vec<_>>(),
            );
            // Accumulate output energy per service for this system
            let mut q_out_by_srv: HashMap<Service, Vec<f32>> = HashMap::new();
            for component in &self.data {
                if let Energy::Out(e) = component {
                    if e.id == id {
                        q_out_by_srv
                            .entry(e.service)
                            .or_insert_with(|| vec![0.0; self.num_steps()]);
                        q_out_by_srv
                            .insert(e.service, vecvecsum(&q_out_by_srv[&e.service], &e.values));
                    }
                };
            }
            let mut q_out_tot = vec![0.0; self.num_steps()];
            for q_out in q_out_by_srv.values() {
                q_out_tot = vecvecsum(&*q_out_tot, q_out);
            }
            // Aux energy exists but there is no output data to base the split on
            if aux_tot.iter().sum::<f32>() > 0.0 && q_out_tot.iter().sum::<f32>() == 0.0 {
                return Err(EpbdError::WrongInput(format!("Sin datos de energía saliente para hacer el reparto de los consumos auxiliares del sistema {}", id)));
            };
            // Compute each service's fraction of the total output
            let mut q_out_frac_by_srv = q_out_by_srv;
            let out_services: Vec<Service> = q_out_frac_by_srv.keys().cloned().collect();
            for service in &out_services {
                let values = q_out_frac_by_srv[service]
                    .iter()
                    .zip(q_out_tot.iter())
                    .map(|(val, tot)| if tot > &0.0 { val / tot } else { 0.0 })
                    .collect();
                q_out_frac_by_srv.insert(*service, values);
            }
            // Remove the existing auxiliary components
            self.data.retain(|c| !c.is_aux());
            // Insert new auxiliaries with the computed per-service split
            for service in &out_services {
                let values = q_out_frac_by_srv[service]
                    .iter()
                    .zip(aux_tot.iter())
                    .map(|(q_out_frac, aux_tot_i)| q_out_frac * aux_tot_i)
                    .collect();
                self.data.push(Energy::Aux(crate::types::EAux {
                    id,
                    service: *service,
                    values,
                    comment: "Reasignación automática de consumos auxiliares".into(),
                }));
            }
        }
        Ok(())
    }
    /// Sorts components by their system id
    fn sort_by_id(&mut self) {
        self.data.sort_by_key(|e| e.id());
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    // Already-normalized input: EAMBIENTE use is backed by matching production.
    const TCOMPS1: &str = "#META CTE_AREAREF: 100.5
0, PRODUCCION, EL_INSITU, 8.20, 6.56, 4.10, 3.69, 2.05, 2.46, 3.28, 2.87, 2.05, 3.28, 4.92, 6.56
0, CONSUMO, REF, ELECTRICIDAD, 16.39, 13.11, 8.20, 7.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 13.11
0, CONSUMO, CAL, ELECTRICIDAD, 16.39, 13.11, 8.20, 7.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 13.11
0, CONSUMO, CAL, EAMBIENTE, 6.39, 3.11, 8.20, 17.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 3.11
0, PRODUCCION, EAMBIENTE, 6.39, 3.11, 8.20, 17.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 3.11 # Equilibrado de consumo sin producción declarada";
    // Distribution of electricity production and balancing of EAMBIENTE uses
    const TCOMPSRES1: &str = "#META CTE_AREAREF: 100.5
0, PRODUCCION, EL_INSITU, 8.20, 6.56, 4.10, 3.69, 2.05, 2.46, 3.28, 2.87, 2.05, 3.28, 4.92, 6.56
0, CONSUMO, REF, ELECTRICIDAD, 16.39, 13.11, 8.20, 7.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 13.11
0, CONSUMO, CAL, ELECTRICIDAD, 16.39, 13.11, 8.20, 7.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 13.11
0, CONSUMO, CAL, EAMBIENTE, 6.39, 3.11, 8.20, 17.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 3.11
0, PRODUCCION, EAMBIENTE, 6.39, 3.11, 8.20, 17.38, 4.10, 4.92, 6.56, 5.74, 4.10, 6.56, 9.84, 3.11 # Equilibrado de consumo sin producción declarada";
    #[test]
    fn tcomponents_parse() {
        let tcomps = TCOMPS1.parse::<Components>().unwrap();
        // roundtrip building from/to string
        assert_eq!(tcomps.to_string(), TCOMPS1);
    }
    #[test]
    fn tcomponents_normalize() {
        let tcompsnorm = TCOMPS1.parse::<Components>().unwrap();
        assert_eq!(tcompsnorm.to_string(), TCOMPSRES1);
    }
    /// Components with distinct system ids, where uncompensated EAMBIENTE
    /// production is assigned to the corresponding ids
    #[test]
    fn check_normalized_components() {
        let comps = "# Bomba de calor 1
1,CONSUMO,ACS,ELECTRICIDAD,100 # BdC 1
1,CONSUMO,ACS,EAMBIENTE,150 # BdC 1
# Bomba de calor 2
2,CONSUMO,CAL,ELECTRICIDAD,200 # BdC 2
2,CONSUMO,CAL,EAMBIENTE,300 # BdC 2
# Producción fotovoltaica in situ
1,PRODUCCION,EL_INSITU,50 # PV
2,PRODUCCION,EL_INSITU,100 # PV
# Producción de energía ambiente dada por el usuario
0,PRODUCCION,EAMBIENTE,100 # Producción declarada de sistema sin consumo (no reduce energía a compensar)
1,PRODUCCION,EAMBIENTE,100 # Producción declarada de sistema con consumo (reduce energía a compensar)
2,PRODUCCION,EAMBIENTE,100 # Producción declarada de sistema sin ese servicio consumo (no reduce energía a compensar)
# Compensación de energía ambiente a completar por CteEPBD"
            .parse::<Components>()
            .unwrap();
        let ma_prod = comps
            .data
            .iter()
            .filter(|c| c.is_generated() && c.has_carrier(Carrier::EAMBIENTE));
        // 50 kWh are added to the declared 100 kWh to balance the ACS use (150 kWh)
        let ma_prod_1: f32 = ma_prod
            .clone()
            .filter(|c| c.has_id(1))
            .map(Energy::values_sum)
            .sum();
        assert_eq!(format!("{:.1}", ma_prod_1), "150.0");
        // 200 kWh are added to the declared 100 kWh to balance the CAL use (300 kWh)
        let ma_prod_2: f32 = ma_prod
            .clone()
            .filter(|c| c.has_id(2))
            .map(Energy::values_sum)
            .sum();
        assert_eq!(format!("{:.1}", ma_prod_2), "300.0");
        // In total, 200 + 50 are added to the declared 300 kWh, totaling 550 kWh.
        // The 100 kWh declared for system 0 are never used.
        let ma_prod_tot: f32 = ma_prod.clone().map(Energy::values_sum).sum();
        assert_eq!(format!("{:.1}", ma_prod_tot), "550.0");
    }
    /// Test of the extended format with zone and system components that declare
    /// building demand and energy delivered or absorbed by the systems
    #[test]
    fn tcomponents_extended_parse() {
        "#META CTE_AREAREF: 1.0
DEMANDA, REF, 3.0 # Demanda ref. edificio
DEMANDA, CAL, 3.0 # Demanda cal. edificio
1, PRODUCCION, EL_INSITU, 2.00 # Producción PV
2, CONSUMO, CAL, ELECTRICIDAD, 1.00 # BdC modo calefacción
2, CONSUMO, CAL, EAMBIENTE, 2.00 # BdC modo calefacción
2, SALIDA, CAL, 3.0 # Energía entregada por el equipo de calefacción con COP 3
2, CONSUMO, ACS, ELECTRICIDAD, 1.0 # BdC modo ACS
2, CONSUMO, ACS, EAMBIENTE, 2.0 # BdC modo ACS
2, SALIDA, ACS, 3.0 # Energía entregada por el equipo de acs con COP_dhw 3
2, AUX, 0.5 # Auxiliares ACS BdC
3, CONSUMO, REF, ELECTRICIDAD, 1.00 # BdC modo refrigeración
3, SALIDA, REF, -3.0 # Energía absorbida por el equipo de refrigeración con EER 3
"
        .parse::<Components>()
        .unwrap();
    }
}
|
//! Encode a BinJS, then decode it, ensure that we obtain the same AST.
#[macro_use]
extern crate bencher;
extern crate binjs;
extern crate env_logger;
extern crate glob;
extern crate itertools;
use binjs::generic::*;
use binjs::source::*;
use itertools::Itertools;
/// Glob patterns (relative to the crate root) of the sources to benchmark.
const PATHS: [&'static str; 1] = ["benches/test.js"];
/// Maximum number of matched files benchmarked per run.
const NUMBER_OF_SAMPLES: usize = 3;
/// Creates a fresh Shift parser instance, panicking if it cannot start.
fn launch_shift() -> Shift {
    Shift::try_new().expect("Could not launch Shift")
}
/// Benchmark variant: a new parser is launched inside every timed iteration.
fn bench_parsing_one_parser_per_run(bencher: &mut bencher::Bencher) {
    bench_parsing_aux(None, bencher);
}
/// Benchmark variant: one parser is launched up front and reused by all iterations.
fn bench_parsing_reuse_parser(bencher: &mut bencher::Bencher) {
    bench_parsing_aux(Some(&launch_shift()), bencher);
}
/// Shared benchmark body: parses each sample file and imports the resulting AST.
///
/// `parser` is `Some` to reuse an existing Shift instance across iterations,
/// or `None` to launch a fresh one inside every timed iteration.
fn bench_parsing_aux(parser: Option<&Shift>, bencher: &mut bencher::Bencher) {
    let entries = PATHS
        .iter()
        .map(|path_suffix| format!("{}/{}", env!("CARGO_MANIFEST_DIR"), path_suffix))
        .flat_map(|path| glob::glob(&path).expect("Invalid path"))
        .map(|entry| entry.expect("Invalid entry"))
        .sorted();
    let paths: Vec<_> = entries.into_iter().take(NUMBER_OF_SAMPLES).collect();
    for path in &paths {
        bencher.iter(move || {
            // `shift` outlives the match so the borrow taken in the None arm
            // stays valid for the parse_file call below.
            let shift;
            let json = match parser {
                Some(parser) => parser,
                None => {
                    shift = launch_shift();
                    &shift
                }
            }
            .parse_file(path)
            .expect("Could not parse source");
            binjs::specialized::es6::ast::Script::import(&json).expect("Could not import AST")
        });
    }
}
// Register both parsing benchmarks and generate the benchmark `main`.
benchmark_group!(
    bench,
    bench_parsing_one_parser_per_run,
    bench_parsing_reuse_parser
);
benchmark_main!(bench);
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
// Crate module tree: `pub` modules form the public API surface;
// `chain_state` and `gas` stay crate-private.
pub mod account;
mod chain_state;
pub mod common_transactions;
mod gas;
pub mod genesis;
pub mod mock_vm;
pub mod starcoin_vm;
pub mod system_module_names;
pub mod transaction_scripts;
pub mod type_tag_parser;
|
pub use rand::random;
use rand::Rng;
/// Random number within an inclusive [min:max] range
pub fn within(min: i32, max: i32) -> i32 {
    let mut rng = rand::thread_rng();
    // gen_range's upper bound is exclusive, so extend it by one.
    rng.gen_range(min, max + 1)
}
/// Return true with probability p
pub fn chance(p: f32) -> bool {
    let roll = rand::thread_rng().next_f32();
    roll <= p
}
/// Choose a random value from the input slice, or `None` if it is empty.
pub fn choose<T>(values: &[T]) -> Option<&T> {
    let mut rng = rand::thread_rng();
    rng.choose(values)
}
/// Roll custom dice
pub fn dx(x: i32) -> i32 {
match x {
0 => 0,
x => rand::thread_rng().gen_range(1, x + 1),
}
}
/// Roll n custom dice
pub fn ndx(n: i32, x: i32) -> i32 {
(0..n).map(|_| dx(x)).sum()
}
/// Roll 1d3
pub fn d3() -> i32 {
rand::thread_rng().gen_range(1, 4)
}
/// Roll nd3
pub fn nd3(n: i32) -> i32 {
(0..n).map(|_| d3()).sum()
}
/// Roll 1d6
pub fn d6() -> i32 {
rand::thread_rng().gen_range(1, 7)
}
/// Roll nd6
pub fn nd6(n: i32) -> i32 {
(0..n).map(|_| d6()).sum()
}
/// Roll 1d12
pub fn d12() -> i32 {
rand::thread_rng().gen_range(1, 13)
}
/// Roll nd12
pub fn nd12(n: i32) -> i32 {
(0..n).map(|_| d12()).sum()
}
/// Roll 1d20
pub fn d20() -> i32 {
rand::thread_rng().gen_range(1, 21)
}
/// Roll 1d100
pub fn d100() -> i32 {
rand::thread_rng().gen_range(1, 101)
}
|
use std::sync::Arc;
use nalgebra::Vector3;
use smallvec::SmallVec;
use sourcerenderer_core::graphics::{
Backend,
Barrier,
BarrierAccess,
BarrierSync,
BarrierTextureRange,
BindingFrequency,
BufferInfo,
BufferUsage,
CommandBuffer,
Device,
MemoryUsage,
Queue,
Swapchain,
SwapchainError,
TextureLayout,
TextureView,
WHOLE_BUFFER, FenceRef,
};
use sourcerenderer_core::{
Matrix4,
Platform,
Vec2,
Vec2UI,
Vec3,
};
use super::acceleration_structure_update::AccelerationStructureUpdatePass;
use super::clustering::ClusteringPass;
use super::geometry::GeometryPass;
use super::light_binning::LightBinningPass;
use super::occlusion::OcclusionPass;
use super::prepass::Prepass;
use super::rt_shadows::RTShadowPass;
use super::sharpen::SharpenPass;
use super::ssao::SsaoPass;
use super::taa::TAAPass;
use crate::input::Input;
use crate::renderer::passes::blue_noise::BlueNoise;
use crate::renderer::render_path::{
FrameInfo,
RenderPath,
SceneInfo,
ZeroTextures, RenderPassParameters,
};
use crate::renderer::renderer_assets::RendererAssets;
use crate::renderer::renderer_resources::{
HistoryResourceEntry,
RendererResources,
};
use crate::renderer::shader_manager::ShaderManager;
use crate::renderer::LateLatching;
/// Forward render path that runs a fixed sequence of passes per frame
/// (occlusion, clustering, light binning, prepass, SSAO, geometry, TAA,
/// sharpen), with optional ray-traced passes when the device supports RT.
pub struct ConservativeRenderer<P: Platform> {
    swapchain: Arc<<P::GraphicsBackend as Backend>::Swapchain>,
    device: Arc<<P::GraphicsBackend as Backend>::Device>,
    // NOTE(review): despite the name, this field holds the shared
    // RendererResources registry, not just barriers — confirm before renaming.
    barriers: RendererResources<P::GraphicsBackend>,
    clustering_pass: ClusteringPass,
    light_binning_pass: LightBinningPass,
    prepass: Prepass,
    geometry: GeometryPass<P>,
    taa: TAAPass,
    sharpen: SharpenPass,
    ssao: SsaoPass<P>,
    occlusion: OcclusionPass<P>,
    /// `Some` only when the device reports ray-tracing support.
    rt_passes: Option<RTPasses<P>>,
    blue_noise: BlueNoise<P::GraphicsBackend>,
}
/// Render passes that are only constructed when ray tracing is available.
pub struct RTPasses<P: Platform> {
    acceleration_structure_update: AccelerationStructureUpdatePass<P>,
    shadows: RTShadowPass,
}
/// The per-frame GPU buffers bound once at frame start by `setup_frame`.
pub struct FrameBindings<B: Backend> {
    gpu_scene_buffer: Arc<B::Buffer>,
    camera_buffer: Arc<B::Buffer>,
    camera_history_buffer: Arc<B::Buffer>,
    vertex_buffer: Arc<B::Buffer>,
    index_buffer: Arc<B::Buffer>,
    directional_lights: Arc<B::Buffer>,
    point_lights: Arc<B::Buffer>,
    /// Misc per-frame constants (light counts, cluster mapping, jitter, ...).
    setup_buffer: Arc<B::Buffer>,
}
impl<P: Platform> ConservativeRenderer<P> {
    /// Creates every render pass up front and submits a one-off
    /// initialization command buffer (used by the acceleration-structure
    /// update pass when ray tracing is available).
    pub fn new(
        device: &Arc<<P::GraphicsBackend as Backend>::Device>,
        swapchain: &Arc<<P::GraphicsBackend as Backend>::Swapchain>,
        shader_manager: &mut ShaderManager<P>,
    ) -> Self {
        let mut init_cmd_buffer = device.graphics_queue().create_command_buffer();
        let resolution = Vec2UI::new(swapchain.width(), swapchain.height());
        let mut barriers = RendererResources::<P::GraphicsBackend>::new(device);
        let blue_noise = BlueNoise::new::<P>(device);
        // Each pass registers its render targets / pipelines with the shared
        // resource registry (`barriers`).
        let clustering = ClusteringPass::new::<P>(&mut barriers, shader_manager);
        let light_binning = LightBinningPass::new::<P>(&mut barriers, shader_manager);
        let prepass = Prepass::new::<P>(&mut barriers, shader_manager, resolution);
        let geometry = GeometryPass::<P>::new(device, resolution, &mut barriers, shader_manager);
        let taa = TAAPass::new::<P>(resolution, &mut barriers, shader_manager, false);
        let sharpen = SharpenPass::new::<P>(resolution, &mut barriers, shader_manager);
        let ssao = SsaoPass::<P>::new(device, resolution, &mut barriers, shader_manager, false);
        let occlusion = OcclusionPass::<P>::new(device, shader_manager);
        // RT passes exist only when the device supports ray tracing.
        let rt_passes = device.supports_ray_tracing().then(|| RTPasses {
            acceleration_structure_update: AccelerationStructureUpdatePass::<P>::new(
                device,
                &mut init_cmd_buffer,
            ),
            shadows: RTShadowPass::new::<P>(resolution, &mut barriers, shader_manager),
        });
        init_cmd_buffer.flush_barriers();
        device.flush_transfers();
        let c_graphics_queue = device.graphics_queue().clone();
        c_graphics_queue.submit(init_cmd_buffer.finish(), &[], &[], true);
        // Submission processing is pushed off to a rayon worker thread.
        rayon::spawn(move || c_graphics_queue.process_submissions());
        Self {
            swapchain: swapchain.clone(),
            device: device.clone(),
            barriers,
            clustering_pass: clustering,
            light_binning_pass: light_binning,
            prepass,
            geometry,
            taa,
            sharpen,
            ssao,
            occlusion,
            rt_passes,
            blue_noise,
        }
    }
    /// Uploads the per-frame constant data (setup constants plus the point-
    /// and directional-light arrays) and bundles it, together with the
    /// frame's shared buffers, into a `FrameBindings`.
    fn create_frame_bindings(
        &self,
        cmd_buf: &mut <P::GraphicsBackend as Backend>::CommandBuffer,
        scene: &SceneInfo<P::GraphicsBackend>,
        gpu_scene_buffer: &Arc<<P::GraphicsBackend as Backend>::Buffer>,
        camera_buffer: &Arc<<P::GraphicsBackend as Backend>::Buffer>,
        camera_history_buffer: &Arc<<P::GraphicsBackend as Backend>::Buffer>,
        swapchain: &Arc<<P::GraphicsBackend as Backend>::Swapchain>,
        rendering_resolution: &Vec2UI,
        frame: u64,
    ) -> FrameBindings<P::GraphicsBackend> {
        let view = &scene.views[scene.active_view_index];
        // Scale/bias that map view depth onto cluster z slices using a
        // logarithmic distribution between the near and far planes.
        let cluster_count = self.clustering_pass.cluster_count();
        let cluster_z_scale = (cluster_count.z as f32) / (view.far_plane / view.near_plane).log2();
        let cluster_z_bias = -(cluster_count.z as f32) * (view.near_plane).log2()
            / (view.far_plane / view.near_plane).log2();
        // Mirrors the shader-side constant buffer; keep field order and
        // padding in sync with the shaders.
        #[repr(C)]
        #[derive(Debug, Clone)]
        struct SetupBuffer {
            point_light_count: u32,
            directional_light_count: u32,
            cluster_z_bias: f32,
            cluster_z_scale: f32,
            cluster_count: Vector3<u32>,
            _padding: u32,
            swapchain_transform: Matrix4,
            halton_point: Vec2,
            rt_size: Vec2UI,
        }
        let setup_buffer = cmd_buf.upload_dynamic_data(
            &[SetupBuffer {
                point_light_count: scene.scene.point_lights().len() as u32,
                directional_light_count: scene.scene.directional_lights().len() as u32,
                cluster_z_bias,
                cluster_z_scale,
                cluster_count,
                _padding: 0,
                swapchain_transform: swapchain.transform(),
                // TAA jitter: cycles through 8 Halton samples by frame index.
                halton_point: super::taa::scaled_halton_point(
                    rendering_resolution.x,
                    rendering_resolution.y,
                    (frame % 8) as u32 + 1,
                ),
                rt_size: *rendering_resolution,
            }],
            BufferUsage::CONSTANT,
        );
        // GPU-side point light record; must match the shader layout.
        #[repr(C)]
        #[derive(Debug, Clone)]
        struct PointLight {
            position: Vec3,
            intensity: f32,
        }
        let point_lights: SmallVec<[PointLight; 16]> = scene.scene
            .point_lights()
            .iter()
            .map(|l| PointLight {
                position: l.position,
                intensity: l.intensity,
            })
            .collect();
        let point_lights_buffer = cmd_buf.upload_dynamic_data(&point_lights, BufferUsage::CONSTANT);
        // GPU-side directional light record; must match the shader layout.
        #[repr(C)]
        #[derive(Debug, Clone)]
        struct DirectionalLight {
            direction: Vec3,
            intensity: f32,
        }
        let directional_lights: SmallVec<[DirectionalLight; 16]> = scene.scene
            .directional_lights()
            .iter()
            .map(|l| DirectionalLight {
                direction: l.direction,
                intensity: l.intensity,
            })
            .collect();
        let directional_lights_buffer =
            cmd_buf.upload_dynamic_data(&directional_lights, BufferUsage::CONSTANT);
        FrameBindings {
            gpu_scene_buffer: gpu_scene_buffer.clone(),
            camera_buffer: camera_buffer.clone(),
            camera_history_buffer: camera_history_buffer.clone(),
            vertex_buffer: scene.vertex_buffer.clone(),
            index_buffer: scene.index_buffer.clone(),
            directional_lights: directional_lights_buffer,
            point_lights: point_lights_buffer,
            setup_buffer,
        }
    }
}
impl<P: Platform> RenderPath<P> for ConservativeRenderer<P> {
    /// This path submits draws from the CPU; it is not GPU-driven.
    fn is_gpu_driven(&self) -> bool {
        false
    }
    fn write_occlusion_culling_results(&self, frame: u64, bitset: &mut Vec<u32>) {
        self.occlusion.write_occlusion_query_results(frame, bitset);
    }
    fn on_swapchain_changed(
        &mut self,
        swapchain: &std::sync::Arc<<P::GraphicsBackend as Backend>::Swapchain>,
    ) {
        // TODO: resize render targets
        self.swapchain = swapchain.clone();
    }
    /// Records and submits one frame: culling and light passes, geometry,
    /// post-processing (SSAO, TAA, sharpen), then blits the sharpened image
    /// to the swapchain back buffer and presents it.
    #[profiling::function]
    fn render(
        &mut self,
        scene: &SceneInfo<P::GraphicsBackend>,
        zero_textures: &ZeroTextures<P::GraphicsBackend>,
        late_latching: Option<&dyn LateLatching<P::GraphicsBackend>>,
        input: &Input,
        frame_info: &FrameInfo,
        shader_manager: &ShaderManager<P>,
        assets: &RendererAssets<P>,
    ) -> Result<(), SwapchainError> {
        let graphics_queue = self.device.graphics_queue();
        let mut cmd_buf = graphics_queue.create_command_buffer();
        // NOTE(review): `late_latching` is declared as Option but is
        // unwrap()ed unconditionally here (and again below), so this path
        // panics when it is None — confirm all callers pass Some.
        let late_latching_buffer = late_latching.unwrap().buffer();
        let late_latching_history_buffer = late_latching.unwrap().history_buffer().unwrap();
        let primary_view = &scene.views[scene.active_view_index];
        // Placeholder scene buffer: this non-GPU-driven path has no real GPU
        // scene data, but the binding slot still has to be filled.
        let empty_buffer = cmd_buf.create_temporary_buffer(
            &BufferInfo {
                size: 16,
                usage: BufferUsage::STORAGE,
            },
            MemoryUsage::VRAM,
        );
        let frame_bindings = self.create_frame_bindings(
            &mut cmd_buf,
            scene,
            &empty_buffer,
            &late_latching_buffer,
            &late_latching_history_buffer,
            &self.swapchain,
            &Vec2UI::new(self.swapchain.width(), self.swapchain.height()),
            frame_info.frame,
        );
        setup_frame::<P::GraphicsBackend>(&mut cmd_buf, &frame_bindings);
        let params = RenderPassParameters {
            device: self.device.as_ref(),
            scene,
            shader_manager,
            resources: &mut self.barriers,
            zero_textures,
            assets
        };
        // Keep acceleration structures current before any RT pass runs.
        if let Some(rt_passes) = self.rt_passes.as_mut() {
            rt_passes
                .acceleration_structure_update
                .execute(&mut cmd_buf, &params);
        }
        self.occlusion.execute(
            &mut cmd_buf,
            &params,
            frame_info.frame,
            &late_latching_buffer,
            Prepass::DEPTH_TEXTURE_NAME,
        );
        self.clustering_pass.execute::<P>(
            &mut cmd_buf,
            &params,
            Vec2UI::new(self.swapchain.width(), self.swapchain.height()),
            &late_latching_buffer
        );
        self.light_binning_pass.execute(
            &mut cmd_buf,
            &params,
            &late_latching_buffer
        );
        // Depth + motion vectors consumed by SSAO, RT shadows, and TAA below.
        self.prepass.execute(
            &mut cmd_buf,
            &params,
            self.swapchain.transform(),
            frame_info.frame,
            &late_latching_buffer,
            &late_latching_history_buffer
        );
        self.ssao.execute(
            &mut cmd_buf,
            &params,
            Prepass::DEPTH_TEXTURE_NAME,
            Some(Prepass::MOTION_TEXTURE_NAME),
            &late_latching_buffer,
            self.blue_noise.frame(frame_info.frame),
            self.blue_noise.sampler(),
            false
        );
        if let Some(rt_passes) = self.rt_passes.as_mut() {
            rt_passes.shadows.execute(
                &mut cmd_buf,
                &params,
                Prepass::DEPTH_TEXTURE_NAME,
                rt_passes
                    .acceleration_structure_update
                    .acceleration_structure(),
                &self.blue_noise.frame(frame_info.frame),
                &self.blue_noise.sampler(),
            );
        }
        self.geometry.execute(
            &mut cmd_buf,
            &params,
            Prepass::DEPTH_TEXTURE_NAME,
            &frame_bindings
        );
        self.taa.execute(
            &mut cmd_buf,
            &params,
            GeometryPass::<P>::GEOMETRY_PASS_TEXTURE_NAME,
            Prepass::DEPTH_TEXTURE_NAME,
            Some(Prepass::MOTION_TEXTURE_NAME),
            false
        );
        self.sharpen
            .execute(&mut cmd_buf, &params);
        // Transition the sharpened result so it can be copied to the
        // back buffer.
        let sharpened_texture = self.barriers.access_texture(
            &mut cmd_buf,
            SharpenPass::SHAPENED_TEXTURE_NAME,
            &BarrierTextureRange::default(),
            BarrierSync::COPY,
            BarrierAccess::COPY_READ,
            TextureLayout::CopySrc,
            false,
            HistoryResourceEntry::Current,
        );
        let back_buffer_res = self.swapchain.prepare_back_buffer();
        if back_buffer_res.is_none() {
            return Err(SwapchainError::Other);
        }
        let back_buffer = back_buffer_res.unwrap();
        // Undefined -> CopyDst: previous back-buffer contents are discarded.
        cmd_buf.barrier(&[Barrier::TextureBarrier {
            old_sync: BarrierSync::empty(),
            new_sync: BarrierSync::COPY,
            old_access: BarrierAccess::empty(),
            new_access: BarrierAccess::COPY_WRITE,
            old_layout: TextureLayout::Undefined,
            new_layout: TextureLayout::CopyDst,
            texture: back_buffer.texture_view.texture(),
            range: BarrierTextureRange::default(),
        }]);
        cmd_buf.flush_barriers();
        cmd_buf.blit(&*sharpened_texture, 0, 0, back_buffer.texture_view.texture(), 0, 0);
        // CopyDst -> Present for the presentation engine.
        cmd_buf.barrier(&[Barrier::TextureBarrier {
            old_sync: BarrierSync::COPY,
            new_sync: BarrierSync::empty(),
            old_access: BarrierAccess::COPY_WRITE,
            new_access: BarrierAccess::empty(),
            old_layout: TextureLayout::CopyDst,
            new_layout: TextureLayout::Present,
            texture: back_buffer.texture_view.texture(),
            range: BarrierTextureRange::default(),
        }]);
        std::mem::drop(sharpened_texture);
        self.barriers.swap_history_resources();
        // Late-latch the freshest input right before submission.
        if let Some(late_latching) = late_latching {
            let input_state = input.poll();
            late_latching.before_submit(&input_state, primary_view);
        }
        graphics_queue.submit(
            cmd_buf.finish(),
            &[FenceRef::WSIFence(back_buffer.prepare_fence)],
            &[FenceRef::WSIFence(back_buffer.present_fence)],
            true,
        );
        graphics_queue.present(&self.swapchain, back_buffer.present_fence, true);
        let c_graphics_queue = graphics_queue.clone();
        rayon::spawn(move || c_graphics_queue.process_submissions());
        if let Some(late_latching) = late_latching {
            late_latching.after_submit(&self.device);
        }
        Ok(())
    }
    /// UI rendering is not implemented for this path yet.
    fn set_ui_data(&mut self, data: crate::ui::UIDrawData<<P as Platform>::GraphicsBackend>) {
        todo!()
    }
}
/// Binds the per-frame buffers to the fixed slots of the `Frame` binding
/// frequency, matching the slot layout the shaders expect.
pub fn setup_frame<B: Backend>(cmd_buf: &mut B::CommandBuffer, frame_bindings: &FrameBindings<B>) {
    // NOTE(review): the scene buffer is bound to slots 0..=5 and slot 6 is
    // left unbound — presumably reserved or unused; confirm against the
    // shader binding layout before changing.
    for i in 0..6 {
        cmd_buf.bind_storage_buffer(
            BindingFrequency::Frame,
            i,
            &frame_bindings.gpu_scene_buffer,
            0,
            WHOLE_BUFFER,
        );
    }
    cmd_buf.bind_uniform_buffer(
        BindingFrequency::Frame,
        7,
        &frame_bindings.camera_buffer,
        0,
        WHOLE_BUFFER,
    );
    cmd_buf.bind_uniform_buffer(
        BindingFrequency::Frame,
        8,
        &frame_bindings.camera_history_buffer,
        0,
        WHOLE_BUFFER,
    );
    cmd_buf.bind_storage_buffer(
        BindingFrequency::Frame,
        9,
        &frame_bindings.vertex_buffer,
        0,
        WHOLE_BUFFER,
    );
    cmd_buf.bind_storage_buffer(
        BindingFrequency::Frame,
        10,
        &frame_bindings.index_buffer,
        0,
        WHOLE_BUFFER,
    );
    cmd_buf.bind_uniform_buffer(
        BindingFrequency::Frame,
        11,
        &frame_bindings.setup_buffer,
        0,
        WHOLE_BUFFER,
    );
    cmd_buf.bind_uniform_buffer(
        BindingFrequency::Frame,
        12,
        &frame_bindings.point_lights,
        0,
        WHOLE_BUFFER,
    );
    cmd_buf.bind_uniform_buffer(
        BindingFrequency::Frame,
        13,
        &frame_bindings.directional_lights,
        0,
        WHOLE_BUFFER,
    );
}
|
//! Traits for types wrapped in `DynTrait<_>`
use crate::std_types::RBoxError;
#[allow(unused_imports)]
use crate::type_level::{
bools::{False, True},
impl_enum::{Implementability, Implemented, Unimplemented},
trait_marker,
};
/// Declares the `InterfaceType` trait with one associated type per listed
/// trait marker, forwarding the given attributes/doc comments onto the trait
/// itself and onto each associated type.
macro_rules! declare_InterfaceType {
    (
        $(#[$attrs:meta])*
        assoc_types[
            $(
                $(#[$assoc_attrs:meta])*
                type $trait_:ident ;
            )*
        ]
    ) => (
        $(#[$attrs])*
        pub trait InterfaceType: Sized {
            $(
                $(#[$assoc_attrs])*
                type $trait_: Implementability;
            )*
            // Doc-hidden associated type whose name signals that impls are
            // meant to come from the `impl_InterfaceType` macro / derive,
            // not be written by hand.
            #[doc(hidden)]
            type define_this_in_the_impl_InterfaceType_macro;
        }
    )
}
// Generates the public `InterfaceType` trait; the assoc_types list below is
// the full set of traits `DynTrait` can require/use.
declare_InterfaceType! {
    /// Defines the usable/required traits when creating a
    /// [`DynTrait<Pointer<()>, ThisInterfaceType>`](crate::DynTrait).
    ///
    /// This trait can only be implemented using the
    /// [`#[derive(StableAbi)]`](derive@crate::StableAbi)
    /// derive with the
    /// [`#[sabi(impl_InterfaceType(...))]`](derive@crate::StableAbi#sabiimpl_interfacetype)
    /// helper attribute,
    /// defaulting associated types to `Unimplemented<_>`.
    ///
    /// The value of every associated type can be:
    ///
    /// - [`Implemented<_>`](crate::type_level::impl_enum::Implemented):
    /// the trait would be required by, and be usable in `DynTrait`.
    ///
    /// - [`Unimplemented<_>`](crate::type_level::impl_enum::Unimplemented):
    /// the trait would not be required by, and not be usable in `DynTrait`.
    ///
    /// # Example
    ///
    /// ```
    ///
    /// use abi_stable::{erased_types::InterfaceType, type_level::bools::*, StableAbi};
    ///
    /// #[repr(C)]
    /// #[derive(StableAbi)]
    /// #[sabi(impl_InterfaceType(Clone, Debug))]
    /// pub struct FooInterface;
    ///
    /// /*
    /// The `#[sabi(impl_InterfaceType(Clone, Debug))]` helper attribute
    /// (as part of #[derive(StableAbi)]) above is roughly equivalent to this impl:
    ///
    /// impl InterfaceType for FooInterface {
    ///     type Clone = Implemented<trait_marker::Clone>;
    ///
    ///     type Debug = Implemented<trait_marker::Debug>;
    ///
    ///     /////////////////////////////////////
    ///     //// defaulted associated types
    ///     /////////////////////////////////////
    ///
    ///     // Changing this to require/unrequire in minor versions, is an abi breaking change.
    ///     // type Send = Unimplemented<trait_marker::Send>;
    ///
    ///     // Changing this to require/unrequire in minor versions, is an abi breaking change.
    ///     // type Sync = Unimplemented<trait_marker::Sync>;
    ///
    ///     // Changing this to require/unrequire in minor versions, is an abi breaking change.
    ///     // type Unpin = Unimplemented<trait_marker::Unpin>;
    ///
    ///     // type Iterator = Unimplemented<trait_marker::Iterator>;
    ///
    ///     // type DoubleEndedIterator = Unimplemented<trait_marker::DoubleEndedIterator>;
    ///
    ///     // type Default = Unimplemented<trait_marker::Default>;
    ///
    ///     // type Display = Unimplemented<trait_marker::Display>;
    ///
    ///     // type Serialize = Unimplemented<trait_marker::Serialize>;
    ///
    ///     // type Eq = Unimplemented<trait_marker::Eq>;
    ///
    ///     // type PartialEq = Unimplemented<trait_marker::PartialEq>;
    ///
    ///     // type Ord = Unimplemented<trait_marker::Ord>;
    ///
    ///     // type PartialOrd = Unimplemented<trait_marker::PartialOrd>;
    ///
    ///     // type Hash = Unimplemented<trait_marker::Hash>;
    ///
    ///     // type Deserialize = Unimplemented<trait_marker::Deserialize>;
    ///
    ///     // type FmtWrite = Unimplemented<trait_marker::FmtWrite>;
    ///
    ///     // type IoWrite = Unimplemented<trait_marker::IoWrite>;
    ///
    ///     // type IoSeek = Unimplemented<trait_marker::IoSeek>;
    ///
    ///     // type IoRead = Unimplemented<trait_marker::IoRead>;
    ///
    ///     // type IoBufRead = Unimplemented<trait_marker::IoBufRead>;
    ///
    ///     // type Error = Unimplemented<trait_marker::Error>;
    /// }
    /// */
    ///
    /// # fn main(){}
    ///
    ///
    /// ```
    ///
    ///
    ///
    ///
    assoc_types[
        /// Changing this to require/unrequire in minor versions, is an abi breaking change.
        type Send;
        /// Changing this to require/unrequire in minor versions, is an abi breaking change.
        type Sync;
        /// Changing this to require/unrequire in minor versions, is an abi breaking change.
        type Unpin;
        ///
        type Clone;
        ///
        type Default;
        ///
        type Display;
        ///
        type Debug;
        ///
        type Serialize;
        ///
        type Eq;
        ///
        type PartialEq;
        ///
        type Ord;
        ///
        type PartialOrd;
        ///
        type Hash;
        ///
        type Deserialize;
        ///
        type Iterator;
        ///
        type DoubleEndedIterator;
        /// For the `std::fmt::Write` trait
        type FmtWrite;
        /// For the `std::io::Write` trait
        type IoWrite;
        /// For the `std::io::Seek` trait
        type IoSeek;
        /// For the `std::io::Read` trait
        type IoRead;
        /// For the `std::io::BufRead` trait
        type IoBufRead;
        /// For the `std::error::Error` trait
        type Error;
    ]
}
///////////////////////////////////////////////////////////////////////////////
/// Describes how a type is serialized by [`DynTrait`].
///
/// # Example
///
/// ```rust
/// use abi_stable::{
/// erased_types::{SerializeType, SerializeProxyType, DynTrait},
/// external_types::RawValueBox,
/// std_types::{RBox, RBoxError, RErr, ROk, RResult, RStr},
/// StableAbi,
/// };
///
/// let boxed = make_foo_box(1234);
/// let serialized = serde_json::to_string(&boxed).unwrap();
/// assert_eq!(serialized, r#"{"field":1234}"#);
///
///
/// type FooBox = DynTrait<'static, RBox<()>, FooInterface>;
///
/// /// Implements `InterfaceType`, requiring `Send + Sync + Debug + Eq + Serialize`
/// #[repr(C)]
/// #[derive(StableAbi)]
/// #[sabi(impl_InterfaceType(Send, Sync, Debug, Eq, Serialize))]
/// pub struct FooInterface;
///
/// impl SerializeProxyType<'_> for FooInterface {
/// type Proxy = RawValueBox;
/// }
///
/// /////////////
/// // everything below could be defined in an implementation crate
///
/// #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize)]
/// struct Foo {
/// field: u32,
/// }
///
/// impl<'a> SerializeType<'a> for Foo {
/// type Interface = FooInterface;
///
/// fn serialize_impl(&'a self) -> Result<RawValueBox, RBoxError> {
/// match serde_json::value::to_raw_value::<Foo>(self) {
/// Ok(x) => Ok(x.into()),
/// Err(e) => Err(RBoxError::new(e)),
/// }
/// }
/// }
///
/// extern "C" fn make_foo_box(field: u32) -> FooBox {
/// abi_stable::extern_fn_panic_handling!{
/// FooBox::from_value(Foo{field})
/// }
/// }
/// ```
///
///
/// [`DynTrait`]: ../struct.DynTrait.html
pub trait SerializeType<'s> {
    /// An [`InterfaceType`] implementor which determines the
    /// intermediate type through which this is serialized.
    ///
    /// [`InterfaceType`]: ./trait.InterfaceType.html
    type Interface: SerializeProxyType<'s>;
    /// Performs the serialization into the proxy.
    ///
    /// The returned proxy value is what actually gets handed to the
    /// serializer.
    fn serialize_impl(
        &'s self,
    ) -> Result<<Self::Interface as SerializeProxyType<'s>>::Proxy, RBoxError>;
}
/// Determines the intermediate type a [`SerializeType`] implementor is converted into,
/// and is then serialized.
///
/// Implemented by interface types (see [`SerializeType::Interface`]).
///
/// [`SerializeType`]: ./trait.SerializeType.html
pub trait SerializeProxyType<'borr>: InterfaceType {
    /// The intermediate type.
    type Proxy: 'borr;
}
// Internal: resolves an interface's serialization proxy type, defaulting to
// `()` when the interface doesn't implement `Serialize`.
#[doc(hidden)]
pub trait GetSerializeProxyType<'borr>: InterfaceType {
    type ProxyType;
}
// Blanket impl: dispatches through the helper below, keyed on whether
// `I::Serialize` is `Implemented` or `Unimplemented`.
impl<'borr, I, PT> GetSerializeProxyType<'borr> for I
where
    I: InterfaceType,
    I: GetSerializeProxyTypeHelper<'borr, <I as InterfaceType>::Serialize, ProxyType = PT>,
{
    type ProxyType = PT;
}
// Internal dispatch helper for `GetSerializeProxyType`, selected by the
// `IS` marker (`Implemented`/`Unimplemented` for `Serialize`).
#[doc(hidden)]
pub trait GetSerializeProxyTypeHelper<'borr, IS>: InterfaceType {
    type ProxyType;
}
// `Serialize` implemented: use the interface's declared proxy type.
impl<'borr, I> GetSerializeProxyTypeHelper<'borr, Implemented<trait_marker::Serialize>> for I
where
    I: SerializeProxyType<'borr>,
{
    type ProxyType = <I as SerializeProxyType<'borr>>::Proxy;
}
// `Serialize` not implemented: fall back to `()`.
impl<'borr, I> GetSerializeProxyTypeHelper<'borr, Unimplemented<trait_marker::Serialize>> for I
where
    I: InterfaceType,
{
    type ProxyType = ();
}
///////////////////////////////////////
/// Describes how `D` is deserialized, using a proxy to do so.
///
/// Generally this delegates to a library function,
/// so that the implementation can be delegated
/// to the `implementation crate`.
///
/// # Example
///
/// ```rust
/// use abi_stable::{
/// erased_types::{DeserializeDyn, DynTrait},
/// external_types::RawValueRef,
/// std_types::{RBox, RBoxError, RErr, ROk, RResult, RStr},
/// StableAbi,
/// };
///
/// let boxed = serde_json::from_str::<FooBox>(r#"{"field": 10}"#).unwrap();
///
/// assert_eq!(*boxed.downcast_as::<Foo>().unwrap(), Foo{field: 10});
///
///
///
/// type FooBox = DynTrait<'static, RBox<()>, FooInterface>;
///
/// /// Implements `InterfaceType`, requiring `Send + Sync + Debug + Eq + Deserialize`
/// #[repr(C)]
/// #[derive(StableAbi)]
/// #[sabi(impl_InterfaceType(Send, Sync, Debug, Eq, Deserialize))]
/// pub struct FooInterface;
///
/// impl<'a> DeserializeDyn<'a, FooBox> for FooInterface {
/// type Proxy = RawValueRef<'a>;
///
/// fn deserialize_dyn(s: Self::Proxy) -> Result<FooBox, RBoxError> {
/// deserialize_foo(s.get_rstr()).into_result()
/// }
/// }
///
/// /////////////
/// // everything below could be defined in an implementation crate
///
/// #[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize)]
/// struct Foo {
/// field: u32,
/// }
///
/// extern "C" fn deserialize_foo(s: RStr<'_>) -> RResult<FooBox, RBoxError> {
/// abi_stable::extern_fn_panic_handling!{
/// match serde_json::from_str::<Foo>(s.into()) {
/// Ok(x) => ROk(DynTrait::from_value(x)),
/// Err(e) => RErr(RBoxError::new(e)),
/// }
/// }
/// }
/// ```
///
pub trait DeserializeDyn<'borr, D>:
    // Only interfaces that declare `Deserialize` as `Implemented` can
    // provide a deserializer.
    InterfaceType<Deserialize = Implemented<trait_marker::Deserialize>>
{
    /// The type that is deserialized and then converted into `D`,
    /// with `DeserializeDyn::deserialize_dyn`.
    type Proxy;
    /// Converts the proxy type into `D`.
    fn deserialize_dyn(s: Self::Proxy) -> Result<D, RBoxError>;
}
/// The way to specify the expected `Iterator::Item` type for an `InterfaceType`.
///
/// This is a separate trait to allow iterators that yield borrowed elements
/// (the `'a` lifetime can appear in `Item`).
pub trait IteratorItem<'a>: InterfaceType {
    /// The iterator item type.
    type Item;
}
/// Gets the expected `Iterator::Item` type for an `InterfaceType`,
/// defaulting to `()` if it doesn't require `Iterator` to be implemented.
///
/// Used by `DynTrait`'s vtable to give its iterator methods a defaulted return type.
pub trait IteratorItemOrDefault<'borr>: InterfaceType {
    /// The iterator item type.
    type Item;
}
// Blanket impl: picks the item type through the helper below, keyed on
// whether the interface declares `Iterator` as implemented.
impl<'borr, I, Item> IteratorItemOrDefault<'borr> for I
where
    I: InterfaceType,
    I: IteratorItemOrDefaultHelper<'borr, <I as InterfaceType>::Iterator, Item = Item>,
{
    type Item = Item;
}
// Internal dispatch helper for `IteratorItemOrDefault`.
#[doc(hidden)]
pub trait IteratorItemOrDefaultHelper<'borr, ImplIsRequired> {
    type Item;
}
// `Iterator` implemented: use the item type declared via `IteratorItem`.
impl<'borr, I, Item> IteratorItemOrDefaultHelper<'borr, Implemented<trait_marker::Iterator>> for I
where
    I: IteratorItem<'borr, Item = Item>,
{
    type Item = Item;
}
// `Iterator` not implemented: default the item type to `()`.
impl<'borr, I> IteratorItemOrDefaultHelper<'borr, Unimplemented<trait_marker::Iterator>> for I {
    type Item = ();
}
/////////////////////////////////////////////////////////////////////
// The unit interface requires only `Send + Sync`; every other capability is
// left defaulted.
crate::impl_InterfaceType! {
    impl crate::erased_types::InterfaceType for () {
        type Send= True;
        type Sync= True;
    }
}
|
// SPDX-License-Identifier: Apache-2.0 AND MIT
//! `ConsumerBuilder` and `Consumer` structs
use futures::stream::{Stream, StreamExt};
use std::pin::Pin;
use std::task::{Context, Poll};
/// A [non-consuming] [Consumer] builder.
///
/// [Consumer]: struct.Consumer.html
/// [non-consuming]: https://doc.rust-lang.org/1.0.0/style/ownership/builders.html#non-consuming-builders-(preferred):
#[derive(Clone)]
pub struct ConsumerBuilder {
    conn: crate::Connection,
    /// Exchange name to declare and publish replies to.
    ex: String,
    /// Queue name to declare and consume from.
    queue: String,
    kind: lapin::ExchangeKind,
    ex_opts: lapin::options::ExchangeDeclareOptions,
    queue_opts: lapin::options::QueueDeclareOptions,
    bind_opts: lapin::options::QueueBindOptions,
    /// Shared field table reused for the declare, bind, and consume calls.
    field_table: lapin::types::FieldTable,
    tx_props: lapin::BasicProperties,
    tx_opts: lapin::options::BasicPublishOptions,
    rx_opts: lapin::options::BasicConsumeOptions,
    ack_opts: lapin::options::BasicAckOptions,
    rej_opts: lapin::options::BasicRejectOptions,
    /// Message handler installed on built consumers.
    processor: Box<dyn crate::MessageProcess + Send + Sync>,
}
impl ConsumerBuilder {
    /// Creates a builder targeting the crate default exchange and queue,
    /// with default lapin options and the echoing message processor.
    pub fn new(conn: crate::Connection) -> Self {
        Self {
            conn,
            ex: crate::DEFAULT_EXCHANGE.to_string(),
            queue: crate::DEFAULT_QUEUE.to_string(),
            kind: lapin::ExchangeKind::Direct,
            ex_opts: Default::default(),
            queue_opts: Default::default(),
            bind_opts: Default::default(),
            field_table: Default::default(),
            tx_props: lapin::BasicProperties::default(),
            tx_opts: Default::default(),
            rx_opts: Default::default(),
            ack_opts: Default::default(),
            rej_opts: Default::default(),
            processor: Box::new(crate::message::EchoProcessor {}),
        }
    }
    /// Specify the exchange name.
    pub fn exchange(&mut self, exchange: &str) -> &mut Self {
        self.ex = String::from(exchange);
        self
    }
    /// Specify the queue name.
    pub fn queue(&mut self, queue: &str) -> &mut Self {
        self.queue = String::from(queue);
        self
    }
    /// Use the provided [MessageProcess] trait object.
    ///
    /// [MessageProcess]: ../message/trait.MessageProcess.html
    pub fn with_processor(
        &mut self,
        processor: Box<dyn crate::MessageProcess + Send + Sync>,
    ) -> &mut Self {
        self.processor = processor;
        self
    }
    /// Declares the exchange/queue pair, binds them, and starts a basic
    /// consumer on the queue, returning a ready-to-run [Consumer].
    ///
    /// [Consumer]: struct.Consumer.html
    pub async fn build(&self) -> crate::Result<Consumer> {
        let opts = crate::client::QueueOptions {
            kind: self.kind.clone(),
            ex_opts: self.ex_opts.clone(),
            ex_field: self.field_table.clone(),
            queue_opts: self.queue_opts.clone(),
            queue_field: self.field_table.clone(),
            bind_opts: self.bind_opts.clone(),
            bind_field: self.field_table.clone(),
        };
        let (channel, queue) = self.conn.queue(&self.ex, &self.queue, opts).await?;
        let consume = channel
            .clone()
            .basic_consume(
                queue.name().as_str(),
                "consumer",
                self.rx_opts.clone(),
                self.field_table.clone(),
            )
            .await
            .map_err(crate::Error::from)?;
        Ok(Consumer {
            ch: channel,
            consume,
            ex: self.ex.clone(),
            tx_props: self.tx_props.clone(),
            tx_opts: self.tx_opts.clone(),
            ack_opts: self.ack_opts.clone(),
            rej_opts: self.rej_opts.clone(),
            processor: self.processor.clone(),
        })
    }
}
/// A zero-cost [lapin::Consumer] abstraction type.
///
/// [lapin::Consumer]: https://docs.rs/lapin/latest/lapin/struct.Consumer.html
pub struct Consumer {
    /// Channel used for acks, rejects, and reply publishing.
    ch: lapin::Channel,
    /// Underlying lapin delivery stream.
    consume: lapin::Consumer,
    /// Exchange replies are published to.
    ex: String,
    tx_props: lapin::BasicProperties,
    tx_opts: lapin::options::BasicPublishOptions,
    ack_opts: lapin::options::BasicAckOptions,
    rej_opts: lapin::options::BasicRejectOptions,
    /// Handler invoked for every received message.
    processor: Box<dyn crate::MessageProcess + Send + Sync>,
}
impl Consumer {
/// Use the provided [MessageProcess] trait object.
///
/// [MessageProcess]: ../message/trait.MessageProcess.html
pub fn with_processor(
&mut self,
processor: Box<dyn crate::MessageProcess + Send + Sync>,
) -> &mut Self {
self.processor = processor;
self
}
pub async fn run(&mut self) -> crate::Result<()> {
while let Some(msg) = self.consume.next().await {
match msg {
Ok(msg) => {
let req = &crate::Message::new(msg);
match self.processor.process(req).await {
Ok(resp) => self.response(req, &resp).await?,
Err(_err) => self.reject(req).await?,
}
}
Err(err) => return Err(crate::Error::from(err)),
}
}
Ok(())
}
pub async fn response(&mut self, req: &crate::Message, resp: &[u8]) -> crate::Result<()> {
if let Some(reply_to) = req.reply_to() {
self.send(reply_to, resp).await?;
}
self.ch
.basic_ack(req.delivery_tag(), self.ack_opts.clone())
.await
.map_err(crate::Error::from)?;
Ok(())
}
pub async fn reject(&mut self, req: &crate::Message) -> crate::Result<()> {
self.ch
.basic_reject(req.delivery_tag(), self.rej_opts.clone())
.await
.map_err(crate::Error::from)?;
Ok(())
}
async fn send(&mut self, routing_key: &str, msg: &[u8]) -> crate::Result<()> {
self.ch
.basic_publish(
&self.ex,
&routing_key,
self.tx_opts.clone(),
msg.to_vec(),
self.tx_props.clone(),
)
.await
.map_err(crate::Error::from)?;
Ok(())
}
}
impl Stream for Consumer {
type Item = Result<crate::Message, crate::Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let c = &mut self.consume;
let c = Pin::new(c);
match c.poll_next(cx) {
Poll::Ready(Some(Ok(msg))) => Poll::Ready(Some(Ok(crate::Message::new(msg)))),
Poll::Ready(Some(Err(err))) => Poll::Ready(Some(Err(err.into()))),
Poll::Ready(None) => Poll::Ready(None),
Poll::Pending => Poll::Pending,
}
}
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "512"]
mod proxies;
mod services;
#[cfg(test)]
mod test;
use failure::Error;
use fuchsia_async as fasync;
use fuchsia_component::server::ServiceFs;
use futures::{channel::mpsc, prelude::*};
type Result<T> = std::result::Result<T, Error>;
const CHANNEL_BUFFER_SIZE: usize = 100;
/// This number should be forgiving. If we lower it we may want to build some
/// in-process staging area for changes so we can send them to clients that ACK
/// late. At 20 though, clients that don't ACK can't reasonably expect to be
/// accommodated.
const MAX_EVENTS_SENT_WITHOUT_ACK: usize = 20;
/// Spawns `fut` on the local executor, logging (rather than propagating) any
/// error it resolves to.
fn spawn_log_error(fut: impl Future<Output = Result<()>> + 'static) {
    fasync::spawn_local(fut.unwrap_or_else(|e| eprintln!("{}", e)))
}
/// Entry point for the Media Session service: wires the publisher and
/// discovery services together over mpsc channels and serves both from this
/// component's outgoing `svc` directory.
#[fasync::run_singlethreaded]
async fn main() {
    fuchsia_syslog::init_with_tags(&["mediasession"]).expect("Initializing syslogger");
    fuchsia_syslog::fx_log_info!("Initializing Fuchsia Media Session Service");
    // Channels feeding the long-running discovery task below.
    let (player_sink, player_stream) = mpsc::channel(CHANNEL_BUFFER_SIZE);
    let (request_sink, request_stream) = mpsc::channel(CHANNEL_BUFFER_SIZE);
    let discovery = self::services::discovery::Discovery::new(player_stream);
    spawn_log_error(discovery.serve(request_stream));
    let mut server = ServiceFs::new_local();
    server
        .dir("svc")
        // Each publisher connection gets its own task that feeds new players
        // into `player_sink`.
        .add_fidl_service(|request_stream| {
            spawn_log_error(
                self::services::publisher::Publisher::new(player_sink.clone())
                    .serve(request_stream),
            )
        })
        // Discovery requests are forwarded verbatim to the discovery task.
        .add_fidl_service(
            |request_stream: fidl_fuchsia_media_sessions2::DiscoveryRequestStream| {
                let request_sink = request_sink.clone().sink_map_err(Into::into);
                spawn_log_error(request_stream.map_err(Into::into).forward(request_sink));
            },
        );
    server.take_and_serve_directory_handle().expect("To serve Media Session services");
    server.collect::<()>().await;
}
|
use std::vec;
mod katapiller;
mod random_polygon;
mod random_polygonbox;
mod poly_grid;
mod grid;
use super::util::{Point, Polygon};
use super::util;
/// Builds, for every vertex of `pol`, the list of edges incident to it.
///
/// Entry `i` of the returned vector holds one `(i, other)` pair per edge
/// touching vertex `i`, oriented away from `i` — so each undirected edge
/// appears twice overall, once per endpoint.
fn count_point_orcurences (pol: &Polygon) -> Vec<Vec<(usize,usize)>>{
    // NOTE(review): the name keeps its historical spelling ("occurrences")
    // so existing callers stay valid.
    let mut edges_on_point: Vec<Vec<(usize, usize)>> = vec![Vec::new(); pol.points.len()];
    // Iterate by reference instead of cloning the whole edge list
    // (the previous `pol.edges.to_vec()` copied it needlessly).
    for edge in &pol.edges {
        edges_on_point[edge.0].push((edge.0, edge.1));
        edges_on_point[edge.1].push((edge.1, edge.0));
    }
    edges_on_point
}
/// Walks the incidence lists from `count_point_orcurences` to build a weakly
/// simple polygon: starting from an arbitrary edge at vertex 0, it repeatedly
/// leaves the current vertex along the unused incident edge making the
/// smallest angle with the edge it arrived on, consuming each edge as it is
/// traversed, until the current vertex has no unused edges left.
fn generate_weakly_simple (pol: &Polygon) -> Vec<Point> {
    //every point contains a vector of every edge that is connected to it
    //(every edge is listed at both of its endpoints)
    let mut edges_on_point = count_point_orcurences(pol);
    let mut point_polygon: Vec<Point> = Vec::new();
    // seed the walk with an arbitrary edge incident to vertex 0
    let mut lastedge = edges_on_point[0].pop().unwrap();
    let x = pol.points[lastedge.0];
    let y = pol.points[lastedge.1];
    point_polygon.push(x);
    point_polygon.push(y);
    let mut finished = false;
    while ! finished {
        let current_point = lastedge.1;
        // the cardinality of the point (node)
        let current_len = edges_on_point[current_point].len();
        match current_len {
            // no unused edges leave this vertex: the walk is complete
            0 => {finished = true},
            _ => {
                // initializes the nextedge as return edge
                let mut next_point = 0;
                // notices that angles is impossibly large (sentinel above
                // any value the angle helper can return)
                let mut min_angle: f64 = 10.0;
                // finds edge with smallest angle to lastedge
                for i in 0..current_len {
                    let p = edges_on_point[current_point][i];
                    let a = pol.points[lastedge.0];
                    let b = pol.points[lastedge.1];
                    //assert_eq!(b, pol.Points[p.0]);
                    let c = pol.points[p.1];
                    let p_angle = util::angle_between_3_points(a, b, c);
                    if p_angle < min_angle {
                        min_angle = p_angle;
                        next_point = i;
                    }
                }
                // consume the chosen edge and extend the polygon through it
                lastedge = edges_on_point[current_point].remove(next_point);
                point_polygon.push(pol.points[lastedge.1]);
            }
        }
    }
    return point_polygon
}
/// Generate a random polygon pair of size `n` and return both halves as
/// weakly simple point sequences.
#[inline]
pub fn gen_polygon (n: usize) -> Vec<Vec<Point>>{
    let (first, second) = random_polygon::random_select_polygon(n);
    vec![generate_weakly_simple(&first), generate_weakly_simple(&second)]
}
/// Like `gen_polygon`, but samples from the box-polygon generator.
pub fn gen_boxpoly (n: usize) -> Vec<Vec<Point>>{
    let (first, second) = random_polygonbox::random_select_polygon(n);
    vec![generate_weakly_simple(&first), generate_weakly_simple(&second)]
}
/// Generate caterpillar-style polygons of size `n` via the katapiller module.
pub fn gen_catapiller (n: usize) -> Vec<Vec<Vec<Point>>> {
    katapiller::katapiller_polygon(n)
}
/// Generate two grid-based polygons of size `n` and make each weakly simple.
pub fn gen_grid_poly (n: usize) -> Vec<Vec<Point>> {
    let polys = poly_grid::generate_poly(2, n, 3);
    vec![generate_weakly_simple(&polys[0]), generate_weakly_simple(&polys[1])]
}
/// Debug helper: collect the endpoints of every pair of crossing segments
/// between the two polylines, checked in both argument orders.
///
/// NOTE(review): the loops run `1..len-1`, so the final segment of each
/// polyline is never tested — presumably intentional (closing edge?), but
/// worth confirming. Also `1..len-1` underflows for polylines with fewer
/// than 2 points. If `util::linecross` is symmetric, the second pass
/// duplicates every crossing found by the first — confirm intent.
pub fn _linecross_tjekker(poly1: Vec<Point>, poly2: Vec<Point>) -> Vec<Vec<Point>> {
    let len1 = poly1.len();
    let len2 = poly2.len();
    // Each entry holds the 4 endpoints of one crossing segment pair.
    let mut retvec = Vec::new();
    let mut _tjek = true;
    for i in 1..len1-1 {
        for j in 1..len2-1{
            if util::linecross(poly1[i-1], poly1[i], poly2[j-1], poly2[j]){
                retvec.push(vec![poly1[i-1], poly1[i], poly2[j-1], poly2[j]]);
                _tjek = false;
            }
        }
    }
    // Same test with the polylines swapped.
    for i in 1..len2-1 {
        for j in 1..len1-1{
            if util::linecross(poly2[i-1], poly2[i], poly1[j-1], poly1[j]){
                retvec.push(vec![poly2[i-1], poly2[i], poly1[j-1], poly1[j]]);
                _tjek = false;
            }
        }
    }
    return retvec
}
/// Walk a small hand-built tree polygon and check the exact traversal order
/// (a b c f g f c e c d c b a) produced by `generate_weakly_simple`.
#[test]
fn test_weakly_simple() {
    let a = Point{x: 0.0, y: 0.0};
    let b = Point{x: 1.0, y: 0.0};
    let c = Point{x: 1.0, y: 1.0};
    let d = Point{x: 0.0, y: 1.0};
    let e = Point{x: 1.0, y: 2.0};
    let f = Point{x: 2.0, y: 1.0};
    let g = Point{x: 2.0, y: 2.0};
    let points = vec!(a, b, c, d, e, f, g);
    let e1 = (0,1);
    let e2 = (1,2);
    let e3 = (2,3);
    let e4 = (2,4);
    let e5 = (2,5);
    let e6 = (5,6);
    let edges = vec!(e1,e2,e3,e4,e5,e6);
    let p = Polygon{points: points, edges: edges};
    let weakly_p = generate_weakly_simple(&p);
    // check that the result has the same length as abcfgfcecdcba
    assert_eq!(weakly_p.len(), 13);
    let abcfgfcecdcba = vec!(a,b,c,f,g,f,c,e,c,d,c,b,a);
    for i in 0..13 {
        print! ("({},{}) ", weakly_p[i].x, weakly_p[i].y);
        assert_eq! (abcfgfcecdcba[i].x, weakly_p[i].x);
        assert_eq! (abcfgfcecdcba[i].y, weakly_p[i].y);
    }
    println! ("");
}

/// A random box polygon over n points should always yield a weakly simple
/// walk of exactly 2n-1 points.
#[test]
fn test_weakly_simple_large_n() {
    for i in 1..3 {
        let n = 10i64.pow(i);
        let (p, _q) = random_polygonbox::random_select_polygon(n as usize);
        let pweak = generate_weakly_simple(&p);
        let res = 2*n-1;
        assert_eq!(pweak.len(), res as usize);
    }
}

/// Same 2n-1 length invariant for grid-generated polygons.
#[test]
fn test_grid_to_weakly_simple() {
    for i in 1..5 {
        let n = 10i64.pow(i);
        let poly_vec = poly_grid::generate_poly(2, n as usize, 3);
        let pweak = generate_weakly_simple(&poly_vec[0]);
        let res = 2*n-1;
        assert_eq!(pweak.len(), res as usize);
    }
}
|
extern crate log;
use leetcode_rust::init_logger;
use log::*;
struct Solution {}

impl Solution {
    /// Map a linear cell index (0..81) to its (row, col) on the 9x9 board.
    #[inline]
    fn get_row_col(n: usize) -> (usize, usize) {
        (n / 9, n % 9)
    }

    /// True if digit `c` may legally be placed at cell `n`.
    fn check_num(c: char, n: usize, board: &[Vec<char>]) -> bool {
        Self::check_row_col(c, n, board) && Self::check_grid(c, n, board)
    }

    /// True if `c` appears in neither the row nor the column of cell `n`.
    /// (Takes a shared slice — the original took `&mut` without mutating.)
    fn check_row_col(c: char, n: usize, board: &[Vec<char>]) -> bool {
        let (row, col) = Self::get_row_col(n);
        (0..9).all(|i| board[row][i] != c && board[i][col] != c)
    }

    /// True if `c` does not appear in the 3x3 sub-grid containing cell `n`.
    fn check_grid(c: char, n: usize, board: &[Vec<char>]) -> bool {
        // Top-left corner of the sub-grid holding cell `n`.
        let grid_row = (n / 27) * 3;
        let grid_col = ((n % 9) / 3) * 3;
        (grid_row..grid_row + 3)
            .all(|i| (grid_col..grid_col + 3).all(|j| board[i][j] != c))
    }

    /// Backtracking search starting at cell `n`; returns true once every
    /// cell is filled, leaving the solution in `board`. On failure the board
    /// is restored to its state at entry.
    fn dfs(n: usize, board: &mut Vec<Vec<char>>) -> bool {
        if n == 81 {
            return true;
        }
        let (row, col) = Self::get_row_col(n);
        if board[row][col] != '.' {
            return Self::dfs(n + 1, board);
        }
        for &c in ['1', '2', '3', '4', '5', '6', '7', '8', '9'].iter() {
            if Self::check_num(c, n, board) {
                board[row][col] = c;
                // Recurse directly on the next cell. The original recursed on
                // `n` again, which re-entered this frame only to skip the
                // just-filled cell — a wasted stack frame per placement.
                if Self::dfs(n + 1, board) {
                    return true;
                }
                board[row][col] = '.';
            }
        }
        false
    }

    /// Solve the sudoku in place; empty cells are '.', digits '1'..='9'.
    pub fn solve_sudoku(board: &mut Vec<Vec<char>>) {
        Self::dfs(0, board);
    }
}
fn main() {
    init_logger();
    // Classic example puzzle (LeetCode #37); '.' marks an empty cell.
    let mut board: Vec<Vec<char>> = vec![
        vec!['5', '3', '.', '.', '7', '.', '.', '.', '.'],
        vec!['6', '.', '.', '1', '9', '5', '.', '.', '.'],
        vec!['.', '9', '8', '.', '.', '.', '.', '6', '.'],
        vec!['8', '.', '.', '.', '6', '.', '.', '.', '3'],
        vec!['4', '.', '.', '8', '.', '3', '.', '.', '1'],
        vec!['7', '.', '.', '.', '2', '.', '.', '.', '6'],
        vec!['.', '6', '.', '.', '.', '.', '2', '8', '.'],
        vec!['.', '.', '.', '4', '1', '9', '.', '.', '5'],
        vec!['.', '.', '.', '.', '8', '.', '.', '7', '9'],
    ];
    // Log the puzzle, solve it (timed), then log the solution.
    board.iter().for_each(|line| info!("{:?}", line));
    info!("===");
    let s = std::time::Instant::now();
    Solution::solve_sudoku(&mut board);
    let e = std::time::Instant::now();
    info!("{:?}", e - s);
    board.iter().for_each(|line| info!("{:?}", line));
}
|
use crate::util::color;
use crate::{config, err};
use anyhow::Result;
use regex::Regex;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
/// Render a template to its location given the replacement variables.
///
/// Every `{{name}}` placeholder in the template is replaced with
/// `vars["name"]`. If any `{{...}}` placeholder survives substitution (i.e.
/// this machine has no value for that variable), an error naming it is
/// returned and nothing is written to `render_to`.
fn render_template<P: AsRef<Path>, Q: AsRef<Path>>(
    template: P,
    render_to: Q,
    vars: &HashMap<String, String>,
) -> Result<()> {
    use std::io::prelude::*;
    let mut contents = String::new();
    File::open(&template)?.read_to_string(&mut contents)?;
    // Build each placeholder as `{{var}}`; `trim()` guards against stray
    // whitespace in configured variable names.
    for (var, value) in vars.iter() {
        contents = contents.replace(&format!("{{{{{}}}}}", var.trim()), value);
    }
    // Any remaining placeholder means a variable had no value here.
    let re = Regex::new(r"\{\{([^}]*)\}\}").unwrap();
    if let Some(var) = re.captures(&contents) {
        return err::err(format!(
            "In template {}: this machine has no value for variable {}",
            color::path(template),
            color::emphasis(var[1].to_owned())
        ));
    }
    write!(File::create(render_to)?, "{}", contents)?;
    Ok(())
}
/// Render all local templates to their location.
///
/// Stops and returns the first error encountered.
pub fn render() -> Result<()> {
    let config = config::get_config()?;
    let vars = config.vars();
    config
        .templates()
        .iter()
        .try_for_each(|(remote_name, render_to)| {
            render_template(config.dest(remote_name), render_to, &vars)
        })
}
|
// error-pattern:assigning to immutable obj field
// NOTE(review): pre-1.0 Rust compile-fail test — `obj` declarations were
// removed from the language long ago. Kept verbatim so the expected
// diagnostic above still matches the compiler output.
obj objy(x: int) {
    fn foo() { x = 5; }
}
fn main() { } |
use std::io::{self, Read};
use std::str::{self, FromStr};
use bytes::Bytes;
use thiserror::Error;
use crate::object::{
Blob, Commit, ObjectData, ObjectHeader, ObjectKind, ParseBlobError, ParseCommitError,
ParseTagError, ParseTreeError, Tag, Tree,
};
use crate::parse::{self, Buffer, Parser};
/// Errors produced while reading and parsing a git object (header or body).
/// Each body-parse failure wraps the concrete per-kind error; plain I/O
/// failures get their own `Io` variant.
#[derive(Debug, Error)]
pub(in crate::object) enum ParseObjectError {
    #[error("the object header is invalid")]
    InvalidHeader(#[source] ParseHeaderError),
    #[error("the blob object is invalid")]
    InvalidBlob(
        #[source]
        #[from]
        ParseBlobError,
    ),
    #[error("the tree object is invalid")]
    InvalidTree(
        #[source]
        #[from]
        ParseTreeError,
    ),
    #[error("the commit object is invalid")]
    InvalidCommit(
        #[source]
        #[from]
        ParseCommitError,
    ),
    #[error("the tag object is invalid")]
    InvalidTag(
        #[source]
        #[from]
        ParseTagError,
    ),
    #[error("io error reading object")]
    Io(
        #[source]
        #[from]
        io::Error,
    ),
}
/// The object-type word in a header was not one of commit/tree/blob/tag;
/// carries the unrecognized word, lossily decoded to text.
#[derive(Debug, Error)]
#[error("unknown object type `{0}`")]
pub(in crate::object) struct ParseObjectKindError(String);
/// Errors from parsing the `<kind> <len>\0` object header.
#[derive(Debug, Error)]
pub(in crate::object) enum ParseHeaderError {
    #[error("unsupported object type")]
    UnsupportedObjectKind,
    #[error("object size doesn't match actual size")]
    LengthMismatch,
    #[error("object size is too big")]
    LengthTooBig,
    /// Catch-all carrying a static description of what failed.
    #[error("{0}")]
    Other(&'static str),
    #[error(transparent)]
    ParseObjectKind(#[from] ParseObjectKindError),
    /// Low-level parse error. Deliberately not `#[from]`: the manual
    /// `From<parse::Error>` impl below first maps `InvalidLength` to
    /// `LengthMismatch`.
    #[error(transparent)]
    Parse(parse::Error),
}
impl ObjectHeader {
    // Upper bound on a valid header's byte length: the longest kind word
    // plus the longest decimal length, i.e. `"commit " + u64::MAX + "\0"`.
    // Checked against that exact expression by `test_max_header_len`.
    const MAX_LEN: usize = 28;
}
impl<R: Read> Buffer<R> {
    /// Read and parse the `<kind> <len>\0` header at the front of the
    /// buffer. Looks for the NUL terminator within at most
    /// `ObjectHeader::MAX_LEN` bytes; its absence is a header error.
    pub(in crate::object) fn read_object_header(
        &mut self,
    ) -> Result<ObjectHeader, ParseHeaderError> {
        let range =
            self.read_until_byte(b'\0', ObjectHeader::MAX_LEN)?
                .ok_or(ParseHeaderError::Other(
                    "the end of the header was not found",
                ))?;
        let mut parser = self.parser(range);
        let header = parser.parse_object_header()?;
        // The header parser must consume the full range up to the NUL.
        debug_assert!(parser.finished());
        Ok(header)
    }
    /// Consume the rest of the buffer as the object body (sized by
    /// `header.len`) and parse it according to `header.kind`.
    pub(in crate::object) fn read_object_body(
        self,
        header: ObjectHeader,
    ) -> Result<ObjectData, ParseObjectError> {
        // A size mismatch from the reader surfaces as LengthMismatch via the
        // From<parse::Error> impl, then converts into ParseObjectError.
        let parser = self
            .read_to_end_into_parser(header.len)
            .map_err(ParseHeaderError::from)?;
        parser.parse_object_body(header.kind)
    }
}
impl Parser<Bytes> {
    /// Dispatch body parsing to the concrete object type's parser, wrapping
    /// result and error in the matching `ObjectData` / `ParseObjectError`
    /// variant.
    fn parse_object_body(self, kind: ObjectKind) -> Result<ObjectData, ParseObjectError> {
        match kind {
            ObjectKind::Blob => Blob::parse(self)
                .map(ObjectData::Blob)
                .map_err(ParseObjectError::InvalidBlob),
            ObjectKind::Commit => Commit::parse(self)
                .map(ObjectData::Commit)
                .map_err(ParseObjectError::InvalidCommit),
            ObjectKind::Tree => Tree::parse(self)
                .map(ObjectData::Tree)
                .map_err(ParseObjectError::InvalidTree),
            ObjectKind::Tag => Tag::parse(self)
                .map(ObjectData::Tag)
                .map_err(ParseObjectError::InvalidTag),
            // Any other kind has no body parser; report it as an
            // unsupported-kind header error.
            _ => Err(ParseObjectError::InvalidHeader(
                ParseHeaderError::UnsupportedObjectKind,
            )),
        }
    }
}
impl<B: AsRef<[u8]>> Parser<B> {
    /// Parse an object header of the form `<kind> <decimal-len>\0` from the
    /// start of the input.
    pub(in crate::object) fn parse_object_header(
        &mut self,
    ) -> Result<ObjectHeader, ParseHeaderError> {
        // Must be called on a fresh parser positioned at the header start.
        debug_assert_eq!(self.pos(), 0);
        let kind = self
            .consume_until(b' ')
            .ok_or(ParseHeaderError::Other("failed to parse object kind"))?;
        let kind = ObjectKind::from_bytes(&self[kind])?;
        let len = self
            .consume_until(b'\0')
            .ok_or(ParseHeaderError::Other("failed to parse object length"))?;
        let len = str::from_utf8(&self[len])
            .map_err(|_| ParseHeaderError::Other("failed to parse object length"))?;
        // NOTE(review): every integer-parse failure is reported as
        // LengthTooBig, including non-numeric input — confirm that's intended.
        let len = usize::from_str(&len).map_err(|_| ParseHeaderError::LengthTooBig)?;
        Ok(ObjectHeader { kind, len })
    }
}
impl ObjectKind {
    /// Map a header kind word to its `ObjectKind`; any word other than the
    /// four known git object types is an error carrying the offending text.
    pub(in crate::object) fn from_bytes(input: &[u8]) -> Result<Self, ParseObjectKindError> {
        match input {
            b"commit" => Ok(ObjectKind::Commit),
            b"tree" => Ok(ObjectKind::Tree),
            b"blob" => Ok(ObjectKind::Blob),
            b"tag" => Ok(ObjectKind::Tag),
            input => Err(ParseObjectKindError(
                String::from_utf8_lossy(input).into_owned(),
            )),
        }
    }
}
impl From<ParseHeaderError> for ParseObjectError {
    /// Lift a header error; plain I/O errors are unwrapped into the
    /// dedicated `Io` variant instead of being hidden inside `InvalidHeader`.
    fn from(err: ParseHeaderError) -> Self {
        match err {
            ParseHeaderError::Parse(parse::Error::Io(err)) => ParseObjectError::Io(err),
            err => ParseObjectError::InvalidHeader(err),
        }
    }
}
impl From<parse::Error> for ParseHeaderError {
    /// Convert a low-level parse error, promoting `InvalidLength` to the
    /// more specific `LengthMismatch`.
    fn from(err: parse::Error) -> Self {
        match err {
            parse::Error::InvalidLength => ParseHeaderError::LengthMismatch,
            err => ParseHeaderError::Parse(err),
        }
    }
}
/// `MAX_LEN` must equal the longest possible header: the longest kind word
/// ("commit"), a space, u64::MAX in decimal, and the terminating NUL.
#[test]
fn test_max_header_len() {
    assert_eq!(
        ObjectHeader::MAX_LEN,
        format!("commit {}\0", u64::MAX).len()
    );
}
/// Exercise header parsing: valid tree/blob headers plus three malformed
/// inputs (oversized length, missing NUL terminator, missing space).
#[test]
fn test_parse_header() {
    fn parse_header(bytes: &[u8]) -> Result<ObjectHeader, ParseHeaderError> {
        Parser::new(bytes).parse_object_header()
    }
    assert_eq!(
        parse_header(b"tree 3\0abc").unwrap(),
        ObjectHeader {
            kind: ObjectKind::Tree,
            len: 3,
        }
    );
    assert_eq!(
        parse_header(b"blob 3\0abc").unwrap(),
        ObjectHeader {
            kind: ObjectKind::Blob,
            len: 3,
        }
    );
    // Length exceeds what fits a usize header / MAX_LEN window.
    assert!(parse_header(b"commit 333333333333333333333\0abc").is_err(),);
    assert!(parse_header(b"blob 3").is_err(),);
    assert!(parse_header(b"blob3\0abc").is_err(),);
}
|
//! State management for a selection in the grid.
//!
//! A selection should start when the mouse is clicked, and it should be
//! finalized when the button is released. The selection should be cleared
//! when text is added/removed/scrolled on the screen. The selection should
//! also be cleared if the user clicks off of the selection.
use std::cmp::min;
use std::mem;
use std::ops::{Bound, Range, RangeBounds};
use crate::ansi::CursorShape;
use crate::grid::{Dimensions, GridCell, Indexed};
use crate::index::{Boundary, Column, Line, Point, Side};
use crate::term::cell::{Cell, Flags};
use crate::term::Term;
/// A Point and side within that point.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Anchor {
    // Cell the anchor sits in.
    point: Point,
    // Side of that cell (left/right) the anchor touches.
    side: Side,
}
impl Anchor {
    /// Construct an anchor at `point` on `side`.
    fn new(point: Point, side: Side) -> Anchor {
        Anchor { point, side }
    }
}
/// Represents a range of selected cells.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct SelectionRange {
    /// Start point, top left of the selection.
    pub start: Point,
    /// End point, bottom right of the selection.
    pub end: Point,
    /// Whether this selection is a block selection.
    pub is_block: bool,
}
impl SelectionRange {
    /// Create a new ordered range.
    ///
    /// # Panics
    ///
    /// Panics if `start` is ordered after `end`.
    pub fn new(start: Point, end: Point, is_block: bool) -> Self {
        assert!(start <= end);
        Self { start, end, is_block }
    }
}
impl SelectionRange {
    /// Check if a point lies within the selection.
    ///
    /// For non-block selections the column bounds only clip on the
    /// selection's first/last line; interior lines are fully covered. Block
    /// selections clip by column on every line.
    pub fn contains(&self, point: Point) -> bool {
        self.start.line <= point.line
            && self.end.line >= point.line
            && (self.start.column <= point.column
                || (self.start.line != point.line && !self.is_block))
            && (self.end.column >= point.column || (self.end.line != point.line && !self.is_block))
    }
    /// Check if the cell at a point is part of the selection.
    pub fn contains_cell(
        &self,
        indexed: &Indexed<&Cell>,
        point: Point,
        shape: CursorShape,
    ) -> bool {
        // Do not invert block cursor at selection boundaries.
        if shape == CursorShape::Block
            && point == indexed.point
            && (self.start == indexed.point
                || self.end == indexed.point
                // For block selections, any corner of the rectangle counts
                // as a boundary.
                || (self.is_block
                    && ((self.start.line == indexed.point.line
                        && self.end.column == indexed.point.column)
                        || (self.end.line == indexed.point.line
                            && self.start.column == indexed.point.column))))
        {
            return false;
        }
        // Point itself is selected.
        if self.contains(indexed.point) {
            return true;
        }
        // Check if a wide char's trailing spacer is selected.
        indexed.cell.flags().contains(Flags::WIDE_CHAR)
            && self.contains(Point::new(indexed.point.line, indexed.point.column + 1))
    }
}
/// Different kinds of selection.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum SelectionType {
    /// Precisely tracks selected cells without expansion.
    Simple,
    /// Rectangular (columnar) region.
    Block,
    /// Expands to the nearest semantic escape char in either direction.
    Semantic,
    /// Always selects entire lines.
    Lines,
}
/// Describes a region of a 2-dimensional area.
///
/// Used to track a text selection. There are four supported modes, each with its own constructor:
/// [`simple`], [`block`], [`semantic`], and [`lines`]. The [`simple`] mode precisely tracks which
/// cells are selected without any expansion. [`block`] will select rectangular regions.
/// [`semantic`] mode expands the initial selection to the nearest semantic escape char in either
/// direction. [`lines`] will always select entire lines.
///
/// Calls to [`update`] operate different based on the selection kind. The [`simple`] and [`block`]
/// mode do nothing special, simply track points and sides. [`semantic`] will continue to expand
/// out to semantic boundaries as the selection point changes. Similarly, [`lines`] will always
/// expand the new point to encompass entire lines.
///
/// [`simple`]: enum.Selection.html#method.simple
/// [`block`]: enum.Selection.html#method.block
/// [`semantic`]: enum.Selection.html#method.semantic
/// [`lines`]: enum.Selection.html#method.lines
/// [`update`]: enum.Selection.html#method.update
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Selection {
    /// Kind of selection (see type-level docs).
    pub ty: SelectionType,
    // Unordered anchors: `start` is where the selection began, `end` where
    // it was last updated — either may precede the other on screen.
    region: Range<Anchor>,
}
impl Selection {
    /// Start a new selection of kind `ty`, anchored at `location`/`side`.
    pub fn new(ty: SelectionType, location: Point, side: Side) -> Selection {
        Self {
            // Both anchors start at the initial location; `update` moves `end`.
            region: Range { start: Anchor::new(location, side), end: Anchor::new(location, side) },
            ty,
        }
    }
    /// Update the end of the selection.
    pub fn update(&mut self, point: Point, side: Side) {
        self.region.end = Anchor::new(point, side);
    }
    /// Scroll the selection by `delta` lines within the scrolling `range`,
    /// clamping it to the region's edges. Returns `None` once the selection
    /// has been rotated entirely out of the region.
    pub fn rotate<D: Dimensions>(
        mut self,
        dimensions: &D,
        range: &Range<Line>,
        delta: i32,
    ) -> Option<Selection> {
        let bottommost_line = dimensions.bottommost_line();
        let range_bottom = range.end;
        let range_top = range.start;
        // Order anchors so `start` is the earlier point on screen.
        let (mut start, mut end) = (&mut self.region.start, &mut self.region.end);
        if start.point > end.point {
            mem::swap(&mut start, &mut end);
        }
        // Rotate start of selection.
        if (start.point.line >= range_top || range_top == 0) && start.point.line < range_bottom {
            start.point.line = min(start.point.line - delta, bottommost_line);
            // If end is within the same region, delete selection once start rotates out.
            if start.point.line >= range_bottom && end.point.line < range_bottom {
                return None;
            }
            // Clamp selection to start of region.
            if start.point.line < range_top && range_top != 0 {
                // Non-block selections snap the clamped start to column 0.
                if self.ty != SelectionType::Block {
                    start.point.column = Column(0);
                    start.side = Side::Left;
                }
                start.point.line = range_top;
            }
        }
        // Rotate end of selection.
        if (end.point.line >= range_top || range_top == 0) && end.point.line < range_bottom {
            end.point.line = min(end.point.line - delta, bottommost_line);
            // Delete selection if end has overtaken the start.
            if end.point.line < start.point.line {
                return None;
            }
            // Clamp selection to end of region.
            if end.point.line >= range_bottom {
                // Non-block selections snap the clamped end to the last column.
                if self.ty != SelectionType::Block {
                    end.point.column = dimensions.last_column();
                    end.side = Side::Right;
                }
                end.point.line = range_bottom - 1;
            }
        }
        Some(self)
    }
    /// Whether the selection covers zero complete cells.
    pub fn is_empty(&self) -> bool {
        match self.ty {
            SelectionType::Simple => {
                let (mut start, mut end) = (self.region.start, self.region.end);
                if start.point > end.point {
                    mem::swap(&mut start, &mut end);
                }
                // Simple selection is empty when the points are identical
                // or two adjacent cells have the sides right -> left.
                start == end
                    || (start.side == Side::Right
                        && end.side == Side::Left
                        && (start.point.line == end.point.line)
                        && start.point.column + 1 == end.point.column)
            },
            SelectionType::Block => {
                let (start, end) = (self.region.start, self.region.end);
                // Block selection is empty when the points' columns and sides are identical
                // or two cells with adjacent columns have the sides right -> left,
                // regardless of their lines
                (start.point.column == end.point.column && start.side == end.side)
                    || (start.point.column + 1 == end.point.column
                        && start.side == Side::Right
                        && end.side == Side::Left)
                    || (end.point.column + 1 == start.point.column
                        && start.side == Side::Left
                        && end.side == Side::Right)
            },
            // Semantic/lines selections always cover at least one cell.
            SelectionType::Semantic | SelectionType::Lines => false,
        }
    }
    /// Check whether selection contains any point in a given range.
    pub fn intersects_range<R: RangeBounds<Line>>(&self, range: R) -> bool {
        // Order the anchor lines before comparing against the range bounds.
        let mut start = self.region.start.point.line;
        let mut end = self.region.end.point.line;
        if start > end {
            mem::swap(&mut start, &mut end);
        }
        let range_top = match range.start_bound() {
            Bound::Included(&range_start) => range_start,
            Bound::Excluded(&range_start) => range_start + 1,
            Bound::Unbounded => Line(i32::MIN),
        };
        let range_bottom = match range.end_bound() {
            Bound::Included(&range_end) => range_end,
            Bound::Excluded(&range_end) => range_end - 1,
            Bound::Unbounded => Line(i32::MAX),
        };
        range_bottom >= start && range_top <= end
    }
    /// Expand selection sides to include all cells.
    ///
    /// Sets the anchors' sides so the full start and end cells are covered,
    /// based on which anchor comes first (block selections compare columns
    /// first, then lines).
    pub fn include_all(&mut self) {
        let (start, end) = (self.region.start.point, self.region.end.point);
        let (start_side, end_side) = match self.ty {
            SelectionType::Block
                if start.column > end.column
                    || (start.column == end.column && start.line > end.line) =>
            {
                (Side::Right, Side::Left)
            },
            SelectionType::Block => (Side::Left, Side::Right),
            _ if start > end => (Side::Right, Side::Left),
            _ => (Side::Left, Side::Right),
        };
        self.region.start.side = start_side;
        self.region.end.side = end_side;
    }
    /// Convert selection to grid coordinates.
    ///
    /// Returns `None` for empty selections or selections scrolled entirely
    /// above the grid.
    pub fn to_range<T>(&self, term: &Term<T>) -> Option<SelectionRange> {
        let grid = term.grid();
        let columns = grid.columns();
        // Order start above the end.
        let mut start = self.region.start;
        let mut end = self.region.end;
        if start.point > end.point {
            mem::swap(&mut start, &mut end);
        }
        // Clamp selection to within grid boundaries.
        if end.point.line < term.topmost_line() {
            return None;
        }
        start.point = start.point.grid_clamp(term, Boundary::Grid);
        // Dispatch to the per-kind range computation.
        match self.ty {
            SelectionType::Simple => self.range_simple(start, end, columns),
            SelectionType::Block => self.range_block(start, end),
            SelectionType::Semantic => Some(Self::range_semantic(term, start.point, end.point)),
            SelectionType::Lines => Some(Self::range_lines(term, start.point, end.point)),
        }
    }
    /// Compute the range for a semantic selection, expanding both ends to
    /// semantic boundaries; a zero-width selection first tries a bracket
    /// match instead.
    fn range_semantic<T>(term: &Term<T>, mut start: Point, mut end: Point) -> SelectionRange {
        if start == end {
            // On a bracket, select from the bracket to its match.
            if let Some(matching) = term.bracket_search(start) {
                if (matching.line == start.line && matching.column < start.column)
                    || (matching.line < start.line)
                {
                    start = matching;
                } else {
                    end = matching;
                }
                return SelectionRange { start, end, is_block: false };
            }
        }
        let start = term.semantic_search_left(start);
        let end = term.semantic_search_right(end);
        SelectionRange { start, end, is_block: false }
    }
    /// Compute the range for a lines selection: expand both ends to cover
    /// their full (wrapped) lines.
    fn range_lines<T>(term: &Term<T>, start: Point, end: Point) -> SelectionRange {
        let start = term.line_search_left(start);
        let end = term.line_search_right(end);
        SelectionRange { start, end, is_block: false }
    }
    /// Compute the range for a simple selection, trimming the partially
    /// covered boundary cells. Returns `None` when the selection is empty.
    fn range_simple(
        &self,
        mut start: Anchor,
        mut end: Anchor,
        columns: usize,
    ) -> Option<SelectionRange> {
        if self.is_empty() {
            return None;
        }
        // Remove last cell if selection ends to the left of a cell.
        if end.side == Side::Left && start.point != end.point {
            // Special case when selection ends to left of first cell.
            if end.point.column == 0 {
                end.point.column = Column(columns - 1);
                end.point.line -= 1;
            } else {
                end.point.column -= 1;
            }
        }
        // Remove first cell if selection starts at the right of a cell.
        if start.side == Side::Right && start.point != end.point {
            start.point.column += 1;
            // Wrap to next line when selection starts to the right of last column.
            if start.point.column == columns {
                start.point.column = Column(0);
                start.point.line += 1;
            }
        }
        Some(SelectionRange { start: start.point, end: end.point, is_block: false })
    }
    /// Compute the range for a block selection, trimming partially covered
    /// boundary columns. Returns `None` when the selection is empty.
    fn range_block(&self, mut start: Anchor, mut end: Anchor) -> Option<SelectionRange> {
        if self.is_empty() {
            return None;
        }
        // Always go top-left -> bottom-right.
        if start.point.column > end.point.column {
            mem::swap(&mut start.side, &mut end.side);
            mem::swap(&mut start.point.column, &mut end.point.column);
        }
        // Remove last cell if selection ends to the left of a cell.
        if end.side == Side::Left && start.point != end.point && end.point.column.0 > 0 {
            end.point.column -= 1;
        }
        // Remove first cell if selection starts at the right of a cell.
        if start.side == Side::Right && start.point != end.point {
            start.point.column += 1;
        }
        Some(SelectionRange { start: start.point, end: end.point, is_block: true })
    }
}
/// Tests for selection.
///
/// There are comments on all of the tests describing the selection. Pictograms
/// are used to avoid ambiguity. Grid cells are represented by a [ ]. Only
/// cells that are completely covered are counted in a selection. Ends are
/// represented by `B` and `E` for begin and end, respectively. A selected cell
/// looks like [XX], [BX] (at the start), [XB] (at the end), [XE] (at the end),
/// and [EX] (at the start), or [BE] for a single cell. Partially selected cells
/// look like [ B] and [E ].
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::Config;
    use crate::index::{Column, Point, Side};
    use crate::term::test::TermSize;
    use crate::term::Term;
    /// Build a minimal terminal of the given dimensions for selection tests.
    fn term(height: usize, width: usize) -> Term<()> {
        let size = TermSize::new(width, height);
        Term::new(&Config::default(), &size, ())
    }
    /// Test case of single cell selection.
    ///
    /// 1. [ ]
    /// 2. [B ]
    /// 3. [BE]
    #[test]
    fn single_cell_left_to_right() {
        let location = Point::new(Line(0), Column(0));
        let mut selection = Selection::new(SelectionType::Simple, location, Side::Left);
        selection.update(location, Side::Right);
        assert_eq!(selection.to_range(&term(1, 2)).unwrap(), SelectionRange {
            start: location,
            end: location,
            is_block: false
        });
    }
    /// Test case of single cell selection.
    ///
    /// 1. [ ]
    /// 2. [ B]
    /// 3. [EB]
    #[test]
    fn single_cell_right_to_left() {
        let location = Point::new(Line(0), Column(0));
        let mut selection = Selection::new(SelectionType::Simple, location, Side::Right);
        selection.update(location, Side::Left);
        assert_eq!(selection.to_range(&term(1, 2)).unwrap(), SelectionRange {
            start: location,
            end: location,
            is_block: false
        });
    }
    /// Test adjacent cell selection from left to right.
    ///
    /// 1. [ ][ ]
    /// 2. [ B][ ]
    /// 3. [ B][E ]
    #[test]
    fn between_adjacent_cells_left_to_right() {
        // No cell is fully covered, so the selection resolves to None.
        let mut selection =
            Selection::new(SelectionType::Simple, Point::new(Line(0), Column(0)), Side::Right);
        selection.update(Point::new(Line(0), Column(1)), Side::Left);
        assert_eq!(selection.to_range(&term(1, 2)), None);
    }
    /// Test adjacent cell selection from right to left.
    ///
    /// 1. [ ][ ]
    /// 2. [ ][B ]
    /// 3. [ E][B ]
    #[test]
    fn between_adjacent_cells_right_to_left() {
        let mut selection =
            Selection::new(SelectionType::Simple, Point::new(Line(0), Column(1)), Side::Left);
        selection.update(Point::new(Line(0), Column(0)), Side::Right);
        assert_eq!(selection.to_range(&term(1, 2)), None);
    }
#[rustfmt::skip]
/// Test selection across adjacent lines.
///
/// 1. [ ][ ][ ][ ][ ]
/// [ ][ ][ ][ ][ ]
/// 2. [ ][ B][ ][ ][ ]
/// [ ][ ][ ][ ][ ]
/// 3. [ ][ B][XX][XX][XX]
/// [XX][XE][ ][ ][ ]
#[test]
fn across_adjacent_lines_upward_final_cell_exclusive() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(0), Column(1)), Side::Right);
selection.update(Point::new(Line(1), Column(1)), Side::Right);
assert_eq!(selection.to_range(&term(2, 5)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(2)),
end: Point::new(Line(1), Column(1)),
is_block: false,
});
}
#[rustfmt::skip]
/// Test selection across adjacent lines.
///
/// 1. [ ][ ][ ][ ][ ]
/// [ ][ ][ ][ ][ ]
/// 2. [ ][ ][ ][ ][ ]
/// [ ][ B][ ][ ][ ]
/// 3. [ ][ E][XX][XX][XX]
/// [XX][XB][ ][ ][ ]
/// 4. [ E][XX][XX][XX][XX]
/// [XX][XB][ ][ ][ ]
#[test]
fn selection_bigger_then_smaller() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(1), Column(1)), Side::Right);
selection.update(Point::new(Line(0), Column(1)), Side::Right);
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert_eq!(selection.to_range(&term(2, 5)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(1)),
end: Point::new(Line(1), Column(1)),
is_block: false,
});
}
#[test]
fn line_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Lines, Point::new(Line(9), Column(1)), Side::Left);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(0)),
end: Point::new(Line(5), Column(4)),
is_block: false,
});
}
#[test]
fn semantic_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Semantic, Point::new(Line(9), Column(3)), Side::Left);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(1)),
end: Point::new(Line(5), Column(3)),
is_block: false,
});
}
#[test]
fn simple_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(9), Column(3)), Side::Right);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(2)),
end: Point::new(Line(5), Column(3)),
is_block: false,
});
}
#[test]
fn block_selection() {
let size = (10, 5);
let mut selection =
Selection::new(SelectionType::Block, Point::new(Line(9), Column(3)), Side::Right);
selection.update(Point::new(Line(4), Column(1)), Side::Right);
selection = selection.rotate(&size, &(Line(0)..Line(size.0 as i32)), 4).unwrap();
assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
start: Point::new(Line(0), Column(2)),
end: Point::new(Line(5), Column(3)),
is_block: true
});
}
#[test]
fn simple_is_empty() {
let mut selection =
Selection::new(SelectionType::Simple, Point::new(Line(1), Column(0)), Side::Right);
assert!(selection.is_empty());
selection.update(Point::new(Line(1), Column(1)), Side::Left);
assert!(selection.is_empty());
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert!(!selection.is_empty());
}
#[test]
fn block_is_empty() {
let mut selection =
Selection::new(SelectionType::Block, Point::new(Line(1), Column(0)), Side::Right);
assert!(selection.is_empty());
selection.update(Point::new(Line(1), Column(1)), Side::Left);
assert!(selection.is_empty());
selection.update(Point::new(Line(1), Column(1)), Side::Right);
assert!(!selection.is_empty());
selection.update(Point::new(Line(0), Column(0)), Side::Right);
assert!(selection.is_empty());
selection.update(Point::new(Line(0), Column(1)), Side::Left);
assert!(selection.is_empty());
selection.update(Point::new(Line(0), Column(1)), Side::Right);
assert!(!selection.is_empty());
}
    /// Rotating up inside a scroll region clamps the selection to the
    /// region's top line.
    #[test]
    fn rotate_in_region_up() {
        let size = (10, 5);
        let mut selection =
            Selection::new(SelectionType::Simple, Point::new(Line(7), Column(3)), Side::Right);
        selection.update(Point::new(Line(4), Column(1)), Side::Right);
        selection = selection.rotate(&size, &(Line(1)..Line(size.0 as i32 - 1)), 4).unwrap();
        assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
            start: Point::new(Line(1), Column(0)),
            end: Point::new(Line(3), Column(3)),
            is_block: false,
        });
    }
    /// Rotating down clamps the end to the region's bottom line, expanding
    /// the clamped end to the last column.
    #[test]
    fn rotate_in_region_down() {
        let size = (10, 5);
        let mut selection =
            Selection::new(SelectionType::Simple, Point::new(Line(4), Column(3)), Side::Right);
        selection.update(Point::new(Line(1), Column(1)), Side::Left);
        selection = selection.rotate(&size, &(Line(1)..Line(size.0 as i32 - 1)), -5).unwrap();
        assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
            start: Point::new(Line(6), Column(1)),
            end: Point::new(Line(8), size.last_column()),
            is_block: false,
        });
    }
    /// Block selections keep their columns when clamped by region rotation.
    #[test]
    fn rotate_in_region_up_block() {
        let size = (10, 5);
        let mut selection =
            Selection::new(SelectionType::Block, Point::new(Line(7), Column(3)), Side::Right);
        selection.update(Point::new(Line(4), Column(1)), Side::Right);
        selection = selection.rotate(&size, &(Line(1)..Line(size.0 as i32 - 1)), 4).unwrap();
        assert_eq!(selection.to_range(&term(size.0, size.1)).unwrap(), SelectionRange {
            start: Point::new(Line(1), Column(2)),
            end: Point::new(Line(3), Column(3)),
            is_block: true,
        });
    }
    /// `intersects_range` against a selection spanning lines 3..=6, with
    /// inclusive/exclusive/unbounded bounds on both sides.
    #[test]
    fn range_intersection() {
        let mut selection =
            Selection::new(SelectionType::Lines, Point::new(Line(3), Column(1)), Side::Left);
        selection.update(Point::new(Line(6), Column(1)), Side::Right);
        assert!(selection.intersects_range(..));
        assert!(selection.intersects_range(Line(2)..));
        assert!(selection.intersects_range(Line(2)..=Line(4)));
        assert!(selection.intersects_range(Line(2)..=Line(7)));
        assert!(selection.intersects_range(Line(4)..=Line(5)));
        assert!(selection.intersects_range(Line(5)..Line(8)));
        assert!(!selection.intersects_range(..=Line(2)));
        assert!(!selection.intersects_range(Line(7)..=Line(8)));
    }
}
|
use crate::get_txns_handler::GetTxnsHandler;
use crate::helper::{
do_accumulator_node, do_get_block_by_hash, do_get_hash_by_number, do_state_node,
};
use actix::prelude::*;
use actix::{Actor, Addr, AsyncContext, Context, Handler};
use anyhow::Result;
use bus::{BusActor, Subscription};
use chain::ChainActorRef;
use crypto::hash::HashValue;
use logger::prelude::*;
use network::RawRpcRequestMessage;
use starcoin_accumulator::AccumulatorNode;
use starcoin_canonical_serialization::SCSCodec;
use starcoin_state_tree::StateNode;
use starcoin_storage::Store;
/// Sync messages which are inbound.
use starcoin_sync_api::sync_messages::{
BatchBlockInfo, BatchBodyMsg, BatchHashByNumberMsg, BatchHeaderMsg, BlockBody, DataType,
GetDataByHashMsg, GetHashByNumberMsg, HashWithNumber, SyncRpcRequest,
};
use std::sync::Arc;
use traits::ChainAsyncService;
use traits::Consensus;
use txpool::TxPoolRef;
pub struct ProcessActor<C>
where
C: Consensus + Sync + Send + 'static + Clone,
{
processor: Arc<Processor<C>>,
bus: Addr<BusActor>,
}
impl<C> ProcessActor<C>
where
C: Consensus + Sync + Send + 'static + Clone,
{
pub fn launch(
chain_reader: ChainActorRef<C>,
txpool: TxPoolRef,
bus: Addr<BusActor>,
storage: Arc<dyn Store>,
) -> Result<Addr<ProcessActor<C>>> {
let process_actor = ProcessActor {
processor: Arc::new(Processor::new(chain_reader, txpool, storage)),
bus,
};
Ok(process_actor.start())
}
}
impl<C> Actor for ProcessActor<C>
where
C: Consensus + Sync + Send + 'static + Clone,
{
type Context = Context<Self>;
fn started(&mut self, ctx: &mut Self::Context) {
let rpc_recipient = ctx.address().recipient::<RawRpcRequestMessage>();
self.bus
.send(Subscription {
recipient: rpc_recipient,
})
.into_actor(self)
.then(|_res, act, _ctx| async {}.into_actor(act))
.wait(ctx);
info!("Process actor started");
}
}
impl<C> Handler<RawRpcRequestMessage> for ProcessActor<C>
where
C: Consensus + Sync + Send + 'static + Clone,
{
type Result = Result<()>;
fn handle(&mut self, msg: RawRpcRequestMessage, _ctx: &mut Self::Context) -> Self::Result {
let responder = msg.responder.clone();
let processor = self.processor.clone();
let req = SyncRpcRequest::decode(msg.request.as_slice())?;
Arbiter::spawn(async move {
info!("process req :{:?}", req);
match req {
SyncRpcRequest::GetHashByNumberMsg(get_hash_by_number_msg) => {
let batch_hash_by_number_msg = Processor::handle_get_hash_by_number_msg(
processor.clone(),
get_hash_by_number_msg,
)
.await;
if let Err(e) = do_get_hash_by_number(responder, batch_hash_by_number_msg).await
{
error!("error: {:?}", e);
}
}
SyncRpcRequest::GetDataByHashMsg(get_data_by_hash_msg) => {
match get_data_by_hash_msg.data_type {
DataType::HEADER => {
let batch_header_msg = Processor::handle_get_header_by_hash_msg(
processor.clone(),
get_data_by_hash_msg.clone(),
)
.await;
let batch_body_msg = Processor::handle_get_body_by_hash_msg(
processor.clone(),
get_data_by_hash_msg.clone(),
)
.await;
let batch_block_info_msg =
Processor::handle_get_block_info_by_hash_msg(
processor.clone(),
get_data_by_hash_msg,
)
.await;
debug!(
"batch block size: {} : {} : {}",
batch_header_msg.headers.len(),
batch_body_msg.bodies.len(),
batch_block_info_msg.infos.len()
);
if let Err(e) = do_get_block_by_hash(
responder,
batch_header_msg,
batch_body_msg,
batch_block_info_msg,
)
.await
{
error!("error: {:?}", e);
}
}
_ => {}
}
}
SyncRpcRequest::GetStateNodeByNodeHash(state_node_key) => {
let mut keys = Vec::new();
keys.push(state_node_key);
let mut state_nodes =
Processor::handle_state_node_msg(processor.clone(), keys).await;
if let Some((_, state_node_res)) = state_nodes.pop() {
if let Some(state_node) = state_node_res {
if let Err(e) = do_state_node(responder, state_node).await {
error!("error: {:?}", e);
}
} else {
warn!("{:?}", "state_node is none.");
}
} else {
warn!("{:?}", "state_nodes is none.");
}
}
SyncRpcRequest::GetAccumulatorNodeByNodeHash(accumulator_node_key) => {
let mut keys = Vec::new();
keys.push(accumulator_node_key);
let mut accumulator_nodes =
Processor::handle_accumulator_node_msg(processor.clone(), keys).await;
if let Some((_, accumulator_node_res)) = accumulator_nodes.pop() {
if let Some(accumulator_node) = accumulator_node_res {
if let Err(e) = do_accumulator_node(responder, accumulator_node).await {
error!("error: {:?}", e);
}
} else {
warn!("accumulator_node {:?} is none.", accumulator_node_key);
}
} else {
warn!("{:?}", "accumulator_nodes is none.");
}
}
SyncRpcRequest::GetTxns(msg) => {
let handler = GetTxnsHandler::new(processor.txpool.clone());
let result = handler.handle(responder, msg).await;
if let Err(e) = result {
warn!("handle get txn fail, error: {:?}", e);
}
}
}
});
Ok(())
}
}
/// Process request for syncing block
pub struct Processor<C>
where
C: Consensus + Sync + Send + 'static + Clone,
{
chain_reader: ChainActorRef<C>,
txpool: TxPoolRef,
storage: Arc<dyn Store>,
}
impl<C> Processor<C>
where
C: Consensus + Sync + Send + 'static + Clone,
{
pub fn new(chain_reader: ChainActorRef<C>, txpool: TxPoolRef, storage: Arc<dyn Store>) -> Self {
Processor {
chain_reader,
txpool,
storage,
}
}
pub async fn handle_get_hash_by_number_msg(
processor: Arc<Processor<C>>,
get_hash_by_number_msg: GetHashByNumberMsg,
) -> BatchHashByNumberMsg {
let mut hashs = Vec::new();
for number in get_hash_by_number_msg.numbers {
info!("get block from get_block_by_number with {}", number);
let block = processor
.chain_reader
.clone()
.master_block_by_number(number)
.await;
match block {
Ok(b) => {
debug!(
"block number:{:?}, hash {:?}",
b.header().number(),
b.header().id()
);
let hash_with_number = HashWithNumber {
number: b.header().number(),
hash: b.header().id(),
};
hashs.push(hash_with_number);
}
Err(_) => {
warn!("block is none.");
}
}
}
BatchHashByNumberMsg { hashs }
}
pub async fn handle_get_header_by_hash_msg(
processor: Arc<Processor<C>>,
get_header_by_hash_msg: GetDataByHashMsg,
) -> BatchHeaderMsg {
let mut headers = Vec::new();
for hash in get_header_by_hash_msg.hashs {
let header = processor
.chain_reader
.clone()
.get_header_by_hash(&hash)
.await
.unwrap();
headers.push(header);
}
BatchHeaderMsg { headers }
}
pub async fn handle_get_body_by_hash_msg(
processor: Arc<Processor<C>>,
get_body_by_hash_msg: GetDataByHashMsg,
) -> BatchBodyMsg {
let mut bodies = Vec::new();
for hash in get_body_by_hash_msg.hashs {
let transactions = match processor.chain_reader.clone().get_block_by_hash(hash).await {
Ok(block) => block.transactions().clone().to_vec(),
_ => Vec::new(),
};
let body = BlockBody { transactions, hash };
bodies.push(body);
}
BatchBodyMsg { bodies }
}
pub async fn handle_get_block_info_by_hash_msg(
processor: Arc<Processor<C>>,
get_body_by_hash_msg: GetDataByHashMsg,
) -> BatchBlockInfo {
let mut infos = Vec::new();
for hash in get_body_by_hash_msg.hashs {
if let Some(block_info) = processor
.chain_reader
.clone()
.get_block_info_by_hash(&hash)
.await
{
infos.push(block_info);
}
}
BatchBlockInfo { infos }
}
pub async fn handle_state_node_msg(
processor: Arc<Processor<C>>,
nodes_hash: Vec<HashValue>,
) -> Vec<(HashValue, Option<StateNode>)> {
let mut state_nodes = Vec::new();
nodes_hash
.iter()
.for_each(|node_key| match processor.storage.get(node_key) {
Ok(node) => state_nodes.push((node_key.clone(), node)),
Err(e) => error!("error: {:?}", e),
});
state_nodes
}
pub async fn handle_accumulator_node_msg(
processor: Arc<Processor<C>>,
nodes_hash: Vec<HashValue>,
) -> Vec<(HashValue, Option<AccumulatorNode>)> {
let mut accumulator_nodes = Vec::new();
nodes_hash.iter().for_each(
|node_key| match processor.storage.get_node(node_key.clone()) {
Ok(node) => accumulator_nodes.push((node_key.clone(), node)),
Err(e) => error!("error: {:?}", e),
},
);
accumulator_nodes
}
}
|
use ic_cdk::export::candid::export_service;
use ic_cdk::{caller, trap};
use ic_cdk_macros::{init, query, update};
use ic_event_hub_macros::{
implement_add_event_listeners, implement_event_emitter, implement_get_event_listeners,
implement_remove_event_listeners,
};
use union_utils::log;
use membership_token_client::events::{
ControllerType, ControllersUpdateEvent, MembershipStatus, MembershipStatusUpdateEvent,
};
use membership_token_client::types::{
ControllerList, GetControllersResponse, GetTotalMembersResponse, InitRequest, IsMemberRequest,
IsMemberResponse, IssueRevokeMembershipsRequest, UpdateControllerRequest,
UpdateControllerResponse,
};
use crate::common::guards::{event_listeners_guard, issue_guard, revoke_guard};
use crate::common::membership_token::MembershipToken;
mod common;
// -------------------- MAIN LOGIC ------------------------
#[init]
fn init(request: InitRequest) {
log("membership_token.init()");
let controllers = if let Some(default_controllers) = request.default_controllers {
ControllerList {
issue_controllers: default_controllers.clone(),
revoke_controllers: default_controllers.clone(),
event_listeners_controllers: default_controllers,
}
} else {
ControllerList::single(Some(caller()))
};
let token = MembershipToken::new(controllers);
unsafe {
STATE = Some(token);
}
}
#[update(guard = "issue_guard")]
fn issue_memberships(request: IssueRevokeMembershipsRequest) {
log("membership_token.issue_memberships()");
let token = get_token();
for to in request.principals.into_iter() {
match token.issue_membership(to) {
Ok(_) => emit(MembershipStatusUpdateEvent {
member: to,
new_status: MembershipStatus::Issued,
}),
Err(e) => {
trap(format!("Failed to issue membership on principal {} - {}", to, e).as_str())
}
}
}
}
#[update(guard = "revoke_guard")]
fn revoke_memberships(request: IssueRevokeMembershipsRequest) {
log("membership_token.revoke_memberships()");
let token = get_token();
for from in request.principals.into_iter() {
match token.revoke_membership(from) {
Ok(_) => emit(MembershipStatusUpdateEvent {
member: from,
new_status: MembershipStatus::Revoked,
}),
Err(e) => {
trap(format!("Failed to revoke membership on principal {} - {}", from, e).as_str())
}
}
}
}
#[update]
fn accept_membership() {
log("membership_token.accept_membership()");
let caller = caller();
match get_token().accept_membership(caller) {
Ok(_) => emit(MembershipStatusUpdateEvent {
member: caller,
new_status: MembershipStatus::Accepted,
}),
Err(e) => trap(format!("Failed to accept membership for caller - {}", e).as_str()),
}
}
#[update]
fn decline_membership() {
log("membership_token.decline_membership()");
let caller = caller();
match get_token().decline_membership(caller) {
Ok(_) => emit(MembershipStatusUpdateEvent {
member: caller,
new_status: MembershipStatus::Declined,
}),
Err(e) => trap(format!("Failed to decline membership for caller - {}", e).as_str()),
}
}
#[query]
fn is_member(request: IsMemberRequest) -> IsMemberResponse {
log("membership_token.is_member()");
let is_member = get_token().is_member(&request.prin);
IsMemberResponse { is_member }
}
#[query]
fn is_pending_member(request: IsMemberRequest) -> IsMemberResponse {
log("membership_token.is_pending_member()");
let is_pending_member = get_token().is_pending_member(&request.prin);
IsMemberResponse {
is_member: is_pending_member,
}
}
#[query]
fn get_total_members() -> GetTotalMembersResponse {
log("membership_token.total_members()");
let total_members = get_token().get_total_members() as u64;
GetTotalMembersResponse { total_members }
}
// ------------- GRANULAR CONTROL -----------------
#[update(guard = "issue_guard")]
fn update_issue_controller(request: UpdateControllerRequest) -> UpdateControllerResponse {
log("membership_token.update_issue_controller()");
let old_controller = get_token().update_issue_controllers(request.new_controllers.clone());
emit(ControllersUpdateEvent {
kind: ControllerType::Issue,
new_controllers: request.new_controllers,
});
UpdateControllerResponse {
old_controllers: old_controller,
}
}
#[update(guard = "revoke_guard")]
fn update_revoke_controller(request: UpdateControllerRequest) -> UpdateControllerResponse {
log("membership_token.update_revoke_controller()");
let old_controller = get_token().update_revoke_controllers(request.new_controllers.clone());
emit(ControllersUpdateEvent {
kind: ControllerType::Revoke,
new_controllers: request.new_controllers,
});
UpdateControllerResponse {
old_controllers: old_controller,
}
}
#[update(guard = "event_listeners_guard")]
fn update_event_listeners_controller(request: UpdateControllerRequest) -> UpdateControllerResponse {
log("membership_token.update_event_listeners_controller()");
let old_controller =
get_token().update_event_listeners_controllers(request.new_controllers.clone());
emit(ControllersUpdateEvent {
kind: ControllerType::EventListeners,
new_controllers: request.new_controllers,
});
UpdateControllerResponse {
old_controllers: old_controller,
}
}
#[query]
fn get_controllers() -> GetControllersResponse {
log("membership_token.get_controllers()");
let controllers = get_token().controllers.clone();
GetControllersResponse { controllers }
}
// ------------------ EVENT HUB --------------------
implement_event_emitter!();
implement_add_event_listeners!(guard = "event_listeners_guard");
implement_remove_event_listeners!(guard = "event_listeners_guard");
implement_get_event_listeners!();
// ------------------ STATE ----------------------
export_service!();
#[query(name = "__get_candid_interface_tmp_hack")]
fn export_candid() -> String {
__export_service()
}
static mut STATE: Option<MembershipToken> = None;
pub fn get_token() -> &'static mut MembershipToken {
unsafe { STATE.as_mut().unwrap() }
}
|
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{position_before_rarrow, snippet_opt};
use if_chain::if_chain;
use rustc_ast::ast;
use rustc_ast::visit::FnKind;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::BytePos;
declare_clippy_lint! {
/// ### What it does
/// Checks for unit (`()`) expressions that can be removed.
///
/// ### Why is this bad?
/// Such expressions add no value, but can make the code
/// less readable. Depending on formatting they can make a `break` or `return`
/// statement look like a function call.
///
/// ### Example
/// ```rust
/// fn return_unit() -> () {
/// ()
/// }
/// ```
/// is equivalent to
/// ```rust
/// fn return_unit() {}
/// ```
#[clippy::version = "1.31.0"]
pub UNUSED_UNIT,
style,
"needless unit expression"
}
declare_lint_pass!(UnusedUnit => [UNUSED_UNIT]);
impl EarlyLintPass for UnusedUnit {
fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, span: Span, _: ast::NodeId) {
if_chain! {
if let ast::FnRetTy::Ty(ref ty) = kind.decl().output;
if let ast::TyKind::Tup(ref vals) = ty.kind;
if vals.is_empty() && !ty.span.from_expansion() && get_def(span) == get_def(ty.span);
then {
lint_unneeded_unit_return(cx, ty, span);
}
}
}
fn check_block(&mut self, cx: &EarlyContext<'_>, block: &ast::Block) {
if_chain! {
if let Some(stmt) = block.stmts.last();
if let ast::StmtKind::Expr(ref expr) = stmt.kind;
if is_unit_expr(expr);
let ctxt = block.span.ctxt();
if stmt.span.ctxt() == ctxt && expr.span.ctxt() == ctxt;
then {
let sp = expr.span;
span_lint_and_sugg(
cx,
UNUSED_UNIT,
sp,
"unneeded unit expression",
"remove the final `()`",
String::new(),
Applicability::MachineApplicable,
);
}
}
}
fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
match e.kind {
ast::ExprKind::Ret(Some(ref expr)) | ast::ExprKind::Break(_, Some(ref expr)) => {
if is_unit_expr(expr) && !expr.span.from_expansion() {
span_lint_and_sugg(
cx,
UNUSED_UNIT,
expr.span,
"unneeded `()`",
"remove the `()`",
String::new(),
Applicability::MachineApplicable,
);
}
},
_ => (),
}
}
fn check_poly_trait_ref(&mut self, cx: &EarlyContext<'_>, poly: &ast::PolyTraitRef, _: &ast::TraitBoundModifier) {
let segments = &poly.trait_ref.path.segments;
if_chain! {
if segments.len() == 1;
if ["Fn", "FnMut", "FnOnce"].contains(&&*segments[0].ident.name.as_str());
if let Some(args) = &segments[0].args;
if let ast::GenericArgs::Parenthesized(generic_args) = &**args;
if let ast::FnRetTy::Ty(ty) = &generic_args.output;
if ty.kind.is_unit();
then {
lint_unneeded_unit_return(cx, ty, generic_args.span);
}
}
}
}
// get the def site
#[must_use]
fn get_def(span: Span) -> Option<Span> {
if span.from_expansion() {
Some(span.ctxt().outer_expn_data().def_site)
} else {
None
}
}
// is this expr a `()` unit?
fn is_unit_expr(expr: &ast::Expr) -> bool {
if let ast::ExprKind::Tup(ref vals) = expr.kind {
vals.is_empty()
} else {
false
}
}
fn lint_unneeded_unit_return(cx: &EarlyContext<'_>, ty: &ast::Ty, span: Span) {
let (ret_span, appl) =
snippet_opt(cx, span.with_hi(ty.span.hi())).map_or((ty.span, Applicability::MaybeIncorrect), |fn_source| {
position_before_rarrow(&fn_source).map_or((ty.span, Applicability::MaybeIncorrect), |rpos| {
(
#[allow(clippy::cast_possible_truncation)]
ty.span.with_lo(BytePos(span.lo().0 + rpos as u32)),
Applicability::MachineApplicable,
)
})
});
span_lint_and_sugg(
cx,
UNUSED_UNIT,
ret_span,
"unneeded unit return type",
"remove the `-> ()`",
String::new(),
appl,
);
}
|
// Copyright (c) 2020 Sam Blenny
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
#![forbid(unsafe_code)]
//! Export v1 api names. The point of using re-exports is to allow for splitting
//! the crate implementation into relatively small modules that are easy to
//! refactor without breaking the public api.
// Re-export names from modules into the v1 namespace
pub use crate::blit::{clear_region, paint_str};
pub use crate::cliprect::ClipRect;
pub use crate::cursor::Cursor;
pub use crate::demo;
pub use crate::framebuffer::{new_fr_buf, FrBuf, FRAME_BUF_SIZE, LINES, WIDTH, WORDS_PER_LINE};
pub use crate::glyphstyle::{glyph_to_height_hint, GlyphStyle};
pub use crate::pt::Pt;
/// These tests aim to cover all names exported in the v1 api
#[cfg(test)]
mod tests {
use super::*;
use crate::m3hash;
#[test]
fn test_api_v1_blit() {
let fb = &mut new_fr_buf();
let clip = ClipRect::full_screen();
clear_region(fb, clip);
let cursor = &mut Cursor::from_top_left_of(clip);
paint_str(fb, clip, cursor, GlyphStyle::Regular, "abc");
assert_eq!(m3hash::frame_buffer(fb, 0), 0x529828DB);
}
#[test]
fn test_api_v1_cliprect() {
let cr1 = ClipRect {
min: Pt { x: 1, y: 2 },
max: Pt { x: 3, y: 4 },
};
assert_eq!(cr1, ClipRect::new(1, 2, 3, 4));
assert_ne!(ClipRect::full_screen(), ClipRect::padded_screen());
}
#[test]
fn test_api_v1_cursor() {
let c1 = Cursor {
pt: Pt { x: 1, y: 2 },
line_height: 0,
};
assert_eq!(c1, Cursor::new(1, 2, 0));
let clip = ClipRect::new(1, 2, 3, 4);
let c2 = Cursor::from_top_left_of(clip);
assert_eq!(c1.line_height, c2.line_height);
}
#[test]
fn test_api_v1_demo() {
let fb = &mut new_fr_buf();
demo::sample_text(fb);
assert_eq!(m3hash::frame_buffer(fb, 0), 0x59AA26A1);
demo::short_greeting(fb);
assert_eq!(m3hash::frame_buffer(fb, 0), 0x7AC16AC8);
demo::goose_poem(fb);
assert_eq!(m3hash::frame_buffer(fb, 0), 0x9bd28a96);
}
#[test]
fn test_api_v1_framebuffer() {
assert_eq!(LINES * WORDS_PER_LINE, FRAME_BUF_SIZE);
assert!(LINES > 0);
assert!(WIDTH > 0);
let fb: FrBuf = new_fr_buf();
assert!(fb.len() > 0);
}
#[test]
fn test_api_v1_glyphstyle() {
let s: usize = GlyphStyle::Small.into();
let r: usize = GlyphStyle::Regular.into();
let b: usize = GlyphStyle::Bold.into();
assert_eq!(GlyphStyle::Small, GlyphStyle::from(s));
assert_eq!(GlyphStyle::Regular, GlyphStyle::from(r));
assert_eq!(GlyphStyle::Bold, GlyphStyle::from(b));
assert_eq!(24, glyph_to_height_hint(GlyphStyle::Small));
assert_eq!(30, glyph_to_height_hint(GlyphStyle::Regular));
assert_eq!(30, glyph_to_height_hint(GlyphStyle::Bold));
}
#[test]
fn test_api_v1_pt() {
let p1 = Pt { x: 1, y: 2 };
let p2 = Pt::new(1, 2);
assert_eq!(p1, p2);
}
}
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use day_04::{self, INPUT};
fn criterion_benchmark(c: &mut Criterion) {
let pairs = day_04::parse_input(INPUT).unwrap();
c.bench_function("day_04::parse_input", |b| {
b.iter(|| day_04::parse_input(black_box(INPUT)));
});
c.bench_function("day_04::part_one", |b| {
b.iter(|| day_04::part_one(black_box(&pairs)));
});
c.bench_function("day_04::part_two", |b| {
b.iter(|| day_04::part_two(black_box(&pairs)));
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
// Copyright 2019 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::{anyhow, bail, Context, Result};
use bytes::Buf;
use cpio::{write_cpio, NewcBuilder, NewcReader};
use nix::unistd::isatty;
use openat_ext::FileExt;
use serde::{Deserialize, Serialize};
use std::convert::TryInto;
use std::fs::{read, write, File, OpenOptions};
use std::io::{self, copy, BufReader, BufWriter, Cursor, Read, Seek, SeekFrom, Write};
use std::iter::repeat;
use std::os::unix::io::AsRawFd;
use std::path::Path;
use crate::cmdline::*;
use crate::install::*;
use crate::io::*;
use crate::iso9660::{self, IsoFs};
const FILENAME: &str = "config.ign";
const COREOS_IGNITION_EMBED_PATH: &str = "IMAGES/IGNITION.IMG";
const COREOS_IGNITION_HEADER_SIZE: u64 = 24;
const COREOS_KARG_EMBED_AREA_HEADER_MAGIC: &[u8] = b"coreKarg";
const COREOS_KARG_EMBED_AREA_HEADER_SIZE: u64 = 72;
const COREOS_KARG_EMBED_AREA_HEADER_MAX_OFFSETS: usize = 6;
const COREOS_KARG_EMBED_AREA_MAX_SIZE: usize = 2048;
const COREOS_KARG_EMBED_INFO_PATH: &str = "COREOS/KARGS.JSO";
const COREOS_ISO_PXEBOOT_DIR: &str = "IMAGES/PXEBOOT";
pub fn iso_embed(config: &IsoEmbedConfig) -> Result<()> {
eprintln!("`iso embed` is deprecated; use `iso ignition embed`. Continuing.");
iso_ignition_embed(&IsoIgnitionEmbedConfig {
force: config.force,
ignition_file: config.config.clone(),
output: config.output.clone(),
input: config.input.clone(),
})
}
pub fn iso_show(config: &IsoShowConfig) -> Result<()> {
eprintln!("`iso show` is deprecated; use `iso ignition show`. Continuing.");
iso_ignition_show(&IsoIgnitionShowConfig {
input: config.input.clone(),
header: false,
})
}
pub fn iso_remove(config: &IsoRemoveConfig) -> Result<()> {
eprintln!("`iso remove` is deprecated; use `iso ignition remove`. Continuing.");
iso_ignition_remove(&IsoIgnitionRemoveConfig {
output: config.output.clone(),
input: config.input.clone(),
})
}
pub fn iso_ignition_embed(config: &IsoIgnitionEmbedConfig) -> Result<()> {
let ignition = match config.ignition_file {
Some(ref ignition_path) => {
read(ignition_path).with_context(|| format!("reading {}", ignition_path))?
}
None => {
let mut data = Vec::new();
io::stdin()
.lock()
.read_to_end(&mut data)
.context("reading stdin")?;
data
}
};
let mut iso_file = open_live_iso(&config.input, Some(config.output.as_ref()))?;
let mut iso = IsoConfig::for_file(&mut iso_file)?;
if !config.force && iso.have_ignition() {
bail!("This ISO image already has an embedded Ignition config; use -f to force.");
}
let cpio = make_cpio(&ignition)?;
iso.set_ignition(&cpio)?;
write_live_iso(&iso, &mut iso_file, config.output.as_ref())
}
pub fn iso_ignition_show(config: &IsoIgnitionShowConfig) -> Result<()> {
let mut iso_file = open_live_iso(&config.input, None)?;
let iso = IsoConfig::for_file(&mut iso_file)?;
let stdout = io::stdout();
let mut out = stdout.lock();
if config.header {
serde_json::to_writer_pretty(&mut out, &iso.ignition)
.context("failed to serialize header")?;
out.write_all(b"\n").context("failed to write newline")?;
} else {
if !iso.have_ignition() {
bail!("No embedded Ignition config.");
}
out.write_all(&extract_cpio(iso.ignition())?)
.context("writing output")?;
out.flush().context("flushing output")?;
}
Ok(())
}
pub fn iso_ignition_remove(config: &IsoIgnitionRemoveConfig) -> Result<()> {
let mut iso_file = open_live_iso(&config.input, Some(config.output.as_ref()))?;
let mut iso = IsoConfig::for_file(&mut iso_file)?;
iso.set_ignition(&[])?;
write_live_iso(&iso, &mut iso_file, config.output.as_ref())
}
pub fn pxe_ignition_wrap(config: &PxeIgnitionWrapConfig) -> Result<()> {
if config.output.is_none()
&& isatty(io::stdout().as_raw_fd()).context("checking if stdout is a TTY")?
{
bail!("Refusing to write binary data to terminal");
}
let ignition = match config.ignition_file {
Some(ref ignition_path) => {
read(ignition_path).with_context(|| format!("reading {}", ignition_path))?
}
None => {
let mut data = Vec::new();
io::stdin()
.lock()
.read_to_end(&mut data)
.context("reading stdin")?;
data
}
};
let cpio = make_cpio(&ignition)?;
match &config.output {
Some(output_path) => {
write(output_path, cpio).with_context(|| format!("writing {}", output_path))?
}
None => {
let stdout = io::stdout();
let mut out = stdout.lock();
out.write_all(&cpio).context("writing output")?;
out.flush().context("flushing output")?;
}
}
Ok(())
}
pub fn pxe_ignition_unwrap(config: &PxeIgnitionUnwrapConfig) -> Result<()> {
let buf = read(&config.input).with_context(|| format!("reading {}", config.input))?;
let stdout = io::stdout();
let mut out = stdout.lock();
out.write_all(&extract_cpio(&buf)?)
.context("writing output")?;
out.flush().context("flushing output")?;
Ok(())
}
pub fn iso_kargs_modify(config: &IsoKargsModifyConfig) -> Result<()> {
let mut iso_file = open_live_iso(&config.input, Some(config.output.as_ref()))?;
let mut iso = IsoConfig::for_file(&mut iso_file)?;
let kargs = modify_kargs(
iso.kargs()?,
&config.append,
&[],
&config.replace,
&config.delete,
)?;
iso.set_kargs(&kargs)?;
write_live_iso(&iso, &mut iso_file, config.output.as_ref())
}
pub fn iso_kargs_reset(config: &IsoKargsResetConfig) -> Result<()> {
let mut iso_file = open_live_iso(&config.input, Some(config.output.as_ref()))?;
let mut iso = IsoConfig::for_file(&mut iso_file)?;
iso.set_kargs(&iso.kargs_default()?.to_string())?;
write_live_iso(&iso, &mut iso_file, config.output.as_ref())
}
pub fn iso_kargs_show(config: &IsoKargsShowConfig) -> Result<()> {
let mut iso_file = open_live_iso(&config.input, None)?;
let iso = IsoConfig::for_file(&mut iso_file)?;
if config.header {
let stdout = io::stdout();
let mut out = stdout.lock();
serde_json::to_writer_pretty(&mut out, &iso.kargs).context("failed to serialize header")?;
out.write_all(b"\n").context("failed to write newline")?;
} else {
let kargs = if config.default {
iso.kargs_default()?
} else {
iso.kargs()?
};
println!("{}", kargs);
}
Ok(())
}
// output_path should be None if not outputting, or Some(output_path_argument)
fn open_live_iso(input_path: &str, output_path: Option<Option<&String>>) -> Result<File> {
// if output_path is Some(None), we're modifying in place, so we need to
// open for writing
OpenOptions::new()
.read(true)
.write(matches!(output_path, Some(None)))
.open(&input_path)
.with_context(|| format!("opening {}", &input_path))
}
fn write_live_iso(iso: &IsoConfig, input: &mut File, output_path: Option<&String>) -> Result<()> {
match output_path.map(|v| v.as_str()) {
None => {
// open_live_iso() opened input for writing
iso.write(input)?;
}
Some("-") => {
if isatty(io::stdout().as_raw_fd()).context("checking if stdout is a TTY")? {
bail!("Refusing to write binary data to terminal");
}
iso.stream(input, &mut io::stdout().lock())?;
}
Some(output_path) => {
let output_dir = Path::new(output_path)
.parent()
.with_context(|| format!("no parent directory of {}", output_path))?;
let mut output = tempfile::Builder::new()
.prefix(".coreos-installer-temp-")
.tempfile_in(output_dir)
.context("creating temporary file")?;
input.seek(SeekFrom::Start(0)).context("seeking input")?;
input
.copy_to(output.as_file_mut())
.context("copying input to temporary file")?;
iso.write(output.as_file_mut())?;
output
.persist_noclobber(&output_path)
.map_err(|e| e.error)
.with_context(|| format!("persisting output file to {}", output_path))?;
}
}
Ok(())
}
struct IsoConfig {
ignition: Region,
kargs: Option<KargEmbedAreas>,
}
impl IsoConfig {
pub fn for_file(file: &mut File) -> Result<Self> {
let mut iso = IsoFs::from_file(file.try_clone().context("cloning file")?)
.context("parsing ISO9660 image")?;
Ok(Self {
ignition: ignition_embed_area(&mut iso)?,
kargs: KargEmbedAreas::for_iso(&mut iso)?,
})
}
pub fn have_ignition(&self) -> bool {
self.ignition().iter().any(|v| *v != 0)
}
pub fn ignition(&self) -> &[u8] {
&self.ignition.contents[..]
}
pub fn set_ignition(&mut self, data: &[u8]) -> Result<()> {
let capacity = self.ignition.length;
if data.len() > capacity {
bail!(
"Compressed Ignition config is too large: {} > {}",
data.len(),
capacity
)
}
self.ignition.contents.clear();
self.ignition.contents.extend_from_slice(data);
self.ignition
.contents
.extend(repeat(0).take(capacity - data.len()));
self.ignition.modified = true;
Ok(())
}
pub fn kargs(&self) -> Result<&str> {
Ok(self.unwrap_kargs()?.kargs())
}
pub fn kargs_default(&self) -> Result<&str> {
Ok(self.unwrap_kargs()?.kargs_default())
}
pub fn set_kargs(&mut self, kargs: &str) -> Result<()> {
self.unwrap_kargs_mut()?.set_kargs(kargs)
}
fn unwrap_kargs(&self) -> Result<&KargEmbedAreas> {
self.kargs
.as_ref()
.ok_or_else(|| anyhow!("No karg embed areas found; old or corrupted CoreOS ISO image."))
}
fn unwrap_kargs_mut(&mut self) -> Result<&mut KargEmbedAreas> {
self.kargs
.as_mut()
.ok_or_else(|| anyhow!("No karg embed areas found; old or corrupted CoreOS ISO image."))
}
pub fn write(&self, file: &mut File) -> Result<()> {
self.ignition.write(file)?;
if let Some(kargs) = &self.kargs {
kargs.write(file)?;
}
Ok(())
}
pub fn stream(&self, input: &mut File, writer: &mut (impl Write + ?Sized)) -> Result<()> {
let mut regions = vec![&self.ignition];
if let Some(kargs) = &self.kargs {
regions.extend(kargs.regions.iter())
}
regions.stream(input, writer)
}
}
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Serialize)]
struct Region {
// sort order is derived from field order
pub offset: u64,
pub length: usize,
#[serde(skip_serializing)]
pub contents: Vec<u8>,
#[serde(skip_serializing)]
pub modified: bool,
}
impl Region {
pub fn read(file: &mut File, offset: u64, length: usize) -> Result<Self> {
let mut contents = vec![0; length];
file.seek(SeekFrom::Start(offset))
.with_context(|| format!("seeking to offset {}", offset))?;
file.read_exact(&mut contents)
.with_context(|| format!("reading {} bytes at {}", length, offset))?;
Ok(Self {
offset,
length,
contents,
modified: false,
})
}
pub fn write(&self, file: &mut File) -> Result<()> {
self.validate()?;
if self.modified {
file.seek(SeekFrom::Start(self.offset))
.with_context(|| format!("seeking to offset {}", self.offset))?;
file.write_all(&self.contents)
.with_context(|| format!("writing {} bytes at {}", self.length, self.offset))?;
}
Ok(())
}
pub fn validate(&self) -> Result<()> {
if self.length != self.contents.len() {
bail!(
"expected region contents length {}, found {}",
self.length,
self.contents.len()
);
}
Ok(())
}
}
trait Stream {
fn stream(&self, input: &mut File, writer: &mut (impl Write + ?Sized)) -> Result<()>;
}
impl Stream for [&Region] {
fn stream(&self, input: &mut File, writer: &mut (impl Write + ?Sized)) -> Result<()> {
input.seek(SeekFrom::Start(0)).context("seeking to start")?;
let mut regions: Vec<&&Region> = self.iter().filter(|r| r.modified).collect();
regions.sort_unstable();
let mut buf = [0u8; BUFFER_SIZE];
let mut cursor: u64 = 0;
// validate regions
for region in ®ions {
region.validate()?;
if region.offset < cursor {
bail!(
"region starting at {} precedes current offset {}",
region.offset,
cursor
);
}
cursor = region.offset + region.length as u64;
}
// write regions
cursor = 0;
for region in ®ions {
assert!(region.offset >= cursor);
copy_exactly_n(input, writer, region.offset - cursor, &mut buf)
.with_context(|| format!("copying bytes from {} to {}", cursor, region.offset))?;
writer.write_all(®ion.contents).with_context(|| {
format!(
"writing region for {} at offset {}",
region.length, region.offset
)
})?;
cursor = input
.seek(SeekFrom::Current(region.length as i64))
.with_context(|| format!("seeking region length {}", region.length))?;
}
// write the remainder
let mut write_buf = BufWriter::with_capacity(BUFFER_SIZE, writer);
copy(
&mut BufReader::with_capacity(BUFFER_SIZE, input),
&mut write_buf,
)
.context("copying file")?;
write_buf.flush().context("flushing output")?;
Ok(())
}
}
#[derive(Serialize)]
struct KargEmbedAreas {
length: usize,
default: String,
#[serde(rename = "kargs")]
regions: Vec<Region>,
#[serde(skip_serializing)]
args: String,
}
/// Deserialized karg embed-area descriptor JSON shipped inside the ISO.
#[derive(Deserialize)]
struct KargEmbedInfo {
    // default kernel arguments
    default: String,
    // files in the ISO that contain karg embed areas
    files: Vec<KargEmbedLocation>,
    // size of each embed area, in bytes
    size: usize,
}

/// One embed area: a path inside the ISO plus a byte offset into that file.
#[derive(Deserialize)]
struct KargEmbedLocation {
    path: String,
    offset: u64,
}
impl KargEmbedAreas {
    // Return Ok(None) if no kargs embed areas exist.
    //
    // Prefers the JSON descriptor stored in the ISO filesystem; falls back
    // to the legacy fixed-layout header in the ISO 9660 System Area for
    // older images.
    pub fn for_iso(iso: &mut IsoFs) -> Result<Option<Self>> {
        let iso_file = match iso.get_path(COREOS_KARG_EMBED_INFO_PATH) {
            Ok(record) => record.try_into_file()?,
            // old ISO without info JSON
            Err(e) if e.is::<iso9660::NotFound>() => {
                return Self::for_file_via_system_area(iso.as_file()?)
            }
            Err(e) => return Err(e),
        };
        let info: KargEmbedInfo = serde_json::from_reader(
            iso.read_file(&iso_file)
                .context("reading kargs embed area info")?,
        )
        .context("decoding kargs embed area info")?;
        // sanity-check size against a reasonable limit
        if info.size > COREOS_KARG_EMBED_AREA_MAX_SIZE {
            bail!(
                "karg embed area size larger than {} (found {})",
                COREOS_KARG_EMBED_AREA_MAX_SIZE,
                info.size
            );
        }
        // the default kargs must fit in every embed area
        if info.default.len() > info.size {
            bail!(
                "default kargs size {} larger than embed areas ({})",
                info.default.len(),
                info.size
            );
        }
        // writable regions
        let mut regions = Vec::new();
        for loc in info.files {
            let iso_file = iso
                .get_path(&loc.path.to_uppercase())
                .with_context(|| format!("looking up '{}'", loc.path))?
                .try_into_file()?;
            // we rely on Region::read() to verify that the offset/length
            // pair is in bounds
            regions.push(
                Region::read(
                    iso.as_file()?,
                    iso_file.address.as_offset() + loc.offset,
                    info.size,
                )
                .context("reading kargs embed area")?,
            );
        }
        regions.sort_unstable_by_key(|r| r.offset);
        Some(Self::build(info.size, info.default, regions)).transpose()
    }

    // Legacy discovery path: parse the fixed-layout "coreKarg" header that
    // lives in the ISO 9660 System Area.  Returns Ok(None) when the magic
    // string is absent.
    fn for_file_via_system_area(file: &mut File) -> Result<Option<Self>> {
        // The ISO 9660 System Area is 32 KiB. Karg embed area information is located in the 72 bytes
        // before the initrd embed area (see EmbedArea below):
        // 8 bytes: magic string "coreKarg"
        // 8 bytes little-endian: length of karg embed areas
        // 8 bytes little-endian: offset to default kargs
        // 8 bytes little-endian x 6: offsets to karg embed areas
        let region = Region::read(
            file,
            32768 - COREOS_IGNITION_HEADER_SIZE - COREOS_KARG_EMBED_AREA_HEADER_SIZE,
            COREOS_KARG_EMBED_AREA_HEADER_SIZE as usize,
        )
        .context("reading karg embed header")?;
        let mut header = &region.contents[..];
        // magic number
        if header.copy_to_bytes(8) != COREOS_KARG_EMBED_AREA_HEADER_MAGIC {
            return Ok(None);
        }
        // length
        let length: usize = header
            .get_u64_le()
            .try_into()
            .context("karg embed area length too large to allocate")?;
        // sanity-check against a reasonable limit
        if length > COREOS_KARG_EMBED_AREA_MAX_SIZE {
            bail!(
                "karg embed area length larger than {} (found {})",
                COREOS_KARG_EMBED_AREA_MAX_SIZE,
                length
            );
        }
        // we rely on Region::read() to verify that offset/length pairs are
        // in bounds
        // default kargs
        let offset = header.get_u64_le();
        let default_region = Region::read(file, offset, length).context("reading default kargs")?;
        let default = Self::parse(&default_region)?;
        // writable regions
        let mut regions = Vec::new();
        while regions.len() < COREOS_KARG_EMBED_AREA_HEADER_MAX_OFFSETS {
            let offset = header.get_u64_le();
            // a zero offset terminates the offset table
            if offset == 0 {
                break;
            }
            regions.push(Region::read(file, offset, length).context("reading kargs embed area")?);
        }
        Some(Self::build(length, default, regions)).transpose()
    }

    // Assemble the struct, verifying that every embed area currently holds
    // identical kargs.
    fn build(length: usize, default: String, regions: Vec<Region>) -> Result<Self> {
        // we expect at least one region
        if regions.is_empty() {
            bail!("No karg embed areas found; corrupted CoreOS ISO image.");
        }
        // parse kargs and verify that all the offsets have the same arguments
        let args = Self::parse(&regions[0])?;
        for region in regions.iter().skip(1) {
            let current_args = Self::parse(region)?;
            if current_args != args {
                bail!(
                    "kargs don't match at all offsets! (expected '{}', but offset {} has: '{}')",
                    args,
                    region.offset,
                    current_args
                );
            }
        }
        Ok(Self {
            length,
            default,
            regions,
            args,
        })
    }

    // Decode one embed area: UTF-8 text, right-padded with '#', surrounding
    // whitespace trimmed.
    fn parse(region: &Region) -> Result<String> {
        Ok(String::from_utf8(region.contents.clone())
            .context("invalid UTF-8 in karg area")?
            .trim_end_matches('#')
            .trim()
            .into())
    }

    /// The default (shipped) kernel arguments.
    pub fn kargs_default(&self) -> &str {
        &self.default
    }

    /// The currently embedded kernel arguments.
    pub fn kargs(&self) -> &str {
        &self.args
    }

    // Replace the kargs in every embed area in memory: newline-terminated,
    // '#'-padded to the full area length, and mark the regions modified so a
    // later write() flushes them to disk.
    pub fn set_kargs(&mut self, kargs: &str) -> Result<()> {
        let unformatted = kargs.trim();
        let formatted = unformatted.to_string() + "\n";
        if formatted.len() > self.length {
            bail!(
                "kargs too large for area: {} vs {}",
                formatted.len(),
                self.length
            );
        }
        let mut contents = vec![b'#'; self.length];
        contents[..formatted.len()].copy_from_slice(formatted.as_bytes());
        for region in &mut self.regions {
            region.contents = contents.clone();
            region.modified = true;
        }
        self.args = unformatted.to_string();
        Ok(())
    }

    // Write every embed area back to the underlying file.
    pub fn write(&self, file: &mut File) -> Result<()> {
        for region in &self.regions {
            region.write(file)?;
        }
        Ok(())
    }
}
// Locate the Ignition embed area inside the ISO filesystem and load it as a
// Region.
fn ignition_embed_area(iso: &mut IsoFs) -> Result<Region> {
    let f = iso
        .get_path(COREOS_IGNITION_EMBED_PATH)
        .context("finding Ignition embed area")?
        .try_into_file()?;
    // read (checks offset/length as a side effect)
    Region::read(iso.as_file()?, f.address.as_offset(), f.length as usize)
        .context("reading Ignition embed area")
}
/// Make an XZ-compressed CPIO archive containing the specified Ignition
/// config.  (Note: the compression here is XZ, not gzip — see the
/// `XzEncoder` below; gzip was only used by older versions, handled on the
/// extraction side.)
fn make_cpio(ignition: &[u8]) -> Result<Vec<u8>> {
    use xz2::stream::{Check, Stream};
    use xz2::write::XzEncoder;
    let mut result = Cursor::new(Vec::new());
    // kernel requires CRC32: https://www.kernel.org/doc/Documentation/xz.txt
    let encoder = XzEncoder::new_stream(
        &mut result,
        Stream::new_easy_encoder(9, Check::Crc32).context("creating XZ encoder")?,
    );
    let mut input_files = vec![(
        // S_IFREG | 0644
        NewcBuilder::new(FILENAME).mode(0o100_644),
        Cursor::new(ignition),
    )];
    write_cpio(input_files.drain(..), encoder).context("writing CPIO archive")?;
    Ok(result.into_inner())
}
/// Extract a compressed CPIO archive and return the contents of the Ignition
/// config.
fn extract_cpio(buf: &[u8]) -> Result<Vec<u8>> {
    // older versions of this program, and its predecessor, compressed
    // with gzip
    // (DecompressReader presumably detects the compression format itself —
    // confirm against its implementation)
    let mut decompressor = DecompressReader::new(BufReader::new(buf))?;
    // walk CPIO entries until we find the config or hit the trailer
    loop {
        let mut reader = NewcReader::new(decompressor).context("reading CPIO entry")?;
        let entry = reader.entry();
        if entry.is_trailer() {
            bail!("couldn't find Ignition config in archive");
        }
        if entry.name() == FILENAME {
            let mut result = Vec::with_capacity(entry.file_size() as usize);
            reader
                .read_to_end(&mut result)
                .context("reading CPIO entry contents")?;
            return Ok(result);
        }
        // skip this entry and recover the decompressor for the next one
        decompressor = reader.finish().context("finishing reading CPIO entry")?;
    }
}
/// JSON-serializable inspection report: the ISO filesystem header plus the
/// paths of all directory records.
#[derive(Serialize)]
struct IsoInspectOutput {
    header: IsoFs,
    records: Vec<String>,
}
/// Walk the ISO filesystem and dump its header and record paths to stdout
/// as pretty-printed JSON.
pub fn iso_inspect(config: &IsoInspectConfig) -> Result<()> {
    let mut iso = IsoFs::from_file(open_live_iso(&config.input, None)?)?;
    // collect all record paths, propagating the first walk error
    let records = iso
        .walk()?
        .map(|entry| entry.map(|(path, _record)| path))
        .collect::<Result<Vec<String>>>()
        .context("while walking ISO filesystem")?;
    let report = IsoInspectOutput {
        header: iso,
        records,
    };
    let stdout = io::stdout();
    let mut handle = stdout.lock();
    serde_json::to_writer_pretty(&mut handle, &report)
        .context("failed to serialize ISO metadata")?;
    handle.write_all(b"\n").context("failed to write newline")?;
    Ok(())
}
// Extract the PXE boot artifacts from the ISO into the output directory.
// Each output file is named "<iso-basename>-<artifact>".
pub fn iso_extract_pxe(config: &IsoExtractPxeConfig) -> Result<()> {
    let mut iso = IsoFs::from_file(open_live_iso(&config.input, None)?)?;
    let pxeboot = iso.get_path(COREOS_ISO_PXEBOOT_DIR)?.try_into_dir()?;
    std::fs::create_dir_all(&config.output_dir)?;
    // filename prefix derived from the ISO's basename
    let base = {
        // this can't be None since we successfully opened the live ISO at the location
        let mut s = Path::new(&config.input).file_stem().unwrap().to_os_string();
        s.push("-");
        s
    };
    for record in iso.list_dir(&pxeboot)? {
        match record? {
            // subdirectories are skipped; only files are extracted
            iso9660::DirectoryRecord::Directory(_) => continue,
            iso9660::DirectoryRecord::File(file) => {
                let filename = {
                    let mut s = base.clone();
                    s.push(file.name.to_lowercase());
                    s
                };
                let path = Path::new(&config.output_dir).join(&filename);
                // report each extracted path on stdout
                println!("{}", path.display());
                copy_file_from_iso(&mut iso, &file, &path)?;
            }
        }
    }
    Ok(())
}
// Copy a single file out of the ISO to `output_path`, buffered.
// Fails if the destination already exists (`create_new`).
fn copy_file_from_iso(iso: &mut IsoFs, file: &iso9660::File, output_path: &Path) -> Result<()> {
    let mut outf = OpenOptions::new()
        .write(true)
        .create_new(true)
        .open(&output_path)
        .with_context(|| format!("opening {}", output_path.display()))?;
    let mut bufw = BufWriter::with_capacity(BUFFER_SIZE, &mut outf);
    copy(&mut iso.read_file(file)?, &mut bufw)?;
    bufw.flush()?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::copy;
    use tempfile::tempfile;
    use xz2::read::XzDecoder;

    // Decompress the checked-in fixture ISO into an anonymous temp file.
    fn open_iso_file() -> File {
        let iso_bytes: &[u8] = include_bytes!("../fixtures/iso/embed-areas-2021-09.iso.xz");
        let mut decoder = XzDecoder::new(iso_bytes);
        let mut iso_file = tempfile().unwrap();
        copy(&mut decoder, &mut iso_file).unwrap();
        iso_file
    }

    #[test]
    fn test_ignition_embed_area() {
        let mut iso_file = open_iso_file();
        // normal read
        let mut iso = IsoFs::from_file(iso_file.try_clone().unwrap()).unwrap();
        let region = ignition_embed_area(&mut iso).unwrap();
        assert_eq!(region.offset, 102400);
        assert_eq!(region.length, 262144);
        // missing embed area
        // (the overwritten byte presumably corrupts the embed file's
        // directory record so the lookup fails — offset is fixture-specific)
        iso_file.seek(SeekFrom::Start(65903)).unwrap();
        iso_file.write_all(b"Z").unwrap();
        let mut iso = IsoFs::from_file(iso_file).unwrap();
        ignition_embed_area(&mut iso).unwrap_err();
    }

    #[test]
    fn test_karg_embed_area() {
        let mut iso_file = open_iso_file();
        // normal read
        check_karg_embed_areas(&mut iso_file);
        // JSON only (zero out the legacy "coreKarg" magic at 32672)
        iso_file.seek(SeekFrom::Start(32672)).unwrap();
        iso_file.write_all(&[0; 8]).unwrap();
        check_karg_embed_areas(&mut iso_file);
        // legacy header only (restore the magic, corrupt the JSON lookup)
        iso_file.seek(SeekFrom::Start(32672)).unwrap();
        iso_file.write_all(b"coreKarg").unwrap();
        iso_file.seek(SeekFrom::Start(63725)).unwrap();
        iso_file.write_all(b"Z").unwrap();
        check_karg_embed_areas(&mut iso_file);
        // neither header
        iso_file.seek(SeekFrom::Start(32672)).unwrap();
        iso_file.write_all(&[0; 8]).unwrap();
        let mut iso = IsoFs::from_file(iso_file).unwrap();
        assert!(KargEmbedAreas::for_iso(&mut iso).unwrap().is_none());
    }

    // Both discovery paths must report the same fixture layout.
    fn check_karg_embed_areas(iso_file: &mut File) {
        let iso_file = iso_file.try_clone().unwrap();
        let mut iso = IsoFs::from_file(iso_file).unwrap();
        let areas = KargEmbedAreas::for_iso(&mut iso).unwrap().unwrap();
        assert_eq!(areas.length, 1139);
        assert_eq!(areas.default, "mitigations=auto,nosmt coreos.liveiso=fedora-coreos-34.20210921.dev.0 ignition.firstboot ignition.platform.id=metal");
        assert_eq!(areas.regions.len(), 2);
        assert_eq!(areas.regions[0].offset, 98126);
        assert_eq!(areas.regions[0].length, 1139);
        assert_eq!(areas.regions[1].offset, 371658);
        assert_eq!(areas.regions[1].length, 1139);
    }

    #[test]
    fn test_cpio_roundtrip() {
        let input = r#"{}"#;
        let cpio = make_cpio(input.as_bytes()).unwrap();
        let output = extract_cpio(&cpio).unwrap();
        assert_eq!(input.as_bytes(), output.as_slice());
    }
}
|
use super::{InlineObject, InlineObjectTrait};
use crate::{
channel::ChannelId,
heap::{object_heap::HeapObject, symbol_table::impl_ord_with_symbol_table_via_ord, Heap},
utils::{impl_debug_display_via_debugdisplay, DebugDisplay},
};
use candy_frontend::id::CountableId;
use derive_more::Deref;
use rustc_hash::FxHashMap;
use std::{
cmp::Ordering,
fmt::{self, Formatter},
hash::{Hash, Hasher},
num::NonZeroU64,
};
/// A channel port stored inline in a tagged word: the channel ID lives in
/// the bits above the kind tag.
#[derive(Clone, Copy, Deref)]
pub struct InlinePort(InlineObject);

impl InlinePort {
    // Number of low bits occupied by the kind tag.
    const CHANNEL_ID_SHIFT: usize = 3;

    /// Build the tagged word for a send or receive port and register the
    /// port with the heap's channel bookkeeping.
    pub fn create(heap: &mut Heap, channel_id: ChannelId, is_send: bool) -> InlineObject {
        heap.notify_port_created(channel_id);
        let channel_id = channel_id.to_usize();
        // shifting up and back must be lossless, i.e. the ID fits in the
        // bits above the kind tag
        debug_assert_eq!(
            (channel_id << Self::CHANNEL_ID_SHIFT) >> Self::CHANNEL_ID_SHIFT,
            channel_id,
            "Channel ID is too large.",
        );
        let kind = if is_send {
            InlineObject::KIND_SEND_PORT
        } else {
            InlineObject::KIND_RECEIVE_PORT
        };
        let header_word = kind | ((channel_id as u64) << Self::CHANNEL_ID_SHIFT);
        // SAFETY: relies on the kind tag being nonzero so the whole word is
        // nonzero — confirm against the KIND_* constant definitions.
        let header_word = unsafe { NonZeroU64::new_unchecked(header_word) };
        InlineObject(header_word)
    }

    /// Extract the channel ID from the bits above the kind tag.
    pub fn channel_id(self) -> ChannelId {
        ChannelId::from_usize((self.raw_word().get() >> Self::CHANNEL_ID_SHIFT) as usize)
    }
}

impl From<InlinePort> for InlineObject {
    fn from(port: InlinePort) -> Self {
        port.0
    }
}

// Equality, hashing, and ordering are based solely on the channel ID.
// NOTE(review): the send/receive kind bits are ignored, so a send port and a
// receive port for the same channel compare equal — confirm this is intended.
impl Eq for InlinePort {}
impl PartialEq for InlinePort {
    fn eq(&self, other: &Self) -> bool {
        self.channel_id() == other.channel_id()
    }
}
impl Hash for InlinePort {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.channel_id().hash(state)
    }
}
impl Ord for InlinePort {
    fn cmp(&self, other: &Self) -> Ordering {
        self.channel_id().cmp(&other.channel_id())
    }
}
impl PartialOrd for InlinePort {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
// Send Port
// Send Port
#[derive(Clone, Copy, Deref, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct InlineSendPort(InlinePort);

impl InlineSendPort {
    /// Wrap an existing tagged word without validating its kind bits.
    pub fn new_unchecked(object: InlineObject) -> Self {
        Self(InlinePort(object))
    }

    /// Create a send port for `channel_id`, registering it with the heap.
    pub fn create(heap: &mut Heap, channel_id: ChannelId) -> InlineObject {
        InlinePort::create(heap, channel_id, true)
    }
}

impl DebugDisplay for InlineSendPort {
    fn fmt(&self, f: &mut Formatter, _is_debug: bool) -> fmt::Result {
        write!(f, "send port for {:?}", self.channel_id())
    }
}
impl_debug_display_via_debugdisplay!(InlineSendPort);

impl InlineObjectTrait for InlineSendPort {
    // Inline values need no actual copying; just bump the heap's port count
    // for the channel.
    fn clone_to_heap_with_mapping(
        self,
        heap: &mut Heap,
        _address_map: &mut FxHashMap<HeapObject, HeapObject>,
    ) -> Self {
        heap.notify_port_created(self.channel_id());
        self
    }
}
impl_ord_with_symbol_table_via_ord!(InlineSendPort);
// Receive Port
// Receive Port
#[derive(Clone, Copy, Deref, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct InlineReceivePort(InlinePort);

impl InlineReceivePort {
    /// Wrap an existing tagged word without validating its kind bits.
    pub fn new_unchecked(object: InlineObject) -> Self {
        Self(InlinePort(object))
    }

    /// Create a receive port for `channel_id`, registering it with the heap.
    pub fn create(heap: &mut Heap, channel_id: ChannelId) -> InlineObject {
        InlinePort::create(heap, channel_id, false)
    }
}

impl DebugDisplay for InlineReceivePort {
    fn fmt(&self, f: &mut Formatter, _is_debug: bool) -> fmt::Result {
        write!(f, "receive port for {:?}", self.channel_id())
    }
}
impl_debug_display_via_debugdisplay!(InlineReceivePort);

impl InlineObjectTrait for InlineReceivePort {
    // Inline values need no actual copying; just bump the heap's port count
    // for the channel.
    fn clone_to_heap_with_mapping(
        self,
        heap: &mut Heap,
        _address_map: &mut FxHashMap<HeapObject, HeapObject>,
    ) -> Self {
        heap.notify_port_created(self.channel_id());
        self
    }
}
impl_ord_with_symbol_table_via_ord!(InlineReceivePort);
|
// Public submodules.
pub mod node;
pub mod pipeline;

// Private implementation modules, re-exported flat below.
mod entity;
mod graph;
mod material;
mod mesh;

pub use entity::*;
pub use graph::*;
pub use material::*;
pub use mesh::*;
|
use cql_bindgen::CassIterator as _CassIterator;
use cql_bindgen::cass_iterator_free;
use cql_bindgen::cass_iterator_next;
use cql_bindgen::cass_iterator_get_column;
use cql_bindgen::CassRow as _CassRow;
use cql_bindgen::cass_row_get_column;
use cql_bindgen::cass_row_get_column_by_name;
use cql_bindgen::cass_iterator_from_row;
use cql_bindgen::CASS_ERROR_LIB_INDEX_OUT_OF_BOUNDS;
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use std::fmt;
use std::ffi::CString;
use std::iter::IntoIterator;
use std::iter;
use cql_ffi::value::CassValue;
use cql_ffi::error::CassError;
use cql_ffi::column::CassColumn;
/// Thin wrapper around a borrowed Cassandra row pointer.
pub struct CassRow(pub *const _CassRow);

impl CassRow {}

impl Debug for CassRow {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // iterate over the row's columns, tab-separated, using Debug
        for column in self {
            try!(write!(f, "{:?}\t", CassValue::new(column.0)));
        }
        Ok(())
    }
}
impl Display for CassRow {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // same layout as Debug, but via each column value's Display
        for column in self {
            try!(write!(f, "{}\t", CassValue::new(column.0)));
        }
        Ok(())
    }
}
impl CassRow {
    /// Get the column at `index`, or an index-out-of-bounds error if the
    /// underlying row has no such column.
    pub fn get_column(&self, index: u64) -> Result<CassColumn, CassError> {
        unsafe {
            let col = cass_row_get_column(self.0, index);
            // NULL from the C API signals an out-of-range index
            if col.is_null() {
                Err(CassError::build(CASS_ERROR_LIB_INDEX_OUT_OF_BOUNDS))
            } else {
                Ok(CassColumn(col))
            }
        }
    }

    /// Get the column with the given name.
    ///
    /// NOTE(review): unlike `get_column`, this does not check the returned
    /// pointer for NULL, so an unknown column name yields a `CassColumn`
    /// wrapping a null pointer; confirm callers tolerate that.
    pub fn get_column_by_name<S>(&self, name: S) -> CassColumn
        where S: Into<String>
    {
        unsafe {
            // CString::new only fails on interior NULs, which are not valid
            // column names anyway
            let name = CString::new(name.into()).unwrap();
            CassColumn(cass_row_get_column_by_name(self.0, name.as_ptr()))
        }
    }
}
/// Owning wrapper around a Cassandra C iterator handle; the handle is freed
/// when the wrapper is dropped.
pub struct RowIterator(pub *mut _CassIterator);

impl Drop for RowIterator {
    fn drop(&mut self) {
        unsafe { cass_iterator_free(self.0) }
    }
}
impl iter::Iterator for RowIterator {
    type Item = CassColumn;
    fn next(&mut self) -> Option<<Self as Iterator>::Item> {
        unsafe {
            // cass_iterator_next returns 0 once the iterator is exhausted
            match cass_iterator_next(self.0) {
                0 => None,
                _ => Some(CassColumn(cass_iterator_get_column(self.0))),
            }
        }
    }
}

// NOTE(review): this impl advances the underlying C iterator through a
// shared reference (the raw pointer bypasses Rust's aliasing guarantees),
// so iterating a `&RowIterator` still consumes the handle's position.
impl<'a> Iterator for &'a RowIterator {
    type Item = CassColumn;
    fn next(&mut self) -> Option<<Self as Iterator>::Item> {
        unsafe {
            match cass_iterator_next(self.0) {
                0 => None,
                _ => Some(CassColumn(cass_iterator_get_column(self.0))),
            }
        }
    }
}

impl Display for RowIterator {
    // NOTE(review): because of the `&RowIterator` impl above, formatting has
    // the side effect of exhausting the iterator.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        for item in self {
            try!(write!(f, "{}\t", CassValue::new(item.0)));
        }
        Ok(())
    }
}
// Owned and borrowed rows both yield a fresh C iterator over their columns.
impl IntoIterator for CassRow {
    type Item = CassColumn;
    type IntoIter = RowIterator;
    fn into_iter(self) -> Self::IntoIter {
        unsafe { RowIterator(cass_iterator_from_row(self.0)) }
    }
}

impl<'a> IntoIterator for &'a CassRow {
    type Item = CassColumn;
    type IntoIter = RowIterator;
    fn into_iter(self) -> Self::IntoIter {
        unsafe { RowIterator(cass_iterator_from_row(self.0)) }
    }
}
|
// Copyright (c) Calibra Research
// SPDX-License-Identifier: Apache-2.0
use crate::Author;
#[cfg(test)]
#[path = "unit_tests/base_type_tests.rs"]
mod base_type_tests;
/// Identifier of a consensus epoch (a simple counter).
#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Debug)]
pub struct EpochId(pub usize);

// The following types are simplified for simulation purposes.
#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Debug)]
pub struct BlockHash(pub u64);

#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Hash, Debug)]
pub struct QuorumCertificateHash(pub u64);

// Opaque execution state (simulation stand-in).
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Hash, Debug)]
pub struct State(pub u64);

// A client command, identified by its proposer and a per-proposer index.
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Hash, Debug)]
pub struct Command {
    pub proposer: Author,
    pub index: usize,
}
impl EpochId {
    /// Hash seeding the initial quorum certificate of this epoch.
    pub fn initial_hash(self) -> QuorumCertificateHash {
        QuorumCertificateHash(self.0 as u64)
    }

    /// The epoch preceding this one, or `None` for the first epoch.
    pub fn previous(self) -> Option<EpochId> {
        if self.0 == 0 {
            None
        } else {
            // Bug fix: previously returned `EpochId(self.0)` — i.e. the same
            // epoch — making `previous()` a no-op for nonzero epochs.
            Some(EpochId(self.0 - 1))
        }
    }
}
/// Tests whether `x` is one less than a power of two (0, 1, 3, 7, 15, ...).
///
/// Such values have the binary form `0…01…1`, so adding one clears every set
/// bit. `wrapping_add` is used so that `usize::MAX` — which is `2^BITS - 1`
/// and therefore a valid member of the family — does not overflow (a panic
/// in debug builds with the previous `x + 1`).
pub fn is_power2_minus1(x: usize) -> bool {
    (x & x.wrapping_add(1)) == 0
}
/// Merge two sequences that are each already sorted according to `cmp` into
/// one sorted `Vec`.
///
/// When two heads compare `Equal` under `cmp` AND are truly equal under
/// `Eq`, only one copy is kept (deduplication); values that merely compare
/// `Equal` under `cmp` but differ under `Eq` are both kept, first-sequence
/// element first.
pub fn merge_sort<T: Eq, I: IntoIterator<Item = T>, F: Fn(&T, &T) -> std::cmp::Ordering>(
    v1: I,
    v2: I,
    cmp: F,
) -> Vec<T> {
    let mut iter1 = v1.into_iter();
    let mut iter2 = v2.into_iter();
    let mut result = Vec::new();
    let mut head1 = iter1.next();
    let mut head2 = iter2.next();
    while let (Some(x1), Some(x2)) = (&head1, &head2) {
        // Bug fix: `cmp(&x1, &x2)` passed `&&T` where `&T` was expected;
        // `x1`/`x2` are already references thanks to match ergonomics.
        match cmp(x1, x2) {
            std::cmp::Ordering::Less => {
                result.push(head1.unwrap());
                head1 = iter1.next();
            }
            std::cmp::Ordering::Equal => {
                // Bug fix: this previously compared `head1 == head1` (always
                // true), silently dropping `head2`'s element even when the
                // two values were distinct under `Eq`.
                if head1 == head2 {
                    result.push(head1.unwrap());
                } else {
                    result.push(head1.unwrap());
                    result.push(head2.unwrap());
                }
                head1 = iter1.next();
                head2 = iter2.next();
            }
            std::cmp::Ordering::Greater => {
                result.push(head2.unwrap());
                head2 = iter2.next();
            }
        }
    }
    // drain whichever side still has elements
    while let Some(x1) = head1 {
        result.push(x1);
        head1 = iter1.next();
    }
    while let Some(x2) = head2 {
        result.push(x2);
        head2 = iter2.next();
    }
    result
}
|
use std::iter::FromIterator;
use crate::{Alignment, AlignmentPositionIterator, AlignmentSequenceIterator};
// Iterates over alignment columns: each item holds the residue at this
// position from every sequence.
impl<'a, T> Iterator for AlignmentPositionIterator<'a, T>
where
    T: Clone,
{
    type Item = Vec<&'a T>;
    fn next(&mut self) -> Option<Vec<&'a T>> {
        // stop once every position has been yielded
        if self.index >= self.alignment.length {
            return None;
        }
        match self.alignment.nth_position(self.index) {
            Some(position) => {
                self.index = self.index.saturating_add(1);
                self.size_hint = self.size_hint.saturating_sub(1);
                Some(position)
            }
            None => None,
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // `usize::max_value()` is used as an "unknown size" sentinel
        if self.size_hint < usize::max_value() {
            (self.size_hint, Some(self.size_hint))
        } else {
            (usize::max_value(), None)
        }
    }
}
// The remaining-column count is always exactly known, so ExactSizeIterator
// applies.
impl<'a, T> ExactSizeIterator for AlignmentPositionIterator<'a, T>
where
    T: Clone,
{
    fn len(&self) -> usize {
        let (lower, upper) = self.size_hint();
        // Note: This assertion is overly defensive, but it checks the invariant
        // guaranteed by the trait. If this trait were rust-internal,
        // we could use debug_assert!; assert_eq! will check all Rust user
        // implementations too.
        assert_eq!(upper, Some(lower));
        lower
    }
}
// Iterates over alignment rows: each item is one full sequence.
impl<'a, T> Iterator for AlignmentSequenceIterator<'a, T>
where
    T: Clone,
{
    type Item = Vec<&'a T>;
    fn next(&mut self) -> Option<Vec<&'a T>> {
        // stop once every sequence has been yielded
        if self.index >= self.alignment.n_sequences {
            return None;
        }
        match self.alignment.nth_sequence(self.index) {
            Some(seq) => {
                self.index = self.index.saturating_add(1);
                self.size_hint = self.size_hint.saturating_sub(1);
                Some(seq)
            }
            None => None,
        }
    }
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // Bug fix: this previously returned `self.alignment.nth_sequence(n)`
        // without touching `self.index`, violating the `Iterator::nth`
        // contract (nth must consume n elements and advance the iterator,
        // relative to the current position). Skip n items, then yield the
        // next one via the bounds-checked `next`.
        self.index = self.index.saturating_add(n);
        self.size_hint = self.size_hint.saturating_sub(n);
        self.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // `usize::max_value()` is used as an "unknown size" sentinel
        if self.size_hint < usize::max_value() {
            (self.size_hint, Some(self.size_hint))
        } else {
            (usize::max_value(), None)
        }
    }
}
// The remaining-row count is always exactly known, so ExactSizeIterator
// applies.
impl<'a, T> ExactSizeIterator for AlignmentSequenceIterator<'a, T>
where
    T: Clone,
{
    fn len(&self) -> usize {
        let (lower, upper) = self.size_hint();
        // Note: This assertion is overly defensive, but it checks the invariant
        // guaranteed by the trait. If this trait were rust-internal,
        // we could use debug_assert!; assert_eq! will check all Rust user
        // implementations too.
        assert_eq!(upper, Some(lower));
        lower
    }
}
impl<A> FromIterator<Vec<A>> for Alignment<A>
where
    A: Clone,
{
    /// Builds an alignment from an iterator of equally-long sequences.
    ///
    /// # Panics
    ///
    /// Panics if sequences are of different lengths, or if the iterator
    /// yields no sequences at all (the alignment length would be undefined).
    fn from_iter<I: IntoIterator<Item = Vec<A>>>(iter: I) -> Self {
        let mut length: Option<usize> = None;
        let mut n_sequences = 0_usize;
        let sequences = iter
            .into_iter()
            .flat_map(|x| {
                // the first sequence fixes the length; all others must match
                match length {
                    None => length = Some(x.len()),
                    Some(len) => {
                        if len != x.len() {
                            panic!("sequences of different lengths");
                        }
                    }
                }
                n_sequences += 1;
                // `x` is already an owned Vec — moving it avoids the full
                // clone that the previous `x.to_vec()` performed
                x
            })
            .collect::<Vec<_>>();
        Self {
            sequences,
            n_sequences,
            length: length.expect("cannot build an Alignment from an empty iterator"),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn for_positions() {
        let align =
            Alignment::with_sequences(&[b"ALKHITAN".to_vec(), b"VLK-ITAN".to_vec()]).unwrap();
        let mut x = Vec::new();
        for col in align.iter_positions() {
            x.push(col);
        }
        assert_eq!(x.len(), 8);
        assert_eq!(x.get(0).unwrap(), &[&b'A', &b'V']);
        assert_eq!(x.get(3).unwrap(), &[&b'H', &b'-']);
    }

    #[test]
    #[should_panic]
    fn for_positions_out_of_bonds() {
        let align =
            Alignment::with_sequences(&[b"ALKHITAN".to_vec(), b"VLK-ITAN".to_vec()]).unwrap();
        let mut x = Vec::new();
        for col in align.iter_positions() {
            x.push(col);
        }
        // index 22 is past the 8 collected columns
        let _ = x.get(22).unwrap();
    }

    #[test]
    fn for_positions_exact() {
        let align =
            Alignment::with_sequences(&[b"ALKHITAN".to_vec(), b"VLK-ITAN".to_vec()]).unwrap();
        assert_eq!(align.iter_positions().len(), 8);
        assert_eq!(align.iter_positions().next().unwrap().len(), 2);
    }

    #[test]
    fn for_sequences() {
        let align =
            Alignment::with_sequences(&[b"ALKHITAN".to_vec(), b"VLK-ITAN".to_vec()]).unwrap();
        let mut x = Vec::new();
        for row in align.iter_sequences() {
            assert_eq!(row.len(), 8);
            x.push(row);
        }
        assert_eq!(x.len(), 2)
    }

    #[test]
    fn for_sequences_exact() {
        let align =
            Alignment::with_sequences(&[b"ALKHITAN".to_vec(), b"VLK-ITAN".to_vec()]).unwrap();
        assert_eq!(align.iter_sequences().len(), 2);
        assert_eq!(align.iter_sequences().next().unwrap().len(), 8);
    }

    #[test]
    fn for_sequences_collect() {
        // Fixed: this test was a copy-paste of `for_sequences_exact` and
        // never exercised `collect`; now it actually collects the iterator.
        let align =
            Alignment::with_sequences(&[b"ALKHITAN".to_vec(), b"VLK-ITAN".to_vec()]).unwrap();
        let rows: Vec<_> = align.iter_sequences().collect();
        assert_eq!(rows.len(), 2);
        assert_eq!(rows.get(0).unwrap().len(), 8);
        assert_eq!(rows.get(1).unwrap().len(), 8);
    }
}
|
use serde_derive::Deserialize;
use std::collections::{hash_map::Entry, HashMap};
use std::process::exit;
use super::{parse_to_config_file, ConfigStructure, Flattenable};
use crate::commands::{self, CommandKeybind, LllCommand};
use crate::KEYMAP_FILE;
/// ASCII escape key code (ncurses does not define a constant for it).
pub const ESCAPE: i32 = 0x1B;

// serde `default = "..."` helper functions: the fallback key code used for
// each binding when it is absent from the keymap file.
const fn default_up() -> i32 {
    ncurses::KEY_UP
}
const fn default_down() -> i32 {
    ncurses::KEY_DOWN
}
const fn default_left() -> i32 {
    ncurses::KEY_LEFT
}
const fn default_right() -> i32 {
    ncurses::KEY_RIGHT
}
const fn default_home() -> i32 {
    ncurses::KEY_HOME
}
const fn default_end() -> i32 {
    ncurses::KEY_END
}
const fn default_backspace() -> i32 {
    ncurses::KEY_BACKSPACE
}
const fn default_delete() -> i32 {
    ncurses::KEY_DC
}
const fn default_enter() -> i32 {
    '\n' as i32
}
const fn default_escape() -> i32 {
    ESCAPE
}
const fn default_tab() -> i32 {
    '\t' as i32
}
/// Raw on-disk layout of the keymap file when reading plain key bindings;
/// `mapcommand` entries are skipped here (they are parsed separately by
/// `LllRawCommandMapping`).
#[derive(Debug, Deserialize)]
struct LllRawKeymapping {
    #[serde(default)]
    keymaps: LllKeyMapping,
    #[serde(skip)]
    mapcommand: Vec<LllMapCommand>,
}

/// Key codes for the built-in navigation/editing actions. Every field falls
/// back to its ncurses default when missing from the config file.
#[derive(Debug, Deserialize)]
pub struct LllKeyMapping {
    #[serde(default = "default_up")]
    pub up: i32,
    #[serde(default = "default_down")]
    pub down: i32,
    #[serde(default = "default_left")]
    pub left: i32,
    #[serde(default = "default_right")]
    pub right: i32,
    #[serde(default = "default_home")]
    pub home: i32,
    #[serde(default = "default_end")]
    pub end: i32,
    #[serde(default = "default_backspace")]
    pub backspace: i32,
    #[serde(default = "default_delete")]
    pub delete: i32,
    #[serde(default = "default_enter")]
    pub enter: i32,
    #[serde(default = "default_escape")]
    pub escape: i32,
    #[serde(default = "default_tab")]
    pub tab: i32,
}
impl std::default::Default for LllKeyMapping {
    /// Every binding falls back to its ncurses default key code.
    fn default() -> Self {
        Self {
            backspace: default_backspace(),
            delete: default_delete(),
            down: default_down(),
            end: default_end(),
            enter: default_enter(),
            escape: default_escape(),
            home: default_home(),
            left: default_left(),
            right: default_right(),
            tab: default_tab(),
            up: default_up(),
        }
    }
}
impl Flattenable<LllKeyMapping> for LllRawKeymapping {
    // Discard the raw wrapper and keep only the key mapping table.
    fn flatten(self) -> LllKeyMapping {
        self.keymaps
    }
}

impl ConfigStructure for LllKeyMapping {
    // Parse the keymap file, falling back to defaults on any failure.
    fn get_config() -> Self {
        parse_to_config_file::<LllRawKeymapping, LllKeyMapping>(KEYMAP_FILE)
            .unwrap_or_else(LllKeyMapping::default)
    }
}

/// One user-defined binding: a key sequence mapped to a command with
/// optional arguments.
#[derive(Debug, Deserialize)]
struct LllMapCommand {
    pub keys: Vec<i32>,
    pub command: String,
    #[serde(default)]
    pub args: Vec<String>,
}
/// Raw on-disk layout of the keymap file when reading only the `mapcommand`
/// entries; the plain key bindings are skipped here.
#[derive(Debug, Deserialize)]
struct LllRawCommandMapping {
    #[serde(skip)]
    keymaps: LllKeyMapping,
    #[serde(default)]
    mapcommand: Vec<LllMapCommand>,
}

impl Flattenable<LllCommandMapping> for LllRawCommandMapping {
    // Build the key -> command trie; entries whose command fails to parse
    // are reported on stderr and skipped.
    fn flatten(self) -> LllCommandMapping {
        let mut keymaps = LllCommandMapping::new();
        self.mapcommand.iter().for_each(|m| {
            let args: Vec<&str> = m.args.iter().map(String::as_str).collect();
            match commands::from_args(m.command.as_str(), &args) {
                Ok(command) => insert_keycommand(&mut keymaps, command, &m.keys[..]),
                Err(e) => eprintln!("{}", e),
            }
        });
        keymaps
    }
}

/// Maps a key code to either a command or a nested mapping — i.e. one level
/// of a key-sequence trie.
pub type LllCommandMapping = HashMap<i32, CommandKeybind>;

impl ConfigStructure for LllCommandMapping {
    // Parse the keymap file, falling back to an empty mapping on failure.
    fn get_config() -> Self {
        parse_to_config_file::<LllRawCommandMapping, LllCommandMapping>(KEYMAP_FILE)
            .unwrap_or_else(LllCommandMapping::default)
    }
}
// Insert `keycommand` into the trie under the key sequence `keys`, creating
// intermediate composite levels as needed.
// NOTE(review): an ambiguous binding (prefix clash with an existing one)
// terminates the whole process with exit(1) rather than returning an error.
fn insert_keycommand(map: &mut LllCommandMapping, keycommand: Box<dyn LllCommand>, keys: &[i32]) {
    match keys.len() {
        // empty sequence: nothing to bind
        0 => {}
        // final key: bind the command directly
        1 => match map.entry(keys[0]) {
            Entry::Occupied(_) => {
                eprintln!("Error: Keybindings ambiguous");
                exit(1);
            }
            Entry::Vacant(entry) => {
                entry.insert(CommandKeybind::SimpleKeybind(keycommand));
            }
        },
        // more keys follow: descend into (or create) a composite level
        _ => match map.entry(keys[0]) {
            Entry::Occupied(mut entry) => match entry.get_mut() {
                CommandKeybind::CompositeKeybind(ref mut m) => {
                    insert_keycommand(m, keycommand, &keys[1..])
                }
                _ => {
                    eprintln!("Error: Keybindings ambiguous");
                    exit(1);
                }
            },
            Entry::Vacant(entry) => {
                let mut new_map = LllCommandMapping::new();
                insert_keycommand(&mut new_map, keycommand, &keys[1..]);
                let composite_command = CommandKeybind::CompositeKeybind(new_map);
                entry.insert(composite_command);
            }
        },
    }
}
|
#[doc = "Reader of register OR"]
pub type R = crate::R<u32, super::OR>;
#[doc = "Writer for register OR"]
pub type W = crate::W<u32, super::OR>;
#[doc = "Register OR `reset()`'s with value 0"]
impl crate::ResetValue for super::OR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// svd2rust-style generated register API: per field, an enum of values plus
// read (`*_R`) and write (`*_W`) proxies. Normally regenerated from the SVD
// description rather than edited by hand.
#[doc = "Timer21 ETR remap\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum ETR_RMP_A {
    #[doc = "0: TIM2x ETR input connected to GPIO"]
    GPIO = 0,
    #[doc = "1: TIM2x ETR input connected to COMP2_OUT"]
    COMP2_OUT = 1,
    #[doc = "2: TIM2x ETR input connected to COMP1_OUT"]
    COMP1_OUT = 2,
    #[doc = "3: TIM2x ETR input connected to LSE clock"]
    LSE = 3,
}
impl From<ETR_RMP_A> for u8 {
    #[inline(always)]
    fn from(variant: ETR_RMP_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `ETR_RMP`"]
pub type ETR_RMP_R = crate::R<u8, ETR_RMP_A>;
impl ETR_RMP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ETR_RMP_A {
        // all four 2-bit patterns map to a variant, so no reserved arm
        match self.bits {
            0 => ETR_RMP_A::GPIO,
            1 => ETR_RMP_A::COMP2_OUT,
            2 => ETR_RMP_A::COMP1_OUT,
            3 => ETR_RMP_A::LSE,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `GPIO`"]
    #[inline(always)]
    pub fn is_gpio(&self) -> bool {
        *self == ETR_RMP_A::GPIO
    }
    #[doc = "Checks if the value of the field is `COMP2_OUT`"]
    #[inline(always)]
    pub fn is_comp2_out(&self) -> bool {
        *self == ETR_RMP_A::COMP2_OUT
    }
    #[doc = "Checks if the value of the field is `COMP1_OUT`"]
    #[inline(always)]
    pub fn is_comp1_out(&self) -> bool {
        *self == ETR_RMP_A::COMP1_OUT
    }
    #[doc = "Checks if the value of the field is `LSE`"]
    #[inline(always)]
    pub fn is_lse(&self) -> bool {
        *self == ETR_RMP_A::LSE
    }
}
#[doc = "Write proxy for field `ETR_RMP`"]
pub struct ETR_RMP_W<'a> {
    w: &'a mut W,
}
impl<'a> ETR_RMP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: ETR_RMP_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "TIM2x ETR input connected to GPIO"]
    #[inline(always)]
    pub fn gpio(self) -> &'a mut W {
        self.variant(ETR_RMP_A::GPIO)
    }
    #[doc = "TIM2x ETR input connected to COMP2_OUT"]
    #[inline(always)]
    pub fn comp2_out(self) -> &'a mut W {
        self.variant(ETR_RMP_A::COMP2_OUT)
    }
    #[doc = "TIM2x ETR input connected to COMP1_OUT"]
    #[inline(always)]
    pub fn comp1_out(self) -> &'a mut W {
        self.variant(ETR_RMP_A::COMP1_OUT)
    }
    #[doc = "TIM2x ETR input connected to LSE clock"]
    #[inline(always)]
    pub fn lse(self) -> &'a mut W {
        self.variant(ETR_RMP_A::LSE)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // safe (not `unsafe`): the enum covers every possible 2-bit value
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);
        self.w
    }
}
#[doc = "Timer21 TI1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum TI1_RMP_A {
    #[doc = "0: TIM2x TI1 input connected to GPIO"]
    GPIO = 0,
    #[doc = "1: TIM2x TI1 input connected to COMP2_OUT"]
    COMP2_OUT = 1,
    #[doc = "2: TIM2x TI1 input connected to COMP1_OUT"]
    COMP1_OUT = 2,
}
impl From<TI1_RMP_A> for u8 {
    #[inline(always)]
    fn from(variant: TI1_RMP_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `TI1_RMP`"]
pub type TI1_RMP_R = crate::R<u8, TI1_RMP_A>;
impl TI1_RMP_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    // the 3-bit field has reserved patterns, so the result is wrapped in
    // `Variant`, with `Res(i)` for values not covered by the enum
    pub fn variant(&self) -> crate::Variant<u8, TI1_RMP_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(TI1_RMP_A::GPIO),
            1 => Val(TI1_RMP_A::COMP2_OUT),
            2 => Val(TI1_RMP_A::COMP1_OUT),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `GPIO`"]
    #[inline(always)]
    pub fn is_gpio(&self) -> bool {
        *self == TI1_RMP_A::GPIO
    }
    #[doc = "Checks if the value of the field is `COMP2_OUT`"]
    #[inline(always)]
    pub fn is_comp2_out(&self) -> bool {
        *self == TI1_RMP_A::COMP2_OUT
    }
    #[doc = "Checks if the value of the field is `COMP1_OUT`"]
    #[inline(always)]
    pub fn is_comp1_out(&self) -> bool {
        *self == TI1_RMP_A::COMP1_OUT
    }
}
#[doc = "Write proxy for field `TI1_RMP`"]
pub struct TI1_RMP_W<'a> {
    w: &'a mut W,
}
impl<'a> TI1_RMP_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TI1_RMP_A) -> &'a mut W {
        // enum variants are always valid field values, so the unsafe `bits`
        // contract is upheld here
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "TIM2x TI1 input connected to GPIO"]
    #[inline(always)]
    pub fn gpio(self) -> &'a mut W {
        self.variant(TI1_RMP_A::GPIO)
    }
    #[doc = "TIM2x TI1 input connected to COMP2_OUT"]
    #[inline(always)]
    pub fn comp2_out(self) -> &'a mut W {
        self.variant(TI1_RMP_A::COMP2_OUT)
    }
    #[doc = "TIM2x TI1 input connected to COMP1_OUT"]
    #[inline(always)]
    pub fn comp1_out(self) -> &'a mut W {
        self.variant(TI1_RMP_A::COMP1_OUT)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // `unsafe`: the 3-bit field has reserved values not covered by the enum
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 2)) | (((value as u32) & 0x07) << 2);
        self.w
    }
}
#[doc = "Timer21 TI2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TI2_RMP_A {
#[doc = "0: TIM2x TI2 input connected to GPIO"]
GPIO = 0,
#[doc = "1: TIM2x TI2 input connected to COMP2_OUT"]
COMP2_OUT = 1,
}
impl From<TI2_RMP_A> for bool {
#[inline(always)]
fn from(variant: TI2_RMP_A) -> Self {
variant as u8 != 0
}
}
/// Reader of field `TI2_RMP`
pub type TI2_RMP_R = crate::R<bool, TI2_RMP_A>;
impl TI2_RMP_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> TI2_RMP_A {
        if self.bits {
            TI2_RMP_A::COMP2_OUT
        } else {
            TI2_RMP_A::GPIO
        }
    }
    /// Checks if the value of the field is `GPIO`
    #[inline(always)]
    pub fn is_gpio(&self) -> bool {
        *self == TI2_RMP_A::GPIO
    }
    /// Checks if the value of the field is `COMP2_OUT`
    #[inline(always)]
    pub fn is_comp2_out(&self) -> bool {
        *self == TI2_RMP_A::COMP2_OUT
    }
}
/// Write proxy for field `TI2_RMP`
pub struct TI2_RMP_W<'a> {
    w: &'a mut W,
}
impl<'a> TI2_RMP_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: TI2_RMP_A) -> &'a mut W {
        self.bit(bool::from(variant))
    }
    /// TIM2x TI2 input connected to GPIO
    #[inline(always)]
    pub fn gpio(self) -> &'a mut W {
        self.variant(TI2_RMP_A::GPIO)
    }
    /// TIM2x TI2 input connected to COMP2_OUT
    #[inline(always)]
    pub fn comp2_out(self) -> &'a mut W {
        self.variant(TI2_RMP_A::COMP2_OUT)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 5 of the register.
        const OFFSET: u32 = 5;
        self.w.bits &= !(0x01 << OFFSET);
        self.w.bits |= ((value as u32) & 0x01) << OFFSET;
        self.w
    }
}
impl R {
    /// Bits 0:1 - Timer21 ETR remap
    #[inline(always)]
    pub fn etr_rmp(&self) -> ETR_RMP_R {
        let raw = (self.bits & 0x03) as u8;
        ETR_RMP_R::new(raw)
    }
    /// Bits 2:4 - Timer21 TI1
    #[inline(always)]
    pub fn ti1_rmp(&self) -> TI1_RMP_R {
        let raw = ((self.bits >> 2) & 0x07) as u8;
        TI1_RMP_R::new(raw)
    }
    /// Bit 5 - Timer21 TI2
    #[inline(always)]
    pub fn ti2_rmp(&self) -> TI2_RMP_R {
        let set = (self.bits >> 5) & 0x01 != 0;
        TI2_RMP_R::new(set)
    }
}
impl W {
    // Each accessor hands out a write proxy borrowing `self`, so several
    // field writes can be chained before the register value is committed.
    #[doc = "Bits 0:1 - Timer21 ETR remap"]
    #[inline(always)]
    pub fn etr_rmp(&mut self) -> ETR_RMP_W {
        ETR_RMP_W { w: self }
    }
    #[doc = "Bits 2:4 - Timer21 TI1"]
    #[inline(always)]
    pub fn ti1_rmp(&mut self) -> TI1_RMP_W {
        TI1_RMP_W { w: self }
    }
    #[doc = "Bit 5 - Timer21 TI2"]
    #[inline(always)]
    pub fn ti2_rmp(&mut self) -> TI2_RMP_W {
        TI2_RMP_W { w: self }
    }
}
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Targets the Cortex-M0, Cortex-M0+ and Cortex-M1 processors (ARMv6-M architecture)
use spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
/// Target specification for the bare-metal `thumbv6m-none-eabi` triple
/// (Cortex-M0, Cortex-M0+ and Cortex-M1; ARMv6-M architecture).
pub fn target() -> TargetResult {
    // Start from the shared thumb defaults and override only what ARMv6-M
    // requires.
    let options = TargetOptions {
        // The ARMv6-M architecture doesn't support unaligned loads/stores so we disable them
        // with +strict-align.
        features: "+strict-align".to_string(),
        // There are no atomic CAS instructions available in the instruction set of the ARMv6-M
        // architecture
        atomic_cas: false,
        ..super::thumb_base::opts()
    };
    Ok(Target {
        llvm_target: "thumbv6m-none-eabi".to_string(),
        target_endian: "little".to_string(),
        target_pointer_width: "32".to_string(),
        target_c_int_width: "32".to_string(),
        data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(),
        arch: "arm".to_string(),
        target_os: "none".to_string(),
        target_env: String::new(),
        target_vendor: String::new(),
        linker_flavor: LinkerFlavor::Gcc,
        options,
    })
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub mod waiter;
pub mod entry;
pub mod waitlist;
pub mod queue;
pub mod waitgroup;
pub mod lock;
pub mod bufchan;
pub mod chan;
pub mod cond;
pub mod qlock;
use super::super::qlib::linux_def::*;
use super::super::task::*;
pub use self::entry::*;
pub use self::waiter::*;
pub use self::queue::*;
// EventMaskFromLinux returns an EventMask representing the supported events
// from the Linux events e, which is in the format used by poll(2).
/// Widens the poll(2)-format event bits and keeps only the supported ones.
pub fn EventMaskFromLinux(e: u32) -> EventMask {
    (e as u64) & ALL_EVENTS
}
// ToLinux returns e in the format used by Linux poll(2).
/// Narrows an EventMask back to the u32 format used by Linux poll(2).
/// NOTE(review): a truncating cast — assumes all relevant event bits live in
/// the low 32 bits; confirm against ALL_EVENTS.
pub fn ToLinux(e: EventMask) -> u32 {
    e as u32
}
// Waitable contains the methods that need to be implemented by waitable
// objects.
// default: Always readable
pub trait Waitable {
    // Readiness returns what the object is currently ready for. If it's
    // not ready for a desired purpose, the caller may use EventRegister and
    // EventUnregister to get notifications once the object becomes ready.
    //
    // Implementations should allow for events like EventHUp and EventErr
    // to be returned regardless of whether they are in the input EventMask.
    //
    // The default implementation echoes the requested mask back, i.e. the
    // object reports itself as always ready for everything.
    fn Readiness(&self, _task: &Task, mask: EventMask) -> EventMask {
        return mask
    }
    // EventRegister registers the given waiter entry to receive
    // notifications when an event occurs that makes the object ready for
    // at least one of the events in mask.
    //
    // Default: no-op, consistent with an always-ready object that never has
    // a state change to notify about.
    fn EventRegister(&self, _task: &Task, _e: &WaitEntry, _mask: EventMask) {}
    // EventUnregister unregisters a waiter entry previously registered with
    // EventRegister().
    //
    // Default: no-op.
    fn EventUnregister(&self, _task: &Task, _e: &WaitEntry) {}
}
|
mod hash;
pub use hash::Hash;
use uint::construct_uint;
construct_uint! {
    /// 256-bit unsigned integer built from four 64-bit limbs.
    pub struct U256(4);
}
|
use crate::Msg;
use anyhow::{Context, Error};
use arcs::{
components::{Dimension, DrawingObject, Geometry, Layer, Name, PointStyle},
primitives::Point,
render::{Renderer, Viewport},
Vector,
};
use js_sys::Function;
use kurbo::Size;
use piet::Color;
use piet_web::WebRenderContext;
use specs::prelude::*;
use wasm_bindgen::{closure::Closure, JsCast};
use web_sys::{CanvasRenderingContext2d, Document, HtmlCanvasElement, Window};
use yew::{html, Component, ComponentLink, Html, ShouldRender};
pub struct App {
link: ComponentLink<Self>,
canvas: Option<CanvasRenderingContext2d>,
renderer: Renderer,
world: World,
layer_id: Entity,
}
impl App {
    /// Re-renders the entire drawing onto the HTML canvas.
    ///
    /// Panics if the canvas context has not been initialised yet, i.e.
    /// before `Msg::CanvasLoaded` has been handled.
    fn redraw_canvas(&mut self) {
        let window = window().unwrap();
        let ctx = self.canvas.as_mut().unwrap();
        let canvas = canvas_dimensions(&ctx).unwrap();
        let render_context = WebRenderContext::new(ctx, &window);
        log::trace!("Redrawing the canvas with dimensions {:?}", canvas);
        // Build the render system and run it once over the ECS world.
        let mut system = self.renderer.system(render_context, canvas);
        RunNow::setup(&mut system, &mut self.world);
        RunNow::run_now(&mut system, &self.world);
    }
    /// Creates a new white point entity on the base layer.
    ///
    /// `location` is in canvas coordinates; it is converted into drawing
    /// coordinates before the entity is created.
    fn add_point(&mut self, location: kurbo::Point) {
        let location = self.to_drawing_coordinates(location);
        let point = self
            .world
            .create_entity()
            .with(DrawingObject {
                geometry: Geometry::Point(Point::new(location)),
                layer: self.layer_id,
            })
            .with(PointStyle {
                colour: Color::WHITE,
                radius: Dimension::Pixels(5.0),
            })
            .build();
        log::info!("Added a point at {:?} (entity: {:?})", location, point);
    }
    /// Converts a canvas-space point into drawing coordinates using the
    /// current viewport. Panics if the canvas isn't initialised yet.
    fn to_drawing_coordinates(&self, location: kurbo::Point) -> Vector {
        let window = self.canvas.as_ref().and_then(canvas_dimensions).unwrap();
        arcs::render::to_drawing_coordinates(
            location,
            &self.renderer.viewport,
            window,
        )
    }
}
/// Builds the ECS world with all arcs components registered plus a single
/// visible "base" layer, returning the world and the layer's entity id.
fn create_world_and_default_layer() -> (World, Entity) {
    log::debug!("Initializing the world");
    let mut world = World::new();
    arcs::components::register(&mut world);
    let layer_id = Layer::create(
        world.create_entity(),
        Name::new("base"),
        Layer {
            visible: true,
            z_level: 0,
        },
    );
    (world, layer_id)
}
/// Reads the pixel dimensions of the `<canvas>` backing `ctx`, or `None`
/// when the context has no associated canvas element.
fn canvas_dimensions(ctx: &CanvasRenderingContext2d) -> Option<Size> {
    ctx.canvas()
        .map(|element| Size::new(f64::from(element.width()), f64::from(element.height())))
}
impl Component for App {
    type Message = Msg;
    type Properties = ();
    /// Builds the component: schedules a `CanvasLoaded` message for when
    /// the document becomes ready and sets up an empty world with a
    /// default viewport centred on the origin.
    fn create(_: Self::Properties, mut link: ComponentLink<Self>) -> Self {
        let on_canvas_loaded = link.send_back(|_| Msg::CanvasLoaded);
        on_ready(move || on_canvas_loaded.emit(()));
        let viewport = Viewport {
            centre: Vector::zero(),
            pixels_per_drawing_unit: 1.0,
        };
        let background = Color::BLACK;
        let (world, layer_id) = create_world_and_default_layer();
        App {
            link,
            world,
            layer_id,
            canvas: None,
            renderer: Renderer::new(viewport, background),
        }
    }
    /// Handles incoming messages; always returns `true` so yew re-renders.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        log::trace!("Updating with {:?}", msg);
        match msg {
            // The canvas element now exists in the DOM: grab its 2d context
            // and draw the (initially empty) scene.
            Msg::CanvasLoaded => {
                self.canvas = Some(
                    html5_canvas_context_from_selector("#canvas").unwrap(),
                );
                self.redraw_canvas();
            },
            // A click adds a point at the clicked location and repaints.
            Msg::CanvasClicked(event) => {
                log::debug!("Clicked {:?}", event);
                self.add_point(event.screen);
                self.redraw_canvas();
            },
        }
        true
    }
    /// Renders the static page chrome plus the drawing canvas.
    fn view(&self) -> Html<Self> {
        log::trace!("Updating the view");
        html! {
            <div>
                <nav class="navbar">
                    <ul>
                        <li class="brand" title={ env!("CARGO_PKG_DESCRIPTION") }>
                            <a href="#">{ "arcs WebAssembly Demo" }</a>
                        </li>
                        <li>
                            <a href="https://github.com/Michael-F-Bryan/">{ "Repo" }</a>
                        </li>
                    </ul>
                </nav>
                <main>
                    <canvas id="canvas" onclick=|e| Msg::CanvasClicked(e.into()) />
                </main>
            </div>
        }
    }
}
/// Fetches the browser `Window`, failing with a descriptive error when
/// running outside a browsing context.
fn window() -> Result<Window, Error> {
    web_sys::window().context("Unable to get the Window")
}
/// Looks up `selector` in the DOM and downcasts the match to a `<canvas>`.
///
/// Fails if the selector is malformed, matches no element, or the matched
/// element is not actually a canvas.
fn html5_canvas_from_selector(
    selector: &str,
) -> Result<HtmlCanvasElement, Error> {
    document()?
        .query_selector(selector)
        .ok()
        .context("The selector was malformed")?
        .context("Can't find the element")?
        .dyn_into::<HtmlCanvasElement>()
        .ok()
        .context("The element wasn't actually a <canvas>")
}
/// Finds the `<canvas>` matching `selector` and returns its 2d rendering
/// context, with a descriptive error at each step that can fail.
fn html5_canvas_context_from_selector(
    selector: &str,
) -> Result<CanvasRenderingContext2d, Error> {
    html5_canvas_from_selector(selector)?
        .get_context("2d")
        .ok()
        .context("The call to #canvas.get_context(\"2d\") failed")?
        .context("There is no 2d canvas context")?
        .dyn_into::<CanvasRenderingContext2d>()
        .ok()
        .context("The 2d canvas context wasn't a CanvasRenderingContext2d")
}
/// Fetches the current `Document` via [`window`].
fn document() -> Result<Document, Error> {
    window()?.document().context("Unable to get the Document")
}
/// An equivalent of the `$.ready()` function from jQuery.
///
/// When the document is already loaded the callback is scheduled on the next
/// event-loop turn via `set_timeout`; otherwise it runs on the
/// `DOMContentLoaded` event. Panics if the `Document` is unavailable.
fn on_ready<F>(cb: F)
where
    F: FnOnce() + 'static,
{
    let document = document().unwrap();
    let ready_state = document.ready_state();
    // `once_into_js` hands ownership of the closure to the JS side; it may
    // be invoked at most once, which matches both branches below.
    let js_callback = Closure::once_into_js(cb).dyn_into::<Function>().unwrap();
    match ready_state.as_str() {
        "complete" | "interactive" => {
            web_sys::window()
                .expect("Unable to get the Window")
                .set_timeout_with_callback(&js_callback)
                .unwrap();
        },
        _ => {
            document
                .add_event_listener_with_callback(
                    "DOMContentLoaded",
                    &js_callback,
                )
                .unwrap();
        },
    }
}
|
use eyeliner::servo_embedder_traits::resources::{Resource, ResourceReaderMethods};
use std::{fs::File, io::Read, path::PathBuf};
/// Resource reader backed by files on the local filesystem.
pub struct ResourceReader {
    // Path of the file served for `Resource::Preferences`.
    pub prefs: PathBuf,
}
impl ResourceReaderMethods for ResourceReader {
    /// No extra files need to be accessible inside the sandbox.
    fn sandbox_access_files(&self) -> Vec<PathBuf> {
        vec![]
    }
    /// No extra directories need to be accessible inside the sandbox.
    fn sandbox_access_files_dirs(&self) -> Vec<PathBuf> {
        vec![]
    }
    /// Reads the requested resource from disk and returns its raw bytes.
    ///
    /// # Panics
    /// Panics when the resource is anything other than
    /// `Resource::Preferences`, or when the backing file cannot be opened
    /// or read.
    fn read(&self, file: Resource) -> Vec<u8> {
        let path = match file {
            Resource::Preferences => &self.prefs,
            _ => panic!("Can't find file"),
        };
        let mut buffer = vec![];
        // `unwrap_or_else` builds the panic message only on failure, unlike
        // the previous `expect(&format!(..))` which formatted it on every
        // call (clippy::expect_fun_call).
        File::open(path)
            .unwrap_or_else(|_| panic!("Can't find file: {:?}", path))
            .read_to_end(&mut buffer)
            .expect("Can't read file");
        buffer
    }
}
|
use std::fmt;
use crate::constants::*;
/// A multiset of dice faces, stored as a per-face count histogram.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Outcome {
    // histogram[i] holds how many dice show face i + 1.
    pub histogram: [u8; SIDES],
}
/// Iterates over outcomes reachable by removing one die from `outcome`.
pub struct OutcomePredecessorIterator<'a> {
    outcome: &'a Outcome,
    // next face index to try removing a die from
    next: usize,
}
/// Iterates over outcomes reachable by adding one die to `outcome`.
pub struct OutcomeSuccessorIterator<'a> {
    outcome: &'a Outcome,
    // next face index to add a die to
    next: usize,
}
impl Outcome {
    /// An outcome with no dice rolled.
    pub fn empty() -> Self {
        Outcome { histogram: [0; SIDES] }
    }
    /// Packs the histogram into a single integer, treating the per-face
    /// counts as digits in base `DICE_COUNT + 1`.
    pub fn encode(&self) -> u32 {
        let base = (DICE_COUNT + 1) as u32;
        let mut value = 0;
        let mut place = 1;
        for &count in self.histogram.iter() {
            value += place * count as u32;
            place *= base;
        }
        value
    }
    /// Inverse of [`encode`]: unpacks the base-(DICE_COUNT + 1) digits back
    /// into a histogram.
    pub fn decode(mut v: u32) -> Self {
        let base = (DICE_COUNT + 1) as u32;
        let mut histogram = [0u8; SIDES];
        for slot in histogram.iter_mut() {
            *slot = (v % base) as u8;
            v /= base;
        }
        Outcome { histogram }
    }
    /// Number of ordered rolls producing this histogram: the multinomial
    /// coefficient DICE_COUNT! / prod(count_i!).
    pub fn multiplicity(&self) -> usize {
        // fac[i] holds (i + 1)!, so fac[fac.len() - 1] == DICE_COUNT!.
        let mut fac = [0; DICE_COUNT];
        fac[0] = 1;
        for i in 1..fac.len() {
            fac[i] = fac[i - 1] * (i + 1);
        }
        let mut res = fac[fac.len() - 1];
        for &count in self.histogram.iter() {
            if count > 0 {
                // count! == fac[count - 1]
                res /= fac[count as usize - 1];
            }
        }
        res
    }
    /// Outcomes obtained by removing a single die.
    pub fn predecessors(&self) -> OutcomePredecessorIterator {
        OutcomePredecessorIterator { outcome: self, next: 0 }
    }
    /// Outcomes obtained by adding a single die.
    pub fn successors(&self) -> OutcomeSuccessorIterator {
        OutcomeSuccessorIterator { outcome: self, next: 0 }
    }
}
impl fmt::Display for Outcome {
    /// Prints each die face once per occurrence, e.g. "1226" for the
    /// histogram {1: 1, 2: 2, 6: 1}.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (face, &count) in self.histogram.iter().enumerate() {
            for _ in 0..count {
                write!(f, "{}", face + 1)?;
            }
        }
        Ok(())
    }
}
/// Largest value `Outcome::encode` produces over all full-dice outcomes;
/// useful for sizing lookup tables indexed by encoded outcome.
pub fn max_outcome_encoding() -> usize {
    outcomes().map(|o| o.encode()).max().unwrap() as usize
}
impl<'a> Iterator for OutcomePredecessorIterator<'a> {
    type Item = Outcome;
    /// Yields a copy of the outcome with one die removed from each
    /// non-empty face slot in turn.
    fn next(&mut self) -> Option<Outcome> {
        // Skip face slots that have no dice to remove.
        while self.next < SIDES && self.outcome.histogram[self.next] == 0 {
            self.next += 1;
        }
        if self.next >= SIDES {
            return None;
        }
        let slot = self.next;
        self.next += 1;
        let mut histogram = self.outcome.histogram;
        histogram[slot] -= 1;
        Some(Outcome { histogram })
    }
}
impl<'a> Iterator for OutcomeSuccessorIterator<'a> {
    type Item = Outcome;
    /// Yields a copy of the outcome with one extra die on each face in turn.
    fn next(&mut self) -> Option<Outcome> {
        if self.next >= SIDES {
            return None;
        }
        let slot = self.next;
        self.next += 1;
        let mut histogram = self.outcome.histogram;
        histogram[slot] += 1;
        Some(Outcome { histogram })
    }
}
/// Enumerates every distinct histogram with a fixed total number of dice.
pub struct OutcomeIterator {
    // current enumeration state; all-zero once the sequence is exhausted
    histogram: [u8; SIDES],
}
impl OutcomeIterator {
    /// Starts the enumeration with all `dice_count` dice on the first face.
    fn new(dice_count: u8) -> Self {
        let mut h = [0; SIDES];
        h[0] = dice_count;
        OutcomeIterator { histogram: h }
    }
}
/// Enumerates every distinct outcome of rolling the full `DICE_COUNT` dice.
pub fn outcomes() -> OutcomeIterator {
    OutcomeIterator::new(DICE_COUNT as u8)
}
/// Enumerates every distinct outcome of rolling only `dice_count` dice.
pub fn sub_outcomes(dice_count: usize) -> OutcomeIterator {
    OutcomeIterator::new(dice_count as u8)
}
impl Iterator for OutcomeIterator {
    type Item = Outcome;
    // Yields the current histogram, then advances to the next one: the
    // lowest non-empty slot donates one die to the slot above it and the
    // remaining dice of that slot move back to slot 0. When all dice have
    // reached the last slot, the state is cleared so the following call
    // returns None.
    fn next(&mut self) -> Option<Outcome> {
        // Locate the lowest non-empty face slot.
        let mut i = 0;
        while i < SIDES && self.histogram[i] == 0 {
            i += 1;
        }
        if i == SIDES {
            // Cleared state: the enumeration is exhausted.
            return None;
        }
        let result = Outcome { histogram: self.histogram };
        if i + 1 == SIDES {
            // Every die sits on the last face — this was the final outcome.
            self.histogram[i] = 0;
        } else {
            self.histogram[i + 1] += 1;
            let v = self.histogram[i] - 1;
            self.histogram[i] = 0;
            self.histogram[0] = v;
        }
        Some(result)
    }
}
#[cfg(test)]
mod tests {
    use crate::*;
    use crate::constants::*;
    /// The multiplicities over all distinct outcomes must sum to the total
    /// number of ordered rolls, SIDES^DICE_COUNT.
    #[test]
    fn test_multiplicity() {
        let mut sum = 0;
        for o in outcomes() {
            sum += o.multiplicity();
        }
        let mut exp = 1;
        for _ in 0..DICE_COUNT {
            exp *= SIDES;
        }
        assert_eq!(sum, exp);
    }
}
|
use super::*;
/// Checks `manhattan_distance` from the origin against hand-computed cases.
#[test]
fn test_manhattan_distance() {
    let reference_point = Location::new(0, 0, 0);
    let good_cases: Vec<(Location, usize)> = vec![
        (Location::new(0, 3, 0), 3),
        (Location::new(3, 0, 0), 3),
        (Location::new(-6, -6, 0), 12),
        (Location::new(-3, 6, 0), 9),
    ];
    for (loc, expected) in good_cases {
        assert_eq!(reference_point.manhattan_distance(&loc), expected);
    }
}
/// Applying a single direction moves the location and, per the cases below,
/// the third component accumulates the traveled distance.
#[test]
fn test_absolute_translation() {
    let good_cases: Vec<(Location, Direction, Location)> = vec![
        (
            Location::new(12, -3, 0),
            Direction::Down(9),
            Location::new(12, -12, 9),
        ),
        (
            Location::new(7, 38, 0),
            Direction::Left(7),
            Location::new(0, 38, 7),
        ),
        (
            Location::new(7, 38, 0),
            Direction::Right(100),
            Location::new(107, 38, 100),
        ),
        (
            Location::new(0, 0, 0),
            Direction::Up(4),
            Location::new(0, 4, 4),
        ),
    ];
    for (loc, dir, expected) in good_cases {
        assert_eq!(loc.apply_direction(&dir), expected);
    }
}
/// Folding a list of directions yields every intermediate location,
/// starting with the initial position itself.
#[test]
fn test_series_of_absolute_translations() {
    let initial_position = Location::new(0, 0, 0);
    let direction_list: Vec<Direction> = vec![
        Direction::Down(73),
        Direction::Down(7),
        Direction::Right(45),
        Direction::Left(20),
        Direction::Up(90),
        Direction::Left(50),
    ];
    let expected_locations: Vec<Location> = vec![
        Location::new(0, 0, 0),
        Location::new(0, -73, 73),
        Location::new(0, -80, 80),
        Location::new(45, -80, 125),
        Location::new(25, -80, 145),
        Location::new(25, 10, 235),
        Location::new(-25, 10, 285),
    ];
    assert_eq!(
        relative_to_absolute(initial_position, &direction_list),
        expected_locations
    );
}
/// Parses single direction tokens like `D23` into the matching variant.
#[test]
fn test_individual_direction() {
    let good_cases: Vec<(&'static str, Direction)> = vec![
        ("D23", Direction::Down(23)),
        ("L100", Direction::Left(100)),
        ("R2", Direction::Right(2)),
        ("U12384", Direction::Up(12384)),
    ];
    for (input, expected) in good_cases {
        assert_eq!(Direction::from_str(&input), Ok(expected));
    }
}
/// Parses comma-separated direction lists in input order.
#[test]
fn test_parsing_directions() {
    let cases: Vec<(&'static str, Vec<Direction>)> = vec![
        (
            "R8,U5,L5,D3",
            vec![
                Direction::Right(8),
                Direction::Up(5),
                Direction::Left(5),
                Direction::Down(3),
            ],
        ),
        (
            "U7,R6,D4,L4",
            vec![
                Direction::Up(7),
                Direction::Right(6),
                Direction::Down(4),
                Direction::Left(4),
            ],
        ),
    ];
    for (input, expected) in cases {
        let result = parse_directions(&input).unwrap();
        assert_eq!(result, expected);
    }
}
/// Classifies ordered point triples as colinear, clockwise, or
/// counter-clockwise.
#[test]
fn test_location_orientation() {
    let cases: Vec<(Location, Location, Location, Orientation)> = vec![
        (
            Location::new(0, 0, 0),
            Location::new(0, 5, 0),
            Location::new(0, 10, 0),
            Orientation::Colinear,
        ),
        (
            Location::new(0, 0, 0),
            Location::new(0, 5, 0),
            Location::new(5, 10, 0),
            Orientation::Clockwise,
        ),
        (
            Location::new(0, 0, 0),
            Location::new(0, 5, 0),
            Location::new(-5, 10, 0),
            Orientation::CounterClockwise,
        ),
        (
            Location::new(0, 0, 0),
            Location::new(4, 4, 0),
            Location::new(1, 1, 0),
            Orientation::Colinear,
        ),
        (
            Location::new(0, 0, 0),
            Location::new(4, 4, 0),
            Location::new(1, 2, 0),
            Orientation::CounterClockwise,
        ),
    ];
    for (p1, p2, p3, orientation) in cases {
        assert_eq!(
            Orientation::from_three_locations(&p1, &p2, &p3),
            orientation
        );
    }
}
/// Point-on-segment membership, including the degenerate single-point
/// segment.
#[test]
fn test_location_on_segments() {
    let cases: Vec<(Location, Location, Location, bool)> = vec![
        (
            Location::new(0, 0, 0),
            Location::new(0, 10, 0),
            Location::new(0, 5, 0),
            true,
        ),
        (
            Location::new(1, 1, 0),
            Location::new(5, 5, 0),
            Location::new(3, 3, 0),
            true,
        ),
        (
            Location::new(1, 1, 0),
            Location::new(5, 5, 0),
            Location::new(3, 0, 0),
            false,
        ),
        (
            Location::new(1, 1, 0),
            Location::new(1, 1, 0),
            Location::new(1, 1, 0),
            true,
        ),
    ];
    for (p1, p2, p3, expectation) in cases {
        assert_eq!(LineSegment(p1, p2).is_present(&p3), expectation);
    }
}
/// Segment-intersection predicate across crossing, endpoint-touching,
/// parallel, and colinear (overlapping and disjoint) configurations.
#[test]
fn test_intersection_checks() {
    let cases: Vec<(Location, Location, Location, Location, bool)> = vec![
        // Normal intersection
        (
            Location::new(1, 1, 0),
            Location::new(5, 5, 0),
            Location::new(5, 1, 0),
            Location::new(1, 5, 0),
            true,
        ),
        // Overlapping endpoint
        (
            Location::new(1, 1, 0),
            Location::new(5, 5, 0),
            Location::new(3, 3, 0),
            Location::new(1, 6, 0),
            true,
        ),
        // Non-intersecting segments (the lines would intersect)
        (
            Location::new(-5, 3, 0),
            Location::new(5, 3, 0),
            Location::new(0, -5, 0),
            Location::new(0, 0, 0),
            false,
        ),
        // Non-intersecting segments (the lines would intersect at an endpoint)
        (
            Location::new(-5, 3, 0),
            Location::new(5, 3, 0),
            Location::new(-5, -5, 0),
            Location::new(-5, 0, 0),
            false,
        ),
        // Parallel but non-intersecting
        (
            Location::new(1, 1, 0),
            Location::new(5, 5, 0),
            Location::new(1, 2, 0),
            Location::new(5, 6, 0),
            false,
        ),
        // Colinear and intersecting
        (
            Location::new(-5, 0, 0),
            Location::new(-1, 0, 0),
            Location::new(-2, 0, 0),
            Location::new(3, 0, 0),
            true,
        ),
        // Colinear and non-intersecting
        (
            Location::new(-7, 2, 0),
            Location::new(-4, 2, 0),
            Location::new(0, 2, 0),
            Location::new(4, 2, 0),
            false,
        ),
    ];
    for (p1, p2, p3, p4, expectation) in cases {
        let line_seg1 = LineSegment(p1, p2);
        let line_seg2 = LineSegment(p3, p4);
        assert_eq!(line_seg1.intersects(&line_seg2), expectation);
    }
}
/// N locations produce N-1 consecutive line segments (0 for fewer than 2).
#[test]
fn test_location_set_to_line_set() {
    let location_set = vec![];
    let line_set: Vec<LineSegment> = vec![];
    assert_eq!(location_set_to_line_set(location_set), line_set);
    // One location isn't enough to make a line
    let location_set = vec![Location::new(0, 0, 0)];
    let line_set: Vec<LineSegment> = vec![];
    assert_eq!(location_set_to_line_set(location_set), line_set);
    // Two is, and here after I'd expect N-1 line segments
    let location_set = vec![Location::new(-12, 56, 0), Location::new(3, 7, 0)];
    let line_set: Vec<LineSegment> = vec![LineSegment(
        Location::new(-12, 56, 0),
        Location::new(3, 7, 0),
    )];
    assert_eq!(location_set_to_line_set(location_set), line_set);
    let location_set = vec![
        Location::new(1, 2, 0),
        Location::new(3, 4, 0),
        Location::new(5, 6, 0),
    ];
    let line_set: Vec<LineSegment> = vec![
        LineSegment(Location::new(1, 2, 0), Location::new(3, 4, 0)),
        LineSegment(Location::new(3, 4, 0), Location::new(5, 6, 0)),
    ];
    assert_eq!(location_set_to_line_set(location_set), line_set);
}
/// Computes the actual intersection location (and the boolean intersects
/// flag) for representative segment pairs.
#[test]
fn test_line_segment_intersection_calculation() {
    let cases: Vec<(
        Location,
        Location,
        Location,
        Location,
        Option<Location>,
        bool,
    )> = vec![
        // Parallel
        (
            Location::new(1, 1, 0),
            Location::new(1, 2, 0),
            Location::new(2, 1, 0),
            Location::new(2, 2, 0),
            None,
            false,
        ),
        // Meet at origin (overlapping line segments)
        (
            Location::new(0, 2, 0),
            Location::new(0, -2, 0),
            Location::new(2, 0, 0),
            Location::new(-2, 0, 0),
            Some(Location::new(0, 0, 4)),
            true,
        ),
        // Meet at a non-overlapping location
        (
            Location::new(1, 5, 0),
            Location::new(2, 6, 0),
            Location::new(1, 9, 0),
            Location::new(2, 8, 0),
            Some(Location::new(3, 7, 8)),
            false,
        ),
        // Parallel touching at one point only
        (
            Location::new(0, 0, 0),
            Location::new(9, 0, 0),
            Location::new(0, 0, 0),
            Location::new(-9, 0, 0),
            Some(Location::new(0, 0, 0)),
            true,
        ),
    ];
    for (l1, l2, l3, l4, result, intersects) in cases {
        let line_seg1 = LineSegment(l1, l2);
        let line_seg2 = LineSegment(l3, l4);
        assert_eq!(line_seg1.intersects(&line_seg2), intersects);
        assert_eq!(line_seg1.intersecting_location(&line_seg2), result);
    }
}
|
use jsonrpc_core::IoHandler;
use jsonrpc_http_server::{AccessControlAllowOrigin, DomainsValidation, ServerBuilder};
use tendermint_light_client::supervisor::Handle;
use crate::error;
pub use sealed::{Client, Rpc, Server};
/// Run the given [`Server`] on the given address and blocks until closed.
///
/// n.b. The underlying server has semantics to close on drop. Also, it does not offer any way to
/// get the underlying Future to await, so we are left with this rather rudimentary way to control
/// the lifecycle. Should we be interested in a more controlled way to close the server we can
/// expose a handle in the future.
///
/// # Errors
///
/// Fails when `addr` cannot be parsed or the HTTP server cannot bind/start.
pub fn run<H>(server: Server<H>, addr: &str) -> Result<(), error::Error>
where
    H: Handle + Send + Sync + 'static,
{
    let mut io = IoHandler::new();
    io.extend_with(server.to_delegate());
    // CORS: any origin is allowed for this RPC endpoint.
    let srv = ServerBuilder::new(io)
        .cors(DomainsValidation::AllowOnly(vec![
            AccessControlAllowOrigin::Any,
        ]))
        .start_http(&addr.parse().map_err(error::Kind::from)?)
        .map_err(|e| error::Kind::Io.context(e))?;
    // Blocks the calling thread until the server shuts down.
    srv.wait();
    Ok(())
}
mod sealed {
    use jsonrpc_core::futures::future::{self, Future, FutureResult};
    use jsonrpc_core::types::Error;
    use jsonrpc_derive::rpc;
    use tendermint_light_client::supervisor::Handle;
    use tendermint_light_client::types::LatestStatus;
    use tendermint_light_client::types::LightBlock;
    /// JSON-RPC interface of the light node.
    #[rpc]
    pub trait Rpc {
        /// Returns the latest trusted block.
        #[rpc(name = "state")]
        fn state(&self) -> FutureResult<Option<LightBlock>, Error>;
        /// Returns the latest status.
        #[rpc(name = "status")]
        fn status(&self) -> FutureResult<LatestStatus, Error>;
    }
    // Client type generated by the #[rpc] derive on the trait above.
    pub use self::rpc_impl_Rpc::gen_client::Client;
    /// RPC server backed by a light-client supervisor handle.
    pub struct Server<H>
    where
        H: Handle + Send + Sync,
    {
        handle: H,
    }
    impl<H> Server<H>
    where
        H: Handle + Send + Sync,
    {
        /// Wraps the given supervisor handle.
        pub fn new(handle: H) -> Self {
            Self { handle }
        }
    }
    impl<H> Rpc for Server<H>
    where
        H: Handle + Send + Sync + 'static,
    {
        fn state(&self) -> FutureResult<Option<LightBlock>, Error> {
            // Map domain errors onto a JSON-RPC internal error, attaching
            // the error kind as structured data when it serializes.
            let res = self.handle.latest_trusted().map_err(|e| {
                let mut err = Error::internal_error();
                err.message = e.to_string();
                err.data = serde_json::to_value(e.kind()).ok();
                err
            });
            future::result(res)
        }
        fn status(&self) -> FutureResult<LatestStatus, Error> {
            // Same error mapping as `state` above.
            let res = self.handle.latest_status().map_err(|e| {
                let mut err = Error::internal_error();
                err.message = e.to_string();
                err.data = serde_json::to_value(e.kind()).ok();
                err
            });
            future::result(res)
        }
    }
}
#[cfg(test)]
mod test {
use futures::compat::Future01CompatExt as _;
use jsonrpc_core::futures::future::Future;
use jsonrpc_core::IoHandler;
use jsonrpc_core_client::transports::local;
use pretty_assertions::assert_eq;
use tendermint_light_client::errors::Error;
use tendermint_light_client::supervisor::Handle;
use tendermint_light_client::types::LatestStatus;
use tendermint_light_client::types::LightBlock;
use super::{Client, Rpc as _, Server};
/// `state` must round-trip the trusted light block supplied by the mock.
#[tokio::test]
async fn state() {
    let server = Server::new(MockHandle {});
    let fut = {
        let mut io = IoHandler::new();
        io.extend_with(server.to_delegate());
        // Wire client and server together over an in-process transport.
        let (client, server) = local::connect::<Client, _, _>(io);
        client.state().join(server)
    };
    let (have, _) = fut.compat().await.unwrap();
    let want = serde_json::from_str(LIGHTBLOCK_JSON).unwrap();
    assert_eq!(have, want);
}
/// `status` must round-trip the latest status supplied by the mock.
#[tokio::test]
async fn status() {
    let server = Server::new(MockHandle {});
    let fut = {
        let mut io = IoHandler::new();
        io.extend_with(server.to_delegate());
        // Wire client and server together over an in-process transport.
        let (client, server) = local::connect::<Client, _, _>(io);
        client.status().join(server)
    };
    let (have, _) = fut.compat().await.unwrap();
    let want = serde_json::from_str(STATUS_JSON).unwrap();
    assert_eq!(have, want);
}
/// Test double for the supervisor handle, serving the canned JSON fixtures.
struct MockHandle;
impl Handle for MockHandle {
    fn latest_trusted(&self) -> Result<Option<LightBlock>, Error> {
        let block: LightBlock = serde_json::from_str(LIGHTBLOCK_JSON).unwrap();
        Ok(Some(block))
    }
    fn latest_status(&self) -> Result<LatestStatus, Error> {
        let status: LatestStatus = serde_json::from_str(STATUS_JSON).unwrap();
        Ok(status)
    }
}
// Fixture: a serialized LightBlock (signed header + validator sets) in the
// wire format the light client consumes.
const LIGHTBLOCK_JSON: &str = r#"
{
    "signed_header": {
        "header": {
            "version": {
                "block": "0",
                "app": "0"
            },
            "chain_id": "test-chain-01",
            "height": "1",
            "time": "2019-11-02T15:04:00Z",
            "last_block_id": {
                "hash": "",
                "parts": {
                    "total": "0",
                    "hash": ""
                }
            },
            "last_commit_hash": "",
            "data_hash": "",
            "validators_hash": "ADAE23D9D908638F3866C11A39E31CE4399AE6DE8EC8EBBCB1916B90C46EDDE3",
            "next_validators_hash": "ADAE23D9D908638F3866C11A39E31CE4399AE6DE8EC8EBBCB1916B90C46EDDE3",
            "consensus_hash": "048091BC7DDC283F77BFBF91D73C44DA58C3DF8A9CBC867405D8B7F3DAADA22F",
            "app_hash": "6170705F68617368",
            "last_results_hash": "",
            "evidence_hash": "",
            "proposer_address": "01F527D77D3FFCC4FCFF2DDC2952EEA5414F2A33"
        },
        "commit": {
            "height": "1",
            "round": "1",
            "block_id": {
                "hash": "76B0FB738138A2C934300D7B23C280B65965D7427DA4D5414B41C75EBC4AD4C3",
                "parts": {
                    "total": "1",
                    "hash": "073CE26981DF93820595E602CE63B810BC8F1003D6BB28DEDFF5B2F4F09811A1"
                }
            },
            "signatures": [
                {
                    "block_id_flag": 2,
                    "validator_address": "01F527D77D3FFCC4FCFF2DDC2952EEA5414F2A33",
                    "timestamp": "2019-11-02T15:04:10Z",
                    "signature": "NaNXQhv7SgBtcq+iHwItxlYUMGHP5MeFpTbyNsnLtzwM6P/EAAAexUH94+osvRDoiahUOoQrRlTiZrYGfahWBw=="
                },
                {
                    "block_id_flag": 2,
                    "validator_address": "026CC7B6F3E62F789DBECEC59766888B5464737D",
                    "timestamp": "2019-11-02T15:04:10Z",
                    "signature": "tw0csJ1L1vkBG/71BMjrFEcA6VWjOx29WMwkg1cmDn82XBjRFz+HJu7amGoIj6WLL2p26pO25yQR49crsYQ+AA=="
                }
            ]
        }
    },
    "validator_set": {
        "validators": [
            {
                "address": "01F527D77D3FFCC4FCFF2DDC2952EEA5414F2A33",
                "pub_key": {
                    "type": "tendermint/PubKeyEd25519",
                    "value": "OAaNq3DX/15fGJP2MI6bujt1GRpvjwrqIevChirJsbc="
                },
                "voting_power": "50",
                "proposer_priority": "-50"
            },
            {
                "address": "026CC7B6F3E62F789DBECEC59766888B5464737D",
                "pub_key": {
                    "type": "tendermint/PubKeyEd25519",
                    "value": "+vlsKpn6ojn+UoTZl+w+fxeqm6xvUfBokTcKfcG3au4="
                },
                "voting_power": "50",
                "proposer_priority": "50"
            }
        ],
        "proposer": {
            "address": "01F527D77D3FFCC4FCFF2DDC2952EEA5414F2A33",
            "pub_key": {
                "type": "tendermint/PubKeyEd25519",
                "value": "OAaNq3DX/15fGJP2MI6bujt1GRpvjwrqIevChirJsbc="
            },
            "voting_power": "50",
            "proposer_priority": "-50"
        }
    },
    "next_validator_set": {
        "validators": [
            {
                "address": "01F527D77D3FFCC4FCFF2DDC2952EEA5414F2A33",
                "pub_key": {
                    "type": "tendermint/PubKeyEd25519",
                    "value": "OAaNq3DX/15fGJP2MI6bujt1GRpvjwrqIevChirJsbc="
                },
                "voting_power": "50",
                "proposer_priority": "0"
            },
            {
                "address": "026CC7B6F3E62F789DBECEC59766888B5464737D",
                "pub_key": {
                    "type": "tendermint/PubKeyEd25519",
                    "value": "+vlsKpn6ojn+UoTZl+w+fxeqm6xvUfBokTcKfcG3au4="
                },
                "voting_power": "50",
                "proposer_priority": "0"
            }
        ],
        "proposer": {
            "address": "026CC7B6F3E62F789DBECEC59766888B5464737D",
            "pub_key": {
                "type": "tendermint/PubKeyEd25519",
                "value": "+vlsKpn6ojn+UoTZl+w+fxeqm6xvUfBokTcKfcG3au4="
            },
            "voting_power": "50",
            "proposer_priority": "0" }
    },
    "provider": "9D61B19DEFFD5A60BA844AF492EC2CC44449C569"
}
"#;
// Fixture: a serialized LatestStatus matching the mock's `latest_status`.
const STATUS_JSON: &str = r#"
{
    "block_hash": "5A55D7AF2DF9AE4BF4B46FDABBBAD1B66D37B5E044A4843AB0FB0EBEC3E0422C",
    "connected_nodes": [
        "BADFADAD0BEFEEDC0C0ADEADBEEFC0FFEEFACADE",
        "CEFEEDBADFADAD0C0CEEFACADE0ADEADBEEFC0FF"
    ],
    "height": 1565,
    "valset_hash": "74F2AC2B6622504D08DD2509E28CE731985CFE4D133C9DB0CB85763EDCA95AA3"
}"#;
}
|
use super::CtagsCommonArgs;
use crate::app::Args;
use crate::{send_response_from_cache, SendResponse};
use anyhow::Result;
use clap::Parser;
use filter::{FilterContext, SequentialSource};
use itertools::Itertools;
use maple_core::process::ShellCommand;
use maple_core::tools::ctags::{ProjectCtagsCommand, CTAGS_HAS_JSON_FEATURE};
use matcher::{MatchScope, MatcherBuilder};
use rayon::prelude::*;
use std::ops::Deref;
use std::sync::Arc;
use types::ClapItem;
/// Generate ctags recursively under the given directory.
#[derive(Parser, Debug, Clone)]
pub struct RecursiveTags {
    /// Query content.
    #[clap(long)]
    query: Option<String>,
    /// Runs as the forerunner job, create cache when necessary.
    #[clap(long)]
    forerunner: bool,
    /// Run in parallel.
    #[clap(long)]
    par_run: bool,
    /// Ctags common arguments.
    #[clap(flatten)]
    pub(super) c_args: CtagsCommonArgs,
}
impl RecursiveTags {
    /// Builds the ctags invocation: a std `Command` ready to spawn plus its
    /// reconstructed shell-string form wrapped in a `ShellCommand`.
    fn project_ctags_cmd(&self) -> Result<ProjectCtagsCommand> {
        let dir = self.c_args.dir()?;
        let exclude_args = self.c_args.exclude_args();
        let mut std_cmd = std::process::Command::new(ProjectCtagsCommand::TAGS_CMD[0]);
        std_cmd
            .current_dir(&dir)
            .args(&ProjectCtagsCommand::TAGS_CMD[1..])
            .args(exclude_args);
        if let Some(ref languages) = self.c_args.languages {
            std_cmd.arg(format!("--languages={languages}"));
        }
        // Join program + args back into one string; together with `dir`
        // this identifies the command for the ShellCommand wrapper.
        let shell_cmd = std::iter::once(std_cmd.get_program())
            .chain(std_cmd.get_args())
            .map(|s| s.to_string_lossy())
            .join(" ");
        let shell_cmd = ShellCommand::new(shell_cmd, dir);
        Ok(ProjectCtagsCommand::new(std_cmd, shell_cmd))
    }
    /// Runs the tag generation. In forerunner mode the tag cache is
    /// (re)built or reused and its contents sent back; otherwise tags are
    /// generated and filtered against `self.query`, sequentially or in
    /// parallel depending on `par_run`.
    ///
    /// Errors out early when the installed ctags lacks the +json feature.
    pub fn run(
        &self,
        Args {
            no_cache,
            icon,
            number,
            ..
        }: Args,
    ) -> Result<()> {
        if !CTAGS_HAS_JSON_FEATURE.deref() {
            return Err(anyhow::anyhow!(
                "ctags executable is not compiled with +json feature, please recompile it."
            ));
        }
        let mut ctags_cmd = self.project_ctags_cmd()?;
        if self.forerunner {
            // Reuse an existing cache unless caching is disabled or absent.
            let (total, cache) = if no_cache {
                ctags_cmd.par_create_cache()?
            } else if let Some((total, cache_path)) = ctags_cmd.ctags_cache() {
                (total, cache_path)
            } else {
                ctags_cmd.par_create_cache()?
            };
            send_response_from_cache(&cache, total, SendResponse::Json, icon);
        } else {
            let filter_context = FilterContext::new(
                icon,
                number,
                None,
                MatcherBuilder::new().match_scope(MatchScope::TagName),
            );
            if self.par_run {
                filter::par_dyn_run_list(
                    self.query.as_deref().unwrap_or_default(),
                    filter_context,
                    ctags_cmd
                        .tag_item_iter()?
                        .map(|tag_item| Arc::new(tag_item) as Arc<dyn ClapItem>)
                        .par_bridge(),
                );
            } else {
                filter::dyn_run(
                    self.query.as_deref().unwrap_or_default(),
                    filter_context,
                    SequentialSource::List(ctags_cmd.tag_item_iter()?.map(|tag_item| {
                        let item: Arc<dyn ClapItem> = Arc::new(tag_item);
                        item
                    })),
                )?;
            }
        }
        Ok(())
    }
}
|
// Decoding metadata from a single crate's metadata
import std::ebmlivec;
import std::ivec;
import std::option;
import std::str;
import std::ioivec;
import std::map::hashmap;
import syntax::ast;
import front::attr;
import middle::ty;
import common::*;
import tydecode::parse_def_id;
import tydecode::parse_ty_data;
import driver::session;
import syntax::print::pprust;
import cstore;
export get_symbol;
export get_tag_variants;
export get_type;
export get_type_param_count;
export get_type_param_kinds;
export lookup_defs;
export get_crate_attributes;
export list_crate_metadata;
export crate_dep;
export get_crate_deps;
export external_resolver;
// A function that takes a def_id relative to the crate being searched and
// returns a def_id relative to the compilation environment, i.e. if we hit a
// def_id for an item defined in another crate, somebody needs to figure out
// what crate that's in and give us a def_id that makes sense for the current
// build.
// (Pre-1.0 Rust dialect: `fn(...)` here denotes a bare function type.)
type external_resolver = fn(&ast::def_id) -> ast::def_id ;
// Returns every doc in the index of `d` whose key hashes to `hash` and
// whose raw key bytes satisfy `eq_fn`. The index is a 256-slot hash
// table; each slot is a 4-byte big-endian offset to a bucket of
// (4-byte position, key bytes) elements.
fn lookup_hash(d: &ebmlivec::doc, eq_fn: fn(&u8[]) -> bool , hash: uint) ->
   ebmlivec::doc[] {
    let index = ebmlivec::get_doc(d, tag_index);
    let table = ebmlivec::get_doc(index, tag_index_table);
    // Each table entry is 4 bytes; hash % 256 picks the slot.
    let hash_pos = table.start + hash % 256u * 4u;
    let pos = ebmlivec::be_uint_from_bytes(d.data, hash_pos, 4u);
    let bucket = ebmlivec::doc_at(d.data, pos);
    // Awkward logic because we can't ret from foreach yet
    let result: ebmlivec::doc[] = ~[];
    let belt = tag_index_buckets_bucket_elt;
    for each elt: ebmlivec::doc in ebmlivec::tagged_docs(bucket, belt) {
        // First 4 bytes: offset of the item doc; remainder: the key.
        let pos = ebmlivec::be_uint_from_bytes(elt.data, elt.start, 4u);
        if eq_fn(ivec::slice[u8](*elt.data, elt.start + 4u, elt.end)) {
            result += ~[ebmlivec::doc_at(d.data, pos)];
        }
    }
    ret result;
}
// Returns the item doc for `item_id` in the `items` index, or none if it
// is not present.
fn maybe_find_item(item_id: int, items: &ebmlivec::doc) ->
   option::t[ebmlivec::doc] {
    // Index keys are the item's node id as 4 big-endian bytes.
    fn eq_item(bytes: &u8[], item_id: int) -> bool {
        ret ebmlivec::be_uint_from_bytes(@bytes, 0u, 4u) as int == item_id;
    }
    let eqer = bind eq_item(_, item_id);
    let found = lookup_hash(items, eqer, hash_node_id(item_id));
    if ivec::len(found) == 0u {
        ret option::none[ebmlivec::doc];
    } else { ret option::some[ebmlivec::doc](found.(0)); }
}
// Like maybe_find_item, but fails if the item is absent.
fn find_item(item_id: int, items: &ebmlivec::doc) -> ebmlivec::doc {
    ret option::get(maybe_find_item(item_id, items));
}
// Looks up an item in the given metadata and returns an ebmlivec doc pointing
// to the item data.
fn lookup_item(item_id: int, data: &@u8[]) -> ebmlivec::doc {
    let items = ebmlivec::get_doc(ebmlivec::new_doc(data), tag_items);
    ret find_item(item_id, items);
}
// Returns the one-byte family code ('c', 'f', 't', ...) of an item doc.
fn item_family(item: &ebmlivec::doc) -> u8 {
    let fam = ebmlivec::get_doc(item, tag_items_data_item_family);
    ret ebmlivec::doc_as_uint(fam) as u8;
}
// Returns the link-time symbol name recorded for an item.
fn item_symbol(item: &ebmlivec::doc) -> str {
    let sym = ebmlivec::get_doc(item, tag_items_data_item_symbol);
    ret str::unsafe_from_bytes_ivec(ebmlivec::doc_data(sym));
}
// For a tag-variant item, returns the def_id of the owning tag type.
fn variant_tag_id(d: &ebmlivec::doc) -> ast::def_id {
    let tagdoc = ebmlivec::get_doc(d, tag_items_data_item_tag_id);
    ret parse_def_id(ebmlivec::doc_data(tagdoc));
}
// Decodes an item's type, translating def_ids embedded in the serialized
// type data: ids local to the searched crate are rebased onto
// `this_cnum`; ids from other crates go through `extres`.
fn item_type(item: &ebmlivec::doc, this_cnum: ast::crate_num, tcx: ty::ctxt,
             extres: &external_resolver) -> ty::t {
    fn parse_external_def_id(this_cnum: ast::crate_num,
                             extres: &external_resolver, s: str) ->
       ast::def_id {
        let buf = str::bytes_ivec(s);
        let external_def_id = parse_def_id(buf);
        // This item was defined in the crate we're searching if it has the
        // local crate number, otherwise we need to search a different crate
        if external_def_id.crate == ast::local_crate {
            ret {crate: this_cnum, node: external_def_id.node};
        } else { ret extres(external_def_id); }
    }
    let tp = ebmlivec::get_doc(item, tag_items_data_item_type);
    let def_parser = bind parse_external_def_id(this_cnum, extres, _);
    ret parse_ty_data(item.data, this_cnum, tp.start, tp.end - tp.start,
                      def_parser, tcx);
}
// Decodes the kinds (unique/shared/pinned) of an item's type parameters.
// Each kinds doc is a variable-length count followed by one code byte per
// type parameter.
fn item_ty_param_kinds(item: &ebmlivec::doc) -> ast::kind[] {
    let ks: ast::kind[] = ~[];
    let tp = tag_items_data_item_ty_param_kinds;
    for each p: ebmlivec::doc in ebmlivec::tagged_docs(item, tp) {
        let dat : u8[] = ebmlivec::doc_data(p);
        let vi = ebmlivec::vint_at(dat, 0u);
        let i = 0u;
        while i < vi.val {
            let k = alt dat.(vi.next + i) as char {
              'u' { ast::kind_unique }
              's' { ast::kind_shared }
              'p' { ast::kind_pinned }
            };
            ks += ~[k];
            i += 1u;
        }
    }
    ret ks;
}
// Returns the def_ids of all variants of a tag item, rebased onto
// `this_cnum`.
fn tag_variant_ids(item: &ebmlivec::doc, this_cnum: ast::crate_num) ->
   ast::def_id[] {
    let ids: ast::def_id[] = ~[];
    let v = tag_items_data_item_variant;
    for each p: ebmlivec::doc in ebmlivec::tagged_docs(item, v) {
        let ext = parse_def_id(ebmlivec::doc_data(p));
        ids += ~[{crate: this_cnum, node: ext.node}];
    }
    ret ids;
}
// Given a path and serialized crate metadata, returns the ID of the
// definition the path refers to.
fn resolve_path(path: &ast::ident[], data: @u8[]) -> ast::def_id[] {
    fn eq_item(data: &u8[], s: str) -> bool {
        ret str::eq(str::unsafe_from_bytes_ivec(data), s);
    }
    // Paths are stored joined with "::" and indexed by string hash.
    let s = str::connect_ivec(path, "::");
    let md = ebmlivec::new_doc(data);
    let paths = ebmlivec::get_doc(md, tag_paths);
    let eqer = bind eq_item(_, s);
    let result: ast::def_id[] = ~[];
    for doc: ebmlivec::doc in lookup_hash(paths, eqer, hash_path(s)) {
        let did_doc = ebmlivec::get_doc(doc, tag_def_id);
        result += ~[parse_def_id(ebmlivec::doc_data(did_doc))];
    }
    ret result;
}
// Crate metadata queries
// Resolves `path` in `data` and returns a def (relative to `cnum`) for
// each definition it names.
fn lookup_defs(data: &@u8[], cnum: ast::crate_num, path: &ast::ident[]) ->
   ast::def[] {
    ret ivec::map(bind lookup_def(cnum, data, _), resolve_path(path, data));
}
// FIXME doesn't yet handle re-exported externals
// Builds an ast::def for the item at `did_`, dispatching on the item's
// one-byte family code (keep in sync with family_has_type_params and
// item_family_to_str).
fn lookup_def(cnum: ast::crate_num, data: @u8[], did_: &ast::def_id) ->
   ast::def {
    let item = lookup_item(did_.node, data);
    let fam_ch = item_family(item);
    let did = {crate: cnum, node: did_.node};
    // We treat references to tags as references to types.
    let def =
        alt fam_ch as char {
          'c' { ast::def_const(did) }
          'f' { ast::def_fn(did, ast::impure_fn) }
          'p' { ast::def_fn(did, ast::pure_fn) }
          'F' { ast::def_native_fn(did) }
          'y' { ast::def_ty(did) }
          'T' { ast::def_native_ty(did) }
          't' { ast::def_ty(did) }
          'm' { ast::def_mod(did) }
          'n' { ast::def_native_mod(did) }
          'v' {
            let tid = variant_tag_id(item);
            // Rebase the owning tag's id onto the requesting crate number.
            tid = {crate: cnum, node: tid.node};
            ast::def_variant(tid, did)
          }
        };
    ret def;
}
// Decodes the (type-parameter kinds, type) signature of `def`'s item.
// Kinds are only stored for families that can be generic.
fn get_type(data: @u8[], def: ast::def_id, tcx: &ty::ctxt,
            extres: &external_resolver) -> ty::ty_param_kinds_and_ty {
    let this_cnum = def.crate;
    let node_id = def.node;
    let item = lookup_item(node_id, data);
    let t = item_type(item, this_cnum, tcx, extres);
    let tp_kinds : ast::kind[];
    let fam_ch = item_family(item);
    let has_ty_params = family_has_type_params(fam_ch);
    if has_ty_params {
        tp_kinds = item_ty_param_kinds(item);
    } else { tp_kinds = ~[]; }
    ret {kinds: tp_kinds, ty: t};
}
// Number of type parameters on item `id`.
fn get_type_param_count(data: @u8[], id: ast::node_id) -> uint {
    ret ivec::len(get_type_param_kinds(data, id));
}
// Kinds of the type parameters on item `id`.
fn get_type_param_kinds(data: @u8[], id: ast::node_id) -> ast::kind[] {
    ret item_ty_param_kinds(lookup_item(id, data));
}
// Link-time symbol name of item `id`.
fn get_symbol(data: @u8[], id: ast::node_id) -> str {
    ret item_symbol(lookup_item(id, data));
}
// Returns variant info (argument types, constructor type, id) for every
// variant of the tag `def`.
fn get_tag_variants(data: &@u8[], def: ast::def_id, tcx: &ty::ctxt,
                    extres: &external_resolver) -> ty::variant_info[] {
    let external_crate_id = def.crate;
    // NOTE(review): the `data` parameter is shadowed here and never used;
    // the metadata is re-fetched from the crate store instead.
    let data =
        cstore::get_crate_data(tcx.sess.get_cstore(), external_crate_id).data;
    let items = ebmlivec::get_doc(ebmlivec::new_doc(data), tag_items);
    let item = find_item(def.node, items);
    let infos: ty::variant_info[] = ~[];
    let variant_ids = tag_variant_ids(item, external_crate_id);
    for did: ast::def_id in variant_ids {
        let item = find_item(did.node, items);
        let ctor_ty = item_type(item, external_crate_id, tcx, extres);
        let arg_tys: ty::t[] = ~[];
        // A variant with arguments has a fn-typed constructor; collect
        // the argument types. Otherwise it's nullary.
        alt ty::struct(tcx, ctor_ty) {
          ty::ty_fn(_, args, _, _, _) {
            for a: ty::arg in args { arg_tys += ~[a.ty]; }
          }
          _ {
            // Nullary tag variant.
          }
        }
        infos += ~[{args: arg_tys, ctor_ty: ctor_ty, id: did}];
    }
    ret infos;
}
// Whether items of family `fam_ch` can carry type parameters. Keep in
// sync with lookup_def's family dispatch.
fn family_has_type_params(fam_ch: u8) -> bool {
    ret alt fam_ch as char {
          'c' { false }
          'f' { true }
          'p' { true }
          'F' { true }
          'y' { true }
          't' { true }
          'T' { false }
          'm' { false }
          'n' { false }
          'v' { true }
        };
}
// Decodes a path-index entry: a 4-byte big-endian offset followed by the
// path string bytes.
fn read_path(d: &ebmlivec::doc) -> {path: str, pos: uint} {
    let desc = ebmlivec::doc_data(d);
    let pos = ebmlivec::be_uint_from_bytes(@desc, 0u, 4u);
    let pathbytes = ivec::slice[u8](desc, 4u, ivec::len[u8](desc));
    let path = str::unsafe_from_bytes_ivec(pathbytes);
    ret {path: path, pos: pos};
}
// Human-readable family name of `id`, or "external" for ids belonging to
// other crates.
fn describe_def(items: &ebmlivec::doc, id: ast::def_id) -> str {
    if id.crate != ast::local_crate { ret "external"; }
    ret item_family_to_str(item_family(find_item(id.node, items)));
}
// Maps a one-byte item family code to a display string. Keep in sync
// with lookup_def.
fn item_family_to_str(fam: u8) -> str {
    alt fam as char {
      'c' { ret "const"; }
      'f' { ret "fn"; }
      'p' { ret "pred"; }
      'F' { ret "native fn"; }
      'y' { ret "type"; }
      'T' { ret "native type"; }
      't' { ret "type"; }
      'm' { ret "mod"; }
      'n' { ret "native mod"; }
      'v' { ret "tag"; }
    }
}
// Decodes all meta items stored under `md`: bare words, name=value
// pairs, and (recursively) list items.
fn get_meta_items(md: &ebmlivec::doc) -> (@ast::meta_item)[] {
    let items: (@ast::meta_item)[] = ~[];
    // Word items: just a name.
    for each meta_item_doc: ebmlivec::doc in
             ebmlivec::tagged_docs(md, tag_meta_item_word) {
        let nd = ebmlivec::get_doc(meta_item_doc, tag_meta_item_name);
        let n = str::unsafe_from_bytes_ivec(ebmlivec::doc_data(nd));
        items += ~[attr::mk_word_item(n)];
    }
    // name = "value" items; values are decoded as strings.
    for each meta_item_doc: ebmlivec::doc in
             ebmlivec::tagged_docs(md, tag_meta_item_name_value) {
        let nd = ebmlivec::get_doc(meta_item_doc, tag_meta_item_name);
        let vd = ebmlivec::get_doc(meta_item_doc, tag_meta_item_value);
        let n = str::unsafe_from_bytes_ivec(ebmlivec::doc_data(nd));
        let v = str::unsafe_from_bytes_ivec(ebmlivec::doc_data(vd));
        // FIXME (#611): Should be able to decode meta_name_value variants,
        // but currently they can't be encoded
        items += ~[attr::mk_name_value_item_str(n, v)];
    }
    // List items: a name plus recursively decoded sub-items.
    for each meta_item_doc: ebmlivec::doc in
             ebmlivec::tagged_docs(md, tag_meta_item_list) {
        let nd = ebmlivec::get_doc(meta_item_doc, tag_meta_item_name);
        let n = str::unsafe_from_bytes_ivec(ebmlivec::doc_data(nd));
        let subitems = get_meta_items(meta_item_doc);
        items += ~[attr::mk_list_item(n, subitems)];
    }
    ret items;
}
// Decodes the attributes stored under `md`, if any are present.
fn get_attributes(md: &ebmlivec::doc) -> ast::attribute[] {
    let attrs: ast::attribute[] = ~[];
    alt ebmlivec::maybe_get_doc(md, tag_attributes) {
      option::some(attrs_d) {
        for each attr_doc: ebmlivec::doc in
                 ebmlivec::tagged_docs(attrs_d, tag_attribute) {
            let meta_items = get_meta_items(attr_doc);
            // Currently it's only possible to have a single meta item on
            // an attribute
            assert (ivec::len(meta_items) == 1u);
            let meta_item = meta_items.(0);
            // Spans are not serialized; synthesize an empty one.
            attrs +=
                ~[{node: {style: ast::attr_outer, value: *meta_item},
                   span: {lo: 0u, hi: 0u}}];
        }
      }
      option::none. { }
    }
    ret attrs;
}
// Pretty-prints each meta item under `meta_items` to `out`, one per line.
fn list_meta_items(meta_items: &ebmlivec::doc, out: ioivec::writer) {
    for mi: @ast::meta_item in get_meta_items(meta_items) {
        out.write_str(#fmt("%s\n", pprust::meta_item_to_str(*mi)));
    }
}
// Writes a "=Crate Attributes=" section listing `md`'s attributes.
fn list_crate_attributes(md: &ebmlivec::doc, out: ioivec::writer) {
    out.write_str("=Crate Attributes=\n");
    for attr: ast::attribute in get_attributes(md) {
        out.write_str(#fmt("%s\n", pprust::attribute_to_str(attr)));
    }
    out.write_str("\n\n");
}
// Decodes the top-level attributes of serialized crate metadata.
fn get_crate_attributes(data: @u8[]) -> ast::attribute[] {
    ret get_attributes(ebmlivec::new_doc(data));
}
// A dependency edge: the crate number assigned to a dependency plus the
// dependency's name.
type crate_dep = {cnum: ast::crate_num, ident: str};
// Decodes the crate-dependency list; crate numbers are assigned in
// listed order, starting at 1.
fn get_crate_deps(data: @u8[]) -> crate_dep[] {
    let deps: crate_dep[] = ~[];
    let cratedoc = ebmlivec::new_doc(data);
    let depsdoc = ebmlivec::get_doc(cratedoc, tag_crate_deps);
    let crate_num = 1;
    for each depdoc: ebmlivec::doc in
             ebmlivec::tagged_docs(depsdoc, tag_crate_dep) {
        let depname = str::unsafe_from_bytes_ivec(ebmlivec::doc_data(depdoc));
        deps += ~[{cnum: crate_num, ident: depname}];
        crate_num += 1;
    }
    ret deps;
}
// Writes an "=External Dependencies=" section to `out`, one
// "<cnum> <name>" line per dependency.
fn list_crate_deps(data: @u8[], out: ioivec::writer) {
    out.write_str("=External Dependencies=\n");
    for dep: crate_dep in get_crate_deps(data) {
        out.write_str(#fmt("%d %s\n", dep.cnum, dep.ident));
    }
    out.write_str("\n");
}
// Writes an "=Items=" section: every path in the path index together
// with a short description of the definition it resolves to.
fn list_crate_items(bytes: &@u8[], md: &ebmlivec::doc, out: ioivec::writer) {
    out.write_str("=Items=\n");
    let paths = ebmlivec::get_doc(md, tag_paths);
    let items = ebmlivec::get_doc(md, tag_items);
    let index = ebmlivec::get_doc(paths, tag_index);
    let bs = ebmlivec::get_doc(index, tag_index_buckets);
    // Walk every element of every bucket of the path hash table.
    for each bucket: ebmlivec::doc in
             ebmlivec::tagged_docs(bs, tag_index_buckets_bucket) {
        let et = tag_index_buckets_bucket_elt;
        for each elt: ebmlivec::doc in ebmlivec::tagged_docs(bucket, et) {
            let data = read_path(elt);
            let def = ebmlivec::doc_at(bytes, data.pos);
            let did_doc = ebmlivec::get_doc(def, tag_def_id);
            let did = parse_def_id(ebmlivec::doc_data(did_doc));
            out.write_str(#fmt("%s (%s)\n", data.path,
                               describe_def(items, did)));
        }
    }
    out.write_str("\n");
}
// Dumps a readable listing (attributes, dependencies, items) of
// serialized crate metadata to `out`.
fn list_crate_metadata(bytes: &@u8[], out: ioivec::writer) {
    let md = ebmlivec::new_doc(bytes);
    list_crate_attributes(md, out);
    list_crate_deps(bytes, out);
    list_crate_items(bytes, md, out);
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
|
/// Reads two whitespace-separated integer lists from stdin (each preceded
/// by a count line, which is read and discarded) and prints how many keys
/// from the second list occur in the sorted first list.
fn main() {
    // Element count for the array (unused; the parsed vec is authoritative).
    read::<u32>();
    let arr: Vec<u32> = read_as_vec();
    // Element count for the keys (also unused).
    read::<u32>();
    let keys: Vec<u32> = read_as_vec();
    let result = keys.iter().filter(|&&key| search(&arr, key)).count();
    println!("{}", result);
}
/// Returns `true` if `key` occurs in the sorted slice `arr`.
///
/// Delegates to the standard library's `slice::binary_search`, which
/// performs the same O(log n) lookup the previous hand-rolled loop did,
/// and trivially handles the empty-slice case.
fn search(arr: &[u32], key: u32) -> bool {
    arr.binary_search(&key).is_ok()
}
/// Reads one line from stdin and parses it into `T`.
///
/// Panics if stdin is unreadable or the line does not parse. `.ok().unwrap()`
/// is used instead of `Result::unwrap()` because `T::Err` carries no `Debug`
/// bound here, which `unwrap` on the `Result` would require.
fn read<T: std::str::FromStr>() -> T {
    let mut input = String::new();
    std::io::stdin().read_line(&mut input).unwrap();
    input.trim().parse::<T>().ok().unwrap()
}
/// Reads one line from stdin and parses each whitespace-separated token
/// into `T`, collecting the results into a `Vec`.
///
/// Panics if any token fails to parse (same contract as `read`).
fn read_as_vec<T: std::str::FromStr>() -> Vec<T> {
    let line = read::<String>();
    line.split_whitespace()
        .map(|token| token.parse().ok().unwrap())
        .collect()
}
#[cfg(test)]
mod test {
    use super::*;

    /// Every element of a sorted vector must be found.
    /// (`assert!(x)` replaces the unidiomatic `assert_eq!(true, x)`.)
    #[test]
    fn test_search_1() {
        let vec = vec![1, 2, 3, 4, 5];
        for key in 1..=5 {
            assert!(search(&vec, key));
        }
    }

    /// Values below, between, and above the vector's elements must not be
    /// found.
    #[test]
    fn test_search_2() {
        let vec = vec![2, 4, 6, 8, 10];
        for &key in &[1, 3, 5, 7, 9, 11] {
            assert!(!search(&vec, key));
        }
    }
}
|
use zxcvbn::{zxcvbn, ZxcvbnError};
use crate::{InputType, InputValueError};
/// Validates that `value` meets a minimum zxcvbn strength estimate.
///
/// Returns `Ok(())` when the estimated score is at least `min_score`,
/// and an `InputValueError` otherwise. A blank password is reported as
/// "too weak" rather than as a processing error; any other zxcvbn
/// failure maps to a generic error message.
pub fn min_password_strength<T: AsRef<str> + InputType>(
    value: &T,
    min_score: u8,
) -> Result<(), InputValueError<T>> {
    // zxcvbn's second argument is a list of user-specific words to
    // penalize; none are supplied here.
    match zxcvbn(value.as_ref(), &[]) {
        Ok(password_strength) => {
            if password_strength.score() < min_score {
                Err("password is too weak".into())
            } else {
                Ok(())
            }
        }
        // Blank input is a distinct zxcvbn error; treat it as weak, not
        // as an internal failure.
        Err(ZxcvbnError::BlankPassword) => Err("password is too weak".into()),
        _ => Err("error processing password strength".into()),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Common/weak passwords (including the empty string) must be rejected
    // at min_score 3; a long multi-word password must pass.
    #[test]
    fn test_min_password_strength() {
        assert!(min_password_strength(&"password".to_string(), 3).is_err());
        assert!(min_password_strength(&"query".to_string(), 3).is_err());
        assert!(min_password_strength(&"P@ssword1".to_string(), 3).is_err());
        assert!(min_password_strength(&"".to_string(), 3).is_err());
        assert!(min_password_strength(&"Some!Secure!Password".to_string(), 3).is_ok());
    }
}
|
use std::fmt::Debug;
/// A minimal generic trait with a single by-value method.
trait DoSomething<T> {
    fn do_sth(&self, value: T);
}
/// Implemented for `&'a usize` (the reference type itself, not `usize`):
/// debug-prints any `Debug` value it is given.
impl<'a, T: Debug> DoSomething<T> for &'a usize {
    fn do_sth(&self, value: T) {
        println!("{:?}", value)
    }
}
/// Takes a trait object that must implement `DoSomething<&'f usize>` for
/// every lifetime `'f` (a higher-ranked trait bound), so it can be called
/// with a reference to a local variable.
fn foo(b: Box<dyn for<'f> DoSomething<&'f usize>>) {
    let s: usize = 10;
    b.do_sth(&s);
}
// Exercises coercing `Box<&usize>` into the higher-ranked trait object.
#[test]
fn test_foo() {
    let x = Box::new(&2usize);
    foo(x);
}
|
mod map_head;
mod map_type;
mod test;
mod rlew_reader;
mod map_builder;
mod plane;
mod map;
extern crate byteorder;
|
/*
Project Euler Problem 12:
The sequence of triangle numbers is generated by adding the natural numbers. So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ten terms would be:
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
Let us list the factors of the first seven triangle numbers:
1: 1
3: 1,3
6: 1,2,3,6
10: 1,2,5,10
15: 1,3,5,15
21: 1,3,7,21
28: 1,2,4,7,14,28
We can see that 28 is the first triangle number to have over five divisors.
What is the value of the first triangle number to have over five hundred divisors?
*/
/// Counts all divisors of `n` by pairing each divisor `j <= sqrt(n)` with
/// its cofactor `n / j`; when `n` is a perfect square its root is counted
/// only once.
fn count_divisors(n: u64) -> u32 {
    let mut count = 0;
    let mut j = 1;
    while j * j <= n {
        if n % j == 0 {
            count += if j * j == n { 1 } else { 2 };
        }
        j += 1;
    }
    count
}

/// Prints the first triangle number with more than five hundred divisors.
fn main() {
    let mut t: u64 = 0;
    for i in 1u64.. {
        t += i;
        // The original loop started at j = 2 and always added 2 per hit,
        // missing the (1, t) divisor pair and double-counting the root of
        // perfect squares; count_divisors fixes both.
        if count_divisors(t) > 500 {
            println!("{:?}", t);
            return;
        }
    }
}
use std::fs;
use std::io;
use std::io::Write as _;
use std::process;
/// Generates Win32 audio bindings via the `windows` crate's code generator,
/// writes them to `audio-device-windows-sys/src/bindings.rs` with a
/// do-not-edit header, then runs `rustfmt` over the file.
fn main() -> io::Result<()> {
    // Token stream covering every Win32 namespace/type the audio crate
    // needs (audio core, COM, event/wait primitives, etc.).
    let tokens = windows_macros::generate!(
        Windows::Win32::Audio::*,
        Windows::Win32::CoreAudio::*,
        Windows::Win32::Multimedia::{
            WAVEFORMATEX,
            WAVEFORMATEXTENSIBLE,
            WAVE_FORMAT_PCM,
            WAVE_FORMAT_IEEE_FLOAT,
            KSDATAFORMAT_SUBTYPE_IEEE_FLOAT,
        },
        Windows::Win32::StructuredStorage::PROPVARIANT,
        Windows::Win32::Com::{CoTaskMemAlloc, CoTaskMemFree, CLSIDFromProgID, CoInitializeEx, CoCreateInstance, CLSCTX},
        Windows::Win32::WindowsAndMessaging::GetForegroundWindow,
        Windows::Win32::SystemServices::{
            CreateEventA,
            HANDLE,
            INVALID_HANDLE_VALUE,
            ResetEvent,
            SetEvent,
            WAIT_RETURN_CAUSE,
            WaitForSingleObject,
            WaitForMultipleObjects,
            FALSE,
            TRUE,
            S_FALSE,
        },
        Windows::Win32::WindowsProgramming::{INFINITE, CloseHandle},
        Windows::Win32::ApplicationInstallationAndServicing::NTDDI_WIN7,
    );
    let path = windows_gen::workspace_dir()
        .join("audio-device-windows-sys")
        .join("src")
        .join("bindings.rs");
    let mut file = fs::File::create(&path)?;
    file.write_all(
        "// This file was generated by the `windows` crate - do not edit by hand!\n\n".as_bytes(),
    )?;
    file.write_all(tokens.as_bytes())?;
    // Close the file before rustfmt rewrites it in place.
    drop(file);
    let mut cmd = process::Command::new("rustfmt");
    cmd.arg(&path);
    // NOTE(review): a failed/missing rustfmt is not treated as an error —
    // `output()?` only propagates spawn failures, not a non-zero exit —
    // so the bindings may be left unformatted.
    cmd.output()?;
    Ok(())
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_expression::TableSchema;
use common_meta_app::schema::TableInfo;
use crate::plan::ParquetTableInfo;
use crate::plan::ResultScanTableInfo;
use crate::plan::StageTableInfo;
/// Identifies where a table's data comes from, so a scan can be planned
/// against the matching reader implementation.
#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)]
pub enum DataSourceInfo {
    // Normal table source, `fuse/system`.
    TableSource(TableInfo),
    // Internal/External source, like `s3://` or `azblob://`.
    StageSource(StageTableInfo),
    // stage source with parquet format used for select.
    ParquetSource(ParquetTableInfo),
    // Table Function Result_Scan
    ResultScanSource(ResultScanTableInfo),
}
impl DataSourceInfo {
    /// Returns the table schema of the underlying source.
    pub fn schema(&self) -> Arc<TableSchema> {
        match self {
            DataSourceInfo::TableSource(table_info) => table_info.schema(),
            DataSourceInfo::StageSource(table_info) => table_info.schema(),
            DataSourceInfo::ParquetSource(table_info) => table_info.schema(),
            DataSourceInfo::ResultScanSource(table_info) => table_info.schema(),
        }
    }
    /// Returns a human-readable description of the source.
    ///
    /// `TableInfo` exposes `desc` as a field (cloned here); the other
    /// variants compute it via a `desc()` method.
    pub fn desc(&self) -> String {
        match self {
            DataSourceInfo::TableSource(table_info) => table_info.desc.clone(),
            DataSourceInfo::StageSource(table_info) => table_info.desc(),
            DataSourceInfo::ParquetSource(table_info) => table_info.desc(),
            DataSourceInfo::ResultScanSource(table_info) => table_info.desc(),
        }
    }
}
|
use juniper::GraphQLObject;
// Minimal juniper usage: an empty struct deriving `GraphQLObject`.
// NOTE(review): this looks like a derive-macro test fixture (GraphQL
// objects normally need at least one field) — confirm intent.
#[derive(GraphQLObject)]
struct Obj {}
// Intentionally empty: the interesting part is whether the derive compiles.
fn main() {}
|
use serenity::framework::standard::macros::command;
use serenity::framework::standard::CommandResult;
use serenity::model::prelude::Message;
use serenity::prelude::Context;
// Admin-only command: reply with a farewell, then make the bot leave the
// guild the message was sent in. Outside a guild (e.g. in a DM) it only
// replies with a shrug. (Plain `//` comments are used here so the
// serenity `#[command]` macro's handling of doc attributes is untouched.)
#[command]
#[required_permissions(ADMINISTRATOR)]
async fn leave(ctx: &Context, msg: &Message) -> CommandResult {
    match msg.guild_id {
        Some(guild) => {
            // Reply first: after leaving, the bot can no longer post here.
            msg.reply(&ctx.http, "I'm sorry to see you go, goodbye 👋")
                .await?;
            guild.leave(&ctx.http).await?;
        }
        None => {
            msg.reply(&ctx.http, "I'm not in a guild? ¯\\_(ツ)_/¯")
                .await?;
        }
    };
    Ok(())
}
|
use std::borrow::BorrowMut;
use std::mem::swap;
use std::mem::replace;
/// A linked list: `head` owns the chain of boxed nodes, while `tail` is a
/// raw back-pointer into the last node for O(1) appends.
pub struct SequencedList<T> {
    length: usize,
    head: Link<T>,
    tail: Option<*mut Node<T>>,
}
/// An owning link; `None` terminates the chain.
type Link<T> = Option<Box<Node<T>>>;
/// A list node: owns its successor and holds a raw pointer to its
/// predecessor.
struct Node<T> {
    value: T,
    next: Link<T>,
    prev: Option<*mut Node<T>>,
}
/// Borrowing iterator over a `SequencedList`: walks the owned `head`
/// links forward and the raw `tail`/`prev` pointers backward.
pub struct Iter<'a, T: 'a> {
    length: usize,
    head: &'a Link<T>,
    tail: Option<*mut Node<T>>,
}
impl<T> SequencedList<T> {
    /// Creates an empty list.
    pub fn new() -> SequencedList<T> {
        SequencedList::<T> {
            length: 0,
            head: None,
            tail: None,
        }
    }
    /// Pushes `val` at the front of the list.
    ///
    /// NOTE(review): in the non-empty branch the old head's `prev` is left
    /// `None` instead of pointing at the new node, so the `prev` chain
    /// used by `next_back` never reaches the new front element — confirm
    /// and fix.
    pub fn push_front(&mut self, val: T) {
        self.length += 1;
        if self.head.is_none() {
            let mut node = Box::new(Node::new(val));
            // Raw tail pointer into the boxed node; the box is owned by
            // `head`, so the pointer stays valid while the node is linked.
            self.tail = Some(node.borrow_mut() as *mut Node<T>);
            self.head = Some(node);
        } else {
            let mut node = Box::new(Node::new(val));
            // Splice: new node takes the old head as its `next`, then
            // becomes the head itself.
            swap(&mut node.next, &mut self.head);
            swap(&mut self.head, &mut Some(node));
        }
    }
    /// Pushes `val` at the back of the list in O(1) via the tail pointer.
    pub fn push_back(&mut self, val: T) {
        self.length += 1;
        if self.head.is_none() {
            let mut node = Box::new(Node::new(val));
            self.tail = Some(node.borrow_mut() as *mut Node<T>);
            self.head = Some(node);
        } else {
            unsafe {
                // Tail is non-null whenever the list is non-empty.
                self.tail = (**self.tail.as_ref().unwrap()).insert_next(val);
            }
        }
    }
    /// Returns the element at `index`, or `None` when out of bounds.
    /// Walks the chain recursively, so this is O(index).
    pub fn get(&self, index: usize) -> Option<&T> {
        if index >= self.length {
            None
        } else {
            Some(&self.head.as_ref().unwrap().nth(index).value)
        }
    }
    /// Returns a borrowing iterator over the list.
    pub fn iter(&self) -> Iter<T> {
        Iter::<T>::new(self.length, &self.head, self.tail)
    }
    /// Returns the first element, if any.
    pub fn front(&self) -> Option<&T> {
        self.head.as_ref().map(|b| &(**b).value)
    }
    /// Returns the last element, if any, by dereferencing the raw tail.
    pub fn back(&self) -> Option<&T> {
        unsafe { self.tail.as_ref().map(|&ptr| &(*ptr).value) }
    }
    /// Drops all elements and resets the list to empty.
    pub fn clear(&mut self) {
        self.head = None;
        self.tail = None;
        self.length = 0;
    }
    /// Number of elements in the list.
    pub fn len(&self) -> usize {
        self.length
    }
    /// `true` when the list holds no elements.
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }
    /// Moves all elements of `other` to the end of `self`, leaving `other`
    /// empty.
    ///
    /// NOTE(review): the first node taken from `other` keeps `prev: None`
    /// instead of pointing at `self`'s old tail, so backward iteration
    /// across the splice point is broken — confirm and fix.
    pub fn append(&mut self, other: &mut Self) {
        if let Some(tail) = self.tail {
            self.length += other.length;
            other.length = 0;
            unsafe {
                // Hand ownership of other's chain to our old tail.
                swap(&mut (*tail).next, &mut other.head);
            }
            self.tail = other.tail;
            other.tail = None;
        } else {
            // Self is empty: just steal other's entire state.
            swap(&mut self.head, &mut other.head);
            swap(&mut self.tail, &mut other.tail);
            swap(&mut self.length, &mut other.length);
        }
    }
}
impl<T> Node<T> {
    /// Creates an unlinked node holding `val`.
    fn new(val: T) -> Self {
        Node::<T> {
            value: val,
            next: None,
            prev: None,
        }
    }
    /// Links a new node holding `next_val` directly after `self` and
    /// returns a raw pointer to it (the new tail).
    ///
    /// NOTE(review): any existing `self.next` chain is dropped and
    /// replaced — callers only invoke this on the current tail.
    fn insert_next(&mut self, next_val: T) -> Option<*mut Self> {
        let mut next = Box::new(Self::new(next_val));
        next.prev = Some(self as *mut Self);
        self.next = Some(next);
        Some(self.next.as_mut().unwrap().borrow_mut() as *mut Self)
    }
    /// Returns the `index`-th node after `self`, recursing along `next`.
    /// Panics if the chain is shorter than `index`.
    fn nth(&self, index: usize) -> &Self {
        if index == 0 {
            self
        } else {
            self.next.as_ref().unwrap().nth(index - 1)
        }
    }
}
impl<'a, T> Iter<'a, T> {
    /// Builds an iterator view from a list's element count, head link and
    /// raw tail pointer.
    fn new(length: usize, head: &'a Link<T>, tail: Option<*mut Node<T>>) -> Self {
        // Field-init shorthand: parameter names match the field names.
        Iter { length, head, tail }
    }
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // Advance `head` to the next link and hand back a reference
            // into the node just stepped past. Panics if the link chain is
            // shorter than `length` claims.
            let reference = replace(&mut self.head, &(**self.head.as_ref().unwrap()).next);
            Some(&(*reference.as_ref().unwrap()).value)
        }
    }
}
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.length == 0 {
            None
        } else {
            self.length -= 1;
            // `tail` must point at a live node owned by the list; it is
            // dereferenced here without a compiler-checked tie to 'a.
            let ret = unsafe { Some(&(*(*self.tail.as_ref().unwrap())).value) };
            unsafe {
                // Step toward the front along the `prev` chain.
                // NOTE(review): `push_front`/`append` do not maintain
                // `prev` on existing nodes, so `tail` can become `None`
                // (panicking on the next call) before `length` runs out.
                self.tail = (**self.tail.as_ref().unwrap()).prev;
            }
            ret
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.