text stringlengths 8 4.13M |
|---|
use std::collections::HashMap;
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::protocol::{LogStreamSource, ProcessSpec};
/// A request to start managing a new process.
///
/// Eventually turned into a `ProcessSpec` internally, once managed.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct StartRequest {
    /// Name used to identify the managed process.
    pub name: String,
    /// Command line to run: program followed by its arguments.
    pub cmd: Vec<String>,
    /// Working directory the process is started in.
    pub cwd: PathBuf,
    /// Environment variables set for the process.
    pub env: HashMap<String, String>,
}
/// A request to list managed processes.
// NOTE(review): the previous doc comment was copy-pasted from `StartRequest`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ListRequest {
    /// Optional name filters; `None` selects all processes.
    pub filters: Option<Vec<String>>,
}
/// A request to stop managed processes.
// NOTE(review): this is the only request struct carrying a field-level
// `rename_all`; it is a no-op for the single-word field `filters` — confirm
// whether it can be dropped for consistency with the sibling structs.
#[serde(rename_all = "kebab-case")]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct StopRequest {
    /// Optional name filters; `None` selects all processes.
    pub filters: Option<Vec<String>>,
}
/// A request to restart managed processes.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RestartRequest {
    /// Optional name filters; `None` selects all processes.
    pub filters: Option<Vec<String>>,
    /// Optional replacement environment applied on restart; `None` keeps
    /// the existing environment.
    pub env: Option<HashMap<String, String>>,
}
/// A request to get information about managed processes.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct InfoRequest {
    /// Name of the single process to query (unlike the other requests,
    /// this one takes no filter list).
    pub name: String,
}
/// A request to stop managing processes.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DeleteRequest {
    /// Optional name filters; `None` selects all processes.
    pub filters: Option<Vec<String>>,
}
/// A request to dump the `ProcessSpec` of managed processes.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DumpRequest {
    /// Optional name filters; `None` selects all processes.
    pub filters: Option<Vec<String>>,
}
/// A request to restore processes from a previously generated dump.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RestoreRequest {
    /// Specs to restore, as produced by a prior dump.
    pub specs: Vec<ProcessSpec>,
}
/// A request to fetch (and optionally follow) logs of managed processes.
// NOTE(review): the previous doc comment was copy-pasted from `RestoreRequest`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct LogsRequest {
    /// Optional name filters; `None` selects all processes.
    pub filters: Option<Vec<String>>,
    /// Restrict output to one log stream (e.g. stdout/stderr); `None`
    /// includes all streams.
    pub source_filter: Option<LogStreamSource>,
    /// When `true`, keep streaming new log lines instead of returning once.
    pub stream: bool,
    /// Number of trailing lines to return initially.
    pub lines: usize,
}
/// A request to prune logs and pid files of unmanaged and/or stopped processes.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PruneRequest {
    /// Prune files from stopped, but still managed, processes.
    pub stopped: bool,
}
/// A request (from a client to the daemon).
///
/// Serialized adjacently tagged, e.g. `{"type": "...", "data": {...}}`,
/// with kebab-cased variant names on the wire.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", content = "data", rename_all = "kebab-case")]
pub enum Request {
    List(ListRequest),
    Start(StartRequest),
    Stop(StopRequest),
    Restart(RestartRequest),
    Info(InfoRequest),
    Delete(DeleteRequest),
    Dump(DumpRequest),
    Restore(RestoreRequest),
    Logs(LogsRequest),
    Prune(PruneRequest),
    // Payload-less commands; serialized with a `type` tag only.
    Version,
    Kill,
}
|
use std::error;
use std::ffi::CStr;
use std::fmt;
use std::os::raw::c_int;
use std::result;
use std::str;
/// Error codes that the library might return.
///
/// Each variant mirrors the corresponding `ARES_*` status code from the
/// underlying c-ares C library (via `c_ares_sys`).
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, PartialOrd, Ord)]
pub enum Error {
    /// DNS server returned answer with no data.
    ENODATA = c_ares_sys::ARES_ENODATA as isize,
    /// DNS server claims query was misformatted.
    EFORMERR = c_ares_sys::ARES_EFORMERR as isize,
    /// DNS server returned general failure.
    ESERVFAIL = c_ares_sys::ARES_ESERVFAIL as isize,
    /// Domain name not found.
    ENOTFOUND = c_ares_sys::ARES_ENOTFOUND as isize,
    /// DNS server does not implement requested operation.
    ENOTIMP = c_ares_sys::ARES_ENOTIMP as isize,
    /// DNS server refused query.
    EREFUSED = c_ares_sys::ARES_EREFUSED as isize,
    /// Misformatted DNS query.
    EBADQUERY = c_ares_sys::ARES_EBADQUERY as isize,
    /// Misformatted domain name.
    EBADNAME = c_ares_sys::ARES_EBADNAME as isize,
    /// Unsupported address family.
    EBADFAMILY = c_ares_sys::ARES_EBADFAMILY as isize,
    /// Misformatted DNS reply.
    EBADRESP = c_ares_sys::ARES_EBADRESP as isize,
    /// Could not contact DNS servers.
    ECONNREFUSED = c_ares_sys::ARES_ECONNREFUSED as isize,
    /// Timeout while contacting DNS servers.
    ETIMEOUT = c_ares_sys::ARES_ETIMEOUT as isize,
    /// End of file.
    EOF = c_ares_sys::ARES_EOF as isize,
    /// Error reading file.
    EFILE = c_ares_sys::ARES_EFILE as isize,
    /// Out of memory.
    ENOMEM = c_ares_sys::ARES_ENOMEM as isize,
    /// Channel is being destroyed.
    EDESTRUCTION = c_ares_sys::ARES_EDESTRUCTION as isize,
    /// Misformatted string.
    EBADSTR = c_ares_sys::ARES_EBADSTR as isize,
    /// Illegal flags specified.
    EBADFLAGS = c_ares_sys::ARES_EBADFLAGS as isize,
    /// Given hostname is not numeric.
    ENONAME = c_ares_sys::ARES_ENONAME as isize,
    /// Illegal hints flags specified.
    EBADHINTS = c_ares_sys::ARES_EBADHINTS as isize,
    /// c-ares library initialization not yet performed.
    ENOTINITIALIZED = c_ares_sys::ARES_ENOTINITIALIZED as isize,
    /// Error loading iphlpapi.dll.
    ELOADIPHLPAPI = c_ares_sys::ARES_ELOADIPHLPAPI as isize,
    /// Could not find GetNetworkParams function.
    EADDRGETNETWORKPARAMS = c_ares_sys::ARES_EADDRGETNETWORKPARAMS as isize,
    /// DNS query cancelled.
    ECANCELLED = c_ares_sys::ARES_ECANCELLED as isize,
    /// Unknown error.
    // NOTE(review): `UNKNOWN` takes the next implicit discriminant after
    // `ECANCELLED`; assumed not to collide with any real `ARES_*` code —
    // confirm against `c_ares_sys`.
    UNKNOWN,
}
// Blanket std::error::Error impl; `Display` (below) and the derived `Debug`
// satisfy its supertraits, so no methods are needed.
impl error::Error for Error {}
impl fmt::Display for Error {
    /// Formats the error using the human-readable message provided by the
    /// underlying c-ares library.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        let text = unsafe {
            // `ares_strerror` maps a status code to a static message string.
            let ptr = c_ares_sys::ares_strerror(*self as c_int);
            let buf = CStr::from_ptr(ptr).to_bytes();
            // SAFETY(review): assumes `ares_strerror` always returns a valid,
            // NUL-terminated ASCII/UTF-8 string — confirm against c-ares docs
            // before trusting `from_utf8_unchecked` here.
            str::from_utf8_unchecked(buf)
        };
        fmt.write_str(text)
    }
}
impl From<i32> for Error {
    /// Converts a raw c-ares status code into an `Error`, mapping any
    /// unrecognized code to [`Error::UNKNOWN`].
    fn from(code: i32) -> Self {
        match code {
            c_ares_sys::ARES_ENODATA => Error::ENODATA,
            c_ares_sys::ARES_EFORMERR => Error::EFORMERR,
            c_ares_sys::ARES_ESERVFAIL => Error::ESERVFAIL,
            c_ares_sys::ARES_ENOTFOUND => Error::ENOTFOUND,
            c_ares_sys::ARES_ENOTIMP => Error::ENOTIMP,
            c_ares_sys::ARES_EREFUSED => Error::EREFUSED,
            c_ares_sys::ARES_EBADQUERY => Error::EBADQUERY,
            c_ares_sys::ARES_EBADNAME => Error::EBADNAME,
            c_ares_sys::ARES_EBADFAMILY => Error::EBADFAMILY,
            c_ares_sys::ARES_EBADRESP => Error::EBADRESP,
            c_ares_sys::ARES_ECONNREFUSED => Error::ECONNREFUSED,
            c_ares_sys::ARES_ETIMEOUT => Error::ETIMEOUT,
            c_ares_sys::ARES_EOF => Error::EOF,
            c_ares_sys::ARES_EFILE => Error::EFILE,
            c_ares_sys::ARES_ENOMEM => Error::ENOMEM,
            c_ares_sys::ARES_EDESTRUCTION => Error::EDESTRUCTION,
            c_ares_sys::ARES_EBADSTR => Error::EBADSTR,
            c_ares_sys::ARES_EBADFLAGS => Error::EBADFLAGS,
            c_ares_sys::ARES_ENONAME => Error::ENONAME,
            c_ares_sys::ARES_EBADHINTS => Error::EBADHINTS,
            c_ares_sys::ARES_ENOTINITIALIZED => Error::ENOTINITIALIZED,
            c_ares_sys::ARES_ELOADIPHLPAPI => Error::ELOADIPHLPAPI,
            c_ares_sys::ARES_EADDRGETNETWORKPARAMS => Error::EADDRGETNETWORKPARAMS,
            c_ares_sys::ARES_ECANCELLED => Error::ECANCELLED,
            // Unrecognized or future status codes are not a hard failure.
            _ => Error::UNKNOWN,
        }
    }
}
/// The type used by this library for methods that might fail.
///
/// Shorthand for `std::result::Result` specialized to this crate's [`Error`].
pub type Result<T> = result::Result<T, Error>;
|
#[doc = "Register `FDCAN_TEST` reader"]
pub type R = crate::R<FDCAN_TEST_SPEC>;
#[doc = "Field `LBCK` reader - Loop Back mode"]
pub type LBCK_R = crate::BitReader;
// NOTE(review): the original field descriptions were shifted by one (TX was
// labeled "Loop Back mode", RX "Control of Transmit Pin"); corrected per the
// STM32 FDCAN_TEST register layout — confirm against the reference manual.
#[doc = "Field `TX` reader - Control of Transmit Pin"]
pub type TX_R = crate::FieldReader;
#[doc = "Field `RX` reader - Receive Pin"]
pub type RX_R = crate::BitReader;
impl R {
    #[doc = "Bit 4 - Loop Back mode"]
    #[inline(always)]
    pub fn lbck(&self) -> LBCK_R {
        LBCK_R::new(((self.bits >> 4) & 1) != 0)
    }
    // NOTE(review): description corrected — this 2-bit field controls the
    // transmit pin, not loop-back; confirm against the reference manual.
    #[doc = "Bits 5:6 - Control of Transmit Pin"]
    #[inline(always)]
    pub fn tx(&self) -> TX_R {
        TX_R::new(((self.bits >> 5) & 3) as u8)
    }
    #[doc = "Bit 7 - Receive Pin"]
    #[inline(always)]
    pub fn rx(&self) -> RX_R {
        RX_R::new(((self.bits >> 7) & 1) != 0)
    }
}
#[doc = "FDCAN Test Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_test::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker: the register description lives in the trait impls below.
pub struct FDCAN_TEST_SPEC;
impl crate::RegisterSpec for FDCAN_TEST_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fdcan_test::R`](R) reader structure"]
impl crate::Readable for FDCAN_TEST_SPEC {}
#[doc = "`reset()` method sets FDCAN_TEST to value 0"]
impl crate::Resettable for FDCAN_TEST_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
impl Solution {
pub fn kth_largest_value(matrix: Vec<Vec<i32>>, k: i32) -> i32 {
let mut matrix = matrix;
let (n,m) = (matrix.len(),matrix[0].len());
let mut res= Vec::new();
for i in 0..n{
for j in 0..m{
if i >= 1{
matrix[i][j] ^= matrix[i - 1][j];
}
if j >= 1{
matrix[i][j] ^= matrix[i][j - 1];
}
if i >= 1 && j >= 1{
matrix[i][j] ^= matrix[i - 1][j - 1]
}
res.push(matrix[i][j]);
}
}
res.sort_by(|a,b| b.cmp(a));
res[k as usize- 1]
}
} |
use crate::er::{self, Result};
/// Let's Encrypt settings for automatic TLS certificate generation.
pub struct JitsiEnvConfigLetsEncrypt {
    /// Domain for which the certificate is issued.
    pub domain: String,
    /// Contact e-mail for Let's Encrypt account notifications (mandatory).
    pub email: String,
}
/// Settings used to render a docker-jitsi-meet `.env` file.
pub struct JitsiEnvConfig {
    /// Directory where all Jitsi configuration will be stored.
    pub config_dir: String,
    /// System time zone (e.g. "Europe/Berlin").
    pub tz: String,
    /// Public URL for the web service.
    pub public_url: String,
    /// Let's Encrypt settings; `None` disables certificate generation.
    pub letsencrypt: Option<JitsiEnvConfigLetsEncrypt>,
    /// Exposed HTTP port.
    pub http_port: i32,
    /// Exposed HTTPS port.
    pub https_port: i32
}
/// Renders a docker-jitsi-meet `.env` file from `config` and returns its
/// content as a `String`.
///
/// Changes from the upstream default template:
/// * letsencrypt uncommented (driven by `config.letsencrypt`)
/// * etherpad enabled
///
/// The current implementation cannot fail; `Result` is kept for interface
/// stability with the crate's error type.
pub fn write_env_file(config: JitsiEnvConfig) -> Result<String> {
    // Flatten the optional Let's Encrypt config into the three template
    // values: disabled (0) with empty strings when not configured.
    let (letsencrypt_enable, letsencrypt_domain, letsencrypt_email) =
        if let Some(letsencrypt) = config.letsencrypt {
            (1, letsencrypt.domain, letsencrypt.email)
        } else {
            (0, "".to_owned(), "".to_owned())
        };
    // The template below is emitted verbatim (raw string); `{name}` slots are
    // filled from the named arguments after the literal.
    let content = format!(
        r#"
#
# Basic configuration options
#
# Directory where all configuration will be stored.
CONFIG={config_dir}
# Exposed HTTP port.
HTTP_PORT={http_port}
# Exposed HTTPS port.
HTTPS_PORT={https_port}
# System time zone.
TZ={tz}
# Public URL for the web service.
PUBLIC_URL="{public_url}"
# IP address of the Docker host. See the "Running on a LAN environment" section
# in the README.
#DOCKER_HOST_ADDRESS=192.168.1.1
#
# Let's Encrypt configuration
#
# Enable Let's Encrypt certificate generation.
ENABLE_LETSENCRYPT={letsencrypt_enable}
# Domain for which to generate the certificate.
LETSENCRYPT_DOMAIN={letsencrypt_domain}
# E-Mail for receiving important account notifications (mandatory).
LETSENCRYPT_EMAIL={letsencrypt_email}
#
# Etherpad integration (for document sharing)
#
# Set etherpad-lite URL (uncomment to enable).
ETHERPAD_URL_BASE=http://etherpad.meet.jitsi:9001
#
# Basic Jigasi configuration options (needed for SIP gateway support)
#
# SIP URI for incoming / outgoing calls.
#JIGASI_SIP_URI=test@sip2sip.info
# Password for the specified SIP account as a clear text
#JIGASI_SIP_PASSWORD=passw0rd
# SIP server (use the SIP account domain if in doubt).
#JIGASI_SIP_SERVER=sip2sip.info
# SIP server port
#JIGASI_SIP_PORT=5060
# SIP server transport
#JIGASI_SIP_TRANSPORT=UDP
#
# Authentication configuration (see README for details)
#
# Enable authentication.
#ENABLE_AUTH=1
# Enable guest access.
#ENABLE_GUESTS=1
# Select authentication type: internal, jwt or ldap
#AUTH_TYPE=internal
# JWT auuthentication
#
# Application identifier.
#JWT_APP_ID=my_jitsi_app_id
# Application secret known only to your token.
#JWT_APP_SECRET=my_jitsi_app_secret
# (Optional) Set asap_accepted_issuers as a comma separated list.
#JWT_ACCEPTED_ISSUERS=my_web_client,my_app_client
# (Optional) Set asap_accepted_audiences as a comma separated list.
#JWT_ACCEPTED_AUDIENCES=my_server1,my_server2
# LDAP authentication (for more information see the Cyrus SASL saslauthd.conf man page)
#
# LDAP url for connection.
#LDAP_URL=ldaps://ldap.domain.com/
# LDAP base DN. Can be empty
#LDAP_BASE=DC=example,DC=domain,DC=com
# LDAP user DN. Do not specify this parameter for the anonymous bind.
#LDAP_BINDDN=CN=binduser,OU=users,DC=example,DC=domain,DC=com
# LDAP user password. Do not specify this parameter for the anonymous bind.
#LDAP_BINDPW=LdapUserPassw0rd
# LDAP filter. Tokens example:
# %1-9 - if the input key is user@mail.domain.com, then %1 is com, %2 is domain and %3 is mail.
# %s - %s is replaced by the complete service string.
# %r - %r is replaced by the complete realm string.
#LDAP_FILTER=(sAMAccountName=%u)
# LDAP authentication method
#LDAP_AUTH_METHOD=bind
# LDAP version
#LDAP_VERSION=3
# LDAP TLS using
#LDAP_USE_TLS=1
# List of SSL/TLS ciphers to allow.
#LDAP_TLS_CIPHERS=SECURE256:SECURE128:!AES-128-CBC:!ARCFOUR-128:!CAMELLIA-128-CBC:!3DES-CBC:!CAMELLIA-128-CBC
# Require and verify server certificate
#LDAP_TLS_CHECK_PEER=1
# Path to CA cert file. Used when server sertificate verify is enabled.
#LDAP_TLS_CACERT_FILE=/etc/ssl/certs/ca-certificates.crt
# Path to CA certs directory. Used when server sertificate verify is enabled.
#LDAP_TLS_CACERT_DIR=/etc/ssl/certs
# Wether to use starttls, implies LDAPv3 and requires ldap:// instead of ldaps://
# LDAP_START_TLS=1
#
# Advanced configuration options (you generally don't need to change these)
#
# Internal XMPP domain.
XMPP_DOMAIN=meet.jitsi
# Internal XMPP server
XMPP_SERVER=xmpp.meet.jitsi
# Internal XMPP server URL
XMPP_BOSH_URL_BASE=http://xmpp.meet.jitsi:5280
# Internal XMPP domain for authenticated services.
XMPP_AUTH_DOMAIN=auth.meet.jitsi
# XMPP domain for the MUC.
XMPP_MUC_DOMAIN=muc.meet.jitsi
# XMPP domain for the internal MUC used for jibri, jigasi and jvb pools.
XMPP_INTERNAL_MUC_DOMAIN=internal-muc.meet.jitsi
# XMPP domain for unauthenticated users.
XMPP_GUEST_DOMAIN=guest.meet.jitsi
# Custom Prosody modules for XMPP_DOMAIN (comma separated)
XMPP_MODULES=
# Custom Prosody modules for MUC component (comma separated)
XMPP_MUC_MODULES=
# Custom Prosody modules for internal MUC component (comma separated)
XMPP_INTERNAL_MUC_MODULES=
# MUC for the JVB pool.
JVB_BREWERY_MUC=jvbbrewery
# XMPP user for JVB client connections.
JVB_AUTH_USER=jvb
# XMPP password for JVB client connections.
JVB_AUTH_PASSWORD=passw0rd
# STUN servers used to discover the server's public IP.
JVB_STUN_SERVERS=stun.l.google.com:19302,stun1.l.google.com:19302,stun2.l.google.com:19302
# Media port for the Jitsi Videobridge
JVB_PORT=10000
# TCP Fallback for Jitsi Videobridge for when UDP isn't available
JVB_TCP_HARVESTER_DISABLED=true
JVB_TCP_PORT=4443
# A comma separated list of APIs to enable when the JVB is started. The default is none.
# See https://github.com/jitsi/jitsi-videobridge/blob/master/doc/rest.md for more information
#JVB_ENABLE_APIS=rest,colibri
# XMPP component password for Jicofo.
JICOFO_COMPONENT_SECRET=s3cr37
# XMPP user for Jicofo client connections. NOTE: this option doesn't currently work due to a bug.
JICOFO_AUTH_USER=focus
# XMPP password for Jicofo client connections.
JICOFO_AUTH_PASSWORD=passw0rd
# Base URL of Jicofo's reservation REST API
#JICOFO_RESERVATION_REST_BASE_URL=http://reservation.example.com
# XMPP user for Jigasi MUC client connections.
JIGASI_XMPP_USER=jigasi
# XMPP password for Jigasi MUC client connections.
JIGASI_XMPP_PASSWORD=passw0rd
# MUC name for the Jigasi pool.
JIGASI_BREWERY_MUC=jigasibrewery
# Minimum port for media used by Jigasi.
JIGASI_PORT_MIN=20000
# Maximum port for media used by Jigasi.
JIGASI_PORT_MAX=20050
# Enable SDES srtp
#JIGASI_ENABLE_SDES_SRTP=1
# Keepalive method
#JIGASI_SIP_KEEP_ALIVE_METHOD=OPTIONS
# Health-check extension
#JIGASI_HEALTH_CHECK_SIP_URI=keepalive
# Health-check interval
#JIGASI_HEALTH_CHECK_INTERVAL=300000
#
# Enable Jigasi transcription.
#ENABLE_TRANSCRIPTIONS=1
# Jigasi will recordord an audio when transcriber is on. Default false.
#JIGASI_TRANSCRIBER_RECORD_AUDIO=true
# Jigasi will send transcribed text to the chat when transcriber is on. Default false.
#JIGASI_TRANSCRIBER_SEND_TXT=true
# Jigasi post to the chat an url with transcription file. Default false.
#JIGASI_TRANSCRIBER_ADVERTISE_URL=true
# Credentials for connect to Cloud Google API from Jigasi. Path located inside the container.
# Please read https://cloud.google.com/text-to-speech/docs/quickstart-protocol
# section "Before you begin" from 1 to 5 paragraph. Copy the key on
# the docker host to ${{CONFIG}}/jigasi/key.json and to enable this setting:
#GOOGLE_APPLICATION_CREDENTIALS=/config/key.json
# Enable recording
#ENABLE_RECORDING=1
# XMPP domain for the jibri recorder
XMPP_RECORDER_DOMAIN=recorder.meet.jitsi
# XMPP recorder user for Jibri client connections.
JIBRI_RECORDER_USER=recorder
# XMPP recorder password for Jibri client connections.
JIBRI_RECORDER_PASSWORD=passw0rd
# Directory for recordings inside Jibri container.
JIBRI_RECORDING_DIR=/config/recordings
# The finalizing script. Will run after recording is complete.
JIBRI_FINALIZE_RECORDING_SCRIPT_PATH=/config/finalize.sh
# XMPP user for Jibri client connections.
JIBRI_XMPP_USER=jibri
# XMPP password for Jibri client connections.
JIBRI_XMPP_PASSWORD=passw0rd
# MUC name for the Jibri pool.
JIBRI_BREWERY_MUC=jibribrewery
# MUC connection timeout
JIBRI_PENDING_TIMEOUT=90
# When jibri gets a request to start a service for a room, the room
# jid will look like: roomName@optional.prefixes.subdomain.xmpp_domain
# We'll build the url for the call by transforming that into:
# https://xmpp_domain/subdomain/roomName
# So if there are any prefixes in the jid (like jitsi meet, which
# has its participants join a muc at conference.xmpp_domain) then
# list that prefix here so it can be stripped out to generate
# the call url correctly.
JIBRI_STRIP_DOMAIN_JID=muc
# Directory for logs inside Jibri container.
JIBRI_LOGS_DIR=/config/logs
# Disable HTTPS. This can be useful if TLS connections are going to be handled outside of this setup.
#DISABLE_HTTPS=1
# Redirects HTTP traffic to HTTPS. Only works with the standard HTTPS port (443).
ENABLE_HTTP_REDIRECT=1
"#,
        config_dir = config.config_dir,
        tz = config.tz,
        public_url = config.public_url,
        letsencrypt_enable = letsencrypt_enable,
        letsencrypt_domain = letsencrypt_domain,
        letsencrypt_email = letsencrypt_email,
        http_port = config.http_port,
        https_port = config.https_port
    );
    Ok(content)
}
|
// Generated (svd2rust, pre-0.18 style) accessors for register MDMA_C14TBR.
#[doc = "Reader of register MDMA_C14TBR"]
pub type R = crate::R<u32, super::MDMA_C14TBR>;
#[doc = "Writer for register MDMA_C14TBR"]
pub type W = crate::W<u32, super::MDMA_C14TBR>;
#[doc = "Register MDMA_C14TBR `reset()`'s with value 0"]
impl crate::ResetValue for super::MDMA_C14TBR {
    type Type = u32;
    /// Reset value of the whole register (all fields zero).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TSEL`"]
pub type TSEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TSEL`"]
pub struct TSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> TSEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // TSEL occupies bits 0..=5 (mask 0x3f): clear, then set from `value`.
        self.w.bits = (self.w.bits & !0x3f) | ((value as u32) & 0x3f);
        self.w
    }
}
#[doc = "SBUS\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SBUS_A {
    #[doc = "0: The system/AXI bus is used as source\r\n (read operation) on channel x."]
    B_0X0 = 0,
    #[doc = "1: The AHB bus/TCM is used as source\r\n (read operation) on channel x."]
    B_0X1 = 1,
}
// Maps the enumerated value to its single-bit raw representation.
impl From<SBUS_A> for bool {
    #[inline(always)]
    fn from(variant: SBUS_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `SBUS`"]
pub type SBUS_R = crate::R<bool, SBUS_A>;
impl SBUS_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SBUS_A {
        match self.bits {
            false => SBUS_A::B_0X0,
            true => SBUS_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SBUS_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SBUS_A::B_0X1
    }
}
#[doc = "Write proxy for field `SBUS`"]
pub struct SBUS_W<'a> {
    w: &'a mut W,
}
impl<'a> SBUS_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SBUS_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "The system/AXI bus is used as source (read operation) on channel x."]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SBUS_A::B_0X0)
    }
    #[doc = "The AHB bus/TCM is used as source (read operation) on channel x."]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SBUS_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SBUS is bit 16: clear it, then set from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "DBUS\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DBUS_A {
    #[doc = "0: The system/AXI bus is used as\r\n destination (write operation) on channel\r\n x."]
    B_0X0 = 0,
    #[doc = "1: The AHB bus/TCM is used as\r\n destination (write operation) on channel\r\n x."]
    B_0X1 = 1,
}
// Maps the enumerated value to its single-bit raw representation.
impl From<DBUS_A> for bool {
    #[inline(always)]
    fn from(variant: DBUS_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DBUS`"]
pub type DBUS_R = crate::R<bool, DBUS_A>;
impl DBUS_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DBUS_A {
        match self.bits {
            false => DBUS_A::B_0X0,
            true => DBUS_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == DBUS_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == DBUS_A::B_0X1
    }
}
#[doc = "Write proxy for field `DBUS`"]
pub struct DBUS_W<'a> {
    w: &'a mut W,
}
impl<'a> DBUS_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DBUS_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "The system/AXI bus is used as destination (write operation) on channel x."]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(DBUS_A::B_0X0)
    }
    #[doc = "The AHB bus/TCM is used as destination (write operation) on channel x."]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(DBUS_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // DBUS is bit 17: clear it, then set from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
// Read-side accessors: each extracts its field from the cached register value.
impl R {
    #[doc = "Bits 0:5 - TSEL"]
    #[inline(always)]
    pub fn tsel(&self) -> TSEL_R {
        TSEL_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bit 16 - SBUS"]
    #[inline(always)]
    pub fn sbus(&self) -> SBUS_R {
        SBUS_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - DBUS"]
    #[inline(always)]
    pub fn dbus(&self) -> DBUS_R {
        DBUS_R::new(((self.bits >> 17) & 0x01) != 0)
    }
}
// Write-side accessors: each returns a proxy that mutates the staged value.
impl W {
    #[doc = "Bits 0:5 - TSEL"]
    #[inline(always)]
    pub fn tsel(&mut self) -> TSEL_W {
        TSEL_W { w: self }
    }
    #[doc = "Bit 16 - SBUS"]
    #[inline(always)]
    pub fn sbus(&mut self) -> SBUS_W {
        SBUS_W { w: self }
    }
    #[doc = "Bit 17 - DBUS"]
    #[inline(always)]
    pub fn dbus(&mut self) -> DBUS_W {
        DBUS_W { w: self }
    }
}
|
use std::io;
use std::rand;
/// Returns a random number in 1..=100.
///
/// NOTE(review): this file is pre-1.0 Rust (`uint`, `std::rand`, `u` suffix);
/// it will not compile on any modern toolchain.
fn get_random_number() -> uint {
    let secret_number = (rand::random::<uint>() % 100u) + 1u;
    secret_number
}
/// Three-way comparison of `a` and `b`.
///
/// `Ordering`/`Less`/`Greater`/`Equal` come unqualified from the pre-1.0
/// prelude.
fn cmp(a: uint, b: uint) -> Ordering {
    if a < b { Less }
    else if a > b { Greater }
    else { Equal }
}
/// Guessing-game loop: reads guesses from stdin until the secret is found.
fn main() {
    let secret = get_random_number();
    loop {
        // The secret is in 1..=100 (see get_random_number), so prompt with
        // the matching range; the old prompt said "between 0 and 100".
        println!("Type your guess (between 1 and 100) then press enter/return");
        let input = io::stdin().read_line()
            .ok().expect("Failed to read line\n");
        // Non-numeric input yields None; re-prompt instead of crashing.
        let input_num: Option<uint> = from_str(input.as_slice().trim());
        let num = match input_num {
            Some(num) => num,
            None => {
                println!("You didn't type in a number.");
                continue;
            }
        };
        match cmp(num, secret) {
            Less => println!("Too small"),
            Equal => {
                println!("Just right\nYou Win!");
                return;
            },
            _ => println!("Too big"),
        };
    }
}
|
// Generated (svd2rust) reader/writer types for the MACFFR register; the
// writer aliases carry the target register and bit offset as generics.
#[doc = "Register `MACFFR` reader"]
pub type R = crate::R<MACFFR_SPEC>;
#[doc = "Register `MACFFR` writer"]
pub type W = crate::W<MACFFR_SPEC>;
#[doc = "Field `PM` reader - Promiscuous mode"]
pub type PM_R = crate::BitReader;
#[doc = "Field `PM` writer - Promiscuous mode"]
pub type PM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HU` reader - Hash unicast"]
pub type HU_R = crate::BitReader;
#[doc = "Field `HU` writer - Hash unicast"]
pub type HU_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HM` reader - Hash multicast"]
pub type HM_R = crate::BitReader;
#[doc = "Field `HM` writer - Hash multicast"]
pub type HM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DAIF` reader - Destination address inverse filtering"]
pub type DAIF_R = crate::BitReader;
#[doc = "Field `DAIF` writer - Destination address inverse filtering"]
pub type DAIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PAM` reader - Pass all multicast"]
pub type PAM_R = crate::BitReader;
#[doc = "Field `PAM` writer - Pass all multicast"]
pub type PAM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BFD` reader - Broadcast frames disable"]
pub type BFD_R = crate::BitReader;
#[doc = "Field `BFD` writer - Broadcast frames disable"]
pub type BFD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PCF` reader - Pass control frames"]
pub type PCF_R = crate::FieldReader;
#[doc = "Field `PCF` writer - Pass control frames"]
pub type PCF_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `SAIF` reader - Source address inverse filtering"]
pub type SAIF_R = crate::BitReader;
#[doc = "Field `SAIF` writer - Source address inverse filtering"]
pub type SAIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SAF` reader - Source address filter"]
pub type SAF_R = crate::BitReader;
#[doc = "Field `SAF` writer - Source address filter"]
pub type SAF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HPF` reader - Hash or perfect filter"]
pub type HPF_R = crate::BitReader;
#[doc = "Field `HPF` writer - Hash or perfect filter"]
pub type HPF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RA` reader - Receive all"]
pub type RA_R = crate::BitReader;
#[doc = "Field `RA` writer - Receive all"]
pub type RA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-side accessors: each extracts its field from the cached register value.
impl R {
    #[doc = "Bit 0 - Promiscuous mode"]
    #[inline(always)]
    pub fn pm(&self) -> PM_R {
        PM_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Hash unicast"]
    #[inline(always)]
    pub fn hu(&self) -> HU_R {
        HU_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Hash multicast"]
    #[inline(always)]
    pub fn hm(&self) -> HM_R {
        HM_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Destination address inverse filtering"]
    #[inline(always)]
    pub fn daif(&self) -> DAIF_R {
        DAIF_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Pass all multicast"]
    #[inline(always)]
    pub fn pam(&self) -> PAM_R {
        PAM_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Broadcast frames disable"]
    #[inline(always)]
    pub fn bfd(&self) -> BFD_R {
        BFD_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bits 6:7 - Pass control frames"]
    #[inline(always)]
    pub fn pcf(&self) -> PCF_R {
        PCF_R::new(((self.bits >> 6) & 3) as u8)
    }
    #[doc = "Bit 8 - Source address inverse filtering"]
    #[inline(always)]
    pub fn saif(&self) -> SAIF_R {
        SAIF_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Source address filter"]
    #[inline(always)]
    pub fn saf(&self) -> SAF_R {
        SAF_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Hash or perfect filter"]
    #[inline(always)]
    pub fn hpf(&self) -> HPF_R {
        HPF_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 31 - Receive all"]
    #[inline(always)]
    pub fn ra(&self) -> RA_R {
        RA_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write-side accessors: each returns a write proxy whose const generic
// parameter is the field's bit offset within the register.
impl W {
    #[doc = "Bit 0 - Promiscuous mode"]
    #[inline(always)]
    #[must_use]
    pub fn pm(&mut self) -> PM_W<MACFFR_SPEC, 0> {
        PM_W::new(self)
    }
    #[doc = "Bit 1 - Hash unicast"]
    #[inline(always)]
    #[must_use]
    pub fn hu(&mut self) -> HU_W<MACFFR_SPEC, 1> {
        HU_W::new(self)
    }
    #[doc = "Bit 2 - Hash multicast"]
    #[inline(always)]
    #[must_use]
    pub fn hm(&mut self) -> HM_W<MACFFR_SPEC, 2> {
        HM_W::new(self)
    }
    #[doc = "Bit 3 - Destination address inverse filtering"]
    #[inline(always)]
    #[must_use]
    pub fn daif(&mut self) -> DAIF_W<MACFFR_SPEC, 3> {
        DAIF_W::new(self)
    }
    #[doc = "Bit 4 - Pass all multicast"]
    #[inline(always)]
    #[must_use]
    pub fn pam(&mut self) -> PAM_W<MACFFR_SPEC, 4> {
        PAM_W::new(self)
    }
    #[doc = "Bit 5 - Broadcast frames disable"]
    #[inline(always)]
    #[must_use]
    pub fn bfd(&mut self) -> BFD_W<MACFFR_SPEC, 5> {
        BFD_W::new(self)
    }
    #[doc = "Bits 6:7 - Pass control frames"]
    #[inline(always)]
    #[must_use]
    pub fn pcf(&mut self) -> PCF_W<MACFFR_SPEC, 6> {
        PCF_W::new(self)
    }
    #[doc = "Bit 8 - Source address inverse filtering"]
    #[inline(always)]
    #[must_use]
    pub fn saif(&mut self) -> SAIF_W<MACFFR_SPEC, 8> {
        SAIF_W::new(self)
    }
    #[doc = "Bit 9 - Source address filter"]
    #[inline(always)]
    #[must_use]
    pub fn saf(&mut self) -> SAF_W<MACFFR_SPEC, 9> {
        SAF_W::new(self)
    }
    #[doc = "Bit 10 - Hash or perfect filter"]
    #[inline(always)]
    #[must_use]
    pub fn hpf(&mut self) -> HPF_W<MACFFR_SPEC, 10> {
        HPF_W::new(self)
    }
    #[doc = "Bit 31 - Receive all"]
    #[inline(always)]
    #[must_use]
    pub fn ra(&mut self) -> RA_W<MACFFR_SPEC, 31> {
        RA_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Ethernet MAC frame filter register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`macffr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`macffr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker: the register description lives in the trait impls below.
pub struct MACFFR_SPEC;
impl crate::RegisterSpec for MACFFR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`macffr::R`](R) reader structure"]
impl crate::Readable for MACFFR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`macffr::W`](W) writer structure"]
impl crate::Writable for MACFFR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MACFFR to value 0"]
impl crate::Resettable for MACFFR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate lyon;
//use lyon::tessellation as tess;
use lyon::path::iterator::PathIterator;
use lyon::path::FlattenedEvent;
use lyon::algorithms::path::iterator::Flattened;
/// A simple polygon: an ordered list of vertices. The closing edge from the
/// last vertex back to the first is implicit (see `PolyPoint::list`).
#[derive(PartialEq)]
pub struct Polygon {
    pub vertices: Vec<lyon::math::Point>,
}
impl Polygon {
    /// Creates an empty polygon with no vertices.
    pub fn new() -> Self {
        Polygon {
            vertices: Vec::new(),
        }
    }
    /// Builds one `Polygon` per closed sub-path of the flattened `path`.
    ///
    /// `MoveTo` starts a new polygon, `LineTo` appends a vertex, and `Close`
    /// finishes the current polygon. A trailing sub-path that is never
    /// closed is discarded (unchanged from the previous behavior).
    pub fn from_path<Iter: PathIterator>(path: Flattened<Iter>) -> Vec<Self> {
        let mut polys = Vec::new();
        let mut current_poly = None;
        for evt in path {
            match evt {
                FlattenedEvent::MoveTo(p) => {
                    let mut poly = Polygon::new();
                    poly.vertices.push(p);
                    current_poly = Some(poly);
                }
                FlattenedEvent::LineTo(p) => {
                    if let Some(ref mut poly) = current_poly {
                        poly.vertices.push(p);
                    }
                }
                FlattenedEvent::Close => {
                    // A `Close` without a preceding `MoveTo` used to panic
                    // via `unwrap()`; ignore such events instead.
                    if let Some(poly) = current_poly.take() {
                        polys.push(poly);
                    }
                }
            }
        }
        polys
    }
}
/// A vertex of a polygon together with its two neighbors along the outline.
pub struct PolyPoint<'a> {
    /// The vertex itself.
    vertex: &'a lyon::math::Point,
    /// Preceding vertex along the outline (wraps around).
    prev: &'a lyon::math::Point,
    /// Following vertex along the outline (wraps around).
    next: &'a lyon::math::Point,
    //pub poly_parent: &'a mut Option<&'a Polygon<'a>>,
    /// Index of the owning polygon in the source list.
    poly_idx: usize,
    // TODO: equality operator that just compares 'vertex'
}
impl<'a> PolyPoint<'a> {
    /// Flattens `polys` into one `PolyPoint` per vertex, each carrying
    /// references to its neighbors.
    ///
    /// The (prev, vertex, next) window is seeded with the second-last and
    /// last vertices so the implicit closing edge is covered, then slid
    /// forward over the vertex list.
    ///
    /// Panics if any polygon has fewer than 3 vertices.
    pub fn list(polys: &'a Vec<Polygon>) -> Vec<PolyPoint> {
        let mut pts = Vec::new();
        for (poly_idx, poly) in polys.iter().enumerate() {
            assert!(poly.vertices.len() >= 3, "Got a degenerate polygon with only {} vertices.", poly.vertices.len());
            let mut v1 = &poly.vertices[poly.vertices.len()-2]; // second last element
            let mut v2 = &poly.vertices[poly.vertices.len()-1]; // last element
            // NOTE(review): only the initial pair is checked for duplicates;
            // repeated vertices later in the list slip through — confirm
            // whether that is acceptable upstream.
            debug_assert!(v1 != v2, "got same element twice: point {}", v1);
            for v3 in &poly.vertices {
                pts.push(PolyPoint {
                    prev: v1,
                    vertex: v2,
                    next: v3,
                    poly_idx: poly_idx,
                });
                //trace!("adding Vertex: {}, {}, {}", v1, v2, v3);
                // Slide the window forward by one vertex.
                v1 = v2;
                v2 = v3;
            }
        }
        pts
    }
}
/// Sorts `pts` in place by vertex y coordinate, highest first.
///
/// Panics if any y coordinate is NaN — floats only admit a total order when
/// NaNs are absent.
pub fn sort_poly_points(pts: &mut Vec<PolyPoint>) {
    pts.sort_by(|lhs, rhs| {
        lhs.vertex.y.partial_cmp(&rhs.vertex.y).unwrap().reverse()
    });
}
/// A non-horizontal polygon edge tracked on the scanline.
struct Edge<'a> {
    upper: &'a lyon::math::Point, // higher y
    lower: &'a lyon::math::Point, // lower y
    /// Index of the polygon this edge belongs to.
    poly_idx: usize,
}
impl<'a> Edge<'a> {
    /// Returns the x coordinate of this edge at height `y` by linear
    /// interpolation between its end points.
    ///
    /// `y` must lie between `lower.y` and `upper.y` (checked in debug
    /// builds); horizontal edges never reach here (see `handle_edge`).
    pub fn interpolate_x(&self, y: f32) -> f32 {
        debug_assert!(self.upper.y >= y && y >= self.lower.y,
            "interpolation point must lie between edge's end points: Edge is from {} to {}, query y is {}.", self.upper.y, self.lower.y, y);
        // Parametric position of y along the edge, 0 at lower, 1 at upper.
        let t = (y - self.lower.y) / (self.upper.y - self.lower.y);
        self.lower.x + t * (self.upper.x - self.lower.x)
    }
}
// insert or remove edge from scanline (scanline is at 'vert')
// insert or remove edge from scanline (scanline is at 'vert')
//
// An edge incident to `vert` either ends at the scanline (its other end is
// above, so it is removed) or starts there (its other end is below, so it
// is inserted keeping the scanline sorted by interpolated x). Zero-length
// and horizontal edges are ignored.
fn handle_edge<'a>(scanline: &mut Vec<Edge<'a>>, vert: &'a lyon::math::Point, other: &'a lyon::math::Point, poly_idx: usize) {
    if vert == other {
        return; // ignore degenerate edges with zero length
    }
    trace!(" -> handling edge from {} to {} (poly {})", vert, other, poly_idx);
    if other.y == vert.y {
        trace!(" -> ignoring horizontal edges");
        return;
    }
    if other.y > vert.y {
        // edge ends at scanline: remove it from scanline
        // TODO: implement ordering trait for Edge that uses interpolated x value so we can find
        // our edge more efficiently
        trace!(" -> removing edge, it ends here");
        scanline.retain(|edge| edge.lower != vert);
    } else {
        // edge starts at scanline: insert it in a sorted fashion.
        // `Ok` means another edge passes through exactly this x; that should
        // not happen for well-formed input, but inserting there is still
        // sound, so both cases yield the same index.
        let index = match scanline
            .binary_search_by(|edge| edge.interpolate_x(vert.y).partial_cmp(&vert.x).unwrap())
        {
            Ok(i) | Err(i) => i,
        };
        trace!(" -> insert edge at index {}", index);
        scanline.insert(index, Edge { upper: vert, lower: other, poly_idx });
    }
}
/// Containment information for a single polygon, as produced by
/// `create_parent_list`.
#[derive(Clone)]
pub struct ParentInfo<'a> {
    pub polygon: &'a Polygon,
    // index of the immediately enclosing polygon, `None` for outermost ones
    pub parent_idx: Option<usize>,
    pub level: usize, // 0 means poly is outermost
}
pub fn create_parent_list<'a>(polygons: &'a Vec<Polygon>) -> Vec<ParentInfo<'a>> {
let mut pts = PolyPoint::list(&polygons);
sort_poly_points(&mut pts);
let mut current_scanline: Vec<Edge> = Vec::new();
let mut parents: Vec<Option<ParentInfo>> = vec![None; polygons.len()];
for (_step, pt) in pts.iter().enumerate() {
trace!("scanline is at y = {}", pt.vertex.y);
// look at edge (prev, vertex)
handle_edge(&mut current_scanline, pt.vertex, pt.prev, pt.poly_idx);
// look at edge (vertex, next)
handle_edge(&mut current_scanline, pt.vertex, pt.next, pt.poly_idx);
let mut parent_stack: Vec<usize> = Vec::new();
// count number of edges between current vertex and the outside (while ignoring edges of
// the current polygon)
for ref edge in ¤t_scanline {
// only look at edges on the left of the current vertex
if edge.interpolate_x(pt.vertex.y) >= pt.vertex.x {
break;
}
// ignore edges from current polygon
if edge.poly_idx == pt.poly_idx {
continue;
}
// push or pop polys to/from stack
let mut pop = false;
if let Some(p) = parent_stack.last() {
if *p == edge.poly_idx {
pop = true;
}
}
if pop {
parent_stack.pop();
} else {
parent_stack.push(edge.poly_idx);
}
}
trace!(" -> handling point {:?}", pt.vertex);
trace!(" -> last edge on stack of {}: {:?}", parent_stack.len(), parent_stack.last());
if !parents[pt.poly_idx].is_some() {
// parent information not yet defined, add it
parents[pt.poly_idx] = Some(ParentInfo{
polygon: &polygons[pt.poly_idx],
parent_idx: parent_stack.last().cloned(),
level : parent_stack.len(),
});
trace!(" -> assigned parent {:?}", parent_stack.last());
} else if let Some(ref pi) = &parents[pt.poly_idx] {
// polygon at poly_idx already has a parent & level defined
// make sure it is the right one
// (this should not be necessary, just to make sure our implementation is correct
// and our assumptions were valid)
assert!(pi.level == parent_stack.len(),
"Invalid level for polygon {}: Expected {}, but we previously calculated {}.",
pt.poly_idx, parent_stack.len(), pi.level);
assert!(pi.parent_idx.is_some() == parent_stack.last().is_some(),
"Invalid parent for polygon {}: Expected to have parent? {}. Previously determined: {}.",
pt.poly_idx, parent_stack.last().is_some(), pi.parent_idx.is_some());
if let Some(p) = pi.parent_idx {
assert!(p == *parent_stack.last().unwrap(),
"Invalid parent computed: Polygon {} already has parent {}, but we just found {:?} as parent.",
pt.poly_idx, p, parent_stack.last());
}
}
}
assert!(parents.len() == polygons.len(), "Did not process all polygons. Only got {} out of {}.", parents.len(), polygons.len());
parents.into_iter().map(|p| p.unwrap()).collect()
}
|
/// Spells out `n` in English words ("twenty-one", "one hundred", ...).
///
/// Handles the full `u64` range up to quintillions. Words are separated by
/// single spaces, tens are joined to units with a hyphen ("forty-two"),
/// and there is no "and" (US convention).
pub fn encode(n: u64) -> String {
    // Names for 0..=19.
    const ONES: [&str; 20] = [
        "zero", "one", "two", "three", "four", "five", "six", "seven",
        "eight", "nine", "ten", "eleven", "twelve", "thirteen", "fourteen",
        "fifteen", "sixteen", "seventeen", "eighteen", "nineteen",
    ];
    // Names for 20, 30, ..., 90 (indexed by tens digit minus 2).
    const TENS: [&str; 8] = [
        "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty", "ninety",
    ];
    // Named powers of ten, largest first, so the loop below always peels
    // off the most significant group next.
    const SCALES: [(u64, &str); 6] = [
        (1_000_000_000_000_000_000, "quintillion"),
        (1_000_000_000_000_000, "quadrillion"),
        (1_000_000_000_000, "trillion"),
        (1_000_000_000, "billion"),
        (1_000_000, "million"),
        (1_000, "thousand"),
    ];
    if n < 20 {
        return ONES[n as usize].to_string();
    }
    let mut output = String::new();
    let mut rest = n;
    while rest != 0 {
        if let Some(&(unit, name)) = SCALES.iter().find(|&&(unit, _)| rest >= unit) {
            // Recursively spell the multiplier of the largest named scale
            // (it is at most 999, or 18 for quintillions).
            output.push_str(&encode(rest / unit));
            output.push(' ');
            output.push_str(name);
            output.push(' ');
            rest %= unit;
        } else if rest >= 100 {
            output.push_str(ONES[(rest / 100) as usize]);
            output.push_str(" hundred ");
            rest %= 100;
        } else if rest >= 20 {
            output.push_str(TENS[(rest / 10) as usize - 2]);
            output.push('-');
            rest %= 10;
        } else {
            // rest is 1..=19 here; the loop exits once it reaches 0.
            output.push_str(ONES[rest as usize]);
            rest = 0;
        }
    }
    // Drop the trailing space or hyphen left by the last group
    // (e.g. "twenty-" -> "twenty", "one hundred " -> "one hundred").
    output.trim_end().trim_end_matches('-').to_string()
}
|
use anyhow::Result;
use itertools::Itertools;
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
/// One passport record: a map from field name (e.g. "byr") to its raw
/// string value, parsed from whitespace-separated `key:value` pairs.
#[derive(Debug)]
struct Record(HashMap<String, String>);
impl FromStr for Record {
    type Err = anyhow::Error;
    /// Parses a whitespace-separated list of `key:value` fields.
    ///
    /// Previously a field without a `:` separator panicked via
    /// `collect_tuple().unwrap()`; it now yields a proper error, as a
    /// `FromStr` implementation should.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let data = s
            .split_whitespace()
            .map(|pair| {
                pair.split_once(':')
                    .map(|(k, v)| (k.to_string(), v.to_string()))
                    .ok_or_else(|| anyhow::anyhow!("malformed field {:?}: expected `key:value`", pair))
            })
            .collect::<Result<HashMap<String, String>, _>>()?;
        Ok(Record(data))
    }
}
/// Passport validation over the embedded `INPUT` (Advent of Code 2020,
/// day 4 style): part 1 counts records containing all required fields,
/// part 2 additionally validates each field's value.
fn main() -> Result<()> {
    // Every known passport field; "cid" is the only optional one.
    let all_fields = vec![
        "byr", // (Birth Year)
        "iyr", // (Issue Year)
        "eyr", // (Expiration Year)
        "hgt", // (Height)
        "hcl", // (Hair Color)
        "ecl", // (Eye Color)
        "pid", // (Passport ID)
        "cid", // (Country ID)
    ]
    .iter()
    .map(|s| s.to_string())
    .collect::<HashSet<String>>();
    // Required = everything except the optional "cid".
    let required = {
        let mut f = all_fields.clone();
        f.remove("cid");
        f
    };
    let selected_input = INPUT;
    // Records are separated by blank lines; parsing panics on malformed input.
    let input: Vec<Record> = selected_input
        .split("\n\n")
        .map(|l| l.parse().unwrap())
        .collect::<Vec<_>>();
    // Part 1: a record is valid when it contains at least the required keys.
    let valid_count = input
        .iter()
        .filter(|r| {
            let key_set = r.0.keys().map(|s| s.clone()).collect();
            required.is_subset(&key_set)
        })
        .count();
    println!("Valid count 1: {}", valid_count);
    // byr (Birth Year) - four digits; at least 1920 and at most 2002.
    let year_re = regex::Regex::new(r"^\d\d\d\d$").unwrap();
    // iyr (Issue Year) - four digits; at least 2010 and at most 2020.
    // eyr (Expiration Year) - four digits; at least 2020 and at most 2030.
    // hgt (Height) - a number followed by either cm or in:
    let hgt_re = regex::Regex::new(r"^(\d+)(cm|in)$").unwrap();
    // If cm, the number must be at least 150 and at most 193.
    // If in, the number must be at least 59 and at most 76.
    // hcl (Hair Color) - a # followed by exactly six characters 0-9 or a-f.
    let hcl_re = regex::Regex::new(r"^#[0-9a-f]{6}$").unwrap();
    // ecl (Eye Color) - exactly one of: amb blu brn gry grn hzl oth.
    let ecl_re = regex::Regex::new(r"^(amb|blu|brn|gry|grn|hzl|oth)$").unwrap();
    // pid (Passport ID) - a nine-digit number, including leading zeroes.
    let pid_re = regex::Regex::new(r"^\d{9}$").unwrap();
    // Part 2: same key requirement, plus per-field value validation.
    let valid_count_2 = input
        .iter()
        .filter(|r| {
            let key_set = r.0.keys().map(|s| s.clone()).collect();
            required.is_subset(&key_set)
        })
        .filter(|r| {
            r.0.iter().all(|(k, v)| match k.as_str() {
                "byr" => {
                    year_re.is_match(v) && {
                        let i: i32 = v.parse().unwrap();
                        i >= 1920 && i <= 2002
                    }
                }
                "iyr" => {
                    year_re.is_match(v) && {
                        let i: i32 = v.parse().unwrap();
                        i >= 2010 && i <= 2020
                    }
                }
                "eyr" => {
                    year_re.is_match(v) && {
                        let i: i32 = v.parse().unwrap();
                        i >= 2020 && i <= 2030
                    }
                }
                "hgt" => hgt_re.captures(v).map_or(false, |caps| match &caps[2] {
                    "cm" => (150..=193).contains(&caps[1].parse::<i32>().unwrap()),
                    "in" => (59..=76).contains(&caps[1].parse::<i32>().unwrap()),
                    _ => false,
                }),
                "hcl" => hcl_re.is_match(v),
                "ecl" => ecl_re.is_match(v),
                "pid" => pid_re.is_match(v),
                // unknown fields (and "cid") never invalidate a record
                _ => true,
            })
        })
        .count();
    println!("Valid count 2: {}", valid_count_2);
    Ok(())
}
const INPUT: &str = r#"eyr:2028 iyr:2016 byr:1995 ecl:oth
pid:543685203 hcl:#c0946f
hgt:152cm
cid:252
hcl:#733820 hgt:155cm
iyr:2013 byr:1989 pid:728471979
ecl:grn eyr:2022
hgt:171cm
iyr:2013 pid:214368857 hcl:#cfa07d byr:1986 eyr:2028 ecl:grn
hgt:167cm cid:210 ecl:brn pid:429131951 hcl:#cfa07d eyr:2029 iyr:2010
byr:1945
hcl:#888785 iyr:2015
hgt:170cm pid:893805464 ecl:amb byr:1966 eyr:2028
hgt:170cm ecl:amb
hcl:#c0946f eyr:2020 iyr:2016 pid:725010548
byr:1928
byr:1999 hcl:#888785
eyr:2026
ecl:hzl
iyr:2016 hgt:193cm pid:170608679
eyr:2024 iyr:2016 hcl:#cfa07d ecl:grn byr:2001 pid:391942873 cid:104 hgt:164cm
iyr:2019
eyr:2025 pid:138912840 byr:1996
hgt:166cm
hcl:#888785 ecl:grn
iyr:2023 hcl:a58381 pid:#401a29 eyr:1940
byr:1920
ecl:utc hgt:183cm
pid:493510244 ecl:gry hgt:153cm byr:1950 cid:181 eyr:2028
hcl:#ceb3a1
iyr:2020
iyr:2018 pid:074340974 hgt:182cm
hcl:#866857 byr:1988 ecl:hzl eyr:2023
hcl:#866857 ecl:oth byr:1977 iyr:2014 hgt:180cm pid:860745884
eyr:2023
eyr:2026 pid:815594641
ecl:gry iyr:2012 byr:1992 hgt:161cm hcl:#b6652a
ecl:gry cid:338 eyr:2021 pid:777099878 hgt:193cm hcl:#efcc98
byr:1945
iyr:2015
iyr:2016 byr:1934 hcl:#b6652a
hgt:162cm ecl:hzl
cid:296
pid:742610207
eyr:2022
ecl:#ba3242
hgt:80 byr:1931
pid:550004054 iyr:1949 eyr:1944 hcl:fb3859
ecl:amb eyr:2024
byr:1965 iyr:2010 pid:094059049
hcl:#fffffd
hgt:168cm
pid:159cm
iyr:1923 eyr:2032 hcl:701107 cid:343 ecl:gmt byr:2010
hgt:177cm
eyr:2021
ecl:grn byr:1991
hcl:#fffffd hgt:167cm pid:243218792 iyr:2019
hgt:157cm byr:2017 ecl:grn iyr:2012
eyr:2030 hcl:#18171d pid:173cm
pid:260101979 hgt:187cm eyr:2033 ecl:lzr
byr:2020 hcl:1058ce cid:133 iyr:2012
hcl:#7d3b0c
pid:307828343 byr:2001
cid:317 iyr:2013
eyr:2029
pid:472940417 eyr:1960
hgt:181cm hcl:#c0946f cid:269
byr:2014
iyr:1956
hcl:#18171d eyr:2021 byr:2001 pid:421443124
ecl:brn iyr:2020 hgt:156cm
cid:347 hgt:60in pid:359783692 byr:1932
ecl:hzl
eyr:2023
hcl:#888785 iyr:2019
pid:230915137
byr:1999
iyr:2011 eyr:2020 hcl:#7d3b0c ecl:hzl
hgt:164cm
iyr:1989
byr:2008
hgt:154cm
eyr:2028 pid:280298169
cid:208
ecl:oth
byr:1954 iyr:2017
ecl:hzl
eyr:2026
pid:966957581 hgt:175cm hcl:#18171d
pid:308053355 hgt:192cm eyr:2022 ecl:amb cid:146 iyr:2015
byr:1991 hcl:#c0946f
hcl:#a97842 pid:244441133 iyr:2019
hgt:182cm
ecl:amb cid:172 byr:1973 eyr:2029
iyr:2017
byr:1985 cid:215
ecl:blu hcl:#623a2f hgt:160cm pid:157856689 eyr:2030
eyr:2027 ecl:#d72f9b hgt:162cm
iyr:2018 hcl:#a97842
byr:1945
pid:131243258
hcl:#b3f2f0 pid:204254353 cid:169 eyr:2020
iyr:2013 hgt:172cm ecl:blu byr:1950
byr:1957 hcl:#c0946f hgt:152cm ecl:blu eyr:2027 pid:325917033
iyr:2010
ecl:oth byr:1950 hgt:166cm pid:007352351
hcl:#b6652a
iyr:2020
eyr:2024
hgt:165 eyr:2030 iyr:2027
ecl:#1a34f1 pid:2894591864 byr:2024 hcl:z
byr:1971 ecl:oth
hgt:163cm eyr:2021 pid:040443396
hgt:177cm
byr:1955 pid:585735590 iyr:2010 ecl:grn eyr:2024
hcl:#602927
cid:74
iyr:2010
pid:014378493 hgt:174cm eyr:2020
ecl:grn byr:1944
pid:404141049
byr:1947 ecl:blu hgt:170cm iyr:2011
eyr:2028
hcl:#cfa07d
ecl:hzl byr:1938 pid:235085606 cid:180 hcl:8fb74c eyr:2021 hgt:73 iyr:2015
pid:860077423 ecl:gry
hcl:#3e845b
hgt:167cm byr:1933 iyr:2016 eyr:2021
hcl:#733820 hgt:66in eyr:1920
ecl:oth byr:1941 pid:979460474 iyr:2010
cid:247
hcl:#cfa07d ecl:#13bd36 hgt:193cm eyr:2027 pid:181cm byr:1952 iyr:1951
ecl:brn hcl:#602927
hgt:161cm
eyr:2027 pid:822749462 byr:1946
iyr:2014
byr:2013
iyr:2021 ecl:zzz eyr:2032 hgt:193in hcl:#a97842 pid:163cm
eyr:2029 cid:140
byr:1984
iyr:2018 hgt:187cm hcl:#b6652a pid:910674579
ecl:hzl hgt:173cm pid:096026282
iyr:2014 byr:1956
eyr:2029 hcl:#866857
eyr:2024 iyr:2019 pid:301205967
cid:276 byr:1957 hcl:#3fec29 ecl:gry hgt:165cm
iyr:2013 ecl:oth hgt:177cm hcl:#6b5442 eyr:2021 byr:1962 pid:006347857
ecl:grt byr:1983 hcl:#cfa07d
hgt:163cm
eyr:1979
iyr:1958 pid:796395720
iyr:2011 pid:415403544 hcl:#c0946f byr:1990 ecl:oth eyr:2023 hgt:73in
cid:107
hgt:166cm eyr:2029 iyr:2015
hcl:#c0946f ecl:brn
byr:1964
pid:469449137
eyr:2023
byr:1969 iyr:2010 hgt:163cm hcl:#a97842 pid:570942274
ecl:blu
hcl:#623a2f
ecl:brn hgt:183cm pid:524675399
eyr:2020 iyr:2012 byr:1981
iyr:2017 hcl:#fffffd eyr:2026
ecl:gry byr:1979 hgt:152cm pid:505790864
hgt:68in
hcl:#c0946f iyr:2012
eyr:2023 pid:933562997 byr:1993
ecl:grn
pid:267705171
hgt:166cm byr:1970 iyr:2019 hcl:#341e13 ecl:oth
eyr:2030
ecl:brn byr:1972 eyr:2026 pid:774637408 hgt:189cm iyr:2015 hcl:#341e13
hgt:175cm eyr:2026 byr:2001 iyr:2020
hcl:#733820 ecl:blu pid:686996160
hgt:190cm hcl:#c0946f pid:228444464 byr:1987
iyr:2020 eyr:2030
ecl:blu
byr:1990 hgt:179cm
pid:885359438 eyr:2028 iyr:2010 ecl:amb
hcl:#67067e
byr:1945 hcl:#866857 eyr:2022 iyr:2019
pid:708146656 cid:65
hgt:172cm ecl:brn
ecl:hzl hgt:191cm
cid:260 pid:010716679 iyr:2011 eyr:2029 byr:1920 hcl:#efcc98
iyr:2012
cid:313 pid:264894705 byr:1951 hcl:#733820 eyr:2030 ecl:blu
hgt:178cm
eyr:2027 pid:790510379
iyr:2013
ecl:amb
hgt:186cm
hcl:#866857
byr:1926
pid:535750794 hgt:191cm iyr:2016 hcl:#a97842 eyr:2029
ecl:hzl byr:1923
byr:2023 pid:#eb4c2a iyr:1939 ecl:grn hcl:06d729 hgt:73 eyr:2038
pid:792365221 iyr:2013 ecl:oth
byr:1997
hgt:170cm hcl:#efcc98
eyr:2022
hgt:192cm pid:874141668
byr:1957 iyr:2015
ecl:gry
hcl:#b6652a pid:770238761 eyr:2029 byr:1934 iyr:2013
ecl:blu cid:177
hgt:184cm
ecl:hzl eyr:2024 hgt:72in pid:546439165
iyr:2013
hcl:#c0946f cid:223 byr:1989
byr:1985
ecl:utc pid:#ff1cbf
iyr:2018 hcl:#866857 hgt:169cm eyr:2026 cid:194
hgt:189cm
eyr:2026 pid:120642045 ecl:blu
hcl:#602927 cid:177
byr:1954 iyr:2012
pid:314624973
byr:1959 iyr:2015 hcl:#c0946f ecl:grn
eyr:2027 cid:349 hgt:156cm
byr:1978
iyr:2020 hgt:150cm cid:266 eyr:2026
pid:443912835 hcl:#b6652a
hgt:174cm byr:1974 pid:729198828
ecl:brn iyr:2014
hcl:#18171d eyr:2027
pid:472891001 ecl:xry
hgt:96 hcl:1b816a iyr:1954
byr:2015 eyr:2037
byr:1966 eyr:2022
iyr:2014
pid:848187688 hcl:#602927 ecl:gry hgt:152cm
hgt:129 eyr:2037 cid:61 iyr:2009 byr:2027 hcl:#c0946f
pid:3569865
ecl:#4e3d72
ecl:gry
eyr:2021 pid:234525998 byr:1964 hgt:168cm cid:140
hcl:#7d3b0c iyr:2013
ecl:xry
cid:86
hgt:172in
byr:1972
iyr:2015 hcl:#7d3b0c pid:833809421 eyr:2030
pid:444365280 hgt:72in
ecl:brn
hcl:#b6652a byr:1985 eyr:2027 iyr:2012
iyr:2010 byr:2013 hgt:181cm eyr:2021
pid:072317444
ecl:oth hcl:#866857
cid:118
pid:4354408888 iyr:2012
hcl:#b6652a cid:104
hgt:96 eyr:2020
byr:1933 ecl:amb
eyr:2023 ecl:gry hcl:#a97842 pid:287719484 byr:1994
iyr:2011 hgt:163cm cid:299
byr:1932
hgt:170cm
iyr:2014 pid:777844412 eyr:2040 hcl:#cfa07d ecl:brn
cid:160 hgt:191cm eyr:2020 iyr:2012
ecl:brn byr:1981 pid:077027782
cid:182 hgt:176cm hcl:#7d3b0c
eyr:2030 ecl:blu pid:096742425 iyr:2010 byr:1963
byr:2010 cid:337 hcl:z pid:525126586 iyr:2010 hgt:73cm eyr:2040 ecl:blu
ecl:gry
iyr:2017
hgt:185cm hcl:#6b5442 byr:1993
eyr:2029 pid:366083139 cid:343
eyr:2028 ecl:amb
pid:878658841 byr:1960 hgt:179cm hcl:#18171d iyr:2010
pid:537309261 iyr:2015 hgt:187cm
hcl:#4fe831 eyr:2026
ecl:blu byr:1982
ecl:brn hgt:163cm
eyr:2021 hcl:#6b5442 byr:1979 iyr:2013 pid:924759517
pid:683651053 hcl:#179c55
ecl:blu byr:1989 hgt:190cm
iyr:2016
eyr:2030
ecl:grn
iyr:2016 hcl:#b6652a
byr:1994 eyr:2020 pid:448424292 hgt:174cm
hgt:157cm
ecl:grn
byr:2000
pid:734707993 hcl:#341e13 iyr:2020
hcl:#341e13 hgt:156cm iyr:2020 pid:299213638
byr:1947 ecl:hzl eyr:2023
hgt:193cm hcl:#b6652a iyr:2014 ecl:hzl byr:1947 eyr:2025
pid:044486467
byr:1975
hgt:159cm
ecl:grn pid:318489576 eyr:2029 hcl:#6b5442
iyr:2020
iyr:2018 pid:512971930
hcl:#888785 byr:1966 eyr:2024 hgt:158cm
cid:100 ecl:gry
ecl:amb eyr:2030 hgt:171cm hcl:#efcc98 pid:800921581 cid:339 byr:1980 iyr:2017
iyr:2019 cid:172
hgt:152cm
eyr:2022 ecl:oth hcl:#602927 byr:1960
iyr:2019 pid:762312913
eyr:2029
ecl:hzl
hcl:#6b5442
byr:1940
hgt:169cm cid:289
eyr:2022 ecl:gry byr:1976
iyr:2020 hcl:#733820 hgt:172cm pid:040331561
hgt:171cm ecl:brn iyr:2013 eyr:2027 byr:1940 hcl:#a6e32a pid:223986941
hcl:#341e13
eyr:2028 ecl:amb byr:1942
hgt:166cm pid:435382099 iyr:2020
cid:298 pid:641326891
hgt:155cm hcl:#623a2f ecl:grn byr:1981 eyr:2025
iyr:2010
iyr:2015 pid:472000322 eyr:2021 byr:1977
ecl:gry hgt:165cm cid:270
eyr:2027 byr:1956
pid:193087729 hcl:#ceb3a1
cid:213 hgt:193cm ecl:oth
iyr:2014
byr:1971 cid:96
hgt:74in
pid:136003336
eyr:2020 ecl:hzl hcl:#efcc98
hcl:z pid:097595072 ecl:amb
iyr:2015 byr:2021
eyr:2039 hgt:188cm
pid:74823273
hcl:#341e13
cid:166 hgt:182cm byr:2026 iyr:2027 ecl:amb
eyr:2032
byr:1932 eyr:2022 pid:367248062 hgt:182cm ecl:oth hcl:#c0946f
iyr:2020
hgt:72cm
iyr:2015 cid:234 byr:2013
ecl:brn pid:9401866358
pid:022399779 iyr:2010 byr:1969 hcl:#6b5442
ecl:grn eyr:2020
hgt:189cm
byr:1971 iyr:2011 cid:161 ecl:brn hgt:153cm
eyr:2028 pid:819137905 hcl:#cfa07d
cid:161 hgt:159cm iyr:2011 pid:815860793 hcl:#a97842 ecl:grn byr:1972 eyr:2027
ecl:amb
hgt:118 byr:1981 iyr:2019
hcl:#a97842 eyr:2021 pid:270790642
hcl:#b6652a pid:732272914 eyr:2030 hgt:183cm ecl:hzl
byr:1934
iyr:2018
eyr:2027
pid:877388498 hcl:#ceb3a1
byr:1925 cid:236 ecl:grn
iyr:2019 hgt:191cm
eyr:2020 ecl:brn hcl:#fffffd hgt:181cm pid:801311341 byr:1986 iyr:2010
byr:1925 cid:179 ecl:hzl pid:360641953 eyr:2030
hgt:171in iyr:2015
hcl:#602927
cid:83 hgt:181cm
eyr:2028 byr:1941 pid:165937945 hcl:#888785 iyr:2014
ecl:grn
hcl:#a97842 byr:1928
iyr:2013
pid:870072019 hgt:76in
ecl:oth cid:127 eyr:2026
cid:169
hgt:187cm pid:008180128 iyr:2013 byr:1991 hcl:#7d3b0c ecl:hzl eyr:2026
ecl:amb
eyr:2027 hgt:155cm pid:586151564 iyr:2010
byr:1949
hcl:#18171d
hgt:167cm
iyr:2010 byr:1982 ecl:amb
cid:235 pid:557737957 eyr:2020
hcl:#ceb3a1
ecl:grn byr:1939 hcl:#733820
eyr:2026 pid:993218958 iyr:2010
hgt:150cm
hgt:68in ecl:blu
byr:1965 iyr:2017 pid:854858050 eyr:2021
ecl:gry pid:347763159 eyr:2024 iyr:2017 byr:1961
hgt:151cm
hcl:#623a2f
ecl:utc hcl:#602927
pid:#1408ff byr:1941
cid:82
iyr:2015 hgt:185cm eyr:2028
iyr:2020 hgt:151cm eyr:2025
byr:1934 hcl:#888785
pid:396545094 ecl:oth
hgt:153cm
eyr:2028 hcl:#733820 ecl:gry iyr:2019
pid:081352630 byr:1943
eyr:2030
iyr:2011
ecl:grn pid:313741119
hgt:161cm byr:1946
hcl:#a97842
byr:1968 ecl:gry
pid:742357550
eyr:2024 hcl:#18171d iyr:2018
hgt:157cm
pid:387505919
ecl:oth byr:1945
iyr:2014
hgt:190cm hcl:#888785
eyr:2028
iyr:2017 hgt:175cm
byr:1989 eyr:2022
hcl:#b6652a pid:499016802 ecl:gry cid:136
pid:490807331 iyr:2016
hcl:#ceb3a1
hgt:150cm eyr:2026
ecl:amb byr:1967
iyr:2011
hgt:155in
hcl:#ceb3a1 pid:118497416
eyr:2029 byr:2011 ecl:oth
hcl:03a888 byr:2029
ecl:#6f7292 eyr:1969 iyr:2028 hgt:162cm pid:73551266
iyr:2016 hgt:182cm
byr:1966 ecl:grn eyr:2022
hcl:#fffffd pid:061720787
byr:1971 hcl:z
eyr:2035 pid:158cm
ecl:#d3ec19
hcl:#623a2f hgt:156cm eyr:2028
ecl:brn iyr:2013
byr:1980 pid:112283719
eyr:2020
byr:1956 iyr:2013
hcl:#6b5442
ecl:grn pid:876589775 hgt:179cm
hgt:138
byr:2013 eyr:2040 iyr:2028 cid:197 ecl:#8844fd pid:8524414485
hcl:z
eyr:2040
hgt:173in hcl:z pid:#654654 byr:2016 iyr:2022 ecl:#452d22
iyr:2012 cid:265 eyr:2021 hgt:192cm
byr:1993 ecl:brn
eyr:2026 hcl:#888785
hgt:158cm byr:1942
iyr:2015
ecl:amb pid:546984106
iyr:2019
ecl:hzl
byr:1922 eyr:2028 hgt:172cm
pid:465052232 hcl:#602927
pid:710362693 eyr:2023
hcl:#c0946f byr:1951 ecl:grn
iyr:2019 hgt:190cm
iyr:2024 pid:#a08e69
hcl:z byr:1966 ecl:#7b9978 eyr:2035
hgt:69cm
hcl:#efcc98
pid:164cm
iyr:2010 cid:194 hgt:71cm byr:1923 eyr:2026
hgt:65in
iyr:2019 byr:1969 pid:466669360 eyr:2022 ecl:brn hcl:#b6652a
pid:42472559 hcl:#6f5763
eyr:2035
iyr:2014 hgt:154in byr:1939 ecl:grt cid:323
pid:715680334 hgt:166cm cid:283
byr:1982
iyr:2015 eyr:2030 hcl:#ceb3a1 ecl:grn
eyr:2018 iyr:2029
ecl:brn
byr:2022 pid:#ff6df1
hcl:z
hgt:68cm
pid:094541122
eyr:2024 byr:1940
ecl:amb iyr:2019 hgt:64in hcl:#733820
hgt:163in
eyr:2022 ecl:utc hcl:#ceb3a1 iyr:2028
ecl:gry pid:53552934
hgt:193 byr:2021
eyr:2028
iyr:2011 cid:98 hcl:90c63f
eyr:2024 hcl:#cfa07d ecl:brn
iyr:2019 byr:1993 hgt:156cm pid:449484188
iyr:2020
hgt:164cm hcl:#623a2f
pid:820731743 eyr:2025
byr:1997 ecl:hzl
hcl:47242b ecl:utc hgt:156
pid:#9a9903 eyr:2030 iyr:1990
byr:2011
hcl:#602927
hgt:189cm
pid:949021883 iyr:2014 ecl:oth cid:327
eyr:2027 byr:1953
hgt:189cm cid:301
byr:1982
ecl:grn
eyr:2028 hcl:#733820 pid:796040143 iyr:2015
cid:169 iyr:2013 pid:355177646 byr:1988
ecl:oth
hcl:#cfa07d
hgt:185cm eyr:2022
pid:563150261 eyr:2020 ecl:brn byr:1996 hcl:#7d3b0c iyr:2018 hgt:189cm cid:84
cid:188 eyr:2027
byr:1944
pid:486184923
iyr:2010 hgt:193cm hcl:#341e13 ecl:oth
iyr:2019
byr:1969 hgt:152cm pid:430698432 ecl:gry hcl:#888785 eyr:2026 cid:293
ecl:gry
cid:270 hcl:#602927 iyr:2017 hgt:151cm eyr:2029 pid:051398739 byr:1954
ecl:oth eyr:2030 pid:024655030
hgt:184cm byr:1969
hcl:#18171d
eyr:2030
pid:899973263 hgt:178cm byr:1987 hcl:#cfa07d iyr:2012
ecl:amb
iyr:1958 hgt:165cm pid:377677319
ecl:grt eyr:2032 byr:2025
hcl:bbfbe2
ecl:blu
iyr:2016
hgt:152cm byr:1964
hcl:#c4f777
eyr:2021
pid:044307549 cid:80
ecl:brn pid:330836320
byr:1963 cid:217 hgt:169cm
eyr:2024
iyr:2019 hcl:#ceb3a1
byr:1976 eyr:2027
pid:452662874 hgt:192cm ecl:oth iyr:2018 hcl:#602927
eyr:2027 hgt:183cm ecl:brn iyr:2017 hcl:#341e13 pid:827463598
ecl:brn pid:930667228 cid:310 iyr:2020
eyr:2027 hgt:160cm byr:1932 hcl:#c0946f
pid:955804028 byr:1983
hcl:#fffffd
hgt:178cm iyr:2013
eyr:2021 ecl:gry
hgt:189cm eyr:2021 pid:430243363 iyr:2015 hcl:#ceb3a1
byr:2000 ecl:oth cid:284
pid:436671537 hcl:#cfa07d iyr:2011 cid:106 hgt:171cm
ecl:blu eyr:2021 byr:1943
eyr:2028 hgt:169cm
iyr:2015 pid:177443573 byr:1945
hcl:#c0946f ecl:gry
hcl:#fffffd byr:1995 eyr:2021
ecl:grn
hgt:192cm iyr:2010 pid:754912745
pid:330882171 iyr:2015 cid:211 ecl:grn byr:1961 eyr:2021 hcl:z
hgt:169cm
byr:1926 eyr:2029 pid:178633665 cid:141 iyr:2017 hcl:#b99eb9
hgt:178cm ecl:brn
eyr:2022 ecl:hzl hcl:#cfa07d hgt:168cm iyr:2015
byr:1982 pid:645675448
ecl:blu byr:1980 hgt:186cm iyr:2010 cid:94 hcl:#c0946f eyr:2027 pid:384440210
cid:309 hcl:#602927 hgt:192cm eyr:2027 ecl:amb
pid:527932745 iyr:2012 byr:1982
cid:132
ecl:blu iyr:2016
eyr:2027 byr:1940 hcl:#341e13 hgt:166cm pid:613386501
pid:360563823 eyr:2028 byr:1990 iyr:2016
ecl:blu cid:287 hgt:162cm hcl:#888785
hgt:161cm
byr:2002
hcl:#623a2f pid:535361632
ecl:gry eyr:2021 iyr:2013
hgt:67in
byr:1967
cid:333 hcl:#cfa07d
iyr:2012 eyr:2024 ecl:hzl pid:538161833
ecl:#2bc145 eyr:1963 iyr:2030
cid:241 hcl:2fc384 hgt:156in pid:2899917140
byr:2005
eyr:2021 pid:021590229 ecl:gry
hgt:164cm iyr:2013 hcl:#efcc98 byr:1985
ecl:hzl byr:1943
cid:279 pid:979130395
iyr:2011
hgt:165cm
eyr:2021
hcl:#f331b3
hgt:161cm
hcl:#888785 byr:1981 pid:835477382 eyr:2025 iyr:2012
cid:348
ecl:blu
hgt:159cm hcl:b4ce6a cid:319 eyr:2035 iyr:1965 ecl:oth
byr:2010 pid:158cm
iyr:2020
eyr:2026 ecl:grn hcl:#a97842 pid:126915503
hgt:178cm byr:1986
hgt:184cm ecl:hzl
cid:67 iyr:2020 eyr:2026 pid:168775568 byr:1944 hcl:#a97842
hcl:#fffffd iyr:2016 pid:379463363
ecl:oth
hgt:179cm byr:1988
eyr:2028
hcl:#cfa07d ecl:amb eyr:2030 pid:320360020
iyr:2016 hgt:172cm byr:1961
cid:221 hcl:#cfa07d byr:1946 eyr:2024 ecl:oth pid:066950409 hgt:173cm
iyr:2020
hcl:#602927 eyr:2028 ecl:gry iyr:2019 pid:583204134 byr:1966 hgt:178cm
byr:1930
iyr:2020 ecl:hzl
hcl:#ceb3a1 pid:285751767 cid:287 eyr:2023 hgt:192cm
eyr:2024
ecl:hzl cid:87 iyr:2015
hgt:152cm hcl:#18171d pid:959574669
byr:1990
pid:45938863
hcl:49c7ce cid:349 hgt:181cm
eyr:2023 ecl:grn iyr:2015 byr:1948
hcl:#866857 iyr:2012 ecl:amb cid:132 byr:1955 hgt:162cm pid:597748286 eyr:2023
pid:293364535 byr:2024
hgt:177cm eyr:2039
iyr:2020 hcl:#dae928 ecl:hzl
pid:212659709 iyr:2018
hgt:188cm
hcl:#efcc98 byr:1974 eyr:2029 ecl:oth cid:244
cid:140
ecl:amb
eyr:2022 hgt:181cm hcl:#efcc98
byr:1943
iyr:2016
cid:71 hgt:151cm pid:5063555219 eyr:2023 ecl:hzl
byr:2019
hcl:#7d3b0c iyr:2023
hgt:157in pid:#298b06 iyr:2030 ecl:#66a631 eyr:2035 hcl:z byr:2019
hgt:190cm iyr:1943
pid:644021656 hcl:#6b621c
ecl:oth eyr:2021 byr:1923
ecl:hzl iyr:2012 eyr:2023 pid:881271720 hcl:#ceb3a1 hgt:172cm
byr:1957
iyr:2017 hcl:#888785
ecl:amb hgt:170cm byr:1967 pid:198856675 eyr:2027
eyr:2026
ecl:gry
pid:834980363 hcl:#733820 byr:1930
hgt:175cm iyr:2018
cid:214
hcl:#efcc98 eyr:2029 iyr:2010 pid:980087545
ecl:brn hgt:157cm
pid:57513658 iyr:2011 byr:1993 ecl:brn eyr:2027 hcl:#6b5442 hgt:165cm
ecl:hzl
eyr:2025
hcl:#733820
hgt:169cm iyr:2018 cid:328 byr:1999 pid:694719489
eyr:2023
cid:125 byr:1925
hgt:185cm pid:806769540 iyr:2013 ecl:hzl
hcl:#866857
iyr:2010 cid:225
ecl:hzl eyr:2027 pid:615545523
hcl:#733820
byr:1994
hgt:166cm
byr:1941 ecl:gry iyr:2019 eyr:2026 hgt:73cm hcl:#602927
pid:352996721
pid:140250433
eyr:2030 ecl:grn
hcl:#fffffd iyr:2011 byr:1937 hgt:185cm
ecl:gry byr:2002 iyr:2017 hcl:#b6652a cid:261 pid:178cm eyr:2022 hgt:166cm
ecl:grn iyr:2010 eyr:2022 byr:1924
pid:214641920 hcl:#ceb3a1
hgt:155cm
hcl:z pid:150cm ecl:utc iyr:1981
eyr:2034
hgt:156in cid:260 byr:2027
byr:1987 hgt:66in
eyr:2021 pid:876757018 iyr:2015 hcl:d596e4 ecl:hzl
cid:116 ecl:oth hgt:180cm
iyr:2020 byr:1942 hcl:#2fc31f
eyr:2027
pid:253569416
pid:509387921
eyr:2022
hcl:#888785 ecl:oth hgt:193cm
iyr:2012 cid:97
byr:1975
hcl:#18171d hgt:190cm pid:062827417 byr:1939
iyr:2019 eyr:2022
ecl:hzl
iyr:2025
byr:2028
hgt:165in eyr:2027 pid:6259332452
hcl:#478251
iyr:2018 eyr:2026 pid:523863237
hgt:187cm
ecl:oth
byr:1944
hcl:#a97842
hgt:181cm hcl:#733820 pid:923996316
cid:110
iyr:2011 byr:1949 ecl:blu eyr:2023
pid:304792392 hcl:487823 eyr:2020
hgt:70cm byr:2024
iyr:1953
ecl:blu
pid:142200694
ecl:oth hcl:#888785 eyr:2028
hgt:152cm byr:1954 iyr:2018
ecl:utc
iyr:2015 byr:1932 hcl:#623a2f
eyr:2027 hgt:183cm pid:036300444
iyr:2014 ecl:hzl byr:1935 hgt:190cm hcl:#efcc98 pid:945893288
eyr:2025
hcl:#efcc98 pid:252639104 hgt:188cm
byr:1998 iyr:2019 ecl:grn
eyr:2023
hcl:58aa4a byr:1930 hgt:193cm
iyr:1998 cid:196 ecl:brn
eyr:2032
iyr:2015 ecl:hzl
hgt:193cm pid:653794674 eyr:2024
hcl:#fffffd byr:1921
pid:980680460 byr:1962 ecl:blu
iyr:2013
hcl:#72cace
eyr:2030
hgt:180cm
eyr:2025
hgt:182cm hcl:#ceb3a1 iyr:2010 byr:1945 cid:314 pid:597769706 ecl:amb
pid:761757504
hcl:#888785 hgt:161cm iyr:2015
byr:1939 eyr:2025
cid:326 ecl:blu
ecl:gry
hgt:163cm byr:1981
pid:330818500 iyr:2017 eyr:2024
cid:71 hcl:#888785
pid:190cm cid:267 iyr:2015 ecl:brn
hcl:869252
byr:1935 hgt:142 eyr:2033
cid:239
eyr:2038 ecl:lzr hcl:z iyr:1987 pid:4632768239
hgt:162in
pid:158038227 ecl:brn byr:1995 eyr:2028 hcl:#efcc98
cid:252 iyr:2021
hgt:184cm
eyr:2027
cid:124 ecl:amb hgt:165cm byr:1949
pid:727126101 iyr:2010 hcl:#602927
ecl:grn
byr:1966 pid:184245393 hgt:164cm
eyr:2022
iyr:2014 hcl:#866857
cid:62 hgt:180cm eyr:2027 hcl:#18171d
iyr:2017 ecl:blu byr:1942 pid:930210027
ecl:grn hgt:171cm iyr:2017 hcl:#fffffd eyr:2029 byr:1946 pid:863414762
cid:95
eyr:2025 ecl:grn iyr:2019 cid:226 hcl:#b6652a
byr:1932 pid:715708549
hgt:156cm
pid:505158338 iyr:2019 byr:1981 hgt:193cm
hcl:#696a5c cid:57 ecl:hzl eyr:2023
byr:1987
hgt:155cm cid:99 ecl:grn iyr:2010
hcl:#c0946f eyr:2023
pid:431067921
hgt:190in
hcl:z eyr:2029 pid:74228790
iyr:2016 byr:2018 ecl:brn
eyr:2022
ecl:xry hgt:154cm pid:62205162
iyr:2014 byr:1936
cid:61
ecl:amb eyr:2026
byr:1966 cid:95 hcl:#733820 pid:957767251 iyr:2013 hgt:157cm
byr:1969
hgt:156cm iyr:2013 ecl:blu hcl:#a97842
cid:183
pid:960672229 eyr:2020
iyr:2013
cid:243 eyr:2028 hgt:192cm hcl:#efcc98
ecl:grn pid:222407433 byr:1978
iyr:2014 byr:1935
eyr:2021 cid:235 pid:#1b34e1
hcl:#89313f hgt:164cm ecl:blu
ecl:hzl iyr:2016 cid:327
byr:1923 pid:695935353 hgt:184cm
hcl:#a97842
eyr:2028
pid:6010745668
byr:1934 ecl:oth eyr:2020 hgt:164cm
hcl:#733820
iyr:2016
ecl:blu pid:071991002 eyr:2021 byr:1978 cid:321
hcl:#efcc98
iyr:2013 hgt:68in
ecl:grn iyr:2015 pid:137792524 cid:156
hcl:#efcc98
eyr:2029 byr:1955
hgt:165cm
byr:1949
hgt:176cm pid:531868428
hcl:#cfa07d ecl:brn iyr:2014 eyr:2024
iyr:1955 cid:108 pid:712137140 byr:2019 eyr:2040 hgt:184cm hcl:220cfe ecl:#551592
iyr:2016 eyr:2030
hgt:177cm cid:137 ecl:brn
hcl:#efcc98 pid:712202745 byr:1938
pid:357180007 iyr:2010 ecl:grn
byr:1991
hcl:#341e13
eyr:2020 hgt:159cm
eyr:2023 ecl:grn
hcl:#733820 iyr:2020 byr:1927 hgt:151cm
pid:165936826
ecl:gry
pid:794227261 iyr:2014 eyr:2030
hcl:#18171d
byr:1994
hgt:162cm
iyr:2017 eyr:2024
hcl:#7d3b0c cid:279
ecl:gry byr:1981 hgt:176cm pid:973822115
eyr:2029
hgt:152cm hcl:#fffffd ecl:amb byr:1946 iyr:2013
cid:62 pid:005240023
iyr:2010
ecl:amb hcl:#341e13 hgt:184cm
eyr:2027
pid:976217816 byr:1950
ecl:grn hgt:178cm cid:192 hcl:#602927 pid:684333017 eyr:2022
iyr:2011 byr:1987
pid:306960973 ecl:hzl hgt:168cm
byr:1954 iyr:2015 eyr:2029 hcl:#602927
hcl:#18171d
byr:1973 ecl:hzl hgt:174cm pid:922891164
iyr:2013
eyr:2023
byr:1998 hgt:189cm pid:472066200 ecl:gry iyr:2012 eyr:2021 hcl:#c0946f cid:299
iyr:2014
eyr:2028 byr:1922 pid:594856217 hgt:158cm
ecl:oth
hcl:#623a2f
pid:215206381 byr:1928
hgt:163cm
hcl:#b6652a ecl:oth iyr:2011
cid:145 iyr:2013
ecl:#38a290
eyr:2034
hcl:#602927 hgt:186cm pid:517498756
byr:1945
hcl:#5637d2 eyr:2030 byr:1955
hgt:187cm
pid:862655087 iyr:2014 ecl:grn
hcl:#7d3b0c hgt:176cm iyr:2019
eyr:2029 byr:1980 ecl:hzl
cid:346 pid:703908707
hgt:185cm
iyr:2017
cid:120 eyr:2020 hcl:#733820 ecl:blu pid:458522542 byr:1966
pid:#725759
hcl:#602927 iyr:2013 byr:2003 eyr:2023 cid:100
"#;
|
use crate::database::raw_index::InnerRawIndex;
/// Key-value view over the raw index mapping a word (as bytes) to the
/// serialized `fst::Set` of its alternative words.
#[derive(Clone)]
pub struct SynonymsIndex(pub(crate) InnerRawIndex);
impl SynonymsIndex {
    /// Looks up the alternatives stored for `word`, deserializing the
    /// stored bytes into an `fst::Set`. Returns `Ok(None)` when the word
    /// has no entry; panics if the stored bytes are not a valid set.
    pub fn alternatives_to(&self, word: &[u8]) -> Result<Option<fst::Set>, rocksdb::Error> {
        let stored = self.0.get(word)?;
        Ok(stored.map(|bytes| fst::Set::from_bytes(bytes.to_vec()).unwrap()))
    }

    /// Stores `value` (a serialized set) as the alternatives of `word`.
    pub fn set_alternatives_to(&self, word: &[u8], value: Vec<u8>) -> Result<(), rocksdb::Error> {
        self.0.set(word, value)?;
        Ok(())
    }

    /// Removes any alternatives recorded for `word`.
    pub fn del_alternatives_of(&self, word: &[u8]) -> Result<(), rocksdb::Error> {
        self.0.delete(word)?;
        Ok(())
    }
}
|
#[macro_use]
extern crate clap;
mod cli;
mod format;
mod module;
mod segments;
mod theme;
use crate::module::Module;
use crate::segments::Segment;
use crate::theme::Theme;
/// The shell dialect the prompt is rendered for; passed to the segments'
/// escape/print routines so control sequences are wrapped appropriately.
#[derive(Clone, Copy, Eq, PartialEq)]
pub enum Shell {
    Bare,
    Bash,
    Zsh
}
/// Accumulated prompt state: the segments produced by the requested
/// modules plus the settings needed while rendering them.
pub struct Powerline {
    segments: Vec<Segment>,
    theme: Theme,
    shell: Shell,
    // NOTE(review): initialized to None in main; presumably populated on
    // demand by the git segments — confirm in segments.rs.
    #[cfg(feature = "git2")]
    git: Option<git2::Repository>,
    #[cfg(feature = "git2")]
    git_statuses: Option<Vec<git2::Status>>
}
fn main() {
    // Phases are optionally bracketed with flame start/end markers so a
    // profile.html can be produced when built with the "flame" feature.
    #[cfg(feature = "flame")]
    flame::start("clap-rs");
    let matches = cli::build_cli().get_matches();
    #[cfg(feature = "flame")]
    flame::end("clap-rs");
    #[cfg(feature = "flame")]
    flame::start("parse arguments");
    // value_t_or_exit! prints an error and exits on unparsable values.
    let cwd_max_depth = value_t_or_exit!(matches, "cwd-max-depth", u8);
    let cwd_max_dir_size = value_t_or_exit!(matches, "cwd-max-dir-size", u8);
    let error = value_t_or_exit!(matches, "error", u8);
    #[cfg(feature = "flame")]
    flame::start("parse theme");
    // An invalid theme file falls back to the built-in default (with a warning).
    let theme = if let Some(file) = matches.value_of("theme") {
        if let Ok(theme) = theme::load(file) {
            theme
        } else {
            eprintln!("Invalid theme.");
            theme::DEFAULT
        }
    } else { theme::DEFAULT };
    #[cfg(feature = "flame")]
    flame::end("parse theme");
    #[cfg(feature = "flame")]
    flame::start("parse modules");
    // "modules" and "time_format" are presumably required/defaulted clap args,
    // hence the unwraps — confirm against cli::build_cli.
    let modules: Vec<_> = matches
        .values_of("modules")
        .unwrap()
        .map(|module| module.parse().unwrap())
        .collect();
    let time_format = matches.value_of("time_format").unwrap();
    #[cfg(feature = "flame")]
    flame::end("parse modules");
    #[cfg(feature = "flame")]
    flame::end("parse arguments");
    #[cfg(feature = "flame")]
    flame::start("main");
    let mut p = Powerline {
        segments: Vec::with_capacity(16), // just a guess
        theme,
        shell: match matches.value_of("shell").unwrap() {
            "bare" => Shell::Bare,
            "bash" => Shell::Bash,
            "zsh" => Shell::Zsh,
            // Presumably constrained by clap's possible_values — confirm.
            _ => unreachable!()
        },
        #[cfg(feature = "git2")]
        git: None,
        #[cfg(feature = "git2")]
        git_statuses: None
    };
    // Run every requested module in order; each appends segments to `p`.
    for module in modules {
        match module {
            Module::Cwd => segments::segment_cwd(&mut p, cwd_max_depth, cwd_max_dir_size),
            // The git modules compile to a no-op without the "git2" feature.
            Module::Git => { #[cfg(feature = "git2")] segments::segment_git(&mut p) },
            Module::GitStage => { #[cfg(feature = "git2")] segments::segment_gitstage(&mut p) },
            Module::Host => segments::segment_host(&mut p),
            Module::Jobs => segments::segment_jobs(&mut p),
            Module::NixShell => segments::segment_nix(&mut p),
            Module::Perms => segments::segment_perms(&mut p),
            Module::Ps => segments::segment_ps(&mut p),
            Module::Root => segments::segment_root(&mut p, error),
            Module::Ssh => segments::segment_ssh(&mut p),
            Module::Time => segments::segment_time(&mut p, &time_format),
            Module::User => segments::segment_user(&mut p),
            Module::LineBreak => segments::segment_linebreak(&mut p),
            Module::VirtualEnv => segments::segment_virtualenv(&mut p),
        }
    }
    #[cfg(feature = "flame")]
    flame::end("main");
    #[cfg(feature = "flame")]
    flame::start("print");
    if matches.is_present("rtl") {
        // Right-to-left: iterate segments last-to-first, handing each the
        // segment at index n-i+1 as its printed neighbor.
        let n = p.segments.len();
        for i in 1..n+1 {
            p.segments[n-i].escape(p.shell);
            p.segments[n-i].print_rtl(p.segments.get(n-i+1), p.shell, &p.theme);
        }
    } else {
        for i in 0..p.segments.len() {
            p.segments[i].escape(p.shell);
            p.segments[i].print(p.segments.get(i+1), p.shell, &p.theme);
        }
    }
    // Trailing newline, or a single-space separator in LTR mode.
    if matches.is_present("newline") {
        println!();
    } else if !matches.is_present("rtl") {
        print!(" ");
    }
    #[cfg(feature = "flame")]
    flame::end("print");
    #[cfg(feature = "flame")]
    {
        use std::fs::File;
        flame::dump_html(&mut File::create("profile.html").unwrap()).unwrap();
    }
}
|
use std::io::{self, Write as IoWrite};
use std::fmt;
use std::str::Chars;
use std::iter::Peekable;
use MathError::*;
use Token::*;
fn main() {
    println!("Shunting Yard algorithm calculator, enter an expression to be evaluated.");
    println!("Type `exit` to exit");
    let mut input = String::new();
    // let mut _ans: f64;
    // Simple REPL: read a line, evaluate it, print the result or the error.
    loop {
        print!(">>> ");
        // print! does not flush, so force the prompt out before blocking on stdin.
        io::stdout().flush().expect("Cannot flush stdout.");
        io::stdin()
            .read_line(&mut input)
            .expect("Failed to read stdin.");
        match &input.trim()[..] {
            "exit" => {
                println!("Goodbye.");
                return;
            },
            _ => match Tokens::eval(&input) {
                Ok(float) => {
                    // Results are displayed with three decimal places.
                    println!("{:.3}", float);
                    // _ans = float;
                }
                Err(error) => println!("{}", error),
            }
        }
        // read_line appends, so the buffer must be cleared between iterations.
        input.clear();
    }
}
// A sequence of lexed tokens: infix order after `parse`, postfix (RPN)
// order after `shunting`.
#[derive(Debug)]
struct Tokens(Vec<Token>);
impl Tokens {
    /// Consumes a run of digit/'.' characters from `input` and parses it
    /// as `f64`. Returns `ParseNum` when the collected text is not a valid
    /// float (e.g. "1.2.3" or a lone ".").
    fn parse_num(input: &mut Peekable<Chars>) -> Result<f64, MathError> {
        let mut buf = String::new();
        while matches!(input.peek(), Some('0'..='9' | '.')) {
            buf.push(input.next().unwrap());
        }
        buf.parse::<f64>().map_err(|_| ParseNum(buf))
    }
    /// Tokenizes `input` into numbers, operators and parentheses.
    /// Whitespace and '=' are skipped; any other character is a `BadChar`.
    fn parse(input: &str) -> Result<Self, MathError> {
        let mut chars = input.chars().peekable();
        let mut tokens = Vec::<Token>::new();
        loop {
            match chars.peek() {
                Some('0'..='9' | '.') => tokens.push(Num(Tokens::parse_num(&mut chars)?)),
                Some('+' | '-' | '*' | '/' | '(' | ')') => tokens.push(Token::from_char(chars.next().unwrap())),
                // BUGFIX: the old arm `Some(chr @ '=') | Some(chr) if chr.is_whitespace()`
                // applied the guard to BOTH or-pattern alternatives, so '=' was
                // never skipped and fell through to the BadChar arm.
                Some(&chr) if chr == '=' || chr.is_whitespace() => {
                    chars.next().unwrap();
                },
                Some(&badchar) => return Err(BadChar(badchar)),
                None => return Ok(Tokens(tokens))
            }
        }
    }
    /// Reorders the infix token stream into postfix (RPN) with the
    /// shunting-yard algorithm.
    fn shunting(self) -> Result<Self, MathError> {
        let mut op_stack = Vec::<Token>::new();
        let mut queue = Vec::<Token>::new();
        for token in &self.0 {
            match token {
                Num(_) => queue.push(*token),
                ParenOpen => op_stack.push(*token),
                ParenClose => {
                    // Move operators to the output until the matching '('.
                    while let Some(top) = op_stack.last()
                        .filter(|top| !matches!(top, ParenOpen)) {
                        queue.push(op_stack.pop().unwrap());
                    }
                    // Discard the '('; if there is none, the ')' is unmatched.
                    if let None = op_stack.pop() {
                        return Err(UnmatchedParens(self));
                    }
                },
                Oper(oper) => {
                    // BUGFIX: pop while the stacked operator has greater OR EQUAL
                    // precedence. The previous strict comparison (via Token::is_lower)
                    // broke left associativity: "2-3+4" was emitted as `2 3 4 + -`
                    // and evaluated to -5 instead of 3. All four operators here
                    // are left-associative.
                    while let Some(Oper(top)) = op_stack.last() {
                        if top.precedence() >= oper.precedence() {
                            queue.push(op_stack.pop().unwrap());
                        } else {
                            break;
                        }
                    }
                    op_stack.push(*token);
                }
            }
        }
        // Flush remaining operators; a leftover '(' was never closed.
        while let Some(elem) = op_stack.pop() {
            match elem {
                ParenOpen => return Err(UnclosedParens(self)),
                Oper(_) => queue.push(elem),
                // Num/ParenClose are never pushed onto the operator stack.
                _ => unreachable!(),
            }
        }
        Ok(Tokens(queue))
    }
    /// Evaluates a postfix (RPN) token stream produced by `shunting`.
    fn solve(self) -> Result<f64, MathError> {
        let mut stack = Vec::<f64>::new();
        for token in &self.0 {
            match token {
                Num(float) => stack.push(*float),
                Oper(oper) => {
                    // Every operator needs two operands on the stack.
                    if stack.len() < 2 {
                        return Err(NotEnoughTokens(self));
                    }
                    // `y` was pushed last, so the operation is `x <op> y`.
                    let (y, x) = (stack.pop().unwrap(), stack.pop().unwrap());
                    stack.push(oper.call(x, y));
                },
                // Parentheses were eliminated by `shunting`.
                _ => unreachable!()
            }
        }
        // Exactly one value must remain: the result.
        match stack.len() {
            1 => Ok(stack.pop().unwrap()),
            _ => Err(NotEnoughTokens(self)),
        }
    }
    /// Parses, reorders and evaluates `input` in one go.
    fn eval(input: &str) -> Result<f64, MathError> {
        Self::parse(input.trim())?
            .shunting()?
            .solve()
    }
}
impl fmt::Display for Tokens {
    /// Renders the token list as `{tok1, tok2, ...}` for error messages.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{{")?;
        for (i, token) in self.0.iter().enumerate() {
            if i > 0 {
                write!(f, ", ")?;
            }
            write!(f, "{}", token)?;
        }
        write!(f, "}}")
    }
}
// A single lexical element of an arithmetic expression.
#[derive(Copy, Clone, PartialEq, Debug)]
enum Token {
    Num(f64),
    Oper(Operator),
    ParenOpen,
    ParenClose,
}
impl Token {
    /// Builds a token from an operator or parenthesis character.
    /// Callers must pre-validate `chr`; anything else hits `unreachable!`.
    fn from_char(chr: char) -> Self {
        match chr {
            '+' | '-' | '*' | '/' => Oper(Operator::from_char(chr)),
            '(' => ParenOpen,
            ')' => ParenClose,
            _ => unreachable!()
        }
    }
    /// Strict precedence comparison between two operator tokens; returns
    /// false when `token` is not an operator (e.g. ParenOpen), and must
    /// only be called on an operator `self`.
    /// NOTE(review): the strict `<` means equal-precedence operators are
    /// NOT popped by `shunting`, which breaks left associativity of `-`
    /// and `/` ("2-3+4" evaluates as 2-(3+4)) — confirm intended semantics.
    fn is_lower(&self, token: &Token) -> bool {
        if let Oper(oper) = self {
            if let Oper(other) = token {
                return oper.precedence() < other.precedence();
            }
            return false;
        }
        unreachable!()
    }
    // Convenience predicates; not used anywhere in the visible code.
    fn is_num(&self) -> bool {
        matches!(self, Num(_))
    }
    fn is_oper(&self) -> bool {
        matches!(self, Oper(_))
    }
    fn is_paren(&self) -> bool {
        matches!(self, ParenOpen | ParenClose)
    }
}
impl fmt::Display for Token {
    /// Debug-style rendering used inside `MathError` messages, e.g. `Num(1.000)`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // BUGFIX: removed the `_` catch-all — the match already covers every
        // variant, so the arm was dead code (unreachable_patterns warning).
        match self {
            Token::Num(float) => write!(f, "Num({:.3})", float),
            Token::Oper(oper) => write!(f, "Oper({})", oper),
            Token::ParenOpen => write!(f, "ParenOpen"),
            Token::ParenClose => write!(f, "ParenClose"),
        }
    }
}
// The four supported binary arithmetic operators.
#[derive(Copy, Clone, PartialEq, Debug)]
enum Operator {
    Add, Sub, Mul, Div
}
impl Operator {
    /// Maps an operator character to its `Operator`; only reached with
    /// characters already validated by `Token::from_char`.
    fn from_char(chr: char) -> Self {
        match chr {
            '+' => Operator::Add,
            '-' => Operator::Sub,
            '*' => Operator::Mul,
            '/' => Operator::Div,
            // BUGFIX: the message contained a `{}` placeholder with no
            // argument; pass `chr` so the offending char is reported.
            _ => unreachable!("Invalid char: `{}`", chr)
        }
    }
    /// Binding strength: `*` and `/` bind tighter than `+` and `-`.
    fn precedence(&self) -> u8 {
        // BUGFIX: removed the unreachable `_` panic arm — the match is
        // already exhaustive over all four variants.
        match self {
            Operator::Add | Operator::Sub => 1,
            Operator::Mul | Operator::Div => 2,
        }
    }
    /// Applies the operator to its operands as `x <op> y`.
    fn call(&self, x: f64, y: f64) -> f64 {
        // BUGFIX: removed the unreachable `_` panic arm (exhaustive match).
        match self {
            Operator::Add => x + y,
            Operator::Sub => x - y,
            Operator::Mul => x * y,
            Operator::Div => x / y,
        }
    }
}
impl fmt::Display for Operator {
    /// Renders the operator as its source character.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // BUGFIX: removed the unreachable `_ => '?'` arm — the match is
        // exhaustive over all four variants.
        write!(f, "{}", match self {
            Operator::Add => '+',
            Operator::Sub => '-',
            Operator::Mul => '*',
            Operator::Div => '/',
        })
    }
}
// Everything that can go wrong while lexing, reordering or evaluating.
enum MathError {
    // Catch-all message; not constructed anywhere in the visible code.
    Generic(String),
    // A digit/'.' run that is not a valid f64 (e.g. "1.2.3").
    ParseNum(String),
    // An input character outside the supported alphabet.
    BadChar(char),
    // A '(' still open at the end of the expression.
    UnclosedParens(Tokens),
    // A ')' with no matching '('.
    UnmatchedParens(Tokens),
    // Operand/operator counts do not balance during evaluation.
    NotEnoughTokens(Tokens),
}
impl fmt::Display for MathError {
    /// Human-readable message printed by the REPL loop.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // BUGFIX: removed the `_` fallback arm — all six variants are
        // matched, so it was dead code (unreachable_patterns warning).
        match self {
            Generic(string) => write!(f, "Error: `{}`", string),
            ParseNum(string) => write!(f, "Cannot parse literal: `{}`", string),
            BadChar(chr) => write!(f, "Character not supported: `{}`", chr),
            UnclosedParens(tokens) => write!(f, "Opened parentheses were not closed: {}", tokens),
            UnmatchedParens(tokens) => write!(f, "Unmatched closed parentheses: {}", tokens),
            NotEnoughTokens(tokens) => write!(f, "Unmatched numbers and operators: {}", tokens),
        }
    }
}
|
use std::net::Ipv4Addr;
use std::net::SocketAddrV4;
use std::net::UdpSocket;
use std::str::FromStr;
use std::thread;
use crate::config::Config;
use crate::dns::answer::Answer;
use crate::dns::header::Header;
use crate::dns::message::Message;
// Handle to the background UDP DNS-serving thread.
pub struct Server {
    pub thread: thread::JoinHandle<u8>,
}
impl Server {
    /// Spawns the serving thread on the configured DNS port.
    pub fn create(config: &Config) -> Server {
        let thread = Server::create_thread(config.dns_port);
        Server { thread }
    }
    /// Background loop: receive a DNS query and answer it with 127.0.0.1.
    fn create_thread(port: u16) -> thread::JoinHandle<u8> {
        thread::spawn(move || {
            // Loopback only; a bind failure panics and kills the thread.
            let socket = UdpSocket::bind(SocketAddrV4::new(
                Ipv4Addr::from_str("127.0.0.1").unwrap(),
                port,
            ))
            .unwrap();
            // 512 bytes: the classic maximum size of a UDP DNS message.
            let mut buffer: [u8; 512] = [0; 512];
            loop {
                let (size, source) = socket.recv_from(&mut buffer).unwrap();
                let query_message = Message::unpack(&buffer[..size]);
                // Build a single answer: rrtype 1 / class 1 (A record, IN)
                // pointing at 127.0.0.1 with TTL 0.
                // NOTE(review): indexes questions[0] — a query with no
                // questions would panic here; confirm Message::unpack
                // guarantees at least one question.
                let mut answers = Vec::new();
                answers.push(Answer {
                    name: query_message.questions[0].name.clone(),
                    rrtype: 1,
                    class: 1,
                    ttl: 0,
                    length: 4,
                    data: vec![127, 0, 0, 1],
                });
                // Echo the query back, flipped into a response carrying the
                // one answer and no authority/additional records.
                let answer_message = Message {
                    header: Header {
                        query_response: 1,
                        answer_count: 1,
                        ns_count: 0,
                        ar_count: 0,
                        ..query_message.header
                    },
                    answers,
                    ..query_message
                };
                let size = answer_message.pack(&mut buffer);
                socket.send_to(&buffer[..size], &source).unwrap();
            }
        })
    }
}
|
#[doc(
brief = "Attribute parsing",
desc =
"The attribute parser provides methods for pulling documentation out of \
an AST's attributes."
)];
import rustc::syntax::ast;
import rustc::front::attr;
import core::tuple;
export crate_attrs, mod_attrs, fn_attrs, arg_attrs, const_attrs;
export parse_crate, parse_mod, parse_fn, parse_const;
// Record types describing the documentation extracted from each item kind
// (pre-1.0 Rust structural records with option<str> fields).
// Crate docs: just the name from the #[link(...)] attribute.
type crate_attrs = {
    name: option<str>
};
// Module docs: short and long descriptions.
type mod_attrs = {
    brief: option<str>,
    desc: option<str>
};
// Function docs, including per-argument, return and failure descriptions.
type fn_attrs = {
    brief: option<str>,
    desc: option<str>,
    args: [arg_attrs],
    return: option<str>,
    failure: option<str>
};
// Name/description pair for one documented function argument.
type arg_attrs = {
    name: str,
    desc: str
};
// Constant docs: short and long descriptions.
type const_attrs = {
    brief: option<str>,
    desc: option<str>
};
fn doc_meta(
    attrs: [ast::attribute]
) -> option<@ast::meta_item> {
    #[doc =
      "Given a vec of attributes, extract the meta_items contained in the \
       doc attribute"];
    let doc_attrs = attr::find_attrs_by_name(attrs, "doc");
    let doc_metas = attr::attr_metas(doc_attrs);
    if vec::is_not_empty(doc_metas) {
        // Only the first doc attribute is honored; extras are warned about.
        if vec::len(doc_metas) != 1u {
            #warn("ignoring %u doc attributes", vec::len(doc_metas) - 1u);
        }
        some(doc_metas[0])
    } else {
        none
    }
}
// Extracts crate-level docs: the crate name from the linkage metas,
// i.e. #[link(name = "...")].
fn parse_crate(attrs: [ast::attribute]) -> crate_attrs {
    let link_metas = attr::find_linkage_metas(attrs);
    {
        name: attr::meta_item_value_from_list(link_metas, "name")
    }
}
// parse_crate behavior: name present, no link attribute, link without name.
#[test]
fn should_extract_crate_name_from_link_attribute() {
    let source = "#[link(name = \"snuggles\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_crate(attrs);
    assert attrs.name == some("snuggles");
}
#[test]
fn should_not_extract_crate_name_if_no_link_attribute() {
    let source = "";
    let attrs = test::parse_attributes(source);
    let attrs = parse_crate(attrs);
    assert attrs.name == none;
}
#[test]
fn should_not_extract_crate_name_if_no_name_value_in_link_attribute() {
    // A link attribute without a name = "..." pair yields no crate name.
    let source = "#[link(whatever)]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_crate(attrs);
    assert attrs.name == none;
}
// Parses module docs: a bare #[doc = "..."] string becomes the description;
// a #[doc(...)] meta list is handed to parse_mod_long_doc.
fn parse_mod(attrs: [ast::attribute]) -> mod_attrs {
    parse_short_doc_or(
        attrs,
        {|desc|
            {
                brief: none,
                desc: desc
            }
        },
        parse_mod_long_doc
    )
}
// Long-form module docs: only the pre-extracted brief/desc are used;
// the remaining meta items are ignored.
fn parse_mod_long_doc(
    _items: [@ast::meta_item],
    brief: option<str>,
    desc: option<str>
) -> mod_attrs {
    {
        brief: brief,
        desc: desc
    }
}
// parse_mod behavior: undocumented, bare string, brief and desc entries.
#[test]
fn parse_mod_should_handle_undocumented_mods() {
    let source = "";
    let attrs = test::parse_attributes(source);
    let attrs = parse_mod(attrs);
    assert attrs.brief == none;
    assert attrs.desc == none;
}
#[test]
fn parse_mod_should_parse_simple_doc_attributes() {
    let source = "#[doc = \"basic\"]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_mod(attrs);
    assert attrs.desc == some("basic");
}
#[test]
fn parse_mod_should_parse_the_brief_description() {
    let source = "#[doc(brief = \"short\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_mod(attrs);
    assert attrs.brief == some("short");
}
#[test]
fn parse_mod_should_parse_the_long_description() {
    let source = "#[doc(desc = \"description\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_mod(attrs);
    assert attrs.desc == some("description");
}
// Dispatches on the shape of the doc attribute: a bare #[doc = "..."]
// string goes to handle_short; a #[doc(...)] meta list goes to parse_long
// with brief/desc pre-extracted; no doc attribute (or an empty one) is
// treated as an absent short doc.
fn parse_short_doc_or<T>(
    attrs: [ast::attribute],
    handle_short: fn&(
        short_desc: option<str>
    ) -> T,
    parse_long: fn&(
        doc_items: [@ast::meta_item],
        brief: option<str>,
        desc: option<str>
    ) -> T
) -> T {
    alt doc_meta(attrs) {
      some(meta) {
        alt attr::get_meta_item_value_str(meta) {
          some(desc) { handle_short(some(desc)) }
          none {
            alt attr::get_meta_item_list(meta) {
              some(list) {
                let brief = attr::meta_item_value_from_list(list, "brief");
                let desc = attr::meta_item_value_from_list(list, "desc");
                parse_long(list, brief, desc)
              }
              none {
                handle_short(none)
              }
            }
          }
        }
      }
      none {
        handle_short(none)
      }
    }
}
// Parses function docs; a bare doc string becomes the description and
// every other field stays empty.
fn parse_fn(
    attrs: [ast::attribute]
) -> fn_attrs {
    parse_short_doc_or(
        attrs,
        {|desc|
            {
                brief: none,
                desc: desc,
                args: [],
                return: none,
                failure: none
            }
        },
        parse_fn_long_doc
    )
}
// Builds fn_attrs from a #[doc(...)] meta list: brief/desc plus the
// return, failure and args(name = "desc", ...) entries.
fn parse_fn_long_doc(
    items: [@ast::meta_item],
    brief: option<str>,
    desc: option<str>
) -> fn_attrs {
    let return = attr::meta_item_value_from_list(items, "return");
    let failure = attr::meta_item_value_from_list(items, "failure");
    // Each name = "value" pair inside args(...) becomes one arg_attrs;
    // entries that are not name/value pairs are silently dropped.
    let args = alt attr::meta_item_list_from_list(items, "args") {
      some(items) {
        vec::filter_map(items) {|item|
            option::map(attr::name_value_str_pair(item)) { |pair|
                {
                    name: tuple::first(pair),
                    desc: tuple::second(pair)
                }
            }
        }
      }
      none { [] }
    };
    {
        brief: brief,
        desc: desc,
        args: args,
        return: return,
        failure: failure
    }
}
// parse_fn behavior: undocumented, bare string, brief/desc, return,
// per-argument and failure descriptions.
#[test]
fn parse_fn_should_handle_undocumented_functions() {
    let source = "";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.brief == none;
    assert attrs.desc == none;
    assert attrs.return == none;
    assert vec::len(attrs.args) == 0u;
}
#[test]
fn parse_fn_should_parse_simple_doc_attributes() {
    let source = "#[doc = \"basic\"]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.desc == some("basic");
}
#[test]
fn parse_fn_should_parse_the_brief_description() {
    let source = "#[doc(brief = \"short\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.brief == some("short");
}
#[test]
fn parse_fn_should_parse_the_long_description() {
    let source = "#[doc(desc = \"description\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.desc == some("description");
}
#[test]
fn parse_fn_should_parse_the_return_value_description() {
    let source = "#[doc(return = \"return value\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.return == some("return value");
}
#[test]
fn parse_fn_should_parse_the_argument_descriptions() {
    let source = "#[doc(args(a = \"arg a\", b = \"arg b\"))]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.args[0] == {name: "a", desc: "arg a"};
    assert attrs.args[1] == {name: "b", desc: "arg b"};
}
#[test]
fn parse_fn_should_parse_failure_conditions() {
    let source = "#[doc(failure = \"it's the fail\")]";
    let attrs = test::parse_attributes(source);
    let attrs = parse_fn(attrs);
    assert attrs.failure == some("it's the fail");
}
// Parses constant docs; a bare doc string becomes the description,
// a #[doc(...)] list is handed to parse_const_long_doc.
fn parse_const(attrs: [ast::attribute]) -> const_attrs {
    parse_short_doc_or(
        attrs,
        {|desc|
            {
                brief: none,
                desc: desc
            }
        },
        parse_const_long_doc
    )
}
// Long-form constant docs: only the pre-extracted brief/desc are used.
fn parse_const_long_doc(
    _items: [@ast::meta_item],
    brief: option<str>,
    desc: option<str>
) -> const_attrs {
    {
        brief: brief,
        desc: desc
    }
}
#[test]
fn should_parse_const_short_doc() {
    let source = "#[doc = \"description\"]";
    let attrs = test::parse_attributes(source);
    // BUGFIX: this test called parse_fn, so parse_const's short-doc path
    // was never actually exercised.
    let attrs = parse_const(attrs);
    assert attrs.desc == some("description");
}
#[test]
fn should_parse_const_long_doc() {
    let source = "#[doc(brief = \"a\", desc = \"b\")]";
    let attrs = test::parse_attributes(source);
    // BUGFIX: this test called parse_fn instead of the function under test.
    let attrs = parse_const(attrs);
    assert attrs.brief == some("a");
    assert attrs.desc == some("b");
}
#[cfg(test)]
mod test {
    // Runs the parser over `source` using a minimal in-memory parse
    // session and returns the outer attributes it finds.
    fn parse_attributes(source: str) -> [ast::attribute] {
        import rustc::syntax::parse::parser;
        // FIXME: Uncommenting this results in rustc bugs
        //import rustc::syntax::codemap;
        import rustc::driver::diagnostic;
        let cm = rustc::syntax::codemap::new_codemap();
        let handler = diagnostic::mk_handler(none);
        let parse_sess = @{
            cm: cm,
            mutable next_id: 0,
            span_diagnostic: diagnostic::mk_span_handler(handler, cm),
            mutable chpos: 0u,
            mutable byte_pos: 0u
        };
        let parser = parser::new_parser_from_source_str(
            parse_sess, [], "-", @source);
        parser::parse_outer_attributes(parser)
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// ScatterPlotWidgetDefinitionRequests : Widget definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScatterPlotWidgetDefinitionRequests {
    // Scatterplot request for the x axis.
    #[serde(rename = "x")]
    pub x: Box<crate::models::ScatterPlotRequest>,
    // Scatterplot request for the y axis.
    #[serde(rename = "y")]
    pub y: Box<crate::models::ScatterPlotRequest>,
}
impl ScatterPlotWidgetDefinitionRequests {
    /// Widget definition.
    pub fn new(x: crate::models::ScatterPlotRequest, y: crate::models::ScatterPlotRequest) -> ScatterPlotWidgetDefinitionRequests {
        // The requests are boxed so the struct itself stays small.
        let x = Box::new(x);
        let y = Box::new(y);
        ScatterPlotWidgetDefinitionRequests { x, y }
    }
}
|
mod scheduler;
pub use scheduler::WorkloadBuilder;
use crate::atomic_refcell::AtomicRefCell;
#[cfg(feature = "serde1")]
use crate::atomic_refcell::RefMut;
use crate::borrow::Borrow;
use crate::entity_builder::EntityBuilder;
use crate::error;
#[cfg(feature = "serde1")]
use crate::serde_setup::{ExistingEntities, GlobalDeConfig, GlobalSerConfig, WithShared};
use crate::storage::AllStorages;
#[cfg(feature = "serde1")]
use crate::storage::{Storage, StorageId};
use alloc::borrow::Cow;
use core::ops::Range;
#[cfg(feature = "parallel")]
use rayon::{ThreadPool, ThreadPoolBuilder};
use scheduler::Scheduler;
/// Holds all components and keeps track of entities and what they own.
pub struct World {
    pub(crate) all_storages: AtomicRefCell<AllStorages>,
    // rayon pool used when the "parallel" feature is enabled.
    #[cfg(feature = "parallel")]
    pub(crate) thread_pool: ThreadPool,
    // Registered workloads/systems.
    scheduler: AtomicRefCell<Scheduler>,
}
impl Default for World {
    /// Create an empty `World`.
    fn default() -> Self {
        // The two arms differ because `AtomicRefCell::new` takes extra
        // arguments under the "std" feature — presumably borrow-tracking
        // state; confirm against atomic_refcell's cfg'd signature.
        #[cfg(feature = "std")]
        {
            World {
                all_storages: AtomicRefCell::new(AllStorages::new(), None, true),
                #[cfg(feature = "parallel")]
                thread_pool: ThreadPoolBuilder::new().build().unwrap(),
                scheduler: AtomicRefCell::new(Default::default(), None, true),
            }
        }
        #[cfg(not(feature = "std"))]
        {
            World {
                all_storages: AtomicRefCell::new(AllStorages::new()),
                #[cfg(feature = "parallel")]
                thread_pool: ThreadPoolBuilder::new().build().unwrap(),
                scheduler: AtomicRefCell::new(Default::default()),
            }
        }
    }
}
impl World {
/// Create an empty `World`.
pub fn new() -> Self {
Default::default()
}
/// Returns a new `World` with custom threads.
/// Custom threads can be useful when working with wasm for example.
#[cfg(feature = "parallel")]
#[cfg_attr(docsrs, doc(cfg(feature = "parallel")))]
pub fn new_with_custom_thread_pool(thread_pool: ThreadPool) -> Self {
    // Same construction as Default, but with a caller-supplied rayon pool.
    World {
        all_storages: AtomicRefCell::new(AllStorages::new(), None, true),
        thread_pool,
        scheduler: AtomicRefCell::new(Default::default(), None, true),
    }
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [UniqueView] or [UniqueViewMut].
/// Does nothing if the storage already exists.
/// Unwraps errors.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn add_unique<T: 'static + Send + Sync>(&self, component: T) {
    // Panicking convenience wrapper over try_add_unique.
    self.try_add_unique(component).unwrap();
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [UniqueView] or [UniqueViewMut].
/// Does nothing if the storage already exists.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
pub fn try_add_unique<T: 'static + Send + Sync>(
    &self,
    component: T,
) -> Result<(), error::Borrow> {
    // Shared-borrow AllStorages, then hand the component over; adding is
    // a no-op if the unique storage already exists.
    let all_storages = self.all_storages.try_borrow()?;
    all_storages.add_unique(component);
    Ok(())
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [NonSend] and [UniqueViewMut] or [UniqueViewMut].
/// Does nothing if the storage already exists.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [NonSend]: struct.NonSend.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(feature = "non_send")]
#[cfg_attr(docsrs, doc(cfg(feature = "non_send")))]
pub fn try_add_unique_non_send<T: 'static + Sync>(
    &self,
    component: T,
) -> Result<(), error::Borrow> {
    // `T` is only `Sync`, so it goes through the !Send unique-storage path.
    let all_storages = self.all_storages.try_borrow()?;
    all_storages.add_unique_non_send(component);
    Ok(())
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [NonSend] and [UniqueViewMut] or [UniqueViewMut].
/// Does nothing if the storage already exists.
/// Unwraps errors.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [NonSend]: struct.NonSend.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(all(feature = "non_send", feature = "panic"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "non_send", feature = "panic"))))]
pub fn add_unique_non_send<T: 'static + Sync>(&self, component: T) {
    // Panicking convenience wrapper over try_add_unique_non_send.
    self.try_add_unique_non_send::<T>(component).unwrap()
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [NonSync] and [UniqueViewMut] or [UniqueViewMut].
/// Does nothing if the storage already exists.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [NonSync]: struct.NonSync.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(feature = "non_sync")]
#[cfg_attr(docsrs, doc(cfg(feature = "non_sync")))]
pub fn try_add_unique_non_sync<T: 'static + Send>(
    &self,
    component: T,
) -> Result<(), error::Borrow> {
    // `T` is only `Send`, so it goes through the !Sync unique-storage path.
    let all_storages = self.all_storages.try_borrow()?;
    all_storages.add_unique_non_sync(component);
    Ok(())
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [NonSync] and [UniqueViewMut] or [UniqueViewMut].
/// Does nothing if the storage already exists.
/// Unwraps errors.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [NonSync]: struct.NonSync.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(all(feature = "non_sync", feature = "panic"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "non_sync", feature = "panic"))))]
pub fn add_unique_non_sync<T: 'static + Send>(&self, component: T) {
    // Panicking convenience wrapper over try_add_unique_non_sync.
    self.try_add_unique_non_sync::<T>(component).unwrap()
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [NonSendSync] and [UniqueViewMut] or [UniqueViewMut].
/// Does nothing if the storage already exists.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [NonSendSync]: struct.NonSendSync.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(all(feature = "non_send", feature = "non_sync"))]
#[cfg_attr(docsrs, doc(cfg(all(feature = "non_send", feature = "non_sync"))))]
pub fn try_add_unique_non_send_sync<T: 'static>(
    &self,
    component: T,
) -> Result<(), error::Borrow> {
    // `T` is neither `Send` nor `Sync`; use the !Send + !Sync storage path.
    let all_storages = self.all_storages.try_borrow()?;
    all_storages.add_unique_non_send_sync(component);
    Ok(())
}
/// Adds a new unique storage, unique storages store exactly one `T`.
/// To access a unique storage value, use [NonSendSync] and [UniqueViewMut] or [UniqueViewMut].
/// Does nothing if the storage already exists.
/// Unwraps errors.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [NonSendSync]: struct.NonSendSync.html
/// [UniqueView]: struct.UniqueView.html
/// [UniqueViewMut]: struct.UniqueViewMut.html
#[cfg(all(feature = "non_send", feature = "non_sync", feature = "panic"))]
#[cfg_attr(
    docsrs,
    doc(cfg(all(feature = "non_send", feature = "non_sync", feature = "panic")))
)]
pub fn add_unique_non_send_sync<T: 'static>(&self, component: T) {
    // Panicking convenience wrapper over try_add_unique_non_send_sync.
    self.try_add_unique_non_send_sync::<T>(component).unwrap()
}
/// Removes a unique storage.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
/// - `T` storage (exclusive)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
/// - `T` storage borrow failed.
/// - `T` storage did not exist.
///
/// [AllStorages]: struct.AllStorages.html
pub fn try_remove_unique<T: 'static>(&self) -> Result<T, error::UniqueRemove> {
self.all_storages
.try_borrow()
.map_err(|_| error::UniqueRemove::AllStorages)?
.try_remove_unique::<T>()
}
/// Removes a unique storage.
/// Unwraps errors.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
/// - `T` storage (exclusive)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
/// - `T` storage borrow failed.
/// - `T` storage did not exist.
///
/// [AllStorages]: struct.AllStorages.html
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn remove_unique<T: 'static>(&self) -> T {
    // Panicking convenience wrapper over try_remove_unique.
    self.try_remove_unique().unwrap()
}
#[doc = "Borrows the requested storage(s), if it doesn't exist it'll get created.
You can use a tuple to get multiple storages at once.
You can use:
* [View]\\<T\\> for a shared access to `T` storage
* [ViewMut]\\<T\\> for an exclusive access to `T` storage
* [EntitiesView] for a shared access to the entity storage
* [EntitiesViewMut] for an exclusive reference to the entity storage
* [AllStoragesViewMut] for an exclusive access to the storage of all components, ⚠️ can't coexist with any other storage borrow
* [UniqueView]\\<T\\> for a shared access to a `T` unique storage
* [UniqueViewMut]\\<T\\> for an exclusive access to a `T` unique storage
* `Option<V>` with one or multiple views for fallible access to one or more storages"]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"parallel\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = " * [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
all(feature = "parallel", not(docsrs)),
doc = "* [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
not(feature = "parallel"),
doc = "* ThreadPool: must activate the *parallel* feature"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = " * [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", not(docsrs)),
doc = "* [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
not(feature = "non_send"),
doc = "* NonSend: must activate the *non_send* feature"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = " * [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_sync", not(docsrs)),
doc = "* [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
not(feature = "non_sync"),
doc = "* NonSync: must activate the *non_sync* feature"
)]
// FIX: this banner advertises both `non_send` and `non_sync`, so it must be gated on
// both features; it was previously shown whenever `non_sync` alone was enabled,
// contradicting the "must activate" fallback bullet below.
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code> and <code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = " * [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", not(docsrs)),
doc = "* [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
not(all(feature = "non_send", feature = "non_sync")),
doc = "* NonSendSync: must activate the *non_send* and *non_sync* features"
)]
#[doc = "
### Borrows
- [AllStorages] (exclusive) when requesting [AllStoragesViewMut]
- [AllStorages] (shared) + storage (exclusive or shared) for all other views
### Errors
- [AllStorages] borrow failed.
- Storage borrow failed.
- Unique storage did not exist.
### Example
```
use shipyard::{EntitiesView, View, ViewMut, World};
let world = World::new();
let u32s = world.try_borrow::<View<u32>>().unwrap();
let (entities, mut usizes) = world
.try_borrow::<(EntitiesView, ViewMut<usize>)>()
.unwrap();
```
[AllStorages]: struct.AllStorages.html
[EntitiesView]: struct.Entities.html
[EntitiesViewMut]: struct.Entities.html
[AllStoragesViewMut]: struct.AllStorages.html
[World]: struct.World.html
[View]: struct.View.html
[ViewMut]: struct.ViewMut.html
[UniqueView]: struct.UniqueView.html
[UniqueViewMut]: struct.UniqueViewMut.html"]
#[cfg_attr(
feature = "parallel",
doc = "[ThreadPoolView]: struct.ThreadPoolView.html"
)]
#[cfg_attr(feature = "non_send", doc = "[NonSend]: struct.NonSend.html")]
#[cfg_attr(feature = "non_sync", doc = "[NonSync]: struct.NonSync.html")]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync"),
doc = "[NonSendSync]: struct.NonSendSync.html"
)]
pub fn try_borrow<'s, V: Borrow<'s>>(&'s self) -> Result<V, error::GetStorage> {
#[cfg(feature = "parallel")]
{
// With "parallel" the thread pool is forwarded so ThreadPoolView can be borrowed.
V::try_borrow(&self.all_storages, &self.thread_pool)
}
#[cfg(not(feature = "parallel"))]
{
V::try_borrow(&self.all_storages)
}
}
#[doc = "Borrows the requested storage(s), if it doesn't exist it'll get created.
You can use a tuple to get multiple storages at once.
Unwraps errors.
You can use:
* [View]\\<T\\> for a shared access to `T` storage
* [ViewMut]\\<T\\> for an exclusive access to `T` storage
* [EntitiesView] for a shared access to the entity storage
* [EntitiesViewMut] for an exclusive reference to the entity storage
* [AllStoragesViewMut] for an exclusive access to the storage of all components, ⚠️ can't coexist with any other storage borrow
* [UniqueView]\\<T\\> for a shared access to a `T` unique storage
* [UniqueViewMut]\\<T\\> for an exclusive access to a `T` unique storage
* `Option<V>` with one or multiple views for fallible access to one or more storages"]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"parallel\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = " * [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
all(feature = "parallel", not(docsrs)),
doc = "* [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
not(feature = "parallel"),
doc = "* ThreadPool: must activate the *parallel* feature"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = " * [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", not(docsrs)),
doc = "* [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
not(feature = "non_send"),
doc = "* NonSend: must activate the *non_send* feature"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = " * [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_sync", not(docsrs)),
doc = "* [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
not(feature = "non_sync"),
doc = "* NonSync: must activate the *non_sync* feature"
)]
// FIX: banner mentions both `non_send` and `non_sync`, so it must be gated on both
// features (it was misgated on `non_sync` alone, contradicting the fallback bullet).
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code> and <code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = " * [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", not(docsrs)),
doc = "* [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
not(all(feature = "non_send", feature = "non_sync")),
doc = "* NonSendSync: must activate the *non_send* and *non_sync* features"
)]
#[doc = "
### Borrows
- [AllStorages] (exclusive) when requesting [AllStoragesViewMut]
- [AllStorages] (shared) + storage (exclusive or shared) for all other views
### Errors
- [AllStorages] borrow failed.
- Storage borrow failed.
- Unique storage did not exist.
### Example
```
use shipyard::{EntitiesView, View, ViewMut, World};
let world = World::new();
let u32s = world.borrow::<View<u32>>();
let (entities, mut usizes) = world.borrow::<(EntitiesView, ViewMut<usize>)>();
```
[AllStorages]: struct.AllStorages.html
[EntitiesView]: struct.Entities.html
[EntitiesViewMut]: struct.Entities.html
[AllStoragesViewMut]: struct.AllStorages.html
[World]: struct.World.html
[View]: struct.View.html
[ViewMut]: struct.ViewMut.html
[UniqueView]: struct.UniqueView.html
[UniqueViewMut]: struct.UniqueViewMut.html"]
#[cfg_attr(
feature = "parallel",
doc = "[ThreadPoolView]: struct.ThreadPoolView.html"
)]
#[cfg_attr(feature = "non_send", doc = "[NonSend]: struct.NonSend.html")]
#[cfg_attr(feature = "non_sync", doc = "[NonSync]: struct.NonSync.html")]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync"),
doc = "[NonSendSync]: struct.NonSendSync.html"
)]
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn borrow<'s, V: Borrow<'s>>(&'s self) -> V {
// Panicking convenience wrapper around `try_borrow`; only built with "panic".
self.try_borrow::<V>().unwrap()
}
#[doc = "Borrows the requested storages and runs the function.
Data can be passed to the function, this always has to be a single type but you can use a tuple if needed.
You can use:
* [View]\\<T\\> for a shared access to `T` storage
* [ViewMut]\\<T\\> for an exclusive access to `T` storage
* [EntitiesView] for a shared access to the entity storage
* [EntitiesViewMut] for an exclusive reference to the entity storage
* [AllStoragesViewMut] for an exclusive access to the storage of all components, ⚠️ can't coexist with any other storage borrow
* [UniqueView]\\<T\\> for a shared access to a `T` unique storage
* [UniqueViewMut]\\<T\\> for an exclusive access to a `T` unique storage
* `Option<V>` with one or multiple views for fallible access to one or more storages"]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"parallel\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = " * [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
all(feature = "parallel", not(docsrs)),
doc = "* [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
not(feature = "parallel"),
doc = "* ThreadPool: must activate the *parallel* feature"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = " * [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", not(docsrs)),
doc = "* [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
not(feature = "non_send"),
doc = "* NonSend: must activate the *non_send* feature"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = " * [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_sync", not(docsrs)),
doc = "* [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
not(feature = "non_sync"),
doc = "* NonSync: must activate the *non_sync* feature"
)]
// FIX: banner mentions both `non_send` and `non_sync`, so it must be gated on both
// features (it was misgated on `non_sync` alone, contradicting the fallback bullet).
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code> and <code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = " * [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", not(docsrs)),
doc = "* [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
not(all(feature = "non_send", feature = "non_sync")),
doc = "* NonSendSync: must activate the *non_send* and *non_sync* features"
)]
#[doc = "
### Borrows
- [AllStorages] (exclusive) when requesting [AllStoragesViewMut]
- [AllStorages] (shared) + storage (exclusive or shared) for all other views
### Errors
- [AllStorages] borrow failed.
- Storage borrow failed.
- Unique storage did not exist.
- Error returned by user.
### Example
```
use shipyard::{EntityId, Get, ViewMut, World};
fn sys1((entity, [x, y]): (EntityId, [f32; 2]), mut positions: ViewMut<[f32; 2]>) {
if let Ok(pos) = (&mut positions).get(entity) {
*pos = [x, y];
}
}
let world = World::new();
world.try_run_with_data(sys1, (EntityId::dead(), [0., 0.])).unwrap();
```
[AllStorages]: struct.AllStorages.html
[EntitiesView]: struct.Entities.html
[EntitiesViewMut]: struct.Entities.html
[AllStoragesViewMut]: struct.AllStorages.html
[World]: struct.World.html
[View]: struct.View.html
[ViewMut]: struct.ViewMut.html
[UniqueView]: struct.UniqueView.html
[UniqueViewMut]: struct.UniqueViewMut.html"]
#[cfg_attr(
feature = "parallel",
doc = "[ThreadPoolView]: struct.ThreadPoolView.html"
)]
#[cfg_attr(feature = "non_send", doc = "[NonSend]: struct.NonSend.html")]
#[cfg_attr(feature = "non_sync", doc = "[NonSync]: struct.NonSync.html")]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync"),
doc = "[NonSendSync]: struct.NonSendSync.html"
)]
pub fn try_run_with_data<'s, Data, B, R, S: crate::system::System<'s, (Data,), B, R>>(
&'s self,
s: S,
data: Data,
) -> Result<R, error::Run> {
// Borrow the system's requested views first (fallible), then run it with `data`.
Ok(s.run((data,), {
#[cfg(feature = "parallel")]
{
S::try_borrow(&self.all_storages, &self.thread_pool)?
}
#[cfg(not(feature = "parallel"))]
{
S::try_borrow(&self.all_storages)?
}
}))
}
#[doc = "Borrows the requested storages and runs the function.
Data can be passed to the function, this always has to be a single type but you can use a tuple if needed.
Unwraps errors.
You can use:
* [View]\\<T\\> for a shared access to `T` storage
* [ViewMut]\\<T\\> for an exclusive access to `T` storage
* [EntitiesView] for a shared access to the entity storage
* [EntitiesViewMut] for an exclusive reference to the entity storage
* [AllStoragesViewMut] for an exclusive access to the storage of all components, ⚠️ can't coexist with any other storage borrow
* [UniqueView]\\<T\\> for a shared access to a `T` unique storage
* [UniqueViewMut]\\<T\\> for an exclusive access to a `T` unique storage
* `Option<V>` with one or multiple views for fallible access to one or more storages"]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"parallel\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = " * [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
all(feature = "parallel", not(docsrs)),
doc = "* [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
not(feature = "parallel"),
doc = "* ThreadPool: must activate the *parallel* feature"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = " * [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", not(docsrs)),
doc = "* [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
not(feature = "non_send"),
doc = "* NonSend: must activate the *non_send* feature"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = " * [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_sync", not(docsrs)),
doc = "* [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
not(feature = "non_sync"),
doc = "* NonSync: must activate the *non_sync* feature"
)]
// FIX: banner mentions both `non_send` and `non_sync`, so it must be gated on both
// features (it was misgated on `non_sync` alone, contradicting the fallback bullet).
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code> and <code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = " * [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", not(docsrs)),
doc = "* [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
not(all(feature = "non_send", feature = "non_sync")),
doc = "* NonSendSync: must activate the *non_send* and *non_sync* features"
)]
#[doc = "
### Borrows
- [AllStorages] (exclusive) when requesting [AllStoragesViewMut]
- [AllStorages] (shared) + storage (exclusive or shared) for all other views
### Errors
- [AllStorages] borrow failed.
- Storage borrow failed.
- Unique storage did not exist.
- Error returned by user.
### Example
```
use shipyard::{EntityId, Get, ViewMut, World};
fn sys1((entity, [x, y]): (EntityId, [f32; 2]), mut positions: ViewMut<[f32; 2]>) {
if let Ok(pos) = (&mut positions).get(entity) {
*pos = [x, y];
}
}
let world = World::new();
world.run_with_data(sys1, (EntityId::dead(), [0., 0.]));
```
[AllStorages]: struct.AllStorages.html
[EntitiesView]: struct.Entities.html
[EntitiesViewMut]: struct.Entities.html
[AllStoragesViewMut]: struct.AllStorages.html
[World]: struct.World.html
[View]: struct.View.html
[ViewMut]: struct.ViewMut.html
[UniqueView]: struct.UniqueView.html
[UniqueViewMut]: struct.UniqueViewMut.html"]
#[cfg_attr(
feature = "parallel",
doc = "[ThreadPoolView]: struct.ThreadPoolView.html"
)]
#[cfg_attr(feature = "non_send", doc = "[NonSend]: struct.NonSend.html")]
#[cfg_attr(feature = "non_sync", doc = "[NonSync]: struct.NonSync.html")]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync"),
doc = "[NonSendSync]: struct.NonSendSync.html"
)]
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn run_with_data<'s, Data, B, R, S: crate::system::System<'s, (Data,), B, R>>(
&'s self,
s: S,
data: Data,
) -> R {
// Panicking convenience wrapper around `try_run_with_data`; only built with "panic".
self.try_run_with_data(s, data).unwrap()
}
#[doc = "Borrows the requested storages and runs the function.
You can use:
* [View]\\<T\\> for a shared access to `T` storage
* [ViewMut]\\<T\\> for an exclusive access to `T` storage
* [EntitiesView] for a shared access to the entity storage
* [EntitiesViewMut] for an exclusive reference to the entity storage
* [AllStoragesViewMut] for an exclusive access to the storage of all components, ⚠️ can't coexist with any other storage borrow
* [UniqueView]\\<T\\> for a shared access to a `T` unique storage
* [UniqueViewMut]\\<T\\> for an exclusive access to a `T` unique storage
* `Option<V>` with one or multiple views for fallible access to one or more storages"]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"parallel\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = " * [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
all(feature = "parallel", not(docsrs)),
doc = "* [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
not(feature = "parallel"),
doc = "* ThreadPool: must activate the *parallel* feature"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = " * [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", not(docsrs)),
doc = "* [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
not(feature = "non_send"),
doc = "* NonSend: must activate the *non_send* feature"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = " * [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_sync", not(docsrs)),
doc = "* [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
not(feature = "non_sync"),
doc = "* NonSync: must activate the *non_sync* feature"
)]
// FIX: banner mentions both `non_send` and `non_sync`, so it must be gated on both
// features (it was misgated on `non_sync` alone, contradicting the fallback bullet).
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code> and <code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = " * [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", not(docsrs)),
doc = "* [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
not(all(feature = "non_send", feature = "non_sync")),
doc = "* NonSendSync: must activate the *non_send* and *non_sync* features"
)]
#[doc = "
### Borrows
- [AllStorages] (exclusive) when requesting [AllStoragesViewMut]
- [AllStorages] (shared) + storage (exclusive or shared) for all other views
### Errors
- [AllStorages] borrow failed.
- Storage borrow failed.
- Unique storage did not exist.
- Error returned by user.
### Example
```
use shipyard::{View, ViewMut, World};
fn sys1(i32s: View<i32>) -> i32 {
0
}
let world = World::new();
world
.try_run(|usizes: View<usize>, mut u32s: ViewMut<u32>| {
// -- snip --
})
.unwrap();
let i = world.try_run(sys1).unwrap();
```
[AllStorages]: struct.AllStorages.html
[EntitiesView]: struct.Entities.html
[EntitiesViewMut]: struct.Entities.html
[AllStoragesViewMut]: struct.AllStorages.html
[World]: struct.World.html
[View]: struct.View.html
[ViewMut]: struct.ViewMut.html
[UniqueView]: struct.UniqueView.html
[UniqueViewMut]: struct.UniqueViewMut.html"]
#[cfg_attr(
feature = "parallel",
doc = "[ThreadPoolView]: struct.ThreadPoolView.html"
)]
#[cfg_attr(feature = "non_send", doc = "[NonSend]: struct.NonSend.html")]
#[cfg_attr(feature = "non_sync", doc = "[NonSync]: struct.NonSync.html")]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync"),
doc = "[NonSendSync]: struct.NonSendSync.html"
)]
pub fn try_run<'s, B, R, S: crate::system::System<'s, (), B, R>>(
&'s self,
s: S,
) -> Result<R, error::Run> {
// Borrow the system's requested views first (fallible), then run it with no data.
Ok(s.run((), {
#[cfg(feature = "parallel")]
{
S::try_borrow(&self.all_storages, &self.thread_pool)?
}
#[cfg(not(feature = "parallel"))]
{
S::try_borrow(&self.all_storages)?
}
}))
}
#[doc = "Borrows the requested storages and runs the function.
Unwraps errors.
You can use:
* [View]\\<T\\> for a shared access to `T` storage
* [ViewMut]\\<T\\> for an exclusive access to `T` storage
* [EntitiesView] for a shared access to the entity storage
* [EntitiesViewMut] for an exclusive reference to the entity storage
* [AllStoragesViewMut] for an exclusive access to the storage of all components, ⚠️ can't coexist with any other storage borrow
* [UniqueView]\\<T\\> for a shared access to a `T` unique storage
* [UniqueViewMut]\\<T\\> for an exclusive access to a `T` unique storage
* `Option<V>` with one or multiple views for fallible access to one or more storages"]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"parallel\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "parallel", docsrs),
doc = " * [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
all(feature = "parallel", not(docsrs)),
doc = "* [ThreadPoolView] for a shared access to the `ThreadPool` used by the [World]"
)]
#[cfg_attr(
not(feature = "parallel"),
doc = "* ThreadPool: must activate the *parallel* feature"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", docsrs),
doc = " * [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", not(docsrs)),
doc = "* [NonSend]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send`
* [NonSend]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send`
[NonSend] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send` unique storage."
)]
#[cfg_attr(
not(feature = "non_send"),
doc = "* NonSend: must activate the *non_send* feature"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_sync", docsrs),
doc = " * [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_sync", not(docsrs)),
doc = "* [NonSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Sync`
* [NonSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Sync`
[NonSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Sync` unique storage."
)]
#[cfg_attr(
not(feature = "non_sync"),
doc = "* NonSync: must activate the *non_sync* feature"
)]
// FIX: banner mentions both `non_send` and `non_sync`, so it must be gated on both
// features (it was misgated on `non_sync` alone, contradicting the fallback bullet).
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = "* <span style=\"display: table;color: #2f2f2f;background-color: #C4ECFF;border-width: 1px;border-style: solid;border-color: #7BA5DB;padding: 3px;margin-bottom: 5px; font-size: 90%\">This is supported on <strong><code style=\"background-color: #C4ECFF\">feature=\"non_send\"</code> and <code style=\"background-color: #C4ECFF\">feature=\"non_sync\"</code></strong> only:</span>"
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", docsrs),
doc = " * [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync", not(docsrs)),
doc = "* [NonSendSync]<[View]\\<T\\>> for a shared access to a `T` storage where `T` isn't `Send` nor `Sync`
* [NonSendSync]<[ViewMut]\\<T\\>> for an exclusive access to a `T` storage where `T` isn't `Send` nor `Sync`
[NonSendSync] and [UniqueView]/[UniqueViewMut] can be used together to access a `!Send + !Sync` unique storage."
)]
#[cfg_attr(
not(all(feature = "non_send", feature = "non_sync")),
doc = "* NonSendSync: must activate the *non_send* and *non_sync* features"
)]
#[doc = "
### Borrows
- [AllStorages] (exclusive) when requesting [AllStoragesViewMut]
- [AllStorages] (shared) + storage (exclusive or shared) for all other views
### Errors
- [AllStorages] borrow failed.
- Storage borrow failed.
- Unique storage did not exist.
- Error returned by user.
### Example
```
use shipyard::{View, ViewMut, World};
fn sys1(i32s: View<i32>) -> i32 {
0
}
let world = World::new();
world.run(|usizes: View<usize>, mut u32s: ViewMut<u32>| {
// -- snip --
});
let i = world.run(sys1);
```
[AllStorages]: struct.AllStorages.html
[EntitiesView]: struct.Entities.html
[EntitiesViewMut]: struct.Entities.html
[AllStoragesViewMut]: struct.AllStorages.html
[World]: struct.World.html
[View]: struct.View.html
[ViewMut]: struct.ViewMut.html
[UniqueView]: struct.UniqueView.html
[UniqueViewMut]: struct.UniqueViewMut.html"]
#[cfg_attr(
feature = "parallel",
doc = "[ThreadPoolView]: struct.ThreadPoolView.html"
)]
#[cfg_attr(feature = "non_send", doc = "[NonSend]: struct.NonSend.html")]
#[cfg_attr(feature = "non_sync", doc = "[NonSync]: struct.NonSync.html")]
#[cfg_attr(
all(feature = "non_send", feature = "non_sync"),
doc = "[NonSendSync]: struct.NonSendSync.html"
)]
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn run<'s, B, R, S: crate::system::System<'s, (), B, R>>(&'s self, s: S) -> R {
// Panicking convenience wrapper around `try_run`; only built with "panic".
self.try_run(s).unwrap()
}
/// Modifies the current default workload to `name`.
///
/// ### Borrows
///
/// - Scheduler (exclusive)
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Workload did not exist.
pub fn try_set_default_workload(
&self,
name: impl Into<Cow<'static, str>>,
) -> Result<(), error::SetDefaultWorkload> {
if let Ok(mut scheduler) = self.scheduler.try_borrow_mut() {
if let Some(workload) = scheduler.workloads.get(&name.into()) {
scheduler.default = workload.clone();
Ok(())
} else {
Err(error::SetDefaultWorkload::MissingWorkload)
}
} else {
Err(error::SetDefaultWorkload::Borrow)
}
}
/// Modifies the current default workload to `name`.
/// Unwraps errors.
///
/// ### Borrows
///
/// - Scheduler (exclusive)
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Workload did not exist.
///
/// ### Panics
///
/// - Panics on any of the errors above; this is the panicking version of
/// [try_set_default_workload](#method.try_set_default_workload).
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn set_default_workload(&self, name: impl Into<Cow<'static, str>>) {
    self.try_set_default_workload(name).unwrap();
}
/// A workload is a collection of systems. They will execute as much in parallel as possible.
/// They are evaluated first to last when they can't be parallelized.
/// The default workload will automatically be set to the first workload added.
///
/// ### Borrows
///
/// - Scheduler (exclusive)
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Workload with an identical name already present.
///
/// ### Example
/// ```
/// use shipyard::{system, EntitiesViewMut, IntoIter, Shiperator, View, ViewMut, World};
///
/// fn add(mut usizes: ViewMut<usize>, u32s: View<u32>) {
/// for (x, &y) in (&mut usizes, &u32s).iter() {
/// *x += y as usize;
/// }
/// }
///
/// fn check(usizes: View<usize>) {
/// let mut iter = usizes.iter();
/// assert_eq!(iter.next(), Some(&1));
/// assert_eq!(iter.next(), Some(&5));
/// assert_eq!(iter.next(), Some(&9));
/// }
///
/// let world = World::new();
///
/// world.run(
/// |mut entities: EntitiesViewMut, mut usizes: ViewMut<usize>, mut u32s: ViewMut<u32>| {
/// entities.add_entity((&mut usizes, &mut u32s), (0, 1));
/// entities.add_entity((&mut usizes, &mut u32s), (2, 3));
/// entities.add_entity((&mut usizes, &mut u32s), (4, 5));
/// },
/// );
///
/// world
/// .try_add_workload("Add & Check")
/// .unwrap()
/// .with_system(system!(add))
/// .with_system(system!(check))
/// .build();
///
/// world.run_default();
/// ```
pub fn try_add_workload(
&self,
name: impl Into<Cow<'static, str>>,
) -> Result<WorkloadBuilder<'_>, error::AddWorkload> {
if let Ok(scheduler) = self.scheduler.try_borrow_mut() {
let name = name.into();
if scheduler.workloads.contains_key(&name) {
Err(error::AddWorkload::AlreadyExists)
} else {
Ok(WorkloadBuilder::new(scheduler, name))
}
} else {
Err(error::AddWorkload::Borrow)
}
}
/// A workload is a collection of systems. They will execute as much in parallel as possible.
/// They are evaluated first to last when they can't be parallelized.
/// The default workload will automatically be set to the first workload added.
/// Unwraps errors.
///
/// ### Borrows
///
/// - Scheduler (exclusive)
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Workload with an identical name already present.
///
/// ### Panics
///
/// - Panics on any of the errors above; this is the panicking version of
/// [try_add_workload](#method.try_add_workload).
///
/// ### Example
/// ```
/// use shipyard::{system, EntitiesViewMut, IntoIter, Shiperator, View, ViewMut, World};
///
/// fn add(mut usizes: ViewMut<usize>, u32s: View<u32>) {
///     for (x, &y) in (&mut usizes, &u32s).iter() {
///         *x += y as usize;
///     }
/// }
///
/// fn check(usizes: View<usize>) {
///     let mut iter = usizes.iter();
///     assert_eq!(iter.next(), Some(&1));
///     assert_eq!(iter.next(), Some(&5));
///     assert_eq!(iter.next(), Some(&9));
/// }
///
/// let world = World::new();
///
/// world.run(
///     |mut entities: EntitiesViewMut, mut usizes: ViewMut<usize>, mut u32s: ViewMut<u32>| {
///         entities.add_entity((&mut usizes, &mut u32s), (0, 1));
///         entities.add_entity((&mut usizes, &mut u32s), (2, 3));
///         entities.add_entity((&mut usizes, &mut u32s), (4, 5));
///     },
/// );
///
/// world
///     .add_workload("Add & Check")
///     .with_system(system!(add))
///     .with_system(system!(check))
///     .build();
///
/// world.run_default();
/// ```
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn add_workload(&self, name: impl Into<Cow<'static, str>>) -> WorkloadBuilder<'_> {
    self.try_add_workload(name).unwrap()
}
/// Runs the `name` workload.
///
/// ### Borrows
///
/// - Scheduler (shared)
/// - Systems' borrow as they are executed
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Workload did not exist.
/// - Storage borrow failed.
/// - User error returned by system.
pub fn try_run_workload(&self, name: impl AsRef<str> + Sync) -> Result<(), error::RunWorkload> {
let scheduler = self
.scheduler
.try_borrow()
.map_err(|_| error::RunWorkload::Scheduler)?;
if let Some(range) = scheduler.workloads.get(name.as_ref()) {
self.try_run_workload_index(&*scheduler, range.clone())
} else {
Err(error::RunWorkload::MissingWorkload)
}
}
/// Runs the `name` workload.
/// Unwraps error.
///
/// ### Borrows
///
/// - Scheduler (shared)
/// - Systems' borrow as they are executed
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Workload did not exist.
/// - Storage borrow failed.
/// - User error returned by system.
///
/// ### Panics
///
/// - Panics on any of the errors above; this is the panicking version of
/// [try_run_workload](#method.try_run_workload).
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn run_workload(&self, name: impl AsRef<str> + Sync) {
    self.try_run_workload(name).unwrap();
}
/// Runs the systems of the batches selected by `workload`, a range into
/// the scheduler's flat batch list. Stops at the first failing system.
fn try_run_workload_index(
    &self,
    scheduler: &Scheduler,
    workload: Range<usize>,
) -> Result<(), error::RunWorkload> {
    for batch in &scheduler.batch[workload] {
        if batch.len() == 1 {
            // Single system: run it inline, no need for the thread pool.
            scheduler.systems[batch[0]](self).map_err(|err| {
                error::RunWorkload::Run((scheduler.system_names[batch[0]], err))
            })?;
        } else {
            #[cfg(feature = "parallel")]
            {
                use rayon::prelude::*;
                // A batch only contains systems whose borrows don't
                // conflict, so they can run concurrently on the pool;
                // `try_for_each` short-circuits on the first error.
                self.thread_pool.install(|| {
                    batch.into_par_iter().try_for_each(|&index| {
                        (scheduler.systems[index])(self).map_err(|err| {
                            error::RunWorkload::Run((scheduler.system_names[index], err))
                        })
                    })
                })?
            }
            #[cfg(not(feature = "parallel"))]
            {
                // Without the `parallel` feature the batch runs sequentially.
                batch.iter().try_for_each(|&index| {
                    (scheduler.systems[index])(self).map_err(|err| {
                        error::RunWorkload::Run((scheduler.system_names[index], err))
                    })
                })?
            }
        }
    }
    Ok(())
}
/// Runs the default workload, if any workload has been added.
///
/// ### Borrows
///
/// - Scheduler (shared)
/// - Systems' borrow as they are executed
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Storage borrow failed.
/// - User error returned by system.
pub fn try_run_default(&self) -> Result<(), error::RunWorkload> {
    let scheduler = self
        .scheduler
        .try_borrow()
        .map_err(|_| error::RunWorkload::Scheduler)?;
    // No batches means no workload was ever added: a no-op, not an error.
    if scheduler.batch.is_empty() {
        return Ok(());
    }
    self.try_run_workload_index(&scheduler, scheduler.default.clone())
}
/// Run the default workload if there is one.
/// Unwraps error.
///
/// ### Borrows
///
/// - Scheduler (shared)
/// - Systems' borrow as they are executed
///
/// ### Errors
///
/// - Scheduler borrow failed.
/// - Storage borrow failed.
/// - User error returned by system.
///
/// ### Panics
///
/// - Panics on any of the errors above; this is the panicking version of
/// [try_run_default](#method.try_run_default).
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn run_default(&self) {
    self.try_run_default().unwrap();
}
/// Creates an [EntityBuilder], a helper to build an entity without
/// borrowing each storage explicitly. Nothing is added until
/// [EntityBuilder::try_build] or [EntityBuilder::build] is called.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// [AllStorages]: struct.AllStorages.html
/// [EntityBuilder]: struct.EntityBuilder.html
/// [EntityBuilder::build]: struct.EntityBuilder.html#method.build
/// [EntityBuilder::try_build]: struct.EntityBuilder.html#method.try_build
pub fn try_entity_builder(&self) -> Result<EntityBuilder<'_, (), ()>, error::Borrow> {
    let all_storages = self.all_storages.try_borrow()?;
    Ok(EntityBuilder::new(all_storages))
}
/// Used to create an entity without having to borrow its storage explicitly.
/// The entity is only added when [EntityBuilder::try_build] or [EntityBuilder::build] is called.
/// Unwraps error.
///
/// ### Borrows
///
/// - [AllStorages] (shared)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
///
/// ### Panics
///
/// - Panics on the error above; this is the panicking version of
/// [try_entity_builder](#method.try_entity_builder).
///
/// [AllStorages]: struct.AllStorages.html
/// [EntityBuilder::build]: struct.EntityBuilder.html#method.build
/// [EntityBuilder::try_build]: struct.EntityBuilder.html#method.try_build
#[cfg(feature = "panic")]
#[cfg_attr(docsrs, doc(cfg(feature = "panic")))]
pub fn entity_builder(&self) -> EntityBuilder<'_, (), ()> {
    self.try_entity_builder().unwrap()
}
/// Serializes the [World] the way `ser_config` defines it.
///
/// ### Borrows
///
/// - [AllStorages] (exclusively)
///
/// ### Errors
///
/// - [AllStorages] borrow failed.
/// - Serialization error.
/// - Config not implemented. (temporary)
///
/// [AllStorages]: struct.AllStorages.html
/// [World]: struct.World.html
#[cfg(feature = "serde1")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde1")))]
pub fn serialize<S>(
    &self,
    ser_config: GlobalSerConfig,
    serializer: S,
) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
    <S as serde::Serializer>::Ok: 'static,
{
    // Only the default configuration is supported for now.
    if ser_config.same_binary
        && ser_config.with_entities
        && ser_config.with_shared == WithShared::PerStorage
    {
        // The serializer walks all storages through an exclusive borrow.
        serializer.serialize_newtype_struct(
            "World",
            &crate::storage::AllStoragesSerializer {
                all_storages: self
                    .all_storages
                    .try_borrow_mut()
                    .map_err(serde::ser::Error::custom)?,
                ser_config,
            },
        )
    } else {
        Err(serde::ser::Error::custom(
            "ser_config other than default isn't implemented yet",
        ))
    }
}
/// Creates a new [World] from a deserializer the way `de_config` defines it.
///
/// ### Errors
///
/// - Deserialization error.
/// - Config not implemented. (temporary)
///
/// [World]: struct.World.html
#[cfg(feature = "serde1")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde1")))]
pub fn new_deserialized<'de, D>(
    de_config: GlobalDeConfig,
    deserializer: D,
) -> Result<Self, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // Only the default configuration is implemented so far.
    if de_config.existing_entities == ExistingEntities::AsNew
        && de_config.with_shared == WithShared::PerStorage
    {
        // The visitor fills the fresh world's storages in place through an
        // exclusive `AllStorages` borrow; the world itself is returned.
        let world = World::new();
        deserializer.deserialize_struct(
            "World",
            &["metadata", "storages"],
            WorldVisitor {
                all_storages: world
                    .all_storages
                    .try_borrow_mut()
                    .map_err(serde::de::Error::custom)?,
                de_config,
            },
        )?;
        Ok(world)
    } else {
        Err(serde::de::Error::custom(
            "de_config other than default isn't implemented yet",
        ))
    }
}
}
/// `serde` visitor that deserializes a serialized `World` directly into a
/// freshly created world's storages.
#[cfg(feature = "serde1")]
struct WorldVisitor<'a> {
    // Exclusive borrow of the target world's storages.
    all_storages: RefMut<'a, AllStorages>,
    // Deserialization options forwarded to each storage.
    de_config: GlobalDeConfig,
}
#[cfg(feature = "serde1")]
impl<'de, 'a> serde::de::Visitor<'de> for WorldVisitor<'a> {
    type Value = ();
    fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // NOTE(review): `expecting` conventionally describes the expected
        // input (e.g. "struct World"); this message reads like a failure
        // report and will surface oddly in serde error output — consider
        // rewording.
        formatter.write_str("Could not format World")
    }
    // Expects a map shaped like { "metadata": [...], "storages": [...] }.
    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::MapAccess<'de>,
    {
        // (StorageId, deserialize-fn pointer) pairs; stays empty when no
        // "metadata" entry is present.
        let mut metadata: Vec<(StorageId, usize)> = Vec::new();
        if let Some((name, types)) = map.next_entry()? {
            match name {
                "metadata" => (),
                // Any other leading key is currently unsupported.
                _ => todo!(),
            }
            metadata = types;
        }
        match map.next_key_seed(core::marker::PhantomData)? {
            Some("storages") => (),
            _ => todo!(),
        }
        // Delegate the actual storage deserialization to the seed.
        map.next_value_seed(StoragesSeed {
            metadata,
            all_storages: self.all_storages,
            de_config: self.de_config,
        })?;
        Ok(())
    }
}
/// `DeserializeSeed` carrying everything needed to deserialize the
/// "storages" sequence in place.
#[cfg(feature = "serde1")]
struct StoragesSeed<'all> {
    // One (storage id, deserialize-fn pointer) pair per serialized storage.
    metadata: Vec<(StorageId, usize)>,
    all_storages: RefMut<'all, AllStorages>,
    de_config: GlobalDeConfig,
}
#[cfg(feature = "serde1")]
impl<'de> serde::de::DeserializeSeed<'de> for StoragesSeed<'_> {
    type Value = ();
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Local visitor: consumes the sequence of serialized storages, one
        // element per metadata entry.
        struct StoragesVisitor<'all> {
            metadata: Vec<(StorageId, usize)>,
            all_storages: RefMut<'all, AllStorages>,
            de_config: GlobalDeConfig,
        }
        impl<'de> serde::de::Visitor<'de> for StoragesVisitor<'_> {
            type Value = ();
            fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
                formatter.write_str("storages value")
            }
            fn visit_seq<A>(mut self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::SeqAccess<'de>,
            {
                let storages = self.all_storages.storages();
                for (i, (storage_id, deserialize_ptr)) in self.metadata.into_iter().enumerate() {
                    // Fetch the storage for this id, creating an empty one
                    // (with serde hooks installed) if it doesn't exist yet.
                    let storage: &mut Storage =
                        &mut storages.entry(storage_id).or_insert_with(|| {
                            // SAFETY(review): turns the serialized pointer
                            // back into a deserialize fn — this presumably
                            // relies on the dump coming from the same binary;
                            // confirm the input is trusted before feeding
                            // untrusted bytes here.
                            let deserialize =
                                unsafe { crate::unknown_storage::deserialize_fn(deserialize_ptr) };
                            let mut sparse_set = crate::sparse_set::SparseSet::<u8>::new();
                            sparse_set.metadata.serde = Some(crate::sparse_set::SerdeInfos {
                                serialization:
                                    |sparse_set: &crate::sparse_set::SparseSet<u8>,
                                     ser_config: GlobalSerConfig,
                                     serializer: &mut dyn crate::erased_serde::Serializer| {
                                        crate::erased_serde::Serialize::erased_serialize(
                                            &crate::sparse_set::SparseSetSerializer {
                                                sparse_set: &sparse_set,
                                                ser_config,
                                            },
                                            serializer,
                                        )
                                    },
                                deserialization: deserialize,
                                with_shared: true,
                            });
                            Storage(Box::new(AtomicRefCell::new(sparse_set, None, true)))
                        });
                    if seq
                        .next_element_seed(crate::storage::StorageDeserializer {
                            storage,
                            de_config: self.de_config,
                        })?
                        .is_none()
                    {
                        // Fewer elements than metadata entries: truncated dump.
                        return Err(serde::de::Error::invalid_length(i, &"more storages"));
                    }
                }
                Ok(())
            }
        }
        deserializer.deserialize_seq(StoragesVisitor {
            metadata: self.metadata,
            all_storages: self.all_storages,
            de_config: self.de_config,
        })
    }
}
|
//! # Operator Precedence Grammar Parser
//!
//! `opg` reads an context-free grammar input
//! and outputs the precedence of the operators.
mod dfs;
mod table;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::fs;
///
/// A struct to
/// represent a production.
///
/// For `U -> x y z` the left side is `"U"` and the
/// right side is `["x", "y", "z"]`.
///
struct Production {
    /// the left side of the production.
    left: String,
    /// the right side of the production (one entry per symbol).
    right: Vec<String>,
}
///
/// Generate FIRSTVT set for
/// every non-terminals.
///
/// ## Input
/// `productions` the vector of struct `Production`
///
/// `nts` the non-terminal set, which could be generated
/// from the function `get_non_terminals()`.
///
/// ## Return
/// `firstvt` the set of FIRSTVT
/// for all terminals.
///
/// ## Example
/// ```
/// let firstvt:HashMap<String, HashSet<String>> = gen_firstvt(&productions, &nts);
/// ```
///
/// ## Principles
/// Go through every production
/// and search the following patterns:
/// 1. U => U_1y
/// 2. U => U_1Ty
/// 3. U => Ty
///
/// call `dfs::compose_element()`
/// to get the final result.
///
fn gen_firstvt(
productions: &Vec<Production>,
nts: &HashSet<String>,
) -> HashMap<String, HashSet<String>> {
let mut firstvtmono: HashMap<String, HashSet<String>> = HashMap::new();
let mut firstvtcon: HashMap<String, HashSet<String>> = HashMap::new();
// Find mono terminal and
// record the containing part
for p in productions {
if nts.contains(p.right.first().unwrap()) {
// Case 1: U => U_1y
let ntc = firstvtcon
.entry(p.left.to_string())
.or_insert(HashSet::new());
ntc.insert(p.right.first().unwrap().to_string());
// Case 1*: U => U_1Ty
if p.right.len() > 1 && !nts.contains(&p.right[1]) {
let vts = firstvtmono
.entry(p.left.to_string())
.or_insert(HashSet::new());
vts.insert(p.right[1].clone());
}
} else {
// Case 2: U => Ty
let vts = firstvtmono
.entry(p.left.to_string())
.or_insert(HashSet::new());
vts.insert(p.right.first().unwrap().to_string());
}
}
let firstvt = dfs::compose_elements(&firstvtmono, &firstvtcon);
firstvt
}
///
/// Generate LASTVT set for
/// every non-terminals.
///
/// ## Input
/// `productions` the vector of struct `Production`
///
/// `nts` the non-terminal set, which could be generated
/// from the function `get_non_terminals()`.
///
/// ## Return
/// `lastvt` the set of LASTVT
/// for all terminals.
///
/// ## Example
/// ```
/// let lastvt:HashMap<String, HashSet<String>> = gen_lastvt(&productions, &nts);
/// ```
///
/// ## Principles
/// Go through every production
/// and search the following patterns:
/// 1. U => xU_1
/// 2. U => xTU_1
/// 3. U => xT
///
/// call `dfs::compose_element()`
/// to get the final result.
///
fn gen_lastvt(
productions: &Vec<Production>,
nts: &HashSet<String>,
) -> HashMap<String, HashSet<String>> {
let mut lastvtmono: HashMap<String, HashSet<String>> = HashMap::new();
let mut lastvtcon: HashMap<String, HashSet<String>> = HashMap::new();
// Find mono terminal and
// record the containing part
for p in productions {
if nts.contains(p.right.last().unwrap()) {
// Case 1: U => xU_1
let ntc = lastvtcon
.entry(p.left.to_string())
.or_insert(HashSet::new());
ntc.insert(p.right.last().unwrap().to_string());
// Case 1*: U => xTU_1
if p.right.len() > 1 && !nts.contains(&p.right[p.right.len() - 2]) {
let vts = lastvtmono
.entry(p.left.to_string())
.or_insert(HashSet::new());
vts.insert(p.right[p.right.len() - 2].clone());
}
} else {
// Case 2: U => xT
let vts = lastvtmono
.entry(p.left.to_string())
.or_insert(HashSet::new());
vts.insert(p.right.last().unwrap().to_string());
}
}
let lastvt = dfs::compose_elements(&lastvtmono, &lastvtcon);
lastvt
}
///
/// Find the equal operators
///
/// ## Input
/// `table` the mutable `OpTable` struct for output
///
/// `productions` the slice of struct `Production`
///
/// `nts` the non-terminal set, which could be generated
/// from the function `get_non_terminals()`.
///
/// ## Example
/// ```
/// find_eq(&mut table, &productions, &nts);
/// ```
///
/// ## Principles
/// Search the patterns `..T1 T2..` and `..T1 U T2..` — i.e. *consecutive*
/// terminals separated by at most one non-terminal — and make T1=T2.
/// The relation is directional: it does NOT indicate that T2=T1.
///
/// Bug fix: the previous version inserted `=` for *every* ordered pair of
/// terminals in a right-hand side (e.g. `if E then E else E` also yielded
/// `if = else`), which over-generates the relation and can produce
/// spurious table conflicts. Only consecutive terminal pairs form `=`.
///
fn find_eq(
    table: &mut table::OpTable,
    productions: &[Production],
    nts: &HashSet<String>,
) {
    for p in productions {
        // Keep only terminals, preserving order. In an operator grammar no
        // two non-terminals are adjacent, so consecutive entries of `pe`
        // were separated by at most one non-terminal in the production.
        let mut pe = p.right.clone();
        pe.retain(|x| !nts.contains(x));
        for pair in pe.windows(2) {
            table.insert(&(pair[0].clone(), pair[1].clone()), '=');
        }
    }
}
///
/// Find the less relations
///
/// ## Input
/// `table` the mutable `OpTable` struct for output
///
/// `productions` the slice of struct `Production`
///
/// `nts` the non-terminal set, which could be generated
/// from the function `get_non_terminals()`.
///
/// `firstvt` the FIRSTVT set generated from the function
/// `gen_firstvt()`.
///
/// ## Example
/// ```
/// let firstvt = gen_firstvt(&productions, &nts);
/// find_less(&mut table, &productions, &nts, &firstvt);
/// ```
///
/// ## Principles
/// Find ...T1U1..., where T2 is in FIRSTVT(U1) and
/// make T1<T2. Notice that this doesn't
/// indicate that T2>T1.
///
fn find_less(
    table: &mut table::OpTable,
    productions: &[Production],
    nts: &HashSet<String>,
    firstvt: &HashMap<String, HashSet<String>>,
) {
    for p in productions {
        // `windows(2)` yields nothing for right sides shorter than two
        // symbols, replacing the explicit length guard.
        for pair in p.right.windows(2) {
            // Pattern ..T1 U1..: T1 < t for every t in FIRSTVT(U1).
            if !nts.contains(&pair[0]) && nts.contains(&pair[1]) {
                for t in firstvt[&pair[1]].iter() {
                    table.insert(&(pair[0].clone(), t.clone()), '<');
                }
            }
        }
    }
}
///
/// Find the greater relations
///
/// ## Input
/// `table` the mutable `OpTable` struct for output
///
/// `productions` the slice of struct `Production`
///
/// `nts` the non-terminal set, which could be generated
/// from the function `get_non_terminals()`.
///
/// `lastvt` the LASTVT set generated from the function
/// `gen_lastvt()`.
///
/// ## Example
/// ```
/// let lastvt = gen_lastvt(&productions, &nts);
/// find_greater(&mut table, &productions, &nts, &lastvt);
/// ```
///
/// ## Principles
/// Find ...U1T2..., where T1 is in LASTVT(U1) and
/// make T1>T2. Notice that this doesn't
/// indicate that T2<T1.
///
fn find_greater(
    table: &mut table::OpTable,
    productions: &[Production],
    nts: &HashSet<String>,
    lastvt: &HashMap<String, HashSet<String>>,
) {
    for p in productions {
        // `windows(2)` yields nothing for right sides shorter than two
        // symbols, replacing the explicit length guard.
        for pair in p.right.windows(2) {
            // Pattern ..U1 T2..: t > T2 for every t in LASTVT(U1).
            if nts.contains(&pair[0]) && !nts.contains(&pair[1]) {
                for t in lastvt[&pair[0]].iter() {
                    table.insert(&(t.clone(), pair[1].clone()), '>');
                }
            }
        }
    }
}
///
/// Generate production list for
/// the grammar contents.
///
/// ## Input
/// `contents` the string read from file.
///
/// ## Output
/// `p` the vector of productions.
///
/// ## Example
/// ```
/// let mut productions: Vec<Production> = gen_productions(&contents);
/// ```
///
/// ## Principle
/// For every line in the file, split it on "->".
/// Then split the trimmed right side based on "|".
/// After processing, push the new `Production` struct
/// into the result.
///
/// Lines without a "->" (e.g. blank lines) are skipped instead of
/// panicking.
///
fn gen_productions(contents: &str) -> Vec<Production> {
    let mut p: Vec<Production> = Vec::new();
    for line in contents.lines() {
        let ps: Vec<_> = line.split("->").collect();
        // Robustness: skip blank or malformed lines (previously `ps[1]`
        // panicked on them).
        if ps.len() < 2 {
            continue;
        }
        let ls = ps[0].trim();
        // One alternative per "|"-separated segment of the right side.
        for rsp in ps[1].split('|') {
            let vs: Vec<_> = rsp.split_whitespace().collect();
            p.push(Production {
                left: ls.to_string(),
                right: vs.iter().map(|s| s.to_string()).collect(),
            });
        }
    }
    p
}
///
/// Get all the non terminals from
/// the generated production.
///
/// ## Input
/// `productions` the slice of struct `Production`
///
/// ## Output
/// The hashset contains non-terminals.
///
/// ## Example
/// ```
/// let nts = get_non_terminals(&productions);
/// ```
///
/// ## Principles
/// Collect all the symbols on the left side
/// in the productions.
///
fn get_non_terminals(productions: &[Production]) -> HashSet<String> {
    productions.iter().map(|p| p.left.clone()).collect()
}
///
/// Get terminals
///
/// ## Input
/// `productions` the vector of struct `Production`
///
/// `nts` the non-terminal set, which could be generated
/// from the function `get_non_terminals()`.
///
/// ## Output
/// The hashset contains terminals.
///
/// ## Example
/// ```
/// let nts = get_non_terminals(&productions);
/// let ts = get_terminals(&productions, &nts);
/// ```
///
/// ## Principles
/// To avoid repetative computing, receive the pre-computed
/// non-terminal set and eliminate them among the candidates
/// on the right side in each production.
///
fn get_terminals(productions: &Vec<Production>, nts: &HashSet<String>) -> HashSet<String> {
let mut ts: HashSet<String> = HashSet::new();
for p in productions {
for v in p.right.iter() {
if !nts.contains(v) {
ts.insert(v.clone());
}
}
}
ts
}
///
/// Generate Operator Precedence Table
/// for context-free grammar contents.
///
/// ## Input
/// `contents` The string read from file.
///
/// ## Example
/// ```
/// // Contents of the file
/// let contents = fs::read_to_string(filename).expect("No such file.");
/// // Get the table
/// opg_generate(&contents);
/// ```
///
/// ## Principles
/// Generate FIRSTVT and LASTVT for the contents.
/// Then add S->$S$ for the starting non-terminal.
/// Generate `OpTable` struct based on the algorithm
/// of `find_eq()`, `find_less()`, `find_greater()`.
/// Finally, print the `OpTable` and write it to `output.txt`.
///
/// ## Panics
/// Panics when the grammar contains no productions or the output file
/// cannot be written.
///
fn opg_generate(contents: &String) {
    let mut productions: Vec<Production> = gen_productions(contents);
    // An empty grammar has no starting non-terminal; fail with a clear
    // message instead of an index panic at `productions[0]`.
    assert!(
        !productions.is_empty(),
        "The grammar file contains no productions!"
    );
    let nts = get_non_terminals(&productions);
    let firstvt = gen_firstvt(&productions, &nts);
    let lastvt = gen_lastvt(&productions, &nts);
    // Add S => $S$ for the starting non-terminal so `$` delimits sentences.
    let startnt = productions[0].left.clone();
    productions.push(Production {
        left: startnt.clone(),
        right: vec!["$".to_string(), startnt, "$".to_string()],
    });
    let ts = get_terminals(&productions, &nts);
    // `ts` is not used afterwards, so move it instead of cloning.
    let mut table = table::OpTable::new(ts);
    // If a cell ends up with conflicting relations the grammar is not an
    // operator precedence grammar (i.e. it is ambiguous for this method).
    find_eq(&mut table, &productions, &nts);
    find_less(&mut table, &productions, &nts, &firstvt);
    find_greater(&mut table, &productions, &nts, &lastvt);
    print!("{}", table);
    fs::write("output.txt", &table.to_string()).expect("Cannot output file!");
}
/// Entry point: reads the grammar file named by the first CLI argument and
/// prints its operator precedence table.
fn main() {
    // The grammar file path must be the first command-line argument.
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        panic!("Please follow a file name!");
    }
    // Read the whole grammar, then build and print the table.
    let contents = fs::read_to_string(&args[1]).expect("No such file.");
    opg_generate(&contents);
}
|
use async_trait::async_trait;
use messagebus::{
derive::{Error as MbError, Message},
error, receivers, AsyncHandler, Bus, Message,
};
use thiserror::Error;
/// Single-variant error type used by this test's handlers; wraps any
/// `anyhow::Error`.
#[derive(Debug, Error, MbError)]
enum Error {
    #[error("Error({0})")]
    Error(anyhow::Error),
}
/// Converts a bus error for any message type into the local error type by
/// erasing it into `anyhow::Error`.
impl<M: Message> From<error::Error<M>> for Error {
    fn from(err: error::Error<M>) -> Self {
        Self::Error(err.into())
    }
}
/// Test message carrying a single `f32` payload.
#[derive(Debug, Clone, Message)]
struct MsgF32(pub f32);
/// Test message carrying a single `f64` payload.
#[derive(Debug, Clone, Message)]
struct MsgF64(pub f64);
/// Receiver registered on the bus; handles both message types below.
struct TmpReceiver;
#[async_trait]
impl AsyncHandler<MsgF64> for TmpReceiver {
    type Error = Error;
    type Response = ();
    /// Simulates a slow consumer.
    async fn handle(&self, _msg: MsgF64, _bus: &Bus) -> Result<Self::Response, Self::Error> {
        // NOTE(review): `std::thread::sleep` blocks the executor thread —
        // presumably deliberate here to create backpressure for the test;
        // confirm this is intended rather than `tokio::time::sleep`.
        std::thread::sleep(std::time::Duration::from_millis(100));
        Ok(())
    }
}
#[async_trait]
impl AsyncHandler<MsgF32> for TmpReceiver {
    type Error = Error;
    type Response = ();
    /// Forwards every `MsgF32` as a fixed `MsgF64` and waits for it to be
    /// processed before completing.
    async fn handle(&self, _msg: MsgF32, bus: &Bus) -> Result<Self::Response, Self::Error> {
        bus.send(MsgF64(12.0)).await.unwrap();
        // Wait until the forwarded message has drained through the bus.
        bus.flush::<MsgF64>().await;
        Ok(())
    }
}
/// Exercises a bounded MsgF32 subscription (buffer_size = 1,
/// max_parallel = 1) whose handler fans out MsgF64 messages, then flushes
/// and shuts the bus down cleanly.
#[tokio::test]
async fn test_backpressure() {
    let (b, poller) = Bus::build()
        .register(TmpReceiver)
        .subscribe_async::<MsgF32>(
            1,
            receivers::BufferUnorderedConfig {
                buffer_size: 1,
                max_parallel: 1,
            },
        )
        .done()
        .build();
    b.send(MsgF32(10.0)).await.unwrap();
    // b.idle_all().await;
    // Drain all in-flight messages, then shut down and wait for the poller.
    b.flush_all().await;
    b.close().await;
    poller.await;
}
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use crate::kty::{c_long};
pub use crate::syscall::raw::arch::asm::{
syscall0, syscall1, syscall2, syscall3, syscall4, syscall5, syscall6,
};
/// Syscall type
///
/// Alias for the platform's C `long` (`kty::c_long`).
pub type SCT = c_long;
|
use crypto::blake2s;
use hash::{H256, H512};
/// Concatenates two 32-byte hashes into one 64-byte buffer
/// (`a` first, then `b`).
#[inline]
fn concat(a: &H256, b: &H256) -> H512 {
    let mut result = [0u8; 64];
    result[0..32].copy_from_slice(a);
    result[32..64].copy_from_slice(b);
    result
}
pub fn merkle_root(hashes: &[H256]) -> H256 {
if hashes.len() == 1 {
return hashes[0].clone();
}
let mut row = vec![];
let mut i = 0;
while i + 1 < hashes.len() {
row.push(blake2s(&concat(&hashes[i], &hashes[i + 1])));
i += 2
}
// duplicate the last element if len is not even
if hashes.len() % 2 == 1 {
let last = hashes[hashes.len() - 1];
row.push(blake2s(&concat(&last, &last)));
}
merkle_root(&row)
} |
// Copyright (c) 2016, <daggerbot@gmail.com>
// This software is available under the terms of the zlib license.
// See COPYING.md for more information.
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::mpsc::{self, Sender, Receiver, TryRecvError};
use display::DisplayBridge;
use error::{Error, Result};
use event::{Event, PollResult, Timeout};
use imp::windows::device::DeviceProvider;
use imp::windows::window::WindowManager;
use imp::windows::worker::Worker;
/// `Display` provider for Windows.
pub struct DisplayProvider {
    /// Sending half of the event channel, cloneable for event producers.
    event_sender: Rc<Sender<Event>>,
    /// Receiving half drained by `next_event`.
    event_receiver: Receiver<Event>,
    /// Lazily-populated worker slot, shared via `worker()`.
    worker: Rc<RefCell<Option<Rc<Worker>>>>,
    /// Lazily-created default device, cached by `default_device`.
    default_device: RefCell<Option<Rc<DeviceProvider>>>,
    /// Window bookkeeping; windows are expired on `Event::Destroy`.
    window_manager: Rc<WindowManager>,
}
impl DisplayProvider {
    /// Hands out a clone of the event channel's sending half.
    pub fn event_sender (&self) -> Rc<Sender<Event>> {
        Rc::clone(&self.event_sender)
    }
    /// Hands out a shared handle to the window manager.
    pub fn window_manager (&self) -> Rc<WindowManager> {
        Rc::clone(&self.window_manager)
    }
    /// Hands out a shared handle to the worker slot.
    pub fn worker (&self) -> Rc<RefCell<Option<Rc<Worker>>>> {
        Rc::clone(&self.worker)
    }
}
impl DisplayBridge for DisplayProvider {
    type Device = DeviceProvider;
    /// Returns the default device, creating and caching it on first use.
    fn default_device (&self) -> Result<Rc<DeviceProvider>> {
        let mut device_ref = self.default_device.borrow_mut();
        if let Some(ref device) = *device_ref {
            // Already created: hand out the cached handle.
            return Ok(device.clone());
        }
        // NOTE(review): `try!` is the pre-`?` error-propagation macro; kept
        // for compatibility with the toolchain this file targets.
        let device = Rc::new(try!(DeviceProvider::default(self)));
        *device_ref = Some(device.clone());
        Ok(device)
    }
    /// Polls (Timeout::Now) or blocks (Timeout::Never) for the next event.
    fn next_event (&self, timeout: Timeout) -> PollResult {
        let event = match timeout {
            // Non-blocking: an empty queue is a timeout, not an error.
            Timeout::Now => match self.event_receiver.try_recv() {
                Ok(event) => event,
                Err(TryRecvError::Empty) => { return PollResult::Timeout; },
                Err(err) => { return PollResult::PollErr(Error::from(err)); },
            },
            // Blocking: wait until an event (or disconnection) arrives.
            Timeout::Never => match self.event_receiver.recv() {
                Ok(event) => event,
                Err(err) => { return PollResult::PollErr(Error::from(err)); },
            },
        };
        if let Event::Destroy(id) = event {
            // Drop the bookkeeping of windows that are gone before the
            // caller sees the event.
            self.window_manager.expire(id);
        };
        PollResult::Ok(event)
    }
    /// Creates the provider together with its internal event channel.
    fn open () -> Result<DisplayProvider> {
        let (event_sender, event_receiver) = mpsc::channel();
        Ok(DisplayProvider {
            event_sender: Rc::new(event_sender),
            event_receiver: event_receiver,
            worker: Rc::new(RefCell::new(None)),
            default_device: RefCell::new(None),
            window_manager: Rc::new(WindowManager::new()),
        })
    }
}
|
use json::JsonValue;
use regex::Regex;
use super::value_matchers::*;
use super::{SelectionLens, SelectionLensParser};
/// Lens selecting an object member by name, optionally constrained by a
/// value matcher.
struct Prop {
    /// Member name looked up in the JSON object.
    name: String,
    /// Optional matcher the member's value must satisfy; `None` means the
    /// member only has to exist.
    value: Option<JsonValueMemberMatcher>,
}
impl Prop {
pub fn prop_value_matches_exact<'a, 'b>(
prop: &'a JsonValue,
prop_value_matcher: &'b JsonValueMatcher,
) -> Option<&'a JsonValue> {
match (prop, prop_value_matcher) {
(&JsonValue::String(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.eq(prop_value))
}
(&JsonValue::Short(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.eq(prop_value))
}
(&JsonValue::Number(ref number_prop), &JsonValueMatcher::Number(ref prop_value)) => {
Some(prop).filter(|_| number_prop.eq(prop_value))
}
(&JsonValue::Boolean(ref bool_prop), &JsonValueMatcher::Boolean(ref prop_value)) => {
Some(prop).filter(|_| bool_prop.eq(prop_value))
}
(&JsonValue::Null, &JsonValueMatcher::Null) => Some(prop),
(_, _) => None,
}
}
pub fn prop_value_contains_exact<'a, 'b>(
prop: &'a JsonValue,
prop_value_matcher: &'b JsonValueMatcher,
) -> Option<&'a JsonValue> {
match (prop, prop_value_matcher) {
(&JsonValue::String(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| {
string_prop
.split_whitespace()
.any(|string_prop| string_prop.eq(prop_value))
})
}
(&JsonValue::Short(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| {
string_prop
.split_whitespace()
.any(|string_prop| string_prop.eq(prop_value))
})
}
(_, _) => None,
}
}
pub fn prop_value_is_prefixed_by<'a, 'b>(
prop: &'a JsonValue,
prop_value_matcher: &'b JsonValueMatcher,
) -> Option<&'a JsonValue> {
match (prop, prop_value_matcher) {
(&JsonValue::String(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.starts_with(prop_value))
}
(&JsonValue::Short(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.starts_with(prop_value))
}
(_, _) => None,
}
}
pub fn prop_value_is_suffixed_by<'a, 'b>(
prop: &'a JsonValue,
prop_value_matcher: &'b JsonValueMatcher,
) -> Option<&'a JsonValue> {
match (prop, prop_value_matcher) {
(&JsonValue::String(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.ends_with(prop_value))
}
(&JsonValue::Short(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.ends_with(prop_value))
}
(_, _) => None,
}
}
pub fn prop_value_contains<'a, 'b>(
prop: &'a JsonValue,
prop_value_matcher: &'b JsonValueMatcher,
) -> Option<&'a JsonValue> {
match (prop, prop_value_matcher) {
(&JsonValue::String(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.contains(prop_value))
}
(&JsonValue::Short(ref string_prop), &JsonValueMatcher::String(ref prop_value)) => {
Some(prop).filter(|_| string_prop.contains(prop_value))
}
(_, _) => None,
}
}
}
impl SelectionLens for Prop {
    /// Selects the named property out of a JSON object.
    ///
    /// Non-objects (and missing properties) yield `None`. When a value
    /// matcher was configured, the property is additionally filtered through
    /// the corresponding matching strategy.
    fn select<'a>(&self, input: Option<&'a JsonValue>) -> Option<&'a JsonValue> {
        let object = match input {
            Some(JsonValue::Object(object)) => object,
            _ => return None,
        };
        let prop = object.get(&self.name)?;
        match &self.value {
            None => Some(prop),
            Some(JsonValueMemberMatcher::Exact(matcher)) => {
                Prop::prop_value_matches_exact(prop, matcher)
            }
            Some(JsonValueMemberMatcher::ContainsExact(matcher)) => {
                Prop::prop_value_contains_exact(prop, matcher)
            }
            Some(JsonValueMemberMatcher::Prefixed(matcher)) => {
                Prop::prop_value_is_prefixed_by(prop, matcher)
            }
            Some(JsonValueMemberMatcher::Suffixed(matcher)) => {
                Prop::prop_value_is_suffixed_by(prop, matcher)
            }
            Some(JsonValueMemberMatcher::Contains(matcher)) => {
                Prop::prop_value_contains(prop, matcher)
            }
        }
    }
}
pub struct PropParser;
impl PropParser {
    /// Matches `pattern` against the two supported property syntaxes:
    /// a bare `.name` selector or a `{"name"<strategy>value}` matcher.
    ///
    /// Returns the property name, an optional value matcher, and the
    /// unparsed remainder of the pattern. `None` when the pattern matches
    /// neither form or the matching strategy cannot be identified.
    fn match_prop(pattern: &str) -> Option<(&str, Option<JsonValueMemberMatcher>, Option<&str>)> {
        lazy_static! {
            static ref RE_PROP: Regex =
                Regex::new(r#"^\.(?P<prop>([[:word:]])+)(?P<remainder>.+)?$"#).unwrap();
            static ref RE_PROP_VALUE: Regex = Regex::new(
                concat!(r#"^\{"(?P<prop>([[:word:]])+)"("#,r#"(?P<matchingStrategy>(:|~:|\$:|\^:|\*:)+)"#,r#"("(?P<stringValue>([^"])+)"|(?P<numberValue>([[:digit:]]+)+)|(?P<literalValue>([[:word:]])+)))?\}(?P<remainder>.+)?$"#)
            )
            .unwrap();
        }
        // `or_else` (not `or`) so the second, more expensive regex only runs
        // when the simple `.prop` form did not match.
        let cap = RE_PROP
            .captures(pattern)
            .or_else(|| RE_PROP_VALUE.captures(pattern))?;
        let prop = cap.name("prop")?;
        match identify_value_matcher(&cap) {
            Ok(json_matcher) => Some((
                prop.as_str(),
                json_matcher,
                cap.name("remainder").map(|remainder| remainder.as_str()),
            )),
            // An unparsable matching strategy invalidates the whole selector.
            Err(_) => None,
        }
    }
}
impl SelectionLensParser for PropParser {
    /// Parses the leading property selector off `lens_pattern`.
    ///
    /// On success, returns the built [`Prop`] lens plus the unconsumed tail
    /// of the pattern; otherwise echoes the input back as the error.
    fn try_parse<'a>(
        &self,
        lens_pattern: Option<&'a str>,
    ) -> Result<(Box<dyn SelectionLens>, Option<&'a str>), Option<&'a str>> {
        let pattern = lens_pattern.ok_or(lens_pattern)?;
        match PropParser::match_prop(pattern) {
            Some((prop_name, prop_value, remainder)) => {
                let lens = Prop {
                    name: prop_name.to_string(),
                    value: prop_value,
                };
                Ok((Box::new(lens), remainder))
            }
            None => Err(lens_pattern),
        }
    }
}
#[cfg(test)]
mod tests {
    // Unit tests for PropParser / Prop: parsing of `.name` and
    // `{"name"<strategy>value}` selectors and selection against sample
    // JSON objects built with the `object!` macro.
    use super::*;
    use json::object;
    // A bare `.name` selector selects that property from an object.
    #[test]
    fn should_match_prop() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(".name"));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["name"])),
            _ => panic!("Invalid result"),
        }
    }
    // A lone "." is the identity selector, which this parser must reject
    // (the unconsumed input is handed back as the error).
    #[test]
    fn shouldnt_match_identity() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some("."));
        assert!(res.is_err());
        match res {
            Err(Some(selector)) => assert_eq!(selector, "."),
            _ => panic!("Invalid result"),
        }
    }
    // Only the first `.father` segment is consumed; `.title` is returned
    // as the remainder for the next parser in the chain.
    #[test]
    fn should_return_remainder_when_it_matches_prop() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(".father.title"));
        assert!(res.is_ok());
        match res {
            Ok((_, umatched)) => assert_eq!(umatched, Some(".title")),
            _ => panic!("Invalid result"),
        }
    }
    // Selecting against no input is always None.
    #[test]
    fn should_return_none_when_json_isnt_present() {
        let prop = Prop {
            name: String::from(".id"),
            value: None,
        };
        assert_eq!(prop.select(None), None);
    }
    // A Prop without a value matcher selects purely by name.
    #[test]
    fn should_return_json_prop_when_json_has_prop() {
        let data = &object! {
            "name" => "John Doe",
            "age" => 30
        };
        let prop = Prop {
            name: String::from("name"),
            value: None,
        };
        assert_eq!(prop.select(Some(data)), Some(&data["name"]));
    }
    // `:` strategy with a numeric literal matches number-valued props.
    #[test]
    fn should_match_number_prop() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"age":30}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["age"])),
            _ => panic!("Invalid result"),
        }
    }
    // `:` is exact string equality.
    #[test]
    fn should_match_string_property_value_when_using_exact_matching_strategy() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"country":"IRL"}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "country" => "IRL"
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["country"])),
            _ => panic!("Invalid result"),
        }
    }
    // `~:` matches any whitespace-separated token exactly ("IRL GBR" has
    // the token "GBR").
    #[test]
    fn should_match_string_property_value_when_using_contains_exact_matching_strategy() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"country"~:"GBR"}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "country" => "IRL GBR"
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["country"])),
            _ => panic!("Invalid result"),
        }
    }
    // `^:` is prefix matching.
    #[test]
    fn should_match_string_property_value_when_using_prefixed_matching_strategy() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"country"^:"IRL"}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "country" => "IRL GBR"
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["country"])),
            _ => panic!("Invalid result"),
        }
    }
    // `$:` is suffix matching.
    #[test]
    fn should_match_string_property_value_when_using_suffixed_matching_strategy() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"country"$:"GBR"}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "country" => "IRL GBR"
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["country"])),
            _ => panic!("Invalid result"),
        }
    }
    // Boolean and null literals are accepted as exact-match values.
    #[test]
    fn should_match_boolean_false_prop() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"is_known":false}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "is_known" => false
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["is_known"])),
            _ => panic!("Invalid result"),
        }
    }
    #[test]
    fn should_match_boolean_true_prop() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"is_anonymous":true}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "is_anonymous" => true
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["is_anonymous"])),
            _ => panic!("Invalid result"),
        }
    }
    #[test]
    fn should_match_null_prop() {
        let prop_parser = PropParser {};
        let res = prop_parser.try_parse(Some(r#"{"identity":null}"#));
        assert!(res.is_ok());
        let data = &object! {
            "name" => "John Doe",
            "age" => 30,
            "identity" => JsonValue::Null
        };
        match res {
            Ok((matcher, _)) => assert_eq!(matcher.select(Some(data)), Some(&data["identity"])),
            _ => panic!("Invalid result"),
        }
    }
}
|
/*
* Copyright 2017 Eldad Zack
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to
* whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* https://opensource.org/licenses/MIT
*
*/
use std::fmt;
use crate::u14::U14BE;
use crate::error::ParseError;
use serde::{Serialize, Serializer, Deserialize, Deserializer};
use serde::de::{Visitor, Unexpected};
use serde::de;
use serde_derive::{Serialize, Deserialize};
// Note
/// A MIDI note, rendered as `"<name> <octave>"` (e.g. `C# 4`); see the
/// `Display`/`from_str` pair below for the mapping.
#[derive(Copy, Clone, Default)]
pub struct Note {
    // Raw MIDI note number: 0 formats as "C -1", 127 as "G 9".
    value: u8,
}
impl Serialize for Note {
    /// Serializes the note in its human-readable `"<name> <octave>"` form
    /// (the `Display` representation).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: Serializer
    {
        let repr = self.to_string();
        serializer.serialize_str(&repr)
    }
}
struct NoteVisitor;
impl<'de> Visitor<'de> for NoteVisitor {
type Value = Note;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("note and octave string separated by space (e.g., `B# 4`)")
}
fn visit_str<E>(self, value: &str) -> Result <Note, E>
where E: de::Error
{
match Note::from_str(value) {
Ok(n) => Ok(n),
Err(_) => Err(de::Error::invalid_value(Unexpected::Str(value), &self)),
}
}
}
impl<'de> Deserialize<'de> for Note {
    /// Deserializes a note from its string representation via `NoteVisitor`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where D: Deserializer<'de>
    {
        // The result already has the right type; `Ok(...?)` was redundant.
        deserializer.deserialize_str(NoteVisitor)
    }
}
impl Note {
    /// Parses a `"<note> <octave>"` string (e.g. `"C# 4"`, `"Db -1"`).
    ///
    /// Accepts sharp (`C#`), flat (`Db`) and combined (`C#/Db`) spellings.
    /// Returns a `ParseError` for malformed strings, unknown note names,
    /// unparsable octaves, or values outside the MIDI range 0-127
    /// (previously a bad octave caused a panic via `unwrap()`).
    fn from_str(s: &str) -> Result<Self, ParseError> {
        let note_octave: Vec<&str> = s.split(' ').collect();
        if note_octave.len() != 2 {
            return Err(ParseError::new(&format!("cannot parse note string {} (expected string with exactly one space)", s)));
        }
        let note: i32 = match note_octave[0] {
            "C" => 0,
            "C#/Db" => 1, "C#" => 1, "Db" => 1,
            "D" => 2,
            "D#/Eb" => 3, "D#" => 3, "Eb" => 3,
            "E" => 4,
            "F" => 5,
            "F#/Gb" => 6, "F#" => 6, "Gb" => 6,
            "G" => 7,
            "G#/Ab" => 8, "G#" => 8, "Ab" => 8,
            "A" => 9,
            "A#/Bb" => 10, "A#" => 10, "Bb" => 10,
            "B" => 11,
            _ => return Err(ParseError::new(&format!("cannot parse note {} from {}", note_octave[0], s))),
        };
        // Report octave parse failures instead of panicking.
        let octave: i32 = note_octave[1].parse().map_err(|_| {
            ParseError::new(&format!("cannot parse octave {} from {}", note_octave[1], s))
        })?;
        // MIDI note 0 is "C -1". Compute in i32 to avoid i8/u8 overflow,
        // then range-check before narrowing back to u8.
        let value = (octave + 1) * 12 + note;
        if !(0..=127).contains(&value) {
            return Err(ParseError::new(&format!("note {} is outside the MIDI range 0-127", s)));
        }
        Ok(Note { value: value as u8 })
    }
    /// Formats the note as `"<name> <octave>"`; accidentals render in the
    /// combined `X#/Yb` spelling.
    pub fn as_str(&self) -> String {
        let octave: i8 = (self.value / 12) as i8 - 1;
        let note = match self.value % 12 {
            0 => "C",
            1 => "C#/Db",
            2 => "D",
            3 => "D#/Eb",
            4 => "E",
            5 => "F",
            6 => "F#/Gb",
            7 => "G",
            8 => "G#/Ab",
            9 => "A",
            10 => "A#/Bb",
            11 => "B",
            _ => unreachable!(),
        };
        format!("{} {}", note, octave)
    }
}
impl fmt::Display for Note {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `pad` (rather than `write!`) honours width/alignment flags, e.g.
        // the `{:13}` used by Pad's Debug impl to align columns.
        f.pad(&self.as_str())
    }
}
impl fmt::Debug for Note {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Debug shows both the name and the raw MIDI number, e.g. "C 4 (60)".
        f.pad(&format!("{} ({})", self.as_str(), self.value))
    }
}
#[test]
fn test_note_parse_str() {
    // Lowest note, sharp/flat spellings, and the highest MIDI note.
    assert_eq!(0, Note::from_str("C -1").unwrap().value);
    assert_eq!(1, Note::from_str("C# -1").unwrap().value);
    assert_eq!(1, Note::from_str("C#/Db -1").unwrap().value);
    assert_eq!(127, Note::from_str("G 9").unwrap().value);
    // Parse/format round-trips through the canonical spelling.
    assert_eq!("C -1", Note::from_str("C -1").unwrap().as_str());
    assert_eq!("G 9", Note::from_str("G 9").unwrap().as_str());
    // A string without an octave part is rejected, not a panic.
    assert!(Note::from_str("C").is_err());
}
// Toggle
/// A two-state on/off setting, stored on the device as a single byte.
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
enum Toggle {
    Off = 0,
    On = 1,
}
impl Toggle {
    /// Decodes a device byte into a toggle; anything but 0/1 is an error.
    fn from(value: u8) -> Result<Self, ParseError> {
        let toggle = match value {
            0 => Toggle::Off,
            1 => Toggle::On,
            _ => return Err(ParseError::new(&format!("Unknown value for toggle {}", value))),
        };
        Ok(toggle)
    }
}
// Knob
/// One rotary-knob assignment (3 bytes on the wire; see `Knob::from`).
#[derive(Serialize, Deserialize, Copy, Clone, Default)]
struct Knob {
    control: u8, // controller number the knob is assigned to
    min: u8,     // low end of the knob's output range
    max: u8,     // high end of the knob's output range
}
impl fmt::Debug for Knob {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Fixed 3-wide fields keep multi-knob listings column-aligned.
        write!(f, "Control: {:3}, Min: {:3}, Max: {:3}", self.control, self.min, self.max)
    }
}
impl Knob {
    /// Decodes a knob from its 3-byte wire layout `[control, min, max]`.
    fn from(raw: [u8; 3]) -> Self {
        let [control, min, max] = raw;
        Knob { control, min, max }
    }
    /// Encodes back to the wire layout; mirrors `from`.
    fn to_bytes(&self) -> [u8; 3] {
        [self.control, self.min, self.max]
    }
}
// PadMode
/// How a pad responds to presses, stored as a single device byte.
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
enum PadMode {
    Momentary = 0,
    Toggle = 1,
}
impl PadMode {
    /// Decodes a device byte into a pad mode; anything but 0/1 is an error.
    fn from(value: u8) -> Result<Self, ParseError> {
        let mode = match value {
            0 => PadMode::Momentary,
            1 => PadMode::Toggle,
            _ => return Err(ParseError::new(&format!("Unknown padmode value {}", value))),
        };
        Ok(mode)
    }
}
impl Default for PadMode {
    /// Pads default to `Momentary`.
    fn default() -> Self {
        PadMode::Momentary
    }
}
// Pad
/// One drum-pad assignment (4 bytes on the wire; layout handled by
/// `Pad::from` / `Pad::to_bytes`).
#[derive(Serialize, Deserialize, Copy, Clone, Default)]
struct Pad {
    note: Note,
    control: u8, // controller number sent by the pad
    program: u8, // program-change number sent by the pad
    mode: PadMode,
}
impl fmt::Debug for Pad {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Fixed-width fields ({:13}, {:3}) align the 16-pad listing printed
        // by MpkBankDescriptor's Display impl.
        write!(f, "Note: {:13}, Control: {:3}, Program: {:3}, Mode: {:?}", self.note, self.control, self.program, self.mode)
    }
}
impl Pad {
    /// Decodes a pad from its 4-byte wire layout
    /// `[note, program, control, mode]`.
    fn from(value: [u8; 4]) -> Result<Self, ParseError> {
        Ok(Pad {
            note: Note { value: value[0] },
            program: value[1],
            control: value[2],
            mode: PadMode::from(value[3])?,
        })
    }
    /// Encodes back to the wire layout. This must mirror `from` exactly
    /// ([note, program, control, mode]) so a read-modify-write round-trips;
    /// the previous order swapped `control` and `program`.
    fn to_bytes(&self) -> [u8; 4] {
        [self.note.value, self.program, self.control, self.mode as u8]
    }
}
// ClockSource
/// Where the arpeggiator clock comes from.
#[derive(Serialize, Deserialize, Debug)]
enum ClockSource {
    Internal,
    External,
}
impl ClockSource {
    /// Decodes a device byte into a clock source; anything but 0/1 errors.
    fn from(value: u8) -> Result<Self, ParseError> {
        let source = match value {
            0 => ClockSource::Internal,
            1 => ClockSource::External,
            _ => return Err(ParseError::new(&format!("Unknown clock source value {}", value))),
        };
        Ok(source)
    }
}
// ArpeggiatorTimeDivision
/// Arpeggiator note-length division; `_4T` etc. are triplet variants.
/// Rendered as `1/4`, `1/4T`, ... by the Display impl below.
#[derive(Serialize, Deserialize, Debug)]
enum ArpeggiatorTimeDivision {
    _4,
    _4T,
    _8,
    _8T,
    _16,
    _16T,
    _32,
    _32T,
}
impl ArpeggiatorTimeDivision {
    /// Decodes the device's 0-7 index into a time division.
    fn from(value: u8) -> Result<Self, ParseError> {
        let division = match value {
            0 => ArpeggiatorTimeDivision::_4,
            1 => ArpeggiatorTimeDivision::_4T,
            2 => ArpeggiatorTimeDivision::_8,
            3 => ArpeggiatorTimeDivision::_8T,
            4 => ArpeggiatorTimeDivision::_16,
            5 => ArpeggiatorTimeDivision::_16T,
            6 => ArpeggiatorTimeDivision::_32,
            7 => ArpeggiatorTimeDivision::_32T,
            _ => return Err(ParseError::new(&format!("Invalid arpeggiator time division {}", value))),
        };
        Ok(division)
    }
}
impl fmt::Display for ArpeggiatorTimeDivision {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // Variant names are `_4`, `_8T`, ...: take the Debug form, strip the
        // leading underscore, and present as `1/4`, `1/8T`, ...
        let enumrepr = format!("{:?}", self);
        write!(f, "1/{}", &enumrepr[1..])
    }
}
// ArpeggiatorMode
/// Order in which the arpeggiator plays the held notes.
#[derive(Serialize, Deserialize, Debug)]
enum ArpeggiatorMode {
    Up = 0,
    Down = 1,
    Exclusive = 2,
    Inclusive = 3,
    Order = 4,
    Random = 5,
}
impl ArpeggiatorMode {
    /// Decodes the device's 0-5 index into an arpeggiator mode.
    fn from(value: u8) -> Result<Self, ParseError> {
        let mode = match value {
            0 => ArpeggiatorMode::Up,
            1 => ArpeggiatorMode::Down,
            2 => ArpeggiatorMode::Exclusive,
            3 => ArpeggiatorMode::Inclusive,
            4 => ArpeggiatorMode::Order,
            5 => ArpeggiatorMode::Random,
            _ => return Err(ParseError::new(&format!("Invalid arpeggiator mode {}", value))),
        };
        Ok(mode)
    }
}
// Swing
/// Arpeggiator swing amount; the variant name carries the percentage
/// (`_50` = straight timing). Rendered as `NN%` by the Display impl below.
#[derive(Serialize, Deserialize, Debug)]
enum Swing {
    _50 = 0,
    _55 = 1,
    _57 = 2,
    _59 = 3,
    _61 = 4,
    _64 = 5,
}
impl Swing {
    /// Decodes the device's 0-5 index into a swing setting.
    fn from(value: u8) -> Result<Self, ParseError> {
        let swing = match value {
            0 => Swing::_50,
            1 => Swing::_55,
            2 => Swing::_57,
            3 => Swing::_59,
            4 => Swing::_61,
            5 => Swing::_64,
            _ => return Err(ParseError::new(&format!("Invalid swing value {}", value))),
        };
        Ok(swing)
    }
}
impl fmt::Display for Swing {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // Strip the leading underscore from the Debug form (`_55` -> `55%`).
        let enumrepr = format!("{:?}", self);
        write!(f, "{}%", &enumrepr[1..])
    }
}
// Joystick
/// Behaviour of one joystick axis (3 bytes on the wire; see
/// `Joystick::from`).
#[derive(Serialize, Deserialize, Debug)]
enum Joystick {
    Pitchbend,
    ControlChannel(u8),
    SplitControlChannels(u8, u8), // X: Left, Right, Y: Up, Down
}
impl Joystick {
    /// Decodes a joystick axis from its 3-byte layout `[mode, cc1, cc2]`.
    fn from(bytes: [u8; 3]) -> Result<Self, ParseError> {
        match bytes[0] {
            0 => Ok(Joystick::Pitchbend),
            1 => Ok(Joystick::ControlChannel(bytes[1])),
            2 => Ok(Joystick::SplitControlChannels(bytes[1], bytes[2])),
            // Report the mode byte itself (bytes[0]); the old message showed
            // bytes[1], which is a controller number, not the rejected mode.
            _ => Err(ParseError::new(&format!("Invalid joystick mode {}", bytes[0]))),
        }
    }
    /// Encodes back to the wire layout; unused controller slots are zeroed.
    fn to_bytes(&self) -> [u8; 3] {
        match *self {
            Joystick::Pitchbend => [0; 3],
            Joystick::ControlChannel(c) => [1, c, 0],
            Joystick::SplitControlChannels(c1, c2) => [2, c1, c2],
        }
    }
}
// MpkBankDescriptor
const MPK_BANK_DESCRIPTOR_LENGTH: usize = 108;
/// Decoded bank configuration.
///
/// The wire format is the 108-byte blob handled by `from`/`into_bytes`;
/// the field order below does NOT match the wire order.
#[derive(Serialize, Deserialize)]
pub struct MpkBankDescriptor {
    octave: u8, // displayed as octave - 4 (see Display impl)
    transpose: u8, // -12 (0) .. +12 (24)
    pad_midi_channel: u8, // 0-based on the wire; displayed 1-based
    keybed_channel: u8,   // 0-based on the wire; displayed 1-based
    joystick_x: Joystick,
    joystick_y: Joystick,
    arpeggiator: Toggle,
    arpeggiator_mode: ArpeggiatorMode,
    arpeggiator_time_division: ArpeggiatorTimeDivision,
    arpeggiator_octave: u8, // 0..3
    swing: Swing,
    latch: Toggle,
    clock_source: ClockSource,
    tempo_taps: u8,
    tempo: U14BE, // 14-bit big-endian value (per `U14BE`)
    knobs: [Knob; 8],
    pads: [Pad; 16],
}
impl fmt::Display for MpkBankDescriptor {
    /// Renders a human-readable summary of the bank; channels and octaves
    /// are shown 1-based / offset as the device editor presents them.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "PAD Channel: {}", self.pad_midi_channel + 1)?;
        writeln!(f, "Keybed Channel: {}", self.keybed_channel + 1)?;
        writeln!(f, "Octave: {}", self.octave as i8 - 4)?;
        writeln!(f, "Transpose: {}", self.transpose as i8 - 12)?;
        writeln!(f, "Arpeggiator: {:?}", self.arpeggiator)?;
        writeln!(f, "Arpeggiator Mode: {:?}", self.arpeggiator_mode)?;
        writeln!(f, "Arpeggiator Time Division: {}", self.arpeggiator_time_division)?;
        writeln!(f, "Arpeggiator Tempo: {}", self.tempo)?;
        writeln!(f, "Arpeggiator Octave: {}", self.arpeggiator_octave + 1)?;
        writeln!(f, "Swing: {}", self.swing)?;
        writeln!(f, "Clock source: {:?}", self.clock_source)?;
        writeln!(f, "Latch: {:?}", self.latch)?;
        writeln!(f, "Tempo taps: {}", self.tempo_taps)?;
        writeln!(f, "Joystick X: {:?}", self.joystick_x)?;
        writeln!(f, "Joystick Y: {:?}", self.joystick_y)?;
        for (i, knob) in self.knobs.iter().enumerate() {
            writeln!(f, "Knob {}: {:?}", i + 1, knob)?;
        }
        // Pads are presented as banks A1-A8 and B1-B8.
        for (i, pad) in self.pads.iter().enumerate() {
            let padbank = if i < 8 { "A" } else { "B" };
            writeln!(f, "Pad {}{}: {:?}", padbank, i % 8 + 1, pad)?;
        }
        Ok(())
    }
}
impl fmt::Debug for MpkBankDescriptor {
    /* TODO: write raw bytes */
    // Debug currently just delegates to the Display rendering.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self)
    }
}
impl MpkBankDescriptor {
    /// Parses the 24-byte knob region into the 8 knob configs
    /// (3 bytes each: control, min, max).
    fn parse_knobs(bytes: &[u8]) -> Result<[Knob; 8], ParseError> {
        if bytes.len() != 8 * 3 {
            return Err(ParseError::new(&format!("trying to parse knobs with unexpected length {} (expected {})", bytes.len(), 8 * 3)));
        }
        let mut knobs: [Knob; 8] = [Knob::default(); 8];
        // Length is checked above, so chunks_exact yields exactly 8 chunks.
        for (i, chunk) in bytes.chunks_exact(3).enumerate() {
            knobs[i] = Knob::from([chunk[0], chunk[1], chunk[2]]);
        }
        Ok(knobs)
    }
    /// Parses the 64-byte pad region into the 16 pad configs
    /// (4 bytes each: note, program, control, mode).
    fn parse_pads(bytes: &[u8]) -> Result<[Pad; 16], ParseError> {
        if bytes.len() != 16 * 4 {
            return Err(ParseError::new(&format!("trying to parse pads with unexpected length {} (expected {})", bytes.len(), 16 * 4)));
        }
        let mut pads: [Pad; 16] = [Pad::default(); 16];
        // Length is checked above, so chunks_exact yields exactly 16 chunks.
        for (i, chunk) in bytes.chunks_exact(4).enumerate() {
            pads[i] = Pad::from([chunk[0], chunk[1], chunk[2], chunk[3]])?;
        }
        Ok(pads)
    }
    /// Decodes a full 108-byte bank descriptor received from the device.
    ///
    /// Wire layout: header settings in bytes 0..13, joysticks at 13..19,
    /// pads at 19..83, knobs at 83..107, transpose at 107.
    pub fn from(bytes: &[u8]) -> Result<Self, ParseError> {
        if bytes.len() != MPK_BANK_DESCRIPTOR_LENGTH {
            return Err(ParseError::new(&format!("Unexpected length for bank descriptor ({}, expected {})", bytes.len(), MPK_BANK_DESCRIPTOR_LENGTH)));
        }
        Ok(MpkBankDescriptor {
            pad_midi_channel: bytes[0],
            keybed_channel: bytes[1],
            octave: bytes[2],
            arpeggiator: Toggle::from(bytes[3])?,
            arpeggiator_mode: ArpeggiatorMode::from(bytes[4])?,
            arpeggiator_time_division: ArpeggiatorTimeDivision::from(bytes[5])?,
            clock_source: ClockSource::from(bytes[6])?,
            latch: Toggle::from(bytes[7])?,
            swing: Swing::from(bytes[8])?,
            tempo_taps: bytes[9],
            tempo: U14BE::from_device([bytes[10], bytes[11]])?,
            arpeggiator_octave: bytes[12],
            joystick_x: Joystick::from([bytes[13], bytes[14], bytes[15]])?,
            joystick_y: Joystick::from([bytes[16], bytes[17], bytes[18]])?,
            pads: MpkBankDescriptor::parse_pads(&bytes[19..83])?,
            knobs: MpkBankDescriptor::parse_knobs(&bytes[83..107])?,
            transpose: bytes[107],
        })
    }
    /// Encodes the descriptor back into the 108-byte wire layout.
    /// Must stay the exact mirror of `from` above.
    pub fn into_bytes(self) -> Vec<u8> {
        let mut ret: Vec<u8> = vec![
            self.pad_midi_channel,
            self.keybed_channel,
            self.octave,
            self.arpeggiator as u8,
            self.arpeggiator_mode as u8,
            self.arpeggiator_time_division as u8,
            self.clock_source as u8,
            self.latch as u8,
            self.swing as u8,
            self.tempo_taps,
        ];
        // to_device can only fail for values outside 14 bits, which the
        // stored tempo cannot be; treat a failure as a broken invariant.
        append_array!(ret, self.tempo.to_device().unwrap());
        ret.push(self.arpeggiator_octave);
        append_array!(ret, self.joystick_x.to_bytes());
        append_array!(ret, self.joystick_y.to_bytes());
        for pad in &self.pads {
            append_array!(ret, pad.to_bytes());
        }
        for knob in &self.knobs {
            append_array!(ret, knob.to_bytes());
        }
        ret.push(self.transpose);
        // Guard against layout drift between from() and into_bytes().
        assert_eq!(ret.len(), MPK_BANK_DESCRIPTOR_LENGTH);
        ret
    }
}
|
use std::collections::HashMap;
use std::fs;
use std::io::Error;
use point::PointT;
use point::PointPos;
use polyomino::Polyomino;
/// Symmetry-breaking restriction applied when expanding polyomino
/// variations (see `build_variations`).
#[allow(dead_code)]
pub enum Restrictions {
    None,
    SquareSymmetry,
    RectangularSymmetry, // SingleSided
}
/// Expands each polyomino into the list of its rotation/reflection
/// variations.
///
/// `restrict` appears intended for symmetry breaking during a board
/// search: the FIRST polyomino found with a full set of 8 distinct
/// variations (i.e. no symmetry of its own) is pinned to one orientation
/// (`SquareSymmetry`) or to the original plus one rotation
/// (`RectangularSymmetry`); all other polyominoes keep every variation.
/// NOTE(review): confirm that restricting only the first asymmetric piece
/// is the intended policy.
///
/// Takes a slice rather than `&Vec` so any contiguous collection works;
/// existing `&Vec` call sites still compile via deref coercion.
#[allow(dead_code)]
pub fn build_variations<T: PointT>(polys: &[Polyomino<T>], restrict: Restrictions) -> Vec<Vec<Polyomino<T>>> {
    let mut res = Vec::with_capacity(polys.len());
    let mut found_asym = false;
    for p in polys {
        let mut variations = p.make_all_variations();
        match restrict {
            Restrictions::None => (),
            Restrictions::SquareSymmetry => {
                if !found_asym && variations.len() == 8 {
                    found_asym = true;
                    variations = vec![p.clone()];
                }
            }
            Restrictions::RectangularSymmetry => {
                if !found_asym && variations.len() == 8 {
                    found_asym = true;
                    variations = vec![p.clone(), p.clone().rotate()];
                }
            }
        }
        res.push(variations);
    }
    res
}
/// The polyomino families bundled with the crate as embedded data files.
#[derive(Eq,Hash,PartialEq)]
pub enum PredefinedPolyominoes {
    Monominoes,
    Dominoes,
    Triominoes,
    Tetrominoes,
    Pentominoes,
    Hexominoes,
    Heptominoes,
    Octominoes,
}
lazy_static! {
    // Built-in polyomino definitions, embedded into the binary at compile
    // time. Each .poly file contains ASCII drawings separated by blank
    // lines (see read_polyomino_string for the format).
    static ref HASHMAP: HashMap<PredefinedPolyominoes, &'static str> = {
        let mut hm = HashMap::new();
        hm.insert(PredefinedPolyominoes::Monominoes, include_str!("../data/monomino.poly"));
        hm.insert(PredefinedPolyominoes::Dominoes, include_str!("../data/domino.poly"));
        hm.insert(PredefinedPolyominoes::Triominoes, include_str!("../data/triomino.poly"));
        hm.insert(PredefinedPolyominoes::Tetrominoes, include_str!("../data/tetromino.poly"));
        hm.insert(PredefinedPolyominoes::Pentominoes, include_str!("../data/pentomino.poly"));
        hm.insert(PredefinedPolyominoes::Hexominoes, include_str!("../data/hexomino.poly"));
        hm.insert(PredefinedPolyominoes::Heptominoes, include_str!("../data/heptomino.poly"));
        hm.insert(PredefinedPolyominoes::Octominoes, include_str!("../data/octomino.poly"));
        hm
    };
}
pub fn get_polyominoes<T:PointT>(polytype: PredefinedPolyominoes, make_point:&dyn Fn(PointPos, PointPos) -> T) -> Result<Vec<Polyomino<T>>, Error> {
read_polyomino_string(&HASHMAP.get(&polytype).unwrap().to_string(), make_point)
}
pub fn read_polyominoes_from_file<T:PointT>(name: &str, make_point:&dyn Fn(PointPos, PointPos) -> T ) -> Result<Vec<Polyomino<T>>, Error> {
let contents = fs::read_to_string(name)?;
read_polyomino_string(&contents, make_point)
}
fn read_polyomino_string<T:PointT>(contents: &String, make_point:&dyn Fn(PointPos, PointPos) -> T) -> Result<Vec<Polyomino<T>>, Error> {
let mut res = Vec::new();
let mut count = 0;
let mut points = Vec::new();
for line in contents.split('\n') {
match line {
// polyominoes are separated by empty lines.
"" => {
res.push(Polyomino::new(points));
points = Vec::new();
count = 0;
}
// anything else is a definition
str => {
for (i, c) in str.chars().enumerate() {
if c != ' ' {
points.push(make_point(count, i as PointPos));
}
}
count += 1;
}
}
}
if !points.is_empty() {
res.push(Polyomino::new(points));
}
Ok(res)
}
|
#[macro_use]
extern crate serde_derive;
mod client;
mod protocol;
pub use client::Client;
|
mod local;
use clap::{ App, SubCommand, ArgMatches };
use context::Context;
/// Builds the `addrs` CLI subcommand definition, including its nested
/// `local` subcommand.
pub fn subcommand() -> App<'static, 'static> {
    let nested = vec![local::subcommand()];
    SubCommand::with_name("addrs")
        .about("List known addresses")
        .subcommands(nested)
}
/// Dispatches the `addrs` subcommand: `addrs local` is delegated to the
/// `local` module, a bare `addrs` runs the default peer listing.
pub fn run(context: &mut Context, matches: &ArgMatches) {
    match matches.subcommand() {
        ("local", Some(matches)) => local::run(context, matches),
        // No nested subcommand given: run the default listing.
        (_, None) => run_self(context, matches),
        // clap only yields subcommand names declared in `subcommand()`, so
        // any other (name, Some(_)) pair is impossible here.
        _ => unreachable!(),
    }
}
/// Lists every known peer together with its addresses.
///
/// Fetches the swarm's addresses over the client API on the event loop and
/// prints one peer per line (with its address count) followed by the
/// indented addresses.
fn run_self(context: &mut Context, _: &ArgMatches) {
    // Resolves the old `expect("TODO: not crash here")`: report the failure
    // and exit non-zero instead of panicking.
    let peers = match context.event_loop.run(context.client.swarm().addresses()) {
        Ok(response) => response.peers,
        Err(err) => {
            eprintln!("failed to list addresses: {:?}", err);
            std::process::exit(1);
        }
    };
    for (peer, addrs) in peers {
        println!("{} ({}):", peer, addrs.len());
        for addr in addrs {
            println!("  {}", addr);
        }
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
//
// @generated SignedSource<<76df3b41c55ac9a428ed15aad5841cbc>>
//
// To regenerate this file, run:
// hphp/hack/src/oxidized/regen.sh
use arena_trait::TrivialDrop;
use no_pos_hash::NoPosHash;
use ocamlrep_derive::FromOcamlRep;
use ocamlrep_derive::FromOcamlRepIn;
use ocamlrep_derive::ToOcamlRep;
use serde::Deserialize;
use serde::Serialize;
#[allow(unused_imports)]
use crate::*;
pub use aast_defs::*;
pub use doc_comment::DocComment;
/// Aast.program represents the top-level definitions in a Hack program.
/// ex: Expression annotation type (when typechecking, the inferred dtype)
/// fb: Function body tag (e.g. has naming occurred)
/// en: Environment (tracking state inside functions and classes)
/// hi: Hint annotation (when typechecking it will be the localized type hint or the
/// inferred missing type if the hint is missing)
pub type Program<Ex, Fb, En, Hi> = Vec<Def<Ex, Fb, En, Hi>>;
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub struct Stmt<Ex, Fb, En, Hi>(pub Pos, pub Stmt_<Ex, Fb, En, Hi>);
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum Stmt_<Ex, Fb, En, Hi> {
Fallthrough,
Expr(Box<Expr<Ex, Fb, En, Hi>>),
Break,
Continue,
Throw(Box<Expr<Ex, Fb, En, Hi>>),
Return(Box<Option<Expr<Ex, Fb, En, Hi>>>),
GotoLabel(Box<Pstring>),
Goto(Box<Pstring>),
Awaitall(
Box<(
Vec<(Option<Lid>, Expr<Ex, Fb, En, Hi>)>,
Block<Ex, Fb, En, Hi>,
)>,
),
If(
Box<(
Expr<Ex, Fb, En, Hi>,
Block<Ex, Fb, En, Hi>,
Block<Ex, Fb, En, Hi>,
)>,
),
Do(Box<(Block<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>)>),
While(Box<(Expr<Ex, Fb, En, Hi>, Block<Ex, Fb, En, Hi>)>),
Using(Box<UsingStmt<Ex, Fb, En, Hi>>),
For(
Box<(
Expr<Ex, Fb, En, Hi>,
Expr<Ex, Fb, En, Hi>,
Expr<Ex, Fb, En, Hi>,
Block<Ex, Fb, En, Hi>,
)>,
),
Switch(Box<(Expr<Ex, Fb, En, Hi>, Vec<Case<Ex, Fb, En, Hi>>)>),
Foreach(
Box<(
Expr<Ex, Fb, En, Hi>,
AsExpr<Ex, Fb, En, Hi>,
Block<Ex, Fb, En, Hi>,
)>,
),
Try(
Box<(
Block<Ex, Fb, En, Hi>,
Vec<Catch<Ex, Fb, En, Hi>>,
Block<Ex, Fb, En, Hi>,
)>,
),
Noop,
Block(Block<Ex, Fb, En, Hi>),
Markup(Box<Pstring>),
AssertEnv(Box<(EnvAnnot, LocalIdMap<Ex>)>),
}
#[derive(
Clone,
Copy,
Debug,
Deserialize,
Eq,
FromOcamlRep,
FromOcamlRepIn,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum EnvAnnot {
Join,
Refinement,
}
impl TrivialDrop for EnvAnnot {}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub struct UsingStmt<Ex, Fb, En, Hi> {
pub is_block_scoped: bool,
pub has_await: bool,
pub expr: Expr<Ex, Fb, En, Hi>,
pub block: Block<Ex, Fb, En, Hi>,
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum AsExpr<Ex, Fb, En, Hi> {
AsV(Expr<Ex, Fb, En, Hi>),
AsKv(Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>),
AwaitAsV(Pos, Expr<Ex, Fb, En, Hi>),
AwaitAsKv(Pos, Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>),
}
pub type Block<Ex, Fb, En, Hi> = Vec<Stmt<Ex, Fb, En, Hi>>;
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub struct ClassId<Ex, Fb, En, Hi>(pub Ex, pub ClassId_<Ex, Fb, En, Hi>);
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum ClassId_<Ex, Fb, En, Hi> {
CIparent,
CIself,
CIstatic,
CIexpr(Expr<Ex, Fb, En, Hi>),
CI(Sid),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub struct Expr<Ex, Fb, En, Hi>(pub Ex, pub Expr_<Ex, Fb, En, Hi>);
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum CollectionTarg<Hi> {
CollectionTV(Targ<Hi>),
CollectionTKV(Targ<Hi>, Targ<Hi>),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum FunctionPtrId<Ex, Fb, En, Hi> {
FPId(Sid),
FPClassConst(ClassId<Ex, Fb, En, Hi>, Pstring),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum Expr_<Ex, Fb, En, Hi> {
Darray(
Box<(
Option<(Targ<Hi>, Targ<Hi>)>,
Vec<(Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>)>,
)>,
),
Varray(Box<(Option<Targ<Hi>>, Vec<Expr<Ex, Fb, En, Hi>>)>),
Shape(Vec<(ast_defs::ShapeFieldName, Expr<Ex, Fb, En, Hi>)>),
/// TODO: T38184446 Consolidate collections in AAST
ValCollection(Box<(VcKind, Option<Targ<Hi>>, Vec<Expr<Ex, Fb, En, Hi>>)>),
/// TODO: T38184446 Consolidate collections in AAST
KeyValCollection(
Box<(
KvcKind,
Option<(Targ<Hi>, Targ<Hi>)>,
Vec<Field<Ex, Fb, En, Hi>>,
)>,
),
Null,
This,
True,
False,
Omitted,
Id(Box<Sid>),
Lvar(Box<Lid>),
Dollardollar(Box<Lid>),
Clone(Box<Expr<Ex, Fb, En, Hi>>),
ObjGet(Box<(Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>, OgNullFlavor)>),
ArrayGet(Box<(Expr<Ex, Fb, En, Hi>, Option<Expr<Ex, Fb, En, Hi>>)>),
ClassGet(Box<(ClassId<Ex, Fb, En, Hi>, ClassGetExpr<Ex, Fb, En, Hi>)>),
ClassConst(Box<(ClassId<Ex, Fb, En, Hi>, Pstring)>),
Call(
Box<(
Expr<Ex, Fb, En, Hi>,
Vec<Targ<Hi>>,
Vec<Expr<Ex, Fb, En, Hi>>,
Option<Expr<Ex, Fb, En, Hi>>,
)>,
),
FunctionPointer(Box<(FunctionPtrId<Ex, Fb, En, Hi>, Vec<Targ<Hi>>)>),
Int(String),
Float(String),
String(bstr::BString),
String2(Vec<Expr<Ex, Fb, En, Hi>>),
PrefixedString(Box<(String, Expr<Ex, Fb, En, Hi>)>),
Yield(Box<Afield<Ex, Fb, En, Hi>>),
YieldBreak,
Await(Box<Expr<Ex, Fb, En, Hi>>),
Suspend(Box<Expr<Ex, Fb, En, Hi>>),
List(Vec<Expr<Ex, Fb, En, Hi>>),
ExprList(Vec<Expr<Ex, Fb, En, Hi>>),
Cast(Box<(Hint, Expr<Ex, Fb, En, Hi>)>),
Unop(Box<(ast_defs::Uop, Expr<Ex, Fb, En, Hi>)>),
Binop(Box<(ast_defs::Bop, Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>)>),
/// The lid is the ID of the $$ that is implicitly declared by this pipe.
Pipe(Box<(Lid, Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>)>),
Eif(
Box<(
Expr<Ex, Fb, En, Hi>,
Option<Expr<Ex, Fb, En, Hi>>,
Expr<Ex, Fb, En, Hi>,
)>,
),
Is(Box<(Expr<Ex, Fb, En, Hi>, Hint)>),
As(Box<(Expr<Ex, Fb, En, Hi>, Hint, bool)>),
New(
Box<(
ClassId<Ex, Fb, En, Hi>,
Vec<Targ<Hi>>,
Vec<Expr<Ex, Fb, En, Hi>>,
Option<Expr<Ex, Fb, En, Hi>>,
Ex,
)>,
),
Record(Box<(Sid, Vec<(Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>)>)>),
Efun(Box<(Fun_<Ex, Fb, En, Hi>, Vec<Lid>)>),
Lfun(Box<(Fun_<Ex, Fb, En, Hi>, Vec<Lid>)>),
Xml(
Box<(
Sid,
Vec<XhpAttribute<Ex, Fb, En, Hi>>,
Vec<Expr<Ex, Fb, En, Hi>>,
)>,
),
Callconv(Box<(ast_defs::ParamKind, Expr<Ex, Fb, En, Hi>)>),
Import(Box<(ImportFlavor, Expr<Ex, Fb, En, Hi>)>),
/// TODO: T38184446 Consolidate collections in AAST
Collection(Box<(Sid, Option<CollectionTarg<Hi>>, Vec<Afield<Ex, Fb, En, Hi>>)>),
BracedExpr(Box<Expr<Ex, Fb, En, Hi>>),
ParenthesizedExpr(Box<Expr<Ex, Fb, En, Hi>>),
ExpressionTree(Box<(Hint, Expr<Ex, Fb, En, Hi>, Option<Expr<Ex, Fb, En, Hi>>)>),
Lplaceholder(Box<Pos>),
FunId(Box<Sid>),
MethodId(Box<(Expr<Ex, Fb, En, Hi>, Pstring)>),
/// meth_caller('Class name', 'method name')
MethodCaller(Box<(Sid, Pstring)>),
SmethodId(Box<(ClassId<Ex, Fb, En, Hi>, Pstring)>),
Pair(
Box<(
Option<(Targ<Hi>, Targ<Hi>)>,
Expr<Ex, Fb, En, Hi>,
Expr<Ex, Fb, En, Hi>,
)>,
),
Assert(Box<AssertExpr<Ex, Fb, En, Hi>>),
PUAtom(String),
PUIdentifier(Box<(ClassId<Ex, Fb, En, Hi>, Pstring, Pstring)>),
ETSplice(Box<Expr<Ex, Fb, En, Hi>>),
Any,
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum ClassGetExpr<Ex, Fb, En, Hi> {
CGstring(Pstring),
CGexpr(Expr<Ex, Fb, En, Hi>),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum AssertExpr<Ex, Fb, En, Hi> {
AEAssert(Expr<Ex, Fb, En, Hi>),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum Case<Ex, Fb, En, Hi> {
Default(Pos, Block<Ex, Fb, En, Hi>),
Case(Expr<Ex, Fb, En, Hi>, Block<Ex, Fb, En, Hi>),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub struct Catch<Ex, Fb, En, Hi>(pub Sid, pub Lid, pub Block<Ex, Fb, En, Hi>);
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub struct Field<Ex, Fb, En, Hi>(pub Expr<Ex, Fb, En, Hi>, pub Expr<Ex, Fb, En, Hi>);
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum Afield<Ex, Fb, En, Hi> {
AFvalue(Expr<Ex, Fb, En, Hi>),
AFkvalue(Expr<Ex, Fb, En, Hi>, Expr<Ex, Fb, En, Hi>),
}
#[derive(
Clone,
Debug,
Deserialize,
Eq,
FromOcamlRep,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum XhpAttribute<Ex, Fb, En, Hi> {
XhpSimple(Pstring, Expr<Ex, Fb, En, Hi>),
XhpSpread(Expr<Ex, Fb, En, Hi>),
}
pub type IsVariadic = bool;
/// A single formal parameter of a function or method.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct FunParam<Ex, Fb, En, Hi> {
    pub annotation: Ex,
    pub type_hint: TypeHint<Hi>,
    pub is_variadic: IsVariadic,
    pub pos: Pos,
    pub name: String,
    pub expr: Option<Expr<Ex, Fb, En, Hi>>,
    pub callconv: Option<ast_defs::ParamKind>,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub visibility: Option<Visibility>,
}

/// does function take varying number of args?
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum FunVariadicity<Ex, Fb, En, Hi> {
    /// PHP5.6 ...$args finishes the func declaration
    FVvariadicArg(FunParam<Ex, Fb, En, Hi>),
    /// HH ... finishes the declaration; deprecate for ...$args?
    FVellipsis(Pos),
    /// standard non variadic function
    FVnonVariadic,
}

/// A function declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Fun_<Ex, Fb, En, Hi> {
    pub span: Pos,
    pub annotation: En,
    pub mode: file_info::Mode,
    pub ret: TypeHint<Hi>,
    pub name: Sid,
    pub tparams: Vec<Tparam<Ex, Fb, En, Hi>>,
    pub where_constraints: Vec<WhereConstraintHint>,
    pub variadic: FunVariadicity<Ex, Fb, En, Hi>,
    pub params: Vec<FunParam<Ex, Fb, En, Hi>>,
    pub cap: TypeHint<Hi>,
    pub unsafe_cap: TypeHint<Hi>,
    pub body: FuncBody<Ex, Fb, En, Hi>,
    pub fun_kind: ast_defs::FunKind,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub file_attributes: Vec<FileAttribute<Ex, Fb, En, Hi>>,
    /// true if this declaration has no body because it is an
    /// external function declaration (e.g. from an HHI file)
    pub external: bool,
    pub namespace: Nsenv,
    pub doc_comment: Option<DocComment>,
    pub static_: bool,
}
/// Naming has two phases and the annotation helps to indicate the phase.
/// In the first pass, it will perform naming on everything except for function
/// and method bodies and collect information needed. Then, another round of
/// naming is performed where function bodies are named. Thus, naming will
/// have named and unnamed variants of the annotation.
/// See BodyNamingAnnotation in nast.ml and the comment in naming.ml
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct FuncBody<Ex, Fb, En, Hi> {
    pub ast: Block<Ex, Fb, En, Hi>,
    pub annotation: Fb,
}

/// A type annotation is two things:
/// - the localized hint, or if the hint is missing, the inferred type
/// - The typehint associated to this expression if it exists
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct TypeHint<Hi>(pub Hi, pub TypeHint_);

/// Explicit type argument to function, constructor, or collection literal.
/// 'hi = unit in NAST
/// 'hi = Typing_defs.(locl ty) in TAST,
/// and is used to record inferred type arguments, with wildcard hint.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Targ<Hi>(pub Hi, pub Hint);

pub type TypeHint_ = Option<Hint>;

/// A user attribute attached to a declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct UserAttribute<Ex, Fb, En, Hi> {
    pub name: Sid,
    /// user attributes are restricted to scalar values
    pub params: Vec<Expr<Ex, Fb, En, Hi>>,
}
/// A file-level attribute together with the namespace it was declared in.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct FileAttribute<Ex, Fb, En, Hi> {
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub namespace: Nsenv,
}

/// A declared type parameter: variance, name, nested (higher-kinded)
/// parameters, constraints, reification, and user attributes.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Tparam<Ex, Fb, En, Hi> {
    pub variance: ast_defs::Variance,
    pub name: Sid,
    pub parameters: Vec<Tparam<Ex, Fb, En, Hi>>,
    pub constraints: Vec<(ast_defs::ConstraintKind, Hint)>,
    pub reified: ReifyKind,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
}

/// A trait `use ... as ...` aliasing clause.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct UseAsAlias(
    pub Option<Sid>,
    pub Pstring,
    pub Option<Sid>,
    pub Vec<UseAsVisibility>,
);

/// A trait `insteadof` conflict-resolution clause.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct InsteadofAlias(pub Sid, pub Pstring, pub Vec<Sid>);

pub type IsExtends = bool;

/// Identifier assigned at emission time, or `Anonymous` when none was.
#[derive(Clone, Copy, Debug, Deserialize, Eq, FromOcamlRep, FromOcamlRepIn, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum EmitId {
    EmitId(isize),
    Anonymous,
}
/// A class-like declaration (class, interface, trait, enum, XHP class).
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Class_<Ex, Fb, En, Hi> {
    pub span: Pos,
    pub annotation: En,
    pub mode: file_info::Mode,
    pub final_: bool,
    pub is_xhp: bool,
    pub has_xhp_keyword: bool,
    pub kind: ast_defs::ClassKind,
    pub name: Sid,
    /// The type parameters of a class A<T> (T is the parameter)
    pub tparams: Vec<Tparam<Ex, Fb, En, Hi>>,
    pub extends: Vec<ClassHint>,
    pub uses: Vec<TraitHint>,
    pub use_as_alias: Vec<UseAsAlias>,
    pub insteadof_alias: Vec<InsteadofAlias>,
    pub xhp_attr_uses: Vec<XhpAttrHint>,
    pub xhp_category: Option<(Pos, Vec<Pstring>)>,
    pub reqs: Vec<(ClassHint, IsExtends)>,
    pub implements: Vec<ClassHint>,
    pub where_constraints: Vec<WhereConstraintHint>,
    pub consts: Vec<ClassConst<Ex, Fb, En, Hi>>,
    pub typeconsts: Vec<ClassTypeconst<Ex, Fb, En, Hi>>,
    pub vars: Vec<ClassVar<Ex, Fb, En, Hi>>,
    pub methods: Vec<Method_<Ex, Fb, En, Hi>>,
    pub attributes: Vec<ClassAttr<Ex, Fb, En, Hi>>,
    pub xhp_children: Vec<(Pos, XhpChild)>,
    pub xhp_attrs: Vec<XhpAttr<Ex, Fb, En, Hi>>,
    pub namespace: Nsenv,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub file_attributes: Vec<FileAttribute<Ex, Fb, En, Hi>>,
    pub enum_: Option<Enum_>,
    pub pu_enums: Vec<PuEnum<Ex, Fb, En, Hi>>,
    pub doc_comment: Option<DocComment>,
    pub emit_id: Option<EmitId>,
}

pub type ClassHint = Hint;
pub type TraitHint = Hint;
pub type XhpAttrHint = Hint;

/// Tag on an XHP attribute declaration.
#[derive(Clone, Copy, Debug, Deserialize, Eq, FromOcamlRep, FromOcamlRepIn, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum XhpAttrTag {
    Required,
    LateInit,
}
impl TrivialDrop for XhpAttrTag {}

/// A declared XHP attribute: type hint, backing class variable, optional
/// tag, and an optional positioned list of expressions.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct XhpAttr<Ex, Fb, En, Hi>(
    pub TypeHint<Hi>,
    pub ClassVar<Ex, Fb, En, Hi>,
    pub Option<XhpAttrTag>,
    pub Option<(Pos, Vec<Expr<Ex, Fb, En, Hi>>)>,
);
/// A class attribute: either just a name, or a name with a field spec.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum ClassAttr<Ex, Fb, En, Hi> {
    CAName(Sid),
    CAField(CaField<Ex, Fb, En, Hi>),
}

/// The field form of a class attribute.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct CaField<Ex, Fb, En, Hi> {
    pub type_: CaType,
    pub id: Sid,
    pub value: Option<Expr<Ex, Fb, En, Hi>>,
    pub required: bool,
}

/// Type of a `CaField`: a hint, or an enumeration of string values.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum CaType {
    CAHint(Hint),
    CAEnum(Vec<String>),
}

/// A class constant declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct ClassConst<Ex, Fb, En, Hi> {
    pub type_: Option<Hint>,
    pub id: Sid,
    /// expr = None indicates an abstract const
    pub expr: Option<Expr<Ex, Fb, En, Hi>>,
    pub doc_comment: Option<DocComment>,
}

/// Abstractness of a type constant; `TCAbstract` may carry a hint.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum TypeconstAbstractKind {
    TCAbstract(Option<Hint>),
    TCPartiallyAbstract,
    TCConcrete,
}
/// This represents a type const definition. If a type const is abstract then
/// then the type hint acts as a constraint. Any concrete definition of the
/// type const must satisfy the constraint.
///
/// If the type const is not abstract then a type must be specified.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct ClassTypeconst<Ex, Fb, En, Hi> {
    pub abstract_: TypeconstAbstractKind,
    pub name: Sid,
    pub constraint: Option<Hint>,
    pub type_: Option<Hint>,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub span: Pos,
    pub doc_comment: Option<DocComment>,
}

/// Extra information recorded for an XHP attribute.
#[derive(Clone, Copy, Debug, Deserialize, Eq, FromOcamlRep, FromOcamlRepIn, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct XhpAttrInfo {
    pub xai_tag: Option<XhpAttrTag>,
}

/// A class property (instance or static variable) declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct ClassVar<Ex, Fb, En, Hi> {
    pub final_: bool,
    pub xhp_attr: Option<XhpAttrInfo>,
    pub abstract_: bool,
    pub visibility: Visibility,
    pub type_: TypeHint<Hi>,
    pub id: Sid,
    pub expr: Option<Expr<Ex, Fb, En, Hi>>,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub doc_comment: Option<DocComment>,
    pub is_promoted_variadic: bool,
    pub is_static: bool,
    pub span: Pos,
}

/// A method declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Method_<Ex, Fb, En, Hi> {
    pub span: Pos,
    pub annotation: En,
    pub final_: bool,
    pub abstract_: bool,
    pub static_: bool,
    pub visibility: Visibility,
    pub name: Sid,
    pub tparams: Vec<Tparam<Ex, Fb, En, Hi>>,
    pub where_constraints: Vec<WhereConstraintHint>,
    pub variadic: FunVariadicity<Ex, Fb, En, Hi>,
    pub params: Vec<FunParam<Ex, Fb, En, Hi>>,
    pub cap: TypeHint<Hi>,
    pub unsafe_cap: TypeHint<Hi>,
    pub body: FuncBody<Ex, Fb, En, Hi>,
    pub fun_kind: ast_defs::FunKind,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub ret: TypeHint<Hi>,
    /// true if this declaration has no body because it is an external method
    /// declaration (e.g. from an HHI file)
    pub external: bool,
    pub doc_comment: Option<DocComment>,
}
pub type Nsenv = ocamlrep::rc::RcOc<namespace_env::Env>;

/// A `type`/`newtype` alias declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Typedef<Ex, Fb, En, Hi> {
    pub annotation: En,
    pub name: Sid,
    pub tparams: Vec<Tparam<Ex, Fb, En, Hi>>,
    pub constraint: Option<Hint>,
    pub kind: Hint,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub mode: file_info::Mode,
    pub vis: TypedefVisibility,
    pub namespace: Nsenv,
    pub span: Pos,
    pub emit_id: Option<EmitId>,
}

/// A global constant declaration.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct Gconst<Ex, Fb, En, Hi> {
    pub annotation: En,
    pub mode: file_info::Mode,
    pub name: Sid,
    pub type_: Option<Hint>,
    pub value: Expr<Ex, Fb, En, Hi>,
    pub namespace: Nsenv,
    pub span: Pos,
    pub emit_id: Option<EmitId>,
}

/// A record declaration: name, optional parent, and its fields
/// (each field is a name, a hint, and an optional default expression).
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct RecordDef<Ex, Fb, En, Hi> {
    pub annotation: En,
    pub name: Sid,
    pub extends: Option<RecordHint>,
    pub abstract_: bool,
    pub fields: Vec<(Sid, Hint, Option<Expr<Ex, Fb, En, Hi>>)>,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub namespace: Nsenv,
    pub span: Pos,
    pub doc_comment: Option<DocComment>,
    pub emit_id: Option<EmitId>,
}
pub type RecordHint = Hint;

/// Pocket Universe Enumeration, e.g.
///
/// ```
/// enum Foo { // pu_name
///   // pu_case_types
///   case type T0;
///   case type T1;
///
///   // pu_case_values
///   case ?T0 default_value;
///   case T1 foo;
///
///   // pu_members
///   :@A( // pum_atom
///     // pum_types
///     type T0 = string,
///     type T1 = int,
///
///     // pum_exprs
///     default_value = null,
///     foo = 42,
///   );
///   :@B( ... )
///   ...
/// }
/// ```
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct PuEnum<Ex, Fb, En, Hi> {
    pub annotation: En,
    pub name: Sid,
    pub user_attributes: Vec<UserAttribute<Ex, Fb, En, Hi>>,
    pub is_final: bool,
    pub case_types: Vec<Tparam<Ex, Fb, En, Hi>>,
    pub case_values: Vec<PuCaseValue>,
    pub members: Vec<PuMember<Ex, Fb, En, Hi>>,
}

/// A `case <hint> <name>` declaration inside a pocket universe.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct PuCaseValue(pub Sid, pub Hint);

/// One `:@Atom(...)` member of a pocket universe.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub struct PuMember<Ex, Fb, En, Hi> {
    pub atom: Sid,
    pub types: Vec<(Sid, Hint)>,
    pub exprs: Vec<(Sid, Expr<Ex, Fb, En, Hi>)>,
}
pub type FunDef<Ex, Fb, En, Hi> = Fun_<Ex, Fb, En, Hi>;

/// A top-level definition in a program.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum Def<Ex, Fb, En, Hi> {
    Fun(Box<FunDef<Ex, Fb, En, Hi>>),
    Class(Box<Class_<Ex, Fb, En, Hi>>),
    RecordDef(Box<RecordDef<Ex, Fb, En, Hi>>),
    Stmt(Box<Stmt<Ex, Fb, En, Hi>>),
    Typedef(Box<Typedef<Ex, Fb, En, Hi>>),
    Constant(Box<Gconst<Ex, Fb, En, Hi>>),
    Namespace(Box<(Sid, Program<Ex, Fb, En, Hi>)>),
    NamespaceUse(Vec<(NsKind, Sid, Sid)>),
    SetNamespaceEnv(Box<Nsenv>),
    FileAttributes(Box<FileAttribute<Ex, Fb, En, Hi>>),
}

/// What kind of name a `use` clause imports.
#[derive(Clone, Copy, Debug, Deserialize, Eq, FromOcamlRep, FromOcamlRepIn, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum NsKind {
    NSNamespace,
    NSClass,
    NSClassAndNamespace,
    NSFun,
    NSConst,
}
impl TrivialDrop for NsKind {}

/// Validity of a `break`/`continue` level argument.
#[derive(Clone, Debug, Deserialize, Eq, FromOcamlRep, Hash, NoPosHash, Ord, PartialEq, PartialOrd, Serialize, ToOcamlRep)]
pub enum BreakContinueLevel {
    LevelOk(Option<isize>),
    LevelNonLiteral,
    LevelNonPositive,
}
|
use serenity::framework::standard::{macros::command, Args, CommandResult};
use serenity::model::prelude::{Message, UserId};
use serenity::prelude::Context;
use serenity::utils::parse_username;
/// Resolve a raw command argument into a `UserId`.
///
/// Accepts either a mention (starts with `<`, delegated to
/// `parse_username`) or a bare numeric id; returns `None` when the
/// argument is neither.
fn parse_id(s: String) -> Option<UserId> {
    let raw = match s.starts_with('<') {
        true => parse_username(s)?,
        false => s.parse().ok()?,
    };
    Some(UserId(raw))
}
/// Posts an embed with the avatar of each mentioned/listed user, or of the
/// message author when no arguments parse to a user id.
#[command]
async fn avatar(context: &Context, message: &Message, mut args: Args) -> CommandResult {
    // Collect every argument that parses to a user id (mention or raw id).
    let mut user_ids: Vec<UserId> = args
        .iter::<String>()
        .filter_map(Result::ok)
        .filter_map(parse_id)
        .collect();
    if user_ids.is_empty() {
        user_ids.push(message.author.id);
    }
    for id in user_ids {
        // Skip ids that don't resolve to a user instead of aborting the command.
        let user = match id.to_user(&context).await {
            Ok(user) => user,
            Err(_) => continue,
        };
        let avatar_url = user.face();
        // Prefer the guild nickname, but don't panic outside a guild:
        // `message.guild_id` is `None` in DMs, so the previous `.unwrap()`
        // crashed the command there. Fall back to `name#discriminator`.
        let user_nick = match message.guild_id {
            Some(guild_id) => user.nick_in(&context, guild_id).await,
            None => None,
        }
        .unwrap_or_else(|| format!("{}#{}", user.name, user.discriminator));
        // Send result is intentionally ignored: a failed send for one user
        // shouldn't stop the remaining avatars from being posted.
        let _sent = message
            .channel_id
            .send_message(&context.http, |m| {
                m.embed(|e| {
                    e.title(format!("{}'s avatar", user_nick))
                        .url(&avatar_url)
                        .image(avatar_url)
                })
            })
            .await;
    }
    Ok(())
}
|
#[macro_use(untyped)]
extern crate untyped;
use untyped::{eval, Term};
/// Builds a lambda term with the `untyped!` macro, evaluates it, and prints
/// both the source and the reduced term.
fn main() {
    let source = untyped! { ((λ x . 0) ((λ x . 0) (λ z . ((λ x . 0) 0)))) };
    let reduced = eval(&source);
    println!("Source term: {}", source);
    println!("Evaluated term: {}", reduced);
}
|
#![feature(test)]
extern crate libc;
extern crate test;
use std::ffi::CString;
use std::os::raw::c_char;
/// Returns a NUL-terminated string identifying this library.
///
/// The buffer is intentionally leaked so the pointer remains valid after the
/// call returns; C callers must treat it as read-only and must not free it.
#[no_mangle]
pub extern fn about() -> *const c_char {
    // `into_raw` hands the allocation to the caller side in one step --
    // clearer and safer than the old `as_ptr()` + `mem::forget()` pattern,
    // where getting the ordering wrong yields a dangling pointer.
    let char_str = CString::new("rust_ffi").expect("string literal contains no NUL byte");
    char_str.into_raw()
}
/// Plain-old-data struct shared with C; `#[repr(C)]` guarantees the field
/// layout matches the equivalent C struct declaration.
#[repr(C)]
pub struct Foo {
    a: i32,
    b: i32,
    c: i32,
}

/// Returns a copy of `foo` with `add` added to every field.
#[no_mangle]
pub extern fn increment_foo(foo: Foo, add: i32) -> Foo {
    Foo {
        a: foo.a + add,
        b: foo.b + add,
        c: foo.c + add,
    }
}

/// Overwrites all three fields of `*foo` with `value`.
/// A null `foo` is a no-op instead of undefined behavior.
#[no_mangle]
pub extern fn fill_foo(foo: *mut Foo, value: i32) {
    // Guard against a null pointer from the C side before dereferencing.
    if foo.is_null() {
        return;
    }
    unsafe {
        (*foo).a = value;
        (*foo).b = value;
        (*foo).c = value;
    }
}

/// Callback type supplied by C. It must be `extern "C"`: a plain Rust
/// `fn(i32) -> i32` uses the unspecified Rust ABI and is not FFI-safe
/// (rustc flags this via the `improper_ctypes_definitions` lint).
type Map = extern "C" fn(i32) -> i32;

/// Applies the C-supplied callback to `v` and returns the result.
#[no_mangle]
pub extern fn map(v: i32, map: Map) -> i32 {
    map(v)
}

/// Heap-allocates a `Foo` and transfers ownership to the caller.
/// Pair every call with `release_foo` to avoid leaking.
#[no_mangle]
pub extern fn allocate_foo() -> *mut Foo {
    let foo = Box::new(Foo {
        a: 1,
        b: 2,
        c: 3,
    });
    Box::into_raw(foo)
}

/// Reclaims a `Foo` previously returned by `allocate_foo`.
/// Accepts null as a no-op, mirroring `free(NULL)` semantics; passing the
/// same non-null pointer twice is undefined behavior (double free).
#[no_mangle]
pub extern fn release_foo(ptr: *mut Foo) {
    if ptr.is_null() {
        return;
    }
    // SAFETY: `ptr` came from `Box::into_raw` in `allocate_foo` and has not
    // been released before (caller contract).
    unsafe {
        drop(Box::from_raw(ptr));
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use test::Bencher;
    // Benchmark (nightly-only, enabled by `#![feature(test)]` at the top of
    // the file): repeatedly overwrite the same stack-allocated `Foo` to
    // measure the raw per-call cost of `fill_foo`.
    #[bench]
    fn bench_fill_foo(b: &mut Bencher) {
        let mut foo = Foo{
            a: 0,
            b: 0,
            c: 0
        };
        b.iter(|| {
            // 10k calls per iteration so the timing isn't dominated by
            // `Bencher` loop overhead.
            for i in 0..10000 {
                fill_foo(&mut foo, i)
            }
        });
    }
}
/// Sorts a fixed sample array (by reference) and prints the result.
fn main() {
    let numbers = [4, 65, 2, -31, 0, 99, 2, 83, 782, 1];
    let sorted = quick_sort(numbers.iter());
    println!("{:?}\n", sorted);
}
/// Recursive quicksort over any iterator of `PartialOrd` items.
///
/// The first item acts as the pivot; items strictly below it are sorted
/// recursively, then the pivot, then the rest (so duplicates of the pivot
/// land after it). An empty iterator yields an empty `Vec`.
fn quick_sort<T, E>(mut v: T) -> Vec<E>
where
    T: Iterator<Item = E>,
    E: PartialOrd,
{
    // Empty input: nothing to sort.
    let pivot = match v.next() {
        Some(first) => first,
        None => return Vec::new(),
    };
    // Split the remainder around the pivot.
    let (below, above): (Vec<_>, Vec<_>) = v.partition(|item| item < &pivot);
    let mut sorted = quick_sort(below.into_iter());
    sorted.push(pivot);
    sorted.extend(quick_sort(above.into_iter()));
    sorted
}
/*!
```rudra-poc
[target]
crate = "internment"
version = "0.3.13"
[report]
issue_url = "https://github.com/droundy/internment/issues/20"
issue_date = 2021-03-03
rustsec_url = "https://github.com/RustSec/advisory-db/pull/807"
rustsec_id = "RUSTSEC-2021-0036"
[[bugs]]
analyzer = "SendSyncVariance"
bug_class = "SendSyncVariance"
rudra_report_locations = ["src/lib.rs:116:1: 116:37"]
```
!*/
#![forbid(unsafe_code)]
use internment::Intern;
use std::borrow::Borrow;
use std::cell::Cell;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
// A simple tagged union used to demonstrate problems with data races in Cell.
#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]
enum RefOrInt {
    Ref(&'static u64),
    Int(u64),
}
static SOME_INT: u64 = 123;
// `Foo` is Hash + Eq (as `Intern` requires) but contains a `Cell`, so it is
// interiorly mutable and deliberately not thread-safe.
#[derive(Debug, PartialEq, Eq, Clone)]
struct Foo(Cell<RefOrInt>);
impl Hash for Foo {
    // Hashes the *current* cell contents — a value that another thread can
    // change concurrently, which is exactly the hazard this PoC exploits.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.get().hash(state);
    }
}
impl Foo {
    // Mutates the cell through a shared reference (interior mutability).
    fn set(&self, v: RefOrInt) {
        self.0.set(v);
    }
    // Snapshots the current cell contents by copy.
    fn get(&self) -> RefOrInt {
        self.0.get()
    }
}
// Proof of concept for RUSTSEC-2021-0036: per the report metadata above,
// `internment::Intern` implements Send/Sync too permissively, letting a
// non-Sync `Foo` be shared across threads and raced. Do NOT "fix" the race
// here — triggering it is the point. This function never returns normally.
fn main() {
    let non_sync = Foo(Cell::new(RefOrInt::Ref(&SOME_INT)));
    let i0 = Arc::new(Intern::new(non_sync));
    let i1 = i0.clone();
    std::thread::spawn(move || {
        let i1 = i1;
        loop {
            // Repeatedly write Ref(&addr) and Int(0xdeadbeef) into the cell.
            i1.set(RefOrInt::Ref(&SOME_INT));
            i1.set(RefOrInt::Int(0xdeadbeef));
        }
    });
    loop {
        if let RefOrInt::Ref(addr) = i0.get() {
            // Hope that between the time we pattern match the object as a
            // `Ref`, it gets written to by the other thread.
            if addr as *const u64 == &SOME_INT as *const u64 {
                continue;
            }
            // Torn read: the discriminant said `Ref` but the payload is the
            // `Int` bit-pattern, so `addr` is a bogus "reference".
            println!("Pointer is now: {:p}", addr);
            println!("Dereferencing addr will now segfault: {}", *addr);
        }
    }
}
|
use std::{borrow::Cow, io, result, str::Utf8Error, string::FromUtf8Error};
use thiserror::Error;
#[cfg(feature = "tokio_io")]
use tokio::time::error::Elapsed;
use url::ParseError;
/// Clickhouse error codes
pub mod codes;
/// Result type alias for this library.
pub type Result<T> = result::Result<T, Error>;
/// This type enumerates library errors.
#[derive(Debug, Error)]
pub enum Error {
    /// Failure in the driver itself (see [`DriverError`]).
    #[error("Driver error: `{}`", _0)]
    Driver(#[source] DriverError),
    /// Low-level input/output failure.
    #[error("Input/output error: `{}`", _0)]
    Io(#[source] io::Error),
    /// Failure establishing or maintaining a connection.
    #[error("Connections error: `{}`", _0)]
    Connection(#[source] ConnectionError),
    /// Free-form error carried as a message.
    #[error("Other error: `{}`", _0)]
    Other(Cow<'static, str>),
    /// Error reported by the Clickhouse server itself.
    #[error("Server error: `{}`", _0)]
    Server(#[source] ServerError),
    /// Malformed or unsupported connection URL.
    #[error("URL error: `{}`", _0)]
    Url(#[source] UrlError),
    /// Failure converting a SQL value into a Rust value.
    #[error("From SQL error: `{}`", _0)]
    FromSql(#[source] FromSqlError),
}
/// This type represents Clickhouse server error.
#[derive(Debug, Error, Clone)]
#[error("ERROR {} ({}): {}", name, code, message)]
pub struct ServerError {
    pub code: u32,
    pub name: String,
    pub message: String,
    pub stack_trace: String,
}
/// This type enumerates connection errors.
#[derive(Debug, Error)]
pub enum ConnectionError {
    #[error("TLS connection requires hostname to be provided")]
    TlsHostNotProvided,
    #[error("Input/output error: `{}`", _0)]
    IoError(#[source] io::Error),
    // Only compiled when the `tls` feature is enabled.
    #[cfg(feature = "tls")]
    #[error("TLS connection error: `{}`", _0)]
    TlsError(#[source] native_tls::Error),
}
/// This type enumerates connection URL errors.
#[derive(Debug, Error)]
pub enum UrlError {
    #[error("Invalid or incomplete connection URL")]
    Invalid,
    #[error(
        "Invalid value `{}' for connection URL parameter `{}'",
        value, param
    )]
    InvalidParamValue { param: String, value: String },
    #[error("URL parse error: {}", _0)]
    Parse(#[source] ParseError),
    #[error("Unknown connection URL parameter `{}'", param)]
    UnknownParameter { param: String },
    #[error("Unsupported connection URL scheme `{}'", scheme)]
    UnsupportedScheme { scheme: String },
}
/// This type enumerates driver errors.
#[derive(Debug, Error)]
pub enum DriverError {
    #[error("Varint overflows a 64-bit integer.")]
    Overflow,
    #[error("Unknown packet 0x{:x}.", packet)]
    UnknownPacket { packet: u64 },
    #[error("Unexpected packet.")]
    UnexpectedPacket,
    #[error("Timeout error.")]
    Timeout,
    // Wraps the std UTF-8 error; constructed via `From<Utf8Error> for Error`.
    #[error("Invalid utf-8 sequence.")]
    Utf8Error(Utf8Error),
}
/// This type enumerates cast from sql type errors.
#[derive(Debug, Error)]
pub enum FromSqlError {
    #[error("SqlType::{} cannot be cast to {}.", src, dst)]
    InvalidType {
        src: Cow<'static, str>,
        dst: Cow<'static, str>,
    },
    #[error("Out of range.")]
    OutOfRange,
    #[error("Unsupported operation.")]
    UnsupportedOperation,
}
impl Error {
pub(crate) fn is_would_block(&self) -> bool {
if let Error::Io(ref e) = self {
if e.kind() == io::ErrorKind::WouldBlock {
return true;
}
}
false
}
}
// Conversions from each concrete error family into the top-level `Error`,
// so `?` can propagate them at API boundaries.
impl From<ConnectionError> for Error {
    fn from(error: ConnectionError) -> Self {
        Error::Connection(error)
    }
}
#[cfg(feature = "tls")]
impl From<native_tls::Error> for ConnectionError {
    fn from(error: native_tls::Error) -> Self {
        ConnectionError::TlsError(error)
    }
}
impl From<DriverError> for Error {
    fn from(err: DriverError) -> Self {
        Error::Driver(err)
    }
}
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Self {
        Error::Io(err)
    }
}
impl From<ServerError> for Error {
    fn from(err: ServerError) -> Self {
        Error::Server(err)
    }
}
impl From<UrlError> for Error {
    fn from(err: UrlError) -> Self {
        Error::Url(err)
    }
}
impl From<String> for Error {
    fn from(err: String) -> Self {
        Error::Other(Cow::from(err))
    }
}
impl From<&str> for Error {
    fn from(err: &str) -> Self {
        Error::Other(err.to_string().into())
    }
}
impl From<FromUtf8Error> for Error {
    fn from(err: FromUtf8Error) -> Self {
        Error::Other(err.to_string().into())
    }
}
// An elapsed tokio timeout is surfaced as a driver-level timeout.
#[cfg(feature = "tokio_io")]
impl From<Elapsed> for Error {
    fn from(_err: Elapsed) -> Self {
        Error::Driver(DriverError::Timeout)
    }
}
impl From<ParseError> for Error {
    fn from(err: ParseError) -> Self {
        Error::Url(UrlError::Parse(err))
    }
}
// Back-conversion for callers that want an `io::Error`: an `Error::Io` is
// unwrapped directly (no double wrapping); everything else is stringified.
impl From<Error> for io::Error {
    fn from(err: Error) -> Self {
        match err {
            Error::Io(error) => error,
            e => io::Error::new(io::ErrorKind::Other, e.to_string()),
        }
    }
}
impl From<Utf8Error> for Error {
    fn from(err: Utf8Error) -> Self {
        Error::Driver(DriverError::Utf8Error(err))
    }
}
#[cfg(test)]
mod tests {
    // Both tests guard against infinite recursion in the `Display`/`Error`
    // conversions (e.g. an `Error -> io::Error` conversion that re-enters
    // `to_string`), by checking a plain message round-trips unchanged.
    #[test]
    fn to_std_error_without_recursion() {
        let src_err: super::Error = From::from("Somth went wrong.");
        let dst_err: Box<dyn std::error::Error> = src_err.into();
        assert_eq!(dst_err.to_string(), "Other error: `Somth went wrong.`");
    }
    #[test]
    fn to_io_error_without_recursion() {
        let src_err: super::Error = From::from("Somth went wrong.");
        let dst_err: std::io::Error = src_err.into();
        assert_eq!(dst_err.to_string(), "Other error: `Somth went wrong.`");
    }
}
|
use lldb_sys as sys;
use sys::{RunMode, SBErrorRef, SBThreadRef, DescriptionLevel};
use core::fmt;
use std::{ffi::{CString, CStr}, os::raw::c_char, thread::{self, sleep}, time::Duration};
/// Thin wrapper over the lldb `SBError` handle.
struct SBError {
    raw: SBErrorRef,
}
impl SBError {
    /// Allocates a fresh error object (initially in the success state).
    fn new() -> Self {
        let raw = unsafe { sys::CreateSBError() };
        SBError { raw }
    }
    /// True when the underlying error records no failure.
    fn is_success(&self) -> bool {
        unsafe { sys::SBErrorSuccess(self.raw) }
    }
    /// Wraps an existing raw handle.
    fn from(raw: SBErrorRef) -> Self {
        SBError { raw }
    }
    /// Converts the C-style status into an idiomatic `Result`.
    fn into_result(self) -> Result<(), SBError> {
        match self.is_success() {
            true => Ok(()),
            false => Err(self),
        }
    }
}
/// Thin wrapper over the lldb `SBThread` handle.
struct SBThread {
    raw: SBThreadRef,
}
impl SBThread {
    // Source-level step over the current statement.
    fn step_over(&self, stop_other_threads: RunMode) -> Result<(), SBError> {
        let error = SBError::new();
        unsafe { sys::SBThreadStepOver(self.raw, stop_other_threads, error.raw) }
        if error.is_success() {
            Ok(())
        } else {
            Err(error)
        }
    }
    // Source-level step into the current statement.
    // NOTE(review): `error.raw` is never passed to the C call here (unlike
    // step_over/step_out), so `error` keeps its freshly-created state and
    // this likely always returns Ok — confirm whether lldb-sys exposes a
    // StepInto variant that reports errors.
    fn step_into(&self, stop_other_threads: RunMode) -> Result<(), SBError> {
        let error = SBError::new();
        unsafe { sys::SBThreadStepInto(self.raw, stop_other_threads) }
        if error.is_success() {
            Ok(())
        } else {
            Err(error)
        }
    }
    // Step out of the current frame.
    fn step_out(&self) -> Result<(), SBError> {
        let error = SBError::new();
        unsafe { sys::SBThreadStepOut(self.raw, error.raw) }
        if error.is_success() {
            Ok(())
        } else {
            Err(error)
        }
    }
    // Single instruction step; `over` controls whether calls are stepped over.
    fn step_instruction(&self, over: bool) -> Result<(), SBError> {
        let error = SBError::new();
        unsafe { sys::SBThreadStepInstruction(self.raw, over, error.raw) }
        if error.is_success() {
            Ok(())
        } else {
            Err(error)
        }
    }
    // Renders the thread's status text into an owned String.
    fn status(&self) -> String {
        let status = unsafe {
            let stream = SBStream::new();
            sys::SBThreadGetStatus(self.raw, stream.raw);
            stream.data()
        };
        status
    }
    // Resume a suspended thread.
    fn resume(&self) -> Result<(), SBError> {
        let error: SBError = SBError::new();
        unsafe { sys::SBThreadResume(self.raw, error.raw) };
        error.into_result()
    }
    // Suspend the thread.
    fn suspend(&self) -> Result<(), SBError> {
        let error: SBError = SBError::new();
        unsafe { sys::SBThreadSuspend(self.raw, error.raw) };
        error.into_result()
    }
    // Wraps a raw handle, mapping null to `None`.
    fn maybe_from(raw: SBThreadRef) -> Option<Self> {
        if raw.is_null() {
            None
        } else {
            Some(SBThread { raw })
        }
    }
}
impl fmt::Debug for SBThread {
    // Delegates to lldb's own description rendering via an SBStream.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let stream = SBStream::new();
        unsafe { sys::SBThreadGetDescription(self.raw, stream.raw) };
        write!(fmt, "SBThread {{ {} }}", stream.data())
    }
}
/// Thin wrapper over the lldb `SBProcess` handle.
struct SBProcess {
    raw: sys::SBProcessRef,
}
impl SBProcess {
    /// Wraps a raw handle without checking it.
    fn from(raw: sys::SBProcessRef) -> Self {
        SBProcess { raw }
    }
    /// Wraps a raw handle, mapping null to `None`.
    fn maybe_from(raw: sys::SBProcessRef) -> Option<Self> {
        if raw.is_null() {
            return None;
        }
        Some(SBProcess::from(raw))
    }
    /// Looks up a thread by index id; `None` when no such thread exists.
    fn thread_by_index_id(&self, thread_index_id: u32) -> Option<SBThread> {
        let raw = unsafe { sys::SBProcessGetThreadByIndexID(self.raw, thread_index_id) };
        SBThread::maybe_from(raw)
    }
    /// Resumes execution, surfacing any failure as an `SBError`.
    fn continue_execution(&self) -> Result<(), SBError> {
        let status = unsafe { sys::SBProcessContinue(self.raw) };
        SBError::from(status).into_result()
    }
}
/// Thin wrapper over the lldb `SBAddress` handle.
struct SBAddress {
    raw: sys::SBAddressRef,
}
impl SBAddress {
    // Wraps a raw handle without checking it.
    fn from(raw: sys::SBAddressRef) -> Self {
        SBAddress { raw }
    }
}
/// Thin wrapper over the lldb `SBFunction` handle.
struct SBFunction {
    raw: sys::SBFunctionRef,
}
impl SBFunction {
    // Wraps a raw handle without checking it.
    fn from(raw: sys::SBFunctionRef) -> Self {
        SBFunction { raw }
    }
    // The address of the function's first instruction.
    fn start_address(&self) -> SBAddress {
        SBAddress::from(unsafe { sys::SBFunctionGetStartAddress(self.raw) })
    }
}
/// Thin wrapper over the lldb `SBSymbolContext` handle.
struct SBSymbolContext {
    raw: sys::SBSymbolContextRef,
}
impl SBSymbolContext {
    // Wraps a raw handle without checking it.
    fn from(raw: sys::SBSymbolContextRef) -> Self {
        SBSymbolContext { raw }
    }
    // The function this symbol context refers to.
    fn function(&self) -> SBFunction {
        SBFunction::from(unsafe { sys::SBSymbolContextGetFunction(self.raw) })
    }
}
/// Thin wrapper over the lldb `SBSymbolContextList` handle.
struct SBSymbolContextList {
    raw: sys::SBSymbolContextListRef,
}
impl SBSymbolContextList {
    // Wraps a raw handle without checking it.
    fn from(raw: sys::SBSymbolContextListRef) -> Self {
        SBSymbolContextList { raw }
    }
    // Fetches the context at `index`.
    // NOTE(review): no bounds check — out-of-range behavior is whatever the
    // C API does; confirm against lldb-sys before relying on it.
    fn get(&self, index: usize) -> SBSymbolContext {
        SBSymbolContext::from(unsafe {
            sys::SBSymbolContextListGetContextAtIndex(self.raw, index as u32)
        })
    }
}
struct SBTarget {
raw: sys::SBTargetRef,
}
impl SBTarget {
fn maybe_from(raw: sys::SBTargetRef) -> Option<Self> {
if raw.is_null() {
None
} else {
Some(SBTarget { raw })
}
}
fn launch(&self, launch_info: SBLaunchInfo) -> Result<SBProcess, SBError> {
let error: SBError = SBError::new();
let process =
SBProcess::from(unsafe { sys::SBTargetLaunch2(self.raw, launch_info.raw, error.raw) });
if error.is_success() {
Ok(process)
} else {
Err(error)
}
}
fn find_functions(&self, name: &str, name_type_mask: u32) -> SBSymbolContextList {
let name = CString::new(name).unwrap();
SBSymbolContextList::from(unsafe {
sys::SBTargetFindFunctions(self.raw, name.as_ptr(), name_type_mask)
})
}
}
/// Thin wrapper over the lldb `SBBreakpoint` handle.
struct SBBreakpoint {
    raw: sys::SBBreakpointRef,
}
impl SBBreakpoint {
    // Wraps a raw handle without checking it.
    fn from(raw: sys::SBBreakpointRef) -> Self {
        SBBreakpoint { raw }
    }
    // Enables or disables the breakpoint.
    fn set_enabled(&self, enabled: bool) {
        unsafe { sys::SBBreakpointSetEnabled(self.raw, enabled) }
    }
}
/// Thin wrapper over the lldb `SBFrame` handle.
struct SBFrame {
    raw: sys::SBFrameRef,
}
/// Thin wrapper over the lldb `SBDebugger` handle.
struct SBDebugger {
    raw: sys::SBDebuggerRef,
}
impl SBDebugger {
    // Creates a debugger instance.
    // NOTE(review): `SBDebuggerInitialize` is called on every `new` — confirm
    // the C API tolerates repeated global initialization.
    fn new(source_init_files: bool) -> Self {
        unsafe { sys::SBDebuggerInitialize() };
        SBDebugger {
            raw: unsafe { sys::SBDebuggerCreate2(source_init_files) }
        }
    }
    // Creates a target for `executable`; `None` when creation fails.
    fn create_target_simple(&self, executable: &str) -> Option<SBTarget> {
        let executable = CString::new(executable).unwrap();
        SBTarget::maybe_from(unsafe { sys::SBDebuggerCreateTarget2(self.raw, executable.as_ptr()) })
    }
}
/// Thin wrapper over the lldb `SBStream` handle, used to collect text output.
struct SBStream {
    raw: sys::SBStreamRef,
}
impl SBStream {
    fn new() -> Self {
        SBStream {
            raw: unsafe { sys::CreateSBStream() }
        }
    }
    // Copies the stream's accumulated text into an owned String.
    // NOTE(review): assumes `SBStreamGetData` never returns null — a null
    // here would be UB in `CStr::from_ptr`; confirm the API's guarantee.
    fn data(&self) -> String {
        let c_str = unsafe { CStr::from_ptr(sys::SBStreamGetData(self.raw)) };
        c_str.to_string_lossy().into_owned()
    }
}
impl fmt::Debug for SBTarget {
    // Delegates to lldb's brief description rendering.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let stream = SBStream::new();
        unsafe { sys::SBTargetGetDescription(self.raw, stream.raw, DescriptionLevel::Brief) };
        write!(fmt, "SBTarget {{ {} }}", stream.data())
    }
}
// Things are just not working for me. I can't step at all.
// I might just try using the sys bindings.
/// Thin wrapper over the lldb `SBLaunchInfo` handle.
struct SBLaunchInfo {
    raw: sys::SBLaunchInfoRef,
    // Owned copies of the argument strings. Kept alive alongside the handle
    // in case the C side retains the pointers passed to SetArguments —
    // TODO confirm whether lldb copies the strings, making this unnecessary.
    argv_c: Vec<CString>,
}
impl SBLaunchInfo {
    // Builds a launch-info with the given argv (NULL-terminated on the C side).
    fn with_args(args: &[String]) -> Self {
        let argv_c: Vec<CString> = args.iter().map(|s| CString::new(s.clone()).unwrap()).collect();
        let mut argv: Vec<*const c_char> = argv_c.iter().map(|s| s.as_ptr()).collect();
        argv.push(std::ptr::null());
        let launch_info = unsafe { sys::CreateSBLaunchInfo(std::ptr::null()) };
        unsafe { sys::SBLaunchInfoSetArguments(launch_info, argv.as_ptr(), false) };
        SBLaunchInfo {
            raw: launch_info,
            argv_c,
        }
    }
}
impl fmt::Debug for SBError {
    // Delegates to lldb's description rendering via an SBStream.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let stream = SBStream::new();
        unsafe { sys::SBErrorGetDescription(self.raw, stream.raw) };
        write!(fmt, "SBError {{ {} }}", stream.data())
    }
}
// Blocks until the user presses ENTER on stdin.
fn wait_for_enter() {
    let mut input = String::new();
    println!("Press ENTER to continue...");
    std::io::stdin().read_line(&mut input).unwrap();
}
// Creates (but does not enable) a breakpoint at `address` in `target`.
fn breakpoint_create_by_sbaddress(target: &SBTarget, address: SBAddress) -> SBBreakpoint {
    SBBreakpoint::from(unsafe {
        sys::SBTargetBreakpointCreateBySBAddress(target.raw, address.raw)
    })
}
#[allow(missing_docs)]
/// Returns the display name of the function for `frame`, or `None` when the
/// frame has no resolvable function name.
fn display_function_name(frame: &SBFrame) -> Option<String> {
    let raw_name = unsafe { sys::SBFrameGetDisplayFunctionName(frame.raw) };
    if raw_name.is_null() {
        return None;
    }
    let name = unsafe { CStr::from_ptr(raw_name) };
    Some(name.to_str().unwrap().to_string())
}
fn main() {
let debugger = SBDebugger::new(false);
// debugger.set_asynchronous(false);
// debugger.enable_log("lldb", &["default"]);
// println!("{:?}", debugger);
// unsafe {
// let file_name = CString::new("/Users/jimmyhmiller/Downloads/debugger.txt".to_string()).unwrap();
// let mode = CString::new("r").unwrap();
// let file = libc::fopen(file_name.as_ptr(), mode.as_ptr());
// sys::SBDebuggerSetOutputFileHandle(debugger.raw, file, true);
// }
// let target = debugger.create_target_simple("/Users/jimmyhmiller/Documents/open-source/ruby/ruby");
// let target = debugger.create_target_simple("/Users/jimmyhmiller/Documents/Code/PlayGround/rust/editor/target/debug/editor");
let target = debugger.create_target_simple("/Users/jimmyhmiller/Documents/Code/ruby/ruby");
if let Some(target) = target {
println!("{:?}", target);
// I need to hold onto these strings or things will get deallocated.
// This works, is there an easier way? No idea.
let args = vec!["--jit".to_string(), "--yjit-call-threshold=1".to_string(), "/Users/jimmyhmiller/Documents/Code/ruby/my_file.rb".to_string()];
// let arg = CString::new("/Users/jimmyhmiller/Documents/Code/ruby/my_file.rb");
// let arg_pointer = arg.unwrap();
let launchinfo = SBLaunchInfo::with_args(&args);
// launchinfo.set_launch_flags(LaunchFlags::CLOSE_TTY_ON_EXIT);
// launchinfo.set_launch_flags(LaunchFlags::STOP_AT_ENTRY);
// println!("{:?}", unsafe { CStr::from_ptr(sys::SBLaunchInfoGetArgumentAtIndex(launchinfo.raw, 1)) });
println!("{}", unsafe {sys::SBLaunchInfoGetNumArguments(launchinfo.raw)});
// println!("functions!");
// println!("{:?}", target.breakpoints().collect::<Vec<_>>());
// target.enable_all_breakpoints();
// println!("End functions");
// launchinfo.set_arguments(&["/Users/jimmyhmiller/Documents/open-source/ruby/ruby"]);
match target.launch(launchinfo) {
Ok(process) => {
let thread = process.thread_by_index_id(1);
let thread2 = process.thread_by_index_id(2);
// let functions = target.find_functions("gen_single_block", 2);
// let function = functions.get(0);
// let address = function.function().start_address();
// let breakpoint = breakpoint_create_by_sbaddress(&target, address);
// breakpoint.set_enabled(true);
let c_string = CString::new("codegen.rs").unwrap();
let breakpoint = SBBreakpoint::from(unsafe{ sys::SBTargetBreakpointCreateByLocation(target.raw, c_string.as_ptr(), 781) });
breakpoint.set_enabled(true);
// thread::sleep(Duration::from_secs(2));
// println!("Func {:?}", function);
// let address = function.function().start_address();
// let breakpoint = breakpoint_create_by_sbaddress(&target, address);
// breakpoint.set_enabled(true);
// }
// let interpreter = debugger.command_interpreter();
// interpreter.handle_command("process handle -n main -p 0");
// process.detach();
// println!("here {:?} {:?} {:?} {:?} {:?}", process.process_info(), process.exit_status(), process.is_alive(), process.is_running(), process.state());
// process.continue_execution();
// thread::sleep(Duration::from_secs(10));
loop {
let mut buffer = [0; 1024];
let stdout = unsafe {sys::SBProcessGetSTDOUT(process.raw, buffer.as_mut_ptr(), buffer.len())};
println!("no stdout");
if stdout > 0 {
println!("===============\nstdout: {:?}===============", unsafe { CStr::from_ptr(buffer[..stdout as usize].as_ptr())});
}
// wait_for_enter();
// for thread in process.threads() {
// // if !thread.is_valid() { println!("Not valid"); continue; }
// println!("Thread: {:}", thread.index_id());
// let mut frame_string = "".to_string();
// for frame in thread.frames() {
// // if !frame.is_valid() { continue; }
// if let Some(name) = display_function_name(&frame) {
// frame_string = format!("{} fn {}\n", frame_string, name)
// }
// for var in frame.all_variables().iter() {
// unsafe {
// match CStr::from_ptr(sys::SBValueGetName(var.raw)).to_str() {
// Ok(name) => {
// frame_string = format!("{} {}:", frame_string, name)
// },
// //
// Err(e) => println!("{:?}", e),
// }
// }
// unsafe {
// let value = sys::SBValueGetValue(var.raw);
// if value.is_null() {
// frame_string = format!("{} null\n", frame_string);
// continue;
// }
// match CStr::from_ptr(value).to_str() {
// Ok(s) => {
// frame_string = format!("{} {}", frame_string, s)
// },
// _ => panic!("Invalid string?"),
// }
// }
// frame_string = format!("{}\n", frame_string);
// // if !var.is_valid() { continue; }
// // println!("{:?}: {:?}", var.name(), var.value());
// }
// frame_string = format!("{}\n\n", frame_string);
// }
// println!("{}", frame_string);
// println!("====================");
// }
// wait_for_enter();
// println!("{:?}", process.detach());
// println!("here {:?} {:?} {:?} {:?} {:?}", process, process.exit_status(), process.is_alive(), process.is_running(), process.state());
// println!("here {:?} {:?} {:?} {:?} {:?}", process, process.exit_status(), process.is_alive(), process.is_running(), process.state());
// target.disable_all_breakpoints();
// for thread in process.threads() {
// println!("STOP: {:?}", thread.stop_reason());
// // println!("{:?}", thread.resume());
// println!("{:?}", thread.step_instruction());
// println!("{:?}", thread);
// }
fn wait_for_instruction(process: &SBProcess, thread: &SBThread) {
let mut input = String::new();
println!("Press ENTER to continue...");
std::io::stdin().read_line(&mut input).unwrap();
let stripped_input = input.strip_suffix("\n").unwrap();
println!("{}, {:?} ", stripped_input,
match stripped_input {
"i" => thread.step_into(RunMode::OnlyDuringStepping),
"o" => thread.step_over(RunMode::OnlyDuringStepping),
"j" => thread.step_instruction(false),
"w" => thread.step_out(),
"c" => process.continue_execution(),
"r" => thread.resume(),
"s" => thread.suspend(),
_ => Ok(())
});
}
if let Some(ref thread) = thread {
// println!("{:?}", thread.resume());
// println!("{:?}", step_into(thread, RunMode::OnlyDuringStepping));
// println!("{:?}", step_over(thread, RunMode::OnlyDuringStepping));
// println!("{:?}", thread.step_into(RunMode::OnlyThisThread));
// sleep(Duration::from_secs(5));
// println!("{:?}", thread.step_over(RunMode::OnlyThisThread));
// sleep(Duration::from_secs(5));
println!("status {}", thread.status());
wait_for_instruction(&process, thread);
println!("status {}", thread.status());
if let Some(ref thread) = thread2 {
wait_for_instruction(&process, thread);
println!("status {}", thread.status());
}
// println!("{:?}", thread.step_out());
// println!("status {}", thread.status());
// wait_for_enter();
// println!("{:?}", thread.step_into(RunMode::OnlyThisThread));
// println!("status {}", thread.status());
// wait_for_enter();
// // println!("{:?}", thread.step_over(RunMode::OnlyThisThread));
// println!("{:?}", thread.step_over(RunMode::OnlyThisThread));
// println!("status {}", thread.status());
// wait_for_enter();
// println!("{:?}", thread.step_instruction(true));
// // println!("{:?}", thread.step_out());
// // println!("{:?}", thread.resume());
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", thread.resume());
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", step_instruction(thread));
// // println!("{:?}", thread.stop_reason());
// // println!("{:?}", thread.suspend());
// // println!("{:?}", process.continue_execution());
// println!("status {}", thread.status());
// println!("{:?}", thread.resume());
}
// for thread in process.threads().take(1) {
// println!("Step into {:?}", step_instruction(&thread));
// println!("{:?}", thread.resume());
// }
// println!("{:?}", process.continue_execution());
// println!("{:?}", process.stop());
// for thread in process.threads() {
// if !thread.is_valid() { println!("Not valid"); continue; }
// for frame in thread.frames() {
// // if !frame.is_valid() { println!("Not valid frame"); continue; }
// println!("{:?}", frame.display_function_name());
// }
// }
// wait_for_enter();
// process.continue_execution();
}
// println!("here {:?} {:?} {:?} {:?} {:?}", process, process.exit_status(), process.is_alive(), process.is_running(), process.state());
// process.continue_execution().unwrap();
// process.continue_execution().unwrap();
// process.continue_execution().unwrap();
// process.continue_execution().unwrap();
// process.continue_execution().unwrap();
// process.continue_execution().unwrap();
// process.continue_execution().unwrap();
// wait_for_enter();
// for thread in process.threads().skip(1) {
// if !thread.is_valid() { continue; }
// for frame in thread.frames() {
// if !frame.is_valid() { continue; }
// println!("{:?}", frame.display_function_name());
// }
// }
// wait_for_enter();
// println!("here {:?}", process);
}
Err(e) => println!("Uhoh")
}
}
}
|
use std::collections::VecDeque;
/// Returns `true` if every `[`, `{`, and `(` in `text` is closed by the
/// matching bracket in the correct (LIFO) order.
///
/// Non-bracket characters are ignored, so arbitrary text can be checked.
///
/// Fixes: removed the debug `println!` calls that polluted stdout on every
/// character, and the redundant empty-string special case (an empty input
/// already yields an empty stack).
pub fn brackets_are_balanced(text: &str) -> bool {
    // Stack of currently open brackets; the front is the most recent.
    let mut stack = VecDeque::new();
    for c in text.chars() {
        match c {
            // Openers are pushed and must be matched later.
            '[' | '{' | '(' => stack.push_front(c),
            // Each closer must match the most recently opened bracket.
            ']' => {
                if stack.pop_front() != Some('[') {
                    return false;
                }
            }
            '}' => {
                if stack.pop_front() != Some('{') {
                    return false;
                }
            }
            ')' => {
                if stack.pop_front() != Some('(') {
                    return false;
                }
            }
            // Everything else is irrelevant to balance.
            _ => {}
        }
    }
    // Balanced only if every opener was closed.
    stack.is_empty()
}
|
/*
* Copyright Stalwart Labs Ltd. See the COPYING
* file at the top-level directory of this distribution.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
* https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
* <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
* option. This file may not be copied, modified, or distributed
* except according to those terms.
*/
use std::fmt::Display;
use serde::{Deserialize, Serialize};
use crate::Method;
pub mod changes;
pub mod copy;
pub mod error;
pub mod get;
pub mod query;
pub mod query_changes;
pub mod request;
pub mod response;
pub mod session;
pub mod set;
/// Parameters shared by every method call: the target account, the method
/// being invoked, and the client-chosen call id used to correlate the
/// response.
pub struct RequestParams {
    pub account_id: String,
    pub method: Method,
    pub call_id: usize,
}
impl RequestParams {
pub fn new(account_id: impl Into<String>, method: Method, call_id: usize) -> Self {
Self {
account_id: account_id.into(),
method,
call_id,
}
}
}
/// A data type addressable through the API.
///
/// `Property` enumerates the type's field selectors; it must be printable
/// and (de)serializable. `requires_account_id` reports whether requests
/// for this type must carry an account id.
pub trait Object: Sized {
    type Property: Display + Serialize + for<'de> Deserialize<'de>;
    fn requires_account_id() -> bool;
}
|
use std::fmt;
/// Ways parsing an IRC message prefix can fail.
#[derive(PartialEq, Debug)]
pub enum Error {
    /// The input does not start with the leading ':'.
    MissingLead,
    /// No terminating space was found, so there is no complete prefix.
    MissingPrefix,
    /// A user prefix ('!' present) lacks the '@' before the host.
    MissingHost,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::MissingLead => write!(f, "missing lead colon"),
Error::MissingPrefix => write!(f, "missing entire prefix"),
Error::MissingHost => write!(f, "missing @ separator for user host"),
}
}
}
/// The parsed prefix of an IRC message: either a user origin
/// (`nick!user@host`) or a server origin (a bare host).
#[derive(PartialEq, Clone)]
pub enum Prefix {
    /// `nick!user@host` form.
    User {
        nick: String,
        user: String,
        host: String,
    },
    /// Bare server-host form.
    Server {
        host: String,
    },
}
impl Prefix {
pub fn parse(input: &str) -> Result<(Self, usize), Error> {
if !input.starts_with(':') {
return Err(Error::MissingLead);
}
let end = input.find(' ').ok_or_else(|| Error::MissingPrefix)?;
let s = input[1..end].trim();
match s.find('!') {
Some(pos) => {
let nick = &s[..pos];
let at = s.find('@').ok_or_else(|| Error::MissingHost)?;
let user = &s[pos + 1..at];
let host = &s[at + 1..];
Ok((
Prefix::User {
nick: nick.into(),
user: user.into(),
host: host.into(),
},
end,
))
}
None => Ok((Prefix::Server { host: s.into() }, end)),
}
}
}
impl fmt::Debug for Prefix {
    /// Formats the prefix in its wire form: `nick!user@host` or `host`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Prefix::User { nick, user, host } => write!(f, "{}!{}@{}", nick, user, host),
            Prefix::Server { host } => write!(f, "{}", host),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises the user form ("nick!user@host") plus each error case.
    #[test]
    fn parse_user() {
        let input = ":testuser{12}!user~@local.host ";
        let prefix = Prefix::parse(input);
        match prefix {
            Ok((Prefix::User { nick, user, host }, _)) => {
                assert_eq!(nick, "testuser{12}");
                assert_eq!(user, "user~");
                assert_eq!(host, "local.host");
            }
            Ok((Prefix::Server { .. }, _)) => panic!("parsed server prefix"),
            Err(err) => panic!("failed to parse user prefix: {}", err),
        }
        // A missing leading ':' must be rejected.
        let input = "testuser!user@host ";
        let prefix = Prefix::parse(input);
        assert_eq!(prefix, Err(Error::MissingLead));
        // A '!' without a following '@' means the host is missing.
        let input = ":testuser!user ";
        let prefix = Prefix::parse(input);
        assert_eq!(prefix, Err(Error::MissingHost));
        // Without a terminating space there is no complete prefix.
        let input = ":invalidmessage";
        let prefix = Prefix::parse(input);
        assert_eq!(prefix, Err(Error::MissingPrefix));
    }
    // Exercises the server form (no '!': the whole prefix is a host).
    #[test]
    fn parse_server() {
        let input = ":irc.test.server ";
        let prefix = Prefix::parse(input);
        match prefix {
            Ok((Prefix::Server { host }, _)) => {
                assert_eq!(host, "irc.test.server");
            }
            Ok((Prefix::User { .. }, _)) => panic!("parsed user prefix"),
            Err(err) => panic!("failed to parse server prefix: {}", err),
        }
    }
}
|
#![allow(dead_code)]
use anyhow::{anyhow, Error};
type Num = i32;
type Dim = usize;
// Scalar addition of a value x to the matrix.
//
// Mutates `v` in place, adding `x` to every element. Takes `&mut [Num]`
// instead of `&mut Vec<Num>` (clippy `ptr_arg`); callers passing
// `&mut some_vec` still work via deref coercion, so this is backward
// compatible.
pub fn scalar_addition(v: &mut [Num], x: Num) {
    // For each element add x
    for i in v {
        *i += x;
    }
}
// Scalar subtraction of a value x from the matrix.
//
// Mutates `v` in place, subtracting `x` from every element. Takes
// `&mut [Num]` instead of `&mut Vec<Num>` (clippy `ptr_arg`); `&mut vec`
// callers still coerce, so the change is backward compatible.
pub fn scalar_subtraction(v: &mut [Num], x: Num) {
    // For each element subtract x
    for i in v {
        *i -= x;
    }
}
// Scalar multiplication of the matrix by a value x.
//
// Mutates `v` in place, multiplying every element by `x`. Takes
// `&mut [Num]` instead of `&mut Vec<Num>` (clippy `ptr_arg`); `&mut vec`
// callers still coerce, so the change is backward compatible.
pub fn scalar_multiplication(v: &mut [Num], x: Num) {
    // For each element multiply by x
    for i in v {
        *i *= x;
    }
}
// TODO: Decide how we want to check the dimensions of the matrix? Could be as simple
// TODO: as passing the width and depth as parameters, which I did from here on
// Maybe pass as a struct with the values and the width and depth?
// Addition of two matrices A and B to produce C.
//
// Both matrices are row-major vectors accompanied by their dimensions;
// the shapes must match exactly or an error is returned.
pub fn matrix_addition(
    a: Vec<Num>,
    b: Vec<Num>,
    width_a: Dim,
    depth_a: Dim,
    width_b: Dim,
    depth_b: Dim,
) -> Result<Vec<Num>, Error> {
    if width_a != width_b || depth_a != depth_b {
        return Err(anyhow!("matrices must be identical dimensions"));
    }
    // Element-wise sum of the paired entries.
    let sum = a.into_iter().zip(b).map(|(x, y)| x + y).collect();
    Ok(sum)
}
// Subtraction of two matrices A and B to produce C.
//
// Both matrices are row-major vectors accompanied by their dimensions;
// the shapes must match exactly or an error is returned.
pub fn matrix_subtraction(
    a: Vec<Num>,
    b: Vec<Num>,
    width_a: Dim,
    depth_a: Dim,
    width_b: Dim,
    depth_b: Dim,
) -> Result<Vec<Num>, Error> {
    if width_a != width_b || depth_a != depth_b {
        return Err(anyhow!("matrices must be identical dimensions"));
    }
    // Element-wise difference of the paired entries.
    let diff = a.into_iter().zip(b).map(|(x, y)| x - y).collect();
    Ok(diff)
}
// Hadamard (element-wise) product of two matrices A and B to produce C.
//
// Both matrices are row-major vectors accompanied by their dimensions;
// the shapes must match exactly or an error is returned.
pub fn hadamard_product(
    a: Vec<Num>,
    b: Vec<Num>,
    width_a: Dim,
    depth_a: Dim,
    width_b: Dim,
    depth_b: Dim,
) -> Result<Vec<Num>, Error> {
    if width_a != width_b || depth_a != depth_b {
        return Err(anyhow!("matrices must be identical dimensions"));
    }
    // Element-wise product of the paired entries.
    let prod = a.into_iter().zip(b).map(|(x, y)| x * y).collect();
    Ok(prod)
}
// Helper function to convert a row-major vector index to 1-based (i, j)
// coordinates, where `d` is the row length, `i` the row, and `j` the
// column.
pub fn index_to_coordinates(idx: usize, d: usize) -> (usize, usize) {
    let i = idx / d + 1;
    // Algebraically identical to the original `d - ((i * d) - idx) + 1`,
    // which reduces to the offset within the row plus one.
    let j = idx % d + 1;
    (i, j)
}
// Helper function to convert i and j to a vector index.
//
// NOTE(review): `i * j - 1` is not a row-major mapping — it collides
// ((1, 2) and (2, 1) both give 1) and underflows when `i` or `j` is 0.
// A correct mapping needs the row width, which this signature does not
// take; callers (`transpose`, `dot_product`) should be audited.
pub fn coordinates_to_index(i: usize, j: usize) -> usize {
    i * j - 1
}
// Transpose a matrix
pub fn transpose(v: Vec<Num>, width: Dim, depth: Dim) -> Vec<Num> {
// Allocate new vector of the same size as the input, correct?
let mut res: Vec<Num> = Vec::with_capacity(width * depth);
// Used to iterate
let mut i;
let mut j;
for idx in 0..v.len() {
// Get the i and j coordinate values from the current index
i = index_to_coordinates(idx, depth).0;
j = index_to_coordinates(idx, depth).1;
// Transpose by flipping i and j
let trans_idx = coordinates_to_index(j, i);
// Push the corresponding value to the new transposed matrix
res.push(v[trans_idx]);
}
res
}
// Dot product helper for multiplication.
//
// Intended to multiply row `i` of `mat_a` with column `j` of `mat_b`,
// accumulating over `len` terms.
//
// NOTE(review): this routes through `coordinates_to_index`, whose
// `i * j - 1` formula collides for distinct coordinates and computes
// `0 * j - 1` (an unsigned underflow) whenever `idx`, `i`, or `j` is 0 —
// confirm the intended indexing before relying on the result.
pub fn dot_product(mat_a: &[Num], mat_b: &[Num], i: Dim, j: Dim, len: &Dim) -> Num {
    let mut x;
    let mut y;
    let mut sum: Num = 0;
    // Normal dot product, multiply all numbers on a row of A and a column of B and keep a running sum
    for idx in 0..*len {
        x = coordinates_to_index(idx, j);
        y = coordinates_to_index(i, idx);
        sum += mat_a[x] * mat_b[y];
    }
    sum
}
// Traditional matrix multiplication.
//
// Multiplies `a` (depth_a rows x width_a cols) by `b` (depth_b rows x
// width_b cols) into a depth_a x width_b result.
//
// NOTE(review): the error message says "identical dimensions" but the
// condition actually checked is the inner-dimension match
// `width_a == depth_b`. Results also depend on `dot_product`, whose index
// mapping is under review.
pub fn matrix_multiplication(
    a: &[Num],
    b: &[Num],
    width_a: &Dim,
    depth_a: Dim,
    width_b: Dim,
    depth_b: &Dim,
) -> Result<Vec<Num>, Error> {
    let mut res: Vec<Num> = Vec::with_capacity(depth_a * width_b);
    // The width of the first input needs to match the depth of the second input
    if depth_b != width_a {
        Err(anyhow!("matrices must be identical dimensions"))
    } else {
        // One dot product per output cell, in row-major order.
        for rows in 0..depth_a {
            for cols in 0..width_b {
                res.push(dot_product(a, b, rows, cols, width_a));
            }
        }
        Ok(res)
    }
}
// Matrix power: raises matrix `a` to the `pwr`-th power.
//
// The result starts as a copy of A (A^1) and is then multiplied by A
// `pwr - 1` more times, so `pwr == 2` performs exactly one
// multiplication.
//
// Fix: the original `loop` exited on `count == pwr - 1`, which broke
// *before* the first multiplication for `pwr == 2` (returning a plain
// copy of A) and never fired at all for `pwr <= 1`, spinning forever.
// Counting up with `<` performs the intended number of multiplications
// and terminates for every `pwr`.
pub fn power(a: &[Num], width: &Dim, depth: Dim, pwr: Num) -> Vec<Num> {
    // Seed the result with a copy of A.
    let mut res: Vec<Num> = a.to_vec();
    // Multiply (pwr - 1) more times; zero iterations when pwr <= 1.
    let mut count = 1;
    while count < pwr {
        res = matrix_multiplication(&res, a, width, depth, *width, &depth).unwrap();
        count += 1;
    }
    res
}
// Generate an identity matrix for the provided matrix dimension.
//
// Produces an n x n row-major matrix with ones on the diagonal and zeros
// everywhere else.
pub fn generate_identity(n: Dim) -> Vec<Num> {
    let total = n * n;
    let mut res: Vec<Num> = Vec::with_capacity(total);
    for idx in 0..total {
        // A cell is on the diagonal exactly when its row equals its column.
        let (i, j) = index_to_coordinates(idx, n);
        res.push(if i == j { 1 } else { 0 });
    }
    res
}
#[cfg(test)]
mod tests {
    use crate::matrix::{matrix_addition, matrix_subtraction, scalar_addition, scalar_subtraction, scalar_multiplication, power, matrix_multiplication, hadamard_product, generate_identity, transpose};
    #[test]
    fn add_test() {
        let a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let width_a = 3;
        let depth_a = 3;
        let b = vec![7, 8, 9, 5, 6, 4, 3, 1, 2];
        let width_b = 3;
        let depth_b = 3;
        let c_check = vec![8, 10, 12, 9, 11, 10, 10, 9, 11];
        let c = matrix_addition(a, b, width_a, depth_a, width_b, depth_b).unwrap();
        assert_eq!(c, c_check);
    }
    #[test]
    fn sub_test() {
        let a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let width_a = 3;
        let depth_a = 3;
        let b = vec![7, 8, 9, 5, 6, 4, 3, 1, 2];
        let width_b = 3;
        let depth_b = 3;
        let c_check = vec![-6, -6, -6, -1, -1, 2, 4, 7, 7];
        let c = matrix_subtraction(a, b, width_a, depth_a, width_b, depth_b).unwrap();
        assert_eq!(c, c_check);
    }
    #[test]
    fn scalar_add_test() {
        let mut a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let x = 4;
        // Fixed fixture: the original expected [5, 7, 8, 8, ...], which is
        // not `a + 4` element-wise, so the test could never pass.
        let c_check = vec![5, 6, 7, 8, 9, 10, 11, 12, 13];
        scalar_addition(&mut a, x);
        assert_eq!(a, c_check);
    }
    #[test]
    fn scalar_sub_test() {
        let mut a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let x = 5;
        let c_check = vec![-4, -3, -2, -1, 0, 1, 2, 3, 4];
        scalar_subtraction(&mut a, x);
        assert_eq!(a, c_check);
    }
    #[test]
    fn scalar_mul_test() {
        let mut a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let x = 2;
        let c_check = vec![2, 4, 6, 8, 10, 12, 14, 16, 18];
        scalar_multiplication(&mut a, x);
        assert_eq!(a, c_check);
    }
    #[test]
    fn power_test() {
        let a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let width_a = 3;
        let depth_a = 3;
        let x = 2;
        let c_check = vec![30, 36, 42, 66, 81, 96, 104, 126, 150];
        let c = power(&a, &width_a, depth_a, x);
        assert_eq!(c, c_check);
    }
    #[test]
    fn hadamard_test() {
        let a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let width_a = 3;
        let depth_a = 3;
        let b = vec![7, 8, 9, 5, 6, 4, 3, 1, 2];
        let width_b = 3;
        let depth_b = 3;
        let c_check = vec![7, 16, 27, 20, 30, 24, 21, 8, 18];
        // The expected vector is the element-wise product, so call
        // hadamard_product; the original mistakenly called
        // matrix_multiplication here.
        let c = hadamard_product(a, b, width_a, depth_a, width_b, depth_b).unwrap();
        assert_eq!(c, c_check);
    }
    #[test]
    fn transpose_test() {
        let a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let width_a = 3;
        let depth_a = 3;
        let c_check = vec![1, 4, 7, 2, 5, 8, 3, 6, 9];
        let c = transpose(a, width_a, depth_a);
        assert_eq!(c, c_check);
    }
    #[test]
    fn multiply_test() {
        // NOTE(review): despite its name this duplicates sub_test; a real
        // matrix_multiplication test is pending review of dot_product's
        // index mapping.
        let a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
        let width_a = 3;
        let depth_a = 3;
        let b = vec![7, 8, 9, 5, 6, 4, 3, 1, 2];
        let width_b = 3;
        let depth_b = 3;
        let c_check = vec![-6, -6, -6, -1, -1, 2, 4, 7, 7];
        let c = matrix_subtraction(a, b, width_a, depth_a, width_b, depth_b).unwrap();
        assert_eq!(c, c_check);
    }
    #[test]
    fn identity_test() {
        let width_a = 3;
        let c_check = vec![1, 0, 0, 0, 1, 0, 0, 0, 1];
        let c = generate_identity(width_a);
        assert_eq!(c, c_check);
    }
}
|
// (Full example with detailed comments in examples/17_yaml.rs)
//
// This example demonstrates clap's building from YAML style of creating arguments which is far
// more clean, but takes a very small performance hit compared to the other two methods.
use clap::{App, load_yaml};
/// Demonstrates building a clap `App` from a YAML definition.
fn main() {
    // The YAML file is found relative to the current file, similar to how modules are found
    let yaml = load_yaml!("../cli.yml");
    // Bind as `_matches` so the example compiles without an
    // unused-variable warning; a real CLI would query this value.
    let _matches = App::from_yaml(yaml).get_matches();
}
|
use crate::days::day21::{FoodLine, parse_input, default_input};
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
// Prints the answer for the bundled day-21 puzzle input.
pub fn run() {
    println!("{}", food_str(default_input()).unwrap())
}
// Parses the raw puzzle text and solves it; convenience wrapper over `food`.
pub fn food_str(input : &str) -> Result<String, ()> {
    food(parse_input(input))
}
// Solves the allergen-assignment puzzle: deduces which ingredient carries
// each allergen and returns the ingredients joined by ',' in allergen
// order.
//
// Fix: the original unwrapped `match_rule`/`match_value` inside the loop,
// panicking if no allergen was ever constrained to a single ingredient;
// that case now returns `Err(())`, matching the declared `Result` type.
pub fn food(input : Vec<FoodLine>) -> Result<String, ()> {
    // allergen -> the ingredient set of every food listing that allergen.
    let mut allergen_map : HashMap<&String, Vec<HashSet<String>>> = HashMap::new();
    for food in &input {
        for allergen in &food.allergens {
            allergen_map.entry(allergen).or_default().push(food.ingredients.clone());
        }
    }
    // Candidate ingredients per allergen: the intersection of all its sets.
    let mut constraints: HashMap<_, _>= allergen_map.iter()
        .map(|(k, v)| {
            let intersection = v.iter().skip(1).fold(v[0].clone(), |acc, set| acc.intersection(set).cloned().collect());
            (k, intersection)
        }).collect();
    let mut mappings = HashMap::new();
    let final_length = constraints.len();
    // Repeatedly pin any allergen with exactly one candidate left and
    // remove that ingredient from every other allergen's candidates.
    while mappings.len() < final_length {
        let mut match_rule = None;
        let mut match_value = None;
        for (rule, values) in &constraints {
            if values.len() == 1 {
                match_rule = Some(rule.clone());
                match_value = Some(values.iter().next().unwrap().clone());
                break;
            }
        }
        // No singleton constraint: the system is unsolvable by this
        // elimination scheme, so report an error instead of panicking.
        let match_rule = match_rule.ok_or(())?;
        let match_value = match_value.ok_or(())?;
        constraints.remove(&match_rule);
        constraints = constraints.iter()
            .map(|(k, v)| {
                (*k, v.iter().cloned().filter(|n| *n != match_value).collect())
            })
            .collect();
        mappings.insert(match_rule, match_value);
    }
    // Emit the dangerous ingredients sorted by their allergen's name.
    Ok(mappings.iter()
        .sorted_by_key(|(k, _v)| k.clone())
        .map(|(_k,v)| v).join(","))
}
#[cfg(test)]
pub mod tests {
    use super::*;
    // Regression test pinned to this puzzle's bundled input: the expected
    // canonical dangerous-ingredient list, ordered by allergen name.
    #[test]
    pub fn part2_answer() {
        assert_eq!(food_str(default_input()).unwrap(), "fqhpsl,zxncg,clzpsl,zbbnj,jkgbvlxh,dzqc,ppj,glzb")
    }
}
use std::process;
use std::io;
use std::error::Error;
use colored::*;
use ini::Ini;
mod draw_charts;
use draw_charts::{save_final_chart, save_line_chart, save_cost_chart, normalize_elem};
use draw_charts::DataMM;
fn read_from_csv() -> Result<(Vec<(f64, f64)>, (String, String)), Box<dyn Error>> {
let mut records: Vec<(f64, f64)> = Vec::new();
let mut rdr = csv::ReaderBuilder::new()
.delimiter(b',')
.from_reader(io::stdin());
for result in rdr.records() {
let record = result?;
records.push((
record[0].parse::<f64>().unwrap_or_else(|e| {
println!("{}: {}", "error".red().bold(), e);
process::exit(1);
}),
record[1].parse::<f64>().unwrap_or_else(|e| {
println!("{}: {}", "error".red().bold(), e);
process::exit(1);
})
));
}
let headers = rdr.headers()?;
Ok((records, (headers[0].to_string(), headers[1].to_string())))
}
/// Computes the partial derivatives of the (halved) mean-squared-error
/// cost with respect to theta_0 and theta_1 for the hypothesis
/// `h(x) = theta_0 + theta_1 * x`.
///
/// Takes `&[(f64, f64)]` instead of `&Vec<_>` (clippy `ptr_arg`);
/// existing `&records` callers coerce, so the change is backward
/// compatible.
fn calc_partial_derivative(records: &[(f64, f64)], old_theta_0: f64, old_theta_1: f64) -> (f64, f64) {
    let m = records.len();
    let mut der_theta_0 = 0.0;
    let mut der_theta_1 = 0.0;
    // Accumulate the residuals (and x-weighted residuals) over all samples.
    for curr_thetas in records {
        der_theta_0 += (old_theta_0 + (old_theta_1 * curr_thetas.0)) - curr_thetas.1;
        der_theta_1 += ((old_theta_0 + (old_theta_1 * curr_thetas.0)) - curr_thetas.1) * curr_thetas.0;
    }
    // Average over the m samples.
    der_theta_0 = (1 as f64/m as f64) * der_theta_0;
    der_theta_1 = (1 as f64/m as f64) * der_theta_1;
    (der_theta_0, der_theta_1)
}
/// Performs one gradient-descent step: moves both thetas against their
/// partial derivatives, scaled by `learning_rate`.
///
/// Returns `(new_theta_0, new_theta_1, der_theta_0, der_theta_1)` so the
/// caller can also inspect the slopes for its stop condition.
fn calc_new_theta(records: &Vec<(f64, f64)>, old_theta_0: f64, old_theta_1: f64, learning_rate: f64) -> (f64, f64, f64, f64) {
    let (der_theta_0, der_theta_1) = calc_partial_derivative(records, old_theta_0, old_theta_1);
    (
        old_theta_0 - learning_rate * der_theta_0,
        old_theta_1 - learning_rate * der_theta_1,
        der_theta_0,
        der_theta_1,
    )
}
/// Halved mean squared error (the usual gradient-descent cost):
/// `J = 1/(2m) * sum((h(x) - y)^2)` with `h(x) = theta_0 + theta_1 * x`.
///
/// Fixes: the residual expression was written out twice and multiplied by
/// itself; it is now computed once. The parameter is `&[(f64, f64)]`
/// instead of `&Vec<_>` (clippy `ptr_arg`); `&records` callers coerce.
fn mean_squared_error(records: &[(f64, f64)], new_theta_0: f64, new_theta_1: f64) -> f64 {
    let mut global_cost = 0.0;
    for el in records {
        // Residual of the prediction for this sample.
        let err = (new_theta_0 + (new_theta_1 * el.0)) - el.1;
        global_cost += err * err;
    }
    (1.0/(2.0 * records.len() as f64)) * global_cost
}
/// Min-max normalizes both coordinates of every sample using the bounds
/// recorded in `mm`, returning a new vector.
fn normalize_data(data: &Vec<(f64, f64)>, mm: &DataMM) -> Vec<(f64, f64)> {
    data.iter()
        .map(|&(x, y)| {
            (
                normalize_elem(x, mm.min_0, mm.max_0),
                normalize_elem(y, mm.min_1, mm.max_1),
            )
        })
        .collect()
}
/// Trains a one-variable linear regression on CSV data from stdin via
/// gradient descent, renders charts, and persists the model to an INI
/// file.
fn main() {
    let records: Vec<(f64, f64)>;
    let category_names: (String, String);
    // Load the (x, y) samples and their column names from stdin CSV.
    let csv = read_from_csv();
    match csv {
        Ok(v) => {
            records = v.0;
            category_names = v.1;
        },
        Err(e) => {
            println!("error parsing: {:?}", e);
            process::exit(1);
        },
    }
    // Scan for per-column min/max, needed for min-max normalization (and
    // later for denormalizing predictions made from the saved model).
    let mut mm = DataMM { min_0: f64::MAX, max_0: f64::MIN, min_1: f64::MAX, max_1: f64::MIN };
    for el in &records {
        if el.0 > mm.max_0 {
            mm.max_0 = el.0;
        }
        if el.0 < mm.min_0 {
            mm.min_0 = el.0;
        }
        if el.1 > mm.max_1 {
            mm.max_1 = el.1;
        }
        if el.1 < mm.min_1 {
            mm.min_1 = el.1;
        }
    }
    let normalized_records: Vec<(f64, f64)> = normalize_data(&records, &mm);
    // Gradient-descent settings: stop after max_iteration steps or once
    // both partial derivatives fall within `precision` of zero.
    let max_iteration = 20000;
    let precision = 0.00001;
    let learning_rate = 0.1;
    let mut tmp_theta_0 = 0.0;
    let mut tmp_theta_1 = 0.0;
    let mut slope_theta_0: f64 = 1.0;
    let mut slope_theta_1: f64 = 1.0;
    let mut curr_iteration = 0;
    let mut learning_curve: Vec<(f64, f64)> = Vec::new();
    let mut costs: Vec<f64> = Vec::new();
    while (curr_iteration < max_iteration) && (slope_theta_0.abs() > precision || slope_theta_1.abs() > precision) {
        let (new_theta_0, new_theta_1, cur_slope_theta_0, cur_slope_theta_1) = calc_new_theta(&normalized_records, tmp_theta_0, tmp_theta_1, learning_rate);
        slope_theta_0 = cur_slope_theta_0;
        slope_theta_1 = cur_slope_theta_1;
        // Record the cost of every step for the cost chart.
        costs.push(mean_squared_error(&normalized_records, new_theta_0, new_theta_1));
        tmp_theta_0 = new_theta_0;
        tmp_theta_1 = new_theta_1;
        curr_iteration += 1;
        // Sample the mean absolute slope every 50 iterations for the
        // learning-curve chart.
        if curr_iteration % 50 == 0 {
            learning_curve.push(( curr_iteration as f64, (cur_slope_theta_0.abs() + cur_slope_theta_1.abs())/2.0));
        }
    }
    let final_theta_0 = tmp_theta_0;
    let final_theta_1 = tmp_theta_1;
    println!("{}: [{}]", "Stopped at iteration".bright_yellow(), curr_iteration,);
    println!("{}: {}", "final_theta_0".bright_green(), final_theta_0);
    println!("{}: {}", "final_theta_1".bright_green(), final_theta_1);
    // Render the fitted line, the learning curve, and the cost curve.
    save_final_chart(&records, final_theta_0, final_theta_1, &category_names, &mm);
    let labels_curve = ("Iteration".to_string(), "Learning".to_string());
    save_line_chart(&learning_curve, &labels_curve);
    save_cost_chart(&costs);
    // Persist the model: thetas, column names, and the normalization
    // bounds needed to denormalize predictions later.
    let mut conf = Ini::new();
    conf.with_section(Some("thetas"))
        .set("theta_0", final_theta_0.to_string())
        .set("theta_1", final_theta_1.to_string());
    conf.with_section(Some("categories"))
        .set("x", category_names.0)
        .set("y", category_names.1);
    conf.with_section(Some("denormalize"))
        .set("min_0", mm.min_0.to_string())
        .set("max_0", mm.max_0.to_string())
        .set("min_1", mm.min_1.to_string())
        .set("max_1", mm.max_1.to_string());
    conf.write_to_file("data/theta.ini").unwrap();
}
|
pub mod controller;
pub mod model;
pub mod repository;
|
use super::{Coin,Inv,InvErr,InvWork,Intrinsics};
use std::collections::HashMap;
use std::any::TypeId;
use std::marker::Reflect;
/// Reasons a vendor transaction can fail.
#[derive(Debug)]
pub enum VendErr {
    /// Out-of-stock condition (not constructed in the code shown here).
    Stock,
    /// Not enough money to complete the trade.
    Money,
    /// Underlying inventory operation failed.
    Inv(InvErr)
}
impl VendErr {
    /// Wraps an inventory error so it can be propagated via `map_err`.
    fn from_inv (ie: InvErr) -> VendErr {
        VendErr::Inv(ie)
    }
}
/// A shopkeeper holding its own inventory, funds, and per-item-type
/// pricing rates.
#[derive(Debug)]
pub struct Vendor<K> {
    // The vendor's own stock.
    inv: Inv<K>,
    // Price rate, as a percent of an item's base value, keyed by item type.
    rate: HashMap<TypeId,f32>,
    // Funds available for buying items from the player.
    money: Coin,
    cycle: u16, //time between restock
}
impl<K:Intrinsics+Clone+PartialEq> Vendor<K> {
    /// Creates a vendor with an empty inventory, no money, and a restock
    /// cycle of `dt`.
    pub fn new (dt: u16) -> Vendor<K> {
        Vendor{ inv: Inv::<K>::new(None),
                rate: HashMap::new(),
                money: 0,
                cycle: dt, }
    }
    /// player sells to vendor
    ///
    /// Looks up item `id` in the player's inventory `inv`, prices it at
    /// the per-type rate (percent of its base value), moves it into the
    /// vendor's inventory, and deducts the price from the vendor's funds.
    /// Returns the price paid.
    pub fn sell (&mut self, id: u32, inv: &mut Inv<K>) -> Result<Coin,VendErr> {
        let mut rate = 100.0;
        let cost;
        if let Some(k) = inv.get(&id) {
            if let Some(_rate) = self.rate.get(&k.get_base().get_typeid().unwrap()) {
                rate = *_rate;
            }
            else { return Err(VendErr::Inv(InvErr::Invalid)) } //need to initialize typeid first!
            let value = *k.get_base().get_value() as f32;
            cost = (value * (rate/100.0)) as u16;
            // Fix: the original test `(self.money - cost) < 1` underflows
            // (panicking in debug builds) whenever the vendor cannot afford
            // the item; `self.money <= cost` is the same acceptance
            // condition without the subtraction.
            if self.money <= cost { return Err(VendErr::Money) }
            try!(self.inv.add(k.clone()).map_err(VendErr::from_inv));
            self.money -= cost;
        }
        else { return Err(VendErr::Inv(InvErr::Invalid)) }
        inv.remove(id);
        Ok(cost)
    }
    /// player buys from vendor
    ///
    /// Checks the offered coin `c` against the rated price of item `id`,
    /// then hands the item over and banks the payment.
    pub fn buy (&mut self, id: u32, c: Coin) -> Result<K,VendErr> {
        let mut rate = 100.0;
        //if let Some(_rate) = self.rate.get(&id) { rate=*_rate }
        if let Some(item) = self.inv.get(&id) {
            if let Some(_rate) = self.rate.get(&item.get_base().get_typeid().unwrap()) {
                rate = *_rate;
            }
            else { return Err(VendErr::Inv(InvErr::Invalid)) } //this is likely never to be an issue, since its in vendors possession and thus initialized
            let value = *item.get_base().get_value() as f32;
            if (c as f32) < ((rate/100.0) * value) { return Err(VendErr::Money) }
        }
        else { return Err(VendErr::Inv(InvErr::Invalid)) }
        let r = try!(self.inv.remove(id).map_err(VendErr::from_inv));
        self.money += c;
        Ok(r)
    }
    /// Adds `c` coins to the vendor's funds.
    pub fn add_money (&mut self, c:Coin) {
        self.money += c
    }
    /// Read-only view of the vendor's stock.
    pub fn get_inv(&self) -> &Inv<K> {
        &self.inv
    }
    /// Registers the price rate (percent of base value) for item type `T`.
    pub fn add_rate<T:Reflect+'static>(&mut self, rate:f32) {
        self.rate.insert(TypeId::of::<T>(),rate);
    }
}
|
// Enumerate all boards which are reachable from the initial board.
//
// Input: none
//
// Output:
// board depth
// ...
//
// board: hex representation of bit-board
// depth: the depth of the board
// 1: winning board (the player can capture the opponent's lion)
// 0: losing board (the opponent succeeded "try")
// -1: unknown board
#[macro_use]
extern crate precomp;
use std::cmp;
use precomp::Out;
use precomp::board::{Board, Result};
use precomp::board_collection::BoardSet;
/// Enumerates every board reachable from the initial position via DFS and
/// writes each board with its immediate status code (see file header).
fn main() {
    log!("Step 1: enumerate all reachable boards");
    // item_counts[0] = winning (r = 1), [1] = losing (r = 0),
    // [2] = unknown (r = -1); indexed via (1 - r).
    let mut item_counts = vec![0, 0, 0];
    let mut max_degree = 0;
    let mut out = Out::new();
    // DFS stack seeded with the normalized initial board.
    let mut boards = vec![Board::init().normalize()];
    let mut visited = BoardSet::new();
    // straightforward DFS
    while let Some(b) = boards.pop() {
        if visited.contains(b) { continue };
        visited.insert(b);
        let r = match b.next() {
            Result::Win => { 1 },
            Result::Lose => { 0 },
            Result::Unknown(bs) => {
                // Track the maximum branching factor and queue successors.
                max_degree = cmp::max(max_degree, bs.len());
                for b in bs { boards.push(b) }
                -1
            }
        };
        // Emit the board as hex followed by its status code.
        out!(out, "{:015x} {}\n", b.0, r);
        item_counts[(1 - r) as usize] += 1;
        // Periodic progress log every 10M visited boards.
        if visited.len() % 10000000 == 0 {
            let total = item_counts[0] + item_counts[1] + item_counts[2];
            log!("enumerating... (winning: {}, losing: {}, unknown: {}, total: {})",
                 item_counts[0], item_counts[1], item_counts[2], total);
        }
    }
    log!("Step 1: result");
    log!(" winning board#: {:9}", item_counts[0]);
    log!(" losing board# : {:9}", item_counts[1]);
    log!(" unknown board#: {:9}", item_counts[2]);
    log!(" total : {:9}", item_counts[0] + item_counts[1] + item_counts[2]);
    log!(" max degree: {}", max_degree);
    log!("Step 1: done!");
}
|
//! Logging utilities for the book data tools.
//!
//! This module contains support for initializing the logging infrastucture, and
//! for dynamically routing log messages based on whether there is an active
//! progress bar.
use std::fmt::Debug;
use std::marker::PhantomData;
use friendly::scalar;
use happylog::new_progress;
use indicatif::style::ProgressTracker;
use indicatif::{ProgressBar, ProgressState, ProgressStyle};
const DATA_PROGRESS_TMPL: &str =
"{prefix}: {wide_bar} {bytes}/{total_bytes} ({bytes_per_sec}, {elapsed} elapsed, ETA {eta})";
const ITEM_PROGRESS_TMPL: &str = "{prefix}: {wide_bar} {friendly_pos}/{friendly_len} ({friendly_rate}/s, {elapsed} elapsed, ETA {eta}) {msg}";
/// Extracts one numeric field from a progress bar's state so it can be
/// rendered in human-friendly form by [`Friendly`].
trait FieldExtract: Default + Send + Sync {
    fn extract(state: &ProgressState) -> f64;
}
/// A progress-template key that renders a numeric field of the progress
/// state via `friendly::scalar`.
///
/// `F` selects which field is read; the struct itself carries no data.
#[derive(Default)]
struct Friendly<F: FieldExtract + 'static> {
    _ghost: PhantomData<F>,
}
impl<F: FieldExtract + 'static> ProgressTracker for Friendly<F> {
    /// Clones the tracker; it is stateless, so a fresh default suffices.
    fn clone_box(&self) -> Box<dyn ProgressTracker> {
        Box::new(Self::default())
    }
    /// Stateless tracker: nothing to reset.
    fn reset(&mut self, _state: &indicatif::ProgressState, _now: std::time::Instant) {}
    /// Stateless tracker: nothing to update per tick.
    fn tick(&mut self, _state: &indicatif::ProgressState, _now: std::time::Instant) {}
    /// Renders the extracted field as a human-friendly scalar.
    fn write(&self, state: &indicatif::ProgressState, w: &mut dyn std::fmt::Write) {
        let rendered = scalar(F::extract(state));
        write!(w, "{}", rendered).expect("failed to write progress");
    }
}
/// Field extractor for the bar's current position.
#[derive(Default)]
struct Pos;
impl FieldExtract for Pos {
    fn extract(state: &ProgressState) -> f64 {
        state.pos() as f64
    }
}
/// Field extractor for the bar's total length (NaN when no length is set).
#[derive(Default)]
struct Len;
impl FieldExtract for Len {
    fn extract(state: &ProgressState) -> f64 {
        state.len().map(|x| x as f64).unwrap_or(f64::NAN)
    }
}
/// Field extractor for the bar's progress rate in steps per second.
#[derive(Default)]
struct Rate;
impl FieldExtract for Rate {
    fn extract(state: &ProgressState) -> f64 {
        state.per_sec()
    }
}
/// Create a progress bar for tracking data.
///
/// `len` is any integer convertible to `u64`; pass 0 when the total size
/// is not yet known.
pub fn data_progress<S>(len: S) -> ProgressBar
where
    S: TryInto<u64>,
    S::Error: Debug,
{
    let total: u64 = len.try_into().expect("invalid length");
    let style = ProgressStyle::default_bar()
        .template(DATA_PROGRESS_TMPL)
        .expect("template error");
    new_progress(total).with_style(style)
}
/// Create a progress bar for tracking items.
///
/// `len` is any integer convertible to `u64`; pass 0 if the size is
/// unknown at creation time. The bar's template renders position, length,
/// and rate through the human-friendly `Friendly` keys.
pub fn item_progress<S>(len: S, name: &str) -> ProgressBar
where
    S: TryInto<u64>,
    S::Error: Debug,
{
    let len: u64 = len.try_into().expect("invalid length");
    // Fix: the original wrapped `len` in `Some(..).filter(|l| *l > 0)` and
    // then immediately called `unwrap_or(0)`, which maps 0 -> 0 and
    // n -> n — a no-op round trip, now removed.
    let style = ProgressStyle::default_bar()
        .with_key("friendly_pos", Friendly::<Pos>::default())
        .with_key("friendly_len", Friendly::<Len>::default())
        .with_key("friendly_rate", Friendly::<Rate>::default())
        .template(ITEM_PROGRESS_TMPL)
        .expect("template error");
    new_progress(len)
        .with_style(style)
        .with_prefix(name.to_string())
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Implementation of getters on module structure.
use super::{DeclStorageDefExt, StorageLineTypeDef};
use proc_macro2::TokenStream;
use quote::quote;
/// Generate the getter functions declared by the storage definition.
///
/// Each generated getter simply forwards to the storage trait
/// implementation of the corresponding storage struct, carrying over the
/// storage line's doc attributes.
pub fn impl_getters(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStream {
    let mut getters = TokenStream::new();
    // Only storage lines that declared a getter produce code.
    for (get_fn, line) in def
        .storage_lines
        .iter()
        .filter_map(|line| line.getter.as_ref().map(|get_fn| (get_fn, line)))
    {
        let attrs = &line.doc_attrs;
        let storage_struct = &line.storage_struct;
        let storage_trait = &line.storage_trait;
        let getter = match &line.storage_type {
            StorageLineTypeDef::Simple(value) => {
                quote! {
                    #( #[ #attrs ] )*
                    pub fn #get_fn() -> #value {
                        <#storage_struct as #scrate::#storage_trait>::get()
                    }
                }
            },
            StorageLineTypeDef::Map(map) => {
                let key = &map.key;
                let value = &map.value;
                quote! {
                    #( #[ #attrs ] )*
                    pub fn #get_fn<K: #scrate::codec::EncodeLike<#key>>(key: K) -> #value {
                        <#storage_struct as #scrate::#storage_trait>::get(key)
                    }
                }
            },
            StorageLineTypeDef::DoubleMap(map) => {
                let key1 = &map.key1;
                let key2 = &map.key2;
                let value = &map.value;
                // Fix: double-map getters previously dropped the doc
                // attributes that the Simple and Map arms emit.
                quote! {
                    #( #[ #attrs ] )*
                    pub fn #get_fn<KArg1, KArg2>(k1: KArg1, k2: KArg2) -> #value
                    where
                        KArg1: #scrate::codec::EncodeLike<#key1>,
                        KArg2: #scrate::codec::EncodeLike<#key2>,
                    {
                        <#storage_struct as #scrate::#storage_trait>::get(k1, k2)
                    }
                }
            },
        };
        getters.extend(getter);
    }
    let module_struct = &def.module_struct;
    let module_impl = &def.module_impl;
    let where_clause = &def.where_clause;
    quote!(
        impl#module_impl #module_struct #where_clause {
            #getters
        }
    )
}
|
use std::ops::Add;
/// Produce the fixed sample data set used by `main`.
fn read_vec() -> Vec<i32> {
    [3, 4, 6, -19].to_vec()
}
/// Return a reference to the smallest element, or `None` if `v` is empty.
///
/// Takes `&[T]` rather than `&Vec<T>` so slices and arrays work too;
/// existing `&Vec<T>` callers still compile via deref coercion.
/// Only `PartialOrd` is required, so for floats containing `NaN` the
/// result follows the pairwise comparisons (NaN never compares less).
fn vec_min<T: PartialOrd>(v: &[T]) -> Option<&T> {
    v.iter().fold(None, |min, e| match min {
        // Keep the current minimum unless the new element is strictly smaller.
        Some(m) if !(e < m) => Some(m),
        _ => Some(e),
    })
}
/// Sum all elements, or `None` if `v` is empty.
///
/// Takes `&[T]` rather than `&Vec<T>`; existing `&Vec<T>` callers still
/// compile via deref coercion. Elements are accumulated left-to-right,
/// each new element on the left of `+`, matching the original loop.
fn vec_sum<T: Copy + Add<T, Output = T>>(v: &[T]) -> Option<T> {
    v.iter().copied().fold(None, |acc, e| {
        Some(match acc {
            None => e,
            Some(s) => e + s,
        })
    })
}
/// Demo entry point: print min and sum of an integer vector, then the
/// min of a float vector.
pub fn main() {
    let ints = read_vec();
    if let Some(val) = vec_min(&ints) {
        println!("Vec min {}", val);
    }
    if let Some(val) = vec_sum(&ints) {
        println!("Vec sum {}", val);
    }
    let floats = vec![0.3, 0.5, 4.3, -0.3, -0.5];
    if let Some(val) = vec_min(&floats) {
        println!("Vec min {}", val);
    }
}
|
use serde::{Deserialize, Serialize};
/// Input parameters for a beam/MU calculation.
///
/// NOTE(review): units inferred from the `Display` impl (energy in MeV,
/// SSD and Zref depth in cm, Zref dose in cGy) — confirm with the
/// producing code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalcParam {
    pub machine: String,
    pub applicator: String,
    pub energy: f64,
    pub ssd: f64,
    pub depth_zref: f64,
    pub dose_zref: f64,
    pub planned_beam_mu: f64,
    /// `usize::MAX` is the "not set" sentinel (see `has_fda_id`).
    pub fda_id: usize,
}
impl CalcParam {
    /// Create an "unset" parameter set.
    ///
    /// Sentinel values mark unset fields: empty strings, `0.0` for the
    /// numeric fields, and `usize::MAX` for `fda_id`. The `has_*`
    /// predicates test against exactly these sentinels.
    pub fn new() -> Self {
        Self {
            machine: String::new(),
            applicator: String::new(),
            energy: 0.0,
            ssd: 0.0,
            depth_zref: 0.0,
            dose_zref: 0.0,
            planned_beam_mu: 0.0,
            // `usize::MAX` replaces the deprecated `std::usize::MAX`;
            // same value, modern spelling.
            fda_id: usize::MAX,
        }
    }
    /// True once a machine name has been set.
    pub fn has_machine(&self) -> bool {
        !self.machine.is_empty()
    }
    /// True once an applicator has been set.
    pub fn has_applicator(&self) -> bool {
        !self.applicator.is_empty()
    }
    // NOTE(review): for the f64 fields a genuine value of exactly 0.0 is
    // indistinguishable from "unset"; the exact comparison is the
    // intended sentinel check.
    /// True once an energy has been set.
    pub fn has_energy(&self) -> bool {
        self.energy != 0.0
    }
    /// True once an SSD has been set.
    pub fn has_ssd(&self) -> bool {
        self.ssd != 0.0
    }
    /// True once a Zref depth has been set.
    pub fn has_depth_zref(&self) -> bool {
        self.depth_zref != 0.0
    }
    /// True once a Zref dose has been set.
    pub fn has_dose_zref(&self) -> bool {
        self.dose_zref != 0.0
    }
    /// True once a planned beam MU has been set.
    pub fn has_planned_beam_mu(&self) -> bool {
        self.planned_beam_mu != 0.0
    }
    /// True once an FDA ID has been set.
    pub fn has_fda_id(&self) -> bool {
        self.fda_id != usize::MAX
    }
}
impl Default for CalcParam {
fn default() -> Self {
CalcParam::new()
}
}
impl std::fmt::Display for CalcParam {
    // Multi-line human-readable dump. Note the format string prints
    // `fda_id` before `planned_beam_mu`, unlike the struct field order —
    // the argument list matches the format string, so output is correct.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Machine: {}\nApplicator: {}\nEnergy(MeV): {}\nSSD[cm]: {}\nZref(depth[cm]): {}\nZref(dose[cGy]): {}\nFDA ID: {}\nMU(plan): {}\n",
            self.machine, self.applicator, self.energy, self.ssd, self.depth_zref, self.dose_zref, self.fda_id, self.planned_beam_mu
        )
    }
}
|
extern crate jdbc;
pub mod app;
use app::zzzc::*;
use jdbc::java::sql::*;
use std::collections::HashMap;
// Demo of the jdbc wrapper: exec-style statements, then a manual
// statement/result-set walk.
fn main() {
    // NOTE(review): credentials are hard-coded (root, empty password) —
    // acceptable for a demo only.
    let db = db::MDB::new("mysql", "127.0.0.1:3306", "root", "", "xxx").unwrap();
    // exec: clear the table
    let mut sql = "DELETE FROM log";
    // No bind parameters are used by any statement below.
    let bind_array = HashMap::new();
    let (id_or_count, data) = db.exec(sql, &bind_array);
    print!("delete {} {:?}\n", id_or_count, data);
    // insert a row (auto-generated id)
    sql = "INSERT INTO log (id) VALUES (NULL)";
    let (id_or_count, data) = db.exec(sql, &bind_array);
    print!("insert {} {:?}\n", id_or_count, data);
    // select everything back
    sql = "SELECT * FROM log";
    let (id_or_count, data) = db.exec(sql, &bind_array);
    print!("select {} {:?}\n", id_or_count, data);
    // get the raw connection and execute a query yourself: start
    let db1 = db.get_db().unwrap();
    let statement = db1.create_statement().unwrap();
    let result_set = statement
        .execute_query("SELECT * FROM log LIMIT 1")
        .unwrap();
    let metadata = result_set.get_meta_data().unwrap();
    let _columns = metadata.get_column_count().unwrap();
    while result_set.next().unwrap() {
        print!(
            "row: {:?} => {:?}\n",
            result_set.get_string(1).unwrap(),
            result_set.get_string(2).unwrap()
        );
    }
    // NOTE(review): the results of close() are discarded here — if they
    // return Results, failures are silently ignored; confirm and handle.
    result_set.close();
    statement.close();
    // get the raw connection and execute a query yourself: end
    // finally close the db handle
    db.close();
}
|
//! ABI decoder.
use util::slice_data;
use {Word, Token, ErrorKind, Error, ResultExt, ParamType};
// Result of decoding one parameter: the token plus the offset of the next
// head slot to read.
struct DecodeResult {
    token: Token,
    new_offset: usize,
}
// Raw bytes pulled out of consecutive 32-byte words, plus the offset of
// the first word after them.
struct BytesTaken {
    bytes: Vec<u8>,
    new_offset: usize,
}
/// Interpret a big-endian 32-byte word as a `u32`.
///
/// Fails with `InvalidData` unless the 28 most-significant bytes are zero.
fn as_u32(slice: &Word) -> Result<u32, Error> {
    if slice[..28].iter().any(|byte| *byte != 0) {
        return Err(ErrorKind::InvalidData.into());
    }
    // Fold the trailing four bytes, most significant first.
    let value = slice[28..]
        .iter()
        .fold(0u32, |acc, byte| (acc << 8) | *byte as u32);
    Ok(value)
}
/// Interpret a 32-byte word as a bool.
///
/// Every byte except the last must be zero; the value is true iff the
/// last byte equals 1.
fn as_bool(slice: &Word) -> Result<bool, Error> {
    if slice[..31].iter().any(|byte| *byte != 0) {
        return Err(ErrorKind::InvalidData.into());
    }
    Ok(slice[31] == 1)
}
/// Decodes ABI compliant vector of bytes into vector of tokens described by types param.
pub fn decode(types: &[ParamType], data: &[u8]) -> Result<Vec<Token>, Error> {
    // Empty input is only acceptable when every requested type can
    // legitimately decode from zero bytes (e.g. zero-length fixed arrays).
    let is_empty_bytes_valid_encoding = types.iter().all(|t| t.is_empty_bytes_valid_encoding());
    if !is_empty_bytes_valid_encoding && data.is_empty() {
        bail!("please ensure the contract and method you're calling exist! failed to decode empty bytes. if you're using jsonrpc this is likely due to jsonrpc returning `0x` in case contract or method don't exist");
    }
    // Split the payload into 32-byte words, then decode each head entry
    // in declaration order, threading the head offset through.
    let slices = slice_data(data)?;
    let mut tokens = Vec::with_capacity(types.len());
    let mut offset = 0;
    for param in types {
        let res = decode_param(param, &slices, offset).chain_err(|| format!("Cannot decode {}", param))?;
        offset = res.new_offset;
        tokens.push(res.token);
    }
    Ok(tokens)
}
/// Fetch the word at `position`, or `InvalidData` when out of range.
fn peek(slices: &[Word], position: usize) -> Result<&Word, Error> {
    match slices.get(position) {
        Some(word) => Ok(word),
        None => Err(ErrorKind::InvalidData.into()),
    }
}
fn take_bytes(slices: &[Word], position: usize, len: usize) -> Result<BytesTaken, Error> {
let slices_len = (len + 31) / 32;
let mut bytes_slices = Vec::with_capacity(slices_len);
for i in 0..slices_len {
let slice = try!(peek(slices, position + i));
bytes_slices.push(slice);
}
let bytes = bytes_slices.into_iter()
.flat_map(|slice| slice.to_vec())
.take(len)
.collect();
let taken = BytesTaken {
bytes,
new_offset: position + slices_len,
};
Ok(taken)
}
fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
match *param {
ParamType::Address => {
let slice = try!(peek(slices, offset));
let mut address = [0u8; 20];
address.copy_from_slice(&slice[12..]);
let result = DecodeResult {
token: Token::Address(address.into()),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::Int(_) => {
let slice = try!(peek(slices, offset));
let result = DecodeResult {
token: Token::Int(slice.clone().into()),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::Uint(_) => {
let slice = try!(peek(slices, offset));
let result = DecodeResult {
token: Token::Uint(slice.clone().into()),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::Bool => {
let slice = try!(peek(slices, offset));
let b = try!(as_bool(slice));
let result = DecodeResult {
token: Token::Bool(b),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::FixedBytes(len) => {
let taken = try!(take_bytes(slices, offset, len));
let result = DecodeResult {
token: Token::FixedBytes(taken.bytes),
new_offset: taken.new_offset,
};
Ok(result)
},
ParamType::Bytes => {
let offset_slice = try!(peek(slices, offset));
let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
let len_slice = try!(peek(slices, len_offset));
let len = try!(as_u32(len_slice)) as usize;
let taken = try!(take_bytes(slices, len_offset + 1, len));
let result = DecodeResult {
token: Token::Bytes(taken.bytes),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::String => {
let offset_slice = try!(peek(slices, offset));
let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
let len_slice = try!(peek(slices, len_offset));
let len = try!(as_u32(len_slice)) as usize;
let taken = try!(take_bytes(slices, len_offset + 1, len));
let result = DecodeResult {
token: Token::String(try!(String::from_utf8(taken.bytes))),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::Array(ref t) => {
let offset_slice = try!(peek(slices, offset));
let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
let len_slice = try!(peek(slices, len_offset));
let len = try!(as_u32(len_slice)) as usize;
let sub_slices = &slices[len_offset + 1..];
let mut tokens = Vec::with_capacity(len);
let mut new_offset = 0;
for _ in 0..len {
let res = try!(decode_param(t, &sub_slices, new_offset));
new_offset = res.new_offset;
tokens.push(res.token);
}
let result = DecodeResult {
token: Token::Array(tokens),
new_offset: offset + 1,
};
Ok(result)
},
ParamType::FixedArray(ref t, len) => {
let mut tokens = Vec::with_capacity(len);
let new_offset = if param.is_dynamic() {
let offset_slice = peek(slices, offset)?;
let tail_offset = (as_u32(offset_slice)? / 32) as usize;
let slices = &slices[tail_offset..];
let mut new_offset = 0;
for _ in 0..len {
let res = decode_param(t, &slices, new_offset)?;
new_offset = res.new_offset;
tokens.push(res.token);
}
offset + 1
} else {
let mut new_offset = offset;
for _ in 0..len {
let res = decode_param(t, &slices, new_offset)?;
new_offset = res.new_offset;
tokens.push(res.token);
}
new_offset
};
let result = DecodeResult {
token: Token::FixedArray(tokens),
new_offset,
};
Ok(result)
}
}
}
#[cfg(test)]
mod tests {
    use {decode, ParamType};
    #[test]
    fn decode_from_empty_byte_slice() {
        // these can NOT be decoded from empty byte slice
        assert!(decode(&[ParamType::Address], &[]).is_err());
        assert!(decode(&[ParamType::Bytes], &[]).is_err());
        assert!(decode(&[ParamType::Int(0)], &[]).is_err());
        assert!(decode(&[ParamType::Int(1)], &[]).is_err());
        // Fix: these two lines previously duplicated the Int cases above,
        // leaving Uint completely untested.
        assert!(decode(&[ParamType::Uint(0)], &[]).is_err());
        assert!(decode(&[ParamType::Uint(1)], &[]).is_err());
        assert!(decode(&[ParamType::Bool], &[]).is_err());
        assert!(decode(&[ParamType::String], &[]).is_err());
        assert!(decode(&[ParamType::Array(Box::new(ParamType::Bool))], &[]).is_err());
        assert!(decode(&[ParamType::FixedBytes(1)], &[]).is_err());
        assert!(decode(&[ParamType::FixedArray(Box::new(ParamType::Bool), 1)], &[]).is_err());
        // these are the only ones that can be decoded from empty byte slice
        assert!(decode(&[ParamType::FixedBytes(0)], &[]).is_ok());
        assert!(decode(&[ParamType::FixedArray(Box::new(ParamType::Bool), 0)], &[]).is_ok());
    }
}
|
// Reader/writer plumbing for the WRP1BR register. The `FieldWriter`
// const parameters are the field width (7 bits) and the bit offset `O`.
#[doc = "Register `WRP1BR` reader"]
pub type R = crate::R<WRP1BR_SPEC>;
#[doc = "Register `WRP1BR` writer"]
pub type W = crate::W<WRP1BR_SPEC>;
#[doc = "Field `WRP1B_PSTRT` reader - WRP1B_PSTRT"]
pub type WRP1B_PSTRT_R = crate::FieldReader;
#[doc = "Field `WRP1B_PSTRT` writer - WRP1B_PSTRT"]
pub type WRP1B_PSTRT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `WRP1B_PEND` reader - WRP1B_PEND"]
pub type WRP1B_PEND_R = crate::FieldReader;
#[doc = "Field `WRP1B_PEND` writer - WRP1B_PEND"]
pub type WRP1B_PEND_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
impl R {
    #[doc = "Bits 0:6 - WRP1B_PSTRT"]
    #[inline(always)]
    pub fn wrp1b_pstrt(&self) -> WRP1B_PSTRT_R {
        // Mask the low 7 bits of the register value.
        WRP1B_PSTRT_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 16:22 - WRP1B_PEND"]
    #[inline(always)]
    pub fn wrp1b_pend(&self) -> WRP1B_PEND_R {
        // Shift down to bit 16, then mask 7 bits.
        WRP1B_PEND_R::new(((self.bits >> 16) & 0x7f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:6 - WRP1B_PSTRT"]
    #[inline(always)]
    #[must_use]
    pub fn wrp1b_pstrt(&mut self) -> WRP1B_PSTRT_W<WRP1BR_SPEC, 0> {
        WRP1B_PSTRT_W::new(self)
    }
    #[doc = "Bits 16:22 - WRP1B_PEND"]
    #[inline(always)]
    #[must_use]
    pub fn wrp1b_pend(&mut self) -> WRP1B_PEND_W<WRP1BR_SPEC, 16> {
        WRP1B_PEND_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Caller's safety contract: raw writes bypass the typed field
        // writers, so the caller must supply a valid register value.
        self.bits = bits;
        self
    }
}
#[doc = "Flash Bank 1 WRP area B address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wrp1br::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wrp1br::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct WRP1BR_SPEC;
impl crate::RegisterSpec for WRP1BR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`wrp1br::R`](R) reader structure"]
impl crate::Readable for WRP1BR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`wrp1br::W`](W) writer structure"]
impl crate::Writable for WRP1BR_SPEC {
    // No write-0/write-1-to-modify fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets WRP1BR to value 0xff00_ff00"]
impl crate::Resettable for WRP1BR_SPEC {
    const RESET_VALUE: Self::Ux = 0xff00_ff00;
}
|
use super::*;
// A reference-counted handle to a virtual register.
pub struct RegisterID {
    // Reference count, maintained by `ref_`/`deref`.
    rc: u32,
    virtual_register: VirtualRegister,
    // Marks registers used as temporaries (see `set_tmp`).
    is_tmp: bool,
}
impl RegisterID {
    /// Increment the reference count.
    pub fn ref_(&mut self) {
        self.rc += 1;
    }
    /// Decrement the reference count.
    ///
    /// Panics on underflow in all build profiles: an unmatched `deref`
    /// is a bookkeeping bug. The previous `self.rc -= 1` silently
    /// wrapped to `u32::MAX` in release builds, which would keep the
    /// register alive forever.
    pub fn deref(&mut self) {
        self.rc = self
            .rc
            .checked_sub(1)
            .expect("RegisterID::deref: reference count underflow");
    }
    /// Current reference count.
    pub fn rc(&self) -> u32 {
        self.rc
    }
    /// Whether this register was marked as a temporary.
    pub fn is_tmp(&self) -> bool {
        self.is_tmp
    }
    /// Re-point this ID at a different virtual register.
    pub fn set_index(&mut self, reg: VirtualRegister) {
        self.virtual_register = reg;
    }
    /// Build an ID from a raw virtual-register index.
    pub fn new_idx(i: i32) -> Self {
        Self {
            is_tmp: false,
            virtual_register: VirtualRegister {
                virtual_register: i,
            },
            rc: 0,
        }
    }
    /// Build an ID from an existing virtual register.
    pub fn new(x: VirtualRegister) -> Self {
        Self {
            is_tmp: false,
            rc: 0,
            virtual_register: x,
        }
    }
    /// Mark this register as a temporary.
    pub fn set_tmp(&mut self) {
        self.is_tmp = true;
    }
}
|
use nanorand::{ChaCha, RNG};
/// Create a ChaCha RNG, using the specified number of rounds
// NOTE(review): `rounds` is forwarded straight to `ChaCha::new`; whether
// unsupported round counts are rejected is up to nanorand — confirm.
// Also note: passing `[u8; 32]`/`[u8; 8]` by value across `extern "C"`
// relies on the C caller matching Rust's array-by-value ABI — verify on
// the C side.
#[no_mangle]
pub extern "C" fn new_chacha(rounds: u8) -> ChaCha {
    ChaCha::new(rounds)
}
/// Create a ChaCha RNG, using the specified number of rounds,
/// and the provided 256-bit key and 64-bit nonce
#[no_mangle]
pub extern "C" fn new_chacha_key(rounds: u8, key: [u8; 32], nonce: [u8; 8]) -> ChaCha {
    ChaCha::new_key(rounds, key, nonce)
}
/// Create a ChaCha RNG using 8 rounds
#[no_mangle]
pub extern "C" fn new_chacha8() -> ChaCha {
    ChaCha::new(8)
}
/// Create a ChaCha RNG, using 8 rounds,
/// and the provided 256-bit key and 64-bit nonce
#[no_mangle]
pub extern "C" fn new_chacha8_key(key: [u8; 32], nonce: [u8; 8]) -> ChaCha {
    ChaCha::new_key(8, key, nonce)
}
/// Create a ChaCha RNG, using 12 rounds
#[no_mangle]
pub extern "C" fn new_chacha12() -> ChaCha {
    ChaCha::new(12)
}
/// Create a ChaCha RNG, using 12 rounds,
/// and the provided 256-bit key and 64-bit nonce
#[no_mangle]
pub extern "C" fn new_chacha12_key(key: [u8; 32], nonce: [u8; 8]) -> ChaCha {
    ChaCha::new_key(12, key, nonce)
}
/// Create a ChaCha RNG, using 20 rounds
#[no_mangle]
pub extern "C" fn new_chacha20() -> ChaCha {
    ChaCha::new(20)
}
/// Create a ChaCha RNG, using 20 rounds,
/// and the provided 256-bit key and 64-bit nonce
#[no_mangle]
pub extern "C" fn new_chacha20_key(key: [u8; 32], nonce: [u8; 8]) -> ChaCha {
    ChaCha::new_key(20, key, nonce)
}
/// Get the raw 512-bit output from the provided RNG.
/// You need to free this yourself!
///
/// Fix: the previous implementation returned `out.as_mut_ptr()` of a
/// stack-local buffer — a dangling pointer the moment the function
/// returned. The buffer is now moved to the heap and intentionally
/// leaked; ownership passes to the caller.
///
/// NOTE(review): the allocation comes from Rust's allocator, so it must
/// be released by handing the pointer back to Rust (`Box::from_raw`),
/// not by C `free()` — expose a matching free function if C callers need
/// to release it.
#[no_mangle]
pub extern "C" fn chacha_next(rng: &mut ChaCha) -> *mut u8 {
    let out = rng.rand();
    Box::into_raw(Box::new(out)) as *mut u8
}
/// Generate a random 8-bit unsigned integer from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_next_u8(rng: &mut ChaCha) -> u8 {
    rng.generate()
}
/// Generate a random 16-bit unsigned integer from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_next_u16(rng: &mut ChaCha) -> u16 {
    rng.generate()
}
/// Generate a random 32-bit unsigned integer from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_next_u32(rng: &mut ChaCha) -> u32 {
    rng.generate()
}
/// Generate a random 64-bit unsigned integer from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_next_u64(rng: &mut ChaCha) -> u64 {
    rng.generate()
}
/// Generate a random boolean value from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_next_bool(rng: &mut ChaCha) -> bool {
    rng.generate()
}
/// Generate a random 8-bit unsigned integer within a specified range from the provided RNG
// NOTE(review): whether `upper` is inclusive or exclusive is defined by
// nanorand's `generate_range` — confirm before documenting for C callers.
#[no_mangle]
pub extern "C" fn chacha_range_u8(rng: &mut ChaCha, lower: u8, upper: u8) -> u8 {
    rng.generate_range(lower, upper)
}
/// Generate a random 16-bit unsigned integer within a specified range from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_range_u16(rng: &mut ChaCha, lower: u16, upper: u16) -> u16 {
    rng.generate_range(lower, upper)
}
/// Generate a random 32-bit unsigned integer within a specified range from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_range_u32(rng: &mut ChaCha, lower: u32, upper: u32) -> u32 {
    rng.generate_range(lower, upper)
}
/// Generate a random 64-bit unsigned integer within a specified range from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_range_u64(rng: &mut ChaCha, lower: u64, upper: u64) -> u64 {
    rng.generate_range(lower, upper)
}
/// Generate a random pointer-sized unsigned integer within a specified range from the provided RNG
#[no_mangle]
pub extern "C" fn chacha_range_usize(rng: &mut ChaCha, lower: usize, upper: usize) -> usize {
    rng.generate_range(lower, upper)
}
|
use crate::core::connection_string::*;
/// Fluent builder over a `ConnectionString`; `build` renders the set
/// components as a `;`-joined `Key=Value` string.
pub struct ConnectionStringBuilder<'a>(ConnectionString<'a>);
impl<'a> Default for ConnectionStringBuilder<'a> {
fn default() -> Self {
Self(ConnectionString::default())
}
}
impl<'a> ConnectionStringBuilder<'a> {
    /// Create a builder with every component unset.
    pub fn new() -> Self {
        Self(ConnectionString::default())
    }
    /// Render the connection string.
    ///
    /// Only components that were set are emitted, joined by `;` in the
    /// fixed order below (account, SAS, development storage, endpoint
    /// suffix, protocol, then per-service endpoints) — the unit tests
    /// rely on this ordering.
    pub fn build(&self) -> String {
        let mut kv_pairs = Vec::new();
        if let Some(account_name) = self.0.account_name {
            kv_pairs.push(format!("{}={}", ACCOUNT_NAME_KEY_NAME, account_name));
        }
        if let Some(account_key) = self.0.account_key {
            kv_pairs.push(format!("{}={}", ACCOUNT_KEY_KEY_NAME, account_key));
        }
        if let Some(sas) = self.0.sas {
            kv_pairs.push(format!("{}={}", SAS_KEY_NAME, sas));
        }
        if let Some(use_development_storage) = self.0.use_development_storage {
            kv_pairs.push(format!(
                "{}={}",
                USE_DEVELOPMENT_STORAGE_KEY_NAME, use_development_storage
            ));
        }
        if let Some(development_storage_proxy_uri) = self.0.development_storage_proxy_uri {
            kv_pairs.push(format!(
                "{}={}",
                DEVELOPMENT_STORAGE_PROXY_URI_KEY_NAME, development_storage_proxy_uri
            ));
        }
        if let Some(endpoint_suffix) = self.0.endpoint_suffix {
            kv_pairs.push(format!("{}={}", ENDPOINT_SUFFIX_KEY_NAME, endpoint_suffix));
        }
        if let Some(default_endpoints_protocol) = self.0.default_endpoints_protocol.as_ref() {
            kv_pairs.push(format!(
                "{}={}",
                DEFAULT_ENDPOINTS_PROTOCOL_KEY_NAME, default_endpoints_protocol
            ));
        }
        if let Some(blob_endpoint) = self.0.blob_endpoint {
            kv_pairs.push(format!("{}={}", BLOB_ENDPOINT_KEY_NAME, blob_endpoint));
        }
        if let Some(blob_secondary_endpoint) = self.0.blob_secondary_endpoint {
            kv_pairs.push(format!(
                "{}={}",
                BLOB_SECONDARY_ENDPOINT_KEY_NAME, blob_secondary_endpoint
            ));
        }
        if let Some(table_endpoint) = self.0.table_endpoint {
            kv_pairs.push(format!("{}={}", TABLE_ENDPOINT_KEY_NAME, table_endpoint));
        }
        if let Some(table_secondary_endpoint) = self.0.table_secondary_endpoint {
            kv_pairs.push(format!(
                "{}={}",
                TABLE_SECONDARY_ENDPOINT_KEY_NAME, table_secondary_endpoint
            ));
        }
        if let Some(queue_endpoint) = self.0.queue_endpoint {
            kv_pairs.push(format!("{}={}", QUEUE_ENDPOINT_KEY_NAME, queue_endpoint));
        }
        if let Some(queue_secondary_endpoint) = self.0.queue_secondary_endpoint {
            kv_pairs.push(format!(
                "{}={}",
                QUEUE_SECONDARY_ENDPOINT_KEY_NAME, queue_secondary_endpoint
            ));
        }
        if let Some(file_endpoint) = self.0.file_endpoint {
            kv_pairs.push(format!("{}={}", FILE_ENDPOINT_KEY_NAME, file_endpoint));
        }
        if let Some(file_secondary_endpoint) = self.0.file_secondary_endpoint {
            kv_pairs.push(format!(
                "{}={}",
                FILE_SECONDARY_ENDPOINT_KEY_NAME, file_secondary_endpoint
            ));
        }
        kv_pairs.join(";")
    }
    // The setters below borrow `self` for `'a` and return `&'a mut Self`,
    // so calls are chainable as in the tests.
    pub fn with_account_name(&'a mut self, account_name: &'a str) -> &'a mut Self {
        self.0.account_name = Some(account_name);
        self
    }
    pub fn with_account_key(&'a mut self, account_key: &'a str) -> &'a mut Self {
        self.0.account_key = Some(account_key);
        self
    }
    pub fn with_sas(&'a mut self, sas: &'a str) -> &'a mut Self {
        self.0.sas = Some(sas);
        self
    }
    pub fn with_endpoint_suffix(&'a mut self, endpoint_suffix: &'a str) -> &'a mut Self {
        self.0.endpoint_suffix = Some(endpoint_suffix);
        self
    }
    pub fn with_default_endpoints_protocol(
        &'a mut self,
        default_endpoints_protocol: EndpointProtocol,
    ) -> &'a mut Self {
        self.0.default_endpoints_protocol = Some(default_endpoints_protocol);
        self
    }
    pub fn with_use_development_storage(
        &'a mut self,
        use_development_storage: bool,
    ) -> &'a mut Self {
        self.0.use_development_storage = Some(use_development_storage);
        self
    }
    pub fn with_development_storage_proxy_uri(
        &'a mut self,
        development_storage_proxy_uri: &'a str,
    ) -> &'a mut Self {
        self.0.development_storage_proxy_uri = Some(development_storage_proxy_uri);
        self
    }
    pub fn with_blob_endpoint(&'a mut self, blob_endpoint: &'a str) -> &'a mut Self {
        self.0.blob_endpoint = Some(blob_endpoint);
        self
    }
    pub fn with_blob_secondary_endpoint(
        &'a mut self,
        blob_secondary_endpoint: &'a str,
    ) -> &'a mut Self {
        self.0.blob_secondary_endpoint = Some(blob_secondary_endpoint);
        self
    }
    pub fn with_table_endpoint(&'a mut self, table_endpoint: &'a str) -> &'a mut Self {
        self.0.table_endpoint = Some(table_endpoint);
        self
    }
    pub fn with_table_secondary_endpoint(
        &'a mut self,
        table_secondary_endpoint: &'a str,
    ) -> &'a mut Self {
        self.0.table_secondary_endpoint = Some(table_secondary_endpoint);
        self
    }
    pub fn with_queue_endpoint(&'a mut self, queue_endpoint: &'a str) -> &'a mut Self {
        self.0.queue_endpoint = Some(queue_endpoint);
        self
    }
    pub fn with_queue_secondary_endpoint(
        &'a mut self,
        queue_secondary_endpoint: &'a str,
    ) -> &'a mut Self {
        self.0.queue_secondary_endpoint = Some(queue_secondary_endpoint);
        self
    }
    pub fn with_file_endpoint(&'a mut self, file_endpoint: &'a str) -> &'a mut Self {
        self.0.file_endpoint = Some(file_endpoint);
        self
    }
    pub fn with_file_secondary_endpoint(
        &'a mut self,
        file_secondary_endpoint: &'a str,
    ) -> &'a mut Self {
        self.0.file_secondary_endpoint = Some(file_secondary_endpoint);
        self
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    #[test]
    fn it_builds_generic_connection_strings() {
        // An empty builder renders an empty string.
        assert_eq!(ConnectionStringBuilder::new().build(), "");
        assert_eq!(
            ConnectionStringBuilder::new()
                .with_account_name("a")
                .with_account_key("b")
                .build(),
            "AccountName=a;AccountKey=b"
        );
        assert_eq!(
            ConnectionStringBuilder::new()
                .with_account_name("a")
                .with_sas("b")
                .with_default_endpoints_protocol(EndpointProtocol::Https)
                .with_blob_endpoint("c")
                .build(),
            "AccountName=a;SharedAccessSignature=b;DefaultEndpointsProtocol=https;BlobEndpoint=c"
        );
    }
    #[test]
    fn it_builds_endpoints_with_development_storage() {
        assert_eq!(
            ConnectionStringBuilder::new()
                .with_use_development_storage(true)
                .with_development_storage_proxy_uri("a")
                .build(),
            "UseDevelopmentStorage=true;DevelopmentStorageProxyUri=a"
        );
        assert_eq!(
            ConnectionStringBuilder::new()
                .with_use_development_storage(false)
                .build(),
            "UseDevelopmentStorage=false"
        );
    }
    #[test]
    fn it_builds_all_endpoints() {
        // Pins the fixed emission order of `build` across all services.
        assert_eq!(
            ConnectionStringBuilder::new()
                .with_blob_endpoint("b1")
                .with_blob_secondary_endpoint("b2")
                .with_table_endpoint("t1")
                .with_table_secondary_endpoint("t2")
                .with_queue_endpoint("q1")
                .with_queue_secondary_endpoint("q2")
                .with_file_endpoint("f1")
                .with_file_secondary_endpoint("f2")
                .build(),
            "BlobEndpoint=b1;BlobSecondaryEndpoint=b2;TableEndpoint=t1;TableSecondaryEndpoint=t2;QueueEndpoint=q1;QueueSecondaryEndpoint=q2;FileEndpoint=f1;FileSecondaryEndpoint=f2"
        );
    }
}
|
fn main() {
proconio::input! {
a: i32,
b: i32,
}
let mut plugs = 1;
let mut ans = 0;
while b > plugs {
plugs += a - 1;
ans += 1;
}
println!("{}", ans);
} |
// Reader/writer plumbing for the APB1ENR1 peripheral-clock-enable register.
#[doc = "Register `APB1ENR1` reader"]
pub type R = crate::R<APB1ENR1_SPEC>;
#[doc = "Register `APB1ENR1` writer"]
pub type W = crate::W<APB1ENR1_SPEC>;
#[doc = "Field `TIM2EN` reader - TIM2 timer clock enable"]
pub type TIM2EN_R = crate::BitReader<TIM2EN_A>;
#[doc = "TIM2 timer clock enable\n\nValue on reset: 0"]
// Shared by every single-bit enable field of this register via the
// `pub use TIM2EN_R as ...` re-exports later in this file.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIM2EN_A {
    #[doc = "0: The selected clock is disabled"]
    Disabled = 0,
    #[doc = "1: The selected clock is enabled"]
    Enabled = 1,
}
impl From<TIM2EN_A> for bool {
#[inline(always)]
fn from(variant: TIM2EN_A) -> Self {
variant as u8 != 0
}
}
impl TIM2EN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIM2EN_A {
        if self.bits {
            TIM2EN_A::Enabled
        } else {
            TIM2EN_A::Disabled
        }
    }
    #[doc = "The selected clock is disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        matches!(self.variant(), TIM2EN_A::Disabled)
    }
    #[doc = "The selected clock is enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        matches!(self.variant(), TIM2EN_A::Enabled)
    }
}
#[doc = "Field `TIM2EN` writer - TIM2 timer clock enable"]
pub type TIM2EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM2EN_A>;
// Convenience setters for the enumerated values; shared by all enable
// fields via the `pub use TIM2EN_W as ...` re-exports later in this file.
impl<'a, REG, const O: u8> TIM2EN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "The selected clock is disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(TIM2EN_A::Disabled)
    }
    #[doc = "The selected clock is enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(TIM2EN_A::Enabled)
    }
}
#[doc = "Field `TIM3EN` reader - TIM3 timer clock enable"]
pub use TIM2EN_R as TIM3EN_R;
#[doc = "Field `TIM4EN` reader - TIM4 timer clock enable"]
pub use TIM2EN_R as TIM4EN_R;
#[doc = "Field `TIM5EN` reader - TIM5 timer clock enable"]
pub use TIM2EN_R as TIM5EN_R;
#[doc = "Field `TIM6EN` reader - TIM6 timer clock enable"]
pub use TIM2EN_R as TIM6EN_R;
#[doc = "Field `TIM7EN` reader - TIM7 timer clock enable"]
pub use TIM2EN_R as TIM7EN_R;
#[doc = "Field `RTCAPBEN` reader - RTC APB clock enable"]
pub use TIM2EN_R as RTCAPBEN_R;
#[doc = "Field `WWDGEN` reader - Window watchdog clock enable"]
pub use TIM2EN_R as WWDGEN_R;
#[doc = "Field `SPI2EN` reader - SPI2 clock enable"]
pub use TIM2EN_R as SPI2EN_R;
#[doc = "Field `SP3EN` reader - SPI3 clock enable"]
pub use TIM2EN_R as SP3EN_R;
#[doc = "Field `USART2EN` reader - USART2 clock enable"]
pub use TIM2EN_R as USART2EN_R;
#[doc = "Field `USART3EN` reader - USART3 clock enable"]
pub use TIM2EN_R as USART3EN_R;
#[doc = "Field `UART4EN` reader - UART4 clock enable"]
pub use TIM2EN_R as UART4EN_R;
#[doc = "Field `UART5EN` reader - UART5 clock enable"]
pub use TIM2EN_R as UART5EN_R;
#[doc = "Field `I2C1EN` reader - I2C1 clock enable"]
pub use TIM2EN_R as I2C1EN_R;
#[doc = "Field `I2C2EN` reader - I2C2 clock enable"]
pub use TIM2EN_R as I2C2EN_R;
#[doc = "Field `I2C3EN` reader - I2C3 clock enable"]
pub use TIM2EN_R as I2C3EN_R;
#[doc = "Field `CRSEN` reader - Clock Recovery System clock enable"]
pub use TIM2EN_R as CRSEN_R;
#[doc = "Field `PWREN` reader - Power interface clock enable"]
pub use TIM2EN_R as PWREN_R;
#[doc = "Field `DAC1EN` reader - DAC1 interface clock enable"]
pub use TIM2EN_R as DAC1EN_R;
#[doc = "Field `OPAMPEN` reader - OPAMP interface clock enable"]
pub use TIM2EN_R as OPAMPEN_R;
#[doc = "Field `LPTIM1EN` reader - Low power timer 1 clock enable"]
pub use TIM2EN_R as LPTIM1EN_R;
#[doc = "Field `TIM3EN` writer - TIM3 timer clock enable"]
pub use TIM2EN_W as TIM3EN_W;
#[doc = "Field `TIM4EN` writer - TIM4 timer clock enable"]
pub use TIM2EN_W as TIM4EN_W;
#[doc = "Field `TIM5EN` writer - TIM5 timer clock enable"]
pub use TIM2EN_W as TIM5EN_W;
#[doc = "Field `TIM6EN` writer - TIM6 timer clock enable"]
pub use TIM2EN_W as TIM6EN_W;
#[doc = "Field `TIM7EN` writer - TIM7 timer clock enable"]
pub use TIM2EN_W as TIM7EN_W;
#[doc = "Field `RTCAPBEN` writer - RTC APB clock enable"]
pub use TIM2EN_W as RTCAPBEN_W;
#[doc = "Field `WWDGEN` writer - Window watchdog clock enable"]
pub use TIM2EN_W as WWDGEN_W;
#[doc = "Field `SPI2EN` writer - SPI2 clock enable"]
pub use TIM2EN_W as SPI2EN_W;
#[doc = "Field `SP3EN` writer - SPI3 clock enable"]
pub use TIM2EN_W as SP3EN_W;
#[doc = "Field `USART2EN` writer - USART2 clock enable"]
pub use TIM2EN_W as USART2EN_W;
#[doc = "Field `USART3EN` writer - USART3 clock enable"]
pub use TIM2EN_W as USART3EN_W;
#[doc = "Field `UART4EN` writer - UART4 clock enable"]
pub use TIM2EN_W as UART4EN_W;
#[doc = "Field `UART5EN` writer - UART5 clock enable"]
pub use TIM2EN_W as UART5EN_W;
#[doc = "Field `I2C1EN` writer - I2C1 clock enable"]
pub use TIM2EN_W as I2C1EN_W;
#[doc = "Field `I2C2EN` writer - I2C2 clock enable"]
pub use TIM2EN_W as I2C2EN_W;
#[doc = "Field `I2C3EN` writer - I2C3 clock enable"]
pub use TIM2EN_W as I2C3EN_W;
#[doc = "Field `CRSEN` writer - Clock Recovery System clock enable"]
pub use TIM2EN_W as CRSEN_W;
#[doc = "Field `PWREN` writer - Power interface clock enable"]
pub use TIM2EN_W as PWREN_W;
#[doc = "Field `DAC1EN` writer - DAC1 interface clock enable"]
pub use TIM2EN_W as DAC1EN_W;
#[doc = "Field `OPAMPEN` writer - OPAMP interface clock enable"]
pub use TIM2EN_W as OPAMPEN_W;
#[doc = "Field `LPTIM1EN` writer - Low power timer 1 clock enable"]
pub use TIM2EN_W as LPTIM1EN_W;
impl R {
    // Each accessor extracts one enable bit from the 32-bit register
    // value; the shift amount is the field's bit position. Gaps in the
    // sequence (bits 6-9, 12-13, 16, 25-27) are not exposed here.
    #[doc = "Bit 0 - TIM2 timer clock enable"]
    #[inline(always)]
    pub fn tim2en(&self) -> TIM2EN_R {
        TIM2EN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TIM3 timer clock enable"]
    #[inline(always)]
    pub fn tim3en(&self) -> TIM3EN_R {
        TIM3EN_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TIM4 timer clock enable"]
    #[inline(always)]
    pub fn tim4en(&self) -> TIM4EN_R {
        TIM4EN_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - TIM5 timer clock enable"]
    #[inline(always)]
    pub fn tim5en(&self) -> TIM5EN_R {
        TIM5EN_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - TIM6 timer clock enable"]
    #[inline(always)]
    pub fn tim6en(&self) -> TIM6EN_R {
        TIM6EN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - TIM7 timer clock enable"]
    #[inline(always)]
    pub fn tim7en(&self) -> TIM7EN_R {
        TIM7EN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 10 - RTC APB clock enable"]
    #[inline(always)]
    pub fn rtcapben(&self) -> RTCAPBEN_R {
        RTCAPBEN_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Window watchdog clock enable"]
    #[inline(always)]
    pub fn wwdgen(&self) -> WWDGEN_R {
        WWDGEN_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 14 - SPI2 clock enable"]
    #[inline(always)]
    pub fn spi2en(&self) -> SPI2EN_R {
        SPI2EN_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - SPI3 clock enable"]
    #[inline(always)]
    pub fn sp3en(&self) -> SP3EN_R {
        SP3EN_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 17 - USART2 clock enable"]
    #[inline(always)]
    pub fn usart2en(&self) -> USART2EN_R {
        USART2EN_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - USART3 clock enable"]
    #[inline(always)]
    pub fn usart3en(&self) -> USART3EN_R {
        USART3EN_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - UART4 clock enable"]
    #[inline(always)]
    pub fn uart4en(&self) -> UART4EN_R {
        UART4EN_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - UART5 clock enable"]
    #[inline(always)]
    pub fn uart5en(&self) -> UART5EN_R {
        UART5EN_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - I2C1 clock enable"]
    #[inline(always)]
    pub fn i2c1en(&self) -> I2C1EN_R {
        I2C1EN_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - I2C2 clock enable"]
    #[inline(always)]
    pub fn i2c2en(&self) -> I2C2EN_R {
        I2C2EN_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - I2C3 clock enable"]
    #[inline(always)]
    pub fn i2c3en(&self) -> I2C3EN_R {
        I2C3EN_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - Clock Recovery System clock enable"]
    #[inline(always)]
    pub fn crsen(&self) -> CRSEN_R {
        CRSEN_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 28 - Power interface clock enable"]
    #[inline(always)]
    pub fn pwren(&self) -> PWREN_R {
        PWREN_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - DAC1 interface clock enable"]
    #[inline(always)]
    pub fn dac1en(&self) -> DAC1EN_R {
        DAC1EN_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - OPAMP interface clock enable"]
    #[inline(always)]
    pub fn opampen(&self) -> OPAMPEN_R {
        OPAMPEN_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Low power timer 1 clock enable"]
    #[inline(always)]
    pub fn lptim1en(&self) -> LPTIM1EN_R {
        LPTIM1EN_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// svd2rust-generated write proxies for APB1ENR1: each method returns a
// one-bit writer parameterised by the field's bit offset (const generic).
impl W {
#[doc = "Bit 0 - TIM2 timer clock enable"]
#[inline(always)]
#[must_use]
pub fn tim2en(&mut self) -> TIM2EN_W<APB1ENR1_SPEC, 0> {
TIM2EN_W::new(self)
}
#[doc = "Bit 1 - TIM3 timer clock enable"]
#[inline(always)]
#[must_use]
pub fn tim3en(&mut self) -> TIM3EN_W<APB1ENR1_SPEC, 1> {
TIM3EN_W::new(self)
}
#[doc = "Bit 2 - TIM4 timer clock enable"]
#[inline(always)]
#[must_use]
pub fn tim4en(&mut self) -> TIM4EN_W<APB1ENR1_SPEC, 2> {
TIM4EN_W::new(self)
}
#[doc = "Bit 3 - TIM5 timer clock enable"]
#[inline(always)]
#[must_use]
pub fn tim5en(&mut self) -> TIM5EN_W<APB1ENR1_SPEC, 3> {
TIM5EN_W::new(self)
}
#[doc = "Bit 4 - TIM6 timer clock enable"]
#[inline(always)]
#[must_use]
pub fn tim6en(&mut self) -> TIM6EN_W<APB1ENR1_SPEC, 4> {
TIM6EN_W::new(self)
}
#[doc = "Bit 5 - TIM7 timer clock enable"]
#[inline(always)]
#[must_use]
pub fn tim7en(&mut self) -> TIM7EN_W<APB1ENR1_SPEC, 5> {
TIM7EN_W::new(self)
}
#[doc = "Bit 10 - RTC APB clock enable"]
#[inline(always)]
#[must_use]
pub fn rtcapben(&mut self) -> RTCAPBEN_W<APB1ENR1_SPEC, 10> {
RTCAPBEN_W::new(self)
}
#[doc = "Bit 11 - Window watchdog clock enable"]
#[inline(always)]
#[must_use]
pub fn wwdgen(&mut self) -> WWDGEN_W<APB1ENR1_SPEC, 11> {
WWDGEN_W::new(self)
}
#[doc = "Bit 14 - SPI2 clock enable"]
#[inline(always)]
#[must_use]
pub fn spi2en(&mut self) -> SPI2EN_W<APB1ENR1_SPEC, 14> {
SPI2EN_W::new(self)
}
// NOTE(review): `sp3en` (bit 15, SPI3) appears to be an SVD naming typo;
// kept for API compatibility with existing callers.
#[doc = "Bit 15 - SPI3 clock enable"]
#[inline(always)]
#[must_use]
pub fn sp3en(&mut self) -> SP3EN_W<APB1ENR1_SPEC, 15> {
SP3EN_W::new(self)
}
#[doc = "Bit 17 - USART2 clock enable"]
#[inline(always)]
#[must_use]
pub fn usart2en(&mut self) -> USART2EN_W<APB1ENR1_SPEC, 17> {
USART2EN_W::new(self)
}
#[doc = "Bit 18 - USART3 clock enable"]
#[inline(always)]
#[must_use]
pub fn usart3en(&mut self) -> USART3EN_W<APB1ENR1_SPEC, 18> {
USART3EN_W::new(self)
}
#[doc = "Bit 19 - UART4 clock enable"]
#[inline(always)]
#[must_use]
pub fn uart4en(&mut self) -> UART4EN_W<APB1ENR1_SPEC, 19> {
UART4EN_W::new(self)
}
#[doc = "Bit 20 - UART5 clock enable"]
#[inline(always)]
#[must_use]
pub fn uart5en(&mut self) -> UART5EN_W<APB1ENR1_SPEC, 20> {
UART5EN_W::new(self)
}
#[doc = "Bit 21 - I2C1 clock enable"]
#[inline(always)]
#[must_use]
pub fn i2c1en(&mut self) -> I2C1EN_W<APB1ENR1_SPEC, 21> {
I2C1EN_W::new(self)
}
#[doc = "Bit 22 - I2C2 clock enable"]
#[inline(always)]
#[must_use]
pub fn i2c2en(&mut self) -> I2C2EN_W<APB1ENR1_SPEC, 22> {
I2C2EN_W::new(self)
}
#[doc = "Bit 23 - I2C3 clock enable"]
#[inline(always)]
#[must_use]
pub fn i2c3en(&mut self) -> I2C3EN_W<APB1ENR1_SPEC, 23> {
I2C3EN_W::new(self)
}
#[doc = "Bit 24 - Clock Recovery System clock enable"]
#[inline(always)]
#[must_use]
pub fn crsen(&mut self) -> CRSEN_W<APB1ENR1_SPEC, 24> {
CRSEN_W::new(self)
}
#[doc = "Bit 28 - Power interface clock enable"]
#[inline(always)]
#[must_use]
pub fn pwren(&mut self) -> PWREN_W<APB1ENR1_SPEC, 28> {
PWREN_W::new(self)
}
#[doc = "Bit 29 - DAC1 interface clock enable"]
#[inline(always)]
#[must_use]
pub fn dac1en(&mut self) -> DAC1EN_W<APB1ENR1_SPEC, 29> {
DAC1EN_W::new(self)
}
#[doc = "Bit 30 - OPAMP interface clock enable"]
#[inline(always)]
#[must_use]
pub fn opampen(&mut self) -> OPAMPEN_W<APB1ENR1_SPEC, 30> {
OPAMPEN_W::new(self)
}
#[doc = "Bit 31 - Low power timer 1 clock enable"]
#[inline(always)]
#[must_use]
pub fn lptim1en(&mut self) -> LPTIM1EN_W<APB1ENR1_SPEC, 31> {
LPTIM1EN_W::new(self)
}
// Safety: caller must ensure the raw value written is valid for this
// register (svd2rust marks raw writes unsafe by convention).
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
// Marker type describing the APB1ENR1 register for the generic register API:
// 32 bits wide, readable, writable (no write-1/write-0-to-modify fields),
// and resetting to 0.
#[doc = "APB1ENR1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1enr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1enr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB1ENR1_SPEC;
impl crate::RegisterSpec for APB1ENR1_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`apb1enr1::R`](R) reader structure"]
impl crate::Readable for APB1ENR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb1enr1::W`](W) writer structure"]
impl crate::Writable for APB1ENR1_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB1ENR1 to value 0"]
impl crate::Resettable for APB1ENR1_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
/// Returns the byte offset of the first occurrence of `needle` in
/// `haystack`, or `None` if the character does not appear.
pub fn find(haystack: &str, needle: char) -> Option<usize> {
    haystack
        .char_indices()
        .find(|&(_, c)| c == needle)
        .map(|(offset, _)| offset)
}
/// Returns everything after the first `.` in `file_name`, or `None` when
/// there is no dot (explicit-propagation variant using `?`).
pub fn find_extension_smpl(file_name: &str) -> Option<&str> {
    let dot = find(file_name, '.')?;
    Some(&file_name[dot + 1..])
}
/// Applies `f` to the contained value, if any — a hand-rolled stand-in for
/// `Option::map`, kept with the same signature for demonstration purposes.
fn local_map<F, T, A>(option: Option<T>, f: F) -> Option<A>
where
    F: FnOnce(T) -> A,
{
    option.map(f)
}
/// Returns everything after the first `.` in `file_name`, or `None` when
/// there is no dot (explicit-match variant).
pub fn find_extension_map(file_name: &str) -> Option<&str> {
    match find(file_name, '.') {
        Some(i) => Some(&file_name[i + 1..]),
        None => None,
    }
}
/// Returns everything after the first `.` in `file_name`, or `None` when
/// there is no dot, routed through the local `local_map` helper.
pub fn find_extension_local_map(file_name: &str) -> Option<&str> {
    local_map(find(file_name, '.'), |i| &file_name[i + 1..])
}
|
/// Demonstrates closure environment capture: `equal_to_x` captures `x` by
/// reference. This function deliberately panics on the final assertion.
pub fn run() {
let x = 4;
let equal_to_x = |z| z == x;
// Works fine because both values are equal
//assert!(equal_to_x(4));
// Panics with the custom message because 6 != 4 (intentional demo)
assert!(equal_to_x(6), "My Code Error");
}
use super::context::Context;
use super::error::Result;
use super::types::ToDuktape;
use duktape_sys::*;
use std::ffi::{c_void, CString};
// Hidden property key under which the boxed callable is stashed on the JS
// function object; the leading \xFF byte marks it as a duktape-internal key.
static KEY: &'static [u8] = b"\xFFptr";
/// A Callable is callable from js
pub trait Callable {
/// Specify how many arguments the function accepts
fn argc(&self) -> i32 {
0
}
/// Call the fn with the context which the callable was registered
fn call(&self, ctx: &Context) -> Result<i32>;
}
/// C trampoline invoked by duktape for every function registered via
/// `push_callable`: recovers the boxed `Callable` from the hidden property,
/// invokes it, and converts an `Err` into a duktape error.
unsafe extern "C" fn call(ctx: *mut duk_context) -> duk_ret_t {
duk_push_current_function(ctx);
duk_get_prop_string(ctx, -1, KEY.as_ptr() as *const i8);
let mut c = Context::with(ctx);
// Temporarily re-own the box so the trait object can be invoked; ownership
// is released again via Box::into_raw below so the finalizer can free it.
let ptr = duk_get_pointer(ctx, -1) as *mut Box<dyn Callable>;
let pp = Box::from_raw(ptr);
duk_pop_2(ctx);
let ret = match pp.call(&mut c) {
Err(e) => {
// NOTE(review): `"".as_ptr()` is not NUL-terminated — if duk_error_raw
// reads it as a C string this is UB; confirm against the duktape API.
// The CString temporary lives to the end of this statement, so the
// message pointer is valid for the duration of the call.
duk_error_raw(
ctx,
DUK_ERR_ERROR as i32,
"".as_ptr() as *const i8,
0,
CString::new(format!("{}", e.0)).unwrap().as_ptr(),
);
-1
}
Ok(ret) => ret,
};
// It should not be dropped: the finalizer (`dtor`) owns the allocation.
Box::into_raw(pp);
return ret;
}
/// Finalizer installed on the JS function object: when duktape garbage
/// collects it, reclaim and drop the boxed `Callable` allocated in
/// `push_callable`.
unsafe extern "C" fn dtor(ctx: *mut duk_context) -> duk_ret_t {
duk_get_prop_string(ctx, -1, KEY.as_ptr() as *const i8);
let ptr = duk_get_pointer(ctx, -1) as *mut Box<dyn Callable>;
duk_pop(ctx);
duk_del_prop_string(ctx, -1, KEY.as_ptr() as *const i8);
// Re-box and drop: releases the heap allocation made in `push_callable`.
let pp = Box::from_raw(ptr);
drop(pp);
return 0;
}
/// Pushes `callable` onto the duktape value stack as a native JS function.
///
/// The callable is double-boxed (`Box<Box<dyn Callable>>`) so the fat trait
/// object pointer can be stored as a single thin duktape pointer property;
/// `dtor` is installed as the finalizer to reclaim it on GC.
pub(crate) unsafe fn push_callable(context: &Context, callable: Box<dyn Callable>) {
duk_push_c_function(context.inner, Some(call), callable.argc());
let m = Box::new(callable);
duk_push_pointer(context.inner, Box::into_raw(m) as *mut c_void);
duk_put_prop_string(context.inner, -2, KEY.as_ptr() as *const i8);
duk_push_c_function(context.inner, Some(dtor), 1);
duk_set_finalizer(context.inner, -2);
}
/// A `(argc, closure)` pair is callable: the first element declares the
/// accepted argument count, the second is invoked on `call`.
impl<T: Fn(&Context) -> Result<i32>> Callable for (i32, T) {
    fn argc(&self) -> i32 {
        self.0
    }
    fn call(&self, ctx: &Context) -> Result<i32> {
        (self.1)(ctx)
    }
}
/// Any bare closure is callable with an argument count of zero.
impl<T: Fn(&Context) -> Result<i32>> Callable for T {
    fn argc(&self) -> i32 {
        0
    }
    fn call(&self, ctx: &Context) -> Result<i32> {
        (self)(ctx)
    }
}
// impl<T: Callable> ToDuktape for T {
// }
/// Converts a closure into a JS function on the context's value stack.
impl<T: 'static + Fn(&Context) -> Result<i32>> ToDuktape for T {
    fn to_context(self, ctx: &Context) -> Result<()> {
        // Unsized coercion turns Box<T> into Box<dyn Callable> at the call.
        unsafe { push_callable(ctx, Box::new(self)) };
        Ok(())
    }
}
/// Converts an `(argc, closure)` pair into a JS function whose declared
/// argument count is the pair's first element.
impl<T: 'static + Fn(&Context) -> Result<i32>> ToDuktape for (i32, T) {
    fn to_context(self, ctx: &Context) -> Result<()> {
        unsafe { push_callable(ctx, Box::new(self)) };
        Ok(())
    }
}
|
use std::collections::HashSet;
use crate::solutions::Solution;
use crate::util::int;
/// Advent of Code 2016, day 1: follow "R2, L3, ..." instructions on a grid.
pub struct Day1 {}
impl Solution for Day1 {
// Part 1: apply every movement in full and print the Manhattan distance
// of the final position from the origin.
fn part1(&self, input: String) {
let mut walker = Walker::new();
for (turn, magnitude) in movements(&input) {
walker.turn(turn);
walker.walk(magnitude);
}
println!("{}", walker.manhattan_dist());
}
// Part 2: walk one step at a time and stop at the first position visited
// twice. The position is recorded *before* each step, so the origin counts
// as visited and the walker halts exactly on the first repeat.
fn part2(&self, input: String) {
let mut walker = Walker::new();
let mut visited: HashSet<(i32, i32)> = HashSet::new();
'outer: for (turn, magnitude) in movements(&input) {
walker.turn(turn);
for _ in 0..magnitude {
if !visited.insert((walker.x, walker.y)) {
// already present
break 'outer;
}
walker.walk(1);
}
}
println!("{}", walker.manhattan_dist());
}
}
/// Parses the comma-separated instruction list into `(turn, distance)`
/// pairs, e.g. `"R2, L3"` yields `("R", 2)` then `("L", 3)`.
fn movements(input: &str) -> impl Iterator<Item = (&str, i32)> {
    input.split(", ").map(|instruction| {
        let (turn, distance) = instruction.split_at(1);
        (turn, int(distance).unwrap())
    })
}
/// Position and heading of the walker on the street grid.
struct Walker {
// east/west offset from the origin (east is positive)
x: i32,
// north/south offset from the origin (north is positive)
y: i32,
// current heading
direction: Direction,
}
impl Walker {
    /// Starts at the origin, facing north.
    fn new() -> Walker {
        Walker {
            x: 0,
            y: 0,
            direction: Direction::North,
        }
    }
    /// Rotates 90° right for "R", left for "L"; panics on anything else.
    fn turn(&mut self, turn: &str) {
        self.direction = match turn {
            "R" => self.direction.right(),
            "L" => self.direction.left(),
            _ => panic!("Bad direction."),
        };
    }
    /// Moves `magnitude` units in the current heading.
    fn walk(&mut self, magnitude: i32) {
        use Direction::*;
        let (dx, dy) = match self.direction {
            East => (magnitude, 0),
            West => (-magnitude, 0),
            North => (0, magnitude),
            South => (0, -magnitude),
        };
        self.x += dx;
        self.y += dy;
    }
    /// Manhattan (taxicab) distance from the origin.
    fn manhattan_dist(&self) -> i32 {
        self.x.abs() + self.y.abs()
    }
}
/// Compass heading; turning is a 90° rotation.
#[derive(Copy, Clone)]
enum Direction {
    North,
    East,
    South,
    West,
}
impl Direction {
    /// Heading after a 90° clockwise turn.
    fn right(self) -> Direction {
        match self {
            Direction::North => Direction::East,
            Direction::East => Direction::South,
            Direction::South => Direction::West,
            Direction::West => Direction::North,
        }
    }
    /// Heading after a 90° counter-clockwise turn.
    fn left(self) -> Direction {
        match self {
            Direction::North => Direction::West,
            Direction::West => Direction::South,
            Direction::South => Direction::East,
            Direction::East => Direction::North,
        }
    }
}
|
// SPDX-License-Identifier: Apache-2.0
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use cipher_bench::{bench_block, BlockCipherAlgorithm};
use openssl::Aes128CbcCtxBuilder;
use std::convert::TryInto;
/// Criterion benchmark: AES-128-CBC block encryption via the OpenSSL-backed
/// builder, measured at increasing input sizes (ITER steps of STEP bytes).
pub fn block_ciphers(c: &mut Criterion) {
let mut group = c.benchmark_group("nettle/block-ciphers");
let parameters: Vec<usize> = (1..=cipher_bench::ITER).collect();
for i in parameters {
// Report throughput in bytes so criterion prints MB/s per size.
group.throughput(Throughput::Bytes(
(i * cipher_bench::STEP).try_into().unwrap(),
));
let builder = Aes128CbcCtxBuilder::new();
bench_block(&mut group, BlockCipherAlgorithm::Aes128Cbc, builder, i);
}
group.finish();
}
// Criterion entry points: register the group and generate `main`.
criterion_group!(benches, block_ciphers);
criterion_main!(benches);
|
use crate::uses::*;
use core::slice;
use core::mem::transmute;
use crate::util::misc::phys_to_virt_usize;
pub mod madt;
pub mod hpet;
use madt::Madt;
use hpet::Hpet;
// Known ACPI System Description Table kinds, identified by the 4-byte
// signature in the table header (see `SdtHeader::sdt_type`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SdtType {
// root system descriptor table
Rsdt,
// extended system descriptor table (64 bit version of rsdp)
Xsdt,
// multiple APIC description table
Madt,
// High precision event timer table
Hpet,
}
// A typed reference to a parsed ACPI table living in mapped firmware memory.
#[derive(Debug, Clone, Copy)]
pub enum AcpiTable<'a> {
Rsdt(&'a Rsdt),
Madt(&'a Madt),
Hpet(&'a Hpet),
}
impl AcpiTable<'_> {
    /// Returns the inner RSDT reference if this table is an RSDT.
    pub fn assume_rsdt(&self) -> Option<&Rsdt> {
        match self {
            Self::Rsdt(rsdt) => Some(rsdt),
            _ => None,
        }
    }
    /// Returns the inner MADT reference if this table is a MADT.
    pub fn assume_madt(&self) -> Option<&Madt> {
        match self {
            Self::Madt(madt) => Some(madt),
            _ => None,
        }
    }
    /// Returns the inner HPET reference if this table is an HPET table.
    pub fn assume_hpet(&self) -> Option<&Hpet> {
        match self {
            Self::Hpet(hpet) => Some(hpet),
            _ => None,
        }
    }
}
// Common header shared by all ACPI system description tables (ACPI spec,
// "System Description Table Header"). `packed` matches the firmware layout.
#[repr(C, packed)]
#[derive(Debug, Clone, Copy)]
pub struct SdtHeader {
// 4-byte ASCII table signature, e.g. b"APIC" or b"RSDT"
signature: [u8; 4],
// total table length in bytes, including this header
size: u32,
revision: u8,
// all bytes of the table must sum to 0 mod 256 (see `validate`)
checksum: u8,
oemid: [u8; 6],
oem_table_id: [u8; 8],
oem_revision: u32,
creator_id: u32,
creator_revision: u32,
}
impl SdtHeader {
/// Total length in bytes of the table, including this header.
pub fn size(&self) -> usize {
self.size as usize
}
/// Length in bytes of the payload following the header.
// NOTE(review): underflows (panics in debug) if a corrupt table reports a
// size smaller than the header — confirm callers validate first.
pub fn data_size(&self) -> usize {
self.size() - size_of::<Self>()
}
/// Pointer to the first payload byte, immediately after the header.
pub fn data_ptr<T>(&self) -> *const T {
unsafe {
(self as *const Self).add(1) as *const T
}
}
/// Views the payload as a slice of `T`; panics if the payload size is not
/// an exact multiple of `size_of::<T>()`.
pub fn data<T>(&self) -> &[T] {
if self.data_size() % size_of::<T>() != 0 {
panic!("tried to get data slice of ACPI table and the size of elements in the slice did not evenly divide the size of the data");
}
unsafe {
slice::from_raw_parts(self.data_ptr(), self.data_size() / size_of::<T>())
}
}
// safety: length must be valid
/// Verifies the ACPI checksum: every byte of the table summed mod 256
/// must equal zero.
pub unsafe fn validate(&self) -> bool {
let mut sum: usize = 0;
let slice = slice::from_raw_parts(self as *const _ as *const u8, self.size());
for n in slice {
sum += *n as usize;
}
sum % 0x100 == 0
}
/// Maps the 4-byte signature to a known `SdtType`, or `None` for
/// unrecognised tables.
pub fn sdt_type(&self) -> Option<SdtType> {
let s = &self.signature;
// can't use `match` here: the comparison is against runtime byte slices
Some(if s == "APIC".as_bytes() {
SdtType::Madt
} else if s == "RSDT".as_bytes() {
SdtType::Rsdt
} else if s == "XSDT".as_bytes() {
SdtType::Xsdt
} else if s == "HPET".as_bytes() {
SdtType::Hpet
} else {
// TODO: add new acpi table types here
return None;
})
}
/// Reinterprets this header as a typed table reference.
///
/// `Rsdt` is `repr(transparent)` over `SdtHeader`, so that transmute is
/// sound; Madt/Hpet are assumed to begin with the header — TODO confirm
/// their layouts in the `madt`/`hpet` modules.
pub unsafe fn as_acpi_table(&self) -> Option<AcpiTable> {
Some(match self.sdt_type()? {
SdtType::Rsdt => {
assert!(size_of::<Rsdt>() <= self.size());
AcpiTable::Rsdt(transmute(self))
},
SdtType::Madt => {
assert!(size_of::<Madt>() <= self.size());
AcpiTable::Madt(transmute(self))
},
SdtType::Hpet => {
assert!(size_of::<Hpet>() <= self.size());
AcpiTable::Hpet(transmute(self))
},
// Xsdt (and future types) have no typed wrapper yet.
_ => return None,
})
}
}
/// Common behaviour for typed ACPI tables: expose the shared header and
/// delegate checksum validation / signature classification to it.
pub trait Sdt {
fn header(&self) -> &SdtHeader;
// safety: same contract as `SdtHeader::validate` — the header's reported
// length must be valid for the underlying memory.
unsafe fn validate(&self) -> bool {
self.header().validate()
}
fn sdt_type(&self) -> Option<SdtType> {
self.header().sdt_type()
}
}
/// Root System Description Table: just the header, followed in memory by an
/// array of 32-bit physical pointers to the other tables. `repr(transparent)`
/// makes the header-to-Rsdt transmute in `as_acpi_table` sound.
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct Rsdt(SdtHeader);
impl Rsdt {
// from a physical address
/// Maps `addr` into the kernel's virtual space and returns a reference to
/// the RSDT, or `None` if its checksum is invalid.
pub unsafe fn from<'a>(addr: usize) -> Option<&'a Rsdt> {
let out = (phys_to_virt_usize(addr) as *const Self).as_ref().unwrap();
if !out.0.validate() {
None
} else {
Some(out)
}
}
// have to use a vec, not a slice, because the pointers are only 32 bits
// safety: fields in rsdt must be valid
/// Collects every recognised child table; unknown signatures are skipped.
pub unsafe fn tables(&self) -> Vec<AcpiTable> {
// Payload is an array of 32-bit physical table addresses.
let len = self.0.data_size() / 4;
let mut out = Vec::with_capacity(len);
let slice: &[u32] = self.0.data();
for n in slice {
let addr = phys_to_virt_usize(*n as usize);
let table = (addr as *const SdtHeader).as_ref().unwrap();
if let Some(table) = table.as_acpi_table() {
out.push(table);
}
}
out
}
// does not require memory allocation
/// Linear scan for the first child table whose signature matches
/// `table_type`; returns `None` when no such table exists.
pub unsafe fn get_table(&self, table_type: SdtType) -> Option<AcpiTable> {
let slice: &[u32] = self.0.data();
for n in slice {
let addr = phys_to_virt_usize(*n as usize);
let table = (addr as *const SdtHeader).as_ref().unwrap();
if let Some(typ) = table.sdt_type() {
if typ == table_type {
return table.as_acpi_table();
}
}
}
None
}
}
// The RSDT's header is its only field.
impl Sdt for Rsdt {
fn header(&self) -> &SdtHeader {
&self.0
}
}
|
use serde::{Serialize, Deserialize};
// Account record exchanged over the API.
#[derive(Serialize, Deserialize, Debug)]
pub struct User {
pub name: String,
// NOTE(review): admin flag stored as a u32 rather than bool — presumably
// for wire-format compatibility; confirm with the consuming client.
pub is_admin: u32,
}
// Lightweight post identity, serialized for listings.
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIdent {
// Post ID
pub id: u32,
// Post Title
pub title: String,
// Timestamp when it was created
// (ms since Unix epoch - but only accurate to the second)
pub created: i64,
}
// Full post with content. NOTE(review): intentionally lacks Serialize/
// Deserialize derives unlike its siblings — confirm that is deliberate.
pub struct Post {
// post ident info
pub ident: PostIdent,
// markdown content
pub content: String,
// Timestamp when it was last updated
// (ms since Unix epoch - but only accurate to the second)
pub updated: i64,
}
// A sellable item with optional per-sale history.
#[derive(Serialize, Deserialize, Debug)]
pub struct InventoryItem {
pub description: String,
pub price: f64, //in Yen
// quantities of each recorded sale, if any
pub items_sold: Option<Vec<usize>>,
// per-sale totals matching items_sold, if computed
pub total_price: Option<Vec<f64>>,
}
/// Field-by-field deep copy. (Equivalent to what `#[derive(Clone)]` would
/// generate; kept as a manual impl to match the existing code layout.)
impl Clone for InventoryItem {
    fn clone(&self) -> Self {
        Self {
            description: self.description.clone(),
            price: self.price,
            items_sold: self.items_sold.clone(),
            total_price: self.total_price.clone(),
        }
    }
}
// A named grouping of inventory items.
#[derive(Serialize, Deserialize, Debug)]
pub struct Category {
pub name: String,
pub items: Vec<InventoryItem>
}
// Aggregate figures computed over a category's items.
#[derive(Serialize, Deserialize, Debug)]
pub struct InputSummary {
pub num_items: usize,
pub total_sale: f64,
}
// A category paired with its computed summary, as returned to callers.
#[derive(Serialize, Deserialize, Debug)]
pub struct CategoryResult {
pub category: Category,
pub summary: InputSummary,
}
|
use serde::{Serialize};
use actix_web::{error, Error, HttpResponse};
use vt::types::*;
pub type ActixResult = Result<HttpResponse, Error>;
pub fn to_actix_result<A: Serialize>(result: MyResult<A>) -> ActixResult {
match result {
Ok(x) => Ok(HttpResponse::Ok().json(x)),
Err(e) => Err(error::ErrorBadRequest(e)),
}
}
|
#![cfg_attr(test, allow(dead_code))]
extern crate lndir;
use std::vec::Vec;
use std::path::PathBuf;
use std::env;
use lndir::lndir;
use lndir::options::Options;
use lndir::argument_error::ArgumentError;
/// Parses the process arguments (skipping argv[0]) into lndir options, the
/// list of source directories, and the destination directory.
///
/// Option parsing stops at `--` or at the first non-option argument; every
/// later argument is treated as a path. The destination is the last path
/// given, or the current working directory when only one path is supplied.
fn parse_args() -> Result<(Options, Vec<PathBuf>, PathBuf), ArgumentError> {
let mut options = Options::new();
let mut paths: Vec<PathBuf> = Vec::new();
let mut iterator = env::args().skip(1);
let mut stop_option_parsing = false;
while let Some(arg) = iterator.next() {
match arg.as_ref() {
"-silent" if !stop_option_parsing => {
options.silent = true;
},
"-ignorelinks" if !stop_option_parsing => {
options.ignore_links = true;
},
"-withrevinfo" if !stop_option_parsing => {
options.with_rev_info = true;
},
// -maxdepth consumes the following argument as its numeric value.
"-maxdepth" if !stop_option_parsing => {
if let Some(value) = iterator.next() {
options.max_depth = match value.parse() {
Ok(max_depth) => Some(max_depth),
Err(err) => return Err(ArgumentError::new(
format!("failed to parse -maxdepth argument \"{}\": {}", value, err),
Some(Box::new(err)),
)),
}
} else {
return Err(ArgumentError::new(
"no value specified for -maxdepth",
None,
))
}
},
// Explicit end of options; "--" itself is not treated as a path.
"--" if !stop_option_parsing => {
stop_option_parsing = true;
},
// First positional argument also ends option parsing.
_ => {
stop_option_parsing = true;
paths.push(PathBuf::from(arg));
},
}
}
// With fewer than two paths the destination defaults to the current
// directory; otherwise the last path is split off as the destination.
let destination = if paths.len() < 2 {
env::current_dir().unwrap()
} else {
let len = paths.len();
paths.split_off(len - 1).first().map(|d| d.to_owned())
.ok_or_else(|| ArgumentError::new(
"no destination directory specified",
None,
))?
};
let sources = paths;
if sources.is_empty() {
return Err(ArgumentError::new(
"no source directory specified",
None,
))
}
Ok((options, sources, destination))
}
/// Entry point: parse arguments, then shadow-link the sources into the
/// destination.
fn main() -> Result<(), ArgumentError> {
let program = env::args().next().unwrap();
let (options, sources, destination) = parse_args()?;
// NOTE(review): looks like leftover debug output — confirm it is wanted.
println!("program: {:?}", program);
// NOTE(review): unwrap aborts with a panic on any lndir I/O error instead
// of a clean exit message — consider mapping the error.
lndir(sources, destination, Some(options)).unwrap();
Ok(())
}
|
use na::{RealField, Rotation3, Unit, UnitComplex};
use crate::aliases::{TMat4, TVec2, TVec3, TVec4};
/// Build the rotation matrix needed to align `normal` and `up`.
///
/// Falls back to the identity matrix when no unique rotation exists
/// (`Rotation3::rotation_between` returns `None`).
pub fn orientation<T: RealField>(normal: &TVec3<T>, up: &TVec3<T>) -> TMat4<T> {
    match Rotation3::rotation_between(normal, up) {
        Some(rotation) => rotation.to_homogeneous(),
        None => TMat4::identity(),
    }
}
/// Rotate a two dimensional vector by `angle`.
pub fn rotate_vec2<T: RealField>(v: &TVec2<T>, angle: T) -> TVec2<T> {
UnitComplex::new(angle) * v
}
/// Rotate a three dimensional vector around an axis (`normal` is normalized
/// internally).
pub fn rotate_vec3<T: RealField>(v: &TVec3<T>, angle: T, normal: &TVec3<T>) -> TVec3<T> {
Rotation3::from_axis_angle(&Unit::new_normalize(*normal), angle) * v
}
/// Rotate a three dimensional vector in homogeneous coordinates around an axis.
pub fn rotate_vec4<T: RealField>(v: &TVec4<T>, angle: T, normal: &TVec3<T>) -> TVec4<T> {
Rotation3::from_axis_angle(&Unit::new_normalize(*normal), angle).to_homogeneous() * v
}
/// Rotate a three dimensional vector around the `X` axis.
pub fn rotate_x_vec3<T: RealField>(v: &TVec3<T>, angle: T) -> TVec3<T> {
Rotation3::from_axis_angle(&TVec3::x_axis(), angle) * v
}
/// Rotate a three dimensional vector in homogeneous coordinates around the `X` axis.
pub fn rotate_x_vec4<T: RealField>(v: &TVec4<T>, angle: T) -> TVec4<T> {
Rotation3::from_axis_angle(&TVec3::x_axis(), angle).to_homogeneous() * v
}
/// Rotate a three dimensional vector around the `Y` axis.
pub fn rotate_y_vec3<T: RealField>(v: &TVec3<T>, angle: T) -> TVec3<T> {
Rotation3::from_axis_angle(&TVec3::y_axis(), angle) * v
}
/// Rotate a three dimensional vector in homogeneous coordinates around the `Y` axis.
pub fn rotate_y_vec4<T: RealField>(v: &TVec4<T>, angle: T) -> TVec4<T> {
Rotation3::from_axis_angle(&TVec3::y_axis(), angle).to_homogeneous() * v
}
/// Rotate a three dimensional vector around the `Z` axis.
pub fn rotate_z_vec3<T: RealField>(v: &TVec3<T>, angle: T) -> TVec3<T> {
Rotation3::from_axis_angle(&TVec3::z_axis(), angle) * v
}
/// Rotate a three dimensional vector in homogeneous coordinates around the `Z` axis.
pub fn rotate_z_vec4<T: RealField>(v: &TVec4<T>, angle: T) -> TVec4<T> {
Rotation3::from_axis_angle(&TVec3::z_axis(), angle).to_homogeneous() * v
}
/// Computes a spherical linear interpolation between the vectors `x` and `y` assumed to be normalized.
///
/// `Unit::new_unchecked` skips normalization, so passing non-unit vectors
/// gives unspecified results.
pub fn slerp<T: RealField>(x: &TVec3<T>, y: &TVec3<T>, a: T) -> TVec3<T> {
Unit::new_unchecked(*x)
.slerp(&Unit::new_unchecked(*y), a)
.into_inner()
}
|
use std::cmp::{max, min};
/// Binary (Stein's) GCD over `usize`. `gcd(0, x) == x` and `gcd(0, 0) == 0`.
fn gcd(a: usize, b: usize) -> usize {
    match ((a, b), (a & 1, b & 1)) {
        // Equal operands: the GCD is the value itself.
        ((x, y), _) if x == y => y,
        // gcd(0, x) == gcd(x, 0) == x.
        ((0, x), _) | ((x, 0), _) => x,
        // Exactly one even operand: its factors of two don't contribute.
        ((x, y), (0, 1)) | ((y, x), (1, 0)) => gcd(x >> 1, y),
        // Both even: factor out one shared power of two.
        ((x, y), (0, 0)) => gcd(x >> 1, y >> 1) << 1,
        // Both odd: gcd(x, y) == gcd((y - x) / 2, x) with x <= y.
        ((x, y), (1, 1)) => {
            let (x, y) = (min(x, y), max(x, y));
            gcd((y - x) >> 1, x)
        }
        // Parity bits are always 0 or 1, so every case is covered above.
        _ => unreachable!(),
    }
}
/// Least common multiple. Returns 0 when either operand is 0 (previously
/// `lcm(0, 0)` divided by zero).
///
/// Dividing before multiplying (`a / gcd * b` rather than `a * b / gcd`)
/// avoids overflow when the raw product exceeds `usize::MAX`; the division
/// is exact because `gcd(a, b)` divides `a`.
fn lcm(a: usize, b: usize) -> usize {
    if a == 0 || b == 0 {
        return 0;
    }
    a / gcd(a, b) * b
}
/// Demo entry point: prints the LCM of two sample values.
fn main() {
    let answer = lcm(6324, 234);
    println!("{}", answer)
}
|
use std::fmt::Debug;
use std::panic::RefUnwindSafe;
use serde::Deserialize;
use gotham::router::builder::*;
use gotham::router::Router;
use receiver::{LoginHandler, Receiver, ReturnInfo};
/// Builds the subrouter for the Shibboleth-protected part of application, where new sessions will
/// be received for processing.
/// Builds the subrouter for the Shibboleth-protected part of the application:
/// a single GET /login route that extracts `ReturnInfo` from the query string
/// and hands the session to the supplied `Receiver`.
pub fn auth_router<A, R>(r: R) -> Router
where
A: for<'de> Deserialize<'de> + Debug + 'static,
R: Receiver<A> + Copy + RefUnwindSafe + 'static,
{
build_simple_router(|route| {
route
.get("/login")
.with_query_string_extractor::<ReturnInfo>()
.to(LoginHandler::new(r));
})
}
|
use ast::{Ast, AstTrait};
use token::{Token, Type};
/// Stack-based s-expression parser: open tokens push nodes, close tokens pop
/// a child into its parent. The stack starts with a null root node.
pub struct Parser {
stack: Vec<Ast>,
}
pub trait ParserTrait {
// Consumes one token; returns false (and resets) on a parse error.
fn parse_token(&mut self, token: Token) -> bool;
// True once only the root remains on the stack.
fn is_done(&self) -> bool;
// Takes ownership of the finished tree, if parsing is complete.
fn get_parsed_tree(&mut self) -> Option<Ast>;
}
impl ParserTrait for Parser {
fn parse_token(&mut self, token: Token) -> bool {
let result = match token.tok_type {
Type::Oparen => self.open(token),
Type::Cparen => self.close(token),
Type::Func => self.parse_literal(token),
Type::OpenList => self.open(token),
Type::CloseList => self.close(token),
Type::Number => self.parse_literal(token),
Type::StrType => self.parse_literal(token),
Type::Space => true,
Type::Empty => false,
Type::Error => false
};
if !result {
self.stack.clear();
}
result
}
fn is_done(&self) -> bool {
self.stack.len() == 1
}
fn get_parsed_tree(&mut self) -> Option<Ast> {
match self.is_done() {
true => self.stack.pop(),
false => None
}
}
}
impl Parser {
/// Creates a parser whose stack holds a single null root node.
pub fn new() -> Self {
Parser {
stack: vec![Ast::new_null()],
}
}
// Push a new subtree rooted at this opening token.
fn open(&mut self, token: Token) -> bool {
self.stack.push(Ast::new(token));
true
}
// Pop the finished subtree and attach it to its parent, provided the
// closing token matches the subtree's opening token.
fn close(&mut self, token: Token) -> bool {
match self.stack.pop() {
Some(child) => {
match self.stack.pop() {
Some(mut parent) => {
let should_push = match child.node_val {
Some(ref tok) => tok.tok_type.is_matching_close(token.tok_type),
None => false
};
if should_push {
parent.push_child(child);
self.stack.push(parent);
}
should_push
},
// NOTE(review): with no parent the popped child is silently
// dropped yet `true` is returned, leaving an empty stack —
// confirm this is the intended behaviour for a stray close.
None => true
}
},
None => false
}
}
// Attach a literal token as a child of the current top-of-stack node;
// fails on the null root (node_val is None) or an empty stack.
fn parse_literal(&mut self, token: Token) -> bool {
match self.stack.pop() {
Some(mut node) => {
let should_push = node.node_val.is_some();
if should_push {
node.push_child(Ast::new(token));
self.stack.push(node);
}
should_push
},
None => false
}
}
}
|
//! The `par_direct` backend operates on `CompileNodes` directly in parallel
use super::{JITBackend, JITResetData};
use crate::blocks::{Block, BlockEntity, BlockPos, ComparatorMode};
use crate::redpiler::{CompileNode, Link, LinkType};
use crate::world::{TickEntry, TickPriority};
use log::warn;
use rayon::prelude::*;
use std::collections::HashMap;
use std::fmt;
use std::sync::atomic::{AtomicBool, AtomicU8, Ordering};
use std::sync::mpsc::{channel, Receiver, Sender};
use std::sync::Arc;
// Internal pending-tick entry. `ticks_left` packs delay and priority as
// `game_ticks * 5 + priority` (see `compile` and `schedule_tick`).
#[derive(Debug)]
struct RPTickEntry {
ticks_left: i32,
node: usize,
}
// Compiled node kind; payloads carry per-kind configuration
// (repeater delay, comparator mode).
#[derive(Debug)]
enum NodeType {
Repeater(u8),
Comparator(ComparatorMode),
Torch,
WallTorch,
Wire,
StoneButton,
Lamp,
RedstoneBlock,
Container,
Lever,
}
// World-visible state change emitted by workers, drained by
// `collect_changes` on the main thread.
enum BlockChange {
Power(usize, u8),
RepeaterLock(usize, bool),
}
// A compiled redstone node. Mutable state is atomic so rayon workers can
// update nodes concurrently without locks.
struct Node {
// Constant
ty: NodeType,
facing_diode: bool,
inputs: Vec<Link>,
outputs: Vec<usize>,
// State
update_queued: AtomicBool,
powered: AtomicBool,
/// Only used for comparators and containers
output_strength: AtomicU8,
/// Repeater locking
locked: AtomicBool,
pending_tick: AtomicBool,
}
// Converts a compiler node into the runtime representation, seeding the
// atomic state from the block's compile-time state. The `unreachable!` arms
// rely on the compiler only emitting the block kinds handled here.
impl From<CompileNode> for Node {
fn from(node: CompileNode) -> Self {
Node {
ty: match node.state {
Block::RedstoneRepeater { repeater } => NodeType::Repeater(repeater.delay),
Block::RedstoneComparator { comparator } => NodeType::Comparator(comparator.mode),
Block::RedstoneTorch { .. } => NodeType::Torch,
Block::RedstoneWallTorch { .. } => NodeType::WallTorch,
Block::RedstoneWire { .. } => NodeType::Wire,
Block::StoneButton { .. } => NodeType::StoneButton,
Block::RedstoneLamp { .. } => NodeType::Lamp,
Block::RedstoneBlock { .. } => NodeType::RedstoneBlock,
Block::Lever { .. } => NodeType::Lever,
block if block.has_comparator_override() => NodeType::Container,
_ => unreachable!(),
},
facing_diode: node.facing_diode,
inputs: node.inputs,
outputs: node.updates.into_iter().collect(),
update_queued: AtomicBool::new(false),
// Initial powered state mirrors the block state; wires and containers
// start unpowered (wires report power separately, containers via
// output_strength).
powered: AtomicBool::new(match node.state {
Block::RedstoneRepeater { repeater } => repeater.powered,
Block::RedstoneComparator { comparator } => comparator.powered,
Block::RedstoneTorch { lit } => lit,
Block::RedstoneWallTorch { lit, .. } => lit,
Block::StoneButton { button } => button.powered,
Block::RedstoneLamp { lit } => lit,
Block::Lever { lever } => lever.powered,
Block::RedstoneBlock {} => true,
Block::RedstoneWire { .. } => false,
block if block.has_comparator_override() => false,
_ => unreachable!(),
}),
output_strength: AtomicU8::new(node.comparator_output),
// Only repeaters can start locked.
locked: AtomicBool::new(if let Block::RedstoneRepeater { repeater } = node.state {
repeater.locked
} else {
false
}),
pending_tick: AtomicBool::new(false),
}
}
}
impl Node {
    /// Signal strength (0-15) this node currently drives into its outputs.
    /// Comparators and containers emit an analog strength; everything else
    /// is binary: 15 when powered, 0 otherwise.
    fn get_output_power(&self) -> u8 {
        if matches!(self.ty, NodeType::Comparator(_) | NodeType::Container) {
            self.output_strength.load(Ordering::Relaxed)
        } else if self.powered.load(Ordering::Relaxed) {
            15
        } else {
            0
        }
    }
}
// Parallel direct-execution backend. Workers communicate results back to the
// main thread through mpsc channels (updates, ticks, block changes), which
// are drained between phases.
pub struct ParDirectBackend {
// position and current block state per node index
blocks: Vec<(BlockPos, Block)>,
// world-visible changes accumulated for the caller
block_changes: Vec<(BlockPos, Block)>,
// shared with rayon workers
nodes: Arc<Vec<Node>>,
updates_tx: Sender<usize>,
updates_rx: Receiver<usize>,
ticks_tx: Sender<RPTickEntry>,
ticks_rx: Receiver<RPTickEntry>,
changes_tx: Sender<BlockChange>,
changes_rx: Receiver<BlockChange>,
// scratch buffers drained from the channels each phase
updates: Vec<usize>,
ticks: Vec<RPTickEntry>,
pos_map: HashMap<BlockPos, usize>,
}
impl Default for ParDirectBackend {
    /// Empty backend with freshly created channel pairs and no nodes.
    fn default() -> Self {
        let (updates_tx, updates_rx) = channel();
        let (ticks_tx, ticks_rx) = channel();
        let (changes_tx, changes_rx) = channel();
        Self {
            blocks: Vec::new(),
            block_changes: Vec::new(),
            nodes: Arc::new(Vec::new()),
            updates_tx,
            updates_rx,
            ticks_tx,
            ticks_rx,
            changes_tx,
            changes_rx,
            updates: Vec::new(),
            ticks: Vec::new(),
            pos_map: HashMap::new(),
        }
    }
}
impl JITBackend for ParDirectBackend {
/// Tears the backend down, converting pending ticks and comparator state
/// back into world-level `TickEntry`/`BlockEntity` values.
fn reset(&mut self) -> JITResetData {
let mut ticks = Vec::new();
// Discard entries that already fired (ticks_left went negative).
self.ticks.retain(|tick| tick.ticks_left >= 0);
for entry in self.ticks.drain(..) {
ticks.push(TickEntry {
// compile() encoded ticks_left as game_ticks * 5 + priority.
ticks_left: entry.ticks_left as u32 / 5,
// NOTE(review): the encoding above is base 5, but the priority is
// decoded here with `% 4` — verify this shouldn't be `% 5`.
tick_priority: match entry.ticks_left % 4 {
0 => TickPriority::Highest,
1 => TickPriority::Higher,
2 => TickPriority::High,
3 => TickPriority::Normal,
_ => unreachable!(),
},
pos: self.blocks[entry.node].0,
});
}
// Persist comparator output strengths as block entities.
let mut block_entities = Vec::new();
for (node_id, node) in self.nodes.iter().enumerate() {
if let NodeType::Comparator(_) = node.ty {
let block_entity = BlockEntity::Comparator {
output_strength: node.output_strength.load(Ordering::Relaxed),
};
block_entities.push((self.blocks[node_id].0, block_entity));
}
}
// TODO: Reuse the old allocations
*self = Default::default();
JITResetData {
block_entities,
tick_entries: ticks,
}
}
/// Player interaction: toggles levers immediately; buttons power on and
/// schedule a tick (presumably the release) 10 ticks later.
fn on_use_block(&mut self, pos: BlockPos) {
let node_id = self.pos_map[&pos];
let node = &self.nodes[node_id];
match node.ty {
NodeType::Lever => {
let powered = !node.powered.load(Ordering::Relaxed);
node.powered.store(powered, Ordering::Relaxed);
self.changes_tx
.send(BlockChange::Power(node_id, powered as u8))
.unwrap();
schedule_updates(&self.updates_tx, &self.nodes, node_id);
}
NodeType::StoneButton => {
// NOTE(review): this toggles the button on every use; confirm a
// second use while pressed is intended to depower it early.
let powered = !node.powered.load(Ordering::Relaxed);
node.powered.store(powered, Ordering::Relaxed);
self.changes_tx
.send(BlockChange::Power(node_id, powered as u8))
.unwrap();
schedule_tick(
&self.ticks_tx,
&self.nodes,
node_id,
10,
TickPriority::Normal,
);
schedule_updates(&self.updates_tx, &self.nodes, node_id);
}
_ => {}
}
self.run_updates();
self.collect_changes();
}
/// One game tick: four priority phases, each decrementing every pending
/// entry by one and firing those that reach zero, executed in parallel.
fn tick(&mut self) {
// Pull in ticks scheduled by workers since the last call.
self.ticks.extend(self.ticks_rx.try_iter());
for _ in [
TickPriority::Normal,
TickPriority::High,
TickPriority::Higher,
TickPriority::Highest,
] {
self.ticks.par_iter_mut().for_each_with(
(
self.updates_tx.clone(),
self.changes_tx.clone(),
self.nodes.clone(),
),
|(updates_tx, changes_tx, nodes), tick: &mut RPTickEntry| {
if tick.ticks_left == 0 {
tick_single(tick.node, nodes, updates_tx, changes_tx);
}
tick.ticks_left -= 1;
},
);
}
// Entries that fired this tick are now negative and get dropped.
self.ticks.retain(|tick| tick.ticks_left > 0);
self.run_updates();
self.collect_changes();
}
/// Installs a freshly compiled graph, re-encoding pending world ticks into
/// the internal `game_ticks * 5 + priority` representation.
fn compile(&mut self, nodes: Vec<CompileNode>, ticks: Vec<TickEntry>) {
for (i, node) in nodes.iter().enumerate() {
self.blocks.push((node.pos, node.state));
self.pos_map.insert(node.pos, i);
}
for entry in ticks {
if let Some(node) = self.pos_map.get(&entry.pos) {
self.ticks.push(RPTickEntry {
ticks_left: entry.ticks_left as i32 * 5 + entry.tick_priority as i32,
node: *node,
});
}
}
let pnodes = nodes.into_iter().map(Into::into).collect();
self.nodes = Arc::new(pnodes);
}
/// Accumulated world-visible block changes, drained by the caller.
fn block_changes(&mut self) -> &mut Vec<(BlockPos, Block)> {
&mut self.block_changes
}
}
impl ParDirectBackend {
// Drains queued node updates from the channel and processes them in
// parallel; workers may schedule further ticks/changes via the channels.
fn run_updates(&mut self) {
self.updates.clear();
self.updates.extend(self.updates_rx.try_iter());
self.updates.par_iter().for_each_with(
(
self.ticks_tx.clone(),
self.changes_tx.clone(),
self.nodes.clone(),
),
|(ticks_tx, changes_tx, nodes), node_id| {
update_single(*node_id, nodes, ticks_tx, changes_tx);
},
);
}
// Applies worker-reported state changes to the cached block states and
// records them for the caller. Runs on the main thread only.
fn collect_changes(&mut self) {
for change in self.changes_rx.try_iter() {
match change {
BlockChange::Power(node_id, power) => {
let powered = power > 0;
match &mut self.blocks[node_id].1 {
Block::RedstoneComparator { comparator } => comparator.powered = powered,
Block::RedstoneTorch { lit } => *lit = powered,
Block::RedstoneWallTorch { lit, .. } => *lit = powered,
Block::RedstoneRepeater { repeater } => repeater.powered = powered,
// Wires carry the full analog power level, not just a flag.
Block::RedstoneWire { wire } => wire.power = power as u8,
Block::Lever { lever } => lever.powered = powered,
Block::StoneButton { button } => button.powered = powered,
Block::RedstoneLamp { lit } => *lit = powered,
_ => {}
}
self.block_changes.push(self.blocks[node_id]);
}
BlockChange::RepeaterLock(node_id, locked) => match &mut self.blocks[node_id].1 {
Block::RedstoneRepeater { repeater } => repeater.locked = locked,
_ => panic!("tried to lock a node which wasn't a repeater"),
},
}
}
}
}
/// Redstone comparator output strength: subtract mode saturating-subtracts
/// the side power from the rear signal; compare mode passes the rear signal
/// through only when it is at least as strong as the side signal.
fn calculate_comparator_output(mode: ComparatorMode, input_strength: u8, power_on_sides: u8) -> u8 {
    match mode {
        ComparatorMode::Subtract => input_strength.saturating_sub(power_on_sides),
        _ => {
            if input_strength >= power_on_sides {
                input_strength
            } else {
                0
            }
        }
    }
}
/// Whether a comparator's output should be on: the rear input must be
/// non-zero and either strictly stronger than the side input, or equal to it
/// while the comparator is in compare mode.
fn comparator_should_be_powered(
    mode: ComparatorMode,
    input_strength: u8,
    power_on_sides: u8,
) -> bool {
    input_strength > 0
        && (input_strength > power_on_sides
            || (input_strength == power_on_sides && mode == ComparatorMode::Compare))
}
// Queues a tick for `node_id` unless one is already pending. The atomic CAS
// on `pending_tick` deduplicates concurrent scheduling attempts from
// parallel workers; delay and priority are packed as (delay - 1) * 5 +
// priority to match the decoding in `tick`/`reset`.
fn schedule_tick(
ticks_tx: &Sender<RPTickEntry>,
nodes: &Arc<Vec<Node>>,
node_id: usize,
delay: u32,
priority: TickPriority,
) {
if let Ok(false) = nodes[node_id].pending_tick.compare_exchange(
false,
true,
Ordering::Relaxed,
Ordering::Relaxed,
) {
ticks_tx
.send(RPTickEntry {
node: node_id,
ticks_left: (delay as i32 - 1) * 5 + priority as i32,
})
.unwrap();
}
}
/// Send an update for `node_id` itself, then fan out to all of its outputs,
/// de-duplicating neighbours via each node's `update_queued` flag.
fn schedule_updates(updates_tx: &Sender<usize>, nodes: &Arc<Vec<Node>>, node_id: usize) {
    updates_tx.send(node_id).unwrap();
    for link in &nodes[node_id].outputs {
        // CAS guards against enqueueing the same neighbour twice when several
        // schedulers race.
        if let Ok(false) = nodes[*link].update_queued.compare_exchange(
            false,
            true,
            Ordering::Relaxed,
            Ordering::Relaxed,
        ) {
            updates_tx.send(*link).unwrap();
        }
    }
}
/// Recompute a node's input power and schedule a state-change tick if needed.
///
/// Runs concurrently across nodes; per-node state lives in atomics accessed
/// with Relaxed ordering. Most state changes are deferred to scheduled ticks;
/// lamps turning on and wire strength changes apply immediately.
fn update_single(
    node_id: usize,
    nodes: &Arc<Vec<Node>>,
    ticks_tx: &Sender<RPTickEntry>,
    changes_tx: &Sender<BlockChange>,
) {
    let node = &nodes[node_id];
    node.update_queued.store(false, Ordering::Relaxed);
    // Gather the strongest rear (default) and side input powers, each
    // attenuated by its link's weight.
    let mut input_power = 0u8;
    let mut side_input_power = 0u8;
    for link in &node.inputs {
        let power = match link.ty {
            LinkType::Default => &mut input_power,
            LinkType::Side => &mut side_input_power,
        };
        *power = (*power).max(
            nodes[link.end]
                .get_output_power()
                .saturating_sub(link.weight),
        );
    }
    match node.ty {
        NodeType::Repeater(delay) => {
            // Side power locks a repeater; lock changes apply immediately.
            let should_be_locked = side_input_power > 0;
            let mut locked = node.locked.load(Ordering::Relaxed);
            if !locked && should_be_locked {
                locked = true;
                node.locked.store(true, Ordering::Relaxed);
                changes_tx
                    .send(BlockChange::RepeaterLock(node_id, true))
                    .unwrap();
            } else if locked && !should_be_locked {
                locked = false;
                node.locked.store(false, Ordering::Relaxed);
                changes_tx
                    .send(BlockChange::RepeaterLock(node_id, false))
                    .unwrap();
            }
            // Only an unlocked repeater with no pending tick may schedule one.
            if !locked && !nodes[node_id].pending_tick.load(Ordering::Relaxed) {
                let powered = node.powered.load(Ordering::Relaxed);
                let should_be_powered = input_power > 0;
                if should_be_powered != powered {
                    // Diode-facing repeaters tick first; turning off beats
                    // turning on.
                    let priority = if node.facing_diode {
                        TickPriority::Highest
                    } else if !should_be_powered {
                        TickPriority::Higher
                    } else {
                        TickPriority::High
                    };
                    schedule_tick(ticks_tx, nodes, node_id, delay as u32, priority);
                }
            }
        }
        NodeType::Torch | NodeType::WallTorch => {
            // A torch inverts its input, so lit == powered-input means it must
            // flip on the next tick.
            let lit = node.powered.load(Ordering::Relaxed);
            if lit == (input_power > 0) && !nodes[node_id].pending_tick.load(Ordering::Relaxed) {
                schedule_tick(ticks_tx, nodes, node_id, 1, TickPriority::Normal);
            }
        }
        NodeType::Comparator(mode) => {
            if nodes[node_id].pending_tick.load(Ordering::Relaxed) {
                return;
            }
            // Tick only when either output strength or the powered flag would
            // change.
            let output_power = calculate_comparator_output(mode, input_power, side_input_power);
            let old_strength = node.output_strength.load(Ordering::Relaxed);
            let powered = node.powered.load(Ordering::Relaxed);
            if output_power != old_strength
                || powered != comparator_should_be_powered(mode, input_power, side_input_power)
            {
                let priority = if node.facing_diode {
                    TickPriority::High
                } else {
                    TickPriority::Normal
                };
                schedule_tick(ticks_tx, nodes, node_id, 1, priority);
            }
        }
        NodeType::Lamp => {
            let should_be_lit = input_power > 0;
            let lit = node.powered.load(Ordering::Relaxed);
            if lit && !should_be_lit {
                // Lamps turn off with a 2-tick delay...
                schedule_tick(ticks_tx, nodes, node_id, 2, TickPriority::Normal);
            } else if !lit && should_be_lit {
                // ...but turn on immediately.
                node.powered.store(true, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 15)).unwrap();
            }
        }
        NodeType::Wire => {
            // Wires propagate their computed strength immediately, no tick.
            let power = node.output_strength.load(Ordering::Relaxed);
            if power != input_power {
                node.output_strength.store(input_power, Ordering::Relaxed);
                changes_tx
                    .send(BlockChange::Power(node_id, input_power))
                    .unwrap();
            }
        }
        _ => {}
    }
}
/// Apply a scheduled state change to a node once its tick countdown reaches
/// zero, then queue updates for the node and its outputs.
fn tick_single(
    node_id: usize,
    nodes: &Arc<Vec<Node>>,
    updates_tx: &Sender<usize>,
    changes_tx: &Sender<BlockChange>,
) {
    let node = &nodes[node_id];
    node.pending_tick.store(false, Ordering::Relaxed);
    // Recompute rear/side input power exactly as in `update_single`.
    let mut input_power = 0u8;
    let mut side_input_power = 0u8;
    for link in &node.inputs {
        let power = match link.ty {
            LinkType::Default => &mut input_power,
            LinkType::Side => &mut side_input_power,
        };
        *power = (*power).max(
            nodes[link.end]
                .get_output_power()
                .saturating_sub(link.weight),
        );
    }
    match node.ty {
        NodeType::Repeater(_) => {
            // Locked repeaters ignore ticks entirely.
            if node.locked.load(Ordering::Relaxed) {
                return;
            }
            let should_be_powered = input_power > 0;
            let powered = node.powered.load(Ordering::Relaxed);
            if powered && !should_be_powered {
                node.powered.store(false, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 0)).unwrap();
                schedule_updates(updates_tx, nodes, node_id);
            } else if !powered {
                // NOTE(review): an unpowered repeater turns on here even if
                // the input has already dropped — looks like the game's
                // pulse-sustaining behaviour; confirm intent.
                node.powered.store(true, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 15)).unwrap();
                schedule_updates(updates_tx, nodes, node_id);
            }
        }
        NodeType::Torch | NodeType::WallTorch => {
            // Torch inverts: powered input extinguishes it, none lights it.
            let should_be_off = input_power > 0;
            let lit = node.powered.load(Ordering::Relaxed);
            if lit && should_be_off {
                node.powered.store(false, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 0)).unwrap();
                schedule_updates(updates_tx, nodes, node_id);
            } else if !lit && !should_be_off {
                node.powered.store(true, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 15)).unwrap();
                schedule_updates(updates_tx, nodes, node_id);
            }
        }
        NodeType::Comparator(mode) => {
            let new_strength = calculate_comparator_output(mode, input_power, side_input_power);
            let old_strength = node.output_strength.load(Ordering::Relaxed);
            if new_strength != old_strength || mode == ComparatorMode::Compare {
                node.output_strength.store(new_strength, Ordering::Relaxed);
                let should_be_powered =
                    comparator_should_be_powered(mode, input_power, side_input_power);
                let powered = node.powered.load(Ordering::Relaxed);
                if powered && !should_be_powered {
                    node.powered.store(false, Ordering::Relaxed);
                    changes_tx.send(BlockChange::Power(node_id, 0)).unwrap();
                } else if !powered && should_be_powered {
                    node.powered.store(true, Ordering::Relaxed);
                    changes_tx.send(BlockChange::Power(node_id, 15)).unwrap();
                }
                schedule_updates(updates_tx, nodes, node_id);
            }
        }
        NodeType::Lamp => {
            // Only the delayed turn-off arrives via tick; turn-on happens
            // immediately in `update_single`.
            let lit = node.powered.load(Ordering::Relaxed);
            let should_be_lit = input_power > 0;
            if lit && !should_be_lit {
                node.powered.store(false, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 0)).unwrap();
            }
        }
        NodeType::StoneButton => {
            // A button tick simply releases it.
            let powered = node.powered.load(Ordering::Relaxed);
            if powered {
                node.powered.store(false, Ordering::Relaxed);
                changes_tx.send(BlockChange::Power(node_id, 0)).unwrap();
                schedule_updates(updates_tx, nodes, node_id);
            }
        }
        _ => warn!("Node {:?} should not be ticked!", node.ty),
    }
}
// Emits the compiled node graph in Graphviz DOT format for debugging.
impl fmt::Display for ParDirectBackend {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("digraph{")?;
        for (id, node) in self.nodes.iter().enumerate() {
            let pos = self.blocks[id].0;
            // Node label: "<id>: <type>" plus the block's world coordinates.
            write!(
                f,
                "n{}[label=\"{}: {}\\n({}, {}, {})\"];",
                id,
                id,
                // Keep only the variant name, dropping any payload text.
                format!("{:?}", node.ty).split_whitespace().next().unwrap(),
                pos.x,
                pos.y,
                pos.z
            )?;
            for link in &node.inputs {
                // Side-input links (comparator/repeater sides) are drawn blue.
                let color = match link.ty {
                    LinkType::Default => "",
                    LinkType::Side => ",color=\"blue\"",
                };
                write!(
                    f,
                    "n{}->n{}[label=\"{}\"{}];",
                    link.end, link.start, link.weight, color
                )?;
            }
            // Kept for debugging: dotted update edges.
            // for update in &node.updates {
            //     write!(f, "n{}->n{}[style=dotted];", id, update)?;
            // }
        }
        f.write_str("}\n")
    }
}
|
use std::collections::HashMap;
include!(concat!(env!("OUT_DIR"), "/english_frequencies.rs"));
/// Returns true when `message` is statistically likely to be English text.
///
/// Performs a chi-squared goodness-of-fit test of the message's character
/// counts against reference English character frequencies (generated at
/// build time into `english_frequencies()`).
pub fn english(message: &str) -> bool {
    // Expected count per reference character, scaled to this message's
    // length. The generated table stores percentages, hence / 100.0.
    let expected_counts: HashMap<char, f32> = english_frequencies()
        .iter()
        .map(|(k, freq)| (k.clone() as char, (freq / 100.0) * (message.len() as f32)))
        .collect();
    // Observed counts, built with the entry API instead of get/match/insert.
    let mut actual_counts: HashMap<char, isize> = HashMap::new();
    for c in message.chars() {
        *actual_counts.entry(c).or_insert(0) += 1;
    }
    let chi_statistic = chi_statistic(&actual_counts, &expected_counts);
    if cfg!(debug_assertions) {
        println!("Expected: {:#?}", expected_counts);
        println!("Actual: {:#?}", actual_counts);
        println!("X-statistic: {}", chi_statistic);
    }
    // Degrees of freedom = 256 - 1 = 255 (character space)
    // Using this table:
    // https://en.wikibooks.org/wiki/Engineering_Tables/Chi-Squared_Distibution
    // We can use the approximate value for 250 degrees of freedom.
    // Given a significance factor (alpha) of 0.05, our critical value is 287.882.
    // If our chi_statistic is < the critical_value, then we have a match.
    // See this page for an explanation:
    // https://en.wikipedia.org/wiki/Chi-squared_distribution#Table_of_%CF%872_values_vs_p-values
    chi_statistic < 287.882
}
/// Calculates Pearson's Cumulative Chi Statistic
/// https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test#Calculating_the_test-statistic
///
/// This is a slight variation.
/// Technically, if the expected value is zero and the actual is non-zero, then the statistic is infinite.
/// For the sake of ergonomics, this implementation assumes missing expected values to be small, but non-zero.
/// This allows us to only specify values in the expected frequencies that are statistically
/// significant while allowing for all valid utf-8 characters in the message.
fn chi_statistic(observed: &HashMap<char, isize>, expected: &HashMap<char, f32>) -> f32 {
    observed
        .iter()
        .map(|(key, &obs)| {
            // Missing expected entries are treated as tiny-but-nonzero so the
            // statistic stays finite (see doc comment above).
            let exp = expected.get(key).copied().unwrap_or(0.000_000_1);
            (obs as f32 - exp).powi(2) / exp
        })
        .sum()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Positive cases: real English sentences pass the chi-squared test.
    #[test]
    fn bacon_message_is_english() {
        let message = "Cooking MC's like a pound of bacon";
        assert!(english(message));
    }
    #[test]
    fn message_with_new_line_is_english() {
        let message = "Now that the party is jumping\n";
        assert!(english(message));
    }
    // Negative cases: control characters and shifted bytes fail the test.
    #[test]
    fn message_with_unprintable_chars_is_not_english() {
        assert!(!english(
            "\u{7f}SSWUR[\u{1c}q\u{7f}\u{1b}O\u{1c}PUWY\u{1c}]\u{1c}LSIRX\u{1c}SZ\u{1c}^]_SR"
        ));
    }
    #[test]
    fn printable_nonsense_is_not_english() {
        assert!(!english("Yuuqst}:WY=i:vsq\u{7f}:{:juot~:u|:x{yut"));
    }
    #[test]
    fn readable_but_incorrect_is_not_english() {
        // Case-flipped text with NUL separators has the wrong profile.
        assert!(!english(
            "cOOKING\u{0}mc\u{7}S\u{0}LIKE\u{0}A\u{0}POUND\u{0}OF\u{0}BACON"
        ));
    }
}
|
extern crate bspline;
extern crate image;
use std::iter;
use std::ops::{Add, Mul};
/// A 2D point, used both as spline control point and curve sample.
#[derive(Copy, Clone, Debug)]
struct Point {
    x: f32,
    y: f32,
}
impl Point {
    /// Construct a point from its coordinates.
    fn new(x: f32, y: f32) -> Point {
        // Field-init shorthand instead of `x: x, y: y`.
        Point { x, y }
    }
}
// Scalar multiplication, required by the bspline interpolation.
impl Mul<f32> for Point {
    type Output = Point;
    fn mul(self, rhs: f32) -> Point {
        Point::new(self.x * rhs, self.y * rhs)
    }
}
// Component-wise addition, required by the bspline interpolation.
impl Add for Point {
    type Output = Point;
    fn add(self, rhs: Point) -> Point {
        Point::new(self.x + rhs.x, self.y + rhs.y)
    }
}
/// Evaluate the B-spline and plot it to the RGB image buffer passed.
///
/// The curve is traced as 2x2 black squares sampled densely across the knot
/// domain; each control point is then marked with a 6x6 red square.
fn plot_2d(
    spline: &bspline::BSpline<Point, f32>,
    plot: &mut [u8],
    plot_dim: (usize, usize),
    scale: (f32, f32),
    offset: (f32, f32),
) {
    let (width, height) = plot_dim;
    // Map model-space coordinates to integer raster coordinates.
    let to_raster = |px: f32, py: f32| {
        (
            ((px + offset.0) * scale.0) as isize,
            ((py + offset.1) * scale.1) as isize,
        )
    };
    let step_size = 0.001;
    let t_range = spline.knot_domain();
    let steps = ((t_range.1 - t_range.0) / step_size) as usize;
    for s in 0..=steps {
        let pt = spline.point(step_size * s as f32 + t_range.0);
        let (ix, iy) = to_raster(pt.x, pt.y);
        for y in iy - 1..iy + 1 {
            for x in ix - 1..ix + 1 {
                if x >= 0 && x < width as isize && y >= 0 && y < height as isize {
                    // Flip y: image rows run top-to-bottom; 3 bytes per pixel.
                    let px = (height - 1 - y as usize) * width * 3 + x as usize * 3;
                    plot[px] = 0;
                    plot[px + 1] = 0;
                    plot[px + 2] = 0;
                }
            }
        }
    }
    // Draw the control points in red.
    for pt in spline.control_points() {
        let (ix, iy) = to_raster(pt.x, pt.y);
        for y in iy - 3..iy + 3 {
            for x in ix - 3..ix + 3 {
                if x >= 0 && x < width as isize && y >= 0 && y < height as isize {
                    let px = (height - 1 - y as usize) * width * 3 + x as usize * 3;
                    plot[px] = 255;
                    plot[px + 1] = 0;
                    plot[px + 2] = 0;
                }
            }
        }
    }
}
/// Render the quadratic example spline and compare it byte-for-byte against
/// the checked-in golden image.
/// NOTE(review): relies on a pre-0.22 `image` API (`image::ImageRgb8`) and
/// the `tests/quadratic_2d_expect.png` fixture being present.
#[test]
fn plot_quadratic2d() {
    let points = vec![
        Point::new(-1.5, 0.0),
        Point::new(0.0, 1.5),
        Point::new(1.5, 0.0),
    ];
    // Clamped knot vector: curve interpolates the first and last points.
    let knots = vec![0.0, 0.0, 0.0, 3.0, 3.0, 3.0];
    let degree = 2;
    let spline = bspline::BSpline::new(degree, points, knots);
    let plot_dim = (200, 200);
    let scale = (plot_dim.0 as f32 / 4.0, plot_dim.1 as f32 / 4.0);
    let offset = (2.0, 2.0);
    // White RGB canvas.
    let mut plot: Vec<_> = iter::repeat(255u8)
        .take(plot_dim.0 * plot_dim.1 * 3)
        .collect();
    plot_2d(&spline, &mut plot[..], plot_dim, scale, offset);
    let expect_plot = match image::open("tests/quadratic_2d_expect.png") {
        Ok(image::ImageRgb8(img)) => img.into_vec(),
        Ok(_) => panic!("Invalid image found for expected quadratic 2d plot"),
        Err(e) => panic!("{}", e),
    };
    assert!(plot == expect_plot);
}
/// Render the cubic example spline and compare against the golden image.
/// NOTE(review): same pre-0.22 `image` API and fixture dependency as above.
#[test]
fn plot_cubic2d() {
    let points = vec![
        Point::new(-1.5, -1.5),
        Point::new(-0.5, 1.5),
        Point::new(0.5, -1.5),
        Point::new(1.5, 1.5),
    ];
    // Non-uniform knot vector with interior multiplicities.
    let knots = vec![0.0, 1.0, 2.0, 2.0, 5.0, 5.0, 6.0, 7.0];
    let degree = 3;
    let spline = bspline::BSpline::new(degree, points, knots);
    let plot_dim = (200, 200);
    let scale = (plot_dim.0 as f32 / 4.0, plot_dim.1 as f32 / 4.0);
    let offset = (2.0, 2.0);
    // White RGB canvas.
    let mut plot: Vec<_> = iter::repeat(255u8)
        .take(plot_dim.0 * plot_dim.1 * 3)
        .collect();
    plot_2d(&spline, &mut plot[..], plot_dim, scale, offset);
    let expect_plot = match image::open("tests/cubic_2d_expect.png") {
        Ok(image::ImageRgb8(img)) => img.into_vec(),
        Ok(_) => panic!("Invalid image found for expected cubic 2d plot"),
        Err(e) => panic!("{}", e),
    };
    assert!(plot == expect_plot);
}
/// Render the quartic example spline and compare against the golden image.
/// NOTE(review): same pre-0.22 `image` API and fixture dependency as above.
#[test]
fn plot_quartic2d() {
    let points = vec![
        Point::new(-1.8, -1.4),
        Point::new(-1.2, 0.5),
        Point::new(-0.2, -0.8),
        Point::new(-0.6, 0.7),
        Point::new(0.0, 1.6),
        Point::new(1.0, 0.0),
        Point::new(0.6, -0.3),
        Point::new(0.0, -1.0),
    ];
    // Clamped at both ends (degree + 1 repeated knots).
    let knots = vec![
        0.0, 0.0, 0.0, 0.0, 0.2, 1.0, 2.0, 3.0, 5.0, 5.0, 5.0, 5.0, 5.0,
    ];
    let degree = 4;
    let spline = bspline::BSpline::new(degree, points, knots);
    let plot_dim = (200, 200);
    let scale = (plot_dim.0 as f32 / 4.0, plot_dim.1 as f32 / 4.0);
    let offset = (2.0, 2.0);
    // White RGB canvas.
    let mut plot: Vec<_> = iter::repeat(255u8)
        .take(plot_dim.0 * plot_dim.1 * 3)
        .collect();
    plot_2d(&spline, &mut plot[..], plot_dim, scale, offset);
    let expect_plot = match image::open("tests/quartic_2d_expect.png") {
        Ok(image::ImageRgb8(img)) => img.into_vec(),
        Ok(_) => panic!("Invalid image found for expected quartic 2d plot"),
        Err(e) => panic!("{}", e),
    };
    assert!(plot == expect_plot);
}
|
extern crate chrono;
#[macro_use]
extern crate diesel;
extern crate jsonrpc_core;
#[macro_use]
extern crate jsonrpc_macros;
#[macro_use]
extern crate nom;
extern crate rumqtt;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate uuid;
#[macro_use]
extern crate failure;
use diesel::{r2d2, PgConnection};
use rumqtt::{Message as MqttMessage, MqttCallback, MqttClient, MqttOptions, QoS};
use std::sync::{mpsc, Arc, Mutex};
use std::thread;
use messages::{Envelope, EventKind, Notification};
use topic::{AgentTopic, AppTopic, ResourceKind, Topic};
/// Borrow a pooled DB connection, panicking with a clear message on failure.
macro_rules! establish_connection {
    ($pool:expr) => {
        &$pool.get().expect("Error establishing DB connection")
    };
}
pub mod error;
pub mod messages;
pub mod rpc;
pub mod topic;
pub mod version;
pub mod models;
pub mod schema;
/// r2d2 connection pool over Postgres, cloned into each handler invocation.
type DbPool = r2d2::Pool<r2d2::ConnectionManager<PgConnection>>;
/// Runtime configuration for the service.
#[derive(Clone)]
pub struct Options {
    /// MQTT broker connection options.
    pub mqtt: MqttOptions,
    /// Postgres connection string for the r2d2 pool.
    pub database_url: String,
}
/// Run the MQTT service: connect, subscribe, then process inbound requests
/// and outbound notifications on two worker threads until both exit.
///
/// # Errors
/// Returns an error if the MQTT client fails to start or subscribe; errors
/// inside the worker threads are logged, not propagated.
pub fn try_run(options: Options) -> Result<(), failure::Error> {
    let database_url = options.database_url.clone();
    // Inbound MQTT messages are forwarded from the client's callback thread
    // into this channel; the Mutex lets the sender live in the callback.
    let (tx, rx) = mpsc::channel::<MqttMessage>();
    let tx = Mutex::new(tx);
    let (notification_tx, notification_rx) = mpsc::channel::<Notification>();
    let callbacks = MqttCallback::new().on_message(move |msg| {
        let tx = tx.lock().unwrap();
        tx.send(msg).unwrap();
    });
    let mut client = MqttClient::start(options.mqtt, Some(callbacks))?;
    subscribe(&mut client)?;
    // The client is shared between the request and notification threads.
    let client = Arc::new(Mutex::new(client));
    let mut handles = vec![];
    // Thread 1: dispatch inbound messages through the JSON-RPC server backed
    // by a Postgres connection pool.
    let handle = thread::spawn({
        let client = Arc::clone(&client);
        move || {
            let manager = r2d2::ConnectionManager::<PgConnection>::new(database_url);
            let pool = r2d2::Pool::builder()
                .build(manager)
                .expect("Error creating pool.");
            let server = rpc::build_server();
            for msg in rx.iter() {
                let notification_tx = notification_tx.clone();
                let pool = pool.clone();
                let mut client = client.lock().unwrap();
                if let Err(e) = handle_message(&server, &mut client, &msg, notification_tx, pool) {
                    use std::io::Write;
                    let stderr = &mut ::std::io::stderr();
                    let errmsg = "Error writing to stderr";
                    writeln!(stderr, "error: {}", e).expect(errmsg);
                }
            }
        }
    });
    handles.push(handle);
    // Thread 2: publish outbound notifications to the appropriate topic.
    let handle = thread::spawn({
        let client = Arc::clone(&client);
        move || {
            for notification in notification_rx.iter() {
                let topic = match notification {
                    Notification::Event(ref kind) => {
                        let app_topic = match *kind {
                            EventKind::AgentJoin(ref event) => AppTopic {
                                room_id: event.room_id,
                                resource: ResourceKind::Agents,
                            },
                            EventKind::AgentLeave(ref event) => AppTopic {
                                room_id: event.room_id,
                                resource: ResourceKind::Agents,
                            },
                            EventKind::TrackCreate(ref event) => AppTopic {
                                room_id: event.room_id,
                                resource: ResourceKind::Tracks,
                            },
                            EventKind::TrackDelete(ref event) => AppTopic {
                                room_id: event.room_id,
                                resource: ResourceKind::Tracks,
                            },
                            // State updates are never published as events.
                            EventKind::StateUpdate(_) => unreachable!(),
                        };
                        Topic::App(app_topic)
                    }
                    Notification::Method(ref m) => Topic::Agent(AgentTopic::new_in(m.agent_id)),
                };
                let note = jsonrpc_core::Notification::from(notification);
                // Fixed: `&note` had been corrupted to the mojibake `¬e`
                // (HTML-entity mangling), which does not compile.
                let payload = serde_json::to_string(&note).unwrap();
                println!("EVENT: {}", payload);
                let mut client = client.lock().unwrap();
                client
                    .publish(&topic.to_string(), QoS::Level1, payload.into_bytes())
                    .unwrap();
            }
        }
    });
    handles.push(handle);
    for handle in handles {
        handle.join().expect("Error joining a thread");
    }
    Ok(())
}
/// Subscribe the client to the ping, agent-signal, and agent-state topics.
fn subscribe(client: &mut MqttClient) -> Result<(), failure::Error> {
    client.subscribe(vec![
        ("ping", QoS::Level0),
        (
            "agents/+/out/signals.netology-group.services/api/v1",
            QoS::Level1,
        ),
        ("agents/+/state/api/v1", QoS::Level1),
    ])?;
    Ok(())
}
/// Decode one inbound MQTT message, dispatch it through the JSON-RPC server,
/// and publish any response to the topic's reverse (reply) topic.
fn handle_message(
    server: &rpc::Server,
    mqtt_client: &mut MqttClient,
    mqtt_msg: &MqttMessage,
    notification_tx: ::std::sync::mpsc::Sender<Notification>,
    pool: DbPool,
) -> Result<(), failure::Error> {
    println!("Received message: {:?}", mqtt_msg);
    let topic = Topic::parse(&mqtt_msg.topic)?;
    println!("Topic: {:?}", topic);
    // Payload must be valid UTF-8 JSON carrying an `Envelope`.
    let payload = String::from_utf8(mqtt_msg.payload.to_vec())?;
    println!("Payload: {:?}", payload);
    let envelope: Envelope = serde_json::from_str(&payload)?;
    let request = envelope.msg;
    let meta = rpc::Meta {
        subject: envelope.sub,
        notification_tx: Some(notification_tx),
        db_pool: Some(pool),
    };
    // Reply only if the RPC produced a response AND the topic has a reverse.
    if let Some(resp) = server.handle_request_sync(&request, meta) {
        if let Some(topic) = topic.get_reverse() {
            mqtt_client.publish(&topic.to_string(), QoS::Level1, resp.into_bytes())?;
        }
    }
    Ok(())
}
|
use crate::compiler::{Code, CompiledFunction};
use crate::ast;
use std::collections::HashMap;
/// Errors that can occur while running compiled code.
///
/// Currently uninhabited; kept so the `Result` plumbing is in place for
/// future error variants.
#[derive(thiserror::Error, Debug)]
#[error("running")]
pub enum RunError {
}
/// Per-call runtime state: pending arguments plus named value bindings.
#[derive(Default)]
pub struct Environment {
    // Argument stack consumed by `Code::PopArg` (popped from the back).
    args: Vec<Evaluation>,
    // Named bindings, keyed by interned name.
    values: HashMap<ast::Name, Evaluation>,
}
/// Interpreter context threaded through `run_expression`.
pub struct Tables<'compiler, Context> {
    /// Caller-supplied context, passed along through `switch_tables`.
    pub ctxt: Context,
    /// Compiled functions, keyed by interned name.
    pub fns: &'compiler HashMap<ast::Name, CompiledFunction>,
    /// Hook that dumps compiler-side state (invoked by `Code::Dump`).
    pub dump: &'compiler dyn Fn(),
    /// Produces the tables to use when evaluating a call to the named function.
    pub switch_tables: &'compiler dyn for <'a> Fn(&Tables<'a, Context>, &ast::Name) -> Tables<'a, Context>,
    /// Resolves an interned name to its string for diagnostics.
    pub name: &'compiler dyn Fn(ast::Name) -> &'compiler str,
}
/// A runtime value produced by executing `Code`.
#[derive(Debug, Clone)]
pub enum Evaluation {
    /// The unit / no-value result.
    Nil,
    /// A 32-bit intrinsic integer.
    IntrinsicInt32(i32),
    /// A struct-like aggregate of named field values.
    Composite {
        fields: Vec<(ast::Name, Evaluation)>,
    }
}
/// Execute one compiled `Code` instruction against `env` and return its value.
///
/// Side-effect-only instructions (`Nop`, `Clear`, `Dump`, `Set`, `PopArg`)
/// evaluate to `Evaluation::Nil`.
pub fn run_expression<Context>(env: &mut Environment, tables: &Tables<'_, Context>, expr: Code) -> Result<Evaluation, RunError> {
    match expr {
        Code::Nop => Ok(Evaluation::Nil),
        Code::Clear => Ok(Evaluation::Nil),
        Code::Dump => {
            // Dump the compiler's view first, then the runtime bindings.
            (tables.dump)();
            println!("# runtime");
            println!("## values");
            for (name, value) in &env.values {
                // Pass the `&Evaluation` directly (was `&value`, a needless
                // `&&Evaluation` that only compiled via deref coercion).
                println!("{}: {}", (tables.name)(*name), EvalDisplay(value, tables.name));
            }
            Ok(Evaluation::Nil)
        }
        Code::IntrinsicLiteralInt32(i) => Ok(Evaluation::IntrinsicInt32(i)),
        Code::IntrinsicCallInt32WrappingAdd(a, b) => {
            // Operands are *moved out* of the environment (single-use reads).
            let eval_a = env.values.remove(&a).expect("eval");
            let eval_b = env.values.remove(&b).expect("eval");
            let val_a = match eval_a {
                Evaluation::IntrinsicInt32(a) => a,
                _ => panic!(),
            };
            let val_b = match eval_b {
                Evaluation::IntrinsicInt32(a) => a,
                _ => panic!(),
            };
            let res = val_a.wrapping_add(val_b);
            Ok(Evaluation::IntrinsicInt32(res))
        }
        Code::Set(name, code) => {
            let eval = run_expression(env, tables, *code)?;
            env.values.insert(name, eval);
            Ok(Evaluation::Nil)
        }
        Code::PopArg { name } => {
            let eval = env.args.pop().expect("pop arg");
            env.values.insert(name, eval);
            Ok(Evaluation::Nil)
        }
        Code::Read(name) => {
            let eval = if let Some(eval) = env.values.get(&name) {
                eval
            } else {
                panic!("reading empty name {}", (tables.name)(name));
            };
            let eval = eval.clone();
            Ok(eval)
        }
        Code::Call { name, args } => {
            // Evaluate arguments in the caller's environment...
            let mut eval_args = vec![];
            for arg in args {
                let eval = run_expression(env, tables, arg)?;
                eval_args.push(eval);
            }
            // ...then run the callee in a fresh environment with the tables
            // switched for the target function.
            let mut env = Environment {
                args: eval_args,
                .. Environment::default()
            };
            let fn_ = &tables.fns[&name];
            let tables = (tables.switch_tables)(tables, &name);
            let mut final_eval = Evaluation::Nil;
            for code in &fn_.codes {
                let code = code.clone();
                final_eval = run_expression(&mut env, &tables, code)?;
            }
            Ok(final_eval)
        }
        Code::Composite { fields } => {
            let mut evals = vec![];
            for field in fields {
                // Was `&tables` — a redundant `&&Tables` double-reference.
                let eval = run_expression(env, tables, *field.1)?;
                evals.push((field.0, eval));
            }
            Ok(Evaluation::Composite {
                fields: evals
            })
        }
    }
}
/// Display adapter for `Evaluation` that resolves interned names through the
/// supplied name-lookup function.
pub struct EvalDisplay<'e, 'n>(pub &'e Evaluation, pub &'n dyn Fn(ast::Name) -> &'n str);
impl<'e, 'n> std::fmt::Display for EvalDisplay<'e, 'n> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.0 {
            Evaluation::Nil => {
                write!(f, "Nil")
            },
            Evaluation::IntrinsicInt32(i) => {
                write!(f, "{}", i)
            },
            Evaluation::Composite { fields } => {
                // Fixed: these `writeln!` Results were silently discarded,
                // swallowing formatter errors; propagate them with `?`.
                writeln!(f, "{{ ")?;
                for field in fields {
                    let name = (self.1)(field.0);
                    let value = EvalDisplay(&field.1, self.1);
                    writeln!(f, " {}: {}", name, value)?;
                }
                write!(f, "}}")
            }
        }
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Light client backend. Only stores headers and justifications of blocks.
//! Everything else is requested from full nodes on demand.
use parking_lot::RwLock;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use codec::{Decode, Encode};
use super::blockchain::Blockchain;
use hash_db::Hasher;
use sc_client_api::{
backend::{
AuxStore, Backend as ClientBackend, BlockImportOperation, NewBlockState,
PrunableStateChangesTrieStorage, RemoteBackend,
},
blockchain::{well_known_cache_keys, HeaderBackend as BlockchainHeaderBackend},
in_mem::check_genesis_storage,
light::Storage as BlockchainStorage,
UsageInfo,
};
use sp_blockchain::{Error as ClientError, Result as ClientResult};
use sp_core::offchain::storage::InMemOffchainStorage;
use sp_core::storage::{well_known_keys, ChildInfo};
use sp_core::ChangesTrieConfiguration;
use sp_runtime::traits::{Block as BlockT, HashFor, Header, NumberFor, Zero};
use sp_runtime::{generic::BlockId, Justification, Storage};
use sp_state_machine::{
Backend as StateBackend, ChangesTrieTransaction, ChildStorageCollection, InMemoryBackend,
StorageCollection, TrieBackend,
};
/// Justification string for `expect` calls on in-memory state operations.
const IN_MEMORY_EXPECT_PROOF: &str =
    "InMemory state backend has Void error type and always succeeds; qed";
/// Light client backend.
pub struct Backend<S, H: Hasher> {
    blockchain: Arc<Blockchain<S>>,
    /// In-memory genesis state, captured when the genesis block is imported.
    genesis_state: RwLock<Option<InMemoryBackend<H>>>,
    /// Lock handed out via `get_import_lock` to serialize imports.
    import_lock: RwLock<()>,
}
/// Light block (header and justification) import operation.
pub struct ImportOperation<Block: BlockT, S> {
    /// Header staged by `set_block_data`; `None` means an aux-only operation.
    header: Option<Block::Header>,
    cache: HashMap<well_known_cache_keys::Id, Vec<u8>>,
    leaf_state: NewBlockState,
    /// Pending auxiliary writes; a `None` value deletes the key.
    aux_ops: Vec<(Vec<u8>, Option<Vec<u8>>)>,
    finalized_blocks: Vec<BlockId<Block>>,
    set_head: Option<BlockId<Block>>,
    /// Genesis state, populated by `reset_storage` and kept on genesis import.
    storage_update: Option<InMemoryBackend<HashFor<Block>>>,
    changes_trie_config_update: Option<Option<ChangesTrieConfiguration>>,
    _phantom: std::marker::PhantomData<S>,
}
/// Either in-memory genesis state, or locally-unavailable state.
pub enum GenesisOrUnavailableState<H: Hasher> {
    /// Genesis state - storage values are stored in-memory.
    Genesis(InMemoryBackend<H>),
    /// We know that state exists, but all calls will fail with error, because it
    /// isn't locally available (light clients fetch it from full nodes).
    Unavailable,
}
impl<S, H: Hasher> Backend<S, H> {
/// Create new light backend.
pub fn new(blockchain: Arc<Blockchain<S>>) -> Self {
Self { blockchain, genesis_state: RwLock::new(None), import_lock: Default::default() }
}
/// Get shared blockchain reference.
pub fn blockchain(&self) -> &Arc<Blockchain<S>> {
&self.blockchain
}
}
// Auxiliary data is delegated directly to the underlying blockchain storage.
impl<S: AuxStore, H: Hasher> AuxStore for Backend<S, H> {
    fn insert_aux<
        'a,
        'b: 'a,
        'c: 'a,
        I: IntoIterator<Item = &'a (&'c [u8], &'c [u8])>,
        D: IntoIterator<Item = &'a &'b [u8]>,
    >(
        &self,
        insert: I,
        delete: D,
    ) -> ClientResult<()> {
        self.blockchain.storage().insert_aux(insert, delete)
    }
    fn get_aux(&self, key: &[u8]) -> ClientResult<Option<Vec<u8>>> {
        self.blockchain.storage().get_aux(key)
    }
}
impl<S, Block> ClientBackend<Block> for Backend<S, HashFor<Block>>
where
    Block: BlockT,
    S: BlockchainStorage<Block>,
    Block::Hash: Ord,
{
    type BlockImportOperation = ImportOperation<Block, S>;
    type Blockchain = Blockchain<S>;
    type State = GenesisOrUnavailableState<HashFor<Block>>;
    type OffchainStorage = InMemOffchainStorage;
    /// Start a new, empty import operation.
    fn begin_operation(&self) -> ClientResult<Self::BlockImportOperation> {
        Ok(ImportOperation {
            header: None,
            cache: Default::default(),
            leaf_state: NewBlockState::Normal,
            aux_ops: Vec::new(),
            finalized_blocks: Vec::new(),
            set_head: None,
            storage_update: None,
            changes_trie_config_update: None,
            _phantom: Default::default(),
        })
    }
    /// No per-block state to prepare on a light client.
    fn begin_state_operation(
        &self,
        _operation: &mut Self::BlockImportOperation,
        _block: BlockId<Block>,
    ) -> ClientResult<()> {
        Ok(())
    }
    /// Apply a prepared operation: finalizations, then header import (with
    /// cache update), then aux writes, then the head update — in that order.
    fn commit_operation(&self, mut operation: Self::BlockImportOperation) -> ClientResult<()> {
        if !operation.finalized_blocks.is_empty() {
            for block in operation.finalized_blocks {
                self.blockchain.storage().finalize_header(block)?;
            }
        }
        if let Some(header) = operation.header {
            let is_genesis_import = header.number().is_zero();
            // Changes-trie config travels to storage via the header cache.
            if let Some(new_config) = operation.changes_trie_config_update {
                operation
                    .cache
                    .insert(well_known_cache_keys::CHANGES_TRIE_CONFIG, new_config.encode());
            }
            self.blockchain.storage().import_header(
                header,
                operation.cache,
                operation.leaf_state,
                operation.aux_ops,
            )?;
            // when importing genesis block => remember its state
            if is_genesis_import {
                *self.genesis_state.write() = operation.storage_update.take();
            }
        } else {
            // Aux-only operation: apply writes (Some) and deletes (None).
            for (key, maybe_val) in operation.aux_ops {
                match maybe_val {
                    Some(val) => self
                        .blockchain
                        .storage()
                        .insert_aux(&[(&key[..], &val[..])], std::iter::empty())?,
                    None =>
                        self.blockchain.storage().insert_aux(std::iter::empty(), &[&key[..]])?,
                }
            }
        }
        if let Some(set_head) = operation.set_head {
            self.blockchain.storage().set_head(set_head)?;
        }
        Ok(())
    }
    fn finalize_block(
        &self,
        block: BlockId<Block>,
        _justification: Option<Justification>,
    ) -> ClientResult<()> {
        self.blockchain.storage().finalize_header(block)
    }
    fn blockchain(&self) -> &Blockchain<S> {
        &self.blockchain
    }
    fn usage_info(&self) -> Option<UsageInfo> {
        self.blockchain.storage().usage_info()
    }
    /// Light clients keep no prunable changes-trie storage.
    fn changes_trie_storage(&self) -> Option<&dyn PrunableStateChangesTrieStorage<Block>> {
        None
    }
    fn offchain_storage(&self) -> Option<Self::OffchainStorage> {
        None
    }
    fn state_at(&self, block: BlockId<Block>) -> ClientResult<Self::State> {
        let block_number = self.blockchain.expect_block_number_from_id(&block)?;
        // special case for genesis block
        if block_number.is_zero() {
            if let Some(genesis_state) = self.genesis_state.read().clone() {
                return Ok(GenesisOrUnavailableState::Genesis(genesis_state))
            }
        }
        // else return unavailable state. We do not return error here, because error
        // would mean that we do not know this state at all. But we know that it exists
        Ok(GenesisOrUnavailableState::Unavailable)
    }
    /// Reverting requires full state; not possible on a light client.
    fn revert(
        &self,
        _n: NumberFor<Block>,
        _revert_finalized: bool,
    ) -> ClientResult<(NumberFor<Block>, HashSet<Block::Hash>)> {
        Err(ClientError::NotAvailableOnLightClient)
    }
    fn get_import_lock(&self) -> &RwLock<()> {
        &self.import_lock
    }
}
impl<S, Block> RemoteBackend<Block> for Backend<S, HashFor<Block>>
where
    Block: BlockT,
    S: BlockchainStorage<Block> + 'static,
    Block::Hash: Ord,
{
    /// Only the genesis block's state is ever available locally, and only
    /// once it has actually been imported.
    fn is_local_state_available(&self, block: &BlockId<Block>) -> bool {
        self.genesis_state.read().is_some() &&
            self.blockchain
                .expect_block_number_from_id(block)
                .map(|num| num.is_zero())
                .unwrap_or(false)
    }
    fn remote_blockchain(&self) -> Arc<dyn super::blockchain::RemoteBlockchain<Block>> {
        self.blockchain.clone()
    }
}
impl<S, Block> BlockImportOperation<Block> for ImportOperation<Block, S>
where
    Block: BlockT,
    S: BlockchainStorage<Block>,
    Block::Hash: Ord,
{
    type State = GenesisOrUnavailableState<HashFor<Block>>;
    fn state(&self) -> ClientResult<Option<&Self::State>> {
        // None means 'locally-stateless' backend
        Ok(None)
    }
    /// Stage a header for import; body and justification are discarded on
    /// light clients.
    fn set_block_data(
        &mut self,
        header: Block::Header,
        _body: Option<Vec<Block::Extrinsic>>,
        _justification: Option<Justification>,
        state: NewBlockState,
    ) -> ClientResult<()> {
        self.leaf_state = state;
        self.header = Some(header);
        Ok(())
    }
    fn update_cache(&mut self, cache: HashMap<well_known_cache_keys::Id, Vec<u8>>) {
        self.cache = cache;
    }
    fn update_db_storage(
        &mut self,
        _update: <Self::State as StateBackend<HashFor<Block>>>::Transaction,
    ) -> ClientResult<()> {
        // we're not storing anything locally => ignore changes
        Ok(())
    }
    fn update_changes_trie(
        &mut self,
        _update: ChangesTrieTransaction<HashFor<Block>, NumberFor<Block>>,
    ) -> ClientResult<()> {
        // we're not storing anything locally => ignore changes
        Ok(())
    }
    /// Build the in-memory genesis state from raw storage and return its
    /// root; only ever called while importing the genesis block.
    fn reset_storage(&mut self, input: Storage) -> ClientResult<Block::Hash> {
        check_genesis_storage(&input)?;
        // changes trie configuration
        let changes_trie_config =
            input.top.iter().find(|(k, _)| &k[..] == well_known_keys::CHANGES_TRIE_CONFIG).map(
                |(_, v)| {
                    Decode::decode(&mut &v[..])
                        .expect("changes trie configuration is encoded properly at genesis")
                },
            );
        self.changes_trie_config_update = Some(changes_trie_config);
        // this is only called when genesis block is imported => shouldn't be performance bottleneck
        let mut storage: HashMap<Option<ChildInfo>, _> = HashMap::new();
        storage.insert(None, input.top);
        // create a list of children keys to re-compute roots for
        let child_delta = input
            .children_default
            .iter()
            .map(|(_storage_key, storage_child)| (&storage_child.child_info, std::iter::empty()));
        // make sure to persist the child storage
        for (_child_key, storage_child) in input.children_default.clone() {
            storage.insert(Some(storage_child.child_info), storage_child.data);
        }
        let storage_update = InMemoryBackend::from(storage);
        let (storage_root, _) = storage_update.full_storage_root(std::iter::empty(), child_delta);
        self.storage_update = Some(storage_update);
        Ok(storage_root)
    }
    fn insert_aux<I>(&mut self, ops: I) -> ClientResult<()>
    where
        I: IntoIterator<Item = (Vec<u8>, Option<Vec<u8>>)>,
    {
        // Writes are buffered here and applied in `commit_operation`.
        self.aux_ops.append(&mut ops.into_iter().collect());
        Ok(())
    }
    fn update_storage(
        &mut self,
        _update: StorageCollection,
        _child_update: ChildStorageCollection,
    ) -> ClientResult<()> {
        // we're not storing anything locally => ignore changes
        Ok(())
    }
    fn mark_finalized(
        &mut self,
        block: BlockId<Block>,
        _justification: Option<Justification>,
    ) -> ClientResult<()> {
        self.finalized_blocks.push(block);
        Ok(())
    }
    fn mark_head(&mut self, block: BlockId<Block>) -> ClientResult<()> {
        self.set_head = Some(block);
        Ok(())
    }
}
impl<H: Hasher> std::fmt::Debug for GenesisOrUnavailableState<H> {
    /// Delegates to the in-memory backend's `Debug` for genesis state; prints
    /// a fixed marker when state is locally unavailable.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            GenesisOrUnavailableState::Genesis(state) => state.fmt(f),
            GenesisOrUnavailableState::Unavailable => f.write_str("Unavailable"),
        }
    }
}
impl<H: Hasher> StateBackend<H> for GenesisOrUnavailableState<H>
where
    H::Out: Ord + codec::Codec,
{
    type Error = ClientError;
    type Transaction = <InMemoryBackend<H> as StateBackend<H>>::Transaction;
    type TrieBackendStorage = <InMemoryBackend<H> as StateBackend<H>>::TrieBackendStorage;

    /// Reads a top-level storage value; errors when no state is available.
    fn storage(&self, key: &[u8]) -> ClientResult<Option<Vec<u8>>> {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                Ok(state.storage(key).expect(IN_MEMORY_EXPECT_PROOF)),
            GenesisOrUnavailableState::Unavailable => Err(ClientError::NotAvailableOnLightClient),
        }
    }

    /// Reads a child-trie storage value; errors when no state is available.
    fn child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> ClientResult<Option<Vec<u8>>> {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                Ok(state.child_storage(child_info, key).expect(IN_MEMORY_EXPECT_PROOF)),
            GenesisOrUnavailableState::Unavailable => Err(ClientError::NotAvailableOnLightClient),
        }
    }

    /// Next key in lexicographic order after `key`, if state is available.
    fn next_storage_key(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                Ok(state.next_storage_key(key).expect(IN_MEMORY_EXPECT_PROOF)),
            GenesisOrUnavailableState::Unavailable => Err(ClientError::NotAvailableOnLightClient),
        }
    }

    /// Next child-trie key in lexicographic order after `key`.
    fn next_child_storage_key(
        &self,
        child_info: &ChildInfo,
        key: &[u8],
    ) -> Result<Option<Vec<u8>>, Self::Error> {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                Ok(state.next_child_storage_key(child_info, key).expect(IN_MEMORY_EXPECT_PROOF)),
            GenesisOrUnavailableState::Unavailable => Err(ClientError::NotAvailableOnLightClient),
        }
    }

    // The iteration callbacks below silently do nothing when the state is
    // unavailable, mirroring the original behavior.

    fn for_keys_with_prefix<A: FnMut(&[u8])>(&self, prefix: &[u8], action: A) {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                state.for_keys_with_prefix(prefix, action),
            GenesisOrUnavailableState::Unavailable => (),
        }
    }

    fn for_key_values_with_prefix<A: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], action: A) {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                state.for_key_values_with_prefix(prefix, action),
            GenesisOrUnavailableState::Unavailable => (),
        }
    }

    fn for_keys_in_child_storage<A: FnMut(&[u8])>(&self, child_info: &ChildInfo, action: A) {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                state.for_keys_in_child_storage(child_info, action),
            GenesisOrUnavailableState::Unavailable => (),
        }
    }

    fn for_child_keys_with_prefix<A: FnMut(&[u8])>(
        &self,
        child_info: &ChildInfo,
        prefix: &[u8],
        action: A,
    ) {
        match self {
            GenesisOrUnavailableState::Genesis(state) =>
                state.for_child_keys_with_prefix(child_info, prefix, action),
            GenesisOrUnavailableState::Unavailable => (),
        }
    }

    /// Computes the storage root; returns defaults when state is unavailable.
    fn storage_root<'a>(
        &self,
        delta: impl Iterator<Item = (&'a [u8], Option<&'a [u8]>)>,
    ) -> (H::Out, Self::Transaction)
    where
        H::Out: Ord,
    {
        match self {
            GenesisOrUnavailableState::Genesis(state) => state.storage_root(delta),
            GenesisOrUnavailableState::Unavailable => Default::default(),
        }
    }

    /// Computes a child storage root. The transaction from the genesis state
    /// is deliberately dropped and replaced with an empty default, as in the
    /// original implementation.
    fn child_storage_root<'a>(
        &self,
        child_info: &ChildInfo,
        delta: impl Iterator<Item = (&'a [u8], Option<&'a [u8]>)>,
    ) -> (H::Out, bool, Self::Transaction)
    where
        H::Out: Ord,
    {
        match self {
            GenesisOrUnavailableState::Genesis(state) => {
                let (root, is_equal, _) = state.child_storage_root(child_info, delta);
                (root, is_equal, Default::default())
            },
            GenesisOrUnavailableState::Unavailable => (H::Out::default(), true, Default::default()),
        }
    }

    /// All key/value pairs, or an empty list when unavailable.
    fn pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)> {
        match self {
            GenesisOrUnavailableState::Genesis(state) => state.pairs(),
            GenesisOrUnavailableState::Unavailable => Vec::new(),
        }
    }

    /// All keys under `prefix`, or an empty list when unavailable.
    fn keys(&self, prefix: &[u8]) -> Vec<Vec<u8>> {
        match self {
            GenesisOrUnavailableState::Genesis(state) => state.keys(prefix),
            GenesisOrUnavailableState::Unavailable => Vec::new(),
        }
    }

    fn register_overlay_stats(&mut self, _stats: &sp_state_machine::StateMachineStats) {}

    fn usage_info(&self) -> sp_state_machine::UsageInfo {
        sp_state_machine::UsageInfo::empty()
    }

    fn as_trie_backend(&mut self) -> Option<&TrieBackend<Self::TrieBackendStorage, H>> {
        match self {
            GenesisOrUnavailableState::Genesis(state) => state.as_trie_backend(),
            GenesisOrUnavailableState::Unavailable => None,
        }
    }
}
|
/// Returns the fixed placeholder string `"asdf"`.
pub fn test() -> String {
    String::from("asdf")
}
|
use tui::{
backend::Backend,
layout::{Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style},
text::Span,
text::Spans,
widgets::{Block, Borders, List, ListItem, ListState, Paragraph},
Frame, Terminal,
};
use crate::{model::Post, model::PostView, state::State};
use anyhow::Result;
use snailquote::unescape;
use textwrap::wrap;
/// Unescapes `s` and word-wraps each of its lines to fit inside `rect`,
/// producing one `Spans` per wrapped display line.
///
/// Seven columns are reserved for list decoration (borders and the
/// `">> "` highlight symbol used by the list widgets in this module).
fn wrap_text<'a>(s: &'a str, rect: Rect) -> Vec<Spans<'a>> {
    // `saturating_sub` prevents the `u16` subtraction from panicking on
    // narrow rects (the old `rect.width as usize - 7` underflowed when
    // width < 7); `.max(1)` keeps textwrap making forward progress.
    let width = rect.width.saturating_sub(7).max(1) as usize;
    unescape(s)
        .expect("failed to unescape string")
        .lines()
        .flat_map(|line| wrap(line, width))
        .map(|line| Spans::from(Span::from(String::from(line))))
        .collect() // TODO: Less froms?
}
/// Renders the list of posts for a subreddit: each entry is the wrapped
/// title followed by a comments/upvotes stats line.
fn render_subreddit_view<B: Backend>(
    f: &mut Frame<B>,
    size: Rect,
    posts: &[Post],
    list_state: &mut ListState,
) {
    let entries: Vec<ListItem> = posts
        .iter()
        .map(|post| {
            let mut lines = wrap_text(post.title.as_str(), size);
            let stats = format!(
                " {} comments, {} upvotes",
                post.num_comments, post.up_votes
            );
            lines.push(Spans::from(Span::from(stats)));
            ListItem::new(lines)
        })
        .collect();

    let highlight = Style::default()
        .bg(Color::LightGreen)
        .add_modifier(Modifier::BOLD);
    let list = List::new(entries)
        .block(Block::default().borders(Borders::ALL).title("List"))
        .style(Style::default().fg(Color::White))
        .highlight_style(highlight)
        .highlight_symbol(">> ");
    f.render_stateful_widget(list, size, list_state);
}
/// Renders a single post: the optional self text first (treated like a
/// leading comment entry), then every comment, each with an upvote line.
fn render_post_view(
    f: &mut Frame<impl Backend>,
    size: Rect,
    post_view: &PostView,
    list_state: &mut ListState,
) {
    use std::iter::once;
    let items: Vec<ListItem> = once(
        post_view
            .self_text
            .as_ref()
            // `body.as_str()` is already a `&str`; the old `&*body.as_str()`
            // reborrow was redundant.
            .map(|(body, up)| (body.as_str(), *up)),
    )
    // `flatten` on the single `Option` replaces the identity `filter_map`.
    .flatten()
    .chain(
        post_view
            .comments
            .iter()
            .map(|comment| (comment.body.as_str(), comment.up_votes)),
    )
    .map(|(body, up_votes)| {
        let mut spans = wrap_text(body, size);
        spans.push(Spans::from(Span::from(format!(
            " {} upvotes",
            up_votes
        ))));
        ListItem::new(spans)
    })
    .collect();
    let list = List::new(items)
        .block(Block::default().borders(Borders::ALL).title("List"))
        .style(Style::default().fg(Color::White))
        .highlight_style(
            Style::default()
                .bg(Color::LightGreen)
                .add_modifier(Modifier::BOLD),
        )
        .highlight_symbol(">> ");
    f.render_stateful_widget(list, size, list_state);
}
/// Paints a full-frame "Loading..." placeholder while data is fetched.
fn render_loading<B: Backend>(f: &mut Frame<B>) {
    let placeholder = Paragraph::new(Span::from("Loading..."));
    f.render_widget(placeholder, f.size());
}
/// Draws the subreddit-entry prompt (`/r/<prompt>`) on the bottom row and
/// renders the remaining state stack in the area above it.
fn render_select_subreddit(
    f: &mut Frame<impl Backend>,
    size: Rect,
    prompt: &str,
    stack_tail: &mut [State],
) {
    // `saturating_sub` guards the `u16` height against underflow on a
    // zero-height rect (the old `size.height - 1` panicked there).
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Max(size.height.saturating_sub(1)),
            Constraint::Max(1),
        ])
        .split(size);
    let text = Paragraph::new(Spans::from(vec![Span::from("/r/"), Span::from(prompt)]));
    render_frame(f, chunks[0], stack_tail);
    f.render_widget(text, chunks[1]);
}
/// Dispatches rendering based on the top of the state stack; an empty
/// stack draws nothing.
pub fn render_frame(f: &mut Frame<impl Backend>, size: Rect, state_stack: &mut [State]) {
    if let Some((top, rest)) = state_stack.split_last_mut() {
        match top {
            State::Loading => render_loading(f),
            State::SubList(posts, list_state, _sub) => {
                render_subreddit_view(f, size, posts, list_state);
            }
            State::PostView(post_view, list_state) => {
                render_post_view(f, size, post_view, list_state);
            }
            // The prompt view also re-renders the states beneath it.
            State::SelectSubreddit(prompt) => {
                render_select_subreddit(f, size, prompt, rest);
            }
        }
    }
}
/// Runs one full terminal draw pass for the current state stack.
pub fn render<B: Backend>(terminal: &mut Terminal<B>, state_stack: &mut [State]) -> Result<()> {
    terminal.draw(|frame| render_frame(frame, frame.size(), state_stack))?;
    Ok(())
}
|
use std::ops;
/// Index of a boolean variable; a `Literal` packs this with a sign bit.
pub type Variable = u32;

/// A signed occurrence of a variable, stored packed as
/// `variable << 1 | sign`.
///
/// The packed form keeps `Literal` at 4 bytes (see the size test below)
/// and makes negation a single XOR of the low bit.
#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug, Hash)]
pub struct Literal {
    x: u32,
}

impl Literal {
    /// Packs `variable` and its sign into a literal.
    ///
    /// Note: variables with the top bit set overflow the shift.
    pub fn new(variable: Variable, signed: bool) -> Literal {
        Literal {
            x: variable << 1 | (signed as u32),
        }
    }
    /// Returns true if `Literal` is signed
    ///
    /// # Examples
    ///
    /// ```
    /// assert!(caqe::Literal::new(0, true).signed());
    /// assert!(!caqe::Literal::new(0, false).signed());
    /// ```
    pub fn signed(&self) -> bool {
        (self.x & 1) != 0
    }
    /// Returns the positive (sign-cleared) version of this literal.
    pub fn unsigned(&self) -> Literal {
        Literal { x: self.x & !1 }
    }
    /// Extracts the variable index, discarding the sign bit.
    pub fn variable(&self) -> Variable {
        self.x >> 1
    }
    /// Converts to the DIMACS convention: negative integers denote
    /// signed (negated) literals.
    pub fn dimacs(&self) -> i32 {
        let base = self.variable() as i32;
        if self.signed() {
            -base
        } else {
            base
        }
    }
}

impl ops::Neg for Literal {
    type Output = Literal;
    /// Negation flips only the sign bit.
    fn neg(self) -> Literal {
        Literal { x: self.x ^ 1 }
    }
}

impl From<i32> for Literal {
    /// Builds a literal from a DIMACS-style signed integer.
    fn from(literal: i32) -> Self {
        let signed = literal < 0;
        // `unsigned_abs` (unlike `abs`) cannot panic on `i32::MIN` in
        // debug builds; it yields the correct magnitude 2^31 directly.
        let abs: Variable = literal.unsigned_abs();
        Literal::new(abs, signed)
    }
}
#[cfg(test)]
mod tests {
    use std::mem;
    use super::*;

    /// The packed representation must stay exactly one `u32` wide.
    #[test]
    fn size_of_literal() {
        let size = mem::size_of::<Literal>();
        assert!(
            size == 4,
            "Size of `Literal` should be 4 bytes, was `{}`",
            size
        );
    }
}
|
use DataHelper;
// ECS system marker for the audio-update pass. The `#[process(update)]`
// attribute appears to register the free function `update` below as this
// system's per-tick callback — TODO confirm against the `System` derive.
#[derive(Default, System)]
#[process(update)]
pub struct AudioUpdate;
/// Per-tick hook: pumps the FMOD Studio system so queued commands and
/// callbacks get processed.
fn update(_: &mut AudioUpdate, data: &mut DataHelper) {
    let studio = &mut data.services.audio.studio;
    studio
        .update()
        .expect("FMOD Studio updating shouldn't fail");
}
|
#[doc = r" Register block"]
// Memory-mapped RTC register layout (appears to be svd2rust-style
// generated code). Field order and the `_reserved` padding arrays encode
// the exact hardware offsets, so the `#[repr(C)]` layout must not change.
#[repr(C)]
pub struct RegisterBlock {
    // Padding up to offset 0x40, where the first RTC register lives.
    _reserved0: [u8; 64usize],
    #[doc = "0x40 - RTC Counters Lower"]
    pub ctrlow: CTRLOW,
    #[doc = "0x44 - RTC Counters Upper"]
    pub ctrup: CTRUP,
    #[doc = "0x48 - RTC Alarms Lower"]
    pub almlow: ALMLOW,
    #[doc = "0x4c - RTC Alarms Upper"]
    pub almup: ALMUP,
    #[doc = "0x50 - RTC Control Register"]
    pub rtcctl: RTCCTL,
    // Padding from 0x54 up to the interrupt registers at offset 0x100.
    _reserved1: [u8; 172usize],
    #[doc = "0x100 - RTC Interrupt Register: Enable"]
    pub inten: INTEN,
    #[doc = "0x104 - RTC Interrupt Register: Status"]
    pub intstat: INTSTAT,
    #[doc = "0x108 - RTC Interrupt Register: Clear"]
    pub intclr: INTCLR,
    #[doc = "0x10c - RTC Interrupt Register: Set"]
    pub intset: INTSET,
}
// Register proxy types: each wraps a `VolatileCell<u32>` for MMIO access,
// and each `pub mod` holds the generated bit-field accessors for that
// register (svd2rust convention — the modules live in sibling files).
#[doc = "RTC Counters Lower"]
pub struct CTRLOW {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Counters Lower"]
pub mod ctrlow;
#[doc = "RTC Counters Upper"]
pub struct CTRUP {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Counters Upper"]
pub mod ctrup;
#[doc = "RTC Alarms Lower"]
pub struct ALMLOW {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Alarms Lower"]
pub mod almlow;
#[doc = "RTC Alarms Upper"]
pub struct ALMUP {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Alarms Upper"]
pub mod almup;
#[doc = "RTC Control Register"]
pub struct RTCCTL {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Control Register"]
pub mod rtcctl;
#[doc = "RTC Interrupt Register: Enable"]
pub struct INTEN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Interrupt Register: Enable"]
pub mod inten;
#[doc = "RTC Interrupt Register: Status"]
pub struct INTSTAT {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Interrupt Register: Status"]
pub mod intstat;
#[doc = "RTC Interrupt Register: Clear"]
pub struct INTCLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Interrupt Register: Clear"]
pub mod intclr;
#[doc = "RTC Interrupt Register: Set"]
pub struct INTSET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "RTC Interrupt Register: Set"]
pub mod intset;
|
pub use std::collections::HashMap;
pub use std::boxed::Box;
pub use std::string::ToString;
/// Builds a [`Json`] value from JSON-like literal syntax.
///
/// Supports `null`, arrays, objects with stringifiable keys, and any
/// value convertible to `Json` via `From`. Trailing commas are now
/// accepted in both arrays and objects (`$(,)?`), which is
/// backward-compatible with all previous invocations.
#[macro_export]
macro_rules! json {
    (null) => {
        $crate::Json::Null
    };
    ([ $( $element:tt ),* $(,)? ]) => {
        $crate::Json::Array(vec![ $( json!($element) ),* ])
    };
    ({ $( $key:tt : $value:tt ),* $(,)? }) => {
        $crate::Json::Object(::std::boxed::Box::new(vec![
            $( ($key.to_string(), json!($value)) ),*
        ].into_iter().collect()))
    };
    ($other:tt) => {
        $crate::Json::from($other)
    }
}

/// A dynamically-typed JSON document tree.
#[derive(Clone, PartialEq, Debug)]
pub enum Json {
    Null,
    Boolean(bool),
    Number(f64),
    String(String),
    Array(Vec<Json>),
    // Boxed so `Json`'s inline size stays small even though objects
    // carry a whole `HashMap`.
    Object(Box<HashMap<String, Json>>)
}

impl From<bool> for Json {
    fn from(b: bool) -> Json {
        Json::Boolean(b)
    }
}

/// Implements `From<$t> for Json` for each listed numeric type by
/// converting through `f64`.
macro_rules! impl_from_num_for_json {
    ( $( $t:ident )* ) => {
        $(
            impl From<$t> for Json {
                fn from(n: $t) -> Json {
                    Json::Number(n as f64)
                }
            }
        )*
    };
}
impl_from_num_for_json!(u8 i8 u16 i16 u32 i32 u64 i64 usize isize f32 f64);

impl From<String> for Json {
    fn from(s: String) -> Json {
        Json::String(s)
    }
}

impl<'a> From<&'a str> for Json {
    fn from(s: &'a str) -> Json {
        Json::String(s.to_string())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn json_null() {
        assert_eq!(json!(null), Json::Null);
    }

    #[test]
    fn json_array_with_json_element() {
        // The inner object is valid JSON but not a single `expr`, which is
        // why the macro matches elements as `tt` fragments.
        let from_macro = json!(
            [
                {
                    "pitch": 440.0
                }
            ]
        );
        let mut object = HashMap::new();
        object.insert("pitch".to_string(), Json::Number(440.0));
        let by_hand = Json::Array(vec![Json::Object(Box::new(object))]);
        assert_eq!(from_macro, by_hand);
    }
}
|
//! Components for constructing HTTP applications.
pub mod concurrency;
pub mod config;
pub mod path;
mod recognizer;
mod scope;
mod service;
#[cfg(test)]
mod tests;
pub(crate) use self::recognizer::Captures;
pub use self::{
config::{Error, Result},
service::{AppBody, AppService},
};
use {
self::{
concurrency::{Concurrency, DefaultConcurrency},
recognizer::{RecognizeError, Recognizer},
scope::{Scope, ScopeId, Scopes},
},
crate::{input::localmap::local_key, uri::Uri},
http::Method,
indexmap::IndexMap,
std::{fmt, sync::Arc},
};
// Request-local storage key for the peer's socket address — presumably
// populated by the transport/server layer when a connection is accepted;
// TODO confirm against the server code.
local_key! {
    pub const REMOTE_ADDR: std::net::SocketAddr;
}
/// The main type representing an HTTP application.
#[derive(Debug)]
pub struct App<C: Concurrency = DefaultConcurrency> {
    // Shared, immutable routing/scope data; cloning an `App` only bumps
    // this `Arc`'s reference count (see the manual `Clone` impl).
    inner: Arc<AppInner<C>>,
}
impl<C: Concurrency> Clone for App<C> {
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
}
}
}
impl<C: Concurrency> App<C> {
/// Creates a new instance of `AppService` associated with this `App`.
pub fn new_service(&self) -> AppService<C> {
AppService::new(self.inner.clone())
}
}
/// Shared, immutable routing state behind every `App` handle.
#[derive(Debug)]
struct AppInner<C: Concurrency> {
    // Path recognizer mapping request paths to resources (endpoint sets).
    recognizer: Recognizer<Arc<ResourceData<C>>>,
    // The scope tree, indexed by `ScopeId`.
    scopes: Scopes<ScopeData<C>>,
}
impl<C: Concurrency> AppInner<C> {
    /// Looks up a scope node by its identifier.
    fn scope(&self, id: ScopeId) -> &Scope<ScopeData<C>> {
        &self.scopes[id]
    }
    /// Infers the scope where the input path belongs from the extracted candidates.
    fn infer_scope<'a>(
        &self,
        path: &str,
        resources: impl IntoIterator<Item = &'a ResourceData<C>>,
    ) -> &Scope<ScopeData<C>> {
        // First, extract a series of common ancestors of candidates.
        let ancestors = {
            let mut ancestors: Option<&[ScopeId]> = None;
            for resource in resources {
                let ancestors = ancestors.get_or_insert(&resource.ancestors);
                // `n` is the length of the common prefix between the running
                // ancestor chain and this resource's ancestor chain; `position`
                // returns None when one chain is a prefix of the other.
                let n = (*ancestors)
                    .iter()
                    .zip(&resource.ancestors)
                    .position(|(a, b)| a != b)
                    .unwrap_or_else(|| std::cmp::min(ancestors.len(), resource.ancestors.len()));
                *ancestors = &ancestors[..n];
            }
            ancestors
        };
        // Then, find the oldest ancestor that with the input path as the prefix of URI.
        // Falls back to the deepest common ancestor, and to the root scope when
        // there were no candidates at all.
        let node_id = ancestors
            .and_then(|ancestors| {
                ancestors
                    .iter()
                    .find(|&&scope| self.scope(scope).data.prefix.as_str().starts_with(path)) //
                    .or_else(|| ancestors.last())
                    .cloned()
            })
            .unwrap_or_else(ScopeId::root);
        self.scope(node_id)
    }
    /// Returns the nearest fallback handler at or above `start`, if any.
    fn find_fallback(&self, start: ScopeId) -> Option<&C::Handler> {
        let scope = self.scope(start);
        if let Some(ref fallback) = scope.data.fallback {
            return Some(fallback);
        }
        // NOTE(review): `ancestors()` is iterated in reverse here, which
        // suggests it is ordered root-first — confirm in `scope.rs`.
        scope
            .ancestors()
            .iter()
            .rev()
            .filter_map(|&id| self.scope(id).data.fallback.as_ref())
            .next()
    }
    /// Resolves `path` to a resource, or — on failure — to the scope whose
    /// fallback handler should take the request.
    fn find_resource(
        &self,
        path: &str,
        captures: &mut Option<Captures>,
    ) -> std::result::Result<&Arc<ResourceData<C>>, &Scope<ScopeData<C>>> {
        match self.recognizer.recognize(path, captures) {
            Ok(resource) => Ok(resource),
            // No candidate matched at all: fall back to the root scope.
            Err(RecognizeError::NotMatched) => Err(self.scope(ScopeId::root())),
            // Some candidates share a prefix with `path`: infer the most
            // plausible enclosing scope from their common ancestors.
            Err(RecognizeError::PartiallyMatched(candidates)) => Err(self.infer_scope(
                path,
                candidates
                    .iter()
                    .filter_map(|i| self.recognizer.get(i).map(|e| &**e)),
            )),
        }
    }
}
// Per-scope payload: the scope's URI prefix plus an optional fallback
// handler used when no resource matches inside the scope.
struct ScopeData<C: Concurrency> {
    prefix: Uri,
    fallback: Option<C::Handler>,
}
impl<C: Concurrency> fmt::Debug for ScopeData<C> {
    /// Manual impl because `C::Handler` need not be `Debug`; the fallback
    /// is rendered as an opaque `"<fallback>"` placeholder.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let fallback = self.fallback.as_ref().map(|_| "<fallback>");
        f.debug_struct("ScopeData")
            .field("prefix", &self.prefix)
            .field("fallback", &fallback)
            .finish()
    }
}
/// A type representing a set of endpoints with the same HTTP path.
struct ResourceData<C: Concurrency> {
    // Scope this resource belongs to, plus its chain of ancestor scopes
    // (used by `AppInner::infer_scope` for fallback inference).
    scope: ScopeId,
    ancestors: Vec<ScopeId>,
    uri: Uri,
    // Concrete routes; `verbs` maps an HTTP method to an index in `routes`,
    // and `default_route` handles methods with no mapping.
    routes: Vec<RouteData<C>>,
    default_route: Option<RouteData<C>>,
    verbs: IndexMap<Method, usize>,
}
impl<C: Concurrency> fmt::Debug for ResourceData<C> {
    /// Manual impl: the route handlers are not `Debug`, so `routes` and
    /// `default_route` are omitted from the output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("ResourceData");
        s.field("scope", &self.scope);
        s.field("ancestors", &self.ancestors);
        s.field("uri", &self.uri);
        s.field("verbs", &self.verbs);
        s.finish()
    }
}
impl<C> ResourceData<C>
where
    C: Concurrency,
{
    /// Picks the route registered for `method`, falling back to the
    /// resource's default route when the verb is not mapped.
    fn find_route(&self, method: &Method) -> Option<&RouteData<C>> {
        match self.verbs.get(method) {
            Some(&index) => Some(&self.routes[index]),
            None => self.default_route.as_ref(),
        }
    }
}
// A single endpoint: one handler for one (resource, method) pairing.
struct RouteData<C: Concurrency> {
    handler: C::Handler,
}
impl<C: Concurrency> fmt::Debug for RouteData<C> {
    /// The handler is not `Debug`, so only the type name is printed
    /// (identical output to an empty `debug_struct`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "RouteData")
    }
}
|
extern crate julia;
use julia::Julia;
// TODO: This is absolute garbage.
// Smoke test: constructing the Julia runtime must not panic. The
// hard-coded macOS library path makes this machine-specific; it should
// eventually come from an environment variable or build config.
#[test]
fn init_works() {
    // Underscore-prefixed to silence the unused-variable warning; the
    // instance only needs to be constructed. The former tautological
    // `assert!(true)` added nothing and was dropped.
    let _jl = Julia::new("/Applications/Julia-0.5.app/Contents/Resources/julia/lib");
}
|
// Copyright © 2020, Oracle and/or its affiliates.
//
// Copyright (c) 2019 Intel Corporation. All rights reserved.
// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE-BSD-3-Clause file.
//
// SPDX-License-Identifier: Apache-2.0 AND BSD-3-Clause
//! Traits and structs for configuring and loading boot parameters on `x86_64` using the Linux
//! boot protocol.
use vm_memory::{Bytes, GuestMemory};
use crate::configurator::{BootConfigurator, BootParams, Error as BootConfiguratorError, Result};
use std::fmt;
/// Boot configurator for the Linux boot protocol.
///
/// Stateless marker type: all behavior lives in its [`BootConfigurator`]
/// implementation below.
pub struct LinuxBootConfigurator {}
/// Errors specific to the Linux boot protocol configuration.
// NOTE: `Display` renders these with a "Linux Boot Configurator: " prefix.
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
    /// The zero page extends past the end of guest memory.
    ZeroPagePastRamEnd,
    /// Error writing to the zero page of guest memory.
    ZeroPageSetup,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use Error::*;
let desc = match self {
ZeroPagePastRamEnd => "the zero page extends past the end of guest memory.",
ZeroPageSetup => "error writing to the zero page of guest memory.",
};
write!(f, "Linux Boot Configurator: {}", desc,)
}
}
impl std::error::Error for Error {}
impl From<Error> for BootConfiguratorError {
fn from(err: Error) -> Self {
BootConfiguratorError::Linux(err)
}
}
impl BootConfigurator for LinuxBootConfigurator {
    /// Writes the boot parameters (configured elsewhere) into guest memory.
    ///
    /// # Arguments
    ///
    /// * `params` - boot parameters. The header contains a [`boot_params`] struct. The `sections`
    ///   and `modules` are unused.
    /// * `guest_memory` - guest's physical memory.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # extern crate vm_memory;
    /// # use linux_loader::configurator::{BootConfigurator, BootParams};
    /// # use linux_loader::configurator::linux::LinuxBootConfigurator;
    /// # use linux_loader::loader::bootparam::boot_params;
    /// # use vm_memory::{Address, ByteValued, GuestMemory, GuestMemoryMmap, GuestAddress};
    /// # const KERNEL_BOOT_FLAG_MAGIC: u16 = 0xaa55;
    /// # const KERNEL_HDR_MAGIC: u32 = 0x53726448;
    /// # const KERNEL_LOADER_OTHER: u8 = 0xff;
    /// # const KERNEL_MIN_ALIGNMENT_BYTES: u32 = 0x1000000;
    /// # const MEM_SIZE: u64 = 0x100_0000;
    /// # fn create_guest_memory() -> GuestMemoryMmap {
    /// #     GuestMemoryMmap::from_ranges(&[(GuestAddress(0x0), (MEM_SIZE as usize))]).unwrap()
    /// # }
    /// fn build_bootparams() -> boot_params {
    ///     let mut params = boot_params::default();
    ///     params.hdr.boot_flag = KERNEL_BOOT_FLAG_MAGIC;
    ///     params.hdr.header = KERNEL_HDR_MAGIC;
    ///     params.hdr.kernel_alignment = KERNEL_MIN_ALIGNMENT_BYTES;
    ///     params.hdr.type_of_loader = KERNEL_LOADER_OTHER;
    ///     params
    /// }
    ///
    /// fn main() {
    ///     # let zero_page_addr = GuestAddress(0x30000);
    ///     let guest_memory = create_guest_memory();
    ///     let params = build_bootparams();
    ///     let mut bootparams = BootParams::new::<boot_params>(&params, zero_page_addr);
    ///     LinuxBootConfigurator::write_bootparams::<GuestMemoryMmap>(&bootparams, &guest_memory)
    ///         .unwrap();
    /// }
    /// ```
    ///
    /// [`boot_params`]: ../loader/bootparam/struct.boot_params.html
    fn write_bootparams<M>(params: &BootParams, guest_memory: &M) -> Result<()>
    where
        M: GuestMemory,
    {
        // The VMM has filled a `boot_params` struct and its e820 map.
        // This will be written in guest memory at the zero page.
        // First validate that the whole header fits below the end of guest
        // memory; only then perform the actual write.
        guest_memory
            .checked_offset(params.header_start, params.header.len())
            .ok_or(Error::ZeroPagePastRamEnd)?;
        guest_memory
            .write_slice(params.header.as_slice(), params.header_start)
            .map_err(|_| Error::ZeroPageSetup)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::loader_gen::bootparam::boot_params;
    use std::mem;
    use vm_memory::{Address, GuestAddress, GuestMemoryMmap};

    const KERNEL_BOOT_FLAG_MAGIC: u16 = 0xaa55;
    const KERNEL_HDR_MAGIC: u32 = 0x53726448;
    const KERNEL_LOADER_OTHER: u8 = 0xff;
    const KERNEL_MIN_ALIGNMENT_BYTES: u32 = 0x1000000;
    const MEM_SIZE: u64 = 0x100_0000;

    /// 16 MiB of guest memory starting at physical address 0.
    fn create_guest_mem() -> GuestMemoryMmap {
        GuestMemoryMmap::from_ranges(&[(GuestAddress(0x0), (MEM_SIZE as usize))]).unwrap()
    }

    /// Minimal valid `boot_params` header shared by the tests.
    fn build_bootparams_common() -> boot_params {
        let mut params = boot_params::default();
        params.hdr.boot_flag = KERNEL_BOOT_FLAG_MAGIC;
        params.hdr.header = KERNEL_HDR_MAGIC;
        params.hdr.kernel_alignment = KERNEL_MIN_ALIGNMENT_BYTES;
        params.hdr.type_of_loader = KERNEL_LOADER_OTHER;
        params
    }

    #[test]
    fn test_configure_linux_boot() {
        let zero_page_addr = GuestAddress(0x30000);
        let params = build_bootparams_common();
        // This is where we'd append e820 entries, cmdline, PCI, ACPI etc.
        let guest_memory = create_guest_mem();
        // Error case: boot params don't fit in guest memory (zero page address too close to end).
        let bad_zeropg_addr = GuestAddress(
            guest_memory.last_addr().raw_value() - mem::size_of::<boot_params>() as u64 + 1,
        );
        // Repaired a mojibake artifact: `&params` had been corrupted to
        // `¶ms` (`&para` mis-decoded as the pilcrow sign), which does
        // not compile.
        let mut bootparams = BootParams::new::<boot_params>(&params, bad_zeropg_addr);
        assert_eq!(
            LinuxBootConfigurator::write_bootparams::<GuestMemoryMmap>(&bootparams, &guest_memory)
                .err(),
            Some(Error::ZeroPagePastRamEnd.into()),
        );
        // Success case.
        bootparams.header_start = zero_page_addr;
        assert!(LinuxBootConfigurator::write_bootparams::<GuestMemoryMmap>(
            &bootparams,
            &guest_memory,
        )
        .is_ok());
    }

    #[test]
    fn test_error_messages() {
        assert_eq!(
            format!("{}", Error::ZeroPagePastRamEnd),
            "Linux Boot Configurator: the zero page extends past the end of guest memory."
        );
        assert_eq!(
            format!("{}", Error::ZeroPageSetup),
            "Linux Boot Configurator: error writing to the zero page of guest memory."
        );
    }
}
|
use cocoa::base::{class, id};
use cocoa::foundation::NSUInteger;
use objc::runtime::BOOL;
use {MTLCPUCacheMode, MTLPixelFormat, MTLResourceOptions, MTLStorageMode, MTLTextureType,
MTLTextureUsage};
/// A `MTLTextureDescriptor` object is used to configure new texture objects.
///
/// A `MTLTexture` object is specific to an individual device. To create a new texture,
/// first create a `MTLTextureDescriptor` object and set its property values, including
/// the texture’s type, size (width, height, and depth), pixel format, number of mipmap
/// levels, sample count (for multisampling), and memory allocation behavior. Then, call
/// either the `newTextureWithDescriptor:` or `newTextureWithDescriptor:iosurface:plane:`
/// method of a `MTLDevice` object, or the `newTextureWithDescriptor:offset:bytesPerRow:`
/// method of a `MTLBuffer` object.
///
/// You can reuse a `MTLTextureDescriptor` object, modifying its property values as needed,
/// to create more `MTLTexture` objects. Texture descriptor properties are used only during
/// the creation of a MTLTexture object. After the texture has been created, property
/// changes in its descriptor have no further effects on it.
///
/// For 2D and cube textures, there are convenience methods to create an MTLTextureDescriptor
/// object: `texture2DDescriptorWithPixelFormat:width:height:mipmapped:` and
/// `textureCubeDescriptorWithPixelFormat:size:mipmapped:`.
pub trait MTLTextureDescriptor {
    /// Creates a texture descriptor object for a 2D texture.
    unsafe fn texture2DDescriptorWithPixelFormat_width_height_mipmapped(_: Self,
                                                                        format: MTLPixelFormat,
                                                                        width: NSUInteger,
                                                                        height: NSUInteger,
                                                                        mipmapped: BOOL)
                                                                        -> id {
        msg_send![class("MTLTextureDescriptor"), texture2DDescriptorWithPixelFormat:format
                                                 width:width
                                                 height:height
                                                 mipmapped:mipmapped]
    }
    /// Creates a texture descriptor object for a cube texture.
    unsafe fn textureCubeDescriptorWithPixelFormat_size_mipmapped(_: Self, format: MTLPixelFormat,
                                                                  size: NSUInteger, mipmapped: BOOL)
                                                                  -> id {
        msg_send![class("MTLTextureDescriptor"), textureCubeDescriptorWithPixelFormat:format
                                                 size:size
                                                 mipmapped:mipmapped]
    }
    /// Allocates a fresh descriptor with all properties at their defaults.
    unsafe fn new(_: Self) -> id {
        msg_send![class("MTLTextureDescriptor"), new]
    }
    /// The dimension and arrangement of texture image data.
    ///
    /// # Discussion
    ///
    /// The default value is `MTLTexture2D`.
    unsafe fn textureType(self) -> MTLTextureType;
    unsafe fn setTextureType(self, textureType: MTLTextureType);
    /// Format that determines how a pixel is written to, stored, and read from the storage
    /// allocation of the texture.
    ///
    /// # Discussion
    ///
    /// The default value is MTLPixelFormatRGBA8Unorm.
    unsafe fn pixelFormat(self) -> MTLPixelFormat;
    unsafe fn setPixelFormat(self, pixelFormat: MTLPixelFormat);
    /// The width of the texture image for the base level mipmap, in pixels.
    ///
    /// # Discussion
    ///
    /// The default value is 1. The value must be greater than or equal to `1`.
    unsafe fn width(self) -> NSUInteger;
    unsafe fn setWidth(self, width: NSUInteger);
    /// The height of the texture image for the base level mipmap, in pixels.
    ///
    /// # Discussion
    ///
    /// The default value is 1. The value must be greater than or equal to `1`.
    /// For a 1D texture, the value must be `1`.
    unsafe fn height(self) -> NSUInteger;
    unsafe fn setHeight(self, height: NSUInteger);
    /// The depth of the texture image for the base level mipmap, in pixels.
    ///
    /// # Discussion
    ///
    /// The default value is `1`. The value must be greater than or equal to `1`.
    /// For 1D, 2D, and cube textures, the value must be `1`.
    unsafe fn depth(self) -> NSUInteger;
    unsafe fn setDepth(self, depth: NSUInteger);
    /// The number of mipmap levels for this texture.
    ///
    /// # Discussion
    ///
    /// The default value is `1`. For a buffer-backed or multisample textures,
    /// the value must be `1`.
    unsafe fn mipmapLevelCount(self) -> NSUInteger;
    unsafe fn setMipmapLevelCount(self, mipmapLevelCount: NSUInteger);
    /// The number of samples in each pixel.
    ///
    /// # Discussion
    ///
    /// The default value is 1. If `textureType` is not `MTLTextureType2DMultisample`,
    /// the value must be 1.
    unsafe fn sampleCount(self) -> NSUInteger;
    unsafe fn setSampleCount(self, sampleCount: NSUInteger);
    /// The number of array elements for a `MTLTextureType1DArray` or `MTLTextureType2DArray`
    /// type texture object.
    ///
    /// # Discussion
    ///
    /// The default value is `1`. The value must be between 1 and 2048, inclusive.
    unsafe fn arrayLength(self) -> NSUInteger;
    unsafe fn setArrayLength(self, arrayLength: NSUInteger);
    /// The behavior of a new memory allocation.
    ///
    /// # Discussion
    ///
    /// This property only has an effect when you are allocating new memory. If you are
    /// creating a texture from buffer data or creating a texture view from another texture,
    /// this property value is ignored.
    unsafe fn resourceOptions(self) -> MTLResourceOptions;
    unsafe fn setResourceOptions(self, resourceOptions: MTLResourceOptions);
    /// The CPU cache mode used for the CPU mapping of the texture.
    ///
    /// # Discussion
    ///
    /// The default value is `MTLCPUCacheModeDefaultCache`.
    unsafe fn cpuCacheMode(self) -> MTLCPUCacheMode;
    unsafe fn setCpuCacheMode(self, cpuCacheMode: MTLCPUCacheMode);
    /// The storage mode used for the location and mapping of the texture.
    ///
    /// # Discussion
    ///
    /// The default value is `MTLStorageModeAuto`.
    unsafe fn storageMode(self) -> MTLStorageMode;
    unsafe fn setStorageMode(self, storageMode: MTLStorageMode);
    /// Describes how the texture will be used in your app.
    ///
    /// # Discussion
    ///
    /// The default value is `MTLTextureUsageShaderRead` in OS X and `MTLTextureUsageUnknown` in
    /// iOS. You should always aim to determine and set specific texture usages; do not rely on
    /// the `MTLTextureUsageUnknown` value for the best performance.
    unsafe fn usage(self) -> MTLTextureUsage;
    unsafe fn setUsage(self, usage: MTLTextureUsage);
    /// Returns an Objective-C copy of the descriptor object.
    unsafe fn copy(self) -> id;
}
// Accessor implementations for raw Objective-C object pointers: every
// method simply forwards to the underlying object via `msg_send!` —
// `self` here is the untyped `id` pointer, so all safety rests on the
// caller passing a genuine `MTLTextureDescriptor` instance.
impl MTLTextureDescriptor for id {
    unsafe fn textureType(self) -> MTLTextureType {
        msg_send![self, textureType]
    }
    unsafe fn setTextureType(self, textureType: MTLTextureType) {
        msg_send![self, setTextureType: textureType]
    }
    unsafe fn pixelFormat(self) -> MTLPixelFormat {
        msg_send![self, pixelFormat]
    }
    unsafe fn setPixelFormat(self, pixelFormat: MTLPixelFormat) {
        msg_send![self, setPixelFormat: pixelFormat]
    }
    unsafe fn width(self) -> NSUInteger {
        msg_send![self, width]
    }
    unsafe fn setWidth(self, width: NSUInteger) {
        msg_send![self, setWidth:width]
    }
    unsafe fn height(self) -> NSUInteger {
        msg_send![self, height]
    }
    unsafe fn setHeight(self, height: NSUInteger) {
        msg_send![self, setHeight:height]
    }
    unsafe fn depth(self) -> NSUInteger {
        msg_send![self, depth]
    }
    unsafe fn setDepth(self, depth: NSUInteger) {
        msg_send![self, setDepth:depth]
    }
    unsafe fn mipmapLevelCount(self) -> NSUInteger {
        msg_send![self, mipmapLevelCount]
    }
    unsafe fn setMipmapLevelCount(self, mipmapLevelCount: NSUInteger) {
        msg_send![self, setMipmapLevelCount:mipmapLevelCount]
    }
    unsafe fn sampleCount(self) -> NSUInteger {
        msg_send![self, sampleCount]
    }
    unsafe fn setSampleCount(self, sampleCount: NSUInteger) {
        msg_send![self, setSampleCount:sampleCount]
    }
    unsafe fn arrayLength(self) -> NSUInteger {
        msg_send![self, arrayLength]
    }
    unsafe fn setArrayLength(self, arrayLength: NSUInteger) {
        msg_send![self, setArrayLength:arrayLength]
    }
    unsafe fn resourceOptions(self) -> MTLResourceOptions {
        msg_send![self, resourceOptions]
    }
    unsafe fn setResourceOptions(self, resourceOptions: MTLResourceOptions) {
        msg_send![self, setResourceOptions:resourceOptions]
    }
    unsafe fn cpuCacheMode(self) -> MTLCPUCacheMode {
        msg_send![self, cpuCacheMode]
    }
    unsafe fn setCpuCacheMode(self, cpuCacheMode: MTLCPUCacheMode) {
        msg_send![self, setCpuCacheMode:cpuCacheMode]
    }
    unsafe fn storageMode(self) -> MTLStorageMode {
        msg_send![self, storageMode]
    }
    unsafe fn setStorageMode(self, storageMode: MTLStorageMode) {
        msg_send![self, setStorageMode:storageMode]
    }
    unsafe fn usage(self) -> MTLTextureUsage {
        msg_send![self, usage]
    }
    unsafe fn setUsage(self, usage: MTLTextureUsage) {
        msg_send![self, setUsage:usage]
    }
    unsafe fn copy(self) -> id {
        msg_send![self, copy]
    }
}
|
use sys_consts::options::*;
use crate::uses::*;
use crate::cap::CapFlags;
use crate::sysret;
use crate::syscall::{SysErr, SyscallVals};
use super::{VirtRange, PAGE_SIZE};
use super::phys_alloc::zm;
use super::virt_alloc::{AllocType, PageMappingFlags, VirtLayout, VirtLayoutElement};
use super::shared_mem::*;
use super::error::MemErr;
use crate::sched::proc_c;
// Permission/option bits for the realloc syscall, mirroring the userspace
// encoding in `sys_consts::options`.
const READ: u32 = 1;
const WRITE: u32 = 1 << 1;
const EXEC: u32 = 1 << 2;
// NOTE(review): bit 3 is skipped between EXEC and REALLOC_EXACT —
// presumably reserved by the shared options encoding; confirm against
// `sys_consts::options` before reusing it.
const REALLOC_EXACT: usize = 1 << 4;
// FIXME: this doesn't return the right error codes yet
// FIXME: this doesn't obey REALLOC_EXACT when decreasing size of virtual memory
pub extern "C" fn realloc(vals: &mut SyscallVals)
{
let options = vals.options;
let addr = vals.a1;
let size = vals.a2 * PAGE_SIZE;
let at_addr = vals.a3;
if align_of(addr) < PAGE_SIZE || align_of(at_addr) < PAGE_SIZE {
sysret!(vals, SysErr::InvlPtr.num(), 0, 0);
}
let at_vaddr = match VirtAddr::try_new(at_addr as u64) {
Ok(addr) => addr,
Err(_) => sysret!(vals, SysErr::InvlVirtAddr.num(), 0, 0),
};
let flags = PageMappingFlags::from_bits_truncate(options as usize) | PageMappingFlags::USER;
if addr == 0 {
// allocate memory
if size == 0 {
sysret!(vals, SysErr::Ok.num(), 0, 0);
}
let layout_element = match VirtLayoutElement::new(size, flags) {
Some(elem) => elem,
None => sysret!(vals, SysErr::OutOfMem.num(), 0, 0),
};
let vec = vec![layout_element];
let layout = VirtLayout::from(vec, AllocType::VirtMem);
if at_addr == 0 {
unsafe {
match proc_c().addr_space.map(layout) {
Ok(virt_range) => sysret!(
vals,
SysErr::Ok.num(),
virt_range.as_usize(),
virt_range.size() / PAGE_SIZE
),
Err(_) => sysret!(vals, SysErr::OutOfMem.num(), 0, 0),
}
}
} else {
let virt_zone = VirtRange::new(VirtAddr::new_truncate(at_addr as u64), layout.size());
unsafe {
match proc_c().addr_space.map_at(layout, virt_zone) {
Ok(virt_range) => sysret!(
vals,
SysErr::Ok.num(),
virt_range.as_usize(),
virt_range.size() / PAGE_SIZE
),
Err(_) => sysret!(vals, SysErr::OutOfMem.num(), 0, 0),
}
}
}
} else if size == 0 {
// free memory
let mapper = &proc_c().addr_space;
let virt_zone = match mapper.get_mapped_range(VirtAddr::new_truncate(addr as u64)) {
Some(range) => range,
None => sysret!(vals, SysErr::InvlPtr.num(), 0, 0),
};
match unsafe { mapper.unmap(virt_zone, AllocType::VirtMem) } {
Ok(layout) => {
unsafe { layout.dealloc() };
sysret!(vals, SysErr::Ok.num(), 0, 0);
},
Err(_) => sysret!(vals, SysErr::Unknown.num(), 0, 0),
}
} else {
// realloc memory
let mapper = &proc_c().addr_space;
let virt_zone = match mapper.get_mapped_range(VirtAddr::new_truncate(addr as u64)) {
Some(range) => range,
None => sysret!(vals, SysErr::InvlPtr.num(), 0, 0),
};
// closure type annotation needed, compiler complains for some reason if I don't have it
// TODO: obey exact size flag here
let realloc_func = |phys_zones: &mut VirtLayout| {
let psize = phys_zones.size();
let mut new_flags = phys_zones.flags().unwrap();
if flags.contains(PageMappingFlags::EXACT_SIZE) {
new_flags |= PageMappingFlags::EXACT_SIZE;
}
if size > psize {
let elem = VirtLayoutElement::new(size - psize, new_flags)
.ok_or(MemErr::OutOfMem("out of memory"))?;
phys_zones.push(elem);
} else if size < psize {
let mut diff = psize - size;
while let Some(a) = phys_zones.clean_slice().last() {
if diff > a.size() {
diff -= a.size();
phys_zones.pop_delete();
}
}
}
Ok(())
};
if at_addr == 0 {
unsafe {
match proc_c()
.addr_space
.remap(virt_zone, AllocType::VirtMem, realloc_func)
{
Ok(virt_range) => sysret!(
vals,
SysErr::Ok.num(),
virt_range.as_usize(),
virt_range.size() / PAGE_SIZE
),
Err(_) => sysret!(vals, SysErr::OutOfMem.num(), 0, 0),
}
}
} else {
unsafe {
match proc_c().addr_space.remap_at(
virt_zone,
at_vaddr,
AllocType::VirtMem,
realloc_func,
) {
Ok(virt_range) => sysret!(
vals,
SysErr::Ok.num(),
virt_range.as_usize(),
virt_range.size() / PAGE_SIZE
),
Err(_) => sysret!(vals, SysErr::OutOfMem.num(), 0, 0),
}
}
}
}
}
/// `mprotect` syscall: change the protection flags of an existing mapping.
///
/// Not yet implemented — invoking this panics via `todo!()`.
pub extern "C" fn mprotect(vals: &mut SyscallVals) {
    todo!();
}
/// `smem_new` syscall: create a shared-memory object of `a1` pages with
/// capability flags decoded from `options`, and register it with the
/// current process.
///
/// Replies via `sysret!` with an error code and the new capability id.
pub extern "C" fn smem_new(vals: &mut SyscallVals)
{
    let size = vals.a1 * PAGE_SIZE;
    let options = CapFlags::from_bits_truncate(vals.options as usize);
    let smem = match SharedMem::new(size, options) {
        Some(smem) => smem,
        None => sysret!(vals, SysErr::OutOfMem.num(), 0),
    };
    // Insert into the process' shared-memory table; the returned id is
    // handed back to userspace.
    let cid = proc_c().smem().insert(smem);
    sysret!(vals, SysErr::Ok.num(), cid.into());
}
|
use aoc_utils::read_file;
use day03::Claim;
use day03::CLAIM_REGEX;
use std::collections::HashMap;
/// Advent of Code day 3, part 1: count the square inches of fabric covered
/// by two or more overlapping claims read from `./input`.
fn main() {
    if let Ok(contents) = read_file("./input") {
        // Parse each line into a Claim, expand every claim into the (x, y)
        // cells it covers, tally coverage per cell, then count the cells
        // claimed at least twice. The previous version collected the
        // filtered map into a second HashMap only to call `.keys().len()`;
        // `.count()` on the filtered values does the same with no extra
        // allocation.
        let result = contents
            .lines()
            .filter_map(|line| {
                let caps = CLAIM_REGEX.captures(line)?;
                Some(Claim::new(
                    caps["n"].parse::<i32>().unwrap(),
                    caps["x"].parse::<i32>().unwrap(),
                    caps["y"].parse::<i32>().unwrap(),
                    caps["width"].parse::<i32>().unwrap(),
                    caps["height"].parse::<i32>().unwrap(),
                ))
            })
            .flat_map(|claim| {
                let mut cells = Vec::new();
                for i in 0..claim.width {
                    for j in 0..claim.height {
                        cells.push((claim.x + i, claim.y + j));
                    }
                }
                cells
            })
            .fold(HashMap::new(), |mut counts, coord| {
                // Entry API: one lookup instead of and_modify + or_insert.
                *counts.entry(coord).or_insert(0) += 1;
                counts
            })
            .values()
            .filter(|&&count: &&i32| count >= 2)
            .count();
        println!("{:?}", result);
    }
}
|
use rand::prelude::*;
use std::fmt;
use std::slice::Iter;
/// One of the four French playing-card suits.
///
/// Declaration order matters: `abs_rank` multiplies `suit as u8` by 13,
/// so SPADE=0 sorts first and HEART=3 last.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Suit {
    SPADE,
    DIAMOND,
    CLUB,
    HEART,
}
/// Card rank. Discriminants run 1 (ACE) through 13 (KING) so `rank as u8`
/// yields the conventional card value; `abs_rank` relies on this.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Rank {
    ACE = 1,
    TWO,
    THREE,
    FOUR,
    FIVE,
    SIX,
    SEVEN,
    EIGHT,
    NINE,
    TEN,
    JACK,
    QUEEN,
    KING,
}
impl fmt::Display for Suit {
    /// Renders the suit as its plural English name, e.g. "Spades".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Suit::SPADE => "Spades",
            Suit::DIAMOND => "Diamonds",
            Suit::CLUB => "Clubs",
            Suit::HEART => "Hearts",
        };
        f.write_str(name)
    }
}
impl fmt::Display for Rank {
    /// Renders the rank as its English name, e.g. "Ace".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Rank::ACE => "Ace",
            Rank::TWO => "Two",
            Rank::THREE => "Three",
            Rank::FOUR => "Four",
            Rank::FIVE => "Five",
            Rank::SIX => "Six",
            Rank::SEVEN => "Seven",
            Rank::EIGHT => "Eight",
            Rank::NINE => "Nine",
            Rank::TEN => "Ten",
            Rank::JACK => "Jack",
            Rank::QUEEN => "Queen",
            Rank::KING => "King",
        };
        f.write_str(name)
    }
}
impl Suit {
    /// Returns an iterator over all four suits, in declaration order.
    pub fn iterator() -> Iter<'static, Suit> {
        static SUITS: [Suit; 4] = [Suit::SPADE, Suit::DIAMOND, Suit::CLUB, Suit::HEART];
        SUITS.iter()
    }
}
impl Rank {
    /// Returns an iterator over all thirteen ranks, from ACE to KING.
    pub fn iterator() -> Iter<'static, Rank> {
        static RANKS: [Rank; 13] = [
            Rank::ACE,
            Rank::TWO,
            Rank::THREE,
            Rank::FOUR,
            Rank::FIVE,
            Rank::SIX,
            Rank::SEVEN,
            Rank::EIGHT,
            Rank::NINE,
            Rank::TEN,
            Rank::JACK,
            Rank::QUEEN,
            Rank::KING,
        ];
        RANKS.iter()
    }
}
/// A single playing card: one suit paired with one rank.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Card {
    pub suit: Suit,
    pub rank: Rank,
}
impl fmt::Display for Card {
    /// Renders the card as "<Rank> of <Suit>", e.g. "Ace of Spades".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{rank} of {suit}", rank = self.rank, suit = self.suit)
    }
}
impl Card {
    /// Constructs a card from a suit and a rank.
    pub fn new(suit: Suit, rank: Rank) -> Card {
        // Field-init shorthand; `suit: suit` / `rank: rank` were redundant
        // (clippy::redundant_field_names).
        Card { suit, rank }
    }
}
/// Maps a card to a unique ordinal in 1..=52, ordering first by suit
/// (declaration order) and then by rank.
#[allow(dead_code)]
fn abs_rank(card: &Card) -> u8 {
    // Tail expression; the explicit `return` was unidiomatic.
    (card.suit as u8) * 13 + card.rank as u8
}
/// Returns the given cards in a uniformly random order, using the
/// thread-local RNG.
pub fn shuffle(cards: Vec<Card>) -> Vec<Card> {
    // `cards` is already owned, so the previous `cards.clone()` was a
    // needless full-vector allocation and copy.
    let mut v = cards;
    v.shuffle(&mut thread_rng());
    v
}
#[allow(dead_code)]
pub fn default_sort(cards: Vec<Card>) -> Vec<Card> {
let mut v: Vec<Card> = cards.clone();
v.sort_by(|a: &Card, b: &Card| abs_rank(a).partial_cmp(&abs_rank(b)).unwrap());
return v;
}
/// Builds a deck transformer that removes every card matching `filter_fn`
/// (note the negation: matching cards are dropped, not kept).
#[allow(dead_code)]
pub fn filter(filter_fn: fn(&Card) -> bool) -> impl Fn(Vec<Card>) -> Vec<Card> {
    move |cards: Vec<Card>| -> Vec<Card> {
        cards
            .into_iter()
            .filter(|card| !filter_fn(card))
            .collect()
    }
}
/// Builds a deck transformer that repeats the incoming cards `n` times
/// (e.g. `deck(2)` turns one deck into two concatenated copies).
pub fn deck(n: u8) -> impl Fn(Vec<Card>) -> Vec<Card> {
    move |cards: Vec<Card>| -> Vec<Card> {
        let mut combined = Vec::with_capacity(cards.len() * usize::from(n));
        for _ in 0..n {
            combined.extend_from_slice(&cards);
        }
        combined
    }
}
/// Builds a standard 52-card deck and applies each transformer in `opts`,
/// in order, returning the final deck.
pub fn cards(opts: &[&dyn Fn(Vec<Card>) -> Vec<Card>]) -> Vec<Card> {
    let mut v: Vec<Card> = Vec::with_capacity(52);
    for suit in Suit::iterator() {
        for rank in Rank::iterator() {
            v.push(Card::new(*suit, *rank));
        }
    }
    // Move the deck through each option. The previous clone-and-splice
    // round trip, and the final `v.clone()` on return, were pure waste.
    for opt in opts.iter() {
        v = opt(v);
    }
    v
}
|
pub mod consumer;
pub mod consumer_to_game;
pub mod consumer_structs; |
use rusqlite::functions::{Aggregate, Context, FunctionFlags};
use rusqlite::types::ValueRef;
use rusqlite::{Connection, Result};
use stats::OnlineStats;
pub(crate) fn add_udfs(connection: &Connection) -> Result<()> {
connection.create_scalar_function(
"md5",
1,
FunctionFlags::SQLITE_UTF8 | FunctionFlags::SQLITE_DETERMINISTIC,
move |ctx| calculate_md5(ctx).map_err(|e| rusqlite::Error::UserFunctionError(e.into())),
)?;
connection.create_scalar_function(
"sqrt",
1,
FunctionFlags::SQLITE_DETERMINISTIC,
move |ctx| calculate_sqrt(ctx).map_err(|e| rusqlite::Error::UserFunctionError(e.into())),
)?;
connection.create_aggregate_function(
"stddev",
1,
FunctionFlags::SQLITE_DETERMINISTIC,
Stddev,
)?;
connection.create_aggregate_function("mean", 1, FunctionFlags::SQLITE_DETERMINISTIC, Mean)?;
Ok(())
}
/// Scalar SQL function `md5(text)`: returns the lowercase hexadecimal MD5
/// digest of its single text argument.
pub(crate) fn calculate_md5(ctx: &Context) -> Result<String> {
    assert_eq!(ctx.len(), 1, "called with unexpected number of arguments");
    let input = ctx.get_raw(0).as_str()?;
    Ok(format!("{:x}", md5::compute(input)))
}
/// Scalar SQL function `sqrt(x)`: square root of a REAL or INTEGER value.
///
/// Tries the argument as f64 first, falling back to i64 (widened to f64);
/// other SQL types surface the underlying rusqlite conversion error.
pub(crate) fn calculate_sqrt(ctx: &Context) -> Result<f64> {
    assert_eq!(ctx.len(), 1, "called with unexpected number of arguments");
    let arg = ctx.get_raw(0);
    // The locals previously shadowed the primitive type names `f64`/`i64`,
    // which compiles but is needlessly confusing.
    if let Ok(real) = arg.as_f64() {
        Ok(real.sqrt())
    } else {
        let integer = arg.as_i64()?;
        Ok((integer as f64).sqrt())
    }
}
/// Aggregate SQL function `stddev(x)`: standard deviation of the numeric
/// inputs, accumulated incrementally in an `OnlineStats`.
pub struct Stddev;
impl Aggregate<OnlineStats, Option<f64>> for Stddev {
    fn init(&self, _: &mut Context<'_>) -> Result<OnlineStats> {
        Ok(OnlineStats::new())
    }
    // Folds one row into the running stats. NULLs are recorded via
    // `add_null`; text/blob values abort the aggregation with an error.
    fn step(&self, ctx: &mut Context<'_>, stdev: &mut OnlineStats) -> Result<()> {
        let value = ctx.get_raw(0);
        match value {
            ValueRef::Null => stdev.add_null(),
            ValueRef::Integer(i) => stdev.add(i as f64),
            ValueRef::Real(f) => stdev.add(f),
            ValueRef::Text(_) | ValueRef::Blob(_) => {
                return Result::Err(rusqlite::Error::UserFunctionError(
                    "can't take stddev of this value".into(),
                ));
            }
        }
        Ok(())
    }
    // `numbers` is None when no rows were aggregated; SQL NULL is returned.
    fn finalize(&self, _: &mut Context<'_>, numbers: Option<OnlineStats>) -> Result<Option<f64>> {
        let stddev = numbers.map(|n| n.stddev());
        Ok(stddev)
    }
}
/// Aggregate SQL function `mean(x)`: arithmetic mean of the numeric inputs,
/// accumulated incrementally in an `OnlineStats`.
pub struct Mean;
impl Aggregate<OnlineStats, Option<f64>> for Mean {
    fn init(&self, _: &mut Context<'_>) -> Result<OnlineStats> {
        Ok(OnlineStats::new())
    }
    // Folds one row into the running stats. NULLs are recorded via
    // `add_null`; text/blob values abort the aggregation with an error.
    fn step(&self, ctx: &mut Context<'_>, mean: &mut OnlineStats) -> Result<()> {
        let value = ctx.get_raw(0);
        match value {
            ValueRef::Null => mean.add_null(),
            ValueRef::Integer(i) => mean.add(i as f64),
            ValueRef::Real(f) => mean.add(f),
            ValueRef::Text(_) | ValueRef::Blob(_) => {
                return Result::Err(rusqlite::Error::UserFunctionError(
                    "can't take mean of this value".into(),
                ));
            }
        }
        Ok(())
    }
    // `numbers` is None when no rows were aggregated; SQL NULL is returned.
    fn finalize(&self, _: &mut Context<'_>, numbers: Option<OnlineStats>) -> Result<Option<f64>> {
        let mean = numbers.map(|n| n.mean());
        Ok(mean)
    }
}
|
#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/RustCrypto/meta/master/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/meta/master/logo.svg"
)]
#![warn(missing_docs, rust_2018_idioms, unused_qualifications)]
//! # Usage
//!
//! Simple usage (allocating, no associated data):
//!
#![cfg_attr(all(feature = "getrandom", feature = "std"), doc = "```")]
#![cfg_attr(not(all(feature = "getrandom", feature = "std")), doc = "```ignore")]
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! use aes_siv::{
//! aead::{Aead, AeadCore, KeyInit, OsRng},
//! Aes256SivAead, Nonce // Or `Aes128SivAead`
//! };
//!
//! let key = Aes256SivAead::generate_key(&mut OsRng);
//! let cipher = Aes256SivAead::new(&key);
//! let nonce = Aes256SivAead::generate_nonce(&mut OsRng); // 128-bits; unique per message
//! let ciphertext = cipher.encrypt(&nonce, b"plaintext message".as_ref())?;
//! let plaintext = cipher.decrypt(&nonce, ciphertext.as_ref())?;
//! assert_eq!(&plaintext, b"plaintext message");
//! # Ok(())
//! # }
//! ```
//!
//! ## In-place Usage (eliminates `alloc` requirement)
//!
//! This crate has an optional `alloc` feature which can be disabled in e.g.
//! microcontroller environments that don't have a heap.
//!
//! The [`AeadInPlace::encrypt_in_place`] and [`AeadInPlace::decrypt_in_place`]
//! methods accept any type that impls the [`aead::Buffer`] trait which
//! contains the plaintext for encryption or ciphertext for decryption.
//!
//! Note that if you enable the `heapless` feature of this crate,
//! you will receive an impl of [`aead::Buffer`] for `heapless::Vec`
//! (re-exported from the [`aead`] crate as [`aead::heapless::Vec`]),
//! which can then be passed as the `buffer` parameter to the in-place encrypt
//! and decrypt methods:
//!
#![cfg_attr(
all(feature = "getrandom", feature = "heapless", feature = "std"),
doc = "```"
)]
#![cfg_attr(
not(all(feature = "getrandom", feature = "heapless", feature = "std")),
doc = "```ignore"
)]
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! use aes_siv::{
//! aead::{AeadCore, AeadInPlace, KeyInit, OsRng, heapless::Vec},
//! Aes256SivAead, Nonce, // Or `Aes128SivAead`
//! };
//!
//! let key = Aes256SivAead::generate_key(&mut OsRng);
//! let cipher = Aes256SivAead::new(&key);
//! let nonce = Aes256SivAead::generate_nonce(&mut OsRng); // 128-bits; unique per message
//!
//! let mut buffer: Vec<u8, 128> = Vec::new(); // Note: buffer needs 16-bytes overhead for auth tag
//! buffer.extend_from_slice(b"plaintext message");
//!
//! // Encrypt `buffer` in-place, replacing the plaintext contents with ciphertext
//! cipher.encrypt_in_place(&nonce, b"", &mut buffer)?;
//!
//! // `buffer` now contains the message ciphertext
//! assert_ne!(&buffer, b"plaintext message");
//!
//! // Decrypt `buffer` in-place, replacing its ciphertext context with the original plaintext
//! cipher.decrypt_in_place(&nonce, b"", &mut buffer)?;
//! assert_eq!(&buffer, b"plaintext message");
//! # Ok(())
//! # }
//! ```
//!
//! Similarly, enabling the `arrayvec` feature of this crate will provide an impl of
//! [`aead::Buffer`] for `arrayvec::ArrayVec` (re-exported from the [`aead`] crate as
//! [`aead::arrayvec::ArrayVec`]).
#[cfg(feature = "alloc")]
extern crate alloc;
pub mod siv;
pub use aead::{self, AeadCore, AeadInPlace, Error, Key, KeyInit, KeySizeUser};
use crate::siv::Siv;
use aead::{
consts::{U0, U16, U32, U64},
generic_array::GenericArray,
Buffer,
};
use aes::{Aes128, Aes256};
use cipher::{BlockCipher, BlockEncryptMut};
use cmac::Cmac;
use core::{marker::PhantomData, ops::Add};
use digest::{FixedOutputReset, Mac};
#[cfg(feature = "pmac")]
use pmac::Pmac;
/// AES-SIV nonces (128 bits; see the RFC 5297 note on `AeadCore` below —
/// random nonces "SHOULD be at least 128 bits in length")
pub type Nonce = GenericArray<u8, U16>;
/// AES-SIV tags (i.e. the Synthetic Initialization Vector value, 128 bits)
pub type Tag = GenericArray<u8, U16>;
/// The `SivAead` type wraps the more powerful `Siv` interface in a more
/// commonly used Authenticated Encryption with Associated Data (AEAD) API,
/// which accepts a key, nonce, and associated data when encrypting/decrypting.
pub struct SivAead<C, M>
where
    Self: KeySizeUser,
    C: BlockCipher<BlockSize = U16> + BlockEncryptMut + KeyInit + KeySizeUser,
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
    <C as KeySizeUser>::KeySize: Add,
{
    // Copy of the key material; a fresh `Siv` is built from it for every
    // encrypt/decrypt call (see the `AeadInPlace` impl below).
    key: GenericArray<u8, <Self as KeySizeUser>::KeySize>,
    mac: PhantomData<M>, // TODO(tarcieri): include `M` in `KeySize` calculation
}
/// SIV AEAD modes based on CMAC
pub type CmacSivAead<BlockCipher> = SivAead<BlockCipher, Cmac<BlockCipher>>;
/// SIV AEAD modes based on PMAC
#[cfg(feature = "pmac")]
#[cfg_attr(docsrs, doc(cfg(feature = "pmac")))]
pub type PmacSivAead<BlockCipher> = SivAead<BlockCipher, Pmac<BlockCipher>>;
// Note: SIV keys are double-width relative to the underlying AES key (see
// the `KeySizeUser` impls below), hence "256-bit key size (128-bit security)".
/// AES-CMAC-SIV in AEAD mode with 256-bit key size (128-bit security)
pub type Aes128SivAead = CmacSivAead<Aes128>;
/// AES-CMAC-SIV in AEAD mode with 512-bit key size (256-bit security)
pub type Aes256SivAead = CmacSivAead<Aes256>;
/// AES-PMAC-SIV in AEAD mode with 256-bit key size (128-bit security)
#[cfg(feature = "pmac")]
#[cfg_attr(docsrs, doc(cfg(feature = "pmac")))]
pub type Aes128PmacSivAead = PmacSivAead<Aes128>;
/// AES-PMAC-SIV in AEAD mode with 512-bit key size (256-bit security)
#[cfg(feature = "pmac")]
#[cfg_attr(docsrs, doc(cfg(feature = "pmac")))]
pub type Aes256PmacSivAead = PmacSivAead<Aes256>;
// SIV takes a key twice the width of the underlying AES cipher: 32 bytes
// for AES-128-SIV and 64 bytes for AES-256-SIV (matching the alias docs
// above, e.g. "256-bit key size (128-bit security)").
impl<M> KeySizeUser for SivAead<Aes128, M>
where
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
{
    type KeySize = U32;
}
impl<M> KeySizeUser for SivAead<Aes256, M>
where
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
{
    type KeySize = U64;
}
// Construction simply copies the key bytes; the actual `Siv` cipher state
// is instantiated lazily per operation in the `AeadInPlace` impl.
impl<M> KeyInit for SivAead<Aes128, M>
where
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
{
    fn new(key: &GenericArray<u8, Self::KeySize>) -> Self {
        Self {
            key: *key,
            mac: PhantomData,
        }
    }
}
impl<M> KeyInit for SivAead<Aes256, M>
where
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
{
    fn new(key: &GenericArray<u8, Self::KeySize>) -> Self {
        Self {
            key: *key,
            mac: PhantomData,
        }
    }
}
impl<C, M> AeadCore for SivAead<C, M>
where
    Self: KeySizeUser,
    C: BlockCipher<BlockSize = U16> + BlockEncryptMut + KeyInit + KeySizeUser,
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
    <C as KeySizeUser>::KeySize: Add,
{
    // "If the nonce is random, it SHOULD be at least 128 bits in length"
    // https://tools.ietf.org/html/rfc5297#section-3
    // TODO(tarcieri): generic nonce sizes
    type NonceSize = U16;
    // 128-bit SIV tag; no additional ciphertext expansion beyond the tag.
    type TagSize = U16;
    type CiphertextOverhead = U0;
}
impl<C, M> AeadInPlace for SivAead<C, M>
where
    Self: KeySizeUser,
    Siv<C, M>: KeyInit + KeySizeUser<KeySize = <Self as KeySizeUser>::KeySize>,
    C: BlockCipher<BlockSize = U16> + BlockEncryptMut + KeyInit + KeySizeUser,
    M: Mac<OutputSize = U16> + FixedOutputReset + KeyInit,
    <C as KeySizeUser>::KeySize: Add,
{
    // Every method below builds a fresh `Siv` from the stored key and passes
    // `[associated_data, nonce]` as the S2V header vector, so the nonce is
    // always the final component (as RFC 5297 requires; see quote below).
    fn encrypt_in_place(
        &self,
        nonce: &GenericArray<u8, Self::NonceSize>,
        associated_data: &[u8],
        buffer: &mut dyn Buffer,
    ) -> Result<(), Error> {
        // "SIV performs nonce-based authenticated encryption when a component of
        // the associated data is a nonce. For purposes of interoperability the
        // final component -- i.e., the string immediately preceding the
        // plaintext in the vector input to S2V -- is used for the nonce."
        // https://tools.ietf.org/html/rfc5297#section-3
        Siv::<C, M>::new(&self.key).encrypt_in_place([associated_data, nonce.as_slice()], buffer)
    }
    fn encrypt_in_place_detached(
        &self,
        nonce: &GenericArray<u8, Self::NonceSize>,
        associated_data: &[u8],
        buffer: &mut [u8],
    ) -> Result<GenericArray<u8, Self::TagSize>, Error> {
        Siv::<C, M>::new(&self.key)
            .encrypt_in_place_detached([associated_data, nonce.as_slice()], buffer)
    }
    fn decrypt_in_place(
        &self,
        nonce: &GenericArray<u8, Self::NonceSize>,
        associated_data: &[u8],
        buffer: &mut dyn Buffer,
    ) -> Result<(), Error> {
        Siv::<C, M>::new(&self.key).decrypt_in_place([associated_data, nonce.as_slice()], buffer)
    }
    fn decrypt_in_place_detached(
        &self,
        nonce: &GenericArray<u8, Self::NonceSize>,
        associated_data: &[u8],
        buffer: &mut [u8],
        tag: &GenericArray<u8, Self::TagSize>,
    ) -> Result<(), Error> {
        Siv::<C, M>::new(&self.key).decrypt_in_place_detached(
            [associated_data, nonce.as_slice()],
            buffer,
            tag,
        )
    }
}
|
use std::env;
use std::path::Path;
use anyhow::*;
/// Read the input file for the current day's puzzle, i.e. `input/dayxx.txt`, and return its content as a String.
///
/// The day is inferred from the executable's file name (argv[0]), so the
/// `day07` binary reads `input/day07.txt`.
pub fn input_string() -> Result<String> {
    // `ok_or_else` so the error (and its allocation) is only built on the
    // failure path; the previous `ok_or(format_err!(..))` evaluated it always.
    let executable_name = env::args_os()
        .next()
        .ok_or_else(|| format_err!("no executable name?"))?;
    let mut infile = Path::new("input").join(
        Path::new(&executable_name)
            .file_name()
            .ok_or_else(|| format_err!("no file name?"))?,
    );
    infile.set_extension("txt");
    std::fs::read_to_string(&infile).context("Could not read input file")
}
|
use super::*;
#[pymethods]
impl EnsmallenGraph {
    #[text_signature = "(self, name)"]
    /// Set the name of the graph.
    ///
    /// Parameters
    /// -----------------------
    /// name: str,
    ///     Name of the graph.
    fn set_name(&mut self, name: String) {
        self.graph.set_name(name)
    }
    #[text_signature = "(self, embedding)"]
    /// Set the embedding of the graph.
    ///
    /// Parameters
    /// -----------------------
    /// embedding: np.ndarray,
    ///     Embedding of the graph.
    fn set_embedding(&mut self, embedding: Vec<Vec<f64>>) -> PyResult<()> {
        // NOTE(review): `pyex!` presumably maps the inner library error into
        // a PyResult — confirm against the macro's definition.
        pyex!(self.graph.set_embedding(embedding))
    }
}
|
// svd2rust-generated accessors for the PWR_CR1 power-control register.
#[doc = "Reader of register PWR_CR1"]
pub type R = crate::R<u32, super::PWR_CR1>;
#[doc = "Writer for register PWR_CR1"]
pub type W = crate::W<u32, super::PWR_CR1>;
#[doc = "Register PWR_CR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::PWR_CR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // All fields clear on reset.
        0
    }
}
// Field reader/writer proxies (svd2rust-generated). Each writer's `bit`/`bits`
// masks its value into the field's position within PWR_CR1:
// LPDS bit 0, LPCFG bit 1, LVDS bit 2, PVDEN bit 4, PLS bits 5..=7,
// DBP bit 8, AVDEN bit 16, ALS bits 17..=18.
#[doc = "Reader of field `LPDS`"]
pub type LPDS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LPDS`"]
pub struct LPDS_W<'a> {
    w: &'a mut W,
}
impl<'a> LPDS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `LPCFG`"]
pub type LPCFG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LPCFG`"]
pub struct LPCFG_W<'a> {
    w: &'a mut W,
}
impl<'a> LPCFG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `LVDS`"]
pub type LVDS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LVDS`"]
pub struct LVDS_W<'a> {
    w: &'a mut W,
}
impl<'a> LVDS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `PVDEN`"]
pub type PVDEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PVDEN`"]
pub struct PVDEN_W<'a> {
    w: &'a mut W,
}
impl<'a> PVDEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `PLS`"]
pub type PLS_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PLS`"]
pub struct PLS_W<'a> {
    w: &'a mut W,
}
impl<'a> PLS_W<'a> {
    // `unsafe`: a 3-bit multi-bit field; the caller must pass a valid value.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 5)) | (((value as u32) & 0x07) << 5);
        self.w
    }
}
#[doc = "Reader of field `DBP`"]
pub type DBP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DBP`"]
pub struct DBP_W<'a> {
    w: &'a mut W,
}
impl<'a> DBP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `AVDEN`"]
pub type AVDEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `AVDEN`"]
pub struct AVDEN_W<'a> {
    w: &'a mut W,
}
impl<'a> AVDEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `ALS`"]
pub type ALS_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ALS`"]
pub struct ALS_W<'a> {
    w: &'a mut W,
}
impl<'a> ALS_W<'a> {
    // `unsafe`: a 2-bit multi-bit field; the caller must pass a valid value.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 17)) | (((value as u32) & 0x03) << 17);
        self.w
    }
}
// Read accessors: each extracts its field from the cached register value.
impl R {
    #[doc = "Bit 0 - LPDS"]
    #[inline(always)]
    pub fn lpds(&self) -> LPDS_R {
        LPDS_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - LPCFG"]
    #[inline(always)]
    pub fn lpcfg(&self) -> LPCFG_R {
        LPCFG_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - LVDS"]
    #[inline(always)]
    pub fn lvds(&self) -> LVDS_R {
        LVDS_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 4 - PVDEN"]
    #[inline(always)]
    pub fn pvden(&self) -> PVDEN_R {
        PVDEN_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bits 5:7 - PLS"]
    #[inline(always)]
    pub fn pls(&self) -> PLS_R {
        PLS_R::new(((self.bits >> 5) & 0x07) as u8)
    }
    #[doc = "Bit 8 - DBP"]
    #[inline(always)]
    pub fn dbp(&self) -> DBP_R {
        DBP_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 16 - AVDEN"]
    #[inline(always)]
    pub fn avden(&self) -> AVDEN_R {
        AVDEN_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bits 17:18 - ALS"]
    #[inline(always)]
    pub fn als(&self) -> ALS_R {
        ALS_R::new(((self.bits >> 17) & 0x03) as u8)
    }
}
// Write accessors: each returns the field's write proxy borrowing this writer.
impl W {
    #[doc = "Bit 0 - LPDS"]
    #[inline(always)]
    pub fn lpds(&mut self) -> LPDS_W {
        LPDS_W { w: self }
    }
    #[doc = "Bit 1 - LPCFG"]
    #[inline(always)]
    pub fn lpcfg(&mut self) -> LPCFG_W {
        LPCFG_W { w: self }
    }
    #[doc = "Bit 2 - LVDS"]
    #[inline(always)]
    pub fn lvds(&mut self) -> LVDS_W {
        LVDS_W { w: self }
    }
    #[doc = "Bit 4 - PVDEN"]
    #[inline(always)]
    pub fn pvden(&mut self) -> PVDEN_W {
        PVDEN_W { w: self }
    }
    #[doc = "Bits 5:7 - PLS"]
    #[inline(always)]
    pub fn pls(&mut self) -> PLS_W {
        PLS_W { w: self }
    }
    #[doc = "Bit 8 - DBP"]
    #[inline(always)]
    pub fn dbp(&mut self) -> DBP_W {
        DBP_W { w: self }
    }
    #[doc = "Bit 16 - AVDEN"]
    #[inline(always)]
    pub fn avden(&mut self) -> AVDEN_W {
        AVDEN_W { w: self }
    }
    #[doc = "Bits 17:18 - ALS"]
    #[inline(always)]
    pub fn als(&mut self) -> ALS_W {
        ALS_W { w: self }
    }
}
|
use quote::quote_spanned;
use super::{
FlowProperties, FlowPropertyVal, OperatorCategory, OperatorConstraints, OperatorInstance,
OperatorWriteOutput, WriteContextArgs, RANGE_0, RANGE_1,
};
/// > 1 input stream, 0 output streams
///
/// > Arguments: a Rust closure
///
/// Iterates through a stream passing each element to the closure in the
/// argument.
///
/// > Note: The closure has access to the [`context` object](surface_flows.md#the-context-object).
///
/// ```hydroflow
/// source_iter(vec!["Hello", "World"])
///     -> for_each(|x| println!("{}", x));
/// ```
pub const FOR_EACH: OperatorConstraints = OperatorConstraints {
    name: "for_each",
    categories: &[OperatorCategory::Sink],
    // Exactly one input stream, zero output streams (a pure sink).
    hard_range_inn: RANGE_1,
    soft_range_inn: RANGE_1,
    hard_range_out: RANGE_0,
    soft_range_out: RANGE_0,
    // One argument: the per-element closure.
    num_args: 1,
    persistence_args: RANGE_0,
    type_args: RANGE_0,
    is_external_input: false,
    ports_inn: None,
    ports_out: None,
    // Determinism/monotonicity depend entirely on the user's closure.
    properties: FlowProperties {
        deterministic: FlowPropertyVal::DependsOnArgs,
        monotonic: FlowPropertyVal::DependsOnArgs,
        inconsistency_tainted: false,
    },
    input_delaytype_fn: |_| None,
    // Code generation: wrap the user closure in a `ForEach` pusherator.
    write_fn: |&WriteContextArgs {
                   root,
                   op_span,
                   ident,
                   op_inst: OperatorInstance { arguments, .. },
                   ..
               },
               _| {
        let write_iterator = quote_spanned! {op_span=>
            let #ident = #root::pusherator::for_each::ForEach::new(#arguments);
        };
        Ok(OperatorWriteOutput {
            write_iterator,
            ..Default::default()
        })
    },
};
|
pub mod new_game;
use self::new_game::*;
use crate::{components::animal_kind::AnimalKind, utils::rgba_to_raui_color};
use oxygengine::user_interface::raui::core::prelude::*;
/// Main-menu widget: solid background, game logo, a prompt line, and a
/// looped horizontal row of three "new game" buttons (one per starting
/// animal kind).
#[pre_hooks(use_nav_container_active)]
pub fn menu(mut context: WidgetContext) -> WidgetNode {
    // NOTE(review): `context` is destructured immediately, so the `mut`
    // binding looks unnecessary — confirm it isn't required by `pre_hooks`.
    let WidgetContext { key, .. } = context;
    let background_props = ImageBoxProps {
        material: ImageBoxMaterial::Color(ImageBoxColor {
            color: rgba_to_raui_color(101, 119, 255, 255),
            ..Default::default()
        }),
        ..Default::default()
    };
    // Inset of the vertical content column from the screen edges.
    let content_props = ContentBoxItemLayout {
        margin: Rect {
            left: 200.0,
            right: 200.0,
            top: 100.0,
            bottom: 100.0,
        },
        ..Default::default()
    };
    // Logo keeps its aspect ratio, centered within its slot.
    let game_logo_props = ImageBoxProps {
        content_keep_aspect_ratio: Some(ImageBoxAspectRatio {
            horizontal_alignment: 0.5,
            vertical_alignment: 0.5,
            outside: false,
        }),
        material: ImageBoxMaterial::Image(ImageBoxImage {
            id: "images/game-logo.svg".to_owned(),
            ..Default::default()
        }),
        ..Default::default()
    };
    let text_props = TextBoxProps {
        text: "Select your starting soul:".to_owned(),
        height: TextBoxSizeValue::Exact(20.0),
        horizontal_align: TextBoxHorizontalAlign::Center,
        font: TextBoxFont {
            name: "fonts/aquatico.json".to_owned(),
            size: 40.0,
        },
        color: Color {
            r: 1.0,
            g: 1.0,
            b: 1.0,
            a: 1.0,
        },
        ..Default::default()
    };
    // Declarative widget tree; `NavJumpLooped` makes button navigation wrap.
    widget! {
        (#{key} content_box [
            (#{"background"} image_box: {background_props})
            (#{"content"} vertical_box: {content_props} [
                (#{"game-logo"} image_box: {game_logo_props})
                (space_box: {SpaceBoxProps::vertical(60.0)})
                (#{"text"} text_box: {text_props})
                (#{"animals"} nav_horizontal_box: {NavJumpLooped} [
                    (#{"ground-water"} new_game_button: {NewGameButtonProps(AnimalKind::GroundWater)})
                    (#{"water-air"} new_game_button: {NewGameButtonProps(AnimalKind::WaterAir)})
                    (#{"air-ground"} new_game_button: {NewGameButtonProps(AnimalKind::AirGround)})
                ])
            ])
        ])
    }
}
|
use gfx;
use super::pipeline::*;
use collision;
/// Renderable data for one chunk — presumably a voxel/terrain chunk given
/// the `cubes` count; confirm against the producing code.
pub struct ChunkObject<R: gfx::Resources> {
    // Pipeline data bundle (buffers/uniforms) bound for this chunk's draw.
    pub data: pipe::Data<R>,
    // Vertex-range slice submitted to the draw call.
    pub slice: gfx::Slice<R>,
    // World-space position of the chunk.
    pub position: [f32; 3],
    // Axis-aligned bounding box (e.g. for culling/picking — TODO confirm).
    pub bounds: collision::Aabb3<f32>,
    // Mesh statistics.
    pub cubes: usize,
    pub vertices: usize,
    pub triangles: usize,
}
|
// svd2rust-generated accessors for ETH_MACLETR (LPI Entry Timer Register).
#[doc = "Register `ETH_MACLETR` reader"]
pub type R = crate::R<ETH_MACLETR_SPEC>;
#[doc = "Register `ETH_MACLETR` writer"]
pub type W = crate::W<ETH_MACLETR_SPEC>;
#[doc = "Field `LPIET` reader - LPIET"]
pub type LPIET_R = crate::FieldReader<u32>;
// 17-bit field writer at offset `O` (instantiated below with O = 3).
#[doc = "Field `LPIET` writer - LPIET"]
pub type LPIET_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 17, O, u32>;
impl R {
    #[doc = "Bits 3:19 - LPIET"]
    #[inline(always)]
    pub fn lpiet(&self) -> LPIET_R {
        LPIET_R::new((self.bits >> 3) & 0x0001_ffff)
    }
}
impl W {
    #[doc = "Bits 3:19 - LPIET"]
    #[inline(always)]
    #[must_use]
    pub fn lpiet(&mut self) -> LPIET_W<ETH_MACLETR_SPEC, 3> {
        LPIET_W::new(self)
    }
    // `unsafe`: writes the whole register without field validation.
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "The LPI Entry Timer Register is used to store the LPI Idle Timer Value in Micro-Seconds.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_macletr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eth_macletr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_MACLETR_SPEC;
impl crate::RegisterSpec for ETH_MACLETR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eth_macletr::R`](R) reader structure"]
impl crate::Readable for ETH_MACLETR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`eth_macletr::W`](W) writer structure"]
impl crate::Writable for ETH_MACLETR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ETH_MACLETR to value 0"]
impl crate::Resettable for ETH_MACLETR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.