text stringlengths 8 4.13M |
|---|
use std::fmt;
use rand::{distributions::Standard, prelude::Distribution, seq::SliceRandom, Rng};
use crate::payload::Generator;
use super::{choose_or_not, common};
#[derive(Debug, Clone)]
/// Pools of pre-generated strings and tags from which random DogStatsD-style
/// events are sampled; see the `Generator<Event>` impl below.
pub(crate) struct EventGenerator {
    /// Candidate event titles; one is chosen uniformly per event.
    pub(crate) titles: Vec<String>,
    /// Candidate event body texts; one is chosen uniformly per event.
    pub(crate) texts_or_messages: Vec<String>,
    /// Shared pool for the optional hostname, aggregation-key, and
    /// source-type-name fields.
    pub(crate) small_strings: Vec<String>,
    /// Candidate tag sets; an event may carry one of these or none.
    pub(crate) tags: Vec<common::tags::Tags>,
}
impl Generator<Event> for EventGenerator {
    /// Builds one random `Event`, sampling the mandatory title/text from the
    /// pools and filling each optional field via `choose_or_not` / `rng.gen()`.
    fn generate<R>(&self, mut rng: &mut R) -> Event
    where
        R: rand::Rng + ?Sized,
    {
        // NOTE(review): the `mut rng` binding plus `&mut rng` below passes a
        // `&mut &mut R` to the helpers; presumably this sidesteps a `Sized`
        // bound on `choose_or_not` — confirm against its signature before
        // simplifying.
        let title = self.titles.choose(&mut rng).unwrap().clone();
        let text = self.texts_or_messages.choose(&mut rng).unwrap().clone();
        let tags = choose_or_not(&mut rng, &self.tags);
        Event {
            // Byte lengths are captured up front; `Display` emits them in the
            // `_e{<len>,<len>}` header.
            title_utf8_length: title.len(),
            text_utf8_length: text.len(),
            title,
            text,
            timestamp_second: rng.gen(),
            hostname: choose_or_not(&mut rng, &self.small_strings),
            aggregation_key: choose_or_not(&mut rng, &self.small_strings),
            priority: rng.gen(),
            source_type_name: choose_or_not(&mut rng, &self.small_strings),
            alert_type: rng.gen(),
            tags,
        }
    }
}
/// One DogStatsD-style event; serialized to the wire text form by its
/// `fmt::Display` impl below. All `Option` fields are omitted from the output
/// when `None`.
pub(crate) struct Event {
    title: String,
    text: String,
    // Byte length of `title` (`title.len()` at construction time).
    title_utf8_length: usize,
    // Byte length of `text` (`text.len()` at construction time).
    text_utf8_length: usize,
    timestamp_second: Option<u32>,
    hostname: Option<String>,
    aggregation_key: Option<String>,
    priority: Option<Priority>,
    source_type_name: Option<String>,
    alert_type: Option<Alert>,
    tags: Option<common::tags::Tags>,
}
impl fmt::Display for Event {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// _e{<TITLE_UTF8_LENGTH>,<TEXT_UTF8_LENGTH>}:<TITLE>|<TEXT>|d:<TIMESTAMP>|h:<HOSTNAME>|p:<PRIORITY>|t:<ALERT_TYPE>|#<TAG_KEY_1>:<TAG_VALUE_1>,<TAG_2>
write!(
f,
"_e{{{title_utf8_length},{text_utf8_length}}}:{title}|{text}",
title_utf8_length = self.title_utf8_length,
text_utf8_length = self.text_utf8_length,
title = self.title,
text = self.text,
)?;
if let Some(timestamp) = self.timestamp_second {
write!(f, "|d:{timestamp}")?;
}
if let Some(ref hostname) = self.hostname {
write!(f, "|h:{hostname}")?;
}
if let Some(ref priority) = self.priority {
write!(f, "|p:{priority}")?;
}
if let Some(ref alert_type) = self.alert_type {
write!(f, "|t:{alert_type}")?;
}
if let Some(ref aggregation_key) = self.aggregation_key {
write!(f, "|k:{aggregation_key}")?;
}
if let Some(ref source_type_name) = self.source_type_name {
write!(f, "|s:{source_type_name}")?;
}
if let Some(ref tags) = self.tags {
if !tags.is_empty() {
write!(f, "|#")?;
let mut commas_remaining = tags.len() - 1;
for (k, v) in tags.iter() {
write!(f, "{k}:{v}")?;
if commas_remaining != 0 {
write!(f, ",")?;
commas_remaining -= 1;
}
}
}
}
Ok(())
}
}
/// Event priority as carried in the `|p:` segment; sampled uniformly via the
/// `Distribution` impl below.
enum Priority {
    Normal,
    Low,
}
impl Distribution<Priority> for Standard {
    /// Uniformly samples one of the two priorities.
    fn sample<R>(&self, rng: &mut R) -> Priority
    where
        R: Rng + ?Sized,
    {
        // Single uniform draw over {0, 1}; consumes exactly the same RNG
        // output as the original two-arm match.
        if rng.gen_range(0..2) == 0 {
            Priority::Low
        } else {
            Priority::Normal
        }
    }
}
impl fmt::Display for Priority {
    /// Emits the lowercase wire name used in the `|p:` segment.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Normal => "normal",
            Self::Low => "low",
        })
    }
}
#[derive(Clone, Copy)]
/// Event alert type as carried in the `|t:` segment; sampled uniformly via
/// the `Distribution` impl below.
enum Alert {
    Error,
    Warning,
    Info,
    Success,
}
impl Distribution<Alert> for Standard {
fn sample<R>(&self, rng: &mut R) -> Alert
where
R: Rng + ?Sized,
{
match rng.gen_range(0..4) {
0 => Alert::Error,
1 => Alert::Warning,
2 => Alert::Info,
3 => Alert::Success,
_ => unreachable!(),
}
}
}
impl fmt::Display for Alert {
    /// Emits the lowercase wire name used in the `|t:` segment.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Error => "error",
            Self::Warning => "warning",
            Self::Info => "info",
            Self::Success => "success",
        })
    }
}
|
// Generated-style register accessor API for AREF_CTRL (reader/writer type
// aliases plus the register's documented reset value).
#[doc = "Reader of register AREF_CTRL"]
pub type R = crate::R<u32, super::AREF_CTRL>;
#[doc = "Writer for register AREF_CTRL"]
pub type W = crate::W<u32, super::AREF_CTRL>;
#[doc = "Register AREF_CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::AREF_CTRL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset leaves every field of AREF_CTRL at 0.
        0
    }
}
#[doc = "Control bit to trade off AREF settling and noise performance\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AREF_MODE_A {
    #[doc = "0: Nominal noise normal startup mode (meets normal mode settling and noise specifications)"]
    NORMAL,
    #[doc = "1: High noise fast startup mode (meets fast mode settling and noise specifications)"]
    FAST_START,
}
// One-bit field encoding: NORMAL = 0 (false), FAST_START = 1 (true).
impl From<AREF_MODE_A> for bool {
    #[inline(always)]
    fn from(variant: AREF_MODE_A) -> Self {
        match variant {
            AREF_MODE_A::NORMAL => false,
            AREF_MODE_A::FAST_START => true,
        }
    }
}
#[doc = "Reader of field `AREF_MODE`"]
pub type AREF_MODE_R = crate::R<bool, AREF_MODE_A>;
impl AREF_MODE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> AREF_MODE_A {
        // The field is a single bit, so both values map to a named variant.
        match self.bits {
            false => AREF_MODE_A::NORMAL,
            true => AREF_MODE_A::FAST_START,
        }
    }
    #[doc = "Checks if the value of the field is `NORMAL`"]
    #[inline(always)]
    pub fn is_normal(&self) -> bool {
        *self == AREF_MODE_A::NORMAL
    }
    #[doc = "Checks if the value of the field is `FAST_START`"]
    #[inline(always)]
    pub fn is_fast_start(&self) -> bool {
        *self == AREF_MODE_A::FAST_START
    }
}
#[doc = "Write proxy for field `AREF_MODE`"]
pub struct AREF_MODE_W<'a> {
    // Borrow of the register writer; every setter returns it for chaining.
    w: &'a mut W,
}
impl<'a> AREF_MODE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: AREF_MODE_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Nominal noise normal startup mode (meets normal mode settling and noise specifications)"]
    #[inline(always)]
    pub fn normal(self) -> &'a mut W {
        self.variant(AREF_MODE_A::NORMAL)
    }
    #[doc = "High noise fast startup mode (meets fast mode settling and noise specifications)"]
    #[inline(always)]
    pub fn fast_start(self) -> &'a mut W {
        self.variant(AREF_MODE_A::FAST_START)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 0 of the register image.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `AREF_BIAS_SCALE`"]
pub type AREF_BIAS_SCALE_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `AREF_BIAS_SCALE`"]
pub struct AREF_BIAS_SCALE_W<'a> {
    w: &'a mut W,
}
impl<'a> AREF_BIAS_SCALE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // `unsafe`: the 2-bit field has no enumerated wrapper, so the caller
    // asserts the value is meaningful for the hardware.
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 3:2.
        self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2);
        self.w
    }
}
#[doc = "Reader of field `AREF_RMB`"]
pub type AREF_RMB_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `AREF_RMB`"]
pub struct AREF_RMB_W<'a> {
    w: &'a mut W,
}
impl<'a> AREF_RMB_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 6:4.
        self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4);
        self.w
    }
}
#[doc = "Reader of field `CTB_IPTAT_SCALE`"]
pub type CTB_IPTAT_SCALE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CTB_IPTAT_SCALE`"]
pub struct CTB_IPTAT_SCALE_W<'a> {
    w: &'a mut W,
}
impl<'a> CTB_IPTAT_SCALE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 7.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `CTB_IPTAT_REDIRECT`"]
pub type CTB_IPTAT_REDIRECT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CTB_IPTAT_REDIRECT`"]
pub struct CTB_IPTAT_REDIRECT_W<'a> {
    w: &'a mut W,
}
impl<'a> CTB_IPTAT_REDIRECT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 15:8 (one redirect bit per opamp).
        self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);
        self.w
    }
}
#[doc = "iztat current select control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum IZTAT_SEL_A {
    #[doc = "0: Use 250nA IZTAT from SRSS"]
    SRSS,
    #[doc = "1: Use locally generated 250nA"]
    LOCAL,
}
// One-bit field encoding: SRSS = 0 (false), LOCAL = 1 (true).
impl From<IZTAT_SEL_A> for bool {
    #[inline(always)]
    fn from(variant: IZTAT_SEL_A) -> Self {
        match variant {
            IZTAT_SEL_A::SRSS => false,
            IZTAT_SEL_A::LOCAL => true,
        }
    }
}
#[doc = "Reader of field `IZTAT_SEL`"]
pub type IZTAT_SEL_R = crate::R<bool, IZTAT_SEL_A>;
impl IZTAT_SEL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> IZTAT_SEL_A {
        // Single-bit field: both values are covered by named variants.
        match self.bits {
            false => IZTAT_SEL_A::SRSS,
            true => IZTAT_SEL_A::LOCAL,
        }
    }
    #[doc = "Checks if the value of the field is `SRSS`"]
    #[inline(always)]
    pub fn is_srss(&self) -> bool {
        *self == IZTAT_SEL_A::SRSS
    }
    #[doc = "Checks if the value of the field is `LOCAL`"]
    #[inline(always)]
    pub fn is_local(&self) -> bool {
        *self == IZTAT_SEL_A::LOCAL
    }
}
#[doc = "Write proxy for field `IZTAT_SEL`"]
pub struct IZTAT_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> IZTAT_SEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: IZTAT_SEL_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Use 250nA IZTAT from SRSS"]
    #[inline(always)]
    pub fn srss(self) -> &'a mut W {
        self.variant(IZTAT_SEL_A::SRSS)
    }
    #[doc = "Use locally generated 250nA"]
    #[inline(always)]
    pub fn local(self) -> &'a mut W {
        self.variant(IZTAT_SEL_A::LOCAL)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 16.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `CLOCK_PUMP_PERI_SEL`"]
pub type CLOCK_PUMP_PERI_SEL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CLOCK_PUMP_PERI_SEL`"]
pub struct CLOCK_PUMP_PERI_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> CLOCK_PUMP_PERI_SEL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 19.
        self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
        self.w
    }
}
#[doc = "bandgap voltage select control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VREF_SEL_A {
    #[doc = "0: Use 0.8V Vref from SRSS"]
    SRSS,
    #[doc = "1: Use locally generated Vref"]
    LOCAL,
    #[doc = "2: Use externally supplied Vref (aref_ext_vref)"]
    EXTERNAL,
}
// 2-bit field encoding; note value 3 is reserved (see `VREF_SEL_R::variant`).
impl From<VREF_SEL_A> for u8 {
    #[inline(always)]
    fn from(variant: VREF_SEL_A) -> Self {
        match variant {
            VREF_SEL_A::SRSS => 0,
            VREF_SEL_A::LOCAL => 1,
            VREF_SEL_A::EXTERNAL => 2,
        }
    }
}
#[doc = "Reader of field `VREF_SEL`"]
pub type VREF_SEL_R = crate::R<u8, VREF_SEL_A>;
impl VREF_SEL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, VREF_SEL_A> {
        use crate::Variant::*;
        // Value 3 of this 2-bit field is not enumerated, so the reader
        // returns `Res(raw)` for it instead of a named variant.
        match self.bits {
            0 => Val(VREF_SEL_A::SRSS),
            1 => Val(VREF_SEL_A::LOCAL),
            2 => Val(VREF_SEL_A::EXTERNAL),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `SRSS`"]
    #[inline(always)]
    pub fn is_srss(&self) -> bool {
        *self == VREF_SEL_A::SRSS
    }
    #[doc = "Checks if the value of the field is `LOCAL`"]
    #[inline(always)]
    pub fn is_local(&self) -> bool {
        *self == VREF_SEL_A::LOCAL
    }
    #[doc = "Checks if the value of the field is `EXTERNAL`"]
    #[inline(always)]
    pub fn is_external(&self) -> bool {
        *self == VREF_SEL_A::EXTERNAL
    }
}
#[doc = "Write proxy for field `VREF_SEL`"]
pub struct VREF_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> VREF_SEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: VREF_SEL_A) -> &'a mut W {
        // Safe wrapper: every enum variant maps to a valid field value.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "Use 0.8V Vref from SRSS"]
    #[inline(always)]
    pub fn srss(self) -> &'a mut W {
        self.variant(VREF_SEL_A::SRSS)
    }
    #[doc = "Use locally generated Vref"]
    #[inline(always)]
    pub fn local(self) -> &'a mut W {
        self.variant(VREF_SEL_A::LOCAL)
    }
    #[doc = "Use externally supplied Vref (aref_ext_vref)"]
    #[inline(always)]
    pub fn external(self) -> &'a mut W {
        self.variant(VREF_SEL_A::EXTERNAL)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 21:20.
        self.w.bits = (self.w.bits & !(0x03 << 20)) | (((value as u32) & 0x03) << 20);
        self.w
    }
}
#[doc = "AREF DeepSleep Operation Modes (only applies if DEEPSLEEP_ON = 1)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DEEPSLEEP_MODE_A {
    #[doc = "0: All blocks 'OFF' in DeepSleep"]
    OFF,
    #[doc = "1: IPTAT bias generator 'ON' in DeepSleep (used for fast AREF wakeup only: IPTAT outputs not available)"]
    IPTAT,
    #[doc = "2: IPTAT bias generator and outputs 'ON' in DeepSleep (used for biasing the CTBm with a PTAT current only in deepsleep)\n\n*Note that this mode also requires that the CTB_IPTAT_REDIRECT be set if the CTBm opamp is to operate in DeepSleep"]
    IPTAT_IZTAT,
    #[doc = "3: IPTAT, VREF, and IZTAT generators 'ON' in DeepSleep. This mode provides identical AREF functionality in DeepSleep as in the Active mode."]
    IPTAT_IZTAT_VREF,
}
// 2-bit field; all four values are enumerated, so the reader below is total.
impl From<DEEPSLEEP_MODE_A> for u8 {
    #[inline(always)]
    fn from(variant: DEEPSLEEP_MODE_A) -> Self {
        match variant {
            DEEPSLEEP_MODE_A::OFF => 0,
            DEEPSLEEP_MODE_A::IPTAT => 1,
            DEEPSLEEP_MODE_A::IPTAT_IZTAT => 2,
            DEEPSLEEP_MODE_A::IPTAT_IZTAT_VREF => 3,
        }
    }
}
#[doc = "Reader of field `DEEPSLEEP_MODE`"]
pub type DEEPSLEEP_MODE_R = crate::R<u8, DEEPSLEEP_MODE_A>;
impl DEEPSLEEP_MODE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DEEPSLEEP_MODE_A {
        match self.bits {
            0 => DEEPSLEEP_MODE_A::OFF,
            1 => DEEPSLEEP_MODE_A::IPTAT,
            2 => DEEPSLEEP_MODE_A::IPTAT_IZTAT,
            3 => DEEPSLEEP_MODE_A::IPTAT_IZTAT_VREF,
            // The reader extracts only 2 bits (see `R::deepsleep_mode`), so
            // values above 3 cannot occur.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `OFF`"]
    #[inline(always)]
    pub fn is_off(&self) -> bool {
        *self == DEEPSLEEP_MODE_A::OFF
    }
    #[doc = "Checks if the value of the field is `IPTAT`"]
    #[inline(always)]
    pub fn is_iptat(&self) -> bool {
        *self == DEEPSLEEP_MODE_A::IPTAT
    }
    #[doc = "Checks if the value of the field is `IPTAT_IZTAT`"]
    #[inline(always)]
    pub fn is_iptat_iztat(&self) -> bool {
        *self == DEEPSLEEP_MODE_A::IPTAT_IZTAT
    }
    #[doc = "Checks if the value of the field is `IPTAT_IZTAT_VREF`"]
    #[inline(always)]
    pub fn is_iptat_iztat_vref(&self) -> bool {
        *self == DEEPSLEEP_MODE_A::IPTAT_IZTAT_VREF
    }
}
#[doc = "Write proxy for field `DEEPSLEEP_MODE`"]
pub struct DEEPSLEEP_MODE_W<'a> {
    w: &'a mut W,
}
impl<'a> DEEPSLEEP_MODE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DEEPSLEEP_MODE_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "All blocks 'OFF' in DeepSleep"]
    #[inline(always)]
    pub fn off(self) -> &'a mut W {
        self.variant(DEEPSLEEP_MODE_A::OFF)
    }
    #[doc = "IPTAT bias generator 'ON' in DeepSleep (used for fast AREF wakeup only: IPTAT outputs not available)"]
    #[inline(always)]
    pub fn iptat(self) -> &'a mut W {
        self.variant(DEEPSLEEP_MODE_A::IPTAT)
    }
    #[doc = "IPTAT bias generator and outputs 'ON' in DeepSleep (used for biasing the CTBm with a PTAT current only in deepsleep) *Note that this mode also requires that the CTB_IPTAT_REDIRECT be set if the CTBm opamp is to operate in DeepSleep"]
    #[inline(always)]
    pub fn iptat_iztat(self) -> &'a mut W {
        self.variant(DEEPSLEEP_MODE_A::IPTAT_IZTAT)
    }
    #[doc = "IPTAT, VREF, and IZTAT generators 'ON' in DeepSleep. This mode provides identical AREF functionality in DeepSleep as in the Active mode."]
    #[inline(always)]
    pub fn iptat_iztat_vref(self) -> &'a mut W {
        self.variant(DEEPSLEEP_MODE_A::IPTAT_IZTAT_VREF)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // Safe (not `unsafe`) because every 2-bit value is a valid enumerated mode.
    pub fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 29:28.
        self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28);
        self.w
    }
}
#[doc = "Reader of field `DEEPSLEEP_ON`"]
pub type DEEPSLEEP_ON_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DEEPSLEEP_ON`"]
pub struct DEEPSLEEP_ON_W<'a> {
    w: &'a mut W,
}
impl<'a> DEEPSLEEP_ON_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 30.
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
#[doc = "Reader of field `ENABLED`"]
pub type ENABLED_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLED`"]
pub struct ENABLED_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLED_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 31.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
// Read accessors: one method per register field, each shifting and masking
// its bit range out of the 32-bit register image.
impl R {
    #[doc = "Bit 0 - Control bit to trade off AREF settling and noise performance"]
    #[inline(always)]
    pub fn aref_mode(&self) -> AREF_MODE_R {
        AREF_MODE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 2:3 - BIAS Current Control for all AREF Amplifiers. (These are risk mitigation bits that should not be touched by the customer: the impact on IDDA/noise/startup still needs to be characterized) 0: 125nA (reduced bias: reduction in total AREF IDDA, higher noise and longer startup times) 1: 250nA ('default' setting to meet bandgap performance (noise/startup) and IDDA specifications) 2: 375nA (increased bias: increase in total AREF IDDA, lower noise and shorter startup times) 3: 500nA (further increased bias: increase in total AREF IDDA, lower noise and shorter startup times)"]
    #[inline(always)]
    pub fn aref_bias_scale(&self) -> AREF_BIAS_SCALE_R {
        AREF_BIAS_SCALE_R::new(((self.bits >> 2) & 0x03) as u8)
    }
    #[doc = "Bits 4:6 - AREF control signals (RMB). Bit 0: Manual VBG startup circuit enable 0: normal VBG startup circuit operation 1: VBG startup circuit is forced 'always on' Bit 1: Manual disable of IPTAT2 DAC 0: normal IPTAT2 DAC operation 1: PTAT2 DAC is disabled while VBG startup is active Bit 2: Manual enable of VBG offset correction DAC 0: normal VBG offset correction DAC operation 1: VBG offset correction DAC is enabled while VBG startup is active"]
    #[inline(always)]
    pub fn aref_rmb(&self) -> AREF_RMB_R {
        AREF_RMB_R::new(((self.bits >> 4) & 0x07) as u8)
    }
    #[doc = "Bit 7 - CTB IPTAT current scaler. This bit must be set in order to operate the CTB amplifiers in the lowest power mode. This bit is chip-wide (controls all CTB amplifiers). 0: 1uA 1: 100nA"]
    #[inline(always)]
    pub fn ctb_iptat_scale(&self) -> CTB_IPTAT_SCALE_R {
        CTB_IPTAT_SCALE_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bits 8:15 - Re-direct the CTB IPTAT output current. This can be used to reduce amplifier bias glitches during power mode transitions (for PSoC4A/B DSAB backwards compatibility). 0: Opamp<n>.IPTAT = AREF.IPTAT and Opamp<n>.IZTAT= AREF.IZTAT 1: Opamp<n>.IPTAT = HiZ and Opamp<n>.IZTAT= AREF.IPTAT *Note that in Deep Sleep, the AREF IZTAT and/or IPTAT currents can be disabled and therefore the corresponding Opamp<n>.IZTAT/IPTAT will be HiZ."]
    #[inline(always)]
    pub fn ctb_iptat_redirect(&self) -> CTB_IPTAT_REDIRECT_R {
        CTB_IPTAT_REDIRECT_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bit 16 - iztat current select control"]
    #[inline(always)]
    pub fn iztat_sel(&self) -> IZTAT_SEL_R {
        IZTAT_SEL_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 19 - CTBm charge pump clock source select. This field has nothing to do with the AREF. 0: Use the dedicated pump clock from SRSS (default) 1: Use one of the CLK_PERI dividers"]
    #[inline(always)]
    pub fn clock_pump_peri_sel(&self) -> CLOCK_PUMP_PERI_SEL_R {
        CLOCK_PUMP_PERI_SEL_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bits 20:21 - bandgap voltage select control"]
    #[inline(always)]
    pub fn vref_sel(&self) -> VREF_SEL_R {
        VREF_SEL_R::new(((self.bits >> 20) & 0x03) as u8)
    }
    #[doc = "Bits 28:29 - AREF DeepSleep Operation Modes (only applies if DEEPSLEEP_ON = 1)"]
    #[inline(always)]
    pub fn deepsleep_mode(&self) -> DEEPSLEEP_MODE_R {
        DEEPSLEEP_MODE_R::new(((self.bits >> 28) & 0x03) as u8)
    }
    #[doc = "Bit 30 - - 0: AREF IP disabled/off during DeepSleep power mode - 1: AREF IP remains enabled during DeepSleep power mode (if ENABLED=1)"]
    #[inline(always)]
    pub fn deepsleep_on(&self) -> DEEPSLEEP_ON_R {
        DEEPSLEEP_ON_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - Disable AREF"]
    #[inline(always)]
    pub fn enabled(&self) -> ENABLED_R {
        ENABLED_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// Write accessors: each method hands out a field-specific write proxy that
// performs the masked read-modify-write on this writer's `bits`.
impl W {
    #[doc = "Bit 0 - Control bit to trade off AREF settling and noise performance"]
    #[inline(always)]
    pub fn aref_mode(&mut self) -> AREF_MODE_W {
        AREF_MODE_W { w: self }
    }
    #[doc = "Bits 2:3 - BIAS Current Control for all AREF Amplifiers. (These are risk mitigation bits that should not be touched by the customer: the impact on IDDA/noise/startup still needs to be characterized) 0: 125nA (reduced bias: reduction in total AREF IDDA, higher noise and longer startup times) 1: 250nA ('default' setting to meet bandgap performance (noise/startup) and IDDA specifications) 2: 375nA (increased bias: increase in total AREF IDDA, lower noise and shorter startup times) 3: 500nA (further increased bias: increase in total AREF IDDA, lower noise and shorter startup times)"]
    #[inline(always)]
    pub fn aref_bias_scale(&mut self) -> AREF_BIAS_SCALE_W {
        AREF_BIAS_SCALE_W { w: self }
    }
    #[doc = "Bits 4:6 - AREF control signals (RMB). Bit 0: Manual VBG startup circuit enable 0: normal VBG startup circuit operation 1: VBG startup circuit is forced 'always on' Bit 1: Manual disable of IPTAT2 DAC 0: normal IPTAT2 DAC operation 1: PTAT2 DAC is disabled while VBG startup is active Bit 2: Manual enable of VBG offset correction DAC 0: normal VBG offset correction DAC operation 1: VBG offset correction DAC is enabled while VBG startup is active"]
    #[inline(always)]
    pub fn aref_rmb(&mut self) -> AREF_RMB_W {
        AREF_RMB_W { w: self }
    }
    #[doc = "Bit 7 - CTB IPTAT current scaler. This bit must be set in order to operate the CTB amplifiers in the lowest power mode. This bit is chip-wide (controls all CTB amplifiers). 0: 1uA 1: 100nA"]
    #[inline(always)]
    pub fn ctb_iptat_scale(&mut self) -> CTB_IPTAT_SCALE_W {
        CTB_IPTAT_SCALE_W { w: self }
    }
    #[doc = "Bits 8:15 - Re-direct the CTB IPTAT output current. This can be used to reduce amplifier bias glitches during power mode transitions (for PSoC4A/B DSAB backwards compatibility). 0: Opamp<n>.IPTAT = AREF.IPTAT and Opamp<n>.IZTAT= AREF.IZTAT 1: Opamp<n>.IPTAT = HiZ and Opamp<n>.IZTAT= AREF.IPTAT *Note that in Deep Sleep, the AREF IZTAT and/or IPTAT currents can be disabled and therefore the corresponding Opamp<n>.IZTAT/IPTAT will be HiZ."]
    #[inline(always)]
    pub fn ctb_iptat_redirect(&mut self) -> CTB_IPTAT_REDIRECT_W {
        CTB_IPTAT_REDIRECT_W { w: self }
    }
    #[doc = "Bit 16 - iztat current select control"]
    #[inline(always)]
    pub fn iztat_sel(&mut self) -> IZTAT_SEL_W {
        IZTAT_SEL_W { w: self }
    }
    #[doc = "Bit 19 - CTBm charge pump clock source select. This field has nothing to do with the AREF. 0: Use the dedicated pump clock from SRSS (default) 1: Use one of the CLK_PERI dividers"]
    #[inline(always)]
    pub fn clock_pump_peri_sel(&mut self) -> CLOCK_PUMP_PERI_SEL_W {
        CLOCK_PUMP_PERI_SEL_W { w: self }
    }
    #[doc = "Bits 20:21 - bandgap voltage select control"]
    #[inline(always)]
    pub fn vref_sel(&mut self) -> VREF_SEL_W {
        VREF_SEL_W { w: self }
    }
    #[doc = "Bits 28:29 - AREF DeepSleep Operation Modes (only applies if DEEPSLEEP_ON = 1)"]
    #[inline(always)]
    pub fn deepsleep_mode(&mut self) -> DEEPSLEEP_MODE_W {
        DEEPSLEEP_MODE_W { w: self }
    }
    #[doc = "Bit 30 - - 0: AREF IP disabled/off during DeepSleep power mode - 1: AREF IP remains enabled during DeepSleep power mode (if ENABLED=1)"]
    #[inline(always)]
    pub fn deepsleep_on(&mut self) -> DEEPSLEEP_ON_W {
        DEEPSLEEP_ON_W { w: self }
    }
    #[doc = "Bit 31 - Disable AREF"]
    #[inline(always)]
    pub fn enabled(&mut self) -> ENABLED_W {
        ENABLED_W { w: self }
    }
}
|
use gdnative::prelude::*;
/// Installs a panic hook that logs Rust panics through Godot's error log and
/// forwards them to an optional `rust_panic_hook` autoload node, while still
/// chaining to the previously installed hook.
pub fn init_panic_hook() {
    // To enable backtrace, you will need the `backtrace` crate to be included in your cargo.toml, or
    // a version of Rust where backtrace is included in the standard library (e.g. Rust nightly as of the date of publishing)
    // use backtrace::Backtrace;
    // use std::backtrace::Backtrace;
    let old_hook = std::panic::take_hook();
    std::panic::set_hook(Box::new(move |panic_info| {
        // Expression-form initialization instead of the original deferred
        // `let x; if … { x = … } else { x = … }` pattern.
        let loc_string = match panic_info.location() {
            Some(location) => format!("file '{}' at line {}", location.file(), location.line()),
            None => "unknown location".to_owned(),
        };
        // A panic payload is a `&str` for literal messages and a `String`
        // for formatted ones; anything else is reported generically.
        let error_message = if let Some(s) = panic_info.payload().downcast_ref::<&str>() {
            format!("[RUST] {}: panic occurred: {:?}", loc_string, s)
        } else if let Some(s) = panic_info.payload().downcast_ref::<String>() {
            format!("[RUST] {}: panic occurred: {:?}", loc_string, s)
        } else {
            format!("[RUST] {}: unknown panic occurred", loc_string)
        };
        godot_error!("{}", error_message);
        // Uncomment the following line if backtrace crate is included as a dependency
        // godot_error!("Backtrace:\n{:?}", Backtrace::new());
        // Chain to the previously-installed hook so default behavior is kept.
        (*(old_hook.as_ref()))(panic_info);
        unsafe {
            if let Some(gd_panic_hook) =
                gdnative::api::utils::autoload::<gdnative::api::Node>("rust_panic_hook")
            {
                gd_panic_hook.call(
                    "rust_panic_hook",
                    &[GodotString::from_str(error_message).to_variant()],
                );
            }
        }
    }));
}
|
extern crate byteorder;
// #[cfg(test)]
// #[macro_use]
// extern crate quickcheck;
pub mod varint;
pub mod vbyte;
// #[cfg(test)]
// mod tests;
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate log;
extern crate env_logger;
extern crate docopt;
extern crate sat;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use docopt::Docopt;
use sat::parse;
use sat::{Satness, SATSolver};
use sat::{naive, watch, nonchro};
// Write the Docopt usage string.
const USAGE: &'static str = "
Usage: rust-sat [--solver TYPE] <inputfile>
rust-sat --help
Options:
--solver TYPE Valid values: naive, watch, nonchro.
--help Show this message.
";
#[derive(Deserialize)]
// Docopt deserializes the (lowercased) `--solver` value into one of these.
enum SolverType { Naive, Watch, Nonchro }
#[derive(Deserialize)]
// Field names follow docopt's convention: `arg_` for positionals,
// `flag_` for options (see USAGE above).
struct Args {
    arg_inputfile: String,
    flag_solver: Option<SolverType>,
}
/// Runs `solver` to completion and prints the SAT/UNSAT verdict (with the
/// model on the SAT side) to stdout.
pub fn solve_file<Solver: SATSolver>(mut solver: Solver) {
    // Solve first so any solver-side output precedes the verdict prefix.
    let result = solver.solve();
    print!("Formula is ");
    if let Satness::SAT(interp) = result {
        println!("SAT with model:");
        println!(" {:?}", interp);
    } else {
        println!("UNSAT");
    }
}
/// CLI entry point: parses arguments, reads the input CNF file, and runs the
/// selected solver (non-chronological backtracking by default).
pub fn main() {
    let args: Args = Docopt::new(USAGE).and_then(|d| d.deserialize()).unwrap_or_else(|e| e.exit());
    let filename: &str = args.arg_inputfile.as_ref();
    let mut contents = String::new();
    // `File::open` takes any `AsRef<Path>`; the original's extra
    // `&Path::new(..)` borrow was redundant.
    let file_read = File::open(Path::new(filename)).and_then(|mut f| f.read_to_string(&mut contents));
    if file_read.is_err() {
        panic!("Error reading file encountered.");
    }
    // Flattened from the original triple-nested match via early exits.
    let cnf = match parse::parse_file(contents) {
        Ok(cnf) => cnf,
        Err(_) => panic!("Error parsing file encountered."),
    };
    match args.flag_solver {
        Some(SolverType::Naive) => solve_file(naive::Solver::create(cnf, None)),
        Some(SolverType::Watch) => solve_file(watch::Solver::create(cnf, None)),
        // `None` and `Some(Nonchro)` both take the default solver.
        _ => solve_file(nonchro::Solver::create(cnf, None)),
    }
}
|
use crate::layout::{LayoutBox, Rect};
use crate::paint::entity::{DisplayCommand, DisplayList};
use crate::paint::utils::get_color;
/// Appends one solid-color rectangle per border edge of `layout_box` to the
/// display list. Does nothing when no `border-color` resolves for the box.
pub fn render_borders(list: &mut DisplayList, layout_box: &LayoutBox) {
    // Without a resolvable border color there is nothing to paint.
    let color = match get_color(layout_box, "border-color") {
        Some(color) => color,
        _ => return,
    };
    let d = &layout_box.dimensions;
    let border_box = d.border_box();
    // The four edge rectangles, in the same order the commands were
    // originally emitted: left, right, top, bottom.
    let edges = [
        Rect {
            x: border_box.x,
            y: border_box.y,
            width: d.border.left,
            height: border_box.height,
        },
        Rect {
            x: border_box.x + border_box.width - d.border.right,
            y: border_box.y,
            width: d.border.right,
            height: border_box.height,
        },
        Rect {
            x: border_box.x,
            y: border_box.y,
            width: border_box.width,
            height: d.border.top,
        },
        Rect {
            x: border_box.x,
            y: border_box.y + border_box.height - d.border.bottom,
            width: border_box.width,
            height: d.border.bottom,
        },
    ];
    for rect in edges {
        list.push(DisplayCommand::SolidColor(color, rect));
    }
}
|
use table::{TableRow, TableHeader, Table};
use definitions::{ResultColumn, RusqlStatement, InsertDef, SelectDef};
use definitions::{AlterTableDef, AlterTable, Expression, FromClause, JoinOperator};
use definitions::{DeleteDef, InsertDataSource, UpdateDef, Order, JoinConstraint};
use definitions::{BinaryOperator};
use expressions::{ExpressionResult, ExpressionEvaluator, expr_to_literal, result_to_literal};
use rusql::Rusql;
peg_file! parser("sql.rustpeg");
/// Parses `sql_str` and executes each statement in order against `db`.
/// Returns the result table of the FIRST `SELECT` encountered — note this
/// returns early, so statements after that `SELECT` are not executed.
/// Returns `None` when there is no `SELECT` or when parsing fails (the parse
/// error is printed to stdout rather than propagated).
pub fn rusql_exec<F: FnMut(&TableRow, &TableHeader)>(db: &mut Rusql, sql_str: &str, callback: F) -> Option<Table> {
    match parser::rusql_parse(sql_str) {
        Ok(res) => {
            for stmt in res.into_iter() {
                match stmt {
                    RusqlStatement::AlterTable(alter_table_def) => alter_table(db, alter_table_def),
                    RusqlStatement::CreateTable(table_def) => db.create_table(table_def),
                    RusqlStatement::Delete(delete_def) => delete(db, delete_def),
                    RusqlStatement::DropTable(drop_table_def) => db.drop_table(&drop_table_def.name),
                    RusqlStatement::Insert(insert_def) => insert(db, insert_def),
                    RusqlStatement::Select(select_def) => return Some(select(db, select_def, callback)),
                    RusqlStatement::Update(update_def) => update(db, update_def),
                }
            }
        }
        Err(e) => println!("syntax error: {}", e),
    }
    None
}
/// Applies an ALTER TABLE statement: either renames the table or appends a
/// new column to its header.
fn alter_table(db: &mut Rusql, alter_table_def: AlterTableDef) {
    match alter_table_def.mode {
        AlterTable::RenameTo(new_name) => db.rename_table(&alter_table_def.name, new_name),
        AlterTable::AddColumn(column_def) => db.get_mut_table(&alter_table_def.name)
                                               .add_column(column_def),
    }
}
/// Applies a DELETE statement: removes the rows matching the WHERE clause,
/// or clears the whole table when no WHERE clause is given.
fn delete(db: &mut Rusql, delete_def: DeleteDef) {
    let table = db.get_mut_table(&delete_def.name);
    if let Some(ref expr) = delete_def.where_expr {
        // FIXME just making the borrow checker happy...
        // (the header clone avoids borrowing `table` inside its own closure)
        let header = table.header.clone();
        table.delete_where(|row| ExpressionEvaluator::new(row, &header).eval_bool(expr));
    } else {
        table.clear();
    }
}
/// Applies an INSERT statement, either from literal VALUES or from the rows
/// produced by a nested SELECT.
fn insert(db: &mut Rusql, insert_def: InsertDef) {
    match insert_def.data_source {
        InsertDataSource::Values(column_data) => {
            let mut table = db.get_mut_table(&insert_def.table_name);
            table.insert(column_data, &insert_def.column_names);
        }
        InsertDataSource::Select(select_def) => {
            // Materialize the SELECT first (no-op callback), then copy its
            // rows into the destination table.
            let results_table = select(db, select_def, |_,_| {});
            let mut table = db.get_mut_table(&insert_def.table_name);
            for (_, row) in results_table.data.into_iter() {
                table.push_row(row);
            }
        }
        // Any other data source variant is silently ignored.
        _ => {}
    }
}
/// Applies an UPDATE statement: for each row passing the optional WHERE
/// clause, evaluates every SET expression and overwrites the named column.
fn update(db: &mut Rusql, update_def: UpdateDef) {
    let mut table = db.get_mut_table(&update_def.name);
    for (_, row) in table.data.iter_mut() {
        if let Some(ref expr) = update_def.where_expr {
            if !ExpressionEvaluator::new(row, &table.header).eval_bool(expr) {
                continue;
            }
        }
        for &(ref name, ref expr) in update_def.set.iter() {
            // NOTE(review): `unwrap` panics if a SET clause names a column
            // that does not exist in the table header.
            let x = table.header.iter().position(|ref cols| &cols.name == name).unwrap();
            row[x] = expr_to_literal(expr);
        }
    }
}
/// Recursively builds the Cartesian product of `tables`' rows into
/// `input_product`. `new_row_opt` carries the partial row accumulated from
/// the tables consumed so far; when no tables remain it is pushed as a
/// complete result row.
fn product(tables: Vec<&Table>, input_product: &mut Table, new_row_opt: Option<TableRow>) {
    let mut remaining = tables.clone();
    if remaining.len() == 0 {
        // Base case: the accumulated row (if any) is complete.
        if let Some(new_row) = new_row_opt {
            input_product.push_row(new_row);
        }
    } else {
        // Take the first table and recurse once per row, extending the
        // partial row with that row's cells.
        let table = remaining.remove(0);
        for row in table.data.values() {
            let mut new_row: TableRow = if let Some(ref new_row) = new_row_opt {
                new_row.clone()
            } else {
                Vec::new()
            };
            new_row.push_all(&*row.clone());
            product(remaining.clone(), input_product, Some(new_row));
        }
    }
}
/// Executes a SELECT statement and returns the result table. `callback` is
/// invoked once for each result row together with the result header.
fn select<F: FnMut(&TableRow, &TableHeader)>(db: &mut Rusql, select_def: SelectDef, mut callback: F) -> Table {
    let mut source_tables: Vec<&Table> = Vec::new();
    // FROM clause: cartesian product of the referenced tables.
    let mut joined = generate_inputs(db, &mut source_tables, &select_def);
    // WHERE clause: drop non-matching rows in place.
    filter_inputs(&mut joined, &source_tables, &select_def);
    // Result columns + ORDER BY.
    let results = generate_result_set(joined, &source_tables, &select_def);
    for row in results.data.values() {
        callback(row, &results.header);
    }
    results
}
/// Derives equality JOIN constraints for a NATURAL JOIN: for every column
/// name shared by two different tables, adds an `a.col == b.col` constraint.
/// NOTE(review): each unordered table pair is visited in both orders, so every
/// shared column yields two symmetric (redundant) constraints — confirm that
/// is harmless downstream.
fn natural_join(tables: &Vec<&Table>, constraints: &mut Vec<JoinConstraint>) {
    // FIXME ...
    // (table owning the column, other table, shared column name)
    let mut columns: Vec<(String, String, String)> = Vec::new();
    for table in tables.iter() {
        for other in tables.iter() {
            if table == other {
                continue;
            }
            for col in table.header.iter() {
                for other_col in other.header.iter() {
                    if col.name == other_col.name {
                        columns.push((table.name.clone(), other.name.clone(), other_col.name.clone()));
                    }
                }
            }
        }
    }
    // Turn each shared column into an `ON table1.col = table2.col` constraint.
    for (table1, table2, column_name) in columns.into_iter() {
        constraints.push(JoinConstraint::On(
            Expression::BinaryOperator((BinaryOperator::Equals,
                                        box Expression::TableName((table1, box Expression::ColumnName(column_name.clone()))),
                                        box Expression::TableName((table2, box Expression::ColumnName(column_name)))))));
    }
}
/// Builds the input row set for a SELECT from its FROM clause: the cartesian
/// product of the referenced tables, with JOIN constraints applied. Without a
/// FROM clause the result is a single empty row (so expressions can still be
/// evaluated once). Referenced tables are also pushed into `input_tables` for
/// later column-name resolution.
fn generate_inputs<'a>(db: &'a Rusql, input_tables: &mut Vec<&'a Table>, select_def: &SelectDef) -> Table {
    // https://www.sqlite.org/lang_select.html#fromclause
    let mut input_header: TableHeader = Vec::new();
    if let Some(ref from_clause) = select_def.from_clause {
        // FIXME CLEANUP PLZ
        match from_clause {
            // Plain `FROM a, b, ...`: concatenate headers, take the product.
            &FromClause::TableOrSubquery(ref table_or_subquery) => {
                for name in table_or_subquery.iter() {
                    let table = db.get_table(name);
                    input_tables.push(table);
                    input_header.push_all(&*table.header.clone());
                }
                let mut input_product = Table::new_result_table(input_header);
                product(input_tables.clone(), &mut input_product, None);
                input_product
            },
            // JOIN: product of all joined tables, then filter by constraints.
            &FromClause::JoinClause(ref name, ref join_clauses) => {
                let table = db.get_table(name);
                let mut constraints: Vec<JoinConstraint> = Vec::new();
                input_tables.push(table);
                input_header.push_all(&*table.header.clone());
                if let &Some(ref join_clauses) = join_clauses {
                    for &(ref join_operator, ref name, ref join_const) in join_clauses.iter() {
                        let table = db.get_table(name);
                        input_tables.push(table);
                        input_header.push_all(&*table.header.clone());
                        if let &Some(ref constraint) = join_const {
                            constraints.push(constraint.clone());
                        }
                        // NATURAL JOIN derives constraints from shared column names.
                        match join_operator {
                            &JoinOperator::Natural => natural_join(input_tables, &mut constraints),
                            _ => {}
                        }
                    }
                }
                let mut input_product = Table::new_result_table(input_header);
                product(input_tables.clone(), &mut input_product, None);
                let header = input_product.header.clone();
                // Keep only rows satisfying every ON constraint.
                for constraint in constraints.into_iter() {
                    match constraint {
                        JoinConstraint::On(ref expr) => {
                            input_product.delete_where(|row| {
                                !ExpressionEvaluator::new(row, &header).with_tables(input_tables.clone())
                                                                       .eval_bool(expr)
                            });
                        }
                    }
                }
                input_product
            },
        }
    } else {
        // No FROM clause: a single empty row drives expression evaluation.
        let mut input_product = Table::new_result_table(input_header);
        let empty_row: TableRow = Vec::new();
        input_product.push_row(empty_row);
        input_product
    }
}
/// Applies the WHERE clause (if any) to the joined input rows, dropping every
/// row for which the predicate does not evaluate to true.
fn filter_inputs(input_product: &mut Table, input_tables: &Vec<&Table>, select_def: &SelectDef) {
    // https://www.sqlite.org/lang_select.html#whereclause
    let expr = match select_def.where_expr {
        Some(ref expr) => expr,
        None => return,
    };
    // Clone the header so the closure does not borrow `input_product`.
    let header = input_product.header.clone();
    input_product.delete_where(|row| {
        let keep = ExpressionEvaluator::new(row, &header)
            .with_tables(input_tables.clone())
            .eval_bool(expr);
        !keep
    });
}
/// Projects the filtered input rows into the final result set (either `*` or
/// an explicit expression list) and applies ORDER BY.
/// NOTE(review): the ORDER BY sort is applied only to `rows`, which is filled
/// in the `Asterisk` arm; expression results are pushed straight into
/// `results_table` and bypass the sort — confirm that is intended.
fn generate_result_set(input_product: Table, input_tables: &Vec<&Table>, select_def: &SelectDef) -> Table {
    // https://www.sqlite.org/lang_select.html#resultset
    let results_header: TableHeader = Vec::new();
    let mut results_table = Table::new_result_table(results_header);
    let mut rows: Vec<TableRow> = Vec::new();
    for row in input_product.data.values() {
        match select_def.result_column {
            ResultColumn::Expressions(ref exprs) => generate_row_from_expressions(&mut results_table, row, exprs, input_tables),
            ResultColumn::Asterisk => {
                // `SELECT *`: adopt the input header on the first row.
                if results_table.header.len() == 0 {
                    results_table.header = input_product.header.clone();
                }
                rows.push(row.clone());
            }
        }
    }
    if let Some(ref ordering_terms) = select_def.ordering_terms {
        debug!("ORDER BY");
        // Sorting by each term in reverse, with a stable sort, makes the
        // first ORDER BY term the most significant key.
        let mut ordering_terms = ordering_terms.clone();
        ordering_terms.as_mut_slice().reverse();
        for term in ordering_terms.iter() {
            rows.as_mut_slice().sort_by(|a, b| {
                // FIXME ... ugly.
                // Each term is evaluated to a column index, then the two rows
                // are compared on that column in the requested direction.
                match term.order {
                    Order::Ascending => a[result_to_literal(
                        ExpressionEvaluator::new(a, &results_table.header)
                            .as_column_alias()
                            .eval_expr(&term.expr)
                        ).to_uint()].cmp(&b[result_to_literal(
                            ExpressionEvaluator::new(b, &results_table.header)
                            .as_column_alias()
                            .eval_expr(&term.expr)
                        ).to_uint()]),
                    Order::Descending => b[result_to_literal(
                        ExpressionEvaluator::new(b, &results_table.header)
                            .as_column_alias()
                            .eval_expr(&term.expr)
                        ).to_uint()].cmp(&a[result_to_literal(
                            ExpressionEvaluator::new(a, &results_table.header)
                            .as_column_alias()
                            .eval_expr(&term.expr)
                        ).to_uint()]),
                }
            });
        }
    }
    results_table.insert(rows, &None);
    results_table
}
/// Evaluates `exprs` against `row` and appends the resulting values as a new
/// row in `results_table`. While the result header is still empty (i.e. for
/// the first input row), it also derives the result-set header by evaluating
/// each expression in column-definition mode.
fn generate_row_from_expressions(results_table: &mut Table, row: &TableRow, exprs: &Vec<Expression>, input_tables: &Vec<&Table>) {
    let mut new_row: TableRow = Vec::new();
    // Populate the header only once, for the first input row.
    let push_header = results_table.header.len() == 0;
    for expr in exprs.iter() {
        if push_header {
            match ExpressionEvaluator::new(row, &results_table.header).with_tables(input_tables.clone())
                                       .with_column_def()
                                       .eval_expr(expr) {
                // `def` is moved out of the result, so no clone is needed.
                ExpressionResult::ColumnDef(def) => results_table.header.push(def),
                _ => {}, // FIXME expression yielded no column definition
            }
        }
        match ExpressionEvaluator::new(row, &results_table.header).with_tables(input_tables.clone())
                                   .eval_expr(expr) {
            ExpressionResult::Value(v) => new_row.push(v),
            _ => {}, // FIXME expression yielded no value
        }
    }
    results_table.push_row(new_row);
}
|
/// Placeholder for leisure-related parameters; currently holds no fields.
pub struct LeisureParameters {}

impl LeisureParameters {
    /// Creates an empty parameter set.
    pub fn new() -> LeisureParameters {
        Self {}
    }
}

impl Default for LeisureParameters {
    /// Equivalent to [`LeisureParameters::new`].
    fn default() -> Self {
        Self::new()
    }
}
|
//#![feature(test)]
#![feature(box_patterns)]
#![feature(cow_is_borrowed)]
extern crate fancy_regex;
pub mod builtin;
pub mod error;
pub mod globals;
pub mod loader;
pub mod parse;
pub mod test;
pub mod util;
pub mod value;
pub mod vm;
pub use crate::builtin::enumerator::*;
pub use crate::builtin::fiber::*;
pub use crate::builtin::procobj::*;
pub use crate::builtin::range::*;
pub use crate::builtin::regexp::*;
pub use crate::builtin::string::RString;
pub use crate::error::*;
pub use crate::globals::*;
pub use crate::parse::parser::{LvarCollector, LvarId, ParseResult, Parser};
pub use crate::util::*;
pub use crate::value::*;
pub use crate::vm::*;
|
use std::{
fs::File,
io::prelude::*,
net::{ TcpListener, TcpStream },
};
// let device = rodio::default_output_device().unwrap();
// let sink = Sink::new(&device);
// sink.append(source);
// thread::sleep(Duration::from_secs(10));
// sink.pause();
// thread::sleep(Duration::from_secs(2));
// sink.play();
// sink.sleep_until_end();
// let source = rodio::Decoder::new(BufReader::new(file)).unwrap();
/// Entry point: accepts TCP connections on 127.0.0.1:5533 and streams the
/// music file to each client in turn.
fn main() {
    let listener = TcpListener::bind("127.0.0.1:5533").unwrap();
    for stream in listener.incoming() {
        match stream {
            Ok(stream) => handler(stream),
            // A single failed accept should not bring the whole server down.
            Err(e) => eprintln!("connection failed: {}", e),
        }
    }
}
/// Streams the MP3 file to the connected client in 4 KiB chunks.
fn handler(mut stream: TcpStream) {
    println!("Debug: client connected");
    let mut file = File::open("/storage/music/temp/you! .mp3").unwrap();
    let mut buffer = [0u8; 4096];
    loop {
        match file.read(&mut buffer) {
            Ok(size) if size > 0 => {
                // `write` may send fewer bytes than requested; `write_all`
                // guarantees the whole chunk reaches the socket.
                if stream.write_all(&buffer[..size]).is_err() {
                    // The client hung up; stop streaming instead of panicking.
                    break;
                }
            }
            // End of file or read error: stop streaming.
            _ => break,
        }
    }
}
|
use core::ptr::Unique;
use spin::Mutex;
use arch::cpuio::Port;
/// Prints to the VGA text buffer via `print!`, appending a newline.
macro_rules! println {
    ($fmt:expr) => (print!(concat!($fmt, "\n")));
    ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*));
}
/// Formats the arguments and writes them to the global VGA `WRITER`.
macro_rules! print {
    ($($arg:tt)*) => ({
        use core::fmt::Write;
        $crate::vga::WRITER.lock().write_fmt(format_args!($($arg)*)).unwrap();
    });
}
/// The 16 standard VGA text-mode colors, used to build an attribute byte.
#[repr(u8)]
pub enum Color {
    Black = 0,
    Blue = 1,
    Green = 2,
    Cyan = 3,
    Red = 4,
    Magenta = 5,
    Brown = 6,
    LightGray = 7,
    DarkGray = 8,
    LightBlue = 9,
    LightGreen = 10,
    LightCyan = 11,
    LightRed = 12,
    Pink = 13,
    Yellow = 14,
    White = 15,
}
/// A combined foreground/background VGA attribute byte.
#[derive(Clone, Copy)]
struct ColorCode(u8);

impl ColorCode {
    /// Packs the two colors: high nibble = background, low nibble = foreground.
    const fn new(foreground: Color, background: Color) -> ColorCode {
        ColorCode((background as u8) << 4 | (foreground as u8))
    }
}
/// One cell of the VGA text buffer: an ASCII byte plus its attribute byte.
/// `repr(C)` keeps the field order matching the hardware layout.
#[repr(C)]
#[derive(Clone, Copy)]
struct ScreenChar {
    char: u8,       // ASCII code point displayed in this cell
    color: ColorCode, // foreground/background attribute
}
/// Number of text rows in the VGA buffer.
const BUFFER_HEIGHT: usize = 25;
/// Number of text columns in the VGA buffer.
const BUFFER_WIDTH: usize = 80;
/// VGA CRT controller index port — selects which cursor register to write.
static CURSOR_INDEX: Mutex<Port<u8>> = Mutex::new(unsafe { Port::new(0x3D4) });
/// VGA CRT controller data port — receives the selected register's value.
static CURSOR_DATA: Mutex<Port<u8>> = Mutex::new(unsafe { Port::new(0x3D5) });
/// Global writer targeting the memory-mapped VGA text buffer at 0xb8000.
pub static WRITER: Mutex<Writer> = Mutex::new(Writer {
    column: 0,
    row: 0,
    color: ColorCode::new(Color::LightGreen, Color::Black),
    buffer: unsafe { Unique::new(0xb8000 as *mut _) },
});
/// The VGA text buffer viewed as a flat array of `HEIGHT * WIDTH` cells,
/// indexed as `row * BUFFER_WIDTH + column`.
struct Buffer {
    chars: [ScreenChar; BUFFER_WIDTH * BUFFER_HEIGHT],
}
/// Cursor state plus a pointer to the VGA buffer; all text output goes
/// through this type.
pub struct Writer {
    column: usize,    // current column of the logical cursor
    row: usize,       // current row of the logical cursor
    color: ColorCode, // attribute applied to newly written cells
    buffer: Unique<Buffer>, // raw pointer to the memory-mapped VGA buffer
}
/// Builds a `ScreenChar` cell from an ASCII byte and an attribute.
fn mk_scr_char(c: u8, clr: ColorCode) -> ScreenChar {
    ScreenChar { char: c, color: clr }
}
impl Writer {
    /// Writes a single byte at the cursor position, treating `\n` as a line
    /// break, then scrolls if the cursor ran past the last row.
    /// NOTE(review): the column is never wrapped at BUFFER_WIDTH, so an
    /// overlong line bleeds into the next row's cells — confirm callers
    /// always emit newlines in time.
    pub fn write_byte(&mut self, byte: u8) {
        match byte {
            b'\n' => self.new_line(),
            byte => {
                let row = self.row;
                let col = self.column;
                self.buffer().chars[row * BUFFER_WIDTH + col] = mk_scr_char(byte, self.color);
                self.column += 1;
            }
        }
        self.scroll();
    }
    /// Moves the hardware cursor to the current row/column by programming the
    /// CRT controller (register 0x0F = position low byte, 0x0E = high byte).
    fn update_cursor(&mut self) {
        let position: u16 = (BUFFER_WIDTH * self.row + self.column) as u16;
        CURSOR_INDEX.lock().write(0x0F);
        CURSOR_DATA.lock().write((position & 0xFF) as u8);
        CURSOR_INDEX.lock().write(0x0E);
        CURSOR_DATA.lock().write((position >> 8) as u8);
    }
    /// Returns a mutable view of the VGA buffer.
    // SAFETY(review): assumes `self.buffer` points at valid, mapped memory
    // (0xb8000 for the global WRITER) — confirm for any other construction.
    fn buffer(&mut self) -> &mut Buffer {
        unsafe { self.buffer.get_mut() }
    }
    /// If the cursor fell below the last row, shifts every row up by one,
    /// blanks the bottom row, and clamps the cursor to the last row.
    fn scroll(&mut self) {
        if self.row > BUFFER_HEIGHT - 1 {
            let blank = mk_scr_char(b' ', self.color);
            {
                let buffer = self.buffer();
                // Copy each cell from the row below it.
                for i in 0..((BUFFER_HEIGHT - 1) * (BUFFER_WIDTH)) {
                    buffer.chars[i] = buffer.chars[i + BUFFER_WIDTH];
                }
                // Blank out the freed bottom row.
                for i in ((BUFFER_HEIGHT - 1) * (BUFFER_WIDTH))..(BUFFER_HEIGHT * BUFFER_WIDTH) {
                    buffer.chars[i] = blank;
                }
            }
            self.row = BUFFER_HEIGHT - 1;
        }
    }
    /// Moves the cursor to the start of the next row (scrolling happens in
    /// the caller via `scroll`).
    fn new_line(&mut self) {
        self.column = 0;
        self.row += 1;
    }
    /// Blanks the entire buffer and re-syncs the hardware cursor.
    fn clear(&mut self) {
        let blank = mk_scr_char(b' ', self.color);
        for i in 0..(BUFFER_HEIGHT * BUFFER_WIDTH) {
            self.buffer().chars[i] = blank;
        }
        self.update_cursor();
    }
    /// Blanks only the row the cursor is currently on.
    fn clear_row(&mut self) {
        let blank = mk_scr_char(b' ', self.color);
        let row = self.row;
        for i in (row * BUFFER_WIDTH)..(row * BUFFER_WIDTH + BUFFER_WIDTH) {
            self.buffer().chars[i] = blank;
        }
    }
    /// Writes every byte of `s`, then scrolls and re-syncs the cursor.
    pub fn write_str(&mut self, s: &str) {
        for byte in s.bytes() {
            self.write_byte(byte)
        }
        self.scroll();
        self.update_cursor();
    }
}
/// Formatting support so `write!`/`print!` can target the VGA writer.
impl ::core::fmt::Write for Writer {
    /// Writes every byte of `s` and re-syncs the hardware cursor.
    /// Unlike the inherent `write_str`, this skips the trailing `scroll()`
    /// call; each `write_byte` already scrolls, so no text is lost.
    fn write_str(&mut self, s: &str) -> ::core::fmt::Result {
        for byte in s.bytes() {
            self.write_byte(byte)
        }
        self.update_cursor();
        Ok(())
    }
}
/// Blanks the whole VGA text buffer and resets the hardware cursor.
pub fn clear_screen() {
    let mut writer = WRITER.lock();
    writer.clear();
}
|
//! We define the custom scalars present in the GitHub schema. More precise types could be provided here (see tests), as long as they are deserializable.
/// X.509 certificate, kept as a raw string.
pub type X509Certificate = String;
/// URI scalar, kept as a raw string.
pub type URI = String;
/// HTML fragment, kept as a raw string.
pub type HTML = String;
/// Git timestamp, kept as a raw string.
pub type GitTimestamp = String;
/// Git SSH remote address, kept as a raw string.
pub type GitSSHRemote = String;
/// Git object ID (e.g. a commit SHA), kept as a raw string.
pub type GitObjectID = String;
/// Date scalar, kept as a raw string.
pub type Date = String;
/// Date-and-time scalar, kept as a raw string.
pub type DateTime = String;
|
use std::error::Error;
use std::fmt::Display;
use std::net::SocketAddr;
use std::path::PathBuf;
use native_dialog::FileDialog;
use skulpin::skia_safe::*;
use crate::app::{paint, AppState, StateArgs};
use crate::assets::{Assets, ColorScheme};
use crate::net::{Message, Peer};
use crate::ui::*;
use crate::util::get_window_size;
/// Feedback shown in the lobby's status line.
#[derive(Debug)]
enum Status {
    /// Nothing to report; the status line is not drawn.
    None,
    /// Informational message (info icon, normal text color).
    Info(String),
    /// Error message (error icon, error color).
    Error(String),
}
/// Any displayable error converts into an error status message.
impl<T: Display> From<T> for Status {
    fn from(error: T) -> Self {
        // `to_string` is the idiomatic equivalent of `format!("{}", …)`.
        Self::Error(error.to_string())
    }
}
/// The lobby screen: collects nickname/matchmaker/room details and
/// establishes the peer connection before handing off to the paint state.
pub struct State {
    assets: Assets,
    ui: Ui,
    // UI elements
    nickname_field: TextField,
    matchmaker_field: TextField,
    room_id_field: TextField,
    join_expand: Expand,   // "Join an existing room" section
    host_expand: Expand,   // "Host a new room" section
    // net
    status: Status,        // message shown in the status line
    peer: Option<Peer>,    // set once a host/join attempt succeeds
    connected: bool, // when this is true, the state is transitioned to paint::State
    image_file: Option<PathBuf>, // when this is Some, the canvas is loaded from a file
}
impl State {
    /// Creates the lobby state; `error` (e.g. the reason the previous session
    /// ended) is shown in the status line if present.
    pub fn new(assets: Assets, error: Option<&str>) -> Self {
        Self {
            assets,
            ui: Ui::new(),
            nickname_field: TextField::new(Some("Anon")),
            matchmaker_field: TextField::new(Some("localhost:62137")),
            room_id_field: TextField::new(None),
            join_expand: Expand::new(true),
            host_expand: Expand::new(false),
            status: match error {
                Some(err) => Status::Error(err.into()),
                None => Status::None,
            },
            peer: None,
            connected: false,
            image_file: None,
        }
    }
    /// Draws the application title and the welcome blurb.
    fn process_header(&mut self, canvas: &mut Canvas) {
        self.ui.push_group((self.ui.width(), 72.0), Layout::Vertical);
        self.ui.push_group((self.ui.width(), 56.0), Layout::Freeform);
        self.ui.set_font_size(48.0);
        self.ui.text(
            canvas,
            "NetCanv",
            self.assets.colors.text,
            (AlignH::Left, AlignV::Middle),
        );
        self.ui.pop_group();
        self.ui
            .push_group((self.ui.width(), self.ui.remaining_height()), Layout::Freeform);
        self.ui.text(
            canvas,
            "Welcome! Host a room or join an existing one to start painting.",
            self.assets.colors.text,
            (AlignH::Left, AlignV::Middle),
        );
        self.ui.pop_group();
        self.ui.pop_group();
    }
    /// Draws the main menu (nickname/matchmaker fields plus the join/host
    /// sections) and reacts to button clicks by attempting to connect.
    /// NOTE(review): this always returns `None` and the caller ignores the
    /// return value — confirm whether the return type is still needed.
    fn process_menu(&mut self, canvas: &mut Canvas, input: &mut Input) -> Option<Box<dyn AppState>> {
        self.ui
            .push_group((self.ui.width(), self.ui.remaining_height()), Layout::Vertical);
        let button = ButtonArgs {
            height: 32.0,
            colors: &self.assets.colors.button,
        };
        let textfield = TextFieldArgs {
            width: 160.0,
            colors: &self.assets.colors.text_field,
            hint: None,
        };
        let expand = ExpandArgs {
            label: "",
            font_size: 22.0,
            icons: &self.assets.icons.expand,
            colors: &self.assets.colors.expand,
        };
        // nickname, matchmaker
        self.ui.push_group(
            (self.ui.width(), TextField::labelled_height(&self.ui)),
            Layout::Horizontal,
        );
        self.nickname_field
            .with_label(&mut self.ui, canvas, input, "Nickname", TextFieldArgs {
                hint: Some("Name shown to others"),
                ..textfield
            });
        self.ui.space(16.0);
        self.matchmaker_field
            .with_label(&mut self.ui, canvas, input, "Matchmaker", TextFieldArgs {
                hint: Some("IP address"),
                ..textfield
            });
        self.ui.pop_group();
        self.ui.space(32.0);
        // join room
        if self
            .join_expand
            .process(&mut self.ui, canvas, input, ExpandArgs {
                label: "Join an existing room",
                ..expand
            })
            .mutually_exclude(&mut self.host_expand)
            .expanded()
        {
            self.ui.push_group(self.ui.remaining_size(), Layout::Vertical);
            self.ui.offset((32.0, 8.0));
            self.ui
                .paragraph(canvas, self.assets.colors.text, AlignH::Left, None, &[
                    "Ask your friend for the Room ID",
                    "and enter it into the text field below.",
                ]);
            self.ui.space(16.0);
            self.ui
                .push_group((0.0, TextField::labelled_height(&self.ui)), Layout::Horizontal);
            self.room_id_field
                .with_label(&mut self.ui, canvas, input, "Room ID", TextFieldArgs {
                    hint: Some("4–6 digits"),
                    ..textfield
                });
            self.ui.offset((16.0, 16.0));
            if Button::with_text(&mut self.ui, canvas, input, button, "Join").clicked() {
                match Self::join_room(
                    self.nickname_field.text(),
                    self.matchmaker_field.text(),
                    self.room_id_field.text(),
                ) {
                    Ok(peer) => {
                        self.peer = Some(peer);
                        self.status = Status::None;
                    },
                    Err(status) => self.status = status,
                }
            }
            self.ui.pop_group();
            self.ui.fit();
            self.ui.pop_group();
        }
        self.ui.space(16.0);
        // host room
        if self
            .host_expand
            .process(&mut self.ui, canvas, input, ExpandArgs {
                label: "Host a new room",
                ..expand
            })
            .mutually_exclude(&mut self.join_expand)
            .expanded()
        {
            self.ui.push_group(self.ui.remaining_size(), Layout::Vertical);
            self.ui.offset((32.0, 8.0));
            self.ui
                .paragraph(canvas, self.assets.colors.text, AlignH::Left, None, &[
                    "Create a blank canvas, or load an existing one from file,",
                    "and share the Room ID with your friends.",
                ]);
            self.ui.space(16.0);
            // Shared by the "Host" and "from File" buttons below.
            macro_rules! host_room {
                () => {
                    match Self::host_room(self.nickname_field.text(), self.matchmaker_field.text()) {
                        Ok(peer) => {
                            self.peer = Some(peer);
                            self.status = Status::None;
                        },
                        Err(status) => self.status = status,
                    }
                };
            }
            self.ui
                .push_group((self.ui.remaining_width(), 32.0), Layout::Horizontal);
            if Button::with_text(&mut self.ui, canvas, input, button, "Host").clicked() {
                host_room!();
            }
            self.ui.space(8.0);
            if Button::with_text(&mut self.ui, canvas, input, button, "from File").clicked() {
                match FileDialog::new()
                    .set_filename("canvas.png")
                    .add_filter("Supported image files", &[
                        "png", "jpg", "jpeg", "jfif", "gif", "bmp", "tif", "tiff", "webp", "avif", "pnm", "tga",
                    ])
                    .add_filter("NetCanv canvas", &["toml"])
                    .show_open_single_file()
                {
                    Ok(Some(path)) => {
                        self.image_file = Some(path);
                        host_room!();
                    },
                    Err(error) => self.status = Status::from(error),
                    // Dialog cancelled: nothing to do.
                    _ => (),
                }
            }
            self.ui.pop_group();
            self.ui.fit();
            self.ui.pop_group();
        }
        self.ui.pop_group();
        // Tab-cycling between the three text fields.
        chain_focus(input, &mut [
            &mut self.nickname_field,
            &mut self.matchmaker_field,
            &mut self.room_id_field,
        ]);
        None
    }
    /// Draws the status line (icon plus message), unless the status is `None`.
    fn process_status(&mut self, canvas: &mut Canvas) {
        if !matches!(self.status, Status::None) {
            self.ui.push_group((self.ui.width(), 24.0), Layout::Horizontal);
            let icon = match self.status {
                Status::None => unreachable!(),
                Status::Info(_) => &self.assets.icons.status.info,
                Status::Error(_) => &self.assets.icons.status.error,
            };
            let color = match self.status {
                Status::None => unreachable!(),
                Status::Info(_) => self.assets.colors.text,
                Status::Error(_) => self.assets.colors.error,
            };
            self.ui
                .icon(canvas, icon, color, Some((self.ui.height(), self.ui.height())));
            self.ui.space(8.0);
            self.ui
                .push_group((self.ui.remaining_width(), self.ui.height()), Layout::Freeform);
            let text = match &self.status {
                Status::None => unreachable!(),
                Status::Info(text) | Status::Error(text) => text,
            };
            self.ui.text(canvas, text, color, (AlignH::Left, AlignV::Middle));
            self.ui.pop_group();
            self.ui.pop_group();
        }
    }
    /// Rejects empty nicknames and nicknames longer than 16.
    /// NOTE(review): `len()` counts bytes, so multi-byte names hit the limit
    /// sooner than 16 visible characters — confirm intended.
    fn validate_nickname(nickname: &str) -> Result<(), Status> {
        if nickname.is_empty() {
            return Err(Status::Error("Nickname must not be empty".into()))
        }
        if nickname.len() > 16 {
            return Err(Status::Error(
                "The maximum length of a nickname is 16 characters".into(),
            ))
        }
        Ok(())
    }
    /// Validates the nickname and asks the matchmaker to host a new room.
    fn host_room(nickname: &str, matchmaker_addr_str: &str) -> Result<Peer, Status> {
        Self::validate_nickname(nickname)?;
        Ok(Peer::host(nickname, matchmaker_addr_str)?)
    }
    /// Validates the inputs and asks the matchmaker to join an existing room.
    fn join_room(nickname: &str, matchmaker_addr_str: &str, room_id_str: &str) -> Result<Peer, Status> {
        if !matches!(room_id_str.len(), 4..=6) {
            return Err(Status::Error("Room ID must be a number with 4–6 digits".into()))
        }
        Self::validate_nickname(nickname)?;
        let room_id: u32 = room_id_str
            .parse()
            .map_err(|_| Status::Error("Room ID must be an integer".into()))?;
        Ok(Peer::join(nickname, matchmaker_addr_str, room_id)?)
    }
}
/// The lobby plugs into the application's state machine.
impl AppState for State {
    /// Runs one frame: polls the peer connection for messages, then lays out
    /// and draws the whole lobby UI (header, menu, status, theme toggle).
    fn process(
        &mut self,
        StateArgs {
            canvas,
            coordinate_system_helper,
            input,
        }: StateArgs,
    ) {
        canvas.clear(self.assets.colors.panel);
        // Drain network messages from the matchmaker connection, if any.
        if let Some(peer) = &mut self.peer {
            match peer.tick() {
                Ok(messages) =>
                    for message in messages {
                        match message {
                            Message::Error(error) => self.status = Status::Error(error.into()),
                            // `connected` makes `next_state` switch to paint.
                            Message::Connected => self.connected = true,
                            _ => (),
                        }
                    },
                Err(error) => {
                    self.status = error.into();
                },
            }
        }
        self.ui
            .begin(get_window_size(&coordinate_system_helper), Layout::Freeform);
        self.ui.set_font(self.assets.sans.clone());
        self.ui.set_font_size(14.0);
        self.ui.pad((64.0, 64.0));
        self.ui.push_group((self.ui.width(), 384.0), Layout::Vertical);
        self.ui.align((AlignH::Left, AlignV::Middle));
        self.process_header(canvas);
        self.ui.space(24.0);
        self.process_menu(canvas, input);
        self.ui.space(24.0);
        self.process_status(canvas);
        self.ui.pop_group();
        // Dark/light theme toggle button in the top-right corner.
        self.ui.push_group((32.0, self.ui.height()), Layout::Vertical);
        self.ui.align((AlignH::Right, AlignV::Top));
        if Button::with_icon(
            &mut self.ui,
            canvas,
            input,
            ButtonArgs {
                height: 32.0,
                colors: &self.assets.colors.tool_button,
            },
            if self.assets.dark_mode {
                &self.assets.icons.color_switcher.light
            } else {
                &self.assets.icons.color_switcher.dark
            },
        )
        .clicked()
        {
            self.assets.dark_mode = !self.assets.dark_mode;
            if self.assets.dark_mode {
                self.assets.colors = ColorScheme::dark();
            } else {
                self.assets.colors = ColorScheme::light();
            }
        }
        self.ui.pop_group();
    }
    /// Transitions to the paint state once the peer reports `Connected`;
    /// otherwise stays in the lobby.
    fn next_state(self: Box<Self>) -> Box<dyn AppState> {
        if self.connected {
            Box::new(paint::State::new(self.assets, self.peer.unwrap(), self.image_file))
        } else {
            self
        }
    }
}
|
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
extern crate test;
use maplit::hashmap;
use oak_functions_abi::proto::{Request, Response, StatusCode};
use oak_functions_loader::{
grpc::{create_and_start_grpc_server, create_wasm_handler},
logger::Logger,
lookup::{parse_lookup_entries, LookupData, LookupDataAuth, LookupDataSource},
server::{apply_policy, format_bytes, Policy, WasmHandler},
};
use prost::Message;
use std::{
convert::TryInto,
io::{Seek, Write},
net::{Ipv6Addr, SocketAddr},
sync::Arc,
time::Duration,
};
use test::Bencher;
use test_utils::make_request;
/// Path to the example Wasm module that these tests compile and load.
const MANIFEST_PATH: &str = "examples/key_value_lookup/module/Cargo.toml";
/// With generous policy limits, the request succeeds and the elapsed time
/// stays close to (but above) the configured constant processing time.
#[tokio::test]
async fn test_valid_policy() {
    // Policy values are large enough to allow successful serving of the request, and responding
    // with the actual response from the Wasm module.
    let constant_processing_time = Duration::from_millis(200);
    let policy = Policy {
        constant_response_size_bytes: 100,
        constant_processing_time,
    };
    let scenario = |server_port: u16| async move {
        let result = make_request(server_port, br#"key_1"#).await;
        // Check that the processing time is within a reasonable range of
        // `constant_processing_time` specified in the policy.
        assert!(result.elapsed > constant_processing_time);
        // Allow at most 5% overshoot over the constant processing time.
        assert!(
            (result.elapsed.as_millis() as f64)
                < 1.05 * constant_processing_time.as_millis() as f64,
            "elapsed time is: {:?}",
            result.elapsed
        );
        let response = result.response;
        assert_eq!(StatusCode::Success as i32, response.status);
        assert_eq!(
            std::str::from_utf8(response.body().unwrap()).unwrap(),
            r#"value_1"#
        );
    };
    run_scenario_with_policy(scenario, policy).await;
}
/// With a too-small constant processing time, the policy enforcer replies
/// with `PolicyTimeViolation` instead of the real response.
#[tokio::test(flavor = "multi_thread", worker_threads = 4)]
async fn test_long_response_time() {
    // The `constant_processing_time` is too low.
    let constant_processing_time = Duration::from_millis(10);
    let policy = Policy {
        constant_response_size_bytes: 100,
        constant_processing_time,
    };
    // So we expect the request to fail, with `response not available error`.
    let scenario = |server_port: u16| async move {
        let result = make_request(server_port, br#"key_1"#).await;
        // Check the elapsed time, allowing a margin of 10ms.
        let margin = Duration::from_millis(10);
        assert!(
            result.elapsed < constant_processing_time + margin,
            "elapsed: {:?}",
            result.elapsed
        );
        let response = result.response;
        assert_eq!(StatusCode::PolicyTimeViolation as i32, response.status);
        assert_eq!(
            std::str::from_utf8(response.body().unwrap()).unwrap(),
            "Reason: response not available."
        );
    };
    run_scenario_with_policy(scenario, policy).await;
}
/// Starts the server with the given policy, and runs the given test scenario.
///
/// A normal test scenario makes any number of requests and checks the responses. It has to be an
/// async function, with a single `u16` input argument as the `server_port`, and returning the unit
/// type (`()`).
async fn run_scenario_with_policy<F, S>(test_scenario: F, policy: Policy)
where
    F: FnOnce(u16) -> S,
    S: std::future::Future<Output = ()>,
{
    let server_port = test_utils::free_port();
    let address = SocketAddr::from((Ipv6Addr::UNSPECIFIED, server_port));
    // Locate the example Wasm module relative to the workspace root.
    let mut manifest_path = std::env::current_dir().unwrap();
    manifest_path.pop();
    manifest_path.push(MANIFEST_PATH);
    let wasm_module_bytes =
        test_utils::compile_rust_wasm(manifest_path.to_str().expect("Invalid target dir"), false)
            .expect("Couldn't read Wasm module");
    let logger = Logger::for_test();
    // Serve the lookup data over a mock HTTP static file server.
    let mock_static_server = Arc::new(test_utils::MockStaticServer::default());
    let mock_static_server_clone = mock_static_server.clone();
    let static_server_port = test_utils::free_port();
    let mock_static_server_background = test_utils::background(|term| async move {
        mock_static_server_clone
            .serve(static_server_port, term)
            .await
    });
    mock_static_server.set_response_body(test_utils::serialize_entries(hashmap! {
        b"key_0".to_vec() => br#"value_0"#.to_vec(),
        b"key_1".to_vec() => br#"value_1"#.to_vec(),
        b"key_2".to_vec() => br#"value_2"#.to_vec(),
    }));
    let lookup_data = Arc::new(LookupData::new_empty(
        Some(LookupDataSource::Http {
            url: format!("http://localhost:{}", static_server_port),
            auth: LookupDataAuth::default(),
        }),
        logger.clone(),
    ));
    lookup_data.refresh().await.unwrap();
    let tee_certificate = vec![];
    let wasm_handler = create_wasm_handler(&wasm_module_bytes, lookup_data, vec![], logger.clone())
        .expect("could not create wasm_handler");
    // Run the gRPC server on a background task for the scenario's duration.
    let server_background = test_utils::background(|term| async move {
        create_and_start_grpc_server(
            &address,
            wasm_handler,
            tee_certificate,
            policy,
            term,
            logger,
        )
        .await
    });
    // Wait for the server thread to make progress before starting the client. This is needed for a
    // more accurate measurement of the processing time, and to avoid `connection refused` from the
    // client in tests that run with multiple threads.
    tokio::time::sleep(Duration::from_secs(1)).await;
    test_scenario(server_port).await;
    let res = server_background.terminate_and_join().await;
    assert!(res.is_ok());
    // NOTE(review): the static server's shutdown result is ignored here.
    mock_static_server_background.terminate_and_join().await;
}
/// Benchmarks a full `handle_invoke` round trip through the Wasm handler, and
/// asserts that the mean latency stays under a fixed threshold.
#[bench]
fn bench_wasm_handler(bencher: &mut Bencher) {
    // Compile the example module once, outside the measured closure.
    let mut manifest_path = std::env::current_dir().unwrap();
    manifest_path.pop();
    manifest_path.push(MANIFEST_PATH);
    let wasm_module_bytes =
        test_utils::compile_rust_wasm(manifest_path.to_str().expect("Invalid target dir"), true)
            .expect("Couldn't read Wasm module");
    let summary = bencher.bench(|bencher| {
        let logger = Logger::for_test();
        let static_server_port = test_utils::free_port();
        let lookup_data = Arc::new(LookupData::new_empty(
            Some(LookupDataSource::Http {
                url: format!("http://localhost:{}", static_server_port),
                auth: LookupDataAuth::default(),
            }),
            logger.clone(),
        ));
        let wasm_handler =
            WasmHandler::create(&wasm_module_bytes, lookup_data.clone(), vec![], logger)
                .expect("Couldn't create the server");
        let rt = tokio::runtime::Runtime::new().unwrap();
        // Populate the lookup data from a short-lived mock static server,
        // then shut that server down before measuring.
        rt.block_on(async {
            let (terminate_static_server_tx, terminate_static_server_rx) =
                tokio::sync::oneshot::channel::<()>();
            let mock_static_server = Arc::new(test_utils::MockStaticServer::default());
            let mock_static_server_clone = mock_static_server.clone();
            let static_server_join_handle = tokio::spawn(async move {
                mock_static_server_clone
                    .serve(static_server_port, async {
                        terminate_static_server_rx.await.unwrap()
                    })
                    .await
            });
            mock_static_server.set_response_body(test_utils::serialize_entries(hashmap! {
                b"key_0".to_vec() => br#"value_0"#.to_vec(),
                b"key_1".to_vec() => br#"value_1"#.to_vec(),
                b"key_2".to_vec() => br#"value_2"#.to_vec(),
            }));
            lookup_data.refresh().await.unwrap();
            terminate_static_server_tx.send(()).unwrap();
            static_server_join_handle.await.unwrap();
        });
        // The measured part: one request/response round trip per iteration.
        bencher.iter(|| {
            let request = Request {
                body: br#"key_1"#.to_vec(),
            };
            let resp = rt
                .block_on(wasm_handler.clone().handle_invoke(request))
                .unwrap();
            assert_eq!(resp.status, StatusCode::Success as i32);
            assert_eq!(std::str::from_utf8(&resp.body).unwrap(), r#"value_1"#);
        });
    });
    // When running `cargo test` this benchmark test gets executed too, but `summary` will be `None`
    // in that case. So, here we first check that `summary` is not empty.
    if let Some(summary) = summary {
        // `summary.mean` is in nanoseconds, even though it is not explicitly documented in
        // https://doc.rust-lang.org/test/stats/struct.Summary.html.
        let elapsed = Duration::from_nanos(summary.mean as u64);
        // We expect the `mean` time for loading the test Wasm module and running its main function
        // to be less than a fixed threshold.
        assert!(
            elapsed < Duration::from_millis(5),
            "elapsed time: {:.0?}",
            elapsed
        );
    }
}
/// Parsing a zero-byte input yields an empty entry map.
#[test]
fn parse_lookup_entries_empty() {
    let no_bytes = vec![];
    let entries = parse_lookup_entries(no_bytes.as_ref()).unwrap();
    assert!(entries.is_empty());
}
// Fix the serialized representation for testing by manually annotating individual bytes.
//
// See https://developers.google.com/protocol-buffers/docs/encoding#structure.
/// `Entry { key: [14, 12], value: [19, 88] }`, length-delimited.
const ENTRY_0_LENGTH_DELIMITED: &[u8] = &[
    8, // Message total length.
    10, // Field 1 key: (1<<3) | 2
    2, // Field 1 length.
    14, 12, // Field 1 value.
    18, // Field 2 key: (2<<3) | 2
    2, // Field 2 length.
    19, 88, // Field 2 value.
];
/// `Entry { key: b"Harry", value: b"Potter" }`, length-delimited.
const ENTRY_1_LENGTH_DELIMITED: &[u8] = &[
    15, // Message total length.
    10, // Field 1 key: (1<<3) | 2
    5, // Field 1 length.
    b'H', b'a', b'r', b'r', b'y', // Field 1 value.
    18, // Field 2 key: (2<<3) | 2
    6, // Field 2 length.
    b'P', b'o', b't', b't', b'e', b'r', // Field 2 value.
];
// Ensure that the serialized representation is correct.
/// Encoding each entry with prost reproduces the hand-annotated byte strings.
#[test]
fn check_serialized_lookup_entries() {
    let cases: Vec<(Vec<u8>, Vec<u8>, &[u8])> = vec![
        (vec![14, 12], vec![19, 88], ENTRY_0_LENGTH_DELIMITED),
        (b"Harry".to_vec(), b"Potter".to_vec(), ENTRY_1_LENGTH_DELIMITED),
    ];
    for (key, value, expected) in cases {
        let entry = oak_functions_abi::proto::Entry { key, value };
        let mut encoded = vec![];
        entry.encode_length_delimited(&mut encoded).unwrap();
        assert_eq!(encoded, expected);
    }
}
/// Two concatenated length-delimited entries parse into two map entries.
#[test]
fn parse_lookup_entries_multiple_entries() {
    let buf: Vec<u8> = [ENTRY_0_LENGTH_DELIMITED, ENTRY_1_LENGTH_DELIMITED].concat();
    let entries = parse_lookup_entries(buf.as_ref()).unwrap();
    assert_eq!(entries.len(), 2);
    assert_eq!(entries.get(&vec![14, 12]), Some(&vec![19, 88]));
    assert_eq!(entries.get(&b"Harry".to_vec()), Some(&b"Potter".to_vec()));
}
/// Valid entries followed by garbage bytes make the whole parse fail.
#[test]
fn parse_lookup_entries_multiple_entries_trailing() {
    let mut buf: Vec<u8> = [ENTRY_0_LENGTH_DELIMITED, ENTRY_1_LENGTH_DELIMITED].concat();
    // Add invalid trailing bytes.
    buf.extend_from_slice(&[1, 2, 3]);
    assert!(parse_lookup_entries(buf.as_ref()).is_err());
}
/// Arbitrary bytes that are not protobuf entries fail to parse.
#[test]
fn parse_lookup_entries_invalid() {
    // Invalid bytes.
    let garbage = vec![1, 2, 3];
    assert!(parse_lookup_entries(garbage.as_ref()).is_err());
}
/// `LookupData::refresh` re-fetches the entries from an HTTP source and fully
/// replaces (not merges) the in-memory map each time.
#[tokio::test]
async fn lookup_data_refresh_http() {
    let mock_static_server = Arc::new(test_utils::MockStaticServer::default());
    let static_server_port = test_utils::free_port();
    let mock_static_server_clone = mock_static_server.clone();
    let mock_static_server_background = test_utils::background(|term| async move {
        mock_static_server_clone
            .serve(static_server_port, term)
            .await
    });
    let lookup_data = crate::LookupData::new_empty(
        Some(LookupDataSource::Http {
            url: format!("http://localhost:{}", static_server_port),
            auth: LookupDataAuth::default(),
        }),
        Logger::for_test(),
    );
    assert!(lookup_data.is_empty());
    // Initially empty file, no entries.
    lookup_data.refresh().await.unwrap();
    assert!(lookup_data.is_empty());
    // Single entry.
    mock_static_server.set_response_body(ENTRY_0_LENGTH_DELIMITED.to_vec());
    lookup_data.refresh().await.unwrap();
    assert_eq!(lookup_data.len(), 1);
    assert_eq!(lookup_data.get(&[14, 12]), Some(vec![19, 88]));
    assert_eq!(lookup_data.get(b"Harry"), None);
    // Empty file again.
    mock_static_server.set_response_body(vec![]);
    lookup_data.refresh().await.unwrap();
    assert!(lookup_data.is_empty());
    // A different entry — the earlier one must be gone after refresh.
    mock_static_server.set_response_body(ENTRY_1_LENGTH_DELIMITED.to_vec());
    lookup_data.refresh().await.unwrap();
    assert_eq!(lookup_data.len(), 1);
    assert_eq!(lookup_data.get(&[14, 12]), None);
    assert_eq!(lookup_data.get(b"Harry"), Some(b"Potter".to_vec()));
    // Two entries.
    let mut buf = ENTRY_0_LENGTH_DELIMITED.to_vec();
    buf.append(&mut ENTRY_1_LENGTH_DELIMITED.to_vec());
    mock_static_server.set_response_body(buf);
    lookup_data.refresh().await.unwrap();
    assert_eq!(lookup_data.len(), 2);
    assert_eq!(lookup_data.get(&[14, 12]), Some(vec![19, 88]));
    assert_eq!(lookup_data.get(b"Harry"), Some(b"Potter".to_vec()));
    mock_static_server_background.terminate_and_join().await;
}
// Same refresh scenarios as the HTTP test, but sourced from a temp file.
// Each scenario truncates + rewinds the file before writing new content.
#[tokio::test]
async fn lookup_data_refresh_file() {
    let temp_file = tempfile::NamedTempFile::new().unwrap();
    let lookup_data = crate::LookupData::new_empty(
        Some(LookupDataSource::File(temp_file.path().to_path_buf())),
        Logger::for_test(),
    );
    assert!(lookup_data.is_empty());
    // Initially empty file, no entries.
    lookup_data.refresh().await.unwrap();
    assert!(lookup_data.is_empty());
    // Single entry.
    temp_file.as_file().set_len(0).unwrap();
    temp_file.as_file().rewind().unwrap();
    temp_file
        .as_file()
        .write_all(ENTRY_0_LENGTH_DELIMITED)
        .unwrap();
    lookup_data.refresh().await.unwrap();
    assert_eq!(lookup_data.len(), 1);
    assert_eq!(lookup_data.get(&[14, 12]), Some(vec![19, 88]));
    assert_eq!(lookup_data.get(b"Harry"), None);
    // Empty file again.
    temp_file.as_file().set_len(0).unwrap();
    temp_file.as_file().rewind().unwrap();
    lookup_data.refresh().await.unwrap();
    assert!(lookup_data.is_empty());
    // A different entry; the previous one must be gone (full replacement).
    temp_file.as_file().set_len(0).unwrap();
    temp_file.as_file().rewind().unwrap();
    temp_file
        .as_file()
        .write_all(ENTRY_1_LENGTH_DELIMITED)
        .unwrap();
    lookup_data.refresh().await.unwrap();
    assert_eq!(lookup_data.len(), 1);
    assert_eq!(lookup_data.get(&[14, 12]), None);
    assert_eq!(lookup_data.get(b"Harry"), Some(b"Potter".to_vec()));
    // Two entries written back-to-back.
    temp_file.as_file().set_len(0).unwrap();
    temp_file.as_file().rewind().unwrap();
    temp_file
        .as_file()
        .write_all(ENTRY_0_LENGTH_DELIMITED)
        .unwrap();
    temp_file
        .as_file()
        .write_all(ENTRY_1_LENGTH_DELIMITED)
        .unwrap();
    lookup_data.refresh().await.unwrap();
    assert_eq!(lookup_data.len(), 2);
    assert_eq!(lookup_data.get(&[14, 12]), Some(vec![19, 88]));
    assert_eq!(lookup_data.get(b"Harry"), Some(b"Potter".to_vec()));
}
// With no configured source, `refresh` must be a no-op and must not error.
#[tokio::test]
async fn lookup_data_refresh_no_lookup_source() {
    let lookup_data = crate::LookupData::new_empty(None, Logger::for_test());
    assert!(lookup_data.is_empty());
    // Still empty, no errors.
    lookup_data.refresh().await.unwrap();
    assert!(lookup_data.is_empty());
}
// `apply_policy` must pad small response bodies up to the constant size and
// replace over-sized success responses with a policy-violation response of
// the same constant size (so response size leaks nothing).
#[tokio::test]
async fn test_apply_policy() {
    // A valid constant response body size
    let size = 50;
    // A valid policy
    let policy = Policy {
        constant_response_size_bytes: size,
        constant_processing_time: Duration::from_millis(10),
    };
    {
        // Wasm response with small enough body is serialized with padding, and no other change
        let small_success_response = Response::create(StatusCode::Success, vec![b'x'; size]);
        let function = async move || Ok(small_success_response);
        let res = apply_policy(policy.try_into().unwrap(), function).await;
        assert!(res.is_ok());
        let response = res.unwrap();
        assert_eq!(response.status, StatusCode::Success as i32);
        assert_eq!(response.body.len(), policy.constant_response_size_bytes);
    }
    {
        // Success Wasm response with a large body is discarded, and replaced with an error response
        let large_success_response = Response::create(StatusCode::Success, vec![b'x'; size + 1]);
        let function = async move || Ok(large_success_response);
        let res = apply_policy(policy.try_into().unwrap(), function).await;
        assert!(res.is_ok());
        let response = res.unwrap();
        assert_eq!(response.status, StatusCode::PolicySizeViolation as i32);
        assert_eq!(response.body.len(), policy.constant_response_size_bytes);
    }
}
// `format_bytes` renders valid UTF-8 as text and falls back to a numeric
// byte-list representation for invalid UTF-8.
#[test]
fn test_format_bytes() {
    // Valid UTF-8 string.
    assert_eq!("🚀oak⭐", format_bytes("🚀oak⭐".as_bytes()));
    // Incorrect UTF-8 bytes, as per https://doc.rust-lang.org/std/string/struct.String.html#examples-3.
    assert_eq!("[0, 159, 146, 150]", format_bytes(&[0, 159, 146, 150]));
}
|
// Enable compile-time macro-expansion tracing (nightly-only) so the output
// of `versioned_extern!` can be inspected during the build.
#![feature(trace_macros)]
trace_macros!(true);
#[macro_use]
extern crate native_versioning;
// Minimal stand-in for a C FFI type alias module.
mod c {
    pub type long = u16;
}
// Exercises `versioned_extern!` with statics and functions, covering
// visibility modifiers and item attributes.
versioned_extern! {
    static demo: c::long;
    pub static demo2: usize;
    #[cfg(test)]
    #[doc = "hi"]
    fn f() -> usize;
    pub fn g();
}
pub fn main() { }
|
use builder::Builder;
pub fn gen_intrinsics(builder : &mut Builder) {
let int_type = builder.int32_type();
let string_type = builder.string_type();
let void_type = builder.void_type();
builder.declare_function(
"puts", vec!(string_type), int_type
);
builder.declare_variadic_function(
"sprintf", vec!(string_type, string_type), int_type
);
builder.create_function("print_int", vec!(int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let char_type = fb.char_type();
let buf = fb.array_malloc(char_type, 9, "buf");
let gen_fmt = fb.string("%d", "gen_fmt");
let v = vec!(buf, gen_fmt, fb.get_param(0));
let fmtd = fb.call("sprintf", v, "fmtd");
let p_args = vec!(buf);
fb.call("puts", p_args, "putsres");
let ret = fb.int(0);
fb.ret(ret);
});
builder.create_function("+", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.add_op(left_val, right_val, "tmp");
fb.ret(tmp);
});
builder.create_function("-", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.sub_op(left_val, right_val, "tmp");
fb.ret(tmp);
});
builder.create_function("*", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.mul_op(left_val, right_val, "tmp");
fb.ret(tmp);
});
builder.create_function("/", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.div_op(left_val, right_val, "tmp");
fb.ret(tmp);
});
builder.create_function("%", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.mod_op(left_val, right_val, "tmp");
fb.ret(tmp);
});
builder.create_function("<", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.cmp_less_than(left_val, right_val, "tmp");
let int32_tmp = fb.zext(tmp, int_type, "cast_tmp");
fb.ret(int32_tmp);
});
builder.create_function(">", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.cmp_greater_than(left_val, right_val, "tmp");
let int32_tmp = fb.zext(tmp, int_type, "cast_tmp");
fb.ret(int32_tmp);
});
builder.create_function("<=", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.cmp_less_than_eq(left_val, right_val, "tmp");
let int32_tmp = fb.zext(tmp, int_type, "cast_tmp");
fb.ret(int32_tmp);
});
builder.create_function(">=", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.cmp_greater_than_eq(left_val, right_val, "tmp");
let int32_tmp = fb.zext(tmp, int_type, "cast_tmp");
fb.ret(int32_tmp);
});
builder.create_function("==", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.cmp_eq(left_val, right_val, "tmp");
let int32_tmp = fb.zext(tmp, int_type, "cast_tmp");
fb.ret(int32_tmp);
});
builder.create_function("!=", vec!(int_type, int_type), int_type, |fb : &mut Builder|{
fb.goto_first_block();
let left_val = fb.get_param(0);
let right_val = fb.get_param(1);
let tmp = fb.cmp_not_eq(left_val, right_val, "tmp");
let int32_tmp = fb.zext(tmp, int_type, "cast_tmp");
fb.ret(int32_tmp);
});
} |
use std::{
borrow::Borrow,
collections::{BTreeMap, BTreeSet},
};
use petgraph::{graph::NodeIndex, Graph};
use thiserror::Error;
use crate::{
binding::{apply_bindings, find_bindings, BindingStorage, Formula, FormulaError, ManualAnyFunctionBinding},
expr::{ExprPositionOwned, Expression, ExpressionExtension, ExpressionMeta, PositionError},
parsing::{clear_parsing_info, process_expression_parsing, Math, NamedFormulas, Proof, ProofStep},
utils::{char_index::get_char_range, id::*, span::*},
};
/// Identifies a formula by its module name and 1-based position within that
/// module (positions are validated to be consecutive in `read_math`).
#[derive(Default, Ord, PartialOrd, Debug, Clone, Eq, PartialEq, Hash)]
pub struct FormulaPosition {
    pub module_name: String,
    pub position: usize,
}
/// Errors produced while reading formulas out of a `Math` document.
#[derive(Debug, Error)]
pub enum ReadMathError {
    // Formula numbering inside a module must be consecutive, starting at 1.
    #[error("wrong number, should be {should_be}")]
    WrongNumberInStart { should_be: usize },
    // Wraps a failure from `Formula::new`.
    #[error("{0}")]
    FormulaError(FormulaError),
}
/// Reads every formula from `math`, validating that formulas inside each
/// module are numbered consecutively starting at 1.
///
/// Returns a map from [`FormulaPosition`] to parsed [`Formula`], or — if
/// anything failed — *all* errors encountered (numbering mismatches and
/// formula construction failures), each with its source span.
pub fn read_math(math: &Math) -> Result<BTreeMap<FormulaPosition, Formula>, Vec<Spanned<ReadMathError>>> {
    let mut errors = Vec::new();
    let mut result = BTreeMap::new();
    for NamedFormulas { name, formulas } in &math.0 {
        for (index, formula) in formulas.iter().enumerate() {
            // Positions are 1-based and must match the declared number.
            if index + 1 != formula.position.inner as usize {
                errors.push(Spanned::new(
                    ReadMathError::WrongNumberInStart { should_be: index + 1 },
                    formula.position.span.clone(),
                ));
                continue;
            }
            let position = FormulaPosition {
                module_name: name.clone(),
                position: formula.position.inner as usize,
            };
            let formula = match Formula::new(
                clear_parsing_info(formula.formula.inner.left.clone()),
                clear_parsing_info(formula.formula.inner.right.clone()),
            ) {
                Ok(x) => x,
                Err(x) => {
                    errors.push(Spanned::new(ReadMathError::FormulaError(x), formula.formula.span.clone()));
                    continue;
                },
            };
            result.insert(position, formula);
        }
    }
    // `is_empty` is the idiomatic (clippy-clean) form of `len() == 0`.
    if errors.is_empty() { Ok(result) } else { Err(errors) }
}
/// Detects circular proof dependencies: builds a directed graph with an edge
/// from each formula to every formula its proof uses, then runs a cycle check.
///
/// Returns `Err("proof has cycles")` when a cycle exists, `Ok(())` otherwise.
pub fn proofs_has_cycles(math: &Math) -> Result<(), &'static str> {
    let mut id_generator = IdGenerator::default();
    let mut edges = vec![];
    for NamedFormulas { name, formulas } in &math.0 {
        for (index, formula) in formulas.iter().enumerate() {
            // The id generator gives each FormulaPosition a stable numeric id,
            // reused as the petgraph node index.
            let current_position = NodeIndex::new(
                id_generator.get_or_add_id(FormulaPosition { module_name: name.clone(), position: index + 1 }) as usize,
            );
            if let Some(proof) = &formula.proof {
                // One edge per proof step: current formula -> formula it uses.
                for ProofStep { used_formula, .. } in &proof.inner.steps {
                    let used_position = NodeIndex::new(id_generator.get_or_add_id(FormulaPosition {
                        module_name: used_formula.inner.module_name.clone(),
                        position: used_formula.inner.position,
                    }) as usize);
                    edges.push((current_position, used_position));
                }
            }
        }
    }
    let graph = Graph::<(), ()>::from_edges(&edges);
    if petgraph::algo::is_cyclic_directed(&graph) { Err("proof has cycles") } else { Ok(()) }
}
/// Everything that can go wrong while checking a single proof.
#[derive(Debug, Error)]
pub enum ProofError {
    #[error("position is not found")]
    PositionNotFound,
    #[error("result of this step is not equal to expected, actual is {actual}")]
    StepWrong { actual: Expression },
    #[error("result of latest step is not equal to right side of formula, actual is {actual}")]
    LatestStepWrong { actual: Expression },
    #[error("formula by this name is not found")]
    FormulaNotFound,
    #[error("not all bindings provided")]
    NotAllBindingsProvided, // TODO add which bindings needed
    #[error("not all function bindings provided")]
    NotAllFunctionBindingsProvided,
    // Indicates a checker bug (inconsistent position data), not a user error.
    #[error("internal error about getting part of formula, in {position:?}, on {error_in:?}")]
    InternalError {
        position: ExprPositionOwned,
        error_in: PositionError,
    },
    #[error("cannot match formula with this equation")]
    CannotFindBindings,
}
/// Verifies one proof: starting from the formula's left side, every step must
/// (1) restate the current expression, (2) pick a subexpression by position,
/// (3) rewrite it using a referenced global formula with fully-specified
/// bindings. The final expression must equal the formula's right side.
pub fn is_proof_correct(
    formula: &crate::parsing::Formula,
    proof: &Spanned<Proof>,
    global_formulas: &BTreeMap<FormulaPosition, Formula>,
) -> Result<(), Spanned<ProofError>> {
    let mut current = clear_parsing_info(formula.left.clone());
    for ProofStep { string, expr, position, used_formula, bindings, function_bindings } in &proof.inner.steps {
        let expr_parsing = &expr.inner;
        let expr_span = expr.span.clone();
        // Resolve the step's textual position marker (a char range within the
        // step's source string) to an expression-tree position.
        let (mut expr, position) = {
            let (expr, positions) = process_expression_parsing(expr.inner.clone());
            let position = positions
                .iter()
                .find(|(_, range)| {
                    get_char_range(&string, range.0.clone())
                        .map(|x| x == position.inner)
                        .unwrap_or(false)
                })
                .ok_or(Spanned::new(ProofError::PositionNotFound, position.span.clone()))?
                .0
                .clone();
            (expr, position)
        };
        // The step's written expression must equal the current proof state.
        if expr != current {
            return Err(Spanned::new(ProofError::StepWrong { actual: current }, expr_span));
        }
        // Look up the referenced formula and validate the provided bindings.
        let formula = {
            let formula_position = FormulaPosition {
                module_name: used_formula.inner.module_name.clone(),
                position: used_formula.inner.position,
            };
            let mut result = global_formulas
                .get(&formula_position)
                .ok_or(Spanned::new(ProofError::FormulaNotFound, used_formula.span.clone()))?
                .clone();
            // Applying a formula right-to-left swaps its two sides.
            if !used_formula.inner.left_to_right {
                std::mem::swap(&mut result.left, &mut result.right);
            }
            // Every unknown pattern must have exactly one provided binding.
            let sorted_unknown_names: BTreeSet<String> = result.left.unknown_patterns_names.iter().cloned().collect();
            let sorted_used_names: BTreeSet<String> = bindings.inner.iter().map(|b| b.pattern_name.clone()).collect();
            if sorted_unknown_names != sorted_used_names {
                return Err(Spanned::new(ProofError::NotAllBindingsProvided, bindings.span.clone()));
            }
            // Likewise for "any function" patterns, matched by (name, arity).
            let sorted_unknown_anyfunctions: BTreeSet<(String, usize)> =
                result.left.anyfunction_names.iter().cloned().collect();
            let sorted_function_bindings: BTreeSet<(String, usize)> = function_bindings
                .inner
                .iter()
                .map(|(name, pattern)| (name.clone(), pattern.variables.len()))
                .collect();
            if sorted_unknown_anyfunctions != sorted_function_bindings {
                return Err(Spanned::new(ProofError::NotAllFunctionBindingsProvided, function_bindings.span.clone()));
            }
            result
        };
        // Swap the targeted subexpression out, leaving a placeholder (0).
        let mut current_expr_part = Expression(ExpressionMeta::IntegerValue { value: 0 });
        let current_expr = expr.get_mut(position.borrow()).map_err(|pos| {
            Spanned::new(
                ProofError::InternalError { position: position.clone(), error_in: pos },
                expr_parsing
                    .get(position.cut_to_error(pos))
                    .unwrap()
                    .span
                    .clone()
                    .globalize_span(expr_span.0.start),
            )
        })?;
        std::mem::swap(&mut current_expr_part, current_expr);
        let mut bindings = {
            let mut result = BindingStorage::default();
            for binding in &bindings.inner {
                result.add(binding.clone());
            }
            result
        };
        let mut any_function_bindings = {
            let mut binding_map = BTreeMap::new();
            for binding in &function_bindings.inner {
                binding_map.insert(binding.0.clone(), binding.1.clone());
            }
            ManualAnyFunctionBinding::new(binding_map)
        };
        // Match the extracted subexpression against the formula's left side...
        find_bindings(current_expr_part, &formula.left.pattern, &mut bindings, &mut any_function_bindings)
            .ok_or(Spanned::new(ProofError::CannotFindBindings, expr_span.clone()))?;
        // ...then substitute the bound right side back into the placeholder.
        let mut current_expr_part = apply_bindings(formula.right.pattern.clone(), &bindings, &any_function_bindings);
        std::mem::swap(&mut current_expr_part, current_expr);
        current = expr;
    }
    // After the last step, the proof must have reached the right side.
    if clear_parsing_info(formula.right.clone()) != current {
        return Err(Spanned::new(ProofError::LatestStepWrong { actual: current }, proof.span.clone()));
    }
    Ok(())
}
/// Checks every proof in `math` against `global_formulas`.
///
/// Returns `Ok(())` when all proofs verify, otherwise all collected errors.
pub fn is_proofs_correct(
    math: &Math,
    global_formulas: &BTreeMap<FormulaPosition, Formula>,
) -> Result<(), Vec<Spanned<ProofError>>> {
    let result: Vec<_> = math
        .0
        .iter()
        .flat_map(|named| &named.formulas)
        .filter_map(|formula| {
            // Formulas without a proof are accepted as-is.
            let proof = formula.proof.as_ref()?;
            is_proof_correct(&formula.formula.inner, proof, global_formulas).err()
        })
        .collect();
    if result.is_empty() { Ok(()) } else { Err(result) }
}
|
use std::time::Duration;
use std::time::Instant;
// Maximum number of wrong guesses before a round ends in a loss.
const INPUT_MAX: u8 = 8;

/// State for one hangman participant.
#[derive(Debug, PartialEq)]
struct Player {
    // Display name (line 1 of the player file).
    name: String,
    // Word this player submits for the opponent to guess (stored uppercased).
    word: String,
    // Whether this player guessed the opponent's word.
    found: bool,
    // Wall-clock time this player spent guessing.
    time: Duration,
}
/// Entry point: loads both players from the files `player_1` and `player_2`,
/// has each one guess the *other* player's word under a timer, then prints
/// both times and the winner.
fn main() {
    let mut p1 = get_player("player_1");
    let mut p2 = get_player("player_2");
    println!(
        "******************************************** {} plays",
        p1.name
    );
    // Player 1 guesses player 2's word.
    let (found, time) = chrono_play(&p2.word);
    p1.found = found;
    p1.time = time;
    println!(
        "******************************************** {} plays",
        p2.name
    );
    // Player 2 guesses player 1's word.
    let (found, time) = chrono_play(&p1.word);
    p2.found = found;
    p2.time = time;
    println!("******************************************** Result");
    println!(
        "{}: {}.{} seconds",
        p1.name,
        p1.time.as_secs(),
        p1.time.subsec_millis()
    );
    println!(
        "{}: {}.{} seconds",
        p2.name,
        p2.time.as_secs(),
        p2.time.subsec_millis()
    );
    println!("{}", get_winner(&p1, &p2));
}
/// Loads a player from `file_path`: line 1 is the name, line 2 the word
/// (uppercased). `found` and `time` start zeroed.
///
/// Panics if the file cannot be read or contains fewer than two lines.
fn get_player(file_path: &str) -> Player {
    let contents = std::fs::read_to_string(file_path).unwrap();
    let main_vec: Vec<_> = contents.lines().collect();
    Player {
        name: main_vec[0].to_string(),
        // `to_uppercase` already yields an owned String; the previous extra
        // `.to_string()` was a redundant allocation.
        word: main_vec[1].to_uppercase(),
        found: false,
        time: Duration::new(0, 0),
    }
}
/// Returns `word` with every character not present in `letters` masked
/// by a dash, revealing only the already-guessed letters.
fn get_dashed_word(word: &str, letters: &str) -> String {
    word.chars()
        .map(|c| if letters.contains(c) { c } else { '-' })
        .collect()
}
/// Prompts on stdin until exactly one character is entered; returns that
/// character uppercased.
fn get_letter() -> char {
    loop {
        println!("Enter a letter:");
        let mut user_input = String::new();
        std::io::stdin().read_line(&mut user_input).unwrap();
        let user_input = user_input.trim();
        // NOTE(review): `len()` counts bytes, so a single multi-byte character
        // (e.g. 'é') is rejected here — presumably only ASCII words are used.
        if user_input.len() != 1 {
            continue;
        }
        // `pop` takes the last char of the uppercased input.
        if let Some(letter) = user_input.to_uppercase().pop() {
            break letter;
        }
    }
}
/// Runs one hangman round against stdin: prints the remaining attempts and
/// the masked word, then reads guesses until the word is revealed or
/// `INPUT_MAX` wrong guesses are used up. Returns `true` on success.
fn play(word: &str) -> bool {
    let mut letters: String = String::new();
    let mut input_count = INPUT_MAX;
    while input_count > 0 {
        let dashed_word = get_dashed_word(word, &letters);
        // `println!` is the idiomatic form of `print!("...\n", ...)`.
        println!("{},{}", input_count, dashed_word);
        if dashed_word == word {
            return true;
        }
        let letter = get_letter();
        // A wrong guess costs one attempt.
        if !word.contains(letter) {
            input_count -= 1
        }
        // Record each distinct guessed letter once.
        if !letters.contains(letter) {
            letters.push(letter)
        }
    }
    false
}
/// Runs `play` for `word` and measures how long it took.
///
/// Returns the round result together with the elapsed wall-clock time,
/// using the idiomatic `Instant::elapsed` instead of subtracting two
/// `Instant::now()` samples.
fn chrono_play(word: &str) -> (bool, Duration) {
    let start = Instant::now();
    let status = play(word);
    (status, start.elapsed())
}
/// Decides the outcome: a player who found the word beats one who didn't;
/// if both found it, the faster time wins; equal times and double losses
/// are draws.
fn get_winner(p1: &Player, p2: &Player) -> String {
    // Matching on the two flags is exhaustive and replaces the `== true` /
    // `== false` chain (whose impossible fall-through returned "").
    match (p1.found, p2.found) {
        (true, false) => format!("The winner is {}", p1.name),
        (false, true) => format!("The winner is {}", p2.name),
        (false, false) => String::from("No winner, two losers"),
        (true, true) => {
            if p1.time == p2.time {
                String::from("No winner, no loser")
            } else if p1.time < p2.time {
                format!("The winner is {}", p1.name)
            } else {
                format!("The winner is {}", p2.name)
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): requires a fixture file named `player_test` in the working
    // directory containing "David" on line 1 and "rust" (any case) on line 2.
    #[test]
    fn test_get_player() {
        let player = get_player("player_test");
        assert_eq!(
            player,
            Player {
                name: String::from("David"),
                word: String::from("RUST"),
                found: false,
                time: Duration::new(0, 0),
            }
        );
    }
    // Masking must keep guessed letters and dash out everything else.
    #[test]
    fn test_get_dashed_word() {
        assert_eq!(get_dashed_word("AZERTY", ""), "------");
        assert_eq!(get_dashed_word("TEST", "TAB"), "T--T");
        assert_eq!(get_dashed_word("LITERALIZATION", "AEIOU"), "-I-E-A-I-A-IO-");
        assert_eq!(get_dashed_word("LITERAL", "AEIOU"), "-I-E-A-");
    }
    // Walks all found/time combinations through get_winner.
    #[test]
    fn test_get_winner() {
        let mut p1 = Player {
            name: String::from("David"),
            word: String::from("RUST"),
            found: false,
            time: Duration::new(0, 0),
        };
        let mut p2 = Player {
            name: String::from("Rolland"),
            word: String::from("LANGUAGE"),
            found: false,
            time: Duration::new(0, 0),
        };
        // Players 1 and 2 did not find the word.
        assert_eq!(get_winner(&p1, &p2), "No winner, two losers");
        // Player 1 did not find the word.
        // Player 2 found the word.
        p2.found = true;
        assert_eq!(get_winner(&p1, &p2), "The winner is Rolland");
        // Player 1 found the word.
        // Player 2 did not find the word.
        p1.found = true;
        p2.found = false;
        assert_eq!(get_winner(&p1, &p2), "The winner is David");
        // Players 1 and 2 found the word.
        // Same time.
        p2.found = true;
        assert_eq!(get_winner(&p1, &p2), "No winner, no loser");
        // Player 1 was faster.
        p2.time = Duration::new(5, 0);
        assert_eq!(get_winner(&p1, &p2), "The winner is David");
        // Player 2 was faster.
        p1.time = Duration::new(6, 0);
        assert_eq!(get_winner(&p1, &p2), "The winner is Rolland");
    }
}
|
#[allow(dead_code)]
mod rsbf {
    use std::collections::HashMap;

    /// Builds a bidirectional map between each `[` and its matching `]`
    /// (byte index -> byte index). Panics on an unmatched `]`.
    fn jump_table(code: &str) -> HashMap<usize, usize> {
        let mut jumps = HashMap::new();
        let mut stack: Vec<usize> = Vec::new();
        for (index, c) in code.char_indices() {
            match c {
                '[' => stack.push(index),
                ']' => {
                    let open_index = stack.pop().expect("Missing opening bracket");
                    jumps.insert(index, open_index);
                    jumps.insert(open_index, index);
                }
                _ => {}
            }
        }
        jumps
    }

    /// Interprets the Brainfuck program `code` on a 30 000-cell tape and
    /// returns everything printed by `.` as a String. Non-command characters
    /// are ignored; the `,` input command is not supported.
    pub fn run(code: &str) -> String {
        let mut output = String::new();
        let mut data = vec![0u8; 30000];
        let mut icode = 0;
        let mut idata = 0;
        let jumps = jump_table(code);
        // Commands are single ASCII bytes; indexing bytes avoids re-slicing
        // the string on every step.
        let bytes = code.as_bytes();
        while icode < bytes.len() {
            match bytes[icode] {
                b'<' => idata -= 1,
                b'>' => idata += 1,
                // Brainfuck cells wrap modulo 256; plain +/- would panic on
                // overflow in debug builds.
                b'+' => data[idata] = data[idata].wrapping_add(1),
                b'-' => data[idata] = data[idata].wrapping_sub(1),
                b'.' => output.push(data[idata] as char),
                b'[' if data[idata] == 0 => icode = *jumps.get(&icode).expect("missing [ jump"),
                b']' if data[idata] != 0 => icode = *jumps.get(&icode).expect("missing ] jump"),
                _ => {}
            }
            icode += 1;
        }
        output
    }
}
#[cfg(test)]
mod tests {
    use rsbf::*;
    // End-to-end check with the classic "Hello World!" Brainfuck program.
    #[test]
    fn it_works() {
        let result = run("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.");
        assert_eq!(result, "Hello World!\n");
    }
}
|
// Logging macros (info!, warn!, ...) used across the crate.
#[macro_use]
extern crate log;
// Derive macros for serde serialization.
#[macro_use]
extern crate serde_derive;
// Error types and error-handling macros shared by the other modules.
#[macro_use]
pub mod error;
// Public API modules of the crate.
pub mod audio_decoder;
pub mod audio_encoder;
pub mod filter;
pub mod filter_graph;
pub mod format_context;
pub mod frame;
pub mod order;
pub mod packet;
pub mod prelude;
pub mod probe;
pub mod stream;
pub mod subtitle_decoder;
pub mod subtitle_encoder;
pub mod tools;
pub mod video_decoder;
pub mod video_encoder;
|
extern crate rand;
use rand::Rng;
use std::io;
use std::cmp::Ordering;
/// Number-guessing game: picks a random number in [1, 100], reads guesses
/// from stdin with high/low feedback, and reports the number of attempts.
fn main() {
    println!("请猜测数字,1-100之间!");
    let mut count = 0;
    // Upper bound is exclusive in this rand API, so the range is 1..=100.
    let rand_num = rand::thread_rng().gen_range(1, 101);
    loop {
        let mut number = String::new();
        // `.expect(...)` replaces the `.ok().expect(...)` anti-pattern and
        // keeps the underlying I/O error in the panic message.
        io::stdin()
            .read_line(&mut number)
            .expect("获取输入失败!");
        // Non-numeric input just re-prompts instead of aborting.
        let number: u32 = match number.trim().parse() {
            Ok(num) => num,
            Err(_) => {
                println!("请输入数字!\n");
                continue;
            }
        };
        match number.cmp(&rand_num) {
            Ordering::Less => {
                println!("\n{} 太小!", number);
                count += 1;
            }
            Ordering::Greater => {
                println!("\n{} 太大!", number);
                count += 1;
            }
            Ordering::Equal => {
                println!("{} 你赢了!", number);
                break;
            }
        }
    }
    println!("你一共猜测了 {} 次", count);
}
|
use std::thread;
use std::time::Duration;
/// Demo 1: spawns a thread without joining it — this function returns after
/// its own (shorter) loop, so the spawned thread's output may be cut short.
fn spawned_not_completing () {
    println!("--- 1 ---");
    thread::spawn(|| {
        for i in 1..10 {
            println!("1. [spawned thread] {}", i);
            thread::sleep(Duration::from_millis(1));
        }
    });
    for i in 1..5 {
        println!("1. [main thread] {}", i);
        thread::sleep(Duration::from_millis(1));
    }
    // The loop in the spawned thread takes about 10ms; the loop in the main
    // thread about 5. As it is here, the main thread will not wait for the
    // spawned thread to complete.
}
/// Demo 2: keeps the spawn handle and joins it after the local loop, so both
/// loops run concurrently but the spawned thread is guaranteed to finish.
fn wait_for_me_please() {
    println!("--- 2 ---");
    let worker = thread::spawn(|| {
        for n in 1..10 {
            println!("2. [spawned thread] {}", n);
            thread::sleep(Duration::from_millis(1));
        }
    });
    // Joining before this loop instead would serialize the two loops.
    for n in 1..5 {
        println!("2. [main thread] {}", n);
        thread::sleep(Duration::from_millis(1));
    }
    // Block until the spawned thread terminates before returning.
    worker.join().unwrap();
}
/// Demo 3: the `move` keyword transfers ownership of captured values into
/// the spawned thread's closure.
fn move_data () {
    println!("--- 3 ---");
    let value = 42;
    // Can't use values unless closure has `move`. If it does, then values
    // referenced inside the closure are moved into it; the closure will become
    // the owner of the moved values. Can't just use references, even with
    // lifetimes, because the lifetime of a thread is unknown at compile time.
    let handle = thread::spawn(move || {
        println!("value: {}", value);
    });
    // let value = 42; // won't work, because `value` was moved into the closure
    // drop(value); // same here - `value` moved, so can't use it
    handle.join().unwrap();
}
/// Runs the three threading demos in order.
fn main() {
    spawned_not_completing();
    wait_for_me_please();
    move_data();
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregated error type covering every operation in this generated module;
/// each variant wraps the per-operation error via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    MetricBaseline_Get(#[from] metric_baseline::get::Error),
    #[error(transparent)]
    MetricBaseline_CalculateBaseline(#[from] metric_baseline::calculate_baseline::Error),
}
/// Generated client operations for the Microsoft.Insights metric baseline
/// endpoints (see the AutoRust header at the top of this file).
pub mod metric_baseline {
    use super::{models, API_VERSION};
    /// GET the metric baseline for `metric_name` on `resource_uri`.
    ///
    /// Each `Option<&str>` parameter is appended as a query pair only when
    /// `Some`; the request body is empty.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_uri: &str,
        metric_name: &str,
        timespan: Option<&str>,
        interval: Option<&str>,
        aggregation: Option<&str>,
        sensitivities: Option<&str>,
        result_type: Option<&str>,
    ) -> std::result::Result<models::BaselineResponse, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Insights/baseline/{}",
            operation_config.base_path(),
            resource_uri,
            metric_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        if let Some(timespan) = timespan {
            url.query_pairs_mut().append_pair("timespan", timespan);
        }
        if let Some(interval) = interval {
            url.query_pairs_mut().append_pair("interval", interval);
        }
        if let Some(aggregation) = aggregation {
            url.query_pairs_mut().append_pair("aggregation", aggregation);
        }
        if let Some(sensitivities) = sensitivities {
            url.query_pairs_mut().append_pair("sensitivities", sensitivities);
        }
        if let Some(result_type) = result_type {
            url.query_pairs_mut().append_pair("resultType", result_type);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        // 200 deserializes into the success model; any other status into the
        // service's ErrorResponse, surfaced as DefaultResponse.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::BaselineResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// POST the supplied time-series data and get back a calculated baseline.
    pub async fn calculate_baseline(
        operation_config: &crate::OperationConfig,
        resource_uri: &str,
        time_series_information: &models::TimeSeriesInformation,
    ) -> std::result::Result<models::CalculateBaselineResponse, calculate_baseline::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/{}/providers/Microsoft.Insights/calculatebaseline",
            operation_config.base_path(),
            resource_uri
        );
        let mut url = url::Url::parse(url_str).map_err(calculate_baseline::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(calculate_baseline::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // The time-series payload is sent as JSON.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(time_series_information).map_err(calculate_baseline::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(calculate_baseline::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(calculate_baseline::Error::ExecuteRequestError)?;
        // 200 deserializes into the success model; any other status into the
        // service's ErrorResponse, surfaced as DefaultResponse.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::CalculateBaselineResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| calculate_baseline::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| calculate_baseline::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(calculate_baseline::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`calculate_baseline`].
    pub mod calculate_baseline {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
|
mod stdout;

/// Entry point: delegates to the `stdout` module's greeting routine.
fn main() {
    stdout::greeting();
}
|
use std::process::Command;
let mut echo_hello = Command::new("sh");
echo_hello.arg("-c")
.arg("echo hello");
let hello_1 = echo_hello.output().expect("failed to execute process");
let hello_2 = echo_hello.output().expect("failed to execute process"); |
use hex;
/// Decodes a hex-encoded ciphertext and searches for the single-byte XOR
/// key that yields the most English-looking plaintext.
fn main() {
    find_the_xor(
        &hex::decode("1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736")
            .unwrap(),
    );
}
/// Scores a single byte by approximate English letter frequency (higher is
/// more common); space scores like a rare letter and anything outside the
/// scored set costs -1.
fn score_ascii_byte(c: u8) -> i64 {
    // Case-insensitive: fold ASCII uppercase to lowercase with the stdlib
    // helper instead of manual `c - b'A' + b'a'` arithmetic.
    let c = c.to_ascii_lowercase();
    match c as char {
        'e' => 12,
        't' | 'a' | 'o' => 8,
        'i' | 'n' => 7,
        's' | 'h' | 'r' => 6,
        'd' | 'l' => 4,
        'c' | 'u' => 3,
        'm' | 'w' | 'f' | 'g' | 'y' | 'p' => 2,
        'b' | 'v' | 'k' | ' ' => 1,
        'j' | 'x' | 'q' | 'z' => 0,
        _ => -1,
    }
}
fn score_ascii(string: Vec<u8>) -> i64 {
string.into_iter().map(|x| score_ascii_byte(x)).sum()
}
/// Tries every single-byte XOR key against `string`, scores each candidate
/// plaintext by English letter frequency, and prints the best
/// `(score, plaintext, key)` triple.
fn find_the_xor(string: &[u8]) {
    // Inclusive range: the previous `0..std::u8::MAX` never tried key 255.
    let answer = (0u8..=std::u8::MAX)
        .map(|key| {
            let decoded: Vec<u8> = string.iter().map(|b| b ^ key).collect();
            // Only printable-ish candidates (valid UTF-8 AND pure ASCII)
            // compete; everything else scores a flat 0.
            if let Ok(text) = String::from_utf8(decoded.clone()) {
                if text.is_ascii() {
                    // `key` is Copy — no `.clone()` needed.
                    return (score_ascii(decoded), text, key);
                }
            }
            (0, "Not Found".into(), 0u8)
        })
        // Tuples compare lexicographically, so the score decides first.
        .max()
        .unwrap();
    println!("{:?}", answer);
}
|
/// An RGBA color; every channel is stored as 0-255.
#[derive(Debug)]
pub struct Color {
    pub red: u8,
    pub green: u8,
    pub blue: u8,
    // Opacity: 0 = transparent, 255 = opaque (rendered as 0.0-1.0 in rgba()).
    pub alpha: u8
}
impl Color {
/// Create a color with a alpha value of 255
pub fn new(red: u8, green: u8, blue: u8) -> Color {
Color {
red,
green,
blue,
alpha: 255
}
}
/// Create a color
pub fn new_with_alpha(red: u8, green: u8, blue: u8, alpha: u8) -> Color {
Color {
red,
green,
blue,
alpha
}
}
/// Green color
pub fn green() -> Color {
Color {
red: 0,
green: 128,
blue: 0,
alpha: 255
}
}
/// Yellow color
pub fn yellow() -> Color {
Color {
red: 255,
green: 255,
blue: 0,
alpha: 255
}
}
/// Orange color
pub fn orange() -> Color {
Color {
red: 255,
green: 165,
blue: 0,
alpha: 255
}
}
/// Red color
pub fn red() -> Color {
Color {
red: 255,
green: 0,
blue: 0,
alpha: 255
}
}
/// Blue color
pub fn blue() -> Color {
Color {
red: 0,
green: 0,
blue: 255,
alpha: 255
}
}
/// Cyan color
pub fn cyan() -> Color {
Color {
red: 0,
green: 255,
blue: 255,
alpha: 255
}
}
/// Grey color
pub fn grey() -> Color {
Color {
red: 128,
green: 128,
blue: 128,
alpha: 255
}
}
/// Pink color
pub fn pink() -> Color {
Color {
red: 255,
green: 192,
blue: 203,
alpha: 255
}
}
/// Purple color
pub fn purple() -> Color {
Color {
red: 128,
green: 0,
blue: 128,
alpha: 255
}
}
/// No color
pub fn black() -> Color {
Color {
red: 0,
green: 0,
blue: 0,
alpha: 255
}
}
/// Pure white
pub fn white() -> Color {
Color {
red: 255,
green: 255,
blue: 255,
alpha: 255
}
}
}
/// Formats the color as a CSS-style `rgba(r,g,b,a)` string, with the alpha
/// channel scaled from 0-255 down to 0.0-1.0 (255 renders as `1`).
///
/// Implementing `Display` — rather than `ToString` directly, which clippy
/// flags as `to_string_trait_impl` — still provides `to_string()` through
/// the standard blanket impl, and additionally allows use in `format!`.
impl std::fmt::Display for Color {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "rgba({},{},{},{})",
            self.red,
            self.green,
            self.blue,
            // f64's Display prints 255/255 as "1", matching the old output.
            self.alpha as f64 / 255.0
        )
    }
}
#[cfg(test)]
mod test {
    use super::Color;

    /// Purple at full opacity renders with the alpha shown as `1`.
    #[test]
    fn color_to_string() {
        let rendered = Color::purple().to_string();
        assert_eq!(rendered, "rgba(128,0,128,1)");
    }
}
use bevy::prelude::*;
use crate::{WINDOW_WIDTH, WINDOW_HEIGHT, Materials, collider::Collider};
/// Thickness of every wall sprite, in world units.
const WALL_THICKNESS: f32 = 10.;
/// Center X of the left wall, placing it flush with the window's left edge.
const WALL_LEFT_X: f32 = WALL_THICKNESS / 2. - WINDOW_WIDTH / 2.;
/// Center X of the right wall, flush with the window's right edge.
const WALL_RIGHT_X: f32 = WINDOW_WIDTH / 2. - WALL_THICKNESS / 2.;
/// Center Y of the top wall, flush with the window's top edge.
const WALL_TOP_Y: f32 = WINDOW_HEIGHT / 2. - WALL_THICKNESS / 2.;
/// Spawn the three boundary walls (left, right, top) as sprites tagged with
/// `Collider` markers for the collision system.
///
/// NOTE(review): there is no bottom wall — presumably the playfield is open
/// at the bottom (e.g. so a ball can fall out); confirm this is intended.
pub fn spawn_walls(mut commands: Commands, materials: Res<Materials>) {
    commands
        // left
        .spawn(SpriteComponents {
            material: materials.wall_material.clone(),
            // Full window height, WALL_THICKNESS wide.
            sprite: Sprite::new(Vec2::new(WALL_THICKNESS, WINDOW_HEIGHT)),
            transform: Transform::from_translation(Vec3::new(WALL_LEFT_X, 0., 0.)),
            ..Default::default()
        })
        .with(Collider::WallLeft)
        // right
        .spawn(SpriteComponents {
            material: materials.wall_material.clone(),
            sprite: Sprite::new(Vec2::new(WALL_THICKNESS, WINDOW_HEIGHT)),
            transform: Transform::from_translation(Vec3::new(WALL_RIGHT_X, 0., 0.)),
            ..Default::default()
        })
        .with(Collider::WallRight)
        // top
        .spawn(SpriteComponents {
            material: materials.wall_material.clone(),
            // Full window width, WALL_THICKNESS tall.
            sprite: Sprite::new(Vec2::new(WINDOW_WIDTH, WALL_THICKNESS)),
            transform: Transform::from_translation(Vec3::new(0., WALL_TOP_Y, 0.)),
            ..Default::default()
        })
        .with(Collider::WallTop);
}
use crate::requests::health_check_get::{HealthCheckRequester, Request as HealthCheckRequest};
use crate::requests::metrics_mixes_get::{MetricsMixRequester, Request as MetricsMixRequest};
use crate::requests::metrics_mixes_post::{MetricsMixPoster, Request as MetricsMixPost};
use crate::requests::presence_coconodes_post::{
PresenceCocoNodesPoster, Request as PresenceCocoNodesPost,
};
use crate::requests::presence_mixnodes_post::{
PresenceMixNodesPoster, Request as PresenceMixNodesPost,
};
use crate::requests::presence_providers_post::{
PresenceMixProviderPoster, Request as PresenceProvidersPost,
};
use crate::requests::presence_topology_get::{
PresenceTopologyGetRequester, Request as PresenceTopologyRequest,
};
pub mod metrics;
pub mod presence;
pub mod requests;
/// Configuration for the directory client.
pub struct Config {
    /// Base URL that every request type is constructed against.
    pub base_url: String,
}
impl Config {
pub fn new(base_url: String) -> Self {
Config { base_url }
}
}
/// Constructor interface for directory API clients.
pub trait DirectoryClient {
    /// Build a client from the given configuration.
    fn new(config: Config) -> Self;
}
/// A directory-server client: one pre-constructed request type per endpoint,
/// all sharing the same base URL from `Config`.
pub struct Client {
    pub health_check: HealthCheckRequest,
    pub metrics_mixes: MetricsMixRequest,
    pub metrics_post: MetricsMixPost,
    pub presence_coconodes_post: PresenceCocoNodesPost,
    pub presence_mix_nodes_post: PresenceMixNodesPost,
    pub presence_providers_post: PresenceProvidersPost,
    pub presence_topology: PresenceTopologyRequest,
}
impl DirectoryClient for Client {
    /// Construct every endpoint's request type up front from the shared base URL.
    fn new(config: Config) -> Client {
        let base = &config.base_url;
        Client {
            health_check: HealthCheckRequest::new(base.clone()),
            metrics_mixes: MetricsMixRequest::new(base.clone()),
            metrics_post: MetricsMixPost::new(base.clone()),
            presence_coconodes_post: PresenceCocoNodesPost::new(base.clone()),
            presence_mix_nodes_post: PresenceMixNodesPost::new(base.clone()),
            presence_providers_post: PresenceProvidersPost::new(base.clone()),
            presence_topology: PresenceTopologyRequest::new(base.clone()),
        }
    }
}
|
use crate::errors::SdError;
use libc::pid_t;
use nix::unistd;
use std::os::unix::net::UnixDatagram;
use std::{env, fmt, fs, path, time};
/// Check for systemd presence at runtime.
///
/// Returns true if the system was booted with systemd. The check is based
/// on the presence of the systemd runtime directory `/run/systemd/system`;
/// any error looking it up (e.g. the path does not exist) means "not booted
/// with systemd".
pub fn booted() -> bool {
    match fs::symlink_metadata("/run/systemd/system") {
        Ok(meta) => meta.is_dir(),
        Err(_) => false,
    }
}
/// Check for watchdog support at runtime.
///
/// Return a timeout before which the watchdog expects a
/// response from the process, or `None` if watchdog support is
/// not enabled. If `unset_env` is true, environment will be cleared.
pub fn watchdog_enabled(unset_env: bool) -> Option<time::Duration> {
    // Snapshot both variables before optionally clearing them.
    let env_usec = env::var("WATCHDOG_USEC").ok();
    let env_pid = env::var("WATCHDOG_PID").ok();
    if unset_env {
        env::remove_var("WATCHDOG_USEC");
        env::remove_var("WATCHDOG_PID");
    }
    // WATCHDOG_USEC is expressed in *microseconds* (see sd_watchdog_enabled(3)).
    // Duration::from_micros preserves the full precision; the previous
    // `from_millis(usec / 1_000)` silently truncated sub-millisecond values.
    let timeout = match env_usec.and_then(|usec_str| usec_str.parse::<u64>().ok()) {
        Some(usec) => time::Duration::from_micros(usec),
        None => return None,
    };
    // If WATCHDOG_PID is present, the watchdog only applies to that process.
    let pid = match env_pid {
        Some(pid_str) => match pid_str.parse::<pid_t>() {
            Ok(p) => unistd::Pid::from_raw(p),
            // An unparsable PID disables the watchdog entirely.
            Err(_) => return None,
        },
        // No PID restriction: the timeout applies to us unconditionally.
        None => return Some(timeout),
    };
    if unistd::getpid() == pid {
        Some(timeout)
    } else {
        None
    }
}
/// Notify service manager about status changes.
///
/// Send a notification to the manager about service status changes.
/// The returned boolean shows whether notifications are supported for
/// this service. If `unset_env` is true, environment will be cleared
/// and no further notifications are possible.
pub fn notify(unset_env: bool, state: &[NotifyState]) -> Result<bool, SdError> {
    let env_sock = env::var("NOTIFY_SOCKET").ok();
    if unset_env {
        env::remove_var("NOTIFY_SOCKET");
    }
    // Without a notification socket there is nothing to notify.
    let path = match env_sock {
        Some(p) => path::PathBuf::from(p),
        None => return Ok(false),
    };
    let sock =
        UnixDatagram::unbound().map_err(|e| format!("failed to open datagram socket: {}", e))?;
    // One newline-terminated `KEY=VALUE` line per state entry.
    let mut msg = String::new();
    for s in state {
        msg.push_str(&format!("{}\n", s));
    }
    let expected_len = msg.len();
    let sent_len = sock
        .send_to(msg.as_bytes(), path)
        .map_err(|e| format!("failed to send datagram: {}", e))?;
    // A short datagram write would corrupt the protocol; treat it as an error.
    if sent_len != expected_len {
        return Err(format!("incomplete write, sent {} out of {}", sent_len, expected_len).into());
    }
    Ok(true)
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
/// Status changes, see `sd_notify(3)`.
///
/// Each variant renders (via its `Display` impl) as a single `KEY=VALUE`
/// assignment in the datagram sent to the service manager.
pub enum NotifyState {
    /// D-Bus error-style error code.
    Buserror(String),
    /// errno-style error code.
    Errno(u8),
    /// A name for the submitted file descriptors.
    Fdname(String),
    /// Stores additional file descriptors in the service manager.
    Fdstore,
    /// The main process ID of the service, in case of forking applications.
    Mainpid(unistd::Pid),
    /// Custom state change, as a `KEY=VALUE` string.
    Other(String),
    /// Service startup is finished.
    Ready,
    /// Service is reloading.
    Reloading,
    /// Custom status change.
    Status(String),
    /// Service is beginning to shutdown.
    Stopping,
    /// Tell the service manager to update the watchdog timestamp.
    Watchdog,
    /// Reset watchdog timeout value during runtime.
    WatchdogUsec(u64),
}
impl fmt::Display for NotifyState {
    /// Render the state as its wire-format `KEY=VALUE` assignment.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            NotifyState::Buserror(s) => write!(f, "BUSERROR={}", s),
            NotifyState::Errno(e) => write!(f, "ERRNO={}", e),
            NotifyState::Fdname(s) => write!(f, "FDNAME={}", s),
            NotifyState::Fdstore => write!(f, "FDSTORE=1"),
            NotifyState::Mainpid(p) => write!(f, "MAINPID={}", p),
            // `Other` is passed through verbatim; the caller supplies KEY=VALUE.
            NotifyState::Other(s) => write!(f, "{}", s),
            NotifyState::Ready => write!(f, "READY=1"),
            NotifyState::Reloading => write!(f, "RELOADING=1"),
            NotifyState::Status(s) => write!(f, "STATUS={}", s),
            NotifyState::Stopping => write!(f, "STOPPING=1"),
            NotifyState::Watchdog => write!(f, "WATCHDOG=1"),
            NotifyState::WatchdogUsec(u) => write!(f, "WATCHDOG_USEC={}", u),
        }
    }
}
|
mod cli;
mod proxy;
#[async_std::main]
async fn main() -> std::io::Result<()> {
match futures::try_join!(
cli::run(),
proxy::run(),
) {
Err(e) => {
println!("Ошибка в процессе выполнения программы.");
println!("Информация об ошибке: {}", e);
}
_ => {}
}
Ok(())
} |
use anyhow::Context;
use parse_duration::parse as parse_duration;
use poise::{
command,
serenity::model::{
guild::Guild,
id::{ChannelId, UserId},
misc::Mentionable,
},
};
use tokio::time::Instant;
use url::Url;
use crate::{
constants::{
DESCRIPTION_LENGTH_CUTOFF, LIVE_INDICATOR, MAX_LIST_ENTRY_LENGTH, MAX_SINGLE_ENTRY_LENGTH,
UNKNOWN_TITLE,
},
data::{IdleGuildMap, LastMessageMap},
types::{Error, PoiseContext},
utils::{
discord::{guild_check, reply, reply_embed},
helpers::{chop_str, display_time_span, push_chopped_str},
},
};
/// Connect the bot to a voice channel: join the Discord voice gateway via
/// songbird, hand the connection to Lavalink, and record the guild as idle
/// (for later auto-disconnect bookkeeping).
async fn join_internal<G, C>(
	ctx: &PoiseContext<'_>,
	guild_id: G,
	channel_id: C,
) -> Result<(), Error>
where
	G: Into<u64>,
	C: Into<u64>,
{
	let guild_id: u64 = guild_id.into();
	// Step 1: join the voice gateway.
	let (_, handler) = ctx
		.data()
		.songbird
		.join_gateway(guild_id, channel_id.into())
		.await;
	let connection_info = match handler {
		Ok(info) => info,
		Err(e) => return Err(Box::new(e)),
	};
	// Step 2: hand the gateway connection over to Lavalink.
	if let Err(e) = ctx
		.data()
		.lavalink
		.create_session_with_songbird(&connection_info)
		.await
	{
		return Err(Box::new(e));
	}
	// Step 3: mark the guild as idle starting now.
	{
		let data = ctx.discord().data.read().await;
		let mut idle_hash_map = data.get::<IdleGuildMap>().expect("msg").write().await;
		idle_hash_map.insert(guild_id, Instant::now());
	}
	Ok(())
}
/// Look up the voice channel the given user is currently connected to in
/// this guild, if any.
fn author_channel_id_from_guild(guild: &Guild, authour_id: &UserId) -> Option<ChannelId> {
	// `?` short-circuits when the user has no voice state in this guild.
	let voice_state = guild.voice_states.get(authour_id)?;
	voice_state.channel_id
}
/// Have bot join the voice channel you're in.
#[command(slash_command, aliases("j"))]
pub async fn join(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	// The author must be in a voice channel so we know where to go.
	let channel_id = if let Some(channel) = author_channel_id_from_guild(&guild, &ctx.author().id) {
		channel
	} else {
		reply(ctx, "You must use this command while in a voice channel.").await?;
		return Ok(());
	};
	if let Err(e) = join_internal(&ctx, guild.id, channel_id).await {
		reply(
			ctx,
			format!("Error joining {}: {}", channel_id.mention(), e),
		)
		.await?;
		return Ok(());
	}
	reply(ctx, format!("Joined: {}", channel_id.mention())).await?;
	Ok(())
}
/// Have bot leave the voice channel it's in, if any.
#[command(slash_command, aliases("l"))]
pub async fn leave(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let manager = &ctx.data().songbird;
	// Guard clause: nothing to do when we're not connected in this guild.
	if manager.get(guild.id).is_none() {
		reply(ctx, "Not in a voice channel.").await?;
		return Ok(());
	}
	// Drop the songbird connection first; report failures but keep going.
	if let Err(e) = manager.remove(guild.id).await {
		reply(ctx, format!("Error leaving voice channel: {}", e)).await?;
	}
	// Tear down the Lavalink session as well.
	let lava_client = &ctx.data().lavalink;
	lava_client.destroy(guild.id.0).await?;
	reply(ctx, "Left the voice channel.").await?;
	Ok(())
}
/// Queue up a song or playlist from YouTube, Twitch, Vimeo, SoundCloud, etc.
#[command(slash_command, defer_response, aliases("p"))]
pub async fn play(
	ctx: PoiseContext<'_>,
	#[rest]
	#[description = "What to play."]
	query: String,
) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	// Remember which channel this request came from so later async events
	// for this guild can be reported back to it.
	{
		let data = ctx.discord().data.read().await;
		let mut last_message_map = data.get::<LastMessageMap>().expect("msg").write().await;
		last_message_map.insert(guild.id.0, ctx.channel_id());
	}
	let manager = &ctx.data().songbird;
	let lava_client = &ctx.data().lavalink;
	// Not connected in this guild yet: join the author's voice channel first.
	if manager.get(guild.id).is_none() {
		let channel_id = match author_channel_id_from_guild(&guild, &ctx.author().id) {
			Some(channel) => channel,
			None => {
				reply(
					ctx,
					"You must use this command while either you or the bot is in a voice channel.",
				)
				.await?;
				return Ok(());
			}
		};
		if let Err(e) = join_internal(&ctx, guild.id, channel_id).await {
			reply(
				ctx,
				format!("Error joining {}: {}", channel_id.mention(), e),
			)
			.await?;
			return Ok(());
		}
	}
	let mut queueable_tracks = Vec::new();
	// Queue up any attachments
	// (only prefix-command invocations can carry message attachments).
	match ctx {
		PoiseContext::Prefix(prefix_ctx) => {
			for attachment in &prefix_ctx.msg.attachments {
				// Verify the attachment is playable
				let playable_content = match &attachment.content_type {
					Some(t) => t.starts_with("audio") || t.starts_with("video"),
					None => false,
				};
				if !playable_content {
					continue;
				}
				// Queue it up
				let mut query_result = lava_client.auto_search_tracks(&attachment.url).await?;
				// Raw uploads come back titled UNKNOWN_TITLE; substitute the
				// attachment's filename so the queue display is useful.
				for track in &mut query_result.tracks {
					track.info = match &track.info {
						Some(old_info) => {
							let mut new_info = old_info.clone();
							if old_info.title == UNKNOWN_TITLE {
								new_info.title = attachment.filename.clone();
							}
							Some(new_info)
						}
						None => None,
					}
				}
				queueable_tracks.extend_from_slice(&query_result.tracks)
			}
		}
		PoiseContext::Application(_) => {}
	}
	// Load the command query - if playable attachments were also with the message,
	// the attachments are queued first
	let query_information = lava_client.auto_search_tracks(&query).await?;
	let is_url = Url::parse(query.trim()).is_ok();
	// If the query was a URL, then it's likely a playlist where all retrieved
	// tracks are desired - otherwise, only queue the top result
	let query_tracks = if is_url {
		query_information.tracks.len()
	} else {
		1
	};
	queueable_tracks.extend_from_slice(
		&query_information
			.tracks
			.iter()
			.take(query_tracks)
			.cloned()
			.collect::<Vec<_>>(),
	);
	if queueable_tracks.is_empty() {
		reply(ctx, "Could not find anything for the search query.").await?;
		return Ok(());
	}
	let queueable_tracks_len = queueable_tracks.len();
	// For URLs that point to raw files, Lavalink seems to just return them with a
	// title of "Unknown title" - this is a slightly hacky solution to set the title
	// to the filename of the raw file
	if is_url && query_tracks == 1 {
		let track_info = &mut queueable_tracks[queueable_tracks_len - 1];
		if track_info.info.is_some() && track_info.info.as_ref().unwrap().title.eq(UNKNOWN_TITLE) {
			track_info.info = match &track_info.info {
				Some(old_info) => {
					let mut new_info = old_info.clone();
					// Use the last path segment of the (already-validated) URI.
					new_info.title = Url::parse(old_info.uri.as_str())
						.expect(
							"Unable to parse track info URI when it should have been guaranteed \
							 to be valid",
						)
						.path_segments()
						.expect("Unable to parse URI as a proper path")
						.last()
						.expect("Unable to find the last path segment of URI")
						.to_owned();
					Some(new_info)
				}
				None => None,
			};
		}
	}
	// Queue the tracks up
	for track in &queueable_tracks {
		if let Err(e) = lava_client
			.play(guild.id.0, track.clone())
			.requester(ctx.author().id.0)
			.queue()
			.await
		{
			reply(ctx, "Failed to queue up query result.").await?;
			eprintln!("Failed to queue up query result: {}", e);
			return Ok(());
		};
	}
	// Notify the user of the added tracks
	if queueable_tracks_len == 1 {
		let track_info = queueable_tracks[0].info.as_ref().unwrap();
		reply(
			ctx,
			format!(
				"Added to queue: [{}]({}) [{}]",
				chop_str(track_info.title.as_str(), MAX_SINGLE_ENTRY_LENGTH),
				track_info.uri,
				if track_info.is_stream {
					LIVE_INDICATOR.to_owned()
				} else {
					display_time_span(track_info.length)
				}
			),
		)
		.await?;
	} else {
		// Multiple tracks: summarize them in an embed, clipping the list when
		// it would exceed Discord's embed description length limit.
		let mut desc = String::from("Requested by ");
		desc.push_str(ctx.author().mention().to_string().as_str());
		desc.push('\n');
		for (i, track) in queueable_tracks.iter().enumerate() {
			let track_info = track.info.as_ref().unwrap();
			desc.push_str("- [");
			push_chopped_str(&mut desc, track_info.title.as_str(), MAX_LIST_ENTRY_LENGTH);
			desc.push_str("](");
			desc.push_str(track_info.uri.as_str());
			desc.push(')');
			if i < queueable_tracks_len - 1 {
				desc.push('\n');
				if desc.len() > DESCRIPTION_LENGTH_CUTOFF {
					desc.push_str("*…the rest has been clipped*");
					break;
				}
			}
		}
		reply_embed(ctx, |e| {
			e.title(format!("Added {} Tracks:", queueable_tracks_len))
				.description(desc)
		})
		.await?;
	}
	Ok(())
}
/// Skip the current track.
#[command(slash_command, aliases("next", "stop", "n", "s"))]
pub async fn skip(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	match lava_client.skip(guild.id.0).await {
		Some(track) => {
			let track_info = track.track.info.as_ref().unwrap();
			// If the queue is now empty, the player must be stopped explicitly.
			let queue_is_empty = lava_client
				.nodes()
				.await
				.get(&guild.id.0)
				.unwrap()
				.queue
				.is_empty();
			if queue_is_empty {
				lava_client
					.stop(guild.id.0)
					.await
					.with_context(|| "Failed to stop playback of the current track".to_owned())?;
			}
			reply(
				ctx,
				format!(
					"Skipped: [{}]({})",
					chop_str(track_info.title.as_str(), MAX_SINGLE_ENTRY_LENGTH),
					track_info.uri
				),
			)
			.await?;
		}
		None => {
			reply(ctx, "Nothing to skip.").await?;
		}
	}
	Ok(())
}
/// Pause the current track.
///
/// The opposite of `resume`.
#[command(slash_command)]
pub async fn pause(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	match lava_client.pause(guild.id.0).await {
		Ok(_) => {
			reply(ctx, "Paused playback.").await?;
		}
		Err(e) => {
			// Tell the user, and keep the details in the server log.
			reply(ctx, "Failed to pause playback.").await?;
			eprintln!("Failed to pause playback: {}", e);
		}
	}
	Ok(())
}
/// Resume the current track.
///
/// The opposite of `pause`.
#[command(slash_command)]
pub async fn resume(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	match lava_client.resume(guild.id.0).await {
		Ok(_) => {
			reply(ctx, "Resumed playback.").await?;
		}
		Err(e) => {
			// Tell the user, and keep the details in the server log.
			reply(ctx, "Failed to resume playback.").await?;
			eprintln!("Failed to resume playback: {}", e);
		}
	}
	Ok(())
}
/// Seek to a specific time in the current track.
///
/// You can specify the time to skip to as a timecode (`2:35`) or as individual
/// time values (`2m35s`).
///
/// If the time specified is past the end of the track, the track ends.
#[command(slash_command, aliases("scrub", "jump"))]
pub async fn seek(
	ctx: PoiseContext<'_>,
	#[rest]
	#[description = "What time to skip to."]
	time: String,
) -> Result<(), Error> {
	// Constants
	const COLON: char = ':';
	const DECIMAL: char = '.';
	// Parse the time - this is a little hacky and gross, but it allows for support
	// of timecodes like `2:35`. This is more ergonomic for users than something
	// like `2m35s`, and this way both formats are supported.
	//
	// Strategy: rewrite each whitespace-separated timecode into the unit
	// notation `parse_duration` understands (e.g. `1:02:35.5` -> `1h02m35s5ms`),
	// then hand the whole rewritten string to `parse_duration`.
	let mut invalid_value = false;
	let mut time_prepared = String::with_capacity(time.len());
	'prepare_time: for timecode in time.split_whitespace() {
		// First iteration to find indices and make sure the timecode is valid
		let mut colon_index_first = None;
		let mut colon_index_second = None;
		let mut decimal_index = None;
		for (i, c) in timecode.chars().enumerate() {
			if c == COLON {
				if colon_index_first.is_none() {
					colon_index_first = Some(i);
				} else if colon_index_second.is_none() {
					colon_index_second = Some(i);
				} else {
					// Maximum of two colons in a timecode
					invalid_value = true;
					break 'prepare_time;
				}
				if decimal_index.is_some() {
					// Colons don't come after decimals
					invalid_value = true;
					break 'prepare_time;
				}
			} else if c == DECIMAL {
				if decimal_index.is_none() {
					decimal_index = Some(i);
				} else {
					// Only one decimal value
					invalid_value = true;
					break 'prepare_time;
				}
			}
		}
		// Second iteration using those indices to convert the timecode to a duration
		// representation
		let mut new_word = String::with_capacity(timecode.len());
		for (i, c) in timecode.chars().enumerate() {
			if colon_index_first.is_some() && i == colon_index_first.unwrap() {
				// With two colons the first separates hours; with one, minutes.
				if colon_index_second.is_some() {
					new_word.push('h');
				} else {
					new_word.push('m');
				}
			} else if colon_index_second.is_some() && i == colon_index_second.unwrap() {
				new_word.push('m');
			} else if decimal_index.is_some() && i == decimal_index.unwrap() {
				new_word.push('s');
			} else {
				new_word.push(c);
			}
		}
		// Suffix the trailing component with its unit: the digits after a
		// decimal point are milliseconds; otherwise the tail is seconds.
		if decimal_index.is_some() {
			new_word.push_str("ms");
		} else if colon_index_first.is_some() {
			new_word.push('s');
		}
		// Push the prepared timecode to the result
		time_prepared.push_str(new_word.as_str());
		time_prepared.push(' ');
	}
	if invalid_value {
		reply(ctx, "Invalid value for time.").await?;
		return Ok(());
	}
	let time_dur = match parse_duration(time_prepared.as_str()) {
		Ok(duration) => duration,
		Err(_) => {
			reply(ctx, "Invalid value for time.").await?;
			return Ok(());
		}
	};
	// Seek to the parsed time
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	if let Err(e) = lava_client.seek(guild.id.0, time_dur).await {
		reply(ctx, "Failed to seek to the specified time.").await?;
		eprintln!("Failed to seek to the specified time: {}", e);
		return Ok(());
	};
	reply(ctx, "Scrubbed to the specified time.").await?;
	Ok(())
}
/// Clear the playback queue.
///
/// In addition to clearing the queue, this also resets the queue position for
/// new tracks. This is the only way this happens other than when the bot goes
/// offline.
#[command(slash_command, aliases("c"))]
pub async fn clear(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	// Drain every queued track, then halt the player itself.
	loop {
		if lava_client.skip(guild.id.0).await.is_none() {
			break;
		}
	}
	lava_client
		.stop(guild.id.0)
		.await
		.with_context(|| "Failed to stop playback of the current track".to_owned())?;
	reply(ctx, "The queue is now empty.").await?;
	Ok(())
}
/// Show what's currently playing, and how far in you are in the track.
///
/// If the track has a defined end point, a progress bar will be displayed.
/// Otherwise, if the track is a live stream, only the time it's been playing
/// will be displayed.
#[command(
	slash_command,
	rename = "nowplaying",
	aliases("np", "position", "current", "rn")
)]
pub async fn now_playing(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	let mut something_playing = false;
	// A node exists only for guilds with an active Lavalink session.
	if let Some(node) = lava_client.nodes().await.get(&guild.id.0) {
		if let Some(now_playing) = &node.now_playing {
			let track_info = now_playing.track.info.as_ref().unwrap();
			reply_embed(ctx, |e| {
				e.title("Now Playing")
					.field(
						"Track:",
						format!(
							"[{}]({})",
							chop_str(track_info.title.as_str(), MAX_SINGLE_ENTRY_LENGTH),
							track_info.uri,
						),
						false,
					)
					.field("Duration:", display_time_span(track_info.length), true)
					.field(
						"Requested By:",
						// Tracks are always queued with a requester attached
						// (see `play`), so this is expected to be present.
						UserId(
							now_playing
								.requester
								.expect("Expected a requester associated with a playing track")
								.0,
						)
						.mention(),
						true,
					)
			})
			.await?;
			something_playing = true;
		}
	}
	if !something_playing {
		reply(ctx, "Nothing is playing at the moment.").await?;
	}
	Ok(())
}
/// Show the playback queue.
#[command(slash_command, aliases("q"))]
pub async fn queue(ctx: PoiseContext<'_>) -> Result<(), Error> {
	let guild = guild_check(ctx).await?;
	let lava_client = &ctx.data().lavalink;
	let mut something_in_queue = false;
	// A node exists only for guilds with an active Lavalink session.
	if let Some(node) = lava_client.nodes().await.get(&guild.id.0) {
		let queue = &node.queue;
		let queue_len = queue.len();
		if queue_len > 0 {
			something_in_queue = true;
			// Build a numbered markdown list, clipping it if it would exceed
			// Discord's embed description length limit.
			let mut desc = String::new();
			for (i, queued_track) in queue.iter().enumerate() {
				let track_info = queued_track.track.info.as_ref().unwrap();
				desc.push_str(format!("`{}.` [", i + 1).as_str());
				push_chopped_str(&mut desc, track_info.title.as_str(), MAX_LIST_ENTRY_LENGTH);
				desc.push_str("](");
				desc.push_str(track_info.uri.as_str());
				desc.push(')');
				if i < queue_len - 1 {
					desc.push('\n');
					if desc.len() > DESCRIPTION_LENGTH_CUTOFF {
						desc.push_str("*…the rest has been clipped*");
						break;
					}
				}
			}
			reply_embed(ctx, |e| {
				// Pluralize the title by hand ("track" vs "tracks").
				e.title(if queue_len != 1 {
					format!("Queue ({} total tracks):", queue_len)
				} else {
					format!("Queue ({} total track):", queue_len)
				})
				.description(desc)
			})
			.await?;
		}
	}
	if !something_in_queue {
		reply(ctx, "Nothing is in the queue.").await?;
	}
	Ok(())
}
|
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate azure_core;
pub use azure_storage::{Error, Result};
mod clients;
mod message_ttl;
mod number_of_messages;
mod pop_receipt;
pub mod prelude;
mod queue_service_properties;
mod queue_stored_access_policy;
pub mod requests;
pub mod responses;
mod visibility_timeout;
pub use clients::*;
pub use message_ttl::MessageTTL;
pub use number_of_messages::NumberOfMessages;
pub use pop_receipt::PopReceipt;
pub use queue_service_properties::QueueServiceProperties;
pub use queue_stored_access_policy::QueueStoredAccessPolicy;
pub use visibility_timeout::VisibilityTimeout;
|
// svd2rust-style generated register accessors for ADC_LHTR1.
#[doc = "Reader of register ADC_LHTR1"]
pub type R = crate::R<u32, super::ADC_LHTR1>;
#[doc = "Writer for register ADC_LHTR1"]
pub type W = crate::W<u32, super::ADC_LHTR1>;
#[doc = "Register ADC_LHTR1 `reset()`'s with value 0x0fff_0000"]
impl crate::ResetValue for super::ADC_LHTR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // NOTE(review): this reset value sets bits 16..=27, which extends past
        // the 26-bit LHTR1 field mask (0x03ff_ffff) used below — presumably
        // copied verbatim from the vendor SVD; confirm against the reference
        // manual.
        0x0fff_0000
    }
}
#[doc = "Reader of field `LHTR1`"]
pub type LHTR1_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `LHTR1`"]
pub struct LHTR1_W<'a> {
    w: &'a mut W,
}
impl<'a> LHTR1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // Clear the 26-bit field, then OR in the (masked) new value; bits
        // outside the mask are preserved.
        self.w.bits = (self.w.bits & !0x03ff_ffff) | ((value as u32) & 0x03ff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:25 - ADC analog watchdog 2 threshold low"]
    #[inline(always)]
    pub fn lhtr1(&self) -> LHTR1_R {
        // Extract the low 26 bits of the register as the field value.
        LHTR1_R::new((self.bits & 0x03ff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:25 - ADC analog watchdog 2 threshold low"]
    #[inline(always)]
    pub fn lhtr1(&mut self) -> LHTR1_W {
        LHTR1_W { w: self }
    }
}
|
extern crate bit_set;
extern crate rand;
use bit_set::BitSet;
use std::collections::HashMap;
// how does this differ from
// use crate::model::*; ?
use super::mating::*;
use super::model::*;
use super::parameters::*;
use super::population_founding::*;
///
/// Structure for capturing the simulation state.
///
#[derive(Clone)]
pub struct Simulation {
    /// Parameters the simulation was configured with.
    pub params: SimParameters,
    /// The living population, or `None` until `initialize()` has been called.
    pub current_generation: Option<Population>,
}
impl Simulation {
///
/// Create a new simulation
///
pub fn new(params: SimParameters) -> Simulation {
Simulation {
params: params,
current_generation: None
}
}
///
/// Initialize simulation by generating an initial population
///
pub fn initialize(&mut self) -> () {
println!("Initializing!");
let strategy = &self.params.population_initialization_strategy;
let population = match strategy {
PopulationInitializationStrategy::ClonedFromSingleIndividual => clone_population(&self.params),
PopulationInitializationStrategy::AllRandomIndividuals => randomly_generate_population(&self.params)
};
self.current_generation = Some(population);
}
///
/// Simulate one generation
///
pub fn step(&mut self) -> () {
self.current_generation = match self.current_generation {
None => None,
Some(ref g) => Some(reproduce(g, &self.params)),
};
}
///
/// Identifies and removes positions where all samples are
/// homozygous.
///
pub fn trim(&mut self) -> () {
let matrix_option = self.to_matrix();
if matrix_option.is_none() {
return;
}
let matrix = matrix_option.unwrap();
let current_generation = self.current_generation.as_ref().unwrap();
let mut fixed = BitSet::new();
// find positions
for (pos, genotypes) in matrix {
let sum = genotypes.iter()
.fold(0usize, |sum, val| sum + *val as usize);
// 2 chromosomes per individual
if sum == 2 * self.params.n_individuals {
fixed.insert(pos);
}
}
// filter out mutations
let mut trimmed = Vec::with_capacity(self.params.n_individuals);
for ref indiv in current_generation {
let mut chrom1 = indiv[0].clone();
let mut chrom2 = indiv[1].clone();
chrom1.alleles.difference_with(&fixed);
chrom2.alleles.difference_with(&fixed);
trimmed.push([chrom1, chrom2]);
}
self.current_generation = Some(trimmed);
}
///
/// Print out all individuals
///
pub fn print(&mut self) -> () {
match self.current_generation {
Some(ref g) =>
for ref indiv in g {
print!("[");
for i in indiv[0].alleles.iter() {
print!("{}, ", i);
}
print!("], ");
print!("[");
for i in indiv[1].alleles.iter() {
print!("{}, ", i);
}
println!("] ");
},
None => println!("Uninitialized!")
}
}
///
/// Convert mutations to a sparse matrix of genotypes.
/// The keys of the resulting map are the positions, while
/// the values are vectors of the individuals' genotypes.
///
pub fn to_matrix(&self) -> Option<HashMap<usize, Vec<u8>>> {
match self.current_generation {
None => None,
Some(ref g) => {
let n_individuals = self.params.n_individuals;
let mut matrix : HashMap<usize, Vec<u8>> = HashMap::new();
for (idx, individual) in g.iter().enumerate() {
for pos in individual[0].alleles.iter() {
match matrix.get_mut(&pos) {
Some(ref mut genotype_counts) => genotype_counts[idx] += 1,
None => {
let mut genotype_counts = vec![0u8; n_individuals];
genotype_counts[idx] += 1;
matrix.insert(pos, genotype_counts);
}
}
}
for pos in individual[1].alleles.iter() {
match matrix.get_mut(&pos) {
Some(ref mut genotype_counts) => genotype_counts[idx] += 1,
None => {
let mut genotype_counts = vec![0u8; n_individuals];
genotype_counts[idx] += 1;
matrix.insert(pos, genotype_counts);
}
}
}
}
Some(matrix)
}
}
}
}
|
#[macro_use]
pub mod irq;
pub mod eabi;
pub mod semihosting;
pub use self::irq::{IrqHandler, STACK_START, start};
pub use self::cpu::isr::{VectorTable, ExceptionVectors, IrqContext};
#[cfg(target_cpu = "cortex-m0")] pub mod cortex_m0;
#[cfg(target_cpu = "cortex-m0")] pub use self::cortex_m0 as cpu;
#[cfg(target_cpu = "cortex-m3")] pub mod cortex_m3;
#[cfg(target_cpu = "cortex-m3")] pub use self::cortex_m3 as cpu;
#[macro_export]
// Issue a supervisor call (SVC) with the given immediate number, passing the
// arguments in r0 (and r1/r2 in the three-argument form) and returning the
// value the handler leaves in r0, as usize.
// NOTE(review): this uses the old LLVM-style `asm!` syntax (pre-Rust-1.59),
// so it only builds on the nightly toolchain this project pins — confirm.
macro_rules! svc {
    ($svc:expr, $r0:expr) => {
        unsafe {
            let out: u32;
            // r0 is both the input argument and the result register.
            asm!("svc $2" :"={r0}"(out):"{r0}"($r0 as u32), "i"($svc):: "volatile");
            out as usize
        }
    };
    ($svc:expr, $r0:expr, $r1:expr, $r2:expr) => {
        unsafe {
            let out: u32;
            asm!("svc $4" :"={r0}"(out):"{r0}"($r0 as u32), "{r1}"($r1 as u32), "{r2}"($r2 as u32), "i"($svc):: "volatile");
            out as usize
        }
    };
}
|
/// Find the bus with the earliest departure at or after `estimate`, returning
/// `(bus_id, wait_minutes)`. Bus `x` departs at every multiple of `x`.
///
/// Fixes an off-by-one in the original `((estimate / x) + 1) * x`: when
/// `estimate` was an exact multiple of `x`, it reported a wait of a full
/// period instead of 0. Also drops the leftover debug `println!` and takes
/// a slice (`&Vec` deref-coerces, so existing callers are unaffected).
fn nearest_bus(estimate: i32, available: &[i32]) -> (i32, i32) {
    // Earliest departure of `bus` at or after `estimate` (ceiling division).
    let departure = |bus: i32| ((estimate + bus - 1) / bus) * bus;
    let bus = *available
        .iter()
        .min_by_key(|&&bus| departure(bus))
        .expect("available must contain at least one bus");
    (bus, departure(bus) - estimate)
}
// Part-two scratch work (AoC 2020 day 13 style: buses departing at fixed
// offsets). Only handles the first two buses of the sample (7 and 13) and
// prints candidate timestamps; a more general attempt is left commented out.
fn day2() {
    // Parse "7,13,x,..." into (offset, bus_id) pairs, dropping the 'x' slots.
    let test_data: Vec<(usize, i64)> = r"
7,13,x,x,59,x,31,19".to_string().trim().split(",").enumerate().map(|(i, x)| (i, x.parse())).filter(|(i, x)| x.is_ok()).map(|(i, x)| (i, x.unwrap())).collect();
    println!("len={}", test_data.len());
    println!("{:?}", test_data);
    // Print the first five timestamps t with t % 7 == 0 and (t + 1) % 13 == 0.
    let mut count = 5;
    let mut check = 1;
    while count > 0 {
        while !(check % 7 == 0 && (check + 1) % 13 == 0) {
            check = check + 1;
        }
        println!("check={}", check);
        count = count - 1;
        check = check + 1;
    }
    // let mut prev = 7;
    // let mut check = 7;
    // for (i, x) in &test_data {
    //     println!("{} {} {}", i, x, prev);
    //     while !((check + (*i as i64)) % x == 0) {
    //         check = check + prev;
    //     }
    //     println!("check={}", check);
    //     prev = *x;
    // }
    //
    println!("check={}", check);
}
// Driver: solve part 1 on the real puzzle input, then run the part-2 scratch.
fn main() {
    let estimate: i32 = 1001796;
    // Parse the schedule, silently discarding the 'x' placeholders.
    let test_data: Vec<i32> = r"37,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,41,x,x,x,x,x,x,x,x,x,457,x,x,x,x,x,x,x,x,x,x,x,x,13,17,x,x,x,x,x,x,x,x,23,x,x,x,x,x,29,x,431,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,x,19".to_string().trim().split(",").filter_map(|x| x.parse().ok()).collect();
    let (bus, wait) = nearest_bus(estimate, &test_data);
    // Part-1 answer is the bus id multiplied by the wait time.
    println!("part1 = {}", bus * wait);
    day2();
}
#[cfg(test)]
mod tests {
    use super::*;
    // Part-1 sample from the puzzle statement: bus 59 departs at 944, wait 5.
    #[test]
    fn initial_test() {
        let estimate: i32 = 939;
        let test_data: Vec<i32> = r"
7,13,x,x,59,x,31,19".to_string().trim().split(",").filter_map(|x| x.parse().ok()).collect();
        let (bus, wait) = nearest_bus(estimate, &test_data);
        assert_eq!(wait * bus, 295);
    }
    // Exploratory scaffolding, not a real assertion-based test: it ends with
    // `assert_eq!(true, false)` — presumably a deliberate failure so the
    // println! output is shown by the test runner. TODO: replace with a real
    // assertion once part 2 is solved.
    #[test]
    fn day2_test() {
        let test_data: Vec<(usize, i64)> = r"
7,13,x,x,59,x,31,19".to_string().trim().split(",").enumerate().map(|(i, x)| (i, x.parse())).filter(|(i, x)| x.is_ok()).map(|(i, x)| (i, x.unwrap())).collect();
        println!("len={}", test_data.len());
        println!("{:?}", test_data);
        let mut check = 7;
        for (i, x) in &test_data {
            println!("{} {}", i, x);
            while !(check % x == 0) {
                check = check + x;
                println!("{}", check);
            }
        }
        println!("check={}", check);
        // Product of all bus ids (the CRT modulus for part 2).
        let mut value: i64 = 1;
        for (i, x) in &test_data {
            value = value * x;
        }
        println!("mult={}", value);
        assert_eq!(true, false);
    }
}
|
use parser::ast::{IntSize, PrimitiveType, TypeKind};
/// Byte-size queries for AST-level types.
pub trait AatbeSizeOf {
    /// Total size of a value of this type, in bytes.
    fn size_of(&self) -> usize;
    /// Size in bytes of the smallest value this type can hold
    /// (for variants: the smallest alternative).
    fn smallest(&self) -> usize;
}
impl AatbeSizeOf for PrimitiveType {
    /// Byte width of a primitive value. Signed and unsigned integers of the
    /// same width share an arm; pointer-like types assume a 64-bit target.
    fn size_of(&self) -> usize {
        match self {
            PrimitiveType::UInt(IntSize::Bits8) | PrimitiveType::Int(IntSize::Bits8) => 1,
            PrimitiveType::UInt(IntSize::Bits16) | PrimitiveType::Int(IntSize::Bits16) => 2,
            PrimitiveType::UInt(IntSize::Bits32) | PrimitiveType::Int(IntSize::Bits32) => 4,
            PrimitiveType::UInt(IntSize::Bits64) | PrimitiveType::Int(IntSize::Bits64) => 8,
            // TODO/FIXME: platform-specific pointer size for these three.
            PrimitiveType::Str | PrimitiveType::Pointer(_) | PrimitiveType::Box(_) => 8,
            PrimitiveType::Bool | PrimitiveType::Char => 1,
            other => unimplemented!("{:?}", other),
        }
    }
    fn smallest(&self) -> usize {
        unimplemented!()
    }
}
impl AatbeSizeOf for TypeKind {
    /// Total size in bytes of a user-defined type.
    fn size_of(&self) -> usize {
        match self {
            TypeKind::Newtype(ty) => ty.size_of(),
            // NOTE(review): this sums *all* payload sizes plus a 1-byte tag.
            // A tagged union would normally take the max payload instead —
            // confirm whether variant payloads are laid out sequentially here.
            TypeKind::Variant(_, Some(types)) => {
                types.iter().map(|ty| ty.size_of()).sum::<usize>() + 1
            }
            // A payload-less variant is just the 1-byte tag.
            TypeKind::Variant(_, None) => 1,
        }
    }
    /// Size of the smallest value this type can hold.
    fn smallest(&self) -> usize {
        match self {
            TypeKind::Newtype(ty) => ty.size_of(),
            // NOTE(review): `min().unwrap()` panics on an empty payload list —
            // assumes the parser never produces `Some(vec![])`; verify.
            TypeKind::Variant(_, Some(types)) => types.iter().map(|ty| ty.size_of()).min().unwrap(),
            TypeKind::Variant(_, None) => 1,
        }
    }
}
|
#![cfg_attr(feature = "flame_it", feature(plugin, custom_attribute))]
#![cfg_attr(feature = "flame_it", plugin(flamer))]
#[macro_use]
extern crate structopt;
extern crate url;
#[macro_use]
extern crate log;
extern crate fern;
#[cfg(feature = "flame_it")]
extern crate flame;
extern crate ytdl;
// mod decipher;
// mod downloader;
mod logger;
// mod parser;
// use downloader::download;
// use parser::parse;
use std::collections::HashMap;
use structopt::StructOpt;
use url::Url;
use ytdl::downloader::download;
use ytdl::parse;
use ytdl::Provider;
// CLI definition. NOTE(review): structopt renders the doc comments below as
// the --help text, so they are program output and are left untouched.
/// A basic example
#[derive(StructOpt, Debug)]
#[structopt(name = "ytdl")]
enum Opt {
    #[structopt(name = "download")]
    /// download video
    Download { url_or_id: String },
    #[structopt(name = "list")]
    /// list douyin videos by user_id
    List { url_or_id: String },
}
/// Entry point: initializes logging, parses CLI args, and dispatches to the
/// download or list subcommand.
fn main() {
    // ::std::env::set_var("RUST_BACKTRACE", "full");
    logger::init();
    let opt = Opt::from_args();
    match opt {
        Opt::Download { url_or_id } => {
            // Resolve which provider (YouTube id, douyin/tiktok URL) this is.
            if let Some(ref p) = get_info(url_or_id) {
                let video_url = parse(p);
                #[cfg_attr(feature = "flame_it", flame)]
                download(video_url);
            } else {
                debug!("unknown provider")
            }
        }
        Opt::List { url_or_id } => {
            // NOTE(review): listing currently just echoes the argument.
            println!("{}", url_or_id);
        }
    }
    // Dump the report to disk
    #[cfg(feature = "flame_it")]
    flame::dump_html(&mut ::std::fs::File::create("flame-graph.html").unwrap()).unwrap();
}
/// Maps a URL or a bare video id to a `Provider`.
///
/// Bare (non-http) input is treated as a YouTube video id. For URLs the host
/// decides the provider; unknown hosts, unparseable URLs, and YouTube URLs
/// without a `v` query parameter all yield `None` instead of panicking
/// (the original `Url::parse(..).unwrap()` and `m["v"]` both panicked).
fn get_info(url_or_id: String) -> Option<Provider> {
    if !url_or_id.starts_with("http") {
        return Some(Provider::Youtube(url_or_id));
    }
    // Malformed URL -> "no provider" rather than a panic.
    let url = Url::parse(&url_or_id).ok()?;
    let m: HashMap<_, _> = url.query_pairs().into_owned().collect();
    match url.host_str() {
        // NOTE(review): the original matched only the literal host "youtube",
        // which no real YouTube URL has; the real hosts are accepted too.
        Some("youtube") | Some("youtube.com") | Some("www.youtube.com") => {
            // Missing `v` parameter means there is no video id to extract.
            m.get("v").cloned().map(Provider::Youtube)
        }
        Some("www.douyin.com") | Some("www.tiktokv.com") => Some(Provider::Douyin(url_or_id)),
        h => {
            println!("{:?}", h);
            None
        }
    }
}
// https://www.tiktokv.com/i18n/share/video/6560042923969219841
|
use chip8::{Address, Register};
use std::fmt;
/// Panics with a uniform "unknown opcode" message for the raw word `$x`.
macro_rules! no_opcode {
    ($x: expr) => {
        // `panic!` formats its arguments itself; wrapping them in `format!`
        // is the deprecated non-literal panic form (a hard error under the
        // 2021-edition panic macros).
        panic!("No OpCode for 0x{:04X} yet!", $x)
    };
}
/// A decoded CHIP-8 instruction.
///
/// `Register` operands are the conventional Vx/Vy register indices, `u8`
/// operands are immediate constants, and `Address` is the 12-bit NNN field.
pub enum OpCode {
    // --- register arithmetic / logic ---
    Set(Register, u8),
    Copy(Register, Register),
    Add(Register, u8),
    AddVy(Register, Register),
    SubVx(Register, Register),
    SubVy(Register, Register),
    And(Register, Register),
    Or(Register, Register),
    Xor(Register, Register),
    ShiftRight(Register, Register),
    ShiftLeft(Register, Register),
    // --- control flow / branches ---
    Jmp(Address),
    JmpV0(Address),
    Jeq(Register, u8),
    JeqVy(Register, Register),
    Jneq(Register, u8),
    JneqVy(Register, Register),
    JmpK(Register),
    JmpNK(Register),
    // --- register-file load/store and subroutines ---
    Load(Register),
    Store(Register),
    Call(Address),
    Return(),
    // --- timers ---
    SetDelayTimer(Register),
    LdDelayTimer(Register),
    SetSoundTimer(Register),
    // --- index register, display, misc ---
    SetI(Address),
    AddIVx(Register),
    DrawSprite(Register, Register, u8),
    Font(Register),
    ClearScreen(),
    BCD(Register),
    Random(Register, u8),
    WaitForKey(Register),
}
impl fmt::Debug for OpCode {
    // Mnemonic-style formatting: register operands print as one hex digit,
    // 8-bit immediates as two, and addresses as three.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            OpCode::Set(vx, value) => write!(f, "Set({:1x},{:2x})", vx, value),
            OpCode::Copy(vx, vy) => write!(f, "Copy({:1x},{:1x})", vx, vy),
            OpCode::Add(vx, value) => write!(f, "Add({:1x},{:2x})", vx, value),
            OpCode::Or(vx, vy) => write!(f, "Or({:1x},{:1x})", vx, vy),
            OpCode::And(vx, vy) => write!(f, "And({:1x},{:1x})", vx, vy),
            OpCode::Xor(vx, vy) => write!(f, "Xor({:1x},{:1x})", vx, vy),
            OpCode::AddVy(vx, vy) => write!(f, "AddVy({:1x},{:1x})", vx, vy),
            OpCode::SubVy(vx, vy) => write!(f, "SubVy({:1x},{:1x})", vx, vy),
            OpCode::ShiftRight(vx, vy) => write!(f, "ShiftRight({:1x},{:1x})", vx, vy),
            OpCode::SubVx(vx, vy) => write!(f, "SubVx({:1x},{:1x})", vx, vy),
            OpCode::ShiftLeft(vx, vy) => write!(f, "ShiftLeft({:1x},{:1x})", vx, vy),
            OpCode::JneqVy(vx, vy) => write!(f, "JneqVy({:1x},{:1x})", vx, vy),
            OpCode::SetI(address) => write!(f, "SetI({:3x})", address),
            OpCode::Jmp(address) => write!(f, "Jmp({:3x})", address),
            OpCode::Jeq(vx, value) => write!(f, "Jeq({:1x},{:2x})", vx, value),
            OpCode::Jneq(vx, value) => write!(f, "Jneq({:1x},{:2x})", vx, value),
            OpCode::JeqVy(vx, vy) => write!(f, "JeqVy({:1x},{:1x})", vx, vy),
            OpCode::JmpV0(address) => write!(f, "JmpV0({:3x})", address),
            OpCode::JmpK(vx) => write!(f, "JmpK({:1x})", vx),
            OpCode::JmpNK(vx) => write!(f, "JmpNK({:1x})", vx),
            OpCode::Store(vx) => write!(f, "Store({:1x})", vx),
            OpCode::Load(vx) => write!(f, "Load({:1x})", vx),
            OpCode::Return() => write!(f, "Return()"),
            OpCode::Call(address) => write!(f, "Call({:3x})", address),
            OpCode::SetDelayTimer(vx) => write!(f, "SetDelayTimer({:1x})", vx),
            OpCode::LdDelayTimer(vx) => write!(f, "LdDelayTimer({:1x})", vx),
            OpCode::SetSoundTimer(vx) => write!(f, "SetSoundTimer({:1x})", vx),
            OpCode::DrawSprite(vx, vy, value) => {
                write!(f, "DrawSprite({:1x},{:1x},{:1x})", vx, vy, value)
            }
            OpCode::Font(vx) => write!(f, "Font({:1x})", vx),
            OpCode::ClearScreen() => write!(f, "ClearScreen()"),
            OpCode::AddIVx(vx) => write!(f, "AddIVx({:1x})", vx),
            OpCode::BCD(vx) => write!(f, "BCD({:1x})", vx),
            OpCode::Random(vx, value) => write!(f, "Random({:1x},{:2x})", vx, value),
            OpCode::WaitForKey(vx) => write!(f, "WaitForKey({:1x})", vx),
            // _ => write!(f, ""),
        }
    }
}
/// A raw 16-bit CHIP-8 instruction word.
pub struct Instruction {
    value: u16,
}
impl Instruction {
    /// Wraps a raw instruction word.
    pub fn new(value: u16) -> Instruction {
        Instruction { value }
    }
    /// Decodes the word into an `OpCode`.
    ///
    /// Dispatches on the top nibble first, then on sub-fields for the
    /// overloaded groups (0x0, 0x8, 0xE, 0xF). Panics (via `no_opcode!`)
    /// on encodings that are not implemented.
    pub fn decode(&self) -> OpCode {
        match (self.value & 0xF000) >> 12 {
            0x0 => match self.value & 0x0FFF {
                0x0E0 => OpCode::ClearScreen(),
                0x0EE => OpCode::Return(),
                _ => no_opcode!(self.value),
            },
            0x1 => OpCode::Jmp(self.get_address()),
            0x2 => OpCode::Call(self.get_address()),
            0x3 => OpCode::Jeq(self.get_vx(), self.get_8bconst()),
            0x4 => OpCode::Jneq(self.get_vx(), self.get_8bconst()),
            0x5 => OpCode::JeqVy(self.get_vx(), self.get_vy()),
            0x6 => OpCode::Set(self.get_vx(), self.get_8bconst()),
            0x7 => OpCode::Add(self.get_vx(), self.get_8bconst()),
            // 0x8xyN: ALU group, selected by the low nibble.
            0x8 => match self.value & 0x000F {
                0x0 => OpCode::Copy(self.get_vx(), self.get_vy()),
                0x1 => OpCode::Or(self.get_vx(), self.get_vy()),
                0x2 => OpCode::And(self.get_vx(), self.get_vy()),
                0x3 => OpCode::Xor(self.get_vx(), self.get_vy()),
                0x4 => OpCode::AddVy(self.get_vx(), self.get_vy()),
                0x5 => OpCode::SubVy(self.get_vx(), self.get_vy()),
                0x6 => OpCode::ShiftRight(self.get_vx(), self.get_vy()),
                0x7 => OpCode::SubVx(self.get_vx(), self.get_vy()),
                0xE => OpCode::ShiftLeft(self.get_vx(), self.get_vy()),
                _ => no_opcode!(self.value),
            },
            0x9 => OpCode::JneqVy(self.get_vx(), self.get_vy()),
            0xA => OpCode::SetI(self.get_address()),
            0xB => OpCode::JmpV0(self.get_address()),
            0xC => OpCode::Random(self.get_vx(), self.get_8bconst()),
            0xD => OpCode::DrawSprite(self.get_vx(), self.get_vy(), self.get_4bconst()),
            // 0xEx: key-state branches, selected by the low byte.
            0xE => match self.value & 0x00FF {
                0x9E => OpCode::JmpK(self.get_vx()),
                0xA1 => OpCode::JmpNK(self.get_vx()),
                _ => no_opcode!(self.value),
            },
            // 0xFx: timers, memory and misc, selected by the low byte.
            0xF => match self.value & 0x00FF {
                0x0A => OpCode::WaitForKey(self.get_vx()),
                0x15 => OpCode::SetDelayTimer(self.get_vx()),
                0x07 => OpCode::LdDelayTimer(self.get_vx()),
                0x18 => OpCode::SetSoundTimer(self.get_vx()),
                0x1E => OpCode::AddIVx(self.get_vx()),
                0x29 => OpCode::Font(self.get_vx()),
                0x33 => OpCode::BCD(self.get_vx()),
                0x55 => OpCode::Store(self.get_vx()),
                0x65 => OpCode::Load(self.get_vx()),
                _ => no_opcode!(self.value),
            },
            _ => no_opcode!(self.value),
        }
    }
    /// Lowest 12 bits: the NNN address field.
    pub fn get_address(&self) -> Address {
        self.value & 0x0FFF
    }
    /// Bits 8..=11: the Vx register index.
    pub fn get_vx(&self) -> Register {
        ((self.value & 0x0F00) >> 8) as u8
    }
    /// Bits 4..=7: the Vy register index.
    pub fn get_vy(&self) -> Register {
        ((self.value & 0x00F0) >> 4) as u8
    }
    /// Lowest 8 bits: the NN immediate constant.
    pub fn get_8bconst(&self) -> u8 {
        (self.value & 0x00FF) as u8
    }
    /// Lowest 4 bits: the N immediate constant.
    pub fn get_4bconst(&self) -> u8 {
        (self.value & 0x000F) as u8
    }
}
|
mod stack;
use stack::List;
use crate::FunctionalWindow;
use alga::general::AbstractGroup;
use alga::general::Operator;
use std::marker::PhantomData;
/// A window element paired with a precomputed running aggregate.
#[derive(Clone)]
pub struct Elem<T> {
    // The stored value itself.
    val: T,
    // Running aggregate including this element; the combining direction
    // differs between the front and back lists (see `insert` and `rot`).
    agg: T,
}
/// Functional sliding-window aggregator over a group operation.
///
/// Maintains three persistent lists in the style of a real-time functional
/// queue: `front` and `back` hold the window contents with precomputed
/// aggregates at their heads, while `next` schedules the incremental
/// rotation work performed by `makeq`.
struct FOA<Value, BinOp>
where
    Value: AbstractGroup<BinOp> + Clone,
    BinOp: Operator,
{
    front: List<Elem<Value>>,
    next: List<Elem<Value>>,
    back: List<Elem<Value>>,
    // Zero-sized marker tying the group operation type to the struct.
    op: PhantomData<BinOp>,
}
impl<Value, BinOp> FunctionalWindow<Value, BinOp> for FOA<Value, BinOp>
where
    Value: AbstractGroup<BinOp> + Clone,
    BinOp: Operator,
{
    /// Creates an empty window.
    fn new() -> FOA<Value, BinOp> {
        FOA {
            front: List::empty(),
            next: List::empty(),
            back: List::empty(),
            op: PhantomData,
        }
    }
    /// Returns a new window with `v` appended at the back.
    ///
    /// The new back element stores the running aggregate of the whole back
    /// list, so `query` can read it in O(1).
    fn insert(&mut self, v: Value) -> FOA<Value, BinOp> {
        FOA {
            front: self.front.clone(),
            next: self.next.clone(),
            back: self.back.cons(Elem {
                agg: Self::agg(&self.back).operate(&v),
                val: v,
            }),
            op: self.op,
        }
        .makeq()
    }
    /// Returns a new window with the oldest (front) element removed.
    fn evict(&mut self) -> FOA<Value, BinOp> {
        FOA {
            front: self.front.tail(),
            next: self.next.clone(),
            back: self.back.clone(),
            op: self.op,
        }
        .makeq()
    }
    /// Aggregate of the whole window: front aggregate combined with the
    /// back aggregate.
    fn query(&self) -> Value {
        Self::agg(&self.front).operate(&Self::agg(&self.back))
    }
}
impl<Value, BinOp> FOA<Value, BinOp>
where
    Value: AbstractGroup<BinOp> + Clone,
    BinOp: Operator,
{
    /// Aggregate stored at the head of `list`, or the group identity when
    /// the list is empty.
    fn agg(list: &List<Elem<Value>>) -> Value {
        list.head().map(|elem| elem.agg).unwrap_or(Value::identity())
    }
    /// Re-establishes the queue invariant after an insert or evict.
    ///
    /// When the rotation schedule (`next`) is exhausted, the back list is
    /// rotated into the front and the new front doubles as the fresh
    /// schedule; otherwise one unit of scheduled work is consumed by taking
    /// `next.tail()`.
    fn makeq(&self) -> FOA<Value, BinOp> {
        if self.next.is_empty() {
            let front = Self::rot(FOA {
                front: self.front.clone(),
                next: self.back.clone(),
                back: List::empty(),
                op: self.op,
            });
            FOA {
                next: front.clone(),
                front,
                back: List::empty(),
                op: self.op,
            }
        } else {
            FOA {
                front: self.front.clone(),
                next: self.next.tail(),
                back: self.back.clone(),
                op: self.op,
            }
        }
    }
    /// Rotates `next` (the old back list) onto `front`, recomputing each
    /// element's aggregate so the new front's aggregates run in front order.
    ///
    /// NOTE(review): the `unwrap()`s rely on `rot` only being called while
    /// `next` is non-empty (and `next` at least as long as `front`) —
    /// confirm the queue invariant guarantees this.
    fn rot(self) -> List<Elem<Value>> {
        let back = self.back.cons(
            self.next
                .head()
                .map(|mut elem| {
                    // Re-aggregate relative to what has been rotated so far.
                    elem.agg = elem.val.operate(&Self::agg(&self.back));
                    elem
                })
                .unwrap(),
        );
        if self.front.is_empty() {
            back
        } else {
            // Recurse one step deeper, then prepend the current front head
            // with its aggregate recomputed over everything behind it.
            FOA {
                front: self.front.tail(),
                next: self.next.tail(),
                back,
                op: self.op,
            }
            .rot()
            .cons(
                self.front
                    .head()
                    .map(|mut elem| {
                        elem.agg = elem
                            .val
                            .operate(&Self::agg(&self.next))
                            .operate(&Self::agg(&self.back));
                        elem
                    })
                    .unwrap(),
            )
        }
    }
}
|
use graph::{Graph, NodeT};
use numpy::{PyArray1, PyArray2};
use pyo3::prelude::*;
/// Python-facing wrapper around the Rust `Graph`.
#[pyclass]
#[derive(Clone)]
pub(crate) struct EnsmallenGraph {
    pub(crate) graph: Graph,
}
/// 2-D numpy array of node ids (one row of context per sample).
pub type PyContexts = Py<PyArray2<NodeT>>;
/// 1-D numpy array of node ids ("words").
pub type PyWords = Py<PyArray1<NodeT>>;
/// 1-D numpy array of per-node frequencies.
pub type PyFrequencies = Py<PyArray1<f64>>;
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// OrganizationCreateResponse : Response object for an organization creation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OrganizationCreateResponse {
    /// API key for the created organization, if returned.
    #[serde(rename = "api_key", skip_serializing_if = "Option::is_none")]
    pub api_key: Option<Box<crate::models::ApiKey>>,
    /// Application key for the created organization, if returned.
    #[serde(rename = "application_key", skip_serializing_if = "Option::is_none")]
    pub application_key: Option<Box<crate::models::ApplicationKey>>,
    /// The organization itself, if returned.
    #[serde(rename = "org", skip_serializing_if = "Option::is_none")]
    pub org: Option<Box<crate::models::Organization>>,
    /// The creating/owning user, if returned.
    #[serde(rename = "user", skip_serializing_if = "Option::is_none")]
    pub user: Option<Box<crate::models::User>>,
}
impl OrganizationCreateResponse {
    /// Response object for an organization creation.
    ///
    /// All fields start as `None`; deserialization fills in whatever the
    /// API actually returned.
    pub fn new() -> OrganizationCreateResponse {
        OrganizationCreateResponse {
            api_key: None,
            application_key: None,
            org: None,
            user: None,
        }
    }
}
|
use crate::vec3::Vec3;
pub fn to_ppm_color(color: &Vec3) -> (i32, i32, i32) {
let ppm: Vec3 = color.sqrt().clamp(0.0, 0.999) * 256.0;
let vals: (f64, f64, f64) = ppm.into();
(vals.0 as i32, vals.1 as i32, vals.2 as i32)
} |
use crate::search::search_field::TermId;
use fnv::FnvHashSet;
/// Materialized result of a filter: a plain id list for very large results,
/// or a hash set for fast membership checks on smaller ones
/// (see `from_result` for the size cutoff).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum FilterResult {
    Vec(Vec<TermId>),
    Set(FnvHashSet<TermId>),
}
impl FilterResult {
    /// Chooses a representation based on the result size.
    ///
    /// Results larger than the threshold are kept as a plain vector;
    /// smaller ones are loaded into a hash set for O(1) membership tests.
    pub fn from_result(res: &[TermId]) -> FilterResult {
        const SET_THRESHOLD: usize = 100_000;
        if res.len() > SET_THRESHOLD {
            FilterResult::Vec(res.to_vec())
        } else {
            // Size the set for the actual input; the original always
            // reserved the full 100_000-entry capacity even for tiny results.
            let mut filter = FnvHashSet::with_capacity_and_hasher(res.len(), Default::default());
            filter.extend(res.iter().copied());
            FilterResult::Set(filter)
        }
    }
}
|
// Invocation of the crate's `enum_impl!` macro: generates the `Os` enum and
// its string conversions. The strings on the right appear to mirror the OS
// component of LLVM-style target triples — confirm against the macro's
// definition. (These `//` comments are stripped by the lexer before macro
// expansion, so they cannot affect the generated code.)
enum_impl! {
    /// Operating system
    Os {
        /// Unknown operating system
        Unknown => "unknown",
        /// AIX
        AIX => "aix",
        /// AMDHSA
        AMDHSA => "amdhsa",
        /// AMDPAL
        AMDPAL => "amdpal",
        /// Ananas
        Ananas => "ananas",
        /// CUDA
        CUDA => "cuda",
        /// CloudABI
        CloudABI => "cloudabi",
        /// Contiki
        Contiki => "contiki",
        /// Darwin
        Darwin => "darwin",
        /// DragonFly
        DragonFly => "dragonfly",
        /// ELFIAMCU
        ELFIAMCU => "elfiamcu",
        /// Emscripten
        Emscripten => "emscripten",
        /// FreeBSD
        FreeBSD => "freebsd",
        /// Fuchsia
        Fuchsia => "fuchsia",
        /// Haiku
        Haiku => "haiku",
        /// HermitCore
        HermitCore => "hermit",
        /// Hurd
        Hurd => "hurd",
        /// IOS
        IOS => "ios",
        /// KFreeBSD
        KFreeBSD => "kfreebsd",
        /// Linux
        Linux => "linux",
        /// Lv2
        Lv2 => "lv2",
        /// MacOSX
        MacOSX => "macosx",
        /// Mesa3D
        Mesa3D => "mesa3d",
        /// Minix
        Minix => "minix",
        /// NVCL
        NVCL => "nvcl",
        /// NaCl
        NaCl => "nacl",
        /// NetBSD
        NetBSD => "netbsd",
        /// OpenBSD
        OpenBSD => "openbsd",
        /// PS4
        PS4 => "ps4",
        /// RTEMS
        RTEMS => "rtems",
        /// Solaris
        Solaris => "solaris",
        /// TvOS
        TvOS => "tvos",
        /// WASI
        WASI => "wasi",
        /// WatchOS
        WatchOS => "watchos",
        // Win32 is the one variant with multiple accepted spellings.
        /// Win32
        Win32 => "windows" "win32" "w32",
        /// ZOS
        ZOS => "zos",
    }
}
|
#![recursion_limit = "1024"]
#![allow(clippy::needless_return)]
mod app;
mod backend;
mod components;
mod data;
mod error;
mod examples;
mod page;
mod pages;
mod preferences;
mod spy;
mod utils;
use crate::app::Main;
use wasm_bindgen::prelude::*;
/// WASM entry point: sets up console logging and mounts the root `Main`
/// Yew component onto the page.
#[wasm_bindgen]
pub fn run_app() -> Result<(), JsValue> {
    wasm_logger::init(wasm_logger::Config::new(log::Level::Info));
    log::info!("Getting ready...");
    yew::start_app::<Main>();
    Ok(())
}
|
use good_memory_allocator::SpinLockedAllocator;
use x86_64::{
structures::paging::{
mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
},
VirtAddr,
};
use crate::memory::MemoryError;
// The global heap allocator; starts empty and is initialised by `init` once
// the heap pages are mapped.
#[global_allocator]
static ALLOCATOR: SpinLockedAllocator = SpinLockedAllocator::empty();
// Virtual address where the heap region is mapped.
const HEAP_START: usize = 0x_4321_1234_0000;
const HEAP_SIZE: usize = 1024 * 1024; /* 1MB */
// Collapse any page-mapping failure into the single domain error, so `?`
// works on `map_to` results below.
impl From<MapToError<Size4KiB>> for MemoryError {
    fn from(_: MapToError<Size4KiB>) -> Self {
        MemoryError::PageMappingError
    }
}
/// Maps the heap's page range to freshly allocated frames and hands the
/// region to the global allocator.
///
/// Returns `FrameAllocationError` when physical memory runs out and
/// `PageMappingError` (via `From`) when a mapping fails.
pub fn init(
    mapper: &mut impl Mapper<Size4KiB>,
    frame_allocator: &mut impl FrameAllocator<Size4KiB>,
) -> Result<(), MemoryError> {
    // Page range covering [HEAP_START, HEAP_START + HEAP_SIZE).
    let page_range = {
        let heap_start = VirtAddr::new(HEAP_START as u64);
        // Inclusive end address, so the last page is included exactly once.
        let heap_end = heap_start + HEAP_SIZE - 1u64;
        let heap_start_page = Page::containing_address(heap_start);
        let heap_end_page = Page::containing_address(heap_end);
        Page::range_inclusive(heap_start_page, heap_end_page)
    };
    for page in page_range {
        let frame = frame_allocator
            .allocate_frame()
            .ok_or(MemoryError::FrameAllocationError)?;
        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
        // SAFETY: the frame was just allocated for this previously unmapped
        // heap page, so the mapping cannot alias an existing one.
        unsafe { mapper.map_to(page, frame, flags, frame_allocator)?.flush() };
    }
    // SAFETY: the whole [HEAP_START, HEAP_START + HEAP_SIZE) range is now
    // mapped writable and used for nothing else.
    unsafe {
        ALLOCATOR.init(HEAP_START, HEAP_SIZE);
    }
    Ok(())
}
|
//! Basic "can I connect to a router" tests.
use crate::integration::common::*;
use wamp_proto::{transport::websocket::WebsocketTransport, uri::Uri, Client, ClientConfig};
/// Smoke test: connect to a freshly started router, then close cleanly.
#[tokio::test]
async fn connect_close() {
    let router = start_router().await;
    let url = router.get_url();
    let client_config = ClientConfig::new(&url, Uri::strict(TEST_REALM).unwrap());
    let mut client = Client::<WebsocketTransport>::new(client_config)
        .await
        .unwrap();
    // "wamp.error.goodbye" is the close-reason URI sent to the router.
    client
        .close(Uri::strict("wamp.error.goodbye").unwrap())
        .await
        .unwrap();
}
/// The client must observe the close (and not hang) when the router is
/// shut down first.
#[tokio::test]
async fn connect_then_router_closed() {
    let router = start_router().await;
    let url = router.get_url();
    let client_config = ClientConfig::new(&url, Uri::strict(TEST_REALM).unwrap());
    let client = Client::<WebsocketTransport>::new(client_config)
        .await
        .unwrap();
    // Dropping the router tears it down; wait_for_close must then resolve.
    drop(router);
    client.wait_for_close().await;
}
|
#![allow(unused_variables)]
/// Demo: `Vec<T>` can be instantiated at any element type.
fn main() {
    let list = Vec::<i32>::new();
    // 05.rs:2:9: 2:13 warning: unused variable: `list`, #[warn(unused_variables)] on by default
    // 05.rs:2 let list = Vec::<i32>::new();
    // ^~~~
    // All of the instantiations below are possible as well;
    // this is called parametric polymorphism.
    let list2 = Vec::<f64>::new();
    let list3 = Vec::<String>::new();
    let list4 = Vec::<Vec<i32>>::new();
}
|
//! Pre-defined SOCP solver
use super::prelude::*;
use std::io::Write;
/// Second-order cone program
///
/// <script src='https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js?config=TeX-MML-AM_CHTML' async></script>
///
/// The problem is
/// \\[
/// \\begin{array}{ll}
/// {\\rm minimize} & f^T x \\\\
/// {\\rm subject \\ to} & \\| G_i x + h_i \\|_2 \\le c_i^T x + d_i \\quad (i = 0, \\ldots, m - 1) \\\\
/// & A x = b,
/// \\end{array}
/// \\]
/// where
/// - variables \\( x \\in {\\bf R}^n \\)
/// - \\( f \\in {\\bf R}^n \\)
/// - \\( G_i \\in {\\bf R}^{n_i \\times n} \\), \\( h_i \\in {\\bf R}^{n_i} \\), \\( c_i \\in {\\bf R}^n \\), \\( d_i \\in {\\bf R} \\)
/// - \\( A \\in {\\bf R}^{p \\times n} \\), \\( b \\in {\\bf R}^p \\).
///
/// Internally an **approximately equivalent** problem is formed and
/// an auxiliary variable \\( s \\in {\\bf R}^m \\) is introduced for the infeasible start method as follows:
/// \\[
/// \\begin{array}{lll}
/// {\\rm minimize}\_{x,s} & f^T x \\\\
/// {\\rm subject \\ to} & {\\| G_i x + h_i \\|\_2^2 \\over s_i} \\le s_i & (i = 0, \\ldots, m - 1) \\\\
/// & s_i \\ge \\epsilon_{\\rm bd} & (i = 0, \\ldots, m - 1) \\\\
/// & c_i^T x + d_i = s_i & (i = 0, \\ldots, m - 1) \\\\
/// & A x = b,
/// \\end{array}
/// \\]
/// where \\( \\epsilon_{\\rm bd} > 0 \\) indicates the extent of approximation that excludes \\( c_i^T x + d_i = 0 \\) boundary.
pub trait SOCP {
    /// Solves the second-order cone program described in the trait-level
    /// documentation above; see the `PDIPM` implementation for the meaning
    /// of each argument.
    fn solve_socp<L>(&mut self, param: &PDIPMParam, log: &mut L,
                     vec_f: &Mat,
                     mat_g: &[Mat], vec_h: &[Mat], vec_c: &[Mat], scl_d: &[FP],
                     mat_a: &Mat, vec_b: &Mat)
                     -> Result<Mat, String>
    where L: Write;
}
/// Validates the problem dimensions for `solve_socp`.
///
/// Returns `(n, m, p)` — the number of variables, cone constraints and
/// equality constraints — or an error message naming the offending argument.
fn check_param(vec_f: &Mat,
               mat_g: &[Mat], vec_h: &[Mat], vec_c: &[Mat], scl_d: &[FP],
               mat_a: &Mat, vec_b: &Mat)
               -> Result<(usize, usize, usize), String>
{
    let (n, _) = vec_f.size();
    let m = mat_g.len();
    let (p, _) = mat_a.size();
    if n == 0 {return Err("vec_f: 0 rows".into());}
    // m = 0 means NO inequality constraints
    // p = 0 means NO equality constraints
    if vec_h.len() != m {return Err(format!("vec_h: length {} must be {}", vec_h.len(), m));}
    if vec_c.len() != m {return Err(format!("vec_c: length {} must be {}", vec_c.len(), m));}
    if scl_d.len() != m {return Err(format!("scl_d: length {} must be {}", scl_d.len(), m));}
    // BUG FIX: this message previously blamed "vec_c" while checking vec_f.
    if vec_f.size() != (n, 1) {return Err(format!("vec_f: size {:?} must be {:?}", vec_f.size(), (n, 1)));}
    for i in 0 .. m {
        let (ni, _) = mat_g[i].size();
        if mat_g[i].size() != (ni, n) {return Err(format!("mat_g[{}]: size {:?} must be {:?}", i, mat_g[i].size(), (ni, n)));}
        if vec_h[i].size() != (ni, 1) {return Err(format!("vec_h[{}]: size {:?} must be {:?}", i, vec_h[i].size(), (ni, 1)));}
        if vec_c[i].size() != (n, 1) {return Err(format!("vec_c[{}]: size {:?} must be {:?}", i, vec_c[i].size(), (n, 1)));}
    }
    if mat_a.size() != (p, n) {return Err(format!("mat_a: size {:?} must be {:?}", mat_a.size(), (p, n)));}
    if vec_b.size() != (p, 1) {return Err(format!("vec_b: size {:?} must be {:?}", vec_b.size(), (p, 1)));}
    Ok((n, m, p))
}
impl SOCP for PDIPM
{
    /// Runs the solver with given parameters.
    ///
    /// Returns `Ok` with optimal \\(x\\) or `Err` with message string.
    /// * `param` is solver parameters.
    /// * `log` outputs solver progress.
    /// * `vec_f` is \\(f\\).
    /// * `mat_g` is \\(G_0, \\ldots, G_{m-1}\\).
    /// * `vec_h` is \\(h_0, \\ldots, h_{m-1}\\).
    /// * `vec_c` is \\(c_0, \\ldots, c_{m-1}\\).
    /// * `scl_d` is \\(d_0, \\ldots, d_{m-1}\\).
    /// * `mat_a` is \\(A\\).
    /// * `vec_b` is \\(b\\).
    fn solve_socp<L>(&mut self, param: &PDIPMParam, log: &mut L,
                     vec_f: &Mat,
                     mat_g: &[Mat], vec_h: &[Mat], vec_c: &[Mat], scl_d: &[FP],
                     mat_a: &Mat, vec_b: &Mat)
                     -> Result<Mat, String>
    where L: Write
    {
        // ----- parameter check
        let (n, m, p) = check_param(vec_f, mat_g, vec_h, vec_c, scl_d, mat_a, vec_b)?;
        // Both the divide-by-zero guard and the s_i lower bound reuse param.eps.
        let eps_div0 = param.eps;
        let eps_bd = param.eps;
        // ----- start to solve
        // The closures below are the callbacks of the generic interior-point
        // solver, in order: objective gradient, objective Hessian, inequality
        // values, inequality Jacobian, per-constraint inequality Hessian,
        // equality system (A, b), and the initial point.
        let rslt = self.solve(param, log,
                              n + m, m + m, p + m, // '+ m' is for slack variables
                              |_, df_o| {
                                  // d/dx of f^T x is just f; the objective does not
                                  // involve the slack variables.
                                  df_o.rows_mut(0 .. n).assign(&vec_f);
                                  // for slack variables
                                  df_o.rows_mut(n .. n + m).assign_all(0.);
                              },
                              |_, ddf_o| {
                                  // Linear objective: the Hessian is identically zero.
                                  ddf_o.assign_all(0.);
                              },
                              |x, f_i| {
                                  // Inequality values: ||G_r x + h_r||^2 / s_r - s_r <= 0
                                  // plus the bound eps_bd - s_r <= 0 on each slack.
                                  let xn = x.rows(0 .. n);
                                  for r in 0 .. m {
                                      let xnr = x[(n + r, 0)];
                                      let inv_s = if xnr.abs() > eps_div0 {
                                          1. / xnr
                                      }
                                      else {
                                          // guard from div by zero
                                          1. / eps_div0
                                      };
                                      let tmp = &mat_g[r] * &xn + &vec_h[r];
                                      f_i[(r, 0)] = tmp.norm_p2sq() * inv_s - xnr;
                                      // for slack variables
                                      f_i[(r + m, 0)] = eps_bd - xnr;
                                  }
                              },
                              |x, df_i| {
                                  // Jacobian of the inequalities above.
                                  let xn = x.rows(0 .. n);
                                  df_i.assign_all(0.);
                                  for r in 0 .. m {
                                      let xnr = x[(n + r, 0)];
                                      let inv_s = if xnr.abs() > eps_div0 {
                                          1. / xnr
                                      }
                                      else {
                                          // guard from div by zero
                                          1. / eps_div0
                                      };
                                      let tmp1 = &mat_g[r] * &xn + &vec_h[r];
                                      let tmp1_norm_p2sq = tmp1.norm_p2sq();
                                      // d/dx: (2 / s_r) G_r^T (G_r x + h_r)
                                      let tmp2 = 2. * inv_s * mat_g[r].t() * tmp1;
                                      df_i.slice_mut(r ..= r, 0 .. n).assign(&tmp2.t());
                                      // for slack variables
                                      df_i[(r, n + r)] = -inv_s * inv_s * tmp1_norm_p2sq - 1.;
                                      // for slack variables
                                      df_i[(r + m, n + r)] = -1.;
                                  }
                              },
                              |x, ddf_i, i| {
                                  // Hessian of inequality `i`; the slack-bound rows
                                  // (i >= m) are linear, so their Hessian stays zero.
                                  ddf_i.assign_all(0.); // for slack variables
                                  if i < m {
                                      let xn = x.rows(0 .. n);
                                      let xni = x[(n + i, 0)];
                                      let inv_s = if xni.abs() > eps_div0 {
                                          1. / xni
                                      }
                                      else {
                                          // guard from div by zero
                                          1. / eps_div0
                                      };
                                      ddf_i.slice_mut(0 .. n, 0 .. n).assign(&(
                                          2. * inv_s * mat_g[i].t() * &mat_g[i]
                                      ));
                                      let tmp1 = &mat_g[i] * xn + &vec_h[i];
                                      let tmp1_norm_p2sq = tmp1.norm_p2sq();
                                      let tmp2 = -2. * inv_s * inv_s * mat_g[i].t() * tmp1;
                                      // for slack variables
                                      ddf_i.slice_mut(0 .. n, n + i ..= n + i).assign(&tmp2);
                                      // for slack variables
                                      ddf_i.slice_mut(n + i ..= n + i, 0 .. n).assign(&tmp2.t());
                                      // for slack variables
                                      ddf_i[(n + i, n + i)] = 2. * inv_s * inv_s * inv_s * tmp1_norm_p2sq;
                                  }
                              },
                              |a, b| {
                                  // Equality system: A x = b stacked with
                                  // c_r^T x - s_r = -d_r (the s_r = c_r^T x + d_r ties).
                                  a.assign_all(0.);
                                  b.assign_all(0.);
                                  a.slice_mut(0 .. p, 0 .. n).assign(mat_a);
                                  b.rows_mut(0 .. p).assign(vec_b);
                                  // for a slack variable
                                  for r in 0 .. m {
                                      a.slice_mut(p + r ..= p + r, 0 .. n).assign(&vec_c[r].t());
                                      a[(p + r, n + r)] = -1.;
                                      b[(p + r, 0)] = -scl_d[r];
                                  }
                              },
                              |mut x| {
                                  // Initial point: x = 0; each slack starts strictly
                                  // above ||h_i|| + eps_bd, doubling the margin until
                                  // floating-point addition actually moves the value.
                                  x.assign_all(0.);
                                  // slack variables
                                  for i in 0 .. m {
                                      let s = vec_h[i].norm_p2() + eps_bd;
                                      let mut margin = param.margin;
                                      let mut s_initial = s + margin;
                                      while s_initial <= s {
                                          margin *= 2.;
                                          s_initial = s + margin;
                                      }
                                      x[(n + i, 0)] = s_initial;
                                  }
                              }
        );
        // Strip the slack variables before returning the solution.
        match rslt {
            Ok(y) => Ok(y.rows(0 .. n).clone_sz()),
            Err(s) => Err(s.into())
        }
    }
}
|
/// Brute-force search for the noun/verb pair whose program output is
/// 19690720, then print the combined answer 100 * noun + verb.
fn main() {
    let mut memory: Vec<i32> = vec![];
    // Find the input noun and verb that cause the program to produce the output 19690720.
    'outer: for noun in 0..100 {
        for verb in 0..100 {
            // Re-parse the pristine program each attempt, since `calculate`
            // mutates memory in place.
            memory = parse("1,0,0,3,1,1,2,3,1,3,4,3,1,5,0,3,2,6,1,19,1,5,19,23,1,13,23,27,1,6,27,31,2,31,13,35,1,9,35,39,2,39,13,43,1,43,10,47,1,47,13,51,2,13,51,55,1,55,9,59,1,59,5,63,1,6,63,67,1,13,67,71,2,71,10,75,1,6,75,79,1,79,10,83,1,5,83,87,2,10,87,91,1,6,91,95,1,9,95,99,1,99,9,103,2,103,10,107,1,5,107,111,1,9,111,115,2,13,115,119,1,119,10,123,1,123,10,127,2,127,10,131,1,5,131,135,1,10,135,139,1,139,2,143,1,6,143,0,99,2,14,0,0".to_string());
            // before running the program, replace position 1 with the value 12 and replace position 2 with the value 2
            memory[1] = noun;
            memory[2] = verb;
            memory = calculate(memory);
            println!("{}", render(&memory));
            if memory[0] == 19690720 {
                break 'outer;
            }
        }
    }
    // What is 100 * noun + verb? (For example, if noun=12 and verb=2, the answer would be 1202.)
    println!(
        "100 * {} + {} = {}",
        memory[1],
        memory[2],
        100 * memory[1] + memory[2]
    )
}
/// Parses a comma-separated Intcode program into its memory cells.
/// Panics (expect "x") on any non-integer token.
fn parse(expr: String) -> Vec<i32> {
    expr.split(',')
        .map(|token| token.parse::<i32>().expect("x"))
        .collect()
}
/// Renders memory back to the comma-separated program notation.
fn render(memory: &Vec<i32>) -> String {
    let cells: Vec<String> = memory.iter().map(|cell| cell.to_string()).collect();
    cells.join(",")
}
/// Executes an Intcode program (opcodes 1 = add, 2 = multiply, 99 = halt)
/// and returns the final memory. Panics on an unknown opcode.
///
/// Debug traces are printed for every executed instruction.
fn calculate(mut memory: Vec<i32>) -> Vec<i32> {
    let mut ip = 0;
    loop {
        let opcode = memory[ip];
        if opcode == 99 {
            // halt immediately
            break;
        }
        // Operands are position-mode addresses following the opcode.
        let (lhs, rhs, dst) = (
            memory[ip + 1] as usize,
            memory[ip + 2] as usize,
            memory[ip + 3] as usize,
        );
        println!("op {}", opcode);
        match opcode {
            1 => {
                // add
                println!("add {} {} to {}", lhs, rhs, dst);
                memory[dst] = memory[lhs] + memory[rhs];
                ip += 4;
            }
            2 => {
                // multiply
                println!("mul {} {} to {}", lhs, rhs, dst);
                memory[dst] = memory[lhs] * memory[rhs];
                ip += 4;
            }
            _ => panic!("error at instruction_pointer {}", ip),
        }
    }
    memory
}
/// String-in/string-out convenience wrapper: parse, run, render.
fn calc_str(expr: String) -> String {
    render(&calculate(parse(expr)))
}
fn calculate_str(expr: String) -> String {
let mut memory: Vec<i32> = expr
.split(",")
.map(|s| s.parse::<i32>().expect("x"))
.collect();
let mut instruction_pointer = 0;
loop {
let opcode = memory[instruction_pointer];
if opcode == 99 {
// halt immediately
break;
}
let left_op = memory[instruction_pointer + 1] as usize;
let right_op = memory[instruction_pointer + 2] as usize;
let dest = memory[instruction_pointer + 3] as usize;
println!("op {}", opcode);
match opcode {
1 => {
// add
println!("add {} {} to {}", left_op, right_op, dest);
memory[dest] = memory[left_op] + memory[right_op];
}
2 => {
// multiply
println!("mul {} {} to {}", left_op, right_op, dest);
memory[dest] = memory[left_op] * memory[right_op];
}
_ => {
// error
panic!("error at instruction_pointer {}", instruction_pointer);
}
}
instruction_pointer += 4;
}
return memory
.iter()
.map(|s| s.to_string())
.collect::<Vec<String>>()
.join(",");
}
#[cfg(test)]
mod tests {
    use super::*;
    // The four worked add/multiply examples, run through the
    // string-in/string-out wrapper.
    #[test]
    fn ex_1() {
        assert_eq!("2,0,0,0,99", calc_str("1,0,0,0,99".to_string()));
    }
    #[test]
    fn ex_2() {
        assert_eq!("2,3,0,6,99", calc_str("2,3,0,3,99".to_string()));
    }
    #[test]
    fn ex_3() {
        assert_eq!("2,4,4,5,99,9801", calc_str("2,4,4,5,99,0".to_string()));
    }
    #[test]
    fn ex_4() {
        assert_eq!(
            "30,1,1,4,2,5,6,0,99",
            calc_str("1,1,1,4,99,5,6,0,99".to_string())
        );
    }
}
|
pub mod provider;
|
use error_chain::error_chain;
use url::Url;
// Module error type: wraps `url::ParseError` as a foreign link and adds a
// custom `CannotBeABase` variant for URLs that have no path segments
// (e.g. `mailto:`).
error_chain! {
    foreign_links {
        UrlParse(url::ParseError);
    }
    errors {
        CannotBeABase
    }
}
/// Parses a fixed sample URL and prints its path component.
pub fn get_url() -> Result<()> {
    let s = "https://github.com/rust-lang/rust/issues?labels=E-easy&state=open";
    let parsed = Url::parse(s)?;
    println!("The path part of the URL is: {}", parsed.path());
    Ok(())
}
// Method 2: obtain the base by slicing the URL text up to the path.
fn get_base_url2(url: &Url) -> Result<Url> {
    // BUG FIX: the original `Some(url.as_ref()).map_or_else(slice, parse)`
    // always took the `parse` branch (the receiver is always `Some`), so the
    // slicing closure was dead code and the *full* URL was returned
    // unchanged. Slicing up to `Position::BeforePath` yields the actual
    // scheme://host[:port] base.
    let base_url = Url::parse(&url[..url::Position::BeforePath])?;
    Ok(base_url)
}
// Method 1: strip the path directly via the path-segment mutator.
pub fn get_base_url() -> Result<Url> {
    let full = "https://github.com/rust-lang/cargo?asdf";
    let mut url = Url::parse(full)?;
    // Cannot-be-a-base URLs (e.g. `mailto:`) have no path segments.
    match url.path_segments_mut() {
        Ok(mut path) => {
            path.clear();
        }
        Err(_) => {
            return Err(Error::from_kind(ErrorKind::CannotBeABase));
        }
    }
    url.set_query(Some("page=2"));
    // BUG FIX: the original asserted the result equals
    // "https://example.com/products?page=2", which can never match a URL
    // built from github.com, so this function always panicked. Check the
    // parts we actually constructed instead.
    assert_eq!(url.host_str(), Some("github.com"));
    assert_eq!(url.query(), Some("page=2"));
    Ok(url)
}
/// Demonstrates reading scheme, host and default port back out of a URL.
pub fn get_url_source() -> Result<()> {
    let s = "ftp://rust-lang.org/examples";
    let url = Url::parse(s)?;
    assert_eq!(url.scheme(), "ftp");
    assert_eq!(url.host(), Some(url::Host::Domain("rust-lang.org")));
    // 21 is the well-known default port for ftp.
    assert_eq!(url.port_or_known_default(), Some(21));
    println!("The origin is as expected!");
    Ok(())
}
/// Competitive-programming solution (AtCoder-style input via proconio).
///
/// Sorts the input, then — walking from the largest value down — multiplies
/// together (gap + 1) over each pair of adjacent *distinct* values, and
/// finally (a[0] + 1), all modulo 1e9+7.
fn main() {
    proconio::input! {
        n: usize,
        a: [u64; n],
    }
    // Modulus 1e9+7.
    let e = 1000000007;
    // NOTE(review): `a` is already owned here, so this clone is redundant.
    let mut a: Vec<u64> = a.clone();
    a.sort();
    let mut pre = 0;
    let mut ans = 1;
    // `pre` tracks the previous distinct value seen (0 = none yet).
    for i in (0..n).rev() {
        if pre == 0 {
            pre = a[i];
            continue;
        }
        // Equal neighbours contribute no factor.
        if pre == a[i] {
            pre = a[i];
            continue;
        }
        // NOTE(review): `pre - a[i] + 1` is computed before the modulo, so a
        // huge gap between elements could overflow u64 — confirm the
        // problem's input bounds make this safe.
        ans = ans * (pre - a[i] + 1);
        ans = ans % e;
        pre = a[i];
        // println!("{} {}", a[i], ans % e);
    }
    ans = (ans * (a[0] + 1)) % e;
    // println!("{:?}", a);
    println!("{}", ans % e);
}
|
extern crate termion;
// http://ticki.github.io/blog/making-terminal-applications-in-rust-with-termion/
use std::io::{stdin, stdout, Write};
use termion::event::Key;
use termion::input::TermRead;
use termion::raw::IntoRawMode;
/// Subtracts `y` from `x`, clamping the result so it never drops below 1
/// (terminal coordinates in termion are 1-based).
///
/// BUG FIX: the original guard was `x <= 1`, which still allowed `x - y` to
/// underflow (x = 2, y = 3 panics in debug builds / wraps in release) and
/// could return the invalid coordinate 0 (x = 2, y = 2). Guarding on
/// `x <= y` keeps the result in range for every input.
fn sub_min_1(x: u16, y: u16) -> u16 {
    if x <= y {
        1
    } else {
        x - y
    }
}
/// Minimal raw-mode typing demo: echoes keys at a tracked cursor position
/// until any Ctrl+<key> is pressed.
fn main() {
    // Get the standard input stream.
    let stdin = stdin();
    // Get the standard output stream and go to raw mode.
    let mut stdout = stdout().into_raw_mode().unwrap();
    // Cursor position, 1-based as termion expects.
    let mut column = 1;
    let mut line = 1;
    write!(
        stdout,
        "{}{}Ctrl+ c exit.",
        // Clear the screen.
        termion::clear::All,
        // Goto (1,1).
        termion::cursor::Goto(1, 1)
    ).unwrap();
    // Flush stdout (i.e. make the output appear).
    stdout.flush().unwrap();
    let (width, _height) = termion::terminal_size().unwrap();
    for c in stdin.keys() {
        // Move the cursor to where the previous iteration decided to print.
        write!(stdout, "{}", termion::cursor::Goto(column, line)).unwrap();
        // Wrap to the next line at the right edge.
        if column == width {
            line += 1;
            column = 1;
        } else {
            column += 1;
        }
        // Print the key we type...
        match c.unwrap() {
            // Enter: carriage return + new line.
            Key::Char('\n') => {
                line += 1;
                column = 1;
                print!("\n\r")
            },
            Key::Char(c) => print!("{}", c),
            Key::Alt(c) => print!("Alt-{}", c),
            // Any Ctrl+<key> exits the loop.
            Key::Ctrl(_c) => break,
            // Arrow keys move the cursor, clamped to stay >= 1.
            Key::Left => column = sub_min_1(column, 2),
            Key::Right => column += 2,
            Key::Up => {
                line = sub_min_1(line, 1);
                column = sub_min_1(column, 1)
            }
            Key::Down => {
                line += 1;
                column = sub_min_1(column, 1)
            }
            _ => print!("Other"),
        }
        // Flush again.
        stdout.flush().unwrap();
    }
    // Show the cursor again before we exit.
    write!(stdout, "{}", termion::cursor::Show).unwrap();
}
|
/*===============================================================================================*/
// Copyright 2016 Kyle Finlay
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*===============================================================================================*/
extern crate serde;
extern crate serde_json;
use self::serde::Serialize;
use std::fs::File;
use std::io;
use std::io::Write;
/*===============================================================================================*/
/*------SERIALIZER STRUCT------------------------------------------------------------------------*/
/*===============================================================================================*/
/// Provides utilities for serializing JSON files.
#[derive (Copy, Clone)]
pub struct Serializer;
/*===============================================================================================*/
/*------SERIALIZER PUBLIC STATIC METHODS---------------------------------------------------------*/
/*===============================================================================================*/
impl Serializer {
/// Serializes an object to a file.
///
/// The contents of type `T` is converted into a JSON text file.
/// `T` must derive from the [`serde::Serialize`](http://serde-rs.github.io/serde/serde/ser/trait.Serialize.html)
/// trait.
///
/// # Arguments
/// * `item` - The instance of `T` to serialize.
/// * `file_path` - The path to the file to be serialized.
/// It can be either a full or local path.
/// If the file doesn't exist, it will be created.
///
/// # Returns
/// A `Result` containing a `std::io::Error` on failure.
///
/// # Examples
/// ```
/// # #![feature (custom_derive)]
/// # #![feature (plugin)]
/// # #![plugin (serde_macros)]
/// # extern crate serde;
/// # extern crate ion_core;
/// # use self::ion_core::util::serialization::Serializer;
/// # use self::serde::Serialize;
/// #[derive (Serialize)]
/// struct Person {
///
/// name: String,
/// age: u32
/// }
/// # fn main () {
/// let person = Person {name: "John Doe".to_string (), age: 31};
/// Serializer::to_file (&person, "person.json");
/// # }
/// ```
pub fn to_file<T: Serialize> (item: &T, file_path: &str) -> Result<(), io::Error> {
let mut file = try! (File::create (file_path));
let string = match Serializer::to_string (item) {
Ok (s) => s,
Err (e) => return Err (io::Error::new (io::ErrorKind::Other, e.to_string ()))
};
try! (file.write (string.as_bytes ()));
Ok (())
}
/*-----------------------------------------------------------------------------------------------*/
/// Serializes an object to a String.
///
/// The type `T` is converted into a JSON string.
/// `T` must derive from the [`serde::Serialize`](http://serde-rs.github.io/serde/serde/ser/trait.Serialize.html)
/// trait.
///
/// # Arguments
/// * `item` - The instance of `T` to serialize.
///
/// # Returns
/// A `Result` containing a JSON string on success,
/// and a `serde_json::Error` on failure.
///
/// # Examples
/// ```
/// # #![feature (custom_derive)]
/// # #![feature (plugin)]
/// # #![plugin (serde_macros)]
/// # extern crate serde;
/// # extern crate ion_core;
/// # use self::ion_core::util::serialization::Serializer;
/// # use self::serde::Serialize;
/// #[derive (Serialize)]
/// struct Person {
///
/// name: String,
/// age: u32
/// }
/// # fn main () {
/// let person = Person {name: "John Doe".to_string (), age: 31};
/// let string = Serializer::to_string (&person).unwrap ();
/// # }
/// ```
pub fn to_string<T: Serialize> (item: &T) -> Result<String, serde_json::Error> {
Ok (try! (serde_json::to_string_pretty (item)))
}
}
|
use windows_dll::dll;
use winapi::shared::{
ntdef::VOID,
minwindef::BOOL,
windef::HWND,
ntdef::PVOID,
basetsd::SIZE_T,
};
// Checks that the `#[dll]` macro accepts an import bound by ordinal
// (via `#[link_ordinal]`) for a function with no arguments.
#[test]
fn link_ordinal() {
    #[dll("uxtheme.dll")]
    extern "system" {
        // Bound by export ordinal rather than symbol name.
        #[link_ordinal = 137]
        fn flush_menu_themes() -> VOID;
    }
}
// Checks that an ordinal-bound import also works when the foreign
// function takes arguments.
#[test]
fn link_ordinal_with_arguments() {
    #[dll("uxtheme.dll")]
    extern "system" {
        #[link_ordinal = 133]
        fn allow_dark_mode_for_window(hwnd: HWND, allow: BOOL) -> BOOL;
    }
}
// Checks that `#[link_name]` lets a snake_case Rust name bind to the
// DLL's differently-spelled export.
#[test]
fn link_name() {
    #[dll("user32.dll")]
    extern "system" {
        #[link_name = "SetWindowCompositionAttribute"]
        fn set_window_composition_attribute(h_wnd: HWND, data: *mut WINDOWCOMPOSITIONATTRIBDATA) -> BOOL;
    }
}
// Attribute identifier type used by SetWindowCompositionAttribute.
#[allow(non_snake_case)]
type WINDOWCOMPOSITIONATTRIB = u32;
// Attribute id 26 — per the constant's name, the "use dark mode colors"
// setting (undocumented Win32 API; value taken from common usage).
const WCA_USEDARKMODECOLORS: WINDOWCOMPOSITIONATTRIB = 26;
// Mirrors the native WINDOWCOMPOSITIONATTRIBDATA layout; #[repr(C)] is
// required so the struct matches the Win32 ABI when passed by pointer.
#[allow(non_snake_case)]
#[repr(C)]
pub struct WINDOWCOMPOSITIONATTRIBDATA {
    Attrib: WINDOWCOMPOSITIONATTRIB,
    pvData: PVOID,
    cbData: SIZE_T,
}
// Checks that with no explicit link attribute, the macro uses the Rust
// function name itself as the symbol to import.
#[test]
fn guess_name() {
    #[dll("user32.dll")]
    extern "system" {
        #[allow(non_snake_case)]
        fn SetWindowCompositionAttribute(h_wnd: HWND, data: *mut WINDOWCOMPOSITIONATTRIBDATA) -> BOOL;
    }
}
// Checks that `#[fallible]` is accepted, turning a missing export into a
// recoverable error instead of a panic at call time.
#[test]
fn return_result() {
    #[dll("user32.dll")]
    extern "system" {
        #[allow(non_snake_case)]
        #[fallible]
        fn SetWindowCompositionAttribute(h_wnd: HWND, data: *mut WINDOWCOMPOSITIONATTRIBDATA) -> BOOL;
    }
}
// Checks the generated `exists()` helper, which reports whether the
// export can actually be resolved at runtime.
#[test]
fn function_exists() {
    #[dll("user32.dll")]
    extern "system" {
        #[allow(non_snake_case)]
        fn SetWindowCompositionAttribute(h_wnd: HWND, data: *mut WINDOWCOMPOSITIONATTRIBDATA) -> BOOL;
    }
    dbg!(SetWindowCompositionAttribute::exists());
}
// Same as the exists() test, but with the import declared inside a
// nested module and re-imported — exercises visibility of the generated
// items across module boundaries.
#[test]
fn function_exists_module() {
    mod user32 {
        use super::*;
        #[dll("user32.dll")]
        extern "system" {
            #[allow(non_snake_case)]
            pub fn SetWindowCompositionAttribute(h_wnd: HWND, data: *mut WINDOWCOMPOSITIONATTRIBDATA) -> BOOL;
        }
    }
    use user32::SetWindowCompositionAttribute;
    dbg!(SetWindowCompositionAttribute::exists());
}
|
//! Crate root: declares the tuple-based date/time modules.
extern crate regex;

pub mod date_time_tuple;
pub mod date_tuple;
// Internal helpers shared by the tuple modules; not part of the public API.
mod date_utils;
pub mod month_tuple;
pub mod time_tuple;
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Common ARM resource envelope (id/name/location/type/tags); concrete
/// resource types flatten these fields into their own JSON object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub location: String,
    // `type` is a Rust keyword, hence the `type_` field plus serde rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// An operationalization cluster resource; the base `Resource` fields are
/// flattened into the same JSON object as `properties`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationalizationCluster {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<OperationalizationClusterProperties>,
}
/// Properties of an operationalization cluster; only `clusterType` and
/// `containerService` are required by the wire format.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationalizationClusterProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "createdOn", default, skip_serializing_if = "Option::is_none")]
    pub created_on: Option<String>,
    #[serde(rename = "modifiedOn", default, skip_serializing_if = "Option::is_none")]
    pub modified_on: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<operationalization_cluster_properties::ProvisioningState>,
    #[serde(rename = "clusterType")]
    pub cluster_type: operationalization_cluster_properties::ClusterType,
    #[serde(rename = "storageAccount", default, skip_serializing_if = "Option::is_none")]
    pub storage_account: Option<StorageAccountProperties>,
    #[serde(rename = "containerRegistry", default, skip_serializing_if = "Option::is_none")]
    pub container_registry: Option<ContainerRegistryProperties>,
    #[serde(rename = "containerService")]
    pub container_service: AcsClusterProperties,
    #[serde(rename = "appInsights", default, skip_serializing_if = "Option::is_none")]
    pub app_insights: Option<AppInsightsCredentials>,
    #[serde(rename = "globalServiceConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub global_service_configuration: Option<GlobalServiceConfiguration>,
}
/// Enum value types scoped to `OperationalizationClusterProperties`.
pub mod operationalization_cluster_properties {
    use super::*;
    /// Lifecycle state of the cluster deployment.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Unknown,
        Updating,
        Creating,
        Succeeded,
        Failed,
    }
    /// Cluster backend; only ACS is defined by this API version.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ClusterType {
        #[serde(rename = "ACS")]
        Acs,
    }
}
/// Reference to the cluster's storage account, by ARM resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageAccountProperties {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
}
/// Reference to the cluster's container registry, by ARM resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerRegistryProperties {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
}
/// Properties of the backing ACS (Azure Container Service) cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AcsClusterProperties {
    #[serde(rename = "clusterFqdn", default, skip_serializing_if = "Option::is_none")]
    pub cluster_fqdn: Option<String>,
    #[serde(rename = "orchestratorType")]
    pub orchestrator_type: acs_cluster_properties::OrchestratorType,
    #[serde(rename = "orchestratorProperties")]
    pub orchestrator_properties: KubernetesClusterProperties,
    #[serde(rename = "systemServices", default, skip_serializing_if = "Vec::is_empty")]
    pub system_services: Vec<SystemServices>,
    #[serde(rename = "agentCount", default, skip_serializing_if = "Option::is_none")]
    pub agent_count: Option<i64>,
    #[serde(rename = "agentVmSize", default, skip_serializing_if = "Option::is_none")]
    pub agent_vm_size: Option<acs_cluster_properties::AgentVmSize>,
}
/// Enum value types scoped to `AcsClusterProperties`.
pub mod acs_cluster_properties {
    use super::*;
    /// Container orchestrator; only Kubernetes is defined.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OrchestratorType {
        Kubernetes,
    }
    /// Azure VM SKU for agent nodes; serde renames map each variant to the
    /// exact Azure size string (e.g. "Standard_D1_v2").
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AgentVmSize {
        #[serde(rename = "Standard_A0")]
        StandardA0,
        #[serde(rename = "Standard_A1")]
        StandardA1,
        #[serde(rename = "Standard_A2")]
        StandardA2,
        #[serde(rename = "Standard_A3")]
        StandardA3,
        #[serde(rename = "Standard_A4")]
        StandardA4,
        #[serde(rename = "Standard_A5")]
        StandardA5,
        #[serde(rename = "Standard_A6")]
        StandardA6,
        #[serde(rename = "Standard_A7")]
        StandardA7,
        #[serde(rename = "Standard_A8")]
        StandardA8,
        #[serde(rename = "Standard_A9")]
        StandardA9,
        #[serde(rename = "Standard_A10")]
        StandardA10,
        #[serde(rename = "Standard_A11")]
        StandardA11,
        #[serde(rename = "Standard_D1")]
        StandardD1,
        #[serde(rename = "Standard_D2")]
        StandardD2,
        #[serde(rename = "Standard_D3")]
        StandardD3,
        #[serde(rename = "Standard_D4")]
        StandardD4,
        #[serde(rename = "Standard_D11")]
        StandardD11,
        #[serde(rename = "Standard_D12")]
        StandardD12,
        #[serde(rename = "Standard_D13")]
        StandardD13,
        #[serde(rename = "Standard_D14")]
        StandardD14,
        #[serde(rename = "Standard_D1_v2")]
        StandardD1V2,
        #[serde(rename = "Standard_D2_v2")]
        StandardD2V2,
        #[serde(rename = "Standard_D3_v2")]
        StandardD3V2,
        #[serde(rename = "Standard_D4_v2")]
        StandardD4V2,
        #[serde(rename = "Standard_D5_v2")]
        StandardD5V2,
        #[serde(rename = "Standard_D11_v2")]
        StandardD11V2,
        #[serde(rename = "Standard_D12_v2")]
        StandardD12V2,
        #[serde(rename = "Standard_D13_v2")]
        StandardD13V2,
        #[serde(rename = "Standard_D14_v2")]
        StandardD14V2,
        #[serde(rename = "Standard_G1")]
        StandardG1,
        #[serde(rename = "Standard_G2")]
        StandardG2,
        #[serde(rename = "Standard_G3")]
        StandardG3,
        #[serde(rename = "Standard_G4")]
        StandardG4,
        #[serde(rename = "Standard_G5")]
        StandardG5,
        #[serde(rename = "Standard_DS1")]
        StandardDs1,
        #[serde(rename = "Standard_DS2")]
        StandardDs2,
        #[serde(rename = "Standard_DS3")]
        StandardDs3,
        #[serde(rename = "Standard_DS4")]
        StandardDs4,
        #[serde(rename = "Standard_DS11")]
        StandardDs11,
        #[serde(rename = "Standard_DS12")]
        StandardDs12,
        #[serde(rename = "Standard_DS13")]
        StandardDs13,
        #[serde(rename = "Standard_DS14")]
        StandardDs14,
        #[serde(rename = "Standard_GS1")]
        StandardGs1,
        #[serde(rename = "Standard_GS2")]
        StandardGs2,
        #[serde(rename = "Standard_GS3")]
        StandardGs3,
        #[serde(rename = "Standard_GS4")]
        StandardGs4,
        #[serde(rename = "Standard_GS5")]
        StandardGs5,
    }
}
/// Kubernetes-specific orchestrator settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KubernetesClusterProperties {
    #[serde(rename = "servicePrincipal")]
    pub service_principal: ServicePrincipalProperties,
}
/// System services that can run on the cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SystemServices {
    Scoring,
    Batch,
}
/// AAD service principal used by the cluster; both fields are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServicePrincipalProperties {
    #[serde(rename = "clientId")]
    pub client_id: String,
    pub secret: String,
}
/// PATCH body for updating a cluster; only tags are mutable this way.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationalizationClusterUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Cluster-wide service configuration (SSL, auth, autoscale).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GlobalServiceConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ssl: Option<SslConfiguration>,
    #[serde(rename = "serviceAuth", default, skip_serializing_if = "Option::is_none")]
    pub service_auth: Option<ServiceAuthConfiguration>,
    #[serde(rename = "autoScale", default, skip_serializing_if = "Option::is_none")]
    pub auto_scale: Option<AutoScaleConfiguration>,
}
/// SSL settings: enable/disable plus PEM certificate and key material.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SslConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<ssl_configuration::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cert: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key: Option<String>,
}
/// Enum value types scoped to `SslConfiguration`.
pub mod ssl_configuration {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Enabled,
        Disabled,
    }
}
/// Hashed primary/secondary auth keys for service authentication.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceAuthConfiguration {
    #[serde(rename = "primaryAuthKeyHash")]
    pub primary_auth_key_hash: String,
    #[serde(rename = "secondaryAuthKeyHash")]
    pub secondary_auth_key_hash: String,
}
/// Autoscaling settings for deployed services.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutoScaleConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<auto_scale_configuration::Status>,
    #[serde(rename = "minReplicas", default, skip_serializing_if = "Option::is_none")]
    pub min_replicas: Option<i64>,
    #[serde(rename = "maxReplicas", default, skip_serializing_if = "Option::is_none")]
    pub max_replicas: Option<i64>,
    #[serde(rename = "targetUtilization", default, skip_serializing_if = "Option::is_none")]
    pub target_utilization: Option<f64>,
    #[serde(rename = "refreshPeriodInSeconds", default, skip_serializing_if = "Option::is_none")]
    pub refresh_period_in_seconds: Option<i64>,
}
/// Enum value types scoped to `AutoScaleConfiguration`.
pub mod auto_scale_configuration {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Enabled,
        Disabled,
    }
}
/// Bundle of credentials for every service attached to the cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationalizationClusterCredentials {
    #[serde(rename = "storageAccount", default, skip_serializing_if = "Option::is_none")]
    pub storage_account: Option<StorageAccountCredentials>,
    #[serde(rename = "containerRegistry", default, skip_serializing_if = "Option::is_none")]
    pub container_registry: Option<ContainerRegistryCredentials>,
    #[serde(rename = "containerService", default, skip_serializing_if = "Option::is_none")]
    pub container_service: Option<ContainerServiceCredentials>,
    #[serde(rename = "appInsights", default, skip_serializing_if = "Option::is_none")]
    pub app_insights: Option<AppInsightsCredentials>,
    #[serde(rename = "serviceAuthConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub service_auth_configuration: Option<ServiceAuthConfiguration>,
    #[serde(rename = "sslConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub ssl_configuration: Option<SslConfiguration>,
}
/// Access keys for the cluster's storage account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageAccountCredentials {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
    #[serde(rename = "primaryKey", default, skip_serializing_if = "Option::is_none")]
    pub primary_key: Option<String>,
    #[serde(rename = "secondaryKey", default, skip_serializing_if = "Option::is_none")]
    pub secondary_key: Option<String>,
}
/// Login server and passwords for the container registry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerRegistryCredentials {
    #[serde(rename = "loginServer", default, skip_serializing_if = "Option::is_none")]
    pub login_server: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password2: Option<String>,
}
/// Kubeconfig and principal credentials for the container service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceCredentials {
    #[serde(rename = "acsKubeConfig", default, skip_serializing_if = "Option::is_none")]
    pub acs_kube_config: Option<String>,
    #[serde(rename = "servicePrincipalConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub service_principal_configuration: Option<ServicePrincipalProperties>,
    #[serde(rename = "imagePullSecretName", default, skip_serializing_if = "Option::is_none")]
    pub image_pull_secret_name: Option<String>,
}
/// Application Insights app id / API key pair.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AppInsightsCredentials {
    #[serde(rename = "appId", default, skip_serializing_if = "Option::is_none")]
    pub app_id: Option<String>,
    #[serde(rename = "apiKey", default, skip_serializing_if = "Option::is_none")]
    pub api_key: Option<String>,
}
/// Response to a "check for system updates" call.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckUpdateResponse {
    #[serde(rename = "updatesAvailable", default, skip_serializing_if = "Option::is_none")]
    pub updates_available: Option<check_update_response::UpdatesAvailable>,
}
/// Enum value types scoped to `CheckUpdateResponse`.
pub mod check_update_response {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum UpdatesAvailable {
        Yes,
        No,
    }
}
/// Response to a "update system services" call, with start/end timestamps.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateSystemResponse {
    #[serde(rename = "updateStatus", default, skip_serializing_if = "Option::is_none")]
    pub update_status: Option<update_system_response::UpdateStatus>,
    #[serde(rename = "updateStartedOn", default, skip_serializing_if = "Option::is_none")]
    pub update_started_on: Option<String>,
    #[serde(rename = "updateCompletedOn", default, skip_serializing_if = "Option::is_none")]
    pub update_completed_on: Option<String>,
}
/// Enum value types scoped to `UpdateSystemResponse`.
pub mod update_system_response {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum UpdateStatus {
        InProgress,
        Completed,
    }
}
/// One page of clusters plus the continuation link for the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaginatedOperationalizationClustersList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationalizationCluster>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Status of a long-running (async) ARM operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AsyncOperationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<async_operation_status::ProvisioningState>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "percentComplete", default, skip_serializing_if = "Option::is_none")]
    pub percent_complete: Option<f64>,
    #[serde(rename = "errorInfo", default, skip_serializing_if = "Option::is_none")]
    pub error_info: Option<AsyncOperationErrorInfo>,
}
/// Enum value types scoped to `AsyncOperationStatus`.
pub mod async_operation_status {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Unknown,
        Updating,
        Creating,
        Succeeded,
        Failed,
    }
}
/// Error detail for a failed async operation; recursive via `details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AsyncOperationErrorInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<AsyncOperationErrorInfo>,
}
/// Standard ARM error envelope returned by failed requests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    pub code: String,
    pub message: String,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
}
/// One code/message pair inside an `ErrorResponse`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    pub code: String,
    pub message: String,
}
/// List of operations exposed by the resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailableOperations {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ResourceOperation>,
}
/// One provider operation with its display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceOperation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<resource_operation::Display>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}
/// Nested types scoped to `ResourceOperation`.
pub mod resource_operation {
    use super::*;
    /// Human-readable description of an operation for portal display.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
|
use std::fs;
use std::collections::HashMap;
// Reads the puzzle input from ./in.in and runs both parts of the
// tree-counting slope puzzle (Advent-of-Code day-3 style — TODO confirm).
fn main() {
    let lines = fs::read_to_string("./in.in").expect("Smth went wrong smh");
    part1(lines.clone());
    part2(lines);
}
// Part 1: count trees hit on the single slope "right 3, down 1".
fn part1(n : String) {
    println!("Part 1: {}", work(n, 3, 1));
}
fn part2(n : String) {
let mut trees : usize = 1;
trees *= work(n.clone(), 1, 1);
trees *= work(n.clone(), 3, 1);
trees *= work(n.clone(), 5, 1);
trees *= work(n.clone(), 7, 1);
trees *= work(n.clone(), 1, 2);
println!("Part 2: {}", trees);
}
// Counts the trees ('#') hit while descending the repeating grid `n`,
// moving `xd` columns right and `yd` rows down per step.
fn work(n : String, xd : usize, yd : usize) -> usize {
    let mut hits = 0;
    // Visit every yd-th row; the k-th visited row is entered at column k*xd.
    for (step, row) in n.lines().step_by(yd).enumerate() {
        // The grid repeats horizontally, so wrap the column with `cycle`.
        if row.chars().cycle().nth(step * xd).unwrap() == '#' {
            hits += 1;
        }
    }
    hits
}
|
/* Service Data Object */
/* SDO Is A Client / Server Type */
use super::CANOpen;
use crate::stm32hal::{common, can::CanMsg};
const IDE: bool = false; // CANOpen only uses normal ID
// Bit positions of the s/e/n/ccs fields inside the SDO command byte.
const S_OFFSET: u8 = 0;
const E_OFFSET: u8 = 1;
const N_OFFSET: u8 = 2;
const CCS_OFFSET: u8 = 5;
// Field widths, expressed as masks applied after shifting down.
const S_MASK: u8 = common::MASK_1_BIT as u8;
const E_MASK: u8 = common::MASK_1_BIT as u8;
const N_MASK: u8 = common::MASK_2_BIT as u8;
const CCS_MASK: u8 = common::MASK_3_BIT as u8;
const OD_MASK: u16 = 0xFF; // byte mask for splitting the 16-bit OD index
const MAX_LEN: u32 = 8; // an SDO frame carries at most 8 data bytes
const DLC_UP: u32 = 4; // DLC for upload requests (command byte + 3 address bytes)
// CCS is the client command specifier of the SDO transfer
// 0 for SDO segment download,
// 1 for initiating download,
// 2 for initiating upload,
// 3 for SDO segment upload,
// 4 for aborting an SDO transfer,
// 5 for SDO block upload,
// 6 for SDO block download
// Variant order matters: `Ccs as u8` builds the command byte, so each
// variant's discriminant must equal its on-the-wire ccs value. The previous
// enum omitted SegUl (ccs = 3), which shifted AbortTrans/BlkUl/BlkDl one
// value too low — contradicting both the comment above and CiA 301.
pub enum Ccs {SegDl, InitDl, InitUl, SegUl, AbortTrans, BlkUl, BlkDl, Unknown}

// Decodes the ccs field (bits 7..5) of an SDO command byte.
fn sdo_ccs(cmd_byte: u8) -> Ccs {
    let ccs = (cmd_byte >> CCS_OFFSET) & CCS_MASK;
    return match ccs {
        0 => Ccs::SegDl,
        1 => Ccs::InitDl,
        2 => Ccs::InitUl,
        3 => Ccs::SegUl,
        4 => Ccs::AbortTrans,
        5 => Ccs::BlkUl,
        6 => Ccs::BlkDl,
        _ => Ccs::Unknown
    };
}
// N is the number of bytes in the data part of the message
// which do not contain data, only valid if e and s are set
pub enum N {Bytes0, Bytes1, Bytes2, Bytes3}

// Decodes the n field (bits 3..2) of an SDO command byte; the 2-bit mask
// guarantees the value is 0..=3.
fn sdo_n(cmd_byte: u8) -> N {
    match (cmd_byte >> N_OFFSET) & N_MASK {
        0 => N::Bytes0,
        1 => N::Bytes1,
        2 => N::Bytes2,
        _ => N::Bytes3,
    }
}
// E if set,
// indicates an expedited transfer,
// i.e. all data exchanged are contained within the message.
// If this bit is cleared then the message is a segmented transfer
// where the data does not fit into one message and multiple messages
// are used
pub enum E {Segmented, Expedited}

// Decodes the single-bit e field of an SDO command byte.
fn sdo_e(cmd_byte: u8) -> E {
    if (cmd_byte >> E_OFFSET) & E_MASK == 0 {
        E::Segmented
    } else {
        E::Expedited
    }
}
// S if set,
// indicates that the data size is specified in n (if e is set)
// or in the data part of the message
pub enum S {Unset, DataSizeN}

// Decodes the single-bit s field of an SDO command byte.
fn sdo_s(cmd_byte: u8) -> S {
    if (cmd_byte >> S_OFFSET) & S_MASK == 0 {
        S::Unset
    } else {
        S::DataSizeN
    }
}
// Decoded form of an SDO frame's 8 data bytes.
pub struct CANOpenSdo {
    cmd_byte: u8, // Combination of CCS, N, E, S
    od_ind: u16, // Index (16 bits) reflect the OD address to be accessed
    od_sub: u8, // Subindex (8 bits) reflect the OD address to be accessed
    data: [u8; 4] // Data to be Transmitted
}
impl CANOpen {
    /* SDO Segment Download */
    /* SDO Initiating download */
    // Builds and sends an "initiate download" (write) request for OD entry
    // od_ind:od_sub. For expedited transfers `n` says how many of the 4
    // payload bytes are unused; for segmented transfers n is forced to 0.
    pub fn sdo_init_download(&self, n: N, e: E, od_ind: u16, od_sub: u8, data: [u8; 4], msg: &mut CanMsg){
        // NOTE(review): dlc is derived from `n` even in the segmented case,
        // where `n` is not encoded into the command byte — confirm intended.
        let dlc = match n {
            N::Bytes0 => MAX_LEN,
            N::Bytes1 => MAX_LEN - 1,
            N::Bytes2 => MAX_LEN - 2,
            N::Bytes3 => MAX_LEN - 3
        };
        let sdo = match e {
            E::Expedited => CANOpenSdo::init_write(Ccs::InitDl, n, e, S::DataSizeN, od_ind, od_sub, data),
            E::Segmented => CANOpenSdo::init_write(Ccs::InitDl, N::Bytes0, e, S::DataSizeN, od_ind, od_sub, data)
        };
        self.sdo_write(self.get_rsdo(), dlc, sdo, msg);
    }
    // Builds and sends an "initiate upload" (read) request; the payload
    // bytes are unused for a request and are sent as zero.
    pub fn sdo_init_upload(&self, od_ind: u16, od_sub: u8, msg: &mut CanMsg) {
        let data = [0; 4];
        let sdo = CANOpenSdo::init_write(Ccs::InitUl, N::Bytes0, E::Segmented, S::Unset, od_ind, od_sub, data);
        self.sdo_write(self.get_rsdo(), DLC_UP, sdo, msg);
    }
    /* All Write Functions Will Be Passed Through Here */
    // Serializes an SDO into the CAN frame: command byte, little-endian OD
    // index, subindex, then the 4 payload bytes. The COB-ID is offset by
    // this node's id.
    pub fn sdo_write(&self, cod_id: u32, dlc: u32, sdo: CANOpenSdo, msg: &mut CanMsg) {
        let mut data = [0; 8];
        data[0] = sdo.cmd_byte;
        data[1] = ((sdo.od_ind >> 0) & OD_MASK) as u8;
        data[2] = ((sdo.od_ind >> 8) & OD_MASK) as u8;
        data[3] = sdo.od_sub;
        data[4] = sdo.data[0];
        data[5] = sdo.data[1];
        data[6] = sdo.data[2];
        data[7] = sdo.data[3];
        msg.set_id(cod_id + self.node, false);
        msg.clr_rtr();
        msg.set_dlc(dlc);
        msg.set_data(data);
    }
    /* TO-DO IMPLEMENT DECONSTRUCTION OF THE */
    // Parses a received CAN frame's data bytes back into an SDO.
    pub fn sdo_read(&self, msg: &CanMsg) -> CANOpenSdo {
        return CANOpenSdo::init_read(&msg.get_data())
    }
}
impl CANOpenSdo {
/* When Generating An SDO Message */
pub fn init_write(ccs: Ccs, n: N, e: E, s: S, od_ind: u16, od_sub: u8, data: [u8; 4]) -> CANOpenSdo {
let mut cmd_byte = 0;
cmd_byte |= (s as u8) << S_OFFSET;
cmd_byte |= (e as u8) << E_OFFSET;
cmd_byte |= (n as u8) << N_OFFSET;
cmd_byte |= (ccs as u8) << CCS_OFFSET;
return CANOpenSdo {
cmd_byte: cmd_byte,
od_ind: od_ind,
od_sub: od_sub,
data: data
};
}
/* When Receiving An SDO Message */
pub fn init_read(msg: &[u8; 8]) -> CANOpenSdo {
return CANOpenSdo {
cmd_byte: msg[0],
od_ind: ((msg[1] as u16) << 0) | ((msg[2] as u16) << 8),
od_sub: msg[3],
data: [msg[4], msg[5], msg[6], msg[7]]
};
}
pub fn get_cmd_byte(&self) -> u8 {
return self.cmd_byte;
}
pub fn get_ccs(&self) -> Ccs {
return sdo_ccs(self.cmd_byte);
}
pub fn get_n(&self) -> N {
return sdo_n(self.cmd_byte);
}
pub fn get_e(&self) -> E {
return sdo_e(self.cmd_byte);
}
pub fn get_s(&self) -> S {
return sdo_s(self.cmd_byte);
}
pub fn get_od_ind(&self) -> u16 {
return self.od_ind;
}
pub fn get_od_sub(&self) -> u8 {
return self.od_sub;
}
pub fn get_data(&self) -> [u8; 4] {
return self.data;
}
pub fn set_data(&mut self, data: [u8; 4]) {
self.data = data;
}
} |
// thread 'rustc' panicked at 'no entry found for key'
// prusti-interface/src/environment/polonius_info.rs:1169:9
// Regression test: this minimal raw-pointer cast previously crashed the
// Prusti verifier with the panic quoted above. Keep the body exactly
// as-is — the cast expression itself is what exercises the bug.
fn foo(ptr: *mut i32) {
    let _ = ptr as *mut i32;
}

fn main() {}
|
use kagura::component::Cmd;
use kagura::prelude::*;
use std::collections::VecDeque;
/// FIFO queue of pending `Cmd`s for a Kagura component.
pub struct Cmds<C: Component> {
    cmds: VecDeque<Cmd<C>>,
}
impl<C: Component> Cmds<C> {
    /// Creates an empty command queue.
    pub fn new() -> Self {
        Self {
            cmds: VecDeque::new(),
        }
    }

    /// Removes and returns the oldest queued command, or `Cmd::none()`
    /// when the queue is empty.
    pub fn pop(&mut self) -> Cmd<C> {
        self.cmds.pop_front().unwrap_or_else(|| Cmd::none())
    }

    /// Appends a command to the back of the queue.
    pub fn push(&mut self, cmd: Cmd<C>) {
        self.cmds.push_back(cmd);
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// OrganizationBilling : A JSON array of billing type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OrganizationBilling {
    /// The type of billing. Only `parent_billing` is supported.
    // `type` is a Rust keyword, so the field is `_type` with a serde
    // rename; it is omitted from the JSON entirely when `None`.
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub _type: Option<String>,
}
impl OrganizationBilling {
    /// A JSON array of billing type.
    ///
    /// Creates an `OrganizationBilling` with no billing type set.
    pub fn new() -> OrganizationBilling {
        OrganizationBilling {
            _type: None,
        }
    }
}

// `Default` mirrors `new()` so the type works with `..Default::default()`
// and `Default`-bound generic APIs (clippy: new_without_default).
impl Default for OrganizationBilling {
    fn default() -> Self {
        Self::new()
    }
}
|
use diesel::insert_into;
use diesel::prelude::*;
use crate::common::*;
// Diesel mapping for the `test_nullable` table; the inner `use`s import
// the SQL types (including the custom enum mapping) the columns need.
table! {
    use diesel::sql_types::{Integer, Nullable};
    use super::MyEnumMapping;
    test_nullable {
        id -> Integer,
        my_enum -> Nullable<MyEnumMapping>,
    }
}
// Row type where the enum column may be NULL (`Option<MyEnum>`).
#[derive(Insertable, Queryable, Identifiable, Debug, PartialEq)]
#[table_name = "test_nullable"]
struct Nullable {
    id: i32,
    my_enum: Option<MyEnum>,
}
// Row type that writes a plain `MyEnum` into the nullable column —
// exercises inserting a non-Option value into a Nullable<> field.
#[derive(Insertable, Queryable, Identifiable, Debug, PartialEq)]
#[table_name = "test_nullable"]
struct MaybeNullable {
    id: i32,
    my_enum: MyEnum,
}
// Postgres test setup: (re)creates the native enum type and a temp table
// with a nullable enum column.
#[cfg(feature = "postgres")]
pub fn create_null_table(conn: &PgConnection) {
    use diesel::connection::SimpleConnection;
    conn.batch_execute(
        r#"
    DROP TYPE IF EXISTS my_enum;
    CREATE TYPE my_enum AS ENUM ('foo', 'bar', 'baz_quxx');
    CREATE TEMP TABLE IF NOT EXISTS test_nullable (
        id SERIAL PRIMARY KEY,
        my_enum my_enum
    );
"#,
    )
    .unwrap();
}
// MySQL test setup: temp table using MySQL's inline ENUM column type.
#[cfg(feature = "mysql")]
pub fn create_null_table(conn: &MysqlConnection) {
    use diesel::connection::SimpleConnection;
    conn.batch_execute(
        r#"
    CREATE TEMPORARY TABLE IF NOT EXISTS test_nullable (
        id SERIAL PRIMARY KEY,
        my_enum enum ('foo', 'bar', 'baz_quxx')
    );
"#,
    )
    .unwrap();
}
// SQLite test setup: SQLite has no enum type, so the column is TEXT with
// a CHECK constraint over the allowed values.
#[cfg(feature = "sqlite")]
pub fn create_null_table(conn: &SqliteConnection) {
    conn.execute(
        r#"
    CREATE TABLE test_nullable (
        id SERIAL PRIMARY KEY,
        my_enum TEXT CHECK(my_enum IN ('foo', 'bar', 'baz_quxx'))
    );
"#,
    )
    .unwrap();
}
// Round-trips both NULL and a concrete enum value through the nullable
// column and checks they deserialize back unchanged.
#[test]
fn nullable_enum_round_trip() {
    let connection = get_connection();
    create_null_table(&connection);
    let data = vec![
        Nullable {
            id: 1,
            my_enum: None,
        },
        Nullable {
            id: 2,
            my_enum: Some(MyEnum::Bar),
        },
    ];
    let sql = insert_into(test_nullable::table).values(&data);
    let ct = sql.execute(&connection).unwrap();
    assert_eq!(data.len(), ct);
    let items = test_nullable::table.load::<Nullable>(&connection).unwrap();
    assert_eq!(data, items);
}
// Inserts plain (non-Option) enum values into the nullable column and
// checks the reported row count; no read-back is needed here.
#[test]
fn not_nullable_enum_round_trip() {
    let connection = get_connection();
    create_null_table(&connection);
    let data = vec![
        MaybeNullable {
            id: 1,
            my_enum: MyEnum::Foo,
        },
        MaybeNullable {
            id: 2,
            my_enum: MyEnum::BazQuxx,
        },
    ];
    let ct = insert_into(test_nullable::table)
        .values(&data)
        .execute(&connection)
        .unwrap();
    assert_eq!(data.len(), ct);
}
|
use std::{env, io};
use crossbeam_channel::SendError;
use thiserror::Error;
use crate::finder::NoteFindMessage;
#[derive(Error, Debug)]
pub enum NottoError {
#[error("context {context} not found")]
ContextNotFound { context: String },
#[error("context error from environment variable - {source}")]
ContextError {
#[from]
source: env::VarError
},
#[error("problem with notto's home directory - {source}")]
ConfigDirectory {
#[from]
source: io::Error
},
#[error("problem deserializing yaml content - {source}")]
ReadingFile {
#[from]
source: serde_yaml::Error
},
#[error("home directory not found")]
HomeDirectoryNotFound,
#[error("{message}")]
LoadConfigError { message: String },
#[error("error sending an asynchronous request")]
SendError {
#[from]
source: SendError<NoteFindMessage>
},
#[error("Note {note_name} alerady exists.")]
NoteExists { note_name: String },
#[error("File Error: {message}")]
FileError { message: String },
#[error("Create Note Error: {message}")]
CreateNoteError { message: String },
} |
use anyhow::{format_err, Error};
use rand::{
distributions::{Distribution, Uniform},
thread_rng,
};
use reqwest::{header::HeaderMap, redirect::Policy, Client, Response, Url};
use serde::Serialize;
use std::{collections::HashMap, future::Future, thread::sleep, time::Duration};
/// Wrapper around a `reqwest::Client` (cookie store enabled) that adds
/// retrying GET/POST/DELETE helpers.
#[derive(Debug, Clone)]
pub struct ReqwestSession {
    client: Client,
}
impl Default for ReqwestSession {
    /// Defaults to a session that follows redirects.
    fn default() -> Self {
        Self::new(true)
    }
}
impl ReqwestSession {
    /// Builds a session with a cookie store. Redirects follow reqwest's
    /// default policy when `allow_redirects` is true, otherwise they are
    /// disabled entirely.
    #[must_use]
    pub fn new(allow_redirects: bool) -> Self {
        let redirect_policy = if allow_redirects {
            Policy::default()
        } else {
            Policy::none()
        };
        Self {
            client: Client::builder()
                .cookie_store(true)
                .redirect(redirect_policy)
                .build()
                .expect("Failed to build client"),
        }
    }
    // Retries `f` until it succeeds, sleeping `timeout` seconds between
    // attempts and giving up once `timeout` reaches 64.
    //
    // NOTE(review): `std::thread::sleep` blocks the executor thread inside
    // an async fn — an async timer (e.g. tokio::time::sleep) would be the
    // non-blocking equivalent.
    // NOTE(review): the jitter multiplies `timeout` by a value in [0, 4),
    // so the timeout can shrink (to 0 if the sample is 0), and the
    // `timeout >= 64` exit may then never trigger — confirm this backoff
    // is intended.
    async fn exponential_retry<T, U, V>(f: T) -> Result<U, Error>
    where
        T: Fn() -> V,
        V: Future<Output = Result<U, Error>>,
    {
        let mut timeout: f64 = 1.0;
        let range = Uniform::from(0..1000);
        loop {
            let resp = f().await;
            match resp {
                Ok(x) => return Ok(x),
                Err(e) => {
                    sleep(Duration::from_millis((timeout * 1000.0) as u64));
                    timeout *= 4.0 * f64::from(range.sample(&mut thread_rng())) / 1000.0;
                    if timeout >= 64.0 {
                        return Err(format_err!(e));
                    }
                }
            }
        }
    }
    /// # Errors
    /// Returns an error if the GET request still fails after retries.
    pub async fn get(&self, url: &Url, headers: &HeaderMap) -> Result<Response, Error> {
        Self::exponential_retry(|| async move { self._get(url.clone(), headers.clone()).await })
            .await
    }
    /// Single GET attempt with the given headers; no retry.
    async fn _get(&self, url: Url, headers: HeaderMap) -> Result<Response, Error> {
        self.client
            .get(url)
            .headers(headers)
            .send()
            .await
            .map_err(Into::into)
    }
    /// # Errors
    /// Returns an error if the POST request still fails after retries.
    pub async fn post<T>(
        &self,
        url: &Url,
        headers: &HeaderMap,
        form: &HashMap<&str, T>,
    ) -> Result<Response, Error>
    where
        T: Serialize,
    {
        Self::exponential_retry(
            || async move { self._post(url.clone(), headers.clone(), form).await },
        )
        .await
    }
    // Single POST attempt; `form` is sent as a JSON body, not form-encoded.
    async fn _post<T>(
        &self,
        url: Url,
        headers: HeaderMap,
        form: &HashMap<&str, T>,
    ) -> Result<Response, Error>
    where
        T: Serialize,
    {
        self.client
            .post(url)
            .headers(headers)
            .json(form)
            .send()
            .await
            .map_err(Into::into)
    }
    /// # Errors
    /// Returns an error if the DELETE request still fails after retries.
    pub async fn delete(&self, url: &Url, headers: &HeaderMap) -> Result<Response, Error> {
        Self::exponential_retry(|| async move { self._delete(url.clone(), headers.clone()).await })
            .await
    }
    // Single DELETE attempt; no retry.
    async fn _delete(&self, url: Url, headers: HeaderMap) -> Result<Response, Error> {
        self.client
            .delete(url)
            .headers(headers)
            .send()
            .await
            .map_err(Into::into)
    }
}
|
use std::borrow::Borrow;
use std::ops::Deref;
/// Wrapper struct for representing ownership of values in vulkan that implement
/// the `Copy` trait.
pub struct VkOwned<A: Copy, F: Fn(A)> {
    // The raw vulkan value; copied out freely, destroyed exactly once on drop.
    value: A,
    // Destructor invoked with `value` when this wrapper is dropped.
    destroy_fn: F
}
impl<A: Copy, F: Fn(A)> VkOwned<A, F> {
    /// Wraps a raw vulkan value together with the destructor that will run
    /// when this wrapper is dropped.
    ///
    /// # Safety
    /// The caller must guarantee `a` is not owned elsewhere — neither by
    /// vulkan itself nor by another `VkOwned` — since the destructor runs on
    /// drop.
    pub unsafe fn new(a: A, destroy_fn: F) -> VkOwned<A, F> {
        VkOwned { value: a, destroy_fn }
    }

    /// Returns a copy of the wrapped `ash` value.
    ///
    /// # Safety
    /// The returned copy merely looks owned by the caller; it is still owned
    /// (and eventually destroyed) by this wrapper, so it must not be used
    /// after `self` is dropped.
    #[inline(always)]
    #[allow(dead_code)]
    pub unsafe fn unsafe_get(&self) -> A {
        self.value
    }
}
impl<A: Copy, F: Fn(A)> Drop for VkOwned<A, F> {
    // Runs the registered destructor exactly once when ownership ends.
    fn drop(&mut self) {
        (self.destroy_fn)(self.value)
    }
}
impl<A: Copy, F: Fn(A)> Borrow<A> for VkOwned<A, F> {
    // Borrow the wrapped value without transferring ownership.
    fn borrow(&self) -> &A {
        &self.value
    }
}
impl<A: Copy, F: Fn(A)> Deref for VkOwned<A, F> {
    type Target = A;
    // Smart-pointer-style access: `*owned` yields the wrapped value.
    fn deref(&self) -> &A {
        &self.value
    }
}
|
mod linalg;
mod cube;
use cube::*;
/// Applies a fixed scramble to a fresh cube and prints the result.
fn main() {
    let mut cube = Cube::new();
    // `expect` (rather than a bare `unwrap`) names the failing operation if
    // the move string cannot be parsed or applied.
    cube.turns("F D Y L2 B2 D' X' B U2 R' F2 Z L B' R' Z' F' L2 U X' Y R2 D' F' X' X' D F D' Y F D2 L2 U' X")
        .expect("failed to apply turn sequence");
    cube.print_ascii();
}
|
//! Get info on members of your Slack team.
use id::*;
use rtm::Cursor;
use rtm::{Paging, Team};
use timestamp::Timestamp;
// Delete the user profile photo
//
// Wraps https://api.slack.com/methods/users.deletePhoto
// NOTE(review): no request/response types for users.deletePhoto exist in this
// module; kept as a plain comment so it no longer doc-attaches to
// `GetPresenceRequest` below.
/// Gets user presence information.
///
/// Wraps https://api.slack.com/methods/users.getPresence
// `new` in the derive list (derive-new) generates `GetPresenceRequest::new(user)`.
#[derive(Clone, Debug, Serialize, new)]
pub struct GetPresenceRequest {
    /// User to get presence info on. Defaults to the authed user.
    pub user: UserId,
}
/// Response to [`GetPresenceRequest`].
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct GetPresenceResponse {
    // API success flag; captured so deserialization accepts it, not exposed.
    ok: bool,
    pub presence: Option<String>,
}
/// Get a user's identity.
///
/// Wraps https://api.slack.com/methods/users.identity
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct IdentityResponse {
    // API success flag; captured so deserialization accepts it, not exposed.
    ok: bool,
    pub team: Option<Team>,
    pub user: Option<User>,
}
/// Gets information about a user.
///
/// Wraps https://api.slack.com/methods/users.info
#[derive(Clone, Debug, Serialize, new)]
pub struct InfoRequest {
    /// User to get info on
    pub user: UserId,
}
/// Response to [`InfoRequest`].
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct InfoResponse {
    // API success flag; captured so deserialization accepts it, not exposed.
    ok: bool,
    pub user: Option<User>,
}
/// Lists all users in a Slack team.
///
/// Wraps https://api.slack.com/methods/users.list
/// At this time, providing no limit value will result in Slack
/// attempting to deliver you the entire result set.
/// If the collection is too large you may experience HTTP 500 errors.
/// Resolve this scenario by using pagination.
///
/// One day pagination will become required to use this method.
// All fields use `#[new(default)]`, so `ListRequest::new()` takes no
// arguments and each option can be set afterwards.
#[derive(Clone, Debug, Serialize, new)]
pub struct ListRequest {
    /// Whether to include presence data in the output
    #[new(default)]
    pub presence: Option<bool>,
    /// Pagination cursor from a previous response's metadata.
    #[new(default)]
    pub cursor: Option<Cursor>,
    /// Maximum number of members to return per page.
    #[new(default)]
    pub limit: Option<usize>,
    /// Whether to receive the locale for each user.
    #[new(default)]
    pub include_locale: Option<bool>,
}
/// Response to [`ListRequest`].
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ListResponse {
    // API success flag; captured so deserialization accepts it, not exposed.
    ok: bool,
    pub members: Vec<User>,
    pub cache_ts: Option<Timestamp>,
    // Contains the next_cursor for pagination, when present.
    pub response_metadata: Option<Paging>,
    pub is_limited: Option<bool>,
}
/// Gets a users's preferences
///
/// Wraps https://api.slack.com/methods/users.prefs.get
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct PrefsResponse {
    // API success flag; captured so deserialization accepts it, not exposed.
    ok: bool,
    pub prefs: UserPrefs,
}
/// Subset of the user-preferences payload that this client models.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct UserPrefs {
    // Currently private; no accessor is exposed for muted channels.
    muted_channels: Vec<ChannelId>,
}
// Marks a user as active.
//
// Wraps https://api.slack.com/methods/users.setActive
// NOTE(review): no request/response types for users.setActive exist in this
// module; kept as a plain comment so it no longer doc-attaches to
// `SetPresenceRequest` below.
/// Manually sets user presence.
///
/// Wraps https://api.slack.com/methods/users.setPresence
#[derive(Clone, Debug, Serialize, new)]
pub struct SetPresenceRequest {
    /// Either auto or away
    pub presence: Presence,
}
/// Presence state accepted by `users.setPresence`.
///
/// Serialized with snake_case variant names ("auto" / "away"), matching the
/// values the surrounding doc comment describes. The previous attribute,
/// `#[serde(rename = "snake_case")]`, renamed the *container* to the literal
/// string "snake_case" and left the variants serializing as "Auto"/"Away";
/// `rename_all` is the attribute that applies a case convention to variants.
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum Presence {
    /// Let Slack manage presence automatically.
    Auto,
    /// Force the user to appear away.
    Away,
}
/// A Slack team member record, as deserialized from the `users.*` responses
/// above.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct User {
    pub color: Option<String>,
    pub deleted: bool,
    pub has_2fa: Option<bool>,
    pub two_factor_type: Option<String>,
    pub id: UserId,
    pub is_admin: Option<bool>,
    pub is_app_user: bool,
    pub is_bot: bool,
    pub is_owner: Option<bool>,
    pub is_primary_owner: Option<bool>,
    pub is_restricted: Option<bool>,
    pub is_ultra_restricted: Option<bool>,
    pub name: String,
    pub profile: UserProfile,
    pub real_name: Option<String>,
    pub team_id: TeamId,
    pub tz: Option<String>, // TODO: Might be an enum
    pub tz_label: Option<String>,
    pub tz_offset: Option<i64>,
    pub updated: Timestamp,
}
/// The profile sub-object nested inside [`User`].
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct UserProfile {
    pub always_active: Option<bool>,
    pub bot_id: Option<BotId>,
    pub api_app_id: Option<String>,
    pub avatar_hash: String, // TODO: static length
    pub display_name: String,
    pub display_name_normalized: String,
    pub email: Option<String>,
    pub first_name: Option<String>,
    // Avatar URLs at various pixel sizes; only some sizes are optional.
    pub image_1024: Option<String>,
    pub image_192: String,
    pub image_24: String,
    pub image_32: String,
    pub image_48: String,
    pub image_512: String,
    pub image_72: String,
    pub image_original: Option<String>,
    pub is_custom_image: Option<bool>,
    pub last_name: Option<String>,
    pub phone: String,
    pub real_name: String,
    pub real_name_normalized: String,
    pub skype: String,
    pub status_emoji: String,
    pub status_expiration: i64,
    pub status_text: String,
    pub status_text_canonical: String,
    pub team: TeamId,
    pub title: String,
    pub fields: Option<()>, // No idea what goes here
}
|
/// Number of bytes in a key handle. `isize` because it is used directly as a
/// `pointer::offset` argument.
const KEY_HANDLE_LENGTH: isize = 64;

/// View over a raw 64-byte key-handle buffer.
///
/// NOTE(review): only the pointer is stored; the caller must keep the
/// allocation alive, writable, and at least `KEY_HANDLE_LENGTH` bytes long
/// for as long as this handle is used — confirm against callers.
pub struct KeyHandle(*mut u8);

impl KeyHandle {
    /// Wraps an existing buffer pointer; does not allocate or copy.
    pub fn from(ptr: *mut u8) -> KeyHandle {
        KeyHandle(ptr)
    }

    /// Returns `true` when all 64 bytes of the handle are zero.
    ///
    /// Despite the name, this inspects the pointed-to bytes, not the pointer
    /// itself. (The stray debug `println!` that used to run on every call has
    /// been removed — a predicate should not write to stdout.)
    pub fn is_null(&self) -> bool {
        for i in 0..KEY_HANDLE_LENGTH {
            // SAFETY: relies on the constructor's contract that `self.0`
            // points to at least KEY_HANDLE_LENGTH valid bytes.
            unsafe {
                if *self.0.offset(i) != 0 {
                    return false;
                }
            }
        }
        true
    }

    /// Hex-dumps the handle to stdout, two digits per byte.
    pub fn print(&self) {
        for i in 0..KEY_HANDLE_LENGTH {
            // SAFETY: same in-bounds contract as `is_null`.
            let u = unsafe { *self.0.offset(i) };
            // `{:02x}` zero-pads each byte so the dump is unambiguous; the
            // previous `{:x?}` ran variable-width digits together.
            print!("{:02x}", u);
        }
        println!();
    }

    /// Fills the handle with a fixed placeholder value (3) in every byte.
    pub fn set_value(&self) {
        // for now, set fixed value
        for i in 0..KEY_HANDLE_LENGTH {
            // SAFETY: same in-bounds contract as `is_null`; the buffer must
            // also be writable.
            unsafe {
                std::ptr::write(self.0.offset(i), 3);
            }
        }
    }
}
use crate::geometry::{SimpleRect, Vec2};
use sdl2::{
render::{Canvas, Texture},
video::Window,
};
use super::{text::FontAtlas, rendering::Drawable};
/// A textured quad with float position/size (`rect`), rotation angle and
/// flip state, plus a cached integer SDL rect used when rendering.
pub struct Sprite<'a> {
    tex: &'a Texture<'a>,
    // Cached integer copy of `rect`; refreshed by `update_sdl_rect`.
    sdl_rect: Option<sdl2::rect::Rect>,
    pub rect: SimpleRect,
    pub angle: f64,
    pub flip_horizontal: bool,
    pub flip_vertical: bool,
}
impl<'a> Sprite<'a> {
    /// Creates a sprite over `tex` at (`x`, `y`) with size `w` x `h`,
    /// unrotated and unflipped, with the SDL rect cache primed.
    pub fn new(tex: &'a Texture<'a>, x: f64, y: f64, w: f64, h: f64) -> Self {
        let mut sprite = Self {
            tex,
            sdl_rect: None,
            rect: SimpleRect::new(x, y, w, h),
            angle: 0.0,
            flip_horizontal: false,
            flip_vertical: false,
        };
        sprite.update_sdl_rect();
        // Tail expression instead of the non-idiomatic `return sprite;`.
        sprite
    }

    /// Current top-left position.
    pub fn pos(&self) -> Vec2 {
        self.rect.pos
    }

    /// Current width/height.
    pub fn size(&self) -> Vec2 {
        self.rect.size
    }

    /// Resizes the sprite and refreshes the cached SDL rect.
    pub fn set_size(&mut self, w: f64, h: f64) {
        self.rect.size.x = w;
        self.rect.size.y = h;
        self.update_sdl_rect();
    }

    /// Moves the sprite to an absolute position and refreshes the cache.
    pub fn set_pos(&mut self, x: f64, y: f64) {
        self.rect.pos.x = x;
        self.rect.pos.y = y;
        self.update_sdl_rect();
    }

    /// Moves the sprite by a delta and refreshes the cache.
    pub fn translate(&mut self, dx: f64, dy: f64) {
        self.rect.pos.x += dx;
        self.rect.pos.y += dy;
        self.update_sdl_rect();
    }

    /// Rebuilds the cached integer SDL rect from the float `rect`.
    ///
    /// NOTE(review): the `as` casts truncate and wrap out-of-range values
    /// (a negative size would wrap to a huge `u32`) — confirm callers keep
    /// the rect within range.
    pub fn update_sdl_rect(&mut self) {
        self.sdl_rect = Some(sdl2::rect::Rect::new(
            self.rect.pos.x as i32,
            self.rect.pos.y as i32,
            self.rect.size.x as u32,
            self.rect.size.y as u32,
        ));
    }

    /// Clamps the sprite into the given bounds and refreshes the cache.
    pub fn clamp(&mut self, min_x: f64, min_y: f64, max_x: f64, max_y: f64) {
        self.rect.clamp(min_x, min_y, max_x, max_y);
        self.update_sdl_rect();
    }
}
impl Drawable for Sprite<'_> {
    /// Blits the sprite via `copy_ex` with its angle and flip flags.
    /// `font_atlas` is unused here; it is part of the `Drawable` signature.
    fn draw<'a>(&mut self, canvas: &mut Canvas<Window>, font_atlas: &mut FontAtlas<'a>) {
        canvas
            .copy_ex(
                &self.tex,
                None, // whole source texture
                self.sdl_rect,
                self.angle,
                None, // default rotation centre
                self.flip_horizontal,
                self.flip_vertical,
            )
            .expect("Error calling canvas.copy_ex")
    }
} |
// Auto-generated register accessor (svd2rust output, per the docs.rs links
// below) — presumably regenerated from the SVD; confirm before hand-editing.
#[doc = "Register `DTXFSTS2` reader"]
pub type R = crate::R<DTXFSTS2_SPEC>;
#[doc = "Field `INEPTFSAV` reader - IN endpoint TxFIFO space available"]
pub type INEPTFSAV_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:15 - IN endpoint TxFIFO space available"]
    #[inline(always)]
    pub fn ineptfsav(&self) -> INEPTFSAV_R {
        // The field occupies the low 16 bits of the 32-bit register.
        INEPTFSAV_R::new((self.bits & 0xffff) as u16)
    }
}
#[doc = "OTG_FS device IN endpoint transmit FIFO status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dtxfsts2::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DTXFSTS2_SPEC;
impl crate::RegisterSpec for DTXFSTS2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dtxfsts2::R`](R) reader structure"]
impl crate::Readable for DTXFSTS2_SPEC {}
#[doc = "`reset()` method sets DTXFSTS2 to value 0"]
impl crate::Resettable for DTXFSTS2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![deny(missing_docs)]
//! Core library for picross frontends.
//!
//! Re-exports the crate's main types so frontends can depend on this root
//! module alone.
mod board;
mod cell;
mod picross;
mod puzzle;
pub use board::Board;
pub use cell::Cell;
pub use picross::Picross;
pub use puzzle::{Constraint, ConstraintEntry, ConstraintGroup, Puzzle};
|
use structopt::StructOpt;
mod day01;
// Command-line arguments: which day's solver to run and the input path.
// (Plain `//` comments on purpose: `///` doc comments would become structopt
// help text and change the program's --help output.)
#[derive(Debug, StructOpt)]
#[structopt(name = "Advent of Code 2020", about = "Yearly challenge")]
struct CommandLineParams {
    // NOTE(review): structopt conventionally takes `short = "d"`; confirm
    // that the leading dash in "-d" parses as intended.
    #[structopt(short = "-d", long)]
    day: i32,
    // Positional argument: path to the puzzle input file.
    input_file: String,
}
fn main() {
let params = CommandLineParams::from_args();
println!("Advent of Code 2020");
println!();
match params.day {
1 => match day01::solve(¶ms.input_file) {
Ok(()) => (),
Err(error) => {
println!("Problem found: {:?}", error);
std::process::exit(1);
}
},
_ => {
println!("Day {:?} not implemented.", params.day);
std::process::exit(1);
}
}
}
|
// Auto-generated Diesel schema (table!/joinable! declarations) — presumably
// produced by `diesel print-schema`; regenerate rather than hand-edit (confirm).
// Categories created by users; each course belongs to one category.
table! {
    categories (id) {
        id -> Int4,
        title -> Varchar,
        user_id -> Int4,
        created_at -> Nullable<Timestamp>,
    }
}
// User-authored comments.
table! {
    comments (id) {
        id -> Int4,
        description -> Varchar,
        user_id -> Int4,
        created_at -> Nullable<Timestamp>,
    }
}
// Courses; `cate_id` references categories.
table! {
    courses (id) {
        id -> Int4,
        title -> Varchar,
        thumbnail -> Nullable<Varchar>,
        video_url -> Nullable<Varchar>,
        description -> Nullable<Varchar>,
        cate_id -> Int4,
        price -> Float8,
        created_at -> Date,
    }
}
// Role catalogue referenced by users.role_id.
table! {
    roles (id) {
        id -> Int4,
        title -> Varchar,
        created_at -> Nullable<Timestamp>,
    }
}
// Join table: which users are enrolled in which courses.
table! {
    user_courses (id) {
        id -> Int4,
        user_id -> Int4,
        course_id -> Int4,
    }
}
// Application accounts.
table! {
    users (id) {
        id -> Int4,
        fullname -> Varchar,
        email -> Varchar,
        password -> Varchar,
        avatar -> Nullable<Varchar>,
        biography -> Nullable<Varchar>,
        created_at -> Timestamp,
        role_id -> Int4,
    }
}
// Foreign-key relationships used by Diesel's join support.
joinable!(categories -> users (user_id));
joinable!(comments -> users (user_id));
joinable!(courses -> categories (cate_id));
joinable!(user_courses -> courses (course_id));
joinable!(user_courses -> users (user_id));
joinable!(users -> roles (role_id));
allow_tables_to_appear_in_same_query!(
    categories,
    comments,
    courses,
    roles,
    user_courses,
    users,
);
|
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United State of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================
// This file was auto-created by LmcpGen. Modifications will be overwritten.
use avtas::lmcp::{Error, ErrorType, Lmcp, LmcpSubscription, SrcLoc, Struct, StructInfo};
use std::fmt::Debug;
/// LMCP `afrl.cmasi.PathWaypoint` message.
///
/// Auto-generated by LmcpGen (see file header) — modifications will be
/// overwritten. Field order matters: `ser`/`deser` below write and read the
/// fields in exactly this declaration order.
#[derive(Clone, Debug, Default)]
#[repr(C)]
pub struct PathWaypoint {
    pub latitude: f64,
    pub longitude: f64,
    pub altitude: f32,
    pub altitude_type: ::afrl::cmasi::altitude_type::AltitudeType,
    pub number: i64,
    pub next_waypoint: i64,
    pub speed: f32,
    pub speed_type: ::afrl::cmasi::speed_type::SpeedType,
    pub climb_rate: f32,
    pub turn_type: ::afrl::cmasi::turn_type::TurnType,
    pub vehicle_action_list: Vec<Box<::afrl::cmasi::vehicle_action::VehicleActionT>>,
    pub contingency_waypoint_a: i64,
    pub contingency_waypoint_b: i64,
    pub associated_tasks: Vec<i64>,
    // Own (non-inherited) field of PathWaypoint.
    pub pause_time: i64,
}
impl PartialEq for PathWaypoint {
    // NOTE(review): generated equality compares only this type's own field
    // (`pause_time`); every field inherited from Waypoint/Location3D is
    // ignored — confirm this matches the intended LmcpGen convention.
    fn eq(&self, _other: &PathWaypoint) -> bool {
        true
            && &self.pause_time == &_other.pause_time
    }
}
impl LmcpSubscription for PathWaypoint {
    // Topic string used for pub/sub routing of this message type.
    fn subscription() -> &'static str { "afrl.cmasi.PathWaypoint" }
}
impl Struct for PathWaypoint {
    /// Wire-format identity (series/version/struct type) written by `ser`
    /// and checked by `deser`.
    fn struct_info() -> StructInfo {
        StructInfo {
            exist: 1,
            series: 4849604199710720000u64,
            version: 3,
            struct_ty: 57,
        }
    }
}
impl Lmcp for PathWaypoint {
    /// Serializes the struct-info header followed by every field in
    /// declaration order, returning the number of bytes written.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        let mut pos = 0;
        {
            // Header first: series/version/type identify the payload.
            let x = Self::struct_info().ser(buf)?;
            pos += x;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.latitude.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.longitude.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.altitude.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.altitude_type.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.number.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.next_waypoint.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.speed.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.speed_type.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.climb_rate.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.turn_type.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.vehicle_action_list.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.contingency_waypoint_a.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.contingency_waypoint_b.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.associated_tasks.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.pause_time.ser(r)?;
            pos += writeb;
        }
        Ok(pos)
    }
    /// Deserializes a buffer produced by `ser`: validates the struct-info
    /// header, then reads each field in the same declaration order.
    fn deser(buf: &[u8]) -> Result<(PathWaypoint, usize), Error> {
        let mut pos = 0;
        let (si, u) = StructInfo::deser(buf)?;
        pos += u;
        if si == PathWaypoint::struct_info() {
            let mut out: PathWaypoint = Default::default();
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (f64, usize) = Lmcp::deser(r)?;
                out.latitude = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (f64, usize) = Lmcp::deser(r)?;
                out.longitude = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (f32, usize) = Lmcp::deser(r)?;
                out.altitude = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (::afrl::cmasi::altitude_type::AltitudeType, usize) = Lmcp::deser(r)?;
                out.altitude_type = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.number = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.next_waypoint = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (f32, usize) = Lmcp::deser(r)?;
                out.speed = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (::afrl::cmasi::speed_type::SpeedType, usize) = Lmcp::deser(r)?;
                out.speed_type = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (f32, usize) = Lmcp::deser(r)?;
                out.climb_rate = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (::afrl::cmasi::turn_type::TurnType, usize) = Lmcp::deser(r)?;
                out.turn_type = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<Box<::afrl::cmasi::vehicle_action::VehicleActionT>>, usize) = Lmcp::deser(r)?;
                out.vehicle_action_list = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.contingency_waypoint_a = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.contingency_waypoint_b = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<i64>, usize) = Lmcp::deser(r)?;
                out.associated_tasks = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.pause_time = x;
                pos += readb;
            }
            Ok((out, pos))
        } else {
            // Header did not match this type's series/version/struct_ty.
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    /// Total serialized size: a fixed overhead of 15 bytes (presumably the
    /// struct-info header — confirm against the LMCP spec) plus each field's
    /// own size.
    fn size(&self) -> usize {
        let mut size = 15;
        size += self.latitude.size();
        size += self.longitude.size();
        size += self.altitude.size();
        size += self.altitude_type.size();
        size += self.number.size();
        size += self.next_waypoint.size();
        size += self.speed.size();
        size += self.speed_type.size();
        size += self.climb_rate.size();
        size += self.turn_type.size();
        size += self.vehicle_action_list.size();
        size += self.contingency_waypoint_a.size();
        size += self.contingency_waypoint_b.size();
        size += self.associated_tasks.size();
        size += self.pause_time.size();
        size
    }
}
/// Accessor trait for `PathWaypoint`, extending the base `WaypointT` trait.
pub trait PathWaypointT: Debug + Send + ::afrl::cmasi::waypoint::WaypointT {
    // Downcast hooks; the defaults return `None` for unrelated implementors.
    fn as_afrl_cmasi_path_waypoint(&self) -> Option<&PathWaypoint> { None }
    fn as_mut_afrl_cmasi_path_waypoint(&mut self) -> Option<&mut PathWaypoint> { None }
    fn pause_time(&self) -> i64;
    fn pause_time_mut(&mut self) -> &mut i64;
}
impl Clone for Box<PathWaypointT> {
    // Clones through the trait object by downcasting to the concrete type;
    // `unreachable!` encodes the generated invariant that every boxed
    // PathWaypointT downcasts successfully.
    fn clone(&self) -> Box<PathWaypointT> {
        if let Some(x) = PathWaypointT::as_afrl_cmasi_path_waypoint(self.as_ref()) {
            Box::new(x.clone())
        } else {
            unreachable!()
        }
    }
}
impl Default for Box<PathWaypointT> {
    // Boxed default of the concrete type.
    fn default() -> Box<PathWaypointT> { Box::new(PathWaypoint::default()) }
}
impl PartialEq for Box<PathWaypointT> {
    // Equal only when both sides downcast to PathWaypoint and those compare
    // equal (see the concrete PartialEq above for what that covers).
    fn eq(&self, other: &Box<PathWaypointT>) -> bool {
        if let (Some(x), Some(y)) =
            (PathWaypointT::as_afrl_cmasi_path_waypoint(self.as_ref()),
             PathWaypointT::as_afrl_cmasi_path_waypoint(other.as_ref())) {
            x == y
        } else {
            false
        }
    }
}
impl Lmcp for Box<PathWaypointT> {
    // Forwards serialization to the concrete PathWaypoint.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        if let Some(x) = PathWaypointT::as_afrl_cmasi_path_waypoint(self.as_ref()) {
            x.ser(buf)
        } else {
            unreachable!()
        }
    }
    // Peeks at the struct-info header before committing to a full deser.
    fn deser(buf: &[u8]) -> Result<(Box<PathWaypointT>, usize), Error> {
        let (si, _) = StructInfo::deser(buf)?;
        if si == PathWaypoint::struct_info() {
            let (x, readb) = PathWaypoint::deser(buf)?;
            Ok((Box::new(x), readb))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    // Forwards size computation to the concrete PathWaypoint.
    fn size(&self) -> usize {
        if let Some(x) = PathWaypointT::as_afrl_cmasi_path_waypoint(self.as_ref()) {
            x.size()
        } else {
            unreachable!()
        }
    }
}
// Generated accessor impl for the inherited Location3D fields.
impl ::afrl::cmasi::location3d::Location3DT for PathWaypoint {
    fn as_afrl_cmasi_path_waypoint(&self) -> Option<&PathWaypoint> { Some(self) }
    fn as_mut_afrl_cmasi_path_waypoint(&mut self) -> Option<&mut PathWaypoint> { Some(self) }
    fn latitude(&self) -> f64 { self.latitude }
    fn latitude_mut(&mut self) -> &mut f64 { &mut self.latitude }
    fn longitude(&self) -> f64 { self.longitude }
    fn longitude_mut(&mut self) -> &mut f64 { &mut self.longitude }
    fn altitude(&self) -> f32 { self.altitude }
    fn altitude_mut(&mut self) -> &mut f32 { &mut self.altitude }
    fn altitude_type(&self) -> ::afrl::cmasi::altitude_type::AltitudeType { self.altitude_type }
    fn altitude_type_mut(&mut self) -> &mut ::afrl::cmasi::altitude_type::AltitudeType { &mut self.altitude_type }
}
// Generated accessor impl for the inherited Waypoint fields.
impl ::afrl::cmasi::waypoint::WaypointT for PathWaypoint {
    fn as_afrl_cmasi_path_waypoint(&self) -> Option<&PathWaypoint> { Some(self) }
    fn as_mut_afrl_cmasi_path_waypoint(&mut self) -> Option<&mut PathWaypoint> { Some(self) }
    fn number(&self) -> i64 { self.number }
    fn number_mut(&mut self) -> &mut i64 { &mut self.number }
    fn next_waypoint(&self) -> i64 { self.next_waypoint }
    fn next_waypoint_mut(&mut self) -> &mut i64 { &mut self.next_waypoint }
    fn speed(&self) -> f32 { self.speed }
    fn speed_mut(&mut self) -> &mut f32 { &mut self.speed }
    fn speed_type(&self) -> ::afrl::cmasi::speed_type::SpeedType { self.speed_type }
    fn speed_type_mut(&mut self) -> &mut ::afrl::cmasi::speed_type::SpeedType { &mut self.speed_type }
    fn climb_rate(&self) -> f32 { self.climb_rate }
    fn climb_rate_mut(&mut self) -> &mut f32 { &mut self.climb_rate }
    fn turn_type(&self) -> ::afrl::cmasi::turn_type::TurnType { self.turn_type }
    fn turn_type_mut(&mut self) -> &mut ::afrl::cmasi::turn_type::TurnType { &mut self.turn_type }
    fn vehicle_action_list(&self) -> &Vec<Box<::afrl::cmasi::vehicle_action::VehicleActionT>> { &self.vehicle_action_list }
    fn vehicle_action_list_mut(&mut self) -> &mut Vec<Box<::afrl::cmasi::vehicle_action::VehicleActionT>> { &mut self.vehicle_action_list }
    fn contingency_waypoint_a(&self) -> i64 { self.contingency_waypoint_a }
    fn contingency_waypoint_a_mut(&mut self) -> &mut i64 { &mut self.contingency_waypoint_a }
    fn contingency_waypoint_b(&self) -> i64 { self.contingency_waypoint_b }
    fn contingency_waypoint_b_mut(&mut self) -> &mut i64 { &mut self.contingency_waypoint_b }
    fn associated_tasks(&self) -> &Vec<i64> { &self.associated_tasks }
    fn associated_tasks_mut(&mut self) -> &mut Vec<i64> { &mut self.associated_tasks }
}
// Generated accessor impl for PathWaypoint's own field.
impl PathWaypointT for PathWaypoint {
    fn as_afrl_cmasi_path_waypoint(&self) -> Option<&PathWaypoint> { Some(self) }
    fn as_mut_afrl_cmasi_path_waypoint(&mut self) -> Option<&mut PathWaypoint> { Some(self) }
    fn pause_time(&self) -> i64 { self.pause_time }
    fn pause_time_mut(&mut self) -> &mut i64 { &mut self.pause_time }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::*;
    impl Arbitrary for PathWaypoint {
        // Builds a random instance field-by-field; vehicle actions are
        // generated as concrete values and boxed into trait objects.
        fn arbitrary<G: Gen>(_g: &mut G) -> PathWaypoint {
            PathWaypoint {
                latitude: Arbitrary::arbitrary(_g),
                longitude: Arbitrary::arbitrary(_g),
                altitude: Arbitrary::arbitrary(_g),
                altitude_type: Arbitrary::arbitrary(_g),
                number: Arbitrary::arbitrary(_g),
                next_waypoint: Arbitrary::arbitrary(_g),
                speed: Arbitrary::arbitrary(_g),
                speed_type: Arbitrary::arbitrary(_g),
                climb_rate: Arbitrary::arbitrary(_g),
                turn_type: Arbitrary::arbitrary(_g),
                vehicle_action_list: Vec::<::afrl::cmasi::vehicle_action::VehicleAction>::arbitrary(_g).into_iter().map(|x| Box::new(x) as Box<::afrl::cmasi::vehicle_action::VehicleActionT>).collect(),
                contingency_waypoint_a: Arbitrary::arbitrary(_g),
                contingency_waypoint_b: Arbitrary::arbitrary(_g),
                associated_tasks: Arbitrary::arbitrary(_g),
                pause_time: Arbitrary::arbitrary(_g),
            }
        }
    }
    quickcheck! {
        // Serialization must fill exactly `size()` bytes.
        fn serializes(x: PathWaypoint) -> Result<TestResult, Error> {
            use std::u16;
            // Vectors longer than u16::MAX are discarded — presumably the
            // wire format's length fields are u16 (confirm against the spec).
            if x.vehicle_action_list.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.associated_tasks.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            Ok(TestResult::from_bool(sx == x.size()))
        }
        // ser followed by deser must reproduce the value and byte count.
        // (Note: the equality checked here is the partial one defined above.)
        fn roundtrips(x: PathWaypoint) -> Result<TestResult, Error> {
            use std::u16;
            if x.vehicle_action_list.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.associated_tasks.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            let (y, sy) = PathWaypoint::deser(&buf)?;
            Ok(TestResult::from_bool(sx == sy && x == y))
        }
    }
}
|
/// Returns the largest value in `list`.
///
/// Panics if `list` is empty (indexing the first element).
fn largest_i32(list: &[i32]) -> i32 {
    let mut best = list[0];
    for &candidate in &list[1..] {
        if candidate > best {
            best = candidate;
        }
    }
    best
}
/// Returns the largest character in `list`.
///
/// Panics if `list` is empty (indexing the first element).
fn largest_char(list: &[char]) -> char {
    let mut best = list[0];
    for &candidate in &list[1..] {
        if candidate > best {
            best = candidate;
        }
    }
    best
}
/// Generic version of the two functions above: returns the largest element
/// of any `PartialOrd + Copy` slice.
///
/// Panics if `list` is empty (indexing the first element).
fn largest<T: PartialOrd + Copy>(list: &[T]) -> T {
    let mut best = list[0];
    for &candidate in &list[1..] {
        if candidate > best {
            best = candidate;
        }
    }
    best
}
/// A 2-D point whose coordinates share a single generic type `T`.
struct Point<T> {
    x: T,
    y: T,
}
impl<T> Point<T> {
    /// Borrows the `x` coordinate.
    fn x(&self) -> &T {
        &self.x
    }
}
/// Drives the book listings: monomorphic `largest_*` helpers, the (still
/// commented-out) generic version, and the generic `Point` type.
fn main() {
    // Listing 10-4: Two functions that differ only in their names and the types in their signatures
    let number_list = vec![34, 50, 25, 100, 65];
    let result = largest_i32(&number_list);
    println!("The largest number is {}", result);

    let char_list = vec!['y', 'm', 'a', 'q'];
    let result = largest_char(&char_list);
    println!("The largest char is {}", result);

    // Listing 10-5: A definition of the largest function that uses generic type parameters but doesn't compile yet
    /*
    let result = largest(&number_list) ;
    println!("The largest number is {}", result) ;
    let result = largest(&char_list) ;
    println!("The largest char is {}", result) ;
    */

    // Listing 10-6: A Point<T> struct that holds x and y values of type T
    let integer = Point { x: 5, y: 10 };
    let float = Point { x: 1.0, y: 4.0 };

    // Listing 10-9: Implementing a method names x on the Point<T> struct that will return a reference to the x field of type T
    let p = Point { x: 5, y: 10 };
    println!("p.x = {}", p.x());
}
|
use super::{
destination::PandasDestination,
transports::{
BigQueryPandasTransport, MsSQLPandasTransport, MysqlPandasTransport, OraclePandasTransport,
PostgresPandasTransport, SqlitePandasTransport,
},
};
use crate::errors::ConnectorXPythonError;
use connectorx::source_router::{SourceConn, SourceType};
use connectorx::{
prelude::*,
sources::{
bigquery::BigQuerySource,
mssql::MsSQLSource,
mysql::{BinaryProtocol as MySQLBinaryProtocol, MySQLSource, TextProtocol},
postgres::{
rewrite_tls_args, BinaryProtocol as PgBinaryProtocol, CSVProtocol, CursorProtocol,
PostgresSource, SimpleProtocol,
},
sqlite::SQLiteSource,
},
sql::CXQuery,
};
use fehler::throws;
use log::debug;
use postgres::NoTls;
use postgres_openssl::MakeTlsConnector;
use pyo3::prelude::*;
use std::convert::TryFrom;
use std::sync::Arc;
/// Fetches only the result-set metadata (schema) for `query`, dispatching on
/// the backend encoded in the connection string `conn`.
///
/// `protocol` selects the wire protocol for Postgres ("csv" / "binary" /
/// "cursor" / "simple") and MySQL ("binary" / "text"); other backends ignore
/// it. Via `#[throws]`, the function effectively returns
/// `Result<&PyAny, ConnectorXPythonError>`.
#[throws(ConnectorXPythonError)]
pub fn get_meta<'a>(py: Python<'a>, conn: &str, protocol: &str, query: String) -> &'a PyAny {
    let source_conn = SourceConn::try_from(conn)?;
    let mut destination = PandasDestination::new(py);
    let queries = &[CXQuery::Naked(query)];
    // One arm per (backend, protocol, TLS) combination; each constructs the
    // matching Source/Transport type pair for the dispatcher.
    match source_conn.ty {
        SourceType::Postgres => {
            debug!("Protocol: {}", protocol);
            let (config, tls) = rewrite_tls_args(&source_conn.conn)?;
            match (protocol, tls) {
                ("csv", Some(tls_conn)) => {
                    let sb =
                        PostgresSource::<CSVProtocol, MakeTlsConnector>::new(config, tls_conn, 1)?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<CSVProtocol, MakeTlsConnector>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("csv", None) => {
                    let sb = PostgresSource::<CSVProtocol, NoTls>::new(config, NoTls, 1)?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<CSVProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("binary", Some(tls_conn)) => {
                    let sb = PostgresSource::<PgBinaryProtocol, MakeTlsConnector>::new(
                        config, tls_conn, 1,
                    )?;
                    let mut dispatcher =
                        Dispatcher::<
                            _,
                            _,
                            PostgresPandasTransport<PgBinaryProtocol, MakeTlsConnector>,
                        >::new(sb, &mut destination, queries, None);
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("binary", None) => {
                    let sb = PostgresSource::<PgBinaryProtocol, NoTls>::new(config, NoTls, 1)?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<PgBinaryProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("cursor", Some(tls_conn)) => {
                    let sb = PostgresSource::<CursorProtocol, MakeTlsConnector>::new(
                        config, tls_conn, 1,
                    )?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<CursorProtocol, MakeTlsConnector>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("cursor", None) => {
                    let sb = PostgresSource::<CursorProtocol, NoTls>::new(config, NoTls, 1)?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<CursorProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("simple", Some(tls_conn)) => {
                    let sb = PostgresSource::<SimpleProtocol, MakeTlsConnector>::new(
                        config, tls_conn, 1,
                    )?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<SimpleProtocol, MakeTlsConnector>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                ("simple", None) => {
                    let sb = PostgresSource::<SimpleProtocol, NoTls>::new(config, NoTls, 1)?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresPandasTransport<SimpleProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                _ => unimplemented!("{} protocol not supported", protocol),
            }
        }
        SourceType::SQLite => {
            // remove the first "sqlite://" manually since url.path is not correct for windows
            let path = &source_conn.conn.as_str()[9..];
            let source = SQLiteSource::new(path, 1)?;
            let mut dispatcher = Dispatcher::<_, _, SqlitePandasTransport>::new(
                source,
                &mut destination,
                queries,
                None,
            );
            debug!("Running dispatcher");
            dispatcher.get_meta()?;
        }
        SourceType::MySQL => {
            debug!("Protocol: {}", protocol);
            match protocol {
                "binary" => {
                    let source = MySQLSource::<MySQLBinaryProtocol>::new(&source_conn.conn[..], 1)?;
                    let mut dispatcher = Dispatcher::<
                        _,
                        _,
                        MysqlPandasTransport<MySQLBinaryProtocol>,
                    >::new(
                        source, &mut destination, queries, None
                    );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                "text" => {
                    let source = MySQLSource::<TextProtocol>::new(&source_conn.conn[..], 1)?;
                    let mut dispatcher =
                        Dispatcher::<_, _, MysqlPandasTransport<TextProtocol>>::new(
                            source,
                            &mut destination,
                            queries,
                            None,
                        );
                    debug!("Running dispatcher");
                    dispatcher.get_meta()?;
                }
                _ => unimplemented!("{} protocol not supported", protocol),
            }
        }
        SourceType::MsSQL => {
            // MsSQL/BigQuery sources need an async runtime handle.
            let rt = Arc::new(tokio::runtime::Runtime::new().expect("Failed to create runtime"));
            let source = MsSQLSource::new(rt, &source_conn.conn[..], 1)?;
            let mut dispatcher = Dispatcher::<_, _, MsSQLPandasTransport>::new(
                source,
                &mut destination,
                queries,
                None,
            );
            debug!("Running dispatcher");
            dispatcher.get_meta()?;
        }
        SourceType::Oracle => {
            // NOTE(review): `OracleSource` is not in the explicit `sources`
            // import list above — presumably re-exported via
            // `connectorx::prelude::*`; confirm.
            let source = OracleSource::new(&source_conn.conn[..], 1)?;
            let mut dispatcher = Dispatcher::<_, _, OraclePandasTransport>::new(
                source,
                &mut destination,
                queries,
                None,
            );
            debug!("Running dispatcher");
            dispatcher.get_meta()?;
        }
        SourceType::BigQuery => {
            let rt = Arc::new(tokio::runtime::Runtime::new().expect("Failed to create runtime"));
            let source = BigQuerySource::new(rt, &source_conn.conn[..])?;
            let mut dispatcher = Dispatcher::<_, _, BigQueryPandasTransport>::new(
                source,
                &mut destination,
                queries,
                None,
            );
            debug!("Running dispatcher");
            dispatcher.get_meta()?;
        }
        _ => unimplemented!("{:?} not implemented!", source_conn.ty),
    }
    // Tail expression: hand the collected metadata back as a Python object.
    destination.result()?
}
|
use super::*;
pub fn handle_i_type(regfile: &mut [u32], mem: &mut [u8], bytes: &[u8], pc: &mut u32, _extensions: &Extensions) -> Result<(), ExecutionError> {
let opcode = get_opcode(bytes);
let rd = get_rd(bytes) as usize;
let f3 = get_f3(bytes);
let rs1 = get_rs1(bytes) as usize;
let f7 = get_f7(bytes) as u32;
let immediate = decode_i_type_immediate(bytes);
if opcode == 0x3 && f3 == 0x0 { //lb
let byte: u32 = mem[((regfile[rs1] as i32) + (immediate as i32)) as usize] as u32;
regfile[rd] = if byte >> 7 == 0x1 {
0xFF_FF_FF_00 + byte
} else {
byte
};
*pc += 4;
}
else if opcode == 0x3 && f3 == 0x1 { //lh
let bottom = mem[((regfile[rs1] as i32) + (immediate as i32)) as usize] as u32;
let top = mem[((regfile[rs1] as i32) + (immediate as i32) + 1) as usize] as u32;
let total = bottom + (top << 8);
regfile[rd] = if top >> 7 == 0x1 {
0xFF_FF_00_00 + total
} else {
total
};
*pc += 4;
}
else if opcode == 0x3 && f3 == 0x2 { //lw
let bottom = mem[((regfile[rs1] as i32) + immediate) as usize] as u32;
let low_mid = mem[((regfile[rs1] as i32) + immediate + 1) as usize] as u32;
let high_mid = mem[((regfile[rs1] as i32) + immediate + 2) as usize] as u32;
let top = mem[((regfile[rs1] as i32) + immediate + 3) as usize] as u32;
regfile[rd] = bottom + (low_mid << 8) + (high_mid << 16) + (top << 24);
*pc += 4;
}
else if opcode == 0x3 && f3 == 0x4 { //lbu
regfile[rd] = mem[((regfile[rs1] as i32) + immediate) as usize] as u32;
*pc += 4;
}
else if opcode == 0x3 && f3 == 0x5 { //lhu
let bottom = mem[((regfile[rs1] as i32) + immediate) as usize] as u32;
let top = mem[((regfile[rs1] as i32) + immediate + 1) as usize] as u32;
regfile[rd] = bottom + (top << 8);
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x0 { //addi
regfile[rd] = ((regfile[rs1] as i32) + immediate) as u32;
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x1 && f7 == 0x0 { //slli
regfile[rd] = regfile[rs1] << immediate;
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x2 { //slti
regfile[rd] = if (regfile[rs1] as i32) < immediate { 1 } else { 0 };
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x3 { //sltiu
regfile[rd] = if regfile[rs1] < (immediate as u32) { 1 } else { 0 };
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x4 { //xori
regfile[rd] = regfile[rs1] ^ (immediate as u32);
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x5 && f7 == 0x0 { //srli
regfile[rd] = regfile[rs1] >> (immediate as u32);
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x5 && f7 == 0x20 { //srai
regfile[rd] = ((regfile[rs1] as i32) >> immediate) as u32;
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x6 { //ori
regfile[rd] = regfile[rs1] | (immediate as u32);
*pc += 4;
}
else if opcode == 0x13 && f3 == 0x7 { //andi
regfile[rd] = regfile[rs1] & (immediate as u32);
*pc += 4;
}
else if opcode == 0x67 && f3 == 0x0 { // jalr
let destination = ((regfile[rs1] as i32) + immediate) & 0xFF_FF_FF_FE;
if destination % INSTRUCTION_ADDRESS_MISALIGNED_THRESHOLD != 0 {
return Err(ExecutionError::InstructionAddressMisaligned);
}
regfile[rd] = *pc + 4;
*pc = destination as u32;
}
else if opcode == 0x73 && f3 == 0x0 && f7 == 0x0 { //ecall
match regfile[10] {
0x1 => {
println!("PRINT ECALL: {}", regfile[11]);
}
0xA => {
println!("TERMINATE ECALL");
return Err(ExecutionError::UserTerminate);
}
_ => {}
}
}
else if opcode == 0x73 && f3 == 0x1 { //csrrw
return Err(ExecutionError::Unimplemented("csrrw".into()));
}
else if opcode == 0x73 && f3 == 0x2 { //csrrs
return Err(ExecutionError::Unimplemented("csrrs".into()));
}
else if opcode == 0x73 && f3 == 0x3 { //csrrc
return Err(ExecutionError::Unimplemented("csrrc".into()));
}
else if opcode == 0x73 && f3 == 0x4 { //csrrwi
return Err(ExecutionError::Unimplemented("csrrwi".into()));
}
else if opcode == 0x73 && f3 == 0x5 { //csrrsi
return Err(ExecutionError::Unimplemented("csrrsi".into()));
}
else if opcode == 0x73 && f3 == 0x6 { //csrrci
return Err(ExecutionError::Unimplemented("csrrci".into()));
}
else {
return Err(ExecutionError::InvalidInstruction(encode_hex(bytes)));
}
Ok(())
} |
use nia_protocol_rust::RemoveModifierRequest;
use crate::error::{NiaServerError, NiaServerResult};
use crate::protocol::NiaKey;
use crate::protocol::Serializable;
/// Request to remove a previously registered modifier key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NiaRemoveModifierRequest {
    // The modifier key to remove.
    key: NiaKey,
}
impl NiaRemoveModifierRequest {
    /// Constructs a request for the given modifier key.
    pub fn new(key: NiaKey) -> NiaRemoveModifierRequest {
        NiaRemoveModifierRequest { key }
    }
    /// Returns the modifier key. Despite the `take_` name it copies the key
    /// out of `&self` rather than consuming the request.
    pub fn take_key(&self) -> NiaKey {
        self.key
    }
}
/// Protobuf (de)serialization for [`NiaRemoveModifierRequest`].
impl Serializable<NiaRemoveModifierRequest, nia_protocol_rust::RemoveModifierRequest>
    for NiaRemoveModifierRequest
{
    fn to_pb(&self) -> nia_protocol_rust::RemoveModifierRequest {
        // Build the protobuf message and attach the converted key.
        let mut request_pb = nia_protocol_rust::RemoveModifierRequest::new();
        request_pb.set_modifier_key(self.key.to_pb());
        request_pb
    }
    fn from_pb(
        mut object_pb: nia_protocol_rust::RemoveModifierRequest,
    ) -> NiaServerResult<NiaRemoveModifierRequest> {
        // Take ownership of the embedded key and convert it back into the
        // domain type; key conversion errors propagate to the caller.
        let key = NiaKey::from_pb(object_pb.take_modifier_key())?;
        Ok(NiaRemoveModifierRequest::new(key))
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;

    /// Serializes the request to bytes and back, asserting equality.
    fn assert_roundtrip(request: NiaRemoveModifierRequest) {
        let serialized = request.to_bytes().unwrap();
        let deserialized = NiaRemoveModifierRequest::from_bytes(serialized).unwrap();
        assert_eq!(request, deserialized);
    }

    #[test]
    fn serializes_and_deserializes_key_1() {
        assert_roundtrip(NiaRemoveModifierRequest::new(NiaKey::Key1(1)));
    }

    #[test]
    fn serializes_and_deserializes_key_2() {
        assert_roundtrip(NiaRemoveModifierRequest::new(NiaKey::Key2(1, 2)));
    }
}
|
use ckb_types::{
core::{Capacity, Cycle},
packed::Byte32,
};
/// LRU cache mapping a transaction hash to its verification result.
pub type TxVerifyCache = lru_cache::LruCache<Byte32, CacheEntry>;
/// Cached outcome of verifying one transaction.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct CacheEntry {
    /// Cycles consumed by script verification.
    pub cycles: Cycle,
    /// Transaction fee.
    pub fee: Capacity,
}
impl CacheEntry {
    /// Constructs a cache entry from verification cycles and fee.
    pub fn new(cycles: Cycle, fee: Capacity) -> Self {
        CacheEntry { cycles, fee }
    }
}
|
extern crate bspline;
use bspline::BSpline;
use std::ops::{Add, Mul};
extern crate trait_set;
use trait_set::trait_set;
extern crate num_traits;
#[cfg(not(feature = "nalgebra-support"))]
trait_set! {
pub trait Float = num_traits::Float;
}
#[cfg(feature = "nalgebra-support")]
extern crate nalgebra;
#[cfg(feature = "nalgebra-support")]
trait_set! {
pub trait Float = nalgebra::RealField + Copy;
}
/// Check that the bspline returns the values we expect it to at various t values.
/// Accepts any slice of `(t, expected)` pairs (`&Vec` callers coerce via deref).
fn check_bspline<T: Mul<F, Output = T> + Add<Output = T> + Copy + PartialOrd, F: Float>(
    spline: &BSpline<T, F>,
    expect: &[(F, T)],
) -> bool {
    // `all` short-circuits on the first mismatch, matching the original
    // `fold(true, ..)` whose `&&` stopped evaluating `point` once false.
    expect.iter().all(|&(t, x)| spline.point(t) == x)
}
#[test]
fn linear_bspline() {
    let control_points: Vec<f32> = vec![0.0, 1.0];
    let knot_vector: Vec<f32> = vec![0.0, 0.0, 1.0, 1.0];
    let spline = BSpline::new(1, control_points, knot_vector);
    // A degree-1 spline between 0 and 1 is the identity on [0, 1].
    let samples: Vec<(f32, f32)> = vec![
        (0.0, 0.0),
        (0.2, 0.2),
        (0.4, 0.4),
        (0.6, 0.6),
        (0.8, 0.8),
        (1.0, 1.0),
    ];
    assert!(check_bspline(&spline, &samples));
}
#[test]
fn quadratic_bspline() {
    let control_points: Vec<f32> = vec![0.0, 0.0, 1.0, 0.0, 0.0];
    let knot_vector: Vec<f32> = vec![0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 3.0, 3.0];
    let spline = BSpline::new(2, control_points, knot_vector);
    // Symmetric bump peaking at t = 1.5.
    let samples: Vec<(f32, f32)> = vec![
        (0.0, 0.0),
        (0.5, 0.125),
        (1.0, 0.5),
        (1.4, 0.74),
        (1.5, 0.75),
        (1.6, 0.74),
        (2.0, 0.5),
        (2.5, 0.125),
        (3.0, 0.0),
    ];
    assert!(check_bspline(&spline, &samples));
}
#[test]
fn cubic_bspline() {
    let control_points: Vec<f32> = vec![0.0, 0.0, 0.0, 6.0, 0.0, 0.0, 0.0];
    let knot_vector: Vec<f32> = vec![-2.0, -2.0, -2.0, -2.0, -1.0, 0.0, 1.0, 2.0, 2.0, 2.0, 2.0];
    let spline = BSpline::new(3, control_points, knot_vector);
    // Bell-shaped curve centered at t = 0.
    let samples: Vec<(f32, f32)> = vec![
        (-2.0, 0.0),
        (-1.5, 0.125),
        (-1.0, 1.0),
        (-0.6, 2.488),
        (0.0, 4.0),
        (0.5, 2.875),
        (1.5, 0.12500001),
        (2.0, 0.0),
    ];
    assert!(check_bspline(&spline, &samples));
}
#[test]
fn quartic_bspline() {
    let control_points: Vec<f32> = vec![0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0];
    let knot_vector: Vec<f32> = vec![
        0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 5.0, 5.0, 5.0, 5.0,
    ];
    let spline = BSpline::new(4, control_points, knot_vector);
    let samples: Vec<(f32, f32)> = vec![
        (0.0, 0.0),
        (0.4, 0.0010666668),
        (1.0, 0.041666668),
        (1.5, 0.19791667),
        (2.0, 0.4583333),
        (2.5, 0.5989583),
        (3.0, 0.4583333),
        (3.2, 0.35206667),
        (4.1, 0.02733751),
        (4.5, 0.002604167),
        (5.0, 0.0),
    ];
    assert!(check_bspline(&spline, &samples));
}
#[test]
fn quartic_bspline_f64() {
    // Same shape as `quartic_bspline`, sampled in f64 precision.
    let control_points: Vec<f64> = vec![0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0];
    let knot_vector: Vec<f64> = vec![
        0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 5.0, 5.0, 5.0, 5.0,
    ];
    let spline = BSpline::new(4, control_points, knot_vector);
    let samples: Vec<(f64, f64)> = vec![
        (0.0, 0.0),
        (0.4, 0.001066666666666667),
        (1.0, 0.041666666666666664),
        (1.5, 0.19791666666666666),
        (2.0, 0.45833333333333337),
        (2.5, 0.5989583333333334),
        (3.0, 0.4583333333333333),
        (3.2, 0.3520666666666666),
        (4.1, 0.027337500000000046),
        (4.5, 0.002604166666666666),
        (5.0, 0.0),
    ];
    assert!(check_bspline(&spline, &samples));
}
|
use std::io::{Read, Result as IoResult, Seek, SeekFrom};
/// Reader adapter that can look at the next byte without consuming it.
#[derive(Debug)]
pub(crate) struct PeekReader<R: Read> {
    /// Underlying reader.
    pub inner: R,
    /// A byte already read from `inner` but not yet handed to callers.
    pub peeked: Option<u8>,
}
impl<R: Read> Read for PeekReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> {
        if buf.is_empty() {
            return Ok(0);
        }
        match self.peeked.take() {
            Some(b) => {
                buf[0] = b;
                // Deliver the peeked byte plus whatever the inner reader yields.
                // On inner-read failure, restore the peeked byte so it is not
                // silently lost (the previous code dropped it and returned Err).
                match self.inner.read(&mut buf[1..]) {
                    Ok(n) => Ok(n + 1),
                    Err(e) => {
                        self.peeked = Some(b);
                        Err(e)
                    }
                }
            }
            None => self.inner.read(buf),
        }
    }
}
impl<R: Read + Seek> Seek for PeekReader<R> {
    fn seek(&mut self, pos: SeekFrom) -> ::std::io::Result<u64> {
        if self.peeked.take().is_some() {
            // Our logical position is one byte behind the inner reader's
            // (the peeked byte); rewind before applying the requested seek so
            // `SeekFrom::Current` offsets are interpreted correctly.
            self.inner.seek(SeekFrom::Current(-1))?;
        }
        self.inner.seek(pos)
    }
}
impl<R: Read> PeekReader<R> {
    /// Wraps `inner` with an empty peek buffer.
    pub fn new(inner: R) -> PeekReader<R> {
        PeekReader {
            inner,
            peeked: None
        }
    }
    /// Returns whether the stream is exhausted, buffering one byte if needed.
    #[allow(clippy::wrong_self_convention)]
    pub fn is_empty(&mut self) -> IoResult<bool> {
        if self.peeked.is_some() {
            return Ok(false);
        }
        let mut buf = [0; 1];
        match self.read(&mut buf)? {
            0 => Ok(true),
            1 => {
                // Stash the probe byte so the next read still sees it.
                self.peeked = Some(buf[0]);
                Ok(false)
            }
            _ => unreachable!(),
        }
    }
}
|
#![warn(
clippy::all,
clippy::nursery,
clippy::pedantic,
missing_copy_implementations,
missing_debug_implementations,
rust_2018_idioms,
unused_qualifications
)]
#![allow(
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::similar_names,
clippy::single_match_else,
clippy::wildcard_imports,
dead_code,
elided_lifetimes_in_paths
)]
#![feature(format_args_capture, box_syntax)]
mod codegen;
mod free_vars;
#[cfg(test)]
mod tests;
use walrus_semantics::{hir::HirData, scopes::Scopes, ty::InferenceResult};
use crate::codegen::*;
/// Compiles the analyzed program (`hir` + `scopes` + `types`) into LLVM IR
/// text for a module called `name`.
pub fn codegen(name: &str, hir: HirData, scopes: Scopes, types: InferenceResult) -> String {
    // LLVM context must outlive the builder/module/compiler borrowing it.
    let llvm = Context::create();
    let builder = llvm.create_builder();
    let module = llvm.create_module(name);
    let backend = Compiler {
        llvm: &llvm,
        module,
        builder,
        hir,
        scopes,
        types,
    };
    backend.codegen_module()
}
|
use apllodb_shared_components::{ApllodbResult, DatabaseName};
use apllodb_sql_parser::apllodb_ast;
use crate::ast_translator::AstTranslator;
impl AstTranslator {
    /// Translates an AST database name into the validated domain type.
    pub fn database_name(
        ast_database_name: apllodb_ast::DatabaseName,
    ) -> ApllodbResult<DatabaseName> {
        // Unwrap the AST newtype layers and re-validate via the domain constructor.
        let raw_name = ast_database_name.0 .0;
        DatabaseName::new(raw_name)
    }
}
|
//#![deny(warnings)]
//#![allow(unused, deprecated)]
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
// #![feature(use_extern_macros)]
#[macro_use]
extern crate hyper;
#[macro_use]
extern crate log;
extern crate futures;
extern crate log4rs;
extern crate mta_status;
extern crate net2;
extern crate num_cpus;
extern crate tokio_core;
extern crate pretty_env_logger;
use std::fs::File;
use std::io::Write;
mod service;
// Build-profile flag: `true` only for release builds. `main` uses it to pick
// file-based log4rs logging (prod) vs `pretty_env_logger` (debug).
#[cfg(debug_assertions)]
const IS_PROD: bool = false;
#[cfg(not(debug_assertions))]
const IS_PROD: bool = true;
fn main() {
    // Logging setup: release builds materialize the bundled log4rs config
    // next to the binary and load it; debug builds use env-driven logging.
    if IS_PROD {
        let config = include_str!("../../resources/log4rs.yaml");
        File::create("log_config.yaml")
            .expect("Unable to create file")
            .write_all(config.as_bytes())
            .expect("Unable to write data");
        log4rs::init_file("log_config.yaml", Default::default()).unwrap();
    } else {
        pretty_env_logger::init();
    }
    let url = "127.0.0.1:4000";
    warn!("prod build: {}", IS_PROD);
    warn!("http://{}", url);
    // Blocks serving HTTP with one worker per CPU.
    service::start_server(url, num_cpus::get());
}
|
mod aggregation;
mod group_by;
mod into_entries_iter;
mod query;
mod statement;
mod statement_expr;
mod round;
pub use aggregation::Aggregation;
pub use query::{QueryBuilder, Row};
pub use statement::Statement;
pub use statement_expr::StatementExpr;
#[cfg(test)]
#[cfg(test)]
mod test {
    use super::*;
    use crate::storage::{error::Error, series_table, Entry};
    use chrono::{TimeZone, Utc};
    use std::convert::TryInto;
    /// Parses a `"%F %H:%M"` timestamp string into UTC epoch milliseconds.
    fn utc_millis(ts: &str) -> i64 {
        Utc.datetime_from_str(ts, "%F %H:%M")
            .unwrap()
            .timestamp_millis()
    }
    /// Builds a series entry from a timestamp string and a value.
    fn entry(ts: &str, value: f64) -> Entry {
        Entry {
            ts: utc_millis(ts),
            // Field-init shorthand replaces the redundant `value: value`.
            value,
        }
    }
    /// Builds an expected result row holding a single aggregation.
    fn row(ts: &str, agg: Aggregation) -> Row {
        Row {
            ts: utc_millis(ts),
            values: vec![agg],
        }
    }
    #[test]
    fn test_group_by_query() -> Result<(), Error> {
        let table = series_table::test::create()?;
        table.create("series-1")?;
        let writer = table.writer("series-1").unwrap();
        writer.append(&vec![
            entry("1961-01-02 11:00", 3.0),
            entry("1961-01-02 11:02", 2.0),
            entry("1961-01-02 11:04", 4.0),
            entry("1961-01-02 12:02", 5.0),
            entry("1961-01-02 12:04", 7.0),
            entry("1961-01-02 12:02", 5.0),
            entry("1961-01-02 12:04", 7.0),
            entry("1971-01-02 12:02", 5.0),
            entry("1971-01-02 12:04", 7.0),
        ])?;
        let reader = table.reader("series-1").unwrap();
        // Group by hour from 1961-01-02, averaging values per bucket.
        let rows: Vec<Row> = reader
            .query(
                StatementExpr {
                    from: "1961-01-02".to_string(),
                    group_by: "hour".to_string(),
                    aggregators: "mean".to_string(),
                    limit: "1000".to_string(),
                }
                .try_into()
                .unwrap(),
            )
            .rows()?
            .into_iter()
            .collect();
        assert_eq!(
            vec![
                row("1961-01-02 11:00", Aggregation::Mean(3.0)),
                row("1961-01-02 12:00", Aggregation::Mean(6.0)),
                row("1971-01-02 12:00", Aggregation::Mean(6.0)),
            ],
            rows
        );
        Ok(())
    }
}
|
use std::{
collections::{BTreeMap, HashMap},
fmt::{Debug, Formatter},
};
use anyhow::{Context as _, Result};
use camino::Utf8PathBuf;
use regex::Regex;
use serde::{ser::SerializeMap, Deserialize, Serialize, Serializer};
// https://github.com/llvm/llvm-project/blob/llvmorg-17.0.0-rc2/llvm/tools/llvm-cov/CoverageExporterJson.cpp#L13-L47
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub struct LlvmCovJsonExport {
    /// List of one or more export objects
    pub(crate) data: Vec<Export>,
    /// Format tag; expected to be `"llvm.coverage.json.export"` (asserted in tests).
    #[serde(rename = "type")]
    pub(crate) type_: String,
    /// Format version string; tests expect a `"2.0."` prefix.
    pub(crate) version: String,
    /// Additional information injected into the export data.
    #[serde(skip_deserializing, skip_serializing_if = "Option::is_none")]
    cargo_llvm_cov: Option<CargoLlvmCov>,
}
/// <https://docs.codecov.com/docs/codecov-custom-coverage-format>
///
/// This represents the fraction: `{covered}/{count}`.
#[derive(Default, Debug)]
pub(crate) struct CodeCovCoverage {
    pub(crate) count: u64,
    pub(crate) covered: u64,
}
impl Serialize for CodeCovCoverage {
    /// Serializes as the codecov fraction string `"{covered}/{count}"`.
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `collect_str` formats straight into the serializer, avoiding the
        // intermediate `String` that `serialize_str(&format!(..))` allocated.
        serializer.collect_str(&format_args!("{}/{}", self.covered, self.count))
    }
}
/// line -> coverage in fraction
#[derive(Default)]
pub struct CodeCovExport(BTreeMap<u64, CodeCovCoverage>);
/// Serialize [`CodeCovExport`] as a map from line-number *strings* to coverage
/// fractions (JSON object keys must be strings).
impl Serialize for CodeCovExport {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `collect_map` drives the whole map from an iterator, replacing the
        // manual `serialize_map`/`serialize_entry` loop with equivalent output.
        serializer.collect_map(self.0.iter().map(|(line, cov)| (line.to_string(), cov)))
    }
}
/// Codecov custom-format document: per-file, per-line coverage fractions.
#[derive(Default, Serialize)]
pub struct CodeCovJsonExport {
    /// filename -> per-line coverage fractions.
    pub(crate) coverage: BTreeMap<String, CodeCovExport>,
}
impl CodeCovJsonExport {
    /// Converts one llvm-cov `Export` into codecov per-line fractions,
    /// skipping files matched by `ignore_filename_regex`.
    fn from_export(value: Export, ignore_filename_regex: Option<&Regex>) -> Self {
        let functions = value.functions.unwrap_or_default();
        // filename -> (region location -> executed at least once?)
        let mut regions = BTreeMap::new();
        for func in functions {
            for filename in func.filenames {
                if let Some(re) = ignore_filename_regex {
                    if re.is_match(&filename) {
                        continue;
                    }
                }
                for region in &func.regions {
                    let loc = RegionLocation::from(region);
                    // region location to covered
                    let coverage: &mut HashMap<RegionLocation, bool> =
                        regions.entry(filename.clone()).or_default();
                    let covered = coverage.entry(loc).or_default();
                    // A region counts as covered if any instantiation executed it.
                    *covered = *covered || region.execution_count() > 0;
                }
            }
        }
        // Fold region-level coverage down to per-line fractions.
        let mut coverage = BTreeMap::new();
        for (filename, regions) in regions {
            let coverage: &mut CodeCovExport = coverage.entry(filename).or_default();
            for (loc, covered) in regions {
                for line in loc.lines() {
                    let coverage = coverage.0.entry(line).or_default();
                    coverage.count += 1;
                    coverage.covered += u64::from(covered);
                }
            }
        }
        Self { coverage }
    }
    /// Converts a full llvm-cov JSON export, merging all `data` entries into
    /// one codecov document.
    ///
    /// # Panics
    ///
    /// Panics if `ignore_filename_regex` is not a valid regex.
    #[must_use]
    pub fn from_llvm_cov_json_export(
        value: LlvmCovJsonExport,
        ignore_filename_regex: Option<&str>,
    ) -> Self {
        let re = ignore_filename_regex.map(|s| Regex::new(s).unwrap());
        let exports: Vec<_> =
            value.data.into_iter().map(|v| Self::from_export(v, re.as_ref())).collect();
        let mut combined = CodeCovJsonExport::default();
        // Sum per-line counts across all exports.
        for export in exports {
            for (filename, coverage) in export.coverage {
                let combined = combined.coverage.entry(filename).or_default();
                for (line, coverage) in coverage.0 {
                    // `or_default()` replaces the hand-written zero literal,
                    // matching the entry-API style used elsewhere in this impl.
                    let combined = combined.0.entry(line).or_default();
                    combined.count += coverage.count;
                    combined.covered += coverage.covered;
                }
            }
        }
        combined
    }
}
/// Files -> list of uncovered lines.
pub(crate) type UncoveredLines = BTreeMap<String, Vec<u64>>;
impl LlvmCovJsonExport {
    /// Demangles all function names in the export in place.
    pub fn demangle(&mut self) {
        for data in &mut self.data {
            if let Some(functions) = &mut data.functions {
                for func in functions {
                    func.name = format!("{:#}", rustc_demangle::demangle(&func.name));
                }
            }
        }
    }
    /// Injects cargo-llvm-cov metadata (crate version and manifest path).
    pub fn inject(&mut self, manifest_path: Utf8PathBuf) {
        self.cargo_llvm_cov =
            Some(CargoLlvmCov { version: env!("CARGO_PKG_VERSION"), manifest_path });
    }
    /// Gets the minimal lines coverage of all files.
    ///
    /// # Errors
    ///
    /// Fails when `totals` is not an object or lacks numeric
    /// `lines.count` / `lines.covered` fields.
    pub fn get_lines_percent(&self) -> Result<f64> {
        let mut count = 0_f64;
        let mut covered = 0_f64;
        for data in &self.data {
            let totals = &data.totals.as_object().context("totals is not an object")?;
            let lines = &totals["lines"].as_object().context("no lines")?;
            count += lines["count"].as_f64().context("no count")?;
            covered += lines["covered"].as_f64().context("no covered")?;
        }
        // Avoid 0/0 for empty reports.
        if count == 0_f64 {
            return Ok(0_f64);
        }
        Ok(covered * 100_f64 / count)
    }
    /// Gets the list of uncovered lines of all files.
    #[must_use]
    pub fn get_uncovered_lines(&self, ignore_filename_regex: Option<&str>) -> UncoveredLines {
        let mut uncovered_files: UncoveredLines = BTreeMap::new();
        let mut covered_files: UncoveredLines = BTreeMap::new();
        let re = ignore_filename_regex.map(|s| Regex::new(s).unwrap());
        for data in &self.data {
            if let Some(ref functions) = data.functions {
                // Iterate over all functions inside the coverage data.
                for function in functions {
                    if function.filenames.is_empty() {
                        continue;
                    }
                    let file_name = &function.filenames[0];
                    if let Some(ref re) = re {
                        if re.is_match(file_name) {
                            continue;
                        }
                    }
                    // Accumulated execution count per line for this function.
                    let mut lines: BTreeMap<u64, u64> = BTreeMap::new();
                    // Iterate over all possible regions inside a function:
                    for region in &function.regions {
                        // LineStart, ColumnStart, LineEnd, ColumnEnd, ExecutionCount, FileID, ExpandedFileID, Kind
                        let line_start = region.0;
                        let line_end = region.2;
                        let exec_count = region.4;
                        // Remember the execution count for each line of that region:
                        for line in line_start..=line_end {
                            *lines.entry(line).or_insert(0) += exec_count;
                        }
                    }
                    let mut uncovered_lines: Vec<u64> = lines
                        .iter()
                        .filter(|(_line, exec_count)| **exec_count == 0)
                        .map(|(line, _exec_count)| *line)
                        .collect();
                    let mut covered_lines: Vec<u64> = lines
                        .iter()
                        .filter(|(_line, exec_count)| **exec_count > 0)
                        .map(|(line, _exec_count)| *line)
                        .collect();
                    if !uncovered_lines.is_empty() {
                        uncovered_files
                            .entry(file_name.clone())
                            .or_default()
                            .append(&mut uncovered_lines);
                    }
                    if !covered_lines.is_empty() {
                        covered_files
                            .entry(file_name.clone())
                            .or_default()
                            .append(&mut covered_lines);
                    }
                }
            }
        }
        for uncovered_file in &mut uncovered_files {
            // Check if a line is both covered and non-covered. It's covered in this case.
            let file_name = uncovered_file.0;
            let uncovered_lines = uncovered_file.1;
            if let Some(covered_lines) = covered_files.get(file_name) {
                uncovered_lines.retain(|&x| !covered_lines.contains(&x));
            }
            // Remove duplicates.
            uncovered_lines.sort_unstable();
            uncovered_lines.dedup();
        }
        // Remove empty keys.
        uncovered_files.retain(|_, v| !v.is_empty());
        uncovered_files
    }
    /// Sums `totals[section]` across all export objects and returns
    /// `count - covered` (saturating). Shared by the three public
    /// `count_uncovered_*` methods, which were previously copy-pasted.
    fn count_uncovered_in(&self, section: &str) -> Result<u64> {
        let mut count = 0_u64;
        let mut covered = 0_u64;
        for data in &self.data {
            let totals = &data.totals.as_object().context("totals is not an object")?;
            // Error strings match the originals ("no functions" / "no lines" / "no regions").
            let counts = &totals[section]
                .as_object()
                .with_context(|| format!("no {section}"))?;
            count += counts["count"].as_u64().context("no count")?;
            covered += counts["covered"].as_u64().context("no covered")?;
        }
        Ok(count.saturating_sub(covered))
    }
    pub fn count_uncovered_functions(&self) -> Result<u64> {
        self.count_uncovered_in("functions")
    }
    pub fn count_uncovered_lines(&self) -> Result<u64> {
        self.count_uncovered_in("lines")
    }
    pub fn count_uncovered_regions(&self) -> Result<u64> {
        self.count_uncovered_in("regions")
    }
}
/// Json representation of one `CoverageMapping`
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct Export {
    /// List of objects describing coverage for files
    pub(crate) files: Vec<File>,
    /// List of objects describing coverage for functions
    ///
    /// This is None if report is summary-only.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) functions: Option<Vec<Function>>,
    /// Aggregate totals kept as raw JSON and accessed by key (see
    /// `LlvmCovJsonExport::get_lines_percent` / `count_uncovered_*`).
    pub(crate) totals: serde_json::Value,
}
/// Coverage for a single file
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct File {
    /// List of Branches in the file
    ///
    /// This is None if report is summary-only.
    // https://github.com/llvm/llvm-project/blob/llvmorg-17.0.0-rc2/llvm/tools/llvm-cov/CoverageExporterJson.cpp#L92
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) branches: Option<Vec<serde_json::Value>>,
    /// List of expansion records
    ///
    /// This is None if report is summary-only.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) expansions: Option<Vec<serde_json::Value>>,
    /// Path of the source file this record describes.
    pub(crate) filename: String,
    /// List of Segments contained in the file
    ///
    /// This is None if report is summary-only.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) segments: Option<Vec<Segment>>,
    /// Object summarizing the coverage for this file
    pub(crate) summary: Summary,
}
/// Describes a segment of the file with a counter
// https://github.com/llvm/llvm-project/blob/llvmorg-17.0.0-rc2/llvm/tools/llvm-cov/CoverageExporterJson.cpp#L79
#[derive(Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct Segment(
    /* Line */ pub(crate) u64,
    /* Col */ pub(crate) u64,
    /* Count */ pub(crate) u64,
    /* HasCount */ pub(crate) bool,
    /* IsRegionEntry */ pub(crate) bool,
    /* IsGapRegion */ pub(crate) bool,
);
// Named accessors over the positional tuple fields; the order mirrors the
// llvm-cov JSON array layout noted above.
impl Segment {
    pub(crate) fn line(&self) -> u64 {
        self.0
    }
    pub(crate) fn col(&self) -> u64 {
        self.1
    }
    pub(crate) fn count(&self) -> u64 {
        self.2
    }
    pub(crate) fn has_count(&self) -> bool {
        self.3
    }
    pub(crate) fn is_region_entry(&self) -> bool {
        self.4
    }
    pub(crate) fn is_gap_region(&self) -> bool {
        self.5
    }
}
// Renders the positional tuple with field names for readable diagnostics.
impl Debug for Segment {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Segment")
            .field("line", &self.line())
            .field("col", &self.col())
            .field("count", &self.count())
            .field("has_count", &self.has_count())
            .field("is_region_entry", &self.is_region_entry())
            .field("is_gap_region", &self.is_gap_region())
            .finish()
    }
}
// https://github.com/llvm/llvm-project/blob/llvmorg-17.0.0-rc2/llvm/tools/llvm-cov/CoverageExporterJson.cpp#L258
/// Coverage info for a single function
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct Function {
    /// Branch records, kept as raw JSON (not interpreted in this module).
    pub(crate) branches: Vec<serde_json::Value>,
    /// Function-level execution count as reported by llvm-cov.
    pub(crate) count: u64,
    /// List of filenames that the function relates to
    pub(crate) filenames: Vec<String>,
    /// Function name; mangled until `LlvmCovJsonExport::demangle` is called.
    pub(crate) name: String,
    /// Source regions with execution counts (see `Region` for field layout).
    pub(crate) regions: Vec<Region>,
}
/// One source region with its execution count; the tuple order mirrors the
/// llvm-cov JSON array layout annotated inline below.
#[derive(Copy, Clone, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct Region(
    /* LineStart */ pub(crate) u64,
    /* ColumnStart */ pub(crate) u64,
    /* LineEnd */ pub(crate) u64,
    /* ColumnEnd */ pub(crate) u64,
    /* ExecutionCount */ pub(crate) u64,
    /* FileID */ pub(crate) u64,
    /* ExpandedFileID */ pub(crate) u64,
    /* Kind */ pub(crate) u64,
);
// Named accessors over the positional tuple fields.
impl Region {
    pub(crate) fn line_start(&self) -> u64 {
        self.0
    }
    pub(crate) fn column_start(&self) -> u64 {
        self.1
    }
    pub(crate) fn line_end(&self) -> u64 {
        self.2
    }
    pub(crate) fn column_end(&self) -> u64 {
        self.3
    }
    pub(crate) fn execution_count(&self) -> u64 {
        self.4
    }
    pub(crate) fn file_id(&self) -> u64 {
        self.5
    }
    pub(crate) fn expanded_file_id(&self) -> u64 {
        self.6
    }
    pub(crate) fn kind(&self) -> u64 {
        self.7
    }
}
/// The location of a region
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]
pub(crate) struct RegionLocation {
    start_line: u64,
    end_line: u64,
    start_column: u64,
    end_column: u64,
}
// Extracts just the positional fields of a `Region`, dropping the counters.
impl From<&Region> for RegionLocation {
    fn from(region: &Region) -> Self {
        Self {
            start_line: region.line_start(),
            end_line: region.line_end(),
            start_column: region.column_start(),
            end_column: region.column_end(),
        }
    }
}
impl RegionLocation {
    /// Iterates over all source lines spanned by the region (inclusive range).
    fn lines(&self) -> impl Iterator<Item = u64> {
        self.start_line..=self.end_line
    }
}
// Renders the positional `Region` tuple with field names for readable diagnostics.
impl Debug for Region {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Region")
            .field("line_start", &self.line_start())
            .field("column_start", &self.column_start())
            .field("line_end", &self.line_end())
            .field("column_end", &self.column_end())
            .field("execution_count", &self.execution_count())
            .field("file_id", &self.file_id())
            .field("expanded_file_id", &self.expanded_file_id())
            .field("kind", &self.kind())
            .finish()
    }
}
/// Object summarizing the coverage for this file
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct Summary {
    /// Object summarizing branch coverage
    pub(crate) branches: CoverageCounts,
    /// Object summarizing function coverage
    pub(crate) functions: CoverageCounts,
    /// Object summarizing instantiation coverage
    pub(crate) instantiations: CoverageCounts,
    /// Object summarizing line coverage
    pub(crate) lines: CoverageCounts,
    /// Object summarizing region coverage
    pub(crate) regions: CoverageCounts,
}
/// `count`/`covered`/`percent` triple used by every section of `Summary`.
#[derive(Debug, Serialize, Deserialize)]
#[cfg_attr(test, serde(deny_unknown_fields))]
pub(crate) struct CoverageCounts {
    /// Total number of items (lines, regions, ...).
    pub(crate) count: u64,
    /// Number of items that were covered.
    pub(crate) covered: u64,
    // Currently only branches and regions has this field.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) notcovered: Option<u64>,
    /// Covered percentage as reported by llvm-cov.
    pub(crate) percent: f64,
}
/// Information that is not part of the llvm-cov JSON export, but instead injected afterwards by us.
// `PartialEq` is derived only for tests (see `test_parse_llvm_cov_json`).
#[derive(Debug, Default, Serialize)]
#[cfg_attr(test, derive(PartialEq))]
struct CargoLlvmCov {
    /// Version of this project, which allows projects that depend on it, to express and verify
    /// requirements on specific versions.
    version: &'static str,
    /// Resolved path to the `Cargo.toml` manifest.
    manifest_path: Utf8PathBuf,
}
#[cfg(test)]
mod tests {
    use std::path::Path;
    use fs_err as fs;
    use super::*;
    // Round-trips every fixture report through serde and checks the header fields.
    #[test]
    fn test_parse_llvm_cov_json() {
        let files: Vec<_> = glob::glob(&format!(
            "{}/tests/fixtures/coverage-reports/**/*.json",
            env!("CARGO_MANIFEST_DIR")
        ))
        .unwrap()
        .filter_map(Result::ok)
        .filter(|path| !path.to_str().unwrap().contains("codecov.json"))
        .collect();
        assert!(!files.is_empty());
        for file in files {
            let s = fs::read_to_string(file).unwrap();
            let json = serde_json::from_str::<LlvmCovJsonExport>(&s).unwrap();
            assert_eq!(json.type_, "llvm.coverage.json.export");
            assert!(json.version.starts_with("2.0."));
            // Injected metadata must be absent on freshly parsed reports.
            assert_eq!(json.cargo_llvm_cov, None);
            serde_json::to_string(&json).unwrap();
        }
    }
    #[test]
    fn test_get_lines_percent() {
        // There are 5 different percentages, make sure we pick the correct one.
        let file = format!(
            "{}/tests/fixtures/coverage-reports/no_coverage/no_coverage.json",
            env!("CARGO_MANIFEST_DIR")
        );
        let s = fs::read_to_string(file).unwrap();
        let json = serde_json::from_str::<LlvmCovJsonExport>(&s).unwrap();
        let percent = json.get_lines_percent().unwrap();
        let error_margin = f64::EPSILON;
        assert!((percent - 68.181_818_181_818_19).abs() < error_margin, "{percent}");
    }
    #[test]
    fn test_count_uncovered() {
        let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
        let cases = &[
            // (path, uncovered_functions, uncovered_lines, uncovered_regions)
            ("tests/fixtures/coverage-reports/no_coverage/no_coverage.json", 0, 7, 6),
            ("tests/fixtures/coverage-reports/no_test/no_test.json", 1, 7, 6),
        ];
        for &(file, uncovered_functions, uncovered_lines, uncovered_regions) in cases {
            let file = manifest_dir.join(file);
            let s = fs::read_to_string(file).unwrap();
            let json = serde_json::from_str::<LlvmCovJsonExport>(&s).unwrap();
            assert_eq!(json.count_uncovered_functions().unwrap(), uncovered_functions);
            assert_eq!(json.count_uncovered_lines().unwrap(), uncovered_lines);
            assert_eq!(json.count_uncovered_regions().unwrap(), uncovered_regions);
        }
    }
    #[test]
    fn test_get_uncovered_lines() {
        // Given a coverage report which includes function regions:
        // There are 5 different percentages, make sure we pick the correct one.
        let file = format!("{}/tests/fixtures/show-missing-lines.json", env!("CARGO_MANIFEST_DIR"));
        let s = fs::read_to_string(file).unwrap();
        let json = serde_json::from_str::<LlvmCovJsonExport>(&s).unwrap();
        // When finding uncovered lines in that report:
        let ignore_filename_regex = None;
        let uncovered_lines = json.get_uncovered_lines(ignore_filename_regex);
        // Then make sure the file / line data matches the `llvm-cov report` output:
        let expected: UncoveredLines =
            vec![("src/lib.rs".to_string(), vec![7, 8, 9])].into_iter().collect();
        assert_eq!(uncovered_lines, expected);
    }
    #[test]
    /// This was a case when counting line coverage based on the segments in files lead to
    /// incorrect results but doing it based on regions inside functions (the way `llvm-cov
    /// report`) leads to complete line coverage.
    fn test_get_uncovered_lines_complete() {
        let file = format!(
            "{}/tests/fixtures/show-missing-lines-complete.json",
            env!("CARGO_MANIFEST_DIR")
        );
        let s = fs::read_to_string(file).unwrap();
        let json = serde_json::from_str::<LlvmCovJsonExport>(&s).unwrap();
        let ignore_filename_regex = None;
        let uncovered_lines = json.get_uncovered_lines(ignore_filename_regex);
        // Fully covered report: no uncovered lines expected at all.
        let expected: UncoveredLines = UncoveredLines::new();
        assert_eq!(uncovered_lines, expected);
    }
    #[test]
    fn test_get_uncovered_lines_multi_missing() {
        // Given a coverage report which includes a line with multiple functions via macros + two
        // other uncovered lines:
        let file = format!(
            "{}/tests/fixtures/show-missing-lines-multi-missing.json",
            env!("CARGO_MANIFEST_DIR")
        );
        let s = fs::read_to_string(file).unwrap();
        let json = serde_json::from_str::<LlvmCovJsonExport>(&s).unwrap();
        // When finding uncovered lines in that report:
        let ignore_filename_regex = None;
        let uncovered_lines = json.get_uncovered_lines(ignore_filename_regex);
        // Then make sure the file / line data matches the `llvm-cov report` output:
        let expected: UncoveredLines =
            vec![("src/lib.rs".to_string(), vec![15, 17])].into_iter().collect();
        // This was just '11', i.e. there were two problems:
        // 1) line 11 has a serde macro which expands to multiple functions; some of those were
        // covered, which should be presented as a "covered" 11th line.
        // 2) only the last function with missing lines were reported, so 15 and 17 was missing.
        assert_eq!(uncovered_lines, expected);
    }
}
|
use {
actix_web::HttpResponse,
actix_web::web::{Data, Json, Path},
uuid::Uuid,
crate::DBPool,
crate::util::{NotFoundMessage, ResponseType},
crate::wallet::*,
};
// ---- List all Wallets
#[get("/wallets")]
pub async fn list_wallets(pool: Data<DBPool>) -> HttpResponse {
    // Grab a pooled connection, load every wallet, and wrap them in a 200 OK.
    let connection = crate::get_connection_to_pool(pool);
    let all_wallets: Vec<Wallet> = fetch_all_wallets(&connection);
    ResponseType::Ok(all_wallets).get_response()
}
// ---- Get Wallet
#[get("/wallets/{id}")]
pub async fn get_wallet(path: Path<(String,)>, pool: Data<DBPool>) -> HttpResponse {
    let conn = crate::get_connection_to_pool(pool);
    // Reject malformed UUIDs with a 404 instead of panicking: the original
    // `Uuid::parse_str(..).unwrap()` aborted the handler on any bad `{id}`.
    let id = match Uuid::parse_str(path.0.0.as_str()) {
        Ok(id) => id,
        Err(_) => {
            return ResponseType::NotFound(NotFoundMessage::new(
                "Wallet not found".to_string(),
            ))
            .get_response()
        }
    };
    match fetch_wallet_by_id(id, &conn) {
        Some(wallet) => ResponseType::Ok(wallet).get_response(),
        None => ResponseType::NotFound(NotFoundMessage::new("Wallet not found".to_string()))
            .get_response(),
    }
}
// ---- Create new Wallet
#[post("/wallets")]
pub async fn create_wallet(wallet_request: Json<NewWalletRequest>, pool: Data<DBPool>) -> HttpResponse {
let conn = crate::get_connection_to_pool(pool);
match create_new_wallet(wallet_request.0, &conn) {
Ok(created_wallet) => ResponseType::Created(created_wallet).get_response(),
Err(_) => ResponseType::NotFound(
NotFoundMessage::new("Error creating wallet".to_string())
).get_response(),
}
} |
//! Marker for user selected entities.
/// Marker for user selected entities.
///
/// Zero-sized tag component: its presence on an entity flags it as selected.
/// Standard derives are added so the marker can be debugged, defaulted,
/// copied and compared like other components.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub struct Selected;
|
// References - https://docs.rs/rodio/0.14.0/rodio/
// References - https://github.com/mohanson/space-invaders/
// References - https://github.com/mohanson/i8080/blob/master/src/bit.rs
use rodio::source::Source;
use std::fs::File;
use std::io::BufReader;
#[derive(Debug, Default)]
pub struct Invaderwavs {
    /// Paths of the ten Space Invaders effect files, indexed 0-9.
    pub sounds: Vec<String>,
}

impl Invaderwavs {
    /// Registers the ten bundled sound files
    /// (`sounds/res_snd_0.wav` .. `sounds/res_snd_9.wav`) in index order.
    pub fn load_sounds(&mut self) {
        // Loop replaces ten copy-pasted `push` calls.
        for i in 0..10 {
            self.sounds.push(format!("sounds/res_snd_{}.wav", i));
        }
    }

    /// Plays sound `i` on the default output device.
    ///
    /// Panics if no output device is available, the file is missing, or the
    /// file cannot be decoded (same as the original `unwrap()` behaviour).
    pub fn play_sound(&self, i: usize) {
        let stream_handle = rodio::default_output_device().unwrap();
        // Borrow the path instead of cloning the String.
        let file = File::open(&self.sounds[i]).unwrap();
        let source = rodio::Decoder::new(BufReader::new(file)).unwrap();
        rodio::play_raw(&stream_handle, source.convert_samples());
    }

    /// Returns bit `b` of `n` as a bool.
    pub fn get_sound_bit(&self, n: u8, b: usize) -> bool {
        (n & (1 << b)) != 0
    }

    /// Plays a sound for every bit that transitions 0 -> 1 between
    /// `output_state` (previous port value) and `reg_a` (new value).
    ///
    /// `event_type` 1 maps bits 0-3 to sounds 0-3; `event_type` 2 maps bits
    /// 0-4 to sounds 4-8. Returns true when the value changed on a recognised
    /// event type (an event was processed), false otherwise.
    pub fn queued_event(&self, reg_a: u8, event_type: u8, output_state: u8) -> bool {
        // (first sound index, number of bits) for each recognised port.
        let (base, bits) = match event_type {
            1 => (0, 4),
            2 => (4, 5),
            _ => return false,
        };
        if reg_a == output_state {
            return false;
        }
        for b in 0..bits {
            // Rising edge on bit `b` => play the corresponding sound.
            if self.get_sound_bit(reg_a, b) && !self.get_sound_bit(output_state, b) {
                self.play_sound(base + b);
            }
        }
        true
    }
}
|
//! Asynchronous Metric Sink implementation that uses UDP sockets.
use cadence::{ErrorKind as MetricErrorKind, MetricError, MetricResult, MetricSink};
use log::*;
use std::{
future::Future,
io::Result,
net::{SocketAddr, ToSocketAddrs},
panic::{RefUnwindSafe, UnwindSafe},
pin::Pin,
};
use tokio::{
net::UdpSocket,
sync::mpsc::{channel, Sender},
time::Duration,
};
use crate::{
builder::Builder,
define_worker,
worker::{Cmd, TrySend},
};
impl<T: ToSocketAddrs> Builder<T, UdpSocket> {
    /// Creates a customized instance of the [TokioBatchUdpMetricSink](crate::udp::TokioBatchUdpMetricSink).
    ///
    /// Returns the sink plus the worker future the caller must poll (e.g. by
    /// spawning it on a runtime); errors if the address resolves to nothing.
    pub fn build(
        self,
    ) -> MetricResult<(
        TokioBatchUdpMetricSink,
        Pin<Box<dyn Future<Output = ()> + Send + Sync + 'static>>,
    )> {
        // Resolve the configured host; only the first yielded SocketAddr is used.
        let mut addrs = self.addr.to_socket_addrs()?;
        let addr = addrs.next().ok_or_else(|| {
            MetricError::from((MetricErrorKind::InvalidInput, "No socket addresses yielded"))
        })?;
        // Bounded channel: enqueueing past `queue_cap` fails instead of blocking.
        let (tx, rx) = channel(self.queue_cap);
        // The worker owns the socket and drains the channel, batching writes.
        let worker_fut = worker(rx, self.sock, addr, self.buf_size, self.max_delay);
        Ok((TokioBatchUdpMetricSink { tx }, Box::pin(worker_fut)))
    }
}
/// Metric sink that allows clients to enqueue metrics without blocking, and sending
/// them asynchronously via UDP using Tokio runtime.
///
/// It also accumulates individual metrics for a configured maximum amount of time
/// before submitting them as a single [batch](https://github.com/statsd/statsd/blob/master/docs/metric_types.md#multi-metric-packets).
///
/// Exceeding the configured queue capacity results in an error, which the client may handle as appropriate.
///
/// ## Important!
/// The client is responsible for polling the asynchronous processing future, which is created along
/// with the sink, in a manner appropriate for the application (e.g., spawning it in a Tokio task pool).
///
/// The client should also wait for this future to complete *after* dropping the metric sink.
///
/// ### Example
///
/// ```no_run
/// use cadence::prelude::*;
/// use cadence::{StatsdClient, DEFAULT_PORT};
/// use tokio_cadence::TokioBatchUdpMetricSink;
/// use tokio::{spawn, net::UdpSocket};
///
/// # #[tokio::main]
/// # async fn main() -> cadence::MetricResult<()> {
/// let host = ("metrics.example.com", DEFAULT_PORT);
/// let socket = UdpSocket::bind("0.0.0.0:0").await?;
/// let (sink, process) = TokioBatchUdpMetricSink::from(host, socket)?;
///
/// // Spawn the future!
/// let processing_job = spawn(process);
///
/// {
/// let client = StatsdClient::from_sink("my.metrics", sink);
///
/// // Emit metrics!
/// client.incr("some.counter");
/// client.time("some.methodCall", 42);
/// client.gauge("some.thing", 7);
/// client.meter("some.value", 5);
///
/// // the client drops here, and the sink along with it
/// }
///
/// // Wait for the processing job to complete!
/// processing_job.await.unwrap();
/// # Ok(())
/// # }
/// ```
#[derive(Clone, Debug)]
pub struct TokioBatchUdpMetricSink {
    // Command channel to the background worker; cloning the sink clones the sender.
    tx: Sender<Cmd>,
}
// we don't let tx panic
impl UnwindSafe for TokioBatchUdpMetricSink {}
impl RefUnwindSafe for TokioBatchUdpMetricSink {}
impl TokioBatchUdpMetricSink {
    /// Creates a new metric sink for the given statsd host using a previously bound UDP socket.
    /// Other sink parameters are defaulted.
    pub fn from<T: ToSocketAddrs>(
        host: T,
        socket: UdpSocket,
    ) -> MetricResult<(
        Self,
        Pin<Box<dyn Future<Output = ()> + Send + Sync + 'static>>,
    )> {
        // Defaulted construction is just the builder with no overrides.
        Self::builder(host, socket).build()
    }

    /// Returns a builder for creating a new metric sink for the given statsd host
    /// using a previously bound UDP socket. The builder may be used to customize various
    /// configuration parameters before creating an instance of this sink.
    pub fn builder<T: ToSocketAddrs>(host: T, socket: UdpSocket) -> Builder<T, UdpSocket> {
        Builder::new(host, socket)
    }

    /// Creates a new metric sink for the given statsd host, using the UDP socket, as well as
    /// metric queue capacity, batch buffer size, and maximum delay (in milliseconds) to wait
    /// before submitting any accumulated metrics as a batch.
    #[deprecated = "please use `with_options` instead"]
    pub fn with_capacity<T: ToSocketAddrs>(
        host: T,
        socket: UdpSocket,
        queue_capacity: usize,
        buf_size: usize,
        max_delay: u64,
    ) -> MetricResult<(
        Self,
        Pin<Box<dyn Future<Output = ()> + Send + Sync + 'static>>,
    )> {
        // Configure each knob on a builder, then defer to the common path.
        let mut cfg = Builder::new(host, socket);
        cfg.max_delay(Duration::from_millis(max_delay));
        cfg.buf_size(buf_size);
        cfg.queue_cap(queue_capacity);
        cfg.build()
    }
}
impl TrySend for TokioBatchUdpMetricSink {
    /// Exposes the worker command channel so the shared `try_send` helper can enqueue.
    fn sender(&self) -> &Sender<Cmd> {
        &self.tx
    }
}
impl MetricSink for TokioBatchUdpMetricSink {
    /// Enqueues the metric line for asynchronous batching without blocking;
    /// returns the metric's byte length on success.
    fn emit(&self, metric: &str) -> Result<usize> {
        self.try_send(Cmd::Write(metric.to_string()))?;
        Ok(metric.len())
    }
    /// Asks the worker to transmit any buffered metrics immediately.
    fn flush(&self) -> Result<()> {
        self.try_send(Cmd::Flush)?;
        Ok(())
    }
}
// Expands to the async `worker` function used by `build`, specialised for UDP.
define_worker!(UdpSocket, SocketAddr);
#[cfg(test)]
mod tests {
use super::*;
use tokio::{net::UdpSocket, spawn};
#[tokio::test]
async fn from() -> MetricResult<()> {
    // A resolvable host:port pair must yield a sink.
    let socket = UdpSocket::bind("0.0.0.0:0").await?;
    assert!(TokioBatchUdpMetricSink::from("127.0.0.1:8125", socket).is_ok());
    Ok(())
}
#[tokio::test]
async fn from_bad_address() -> MetricResult<()> {
    // An unparseable address must surface as an error, not a panic.
    let socket = UdpSocket::bind("0.0.0.0:0").await?;
    assert!(TokioBatchUdpMetricSink::from("bad address", socket).is_err());
    Ok(())
}
#[tokio::test]
async fn emit() -> MetricResult<()> {
    pretty_env_logger::try_init().ok();
    // Stand up a local UDP "statsd server" to capture what the sink sends.
    let server_socket = UdpSocket::bind("127.0.0.1:0").await?;
    let server_addr = server_socket.local_addr()?;
    debug!("server socket: {}", server_addr);
    let socket = UdpSocket::bind("0.0.0.0:0").await?;
    debug!("local socket: {}", socket.local_addr()?);
    let (sink, fut) =
        TokioBatchUdpMetricSink::from(format!("127.0.0.1:{}", server_addr.port()), socket)?;
    let worker = spawn(fut);
    const MSG: &str = "test";
    // emit() reports the metric's byte length on success.
    let n = sink.emit(MSG)?;
    assert_eq!(MSG.len(), n);
    let mut buf = [0; 8192];
    // The accumulated batch is eventually sent as one datagram; wait for it.
    let (received, addr) = server_socket.recv_from(&mut buf).await?;
    debug!(
        "received {} bytes from {} with {}",
        received,
        addr,
        String::from_utf8_lossy(&buf[..received])
    );
    assert_eq!(MSG.len(), received);
    assert_eq!(MSG, String::from_utf8_lossy(&buf[..received]));
    // Dropping the sink closes the channel, letting the worker future finish.
    drop(sink);
    worker.await.unwrap();
    Ok(())
}
#[tokio::test]
async fn emit_multi() -> MetricResult<()> {
    pretty_env_logger::try_init().ok();
    // Local UDP "server" that captures the outgoing batches.
    let server_socket = UdpSocket::bind("127.0.0.1:0").await?;
    let server_addr = server_socket.local_addr()?;
    debug!("server socket: {}", server_addr);
    let socket = UdpSocket::bind("0.0.0.0:0").await?;
    debug!("local socket: {}", socket.local_addr()?);
    // Batch buffer sized to exactly one message so the two emits arrive as
    // two separate datagrams.
    const BUF_SIZE: usize = 10;
    let mut builder = Builder::new(format!("127.0.0.1:{}", server_addr.port()), socket);
    builder.buf_size(BUF_SIZE);
    let (sink, fut) = builder.build()?;
    let worker = spawn(fut);
    const MSG: &str = "test_multi"; // exactly BUF_SIZE bytes long
    let n = sink.emit(MSG)?;
    assert_eq!(BUF_SIZE, n);
    let n = sink.emit(MSG)?;
    assert_eq!(BUF_SIZE, n);
    let mut buf = [0; 8192];
    // First datagram.
    let (received, addr) = server_socket.recv_from(&mut buf).await?;
    debug!(
        "received {} bytes from {} with {}",
        received,
        addr,
        String::from_utf8_lossy(&buf[..received])
    );
    assert_eq!(MSG.len(), received);
    assert_eq!(MSG, String::from_utf8_lossy(&buf[..received]));
    // Second datagram.
    let (received, addr) = server_socket.recv_from(&mut buf).await?;
    debug!(
        "received {} bytes from {} with {}",
        received,
        addr,
        String::from_utf8_lossy(&buf[..received])
    );
    assert_eq!(MSG.len(), received);
    assert_eq!(MSG, String::from_utf8_lossy(&buf[..received]));
    drop(sink);
    worker.await.unwrap();
    Ok(())
}
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// WidgetPalette : Color palette to apply.
/// Color palette to apply.
// NOTE(review): variant naming is inconsistent (SCREAMING_CASE vs CamelCase for the
// last three) — presumably a generator artifact; renaming would break callers, so it
// is left as-is. The wire names are fixed by the `serde(rename)` attributes below.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WidgetPalette {
    #[serde(rename = "blue")]
    BLUE,
    #[serde(rename = "custom_bg")]
    CUSTOM_BACKGROUND,
    #[serde(rename = "custom_image")]
    CUSTOM_IMAGE,
    #[serde(rename = "custom_text")]
    CUSTOM_TEXT,
    #[serde(rename = "gray_on_white")]
    GRAY_ON_WHITE,
    #[serde(rename = "grey")]
    GREY,
    #[serde(rename = "green")]
    GREEN,
    #[serde(rename = "orange")]
    ORANGE,
    #[serde(rename = "red")]
    RED,
    #[serde(rename = "red_on_white")]
    RED_ON_WHITE,
    #[serde(rename = "white_on_gray")]
    WHITE_ON_GRAY,
    #[serde(rename = "white_on_green")]
    WHITE_ON_GREEN,
    #[serde(rename = "green_on_white")]
    GREEN_ON_WHITE,
    #[serde(rename = "white_on_red")]
    WHITE_ON_RED,
    #[serde(rename = "white_on_yellow")]
    WHITE_ON_YELLOW,
    #[serde(rename = "yellow_on_white")]
    YELLOW_ON_WHITE,
    #[serde(rename = "black_on_light_yellow")]
    BlackOnLightYellow,
    #[serde(rename = "black_on_light_green")]
    BlackOnLightGreen,
    #[serde(rename = "black_on_light_red")]
    BlackOnLightRed,
}
// Implement `Display` instead of `ToString` directly: `to_string()` still works via
// the std blanket impl (so callers are unaffected), and the type additionally gains
// `{}` formatting. Clippy flags direct `ToString` impls for exactly this reason.
// The rendered names must stay in sync with the `serde(rename)` attributes above.
impl std::fmt::Display for WidgetPalette {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::BLUE => "blue",
            Self::CUSTOM_BACKGROUND => "custom_bg",
            Self::CUSTOM_IMAGE => "custom_image",
            Self::CUSTOM_TEXT => "custom_text",
            Self::GRAY_ON_WHITE => "gray_on_white",
            Self::GREY => "grey",
            Self::GREEN => "green",
            Self::ORANGE => "orange",
            Self::RED => "red",
            Self::RED_ON_WHITE => "red_on_white",
            Self::WHITE_ON_GRAY => "white_on_gray",
            Self::WHITE_ON_GREEN => "white_on_green",
            Self::GREEN_ON_WHITE => "green_on_white",
            Self::WHITE_ON_RED => "white_on_red",
            Self::WHITE_ON_YELLOW => "white_on_yellow",
            Self::YELLOW_ON_WHITE => "yellow_on_white",
            Self::BlackOnLightYellow => "black_on_light_yellow",
            Self::BlackOnLightGreen => "black_on_light_green",
            Self::BlackOnLightRed => "black_on_light_red",
        };
        f.write_str(name)
    }
}
|
use crate::components::player::PlayerType;
use oxygengine::prelude::*;
/// Bullet component: tags an entity as a projectile and records which
/// `PlayerType` fired it.
#[derive(Debug, Copy, Clone)]
pub struct Bullet(pub PlayerType);
impl Component for Bullet {
    // Stored in a VecStorage (contiguous vector-backed component storage).
    type Storage = VecStorage<Self>;
}
|
use std::str;
use crate::task::{Task, TaskBuilder};
use crate::util::pig::Pig;
use failure::Fallible;
/// Rust implementation of part of utf8_codepoint from Taskwarrior's src/utf8.cpp
///
/// Note that the original function will return garbage for invalid hex sequences;
/// this panics instead.
fn hex_to_unicode(value: &[u8]) -> Fallible<String> {
if value.len() < 4 {
bail!("too short");
}
fn nyb(c: u8) -> Fallible<u16> {
match c {
b'0'..=b'9' => Ok((c - b'0') as u16),
b'a'..=b'f' => Ok((c - b'a' + 10) as u16),
b'A'..=b'F' => Ok((c - b'A' + 10) as u16),
_ => bail!("invalid hex character"),
}
};
let words = [nyb(value[0])? << 12 | nyb(value[1])? << 8 | nyb(value[2])? << 4 | nyb(value[3])?];
Ok(String::from_utf16(&words[..])?)
}
/// Rust implementation of JSON::decode in Taskwarrior's src/JSON.cpp
///
/// Decode the given byte slice into a string using Taskwarrior JSON's escaping. The slice is
/// assumed to be ASCII; unicode escapes within it will be expanded.
///
/// Unknown escapes are passed through verbatim (backslash included) and a lone
/// trailing backslash is kept as-is. Invalid or truncated `\uXXXX` escapes
/// yield an error (the original panicked on a truncated escape such as `ab\u12`).
fn json_decode(value: &[u8]) -> Fallible<String> {
    let length = value.len();
    let mut rv = String::with_capacity(length);
    let mut pos = 0;
    while pos < length {
        let v = value[pos];
        if v == b'\\' {
            pos += 1;
            if pos == length {
                // Lone trailing backslash: emit it literally.
                rv.push(v as char);
                break;
            }
            let v = value[pos];
            match v {
                b'"' | b'\\' | b'/' => rv.push(v as char),
                b'b' => rv.push('\x08'), // backspace
                b'f' => rv.push('\x0c'), // form feed
                b'n' => rv.push('\n'),
                b'r' => rv.push('\r'),
                b't' => rv.push('\t'),
                b'u' => {
                    // Bounds-checked: the original sliced `value[pos + 1..pos + 5]`
                    // unconditionally and panicked when the escape was cut short.
                    let hex = value
                        .get(pos + 1..pos + 5)
                        .ok_or_else(|| format_err!("truncated unicode escape at end of input"))?;
                    let unicode = hex_to_unicode(hex).map_err(|_| {
                        // Safe: the slice above proved pos + 5 <= length, and
                        // pos >= 1 because we consumed the backslash.
                        let esc = &value[pos - 1..pos + 5];
                        match str::from_utf8(esc) {
                            Ok(s) => format_err!("invalid unicode escape `{}`", s),
                            Err(_) => format_err!("invalid unicode escape bytes {:?}", esc),
                        }
                    })?;
                    rv.push_str(&unicode);
                    pos += 4;
                }
                _ => {
                    // Unknown escape: keep both the backslash and the character.
                    rv.push('\\');
                    rv.push(v as char);
                }
            }
        } else {
            rv.push(v as char)
        }
        pos += 1;
    }
    Ok(rv)
}
/// Rust implementation of Task::decode in Taskwarrior's src/Task.cpp
///
/// Expands Taskwarrior's bracket escapes: `&open;` -> `[` and `&close;` -> `]`.
///
/// Note that the docstring for the C++ function does not match the
/// implementation!
fn decode(value: String) -> String {
    // `contains` is the idiomatic form of the original `if let Some(_) = value.find('&')`;
    // strings without any `&` skip both `replace` passes (and their allocations).
    if value.contains('&') {
        return value.replace("&open;", "[").replace("&close;", "]");
    }
    value
}
/// Parse an "FF4" formatted task line. From Task::parse in Taskwarrior's src/Task.cpp.
///
/// While Taskwarrior supports additional formats, this is the only format supported by
/// taskwarrior_rust.
pub(super) fn parse_ff4(line: &str) -> Fallible<Task> {
    let mut pig = Pig::new(line.as_bytes());
    let mut builder = TaskBuilder::new();
    // The whole record is bracket-wrapped: [name:"value" name:"value" ...]
    pig.skip(b'[')?;
    let line = pig.get_until(b']')?;
    let mut subpig = Pig::new(line);
    while !subpig.depleted() {
        // Each attribute is `name:"json-encoded value"`.
        let name = subpig.get_until(b':')?;
        let name = str::from_utf8(name)?;
        subpig.skip(b':')?;
        let value = subpig.get_quoted(b'"')?;
        // Undo JSON escaping first, then Taskwarrior's bracket escaping.
        let value = json_decode(value)?;
        let value = decode(value);
        builder = builder.set(name, value);
        subpig.skip(b' ').ok(); // ignore if not found..
    }
    pig.skip(b']')?;
    // Anything after the closing bracket is malformed input.
    if !pig.depleted() {
        bail!("trailing characters on line");
    }
    Ok(builder.finish())
}
#[cfg(test)]
mod test {
use super::{decode, hex_to_unicode, json_decode, parse_ff4};
use crate::task::Pending;
#[test]
fn test_hex_to_unicode_digits() {
    assert_eq!(hex_to_unicode(b"1234").unwrap(), "\u{1234}");
}
#[test]
fn test_hex_to_unicode_lower() {
    assert_eq!(hex_to_unicode(b"abcd").unwrap(), "\u{abcd}");
}
#[test]
fn test_hex_to_unicode_upper() {
    // Upper- and lower-case hex must decode identically.
    assert_eq!(hex_to_unicode(b"ABCD").unwrap(), "\u{abcd}");
}
#[test]
fn test_hex_to_unicode_too_short() {
    assert!(hex_to_unicode(b"AB").is_err());
}
#[test]
fn test_hex_to_unicode_invalid() {
    // 'g' is not a hex digit.
    assert!(hex_to_unicode(b"defg").is_err());
}
#[test]
fn test_json_decode_no_change() {
    assert_eq!(json_decode(b"abcd").unwrap(), "abcd");
}
#[test]
fn test_json_decode_escape_quote() {
    assert_eq!(json_decode(b"ab\\\"cd").unwrap(), "ab\"cd");
}
#[test]
fn test_json_decode_escape_backslash() {
    assert_eq!(json_decode(b"ab\\\\cd").unwrap(), "ab\\cd");
}
#[test]
fn test_json_decode_escape_frontslash() {
    assert_eq!(json_decode(b"ab\\/cd").unwrap(), "ab/cd");
}
#[test]
fn test_json_decode_escape_b() {
    // \b expands to backspace (0x08).
    assert_eq!(json_decode(b"ab\\bcd").unwrap(), "ab\x08cd");
}
#[test]
fn test_json_decode_escape_f() {
    // \f expands to form feed (0x0c).
    assert_eq!(json_decode(b"ab\\fcd").unwrap(), "ab\x0ccd");
}
#[test]
fn test_json_decode_escape_n() {
    assert_eq!(json_decode(b"ab\\ncd").unwrap(), "ab\ncd");
}
#[test]
fn test_json_decode_escape_r() {
    assert_eq!(json_decode(b"ab\\rcd").unwrap(), "ab\rcd");
}
#[test]
fn test_json_decode_escape_t() {
    assert_eq!(json_decode(b"ab\\tcd").unwrap(), "ab\tcd");
}
#[test]
fn test_json_decode_escape_other() {
    // Unknown escapes pass through verbatim, backslash included.
    assert_eq!(json_decode(b"ab\\xcd").unwrap(), "ab\\xcd");
}
#[test]
fn test_json_decode_escape_eos() {
    // A lone trailing backslash is kept as-is.
    assert_eq!(json_decode(b"ab\\").unwrap(), "ab\\");
}
#[test]
fn test_json_decode_escape_unicode() {
    assert_eq!(json_decode(b"ab\\u1234").unwrap(), "ab\u{1234}");
}
#[test]
fn test_json_decode_escape_unicode_bad() {
    // Non-hex characters after \u produce the formatted error message.
    let rv = json_decode(b"ab\\uwxyz");
    assert_eq!(
        rv.unwrap_err().to_string(),
        "invalid unicode escape `\\uwxyz`"
    );
}
#[test]
fn test_decode_no_change() {
    // A string containing `&` but neither `&open;` nor `&close;` must pass
    // through `decode` unchanged. The literal had been corrupted (an HTML
    // entity was decoded into a bare quote, breaking the string literal);
    // restored here as `&quot;`.
    let s = "abcd &quot; efgh &".to_string();
    assert_eq!(decode(s.clone()), s);
}
#[test]
fn test_decode_multi() {
    // Both complete escapes expand; the unterminated `&open` (no `;`) is left alone.
    let s = "abcd &open; efgh &close; &open".to_string();
    assert_eq!(decode(s), "abcd [ efgh ] &open".to_string());
}
#[test]
fn test_parse_ff4() {
    // A realistic pending-task record in FF4 bracket format.
    let s = "[description:\"desc\" entry:\"1437855511\" modified:\"1479480556\" \
             priority:\"L\" project:\"lists\" status:\"pending\" tags:\"watch\" \
             uuid:\"83ce989e-8634-4d62-841c-eb309383ff1f\"]";
    let task = parse_ff4(s).unwrap();
    assert_eq!(task.status, Pending);
    assert_eq!(task.description, "desc");
}
#[test]
fn test_parse_ff4_fail() {
    assert!(parse_ff4("abc:10]").is_err()); // missing opening bracket
    assert!(parse_ff4("[abc:10").is_err()); // missing closing bracket
    assert!(parse_ff4("[abc:10 123:123]").is_err()); // values are not quoted
}
}
|
// Module declaration only; the implementation lives in `success.rs`.
pub(crate) mod success;
|
use std::fmt;
use board::*;
/// Game state: a thin wrapper around the playing `Board`; all game data
/// lives in the board itself.
pub struct Game {
    pub board: Board,
}
/// Ways a move can be rejected.
#[derive(PartialEq, Debug)]
pub enum GameError {
    /// The (col, row) coordinate is outside the board.
    MoveOutOfBounds,
    /// The targeted square is empty.
    CantPlayEmpty,
    /// The connected region has fewer than two squares.
    RegionTooSmall,
    /// The board is no longer live; no further moves are accepted.
    GameOver,
}
/// Strategy interface: given a board, propose the next (col, row) move,
/// or None when no move is offered.
pub trait Player {
    fn play(board: &Board) -> Option<(usize, usize)>;
}
impl Game {
    /// Creates a game on a fresh board of the given dimensions.
    pub fn new(width: usize, height: usize) -> Game {
        Game { board: Board::new(width, height) }
    }

    /// Builds a game from a textual board description.
    ///
    /// Panics on an invalid description (mirrors the original `unwrap`).
    pub fn from_string(s: &str) -> Game {
        Game { board: Board::from_string(s).unwrap() }
    }

    /// Clears the connected region containing (col, row) and returns the
    /// number of squares removed as the score.
    ///
    /// Errors: `GameOver` if the board is no longer live, `MoveOutOfBounds`
    /// for coordinates off the board, `CantPlayEmpty` for an empty square,
    /// and `RegionTooSmall` for regions of fewer than two squares.
    pub fn delete(&mut self, col: usize, row: usize) -> Result<usize, GameError> {
        if !self.board.live() {
            return Err(GameError::GameOver);
        }
        if let Some(val) = self.board.checked(col, row) {
            if val == Square::Empty {
                Err(GameError::CantPlayEmpty)
            } else {
                let region = self.board.region(col, row);
                if region.len() < 2 {
                    Err(GameError::RegionTooSmall)
                } else {
                    // The original line was mangled to `®ion` (an HTML-entity
                    // corruption of `&region`); restored here.
                    self.board.set_region(&region, Square::Empty);
                    Ok(region.len())
                }
            }
        } else {
            Err(GameError::MoveOutOfBounds)
        }
    }

    /// Performs a complete move: delete the region, then repeatedly apply
    /// gravity and left-collapse until the board reports no further change.
    pub fn play_single(&mut self, col: usize, row: usize) -> Result<usize, GameError> {
        // `?` replaces the deprecated `try!` macro (identical early-return semantics).
        let score = self.delete(col, row)?;
        while self.board.gravity() {}
        while self.board.left_collapse() {}
        Ok(score)
    }

    /// Delegates to `Board::live` (whether the game can continue).
    pub fn live(&self) -> bool {
        self.board.live()
    }
}
impl fmt::Display for Game {
    /// Delegates rendering to the board's own Display impl.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.board)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn test_delete_center_region() {
        // delete() only empties the region; no gravity/collapse is applied.
        let mut game = Game::from_string("GGGG\nGBBG\nGGGG");
        let score = game.delete(1, 1).unwrap();
        assert_eq!(score, 2);
        assert_eq!(game.board, Game::from_string("GGGG\nG..G\nGGGG").board);
    }
    #[test]
    fn test_delete_outer_region() {
        // The 10 connected G squares around the rim form one region.
        let mut game = Game::from_string("GGGG\nGBBG\nGGGG");
        let score = game.delete(0, 0).unwrap();
        assert_eq!(score, 10);
        assert_eq!(game.board, Game::from_string("....\n.BB.\n....").board);
    }
    #[test]
    fn test_play_center_region() {
        // play_single() additionally lets the remaining squares fall.
        let mut game = Game::from_string("GGGG\nGBBG\nGGGG");
        let score = game.play_single(1, 1).unwrap();
        assert_eq!(score, 2);
        assert_eq!(game.board, Game::from_string("G..G\nGGGG\nGGGG").board);
    }
    #[test]
    fn test_play_outer_region() {
        // After deleting the rim, the B squares fall and collapse leftwards.
        let mut game = Game::from_string("GGGG\nGBBG\nGGGG");
        let score = game.play_single(0, 0).unwrap();
        assert_eq!(score, 10);
        assert_eq!(game.board, Game::from_string("....\n....\nBB..").board);
    }
}
|
// src/parser.rs
use crate::ast::*;
use crate::lexer::*;
use crate::token::*;
use std::collections::HashMap;
/// A prefix ("nud") handler: parses an expression beginning at cur_token.
type PrefixParseFn = fn(&mut Parser) -> Result<Expression, String>;
/// An infix ("led") handler: folds the already-parsed left expression with
/// the operator at cur_token.
type InfixParseFn = fn(&mut Parser, Expression) -> Result<Expression, String>;
/// Pratt parser over the lexer's token stream, with one token of lookahead.
pub struct Parser {
    pub l: Lexer,
    /// Token currently being parsed.
    pub cur_token: Token,
    /// One-token lookahead.
    pub peek_token: Token,
    /// Prefix (nud) handlers keyed by the token type that starts the expression.
    pub prefix_parse_fns: HashMap<TokenType, PrefixParseFn>,
    /// Infix (led) handlers keyed by the operator token type.
    pub infix_parse_fns: HashMap<TokenType, InfixParseFn>,
}
impl Parser {
/// Builds a parser over `l`, registers all prefix/infix handlers, and primes
/// `cur_token`/`peek_token` with the first two tokens.
pub fn new(l: Lexer) -> Parser {
    let mut p = Parser {
        l: l,
        cur_token: new_token(TokenType::ILLEGAL, 0),
        peek_token: new_token(TokenType::ILLEGAL, 0),
        prefix_parse_fns: HashMap::new(),
        infix_parse_fns: HashMap::new(),
    };
    // Prefix (nud) parsers, keyed by the token that starts the expression.
    p.register_prefix(TokenType::IDENT, |parser| parser.parse_identifier());
    p.register_prefix(TokenType::INT, |parser| parser.parse_integer_literal());
    p.register_prefix(TokenType::BANG, |parser| parser.parse_prefix_expression());
    p.register_prefix(TokenType::MINUS, |parser| parser.parse_prefix_expression());
    p.register_prefix(TokenType::TRUE, |parser| parser.parse_boolean_literal());
    p.register_prefix(TokenType::FALSE, |parser| parser.parse_boolean_literal());
    p.register_prefix(TokenType::LPAREN, |parser| parser.parse_grouped_expression());
    p.register_prefix(TokenType::IF, |parser| parser.parse_if_expression());
    p.register_prefix(TokenType::FUNCTION, |parser| parser.parse_function_literal());
    p.register_prefix(TokenType::STRING, |parser| parser.parse_string_literal());
    p.register_prefix(TokenType::LBRACKET, |parser| parser.parse_array_literal());
    p.register_prefix(TokenType::LBRACE, |parser| parser.parse_hash_literal());
    // All binary operators share one infix handler, so register them in a
    // loop instead of eight copy-pasted closures.
    for op in [
        TokenType::PLUS,
        TokenType::MINUS,
        TokenType::SLASH,
        TokenType::ASTERISK,
        TokenType::EQ,
        TokenType::NOTEQ,
        TokenType::LT,
        TokenType::GT,
    ]
    .iter()
    .cloned()
    {
        p.register_infix(op, |parser, exp| parser.parse_infix_expression(exp));
    }
    // Call and index expressions have dedicated infix handlers.
    p.register_infix(TokenType::LPAREN, |parser, exp| parser.parse_call_expression(exp));
    p.register_infix(TokenType::LBRACKET, |parser, exp| parser.parse_index_expression(exp));
    // Load the first two tokens into the lookahead window.
    p.next_token();
    p.next_token();
    p
}
/// Shifts the lookahead window: peek becomes current, and the lexer
/// supplies a fresh peek token.
pub fn next_token(&mut self) {
    self.cur_token = std::mem::replace(&mut self.peek_token, self.l.next_token());
}
/// Parses statements until EOF. Returns the Program on success, or every
/// collected parse error when at least one statement failed (parsing
/// continues past errors so all diagnostics are reported together).
pub fn parse_program(&mut self) -> Result<Program, Vec<String>> {
    let mut statements: Vec<Statement> = Vec::new();
    let mut errors = Vec::new();
    while self.cur_token.tk_type != TokenType::EOF {
        match self.parse_statement() {
            Ok(stmt) => statements.push(stmt),
            Err(err) => errors.push(err),
        }
        self.next_token();
    }
    // Idiomatic emptiness check (was `errors.len() != 0`).
    if !errors.is_empty() {
        return Err(errors);
    }
    Ok(Program {
        statements: statements,
    })
}
/// Dispatches on the current token: `let` and `return` statements have
/// dedicated parsers; everything else is an expression statement.
fn parse_statement(&mut self) -> Result<Statement, String> {
    match self.cur_token.tk_type {
        TokenType::LET => self.parse_let_statement(),
        TokenType::RETURN => self.parse_return_statement(),
        _ => self.parse_expression_statement(),
    }
}
/// Parses `let <ident> = <expr>;` with cur_token on `let`.
fn parse_let_statement(&mut self) -> Result<Statement, String> {
    let let_token = self.cur_token.clone();
    // An identifier must follow `let`.
    self.expect_peek(TokenType::IDENT)?;
    let ident_token = self.cur_token.clone();
    let name = Identifier {
        value: ident_token.literal.clone(),
        token: ident_token,
    };
    // Then `=` and the bound expression.
    self.expect_peek(TokenType::ASSIGN)?;
    self.next_token();
    let value = self.parse_expression(Precedence::LOWEST)?;
    // The trailing semicolon is optional.
    if self.peek_token_is(TokenType::SEMICOLON) {
        self.next_token();
    }
    Ok(Statement::LetStatement(LetStatement {
        token: let_token,
        name: name,
        value: value,
    }))
}
/// True if the current token has type `t`.
fn cur_token_is(&self, t: TokenType) -> bool {
    self.cur_token.tk_type == t
}
/// True if the upcoming token has type `t`.
fn peek_token_is(&self, t: TokenType) -> bool {
    self.peek_token.tk_type == t
}
/// Advances only when the next token matches `t`; otherwise returns the
/// formatted peek error without consuming anything.
fn expect_peek(&mut self, t: TokenType) -> Result<(), String> {
    if self.peek_token_is(t.clone()) {
        self.next_token();
        Ok(())
    } else {
        Err(self.peek_error(t))
    }
}
/// Builds the "expected X, got Y" diagnostic for a failed `expect_peek`.
fn peek_error(&mut self, t: TokenType) -> String {
    format!(
        "expected next token to be {:?}, got {:?} instead",
        t, self.peek_token.tk_type
    )
}
/// Parses `return <expr>;` with cur_token on `return`.
fn parse_return_statement(&mut self) -> Result<Statement, String> {
    let return_token = self.cur_token.clone();
    // Move past `return` onto the expression itself.
    self.next_token();
    let return_value = self.parse_expression(Precedence::LOWEST)?;
    // Consume an optional trailing semicolon.
    if self.peek_token_is(TokenType::SEMICOLON) {
        self.next_token();
    }
    Ok(Statement::ReturnStatement(ReturnStatement {
        token: return_token,
        return_value: return_value,
    }))
}
/// Associates a prefix (nud) handler with a token type.
fn register_prefix(&mut self, token_type: TokenType, func: PrefixParseFn) {
    self.prefix_parse_fns.insert(token_type, func);
}
/// Associates an infix (led) handler with a token type.
fn register_infix(&mut self, token_type: TokenType, func: InfixParseFn) {
    self.infix_parse_fns.insert(token_type, func);
}
/// Wraps a bare expression (plus optional `;`) in a statement node.
fn parse_expression_statement(&mut self) -> Result<Statement, String> {
    let first_token = self.cur_token.clone();
    let expression = self.parse_expression(Precedence::LOWEST)?;
    // An expression statement may or may not be terminated by a semicolon.
    if self.peek_token_is(TokenType::SEMICOLON) {
        self.next_token();
    }
    Ok(Statement::ExpressionStatement(ExpressionStatement {
        token: first_token,
        expression: expression,
    }))
}
/// Core Pratt-parsing loop.
///
/// Parses the prefix form for the current token, then repeatedly folds it
/// into infix expressions while the upcoming operator binds more tightly
/// than `precedence`. Stops at a semicolon or when no infix handler exists.
fn parse_expression(&mut self, precedence: Precedence) -> Result<Expression, String> {
    if let Some(prefix) = self.prefix_parse_fns.get(&self.cur_token.tk_type) {
        let mut left_exp = prefix(self)?;
        while !self.peek_token_is(TokenType::SEMICOLON) && precedence < self.peek_precedence() {
            // Copy the fn pointer out first so the map borrow ends before
            // `self` is mutably borrowed again by `next_token`.
            let infix_fn: InfixParseFn;
            if let Some(infix) = self.infix_parse_fns.get(&self.peek_token.tk_type) {
                infix_fn = *infix;
            } else {
                // No infix handler: the expression ends here.
                return Ok(left_exp);
            }
            self.next_token();
            left_exp = infix_fn(self, left_exp)?;
        }
        Ok(left_exp)
    } else {
        Err(self.no_prefix_parse_fn_error(&self.cur_token.tk_type))
    }
}
/// Builds an identifier node from the current IDENT token.
fn parse_identifier(&mut self) -> Result<Expression, String> {
    Ok(Expression::Identifier(Identifier {
        token: self.cur_token.clone(),
        value: self.cur_token.literal.clone(),
    }))
}
/// Parses the current INT token's literal into an i64 literal node;
/// errors when the literal does not fit in i64 or is not numeric.
fn parse_integer_literal(&mut self) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    // A single match replaces the original `if let Ok(..)`/`else` split and
    // lets `token` move straight into the node (no second clone path).
    match token.literal.parse::<i64>() {
        Ok(value) => Ok(Expression::IntegerLiteral(IntegerLiteral {
            token: token,
            value: value,
        })),
        Err(_) => Err(format!(
            "could not parse {} as integer",
            self.cur_token.literal
        )),
    }
}
/// Diagnostic for a token that cannot begin an expression.
fn no_prefix_parse_fn_error(&self, t: &TokenType) -> String {
    format!("no prefix parse function for {:?} found", t)
}
/// Parses `<op><expr>` (e.g. `!x`, `-5`) with PREFIX binding power.
fn parse_prefix_expression(&mut self) -> Result<Expression, String> {
    let op_token = self.cur_token.clone();
    let operator = op_token.literal.clone();
    // Advance onto the operand and parse it at prefix precedence.
    self.next_token();
    let operand = self.parse_expression(Precedence::PREFIX)?;
    Ok(Expression::PrefixExpression(PrefixExpression {
        token: op_token,
        operator: operator,
        right: Box::new(operand),
    }))
}
/// Binding power of the upcoming token.
fn peek_precedence(&self) -> Precedence {
    get_precedence(&self.peek_token.tk_type)
}
/// Binding power of the current token.
fn cur_precedence(&self) -> Precedence {
    get_precedence(&self.cur_token.tk_type)
}
/// Parses `<left> <op> <right>`; called with cur_token on the operator.
fn parse_infix_expression(&mut self, left: Expression) -> Result<Expression, String> {
    let op_token = self.cur_token.clone();
    let operator = op_token.literal.clone();
    // Capture this operator's binding power before moving past it; the right
    // operand is parsed at that same precedence (left-associative).
    let binding = self.cur_precedence();
    self.next_token();
    let right_operand = self.parse_expression(binding)?;
    Ok(Expression::InfixExpression(InfixExpression {
        token: op_token,
        left: Box::new(left),
        operator: operator,
        right: Box::new(right_operand),
    }))
}
/// Builds a boolean literal from the current TRUE/FALSE token.
fn parse_boolean_literal(&self) -> Result<Expression, String> {
    Ok(Expression::BooleanLiteral(BooleanLiteral {
        token: self.cur_token.clone(),
        // The literal is true exactly when the token is TRUE.
        value: self.cur_token_is(TokenType::TRUE),
    }))
}
/// Parses a parenthesised expression; cur_token is `(` on entry.
fn parse_grouped_expression(&mut self) -> Result<Expression, String> {
    // Step past `(`, parse the inner expression, then require `)`.
    self.next_token();
    let inner = self.parse_expression(Precedence::LOWEST)?;
    self.expect_peek(TokenType::RPAREN)?;
    Ok(inner)
}
/// Parses `if (<cond>) { <consequence> } [else { <alternative> }]`;
/// cur_token is `if` on entry and the else branch is optional.
fn parse_if_expression(&mut self) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    // Condition is mandatory and parenthesised.
    self.expect_peek(TokenType::LPAREN)?;
    self.next_token();
    let condition = self.parse_expression(Precedence::LOWEST)?;
    self.expect_peek(TokenType::RPAREN)?;
    self.expect_peek(TokenType::LBRACE)?;
    let consequence = self.parse_block_statement()?;
    let mut alternative: Option<BlockStatement> = None;
    if self.peek_token_is(TokenType::ELSE) {
        // Consume `else`, then require the `{` of the alternative block.
        self.next_token();
        self.expect_peek(TokenType::LBRACE)?;
        alternative = Some(self.parse_block_statement()?);
    }
    Ok(Expression::IfExpression(IfExpression {
        token: token,
        condition: Box::new(condition),
        consequence: consequence,
        alternative: alternative,
    }))
}
/// Parses `{ <statements> }`; cur_token is `{` on entry and `}` on exit.
fn parse_block_statement(&mut self) -> Result<BlockStatement, String> {
    let brace_token = self.cur_token.clone();
    let mut statements: Vec<Statement> = Vec::new();
    self.next_token();
    loop {
        if self.cur_token_is(TokenType::RBRACE) {
            break;
        }
        // Hitting EOF means the block was never closed.
        if self.cur_token_is(TokenType::EOF) {
            return Err(String::from("EOF"));
        }
        statements.push(self.parse_statement()?);
        self.next_token();
    }
    Ok(BlockStatement {
        token: brace_token,
        statements: statements,
    })
}
/// Parses `fn(<params>) { <body> }`; cur_token is `fn` on entry.
fn parse_function_literal(&mut self) -> Result<Expression, String> {
    let fn_token = self.cur_token.clone();
    // Parameter list is mandatory, even when empty.
    self.expect_peek(TokenType::LPAREN)?;
    let parameters = self.parse_function_parameters()?;
    // Followed by the brace-delimited body.
    self.expect_peek(TokenType::LBRACE)?;
    let body = self.parse_block_statement()?;
    Ok(Expression::FunctionLiteral(FunctionLiteral {
        token: fn_token,
        parameters: parameters,
        body: body,
    }))
}
/// Parses the comma-separated identifier list of a function literal.
/// cur_token is `(` on entry and `)` on exit; an empty list is allowed.
/// (Also fixes the `identfiers` typo in the original local name.)
fn parse_function_parameters(&mut self) -> Result<Vec<Identifier>, String> {
    let mut identifiers: Vec<Identifier> = Vec::new();
    // Empty parameter list: `)` immediately follows `(`.
    if self.peek_token_is(TokenType::RPAREN) {
        self.next_token();
        return Ok(identifiers);
    }
    self.next_token();
    // One loop handles first and subsequent identifiers, removing the
    // duplicated push from the original.
    loop {
        identifiers.push(Identifier {
            token: self.cur_token.clone(),
            value: self.cur_token.literal.clone(),
        });
        if !self.peek_token_is(TokenType::COMMA) {
            break;
        }
        // Skip the comma and land on the next identifier.
        self.next_token();
        self.next_token();
    }
    self.expect_peek(TokenType::RPAREN)?;
    Ok(identifiers)
}
/// Parses a call's argument list; `function` is the already-parsed callee
/// and cur_token is `(` on entry. (Fixes the `arguements` typo.)
fn parse_call_expression(&mut self, function: Expression) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    let arguments = self.parse_expression_list(TokenType::RPAREN)?;
    Ok(Expression::CallExpression(CallExpression {
        token: token,
        function: Box::new(function),
        arguments: arguments,
    }))
}
/// Parses a comma-separated argument list terminated by RPAREN.
///
/// This was a line-for-line duplicate of `parse_expression_list` with
/// `end` fixed to RPAREN; it now delegates to it so the two cannot
/// drift apart.
///
/// # Errors
/// Returns `Err` if the list is not terminated by RPAREN.
fn parse_call_arguments(&mut self) -> Result<Vec<Expression>, String> {
    self.parse_expression_list(TokenType::RPAREN)
}
/// Builds a string-literal expression from the current token.
fn parse_string_literal(&self) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    let value = self.cur_token.literal.clone();
    Ok(Expression::StringLiteral(StringLiteral { token, value }))
}
/// Parses an array literal: `[e1, e2, ...]`.
///
/// `cur_token` is the LBRACKET on entry.
///
/// # Errors
/// Returns `Err` if the element list is not terminated by RBRACKET.
fn parse_array_literal(&mut self) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    // `parse_expression_list` already returns an owned Vec; the previous
    // `.to_vec()` cloned every element for nothing.
    let elements = self.parse_expression_list(TokenType::RBRACKET)?;
    Ok(Expression::ArrayLiteral(ArrayLiteral { token, elements }))
}
/// Parses a comma-separated list of expressions terminated by `end`
/// (e.g. RPAREN for call arguments, RBRACKET for array elements).
///
/// On success `cur_token` is left on the `end` token. Returns `Err` if
/// the list is not properly terminated.
fn parse_expression_list(&mut self, end: TokenType) -> Result<Vec<Expression>, String> {
    let mut items: Vec<Expression> = Vec::new();
    // Empty list: the terminator immediately follows the opener.
    if self.peek_token_is(end.clone()) {
        self.next_token();
        return Ok(items);
    }
    self.next_token();
    items.push(self.parse_expression(Precedence::LOWEST)?);
    while self.peek_token_is(TokenType::COMMA) {
        self.next_token(); // consume the comma
        self.next_token(); // advance to the next element
        items.push(self.parse_expression(Precedence::LOWEST)?);
    }
    self.expect_peek(end)?;
    Ok(items)
}
/// Parses an index expression `left[index]`; `cur_token` is the
/// LBRACKET that triggered the infix parse.
fn parse_index_expression(&mut self, left: Expression) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    self.next_token();
    let index = self.parse_expression(Precedence::LOWEST)?;
    self.expect_peek(TokenType::RBRACKET)?;
    let expr = IndexExpression {
        token,
        left: Box::new(left),
        index: Box::new(index),
    };
    Ok(Expression::IndexExpression(expr))
}
/// Parses a hash literal: `{k1: v1, k2: v2}` (possibly empty).
///
/// `cur_token` is the opening LBRACE on entry; on success `cur_token`
/// is left on the closing RBRACE. Returns `Err` on a missing colon,
/// comma, or closing brace.
fn parse_hash_literal(&mut self) -> Result<Expression, String> {
    let token = self.cur_token.clone();
    let mut entries: HashMap<Expression, Expression> = HashMap::new();
    while !self.peek_token_is(TokenType::RBRACE) {
        self.next_token();
        let key = self.parse_expression(Precedence::LOWEST)?;
        self.expect_peek(TokenType::COLON)?;
        self.next_token();
        let value = self.parse_expression(Precedence::LOWEST)?;
        entries.insert(key, value);
        // A comma is required between entries but not before the brace.
        if !self.peek_token_is(TokenType::RBRACE) {
            self.expect_peek(TokenType::COMMA)?;
        }
    }
    self.expect_peek(TokenType::RBRACE)?;
    Ok(Expression::HashLiteral(HashLiteral {
        token,
        pairs: entries,
    }))
}
}
/// Operator-precedence levels, lowest to highest.
///
/// The derived `PartialOrd`/`Ord` rely on this declaration order —
/// keep new variants in precedence order. Unit-only enum, so `Copy`
/// is free and avoids needless moves/clones at call sites.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Precedence {
    LOWEST,
    EQUALS,      // ==
    LESSGREATER, // > or <
    SUM,         // +
    PRODUCT,     // *
    PREFIX,      // -x or !x
    CALL,        // myFunction(X)
    INDEX,       // array[index]
}
fn get_precedence(t: &TokenType) -> Precedence {
match t {
TokenType::EQ | TokenType::NOTEQ => Precedence::EQUALS,
TokenType::LT | TokenType::GT => Precedence::LESSGREATER,
TokenType::PLUS | TokenType::MINUS => Precedence::SUM,
TokenType::SLASH | TokenType::ASTERISK => Precedence::PRODUCT,
TokenType::LPAREN => Precedence::CALL,
TokenType::LBRACKET => Precedence::INDEX,
_ => Precedence::LOWEST,
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.