text
stringlengths
8
4.13M
// svd2rust-generated accessors for the MDMA channel-1 transfer configuration
// register (C1TCR): typed reader/writer aliases, per-field write proxies that
// mask and shift raw bits into place, and `R`/`W` impls exposing each field.
// NOTE(review): generated code — if a mask/offset looks wrong, regenerate from
// the SVD file rather than hand-editing the bit arithmetic.
#[doc = "Reader of register C1TCR"] pub type R = crate::R<u32, super::C1TCR>; #[doc = "Writer for register C1TCR"] pub type W = crate::W<u32, super::C1TCR>; #[doc = "Register C1TCR `reset()`'s with value 0"] impl crate::ResetValue for super::C1TCR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `SINC`"] pub type SINC_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SINC`"] pub struct SINC_W<'a> { w: &'a mut W, } impl<'a> SINC_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } #[doc = "Reader of field `DINC`"] pub type DINC_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DINC`"] pub struct DINC_W<'a> { w: &'a mut W, } impl<'a> DINC_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2); self.w } } #[doc = "Reader of field `SSIZE`"] pub type SSIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SSIZE`"] pub struct SSIZE_W<'a> { w: &'a mut W, } impl<'a> SSIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u32) & 0x03) << 4); self.w } } #[doc = "Reader of field `DSIZE`"] pub type DSIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DSIZE`"] pub struct DSIZE_W<'a> { w: &'a mut W, } impl<'a> DSIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6); self.w } } #[doc = "Reader of field `SINCOS`"] pub type SINCOS_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SINCOS`"] pub struct SINCOS_W<'a> { w: &'a mut W, } impl<'a> SINCOS_W<'a> { #[doc = r"Writes 
raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8); self.w } } #[doc = "Reader of field `DINCOS`"] pub type DINCOS_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DINCOS`"] pub struct DINCOS_W<'a> { w: &'a mut W, } impl<'a> DINCOS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10); self.w } } #[doc = "Reader of field `SBURST`"] pub type SBURST_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SBURST`"] pub struct SBURST_W<'a> { w: &'a mut W, } impl<'a> SBURST_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12); self.w } } #[doc = "Reader of field `DBURST`"] pub type DBURST_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DBURST`"] pub struct DBURST_W<'a> { w: &'a mut W, } impl<'a> DBURST_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 15)) | (((value as u32) & 0x07) << 15); self.w } } #[doc = "Reader of field `TLEN`"] pub type TLEN_R = crate::R<u8, u8>; #[doc = "Write proxy for field `TLEN`"] pub struct TLEN_W<'a> { w: &'a mut W, } impl<'a> TLEN_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x7f << 18)) | (((value as u32) & 0x7f) << 18); self.w } } #[doc = "Reader of field `PKE`"] pub type PKE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PKE`"] pub struct PKE_W<'a> { w: &'a mut W, } impl<'a> PKE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field 
bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Reader of field `PAM`"] pub type PAM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `PAM`"] pub struct PAM_W<'a> { w: &'a mut W, } impl<'a> PAM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 26)) | (((value as u32) & 0x03) << 26); self.w } } #[doc = "Reader of field `TRGM`"] pub type TRGM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `TRGM`"] pub struct TRGM_W<'a> { w: &'a mut W, } impl<'a> TRGM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28); self.w } } #[doc = "Reader of field `SWRM`"] pub type SWRM_R = crate::R<bool, bool>; #[doc = "Write proxy for field `SWRM`"] pub struct SWRM_W<'a> { w: &'a mut W, } impl<'a> SWRM_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `BWM`"] pub type BWM_R = crate::R<bool, bool>; #[doc = "Write proxy for field `BWM`"] pub struct BWM_W<'a> { w: &'a mut W, } impl<'a> BWM_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits 
to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:1 - Source increment mode These bits are set and cleared by software. These bits are protected and can be written only if EN is 0 Note: When source is AHB (SBUS=1), SINC = 00 is forbidden. In Linked List Mode, at the end of a block (single or last block in repeated block transfer mode), this register will be loaded from memory (from address given by current LAR\\[31:0\\] + 0x00)."] #[inline(always)] pub fn sinc(&self) -> SINC_R { SINC_R::new((self.bits & 0x03) as u8) } #[doc = "Bits 2:3 - Destination increment mode These bits are set and cleared by software. These bits are protected and can be written only if EN is 0 Note: When destination is AHB (DBUS=1), DINC = 00 is forbidden."] #[inline(always)] pub fn dinc(&self) -> DINC_R { DINC_R::new(((self.bits >> 2) & 0x03) as u8) } #[doc = "Bits 4:5 - Source data size These bits are set and cleared by software. These bits are protected and can be written only if EN is 0 Note: If a value of 11 is programmed for the TCM access/AHB port, a transfer error will occur (TEIF bit set) If SINCOS &lt; SSIZE and SINC &#8800; 00, the result will be unpredictable. Note: SSIZE = 11 (double-word) is forbidden when source is TCM/AHB bus (SBUS=1)."] #[inline(always)] pub fn ssize(&self) -> SSIZE_R { SSIZE_R::new(((self.bits >> 4) & 0x03) as u8) } #[doc = "Bits 6:7 - Destination data size These bits are set and cleared by software. These bits are protected and can be written only if EN is 0. Note: If a value of 11 is programmed for the TCM access/AHB port, a transfer error will occur (TEIF bit set) If DINCOS &lt; DSIZE and DINC &#8800; 00, the result will be unpredictable. 
Note: DSIZE = 11 (double-word) is forbidden when destination is TCM/AHB bus (DBUS=1)."] #[inline(always)] pub fn dsize(&self) -> DSIZE_R { DSIZE_R::new(((self.bits >> 6) & 0x03) as u8) } #[doc = "Bits 8:9 - source increment offset size"] #[inline(always)] pub fn sincos(&self) -> SINCOS_R { SINCOS_R::new(((self.bits >> 8) & 0x03) as u8) } #[doc = "Bits 10:11 - Destination increment offset"] #[inline(always)] pub fn dincos(&self) -> DINCOS_R { DINCOS_R::new(((self.bits >> 10) & 0x03) as u8) } #[doc = "Bits 12:14 - source burst transfer configuration"] #[inline(always)] pub fn sburst(&self) -> SBURST_R { SBURST_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bits 15:17 - Destination burst transfer configuration"] #[inline(always)] pub fn dburst(&self) -> DBURST_R { DBURST_R::new(((self.bits >> 15) & 0x07) as u8) } #[doc = "Bits 18:24 - buffer transfer length"] #[inline(always)] pub fn tlen(&self) -> TLEN_R { TLEN_R::new(((self.bits >> 18) & 0x7f) as u8) } #[doc = "Bit 25 - PacK Enable This bit is set and cleared by software. If the Source Size is smaller than the destination, it will be padded according to the PAM value. If the Source data size is larger than the destination one, it will be truncated. The alignment will be done according to the PAM\\[0\\] value. This bit is protected and can be written only if EN is 0"] #[inline(always)] pub fn pke(&self) -> PKE_R { PKE_R::new(((self.bits >> 25) & 0x01) != 0) } #[doc = "Bits 26:27 - Padding/Alignment Mode These bits are set and cleared by software. Case 1: Source data size smaller than destination data size - 3 options are valid. Case 2: Source data size larger than destination data size. The remainder part is discarded. When PKE = 1 or DSIZE=SSIZE, these bits are ignored. These bits are protected and can be written only if EN is 0"] #[inline(always)] pub fn pam(&self) -> PAM_R { PAM_R::new(((self.bits >> 26) & 0x03) as u8) } #[doc = "Bits 28:29 - Trigger Mode These bits are set and cleared by software. 
Note: If TRGM is 11 for the current block, all the values loaded at the end of the current block through the linked list mechanism must keep the same value (TRGM=11) and the same SWRM value, otherwise the result is undefined. These bits are protected and can be written only if EN is 0."] #[inline(always)] pub fn trgm(&self) -> TRGM_R { TRGM_R::new(((self.bits >> 28) & 0x03) as u8) } #[doc = "Bit 30 - SW Request Mode This bit is set and cleared by software. If a HW or SW request is currently active, the bit change will be delayed until the current transfer is completed. If the CxMAR contains a valid address, the CxMDR value will also be written @ CxMAR address. This bit is protected and can be written only if EN is 0."] #[inline(always)] pub fn swrm(&self) -> SWRM_R { SWRM_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Bufferable Write Mode This bit is set and cleared by software. This bit is protected and can be written only if EN is 0. Note: All MDMA destination accesses are non-cacheable."] #[inline(always)] pub fn bwm(&self) -> BWM_R { BWM_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:1 - Source increment mode These bits are set and cleared by software. These bits are protected and can be written only if EN is 0 Note: When source is AHB (SBUS=1), SINC = 00 is forbidden. In Linked List Mode, at the end of a block (single or last block in repeated block transfer mode), this register will be loaded from memory (from address given by current LAR\\[31:0\\] + 0x00)."] #[inline(always)] pub fn sinc(&mut self) -> SINC_W { SINC_W { w: self } } #[doc = "Bits 2:3 - Destination increment mode These bits are set and cleared by software. These bits are protected and can be written only if EN is 0 Note: When destination is AHB (DBUS=1), DINC = 00 is forbidden."] #[inline(always)] pub fn dinc(&mut self) -> DINC_W { DINC_W { w: self } } #[doc = "Bits 4:5 - Source data size These bits are set and cleared by software. 
These bits are protected and can be written only if EN is 0 Note: If a value of 11 is programmed for the TCM access/AHB port, a transfer error will occur (TEIF bit set) If SINCOS &lt; SSIZE and SINC &#8800; 00, the result will be unpredictable. Note: SSIZE = 11 (double-word) is forbidden when source is TCM/AHB bus (SBUS=1)."] #[inline(always)] pub fn ssize(&mut self) -> SSIZE_W { SSIZE_W { w: self } } #[doc = "Bits 6:7 - Destination data size These bits are set and cleared by software. These bits are protected and can be written only if EN is 0. Note: If a value of 11 is programmed for the TCM access/AHB port, a transfer error will occur (TEIF bit set) If DINCOS &lt; DSIZE and DINC &#8800; 00, the result will be unpredictable. Note: DSIZE = 11 (double-word) is forbidden when destination is TCM/AHB bus (DBUS=1)."] #[inline(always)] pub fn dsize(&mut self) -> DSIZE_W { DSIZE_W { w: self } } #[doc = "Bits 8:9 - source increment offset size"] #[inline(always)] pub fn sincos(&mut self) -> SINCOS_W { SINCOS_W { w: self } } #[doc = "Bits 10:11 - Destination increment offset"] #[inline(always)] pub fn dincos(&mut self) -> DINCOS_W { DINCOS_W { w: self } } #[doc = "Bits 12:14 - source burst transfer configuration"] #[inline(always)] pub fn sburst(&mut self) -> SBURST_W { SBURST_W { w: self } } #[doc = "Bits 15:17 - Destination burst transfer configuration"] #[inline(always)] pub fn dburst(&mut self) -> DBURST_W { DBURST_W { w: self } } #[doc = "Bits 18:24 - buffer transfer length"] #[inline(always)] pub fn tlen(&mut self) -> TLEN_W { TLEN_W { w: self } } #[doc = "Bit 25 - PacK Enable This bit is set and cleared by software. If the Source Size is smaller than the destination, it will be padded according to the PAM value. If the Source data size is larger than the destination one, it will be truncated. The alignment will be done according to the PAM\\[0\\] value. 
This bit is protected and can be written only if EN is 0"] #[inline(always)] pub fn pke(&mut self) -> PKE_W { PKE_W { w: self } } #[doc = "Bits 26:27 - Padding/Alignment Mode These bits are set and cleared by software. Case 1: Source data size smaller than destination data size - 3 options are valid. Case 2: Source data size larger than destination data size. The remainder part is discarded. When PKE = 1 or DSIZE=SSIZE, these bits are ignored. These bits are protected and can be written only if EN is 0"] #[inline(always)] pub fn pam(&mut self) -> PAM_W { PAM_W { w: self } } #[doc = "Bits 28:29 - Trigger Mode These bits are set and cleared by software. Note: If TRGM is 11 for the current block, all the values loaded at the end of the current block through the linked list mechanism must keep the same value (TRGM=11) and the same SWRM value, otherwise the result is undefined. These bits are protected and can be written only if EN is 0."] #[inline(always)] pub fn trgm(&mut self) -> TRGM_W { TRGM_W { w: self } } #[doc = "Bit 30 - SW Request Mode This bit is set and cleared by software. If a HW or SW request is currently active, the bit change will be delayed until the current transfer is completed. If the CxMAR contains a valid address, the CxMDR value will also be written @ CxMAR address. This bit is protected and can be written only if EN is 0."] #[inline(always)] pub fn swrm(&mut self) -> SWRM_W { SWRM_W { w: self } } #[doc = "Bit 31 - Bufferable Write Mode This bit is set and cleared by software. This bit is protected and can be written only if EN is 0. Note: All MDMA destination accesses are non-cacheable."] #[inline(always)] pub fn bwm(&mut self) -> BWM_W { BWM_W { w: self } } }
mod handler; mod model; pub mod route;
// Linux sysfs battery backend.
//
// `Inner` reads values from a `/sys/class/power_supply/<battery>/` directory
// (via the sibling `sysfs` helpers) and caches each reading in a `Lazy` cell;
// `preload()` forces every cell at construction time so all values come from
// roughly the same instant. `SysFsDevice` is the public wrapper; its
// `refresh()` simply rebuilds `Inner` from the same root path, discarding the
// old cache. Units follow the fields' own comments: energies in mWh, rates in
// mW, voltages in mV, percentage 0.0..100.0 (clamped by `set_bounds`).
// Several fallback heuristics (charge_* -> energy via design voltage, the
// 10 V default, the power sanity cut-offs) are borrowed from upower, as the
// inline comments note.
use std::io; use std::convert::AsRef; use std::str::FromStr; use std::path::PathBuf; use std::default::Default; use lazy_init::Lazy; use crate::{State, Technology}; use crate::platform::traits::BatteryDevice; use super::sysfs; const DESIGN_VOLTAGE_PROBES: [&str; 4] = [ "voltage_max_design", "voltage_min_design", "voltage_present", "voltage_now", ]; #[derive(Default)] pub struct Inner { root: PathBuf, design_voltage: Lazy<u32>, energy: Lazy<u32>, energy_full: Lazy<u32>, energy_full_design: Lazy<u32>, energy_rate: Lazy<u32>, voltage: Lazy<u32>, // mV percentage: Lazy<f32>, // 0.0 .. 100.0 temperature: Lazy<Option<f32>>, cycle_count: Lazy<Option<u32>>, state: Lazy<State>, technology: Lazy<Technology>, manufacturer: Lazy<Option<String>>, model_name: Lazy<Option<String>>, serial_number: Lazy<Option<String>>, } impl Inner { pub fn new(root: PathBuf) -> Inner { let device = Inner { root, ..Default::default() }; device.preload(); device } } impl Inner { // With current design, `Inner` is not an instant representation of the battery stats // because of `Lazy` fields. End user might fetch needed data with a significant time difference // which will lead to an inconsistent results. // All results should be loaded at the same time; as for now, making a quick hack // and preloading all the stuff in once. 
// It seems that even with ignored results (`let _ = self...()`), rust still calls all required methods, // since we have side effects (file I/O) fn preload(&self) { let _ = self.design_voltage(); let _ = self.energy(); let _ = self.energy_full(); let _ = self.energy_full_design(); let _ = self.energy_rate(); let _ = self.voltage(); let _ = self.percentage(); let _ = self.temperature(); let _ = self.state(); let _ = self.technology(); let _ = self.vendor(); let _ = self.model(); let _ = self.serial_number(); let _ = self.cycle_count(); } fn design_voltage(&self) -> u32 { *self.design_voltage.get_or_create(|| { DESIGN_VOLTAGE_PROBES.iter() .filter_map(|filename| { match sysfs::get_u32(self.root.join(filename)) { Ok(value) if value > 1 => Some(value / 1_000_000), _ => None, } }) .next() // Same to `upower`, using 10V as an approximation .unwrap_or(10) }) } fn charge_full(&self) -> u32 { ["charge_full", "charge_full_design"].iter() // µAh .filter_map(|filename| { match sysfs::get_u32(self.root.join(filename)) { Ok(value) => Some(value / 1_000), _ => None, } }) .next() .unwrap_or(0) } } impl BatteryDevice for Inner { fn capacity(&self) -> f32 { let energy_full = self.energy_full(); if energy_full > 0 { let capacity = (energy_full as f32 / self.energy_full_design() as f32) * 100.0; set_bounds(capacity) } else { 100.0 } } fn energy(&self) -> u32 { *self.energy.get_or_create(|| { let mut value = ["energy_now", "energy_avg"].iter() .filter_map(|filename| { match sysfs::get_u32(self.root.join(filename)) { Ok(energy) => Some(energy / 1_000), Err(_) => None, } }) .next(); if value.is_none() { value = ["charge_now", "charge_avg"].iter() .filter_map(|filename| { match sysfs::get_u32(self.root.join(filename)) { Ok(charge) => Some(charge / 1_000 * self.design_voltage()), Err(_) => None, } }) .next(); } match value { None => self.energy_full() * self.percentage() as u32 / 100, Some(energy) => energy, } }) } fn energy_full(&self) -> u32 { *self.energy_full.get_or_create(|| { let 
res = match sysfs::get_u32(self.root.join("energy_full")) { Ok(energy) => energy / 1_000, Err(_) => match sysfs::get_u32(self.root.join("charge_full")) { Ok(charge) => charge / 1_000 * self.design_voltage(), Err(_) => 0, } }; if res == 0 { self.energy_full_design() } else { res } }) } fn energy_full_design(&self) -> u32 { *self.energy_full_design.get_or_create(|| { match sysfs::get_u32(self.root.join("energy_full_design")) { Ok(energy) => energy / 1_000, Err(_) => match sysfs::get_u32(self.root.join("charge_full_design")) { Ok(charge) => charge / 1_000 * self.design_voltage(), Err(_) => 0, } } }) } fn energy_rate(&self) -> u32 { *self.energy_rate.get_or_create(|| { let mut value = match sysfs::get_u32(self.root.join("power_now")) { Ok(power) if power > 10_000 => power / 1_000, _ => { match sysfs::get_u32(self.root.join("current_now")) { Ok(current_now) => { // If charge_full exists, then current_now is always reported in uA. // In the legacy case, where energy only units exist, and power_now isn't present // current_now is power in uW. // Source: upower let mut current = current_now / 1_000; if self.charge_full() != 0 { current *= self.design_voltage(); } current }, Err(_) => { 0u32 }, } } }; // ACPI gives out the special 'Ones' value for rate when it's unable // to calculate the true rate. We should set the rate zero, and wait // for the BIOS to stabilise. // Source: upower // TODO: Uncomment and fix // if value == 0xffff { // value = 0.0; // } // Sanity check, same as upower does, if power is greater than 100W if value > 100_000 { value = 0; } // TODO: Calculate energy_rate manually, if hardware fails. 
// if value < 0.01 { // // Check upower `up_device_supply_calculate_rate` function // } // Some batteries give out massive rate values when nearly empty if value < 10 { value = 0; } value }) } // 0.0..100.0 fn percentage(&self) -> f32 { *self.percentage.get_or_create(|| { let capacity= match sysfs::get_u32(self.root.join("capacity")) { Ok(capacity) => capacity, _ if self.energy_full() > 0 => 100 * self.energy() / self.energy_full(), Err(_) => 0, }; set_bounds(capacity as f32) }) } fn state(&self) -> State { *self.state.get_or_create(|| { sysfs::get_string(self.root.join("status")) .and_then(|x| State::from_str(&x)) .unwrap_or(State::Unknown) }) } // mV fn voltage(&self) -> u32 { *self.voltage.get_or_create(|| { ["voltage_now", "voltage_avg"].iter() .filter_map(|filename| { match sysfs::get_u32(self.root.join(filename)) { Ok(voltage) if voltage > 1 => Some(voltage / 1_000), _ => None, } }) .next() .unwrap_or(0) // TODO: Check if it is really unreachable }) } fn temperature(&self) -> Option<f32> { *self.temperature.get_or_create(|| { let res = sysfs::get_f32(self.root.join("temp")) .and_then(|temp| Ok(temp / 10.0)); match res { Ok(value) => Some(value), Err(_) => None, } }) } fn vendor(&self) -> Option<&str> { self.manufacturer.get_or_create(|| { match sysfs::get_string(self.root.join("manufacturer")) { Ok(vendor) => Some(vendor), Err(_) => None, } }).as_ref().map(AsRef::as_ref) } fn model(&self) -> Option<&str> { self.model_name.get_or_create(|| { match sysfs::get_string(self.root.join("model_name")) { Ok(model) => Some(model), Err(_) => None, } }).as_ref().map(AsRef::as_ref) } fn serial_number(&self) -> Option<&str> { self.serial_number.get_or_create(|| { match sysfs::get_string(self.root.join("serial_number")) { Ok(serial) => Some(serial), Err(_) => None, } }).as_ref().map(AsRef::as_ref) } fn technology(&self) -> Technology { *self.technology.get_or_create(|| { match sysfs::get_string(self.root.join("technology")) { Ok(ref tech) => 
Technology::from_str(tech).unwrap_or(Technology::Unknown), Err(_) => Technology::Unknown, } }) } fn cycle_count(&self) -> Option<u32> { *self.cycle_count.get_or_create(|| { match sysfs::get_u32(self.root.join("cycle_count")) { Ok(value) => Some(value), Err(_) => None, } }) } } #[derive(Default)] pub struct SysFsDevice(Inner); impl SysFsDevice { pub fn new(root: PathBuf) -> SysFsDevice { SysFsDevice(Inner::new(root)) } pub fn refresh(&mut self) -> io::Result<()> { self.0 = Inner::new(self.0.root.clone()); Ok(()) } } impl BatteryDevice for SysFsDevice { fn capacity(&self) -> f32 { self.0.capacity() } fn energy(&self) -> u32 { self.0.energy() } fn energy_full(&self) -> u32 { self.0.energy_full() } fn energy_full_design(&self) -> u32 { self.0.energy_full_design() } fn energy_rate(&self) -> u32 { self.0.energy_rate() } fn percentage(&self) -> f32 { self.0.percentage() } fn state(&self) -> State { self.0.state() } fn voltage(&self) -> u32 { self.0.voltage() } fn temperature(&self) -> Option<f32> { self.0.temperature() } fn vendor(&self) -> Option<&str> { self.0.vendor() } fn model(&self) -> Option<&str> { self.0.model() } fn serial_number(&self) -> Option<&str> { self.0.serial_number() } fn technology(&self) -> Technology { self.0.technology() } fn cycle_count(&self) -> Option<u32> { self.0.cycle_count() } } // Clamps a percentage-like value into the inclusive range 0.0..=100.0.
#[inline] fn set_bounds(value: f32) -> f32 { if value < 0.0 { return 0.0; } if value > 100.0 { return 100.0; } value }
// Demonstrates round-tripping a trait-object reference through its raw
// representation and back.
//
// Fix: the original used `#![feature(core)]` and transmuted through
// `std::raw::TraitObject`, both of which were removed from Rust; the code no
// longer compiles, and transmuting a fat reference to a struct was never a
// guaranteed layout. Casting to a raw `*const dyn Trait` pointer expresses
// the same erase-and-restore round trip soundly on stable Rust.

trait Passable {
    /// Default marker method; gives the trait a callable vtable entry.
    fn just_a_thing(&self) -> bool {
        false
    }
}

impl Passable for i32 {}

fn main() {
    let boxed = Box::new(42i32);
    let as_trait: &dyn Passable = &*boxed;
    // Erase the reference into a raw fat pointer (data pointer + vtable)...
    let raw: *const dyn Passable = as_trait;
    // ...and rebuild the reference.
    // SAFETY: `raw` was just derived from a live reference and `boxed` is
    // still in scope, so the pointer is valid, aligned, and unaliased here.
    let restored: &dyn Passable = unsafe { &*raw };
    // Prove the vtable survived the round trip.
    assert!(!restored.just_a_thing());
}
// Binary entry point: prints the configuration-file location exported as
// `CONFIG_FILE` by the local `lib::config` module.
mod lib; use lib::config::CONFIG_FILE; fn main() { println!("{}", CONFIG_FILE); }
// Dispatches a parsed `DomainCmd` to the matching "/v1.0/domains" HTTP call.
//
// Each arm serializes the whole command as the request body where one is
// needed (Create/Delete/Update), resolves credentials via
// `Configuration::auth_from(auth_param)`, and awaits the corresponding
// `http::{post,delete,get,list,patch}` helper. The `:id` path placeholder is
// presumably substituted inside the `http` helpers — TODO confirm against
// their implementation. NOTE(review): `serde_json::to_string(&form).unwrap()`
// will panic if serialization fails; acceptable for a CLI, but worth
// confirming that is intended.
use crate::configuration::Configuration; use crate::domain_cmd::DomainCmd; use crate::http; type Error = Box<dyn std::error::Error>; type Result<T, E = Error> = std::result::Result<T, E>; pub async fn call(form : DomainCmd) -> Result<()> { match &form { DomainCmd::Create{auth_param, ..} => http::post("/v1.0/domains", &vec![], &vec![], Some(serde_json::to_string(&form).unwrap()), &Configuration::auth_from(auth_param) ).await, DomainCmd::Delete{auth_param, ..} => http::delete("/v1.0/domains/:id", &vec![], &vec![], Some(serde_json::to_string(&form).unwrap()), &Configuration::auth_from(auth_param) ).await, DomainCmd::Get{auth_param, ..} => http::get("/v1.0/domains/:id", &vec![], &vec![], &Configuration::auth_from(auth_param) ).await, DomainCmd::List{auth_param, form} => http::list("/v1.0/domains", &vec![], &form.to_query_string(), &Configuration::auth_from(auth_param) ).await, DomainCmd::Update{auth_param, ..} => http::patch("/v1.0/domains/:id", &vec![], &vec![], Some(serde_json::to_string(&form).unwrap()), &Configuration::auth_from(auth_param) ).await, } }
// Core syntax-tree data model: `AST` nodes (statements, function calls,
// literals, symbols), call `Parameter`s with positional arguments plus keyword
// `options`, source `Position` spans (file, start/end as (line, column) —
// TODO confirm ordering against the lexer), `Expression` for a raw input
// before/after evaluation, and namespaced `Symbol`s whose `SymbolKind` covers
// plain names, aliases, and prefix/infix/suffix operators (infix carries a
// precedence byte). All types derive full ordering so they can key a
// `BTreeMap` (as `Parameter::options` does).
use bigdecimal::BigDecimal; use num::BigInt; use std::collections::BTreeMap; mod traits; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum AST { EmptyStatement, // Program(Vec<AST>), // /// function call /// function(name, *args, **kwargs) Function(Symbol, Vec<Parameter>), // /// true or false Boolean(bool), Integer(BigInt), Decimal(BigDecimal), Symbol(Symbol), String(String), } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Parameter { pub arguments: Vec<AST>, pub options: BTreeMap<AST, AST>, pub position: Position, } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Position { pub file: String, pub start: (usize, usize), pub end: (usize, usize), } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum Expression { Source { raw: String, input: AST, eos: bool }, Executed { raw: String, input: AST, output: AST }, } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Symbol { pub name_space: Vec<String>, pub name: String, pub kind: SymbolKind, /// maybe use bit flag pub attributes: u64, } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum SymbolKind { /// raw symbol Normal, /// alias of another one Alias, /// Prefix(Box<str>), /// infix operator with Precedence Infix(Box<str>, u8), Suffix(Box<str>), }
// Benchmarks for `merkle::MerkleTree`: tree construction, proof generation,
// proof validation, and leaf iteration, over a small fixed string fixture and
// a larger random-bytes fixture.
//
// Fix: the fixture-building code was copy-pasted into every benchmark; it is
// now factored into `small_values` / `random_values`, which produce exactly
// the same data (same unseeded ISAAC generator, same fill order) so the
// benchmarked work is unchanged.
#![feature(test)]
#![feature(rand)]

extern crate test;
extern crate rand;
extern crate merkle;
extern crate ring;

use test::Bencher;
use rand::Rng;
use ring::digest::{Algorithm, SHA512};
use merkle::MerkleTree;

#[allow(non_upper_case_globals)]
static digest: &'static Algorithm = &SHA512;

/// Shared fixture for the `small_str` benchmarks.
fn small_values() -> Vec<&'static str> {
    vec!["one", "two", "three", "four"]
}

/// Builds `count` random byte vectors of `size` bytes each.
///
/// Uses an unseeded ISAAC generator, as the original inline setup did, so the
/// data is deterministic across runs and benchmarks stay comparable.
fn random_values(count: usize, size: usize) -> Vec<Vec<u8>> {
    let mut rng = rand::IsaacRng::new_unseeded();
    let mut values = vec![vec![0u8; size]; count];
    for v in &mut values {
        rng.fill_bytes(v);
    }
    values
}

#[bench]
fn bench_small_str_tree(b: &mut Bencher) {
    let values = small_values();
    b.iter(|| MerkleTree::from_vec(digest, values.clone()));
}

#[bench]
fn bench_small_str_proof_gen(b: &mut Bencher) {
    let values = small_values();
    let tree = MerkleTree::from_vec(digest, values.clone());
    b.iter(|| for value in &values {
        let proof = tree.gen_proof(value);
        test::black_box(proof);
    });
}

#[bench]
fn bench_small_str_proof_check(b: &mut Bencher) {
    let values = small_values();
    let tree = MerkleTree::from_vec(digest, values.clone());
    let proofs = values
        .iter()
        .map(|v| tree.gen_proof(v).unwrap())
        .collect::<Vec<_>>();
    b.iter(|| for proof in &proofs {
        test::black_box(proof.validate(tree.root_hash()));
    });
}

#[bench]
fn bench_big_rnd_tree(b: &mut Bencher) {
    let values = random_values(160, 256);
    b.iter(|| {
        let tree = MerkleTree::from_vec(digest, values.clone());
        test::black_box(tree)
    });
}

#[bench]
fn bench_big_rnd_proof_gen(b: &mut Bencher) {
    let values = random_values(160, 256);
    let tree = MerkleTree::from_vec(digest, values.clone());
    b.iter(|| for value in &values {
        let proof = tree.gen_proof(value.clone());
        test::black_box(proof);
    });
}

#[bench]
fn bench_big_rnd_proof_check(b: &mut Bencher) {
    let values = random_values(160, 256);
    let tree = MerkleTree::from_vec(digest, values.clone());
    let proofs = values
        .into_iter()
        .map(|v| tree.gen_proof(v).unwrap())
        .collect::<Vec<_>>();
    b.iter(|| for proof in &proofs {
        test::black_box(proof.validate(tree.root_hash()));
    });
}

#[bench]
fn bench_big_rnd_iter(b: &mut Bencher) {
    let values = random_values(160, 256);
    let tree = MerkleTree::from_vec(digest, values.clone());
    b.iter(|| for value in &tree {
        test::black_box(value);
    });
}
// Copyright 2017 Serde Developers // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms./// This structure pretty prints a JSON value to make it human readable. // // This is an edited version of PrettyFormatter that prints CRLF use std::io; use serde_json::ser::Formatter; #[derive(Clone, Debug)] pub struct WindowsPrettyFormatter<'a> { current_indent: usize, has_value: bool, indent: &'a [u8], } impl<'a> WindowsPrettyFormatter<'a> { /// Construct a pretty printer formatter that defaults to using two spaces for indentation. pub fn new() -> Self { WindowsPrettyFormatter::with_indent(b" ") } /// Construct a pretty printer formatter that uses the `indent` string for indentation. pub fn with_indent(indent: &'a [u8]) -> Self { WindowsPrettyFormatter { current_indent: 0, has_value: false, indent: indent, } } } impl<'a> Default for WindowsPrettyFormatter<'a> { fn default() -> Self { WindowsPrettyFormatter::new() } } impl<'a> Formatter for WindowsPrettyFormatter<'a> { #[inline] fn begin_array<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()> where W: io::Write, { self.current_indent += 1; self.has_value = false; writer.write_all(b"[") } #[inline] fn end_array<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()> where W: io::Write, { self.current_indent -= 1; if self.has_value { try!(writer.write_all(b"\r\n")); try!(indent(writer, self.current_indent, self.indent)); } writer.write_all(b"]") } #[inline] fn begin_array_value<W: ?Sized>(&mut self, writer: &mut W, first: bool) -> io::Result<()> where W: io::Write, { if first { try!(writer.write_all(b"\r\n")); } else { try!(writer.write_all(b",\r\n")); } try!(indent(writer, self.current_indent, self.indent)); Ok(()) } #[inline] fn end_array_value<W: ?Sized>(&mut self, _writer: 
&mut W) -> io::Result<()> where W: io::Write, { self.has_value = true; Ok(()) } #[inline] fn begin_object<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()> where W: io::Write, { self.current_indent += 1; self.has_value = false; writer.write_all(b"{") } #[inline] fn end_object<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()> where W: io::Write, { self.current_indent -= 1; if self.has_value { try!(writer.write_all(b"\r\n")); try!(indent(writer, self.current_indent, self.indent)); } writer.write_all(b"}") } #[inline] fn begin_object_key<W: ?Sized>(&mut self, writer: &mut W, first: bool) -> io::Result<()> where W: io::Write, { if first { try!(writer.write_all(b"\r\n")); } else { try!(writer.write_all(b",\r\n")); } indent(writer, self.current_indent, self.indent) } #[inline] fn begin_object_value<W: ?Sized>(&mut self, writer: &mut W) -> io::Result<()> where W: io::Write, { writer.write_all(b": ") } #[inline] fn end_object_value<W: ?Sized>(&mut self, _writer: &mut W) -> io::Result<()> where W: io::Write, { self.has_value = true; Ok(()) } } fn indent<W: ?Sized>(wr: &mut W, n: usize, s: &[u8]) -> io::Result<()> where W: io::Write, { for _ in 0..n { try!(wr.write_all(s)); } Ok(()) }
// Wire protocol for a Unix-domain-socket crypto service: serde-serializable
// request/response pairs over `SOCKET_PATH`. Requests reference server-side
// keys by `keyid`; `ReqCryptoBoxSeal` has no nonce field while
// `ResCryptoBoxSeal` returns one, so presumably the server generates the
// nonce during sealing — confirm against the server implementation.
#![allow(unused_imports)] #![allow(dead_code)] #![allow(unused_variables)] use sodiumoxide; use serde::{Serialize, Deserialize}; pub static SOCKET_PATH: &'static str = "/tmp/loopback-socket"; #[derive(Serialize, Deserialize, Debug)] pub enum MyRequest { ReqCryptoBoxGenKeypair, ReqCryptoBoxGenNonce, ReqCryptoBoxSeal { keyid: usize, plaintext: Vec<u8>, public_key: sodiumoxide::crypto::box_::PublicKey, }, ReqCryptoBoxOpen { keyid: usize, ciphertext: Vec<u8>, public_key: sodiumoxide::crypto::box_::PublicKey, nonce: sodiumoxide::crypto::box_::Nonce, } } #[derive(Serialize, Deserialize, Debug)] pub enum MyResponse { ResCryptoBoxGenKeypair { keyid: usize, public_key: sodiumoxide::crypto::box_::PublicKey, }, ResCryptoBoxGenNonce { nonce: sodiumoxide::crypto::box_::Nonce, }, ResCryptoBoxSeal { ciphertext: Vec<u8>, nonce: sodiumoxide::crypto::box_::Nonce, }, ResCryptoBoxOpen { plaintext: Vec<u8>, }, }
use oasis_contract_sdk::{
    self as sdk,
    env::Env,
    types::{
        message::{Message, NotifyReply},
        InstanceId,
    },
};
use oasis_contract_sdk_storage::{cell::Cell, map::Map};
use oasis_contract_sdk_types::address::Address;

use crate::{
    Error, Event, InitialBalance, ReceiverRequest, Request, Response, TokenInformation,
    TokenInstantiation,
};

/// Handles an OAS20 request call.
///
/// Mutating entry point: dispatches on `request`, performs the state change
/// via the helpers below (`transfer`, `send`, `burn`, `mint`, `allow`,
/// `withdraw`), emits the corresponding event, and returns
/// `Response::Empty` on success. Any request variant not handled here is
/// rejected with `Error::BadRequest` (query-only variants go through
/// `handle_query` instead).
pub fn handle_call<C: sdk::Context>(
    ctx: &mut C,
    token_info: Cell<TokenInformation>,
    balances: Map<Address, u128>,
    allowances: Map<(Address, Address), u128>,
    request: Request,
) -> Result<Response, Error> {
    match request {
        Request::Transfer { to, amount } => {
            // Transfers the `amount` of funds from caller to `to` address.
            let from = ctx.caller_address().to_owned();
            transfer(ctx, balances, from, to, amount)?;

            ctx.emit_event(Event::Oas20Transferred { from, to, amount });

            Ok(Response::Empty)
        }
        Request::Send { to, amount, data } => {
            // Like Transfer, but `to` is a contract instance which gets
            // notified via a runtime message (id 0, no reply requested).
            let from = ctx.caller_address().to_owned();
            send(ctx, balances, from, to, amount, data, 0, NotifyReply::Never)?;

            ctx.emit_event(Event::Oas20Sent { from, to, amount });

            Ok(Response::Empty)
        }
        Request::Burn { amount } => {
            let from = ctx.caller_address().to_owned();
            burn(ctx, balances, token_info, from, amount)?;

            ctx.emit_event(Event::Oas20Burned { from, amount });

            Ok(Response::Empty)
        }
        Request::Mint { to, amount } => {
            // Minter authorization is enforced inside `mint`.
            mint(ctx, balances, token_info, to, amount)?;

            ctx.emit_event(Event::Oas20Minted { to, amount });

            Ok(Response::Empty)
        }
        Request::Allow {
            beneficiary,
            negative,
            amount_change,
        } => {
            // `negative` selects whether the allowance is decreased or
            // increased; the event reports the actually-applied change.
            let owner = ctx.caller_address().to_owned();
            let (new_allowance, amount_change) =
                allow(ctx, allowances, owner, beneficiary, negative, amount_change)?;

            ctx.emit_event(Event::Oas20AllowanceChanged {
                owner,
                beneficiary,
                allowance: new_allowance,
                negative,
                amount_change,
            });

            Ok(Response::Empty)
        }
        Request::Withdraw { from, amount } => {
            // Caller spends part of the allowance `from` granted them.
            let to = ctx.caller_address().to_owned();
            withdraw(ctx, balances, allowances, from, to, amount)?;

            ctx.emit_event(Event::Oas20Withdrew { from, to, amount });

            Ok(Response::Empty)
        }
        _ => Err(Error::BadRequest),
    }
}

/// Handles an OAS20 request query.
///
/// Read-only entry point: serves token metadata, a single balance, or a
/// single allowance. Missing balances/allowances read as zero
/// (`unwrap_or_default`). Non-query variants yield `Error::BadRequest`.
pub fn handle_query<C: sdk::Context>(
    ctx: &mut C,
    token_info: Cell<TokenInformation>,
    balances: Map<Address, u128>,
    allowances: Map<(Address, Address), u128>,
    request: Request,
) -> Result<Response, Error> {
    match request {
        Request::TokenInformation => {
            // Token info should always be present.
            let token_info = token_info.get(ctx.public_store()).unwrap();

            Ok(Response::TokenInformation {
                token_information: token_info,
            })
        }
        Request::Balance { address } => Ok(Response::Balance {
            balance: balances
                .get(ctx.public_store(), address)
                .unwrap_or_default(),
        }),
        Request::Allowance {
            allower,
            beneficiary,
        } => Ok(Response::Allowance {
            allowance: allowances
                .get(ctx.public_store(), (allower, beneficiary))
                .unwrap_or_default(),
        }),
        _ => Err(Error::BadRequest),
    }
}

/// Instantiates the contract state.
///
/// Writes the initial balances, sums them into `total_supply` (failing with
/// `Error::TotalSupplyOverflow` on u128 overflow), stores the resulting
/// `TokenInformation`, and returns a copy of it.
/// NOTE(review): duplicate addresses in `initial_balances` would overwrite
/// earlier entries while still being counted in `total_supply` — presumably
/// the instantiation payload is trusted not to contain duplicates; confirm.
pub fn instantiate<C: sdk::Context>(
    ctx: &mut C,
    balances: Map<Address, u128>,
    token_info: Cell<TokenInformation>,
    instantiation: TokenInstantiation,
) -> Result<TokenInformation, Error> {
    // Setup initial balances and compute the total supply.
    let mut total_supply: u128 = 0;
    for InitialBalance { address, amount } in instantiation.initial_balances {
        total_supply = total_supply
            .checked_add(amount)
            .ok_or(Error::TotalSupplyOverflow)?;
        balances.insert(ctx.public_store(), address, amount);
    }

    let token_information = TokenInformation {
        name: instantiation.name,
        symbol: instantiation.symbol,
        decimals: instantiation.decimals,
        minting: instantiation.minting,
        total_supply,
    };
    token_info.set(ctx.public_store(), token_information.clone());

    Ok(token_information)
}

/// Transfer the `amount` of funds from `from` to `to` address.
pub fn transfer<C: sdk::Context>(
    ctx: &mut C,
    balances: Map<Address, u128>,
    from: Address,
    to: Address,
    amount: u128,
) -> Result<(), Error> {
    // Zero-amount transfers are rejected rather than silently succeeding.
    if amount == 0 {
        return Err(Error::ZeroAmount);
    }

    let mut from_balance = balances.get(ctx.public_store(), from).unwrap_or_default();
    let mut to_balance = balances.get(ctx.public_store(), to).unwrap_or_default();
    from_balance = from_balance
        .checked_sub(amount)
        .ok_or(Error::InsufficientFunds)?;
    // Unchecked add: presumably safe because the sum of all balances is
    // bounded by `total_supply`, which is overflow-checked at mint and
    // instantiate time — TODO confirm that invariant holds everywhere.
    to_balance += amount;
    balances.insert(ctx.public_store(), from, from_balance);
    balances.insert(ctx.public_store(), to, to_balance);

    Ok(())
}

/// Burns the `amount` of funds from `from`.
pub fn burn<C: sdk::Context>(
    ctx: &mut C,
    balances: Map<Address, u128>,
    token_info: Cell<TokenInformation>,
    from: Address,
    amount: u128,
) -> Result<(), Error> {
    if amount == 0 {
        return Err(Error::ZeroAmount);
    }

    // Remove from account balance.
    let mut from_balance = balances.get(ctx.public_store(), from).unwrap_or_default();
    from_balance = from_balance
        .checked_sub(amount)
        .ok_or(Error::InsufficientFunds)?;

    // Decrease the supply.
    // Token info should always be present.
    let mut info = token_info.get(ctx.public_store()).unwrap();
    // Shouldn't ever overflow: the burned balance was part of the supply.
    info.total_supply = info.total_supply.checked_sub(amount).unwrap();

    // Both writes happen only after every check passed.
    balances.insert(ctx.public_store(), from, from_balance);
    token_info.set(ctx.public_store(), info);

    Ok(())
}

/// Mints the `amount` of tokens to `to`.
///
/// Fails unless the token has a minting config, the caller is the
/// configured minter, and the new supply stays within the (optional) cap.
pub fn mint<C: sdk::Context>(
    ctx: &mut C,
    balances: Map<Address, u128>,
    token_info_cell: Cell<TokenInformation>,
    to: Address,
    amount: u128,
) -> Result<(), Error> {
    if amount == 0 {
        return Err(Error::ZeroAmount);
    }

    // Token info should always be present.
    let mut token_info = token_info_cell.get(ctx.public_store()).unwrap();

    // Ensure token supports minting and new supply cap is below mint cap.
    match token_info.minting.as_ref() {
        Some(info) => {
            // No explicit cap means "capped" only by u128 range.
            let cap = info.cap.unwrap_or(u128::MAX);
            match token_info.total_supply.checked_add(amount) {
                Some(new_cap) => {
                    if new_cap > cap {
                        return Err(Error::MintOverCap);
                    }
                }
                None => return Err(Error::TotalSupplyOverflow),
            }
            // Only the configured minter may mint.
            if &info.minter != ctx.caller_address() {
                return Err(Error::MintingForbidden);
            }
        }
        None => return Err(Error::MintingForbidden),
    }

    // Add to account balance.
    let mut to_balance = balances.get(ctx.public_store(), to).unwrap_or_default();
    // Cannot overflow due to the total supply overflow check above.
    to_balance = to_balance.checked_add(amount).unwrap();
    // Increase the supply.
    // Overflow already checked above.
    token_info.total_supply = token_info.total_supply.checked_add(amount).unwrap();

    balances.insert(ctx.public_store(), to, to_balance);
    token_info_cell.set(ctx.public_store(), token_info);

    Ok(())
}

/// Transfers the `amount` of funds from caller to `to` contract instance identifier
/// and calls `ReceiveOas20` on the receiving contract.
#[allow(clippy::too_many_arguments)]
pub fn send<C: sdk::Context>(
    ctx: &mut C,
    balances: Map<Address, u128>,
    from: Address,
    to: InstanceId,
    amount: u128,
    data: cbor::Value,
    id: u64,
    notify: NotifyReply,
) -> Result<(), Error> {
    // Resolve the destination instance to its address and move the funds
    // first; the notification below is only emitted if that succeeds.
    let to_address = ctx.env().address_for_instance(to);
    transfer(ctx, balances, from, to_address, amount)?;

    // There should be high-level helpers for calling methods of other contracts that follow a similar
    // "standard" API - maybe define an API and helper methods in an OAS-0 document.
    // Emit a message through which we instruct the runtime to make a call on the
    // contract's behalf.
    use cbor::cbor_map;
    ctx.emit_message(Message::Call {
        id,
        reply: notify,
        method: "contracts.Call".to_string(),
        body: cbor::cbor_map! {
            "id" => cbor::cbor_int!(to.as_u64() as i64),
            "data" => cbor::cbor_bytes!(cbor::to_vec(
                cbor::to_value(ReceiverRequest::Receive{sender: from, amount, data}),
            )),
            "tokens" => cbor::cbor_array![],
        },
        max_gas: None,
        data: None,
    });

    Ok(())
}

/// Update the `beneficiary` allowance by the `amount`.
///
/// Returns `(new_allowance, actual_change)`; the change can be smaller than
/// `amount` because both directions saturate at the u128 bounds.
pub fn allow<C: sdk::Context>(
    ctx: &mut C,
    allowances: Map<(Address, Address), u128>,
    allower: Address,
    beneficiary: Address,
    negative: bool,
    amount: u128,
) -> Result<(u128, u128), Error> {
    if amount == 0 {
        return Err(Error::ZeroAmount);
    }
    // Self-allowances are meaningless and rejected.
    if allower == beneficiary {
        return Err(Error::SameAllowerAndBeneficiary);
    }

    let allowance = allowances
        .get(ctx.public_store(), (allower, beneficiary))
        .unwrap_or_default();
    let (new_allowance, change) = match negative {
        true => {
            // Saturating: decreasing below zero clamps at 0.
            let new = allowance.saturating_sub(amount);
            (new, allowance - new)
        }
        false => {
            // Saturating: increasing past u128::MAX clamps at MAX.
            let new = allowance.saturating_add(amount);
            (new, new - allowance)
        }
    };
    allowances.insert(ctx.public_store(), (allower, beneficiary), new_allowance);

    Ok((new_allowance, change))
}

/// Withdraw the `amount` of funds from `from` to `to`.
///
/// `to` must hold a sufficient allowance granted by `from`; the allowance is
/// reduced by `amount` after a successful transfer.
pub fn withdraw<C: sdk::Context>(
    ctx: &mut C,
    balances: Map<Address, u128>,
    allowances: Map<(Address, Address), u128>,
    from: Address,
    to: Address,
    amount: u128,
) -> Result<(), Error> {
    if amount == 0 {
        return Err(Error::ZeroAmount);
    }
    if from == to {
        return Err(Error::SameAllowerAndBeneficiary);
    }

    let mut allowance = allowances
        .get(ctx.public_store(), (from, to))
        .unwrap_or_default();
    allowance = allowance
        .checked_sub(amount)
        .ok_or(Error::InsufficientAllowance)?;

    transfer(ctx, balances, from, to, amount)?;
    // Persist the reduced allowance only after the transfer succeeded.
    allowances.insert(ctx.public_store(), (from, to), allowance);

    Ok(())
}
use tokio::sync::{mpsc, broadcast}; use tokio_stream::wrappers::ReceiverStream; use tonic::{Request, Response, Status}; use stream_mod::{HelloReply, HelloRequest, GetBlocksRequest, GenericDataProto, streamout_server::Streamout}; pub mod stream_mod { tonic::include_proto!("chaindata"); } #[derive(Debug)] pub struct StreamService { pub chan : broadcast::Sender<GenericDataProto>, } #[tonic::async_trait] impl Streamout for StreamService { async fn say_hello( &self, request: Request<HelloRequest>, ) -> Result<Response<HelloReply>, Status> { println!("Got a request: {:?}", request); let reply = HelloReply { message: format!("Hello {}!", request.into_inner().name).into(), }; Ok(Response::new(reply)) } type ListBlocksStream = ReceiverStream<Result<GenericDataProto, Status>>; async fn list_blocks( &self, request: Request<GetBlocksRequest>, ) -> Result<Response<Self::ListBlocksStream>, Status> { println!("ListFeatures = {:?}", request); // tx, rx for out stream gRPC let (tx, rx) = mpsc::channel(1024); // Create new channel for connect between input and output stream let mut rx_chan = self.chan.subscribe(); tokio::spawn(async move { loop { // Getting generic_data let generic_data = rx_chan.recv().await.unwrap(); // Send generic_data to queue" tx.send(Ok(generic_data)).await.unwrap(); } }); Ok(Response::new(ReceiverStream::new(rx))) } }
use std::io;
use std::io::Write;

/// Reads a cloud count and a 0/1 sequence from stdin, then prints the
/// minimum number of jumps needed to reach the last cloud (a "1" marks a
/// cloud that the greedy walk below avoids when jumping two ahead).
fn main() {
    print!("Please write number of clouds: ");
    io::stdout().flush().unwrap();
    let mut input_steps = String::new();
    io::stdin().read_line(&mut input_steps).expect("Failed to read from stdin");
    let clouds_amount = input_steps.trim().parse::<usize>().unwrap();

    print!("Please write sequence: ");
    io::stdout().flush().unwrap();
    let mut input_sequence = String::new();
    io::stdin().read_line(&mut input_sequence).expect("Failed to read from stdin");
    // "1" -> true (bad cloud), anything else -> false — same lenient mapping
    // as the original `binary_to_bool` helper (junk tokens read as "0").
    let clouds: Vec<bool> = input_sequence.trim().split(' ').map(|token| token == "1").collect();

    // The original trusted the user-typed count for loop bounds, which could
    // index out of bounds (count > sequence length) or underflow `usize`
    // (count == 0). Warn on mismatch and trust the actual sequence length.
    if clouds.len() != clouds_amount {
        eprintln!(
            "warning: expected {} clouds but read {}; using the sequence length",
            clouds_amount,
            clouds.len()
        );
    }

    println!("Answer is {}", count_jumps(&clouds));
}

/// Counts the greedy walk's jumps over `clouds`: from position `i`, jump two
/// ahead when that cloud exists and is safe (`false`), otherwise one ahead.
/// Returns 0 for zero- or one-cloud inputs (no jump needed; also avoids the
/// original's `usize` underflow on an empty input).
fn count_jumps(clouds: &[bool]) -> usize {
    if clouds.len() < 2 {
        return 0;
    }

    let mut i = 0;
    let mut counter = 0;
    while i < clouds.len() - 1 {
        if i + 2 < clouds.len() && !clouds[i + 2] {
            i += 2;
        } else {
            i += 1;
        }
        counter += 1;
    }
    counter
}
// Crate root module declarations; implementations live in the matching
// `neural_network` and `simulation` source files (not visible here).
pub mod neural_network;
pub mod simulation;
use crate::compiling::{ImportEntryStep, InsertMetaError};
use crate::{
    Id, IrError, IrErrorKind, ParseError, ParseErrorKind, ResolveError, ResolveErrorKind, Spanned,
};
use runestick::{CompileMeta, Item, Location, Visibility};
use thiserror::Error;

// NOTE(review): `error!` is a project-local macro (not std); it presumably
// generates the spanned-error boilerplate plus the listed `From` conversions
// — confirm against its definition elsewhere in the crate.
error! {
    /// An error raised during querying.
    #[derive(Debug)]
    pub struct QueryError {
        kind: QueryErrorKind,
    }

    impl From<IrError>;
    impl From<ParseError>;
    impl From<ResolveError>;
}

/// Error raised during queries.
///
/// The wrapped-error variants (`IrError`, `ParseError`, `ResolveError`,
/// `InsertMetaError`) convert via `#[from]` and forward their source via
/// `#[source]`; the remaining variants describe query/visibility/import
/// failures directly.
#[allow(missing_docs)]
#[derive(Debug, Error)]
pub enum QueryErrorKind {
    #[error("{message}")]
    Custom { message: &'static str },
    #[error("failed to insert meta: {error}")]
    InsertMetaError {
        #[source]
        #[from]
        error: InsertMetaError,
    },
    #[error("{error}")]
    IrError {
        #[source]
        #[from]
        error: IrErrorKind,
    },
    #[error("{error}")]
    ParseError {
        #[source]
        #[from]
        error: ParseErrorKind,
    },
    #[error("{error}")]
    ResolveError {
        #[source]
        #[from]
        error: ResolveErrorKind,
    },
    #[error("missing {what} for id {id:?}")]
    MissingId { what: &'static str, id: Option<Id> },
    #[error("cannot define conflicting item `{item}`")]
    ItemConflict { item: Item, other: Location },
    #[error("`{item}` can refer to multiple things")]
    AmbiguousItem {
        item: Item,
        locations: Vec<(Location, Item)>,
    },
    #[error("`{item}` with {visibility} visibility, is not accessible from module `{from}`")]
    NotVisible {
        chain: Vec<Location>,
        location: Location,
        visibility: Visibility,
        item: Item,
        from: Item,
    },
    #[error(
        "module `{item}` with {visibility} visibility, is not accessible from module `{from}`"
    )]
    NotVisibleMod {
        chain: Vec<Location>,
        location: Location,
        visibility: Visibility,
        item: Item,
        from: Item,
    },
    #[error("missing item for id {id:?}")]
    MissingRevId { id: Id },
    #[error("missing query meta for module {item}")]
    MissingMod { item: Item },
    #[error("cycle in import")]
    ImportCycle { path: Vec<ImportEntryStep> },
    #[error("missing last use component")]
    LastUseComponent,
    #[error("found indexed entry for `{item}`, but was not an import")]
    NotIndexedImport { item: Item },
    #[error("{meta} can't be used as an import")]
    UnsupportedImportMeta { meta: CompileMeta },
    /// Tried to add an item that already exists.
    #[error("trying to insert `{current}` but conflicting meta `{existing}` already exists")]
    MetaConflict {
        /// The meta we tried to insert.
        current: CompileMeta,
        /// The existing item.
        existing: CompileMeta,
    },
}
// Module declarations re-pointed (via `#[path]`) at files under the
// `with_float/` directory; implementations are not visible here.
#[path = "with_float/with_empty_list_options.rs"]
pub mod with_empty_list_options;
#[path = "with_float/with_proper_list_options.rs"]
pub mod with_proper_list_options;
// `without_proper_list_options_errors_badarg` in unit tests
use std::sync::Arc; use async_trait::async_trait; use ingester_query_grpc::IngesterQueryRequest; use trace::{ctx::SpanContext, span::SpanRecorder}; use crate::ingester::flight_client::{Error as FlightClientError, IngesterFlightClient, QueryData}; #[derive(Debug)] pub struct InvalidateOnErrorFlightClient { /// The underlying client. inner: Arc<dyn IngesterFlightClient>, } impl InvalidateOnErrorFlightClient { pub fn new(inner: Arc<dyn IngesterFlightClient>) -> Self { Self { inner } } } #[async_trait] impl IngesterFlightClient for InvalidateOnErrorFlightClient { async fn invalidate_connection(&self, ingester_address: Arc<str>) { self.inner.invalidate_connection(ingester_address).await; } async fn query( &self, ingester_addr: Arc<str>, request: IngesterQueryRequest, span_context: Option<SpanContext>, ) -> Result<Box<dyn QueryData>, FlightClientError> { let span = span_context.map(|s| s.child("invalidator")); let mut span_recorder = SpanRecorder::new(span.clone()); let res = self .inner .query( Arc::clone(&ingester_addr), request, span_recorder.span().map(|span| span.ctx.clone()), ) .await; // IOx vs borrow checker let is_err = if let Err(e) = &res { e.is_upstream_error() } else { false }; if is_err { self.inner.invalidate_connection(ingester_addr).await; span_recorder.event("invalidate connection"); } res } }
/// Demonstrates matching on `Option<i32>`: one `None` and one `Some` value
/// are each printed through the same exhaustive `match`.
fn main() {
    let v1: Option<i32> = None;
    let v2: Option<i32> = Some(10);

    // Drive both values through a single match instead of duplicating it.
    for value in [v1, v2].iter() {
        match value {
            None => println!("Option value is None"),
            Some(x) => println!("x = {}", x),
        }
    }
}
//! Metrics related utilities use super::{ builder::{stage::Method, Request}, SequencerError, }; use futures::Future; use pathfinder_common::BlockId; const METRIC_REQUESTS: &str = "gateway_requests_total"; const METRIC_FAILED_REQUESTS: &str = "gateway_requests_failed_total"; const METRICS: [&str; 2] = [METRIC_REQUESTS, METRIC_FAILED_REQUESTS]; const TAG_LATEST: &str = "latest"; const TAG_PENDING: &str = "pending"; const TAGS: &[&str] = &[TAG_LATEST, TAG_PENDING]; const REASON_DECODE: &str = "decode"; const REASON_STARKNET: &str = "starknet"; const REASON_RATE_LIMITING: &str = "rate_limiting"; const REASONS: [&str; 3] = [REASON_DECODE, REASON_RATE_LIMITING, REASON_STARKNET]; /// Register all sequencer related metrics pub fn register() { let methods_with_tags = ["get_block", "get_state_update"].into_iter(); // Requests and failed requests METRICS.iter().for_each(|&name| { // For all methods Request::<'_, Method>::METHODS.iter().for_each(|&method| { metrics::register_counter!(name, "method" => method); }); // For methods that support block tags in metrics methods_with_tags.clone().for_each(|method| { TAGS.iter().for_each(|&tag| { metrics::register_counter!(name, "method" => method, "tag" => tag); }) }) }); // Failed requests for specific failure reasons REASONS.iter().for_each(|&reason| { // For all methods Request::<'_, Method>::METHODS.iter().for_each(|&method| { metrics::register_counter!(METRIC_FAILED_REQUESTS, "method" => method, "reason" => reason); }); // For methods that support block tags in metrics methods_with_tags.clone().for_each(|method| { TAGS.iter().for_each(|&tag| { metrics::register_counter!(METRIC_FAILED_REQUESTS, "method" => method, "tag" => tag, "reason" => reason); }) }) }); } /// Used to mark methods that touch special block tags to avoid reparsing the url. 
#[derive(Clone, Copy, Debug)]
pub enum BlockTag {
    // Request does not target one of the special tags.
    None,
    Latest,
    Pending,
}

impl From<BlockId> for BlockTag {
    fn from(x: BlockId) -> Self {
        match x {
            // Concrete blocks (by number or hash) carry no special tag.
            BlockId::Number(_) | BlockId::Hash(_) => Self::None,
            BlockId::Latest => Self::Latest,
            BlockId::Pending => Self::Pending,
        }
    }
}

impl BlockTag {
    /// Returns a `&'static str` representation of the tag, if it exists.
    pub fn as_str(self) -> Option<&'static str> {
        match self {
            BlockTag::None => None,
            BlockTag::Latest => Some(TAG_LATEST),
            BlockTag::Pending => Some(TAG_PENDING),
        }
    }
}

/// Carries metrics metadata while creating sequencer requests
#[derive(Clone, Copy, Debug)]
pub struct RequestMetadata {
    // Method label used for the counters, e.g. "get_block".
    pub method: &'static str,
    pub tag: BlockTag,
}

impl RequestMetadata {
    /// Create new instance with tag set to [`BlockTag::None`]
    pub fn new(method: &'static str) -> Self {
        Self {
            method,
            tag: BlockTag::None,
        }
    }
}

/// # Usage
///
/// Awaits future `f` and increments the following counters for a particular method:
/// - `gateway_requests_total`,
/// - `gateway_requests_failed_total` if the future returns the `Err()` variant.
///
/// # Additional counter labels
///
/// 1. All the above counters are also duplicated for the special cases of:
/// `("get_block" | "get_state_update") AND ("latest" | "pending")`.
///
/// 2. `gateway_requests_failed_total` is also duplicated for the specific failure reasons:
/// - `starknet`, if the future returns an `Err()` variant, which carries a Starknet specific error variant
/// - `decode`, if the future returns an `Err()` variant, which carries a decode error variant
/// - `rate_limiting` if the future returns an `Err()` variant,
/// which carries the [`reqwest::StatusCode::TOO_MANY_REQUESTS`] status code
pub async fn with_metrics<T>(
    meta: RequestMetadata,
    f: impl Future<Output = Result<T, SequencerError>>,
) -> Result<T, SequencerError> {
    /// Increments a counter and its block tag specific variants if they exist
    fn increment(counter_name: &'static str, meta: RequestMetadata) {
        let method = meta.method;
        let tag = meta.tag;
        metrics::increment_counter!(counter_name, "method" => method);
        // Only the two tag-aware methods get the extra "tag"-labeled counter,
        // and only when the request actually targeted latest/pending.
        if let ("get_block" | "get_state_update", Some(tag)) = (method, tag.as_str()) {
            metrics::increment_counter!(counter_name, "method" => method, "tag" => tag);
        }
    }

    /// Increments the `gateway_requests_failed_total` counter for a given failure `reason`,
    /// includes block tag specific variants if they exist
    fn increment_failed(meta: RequestMetadata, reason: &'static str) {
        let method = meta.method;
        let tag = meta.tag;
        metrics::increment_counter!(METRIC_FAILED_REQUESTS, "method" => method, "reason" => reason);
        if let ("get_block" | "get_state_update", Some(tag)) = (method, tag.as_str()) {
            metrics::increment_counter!(METRIC_FAILED_REQUESTS, "method" => method, "tag" => tag, "reason" => reason);
        }
    }

    // Count the attempt up front, then await; failure counters are handled
    // in the map_err below without changing the returned error.
    increment(METRIC_REQUESTS, meta);
    f.await.map_err(|e| {
        increment(METRIC_FAILED_REQUESTS, meta);
        // Classify the failure reason. Match arm order matters: the decode
        // check on ReqwestError runs before the status check.
        match &e {
            SequencerError::StarknetError(_) => {
                increment_failed(meta, REASON_STARKNET);
            }
            SequencerError::InvalidStarknetErrorVariant => {
                increment_failed(meta, REASON_DECODE);
            }
            SequencerError::ReqwestError(e) if e.is_decode() => {
                increment_failed(meta, REASON_DECODE);
            }
            SequencerError::ReqwestError(e)
                if e.is_status()
                    && e.status().expect("error kind should be status")
                        == reqwest::StatusCode::TOO_MANY_REQUESTS =>
            {
                increment_failed(meta, REASON_RATE_LIMITING);
            }
            // Other reqwest errors are counted as failures but carry no
            // specific reason label.
            SequencerError::ReqwestError(_) => {}
        }
        e
    })
}
pub struct Solution;

impl Solution {
    /// LeetCode 134 "Gas Station".
    ///
    /// Single greedy pass: track the running fuel balance over the whole
    /// circuit and remember where the prefix balance is at its lowest —
    /// the station right after that low point is the only viable start.
    /// Returns -1 when the total fuel cannot cover the total cost;
    /// otherwise the starting station index (0 for empty input).
    pub fn can_complete_circuit(gas: Vec<i32>, cost: Vec<i32>) -> i32 {
        let mut running = 0;
        let mut lowest = 0;
        let mut start = 0usize;

        for station in 0..gas.len() {
            running += gas[station];
            running -= cost[station];
            if running < lowest {
                lowest = running;
                start = station + 1;
            }
        }

        if running < 0 {
            -1
        } else {
            start as i32
        }
    }
}

#[test]
fn test0134() {
    assert_eq!(
        Solution::can_complete_circuit(vec![1, 2, 3, 4, 5], vec![3, 4, 5, 1, 2]),
        3
    );
    assert_eq!(
        Solution::can_complete_circuit(vec![2, 3, 4], vec![3, 4, 3]),
        -1
    );
}
use druid::{
    theme,
    Color,
    Env,
    Key,
};
use theme::FOREGROUND_DARK;

// Custom druid theme keys, namespaced under "thomhuds".
// Colour intent: presumably a highlight for hot/hovered widgets plus
// red/green/blue accents with pale variants — TODO confirm against usage.
pub const HOT_COLOUR: Key<Color> = Key::new("thomhuds.hot_colour");
pub const RED: Key<Color> = Key::new("thomhuds.red");
pub const PALE_RED: Key<Color> = Key::new("thomhuds.pale_red");
pub const GREEN: Key<Color> = Key::new("thomhuds.green");
pub const PALE_GREEN: Key<Color> = Key::new("thomhuds.pale_green");
pub const BLUE: Key<Color> = Key::new("thomhuds.blue");
pub const PALE_BLUE: Key<Color> = Key::new("thomhuds.pale_blue");

/// Populates the druid `Env` with this app's theme overrides and custom
/// colour keys. Registered as the app's env configurator; the `State`
/// argument is unused.
pub fn theme(env: &mut Env, _: &crate::State) {
    env.set(theme::BUTTON_BORDER_RADIUS, 4.);
    env.set(HOT_COLOUR, Color::grey(0.25));
    env.set(FOREGROUND_DARK, Color::grey(0.6));
    // Colours given as 0xRRGGBBAA values.
    env.set(RED, Color::from_rgba32_u32(0xF44336FF));
    env.set(PALE_RED, Color::from_rgba32_u32(0xEF9A9AFF));
    env.set(GREEN, Color::from_rgba32_u32(0x4CAF50FF));
    env.set(PALE_GREEN, Color::from_rgba32_u32(0xA5D6A7FF));
    env.set(BLUE, Color::from_rgba32_u32(0x2196F3FF));
    env.set(PALE_BLUE, Color::from_rgba32_u32(0x90CAF9FF));
    // Pick a platform-native UI font per target OS.
    #[cfg(target_os = "windows")]
    env.set(theme::FONT_NAME, "Segoe UI");
    #[cfg(target_os = "macos")]
    env.set(theme::FONT_NAME, "San Francisco");
    #[cfg(target_os = "linux")]
    env.set(theme::FONT_NAME, "Roboto");
}
/*! A DFA-backed `Regex`. This module provides [`Regex`], which is defined generically over the [`Automaton`] trait. A `Regex` implements convenience routines you might have come to expect, such as finding the start/end of a match and iterating over all non-overlapping matches. This `Regex` type is limited in its capabilities to what a DFA can provide. Therefore, APIs involving capturing groups, for example, are not provided. Internally, a `Regex` is composed of two DFAs. One is a "forward" DFA that finds the end offset of a match, where as the other is a "reverse" DFA that find the start offset of a match. See the [parent module](crate::dfa) for examples. */ #[cfg(feature = "alloc")] use alloc::vec::Vec; use crate::{ dfa::automaton::{Automaton, OverlappingState}, util::prefilter::{self, Prefilter}, MatchError, MultiMatch, }; #[cfg(feature = "alloc")] use crate::{ dfa::{dense, error::Error, sparse}, nfa::thompson, util::matchtypes::MatchKind, }; // When the alloc feature is enabled, the regex type sets its A type parameter // to default to an owned dense DFA. But without alloc, we set no default. This // makes things a lot more convenient in the common case, since writing out the // DFA types is pretty annoying. // // Since we have two different definitions but only want to write one doc // string, we use a macro to capture the doc and other attributes once and then // repeat them for each definition. macro_rules! define_regex_type { ($(#[$doc:meta])*) => { #[cfg(feature = "alloc")] $(#[$doc])* pub struct Regex<A = dense::OwnedDFA, P = prefilter::None> { prefilter: Option<P>, forward: A, reverse: A, utf8: bool, } #[cfg(not(feature = "alloc"))] $(#[$doc])* pub struct Regex<A, P = prefilter::None> { prefilter: Option<P>, forward: A, reverse: A, utf8: bool, } }; } define_regex_type!( /// A regular expression that uses deterministic finite automata for fast /// searching. /// /// A regular expression is comprised of two DFAs, a "forward" DFA and a /// "reverse" DFA. 
The forward DFA is responsible for detecting the end of /// a match while the reverse DFA is responsible for detecting the start /// of a match. Thus, in order to find the bounds of any given match, a /// forward search must first be run followed by a reverse search. A match /// found by the forward DFA guarantees that the reverse DFA will also find /// a match. /// /// The type of the DFA used by a `Regex` corresponds to the `A` type /// parameter, which must satisfy the [`Automaton`] trait. Typically, /// `A` is either a [`dense::DFA`](crate::dfa::dense::DFA) or a /// [`sparse::DFA`](crate::dfa::sparse::DFA), where dense DFAs use more /// memory but search faster, while sparse DFAs use less memory but search /// more slowly. /// /// By default, a regex's automaton type parameter is set to /// `dense::DFA<Vec<u32>>` when the `alloc` feature is enabled. For most /// in-memory work loads, this is the most convenient type that gives the /// best search performance. When the `alloc` feature is disabled, no /// default type is used. /// /// A `Regex` also has a `P` type parameter, which is used to select the /// prefilter used during search. By default, no prefilter is enabled by /// setting the type to default to [`prefilter::None`]. A prefilter can be /// enabled by using the [`Regex::prefilter`] method. /// /// # When should I use this? /// /// Generally speaking, if you can afford the overhead of building a full /// DFA for your regex, and you don't need things like capturing groups, /// then this is a good choice if you're looking to optimize for matching /// speed. Note however that its speed may be worse than a general purpose /// regex engine if you don't select a good [prefilter]. /// /// # Earliest vs Leftmost vs Overlapping /// /// The search routines exposed on a `Regex` reflect three different ways /// of searching: /// /// * "earliest" means to stop as soon as a match has been detected. 
/// * "leftmost" means to continue matching until the underlying /// automaton cannot advance. This reflects "standard" searching you /// might be used to in other regex engines. e.g., This permits /// non-greedy and greedy searching to work as you would expect. /// * "overlapping" means to find all possible matches, even if they /// overlap. /// /// Generally speaking, when doing an overlapping search, you'll want to /// build your regex DFAs with [`MatchKind::All`] semantics. Using /// [`MatchKind::LeftmostFirst`] semantics with overlapping searches is /// likely to lead to odd behavior since `LeftmostFirst` specifically omits /// some matches that can never be reported due to its semantics. /// /// The following example shows the differences between how these different /// types of searches impact looking for matches of `[a-z]+` in the /// haystack `abc`. /// /// ``` /// use regex_automata::{dfa::{self, dense}, MatchKind, MultiMatch}; /// /// let pattern = r"[a-z]+"; /// let haystack = "abc".as_bytes(); /// /// // With leftmost-first semantics, we test "earliest" and "leftmost". /// let re = dfa::regex::Builder::new() /// .dense(dense::Config::new().match_kind(MatchKind::LeftmostFirst)) /// .build(pattern)?; /// /// // "earliest" searching isn't impacted by greediness /// let mut it = re.find_earliest_iter(haystack); /// assert_eq!(Some(MultiMatch::must(0, 0, 1)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 1, 2)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 2, 3)), it.next()); /// assert_eq!(None, it.next()); /// /// // "leftmost" searching supports greediness (and non-greediness) /// let mut it = re.find_leftmost_iter(haystack); /// assert_eq!(Some(MultiMatch::must(0, 0, 3)), it.next()); /// assert_eq!(None, it.next()); /// /// // For overlapping, we want "all" match kind semantics. 
/// let re = dfa::regex::Builder::new() /// .dense(dense::Config::new().match_kind(MatchKind::All)) /// .build(pattern)?; /// /// // In the overlapping search, we find all three possible matches /// // starting at the beginning of the haystack. /// let mut it = re.find_overlapping_iter(haystack); /// assert_eq!(Some(MultiMatch::must(0, 0, 1)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 0, 2)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 0, 3)), it.next()); /// assert_eq!(None, it.next()); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` /// /// # Sparse DFAs /// /// Since a `Regex` is generic over the [`Automaton`] trait, it can be /// used with any kind of DFA. While this crate constructs dense DFAs by /// default, it is easy enough to build corresponding sparse DFAs, and then /// build a regex from them: /// /// ``` /// use regex_automata::dfa::regex::Regex; /// /// // First, build a regex that uses dense DFAs. /// let dense_re = Regex::new("foo[0-9]+")?; /// /// // Second, build sparse DFAs from the forward and reverse dense DFAs. /// let fwd = dense_re.forward().to_sparse()?; /// let rev = dense_re.reverse().to_sparse()?; /// /// // Third, build a new regex from the constituent sparse DFAs. /// let sparse_re = Regex::builder().build_from_dfas(fwd, rev); /// /// // A regex that uses sparse DFAs can be used just like with dense DFAs. /// assert_eq!(true, sparse_re.is_match(b"foo123")); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` /// /// Alternatively, one can use a [`Builder`] to construct a sparse DFA /// more succinctly. (Note though that dense DFAs are still constructed /// first internally, and then converted to sparse DFAs, as in the example /// above.) /// /// ``` /// use regex_automata::dfa::regex::Regex; /// /// let sparse_re = Regex::builder().build_sparse(r"foo[0-9]+")?; /// // A regex that uses sparse DFAs can be used just like with dense DFAs. 
/// assert!(sparse_re.is_match(b"foo123")); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` /// /// # Fallibility /// /// In non-default configurations, the DFAs generated in this module may /// return an error during a search. (Currently, the only way this happens /// is if quit bytes are added or Unicode word boundaries are heuristically /// enabled, both of which are turned off by default.) For convenience, the /// main search routines, like [`find_leftmost`](Regex::find_leftmost), /// will panic if an error occurs. However, if you need to use DFAs /// which may produce an error at search time, then there are fallible /// equivalents of all search routines. For example, for `find_leftmost`, /// its fallible analog is [`try_find_leftmost`](Regex::try_find_leftmost). /// The routines prefixed with `try_` return `Result<Option<MultiMatch>, /// MatchError>`, where as the infallible routines simply return /// `Option<MultiMatch>`. /// /// # Example /// /// This example shows how to cause a search to terminate if it sees a /// `\n` byte, and handle the error returned. This could be useful if, for /// example, you wanted to prevent a user supplied pattern from matching /// across a line boundary. /// /// ``` /// use regex_automata::{dfa::{self, regex::Regex}, MatchError}; /// /// let re = Regex::builder() /// .dense(dfa::dense::Config::new().quit(b'\n', true)) /// .build(r"foo\p{any}+bar")?; /// /// let haystack = "foo\nbar".as_bytes(); /// // Normally this would produce a match, since \p{any} contains '\n'. /// // But since we instructed the automaton to enter a quit state if a /// // '\n' is observed, this produces a match error instead. 
/// let expected = MatchError::Quit { byte: 0x0A, offset: 3 };
/// let got = re.try_find_leftmost(haystack).unwrap_err();
/// assert_eq!(expected, got);
///
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
#[derive(Clone, Debug)]
// NOTE(review): extraction damage — the item this derive attribute was
// attached to (presumably the `pub struct Regex<...>` definition holding the
// forward/reverse DFAs and optional prefilter that the methods below access
// via `self.forward()`, `self.reverse()` and `self.scanner()`) is missing
// here; only a stray `);` remains. Restore the struct from upstream before
// building — TODO confirm against the original source.
);

// Convenience constructors using the default (dense DFA) configuration.
// Only available with `alloc`, since building DFAs requires allocation.
#[cfg(feature = "alloc")]
impl Regex {
    /// Parse the given regular expression using the default configuration and
    /// return the corresponding regex.
    ///
    /// If you want a non-default configuration, then use the [`Builder`] to
    /// set your own configuration.
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// let re = Regex::new("foo[0-9]+bar")?;
    /// assert_eq!(
    ///     Some(MultiMatch::must(0, 3, 14)),
    ///     re.find_leftmost(b"zzzfoo12345barzzz"),
    /// );
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn new(pattern: &str) -> Result<Regex, Error> {
        Builder::new().build(pattern)
    }

    /// Like `new`, but parses multiple patterns into a single "regex set."
    /// This similarly uses the default regex configuration.
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// let re = Regex::new_many(&["[a-z]+", "[0-9]+"])?;
    ///
    /// let mut it = re.find_leftmost_iter(b"abc 1 foo 4567 0 quux");
    /// assert_eq!(Some(MultiMatch::must(0, 0, 3)), it.next());
    /// assert_eq!(Some(MultiMatch::must(1, 4, 5)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 6, 9)), it.next());
    /// assert_eq!(Some(MultiMatch::must(1, 10, 14)), it.next());
    /// assert_eq!(Some(MultiMatch::must(1, 15, 16)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 17, 21)), it.next());
    /// assert_eq!(None, it.next());
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn new_many<P: AsRef<str>>(patterns: &[P]) -> Result<Regex, Error> {
        Builder::new().build_many(patterns)
    }
}

#[cfg(feature = "alloc")]
impl Regex<sparse::DFA<Vec<u8>>> {
    /// Parse the given regular expression using the default configuration,
    /// except using sparse DFAs, and return the corresponding regex.
    ///
    /// If you want a non-default configuration, then use the [`Builder`] to
    /// set your own configuration.
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// let re = Regex::new_sparse("foo[0-9]+bar")?;
    /// assert_eq!(
    ///     Some(MultiMatch::must(0, 3, 14)),
    ///     re.find_leftmost(b"zzzfoo12345barzzz"),
    /// );
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn new_sparse(
        pattern: &str,
    ) -> Result<Regex<sparse::DFA<Vec<u8>>>, Error> {
        Builder::new().build_sparse(pattern)
    }

    /// Like `new`, but parses multiple patterns into a single "regex set"
    /// using sparse DFAs. This otherwise similarly uses the default regex
    /// configuration.
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// let re = Regex::new_many_sparse(&["[a-z]+", "[0-9]+"])?;
    ///
    /// let mut it = re.find_leftmost_iter(b"abc 1 foo 4567 0 quux");
    /// assert_eq!(Some(MultiMatch::must(0, 0, 3)), it.next());
    /// assert_eq!(Some(MultiMatch::must(1, 4, 5)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 6, 9)), it.next());
    /// assert_eq!(Some(MultiMatch::must(1, 10, 14)), it.next());
    /// assert_eq!(Some(MultiMatch::must(1, 15, 16)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 17, 21)), it.next());
    /// assert_eq!(None, it.next());
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn new_many_sparse<P: AsRef<str>>(
        patterns: &[P],
    ) -> Result<Regex<sparse::DFA<Vec<u8>>>, Error> {
        Builder::new().build_many_sparse(patterns)
    }
}

/// Convenience routines for regex construction.
#[cfg(feature = "alloc")]
impl Regex {
    /// Return a default configuration for a `Regex`.
    ///
    /// This is a convenience routine to avoid needing to import the `Config`
    /// type when customizing the construction of a regex.
    ///
    /// # Example
    ///
    /// This example shows how to disable UTF-8 mode for `Regex` iteration.
    /// When UTF-8 mode is disabled, the position immediately following an
    /// empty match is where the next search begins, instead of the next
    /// position of a UTF-8 encoded codepoint.
    ///
    /// ```
    /// use regex_automata::{dfa::regex::Regex, MultiMatch};
    ///
    /// let re = Regex::builder()
    ///     .configure(Regex::config().utf8(false))
    ///     .build(r"")?;
    /// let haystack = "a☃z".as_bytes();
    /// let mut it = re.find_leftmost_iter(haystack);
    /// assert_eq!(Some(MultiMatch::must(0, 0, 0)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 1, 1)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 2, 2)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 3, 3)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 4, 4)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 5, 5)), it.next());
    /// assert_eq!(None, it.next());
    ///
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn config() -> Config {
        Config::new()
    }

    /// Return a builder for configuring the construction of a `Regex`.
    ///
    /// This is a convenience routine to avoid needing to import the
    /// [`Builder`] type in common cases.
    ///
    /// # Example
    ///
    /// This example shows how to use the builder to disable UTF-8 mode
    /// everywhere.
    ///
    /// ```
    /// use regex_automata::{
    ///     dfa::regex::Regex,
    ///     nfa::thompson,
    ///     MultiMatch, SyntaxConfig,
    /// };
    ///
    /// let re = Regex::builder()
    ///     .configure(Regex::config().utf8(false))
    ///     .syntax(SyntaxConfig::new().utf8(false))
    ///     .thompson(thompson::Config::new().utf8(false))
    ///     .build(r"foo(?-u:[^b])ar.*")?;
    /// let haystack = b"\xFEfoo\xFFarzz\xE2\x98\xFF\n";
    /// let expected = Some(MultiMatch::must(0, 1, 9));
    /// let got = re.find_leftmost(haystack);
    /// assert_eq!(expected, got);
    ///
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn builder() -> Builder {
        Builder::new()
    }
}

/// Standard search routines for finding and iterating over matches.
impl<A: Automaton, P: Prefilter> Regex<A, P> {
    // Every routine in this impl searches the entire haystack by delegating
    // to its `*_at` counterpart with the range `[0, haystack.len())`.

    /// Returns true if and only if this regex matches the given haystack.
    ///
    /// This routine may short circuit if it knows that scanning future input
    /// will never lead to a different result. In particular, if the underlying
    /// DFA enters a match state or a dead state, then this routine will return
    /// `true` or `false`, respectively, without inspecting any future input.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_is_match`](Regex::try_is_match).
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::dfa::regex::Regex;
    ///
    /// let re = Regex::new("foo[0-9]+bar")?;
    /// assert_eq!(true, re.is_match(b"foo12345bar"));
    /// assert_eq!(false, re.is_match(b"foobar"));
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn is_match(&self, haystack: &[u8]) -> bool {
        self.is_match_at(haystack, 0, haystack.len())
    }

    /// Returns the first position at which a match is found.
    ///
    /// This routine stops scanning input in precisely the same circumstances
    /// as `is_match`. The key difference is that this routine returns the
    /// position at which it stopped scanning input if and only if a match
    /// was found. If no match is found, then `None` is returned.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_earliest`](Regex::try_find_earliest).
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// // Normally, the leftmost first match would greedily consume as many
    /// // decimal digits as it could. But a match is detected as soon as one
    /// // digit is seen.
    /// let re = Regex::new("foo[0-9]+")?;
    /// assert_eq!(
    ///     Some(MultiMatch::must(0, 0, 4)),
    ///     re.find_earliest(b"foo12345"),
    /// );
    ///
    /// // Normally, the end of the leftmost first match here would be 3,
    /// // but the "earliest" match semantics detect a match earlier.
    /// let re = Regex::new("abc|a")?;
    /// assert_eq!(Some(MultiMatch::must(0, 0, 1)), re.find_earliest(b"abc"));
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_earliest(&self, haystack: &[u8]) -> Option<MultiMatch> {
        self.find_earliest_at(haystack, 0, haystack.len())
    }

    /// Returns the start and end offset of the leftmost match. If no match
    /// exists, then `None` is returned.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_leftmost`](Regex::try_find_leftmost).
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// // Greediness is applied appropriately when compared to find_earliest.
    /// let re = Regex::new("foo[0-9]+")?;
    /// assert_eq!(
    ///     Some(MultiMatch::must(0, 3, 11)),
    ///     re.find_leftmost(b"zzzfoo12345zzz"),
    /// );
    ///
    /// // Even though a match is found after reading the first byte (`a`),
    /// // the default leftmost-first match semantics demand that we find the
    /// // earliest match that prefers earlier parts of the pattern over latter
    /// // parts.
    /// let re = Regex::new("abc|a")?;
    /// assert_eq!(Some(MultiMatch::must(0, 0, 3)), re.find_leftmost(b"abc"));
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_leftmost(&self, haystack: &[u8]) -> Option<MultiMatch> {
        self.find_leftmost_at(haystack, 0, haystack.len())
    }

    /// Search for the first overlapping match in `haystack`.
    ///
    /// This routine is principally useful when searching for multiple patterns
    /// on inputs where multiple patterns may match the same regions of text.
    /// In particular, callers must preserve the automaton's search state from
    /// prior calls so that the implementation knows where the last match
    /// occurred and which pattern was reported.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_overlapping`](Regex::try_find_overlapping).
    ///
    /// # Example
    ///
    /// This example shows how to run an overlapping search with multiple
    /// regexes.
    ///
    /// ```
    /// use regex_automata::{dfa::{self, regex::Regex}, MatchKind, MultiMatch};
    ///
    /// let re = Regex::builder()
    ///     .dense(dfa::dense::Config::new().match_kind(MatchKind::All))
    ///     .build_many(&[r"\w+$", r"\S+$"])?;
    /// let haystack = "@foo".as_bytes();
    /// let mut state = dfa::OverlappingState::start();
    ///
    /// let expected = Some(MultiMatch::must(1, 0, 4));
    /// let got = re.find_overlapping(haystack, &mut state);
    /// assert_eq!(expected, got);
    ///
    /// // The first pattern also matches at the same position, so re-running
    /// // the search will yield another match. Notice also that the first
    /// // pattern is returned after the second. This is because the second
    /// // pattern begins its match before the first, is therefore an earlier
    /// // match and is thus reported first.
    /// let expected = Some(MultiMatch::must(0, 1, 4));
    /// let got = re.find_overlapping(haystack, &mut state);
    /// assert_eq!(expected, got);
    ///
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_overlapping(
        &self,
        haystack: &[u8],
        state: &mut OverlappingState,
    ) -> Option<MultiMatch> {
        self.find_overlapping_at(haystack, 0, haystack.len(), state)
    }

    /// Returns an iterator over all non-overlapping "earliest" matches.
    ///
    /// Match positions are reported as soon as a match is known to occur, even
    /// if the standard leftmost match would be longer.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error during iteration, then iteration
    /// panics. This only occurs in non-default configurations where quit bytes
    /// are used or Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_earliest_iter`](Regex::try_find_earliest_iter).
    ///
    /// # Example
    ///
    /// This example shows how to run an "earliest" iterator.
    ///
    /// ```
    /// use regex_automata::{dfa::regex::Regex, MultiMatch};
    ///
    /// let re = Regex::new("[0-9]+")?;
    /// let haystack = "123".as_bytes();
    ///
    /// // Normally, a standard leftmost iterator would return a single
    /// // match, but since "earliest" detects matches earlier, we get
    /// // three matches.
    /// let mut it = re.find_earliest_iter(haystack);
    /// assert_eq!(Some(MultiMatch::must(0, 0, 1)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 1, 2)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 2, 3)), it.next());
    /// assert_eq!(None, it.next());
    ///
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_earliest_iter<'r, 't>(
        &'r self,
        haystack: &'t [u8],
    ) -> FindEarliestMatches<'r, 't, A, P> {
        FindEarliestMatches::new(self, haystack)
    }

    /// Returns an iterator over all non-overlapping leftmost matches in the
    /// given bytes. If no match exists, then the iterator yields no elements.
    ///
    /// This corresponds to the "standard" regex search iterator.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error during iteration, then iteration
    /// panics. This only occurs in non-default configurations where quit bytes
    /// are used or Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_leftmost_iter`](Regex::try_find_leftmost_iter).
    ///
    /// # Example
    ///
    /// ```
    /// use regex_automata::{MultiMatch, dfa::regex::Regex};
    ///
    /// let re = Regex::new("foo[0-9]+")?;
    /// let text = b"foo1 foo12 foo123";
    /// let matches: Vec<MultiMatch> = re.find_leftmost_iter(text).collect();
    /// assert_eq!(matches, vec![
    ///     MultiMatch::must(0, 0, 4),
    ///     MultiMatch::must(0, 5, 10),
    ///     MultiMatch::must(0, 11, 17),
    /// ]);
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_leftmost_iter<'r, 't>(
        &'r self,
        haystack: &'t [u8],
    ) -> FindLeftmostMatches<'r, 't, A, P> {
        FindLeftmostMatches::new(self, haystack)
    }

    /// Returns an iterator over all overlapping matches in the given haystack.
    ///
    /// This routine is principally useful when searching for multiple patterns
    /// on inputs where multiple patterns may match the same regions of text.
    /// The iterator takes care of handling the overlapping state that must be
    /// threaded through every search.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error during iteration, then iteration
    /// panics. This only occurs in non-default configurations where quit bytes
    /// are used or Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_overlapping_iter`](Regex::try_find_overlapping_iter).
    ///
    /// # Example
    ///
    /// This example shows how to run an overlapping search with multiple
    /// regexes.
    ///
    /// ```
    /// use regex_automata::{dfa::{self, regex::Regex}, MatchKind, MultiMatch};
    ///
    /// let re = Regex::builder()
    ///     .dense(dfa::dense::Config::new().match_kind(MatchKind::All))
    ///     .build_many(&[r"\w+$", r"\S+$"])?;
    /// let haystack = "@foo".as_bytes();
    ///
    /// let mut it = re.find_overlapping_iter(haystack);
    /// assert_eq!(Some(MultiMatch::must(1, 0, 4)), it.next());
    /// assert_eq!(Some(MultiMatch::must(0, 1, 4)), it.next());
    /// assert_eq!(None, it.next());
    ///
    /// # Ok::<(), Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_overlapping_iter<'r, 't>(
        &'r self,
        haystack: &'t [u8],
    ) -> FindOverlappingMatches<'r, 't, A, P> {
        FindOverlappingMatches::new(self, haystack)
    }
}

/// Lower level infallible search routines that permit controlling where
/// the search starts and ends in a particular sequence. This is useful for
/// executing searches that need to take surrounding context into account. This
/// is required for correctly implementing iteration because of look-around
/// operators (`^`, `$`, `\b`).
impl<A: Automaton, P: Prefilter> Regex<A, P> {
    /// Returns true if and only if this regex matches the given haystack.
    ///
    /// This routine may short circuit if it knows that scanning future input
    /// will never lead to a different result. In particular, if the underlying
    /// DFA enters a match state or a dead state, then this routine will return
    /// `true` or `false`, respectively, without inspecting any future input.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics.
    /// This only occurs in non-default configurations where quit bytes are
    /// used or Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_is_match_at`](Regex::try_is_match_at).
    pub fn is_match_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> bool {
        // Infallible wrapper: unwraps the `try_` counterpart, which cannot
        // error in the default configuration.
        self.try_is_match_at(haystack, start, end).unwrap()
    }

    /// Returns the first position at which a match is found.
    ///
    /// This routine stops scanning input in precisely the same circumstances
    /// as `is_match`. The key difference is that this routine returns the
    /// position at which it stopped scanning input if and only if a match
    /// was found. If no match is found, then `None` is returned.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// This is useful when implementing an iterator over matches
    /// within the same haystack, which cannot be done correctly by simply
    /// providing a subslice of `haystack`.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_earliest_at`](Regex::try_find_earliest_at).
    pub fn find_earliest_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Option<MultiMatch> {
        self.try_find_earliest_at(haystack, start, end).unwrap()
    }

    /// Returns the same as `find_leftmost`, but starts the search at the given
    /// offset.
    ///
    /// The significance of the starting point is that it takes the surrounding
    /// context into consideration. For example, if the DFA is anchored, then
    /// a match can only occur when `start == 0`.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// This is useful when implementing an iterator over matches within the
    /// same haystack, which cannot be done correctly by simply providing a
    /// subslice of `haystack`.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_leftmost_at`](Regex::try_find_leftmost_at).
    pub fn find_leftmost_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Option<MultiMatch> {
        self.try_find_leftmost_at(haystack, start, end).unwrap()
    }

    /// Search for the first overlapping match within a given range of
    /// `haystack`.
    ///
    /// This routine is principally useful when searching for multiple patterns
    /// on inputs where multiple patterns may match the same regions of text.
    /// In particular, callers must preserve the automaton's search state from
    /// prior calls so that the implementation knows where the last match
    /// occurred and which pattern was reported.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// This is useful when implementing an iterator over matches
    /// within the same haystack, which cannot be done correctly by simply
    /// providing a subslice of `haystack`.
    ///
    /// # Panics
    ///
    /// If the underlying DFAs return an error, then this routine panics. This
    /// only occurs in non-default configurations where quit bytes are used or
    /// Unicode word boundaries are heuristically enabled.
    ///
    /// The fallible version of this routine is
    /// [`try_find_overlapping_at`](Regex::try_find_overlapping_at).
    pub fn find_overlapping_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
        state: &mut OverlappingState,
    ) -> Option<MultiMatch> {
        self.try_find_overlapping_at(haystack, start, end, state).unwrap()
    }
}

/// Fallible search routines. These may return an error when the underlying
/// DFAs have been configured in a way that permits them to fail during a
/// search.
///
/// Errors during search only occur when the DFA has been explicitly
/// configured to do so, usually by specifying one or more "quit" bytes or by
/// heuristically enabling Unicode word boundaries.
///
/// Errors will never be returned using the default configuration. So these
/// fallible routines are only needed for particular configurations.
impl<A: Automaton, P: Prefilter> Regex<A, P> {
    /// Returns true if and only if this regex matches the given haystack.
    ///
    /// This routine may short circuit if it knows that scanning future input
    /// will never lead to a different result. In particular, if the underlying
    /// DFA enters a match state or a dead state, then this routine will return
    /// `true` or `false`, respectively, without inspecting any future input.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`is_match`](Regex::is_match).
    pub fn try_is_match(&self, haystack: &[u8]) -> Result<bool, MatchError> {
        self.try_is_match_at(haystack, 0, haystack.len())
    }

    /// Returns the first position at which a match is found.
    ///
    /// This routine stops scanning input in precisely the same circumstances
    /// as `is_match`. The key difference is that this routine returns the
    /// position at which it stopped scanning input if and only if a match
    /// was found. If no match is found, then `None` is returned.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_earliest`](Regex::find_earliest).
    pub fn try_find_earliest(
        &self,
        haystack: &[u8],
    ) -> Result<Option<MultiMatch>, MatchError> {
        self.try_find_earliest_at(haystack, 0, haystack.len())
    }

    /// Returns the start and end offset of the leftmost match. If no match
    /// exists, then `None` is returned.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_leftmost`](Regex::find_leftmost).
    pub fn try_find_leftmost(
        &self,
        haystack: &[u8],
    ) -> Result<Option<MultiMatch>, MatchError> {
        self.try_find_leftmost_at(haystack, 0, haystack.len())
    }

    /// Search for the first overlapping match in `haystack`.
    ///
    /// This routine is principally useful when searching for multiple patterns
    /// on inputs where multiple patterns may match the same regions of text.
    /// In particular, callers must preserve the automaton's search state from
    /// prior calls so that the implementation knows where the last match
    /// occurred and which pattern was reported.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_overlapping`](Regex::find_overlapping).
    pub fn try_find_overlapping(
        &self,
        haystack: &[u8],
        state: &mut OverlappingState,
    ) -> Result<Option<MultiMatch>, MatchError> {
        self.try_find_overlapping_at(haystack, 0, haystack.len(), state)
    }

    /// Returns an iterator over all non-overlapping "earliest" matches.
    ///
    /// Match positions are reported as soon as a match is known to occur, even
    /// if the standard leftmost match would be longer.
    ///
    /// # Errors
    ///
    /// This iterator only yields errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_earliest_iter`](Regex::find_earliest_iter).
    pub fn try_find_earliest_iter<'r, 't>(
        &'r self,
        haystack: &'t [u8],
    ) -> TryFindEarliestMatches<'r, 't, A, P> {
        TryFindEarliestMatches::new(self, haystack)
    }

    /// Returns an iterator over all non-overlapping leftmost matches in the
    /// given bytes. If no match exists, then the iterator yields no elements.
    ///
    /// This corresponds to the "standard" regex search iterator.
    ///
    /// # Errors
    ///
    /// This iterator only yields errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_leftmost_iter`](Regex::find_leftmost_iter).
    pub fn try_find_leftmost_iter<'r, 't>(
        &'r self,
        haystack: &'t [u8],
    ) -> TryFindLeftmostMatches<'r, 't, A, P> {
        TryFindLeftmostMatches::new(self, haystack)
    }

    /// Returns an iterator over all overlapping matches in the given haystack.
    ///
    /// This routine is principally useful when searching for multiple patterns
    /// on inputs where multiple patterns may match the same regions of text.
    /// The iterator takes care of handling the overlapping state that must be
    /// threaded through every search.
    ///
    /// # Errors
    ///
    /// This iterator only yields errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_overlapping_iter`](Regex::find_overlapping_iter).
    pub fn try_find_overlapping_iter<'r, 't>(
        &'r self,
        haystack: &'t [u8],
    ) -> TryFindOverlappingMatches<'r, 't, A, P> {
        TryFindOverlappingMatches::new(self, haystack)
    }
}

/// Lower level fallible search routines that permit controlling where the
/// search starts and ends in a particular sequence.
impl<A: Automaton, P: Prefilter> Regex<A, P> {
    /// Returns true if and only if this regex matches the given haystack.
    ///
    /// This routine may short circuit if it knows that scanning future input
    /// will never lead to a different result. In particular, if the underlying
    /// DFA enters a match state or a dead state, then this routine will return
    /// `true` or `false`, respectively, without inspecting any future input.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used, Unicode word boundaries are heuristically
    /// enabled or limits are set on the number of times the lazy DFA's cache
    /// may be cleared.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`is_match_at`](Regex::is_match_at).
    pub fn try_is_match_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Result<bool, MatchError> {
        // A forward "earliest" search suffices here: answering yes/no does
        // not require resolving the match start, so the reverse DFA is never
        // consulted.
        self.forward()
            .find_earliest_fwd_at(
                self.scanner().as_mut(),
                None,
                haystack,
                start,
                end,
            )
            .map(|x| x.is_some())
    }

    /// Returns the first position at which a match is found.
    ///
    /// This routine stops scanning input in precisely the same circumstances
    /// as `is_match`. The key difference is that this routine returns the
    /// position at which it stopped scanning input if and only if a match
    /// was found. If no match is found, then `None` is returned.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// This is useful when implementing an iterator over matches
    /// within the same haystack, which cannot be done correctly by simply
    /// providing a subslice of `haystack`.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_earliest_at`](Regex::find_earliest_at).
    pub fn try_find_earliest_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Result<Option<MultiMatch>, MatchError> {
        self.try_find_earliest_at_imp(
            self.scanner().as_mut(),
            haystack,
            start,
            end,
        )
    }

    /// The implementation of "earliest" searching, where a prefilter scanner
    /// may be given.
    ///
    /// Strategy: run the forward DFA to find the match end, then run the
    /// reverse DFA over `[start, end)` to recover the match start.
    fn try_find_earliest_at_imp(
        &self,
        pre: Option<&mut prefilter::Scanner>,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Result<Option<MultiMatch>, MatchError> {
        // N.B. We use `&&A` here to call `Automaton` methods, which ensures
        // that we always use the `impl Automaton for &A` for calling methods.
        // Since this is the usual way that automata are used, this helps
        // reduce the number of monomorphized copies of the search code.
        let (fwd, rev) = (self.forward(), self.reverse());
        let end = match (&fwd)
            .find_earliest_fwd_at(pre, None, haystack, start, end)?
        {
            None => return Ok(None),
            Some(end) => end,
        };
        // N.B. The only time we need to tell the reverse searcher the pattern
        // to match is in the overlapping case, since it's ambiguous. In the
        // leftmost case, I have tentatively convinced myself that it isn't
        // necessary and the reverse search will always find the same pattern
        // to match as the forward search. But I lack a rigorous proof.
        let start = (&rev)
            .find_earliest_rev_at(None, haystack, start, end.offset())?
            .expect("reverse search must match if forward search does");
        assert_eq!(
            start.pattern(),
            end.pattern(),
            "forward and reverse search must match same pattern"
        );
        assert!(start.offset() <= end.offset());
        Ok(Some(MultiMatch::new(end.pattern(), start.offset(), end.offset())))
    }

    /// Returns the start and end offset of the leftmost match. If no match
    /// exists, then `None` is returned.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// This is useful when implementing an iterator over matches
    /// within the same haystack, which cannot be done correctly by simply
    /// providing a subslice of `haystack`.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_leftmost_at`](Regex::find_leftmost_at).
    pub fn try_find_leftmost_at(
        &self,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Result<Option<MultiMatch>, MatchError> {
        self.try_find_leftmost_at_imp(
            self.scanner().as_mut(),
            haystack,
            start,
            end,
        )
    }

    /// The implementation of leftmost searching, where a prefilter scanner
    /// may be given.
    ///
    /// Same forward-then-reverse strategy as the "earliest" implementation
    /// above, but using leftmost search semantics in both directions.
    fn try_find_leftmost_at_imp(
        &self,
        scanner: Option<&mut prefilter::Scanner>,
        haystack: &[u8],
        start: usize,
        end: usize,
    ) -> Result<Option<MultiMatch>, MatchError> {
        // N.B. We use `&&A` here to call `Automaton` methods, which ensures
        // that we always use the `impl Automaton for &A` for calling methods.
        // Since this is the usual way that automata are used, this helps
        // reduce the number of monomorphized copies of the search code.
        let (fwd, rev) = (self.forward(), self.reverse());
        let end = match (&fwd)
            .find_leftmost_fwd_at(scanner, None, haystack, start, end)?
        {
            None => return Ok(None),
            Some(end) => end,
        };
        // N.B. The only time we need to tell the reverse searcher the pattern
        // to match is in the overlapping case, since it's ambiguous. In the
        // leftmost case, I have tentatively convinced myself that it isn't
        // necessary and the reverse search will always find the same pattern
        // to match as the forward search. But I lack a rigorous proof. Why not
        // just provide the pattern anyway? Well, if it is needed, then leaving
        // it out gives us a chance to find a witness.
        let start = (&rev)
            .find_leftmost_rev_at(None, haystack, start, end.offset())?
            .expect("reverse search must match if forward search does");
        assert_eq!(
            start.pattern(),
            end.pattern(),
            "forward and reverse search must match same pattern",
        );
        assert!(start.offset() <= end.offset());
        Ok(Some(MultiMatch::new(end.pattern(), start.offset(), end.offset())))
    }

    /// Search for the first overlapping match within a given range of
    /// `haystack`.
    ///
    /// This routine is principally useful when searching for multiple patterns
    /// on inputs where multiple patterns may match the same regions of text.
    /// In particular, callers must preserve the automaton's search state from
    /// prior calls so that the implementation knows where the last match
    /// occurred and which pattern was reported.
    ///
    /// # Searching a substring of the haystack
    ///
    /// Being an "at" search routine, this permits callers to search a
    /// substring of `haystack` by specifying a range in `haystack`.
    /// Why expose this as an API instead of just asking callers to use
    /// `&input[start..end]`? The reason is that regex matching often wants
    /// to take the surrounding context into account in order to handle
    /// look-around (`^`, `$` and `\b`).
    ///
    /// This is useful when implementing an iterator over matches
    /// within the same haystack, which cannot be done correctly by simply
    /// providing a subslice of `haystack`.
    ///
    /// # Errors
    ///
    /// This routine only errors if the search could not complete. For
    /// DFA-based regexes, this only occurs in a non-default configuration
    /// where quit bytes are used or Unicode word boundaries are heuristically
    /// enabled.
    ///
    /// When a search cannot complete, callers cannot know whether a match
    /// exists or not.
    ///
    /// The infallible (panics on error) version of this routine is
    /// [`find_overlapping_at`](Regex::find_overlapping_at).
pub fn try_find_overlapping_at( &self, haystack: &[u8], start: usize, end: usize, state: &mut OverlappingState, ) -> Result<Option<MultiMatch>, MatchError> { self.try_find_overlapping_at_imp( self.scanner().as_mut(), haystack, start, end, state, ) } /// The implementation of overlapping search at a given range in /// `haystack`, where `scanner` is a prefilter (if active) and `state` is /// the current state of the search. fn try_find_overlapping_at_imp( &self, scanner: Option<&mut prefilter::Scanner>, haystack: &[u8], start: usize, end: usize, state: &mut OverlappingState, ) -> Result<Option<MultiMatch>, MatchError> { // N.B. We use `&&A` here to call `Automaton` methods, which ensures // that we always use the `impl Automaton for &A` for calling methods. // Since this is the usual way that automata are used, this helps // reduce the number of monomorphized copies of the search code. let (fwd, rev) = (self.forward(), self.reverse()); // TODO: Decide whether it's worth making this assert work. It doesn't // work currently because 'has_starts_for_each_pattern' isn't on the // Automaton trait. Without this assert, we still get a panic, but it's // a bit more inscrutable. // assert!( // rev.has_starts_for_each_pattern(), // "overlapping searches require that the reverse DFA is \ // compiled with the 'starts_for_each_pattern' option", // ); let end = match (&fwd).find_overlapping_fwd_at( scanner, None, haystack, start, end, state, )? { None => return Ok(None), Some(end) => end, }; // Unlike the leftmost cases, the reverse overlapping search may match // a different pattern than the forward search. See test failures when // using `None` instead of `Some(end.pattern())` below. Thus, we must // run our reverse search using the pattern that matched in the forward // direction. let start = (&rev) .find_leftmost_rev_at( Some(end.pattern()), haystack, 0, end.offset(), )? 
.expect("reverse search must match if forward search does"); assert!(start.offset() <= end.offset()); assert_eq!(start.pattern(), end.pattern()); Ok(Some(MultiMatch::new(end.pattern(), start.offset(), end.offset()))) } } /// Non-search APIs for querying information about the regex and setting a /// prefilter. impl<A: Automaton, P: Prefilter> Regex<A, P> { /// Attach the given prefilter to this regex. pub fn with_prefilter<Q: Prefilter>(self, prefilter: Q) -> Regex<A, Q> { Regex { prefilter: Some(prefilter), forward: self.forward, reverse: self.reverse, utf8: self.utf8, } } /// Remove any prefilter from this regex. pub fn without_prefilter(self) -> Regex<A> { Regex { prefilter: None, forward: self.forward, reverse: self.reverse, utf8: self.utf8, } } /// Return the underlying DFA responsible for forward matching. /// /// This is useful for accessing the underlying DFA and converting it to /// some other format or size. See the [`Builder::build_from_dfas`] docs /// for an example of where this might be useful. pub fn forward(&self) -> &A { &self.forward } /// Return the underlying DFA responsible for reverse matching. /// /// This is useful for accessing the underlying DFA and converting it to /// some other format or size. See the [`Builder::build_from_dfas`] docs /// for an example of where this might be useful. pub fn reverse(&self) -> &A { &self.reverse } /// Returns the total number of patterns matched by this regex. /// /// # Example /// /// ``` /// use regex_automata::{MultiMatch, dfa::regex::Regex}; /// /// let re = Regex::new_many(&[r"[a-z]+", r"[0-9]+", r"\w+"])?; /// assert_eq!(3, re.pattern_count()); /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` pub fn pattern_count(&self) -> usize { assert_eq!( self.forward().pattern_count(), self.reverse().pattern_count() ); self.forward().pattern_count() } /// Convenience function for returning this regex's prefilter as a trait /// object. /// /// If this regex doesn't have a prefilter, then `None` is returned. 
pub fn prefilter(&self) -> Option<&dyn Prefilter> { match self.prefilter { None => None, Some(ref x) => Some(&*x), } } /// Convenience function for returning a prefilter scanner. fn scanner(&self) -> Option<prefilter::Scanner> { self.prefilter().map(prefilter::Scanner::new) } } /// An iterator over all non-overlapping earliest matches for a particular /// infallible search. /// /// The iterator yields a [`MultiMatch`] value until no more matches could be /// found. If the underlying search returns an error, then this panics. /// /// `A` is the type used to represent the underlying DFAs used by the regex, /// while `P` is the type of prefilter used, if any. The lifetime variables are /// as follows: /// /// * `'r` is the lifetime of the regular expression itself. /// * `'t` is the lifetime of the text being searched. #[derive(Clone, Debug)] pub struct FindEarliestMatches<'r, 't, A, P>( TryFindEarliestMatches<'r, 't, A, P>, ); impl<'r, 't, A: Automaton, P: Prefilter> FindEarliestMatches<'r, 't, A, P> { fn new( re: &'r Regex<A, P>, text: &'t [u8], ) -> FindEarliestMatches<'r, 't, A, P> { FindEarliestMatches(TryFindEarliestMatches::new(re, text)) } } impl<'r, 't, A: Automaton, P: Prefilter> Iterator for FindEarliestMatches<'r, 't, A, P> { type Item = MultiMatch; fn next(&mut self) -> Option<MultiMatch> { next_unwrap(self.0.next()) } } /// An iterator over all non-overlapping leftmost matches for a particular /// infallible search. /// /// The iterator yields a [`MultiMatch`] value until no more matches could be /// found. If the underlying search returns an error, then this panics. /// /// `A` is the type used to represent the underlying DFAs used by the regex, /// while `P` is the type of prefilter used, if any. The lifetime variables are /// as follows: /// /// * `'r` is the lifetime of the regular expression itself. /// * `'t` is the lifetime of the text being searched. 
#[derive(Clone, Debug)] pub struct FindLeftmostMatches<'r, 't, A, P>( TryFindLeftmostMatches<'r, 't, A, P>, ); impl<'r, 't, A: Automaton, P: Prefilter> FindLeftmostMatches<'r, 't, A, P> { fn new( re: &'r Regex<A, P>, text: &'t [u8], ) -> FindLeftmostMatches<'r, 't, A, P> { FindLeftmostMatches(TryFindLeftmostMatches::new(re, text)) } } impl<'r, 't, A: Automaton, P: Prefilter> Iterator for FindLeftmostMatches<'r, 't, A, P> { type Item = MultiMatch; fn next(&mut self) -> Option<MultiMatch> { next_unwrap(self.0.next()) } } /// An iterator over all overlapping matches for a particular infallible /// search. /// /// The iterator yields a [`MultiMatch`] value until no more matches could be /// found. If the underlying search returns an error, then this panics. /// /// `A` is the type used to represent the underlying DFAs used by the regex, /// while `P` is the type of prefilter used, if any. The lifetime variables are /// as follows: /// /// * `'r` is the lifetime of the regular expression itself. /// * `'t` is the lifetime of the text being searched. #[derive(Clone, Debug)] pub struct FindOverlappingMatches<'r, 't, A: Automaton, P>( TryFindOverlappingMatches<'r, 't, A, P>, ); impl<'r, 't, A: Automaton, P: Prefilter> FindOverlappingMatches<'r, 't, A, P> { fn new( re: &'r Regex<A, P>, text: &'t [u8], ) -> FindOverlappingMatches<'r, 't, A, P> { FindOverlappingMatches(TryFindOverlappingMatches::new(re, text)) } } impl<'r, 't, A: Automaton, P: Prefilter> Iterator for FindOverlappingMatches<'r, 't, A, P> { type Item = MultiMatch; fn next(&mut self) -> Option<MultiMatch> { next_unwrap(self.0.next()) } } /// An iterator over all non-overlapping earliest matches for a particular /// fallible search. /// /// The iterator yields a [`MultiMatch`] value until no more matches could be /// found. /// /// `A` is the type used to represent the underlying DFAs used by the regex, /// while `P` is the type of prefilter used, if any. 
The lifetime variables are /// as follows: /// /// * `'r` is the lifetime of the regular expression itself. /// * `'t` is the lifetime of the text being searched. #[derive(Clone, Debug)] pub struct TryFindEarliestMatches<'r, 't, A, P> { re: &'r Regex<A, P>, scanner: Option<prefilter::Scanner<'r>>, text: &'t [u8], last_end: usize, last_match: Option<usize>, } impl<'r, 't, A: Automaton, P: Prefilter> TryFindEarliestMatches<'r, 't, A, P> { fn new( re: &'r Regex<A, P>, text: &'t [u8], ) -> TryFindEarliestMatches<'r, 't, A, P> { let scanner = re.scanner(); TryFindEarliestMatches { re, scanner, text, last_end: 0, last_match: None, } } } impl<'r, 't, A: Automaton, P: Prefilter> Iterator for TryFindEarliestMatches<'r, 't, A, P> { type Item = Result<MultiMatch, MatchError>; fn next(&mut self) -> Option<Result<MultiMatch, MatchError>> { if self.last_end > self.text.len() { return None; } let result = self.re.try_find_earliest_at_imp( self.scanner.as_mut(), self.text, self.last_end, self.text.len(), ); let m = match result { Err(err) => return Some(Err(err)), Ok(None) => return None, Ok(Some(m)) => m, }; if m.is_empty() { // This is an empty match. To ensure we make progress, start // the next search at the smallest possible starting position // of the next match following this one. self.last_end = if self.re.utf8 { crate::util::next_utf8(self.text, m.end()) } else { m.end() + 1 }; // Don't accept empty matches immediately following a match. // Just move on to the next match. if Some(m.end()) == self.last_match { return self.next(); } } else { self.last_end = m.end(); } self.last_match = Some(m.end()); Some(Ok(m)) } } /// An iterator over all non-overlapping leftmost matches for a particular /// fallible search. /// /// The iterator yields a [`MultiMatch`] value until no more matches could be /// found. /// /// `A` is the type used to represent the underlying DFAs used by the regex, /// while `P` is the type of prefilter used, if any. 
The lifetime variables are /// as follows: /// /// * `'r` is the lifetime of the regular expression itself. /// * `'t` is the lifetime of the text being searched. #[derive(Clone, Debug)] pub struct TryFindLeftmostMatches<'r, 't, A, P> { re: &'r Regex<A, P>, scanner: Option<prefilter::Scanner<'r>>, text: &'t [u8], last_end: usize, last_match: Option<usize>, } impl<'r, 't, A: Automaton, P: Prefilter> TryFindLeftmostMatches<'r, 't, A, P> { fn new( re: &'r Regex<A, P>, text: &'t [u8], ) -> TryFindLeftmostMatches<'r, 't, A, P> { let scanner = re.scanner(); TryFindLeftmostMatches { re, scanner, text, last_end: 0, last_match: None, } } } impl<'r, 't, A: Automaton, P: Prefilter> Iterator for TryFindLeftmostMatches<'r, 't, A, P> { type Item = Result<MultiMatch, MatchError>; fn next(&mut self) -> Option<Result<MultiMatch, MatchError>> { if self.last_end > self.text.len() { return None; } let result = self.re.try_find_leftmost_at_imp( self.scanner.as_mut(), self.text, self.last_end, self.text.len(), ); let m = match result { Err(err) => return Some(Err(err)), Ok(None) => return None, Ok(Some(m)) => m, }; if m.is_empty() { // This is an empty match. To ensure we make progress, start // the next search at the smallest possible starting position // of the next match following this one. self.last_end = if self.re.utf8 { crate::util::next_utf8(self.text, m.end()) } else { m.end() + 1 }; // Don't accept empty matches immediately following a match. // Just move on to the next match. if Some(m.end()) == self.last_match { return self.next(); } } else { self.last_end = m.end(); } self.last_match = Some(m.end()); Some(Ok(m)) } } /// An iterator over all overlapping matches for a particular fallible search. /// /// The iterator yields a [`MultiMatch`] value until no more matches could be /// found. /// /// `A` is the type used to represent the underlying DFAs used by the regex, /// while `P` is the type of prefilter used, if any. 
The lifetime variables are /// as follows: /// /// * `'r` is the lifetime of the regular expression itself. /// * `'t` is the lifetime of the text being searched. #[derive(Clone, Debug)] pub struct TryFindOverlappingMatches<'r, 't, A: Automaton, P> { re: &'r Regex<A, P>, scanner: Option<prefilter::Scanner<'r>>, text: &'t [u8], last_end: usize, state: OverlappingState, } impl<'r, 't, A: Automaton, P: Prefilter> TryFindOverlappingMatches<'r, 't, A, P> { fn new( re: &'r Regex<A, P>, text: &'t [u8], ) -> TryFindOverlappingMatches<'r, 't, A, P> { let scanner = re.scanner(); TryFindOverlappingMatches { re, scanner, text, last_end: 0, state: OverlappingState::start(), } } } impl<'r, 't, A: Automaton, P: Prefilter> Iterator for TryFindOverlappingMatches<'r, 't, A, P> { type Item = Result<MultiMatch, MatchError>; fn next(&mut self) -> Option<Result<MultiMatch, MatchError>> { if self.last_end > self.text.len() { return None; } let result = self.re.try_find_overlapping_at_imp( self.scanner.as_mut(), self.text, self.last_end, self.text.len(), &mut self.state, ); let m = match result { Err(err) => return Some(Err(err)), Ok(None) => return None, Ok(Some(m)) => m, }; // Unlike the non-overlapping case, we're OK with empty matches at this // level. In particular, the overlapping search algorithm is itself // responsible for ensuring that progress is always made. self.last_end = m.end(); Some(Ok(m)) } } /// The configuration used for compiling a DFA-backed regex. /// /// A regex configuration is a simple data object that is typically used with /// [`Builder::configure`]. #[cfg(feature = "alloc")] #[derive(Clone, Copy, Debug, Default)] pub struct Config { utf8: Option<bool>, } #[cfg(feature = "alloc")] impl Config { /// Return a new default regex compiler configuration. pub fn new() -> Config { Config::default() } /// Whether to enable UTF-8 mode or not. 
/// /// When UTF-8 mode is enabled (the default) and an empty match is seen, /// the iterators on [`Regex`] will always start the next search at the /// next UTF-8 encoded codepoint when searching valid UTF-8. When UTF-8 /// mode is disabled, such searches are begun at the next byte offset. /// /// If this mode is enabled and invalid UTF-8 is given to search, then /// behavior is unspecified. /// /// Generally speaking, one should enable this when /// [`SyntaxConfig::utf8`](crate::SyntaxConfig::utf8) /// and /// [`thompson::Config::utf8`](crate::nfa::thompson::Config::utf8) /// are enabled, and disable it otherwise. /// /// # Example /// /// This example demonstrates the differences between when this option is /// enabled and disabled. The differences only arise when the regex can /// return matches of length zero. /// /// In this first snippet, we show the results when UTF-8 mode is disabled. /// /// ``` /// use regex_automata::{dfa::regex::Regex, MultiMatch}; /// /// let re = Regex::builder() /// .configure(Regex::config().utf8(false)) /// .build(r"")?; /// let haystack = "a☃z".as_bytes(); /// let mut it = re.find_leftmost_iter(haystack); /// assert_eq!(Some(MultiMatch::must(0, 0, 0)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 1, 1)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 2, 2)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 3, 3)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 4, 4)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 5, 5)), it.next()); /// assert_eq!(None, it.next()); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` /// /// And in this snippet, we execute the same search on the same haystack, /// but with UTF-8 mode enabled. Notice that byte offsets that would /// otherwise split the encoding of `☃` are not returned. 
/// /// ``` /// use regex_automata::{dfa::regex::Regex, MultiMatch}; /// /// let re = Regex::builder() /// .configure(Regex::config().utf8(true)) /// .build(r"")?; /// let haystack = "a☃z".as_bytes(); /// let mut it = re.find_leftmost_iter(haystack); /// assert_eq!(Some(MultiMatch::must(0, 0, 0)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 1, 1)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 4, 4)), it.next()); /// assert_eq!(Some(MultiMatch::must(0, 5, 5)), it.next()); /// assert_eq!(None, it.next()); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` pub fn utf8(mut self, yes: bool) -> Config { self.utf8 = Some(yes); self } /// Returns true if and only if this configuration has UTF-8 mode enabled. /// /// When UTF-8 mode is enabled and an empty match is seen, the iterators on /// [`Regex`] will always start the next search at the next UTF-8 encoded /// codepoint. When UTF-8 mode is disabled, such searches are begun at the /// next byte offset. pub fn get_utf8(&self) -> bool { self.utf8.unwrap_or(true) } /// Overwrite the default configuration such that the options in `o` are /// always used. If an option in `o` is not set, then the corresponding /// option in `self` is used. If it's not set in `self` either, then it /// remains not set. pub(crate) fn overwrite(self, o: Config) -> Config { Config { utf8: o.utf8.or(self.utf8) } } } /// A builder for a regex based on deterministic finite automatons. /// /// This builder permits configuring options for the syntax of a pattern, the /// NFA construction, the DFA construction and finally the regex searching /// itself. This builder is different from a general purpose regex builder in /// that it permits fine grain configuration of the construction process. The /// trade off for this is complexity, and the possibility of setting a /// configuration that might not make sense. 
For example, there are three /// different UTF-8 modes: /// /// * [`SyntaxConfig::utf8`](crate::SyntaxConfig::utf8) controls whether the /// pattern itself can contain sub-expressions that match invalid UTF-8. /// * [`nfa::thompson::Config::utf8`](crate::nfa::thompson::Config::utf8) /// controls whether the implicit unanchored prefix added to the NFA can /// match through invalid UTF-8 or not. /// * [`Config::utf8`] controls how the regex iterators themselves advance /// the starting position of the next search when a match with zero length is /// found. /// /// Generally speaking, callers will want to either enable all of these or /// disable all of these. /// /// Internally, building a regex requires building two DFAs, where one is /// responsible for finding the end of a match and the other is responsible /// for finding the start of a match. If you only need to detect whether /// something matched, or only the end of a match, then you should use a /// [`dense::Builder`] to construct a single DFA, which is cheaper than /// building two DFAs. /// /// # Build methods /// /// This builder has a few "build" methods. In general, it's the result of /// combining the following parameters: /// /// * Building one or many regexes. /// * Building a regex with dense or sparse DFAs. /// /// The simplest "build" method is [`Builder::build`]. It accepts a single /// pattern and builds a dense DFA using `usize` for the state identifier /// representation. /// /// The most general "build" method is [`Builder::build_many`], which permits /// building a regex that searches for multiple patterns simultaneously while /// using a specific state identifier representation. /// /// The most flexible "build" method, but hardest to use, is /// [`Builder::build_from_dfas`]. This exposes the fact that a [`Regex`] is /// just a pair of DFAs, and this method allows you to specify those DFAs /// exactly. 
/// /// # Example /// /// This example shows how to disable UTF-8 mode in the syntax, the NFA and /// the regex itself. This is generally what you want for matching on /// arbitrary bytes. /// /// ``` /// use regex_automata::{ /// dfa::regex::Regex, nfa::thompson, MultiMatch, SyntaxConfig /// }; /// /// let re = Regex::builder() /// .configure(Regex::config().utf8(false)) /// .syntax(SyntaxConfig::new().utf8(false)) /// .thompson(thompson::Config::new().utf8(false)) /// .build(r"foo(?-u:[^b])ar.*")?; /// let haystack = b"\xFEfoo\xFFarzz\xE2\x98\xFF\n"; /// let expected = Some(MultiMatch::must(0, 1, 9)); /// let got = re.find_leftmost(haystack); /// assert_eq!(expected, got); /// // Notice that `(?-u:[^b])` matches invalid UTF-8, /// // but the subsequent `.*` does not! Disabling UTF-8 /// // on the syntax permits this. Notice also that the /// // search was unanchored and skipped over invalid UTF-8. /// // Disabling UTF-8 on the Thompson NFA permits this. /// // /// // N.B. This example does not show the impact of /// // disabling UTF-8 mode on Config, since that /// // only impacts regexes that can produce matches of /// // length 0. /// assert_eq!(b"foo\xFFarzz", &haystack[got.unwrap().range()]); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` #[cfg(feature = "alloc")] #[derive(Clone, Debug)] pub struct Builder { config: Config, dfa: dense::Builder, } #[cfg(feature = "alloc")] impl Builder { /// Create a new regex builder with the default configuration. pub fn new() -> Builder { Builder { config: Config::default(), dfa: dense::Builder::new() } } /// Build a regex from the given pattern. /// /// If there was a problem parsing or compiling the pattern, then an error /// is returned. pub fn build(&self, pattern: &str) -> Result<Regex, Error> { self.build_many(&[pattern]) } /// Build a regex from the given pattern using sparse DFAs. /// /// If there was a problem parsing or compiling the pattern, then an error /// is returned. 
pub fn build_sparse(
    &self,
    pattern: &str,
) -> Result<Regex<sparse::DFA<Vec<u8>>>, Error> {
    self.build_many_sparse(&[pattern])
}

/// Build a regex from the given patterns.
pub fn build_many<P: AsRef<str>>(
    &self,
    patterns: &[P],
) -> Result<Regex, Error> {
    let forward = self.dfa.build_many(patterns)?;
    // The reverse DFA must be anchored, use all-match semantics, have
    // anchored start states for each pattern and run over a reversed NFA;
    // see the requirements listed in the `build_from_dfas` docs below.
    let reverse = self
        .dfa
        .clone()
        .configure(
            dense::Config::new()
                .anchored(true)
                .match_kind(MatchKind::All)
                .starts_for_each_pattern(true),
        )
        .thompson(thompson::Config::new().reverse(true))
        .build_many(patterns)?;
    Ok(self.build_from_dfas(forward, reverse))
}

/// Build a sparse regex from the given patterns.
pub fn build_many_sparse<P: AsRef<str>>(
    &self,
    patterns: &[P],
) -> Result<Regex<sparse::DFA<Vec<u8>>>, Error> {
    // Build dense DFAs first, then convert both sides to their sparse form.
    let re = self.build_many(patterns)?;
    let forward = re.forward().to_sparse()?;
    let reverse = re.reverse().to_sparse()?;
    Ok(self.build_from_dfas(forward, reverse))
}

/// Build a regex from its component forward and reverse DFAs.
///
/// This is useful when deserializing a regex from some arbitrary
/// memory region. This is also useful for building regexes from other
/// types of DFAs.
///
/// If you're building the DFAs from scratch instead of building new DFAs
/// from other DFAs, then you'll need to make sure that the reverse DFA is
/// configured correctly to match the intended semantics. Namely:
///
/// * It should be anchored.
/// * It should use [`MatchKind::All`] semantics.
/// * It should match in reverse.
/// * It should have anchored start states compiled for each pattern.
/// * Otherwise, its configuration should match the forward DFA.
///
/// If these conditions aren't satisfied, then the behavior of searches is
/// unspecified.
///
/// Note that when using this constructor, only the configuration from
/// [`Config`] is applied. The other configuration settings on this builder
/// only apply when the builder owns the construction of the DFAs
/// themselves.
///
/// # Example
///
/// This example is a bit contrived. The usual use of these methods
/// would involve serializing `initial_re` somewhere and then deserializing
/// it later to build a regex. But in this case, we do everything in
/// memory.
///
/// ```
/// use regex_automata::dfa::regex::Regex;
///
/// let initial_re = Regex::new("foo[0-9]+")?;
/// assert_eq!(true, initial_re.is_match(b"foo123"));
///
/// let (fwd, rev) = (initial_re.forward(), initial_re.reverse());
/// let re = Regex::builder().build_from_dfas(fwd, rev);
/// assert_eq!(true, re.is_match(b"foo123"));
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
///
/// This example shows how to build a `Regex` that uses sparse DFAs instead
/// of dense DFAs without using one of the convenience `build_sparse`
/// routines:
///
/// ```
/// use regex_automata::dfa::regex::Regex;
///
/// let initial_re = Regex::new("foo[0-9]+")?;
/// assert_eq!(true, initial_re.is_match(b"foo123"));
///
/// let fwd = initial_re.forward().to_sparse()?;
/// let rev = initial_re.reverse().to_sparse()?;
/// let re = Regex::builder().build_from_dfas(fwd, rev);
/// assert_eq!(true, re.is_match(b"foo123"));
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
pub fn build_from_dfas<A: Automaton>(
    &self,
    forward: A,
    reverse: A,
) -> Regex<A> {
    let utf8 = self.config.get_utf8();
    Regex { prefilter: None, forward, reverse, utf8 }
}

/// Apply the given regex configuration options to this builder.
pub fn configure(&mut self, config: Config) -> &mut Builder {
    self.config = self.config.overwrite(config);
    self
}

/// Set the syntax configuration for this builder using
/// [`SyntaxConfig`](crate::SyntaxConfig).
///
/// This permits setting things like case insensitivity, Unicode and multi
/// line mode.
pub fn syntax(
    &mut self,
    config: crate::util::syntax::SyntaxConfig,
) -> &mut Builder {
    self.dfa.syntax(config);
    self
}

/// Set the Thompson NFA configuration for this builder using
/// [`nfa::thompson::Config`](thompson::Config).
/// /// This permits setting things like whether additional time should be /// spent shrinking the size of the NFA. pub fn thompson(&mut self, config: thompson::Config) -> &mut Builder { self.dfa.thompson(config); self } /// Set the dense DFA compilation configuration for this builder using /// [`dense::Config`](dense::Config). /// /// This permits setting things like whether the underlying DFAs should /// be minimized. pub fn dense(&mut self, config: dense::Config) -> &mut Builder { self.dfa.configure(config); self } } #[cfg(feature = "alloc")] impl Default for Builder { fn default() -> Builder { Builder::new() } } #[inline(always)] fn next_unwrap( item: Option<Result<MultiMatch, MatchError>>, ) -> Option<MultiMatch> { match item { None => None, Some(Ok(m)) => Some(m), Some(Err(err)) => panic!( "unexpected regex search error: {}\n\ to handle search errors, use try_ methods", err, ), } }
// Taunt-related ECS definitions; the system operating on them lives in the
// `taunt_system` submodule.
pub mod taunt_system;

use amethyst::core::ecs::{Entity, Component, DenseVecStorage};

/// Marker component attached to entities involved in a taunt.
#[derive(Default)]
pub struct TauntComponent;

impl Component for TauntComponent {
    // NOTE(review): `DenseVecStorage` works, but for a zero-sized marker
    // component `NullStorage` would avoid per-entity data — confirm before
    // changing, since it alters storage semantics.
    type Storage = DenseVecStorage<Self>;
}

/// Taunt state shared within the crate.
#[derive(Default)]
pub struct Taunt {
    // Entity shown during the taunt (presumably a face/portrait entity set
    // by the taunt system — TODO confirm against `taunt_system`).
    pub(crate) face: Option<Entity>,
}
use super::*;

// Tests for `result(process, binary, position)`, which splits a bitstring at
// a byte `position` into a `{prefix, suffix}` tuple (Erlang `split_binary`-
// style semantics, judging by the assertions below).

// Splitting strictly inside the byte length yields a binary prefix and a
// (possibly non-byte-aligned) bitstring suffix.
#[test]
fn with_less_than_byte_len_returns_binary_prefix_and_suffix_bitstring() {
    with_process(|process| {
        let binary = bitstring!(1, 2 :: 2, &process);
        let position = process.integer(1);

        assert_eq!(
            result(process, binary, position),
            Ok(process.tuple_from_slice(&[
                process.binary_from_bytes(&[1]),
                bitstring!(2 :: 2, &process)
            ],))
        )
    })
}

// Splitting a byte-aligned subbinary at its full byte length returns the
// whole binary and an empty suffix.
#[test]
fn with_byte_len_without_bit_count_returns_subbinary_and_empty_suffix() {
    with_process(|process| {
        let original = process.binary_from_bytes(&[1]);
        let binary = process.subbinary_from_original(original, 0, 0, 1, 0);
        let position = process.integer(1);

        assert_eq!(
            result(process, binary, position),
            Ok(process.tuple_from_slice(&[binary, process.binary_from_bytes(&[])],))
        );
    });
}

// A bitstring with trailing partial-byte bits cannot be split exactly at its
// total byte length: the leftover bits would have nowhere to go.
#[test]
fn with_byte_len_with_bit_count_errors_badarg() {
    with_process(|process| {
        let binary = bitstring!(1, 2 :: 2, &process);
        let position = process.integer(2);

        assert_badarg!(result(process, binary, position), "bitstring (<<1,2:2>>) has 2 bits in its partial bytes, so the index (2) cannot equal the total byte length (2)");
    });
}

// Positions past the end of the bitstring are rejected with `badarg`.
#[test]
fn with_greater_than_byte_len_errors_badarg() {
    with_process(|process| {
        let binary = bitstring!(1, 2 :: 2, &process);
        let position = process.integer(3);

        assert_badarg!(
            result(process, binary, position),
            "index (3) exceeds total byte length (2) of bitstring (<<1,2:2>>)"
        );
    });
}
#[macro_use] extern crate lazy_static; extern crate regex; mod recipe; use std::collections::HashSet; use std::env::current_dir; use std::ffi::OsStr; use std::fs::{read_dir, File}; use std::io::prelude::*; use std::io::{self, BufReader}; use regex::Regex; use recipe::beautify_jsons; fn main() -> io::Result<()> { let ingredients_dir = "../ingredients/"; // Fetch ingredient units let mut ingredients_files: Vec<String> = Vec::new(); for entry in read_dir(ingredients_dir)? { let entry = entry?; let path = entry.path(); if path.is_file() && path.extension().is_some() && path.extension().unwrap() == "txt" { ingredients_files.push(String::from(path.as_path().to_str().unwrap())); } } let re = Regex::new(r"^(?:[0-9]+(?:\.[0-9]+)?) (?P<unit>[^ ]+) .+$").unwrap(); let mut units: HashSet<String> = HashSet::new(); for ingredients_file in ingredients_files { println!("Using ingredients file {}", ingredients_file); let f = File::open(ingredients_file)?; let f = BufReader::new(f); for line in f.lines() { let line = &line.unwrap(); if re.is_match(line) { let caps = re.captures(line).unwrap(); let unit = caps.name("unit").map_or("ERROR", |c| c.as_str()); units.insert(String::from(unit)); } } } // Make Regex string from ingredient units let re_units = units .iter() .map(|u| String::from(u)) .collect::<Vec<String>>() .join("|"); println!("Found units: {}", re_units); beautify_jsons("../recipes/", re_units)?; Ok(()) }
// Copyright 2019 The vault713 Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Multi-party Bulletproof range-proof construction: each of N participants
// contributes a partial Pedersen commitment (optionally via a commit/reveal
// phase), then the parties run two bulletproof rounds (T_1/T_2, then tau_x)
// before one of them finalizes and verifies the joint proof.

use super::error::ErrorKind;
use crate::swap::ser::*;
use blake2::blake2b::blake2b;
use grin_core::core::{Input as TxInput, Output as TxOutput, OutputFeatures};
use grin_core::libtx::secp_ser;
use grin_util::secp::constants::SECRET_KEY_SIZE;
use grin_util::secp::key::{PublicKey, SecretKey};
use grin_util::secp::pedersen::{Commitment, RangeProof};
use grin_util::secp::Secp256k1;
use hex::FromHex;
use rand::thread_rng;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

/// State machine for one participant's view of an N-party multisig
/// bulletproof. Participants are ordered; `participant_id` is this party's
/// slot in `participants`.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Builder {
    num_participants: usize,
    #[serde(with = "secp_ser::string_or_u64")]
    amount: u64,
    // When true, partial commitments are first exchanged as blake2b hashes
    // and only later revealed (commit/reveal scheme).
    commit_reveal: bool,
    pub participants: Vec<ParticipantData>,
    participant_id: usize,
    #[serde(serialize_with = "seckey_to_hex", deserialize_with = "seckey_from_hex")]
    nonce: SecretKey,
    #[serde(
        serialize_with = "option_seckey_to_hex",
        deserialize_with = "option_seckey_from_hex",
        skip_serializing_if = "Option::is_none",
        default
    )]
    pub common_nonce: Option<SecretKey>,
}

impl Builder {
    /// Creates an empty builder; participants are added in id order.
    pub fn new(
        num_participants: usize,
        amount: u64,
        commit_reveal: bool,
        participant_id: usize,
        nonce: SecretKey,
        common_nonce: Option<SecretKey>,
    ) -> Self {
        Self {
            num_participants,
            amount,
            commit_reveal,
            participants: vec![],
            participant_id,
            nonce,
            common_nonce,
        }
    }

    /// Adds *our own* participant entry. All lower-id participants must have
    /// been imported first (the new entry's index must equal participant_id).
    pub fn create_participant(
        &mut self,
        secp: &Secp256k1,
        secret_key: &SecretKey,
    ) -> Result<(), ErrorKind> {
        let id = self.participants.len();
        if id != self.participant_id {
            return Err(ErrorKind::ParticipantOrdering);
        }
        // Partial commitment is to value 0 — the amount itself is committed
        // separately in commit() via commit_value().
        let partial_commitment = secp.commit(0, secret_key.clone())?;
        self.participants.push(if self.commit_reveal {
            ParticipantData::new_commit(partial_commitment)
        } else {
            ParticipantData::new_revealed(partial_commitment)
        });
        Ok(())
    }

    /// Imports another party's participant data at exactly the next slot.
    pub fn import_participant(
        &mut self,
        id: usize,
        participant: &ParticipantData,
    ) -> Result<(), ErrorKind> {
        if self.participants.len() > id {
            return Err(ErrorKind::ParticipantExists);
        }
        if self.participants.len() != id || self.participants.len() >= self.num_participants {
            return Err(ErrorKind::ParticipantOrdering);
        }
        self.participants.push(if self.commit_reveal {
            participant.new_foreign_commit()?
        } else {
            participant.new_foreign_reveal()?
        });
        Ok(())
    }

    /// Checks another party's revealed partial commitment against the hash
    /// they committed to earlier.
    pub fn reveal_participant(
        &mut self,
        id: usize,
        participant: &ParticipantData,
    ) -> Result<(), ErrorKind> {
        if self.participants.len() <= id {
            return Err(ErrorKind::ParticipantDoesntExist);
        }
        if self.commit_reveal && self.participants.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        match participant.partial_commitment.as_ref() {
            Some(p) => self.participants[id].reveal(p),
            None => Err(ErrorKind::Reveal),
        }
    }

    /// Stores another party's round-1 outputs (T_1, T_2).
    pub fn round_1_participant(
        &mut self,
        id: usize,
        participant: &ParticipantData,
    ) -> Result<(), ErrorKind> {
        if self.participants.len() <= id {
            return Err(ErrorKind::ParticipantDoesntExist);
        }
        if self.participants.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        if participant.t_1.is_none() || participant.t_2.is_none() {
            return Err(ErrorKind::Round1Missing);
        }
        self.participants[id].t_1 = participant.t_1.clone();
        self.participants[id].t_2 = participant.t_2.clone();
        Ok(())
    }

    /// Stores another party's round-2 output (tau_x).
    // NOTE(review): the `.into()` calls below are no-ops (ErrorKind -> ErrorKind
    // via the identity From impl) and inconsistent with the sibling methods.
    pub fn round_2_participant(
        &mut self,
        id: usize,
        participant: &ParticipantData,
    ) -> Result<(), ErrorKind> {
        if self.participants.len() <= id {
            return Err(ErrorKind::ParticipantDoesntExist.into());
        }
        if self.participants.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete.into());
        }
        if participant.tau_x.is_none() {
            return Err(ErrorKind::Round2Missing.into());
        }
        self.participants[id].tau_x = participant.tau_x.clone();
        Ok(())
    }

    /// Exports our own participant data to send to the other parties.
    pub fn export(&self) -> Result<ParticipantData, ErrorKind> {
        if self.participants.len() <= self.participant_id {
            return Err(ErrorKind::ParticipantDoesntExist);
        }
        Ok(self.participants[self.participant_id].clone())
    }

    /// Reveals our own partial commitment (commit/reveal mode), re-deriving
    /// it from the secret key and checking it against the stored hash.
    pub fn reveal(&mut self, secp: &Secp256k1, secret_key: &SecretKey) -> Result<(), ErrorKind> {
        if self.participants.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        let partial_commitment = secp.commit(0, secret_key.clone())?;
        self.participants[self.participant_id].reveal(&partial_commitment)?;
        Ok(())
    }

    /// Bulletproof round 1: computes our T_1/T_2 using our private nonce.
    pub fn round_1(&mut self, secp: &Secp256k1, blind: &SecretKey) -> Result<(), ErrorKind> {
        let mut t_1 = PublicKey::new();
        let mut t_2 = PublicKey::new();
        // Round 1 doesnt require knowledge of total commit or common nonce, we should allow NULL argument in libsecp
        let commit = secp.commit(0, SecretKey::new(secp, &mut thread_rng()))?;
        let common_nonce = self
            .common_nonce
            .clone()
            .unwrap_or(SecretKey::new(secp, &mut thread_rng()));
        secp.bullet_proof_multisig(
            self.amount,
            blind.clone(),
            common_nonce,
            None,
            None,
            None,
            Some(&mut t_1),
            Some(&mut t_2),
            vec![commit],
            Some(&self.nonce),
            1, // step 1 of the multisig bulletproof protocol
        );
        self.participants[self.participant_id].t_1 = Some(t_1);
        self.participants[self.participant_id].t_2 = Some(t_2);
        Ok(())
    }

    /// Bulletproof round 2: computes our tau_x from the summed T_1/T_2 and
    /// the total commitment. Requires everyone's round-1 data.
    pub fn round_2(&mut self, secp: &Secp256k1, blind: &SecretKey) -> Result<(), ErrorKind> {
        let mut t_1 = self.sum_t_1(secp)?;
        let mut t_2 = self.sum_t_2(secp)?;
        let mut tau_x = SecretKey([0; SECRET_KEY_SIZE]);
        let commit = self.commit(secp)?;
        secp.bullet_proof_multisig(
            self.amount,
            blind.clone(),
            self.common_nonce()?,
            None,
            None,
            Some(&mut tau_x),
            Some(&mut t_1),
            Some(&mut t_2),
            vec![commit],
            Some(&self.nonce),
            2, // step 2 of the multisig bulletproof protocol
        );
        self.participants[self.participant_id].tau_x = Some(tau_x);
        Ok(())
    }

    /// Assembles the final range proof from all parties' sums and verifies it
    /// before returning. Requires everyone's round-2 data.
    pub fn finalize(&self, secp: &Secp256k1, blind: &SecretKey) -> Result<RangeProof, ErrorKind> {
        let mut t_1 = self.sum_t_1(secp)?;
        let mut t_2 = self.sum_t_2(secp)?;
        let mut tau_x = self.sum_tau_x(secp)?;
        let commit = self.commit(secp)?;
        let proof = secp
            .bullet_proof_multisig(
                self.amount,
                blind.clone(),
                self.common_nonce()?,
                None,
                None,
                Some(&mut tau_x),
                Some(&mut t_1),
                Some(&mut t_2),
                vec![commit],
                Some(&self.nonce),
                0, // step 0 = final assembly
            )
            .ok_or(ErrorKind::MultiSigIncomplete)?;
        secp.verify_bullet_proof(commit, proof, None)?;
        Ok(proof)
    }

    /// The joint commitment as a plain transaction input.
    pub fn as_input(&self, secp: &Secp256k1) -> Result<TxInput, ErrorKind> {
        Ok(TxInput {
            features: OutputFeatures::Plain,
            commit: self.commit(secp)?,
        })
    }

    /// The joint commitment plus finalized proof as a transaction output.
    pub fn as_output(&self, secp: &Secp256k1, blind: &SecretKey) -> Result<TxOutput, ErrorKind> {
        Ok(TxOutput {
            features: OutputFeatures::Plain,
            commit: self.commit(secp)?,
            proof: self.finalize(secp, blind)?,
        })
    }

    /// Total commitment: sum of all revealed partial commitments plus a
    /// commitment to the amount. Requires every partial to be revealed.
    pub fn commit(&self, secp: &Secp256k1) -> Result<Commitment, ErrorKind> {
        let mut partial_commitments: Vec<Commitment> = self
            .participants
            .iter()
            .filter_map(|p| p.partial_commitment.clone())
            .collect();
        if partial_commitments.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        let commitment_value = secp.commit_value(self.amount)?;
        partial_commitments.push(commitment_value);
        let commitment = secp.commit_sum(partial_commitments, vec![])?;
        Ok(commitment)
    }

    // Agreed-upon common nonce; required from round 2 onwards.
    fn common_nonce(&self) -> Result<SecretKey, ErrorKind> {
        self.common_nonce
            .clone()
            .ok_or(ErrorKind::CommonNonceMissing)
    }

    // Sum (EC point combination) of every participant's T_1.
    fn sum_t_1(&self, secp: &Secp256k1) -> Result<PublicKey, ErrorKind> {
        let t_1s: Vec<&PublicKey> = self
            .participants
            .iter()
            .filter_map(|p| p.t_1.as_ref())
            .collect();
        if t_1s.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        let t_1 = PublicKey::from_combination(secp, t_1s)?;
        Ok(t_1)
    }

    // Sum (EC point combination) of every participant's T_2.
    fn sum_t_2(&self, secp: &Secp256k1) -> Result<PublicKey, ErrorKind> {
        let t_2s: Vec<&PublicKey> = self
            .participants
            .iter()
            .filter_map(|p| p.t_2.as_ref())
            .collect();
        if t_2s.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        let t_2 = PublicKey::from_combination(secp, t_2s)?;
        Ok(t_2)
    }

    // Scalar sum of every participant's tau_x.
    fn sum_tau_x(&self, secp: &Secp256k1) -> Result<SecretKey, ErrorKind> {
        let mut sum_tau_x = SecretKey([0; SECRET_KEY_SIZE]);
        let tau_xs: Vec<&SecretKey> = self
            .participants
            .iter()
            .filter_map(|p| p.tau_x.as_ref())
            .collect();
        if tau_xs.len() != self.num_participants {
            return Err(ErrorKind::MultiSigIncomplete);
        }
        tau_xs
            .iter()
            .for_each(|x| sum_tau_x.add_assign(&secp, *x).unwrap());
        Ok(sum_tau_x)
    }
}

/// Per-participant protocol state exchanged between parties. Exactly one of
/// `partial_commitment_hash` (commit phase) or `partial_commitment` (revealed)
/// is populated at first; t_1/t_2/tau_x are filled in by the rounds.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ParticipantData {
    #[serde(skip_serializing_if = "Option::is_none", default)]
    partial_commitment_hash: Option<Hash>,
    #[serde(
        serialize_with = "option_commit_to_hex",
        deserialize_with = "option_commit_from_hex",
        skip_serializing_if = "Option::is_none",
        default
    )]
    pub partial_commitment: Option<Commitment>,
    #[serde(
        serialize_with = "option_pubkey_to_hex",
        deserialize_with = "option_pubkey_from_hex",
        skip_serializing_if = "Option::is_none",
        default
    )]
    t_1: Option<PublicKey>,
    #[serde(
        serialize_with = "option_pubkey_to_hex",
        deserialize_with = "option_pubkey_from_hex",
        skip_serializing_if = "Option::is_none",
        default
    )]
    t_2: Option<PublicKey>,
    #[serde(
        serialize_with = "option_seckey_to_hex",
        deserialize_with = "option_seckey_from_hex",
        skip_serializing_if = "Option::is_none",
        default
    )]
    tau_x: Option<SecretKey>,
}

impl ParticipantData {
    /// Commit-phase entry: only the blake2b hash of the partial commitment.
    pub fn new_commit(partial_commitment: Commitment) -> Self {
        ParticipantData {
            partial_commitment_hash: Some(partial_commitment.hash().unwrap()),
            partial_commitment: None,
            t_1: None,
            t_2: None,
            tau_x: None,
        }
    }

    /// Non-commit-reveal entry: partial commitment exposed directly.
    pub fn new_revealed(partial_commitment: Commitment) -> Self {
        ParticipantData {
            partial_commitment_hash: None,
            partial_commitment: Some(partial_commitment),
            t_1: None,
            t_2: None,
            tau_x: None,
        }
    }

    /// Copy of a foreign participant keeping only the commitment hash.
    pub fn new_foreign_commit(&self) -> Result<Self, ErrorKind> {
        let hash = self
            .partial_commitment_hash
            .clone()
            .ok_or(ErrorKind::ParticipantInvalid)?;
        Ok(ParticipantData {
            partial_commitment_hash: Some(hash),
            partial_commitment: None,
            t_1: None,
            t_2: None,
            tau_x: None,
        })
    }

    /// Copy of a foreign participant keeping only the revealed commitment.
    pub fn new_foreign_reveal(&self) -> Result<Self, ErrorKind> {
        let commit = self
            .partial_commitment
            .clone()
            .ok_or(ErrorKind::ParticipantInvalid)?;
        Ok(ParticipantData {
            partial_commitment_hash: None,
            partial_commitment: Some(commit),
            t_1: None,
            t_2: None,
            tau_x: None,
        })
    }

    // Verifies a revealed commitment against the stored hash; errors if this
    // entry never committed or if the hash mismatches.
    fn reveal(&mut self, partial_commitment: &Commitment) -> Result<(), ErrorKind> {
        let hash = self
            .partial_commitment_hash
            .as_ref()
            .ok_or(ErrorKind::Reveal)?;
        if &partial_commitment.hash()? == hash {
            Ok(())
        } else {
            Err(ErrorKind::Reveal)
        }
    }
}

/// Fixed-length (32-byte) blake2b digest, hex-encoded over serde.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone)]
pub struct Hash {
    inner: Vec<u8>,
}

impl Hash {
    /// Wraps raw bytes; errors unless exactly 32 bytes long.
    pub fn new(inner: Vec<u8>) -> Result<Self, ErrorKind> {
        if inner.len() != 32 {
            return Err(ErrorKind::HashLength);
        }
        Ok(Self { inner })
    }

    /// Reinterprets the digest as a secp scalar (fails if out of range).
    pub fn to_secret_key(&self, secp: &Secp256k1) -> Result<SecretKey, ErrorKind> {
        let key = SecretKey::from_slice(secp, &self.inner)?;
        Ok(key)
    }
}

impl Serialize for Hash {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&hex::encode(&self.inner))
    }
}

impl<'de> Deserialize<'de> for Hash {
    fn deserialize<D>(deserializer: D) -> Result<Hash, D::Error>
    where
        D: Deserializer<'de>,
    {
        use serde::de::Error;
        let s = String::deserialize(deserializer)?;
        let v = Vec::from_hex(&s).map_err(D::Error::custom)?;
        Hash::new(v).map_err(D::Error::custom)
    }
}

/// Blake2b-256 hashing of protocol values.
pub trait Hashed {
    fn hash(&self) -> Result<Hash, ErrorKind>;
}

impl Hashed for Commitment {
    fn hash(&self) -> Result<Hash, ErrorKind> {
        Hash::new(blake2b(32, &[], &self.0).as_bytes().to_vec())
    }
}

impl Hashed for Vec<u8> {
    fn hash(&self) -> Result<Hash, ErrorKind> {
        Hash::new(blake2b(32, &[], &self).as_bytes().to_vec())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use grin_util::secp::ContextFlag;
    use rand::thread_rng;

    /*
    /// Test proof for 2-of-2 multisig with a commit & reveal phase
    // TODO: fix this test
    #[test]
    fn test_builder() {
        let secp = Secp256k1::with_caps(ContextFlag::Commit);

        //// Set up phase: parties agree on the participants (and an ordering), amount and a common nonce ////
        let num_participants: usize = 2;
        let amount: u64 = 713_000_000;
        let (common_nonce, _) = secp.generate_keypair(&mut thread_rng()).unwrap();

        //// Commit phase: parties all send their hashed partial commitment to each other (inside the ParticipantData) ////
        // A
        let id_a = 0;
        let (secret_a, _) = secp.generate_keypair(&mut thread_rng()).unwrap();
        let (nonce_a, _) = secp.generate_keypair(&mut thread_rng()).unwrap();
        let mut builder_a = Builder::new(num_participants, amount, true, id_a, nonce_a, Some(common_nonce.clone()));
        assert!(builder_a.create_participant(&secp, &secret_a).is_ok());
        // A cannot reveal yet
        assert!(builder_a.reveal(&secp, &secret_a).is_err());
        let part_a = builder_a.export().unwrap(); // A -> all

        // B
        let id_b = 1;
        let (secret_b, _) = secp.generate_keypair(&mut thread_rng()).unwrap();
        let (nonce_b, _) = secp.generate_keypair(&mut thread_rng()).unwrap();
        let mut builder_b = Builder::new(num_participants, amount, true, id_b, nonce_b, Some(common_nonce.clone()));
        // Participant cannot be created before previous ones are imported
        assert!(builder_b.create_participant(&secp, &secret_b).is_err());
        assert!(builder_b.import_participant(id_a, &part_a).is_ok());
        assert!(builder_b.create_participant(&secp, &secret_b).is_ok());

        //// Reveal phase ////
        // B
        // Revealing with the wrong secret will fail
        assert!(builder_b.reveal(&secp, &secret_a).is_err());
        assert!(builder_b.reveal(&secp, &secret_b).is_ok());
        // A hasn't revealed yet, we don't know the total commitment
        assert!(builder_b.commit(&secp).is_err());
        let part_b = builder_b.export().unwrap(); // B -> all

        // A
        // (import+reveal of B at the same time to save on communication, not required)
        assert!(builder_a.import_participant(id_b, &part_b).is_ok());
        assert!(builder_a.reveal_participant(id_b, &part_b).is_ok());
        assert!(builder_a.reveal(&secp, &secret_a).is_ok());
        assert!(builder_a.commit(&secp).is_ok());

        //// Build phase round 1: T_1 and T_2 ////
        // A
        assert!(builder_a.round_1(&secp, &secret_a).is_ok());
        let part_a = builder_a.export().unwrap(); // A -> all

        // B
        // (reveal+round 1 of A at the same time to save on communication, not required)
        // Revealing with the wrong commitment will fail
        assert!(builder_b.reveal_participant(id_a, &part_b).is_err());
        assert!(builder_b.reveal_participant(id_a, &part_a).is_ok());
        // All parties agree on the total commitment
        assert_eq!(builder_a.commit(&secp).unwrap(), builder_b.commit(&secp).unwrap());
        assert!(builder_b.round_1(&secp, &secret_b).is_ok());
        assert!(builder_b.round_1_participant(id_a, &part_a).is_ok());

        //// Build phase round 2: tau_x ////
        // B
        assert!(builder_b.round_2(&secp, &secret_b).is_ok());
        let part_b = builder_b.export().unwrap(); // B -> all

        // A
        // (round 1+round 2 of B at the same time to save on communication, not required)
        // Round 2 cannot be done without all round 1 information
        assert!(builder_a.round_2(&secp, &secret_a).is_err());
        assert!(builder_a.round_1_participant(id_b, &part_b).is_ok());
        // All parties agree on the total T_1 and T_2
        assert_eq!(builder_a.sum_t_1(&secp).unwrap(), builder_b.sum_t_1(&secp).unwrap());
        assert_eq!(builder_a.sum_t_2(&secp).unwrap(), builder_b.sum_t_2(&secp).unwrap());
        assert!(builder_a.round_2(&secp, &secret_a).is_ok());

        //// Finalization phase ////
        // A
        // Finalization cannot be done without all round 2 information
        assert!(builder_a.finalize(&secp, &secret_a).is_err());
        assert!(builder_a.round_2_participant(id_b, &part_b).is_ok());
        assert!(builder_a.finalize(&secp, &secret_a).is_ok());
        // Explicitly verify proof
        let commit_a = builder_a.commit(&secp).unwrap();
        let proof_a = builder_a.proof().unwrap();
        assert!(secp.verify_bullet_proof(commit_a, proof_a, None).is_ok());

        // For completeness, do same on B
        let part_a = builder_a.export().unwrap(); // A -> all
        // B
        assert!(builder_b.round_2_participant(id_a, &part_a).is_ok());
        // All parties agree on the total tau_x
        assert_eq!(builder_a.sum_tau_x(&secp).unwrap(), builder_b.sum_tau_x(&secp).unwrap());
        assert!(builder_b.finalize(&secp, &secret_b).is_ok());
        // Explicitly verify proof
        let commit_b = builder_b.commit(&secp).unwrap();
        let proof_b = builder_b.proof().unwrap();
        assert!(secp.verify_bullet_proof(commit_b, proof_b, None).is_ok());
        // Generated proof is the same
        assert_eq!(proof_a, proof_b);
    }*/

    /// Test proof for 2-of-2 multisig in a single round trip
    #[test]
    fn test_builder_single() {
        let secp = Secp256k1::with_caps(ContextFlag::Commit);

        //// Set up phase: parties agree on the participants (and an ordering), amount and a common nonce
        let num_participants: usize = 2;
        let amount: u64 = 42_000_000;
        let common_nonce = SecretKey::new(&secp, &mut thread_rng());

        // A: round 1
        let id_a = 0;
        let secret_a = SecretKey::new(&secp, &mut thread_rng());
        let nonce_a = SecretKey::new(&secp, &mut thread_rng());
        let mut builder_a = Builder::new(
            num_participants,
            amount,
            false,
            id_a,
            nonce_a,
            Some(common_nonce.clone()),
        );
        assert!(builder_a.create_participant(&secp, &secret_a).is_ok());
        assert!(builder_a.round_1(&secp, &secret_a).is_ok());
        let part_a = builder_a.export().unwrap(); // A -> B

        // B: round 1 + round 2
        let id_b = 1;
        let secret_b = SecretKey::new(&secp, &mut thread_rng());
        let nonce_b = SecretKey::new(&secp, &mut thread_rng());
        let mut builder_b = Builder::new(
            num_participants,
            amount,
            false,
            id_b,
            nonce_b,
            Some(common_nonce.clone()),
        );
        assert!(builder_b.import_participant(id_a, &part_a).is_ok());
        assert!(builder_b.create_participant(&secp, &secret_b).is_ok());
        assert!(builder_b.round_1_participant(id_a, &part_a).is_ok());
        assert!(builder_b.round_1(&secp, &secret_b).is_ok());
        assert!(builder_b.round_2(&secp, &secret_b).is_ok());
        let part_b = builder_b.export().unwrap(); // B -> A

        // A: round 2 + finalize
        assert!(builder_a.import_participant(id_b, &part_b).is_ok());
        assert!(builder_a.round_1_participant(id_b, &part_b).is_ok());
        assert!(builder_a.round_2_participant(id_b, &part_b).is_ok());
        assert!(builder_a.round_2(&secp, &secret_a).is_ok());
        let proof = builder_a.finalize(&secp, &secret_a).unwrap();

        // Explicitly verify proof
        let commit = builder_a.commit(&secp).unwrap();
        assert!(secp.verify_bullet_proof(commit, proof, None).is_ok());
    }
}
use actix_web::{middleware, web, App, HttpRequest, HttpServer, Responder}; use env_logger; use std::{env, io}; mod handlers; // this function could be located in different module fn config(cfg: &mut web::ServiceConfig) { cfg .service(handlers::hello) .service(handlers::get_again) .service(handlers::get_name) .service(handlers::get_name_and_count); } async fn greet(req: HttpRequest) -> impl Responder { let name = req.match_info().get("name").unwrap_or("World"); format!("Hello {}!", &name) } #[actix_rt::main] async fn main() -> io::Result<()> { env::set_var("RUST_LOG", "actix_web=debug,actix_server=info"); env_logger::init(); HttpServer::new(|| { App::new() // enable logger - always register actix-web Logger middleware last .wrap(middleware::Logger::default()) .wrap(middleware::Logger::new("%a %{User-Agent}i")) // register HTTP requests handlers .service(web::scope("/hello").configure(config)) .service(handlers::hello_more) .route("/greet/{name}", web::get().to(greet)) }) .bind("127.0.0.1:8088")? .run() .await }
#![allow(unused_imports)] #![allow(unused_variables)] #![allow(dead_code)] mod paras; use ndarray::prelude::*; use ndarray::Array; use typenum::{U16, U1024}; fn main() { // set flags // fi_flag = 1 -> high fidelity model (full Nguyen) // fi_flag = 1 -> low fidelity model (Stevens Lewis reduced) let fi_flag: i32 = 1; // stability_flag only functional for high fidelity model currently! // stability_flag = 1 -> unstable xcg 35% model // stability_flag = 0 -> stable xcg 25% model let stab_flag: i32 = 0; // get limits from paras let lim = paras::lim(); // print out some debugging // println!{"{:?}",lim.x_ub} // println!{"{:?}",lim.x_lb} // get simulation parameters from paras let (dt, sim_time) = paras::sim_paras(); let a = array![ [1.,2.,3.], [4.,5.,6.], ]; //assert_eq!(a.ndim(), 2); // get the number of dimensions of array a //assert_eq!(a.len(), 6); // get the number of elements in array a //assert_eq!(a.shape(), [2, 3]); // get the shape of array a //assert_eq!(a.is_empty(), false); // check if the array has zero elements //let a = Array::<f64, _>::linspace(0.,5.,11); //let b = Array::range(0., 4., 1.); println!("{:?}", &a); println!("{:?}", a.dot(&a.t())); }
extern crate cry;

/// Prints the Shakespeare quotation this crate is named for.
fn main() {
    let quote = "Cry \"Havoc!\", and let slip the dogs of war.";
    println!("{}", quote);
}
#![deny(warnings)] #![warn(rust_2018_idioms)] use futures_util::future::join; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Request, Response, Server}; static INDEX1: &[u8] = b"The 1st service!"; static INDEX2: &[u8] = b"The 2nd service!"; async fn index1(_: Request<Body>) -> Result<Response<Body>, hyper::Error> { Ok(Response::new(Body::from(INDEX1))) } async fn index2(_: Request<Body>) -> Result<Response<Body>, hyper::Error> { Ok(Response::new(Body::from(INDEX2))) } #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> { pretty_env_logger::init(); let addr1 = ([127, 0, 0, 1], 1337).into(); let addr2 = ([127, 0, 0, 1], 1338).into(); let srv1 = Server::bind(&addr1).serve(make_service_fn(|_| async { Ok::<_, hyper::Error>(service_fn(index1)) })); let srv2 = Server::bind(&addr2).serve(make_service_fn(|_| async { Ok::<_, hyper::Error>(service_fn(index2)) })); println!("Listening on http://{} and http://{}", addr1, addr2); let _ret = join(srv1, srv2).await; Ok(()) }
// Event-loop test for UDP send/receive (pre-1.0 mio API: Handler trait with
// readable/writable callbacks, RingBuf/SliceBuf buffers, std::old_io types).

use mio::*;
use mio::net::*;
use mio::net::udp::*;
use mio::buf::{RingBuf, SliceBuf};
use std::str;
use super::localhost;
use std::old_io::net::ip::{Ipv4Addr};
use mio::event as evt;

type TestEventLoop = EventLoop<usize, ()>;

// Tokens identifying the two registered sockets in the event loop.
const LISTENER: Token = Token(0);
const SENDER: Token = Token(1);

// Pairs a sending and a receiving UDP socket; the handler writes `msg` from
// `message_buf` and checks the bytes arriving in `rx_buf` against `msg`.
pub struct UdpHandler {
    listen_sock: UdpSocket,
    send_sock: UdpSocket,
    msg: &'static str,
    message_buf: SliceBuf<'static>,
    rx_buf: RingBuf
}

impl UdpHandler {
    fn new(send_sock: UdpSocket, listen_sock: UdpSocket, msg : &'static str) -> UdpHandler {
        UdpHandler {
            listen_sock: listen_sock,
            send_sock: send_sock,
            msg: msg,
            message_buf: SliceBuf::wrap(msg.as_bytes()),
            rx_buf: RingBuf::new(1024)
        }
    }
}

impl Handler<usize, ()> for UdpHandler {
    // Fired when a registered socket becomes readable: receive the datagram,
    // verify its payload, then stop the event loop.
    fn readable(&mut self, event_loop: &mut TestEventLoop, token: Token, _: evt::ReadHint) {
        match token {
            LISTENER => {
                debug!("We are receiving a datagram now...");
                self.listen_sock.read(&mut self.rx_buf.writer()).unwrap();
                assert!(str::from_utf8(self.rx_buf.reader().bytes()).unwrap() == self.msg);
                event_loop.shutdown();
            },
            _ => ()
        }
    }

    // Fired when a registered socket becomes writable: send the message.
    fn writable(&mut self, _: &mut TestEventLoop, token: Token) {
        match token {
            SENDER => {
                self.send_sock.write(&mut self.message_buf).unwrap();
            },
            _ => ()
        }
    }
}

// End-to-end test: create the socket pair, bind/connect to localhost, join
// two multicast groups, register both sockets edge-triggered, and run the
// loop until the handler has verified "hello world" round-tripped.
#[test]
pub fn test_udp_socket() {
    debug!("Starting TEST_UDP_SOCKETS");
    let mut event_loop = EventLoop::new().unwrap();

    let send_sock = UdpSocket::v4().unwrap();
    let recv_sock = UdpSocket::v4().unwrap();

    let addr = SockAddr::parse(localhost().as_slice())
        .expect("could not parse InetAddr for localhost");

    info!("Binding both listener and sender to localhost...");
    send_sock.connect(&addr).unwrap();
    recv_sock.bind(&addr).unwrap();

    info!("Setting SO_REUSEADDR");
    send_sock.set_reuseaddr(true).unwrap();
    recv_sock.set_reuseaddr(true).unwrap();

    info!("Joining group 227.1.1.100");
    recv_sock.join_multicast_group(&Ipv4Addr(227, 1, 1, 100), &None).unwrap();

    info!("Joining group 227.1.1.101");
    recv_sock.join_multicast_group(&Ipv4Addr(227, 1, 1, 101), &None).unwrap();

    info!("Registering LISTENER");
    event_loop.register_opt(&recv_sock, LISTENER, evt::READABLE, evt::EDGE).unwrap();

    info!("Registering SENDER");
    event_loop.register_opt(&send_sock, SENDER, evt::WRITABLE, evt::EDGE).unwrap();

    info!("Starting event loop to test with...");
    event_loop.run(UdpHandler::new(send_sock, recv_sock, "hello world")).ok().expect("Failed to run the actual event listener loop");
}
use std::rc::Rc; use crate::error::Result; use crate::externs::PythonScripts; use crate::nodes::NodeRoot; use crate::tensor::IRData; pub use n3_program::code::*; pub trait AddScripts { fn add_scripts(&self, root: &NodeRoot, scripts: &mut PythonScripts) -> Result<()>; } pub trait DataFromIR { fn from_ir(data: IRData) -> Self; } impl AddScripts for Code { fn add_scripts(&self, root: &NodeRoot, scripts: &mut PythonScripts) -> Result<()> { match self { Self::Node(node) => node.add_scripts(root, scripts), Self::Extern(node) => node.add_scripts(root, scripts), } } } impl DataFromIR for CodeData { fn from_ir(data: IRData) -> Self { Self { name: data.name, graph: Rc::try_unwrap(data.graph) .unwrap() .into_inner() .into_table(), input: data.input, output: data.output, } } }
use std::{collections::HashMap, fs};

/// Advent-of-Code-style joltage exercise: reads one adapter rating per line,
/// pads the chain with the outlet (0) and the device adapter (max + 3), and
/// prints a histogram of the differences between consecutive ratings.
fn main() {
    let filename = "inputs/q10_input.txt";
    let contents = fs::read_to_string(filename).expect("Could not read the file");

    // Parse and sort all adapter ratings.
    let mut ratings = contents
        .lines()
        .map(|x| x.parse().unwrap())
        .collect::<Vec<i32>>();
    ratings.sort_unstable();

    // Charging outlet at the front, built-in adapter (max + 3) at the back.
    ratings.insert(0, 0);
    ratings.push(ratings.last().unwrap() + 3);

    // Tally the gap between each consecutive pair. windows(2) replaces the
    // manual index arithmetic, and the entry API performs the
    // insert-or-increment in a single map lookup instead of get_mut+insert.
    let mut differences: HashMap<i32, i32> = HashMap::new();
    for pair in ratings.windows(2) {
        *differences.entry(pair[1] - pair[0]).or_insert(0) += 1;
    }

    println!("{:?}", differences);
}
// Generated-style register accessors for the USB TXTYPE5 register.
// Field layout (from the accessors below): bits 0:3 TEP (target endpoint),
// bits 4:5 PROTO (transfer protocol), bits 6:7 SPEED (operating speed).

#[doc = r"Value read from the register"]
pub struct R {
    bits: u8,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u8,
}
impl super::TXTYPE5 {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: W starts from the current hardware value.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Plain write: W starts from the reset value, not the current value.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u8 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Value of the field"]
pub struct USB_TXTYPE5_TEPR {
    bits: u8,
}
impl USB_TXTYPE5_TEPR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXTYPE5_TEPW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXTYPE5_TEPW<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // TEP occupies bits 0:3 (mask 0b1111).
        self.w.bits &= !(15 << 0);
        self.w.bits |= ((value as u8) & 15) << 0;
        self.w
    }
}
#[doc = "Possible values of the field `USB_TXTYPE5_PROTO`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USB_TXTYPE5_PROTOR {
    #[doc = "Control"]
    USB_TXTYPE5_PROTO_CTRL,
    #[doc = "Isochronous"]
    USB_TXTYPE5_PROTO_ISOC,
    #[doc = "Bulk"]
    USB_TXTYPE5_PROTO_BULK,
    #[doc = "Interrupt"]
    USB_TXTYPE5_PROTO_INT,
}
impl USB_TXTYPE5_PROTOR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_CTRL => 0,
            USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_ISOC => 1,
            USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_BULK => 2,
            USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_INT => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> USB_TXTYPE5_PROTOR {
        match value {
            0 => USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_CTRL,
            1 => USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_ISOC,
            2 => USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_BULK,
            3 => USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_INT,
            // A 2-bit field can only hold 0..=3.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_PROTO_CTRL`"]
    #[inline(always)]
    pub fn is_usb_txtype5_proto_ctrl(&self) -> bool {
        *self == USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_CTRL
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_PROTO_ISOC`"]
    #[inline(always)]
    pub fn is_usb_txtype5_proto_isoc(&self) -> bool {
        *self == USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_ISOC
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_PROTO_BULK`"]
    #[inline(always)]
    pub fn is_usb_txtype5_proto_bulk(&self) -> bool {
        *self == USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_BULK
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_PROTO_INT`"]
    #[inline(always)]
    pub fn is_usb_txtype5_proto_int(&self) -> bool {
        *self == USB_TXTYPE5_PROTOR::USB_TXTYPE5_PROTO_INT
    }
}
#[doc = "Values that can be written to the field `USB_TXTYPE5_PROTO`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USB_TXTYPE5_PROTOW {
    #[doc = "Control"]
    USB_TXTYPE5_PROTO_CTRL,
    #[doc = "Isochronous"]
    USB_TXTYPE5_PROTO_ISOC,
    #[doc = "Bulk"]
    USB_TXTYPE5_PROTO_BULK,
    #[doc = "Interrupt"]
    USB_TXTYPE5_PROTO_INT,
}
impl USB_TXTYPE5_PROTOW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_CTRL => 0,
            USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_ISOC => 1,
            USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_BULK => 2,
            USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_INT => 3,
        }
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXTYPE5_PROTOW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXTYPE5_PROTOW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: USB_TXTYPE5_PROTOW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Control"]
    #[inline(always)]
    pub fn usb_txtype5_proto_ctrl(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_CTRL)
    }
    #[doc = "Isochronous"]
    #[inline(always)]
    pub fn usb_txtype5_proto_isoc(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_ISOC)
    }
    #[doc = "Bulk"]
    #[inline(always)]
    pub fn usb_txtype5_proto_bulk(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_BULK)
    }
    #[doc = "Interrupt"]
    #[inline(always)]
    pub fn usb_txtype5_proto_int(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_PROTOW::USB_TXTYPE5_PROTO_INT)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // PROTO occupies bits 4:5 (mask 0b11 << 4).
        self.w.bits &= !(3 << 4);
        self.w.bits |= ((value as u8) & 3) << 4;
        self.w
    }
}
#[doc = "Possible values of the field `USB_TXTYPE5_SPEED`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USB_TXTYPE5_SPEEDR {
    #[doc = "Default"]
    USB_TXTYPE5_SPEED_DFLT,
    #[doc = "High"]
    USB_TXTYPE5_SPEED_HIGH,
    #[doc = "Full"]
    USB_TXTYPE5_SPEED_FULL,
    #[doc = "Low"]
    USB_TXTYPE5_SPEED_LOW,
}
impl USB_TXTYPE5_SPEEDR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_DFLT => 0,
            USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_HIGH => 1,
            USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_FULL => 2,
            USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_LOW => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> USB_TXTYPE5_SPEEDR {
        match value {
            0 => USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_DFLT,
            1 => USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_HIGH,
            2 => USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_FULL,
            3 => USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_LOW,
            // A 2-bit field can only hold 0..=3.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_SPEED_DFLT`"]
    #[inline(always)]
    pub fn is_usb_txtype5_speed_dflt(&self) -> bool {
        *self == USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_DFLT
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_SPEED_HIGH`"]
    #[inline(always)]
    pub fn is_usb_txtype5_speed_high(&self) -> bool {
        *self == USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_HIGH
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_SPEED_FULL`"]
    #[inline(always)]
    pub fn is_usb_txtype5_speed_full(&self) -> bool {
        *self == USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_FULL
    }
    #[doc = "Checks if the value of the field is `USB_TXTYPE5_SPEED_LOW`"]
    #[inline(always)]
    pub fn is_usb_txtype5_speed_low(&self) -> bool {
        *self == USB_TXTYPE5_SPEEDR::USB_TXTYPE5_SPEED_LOW
    }
}
#[doc = "Values that can be written to the field `USB_TXTYPE5_SPEED`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USB_TXTYPE5_SPEEDW {
    #[doc = "Default"]
    USB_TXTYPE5_SPEED_DFLT,
    #[doc = "High"]
    USB_TXTYPE5_SPEED_HIGH,
    #[doc = "Full"]
    USB_TXTYPE5_SPEED_FULL,
    #[doc = "Low"]
    USB_TXTYPE5_SPEED_LOW,
}
impl USB_TXTYPE5_SPEEDW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_DFLT => 0,
            USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_HIGH => 1,
            USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_FULL => 2,
            USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_LOW => 3,
        }
    }
}
#[doc = r"Proxy"]
pub struct _USB_TXTYPE5_SPEEDW<'a> {
    w: &'a mut W,
}
impl<'a> _USB_TXTYPE5_SPEEDW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: USB_TXTYPE5_SPEEDW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Default"]
    #[inline(always)]
    pub fn usb_txtype5_speed_dflt(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_DFLT)
    }
    #[doc = "High"]
    #[inline(always)]
    pub fn usb_txtype5_speed_high(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_HIGH)
    }
    #[doc = "Full"]
    #[inline(always)]
    pub fn usb_txtype5_speed_full(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_FULL)
    }
    #[doc = "Low"]
    #[inline(always)]
    pub fn usb_txtype5_speed_low(self) -> &'a mut W {
        self.variant(USB_TXTYPE5_SPEEDW::USB_TXTYPE5_SPEED_LOW)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // SPEED occupies bits 6:7 (mask 0b11 << 6).
        self.w.bits &= !(3 << 6);
        self.w.bits |= ((value as u8) & 3) << 6;
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
    #[doc = "Bits 0:3 - Target Endpoint Number"]
    #[inline(always)]
    pub fn usb_txtype5_tep(&self) -> USB_TXTYPE5_TEPR {
        let bits = ((self.bits >> 0) & 15) as u8;
        USB_TXTYPE5_TEPR { bits }
    }
    #[doc = "Bits 4:5 - Protocol"]
    #[inline(always)]
    pub fn usb_txtype5_proto(&self) -> USB_TXTYPE5_PROTOR {
        USB_TXTYPE5_PROTOR::_from(((self.bits >> 4) & 3) as u8)
    }
    #[doc = "Bits 6:7 - Operating Speed"]
    #[inline(always)]
    pub fn usb_txtype5_speed(&self) -> USB_TXTYPE5_SPEEDR {
        USB_TXTYPE5_SPEEDR::_from(((self.bits >> 6) & 3) as u8)
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:3 - Target Endpoint Number"]
    #[inline(always)]
    pub fn usb_txtype5_tep(&mut self) -> _USB_TXTYPE5_TEPW {
        _USB_TXTYPE5_TEPW { w: self }
    }
    #[doc = "Bits 4:5 - Protocol"]
    #[inline(always)]
    pub fn usb_txtype5_proto(&mut self) -> _USB_TXTYPE5_PROTOW {
        _USB_TXTYPE5_PROTOW { w: self }
    }
    #[doc = "Bits 6:7 - Operating Speed"]
    #[inline(always)]
    pub fn usb_txtype5_speed(&mut self) -> _USB_TXTYPE5_SPEEDW {
        _USB_TXTYPE5_SPEEDW { w: self }
    }
}
mod louds; mod louds_builder; mod louds_index; mod louds_node_num; use crate::{SuccinctBitVector, SuccinctBitVectorBuilder}; /// LOUDS (Level-Order Unary Degree Sequence). /// /// This class can handle tree structure of virtually **arbitrary number of nodes**. /// /// In fact, _N_ (number of nodes in the tree) is designed to be limited to: _N < 2^64 / 2_, while each node is represented in 2bits in average.<br> /// It should be enough for almost all usecases since a binary data of length of _2^63_ consumes _2^20 = 1,048,576_ TB (terabytes), which is hard to handle by state-of-the-art computer architecture. /// /// # Examples /// Say we want to hold the following tree structure in minimum length of bits. /// /// ```text /// (1) /// | /// |---+---+ /// | | | /// (2) (3) (4) /// | | /// | |---+-----+ /// | | | | /// (5) (6) (7) (8) /// | | /// | |----+ /// | | | /// (9) (10) (11) /// ``` /// /// This tree has NodeNum (node number of 1-origin, assigned from left node to right & top to bottom) and edges. /// With LOUDS, this tree is represented as the following LBS (LOUDS Bit String). /// /// ```text /// NodeNum | 0 (virtual root) | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | /// LBS | 1 0 | 1 1 1 0 | 1 0 | 0 | 1 1 1 0 | 0 | 0 | 1 0 | 1 1 0 | 0 | 0 | 0 | /// Child NodeNum | 1 - | 2 3 4 - | 5 - | - | 6 7 8 - | - | - | 9 - | 10 11 - | - | - | - | /// Index | 0 1 | 2 3 4 5 | 6 7 | 8 | 9 10 11 12| 13| 14| 15 16| 17 18 19| 20| 21 | 22 | /// ``` /// /// The same tree is represented as follows using index. /// /// ```text /// <0> /// | /// |---+---+ /// | | | /// <2> <3> <4> /// | | /// | |---+-----+ /// | | | | /// <6> <9> <10> <11> /// | | /// | |----+ /// | | | /// <15> <17> <18> /// ``` /// /// Then, create this tree structure with `Louds` and call operations to it. /// /// ``` /// extern crate succinct_rs; /// /// use succinct_rs::{BitString, LoudsBuilder, LoudsIndex, LoudsNodeNum}; /// /// // Construct from LBS. 
/// let bs = BitString::new("10_1110_10_0_1110_0_0_10_110_0_0_0"); /// let louds = LoudsBuilder::from_bit_string(bs).build(); /// /// // LoudsNodeNum <-> LoudsIndex /// let node8 = LoudsNodeNum::new(8); /// let index11 = louds.node_num_to_index(&node8); /// assert_eq!(louds.index_to_node_num(&index11), node8); /// /// // Search for children. /// assert_eq!(louds.parent_to_children(&node8), vec!(LoudsIndex::new(17), LoudsIndex::new(18))); /// /// // Search for parent. /// assert_eq!(louds.child_to_parent(&index11), LoudsNodeNum::new(4)); /// ``` pub struct Louds { lbs: SuccinctBitVector, } /// The builder of [Louds](struct.Louds.html). pub struct LoudsBuilder { bv_builder: SuccinctBitVectorBuilder, } #[derive(PartialEq, Eq, Debug)] /// Node number of [Louds](struct.Louds.html) tree. pub struct LoudsNodeNum { value: u64, } #[derive(PartialEq, Eq, Debug)] /// Index of [Louds](struct.Louds.html) tree. pub struct LoudsIndex { value: u64, }
use brew_calculator::units::*;
use serde::Deserialize;

/// Boil-stage parameters of a brewing recipe.
#[derive(Deserialize, Debug, PartialEq)]
pub struct Boil {
    /// Wort volume going into the boil.
    pub pre_volume: Liters,
    /// Duration of the boil.
    pub boil_time: Minutes,
}

impl Boil {
    /// Assembles a `Boil` from the corresponding BeerXML recipe fields
    /// (`BOIL_SIZE` maps to the pre-boil volume).
    pub(crate) fn from_beerxml_recipe(boil_size: Liters, boil_time: Minutes) -> Self {
        Boil {
            pre_volume: boil_size,
            boil_time,
        }
    }
}
use structopt::StructOpt;

// Command-line options for the code runner.
//
// NOTE: the `///` doc comments on this struct and its fields are consumed by
// structopt and become the runtime `--help` text, so they are user-visible
// output — do not edit them casually.
#[derive(Debug, Clone, StructOpt)]
/// Ported from https://github.com/prasmussen/glot-code-runner
pub struct CmdLineOpt {
    // Optional; `None` means "use a temporary directory" (handled by the caller).
    #[structopt(short = "w", long = "work-dir")]
    /// Working directory, if not specified, will use a temporary directory.
    pub work_dir: Option<String>,
    // Optional; `None` means "read from STDIN" (handled by the caller).
    #[structopt(short = "f", long = "file")]
    /// Read input from file, if not specified, read from STDIN.
    pub file: Option<String>,
    // Flag: print the supported-language list and exit.
    #[structopt(short = "l", long = "list")]
    /// List all supported languages.
    pub list: bool,
    // Flag: emit sample input content.
    #[structopt(long = "sample")]
    /// Sample stdin or file content.
    pub sample: bool,
}
use lock_api::{ GetThreadId, RawMutex, RawRwLock, RawRwLockDowngrade, RawRwLockRecursive, RawRwLockUpgrade, RawRwLockUpgradeDowngrade, }; use std::{cell::Cell, num::NonZeroUsize}; pub struct RawCellMutex { locked: Cell<bool>, } unsafe impl RawMutex for RawCellMutex { #[allow(clippy::declare_interior_mutable_const)] const INIT: Self = RawCellMutex { locked: Cell::new(false), }; type GuardMarker = lock_api::GuardNoSend; #[inline] fn lock(&self) { if self.is_locked() { deadlock("", "Mutex") } self.locked.set(true) } #[inline] fn try_lock(&self) -> bool { if self.is_locked() { false } else { self.locked.set(true); true } } unsafe fn unlock(&self) { self.locked.set(false) } #[inline] fn is_locked(&self) -> bool { self.locked.get() } } const WRITER_BIT: usize = 0b01; const ONE_READER: usize = 0b10; pub struct RawCellRwLock { state: Cell<usize>, } impl RawCellRwLock { #[inline] fn is_exclusive(&self) -> bool { self.state.get() & WRITER_BIT != 0 } } unsafe impl RawRwLock for RawCellRwLock { #[allow(clippy::declare_interior_mutable_const)] const INIT: Self = RawCellRwLock { state: Cell::new(0), }; type GuardMarker = <RawCellMutex as RawMutex>::GuardMarker; #[inline] fn lock_shared(&self) { if !self.try_lock_shared() { deadlock("sharedly ", "RwLock") } } #[inline] fn try_lock_shared(&self) -> bool { // TODO: figure out whether this is realistic; could maybe help // debug deadlocks from 2+ read() in the same thread? 
// if self.is_locked() { // false // } else { // self.state.set(ONE_READER); // true // } self.try_lock_shared_recursive() } #[inline] unsafe fn unlock_shared(&self) { self.state.set(self.state.get() - ONE_READER) } #[inline] fn lock_exclusive(&self) { if !self.try_lock_exclusive() { deadlock("exclusively ", "RwLock") } self.state.set(WRITER_BIT) } #[inline] fn try_lock_exclusive(&self) -> bool { if self.is_locked() { false } else { self.state.set(WRITER_BIT); true } } unsafe fn unlock_exclusive(&self) { self.state.set(0) } fn is_locked(&self) -> bool { self.state.get() != 0 } } unsafe impl RawRwLockDowngrade for RawCellRwLock { unsafe fn downgrade(&self) { self.state.set(ONE_READER); } } unsafe impl RawRwLockUpgrade for RawCellRwLock { #[inline] fn lock_upgradable(&self) { if !self.try_lock_upgradable() { deadlock("upgradably+sharedly ", "RwLock") } } #[inline] fn try_lock_upgradable(&self) -> bool { // defer to normal -- we can always try to upgrade self.try_lock_shared() } #[inline] unsafe fn unlock_upgradable(&self) { self.unlock_shared() } #[inline] unsafe fn upgrade(&self) { if !self.try_upgrade() { deadlock("upgrade ", "RwLock") } } #[inline] unsafe fn try_upgrade(&self) -> bool { if self.state.get() == ONE_READER { self.state.set(WRITER_BIT); true } else { false } } } unsafe impl RawRwLockUpgradeDowngrade for RawCellRwLock { #[inline] unsafe fn downgrade_upgradable(&self) { // no-op -- we're always upgradable } #[inline] unsafe fn downgrade_to_upgradable(&self) { self.state.set(ONE_READER); } } unsafe impl RawRwLockRecursive for RawCellRwLock { #[inline] fn lock_shared_recursive(&self) { if !self.try_lock_shared_recursive() { deadlock("recursively+sharedly ", "RwLock") } } #[inline] fn try_lock_shared_recursive(&self) -> bool { if self.is_exclusive() { false } else if let Some(new) = self.state.get().checked_add(ONE_READER) { self.state.set(new); true } else { false } } } #[cold] #[inline(never)] fn deadlock(lock_kind: &str, ty: &str) -> ! 
{ panic!("deadlock: tried to {lock_kind}lock a Cell{ty} twice") } pub struct SingleThreadId(()); unsafe impl GetThreadId for SingleThreadId { const INIT: Self = SingleThreadId(()); fn nonzero_thread_id(&self) -> NonZeroUsize { NonZeroUsize::new(1).unwrap() } }
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. /// Remote peer address-based access control. /// /// Holds deny and permitted address lists (black lists and white lists) for remote Internet Protocol version 4 and version 6 subnets; the deny list is checked first, and, if the address is not present, the allow list then checked. /// This allows for generic white listing rules (eg all of the regular internet) and then for explicit exemptions (eg these message-dispatchs in this country). /// /// For unix domain sockets, there is a deny list of user identifiers and a permitted list of (primary) group identifiers. /// The deny list is checked first, and, if the user identifier is not present, the allow list is then checked. /// This allows for generic white listing rules (eg for all administrators) and then for explicit exemptions (eg a recently departed administrator). 
// NOTE(review): the repeated `identifierentifiers` spelling below looks like a
// search-and-replace artifact ("identifier" expanded inside "identifiers").
// It also appears on the external `credentials.user_identifierentifier` field,
// so it cannot be renamed from this file alone — confirm against the
// StreamingSocketFileDescriptor credentials type before fixing.
pub struct RemotePeerAddressBasedAccessControl {
    // Deny lists are consulted first; see the trait impls below.
    denied_protocol_version_4_subnets: IpLookupTable<Ipv4Addr, ()>,
    // `None` means "everything not denied is permitted".
    permitted_protocol_version_4_subnets: Option<IpLookupTable<Ipv4Addr, ()>>,
    denied_protocol_version_6_subnets: IpLookupTable<Ipv6Addr, ()>,
    permitted_protocol_version_6_subnets: Option<IpLookupTable<Ipv6Addr, ()>>,
    // Unix domain sockets: deny by user id, then permit by (primary) group id.
    denied_unix_domain_user_identifierentifiers: HashSet<uid_t>,
    permitted_unix_domain_group_identifiers: Option<HashSet<gid_t>>,
}

impl Debug for RemotePeerAddressBasedAccessControl {
    // Hand-written because IpLookupTable does not print usefully; the subnet
    // tables are elided as `_` and only the unix-domain sets are shown.
    #[inline(always)]
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "RemotePeerAddressBasedAccessControl {{ denied_protocol_version_4_subnets: _, permitted_protocol_version_4_subnets: _, denied_protocol_version_6_subnets: _, permitted_protocol_version_6_subnets: _, denied_unix_domain_user_identifierentifiers: {:?}, permitted_unix_domain_group_identifiers: {:?} }}", self.denied_unix_domain_user_identifierentifiers, self.permitted_unix_domain_group_identifiers)
    }
}

impl RemotePeerAddressBasedAccessControl {
    /// Creates a new instance.
    ///
    /// Permitted lists are `Option`s.
    /// If they are `None`, then the permitted list is not checked and all possible values are permitted (as long as the accompanying deny list does not deny them).
    #[inline(always)]
    pub fn new(denied_protocol_version_4_subnets: InternetProtocolSubnets<Ipv4Addr>, permitted_protocol_version_4_subnets: Option<InternetProtocolSubnets<Ipv4Addr>>, denied_protocol_version_6_subnets: InternetProtocolSubnets<Ipv6Addr>, permitted_protocol_version_6_subnets: Option<InternetProtocolSubnets<Ipv6Addr>>, denied_unix_domain_user_identifierentifiers: HashSet<uid_t>, permitted_unix_domain_group_identifiers: Option<HashSet<gid_t>>) -> Self {
        Self {
            // Subnet lists are converted once, here, into lookup tables.
            denied_protocol_version_4_subnets: denied_protocol_version_4_subnets.to_ip_lookup_table(),
            permitted_protocol_version_4_subnets: permitted_protocol_version_4_subnets.map(|value| value.to_ip_lookup_table()),
            denied_protocol_version_6_subnets: denied_protocol_version_6_subnets.to_ip_lookup_table(),
            permitted_protocol_version_6_subnets: permitted_protocol_version_6_subnets.map(|value| value.to_ip_lookup_table()),
            denied_unix_domain_user_identifierentifiers,
            permitted_unix_domain_group_identifiers,
        }
    }
}

impl AccessControl<sockaddr_in> for RemotePeerAddressBasedAccessControl {
    // IPv4 policy: deny list first, then permit list (None => allow all).
    #[inline(always)]
    fn is_remote_peer_allowed(&self, remote_peer_address: sockaddr_in, _streaming_socket_file_descriptor: &StreamingSocketFileDescriptor<sockaddr_in>) -> bool {
        // Reinterprets the raw in_addr bytes as an Ipv4Addr (same layout —
        // presumably sound; confirm both are 4 bytes, network byte order).
        let remote_peer_address: Ipv4Addr = unsafe { transmute(remote_peer_address.sin_addr) };

        if unlikely!(self.denied_protocol_version_4_subnets.is_match(remote_peer_address)) {
            return false
        }

        match self.permitted_protocol_version_4_subnets {
            None => true,
            Some(ref ip_lookup_table) => ip_lookup_table.is_match(remote_peer_address)
        }
    }
}

impl AccessControl<sockaddr_in6> for RemotePeerAddressBasedAccessControl {
    // IPv6 policy: same two-stage check as IPv4.
    #[inline(always)]
    fn is_remote_peer_allowed(&self, remote_peer_address: sockaddr_in6, _streaming_socket_file_descriptor: &StreamingSocketFileDescriptor<sockaddr_in6>) -> bool {
        // Raw in6_addr bytes reinterpreted as an Ipv6Addr.
        let remote_peer_address: Ipv6Addr = unsafe { transmute(remote_peer_address.sin6_addr) };

        if unlikely!(self.denied_protocol_version_6_subnets.is_match(remote_peer_address)) {
            return false
        }

        match self.permitted_protocol_version_6_subnets {
            None => true,
            Some(ref ip_lookup_table) => ip_lookup_table.is_match(remote_peer_address)
        }
    }
}

impl AccessControl<sockaddr_un> for RemotePeerAddressBasedAccessControl {
    // Unix-domain policy: the peer address itself is ignored; access is
    // decided from the peer's credentials (user id denied first, then the
    // primary group id checked against the permit list).
    #[inline(always)]
    fn is_remote_peer_allowed(&self, _remote_peer_address: sockaddr_un, streaming_socket_file_descriptor: &StreamingSocketFileDescriptor<sockaddr_un>) -> bool {
        let credentials = streaming_socket_file_descriptor.remote_peer_credentials();

        if unlikely!(self.denied_unix_domain_user_identifierentifiers.contains(&credentials.user_identifierentifier)) {
            return false
        }

        match self.permitted_unix_domain_group_identifiers {
            None => true,
            Some(ref group_identifiers) => group_identifiers.contains(&credentials.group_identifier),
        }
    }
}
use crate::lib::{default_sub_command, file_to_lines, parse_lines, parse_usize, Command}; use anyhow::Error; use clap::{value_t_or_exit, values_t_or_exit, App, Arg, ArgMatches, SubCommand}; use nom::{ branch::alt, character::complete, combinator::map, multi::many1, sequence::{preceded, tuple}, }; use simple_error::SimpleError; use std::str::FromStr; pub const TOBOGGAN_TRAJECTORY: Command = Command::new(sub_command, "toboggan-trajectory", run); struct TobogganTrajectoryArgs { file: String, slopes: Vec<Slope>, } struct Slope { right: usize, down: usize, } impl FromStr for Slope { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { tuple((parse_usize, preceded(complete::char(','), parse_usize)))(s) .map(|(_, (right, down))| Slope { right: right, down: down, }) .map_err(|_| SimpleError::new("Parse failure").into()) } } #[derive(Debug)] enum Terrain { Clear, Tree, } fn sub_command() -> App<'static, 'static> { default_sub_command(&TOBOGGAN_TRAJECTORY, "Takes a toboggan hill and a slope an returns the product of the number of trees \ that the toboggan hit on each slope", "Path to the input file. Input should be a toboggan hill with . denoting\ an empty space and # denoting a tree.") .arg(Arg::with_name("slope") .short("s") .help( "Slope of the toboggan specified by number of right units then number of down units \ separated by a comma. 
Example: 3,1", ) .takes_value(true) .multiple(true) .number_of_values(1) .min_values(1), ) .subcommand( SubCommand::with_name("part1") .about("Validates the default input with a single slope of 3,1") .version("1.0.0"), ) .subcommand( SubCommand::with_name("part2") .about("Validates the default input with slopes of 1,1 3,1 5,1 7,1 1,2") .version("1.0.0"), ) } fn run(arguments: &ArgMatches) -> Result<(), Error> { let tobaggan_tarjectory_arguments = match arguments.subcommand_name() { Some("part1") => TobogganTrajectoryArgs { file: "day3/input.txt".to_string(), slopes: vec![Slope { right: 3, down: 1 }], }, Some("part2") => TobogganTrajectoryArgs { file: "day3/input.txt".to_string(), slopes: vec![ Slope { right: 1, down: 1 }, Slope { right: 3, down: 1 }, Slope { right: 5, down: 1 }, Slope { right: 7, down: 1 }, Slope { right: 1, down: 2 }, ], }, _ => TobogganTrajectoryArgs { file: value_t_or_exit!(arguments.value_of("file"), String), slopes: values_t_or_exit!(arguments.values_of("slope"), Slope), }, }; file_to_lines(&tobaggan_tarjectory_arguments.file) .and_then(|lines| parse_lines(lines, parse_toboggan_line)) .map(|hill| { tobaggan_tarjectory_arguments .slopes .into_iter() .map(|slope| run_through_slope(&hill, &slope)) .fold(1usize, |acc, trees| acc * trees) }) .map(|result| { println!("{:#?}", result); }) .map(|_| ()) } fn run_through_slope(hill: &Vec<Vec<Terrain>>, slope: &Slope) -> usize { let x_max = hill[0].len(); let mut x = 0; let mut y = 0; let mut tree_count = 0; loop { x = (x + slope.right) % x_max; y = y + slope.down; if y >= hill.len() { break; } tree_count += match hill[y][x] { Terrain::Clear => 0, Terrain::Tree => 1, }; } tree_count } fn parse_toboggan_line(line: &String) -> Result<Vec<Terrain>, Error> { many1(alt(( map(complete::char('.'), |_| Terrain::Clear), map(complete::char('#'), |_| Terrain::Tree), )))(line.as_str()) .map(|(_, terrain)| terrain) .map_err(|_: nom::Err<nom::error::Error<&str>>| SimpleError::new("Parse failure").into()) }
// This file is automatically @generated by awto-cli v0.1.1 pub use sea_orm; include!(concat!(env!("OUT_DIR"), "/app.rs"));
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! Syntax extension to create floating point literals from hexadecimal strings Once loaded, hexfloat!() is called with a string containing the hexadecimal floating-point literal, and an optional type (f32 or f64). If the type is omitted, the literal is treated the same as a normal unsuffixed literal. # Examples To load the extension and use it: ```rust,ignore #[phase(plugin)] extern crate hexfloat; fn main() { let val = hexfloat!("0x1.ffffb4", f32); } ``` # References * [ExploringBinary: hexadecimal floating point constants] (http://www.exploringbinary.com/hexadecimal-floating-point-constants/) */ #![crate_name = "hexfloat"] #![experimental] #![crate_type = "rlib"] #![crate_type = "dylib"] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/master/")] #![feature(plugin_registrar)] extern crate syntax; extern crate rustc; use syntax::ast; use syntax::codemap::{Span, mk_sp}; use syntax::ext::base; use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; use rustc::plugin::Registry; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("hexfloat", expand_syntax_ext); } //Check if the literal is valid (as LLVM expects), //and return a descriptive error if not. 
// Validates `s` against the hex-float grammar LLVM expects:
//   [-] 0x hexdigits . hexdigits p [-] decdigits
// Returns None on success, or Some((byte_offset, message)) describing the
// first problem. `i` tracks how many characters have been consumed so the
// caller can point a span at the offending position.
//
// NOTE(review): each `chars.take_while(...)` loop appears to also consume the
// first non-matching character (and the inner `chars.next()` calls advance the
// same iterator again) — historical pre-1.0 iterator semantics; behaviour
// preserved verbatim, confirm against the rustc source of the era before
// changing anything here.
fn hex_float_lit_err(s: &str) -> Option<(uint, String)> {
    let mut chars = s.chars().peekable();
    let mut i = 0;
    // Optional leading sign.
    if chars.peek() == Some(&'-') { chars.next(); i+= 1 }
    if chars.next() != Some('0') {
        return Some((i, "Expected '0'".to_string()));
    } i+=1;
    if chars.next() != Some('x') {
        return Some((i, "Expected 'x'".to_string()));
    } i+=1;
    // Integral hex digits (may be empty if there are fractional digits).
    let mut d_len = 0i;
    for _ in chars.take_while(|c| c.is_digit_radix(16)) { chars.next(); i+=1; d_len += 1;}
    if chars.next() != Some('.') {
        return Some((i, "Expected '.'".to_string()));
    } i+=1;
    // Fractional hex digits.
    let mut f_len = 0i;
    for _ in chars.take_while(|c| c.is_digit_radix(16)) { chars.next(); i+=1; f_len += 1;}
    // At least one digit must appear on one side of the point.
    if d_len == 0 && f_len == 0 {
        return Some((i, "Expected digits before or after decimal point".to_string()));
    }
    // Binary exponent marker plus optional sign and mandatory decimal digits.
    if chars.next() != Some('p') {
        return Some((i, "Expected 'p'".to_string()));
    } i+=1;
    if chars.peek() == Some(&'-') { chars.next(); i+= 1 }
    let mut e_len = 0i;
    for _ in chars.take_while(|c| c.is_digit()) { chars.next(); i+=1; e_len += 1}
    if e_len == 0 {
        return Some((i, "Expected exponent digits".to_string()));
    }
    // Anything left over is trailing junk.
    match chars.next() {
        None => None,
        Some(_) => Some((i, "Expected end of string".to_string()))
    }
}

// Macro expander for hexfloat!("...", [f32|f64]): validates the string
// literal and re-emits it as a (possibly suffixed) float literal AST node.
pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         -> Box<base::MacResult + 'static> {
    let (expr, ty_lit) = parse_tts(cx, tts);

    // Resolve the optional second argument to a float type, if present.
    let ty = match ty_lit {
        None => None,
        Some(Ident{ident, span}) => match token::get_ident(ident).get() {
            "f32" => Some(ast::TyF32),
            "f64" => Some(ast::TyF64),
            _ => {
                cx.span_err(span, "invalid floating point type in hexfloat!");
                None
            }
        }
    };

    // The first argument must be a string literal.
    let s = match expr.node {
        // expression is a literal
        ast::ExprLit(ref lit) => match lit.node {
            // string literal
            ast::LitStr(ref s, _) => {
                s.clone()
            }
            _ => {
                cx.span_err(expr.span, "unsupported literal in hexfloat!");
                return base::DummyResult::expr(sp);
            }
        },
        _ => {
            cx.span_err(expr.span, "non-literal in hexfloat!");
            return base::DummyResult::expr(sp);
        }
    };

    {
        let err = hex_float_lit_err(s.get());
        match err {
            Some((err_pos, err_str)) => {
                // Point the diagnostic at the offending character inside the
                // literal (+1 skips the opening quote).
                let pos = expr.span.lo + syntax::codemap::Pos::from_uint(err_pos + 1);
                let span = syntax::codemap::mk_sp(pos,pos);
                cx.span_err(span,
                            format!("invalid hex float literal in hexfloat!: {}",
                                    err_str).as_slice());
                return base::DummyResult::expr(sp);
            }
            _ => ()
        }
    }

    // Unsuffixed when no type was given; otherwise suffixed with f32/f64.
    let lit = match ty {
        None => ast::LitFloatUnsuffixed(s),
        Some (ty) => ast::LitFloat(s, ty)
    };
    MacExpr::new(cx.expr_lit(sp, lit))
}

// An identifier plus the span it was parsed from (for diagnostics).
struct Ident {
    ident: ast::Ident,
    span: Span
}

// Parses the macro's token tree: an expression, optionally followed by
// `, <ident>` naming the float type. Errors on any trailing tokens.
fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (P<ast::Expr>, Option<Ident>) {
    let p = &mut cx.new_parser_from_tts(tts);
    let ex = p.parse_expr();
    let id = if p.token == token::Eof {
        None
    } else {
        p.expect(&token::Comma);
        let lo = p.span.lo;
        let ident = p.parse_ident();
        let hi = p.last_span.hi;
        Some(Ident{ident: ident, span: mk_sp(lo, hi)})
    };
    if p.token != token::Eof {
        p.unexpected();
    }
    (ex, id)
}

// FIXME (10872): This is required to prevent an LLVM assert on Windows
#[test]
fn dummy_test() { }
use std::ops::Deref;

use super::*;

/// Read-only view over the mapped contents of an OpenGL buffer.
///
/// Dereferences to the mapped `[T]` slice; on drop the buffer is re-bound and
/// unmapped via the GL API, so the mapping lives exactly as long as this value.
pub struct ReadBufferMap<'a, T, Kind, Access>
where
    T: Sized + BufferData,
    Kind: BufferType,
    Access: BufferAcces,
{
    /// Buffer the mapping was obtained from (needed again at unmap time).
    pub(crate) buff: &'a Buffer<T, Kind, Access>,
    /// The driver-provided slice of mapped data.
    pub(crate) buffer: &'a [T],
}

impl<T, Kind, Access> Deref for ReadBufferMap<'_, T, Kind, Access>
where
    T: Sized + BufferData,
    Kind: BufferType,
    Access: BufferAcces,
{
    type Target = [T];

    /// Exposes the mapped data as a plain slice.
    fn deref(&self) -> &Self::Target {
        self.buffer
    }
}

impl<T, Kind, Access> Drop for ReadBufferMap<'_, T, Kind, Access>
where
    T: Sized + BufferData,
    Kind: BufferType,
    Access: BufferAcces,
{
    /// Releases the mapping: re-binds the source buffer to its target and
    /// unmaps it.
    fn drop(&mut self) {
        let target = Kind::value();
        unsafe {
            gl::BindBuffer(target, self.buff.id());
            gl::UnmapBuffer(target);
        }
    }
}
//! Shared helpers for windows programming. use windows_sys::Windows::Win32::SystemServices as ss; mod event; pub use self::event::Event; cfg_events_driver! { #[doc(inherit)] pub use crate::runtime::events::AsyncEvent; } /// Trait that indicates a type that encapsulates an event. pub trait RawEvent { /// Access the underlying raw handle for the event. /// /// # Safety /// /// Caller must ensure that the raw handle stays alive for the duration of /// whatever its being associated with. unsafe fn raw_event(&self) -> ss::HANDLE; }
use crate::components::prelude::{CheckpointId, FeatureType};
use deathframe::geo::Vector;

/// Serializable snapshot of a reached checkpoint.
#[derive(Clone, Deserialize, Serialize)]
pub struct CheckpointData {
    // Position recorded for the checkpoint — presumably the respawn point;
    // TODO confirm against the consumer of this resource.
    pub position: Vector,
    // Features active at the time the checkpoint was taken.
    pub features: Vec<FeatureType>,
    // Checkpoint ids associated with this snapshot.
    pub checkpoints: Vec<CheckpointId>,
}

/// Resource holding the most recent checkpoint, if any
/// (defaults to `None` via `Default`).
#[derive(Default)]
pub struct CheckpointRes(pub Option<CheckpointData>);
use super::symbol::*;

use std::result::Result;

///
/// Possible errors from a script call
///
#[derive(Clone, PartialEq, Debug)]
pub enum FloScriptError {
    /// The requested feature is not available (with description as to why)
    Unavailable(String),

    /// A requested symbol was not defined
    UndefinedSymbol(FloScriptSymbol),

    /// Tried to attach an input stream to a symbol that is not defined as an input symbol
    NotAnInputSymbol,

    /// Tried to perform an operation on an item that is not a namespace
    NotANamespace,

    /// Attempted to read from a namespace as if it were a stream
    CannotReadFromANamespace,

    /// Requested an output or an input with the wrong type
    IncorrectType,

    /// Indicates an error from the script (the payload carries the error text)
    ScriptError(String)
}

/// Result from a script operation
pub type FloScriptResult<T> = Result<T, FloScriptError>;
// NOTE: historical pre-1.0-style Rust (bare `Box<Trait + 'static>` objects,
// `try!`, `serialize::json`); kept in its original dialect.
use std::collections::HashMap;
use std::io::Write;
use serialize::json::ToJson;

use template::{Template, TemplateError};
use render::{Renderable, RenderError, RenderContext};
use helpers::{HelperDef};
use context::{Context};
use helpers;
use support::str::StringWriter;

// Central store of compiled templates and helper implementations, keyed by name.
pub struct Registry {
    templates: HashMap<String, Template>,
    helpers: HashMap<String, Box<HelperDef + 'static>>
}

impl Registry {
    // Creates a registry pre-populated with the ten built-in helpers
    // (the test below relies on that count being 10).
    pub fn new() -> Registry {
        let mut r = Registry {
            templates: HashMap::new(),
            helpers: HashMap::new()
        };
        r.register_helper("if", Box::new(helpers::IF_HELPER));
        r.register_helper("unless", Box::new(helpers::UNLESS_HELPER));
        r.register_helper("each", Box::new(helpers::EACH_HELPER));
        r.register_helper("with", Box::new(helpers::WITH_HELPER));
        r.register_helper("lookup", Box::new(helpers::LOOKUP_HELPER));
        r.register_helper("raw", Box::new(helpers::RAW_HELPER));
        r.register_helper(">", Box::new(helpers::INCLUDE_HELPER));
        r.register_helper("block", Box::new(helpers::BLOCK_HELPER));
        r.register_helper("partial", Box::new(helpers::PARTIAL_HELPER));
        r.register_helper("log", Box::new(helpers::LOG_HELPER));
        r
    }

    // Stores an already-compiled template under `name` (replacing any previous one).
    pub fn register_template(&mut self, name: &str, template: Template) {
        self.templates.insert(name.to_string(), template);
    }

    // Compiles `tpl_str` and stores it under `name`; returns the compile error on failure.
    pub fn register_template_string(&mut self, name: &str, tpl_str: String) -> Result<(), TemplateError>{
        let t = Template::compile(tpl_str);
        if let Ok(tpl) = t {
            self.templates.insert(name.to_string(), tpl);
            Ok(())
        } else {
            Err(t.err().unwrap())
        }
    }

    pub fn unregister_template(&mut self, name: &String) {
        self.templates.remove(name);
    }

    // Returns the previously registered helper of the same name, if any.
    pub fn register_helper(&mut self, name: &str, def: Box<HelperDef + 'static>) -> Option<Box<HelperDef + 'static>> {
        self.helpers.insert(name.to_string(), def)
    }

    pub fn get_template(&self, name: &String) -> Option<&Template> {
        self.templates.get(name)
    }

    pub fn get_helper(&self, name: &String) -> Option<&Box<HelperDef + 'static>> {
        self.helpers.get(name)
    }

    pub fn get_templates(&self) -> &HashMap<String, Template> {
        &self.templates
    }

    pub fn clear_templates(&mut self) {
        self.templates.clear();
    }

    // Renders the named template with `ctx` into a String
    // (convenience wrapper over `renderw`).
    pub fn render<T>(&self, name: &str, ctx: &T) -> Result<String, RenderError>
        where T: ToJson {
        let mut writer = StringWriter::new();
        {
            try!(self.renderw(name, ctx, &mut writer));
        }
        Ok(writer.to_string())
    }

    // Renders the named template with `ctx` into an arbitrary writer.
    pub fn renderw<T>(&self, name: &str, ctx: &T, writer: &mut Write) -> Result<(), RenderError>
        where T: ToJson {
        let template = self.get_template(&name.to_string());
        let context = Context::wraps(ctx);
        if let Some(t) = template {
            let mut render_context = RenderContext::new(writer);
            (*t).render(&context, self, &mut render_context)
        } else {
            Err(RenderError{ desc: "Template not found." })
        }
    }
}

#[cfg(test)]
mod test {
    use serialize::json::Json;
    use template::{Template};
    use registry::{Registry};
    use render::{RenderContext, Renderable, RenderError, Helper};
    use helpers::{HelperDef};
    use context::{Context};
    use support::str::StringWriter;

    // Helper that just renders its own inner template verbatim.
    #[derive(Clone, Copy)]
    struct DummyHelper;

    impl HelperDef for DummyHelper {
        fn call(&self, c: &Context, h: &Helper, r: &Registry, rc: &mut RenderContext) -> Result<(), RenderError> {
            try!(h.template().unwrap().render(c, r, rc));
            Ok(())
        }
    }

    static DUMMY_HELPER: DummyHelper = DummyHelper;

    #[test]
    fn test_registry_operations() {
        let mut r = Registry::new();

        let t = Template::compile("<h1></h1>".to_string()).ok().unwrap();
        r.register_template("index", t.clone());

        let t2 = Template::compile("<h2></h2>".to_string()).ok().unwrap();
        r.register_template("index2", t2.clone());

        assert_eq!((*r.get_template(&("index".to_string())).unwrap()).to_string(), t.to_string());
        assert_eq!(r.templates.len(), 2);

        r.unregister_template(&("index".to_string()));
        assert_eq!(r.templates.len(), 1);

        r.clear_templates();
        assert_eq!(r.templates.len(), 0);

        r.register_helper("dummy", Box::new(DUMMY_HELPER));

        // built-in helpers plus 1
        assert_eq!(r.helpers.len(), 10+1);
    }

    #[test]
    fn test_renderw() {
        let mut r = Registry::new();

        let t = Template::compile("<h1></h1>".to_string()).ok().unwrap();
        r.register_template("index", t.clone());

        let mut sw = StringWriter::new();
        let data = Json::Null;
        {
            r.renderw("index", &data, &mut sw).ok().unwrap();
        }
        assert_eq!("<h1></h1>".to_string(), sw.to_string());
    }
}
//! Module that contains known test keys.

// TODO: Should be derived from seeds once implemented in the Rust version.

/// Define an ed25519 test key.
///
/// Each invocation expands to a module (`alice`, `bob`, ...) exposing the
/// key as a generic `PublicKey`, the concrete ed25519 key, the signature
/// address spec, and the derived address.
macro_rules! test_key_ed25519 {
    ($doc:expr, $name:ident, $pk:expr) => {
        #[doc = " Test key "]
        #[doc=$doc]
        #[doc = "."]
        pub mod $name {
            use crate::{
                crypto::signature::{ed25519, PublicKey},
                types::address::{Address, SignatureAddressSpec},
            };

            #[doc = " Test public key "]
            #[doc=$doc]
            #[doc = "."]
            pub fn pk() -> PublicKey {
                PublicKey::Ed25519(pk_ed25519())
            }

            #[doc = " Test Ed25519 public key "]
            #[doc=$doc]
            #[doc = "."]
            pub fn pk_ed25519() -> ed25519::PublicKey {
                // `$pk` is the base64-encoded public key bytes.
                $pk.into()
            }

            #[doc = " Test address derivation information "]
            #[doc=$doc]
            #[doc = "."]
            pub fn sigspec() -> SignatureAddressSpec {
                SignatureAddressSpec::Ed25519(pk_ed25519())
            }

            #[doc = " Test address "]
            #[doc=$doc]
            #[doc = "."]
            pub fn address() -> Address {
                Address::from_sigspec(&sigspec())
            }
        }
    };
}

/// Define a secp256k1 test key.
///
/// Same shape as `test_key_ed25519!`, but the address spec uses the
/// Ethereum-compatible `Secp256k1Eth` derivation.
macro_rules! test_key_secp256k1 {
    ($doc:expr, $name:ident, $pk:expr) => {
        #[doc = " Test key "]
        #[doc=$doc]
        #[doc = "."]
        pub mod $name {
            use crate::{
                crypto::signature::{secp256k1, PublicKey},
                types::address::{Address, SignatureAddressSpec},
            };

            #[doc = " Test public key "]
            #[doc=$doc]
            #[doc = "."]
            pub fn pk() -> PublicKey {
                PublicKey::Secp256k1(pk_secp256k1())
            }

            #[doc = " Test Secp256k1 public key "]
            #[doc=$doc]
            #[doc = "."]
            pub fn pk_secp256k1() -> secp256k1::PublicKey {
                // `$pk` is the base64-encoded compressed public key.
                $pk.into()
            }

            #[doc = " Test address derivation information "]
            #[doc=$doc]
            #[doc = "."]
            pub fn sigspec() -> SignatureAddressSpec {
                SignatureAddressSpec::Secp256k1Eth(pk_secp256k1())
            }

            #[doc = " Test address "]
            #[doc=$doc]
            #[doc = "."]
            pub fn address() -> Address {
                Address::from_sigspec(&sigspec())
            }
        }
    };
}

// Well-known test identities. The base64 strings below are fixed key
// material and must not be altered.
test_key_ed25519!("A", alice, "NcPzNW3YU2T+ugNUtUWtoQnRvbOL9dYSaBfbjHLP1pE=");
test_key_ed25519!("B", bob, "YgkEiVSR4SMQdfXw+ppuFYlqH0seutnCKk8KG8PyAx0=");
test_key_ed25519!("C", charlie, "8l1AQE+ETOPLckiNJ7NOD+AfZdaPw6wguir/vSF11YI=");
test_key_secp256k1!("D", dave, "AwF6GNjbybMzhi3XRj5R1oTiMMkO1nAwB7NZAlH1X4BE");
//! Parser for Ulysses exported MarkDown bundle.
//!
//! Extract Post from a MarkDown bundle by iterating Blog.
//!
//! # Example
//!
//! ```
//! use parser::Blog;
//! let data = std::fs::read_to_string("bundle.md").unwrap();
//! let blog = Blog::from(&data);
//!
//! for post in blog {
//!     println!("{}", post.title);
//! }
//! ```
use linter::{Linter, Scripts};
use pulldown_cmark::{Alignment, Event, Options, Parser, Tag};
use std::borrow::Cow;
use std::collections::HashMap;
use std::fmt::{Arguments, Write};

/// A single parsed blog post extracted from the bundle.
#[derive(Eq, PartialEq)]
pub struct Post {
    pub title: String,
    pub released: String,
    pub modified: String,
    pub category: String,
    pub pagename: String,
    // Rendered HTML body of the post.
    pub data: String,
}

// Tracks whether we are currently rendering a table head or body,
// so cells can emit <th> vs <td>.
enum TableState {
    Head,
    Body,
}

/// Streaming parser over a Markdown bundle; yields one [`Post`] per
/// top-level `# Header` section when iterated.
pub struct Blog<'a> {
    iter: Parser<'a>,
    // Total byte length of the input; used to detect end of stream.
    size: usize,
    title: String,
    released: String,
    modified: String,
    category: String,
    pagename: String,
    data: String,
    // Footnote name -> running 1-based id.
    reference: HashMap<Cow<'a, str>, usize>,
    // Script class (e.g. Chinese vs Latin) of the last emitted character,
    // used to insert thin spaces between mixed scripts.
    space_state: Scripts,
    // Pending inline-markup output, flushed before the next text/HTML write.
    space_buffer: String,
    table_state: TableState,
    table_alignments: Vec<Alignment>,
    table_cell_index: usize,
}

impl<'a> Blog<'a> {
    /// Create a parser over `content`, skipping everything before the
    /// first level-1 header.
    pub fn from(content: &'a str) -> Blog<'a> {
        let mut iter = Parser::new_ext(content, Options::ENABLE_TABLES);
        while let Some(event) = iter.next() {
            if event == Event::Start(Tag::Header(1)) {
                break;
            }
        }
        Blog {
            iter,
            size: content.len(),
            title: String::with_capacity(64),
            released: String::with_capacity(25),
            modified: String::with_capacity(25),
            category: String::with_capacity(16),
            pagename: String::with_capacity(32),
            data: String::with_capacity(16384),
            reference: HashMap::default(),
            space_state: Scripts::Unknown,
            space_buffer: String::with_capacity(64),
            table_state: TableState::Head,
            table_alignments: Vec::with_capacity(8),
            table_cell_index: 0,
        }
    }

    /// Reset all per-post accumulators before parsing the next post.
    fn clear(&mut self) {
        self.title.clear();
        self.released.clear();
        self.modified.clear();
        self.category.clear();
        self.pagename.clear();
        self.data.clear();
        self.reference.clear();
        self.space_state = Scripts::Unknown;
        self.space_buffer.clear();
        self.table_state = TableState::Head;
        self.table_alignments.clear();
        self.table_cell_index = 0;
    }

    /// Consume the post header (title plus a metadata code block) and fill
    /// in title/released/modified/category/pagename.
    ///
    /// NOTE(review): the `[18..]`/`[9..]` slices are byte offsets past the
    /// CJK labels ("本文发表于:" = 18 bytes, "分类:"/"页名:" = 9 bytes) —
    /// assumes the labels are exactly these UTF-8 sequences; confirm against
    /// the Ulysses export format.
    fn parse_meta(&mut self) {
        let mut header = true;
        while let Some(event) = self.iter.next() {
            match event {
                Event::Start(Tag::CodeBlock(_)) => header = false,
                Event::Text(ref text) if header => {
                    self.title.push_txt(text);
                }
                Event::Text(ref text) if text.starts_with("本文发表于:") => {
                    self.released.push_str(text[18..].trim_end());
                }
                Event::Text(ref text) if text.starts_with("最后修改于:") => {
                    self.modified.push_str(text[18..].trim_end());
                }
                Event::Text(ref text) if text.starts_with("分类:") => {
                    self.category.push_str(text[9..].trim_end());
                }
                Event::Text(ref text) if text.starts_with("页名:") => {
                    self.pagename.push_str(text[9..].trim_end());
                }
                Event::End(Tag::CodeBlock(_)) => break,
                _ => (),
            }
        }
    }

    /// Render the post body into `self.data` as HTML until the next
    /// level-1 header (which starts the next post) or end of input.
    fn parse_body(&mut self) {
        while let Some(event) = self.iter.next() {
            match event {
                Event::Start(Tag::Header(1)) => break,
                Event::Start(tag) => self.start_tag(tag),
                Event::End(tag) => self.end_tag(tag),
                Event::Text(text) => self.push_text(&text),
                Event::Html(html) => self.push_html(&html),
                Event::InlineHtml(html) => self.push_html(&html),
                Event::SoftBreak => self.fresh_line(),
                Event::HardBreak => self.push_html("<br />\n"),
                Event::FootnoteReference(name) => {
                    let id = self.poll_note(name);
                    self.push_note(format_args!(
                        "\u{2060}<sup><a href=\"#{0}\">{0}</a></sup>",
                        id
                    ));
                }
            }
        }
        self.fresh_buffer();
    }

    /// Consume a nested event subtree, emitting only its plain text
    /// (used for e.g. image alt text). Stops at the matching `End` event.
    fn parse_text(&mut self) {
        let mut nest = 0;
        while let Some(event) = self.iter.next() {
            match event {
                Event::Start(_) => nest += 1,
                Event::End(_) if nest == 0 => break,
                Event::End(_) => nest -= 1,
                Event::Text(text) => self.push_html(&text),
                Event::Html(_) | Event::InlineHtml(_) => (),
                Event::SoftBreak | Event::HardBreak => self.push_html(" "),
                Event::FootnoteReference(name) => {
                    let id = self.poll_note(name);
                    self.push_note(format_args!("[{0}]", id));
                }
            }
        }
    }

    // Flush any pending inline markup into the output.
    fn fresh_buffer(&mut self) {
        if !self.space_buffer.is_empty() {
            self.data.push_str(&self.space_buffer);
            self.space_buffer.clear();
        };
    }

    // Ensure the output ends with a newline (block-level separator).
    fn fresh_line(&mut self) {
        self.fresh_buffer();
        if !(self.data.is_empty() || self.data.ends_with('\n')) {
            self.data.push('\n');
        }
        self.space_state = Scripts::Unknown;
    }

    // Append raw HTML verbatim; resets the script state since markup
    // breaks the CJK/Latin spacing run.
    fn push_html(&mut self, text: &str) {
        self.fresh_buffer();
        self.data.push_str(text);
        self.space_state = Scripts::Unknown;
    }

    // Append text, inserting a thin space (U+2009) at Chinese/non-Chinese
    // script boundaries.
    //
    // NOTE(review): `&&` binds tighter than `||`, so this reads as
    // `A || (B && ws != ns)` — verify that is the intended grouping and not
    // `(A || B) && ws != ns`.
    fn push_text(&mut self, text: &str) {
        let ws = self.space_state;
        let ns = text.chars().next().map_or(Scripts::Unknown, |x| x.into());
        if (ws == Scripts::Chinese && ns != Scripts::Unknown)
            || (ns == Scripts::Chinese && ws != Scripts::Unknown) && ws != ns
        {
            self.data.push('\u{2009}');
        };
        self.fresh_buffer();
        self.data.push_txt(text);
        self.space_state = text.chars().last().map_or(Scripts::Unknown, |x| x.into());
    }

    // Append preformatted footnote markup.
    fn push_note(&mut self, args: Arguments) {
        self.fresh_buffer();
        self.data.write_fmt(args).unwrap();
        self.space_state = Scripts::Unknown;
    }

    // Return the stable 1-based id for a footnote name, assigning the
    // next id on first sight.
    fn poll_note(&mut self, name: Cow<'a, str>) -> usize {
        let id = self.reference.len() + 1;
        *self.reference.entry(name).or_insert(id)
    }

    /// Emit the opening HTML for `tag`.
    fn start_tag(&mut self, tag: Tag<'a>) {
        match tag {
            Tag::Paragraph => {
                self.fresh_line();
                self.data.push_str("<p>");
            }
            Tag::Rule => {
                self.fresh_line();
                self.data.push_str("<hr />\n")
            }
            Tag::Header(level) => {
                self.fresh_line();
                self.data.push_str("<h");
                // level is 1..=6, so a single digit char suffices.
                self.data.push((b'0' + level as u8) as char);
                self.data.push('>');
            }
            Tag::Table(alignments) => {
                self.fresh_line();
                self.table_alignments = alignments;
                self.data.push_str("<table>");
            }
            Tag::TableHead => {
                self.table_state = TableState::Head;
                self.space_buffer.push_str("<thead><tr>");
            }
            Tag::TableRow => {
                self.table_cell_index = 0;
                self.space_buffer.push_str("<tr>");
            }
            Tag::TableCell => {
                match self.table_state {
                    TableState::Head => self.space_buffer.push_str("<th"),
                    TableState::Body => self.space_buffer.push_str("<td"),
                }
                match self.table_alignments.get(self.table_cell_index) {
                    Some(&Alignment::Left) => self.space_buffer.push_str(" align=\"left\""),
                    Some(&Alignment::Center) => self.space_buffer.push_str(" align=\"center\""),
                    Some(&Alignment::Right) => self.space_buffer.push_str(" align=\"right\""),
                    _ => (),
                }
                self.space_buffer.push_str(">");
            }
            Tag::BlockQuote => {
                self.fresh_line();
                self.data.push_str("<blockquote>\n");
            }
            Tag::CodeBlock(info) => {
                self.fresh_line();
                // The info string's first word is the language tag.
                let lang = info.split(' ').next().unwrap();
                if lang.is_empty() {
                    self.data.push_str("<pre><code>");
                } else {
                    self.data.push_str("<pre><code lang=\"");
                    self.data.push_str(lang);
                    self.data.push_str("\">");
                }
            }
            Tag::List(Some(1)) => {
                self.fresh_line();
                self.data.push_str("<ol>\n");
            }
            Tag::List(Some(start)) => {
                self.fresh_line();
                write!(&mut self.data, "<ol start=\"{}\">\n", start).unwrap();
            }
            Tag::List(None) => {
                self.fresh_line();
                self.data.push_str("<ul>\n");
            }
            Tag::Item => {
                self.fresh_line();
                self.data.push_str("<li>");
            }
            Tag::Emphasis => self.space_buffer.push_str("<em>"),
            Tag::Strong => self.space_buffer.push_str("<strong>"),
            Tag::Code => self.space_buffer.push_str("<code>"),
            Tag::Link(dest, title) => {
                self.space_buffer.push_str("<a href=\"");
                self.space_buffer.push_str(&dest);
                if !title.is_empty() {
                    self.space_buffer.push_str("\" title=\"");
                    self.space_buffer.push_txt(title);
                }
                self.space_buffer.push_str("\" target=\"_blank\">");
            }
            Tag::Image(dest, title) => {
                self.space_buffer.push_str("<img src=\"");
                self.space_buffer.push_str(&dest);
                self.space_buffer.push_str("\" alt=\"");
                // Consume the image's inner events to build the alt text.
                self.parse_text();
                if !title.is_empty() {
                    self.space_buffer.push_str("\" title=\"");
                    self.space_buffer.push_txt(title);
                }
                self.space_buffer.push_str("\" />")
            }
            Tag::FootnoteDefinition(name) => {
                let id = self.poll_note(name);
                self.push_note(format_args!("<aside id=\"{0}\"><sup>{0}</sup>", id));
            }
        }
    }

    /// Emit the closing HTML for `tag`.
    fn end_tag(&mut self, tag: Tag) {
        self.fresh_buffer();
        match tag {
            Tag::Paragraph => self.data.push_str("</p>\n"),
            Tag::Rule => (),
            Tag::Header(level) => {
                self.data.push_str("</h");
                self.data.push((b'0' + level as u8) as char);
                self.data.push_str(">\n");
            }
            Tag::Table(_) => {
                self.data.push_str("</tbody></table>\n");
            }
            Tag::TableHead => {
                self.data.push_str("</tr></thead><tbody>\n");
                self.table_state = TableState::Body;
            }
            Tag::TableRow => {
                self.data.push_str("</tr>\n");
            }
            Tag::TableCell => {
                match self.table_state {
                    TableState::Head => self.data.push_str("</th>"),
                    TableState::Body => self.data.push_str("</td>"),
                }
                self.table_cell_index += 1;
            }
            Tag::BlockQuote => self.data.push_str("</blockquote>\n"),
            Tag::CodeBlock(_) => self.data.push_str("</code></pre>\n"),
            Tag::List(Some(_)) => self.data.push_str("</ol>\n"),
            Tag::List(None) => self.data.push_str("</ul>\n"),
            Tag::Item => self.data.push_str("</li>\n"),
            Tag::Emphasis => self.data.push_str("</em>"),
            Tag::Strong => self.data.push_str("</strong>"),
            Tag::Code => self.data.push_str("</code>"),
            Tag::Link(_, _) => self.data.push_str("</a>"),
            Tag::Image(_, _) => (),
            Tag::FootnoteDefinition(_) => self.data.push_str("</aside>\n"),
        }
    }
}

impl<'a> Iterator for Blog<'a> {
    type Item = Post;

    /// Parse and return the next post, or `None` once the underlying
    /// parser has consumed the entire input.
    fn next(&mut self) -> Option<Post> {
        if self.iter.get_offset() < self.size {
            self.clear();
            self.parse_meta();
            self.parse_body();
            Some(Post {
                title: self.title.clone(),
                released: self.released.clone(),
                modified: self.modified.clone(),
                category: self.category.clone(),
                pagename: self.pagename.clone(),
                data: self.data.clone(),
            })
        } else {
            None
        }
    }
}
use swayipc::{BindingEvent, Connection, Event, EventType};
use std::sync::{Arc, Mutex};
use std::collections::{HashSet, HashMap};
use std::thread;
use std::boxed::Box;
use std::error::Error;
use std::time::Duration;
use log;

// Marker commands bound to the keys; recognising them in binding events lets
// us distinguish our own bindings from unrelated ones.
const SWAY_COMMAND_PRESS: &str = "nop press";
const SWAY_COMMAND_RELEASE: &str = "nop release";

type Keyname = String;

/// User configuration: which keys drive the virtual mouse and how fast it moves.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Config {
    pub mod_key: Keyname,
    pub left_key: Keyname,
    pub right_key: Keyname,
    pub up_key: Keyname,
    pub down_key: Keyname,
    pub left_click_key: Keyname,
    pub right_click_key: Keyname,
    pub tick_interval: Duration,
    pub velocity_px_per_s: u32,
}

// Logical keys the daemon tracks, independent of the configured key symbols.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Key {
    Mod,
    Up,
    Down,
    Left,
    Right,
    LeftClick,
    RightClick,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum KeyState {
    Up,
    Down,
}

// Shared daemon state: the set of logical keys currently held down.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
struct State {
    down_keys: HashSet<Key>
}

impl State {
    // Whether the given logical key is currently held.
    fn get_key_state(&self, key: &Key) -> KeyState {
        return if self.down_keys.contains(&key) {
            KeyState::Down
        } else {
            KeyState::Up
        }
    }
}

// Build a map from `k => v` pairs; the target collection is inferred.
macro_rules! map_of {
    ($($k:expr => $v:expr),* $(,)?) => {{
        use std::iter::{Iterator, IntoIterator};
        Iterator::collect(IntoIterator::into_iter([$(($k, $v),)*]))
    }};
}

// Build any `FromIterator` collection from a list of values.
macro_rules! collection_of {
    ($($v:expr),* $(,)?) => {{
        use std::iter::{Iterator, IntoIterator};
        Iterator::collect(IntoIterator::into_iter([$($v,)*]))
    }};
}

/// Send a single command to sway over IPC, logging it at trace level.
fn run_sway_command<T: AsRef<str> + std::fmt::Display>(conn: &mut Connection, command: T) -> Result<(), Box<dyn Error>> {
    log::trace!("Running command: {}", command);
    conn.run_command(&command)?;
    Ok(())
}

/// Install the press/release bindings for every configured movement/click key.
/// Existing bindings for those combos are removed first so re-runs are clean.
fn setup_sway_config(config: &Config) -> Result<(), Box<dyn Error>> {
    let mut conn = Connection::new()?;
    // HashSet deduplicates in case two actions share the same key symbol.
    let codes: HashSet<&str> = collection_of! {
        &*config.up_key,
        &*config.down_key,
        &*config.left_key,
        &*config.right_key,
        &*config.left_click_key,
        &*config.right_click_key,
    };
    for &key in &codes {
        log::debug!("Setting up: {}+{}", config.mod_key, key);
        let unbind_press = format!("unbindsym {}+{}", config.mod_key, key);
        let unbind_release = format!("unbindsym --release {}+{}", config.mod_key, key);
        let bind_press = format!("bindsym --no-repeat {}+{} {}", config.mod_key, key, SWAY_COMMAND_PRESS);
        let bind_release = format!("bindsym --release {}+{} {}", config.mod_key, key, SWAY_COMMAND_RELEASE);
        run_sway_command(&mut conn, unbind_press)?;
        run_sway_command(&mut conn, unbind_release)?;
        run_sway_command(&mut conn, bind_press)?;
        run_sway_command(&mut conn, bind_release)?;
    }
    Ok(())
}

// Map a sway key symbol back to the logical key it is configured as.
fn parse_binding_key(config: &Config, symbol: &str) -> Option<Key> {
    let symbol_to_key: HashMap<&str, Key> = map_of! {
        &*config.up_key => Key::Up,
        &*config.down_key => Key::Down,
        &*config.left_key => Key::Left,
        &*config.right_key => Key::Right,
        &*config.left_click_key => Key::LeftClick,
        &*config.right_click_key => Key::RightClick,
    };
    return symbol_to_key.get(symbol).map(|x| x.clone());
}

// Opposite movement direction, if any (clicks/mod have none).
fn get_opposing_key(key: &Key) -> Option<Key> {
    return match key {
        Key::Up => Some(Key::Down),
        Key::Down => Some(Key::Up),
        Key::Right => Some(Key::Left),
        Key::Left => Some(Key::Right),
        _ => None,
    }
}

/// Translate click-key transitions into sway cursor button press/release
/// commands; movement keys are handled by the tick loop instead.
fn handle_mouse_key(conn: &mut Connection, key: &Key, key_down: bool) -> Result<(), Box<dyn Error>> {
    let button = match key {
        Key::LeftClick => Some("button1"),
        Key::RightClick => Some("button3"),
        _ => None,
    };
    let action = if key_down {"press"} else {"release"};
    match button {
        Some(button) => {
            let cmd = format!("seat - cursor {} {}", action, button);
            run_sway_command(conn, &cmd)?;
        },
        None => {},
    }
    Ok(())
}

/// Update the shared key state for a press/release and forward clicks to sway.
fn handle_bound_key(conn: &mut Connection, state: &mut State, key: &Key, key_down: bool) -> Result<(), Box<dyn Error>> {
    if key_down {
        // Holding both directions on one axis makes no sense; drop the opposite.
        match get_opposing_key(&key) {
            Some(key) => {
                state.down_keys.remove(&key);
            },
            _ => {},
        }
        state.down_keys.insert(key.clone());
    } else {
        // Sway does not send release events when switching between bindings (only down)
        // We clear the entire state here to prevent stuck movement events
        // state.down_keys.remove(&key);
        state.down_keys.clear();
    }
    handle_mouse_key(conn, &key, key_down)?;
    Ok(())
}

/// React to a sway binding event: track the modifier, then dispatch the bound
/// key if the event came from one of our own `nop press`/`nop release` bindings.
fn handle_binding_event(conn: &mut Connection, state: &mut State, config: &Config, event: &BindingEvent) -> Result<(), Box<dyn Error>> {
    let binding = &event.binding;
    let modifiers: HashSet<&String> = binding.event_state_mask.iter().collect();
    // Mod counts as held only when it is the sole active modifier.
    let mod_down = modifiers == collection_of! { &config.mod_key };
    handle_bound_key(conn, state, &Key::Mod, mod_down)?;
    let bound_key = binding.symbol.as_ref().and_then(|x| parse_binding_key(&config, &*x));
    match bound_key {
        Some(key) => {
            let our_commands: HashSet<&str> = collection_of! { SWAY_COMMAND_PRESS, SWAY_COMMAND_RELEASE};
            let our_action = our_commands.contains(&*binding.command);
            if our_action {
                let key_down = binding.command.ends_with("press");
                handle_bound_key(conn, state, &key, key_down)?;
            } else {
                log::warn!("Event was not bound correctly - bound to \"{}\"", binding.command);
            }
        }
        None => {
            log::trace!("Ignoring unbound key event");
        }
    }
    Ok(())
}

/// Spawn a background thread that subscribes to sway binding events and feeds
/// them into the shared state. Returns immediately after spawning.
fn run_event_receiver(daemon_state: &Arc<Mutex<State>>, daemon_config: &Config) -> Result<(), Box<dyn Error>> {
    let config = daemon_config.clone();
    // Separate connections: one for sending commands, one for the subscription
    // (subscribe consumes its connection).
    let mut conn = Connection::new()?;
    let event_types = [EventType::Binding];
    let event_iter = Connection::new()?.subscribe(&event_types)?;
    let thread_state = Arc::clone(daemon_state);
    thread::spawn(move || {
        for evt_result in event_iter {
            let event = evt_result.expect("Failed to get event");
            log::trace!("Received event: {:?}", event);
            let mut state = thread_state.lock().expect("Failed to get state");
            match event {
                Event::Binding(event) => {
                    handle_binding_event(&mut conn, &mut state, &config, &event)
                },
                _ => Ok(())
            }.expect("Failed to handle event")
        }
    });
    Ok(())
}

/// One movement tick: translate the held direction keys plus elapsed time into
/// a `seat - cursor move` command. No-op unless the modifier is held.
fn handle_tick(config: &Config, conn: &mut Connection, current_state: &State, elapsed_time: &Duration) -> Result<(), Box<dyn Error>> {
    log::trace!("Tick state: {:?}, elapsed_time: {:?}", current_state, elapsed_time);
    let velocity_px_per_s = config.velocity_px_per_s;
    let elapsed_s = elapsed_time.as_secs_f32();
    let mod_state = current_state.get_key_state(&Key::Mod);
    let up_state = current_state.get_key_state(&Key::Up);
    let down_state = current_state.get_key_state(&Key::Down);
    let left_state = current_state.get_key_state(&Key::Left);
    let right_state = current_state.get_key_state(&Key::Right);
    if mod_state != KeyState::Down {
        // Nothing to do, mod is not pressed
        log::trace!("Skipping tick because mod is not pressed");
        return Ok(());
    }
    let delta_px = elapsed_s * velocity_px_per_s as f32;
    let mut move_vec_x: f32 = 0.0;
    let mut move_vec_y: f32 = 0.0;
    if up_state == KeyState::Down {
        move_vec_y -= 1.0;
    }
    if down_state == KeyState::Down {
        move_vec_y += 1.0;
    }
    if right_state == KeyState::Down {
        move_vec_x += 1.0;
    }
    if left_state == KeyState::Down {
        move_vec_x -= 1.0;
    }
    // Normalise so diagonal movement is not faster than axis-aligned movement.
    // NOTE(review): when no direction key is held the magnitude is 0 and the
    // divisions produce NaN; `NaN.round() as i32` saturates to 0 (Rust >= 1.45),
    // so the cursor happens not to move — consider an explicit zero check.
    let move_vec_magnitude = (move_vec_x.powf(2.0) + move_vec_y.powf(2.0)).sqrt();
    let move_dx = move_vec_x / move_vec_magnitude;
    let move_dy = move_vec_y / move_vec_magnitude;
    let dx_px = (move_dx * delta_px).round() as i32;
    let dy_px = (move_dy * delta_px).round() as i32;
    log::trace!("Moving mouse by x: {dx}px y: {dy}px", dx=dx_px, dy=dy_px);
    let move_cmd = format!(
        "seat - cursor move {dx} {dy}",
        dx=dx_px, dy=dy_px
    );
    run_sway_command(conn, move_cmd)?;
    Ok(())
}

/// Main loop: every `tick_interval`, snapshot the shared state and move the
/// cursor according to the held keys and the real elapsed time. Never returns
/// normally.
fn run_loop(config: &Config, daemon_state: &Arc<Mutex<State>>) -> Result<(), Box<dyn Error>> {
    let tick_interval = config.tick_interval;
    let mut conn = Connection::new()?;
    let mut last_iteration_time = std::time::Instant::now();
    loop {
        let loop_start_time = std::time::Instant::now();
        let elapsed_time = last_iteration_time.elapsed();
        last_iteration_time = loop_start_time;
        // Move cursor (based on previous/current state and elapsed time)
        {
            // Clone the state so the lock is not held during the IPC round-trip.
            let current_state = daemon_state.lock().expect("Failed to get state").clone();
            handle_tick(&config, &mut conn, &current_state, &elapsed_time)?;
        }
        // Sleep
        let loop_end_time = std::time::Instant::now();
        let loop_elapsed = loop_end_time - loop_start_time;
        let sleep_for = tick_interval.saturating_sub(loop_elapsed);
        thread::sleep(sleep_for);
    }
}

/// Daemon entry point: install sway bindings, start the event receiver thread,
/// then block in the movement loop.
pub fn run(config: &Config) -> Result<(), Box<dyn Error>> {
    // Setup sway config
    log::info!("Setting up sway config");
    setup_sway_config(&config)?;
    let state = Arc::new(Mutex::new(State::default()));
    // Spawn event receiver thread
    log::info!("Spawning event receiver");
    run_event_receiver(&state, &config)?;
    // Run main loop
    log::info!("Starting main loop");
    run_loop(&config, &state)?;
    Ok(())
}
use clap::Parser; use parquet_wasm::arrow1::reader::read_parquet; use std::fs; use std::path::PathBuf; use std::process; #[derive(Parser, Debug)] #[clap(author, version, about, long_about = None)] struct Args { /// Path to input file #[clap(short, long)] input_file: PathBuf, /// Path to output file #[clap(short, long)] output_file: PathBuf, } fn main() { let args = Args::parse(); // Read file to buffer let data = fs::read(&args.input_file).expect("Unable to read file"); // Call read_parquet let arrow_ipc = read_parquet(data) .map_err(|err| { eprintln!("Could not read parquet file: {}", err); process::exit(1); }) .unwrap(); // Write result to file fs::write(&args.output_file, arrow_ipc).expect("Unable to write file"); }
// Compile-test fixture for the nightly-only `llvm_asm!` macro (requires the
// `#![feature(llvm_asm)]` gate; the macro was later removed in favour of `asm!`).
#![feature(llvm_asm)]

fn f() {}

fn main() {
    // Pass a function item through an "r" (register) input operand; the asm
    // template itself is empty, so this only exercises operand type checking.
    unsafe {llvm_asm!( "" :: "r"(f))}
}
// svd2rust-style generated accessor API for the FLASH option register (OPTR):
// typed reader/writer wrappers plus one read proxy and one write proxy per field.
#[doc = "Reader of register OPTR"]
pub type R = crate::R<u32, super::OPTR>;
#[doc = "Writer for register OPTR"]
pub type W = crate::W<u32, super::OPTR>;
#[doc = "Register OPTR `reset()`'s with value 0"]
impl crate::ResetValue for super::OPTR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}

// --- Multi-bit field proxies (unsafe `bits` writers mask the raw value) ---

#[doc = "Reader of field `RDP`"]
pub type RDP_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RDP`"]
pub struct RDP_W<'a> {
    w: &'a mut W,
}
impl<'a> RDP_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Reader of field `BOR_LEV`"]
pub type BOR_LEV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `BOR_LEV`"]
pub struct BOR_LEV_W<'a> {
    w: &'a mut W,
}
impl<'a> BOR_LEV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 8)) | (((value as u32) & 0x07) << 8);
        self.w
    }
}

// --- Single-bit field proxies (safe set/clear/bit writers) ---

#[doc = "Reader of field `nRST_STOP`"]
pub type NRST_STOP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `nRST_STOP`"]
pub struct NRST_STOP_W<'a> {
    w: &'a mut W,
}
impl<'a> NRST_STOP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
#[doc = "Reader of field `nRST_STDBY`"]
pub type NRST_STDBY_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `nRST_STDBY`"]
pub struct NRST_STDBY_W<'a> {
    w: &'a mut W,
}
impl<'a> NRST_STDBY_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
#[doc = "Reader of field `nRST_SHDW`"]
pub type NRST_SHDW_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `nRST_SHDW`"]
pub struct NRST_SHDW_W<'a> {
    w: &'a mut W,
}
impl<'a> NRST_SHDW_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
#[doc = "Reader of field `IWDG_SW`"]
pub type IWDG_SW_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IWDG_SW`"]
pub struct IWDG_SW_W<'a> {
    w: &'a mut W,
}
impl<'a> IWDG_SW_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `IWDG_STOP`"]
pub type IWDG_STOP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IWDG_STOP`"]
pub struct IWDG_STOP_W<'a> {
    w: &'a mut W,
}
impl<'a> IWDG_STOP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
#[doc = "Reader of field `IWDG_STDBY`"]
pub type IWDG_STDBY_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IWDG_STDBY`"]
pub struct IWDG_STDBY_W<'a> {
    w: &'a mut W,
}
impl<'a> IWDG_STDBY_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
        self.w
    }
}
#[doc = "Reader of field `WWDG_SW`"]
pub type WWDG_SW_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WWDG_SW`"]
pub struct WWDG_SW_W<'a> {
    w: &'a mut W,
}
impl<'a> WWDG_SW_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
        self.w
    }
}
#[doc = "Reader of field `SWAP_BANK`"]
pub type SWAP_BANK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SWAP_BANK`"]
pub struct SWAP_BANK_W<'a> {
    w: &'a mut W,
}
impl<'a> SWAP_BANK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
        self.w
    }
}
#[doc = "Reader of field `DB256K`"]
pub type DB256K_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DB256K`"]
pub struct DB256K_W<'a> {
    w: &'a mut W,
}
impl<'a> DB256K_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
        self.w
    }
}
#[doc = "Reader of field `DBANK`"]
pub type DBANK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DBANK`"]
pub struct DBANK_W<'a> {
    w: &'a mut W,
}
impl<'a> DBANK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
        self.w
    }
}
#[doc = "Reader of field `SRAM2_PE`"]
pub type SRAM2_PE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM2_PE`"]
pub struct SRAM2_PE_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM2_PE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
#[doc = "Reader of field `SRAM2_RST`"]
pub type SRAM2_RST_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM2_RST`"]
pub struct SRAM2_RST_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM2_RST_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
        self.w
    }
}
#[doc = "Reader of field `nSWBOOT0`"]
pub type NSWBOOT0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `nSWBOOT0`"]
pub struct NSWBOOT0_W<'a> {
    w: &'a mut W,
}
impl<'a> NSWBOOT0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
        self.w
    }
}
#[doc = "Reader of field `nBOOT0`"]
pub type NBOOT0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `nBOOT0`"]
pub struct NBOOT0_W<'a> {
    w: &'a mut W,
}
impl<'a> NBOOT0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27);
        self.w
    }
}
#[doc = "Reader of field `PA15_PUPEN`"]
pub type PA15_PUPEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PA15_PUPEN`"]
pub struct PA15_PUPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> PA15_PUPEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
        self.w
    }
}
#[doc = "Reader of field `TZEN`"]
pub type TZEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TZEN`"]
pub struct TZEN_W<'a> {
    w: &'a mut W,
}
impl<'a> TZEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}

// --- Field readers: extract and shift each field out of the raw register value ---
impl R {
    #[doc = "Bits 0:7 - Read protection level"]
    #[inline(always)]
    pub fn rdp(&self) -> RDP_R {
        RDP_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:10 - BOR reset Level"]
    #[inline(always)]
    pub fn bor_lev(&self) -> BOR_LEV_R {
        BOR_LEV_R::new(((self.bits >> 8) & 0x07) as u8)
    }
    #[doc = "Bit 12 - nRST_STOP"]
    #[inline(always)]
    pub fn n_rst_stop(&self) -> NRST_STOP_R {
        NRST_STOP_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 13 - nRST_STDBY"]
    #[inline(always)]
    pub fn n_rst_stdby(&self) -> NRST_STDBY_R {
        NRST_STDBY_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 14 - nRST_SHDW"]
    #[inline(always)]
    pub fn n_rst_shdw(&self) -> NRST_SHDW_R {
        NRST_SHDW_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 16 - Independent watchdog selection"]
    #[inline(always)]
    pub fn iwdg_sw(&self) -> IWDG_SW_R {
        IWDG_SW_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Independent watchdog counter freeze in Stop mode"]
    #[inline(always)]
    pub fn iwdg_stop(&self) -> IWDG_STOP_R {
        IWDG_STOP_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 18 - Independent watchdog counter freeze in Standby mode"]
    #[inline(always)]
    pub fn iwdg_stdby(&self) -> IWDG_STDBY_R {
        IWDG_STDBY_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - Window watchdog selection"]
    #[inline(always)]
    pub fn wwdg_sw(&self) -> WWDG_SW_R {
        WWDG_SW_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - SWAP_BANK"]
    #[inline(always)]
    pub fn swap_bank(&self) -> SWAP_BANK_R {
        SWAP_BANK_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 21 - DB256K"]
    #[inline(always)]
    pub fn db256k(&self) -> DB256K_R {
        DB256K_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 22 - DBANK"]
    #[inline(always)]
    pub fn dbank(&self) -> DBANK_R {
        DBANK_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bit 24 - SRAM2 parity check enable"]
    #[inline(always)]
    pub fn sram2_pe(&self) -> SRAM2_PE_R {
        SRAM2_PE_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 25 - SRAM2 Erase when system reset"]
    #[inline(always)]
    pub fn sram2_rst(&self) -> SRAM2_RST_R {
        SRAM2_RST_R::new(((self.bits >> 25) & 0x01) != 0)
    }
    #[doc = "Bit 26 - nSWBOOT0"]
    #[inline(always)]
    pub fn n_swboot0(&self) -> NSWBOOT0_R {
        NSWBOOT0_R::new(((self.bits >> 26) & 0x01) != 0)
    }
    #[doc = "Bit 27 - nBOOT0"]
    #[inline(always)]
    pub fn n_boot0(&self) -> NBOOT0_R {
        NBOOT0_R::new(((self.bits >> 27) & 0x01) != 0)
    }
    #[doc = "Bit 28 - PA15_PUPEN"]
    #[inline(always)]
    pub fn pa15_pupen(&self) -> PA15_PUPEN_R {
        PA15_PUPEN_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 31 - TZEN"]
    #[inline(always)]
    pub fn tzen(&self) -> TZEN_R {
        TZEN_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}

// --- Field writer constructors: each returns the matching write proxy ---
impl W {
    #[doc = "Bits 0:7 - Read protection level"]
    #[inline(always)]
    pub fn rdp(&mut self) -> RDP_W {
        RDP_W { w: self }
    }
    #[doc = "Bits 8:10 - BOR reset Level"]
    #[inline(always)]
    pub fn bor_lev(&mut self) -> BOR_LEV_W {
        BOR_LEV_W { w: self }
    }
    #[doc = "Bit 12 - nRST_STOP"]
    #[inline(always)]
    pub fn n_rst_stop(&mut self) -> NRST_STOP_W {
        NRST_STOP_W { w: self }
    }
    #[doc = "Bit 13 - nRST_STDBY"]
    #[inline(always)]
    pub fn n_rst_stdby(&mut self) -> NRST_STDBY_W {
        NRST_STDBY_W { w: self }
    }
    #[doc = "Bit 14 - nRST_SHDW"]
    #[inline(always)]
    pub fn n_rst_shdw(&mut self) -> NRST_SHDW_W {
        NRST_SHDW_W { w: self }
    }
    #[doc = "Bit 16 - Independent watchdog selection"]
    #[inline(always)]
    pub fn iwdg_sw(&mut self) -> IWDG_SW_W {
        IWDG_SW_W { w: self }
    }
    #[doc = "Bit 17 - Independent watchdog counter freeze in Stop mode"]
    #[inline(always)]
    pub fn iwdg_stop(&mut self) -> IWDG_STOP_W {
        IWDG_STOP_W { w: self }
    }
    #[doc = "Bit 18 - Independent watchdog counter freeze in Standby mode"]
    #[inline(always)]
    pub fn iwdg_stdby(&mut self) -> IWDG_STDBY_W {
        IWDG_STDBY_W { w: self }
    }
    #[doc = "Bit 19 - Window watchdog selection"]
    #[inline(always)]
    pub fn wwdg_sw(&mut self) -> WWDG_SW_W {
        WWDG_SW_W { w: self }
    }
    #[doc = "Bit 20 - SWAP_BANK"]
    #[inline(always)]
    pub fn swap_bank(&mut self) -> SWAP_BANK_W {
        SWAP_BANK_W { w: self }
    }
    #[doc = "Bit 21 - DB256K"]
    #[inline(always)]
    pub fn db256k(&mut self) -> DB256K_W {
        DB256K_W { w: self }
    }
    #[doc = "Bit 22 - DBANK"]
    #[inline(always)]
    pub fn dbank(&mut self) -> DBANK_W {
        DBANK_W { w: self }
    }
    #[doc = "Bit 24 - SRAM2 parity check enable"]
    #[inline(always)]
    pub fn sram2_pe(&mut self) -> SRAM2_PE_W {
        SRAM2_PE_W { w: self }
    }
    #[doc = "Bit 25 - SRAM2 Erase when system reset"]
    #[inline(always)]
    pub fn sram2_rst(&mut self) -> SRAM2_RST_W {
        SRAM2_RST_W { w: self }
    }
    #[doc = "Bit 26 - nSWBOOT0"]
    #[inline(always)]
    pub fn n_swboot0(&mut self) -> NSWBOOT0_W {
        NSWBOOT0_W { w: self }
    }
    #[doc = "Bit 27 - nBOOT0"]
    #[inline(always)]
    pub fn n_boot0(&mut self) -> NBOOT0_W {
        NBOOT0_W { w: self }
    }
    #[doc = "Bit 28 - PA15_PUPEN"]
    #[inline(always)]
    pub fn pa15_pupen(&mut self) -> PA15_PUPEN_W {
        PA15_PUPEN_W { w: self }
    }
    #[doc = "Bit 31 - TZEN"]
    #[inline(always)]
    pub fn tzen(&mut self) -> TZEN_W {
        TZEN_W { w: self }
    }
}
#![allow(dead_code, unused_variables)]

use crate::cells::Cell;

/// A reproductive spore released by a sporangium.
pub struct Spore {
    x: bool,
}

/// A spore-producing structure.
pub struct Sporangium {
    x: bool,
}

/// Produce a single (non-germinated) spore from the given sporangium.
pub fn produce_spore(factory: &mut Sporangium) -> Spore {
    let germinated = false;
    Spore { x: germinated }
}

/// Recombine genetic material within the parent cell (currently a stub).
fn recombine(parent: &mut Cell) {}
use std::collections::HashMap;
use std::io;
use std::io::Read;
use regex::Regex;

/// Count how many bags a "shiny gold" bag must contain in total, given
/// the containment rules read from stdin (one rule per line, e.g.
/// "light red bags contain 1 bright white bag, 2 muted yellow bags.").
fn main() {
    // Slurp the whole puzzle input.
    let mut input = String::new();
    io::stdin().read_to_string(&mut input).unwrap();

    // One capture per rule line: outer bag colour, then its contents list.
    let line_regex = Regex::new(r"(?m)^([a-z ]+) bags contain (.+)\.$").unwrap();
    // One capture per "<n> <colour> bag(s)" entry inside a contents list.
    let inner_regex = Regex::new(r"(\d+) ([a-z ]+) bags?").unwrap();

    // colour -> list of (count, contained colour)
    let mut bag_tree: HashMap<String, Vec<(u32, String)>> = HashMap::new();
    for caps in line_regex.captures_iter(&input) {
        let children = inner_regex
            .captures_iter(&caps[2])
            .map(|c| (c[1].parse().unwrap(), c[2].to_string()))
            .collect();
        bag_tree.insert(caps[1].to_string(), children);
    }

    // Expand one "layer" of contained bags at a time, accumulating counts.
    let mut frontier: HashMap<&str, u32> = HashMap::new();
    frontier.insert("shiny gold", 1);
    let mut total_count: u32 = 0;
    while !frontier.is_empty() {
        let mut next: HashMap<&str, u32> = HashMap::new();
        for (name, count) in frontier {
            if let Some(children) = bag_tree.get(name) {
                for (child_count, child_name) in children {
                    *next.entry(child_name).or_insert(0) += child_count * count;
                }
            }
        }
        total_count += next.values().sum::<u32>();
        frontier = next;
    }

    println!("{}", total_count);
}
use crate::models::file_stores::FileStores;
use crate::models::shop::{Shop, ShopForm};
use crate::views;
use maud::Markup;
use rocket::request::Form;
use rocket::response::Redirect;
use rocket::State;
use std::iter::FromIterator;

/// List every shop, sorted case-insensitively by name.
#[get("/")]
pub fn list(store: State<FileStores>) -> Markup {
    use crate::models::item_name::ItemName;
    let mut entries = Vec::from_iter(store.shops.all::<Shop>().unwrap().clone());
    entries.sort_by(|(_, lhs), (_, rhs)| lhs.name_upper().cmp(&rhs.name_upper()));
    views::shops::list(entries)
}

/// Show the edit form for a shop; id "0" means a brand-new, empty shop.
#[get("/<id>")]
pub fn edit_page(id: String, store: State<FileStores>) -> Markup {
    let shop = if id.eq("0") {
        Shop { name: "".to_owned() }
    } else {
        store.shops.get::<Shop>(&id).unwrap()
    };
    views::shops::detail(id, shop, "Fiche magasin", "/shops")
}

/// Create a new shop from the submitted form and go back to the list.
#[post("/", data = "<form>")]
pub fn create(form: Form<ShopForm>, store: State<FileStores>) -> Redirect {
    let data = form.into_inner();
    let shop = Shop { name: data.name };
    store.shops.save(&shop).expect("erreur sauvegarde shops");
    Redirect::to("/shops")
}

/// Update an existing shop (identified by the form's uuid) and redirect.
#[put("/", data = "<form>")]
pub fn save(form: Form<ShopForm>, store: State<FileStores>) -> Redirect {
    let data = form.into_inner();
    let updated = Shop { name: data.name };
    store
        .shops
        .save_with_id(&updated, &data.uuid)
        .expect("erreur fichier shops");
    Redirect::to("/shops")
}
// RTC-backed wall-clock and alarm driver for the STM32F401.

use cortex_m::interrupt::free;
use stm32f4xx_hal::{interrupt, stm32 as stm32f401};

use crate::event::InterruptEvent;
use crate::EVENT_QUEUE;

/// Calendar state in plain decimal; the BCD (tens/units) packing of the
/// hardware registers is handled by `Clock::get_state` / `Clock::set_state`.
#[derive(Debug, Clone, Copy)]
pub struct ClockState {
    pub hour: u8,
    pub minute: u8,
    pub second: u8,
    pub weekday: u8,
    pub day: u8,
    pub month: u8,
    pub year: u8,
}

impl Default for ClockState {
    fn default() -> Self {
        ClockState {
            hour: 0,
            minute: 0,
            second: 0,
            weekday: 1,
            day: 1,
            month: 1,
            year: 0,
        }
    }
}

/// Alarm A configuration (hour/minute in plain decimal).
#[derive(Debug, Clone, Copy)]
pub struct AlarmState {
    pub hour: u8,
    pub minute: u8,
    pub enabled: bool,
}

/// Driver owning the raw RTC peripheral.
pub struct Clock {
    rtc: stm32f401::RTC,
}

#[allow(dead_code)]
impl Clock {
    pub fn new(reg: stm32f401::RTC) -> Clock {
        Clock { rtc: reg }
    }

    // disables the write protection of the RTC registers for manipulation and then enables it again
    fn protected<F>(&mut self, f: F)
    where
        F: FnOnce(&mut stm32f401::RTC),
    {
        // Disable RTC write protection
        // (magic unlock key sequence: 0xCA then 0x53; any other value re-locks)
        self.rtc.wpr.write(|w| w.key().bits(0xCA));
        self.rtc.wpr.write(|w| w.key().bits(0x53));
        f(&mut self.rtc);
        // enable RTC write protection
        self.rtc.wpr.write(|w| w.key().bits(0xFF));
    }

    /// Run `f` with the RTC unlocked and in initialization mode so the
    /// time/date registers may be programmed.
    fn initialization_mode<F>(&mut self, f: F)
    where
        F: FnOnce(&mut stm32f401::RTC),
    {
        self.protected(|rtc| {
            // Enter initialization mode
            rtc.isr.modify(|_, w| w.init().init_mode());
            // wait for confirmation
            while rtc.isr.read().initf().is_not_allowed() {}
            // we can now safely initialize the RTC
            f(rtc);
            // exit initialization mode
            rtc.isr.modify(|_, w| w.init().free_running_mode());
        });
    }

    /// Initialize the RTC clock
    pub fn init(&mut self) {
        self.initialization_mode(|rtc| {
            // program prescaler (if needed)
            // since the default is okay if using a 32.768 kHz crystal we do not need to do anything
            // configure time format
            rtc.cr.modify(|_, w| w.fmt().twenty_four_hour());
        });
    }

    /// Whether the calendar has been initialized (INITS flag set).
    pub fn is_set(&self) -> bool {
        self.rtc.isr.read().inits().bit_is_set()
    }

    /// Read the current date/time, converting the BCD register fields
    /// (tens/units) back to plain decimal.
    pub fn get_state(&self) -> ClockState {
        let tr = self.rtc.tr.read();
        let dr = self.rtc.dr.read();
        ClockState {
            hour: tr.ht().bits() * 10 + tr.hu().bits(),
            minute: tr.mnt().bits() * 10 + tr.mnu().bits(),
            second: tr.st().bits() * 10 + tr.su().bits(),
            weekday: dr.wdu().bits(),
            day: dr.dt().bits() * 10 + dr.du().bits(),
            // the month "tens" field is a single bit (months 10-12)
            month: if dr.mt().bits() { 10 } else { 0 } + dr.mu().bits(),
            year: dr.yt().bits() * 10 + dr.yu().bits(),
        }
    }

    /// Program the date/time, converting plain decimal into the BCD
    /// register fields.
    pub fn set_state(&mut self, state: ClockState) {
        self.initialization_mode(|rtc| {
            rtc.tr.modify(|_, w| {
                w.ht()
                    .bits(state.hour / 10)
                    .hu()
                    .bits(state.hour % 10)
                    .mnt()
                    .bits(state.minute / 10)
                    .mnu()
                    .bits(state.minute % 10)
                    .st()
                    .bits(state.second / 10)
                    .su()
                    .bits(state.second % 10)
            });
            rtc.dr.modify(|_, w| {
                w.dt()
                    .bits(state.day / 10)
                    .du()
                    .bits(state.day % 10)
                    .mt()
                    .bit(state.month >= 10)
                    .mu()
                    .bits(state.month % 10)
                    .yt()
                    .bits(state.year / 10)
                    .yu()
                    .bits(state.year % 10)
            });
            rtc.cr.modify(|_, w| w.fmt().twenty_four_hour());
        })
    }

    /// Read back the alarm A configuration.
    pub fn get_alarm(&self) -> AlarmState {
        let cr = self.rtc.cr.read();
        let alrmar = self.rtc.alrmar.read();
        AlarmState {
            hour: alrmar.ht().bits() * 10 + alrmar.hu().bits(),
            minute: alrmar.mnt().bits() * 10 + alrmar.mnu().bits(),
            enabled: cr.alrae().is_enabled(),
        }
    }

    /// Program alarm A to fire at `alarm.hour:alarm.minute:00` (the date/
    /// weekday field is masked out, so it matches on any day).
    pub fn set_alarm(&mut self, alarm: AlarmState) {
        self.protected(|rtc| {
            // disable alarm A
            rtc.cr.modify(|_, w| w.alrae().disabled());
            // wait for confirmation
            while rtc.isr.read().alrawf().is_update_not_allowed() {}
            // configure alarm A
            rtc.alrmar.modify(|_, w| {
                w.msk1()
                    .mask() // care about seconds
                    .msk2()
                    .mask() // care about minutes
                    .msk3()
                    .mask() // care about hours
                    .msk4()
                    .not_mask() // do not care about date/week day
                    .pm()
                    .am() // AM/24 hour format
                    .ht()
                    .bits(alarm.hour / 10) // set hour
                    .hu()
                    .bits(alarm.hour % 10)
                    .mnt()
                    .bits(alarm.minute / 10) // set minute
                    .mnu()
                    .bits(alarm.minute % 10)
                    .st()
                    .bits(0) // set seconds to zero to match on new minute
                    .su()
                    .bits(0)
            });
            // re-enable alarm A (if enabled)
            rtc.cr.modify(|_, w| {
                w.alrae().variant(match alarm.enabled {
                    false => stm32f401::rtc::cr::ALRAE_A::DISABLED,
                    true => stm32f401::rtc::cr::ALRAE_A::ENABLED,
                })
            });
        });
    }

    /// Whether alarm A has triggered since the flag was last cleared.
    pub fn alarm_triggered(&self) -> bool {
        self.rtc.isr.read().alraf().is_match_()
    }

    /// Clear the alarm A triggered flag.
    pub fn alarm_reset(&mut self) {
        self.rtc.isr.modify(|_, w| w.alraf().clear())
    }

    /// Route alarm A to the `RTC_ALARM` interrupt handler below (EXTI
    /// line 17, rising edge, plus ALRAIE and the NVIC unmask).
    pub fn enable_alarm_interrupt(&mut self, exti: &stm32f401::EXTI) {
        // According to page 449 of the STM32F401xDE reference manual
        // enable EXTI Line 17 in interrupt mode and select rising edge sensitivity
        exti.imr.modify(|_, w| w.mr17().unmasked());
        exti.rtsr.modify(|_, w| w.tr17().enabled());
        // enable RTC alarm A interrupt
        self.protected(|rtc| rtc.cr.modify(|_, w| w.alraie().enabled()));
        // enable RTC Alarm interrupt in the NVIC
        stm32f401::NVIC::unpend(stm32f4xx_hal::interrupt::RTC_ALARM);
        unsafe {
            stm32f401::NVIC::unmask(stm32f4xx_hal::interrupt::RTC_ALARM);
        };
    }
}

// Interrupt handler: clears the EXTI line 17 pending bit and forwards an
// alarm event to the application's event queue.
#[interrupt]
fn RTC_ALARM() {
    free(|cs| {
        // SAFETY only used to reset the interrupt pending bit atomically with no side effects
        unsafe {
            (*stm32f401::EXTI::ptr()).pr.write(|w| w.pr17().set_bit());
        }
        EVENT_QUEUE.put(cs, InterruptEvent::Alarm);
    });
}
//! Domain separation context helpers.
use std::sync::Mutex;

use once_cell::sync::Lazy;

use oasis_core_runtime::common::{crypto::hash::Hash, namespace::Namespace};

const CHAIN_CONTEXT_SEPARATOR: &[u8] = b" for chain ";

// Process-wide chain context, set once via `set_chain_context`.
static CHAIN_CONTEXT: Lazy<Mutex<Option<Vec<u8>>>> = Lazy::new(Default::default);

/// Return the globally configured chain domain separation context.
///
/// The returned domain separation context is computed as:
///
/// ```plain
/// <base> || " for chain " || <chain-context>
/// ```
///
/// # Panics
///
/// This function will panic in case the global chain domain separation context was not previously
/// set using `set_chain_context`.
///
pub fn get_chain_context_for(base: &[u8]) -> Vec<u8> {
    let guard = CHAIN_CONTEXT.lock().unwrap();
    let chain_context = match guard.as_ref() {
        Some(cc) => cc,
        None => {
            drop(guard); // Avoid poisoning the global lock.
            panic!("chain domain separation context must be configured");
        }
    };

    // Preallocate the exact output size and copy the three parts in place.
    let mut ctx = vec![0; base.len() + CHAIN_CONTEXT_SEPARATOR.len() + chain_context.len()];
    ctx[..base.len()].copy_from_slice(base);
    ctx[base.len()..base.len() + CHAIN_CONTEXT_SEPARATOR.len()]
        .copy_from_slice(CHAIN_CONTEXT_SEPARATOR);
    ctx[base.len() + CHAIN_CONTEXT_SEPARATOR.len()..].copy_from_slice(chain_context);
    ctx
}

/// Configure the global chain domain separation context.
///
/// The domain separation context is computed as:
///
/// ```plain
/// Base-16(H(<runtime-id> || <consensus-chain-context>))
/// ```
///
/// # Panics
///
/// This function will panic in case the global chain domain separation context was already set.
///
pub fn set_chain_context(runtime_id: Namespace, consensus_chain_context: &str) {
    let ctx = hex::encode(&Hash::digest_bytes_list(&[
        runtime_id.as_ref(),
        consensus_chain_context.as_bytes(),
    ]));
    let mut guard = CHAIN_CONTEXT.lock().unwrap();
    if let Some(ref existing) = *guard {
        let ex = String::from_utf8(existing.clone()).unwrap();
        drop(guard); // Avoid poisoning the global lock.
        panic!("chain domain separation context already set: {}", ex,);
    }
    *guard = Some(ctx.into_bytes());
}

#[cfg(test)]
mod test {
    use super::*;

    // Serializes the tests, since they all mutate the global chain context.
    static TEST_GUARD: Lazy<Mutex<()>> = Lazy::new(Default::default);

    fn reset_chain_context() {
        *CHAIN_CONTEXT.lock().unwrap() = None;
    }

    #[test]
    fn test_chain_context() {
        let _guard = TEST_GUARD.lock().unwrap();
        reset_chain_context();
        set_chain_context(
            "8000000000000000000000000000000000000000000000000000000000000000".into(),
            "643fb06848be7e970af3b5b2d772eb8cfb30499c8162bc18ac03df2f5e22520e",
        );
        let ctx = get_chain_context_for(b"oasis-runtime-sdk/tx: v0");
        assert_eq!(&String::from_utf8(ctx).unwrap(), "oasis-runtime-sdk/tx: v0 for chain ca4842870b97a6d5c0d025adce0b6a0dec94d2ba192ede70f96349cfbe3628b9");
    }

    #[test]
    fn test_chain_context_not_configured() {
        let _guard = TEST_GUARD.lock().unwrap();
        reset_chain_context();
        let result = std::panic::catch_unwind(|| get_chain_context_for(b"test"));
        assert!(result.is_err());
    }

    #[test]
    fn test_chain_context_already_configured() {
        let _guard = TEST_GUARD.lock().unwrap();
        reset_chain_context();
        set_chain_context(
            "8000000000000000000000000000000000000000000000000000000000000000".into(),
            "643fb06848be7e970af3b5b2d772eb8cfb30499c8162bc18ac03df2f5e22520e",
        );
        let result = std::panic::catch_unwind(|| {
            set_chain_context(
                "8000000000000000000000000000000000000000000000000000000000000001".into(),
                "643fb06848be7e970af3b5b2d772eb8cfb30499c8162bc18ac03df2f5e22520e",
            )
        });
        assert!(result.is_err());
    }
}
use super::{CHUNK_SIZE, MapElement}; /// Map Chunk #[derive(Debug, Clone)] pub struct Chunk { data: Vec<MapElement> } impl Chunk { /// Create a new, empty chunk pub fn new() -> Self { Self { data: vec![MapElement::default(); CHUNK_SIZE * CHUNK_SIZE] } } /// Get the index of a given position pub fn get_index(x: usize, y: usize) -> usize { x + y * CHUNK_SIZE } /// Get a value at a particular position pub fn get(&self, x: usize, y: usize) -> &MapElement { &self.data[Self::get_index(x, y)] } /// Get a mutable reference value at a particular position pub fn get_mut(&mut self, x: usize, y: usize) -> &mut MapElement { &mut self.data[Self::get_index(x, y)] } } // Render the Chunk into a string impl std::fmt::Display for Chunk { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { for v in self.data.iter() { write!(f, "{}", v.to_string())?; } Ok(()) } } impl std::str::FromStr for Chunk { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.len() != CHUNK_SIZE * CHUNK_SIZE { return Err(format!("Size `{}` is not the required chunk size {}", s.len(), CHUNK_SIZE * CHUNK_SIZE)); } let mut data = Vec::new(); for c in s.as_bytes() { data.push(str::parse::<MapElement>( std::str::from_utf8(&[*c]).map_err(|e| format!("Cannot convert to utf8 `{}`", e))? )?); } Ok(Self{data}) } } impl std::convert::From<&Chunk> for serde_json::Value { fn from(chunk: &Chunk) -> Self { serde_json::json!(chunk.to_string()) } } impl std::default::Default for Chunk { fn default() -> Self { Self::new() } }
use crate::{
    internal::{factorial, fibonacci},
    AST,
};

/// `fibonacci` on integer inputs: the base case and a mid-sized value.
#[test]
fn fibonacci_int() {
    let fib = |n: &AST| fibonacci(n).unwrap();
    assert_eq!(fib(&AST::integer(0)), AST::integer(0));
    assert_eq!(fib(&AST::integer(10u128)), AST::integer(55u128));
}

/// `factorial` on integer inputs: 0! == 1 and 10! == 3628800.
#[test]
fn factorial_int() {
    let fact = |n: &AST| factorial(n).unwrap();
    assert_eq!(fact(&AST::integer(0)), AST::integer(1));
    assert_eq!(fact(&AST::integer(10u128)), AST::integer(3628800u128));
}
/// A (work-in-progress) B+ tree with branching factor `m`.
pub struct BPlusTree<'a> {
    root: Option<Box<Node<'a>>>,
    first_leaf: Option<Node<'a>>,
    m: u8,
}

/// Internal tree node.
struct Node<'a> {
    parent: Option<Box<Node<'a>>>,
    degree: u8,
    max_degree: u8,
    min_degree: u8,
    keys: Vec<u8>,
    children: Option<Vec<&'a mut LeafNode>>,
}

/// Leaf node holding the actual key/value pairs.
struct LeafNode {
    dict: Vec<Dictionary>
}

/// Error type for B+ tree operations (not yet returned anywhere).
struct BTReeError {
    details: String
}

impl BTReeError {
    fn new(msg: &str) -> BTReeError {
        BTReeError { details: msg.to_string() }
    }
}

// Will hopefully be able to make values generic
struct Dictionary {
    key: u64,
    value: u64,
}

/// Build an empty B+ tree with branching factor 3.
pub fn build_btree<'a>() -> BPlusTree<'a> {
    BPlusTree {
        root: None,
        first_leaf: None,
        m: 3,
    }
}

impl<'a> BPlusTree<'a> {
    /// Find the position of key `t` in `dict_pair`, if present.
    ///
    /// `n_pairs` is currently unused; it is kept for a future true binary
    /// search over a sorted slice. The previous version also computed (and
    /// discarded) a comparison of the first two keys, which made it panic
    /// on inputs with fewer than two entries; that dead code is gone, so
    /// this now works for any input length.
    fn binary_search(dict_pair: Vec<Dictionary>, n_pairs: u64, t: u64) -> Option<usize> {
        // Linear scan for now; the pairs are not guaranteed sorted yet.
        dict_pair.iter().position(|x| x.key == t)
    }

    /// Insert `x` into the tree.
    ///
    /// Placeholder: real key insertion, node splitting and rebalancing are
    /// still TODO — both branches previously built the identical root node,
    /// so the construction is now shared.
    fn insert(&mut self, x: u64) {
        if self.root.is_some() {
            println!("insert - TODO");
        }
        self.root = Some(Box::new(Node {
            parent: None,
            degree: 0,
            max_degree: 2,
            min_degree: 3,
            keys: vec![1, 2],
            children: None,
        }));
    }
}
use iron::prelude::*; use iron::{status, Handler}; use api::rocketchat::WebhookMessage; use api::MatrixApi; use config::Config; use handlers::rocketchat::Forwarder; use log::{self, IronLogger}; use middleware::RocketchatToken; use models::{ConnectionPool, RocketchatServer, VirtualUser}; /// Rocket.Chat is an endpoint of the application service API which is called by the Rocket.Chat /// server to push new messages. pub struct Rocketchat { /// Application service configuration pub config: Config, /// Matrix REST API pub matrix_api: Box<MatrixApi>, } impl Rocketchat { /// Rocket.Chat endpoint with middleware pub fn chain(config: &Config, matrix_api: Box<MatrixApi>) -> Chain { let rocketchat = Rocketchat { config: config.clone(), matrix_api }; let mut chain = Chain::new(rocketchat); chain.link_before(RocketchatToken {}); chain } } impl Handler for Rocketchat { fn handle(&self, request: &mut Request) -> IronResult<Response> { let logger = IronLogger::from_request(request)?; let connection = ConnectionPool::from_request(request)?; let message = request.extensions.get::<WebhookMessage>().expect("Middleware ensures the presence of a message"); let server = request.extensions.get::<RocketchatServer>().expect("Middleware ensures the presence of a server"); let virtual_user = VirtualUser::new(&self.config, &logger, self.matrix_api.as_ref()); let forwarder = Forwarder::new(&self.config, &connection, &logger, self.matrix_api.as_ref(), &virtual_user); if let Err(err) = forwarder.send(server, message) { log::log_error(&logger, &err); } Ok(Response::with((status::Ok, "{}".to_string()))) } }
// Auto-generated (svd2rust) write accessors for the DMA IFCR (interrupt
// flag clear) register: one `*_W` proxy per flag bit, plus an `impl W`
// at the end exposing a builder method per bit (bit 31 = TEIF8 down to
// bit 0 = GIF1). No reader type is generated here, so the register is
// write-only through this API. NOTE(review): the TEIF/HTIF/TCIF/GIF
// names presumably follow the STM32 DMA convention (transfer error /
// half transfer / transfer complete / global flag, per channel) --
// confirm against the device reference manual. Do not edit by hand.
#[doc = "Writer for register IFCR"] pub type W = crate::W<u32, super::IFCR>; #[doc = "Register IFCR `reset()`'s with value 0"] impl crate::ResetValue for super::IFCR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `TEIF8`"] pub struct TEIF8_W<'a> { w: &'a mut W, } impl<'a> TEIF8_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } #[doc = "Write proxy for field `HTIF8`"] pub struct HTIF8_W<'a> { w: &'a mut W, } impl<'a> HTIF8_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Write proxy for field `TCIF8`"] pub struct TCIF8_W<'a> { w: &'a mut W, } impl<'a> TCIF8_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Write proxy for field `GIF8`"] pub struct GIF8_W<'a> { w: &'a mut W, } impl<'a> GIF8_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28); self.w } } #[doc = "Write proxy for field `TEIF7`"] pub struct TEIF7_W<'a> { w: &'a mut W, } impl<'a> TEIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27); self.w } } #[doc = "Write proxy for field `HTIF7`"] pub struct HTIF7_W<'a> { w: &'a mut W, } impl<'a> HTIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26); self.w } } #[doc = "Write proxy for field `TCIF7`"] pub struct TCIF7_W<'a> { w: &'a mut W, } impl<'a> TCIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Write proxy for field `GIF7`"] pub struct GIF7_W<'a> { w: &'a mut W, } impl<'a> GIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true)
} #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Write proxy for field `TEIF6`"] pub struct TEIF6_W<'a> { w: &'a mut W, } impl<'a> TEIF6_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23); self.w } } #[doc = "Write proxy for field `HTIF6`"] pub struct HTIF6_W<'a> { w: &'a mut W, } impl<'a> HTIF6_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22); self.w } } #[doc = "Write proxy for field `TCIF6`"] pub struct TCIF6_W<'a> { w: &'a mut W, } impl<'a> TCIF6_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21); self.w } } #[doc = "Write proxy for field `GIF6`"] pub struct GIF6_W<'a> { w: &'a mut W, } impl<'a> GIF6_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self)
-> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20); self.w } } #[doc = "Write proxy for field `TEIF5`"] pub struct TEIF5_W<'a> { w: &'a mut W, } impl<'a> TEIF5_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } #[doc = "Write proxy for field `HTIF5`"] pub struct HTIF5_W<'a> { w: &'a mut W, } impl<'a> HTIF5_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18); self.w } } #[doc = "Write proxy for field `TCIF5`"] pub struct TCIF5_W<'a> { w: &'a mut W, } impl<'a> TCIF5_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } #[doc = "Write proxy for field `GIF5`"] pub struct GIF5_W<'a> { w: &'a mut W, } impl<'a> GIF5_W<'a> { #[doc = r"Sets the field bit"]
#[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Write proxy for field `TEIF4`"] pub struct TEIF4_W<'a> { w: &'a mut W, } impl<'a> TEIF4_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15); self.w } } #[doc = "Write proxy for field `HTIF4`"] pub struct HTIF4_W<'a> { w: &'a mut W, } impl<'a> HTIF4_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14); self.w } } #[doc = "Write proxy for field `TCIF4`"] pub struct TCIF4_W<'a> { w: &'a mut W, } impl<'a> TCIF4_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13); self.w } } #[doc = "Write proxy for field `GIF4`"] pub struct GIF4_W<'a> { w: &'a mut W, } impl<'a> GIF4_W<'a> {
#[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12); self.w } } #[doc = "Write proxy for field `TEIF3`"] pub struct TEIF3_W<'a> { w: &'a mut W, } impl<'a> TEIF3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Write proxy for field `HTIF3`"] pub struct HTIF3_W<'a> { w: &'a mut W, } impl<'a> HTIF3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } #[doc = "Write proxy for field `TCIF3`"] pub struct TCIF3_W<'a> { w: &'a mut W, } impl<'a> TCIF3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `GIF3`"] pub struct GIF3_W<'a> { w: &'a mut
W, } impl<'a> GIF3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "Write proxy for field `TEIF2`"] pub struct TEIF2_W<'a> { w: &'a mut W, } impl<'a> TEIF2_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Write proxy for field `HTIF2`"] pub struct HTIF2_W<'a> { w: &'a mut W, } impl<'a> HTIF2_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Write proxy for field `TCIF2`"] pub struct TCIF2_W<'a> { w: &'a mut W, } impl<'a> TCIF2_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `GIF2`"] pub struct
GIF2_W<'a> { w: &'a mut W, } impl<'a> GIF2_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `TEIF1`"] pub struct TEIF1_W<'a> { w: &'a mut W, } impl<'a> TEIF1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `HTIF1`"] pub struct HTIF1_W<'a> { w: &'a mut W, } impl<'a> HTIF1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `TCIF1`"] pub struct TCIF1_W<'a> { w: &'a mut W, } impl<'a> TCIF1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `GIF1`"] pub struct GIF1_W<'a> { w: &'a mut W, } impl<'a> GIF1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl W { #[doc = "Bit 31 - TEIF8"] #[inline(always)] pub fn teif8(&mut self) -> TEIF8_W { TEIF8_W { w: self } } #[doc = "Bit 30 - HTIF8"] #[inline(always)] pub fn htif8(&mut self) -> HTIF8_W { HTIF8_W { w: self } } #[doc = "Bit 29 - TCIF8"] #[inline(always)] pub fn tcif8(&mut self) -> TCIF8_W { TCIF8_W { w: self } } #[doc = "Bit 28 - GIF8"] #[inline(always)] pub fn gif8(&mut self) -> GIF8_W { GIF8_W { w: self } } #[doc = "Bit 27 - TEIF7"] #[inline(always)] pub fn teif7(&mut self) -> TEIF7_W { TEIF7_W { w: self } } #[doc = "Bit 26 - HTIF7"] #[inline(always)] pub fn htif7(&mut self) -> HTIF7_W { HTIF7_W { w: self } } #[doc = "Bit 25 - TCIF7"] #[inline(always)] pub fn tcif7(&mut self) -> TCIF7_W { TCIF7_W { w: self } } #[doc = "Bit 24 - GIF7"] #[inline(always)] pub fn gif7(&mut self) -> GIF7_W { GIF7_W { w: self } } #[doc = "Bit 23 - TEIF6"] #[inline(always)] pub fn teif6(&mut self) -> TEIF6_W { TEIF6_W { w: self } } #[doc = "Bit 22 - HTIF6"] #[inline(always)] pub fn htif6(&mut self) -> HTIF6_W { HTIF6_W { w: self } } #[doc = "Bit 21 - TCIF6"] #[inline(always)] pub fn tcif6(&mut self) -> TCIF6_W { TCIF6_W { w: self } } #[doc = "Bit 20 - GIF6"] #[inline(always)] pub fn gif6(&mut self) -> GIF6_W { GIF6_W { w: self } } #[doc = "Bit 19 - TEIF5"] #[inline(always)] pub fn teif5(&mut self) -> TEIF5_W { TEIF5_W { w: self } } #[doc = "Bit 18 - HTIF5"] #[inline(always)] pub fn htif5(&mut self) -> HTIF5_W { HTIF5_W { w: self } } #[doc = "Bit 17 - TCIF5"] #[inline(always)] pub fn tcif5(&mut self) -> TCIF5_W { TCIF5_W { w:
self } } #[doc = "Bit 16 - GIF5"] #[inline(always)] pub fn gif5(&mut self) -> GIF5_W { GIF5_W { w: self } } #[doc = "Bit 15 - TEIF4"] #[inline(always)] pub fn teif4(&mut self) -> TEIF4_W { TEIF4_W { w: self } } #[doc = "Bit 14 - HTIF4"] #[inline(always)] pub fn htif4(&mut self) -> HTIF4_W { HTIF4_W { w: self } } #[doc = "Bit 13 - TCIF4"] #[inline(always)] pub fn tcif4(&mut self) -> TCIF4_W { TCIF4_W { w: self } } #[doc = "Bit 12 - GIF4"] #[inline(always)] pub fn gif4(&mut self) -> GIF4_W { GIF4_W { w: self } } #[doc = "Bit 11 - TEIF3"] #[inline(always)] pub fn teif3(&mut self) -> TEIF3_W { TEIF3_W { w: self } } #[doc = "Bit 10 - HTIF3"] #[inline(always)] pub fn htif3(&mut self) -> HTIF3_W { HTIF3_W { w: self } } #[doc = "Bit 9 - TCIF3"] #[inline(always)] pub fn tcif3(&mut self) -> TCIF3_W { TCIF3_W { w: self } } #[doc = "Bit 8 - GIF3"] #[inline(always)] pub fn gif3(&mut self) -> GIF3_W { GIF3_W { w: self } } #[doc = "Bit 7 - TEIF2"] #[inline(always)] pub fn teif2(&mut self) -> TEIF2_W { TEIF2_W { w: self } } #[doc = "Bit 6 - HTIF2"] #[inline(always)] pub fn htif2(&mut self) -> HTIF2_W { HTIF2_W { w: self } } #[doc = "Bit 5 - TCIF2"] #[inline(always)] pub fn tcif2(&mut self) -> TCIF2_W { TCIF2_W { w: self } } #[doc = "Bit 4 - GIF2"] #[inline(always)] pub fn gif2(&mut self) -> GIF2_W { GIF2_W { w: self } } #[doc = "Bit 3 - TEIF1"] #[inline(always)] pub fn teif1(&mut self) -> TEIF1_W { TEIF1_W { w: self } } #[doc = "Bit 2 - HTIF1"] #[inline(always)] pub fn htif1(&mut self) -> HTIF1_W { HTIF1_W { w: self } } #[doc = "Bit 1 - TCIF1"] #[inline(always)] pub fn tcif1(&mut self) -> TCIF1_W { TCIF1_W { w: self } } #[doc = "Bit 0 - GIF1"] #[inline(always)] pub fn gif1(&mut self) -> GIF1_W { GIF1_W { w: self } } }
#![cfg_attr(not(feature = "std"), no_std)]

use frame_support::codec::{Decode, Encode};
use frame_support::traits::Vec;
use frame_support::{decl_error, decl_event, decl_module, decl_storage, dispatch, traits::Get};
use frame_system::ensure_signed;
use frame_support::traits::Box;
use sp_runtime::traits::Hash;

#[cfg(test)]
mod mock;

#[cfg(test)]
mod tests;

/// Pallet configuration trait.
pub trait Config: frame_system::Config {
    /// The overarching event type.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;
}

/// Raw asset name bytes.
type AssetName = Vec<u8>;

/// An asset: a name together with the account that issued it.
#[derive(Encode, Decode, Default, Clone, PartialEq, Eq, Debug)]
pub struct Asset<AccountId> {
    name: AssetName,
    issuer: AccountId,
}

type AssetOf<T> = Asset<<T as frame_system::Config>::AccountId>;

decl_storage! {
    trait Store for Module<T: Config> as LogionNft {
        /// Assets indexed by the hash of their serialized form (see `asset_data`).
        pub AssetById get(fn asset_by_id): map hasher(blake2_128_concat) T::Hash => AssetOf<T>;
        /// For each account, the hashes of the tokens it currently holds.
        pub TokenByAccount get(fn token_by_account): map hasher(blake2_128_concat) T::AccountId => Vec<T::Hash>;
    }
}

decl_event!(
    pub enum Event<T>
    where
        AccountId = <T as frame_system::Config>::AccountId,
        Hash = <T as frame_system::Config>::Hash,
    {
        /// A token was issued. [asset_hash, issuer]
        TokenIssued(Hash, AccountId),
        /// A token was burned. [asset_hash, issuer]
        TokenBurned(Hash, AccountId),
    }
);

decl_error! {
    pub enum Error for Module<T: Config> {
        /// An asset with the same name and issuer was already issued.
        AssetAlreadyExists,
        /// The caller does not hold the token it tries to burn.
        NoTokenToBurn,
        /// The caller holds the token but did not issue it.
        NotIssuer,
    }
}

/// Call argument describing the asset to issue.
#[derive(Encode, Decode, Default, Clone, PartialEq, Eq, Debug)]
pub struct NewAsset {
    name: AssetName,
}

decl_module! {
    pub struct Module<T: Config> for enum Call where origin: T::Origin {
        type Error = Error<T>;

        fn deposit_event() = default;

        /// Issues a new asset; the signed caller becomes its issuer and holder.
        ///
        /// Fails with `AssetAlreadyExists` if an identical asset (same name
        /// and issuer, hence same hash) was already issued.
        #[weight = 10_000 + T::DbWeight::get().writes(1)]
        pub fn issue_asset(origin, new_asset: NewAsset) -> dispatch::DispatchResult {
            let issuer = ensure_signed(origin)?;
            let asset = AssetOf::<T> {
                name: new_asset.name,
                issuer: issuer.clone(),
            };
            // The hash of the serialized asset doubles as its identity.
            let asset_id = T::Hashing::hash(&asset_data(&asset));
            if AssetById::<T>::contains_key(&asset_id) {
                Err(Error::<T>::AssetAlreadyExists)?
            } else {
                AssetById::<T>::insert(asset_id, asset);
                // `get` already returns an owned Vec; no extra clone needed.
                let mut account_assets = TokenByAccount::<T>::get(&issuer);
                account_assets.push(asset_id);
                TokenByAccount::<T>::insert(issuer.clone(), account_assets);
                Self::deposit_event(RawEvent::TokenIssued(asset_id, issuer));
                Ok(())
            }
        }

        /// Burns a token that the signed caller both holds and issued.
        ///
        /// Fails with `NoTokenToBurn` if the caller does not hold the token,
        /// and with `NotIssuer` if it was issued by someone else.
        #[weight = 10_000 + T::DbWeight::get().writes(1)]
        pub fn burn_token(origin, asset_hash: T::Hash) -> dispatch::DispatchResult {
            let issuer = ensure_signed(origin)?;
            let mut account_tokens = TokenByAccount::<T>::get(&issuer);
            let tokens_in_account = account_tokens.len();
            account_tokens.retain(|hash| *hash != asset_hash);
            let tokens_left = account_tokens.len();
            // Nothing was removed => the caller never held this token.
            if tokens_in_account == tokens_left {
                Err(Error::<T>::NoTokenToBurn)?
            } else {
                let asset = Self::asset_by_id(&asset_hash);
                if asset.issuer != issuer {
                    Err(Error::<T>::NotIssuer)?
                } else {
                    AssetById::<T>::remove(asset_hash);
                    TokenByAccount::<T>::insert(issuer.clone(), account_tokens);
                    Self::deposit_event(RawEvent::TokenBurned(asset_hash, issuer));
                    Ok(())
                }
            }
        }
    }
}

/// Serializes an asset into the byte string that is hashed to form its id:
/// the raw name bytes followed by the SCALE-encoded issuer.
pub fn asset_data<AccountId: Encode>(asset: &Asset<AccountId>) -> Box<Vec<u8>> {
    let mut data = Box::new(Vec::new());
    data.extend_from_slice(&asset.name);
    data.extend_from_slice(&asset.issuer.encode());
    data
}
use std::io::Write; fn main() -> Result<(), lexopt::Error> { let args = Args::parse()?; let stdout = std::io::stdout(); let mut stdout = stdout.lock(); for fixed in 0..16 { let style = style(fixed, args.layer, args.effects); let _ = print_number(&mut stdout, fixed, style); if fixed == 7 || fixed == 15 { let _ = writeln!(&mut stdout); } } for r in 0..6 { let _ = writeln!(stdout); for g in 0..6 { for b in 0..6 { let fixed = r * 36 + g * 6 + b + 16; let style = style(fixed, args.layer, args.effects); let _ = print_number(&mut stdout, fixed, style); } let _ = writeln!(stdout); } } for c in 0..24 { if 0 == c % 8 { let _ = writeln!(stdout); } let fixed = 232 + c; let style = style(fixed, args.layer, args.effects); let _ = print_number(&mut stdout, fixed, style); } Ok(()) } fn style(fixed: u8, layer: Layer, effects: anstyle::Effects) -> anstyle::Style { let color = anstyle::Ansi256Color(fixed).into(); (match layer { Layer::Fg => anstyle::Style::new().fg_color(Some(color)), Layer::Bg => anstyle::Style::new().bg_color(Some(color)), Layer::Underline => anstyle::Style::new().underline_color(Some(color)), }) | effects } fn print_number( stdout: &mut std::io::StdoutLock<'_>, fixed: u8, style: anstyle::Style, ) -> std::io::Result<()> { write!( stdout, "{}{:>4}{}", style.render(), fixed, anstyle::Reset.render() ) } #[derive(Default)] struct Args { effects: anstyle::Effects, layer: Layer, } #[derive(Copy, Clone)] enum Layer { Fg, Bg, Underline, } impl Default for Layer { fn default() -> Self { Layer::Fg } } impl Args { fn parse() -> Result<Self, lexopt::Error> { use lexopt::prelude::*; let mut res = Args::default(); let mut args = lexopt::Parser::from_env(); while let Some(arg) = args.next()? 
{ match arg { Long("layer") => { res.layer = args.value()?.parse_with(|s| match s { "fg" => Ok(Layer::Fg), "bg" => Ok(Layer::Bg), "underline" => Ok(Layer::Underline), _ => Err("expected values fg, bg, underline"), })?; } Long("effect") => { const EFFECTS: [(&str, anstyle::Effects); 12] = [ ("bold", anstyle::Effects::BOLD), ("dimmed", anstyle::Effects::DIMMED), ("italic", anstyle::Effects::ITALIC), ("underline", anstyle::Effects::UNDERLINE), ("double_underline", anstyle::Effects::DOUBLE_UNDERLINE), ("curly_underline", anstyle::Effects::CURLY_UNDERLINE), ("dotted_underline", anstyle::Effects::DOTTED_UNDERLINE), ("dashed_underline", anstyle::Effects::DASHED_UNDERLINE), ("blink", anstyle::Effects::BLINK), ("invert", anstyle::Effects::INVERT), ("hidden", anstyle::Effects::HIDDEN), ("strikethrough", anstyle::Effects::STRIKETHROUGH), ]; let effect = args.value()?.parse_with(|s| { EFFECTS .into_iter() .find(|(name, _)| *name == s) .map(|(_, effect)| effect) .ok_or_else(|| { format!( "expected one of {}", EFFECTS .into_iter() .map(|(n, _)| n) .collect::<Vec<_>>() .join(", ") ) }) })?; res.effects = res.effects.insert(effect); } _ => return Err(arg.unexpected()), } } Ok(res) } }
use crate::measure;
use std::fmt::Display;
use std::io::BufRead;
use std::iter;
use std::slice::Iter;

/// Day 5: parse crate stacks and move instructions, then report the top
/// crate of each stack after applying the moves — part 1 moves one crate
/// at a time, part 2 moves whole groups at once.
pub fn run(input: impl BufRead) {
    let lines = read_input(input);
    measure::duration(|| {
        let (mut stacks, moves) = read_instructions(&lines);
        rearrange_crates_single(&mut stacks, &moves);
        println!("* Part 1: {}", top_crates(&stacks));
    });
    measure::duration(|| {
        let (mut stacks, moves) = read_instructions(&lines);
        rearrange_crates_multiple(&mut stacks, &moves);
        println!("* Part 2: {}", top_crates(&stacks));
    });
}

/// A single `move <count> from <from> to <to>` instruction;
/// `from` and `to` are 1-based stack numbers.
#[derive(Debug, PartialEq)]
struct Move {
    count: usize,
    from: usize,
    to: usize,
}

impl Move {
    /// Applies the move one crate at a time (CrateMover 9000): the moved
    /// crates end up in reverse order on the target stack.
    ///
    /// Panics if the source stack runs out of crates.
    fn single(&self, stacks: &mut [Vec<Crate>]) {
        for _ in 0..self.count {
            let c = stacks[self.from - 1].pop().expect("expected crate");
            stacks[self.to - 1].push(c);
        }
    }

    /// Applies the move as one group (CrateMover 9001): the moved crates
    /// keep their relative order.
    fn multiple(&self, stacks: &mut [Vec<Crate>]) {
        let crates = pop_multiple(&mut stacks[self.from - 1], self.count);
        stacks[self.to - 1].extend(crates);
    }
}

/// Removes the top `count` crates from `stack`, preserving their
/// bottom-to-top order.
fn pop_multiple(stack: &mut Vec<Crate>, count: usize) -> Vec<Crate> {
    // Draining already yields the crates bottom-to-top; the previous
    // double reverse (`.rev().collect()` here plus `.iter().rev()` at the
    // call site) cancelled itself out and has been removed.
    stack.drain(stack.len() - count..).collect()
}

impl Display for Move {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "move {} from {} to {}", self.count, self.from, self.to)
    }
}

impl From<&str> for Move {
    /// Parses `move <count> from <from> to <to>`: the numbers sit at odd
    /// word positions, which `skip(1).step_by(2)` selects.
    ///
    /// Panics on malformed input.
    fn from(s: &str) -> Self {
        let parts: Vec<usize> = s
            .split(' ')
            .skip(1)
            .step_by(2)
            .map(|p| p.parse().expect("expected number"))
            .collect();
        Move {
            count: parts[0],
            from: parts[1],
            to: parts[2],
        }
    }
}

/// A crate is identified by a single letter.
type Crate = char;

fn read_input(input: impl BufRead) -> Vec<String> {
    input
        .lines()
        .map(|line| line.expect("expected line"))
        .collect()
}

/// Splits the input into the initial stack layout and the move list,
/// separated by the first blank line.
fn read_instructions(lines: &[String]) -> (Vec<Vec<Crate>>, Vec<Move>) {
    let mut lines = lines.iter();
    let stacks = read_stacks(&mut lines);
    let moves = read_moves(&mut lines);
    (stacks, moves)
}

/// Reads the crate drawing (everything up to the blank line). The last
/// drawing line holds the stack numbers and determines the stack count.
fn read_stacks(lines: &mut Iter<String>) -> Vec<Vec<Crate>> {
    let mut lines: Vec<&String> = lines.by_ref().take_while(|line| !line.is_empty()).collect();
    let count = lines
        .pop()
        .expect("expected stack numbers")
        .split_whitespace()
        .count();
    let mut stacks: Vec<Vec<Crate>> = iter::repeat_with(|| vec![]).take(count).collect();
    // Crate letters sit in columns 1, 5, 9, ...; iterate the drawing
    // bottom-up so each stack is built in stacking order.
    lines.iter().rev().for_each(|line| {
        line.chars()
            .skip(1)
            .step_by(4)
            .enumerate()
            .filter(|(_, c)| *c != ' ')
            .for_each(|(i, c)| stacks[i].push(c));
    });
    stacks
}

fn read_moves(lines: &mut Iter<String>) -> Vec<Move> {
    lines.map(|l| Move::from(&l[..])).collect()
}

fn rearrange_crates_single(stacks: &mut [Vec<Crate>], moves: &[Move]) {
    moves.iter().for_each(|m| m.single(stacks));
}

fn rearrange_crates_multiple(stacks: &mut [Vec<Crate>], moves: &[Move]) {
    moves.iter().for_each(|m| m.multiple(stacks));
}

/// The message formed by the top crate of each stack.
///
/// Panics if any stack is empty (the puzzle guarantees this never happens).
fn top_crates(stacks: &[Vec<Crate>]) -> String {
    stacks
        .iter()
        .map(|s| s.last().expect("expected crate"))
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    const INPUT: &[u8] = b"    [D]
[N] [C]
[Z] [M] [P]
 1   2   3

move 1 from 2 to 1
move 3 from 1 to 3
move 2 from 2 to 1
move 1 from 1 to 2
";

    #[test]
    fn test_read_instructions() {
        let (stacks, moves) = read_instructions(&read_input(INPUT));
        assert_eq!(stacks.len(), 3);
        assert_eq!(stacks[0], ['Z', 'N']);
        assert_eq!(stacks[1], ['M', 'C', 'D']);
        assert_eq!(stacks[2], ['P']);
        assert_eq!(moves.len(), 4);
        assert_eq!(moves[0], Move { count: 1, from: 2, to: 1 });
        assert_eq!(moves[1], Move { count: 3, from: 1, to: 3 });
        assert_eq!(moves[2], Move { count: 2, from: 2, to: 1 });
        assert_eq!(moves[3], Move { count: 1, from: 1, to: 2 });
    }

    #[test]
    fn test_top_crates() {
        let (stacks, _) = read_instructions(&read_input(INPUT));
        assert_eq!(top_crates(&stacks), "NDP");
    }

    #[test]
    fn test_rearrange_crates_single() {
        let (mut stacks, moves) = read_instructions(&read_input(INPUT));
        rearrange_crates_single(&mut stacks, &moves);
        assert_eq!(top_crates(&stacks), "CMZ");
    }

    #[test]
    fn test_rearrange_crates_multiple() {
        let (mut stacks, moves) = read_instructions(&read_input(INPUT));
        rearrange_crates_multiple(&mut stacks, &moves);
        assert_eq!(top_crates(&stacks), "MCD");
    }
}
use combine::{count, easy::Stream, token, Parser};

/// Day 8: parse the license tree from the input file and report the total
/// metadata sum (part 1) and the root node's value (part 2).
///
/// Panics if the file cannot be read or parsed.
pub fn run(path: &str) {
    let input = std::fs::read_to_string(path).expect("Couldn't read input file");
    let node = parse_node()
        .easy_parse(&input)
        .expect("Couldn't parse nodes")
        .0;
    println!("Day 8, part 1: {}", node.sum_metadatas());
    println!("Day 8, part 2: {}", node.value());
}

/// A tree node: zero or more children plus a list of metadata entries.
#[derive(Debug, PartialEq)]
struct Node {
    children: Vec<Node>,
    metadata: Vec<u32>,
}

impl Node {
    /// Sum of this node's metadata entries and those of all descendants.
    fn sum_metadatas(&self) -> u32 {
        let child_sum: u32 = self.children.iter().map(|n| n.sum_metadatas()).sum();
        self.metadata.iter().sum::<u32>() + child_sum
    }

    /// A leaf's value is its metadata sum. Otherwise every metadata entry
    /// is a 1-based child index and the value is the sum of those
    /// children's values; entries of 0 or beyond the child count
    /// contribute 0.
    fn value(&self) -> u32 {
        if self.children.is_empty() {
            self.sum_metadatas()
        } else {
            self.metadata
                .iter()
                .map(|&i| {
                    // A metadata entry of 0 refers to no child at all;
                    // `checked_sub` avoids the underflow panic the former
                    // `(i as usize) - 1` had in that case.
                    (i as usize)
                        .checked_sub(1)
                        .and_then(|idx| self.children.get(idx))
                        .map(|n| n.value())
                        .unwrap_or(0)
                })
                .sum()
        }
    }
}

/// Parses an unsigned decimal integer.
fn digits_u32<'a>() -> impl Parser<Input = Stream<&'a str>, Output = u32> {
    combine::from_str(combine::parser::range::take_while1(|c: char| {
        c.is_digit(10)
    }))
}

/// Parses one node: `<child count> <metadata count>` followed by that many
/// space-separated child nodes and metadata entries.
fn parse_node_<'a>() -> impl Parser<Input = Stream<&'a str>, Output = Node> {
    digits_u32()
        .skip(token(' '))
        .then(move |num_child_nodes: u32| {
            digits_u32().then(move |num_metadatas: u32| {
                (
                    count(num_child_nodes as usize, token(' ').with(parse_node())),
                    count(num_metadatas as usize, token(' ').with(digits_u32())),
                )
                    .map(|t| Node {
                        children: t.0,
                        metadata: t.1,
                    })
            })
        })
}

// Ties the recursive knot: `parse_node` can be referenced inside its own
// definition, which a bare `impl Trait` return type cannot express.
parser!{
    fn parse_node['a]()(Stream<&'a str>) -> Node {
        parse_node_()
    }
}

#[cfg(test)]
mod test {
    use super::*;

    fn node_d() -> Node {
        Node {
            children: vec![],
            metadata: vec![99],
        }
    }

    fn node_c() -> Node {
        Node {
            children: vec![node_d()],
            metadata: vec![2],
        }
    }

    fn node_b() -> Node {
        Node {
            children: vec![],
            metadata: vec![10, 11, 12],
        }
    }

    fn node_a() -> Node {
        Node {
            children: vec![node_b(), node_c()],
            metadata: vec![1, 1, 2],
        }
    }

    #[test]
    fn test_parse_node() {
        assert_eq!(parse_node().easy_parse("0 1 99"), Ok((node_d(), "")));
        assert_eq!(parse_node().easy_parse("1 1 0 1 99 2"), Ok((node_c(), "")));
        assert_eq!(
            parse_node().easy_parse("2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"),
            Ok((node_a(), ""))
        );
    }

    #[test]
    fn test_sum_metadatas() {
        assert_eq!(node_a().sum_metadatas(), 138);
    }

    #[test]
    fn test_value() {
        assert_eq!(node_c().value(), 0);
        assert_eq!(node_b().value(), 33);
        assert_eq!(node_a().value(), 66);
    }

    #[test]
    fn test_value_with_zero_metadata() {
        // Entry 0 refers to no child and must contribute 0 (not panic).
        let node = Node {
            children: vec![node_b()],
            metadata: vec![0, 1],
        };
        assert_eq!(node.value(), 33);
    }
}
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - SHAMD5 DMA Interrupt Mask"] pub shamd5_dmaim: SHAMD5_DMAIM, #[doc = "0x04 - SHAMD5 DMA Raw Interrupt Status"] pub shamd5_dmaris: SHAMD5_DMARIS, #[doc = "0x08 - SHAMD5 DMA Masked Interrupt Status"] pub shamd5_dmamis: SHAMD5_DMAMIS, #[doc = "0x0c - SHAMD5 DMA Interrupt Clear"] pub shamd5_dmaic: SHAMD5_DMAIC, } #[doc = "SHAMD5 DMA Interrupt Mask"] pub struct SHAMD5_DMAIM { register: vcell::VolatileCell<u32>, } #[doc = "SHAMD5 DMA Interrupt Mask"] pub mod shamd5_dmaim; #[doc = "SHAMD5 DMA Raw Interrupt Status"] pub struct SHAMD5_DMARIS { register: vcell::VolatileCell<u32>, } #[doc = "SHAMD5 DMA Raw Interrupt Status"] pub mod shamd5_dmaris; #[doc = "SHAMD5 DMA Masked Interrupt Status"] pub struct SHAMD5_DMAMIS { register: vcell::VolatileCell<u32>, } #[doc = "SHAMD5 DMA Masked Interrupt Status"] pub mod shamd5_dmamis; #[doc = "SHAMD5 DMA Interrupt Clear"] pub struct SHAMD5_DMAIC { register: vcell::VolatileCell<u32>, } #[doc = "SHAMD5 DMA Interrupt Clear"] pub mod shamd5_dmaic;
use crate::engine::runtime::{OptMap, parser_tokens}; //use test::Bencher; //#[bench] //fn bench_is_opt(b: &mut Bencher) { // // // let optMap=OptMap::new(); // b.iter(|| { // optMap.is_opt("+"); // }); //} // //#[bench] //fn bench_parser_tokens(b: &mut Bencher) { // let m= &OptMap::new(); // b.iter(|| { // parser_tokens(&String::from(" a + b"), m); // }); //}
// Copyright 2020-2021, The Tremor Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Tremor event processing pipeline #![deny(warnings)] #![deny(missing_docs)] #![recursion_limit = "1024"] #![deny( clippy::all, clippy::unwrap_used, clippy::unnecessary_unwrap, clippy::pedantic )] // TODO this is needed due to a false positive in clippy // https://github.com/rust-lang/rust/issues/83125 // we will need this in 1.53.1 #![allow(proc_macro_back_compat)] #[macro_use] extern crate serde_derive; #[macro_use] extern crate log; use crate::errors::{ErrorKind, Result}; use crate::op::prelude::*; use beef::Cow; use executable_graph::NodeConfig; use halfbrown::HashMap; use lazy_static::lazy_static; use op::trickle::window; use petgraph::graph::{self, NodeIndex}; use simd_json::OwnedValue; use std::cmp::Ordering; use std::collections::BTreeMap; use std::fmt::Display; use std::iter::Iterator; use std::str::FromStr; use std::{fmt, sync::Mutex}; use tremor_script::prelude::*; /// Pipeline Errors pub mod errors; mod event; mod executable_graph; #[macro_use] mod macros; pub(crate) mod op; const COUNT: Cow<'static, str> = Cow::const_str("count"); const MEASUREMENT: Cow<'static, str> = Cow::const_str("measurement"); const TAGS: Cow<'static, str> = Cow::const_str("tags"); const FIELDS: Cow<'static, str> = Cow::const_str("fields"); const TIMESTAMP: Cow<'static, str> = Cow::const_str("timestamp"); /// Tools to turn tremor query into pipelines pub mod query; pub use crate::event::{Event, 
ValueIter, ValueMetaIter}; pub use crate::executable_graph::{ExecutableGraph, OperatorNode}; pub(crate) use crate::executable_graph::{NodeMetrics, State}; pub use op::{ConfigImpl, InitializableOperator, Operator}; pub use tremor_script::prelude::EventOriginUri; pub(crate) type PortIndexMap = HashMap<(NodeIndex, Cow<'static, str>), Vec<(NodeIndex, Cow<'static, str>)>>; pub(crate) type ExecPortIndexMap = HashMap<(usize, Cow<'static, str>), Vec<(usize, Cow<'static, str>)>>; /// A configuration map pub type ConfigMap = Option<serde_yaml::Value>; /// A lookup function to used to look up operators pub type NodeLookupFn = fn( config: &NodeConfig, uid: u64, defn: Option<&tremor_script::srs::Stmt>, node: Option<&tremor_script::srs::Stmt>, windows: Option<HashMap<String, window::Impl>>, ) -> Result<OperatorNode>; /// Stringified numeric key /// from <https://github.com/serde-rs/json-benchmark/blob/master/src/prim_str.rs> #[derive(Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Debug)] pub struct PrimStr<T>(T) where T: Copy + Ord + Display + FromStr; impl<T> simd_json_derive::SerializeAsKey for PrimStr<T> where T: Copy + Ord + Display + FromStr, { fn json_write<W>(&self, writer: &mut W) -> std::io::Result<()> where W: std::io::Write, { write!(writer, "\"{}\"", self.0) } } impl<T> simd_json_derive::Serialize for PrimStr<T> where T: Copy + Ord + Display + FromStr, { fn json_write<W>(&self, writer: &mut W) -> std::io::Result<()> where W: std::io::Write, { write!(writer, "\"{}\"", self.0) } } impl<'input, T> simd_json_derive::Deserialize<'input> for PrimStr<T> where T: Copy + Ord + Display + FromStr, { #[inline] fn from_tape(tape: &mut simd_json_derive::Tape<'input>) -> simd_json::Result<Self> where Self: std::marker::Sized + 'input, { if let Some(simd_json::Node::String(s)) = tape.next() { Ok(PrimStr(FromStr::from_str(s).map_err(|_e| { simd_json::Error::generic(simd_json::ErrorType::Serde("not a number".into())) })?)) } else { Err(simd_json::Error::generic( 
simd_json::ErrorType::ExpectedNull, )) } } } /// Operator metadata #[derive( Clone, Debug, Default, PartialEq, simd_json_derive::Serialize, simd_json_derive::Deserialize, )] pub struct OpMeta(BTreeMap<PrimStr<u64>, OwnedValue>); impl OpMeta { /// inserts a value pub fn insert<V>(&mut self, key: u64, value: V) -> Option<OwnedValue> where OwnedValue: From<V>, { self.0.insert(PrimStr(key), OwnedValue::from(value)) } /// reads a value pub fn get(&mut self, key: u64) -> Option<&OwnedValue> { self.0.get(&PrimStr(key)) } /// checks existance of a key #[must_use] pub fn contains_key(&self, key: u64) -> bool { self.0.contains_key(&PrimStr(key)) } /// Merges two op meta maps, overwriting values with `other` on duplicates pub fn merge(&mut self, mut other: Self) { self.0.append(&mut other.0); } } lazy_static! { /// Function registory for the pipeline to look up functions // We wrap the registry in a mutex so that we can add functions from the outside // if required. pub static ref FN_REGISTRY: Mutex<Registry> = { let registry: Registry = tremor_script::registry(); Mutex::new(registry) }; } pub(crate) fn common_cow(s: &str) -> beef::Cow<'static, str> { macro_rules! 
cows { ($target:expr, $($cow:expr),*) => { match $target { $($cow => $cow.into()),*, _ => beef::Cow::from($target.to_string()), } }; } cows!(s, "in", "out", "err", "main") } /// Type of nodes #[derive(Debug, Clone, Ord, PartialOrd, PartialEq, Eq, Hash)] pub enum NodeKind { /// An input, this is the one end of the graph Input, /// An output, this is the other end of the graph Output(Cow<'static, str>), /// An operator Operator, /// A select statement Select, /// A Script statement Script, } impl NodeKind { fn skippable(&self) -> bool { matches!(self, Self::Operator | Self::Select | Self::Script) } } impl Default for NodeKind { fn default() -> Self { Self::Operator } } /// A circuit breaker action #[derive( Debug, Clone, Copy, PartialEq, simd_json_derive::Serialize, simd_json_derive::Deserialize, )] pub enum CbAction { /// Nothing of note None, /// The circuit breaker is triggerd and should break Close, /// The circuit breaker is restored and should work again Open, /// Acknowledge delivery of messages up to a given ID. /// All messages prior to and including this will be considered delivered. Ack, /// Fail backwards to a given ID /// All messages after and including this will be considered non delivered Fail, } impl Default for CbAction { fn default() -> Self { Self::None } } impl From<bool> for CbAction { fn from(success: bool) -> Self { if success { CbAction::Ack } else { CbAction::Fail } } } impl CbAction { /// This is a Circuit Breaker related message #[must_use] pub fn is_cb(self) -> bool { self == CbAction::Close || self == CbAction::Open } /// This is a Guaranteed Delivery related message #[must_use] pub fn is_gd(self) -> bool { self == CbAction::Ack || self == CbAction::Fail } } /// Event identifier /// /// Events are identified by their source, the stream within that source that originated the given event /// and an `event_id` that is unique only within the same stream. 
/// /// `EventId` also tracks min and max event ids for other events in order to support batched and grouped events /// and facilitate CB mechanics #[derive( Debug, Clone, PartialEq, Default, simd_json_derive::Serialize, simd_json_derive::Deserialize, )] pub struct EventId { source_id: u64, stream_id: u64, event_id: u64, tracked_event_ids: Vec<TrackedEventIds>, } /// default stream id if streams dont make sense pub const DEFAULT_STREAM_ID: u64 = 0; impl EventId { #[must_use] /// create a new `EventId` from numeric ids pub fn new(source_id: u64, stream_id: u64, event_id: u64) -> Self { Self { source_id, stream_id, event_id, tracked_event_ids: Vec::with_capacity(0), } } #[must_use] /// return the `source_id` of this event /// the unique id of the source/onramp/pipeline-node where this event came from pub fn source_id(&self) -> u64 { self.source_id } /// setter for `source_id` pub fn set_source_id(&mut self, source_id: u64) { self.source_id = source_id; } #[must_use] /// return the `stream_id` of this event /// the unique id of the stream within a source/onramp/pipeline-node where this event came from pub fn stream_id(&self) -> u64 { self.stream_id } /// setter for `stream_id` pub fn set_stream_id(&mut self, stream_id: u64) { self.stream_id = stream_id; } #[must_use] /// return the `event_id` of this event /// the unique id of the event within its stream pub fn event_id(&self) -> u64 { self.event_id } /// setter for `event_id` pub fn set_event_id(&mut self, event_id: u64) { self.event_id = event_id; } /// track the min and max of the given `event_id` /// and also include all event ids `event_id` was tracking pub fn track(&mut self, event_id: &EventId) { self.track_ids( event_id.source_id, event_id.stream_id, event_id.event_id, event_id.event_id, ); for other_tracked in &event_id.tracked_event_ids { match self .tracked_event_ids .binary_search_by(|probe| probe.compare(other_tracked)) { Ok(idx) => { // ALLOW: binary_search_by verified this idx exists unsafe { 
self.tracked_event_ids.get_unchecked_mut(idx) } .track_ids(other_tracked.min_event_id, other_tracked.max_event_id); } Err(idx) => self.tracked_event_ids.insert(idx, other_tracked.clone()), } } } /// track the given event id by its raw numeric ids pub fn track_id(&mut self, source_id: u64, stream_id: u64, event_id: u64) { self.track_ids(source_id, stream_id, event_id, event_id); } fn track_ids(&mut self, source_id: u64, stream_id: u64, min_event_id: u64, max_event_id: u64) { // track our own id upon first track call, so we can keep resolving min and max simpler if self.tracked_event_ids.is_empty() { self.tracked_event_ids.push(TrackedEventIds::new( self.source_id, self.stream_id, self.event_id, self.event_id, )); } match self .tracked_event_ids .binary_search_by(|probe| probe.compare_ids(source_id, stream_id)) { Ok(idx) => { unsafe { self.tracked_event_ids.get_unchecked_mut(idx) } .track_ids(min_event_id, max_event_id); } Err(idx) => self.tracked_event_ids.insert( idx, TrackedEventIds::new(source_id, stream_id, min_event_id, max_event_id), ), } } #[must_use] /// get minimum event id for a given source and stream, if it is tracked /// /// This also always checks the actual eventId, not only the tracked ones, this way we can save allocations when used within insights pub fn get_min_by_stream(&self, source_id: u64, stream_id: u64) -> Option<u64> { if self.tracked_event_ids.is_empty() && self.source_id == source_id && self.stream_id == stream_id { Some(self.event_id) } else { self.tracked_event_ids.iter().find_map(|teid| { if (source_id, stream_id) == (teid.source_id, teid.stream_id) { Some(teid.min_event_id) } else { None } }) } } #[must_use] /// checks if the given `EventId` is tracked by this one. /// Also returns true, if the `event_id` has the same id as `self`. 
pub fn is_tracking(&self, event_id: &EventId) -> bool { let is_same = self.source_id() == event_id.source_id() && self.stream_id() == event_id.stream_id() && self.event_id() == event_id.event_id(); is_same || match self.tracked_event_ids.binary_search_by(|probe| { probe.compare_ids(event_id.source_id(), event_id.stream_id()) }) { Ok(idx) => { let entry = unsafe { self.tracked_event_ids.get_unchecked(idx) }; // this is only a heuristic, but is good enough for now (entry.min_event_id <= event_id.event_id) && (event_id.event_id <= entry.max_event_id) } Err(_) => false, } } #[must_use] /// get maximum event id for a given source and stream if we have it here /// /// This also always checks the actual eventId, not only the tracked ones pub fn get_max_by_stream(&self, source_id: u64, stream_id: u64) -> Option<u64> { if self.tracked_event_ids.is_empty() && self.source_id == source_id && self.stream_id == stream_id { Some(self.event_id) } else { self.tracked_event_ids.iter().find_map(|teid| { if (source_id, stream_id) == (teid.source_id, teid.stream_id) { Some(teid.max_event_id) } else { None } }) } } #[must_use] /// get the minimum tracked (`stream_id`, `event_id`) /// by chosing events with smaller stream id /// /// This also always checks the actual eventId, not only the tracked ones pub fn get_min_by_source(&self, source_id: u64) -> Option<(u64, u64)> { // TODO: change the return type to an iterator, so we make sure to return all values for all streams if self.tracked_event_ids.is_empty() && self.source_id == source_id { Some((self.stream_id, self.event_id)) } else { self.tracked_event_ids .iter() .filter(|teid| teid.source_id == source_id) .min_by(|teid1, teid2| teid1.stream_id.cmp(&teid2.stream_id)) .map(|teid| (teid.stream_id, teid.min_event_id)) } } #[must_use] /// get the maximum tracked (`stream_id`, `event_id`) /// by chosing events with bigger stream id /// /// This also always checks the actual eventId, not only the tracked ones pub fn get_max_by_source(&self, 
source_id: u64) -> Option<(u64, u64)> { // TODO: change the return type to an iterator, so we make sure to return all values for all streams if self.tracked_event_ids.is_empty() && self.source_id == source_id { Some((self.stream_id, self.event_id)) } else { self.tracked_event_ids .iter() .filter(|teid| teid.source_id == source_id) .max_by(|teid1, teid2| teid1.stream_id.cmp(&teid2.stream_id)) .map(|teid| (teid.stream_id, teid.max_event_id)) } } } impl From<(u64, u64, u64)> for EventId { fn from(x: (u64, u64, u64)) -> Self { EventId::new(x.0, x.1, x.2) } } impl fmt::Display for EventId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}:{}:{}", self.source_id, self.stream_id, self.event_id)?; if !self.tracked_event_ids.is_empty() { let mut iter = self.tracked_event_ids.iter(); if let Some(ids) = iter.next() { write!(f, " {}", ids)?; } for ids in iter { write!(f, ", {}", ids)?; } } Ok(()) } } #[derive( Debug, Clone, PartialEq, Default, simd_json_derive::Serialize, simd_json_derive::Deserialize, )] /// tracked min and max event id for pub struct TrackedEventIds { /// uid of the source this event originated from pub source_id: u64, /// uid of the stream within the source this event originated from pub stream_id: u64, /// tracking min event id pub min_event_id: u64, /// tracking max event id pub max_event_id: u64, } impl TrackedEventIds { #[must_use] /// create a new instance with min and max set to `event_id`. 
pub fn new(source_id: u64, stream_id: u64, min_event_id: u64, max_event_id: u64) -> Self { Self { source_id, stream_id, min_event_id, max_event_id, } } #[must_use] /// create tracked ids from a single `event_id` pub fn from_id(source_id: u64, stream_id: u64, event_id: u64) -> Self { Self { source_id, stream_id, min_event_id: event_id, max_event_id: event_id, } } #[must_use] /// returns true if this struct tracks the given source and stream ids pub fn tracks_id(&self, source_id: u64, stream_id: u64) -> bool { self.source_id == source_id && self.stream_id == stream_id } #[must_use] /// compares against the given source and stream ids, using simple numeric ordering pub fn compare_ids(&self, source_id: u64, stream_id: u64) -> Ordering { (self.source_id, self.stream_id).cmp(&(source_id, stream_id)) } #[must_use] /// compare source and stream ids against the ones given in `other` pub fn compare(&self, other: &TrackedEventIds) -> Ordering { (self.source_id, self.stream_id).cmp(&(other.source_id, other.stream_id)) } /// track everything from the given `event_id` pub fn track(&mut self, event_id: &EventId) { #[cfg(test)] { debug_assert!( self.source_id == event_id.source_id, "incompatible source ids" ); debug_assert!( self.stream_id == event_id.stream_id, "incompatible stream ids" ); } self.track_ids(event_id.event_id, event_id.event_id); } /// track a single event id pub fn track_id(&mut self, event_id: u64) { self.track_ids(event_id, event_id); } /// track a min and max event id pub fn track_ids(&mut self, min_event_id: u64, max_event_id: u64) { self.min_event_id = self.min_event_id.min(min_event_id); self.max_event_id = self.max_event_id.max(max_event_id); } /// merge the other `ids` into this one pub fn merge(&mut self, ids: &TrackedEventIds) { // TODO: once https://github.com/rust-lang/rust-clippy/issues/6970 is fixed comment those in again #[cfg(test)] { debug_assert!(self.source_id == ids.source_id, "incompatible source ids"); debug_assert!(self.stream_id == 
ids.stream_id, "incompatible stream ids"); } self.track_ids(ids.min_event_id, ids.max_event_id); } } impl From<&EventId> for TrackedEventIds { fn from(e: &EventId) -> Self { Self::from_id(e.source_id, e.stream_id, e.event_id) } } impl From<(u64, u64, u64)> for TrackedEventIds { fn from(x: (u64, u64, u64)) -> Self { Self::from_id(x.0, x.1, x.2) } } impl fmt::Display for TrackedEventIds { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "[min={}:{}:{}, max={}:{}:{}]", self.source_id, self.stream_id, self.min_event_id, self.source_id, self.stream_id, self.max_event_id ) } } // TODO adapt for streaming, so we maintain multiple counters per stream #[derive(Debug, Clone, Copy, Default)] /// for generating consecutive unique event ids pub struct EventIdGenerator(u64, u64, u64); impl EventIdGenerator { /// generate the next event id for this stream pub fn next_id(&mut self) -> EventId { let event_id = self.2; self.2 = self.2.wrapping_add(1); EventId::new(self.0, self.1, event_id) } #[must_use] /// create a new generator using the default stream id pub fn new(source_id: u64) -> Self { Self(source_id, DEFAULT_STREAM_ID, 0) } #[must_use] /// create a new generator using the given source and stream id pub fn with_stream(source_id: u64, stream_id: u64) -> Self { Self(source_id, stream_id, 0) } /// set the source id pub fn set_source(&mut self, source_id: u64) { self.0 = source_id; } /// set the stream id pub fn set_stream(&mut self, stream_id: u64) { self.1 = stream_id; } } /// The kind of signal this is #[derive( Debug, Clone, Copy, PartialEq, simd_json_derive::Serialize, simd_json_derive::Deserialize, )] pub enum SignalKind { // Lifecycle /// Init singnal Init, /// Shutdown Signal Shutdown, // Pause, TODO debug trace // Resume, TODO debug trace // Step, TODO ( into, over, to next breakpoint ) /// Control Control, /// Periodic Tick Tick, } // We ignore this since it's a simple lookup table #[cfg(not(tarpaulin_include))] fn factory(node: &NodeConfig) -> 
Result<Box<dyn InitializableOperator>> { #[cfg(feature = "bert")] use op::bert::{SequenceClassificationFactory, SummerizationFactory}; use op::debug::EventHistoryFactory; use op::generic::{BatchFactory, CounterFactory}; use op::grouper::BucketGrouperFactory; use op::identity::PassthroughFactory; use op::qos::{BackpressureFactory, PercentileFactory, RoundRobinFactory, WalFactory}; let name_parts: Vec<&str> = node.op_type.split("::").collect(); let factory = match name_parts.as_slice() { ["passthrough"] => PassthroughFactory::new_boxed(), ["debug", "history"] => EventHistoryFactory::new_boxed(), ["grouper", "bucket"] => BucketGrouperFactory::new_boxed(), ["generic", "batch"] => BatchFactory::new_boxed(), ["generic", "backpressure"] => { error!("The generic::backpressure operator is depricated, please use qos::backpressure instread."); BackpressureFactory::new_boxed() } ["generic", "counter"] => CounterFactory::new_boxed(), ["qos", "backpressure"] => BackpressureFactory::new_boxed(), ["qos", "roundrobin"] => RoundRobinFactory::new_boxed(), ["qos", "wal"] => WalFactory::new_boxed(), ["qos", "percentile"] => PercentileFactory::new_boxed(), #[cfg(feature = "bert")] ["bert", "sequence_classification"] => SequenceClassificationFactory::new_boxed(), #[cfg(feature = "bert")] ["bert", "summarization"] => SummerizationFactory::new_boxed(), [namespace, name] => { return Err(ErrorKind::UnknownOp((*namespace).to_string(), (*name).to_string()).into()); } _ => return Err(ErrorKind::UnknownNamespace(node.op_type.clone()).into()), }; Ok(factory) } fn operator(uid: u64, node: &NodeConfig) -> Result<Box<dyn Operator + 'static>> { factory(node)?.from_node(uid, node) } /// Takes a name, tags and creates a influx codec compatible Value #[must_use] pub fn influx_value( metric_name: Cow<'static, str>, tags: HashMap<Cow<'static, str>, Value<'static>>, count: u64, timestamp: u64, ) -> Value<'static> { literal!({ MEASUREMENT: metric_name, TAGS: tags, FIELDS: { COUNT: count }, TIMESTAMP: 
timestamp }) } #[derive(Debug, Default)] struct Connection { from: Cow<'static, str>, to: Cow<'static, str>, } impl Display for Connection { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { let from: &str = &self.from; let to: &str = &self.to; match (from, to) { ("out", "in") => write!(f, ""), ("out", to) => write!(f, "{}", to), (from, "in") => write!(f, "{} ", from), (from, to) => write!(f, "{} -> {}", from, to), } } } pub(crate) type ConfigGraph = graph::DiGraph<NodeConfig, Connection>; #[cfg(test)] mod test { use super::*; use simd_json_derive::{Deserialize, Serialize}; #[test] fn prim_str() { let p = PrimStr(42); let fourtytwo = r#""42""#; let mut fourtytwo_s = fourtytwo.to_string(); let mut fourtytwo_i = "42".to_string(); assert_eq!(fourtytwo, p.json_string().unwrap()); assert_eq!( p, PrimStr::from_slice(unsafe { fourtytwo_s.as_bytes_mut() }).unwrap() ); assert!(PrimStr::<i32>::from_slice(unsafe { fourtytwo_i.as_bytes_mut() }).is_err()); } #[test] fn op_meta_merge() { let mut m1 = OpMeta::default(); let mut m2 = OpMeta::default(); m1.insert(1, 1); m1.insert(2, 1); m2.insert(1, 2); m2.insert(3, 2); m1.merge(m2); assert!(m1.contains_key(1)); assert!(m1.contains_key(2)); assert!(m1.contains_key(3)); assert_eq!(m1.get(1).unwrap(), &2); assert_eq!(m1.get(2).unwrap(), &1); assert_eq!(m1.get(3).unwrap(), &2); } #[test] fn cbaction_creation() { assert_eq!(CbAction::default(), CbAction::None); assert_eq!(CbAction::from(true), CbAction::Ack); assert_eq!(CbAction::from(false), CbAction::Fail); } #[test] fn cbaction_is_gd() { assert_eq!(CbAction::None.is_gd(), false); assert_eq!(CbAction::Fail.is_gd(), true); assert_eq!(CbAction::Ack.is_gd(), true); assert_eq!(CbAction::Open.is_gd(), false); assert_eq!(CbAction::Close.is_gd(), false); } #[test] fn cbaction_is_cb() { assert_eq!(CbAction::None.is_cb(), false); assert_eq!(CbAction::Fail.is_cb(), false); assert_eq!(CbAction::Ack.is_cb(), false); assert_eq!(CbAction::Open.is_cb(), true); 
assert_eq!(CbAction::Close.is_cb(), true); } #[test] fn event_ids() { let mut ids1 = EventId::new(1, 1, 1); assert_eq!(Some(1), ids1.get_max_by_stream(1, 1)); assert_eq!(None, ids1.get_max_by_stream(1, 2)); assert_eq!(None, ids1.get_max_by_stream(2, 1)); let mut ids2 = EventId::new(1, 1, 2); assert_eq!(ids2.get_max_by_stream(1, 1), Some(2)); assert_eq!(ids2.get_max_by_stream(1, 3), None); assert_eq!(ids2.get_max_by_stream(2, 1), None); ids1.track_id(2, DEFAULT_STREAM_ID, 1); ids2.track_id(2, DEFAULT_STREAM_ID, 3); assert_eq!(ids1.get_max_by_source(1), Some((1, 1))); assert_eq!(ids1.get_max_by_source(2), Some((DEFAULT_STREAM_ID, 1))); assert_eq!(ids2.get_min_by_source(1), Some((1, 2))); assert_eq!(ids2.get_min_by_source(2), Some((DEFAULT_STREAM_ID, 3))); ids1.track(&ids2); assert_eq!(ids1.get_max_by_source(1), Some((1, 2))); assert_eq!(ids1.get_min_by_source(1), Some((1, 1))); assert_eq!(ids1.get_min_by_source(3), None); assert_eq!(ids1.get_max_by_source(3), None); assert_eq!(ids1.get_max_by_source(2), Some((DEFAULT_STREAM_ID, 3))); assert_eq!(ids1.get_min_by_source(2), Some((DEFAULT_STREAM_ID, 1))); assert_eq!(ids1.get_max_by_stream(1, 1), Some(2)); assert_eq!(ids1.get_max_by_stream(1, 2), None); assert_eq!(ids1.get_min_by_stream(2, DEFAULT_STREAM_ID), Some(1)); assert_eq!(ids1.get_min_by_stream(2, 42), None); let id = EventId::from((1, DEFAULT_STREAM_ID, 42_u64)); assert_eq!(id.get_max_by_stream(1, DEFAULT_STREAM_ID), Some(42)); assert_eq!(id.get_max_by_stream(5, DEFAULT_STREAM_ID), None); } #[test] fn tracked_event_ids() { let teid1 = TrackedEventIds::default(); assert_eq!( ( teid1.source_id, teid1.stream_id, teid1.min_event_id, teid1.max_event_id ), (0, 0, 0, 0) ); let mut teid2 = TrackedEventIds::new(1, 2, 3, 4); let eid1 = EventId::new(1, 2, 6); let eid2 = EventId::new(1, 2, 1); teid2.track(&eid1); assert_eq!(teid2.max_event_id, eid1.event_id); assert_eq!(teid2.min_event_id, 3); teid2.track(&eid2); assert_eq!(teid2.min_event_id, eid2.event_id); 
assert_eq!(teid2.max_event_id, eid1.event_id); let teid3 = TrackedEventIds::from((1, 2, 19)); teid2.merge(&teid3); assert_eq!(teid2.min_event_id, 1); assert_eq!(teid2.max_event_id, 19); teid2.track_id(0); assert_eq!(teid2.min_event_id, 0); } }
extern crate lambda;
use lambda::*;

/// Entry point: feeds a Church-encoded lambda-calculus program to the
/// `lambda` crate's interpreter via `run`.
///
/// The program string builds the usual Church booleans, numerals and
/// combinators (`true`/`false`, `not`/`or`/`and`, `0`..`4`, successor `S`,
/// `+`, `*`, `is_zero`, predecessor `P`), defines `factorial` via the
/// Y combinator, and finally evaluates `factorial 3`.
fn main() {
    // Simpler arithmetic example, kept for reference / quick experimentation:
    //let program = "(x. y. x - y) 3 2";
    let program = "(def. def (t.f.t) true. def (t.f.f) false. def (a.a false true) not. def (a.b. a true b) or. def (a.b. a b false) and. def (f.x. x) 0. def (f.x. f x) 1. def (f.x. f (f x)) 2. def (n.f.x. f (n f x)) S. def (n.m. (n S) m) +. def (n.m. n (+ m) 0) *. def (n.n (x. false) true) is_zero. def (n.n (g.k. is_zero (g 1) k (+ (g k) 1)) (v.0) 0) P. def (r.n. is_zero n 1 (* n (r (P n)))) factorial. def (S 2) 3. def (S 3) 4. def (f. (x. f (x x)) (x. f (x x))) Y. Y factorial 3 ) (a.b. b a)";
    run(program);
}
mod day1;
mod day2;
mod day3;

/// Prints one solved puzzle in the shared "day X, puzzle Y: answer" format.
/// The answer is any displayable value, so each day module may return its
/// own result type.
fn report(day: u32, puzzle: u32, answer: impl std::fmt::Display) {
    println!("day {}, puzzle {}: {}", day, puzzle, answer);
}

/// Runs every implemented Advent of Code 2016 puzzle in order and prints
/// each answer as it is computed.
fn main() {
    println!("Let's do this! Advent 2016!!!");
    report(1, 1, day1::puzzle1());
    report(1, 2, day1::puzzle2());
    report(2, 1, day2::puzzle1());
    report(2, 2, day2::puzzle2());
    report(3, 1, day3::puzzle1());
    report(3, 2, day3::puzzle2());
}
use procon_reader::ProconReader;

/// Reads a directed, weighted graph (1-indexed vertices) from stdin and runs
/// Floyd-Warshall, accumulating the distances of all reachable ordered pairs
/// after EVERY relaxation stage k, then prints the grand total.
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    // n = vertex count, m = edge count
    let n: usize = rd.get();
    let m: usize = rd.get();
    // "Infinity" halved so that inf + inf cannot overflow u64 during relaxation.
    let inf = std::u64::MAX / 2;
    let mut d = vec![vec![inf; n]; n];
    for v in 0..n {
        d[v][v] = 0;
    }
    for _ in 0..m {
        let a: usize = rd.get();
        let b: usize = rd.get();
        let c: u64 = rd.get();
        // Edges are directed a -> b; input is 1-indexed, table is 0-indexed.
        d[a - 1][b - 1] = c;
    }
    let mut ans = 0;
    for k in 0..n {
        // Standard Floyd-Warshall relaxation through intermediate vertex k.
        for s in 0..n {
            for t in 0..n {
                d[s][t] = d[s][t].min(d[s][k] + d[k][t]);
            }
        }
        // NOTE(review): this summation is INSIDE the k-loop, i.e. the partial
        // shortest-path matrix is summed after each stage, not once at the
        // end. That looks deliberate for a per-stage-sum contest problem,
        // but confirm against the problem statement.
        for s in 0..n {
            for t in 0..n {
                if d[s][t] < inf {
                    ans += d[s][t];
                }
            }
        }
    }
    println!("{}", ans);
}
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(box_syntax)] struct DroppableStruct; enum DroppableEnum { DroppableVariant1, DroppableVariant2 } static mut DROPPED: bool = false; impl Drop for DroppableStruct { fn drop(&mut self) { unsafe { DROPPED = true; } } } impl Drop for DroppableEnum { fn drop(&mut self) { unsafe { DROPPED = true; } } } trait MyTrait { fn dummy(&self) { } } impl MyTrait for Box<DroppableStruct> {} impl MyTrait for Box<DroppableEnum> {} struct Whatever { w: Box<MyTrait+'static> } impl Whatever { fn new(w: Box<MyTrait+'static>) -> Whatever { Whatever { w: w } } } fn main() { { let f: Box<_> = box DroppableStruct; let _a = Whatever::new(box f as Box<MyTrait>); } assert!(unsafe { DROPPED }); unsafe { DROPPED = false; } { let f: Box<_> = box DroppableEnum::DroppableVariant1; let _a = Whatever::new(box f as Box<MyTrait>); } assert!(unsafe { DROPPED }); }
extern crate rand;
#[path="geom.rs"]
mod geom;
use glium::Surface;

/// A hollow cylinder mesh (40 angular segments x 11 rings of vertices) whose
/// surface vertices jitter over time to produce a "wave" effect.
pub struct Cylinder {
    // Vertex buffers: (positions, normals).
    vbo: (glium::VertexBuffer<geom::Position>, glium::VertexBuffer<geom::Normal>),
    // Per-vertex wave state: [current phase angle, amplitude, phase step].
    waves: [[f32; 3]; 440],
    // The 440 (= 40 * 11) base vertex positions of the undeformed cylinder.
    vertex: Vec<geom::Position>,
    position: [[f32;4];4], // translation matrix
    rotate: [[f32;4];4],   // rotation matrix
    scale: [[f32;4];4],    // scale matrix
    pmodel: [[f32;4];4],   // parent node's model matrix
    color: [f32; 3],
}

impl Cylinder {
    /// Advances every vertex's wave phase one step, offsets each base vertex
    /// in the XY plane by its wave, and rebuilds the vertex buffers.
    /// NOTE: allocates fresh GPU buffers every call — fine for a demo.
    pub fn wave(&mut self, display: &glium::Display) {
        let mut new_vertex: Vec<geom::Position> = Vec::new();
        for index in 0..440 {
            // Shadowing: x/y are re-bound to base position + wave offset.
            let x: f32 = self.vertex[index].position[0];
            let x: f32 = x + self.waves[index][0].cos() * self.waves[index][1];
            let y: f32 = self.vertex[index].position[1];
            let y: f32 = y + self.waves[index][0].sin() * self.waves[index][1];
            let z: f32 = self.vertex[index].position[2];
            new_vertex.push(geom::Position{position:[x, y, z]});
            // Step the phase for the next frame.
            self.waves[index][0] += self.waves[index][2];
        }
        self.vbo = Cylinder::create_vbo(display, &new_vertex);
    }

    /// Triangulates the ring-ordered vertex list into a triangle soup (two
    /// triangles per quad, both windings so the surface is visible from both
    /// sides), computes one flat normal per triangle, and uploads both as
    /// GPU vertex buffers.
    pub fn create_vbo(display: &glium::Display, vertex: &Vec<geom::Position>) ->
        (glium::VertexBuffer<geom::Position>, glium::VertexBuffer<geom::Normal>) {
        let mut shape: Vec<geom::Position> = Vec::new();
        for i in 0..40 {
            for index in 0..10 {
                // First vertex of this quad; each angular slice holds 11 ring vertices.
                let first = index + i*11;
                if i==39 {
                    // Last slice wraps around to slice 0 to close the cylinder.
                    // front face
                    shape.push(vertex[index+1]);
                    shape.push(vertex[first+1]);
                    shape.push(vertex[first]);
                    // back face
                    shape.push(vertex[first]);
                    shape.push(vertex[index]);
                    shape.push(vertex[index+1]);
                } else {
                    // front face
                    shape.push(vertex[first+12]);
                    shape.push(vertex[first+1]);
                    shape.push(vertex[first]);
                    // back face
                    shape.push(vertex[first]);
                    shape.push(vertex[first+11]);
                    shape.push(vertex[first+12]);
                }
            }
        }
        let mut normals: Vec<geom::Normal> = Vec::new();
        // 800 triangles in total (40 slices * 10 quads * 2 triangles).
        // NOTE(review): the original comment said 600, but the loop and the
        // soup built above both give 800 — the comment was stale.
        for i in 0..800 {
            // Each triangle has 3 vertices; take two edge vectors...
            let a:[f32; 3] = [shape[i*3+1].position[0]-shape[i*3].position[0],
                shape[i*3+1].position[1]-shape[i*3].position[1],
                shape[i*3+1].position[2]-shape[i*3].position[2]];
            let b:[f32; 3] = [shape[i*3+2].position[0]-shape[i*3+1].position[0],
                shape[i*3+2].position[1]-shape[i*3+1].position[1],
                shape[i*3+2].position[2]-shape[i*3+1].position[2]];
            // ...and cross them to get the flat face normal.
            let normal:[f32; 3] = [a[1]*b[2]-a[2]*b[1],
                a[2]*b[0]-a[0]*b[2],
                a[0]*b[1]-a[1]*b[0]];
            // Same normal for all 3 vertices of the triangle (flat shading).
            for _j in 0..3 {
                normals.push(geom::Normal {normal});
            }
        }
        ( glium::VertexBuffer::new(display, &shape).unwrap(),
          glium::VertexBuffer::new(display, &normals).unwrap())
    }

    /// Builds a unit-radius cylinder: 40 angular steps, each with 11 vertices
    /// along z in [-0.5, 0.5], plus randomized per-vertex wave parameters.
    pub fn new(display: &glium::Display) -> Cylinder {
        // 1. Lay out all vertices in ring order (slice-major).
        let mut vertex: Vec<geom::Position> = Vec::new();
        let mut index = 0;
        let mut waves:[[f32; 3]; 440]=[[0.0; 3]; 440];
        for i in 0..40 {
            let angle: f32 = std::f32::consts::PI / 20.0 * i as f32;
            for z in -5..6 {
                let x: f32 = angle.cos();
                let y: f32 = angle.sin();
                vertex.push(geom::Position {position: [x, y, 0.1 * z as f32]});
                waves[index] = [
                    rand::random::<f32>() * std::f32::consts::PI * 2.0, // random start phase
                    0.01 + rand::random::<f32>() * 0.03,                // random amplitude
                    0.016 + rand::random::<f32>() * 0.032               // random phase step
                ];
                index+=1;
            }
        }
        let vbo = Cylinder::create_vbo(display, &vertex);
        // All transform matrices start as identity.
        let initmatrix: [[f32; 4]; 4] = [
            [1.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [0.0, 0.0, 0.0, 1.0f32]];
        Cylinder {
            vbo,
            vertex,
            waves,
            position: initmatrix,
            rotate: initmatrix,
            scale: initmatrix,
            pmodel: initmatrix,
            color: [0.41, 0.76, 0.76f32],
        }
    }

    /// Draws the cylinder into `target` with depth testing enabled, composing
    /// the model matrix as scale * rotate * position * parent.
    pub fn draw<S, T>(&self, target: &mut S, program: &glium::Program,
        uniform: &glium::uniforms::UniformBuffer<T>,
        depth: &glium::texture::depth_texture2d::DepthTexture2d)
        where S: glium::Surface,
              T: glium::uniforms::UniformBlock+glium::buffer::Content,
    {
        // Enable depth testing so nearer fragments win.
        let params = glium::DrawParameters {
            depth: glium::Depth {
                test: glium::draw_parameters::DepthTest::IfLess,
                write: true,
                .. Default::default()
            },
            .. Default::default()
        };
        let model: [[f32; 4]; 4] = geom::matrix_multi(&self.scale,
            &geom::matrix_multi(&self.rotate,
                &geom::matrix_multi(&self.position, &self.pmodel)));
        let uniforms = uniform! {
            object_color: self.color,
            MyBlock: uniform,
            model: model,
            shadowMap: depth,
        };
        target.draw(
            (&self.vbo.0, &self.vbo.1),
            glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),
            program,
            &uniforms,
            &params ).unwrap();
    }

    /// Sets the parent node's model matrix.
    pub fn set_pmodel(&mut self, model: &[[f32; 4]; 4]) {
        self.pmodel = *model;
    }

    /// Scale is conceptually applied first in the transform chain.
    pub fn set_scale(&mut self, x: f32, y: f32, z: f32) {
        self.scale = [
            [x, 0.0, 0.0, 0.0],
            [0.0, y, 0.0, 0.0],
            [0.0, 0.0, z, 0.0],
            [0.0, 0.0, 0.0, 1.0f32],
        ]
    }

    /// Then rotation: `xyz` selects the axis (0 = x, 1 = y, 2 = z); any other
    /// value leaves the rotation as identity.
    pub fn set_rotate(&mut self, angle: f32, xyz: i32) {
        self.rotate = if xyz==0 {
            // rotate about the x axis
            [[1.0, 0.0, 0.0, 0.0],
             [0.0, angle.cos(), angle.sin(), 0.0],
             [0.0, -angle.sin(), angle.cos(), 0.0],
             [0.0, 0.0, 0.0, 1.0]]
        } else if xyz==1 {
            // rotate about the y axis
            [[angle.cos(), 0.0, -angle.sin(), 0.0],
             [0.0, 1.0, 0.0, 0.0],
             [angle.sin(), 0.0, angle.cos(), 0.0],
             [0.0, 0.0, 0.0, 1.0]]
        } else if xyz==2 {
            // rotate about the z axis
            [[angle.cos(), angle.sin(), 0.0, 0.0],
             [-angle.sin(), angle.cos(), 0.0, 0.0],
             [0.0, 0.0, 1.0, 0.0],
             [0.0, 0.0, 0.0, 1.0]]
        } else {
            [[1.0, 0.0, 0.0, 0.0],
             [0.0, 1.0, 0.0, 0.0],
             [0.0, 0.0, 1.0, 0.0],
             [0.0, 0.0, 0.0, 1.0]]
        };
    }

    /// Translation comes last (row-major translation in the bottom row).
    pub fn set_position(&mut self, x: f32, y: f32, z: f32) {
        self.position = [[1.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [x, y, z, 1.0f32]];
    }

    /// Sets the RGB draw color.
    pub fn set_color(&mut self, r: f32, g: f32, b: f32) {
        self.color = [r, g, b];
    }
}
use alloc::vec::Vec;
use eosio::{
    AccountName, Action, DataStream, NumBytes, Read, ReadError, Transaction,
    TransactionId, Write, WriteError,
};

/// This method will abort execution of wasm without failing the contract. This is used to bypass all cleanup / destructors that would normally be called.
#[inline]
pub fn eosio_exit<C>(code: C)
where
    C: Into<i32>,
{
    unsafe { eosio_cdt_sys::eosio_exit(code.into()) }
}

/// Sends an inline action.
///
/// Serializes the action into a scratch buffer and hands the raw bytes to
/// the `send_inline` intrinsic.
///
/// # Errors
///
/// Will return `Err` if there was an issue serializing the action.
#[inline]
pub fn send_inline_action(action: &Action<Vec<u8>>) -> Result<(), WriteError> {
    // Size the buffer exactly from the serialized length.
    let size = action.num_bytes();
    let mut bytes = vec![0_u8; size];
    let mut pos = 0;
    action.write(&mut bytes, &mut pos)?;
    let ptr = bytes[..].as_mut_ptr();
    // `pos` is the number of bytes actually written.
    unsafe { eosio_cdt_sys::send_inline(ptr, pos) }
    Ok(())
}

/// Sends a context free inline action.
///
/// Same as `send_inline_action` but routed through the context-free intrinsic.
///
/// # Errors
///
/// Will return `Err` if there was an issue serializing the action.
#[inline]
pub fn send_context_free_inline_action(
    action: &Action<Vec<u8>>,
) -> Result<(), WriteError> {
    let size = action.num_bytes();
    let mut bytes = vec![0_u8; size];
    let mut pos = 0;
    action.write(&mut bytes, &mut pos)?;
    let ptr = bytes[..].as_mut_ptr();
    unsafe { eosio_cdt_sys::send_context_free_inline(ptr, pos) }
    Ok(())
}

/// Sends a deferred transaction
///
/// Packs the transaction and delegates to `send_deferred_bytes`.
///
/// # Errors
///
/// Will return `Err` if there was an issue serializing the transaction.
#[inline]
pub fn send_deferred<I, P, T>(
    id: I,
    payer: P,
    trx: T,
    replace_existing: bool,
) -> Result<(), WriteError>
where
    I: AsRef<TransactionId>,
    P: AsRef<AccountName>,
    T: AsRef<Transaction>,
{
    let bytes = trx.as_ref().pack()?;
    send_deferred_bytes(id, payer, bytes, replace_existing);
    Ok(())
}

/// Sends a deferred transaction from raw bytes.
#[inline]
pub fn send_deferred_bytes<I, P, T>(
    id: I,
    payer: P,
    bytes: T,
    replace_existing: bool,
) where
    I: AsRef<TransactionId>,
    P: AsRef<AccountName>,
    T: AsRef<[u8]>,
{
    // The intrinsic takes the 128-bit sender id by pointer.
    let id = id.as_ref().as_u128();
    let id_ptr = &id as *const _ as *const u128;
    let bytes = bytes.as_ref();
    unsafe {
        eosio_cdt_sys::send_deferred(
            id_ptr,
            payer.as_ref().as_u64(),
            bytes.as_ptr(),
            bytes.len(),
            // bool -> integer flag expected by the C ABI
            replace_existing.into(),
        )
    }
}

/// Cancels a deferred transaction
///
/// Returns `true` when a pending transaction with that id was found and
/// cancelled (the intrinsic reports 1).
#[must_use]
#[inline]
pub fn cancel_deferred<I: AsRef<TransactionId>>(id: I) -> bool {
    let sender_id = id.as_ref().as_u128();
    let sender_id_ptr = &sender_id as *const _ as *const u128;
    let result = unsafe { eosio_cdt_sys::cancel_deferred(sender_id_ptr) };
    result == 1
}

/// Reads action data
///
/// Copies the current action's payload into a heap buffer and deserializes
/// it as `T`.
///
/// # Errors
///
/// Will return `Err` if there was a problem reading the action data.
#[inline]
pub fn read_action_data<T: Read>() -> Result<T, ReadError> {
    let num_bytes = unsafe { eosio_cdt_sys::action_data_size() };
    let mut bytes = vec![0_u8; num_bytes as usize];
    let ptr: *mut eosio_cdt_sys::c_void =
        &mut bytes[..] as *mut _ as *mut eosio_cdt_sys::c_void;
    unsafe {
        eosio_cdt_sys::read_action_data(ptr, num_bytes);
    }
    let mut pos = 0;
    T::read(&bytes, &mut pos)
}

/// Get a `DataStream` of the current action data
///
/// Like `read_action_data`, but returns the raw bytes wrapped in a
/// `DataStream` instead of deserializing eagerly.
#[must_use]
#[inline]
pub fn current_data_stream() -> DataStream {
    let num_bytes = unsafe { eosio_cdt_sys::action_data_size() };
    let mut bytes = vec![0_u8; num_bytes as usize];
    let ptr: *mut eosio_cdt_sys::c_void =
        &mut bytes[..] as *mut _ as *mut eosio_cdt_sys::c_void;
    unsafe {
        eosio_cdt_sys::read_action_data(ptr, num_bytes);
    }
    bytes.into()
}
use super::util::{mmio_write, mmio_read, enable_irq_no};

// PL011 UART driver for Raspberry Pi 2/3 (BCM283x). All register addresses
// below are physical MMIO addresses; the init sequence order matters.
#[allow(dead_code)]
mod constval {
    // The GPIO registers base address.
    pub const GPIO_BASE: u32 = 0x3F200000; // for raspi2 & 3, 0x20200000 for raspi1

    // The offsets for reach register.

    // Controls actuation of pull up/down to ALL GPIO pins.
    pub const GPPUD: u32 = (GPIO_BASE + 0x94);

    // Controls actuation of pull up/down for specific GPIO pin.
    pub const GPPUDCLK0: u32 = (GPIO_BASE + 0x98);

    // The base address for UART.
    pub const UART0_BASE: u32 = 0x3F201000; // for raspi2 & 3, 0x20201000 for raspi1

    // The offsets for reach register for the UART.
    pub const UART0_DR: u32 = (UART0_BASE + 0x00);
    pub const UART0_RSRECR: u32 = (UART0_BASE + 0x04);
    pub const UART0_FR: u32 = (UART0_BASE + 0x18);
    pub const UART0_ILPR: u32 = (UART0_BASE + 0x20);
    pub const UART0_IBRD: u32 = (UART0_BASE + 0x24);
    pub const UART0_FBRD: u32 = (UART0_BASE + 0x28);
    pub const UART0_LCRH: u32 = (UART0_BASE + 0x2C);
    pub const UART0_CR: u32 = (UART0_BASE + 0x30);
    pub const UART0_IFLS: u32 = (UART0_BASE + 0x34);
    pub const UART0_IMSC: u32 = (UART0_BASE + 0x38);
    pub const UART0_RIS: u32 = (UART0_BASE + 0x3C);
    pub const UART0_MIS: u32 = (UART0_BASE + 0x40);
    pub const UART0_ICR: u32 = (UART0_BASE + 0x44);
    pub const UART0_DMACR: u32 = (UART0_BASE + 0x48);
    pub const UART0_ITCR: u32 = (UART0_BASE + 0x80);
    pub const UART0_ITIP: u32 = (UART0_BASE + 0x84);
    pub const UART0_ITOP: u32 = (UART0_BASE + 0x88);
    pub const UART0_TDR: u32 = (UART0_BASE + 0x8C);
}

use self::constval::*;
use super::util::delay;

/// Flag register bit 5 (TXFF): transmit FIFO is full.
fn transmit_fifo_full() -> bool {
    unsafe {mmio_read(UART0_FR) & (1 << 5) > 0}
}

/// Flag register bit 4 (RXFE): receive FIFO is empty.
fn receive_fifo_empty() -> bool {
    unsafe {mmio_read(UART0_FR) & (1 << 4) > 0}
}

/// Blocking write of a single byte (busy-waits until there is FIFO space).
pub fn writec(c: u8) {
    while transmit_fifo_full() {}
    unsafe {mmio_write(UART0_DR, c as u32);}
}

/// Blocking read of a single byte (busy-waits until data is available).
pub fn getc() -> u8 {
    while receive_fifo_empty() {}
    unsafe {mmio_read(UART0_DR) as u8}
}

/// Writes a string byte-wise.
/// NOTE(review): `c as u8` truncates non-ASCII code points to their low
/// byte; callers are expected to pass ASCII text.
pub fn write(msg: &str) {
    for c in msg.chars() {
        writec(c as u8)
    }
}

/// One-time UART initialization: route GPIO 14/15 to the UART, set the baud
/// rate to 115200, enable FIFOs, and unmask the receive interrupt.
/// The register write ORDER below follows the BCM2835 datasheet — do not
/// reorder.
pub fn init() {
    unsafe {
        // Disable UART0.
        mmio_write(UART0_CR, 0x00000000);
        // Setup the GPIO pin 14 && 15.

        // Disable pull up/down for all GPIO pins & delay for 150 cycles.
        mmio_write(GPPUD, 0x00000000);
        delay(150);

        // Disable pull up/down for pin 14,15 & delay for 150 cycles.
        mmio_write(GPPUDCLK0, (1 << 14) | (1 << 15));
        delay(150);

        // Write 0 to GPPUDCLK0 to make it take effect.
        mmio_write(GPPUDCLK0, 0x00000000);

        // Clear pending interrupts.
        mmio_write(UART0_ICR, 0x7FF);

        // Set integer & fractional part of baud rate.
        // Divider = UART_CLOCK/(16 * Baud)
        // Fraction part register = (Fractional part * 64) + 0.5
        // UART_CLOCK = 3000000; Baud = 115200.

        // Divider = 3000000 / (16 * 115200) = 1.627 = ~1.
        mmio_write(UART0_IBRD, 1);
        // Fractional part register = (.627 * 64) + 0.5 = 40.6 = ~40.
        mmio_write(UART0_FBRD, 40);

        // Enable FIFO & 8 bit data transmissio (1 stop bit, no parity).
        mmio_write(UART0_LCRH, (1 << 4) | (1 << 5) | (1 << 6));

        // Mask all interrupts.
        // mmio_write(
        //     UART0_IMSC,
        //     (1 << 1) | (1 << 4) | (1 << 5) | (1 << 6) | (1 << 7) | (1 << 8) | (1 << 9) | (1 << 10),
        // );

        // Enable UART0, receive & transfer part of UART.
        mmio_write(UART0_CR, (1 << 0) | (1 << 8) | (1 << 9));

        // unmask RX IRQ
        mmio_write(UART0_IMSC, 1 << 4);
        // enable uart irq (IRQ 57 on the BCM283x interrupt controller)
        enable_irq_no(57);
    }
}
use std::{marker::PhantomData, pin::Pin};

use async_stream::stream;
use futures_util::{stream::Stream, StreamExt};
use raw::{ClientMessage, ClientPayload, GraphQLReceiver, GraphQLSender, Payload, ServerMessage};
use serde::de::DeserializeOwned;
use tokio::sync::{broadcast, mpsc};
use tokio_tungstenite::{connect_async, tungstenite};

pub mod raw;

pub use tungstenite::handshake::client::Request;
pub use tungstenite::Error;

/// A GraphQL-over-WebSocket connection (graphql-ws style protocol).
/// Outgoing client messages are fanned in through `tx`; incoming server
/// messages are fanned out through `server_tx` so each subscription gets
/// its own broadcast receiver.
pub struct GraphQLWebSocket {
    tx: broadcast::Sender<ClientMessage>,
    server_tx: broadcast::Sender<ServerMessage>,
    #[allow(dead_code)]
    // Need this to avoid a hangup: keeping one receiver alive stops the
    // broadcast channel from closing while there are no subscriptions.
    server_rx: broadcast::Receiver<ServerMessage>,
    // Monotonic counter used to mint unique subscription ids.
    id_count: u64,
}

impl GraphQLWebSocket {
    /// Opens the WebSocket, spawns a reader task (server -> broadcast) and a
    /// writer task (broadcast -> server, sending `ConnectionInit` first).
    pub async fn connect(request: Request) -> Result<GraphQLWebSocket, tungstenite::Error> {
        let (stream, _) = match connect_async(request).await {
            Ok(v) => v,
            Err(e) => return Err(e),
        };

        let (sink, stream) = StreamExt::split(stream);

        let (tx_in, rx_in) = broadcast::channel(16);
        let tx_in0 = tx_in.clone();

        // Reader task: forward every decoded server message (except
        // keep-alives) into the inbound broadcast channel.
        tokio::spawn(async move {
            let rx = GraphQLReceiver { stream };
            let mut stream = rx.stream();
            while let Some(msg) = stream.next().await {
                match msg {
                    Ok(ServerMessage::ConnectionKeepAlive) => {}
                    Ok(v) => {
                        let _ = tx_in0.send(v);
                    }
                    Err(e) => tracing::error!("{:?}", e),
                }
            }
        });

        let (tx_out, mut rx_out) = broadcast::channel(16);

        // Writer task: initialize the protocol, then drain the outbound
        // broadcast channel into the socket.
        tokio::spawn(async move {
            let mut tx = GraphQLSender { sink };

            tx.send(ClientMessage::ConnectionInit { payload: None })
                .await
                .unwrap();

            while let Ok(msg) = rx_out.recv().await {
                match tx.send(msg).await {
                    Ok(()) => {}
                    Err(e) => tracing::error!("{:?}", e),
                }
            }
        });

        let socket = GraphQLWebSocket {
            tx: tx_out,
            server_tx: tx_in,
            server_rx: rx_in,
            id_count: 0,
        };

        Ok(socket)
    }

    /// Starts a new subscription with a fresh hex id; the returned handle
    /// owns its own view of the inbound broadcast channel.
    pub fn subscribe<T: DeserializeOwned + Unpin + Send + 'static>(
        &mut self,
        payload: ClientPayload,
    ) -> GraphQLSubscription<T> {
        self.id_count += 1;
        let id = format!("{:x}", self.id_count);

        let sub =
            GraphQLSubscription::<T>::new(id, self.tx.clone(), self.server_tx.subscribe(), payload);

        sub
    }
}

/// One live GraphQL subscription. `T` is the expected data shape, `E` the
/// expected error shape; both default to raw JSON values.
pub struct GraphQLSubscription<
    T: DeserializeOwned = serde_json::Value,
    E: DeserializeOwned = serde_json::Value,
> {
    id: String,
    tx: broadcast::Sender<ClientMessage>,
    rx: broadcast::Receiver<ServerMessage>,
    payload: ClientPayload,
    // Zero-sized markers tying T/E to the struct without storing them.
    ty_value: PhantomData<T>,
    ty_error: PhantomData<E>,
}

pub enum SubscriptionError {
    InvalidData(Payload),
    InternalError(serde_json::Value),
}

impl<T, E> GraphQLSubscription<T, E>
where
    T: DeserializeOwned + Unpin + Send + 'static,
    E: DeserializeOwned + Unpin + Send + 'static,
{
    pub fn new(
        id: String,
        tx: broadcast::Sender<ClientMessage>,
        rx: broadcast::Receiver<ServerMessage>,
        payload: ClientPayload,
    ) -> Self {
        Self {
            id,
            tx,
            rx,
            payload,
            ty_value: PhantomData,
            ty_error: PhantomData,
        }
    }

    /// Sends `Start` for this subscription and spawns a task that filters
    /// the shared server stream down to this id, decoding payloads into an
    /// mpsc channel until `Complete`/error.
    fn spawn_task(self) -> mpsc::Receiver<Result<Payload<T, E>, serde_json::Value>> {
        let mut this = self;
        let id = this.id.clone();
        let payload = this.payload.clone();
        let (tx, rx) = mpsc::channel(16);
        tokio::spawn(async move {
            tracing::trace!("Sending start message");
            this.tx.send(ClientMessage::Start { id, payload }).unwrap();
            tracing::trace!("Sent!");
            while let Ok(msg) = this.rx.recv().await {
                tracing::trace!("{:?}", &msg);
                match msg {
                    ServerMessage::Data { id, payload } => {
                        // Only messages tagged with our id belong to us.
                        if id == this.id {
                            let raw_data = payload.data.unwrap_or(serde_json::Value::Null);
                            let raw_errors = payload.errors.unwrap_or(serde_json::Value::Null);
                            // Decoding failures degrade to None rather than
                            // killing the subscription.
                            let data: Option<T> = serde_json::from_value(raw_data).unwrap_or(None);
                            let errors: Option<E> =
                                serde_json::from_value(raw_errors).unwrap_or(None);
                            let _ = tx.send(Ok(Payload { data, errors })).await;
                        }
                    }
                    ServerMessage::Complete { id } => {
                        if id == this.id {
                            return;
                        }
                    }
                    ServerMessage::ConnectionError { payload } => {
                        // Connection-level errors are fatal for everyone.
                        let _ = tx.send(Err(payload)).await;
                        return;
                    }
                    ServerMessage::Error { id, payload } => {
                        if id == this.id {
                            let _ = tx.send(Err(payload)).await;
                        }
                    }
                    ServerMessage::ConnectionAck => {}
                    ServerMessage::ConnectionKeepAlive => {}
                }
            }
        });

        rx
    }

    /// Consumes the subscription and exposes its results as a pinned,
    /// boxed async `Stream`.
    pub fn stream(
        self,
    ) -> Pin<Box<dyn Stream<Item = Result<Payload<T, E>, serde_json::Value>> + Send>> {
        let this = self;
        Box::pin(stream! {
            let mut rx = this.spawn_task();
            while let Some(msg) = rx.recv().await {
                yield msg;
            }
        })
    }
}

impl<T, E> Drop for GraphQLSubscription<T, E>
where
    T: DeserializeOwned,
    E: DeserializeOwned,
{
    fn drop(&mut self) {
        tracing::trace!("Dropping WebSocket subscription (stopping)...");
        // Best-effort Stop: ignore send errors if the connection is gone.
        self.tx
            .send(ClientMessage::Stop {
                id: self.id.clone(),
            })
            .unwrap_or(0);
    }
}

impl Drop for GraphQLWebSocket {
    fn drop(&mut self) {
        tracing::trace!("Dropping WebSocket connection (terminating)...");
        // Best-effort protocol-level termination.
        self.tx
            .send(ClientMessage::ConnectionTerminate)
            .unwrap_or(0);
    }
}
use std::fmt;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::os;
use std::os::raw::c_char;

use anyhow::anyhow;

use firefly_util as util;
use firefly_util::emit::Emit;

use super::*;
use crate::support::{OwnedStringRef, StringRef};
use crate::Context;

extern "C" {
    // Opaque FFI handle; never constructed or inspected from Rust.
    type MlirModule;
}

/// This type represents a non-owned reference to an MLIR module
#[repr(transparent)]
#[derive(Copy, Clone)]
pub struct Module(*mut MlirModule);

impl Module {
    #[inline(always)]
    pub fn is_null(&self) -> bool {
        self.0.is_null()
    }

    /// Returns the module name, if one was provided
    pub fn name(&self) -> Option<StringRef> {
        let name = unsafe { mlir_module_get_name(*self) };
        if name.is_null() {
            None
        } else {
            Some(name)
        }
    }

    /// Set the data layout string for this module
    /// (stored as the `llvm.data_layout` string attribute).
    pub fn set_data_layout<S: Into<StringRef>>(&self, layout: S) {
        self.set_attribute_by_name("llvm.data_layout", StringAttr::get(self.context(), layout));
    }

    /// Set the target triple for this module
    /// (stored as the `llvm.target_triple` string attribute).
    pub fn set_target_triple<S: Into<StringRef>>(&self, triple: S) {
        self.set_attribute_by_name(
            "llvm.target_triple",
            StringAttr::get(self.context(), triple),
        );
    }

    /// Looks up a function symbol by name in this module's symbol table.
    pub fn get_func_by_name<S: Into<StringRef>>(&self, name: S) -> Option<FuncOp> {
        SymbolTable::lookup_symbol_in(self.base(), name.into()).and_then(|op| op.try_into().ok())
    }

    /// Returns the module body as a Block
    pub fn body(&self) -> Block {
        unsafe { mlir_module_get_body(*self) }
    }
}

impl Operation for Module {
    fn context(&self) -> Context {
        unsafe { mlir_module_get_context(*self) }
    }

    fn base(&self) -> OperationBase {
        unsafe { mlir_module_get_operation(*self) }
    }
}

impl TryFrom<OperationBase> for Module {
    type Error = InvalidTypeCastError;

    /// Downcast: succeeds only if the generic operation really is a module op.
    fn try_from(op: OperationBase) -> Result<Self, Self::Error> {
        if unsafe { mlir_operation_isa_module(op) } {
            Ok(unsafe { mlir_module_from_operation(op) })
        } else {
            Err(InvalidTypeCastError)
        }
    }
}

impl Into<OperationBase> for Module {
    #[inline]
    fn into(self) -> OperationBase {
        unsafe { mlir_module_get_operation(self) }
    }
}

impl Eq for Module {}
impl PartialEq for Module {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Identity comparison on the underlying FFI pointer.
        core::ptr::eq(self.0, other.0)
    }
}

impl fmt::Pointer for Module {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:p}", self.0)
    }
}

impl fmt::Debug for Module {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(name) = self.name() {
            write!(f, "Module({} @ {:p})", name, self)
        } else {
            write!(f, "Module(nofile @ {:p})", self)
        }
    }
}

impl fmt::Display for Module {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegates to the textual MLIR printer via the generic operation.
        let op: OperationBase = (*self).into();
        write!(f, "{}", &op)
    }
}

impl Emit for Module {
    fn file_type(&self) -> Option<&'static str> {
        Some("mlir")
    }

    /// Writes the module's textual form to the given file via its raw file
    /// descriptor/handle; surfaces the C-side error message on failure.
    fn emit(&self, f: &mut std::fs::File) -> anyhow::Result<()> {
        let fd = util::fs::get_file_descriptor(f);
        let mut error = MaybeUninit::uninit();
        let failed = unsafe { mlir_emit_to_file_descriptor(*self, fd, error.as_mut_ptr()) };
        if failed {
            // SAFETY: on failure the callee guarantees `error` was initialized
            // with an owned C string we must free via OwnedStringRef.
            let error = unsafe { OwnedStringRef::from_ptr(error.assume_init()) };
            Err(anyhow!("{}", &error))
        } else {
            Ok(())
        }
    }
}

/// This type represents an owned reference to an MLIR module
#[repr(transparent)]
pub struct OwnedModule(Module);

impl OwnedModule {
    /// Creates a new empty module with the given source location
    pub fn new(loc: Location) -> Self {
        unsafe { mlir_module_create_empty(loc) }
    }

    /// Parses an MLIR module from the given input string
    pub fn parse_string<S: Into<StringRef>>(context: Context, input: S) -> anyhow::Result<Self> {
        let base = unsafe { mlir_module_create_parse(context, input.into()) };
        if base.is_null() {
            Err(anyhow!("failed to parse module from the given string"))
        } else {
            Ok(Self(base))
        }
    }

    /// Parses an MLIR module from the file at the given path
    pub fn parse_file<S: Into<StringRef>>(context: Context, path: S) -> anyhow::Result<Self> {
        let base = unsafe { mlir_parse_file(context, path.into()) };
        if base.is_null() {
            Err(anyhow!("failed to parse module from the given string"))
        } else {
            Ok(Self(base))
        }
    }

    /// Returns the module name, if one was provided
    pub fn name(&self) -> Option<StringRef> {
        let name = unsafe { mlir_module_get_name(self.0) };
        if name.is_null() {
            None
        } else {
            Some(name)
        }
    }

    /// Looks up a function symbol by name in this module's symbol table.
    pub fn get_func_by_name<S: Into<StringRef>>(&self, name: S) -> Option<FuncOp> {
        SymbolTable::lookup_symbol_in(self.0.base(), name.into()).and_then(|op| op.try_into().ok())
    }
}

// NOTE(review): Send/Sync are asserted, not proven here — soundness depends
// on the thread-safety guarantees of the underlying MLIR C API; confirm.
unsafe impl Send for OwnedModule {}
unsafe impl Sync for OwnedModule {}

impl Clone for OwnedModule {
    fn clone(&self) -> Self {
        // Deep-clones the module on the C side; the result is independently owned.
        unsafe { mlir_module_clone(self.0) }
    }
}

impl Drop for OwnedModule {
    fn drop(&mut self) {
        unsafe { mlir_module_destroy(self.0) }
    }
}

impl Deref for OwnedModule {
    type Target = Module;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl AsRef<Module> for OwnedModule {
    fn as_ref(&self) -> &Module {
        &self.0
    }
}

impl Operation for OwnedModule {
    fn context(&self) -> Context {
        self.0.context()
    }

    fn base(&self) -> OperationBase {
        self.0.base()
    }
}

impl fmt::Debug for OwnedModule {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}

impl fmt::Display for OwnedModule {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Eq for OwnedModule {}
impl PartialEq for OwnedModule {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

impl Emit for OwnedModule {
    fn file_type(&self) -> Option<&'static str> {
        self.0.file_type()
    }

    fn emit(&self, f: &mut std::fs::File) -> anyhow::Result<()> {
        self.0.emit(f)
    }
}

// Raw C API surface. `#[link_name]` maps the Rust identifiers onto the
// MLIR/firefly symbol names.
extern "C" {
    #[link_name = "mlirOperationIsAModule"]
    fn mlir_operation_isa_module(op: OperationBase) -> bool;
    #[link_name = "mlirModuleCreateEmpty"]
    fn mlir_module_create_empty(loc: Location) -> OwnedModule;
    #[link_name = "mlirModuleCreateParse"]
    fn mlir_module_create_parse(context: Context, input: StringRef) -> Module;
    #[link_name = "mlirModuleGetContext"]
    fn mlir_module_get_context(module: Module) -> Context;
    #[link_name = "mlirModuleGetName"]
    fn mlir_module_get_name(module: Module) -> StringRef;
    #[link_name = "mlirModuleGetBody"]
    fn mlir_module_get_body(module: Module) -> Block;
    #[link_name = "mlirModuleClone"]
    fn mlir_module_clone(module: Module) -> OwnedModule;
    #[link_name = "mlirModuleDestroy"]
    fn mlir_module_destroy(module: Module);
    #[link_name = "mlirModuleGetOperation"]
    fn mlir_module_get_operation(module: Module) -> OperationBase;
    #[link_name = "mlirModuleFromOperation"]
    fn mlir_module_from_operation(op: OperationBase) -> Module;
    // The file-descriptor type differs per platform, hence the cfg split.
    #[cfg(not(windows))]
    #[link_name = "MLIREmitToFileDescriptor"]
    fn mlir_emit_to_file_descriptor(
        m: Module,
        fd: os::unix::io::RawFd,
        error_message: *mut *mut c_char,
    ) -> bool;
    #[cfg(windows)]
    #[link_name = "MLIREmitToFileDescriptor"]
    fn mlir_emit_to_file_descriptor(
        m: Module,
        fd: os::windows::io::RawHandle,
        error_message: *mut *mut c_char,
    ) -> bool;
    #[link_name = "mlirParseFile"]
    fn mlir_parse_file(context: Context, filename: StringRef) -> Module;
}
// Copyright 2023 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::any::Any; use std::sync::Arc; use common_exception::Result; use crate::processors::port::InputPort; use crate::processors::port::OutputPort; use crate::processors::processor::Event; use crate::processors::Processor; pub struct DuplicateProcessor { input: Arc<InputPort>, output1: Arc<OutputPort>, output2: Arc<OutputPort>, /// Whether two outputs should finish together. force_finish_together: bool, } /// This processor duplicate the input data to two outputs. 
impl DuplicateProcessor {
    /// Builds a duplicating processor over one input and two outputs.
    /// `force_finish_together`: see the field documentation on the struct.
    pub fn create(
        input: Arc<InputPort>,
        output1: Arc<OutputPort>,
        output2: Arc<OutputPort>,
        force_finish_together: bool,
    ) -> Self {
        DuplicateProcessor {
            input,
            output1,
            output2,
            force_finish_together,
        }
    }
}

#[async_trait::async_trait]
impl Processor for DuplicateProcessor {
    fn name(&self) -> String {
        "Duplicate".to_string()
    }

    fn as_any(&mut self) -> &mut dyn Any {
        self
    }

    /// Scheduler state machine:
    /// 1. finish everything when both outputs are finished (or one is, when
    ///    `force_finish_together` is set);
    /// 2. propagate upstream finish to both outputs;
    /// 3. back-pressure (`NeedConsume`) while any still-live output cannot accept data;
    /// 4. otherwise pull one block and push it to every still-live output.
    fn event(&mut self) -> Result<Event> {
        let is_finished1 = self.output1.is_finished();
        let is_finished2 = self.output2.is_finished();
        let one_finished = is_finished1 || is_finished2;
        let all_finished = is_finished1 && is_finished2;
        let can_push1 = self.output1.can_push();
        let can_push2 = self.output2.can_push();

        // Case 1: downstream is done with us — tear down input and both outputs.
        if all_finished || (self.force_finish_together && one_finished) {
            self.input.finish();
            self.output1.finish();
            self.output2.finish();
            return Ok(Event::Finished);
        }

        // Case 2: upstream exhausted — let downstream know there is no more data.
        if self.input.is_finished() {
            self.output1.finish();
            self.output2.finish();
            return Ok(Event::Finished);
        }

        // Case 3: a live output cannot accept data yet; wait for it to drain.
        // Finished outputs are exempt — they will never accept data again.
        if (!is_finished1 && !can_push1) || (!is_finished2 && !can_push2) {
            return Ok(Event::NeedConsume);
        }

        // Case 4: request data, and forward whatever is already buffered.
        // NOTE(review): set_need_data() is called even when has_data() is already
        // true — presumably harmless for the port protocol; confirm against the
        // pipeline scheduler's expectations.
        self.input.set_need_data();
        if self.input.has_data() {
            let block = self.input.pull_data().unwrap();
            // Clone for the first output; move the original into the second.
            if !is_finished1 {
                self.output1.push_data(block.clone());
            }
            if !is_finished2 {
                self.output2.push_data(block);
            }
            return Ok(Event::NeedConsume);
        }
        Ok(Event::NeedData)
    }
}
//https://leetcode.com/problems/unique-morse-code-words/ use std::collections::HashMap; impl Solution { pub fn unique_morse_representations(words: Vec<String>) -> i32 { let lettre_to_morse = [".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."]; let mut word_to_morse:HashMap<String, bool> = HashMap::new(); let mut unique_morse = 0; for word in &words { let mut new_decoded_word = String::new(); for c in word.chars() { let c_index = c as usize - 'a' as usize; new_decoded_word.push_str(lettre_to_morse[c_index]); } let word_exist = word_to_morse.entry(new_decoded_word).or_insert(false); if *word_exist == false { unique_morse += 1; *word_exist = true; } } unique_morse } }
fn main() {
    // Demonstration 1: a plain `fn` item coerces to a function pointer,
    // so it can be handed to another function that takes `fn(i32) -> i32`.
    {
        fn add_one(x: i32) -> i32 {
            x + 1
        }

        fn do_twice(f: fn(i32) -> i32, arg: i32) -> i32 {
            f(arg) + f(arg)
        }

        let result = do_twice(add_one, 5);
        println!("The answer is: {}", result);

        // A non-capturing closure also coerces to a function pointer.
        let result = do_twice(|x| x + 2, 5);
        println!("The answer is: {}", result);

        // Same thing, with the closure bound to a name first.
        let plus_two = |x| x + 2;
        let result = do_twice(plus_two, 5);
        println!("The answer is: {}", result);
    }

    // Demonstration 2: closures vs. named paths as `map` arguments.
    {
        let list_of_numbers = vec![1, 2, 3];
        #[allow(unused_variables)]
        let list_of_strings: Vec<String> =
            list_of_numbers.iter().map(|i| i.to_string()).collect();

        // Equivalent, naming the trait method instead of writing a closure.
        let list_of_numbers = vec![1, 2, 3];
        #[allow(unused_variables)]
        let list_of_strings: Vec<String> =
            list_of_numbers.iter().map(ToString::to_string).collect();

        #[derive(Debug)]
        #[allow(dead_code)]
        enum Status {
            Value(u32),
            Stop,
        }

        // Tuple-struct/enum-variant constructors are functions too:
        // wrap each u32 in 0..20 into Status::Value.
        let list_of_statuses: Vec<Status> = (0u32..20).map(Status::Value).collect();
        println!("list_of_statuses {:?}", list_of_statuses);
    }

    // Demonstration 3: returning a closure requires boxing it behind `dyn Fn`,
    // since each closure has its own unnameable type.
    {
        fn make_incrementer() -> Box<dyn Fn(i32) -> i32> {
            Box::new(|x| x + 1)
        }

        println!("{}", make_incrementer()(1));
    }
}
use crate::displays::{DisplayServer, Event};
use crate::config::Config;
use crate::window::{WindowType, Geometry, Window};
use std::rc::Rc;
use xcb_util::ewmh;
use xcb_util::keysyms::KeySymbols;
use crate::keys::xcb_keys::XcbKeyCombo;
use std::cell::RefCell;
use std::ops::Deref;
use futures::Stream;
use futures::task::{Context, Poll};
use std::pin::Pin;

/// XCB-backed display server: owns the EWMH connection and a queue of
/// pending events to deliver through the `Stream` implementation.
#[derive(Clone)]
pub struct XcbDisplayServer {
    // Shared XCB/EWMH connection (single-threaded, hence Rc not Arc).
    connection: Rc<ewmh::Connection>,
    // Queue of events not yet handed to the stream consumer.
    events: Rc<RefCell<Vec<Event<xcb::Window, XcbKeyCombo>>>>,
}

impl Stream for XcbDisplayServer {
    type Item = Event<xcb::Window, XcbKeyCombo>;

    /// Yields the next display event, draining the internal queue first.
    ///
    /// NOTE(review): when the queue is empty this calls `wait_for_event()`,
    /// which blocks the thread inside `poll_next` — unusual for a `Stream`.
    /// NOTE(review): the `Poll::Pending` branch returns without registering
    /// `_cx`'s waker, so the task may never be polled again — confirm how the
    /// executor drives this stream.
    fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.connection.flush();
        let mut events = self.events.borrow_mut();
        if events.len() == 0 {
            // Queue empty: block until X delivers something.
            match self.connection.wait_for_event() {
                Some(event) => Poll::Ready(Some(self.match_event(event))),
                None => Poll::Pending,
            }
        } else {
            // Drain queued events FIFO; DisplayEnded terminates the stream.
            let event = events.remove(0);
            if event == Event::DisplayEnded {
                return Poll::Ready(None)
            }
            // Opportunistically pick up any event already pending on the
            // connection so it is not lost while we return the queued one.
            if let Some(event) = self.connection.poll_for_event() {
                events.push(self.match_event(event));
            };
            Poll::Ready(Some(event))
        }
    }
}

impl DisplayServer for XcbDisplayServer {
    type Window = xcb::Window;
    type KeyCombo = XcbKeyCombo;

    /// Connects to the X server, registers for WM events on every root window
    /// (panicking if another WM already holds SUBSTRUCTURE_REDIRECT), and
    /// seeds the event queue with one ScreenAdded per root.
    fn new(_config: &Config) -> Self {
        let (connection, screen_num) = xcb::Connection::connect(None).unwrap();
        let connection = ewmh::Connection::connect(connection).map_err(|e| e.0).unwrap();
        let setup = connection.get_setup();
        let events = [(
            xcb::CW_EVENT_MASK,
            xcb::EVENT_MASK_BUTTON_PRESS
                | xcb::EVENT_MASK_BUTTON_RELEASE
                | xcb::EVENT_MASK_KEY_PRESS
                | xcb::EVENT_MASK_EXPOSURE
                | xcb::EVENT_MASK_SUBSTRUCTURE_REDIRECT
                | xcb::EVENT_MASK_SUBSTRUCTURE_NOTIFY,
        )];
        let screens = setup.roots()
            .map(|screen| screen.root())
            .map(|screen| {
                // Only one client may select SUBSTRUCTURE_REDIRECT on a root;
                // failure means another window manager is running.
                let cookie = xcb::change_window_attributes(&connection, screen, &events);
                if !cookie.request_check().is_ok() {
                    panic!("There's another Window Manager Running!");
                }
                screen
            })
            .map(|screen| Event::ScreenAdded(screen, Self::get_screen_view(&connection, screen)))
            .collect();
        XcbDisplayServer {
            connection: Rc::new(connection),
            events: Rc::new(RefCell::new(screens)),
        }
    }

    /// Applies the window's computed geometry and re-selects the input
    /// events this WM cares about on that window.
    fn configure_window(&self, window: &Window<xcb::Window>) {
        let view = window.get_view();
        let values = [
            (xcb::CONFIG_WINDOW_X as u16, view.position.x as u32),
            (xcb::CONFIG_WINDOW_Y as u16, view.position.y as u32),
            (xcb::CONFIG_WINDOW_WIDTH as u16, view.size.width),
            (xcb::CONFIG_WINDOW_HEIGHT as u16, view.size.height),
        ];
        // Window derefs to the raw xcb window id.
        let window_id = window.deref();
        xcb::configure_window(&self.connection, *window_id, &values);
        let events = [(
            xcb::CW_EVENT_MASK,
            xcb::EVENT_MASK_BUTTON_PRESS
                | xcb::EVENT_MASK_BUTTON_RELEASE
                | xcb::EVENT_MASK_KEY_PRESS
        )];
        xcb::change_window_attributes(&self.connection, *window_id, &events);
    }

    /// Maps or unmaps the window, i.e. shows or hides it.
    fn set_visibility(&self, window: &xcb::Window, show: bool) {
        if show {
            xcb::map_window(&self.connection, *window);
        } else {
            xcb::unmap_window(&self.connection, *window);
        }
    }

    /// Queues the sentinel event that makes the stream terminate.
    fn quit(&self) {
        self.events.borrow_mut().push(Event::DisplayEnded)
    }
}

impl XcbDisplayServer {
    /// Queries the geometry of a root window (origin forced to 0,0).
    fn get_screen_view(connection: &xcb::Connection, screen: u32) -> Geometry {
        let reply = xcb::get_geometry(connection, screen)
            .get_reply()
            .unwrap();
        Geometry::new(0, 0, u32::from(reply.width()), u32::from(reply.height()))
    }

    /// Translates a raw XCB event into this crate's Event type.
    /// Unknown event kinds map to Event::Ignored.
    fn match_event(&self, event: xcb::GenericEvent) -> Event<xcb::Window, XcbKeyCombo> {
        match event.response_type() {
            xcb::CONFIGURE_REQUEST => {
                Event::Ignored
            }
            xcb::KEY_PRESS => {
                // SAFETY-adjacent: cast_event is the standard xcb pattern for
                // downcasting a GenericEvent once response_type is checked.
                let key_press: &xcb::KeyPressEvent = unsafe { xcb::cast_event(&event) };
                let key_symbols = KeySymbols::new(&self.connection);
                let keysym = key_symbols.press_lookup_keysym(key_press, 0);
                let mod_mask = u32::from(key_press.state());
                let key_combo = XcbKeyCombo {
                    mod_mask,
                    key: keysym
                };
                Event::KeyPressed(key_combo)
            }
            xcb::MAP_REQUEST => {
                let map_request: &xcb::MapRequestEvent = unsafe { xcb::cast_event(&event) };
                Event::WindowAdded(map_request.window(), WindowType::Normal)
            }
            xcb::UNMAP_NOTIFY => {
                let unmap_notify: &xcb::UnmapNotifyEvent = unsafe { xcb::cast_event(&event) };
                // NOTE(review): the root-window filter below was disabled; every
                // unmap currently produces WindowRemoved for the *event* window
                // (not .window()) — confirm this is intentional.
                // if unmap_notify.event() == self.root {
                Event::WindowRemoved(unmap_notify.event())
                // } else {
                //     Event::Ignored
                // }
            }
            xcb::DESTROY_NOTIFY => {
                let destroy_event: &xcb::DestroyNotifyEvent = unsafe { xcb::cast_event(&event) };
                Event::WindowRemoved(destroy_event.window())
            }
            xcb::ENTER_NOTIFY => {
                let enter_event: &xcb::EnterNotifyEvent = unsafe { xcb::cast_event(&event) };
                Event::WindowFocused(enter_event.event())
            }
            _ => Event::Ignored,
        }
    }
}
use crate::texture::{Texture, SolidColor};
use crate::sphere::random_in_unit_sphere;
use crate::vec3::{Point3, Vec3};
use crate::sphere::random_unit_vector;
use crate::ray::Ray;
use crate::hittable::HitRecord;
use crate::color::Color;

/// A surface material: decides how a ray scatters at a hit point and how
/// much light the surface emits.
pub trait Material {
    /// Light emitted by the surface; black by default (non-emissive).
    fn emitted(&self, _u: f32, _v: f32, _p: &Point3) -> Color {
        Color(Vec3::new(0., 0., 0.))
    }

    /// Returns the scattered ray and its attenuation color, or None when the
    /// incoming ray is absorbed. (Parameter `spot` is the incoming ray;
    /// implementations name it `r_in`.)
    fn scatter(&self, spot: &Ray, rec: &HitRecord) -> Option<(Ray, Color)>;
}

/// Diffuse (matte) material with a texture-driven albedo.
pub struct Lambertian {
    pub albedo: Box<dyn Texture>,
}

impl Lambertian {
    pub fn new_from_texture(texture: Box<dyn Texture>) -> Self {
        Lambertian{ albedo: texture }
    }

    /// Convenience constructor wrapping a flat color in a SolidColor texture.
    pub fn new_from_color(color: Color) -> Self{
        Lambertian{ albedo: Box::new(SolidColor::new_from_color(color)) }
    }
}

impl Material for Lambertian {
    fn scatter(&self, r_in: &Ray, rec: &HitRecord) -> Option<(Ray, Color)> {
        // Scatter around the surface normal, jittered by a random unit vector.
        // NOTE(review): no guard for a near-zero scatter_direction (normal and
        // random vector nearly opposite) — confirm downstream handles it.
        let scatter_direction = rec.normal + random_unit_vector();
        let scattered = Ray::new(rec.p, scatter_direction, r_in.time);
        let attenuation = self.albedo.value(rec.u, rec.v, &rec.p);
        Some((scattered, attenuation))
    }
}

/// Mirror reflection of v about normal n: v - 2*(v·n)*n.
fn reflect(v: &Vec3, n: &Vec3) -> Vec3 {
    (*v) - 2. * (*v).dot(*n) * (*n)
}

/// Reflective metal material; `fuzzy` in [0, 1] blurs the reflection.
pub struct Metal {
    pub albedo: Color,
    pub fuzzy: f32,
}

impl Metal {
    /// `fuzzy` is clamped to at most 1.0 (larger values over-perturb the
    /// reflected ray).
    pub fn new(albedo: Color, fuzzy: f32) -> Self {
        Metal {
            albedo: albedo,
            fuzzy: fuzzy.min(1.0),
        }
    }
}

impl Material for Metal {
    fn scatter(&self, r_in: &Ray, rec: &HitRecord) -> Option<(Ray, Color)> {
        let reflected = reflect(&r_in.direction.normalize(), &rec.normal);
        // Perturb the reflection inside a fuzz sphere.
        let scattered = Ray::new_without_move(rec.p, reflected + self.fuzzy * random_in_unit_sphere());
        // Absorb rays that would scatter below the surface.
        if scattered.direction.dot(rec.normal) > 0. {
            Some((scattered, self.albedo))
        } else {
            None
        }
    }
}

/// Snell's-law refraction of unit vector `uv` through normal `n`,
/// with `etai_over_etat` the ratio of refractive indices.
fn refract(uv: Vec3, n: Vec3, etai_over_etat: f32) -> Vec3 {
    let cos_theta = (-uv).dot(n);
    let r_out_perp = etai_over_etat * (uv + cos_theta * n);
    let r_out_parallel = -((1.0 - r_out_perp.length_squared()).abs().sqrt() * n);
    r_out_perp + r_out_parallel
}

/// Schlick's polynomial approximation of the Fresnel reflectance.
fn schlick(cosine: f32, ref_idx: f32) -> f32 {
    let r0 = (1. - ref_idx) / (1. + ref_idx);
    let r0 = r0 * r0;
    r0 + (1. - r0) * (1. - cosine).powi(5)
}

/// Transparent dielectric (glass-like) material.
pub struct Dielectric {
    pub ref_idx: f32,
}

impl Material for Dielectric {
    fn scatter(&self, r_in: &Ray, rec: &HitRecord) -> std::option::Option<(Ray, Color)> {
        // Glass absorbs nothing: attenuation is always white.
        let attenuation = Color(Vec3::new(1., 1., 1.));
        // Index ratio depends on whether we are entering or leaving the medium.
        let etai_over_etat = if rec.is_front_face {
            1.0 / self.ref_idx
        } else {
            self.ref_idx
        };
        let unit_direction = r_in.direction.normalize();
        let cos_theta = (-unit_direction).dot(rec.normal).min(1.0);
        let sin_theta = (1.0 - cos_theta * cos_theta).sqrt();
        if etai_over_etat * sin_theta > 1.0 {
            // Total internal reflection: refraction impossible.
            let reflected = reflect(&unit_direction, &rec.normal);
            let scattered = Ray::new_without_move(rec.p, reflected);
            Some((scattered, attenuation))
        } else if rand::random::<f32>() < schlick(cos_theta, etai_over_etat) {
            // Probabilistic Fresnel reflection.
            let reflected = reflect(&unit_direction, &rec.normal);
            let scattered = Ray::new_without_move(rec.p, reflected);
            Some((scattered, attenuation))
        } else {
            // Otherwise refract through the surface.
            let refracted = refract(unit_direction, rec.normal, etai_over_etat);
            let scattered = Ray::new_without_move(rec.p, refracted);
            Some((scattered, attenuation))
        }
    }
}

/// Emissive material: contributes light, never scatters.
pub struct DiffuseLight {
    pub emit: Box<dyn Texture>,
}

impl DiffuseLight {
    pub fn new(c: Color) -> Self {
        Self {
            emit: Box::new(SolidColor::new_from_color(c)),
        }
    }
}

impl Material for DiffuseLight {
    fn emitted(&self, u: f32, v: f32, p: &Point3) -> Color {
        self.emit.value(u, v, p)
    }

    fn scatter(&self, _: &Ray, _: &HitRecord) -> Option<(Ray, Color)> {
        None
    }
}
extern crate rand;

use std::collections::HashSet;

use rand::Rng;

use super::Point;
use super::Direction;
use super::super::grid::Grid;
use super::MazeGenerator;

/// Randomized Prim's algorithm maze generator.
/// `filled` holds cells already carved into the maze; `frontier` holds
/// uncarved cells adjacent to at least one filled cell.
pub struct Prim {
    filled: HashSet<Point>,
    frontier: HashSet<Point>,
}

impl MazeGenerator for Prim {
    /// Carves a maze into `grid`: start from a random cell, then repeatedly
    /// pull a random frontier cell and connect it to the maze until the
    /// frontier is exhausted.
    fn generate<T: Grid>(grid: &mut T) {
        let (width, height) = grid.get_dimensions();
        if width == 0 || height == 0 {
            return;
        }
        let mut state = Prim::new();
        let x = rand::thread_rng().gen_range(0, width);
        let y = rand::thread_rng().gen_range(0, height);
        state.filled.insert((x, y));
        state.get_frontier(grid, (x, y));
        while !state.frontier.is_empty() {
            // Pick random frontier tile
            state.add_random_frontier(grid);
        }
    }
}

impl Prim {
    fn new() -> Prim {
        Prim {
            filled: HashSet::new(),
            frontier: HashSet::new(),
        }
    }

    /// Adds every in-bounds, not-yet-filled neighbor of (x, y) to the frontier.
    fn get_frontier<T: Grid>(&mut self, grid: &T, (x, y): Point) {
        let (width, height) = grid.get_dimensions();
        let filled = &self.filled;
        let frontier = &mut self.frontier;
        // Check left
        if x > 0 {
            if !filled.contains(&(x - 1, y)) {
                frontier.insert((x - 1, y));
            }
        }
        // Check right
        if x < width - 1 {
            if !filled.contains(&(x + 1, y)) {
                frontier.insert((x + 1, y));
            }
        }
        // Check up (larger y; this grid presumably grows upward — the same
        // convention is used in place_tile_filled)
        if y < height - 1 {
            if !filled.contains(&(x, y + 1)) {
                frontier.insert((x, y + 1));
            }
        }
        // Check down
        if y > 0 {
            if !filled.contains(&(x, y - 1)) {
                frontier.insert((x, y - 1));
            }
        }
    }

    /// Pops one random frontier cell, carves it into the maze, and expands
    /// the frontier around it.
    fn add_random_frontier<T: Grid>(&mut self, grid: &mut T) {
        let tile = self.pop_random_frontier()
            .expect("Invalid frontier state");
        self.place_tile_filled(grid, tile);
        self.get_frontier(grid, tile);
    }

    /// Removes and returns a uniformly random element of the frontier.
    ///
    /// NOTE(review): if the frontier were empty, gen_range(0, 0) would panic
    /// before the Err branch is ever reached; callers guard with
    /// !frontier.is_empty(), so the Err path is effectively dead code.
    fn pop_random_frontier(&mut self) -> Result<Point, &'static str> {
        // Get size of frontier
        let max_val = self.frontier.len();
        // get random index for value
        let index = rand::thread_rng().gen_range(0, max_val);
        // Linear scan: HashSet has no indexed access.
        let mut point = Option::None;
        for (i, v) in self.frontier.iter().enumerate() {
            if i == index {
                point = Option::Some(*v);
                break;
            }
        }
        if let Option::Some(p) = point {
            assert!(self.frontier.remove(&p));
            Result::Ok(p)
        } else {
            Result::Err("Frontier is empty")
        }
    }

    /// Connects (x, y) to the existing maze by carving toward one random
    /// already-filled neighbor, then marks the cell filled.
    fn place_tile_filled<T: Grid>(&mut self, grid: &mut T, (x, y): Point) {
        let mut dirs = Vec::new();
        let (width, height) = grid.get_dimensions();
        let filled = &mut self.filled;
        // Check up
        if y < height - 1 && filled.contains(&(x, y + 1)) {
            dirs.push(Direction::Up);
        }
        // Check down
        if y > 0 && filled.contains(&(x, y - 1)) {
            dirs.push(Direction::Down);
        }
        // Check left
        if x > 0 && filled.contains(&(x - 1, y)) {
            dirs.push(Direction::Left);
        }
        // Check right
        if x < width - 1 && filled.contains(&(x + 1, y)) {
            dirs.push(Direction::Right);
        }
        if dirs.len() > 0 {
            // Pick random direction
            let index = rand::thread_rng().gen_range(0, dirs.len());
            grid.carve_path((x, y), dirs[index])
                .expect("Failed to carve path");
            filled.insert((x, y));
        }
    }
}
use crate::file_util::read_lines;

/// One 10x10 image tile from the Advent of Code day 20 input.
/// Each row is stored as a 10-bit number (bit i = column i, '#' = 1).
#[derive(Debug)]
struct Block {
    id: u16,
    rows: [u16; 10],
    // The four borders (top, right, bottom, left) read clockwise around the tile.
    border_clockwise: [u16; 4],
    // The same four borders read in the opposite direction.
    border_anti_clockwise: [u16; 4],
    // Per side: the id of the matching neighbor tile and whether the match
    // required flipping; None when the side is on the puzzle's outer edge.
    matching_ids: [Option<(u16, bool)>; 4]
}

/// Mirror transforms applicable to a tile.
#[derive(Eq, PartialEq, Clone)]
enum Flip {
    FlipX,
    FlipY,
    FlipXY,
    Identity
}

/// Quarter-turn rotations applicable to a tile.
#[derive(Eq, PartialEq, Clone)]
enum Rotate {
    RotateLeft,
    RotateRight,
    RotateRightRight,
    Identity
}

impl Block {
    /// Builds a tile from its id and raw rows, precomputing all eight
    /// border readings. Note: the rows are 10-bit values inside u16, so a
    /// reversed border is `reverse_bits() >> 6` (drop the 6 padding bits).
    fn new(id: u16, rows: [u16; 10]) -> Self {
        let mut left = 0;
        let mut right = 0;
        let mut multiplier = 1_u16;
        // clock-wise bit arrangement
        for i in 0..10 {
            left += if rows[10 - i - 1] & 1 == 1 { multiplier } else { 0 };
            right += if rows[i] & 512 == 512 { multiplier } else { 0 };
            multiplier <<= 1;
        }
        Block {
            id,
            rows,
            border_clockwise: [rows[0], right, rows[9].reverse_bits() >> 6, left],
            border_anti_clockwise: [
                rows[0].reverse_bits() >> 6,
                right.reverse_bits() >> 6,
                rows[9],
                left.reverse_bits() >> 6
            ],
            matching_ids: [None; 4]
        }
    }

    /// Returns the tile's rows after applying `flip` first, then `rotate`.
    fn transformed_rows(&self, flip: Flip, rotate: Rotate) -> [u16; 10] {
        let mut result = [0; 10];
        match flip {
            // No flip: copy the rows unchanged.
            Flip::Identity => self.rows.iter()
                .enumerate()
                .for_each(|(i, r)| result[i] = *r),
            // Horizontal mirror: reverse the bits of every row.
            Flip::FlipX => {
                self.rows.iter()
                    .enumerate()
                    .for_each(|(i, r)| result[i] = r.reverse_bits() >> 6)
            },
            // Vertical mirror: reverse the order of the rows, bit column by
            // bit column.
            Flip::FlipY => {
                let mut i = 1_u16;
                for _ in 0..10 {
                    for y in 0..10 {
                        result[y] += self.rows[10 - y - 1] & i
                    }
                    i <<= 1;
                }
            },
            // Both mirrors at once (equivalent to a 180-degree rotation).
            Flip::FlipXY => {
                result = Self::transform_xy(&self.rows)
            }
        }
        match rotate {
            // 90 degrees counter-clockwise: transpose, then mirror rows.
            Rotate::RotateLeft => {
                let mut digit = 1u16;
                let mut replacement = [0_u16; 10];
                for y in 0..10 {
                    let mut other_digit = 1;
                    for x in 0..10 {
                        replacement[y] += if result[x] & digit == digit { other_digit } else { 0 };
                        other_digit <<= 1;
                    }
                    digit <<= 1;
                }
                for i in 0..10 {
                    replacement[i] = replacement[i].reverse_bits() >> 6
                }
                replacement
            },
            // 90 degrees clockwise: transpose with reversed row order.
            Rotate::RotateRight => {
                let mut digit = 1u16;
                let mut replacement = [0_u16; 10];
                for y in 0..10 {
                    let mut other_digit = 512u16;
                    for x in 0..10 {
                        replacement[y] += if result[10 - x - 1] & digit == digit { other_digit } else { 0 };
                        other_digit >>= 1;
                    }
                    digit <<= 1;
                }
                for i in 0..10 {
                    replacement[i] = replacement[i].reverse_bits() >> 6
                }
                replacement
            },
            // 180 degrees: same bit-reversal used by FlipXY.
            Rotate::RotateRightRight => {
                Self::transform_xy(&result)
            }
            Rotate::Identity => result
        }
    }

    /// Rotates the given rows 180 degrees: reverse row order and reverse the
    /// bits within each row.
    fn transform_xy(to_transform: &[u16; 10]) -> [u16; 10] {
        let mut result = [0u16; 10];
        for y in 0..10 {
            let mut i = 1_u16;
            let reversed = to_transform[10 - y - 1].reverse_bits() >> 6;
            for _ in 0..10 {
                result[y] += reversed & i;
                i <<= 1;
            }
        }
        result
    }

    /// Records a border match between `self` and `block` on both tiles.
    /// The bool stored alongside the neighbor id says whether the match was
    /// against the flipped (anti-clockwise) reading of the border.
    /// Stops after the first matching side pair.
    fn add_matching_sides(&mut self, block: &mut Block) -> &mut Self {
        let iter = self.border_clockwise.iter()
            .zip(self.border_anti_clockwise.iter()).enumerate();
        for (id, (side, flipped_side)) in iter {
            for (other_id, other_side) in block.border_clockwise.iter().enumerate() {
                if side == other_side || flipped_side == other_side {
                    self.matching_ids[id] = Some((block.id, flipped_side == other_side));
                    block.matching_ids[other_id] = Some((self.id, flipped_side == other_side));
                    return self;
                }
            }
        }
        self
    }

    /// Number of sides without a matching neighbor (2 = corner tile,
    /// 1 = edge tile, 0 = interior tile).
    fn missing_sides(&self) -> usize {
        self.matching_ids.iter().filter(|x| x.is_none()).count()
    }
}

/// Reverses a 10-bit border value stored in a u16.
trait FlipSide {
    fn flip_side(self) -> Self;
}

impl FlipSide for u16 {
    fn flip_side(self) -> Self {
        self.reverse_bits() >> 6
    }
}

/// Day 20 entry point: parses the tiles, pairs up matching borders, and
/// prints part 1 (product of the four corner tile ids). Part 2 (assembling
/// the full image) was started but never completed — see the final comment.
#[allow(dead_code)]
pub fn run_day_twenty() {
    let mut iter = read_lines("assets/day_twenty");
    let mut signatures = read_image_signatures(&mut iter);
    populate_matches(&mut signatures);
    // Corners are exactly the tiles with two unmatched sides.
    let corners: Vec<&Block> = signatures.iter()
        .filter(|x| x.missing_sides() == 2)
        .collect();
    println!(
        "Part 1 {:?}",
        corners.iter().map(|it| it.id as usize).product::<usize>()
    );
    // Orient the first corner so its unmatched sides face up and left.
    // NOTE(review): index_adjustment is computed but never used — leftover
    // from the abandoned part 2.
    let start_corner = corners[0];
    let matching_ids = &start_corner.matching_ids;
    let (transform, index_adjustment) = if matching_ids[0].is_none() && matching_ids[1].is_none() {
        (Rotate::RotateRight, 1_usize)
    } else if matching_ids[1].is_none() && matching_ids[2].is_none() {
        (Rotate::Identity, 0)
    } else if matching_ids[2].is_none() && matching_ids[3].is_none() {
        (Rotate::RotateLeft, 3)
    } else {
        (Rotate::RotateRightRight, 2)
    };
    let mut board = vec!(vec!(None; 10); 1);
    board[0][0] = Some((start_corner, transform, Flip::Identity));
    print(&board);
    // I give up ...
}

/// Debug-prints the partially assembled board, one row of tiles at a time,
/// rendering each tile's rows in binary.
fn print(board: &[Vec<Option<(&Block, Rotate, Flip)>>]) {
    for row in board.iter() {
        let cols = row.iter().filter_map(|x| {
            if let Some((board, rot, flip)) = x {
                Some(board.transformed_rows(flip.clone(), rot.clone()))
            } else {
                None
            }
        }).collect::<Vec<[u16; 10]>>();
        for i in 0..10 {
            for col in cols.iter() {
                print!("{:010b} ", col[i].reverse_bits() >> 6);
            }
            println!();
        }
        println!();
    }
}

/// Pairs up matching borders across all tiles: for each tile, compares it
/// against every tile after it (split_at_mut is used to hold two mutable
/// references into the same Vec).
fn populate_matches(signatures: &mut Vec<Block>) {
    for i in 1..signatures.len() {
        let split = signatures.split_at_mut(i);
        let left = split.0;
        let right = split.1;
        if let Some(signature) = left.last_mut() {
            for other_signature in right.iter_mut(){
                signature.add_matching_sides(other_signature);
            }
        }
    }
}

/// Parses every "Tile NNNN:" block from the input until the iterator ends.
fn read_image_signatures(iter: &mut impl Iterator<Item = String>) -> Vec<Block> {
    let mut signatures = Vec::new();
    while let Some(block) = read_image_signature(iter) {
        signatures.push(block);
    }
    signatures
}

/// Parses a single tile: a "Tile NNNN:" header line (id starts at column 5)
/// followed by 10 rows of '#'/'.' terminated by a blank line. Returns None
/// when the header is missing or unparsable (end of input).
fn read_image_signature(iter: &mut impl Iterator<Item = String>) -> Option<Block> {
    let id = iter.next()?
        .chars()
        .skip(5)
        .take_while(|it| *it != ':')
        .collect::<String>()
        .parse::<u16>()
        .ok()?;
    let block_iter = iter
        .take_while(|it| !it.trim().is_empty());
    let mut rows = [0; 10];
    for (i, block) in block_iter.enumerate() {
        // Bit k of a row corresponds to column k of the tile.
        let mut digit = 1_u16;
        for c in block.chars(){
            if c == '#' {
                rows[i] += digit;
            }
            digit <<= 1
        }
    }
    Some(Block::new(id, rows))
}
pub use bound_int_types::bound_int_types;

/// Evaluates a left-associative chain of `+`/`-` operations over bounded-int
/// values by recursive token munching: the first pair is reduced via
/// `.plus(..)` / `.minus(..)`, the intermediate result is bound to a local,
/// and the macro recurses with that local in front of the remaining tokens.
/// The base case (a single expression) returns it unchanged.
#[macro_export]
macro_rules! bound_int_eval {
    ( $lhs:tt + $rhs:tt $( $op:tt $rest:tt )* ) => {{
        let sum = $lhs.plus($rhs);
        bound_int_eval!(sum $( $op $rest )*)
    }};
    ( $lhs:tt - $rhs:tt $( $op:tt $rest:tt )* ) => {{
        // NOTE(review): the intermediate is also named `sum` here even though
        // it holds a difference; the name is only a macro-internal binding.
        let sum = $lhs.minus($rhs);
        bound_int_eval!(sum $( $op $rest )*)
    }};
    // Base case: nothing left to fold.
    ($val:expr) => { $val };
}
/// Like a circular buffer of size 2, aka a double buffer: one slot is the
/// "old" value, the other the "new" one, and `swap` flips the roles.
#[derive(Debug)]
pub struct OldNew<T>
where
    T: Default,
{
    left: T,
    right: T,
    state: OldNewState,
}

/// Records which slot currently plays which role.
#[derive(Debug)]
pub enum OldNewState {
    LeftOldRightNew,
    LeftNewRightOld,
}

/// A pair of views over both slots, labelled by role.
#[derive(Debug)]
pub struct OldNewResult<T> {
    pub old: T,
    pub new: T,
}

impl<T> Default for OldNew<T>
where
    T: Default,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<T> OldNew<T>
where
    T: Default,
{
    /// Creates a buffer with both slots default-initialized; the left slot
    /// starts out as the "new" one.
    pub fn new() -> Self {
        Self {
            left: T::default(),
            right: T::default(),
            state: OldNewState::LeftNewRightOld,
        }
    }

    /// Overwrites whichever slot currently holds the "new" value.
    pub fn set_new(&mut self, value: T) {
        let slot = match self.state {
            OldNewState::LeftNewRightOld => &mut self.left,
            OldNewState::LeftOldRightNew => &mut self.right,
        };
        *slot = value;
    }

    /// Borrows both slots at once, labelled old/new per the current state.
    pub fn get(&self) -> OldNewResult<&T> {
        if let OldNewState::LeftNewRightOld = self.state {
            OldNewResult {
                old: &self.right,
                new: &self.left,
            }
        } else {
            OldNewResult {
                old: &self.left,
                new: &self.right,
            }
        }
    }

    /// Mutably borrows both slots at once, labelled old/new.
    pub fn get_mut(&mut self) -> OldNewResult<&mut T> {
        if let OldNewState::LeftNewRightOld = self.state {
            OldNewResult {
                old: &mut self.right,
                new: &mut self.left,
            }
        } else {
            OldNewResult {
                old: &mut self.left,
                new: &mut self.right,
            }
        }
    }

    /// Flips which slot counts as "new" (and which as "old"). No data moves.
    pub fn swap(&mut self) {
        self.state = if let OldNewState::LeftNewRightOld = self.state {
            OldNewState::LeftOldRightNew
        } else {
            OldNewState::LeftNewRightOld
        };
    }
}
// Copyright 2018 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use failure::{format_err, Error}; use fidl::endpoints::create_endpoints; use fidl_fuchsia_auth::AuthStateSummary; use fidl_fuchsia_auth_account::{ AccountManagerMarker, AccountManagerProxy, LocalAccountId, Status, }; use fuchsia_async as fasync; use futures::prelude::*; /// Executes the supplied test function against a connected AccountManagerProxy. fn proxy_test<TestFn, Fut>(test_fn: TestFn) where TestFn: FnOnce(AccountManagerProxy) -> Fut, Fut: Future<Output = Result<(), Error>>, { let mut executor = fasync::Executor::new().expect("Failed to create executor"); let proxy = fuchsia_app::client::connect_to_service::<AccountManagerMarker>() .expect("Failed to connect to account manager service");; executor.run_singlethreaded(test_fn(proxy)).expect("Executor run failed.") } /// Calls provision_new_account on the supplied account_manager, returning an error on any /// non-OK responses, or the account ID on success. async fn provision_new_account( account_manager: &AccountManagerProxy, ) -> Result<LocalAccountId, Error> { match await!(account_manager.provision_new_account())? { (Status::Ok, Some(new_account_id)) => Ok(*new_account_id), (status, _) => Err(format_err!("ProvisionNewAccount returned status: {:?}", status)), } } // TODO(jsankey): Work with ComponentFramework and cramertj@ to develop a nice Rust equivalent of // the C++ TestWithEnvironment fixture to provide isolated environments for each test case. For now // we verify all functionality in a single test case. #[test] fn test_account_functionality() { proxy_test(async move |account_manager| { // Verify we initially have no accounts. assert_eq!(await!(account_manager.get_account_ids())?, vec![]); // Provision a new account. 
let mut account_1 = await!(provision_new_account(&account_manager))?; assert_eq!( await!(account_manager.get_account_ids())?, vec![LocalAccountId { id: account_1.id }] ); // Provision a second new account and verify it has a different ID. let mut account_2 = await!(provision_new_account(&account_manager))?; assert_ne!(account_1.id, account_2.id); // Connect a channel to one of these accounts and verify it's usable. let (acp_client_end, _) = create_endpoints()?; let (account_client_end, account_server_end) = create_endpoints()?; assert_eq!( await!(account_manager.get_account( &mut account_1, acp_client_end, account_server_end ))?, Status::Ok ); let account_proxy = account_client_end.into_proxy()?; let account_auth_state = match await!(account_proxy.get_auth_state())? { (Status::Ok, Some(auth_state)) => *auth_state, (status, _) => return Err(format_err!("GetAuthState returned status: {:?}", status)), }; assert_eq!(account_auth_state.summary, AuthStateSummary::Unknown); // Connect a channel to the account's default persona and verify it's usable. let (persona_client_end, persona_server_end) = create_endpoints()?; assert_eq!(await!(account_proxy.get_default_persona(persona_server_end))?.0, Status::Ok); let persona_proxy = persona_client_end.into_proxy()?; let persona_auth_state = match await!(persona_proxy.get_auth_state())? { (Status::Ok, Some(auth_state)) => *auth_state, (status, _) => return Err(format_err!("GetAuthState returned status: {:?}", status)), }; assert_eq!(persona_auth_state.summary, AuthStateSummary::Unknown); // Delete both accounts and verify they are removed. assert_eq!(await!(account_manager.remove_account(&mut account_1))?, Status::Ok); assert_eq!(await!(account_manager.remove_account(&mut account_2))?, Status::Ok); assert_eq!(await!(account_manager.get_account_ids())?, vec![]); Ok(()) }); }
extern crate serde;

mod test_utils;

use chrono::NaiveDate;
use flexi_logger::LoggerHandle;
use hdbconnect_async::{Connection, HdbResult};

// From wikipedia:
//
// Isolation level     Lost updates   Dirty reads   Non-repeatable reads   Phantoms
// ----------------------------------------------------------------------------------------
// Read Uncommitted    don't occur    may occur     may occur              may occur
// Read Committed      don't occur    don't occur   may occur              may occur
// Repeatable Read     don't occur    don't occur   don't occur            may occur
// Serializable        don't occur    don't occur   don't occur            don't occur
//

// Integration test: verifies transaction visibility/rollback semantics for
// every isolation level the database supports (READ UNCOMMITTED is expected
// to be rejected by HANA with ERR_FEATURE_NOT_SUPPORTED).
#[tokio::test] // cargo test --test test_031_transactions -- --nocapture
pub async fn test_031_transactions() -> HdbResult<()> {
    let mut log_handle = test_utils::init_logger();
    let start = std::time::Instant::now();
    let mut connection = test_utils::get_authenticated_connection().await?;
    // Manual transaction control is required for the whole test.
    connection.set_auto_commit(false).await?;

    // READ UNCOMMITTED must fail; assert the exact server error classification.
    if let Some(server_error) = write1_read2(&mut log_handle, &mut connection, "READ UNCOMMITTED")
        .await
        .err()
        .unwrap()
        .server_error()
    {
        let error_info: (i32, String, String) = connection
            .query(&format!(
                "select * from SYS.M_ERROR_CODES where code = {}",
                server_error.code()
            ))
            .await?
            .try_into()
            .await?;
        assert_eq!(error_info.0, 7);
        assert_eq!(error_info.1, "ERR_FEATURE_NOT_SUPPORTED");
        log::info!("error_info: {:?}", error_info);
    } else {
        panic!("did not receive ServerError");
    }

    // The supported isolation levels must all pass the same scenario.
    write1_read2(&mut log_handle, &mut connection, "READ COMMITTED").await?;
    write1_read2(&mut log_handle, &mut connection, "REPEATABLE READ").await?;
    write1_read2(&mut log_handle, &mut connection, "SERIALIZABLE").await?;

    // SET TRANSACTION { READ ONLY | READ WRITE }
    // SET TRANSACTION LOCK WAIT TIMEOUT <unsigned_integer>   // (milliseconds)
    // let result = conn.exec("SET TRANSACTION LOCK WAIT TIMEOUT 3000")?; // (milliseconds)

    test_utils::closing_info(connection, start).await
}

// Scenario: uncommitted inserts must be visible on the writing connection,
// invisible on a second connection, disappear on rollback, and become
// visible everywhere after commit. The checksum (sum of `nmbr`) encodes the
// visible row set: 321 = base rows, 654_321 = base + batched rows.
async fn write1_read2(
    _log_handle: &mut LoggerHandle,
    connection1: &mut Connection,
    isolation: &str,
) -> HdbResult<()> {
    log::info!("Test isolation level {}", isolation);
    connection1
        .exec(&format!("SET TRANSACTION ISOLATION LEVEL {isolation}"))
        .await?;
    log::info!(
        "verify that we can read uncommitted data in same connection, but not on other connection"
    );
    // Drop errors are ignored on purpose: the table may not exist yet.
    connection1
        .multiple_statements_ignore_err(vec!["drop table TEST_TRANSACTIONS"])
        .await;
    let stmts = vec![
        "create table TEST_TRANSACTIONS (strng NVARCHAR(100) primary key, nmbr INT, dt LONGDATE)",
        "insert into TEST_TRANSACTIONS (strng,nmbr,dt) values('Hello',1,'01.01.1900')",
        "insert into TEST_TRANSACTIONS (strng,nmbr,dt) values('world!',20,'01.01.1901')",
        "insert into TEST_TRANSACTIONS (strng,nmbr,dt) values('I am here.',300,'01.01.1902')",
    ];
    connection1.multiple_statements(stmts).await?;
    connection1.commit().await?;

    // read above three lines
    assert_eq!(get_checksum(connection1).await, 321);

    let mut connection2 = connection1.spawn().await?;
    // read them also from a new connection
    assert_eq!(get_checksum(&mut connection2).await, 321);

    let mut prepared_statement1 = connection1
        .prepare("insert into TEST_TRANSACTIONS (strng,nmbr,dt) values(?,?,?)")
        .await?;
    prepared_statement1.add_batch(&("who", 4000, NaiveDate::from_ymd_opt(1903, 1, 1).unwrap()))?;
    prepared_statement1.add_batch(&(
        "added",
        50_000,
        NaiveDate::from_ymd_opt(1903, 1, 1).unwrap(),
    ))?;
    prepared_statement1.add_batch(&(
        "this?",
        600_000,
        NaiveDate::from_ymd_opt(1903, 1, 1).unwrap(),
    ))?;
    prepared_statement1.execute_batch().await?;

    // read the new lines from connection1
    assert_eq!(get_checksum(connection1).await, 654_321);
    // fail to read the new lines from connection2
    assert_eq!(get_checksum(&mut connection2).await, 321);

    // fail to read the new lines from connection1 after rollback
    connection1.rollback().await?;
    assert_eq!(get_checksum(connection1).await, 321);

    // add and read the new lines from connection1
    prepared_statement1.add_batch(&("who", 4000, NaiveDate::from_ymd_opt(1903, 1, 1).unwrap()))?;
    prepared_statement1.add_batch(&(
        "added",
        50_000,
        NaiveDate::from_ymd_opt(1903, 1, 1).unwrap(),
    ))?;
    prepared_statement1.add_batch(&(
        "this?",
        600_000,
        NaiveDate::from_ymd_opt(1903, 1, 1).unwrap(),
    ))?;
    prepared_statement1.execute_batch().await?;
    assert_eq!(get_checksum(connection1).await, 654_321);

    // fail to read the new lines from connection2
    assert_eq!(get_checksum(&mut connection2).await, 321);

    // after commit, read the new lines also from connection2
    connection1.commit().await?;
    assert_eq!(get_checksum(&mut connection2).await, 654_321);

    Ok(())
}

// Sums the `nmbr` column — a cheap fingerprint of which rows this
// connection can currently see.
async fn get_checksum(conn: &mut Connection) -> usize {
    let resultset = conn
        .query("select sum(nmbr) from TEST_TRANSACTIONS")
        .await
        .unwrap();
    let checksum: usize = resultset.try_into().await.unwrap();
    checksum
}
//! minigrep library: command-line configuration parsing and line searching.

use std::env;
use std::error::Error;
use std::fs;

/// Runtime configuration parsed from the command line.
pub struct Config {
    /// The pattern to search for.
    pub query: String,
    /// Path of the file to search.
    pub filename: String,
    /// When `false`, matching ignores case.
    pub case_sensitive: bool,
}

// The error type for Result is &'static str as this is the type of string
// literals, which is what is being returned in this program.
impl Config {
    /// Builds a `Config` from the process arguments.
    ///
    /// Returns `Err` with a static usage message when the query or the
    /// filename is missing — a usage error is reported to the caller rather
    /// than invoking `panic!`, since it is not a programming bug.
    pub fn new(mut args: std::env::Args) -> Result<Config, &'static str> {
        // Skip the 0th argument: it is the calling context (program name).
        args.next();

        let query = match args.next() {
            Some(arg) => arg,
            None => return Err("Didn't get a query string"),
        };

        let filename = match args.next() {
            Some(arg) => arg,
            None => return Err("Didn't get a file name"),
        };

        // Case-sensitive unless the CASE_INSENSITIVE env var is set.
        // BUG FIX: the original referenced `env::var` while `use std::env;`
        // was commented out, so this file did not compile.
        let case_sensitive = env::var("CASE_INSENSITIVE").is_err();

        Ok(Config { query, filename, case_sensitive })
    }
}

/// Reads the configured file and prints every matching line.
///
/// `Box<dyn Error>` is a trait object, letting this function return any
/// concrete error type (I/O failure, invalid UTF-8, ...) behind one type.
pub fn run(config: Config) -> Result<(), Box<dyn Error>> {
    let contents = fs::read_to_string(config.filename)?;

    let results = if config.case_sensitive {
        search(&config.query, &contents)
    } else {
        search_case_insensitive(&config.query, &contents)
    };

    for line in results {
        println!("{}", line);
    }

    // Only the side effects of `run` matter; signal success with unit.
    Ok(())
}

/// Returns every line of `contents` containing `query` (case-sensitive).
///
/// The explicit `'a` lifetime ties the returned slices to `contents`, not
/// `query`: the result borrows from the searched text only, so `query` may
/// be dropped after searching.
fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    contents
        .lines()
        .filter(|line| line.contains(query))
        .collect()
}

/// Returns every line of `contents` containing `query`, ignoring case.
fn search_case_insensitive<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    let query = query.to_lowercase();
    contents
        .lines()
        .filter(|line| line.to_lowercase().contains(&query))
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn case_sensitive() {
        let query = "duct";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.
Duct tape.";

        assert_eq!(vec!["safe, fast, productive."], search(query, contents));
    }

    #[test]
    fn case_insensitive() {
        let query = "rUsT";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.
Trust me.";

        assert_eq!(
            vec!["Rust:", "Trust me."],
            search_case_insensitive(query, contents)
        );
    }
}
use rustaoc2021::calculator::run_timed;
use rustaoc2021::day15::part_2;

/// Entry point: run part 2 of day 15 through the timing harness,
/// using the puzzle input embedded at compile time.
fn main() {
    // The input is baked into the binary, so no file I/O can fail at runtime.
    let puzzle_input = include_str!(r"../../resources/inputs/day15-input.txt");
    run_timed(part_2, puzzle_input, 2);
}
// Copyright 2020-2021, The Tremor Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::errors::{Error, Result}; use clap::{self, ArgMatches}; use clap_generate::{ generate, generators::{Bash, Elvish, Fish, PowerShell, Zsh}, }; use std::path::Path; const ERR_MSG: &str = "Unable to guess your shell, please provide an explicit shell to create completions for."; pub(crate) fn run_cmd(mut app: clap::App, matches: &ArgMatches) -> Result<()> { if let Some((shell, _)) = matches.subcommand() { generate_for_shell(app, shell) } else { // TODO There is no way in standard clap to narrow help to the subcommand app.print_long_help().map_err(|e| e.into()) } } fn generate_for_shell(mut app: clap::App, shell: &str) -> Result<()> { match shell { "guess" => guess_shell(app), "bash" => { generate::<Bash, _>(&mut app, "tremor", &mut std::io::stdout()); Ok(()) } "elvish" => { generate::<Elvish, _>(&mut app, "tremor", &mut std::io::stdout()); Ok(()) } "fish" => { generate::<Fish, _>(&mut app, "tremor", &mut std::io::stdout()); Ok(()) } "powershell" => { generate::<PowerShell, _>(&mut app, "tremor", &mut std::io::stdout()); Ok(()) } "zsh" => { generate::<Zsh, _>(&mut app, "tremor", &mut std::io::stdout()); Ok(()) } _ => Err(format!("Unsupported shell: {}", shell).into()), } } fn guess_shell(app: clap::App) -> Result<()> { let shell = if std::env::var_os("ZSH_NAME").is_some() { Ok("zsh".to_string()) } else if std::env::var_os("PSModulePath").is_some() { Ok("powershell".to_string()) 
} else { match std::env::var_os("SHELL") .and_then(|s| Path::new(&s).file_name().map(std::ffi::OsStr::to_os_string)) { Some(shell) => { #[allow(clippy::map_err_ignore)] // Error is OsString, unusable here shell.into_string().map_err(|_| Error::from(ERR_MSG)) } None => Err(ERR_MSG.into()), } }?; generate_for_shell(app, &shell) }
#[doc = r"Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::SAC {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read once; the closure sees the current value through both the
        // reader and the seeded writer, then the result is written back.
        let bits = self.register.get();
        let reader = R { bits };
        let mut writer = W { bits };
        self.register.set(f(&reader, &mut writer).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        let bits = self.register.get();
        R { bits }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value.
        let mut writer = W {
            bits: Self::reset_value(),
        };
        self.register.set(f(&mut writer).bits);
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = "Possible values of the field `ADC_SAC_AVG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ADC_SAC_AVGR {
    #[doc = "No hardware oversampling"]
    ADC_SAC_AVG_OFF,
    #[doc = "2x hardware oversampling"]
    ADC_SAC_AVG_2X,
    #[doc = "4x hardware oversampling"]
    ADC_SAC_AVG_4X,
    #[doc = "8x hardware oversampling"]
    ADC_SAC_AVG_8X,
    #[doc = "16x hardware oversampling"]
    ADC_SAC_AVG_16X,
    #[doc = "32x hardware oversampling"]
    ADC_SAC_AVG_32X,
    #[doc = "64x hardware oversampling"]
    ADC_SAC_AVG_64X,
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl ADC_SAC_AVGR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            Self::ADC_SAC_AVG_OFF => 0,
            Self::ADC_SAC_AVG_2X => 1,
            Self::ADC_SAC_AVG_4X => 2,
            Self::ADC_SAC_AVG_8X => 3,
            Self::ADC_SAC_AVG_16X => 4,
            Self::ADC_SAC_AVG_32X => 5,
            Self::ADC_SAC_AVG_64X => 6,
            Self::_Reserved(raw) => raw,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> ADC_SAC_AVGR {
        match value {
            0 => Self::ADC_SAC_AVG_OFF,
            1 => Self::ADC_SAC_AVG_2X,
            2 => Self::ADC_SAC_AVG_4X,
            3 => Self::ADC_SAC_AVG_8X,
            4 => Self::ADC_SAC_AVG_16X,
            5 => Self::ADC_SAC_AVG_32X,
            6 => Self::ADC_SAC_AVG_64X,
            other => Self::_Reserved(other),
        }
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_OFF`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_off(&self) -> bool {
        *self == Self::ADC_SAC_AVG_OFF
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_2X`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_2x(&self) -> bool {
        *self == Self::ADC_SAC_AVG_2X
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_4X`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_4x(&self) -> bool {
        *self == Self::ADC_SAC_AVG_4X
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_8X`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_8x(&self) -> bool {
        *self == Self::ADC_SAC_AVG_8X
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_16X`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_16x(&self) -> bool {
        *self == Self::ADC_SAC_AVG_16X
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_32X`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_32x(&self) -> bool {
        *self == Self::ADC_SAC_AVG_32X
    }
    #[doc = "Checks if the value of the field is `ADC_SAC_AVG_64X`"]
    #[inline(always)]
    pub fn is_adc_sac_avg_64x(&self) -> bool {
        *self == Self::ADC_SAC_AVG_64X
    }
}
#[doc = "Values that can be written to the field `ADC_SAC_AVG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ADC_SAC_AVGW {
    #[doc = "No hardware oversampling"]
    ADC_SAC_AVG_OFF,
    #[doc = "2x hardware oversampling"]
    ADC_SAC_AVG_2X,
    #[doc = "4x hardware oversampling"]
    ADC_SAC_AVG_4X,
    #[doc = "8x hardware oversampling"]
    ADC_SAC_AVG_8X,
    #[doc = "16x hardware oversampling"]
    ADC_SAC_AVG_16X,
    #[doc = "32x hardware oversampling"]
    ADC_SAC_AVG_32X,
    #[doc = "64x hardware oversampling"]
    ADC_SAC_AVG_64X,
}
impl ADC_SAC_AVGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            Self::ADC_SAC_AVG_OFF => 0,
            Self::ADC_SAC_AVG_2X => 1,
            Self::ADC_SAC_AVG_4X => 2,
            Self::ADC_SAC_AVG_8X => 3,
            Self::ADC_SAC_AVG_16X => 4,
            Self::ADC_SAC_AVG_32X => 5,
            Self::ADC_SAC_AVG_64X => 6,
        }
    }
}
#[doc = r"Proxy"]
pub struct _ADC_SAC_AVGW<'a> {
    w: &'a mut W,
}
impl<'a> _ADC_SAC_AVGW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: ADC_SAC_AVGW) -> &'a mut W {
        // SAFETY: every `ADC_SAC_AVGW` variant maps to a value within the
        // 3-bit field, so the raw write cannot touch other bits.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "No hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_off(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_OFF)
    }
    #[doc = "2x hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_2x(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_2X)
    }
    #[doc = "4x hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_4x(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_4X)
    }
    #[doc = "8x hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_8x(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_8X)
    }
    #[doc = "16x hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_16x(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_16X)
    }
    #[doc = "32x hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_32x(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_32X)
    }
    #[doc = "64x hardware oversampling"]
    #[inline(always)]
    pub fn adc_sac_avg_64x(self) -> &'a mut W {
        self.variant(ADC_SAC_AVGW::ADC_SAC_AVG_64X)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the low 3 bits, then insert the (masked) new value.
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:2 - Hardware Averaging Control"]
    #[inline(always)]
    pub fn adc_sac_avg(&self) -> ADC_SAC_AVGR {
        ADC_SAC_AVGR::_from((self.bits & 0x07) as u8)
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:2 - Hardware Averaging Control"]
    #[inline(always)]
    pub fn adc_sac_avg(&mut self) -> _ADC_SAC_AVGW {
        _ADC_SAC_AVGW { w: self }
    }
}
//! Types that map to concepts in HTTP. //! //! This module exports types that map to HTTP concepts or to the underlying //! HTTP library when needed. Because the underlying HTTP library is likely to //! change (see <a //! href="https://github.com/SergioBenitez/Rocket/issues/17">#17</a>), types in //! [hyper](hyper/index.html) should be considered unstable. pub mod hyper; pub mod uri; #[macro_use] mod known_media_types; mod cookies; mod session; mod method; mod media_type; mod content_type; mod status; mod header; mod accept; mod raw_str; pub(crate) mod parse; // We need to export these for codegen, but otherwise it's unnecessary. // TODO: Expose a `const fn` from ContentType when possible. (see RFC#1817) pub mod uncased; #[doc(hidden)] pub use self::parse::IndexedStr; #[doc(hidden)] pub use self::media_type::MediaParams; pub use self::method::Method; pub use self::content_type::ContentType; pub use self::accept::{Accept, WeightedMediaType}; pub use self::status::{Status, StatusClass}; pub use self::header::{Header, HeaderMap}; pub use self::raw_str::RawStr; pub use self::media_type::MediaType; pub use self::cookies::*; pub use self::session::*; use smallvec::{Array, SmallVec}; pub trait IntoCollection<T> { fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A>; fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, f: F) -> SmallVec<A>; } impl<T> IntoCollection<T> for T { #[inline] fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> { let mut vec = SmallVec::new(); vec.push(self); vec } #[inline(always)] fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, mut f: F) -> SmallVec<A> { f(self).into_collection() } } impl<T> IntoCollection<T> for Vec<T> { #[inline(always)] fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> { SmallVec::from_vec(self) } #[inline] fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, mut f: F) -> SmallVec<A> { self.into_iter().map(|item| f(item)).collect() } } impl<'a, T: Clone> IntoCollection<T> for &'a [T] { 
#[inline(always)] fn into_collection<A: Array<Item=T>>(self) -> SmallVec<A> { self.iter().cloned().collect() } #[inline] fn mapped<U, F: FnMut(T) -> U, A: Array<Item=U>>(self, mut f: F) -> SmallVec<A> { self.iter().cloned().map(|item| f(item)).collect() } }
//! This module defines utilities for working with the `Rc` and `Weak` types. use std::rc::Rc; use std::rc::Weak; use super::option::*; // TODO[WD,AO]: Think about merging it with `OptionOps`. /// Mapping methods to the `Weak` type. pub trait WeakOps { type Target; fn for_each <U,F> (self , f:F) where F : FnOnce(Self::Target) -> U; fn for_each_ref <U,F> (&self , f:F) where F : FnOnce(&Self::Target) -> U; } impl<T> WeakOps for Weak<T> { type Target = Rc<T>; fn for_each<U,F>(self, f:F) where F : FnOnce(Self::Target) -> U { self.upgrade().for_each(f) } fn for_each_ref<U,F>(&self, f:F) where F : FnOnce(&Self::Target) -> U { self.upgrade().for_each_ref(f) } }
use nom::IResult; use typeahead::Parse; #[derive(Serialize, Deserialize, PartialOrd, Ord, Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Key { Backspace, Left, Right, Up, Down, Home, End, PageUp, PageDown, Delete, Insert, /// Function keys. F(u8), /// Normal character. Char(char), Null, Esc, Space, Tab, Csi, XCsi, Eol, Help, Undo, /// Keypad numerics (0 - 9) Keypad(u8), KeypadHome, KeypadEnd, KeypadPageUp, KeypadPageDown, KeypadPlus, KeypadMinus, KeypadMultiply, KeypadDivide, KeypadEnter, KeypadDot, } #[derive(Serialize, Deserialize, PartialOrd, Ord, Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum MultiKey { A(Key), // A single keystroke. Shift(Key), Ctrl(Key), Alt(Key), // aka Meta, aka Option. Cmd(Key), // Apple's Command key. } impl Parse for MultiKey { fn decimal(&self) -> Option<char> { match *self { MultiKey::A(Key::Char(c)) => { if c.is_digit(10) { return Some(c); } else { return None; } } _ => None, } } fn character(&self) -> Option<char> { match *self { MultiKey::A(Key::Char(c)) => Some(c), MultiKey::A(Key::KeypadEnter) => Some('\r'), _ => None, } } } // From vim, :help map-special-keys: // DETAIL: Vim first checks if a sequence from the keyboard is mapped. If it // isn't the terminal key codes are tried. If a terminal code is found it is // replaced with the internal code. Then the check for a mapping is done again // (so you can map an internal code to something else). What is written into // the script file depends on what is recognized. If the terminal key code was // recognized as a mapping the key code itself is written to the script file. // If it was recognized as a terminal code the internal code is written to the // script file. // // The above suggests we should be precise about how we store our keys. 
/// Maps the name found inside a `<...>` angle-bracket key spec (matched
/// case-insensitively) to a `Key`, e.g. `"tab"` -> `Key::Tab`.
///
/// Returns `None` when the name is not a recognized special key.
pub fn parse_angle(buffer: &str) -> Option<Key> {
    use self::Key::*;
    let key = match buffer.to_lowercase().as_ref() {
        "nul" => Null,
        "bs" => Backspace,
        "tab" => Tab,
        "nl" => Char('\n'),
        "ff" => Char('\x0c'),
        "cr" | "return" | "enter" => Char('\r'),
        "esc" => Esc,
        "space" => Space,
        "lt" => Char('<'),
        "bslash" => Char('\\'),
        "bar" => Char('|'),
        "del" => Delete,
        "csi" => Csi,
        "xcsi" => XCsi,
        "eol" => Eol,
        "up" => Up,
        "down" => Down,
        "left" => Left,
        "right" => Right,
        "f1" => F(1),
        "f2" => F(2),
        "f3" => F(3),
        "f4" => F(4),
        "f5" => F(5),
        "f6" => F(6),
        "f7" => F(7),
        "f8" => F(8),
        "f9" => F(9),
        "f10" => F(10),
        "f11" => F(11),
        "f12" => F(12),
        "help" => Help,
        "undo" => Undo,
        "insert" => Insert,
        "home" => Home,
        "end" => End,
        "pageup" => PageUp,
        "pagedown" => PageDown,
        "khome" => KeypadHome,
        "kend" => KeypadEnd,
        "kpageup" => KeypadPageUp,
        "kpagedown" => KeypadPageDown,
        "kplus" => KeypadPlus,
        "kminus" => KeypadMinus,
        "kmultiply" => KeypadMultiply,
        "kdivide" => KeypadDivide,
        "kenter" => KeypadEnter,
        "kpoint" => KeypadDot,
        "k0" => Keypad(0),
        "k1" => Keypad(1),
        "k2" => Keypad(2),
        "k3" => Keypad(3),
        "k4" => Keypad(4),
        "k5" => Keypad(5),
        "k6" => Keypad(6),
        "k7" => Keypad(7),
        "k8" => Keypad(8),
        "k9" => Keypad(9),
        _ => {
            return None;
        }
    };
    Some(key)
}

/// Interprets `buffer` as a literal single-character key.
///
/// Returns `None` unless `buffer` is exactly one `char` long.
pub fn parse_key(buffer: &str) -> Option<Key> {
    if buffer.chars().count() == 1 {
        return Some(Key::Char(buffer.chars().next().unwrap()));
    }
    None
}

/// Parses `buffer` as either an angle-bracket key name or a literal key.
pub fn parse_any(buffer: &str) -> Option<Key> {
    // FIX: use the lazy `or_else` so `parse_key` only runs when the
    // angle-name lookup fails; the original eager `.or(parse_key(buffer))`
    // evaluated `parse_key` unconditionally (clippy::or_fun_call).
    parse_angle(buffer).or_else(|| parse_key(buffer))
}

#[macro_use]
pub mod parse {
    use super::*;
    use key::MultiKey::*;

    // nom parsers for modifier-wrapped specs: each recognizes a delimited
    // `<X-...>` form and wraps the inner key with the matching modifier.
    named!(
        shift<&str, MultiKey>,
        map_opt!(
            delimited!(tag_no_case_s!("<S-"), is_not!(">"), tag_s!(">")),
            |st| { parse_any(st).map(|x| Shift(x)) }
        )
    );
    named!(
        ctrl<&str, MultiKey>,
        map_opt!(
            delimited!(tag_no_case_s!("<C-"), is_not!(">"), tag_s!(">")),
            |st| { parse_any(st).map(|x| Ctrl(x)) }
        )
    );
    // `<M-...>` (Meta) and `<A-...>` (Alt) both map to `Alt`.
    named!(
        meta<&str, MultiKey>,
        map_opt!(
            delimited!(tag_no_case_s!("<M-"), is_not!(">"), tag_s!(">")),
            |st| { parse_any(st).map(|x| Alt(x)) }
        )
    );
    named!(
        alt<&str, MultiKey>,
        map_opt!(
            delimited!(tag_no_case_s!("<A-"), is_not!(">"), tag_s!(">")),
            |st| { parse_any(st).map(|x| Alt(x)) }
        )
    );
    named!(
        cmd<&str, MultiKey>,
        map_opt!(
            delimited!(tag_no_case_s!("<D-"), is_not!(">"), tag_s!(">")),
            |st| { parse_any(st).map(|x| Cmd(x)) }
        )
    );
    // Unmodified `<name>` spec, e.g. `<Tab>`.
    named!(
        angle<&str, MultiKey>,
        map_opt!(
            delimited!(tag_no_case_s!("<"), is_not!(">"), tag_s!(">")),
            |st| { parse_angle(st).map(|x| A(x)) }
        )
    );
    // A single literal character.
    named!(
        key<&str, MultiKey>,
        map_opt!(
            take_s!(1),
            |st| { parse_key(st).map(|x| A(x)) }
        )
    );
    named!(any<&str, MultiKey>, alt!( angle | key ));
    named!(parse_keys<&str, Vec<MultiKey>>,
           many0!( alt!(shift | ctrl | meta | alt | cmd | any)));

    /// Parses a whole key-spec string (e.g. `"<S-Tab>S"`) into a sequence of
    /// `MultiKey`s. Any parse failure yields an empty vector.
    pub fn parse(st: &str) -> Vec<MultiKey> {
        match parse_keys(st) {
            IResult::Done(_, x) => x,
            _ => Vec::new(),
        }
    }

    #[test]
    fn shift_test() {
        assert_eq!(parse::shift("<s-tab>"), IResult::Done("", Shift(Tab)));
    }
    #[test]
    fn ctrl_test() {
        assert_eq!(parse::ctrl("<c-tab>"), IResult::Done("", Ctrl(Tab)));
    }
    #[test]
    fn alt_test() {
        assert_eq!(parse::alt("<a-tab>"), IResult::Done("", Alt(Tab)));
    }
    #[test]
    fn meta_test() {
        assert_eq!(parse::meta("<m-tab>"), IResult::Done("", Alt(Tab)));
    }
    #[test]
    fn cmd_test() {
        assert_eq!(parse::cmd("<d-tab>"), IResult::Done("", Cmd(Tab)));
    }
    #[test]
    fn any_test() {
        assert_eq!(parse::any("<tab>"), IResult::Done("", A(Tab)));
    }
    #[test]
    fn parse_key_test() {
        assert_eq!(parse::parse("S"), vec![A(Char('S'))]);
    }
    #[test]
    fn parse_test() {
        assert_eq!(parse::parse("<S-Tab>S"), vec![Shift(Tab), A(Char('S'))]);
    }
}
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::{
    parse_macro_input, Data, DataStruct, DeriveInput, Error, Field, Fields, Generics, Ident,
};

use crate::{Attr, GenericsStreams, MULTIPLE_FLATTEN_ERROR};

/// Error if the derive was used on an unsupported type.
const UNSUPPORTED_ERROR: &str = "SerdeReplace must be used on a tuple struct";

/// Derive-macro entry point for `SerdeReplace`.
///
/// Dispatches on the input's shape: tuple structs and enums get the simple
/// whole-value replacement impl, named-field structs get the recursive
/// per-field impl, and anything else is rejected with a compile error.
pub fn derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);

    match input.data {
        Data::Struct(DataStruct { fields: Fields::Unnamed(_), .. }) | Data::Enum(_) => {
            derive_direct(input.ident, input.generics).into()
        },
        Data::Struct(DataStruct { fields: Fields::Named(fields), .. }) => {
            derive_recursive(input.ident, input.generics, fields.named).into()
        },
        _ => Error::new(input.ident.span(), UNSUPPORTED_ERROR).to_compile_error().into(),
    }
}

/// Emits a `SerdeReplace` impl that replaces the whole value by
/// deserializing the incoming TOML value in one shot.
pub fn derive_direct(ident: Ident, generics: Generics) -> TokenStream2 {
    quote! {
        impl <#generics> alacritty_config::SerdeReplace for #ident <#generics> {
            fn replace(&mut self, value: toml::Value) -> Result<(), Box<dyn std::error::Error>> {
                *self = serde::Deserialize::deserialize(value)?;

                Ok(())
            }
        }
    }
}

/// Emits a `SerdeReplace` impl that walks a TOML table and replaces each
/// named field individually (falling back to whole-value deserialization
/// when the incoming value is not a table).
pub fn derive_recursive<T>(
    ident: Ident,
    generics: Generics,
    fields: Punctuated<Field, T>,
) -> TokenStream2 {
    // Split the generics into the constrained (decl) and unconstrained (use)
    // token streams expected by the generated impl header.
    let GenericsStreams { unconstrained, constrained, .. } =
        crate::generics_streams(&generics.params);
    let replace_arms = match_arms(&fields);

    quote! {
        #[allow(clippy::extra_unused_lifetimes)]
        impl <'de, #constrained> alacritty_config::SerdeReplace for #ident <#unconstrained> {
            fn replace(&mut self, value: toml::Value) -> Result<(), Box<dyn std::error::Error>> {
                match value.as_table() {
                    Some(table) => {
                        for (field, next_value) in table {
                            let next_value = next_value.clone();
                            let value = value.clone();
                            match field.as_str() {
                                #replace_arms
                                _ => {
                                    let error = format!("Field \"{}\" does not exist", field);
                                    return Err(error.into());
                                },
                            }
                        }
                    },
                    None => *self = serde::Deserialize::deserialize(value)?,
                }

                Ok(())
            }
        }
    }
}

/// Create SerdeReplace recursive match arms.
fn match_arms<T>(fields: &Punctuated<Field, T>) -> TokenStream2 {
    let mut stream = TokenStream2::default();
    // At most one field may carry #[config(flatten)]; its arm is kept aside
    // so it can be appended as the catch-all at the end.
    let mut flattened_arm = None;

    // Create arm for each field.
    for field in fields {
        let ident = field.ident.as_ref().expect("unreachable tuple struct");
        let literal = ident.to_string();

        // Check if #[config(flattened)] attribute is present.
        let flatten = field
            .attrs
            .iter()
            .filter_map(|attr| attr.parse_args::<Attr>().ok())
            .any(|parsed| parsed.ident.as_str() == "flatten");

        if flatten && flattened_arm.is_some() {
            return Error::new(ident.span(), MULTIPLE_FLATTEN_ERROR).to_compile_error();
        } else if flatten {
            // The flattened field receives the *whole* value, not the
            // per-key next_value.
            flattened_arm = Some(quote! {
                _ => alacritty_config::SerdeReplace::replace(&mut self.#ident, value)?,
            });
        } else {
            stream.extend(quote! {
                #literal => alacritty_config::SerdeReplace::replace(&mut self.#ident, next_value)?,
            });
        }
    }

    // Add the flattened catch-all as last match arm.
    if let Some(flattened_arm) = flattened_arm.take() {
        stream.extend(flattened_arm);
    }

    stream
}
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use failure::Error; #[fuchsia_async::run_singlethreaded(test)] async fn test_get_option_subnet() -> Result<(), Error> { let launcher = fuchsia_component::client::launcher()?; let cli_url = fuchsia_component::fuchsia_single_component_package_url!("dhcpd-cli"); let cli_app = fuchsia_component::client::AppBuilder::new(cli_url).args(vec![ "get", "option", "subnet-mask", ]); let output = cli_app.output(&launcher)?.await?; let stdout = std::str::from_utf8(&output.stdout)?; let stderr = std::str::from_utf8(&output.stderr)?; assert_eq!(stderr, ""); assert_eq!(stdout, "SubnetMask(Ipv4Address { addr: [0, 0, 0, 0] })\n"); Ok(()) } #[fuchsia_async::run_singlethreaded(test)] async fn test_get_parameter_lease() -> Result<(), Error> { let launcher = fuchsia_component::client::launcher()?; let cli_url = fuchsia_component::fuchsia_single_component_package_url!("dhcpd-cli"); let cli_app = fuchsia_component::client::AppBuilder::new(cli_url).args(vec![ "get", "parameter", "lease-length", ]); let output = cli_app.output(&launcher)?.await?; let stdout = std::str::from_utf8(&output.stdout)?; let stderr = std::str::from_utf8(&output.stderr)?; assert_eq!(stderr, ""); assert_eq!(stdout, "Lease(LeaseLength { default: None, max: None })\n"); Ok(()) } #[fuchsia_async::run_singlethreaded(test)] async fn test_set_option_subnet() -> Result<(), Error> { let launcher = fuchsia_component::client::launcher()?; let cli_url = fuchsia_component::fuchsia_single_component_package_url!("dhcpd-cli"); let cli_app = fuchsia_component::client::AppBuilder::new(cli_url).args(vec![ "set", "option", "subnet-mask", "--mask", "255.255.255.0", ]); let output = cli_app.output(&launcher)?.await?; let stdout = std::str::from_utf8(&output.stdout)?; let stderr = std::str::from_utf8(&output.stderr)?; assert_eq!(stderr, ""); assert_eq!(stdout, ""); Ok(()) } 
#[fuchsia_async::run_singlethreaded(test)] async fn test_set_parameter_lease() -> Result<(), Error> { let launcher = fuchsia_component::client::launcher()?; let cli_url = fuchsia_component::fuchsia_single_component_package_url!("dhcpd-cli"); let cli_app = fuchsia_component::client::AppBuilder::new(cli_url).args(vec![ "set", "parameter", "lease-length", "--default", "42", ]); let output = cli_app.output(&launcher)?.await?; let stdout = std::str::from_utf8(&output.stdout)?; let stderr = std::str::from_utf8(&output.stderr)?; assert_eq!(stderr, ""); assert_eq!(stdout, ""); Ok(()) } #[fuchsia_async::run_singlethreaded(test)] async fn test_list_option() -> Result<(), Error> { let launcher = fuchsia_component::client::launcher()?; let cli_url = fuchsia_component::fuchsia_single_component_package_url!("dhcpd-cli"); let cli_app = fuchsia_component::client::AppBuilder::new(cli_url).args(vec!["list", "option"]); let output = cli_app.output(&launcher)?.await?; let stdout = std::str::from_utf8(&output.stdout)?; let stderr = std::str::from_utf8(&output.stderr)?; assert_eq!(stderr, ""); assert_eq!(stdout, "[]\n"); Ok(()) } #[fuchsia_async::run_singlethreaded(test)] async fn test_list_parameter() -> Result<(), Error> { let launcher = fuchsia_component::client::launcher()?; let cli_url = fuchsia_component::fuchsia_single_component_package_url!("dhcpd-cli"); let cli_app = fuchsia_component::client::AppBuilder::new(cli_url).args(vec!["list", "parameter"]); let output = cli_app.output(&launcher)?.await?; let stdout = std::str::from_utf8(&output.stdout)?; let stderr = std::str::from_utf8(&output.stderr)?; assert_eq!(stderr, ""); assert_eq!(stdout, "[]\n"); Ok(()) }
/// emscripten: _llvm_log10_f64 pub extern "C" fn _llvm_log10_f64(value: f64) -> f64 { debug!("emscripten::_llvm_log10_f64"); value.log10() } /// emscripten: _llvm_log2_f64 pub extern "C" fn _llvm_log2_f64(value: f64) -> f64 { debug!("emscripten::_llvm_log2_f64"); value.log2() } // emscripten: f64-rem pub extern "C" fn f64_rem(x: f64, y: f64) -> f64 { debug!("emscripten::f64-rem"); x % y }