text
stringlengths
8
4.13M
use std::rc::Rc; use crate::class_file::unvalidated::Attribute; use crate::class_file::unvalidated::AttributeInfo; use crate::class_file::unvalidated::ConstantIdx; // TODO: helper to consistency check flags #[derive(Debug, Clone, Copy)] pub struct MethodAccessFlags { pub flags: u16, } #[allow(dead_code)] impl MethodAccessFlags { pub fn is_public(&self) -> bool { (self.flags & 0x0001) == 0x0001 } pub fn is_private(&self) -> bool { (self.flags & 0x0002) == 0x0002 } pub fn is_protected(&self) -> bool { (self.flags & 0x0004) == 0x0004 } pub fn is_static(&self) -> bool { (self.flags & 0x0008) == 0x0008 } pub fn is_final(&self) -> bool { (self.flags & 0x0010) == 0x0010 } pub fn is_synchronized(&self) -> bool { (self.flags & 0x0020) == 0x0020 } pub fn is_bridge(&self) -> bool { (self.flags & 0x0040) == 0x0040 } pub fn is_varargs(&self) -> bool { (self.flags & 0x0080) == 0x0080 } pub fn is_native(&self) -> bool { (self.flags & 0x0100) == 0x0100 } pub fn is_abstract(&self) -> bool { (self.flags & 0x0400) == 0x0400 } pub fn is_strict(&self) -> bool { (self.flags & 0x0800) == 0x0800 } pub fn is_synthetic(&self) -> bool { (self.flags & 0x1000) == 0x1000 } } #[derive(Debug)] pub struct MethodInfo { pub access_flags: MethodAccessFlags, pub name_index: ConstantIdx, pub descriptor_index: ConstantIdx, pub attributes: Vec<AttributeInfo>, } #[derive(Debug)] pub struct MethodHandle { pub access_flags: MethodAccessFlags, pub name: String, pub descriptor: String, pub attributes: Vec<Rc<Attribute>>, } impl MethodHandle { pub fn access(&self) -> &MethodAccessFlags { &self.access_flags } pub fn body(&self) -> Option<Rc<Attribute>> { for attr in self.attributes.iter() { let attr_ref: &Attribute = &*attr; if let Attribute::Code(_, _, _, _, _) = attr_ref { return Some(Rc::clone(&attr)); } } None } } #[derive(Debug)] pub enum MethodHandleBehavior { GetField, GetStatic, PutField, PutStatic, InvokeVirtual, InvokeStatic, InvokeSpecial, NewInvokeSpecial, InvokeInterface, Other(u8), }
// Num of CSRs const NUM_CSR: usize = 0x1000; // CSR Index definitions const CSR_INDEX_MSTATUS : usize = 0x300; const CSR_INDEX_MTVEC : usize = 0x305; const CSR_INDEX_MEPC : usize = 0x341; const CSR_INDEX_MCAUSE : usize = 0x342; const CSR_INDEX_MTVAL : usize = 0x343; // register definitions bitfield! { pub struct MSTATUS(u32); impl Debug; pub sd, set_sd: 31, 31; pub tsr, set_tsr: 22, 22; pub tw, set_tw: 21, 21; pub tvm, set_tvm: 20, 20; pub mxr, set_mxr: 19, 19; pub sum, set_sum: 18, 18; pub mprv, set_mprv: 17, 17; pub xs, set_xs: 16, 15; pub fs, set_fs: 14, 13; pub mpp, set_mpp: 12, 11; pub spp, set_spp: 8, 8; pub mpie, set_mpie: 7, 7; pub spie, set_spie: 5, 5; pub upie, set_upie: 4, 4; pub mie, set_mie: 3, 3; pub sie, set_sie: 1, 1; pub uie, set_uei: 0, 0; } bitfield! { pub struct MTVEC(u32); impl Debug; pub base, set_base: 31, 2; pub mode, set_mode: 1, 0; } // CSR struct definition pub struct Csr { values: [u32; NUM_CSR], } #[allow(dead_code)] impl Csr { pub fn new() -> Csr { Csr { values: [0; NUM_CSR] } } pub fn read(&self, index: usize) -> u32 { self.values[index] } pub fn write(&mut self, index: usize, value: u32) { self.values[index] = value } pub fn read_mstatus(&self) -> MSTATUS { MSTATUS(self.read(CSR_INDEX_MSTATUS)) } pub fn write_mstatus(&mut self, value: MSTATUS) { self.write(CSR_INDEX_MSTATUS, value.0) } pub fn read_mtvec(&self) -> MTVEC { MTVEC(self.read(CSR_INDEX_MTVEC)) } pub fn write_mtvec(&mut self, value: MTVEC) { self.write(CSR_INDEX_MTVEC, value.0) } pub fn read_mepc(&self) -> u32 { self.read(CSR_INDEX_MEPC) } pub fn write_mepc(&mut self, value: u32) { self.write(CSR_INDEX_MEPC, value) } pub fn read_mcause(&self) -> u32 { self.read(CSR_INDEX_MCAUSE) } pub fn write_mcause(&mut self, value: u32) { self.write(CSR_INDEX_MCAUSE, value) } pub fn read_mtval(&self) -> u32 { self.read(CSR_INDEX_MTVAL) } pub fn write_mtval(&mut self, value: u32) { self.write(CSR_INDEX_MTVAL, value) } }
// NOTE(review): svd2rust-style auto-generated accessors for the PWM `_3_CTL`
// register: a read proxy `R`, a write proxy `W`, one reader type and one
// writer proxy per field. Field offsets used in the shift/mask code (bits 0-5
// and 16-18 single-bit, two-bit enums at 6, 8, 10, 12, 14) agree with the
// bit ranges quoted in the `#[doc]` strings below. Generated code — prefer
// regenerating from the SVD over hand-editing.
#[doc = r"Value read from the register"] pub struct R { bits: u32, } #[doc = r"Value to write to the register"] pub struct W { bits: u32, } impl super::_3_CTL { #[doc = r"Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); self.register.set(f(&R { bits }, &mut W { bits }).bits); } #[doc = r"Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r"Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { self.register.set( f(&mut W { bits: Self::reset_value(), }) .bits, ); } #[doc = r"Reset value of the register"] #[inline(always)] pub const fn reset_value() -> u32 { 0 } #[doc = r"Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.register.set(Self::reset_value()) } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_ENABLER { bits: bool, } impl PWM_3_CTL_ENABLER { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_ENABLEW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_ENABLEW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 0); self.w.bits |= ((value as u32) & 1) << 0; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_MODER { bits: bool, } impl PWM_3_CTL_MODER { #[doc = 
r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_MODEW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_MODEW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 1); self.w.bits |= ((value as u32) & 1) << 1; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_DEBUGR { bits: bool, } impl PWM_3_CTL_DEBUGR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_DEBUGW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_DEBUGW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 2); self.w.bits |= ((value as u32) & 1) << 2; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_LOADUPDR { bits: bool, } impl PWM_3_CTL_LOADUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is 
clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_LOADUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_LOADUPDW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 3); self.w.bits |= ((value as u32) & 1) << 3; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_CMPAUPDR { bits: bool, } impl PWM_3_CTL_CMPAUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_CMPAUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_CMPAUPDW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 4); self.w.bits |= ((value as u32) & 1) << 4; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_CMPBUPDR { bits: bool, } impl PWM_3_CTL_CMPBUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is 
set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_CMPBUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_CMPBUPDW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 5); self.w.bits |= ((value as u32) & 1) << 5; self.w } } #[doc = "Possible values of the field `PWM_3_CTL_GENAUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_GENAUPDR { #[doc = "Immediate"] PWM_3_CTL_GENAUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_GENAUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_GENAUPD_GS, #[doc = r"Reserved"] _Reserved(u8), } impl PWM_3_CTL_GENAUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { match *self { PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_I => 0, PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_LS => 2, PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_GS => 3, PWM_3_CTL_GENAUPDR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _from(value: u8) -> PWM_3_CTL_GENAUPDR { match value { 0 => PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_I, 2 => PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_LS, 3 => PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_GS, i => PWM_3_CTL_GENAUPDR::_Reserved(i), } } #[doc = "Checks if the value of the field is `PWM_3_CTL_GENAUPD_I`"] #[inline(always)] pub fn is_pwm_3_ctl_genaupd_i(&self) -> bool { *self == PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_I } #[doc = "Checks if the value of the field is `PWM_3_CTL_GENAUPD_LS`"] #[inline(always)] pub fn is_pwm_3_ctl_genaupd_ls(&self) -> bool { *self == PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_LS } #[doc = "Checks if the value of the field is `PWM_3_CTL_GENAUPD_GS`"] #[inline(always)] pub fn 
is_pwm_3_ctl_genaupd_gs(&self) -> bool { *self == PWM_3_CTL_GENAUPDR::PWM_3_CTL_GENAUPD_GS } } #[doc = "Values that can be written to the field `PWM_3_CTL_GENAUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_GENAUPDW { #[doc = "Immediate"] PWM_3_CTL_GENAUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_GENAUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_GENAUPD_GS, } impl PWM_3_CTL_GENAUPDW { #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _bits(&self) -> u8 { match *self { PWM_3_CTL_GENAUPDW::PWM_3_CTL_GENAUPD_I => 0, PWM_3_CTL_GENAUPDW::PWM_3_CTL_GENAUPD_LS => 2, PWM_3_CTL_GENAUPDW::PWM_3_CTL_GENAUPD_GS => 3, } } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_GENAUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_GENAUPDW<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PWM_3_CTL_GENAUPDW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "Immediate"] #[inline(always)] pub fn pwm_3_ctl_genaupd_i(self) -> &'a mut W { self.variant(PWM_3_CTL_GENAUPDW::PWM_3_CTL_GENAUPD_I) } #[doc = "Locally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_genaupd_ls(self) -> &'a mut W { self.variant(PWM_3_CTL_GENAUPDW::PWM_3_CTL_GENAUPD_LS) } #[doc = "Globally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_genaupd_gs(self) -> &'a mut W { self.variant(PWM_3_CTL_GENAUPDW::PWM_3_CTL_GENAUPD_GS) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits &= !(3 << 6); self.w.bits |= ((value as u32) & 3) << 6; self.w } } #[doc = "Possible values of the field `PWM_3_CTL_GENBUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_GENBUPDR { #[doc = "Immediate"] PWM_3_CTL_GENBUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_GENBUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_GENBUPD_GS, #[doc = r"Reserved"] _Reserved(u8), } impl PWM_3_CTL_GENBUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn 
bits(&self) -> u8 { match *self { PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_I => 0, PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_LS => 2, PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_GS => 3, PWM_3_CTL_GENBUPDR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _from(value: u8) -> PWM_3_CTL_GENBUPDR { match value { 0 => PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_I, 2 => PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_LS, 3 => PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_GS, i => PWM_3_CTL_GENBUPDR::_Reserved(i), } } #[doc = "Checks if the value of the field is `PWM_3_CTL_GENBUPD_I`"] #[inline(always)] pub fn is_pwm_3_ctl_genbupd_i(&self) -> bool { *self == PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_I } #[doc = "Checks if the value of the field is `PWM_3_CTL_GENBUPD_LS`"] #[inline(always)] pub fn is_pwm_3_ctl_genbupd_ls(&self) -> bool { *self == PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_LS } #[doc = "Checks if the value of the field is `PWM_3_CTL_GENBUPD_GS`"] #[inline(always)] pub fn is_pwm_3_ctl_genbupd_gs(&self) -> bool { *self == PWM_3_CTL_GENBUPDR::PWM_3_CTL_GENBUPD_GS } } #[doc = "Values that can be written to the field `PWM_3_CTL_GENBUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_GENBUPDW { #[doc = "Immediate"] PWM_3_CTL_GENBUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_GENBUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_GENBUPD_GS, } impl PWM_3_CTL_GENBUPDW { #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _bits(&self) -> u8 { match *self { PWM_3_CTL_GENBUPDW::PWM_3_CTL_GENBUPD_I => 0, PWM_3_CTL_GENBUPDW::PWM_3_CTL_GENBUPD_LS => 2, PWM_3_CTL_GENBUPDW::PWM_3_CTL_GENBUPD_GS => 3, } } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_GENBUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_GENBUPDW<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PWM_3_CTL_GENBUPDW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "Immediate"] #[inline(always)] pub fn pwm_3_ctl_genbupd_i(self) -> 
&'a mut W { self.variant(PWM_3_CTL_GENBUPDW::PWM_3_CTL_GENBUPD_I) } #[doc = "Locally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_genbupd_ls(self) -> &'a mut W { self.variant(PWM_3_CTL_GENBUPDW::PWM_3_CTL_GENBUPD_LS) } #[doc = "Globally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_genbupd_gs(self) -> &'a mut W { self.variant(PWM_3_CTL_GENBUPDW::PWM_3_CTL_GENBUPD_GS) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits &= !(3 << 8); self.w.bits |= ((value as u32) & 3) << 8; self.w } } #[doc = "Possible values of the field `PWM_3_CTL_DBCTLUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_DBCTLUPDR { #[doc = "Immediate"] PWM_3_CTL_DBCTLUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_DBCTLUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_DBCTLUPD_GS, #[doc = r"Reserved"] _Reserved(u8), } impl PWM_3_CTL_DBCTLUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { match *self { PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_I => 0, PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_LS => 2, PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_GS => 3, PWM_3_CTL_DBCTLUPDR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _from(value: u8) -> PWM_3_CTL_DBCTLUPDR { match value { 0 => PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_I, 2 => PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_LS, 3 => PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_GS, i => PWM_3_CTL_DBCTLUPDR::_Reserved(i), } } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBCTLUPD_I`"] #[inline(always)] pub fn is_pwm_3_ctl_dbctlupd_i(&self) -> bool { *self == PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_I } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBCTLUPD_LS`"] #[inline(always)] pub fn is_pwm_3_ctl_dbctlupd_ls(&self) -> bool { *self == PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_LS } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBCTLUPD_GS`"] #[inline(always)] 
pub fn is_pwm_3_ctl_dbctlupd_gs(&self) -> bool { *self == PWM_3_CTL_DBCTLUPDR::PWM_3_CTL_DBCTLUPD_GS } } #[doc = "Values that can be written to the field `PWM_3_CTL_DBCTLUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_DBCTLUPDW { #[doc = "Immediate"] PWM_3_CTL_DBCTLUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_DBCTLUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_DBCTLUPD_GS, } impl PWM_3_CTL_DBCTLUPDW { #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _bits(&self) -> u8 { match *self { PWM_3_CTL_DBCTLUPDW::PWM_3_CTL_DBCTLUPD_I => 0, PWM_3_CTL_DBCTLUPDW::PWM_3_CTL_DBCTLUPD_LS => 2, PWM_3_CTL_DBCTLUPDW::PWM_3_CTL_DBCTLUPD_GS => 3, } } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_DBCTLUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_DBCTLUPDW<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PWM_3_CTL_DBCTLUPDW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "Immediate"] #[inline(always)] pub fn pwm_3_ctl_dbctlupd_i(self) -> &'a mut W { self.variant(PWM_3_CTL_DBCTLUPDW::PWM_3_CTL_DBCTLUPD_I) } #[doc = "Locally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_dbctlupd_ls(self) -> &'a mut W { self.variant(PWM_3_CTL_DBCTLUPDW::PWM_3_CTL_DBCTLUPD_LS) } #[doc = "Globally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_dbctlupd_gs(self) -> &'a mut W { self.variant(PWM_3_CTL_DBCTLUPDW::PWM_3_CTL_DBCTLUPD_GS) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits &= !(3 << 10); self.w.bits |= ((value as u32) & 3) << 10; self.w } } #[doc = "Possible values of the field `PWM_3_CTL_DBRISEUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_DBRISEUPDR { #[doc = "Immediate"] PWM_3_CTL_DBRISEUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_DBRISEUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_DBRISEUPD_GS, #[doc = r"Reserved"] _Reserved(u8), } impl PWM_3_CTL_DBRISEUPDR { #[doc = r"Value of the field 
as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { match *self { PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_I => 0, PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_LS => 2, PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_GS => 3, PWM_3_CTL_DBRISEUPDR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _from(value: u8) -> PWM_3_CTL_DBRISEUPDR { match value { 0 => PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_I, 2 => PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_LS, 3 => PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_GS, i => PWM_3_CTL_DBRISEUPDR::_Reserved(i), } } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBRISEUPD_I`"] #[inline(always)] pub fn is_pwm_3_ctl_dbriseupd_i(&self) -> bool { *self == PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_I } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBRISEUPD_LS`"] #[inline(always)] pub fn is_pwm_3_ctl_dbriseupd_ls(&self) -> bool { *self == PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_LS } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBRISEUPD_GS`"] #[inline(always)] pub fn is_pwm_3_ctl_dbriseupd_gs(&self) -> bool { *self == PWM_3_CTL_DBRISEUPDR::PWM_3_CTL_DBRISEUPD_GS } } #[doc = "Values that can be written to the field `PWM_3_CTL_DBRISEUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_DBRISEUPDW { #[doc = "Immediate"] PWM_3_CTL_DBRISEUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_DBRISEUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_DBRISEUPD_GS, } impl PWM_3_CTL_DBRISEUPDW { #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _bits(&self) -> u8 { match *self { PWM_3_CTL_DBRISEUPDW::PWM_3_CTL_DBRISEUPD_I => 0, PWM_3_CTL_DBRISEUPDW::PWM_3_CTL_DBRISEUPD_LS => 2, PWM_3_CTL_DBRISEUPDW::PWM_3_CTL_DBRISEUPD_GS => 3, } } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_DBRISEUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_DBRISEUPDW<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PWM_3_CTL_DBRISEUPDW) -> &'a 
mut W { unsafe { self.bits(variant._bits()) } } #[doc = "Immediate"] #[inline(always)] pub fn pwm_3_ctl_dbriseupd_i(self) -> &'a mut W { self.variant(PWM_3_CTL_DBRISEUPDW::PWM_3_CTL_DBRISEUPD_I) } #[doc = "Locally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_dbriseupd_ls(self) -> &'a mut W { self.variant(PWM_3_CTL_DBRISEUPDW::PWM_3_CTL_DBRISEUPD_LS) } #[doc = "Globally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_dbriseupd_gs(self) -> &'a mut W { self.variant(PWM_3_CTL_DBRISEUPDW::PWM_3_CTL_DBRISEUPD_GS) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits &= !(3 << 12); self.w.bits |= ((value as u32) & 3) << 12; self.w } } #[doc = "Possible values of the field `PWM_3_CTL_DBFALLUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_DBFALLUPDR { #[doc = "Immediate"] PWM_3_CTL_DBFALLUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_DBFALLUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_DBFALLUPD_GS, #[doc = r"Reserved"] _Reserved(u8), } impl PWM_3_CTL_DBFALLUPDR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { match *self { PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_I => 0, PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_LS => 2, PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_GS => 3, PWM_3_CTL_DBFALLUPDR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _from(value: u8) -> PWM_3_CTL_DBFALLUPDR { match value { 0 => PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_I, 2 => PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_LS, 3 => PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_GS, i => PWM_3_CTL_DBFALLUPDR::_Reserved(i), } } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBFALLUPD_I`"] #[inline(always)] pub fn is_pwm_3_ctl_dbfallupd_i(&self) -> bool { *self == PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_I } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBFALLUPD_LS`"] #[inline(always)] pub fn 
is_pwm_3_ctl_dbfallupd_ls(&self) -> bool { *self == PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_LS } #[doc = "Checks if the value of the field is `PWM_3_CTL_DBFALLUPD_GS`"] #[inline(always)] pub fn is_pwm_3_ctl_dbfallupd_gs(&self) -> bool { *self == PWM_3_CTL_DBFALLUPDR::PWM_3_CTL_DBFALLUPD_GS } } #[doc = "Values that can be written to the field `PWM_3_CTL_DBFALLUPD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PWM_3_CTL_DBFALLUPDW { #[doc = "Immediate"] PWM_3_CTL_DBFALLUPD_I, #[doc = "Locally Synchronized"] PWM_3_CTL_DBFALLUPD_LS, #[doc = "Globally Synchronized"] PWM_3_CTL_DBFALLUPD_GS, } impl PWM_3_CTL_DBFALLUPDW { #[allow(missing_docs)] #[doc(hidden)] #[inline(always)] pub fn _bits(&self) -> u8 { match *self { PWM_3_CTL_DBFALLUPDW::PWM_3_CTL_DBFALLUPD_I => 0, PWM_3_CTL_DBFALLUPDW::PWM_3_CTL_DBFALLUPD_LS => 2, PWM_3_CTL_DBFALLUPDW::PWM_3_CTL_DBFALLUPD_GS => 3, } } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_DBFALLUPDW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_DBFALLUPDW<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PWM_3_CTL_DBFALLUPDW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "Immediate"] #[inline(always)] pub fn pwm_3_ctl_dbfallupd_i(self) -> &'a mut W { self.variant(PWM_3_CTL_DBFALLUPDW::PWM_3_CTL_DBFALLUPD_I) } #[doc = "Locally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_dbfallupd_ls(self) -> &'a mut W { self.variant(PWM_3_CTL_DBFALLUPDW::PWM_3_CTL_DBFALLUPD_LS) } #[doc = "Globally Synchronized"] #[inline(always)] pub fn pwm_3_ctl_dbfallupd_gs(self) -> &'a mut W { self.variant(PWM_3_CTL_DBFALLUPDW::PWM_3_CTL_DBFALLUPD_GS) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits &= !(3 << 14); self.w.bits |= ((value as u32) & 3) << 14; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_FLTSRCR { bits: bool, } impl PWM_3_CTL_FLTSRCR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn 
bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_FLTSRCW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_FLTSRCW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 16); self.w.bits |= ((value as u32) & 1) << 16; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_MINFLTPERR { bits: bool, } impl PWM_3_CTL_MINFLTPERR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_MINFLTPERW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_MINFLTPERW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 17); self.w.bits |= ((value as u32) & 1) << 17; self.w } } #[doc = r"Value of the field"] pub struct PWM_3_CTL_LATCHR { bits: bool, } impl PWM_3_CTL_LATCHR { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn 
bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _PWM_3_CTL_LATCHW<'a> { w: &'a mut W, } impl<'a> _PWM_3_CTL_LATCHW<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 18); self.w.bits |= ((value as u32) & 1) << 18; self.w } } impl R { #[doc = r"Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - PWM Block Enable"] #[inline(always)] pub fn pwm_3_ctl_enable(&self) -> PWM_3_CTL_ENABLER { let bits = ((self.bits >> 0) & 1) != 0; PWM_3_CTL_ENABLER { bits } } #[doc = "Bit 1 - Counter Mode"] #[inline(always)] pub fn pwm_3_ctl_mode(&self) -> PWM_3_CTL_MODER { let bits = ((self.bits >> 1) & 1) != 0; PWM_3_CTL_MODER { bits } } #[doc = "Bit 2 - Debug Mode"] #[inline(always)] pub fn pwm_3_ctl_debug(&self) -> PWM_3_CTL_DEBUGR { let bits = ((self.bits >> 2) & 1) != 0; PWM_3_CTL_DEBUGR { bits } } #[doc = "Bit 3 - Load Register Update Mode"] #[inline(always)] pub fn pwm_3_ctl_loadupd(&self) -> PWM_3_CTL_LOADUPDR { let bits = ((self.bits >> 3) & 1) != 0; PWM_3_CTL_LOADUPDR { bits } } #[doc = "Bit 4 - Comparator A Update Mode"] #[inline(always)] pub fn pwm_3_ctl_cmpaupd(&self) -> PWM_3_CTL_CMPAUPDR { let bits = ((self.bits >> 4) & 1) != 0; PWM_3_CTL_CMPAUPDR { bits } } #[doc = "Bit 5 - Comparator B Update Mode"] #[inline(always)] pub fn pwm_3_ctl_cmpbupd(&self) -> PWM_3_CTL_CMPBUPDR { let bits = ((self.bits >> 5) & 1) != 0; PWM_3_CTL_CMPBUPDR { bits } } #[doc = "Bits 6:7 - PWMnGENA Update Mode"] #[inline(always)] pub fn pwm_3_ctl_genaupd(&self) -> PWM_3_CTL_GENAUPDR { 
PWM_3_CTL_GENAUPDR::_from(((self.bits >> 6) & 3) as u8) } #[doc = "Bits 8:9 - PWMnGENB Update Mode"] #[inline(always)] pub fn pwm_3_ctl_genbupd(&self) -> PWM_3_CTL_GENBUPDR { PWM_3_CTL_GENBUPDR::_from(((self.bits >> 8) & 3) as u8) } #[doc = "Bits 10:11 - PWMnDBCTL Update Mode"] #[inline(always)] pub fn pwm_3_ctl_dbctlupd(&self) -> PWM_3_CTL_DBCTLUPDR { PWM_3_CTL_DBCTLUPDR::_from(((self.bits >> 10) & 3) as u8) } #[doc = "Bits 12:13 - PWMnDBRISE Update Mode"] #[inline(always)] pub fn pwm_3_ctl_dbriseupd(&self) -> PWM_3_CTL_DBRISEUPDR { PWM_3_CTL_DBRISEUPDR::_from(((self.bits >> 12) & 3) as u8) } #[doc = "Bits 14:15 - PWMnDBFALL Update Mode"] #[inline(always)] pub fn pwm_3_ctl_dbfallupd(&self) -> PWM_3_CTL_DBFALLUPDR { PWM_3_CTL_DBFALLUPDR::_from(((self.bits >> 14) & 3) as u8) } #[doc = "Bit 16 - Fault Condition Source"] #[inline(always)] pub fn pwm_3_ctl_fltsrc(&self) -> PWM_3_CTL_FLTSRCR { let bits = ((self.bits >> 16) & 1) != 0; PWM_3_CTL_FLTSRCR { bits } } #[doc = "Bit 17 - Minimum Fault Period"] #[inline(always)] pub fn pwm_3_ctl_minfltper(&self) -> PWM_3_CTL_MINFLTPERR { let bits = ((self.bits >> 17) & 1) != 0; PWM_3_CTL_MINFLTPERR { bits } } #[doc = "Bit 18 - Latch Fault Input"] #[inline(always)] pub fn pwm_3_ctl_latch(&self) -> PWM_3_CTL_LATCHR { let bits = ((self.bits >> 18) & 1) != 0; PWM_3_CTL_LATCHR { bits } } } impl W { #[doc = r"Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - PWM Block Enable"] #[inline(always)] pub fn pwm_3_ctl_enable(&mut self) -> _PWM_3_CTL_ENABLEW { _PWM_3_CTL_ENABLEW { w: self } } #[doc = "Bit 1 - Counter Mode"] #[inline(always)] pub fn pwm_3_ctl_mode(&mut self) -> _PWM_3_CTL_MODEW { _PWM_3_CTL_MODEW { w: self } } #[doc = "Bit 2 - Debug Mode"] #[inline(always)] pub fn pwm_3_ctl_debug(&mut self) -> _PWM_3_CTL_DEBUGW { _PWM_3_CTL_DEBUGW { w: self } } #[doc = "Bit 3 - Load Register Update Mode"] #[inline(always)] pub fn 
pwm_3_ctl_loadupd(&mut self) -> _PWM_3_CTL_LOADUPDW { _PWM_3_CTL_LOADUPDW { w: self } } #[doc = "Bit 4 - Comparator A Update Mode"] #[inline(always)] pub fn pwm_3_ctl_cmpaupd(&mut self) -> _PWM_3_CTL_CMPAUPDW { _PWM_3_CTL_CMPAUPDW { w: self } } #[doc = "Bit 5 - Comparator B Update Mode"] #[inline(always)] pub fn pwm_3_ctl_cmpbupd(&mut self) -> _PWM_3_CTL_CMPBUPDW { _PWM_3_CTL_CMPBUPDW { w: self } } #[doc = "Bits 6:7 - PWMnGENA Update Mode"] #[inline(always)] pub fn pwm_3_ctl_genaupd(&mut self) -> _PWM_3_CTL_GENAUPDW { _PWM_3_CTL_GENAUPDW { w: self } } #[doc = "Bits 8:9 - PWMnGENB Update Mode"] #[inline(always)] pub fn pwm_3_ctl_genbupd(&mut self) -> _PWM_3_CTL_GENBUPDW { _PWM_3_CTL_GENBUPDW { w: self } } #[doc = "Bits 10:11 - PWMnDBCTL Update Mode"] #[inline(always)] pub fn pwm_3_ctl_dbctlupd(&mut self) -> _PWM_3_CTL_DBCTLUPDW { _PWM_3_CTL_DBCTLUPDW { w: self } } #[doc = "Bits 12:13 - PWMnDBRISE Update Mode"] #[inline(always)] pub fn pwm_3_ctl_dbriseupd(&mut self) -> _PWM_3_CTL_DBRISEUPDW { _PWM_3_CTL_DBRISEUPDW { w: self } } #[doc = "Bits 14:15 - PWMnDBFALL Update Mode"] #[inline(always)] pub fn pwm_3_ctl_dbfallupd(&mut self) -> _PWM_3_CTL_DBFALLUPDW { _PWM_3_CTL_DBFALLUPDW { w: self } } #[doc = "Bit 16 - Fault Condition Source"] #[inline(always)] pub fn pwm_3_ctl_fltsrc(&mut self) -> _PWM_3_CTL_FLTSRCW { _PWM_3_CTL_FLTSRCW { w: self } } #[doc = "Bit 17 - Minimum Fault Period"] #[inline(always)] pub fn pwm_3_ctl_minfltper(&mut self) -> _PWM_3_CTL_MINFLTPERW { _PWM_3_CTL_MINFLTPERW { w: self } } #[doc = "Bit 18 - Latch Fault Input"] #[inline(always)] pub fn pwm_3_ctl_latch(&mut self) -> _PWM_3_CTL_LATCHW { _PWM_3_CTL_LATCHW { w: self } } }
//#![feature(remarkable)] extern crate libremarkable; use self::libremarkable::framebuffer as remarkable_fb; use self::libremarkable::framebuffer::{FramebufferIO, FramebufferRefresh, FramebufferBase}; use geom::Rectangle; use framebuffer::{UpdateMode, Framebuffer}; use errors::*; use self::libremarkable::framebuffer::common::*; use self::libremarkable::framebuffer::refresh::PartialRefreshMode; pub struct RemarkableFramebuffer<'a> { fb: remarkable_fb::core::Framebuffer<'a> } impl<'a> Framebuffer for RemarkableFramebuffer<'a> { fn set_pixel(&mut self, x: u32, y: u32, color: u8) { // print!("-set_pixel {} {} {}\n", x, y, color); self.fb.write_pixel(y as usize, x as usize, color::NATIVE_COMPONENTS(color,color,color,color)); } fn set_blended_pixel(&mut self, x: u32, y: u32, color: u8, alpha: f32) { if alpha == 1.0 { self.set_pixel(x, y, color); return; } let dst_color = self.fb.read_pixel(y as usize, x as usize); let dst_color = dst_color.as_native(); let (dst_r, dst_g, dst_b) = (dst_color[0], dst_color[1], dst_color[2]); let src_alpha = color as f32 * alpha; let r = src_alpha + (1.0 - alpha) * dst_r as f32; let g = src_alpha + (1.0 - alpha) * dst_g as f32; let b = src_alpha + (1.0 - alpha) * dst_b as f32; let a = (r+g+b)/3.0; //we ignoring alpha of pixel read // print!("setting blended color: dst: {} {} {} src: {} res: {} {} {} {} \n" , dst_r, dst_g, dst_b, src_alpha, r, g, b, a); self.fb.write_pixel(y as usize, x as usize, color::NATIVE_COMPONENTS(r as u8, b as u8, g as u8, a as u8)); } fn invert_region(&mut self, rect: &Rectangle) { println!("invert_region"); } fn update(&mut self, rect: &Rectangle, mode: UpdateMode) -> Result<u32> { // println!("update (mode {:?})", mode); let rm_mxcfb_rect = mxcfb_rect { top: rect.min.y as u32, left: rect.min.x as u32, width: rect.width(), height: rect.height() }; let (is_partial, waveform_mode, temperature) = match mode { UpdateMode::Gui | UpdateMode::Partial => (true, waveform_mode::WAVEFORM_MODE_GC16_FAST, 
display_temp::TEMP_USE_REMARKABLE_DRAW), UpdateMode::Full => (false, waveform_mode::WAVEFORM_MODE_GC16_FAST, display_temp::TEMP_USE_REMARKABLE_DRAW), UpdateMode::Fast | UpdateMode::FastMono => (true, waveform_mode::WAVEFORM_MODE_GC16_FAST, display_temp::TEMP_USE_REMARKABLE_DRAW), }; let token = if is_partial { self.fb.partial_refresh( &rm_mxcfb_rect, PartialRefreshMode::Async, waveform_mode, temperature, dither_mode::EPDC_FLAG_USE_DITHERING_PASSTHROUGH, 0, false, ) } else { self.fb.full_refresh( waveform_mode, temperature, dither_mode::EPDC_FLAG_USE_DITHERING_PASSTHROUGH, 0, false) }; // println!("update completed -> {}", token); Ok(token) } fn wait(&mut self, token: u32) -> Result<i32> { // println!("wait token {}", token); let res = self.fb.wait_refresh_complete(token) as i32; // println!("wait completed -> {}\n", res); Ok(res) } fn save(&self, path: &str) -> Result<()> { // println!("save {}", path); Ok(()) } fn toggle_inverted(&mut self) { println!("toggle_inverted"); } fn toggle_monochrome(&mut self) { println!("toggle_monochrome"); } fn width(&self) -> u32 { self.fb.var_screen_info.xres } fn height(&self) -> u32 { self.fb.var_screen_info.yres } } impl<'a> RemarkableFramebuffer <'a> { pub fn new() -> Result<RemarkableFramebuffer<'static>> { let framebuffer = remarkable_fb::core::Framebuffer::new("/dev/fb0"); Ok(RemarkableFramebuffer { fb: framebuffer }) } }
use crate::keys::{Key, KeyCombo, ModKey};

/// A key combination in the raw form xcb expects: a modifier bit mask plus
/// a key code.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct XcbKeyCombo {
    pub mod_mask: u32,
    pub key: u32,
}

impl From<ModKey> for u32 {
    /// Maps each logical modifier onto its xcb `MOD_MASK_*` bit.
    fn from(mod_key: ModKey) -> Self {
        match mod_key {
            ModKey::Shift => xcb::MOD_MASK_SHIFT,
            ModKey::Lock => xcb::MOD_MASK_LOCK,
            ModKey::Control => xcb::MOD_MASK_CONTROL,
            ModKey::Mod1 => xcb::MOD_MASK_1,
            ModKey::Mod2 => xcb::MOD_MASK_2,
            ModKey::Mod3 => xcb::MOD_MASK_3,
            ModKey::Mod4 => xcb::MOD_MASK_4,
            ModKey::Mod5 => xcb::MOD_MASK_5,
        }
    }
}

impl From<Key> for u32 {
    fn from(key: Key) -> Self {
        key.0 as u32
    }
}

impl From<KeyCombo> for XcbKeyCombo {
    /// ORs together the mask bits of every modifier and converts the key
    /// code into its raw xcb value.
    fn from(combo: KeyCombo) -> Self {
        let mod_mask = combo
            .mod_keys
            .into_iter()
            .map(u32::from)
            .fold(0u32, |mask, bit| mask | bit);
        XcbKeyCombo {
            mod_mask,
            key: combo.key.into(),
        }
    }
}
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. /// The value encoded in this opcode is NOT the same as that defined by IANA; instead the encoded values are the IANA values left-shifted by 3. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(crate) struct MessageOpcode; impl MessageOpcode { /// Query. /// /// Defined in RFC 1035. pub(crate) const Query: u8 = 0; /// Inverse Query ('IQuery'). /// /// Defined in RFC 1035; made obsolete by RFC 3425. pub(crate) const InverseQuery: u8 = 1; /// Status. /// /// Defined in RFC 1035. pub(crate) const Status: u8 = 2; /// Notify. /// /// Defined in RFC 1996. pub(crate) const Notify: u8 = 4; /// Update. /// /// Defined in RFC 2136. pub(crate) const Update: u8 = 5; /// DNS Stateful Operations, DSO. /// /// Defined in [RFC-ietf-dnsop-session-signal-20](http://www.iana.org/go/draft-ietf-dnsop-session-signal-20). pub(crate) const DnsStatefulOperations: u8 = 6; }
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! 2022-11-25:
//! TODO: support synchronize with remote
//! Note:
//! currently, we only care about immutable tables
//! once the table created we don't update it.

use std::any::Any;
use std::sync::Arc;

use async_trait::async_trait;
use common_catalog::plan::PartStatistics;
use common_catalog::plan::Partitions;
use common_catalog::plan::PushDownInfo;
use common_catalog::table::Table;
use common_catalog::table_context::TableContext;
use common_exception::ErrorCode;
use common_exception::Result;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_storage::DataOperator;
use futures::StreamExt;
use iceberg_rs::model::table::TableMetadata;
use opendal::Operator;

use crate::converters::meta_iceberg_to_databend;

/// file marking the current version of metadata file
// NOTE(review): this ends in ".text" while the comment in `version_detect`
// says Dremio writes "version_hint.txt" — confirm which spelling the writer
// actually uses.
const META_PTR: &str = "metadata/version_hint.text";

/// accessor wrapper as a table
#[allow(unused)]
pub struct IcebergTable {
    /// database that belongs to
    database: String,
    /// name of the current table
    name: String,
    /// root of the table
    tbl_root: DataOperator,
    /// table metadata
    manifests: TableMetadata,
    /// table information
    info: TableInfo,
}

impl IcebergTable {
    /// create a new table on the table directory
    ///
    /// Locates the latest Iceberg metadata JSON under `tbl_root`, parses it,
    /// and builds the in-memory table description from it.
    pub async fn try_create_table_from_read(
        catalog: &str,
        database: &str,
        table_name: &str,
        tbl_root: DataOperator,
    ) -> Result<IcebergTable> {
        let op = tbl_root.operator();
        // detect the latest manifest file
        let latest_manifest = Self::version_detect(&op).await?;

        // get table metadata from metadata file
        let meta_json = op.read(&latest_manifest).await.map_err(|e| {
            ErrorCode::ReadTableDataError(format!(
                "invalid metadata in {}: {:?}",
                &latest_manifest, e
            ))
        })?;

        let metadata: TableMetadata =
            serde_json::de::from_slice(meta_json.as_slice()).map_err(|e| {
                ErrorCode::ReadTableDataError(format!(
                    "invalid metadata in {}: {:?}",
                    &latest_manifest, e
                ))
            })?;

        let sp = tbl_root.params();

        // construct table info
        // NOTE(review): ident is a placeholder (0, 0) — presumably filled in
        // by the catalog layer; confirm callers do not rely on it here.
        let info = TableInfo {
            ident: TableIdent::new(0, 0),
            desc: format!("IcebergTable: '{database}'.'{table_name}'"),
            name: table_name.to_string(),
            meta: meta_iceberg_to_databend(catalog, &sp, &metadata),
            ..Default::default()
        };

        // finish making table
        Ok(Self {
            database: database.to_string(),
            name: table_name.to_string(),
            tbl_root,
            manifests: metadata,
            info,
        })
    }

    /// version_detect figures out the manifest list version of the table
    /// and gives the relative path from table root directory
    /// to latest metadata json file
    async fn version_detect(tbl_root: &Operator) -> Result<String> {
        // try Dremio's way
        // Dremio has an `version_hint.txt` file
        // recording the latest snapshot version number
        // and stores metadata
        if let Ok(version_hint) = tbl_root.read(META_PTR).await {
            if let Ok(version_str) = String::from_utf8(version_hint) {
                if let Ok(version) = version_str.trim().parse::<u64>() {
                    return Ok(format!("metadata/v{version}.metadata.json"));
                }
            }
        }

        // try Spark's way
        // Spark will arrange all files with a sequential number
        // in such case, we just need to find the file with largest alphabetical name.
        let files = tbl_root.list("metadata/").await.map_err(|e| {
            ErrorCode::ReadTableDataError(format!("Cannot list metadata directory: {e:?}"))
        })?;

        // Keep only `*.metadata.json` entries and pick the lexicographically
        // largest name; errors from individual listing entries are skipped.
        files
            .filter_map(|obj| async {
                if let Ok(obj) = obj {
                    if obj.name().ends_with(".metadata.json") {
                        Some(obj.name().to_string())
                    } else {
                        None
                    }
                } else {
                    None
                }
            })
            .collect::<Vec<String>>()
            .await
            .into_iter()
            .max()
            .map(|s| format!("metadata/{s}"))
            .ok_or_else(|| ErrorCode::ReadTableDataError("Cannot get the latest manifest file"))
    }
}

#[async_trait]
impl Table for IcebergTable {
    /// Iceberg tables live on external storage, never local disk.
    fn is_local(&self) -> bool {
        false
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

    fn get_table_info(&self) -> &TableInfo {
        &self.info
    }

    fn name(&self) -> &str {
        &self.get_table_info().name
    }

    /// Not implemented yet (see module-level TODO).
    async fn read_partitions(
        &self,
        _ctx: Arc<dyn TableContext>,
        _push_downs: Option<PushDownInfo>,
    ) -> Result<(PartStatistics, Partitions)> {
        todo!()
    }
}
//! Methods to cleanup the object store.
use std::{
    collections::HashSet,
    sync::{Arc, Mutex},
};

use crate::{
    catalog::{CatalogParquetInfo, CatalogState, PreservedCatalog},
    storage::data_location,
};
use futures::TryStreamExt;
use object_store::{
    path::{parsed::DirsAndFileName, ObjectStorePath},
    ObjectStore, ObjectStoreApi,
};
use observability_deps::tracing::info;
use snafu::{ResultExt, Snafu};

/// Errors that can occur while cleaning up the object store.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("Error from read operation while cleaning object store: {}", source))]
    ReadError {
        source: <ObjectStore as ObjectStoreApi>::Error,
    },

    #[snafu(display("Error from write operation while cleaning object store: {}", source))]
    WriteError {
        source: <ObjectStore as ObjectStoreApi>::Error,
    },

    #[snafu(display("Error from catalog loading while cleaning object store: {}", source))]
    CatalogLoadError { source: crate::catalog::Error },
}

pub type Result<T, E = Error> = std::result::Result<T, E>;

/// Delete all unreferenced parquet files.
///
/// This will hold the transaction lock while the list of files is being gathered. To limit the time the lock is held
/// use `max_files` which will limit the number of files to delete in this cleanup round.
pub async fn cleanup_unreferenced_parquet_files(
    catalog: &PreservedCatalog,
    max_files: usize,
) -> Result<()> {
    // Create a transaction to prevent parallel modifications of the catalog. This avoids that we delete files there
    // that are about to get added to the catalog.
    let transaction = catalog.open_transaction().await;

    let store = catalog.object_store();
    let server_id = catalog.server_id();
    let db_name = catalog.db_name();
    let all_known = {
        // replay catalog transactions to track ALL (even dropped) files that are referenced
        let (_catalog, state) = PreservedCatalog::load::<TracerCatalogState>(
            Arc::clone(&store),
            server_id,
            db_name.to_string(),
            (),
        )
        .await
        .context(CatalogLoadError)?
        .expect("catalog gone while reading it?");

        // NOTE(review): "poissened" is a typo for "poisoned" in the expect
        // message; left untouched since it is a runtime string.
        let file_guard = state.files.lock().expect("lock poissened?");
        file_guard.clone()
    };

    let prefix = data_location(&store, server_id, db_name);

    // gather a list of "files to remove" eagerly so we do not block transactions on the catalog for too long
    let mut to_remove = vec![];
    let mut stream = store.list(Some(&prefix)).await.context(ReadError)?;

    'outer: while let Some(paths) = stream.try_next().await.context(ReadError)? {
        for path in paths {
            if to_remove.len() >= max_files {
                info!(%max_files, "reached limit of number of files to cleanup in one go");
                break 'outer;
            }

            let path_parsed: DirsAndFileName = path.clone().into();

            // only delete if all of the following conditions are met:
            // - filename ends with `.parquet`
            // - file is not tracked by the catalog
            if path_parsed
                .file_name
                .as_ref()
                .map(|part| part.encoded().ends_with(".parquet"))
                .unwrap_or(false)
                && !all_known.contains(&path_parsed)
            {
                to_remove.push(path);
            }
        }
    }

    // abort transaction cleanly to avoid warnings about uncommitted transactions
    transaction.abort();

    // now that the transaction lock is dropped, perform the actual (and potentially slow) delete operation
    let n_files = to_remove.len();
    info!(%n_files, "Found files to delete, start deletion.");

    for path in to_remove {
        info!(path = %path.display(), "Delete file");
        store.delete(&path).await.context(WriteError)?;
    }

    info!(%n_files, "Finished deletion, removed files.");

    Ok(())
}

/// Catalog state that traces all used parquet files.
struct TracerCatalogState {
    // Set of every parquet path ever referenced by a catalog transaction,
    // including files that were later removed (kept for time travel).
    files: Mutex<HashSet<DirsAndFileName>>,
}

impl CatalogState for TracerCatalogState {
    type EmptyInput = ();

    fn new_empty(_db_name: &str, _data: Self::EmptyInput) -> Self {
        Self {
            files: Default::default(),
        }
    }

    /// Records the path of every parquet file added to the catalog.
    fn add(
        &mut self,
        _object_store: Arc<ObjectStore>,
        info: CatalogParquetInfo,
    ) -> crate::catalog::Result<()> {
        self.files
            .lock()
            .expect("lock poissened?")
            .insert(info.path);
        Ok(())
    }

    fn remove(&mut self, _path: DirsAndFileName) -> crate::catalog::Result<()> {
        // Do NOT remove the file since we still need it for time travel
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use std::{collections::HashSet, num::NonZeroU32, sync::Arc};

    use bytes::Bytes;
    use data_types::server_id::ServerId;
    use object_store::path::{parsed::DirsAndFileName, ObjectStorePath, Path};

    use super::*;
    use crate::{
        catalog::test_helpers::TestCatalogState,
        test_utils::{chunk_addr, db_name, make_metadata, make_object_store},
    };

    // Cleanup on an empty catalog must succeed and delete nothing.
    #[tokio::test]
    async fn test_cleanup_empty() {
        let object_store = make_object_store();
        let server_id = make_server_id();
        let db_name = "db1";

        let (catalog, _state) = PreservedCatalog::new_empty::<TestCatalogState>(
            Arc::clone(&object_store),
            server_id,
            db_name.to_string(),
            (),
        )
        .await
        .unwrap();

        // run clean-up
        cleanup_unreferenced_parquet_files(&catalog, 1_000)
            .await
            .unwrap();
    }

    // Exercises each keep/delete rule: tracked files, dropped-but-tracked
    // files, non-parquet files, and untracked parquet files.
    #[tokio::test]
    async fn test_cleanup_rules() {
        let object_store = make_object_store();
        let server_id = make_server_id();
        let db_name = db_name();

        let (catalog, _state) = PreservedCatalog::new_empty::<TestCatalogState>(
            Arc::clone(&object_store),
            server_id,
            db_name.to_string(),
            (),
        )
        .await
        .unwrap();

        // create some data
        let mut paths_keep = vec![];
        let mut paths_delete = vec![];
        {
            let mut transaction = catalog.open_transaction().await;

            // an ordinary tracked parquet file => keep
            let (path, md) = make_metadata(&object_store, "foo", chunk_addr(1)).await;
            transaction.add_parquet(&path.clone().into(), &md).unwrap();
            paths_keep.push(path.display());

            // another ordinary tracked parquet file that was added and removed => keep (for time travel)
            let (path, md) = make_metadata(&object_store, "foo", chunk_addr(2)).await;
            transaction.add_parquet(&path.clone().into(), &md).unwrap();
            transaction.remove_parquet(&path.clone().into()).unwrap();
            paths_keep.push(path.display());

            // not a parquet file => keep
            let mut path: DirsAndFileName = path.into();
            path.file_name = Some("foo.txt".into());
            let path = object_store.path_from_dirs_and_filename(path);
            create_empty_file(&object_store, &path).await;
            paths_keep.push(path.display());

            // an untracked parquet file => delete
            let (path, _md) = make_metadata(&object_store, "foo", chunk_addr(3)).await;
            paths_delete.push(path.display());

            transaction.commit().await.unwrap();
        }

        // run clean-up
        cleanup_unreferenced_parquet_files(&catalog, 1_000)
            .await
            .unwrap();

        // list all files
        let all_files = list_all_files(&object_store).await;
        for p in paths_keep {
            assert!(dbg!(&all_files).contains(dbg!(&p)));
        }
        for p in paths_delete {
            assert!(!dbg!(&all_files).contains(dbg!(&p)));
        }
    }

    // Racing cleanup against a committing transaction must never delete a
    // file that the transaction is adding.
    #[tokio::test]
    async fn test_cleanup_with_parallel_transaction() {
        let object_store = make_object_store();
        let server_id = make_server_id();
        let db_name = db_name();

        let (catalog, _state) = PreservedCatalog::new_empty::<TestCatalogState>(
            Arc::clone(&object_store),
            server_id,
            db_name.to_string(),
            (),
        )
        .await
        .unwrap();

        // try multiple times to provoke a conflict
        for i in 0..100 {
            let (path, _) = tokio::join!(
                async {
                    let mut transaction = catalog.open_transaction().await;

                    let (path, md) = make_metadata(&object_store, "foo", chunk_addr(i)).await;
                    transaction.add_parquet(&path.clone().into(), &md).unwrap();

                    transaction.commit().await.unwrap();

                    path.display()
                },
                async {
                    cleanup_unreferenced_parquet_files(&catalog, 1_000)
                        .await
                        .unwrap();
                },
            );

            let all_files = list_all_files(&object_store).await;
            assert!(all_files.contains(&path));
        }
    }

    // `max_files` must cap the number of deletions per cleanup round.
    #[tokio::test]
    async fn test_cleanup_max_files() {
        let object_store = make_object_store();
        let server_id = make_server_id();
        let db_name = db_name();

        let (catalog, _state) = PreservedCatalog::new_empty::<TestCatalogState>(
            Arc::clone(&object_store),
            server_id,
            db_name.to_string(),
            (),
        )
        .await
        .unwrap();

        // create some files
        let mut to_remove: HashSet<String> = Default::default();
        for chunk_id in 0..3 {
            let (path, _md) = make_metadata(&object_store, "foo", chunk_addr(chunk_id)).await;
            to_remove.insert(path.display());
        }

        // run clean-up
        cleanup_unreferenced_parquet_files(&catalog, 2)
            .await
            .unwrap();

        // should only delete 2
        let all_files = list_all_files(&object_store).await;
        let leftover: HashSet<_> = all_files.intersection(&to_remove).collect();
        assert_eq!(leftover.len(), 1);

        // run clean-up again
        cleanup_unreferenced_parquet_files(&catalog, 2)
            .await
            .unwrap();

        // should delete remaining file
        let all_files = list_all_files(&object_store).await;
        let leftover: HashSet<_> = all_files.intersection(&to_remove).collect();
        assert_eq!(leftover.len(), 0);
    }

    // Fixed server id used by all tests in this module.
    fn make_server_id() -> ServerId {
        ServerId::new(NonZeroU32::new(1).unwrap())
    }

    // Writes a zero-byte object at `path`.
    async fn create_empty_file(object_store: &ObjectStore, path: &Path) {
        let data = Bytes::default();
        let len = data.len();

        object_store
            .put(
                &path,
                futures::stream::once(async move { Ok(data) }),
                Some(len),
            )
            .await
            .unwrap();
    }

    // Returns the display form of every object currently in the store.
    async fn list_all_files(object_store: &ObjectStore) -> HashSet<String> {
        object_store
            .list(None)
            .await
            .unwrap()
            .try_concat()
            .await
            .unwrap()
            .iter()
            .map(|p| p.display())
            .collect()
    }
}
use anyhow::{Context, Result, bail, ensure}; // enum MyError{ // Io(std::io::Error), // Num(std::num::ParseIntError), // } fn get_int_from_file() -> Result<i32>{ let path = "number.txt"; let num_str = std::fs::read_to_string(path).with_context(|| format!("failed to read string from {}", path))?; if num_str.len() >= 10{ bail!("it may be too large number"); } ensure!(num_str.starts_with("1"), "first digit is not 1"); num_str .trim() .parse::<i32>() .map(|t| t*2) .context("failed to parse string") } fn main(){ match get_int_from_file(){ Ok(x) => println!("{}", x), Err(e) => println!("{:#?}", e), } }
mod address;

// Re-export the address read/write helpers so callers can use them without
// naming the `address` submodule.
pub use self::address::write_address;
pub use self::address::read_address;
//! Demo runner. use std::fmt; pub mod debug; /// Possible runner errors. #[derive(Debug)] pub enum Error { CannotCreateWindow(String), CannotCreateStore(String), DemoInitializationFailure(String) } impl Error { pub(crate) fn cannot_create_window<R>(reason: R) -> Self where R: Into<String> { Error::CannotCreateWindow(reason.into()) } pub(crate) fn cannot_create_store<R>(reason: R) -> Self where R: Into<String> { Error::CannotCreateStore(reason.into()) } pub(crate) fn demo_initialization_failure<R>(reason: R) -> Self where R: Into<String> { Error::DemoInitializationFailure(reason.into()) } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Error::CannotCreateWindow(ref reason) => write!(f, "cannot create window: {}", reason), Error::CannotCreateStore(ref reason) => write!(f, "cannot create store: {}", reason), Error::DemoInitializationFailure(ref reason) => write!(f, "demo failed to initialize: {}", reason), } } }
use std::fmt; use super::*; use crate::support::StringRef; extern "C" { type LlvmType; } /// Represents the kind of a type /// /// This is primarily used in FFI #[repr(C)] #[derive(Copy, Clone, PartialEq, Eq)] #[allow(non_camel_case_types)] pub enum TypeKind { Void = 0, FP16, FP32, FP64, FP80, // 80 bit floating point type (X87) FP128, // 128-bit floating point type (112-bit mantissa) FP128_PPC, // 128-bit floating point type (two 64-bits) Label, Integer, // for any bit width Function, Struct, Array, Pointer, Vector, // fixed-width SIMD vector type Metadata, MMX, // x86 mmx Token, ScalableVector, // scalable SIMD vector type BFloat, // 16-bit brain floating point AMX, // x86 amx } /// This trait is implemented by all LLVM types /// /// Types have the following hierarchy: /// /// Type: /// Integers /// Floats /// Functions /// Sequences: /// Array /// Pointer /// Vector /// Void /// Label /// Metadata pub trait Type { /// Returns the kind of type this is fn kind(&self) -> TypeKind { extern "C" { fn LLVMGetTypeKind(ty: TypeBase) -> TypeKind; } unsafe { LLVMGetTypeKind(self.base()) } } /// Returns true if this type has a size fn is_sized(&self) -> bool { extern "C" { fn LLVMTypeIsSized(ty: TypeBase) -> bool; } unsafe { LLVMTypeIsSized(self.base()) } } /// Returns the context this type was created in fn context(&self) -> Context { extern "C" { fn LLVMGetTypeContext(ty: TypeBase) -> Context; } unsafe { LLVMGetTypeContext(self.base()) } } /// Prints a textual representation of this type to stderr fn dump(&self) { extern "C" { fn LLVMDumpType(ty: TypeBase); } unsafe { LLVMDumpType(self.base()); } } /// Gets an opaque handle for this type to be used with the FFI bridge fn base(&self) -> TypeBase; } /// Represents a type that contains one or more elements of a single type /// /// * arrays /// * vectors /// * pointers pub trait SequentialType: Type { /// Returns the element type of this container fn element_type(&self) -> TypeBase { extern "C" { fn LLVMGetElementType(ty: 
TypeBase) -> TypeBase; } unsafe { LLVMGetElementType(self.base()) } } fn subtypes(&self) -> Vec<TypeBase> { extern "C" { fn LLVMGetSubtypes(ty: TypeBase, results: *mut TypeBase); } let len = self.arity(); let mut subtypes = Vec::with_capacity(len); unsafe { LLVMGetSubtypes(self.base(), subtypes.as_mut_ptr()); subtypes.set_len(len); } subtypes } /// Returns the size of this container, i.e. length fn arity(&self) -> usize { extern "C" { fn LLVMGetNumContainedTypes(ty: TypeBase) -> u32; } unsafe { LLVMGetNumContainedTypes(self.base()) as usize } } } /// Represents an opaque handle to an LLVM type for use in the FFI bridge, /// or in situations where a container of mixed types are needed (e.g. struct fields) #[repr(transparent)] #[derive(Copy, Clone)] pub struct TypeBase(*const LlvmType); impl Type for TypeBase { #[inline(always)] fn base(&self) -> TypeBase { *self } } impl fmt::Display for TypeBase { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { extern "C" { fn LLVMPrintTypeToString(ty: TypeBase) -> *const std::os::raw::c_char; } let string = unsafe { StringRef::from_ptr(LLVMPrintTypeToString(*self)) }; write!(f, "{}", &string) } } macro_rules! impl_type_traits { ($ty:ident, $($kind:ident),+) => { impl Type for $ty { fn base(&self) -> TypeBase { self.0 } } impl Into<TypeBase> for $ty { fn into(self) -> TypeBase { self.0 } } impl TryFrom<TypeBase> for $ty { type Error = InvalidTypeCastError; fn try_from(ty: TypeBase) -> Result<Self, Self::Error> { match ty.kind() { $( TypeKind::$kind => Ok(Self(ty)), )* _ => Err(InvalidTypeCastError), } } } impl fmt::Display for $ty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } }; } /// Represents the void/unit type, i.e. 
represents nothing #[repr(transparent)] #[derive(Copy, Clone)] pub struct VoidType(TypeBase); impl_type_traits!(VoidType, Void); /// Represents the set of all floating-point types in LLVM #[repr(transparent)] #[derive(Copy, Clone)] pub struct FloatType(TypeBase); impl_type_traits!(FloatType, FP16, FP32, FP64, FP80, FP128, FP128_PPC, BFloat); /// Represents the set of all integer types in LLVM #[repr(transparent)] #[derive(Copy, Clone)] pub struct IntegerType(TypeBase); impl_type_traits!(IntegerType, Integer); impl IntegerType { /// Gets the bitwidth of this integer type, e.g. 64 pub fn bitwidth(self) -> usize { extern "C" { fn LLVMGetIntTypeWidth(ty: IntegerType) -> u32; } unsafe { LLVMGetIntTypeWidth(self) as usize } } } /// Represents the type of a function in LLVM, i.e. its signature #[repr(transparent)] #[derive(Copy, Clone)] pub struct FunctionType(TypeBase); impl_type_traits!(FunctionType, Function); impl FunctionType { pub fn new<R: Type>(return_ty: R, params: &[TypeBase], is_variadic: bool) -> Self { extern "C" { fn LLVMFunctionType( return_ty: TypeBase, param_types: *const TypeBase, num_params: u32, is_variadic: bool, ) -> FunctionType; } unsafe { LLVMFunctionType( return_ty.base(), params.as_ptr(), params.len().try_into().unwrap(), is_variadic, ) } } pub fn is_variadic(self) -> bool { extern "C" { fn LLVMIsFunctionVarArg(ty: FunctionType) -> bool; } unsafe { LLVMIsFunctionVarArg(self) } } pub fn return_type(self) -> TypeBase { extern "C" { fn LLVMGetReturnType(ty: FunctionType) -> TypeBase; } unsafe { LLVMGetReturnType(self) } } pub fn arity(self) -> usize { extern "C" { fn LLVMCountParamTypes(ty: FunctionType) -> u32; } unsafe { LLVMCountParamTypes(self) as usize } } pub fn params(self) -> Vec<TypeBase> { extern "C" { fn LLVMGetParamTypes(ty: FunctionType, params: *mut TypeBase); } let len = self.arity(); let mut params = Vec::with_capacity(len); unsafe { LLVMGetParamTypes(self, params.as_mut_ptr()); params.set_len(len); } params } } /// Represents 
struct/record types in LLVM #[repr(transparent)] #[derive(Copy, Clone)] pub struct StructType(TypeBase); impl_type_traits!(StructType, Struct); impl StructType { pub fn name(self) -> Option<StringRef> { extern "C" { fn LLVMGetStructName(ty: StructType) -> *const std::os::raw::c_char; } let ptr = unsafe { LLVMGetStructName(self) }; if ptr.is_null() { None } else { Some(unsafe { StringRef::from_ptr(ptr) }) } } pub fn set_body(self, body: &[TypeBase], packed: bool) { extern "C" { fn LLVMStructSetBody( ty: StructType, elements: *const TypeBase, num_elements: u32, packed: bool, ); } unsafe { LLVMStructSetBody(self, body.as_ptr(), body.len().try_into().unwrap(), packed) } } pub fn arity(self) -> usize { extern "C" { fn LLVMCountStructElementTypes(ty: StructType) -> u32; } unsafe { LLVMCountStructElementTypes(self) as usize } } pub fn element(self, index: usize) -> TypeBase { extern "C" { fn LLVMStructGetTypeAtIndex(ty: StructType, index: u32) -> TypeBase; } assert!( index < self.arity(), "invalid element index, {} is out of bounds", index ); unsafe { LLVMStructGetTypeAtIndex(self, index.try_into().unwrap()) } } pub fn elements(self) -> Vec<TypeBase> { extern "C" { fn LLVMGetStructElementTypes(ty: StructType, elements: *mut TypeBase); } let len = self.arity(); let mut elements = Vec::with_capacity(len); unsafe { LLVMGetStructElementTypes(self, elements.as_mut_ptr()); elements.set_len(len); } elements } pub fn is_packed(self) -> bool { extern "C" { fn LLVMIsPackedStruct(ty: StructType) -> bool; } unsafe { LLVMIsPackedStruct(self) } } pub fn is_opaque(self) -> bool { extern "C" { fn LLVMIsOpaqueStruct(ty: StructType) -> bool; } unsafe { LLVMIsOpaqueStruct(self) } } pub fn is_literal(self) -> bool { extern "C" { fn LLVMIsLiteralStruct(ty: StructType) -> bool; } unsafe { LLVMIsLiteralStruct(self) } } } /// Represents a fixed size container of a given element type #[repr(transparent)] #[derive(Copy, Clone)] pub struct ArrayType(TypeBase); impl_type_traits!(ArrayType, Array); 
impl SequentialType for ArrayType {} impl ArrayType { pub fn new<T: Type>(element_ty: T, arity: usize) -> Self { extern "C" { fn LLVMArrayType(element_ty: TypeBase, arity: u32) -> ArrayType; } unsafe { LLVMArrayType(element_ty.base(), arity.try_into().unwrap()) } } pub fn len(self) -> usize { extern "C" { fn LLVMGetArrayLength(ty: ArrayType) -> u32; } unsafe { LLVMGetArrayLength(self) as usize } } } /// Represents a pointer type #[repr(transparent)] #[derive(Copy, Clone)] pub struct PointerType(TypeBase); impl_type_traits!(PointerType, Pointer); impl SequentialType for PointerType {} impl PointerType { pub fn new<T: Type>(pointee: T, address_space: u32) -> Self { extern "C" { fn LLVMPointerType(pointee: TypeBase, address_space: u32) -> PointerType; } unsafe { LLVMPointerType(pointee.base(), address_space) } } pub fn address_space(self) -> u32 { extern "C" { fn LLVMGetPointerAddressSpace(ty: PointerType) -> u32; } unsafe { LLVMGetPointerAddressSpace(self) } } } /// Represents the type of metadata when used as a value #[repr(transparent)] #[derive(Copy, Clone)] pub struct MetadataType(TypeBase); impl_type_traits!(MetadataType, Metadata); /// Represents the type of a token value #[repr(transparent)] #[derive(Copy, Clone)] pub struct TokenType(TypeBase); impl_type_traits!(TokenType, Token); /// Represents the type of a label (e.g. block label) when used as a value #[repr(transparent)] #[derive(Copy, Clone)] pub struct LabelType(TypeBase); impl_type_traits!(LabelType, Label);
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::GString; use glib_sys; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; use webkit2_webextension_sys; use DOMDOMWindow; use DOMDocument; use DOMElement; use DOMEventTarget; use DOMHTMLElement; use DOMNode; use DOMObject; glib_wrapper! { pub struct DOMHTMLIFrameElement(Object<webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, webkit2_webextension_sys::WebKitDOMHTMLIFrameElementClass, DOMHTMLIFrameElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget; match fn { get_type => || webkit2_webextension_sys::webkit_dom_html_iframe_element_get_type(), } } pub const NONE_DOMHTMLI_FRAME_ELEMENT: Option<&DOMHTMLIFrameElement> = None; pub trait DOMHTMLIFrameElementExt: 'static { #[cfg_attr(feature = "v2_22", deprecated)] fn get_align(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_content_document(&self) -> Option<DOMDocument>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_content_window(&self) -> Option<DOMDOMWindow>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_frame_border(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_height(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_long_desc(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_margin_height(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_margin_width(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_name(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_scrolling(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn get_src(&self) -> Option<GString>; 
#[cfg_attr(feature = "v2_22", deprecated)] fn get_width(&self) -> Option<GString>; #[cfg_attr(feature = "v2_22", deprecated)] fn set_align(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_frame_border(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_height(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_long_desc(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_margin_height(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_margin_width(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_name(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_scrolling(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_src(&self, value: &str); #[cfg_attr(feature = "v2_22", deprecated)] fn set_width(&self, value: &str); fn connect_property_align_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_content_document_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; fn connect_property_content_window_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; fn connect_property_frame_border_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_height_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_long_desc_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_margin_height_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; fn connect_property_margin_width_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_scrolling_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_src_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_width_notify<F: Fn(&Self) + 'static>(&self, f: F) 
-> SignalHandlerId; } impl<O: IsA<DOMHTMLIFrameElement>> DOMHTMLIFrameElementExt for O { fn get_align(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_align( self.as_ref().to_glib_none().0, ), ) } } fn get_content_document(&self) -> Option<DOMDocument> { unsafe { from_glib_none( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_content_document( self.as_ref().to_glib_none().0, ), ) } } fn get_content_window(&self) -> Option<DOMDOMWindow> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_content_window( self.as_ref().to_glib_none().0, ), ) } } fn get_frame_border(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_frame_border( self.as_ref().to_glib_none().0, ), ) } } fn get_height(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_height( self.as_ref().to_glib_none().0, ), ) } } fn get_long_desc(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_long_desc( self.as_ref().to_glib_none().0, ), ) } } fn get_margin_height(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_margin_height( self.as_ref().to_glib_none().0, ), ) } } fn get_margin_width(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_margin_width( self.as_ref().to_glib_none().0, ), ) } } fn get_name(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_name( self.as_ref().to_glib_none().0, ), ) } } fn get_scrolling(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_scrolling( self.as_ref().to_glib_none().0, ), ) } } fn get_src(&self) -> Option<GString> { unsafe { 
from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_src( self.as_ref().to_glib_none().0, ), ) } } fn get_width(&self) -> Option<GString> { unsafe { from_glib_full( webkit2_webextension_sys::webkit_dom_html_iframe_element_get_width( self.as_ref().to_glib_none().0, ), ) } } fn set_align(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_align( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_frame_border(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_frame_border( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_height(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_height( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_long_desc(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_long_desc( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_margin_height(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_margin_height( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_margin_width(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_margin_width( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_name(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_name( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_scrolling(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_scrolling( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_src(&self, value: &str) { unsafe { webkit2_webextension_sys::webkit_dom_html_iframe_element_set_src( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn set_width(&self, value: &str) { unsafe { 
webkit2_webextension_sys::webkit_dom_html_iframe_element_set_width( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } fn connect_property_align_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_align_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::align\0".as_ptr() as *const _, Some(transmute(notify_align_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_content_document_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_content_document_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::content-document\0".as_ptr() as *const _, Some(transmute( notify_content_document_trampoline::<Self, F> as usize, )), Box_::into_raw(f), ) } } fn connect_property_content_window_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_content_window_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::content-window\0".as_ptr() as *const _, 
Some(transmute( notify_content_window_trampoline::<Self, F> as usize, )), Box_::into_raw(f), ) } } fn connect_property_frame_border_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_frame_border_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::frame-border\0".as_ptr() as *const _, Some(transmute( notify_frame_border_trampoline::<Self, F> as usize, )), Box_::into_raw(f), ) } } fn connect_property_height_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_height_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::height\0".as_ptr() as *const _, Some(transmute(notify_height_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_long_desc_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_long_desc_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::long-desc\0".as_ptr() as *const _, Some(transmute(notify_long_desc_trampoline::<Self, F> as usize)), 
Box_::into_raw(f), ) } } fn connect_property_margin_height_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_margin_height_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::margin-height\0".as_ptr() as *const _, Some(transmute( notify_margin_height_trampoline::<Self, F> as usize, )), Box_::into_raw(f), ) } } fn connect_property_margin_width_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_margin_width_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::margin-width\0".as_ptr() as *const _, Some(transmute( notify_margin_width_trampoline::<Self, F> as usize, )), Box_::into_raw(f), ) } } fn connect_property_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_name_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::name\0".as_ptr() as *const _, Some(transmute(notify_name_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn 
connect_property_scrolling_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_scrolling_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::scrolling\0".as_ptr() as *const _, Some(transmute(notify_scrolling_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_src_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_src_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::src\0".as_ptr() as *const _, Some(transmute(notify_src_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } fn connect_property_width_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_width_trampoline<P, F: Fn(&P) + 'static>( this: *mut webkit2_webextension_sys::WebKitDOMHTMLIFrameElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<DOMHTMLIFrameElement>, { let f: &F = &*(f as *const F); f(&DOMHTMLIFrameElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::width\0".as_ptr() as *const _, Some(transmute(notify_width_trampoline::<Self, F> as usize)), Box_::into_raw(f), ) } } } impl fmt::Display for DOMHTMLIFrameElement { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, 
"DOMHTMLIFrameElement") } }
//! A collection of general utilities use crate::error::QuicksilverError; use futures::{Future, future}; use std::path::Path; #[cfg(not(target_arch="wasm32"))] use { crate::Result, std::{ fs::File, io::Read, } }; #[cfg(target_arch="wasm32")] use { futures::Async, std::io::{Error as IOError, ErrorKind}, stdweb::{ Reference, InstanceOf, unstable::TryInto, web::{XmlHttpRequest, ArrayBuffer, TypedArray, XhrReadyState}, } }; /// Create a Future that loads a file into an owned Vec of bytes /// /// It exists for loading files from the server with Javascript on the web, and providing a unified /// API between desktop and the web when it comes to file loading pub fn load_file(path: impl AsRef<Path>) -> impl Future<Item = Vec<u8>, Error = QuicksilverError> { #[cfg(not(target_arch="wasm32"))] return future::result(load(path)); #[cfg(target_arch="wasm32")] return { future::result(create_request(path.as_ref().to_str().expect("The path must be able to be stringified"))) .and_then(|xhr| future::poll_fn(move || { let status = xhr.status(); let ready_state = xhr.ready_state(); match (status / 100, ready_state) { (2, XhrReadyState::Done) => { let response: Reference = xhr.raw_response().try_into().expect("The response will always be a JS object"); let array = if TypedArray::<u8>::instance_of(&response) { response.downcast::<TypedArray<u8>>().map(|arr| arr.to_vec()) } else if ArrayBuffer::instance_of(&response) { response.downcast::<ArrayBuffer>().map(|arr| TypedArray::<u8>::from(arr).to_vec()) } else { return Err(new_wasm_error(&format!("Unknown file encoding type: {:?}", response))); }; if let Some(array) = array { Ok(Async::Ready(array)) } else { Err(new_wasm_error("Failed to cast file into bytes")) } }, (2, _) => Ok(Async::NotReady), (0, _) => Ok(Async::NotReady), _ => Err(new_wasm_error("Non-200 status code returned")) } })) }; } #[cfg(target_arch="wasm32")] fn create_request(path: &str) -> Result<XmlHttpRequest, QuicksilverError> { let xhr = XmlHttpRequest::new(); 
web_try(xhr.open("GET", path), "Failed to create a GET request")?; web_try(xhr.send(), "Failed to send a GET request")?; js! { @{&xhr}.responseType = "arraybuffer"; } Ok(xhr) } #[cfg(target_arch="wasm32")] fn web_try<T, E>(result: Result<T, E>, error: &str) -> Result<T, QuicksilverError> { match result { Ok(val) => Ok(val), Err(_) => Err(new_wasm_error(error)) } } #[cfg(target_arch="wasm32")] fn new_wasm_error(string: &str) -> QuicksilverError { IOError::new(ErrorKind::NotFound, string).into() } #[cfg(not(target_arch="wasm32"))] fn load(path: impl AsRef<Path>) -> Result<Vec<u8>> { let mut data = Vec::new(); File::open(path)?.read_to_end(&mut data)?; Ok(data) }
use super::*;

/// A graph vertex identified by `id`, carrying 2-D coordinates.
///
/// All comparisons (ordering and equality) are based solely on `id`;
/// the `x`/`y` coordinates are ignored by them.
#[derive(Clone, Copy, Eq)]
pub struct Vertex {
    pub id: i32,
    pub x: i32,
    pub y: i32,
}

impl AsIndexForGraph for Vertex {
    /// The graph indexes vertices by their `id`.
    fn index(&self) -> &i32 {
        &self.id
    }
}

impl Ord for Vertex {
    /// Total order given by comparing ids only.
    fn cmp(&self, other: &Vertex) -> Ordering {
        let lhs = &self.id;
        let rhs = &other.id;
        lhs.cmp(rhs)
    }
}

impl PartialOrd for Vertex {
    fn partial_cmp(&self, other: &Vertex) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl PartialEq for Vertex {
    /// Two vertices are equal when their ids match, regardless of position.
    fn eq(&self, other: &Vertex) -> bool {
        self.id.eq(&other.id)
    }
}

impl Vertex {
    /// Builds a vertex with identifier `i` located at (`xn`, `yn`).
    pub fn new(i: i32, xn: i32, yn: i32) -> Vertex {
        Vertex {
            id: i,
            x: xn,
            y: yn,
        }
    }
}
/* Copyright (C) 2016 Yutaka Kamei */ #![allow(non_snake_case)] use std::error::Error; use std::fmt::{self, Debug}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_serialize::base64::{self, FromBase64, ToBase64}; use time::{Tm, now_utc, strftime, strptime}; pub mod resource; const USER : &'static str = "urn:ietf:params:scim:schemas:core:2.0:User"; const GROUP : &'static str = "urn:ietf:params:scim:schemas:core:2.0:Group"; const ENTERPRISE_USER : &'static str = "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User"; const RESOURCETYPE : &'static str = "urn:ietf:params:scim:schemas:core:2.0:ResourceType"; const LIST_RESPONSE : &'static str = "urn:ietf:params:scim:api:messages:2.0:ListResponse"; const ERROR: &'static str = "urn:ietf:params:scim:api:messages:2.0:Error"; trait Attr : Encodable + Decodable { fn name() -> String; fn typ() -> ScimSchemaType; fn sub_attributes() -> Option<Vec<AttributeSchemaInfo>> { None } fn is_multi_valued() -> bool { false } fn description() -> String { Self::name() } fn is_required() -> bool { false } fn canonical_values() -> Option<Vec<String>> { None } fn is_case_exact() -> bool { false } fn mutability() -> Mutability { Mutability::readWrite } fn returned() -> Returned { Returned::default } fn is_unique() -> bool { // NOTE: we notice none or server false } fn reference_types() -> Option<Vec<String>> { None } fn show_schema_info() -> AttributeSchemaInfo { AttributeSchemaInfo { name: Self::name(), typ: Self::typ(), subAttributes: Self::sub_attributes(), multiValued: Self::is_multi_valued(), description: Self::description(), required: Self::is_required(), canonicalValues: Self::canonical_values(), caseExact: Self::is_case_exact(), mutability: Self::mutability(), returned: Self::returned(), uniqueness: if Self::is_unique() { format!("server") } else { format!("none") }, referenceTypes: Self::reference_types(), } } } #[derive(RustcEncodable, Debug)] #[allow(non_camel_case_types)] enum ScimSchemaType { string, 
boolean, // decimal, // NOTE: This type is not used // integer, // NOTE: This type is not used // dateTime, // NOTE: This type is not used binary, reference, complex, } #[derive(RustcEncodable, Debug)] #[allow(non_camel_case_types)] enum Mutability { readOnly, readWrite, immutable, writeOnly, } #[derive(RustcEncodable, Debug)] #[allow(non_camel_case_types)] enum Returned { // always, // NOTE: This type is not used never, default, // request, // NOTE: This type is not used } #[derive(Debug)] struct AttributeSchemaInfo { name: String, typ: ScimSchemaType, subAttributes: Option<Vec<AttributeSchemaInfo>>, multiValued: bool, description: String, required: bool, canonicalValues: Option<Vec<String>>, caseExact: bool, mutability: Mutability, returned: Returned, uniqueness: String, // NOTE: uniqueness is server or none in thie implementation. referenceTypes: Option<Vec<String>>, } impl Encodable for AttributeSchemaInfo { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("Attribute", 12, |s| { try!(s.emit_struct_field("name", 0, |s| self.name.encode(s))); try!(s.emit_struct_field("type", 1, |s| self.typ.encode(s))); if let Some(ref val) = self.subAttributes { try!(s.emit_struct_field("subAttributes", 1, |s| val.encode(s))); } try!(s.emit_struct_field("multiValued", 1, |s| self.multiValued.encode(s))); try!(s.emit_struct_field("description", 1, |s| self.description.encode(s))); try!(s.emit_struct_field("required", 1, |s| self.required.encode(s))); if let Some(ref val) = self.canonicalValues { try!(s.emit_struct_field("canonicalValues", 1, |s| val.encode(s))); } try!(s.emit_struct_field("caseExact", 1, |s| self.caseExact.encode(s))); try!(s.emit_struct_field("mutability", 1, |s| self.mutability.encode(s))); try!(s.emit_struct_field("returned", 1, |s| self.returned.encode(s))); try!(s.emit_struct_field("uniqueness", 1, |s| self.uniqueness.encode(s))); if let Some(ref val) = self.referenceTypes { try!(s.emit_struct_field("referenceTypes", 1, |s| 
val.encode(s))); } Ok(()) }) } } pub trait Resource : Encodable { fn show_resource_type() -> Option<ResourceType> { None } fn show_schema_info() -> SchemaInfo; } pub struct ListResponse<T: Encodable> { schemas: Vec<String>, totalResults: usize, itemsPerPage: Option<usize>, startIndex: Option<usize>, Resources: Vec<T>, } impl<T: Encodable> ListResponse<T> { pub fn new(t: Vec<T>) -> ListResponse<T> { ListResponse { schemas: vec![LIST_RESPONSE.to_string()], totalResults: t.len(), itemsPerPage: None, startIndex: None, Resources: t, } } } impl<T: Encodable> Encodable for ListResponse<T> { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("ListResponse", 5, |s| { try!(s.emit_struct_field("schemas", 0, |s| self.schemas.encode(s))); try!(s.emit_struct_field("totalResults", 1, |s| self.totalResults.encode(s))); if let Some(ref val) = self.itemsPerPage { try!(s.emit_struct_field("itemsPerPage", 1, |s| val.encode(s))); } if let Some(ref val) = self.startIndex { try!(s.emit_struct_field("startIndex", 1, |s| val.encode(s))); } try!(s.emit_struct_field("Resources", 1, |s| self.Resources.encode(s))); Ok(()) }) } } #[derive(RustcEncodable, Debug)] pub struct ResourceType { schemas: Vec<String>, id: String, name: String, description: String, endpoint: String, schema: String, schemaExtensions: Option<Vec<SchemaExtension>>, meta: Meta, } #[derive(RustcEncodable, Debug)] struct SchemaExtension { schema: String, required: bool, } #[derive(RustcEncodable)] pub struct SchemaInfo { id: String, name: String, description: String, attributes: Vec<AttributeSchemaInfo>, } #[derive(Debug)] pub struct ScimError { schemas: Vec<String>, status: String, scimType: Option<ScimErrorKind>, detail: Option<String>, } impl ScimError { pub fn new(status: String, scimType: Option<ScimErrorKind>, detail: Option<String>) -> ScimError { ScimError { schemas: vec![ERROR.to_string()], status: status, scimType: scimType, detail: detail, } } pub fn not_found(detail: Option<String>) -> 
ScimError { ScimError { schemas: vec![ERROR.to_string()], status: "404".to_string(), scimType: None, detail: detail, } } } impl Encodable for ScimError { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("ScimError", 4, |s| { try!(s.emit_struct_field("schemas", 0, |s| self.schemas.encode(s))); try!(s.emit_struct_field("status", 1, |s| self.status.encode(s))); let mut idx = 2; if let Some(ref val) = self.scimType { try!(s.emit_struct_field("scimType", idx, |s| val.encode(s))); idx += 1; } if let Some(ref val) = self.detail { try!(s.emit_struct_field("detail", idx, |s| val.encode(s))); } Ok(()) }) } } impl fmt::Display for ScimError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Debug::fmt(self, f) } } impl Error for ScimError { fn description(&self) -> &str { match self.scimType { Some(ScimErrorKind::invalidFilter) => "ScimError: invalidFilter", Some(ScimErrorKind::tooMany) => "ScimError: tooMany", Some(ScimErrorKind::uniqueness) => "ScimError: uniqueness", Some(ScimErrorKind::mutability) => "ScimError: mutability", Some(ScimErrorKind::invalidSyntax) => "ScimError: invalidSyntax", Some(ScimErrorKind::invalidPath) => "ScimError: invalidPath", Some(ScimErrorKind::noTarget) => "ScimError: noTarget", Some(ScimErrorKind::invalidValue) => "ScimError: invalidValue", Some(ScimErrorKind::invalidVers) => "ScimError: invalidVers", Some(ScimErrorKind::sensitive) => "ScimError: sensitive", None => { match self.status.as_ref() { "400" => "Bad Request", "401" => "Unauthorized", "403" => "Forbidden", "404" => "Not Found", "405" => "Method Not Allowed", "409" => "Conflict", "413" => "Payload Too Large", "500" => "Internal Server Error", "501" => "Not Implemented", "503" => "Service Unavailable", _ => "Unknown", } }, } } } #[derive(RustcEncodable, Debug)] #[allow(non_camel_case_types)] pub enum ScimErrorKind { invalidFilter, tooMany, uniqueness, mutability, invalidSyntax, invalidPath, noTarget, invalidValue, invalidVers, sensitive, } // SCIM 
Specific value types #[derive(Clone, Debug, PartialEq)] pub struct Binary(pub Vec<u8>); impl Encodable for Binary { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { let config = base64::Config { char_set: base64::CharacterSet::UrlSafe, newline: base64::Newline::LF, pad: false, line_length: None, }; s.emit_str(&self.0[..].to_base64(config)) } } impl Decodable for Binary { fn decode<D: Decoder>(d: &mut D) -> Result<Binary, D::Error> { d.read_str() .and_then(|s| s.from_base64() .map_err(|e| d.error(&format!("{}", e))) .and_then(|v| Ok(Binary(v)))) } } #[derive(Clone, Debug, PartialEq)] pub struct DateTime(Tm); impl DateTime { pub fn now_utc() -> DateTime { DateTime(now_utc()) } } impl Encodable for DateTime { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { // FIXME: unwrap() let t = strftime("%Y-%m-%dT%H:%M:%S%Z", &self.0.to_utc()).unwrap(); s.emit_str(&format!("{}", t)) } } impl Decodable for DateTime { fn decode<D: Decoder>(d: &mut D) -> Result<DateTime, D::Error> { d.read_str() .and_then(|s| strptime(&s, "%Y-%m-%dT%H:%M:%S%Z").map_err(|e| d.error(&format!("[error: strptime] {}", e)))) .and_then(|t| Ok(DateTime(t))) } } #[derive(Clone, Debug, PartialEq)] pub struct Meta { pub resourceType: Option<String>, pub created: Option<DateTime>, pub lastModified: Option<DateTime>, pub location: Option<String>, pub version: Option<String>, } impl Meta { pub fn new<S: ToString>(s: S) -> Meta { Meta { resourceType: Some(s.to_string()), created: None, lastModified: None, location: None, version: None, } } } impl Encodable for Meta { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("Meta", 5, |s| { let mut idx = 0; if let Some(ref val) = self.resourceType { try!(s.emit_struct_field("resourceType", idx, |s| val.encode(s))); idx += 1; } if let Some(ref val) = self.created { try!(s.emit_struct_field("created", idx, |s| val.encode(s))); idx += 1; } if let Some(ref val) = self.lastModified { 
try!(s.emit_struct_field("lastModified", idx, |s| val.encode(s))); idx += 1; } if let Some(ref val) = self.location { try!(s.emit_struct_field("location", idx, |s| val.encode(s))); idx += 1; } if let Some(ref val) = self.version { try!(s.emit_struct_field("version", idx, |s| val.encode(s))); } Ok(()) }) } }
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use lazy_static::lazy_static; use alloc::collections::btree_map::BTreeMap; use clap::{App, AppSettings, SubCommand, ArgMatches, Arg}; use alloc::string::String; use super::super::super::qlib::linux_def::*; use super::super::super::qlib::common::*; use super::super::cmd::config::*; use super::super::container::container::*; use super::command::*; #[derive(Default, Debug)] pub struct KillCmd { pub id: String, pub all: bool, pub pid: i32, pub sig: Vec<String> } impl KillCmd { pub fn Init(cmd_matches: &ArgMatches) -> Result<Self> { let pidStr = cmd_matches.value_of("pid").unwrap().to_string(); let pid = match pidStr.parse::<i32>() { Err(_e) => return Err(Error::Common(format!("pid {} cant not be parsed as int type", pidStr))), Ok(v) => v, }; let sig = match cmd_matches.values_of("sig") { None => Vec::new(), Some(iter) => iter.map(|s| s.to_string()).collect(), }; if sig.len() >= 2 { return Err(Error::Common(format!("too many signals {:?}", &sig))); } return Ok(Self { id: cmd_matches.value_of("id").unwrap().to_string(), all: cmd_matches.is_present("all"), pid: pid, sig: sig, }) } pub fn SubCommand<'a, 'b>(common: &CommonArgs<'a, 'b>) -> App<'a, 'b> { return SubCommand::with_name("kill") .setting(AppSettings::ColoredHelp) .arg(&common.id_arg) .arg( Arg::with_name("all") .long("all") .short("a") .help("send the specified signal to all processes inside the 
container"), ) .arg( Arg::with_name("pid") .default_value("0") .long("pid") .takes_value(true) .help("send the specified signal to a specific process"), ) .setting(AppSettings::TrailingVarArg) .arg( Arg::with_name("sig") .multiple(false), ) .about("sends a signal to the container"); } pub fn Run(&self, gCfg: &GlobalConfig) -> Result<()> { info!("Container:: Kill ...."); let mut signal = if self.sig.len() == 0 { "".to_string() } else { self.sig[0].to_string() }; if signal.len() == 0 { signal = "KILL".to_string(); } let sig = ParseSignal(&signal)?; let container = Container::Load(&gCfg.RootDir, &self.id)?; if self.pid != 0 { return container.SignalProcess(sig, self.pid) } else { return container.SignalContainer(sig, self.all) } } } pub fn ParseSignal(s: &str) -> Result<i32> { match s.parse::<i32>() { Ok(n) => { for (_, id) in SIGNAL_MAP.iter() { if n == *id { return Ok(n) } } return Err(Error::Common(format!("unknown signal {}", n))); } Err(_) => (), } let str = s.to_uppercase(); let str = if str.starts_with("SIG") { &s["SIG".len()..] } else { &str }; match SIGNAL_MAP.get(&str) { None => return Err(Error::Common(format!("unknown signal {}", s))), Some(sig) => Ok(*sig), } } lazy_static! 
{ static ref SIGNAL_MAP: BTreeMap<&'static str, i32> = { let mut map = BTreeMap::new(); map.insert("ABRT", Signal::SIGABRT); map.insert("ALRM", Signal::SIGALRM); map.insert("BUS", Signal::SIGBUS); map.insert("CHLD", Signal::SIGCHLD); map.insert("CLD", Signal::SIGCLD); map.insert("CONT", Signal::SIGCONT); map.insert("FPE", Signal::SIGFPE); map.insert("HUP", Signal::SIGHUP); map.insert("ILL", Signal::SIGILL); map.insert("INT", Signal::SIGINT); map.insert("IO", Signal::SIGIO); map.insert("IOT", Signal::SIGIOT); map.insert("KILL", Signal::SIGKILL); map.insert("PIPE", Signal::SIGPIPE); map.insert("POLL", Signal::SIGPOLL); map.insert("PROF", Signal::SIGPROF); map.insert("PWR", Signal::SIGPWR); map.insert("QUIT", Signal::SIGQUIT); map.insert("SEGV", Signal::SIGSEGV); map.insert("STKFLT", Signal::SIGSTKFLT); map.insert("STOP", Signal::SIGSTOP); map.insert("SYS", Signal::SIGSYS); map.insert("TERM", Signal::SIGTERM); map.insert("TRAP", Signal::SIGTRAP); map.insert("TSTP", Signal::SIGTSTP); map.insert("TTIN", Signal::SIGTTIN); map.insert("TTOU", Signal::SIGTTOU); map.insert("URG", Signal::SIGURG); map.insert("USR1", Signal::SIGUSR1); map.insert("USR2", Signal::SIGUSR2); map.insert("VTALRM", Signal::SIGVTALRM); map.insert("WINCH", Signal::SIGWINCH); map.insert("XCPU", Signal::SIGXCPU); map.insert("XFSZ", Signal::SIGXFSZ); map }; }
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass // Test that the compiler considers the 'static bound declared in the // trait. Issue #20890. // pretty-expanded FIXME #23616 trait Foo { type Value: 'static; fn dummy(&self) { } } fn require_static<T: 'static>() {} fn takes_foo<F: Foo>() { require_static::<F::Value>() } fn main() { }
//! Reinterprets the bits of a value of one type as another type. //! A more secure version than raw_transmute because it additionally checks the data sizes. use crate::raw_transmute::unchecked_transmute; /// Reinterprets the bits of a value of one type as another type. /// The function is completely constant, in case of a size mismatch, a panic pops up. pub const unsafe fn transmute_or_panic<D, To>(in_data: D) -> To { let size_d = core::mem::size_of::<D>(); let size_to = core::mem::size_of::<To>(); if size_d != size_to { panic!( concat!( "Error using `transmute_or_panic`, size of type `", stringify!(size_d), "` is not equal to size of type `", stringify!(size_to), "`." ) ); } unchecked_transmute(in_data) } /// Reinterprets the bits of a value of one type as another type. /// The function is completely constant, in case of a size mismatch, a panic pops up. #[inline(always)] pub const unsafe fn inline_transmute_or_panic<D, To>(in_data: D) -> To { let size_d = core::mem::size_of::<D>(); let size_to = core::mem::size_of::<To>(); if size_d != size_to { panic!( concat!( "Error using `transmute_or_panic`, size of type `", stringify!(size_d), "` is not equal to size of type `", stringify!(size_to), "`." ) ); } unchecked_transmute(in_data) } /// Reinterprets the bits of a value of one type as another type. /// The function is completely constant, if the size does not match, an error pops up. pub const unsafe fn transmute_or_errresult<D, To>(in_data: D) -> Result<To, (&'static str, D)> { let size_d = core::mem::size_of::<D>(); let size_to = core::mem::size_of::<To>(); if size_d != size_to { return Err(( concat!( "Error using `transmute_or_errresult`, size of type `", stringify!(size_d), "` is not equal to size of type `", stringify!(size_to), "`." ), in_data )); } Ok(unchecked_transmute(in_data)) } /// Reinterprets the bits of a value of one type as another type. /// The function is completely constant, if the size does not match, an error pops up. 
#[inline(always)] pub const unsafe fn inline_transmute_or_errresult<D, To>(in_data: D) -> Result<To, (&'static str, D)> { let size_d = core::mem::size_of::<D>(); let size_to = core::mem::size_of::<To>(); if size_d != size_to { return Err(( concat!( "Error using `transmute_or_errresult`, size of type `", stringify!(size_d), "` is not equal to size of type `", stringify!(size_to), "`." ), in_data )); } Ok(unchecked_transmute(in_data)) }
use std::collections::HashMap; use crate::{ config::Config, fs::{self, FileInfo}, sync::file_events_buffer::FileEventsBuffer, }; use tokio::{ io::AsyncRead, io::AsyncReadExt, io::AsyncWrite, io::AsyncWriteExt, io::{ReadHalf, WriteHalf}, }; const BUFFER_SIZE: usize = 8 * 1024; pub struct Sender<T: AsyncWrite + Unpin> { stream: T, } impl<T: AsyncWrite + Unpin> Sender<T> { pub fn new(stream: T) -> Self { Self { stream } } /// read `buf_size` from `buf_read` and write into internal stream pub async fn send_file<R: AsyncRead + Unpin>( &mut self, ident: u64, buf_read: &mut R, ) -> crate::Result<()> { let buff = bincode::serialize(&ident)?; self.stream.write_all(&buff).await?; tokio::io::copy(buf_read, &mut self.stream).await?; Ok(()) } } pub(crate) struct Receiver<'a, T: AsyncRead + Unpin> { stream: T, ident: u64, files: HashMap<u64, FileInfo>, config: &'a Config, peer_address: String, } impl<'a, T: AsyncRead + Unpin> Receiver<'a, T> { pub fn new(stream: T, config: &'a Config, peer_address: String) -> Self { Receiver { stream, ident: 0, files: HashMap::new(), config, peer_address, } } async fn read_file<'b>( &mut self, file_info: FileInfo, events_buffer: &'b FileEventsBuffer, ) -> crate::Result<()> { let mut buf = [0u8; BUFFER_SIZE]; let mut buf_size = file_info.size.unwrap() as usize; let mut buf_write = fs::get_temp_file(&file_info, &self.config).await?; while buf_size > 0 { let size = std::cmp::min(BUFFER_SIZE, buf_size); self.stream.read_exact(&mut buf[..size]).await?; buf_write.write(&buf[..size]).await?; buf_size -= size; } buf_write.flush().await?; events_buffer.add_event(&file_info, &self.peer_address); fs::flush_temp_file(&file_info, &self.config).await?; Ok(()) } pub async fn wait_files<'b>( &mut self, events_buffer: &'b FileEventsBuffer, ) -> crate::Result<()> { while self.files.len() > 0 { let mut handle_buf = [0u8; 8]; self.stream.read_exact(&mut handle_buf[..]).await?; let file_handle: u64 = bincode::deserialize(&handle_buf)?; // TODO: handle error match 
self.files.remove(&file_handle) { Some(file_info) => { self.read_file(file_info, events_buffer).await?; } None => { log::error!("file handle {} don't exist", &file_handle) } } } Ok(()) } pub fn prepare_file_transfer(&mut self, file: FileInfo) -> u64 { self.ident += 1; self.files.insert(self.ident, file); self.ident } } pub(crate) fn file_streamers<'a, T>( stream: T, config: &'a Config, peer_address: String, ) -> (Receiver<'a, ReadHalf<T>>, Sender<WriteHalf<T>>) where T: AsyncRead + AsyncWrite, { let (rx, tx) = tokio::io::split(stream); (Receiver::new(rx, config, peer_address), Sender::new(tx)) } #[cfg(test)] mod tests { use std::{ path::{Path, PathBuf}, sync::Arc, }; use super::*; fn sample_config(test_folder: &str) -> Config { Config::parse_content(format!( "port = 8090 [paths] a = \"./tmp/{}\"", test_folder )) .unwrap() } fn create_tmp_file(path: PathBuf, contents: &str) { if !path.parent().unwrap().exists() { std::fs::create_dir_all(path.parent().unwrap()).unwrap(); } std::fs::write(path, contents).unwrap(); } #[tokio::test] async fn file_streamer() -> Result<(), Box<dyn std::error::Error>> { let (rx_stream, tx_stream) = tokio::io::duplex(BUFFER_SIZE); let config = Arc::new(sample_config("file_streamer")); let mut tx = Sender { stream: tx_stream }; let mut rx = Receiver { ident: 0, files: HashMap::new(), stream: rx_stream, config: &config, peer_address: "".into(), }; create_tmp_file("./tmp/file_streamer/file_1".into(), "some content"); let mut file = FileInfo::new( "a".into(), "file_1".into(), Path::new("./tmp/file_streamer/file_1").metadata().unwrap(), ); let mut buffer: &[u8] = b"some file content"; file.size = Some(buffer.len() as u64); let file_handle = rx.prepare_file_transfer(file); tokio::spawn(async move { tx.send_file(file_handle, &mut buffer).await.unwrap(); }); let events_buffer = FileEventsBuffer::new(config.clone()); rx.wait_files(&events_buffer).await.unwrap(); assert_eq!( tokio::fs::read_to_string("./tmp/file_streamer/file_1") .await .unwrap(), 
"some file content" ); return Ok(()); } }
use crate::types::*;
use itertools::Itertools;
use serde::{Deserialize, Serialize};

use super::engine::composition::PosMatcher;

/// Filters the tags of a word by their part-of-speech using a `PosMatcher`.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct POSFilter {
    pub matcher: PosMatcher,
}

impl POSFilter {
    /// True if this filter's matcher matches the tag's POS.
    fn is_word_data_match(&self, data: &WordData) -> bool {
        self.matcher.is_match(&data.pos)
    }

    /// Keeps only the tags whose POS matches this filter.
    fn keep(&self, data: &mut Word) {
        data.tags.retain(|x| self.is_word_data_match(x))
    }

    /// Drops the tags whose POS matches this filter.
    fn remove(&self, data: &mut Word) {
        data.tags.retain(|x| !self.is_word_data_match(x))
    }

    /// True if at least one tag of `data` matches ALL of the given filters.
    pub fn and(filters: &[&Self], data: &Word) -> bool {
        data.tags
            .iter()
            .any(|x| filters.iter().all(|filter| filter.is_word_data_match(x)))
    }

    /// Keeps only the tags matched by at least one filter conjunction
    /// (outer: OR over groups, inner: AND within a group).
    pub fn apply(filters: &[Vec<&Self>], data: &mut Word) {
        data.tags.retain(|x| {
            filters
                .iter()
                .any(|filter| filter.iter().all(|f| f.is_word_data_match(x)))
        })
    }
}

/// One disambiguation action; each variant carries per-group payloads
/// that are zipped against the token groups in `apply`.
#[derive(Serialize, Deserialize)]
pub enum Disambiguation {
    Remove(Vec<either::Either<owned::WordData, POSFilter>>),
    Add(Vec<owned::WordData>),
    Replace(Vec<owned::WordData>),
    Filter(Vec<Option<either::Either<owned::WordData, POSFilter>>>),
    Unify(Vec<Vec<POSFilter>>, Vec<Option<POSFilter>>, Vec<bool>),
    Nop,
}

impl Disambiguation {
    /// Applies this disambiguation to the given token groups, mutating
    /// each token's tag list in place. `retain_last` controls which
    /// lemma is kept when `Filter` empties a token's tags.
    pub fn apply<'t>(&'t self, groups: Vec<Vec<&mut IncompleteToken<'t>>>, retain_last: bool) {
        match self {
            // Remove matching tags: either an exact (POS, optional lemma)
            // match, or everything a POSFilter matches.
            Disambiguation::Remove(data_or_filters) => {
                for (group, data_or_filter) in groups.into_iter().zip(data_or_filters) {
                    for token in group.into_iter() {
                        match data_or_filter {
                            either::Left(data) => {
                                // An empty lemma in the pattern acts as a
                                // wildcard: only POS has to match.
                                token.word.tags.retain(|x| {
                                    !(x.pos == data.pos.as_ref_id()
                                        && (data.lemma.as_ref().is_empty()
                                            || x.lemma == data.lemma.as_ref_id()))
                                });
                            }
                            either::Right(filter) => {
                                filter.remove(&mut token.word);
                            }
                        }
                    }
                }
            }
            // Keep only matching tags; `None` entries leave the group alone.
            Disambiguation::Filter(filters) => {
                for (group, maybe_filter) in groups.into_iter().zip(filters) {
                    if let Some(data_or_filter) = maybe_filter {
                        match data_or_filter {
                            either::Left(limit) => {
                                for token in group.into_iter() {
                                    // Remember the first tag's lemma (or the
                                    // token text) before filtering, so a tag
                                    // can be reconstructed if all are removed.
                                    let last = token.word.tags.get(0).map_or_else(
                                        || token.word.text.clone(),
                                        |x| x.lemma.clone(),
                                    );

                                    token.word.tags.retain(|x| x.pos == limit.pos.as_ref_id());

                                    // Never leave a token without tags: fall
                                    // back to the remembered lemma or the text.
                                    if token.word.tags.is_empty() {
                                        token.word.tags.push(WordData::new(
                                            if retain_last { last } else { token.word.text.clone() },
                                            limit.pos.as_ref_id(),
                                        ));
                                    }
                                }
                            }
                            either::Right(filter) => {
                                for token in group.into_iter() {
                                    filter.keep(&mut token.word)
                                }
                            }
                        }
                    }
                }
            }
            // Append a tag to each token; an empty lemma in the pattern
            // means "use the token text as lemma".
            Disambiguation::Add(datas) => {
                for (group, data) in groups.into_iter().zip(datas) {
                    for token in group.into_iter() {
                        let data = WordData::new(
                            if data.lemma.as_ref().is_empty() {
                                token.word.text.clone()
                            } else {
                                data.lemma.as_ref_id()
                            },
                            data.pos.as_ref_id(),
                        );

                        token.word.tags.push(data);
                        // Drop placeholder tags with an empty POS.
                        token.word.tags.retain(|x| !x.pos.as_ref().is_empty());
                    }
                }
            }
            // Replace all tags of each token with a single new tag.
            Disambiguation::Replace(datas) => {
                for (group, data) in groups.into_iter().zip(datas) {
                    for token in group.into_iter() {
                        let data = WordData::new(
                            if data.lemma.as_ref().is_empty() {
                                token.word.text.clone()
                            } else {
                                data.lemma.as_ref_id()
                            },
                            data.pos.as_ref_id(),
                        );

                        token.word.tags.clear();
                        token.word.tags.push(data);
                    }
                }
            }
            // Unify: keep only the filter combinations that match EVERY
            // masked token, then apply those combinations to the masked
            // tokens (restoring a token's tags if filtering empties them).
            Disambiguation::Unify(filters, disambigs, mask) => {
                // Cartesian product: every way of choosing one filter per slot.
                let filters: Vec<_> = filters.iter().multi_cartesian_product().collect();

                let mut filter_mask: Vec<_> = filters.iter().map(|_| true).collect();

                // First pass: a combination survives only if it matches all
                // tokens selected by the mask.
                for (group, use_mask_val) in groups.iter().zip(mask) {
                    for token in group.iter() {
                        if *use_mask_val {
                            let finalized: Token = (*token).clone().into();

                            for (mask_val, filter) in filter_mask.iter_mut().zip(filters.iter()) {
                                *mask_val = *mask_val && POSFilter::and(filter, &finalized.word);
                            }
                        }
                    }
                }

                // No combination matched everywhere: nothing to unify.
                if !filter_mask.iter().any(|x| *x) {
                    return;
                }

                let to_apply: Vec<_> = filter_mask
                    .iter()
                    .zip(filters)
                    .filter_map(
                        |(mask_val, filter)| {
                            if *mask_val {
                                Some(filter)
                            } else {
                                None
                            }
                        },
                    )
                    .collect();

                // Second pass: restrict each masked token's tags to the
                // surviving combinations (plus an optional extra filter).
                for ((group, disambig), use_mask_val) in
                    groups.into_iter().zip(disambigs).zip(mask)
                {
                    if *use_mask_val {
                        for token in group.into_iter() {
                            let before = token.word.clone();

                            POSFilter::apply(&to_apply, &mut token.word);

                            if let Some(disambig) = disambig {
                                disambig.keep(&mut token.word);
                            }

                            // Roll back rather than leaving a token tagless.
                            if token.word.tags.is_empty() {
                                token.word = before;
                            }
                        }
                    }
                }
            }
            Disambiguation::Nop => {}
        }
    }
}

/// Records how a disambiguation changed one token, for test examples.
#[derive(Debug, Deserialize, Serialize)]
pub struct DisambiguationChange {
    pub text: String,
    pub char_span: (usize, usize),
    pub before: owned::Word,
    pub after: owned::Word,
}

/// A test example: either the input is unchanged or a concrete change.
#[derive(Debug, Serialize, Deserialize)]
pub enum DisambiguationExample {
    Unchanged(String),
    Changed(DisambiguationChange),
}
pub use utils::RcStr; pub use telamon_gen::ast::*; pub use telamon_gen::lexer::{Lexer, LexerPosition, Position, Spanned}; pub use telamon_gen::parser; #[cfg(test)] mod undefined { pub use super::*; /// Missing the set MySet from a Integer. #[test] fn parameter() { assert_eq!( parser::parse_ast(Lexer::new( b"define integer foo($arg in MySet): \"mycode\" end" .to_vec() )) .unwrap() .type_check() .err(), Some(TypeError::Undefined { object_name: Spanned { beg: Position { position: LexerPosition { line: 0, column: 15 }, ..Default::default() }, end: Position { position: LexerPosition { line: 0, column: 18 }, ..Default::default() }, data: String::from("MySet"), } }) ); } } #[cfg(test)] mod redefinition { pub use super::*; /// Redefinition of the foo Integer. #[test] fn integer() { assert_eq!( parser::parse_ast(Lexer::new( b"define integer foo(): \"mycode\" end define integer foo(): \"mycode\" end" .to_vec() )) .unwrap() .type_check() .err(), Some(TypeError::Redefinition { object_kind: Spanned { beg: Position { position: LexerPosition { line: 0, column: 15 }, ..Default::default() }, end: Position { position: LexerPosition { line: 0, column: 18 }, ..Default::default() }, data: Hint::Integer, }, object_name: Spanned { beg: Position { position: LexerPosition { line: 2, column: 29 }, ..Default::default() }, end: Position { position: LexerPosition { line: 2, column: 32 }, ..Default::default() }, data: String::from("foo"), } }) ); } }
//! Prints "Hello, world!" on the host console using semihosting #![no_main] #![no_std] extern crate panic_halt; extern crate stm32f1; #[macro_export] extern crate lcd1602; use stm32f1::stm32f103::{Interrupt, Peripherals, CorePeripherals, gpioa}; use lcd1602::replace; use lcd1602::driver; use cortex_m_rt::entry; use cortex_m_semihosting::{debug, hprintln}; #[entry] fn main() -> ! { let p = Peripherals::take().unwrap(); let lcd = driver::LCD1602::new(&p); lcd.init(driver::LCD16X2_DISPLAY_ON_CURSOR_OFF_BLINK_OFF); lcd.set_backlight(true); lcd.puts("Hello, world!"); //hprintln!("Hello world!").unwrap(); let custom_char: [u8; 8] = [0x0e, 0x1b, 0x11, 0x11, 0x11, 0x11, 0x1f, 0x1f]; let mut cnt = 0; let mut sym = 0; lcd.create_custom_char(0, &custom_char); loop { driver::delay_us(10000); if cnt == 40 { cnt = 0; sym = 1 - sym; } if cnt & 1 == 0 { lcd.putc(if sym == 0 {'.'} else {'+'}) } else { lcd.putc(0 as char) } cnt += 1; } }
extern crate winrt_notification; use winrt_notification::{ Duration, Sound, Toast, }; fn main() { let duration = Duration::Short; let sound = Some(Sound::SMS); Toast::new(Toast::POWERSHELL_APP_ID) .title("first toast") .text1("line1") .duration(duration) .sound(sound) .show() // silently consume errors .expect("notification failed"); Toast::new(Toast::POWERSHELL_APP_ID) .title("another toast") .text1("line1") .duration(duration) .sound(sound) .show() // silently consume errors .expect("notification failed"); }
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Check that functions can modify local state. // pretty-expanded FIXME #23616 #![feature(box_syntax)] fn sums_to(v: Vec<isize> , sum: isize) -> bool { let mut i = 0; let mut sum0 = 0; while i < v.len() { sum0 += v[i]; i += 1; } return sum0 == sum; } fn sums_to_using_uniq(v: Vec<isize> , sum: isize) -> bool { let mut i = 0; let mut sum0: Box<_> = box 0; while i < v.len() { *sum0 += v[i]; i += 1; } return *sum0 == sum; } fn sums_to_using_rec(v: Vec<isize> , sum: isize) -> bool { let mut i = 0; let mut sum0 = F {f: 0}; while i < v.len() { sum0.f += v[i]; i += 1; } return sum0.f == sum; } struct F<T> { f: T } fn sums_to_using_uniq_rec(v: Vec<isize> , sum: isize) -> bool { let mut i = 0; let mut sum0 = F::<Box<_>> {f: box 0}; while i < v.len() { *sum0.f += v[i]; i += 1; } return *sum0.f == sum; } pub fn main() { }
use crate::errors::*; use crate::version::Version; use bytes::*; use std::cell::RefCell; use std::mem; use std::ops::{Add, Sub}; use std::rc::Rc; pub const INT_8: u8 = 0xC8; pub const INT_16: u8 = 0xC9; pub const INT_32: u8 = 0xCA; pub const INT_64: u8 = 0xCB; #[derive(Debug, PartialEq, Eq, Clone)] pub struct BoltInteger { pub value: i64, } impl Add for BoltInteger { type Output = Self; fn add(self, rhs: Self) -> Self::Output { (self.value + rhs.value).into() } } impl Sub for BoltInteger { type Output = Self; fn sub(self, rhs: Self) -> Self::Output { (self.value - rhs.value).into() } } impl BoltInteger { pub fn new(value: i64) -> BoltInteger { BoltInteger { value } } pub fn can_parse(_: Version, input: Rc<RefCell<Bytes>>) -> bool { let marker = input.borrow()[0]; (-16..=127).contains(&(marker as i8)) || marker == INT_8 || marker == INT_16 || marker == INT_32 || marker == INT_64 } } impl BoltInteger { pub fn parse(_: Version, input: Rc<RefCell<Bytes>>) -> Result<BoltInteger> { let mut input = input.borrow_mut(); let value: i64 = match input.get_u8() { marker if (-16..=127).contains(&(marker as i8)) => marker as i64, INT_8 => input.get_i8() as i64, INT_16 => input.get_i16() as i64, INT_32 => input.get_i32() as i64, INT_64 => input.get_i64() as i64, marker => { return Err(Error::InvalidTypeMarker { type_name: "integer", marker, }) } }; Ok(BoltInteger::new(value)) } pub fn into_bytes(self, _: Version) -> Result<Bytes> { let mut bytes = BytesMut::with_capacity(mem::size_of::<u8>() + mem::size_of::<i64>()); match self.value { -16..=127 => bytes.put_u8(self.value as u8), -128..=-17 => { bytes.put_u8(INT_8); bytes.put_i8(self.value as i8); } 128..=32_767 | -32_768..=-129 => { bytes.put_u8(INT_16); bytes.put_i16(self.value as i16); } 32_768..=2_147_483_647 | -2_147_483_648..=-32_769 => { bytes.put_u8(INT_32); bytes.put_i32(self.value as i32); } 2_147_483_648..=9_223_372_036_854_775_807 | -9_223_372_036_854_775_808..=-2_147_483_649 => { bytes.put_u8(INT_64); 
bytes.put_i64(self.value as i64); } } Ok(bytes.freeze()) } } impl Into<BoltInteger> for i64 { fn into(self) -> BoltInteger { BoltInteger::new(self) } } impl Into<i64> for BoltInteger { fn into(self) -> i64 { self.value } } //TODO: use macros impl Into<BoltInteger> for i32 { fn into(self) -> BoltInteger { BoltInteger::new(self as i64) } } #[cfg(test)] mod tests { use super::*; #[test] fn should_serialize_integer() { let bolt_int = BoltInteger::new(42); let b: Bytes = bolt_int.into_bytes(Version::V4_1).unwrap(); assert_eq!(&b[..], &[0x2A]); let bolt_int = BoltInteger::new(-127); let b: Bytes = bolt_int.into_bytes(Version::V4_1).unwrap(); assert_eq!(&b[..], &[INT_8, 0x81]); let bolt_int = BoltInteger::new(129); let b: Bytes = bolt_int.into_bytes(Version::V4_1).unwrap(); assert_eq!(&b[..], &[INT_16, 0x00, 0x81]); let bolt_int = BoltInteger::new(32_768); let b: Bytes = bolt_int.into_bytes(Version::V4_1).unwrap(); assert_eq!(&b[..], &[INT_32, 0x00, 0x00, 0x80, 0x00]); let bolt_int = BoltInteger::new(2_147_483_648); let b: Bytes = bolt_int.into_bytes(Version::V4_1).unwrap(); assert_eq!( &b[..], &[INT_64, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00] ); } #[test] fn should_deserialize_integer() { let b = Rc::new(RefCell::new(Bytes::from_static(&[0x2A]))); let bolt_int: BoltInteger = BoltInteger::parse(Version::V4_1, b).unwrap(); assert_eq!(bolt_int.value, 42); let b = Rc::new(RefCell::new(Bytes::from_static(&[INT_8, 0x81]))); let bolt_int: BoltInteger = BoltInteger::parse(Version::V4_1, b).unwrap(); assert_eq!(bolt_int.value, -127); let b = Rc::new(RefCell::new(Bytes::from_static(&[INT_16, 0x00, 0x81]))); let bolt_int: BoltInteger = BoltInteger::parse(Version::V4_1, b).unwrap(); assert_eq!(bolt_int.value, 129); let b = Rc::new(RefCell::new(Bytes::from_static(&[ INT_32, 0x00, 0x00, 0x80, 0x00, ]))); let bolt_int: BoltInteger = BoltInteger::parse(Version::V4_1, b).unwrap(); assert_eq!(bolt_int.value, 32_768); let b = Rc::new(RefCell::new(Bytes::from_static(&[ INT_64, 0x00, 
0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, ]))); let bolt_int: BoltInteger = BoltInteger::parse(Version::V4_1, b).unwrap(); assert_eq!(bolt_int.value, 2_147_483_648); } }
pub struct Solution; impl Solution { pub fn longest_substring(s: String, k: i32) -> i32 { fn rec(bytes: &[u8], k: usize) -> usize { if bytes.len() < k { return 0; } let mut count = [0; 26]; for &b in bytes { count[(b - b'a') as usize] += 1; } if count.iter().all(|&cnt| cnt == 0 || cnt >= k) { return bytes.len(); } let mut max = 0; let mut from = 0; for i in 0..bytes.len() { if count[(bytes[i] - b'a') as usize] < k { max = max.max(rec(&bytes[from..i], k)); from = i + 1; } } max = max.max(rec(&bytes[from..], k)); max } rec(s.as_bytes(), k as usize) as i32 } } #[test] fn test0395() { fn case(s: &str, k: i32, want: i32) { let got = Solution::longest_substring(s.to_string(), k); assert_eq!(got, want); } case("aaabb", 3, 3); case("ababbc", 2, 5); }
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::{btree_map::Entry as BTreeMapEntry, BTreeMap};
use std::rc::Rc;

use crate::common::FilePosition;
use crate::idl;

use super::errors::ValidationError;
use super::fieldset::Fieldset;
use super::r#enum::Enum;
use super::r#struct::Struct;
use super::r#type::UserDefinedType;
use super::service::Service;
use super::typemap::TypeMap;

/// A validated namespace: its path, the user-defined types, services
/// and child namespaces declared inside it.
#[derive(Default)]
pub struct Namespace {
    pub path: Vec<String>,
    pub types: BTreeMap<String, UserDefinedType>,
    pub services: BTreeMap<String, Service>,
    pub namespaces: BTreeMap<String, Namespace>,
}

impl Namespace {
    /// Builds the root namespace from one or more parsed IDL namespaces
    /// (merging them), then resolves all type references.
    pub(crate) fn from_idl<'a>(
        inss: impl Iterator<Item = &'a crate::idl::Namespace>,
        builtin_types: &HashMap<String, String>,
    ) -> Result<Self, ValidationError> {
        let mut ns = Self::default();
        let mut type_map = TypeMap::new();
        for ins in inss {
            ns.idl_convert(ins, &mut type_map, &builtin_types)?;
        }
        // Second pass: resolve type references collected in type_map.
        ns.resolve(&type_map)?;
        Ok(ns)
    }

    // Registers the type in the global type map and stores it under its
    // unqualified name in this namespace.
    fn add_type(&mut self, type_: UserDefinedType, type_map: &mut TypeMap) {
        type_map.insert(&type_);
        self.types.insert(type_.fqtn().name.to_owned(), type_);
    }

    // First pass: converts one parsed IDL namespace into this namespace,
    // checking for duplicate identifiers and recursing into children.
    fn idl_convert(
        &mut self,
        ins: &crate::idl::Namespace,
        type_map: &mut TypeMap,
        builtin_types: &HashMap<String, String>,
    ) -> Result<(), ValidationError> {
        // Track every declared name with its position so duplicates can
        // be reported against the first occurrence.
        let mut names: BTreeMap<String, FilePosition> = BTreeMap::new();
        for ipart in ins.parts.iter() {
            match names.entry(ipart.name().to_owned()) {
                BTreeMapEntry::Occupied(entry) => {
                    return Err(ValidationError::DuplicateIdentifier {
                        position: entry.get().clone(),
                        identifier: ipart.name().to_owned(),
                    });
                }
                BTreeMapEntry::Vacant(entry) => {
                    entry.insert(ipart.position().clone());
                }
            }
            match ipart {
                idl::NamespacePart::Enum(ienum) => {
                    self.add_type(
                        UserDefinedType::Enum(Rc::new(RefCell::new(Enum::from_idl(
                            &ienum,
                            self,
                            &builtin_types,
                        )))),
                        type_map,
                    );
                }
                idl::NamespacePart::Struct(istruct) => {
                    self.add_type(
                        UserDefinedType::Struct(Rc::new(RefCell::new(Struct::from_idl(
                            &istruct,
                            self,
                            &builtin_types,
                        )))),
                        type_map,
                    );
                }
                idl::NamespacePart::Fieldset(ifieldset) => {
                    self.add_type(
                        UserDefinedType::Fieldset(Rc::new(RefCell::new(Fieldset::from_idl(
                            &ifieldset,
                            self,
                            &builtin_types,
                        )))),
                        type_map,
                    );
                }
                idl::NamespacePart::Service(iservice) => {
                    self.services.insert(
                        iservice.name.clone(),
                        Service::from_idl(iservice, self, &builtin_types),
                    );
                    // This is done in the next step. Since services do not
                    // define any types we can ignore the merging and just
                    // delay processing of the service to the resolve step.
                }
                idl::NamespacePart::Namespace(inamespace) => {
                    // Child namespace inherits this path plus its own name.
                    let mut child_ns = Self {
                        path: self.path.clone(),
                        ..Default::default()
                    };
                    child_ns.path.push(ipart.name().to_owned());
                    child_ns.idl_convert(&inamespace, type_map, &builtin_types)?;
                    self.namespaces.insert(inamespace.name.to_owned(), child_ns);
                }
            };
        }
        Ok(())
    }

    // Second pass: resolves type references in all types, services and
    // child namespaces against the completed type map.
    fn resolve(&mut self, type_map: &TypeMap) -> Result<(), ValidationError> {
        for ud_type in self.types.values_mut() {
            ud_type.resolve(type_map)?;
        }
        for service in self.services.values_mut() {
            service.resolve(type_map)?;
        }
        for child_ns in self.namespaces.values_mut() {
            child_ns.resolve(type_map)?;
        }
        Ok(())
    }

    /// The unqualified name of this namespace (last path segment).
    /// NOTE(review): panics on the root namespace, whose `path` is empty
    /// (constructed via Default) — confirm callers never invoke it there.
    pub fn name(&self) -> &str {
        self.path.last().unwrap()
    }
}
#![allow(dead_code)] #![allow(unused_imports)] #![allow(unused_parens)] extern crate num; extern crate eventual; pub mod mandelbrot; pub struct Generator { center_x: f64, center_y: f64, zoom: f64, res: u32, threads: u32, iterations: u32 } impl Generator { pub fn new(res: u32, threads: u32, iterations: u32) -> Generator { Generator { center_x : 0.0, center_y : 0.0, zoom : 1.0, res: res, threads : threads, iterations: iterations } } fn print_parameters(&mut self) { println!(""); println!("x: {}, y: {}, zoom: {}", self.center_x, self.center_y, self.zoom); } pub fn set_location(&mut self, center_x: f64, center_y: f64, zoom: f64) { self.center_x = center_x; self.center_y = center_y; self.zoom = zoom; } pub fn compute(&mut self) -> Vec<u8>{ self.print_parameters(); mandelbrot::mandelbrot_scene(self.iterations, self.res, self.threads, self.center_x, self.center_y, self.zoom) } pub fn get_resolution(&mut self) -> u32 { self.res } } #[cfg(test)] mod tests { use super::*; #[test] fn scene_is_generated_and_is_correct() { let iterations: u32 = 100; let res: u32 = 100; // Meaning a square resolution of RESxRES pixels let threads: u32 = 1; let zoom: f64 = 1.0; let center_x = 0.0; let center_y = 0.0; let mut generator = Generator::new(res, threads, iterations); generator.set_location(center_x, center_y, zoom); let scene = generator.compute(); assert_eq!(scene[0], 0); assert_eq!(scene[9999], 0); assert!(scene[3150] > 0); } #[test] fn scene_is_generated_and_is_correct_with_multithreading() { let iterations: u32 = 100; let res: u32 = 100; // Meaning a square resolution of RESxRES pixels let threads: u32 = 8; let zoom: f64 = 1.0; let center_x = 0.0; let center_y = 0.0; let mut generator = Generator::new(res, threads, iterations); generator.set_location(center_x, center_y, zoom); let scene = generator.compute(); assert_eq!(scene[0], 0); assert_eq!(scene[9999], 0); assert!(scene[3150] > 0); } }
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use crate::registry::base::Command; use crate::registry::service_context::ServiceContext; use crate::switchboard::base::{ IntlInfo, SettingRequest, SettingRequestResponder, SettingResponse, }; use failure::{format_err, Error}; use fuchsia_async as fasync; use futures::StreamExt; use futures::TryFutureExt; use parking_lot::RwLock; use std::sync::Arc; pub struct IntlController { service_context_handle: Arc<RwLock<ServiceContext>>, } /// Controller for processing switchboard messages surrounding the Intl /// protocol, backed by a number of services, including TimeZone. impl IntlController { pub fn spawn( service_context_handle: Arc<RwLock<ServiceContext>>, ) -> Result<futures::channel::mpsc::UnboundedSender<Command>, Error> { let handle = Arc::new(RwLock::new(Self { service_context_handle: service_context_handle })); let (ctrl_tx, mut ctrl_rx) = futures::channel::mpsc::unbounded::<Command>(); let handle_clone = handle.clone(); fasync::spawn( async move { while let Some(command) = ctrl_rx.next().await { handle_clone.write().process_command(command); } Ok(()) } .unwrap_or_else(|_e: failure::Error| {}), ); return Ok(ctrl_tx); } fn process_command(&self, command: Command) { match command { Command::HandleRequest(request, responder) => match request { SettingRequest::SetTimeZone(id) => { self.set_time_zone(id, responder); } SettingRequest::Get => { self.get(responder); } _ => { responder.send(Err(format_err!("unimplemented"))).ok(); } }, Command::ChangeState(_state) => { // For now, ignore all state changes. 
} } } fn get(&self, responder: SettingRequestResponder) { let service_result = self.service_context_handle.write().connect::<fidl_fuchsia_timezone::TimezoneMarker>(); if service_result.is_err() { responder.send(Err(format_err!("get time zone failed"))).ok(); return; } let proxy = service_result.unwrap(); fasync::spawn( async move { if let Ok(id) = proxy.get_timezone_id().await { responder .send(Ok(Some(SettingResponse::Intl(IntlInfo { time_zone_id: id })))) .ok(); } else { responder.send(Err(format_err!("get time zone failed"))).ok(); } Ok(()) } .unwrap_or_else(|_e: failure::Error| {}), ); } fn set_time_zone(&self, time_zone_id: String, responder: SettingRequestResponder) { let service_result = self.service_context_handle.write().connect::<fidl_fuchsia_timezone::TimezoneMarker>(); if service_result.is_err() { responder.send(Err(format_err!("get time zone failed"))).ok(); return; } let proxy = service_result.unwrap(); fasync::spawn( async move { if let Ok(true) = proxy.set_timezone(time_zone_id.as_str()).await { responder.send(Ok(None)).ok(); } else { responder.send(Err(format_err!("set time zone failed"))).ok(); } Ok(()) } .unwrap_or_else(|_e: failure::Error| {}), ); } }
// auto generated, do not modify.
// created: Mon Feb 22 23:57:02 2016
// src-file: /QtCore/qjsonarray.h
// dst-file: /src/core/qjsonarray.rs
//
// NOTE(review): machine-generated FFI bindings for Qt's QJsonArray. Each Qt
// method is modelled as an "overload trait" (`QJsonArray_<name>`) whose impls
// on argument tuples select the C wrapper function to call, emulating C++
// overloading. Code left byte-identical; comments only.
// header block begin =>
#![feature(libc)]
#![feature(core)]
#![feature(collections)]
extern crate libc;
use self::libc::*;
// <= header block end
// main block begin =>
// <= main block end
// use block begin =>
use std::ops::Deref;
use super::qjsonvalue::*; // 773
// use super::qlist::*; // 775
use super::qstringlist::*; // 773
// <= use block end
// ext block begin =>
// #[link(name = "Qt5Core")]
// #[link(name = "Qt5Gui")]
// #[link(name = "Qt5Widgets")]
// #[link(name = "QtInline")]
extern {
    // Size in bytes of the C++ QJsonArray class, queried from the shim.
    fn QJsonArray_Class_Size() -> c_int;
    // proto: QJsonValue QJsonArray::first();
    fn C_ZNK10QJsonArray5firstEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: bool QJsonArray::empty();
    fn C_ZNK10QJsonArray5emptyEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: QJsonValue QJsonArray::takeAt(int i);
    fn C_ZN10QJsonArray6takeAtEi(qthis: u64 /* *mut c_void*/, arg0: c_int) -> *mut c_void;
    // proto: void QJsonArray::removeLast();
    fn C_ZN10QJsonArray10removeLastEv(qthis: u64 /* *mut c_void*/);
    // proto: void QJsonArray::pop_front();
    fn C_ZN10QJsonArray9pop_frontEv(qthis: u64 /* *mut c_void*/);
    // proto: QVariantList QJsonArray::toVariantList();
    fn C_ZNK10QJsonArray13toVariantListEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: void QJsonArray::~QJsonArray();
    fn C_ZN10QJsonArrayD2Ev(qthis: u64 /* *mut c_void*/);
    // proto: int QJsonArray::size();
    fn C_ZNK10QJsonArray4sizeEv(qthis: u64 /* *mut c_void*/) -> c_int;
    // proto: int QJsonArray::count();
    fn C_ZNK10QJsonArray5countEv(qthis: u64 /* *mut c_void*/) -> c_int;
    // proto: void QJsonArray::QJsonArray();
    fn C_ZN10QJsonArrayC2Ev() -> u64;
    // proto: QJsonValue QJsonArray::at(int i);
    fn C_ZNK10QJsonArray2atEi(qthis: u64 /* *mut c_void*/, arg0: c_int) -> *mut c_void;
    // proto: void QJsonArray::pop_back();
    fn C_ZN10QJsonArray8pop_backEv(qthis: u64 /* *mut c_void*/);
    // proto: bool QJsonArray::isEmpty();
    fn C_ZNK10QJsonArray7isEmptyEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: static QJsonArray QJsonArray::fromStringList(const QStringList & list);
    fn C_ZN10QJsonArray14fromStringListERK11QStringList(arg0: *mut c_void) -> *mut c_void;
    // proto: QJsonValue QJsonArray::last();
    fn C_ZNK10QJsonArray4lastEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: void QJsonArray::removeFirst();
    fn C_ZN10QJsonArray11removeFirstEv(qthis: u64 /* *mut c_void*/);
    // proto: void QJsonArray::removeAt(int i);
    fn C_ZN10QJsonArray8removeAtEi(qthis: u64 /* *mut c_void*/, arg0: c_int);
}
// <= ext block end
// body block begin =>
// class sizeof(QJsonArray)=16
#[derive(Default)]
pub struct QJsonArray {
    // qbase: None,
    // Raw pointer (stored as u64) to the underlying C++ QJsonArray instance.
    pub qclsinst: u64 /* *mut c_void*/,
}

impl /*struct*/ QJsonArray {
    // Wraps an existing C++ instance pointer; does not take ownership.
    pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QJsonArray {
        return QJsonArray{qclsinst: qthis, ..Default::default()};
    }
}

// proto: QJsonValue QJsonArray::first();
impl /*struct*/ QJsonArray {
    pub fn first<RetType, T: QJsonArray_first<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.first(self);
    }
}
pub trait QJsonArray_first<RetType> {
    fn first(self , rsthis: & QJsonArray) -> RetType;
}
// proto: QJsonValue QJsonArray::first();
impl<'a> /*trait*/ QJsonArray_first<QJsonValue> for () {
    fn first(self , rsthis: & QJsonArray) -> QJsonValue {
        let mut ret = unsafe {C_ZNK10QJsonArray5firstEv(rsthis.qclsinst)};
        let mut ret1 = QJsonValue::inheritFrom(ret as u64);
        return ret1;
    }
}

// proto: bool QJsonArray::empty();
impl /*struct*/ QJsonArray {
    pub fn empty<RetType, T: QJsonArray_empty<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.empty(self);
    }
}
pub trait QJsonArray_empty<RetType> {
    fn empty(self , rsthis: & QJsonArray) -> RetType;
}
// proto: bool QJsonArray::empty();
impl<'a> /*trait*/ QJsonArray_empty<i8> for () {
    fn empty(self , rsthis: & QJsonArray) -> i8 {
        // C++ bool comes back as c_char; 0 = false, non-zero = true.
        let mut ret = unsafe {C_ZNK10QJsonArray5emptyEv(rsthis.qclsinst)};
        return ret as i8;
    }
}

// proto: QJsonValue QJsonArray::takeAt(int i);
impl /*struct*/ QJsonArray {
    pub fn takeAt<RetType, T: QJsonArray_takeAt<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.takeAt(self);
    }
}
pub trait QJsonArray_takeAt<RetType> {
    fn takeAt(self , rsthis: & QJsonArray) -> RetType;
}
// proto: QJsonValue QJsonArray::takeAt(int i);
impl<'a> /*trait*/ QJsonArray_takeAt<QJsonValue> for (i32) {
    fn takeAt(self , rsthis: & QJsonArray) -> QJsonValue {
        // `self` here is the index argument (i32), not the array.
        let arg0 = self as c_int;
        let mut ret = unsafe {C_ZN10QJsonArray6takeAtEi(rsthis.qclsinst, arg0)};
        let mut ret1 = QJsonValue::inheritFrom(ret as u64);
        return ret1;
    }
}

// proto: void QJsonArray::removeLast();
impl /*struct*/ QJsonArray {
    pub fn removeLast<RetType, T: QJsonArray_removeLast<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.removeLast(self);
    }
}
pub trait QJsonArray_removeLast<RetType> {
    fn removeLast(self , rsthis: & QJsonArray) -> RetType;
}
// proto: void QJsonArray::removeLast();
impl<'a> /*trait*/ QJsonArray_removeLast<()> for () {
    fn removeLast(self , rsthis: & QJsonArray) -> () {
        unsafe {C_ZN10QJsonArray10removeLastEv(rsthis.qclsinst)};
    }
}

// proto: void QJsonArray::pop_front();
impl /*struct*/ QJsonArray {
    pub fn pop_front<RetType, T: QJsonArray_pop_front<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.pop_front(self);
    }
}
pub trait QJsonArray_pop_front<RetType> {
    fn pop_front(self , rsthis: & QJsonArray) -> RetType;
}
// proto: void QJsonArray::pop_front();
impl<'a> /*trait*/ QJsonArray_pop_front<()> for () {
    fn pop_front(self , rsthis: & QJsonArray) -> () {
        unsafe {C_ZN10QJsonArray9pop_frontEv(rsthis.qclsinst)};
    }
}

// proto: QVariantList QJsonArray::toVariantList();
impl /*struct*/ QJsonArray {
    pub fn toVariantList<RetType, T: QJsonArray_toVariantList<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.toVariantList(self);
    }
}
pub trait QJsonArray_toVariantList<RetType> {
    fn toVariantList(self , rsthis: & QJsonArray) -> RetType;
}
// proto: QVariantList QJsonArray::toVariantList();
impl<'a> /*trait*/ QJsonArray_toVariantList<u64> for () {
    fn toVariantList(self , rsthis: & QJsonArray) -> u64 {
        // QVariantList has no Rust wrapper yet; the raw pointer is returned.
        let mut ret = unsafe {C_ZNK10QJsonArray13toVariantListEv(rsthis.qclsinst)};
        return ret as u64;
    }
}

// proto: void QJsonArray::~QJsonArray();
impl /*struct*/ QJsonArray {
    pub fn free<RetType, T: QJsonArray_free<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.free(self);
    }
}
pub trait QJsonArray_free<RetType> {
    fn free(self , rsthis: & QJsonArray) -> RetType;
}
// proto: void QJsonArray::~QJsonArray();
impl<'a> /*trait*/ QJsonArray_free<()> for () {
    fn free(self , rsthis: & QJsonArray) -> () {
        // Runs the C++ destructor; must not be called twice on one instance.
        unsafe {C_ZN10QJsonArrayD2Ev(rsthis.qclsinst)};
    }
}

// proto: int QJsonArray::size();
impl /*struct*/ QJsonArray {
    pub fn size<RetType, T: QJsonArray_size<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.size(self);
    }
}
pub trait QJsonArray_size<RetType> {
    fn size(self , rsthis: & QJsonArray) -> RetType;
}
// proto: int QJsonArray::size();
impl<'a> /*trait*/ QJsonArray_size<i32> for () {
    fn size(self , rsthis: & QJsonArray) -> i32 {
        let mut ret = unsafe {C_ZNK10QJsonArray4sizeEv(rsthis.qclsinst)};
        return ret as i32;
    }
}

// proto: int QJsonArray::count();
impl /*struct*/ QJsonArray {
    pub fn count<RetType, T: QJsonArray_count<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.count(self);
    }
}
pub trait QJsonArray_count<RetType> {
    fn count(self , rsthis: & QJsonArray) -> RetType;
}
// proto: int QJsonArray::count();
impl<'a> /*trait*/ QJsonArray_count<i32> for () {
    fn count(self , rsthis: & QJsonArray) -> i32 {
        let mut ret = unsafe {C_ZNK10QJsonArray5countEv(rsthis.qclsinst)};
        return ret as i32;
    }
}

// proto: void QJsonArray::QJsonArray();
impl /*struct*/ QJsonArray {
    pub fn new<T: QJsonArray_new>(value: T) -> QJsonArray {
        let rsthis = value.new();
        return rsthis;
    }
}
pub trait QJsonArray_new {
    fn new(self) -> QJsonArray;
}
// proto: void QJsonArray::QJsonArray();
impl<'a> /*trait*/ QJsonArray_new for () {
    fn new(self) -> QJsonArray {
        let ctysz: c_int = unsafe{QJsonArray_Class_Size()};
        // NOTE(review): this calloc'd placeholder is never used or freed —
        // looks like leaked generator output; confirm/fix upstream in the
        // binding generator, not here.
        let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64;
        let qthis: u64 = unsafe {C_ZN10QJsonArrayC2Ev()};
        let rsthis = QJsonArray{qclsinst: qthis, ..Default::default()};
        return rsthis;
    }
}

// proto: QJsonValue QJsonArray::at(int i);
impl /*struct*/ QJsonArray {
    pub fn at<RetType, T: QJsonArray_at<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.at(self);
    }
}
pub trait QJsonArray_at<RetType> {
    fn at(self , rsthis: & QJsonArray) -> RetType;
}
// proto: QJsonValue QJsonArray::at(int i);
impl<'a> /*trait*/ QJsonArray_at<QJsonValue> for (i32) {
    fn at(self , rsthis: & QJsonArray) -> QJsonValue {
        let arg0 = self as c_int;
        let mut ret = unsafe {C_ZNK10QJsonArray2atEi(rsthis.qclsinst, arg0)};
        let mut ret1 = QJsonValue::inheritFrom(ret as u64);
        return ret1;
    }
}

// proto: void QJsonArray::pop_back();
impl /*struct*/ QJsonArray {
    pub fn pop_back<RetType, T: QJsonArray_pop_back<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.pop_back(self);
    }
}
pub trait QJsonArray_pop_back<RetType> {
    fn pop_back(self , rsthis: & QJsonArray) -> RetType;
}
// proto: void QJsonArray::pop_back();
impl<'a> /*trait*/ QJsonArray_pop_back<()> for () {
    fn pop_back(self , rsthis: & QJsonArray) -> () {
        unsafe {C_ZN10QJsonArray8pop_backEv(rsthis.qclsinst)};
    }
}

// proto: bool QJsonArray::isEmpty();
impl /*struct*/ QJsonArray {
    pub fn isEmpty<RetType, T: QJsonArray_isEmpty<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isEmpty(self);
    }
}
pub trait QJsonArray_isEmpty<RetType> {
    fn isEmpty(self , rsthis: & QJsonArray) -> RetType;
}
// proto: bool QJsonArray::isEmpty();
impl<'a> /*trait*/ QJsonArray_isEmpty<i8> for () {
    fn isEmpty(self , rsthis: & QJsonArray) -> i8 {
        let mut ret = unsafe {C_ZNK10QJsonArray7isEmptyEv(rsthis.qclsinst)};
        return ret as i8;
    }
}

// proto: static QJsonArray QJsonArray::fromStringList(const QStringList & list);
impl /*struct*/ QJsonArray {
    // Static method: note the `_s` suffix and the absence of `self`.
    pub fn fromStringList_s<RetType, T: QJsonArray_fromStringList_s<RetType>>( overload_args: T) -> RetType {
        return overload_args.fromStringList_s();
    }
}
pub trait QJsonArray_fromStringList_s<RetType> {
    fn fromStringList_s(self ) -> RetType;
}
// proto: static QJsonArray QJsonArray::fromStringList(const QStringList & list);
impl<'a> /*trait*/ QJsonArray_fromStringList_s<QJsonArray> for (&'a QStringList) {
    fn fromStringList_s(self ) -> QJsonArray {
        let arg0 = self.qclsinst as *mut c_void;
        let mut ret = unsafe {C_ZN10QJsonArray14fromStringListERK11QStringList(arg0)};
        let mut ret1 = QJsonArray::inheritFrom(ret as u64);
        return ret1;
    }
}

// proto: QJsonValue QJsonArray::last();
impl /*struct*/ QJsonArray {
    pub fn last<RetType, T: QJsonArray_last<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.last(self);
    }
}
pub trait QJsonArray_last<RetType> {
    fn last(self , rsthis: & QJsonArray) -> RetType;
}
// proto: QJsonValue QJsonArray::last();
impl<'a> /*trait*/ QJsonArray_last<QJsonValue> for () {
    fn last(self , rsthis: & QJsonArray) -> QJsonValue {
        let mut ret = unsafe {C_ZNK10QJsonArray4lastEv(rsthis.qclsinst)};
        let mut ret1 = QJsonValue::inheritFrom(ret as u64);
        return ret1;
    }
}

// proto: void QJsonArray::removeFirst();
impl /*struct*/ QJsonArray {
    pub fn removeFirst<RetType, T: QJsonArray_removeFirst<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.removeFirst(self);
    }
}
pub trait QJsonArray_removeFirst<RetType> {
    fn removeFirst(self , rsthis: & QJsonArray) -> RetType;
}
// proto: void QJsonArray::removeFirst();
impl<'a> /*trait*/ QJsonArray_removeFirst<()> for () {
    fn removeFirst(self , rsthis: & QJsonArray) -> () {
        unsafe {C_ZN10QJsonArray11removeFirstEv(rsthis.qclsinst)};
    }
}

// proto: void QJsonArray::removeAt(int i);
impl /*struct*/ QJsonArray {
    pub fn removeAt<RetType, T: QJsonArray_removeAt<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.removeAt(self);
    }
}
pub trait QJsonArray_removeAt<RetType> {
    fn removeAt(self , rsthis: & QJsonArray) -> RetType;
}
// proto: void QJsonArray::removeAt(int i);
impl<'a> /*trait*/ QJsonArray_removeAt<()> for (i32) {
    fn removeAt(self , rsthis: & QJsonArray) -> () {
        let arg0 = self as c_int;
        unsafe {C_ZN10QJsonArray8removeAtEi(rsthis.qclsinst, arg0)};
    }
}
// <= body block end
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
// NOTE(review): gir-generated bindings for WebKitDOMHTMLScriptElement.
// Left byte-identical; comments only.

use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::GString;
use glib::StaticType;
use glib::Value;
use glib_sys;
use gobject_sys;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
use webkit2_webextension_sys;
use DOMElement;
use DOMEventTarget;
use DOMHTMLElement;
use DOMNode;
use DOMObject;

glib_wrapper! {
    pub struct DOMHTMLScriptElement(Object<webkit2_webextension_sys::WebKitDOMHTMLScriptElement, webkit2_webextension_sys::WebKitDOMHTMLScriptElementClass, DOMHTMLScriptElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;

    match fn {
        get_type => || webkit2_webextension_sys::webkit_dom_html_script_element_get_type(),
    }
}

pub const NONE_DOMHTML_SCRIPT_ELEMENT: Option<&DOMHTMLScriptElement> = None;

/// Accessors, property helpers and notify-signal connectors for the
/// `<script>` DOM element. Most entry points are deprecated from v2_22 on.
pub trait DOMHTMLScriptElementExt: 'static {
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_charset(&self) -> Option<GString>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_defer(&self) -> bool;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_event(&self) -> Option<GString>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_html_for(&self) -> Option<GString>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_src(&self) -> Option<GString>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_text(&self) -> Option<GString>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_type_attr(&self) -> Option<GString>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    #[cfg(any(feature = "v2_16", feature = "dox"))]
    fn set_charset(&self, value: &str);

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_defer(&self, value: bool);

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_event(&self, value: &str);

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_html_for(&self, value: &str);

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_src(&self, value: &str);

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_text(&self, value: &str);

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_type_attr(&self, value: &str);

    fn set_property_charset(&self, charset: Option<&str>);

    fn get_property_type(&self) -> Option<GString>;

    fn set_property_type(&self, type_: Option<&str>);

    fn connect_property_charset_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_defer_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_event_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_html_for_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_src_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_text_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}

impl<O: IsA<DOMHTMLScriptElement>> DOMHTMLScriptElementExt for O {
    // Getters use `from_glib_full`: the C side transfers string ownership.
    fn get_charset(&self) -> Option<GString> {
        unsafe { from_glib_full(webkit2_webextension_sys::webkit_dom_html_script_element_get_charset(self.as_ref().to_glib_none().0)) }
    }

    fn get_defer(&self) -> bool {
        unsafe { from_glib(webkit2_webextension_sys::webkit_dom_html_script_element_get_defer(self.as_ref().to_glib_none().0)) }
    }

    fn get_event(&self) -> Option<GString> {
        unsafe { from_glib_full(webkit2_webextension_sys::webkit_dom_html_script_element_get_event(self.as_ref().to_glib_none().0)) }
    }

    fn get_html_for(&self) -> Option<GString> {
        unsafe { from_glib_full(webkit2_webextension_sys::webkit_dom_html_script_element_get_html_for(self.as_ref().to_glib_none().0)) }
    }

    fn get_src(&self) -> Option<GString> {
        unsafe { from_glib_full(webkit2_webextension_sys::webkit_dom_html_script_element_get_src(self.as_ref().to_glib_none().0)) }
    }

    fn get_text(&self) -> Option<GString> {
        unsafe { from_glib_full(webkit2_webextension_sys::webkit_dom_html_script_element_get_text(self.as_ref().to_glib_none().0)) }
    }

    fn get_type_attr(&self) -> Option<GString> {
        unsafe { from_glib_full(webkit2_webextension_sys::webkit_dom_html_script_element_get_type_attr(self.as_ref().to_glib_none().0)) }
    }

    #[cfg(any(feature = "v2_16", feature = "dox"))]
    fn set_charset(&self, value: &str) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_charset(self.as_ref().to_glib_none().0, value.to_glib_none().0);
        }
    }

    fn set_defer(&self, value: bool) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_defer(self.as_ref().to_glib_none().0, value.to_glib());
        }
    }

    fn set_event(&self, value: &str) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_event(self.as_ref().to_glib_none().0, value.to_glib_none().0);
        }
    }

    fn set_html_for(&self, value: &str) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_html_for(self.as_ref().to_glib_none().0, value.to_glib_none().0);
        }
    }

    fn set_src(&self, value: &str) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_src(self.as_ref().to_glib_none().0, value.to_glib_none().0);
        }
    }

    fn set_text(&self, value: &str) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_text(self.as_ref().to_glib_none().0, value.to_glib_none().0);
        }
    }

    fn set_type_attr(&self, value: &str) {
        unsafe {
            webkit2_webextension_sys::webkit_dom_html_script_element_set_type_attr(self.as_ref().to_glib_none().0, value.to_glib_none().0);
        }
    }

    // Raw GObject property access (used where no C accessor is generated).
    fn set_property_charset(&self, charset: Option<&str>) {
        unsafe {
            gobject_sys::g_object_set_property(
                self.to_glib_none().0 as *mut gobject_sys::GObject,
                b"charset\0".as_ptr() as *const _,
                Value::from(charset).to_glib_none().0,
            );
        }
    }

    fn get_property_type(&self) -> Option<GString> {
        unsafe {
            let mut value = Value::from_type(<GString as StaticType>::static_type());
            gobject_sys::g_object_get_property(
                self.to_glib_none().0 as *mut gobject_sys::GObject,
                b"type\0".as_ptr() as *const _,
                value.to_glib_none_mut().0,
            );
            value
                .get()
                .expect("Return Value for property `type` getter")
        }
    }

    fn set_property_type(&self, type_: Option<&str>) {
        unsafe {
            gobject_sys::g_object_set_property(
                self.to_glib_none().0 as *mut gobject_sys::GObject,
                b"type\0".as_ptr() as *const _,
                Value::from(type_).to_glib_none().0,
            );
        }
    }

    // Each connect_* fn registers a C trampoline that recovers the boxed
    // Rust closure from the user-data pointer and invokes it.
    fn connect_property_charset_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_charset_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::charset\0".as_ptr() as *const _,
                Some(transmute(notify_charset_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_defer_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_defer_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::defer\0".as_ptr() as *const _,
                Some(transmute(notify_defer_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_event_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_event_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::event\0".as_ptr() as *const _,
                Some(transmute(notify_event_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_html_for_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_html_for_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::html-for\0".as_ptr() as *const _,
                Some(transmute(notify_html_for_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_src_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_src_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::src\0".as_ptr() as *const _,
                Some(transmute(notify_src_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_text_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_text_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::text\0".as_ptr() as *const _,
                Some(transmute(notify_text_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_type_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMHTMLScriptElement,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMHTMLScriptElement>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMHTMLScriptElement::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::type\0".as_ptr() as *const _,
                Some(transmute(notify_type_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }
}

impl fmt::Display for DOMHTMLScriptElement {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "DOMHTMLScriptElement")
    }
}
#![cfg_attr(not(feature = "std"), no_std)]

//! A minimal balance-adjustment pallet: `reward` mints funds into an account
//! and `deduct` withdraws them (keeping the account alive).

use frame_support::{
    decl_module, decl_event,dispatch,transactional,
    traits::{Currency, ExistenceRequirement::{KeepAlive},tokens::WithdrawReasons},
};
use frame_system::{ensure_root};

/// Shorthand for the balance type of the configured `Currency`.
type BalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;

pub trait Config: frame_system::Config {
    /// The overarching runtime event type.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;
    /// Currency implementation used to deposit and withdraw balances.
    type Currency: Currency<Self::AccountId>;
}

decl_event!(
    pub enum Event<T> where
        AccountId = <T as frame_system::Config>::AccountId,
        Balance = BalanceOf<T>
    {
        /// Funds were deposited into an account. [who, amount]
        Reward(AccountId,Balance),
        /// Funds were withdrawn from an account. [who, amount]
        Deduct(AccountId,Balance),
    }
);

decl_module! {
    /// Nicks module declaration.
    pub struct Module<T: Config> for enum Call where origin: T::Origin {
        fn deposit_event() = default;

        /// Mints `value` new funds into `who`'s account.
        ///
        /// SECURITY NOTE(review): the `ensure_root` origin check is commented
        /// out, so any origin can mint funds. Confirm this is intentional
        /// (e.g. testnet-only) before shipping.
        #[weight = 0]
        #[transactional]
        pub fn reward(origin,value: BalanceOf<T>,who: T::AccountId) -> dispatch::DispatchResult {
            //ensure_root(origin)?;
            T::Currency::deposit_creating(&who, value);
            Self::deposit_event(RawEvent::Reward(who,value));
            Ok(())
        }

        /// Withdraws `value` from `who`'s account, refusing to kill it
        /// (`KeepAlive`). Fails if the balance is insufficient.
        ///
        /// SECURITY NOTE(review): origin check is disabled here as well.
        #[weight = 0]
        pub fn deduct(origin,value: BalanceOf<T>,who: T::AccountId) -> dispatch::DispatchResult {
            //ensure_root(origin)?;
            T::Currency::withdraw(&who, value,WithdrawReasons::FEE,KeepAlive)?;
            Self::deposit_event(RawEvent::Deduct(who,value));
            Ok(())
        }
    }
}
//! Independent traits
//!
//! `Rng` extends `CryptoRng`, and a blanket impl makes every `Rng` usable
//! as its base `CryptoRng` automatically and safely.
//!
//! Note: this *only* considers the next_u32 member function
//!
//! Thoughts: works nicely. Implementing each trait requires only one impl
//! block. It seems we have a choice: have two variants of adaptors
//! (`as_rng` and `as_rng_ref`) or add an implicit impl which may panic.

// ——— traits ———

/// Error type for fallible generators.
#[derive(Debug)]
struct CryptoError;

trait CryptoRng {
    fn try_next_u32(&mut self) -> Result<u32, CryptoError>;
}

trait Rng: CryptoRng {
    fn next_u32(&mut self) -> u32;
}

// ——— impls ———

/// Blanket impl: an infallible `Rng` trivially satisfies its base
/// `CryptoRng` by wrapping the value in `Ok`.
impl<R: Rng + ?Sized> CryptoRng for R {
    fn try_next_u32(&mut self) -> Result<u32, CryptoError> {
        Ok(self.next_u32())
    }
}

// ——— adaptor ———

/// Borrowing adaptor: wraps `&mut CR` so a fallible `CryptoRng` can be
/// driven through the infallible `Rng` interface (panics on error).
/// A consuming variant (`as_rng`) would also be possible.
fn as_rng_ref<'r, CR: 'r + CryptoRng + ?Sized>(inner: &'r mut CR) -> AsRRng<'r, CR> {
    AsRRng { rng: inner }
}

struct AsRRng<'r, CR: 'r + CryptoRng + ?Sized> {
    rng: &'r mut CR,
}

impl<'r, CR: CryptoRng + ?Sized> Rng for AsRRng<'r, CR> {
    fn next_u32(&mut self) -> u32 {
        self.rng.try_next_u32().unwrap()
    }
}

// ——— test RNGs ———

/// A non-crypto Rng: always yields its seed.
#[derive(Debug)]
struct TestRng(u32);

impl Rng for TestRng {
    fn next_u32(&mut self) -> u32 {
        self.0
    }
}

/// A CryptoRng: always yields its seed, never fails.
#[derive(Debug)]
struct TestCRng(u32);

impl CryptoRng for TestCRng {
    fn try_next_u32(&mut self) -> Result<u32, CryptoError> {
        Ok(self.0)
    }
}

// ——— usage ———

fn main() {
    let mut t = TestRng(13);
    let mut c = TestCRng(42);
    println!("t: {:?} impls Rng", t);
    println!("c: {:?} impls CryptoRng", c);

    // Static dispatch: both functions are callable on both concrete types.
    println!("t, static dispatch, using CryptoRng: {:?}", t.try_next_u32());
    println!("t, static dispatch, using Rng: {:?}", t.next_u32());
    println!("c, static dispatch, using CryptoRng: {:?}", c.try_next_u32());
    // as_rng(c) also works, but consumes c:
    println!("c, static dispatch, using Rng: {:?}", as_rng_ref(&mut c).next_u32());

    // Dynamic dispatch through `CryptoRng` trait objects.
    {
        let dyn_c: &mut dyn CryptoRng = &mut c;
        println!("c, dynamic dispatch, using CryptoRng: {:?}", dyn_c.try_next_u32());
        let dyn_t: &mut dyn CryptoRng = &mut t;
        println!("t, dynamic dispatch, using CryptoRng: {:?}", dyn_t.try_next_u32());
    }

    // Dynamic dispatch through `Rng` (adapting the CryptoRng object).
    {
        let mut adapted = as_rng_ref(&mut c as &mut dyn CryptoRng);
        let dyn_t: &mut dyn Rng = &mut t;
        println!("c, dynamic dispatch, using Rng: {:?}", adapted.next_u32());
        println!("t, dynamic dispatch, using Rng: {:?}", dyn_t.next_u32());
    }
}
use tendermint_light_client::{
    components::{
        io::{AtHeight, Io},
        scheduler,
        verifier::ProdVerifier,
    },
    fork_detector::ProdForkDetector,
    light_client::{self, LightClient},
    peer_list::PeerList,
    state::State,
    store::LightStore,
    supervisor::{Handle, Instance, Supervisor},
    types::{LightBlock, PeerId, Status, Time},
};

use std::collections::HashMap;
use std::convert::TryInto;
use std::{
    fs,
    path::{Path, PathBuf},
    time::Duration,
};

use tendermint_light_client::store::memory::MemoryStore;
use tendermint_light_client::tests::{
    AnonLightBlock, MockClock, MockEvidenceReporter, MockIo, TestBisection, TrustOptions,
};

// Root directory holding the JSON conformance fixtures.
const TEST_FILES_PATH: &str = "./tests/support/";

/// Reads a fixture file into a String; panics on I/O failure (test-only).
fn read_json_fixture(file: impl AsRef<Path>) -> String {
    fs::read_to_string(file).unwrap()
}

/// Loads and deserializes every testcase JSON under `TEST_FILES_PATH/dir`,
/// converting the anonymous fixture form into concrete `LightBlock`s.
fn load_multi_peer_testcases(dir: &str) -> Vec<TestBisection<LightBlock>> {
    let paths = fs::read_dir(PathBuf::from(TEST_FILES_PATH).join(dir)).unwrap();
    paths
        .flatten()
        .map(|entry| read_json_fixture(entry.path()))
        .map(|contents| serde_json::from_str::<TestBisection<AnonLightBlock>>(&contents).unwrap())
        .map(|testcase| testcase.into())
        .collect::<Vec<TestBisection<LightBlock>>>()
}

/// Builds a light-client `Instance` for `peer_id`, seeding its in-memory
/// store with the block fetched at `trust_options.height`, marked Trusted.
fn make_instance(peer_id: PeerId, trust_options: TrustOptions, io: MockIo, now: Time) -> Instance {
    let trusted_height = trust_options.height.value();
    let trusted_state = io
        .fetch_light_block(peer_id, AtHeight::At(trusted_height))
        .expect("could not 'request' light block");

    let mut light_store = MemoryStore::new();
    light_store.insert(trusted_state, Status::Trusted);

    let state = State {
        light_store: Box::new(light_store),
        verification_trace: HashMap::new(),
    };

    let options = light_client::Options {
        trust_threshold: trust_options.trust_level,
        trusting_period: trust_options.period.into(),
        // Fixed drift tolerance for all test instances.
        clock_drift: Duration::from_secs(10),
    };

    let verifier = ProdVerifier::default();
    let clock = MockClock { now };
    let scheduler = scheduler::basic_bisecting_schedule;

    let light_client = LightClient::new(peer_id, options, clock, scheduler, verifier, io);

    Instance::new(light_client, state)
}

/// Runs one multi-peer bisection testcase end to end through a `Supervisor`
/// (primary + witnesses) and asserts the outcome matches the fixture's
/// expectation (`expected_output == "error"` means verification must fail).
fn run_multipeer_test(tc: TestBisection<LightBlock>) {
    let primary = tc.primary.lite_blocks[0].provider;

    println!(
        "Running Test Case: {}\nwith Primary Peer: {:?}",
        tc.description, primary
    );

    let expects_err = match &tc.expected_output {
        Some(eo) => eo.eq("error"),
        None => false,
    };

    let io = MockIo::new(tc.primary.chain_id, tc.primary.lite_blocks);
    let primary_instance = make_instance(primary, tc.trust_options.clone(), io.clone(), tc.now);

    let mut peer_list = PeerList::builder();
    peer_list = peer_list.primary(primary, primary_instance);

    // Each witness gets its own MockIo and instance, sharing trust options.
    for provider in tc.witnesses.into_iter() {
        let peer_id = provider.value.lite_blocks[0].provider;
        println!("Witness: {}", peer_id);
        let io = MockIo::new(provider.value.chain_id, provider.value.lite_blocks);
        let instance = make_instance(peer_id, tc.trust_options.clone(), io.clone(), tc.now);
        peer_list = peer_list.witness(peer_id, instance);
    }

    let mut supervisor = Supervisor::new(
        peer_list.build(),
        ProdForkDetector::default(),
        MockEvidenceReporter::new(),
    );

    // TODO: Add method to `Handle` to get a copy of the current peer list

    let handle = supervisor.handle();

    // Run the supervisor's event loop on its own thread; we drive it via
    // `handle` from the test thread.
    std::thread::spawn(|| supervisor.run());

    let target_height = tc.height_to_verify.try_into().unwrap();

    match handle.verify_to_target(target_height) {
        Ok(new_state) => {
            // Check that the expected state and new_state match
            let untrusted_light_block = io
                .fetch_light_block(primary, AtHeight::At(target_height))
                .expect("header at untrusted height not found");

            let expected_state = untrusted_light_block;
            assert_eq!(new_state.height(), expected_state.height());
            assert_eq!(new_state, expected_state);

            // Check the verdict
            assert!(!expects_err);
        }
        Err(e) => {
            dbg!(e);
            assert!(expects_err);
        }
    }

    // TODO: Check the peer list
    // TODO: Check we recorded a fork evidence (or not)
}

/// Smoke test: every fixture under bisection/multi_peer deserializes.
#[test]
fn deserialize_multi_peer_json() {
    load_multi_peer_testcases("bisection/multi_peer");
}

/// Runs every multi-peer bisection fixture end to end.
#[test]
fn run_multipeer_tests() {
    let testcases = load_multi_peer_testcases("bisection/multi_peer");
    for testcase in testcases {
        run_multipeer_test(testcase);
    }
}
use challenges::{ chal43::{dsa_leaky_k_attack, hash_msg_to_hexstr, is_dsa_key_pair}, chal43::{DsaKeyPair, DsaPubKey, DsaPublicParam, DsaSignature}, random_bytes, }; use num::{BigUint, FromPrimitive}; use std::process; fn main() { println!("🔓 Challenge 43"); println!("Leaky k DSA signing attack ... (may take a few sec)"); let total_crack = 100; let mut success_crack = 0; for _ in 0..total_crack { let dsa = DsaKeyPair::key_gen(); let pk = dsa.get_pub_key(); let msg = random_bytes(30); // arbitarily chosen message length let (k, sig) = dsa.leaky_sign(&msg); let guess_key = dsa_leaky_k_attack(&pk.pub_param.q, &msg, &k, &sig); if is_dsa_key_pair(&pk, &guess_key) { success_crack += 1; } } println!( "Cracked the private key {} out of {} times", success_crack, total_crack ); println!("\nRealisitc attack on DSA signature with low entropy k ... (took me ~7 min on my laptop)"); let pk = DsaPubKey { pub_param: DsaPublicParam::default(), pub_key: BigUint::parse_bytes( b"84ad4719d044495496a3201c8ff484feb45b962e7302e56a392aee4abab3e4bdebf2\ 955b4736012f21a08084056b19bcd7fee56048e004e44984e2f411788efdc837a0d2\ e5abb7b555039fd243ac01f0fb2ed1dec568280ce678e931868d23eb095fde9d3779\ 191b8c0299d6e07bbb283e6633451e535c45513b2d33c99ea17", 16, ) .unwrap(), }; let msg = b"For those that envy a MC it can be hazardous to your health\n\ So be friendly, a matter of life and death, just like a etch-a-sketch\n" .to_vec(); let sig = DsaSignature { r: BigUint::parse_bytes(b"548099063082341131477253921760299949438196259240", 10).unwrap(), s: BigUint::parse_bytes(b"857042759984254168557880549501802188789837994940", 10).unwrap(), }; // given that k is between 0 and 2^16 due to poor entropy, we can crack it. 
for _k in 1..65537 { let k = BigUint::from_u64(_k).unwrap(); let guess = dsa_leaky_k_attack(&pk.pub_param.q, &msg, &k, &sig); if is_dsa_key_pair(&pk, &guess) && hash_msg_to_hexstr(&guess.to_str_radix(16).as_bytes()) == "0954edd5e0afe5542a4adf012611a91912a3ec16" { println!("Cracked your private key: {} !!", &guess.to_str_radix(16)); process::exit(0); } } panic!("Failed to crack the private key."); }
#![feature(core_intrinsics)] mod lehmer64; pub use lehmer64::*; mod xorshift; pub use xorshift::*; mod xorshift_nocell; pub use xorshift_nocell::*; mod xorshift32_2; pub use xorshift32_2::*; mod xoshiro128plus; pub use xoshiro128plus::*; mod xoshiro256starstar; pub use xoshiro256starstar::*; mod xorshift32; pub use xorshift32::*; mod shasha; pub use shasha::*; macro_rules! benchmark { ($name:ident) => { let mut rng = $name::new(); let counter: u64 = 0xff_ffff; let mut times = 0; let mut result = 0; for _ in 0..counter { let start = unsafe { core::arch::x86_64::_rdtsc() }; result |= rng.rand(); let end = unsafe { core::arch::x86_64::_rdtsc() }; times += (end - start); } print!( "{:20} {:10.2} | {:#x}\n", stringify!($name), times as f64 / counter as f64, result ); }; } pub fn main() { print!("Clock cycles per .rand() on average over 0xff_fffff iterations\n"); benchmark!(Xorshift); benchmark!(Xorshift_nocell); benchmark!(Xorshift32); benchmark!(Xorshift32_2); benchmark!(Xoshiro256StarStar); benchmark!(Xoshiro128Plus); benchmark!(Lehmer64); benchmark!(ShaSha); }
extern crate ansi_escapes;
extern crate atomic_counter;
extern crate clap;
extern crate collect_slice;
extern crate crossbeam_channel;
extern crate disque;
extern crate indicatif;
extern crate itertools;
extern crate rand;
extern crate shellexpand;

mod job;
mod signals;

//use std::str::from_utf8;
//use std::time::Duration;
use std::collections::HashSet;
use std::io;
use std::io::prelude::*;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::thread;
use std::time::Duration;

// Total jobs submitted (written by the producer, read by the progress loop).
static GLOBAL_JOBS_COUNT: AtomicUsize = AtomicUsize::new(0);
// Set on SIGINT so cleanup can cancel outstanding jobs.
static GLOBAL_ABORT: AtomicBool = AtomicBool::new(false);
// Batch size for fetching results from the control queue.
const GETJOB_COUNT: usize = 128;

use clap::{crate_version, App, Arg};
use crossbeam_channel::{bounded, select, tick};
use disque::{AddJobBuilder, Disque};
use failure::Error;
use indicatif::{ProgressBar, ProgressStyle};
use itertools::Itertools;
use job::{CmdBody, ResultBody};
use signals::signal_notifier;

/// Thin wrapper: report any error from `try_main` and exit non-zero.
fn main() {
    let result = try_main();
    match result {
        Err(e) => {
            eprintln!("dwqc: error: {}", e);
            std::process::exit(1);
        }
        Ok(_) => {}
    };
}

/// Parses arguments, submits jobs to disque, and streams back their results
/// on a per-invocation control queue, with optional progress display.
fn try_main() -> Result<(), Error> {
    let matches = App::new("dwqc in rust")
        .version(crate_version!())
        .author("Kaspar Schleiser <kaspar@schleiser.de>")
        .about("Does awesome things")
        .arg(
            Arg::with_name("queue")
                .short('q')
                .long("queue")
                .help("Send job to specified queue")
                .required(false)
                .value_name("QUEUE")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("disque_url")
                .short('u')
                .long("disque")
                .help("Connect to specified disque instance")
                .required(false)
                .value_name("URL")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("verbose")
                .short('v')
                .help("Enable status output"),
        )
        .arg(
            Arg::with_name("progress")
                .short('P')
                .help("Enable progress output"),
        )
        .arg(Arg::with_name("stdin").short('s').help(
            "Read commands from stdin, one per line.\n\
             If COMMAND is given, each line at spaces, and replace \"${N}\" \
             with word at position 'N' (starting at 1), or \"${0}\" with whole \
             line.",
        ))
        .arg(
            Arg::with_name("repo")
                .short('r')
                .value_name("URL")
                .help("Git repository to check out")
                .required(true)
                .env("DWQ_REPO"),
        )
        .arg(
            Arg::with_name("commit")
                .short('c')
                .value_name("COMMIT")
                .help("Git commit to check out")
                .required(true)
                .env("DWQ_COMMIT"),
        )
        .arg(
            Arg::with_name("command")
                .value_name("COMMAND")
                .help("Command to run")
                .index(1),
        )
        .get_matches();

    /* handle arguments */
    let disque_url = matches
        .value_of("disque_url")
        .unwrap_or("redis://localhost:7711")
        .to_string();

    let queue = matches.value_of("queue").unwrap_or("test").to_string();
    println!("queue: {}", queue);

    let verbose = matches.is_present("verbose");
    if verbose {
        println!("dwqc: status output enabled");
    }

    // Unique per-invocation reply queue so results of concurrent dwqc runs
    // don't get mixed up.
    let control_queue = format!("control::{}", rand::random::<u64>());
    if verbose {
        println!("dwqc: control queue: {}", control_queue);
    }

    let progress = matches.is_present("progress");

    /* set up channels */
    let signals = signal_notifier().unwrap();
    let update = tick(Duration::from_secs(1));
    let (tx_cmds, rx_cmds) = bounded::<String>(1024);
    let (tx_jobid, rx_jobid) = bounded(1024);
    let (tx_result, rx_result) = bounded(1024);
    let (tx_reader, rx_reader) = bounded(1);

    /* used to keep track of jobs */
    let mut jobs = HashSet::new();
    let mut more_jobs_coming = true;

    // NOTE(review): `1..8` spawns 7 sender threads — confirm whether 8 was
    // intended (`0..8`).
    let mut job_sender = Vec::new();
    for _ in 1..8 {
        let disque_url_sender = disque_url.clone();
        let tx = tx_jobid.clone();
        let rx = rx_cmds.clone();
        let queue = queue.clone();
        job_sender.push(thread::spawn(move || {
            /* connect to disque */
            let disque_url: &str = &disque_url_sender;
            let disque = Disque::open(disque_url).unwrap();
            loop {
                // Exits when `tx_cmds` is dropped by main and drained.
                let body_json = match rx.recv() {
                    Ok(value) => value,
                    Err(_) => break,
                };

                // send job
                let jobid = AddJobBuilder::new(queue.as_bytes(), body_json.as_bytes(), 300 * 1000)
                    .ttl(24 * 60 * 60 * 1000)
                    .run(&disque)
                    .unwrap();

                match tx.send(jobid) {
                    Ok(_) => (),
                    Err(_) => return, // assuming this only happens if main has aborted.
                }
            }
            //println!("dwqc: job sender done.");
        }));
    }

    // Result receiver: blocks on the control queue and forwards parsed
    // result bodies to the main select loop.
    let disque_url_receiver = disque_url.clone();
    let control_queue_receiver = control_queue.clone();
    thread::spawn(move || {
        /* connect to disque */
        let disque_url_receiver: &str = &disque_url_receiver;
        let disque = Disque::open(disque_url_receiver).unwrap();
        let tx = tx_result.clone();
        loop {
            let result = disque.getjob_count(
                false,
                None,
                GETJOB_COUNT,
                &[&control_queue_receiver.as_bytes()],
            );
            let result = match result {
                Ok(t) => t,
                Err(_) => break,
            };
            for (_, _, res_body_json) in result.iter() {
                let res_body: ResultBody = serde_json::from_slice(&res_body_json).unwrap();
                // NOTE(review): `break` here only leaves the inner for-loop;
                // the outer loop keeps polling even after rx_result is
                // dropped — verify this is intentional.
                match tx.send(res_body) {
                    Ok(_) => continue,
                    Err(_) => break,
                }
            }
        }
        //println!("dwqc: result_receiver done.");
    });

    // Prints a job's captured output; returns false for results we never
    // submitted (stale control-queue entries).
    fn handle_result(jobs: &HashSet<String>, res_body: &ResultBody) -> bool {
        if !jobs.contains(&res_body.job_id) {
            eprintln!("got unexpected job result (id={})", &res_body.job_id);
            return false;
        }

        let output = match res_body.result.extra.get("output") {
            Some(value) => match value.as_str() {
                Some(value) => value,
                None => "",
            },
            None => "",
        };
        print!("{}", output);
        return true;
    };

    if matches.is_present("command") && !matches.is_present("stdin") {
        // Single-job mode: one command from the argument list.
        // create json job body
        let body_json = CmdBody::new(
            matches.value_of("repo").unwrap().to_string(),
            matches.value_of("commit").unwrap().to_string(),
            matches.value_of("command").unwrap().to_string(),
            Some(&control_queue),
        )
        .to_json();

        GLOBAL_JOBS_COUNT.fetch_add(1, Ordering::SeqCst);
        tx_cmds.send(body_json.clone())?;
        more_jobs_coming = false;
    } else {
        // Stdin mode: one job per line, forwarded by a reader thread;
        // `tx_reader` signals end-of-input to the select loop below.
        let tx_cmds = tx_cmds.clone();
        let tx_reader = tx_reader.clone();
        thread::spawn(move || {
            let stdin = io::stdin();
            for line in stdin.lock().lines() {
                let line = line.unwrap().to_string();
                let split = line.split("###");
                let parts = split.map(|x| x.trim()).collect::<Vec<_>>();

                //println!("job: {}", line);

                // create json job body
                let body_json = CmdBody::new(
                    matches.value_of("repo").unwrap().to_string(),
                    matches.value_of("commit").unwrap().to_string(),
                    parts[0].to_string(),
                    Some(&control_queue),
                )
                .to_json();

                GLOBAL_JOBS_COUNT.fetch_add(1, Ordering::SeqCst);
                tx_cmds.send(body_json.clone()).unwrap();
            }
            //println!("job reader done");
            tx_reader.send(true).unwrap();
        });
    }

    let mut result = 0i32;
    let mut jobs_total = GLOBAL_JOBS_COUNT.load(Ordering::SeqCst);
    let mut jobs_done = 0;

    //let bar = ProgressBar::new(jobs_total as u64);
    let bar = match progress {
        true => ProgressBar::new(0u64),
        false => ProgressBar::hidden(),
    };

    if !more_jobs_coming {
        bar.set_style(ProgressStyle::default_bar())
    } else {
        bar.set_style(ProgressStyle::default_spinner());
        bar.set_message("collecting jobs");
    }

    // Event loop: multiplexes stdin-EOF, ticker, submitted job ids, job
    // results, and signals until all jobs are accounted for (or aborted).
    while more_jobs_coming || (jobs_total - jobs_done) > 0 {
        select! {
            recv(rx_reader) -> _ => {
                more_jobs_coming = false;
                jobs_total = GLOBAL_JOBS_COUNT.load(Ordering::SeqCst);
                bar.set_style(ProgressStyle::default_bar().template("{msg} {spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} eta: {eta}").progress_chars("=>"));
                bar.set_length(jobs_total as u64);
                bar.set_message("");
                bar.println("dwqc: all jobs collected");
            }
            recv(update) -> _ => {
                bar.tick();
            }
            recv(rx_jobid) -> jobid => {
                let jobid = match jobid {
                    Err(_) => {
                        continue;
                    },
                    Ok(value) => value,
                };
                jobs.insert(jobid);
                bar.tick();
            }
            recv(rx_result) -> res_body => {
                if progress {
                    print!("{}", ansi_escapes::EraseLines(2));
                }
                let res_body = res_body.unwrap();
                if handle_result(&jobs, &res_body) == false {
                    continue;
                }
                // Any failed job makes the whole run exit non-zero.
                if res_body.result.status != 0 {
                    result = 1;
                }
                jobs_done += 1;
                jobs_total = GLOBAL_JOBS_COUNT.load(Ordering::SeqCst);
                if progress {
                    println!();
                    bar.set_length(jobs_total as u64);
                    bar.set_position(jobs_done as u64);
                    bar.println("");
                }
            }
            recv(signals) -> _ => {
                println!();
                println!("dwqc: aborted.");
                GLOBAL_ABORT.store(true, Ordering::Relaxed);
                result = 1;
                break;
            }
        }
    }

    bar.finish_and_clear();

    drop(tx_cmds);
    drop(rx_result);
    drop(rx_jobid);

    // On abort, delete every job we know we submitted, in chunks.
    if GLOBAL_ABORT.load(Ordering::Relaxed) {
        if !jobs.is_empty() {
            println!("dwqc: cancelling jobs...");
            let disque = Disque::open(&disque_url as &str).unwrap();
            for chunk in &jobs.drain().chunks(4096) {
                let job_ids = chunk.collect::<Vec<String>>();
                let job_ids: Vec<&[u8]> = job_ids.iter().map(String::as_bytes).collect();
                disque.deljobs(&job_ids[..])?;
            }
        }
    }

    // job_sender.join().unwrap();
    std::process::exit(result);
    //result_receiver.join().unwrap();
}
fn main() { let input = include_str!("day8.txt"); let mut v: Vec<Vec<&str>> = input.split("\n").map(|x: &str| x.split(" ").collect()).collect(); let mut accumulator = 0; let mut ranthru: Vec<i32> = vec![1000]; let mut i= 0; while !ranthru.contains(&i) { if v[i as usize][0] == "acc" { ranthru.push(i); accumulator += v[i as usize][1].replace("+", "").parse::<i32>().unwrap(); } else if v[i as usize][0] == "nop" { ranthru.push(i); } if v[i as usize][0] == "jmp" { ranthru.push(i); i += v[i as usize][1].replace("+", "").parse::<i32>().unwrap(); } else { i += 1; } } println!("{}", accumulator); }
//! # Resource
//!
//! A `Resource` is an immutable representation of the entity producing telemetry. For example, a
//! process producing telemetry that is running in a container on Kubernetes has a Pod name, it is
//! in a namespace, and possibly is part of a Deployment which also has a name. All three of these
//! attributes can be included in the `Resource`.
//!
//! The primary purpose of resources as a first-class concept in the SDK is decoupling of discovery
//! of resource information from exporters. This allows for independent development and easy
//! customization for users that need to integrate with closed source environments. When used with
//! distributed tracing, a resource can be associated with the [`TracerProvider`] when it is created.
//! That association cannot be changed later. When associated with a `TracerProvider`, all `Span`s
//! produced by any `Tracer` from the provider are associated with this `Resource`.
//!
//! [`TracerProvider`]: crate::trace::TracerProvider
//!
//! # Resource detectors
//!
//! `ResourceDetector`s are used to detect resource from runtime or environmental variables. The
//! following `ResourceDetector`s are provided along with this SDK.
//!
//! - EnvResourceDetector, detect resource from environmental variables.
//! - OsResourceDetector, detect OS from runtime.
//! - ProcessResourceDetector, detect process information
mod env;
mod os;
mod process;
pub use env::EnvResourceDetector;
pub use env::SdkProvidedResourceDetector;
pub use os::OsResourceDetector;
pub use process::ProcessResourceDetector;

#[cfg(feature = "metrics")]
use crate::attributes;
use crate::{Key, KeyValue, Value};
#[cfg(feature = "serialize")]
use serde::{Deserialize, Serialize};
use std::collections::{btree_map, BTreeMap};
use std::ops::Deref;
use std::time::Duration;

/// Describes an entity about which identifying information and metadata is exposed.
///
/// Items are sorted by their key; when the same key is supplied more than once
/// (in `new`, `from_detectors`, or `merge`), the later value overwrites the earlier one.
#[cfg_attr(feature = "serialize", derive(Deserialize, Serialize))]
#[derive(Clone, Debug, PartialEq)]
pub struct Resource {
    // Sorted map: gives deterministic iteration order for encoding/equality.
    attrs: BTreeMap<Key, Value>,
}

impl Default for Resource {
    /// Default resource: environment-variable detection only, with a zero
    /// timeout.
    fn default() -> Self {
        Self::from_detectors(
            Duration::from_secs(0),
            vec![Box::new(EnvResourceDetector::new())],
        )
    }
}

impl Resource {
    /// Creates an empty resource.
    pub fn empty() -> Self {
        Self {
            attrs: Default::default(),
        }
    }

    /// Create a new `Resource` from key value pairs.
    ///
    /// Values are de-duplicated by key; if the same key appears more than
    /// once, the value from the last pair wins (even if it is empty).
    pub fn new<T: IntoIterator<Item = KeyValue>>(kvs: T) -> Self {
        let mut resource = Resource::empty();

        for kv in kvs.into_iter() {
            resource.attrs.insert(kv.key, kv.value);
        }

        resource
    }

    /// Create a new `Resource` from resource detectors.
    ///
    /// timeout will be applied to each detector.
    pub fn from_detectors(timeout: Duration, detectors: Vec<Box<dyn ResourceDetector>>) -> Self {
        let mut resource = Resource::empty();
        for detector in detectors {
            let detected_res = detector.detect(timeout);
            for (key, value) in detected_res.into_iter() {
                // using insert instead of merge to avoid clone.
                resource.attrs.insert(key, value);
            }
        }

        resource
    }

    /// Create a new `Resource` by combining two resources.
    ///
    /// Keys from the `other` resource have priority over keys from this resource, even if the
    /// updated value is empty.
    pub fn merge<T: Deref<Target = Self>>(&self, other: T) -> Self {
        if self.attrs.is_empty() {
            return other.clone();
        }
        if other.attrs.is_empty() {
            return self.clone();
        }

        let mut resource = Resource::empty();

        // attrs from self take the less priority, even when the new value is empty.
        for (k, v) in self.attrs.iter() {
            resource.attrs.insert(k.clone(), v.clone());
        }
        for (k, v) in other.attrs.iter() {
            resource.attrs.insert(k.clone(), v.clone());
        }

        resource
    }

    /// Returns the number of attributes for this resource
    pub fn len(&self) -> usize {
        self.attrs.len()
    }

    /// Returns `true` if the resource contains no attributes.
    pub fn is_empty(&self) -> bool {
        self.attrs.is_empty()
    }

    /// Gets an iterator over the attributes of this resource, sorted by key.
    pub fn iter(&self) -> Iter<'_> {
        self.into_iter()
    }

    /// Retrieve the value from resource associate with given key.
    pub fn get(&self, key: Key) -> Option<Value> {
        self.attrs.get(&key).cloned()
    }

    /// Encoded attributes
    #[cfg(feature = "metrics")]
    #[cfg_attr(docsrs, doc(cfg(feature = "metrics")))]
    pub fn encoded(&self, encoder: &dyn attributes::Encoder) -> String {
        encoder.encode(&mut self.into_iter())
    }
}

/// An owned iterator over the entries of a `Resource`.
#[derive(Debug)]
pub struct IntoIter(btree_map::IntoIter<Key, Value>);

impl Iterator for IntoIter {
    type Item = (Key, Value);

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }
}

impl IntoIterator for Resource {
    type Item = (Key, Value);
    type IntoIter = IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter(self.attrs.into_iter())
    }
}

/// An iterator over the entries of a `Resource`.
#[derive(Debug)]
pub struct Iter<'a>(btree_map::Iter<'a, Key, Value>);

impl<'a> Iterator for Iter<'a> {
    type Item = (&'a Key, &'a Value);

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }
}

impl<'a> IntoIterator for &'a Resource {
    type Item = (&'a Key, &'a Value);
    type IntoIter = Iter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        Iter(self.attrs.iter())
    }
}

/// ResourceDetector detects OpenTelemetry resource information
///
/// Implementations of this trait can be passed to
/// the `Resource::from_detectors` function to generate a Resource from the merged information.
pub trait ResourceDetector {
    /// detect returns an initialized Resource based on gathered information.
    ///
    /// timeout is used in case the detection operation takes too much time.
    ///
    /// If source information to construct a Resource is inaccessible, an empty Resource should be returned
    ///
    /// If source information to construct a Resource is invalid, for example,
    /// missing required values. an empty Resource should be returned.
    fn detect(&self, timeout: Duration) -> Resource;
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::sdk::resource::EnvResourceDetector;
    use std::collections::BTreeMap;
    use std::{env, time};

    // Last duplicate wins in `Resource::new`.
    #[test]
    fn new_resource() {
        let args_with_dupe_keys = vec![KeyValue::new("a", ""), KeyValue::new("a", "final")];

        let mut expected_attrs = BTreeMap::new();
        expected_attrs.insert(Key::new("a"), Value::from("final"));

        assert_eq!(
            Resource::new(args_with_dupe_keys),
            Resource {
                attrs: expected_attrs
            }
        );
    }

    // `other` wins on every shared key, even when its value is empty ("d").
    #[test]
    fn merge_resource() {
        let resource_a = Resource::new(vec![
            KeyValue::new("a", ""),
            KeyValue::new("b", "b-value"),
            KeyValue::new("d", "d-value"),
        ]);

        let resource_b = Resource::new(vec![
            KeyValue::new("a", "a-value"),
            KeyValue::new("c", "c-value"),
            KeyValue::new("d", ""),
        ]);

        let mut expected_attrs = BTreeMap::new();
        expected_attrs.insert(Key::new("a"), Value::from("a-value"));
        expected_attrs.insert(Key::new("b"), Value::from("b-value"));
        expected_attrs.insert(Key::new("c"), Value::from("c-value"));
        expected_attrs.insert(Key::new("d"), Value::from(""));

        assert_eq!(
            resource_a.merge(&resource_b),
            Resource {
                attrs: expected_attrs
            }
        );
    }

    // NOTE(review): mutates process-wide env vars — this test can race with
    // other env-reading tests unless the suite runs single-threaded.
    #[test]
    fn detect_resource() {
        env::set_var("OTEL_RESOURCE_ATTRIBUTES", "key=value, k = v , a= x, a=z");
        env::set_var("irrelevant".to_uppercase(), "20200810");

        let detector = EnvResourceDetector::new();
        let resource =
            Resource::from_detectors(time::Duration::from_secs(5), vec![Box::new(detector)]);
        assert_eq!(
            resource,
            Resource::new(vec![
                KeyValue::new("key", "value"),
                KeyValue::new("k", "v"),
                KeyValue::new("a", "x"),
                KeyValue::new("a", "z")
            ])
        )
    }
}
use thiserror::Error; #[derive(Debug, Error)] pub enum Error { #[error("Null error")] NullError, #[error("Data parse error")] DataParseError, #[error("Network error")] NetworkError, #[error("Invalid packet")] InvalidPacketError, #[error("Pipe error")] PipeError, #[error("Operation timed out")] TimeoutError, }
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};

use ic_cdk::export::candid::{CandidType, Deserialize, Principal};
use ic_cron::types::TaskId;

use currency_token_client::types::{ControllerList, Controllers, TokenInfo};

use crate::common::types::Error;

/// Ledger state of a single currency token: balances, supply, metadata,
/// access-control lists, and the ids of scheduled (cron) mint/transfer tasks.
#[derive(CandidType, Deserialize)]
pub struct CurrencyToken {
    // Accounts with a zero balance are removed from the map entirely.
    pub balances: HashMap<Principal, u64>,
    pub total_supply: u64,
    pub info: TokenInfo,
    pub controllers: ControllerList,
    pub recurrent_mint_tasks: HashSet<TaskId>,
    pub recurrent_transfer_tasks: HashMap<Principal, HashSet<TaskId>>,
}

impl CurrencyToken {
    /// Creates `qty` new tokens for `to`, growing the total supply.
    ///
    /// Errors with `ZeroQuantity` when `qty == 0`.
    /// NOTE(review): `prev_balance + qty` and `total_supply += qty` can
    /// overflow u64 (panic in debug, wrap in release) — consider
    /// `checked_add`; confirm intended behavior.
    pub fn mint(&mut self, to: Principal, qty: u64) -> Result<(), Error> {
        if qty == 0 {
            return Err(Error::ZeroQuantity);
        }

        let prev_balance = self.balance_of(&to);
        let new_balance = prev_balance + qty;

        self.total_supply += qty;
        self.balances.insert(to, new_balance);

        Ok(())
    }

    /// Moves `qty` tokens from `from` to `to`; total supply is unchanged.
    ///
    /// Errors with `ZeroQuantity` when `qty == 0` and `InsufficientBalance`
    /// when `from` holds fewer than `qty` tokens. A sender balance that
    /// drops to zero is removed from the map.
    pub fn transfer(&mut self, from: Principal, to: Principal, qty: u64) -> Result<(), Error> {
        if qty == 0 {
            return Err(Error::ZeroQuantity);
        }

        let prev_from_balance = self.balance_of(&from);
        let prev_to_balance = self.balance_of(&to);

        if prev_from_balance < qty {
            return Err(Error::InsufficientBalance);
        }

        let new_from_balance = prev_from_balance - qty;
        let new_to_balance = prev_to_balance + qty;

        if new_from_balance == 0 {
            self.balances.remove(&from);
        } else {
            self.balances.insert(from, new_from_balance);
        }

        self.balances.insert(to, new_to_balance);

        Ok(())
    }

    /// Destroys `qty` tokens held by `from`, shrinking the total supply.
    ///
    /// Errors with `ZeroQuantity` when `qty == 0` and `InsufficientBalance`
    /// when the balance is too small. A balance that drops to zero is
    /// removed from the map.
    pub fn burn(&mut self, from: Principal, qty: u64) -> Result<(), Error> {
        if qty == 0 {
            return Err(Error::ZeroQuantity);
        }

        let prev_balance = self.balance_of(&from);

        if prev_balance < qty {
            return Err(Error::InsufficientBalance);
        }

        let new_balance = prev_balance - qty;

        if new_balance == 0 {
            self.balances.remove(&from);
        } else {
            self.balances.insert(from, new_balance);
        }

        self.total_supply -= qty;

        Ok(())
    }

    /// Replaces the token metadata, returning the previous value.
    pub fn update_info(&mut self, new_info: TokenInfo) -> TokenInfo {
        let old_info = self.info.clone();
        self.info = new_info;

        old_info
    }

    /// Replaces the mint-controller list, returning the previous value.
    pub fn update_mint_controllers(&mut self, new_mint_controllers: Controllers) -> Controllers {
        let old_controllers = self.controllers.mint_controllers.clone();
        self.controllers.mint_controllers = new_mint_controllers;

        old_controllers
    }

    /// Replaces the info-controller list, returning the previous value.
    pub fn update_info_controllers(&mut self, new_info_controllers: Controllers) -> Controllers {
        let old_controllers = self.controllers.info_controllers.clone();
        self.controllers.info_controllers = new_info_controllers;

        old_controllers
    }

    /// Balance of `account_owner`; absent accounts read as 0.
    pub fn balance_of(&self, account_owner: &Principal) -> u64 {
        match self.balances.get(account_owner) {
            None => 0,
            Some(b) => *b,
        }
    }

    /// Records a scheduled transfer task id under its sender principal.
    pub fn register_recurrent_transfer_task(&mut self, from: Principal, task_id: TaskId) {
        match self.recurrent_transfer_tasks.entry(from) {
            Entry::Occupied(mut entry) => {
                entry.get_mut().insert(task_id);
            }
            Entry::Vacant(entry) => {
                let mut s = HashSet::new();
                s.insert(task_id);

                entry.insert(s);
            }
        };
    }

    /// Removes a scheduled transfer task id; returns whether it was present.
    pub fn unregister_recurrent_transfer_task(&mut self, from: Principal, task_id: TaskId) -> bool {
        match self.recurrent_transfer_tasks.get_mut(&from) {
            Some(tasks) => tasks.remove(&task_id),
            None => false,
        }
    }

    /// All scheduled transfer task ids for `from` (order unspecified).
    pub fn get_recurrent_transfer_tasks(&self, from: Principal) -> Vec<TaskId> {
        self.recurrent_transfer_tasks
            .get(&from)
            .map(|t| t.iter().cloned().collect::<Vec<_>>())
            .unwrap_or_default()
    }

    /// Records a scheduled mint task id.
    pub fn register_recurrent_mint_task(&mut self, task_id: TaskId) {
        self.recurrent_mint_tasks.insert(task_id);
    }

    /// Removes a scheduled mint task id; returns whether it was present.
    pub fn unregister_recurrent_mint_task(&mut self, task_id: TaskId) -> bool {
        self.recurrent_mint_tasks.remove(&task_id)
    }

    /// All scheduled mint task ids (order unspecified).
    pub fn get_recurrent_mint_tasks(&self) -> Vec<TaskId> {
        self.recurrent_mint_tasks.iter().cloned().collect()
    }
}

#[cfg(test)]
mod tests {
    use std::collections::{HashMap, HashSet};

    use ic_cdk::export::candid::Principal;
    use union_utils::random_principal_test;

    // NOTE(review): the non-test code imports from `currency_token_client`
    // while this test module imports from `antifragile_currency_token_client`
    // — confirm both names resolve to the same crate (e.g. via a rename).
    use antifragile_currency_token_client::types::{ControllerList, TokenInfo};

    use crate::common::currency_token::CurrencyToken;

    // NOTE(review): appears unused by the tests below.
    fn magic_blob() -> Vec<u8> {
        vec![1u8, 3u8, 3u8, 7u8]
    }

    // Fresh empty token plus the principal that controls it.
    fn create_currency_token() -> (CurrencyToken, Principal) {
        let controller = random_principal_test();
        let token = CurrencyToken {
            balances: HashMap::new(),
            total_supply: 0,
            info: TokenInfo {
                name: String::from("test"),
                symbol: String::from("TST"),
                decimals: 8,
            },
            controllers: ControllerList::single(Some(controller)),
            recurrent_mint_tasks: HashSet::new(),
            recurrent_transfer_tasks: HashMap::new(),
        };

        (token, controller)
    }

    #[test]
    fn creation_works_fine() {
        let (token, controller) = create_currency_token();

        assert!(token.balances.is_empty());
        assert_eq!(token.total_supply, 0);
        assert!(token.controllers.info_controllers.contains(&controller));
        assert!(token.controllers.mint_controllers.contains(&controller));
        assert_eq!(token.info.name, String::from("test"));
        assert_eq!(token.info.symbol, String::from("TST"));
        assert_eq!(token.info.decimals, 8);
    }

    #[test]
    fn minting_works_right() {
        let (mut token, controller) = create_currency_token();

        let user_1 = random_principal_test();

        token.mint(user_1, 100).ok().unwrap();

        assert_eq!(token.total_supply, 100);
        assert_eq!(token.balances.len(), 1);
        assert_eq!(token.balances.get(&user_1).unwrap().clone(), 100);

        token.mint(controller, 200).ok().unwrap();

        assert_eq!(token.total_supply, 300);
        assert_eq!(token.balances.len(), 2);
        assert_eq!(token.balances.get(&user_1).unwrap().clone(), 100);
        assert_eq!(token.balances.get(&controller).unwrap().clone(), 200);
    }

    #[test]
    fn burning_works_fine() {
        let (mut token, controller) = create_currency_token();

        let user_1 = random_principal_test();

        token.mint(user_1, 100).ok().unwrap();
        token.burn(user_1, 90).ok().unwrap();

        assert_eq!(token.balances.len(), 1);
        assert_eq!(token.balances.get(&user_1).unwrap().clone(), 10);
        assert_eq!(token.total_supply, 10);

        // over-burn fails, then burning to exactly zero removes the account
        token.burn(user_1, 20).err().unwrap();
        token.burn(user_1, 10).ok().unwrap();

        assert!(token.balances.is_empty());
        assert!(token.balances.get(&user_1).is_none());
        assert_eq!(token.total_supply, 0);

        token.burn(user_1, 20).err().unwrap();
    }

    #[test]
    fn transfer_works_fine() {
        let (mut token, controller) = create_currency_token();

        let user_1 = random_principal_test();
        let user_2 = random_principal_test();

        token.mint(user_1, 1000).ok().unwrap();

        token.transfer(user_1, user_2, 100).ok().unwrap();

        assert_eq!(token.balances.len(), 2);
        assert_eq!(token.balances.get(&user_1).unwrap().clone(), 900);
        assert_eq!(token.balances.get(&user_2).unwrap().clone(), 100);
        assert_eq!(token.total_supply, 1000);

        // insufficient balance and unknown sender both fail
        token.transfer(user_1, user_2, 1000).err().unwrap();
        token.transfer(controller, user_2, 100).err().unwrap();

        token.transfer(user_2, user_1, 100).ok().unwrap();

        assert_eq!(token.balances.len(), 1);
        assert_eq!(token.balances.get(&user_1).unwrap().clone(), 1000);
        assert!(token.balances.get(&user_2).is_none());
        assert_eq!(token.total_supply, 1000);

        token.transfer(user_2, user_1, 1).err().unwrap();
        token.transfer(user_2, user_1, 0).err().unwrap();
    }

    #[test]
    fn info_update_works_well() {
        let (mut token, controller) = create_currency_token();

        let new_info_1 = TokenInfo {
            name: String::from("name 1"),
            symbol: String::from("NME1"),
            decimals: 9,
        };
        token.update_info(new_info_1);

        assert_eq!(token.info.name, String::from("name 1"));
        assert_eq!(token.info.symbol, String::from("NME1"));
        assert_eq!(token.info.decimals, 9);

        let new_info_2 = TokenInfo {
            name: String::from("name 2"),
            symbol: String::from("NME2"),
            decimals: 2,
        };
        token.update_info(new_info_2);

        assert_eq!(token.info.name, String::from("name 2"));
        assert_eq!(token.info.symbol, String::from("NME2"));
        assert_eq!(token.info.decimals, 2);
    }
}
mod token; mod parser; use std::env; use std::path::PathBuf; use std::io::BufReader; use std::fs::File; use token::tokenize; use token::token::Token; use parser::types::Value; use parser::parse; fn main() -> Result<(), Box<std::error::Error>> { const USAGE: &str = "usage: json_parser path/to/json/file"; let args: Vec<String> = env::args().collect(); if args.len() != 2 { println!("{}", USAGE); std::process::exit(1); } let rel_path = PathBuf::from(args[1].to_string()); let full_path = rel_path.canonicalize()?; println!("Parsing file: {:?}", full_path); let file = File::open(full_path)?; let mut reader = BufReader::new(file); let tokens: Vec<Token> = tokenize(&mut reader)?; let value: Value = parse(tokens)?; //print!("{}\n", value.to_pretty_string(0)); print!("{}\n", value); Ok(()) }
use super::*; #[test] fn with_number_atom_reference_function_port_or_local_pid_returns_first() { run!( |arc_process| { ( strategy::term::pid::external(arc_process.clone()), strategy::term::number_atom_reference_function_port_or_local_pid( arc_process.clone(), ), ) }, |(first, second)| { prop_assert_eq!(result(first, second), first); Ok(()) }, ); } #[test] fn with_lesser_external_pid_second_returns_first() { max( |_, process| process.external_pid(external_arc_node(), 1, 3).unwrap(), First, ); } #[test] fn with_same_external_pid_second_returns_first() { max(|first, _| first, First); } #[test] fn with_same_value_external_pid_second_returns_first() { max( |_, process| process.external_pid(external_arc_node(), 2, 3).unwrap(), First, ); } #[test] fn with_greater_external_pid_second_returns_second() { max( |_, process| process.external_pid(external_arc_node(), 3, 3).unwrap(), Second, ); } #[test] fn with_tuple_map_list_or_bitstring_returns_second() { run!( |arc_process| { ( strategy::term::pid::external(arc_process.clone()), strategy::term::tuple_map_list_or_bitstring(arc_process.clone()), ) }, |(first, second)| { prop_assert_eq!(result(first, second), second.into()); Ok(()) }, ); } fn max<R>(second: R, which: FirstSecond) where R: FnOnce(Term, &Process) -> Term, { super::max( |process| process.external_pid(external_arc_node(), 2, 3).unwrap(), second, which, ); }
use log::{info, warn, LevelFilter, Log}; fn main() { log::set_logger(&LOGGER) .map(|()| log::set_max_level(LevelFilter::Info)) .unwrap(); let mut yak = Yak("yak".to_string()); shave_the_yak(&mut yak); } struct SimpleLogger; impl Log for SimpleLogger { fn enabled(&self, metadata: &log::Metadata) -> bool { metadata.level() <= log::Level::Info } fn log(&self, record: &log::Record) { if self.enabled(record.metadata()) { println!("{}: {} {}", record.level(), record.target(), record.args()); } } fn flush(&self) {} } static LOGGER: SimpleLogger = SimpleLogger; #[derive(Debug)] struct Yak(String); impl Yak { fn shave(&mut self, _: u32) {} } fn find_a_razor() -> Result<u32, u32> { Ok(1) } fn shave_the_yak(yak: &mut Yak) { info!(target: "yak_events", "Commencing yak shaving for {:?}", yak); loop { match find_a_razor() { Ok(razor) => { info!("Razor located: {}", razor); yak.shave(razor); break; } Err(err) => { warn!("Unable to locate a razor: {}, retrying", err); } } } }
use proconio::{input, marker::Usize1}; fn main() { input! { n: usize, _k: usize, a: [[u8; n]; n], q: usize, queries: [(Usize1, Usize1); q], }; const INF: u64 = std::u64::MAX / 2; let mut d = vec![vec![INF; n]; n]; for v in 0..n { d[v][v] = 0; } for s in 0..n { for t in 0..n { if a[s][t] == 1 { d[s][t] = 1; } } } for v in 0..n { for s in 0..n { for t in 0..n { d[s][t] = d[s][t].min(d[s][v] + d[v][t]); } } } for (s, t) in queries { let s = s % n; let t = t % n; if a[s][t] == 1 && a[s][t] == 1 { println!("1"); continue; } let mut ans = INF; for v in 0..n { if a[s][v] == 1 { ans = ans.min(1 + d[v][t]); } } if ans == INF { println!("-1"); } else { println!("{}", ans); } } }
use prec::{Assoc, Climber, Expression, Rule, Token as PrecToken}; use std::fmt; /* This example uses the `prec` crate to perform integer operations. It supports parentheses, addition, subtraction, division, multiplication, exponents, and an additional operator for rounded-up division. */ #[derive(Hash, Eq, PartialEq, Copy, Clone)] pub enum Operator { // Addition Add, // Subtraction Sub, // Multiplication Mul, // Division, rounding down (standard behavior) Div, // Division, rounding up DivUp, // Exponent Exp, } impl fmt::Display for Operator { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{}", match self { Operator::Add => "+", Operator::Sub => "-", Operator::Mul => "*", Operator::Div => "/", Operator::DivUp => "/u", Operator::Exp => "^", } ) } } #[derive(Clone)] pub enum Token { Paren(Box<Expression<Operator, Token>>), Num(i64), } impl fmt::Display for Token { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Token::Paren(expr) => write!(f, "( {} )", expr), Token::Num(num) => write!(f, "{}", num), } } } impl PrecToken<i64, ()> for Token { fn convert(self, _: &()) -> Result<i64, ()> { Ok(match self { Token::Paren(expr) => CLIMBER.process(expr.as_ref(), &())?, Token::Num(n) => n, }) } } fn handler(lhs: Token, op: Operator, rhs: Token, _: &()) -> Result<Token, ()> { let lhs: i64 = lhs.convert(&())?; let rhs: i64 = rhs.convert(&())?; Ok(match op { Operator::Add => Token::Num(lhs + rhs), Operator::Sub => Token::Num(lhs - rhs), Operator::Mul => Token::Num(lhs * rhs), Operator::Div => Token::Num(lhs / rhs), Operator::DivUp => Token::Num((lhs as f64 / rhs as f64 + 0.5) as i64), Operator::Exp => Token::Num(lhs.overflowing_pow(rhs as u32).0), }) } lazy_static::lazy_static! 
{ pub static ref CLIMBER: Climber<Operator, Token, i64, ()> = Climber::new( vec![ Rule::new(Operator::Add, Assoc::Left) | Rule::new(Operator::Sub, Assoc::Left), Rule::new(Operator::Mul, Assoc::Left) | Rule::new(Operator::Div, Assoc::Left) | Rule::new(Operator::DivUp, Assoc::Left), Rule::new(Operator::Exp, Assoc::Right), ], handler, ); } fn main() { use Operator::*; use Token::*; // 2 + 2 // 4 let expression = Expression::new(Num(2i64), vec![(Add, Num(2))]); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 8 * 2 + 1 // 16 + 1 // 17 let expression = Expression::new(Num(8i64), vec![(Mul, Num(2)), (Add, Num(1))]); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 8 * ( 2 + 1 ) // 8 * 3 // 24 let expression = Expression::new( Num(8i64), vec![( Mul, Paren(Box::new(Expression::new(Num(2), vec![(Add, Num(1))]))), )], ); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 9 / 2 // 4 let expression = Expression::new(Num(9), vec![(Div, Num(2))]); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 9 /u 2 // 5 let expression = Expression::new(Num(9), vec![(DivUp, Num(2))]); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 5 ^ 2 // 25 let expression = Expression::new(Num(5), vec![(Exp, Num(2))]); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 1 + 5 ^ 3 + 1 // 1 + 125 + 1 // 127 let expression = Expression::new(Num(1), vec![(Add, Num(5)), (Exp, Num(3)), (Add, Num(1))]); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); // 5 + 3 ^ ( 1 + 1) - 2 * (8-1) / 3 // 5 + 3 ^ 2 - 2 * 7 / 3 // 5 + 9 - 2 * 7 / 3 // 5 + 9 - 14 / 3 // 5 + 9 - 4 // 10 let expression = Expression::new( Num(5), vec![ (Add, Num(3)), ( Exp, Paren(Box::new(Expression::new(Num(1), vec![(Add, Num(1))]))), ), (Sub, Num(2)), ( Mul, Paren(Box::new(Expression::new(Num(8), vec![(Sub, Num(1))]))), ), (Div, 
Num(3)), ], ); println!( "{} = {}", expression, CLIMBER.process(&expression, &()).unwrap() ); }
use crate::structs::raw::common::AddProp;
use serde::Deserialize;

/// One material cost entry of a promotion step, deserialized from
/// PascalCase-keyed raw data.
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct CostItem {
    // Item id; optional, presumably absent for padding entries in the raw
    // data — TODO confirm against the source JSON.
    pub id: Option<usize>,
    // Required item count; optional for the same reason as `id`.
    pub count: Option<usize>,
}

/// Raw weapon-promotion record (PascalCase keys in the source data).
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Data {
    // Groups all promotion steps of one weapon.
    pub weapon_promote_id: usize,
    // Promotion rank; `None` presumably marks the base (unpromoted) entry —
    // TODO confirm.
    pub promote_level: Option<usize>,
    // Currency cost of the promotion, if any.
    pub coin_cost: Option<usize>,
    // Material costs for this promotion step.
    pub cost_items: Vec<CostItem>,
    // Stat bonuses granted by the promotion.
    pub add_props: Vec<AddProp>,
    // Weapon level cap after this promotion.
    pub unlock_max_level: usize,
    // Minimum player level required to perform the promotion, if any.
    pub required_player_level: Option<usize>,
}
use vec3::*; use ray::*; use util::*; pub use num_traits::Zero; // ffmin/ffmax are faster because they do not worry about NaN and other issues. #[inline(always)] fn ffmin(a: f64, b: f64) -> f64 { if a < b { a } else { b } } #[inline(always)] fn ffmax(a: f64, b: f64) -> f64 { if a > b { a } else { b } } #[derive(Clone, Debug)] pub struct AABB { _min: Vec3<f64>, _max: Vec3<f64>, } impl AABB { pub fn new(a: Vec3<f64>, b: Vec3<f64>) -> AABB { AABB { _min: a, _max: b, } } pub fn zero() -> AABB { AABB::new(Vec3::zero(), Vec3::zero()) } #[inline(always)] pub fn min(&self) -> Vec3<f64> { self._min } #[inline(always)] pub fn max(&self) -> Vec3<f64> { self._max } pub fn longest_axis(&self) -> Axis { let a = self._max.x - self._min.x; let b = self._max.y - self._min.y; let c = self._max.z - self._min.z; if a > b && a > c { return Axis::X; } else if b > c { return Axis::Y; } else { return Axis::Z; } } pub fn area(&self) -> f64 { let a = self._max.x - self._min.x; let b = self._max.y - self._min.y; let c = self._max.z - self._min.z; return 2.0 * (a*b + b*c + c*a); } /// Check if the given ray hits the bounding box. pub fn hit(&self, _: &mut Rng, r: &Ray<f64>, tmin: f64, tmax: f64) -> bool { // Unfortunately rust (or llvm? // https://llvm.org/bugs/show_bug.cgi?id=27360) won't unroll the loop. macro_rules! check_axis { ($a:ident) => { let t0 = ffmin((self._min.$a - r.origin().$a) / r.direction().$a, (self._max.$a - r.origin().$a) / r.direction().$a); let t1 = ffmax((self._min.$a - r.origin().$a) / r.direction().$a, (self._max.$a - r.origin().$a) / r.direction().$a); let tmin = ffmax(t0, tmin); let tmax = ffmin(t1, tmax); if tmax <= tmin { return false; } } } check_axis!(x); check_axis!(y); check_axis!(z); return true; } } impl fmt::Display for AABB { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "AABB({}, {})", self._min, self._max) } } /// Compute the AABB that surrounds the two given boxes. 
pub fn surrounding_box(box0: &AABB, box1: &AABB) -> AABB { let small = Vec3::new(box0.min().x.min(box1.min().x), box0.min().y.min(box1.min().y), box0.min().z.min(box1.min().z)); let big = Vec3::new(box0.max().x.max(box1.max().x), box0.max().y.max(box1.max().y), box0.max().z.max(box1.max().z)); return AABB::new(small, big); }
// Binary entry point; all real work lives in the `config` module.
extern crate toml;
#[macro_use]
extern crate serde_derive;
extern crate yansi;
extern crate hyper;
extern crate base64_url;

mod config;

fn main() {
    // Loads/processes the configuration; presumably drives the whole
    // program from there — see `config.rs`.
    config::config();
}
#[cfg(any(test, feature = "use_serde", feature = "default"))]
use serde;

use rand::os::OsRng;
use rand::Rng;
use std::fmt;
use std::io;
use std::cmp::{ PartialEq, Eq };

/// The length of a hash salt in bytes.
///
/// Should be at least 16 bytes for really good salts.
/// Doesn't need to be a huge number.
pub const HASH_SALT_LEN: usize = 32;

/// A salt for password hashing.
///
/// Salts should be unique per user to provide maximum
/// security. They can also only be created using a
/// CSPRNG. Additionally, hash salts can be serialised
/// and deserialised, as they should be stored next to
/// your users' password hashes in your databases.
///
/// # Why salts?
///
/// Salts are a known random component that will be hashed
/// together with a user's clear text password. The uniqueness
/// of a salt, together with its unpredictability thanks to a
/// CSPRNG, ensure, that two users having the same clear text
/// passwords won't have the same password hashes, which makes
/// it way harder for attackers to crack a whole password
/// database.
pub struct HashSalt {
    // Raw salt bytes; fixed-size, filled from the OS CSPRNG.
    data: [u8; HASH_SALT_LEN],
}

impl HashSalt {
    /// Generates a new salt using an OS-provided CSPRNG.
    pub fn new() -> io::Result<HashSalt> {
        let mut osrng = OsRng::new()?;
        let mut spicy = HashSalt { data: [0; HASH_SALT_LEN] };
        osrng.fill_bytes(&mut spicy.data[..]);
        Ok(spicy)
    }

    /// Creates a hash salt from a reader.
    ///
    /// Only use this function to load serialised salts
    /// that have been created using `new`.
    ///
    /// # Errors
    ///
    /// This function fails if the reader has not enough
    /// bytes to completely fill the hash salt.
    pub fn from_reader<R: io::Read>(r: &mut R) -> io::Result<HashSalt> {
        let mut spicy = HashSalt { data: [0; HASH_SALT_LEN] };
        r.read_exact(spicy.as_slice_mut())?;
        Ok(spicy)
    }

    /// Get a slice of the hash salt's bytes.
    pub fn as_slice(&self) -> &[u8] {
        &self.data[..]
    }

    // Mutable view of the salt bytes; kept private so callers cannot
    // tamper with a generated salt.
    fn as_slice_mut(&mut self) -> &mut [u8] {
        &mut self.data[..]
    }

    // Overwrites the salt with `other`. Panics (via the slice
    // `copy_from_slice`) if the lengths differ; callers below check first.
    fn copy_from_slice(&mut self, other: &[u8]) {
        self.as_slice_mut().copy_from_slice(other);
    }

    // Shared body for both `Debug` and `Display`: hex dump of the bytes.
    fn fmt_impl(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "HashSalt[ ")?;
        for x in &self.data[..] {
            write!(f, "{:02X}", *x)?;
        }
        write!(f, " ]")
    }
}

// Due to the lack of literal/constant values as generic parameters,
// arrays and tuples are handled using lots of duplicate specialisations
// up to a specific size. In typical Rust programs, this size is 32 entries.
// Too bad that our arrays can be way bigger than that, which is why we
// can't just derive.
//
// NOTE(review): these impls target a pre-1.0 serde API (`&mut Serializer`,
// `serde::bytes`, lifetime-less `Deserialize`) — they will not compile
// against serde 1.x.
#[cfg(any(test, feature = "use_serde", feature = "default"))]
impl serde::Serialize for HashSalt {
    fn serialize<S: serde::Serializer>(&self, ser: &mut S) -> Result<(), S::Error> {
        // Serialised as a newtype struct wrapping the raw bytes.
        ser.serialize_newtype_struct("HashSalt", serde::bytes::Bytes::from(self.as_slice()))
    }
}

#[cfg(any(test, feature = "use_serde", feature = "default"))]
impl serde::Deserialize for HashSalt {
    fn deserialize<D: serde::Deserializer>(de: &mut D) -> Result<Self, D::Error> {
        de.deserialize_newtype_struct("HashSalt", HashSaltSerdeVisitor {})
    }
}

// Visitor accepting the newtype, raw-bytes, and sequence encodings.
#[cfg(any(test, feature = "use_serde", feature = "default"))]
struct HashSaltSerdeVisitor {}

#[cfg(any(test, feature = "use_serde", feature = "default"))]
impl serde::de::Visitor for HashSaltSerdeVisitor {
    type Value = HashSalt;

    // Unwraps the newtype layer, then delegates to the byte-buffer path.
    fn visit_newtype_struct<D: serde::Deserializer>(&mut self, de: &mut D) -> Result<HashSalt, D::Error> {
        use serde::{ Deserialize, Error };
        let bytes = serde::bytes::ByteBuf::deserialize(de)?;
        if bytes.len() != HASH_SALT_LEN {
            return Err(D::Error::invalid_length(bytes.len()));
        }
        let mut spicy = HashSalt { data: [0_u8; HASH_SALT_LEN] };
        spicy.copy_from_slice(&bytes[..]);
        Ok(spicy)
    }

    // Direct byte-slice encoding; length must match exactly.
    fn visit_bytes<E: serde::Error>(&mut self, v: &[u8]) -> Result<HashSalt, E> {
        if v.len() == HASH_SALT_LEN {
            let mut spicy = HashSalt { data: [0_u8; HASH_SALT_LEN] };
            spicy.copy_from_slice(v);
            Ok(spicy)
        } else {
            Err(E::invalid_length(v.len()))
        }
    }

    // Sequence encoding (e.g. from formats without a bytes type): expects a
    // single byte-buffer element.
    fn visit_seq<V: serde::de::SeqVisitor>(&mut self, mut visitor: V) -> Result<HashSalt, V::Error> {
        use serde::Error;
        let bytes = match visitor.visit::<serde::bytes::ByteBuf>()? {
            Some(bytes) => {
                visitor.end()?;
                bytes
            },
            None => {
                visitor.end()?;
                return Err(V::Error::invalid_length(0));
            },
        };
        if bytes.len() != HASH_SALT_LEN {
            return Err(V::Error::invalid_length(bytes.len()));
        }
        let mut spicy = HashSalt { data: [0_u8; HASH_SALT_LEN] };
        spicy.copy_from_slice(&bytes[..]);
        Ok(spicy)
    }
}

// Same reason as the serde stuff: the array is too long to derive for.
impl PartialEq for HashSalt {
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl Eq for HashSalt {}

impl fmt::Debug for HashSalt {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt_impl(f)
    }
}

impl fmt::Display for HashSalt {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt_impl(f)
    }
}

#[cfg(test)]
mod test {
    use super::{HashSalt, HASH_SALT_LEN};
    use serde_test::{ Token, assert_tokens };
    use std::io;

    #[test]
    fn salt_len_is_reasonably_large() {
        assert!(HASH_SALT_LEN >= 16, "Why not just make salts a constant 'asdf'?");
    }

    #[test]
    fn generating_salts_works() {
        let spicy = HashSalt::new().unwrap();
        assert_eq!(spicy.as_slice().len(), HASH_SALT_LEN);
        // Generating all-zeros is very unlikely.
        // Why then do I check for not everything being zero?
        // To check whether something has been generated and stored.
        let mut test = 0;
        for x in spicy.as_slice() {
            test |= *x;
        }
        assert!(test != 0, "Try running the test again. Generating all-zeros is not impossible.");
    }

    #[test]
    fn from_reader_works() {
        const ARR1: &'static [u8] = &[42_u8; HASH_SALT_LEN];
        // Only a single byte: way too short to fill a salt.
        const ARR2: &'static [u8] = &[ 1_u8; 1];
        let mut data1 = io::Cursor::new(ARR1);
        let mut data2 = io::Cursor::new(ARR2);
        // Exactly HASH_SALT_LEN bytes: fills the salt completely.
        assert_eq!(
            HashSalt::from_reader(&mut data1).unwrap(),
            HashSalt { data: [42_u8; HASH_SALT_LEN] }
        );
        // The short reader must fail with `UnexpectedEof`.
        assert_eq!(
            HashSalt::from_reader(&mut data2).unwrap_err().kind(),
            io::ErrorKind::UnexpectedEof
        );
    }

    #[test]
    fn serde_works() {
        let spicy = HashSalt::new().unwrap();
        // `serde_test` is very stupid, as in it doesn't
        // know about bytes. Just handle them like ordinary
        // integer arrays, 'cause who cares.
        let mut tokens: Vec<Token> = Vec::with_capacity(3 + (2 * spicy.as_slice().len()));
        tokens.push(Token::StructNewType("HashSalt"));
        tokens.push(Token::SeqStart(Some(HASH_SALT_LEN)));
        for x in spicy.as_slice() {
            tokens.push(Token::SeqSep);
            tokens.push(Token::U8(*x));
        }
        tokens.push(Token::SeqEnd);
        assert_tokens(&spicy, &tokens[..]);
    }
}
#![allow(unused_unsafe)] #![allow(dead_code)] use super::dx_pub_use::*; use super::unsafe_util::*; use std::ffi::CStr; use winapi::_core::mem; #[derive(Copy, Clone, Debug)] pub struct Vertex { pos: [f32; 3], uv: [f32; 2], } impl Vertex { pub fn new(pos: [f32; 3], uv: [f32; 2]) -> Vertex { Vertex { pos: pos, uv: uv } } } #[allow(non_camel_case_types)] pub struct XMFLOAT4 { pub x: f32, pub y: f32, pub z: f32, pub w: f32, } impl XMFLOAT4 { pub fn new(x: f32, y: f32, z: f32, w: f32) -> XMFLOAT4 { XMFLOAT4 { x: x, y: y, z: z, w: w, } } } #[allow(non_camel_case_types)] pub trait D3D12_INPUT_ELEMENT_DESC_EXT { fn new( semantic_name: &CStr, semantic_index: u32, format: DXGI_FORMAT, input_slot: u32, aligned_byte_offset: u32, input_slot_class: D3D12_INPUT_CLASSIFICATION, instance_data_step_rate: u32, ) -> D3D12_INPUT_ELEMENT_DESC; } impl D3D12_INPUT_ELEMENT_DESC_EXT for D3D12_INPUT_ELEMENT_DESC { #[inline] fn new( semantic_name: &CStr, semantic_index: u32, format: DXGI_FORMAT, input_slot: u32, aligned_byte_offset: u32, input_slot_class: D3D12_INPUT_CLASSIFICATION, instance_data_step_rate: u32, ) -> D3D12_INPUT_ELEMENT_DESC { D3D12_INPUT_ELEMENT_DESC { SemanticName: semantic_name.as_ptr(), SemanticIndex: semantic_index, Format: format, InputSlot: input_slot, AlignedByteOffset: aligned_byte_offset, InputSlotClass: input_slot_class, InstanceDataStepRate: instance_data_step_rate, } } } #[allow(non_camel_case_types)] pub trait D3D12_INPUT_LAYOUT_DESC_EXT { fn layout(&self) -> D3D12_INPUT_LAYOUT_DESC; } impl D3D12_INPUT_LAYOUT_DESC_EXT for [D3D12_INPUT_ELEMENT_DESC] { #[inline] fn layout(&self) -> D3D12_INPUT_LAYOUT_DESC { let (len, p) = slice_to_ptr(&self); D3D12_INPUT_LAYOUT_DESC { pInputElementDescs: p, NumElements: len, } } } #[allow(non_camel_case_types)] pub trait D3D12_MEMCPY_EXT { fn offset_slice(&self, slice: u32) -> usize; fn offset_row(&self, slice: u32) -> usize; fn ptr_offset(&self, offset: usize) -> *mut u8; } impl D3D12_MEMCPY_EXT for D3D12_MEMCPY_DEST { #[inline] 
fn offset_slice(&self, slice: u32) -> usize { (self.SlicePitch as usize) * (slice as usize) } #[inline] fn offset_row(&self, row: u32) -> usize { (self.RowPitch as usize) * (row as usize) } #[inline] fn ptr_offset(&self, offset: usize) -> *mut u8 { unsafe { let mut a: usize = mem::transmute(self.pData); a += offset; mem::transmute::<_, _>(a) } } } impl D3D12_MEMCPY_EXT for D3D12_SUBRESOURCE_DATA { #[inline] fn offset_slice(&self, slice: u32) -> usize { (self.SlicePitch as usize) * (slice as usize) } #[inline] fn offset_row(&self, row: u32) -> usize { (self.RowPitch as usize) * (row as usize) } #[inline] fn ptr_offset(&self, offset: usize) -> *mut u8 { unsafe { let mut a: usize = mem::transmute(self.pData); a += offset; mem::transmute(a) } } }
// File: The analyzer of the tool
// Purpose: Functions defined in this file are mainly used for analyze
//          the symbol table, generate proper advice and print out
//          the advice
// Author : Ziling Zhou (802414)
//
// NOTE(review): several user-facing output strings contain typos
// ("Adivice", "fucntion", "funciton"). They are runtime output and are left
// untouched here in case downstream tooling matches on them.

use SymbolTable;
use builtin::Ty;
use VarInfo;
use std::collections::{HashMap};
use std::io::{BufReader,BufRead};
use std::fs::File;

impl <'a> SymbolTable<'a>{
    // Entry point of this file, start to analyze the symbol table
    // print out the generated advice.
    //
    // Phase 1 ("drop" advice): collect, per source line, suggested `drop()`
    // insertions for non-primitive variables that are never moved, then echo
    // the analyzed file with the advice lines spliced in above the relevant
    // line. Phase 2 ("function" advice): report which arguments of each
    // user-defined function could take ownership instead of borrowing.
    pub fn start_analyze(&self, file_name:String ) {
        println!("========================================================");
        println!("Adivice one (drop): \n");
        // Maps 1-based line number -> advice strings to print above it.
        let mut print_list = HashMap::new();
        for (var,info) in &self.var_table{
            // Moved variables already transfer ownership; no drop advice.
            if !self.check_move(var){
                match info.var_type{
                    // Only non-primitive (heap-owning) types benefit from
                    // an early drop.
                    Ty::NonPrimitive=> {
                        self.choice_one_drop(&var,&info,&mut print_list);
                    },
                    _=>(),
                }
            }
        }
        // Recurse into nested scopes so their variables are analyzed too.
        for scope in & self.enclose_scope{
            scope.drop_analyze_innerscope(&mut print_list);
        }
        if print_list.is_empty(){
            println!("No adivice!");
        }else{
            // Re-read the analyzed source file and interleave the advice
            // lines at their recorded line numbers.
            let file = File::open(file_name).unwrap();
            let file_reader = BufReader::new(&file);
            let mut line_num:usize = 1;
            for line in file_reader.lines(){
                let l = line.unwrap();
                if let Some(print_lines) = print_list.get(&line_num){
                    for print_line in print_lines{
                        println!("{}", print_line);
                    }
                }
                println!("{}", l );
                line_num+=1;
            }
        }
        println!("========================================================");
        println!("Adivice two (fucntion): \n");
        // Maps function name -> per-argument "can take ownership" flags.
        let mut print_list_func:HashMap<String,Vec<bool>> = HashMap::new();
        self.choice_two_function(&mut print_list_func);
        let mut printed = false;
        for (print_fun, prints) in print_list_func {
            // Skip `main` and plain `new`; both conventionally keep their
            // signatures as-is.
            if (!String::eq(&print_fun,"main")) & (!String::eq(&print_fun,"new")) {
                let mut can_print = true;
                // Also skip `<Struct>new` constructor methods.
                for(struct_name, _ ) in self.structure_list{
                    let new_method = struct_name.to_string() +"new";
                    if String::eq(&new_method,&print_fun){
                        can_print = false;
                        break;
                    }
                }
                if can_print{
                    printed = true;
                    println!("{}:",print_fun);
                    // Inner `printed` intentionally shadows the outer flag:
                    // it tracks whether THIS function produced any advice.
                    let mut printed = false;
                    let mut index = 0;
                    while index< prints.len(){
                        if prints[index] == true{
                            println!("\targument {} can take ownership ", index+1 );
                            printed = true;
                        }
                        index+=1;
                    }
                    if !printed {
                        println!("\tNo advice for this function");
                    }
                }
            }
        }
        if !printed{
            println!("\n No advice for funciton");
        }
        println!("========================================================");
    }

    // Help analyze drop for inner scope: same per-variable pass as in
    // `start_analyze`, applied recursively to enclosed scopes.
    fn drop_analyze_innerscope(&self, mut print_list:&mut HashMap<usize,Vec<String>>) {
        for (var,info) in &self.var_table{
            if !self.check_move(var){
                match info.var_type{
                    Ty::NonPrimitive=> {
                        self.choice_one_drop(&var,&info,&mut print_list);
                    },
                    _=>(),
                }
            }
        }
        for scope in & self.enclose_scope{
            scope.drop_analyze_innerscope(&mut print_list);
        }
    }

    // Generate advice for function
    // call check function call for each function in function list.
    // Every argument starts as `true` (could take ownership) and is
    // falsified by `check_function_call` when a call site contradicts it.
    fn choice_two_function(&self, print_list_func:&mut HashMap<String,Vec<bool>> ) {
        for (func_name, info) in self.fun_records{
            let length = info.input;
            let mut print = vec![];
            let mut count:usize = 0;
            while count < length{
                print.push(true);
                count+=1;
            }
            self.check_function_call(func_name,length,&mut print);
            print_list_func.insert(func_name.to_string(), print);
        }
    }

    // Check each function call of a specific function.
    // If onw of a perameter's coorespond argument is used after the
    // call, then its place in print will be recorded as false.
    fn check_function_call(&self, func_name:&str, length:usize, print:&mut Vec<bool>) {
        if let Some(call_infos) = self.call_records.get(func_name){
            //Each call for a specific function
            for info in call_infos{
                let call_loc = info.call_location;
                let mut index = 0;
                while index < length{
                    // An argument used after the call site cannot give up
                    // ownership at the call.
                    let arg_last_used = self.get_last_used(&info.arguments[index]);
                    if arg_last_used != call_loc{
                        print[index] = false;
                    }
                    index+=1;
                }
            }
        }
        // Call sites may live in nested scopes as well.
        for scope in & self.enclose_scope{
            scope.check_function_call(func_name,length,print);
        }
    }

    // Generate advice for function drop.
    // Check whether the given varibale can be dropped earlier, if can,
    // put it into the print list (keyed by the line AFTER the last use).
    fn choice_one_drop(&self, var: &str, info: &VarInfo,
        mut print_list:&mut HashMap<usize,Vec<String>>) {
        if self.no_var_ref(var){
            // No outstanding references: advise a drop right after the
            // variable's last use, unless that is already where it drops.
            match info.last_used_loc{
                Some(line) => {
                    let mut can_print = true;
                    if (line+1) == info.when_to_drop{
                        can_print = false
                    }
                    if can_print{
                        let mut found = false;
                        if let Some(print_line) = print_list.get_mut(&(line+1)){
                            found = true;
                            print_line.push("drop( ".to_string()+ var + " ); // Adivice: a drop function can add here");
                        }
                        if ! found {
                            print_list.insert(line+1,vec!["drop( ".to_string()+ var + " ); // Adivice: a drop function can add here"]);
                        }
                    }
                }
                None => (),
            }
        }
        else{
            // If the variable is reffed by another variable, check when will the reference
            // be dropped: the variable can only drop after its LAST-living
            // reference goes away.
            let mut when_drop = 0;
            for refer in &info.ref_by{
                let tmp = self.get_when_drop(&refer,var);
                if when_drop< tmp {
                    when_drop = tmp
                }
            }
            // If the references will be drop earlier than the variable
            if (when_drop != 0) & (when_drop < info.when_to_drop){
                let mut found = false;
                if let Some(print_line) = print_list.get_mut(&(when_drop+1)){
                    found = true;
                    print_line.push("drop( ".to_string()+ var + " ); // Adivice: a drop function can add here");
                }
                if !found{
                    print_list.insert(when_drop+1, vec!["drop( ".to_string()+ var + " ); // Adivice: a drop function can add here"]);
                }
            }
        }
    }
}
// `fold` is similar to map-reduce: it collapses an iterator into a single
// accumulated value.
fn main() {
    let nums = vec![1, 2, 3];
    let total = nums.into_iter().fold(0, |sum, n| sum + n);
    println!("{}", total);
}
pub mod bench;

pub use self::bench::Bench;

use futures::prelude::*;
use indexmap::IndexMap;
use k8s_openapi::api::core::v1::{Pod, Service};
use std::sync::Arc;
use structopt::StructOpt;
use tokio::sync::Mutex;
use tokio_compat_02::FutureExt;
use tracing::{info, warn};

/// CLI options for the controller; the watched namespace defaults to
/// "default" and may also come from the environment.
#[derive(StructOpt)]
#[structopt(about = "Kubernetes controller")]
pub struct Controller {
    #[structopt(short, long, env, default_value = "default")]
    namespace: String,
}

// Shared controller context handed to tasks via `Arc`.
struct Ctx {
    //client: kube::Client,
    state: Mutex<State>,
}

// Mutable controller state: the set of currently-known Bench resources,
// keyed by resource name.
struct State {
    active: IndexMap<String, Bench>,
}

impl Controller {
    /// Watches `Bench` custom resources in the configured namespace and
    /// mirrors them into in-memory state.
    ///
    /// The outer `loop` restarts the watch (from the last seen resource
    /// version) whenever the watch stream ends or reports an error, so this
    /// function only returns if client construction or a watch request
    /// fails with a `kube::Error`.
    ///
    /// NOTE(review): `.compat()` wraps futures for tokio 0.2/1.x interop;
    /// the Service/Pod APIs are constructed but unused so far.
    pub async fn run(self) -> Result<(), kube::Error> {
        let Self { namespace } = self;
        let client = kube::Client::try_default().compat().await?;
        let benches_api = kube::Api::<Bench>::namespaced(client.clone(), &namespace);
        let _svc_api = kube::Api::<Service>::namespaced(client.clone(), &namespace);
        let _pods_api = kube::Api::<Pod>::namespaced(client.clone(), &namespace);

        let ctx = Arc::new(Ctx {
            //client,
            state: Mutex::new(State {
                active: IndexMap::default(),
            }),
        });

        // Last resource version seen; resuming the watch from it avoids
        // replaying events after a stream restart.
        let mut revision = "0".to_string();
        loop {
            let benches_params = kube::api::ListParams::default();
            info!(%revision, "Watching benches");
            let mut benches_stream = benches_api.watch(&benches_params, &revision).compat().await?.boxed();
            while let Some(ev) = benches_stream.next().compat().await {
                match ev {
                    // Transport-level error: log and keep consuming events.
                    Err(error) => {
                        warn!(?error);
                    }
                    // Added and Modified are handled identically: record the
                    // newest revision and upsert the resource into `active`.
                    Ok(kube::api::WatchEvent::Added(bench)) => {
                        if let Some(rv) = kube::api::Meta::resource_ver(&bench) {
                            revision = rv;
                        }
                        let name = kube::api::Meta::name(&bench);
                        let mut state = ctx.state.lock().await;
                        state.active.insert(name.clone(), bench);
                        info!(%name, %revision, active = %state.active.len(), "Added");
                    }
                    Ok(kube::api::WatchEvent::Modified(bench)) => {
                        if let Some(rv) = kube::api::Meta::resource_ver(&bench) {
                            revision = rv;
                        }
                        let name = kube::api::Meta::name(&bench);
                        let mut state = ctx.state.lock().await;
                        state.active.insert(name.clone(), bench);
                        info!(%name, %revision, active = %state.active.len(), "Modified");
                    }
                    // Deleted: drop the resource from the active set.
                    Ok(kube::api::WatchEvent::Deleted(bench)) => {
                        if let Some(rv) = kube::api::Meta::resource_ver(&bench) {
                            revision = rv;
                        }
                        let name = kube::api::Meta::name(&bench);
                        let mut state = ctx.state.lock().await;
                        state.active.remove(&name);
                        info!(%name, %revision, active = %state.active.len(), "Deleted");
                    }
                    // Bookmarks just advance the resource version.
                    Ok(kube::api::WatchEvent::Bookmark(b)) => {
                        revision = b.metadata.resource_version;
                    }
                    // API-level error: break out and re-establish the watch
                    // from the last recorded revision.
                    Ok(kube::api::WatchEvent::Error(error)) => {
                        warn!(%error);
                        break;
                    }
                }
            }
            info!("Stream completed");
        }
    }
}
use cs_bindgen::prelude::*; use derive_more::*; use lazy_static::lazy_static; use num_traits::{ops::wrapping::WrappingAdd, One, PrimInt}; use serde::*; use strum::*; #[cs_bindgen] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Serialize, Deserialize)] pub enum Tile { Simple(SimpleTile), Wind(Wind), Dragon(Dragon), } impl Tile { pub fn is_honor(self) -> bool { match self { Tile::Wind(..) | Tile::Dragon(..) => true, Tile::Simple(..) => false, } } pub fn as_honor(self) -> Option<HonorTile> { match self { Tile::Wind(wind) => Some(HonorTile::Wind(wind)), Tile::Dragon(dragon) => Some(HonorTile::Dragon(dragon)), Tile::Simple(..) => None, } } } #[cs_bindgen] #[derive( Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, EnumIter, Serialize, Deserialize, )] pub enum Suit { Coins, Bamboo, Characters, } #[cs_bindgen] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub struct SimpleTile { pub number: u8, pub suit: Suit, } impl SimpleTile { pub const fn new(suit: Suit, number: u8) -> Self { Self { suit, number } } } #[cs_bindgen] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, From, Serialize, Deserialize)] pub enum HonorTile { Wind(Wind), Dragon(Dragon), } #[cs_bindgen] #[derive( Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, EnumIter, Serialize, Deserialize, )] pub enum Wind { East, South, West, North, } impl Wind { /// Returns the next wind in the cycle order for winds. /// /// Winds follow the order: /// /// ```text /// East -> South -> West -> North -> East /// ``` /// /// Where North cycles back around to East. This is used for determining the dora /// from the dora indicator, and for determining turn order based on seat winds. 
/// /// # Examples /// /// ``` /// use mahjong::tile::Wind; /// /// let mut wind = Wind::East; /// /// wind = wind.next(); /// assert_eq!(Wind::South, wind); /// /// wind = wind.next(); /// assert_eq!(Wind::West, wind); /// /// wind = wind.next(); /// assert_eq!(Wind::North, wind); /// /// wind = wind.next(); /// assert_eq!(Wind::East, wind); /// ``` pub fn next(self) -> Self { match self { Wind::East => Wind::South, Wind::South => Wind::West, Wind::West => Wind::North, Wind::North => Wind::East, } } } #[cs_bindgen] #[derive( Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, EnumIter, Serialize, Deserialize, )] pub enum Dragon { White, Green, Red, } impl Dragon { /// Returns the next dragon in the cycle order for dragons. /// /// Dragons follow the order: /// /// ```text /// White -> Green -> Red -> White /// ``` /// /// Where Red cycles back around to Red. This is used to determine the dora based /// on the dora indicator. /// /// # Examples /// /// ``` /// use mahjong::tile::Dragon; /// /// let mut dragon = Dragon::White; /// /// dragon = dragon.next(); /// assert_eq!(Dragon::Green, dragon); /// /// dragon = dragon.next(); /// assert_eq!(Dragon::Red, dragon); /// /// dragon = dragon.next(); /// assert_eq!(Dragon::White, dragon); /// ``` pub fn next(self) -> Self { match self { Dragon::White => Dragon::Green, Dragon::Green => Dragon::Red, Dragon::Red => Dragon::White, } } } /// Unique identifier for a tile within a match. /// /// Since there are 4 copies of each tile in a standard Mahjong set, we need a way /// to uniquely identify each tile instance separately. This type, combined with /// [`TileInstance`], provides a way to unambiguously refer to a specific tile /// during a match. /// /// A given tile ID always maps to the same tile value, as specified by [`TILE_SET`]. /// You can use [`by_id`] to lookup the [`Tile`] value for a `TileId`. 
/// /// [`TileInstance`]: struct.TileInstance.html /// [`Tile`]: struct.Tile.html /// [`by_id`]: fn.by_id.html #[cs_bindgen] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] pub struct TileId(u8); /// An instance of a tile within a player's hand during a match. /// /// Combines a [`TileId`] with a [`Tile`] value in order to differentiate between /// the four copies of each tile in a mahjong set. /// /// [`TileId`]: struct.TileId.html /// [`Tile`]: struct.Tile.html // TODO: Make this class not `Copy` once cs-bindgen has a different way to specify // that a type should be marshaled by value. Since tile instances are meant to be // unique, we don't want it to be easy to accidentally create a copy of a tile. We // should try to always "move" the tile as a logical object in order to reduce the // risk of bugs coming from accidentally duplicating tiles. We might even want to // remove the `Clone` impl, since we could still use `new` to create a new instance // if we *really* needed to. This will likely also require support for returning // values by reference, since we wouldn't be able to return a copy when passing // values to Rust. #[cs_bindgen] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct TileInstance { pub id: TileId, pub tile: Tile, } impl TileInstance { pub fn new<T: Into<Tile>>(tile: T, id: TileId) -> Self { Self { id, tile: tile.into(), } } } lazy_static! { /// The full set of tile instances for a Riichi Mahjong match. pub static ref TILE_SET: Vec<TileInstance> = { /// Helper struct for generating the tile IDs. #[derive(Default)] struct TileIdGenerator(u8); impl TileIdGenerator { fn next(&mut self) -> TileId { let id = TileId(self.0); self.0 += 1; id } } let mut tiles = Vec::with_capacity(144); let mut id_generator = TileIdGenerator::default(); // Add simple tiles for each suit: // // * Tiles in each suit are numbered 1-9. // * There are four copies of each simple tile. 
for suit in Suit::iter() { for number in 1..=9 { for _ in 0..4 { tiles.push(TileInstance::new( SimpleTile { suit, number }, id_generator.next(), )); } } } // Add honor tiles: // // * There are dragon and wind honors. // * There are four copies of each honor tile. for dragon in Dragon::iter() { for _ in 0..4 { tiles.push(TileInstance::new(dragon, id_generator.next())); } } for wind in Wind::iter() { for _ in 0..4 { tiles.push(TileInstance::new(wind, id_generator.next())); } } tiles }; } /// Returns the tile value associated with the specified ID. /// /// Since each [`TileId`] has a unique mapping to a [`Tile`] value, we can lookup /// the tile associated with a given ID. This allows us to pass around [`TileId`] /// values while still being able to reason about the tile they refer to when /// necessary. /// /// [`TileId`]: struct.TileId.html /// [`Tile`]: struct.Tile.html pub fn by_id(id: TileId) -> Tile { TILE_SET .iter() .find(|instance| instance.id == id) .map(|instance| instance.tile) .unwrap_or_else(|| panic!("Unknown tile ID: {:?}", id)) } /// Determines if the given tiles form a chow, i.e. a sequence in the same suit. /// /// All three tiles must be simple tiles of the same suit (i.e. no dragons or /// winds), and their numeric values must form a numeric sequence. Returns `true` if /// any permutation of the tiles is a valid sequence. pub fn is_chow<T, U, V>(first: T, second: U, third: V) -> bool where T: Into<Tile>, U: Into<Tile>, V: Into<Tile>, { // Determine if all three tiles are simple tiles. Wind/Dragon tiles cannot form a // chow, so if any of the tiles is not a simple then we return `false.` let first = match first.into() { Tile::Simple(tile) => tile, _ => return false, }; let second = match second.into() { Tile::Simple(tile) => tile, _ => return false, }; let third = match third.into() { Tile::Simple(tile) => tile, _ => return false, }; // Determine if all three tiles have the same suit. 
if first.suit != second.suit || first.suit != third.suit { return false; } // Check the six possible orderings for the tiles. If any of them forms a sequence // then it is a valid chow. let (first, second, third) = (first.number, second.number, third.number); is_sequence(&[first, second, third]) || is_sequence(&[first, third, second]) || is_sequence(&[second, first, third]) || is_sequence(&[second, third, first]) || is_sequence(&[third, first, second]) || is_sequence(&[third, second, first]) } /// Checks if a slice of integers is a consecutive sequence. /// /// Returns `true` if all elements in `values` form a consecutive sequence in /// ascending order. Specifically, each element must be exactly one greater than the /// preceding element. This does not include wrapping, i.e. `[T::MAX, T::MIN]` is /// not considered a valid sequence. /// /// Returns `true` if `values` is empty or only has one element. fn is_sequence<T>(values: &[T]) -> bool where T: PrimInt + One + WrappingAdd, { if values.is_empty() { return true; } let mut last = values[0]; for &next in &values[1..] { // Check for overflow when adding 1 to the last value. If the value overflowed while // there are still more elements then `values` cannot be a valid sequence. let expected_next = last.wrapping_add(&T::one()); if expected_next < last { return false; } if next != expected_next { return false; } last = next; } true } #[cfg(test)] mod is_chow_tests { use super::*; use itertools::Itertools; // Tests for `is_chow`. 
#[test] fn rejects_honors() { assert!(!is_chow( Dragon::White, SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, )); assert!(!is_chow( SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, Dragon::White, )); assert!(!is_chow( Wind::East, SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, )); assert!(!is_chow( SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, Wind::East, )); } #[test] fn rejects_mismatched_suits() { assert!(!is_chow( SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, SimpleTile { suit: Suit::Bamboo, number: 3, }, )); assert!(!is_chow( SimpleTile { suit: Suit::Bamboo, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, SimpleTile { suit: Suit::Coins, number: 3, }, )); assert!(!is_chow( SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Bamboo, number: 2, }, SimpleTile { suit: Suit::Coins, number: 3, }, )); } #[test] fn all_permutations() { let tiles = [ SimpleTile { suit: Suit::Coins, number: 1, }, SimpleTile { suit: Suit::Coins, number: 2, }, SimpleTile { suit: Suit::Coins, number: 3, }, ]; for permutation in tiles.iter().permutations(3) { assert!(is_chow(*permutation[0], *permutation[1], *permutation[2])); } } } #[cfg(tests)] mod is_sequence_tests { use super::is_sequence; #[test] fn empty_sequence() { assert!(is_sequence::<i32>(&[])); } #[test] fn single_sequence() { assert!(is_sequence(&[0])); assert!(is_sequence(&[i32::MIN])); assert!(is_sequence(&[i32::MAX])); } #[test] fn detects_sequences() { // Positive sequences. assert!(is_sequence(&[0, 1, 2])); assert!(is_sequence(&[0, 1, 2, 3, 4])); assert!(is_sequence(&[u32::MIN, u32::MIN + 1, u32::MIN + 2])); // Short sequences. assert!(is_sequence(&[0, 1])); assert!(is_sequence(&[1234, 1235])); // Negative sequences. 
assert!(is_sequence(&[-3, -2, -1, 0, 1, 2, 3])); assert!(is_sequence(&[u32::MAX - 2, u32::MAX - 1, u32::MAX])); } #[test] fn rejects_non_sequences() { assert!(!is_sequence(&[1, 2, 0])); assert!(!is_sequence(&[0, 1, 3, 4])); } #[test] fn rejects_descending_sequence() { assert!(!is_sequence(&[3, 2, 1])); assert!(!is_sequence(&[-1, -2, -3])); } #[test] fn wrapping_sequence() { assert!(!is_sequence(&[u32::MAX, u32::MIN])); assert!(!is_sequence(&[ u32::MAX - 2, u32::MAX - 1, u32::MAX, u32::MIN, u32::MIN + 1, u32::MIN + 2, ])); } }
use crate::ast::{TmRef, TyRef}; use crate::check::infer; use crate::exhibit::{TmExhibit, TyExhibit}; pub type TCM<T> = Result<T, TCE>; pub type TCE = &'static str; pub struct TCS { pub tm_exh: TmExhibit, pub ty_exh: TyExhibit, pub gamma: Vec<TyRef>, } impl TCS { pub fn new(tm_exh: TmExhibit, ty_exh: TyExhibit) -> Self { Self { tm_exh, ty_exh, gamma: vec![], } } pub fn infer(&mut self, tm: TmRef) -> TCM<TyRef> { infer(self, tm) } }
use rand::Rng; #[derive(Copy, Clone)] pub enum DestRoom { Relative(isize, isize, i32, i32,), Absolute(isize, isize, i32, i32,), } impl DestRoom { pub fn to_absolute_coordinates(&self, (x, y): (i32, i32)) -> (i32, i32) { match self { DestRoom::Relative(rel_x, rel_y, _, _) => (x + *rel_x as i32, y + *rel_y as i32), DestRoom::Absolute(abs_x, abs_y, _, _) => (*abs_x as i32, *abs_y as i32), } } pub fn spawn_point(&self) -> (i32, i32) { match self { DestRoom::Relative(_, _, x, y) => (*x, *y), DestRoom::Absolute(_, _, x, y) => (*x, *y), } } } #[derive(Copy, Clone)] pub enum RoomField { Nothing, Wall, Stone, Bush, Player, Exit(DestRoom), } #[derive(Clone)] pub struct Room { pub width: usize, pub height: usize, pub fields: Vec<RoomField>, } impl Room { pub fn new(width: usize, height: usize) -> Room { let fields = (0..(width * height)).map(|_| RoomField::Nothing).collect(); Room { width, height, fields, } } pub fn set_field(&mut self, x: usize, y: usize, field: RoomField) { if x < self.width && y < self.height { let index = x + y * self.width; self.fields[index] = field; } } pub fn get_field(&self, x: usize, y: usize) -> Option<RoomField> { if x < self.width && y < self.height { let index = x + y * self.width; Some(self.fields[index]) } else { None } } pub fn room_field_iterator(&self) -> RoomFieldIterator { RoomFieldIterator { x: 0, y: 0, room: self } } } #[derive(Default)] pub struct RoomGeneration { pub width: usize, pub height: usize, pub exit_north: bool, pub exit_south: bool, pub exit_east: bool, pub exit_west: bool, } impl RoomGeneration { pub fn generate_room(&self, rng: &mut impl Rng) -> Room { let mut room = Room::new(self.width, self.height); /* Draw borders */ let wall_borders = RoomField::Wall; for x in 0..self.width { room.set_field(x, 0, wall_borders); room.set_field(x, self.height - 1, wall_borders); } for y in 0..self.height { room.set_field(0, y, wall_borders); room.set_field(self.width - 1, y, wall_borders); } /* Open exits */ if self.exit_north { 
room.set_field(self.width / 2, self.height - 1, RoomField::Exit(DestRoom::Relative(0, -1, self.width as i32 / 2, 1))); } if self.exit_south { room.set_field(self.width / 2, 0, RoomField::Exit(DestRoom::Relative(0, 1, self.width as i32 / 2, self.height as i32 - 2))); } if self.exit_east { room.set_field(self.width - 1, self.height / 2, RoomField::Exit(DestRoom::Relative(1, 0, 1, self.height as i32 / 2))); } if self.exit_west { room.set_field(0, self.height / 2, RoomField::Exit(DestRoom::Relative(-1, 0, self.width as i32 - 2, self.height as i32 / 2))); } /* Draw 5-7 random stones */ for _ in 0..rng.gen_range(5, 8) { let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Stone); } /* Draw 5-7 bushes */ for _ in 0..rng.gen_range(5, 8) { let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Bush); } /* Add the player somewhere */ let x = rng.gen_range(2, self.width - 3); let y = rng.gen_range(2, self.height - 3); room.set_field(x, y, RoomField::Player); room } } pub struct RoomFieldIterator<'a> { room: &'a Room, x: usize, y: usize, } impl<'a> Iterator for RoomFieldIterator<'a> { type Item = (usize, usize, RoomField); fn next(&mut self) -> Option<Self::Item> { let result = self.room.get_field(self.x, self.y).map(|field| (self.x, self.y, field)); self.x += 1; if self.x >= self.room.width { self.x = 0; self.y += 1; } if self.y >= self.room.height { return None; } result } }
// Copyright 2020-2021, The Tremor Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![cfg(not(tarpaulin_include))] use crate::errors::{Error, ErrorKind, Result}; use crate::source::prelude::*; use async_channel::Sender; use async_channel::TryRecvError; use async_std::net::TcpListener; use async_tls::TlsAcceptor; use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys}; use rustls::{Certificate, NoClientAuth, PrivateKey, ServerConfig}; use std::io::BufReader; use std::path::{Path, PathBuf}; use std::sync::Arc; // TODO expose this as config (would have to change buffer to be vector?) 
const BUFFER_SIZE_BYTES: usize = 8192; #[derive(Debug, Clone, Deserialize, Default)] pub struct Config { pub port: u16, pub host: String, pub tls: Option<TLSConfig>, } #[derive(Debug, Clone, Deserialize)] pub struct TLSConfig { cert: PathBuf, key: PathBuf, } impl ConfigImpl for Config {} pub struct Tcp { pub config: Config, onramp_id: TremorUrl, } pub struct Int { uid: u64, config: Config, listener: Option<Receiver<SourceReply>>, onramp_id: TremorUrl, } impl std::fmt::Debug for Int { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "TCP") } } impl Int { fn from_config(uid: u64, onramp_id: TremorUrl, config: &Config) -> Self { let config = config.clone(); Self { uid, config, listener: None, onramp_id, } } } impl onramp::Impl for Tcp { fn from_config(id: &TremorUrl, config: &Option<YamlValue>) -> Result<Box<dyn Onramp>> { if let Some(config) = config { let config: Config = Config::new(config)?; Ok(Box::new(Self { config, onramp_id: id.clone(), })) } else { Err("Missing config for tcp onramp".into()) } } } #[async_trait::async_trait()] impl Source for Int { fn id(&self) -> &TremorUrl { &self.onramp_id } async fn pull_event(&mut self, _id: u64) -> Result<SourceReply> { self.listener.as_ref().map_or_else( || Ok(SourceReply::StateChange(SourceState::Disconnected)), |listener| match listener.try_recv() { Ok(r) => Ok(r), Err(TryRecvError::Empty) => Ok(SourceReply::Empty(10)), Err(TryRecvError::Closed) => { Ok(SourceReply::StateChange(SourceState::Disconnected)) } }, ) } async fn init(&mut self) -> Result<SourceState> { let listener = TcpListener::bind((self.config.host.as_str(), self.config.port)).await?; let (tx, rx) = bounded(crate::QSIZE); let uid = self.uid; let path = vec![self.config.port.to_string()]; let server_config: Option<ServerConfig> = if let Some(tls_config) = self.config.tls.as_ref() { Some(load_server_config(tls_config)?) 
            } else {
                None
            };
        // Accept loop: one spawned task per incoming connection.
        task::spawn(async move {
            let mut stream_id = 0;
            while let Ok((stream, peer)) = listener.accept().await {
                let tx = tx.clone();
                stream_id += 1;
                let origin_uri = EventOriginUri {
                    uid,
                    scheme: "tremor-tcp".to_string(),
                    host: peer.ip().to_string(),
                    port: Some(peer.port()),
                    // TODO also add token_num here?
                    path: path.clone(), // captures server port
                };
                let tls_acceptor: Option<TlsAcceptor> = server_config
                    .clone()
                    .map(|s| TlsAcceptor::from(Arc::new(s)));
                task::spawn(async move {
                    //let (reader, writer) = &mut (&stream, &stream);
                    if let Err(e) = tx.send(SourceReply::StartStream(stream_id)).await {
                        error!("TCP Error: {}", e);
                        return;
                    }
                    if let Some(acceptor) = tls_acceptor {
                        // TLS handshake first; on failure, close the stream we just announced.
                        match acceptor.accept(stream).await {
                            Ok(tls_stream) => {
                                read_loop(tls_stream, tx, stream_id, origin_uri).await;
                            }
                            Err(_e) => {
                                if let Err(e) = tx.send(SourceReply::EndStream(stream_id)).await {
                                    error!("TCP Error: {}", e);
                                    return;
                                }
                            }
                        }
                    } else {
                        read_loop(stream, tx, stream_id, origin_uri).await;
                    };
                });
            }
        });
        self.listener = Some(rx);
        Ok(SourceState::Connected)
    }
}

#[async_trait::async_trait]
impl Onramp for Tcp {
    async fn start(&mut self, config: OnrampConfig<'_>) -> Result<onramp::Addr> {
        let source = Int::from_config(config.onramp_uid, self.onramp_id.clone(), &self.config);
        SourceManager::start(source, config).await
    }

    fn default_codec(&self) -> &str {
        "json"
    }
}

/// Read from a (plain or TLS) stream until EOF or error, forwarding each
/// chunk as `SourceReply::Data` and closing with `EndStream` on EOF.
async fn read_loop(
    mut stream: impl futures::io::AsyncRead + std::marker::Unpin,
    tx: Sender<SourceReply>,
    stream_id: usize,
    origin_uri: EventOriginUri,
) {
    let mut buffer = [0; BUFFER_SIZE_BYTES];
    while let Ok(n) = stream.read(&mut buffer).await {
        // A zero-length read signals EOF.
        if n == 0 {
            if let Err(e) = tx.send(SourceReply::EndStream(stream_id)).await {
                error!("TCP Error: {}", e);
            };
            break;
        };
        if let Err(e) = tx
            .send(SourceReply::Data {
                origin_uri: origin_uri.clone(),
                // ALLOW: we define n as part of the read
                data: buffer[0..n].to_vec(),
                meta: None, // TODO: add peer address etc. to meta
                codec_override: None,
                stream: stream_id,
            })
            .await
        {
            error!("TCP Error: {}", e);
            break;
        };
    }
}

// Load the passed certificates file
fn load_certs(path: &Path) -> Result<Vec<Certificate>> {
    let certfile = tremor_common::file::open(path)?;
    let mut reader = BufReader::new(certfile);
    certs(&mut reader).map_err(|_| {
        Error::from(ErrorKind::TLSError(format!(
            "Invalid certificate in {}",
            path.display()
        )))
    })
}

// Load the passed keys file
fn load_keys(path: &Path) -> Result<PrivateKey> {
    // prefer to load pkcs8 keys
    // this will only error if we have invalid pkcs8 key base64 or we couldnt read the file.
    let mut keys: Vec<PrivateKey> = {
        let keyfile = tremor_common::file::open(path)?;
        let mut reader = BufReader::new(keyfile);
        pkcs8_private_keys(&mut reader).map_err(|_e| {
            Error::from(ErrorKind::TLSError(format!(
                "Invalid PKCS8 Private key in {}",
                path.display()
            )))
        })
    }?;
    // only attempt to load as RSA keys if file has no pkcs8 keys
    if keys.is_empty() {
        let keyfile = tremor_common::file::open(path)?;
        let mut reader = BufReader::new(keyfile);
        keys = rsa_private_keys(&mut reader).map_err(|_e| {
            Error::from(ErrorKind::TLSError(format!(
                "Invalid RSA Private key in {}",
                path.display()
            )))
        })?;
    }
    if keys.is_empty() {
        Err(Error::from(ErrorKind::TLSError(format!(
            "No valid private keys (RSA or PKCS8) found in {}",
            path.display()
        ))))
    } else {
        // ALLOW: we know keys is not empty
        Ok(keys.remove(0))
    }
}

/// Combine the configured cert chain and private key into a rustls ServerConfig.
fn load_server_config(config: &TLSConfig) -> Result<ServerConfig> {
    let certs = load_certs(&config.cert)?;
    let keys = load_keys(&config.key)?;
    let mut server_config = ServerConfig::new(NoClientAuth::new());
    server_config
        // set this server to use one cert together with the loaded private key
        .set_single_cert(certs, keys)?;
    Ok(server_config)
}
use std::io; use actix; use actix::{WrapFuture, Actor, fut, ActorFuture, ContextFutureSpawner, AsyncContext}; use actix_web; use futures::{Future, IntoFuture, Stream}; use tokio_postgres; use database::models; pub struct PgConnection { client: Option<tokio_postgres::Client>, create_st: Option<tokio_postgres::Statement>, read_st: Option<tokio_postgres::Statement>, update_st: Option<tokio_postgres::Statement>, delete_st: Option<tokio_postgres::Statement>, list_st: Option<tokio_postgres::Statement>, create_table_st: Option<tokio_postgres::Statement>, } impl actix::Actor for PgConnection { type Context = actix::Context<Self>; } impl PgConnection { pub fn connect(db_url: &str) -> actix::Addr<PgConnection> { let hs = tokio_postgres::connect(db_url.parse().unwrap(), tokio_postgres::TlsMode::None); PgConnection::create(move |ctx| { let act = PgConnection { client: None, create_st: None, read_st: None, update_st: None, delete_st: None, list_st: None, create_table_st: None, }; hs.map_err(|_| panic!("cannot connect to postgresql")) .into_actor(&act) .and_then(|(mut cl, conn), act, ctx| { ctx.wait( cl.prepare("CREATE TABLE IF NOT EXISTS users (id SERIAL PRIMARY KEY NOT NULL, email VARCHAR(100) NOT NULL);") .map_err(|_| ()) .into_actor(act) .and_then(|st, act, _| { // ctxx.wait( // cl.execute(&st, &[]) // .poll().unwrap() also works, i don't like it though for some reason // .map_err(|_| ()) // .into_actor(actt) // .and_then(|_,_,_| {fut::ok(())}) // ); act.create_table_st = Some(st); fut::ok(()) }), ); ctx.wait( cl.prepare("INSERT INTO users (email) VALUES ($1) RETURNING *;") .map_err(|_| ()) .into_actor(act) .and_then(|statement, act, _ctxx| { act.create_st = Some(statement); fut::ok(()) }), ); ctx.wait( cl.prepare("SELECT * FROM users WHERE id=$1;") .map_err(|_| ()) .into_actor(act) .and_then(|statement, act, _ctx| { act.read_st = Some(statement); fut::ok(()) }), ); ctx.wait( cl.prepare("UPDATE users SET email = $2 WHERE id=$1 RETURNING *;") .map_err(|_| ()) .into_actor(act) 
.and_then(|statement, act, _ctx| { act.update_st = Some(statement); fut::ok(()) }), ); ctx.wait( cl.prepare("DELETE FROM users WHERE id = $1;") .map_err(|_| ()) .into_actor(act) .and_then(|statement, act, _ctx| { act.delete_st = Some(statement); fut::ok(()) }), ); ctx.wait( cl.prepare("SELECT * from users;") .map_err(|_| ()) .into_actor(act) .and_then(|statement, act, _ctx| { act.list_st = Some(statement); fut::ok(()) }), ); act.client = Some(cl); actix::Arbiter::spawn(conn.map_err(|e| panic!("{}", e))); fut::ok(()) }).wait(ctx); act }) } } pub struct InitializeDatabase; impl actix::Message for InitializeDatabase { type Result = io::Result<u64>; } impl actix::Handler<InitializeDatabase> for PgConnection { type Result = actix::ResponseFuture<u64, io::Error>; fn handle(&mut self, _msg: InitializeDatabase, _ctx: &mut Self::Context) -> Self::Result { Box::new( self.client .as_mut() .unwrap() .execute(self.create_table_st.as_ref().unwrap(), &[]) .into_future() .map_err(|e| io::Error::new(io::ErrorKind::Other, e)) .and_then(|num_rows| { Ok(num_rows) }), ) } } pub struct CreateUser { pub email: String, } impl actix::Message for CreateUser { type Result = Result<models::User, actix_web::Error>; } impl actix::Handler<CreateUser> for PgConnection { type Result = actix::ResponseFuture<models::User, actix_web::Error>; fn handle(&mut self, msg: CreateUser, _ctx: &mut Self::Context) -> Self::Result { Box::new( self.client .as_mut() .unwrap() .query(self.create_st.as_ref().unwrap(), &[&msg.email]) .into_future() .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to create user")) .and_then(|(row, _query)| { let row = row.unwrap(); Ok( models::User { id: row.get(0), email: row.get(1), } ) }), ) } } pub struct GetUser { pub id: i32, } impl actix::Message for GetUser { type Result = Result<models::User, actix_web::Error>; } impl actix::Handler<GetUser> for PgConnection { type Result = actix::ResponseFuture<models::User, actix_web::Error>; fn handle(&mut self, msg: 
GetUser, _ctx: &mut Self::Context) -> Self::Result { Box::new( self.client .as_mut() .unwrap() .query(self.read_st.as_ref().unwrap(), &[&msg.id]) .into_future() .map_err(|_| actix_web::error::ErrorInternalServerError("failed to get user")) .and_then(|(row, _query)| { let row = row.unwrap(); Ok( models::User { id: row.get(0), email: row.get(1), } ) }), ) } } pub struct UpdateUser { pub user: models::User, } impl actix::Message for UpdateUser { type Result = Result<models::User, actix_web::Error>; } impl actix::Handler<UpdateUser> for PgConnection { type Result = actix::ResponseFuture<models::User, actix_web::Error>; fn handle(&mut self, msg: UpdateUser, _ctx: &mut Self::Context) -> Self::Result { Box::new( self.client .as_mut() .unwrap() .query(self.update_st.as_ref().unwrap(), &[&msg.user.id, &msg.user.email]) .into_future() .map_err(|_| actix_web::error::ErrorInternalServerError("failed to get user")) .and_then(|(row, _query)| { let row = row.unwrap(); Ok( models::User { id: row.get(0), email: row.get(1), } ) }), ) } } pub struct DeleteUser { pub id: i32, } impl actix::Message for DeleteUser { type Result = Result<(), actix_web::Error>; } impl actix::Handler<DeleteUser> for PgConnection { type Result = actix::ResponseFuture<(), actix_web::Error>; fn handle(&mut self, msg: DeleteUser, _ctx: &mut Self::Context) -> Self::Result { Box::new( self.client .as_mut() .unwrap() .query(self.delete_st.as_ref().unwrap(), &[&msg.id]) .into_future() .map_err(|_| actix_web::error::ErrorInternalServerError("failed to delete user")) .and_then(|(_row, _query)| { Ok(()) }), ) } } pub struct ListUsers; impl actix::Message for ListUsers { type Result = Result<Vec<models::User>, actix_web::Error>; } impl actix::Handler<ListUsers> for PgConnection { type Result = actix::ResponseFuture<Vec<models::User>, actix_web::Error>; fn handle(&mut self, _msg: ListUsers, _ctx: &mut Self::Context) -> Self::Result { let users = Vec::with_capacity(4); Box::new( self.client .as_mut() .unwrap() 
.query(self.list_st.as_ref().unwrap(), &[]) .fold(users, move |mut users, row| { users.push( models::User { id: row.get(0), email: row.get(1), } ); Ok::<_, tokio_postgres::error::Error>(users) }) .map_err(|_| actix_web::error::ErrorInternalServerError("failed to get users")) .and_then(|users| { Ok(users) }), ) } }
use libraries::Library; use vm::Value; #[derive(Debug)] pub struct IO; impl IO { pub fn new() -> IO { IO } fn println(&self, val: Value) -> Value { println!("{:?}", val); Value::null() } } impl Library for IO { fn call(&self, method: String) -> Value { let method : &str = method.trim(); match method { "println" => self.println(Value::null()), _ => panic!("No such method {:?}", method), } } }
use ethcontract::prelude::*;

use crate::cli::{Currency, Eth, Scm};
use chrono::{Local, NaiveDateTime, TimeZone, Utc};
use ethcontract::batch::CallBatch;
use futures::StreamExt as _;

/// CLI subcommands for interacting with the SCM ICO contract.
#[derive(structopt::StructOpt)]
#[structopt(about = "Participate in SCM ICO")]
pub enum IcoCommand {
    #[structopt(about = "Get status of the ICO")]
    Info,
    #[structopt(about = "Get number of SCM tokens available to the given user")]
    Balance {
        #[structopt(help = "Account we're fetching balance for (uses your account by default)")]
        address: Option<Address>,
        #[structopt(long, help = "Display balance in ETH")]
        eth: bool,
    },
    #[structopt(about = "Buy SCM")]
    Fund {
        #[structopt(long, help = "Wrap and approve eth if you don't have enough of it")]
        wrap_weth: bool,
        #[structopt(long, help = "Ensure that ICO is authorized to spend WETH")]
        approve_weth: bool,
        #[structopt(help = "Number of ETH tokens to contribute to the ICO")]
        funds: Eth,
    },
    #[structopt(about = "Claim purchased SCM")]
    Claim {
        #[structopt(long, help = "If ICO is not finished, wait for it")]
        wait: bool,
    },
    #[structopt(about = "Wait for ICO to finish")]
    Wait,
}

impl IcoCommand {
    /// Execute the selected subcommand against the ICO contract.
    pub async fn invoke(&self, account: Account, web3: &Web3<Http>) {
        let contract_address = crate::contracts::get_ico_address(web3).await;
        let contract = crate::contracts::ICO::at(web3, contract_address);
        match self {
            IcoCommand::Info => {
                // Pin all reads to one block so the figures are consistent.
                let current_block = web3.eth().block_number().await.unwrap();
                let current_block = BlockId::Number(BlockNumber::Number(current_block));
                // Batch the five view calls into a single RPC round-trip.
                let mut batch = CallBatch::new(web3.transport());
                let state = contract.state().block(current_block).batch_call(&mut batch);
                let left_eth = contract
                    .left_eth()
                    .block(current_block)
                    .batch_call(&mut batch);
                let left_scm = contract
                    .left_scm()
                    .block(current_block)
                    .batch_call(&mut batch);
                let scm = contract.scm().block(current_block).batch_call(&mut batch);
                let weth = contract.weth().block(current_block).batch_call(&mut batch);
                batch.execute_all(100).await;
                let state = state.await.expect("state call failed");
                match state {
                    0x0 => println!("State: Ongoing"),
                    0x1 => println!("State: Closed"),
                    0x2 => println!("State: Finished"),
                    unknown => println!("State: Unknown ({})", unknown),
                };
                println!(
                    "Left ETH: {}",
                    Eth::new(left_eth.await.expect("left_eth call failed"))
                );
                println!(
                    "Left SCM: {}",
                    Scm::new(left_scm.await.expect("left_scm call failed"))
                );
                println!("ICO: {:?}", contract_address);
                println!("SCM: {:?}", scm.await.expect("scm call failed"));
                println!("WETH: {:?}", weth.await.expect("weth call failed"));
                // Close/finish times only exist once the ICO has left Ongoing.
                if state != 0 {
                    let close_time = {
                        let timestamp = contract
                            .close_time()
                            .block(current_block)
                            .call()
                            .await
                            .expect("close_time call failed")
                            .as_u64();
                        Local.from_utc_datetime(&NaiveDateTime::from_timestamp(timestamp as i64, 0))
                    };
                    println!("Close time: {}", close_time);
                    let finish_time = {
                        let timestamp = contract
                            .finish_time()
                            .block(current_block)
                            .call()
                            .await
                            // NOTE(review): expect message copy-pasted from the
                            // close_time block above; should read "finish_time call failed".
                            .expect("close_time call failed")
                            .as_u64();
                        Local.from_utc_datetime(&NaiveDateTime::from_timestamp(timestamp as i64, 0))
                    };
                    println!("Finish time: {}", finish_time);
                }
            }
            IcoCommand::Balance { address, eth } => {
                // Default to the caller's own account.
                let address = address.unwrap_or(account.address());
                if *eth {
                    let balance = contract
                        .balance_eth(address)
                        .call()
                        .await
                        .expect("balance fetch failed");
                    println!("ICO balance: {}", Eth::new(balance));
                } else {
                    let balance = contract
                        .balance_scm(address)
                        .call()
                        .await
                        .expect("balance fetch failed");
                    println!("ICO balance: {}", Scm::new(balance));
                };
            }
            IcoCommand::Fund {
                wrap_weth,
                approve_weth,
                funds,
            } => {
                let weth_address = contract.weth().call().await.unwrap();
                let weth = crate::contracts::WETH9::at(web3, weth_address);
                if *wrap_weth {
                    // Only wrap if the current WETH balance can't cover the contribution.
                    let balance = weth
                        .balance_of(account.address())
                        .call()
                        .await
                        .expect("balance_of call failed");
                    if balance < funds.as_inner() {
                        println!("Wrapping WETH");
                        weth.deposit()
                            .from(account.clone())
                            .value(funds.as_inner())
                            .send()
                            .await
                            .expect("deposit failed");
                    } else {
                        println!("WETH balance is sufficient, no need to wrap more");
                    }
                }
                if
                *approve_weth || *wrap_weth {
                    // Ensure the ICO contract may spend our WETH; top up the
                    // allowance generously (10 * 10^18) when it falls short.
                    let allowance = weth
                        .allowance(account.address(), contract_address)
                        .call()
                        .await
                        .expect("balance_of call failed");
                    if allowance < funds.as_inner() {
                        println!("Approving WETH");
                        weth.approve(contract_address, U256::exp10(18) * 10)
                            .from(account.clone())
                            .send()
                            .await
                            .expect("approve failed");
                    } else {
                        println!("WETH allowance is sufficient, no need to approve more");
                    }
                }
                contract
                    .fund(funds.as_inner())
                    .from(account.clone())
                    .send()
                    .await
                    .expect("fund call failed");
                println!("Done");
                let balance = contract
                    .balance_scm(account.address())
                    .call()
                    .await
                    .expect("balance fetch failed");
                println!("ICO balance: {}", Scm::new(balance));
            }
            IcoCommand::Claim { wait } => {
                if *wait {
                    wait_finish(web3, &contract).await;
                }
                contract
                    .claim()
                    .from(account.clone())
                    .send()
                    .await
                    // NOTE(review): expect message copy-pasted from the Fund arm;
                    // should read "claim call failed".
                    .expect("fund call failed");
                println!("Done");
                let scm_address = contract.scm().call().await.unwrap();
                let scm = crate::contracts::SCM::at(web3, scm_address);
                let balance = scm
                    .balance_of(account.address())
                    .call()
                    .await
                    .expect("balance fetch failed");
                println!("SCM balance: {}", Scm::new(balance));
            }
            IcoCommand::Wait => {
                wait_finish(web3, &contract).await;
            }
        }
    }
}

/// Block until the ICO reaches the Finished (0x2) state: wait for the
/// IcoClosed event if still Ongoing, sleep until the advertised finish time,
/// then poll the state every 10 seconds.
async fn wait_finish(web3: &Web3<Http>, contract: &crate::contracts::ICO) {
    let current_block = web3.eth().block_number().await.unwrap();
    let state = contract
        .state()
        .block(BlockId::Number(BlockNumber::Number(current_block)))
        .call()
        .await
        .unwrap();
    if state == 0 {
        println!("Waiting for ICO to close");
        // Subscribe from the block we just inspected so the event isn't missed.
        contract
            .events()
            .ico_closed()
            .from_block(BlockNumber::Number(current_block))
            .stream()
            .boxed()
            .next()
            .await;
        println!("ICO closed");
    }
    let finish_time = {
        let timestamp = contract
            .finish_time()
            .call()
            .await
            .expect("finish_time call failed")
            .as_u64();
        let naive = NaiveDateTime::from_timestamp(timestamp as i64, 0);
        Utc.from_utc_datetime(&naive)
    };
    let now = Utc::now();
    if now < finish_time {
        println!("ICO will finish on {}", finish_time.with_timezone(&Local));
        println!("Waiting for ICO to finish");
        tokio::time::sleep((finish_time - now).to_std().unwrap()).await;
    }
    // The on-chain clock may lag ours slightly; poll until Finished.
    while contract.state().call().await.unwrap() != 0x2 {
        tokio::time::sleep(std::time::Duration::new(10, 0)).await;
    }
    println!("ICO is finished");
}
use crate::{config, err, util}; use anyhow::Result; use std::path::Path; use std::process::Command; /// Returns the timestamp of the most recent commit modifying `path` in seconds. pub fn timestamp<P: AsRef<Path>>(path: P) -> Result<u64> { use chrono::prelude::*; let output = Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .args(&["log", "--pretty=format:%cd", "-n", "1", "--date=iso", "--"]) .arg(path.as_ref()) .output()?; Ok( String::from_utf8(output.stdout)? .trim() .parse::<DateTime<Utc>>()? .timestamp() as u64, ) } fn has_remote() -> Result<bool> { Ok( Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .args(&["remote", "-v"]) .output()? .stdout .len() != 0, ) } /// Sets the url as the upstream repository. pub fn set_remote(url: &str) -> Result<()> { let status = Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .args(&["remote", "add", "origin", url]) .status()?; if status.success() { Ok(()) } else { err::err("git error") } } /// Execute a git command. fn git_cmd(cmd: &[&str]) -> Result<()> { if !has_remote()? { return err::err("Attempting git command without existing repo."); } let status = Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .args(cmd) .status()?; if status.success() { Ok(()) } else { err::err("git error") } } /// Clones an existing tittle directory pub fn clone(url: &str) -> Result<()> { if config::tittle_config_dir().is_dir() { return err::err( "Can't clone remote dotfile repository if local \ repository already exists. Delete ~/.tittle first.", ); } let output = Command::new("git") .args(&["clone", url]) .arg(config::tittle_config_dir()) .output()?; if output.status.success() { Ok(()) } else { util::error(String::from_utf8(output.stderr)?.trim()); err::err("Couldn't clone repo.") } } /// Pull in any changes in the tittle Git repository. pub fn pull() -> Result<()> { git_cmd(&["pull", "origin", "master"]) } /// Push any changes in the tittle Git repository. 
pub fn push() -> Result<()> { git_cmd(&["push", "-u", "origin", "master"]) } /// Create a commit under `tittle_config_dir()` with the message `msg`. pub fn commit(msg: &str) -> Result<()> { Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .args(&["add", "."]) .output()?; Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .args(&["commit", "-m", &format!("{}: {}", util::machine_id()?, msg)]) .output()?; Ok(()) } /// Initializes a Git repository under the tittle config directory. This must be called /// before any other functions from `git::*` are called. pub fn init() -> Result<()> { if !config::tittle_config_dir().join(".git").exists() { let output = Command::new("git") .arg("-C") .arg(config::tittle_config_dir()) .arg("init") .output()?; util::info(String::from_utf8(output.stdout)?.trim()); commit("initial commit")?; } Ok(()) }
use num::FromPrimitive;

use super::opcodes::Opcode;

/// A single raw instruction byte fetched from memory.
pub struct Instruction {
    pub opcode: u8,
}

impl Instruction {
    /// Decode the raw byte into an [`Opcode`].
    ///
    /// # Panics
    /// Panics when the byte does not map to any known opcode.
    pub fn opcode(&self) -> Opcode {
        // BUG FIX: removed a leftover debug `println!` that dumped every
        // decoded byte (in bare decimal) to stdout on each call, and dropped
        // the redundant `& 0xff` mask — a no-op on a `u8`.
        Opcode::from_u8(self.opcode)
            .unwrap_or_else(|| panic!("Unrecognized instruction: {:#x}", self.opcode))
    }

    // NOTE(review): a large block of commented-out per-opcode stub methods
    // (Adc*/And*/Asl*/branches/loads/stores/...) was removed here — dead
    // commented-out code belongs in version-control history, not the source.
}
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashSet;
use std::rc::Rc;

/// Driver: parse the embedded puzzle input, then print both answers.
fn main() {
    let all_foods = read_foods(include_str!("../input.txt").lines());
    println!("appearances: {}", part_1(&all_foods));
    println!("{}", part_2(&all_foods));
}

/// Part 1: count how many times allergen-free ("safe") ingredients appear
/// across all foods (one count per food an ingredient occurs in).
fn part_1(all_foods: &[Food]) -> usize {
    let safe = get_safe(&all_foods);
    let mut appearances = 0;
    for ingredient in safe {
        for food in all_foods.iter() {
            if food.ingredients.contains(&ingredient) {
                appearances += 1;
            }
        }
    }
    appearances
}

/// Part 2: resolve every allergen to its unique carrier ingredient and
/// return the ingredients joined by "," sorted by allergen name
/// (the "canonical dangerous ingredient list").
fn part_2(all_foods: &[Food]) -> String {
    let mut potential_allergens = all_potential_allergen_ingredients(all_foods);
    let mut allergens = reduce_allergens(&mut potential_allergens);
    // Sort by allergen name so the output order is deterministic.
    allergens.sort_by(|(ka, _), (kb, _)| ka.cmp(kb));
    allergens
        .into_iter()
        .map(|(_, ingredient)| ingredient)
        .map(|ingredient| ingredient.to_string())
        .collect::<Vec<String>>()
        .join(",")
}

/// One input line: an ingredient set plus the allergens it declares.
/// `Rc<str>` lets the same string be shared by many sets without copying.
struct Food {
    // Position in the input; used to tell foods apart (see
    // `potential_allergen_ingredients`).
    id: u32,
    ingredients: HashSet<Rc<str>>,
    allergens: HashSet<Rc<str>>,
}

impl Food {
    fn new(id: u32, ingredients: HashSet<Rc<str>>, allergens: HashSet<Rc<str>>) -> Food {
        Food {
            id,
            ingredients,
            allergens,
        }
    }
}

/// Convenience queries over a whole food list.
trait FoodOps {
    /// Union of every food's ingredients.
    fn all_ingredients(&self) -> HashSet<Rc<str>>;
    /// Union of every food's declared allergens.
    fn all_allergens(&self) -> HashSet<Rc<str>>;
    /// First food whose allergen set contains `allergen`; panics if none.
    fn first_with_allergen(&self, allergen: &Rc<str>) -> &Food;
}

impl FoodOps for &[Food] {
    fn all_ingredients(&self) -> HashSet<Rc<str>> {
        let mut ingredients = HashSet::new();
        for food in self.iter() {
            for ingredient in food.ingredients.iter() {
                ingredients.insert(ingredient.clone());
            }
        }
        ingredients
    }

    fn all_allergens(&self) -> HashSet<Rc<str>> {
        {
            let mut allergens = HashSet::new();
            for food in self.iter() {
                for allergen in food.allergens.iter() {
                    allergens.insert(allergen.clone());
                }
            }
            allergens
        }
    }

    fn first_with_allergen(&self, allergen: &Rc<str>) -> &Food {
        self.iter()
            .find(|food| food.allergens.contains(allergen))
            .expect("no food with given allergen")
    }
}

/// Ingredients that cannot carry any allergen: all ingredients minus the
/// union of every per-allergen candidate set.
fn get_safe(all_foods: &[Food]) -> HashSet<Rc<str>> {
    let all_ingredients = all_foods.all_ingredients();
    let mut maybe_allergens = all_potential_allergen_ingredients(all_foods)
        .into_iter()
        .map(|(_, set)| set)
        .collect::<Vec<HashSet<Rc<str>>>>();
    let first = maybe_allergens.pop().expect("maybe allergens is empty");
    // Union every remaining candidate set into `first`: the inner fold just
    // inserts each element through the mutable reference it threads along.
    let danger_danger = maybe_allergens.iter().fold(first, |mut all, maybe| {
        maybe.iter().fold(&mut all, |set, a| {
            set.insert(a.clone());
            set
        });
        all
    });
    all_ingredients
        .difference(&danger_danger)
        .cloned()
        .collect::<HashSet<Rc<str>>>()
}

/// For every allergen that appears in the input, the set of ingredients
/// that could carry it.
fn all_potential_allergen_ingredients(all_foods: &[Food]) -> Vec<(Rc<str>, HashSet<Rc<str>>)> {
    let all_allergens = all_foods.all_allergens();
    let mut maybe_allergens = Vec::new();
    for allergen_ref in all_allergens.iter() {
        let potential_allergens = potential_allergen_ingredients(all_foods, allergen_ref);
        maybe_allergens.push((allergen_ref.clone(), potential_allergens));
    }
    maybe_allergens
}

/// Candidate ingredients for one allergen: the intersection of the
/// ingredient sets of every food that declares that allergen.
fn potential_allergen_ingredients(all_foods: &[Food], allergen: &Rc<str>) -> HashSet<Rc<str>> {
    let first_with_allergen = all_foods.first_with_allergen(allergen);
    // Seed with the first declaring food's ingredients...
    let mut maybe_allergen_ingredient = first_with_allergen
        .ingredients
        .iter()
        .fold(HashSet::new(), |mut all, ingredient| {
            all.insert(ingredient.clone());
            all
        });
    // ...then intersect with each other declaring food (id filters out the
    // seed food itself).
    for food in all_foods
        .iter()
        .filter(|f| f.allergens.contains(allergen) && f.id != first_with_allergen.id)
    {
        let new_maybe = maybe_allergen_ingredient
            .intersection(&food.ingredients)
            .fold(HashSet::new(), |mut all, ingredient| {
                all.insert(ingredient.clone());
                all
            });
        maybe_allergen_ingredient = new_maybe;
    }
    maybe_allergen_ingredient
}

/// Constraint propagation: repeatedly fix any allergen whose candidate set
/// has shrunk to exactly one ingredient, remove that ingredient from the
/// other candidate sets, and repeat until every allergen is paired.
/// Assumes the input admits a unique assignment, otherwise this loops forever.
fn reduce_allergens(potentials: &mut Vec<(Rc<str>, HashSet<Rc<str>>)>) -> Vec<(Rc<str>, Rc<str>)> {
    // Ingredients already claimed by some allergen.
    let mut discovered_allergens: HashSet<Rc<str>> = HashSet::new();
    // Allergens already resolved.
    let mut discovered_kinds: HashSet<Rc<str>> = HashSet::new();
    let mut paired = Vec::new();
    while discovered_allergens.len() < potentials.len() {
        for i in 0..potentials.len() {
            let (kind, potential) = potentials.get_mut(i).unwrap();
            if discovered_kinds.contains(kind) {
                continue;
            }
            // Drop ingredients another allergen has already claimed.
            for discovered in discovered_allergens.iter() {
                potential.remove(discovered);
            }
            if potential.len() == 1 {
                discovered_kinds.insert(kind.clone());
                let ingredient =
                    &(*potential.iter().collect::<Vec<&Rc<str>>>().first().unwrap()).clone();
                discovered_allergens.insert(ingredient.clone());
                paired.push((kind.clone(), ingredient.clone()));
            }
        }
    }
    paired
}

lazy_static! {
    // Matches lines like "a b c (contains dairy, fish)".
    static ref FOOD_REGEX: Regex =
        Regex::new(r"^(?P<ingredients>[\w\s]+)\(contains (?P<allergens>[\w\s,]+)\)$")
            .expect("illegal food regex");
}

/// Parses one `Food` per line matching `FOOD_REGEX`; non-matching lines
/// are silently skipped.
fn read_foods<'a>(lines: impl Iterator<Item = &'a str>) -> Vec<Food> {
    let mut foods = Vec::new();
    let mut id = 0;
    for line in lines {
        if let Some(caps) = FOOD_REGEX.captures(line) {
            // Skip empty fragments produced by stray whitespace in the input.
            let fold = |mut all: HashSet<Rc<str>>, item: &str| {
                if !item.trim().is_empty() {
                    all.insert(Rc::from(item));
                }
                all
            };
            let ingredients = caps["ingredients"].split(' ').fold(HashSet::new(), fold);
            let allergens = caps["allergens"].split(", ").fold(HashSet::new(), fold);
            foods.push(Food::new(id, ingredients, allergens));
            id += 1;
        }
    }
    foods
}

#[cfg(test)]
mod tests {
    use super::*;

    const TEST_INPUT: &str = "mxmxvkd kfcds sqjhc nhms (contains dairy, fish)\n trh fvjkl sbzzf mxmxvkd (contains dairy)\n sqjhc fvjkl (contains soy)\n sqjhc mxmxvkd sbzzf (contains fish)";

    #[test]
    fn it_correctly_finds_potential_allergen_ingredients() {
        let all_foods = read_foods(TEST_INPUT.lines());
        let potential_allergens = potential_allergen_ingredients(&all_foods, &Rc::from("fish"));
        assert_eq!(2, potential_allergens.len());
        assert!(potential_allergens.contains(&Rc::from("mxmxvkd")));
        assert!(potential_allergens.contains(&Rc::from("sqjhc")));
    }

    #[test]
    fn it_counts_the_number_of_safe_ingredients() {
        let mut all_foods = read_foods(TEST_INPUT.lines());
        let safe_count = part_1(&mut all_foods);
        assert_eq!(5, safe_count);
    }

    #[test]
    fn it_correctly_lists_allergens() {
        let mut all_foods = read_foods(TEST_INPUT.lines());
        let allergens = part_2(&mut all_foods);
        assert_eq!("mxmxvkd,sqjhc,fvjkl", allergens);
    }
}
// Submodules of the messaging client implementation.
mod client;
mod message;
mod read_message;
mod write_message;

// Public surface: the client entry point plus the message types it exchanges.
pub use self::client::run;
pub use self::message::Message;
pub use self::read_message::ReadMessage;
pub use self::write_message::WriteMessage;
// Submodules: client side, server side, and the shared SMTP protocol code.
mod client;
mod server;
mod smtp;

// Re-export everything from each submodule at this level.
pub use self::client::*;
pub use self::server::*;
pub use self::smtp::*;
use std::io::Read; fn main() { let mut buf = String::new(); // 標準入力から全部bufに読み込む std::io::stdin().read_to_string(&mut buf).unwrap(); // 行ごとのiterが取れる let mut iter = buf.split_whitespace(); let n: usize = iter.next().unwrap().parse().unwrap(); let buttons: Vec<usize> = (0..n) .map(|_| iter.next().unwrap().parse().unwrap()) .collect(); let mut cnt = 0; let mut target_button_index = 1; while target_button_index != 2 && cnt < 100000 { target_button_index = buttons[target_button_index - 1]; cnt += 1; } if cnt == 100000 { println!("-1"); } else { println!("{}", cnt); } }
/// Responds to every command-line argument in order.
fn main() {
    std::env::args().skip(1).for_each(|arg| respond(&arg));
}

/// Prints a canned reply for a known word, or a fallback message.
fn respond(arg: &str) {
    let reply = match arg {
        "hi" => "Hello there!".to_string(),
        "bye" => "Ok, goodbye!".to_string(),
        other => format!("Sorry, I don't know what {} means", other),
    };
    println!("{}", reply);
}
//! Implementation of channels that supports iterator-like operation such as `map`, `filter` //! ... /// Near drop-in replacement for std::sync::mpsc; pub mod mpsc;
use arbitrary::Unstructured; use rand::{prelude::random, rngs::SmallRng, Rng, SeedableRng}; use super::*; use crate::state; #[test] fn test_journal() { let seeds: Vec<u128> = vec![193003787382804392805109954488729196323, random()]; let seed = seeds[random::<usize>() % seeds.len()]; // let seed: u128 = 148484157541144179681685363423689665370; println!("test_journal {}", seed); let mut rng = SmallRng::from_seed(seed.to_le_bytes()); let name = "test_journal"; let dir = tempfile::tempdir().unwrap(); println!("test_journal {:?}", dir.path()); let mut jn = Journal::start(name, dir.path().as_ref(), 0, state::NoState).unwrap(); assert_eq!(jn.to_journal_number(), 0); assert_eq!(jn.len_batches(), 0); assert_eq!(jn.to_state(), state::NoState); let mut entries: Vec<entry::Entry> = (0..1_000_000) .map(|_i| { let bytes = rng.gen::<[u8; 32]>(); let mut uns = Unstructured::new(&bytes); uns.arbitrary::<entry::Entry>().unwrap() }) .collect(); entries.sort(); entries.dedup_by(|a, b| a.to_seqno() == b.to_seqno()); let mut n_batches = 0; let mut offset = 0; for _i in 0..1000 { let n = rng.gen::<u8>(); for _j in 0..n { let entry = entries[offset].clone(); jn.add_entry(entry.clone()).unwrap(); entries.push(entry); offset += 1; } assert_eq!(jn.to_last_seqno(), Some(entries[offset - 1].to_seqno())); jn.flush().unwrap(); if n > 0 { n_batches += 1; } assert_eq!(jn.to_last_seqno(), Some(entries[offset - 1].to_seqno())); } assert_eq!(n_batches, jn.len_batches()); let iter = RdJournal::from_journal(&jn, 0..=u64::MAX).unwrap(); let jn_entries: Vec<entry::Entry> = iter.map(|x| x.unwrap()).collect(); let entries = entries[..offset].to_vec(); assert_eq!(entries.len(), jn_entries.len()); assert_eq!(entries, jn_entries); { let (load_jn, _) = Journal::<state::NoState>::load(name, &jn.to_file_path()).unwrap(); let iter = RdJournal::from_journal(&load_jn, 0..=u64::MAX).unwrap(); let jn_entries: Vec<entry::Entry> = iter.map(|x| x.unwrap()).collect(); let entries = entries[..offset].to_vec(); 
assert_eq!(entries.len(), jn_entries.len()); assert_eq!(entries, jn_entries); } jn.purge().unwrap(); dir.close().unwrap(); }
use core::cell::UnsafeCell;
use alloc::boxed::Box;
use crate::process::Tid;
use crate::process::structs::*;
use crate::process::thread_pool::ThreadPool;
use crate::interrupt::*;

// State of the scheduling unit `Processor`.
pub struct ProcessorInner {
    // Thread pool owning all schedulable threads.
    pool: Box<ThreadPool>,
    // The idle thread, which runs the scheduling loop (`idle_main`).
    idle: Box<Thread>,
    // The thread currently running on this processor, if any.
    current: Option<(Tid, Box<Thread>)>,
}

pub struct Processor {
    // Interior mutability: mutated through a shared reference via `inner()`.
    inner: UnsafeCell<Option<ProcessorInner>>,
}

// SAFETY(review): `UnsafeCell` is not `Sync` by itself. This assertion is
// sound only if the kernel guarantees exclusive access (e.g. single hart
// with asynchronous interrupts disabled around mutation, as the methods
// below do via disable_and_store) — confirm that invariant.
unsafe impl Sync for Processor {}

impl Processor {
    // Create an empty, uninitialized Processor.
    pub const fn new() -> Processor {
        Processor {
            inner: UnsafeCell::new(None),
        }
    }

    // Initialize with the idle thread and the thread pool.
    pub fn init(&self, idle: Box<Thread>, pool: Box<ThreadPool>) {
        unsafe {
            *self.inner.get() = Some(
                ProcessorInner {
                    pool,
                    idle,
                    current: None,
                }
            );
        }
    }

    // Interior mutability: obtain a mutable reference to the wrapped value.
    // Panics if `init` has not been called yet.
    pub fn inner(&self) -> &mut ProcessorInner {
        unsafe { &mut *self.inner.get() }
            .as_mut()
            .expect("Processor is not initialized!")
    }

    // Add a new thread through the thread pool.
    pub fn add_thread(&self, thread: Box<Thread>) {
        self.inner().pool.add(thread);
    }

    // Scheduling loop run by the idle thread; never returns.
    pub fn idle_main(&self) -> ! {
        let inner = self.inner();
        // Disable asynchronous interrupts as soon as the idle thread starts.
        disable_and_store();
        loop {
            // If the pool hands us a runnable thread...
            if let Some(thread) = inner.pool.acquire() {
                // ...record it as the currently running thread...
                inner.current = Some(thread);
                // ...and switch from idle to it.
                //println!("\n>>>> will switch_to thread {} in idle_main!", inner.current.as_mut().unwrap().0);
                inner.idle.switch_to(
                    &mut *inner.current.as_mut().unwrap().1
                );
                // The thread's time slice ran out and control switched back
                // to the idle thread.
                //println!("<<<< switch_back to idle in idle_main!");
                // `current` still holds the previous thread at this point.
                let (tid, thread) = inner.current.take().unwrap();
                // Hand the thread (and its resources) back to the pool.
                inner.pool.retrieve(tid, thread);
            }
            // No runnable thread right now.
            else {
                // Enable asynchronous interrupts and wait for one to arrive.
                enable_and_wfi();
                // After the interrupt was handled, disable them again.
                disable_and_store();
            }
        }
    }

    // Timer tick: preempt the running thread when its slice is used up.
    pub fn tick(&self) {
        let inner = self.inner();
        if !inner.current.is_none() {
            // A thread is currently running.
            if inner.pool.tick() {
                // `tick()` returning true means the running thread's time
                // slice is exhausted and it must be scheduled out.
                // We are about to enter the idle thread, so asynchronous
                // interrupts must be off; `switch_to` does not promise to
                // preserve `sstatus`, so save it manually.
                let flags = disable_and_store();
                // Switch to the idle thread for scheduling.
                inner.current
                    .as_mut()
                    .unwrap()
                    .1
                    .switch_to(&mut inner.idle);
                // Sometime later we are switched back from idle; restore
                // `sstatus` and resume interrupt handling.
                restore(flags);
            }
        }
    }

    // Start running: switch from the boot thread to the idle scheduler.
    pub fn run(&self) {
        Thread::get_boot_thread().switch_to(&mut self.inner().idle);
    }

    // Terminate the current thread; never returns.
    pub fn exit(&self, code: usize) -> ! {
        // We are about to switch to idle, so disable timer interrupts first.
        disable_and_store();
        // Since we are the running thread, our own tid is in `current`.
        let inner = self.inner();
        let tid = inner.current.as_ref().unwrap().0;
        // Tell the pool this thread has exited.
        inner.pool.exit(tid);
        println!("thread {} exited, exit code = {}", tid, code);
        // If another thread is waiting for this one to finish, wake it up.
        if let Some(wait) = inner.current.as_ref().unwrap().1.wait {
            inner.pool.wakeup(wait);
        }
        // Switch to idle so it can decide which thread runs next.
        inner.current
            .as_mut()
            .unwrap()
            .1
            .switch_to(&mut inner.idle);
        // Never reached in practice: control does not return to an exited
        // thread, but the signature requires divergence.
        loop {}
    }

    // Voluntarily give up the CPU and go to sleep.
    pub fn yield_now(&self) {
        let inner = self.inner();
        if !inner.current.is_none() {
            unsafe {
                // Entering the idle thread: disable asynchronous interrupts
                // and save the previous `sstatus` manually.
                let flags = disable_and_store();
                let tid = inner.current.as_mut().unwrap().0;
                let thread_info = inner.pool.threads[tid].as_mut().expect("thread not existed when yielding");
                // Mark the thread as sleeping so the pool won't reschedule
                // it until it is woken up.
                thread_info.status = Status::Sleeping;
                // Switch to the idle thread.
                inner.current
                    .as_mut()
                    .unwrap()
                    .1
                    .switch_to(&mut *inner.idle);
                // Switched back from idle: restore `sstatus`.
                restore(flags);
            }
        }
    }

    // Wake a sleeping thread so the pool can schedule it again.
    pub fn wake_up(&self, tid: Tid) {
        let inner = self.inner();
        inner.pool.wakeup(tid);
    }

    // Tid of the currently running thread; panics if none is running.
    pub fn current_tid(&self) -> usize {
        self.inner().current.as_mut().unwrap().0 as usize
    }
}
#[doc = "Reader of register WAKESTAT"]
pub type R = crate::R<u32, super::WAKESTAT>;
#[doc = "Reader of field `STAT4`"]
pub type STAT4_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 4 - P\\[4\\] Wake Status"]
    #[inline(always)]
    pub fn stat4(&self) -> STAT4_R {
        // Test bit 4 in place with a mask rather than shifting it down first.
        let stat4_is_set = (self.bits & (0x01 << 4)) != 0;
        STAT4_R::new(stat4_is_set)
    }
}
use crate::isolate::{IsolatedBoxOptions, IsolatedBoxOptionsBuilder};
use merge::Merge;
use serde::Deserialize;
use std::collections::HashMap;
use validator::Validate;

/// Per-phase sandbox resource limits. Every field is optional so settings
/// can be merged (via `Merge`) with defaults declared elsewhere.
#[derive(Deserialize, Debug, Clone, Default, Merge, Validate)]
pub struct PhaseSandboxSettings {
    pub run_time_limit: Option<u64>,
    pub extra_time_limit: Option<u64>,
    pub wall_time_limit: Option<u64>,
    pub stack_size_limit: Option<u64>,
    pub process_count_limit: Option<u64>,
    pub memory_limit: Option<u64>,
    pub storage_limit: Option<u64>,
}

/// Configuration for one execution phase: the script to run plus optional
/// stdin, environment, sandbox limits, and profiling toggle.
#[derive(Deserialize, Debug, Clone, Validate)]
pub struct PhaseSettings {
    pub name: Option<String>,
    pub script: String,
    pub stdin: Option<String>,
    pub environment: Option<HashMap<String, String>>,
    pub sandbox_settings: Option<PhaseSandboxSettings>,
    pub profiling: Option<bool>,
}

impl From<PhaseSettings> for IsolatedBoxOptions {
    /// Translates phase settings into isolate box options, forwarding only
    /// the values that were explicitly configured so the builder keeps its
    /// own defaults for the rest.
    fn from(settings: PhaseSettings) -> Self {
        let mut options = IsolatedBoxOptionsBuilder::default();

        if let Some(sandbox_settings) = settings.sandbox_settings {
            if let Some(run_time_limit) = sandbox_settings.run_time_limit {
                options.run_time_limit(run_time_limit);
            }
            if let Some(extra_time_limit) = sandbox_settings.extra_time_limit {
                options.extra_time_limit(extra_time_limit);
            }
            if let Some(wall_time_limit) = sandbox_settings.wall_time_limit {
                options.wall_time_limit(wall_time_limit);
            }
            if let Some(stack_size_limit) = sandbox_settings.stack_size_limit {
                options.stack_size_limit(stack_size_limit);
            }
            if let Some(process_count_limit) = sandbox_settings.process_count_limit {
                options.process_count_limit(process_count_limit);
            }
            if let Some(memory_limit) = sandbox_settings.memory_limit {
                options.memory_limit(memory_limit);
            }
            if let Some(storage_limit) = sandbox_settings.storage_limit {
                options.storage_limit(storage_limit);
            }
        }

        // `settings` is consumed by value and `environment` is not used
        // again, so move the map into the builder instead of cloning it.
        options.environment(settings.environment);

        if let Some(stdin) = settings.stdin {
            options.stdin(stdin);
        }

        if let Some(profiling) = settings.profiling {
            options.profiling(profiling);
        }

        options.build().unwrap()
    }
}
use std::collections::HashMap;
use serde::{Serialize, Serializer, Deserialize, Deserializer};
use super::var::VarInt;
use super::slot::Slot;
use super::uuid::Uuid;
use super::chat::Chat;
use serde::de::{Visitor, SeqAccess, DeserializeOwned, DeserializeSeed};
use std::convert::TryInto;
use core::borrow::Borrow;

/// Entity metadata: a map from metadata field index to its typed value.
#[derive(Debug, Clone, Serialize)]
pub struct EntityMetadata(HashMap<u8, Entry>);

/// One metadata value, tagged by its wire type.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Entry {
    Byte(i8),
    Short(i16),
    Int(i32),
    Float(f32),
    String(String),
    Slot(Option<Slot>),
    Rotation([i32; 3]),
}

impl<'de> Deserialize<'de> for EntityMetadata {
    // Hand-written impl: the wire format is a sequence of (index, entry)
    // pairs rather than a serde map, so a plain derive would not match it.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where
        D: Deserializer<'de> {
        struct MetaDataVisitor;

        impl<'de> Visitor<'de> for MetaDataVisitor {
            type Value = EntityMetadata;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
                formatter.write_str("a entity meta")
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
                where
                    A: SeqAccess<'de>,
            {
                let mut map = HashMap::new();
                // Collect pairs until the sequence ends. On a duplicate
                // index the FIRST entry wins (`or_insert` keeps existing).
                while let Some((id, entry)) = seq.next_element()? {
                    map.entry(id).or_insert(entry);
                };
                Ok(EntityMetadata(map))
            }
        }

        // "MCMETADATAENTRY" acts as a marker name — presumably recognized
        // by the matching protocol codec on the Deserializer side; confirm
        // against the serializer implementation.
        deserializer.deserialize_newtype_struct("MCMETADATAENTRY", MetaDataVisitor)
    }
}
// Graph algorithm submodules.
mod bfs;
mod dfs;
mod dijkstra;
mod graph;

/// Graph stored as an adjacency list with `i64` edge weights.
pub struct Graph {
    // Number of nodes.
    node_size: usize,
    // Number of edges.
    edge_size: usize,
    // edge[u] holds (v, w) pairs — presumably an edge u -> v with weight w;
    // confirm against the constructors in `mod graph`.
    edge: Vec<Vec<(usize, i64)>>,
}
#![crate_name = "uu_echo"] /* * This file is part of the uutils coreutils package. * * (c) Derek Chiang <derekchiang93@gmail.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ extern crate getopts; extern crate libc; #[macro_use] extern crate uucore; use std::io::Write; use std::str::from_utf8; #[allow(dead_code)] static NAME: &'static str = "echo"; static VERSION: &'static str = env!("CARGO_PKG_VERSION"); #[derive(Clone)] struct EchoOptions { newline: bool, escape: bool } #[inline(always)] fn to_char(bytes: &[u8], base: u32) -> char { usize::from_str_radix(from_utf8(bytes.as_ref()).unwrap(), base).unwrap() as u8 as char } #[inline(always)] fn isxdigit(c: u8) -> bool { match c as char { '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' => true, _ => false } } #[inline(always)] fn isodigit(c: u8) -> bool { match c as char { '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' => true, _ => false } } fn convert_str(string: &[u8], index: usize, base: u32) -> (char, usize) { let (max_digits, is_legal_digit) : (usize, fn(u8) -> bool) = match base { 8 => (3, isodigit), 16 => (2, isxdigit), _ => panic!(), }; let mut bytes = vec!(); for offset in 0usize .. 
max_digits { if string.len() <= index + offset as usize { break; } let c = string[index + offset as usize]; if is_legal_digit(c) { bytes.push(c as u8); } else { break; } } if bytes.is_empty() { (' ', 0) } else { (to_char(&bytes, base), bytes.len()) } } fn parse_options(args: Vec<String>, options: &mut EchoOptions) -> Option<Vec<String>> { let mut echo_args = vec!(); 'argloop: for arg in args.into_iter().skip(1) { match arg.as_ref() { "--help" | "-h" => { print_help(); return None; } "--version" | "-V" => { print_version(); return None; } "-n" => options.newline = true, "-e" => options.escape = true, "-E" => options.escape = false, _ => { if arg.len() > 1 && arg.chars().next().unwrap_or('_') == '-' { let mut newopts = options.clone(); for ch in arg.chars().skip(1) { match ch { 'h' => { print_help(); return None; } 'V' => { print_version(); return None; } 'n' => newopts.newline = true, 'e' => newopts.escape = true, 'E' => newopts.escape = false, _ => { echo_args.push(arg.clone()); continue 'argloop; } } } *options = newopts; } else { echo_args.push(arg); } } } } Some(echo_args) } fn print_help() { let mut opts = getopts::Options::new(); opts.optflag("n", "", "do not output the trailing newline"); opts.optflag("e", "", "enable interpretation of backslash escapes"); opts.optflag("E", "", "disable interpretation of backslash escapes (default)"); opts.optflag("h", "help", "display this help and exit"); opts.optflag("V", "version", "output version information and exit"); let msg = format!("{0} {1} - display a line of text Usage: {0} [SHORT-OPTION]... [STRING]... {0} LONG-OPTION Echo the STRING(s) to standard output. 
If -e is in effect, the following sequences are recognized: \\\\ backslash \\a alert (BEL) \\b backspace \\c produce no further output \\e escape \\f form feed \\n new line \\r carriage return \\t horizontal tab \\v vertical tab \\0NNN byte with octal value NNN (1 to 3 digits) \\xHH byte with hexadecimal value HH (1 to 2 digits)", NAME, VERSION); print!("{}", opts.usage(&msg)); } fn print_version() { println!("{} {}", NAME, VERSION); } pub fn uumain(args: Vec<String>) -> i32 { let mut options = EchoOptions { newline: false, escape: false }; let free = match parse_options(args, &mut options) { Some(vec) => vec, None => return 0 }; if !free.is_empty() { let string = free.join(" "); if options.escape { let mut prev_was_slash = false; let mut iter = string.chars().enumerate(); while let Some((index, c)) = iter.next() { if !prev_was_slash { if c != '\\' { print!("{}", c); } else { prev_was_slash = true; } } else { prev_was_slash = false; match c { '\\' => print!("\\"), 'a' => print!("\x07"), 'b' => print!("\x08"), 'c' => break, 'e' => print!("\x1B"), 'f' => print!("\x0C"), 'n' => print!("\n"), 'r' => print!("\r"), 't' => print!("\t"), 'v' => print!("\x0B"), 'x' => { let (c, num_char_used) = convert_str(string.as_bytes(), index + 1, 16); if num_char_used == 0 { print!("\\x"); } else { print!("{}", c); for _ in 0 .. num_char_used { iter.next(); // consume used characters } } }, '0' => { let (c, num_char_used) = convert_str(string.as_bytes(), index + 1, 8); if num_char_used == 0 { print!("\0"); } else { print!("{}", c); for _ in 0 .. num_char_used { iter.next(); // consume used characters } } } _ => { let (esc_c, num_char_used) = convert_str(string.as_bytes(), index, 8); if num_char_used == 0 { print!("\\{}", c); } else { print!("{}", esc_c); for _ in 1 .. num_char_used { iter.next(); // consume used characters } } } } } } } else { print!("{}", string); } } if options.newline { pipe_flush!(); } else { println!("") } 0 }
use super::{utils::FiberIdExtension, variable::VariablesKey, PausedState};
use dap::{
    requests::ScopesArguments,
    responses::ScopesResponse,
    types::{Scope, ScopePresentationhint},
};

impl PausedState {
    /// Handles the DAP `scopes` request: for the requested stack frame,
    /// reports the variable scopes (Arguments, Locals, Fiber Heap) along
    /// with `variables_reference` ids usable in later `variables` requests.
    pub fn scopes(&mut self, args: ScopesArguments) -> ScopesResponse {
        // Map the client-side frame id back to our internal frame key.
        let stack_frame_key = self
            .stack_frame_ids
            .id_to_key(args.frame_id.try_into().unwrap());
        let stack_frame = stack_frame_key.get(&self.vm_state.vm);

        let mut scopes = vec![];
        // The Arguments scope is only reported while the frame is live.
        if let Some(stack_frame) = stack_frame {
            scopes.push(Scope {
                name: "Arguments".to_string(),
                presentation_hint: Some(ScopePresentationhint::Arguments),
                variables_reference: self
                    .variables_ids
                    .key_to_id(VariablesKey::Arguments(stack_frame_key.to_owned())),
                named_variables: Some(stack_frame.call.arguments.len()),
                indexed_variables: Some(0),
                expensive: false,
                // TODO: source information for function
                source: None,
                line: None,
                column: None,
                end_line: None,
                end_column: None,
            });
        }

        let locals = stack_frame_key.get_locals(&self.vm_state.vm);
        scopes.push(Scope {
            name: "Locals".to_string(),
            presentation_hint: Some(ScopePresentationhint::Locals),
            variables_reference: self
                .variables_ids
                .key_to_id(VariablesKey::Locals(stack_frame_key.to_owned())),
            named_variables: Some(locals.len()),
            indexed_variables: Some(0),
            expensive: false,
            // TODO: source information for function
            source: None,
            line: None,
            column: None,
            end_line: None,
            end_column: None,
        });

        // TODO: Show channels
        // Whole-fiber heap scope: every live object of the frame's fiber.
        let fiber = stack_frame_key.fiber_id.get(&self.vm_state.vm);
        scopes.push(Scope {
            name: "Fiber Heap".to_string(),
            presentation_hint: None,
            variables_reference: self
                .variables_ids
                .key_to_id(VariablesKey::FiberHeap(stack_frame_key.fiber_id)),
            named_variables: Some(fiber.heap.objects().len()),
            indexed_variables: Some(0),
            expensive: false,
            source: None,
            line: None,
            column: None,
            end_line: None,
            end_column: None,
        });

        ScopesResponse { scopes }
    }
}
pub fn example13() { let vect1 = vec![1, 2, 3]; let vect2 = vect1; // this is invalid // xprintln!("vect1[0] = {}", vect1[0]); xprintln!("vect2[0] = {}", vect2[0]); let prim_val = 1; let prim_val2 = prim_val; xprintln!("prim_val: {}", prim_val); xprintln!("prim_val2: {}", prim_val2); xprintln!("sum of vect: {}", sum_vects(&vect2)); xprintln!("vect: {:?}", vect2); } fn sum_vects(v1: &Vec<i32>) -> i32 { let sum = v1.iter() .fold( 0, |mut sum, &x| { sum += x; sum } ); sum }
//! Top level error module. use crate::project_config::ProjectParseError; use crate::tmux::TmuxError; use std::fmt::{Debug, Display}; use std::io; use std::path::PathBuf; use thiserror::Error; /// Top level error. #[derive(Error, Debug)] pub enum AppError { /// Problem getting the configuration directory. #[error("Can not get config path in the user's home directory")] ConfigPath, /// Could show the confirmation prompt. #[error("Can not run prompt: {0}")] Prompt(io::Error), /// Error during `yaml` parsing. #[error("Could not parse yaml from {0}: {1}")] YamlParse(PathBuf, String), /// Error mapping the parsed yaml to /// [ProjectConfig](crate::project_config::project::ProjectConfig). #[error("{0}")] ProjectParse(#[from] ProjectParseError), /// Error running `tmux` operation. #[error(transparent)] TmuxOperation(#[from] TmuxError), /// Problem copying project file from `src` to `dest` #[error("Cannoy copy {0} to {1}: {2}")] ProjectCopy(PathBuf, PathBuf, io::Error), /// Problem creating the configuration directory. #[error("Could not create config dir {0}: {1}")] ProjectCreateConfigDir(PathBuf, io::Error), /// Error during file creation. #[error("Could not create project file {0}: {1}")] ProjectFileCreate(PathBuf, io::Error), /// Error during file deletion. #[error("Can not delete Project file {0}: {1}")] ProjectFileDelete(PathBuf, io::Error), /// The project file already exists. #[error("Project file {0} already exists")] ProjectFileExists(PathBuf), /// Can not find the project file. #[error("Project file {0} not found")] ProjectFileNotFound(PathBuf), /// Error reading file content. #[error("Could not read content from project file {0}: {1}")] ProjectFileRead(PathBuf, io::Error), /// Error writing file content. #[error("Could not write content to project file {0}: {1}")] ProjectFileWrite(PathBuf, io::Error), /// `$EDITOR` environment var is not set. 
#[error("$EDITOR is not set, the file path to edit is {0}")] EditorNotSet(PathBuf), /// Error running a command #[error("Could not run command {0}")] CommandRun(String), } /// Used for displaying the error on exit. /// /// By default, existing with an error from `main()` displays the /// debug information of the error, which is not human friendly. /// /// To mitigate, this wraps [AppError], and implements [Debug] as /// [Display]. #[derive(Error)] pub(crate) struct AppErrorForDisplay(AppError); impl Display for AppErrorForDisplay { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0) } } impl Debug for AppErrorForDisplay { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0) } } impl From<AppError> for AppErrorForDisplay { fn from(value: AppError) -> Self { Self(value) } }
#![feature(lang_items)]
#![feature(start)]
#![feature(asm)]
#![no_main]
#![no_std]

// Architecture-specific code, re-exported at the crate root.
pub mod arch;
pub use arch::*;
// Device drivers (CRT text output, ...).
pub mod devices;

/// Kernel entry point: print a banner, set up the GDT/IDT, then halt the
/// CPU forever.
#[no_mangle]
#[start]
pub extern fn rusternel_main() {
    devices::crt::puts("Hello,rusternel!\r\n");
    unsafe {
        x86_32::gdt::init_gdtidt();
        // Nothing more to do yet: idle in a hlt loop.
        loop {
            x86_32::device::io::hlt()
        }
    }
}

// Required lang items for a no_std binary built without the Rust runtime.
#[lang = "eh_personality"]
extern fn eh_personality() {}

// Panic handler: halt the CPU forever.
#[lang = "panic_fmt"]
extern fn panic_fmt() -> ! {
    unsafe {
        loop {
            x86_32::device::io::hlt()
        }
    }
}
use crossterm::style::Color;
use printer::printer::{PrintQueue, PrinterItem};
use std::sync::OnceLock;

// Cached result of the NO_COLOR environment check.
static NO_COLOR: OnceLock<bool> = OnceLock::new();
/// Have the top precedence
fn no_color() -> bool {
    *NO_COLOR.get_or_init(|| std::env::var("NO_COLOR").is_ok())
}

/// Trims a raw `rustc`/`cargo` stderr dump down to the interesting part:
/// skips everything before the first error (or before the user's crate when
/// `show_warnings` is set) and drops trailing warnings when they are not
/// wanted. Falls back to a bug-report message if trimming leaves nothing.
pub fn format_err<'a>(original_output: &'a str, show_warnings: bool, repl_name: &str) -> String {
    // Marker emitted by rustc before edition 2021 error summaries.
    const BEFORE_2021_END_TAG: &str = ": aborting due to ";
    // Relies on --color=always
    const ERROR_TAG: &str = "\u{1b}[0m\u{1b}[1m\u{1b}[38;5;9merror";
    const WARNING_TAG: &str = "\u{1b}[0m\u{1b}[1m\u{1b}[33mwarning";
    // These are more fragile, should be only used when NO_COLOR is on
    const ERROR_TAG_NO_COLOR: &str = "error[";
    const WARNING_TAG_NO_COLOR: &str = "warning: ";

    // Skip the preamble: either up to the repl crate header (when warnings
    // are shown) or up to the first error tag.
    let go_to_start = |output: &'a str| -> Vec<&'a str> {
        if show_warnings {
            output
                .lines()
                .skip_while(|line| !line.contains(&format!("{repl_name} v0.1.0")))
                .skip(1)
                .collect()
        } else {
            output
                .lines()
                .skip_while(|line| {
                    if no_color() {
                        !line.starts_with(ERROR_TAG_NO_COLOR)
                    } else {
                        !line.starts_with(ERROR_TAG)
                    }
                })
                .collect()
        }
    };
    // Rejoin the kept lines, cutting at the first warning unless warnings
    // are wanted.
    let go_to_end = |output: Box<dyn Iterator<Item = &str>>| -> String {
        if show_warnings {
            output
        } else {
            Box::new(output.take_while(|line| {
                if no_color() {
                    !line.starts_with(WARNING_TAG_NO_COLOR)
                } else {
                    !line.starts_with(WARNING_TAG)
                }
            }))
        }
        .collect::<Vec<_>>()
        .join("\n")
    };

    // Pre-2021 output: keep everything up to the "aborting due to" summary.
    let handle_error = |output: &'a str| {
        go_to_start(output)
            .into_iter()
            .take_while(|line| !line.contains(BEFORE_2021_END_TAG))
    };
    // 2021+ output: drop the trailing block after the last blank line.
    let handle_error_2021 = |output: &'a str| {
        go_to_start(output)
            .into_iter()
            .rev()
            .skip_while(|line| !line.is_empty())
            .collect::<Vec<_>>()
            .into_iter()
            .rev()
    };
    let output: Box<dyn Iterator<Item = &str>> = if original_output.contains(BEFORE_2021_END_TAG) {
        Box::new(handle_error(original_output))
    } else {
        Box::new(handle_error_2021(original_output))
    };
    let formatted_error = go_to_end(output);

    // The formatting logic is ad-hoc, there will always be a chance of failure with a rust update
    //
    // So we do a sanity check here, if the formatted_error is empty (which means we failed to
    // format the output), ask the user to open a bug report with the original_output
    if !formatted_error.is_empty() {
        formatted_error
    } else {
        format!("IRust: failed to format the error output.\nThis is a bug in IRust.\nFeel free to open a bug-report at https://github.com/sigmaSd/IRust/issues/new with the next text:\n\noriginal_output:\n{original_output}")
    }
}

/// Same as `format_err`, wrapped into a red print-queue item.
pub fn format_err_printqueue(output: &str, show_warnings: bool, repl_name: &str) -> PrintQueue {
    PrinterItem::String(format_err(output, show_warnings, repl_name), Color::Red).into()
}

/// Formats the result of evaluating an expression: errors become a red
/// formatted queue, a unit value `()` is suppressed, anything else is shown
/// behind the prompt.
pub fn format_eval_output(
    status: std::process::ExitStatus,
    output: String,
    prompt: String,
    show_warnings: bool,
    repl_name: &str,
    new_lines_after_output: usize,
) -> Option<PrintQueue> {
    if !status.success() {
        return Some(format_err_printqueue(&output, show_warnings, repl_name));
    }
    if output.trim() == "()" {
        return None;
    }

    let mut eval_output = PrintQueue::default();
    eval_output.push(PrinterItem::String(prompt, Color::Red));
    eval_output.push(PrinterItem::String(output, Color::White));
    eval_output.add_new_line(new_lines_after_output);
    Some(eval_output)
}

// Heuristic: a successful `cargo check` run prints the profile summary
// line; its absence is treated as an error.
fn check_is_err(s: &str) -> bool {
    !s.contains("dev [unoptimized + debuginfo]")
}

/// Formats `cargo check` output: `Some(error queue)` on failure, `None`
/// when the check passed.
pub fn format_check_output(
    output: String,
    show_warnings: bool,
    repl_name: &str,
) -> Option<PrintQueue> {
    if check_is_err(&output) {
        Some(format_err_printqueue(&output, show_warnings, repl_name))
    } else {
        None
    }
}
//! All the traits exposed to be used in other custom pallets
use crate::*;
use codec::{Decode, Encode};
use frame_support::dispatch;
use scale_info::TypeInfo;

/// Anchor trait definition to be used in other pallets
pub trait AnchorInterface<T: Config<I>, I: 'static = ()> {
    /// Creates a new anchor
    fn create(creator: T::AccountId, depth: u8, max_edges: u32) -> Result<T::TreeId, dispatch::DispatchError>;
    /// Deposit into the anchor
    fn deposit(account: T::AccountId, id: T::TreeId, leaf: T::Element) -> Result<(), dispatch::DispatchError>;
    /// Withdraw from the anchor
    ///
    /// `roots` carries one root per linked chain; `fee` goes to the relayer
    /// and `refund` back to the recipient.
    fn withdraw(
        id: T::TreeId,
        proof_bytes: &[u8],
        roots: Vec<T::Element>,
        nullifier_hash: T::Element,
        recipient: T::AccountId,
        relayer: T::AccountId,
        fee: BalanceOf<T, I>,
        refund: BalanceOf<T, I>,
    ) -> Result<(), dispatch::DispatchError>;
    /// Add an edge to this anchor
    fn add_edge(
        id: T::TreeId,
        src_chain_id: T::ChainId,
        root: T::Element,
        height: T::BlockNumber,
    ) -> Result<(), dispatch::DispatchError>;
    /// Update an edge for this anchor
    fn update_edge(
        id: T::TreeId,
        src_chain_id: T::ChainId,
        root: T::Element,
        height: T::BlockNumber,
    ) -> Result<(), dispatch::DispatchError>;
}

/// Anchor trait for inspecting tree state
pub trait AnchorInspector<T: Config<I>, I: 'static = ()> {
    /// Gets the neighbor roots for a tree or returns `TreeDoesntExist`
    fn get_neighbor_roots(id: T::TreeId) -> Result<Vec<T::Element>, dispatch::DispatchError>;
    /// Checks if a merkle root is in a tree's cached history or returns
    /// `TreeDoesntExist`
    fn is_known_neighbor_root(
        id: T::TreeId,
        src_chain_id: T::ChainId,
        target: T::Element,
    ) -> Result<bool, dispatch::DispatchError>;
    /// Ensures `target` is a known neighbor root, failing with
    /// `InvalidNeighborWithdrawRoot` otherwise. Default impl delegates to
    /// `is_known_neighbor_root`.
    fn ensure_known_neighbor_root(
        id: T::TreeId,
        src_chain_id: T::ChainId,
        target: T::Element,
    ) -> Result<(), dispatch::DispatchError> {
        let is_known = Self::is_known_neighbor_root(id, src_chain_id, target)?;
        ensure!(is_known, Error::<T, I>::InvalidNeighborWithdrawRoot);
        Ok(())
    }
    /// Check if this anchor has this edge
    fn has_edge(id: T::TreeId, src_chain_id: T::ChainId) -> bool;
}

/// Per-anchor bookkeeping stored on-chain.
#[derive(Default, Clone, Encode, Decode, TypeInfo)]
pub struct AnchorMetadata<AccountId, Balance> {
    /// Creator account
    pub creator: AccountId,
    /// Balance size of deposit
    pub deposit_size: Balance,
}

/// Snapshot of a linked (source-chain) anchor's merkle state.
#[derive(Clone, Encode, Decode, Eq, PartialEq, Default, Debug, TypeInfo)]
pub struct EdgeMetadata<ChainID, Element, BlockNumber> {
    /// chain id
    pub src_chain_id: ChainID,
    /// root of source chain anchor's native merkle tree
    pub root: Element,
    /// height of source chain anchor's native merkle tree
    pub height: BlockNumber,
}
use core::future; use std::sync::Arc; use futures::{future::{AbortHandle, Abortable, join, select}, pin_mut}; use tokio::{io::copy, net::{TcpListener, TcpStream, ToSocketAddrs}, sync::watch}; use watch::{Receiver, Sender}; use crate::error::Error; async fn proxy_to_remote(incoming: TcpStream, outgoing: TcpStream) { let (mut inc_reader, mut inc_writer) = incoming.into_split(); let (mut out_reader, mut out_writer) = outgoing.into_split(); let write_to_outgoing = copy(&mut inc_reader, &mut out_writer); let read_from_incoming = copy(&mut out_reader, &mut inc_writer); join(read_from_incoming, write_to_outgoing).await; } pub async fn proxy<A: ToSocketAddrs>(incoming: TcpStream, remote_addr: A) -> Result<(), Error> { let outgoing = TcpStream::connect(remote_addr).await?; proxy_to_remote(incoming, outgoing).await; Ok(()) } #[cfg(test)] mod tests { use tokio::io::{AsyncReadExt, AsyncWriteExt}; use super::*; // Binding to port 0 makes the os allocate a free high port, so we can run this test without worrying about ports async fn mk_listener() -> TcpListener { TcpListener::bind("127.0.0.1:0").await.unwrap() } #[tokio::test] async fn test_proxy_proxies() { // steps for this test: // Start a tcp server that reads a number and returns that number + 1 // Start a tcp server that proxies to that server // Run a connection to the proxy server, send it 1, expect to get 2 back. 
let mut real_listener = mk_listener().await; let real_addr = real_listener.local_addr().unwrap(); // The real server - adds one to the number sent tokio::spawn(async move { let mut stream = real_listener.accept().await.unwrap().0; let num = stream.read_i64().await.unwrap(); stream.write_all(&(num + 1).to_be_bytes()).await.unwrap(); }); let mut proxy_listener = mk_listener().await; let proxy_addr = proxy_listener.local_addr().unwrap(); // The proxy - forwards to the real address tokio::spawn(async move { let mut stream = proxy_listener.accept().await.unwrap().0; proxy(stream, real_addr).await }); // Connect to the proxy and make sure that our number goes through correctly let mut stream = TcpStream::connect(proxy_addr).await.unwrap(); let send_num: i64 = 1; stream.write_all(&send_num.to_be_bytes()).await.unwrap(); let recv_num = stream.read_i64().await.unwrap(); assert_eq!(send_num + 1, recv_num); } }
extern crate messycanvas;

/// Binary entry point: all real work lives in the library's client module.
fn main() {
    messycanvas::client::main();
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use alloc::vec::Vec; use alloc::string::ToString; use super::super::qlib::path::*; use super::*; pub fn ContainsStr(strs: &Vec<&str>, str: &str) -> bool { for s in strs { if *s == str { return true; } } return false; } pub fn IsSupportedDevMount(m: &oci::Mount) -> bool { let existingDevices = vec!["/dev/fd", "/dev/stdin", "/dev/stdout", "/dev/stderr", "/dev/null", "/dev/zero", "/dev/full", "/dev/random", "/dev/urandom", "/dev/shm", "/dev/pts", "/dev/ptmx"]; let dst = Clean(&m.destination); if dst.as_str() == "dev" { return false; } for dev in existingDevices { if dst.as_str() == dev || HasPrefix(&dst, &(dev.to_string() + "/")) { return false } } return true }
//! # Piccolo //! //! Piccolo is a small, light, high-pitched scripting language (eventually) intended //! for embedding in Rust projects. pub extern crate downcast_rs; pub extern crate fnv; #[macro_use] pub extern crate log; pub mod compiler; pub mod error; pub mod runtime; /// Commonly used items that you might want access to. pub mod prelude { pub use super::compiler::{emitter::Emitter, parser::parse, scanner::Scanner}; pub use super::compiler::{Token, TokenKind}; pub use super::error::{ErrorKind, PiccoloError}; pub use super::runtime::{ chunk::Chunk, object::Object, value::Constant, value::Value, vm::Machine, }; } use prelude::*; #[cfg(feature = "pc-debug")] pub use compiler::{compile_chunk, scan_all}; #[cfg(feature = "fuzzer")] pub use compiler::print_tokens; /// Interprets a Piccolo source and returns its result. /// /// # Examples /// /// ```rust /// # fn main() -> Result<(), Vec<piccolo::prelude::PiccoloError>> { /// let result = piccolo::interpret("1 + 2")?; /// assert_eq!(3, result.into::<i64>()); /// # Ok(()) /// # } /// ``` pub fn interpret(src: &str) -> Result<Constant, Vec<PiccoloError>> { let mut scanner = Scanner::new(src); debug!("parse"); let ast = parse(&mut scanner)?; debug!("ast\n{}", compiler::ast::print_ast(&ast)); debug!("compile"); let mut emitter = compiler::emitter::Emitter::new(); compiler::emitter::compile_ast(&mut emitter, &ast)?; let chunk = emitter.current_chunk(); debug!("chunk\n{}", chunk.disassemble("")); debug!("interpret"); Ok(Machine::new().interpret(&chunk)?) } /// Reads a file and interprets its contents. 
pub fn do_file(file: &std::path::Path) -> Result<Constant, Vec<PiccoloError>> { let contents = std::fs::read_to_string(file).map_err(|e| vec![PiccoloError::from(e)])?; interpret(&contents).map_err(|v| { v.into_iter() .map(|e| e.file(file.to_str().unwrap().to_owned())) .collect() }) } pub(crate) fn encode_bytes(low: u8, high: u8) -> u16 { ((high as u16) << 8) | (low as u16) } pub(crate) fn decode_bytes(bytes: u16) -> (u8, u8) { let high = (bytes >> 8) as u8; let low = (bytes & 0xff) as u8; (low, high) } #[cfg(feature = "fuzzer")] pub mod fuzzer { extern crate rand; use crate::compiler::TokenKind; use crate::Machine; use rand::distributions::{Distribution, Standard}; use rand::Rng; /// Run `n` tests of random tokens. pub fn fuzz(n: usize, min_len: usize, max_len: usize) -> Option<Vec<usize>> { let mut ok = None; let start = std::time::Instant::now(); let mut avg = 0.0; for n in 1..=n { let s = std::time::Instant::now(); if let Some(_) = run(n, min_len, max_len) { if ok.is_none() { ok = Some(vec![n]); } else { ok.as_mut().unwrap().push(n); } } avg += (std::time::Instant::now() - s).as_secs_f64(); } println!( "{} runs, in {:.8} sec ({:.8} avg per run)", n, (std::time::Instant::now() - start).as_secs_f64(), avg / n as f64 ); ok } // occasionally creates valid programs fn run(n: usize, min_len: usize, max_len: usize) -> Option<()> { let mut src = String::new(); let mut r = rand::thread_rng(); let lines = r.gen_range(min_len, max_len); for _ in 1..lines { let tk: TokenKind = r.gen(); src.push_str(&format!("{} ", tk).to_lowercase()); } if let Ok(chunk) = crate::compile_chunk(&src) { println!("----- run {} compiles -----", n); crate::print_tokens(&crate::compiler::scan_all(&src).unwrap()); chunk.disassemble(""); Machine::new().interpret(&chunk).ok().map(|_| { println!("----- run {} executes -----", n); }) } else { None } } impl Distribution<TokenKind> for Standard { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> TokenKind { match rng.gen_range(0, 50) { // 0 => 
TokenKind::Do, // 1 => TokenKind::End, // 2 => TokenKind::Fn, // 3 => TokenKind::If, // 4 => TokenKind::Else, // 5 => TokenKind::While, // 6 => TokenKind::For, // 7 => TokenKind::In, // 8 => TokenKind::Data, 9 => TokenKind::Let, // 10 => TokenKind::Is, // 11 => TokenKind::Me, // 12 => TokenKind::New, // 13 => TokenKind::Err, 14 => TokenKind::Retn, 15 => TokenKind::Nil, // 16 => TokenKind::LeftBracket, // 17 => TokenKind::RightBracket, 18 => TokenKind::LeftParen, 19 => TokenKind::RightParen, // 20 => TokenKind::Comma, // 21 => TokenKind::Period, // 22 => TokenKind::ExclusiveRange, // 23 => TokenKind::InclusiveRange, 24 => TokenKind::Assign, 25 => TokenKind::Not, 26 => TokenKind::Plus, 27 => TokenKind::Minus, 28 => TokenKind::Multiply, 29 => TokenKind::Divide, 30 => TokenKind::Modulo, // 31 => TokenKind::LogicalAnd, // 32 => TokenKind::LogicalOr, // 33 => TokenKind::BitwiseAnd, // 34 => TokenKind::BitwiseOr, // 35 => TokenKind::BitwiseXor, 36 => TokenKind::Equal, 37 => TokenKind::NotEqual, 38 => TokenKind::Less, 39 => TokenKind::Greater, 40 => TokenKind::LessEqual, 41 => TokenKind::GreaterEqual, // 42 => TokenKind::ShiftLeft, // 43 => TokenKind::ShiftRight, 44 => TokenKind::Identifier, 45 => TokenKind::String, 46 => TokenKind::True, 47 => TokenKind::False, 48 => TokenKind::Double(0.0), 49 => TokenKind::Integer(1), _ => TokenKind::Nil, } } } } #[cfg(test)] mod integration { use super::{parse, Emitter, Machine, Scanner, Token, TokenKind}; use crate::compiler::ast::{self, Expr, Stmt}; use crate::Constant; #[test] #[ignore] fn very_long() { let path = std::path::Path::new("examples/long.pc"); crate::do_file(path).unwrap(); } #[test] fn encode_decode() { let bytes: u16 = 0xbead; let (low, high) = crate::decode_bytes(bytes); assert_eq!(high, 0xbe); assert_eq!(low, 0xad); let bytes2 = crate::encode_bytes(low, high); assert_eq!(bytes, bytes2); } #[test] fn idk() { let src = "a=:1+2"; let mut scanner = Scanner::new(src); let ast = parse(&mut scanner).unwrap(); println!("{}", 
ast::print_ast(&ast)); let mut ne = Emitter::new(); crate::compiler::emitter::compile_ast(&mut ne, &ast).unwrap(); let chunk = ne.into_chunk(); #[cfg(feature = "pc-debug")] { chunk.disassemble("idklol"); } let mut vm = Machine::new(); println!("{}", vm.interpret(&chunk).unwrap()); } #[test] fn visitor_emitter() { let src = "1+2*3+4"; let mut scanner = Scanner::new(src); let ast = parse(&mut scanner).unwrap(); if let Stmt::Expr { expr, .. } = &ast[0] { assert_eq!( expr, &Expr::Binary { lhs: Box::new(Expr::Binary { lhs: Box::new(Expr::Literal { literal: Token::new(TokenKind::Integer(1), "1", 1) }), op: Token::new(TokenKind::Plus, "+", 1), rhs: Box::new(Expr::Binary { lhs: Box::new(Expr::Literal { literal: Token::new(TokenKind::Integer(2), "2", 1) }), op: Token::new(TokenKind::Multiply, "*", 1), rhs: Box::new(Expr::Literal { literal: Token::new(TokenKind::Integer(3), "3", 1) }) }) }), op: Token::new(TokenKind::Plus, "+", 1), rhs: Box::new(Expr::Literal { literal: Token::new(TokenKind::Integer(4), "4", 1) }), } ); println!("{}", ast::print_expression(expr)); let mut ne = Emitter::new(); crate::compiler::emitter::compile_ast(&mut ne, &ast).unwrap(); let chunk = ne.into_chunk(); #[cfg(feature = "pc-debug")] { println!("{}", chunk.disassemble("idklol")); } let mut vm = Machine::new(); assert_eq!(vm.interpret(&chunk).unwrap(), Constant::Integer(11)); } else { panic!("ast not initialized") } } }
// Copyright 2018 The Servo Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use sys; /// Direction of text flow during layout. /// /// This maps to the [`hb_direction_t`] from /// [`harfbuzz-sys`]. It can be converted to /// or from `hb_direction_t` using the [`From`] /// and [`Into`] traits: /// /// ``` /// # use harfbuzz::{Direction, sys}; /// assert_eq!(Direction::from(sys::HB_DIRECTION_LTR), Direction::LTR); /// assert_eq!(sys::hb_direction_t::from(Direction::BTT), sys::HB_DIRECTION_BTT); /// /// let hb_dir: sys::hb_direction_t = Direction::LTR.into(); /// assert_eq!(hb_dir, sys::HB_DIRECTION_LTR); /// /// let dir: Direction = sys::HB_DIRECTION_TTB.into(); /// assert_eq!(dir, Direction::TTB); /// ``` /// /// [`hb_direction_t`]: ../harfbuzz_sys/type.hb_direction_t.html /// [`harfbuzz-sys`]: ../harfbuzz_sys/index.html /// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html /// [`Into`]: https://doc.rust-lang.org/std/convert/trait.Into.html #[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] pub enum Direction { /// Initial, unset direction. /// /// This corresponds to [`HB_DIRECTION_INVALID`]. /// /// [`HB_DIRECTION_INVALID`]: ../harfbuzz_sys/constant.HB_DIRECTION_INVALID.html Invalid, /// Text is set horizontally from left to right. /// /// This corresponds to [`HB_DIRECTION_LTR`]. /// /// [`HB_DIRECTION_LTR`]: ../harfbuzz_sys/constant.HB_DIRECTION_LTR.html LTR, /// Text is set horizontally from right to left. /// /// This corresponds to [`HB_DIRECTION_RTL`]. /// /// [`HB_DIRECTION_RTL`]: ../harfbuzz_sys/constant.HB_DIRECTION_RTL.html RTL, /// Text is set vertically from top to bottom. 
/// /// This corresponds to [`HB_DIRECTION_TTB`]. /// /// [`HB_DIRECTION_TTB`]: ../harfbuzz_sys/constant.HB_DIRECTION_TTB.html TTB, /// Text is set vertically from bottom to top. /// /// This corresponds to [`HB_DIRECTION_BTT`]. /// /// [`HB_DIRECTION_BTT`]: ../harfbuzz_sys/constant.HB_DIRECTION_BTT.html BTT, } impl From<sys::hb_direction_t> for Direction { fn from(s: sys::hb_direction_t) -> Self { match s { sys::HB_DIRECTION_INVALID => Direction::Invalid, sys::HB_DIRECTION_LTR => Direction::LTR, sys::HB_DIRECTION_RTL => Direction::RTL, sys::HB_DIRECTION_TTB => Direction::TTB, sys::HB_DIRECTION_BTT => Direction::BTT, _ => Direction::Invalid, } } } impl From<Direction> for sys::hb_direction_t { fn from(s: Direction) -> Self { match s { Direction::Invalid => sys::HB_DIRECTION_INVALID, Direction::LTR => sys::HB_DIRECTION_LTR, Direction::RTL => sys::HB_DIRECTION_RTL, Direction::TTB => sys::HB_DIRECTION_TTB, Direction::BTT => sys::HB_DIRECTION_BTT, } } }
use serde::{Deserialize, Serialize};
use serde_json::Result;

type StockId = String;

#[derive(Debug, Serialize, Deserialize)]
struct Stock {
    id: StockId,
    qty: i32,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
enum Event {
    Created { id: StockId },
    Updated { id: StockId, qty: i32 },
}

/// Round-trips one `Stock` through JSON, printing the serialized text
/// and then the deserialized struct.
fn sample1() -> Result<()> {
    let original = Stock {
        id: "s1".to_string(),
        qty: 5,
    };

    let encoded = serde_json::to_string(&original)?;
    println!("{}", encoded);

    let decoded: Stock = serde_json::from_str(&encoded)?;
    println!("{:?}", decoded);

    Ok(())
}

/// Round-trips both `Event` variants through internally-tagged JSON:
/// first every serialized form is printed, then every decoded value.
fn sample2() -> Result<()> {
    let events = vec![
        Event::Created { id: "s1".to_string() },
        Event::Updated { id: "s1".to_string(), qty: 10 },
    ];

    let mut encoded = Vec::with_capacity(events.len());
    for event in &events {
        let json = serde_json::to_string(event)?;
        println!("{}", json);
        encoded.push(json);
    }

    for json in &encoded {
        let decoded: Event = serde_json::from_str(json)?;
        println!("{:?}", decoded);
    }

    Ok(())
}

fn main() {
    sample1().unwrap();
    println!("-----");
    sample2().unwrap();
}
// revisions: base nll // ignore-compare-mode-nll //[nll] compile-flags: -Z borrowck=mir fn static_to_a_to_static_through_ref_in_tuple<'a>(x: &'a u32) -> &'static u32 { let (ref y, _z): (&'a u32, u32) = (&22, 44); *y //~ ERROR } fn main() {}
//! namespace introduces a namespace Datastore Shim, which basically //! mounts the entire child datastore under a prefix. //! Use the Wrap function to wrap a datastore with any Key prefix. //! # For example: //! //! ```norun //! let db = /*...*/; //! let mut ns = wrap(db.clone(), Key("/foo/bar")); //! ns.put(Key("/beep"), "boop"); // now it's /foo/bar/boop //! let v2 = ns.get("beep").unwrap(); //! asset_eq!(&v2, "boop"); //! //! // and, in the underlying database //! v3 = db.get("/foo/bar/beep").unwrap(); //! asset_eq!(&v3, "boop"); //! ``` use crate::datastore::Datastore as DatastoreT; use crate::key::Key; use crate::keytransform; // re-export pub use crate::keytransform::{Datastore, PrefixTransform}; #[inline] pub fn prefix_transform(prefix: Key) -> PrefixTransform { PrefixTransform { prefix } } pub fn wrap<D: DatastoreT>(child: D, prefix: Key) -> Datastore<D, PrefixTransform> { keytransform::wrap(child, prefix_transform(prefix)) } pub type NSDatastore<D> = Datastore<D, PrefixTransform>;
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use common_exception::Result; use common_expression::types::nullable::NullableColumnBuilder; use common_expression::types::BooleanType; use common_expression::types::DataType; use common_expression::BlockEntry; use common_expression::Column; use common_expression::ColumnBuilder; use common_expression::DataBlock; use common_expression::Evaluator; use common_expression::Expr; use common_expression::FieldIndex; use common_expression::FunctionContext; use common_expression::ScalarRef; use common_expression::Value; use common_functions::BUILTIN_FUNCTIONS; use common_pipeline_core::processors::port::InputPort; use common_pipeline_core::processors::port::OutputPort; use common_pipeline_core::processors::Processor; use common_pipeline_transforms::processors::transforms::Transform; use common_pipeline_transforms::processors::transforms::Transformer; /// `BlockOperator` takes a `DataBlock` as input and produces a `DataBlock` as output. #[derive(Clone)] pub enum BlockOperator { /// Batch mode of map which merges map operators into one. Map { exprs: Vec<Expr> }, /// Filter the input [`DataBlock`] with the predicate `eval`. Filter { expr: Expr }, /// Reorganize the input [`DataBlock`] with `projection`. Project { projection: Vec<FieldIndex> }, /// Expand the input [`DataBlock`] with set-returning functions. 
FlatMap { srf_exprs: Vec<Expr> }, } impl BlockOperator { pub fn execute(&self, func_ctx: &FunctionContext, mut input: DataBlock) -> Result<DataBlock> { match self { BlockOperator::Map { exprs } => { for expr in exprs { let evaluator = Evaluator::new(&input, *func_ctx, &BUILTIN_FUNCTIONS); let result = evaluator.run(expr)?; let col = BlockEntry { data_type: expr.data_type().clone(), value: result, }; input.add_column(col); } Ok(input) } BlockOperator::Filter { expr } => { assert_eq!(expr.data_type(), &DataType::Boolean); let evaluator = Evaluator::new(&input, *func_ctx, &BUILTIN_FUNCTIONS); let filter = evaluator.run(expr)?.try_downcast::<BooleanType>().unwrap(); input.filter_boolean_value(&filter) } BlockOperator::Project { projection } => { let mut result = DataBlock::new(vec![], input.num_rows()); for index in projection { result.add_column(input.get_by_offset(*index).clone()); } Ok(result) } BlockOperator::FlatMap { srf_exprs } => { let eval = Evaluator::new(&input, *func_ctx, &BUILTIN_FUNCTIONS); // [ // srf1: [ // result_set1: [ // col1, col2, ... // ], // ... // ], // ... // ] let result = srf_exprs .iter() .map(|srf_expr| eval.run_srf(srf_expr)) .collect::<Result<Vec<_>>>()?; let mut result_data_blocks = Vec::with_capacity(input.num_rows()); for i in 0..input.num_rows() { let mut row = Vec::with_capacity(input.num_rows()); // Get the max number of rows of all result sets. let mut max_num_rows = 0; result.iter().for_each(|srf_results| { let (_, result_set_rows) = &srf_results[i]; if *result_set_rows > max_num_rows { max_num_rows = *result_set_rows; } }); if max_num_rows == 0 && !result_data_blocks.is_empty() { // Skip current row continue; } for entry in input.columns() { // Take the i-th row of input data block and add it to the row. 
let mut builder = ColumnBuilder::with_capacity(&entry.data_type, max_num_rows); let scalar_ref = entry.value.index(i).unwrap(); (0..max_num_rows).for_each(|_| { builder.push(scalar_ref.clone()); }); row.push(BlockEntry { value: Value::Column(builder.build()), data_type: entry.data_type.clone(), }); } for (srf_expr, srf_results) in srf_exprs.iter().zip(&result) { let (mut row_result, repeat_times) = srf_results[i].clone(); if let Value::Column(Column::Tuple(fields)) = &mut row_result { // If the current result set has less rows than the max number of rows, // we need to pad the result set with null values. // TODO(leiysky): this can be optimized by using a `zip` array function if repeat_times < max_num_rows { for field in fields { match field { Column::Null { .. } => { *field = ColumnBuilder::repeat( &ScalarRef::Null, max_num_rows, &DataType::Null, ) .build(); } Column::Nullable(box nullable_column) => { let mut column_builder = NullableColumnBuilder::from_column( (*nullable_column).clone(), ); (0..(max_num_rows - repeat_times)).for_each(|_| { column_builder.push_null(); }); *field = Column::Nullable(Box::new(column_builder.build())); } _ => unreachable!(), } } } } row.push(BlockEntry { data_type: srf_expr.data_type().clone(), value: row_result, }) } result_data_blocks.push(DataBlock::new(row, max_num_rows)); } let result = DataBlock::concat(&result_data_blocks)?; Ok(result) } } } } /// `CompoundBlockOperator` is a pipeline of `BlockOperator`s pub struct CompoundBlockOperator { pub operators: Vec<BlockOperator>, pub ctx: FunctionContext, } impl CompoundBlockOperator { pub fn create( input_port: Arc<InputPort>, output_port: Arc<OutputPort>, ctx: FunctionContext, operators: Vec<BlockOperator>, ) -> Box<dyn Processor> { let operators = Self::compact_map(operators); Transformer::<Self>::create(input_port, output_port, Self { operators, ctx }) } pub fn compact_map(operators: Vec<BlockOperator>) -> Vec<BlockOperator> { let mut results = 
Vec::with_capacity(operators.len()); for op in operators { match op { BlockOperator::Map { exprs } => { if let Some(BlockOperator::Map { exprs: pre_exprs }) = results.last_mut() { pre_exprs.extend(exprs); } else { results.push(BlockOperator::Map { exprs }); } } _ => results.push(op), } } results } #[allow(dead_code)] pub fn merge(self, other: Self) -> Self { let mut operators = self.operators; operators.extend(other.operators); Self { operators, ctx: self.ctx, } } } impl Transform for CompoundBlockOperator { const NAME: &'static str = "CompoundBlockOperator"; const SKIP_EMPTY_DATA_BLOCK: bool = true; fn transform(&mut self, data_block: DataBlock) -> Result<DataBlock> { self.operators .iter() .try_fold(data_block, |input, op| op.execute(&self.ctx, input)) } fn name(&self) -> String { format!( "{}({})", Self::NAME, self.operators .iter() .map(|op| { match op { BlockOperator::Map { .. } => "Map", BlockOperator::Filter { .. } => "Filter", BlockOperator::Project { .. } => "Project", BlockOperator::FlatMap { .. } => "FlatMap", } .to_string() }) .collect::<Vec<String>>() .join("->") ) } }
use crate::engine::game::Game;

/// Component attached to every network-replicated entity: a unique
/// replication id plus the kind of entity being mirrored.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Replicated {
    /// Unique id allocated by the owning `Game`.
    pub id: ReplicationId,
    /// Which gameplay entity kind this replication entry describes.
    pub entity_type: ReplicatedEntityType,
}

impl Replicated {
    /// Creates a new component, drawing a fresh `ReplicationId` from `game`.
    pub fn new_for_game(game: &mut Game, entity_type: ReplicatedEntityType) -> Self {
        Self {
            id: game.get_new_replication_id(),
            entity_type,
        }
    }
}

/// Newtype over the game-assigned replication counter.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ReplicationId(pub u32);

/// Kinds of entities that can be replicated.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[allow(dead_code)]
pub enum ReplicatedEntityType {
    Character,
    Minion,
    Tower,
    Core,
}
extern crate hyper; extern crate serde; #[macro_use] extern crate serde_derive; extern crate serde_json; extern crate time; mod radio; mod util; use radio::data::RADIOSTATIONS; use radio::Radio; use radio::song::Song; use std::collections::LinkedList; use std::thread; use std::time::Duration; use util::html_generator::write_html_from_json; use util::logger::{log, log_at}; use util::song_map::SongMap; const DIRECTORY: &'static str = "data"; const SUBDIRECTORY: &'static str = "radio"; /// Radiostation analyzer /// Collects songs played on radiostations and displays them sorted by /// times played fn main() { // if the command gets used with --html generate html // because we don't need to generate html everytime // or we want to generate html without the analyzer runnning match std::env::args().nth(1) { Some(arg) => { if arg == "--html" { // Write html displaying info write_html_from_json(DIRECTORY, SUBDIRECTORY); return; } }, None => {}, } // Start analyzing run_radio_analyser(); } /// Analyzes stations /// Gets the song played about every minute and saves them to json fn run_radio_analyser() { log("Start Analyzing Radio stations"); for station in RADIOSTATIONS { thread::spawn(move || { // Try to load existing file let mut map = SongMap::load_from_file( &format!("{}\\{}", DIRECTORY, SUBDIRECTORY), station.shorthand, ); // Save the last 5 songs played let mut last_songs: LinkedList<Song> = LinkedList::new(); loop { // If theres a song playing and it's not contained in the // last 5 songs log it and add it to the list and save change match station.get_current_song() { Some(song) => { if !last_songs.contains(&song) { log_at( &format!("log\\{}", station.shorthand), station.shorthand, &format!("{}: {}", station.name, song), ); println!("{}: {}", station.name, song); map.insert_song(song.clone()); if last_songs.len() == 5 { last_songs.pop_back(); } last_songs.push_front(song); map.save_to_file( &format!("{}\\{}", DIRECTORY, SUBDIRECTORY), station.shorthand, ); } }, None 
=> {}, } // Wait about a minute thread::sleep(Duration::from_secs(60)); } }); } println!("-- Press any key to end program --"); // stdin blocks mains thread, so we don't need to join the other threads std::io::stdin() .read_line(&mut String::new()) .expect("something went seriously wrong :O"); log("Stop Analyzing Radio stations"); }
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
//
// NOTE(review): machine-generated GObject bindings for WebKitDOMTreeWalker.
// Fixes belong in the gir definitions, not here; this file will be
// overwritten on regeneration.

use glib;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib_sys;
use libc;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
use std::ptr;
use webkit2_webextension_sys;
use DOMNode;
use DOMObject;

glib_wrapper! {
    pub struct DOMTreeWalker(Object<webkit2_webextension_sys::WebKitDOMTreeWalker, webkit2_webextension_sys::WebKitDOMTreeWalkerClass, DOMTreeWalkerClass>) @extends DOMObject;

    match fn {
        get_type => || webkit2_webextension_sys::webkit_dom_tree_walker_get_type(),
    }
}

// Sentinel for APIs taking `Option<&DOMTreeWalker>`.
pub const NONE_DOM_TREE_WALKER: Option<&DOMTreeWalker> = None;

/// Safe wrappers over the WebKitDOMTreeWalker C API, implemented for every
/// type that can act as a `DOMTreeWalker`. All methods are deprecated from
/// the WebKit version given in their `cfg_attr`.
pub trait DOMTreeWalkerExt: 'static {
    #[cfg_attr(feature = "v2_22", deprecated)]
    fn first_child(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_current_node(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_12", deprecated)]
    fn get_expand_entity_references(&self) -> bool;

    //#[cfg_attr(feature = "v2_22", deprecated)]
    //fn get_filter(&self) -> /*Ignored*/Option<DOMNodeFilter>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_root(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn get_what_to_show(&self) -> libc::c_ulong;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn last_child(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn next_node(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn next_sibling(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn parent_node(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn previous_node(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn previous_sibling(&self) -> Option<DOMNode>;

    #[cfg_attr(feature = "v2_22", deprecated)]
    fn set_current_node<P: IsA<DOMNode>>(&self, value: &P) -> Result<(), glib::Error>;

    fn connect_property_current_node_notify<F: Fn(&Self) + 'static>(&self, f: F)
        -> SignalHandlerId;

    fn connect_property_filter_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_root_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;

    fn connect_property_what_to_show_notify<F: Fn(&Self) + 'static>(&self, f: F)
        -> SignalHandlerId;
}

impl<O: IsA<DOMTreeWalker>> DOMTreeWalkerExt for O {
    // Each getter converts `self` to the raw C pointer, calls the C function,
    // and converts the returned (borrowed) pointer back with `from_glib_none`.
    fn first_child(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(
                webkit2_webextension_sys::webkit_dom_tree_walker_first_child(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    fn get_current_node(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(
                webkit2_webextension_sys::webkit_dom_tree_walker_get_current_node(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    fn get_expand_entity_references(&self) -> bool {
        unsafe {
            from_glib(
                webkit2_webextension_sys::webkit_dom_tree_walker_get_expand_entity_references(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    //fn get_filter(&self) -> /*Ignored*/Option<DOMNodeFilter> {
    //    unsafe { TODO: call webkit2_webextension_sys:webkit_dom_tree_walker_get_filter() }
    //}

    fn get_root(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(webkit2_webextension_sys::webkit_dom_tree_walker_get_root(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    fn get_what_to_show(&self) -> libc::c_ulong {
        unsafe {
            webkit2_webextension_sys::webkit_dom_tree_walker_get_what_to_show(
                self.as_ref().to_glib_none().0,
            )
        }
    }

    fn last_child(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(webkit2_webextension_sys::webkit_dom_tree_walker_last_child(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    fn next_node(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(webkit2_webextension_sys::webkit_dom_tree_walker_next_node(
                self.as_ref().to_glib_none().0,
            ))
        }
    }

    fn next_sibling(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(
                webkit2_webextension_sys::webkit_dom_tree_walker_next_sibling(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    fn parent_node(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(
                webkit2_webextension_sys::webkit_dom_tree_walker_parent_node(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    fn previous_node(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(
                webkit2_webextension_sys::webkit_dom_tree_walker_previous_node(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    fn previous_sibling(&self) -> Option<DOMNode> {
        unsafe {
            from_glib_none(
                webkit2_webextension_sys::webkit_dom_tree_walker_previous_sibling(
                    self.as_ref().to_glib_none().0,
                ),
            )
        }
    }

    fn set_current_node<P: IsA<DOMNode>>(&self, value: &P) -> Result<(), glib::Error> {
        unsafe {
            // GError out-parameter pattern: null on success, owned error
            // pointer on failure (hence `from_glib_full`).
            let mut error = ptr::null_mut();
            let _ = webkit2_webextension_sys::webkit_dom_tree_walker_set_current_node(
                self.as_ref().to_glib_none().0,
                value.as_ref().to_glib_none().0,
                &mut error,
            );
            if error.is_null() {
                Ok(())
            } else {
                Err(from_glib_full(error))
            }
        }
    }

    // The connect_* methods all follow the same pattern: box the Rust
    // closure, hand its raw pointer to GObject as user data, and install an
    // `extern "C"` trampoline that turns the raw object pointer back into a
    // typed wrapper before invoking the closure.
    fn connect_property_current_node_notify<F: Fn(&Self) + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId {
        unsafe extern "C" fn notify_current_node_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMTreeWalker,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMTreeWalker>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMTreeWalker::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::current-node\0".as_ptr() as *const _,
                Some(transmute(
                    notify_current_node_trampoline::<Self, F> as usize,
                )),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_filter_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_filter_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMTreeWalker,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMTreeWalker>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMTreeWalker::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::filter\0".as_ptr() as *const _,
                Some(transmute(notify_filter_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_root_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_root_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMTreeWalker,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMTreeWalker>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMTreeWalker::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::root\0".as_ptr() as *const _,
                Some(transmute(notify_root_trampoline::<Self, F> as usize)),
                Box_::into_raw(f),
            )
        }
    }

    fn connect_property_what_to_show_notify<F: Fn(&Self) + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId {
        unsafe extern "C" fn notify_what_to_show_trampoline<P, F: Fn(&P) + 'static>(
            this: *mut webkit2_webextension_sys::WebKitDOMTreeWalker,
            _param_spec: glib_sys::gpointer,
            f: glib_sys::gpointer,
        ) where
            P: IsA<DOMTreeWalker>,
        {
            let f: &F = &*(f as *const F);
            f(&DOMTreeWalker::from_glib_borrow(this).unsafe_cast())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::what-to-show\0".as_ptr() as *const _,
                Some(transmute(
                    notify_what_to_show_trampoline::<Self, F> as usize,
                )),
                Box_::into_raw(f),
            )
        }
    }
}

impl fmt::Display for DOMTreeWalker {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "DOMTreeWalker")
    }
}
// Copyright 2023 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use common_base::base::escape_for_key; use common_exception::ErrorCode; use common_exception::Result; use common_meta_app::principal::UserDefinedFileFormat; use common_meta_kvapi::kvapi; use common_meta_kvapi::kvapi::UpsertKVReq; use common_meta_types::MatchSeq; use common_meta_types::MatchSeqExt; use common_meta_types::MetaError; use common_meta_types::Operation; use common_meta_types::SeqV; use crate::serde::deserialize_struct; use crate::serde::serialize_struct; use crate::FileFormatApi; static USER_FILE_FORMAT_API_KEY_PREFIX: &str = "__fd_file_formats"; pub struct FileFormatMgr { kv_api: Arc<dyn kvapi::KVApi<Error = MetaError>>, file_format_prefix: String, } impl FileFormatMgr { pub fn create(kv_api: Arc<dyn kvapi::KVApi<Error = MetaError>>, tenant: &str) -> Result<Self> { if tenant.is_empty() { return Err(ErrorCode::TenantIsEmpty( "Tenant can not empty(while role mgr create)", )); } Ok(Self { kv_api, file_format_prefix: format!( "{}/{}", USER_FILE_FORMAT_API_KEY_PREFIX, escape_for_key(tenant)? ), }) } } #[async_trait::async_trait] impl FileFormatApi for FileFormatMgr { async fn add_file_format(&self, info: UserDefinedFileFormat) -> Result<u64> { let seq = MatchSeq::Exact(0); let val = Operation::Update(serialize_struct( &info, ErrorCode::IllegalFileFormat, || "", )?); let key = format!( "{}/{}", self.file_format_prefix, escape_for_key(&info.name)? 
); let upsert_info = self .kv_api .upsert_kv(UpsertKVReq::new(&key, seq, val, None)); let res = upsert_info.await?.added_or_else(|v| { ErrorCode::FileFormatAlreadyExists(format!( "file_format already exists, seq [{}]", v.seq )) })?; Ok(res.seq) } async fn get_file_format( &self, name: &str, seq: MatchSeq, ) -> Result<SeqV<UserDefinedFileFormat>> { let key = format!("{}/{}", self.file_format_prefix, escape_for_key(name)?); let kv_api = self.kv_api.clone(); let get_kv = async move { kv_api.get_kv(&key).await }; let res = get_kv.await?; let seq_value = res .ok_or_else(|| ErrorCode::UnknownFileFormat(format!("Unknown file_format {}", name)))?; match seq.match_seq(&seq_value) { Ok(_) => Ok(SeqV::new( seq_value.seq, deserialize_struct(&seq_value.data, ErrorCode::IllegalFileFormat, || "")?, )), Err(_) => Err(ErrorCode::UnknownFileFormat(format!( "Unknown file_format {}", name ))), } } async fn get_file_formats(&self) -> Result<Vec<UserDefinedFileFormat>> { let values = self.kv_api.prefix_list_kv(&self.file_format_prefix).await?; let mut file_format_infos = Vec::with_capacity(values.len()); for (_, value) in values { let file_format_info = deserialize_struct(&value.data, ErrorCode::IllegalFileFormat, || "")?; file_format_infos.push(file_format_info); } Ok(file_format_infos) } async fn drop_file_format(&self, name: &str, seq: MatchSeq) -> Result<()> { let key = format!("{}/{}", self.file_format_prefix, escape_for_key(name)?); let kv_api = self.kv_api.clone(); let upsert_kv = async move { kv_api .upsert_kv(UpsertKVReq::new(&key, seq, Operation::Delete, None)) .await }; let res = upsert_kv.await?; if res.prev.is_some() && res.result.is_none() { Ok(()) } else { Err(ErrorCode::UnknownFileFormat(format!( "Unknown FileFormat {}", name ))) } } }
use winapi::um::winnt::HANDLE; use winapi::um::handleapi::CloseHandle; use winapi::shared::winerror::WAIT_TIMEOUT; use winapi::um::winbase::{WAIT_OBJECT_0, WAIT_FAILED}; use winapi::um::winnt::{SYNCHRONIZE, EVENT_MODIFY_STATE}; use winapi::um::synchapi::{CreateEventW, OpenEventW, SetEvent, WaitForSingleObject}; use std::ptr; pub enum Win32EventWaitResult { Signaled, Timout, Failed } /// A wrapper over win32 events pub struct Win32Event { handle: HANDLE } impl Win32Event { pub fn create(name: &str) -> Result<Win32Event, ()> { let name = to_utf16(name); let handle = unsafe { CreateEventW(ptr::null_mut(), 0, 0, name.as_ptr()) }; match handle.is_null() { true => Err(()), false => Ok(Win32Event{ handle }) } } pub fn open(name: &str) -> Result<Win32Event, ()> { let name = to_utf16(name); let handle = unsafe { OpenEventW(SYNCHRONIZE | EVENT_MODIFY_STATE, 0, name.as_ptr()) }; match handle.is_null() { true => Err(()), false => Ok(Win32Event{ handle }) } } pub fn wait(&self, timeout: u32) -> Win32EventWaitResult { match unsafe { WaitForSingleObject(self.handle, timeout) } { WAIT_OBJECT_0 => Win32EventWaitResult::Signaled, WAIT_TIMEOUT => Win32EventWaitResult::Timout, WAIT_FAILED => Win32EventWaitResult::Failed, _ => unreachable!() } } pub fn set(&self) { unsafe { SetEvent(self.handle); } } pub fn close(&self) { unsafe { CloseHandle(self.handle); } } } unsafe impl Send for Win32Event {} unsafe impl Sync for Win32Event {} impl Default for Win32Event { fn default() -> Win32Event { Win32Event { handle: ptr::null_mut() } } } fn to_utf16(s: &str) -> Vec<u16> { use std::ffi::OsStr; use std::os::windows::ffi::OsStrExt; OsStr::new(s) .encode_wide() .chain(Some(0u16).into_iter()) .collect() }
//! An API for declaring rust-code callbacks to be executed when a given pattern is matched. //! //! A flexer rule is a [`crate::automata::pattern`] associated with rust code to be executed as a //! callback. use crate::automata::pattern::Pattern; // ========== // == Rule == // ========== /// A flexer rule. #[derive(Clone,Debug,PartialEq)] pub struct Rule { /// The pattern that triggers the callback. pub pattern:Pattern, /// The code to execute when [`Rule::pattern`] matches, containing rust code as a /// [`std::string::String`]. /// /// This code will be called directly from a method defined on your Lexer (the one that contains /// a [`crate::Flexer`] instance. To this end, the code you provide as a string must be valid in /// that context. pub callback:String, } impl Rule { /// Creates a new rule. pub fn new(pattern:Pattern, callback:impl Into<String>) -> Self { Rule{pattern,callback:callback.into()} } }
//! Tests for the raw (unprepared) query API for Postgres. use sqlx::{Cursor, Executor, Postgres, Row}; use sqlx_test::new; /// Tests the edge case of executing a completely empty query string. /// /// This gets flagged as an `EmptyQueryResponse` in Postgres. We currently /// catch this and just return no rows. #[cfg_attr(feature = "runtime-async-std", async_std::test)] #[cfg_attr(feature = "runtime-tokio", tokio::test)] async fn test_empty_query() -> anyhow::Result<()> { let mut conn = new::<Postgres>().await?; let affected = conn.execute("").await?; assert_eq!(affected, 0); Ok(()) } /// Test a simple select expression. This should return the row. #[cfg_attr(feature = "runtime-async-std", async_std::test)] #[cfg_attr(feature = "runtime-tokio", tokio::test)] async fn test_select_expression() -> anyhow::Result<()> { let mut conn = new::<Postgres>().await?; let mut cursor = conn.fetch("SELECT 5"); let row = cursor.next().await?.unwrap(); assert!(5i32 == row.try_get::<i32, _>(0)?); Ok(()) } /// Test that we can interleave reads and writes to the database /// in one simple query. Using the `Cursor` API we should be /// able to fetch from both queries in sequence. #[cfg_attr(feature = "runtime-async-std", async_std::test)] #[cfg_attr(feature = "runtime-tokio", tokio::test)] async fn test_multi_read_write() -> anyhow::Result<()> { let mut conn = new::<Postgres>().await?; let mut cursor = conn.fetch( " CREATE TABLE IF NOT EXISTS _sqlx_test_postgres_5112 ( id BIGSERIAL PRIMARY KEY, text TEXT NOT NULL ); SELECT 'Hello World' as _1; INSERT INTO _sqlx_test_postgres_5112 (text) VALUES ('this is a test'); SELECT id, text FROM _sqlx_test_postgres_5112; ", ); let row = cursor.next().await?.unwrap(); assert!("Hello World" == row.try_get::<&str, _>("_1")?); let row = cursor.next().await?.unwrap(); let id: i64 = row.try_get("id")?; let text: &str = row.try_get("text")?; assert_eq!(1_i64, id); assert_eq!("this is a test", text); Ok(()) }
// Test suite for WalkDir: models a directory tree as a `Tree` value, writes
// it to a real temp directory, walks it back, and compares the result.
#![cfg_attr(windows, allow(dead_code, unused_imports))]

use std::cmp;
use std::env;
use std::fs::{self, File};
use std::io;
use std::path::{Path, PathBuf};
use std::collections::HashMap;

use quickcheck::{Arbitrary, Gen, QuickCheck, StdGen};
use rand::{self, Rng, RngCore};

use super::{DirEntry, WalkDir, IntoIter, Error, ErrorInner};

/// In-memory model of a file-system tree: directories with children, plain
/// files, and symlinks (with a flag for whether the target is a directory).
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
enum Tree {
    Dir(PathBuf, Vec<Tree>),
    File(PathBuf),
    Symlink {
        src: PathBuf,
        dst: PathBuf,
        dir: bool,
    }
}

impl Tree {
    /// Rebuilds a `Tree` by walking `p` (after applying the configuration
    /// closure `f` to the `WalkDir`), using enter/exit events to track the
    /// directory stack.
    fn from_walk_with<P, F>(
        p: P,
        f: F,
    ) -> io::Result<Tree>
    where P: AsRef<Path>, F: FnOnce(WalkDir) -> WalkDir {
        let mut stack = vec![Tree::Dir(p.as_ref().to_path_buf(), vec![])];
        let it: WalkEventIter = f(WalkDir::new(p)).into();
        for ev in it {
            match try!(ev) {
                WalkEvent::Exit => {
                    // Leaving a directory: pop it and attach to its parent.
                    let tree = stack.pop().unwrap();
                    if stack.is_empty() {
                        return Ok(tree);
                    }
                    stack.last_mut().unwrap().children_mut().push(tree);
                }
                WalkEvent::Dir(dent) => {
                    stack.push(Tree::Dir(pb(dent.file_name()), vec![]));
                }
                WalkEvent::File(dent) => {
                    let node = if dent.file_type().is_symlink() {
                        let src = try!(dent.path().read_link());
                        let dst = pb(dent.file_name());
                        let dir = dent.path().is_dir();
                        Tree::Symlink { src: src, dst: dst, dir: dir }
                    } else {
                        Tree::File(pb(dent.file_name()))
                    };
                    stack.last_mut().unwrap().children_mut().push(node);
                }
            }
        }
        assert_eq!(stack.len(), 1);
        Ok(stack.pop().unwrap())
    }

    /// Like `from_walk_with`, but walks with `contents_first(true)`:
    /// children arrive before their directory, so entries are grouped by
    /// depth and stitched together when the parent appears.
    fn from_walk_with_contents_first<P, F>(
        p: P,
        f: F,
    ) -> io::Result<Tree>
    where P: AsRef<Path>, F: FnOnce(WalkDir) -> WalkDir {
        let mut contents_of_dir_at_depth = HashMap::new();
        let mut min_depth = ::std::usize::MAX;
        let top_level_path = p.as_ref().to_path_buf();
        for result in f(WalkDir::new(p).contents_first(true)) {
            let dentry = try!(result);
            let tree = if dentry.file_type().is_dir() {
                // A directory adopts whatever was collected one level deeper.
                let any_contents = contents_of_dir_at_depth.remove(
                    &(dentry.depth+1));
                Tree::Dir(pb(dentry.file_name()), any_contents.unwrap_or_default())
            } else {
                if dentry.file_type().is_symlink() {
                    let src = try!(dentry.path().read_link());
                    let dst = pb(dentry.file_name());
                    let dir = dentry.path().is_dir();
                    Tree::Symlink { src: src, dst: dst, dir: dir }
                } else {
                    Tree::File(pb(dentry.file_name()))
                }
            };
            contents_of_dir_at_depth.entry(
                dentry.depth).or_insert(vec!()).push(tree);
            min_depth = cmp::min(min_depth, dentry.depth);
        }
        Ok(Tree::Dir(top_level_path,
                     contents_of_dir_at_depth.remove(&min_depth)
                     .unwrap_or_default()))
    }

    /// The node's own name (directory/file name, or symlink destination).
    fn name(&self) -> &Path {
        match *self {
            Tree::Dir(ref pb, _) => pb,
            Tree::File(ref pb) => pb,
            Tree::Symlink { ref dst, .. } => dst,
        }
    }

    /// Unwraps a directory that contains exactly one child; panics otherwise.
    fn unwrap_singleton(self) -> Tree {
        match self {
            Tree::File(_) | Tree::Symlink { .. } => {
                panic!("cannot unwrap file or link as dir");
            }
            Tree::Dir(_, mut childs) => {
                assert_eq!(childs.len(), 1);
                childs.pop().unwrap()
            }
        }
    }

    /// Returns a directory's children; panics for files and symlinks.
    fn unwrap_dir(self) -> Vec<Tree> {
        match self {
            Tree::File(_) | Tree::Symlink { .. } => {
                panic!("cannot unwrap file as dir");
            }
            Tree::Dir(_, childs) => childs,
        }
    }

    /// Mutable access to a directory's children; panics for non-directories.
    fn children_mut(&mut self) -> &mut Vec<Tree> {
        match *self {
            Tree::File(_) | Tree::Symlink { .. } => {
                panic!("files do not have children");
            }
            Tree::Dir(_, ref mut children) => children,
        }
    }

    /// Materializes this tree on disk under `parent`.
    fn create_in<P: AsRef<Path>>(&self, parent: P) -> io::Result<()> {
        let parent = parent.as_ref();
        match *self {
            Tree::Symlink { ref src, ref dst, dir } => {
                if dir {
                    try!(soft_link_dir(src, parent.join(dst)));
                } else {
                    try!(soft_link_file(src, parent.join(dst)));
                }
            }
            Tree::File(ref p) => { try!(File::create(parent.join(p))); }
            Tree::Dir(ref dir, ref children) => {
                try!(fs::create_dir(parent.join(dir)));
                for child in children {
                    try!(child.create_in(parent.join(dir)));
                }
            }
        }
        Ok(())
    }

    /// A copy with every directory's children sorted, so trees compare
    /// independently of traversal order.
    fn canonical(&self) -> Tree {
        match *self {
            Tree::Symlink { ref src, ref dst, dir } => {
                Tree::Symlink { src: src.clone(), dst: dst.clone(), dir: dir }
            }
            Tree::File(ref p) => {
                Tree::File(p.clone())
            }
            Tree::Dir(ref p, ref cs) => {
                let mut cs: Vec<Tree> =
                    cs.iter().map(|c| c.canonical()).collect();
                cs.sort();
                Tree::Dir(p.clone(), cs)
            }
        }
    }

    /// A copy with duplicate sibling names removed (recursively), since a
    /// real file system cannot hold two entries with the same name.
    fn dedup(&self) -> Tree {
        match *self {
            Tree::Symlink { ref src, ref dst, dir } => {
                Tree::Symlink { src: src.clone(), dst: dst.clone(), dir: dir }
            }
            Tree::File(ref p) => {
                Tree::File(p.clone())
            }
            Tree::Dir(ref p, ref cs) => {
                let mut nodupes: Vec<Tree> = vec![];
                for (i, c1) in cs.iter().enumerate() {
                    if !cs[i+1..].iter().any(|c2| c1.name() == c2.name())
                        && !nodupes.iter().any(|c2| c1.name() == c2.name()) {
                        nodupes.push(c1.dedup());
                    }
                }
                Tree::Dir(p.clone(), nodupes)
            }
        }
    }

    /// Generates a random tree of at most `depth` levels for quickcheck.
    fn gen<G: Gen>(g: &mut G, depth: usize) -> Tree {
        #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
        struct NonEmptyAscii(String);

        impl Arbitrary for NonEmptyAscii {
            fn arbitrary<G: Gen>(g: &mut G) -> NonEmptyAscii {
                use std::char::from_u32;
                let upper_bound = g.size();
                // We start with a lower bound of `4` to avoid
                // generating the special file name `con` on Windows,
                // because such files cannot exist...
                let size = g.gen_range(4, upper_bound);
                NonEmptyAscii((0..size)
                    // lowercase ascii letters only (codepoints 97..123)
                    .map(|_| from_u32(g.gen_range(97, 123)).unwrap())
                    .collect())
            }

            fn shrink(&self) -> Box<Iterator<Item=NonEmptyAscii>> {
                let mut smaller = vec![];
                for i in 1..self.0.len() {
                    let s: String = self.0.chars().skip(i).collect();
                    smaller.push(NonEmptyAscii(s));
                }
                Box::new(smaller.into_iter())
            }
        }

        let name = pb(NonEmptyAscii::arbitrary(g).0);
        if depth == 0 {
            Tree::File(name)
        } else {
            let children: Vec<Tree> = (0..g.gen_range(0, 5))
                .map(|_| Tree::gen(g, depth-1))
                .collect();
            Tree::Dir(name, children)
        }
    }
}

impl Arbitrary for Tree {
    fn arbitrary<G: Gen>(g: &mut G) -> Tree {
        let depth = g.gen_range(0, 5);
        // dedup so the generated tree is actually creatable on disk
        Tree::gen(g, depth).dedup()
    }

    fn shrink(&self) -> Box<Iterator<Item=Tree>> {
        let trees: Box<Iterator<Item=Tree>> = match *self {
            Tree::Symlink { .. } => unimplemented!(),
            Tree::File(ref path) => {
                let s = path.to_string_lossy().into_owned();
                Box::new(s.shrink().map(|s| Tree::File(pb(s))))
            }
            Tree::Dir(ref path, ref children) => {
                let s = path.to_string_lossy().into_owned();
                if children.is_empty() {
                    Box::new(s.shrink().map(|s| Tree::Dir(pb(s), vec![])))
                } else if children.len() == 1 {
                    let c = &children[0];
                    Box::new(Some(c.clone()).into_iter().chain(c.shrink()))
                } else {
                    Box::new(children
                        .shrink()
                        .map(move |cs| Tree::Dir(pb(s.clone()), cs)))
                }
            }
        };
        Box::new(trees.map(|t| t.dedup()))
    }
}

/// Flattened walk events: directory entered, file seen, directory left.
#[derive(Debug)]
enum WalkEvent {
    Dir(DirEntry),
    File(DirEntry),
    Exit,
}

/// Adapter that converts a `WalkDir` iterator into `WalkEvent`s by watching
/// depth changes (a depth decrease means we exited a directory).
struct WalkEventIter {
    depth: usize,
    it: IntoIter,
    next: Option<Result<DirEntry, Error>>,
}

impl From<WalkDir> for WalkEventIter {
    fn from(it: WalkDir) -> WalkEventIter {
        WalkEventIter { depth: 0, it: it.into_iter(), next: None }
    }
}

impl Iterator for WalkEventIter {
    type Item = io::Result<WalkEvent>;

    fn next(&mut self) -> Option<io::Result<WalkEvent>> {
        let dent = self.next.take().or_else(|| self.it.next());
        let depth = match dent {
            None => 0,
            Some(Ok(ref dent)) => dent.depth(),
            Some(Err(ref err)) => err.depth(),
        };
        if depth < self.depth {
            // Depth dropped: emit Exit now and replay this entry next call.
            self.depth -= 1;
            self.next = dent;
            return Some(Ok(WalkEvent::Exit));
        }
        self.depth = depth;
        match dent {
            None => None,
            Some(Err(err)) => Some(Err(From::from(err))),
            Some(Ok(dent)) => {
                if dent.file_type().is_dir() {
                    self.depth += 1;
                    Some(Ok(WalkEvent::Dir(dent)))
                } else {
                    Some(Ok(WalkEvent::File(dent)))
                }
            }
        }
    }
}

/// Temp directory that removes itself (recursively) on drop.
struct TempDir(PathBuf);

impl TempDir {
    fn path<'a>(&'a self) -> &'a Path {
        &self.0
    }
}

impl Drop for TempDir {
    fn drop(&mut self) {
        fs::remove_dir_all(&self.0).unwrap();
    }
}

/// Creates a uniquely-named temp directory under the system temp dir.
fn tmpdir() -> TempDir {
    let p = env::temp_dir();
    let mut r = rand::thread_rng();
    let ret = p.join(&format!("rust-{}", r.next_u32()));
    fs::create_dir(&ret).unwrap();
    TempDir(ret)
}

/// Writes `t` to a fresh temp dir, walks it back with configuration `f`
/// (both normal and contents-first, asserting they agree), and returns the
/// recovered tree with the temp-dir wrapper layers stripped.
fn dir_setup_with<P, F>(t: &Tree, f: F) -> (TempDir, Tree)
where F: Fn(WalkDir) -> WalkDir {
    let tmp = tmpdir();
    t.create_in(tmp.path()).unwrap();
    let got = Tree::from_walk_with(tmp.path(), &f).unwrap();
    let got_cf = Tree::from_walk_with_contents_first(tmp.path(), &f).unwrap();
    assert_eq!(got, got_cf);
    (tmp, got.unwrap_singleton().unwrap_singleton())
}

fn dir_setup(t: &Tree) -> (TempDir, Tree) {
    dir_setup_with(t, |wd| wd)
}

/// Converts a unix-style path string to the platform's separators.
fn canon(unix: &str) -> String {
    if cfg!(windows) {
        unix.replace("/", "\\")
    } else {
        unix.to_string()
    }
}

// Short constructors for building expected trees in the tests below.
fn pb<P: AsRef<Path>>(p: P) -> PathBuf { p.as_ref().to_path_buf() }
fn td<P: AsRef<Path>>(p: P, cs: Vec<Tree>) -> Tree {
    Tree::Dir(pb(p), cs)
}
fn tf<P: AsRef<Path>>(p: P) -> Tree {
    Tree::File(pb(p))
}
fn tld<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> Tree {
    Tree::Symlink { src: pb(src), dst: pb(dst), dir: true }
}
fn tlf<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> Tree {
    Tree::Symlink { src: pb(src), dst: pb(dst), dir: false }
}

// Platform-specific symlink creation.
#[cfg(unix)]
fn soft_link_dir<P: AsRef<Path>, Q: AsRef<Path>>(
    src: P,
    dst: Q,
) -> io::Result<()> {
    use std::os::unix::fs::symlink;
    symlink(src, dst)
}

#[cfg(unix)]
fn soft_link_file<P: AsRef<Path>, Q: AsRef<Path>>(
    src: P,
    dst: Q,
) -> io::Result<()> {
    soft_link_dir(src, dst)
}

#[cfg(windows)]
fn soft_link_dir<P: AsRef<Path>, Q: AsRef<Path>>(
    src: P,
    dst: Q,
) -> io::Result<()> {
    use std::os::windows::fs::symlink_dir;
    symlink_dir(src, dst)
}

#[cfg(windows)]
fn soft_link_file<P: AsRef<Path>, Q: AsRef<Path>>(
    src: P,
    dst: Q,
) -> io::Result<()> {
    use std::os::windows::fs::symlink_file;
    symlink_file(src, dst)
}

// Compares two trees ignoring sibling ordering.
macro_rules! assert_tree_eq {
    ($e1:expr, $e2:expr) => {
        assert_eq!($e1.canonical(), $e2.canonical());
    }
}

// --- Basic round-trips: write a tree, walk it, expect the same tree. ---

#[test]
fn walk_dir_1() {
    let exp = td("foo", vec![]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_2() {
    let exp = tf("foo");
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_3() {
    let exp = td("foo", vec![tf("bar")]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_4() {
    let exp = td("foo", vec![tf("foo"), tf("bar"), tf("baz")]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_5() {
    let exp = td("foo", vec![td("bar", vec![])]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_6() {
    let exp = td("foo", vec![
        td("bar", vec![
            tf("baz"), td("bat", vec![]),
        ]),
    ]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_7() {
    let exp = td("foo", vec![
        td("bar", vec![
            tf("baz"), td("bat", vec![]),
        ]),
        td("a", vec![tf("b"), tf("c"), tf("d")]),
    ]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

// --- Symlink handling. ---

#[test]
fn walk_dir_sym_1() {
    let exp = td("foo", vec![tf("bar"), tlf("bar", "baz")]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_sym_2() {
    let exp = td("foo", vec![
        td("a", vec![tf("a1"), tf("a2")]),
        tld("a", "alink"),
    ]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
fn walk_dir_sym_root() {
    let exp = td("foo", vec![
        td("bar", vec![tf("a"), tf("b")]),
        tld("bar", "alink"),
    ]);
    let tmp = tmpdir();
    let tmp_path = tmp.path();
    let tmp_len = tmp_path.to_str().unwrap().len();
    exp.create_in(tmp_path).unwrap();

    // Walking a symlink as the root should traverse through it.
    let it = WalkDir::new(tmp_path.join("foo").join("alink")).into_iter();
    let mut got = it
        .map(|d| d.unwrap().path().to_str().unwrap()[tmp_len+1..].into())
        .collect::<Vec<String>>();
    got.sort();
    assert_eq!(got, vec![
        canon("foo/alink"),
        canon("foo/alink/a"),
        canon("foo/alink/b"),
    ]);

    // A trailing slash on the root is preserved in yielded paths.
    let it = WalkDir::new(tmp_path.join("foo/alink/")).into_iter();
    let mut got = it
        .map(|d| d.unwrap().path().to_str().unwrap()[tmp_len+1..].into())
        .collect::<Vec<String>>();
    got.sort();
    assert_eq!(got, vec!["foo/alink/", "foo/alink/a", "foo/alink/b"]);
}

// See: https://github.com/BurntSushi/ripgrep/issues/984
#[test]
#[cfg(unix)]
fn first_path_not_symlink() {
    let exp = td("foo", vec![]);
    let (tmp, _got) = dir_setup(&exp);

    let dents = WalkDir::new(tmp.path().join("foo"))
        .into_iter()
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(1, dents.len());
    assert!(!dents[0].path_is_symlink());
}

// Like first_path_not_symlink, but checks that the first path is not reported
// as a symlink even when we are supposed to be following them.
#[test]
#[cfg(unix)]
fn first_path_not_symlink_follow() {
    let exp = td("foo", vec![]);
    let (tmp, _got) = dir_setup(&exp);

    let dents = WalkDir::new(tmp.path().join("foo"))
        .follow_links(true)
        .into_iter()
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(1, dents.len());
    assert!(!dents[0].path_is_symlink());
}

// See: https://github.com/BurntSushi/walkdir/issues/115
#[test]
#[cfg(unix)]
fn first_path_is_followed() {
    let exp = td("foo", vec![
        td("a", vec![tf("a1"), tf("a2")]),
        td("b", vec![tlf("../a/a1", "alink")]),
    ]);
    let (tmp, _got) = dir_setup(&exp);

    // Without follow_links the root symlink stays a symlink...
    let dents = WalkDir::new(tmp.path().join("foo/b/alink"))
        .into_iter()
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(1, dents.len());
    assert!(dents[0].file_type().is_symlink());
    assert!(dents[0].metadata().unwrap().file_type().is_symlink());

    // ...and with follow_links it is resolved to its target.
    let dents = WalkDir::new(tmp.path().join("foo/b/alink"))
        .follow_links(true)
        .into_iter()
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(1, dents.len());
    assert!(!dents[0].file_type().is_symlink());
    assert!(!dents[0].metadata().unwrap().file_type().is_symlink());
}

#[test]
#[cfg(unix)]
fn walk_dir_sym_detect_no_follow_no_loop() {
    let exp = td("foo", vec![
        td("a", vec![tf("a1"), tf("a2")]),
        td("b", vec![tld("../a", "alink")]),
    ]);
    let (_tmp, got) = dir_setup(&exp);
    assert_tree_eq!(exp, got);
}

#[test]
#[cfg(unix)]
fn walk_dir_sym_follow_dir() {
    let actual = td("foo", vec![
        td("a", vec![tf("a1"), tf("a2")]),
        td("b", vec![tld("../a", "alink")]),
    ]);
    let followed = td("foo", vec![
        td("a", vec![tf("a1"), tf("a2")]),
        td("b", vec![td("alink", vec![tf("a1"), tf("a2")])]),
    ]);
    let (_tmp, got) = dir_setup_with(&actual, |wd| wd.follow_links(true));
    assert_tree_eq!(followed, got);
}

#[test]
#[cfg(unix)]
fn walk_dir_sym_detect_loop() {
    // a and b link to each other: following links must yield a Loop error.
    let actual = td("foo", vec![
        td("a", vec![tlf("../b", "blink"), tf("a1"), tf("a2")]),
        td("b", vec![tlf("../a", "alink")]),
    ]);
    let tmp = tmpdir();
    actual.create_in(tmp.path()).unwrap();
    let got = WalkDir::new(tmp.path())
        .follow_links(true)
        .into_iter()
        .collect::<Result<Vec<_>, _>>();
    match got {
        Ok(x) => panic!("expected loop error, got no error: {:?}", x),
        Err(err @ Error { inner: ErrorInner::Io { .. }, .. }) => {
            panic!("expected loop error, got generic IO error: {:?}", err);
        }
        Err(Error { inner: ErrorInner::Loop { .. }, .. }) => {}
    }
}

#[test]
fn walk_dir_sym_infinite() {
    // A self-referential symlink produces an IO error, not a loop error.
    let actual = tlf("a", "a");
    let tmp = tmpdir();
    actual.create_in(tmp.path()).unwrap();
    let got = WalkDir::new(tmp.path())
        .follow_links(true)
        .into_iter()
        .collect::<Result<Vec<_>, _>>();
    match got {
        Ok(x) => panic!("expected IO error, got no error: {:?}", x),
        Err(Error { inner: ErrorInner::Loop { .. }, .. }) => {
            panic!("expected IO error, but got loop error");
        }
        Err(Error { inner: ErrorInner::Io { .. }, .. }) => {}
    }
}

// --- Depth limits. ---

#[test]
fn walk_dir_min_depth_1() {
    let exp = td("foo", vec![tf("bar")]);
    let (_tmp, got) = dir_setup_with(&exp, |wd| wd.min_depth(1));
    assert_tree_eq!(tf("bar"), got);
}

#[test]
fn walk_dir_min_depth_2() {
    let exp = td("foo", vec![tf("bar"), tf("baz")]);
    let tmp = tmpdir();
    exp.create_in(tmp.path()).unwrap();
    let got = Tree::from_walk_with(tmp.path(), |wd| wd.min_depth(2))
        .unwrap().unwrap_dir();
    let got_cf = Tree::from_walk_with_contents_first(
        tmp.path(), |wd| wd.min_depth(2))
        .unwrap().unwrap_dir();
    assert_eq!(got, got_cf);
    assert_tree_eq!(exp, td("foo", got));
}

#[test]
fn walk_dir_min_depth_3() {
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("xyz")]),
        tf("baz"),
    ]);
    let tmp = tmpdir();
    exp.create_in(tmp.path()).unwrap();
    let got = Tree::from_walk_with(tmp.path(), |wd| wd.min_depth(3))
        .unwrap().unwrap_dir();
    assert_eq!(vec![tf("xyz")], got);
    let got_cf = Tree::from_walk_with_contents_first(
        tmp.path(), |wd| wd.min_depth(3))
        .unwrap().unwrap_dir();
    assert_eq!(got, got_cf);
}

#[test]
fn walk_dir_max_depth_1() {
    let exp = td("foo", vec![tf("bar")]);
    let (_tmp, got) = dir_setup_with(&exp, |wd| wd.max_depth(1));
    assert_tree_eq!(td("foo", vec![]), got);
}

#[test]
fn walk_dir_max_depth_2() {
    let exp = td("foo", vec![tf("bar"), tf("baz")]);
    let (_tmp, got) = dir_setup_with(&exp, |wd| wd.max_depth(1));
    assert_tree_eq!(td("foo", vec![]), got);
}

#[test]
fn walk_dir_max_depth_3() {
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("xyz")]),
        tf("baz"),
    ]);
    let exp_trimmed = td("foo", vec![
        tf("bar"),
        td("abc", vec![]),
        tf("baz"),
    ]);
    let (_tmp, got) = dir_setup_with(&exp, |wd| wd.max_depth(2));
    assert_tree_eq!(exp_trimmed, got);
}

#[test]
fn walk_dir_min_max_depth() {
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("xyz")]),
        tf("baz"),
    ]);
    let tmp = tmpdir();
    exp.create_in(tmp.path()).unwrap();
    let got = Tree::from_walk_with(tmp.path(),
                                   |wd| wd.min_depth(2).max_depth(2))
        .unwrap().unwrap_dir();
    let got_cf = Tree::from_walk_with_contents_first(tmp.path(),
                                   |wd| wd.min_depth(2).max_depth(2))
        .unwrap().unwrap_dir();
    assert_eq!(got, got_cf);
    assert_tree_eq!(
        td("foo", vec![tf("bar"), td("abc", vec![]), tf("baz")]),
        td("foo", got));
}

// --- Skipping and filtering. ---

#[test]
fn walk_dir_skip() {
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("xyz")]),
        tf("baz"),
    ]);
    let tmp = tmpdir();
    exp.create_in(tmp.path()).unwrap();
    let mut got = vec![];
    let mut it = WalkDir::new(tmp.path()).min_depth(1).into_iter();
    loop {
        let dent = match it.next().map(|x| x.unwrap()) {
            None => break,
            Some(dent) => dent,
        };
        let name = dent.file_name().to_str().unwrap().to_owned();
        if name == "abc" {
            // skip_current_dir prevents descending into "abc".
            it.skip_current_dir();
        }
        got.push(name);
    }
    got.sort();
    assert_eq!(got, vec!["abc", "bar", "baz", "foo"]); // missing xyz!
}

#[test]
fn walk_dir_filter() {
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("fit")]),
        tf("faz"),
    ]);
    let tmp = tmpdir();
    let tmp_path = tmp.path().to_path_buf();
    exp.create_in(tmp.path()).unwrap();
    // Keep non-dirs, dirs starting with "f", and the root itself; "abc" is
    // filtered out along with its contents.
    let it = WalkDir::new(tmp.path()).min_depth(1)
        .into_iter()
        .filter_entry(move |d| {
            let n = d.file_name().to_string_lossy().into_owned();
            !d.file_type().is_dir()
                || n.starts_with("f")
                || d.path() == &*tmp_path
        });
    let mut got = it.map(|d| d.unwrap().file_name().to_str().unwrap().into())
        .collect::<Vec<String>>();
    got.sort();
    assert_eq!(got, vec!["bar", "faz", "foo"]);
}

// --- Property tests: random trees must round-trip through the walker. ---

#[test]
fn qc_roundtrip() {
    fn p(exp: Tree) -> bool {
        let (_tmp, got) = dir_setup(&exp);
        exp.canonical() == got.canonical()
    }
    QuickCheck::new()
        .gen(StdGen::new(rand::thread_rng(), 15))
        .tests(1_000)
        .max_tests(10_000)
        .quickcheck(p as fn(Tree) -> bool);
}

// Same as `qc_roundtrip`, but makes sure `follow_links` doesn't change
// the behavior of walking a directory *without* symlinks.
#[test]
fn qc_roundtrip_no_symlinks_with_follow() {
    fn p(exp: Tree) -> bool {
        let (_tmp, got) = dir_setup_with(&exp, |wd| wd.follow_links(true));
        exp.canonical() == got.canonical()
    }
    QuickCheck::new()
        .gen(StdGen::new(rand::thread_rng(), 15))
        .tests(1_000)
        .max_tests(10_000)
        .quickcheck(p as fn(Tree) -> bool);
}

// --- Sorted traversal. ---

#[test]
fn walk_dir_sort() {
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("fit")]),
        tf("faz"),
    ]);
    let tmp = tmpdir();
    let tmp_path = tmp.path();
    let tmp_len = tmp_path.to_str().unwrap().len();
    exp.create_in(tmp_path).unwrap();
    let it = WalkDir::new(tmp_path)
        .sort_by(|a,b| a.file_name().cmp(b.file_name()))
        .into_iter();
    let got = it.map(|d| {
        let path = d.unwrap();
        let path = &path.path().to_str().unwrap()[tmp_len..];
        path.replace("\\", "/")
    }).collect::<Vec<String>>();
    assert_eq!(
        got,
        ["", "/foo", "/foo/abc", "/foo/abc/fit", "/foo/bar", "/foo/faz"]);
}

#[test]
fn walk_dir_sort_small_fd_max() {
    // Same as walk_dir_sort, but with max_open(1) forcing dir handles to be
    // closed and reopened; ordering must be unaffected.
    let exp = td("foo", vec![
        tf("bar"),
        td("abc", vec![tf("fit")]),
        tf("faz"),
    ]);
    let tmp = tmpdir();
    let tmp_path = tmp.path();
    let tmp_len = tmp_path.to_str().unwrap().len();
    exp.create_in(tmp_path).unwrap();
    let it = WalkDir::new(tmp_path)
        .max_open(1)
        .sort_by(|a,b| a.file_name().cmp(b.file_name()))
        .into_iter();
    let got = it.map(|d| {
        let path = d.unwrap();
        let path = &path.path().to_str().unwrap()[tmp_len..];
        path.replace("\\", "/")
    }).collect::<Vec<String>>();
    assert_eq!(
        got,
        ["", "/foo", "/foo/abc", "/foo/abc/fit", "/foo/bar", "/foo/faz"]);
}

#[test]
fn walk_dir_send_sync_traits() {
    use FilterEntry;

    fn assert_send<T: Send>() {}
    fn assert_sync<T: Sync>() {}

    assert_send::<WalkDir>();
    assert_sync::<WalkDir>();
    assert_send::<IntoIter>();
    assert_sync::<IntoIter>();
    assert_send::<FilterEntry<IntoIter, u8>>();
    assert_sync::<FilterEntry<IntoIter, u8>>();
}

// We cannot mount different volumes for the sake of the test, but
// on Linux systems we can assume that /sys is a mounted volume.
#[test]
#[cfg(target_os = "linux")]
fn walk_dir_stay_on_file_system() {
    // If for some reason /sys doesn't exist or isn't a directory, just skip
    // this test.
    if !Path::new("/sys").is_dir() {
        return;
    }

    let actual = td("same_file", vec![
        td("a", vec![tld("/sys", "alink")]),
    ]);
    let unfollowed = td("same_file", vec![
        td("a", vec![tld("/sys", "alink")]),
    ]);
    let (_tmp, got) = dir_setup_with(&actual, |wd| wd);
    assert_tree_eq!(unfollowed, got);

    // Create a symlink to sys and enable following symlinks. If the
    // same_file_system option doesn't work, then this probably will hit a
    // permission error. Otherwise, it should just skip over the symlink
    // completely.
    let actual = td("same_file", vec![
        td("a", vec![tld("/sys", "alink")]),
    ]);
    let followed = td("same_file", vec![
        td("a", vec![td("alink", vec![])]),
    ]);
    let (_tmp, got) = dir_setup_with(&actual, |wd| {
        wd.follow_links(true).same_file_system(true)
    });
    assert_tree_eq!(followed, got);
}