text
stringlengths
8
4.13M
foo = gl_Vertex.xyz py = gl_Vertex.y gl_FragColor = vec4 foo py
#![allow(dead_code)] use std::path::PathBuf; use crate::builder::*; use crate::const_values::*; use std::thread::*; use std::sync::{Arc,Mutex}; use std::collections::HashMap; #[derive(Debug,Clone)] pub enum ThreadState{ ALIVE,DEAD } #[derive(Debug)] pub struct ThreadObserver{ threadid:Option<ThreadId>, dll:Option<PathBuf>, count_threshold:u32, count:u32, method:&'static str, state:ThreadState } impl ThreadObserver{ pub fn new()->ThreadObserverBuilder{ ThreadObserverBuilder::new() } pub fn get_tid(&self)->ThreadId{ self.threadid.unwrap() } pub fn get_dll(&self)->PathBuf{ self.dll.clone().unwrap() } pub fn get_method(&self)->&'static str{ self.method } pub fn get_state(&self)->ThreadState{ self.state.clone() } } pub struct ThreadObserverBuilder{ to:ThreadObserver } impl ThreadObserverBuilder{ pub fn new()->Self{ ThreadObserverBuilder{ to:ThreadObserver{ threadid:None, dll:None, method:"", count_threshold:LONG_WAIT_THREAD_SECS, count:0, state:ThreadState::ALIVE } } } pub fn tid(mut self,id:ThreadId)->Self{ self.to.threadid = Some(id); self } pub fn dll(mut self,path:PathBuf)->Self{ self.to.dll = Some(path); self } pub fn method(mut self,method:&'static str)->Self{ self.to.method = method; self } // レスポンスがないと判断するしきい値(単位は時間・秒) pub fn noresponse_threshold(mut self,sec:u32)->Self{ self.to.count_threshold=sec; self } pub fn build(self)->std::result::Result<ThreadObserver,BuilderError>{ let mut err = BuilderError::new(); err.kind=BuilderErrorKind::InvalidParameter; if self.to.threadid == None{return Err(err);} if self.to.dll == None{return Err(err);} if self.to.method == ""{return Err(err);} Ok(self.to) } } // ながーい処理時間のスレッドを検出する。 pub fn thread_lock_detector(th_list:Arc<Mutex<HashMap<ThreadId,ThreadObserver>>>){ loop{ { let mut list = th_list.lock().unwrap(); for val in list.values_mut(){ val.count+=1; if val.count%val.count_threshold == 0{ log::warn!("{}::{}(); ({:?})thread long wait!", val.dll.clone().unwrap().to_str().unwrap(), val.method, val.threadid.unwrap()); 
val.count=0; } } } std::thread::sleep(std::time::Duration::from_millis(ONE_SEC)); } }
use std; use std::ptr; use std::borrow::Borrow; use std::cmp::Ordering; use ordered_iter; const SEQ_START: u64 = 0; const SEQ_END: u64 = !0; #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] struct Key<K> { key: K, sequence: u64 } impl<K> Key<K> { fn as_ref<Q>(&self) -> Key<&Q> where K: Borrow<Q> { Key { key: self.key.borrow(), sequence: self.sequence } } } unsafe fn mv_to<T>(d: &mut [T], dst: usize, s: &[T], src: usize) { debug_assert!(src < s.len()); debug_assert!(dst < d.len()); ptr::write( d.as_mut_ptr().offset(dst as isize), take(s, src) ); } unsafe fn mv<T>(d: &mut [T], dst: usize, src: usize) { debug_assert!(src < d.len()); debug_assert!(dst < d.len()); ptr::write( d.as_mut_ptr().offset(dst as isize), take(d, src) ); } unsafe fn take<T>(d: &[T], src: usize) -> T { debug_assert!(src < d.len()); ptr::read(d.as_ptr().offset(src as isize)) } /// An always ordered associated array of items #[derive(Clone, Debug)] pub struct OrderedVec<K, V>{ keys: Vec<Key<K>>, values: Vec<V> } impl<K: Ord, V> OrderedVec<K, V> { /// Create a new Ordered Vector pub fn new() -> OrderedVec<K, V> { OrderedVec{ values: Vec::new(), keys: Vec::new() } } /// Create a new Ordered Vector with the capacity of `n` pub fn with_capacity(n: usize) -> OrderedVec<K, V> { OrderedVec{ values: Vec::with_capacity(n), keys: Vec::with_capacity(n) } } /// Create a new Ordered Vector pub fn len(&self) -> usize { self.keys.len() } /// Clear the contentens pub fn clear(&mut self) { self.keys.clear(); self.values.clear(); } /// Find the key with the latest value fn find_key<Q>(&self, key: &Key<&Q>) -> usize where K: Borrow<Q>, Q: Ord { let mut start = 0; let mut end = self.keys.len(); while start != end { let mid = (start + end) / 2; // This is bound by the size and match self.keys[mid].as_ref().cmp(key) { Ordering::Equal | Ordering::Greater => { end = mid; }, Ordering::Less => { start = mid + 1; } }; } start } /// Find the key with the latest value fn find_last<Q>(&self, key: &Q) -> 
Option<usize> where K: Borrow<Q>, Q: Ord { let key = Key { key: key, sequence: SEQ_END }; let idx = self.find_key(&key); if idx == 0 { None } else if self.keys[idx-1].key.borrow() == key.key { Some(idx-1) } else { None } } /// Find the key with the lowest sequence fn find_first<Q>(&self, key: &Q) -> Option<usize> where K: Borrow<Q>, Q: Ord { let key = Key { key: key, sequence: SEQ_START }; let idx = self.find_key(&key); if idx == self.keys.len() { None } else { Some(idx) } } /// Returns a reference to the value corresponding to the key. /// /// `O(log(n))` complexity pub fn get<Q>(&self, key: &Q) -> Option<&V> where K: Borrow<Q>, Q: Ord { self.find_last(key).map(|idx| &self.values[idx]) } /// Returns a reference to the value corresponding to the key. /// /// `O(log(n))` complexity pub fn get_all<Q>(&self, key: &Q) -> Option<IterSequence<K, V>> where K: Borrow<Q>, Q: Ord { self.find_first(key) .and_then(|start| { self.find_last(key).map(|end| (start, end)) }) .map(|(s, e)| { IterSequence { keys: self.keys[s..e].iter(), values: self.values[s..e].iter() } }) } /// Inserts a key-value pair into the map. If the key already /// had a value present in the map, that value is returned. /// Otherwise, None is returned. 
///
/// On insert, `O(n)` complexity
    pub fn insert(&mut self, key: K, sequence: u64, value: V) {
        let key = Key { key: key, sequence: sequence };
        // The insertion point keeps both parallel vectors sorted by
        // (key, sequence).
        let idx = self.find_key(&key.as_ref());
        self.keys.insert(idx, key);
        self.values.insert(idx, value);
    }

    /// Creates an iterator over the keys, values in the map
    pub fn iter(&self) -> Iter<K, V> {
        Iter {
            keys: self.keys.iter().peekable(),
            values: self.values.iter(),
        }
    }

    /// Merge this with another OrderedVec, draining `new`.
    ///
    /// Backwards merge: both vectors are already sorted, so elements are
    /// moved into `self` starting from the highest slot. Invariant on every
    /// iteration: `new_end + old_end == end` (elements still to place).
    pub fn merge(&mut self, new: &mut Self) {
        unsafe {
            // Make sure there is enough space in the new vector
            self.keys.reserve(new.keys.len());
            self.values.reserve(new.values.len());
            let mut new_end = new.keys.len();
            let mut old_end = self.keys.len();
            let mut end = new_end + old_end;
            // SAFETY: slots old_end..end are uninitialized until written
            // below; the loop fills every one of them before exiting.
            self.keys.set_len(end);
            self.values.set_len(end);
            loop {
                match (new_end, old_end) {
                    (0, _) => {
                        // congrats! all the new values have been merged into
                        // the array. Any values left in the old array are
                        // already in the correct position (the invariant
                        // gives old_end == end), so we can stop.
                        assert_eq!(old_end, end);
                        break;
                    }
                    (i, 0) => {
                        // Old array exhausted: copy the rest of `new` down.
                        mv_to(&mut self.keys, end - 1, &new.keys, i - 1);
                        mv_to(&mut self.values, end - 1, &new.values, i - 1);
                        new_end -= 1;
                    }
                    (ne, oe) => {
                        match self.keys[oe - 1].cmp(&new.keys[ne - 1]) {
                            Ordering::Greater => {
                                mv(&mut self.keys, end - 1, oe - 1);
                                mv(&mut self.values, end - 1, oe - 1);
                                old_end -= 1;
                            }
                            Ordering::Less | Ordering::Equal => {
                                mv_to(&mut self.keys, end - 1, &new.keys, ne - 1);
                                mv_to(&mut self.values, end - 1, &new.values, ne - 1);
                                new_end -= 1;
                            }
                        }
                    }
                }
                end -= 1;
            }
            // Elements were *moved* out of `new`, not copied; forget them.
            new.keys.set_len(0);
            new.values.set_len(0);
        }
    }

    /// Half-open range of indices that all share the key found at `start`.
    fn get_range(&self, start: usize) -> std::ops::Range<usize> {
        let mut end = start + 1;
        while end < self.keys.len() {
            if self.keys[start].key != self.keys[end].key {
                break;
            }
            end += 1;
        }
        std::ops::Range { start: start, end: end }
    }

    /// Compact the keys, keeping only the newest version of each key and
    /// dropping all superseded values.
    pub fn compact(&mut self) {
        let mut wp = 0; // write pointer (next compacted slot)
        let mut rp = 0; // read pointer (start of current run of equal keys)
        let end = self.keys.len();
        while rp < end {
            let range = self.get_range(rp);
            unsafe {
                for r in range.start..range.end - 1 {
                    // Drop any keys/value pairs that were compacted away
                    take(&mut self.keys, r);
                    take(&mut self.values, r);
                }
                // BUGFIX: keep the newest entry, `range.end - 1`. The
                // original moved `rp` (== range.start) — a value the loop
                // above had just dropped, i.e. a use-after-drop that later
                // double-frees — and it also kept the *oldest* version,
                // contradicting `get`, which returns the newest.
                mv(&mut self.keys, wp, range.end - 1);
                mv(&mut self.values, wp, range.end - 1);
            }
            rp = range.end;
            wp += 1;
        }
        unsafe {
            // SAFETY: slots 0..wp are initialized; everything above was
            // either moved down or dropped.
            self.keys.set_len(wp);
            self.values.set_len(wp);
        }
    }
}

/// Iterator over every stored version of a single key (see `get_all`),
/// yielding `(key, sequence, value)` oldest first.
pub struct IterSequence<'a, K: 'a, V: 'a> {
    keys: std::slice::Iter<'a, Key<K>>,
    values: std::slice::Iter<'a, V>,
}

impl<'a, K: 'a, V: 'a> Iterator for IterSequence<'a, K, V> {
    type Item = (&'a K, u64, &'a V);

    fn next(&mut self) -> Option<(&'a K, u64, &'a V)> {
        self.keys
            .next()
            .and_then(|key| self.values.next().map(|value| (key, value)))
            .map(|(k, v)| (&k.key, k.sequence, v))
    }
}

/// Iterator over `(key, newest value)` pairs; superseded versions skipped.
pub struct Iter<'a, K: 'a, V: 'a> {
    keys: std::iter::Peekable<std::slice::Iter<'a, Key<K>>>,
    values: std::slice::Iter<'a, V>,
}

impl<'a, K: Eq + Ord + 'a, V: 'a> std::iter::IntoIterator for &'a OrderedVec<K, V> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}

impl<'a, K: 'a + Eq, V: 'a> Iterator for
// (continuation of `impl Iterator for`) — de-duplicating iterator: equal keys
// are stored oldest-to-newest, so whenever the peeked next entry has the same
// key the current pair is superseded and both key and value are skipped;
// only the newest version of each key is yielded. The same skip logic is
// repeated below for the owning `IntoIter`.
Iter<'a, K, V> { type Item = (&'a K, &'a V); fn next(&mut self) -> Option<(&'a K, &'a V)> { while let Some(key) = self.keys.next() { if let Some(next) = self.keys.peek() { if key.key == next.key { self.values.next(); continue; } } return self.values .next().map(|value| (key, value)) .map(|(k, v)| (&k.key, v)); } None } } impl<'a, K:'a, V:'a> ordered_iter::OrderedMapIterator for Iter<'a, K, V> where K: std::cmp::Eq { type Key = &'a K; type Val = &'a V; } pub struct IntoIter<K, V> { keys: std::iter::Peekable<std::vec::IntoIter<Key<K>>>, values: std::vec::IntoIter<V> } impl<K:Eq, V> Iterator for IntoIter<K, V> { type Item = (K, V); fn next(&mut self) -> Option<(K, V)> { while let Some(key) = self.keys.next() { if let Some(next) = self.keys.peek() { if key.key == next.key { self.values.next(); continue; } } return self.values .next().map(|value| (key, value)) .map(|(Key{key, sequence: _}, v)| (key, v)); } None } } impl<K:Eq+Ord, V> std::iter::IntoIterator for OrderedVec<K, V> { type Item = (K, V); type IntoIter = IntoIter<K, V>; fn into_iter(self) -> IntoIter<K, V> { IntoIter{ keys: self.keys.into_iter().peekable(), values: self.values.into_iter() } } } #[cfg(test)] mod test { use {OrderedVec, Sequence}; static VECTOR: &'static [u32] = &[123, 234, 0, 5, 1, 213, 7, 3, 324]; static SORTED: &'static [u32] = &[0, 1, 3, 5, 7, 123, 213, 234, 324]; #[test] fn insert() { let mut vec = OrderedVec::new(); let mut seq = Sequence::new(); for &i in VECTOR { vec.insert(i, seq.next(), i); } for i in VECTOR { assert_eq!(vec.get(i).unwrap(), i); } for &i in VECTOR { vec.insert(i, seq.next(), i+1); } for i in VECTOR { assert_eq!(vec.get(i).unwrap(), &(i+1)); } } #[test] fn insert_many() { let mut vec = OrderedVec::new(); let mut seq = Sequence::new(); for &i in VECTOR { for j in 0..10 { vec.insert(j, seq.next(), i); } } for i in 0..10 { for ((_, _, found), expected) in vec.get_all(&i).unwrap().zip(VECTOR.iter()) { assert_eq!(found, expected); } } } #[test] fn iter() { let mut vec = 
// (iter test, continued) inserts the unsorted VECTOR, then expects iteration
// to visit keys — and their values — in SORTED order.
OrderedVec::new(); let mut seq = Sequence::new(); for &i in VECTOR { vec.insert(i, seq.next(), i); } for (i, (k, v)) in vec.iter().enumerate() { assert_eq!(*k, SORTED[i]); assert_eq!(*v, SORTED[i]); } } #[test] fn merge() { let mut gen0 = OrderedVec::new(); let mut gen1 = OrderedVec::new(); let mut gen2 = OrderedVec::new(); let mut seq = Sequence::new(); for &i in VECTOR { gen0.insert(i, seq.next(), i); gen1.insert(i, seq.next(), i+1_000); gen2.insert(i, seq.next(), i+1_000_000); } gen0.merge(&mut gen1); for (i, (key, value)) in gen0.iter().enumerate() { assert_eq!(SORTED[i], *key); assert_eq!(SORTED[i]+1_000, *value); } gen0.merge(&mut gen2); for (i, (key, value)) in gen0.iter().enumerate() { assert_eq!(SORTED[i], *key); assert_eq!(SORTED[i]+1_000_000, *value); } } #[test] fn merge_new() { let mut a = OrderedVec::new(); let mut b = OrderedVec::new(); let mut c = OrderedVec::new(); let mut seq = Sequence::new(); for i in 0..10 { a.insert(i+10, seq.next(), i+10); b.insert(i+0, seq.next(), i+0); c.insert(i+20, seq.next(), i+20); } // b will be merged below all the values in a a.merge(&mut b); // c should be on the end of a a.merge(&mut c); for (i, (&key, &value)) in a.iter().enumerate() { assert_eq!(i, key); assert_eq!(value, key); } } #[test] fn into_iter() { let mut vec = OrderedVec::new(); let mut seq = Sequence::new(); for &i in VECTOR { vec.insert(i, seq.next(), i); } for (i, (k, v)) in vec.into_iter().enumerate() { assert_eq!(k, SORTED[i]); assert_eq!(v, SORTED[i]); } } use std::cell::Cell; use std::rc::Rc; #[derive(Debug, Clone)] struct DropCanary(Rc<Cell<u32>>); impl DropCanary { pub fn new() -> DropCanary { DropCanary(Rc::new(Cell::new(0))) } pub fn get(&self) -> u32 { self.0.get() } } impl Drop for DropCanary { fn drop(&mut self) { self.0.set(self.0.get()+1); } } #[test] fn compact() { let mut vec = OrderedVec::new(); let mut seq = Sequence::new(); let canary = DropCanary::new(); // no need to compact! 
// (compact test, continued) first phase: 100 distinct keys, so compact() must
// be a no-op and the DropCanary must record zero drops; second phase: 10
// versions per key, so compact() keeps 100 entries and drops the other 900.
for i in 0..100 { vec.insert(i, seq.next(), canary.clone()); } assert_eq!(vec.len(), 100); vec.compact(); assert_eq!(vec.len(), 100); assert_eq!(canary.get(), 0); let mut vec = OrderedVec::new(); let canary = DropCanary::new(); // no need to compact! for i in 0..100 { for _ in 0..10 { vec.insert(i, seq.next(), canary.clone()); } } assert_eq!(vec.len(), 1000); assert_eq!(canary.get(), 0); vec.compact(); assert_eq!(vec.len(), 100); assert_eq!(canary.get(), 900); } #[test] fn merge_update_drop() { let mut gen0 = OrderedVec::new(); let mut gen1 = OrderedVec::new(); let mut gen2 = OrderedVec::new(); let mut seq = Sequence::new(); let canary = DropCanary::new(); for &i in VECTOR { gen0.insert(i, seq.next(), canary.clone()); gen1.insert(i, seq.next(), canary.clone()); gen2.insert(i, seq.next(), canary.clone()); } assert_eq!(canary.get(), 0); gen0.merge(&mut gen1); assert_eq!(canary.get(), 0); gen0.merge(&mut gen2); assert_eq!(canary.get(), 0); gen0.compact(); assert_eq!(canary.get(), 2 * VECTOR.len() as u32); } }
use std::collections::HashSet;

/// One unit step of the rope's head (Advent of Code 2022, day 9).
#[derive(Clone)]
enum Move {
    Up,
    Down,
    Left,
    Right,
}

/// Expand one input line ("R 4", "U 2", ...) into that many unit moves.
/// Extracted because parts 1 and 2 previously duplicated this verbatim.
/// Unknown directions expand to no moves, matching the original `_ => vec![]`.
fn parse_moves(line: &str) -> Vec<Move> {
    let move_instructions = line.split(" ").collect::<Vec<&str>>();
    match move_instructions[0] {
        "U" => vec![Move::Up; move_instructions[1].parse::<usize>().unwrap()],
        "D" => vec![Move::Down; move_instructions[1].parse::<usize>().unwrap()],
        "R" => vec![Move::Right; move_instructions[1].parse::<usize>().unwrap()],
        "L" => vec![Move::Left; move_instructions[1].parse::<usize>().unwrap()],
        _ => vec![],
    }
}

/// Where the tail knot ends up after the knot ahead of it has stepped to
/// `new_head_pos`. Positions are (x, y) grid cells; the simulation starts at
/// (1000, 1000) so the u32 arithmetic never underflows.
fn compute_tail_position(current_tail_pos: (u32, u32), new_head_pos: (u32, u32)) -> (u32, u32) {
    // "Connected" = same cell, orthogonally adjacent, or — when the *outer*
    // head and tail share neither row nor column — diagonally adjacent.
    // NOTE(review): the third clause deliberately reads the outer
    // `current_tail_pos`/`new_head_pos`, not the closure arguments `t1`/`t2`;
    // the diagonal candidate checks below depend on this. Do not "fix" it
    // without re-running the part-2 example (expected answer 36).
    let is_connected = |t1: (u32, u32), t2: (u32, u32)| {
        let distance = i32::abs(t1.0 as i32 - t2.0 as i32) + i32::abs(t1.1 as i32 - t2.1 as i32);
        t1 == t2
            || distance == 1
            || (current_tail_pos.0 != new_head_pos.0
                && current_tail_pos.1 != new_head_pos.1
                && distance == 2)
    };
    // Still touching: the tail does not move.
    if is_connected(new_head_pos, current_tail_pos) {
        return current_tail_pos;
    }
    // Same column: step one cell toward the head along the y axis.
    if new_head_pos.0 == current_tail_pos.0 {
        if new_head_pos.1 < current_tail_pos.1 {
            return (current_tail_pos.0, current_tail_pos.1 - 1);
        } else {
            return (current_tail_pos.0, current_tail_pos.1 + 1);
        }
    }
    // Same row: step one cell toward the head along the x axis.
    if new_head_pos.1 == current_tail_pos.1 {
        if new_head_pos.0 < current_tail_pos.0 {
            return (current_tail_pos.0 - 1, current_tail_pos.1);
        } else {
            return (current_tail_pos.0 + 1, current_tail_pos.1);
        }
    }
    // Diagonal moves: try the four diagonal neighbours and take the first one
    // that ends up connected to the head.
    if is_connected(new_head_pos, (current_tail_pos.0 - 1, current_tail_pos.1 - 1)) {
        return (current_tail_pos.0 - 1, current_tail_pos.1 - 1);
    } else if is_connected(new_head_pos, (current_tail_pos.0 - 1, current_tail_pos.1 + 1)) {
        return (current_tail_pos.0 - 1, current_tail_pos.1 + 1);
    } else if is_connected(new_head_pos, (current_tail_pos.0 + 1, current_tail_pos.1 - 1)) {
        return (current_tail_pos.0 + 1, current_tail_pos.1 - 1);
    } else if is_connected(new_head_pos, (current_tail_pos.0 + 1, current_tail_pos.1 + 1)) {
        return (current_tail_pos.0 + 1, current_tail_pos.1 + 1);
    }
    panic!("Unexpected move");
}

/// Part 1: number of distinct cells visited by the tail of a 2-knot rope.
pub fn process_part1(input: &str) -> String {
    let mut tail_positions: HashSet<(u32, u32)> = HashSet::new();
    // Start far from the origin so moves never underflow the u32 coordinates.
    let mut current_head_position = (1000u32, 1000u32);
    let mut current_tail_position = (1000u32, 1000u32);
    input.lines().flat_map(parse_moves).for_each(|m| {
        match m {
            Move::Up => current_head_position.1 -= 1,
            Move::Down => current_head_position.1 += 1,
            Move::Left => current_head_position.0 -= 1,
            Move::Right => current_head_position.0 += 1,
        }
        current_tail_position =
            compute_tail_position(current_tail_position, current_head_position);
        tail_positions.insert(current_tail_position);
    });
    tail_positions.len().to_string()
}

/// Debug helper: render the rope on a `width`×`height` grid.
#[allow(unused)]
fn print(rope: &Vec<(u32, u32)>, width: usize, height: usize) {
    let mut buffer = vec!['.'; width * height];
    let calc_pos = |knot: &(u32, u32)| -> usize { width * knot.1 as usize + knot.0 as usize };
    // Draw back-to-front so the head ('H') wins when knots overlap.
    for (idx, knot_pos) in rope.iter().enumerate().rev() {
        if idx == 0 {
            buffer[calc_pos(knot_pos)] = 'H';
        } else if idx == 9 {
            buffer[calc_pos(knot_pos)] = 'T';
        } else {
            buffer[calc_pos(knot_pos)] = (48 + idx as u8) as char;
        }
    }
    println!("{:?}", rope);
    for (pos, ch) in buffer.iter().enumerate() {
        print!("{}", ch);
        // NOTE(review): breaks after column 0 rather than the last column;
        // kept as-is since this is a debug aid only.
        if pos % width == 0 {
            println!("");
        }
    }
    println!("\n\n\n");
}

/// Part 2: number of distinct cells visited by the last knot of a 10-knot rope.
pub fn process_part2(input: &str) -> String {
    let mut tail_positions: HashSet<(u32, u32)> = HashSet::new();
    // Knot 0 is the head; knot 9 is the tail whose trail we record.
    let mut rope_position: Vec<(u32, u32)> = vec![(1000u32, 1000u32); 10];
    const HEAD_ID: usize = 0;
    const TAIL_ID: usize = 9;
    input.lines().flat_map(parse_moves).for_each(|m| {
        let mut head_position = rope_position[HEAD_ID];
        match m {
            Move::Up => head_position.1 -= 1,
            Move::Down => head_position.1 += 1,
            Move::Left => head_position.0 -= 1,
            Move::Right => head_position.0 += 1,
        }
        rope_position[HEAD_ID] = head_position;
        // Each knot follows the (already updated) knot ahead of it. This
        // replaces the original `rope_position.clone()` whose elements were
        // unused — only the indices mattered; behavior is unchanged.
        for knot_idx in 1..rope_position.len() {
            rope_position[knot_idx] =
                compute_tail_position(rope_position[knot_idx], rope_position[knot_idx - 1]);
        }
        // print(&rope_position, 33, 33);
        tail_positions.insert(rope_position[TAIL_ID]);
    });
    tail_positions.len().to_string()
}

#[cfg(test)]
mod tests {
    use super::*;

    const INPUT: &str = "R 4
U 4
L 3
D 1
R 4
D 1
L 5
R 2";

    #[test]
    fn part1() {
        assert_eq!(process_part1(INPUT), "13");
    }

    #[test]
    fn part2() {
        const INPUT2: &str = "R 5
U 8
L 8
D 3
R 17
D 10
L 25
U 20";
        // assert_eq!(process_part2(INPUT), "1");
        assert_eq!(process_part2(INPUT2), "36");
    }
}
// Native-program interface: `ProgramError` enumerates the reasons a program
// may reject an instruction; `ENTRYPOINT`/`Entrypoint` describe the exported
// C symbol ("process") every native program provides; `solana_entrypoint!`
// generates that `#[no_mangle] extern "C"` symbol from a user-supplied fn
// matching the `Entrypoint` signature.
// NOTE(review): the `Display` impl prints the fixed string "error" for every
// variant — presumably diagnostics rely on `Debug` instead; confirm before
// enriching it, as callers may match on the formatted output.
use crate::account::KeyedAccount; use crate::pubkey::Pubkey; use std; /// Reasons a program might have rejected an instruction. #[derive(Debug, PartialEq, Eq, Clone)] pub enum ProgramError { /// The program instruction returned an error GenericError, /// The arguments provided to a program instruction where invalid InvalidArgument, /// An instruction's data contents was invalid InvalidInstructionData, /// An account's data contents was invalid InvalidAccountData, /// An account's data was too small AccountDataTooSmall, /// The account did not have the expected program id IncorrectProgramId, /// A signature was required but not found MissingRequiredSignature, /// CustomError allows on-chain programs to implement program-specific error types and see /// them returned by the Solana runtime. A CustomError may be any type that is serialized /// to a Vec of bytes, max length 32 bytes. Any CustomError Vec greater than this length will /// be truncated by the runtime. CustomError(Vec<u8>), } impl std::fmt::Display for ProgramError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "error") } } impl std::error::Error for ProgramError {} // All native programs export a symbol named process() pub const ENTRYPOINT: &str = "process"; // Native program ENTRYPOINT prototype pub type Entrypoint = unsafe extern "C" fn( program_id: &Pubkey, keyed_accounts: &mut [KeyedAccount], data: &[u8], tick_height: u64, ) -> Result<(), ProgramError>; // Convenience macro to define the native program entrypoint. Supply a fn to this macro that // conforms to the `Entrypoint` type signature. #[macro_export] macro_rules! solana_entrypoint( ($entrypoint:ident) => ( #[no_mangle] pub extern "C" fn process( program_id: &Pubkey, keyed_accounts: &mut [KeyedAccount], data: &[u8], tick_height: u64 ) -> Result<(), ProgramError> { $entrypoint(program_id, keyed_accounts, data, tick_height) } ) );
use oauth2::AccessToken;

/// Access-token response fields common to OAuth2 bearer tokens.
pub trait BearerToken {
    /// Token type string as reported by the provider (presumably "Bearer" —
    /// confirm against the provider's token response).
    fn token_type(&self) -> &str;
    /// Scopes granted to this token.
    fn scopes(&self) -> &[String];
    /// Token lifetime in seconds.
    fn expires_in(&self) -> u64;
    /// The access token itself.
    fn access_token(&self) -> &AccessToken;
}

/// Implemented by token responses that also carry a refresh token.
pub trait RefreshToken {
    fn refresh_token(&self) -> &AccessToken;
}

/// Provider-specific `ext_expires_in` field. NOTE(review): presumably the
/// Azure AD extended-expiry value — verify against the token response schema.
pub trait ExtExpiresIn {
    fn ext_expires_in(&self) -> u64;
}
// Pass-by-reference tutorial: shows that `&mut` borrows let callees mutate
// both struct fields and plain locals in place.

/// Simple wrapper holding one integer, used to demonstrate struct mutation.
struct Object {
    value: i32,
}

// Mutate functions

/// Set the object's field to 20 through a mutable borrow.
/// (The redundant `-> ()` return annotations were dropped — unit is implied.)
fn mutate_obj(obj: &mut Object) {
    obj.value = 20;
}

/// Set the referenced integer to 20.
fn mutate_var(value: &mut i32) {
    // Dereference pointer to assign new value to var
    *value = 20;
}

fn main() {
    // Declared and initialized data
    let mut value = 5;
    let mut obj = Object { value: 10 };

    // Print data
    println!("value {}", value);
    println!("obj.value {}", obj.value);

    // mutate data
    mutate_obj(&mut obj);
    mutate_var(&mut value);

    // Print mutated data
    println!("value {}", value);
    println!("obj.value {}", obj.value);
}
// svd2rust-generated register block for the DSI Host `ISR1` interrupt/status
// register: per-bit reader/writer type aliases (bits 0..=12: timeouts, ECC,
// CRC, packet-size, EoTp and generic read/write/transmit/receive errors),
// the `R`/`W` accessor impls, and the `ISR1_SPEC` register description
// (u32-backed, reset value 0).
// NOTE(review): machine-generated code — regenerate from the SVD file rather
// than editing by hand, so local changes are not silently lost.
#[doc = "Register `ISR1` reader"] pub type R = crate::R<ISR1_SPEC>; #[doc = "Register `ISR1` writer"] pub type W = crate::W<ISR1_SPEC>; #[doc = "Field `TOHSTX` reader - Timeout high"] pub type TOHSTX_R = crate::BitReader; #[doc = "Field `TOHSTX` writer - Timeout high"] pub type TOHSTX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `TOLPRX` reader - Timeout low"] pub type TOLPRX_R = crate::BitReader; #[doc = "Field `TOLPRX` writer - Timeout low"] pub type TOLPRX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `ECCSE` reader - ECC single"] pub type ECCSE_R = crate::BitReader; #[doc = "Field `ECCSE` writer - ECC single"] pub type ECCSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `ECCME` reader - ECC multi"] pub type ECCME_R = crate::BitReader; #[doc = "Field `ECCME` writer - ECC multi"] pub type ECCME_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `CRCE` reader - CRC error"] pub type CRCE_R = crate::BitReader; #[doc = "Field `CRCE` writer - CRC error"] pub type CRCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `PSE` reader - Packet size error"] pub type PSE_R = crate::BitReader; #[doc = "Field `PSE` writer - Packet size error"] pub type PSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `EOTPE` reader - EoTp error"] pub type EOTPE_R = crate::BitReader; #[doc = "Field `EOTPE` writer - EoTp error"] pub type EOTPE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `LPWRE` reader - LTDC payload write error"] pub type LPWRE_R = crate::BitReader; #[doc = "Field `LPWRE` writer - LTDC payload write error"] pub type LPWRE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `GCWRE` reader - Generic command write error"] pub type GCWRE_R = crate::BitReader; #[doc = "Field `GCWRE` writer - Generic command write error"] pub type GCWRE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = 
"Field `GPWRE` reader - Generic payload write error"] pub type GPWRE_R = crate::BitReader; #[doc = "Field `GPWRE` writer - Generic payload write error"] pub type GPWRE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `GPTXE` reader - Generic payload transmit error"] pub type GPTXE_R = crate::BitReader; #[doc = "Field `GPTXE` writer - Generic payload transmit error"] pub type GPTXE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `GPRDE` reader - Generic payload read error"] pub type GPRDE_R = crate::BitReader; #[doc = "Field `GPRDE` writer - Generic payload read error"] pub type GPRDE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `GPRXE` reader - Generic payload receive error"] pub type GPRXE_R = crate::BitReader; #[doc = "Field `GPRXE` writer - Generic payload receive error"] pub type GPRXE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; impl R { #[doc = "Bit 0 - Timeout high"] #[inline(always)] pub fn tohstx(&self) -> TOHSTX_R { TOHSTX_R::new((self.bits & 1) != 0) } #[doc = "Bit 1 - Timeout low"] #[inline(always)] pub fn tolprx(&self) -> TOLPRX_R { TOLPRX_R::new(((self.bits >> 1) & 1) != 0) } #[doc = "Bit 2 - ECC single"] #[inline(always)] pub fn eccse(&self) -> ECCSE_R { ECCSE_R::new(((self.bits >> 2) & 1) != 0) } #[doc = "Bit 3 - ECC multi"] #[inline(always)] pub fn eccme(&self) -> ECCME_R { ECCME_R::new(((self.bits >> 3) & 1) != 0) } #[doc = "Bit 4 - CRC error"] #[inline(always)] pub fn crce(&self) -> CRCE_R { CRCE_R::new(((self.bits >> 4) & 1) != 0) } #[doc = "Bit 5 - Packet size error"] #[inline(always)] pub fn pse(&self) -> PSE_R { PSE_R::new(((self.bits >> 5) & 1) != 0) } #[doc = "Bit 6 - EoTp error"] #[inline(always)] pub fn eotpe(&self) -> EOTPE_R { EOTPE_R::new(((self.bits >> 6) & 1) != 0) } #[doc = "Bit 7 - LTDC payload write error"] #[inline(always)] pub fn lpwre(&self) -> LPWRE_R { LPWRE_R::new(((self.bits >> 7) & 1) != 0) } #[doc = "Bit 8 - Generic command write error"] 
#[inline(always)] pub fn gcwre(&self) -> GCWRE_R { GCWRE_R::new(((self.bits >> 8) & 1) != 0) } #[doc = "Bit 9 - Generic payload write error"] #[inline(always)] pub fn gpwre(&self) -> GPWRE_R { GPWRE_R::new(((self.bits >> 9) & 1) != 0) } #[doc = "Bit 10 - Generic payload transmit error"] #[inline(always)] pub fn gptxe(&self) -> GPTXE_R { GPTXE_R::new(((self.bits >> 10) & 1) != 0) } #[doc = "Bit 11 - Generic payload read error"] #[inline(always)] pub fn gprde(&self) -> GPRDE_R { GPRDE_R::new(((self.bits >> 11) & 1) != 0) } #[doc = "Bit 12 - Generic payload receive error"] #[inline(always)] pub fn gprxe(&self) -> GPRXE_R { GPRXE_R::new(((self.bits >> 12) & 1) != 0) } } impl W { #[doc = "Bit 0 - Timeout high"] #[inline(always)] #[must_use] pub fn tohstx(&mut self) -> TOHSTX_W<ISR1_SPEC, 0> { TOHSTX_W::new(self) } #[doc = "Bit 1 - Timeout low"] #[inline(always)] #[must_use] pub fn tolprx(&mut self) -> TOLPRX_W<ISR1_SPEC, 1> { TOLPRX_W::new(self) } #[doc = "Bit 2 - ECC single"] #[inline(always)] #[must_use] pub fn eccse(&mut self) -> ECCSE_W<ISR1_SPEC, 2> { ECCSE_W::new(self) } #[doc = "Bit 3 - ECC multi"] #[inline(always)] #[must_use] pub fn eccme(&mut self) -> ECCME_W<ISR1_SPEC, 3> { ECCME_W::new(self) } #[doc = "Bit 4 - CRC error"] #[inline(always)] #[must_use] pub fn crce(&mut self) -> CRCE_W<ISR1_SPEC, 4> { CRCE_W::new(self) } #[doc = "Bit 5 - Packet size error"] #[inline(always)] #[must_use] pub fn pse(&mut self) -> PSE_W<ISR1_SPEC, 5> { PSE_W::new(self) } #[doc = "Bit 6 - EoTp error"] #[inline(always)] #[must_use] pub fn eotpe(&mut self) -> EOTPE_W<ISR1_SPEC, 6> { EOTPE_W::new(self) } #[doc = "Bit 7 - LTDC payload write error"] #[inline(always)] #[must_use] pub fn lpwre(&mut self) -> LPWRE_W<ISR1_SPEC, 7> { LPWRE_W::new(self) } #[doc = "Bit 8 - Generic command write error"] #[inline(always)] #[must_use] pub fn gcwre(&mut self) -> GCWRE_W<ISR1_SPEC, 8> { GCWRE_W::new(self) } #[doc = "Bit 9 - Generic payload write error"] #[inline(always)] #[must_use] pub fn 
gpwre(&mut self) -> GPWRE_W<ISR1_SPEC, 9> { GPWRE_W::new(self) } #[doc = "Bit 10 - Generic payload transmit error"] #[inline(always)] #[must_use] pub fn gptxe(&mut self) -> GPTXE_W<ISR1_SPEC, 10> { GPTXE_W::new(self) } #[doc = "Bit 11 - Generic payload read error"] #[inline(always)] #[must_use] pub fn gprde(&mut self) -> GPRDE_W<ISR1_SPEC, 11> { GPRDE_W::new(self) } #[doc = "Bit 12 - Generic payload receive error"] #[inline(always)] #[must_use] pub fn gprxe(&mut self) -> GPRXE_W<ISR1_SPEC, 12> { GPRXE_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "DSI Host interrupt and status register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`isr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct ISR1_SPEC; impl crate::RegisterSpec for ISR1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`isr1::R`](R) reader structure"] impl crate::Readable for ISR1_SPEC {} #[doc = "`write(|w| ..)` method takes [`isr1::W`](W) writer structure"] impl crate::Writable for ISR1_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets ISR1 to value 0"] impl crate::Resettable for ISR1_SPEC { const RESET_VALUE: Self::Ux = 0; }
extern crate maud; use maud::{html, Markup}; pub fn head() -> Markup { html! { head{ meta charset="utf8"; meta lang="en-US"; meta name="viewport" content="width=device-width, initial-scale=1"; meta name="author" content="tom pridham"; meta name="description" content="portfolio site for tom pridham. he likes being good at things."; title { "tompridham.me "} link rel="shortcut icon" href="favicon.ico" type="image/x-icon"; link rel="stylesheet" href="/app.css"; } } }
use std::borrow::Cow;
use std::cmp;
use std::marker;
use std::sync::Arc;
use std::usize;

use crate::file::FileHash;
use crate::function::ParameterOffset;
use crate::namespace::Namespace;
use crate::source::Source;
use crate::{Id, Size};

/// The kind of a type.
#[derive(Debug, Clone)]
pub enum TypeKind<'input> {
    /// The void type.
    Void,
    /// A base type.
    Base(BaseType<'input>),
    /// A type alias definition.
    Def(TypeDef<'input>),
    /// A struct type.
    Struct(StructType<'input>),
    /// A union type.
    Union(UnionType<'input>),
    /// An enumeration type.
    Enumeration(EnumerationType<'input>),
    /// A type for an array of elements.
    Array(ArrayType<'input>),
    /// A function type.
    Function(FunctionType<'input>),
    /// An unspecified type.
    Unspecified(UnspecifiedType<'input>),
    /// The type of a pointer to a member.
    PointerToMember(PointerToMemberType),
    /// A type that is obtained by adding a modifier to another type.
    Modifier(TypeModifier<'input>),
    /// A subrange of another type.
    Subrange(SubrangeType<'input>),
}

impl<'input> TypeKind<'input> {
    // Stable per-kind tag used only by `Type::cmp_id` to impose a total order
    // across values of *different* kinds. The numbers themselves are arbitrary
    // but must stay distinct.
    fn discriminant_value(&self) -> u8 {
        match *self {
            TypeKind::Void => 1,
            TypeKind::Base(..) => 2,
            TypeKind::Def(..) => 3,
            TypeKind::Struct(..) => 4,
            TypeKind::Union(..) => 5,
            TypeKind::Enumeration(..) => 6,
            TypeKind::Array(..) => 7,
            TypeKind::Function(..) => 8,
            TypeKind::Unspecified(..) => 9,
            TypeKind::PointerToMember(..) => 10,
            TypeKind::Modifier(..) => 11,
            TypeKind::Subrange(..) => 12,
        }
    }
}

/// The debuginfo offset of a type.
///
/// This is unique for all types in a file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TypeOffset(usize);

impl TypeOffset {
    #[inline]
    pub(crate) fn new(offset: usize) -> TypeOffset {
        // `usize::MAX` is reserved as the "no offset" sentinel (see `none`).
        debug_assert!(TypeOffset(offset) != TypeOffset::none());
        TypeOffset(offset)
    }

    #[inline]
    pub(crate) fn none() -> TypeOffset {
        TypeOffset(usize::MAX)
    }

    /// Return true if the type is unknown or `void`.
    #[inline]
    pub fn is_none(self) -> bool {
        self == Self::none()
    }

    /// Return true if the type is known and not `void`.
    #[inline]
    pub fn is_some(self) -> bool {
        self != Self::none()
    }

    #[inline]
    pub(crate) fn get(self) -> Option<usize> {
        if self.is_none() {
            None
        } else {
            Some(self.0)
        }
    }
}

impl Default for TypeOffset {
    #[inline]
    fn default() -> Self {
        TypeOffset::none()
    }
}

/// A type.
#[derive(Debug, Clone)]
pub struct Type<'input> {
    pub(crate) id: Id,
    pub(crate) offset: TypeOffset,
    pub(crate) kind: TypeKind<'input>,
}

impl<'input> Default for Type<'input> {
    fn default() -> Self {
        Type {
            id: Id::new(0),
            offset: TypeOffset::none(),
            kind: TypeKind::Base(BaseType::default()),
        }
    }
}

impl<'input> Type<'input> {
    /// Lookup a type given its offset.
    ///
    /// Returns `None` if the type offset is invalid.
    pub fn from_offset<'a>(
        hash: &'a FileHash<'input>,
        offset: TypeOffset,
    ) -> Option<Cow<'a, Type<'input>>> {
        // The sentinel offset maps to the shared `void` type on the hash.
        if offset.is_none() {
            return Some(Cow::Borrowed(&hash.void));
        }
        // Try the cache first, then fall back to (re-)parsing from the file.
        hash.types
            .get(&offset)
            .map(|ty| Cow::Borrowed(*ty))
            .or_else(|| hash.file.get_type(offset).map(Cow::Owned))
    }

    pub(crate) fn void() -> Type<'static> {
        Type {
            id: Id::new(usize::MAX),
            // Constructed directly because `TypeOffset::new` rejects the
            // sentinel value on purpose.
            offset: TypeOffset(usize::MAX),
            kind: TypeKind::Void,
        }
    }

    /// Return true if the type is the void type.
    #[inline]
    pub fn is_void(&self) -> bool {
        matches!(self.kind, TypeKind::Void)
    }

    /// The user defined id for this type.
    #[inline]
    pub fn id(&self) -> usize {
        self.id.get()
    }

    /// Set a user defined id for this type.
    // Note: takes `&self`; `Id` provides interior mutability.
    #[inline]
    pub fn set_id(&self, id: usize) {
        self.id.set(id)
    }

    /// The debuginfo offset of this type.
    #[inline]
    pub fn offset(&self) -> TypeOffset {
        self.offset
    }

    /// The kind of this type.
    #[inline]
    pub fn kind(&self) -> &TypeKind<'input> {
        &self.kind
    }

    /// The size in bytes of an instance of this type.
    pub fn byte_size(&self, hash: &FileHash) -> Option<u64> {
        // Delegates to the kind; `hash` is needed by kinds that must resolve
        // another type (aliases, arrays, modifiers, ...).
        match self.kind {
            TypeKind::Void => Some(0),
            TypeKind::Base(ref val) => val.byte_size(),
            TypeKind::Def(ref val) => val.byte_size(hash),
            TypeKind::Struct(ref val) => val.byte_size(),
            TypeKind::Union(ref val) => val.byte_size(),
            TypeKind::Enumeration(ref val) => val.byte_size(hash),
            TypeKind::Array(ref val) => val.byte_size(hash),
            TypeKind::Function(ref val) => val.byte_size(),
            TypeKind::Unspecified(..) => None,
            TypeKind::PointerToMember(ref val) => val.byte_size(hash),
            TypeKind::Modifier(ref val) => val.byte_size(hash),
            TypeKind::Subrange(ref val) => val.byte_size(hash),
        }
    }

    /// Return true if this is an anonymous type, or defined within an anonymous type.
    pub fn is_anon(&self) -> bool {
        match self.kind {
            TypeKind::Struct(ref val) => val.is_anon(),
            TypeKind::Union(ref val) => val.is_anon(),
            TypeKind::Void
            | TypeKind::Base(..)
            | TypeKind::Def(..)
            | TypeKind::Enumeration(..)
            | TypeKind::Array(..)
            | TypeKind::Function(..)
            | TypeKind::Unspecified(..)
            | TypeKind::PointerToMember(..)
            | TypeKind::Modifier(..)
            | TypeKind::Subrange(..) => false,
        }
    }

    /// Return true if this is the type of a function (including aliases and modifiers).
    fn is_function(&self, hash: &FileHash) -> bool {
        match self.kind {
            TypeKind::Function(..) => true,
            // Recurse through aliases and modifiers to the underlying type.
            TypeKind::Def(ref val) => match val.ty(hash) {
                Some(ty) => ty.is_function(hash),
                None => false,
            },
            TypeKind::Modifier(ref val) => match val.ty(hash) {
                Some(ty) => ty.is_function(hash),
                None => false,
            },
            TypeKind::Void
            | TypeKind::Struct(..)
            | TypeKind::Union(..)
            | TypeKind::Base(..)
            | TypeKind::Enumeration(..)
            | TypeKind::Array(..)
            | TypeKind::Unspecified(..)
            | TypeKind::PointerToMember(..)
            | TypeKind::Subrange(..) => false,
        }
    }

    /// The members of this type.
    // Only structs and unions have members; every other kind yields `&[]`.
    pub fn members(&self) -> &[Member<'input>] {
        match self.kind {
            TypeKind::Struct(ref val) => val.members(),
            TypeKind::Union(ref val) => val.members(),
            TypeKind::Void
            | TypeKind::Enumeration(..)
            | TypeKind::Def(..)
            | TypeKind::Base(..)
            | TypeKind::Array(..)
            | TypeKind::Function(..)
            | TypeKind::Unspecified(..)
            | TypeKind::PointerToMember(..)
            | TypeKind::Modifier(..)
            | TypeKind::Subrange(..) => &[],
        }
    }

    /// Compare the identifying information of two types.
    ///
    /// Equal types must have the same type kind. Further requirements for equality
    /// depend on the specific type kind.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(
        hash_a: &FileHash,
        type_a: &Type,
        hash_b: &FileHash,
        type_b: &Type,
    ) -> cmp::Ordering {
        use self::TypeKind::*;
        match (&type_a.kind, &type_b.kind) {
            (&Base(ref a), &Base(ref b)) => BaseType::cmp_id(a, b),
            (&Def(ref a), &Def(ref b)) => TypeDef::cmp_id(a, b),
            (&Struct(ref a), &Struct(ref b)) => StructType::cmp_id(a, b),
            (&Union(ref a), &Union(ref b)) => UnionType::cmp_id(a, b),
            (&Enumeration(ref a), &Enumeration(ref b)) => EnumerationType::cmp_id(a, b),
            (&Array(ref a), &Array(ref b)) => ArrayType::cmp_id(hash_a, a, hash_b, b),
            (&Function(ref a), &Function(ref b)) => FunctionType::cmp_id(hash_a, a, hash_b, b),
            (&Unspecified(ref a), &Unspecified(ref b)) => UnspecifiedType::cmp_id(a, b),
            (&PointerToMember(ref a), &PointerToMember(ref b)) => {
                PointerToMemberType::cmp_id(hash_a, a, hash_b, b)
            }
            (&Modifier(ref a), &Modifier(ref b)) => TypeModifier::cmp_id(hash_a, a, hash_b, b),
            (&Subrange(ref a), &Subrange(ref b)) => SubrangeType::cmp_id(hash_a, a, hash_b, b),
            _ => {
                // Different kinds: order by the stable per-kind tag.
                let discr_a = type_a.kind.discriminant_value();
                let discr_b = type_b.kind.discriminant_value();
                debug_assert_ne!(discr_a, discr_b);
                discr_a.cmp(&discr_b)
            }
        }
    }
}

/// A type that is obtained by adding a modifier to another type.
#[derive(Debug, Clone)]
pub struct TypeModifier<'input> {
    pub(crate) kind: TypeModifierKind,
    pub(crate) ty: TypeOffset,
    pub(crate) name: Option<&'input str>,
    pub(crate) byte_size: Size,
    // TODO: hack
    pub(crate) address_size: Option<u64>,
}

/// The kind of a type modifier.
#[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum TypeModifierKind { /// The resulting type is a pointer to the type being modified. Pointer, /// The resulting type is a reference to the type being modified. Reference, /// The resulting type is a constant. Const, /// The resulting type is packed. Packed, /// The resulting type is volatile. Volatile, /// The resulting type has restricted aliasing. Restrict, /// The resulting type is shared (for example, in UPC). Shared, /// The resulting type is a rvalue reference to the type being modified. RvalueReference, /// The resulting type is atomic. Atomic, // TODO: // Immutable, /// Any other type modifier. // PDB is disabled #[allow(dead_code)] Other, } impl TypeModifierKind { fn discriminant_value(self) -> u8 { match self { TypeModifierKind::Pointer => 1, TypeModifierKind::Reference => 2, TypeModifierKind::Const => 3, TypeModifierKind::Packed => 4, TypeModifierKind::Volatile => 5, TypeModifierKind::Restrict => 6, TypeModifierKind::Shared => 7, TypeModifierKind::RvalueReference => 8, TypeModifierKind::Atomic => 9, TypeModifierKind::Other => 10, } } } impl<'input> TypeModifier<'input> { /// The name of the type. /// /// If this is `None` then the name should be derived from the type that is being modified. #[inline] pub fn name(&self) -> Option<&str> { self.name } /// The kind of this type modifier. #[inline] pub fn kind(&self) -> TypeModifierKind { self.kind } /// The type that is being modified. 
#[inline] pub fn ty<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> { Type::from_offset(hash, self.ty) } fn is_pointer_like(&self) -> bool { match self.kind { TypeModifierKind::Const | TypeModifierKind::Packed | TypeModifierKind::Volatile | TypeModifierKind::Restrict | TypeModifierKind::Shared | TypeModifierKind::Atomic | TypeModifierKind::Other => false, TypeModifierKind::Pointer | TypeModifierKind::Reference | TypeModifierKind::RvalueReference => true, } } /// The size in bytes of an instance of this type. pub fn byte_size(&self, hash: &FileHash) -> Option<u64> { if self.byte_size.is_some() { return self.byte_size.get(); } if self.is_pointer_like() { self.address_size } else { self.ty(hash).and_then(|v| v.byte_size(hash)) } } /// Compare the identifying information of two types. /// /// Type modifiers are equal if the modifiers are the same and the types being modified /// are equal. /// /// This can be used to sort, and to determine if two types refer to the same definition /// (even if there are differences in the definitions). pub fn cmp_id( hash_a: &FileHash, a: &TypeModifier, hash_b: &FileHash, b: &TypeModifier, ) -> cmp::Ordering { match (a.ty(hash_a), b.ty(hash_b)) { (Some(ref ty_a), Some(ref ty_b)) => { let ord = Type::cmp_id(hash_a, ty_a, hash_b, ty_b); if ord != cmp::Ordering::Equal { return ord; } } (Some(_), None) => { return cmp::Ordering::Less; } (None, Some(_)) => { return cmp::Ordering::Greater; } (None, None) => {} } let discr_a = a.kind.discriminant_value(); let discr_b = b.kind.discriminant_value(); discr_a.cmp(&discr_b) } } /// The endianity of an object. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Endianity { /// Default endianity encoding. Default, /// Big-endian encoding. Big, /// Little-endian encoding. Little, } impl Default for Endianity { fn default() -> Self { Self::Default } } /// The encoding of a base type. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum BaseTypeEncoding { /// Unsupported or unspecified encoding. Other, /// True or false. Boolean, /// Linear machine address. Address, /// Signed binary integer. Signed, /// Signed character. SignedChar, /// Unsigned binary integer. Unsigned, /// Unsigned character. UnsignedChar, /// Binary floating-point number. Float, } impl Default for BaseTypeEncoding { fn default() -> Self { Self::Other } } /// A base type. #[derive(Debug, Default, Clone)] pub struct BaseType<'input> { pub(crate) name: Option<&'input str>, pub(crate) byte_size: Size, pub(crate) encoding: BaseTypeEncoding, pub(crate) endianity: Endianity, } impl<'input> BaseType<'input> { /// The name of the type. #[inline] pub fn name(&self) -> Option<&str> { self.name } /// The size in bytes of an instance of this type. #[inline] pub fn byte_size(&self) -> Option<u64> { self.byte_size.get() } /// How the base type is encoded an interpreted. #[inline] pub fn encoding(&self) -> BaseTypeEncoding { self.encoding } /// The endianity of the value, if applicable. #[inline] pub fn endianity(&self) -> Endianity { self.endianity } /// Compare the identifying information of two types. /// /// Base types are considered equal if their names are equal. /// /// This can be used to sort, and to determine if two types refer to the same definition /// (even if there are differences in the definitions). fn cmp_id(a: &BaseType, b: &BaseType) -> cmp::Ordering { a.name.cmp(&b.name) } } /// A type alias definition. #[derive(Debug, Default, Clone)] pub struct TypeDef<'input> { pub(crate) namespace: Option<Arc<Namespace<'input>>>, pub(crate) name: Option<&'input str>, pub(crate) ty: TypeOffset, pub(crate) source: Source<'input>, } impl<'input> TypeDef<'input> { /// The namespace of the type. pub fn namespace(&self) -> Option<&Namespace> { self.namespace.as_deref() } /// The name of the type definition. 
#[inline] pub fn name(&self) -> Option<&str> { self.name } /// The type that the alias is being defined for. #[inline] pub fn ty<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> { Type::from_offset(hash, self.ty) } /// The source information for the type definition. #[inline] pub fn source(&self) -> &Source<'input> { &self.source } /// The size in bytes of an instance of this type. pub fn byte_size(&self, hash: &FileHash) -> Option<u64> { self.ty(hash).and_then(|v| v.byte_size(hash)) } /// Compare the identifying information of two types. /// /// Type definitions are considered equal if their names are equal, even if the type being /// aliased is different. /// /// This can be used to sort, and to determine if two types refer to the same definition /// (even if there are differences in the definitions). pub fn cmp_id(a: &TypeDef, b: &TypeDef) -> cmp::Ordering { Namespace::cmp_ns_and_name(a.namespace(), a.name(), b.namespace(), b.name()) } } /// A struct type. #[derive(Debug, Default, Clone)] pub struct StructType<'input> { pub(crate) namespace: Option<Arc<Namespace<'input>>>, pub(crate) name: Option<&'input str>, pub(crate) source: Source<'input>, pub(crate) byte_size: Size, pub(crate) declaration: bool, pub(crate) members: Vec<Member<'input>>, pub(crate) variant_parts: Vec<VariantPart<'input>>, pub(crate) inherits: Vec<Inherit>, } impl<'input> StructType<'input> { /// The namespace of the type. pub fn namespace(&self) -> Option<&Namespace> { self.namespace.as_deref() } /// The name of the type. #[inline] pub fn name(&self) -> Option<&str> { self.name } /// The source information for the type. #[inline] pub fn source(&self) -> &Source<'input> { &self.source } /// The size in bytes of an instance of this type. #[inline] pub fn bit_size(&self) -> Option<u64> { self.byte_size.get().map(|v| v * 8) } /// The size in bytes of an instance of this type. 
#[inline] pub fn byte_size(&self) -> Option<u64> { self.byte_size.get() } /// Return true if this is a declaration. #[inline] pub fn is_declaration(&self) -> bool { self.declaration } /// Return true if this is an anonymous type, or defined within an anonymous type. pub fn is_anon(&self) -> bool { self.name.is_none() || Namespace::is_anon_type(&self.namespace) } /// The members of this type. #[inline] pub fn members(&self) -> &[Member<'input>] { &self.members } /// The variant parts of this type. #[inline] pub fn variant_parts(&self) -> &[VariantPart<'input>] { &self.variant_parts } /// The inherited types. #[inline] pub fn inherits(&self) -> &[Inherit] { &self.inherits } /// The layout of members of this type. pub fn layout<'me>(&'me self, hash: &FileHash) -> Vec<Layout<'input, 'me>> { layout( &self.members, &self.inherits, &self.variant_parts, 0, self.bit_size(), hash, ) } /// Compare the identifying information of two types. /// /// Structs are considered equal if their names are equal. /// /// This can be used to sort, and to determine if two types refer to the same definition /// (even if there are differences in the definitions). pub fn cmp_id(a: &StructType, b: &StructType) -> cmp::Ordering { Namespace::cmp_ns_and_name(a.namespace(), a.name(), b.namespace(), b.name()) } } /// A union type. #[derive(Debug, Default, Clone)] pub struct UnionType<'input> { pub(crate) namespace: Option<Arc<Namespace<'input>>>, pub(crate) name: Option<&'input str>, pub(crate) source: Source<'input>, pub(crate) byte_size: Size, pub(crate) declaration: bool, pub(crate) members: Vec<Member<'input>>, } impl<'input> UnionType<'input> { /// The namespace of the type. pub fn namespace(&self) -> Option<&Namespace> { self.namespace.as_deref() } /// The name of the type. #[inline] pub fn name(&self) -> Option<&str> { self.name } /// The source information for the type. #[inline] pub fn source(&self) -> &Source<'input> { &self.source } /// The size in bytes of an instance of this type. 
#[inline] pub fn byte_size(&self) -> Option<u64> { self.byte_size.get() } /// Return true if this is a declaration. #[inline] pub fn is_declaration(&self) -> bool { self.declaration } /// Return true if this is an anonymous type, or defined within an anonymous type. pub fn is_anon(&self) -> bool { self.name.is_none() || Namespace::is_anon_type(&self.namespace) } /// The members of this type. #[inline] pub fn members(&self) -> &[Member<'input>] { &self.members } /// Compare the identifying information of two types. /// /// Unions are considered equal if their names are equal. /// /// This can be used to sort, and to determine if two types refer to the same definition /// (even if there are differences in the definitions). pub fn cmp_id(a: &UnionType, b: &UnionType) -> cmp::Ordering { Namespace::cmp_ns_and_name(a.namespace(), a.name(), b.namespace(), b.name()) } } /// A variant part. /// /// A variant part is a discriminant member and list of variants that are /// selected based on the value of the discriminant member. #[derive(Debug, Default, Clone)] pub struct VariantPart<'input> { pub(crate) discr: MemberOffset, pub(crate) variants: Vec<Variant<'input>>, } impl<'input> VariantPart<'input> { /// The discriminant member for this variant part. /// /// The given members should be from the type containing this variant part. #[inline] pub fn discriminant<'a>(&self, members: &'a [Member<'input>]) -> Option<&'a Member<'input>> { members.iter().find(|member| member.offset == self.discr) } /// The variants for this variant part. #[inline] pub fn variants(&self) -> &[Variant<'input>] { &self.variants } /// The smallest offset in bits for a variant of this variant part. 
pub fn bit_offset(&self) -> u64 { let mut bit_offset = u64::max_value(); for variant in &self.variants { let o = variant.bit_offset(); if bit_offset > o { bit_offset = o; } } if bit_offset < u64::max_value() { bit_offset } else { 0 } } /// The largest size in bits for the variants of this variant part, /// excluding leading and trailing padding. pub fn bit_size(&self, hash: &FileHash) -> Option<u64> { let start = self.bit_offset(); let mut end = start; for variant in &self.variants { let o = variant.bit_offset(); if let Some(size) = variant.bit_size(hash) { if end < o + size { end = o + size; } } else { return None; } } Some(end - start) } } /// A variant. /// /// A variant consists of a discriminant value that selects the variant, /// and a list of members that are valid when the variant is selected. #[derive(Debug, Default, Clone)] pub struct Variant<'input> { pub(crate) discr_value: Option<u64>, pub(crate) name: Option<&'input str>, pub(crate) members: Vec<Member<'input>>, } impl<'input> Variant<'input> { /// The discriminant value which selects this variant. /// /// The sign of this value depends on the type of the discriminant member. #[inline] pub fn discriminant_value(&self) -> Option<u64> { self.discr_value } /// The name of the variant. /// /// Currently this is only set for Rust enums. #[inline] pub fn name(&self) -> Option<&str> { self.name } /// The members for this variant. #[inline] pub fn members(&self) -> &[Member<'input>] { &self.members } /// The smallest offset in bits for a member of this variant. pub fn bit_offset(&self) -> u64 { let mut bit_offset = u64::max_value(); for member in &self.members { let o = member.bit_offset(); if bit_offset > o { bit_offset = o; } } if bit_offset < u64::max_value() { bit_offset } else { 0 } } /// The size in bits for the members of this variant, excluding leading and trailing padding. 
pub fn bit_size(&self, hash: &FileHash) -> Option<u64> { let start = self.bit_offset(); let mut end = start; for member in &self.members { let o = member.bit_offset(); if let Some(size) = member.bit_size(hash) { if end < o + size { end = o + size; } } else { return None; } } Some(end - start) } /// The layout of members of this variant within a variant part. /// /// The given bit_offset and bit_size should be for the variant part. pub fn layout<'me>( &'me self, bit_offset: u64, bit_size: Option<u64>, hash: &FileHash<'input>, ) -> Vec<Layout<'input, 'me>> { layout(&self.members, &[], &[], bit_offset, bit_size, hash) } /// Compare the identifying information of two types. /// /// Variants are considered equal if the discriminant values are equal. /// /// This can be used to sort, and to determine if two types refer to the same definition /// (even if there are differences in the definitions). // TODO: compare discriminant member too pub fn cmp_id( _hash_a: &FileHash, a: &Variant, _hash_b: &FileHash, b: &Variant, ) -> cmp::Ordering { a.discr_value.cmp(&b.discr_value) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub(crate) struct MemberOffset(usize); impl MemberOffset { #[inline] pub(crate) fn new(offset: usize) -> MemberOffset { debug_assert!(MemberOffset(offset) != MemberOffset::none()); MemberOffset(offset) } #[inline] pub(crate) fn none() -> MemberOffset { MemberOffset(usize::MAX) } } impl Default for MemberOffset { #[inline] fn default() -> Self { MemberOffset::none() } } /// A member of a struct or union. #[derive(Debug, Default, Clone)] pub struct Member<'input> { pub(crate) offset: MemberOffset, pub(crate) name: Option<&'input str>, pub(crate) ty: TypeOffset, // Defaults to 0, so always present. pub(crate) bit_offset: u64, pub(crate) bit_size: Size, } impl<'input> Member<'input> { /// The name of the member. #[inline] pub fn name(&self) -> Option<&str> { self.name } /// The debuginfo offset of the type of this member. 
#[inline] pub fn type_offset(&self) -> TypeOffset { self.ty } /// The type of this member. pub fn ty<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> { Type::from_offset(hash, self.ty) } /// The offset in bits of this member. #[inline] pub fn bit_offset(&self) -> u64 { self.bit_offset } /// The size in bits of this member. pub fn bit_size(&self, hash: &FileHash) -> Option<u64> { if self.bit_size.is_some() { self.bit_size.get() } else { self.ty(hash).and_then(|v| v.byte_size(hash).map(|v| v * 8)) } } /// Return true if this member defines an inline type. pub fn is_inline(&self, hash: &FileHash) -> bool { match self.name() { Some(s) => { if s.starts_with("RUST$ENCODED$ENUM$") { return true; } } None => return true, }; if let Some(ty) = self.ty(hash) { ty.is_anon() } else { false } } } /// An inherited type of a struct or union. #[derive(Debug, Default, Clone)] pub struct Inherit { pub(crate) ty: TypeOffset, // Defaults to 0, so always present. pub(crate) bit_offset: u64, } impl Inherit { /// The debuginfo offset of the inherited type. #[inline] pub fn type_offset(&self) -> TypeOffset { self.ty } /// The inherited type. pub fn ty<'a, 'input>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> { Type::from_offset(hash, self.ty) } /// The offset in bits of the inherited type within the struct. #[inline] pub fn bit_offset(&self) -> u64 { self.bit_offset } /// The size in bits of the inherited type. pub fn bit_size(&self, hash: &FileHash) -> Option<u64> { self.ty(hash).and_then(|v| v.byte_size(hash).map(|v| v * 8)) } } /// The layout of an item (member or padding) within a struct. #[derive(Debug, Clone)] pub struct Layout<'input, 'item> where 'input: 'item, { /// The offset in bits of the item within the struct. pub bit_offset: u64, /// The size in bits of the item. pub bit_size: Size, /// The member or padding. pub item: LayoutItem<'input, 'item>, } /// The item in a `Layout`. 
#[derive(Debug, Clone)]
pub enum LayoutItem<'input, 'item> {
    /// Padding.
    Padding,
    /// A member.
    Member(&'item Member<'input>),
    /// A variant part.
    VariantPart(&'item VariantPart<'input>),
    /// An inherited type.
    Inherit(&'item Inherit),
}

// Builds the ordered layout (members, inherits, variant parts, plus synthesized
// padding entries) of a struct-like type. Offsets are rebased so that
// `base_bit_offset` becomes 0; `bit_size` is the total size of the container,
// used to detect trailing padding.
fn layout<'input, 'item>(
    members: &'item [Member<'input>],
    inherits: &'item [Inherit],
    variant_parts: &'item [VariantPart<'input>],
    base_bit_offset: u64,
    bit_size: Option<u64>,
    hash: &FileHash,
) -> Vec<Layout<'input, 'item>> {
    // Collect every item with its rebased offset and (possibly unknown) size.
    let mut members: Vec<_> = members
        .iter()
        .map(|member| Layout {
            bit_offset: member.bit_offset() - base_bit_offset,
            bit_size: member.bit_size(hash).into(),
            item: LayoutItem::Member(member),
        })
        .collect();
    members.extend(inherits.iter().map(|inherit| Layout {
        bit_offset: inherit.bit_offset() - base_bit_offset,
        bit_size: inherit.bit_size(hash).into(),
        item: LayoutItem::Inherit(inherit),
    }));
    members.extend(variant_parts.iter().map(|variant_part| Layout {
        bit_offset: variant_part.bit_offset() - base_bit_offset,
        bit_size: variant_part.bit_size(hash).into(),
        item: LayoutItem::VariantPart(variant_part),
    }));
    members.sort_by(|a, b| {
        a.bit_offset
            .cmp(&b.bit_offset)
            .then_with(|| a.bit_size.cmp(&b.bit_size))
    });

    // Walk the sorted items in reverse, inserting a padding entry wherever an
    // item's end falls short of the start of the item that follows it.
    // `next_bit_offset` starts as the container size so trailing padding is
    // detected too; it is `None` when no gap check is possible.
    let mut next_bit_offset = bit_size;
    let mut layout = Vec::new();
    for member in members.into_iter().rev() {
        if let (Some(bit_size), Some(next_bit_offset)) = (member.bit_size.get(), next_bit_offset) {
            let bit_offset = member.bit_offset + bit_size;
            if next_bit_offset > bit_offset {
                let bit_size = next_bit_offset - bit_offset;
                layout.push(Layout {
                    bit_offset,
                    bit_size: Size::new(bit_size),
                    item: LayoutItem::Padding,
                });
            }
        }
        next_bit_offset = Some(member.bit_offset);
        layout.push(member);
    }
    // Leading padding: the first item (last pushed) may not start at offset 0.
    if let Some(first_bit_offset) = layout.last().map(|x| x.bit_offset) {
        if first_bit_offset > 0 {
            layout.push(Layout {
                bit_offset: 0,
                bit_size: Size::new(first_bit_offset),
                item: LayoutItem::Padding,
            });
        }
    }
    // Built back-to-front above, so restore ascending offset order.
    layout.reverse();
    layout
}

/// An enumeration type.
#[derive(Debug, Default, Clone)]
pub struct EnumerationType<'input> {
    pub(crate) offset: TypeOffset,
    pub(crate) namespace: Option<Arc<Namespace<'input>>>,
    pub(crate) name: Option<&'input str>,
    pub(crate) source: Source<'input>,
    pub(crate) declaration: bool,
    pub(crate) ty: TypeOffset,
    pub(crate) byte_size: Size,
}

impl<'input> EnumerationType<'input> {
    /// The namespace of the type.
    pub fn namespace(&self) -> Option<&Namespace> {
        self.namespace.as_deref()
    }

    /// The name of the type.
    #[inline]
    pub fn name(&self) -> Option<&str> {
        self.name
    }

    /// The source information for the type.
    #[inline]
    pub fn source(&self) -> &Source<'input> {
        &self.source
    }

    /// Return true if this is a declaration.
    #[inline]
    pub fn is_declaration(&self) -> bool {
        self.declaration
    }

    /// The underlying type of the enumeration.
    #[inline]
    pub fn ty<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.ty)
    }

    /// The size in bytes of an instance of this type.
    pub fn byte_size(&self, hash: &FileHash) -> Option<u64> {
        // Explicit size wins; otherwise use the underlying type's size.
        if self.byte_size.is_some() {
            self.byte_size.get()
        } else {
            self.ty(hash).and_then(|v| v.byte_size(hash))
        }
    }

    /// The enumerators of this type.
    // Enumerators are not stored inline; they are looked up from the file by
    // this enumeration's own offset.
    pub fn enumerators(&self, hash: &FileHash<'input>) -> Vec<Enumerator<'input>> {
        hash.file.get_enumerators(self.offset)
    }

    /// Compare the identifying information of two types.
    ///
    /// Enumerations are considered equal if their names are equal.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(a: &EnumerationType, b: &EnumerationType) -> cmp::Ordering {
        Namespace::cmp_ns_and_name(a.namespace(), a.name(), b.namespace(), b.name())
    }
}

/// A member of an enumeration.
#[derive(Debug, Default, Clone)]
pub struct Enumerator<'input> {
    pub(crate) name: Option<&'input str>,
    pub(crate) value: Option<i64>,
}

impl<'input> Enumerator<'input> {
    /// The name of the enumerator.
    #[inline]
    pub fn name(&self) -> Option<&str> {
        self.name
    }

    /// The value of the enumerator.
    #[inline]
    pub fn value(&self) -> Option<i64> {
        self.value
    }
}

/// A type for an array of elements.
#[derive(Debug, Default, Clone)]
pub struct ArrayType<'input> {
    pub(crate) ty: TypeOffset,
    // `count` holds the single dimension; `counts` is used instead when the
    // array has multiple dimensions (see `counts_as_slice`).
    pub(crate) count: Size,
    pub(crate) counts: Box<[Size]>,
    pub(crate) byte_size: Size,
    pub(crate) phantom: marker::PhantomData<&'input str>,
}

impl<'input> ArrayType<'input> {
    /// The type of the elements in the array.
    pub fn element_type<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.ty)
    }

    /// The size in bytes of an element in the array.
    pub fn element_byte_size<'a>(&self, hash: &'a FileHash<'input>) -> Option<u64> {
        let ty = self.element_type(hash)?;
        ty.byte_size(hash)
    }

    /// The size in bytes of an instance of this type.
    pub fn byte_size(&self, hash: &FileHash) -> Option<u64> {
        if self.byte_size.is_some() {
            self.byte_size.get()
        } else if let Some(mut size) = self.element_byte_size(hash) {
            // Multiply the element size by every known dimension; any unknown
            // dimension makes the total unknown.
            let counts = self.counts_as_slice();
            if counts.is_empty() {
                return None;
            }
            for count in counts.iter().copied() {
                if let Some(count) = count.get() {
                    size *= count;
                } else {
                    return None;
                }
            }
            Some(size)
        } else {
            None
        }
    }

    // Unifies the one-dimension (`count`) and multi-dimension (`counts`)
    // representations into a single slice view.
    fn counts_as_slice(&self) -> &[Size] {
        if self.counts.is_empty() {
            std::slice::from_ref(&self.count)
        } else {
            &self.counts[..]
        }
    }

    /// The number of elements in each dimension of the array.
    ///
    /// `None` is used for unknown dimensions.
    pub fn counts(&self) -> impl Iterator<Item = Option<u64>> + '_ {
        self.counts_as_slice().iter().map(|v| v.get())
    }

    /// Compare the identifying information of two types.
    ///
    /// Array types are considered equal if the element identifiers and counts are equal.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(
        hash_a: &FileHash,
        a: &ArrayType,
        hash_b: &FileHash,
        b: &ArrayType,
    ) -> cmp::Ordering {
        match (a.element_type(hash_a), b.element_type(hash_b)) {
            (Some(ref ty_a), Some(ref ty_b)) => {
                let ord = Type::cmp_id(hash_a, ty_a, hash_b, ty_b);
                if ord != cmp::Ordering::Equal {
                    return ord;
                }
            }
            (Some(_), None) => {
                return cmp::Ordering::Less;
            }
            (None, Some(_)) => {
                return cmp::Ordering::Greater;
            }
            (None, None) => {}
        }
        // NOTE(review): only the single-dimension `count` is compared here;
        // multi-dimensional `counts` are ignored, so arrays differing only in
        // later dimensions compare equal — confirm whether that is intended.
        a.count.cmp(&b.count)
    }
}

/// A subrange of another type.
#[derive(Debug, Default, Clone)]
pub struct SubrangeType<'input> {
    pub(crate) name: Option<&'input str>,
    pub(crate) ty: TypeOffset,
    pub(crate) lower: Option<u64>,
    pub(crate) upper: Option<u64>,
    pub(crate) byte_size: Size,
}

impl<'input> SubrangeType<'input> {
    /// The name of the subrange.
    #[inline]
    pub fn name(&self) -> Option<&'input str> {
        self.name
    }

    /// The underlying type of the subrange.
    #[inline]
    pub fn ty<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.ty)
    }

    /// The lower bound of the subrange (inclusive).
    #[inline]
    pub fn lower(&self) -> Option<u64> {
        self.lower
    }

    /// The upper bound of the subrange (inclusive).
    #[inline]
    pub fn upper(&self) -> Option<u64> {
        self.upper
    }

    /// The size in bytes of an instance of this type.
    pub fn byte_size(&self, hash: &FileHash) -> Option<u64> {
        if self.byte_size.is_some() {
            self.byte_size.get()
        } else {
            self.ty(hash).and_then(|v| v.byte_size(hash))
        }
    }

    /// Compare the identifying information of two types.
    ///
    /// Subrange types are considered equal if the underlying type and bounds are equal.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(
        hash_a: &FileHash,
        a: &SubrangeType,
        hash_b: &FileHash,
        b: &SubrangeType,
    ) -> cmp::Ordering {
        match (a.ty(hash_a), b.ty(hash_b)) {
            (Some(ref ty_a), Some(ref ty_b)) => {
                let ord = Type::cmp_id(hash_a, ty_a, hash_b, ty_b);
                if ord != cmp::Ordering::Equal {
                    return ord;
                }
            }
            (Some(_), None) => {
                return cmp::Ordering::Less;
            }
            (None, Some(_)) => {
                return cmp::Ordering::Greater;
            }
            (None, None) => {}
        }
        a.lower.cmp(&b.lower).then_with(|| a.upper.cmp(&b.upper))
    }
}

/// A function type.
#[derive(Debug, Default, Clone)]
pub struct FunctionType<'input> {
    pub(crate) parameters: Vec<ParameterType<'input>>,
    pub(crate) return_type: TypeOffset,
    pub(crate) byte_size: Size,
}

impl<'input> FunctionType<'input> {
    /// The parameters of the function.
    #[inline]
    pub fn parameters(&self) -> &[ParameterType<'input>] {
        &self.parameters
    }

    /// The return type of the function.
    #[inline]
    pub fn return_type<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.return_type)
    }

    /// The size in bytes of an instance of this type.
    #[inline]
    pub fn byte_size(&self) -> Option<u64> {
        self.byte_size.get()
    }

    /// Compare the identifying information of two types.
    ///
    /// Function types are considered equal if they have the same parameter types and
    /// return types. Parameter names are ignored.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(
        hash_a: &FileHash,
        a: &FunctionType,
        hash_b: &FileHash,
        b: &FunctionType,
    ) -> cmp::Ordering {
        // Compare pairwise over the shared prefix, then by parameter count,
        // then by return type.
        for (parameter_a, parameter_b) in a.parameters.iter().zip(b.parameters.iter()) {
            let ord = ParameterType::cmp_id(hash_a, parameter_a, hash_b, parameter_b);
            if ord != cmp::Ordering::Equal {
                return ord;
            }
        }

        let ord = a.parameters.len().cmp(&b.parameters.len());
        if ord != cmp::Ordering::Equal {
            return ord;
        }

        match (a.return_type(hash_a), b.return_type(hash_b)) {
            (Some(ref ty_a), Some(ref ty_b)) => {
                let ord = Type::cmp_id(hash_a, ty_a, hash_b, ty_b);
                if ord != cmp::Ordering::Equal {
                    return ord;
                }
            }
            (Some(_), None) => {
                return cmp::Ordering::Less;
            }
            (None, Some(_)) => {
                return cmp::Ordering::Greater;
            }
            (None, None) => {}
        }

        cmp::Ordering::Equal
    }
}

/// The type of a function parameter.
#[derive(Debug, Default, Clone)]
pub struct ParameterType<'input> {
    pub(crate) offset: ParameterOffset,
    pub(crate) name: Option<&'input str>,
    pub(crate) ty: TypeOffset,
}

impl<'input> ParameterType<'input> {
    /// The name of the parameter.
    #[inline]
    pub fn name(&self) -> Option<&'input str> {
        self.name
    }

    /// The type of the parameter.
    #[inline]
    pub fn ty<'a>(&self, hash: &'a FileHash<'input>) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.ty)
    }

    /// Compare the identifying information of two types.
    ///
    /// Parameters are considered equal if they have the same types.
    /// Parameter names are ignored.
    pub fn cmp_id(
        hash_a: &FileHash,
        a: &ParameterType,
        hash_b: &FileHash,
        b: &ParameterType,
    ) -> cmp::Ordering {
        match (a.ty(hash_a), b.ty(hash_b)) {
            (Some(ref ty_a), Some(ref ty_b)) => Type::cmp_id(hash_a, ty_a, hash_b, ty_b),
            (Some(_), None) => cmp::Ordering::Less,
            (None, Some(_)) => cmp::Ordering::Greater,
            (None, None) => cmp::Ordering::Equal,
        }
    }
}

/// An unspecified type.
#[derive(Debug, Default, Clone)]
pub struct UnspecifiedType<'input> {
    // Enclosing namespace, if any.
    pub(crate) namespace: Option<Arc<Namespace<'input>>>,
    // Optional type name.
    pub(crate) name: Option<&'input str>,
}

impl<'input> UnspecifiedType<'input> {
    /// The namespace of the type.
    pub fn namespace(&self) -> Option<&Namespace> {
        self.namespace.as_deref()
    }

    /// The name of the type.
    #[inline]
    pub fn name(&self) -> Option<&str> {
        self.name
    }

    /// Compare the identifying information of two types.
    ///
    /// Unspecified types are considered equal if they have the same name.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(a: &UnspecifiedType, b: &UnspecifiedType) -> cmp::Ordering {
        // Delegates entirely to namespace + name comparison.
        Namespace::cmp_ns_and_name(a.namespace(), a.name(), b.namespace(), b.name())
    }
}

/// A type for a pointer to a member of a containing type.
#[derive(Debug, Default, Clone)]
pub struct PointerToMemberType {
    // Offset of the member's type.
    pub(crate) ty: TypeOffset,
    // Offset of the containing (class/struct) type.
    pub(crate) containing_ty: TypeOffset,
    // Explicit size, when the debug info provides one.
    pub(crate) byte_size: Size,
    // TODO: hack
    pub(crate) address_size: Option<u64>,
}

impl PointerToMemberType {
    /// The type of the member.
    #[inline]
    pub fn member_type<'a, 'input>(
        &self,
        hash: &'a FileHash<'input>,
    ) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.ty)
    }

    /// The containing type.
    #[inline]
    pub fn containing_type<'a, 'input>(
        &self,
        hash: &'a FileHash<'input>,
    ) -> Option<Cow<'a, Type<'input>>> {
        Type::from_offset(hash, self.containing_ty)
    }

    /// The size in bytes of an instance of this type.
    pub fn byte_size(&self, hash: &FileHash) -> Option<u64> {
        if self.byte_size.is_some() {
            return self.byte_size.get();
        }
        // TODO: this probably depends on the ABI
        // Heuristic: pointers to member functions are assumed to be twice
        // the address size; pointers to data members one address.
        self.member_type(hash).and_then(|ty| {
            if ty.is_function(hash) {
                self.address_size.map(|v| v * 2)
            } else {
                self.address_size
            }
        })
    }

    /// Compare the identifying information of two types.
    ///
    /// Pointer to member types are considered equal if both the member type and containing
    /// type are equal.
    ///
    /// This can be used to sort, and to determine if two types refer to the same definition
    /// (even if there are differences in the definitions).
    pub fn cmp_id(
        hash_a: &FileHash,
        a: &PointerToMemberType,
        hash_b: &FileHash,
        b: &PointerToMemberType,
    ) -> cmp::Ordering {
        // Order primarily by containing type (known sorts before unknown).
        match (a.containing_type(hash_a), b.containing_type(hash_b)) {
            (Some(ref ty_a), Some(ref ty_b)) => {
                let ord = Type::cmp_id(hash_a, ty_a, hash_b, ty_b);
                if ord != cmp::Ordering::Equal {
                    return ord;
                }
            }
            (Some(_), None) => {
                return cmp::Ordering::Less;
            }
            (None, Some(_)) => {
                return cmp::Ordering::Greater;
            }
            (None, None) => {}
        }
        // Containing types tie: order by the member type.
        match (a.member_type(hash_a), b.member_type(hash_b)) {
            (Some(ref ty_a), Some(ref ty_b)) => {
                let ord = Type::cmp_id(hash_a, ty_a, hash_b, ty_b);
                if ord != cmp::Ordering::Equal {
                    return ord;
                }
            }
            (Some(_), None) => {
                return cmp::Ordering::Less;
            }
            (None, Some(_)) => {
                return cmp::Ordering::Greater;
            }
            (None, None) => {}
        }
        cmp::Ordering::Equal
    }
}
use crate::evdev::{Device, UInputDevice}; use crate::foreign::*; use super::{DeviceId, Error, Result}; use std::cell::Cell; pub struct DestinationDevice { id: DeviceId, uidev: UInputDevice, components: InternalComponents, should_sync: Cell<bool>, } #[derive(Clone)] pub enum Action { RelativeMove { code: u32, amount100: i32, }, KeyUp(u32), KeyDown(u32), } #[derive(Default)] struct InternalComponents { relative: Option<Vec<Cell<RelativeComponent>>>, key: Option<Vec<Cell<KeyComponent>>>, } #[derive(Clone, Copy, Default)] struct RelativeComponent { acc: f32, } #[derive(Clone, Copy, Default)] struct KeyComponent { pressed: bool, } // TODO: implement device capability // pub struct DeviceCapability { // } impl DestinationDevice { pub fn new(id: DeviceId) -> Result<DestinationDevice> { let mut dev = Device::new().unwrap(); dev.set_name("evdev device"); { dev.enable_event(EV_REL, REL_X); dev.enable_event(EV_REL, REL_Y); dev.enable_event(EV_REL, REL_WHEEL); dev.enable_event(EV_REL, REL_HWHEEL); dev.enable_event(EV_REL, REL_WHEEL_HI_RES); dev.enable_event(EV_REL, REL_HWHEEL_HI_RES); dev.enable_event(EV_KEY, BTN_LEFT); dev.enable_event(EV_KEY, BTN_RIGHT); dev.enable_event(EV_KEY, BTN_MIDDLE); dev.enable_event(EV_KEY, BTN_SIDE); dev.enable_event(EV_KEY, BTN_EXTRA); dev.enable_event(EV_KEY, BTN_FORWARD); dev.enable_event(EV_KEY, BTN_BACK); dev.enable_event(EV_KEY, BTN_TASK); for code in 1..=248 { dev.enable_event(EV_KEY, code); } } let uidev = UInputDevice::new_from_device(dev)?; let mut components = InternalComponents::default(); components.relative = Some(vec![Default::default(); REL_CNT as usize]); components.key = Some(vec![Default::default(); KEY_CNT as usize]); Ok(DestinationDevice { id, uidev, components, should_sync: Cell::from(false), }) } pub fn id(&self) -> DeviceId { self.id.clone() } pub fn write_event(&self, type_: u32, code: u32, value: i32) -> Result<()> { self.should_sync.set(true); Ok(self.uidev.write_event(type_, code, value)?) 
} pub fn perform_action(&self, action: Action) -> Result<()> { match action { Action::RelativeMove{code, amount100} => self.move_relative(code, (amount100 as f32) / 100f32), Action::KeyDown(code) => self.press_key(code, true ), Action::KeyUp (code) => self.press_key(code, false), } } pub fn move_relative(&self, code: u32, amount: f32) -> Result<()> { let component_cell = self.components.relative.as_ref() .ok_or_else(|| Error::Message("invalid component: Relative".into()))? .get(code as usize) .ok_or_else(|| Error::Message(format!("invalid event code: {}", code)))? ; let mut component = component_cell.get(); let mut acc = component.acc + amount; let trunc = acc.trunc(); if trunc.abs() > 0.0f32 { self.write_event(EV_REL, code, trunc as i32)?; acc -= trunc; } component.acc = acc; component_cell.set(component); Ok(()) } pub fn press_key(&self, code: u32, press: bool) -> Result<()> { let component_cell = self.components.key.as_ref() .ok_or_else(|| Error::Message("invalid component: Relative".into()))? .get(code as usize) .ok_or_else(|| Error::Message(format!("invalid event code: {}", code)))? ; let mut component = component_cell.get(); if component.pressed != press { self.write_event(EV_KEY, code, if press { 1 } else { 0 })?; component.pressed = press; component_cell.set(component); } Ok(()) } pub fn sync(&self) { if self.should_sync.get() { let _ = self.write_event(EV_SYN, SYN_REPORT, 0); self.should_sync.set(false); } } }
//! # GameBuilder Helper
//! Utility for creating complex games with non standard komi, handicap etc...
//! # Example
//! ```
//! use crate::goban::rules::game_builder::GameBuilder;
//! use crate::goban::rules::Rule;
//!
//! let mut builder = GameBuilder::default();
//! let game = builder
//!     .rule(Rule::Japanese)
//!     .size((19,19))
//!     .handicap(&[(3,3), (4,4)])
//!     .komi(10.)
//!     .build();
//! ```

use crate::pieces::goban::Goban;
use crate::pieces::stones::Color;
use crate::pieces::uint;
use crate::pieces::util::coord::Point;
use crate::rules::game::Game;
use crate::rules::Rule::Chinese;
use crate::rules::{EndGame, Move, Player, Rule};

/// Accumulates configuration (size, komi, rule, handicap, pre-played moves)
/// and produces a ready-to-play `Game` via `build()`.
pub struct GameBuilder {
    size: (u32, u32),
    komi: f32,
    // Set once `komi()` is called explicitly; stops `rule()` from
    // overwriting a user-chosen komi with the rule's default.
    manual_komi: bool,
    black_player: String,
    white_player: String,
    rule: Rule,
    handicap_points: Vec<Point>,
    turn: Player,
    moves: Vec<Move>,
    outcome: Option<EndGame>,
}

impl GameBuilder {
    /// Creates a builder with Chinese rules, a 19x19 board and no handicap.
    fn new() -> GameBuilder {
        GameBuilder {
            size: (19, 19),
            komi: Chinese.komi(),
            manual_komi: false,
            black_player: "".to_string(),
            white_player: "".to_string(),
            handicap_points: vec![],
            rule: Chinese,
            turn: Player::Black,
            moves: vec![],
            outcome: None,
        }
    }

    /// Sets moves to replay after the initial position is set up.
    pub fn moves(&mut self, moves: &[Move]) -> &mut Self {
        self.moves = moves.to_vec();
        self
    }

    /// Sets a predetermined game result.
    pub fn outcome(&mut self, outcome: EndGame) -> &mut Self {
        self.outcome = Some(outcome);
        self
    }

    /// Overrides the turn because it's a game with handicap. So White begins.
    pub fn handicap(&mut self, points: &[Point]) -> &mut Self {
        self.handicap_points = points.to_vec();
        self.turn = Player::White;
        self
    }

    /// Sets the board dimensions (width, height).
    pub fn size(&mut self, size: (u32, u32)) -> &mut Self {
        self.size = size;
        self
    }

    /// Sets an explicit komi; later `rule()` calls will not override it.
    pub fn komi(&mut self, komi: f32) -> &mut Self {
        self.komi = komi;
        self.manual_komi = true;
        self
    }

    /// Sets the Black player's name.
    pub fn black_player(&mut self, black_player_name: &str) -> &mut Self {
        self.black_player = black_player_name.to_string();
        self
    }

    /// Sets the rule set; adopts its default komi unless one was set manually.
    pub fn rule(&mut self, rule: Rule) -> &mut Self {
        self.rule = rule;
        if !self.manual_komi {
            self.komi = rule.komi();
        }
        self
    }

    /// Sets the White player's name.
    pub fn white_player(&mut self, white_player_name: &str) -> &mut Self {
        self.white_player = white_player_name.to_string();
        self
    }

    /// Builds the `Game`: creates the goban, places handicap stones for
    /// Black, then replays any configured moves.
    pub fn build(&mut self) -> Result<Game, String> {
        let mut goban: Goban = Goban::new((self.size.0 as uint, self.size.1 as uint));

        // Handicap stones go down before the first move.
        goban.push_many(&self.handicap_points, Color::Black);

        let mut g = Game {
            goban,
            passes: 0,
            prisoners: (0, 0),
            outcome: self.outcome,
            turn: self.turn,
            komi: self.komi,
            rule: self.rule,
            handicap: self.handicap_points.len() as u8,
            #[cfg(feature = "history")]
            plays: vec![],
            hashes: Default::default(),
            last_hash: 0,
            ko_point: None,
        };

        for &m in &self.moves {
            g.play(m); // without verifications of Ko
        }

        Ok(g)
    }
}

impl Default for GameBuilder {
    fn default() -> Self {
        Self::new()
    }
}
use std::convert::{Infallible, TryFrom, TryInto};

use bitflags::{bitflags, BitFlags};

/// Common interface over the bit-flag types below: convertible to/from raw
/// `u32` bits and testable against raw values.
pub trait Flags: BitFlags<u32> + TryFrom<u32> + TryInto<u32> + Default {
    /// Returns `true` if every flag encoded in `value` is set in `self`.
    ///
    /// NOTE(review): the `TryFrom` error is mapped to
    /// `crate::Error::Unexpected` and then immediately `unwrap()`ed, so an
    /// invalid `value` panics here — confirm callers only pass valid bits.
    fn test(&self, value: u32) -> bool {
        let flags = value.try_into().or(Err(crate::Error::Unexpected)).unwrap();
        self.contains(flags)
    }
}

bitflags! {
    // Per-record flags as stored in the plugin file format.
    pub struct RecordFlags: u32 {
        const DELETED = 0x00000020;
        const CONSTANT = 0x00000040;
        const MUST_UPDATE_ANIMS = 0x00000100;
        const HIDDEN_FROM_LOCAL_MAP = 0x00000200;
        const QUEST_ITEM = 0x00000400;
        const INITIALLY_DISABLED = 0x00000800;
        const IGNORED = 0x00001000;
        const VISIBLE_WHEN_DISTANT = 0x00008000;
        const RANDOM_ANIMATION_START = 0x00010000;
        const DANGEROUS = 0x00020000;
        const COMPRESSED = 0x00040000;
        const CANNOT_WAIT = 0x00080000;
        const IGNORE_OBJECT_INTERACTION = 0x00100000;
        const MARKER = 0x00800000;
        const OBSTACLE = 0x02000000;
        const NAVMESH_GEN_FILTER = 0x04000000;
        const NAVMESH_GEN_BBOX = 0x08000000;
        const REFLECTED_BY_WATER = 0x10000000;
        const NO_HAVOK_SETTLE = 0x20000000;
        const NO_RESPAWN = 0x40000000;
        const MULTI_BOUND = 0x80000000;
    }
}

impl Default for RecordFlags {
    // No flags set by default.
    fn default() -> Self {
        RecordFlags::empty()
    }
}

impl TryFrom<u32> for RecordFlags {
    type Error = crate::Error;

    // Fails if `value` contains bits that are not defined flags.
    fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
        RecordFlags::from_bits(value).ok_or(crate::Error::InvalidFlags(value))
    }
}

impl TryInto<u32> for RecordFlags {
    type Error = Infallible;

    // Converting to raw bits can never fail.
    fn try_into(self) -> Result<u32, Self::Error> {
        Ok(self.bits)
    }
}

impl Flags for RecordFlags {}

bitflags! {
    // Plugin-header flags.
    pub struct PluginFlags: u32 {
        const MASTER = 0x0001;
        const LOCALIZED = 0x0080;
        const LIGHT = 0x0200;
    }
}

impl Default for PluginFlags {
    // No flags set by default.
    fn default() -> Self {
        PluginFlags::empty()
    }
}

impl TryFrom<u32> for PluginFlags {
    type Error = crate::Error;

    // Fails if `value` contains bits that are not defined flags.
    fn try_from(value: u32) -> std::result::Result<Self, Self::Error> {
        PluginFlags::from_bits(value).ok_or(crate::Error::InvalidFlags(value))
    }
}

impl TryInto<u32> for PluginFlags {
    type Error = Infallible;

    // Converting to raw bits can never fail.
    fn try_into(self) -> Result<u32, Self::Error> {
        Ok(self.bits)
    }
}

impl Flags for PluginFlags {}
enum TrafficLight { Green, Red, Yellow } impl TrafficLight { fn time(&self) -> u8 { match self { TrafficLight::Green => 10, TrafficLight::Red => 20, TrafficLight::Yellow => 30 } } } pub fn test_lights() { let green = TrafficLight::Green; let red = TrafficLight::Red; let yellow = TrafficLight::Yellow; assert!(green.time() == 10); assert!(red.time() == 20); assert!(yellow.time() == 30); println!("lights cases all passed!"); }
use std::iter;
use std::ops::{Add, Sub, Neg, Mul};

/// Additive group: types with a zero, addition, subtraction and negation.
///
/// NOTE(review): the default `sub` is defined via `neg` and the default
/// `neg` via `sub`, so an implementor must override at least one of them or
/// the defaults recurse forever. All impls below override both.
pub trait Additive where Self: Sized {
    /// The additive identity.
    const ZERO: Self;
    fn add(self, n: Self) -> Self;
    fn sub(self, n: Self) -> Self {
        self.add(n.neg())
    }
    fn neg(self) -> Self {
        Self::ZERO.sub(self)
    }
}

// Implements `Additive` for primitive types; `$body` supplies the
// negation expression with the value bound to `$id`.
macro_rules! additive_impl_core {
    ($id:ident => $body:expr, $($t:ty)*) => ($(
        impl Additive for $t {
            const ZERO: $t = 0 as $t;

            #[inline]
            fn add(self, n: $t) -> $t { self + n }

            #[inline]
            fn sub(self, n: $t) -> $t { self - n }

            #[inline]
            fn neg(self) -> $t {
                let $id = self;
                $body
            }
        }
    )*)
}

// Signed integers and floats negate directly.
macro_rules! additive_impl_numeric {
    ($($t:ty)*) => { additive_impl_core!{ x => -x, $($t)*} }
}

// Unsigned integers use two's-complement negation: -x == (!x) + 1.
// BUG FIX: this previously read `!x.wrapping_add(1)`, which parses as
// `!(x + 1)` (method calls bind tighter than unary `!`) and yields -x-2,
// not -x. The complement must be taken before the increment.
macro_rules! additive_impl_unsigned {
    ($($t:ty)*) => { additive_impl_core!{ x => { (!x).wrapping_add(1) }, $($t)*} }
}

additive_impl_numeric! { isize i8 i16 i32 i64 f32 f64 }
additive_impl_unsigned! { usize u8 u16 u32 u64 }

/// Ring: an additive group with a multiplicative identity and product.
pub trait Ring where Self: Additive + Clone {
    /// The multiplicative identity.
    const ONE: Self;
    fn mul(self, n: Self) -> Self;
    /// Raises `self` to the `n`-th power; `pow(0)` is `ONE`.
    fn pow(self, n: u32) -> Self {
        iter::repeat(self).take(n as usize).fold(Ring::ONE, Ring::mul)
    }
}

// Implements `Ring` for primitives; `$body` supplies the `pow`
// implementation with base `$id` and exponent `$n` in scope.
macro_rules! ring_impl_core {
    ($id:ident, $n:ident => $body:expr, $($t:ty)*) => ($(
        impl Ring for $t {
            const ONE: $t = 1 as $t;

            fn mul(self, n: $t) -> $t { self * n }

            fn pow(self, n: u32) -> $t {
                let $id = self;
                let $n = n;
                $body
            }
        }
    )*)
}

// Integers use the built-in `pow`; floats use `powi`.
macro_rules! ring_impl_int {
    ($($t:ty)*) => { ring_impl_core!{ x, n => x.pow(n), $($t)*} }
}

macro_rules! ring_impl_float {
    ($($t:ty)*) => { ring_impl_core!{ x, n => x.powi(n as i32), $($t)*} }
}

ring_impl_int! { usize u8 u16 u32 u64 isize i8 i16 i32 i64 }
ring_impl_float! { f32 f64 }

/// Module over the ring `T`: an additive group with scalar multiplication.
pub trait Module<T> where Self: Additive, T: Ring {
    fn scale(self, n: T) -> Self;
}

// Every primitive number is a module over itself.
macro_rules! module_impl {
    ($($t:ty)*) => ($(
        impl Module<$t> for $t {
            fn scale(self, n: $t) -> $t { self * n }
        }
    )*)
}

module_impl! { usize u8 u16 u32 u64 isize i8 i16 i32 i64 f32 f64 }

/* TODO: Implement Field */

/// Ring with a square root, used for vector norms.
pub trait Algebraic where Self: Ring {
    fn sqrt(self) -> Self;
}

macro_rules! algebraic_impl {
    ($($t:ty)*) => ($(
        impl Algebraic for $t {
            fn sqrt(self) -> $t { self.sqrt() }
        }
    )*)
}

algebraic_impl! { f32 f64 }

/// Two-dimensional vector over `T`.
///
/// NOTE(review): the lowercase name breaks the `UpperCamelCase` convention,
/// but renaming would break every caller, so it is kept.
#[derive(Debug, Clone, Copy, Default)]
pub struct vec2<T>{
    pub x: T,
    pub y: T,
}

impl<T> vec2<T> {
    /// Creates a vector from its components.
    pub fn new(x: T, y: T) -> vec2<T> {
        vec2 {x, y}
    }
}

impl<T: Additive> Add for vec2<T> {
    type Output = vec2<T>;

    // Component-wise addition.
    fn add(self, v: vec2<T>) -> vec2<T> {
        vec2 {
            x: self.x.add(v.x),
            y: self.y.add(v.y),
        }
    }
}

impl<T: Additive> Sub for vec2<T> {
    type Output = vec2<T>;

    // Component-wise subtraction.
    fn sub(self, v: vec2<T>) -> vec2<T> {
        vec2 {
            x: self.x.sub(v.x),
            y: self.y.sub(v.y),
        }
    }
}

impl<T: Additive> Neg for vec2<T> {
    type Output = vec2<T>;

    // Component-wise negation.
    fn neg(self) -> vec2<T> {
        vec2 {
            x: self.x.neg(),
            y: self.y.neg(),
        }
    }
}

impl<T: Additive + Copy> Additive for vec2<T> {
    const ZERO: vec2<T> = vec2 {
        x: T::ZERO,
        y: T::ZERO,
    };

    // Delegates to the std operator impls above.
    fn add(self, v: Self) -> Self { self + v }

    fn sub(self, v: Self) -> Self { self - v }

    fn neg(self) -> Self { -self }
}

impl<T, K> Mul<K> for vec2<T> where T: Module<K>, K: Ring + Copy {
    type Output = vec2<T>;

    // Scalar multiplication via the module structure of `T`.
    fn mul(self, n: K) -> vec2<T> {
        vec2 {
            x: self.x.scale(n),
            y: self.y.scale(n),
        }
    }
}

impl<T> vec2<T>
where
    T: Ring + Copy,
{
    /// Squared Euclidean norm: x² + y².
    pub fn normsq(self) -> T {
        self.x.pow(2).add(self.y.pow(2))
    }

    /// Euclidean norm, computed in the (possibly wider) type `A`.
    pub fn norm<A: Algebraic + From<T>>(self) -> A {
        A::from(self.normsq()).sqrt()
    }
}
pub mod rom; pub use self::rom::Rom; pub mod basic_mmu; pub trait Mmu { fn read_byte(&self, addr: u16) -> u8; fn write_byte(&mut self, addr: u16, value: u8); fn rom_len(&self) -> usize; }
use amethyst::{
    prelude::*,
    renderer::{
        plugins::{RenderFlat2D, RenderToWindow},
        types::DefaultBackend,
        RenderingBundle, Camera, ImageFormat, SpriteRender, SpriteSheet, SpriteSheetFormat,
        Texture,
    },
    ecs::prelude::*,
    utils::application_root_dir,
    core::transform::{TransformBundle, Transform},
    core::math::Vector2,
    assets::{PrefabLoaderSystemDesc, Handle, AssetStorage, Loader},
};
use specs_physics::{
    nphysics::{
        math::{Vector, Velocity},
        object::{ColliderDesc, BodyPartHandle, RigidBodyDesc, BodyStatus},
    },
    PhysicsBundle, BodyComponent, ColliderComponent,
    ncollide::shape::{Cuboid, ShapeHandle},
};

/// Entry point: sets up rendering, transforms and 2D physics (gravity
/// pointing down the y axis), then runs the `Playing` state.
fn main() -> amethyst::Result<()> {
    amethyst::start_logger(Default::default());

    let app_root = application_root_dir()?;
    let assets_dir = app_root.join("assets");
    let display_config_path = assets_dir.join("display.ron");

    let game_data = GameDataBuilder::default()
        .with_bundle(TransformBundle::new())?
        .with_bundle(
            RenderingBundle::<DefaultBackend>::new()
                .with_plugin(
                    RenderToWindow::from_config_path(display_config_path)?
                        .with_clear([0.0, 0.0, 0.0, 1.0]),
                )
                .with_plugin(RenderFlat2D::default()),
        )?
        // Gravity of -9.81 along y; no extra physics systems.
        .with_bundle(PhysicsBundle::<f32, Transform>::new(Vector::y() * -9.81, &[]))?;

    let mut game = Application::build(assets_dir, Playing::new())?.build(game_data)?;
    game.run();
    Ok(())
}

/// The single game state: spawns two crates (one dynamic, one static) and a
/// camera, and runs a (currently empty) fixed-step dispatcher.
struct Playing<'a, 'b> {
    // Dispatcher run once per fixed update; empty for now.
    fixed_dispatcher: Dispatcher<'a, 'b>,
    // Set in `on_start` after the sprite sheet is loaded.
    sprite_sheet_handle: Option<Handle<SpriteSheet>>,
}

impl<'a, 'b> Playing<'a, 'b> {
    pub fn new() -> Self {
        let fixed_dispatcher = DispatcherBuilder::new().build();
        Self {
            fixed_dispatcher,
            sprite_sheet_handle: None,
        }
    }
}

impl<'a, 'b> SimpleState for Playing<'a, 'b> {
    fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        let world = data.world;

        self.sprite_sheet_handle.replace(load_sprite_sheet(world));
        initialize_crate(world, self.sprite_sheet_handle.clone().unwrap());
        initialize_camera(world);
    }

    fn fixed_update(&mut self, data: StateData<'_, GameData<'_, '_>>) -> SimpleTrans {
        self.fixed_dispatcher.dispatch(data.world);
        Trans::None
    }
}

/// Loads `logo.png` plus its `logo.ron` sprite definitions from the assets
/// directory and returns a handle to the resulting sprite sheet.
fn load_sprite_sheet(world: &mut World) -> Handle<SpriteSheet> {
    let loader = world.read_resource::<Loader>();
    let texture_storage = world.read_resource::<AssetStorage<Texture>>();
    let texture_handle = loader.load(
        "logo.png",
        ImageFormat::default(),
        (),
        &texture_storage,
    );

    let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>();
    loader.load(
        "logo.ron",
        SpriteSheetFormat(texture_handle),
        (),
        &sprite_sheet_store
    )
}

/// Spawns one falling crate and one static crate for it to land on.
fn initialize_crate(world: &mut World, sprite_sheet_handle: Handle<SpriteSheet>) {
    build_crate(world, sprite_sheet_handle.clone(), 30.0, 70.0, BodyStatus::Dynamic);
    build_crate(world, sprite_sheet_handle, 0.0, 10.0, BodyStatus::Static);
}

/// Creates a crate entity at (x, y) with a rigid body of the given status
/// and a 32x32 (half-extents 16x16) box collider.
fn build_crate(world: &mut World, sprite_sheet_handle: Handle<SpriteSheet>, x: f32, y: f32, status: BodyStatus) {
    let mut local_transform = Transform::default();
    local_transform.set_translation_xyz(x, y, 0.0);

    let sprite_render = SpriteRender {
        sprite_sheet: sprite_sheet_handle,
        sprite_number: 0
    };

    let entity = world
        .create_entity()
        .with(sprite_render)
        .with(local_transform)
        .with(BodyComponent::new(
            RigidBodyDesc::new()
                .translation(Vector2::new(x, y))
                .status(status)
                .velocity(Velocity::new(Vector2::new(0.0, 0.0), 0.0))
                .build()
            )
        )
        .build();

    let shape = ShapeHandle::new(Cuboid::new(Vector::new(16.0, 16.0)));

    // The collider needs the entity it attaches to, so it is inserted after
    // the entity is built.
    // there may be a better way to do this, idk
    world.exec(|mut colliders: WriteStorage<ColliderComponent<f32>>| {
        colliders.insert(entity,
            ColliderComponent(
                ColliderDesc::new(shape)
                    .density(0.05)
                    .build(BodyPartHandle(entity, 0))
            )
        ).unwrap();
    });
}

/// Spawns a 100x100 2D camera slightly in front of the scene.
fn initialize_camera(world: &mut World) {
    let mut transform = Transform::default();
    transform.set_translation_xyz(0.0, 50.0, 1.0);

    let camera = Camera::standard_2d(100.0, 100.0);

    world
        .create_entity()
        .with(transform)
        .with(camera)
        .build();
}
// svd2rust-generated accessors for the ETH_MTLOMR (MTL operation mode)
// register: typed read/write proxies over the raw 32-bit value.
#[doc = "Reader of register ETH_MTLOMR"]
pub type R = crate::R<u32, super::ETH_MTLOMR>;
#[doc = "Writer for register ETH_MTLOMR"]
pub type W = crate::W<u32, super::ETH_MTLOMR>;
#[doc = "Register ETH_MTLOMR `reset()`'s with value 0"]
impl crate::ResetValue for super::ETH_MTLOMR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DTXSTS`"]
pub type DTXSTS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DTXSTS`"]
pub struct DTXSTS_W<'a> {
    w: &'a mut W,
}
impl<'a> DTXSTS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `RAA`"]
pub type RAA_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RAA`"]
pub struct RAA_W<'a> {
    w: &'a mut W,
}
impl<'a> RAA_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `SCHALG`"]
pub type SCHALG_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SCHALG`"]
pub struct SCHALG_W<'a> {
    w: &'a mut W,
}
impl<'a> SCHALG_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Two-bit field at bits 5..=6.
        self.w.bits = (self.w.bits & !(0x03 << 5)) | (((value as u32) & 0x03) << 5);
        self.w
    }
}
#[doc = "Reader of field `CNTPRST`"]
pub type CNTPRST_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CNTPRST`"]
pub struct CNTPRST_W<'a> {
    w: &'a mut W,
}
impl<'a> CNTPRST_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 8.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `CNTCLR`"]
pub type CNTCLR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CNTCLR`"]
pub struct CNTCLR_W<'a> {
    w: &'a mut W,
}
impl<'a> CNTCLR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 9.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
impl R {
    #[doc = "Bit 1 - DTXSTS"]
    #[inline(always)]
    pub fn dtxsts(&self) -> DTXSTS_R {
        DTXSTS_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - RAA"]
    #[inline(always)]
    pub fn raa(&self) -> RAA_R {
        RAA_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bits 5:6 - SCHALG"]
    #[inline(always)]
    pub fn schalg(&self) -> SCHALG_R {
        SCHALG_R::new(((self.bits >> 5) & 0x03) as u8)
    }
    #[doc = "Bit 8 - CNTPRST"]
    #[inline(always)]
    pub fn cntprst(&self) -> CNTPRST_R {
        CNTPRST_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - CNTCLR"]
    #[inline(always)]
    pub fn cntclr(&self) -> CNTCLR_R {
        CNTCLR_R::new(((self.bits >> 9) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 1 - DTXSTS"]
    #[inline(always)]
    pub fn dtxsts(&mut self) -> DTXSTS_W {
        DTXSTS_W { w: self }
    }
    #[doc = "Bit 2 - RAA"]
    #[inline(always)]
    pub fn raa(&mut self) -> RAA_W {
        RAA_W { w: self }
    }
    #[doc = "Bits 5:6 - SCHALG"]
    #[inline(always)]
    pub fn schalg(&mut self) -> SCHALG_W {
        SCHALG_W { w: self }
    }
    #[doc = "Bit 8 - CNTPRST"]
    #[inline(always)]
    pub fn cntprst(&mut self) -> CNTPRST_W {
        CNTPRST_W { w: self }
    }
    #[doc = "Bit 9 - CNTCLR"]
    #[inline(always)]
    pub fn cntclr(&mut self) -> CNTCLR_W {
        CNTCLR_W { w: self }
    }
}
extern crate cxx_build; fn main() { cxx_build::bridge("src/main.rs") .file("src/demo.cpp") .flag_if_supported("-std=c++17") .compile("cxxbridge-demo"); cc::Build::new() .file("src/demo_c.c") .define("FOO", Some("bar")) .include("src") .compile("demo_c"); println!("cargo:rustc-link-lib=demo_c"); println!("cargo:rerun-if-changed=src/demo_c.c"); println!("cargo:rerun-if-changed=src/main.rs"); println!("cargo:rerun-if-changed=src/demo.h"); println!("cargo:rerun-if-changed=src/demo.cpp"); }
use parking_lot::Mutex; use sc_utils::mpsc::{TracingUnboundedReceiver, TracingUnboundedSender}; use sp_consensus_slots::Slot; use sp_runtime::traits::{Block as BlockT, NumberFor}; use std::convert::TryInto; use std::sync::Arc; use subspace_core_primitives::{BlockNumber, Randomness}; /// Data required to produce bundles on executor node. #[derive(PartialEq, Clone, Debug)] pub(super) struct OperatorSlotInfo { /// Slot pub(super) slot: Slot, /// Global randomness pub(super) global_randomness: Randomness, } #[derive(Debug, Clone)] pub(crate) struct BlockInfo<Block> where Block: BlockT, { /// hash of the block. pub hash: Block::Hash, /// hash of the parent block. pub parent_hash: Block::Hash, /// block's number. pub number: NumberFor<Block>, /// Is this the new best block. pub is_new_best: bool, } /// Converts a generic block number to a concrete primitive block number. pub(crate) fn to_number_primitive<N>(block_number: N) -> BlockNumber where N: TryInto<BlockNumber>, { block_number .try_into() .unwrap_or_else(|_| panic!("Block number must fit into u32; qed")) } pub type DomainImportNotificationSinks<Block, CBlock> = Arc<Mutex<Vec<TracingUnboundedSender<DomainBlockImportNotification<Block, CBlock>>>>>; pub type DomainImportNotifications<Block, CBlock> = TracingUnboundedReceiver<DomainBlockImportNotification<Block, CBlock>>; #[derive(Clone, Debug)] pub struct DomainBlockImportNotification<Block: BlockT, CBlock: BlockT> { pub domain_block_hash: Block::Hash, pub consensus_block_hash: CBlock::Hash, }
mod map_deserialize; use self::map_deserialize::N; use networking::ToPlcConn; use std::collections::BTreeMap; #[derive(Debug, Deserialize, Serialize)] pub struct Setting { pub connection_parameter: AmsConn, pub plc: Vec<PlcSetting>, pub versions: BTreeMap<N, VersionSetting>, } #[derive(Debug, Deserialize, Serialize)] pub struct AmsConn { pub ams_net_id: String, pub ams_port: u16, } impl ToPlcConn for AmsConn { fn as_plc_conn(&self) -> [u8; 8] { let net_id: Vec<_> = self .ams_net_id .split('.') .map(|x| u8::from_str_radix(x, 10).unwrap()) .collect(); let mut d = [0u8; 8]; d[..6].clone_from_slice(&net_id[..6]); d[7] = ((self.ams_port >> 8) & 0xff) as u8; d[6] = (self.ams_port & 0xff) as u8; d } } #[derive(Debug, Deserialize, Serialize)] pub struct PlcSetting { pub version: u32, pub ip: String, pub ams_net_id: String, pub ams_port: u16, } #[derive(Debug, Deserialize, Serialize)] pub struct VersionSetting { pub path: String, pub symbol_names: Vec<String>, }
use std::convert::TryFrom;

use anyhow::{anyhow, Result};
use grid::Grid;

use crate::Challenge;
use itertools::Itertools;

pub struct Day11;

// The seating area: a dense grid of floor/empty/occupied cells.
type Layout = Grid<Cell>;

impl Challenge for Day11 {
    const DAY_NUMBER: u32 = 11;

    type InputType = Layout;
    type OutputType = usize;

    /// Part 1: adjacency-based rules, tolerance of 4 occupied neighbors.
    fn part1(input: &Self::InputType) -> Result<Self::OutputType> {
        let equilibrium = find_equilibrium(input, 4, get_occupied_neighbors);
        Ok(count_occupied(&equilibrium))
    }

    /// Part 2: line-of-sight rules, tolerance of 5 visible occupied seats.
    fn part2(input: &Self::InputType) -> Result<Self::OutputType> {
        let equilibrium = find_equilibrium(input, 5, get_visible_neighbors);
        Ok(count_occupied(&equilibrium))
    }

    /// Parses the grid; width is taken from the first line and all
    /// whitespace is stripped before cell conversion.
    fn parse(content: &str) -> Result<Self::InputType> {
        let width = content
            .lines()
            .next()
            .ok_or_else(|| anyhow!("Empty grid"))?
            .trim_end()
            .len();
        let cells = content
            .trim()
            .chars()
            .filter(|c| !c.is_whitespace())
            .map(<Cell as TryFrom<char>>::try_from)
            .collect::<Result<_>>()?;
        Ok(Grid::from_vec(cells, width))
    }
}

/// Repeats `seating_step` until the layout stops changing, and returns the
/// stable layout.
fn find_equilibrium<F>(input: &Layout, max_neighbors: usize, neighbors_func: F) -> Layout
where
    F: Copy + Fn(&Layout, (usize, usize)) -> usize,
{
    let mut last_input = input.clone();
    let mut new_input = seating_step(&last_input, max_neighbors, neighbors_func);
    while last_input != new_input {
        last_input = new_input.clone();
        new_input = seating_step(&last_input, max_neighbors, neighbors_func);
    }
    new_input
}

/// Applies one simultaneous update of the seating rules to every non-floor
/// cell and returns the next layout.
fn seating_step<F>(layout: &Layout, max_neighbors: usize, neighbors_func: F) -> Layout
where
    F: Fn(&Layout, (usize, usize)) -> usize,
{
    let cells = (0..layout.rows())
        .cartesian_product(0..layout.cols())
        .map(|(y, x)| {
            let cell = layout[y][x];
            if cell != Cell::Floor {
                new_cell_state(cell, neighbors_func(layout, (x, y)), max_neighbors)
            } else {
                cell
            }
        })
        .collect();
    Layout::from_vec(cells, layout.cols())
}

/// Seating rules: an empty seat with no occupied neighbors fills; an
/// occupied seat with at least `max_neighbors` occupied neighbors empties.
fn new_cell_state(cell: Cell, occupied_neighbors: usize, max_neighbors: usize) -> Cell {
    match (cell, occupied_neighbors) {
        (Cell::Empty, 0) => Cell::Occupied,
        (Cell::Occupied, i) if i >= max_neighbors => Cell::Empty,
        (c, _) => c,
    }
}

// The eight compass directions.
const DIRS: &[(i8, i8)] = &[
    (-1, -1),
    (0, -1),
    (1, -1),
    (-1, 0),
    (1, 0),
    (-1, 1),
    (0, 1),
    (1, 1),
];

/// Counts occupied seats among the eight directly adjacent cells.
fn get_occupied_neighbors(layout: &Layout, coords: (usize, usize)) -> usize {
    DIRS.iter()
        .filter_map(|&offset| filter_map_in_bounds(layout, coords, offset))
        .filter(|&(neighbor_x, neighbor_y)| layout[neighbor_y][neighbor_x] == Cell::Occupied)
        .count()
}

/// Counts occupied seats among the first visible seat in each direction
/// (floor cells are looked through).
fn get_visible_neighbors(layout: &Layout, coords: (usize, usize)) -> usize {
    DIRS.iter()
        .copied()
        // For each direction, tries multiple of that direction to find the first visible seat.
        .filter(|&offset| {
            (1..)
                // Multiply offset
                .map(|i| (offset.0 * i, offset.1 * i))
                // map_while offset is in bounds
                // NOTE(review): the map/take_while(is_some)/map(unwrap)
                // chain below is exactly `Iterator::map_while`; worth
                // switching once the crate's MSRV allows it.
                .map(|offset| filter_map_in_bounds(layout, coords, offset))
                .take_while(|&visible_coords| visible_coords.is_some())
                .map(|visible_coords| visible_coords.unwrap())
                .map(|(x, y)| layout[y][x])
                .find(|&cell| cell != Cell::Floor)
                == Some(Cell::Occupied)
        })
        .count()
}

/// Applies a signed offset to `(x, y)` and returns the resulting
/// coordinates, or `None` when they would leave the grid.
fn filter_map_in_bounds(
    layout: &Layout,
    (x, y): (usize, usize),
    (offset_x, offset_y): (i8, i8),
) -> Option<(usize, usize)> {
    if is_in_bounds(layout, (x, y), (offset_x, offset_y)) {
        Some((
            (x as i64 + offset_x as i64) as usize,
            (y as i64 + offset_y as i64) as usize,
        ))
    } else {
        None
    }
}

/// Checks whether `coords + offset` stays inside the grid, handling the
/// unsigned/signed mix without overflow.
fn is_in_bounds(layout: &Layout, coords: (usize, usize), offset: (i8, i8)) -> bool {
    match (coords, offset) {
        ((x, _), (offset_x, _)) if offset_x < 0 && (-offset_x) as usize > x => false,
        ((_, y), (_, offset_y)) if offset_y < 0 && (-offset_y) as usize > y => false,
        ((x, _), (offset_x, _)) if offset_x > 0 && x + offset_x as usize >= layout.cols() => false,
        ((_, y), (_, offset_y)) if offset_y > 0 && y + offset_y as usize >= layout.rows() => false,
        _ => true,
    }
}

/// Total number of occupied seats in the layout.
fn count_occupied(layout: &Layout) -> usize {
    layout.iter().filter(|&&c| c == Cell::Occupied).count()
}

/// One grid cell: floor (never seated), an empty seat, or an occupied seat.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Cell {
    Floor,
    Empty,
    Occupied,
}

impl TryFrom<char> for Cell {
    type Error = anyhow::Error;

    // '.' = floor, 'L' = empty seat, '#' = occupied seat.
    fn try_from(value: char) -> Result<Self, Self::Error> {
        match value {
            '.' => Ok(Cell::Floor),
            'L' => Ok(Cell::Empty),
            '#' => Ok(Cell::Occupied),
            _ => Err(anyhow!("Cannot parse seat: {}", value)),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const EXAMPLE: &str = "L.LL.LL.LL
LLLLLLL.LL
L.L.L..L..
LLLL.LL.LL
L.LL.LL.LL
L.LLLLL.LL
..L.L.....
LLLLLLLLLL
L.LLLLLL.L
L.LLLLL.LL";

    #[test]
    fn test_part1() {
        assert_eq!(Day11::solve1(EXAMPLE).unwrap(), 37);
    }

    #[test]
    fn test_part2() {
        assert_eq!(Day11::solve2(EXAMPLE).unwrap(), 26);
    }
}

crate::benchmark_challenge!(crate::day11::Day11);
#[doc = "Reader of register DDRCTRL_POISONSTAT"] pub type R = crate::R<u32, super::DDRCTRL_POISONSTAT>; #[doc = "Reader of field `WR_POISON_INTR_0`"] pub type WR_POISON_INTR_0_R = crate::R<bool, bool>; #[doc = "Reader of field `WR_POISON_INTR_1`"] pub type WR_POISON_INTR_1_R = crate::R<bool, bool>; #[doc = "Reader of field `RD_POISON_INTR_0`"] pub type RD_POISON_INTR_0_R = crate::R<bool, bool>; #[doc = "Reader of field `RD_POISON_INTR_1`"] pub type RD_POISON_INTR_1_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - WR_POISON_INTR_0"] #[inline(always)] pub fn wr_poison_intr_0(&self) -> WR_POISON_INTR_0_R { WR_POISON_INTR_0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - WR_POISON_INTR_1"] #[inline(always)] pub fn wr_poison_intr_1(&self) -> WR_POISON_INTR_1_R { WR_POISON_INTR_1_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 16 - RD_POISON_INTR_0"] #[inline(always)] pub fn rd_poison_intr_0(&self) -> RD_POISON_INTR_0_R { RD_POISON_INTR_0_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - RD_POISON_INTR_1"] #[inline(always)] pub fn rd_poison_intr_1(&self) -> RD_POISON_INTR_1_R { RD_POISON_INTR_1_R::new(((self.bits >> 17) & 0x01) != 0) } }
#![cfg_attr(not(feature = "std"), no_std)] #![feature(nll)] #![feature(external_doc)] #![feature(try_trait)] #![deny(missing_docs)] #![doc(include = "../README.md")] #![doc(html_logo_url = "https://doc.dalek.rs/assets/dalek-logo-clear.png")] #![doc(html_root_url = "https://docs.rs/bulletproofs/2.0.0")] extern crate alloc; #[macro_use] extern crate serde_derive; mod util; #[doc(include = "../docs/notes-intro.md")] mod notes { #[doc(include = "../docs/notes-ipp.md")] mod inner_product_proof {} #[doc(include = "../docs/notes-rp.md")] mod range_proof {} #[doc(include = "../docs/notes-r1cs.md")] mod r1cs_proof {} } mod errors; mod generators; mod inner_product_proof; mod linear_proof; mod range_proof; mod transcript; pub use crate::errors::ProofError; pub use crate::generators::{BulletproofGens, BulletproofGensShare, PedersenGens}; pub use crate::linear_proof::LinearProof; pub use crate::range_proof::RangeProof; #[doc(include = "../docs/aggregation-api.md")] pub mod range_proof_mpc { pub use crate::errors::MPCError; pub use crate::range_proof::dealer; pub use crate::range_proof::messages; pub use crate::range_proof::party; } #[cfg(feature = "yoloproofs")] #[cfg(feature = "std")] pub mod r1cs;
use std::path::PathBuf;

// These tests previously used `assert!(false, msg)` with a non-literal
// message, which the 2021 edition rejects (panic messages must be format
// strings); each nested match is flattened and failures use `panic!`.

/// A nonexistent path must surface as `ParserError::BadFile`.
#[test]
fn non_existent_file() {
    let path = PathBuf::from("src/fixtures/no-such-file");
    match super::read_file(path) {
        Err(super::ParserError::BadFile(msg)) => println!("{}", msg),
        Err(super::ParserError::BadSize(msg)) => panic!("expected BadFile, got BadSize: {}", msg),
        Ok(message) => panic!("expected an error, got: {}", message),
    }
}

/// An empty fixture must surface as `ParserError::BadSize`.
#[test]
fn empty_file() {
    let path = PathBuf::from("src/fixtures/empty");
    match super::read_file(path) {
        Err(super::ParserError::BadSize(msg)) => println!("{}", msg),
        Err(super::ParserError::BadFile(msg)) => panic!("expected BadSize, got BadFile: {}", msg),
        Ok(message) => panic!("expected an error, got: {}", message),
    }
}

/// A readable, non-empty fixture must parse successfully.
#[test]
fn non_empty_file() {
    let path = PathBuf::from("src/fixtures/nonempty");
    match super::read_file(path) {
        Ok(message) => println!("{}", message),
        Err(super::ParserError::BadFile(msg)) => panic!("unexpected BadFile: {}", msg),
        Err(super::ParserError::BadSize(msg)) => panic!("unexpected BadSize: {}", msg),
    }
}
// Derive macros for converting between enums and their `#[repr]` primitive:
// `IntoPrimitive` (enum -> integer) and `TryFromPrimitive` (integer -> enum).
// NOTE(review): written against a pre-1.0 `syn` API (`attr.tts`,
// `syn::IntSuffix`, `.value()`) — keep the pinned dependency versions.

#![no_std]
#![cfg_attr(feature = "external_doc", feature(external_doc))]
#![cfg_attr(feature = "external_doc", doc(include = "../README.md"))]

extern crate proc_macro;
extern crate proc_macro2;
extern crate alloc;

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput};
use alloc::{format, vec, vec::Vec, boxed::Box, string::String};

// Aborts macro expansion with a uniform, formatted error message.
macro_rules! die {
    ($ident:expr, $($reason:expr),+) => {{
        let message = format!("Can't generate num_enum traits for {} because {}.", $ident, format!($($reason),+));
        panic!("{}", message);
    }}
}

/// Expands `#[derive(IntoPrimitive)]`: emits `impl From<Enum> for Repr`
/// backed by a plain `as` cast.
#[proc_macro_derive(IntoPrimitive)]
pub fn derive_into_primitive(stream: TokenStream) -> TokenStream {
    let input = parse_macro_input!(stream as DeriveInput);
    let enum_info = parse_enum(input);
    let name = enum_info.name;
    let repr = enum_info.repr;
    let expanded = quote! {
        impl From<#name> for #repr {
            fn from(number: #name) -> Self {
                number as Self
            }
        }
    };
    TokenStream::from(expanded)
}

/// Expands `#[derive(TryFromPrimitive)]`: emits
/// `impl TryFrom<Repr> for Enum` that matches on per-variant constants.
#[proc_macro_derive(TryFromPrimitive)]
pub fn derive_try_from_primitive(stream: TokenStream) -> TokenStream {
    let input = parse_macro_input!(stream as DeriveInput);
    let enum_info = parse_enum(input);
    let TryIntoEnumInfo {
        name,
        repr,
        match_const_names,
        match_const_exprs,
        enum_keys,
        no_match_message,
        ..
    } = TryIntoEnumInfo::from(enum_info);
    // quote! consumes each interpolated iterator once, so pieces appearing in
    // more than one position are cloned/repeated up front.
    let match_const_names2 = match_const_names.clone();
    let repeated_repr = core::iter::repeat(repr.clone()).take(enum_keys.len());
    let repeated_name = core::iter::repeat(name.clone()).take(enum_keys.len());
    let expanded = quote! {
        impl ::core::convert::TryFrom<#repr> for #name {
            type Error=String;
            fn try_from(number: #repr) -> Result<Self, String> {
                // Discriminants may be arbitrary const expressions; binding
                // them to consts makes them usable as match patterns.
                #(
                    const #match_const_names: #repeated_repr = #match_const_exprs;
                )*
                match number {
                    #(
                        #match_const_names2 => Ok(#repeated_name::#enum_keys),
                    )*
                    _ => Err(format!(#no_match_message, number)),
                }
            }
        }
    };
    TokenStream::from(expanded)
}

// Parsed facts about the annotated enum.
struct EnumInfo {
    name: syn::Ident,             // the enum's identifier
    repr: proc_macro2::Ident,     // its #[repr(...)] integer type
    value_expressions_to_enum_keys: Vec<(syn::Expr, syn::Ident)>, // (discriminant expr, variant)
}

// Extracts name, repr type and per-variant discriminant expressions;
// `die!`s on anything that is not an enum with an integer repr.
fn parse_enum(input: DeriveInput) -> EnumInfo {
    let mut repr = None;
    for attr in input.attrs {
        if attr.path.segments.len() == 1
            && format!("{}", attr.path.segments.first().unwrap().value().ident) == "repr"
        {
            let tokens: Vec<proc_macro2::TokenTree> = attr.tts.into_iter().collect();
            if tokens.len() == 1 {
                if let proc_macro2::TokenTree::Group(ref group) = tokens[0] {
                    match group.stream().into_iter().next().unwrap() {
                        proc_macro2::TokenTree::Ident(ident) => {
                            // repr(C) has no fixed integer size, so a cast to
                            // it would be meaningless.
                            if &format!("{}", ident) == "C" {
                                die!(
                                    input.ident,
                                    "it has repr(C), which doesn't have a generally defined size"
                                )
                            }
                            repr = Some(ident);
                            break;
                        }
                        val => {
                            die!(input.ident, "it had unexpected repr: {}", val);
                        }
                    }
                }
            }
        }
    }
    let mut variants = vec![];
    // Rust numbers unspecified discriminants as previous + 1 starting at 0;
    // mirror that by threading a `prev + 1` expression through the loop.
    let mut next_discriminant = literal(0);
    if let Data::Enum(data) = input.data {
        for variant in data.variants {
            let disc = if let Some(d) = variant.discriminant {
                d.1
            } else {
                next_discriminant
            };
            next_discriminant = syn::Expr::Binary(syn::ExprBinary {
                attrs: vec![],
                left: Box::new(disc.clone()),
                op: syn::BinOp::Add(syn::token::Add {
                    spans: [proc_macro2::Span::call_site()],
                }),
                right: Box::new(literal(1)),
            });
            variants.push((disc, variant.ident.clone()));
        }
    } else {
        die!(input.ident, "it was not an enum");
    }
    if let Some(repr) = repr {
        EnumInfo {
            name: input.ident,
            repr: repr,
            value_expressions_to_enum_keys: variants,
        }
    } else {
        die!(input.ident, "it does not have a valid `#[repr]` attribute");
    }
}

// Builds an unsuffixed integer literal expression (e.g. `0`).
fn literal(i: u64) -> syn::Expr {
    syn::Expr::Lit(syn::ExprLit {
        attrs: vec![],
        lit: syn::Lit::Int(syn::LitInt::new(
            i,
            syn::IntSuffix::None,
            proc_macro2::Span::call_site(),
        )),
    })
}

// The parallel vectors `derive_try_from_primitive` interpolates into quote!.
struct TryIntoEnumInfo {
    name: proc_macro2::Ident,
    repr: proc_macro2::Ident,
    match_const_names: Vec<proc_macro2::Ident>, // __num_enum_match_* const idents
    match_const_exprs: Vec<syn::Expr>,          // matching discriminant expressions
    enum_keys: Vec<proc_macro2::Ident>,         // variant identifiers
    no_match_message: String,                   // format string for the Err case
}

impl TryIntoEnumInfo {
    fn from(enum_info: EnumInfo) -> TryIntoEnumInfo {
        let mut match_const_names =
            Vec::with_capacity(enum_info.value_expressions_to_enum_keys.len());
        let mut match_const_exprs =
            Vec::with_capacity(enum_info.value_expressions_to_enum_keys.len());
        let mut enum_keys = Vec::with_capacity(enum_info.value_expressions_to_enum_keys.len());
        for (enum_value_expression, enum_key) in enum_info.value_expressions_to_enum_keys {
            // Use an intermediate const so that enums defined like `Two = ONE + 1u8` work properly.
            let match_const = format!("__num_enum_match_{}", enum_key);
            match_const_names.push(proc_macro2::Ident::new(
                &match_const,
                proc_macro2::Span::call_site(),
            ));
            match_const_exprs.push(enum_value_expression.clone());
            enum_keys.push(enum_key);
        }
        let no_match_message = format!("No value in enum {} for value {{}}", enum_info.name);
        TryIntoEnumInfo {
            name: enum_info.name,
            repr: enum_info.repr,
            match_const_names: match_const_names,
            match_const_exprs: match_const_exprs,
            enum_keys: enum_keys,
            no_match_message: no_match_message,
        }
    }
}
// Auto-generated (svd2rust-style) read-only accessors for the FMC_HWCFGR2
// hardware-configuration register: read-line depth plus base-address
// configuration bits for NOR, NAND and SDRAM banks.

#[doc = "Reader of register FMC_HWCFGR2"]
pub type R = crate::R<u32, super::FMC_HWCFGR2>;
#[doc = "Reader of field `RD_LN2DPTH`"]
pub type RD_LN2DPTH_R = crate::R<u8, u8>;
#[doc = "Reader of field `NOR_BASE`"]
pub type NOR_BASE_R = crate::R<bool, bool>;
#[doc = "Reader of field `SDRAM_RBASE`"]
pub type SDRAM_RBASE_R = crate::R<bool, bool>;
#[doc = "Reader of field `NAND_BASE`"]
pub type NAND_BASE_R = crate::R<u8, u8>;
#[doc = "Reader of field `SDRAM1_BASE`"]
pub type SDRAM1_BASE_R = crate::R<u8, u8>;
#[doc = "Reader of field `SDRAM2_BASE`"]
pub type SDRAM2_BASE_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:3 - RD_LN2DPTH"]
    #[inline(always)]
    pub fn rd_ln2dpth(&self) -> RD_LN2DPTH_R {
        // 4-bit field at bit 0: mask only.
        RD_LN2DPTH_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bit 4 - NOR_BASE"]
    #[inline(always)]
    pub fn nor_base(&self) -> NOR_BASE_R {
        NOR_BASE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 8 - SDRAM_RBASE"]
    #[inline(always)]
    pub fn sdram_rbase(&self) -> SDRAM_RBASE_R {
        SDRAM_RBASE_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bits 12:15 - NAND_BASE"]
    #[inline(always)]
    pub fn nand_base(&self) -> NAND_BASE_R {
        NAND_BASE_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - SDRAM1_BASE"]
    #[inline(always)]
    pub fn sdram1_base(&self) -> SDRAM1_BASE_R {
        SDRAM1_BASE_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - SDRAM2_BASE"]
    #[inline(always)]
    pub fn sdram2_base(&self) -> SDRAM2_BASE_R {
        SDRAM2_BASE_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
}
use std::{i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64}; use std::io::stdin; fn main() { // Client let facade = PhoneFactoryFacade{}; facade.develop_phones(10); } #[derive(Debug)] struct PhoneFactoryFacade {} impl PhoneFactoryFacade { fn develop_phones(&self, amount_of_phones: i64) { println!("==========================================================="); MaterialFinder::find_metal(amount_of_phones); println!("-----------------------------------------------------------"); MaterialFinder::find_other_materials(amount_of_phones); println!("==========================================================="); Transport::transport_materials(); println!("==========================================================="); MaterialProcessor::process_metals(); println!("-----------------------------------------------------------"); MaterialProcessor::process_other_materials(); println!("==========================================================="); Factory::create_processor(); println!("-----------------------------------------------------------"); Factory::create_memory(); println!("-----------------------------------------------------------"); Factory::create_remaining(); println!("==========================================================="); Assembler::join_components(); println!("-----------------------------------------------------------"); Assembler::assemble_materials(); println!("==========================================================="); Tester::test_phones(amount_of_phones); println!("==========================================================="); Packager::request_boxes(amount_of_phones); println!("-----------------------------------------------------------"); Packager::package_phones(amount_of_phones); println!("==========================================================="); Transport::transport_product(); println!("==========================================================="); } } struct MaterialFinder {} impl MaterialFinder { fn find_metal(amount: i64) { 
println!("Searching for {} aluminium grams...", 500 * amount); println!("Searching for {} sillicon grams...", 10 * amount); println!("Searching for {} gold grams...", 1 * amount); } fn find_other_materials(amount: i64) { println!("Searching for {} plastic grams...", 200 * amount); println!("Searching for {} glass grams...", 300 * amount); } } struct Transport {} impl Transport { fn transport_materials() { println!("Transporting products to the factory..."); } fn transport_product() { println!("Tansporting the phones to the stores..."); } } struct MaterialProcessor {} impl MaterialProcessor { fn process_metals() { println!("Processing aluminium for the phone structures..."); println!("Processing sillicon for semiconductors..."); println!("Processing gold for semiconductors..."); } fn process_other_materials() { println!("Processing plastic for the phone cases, chargers and headphones..."); println!("Processing glass for the screens..."); } } struct Factory {} impl Factory { fn create_processor() { println!("Requesting sillicon and gold..."); println!("Creating the processors..."); println!("Processors created."); } fn create_memory() { println!("Requesting sillicon and gold..."); println!("Creating the memories..."); println!("Memories created."); } fn create_remaining() { println!("Requesting plastic, glass and other materials..."); println!("Creating the screens..."); println!("Creating the cases..."); println!("Creating the motherboards..."); println!("Creating the cameras..."); println!("Creating the buttons..."); println!("Creating the chargers..."); println!("Creating the headphones..."); println!("All components created."); } } struct Assembler {} impl Assembler { fn join_components() { println!("Preparing components..."); println!("Requesting needed materials..."); println!("All the components are ready to assemble."); } fn assemble_materials() { println!("Turning on assembling machines..."); println!("Requesting screws..."); println!("Assembling..."); 
println!("Assembling finished successfully."); } } struct Tester {} impl Tester { fn test_phones(amount: i64) { let mut counter = 1; while (counter <= amount) { println!("Testing phone {} of {}.", counter, amount); counter += 1; } println!("All phones tested successfully."); } } struct Packager {} impl Packager { fn request_boxes(amount: i64) { println!("Requesting {} boxes to manufacturer...", amount); println!("Boxes ready."); } fn package_phones(amount: i64) { let mut counter = 1; while (counter <= amount) { println!("Packing phone {} of {}.", counter, amount); counter += 1; } println!("All phones packaged successfully."); println!("All phones are ready for distribution."); } }
//! (De)serialization adapters for `WithBot<Point<AutoReturnBuffer, ()>>`:
//! a `WithBot` is encoded as an `Option` (`None` = bottom), and
//! deserialization threads a shared `BufferPool` through so the inner
//! `Point` payload can use pooled buffers.

use std::cell::RefCell;
use std::rc::Rc;

use lattices::{Point, WithBot};
use serde::de::{DeserializeSeed, Visitor};
use serde::{Serialize, Serializer};

use super::point::PointWrapper;
use crate::buffer_pool::{AutoReturnBuffer, BufferPool};
use crate::protocol::serialization::lattices::point::PointDeserializer;

// Borrow-wrapper so serializing needs no clone; `repr(transparent)` keeps it
// layout-identical to the reference it wraps.
#[repr(transparent)]
pub struct WithBotWrapper<'a, const SIZE: usize>(
    pub &'a WithBot<Point<AutoReturnBuffer<SIZE>, ()>>,
);

impl<'a, const SIZE: usize> Serialize for WithBotWrapper<'a, SIZE> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `WithBot` wraps an Option; encode bottom as `None`, otherwise
        // delegate the payload to the `Point` wrapper.
        if let Some(inner) = &self.0 .0 {
            serializer.serialize_some(&PointWrapper(inner))
        } else {
            serializer.serialize_none()
        }
    }
}

// Stateful deserializer seed carrying the pool that payload buffers are
// drawn from.
pub struct WithBotDeserializer<const SIZE: usize> {
    pub collector: Rc<RefCell<BufferPool<SIZE>>>,
}

impl<'de, const SIZE: usize> DeserializeSeed<'de> for WithBotDeserializer<SIZE> {
    type Value = WithBot<Point<AutoReturnBuffer<SIZE>, ()>>;

    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Outer visitor: maps the serialized Option back onto `WithBot`
        // (visit_none => bottom, visit_some => inner Point).
        struct V<const SIZE: usize> {
            pub collector: Rc<RefCell<BufferPool<SIZE>>>,
        }
        impl<'de, const SIZE: usize> Visitor<'de> for V<SIZE> {
            type Value = WithBot<Point<AutoReturnBuffer<SIZE>, ()>>;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str(std::any::type_name::<Self::Value>())
            }

            fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                // Inner visitor: unwraps the `Point` newtype and hands the
                // pool on to `PointDeserializer` for the actual payload.
                struct V<const SIZE: usize> {
                    pub collector: Rc<RefCell<BufferPool<SIZE>>>,
                }
                impl<'de, const SIZE: usize> Visitor<'de> for V<SIZE> {
                    type Value = Point<AutoReturnBuffer<SIZE>, ()>;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                        formatter.write_str(std::any::type_name::<Self::Value>())
                    }

                    fn visit_newtype_struct<D>(
                        self,
                        deserializer: D,
                    ) -> Result<Self::Value, D::Error>
                    where
                        D: serde::Deserializer<'de>,
                    {
                        serde::de::DeserializeSeed::deserialize(
                            PointDeserializer {
                                collector: self.collector,
                            },
                            deserializer,
                        )
                    }
                }

                let inner = deserializer.deserialize_newtype_struct(
                    "Point",
                    V {
                        collector: self.collector,
                    },
                )?;

                Ok(WithBot::new(Some(inner)))
            }

            fn visit_none<E>(self) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // `None` encodes the lattice bottom element.
                Ok(WithBot::new(None))
            }
        }

        deserializer.deserialize_option(V {
            collector: self.collector,
        })
    }
}
//! An expression that consumes any single character. //! //! See [`crate::any`]. use crate::error::UnexpectedEndOfInput; use crate::parser::Parser; use crate::span::Span; /// The struct returned from [`crate::any`]. pub struct Any; impl Parser for Any { type Value = char; type Error = UnexpectedEndOfInput; fn parse(&self, input: &'_ str) -> Result<Span<Self::Value>, Span<Self::Error>> { let actual = input .chars() .next() .ok_or_else(|| Span::new(0..0, UnexpectedEndOfInput))?; let len = actual.len_utf8(); Ok(Span::new(0..len, actual)) } } #[cfg(test)] mod tests { use quickcheck_macros::quickcheck; use crate::error::UnexpectedEndOfInput; use crate::parser::Parser; use crate::span::Span; use super::Any; #[test] fn match_ascii() { assert_eq!(Any.parse("hello"), Ok(Span::new(0..1, 'h'))); } #[test] fn match_utf8() { assert_eq!(Any.parse("💩"), Ok(Span::new(0..4, '💩'))); } #[test] fn match_grapheme() { assert_eq!(Any.parse("नि"), Ok(Span::new(0..3, 'न'))); } #[test] fn error_if_empty() { assert_eq!(Any.parse(""), Err(Span::new(0..0, UnexpectedEndOfInput))); } #[quickcheck] fn parse(input: String) { let result = Any.parse(&input); if input.is_empty() { assert_eq!(result, Err(Span::new(0..0, UnexpectedEndOfInput))); } else { let first_char = input.chars().next().unwrap(); assert_eq!(result, Ok(Span::new(0..first_char.len_utf8(), first_char))); } } }
/// Demonstrates `String` concatenation with `+`.
///
/// The original did not compile: `n1` was moved by `n1 + &n2`, and
/// `n1 + n2` is also a type error (`+` needs `&str` on the right).
/// `n1` is cloned for the first sum so it can still be moved in the second.
fn main() {
    let n1 = "Tutorials".to_string();
    let n2 = "Point".to_string();
    // `+` takes the left operand by value and borrows the right,
    // so clone `n1` here to keep it usable below.
    let n3 = n1.clone() + &n2; // n2 reference is passed
    let n4 = n1 + &n2; // `n1` is moved here; `n2` is only borrowed
    println!("{}", n3);
    println!("{}", n4);
}
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::Paragraph;

/// Single-line help bar listing the application's hotkeys.
pub struct TopBar<'a> {
    pub widget: Paragraph<'a>,
    text: Vec<Line<'a>>,
}

impl<'a> TopBar<'a> {
    /// Builds the bar with an empty widget; call [`TopBar::refresh`] to
    /// render the stored hotkey text into it.
    pub fn new() -> Self {
        // Every hotkey label shares the same bold-yellow style, so build the
        // styled spans through one helper instead of repeating the style.
        let hotkey = |label: &'a str| {
            Span::styled(
                label,
                Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD),
            )
        };
        let line = Line::from(vec![
            hotkey(" <u>"),
            Span::raw("update all,"),
            hotkey(" <U>"),
            Span::raw("update selected,"),
            hotkey(" <i>"),
            Span::raw("ignore update,"),
            hotkey(" <p>"),
            Span::raw("pause/resume,"),
            hotkey(" <v>"),
            Span::raw("visit on Nexus,"),
            hotkey(" <Del>"),
            Span::raw("delete,"),
            hotkey(" <q>"),
            Span::raw("quit"),
        ]);
        Self {
            widget: Paragraph::new(Line::from(vec![])),
            text: vec![line],
        }
    }

    /// Rebuilds the widget from the stored text.
    pub async fn refresh(&mut self) {
        self.widget = Paragraph::new(self.text.clone());
    }
}
// CLI-side view of a tfdeploy graph: nodes/edges flattened into serializable
// structs, renderable to web, JSON or the console.

use format::Row;
use std::collections::HashMap;
use std::fs;
use tfdeploy;
use tfdeploy::analyser::Analyser;
use tfdeploy::tfpb::graph::GraphDef;
use OutputParameters;
use Result as CliResult;

/// A directed edge between two node ports.
#[derive(Debug, Serialize)]
pub struct Edge {
    pub id: usize,
    pub src_node_id: usize,
    pub src_node_output: usize,
    pub dst_node_id: usize,
    pub dst_node_input: usize,
    pub main: bool,
    pub label: Option<String>,
}

/// A graph node plus the display metadata attached to it.
#[derive(Debug, Serialize)]
pub struct Node {
    pub id: usize,
    pub name: String,
    pub op: String,
    pub label: Option<String>,
    pub more_lines: Vec<String>,
    pub attrs: Vec<(String, String)>,
    // Edge indices into `DisplayGraph::edges`, not node ids.
    pub inputs: Vec<usize>,
    pub outputs: Vec<usize>,
    pub hidden: bool,
}

/// The whole displayable graph.
#[derive(Debug, Serialize)]
pub struct DisplayGraph {
    pub nodes: Vec<Node>,
    pub edges: Vec<Edge>,
}

impl DisplayGraph {
    /// Dispatches to the output mode selected in `params`:
    /// web view, JSON file, or console dump (the default).
    pub fn render(&self, params: &OutputParameters) -> CliResult<()> {
        if params.web {
            ::web::open_web(&self, params)
        } else if let Some(json) = params.json.as_ref() {
            ::serde_json::to_writer(fs::File::create(json)?, self)?;
            Ok(())
        } else {
            self.render_console(params)
        }
    }

    /// Prints one box per visible node with attribute/input/output sections.
    pub fn render_console(&self, params: &OutputParameters) -> CliResult<()> {
        use colored::Colorize;
        for node in &self.nodes {
            // Skip constants (unless requested via --konst) and hidden nodes.
            if node.op == "Const" && !params.konst || node.hidden {
                continue;
            }
            // Map output port number -> optional label, for the output section.
            let output_ports: HashMap<usize, Option<String>> = node.outputs
                .iter()
                .map(|edge| {
                    let edge = &self.edges[*edge];
                    (edge.src_node_output, edge.label.clone())
                })
                .collect();
            let mut sections = vec![
                node.attrs
                    .iter()
                    .map(|a| Row::Double(format!("Attribute {}:", a.0.bold()), a.1.clone()))
                    .collect(),
                node.inputs
                    .iter()
                    .enumerate()
                    .map(|(ix, a)| {
                        let edge = &self.edges[*a];
                        Row::Double(
                            // Port 0 is the common case; only show "/port"
                            // for secondary outputs.
                            if edge.src_node_output == 0 {
                                format!(
                                    "Input {}: Node #{}",
                                    ix.to_string().bold(),
                                    edge.src_node_id.to_string().bold()
                                )
                            } else {
                                format!(
                                    "Input {}: Node #{}/{}",
                                    ix.to_string().bold(),
                                    edge.src_node_id.to_string().bold(),
                                    edge.src_node_output.to_string().bold()
                                )
                            },
                            edge.label.clone().unwrap_or_else(|| "".to_string()),
                        )
                    })
                    .collect(),
                (0..output_ports.len())
                    .map(|ix| {
                        let edge = &output_ports[&ix];
                        Row::Double(
                            format!("Output {}:", ix.to_string().bold()),
                            edge.clone().unwrap_or_else(|| "".to_string()),
                        )
                    })
                    .collect(),
            ];
            if node.more_lines.len() > 0 {
                sections.push(
                    node.more_lines
                        .iter()
                        .map(|s| Row::Simple(s.clone()))
                        .collect(),
                );
            }
            ::format::print_box(
                &node.id.to_string(),
                &node.op,
                &node.name,
                &*node.label.as_ref().map(|a| vec![a]).unwrap_or(vec![]),
                sections,
            );
        }
        Ok(())
    }

    /// Builds the display graph from tfdeploy nodes, creating one `Edge`
    /// per (input, port) pair and wiring both endpoints' edge lists.
    pub fn from_nodes(tfnodes: &[&tfdeploy::Node]) -> CliResult<DisplayGraph> {
        let mut nodes: Vec<Node> = tfnodes
            .iter()
            .map(|n| Node {
                id: n.id,
                name: n.name.clone(),
                op: n.op_name.clone(),
                label: None,
                more_lines: vec![],
                attrs: vec![],
                inputs: vec![],
                outputs: vec![],
                hidden: false,
            })
            .collect();
        let mut edges = vec![];
        for node in tfnodes.iter() {
            for (ix, input) in node.inputs.iter().enumerate() {
                let edge = Edge {
                    id: edges.len(),
                    src_node_id: input.0,
                    src_node_output: input.1.unwrap_or(0),
                    dst_node_id: node.id,
                    dst_node_input: ix,
                    main: ix == 0,
                    // Constant sources get their value shown on the edge.
                    label: tfnodes[input.0]
                        .op()
                        .const_value()
                        .map(|v| format!("Const {:?}", v)),
                };
                nodes[edge.src_node_id].outputs.push(edges.len());
                nodes[node.id].inputs.push(edges.len());
                edges.push(edge);
            }
        }
        Ok(DisplayGraph { nodes, edges })
    }

    /// Enriches nodes with attributes from the protobuf `GraphDef`,
    /// matching graph-def nodes to display nodes by name.
    pub fn with_graph_def(mut self, graph_def: &GraphDef) -> CliResult<DisplayGraph> {
        let index_to_graph_def: HashMap<String, usize> =
            self.nodes.iter().map(|n| (n.name.clone(), n.id)).collect();
        for gnode in graph_def.get_node().iter() {
            if let Some(node_id) = index_to_graph_def.get(gnode.get_name()) {
                for a in gnode.get_attr().iter() {
                    // Tensors are decoded for readable output; everything
                    // else falls back to protobuf Debug formatting.
                    let value = if a.1.has_tensor() {
                        format!("{:?}", tfdeploy::tensor::Tensor::from_pb(a.1.get_tensor())?)
                    } else {
                        format!("{:?}", a.1)
                    };
                    self.nodes[*node_id].attrs.push((a.0.to_owned(), value));
                }
                self.nodes[*node_id].attrs.sort();
            }
        }
        Ok(self)
    }

    /// Copies analyser edge facts onto the matching display edges, keyed by
    /// (src node, src port, dst node, dst port).
    pub fn with_analyser(mut self, analyser: &Analyser) -> CliResult<DisplayGraph> {
        {
            let index: HashMap<(usize, usize, usize, usize), usize> = self.edges
                .iter()
                .enumerate()
                .map(|(ix, edge)| {
                    (
                        (
                            edge.src_node_id,
                            edge.src_node_output,
                            edge.dst_node_id,
                            edge.dst_node_input,
                        ),
                        ix,
                    )
                })
                .collect();
            for an_edge in &analyser.edges {
                // Only edges with both endpoints resolved can be matched.
                if let (Some(from_node), Some(to_node)) = (an_edge.from_node, an_edge.to_node) {
                    let key = (from_node, an_edge.from_out, to_node, an_edge.to_input);
                    self.edges[index[&key]].label = Some(format!("{:?}", an_edge.fact));
                }
            }
        }
        Ok(self)
    }
}
//! Private module for selective re-export.

use crate::actor::{Actor, Network};
use std::fmt::Debug;
use std::hash::{Hash, Hasher};
use std::sync::Arc;

/// Represents a snapshot in time for the entire actor system.
pub struct ActorModelState<A: Actor, H = ()> {
    /// Per-actor state; `Arc` keeps snapshot clones cheap.
    pub actor_states: Vec<Arc<A::State>>,
    /// Messages currently in flight.
    pub network: Network<A::Msg>,
    /// Whether each actor has a timer set (presumably parallel to
    /// `actor_states` — confirm against the model builder).
    pub is_timer_set: Vec<bool>,
    /// Model-defined history accumulator (defaults to `()` when unused).
    pub history: H,
}

impl<A, H> serde::Serialize for ActorModelState<A, H>
where
    A: Actor,
    A::State: serde::Serialize,
    A::Msg: serde::Serialize,
    H: serde::Serialize,
{
    fn serialize<Ser: serde::Serializer>(&self, ser: Ser) -> Result<Ser::Ok, Ser::Error> {
        use serde::ser::SerializeStruct;
        // Field count (4) must match the serialize_field calls below.
        let mut out = ser.serialize_struct("ActorModelState", 4)?;
        out.serialize_field("actor_states", &self.actor_states)?;
        out.serialize_field("network", &self.network)?;
        out.serialize_field("is_timer_set", &self.is_timer_set)?;
        out.serialize_field("history", &self.history)?;
        out.end()
    }
}

// Manual implementation to avoid `Clone` constraint that `#derive(Clone)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Clone for ActorModelState<A, H>
where
    A: Actor,
    H: Clone,
{
    fn clone(&self) -> Self {
        ActorModelState {
            actor_states: self.actor_states.clone(),
            history: self.history.clone(),
            is_timer_set: self.is_timer_set.clone(),
            network: self.network.clone(),
        }
    }
}

// Manual implementation to avoid `Debug` constraint that `#derive(Debug)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Debug for ActorModelState<A, H>
where
    A: Actor,
    H: Debug,
{
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let mut builder = f.debug_struct("ActorModelState");
        builder.field("actor_states", &self.actor_states);
        builder.field("history", &self.history);
        builder.field("is_timer_set", &self.is_timer_set);
        builder.field("network", &self.network);
        builder.finish()
    }
}

// Manual implementation to avoid `Eq` constraint that `#derive(Eq)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Eq for ActorModelState<A, H>
where
    A: Actor,
    A::State: Eq,
    H: Eq,
{}

// Manual implementation to avoid `Hash` constraint that `#derive(Hash)` would introduce on
// `ActorModelState<A, H>` type parameters.
impl<A, H> Hash for ActorModelState<A, H>
where
    A: Actor,
    H: Hash,
{
    // Hashes the same four fields `PartialEq` compares, keeping Hash/Eq
    // consistent.
    fn hash<Hash: Hasher>(&self, state: &mut Hash) {
        self.actor_states.hash(state);
        self.history.hash(state);
        self.is_timer_set.hash(state);
        self.network.hash(state);
    }
}

// Manual implementation to avoid `PartialEq` constraint that `#derive(PartialEq)` would
// introduce on `ActorModelState<A, H>` type parameters.
impl<A, H> PartialEq for ActorModelState<A, H>
where
    A: Actor,
    A::State: PartialEq,
    H: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.actor_states.eq(&other.actor_states)
            && self.history.eq(&other.history)
            && self.is_timer_set.eq(&other.is_timer_set)
            && self.network.eq(&other.network)
    }
}
use std::borrow::Borrow;

/// Cursor-style iterator over key/value pairs.
///
/// `key`/`value` are only meaningful while `valid()` returns true; `next`
/// steps forward and `advance` seeks to the given key (borrowed form `Q`).
pub trait KVIterator<K, V> {
    fn valid(&self) -> bool;
    fn key(&self) -> &K;
    fn value(&self) -> &V;
    fn next(&mut self);
    fn advance<Q: ?Sized + Ord>(&mut self, key: &Q)
    where
        K: Borrow<Q>;
}

#[cfg(test)]
pub mod tests {
    use super::*;
    use rand::prelude::random;

    /// Returns the integers 0..100 in shuffled order, for seeding test maps.
    ///
    /// NOTE(review): swapping each slot with a uniformly random index is a
    /// slightly biased shuffle; acceptable for test data.
    pub fn rand_int_array() -> Vec<u32> {
        let mut v: Vec<u32> = (0..100u32).collect();
        for i in 0..100 {
            let idx = (random::<u32>() % 100) as usize;
            // `slice::swap` replaces the manual three-statement temp swap.
            v.swap(i, idx);
        }
        v
    }
}
pub use drive_selector_derive::DriveSelector;

use std::collections::{HashMap, HashSet};

/// Appends the field name `ident` to `selector`, inserting a `,` separator
/// unless the buffer is empty or already ends with one. Shared by every
/// impl below (previously copy-pasted into each of them).
fn append_ident(ident: &str, selector: &mut String) {
    match selector.chars().last() {
        Some(',') | None => {}
        _ => selector.push(','),
    }
    selector.push_str(ident);
}

/// Appends `ident` followed — when the element type contributes fields of
/// its own — by a parenthesised nested selector. Used by the collection impls.
fn append_nested<T: DriveSelector>(ident: &str, selector: &mut String) {
    append_ident(ident, selector);
    let mut inner_selector = String::new();
    T::selector_with_ident("", &mut inner_selector);
    if !inner_selector.is_empty() {
        selector.push('(');
        selector.push_str(&inner_selector);
        selector.push(')');
    }
}

/// Builds a Drive-style field-selector string for a type.
pub trait DriveSelector {
    /// Renders the full selector for `Self`, starting from an empty buffer.
    fn selector() -> String {
        let mut selector = String::new();
        Self::selector_with_ident("", &mut selector);
        selector
    }

    /// Appends this type's selector fragment for the field named `ident`.
    fn selector_with_ident(ident: &str, selector: &mut String);
}

// Scalar leaves contribute only their own field name.
impl DriveSelector for String {
    fn selector_with_ident(ident: &str, selector: &mut String) {
        append_ident(ident, selector);
    }
}

impl DriveSelector for bool {
    fn selector_with_ident(ident: &str, selector: &mut String) {
        append_ident(ident, selector);
    }
}

// Maps are treated as opaque leaves: no per-entry field selection.
impl<K, V> DriveSelector for HashMap<K, V> {
    fn selector_with_ident(ident: &str, selector: &mut String) {
        append_ident(ident, selector);
    }
}

// Collections recurse into their element type.
impl<T> DriveSelector for Vec<T>
where
    T: DriveSelector,
{
    fn selector_with_ident(ident: &str, selector: &mut String) {
        append_nested::<T>(ident, selector);
    }
}

impl<T> DriveSelector for HashSet<T>
where
    T: DriveSelector,
{
    fn selector_with_ident(ident: &str, selector: &mut String) {
        append_nested::<T>(ident, selector);
    }
}

// `Option` is transparent: an optional field selects like a required one.
impl<T> DriveSelector for Option<T>
where
    T: DriveSelector,
{
    fn selector_with_ident(ident: &str, selector: &mut String) {
        T::selector_with_ident(ident, selector)
    }
}
// Optional chrono support: a `DateTime` is a leaf field, so its selector is
// just the field name (same comma-separator logic as the scalar impls in the
// parent module).
#[cfg(feature = "chrono")]
mod chrono {
    use super::DriveSelector;

    impl<T> DriveSelector for chrono::DateTime<T>
    where
        T: chrono::offset::TimeZone,
    {
        fn selector_with_ident(ident: &str, selector: &mut String) {
            // Append a comma only when continuing a non-empty selector list.
            match selector.chars().last() {
                Some(',') | None => {}
                _ => selector.push_str(","),
            }
            selector.push_str(ident);
        }
    }
}
// Minimal `#![no_core]` program: built without libcore, so the lang items the
// compiler normally gets from core must be declared by hand.

#![feature(lang_items, no_core)]
#![no_core]

// Entry point; `#[no_mangle]` keeps the unmangled `main` symbol for the linker.
#[no_mangle]
pub fn main() {}

// Required lang items that libcore would otherwise provide.
#[lang = "sized"]
trait Sized {}
#[lang = "copy"]
pub trait Copy {}
use std::error::Error;

mod lib;

/// Entry point: sends a "Hello" / "World! 🌍" notification via `lib::notify`,
/// propagating any failure to the process exit status.
///
/// Fix: `Box<Error>` used the deprecated bare-trait-object syntax (a hard
/// error in edition 2021); it is now spelled `Box<dyn Error>`.
fn main() -> Result<(), Box<dyn Error>> {
    lib::notify("Hello", "World! 🌍")
}
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

use reverie::syscalls::Displayable;
use reverie::syscalls::Errno;
use reverie::syscalls::Syscall;
use reverie::syscalls::SyscallInfo;
use reverie::Error;
use reverie::ExitStatus;
use reverie::GlobalRPC;
use reverie::Guest;
use reverie::Pid;
use reverie::Signal;
use reverie::Subscription;
use reverie::Tid;
use reverie::Tool;

use crate::config::Config;
use crate::global_state::GlobalState;

// Strace has no need for process-level state, so this is a unit struct.
#[derive(Debug, Default, Clone)]
pub struct Strace;

/// Here we use the same dummy type for both our local and global trait
/// implementations.
#[reverie::tool]
impl Tool for Strace {
    type GlobalState = GlobalState;
    type ThreadState = ();

    /// Translates the configured filters into a subscription set: start from
    /// "all syscalls" when every filter is an exclusion, otherwise from
    /// "none", then apply each filter in order.
    fn subscriptions(cfg: &Config) -> Subscription {
        // Check if we're only excluding things.
        let exclude_only = cfg.filters.iter().all(|f| f.inverse);
        let mut subs = if exclude_only {
            // Only excluding syscalls.
            Subscription::all_syscalls()
        } else {
            // Only including syscalls.
            Subscription::none()
        };
        for filter in &cfg.filters {
            let syscalls = filter.syscalls.iter().copied();
            if filter.inverse {
                subs.disable_syscalls(syscalls);
            } else {
                subs.syscalls(syscalls);
            }
        }
        subs
    }

    /// Logs every subscribed syscall with decoded arguments and its result.
    /// Exit- and exec-style syscalls get special handling: exit never
    /// returns, and exec wipes the address space the arguments point into.
    async fn handle_syscall_event<T: Guest<Self>>(
        &self,
        guest: &mut T,
        syscall: Syscall,
    ) -> Result<i64, Error> {
        match syscall {
            Syscall::Exit(_) | Syscall::ExitGroup(_) => {
                // The guest is gone after this, so the result is unknowable.
                eprintln!(
                    "[pid {}] {} = ?",
                    guest.tid().colored(),
                    syscall.display_with_outputs(&guest.memory()),
                );
                guest.tail_inject(syscall).await
            }
            Syscall::Execve(_) | Syscall::Execveat(_) => {
                let tid = guest.tid();
                // must be pre-formatted, otherwise the memory references become
                // invalid when execve/execveat returns success because the original
                // program got wiped out.
                eprintln!(
                    "[pid {}] {}",
                    tid.colored(),
                    syscall.display_with_outputs(&guest.memory())
                );
                // A successful exec does not return, so reaching this point
                // means `inject` yielded an error.
                let errno = guest.inject(syscall).await.unwrap_err();
                eprintln!(
                    "[pid {}] ({}) = {:?}",
                    tid.colored(),
                    syscall.number(),
                    errno
                );
                Err(errno.into())
            }
            _otherwise => {
                let syscall_ret = guest.inject(syscall).await;
                eprintln!(
                    "[pid {}] {} = {}",
                    guest.tid().colored(),
                    syscall.display_with_outputs(&guest.memory()),
                    // TODO: Pretty print the return value according to its type.
                    syscall_ret.unwrap_or_else(|errno| -errno.into_raw() as i64)
                );
                Ok(syscall_ret?)
            }
        }
    }

    /// Logs signal deliveries and forwards the signal unchanged.
    async fn handle_signal_event<G: Guest<Self>>(
        &self,
        guest: &mut G,
        signal: Signal,
    ) -> Result<Option<Signal>, Errno> {
        eprintln!(
            "[pid {}] Received signal: {}",
            guest.tid().colored(),
            signal
        );
        Ok(Some(signal))
    }

    /// Logs thread exit with its final status.
    async fn on_exit_thread<G: GlobalRPC<Self::GlobalState>>(
        &self,
        tid: Tid,
        _global_state: &G,
        _thread_state: Self::ThreadState,
        exit_status: ExitStatus,
    ) -> Result<(), Error> {
        eprintln!(
            "Thread {} exited with status {:?}",
            tid.colored(),
            exit_status
        );
        Ok(())
    }

    /// Logs process exit with its final status.
    async fn on_exit_process<G: GlobalRPC<Self::GlobalState>>(
        self,
        pid: Pid,
        _global_state: &G,
        exit_status: ExitStatus,
    ) -> Result<(), Error> {
        eprintln!(
            "Process {} exited with status {:?}",
            pid.colored(),
            exit_status
        );
        Ok(())
    }
}
use sudo_test::{Command, Env, TextFile};

use crate::Result;

use super::{CHMOD_EXEC, DEFAULT_EDITOR, EDITOR_TRUE};

/// `visudo -q` (quiet mode) must suppress syntax-error diagnostics even when
/// the sudoers content ("this is fine") is invalid.
// NOTE(review): fn name has a typo ("supresses"); renaming would change the
// test's public identifier, so it is left as-is.
#[test]
#[ignore = "gh657"] // tracked upstream; un-ignore once gh657 is resolved
fn supresses_syntax_error_messages() -> Result<()> {
    let env = Env("this is fine")
        .file(DEFAULT_EDITOR, TextFile(EDITOR_TRUE).chmod(CHMOD_EXEC))
        .build()?;

    let output = Command::new("visudo").arg("-q").output(&env)?;

    assert!(output.status().success());
    assert_not_contains!(output.stderr(), "syntax error");

    Ok(())
}
use std::{self, env};
use std::collections::HashMap;
use std::convert::From;
use std::path::PathBuf;
use std::ffi::{self, CString};
use std::fs::File;
use std::io::{self, BufReader, BufRead};

/// Errors that can occur while loading and parsing `.trashrc`.
pub enum Error {
    // Underlying I/O failure while opening/reading the file.
    IoError(io::Error),
    // The system username was not valid UTF-8.
    Utf8Error(std::str::Utf8Error),
    // `pw_dir` could not be converted to a `String`.
    IntoStringError(ffi::IntoStringError),
    // (message, line number) of a syntax error in `.trashrc`.
    ParseError((String, usize)),
    // An interior NUL byte was found while building a `CString`.
    NulError(ffi::NulError)
}

impl From<io::Error> for Error {
    fn from(e: io::Error) -> Error {
        Error::IoError(e)
    }
}

impl From<ffi::IntoStringError> for Error {
    fn from(e: ffi::IntoStringError) -> Error {
        Error::IntoStringError(e)
    }
}

impl From<ffi::NulError> for Error {
    fn from(e: ffi::NulError) -> Error {
        Error::NulError(e)
    }
}

impl From<std::str::Utf8Error> for Error {
    fn from(e: std::str::Utf8Error) -> Error {
        Error::Utf8Error(e)
    }
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        match *self {
            Error::IoError(ref e) => write!(f, "Encountered I/O error while attempting to load .trashrc: {}.", e),
            Error::IntoStringError(ref e) => write!(f, "Failed to parse pw_dir as String: {}.", e),
            // ParseError carries (message, line); note the reversed field order here.
            Error::ParseError(ref e) => write!(f, "Error while parsing .trashrc: Line {} - {}.", e.1, e.0),
            Error::NulError(ref e) => write!(f, "Interior null byte found when parsing aliases or exports, don't pull null bytes there: {}.", e),
            Error::Utf8Error(ref e) => write!(f, "System username was invalid utf-8: {}", e),
        }
    }
}

// State machine for the `.trashrc` section parser. A file consists of
// `PATH { ... }`, `EXPORTS { ... }`, and `ALIASES { ... }` sections.
enum ParserState {
    LookingForSection,
    PathSection,
    ExportsSection,
    AliasesSection,
}

/// Loads the .trashrc in the user's home directory.
///
/// Returns `(path, exports, aliases)`:
/// - `path`: entries from the PATH section plus any segments of the current
///   `$PATH` environment variable appended afterwards;
/// - `exports`: the EXPORTS entries as `CString`s, plus a synthesized
///   `PATH=...` and `HOME=...` entry appended at the end;
/// - `aliases`: map from alias name to its replacement text (see the note on
///   NUL substitution below).
///
/// A missing `.trashrc` is not an error; only the env-derived PATH/HOME
/// exports are produced in that case. Tokens after a standalone `#` token on
/// a line are ignored (note: `#foo` attached to a token is NOT a comment).
pub fn load_settings(home_dir: &str) -> Result<(Vec<PathBuf>, Vec<CString>, HashMap<CString, String>), Error> {
    let mut exports: Vec<CString> = Vec::with_capacity(16);
    let mut path: Vec<PathBuf> = Vec::with_capacity(16);
    let mut aliases: HashMap<CString, String> = HashMap::with_capacity(16);
    let mut trash_rc_path = PathBuf::from(home_dir);
    trash_rc_path.push(".trashrc");
    if trash_rc_path.is_file() {
        let f = File::open(trash_rc_path)?;
        let f = BufReader::new(f);
        let mut parser_state = ParserState::LookingForSection;
        // One-shot flags: each section may appear at most once.
        let mut visited_path = false;
        let mut visited_exports = false;
        let mut visited_aliases = false;
        // Set after a section identifier; the very next token must be `{`.
        let mut expected_open = false;
        let mut line_number = 0;
        for line in f.lines() {
            line_number += 1;
            let line = line?;
            for token in line.split_whitespace() {
                // Standalone `#` starts a comment: skip the rest of the line.
                if token == "#" {
                    break;
                }
                if expected_open {
                    if token == "{" {
                        expected_open = false;
                        continue;
                    } else {
                        let issue = match parser_state {
                            // `expected_open` is only ever set together with a
                            // concrete section state, so this arm cannot be hit.
                            ParserState::LookingForSection => unreachable!(),
                            ParserState::PathSection => "PATH section identifier was not immediately proceeded by an opening section token `{`",
                            ParserState::ExportsSection => "EXPORTS section identifier was not immediately proceeded by an opening section token `{`",
                            ParserState::AliasesSection => "ALIASES section identifier was not immediately proceeded by an opening section token `{`",
                        };
                        return Err(Error::ParseError((issue.into(), line_number)));
                    }
                }
                match token {
                    "PATH" => {
                        if visited_path {
                            return Err(Error::ParseError(("Encountered PATH identifier but PATH already set".into(), line_number)));
                        }
                        match parser_state {
                            ParserState::LookingForSection => (),
                            ParserState::PathSection => return Err(Error::ParseError(("Encountered PATH section identifier while still processing PATH".into(), line_number))),
                            ParserState::ExportsSection => return Err(Error::ParseError(("Encountered PATH section identifier while still processing EXPORTS".into(), line_number))),
                            // NOTE(review): this message looks copy-pasted from the
                            // ALIASES arm — it should presumably mention PATH, not
                            // ALIASES. Behavior (an error) is still correct.
                            ParserState::AliasesSection => return Err(Error::ParseError(("Encountered ALIASES section identifier while still processing ALIASES".into(), line_number))),
                        }
                        expected_open = true;
                        parser_state = ParserState::PathSection;
                    },
                    "EXPORTS" => {
                        if visited_exports {
                            return Err(Error::ParseError(("Encountered EXPORTS identifier but EXPORTS already set".into(), line_number)));
                        }
                        match parser_state {
                            ParserState::LookingForSection => (),
                            ParserState::PathSection => return Err(Error::ParseError(("Encountered EXPORTS identifier while still processing PATH".into(), line_number))),
                            ParserState::ExportsSection => return Err(Error::ParseError(("Encountered EXPORTS identifier while still processing EXPORTS".into(), line_number))),
                            ParserState::AliasesSection => return Err(Error::ParseError(("Encountered ALIASES identifier while still processing ALIASES".into(), line_number))),
                        }
                        expected_open = true;
                        parser_state = ParserState::ExportsSection;
                    },
                    "ALIASES" => {
                        if visited_aliases {
                            return Err(Error::ParseError(("Encountered ALIASES identifier but ALIASES already set.".into(), line_number)));
                        }
                        // NOTE(review): unlike the PATH/EXPORTS arms, this arm does
                        // not check `parser_state`, so `ALIASES` inside an open
                        // section is silently accepted — likely an oversight.
                        expected_open = true;
                        parser_state = ParserState::AliasesSection;
                    },
                    "}" => {
                        match parser_state {
                            ParserState::LookingForSection => return Err(Error::ParseError(("Encountered closing section token `}` but no section was open".into(), line_number))),
                            ParserState::ExportsSection => { visited_exports = true },
                            ParserState::PathSection => { visited_path = true },
                            ParserState::AliasesSection =>{ visited_aliases = true },
                        }
                        parser_state = ParserState::LookingForSection;
                    },
                    "{" => {
                        return Err(Error::ParseError(("Received opening section token `{` but without a preceding identifier".into(), line_number)));
                    },
                    _ => {
                        // Any other token is section content.
                        match parser_state {
                            ParserState::LookingForSection => {
                                return Err(Error::ParseError((format!("Encountered unexpected token `{}`; expected section identifier", token), line_number)));
                            },
                            // PATH entries: one directory per whitespace token.
                            ParserState::PathSection => { path.push(PathBuf::from(token)) },
                            // EXPORTS entries: one `NAME=value` token each.
                            ParserState::ExportsSection => {
                                let bytes: Vec<u8> = token.bytes().collect();
                                exports.push(CString::new(bytes)?);
                            },
                            // ALIASES entries consume the whole line (`name=replacement`),
                            // not just the current token, hence the `break` at the end.
                            ParserState::AliasesSection => {
                                let alias: Vec<&str> = line.trim().splitn(2, |x| x == '=').collect();
                                if alias.len() != 2 {
                                    return Err(Error::ParseError((format!("Failed to create alias from `{}`", line.trim()), line_number)));
                                }
                                let mut replacement = String::from(alias[1]);
                                // NOTE(review): spaces in the replacement are turned
                                // into NUL bytes and a trailing NUL is appended —
                                // presumably to pre-split it into argv-style
                                // NUL-terminated words for exec; confirm against the
                                // consumer of `aliases`.
                                // SAFETY: only ASCII 0x20 bytes are replaced with 0x00,
                                // both single-byte code points, so the String remains
                                // valid UTF-8.
                                unsafe {
                                    for byte in replacement.as_bytes_mut() {
                                        if *byte == b' ' {
                                            *byte = 0;
                                        }
                                    }
                                }
                                replacement.push('\0');
                                aliases.insert(CString::new(alias[0])?, replacement);
                                break
                            },
                        }
                    }
                }
            }
        }
        // EOF inside an open section is a parse error.
        match parser_state {
            ParserState::LookingForSection => (),
            ParserState::PathSection => return Err(Error::ParseError(("Still parsing PATH section when end of .trashrc was reached".into(), line_number))),
            ParserState::ExportsSection => return Err(Error::ParseError(("Still parsing EXPORTS section when end of .trashrc was reached".into(), line_number))),
            ParserState::AliasesSection => return Err(Error::ParseError(("Still parsing ALIASES section when end of .trashrc was reached".into(), line_number)))
        }
    }
    // If a PATH is already set, append those values
    if let Ok(path_string) = env::var("PATH") {
        for segment in path_string.split(":") {
            path.push(PathBuf::from(segment));
        }
    }
    // export our PATH
    {
        let mut path_string = path.iter().fold(String::from("PATH="), |mut string, path| {
            string.push_str(path.to_str().unwrap());
            string.push(':');
            string
        });
        if path_string.ends_with(":") {
            let _ = path_string.pop(); // Remove trailing :
        }
        exports.push(CString::new(path_string)?);
    }
    // export HOME
    {
        let home_string = format!("HOME={}", home_dir);
        exports.push(CString::new(home_string)?);
    }
    Ok((path, exports, aliases))
}
use super::expressions::*;
use super::lex_token::TokenType;
use super::lex_token::*;
use super::scanner::Scanner;
use super::statements::*;
use anyhow::Result as AnyResult;
use std::iter::Peekable;

/// Recursive-descent parser over a peekable token `Scanner`, producing a
/// comment-preserving AST (for a formatter: every newline/comment token is
/// attached to a node rather than discarded).
pub struct Parser<'a> {
    // Completed top-level statements.
    pub ast: Vec<StmtBox<'a>>,
    // When true, `primary` wraps unknown tokens as `UnidentifiedAsLiteral`
    // instead of erroring; enabled only while inside `expression()`.
    allow_unidentified: bool,
    scanner: Peekable<Scanner<'a>>,
    // Cleared when a newline/comment interrupts an expression, which stops
    // the binary/assignment parsers from pairing across it.
    can_pair: bool,
    // Statements recovered out-of-band by `var_declaration` error recovery.
    leftover_stmts: Vec<StmtBox<'a>>,
    // Signals `build_ast` to flush `leftover_stmts` after the current statement.
    check_leftovers: bool,
}

impl<'a> Parser<'a> {
    /// Creates a parser over `input`.
    pub fn new(input: &'a str) -> Parser<'a> {
        Parser {
            ast: Vec::new(),
            scanner: Scanner::new(input).into_iter().peekable(),
            allow_unidentified: false,
            can_pair: true,
            leftover_stmts: Vec::new(),
            check_leftovers: false,
        }
    }

    /// Consumes the parser, parsing statements until the token stream is
    /// exhausted, and returns the finished AST.
    pub fn build_ast(mut self) -> AnyResult<Vec<StmtBox<'a>>> {
        while let Some(_) = self.scanner.peek() {
            self.can_pair = true;
            let ret = self.statement()?;
            self.ast.push(ret);
            // Flush any statements rescued by var_declaration error recovery.
            if self.check_leftovers {
                self.ast.append(&mut self.leftover_stmts);
                self.check_leftovers = false;
            }
        }

        Ok(self.ast)
    }

    /// Dispatches on the next token to the appropriate statement parser;
    /// falls through to an expression statement for anything unrecognized.
    fn statement(&mut self) -> AnyResult<StmtBox<'a>> {
        if let Some(token) = self.scanner.peek() {
            match token.token_type {
                TokenType::Comment(_) => {
                    let comment = self.consume_next();
                    return Ok(StatementWrapper::new(Statement::Comment { comment }, false));
                }
                TokenType::MultilineComment(_) => {
                    let multiline_comment = self.consume_next();
                    return Ok(StatementWrapper::new(
                        Statement::MultilineComment { multiline_comment },
                        false,
                    ));
                }
                TokenType::RegionBegin(_) => {
                    let token = self.consume_next();
                    return Ok(StatementWrapper::new(Statement::RegionBegin(token), false));
                }
                TokenType::RegionEnd(_) => {
                    let token = self.consume_next();
                    return Ok(StatementWrapper::new(Statement::RegionEnd(token), false));
                }
                TokenType::Macro(_) => {
                    let token = self.consume_next();
                    return Ok(StatementWrapper::new(Statement::Macro(token), false));
                }
                TokenType::Define => {
                    self.consume_next();
                    return self.define_statement();
                }
                TokenType::Var | TokenType::GlobalVar => {
                    return self.series_var_declaration();
                }
                TokenType::Enum => {
                    self.consume_next();
                    return self.enum_declaration();
                }
                TokenType::If => {
                    self.consume_next();
                    return self.if_statement();
                }
                TokenType::Return => {
                    self.consume_next();
                    return self.return_statement();
                }
                TokenType::Break => {
                    self.consume_next();
                    return self.break_statement();
                }
                TokenType::Exit => {
                    self.consume_next();
                    // NOTE(review): `exit_statment` is a typo for `exit_statement`;
                    // internal only, but worth renaming in a follow-up.
                    return self.exit_statment();
                }
                TokenType::Do => {
                    self.consume_next();
                    return self.do_until_statement();
                }
                TokenType::While | TokenType::With | TokenType::Repeat => {
                    let token = self.consume_next();
                    return self.while_with_repeat(token);
                }
                TokenType::Switch => {
                    self.consume_next();
                    return self.switch_statement();
                }
                TokenType::For => {
                    self.consume_next();
                    return self.for_statement();
                }
                TokenType::LeftBrace => {
                    self.consume_next();
                    return self.block();
                }
                _ => return self.expression_statement(),
            }
        };

        self.expression_statement()
    }

    /// Parses `#define name` followed by every statement up to the next
    /// `#define` (or EOF) as the script body.
    fn define_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();
        let script_name = self.expression()?;
        let mut body = vec![];
        while let Some(token) = self.scanner.peek() {
            match token.token_type {
                TokenType::Define => {
                    break;
                }
                _ => {
                    body.push(self.statement()?);
                }
            }
        }
        Ok(StatementWrapper::new(
            Statement::Define {
                comments_after_control_word,
                script_name,
                body,
            },
            false,
        ))
    }

    /// Parses a `var`/`globalvar` declaration list statement.
    fn series_var_declaration(&mut self) -> AnyResult<StmtBox<'a>> {
        // Caller guaranteed the next token is Var/GlobalVar.
        let starting_var_type = self.scanner.next().unwrap();
        let comments_after_control_word = self.get_newlines_and_comments();

        let var_decl = self.var_declaration()?;
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::VariableDeclList {
                starting_var_type,
                var_decl,
                comments_after_control_word,
            },
            has_semicolon,
        ))
    }

    /// Parses the comma-delimited declarations after `var`, including the
    /// GML oddity of repeated `var` keywords inside one list. Non-declaration
    /// expressions found mid-list are rescued into `leftover_stmts` so the
    /// formatter can still emit them (error recovery, not failure).
    fn var_declaration(&mut self) -> AnyResult<DelimitedLines<'a, VariableDecl<'a>>> {
        let mut arguments: Vec<DelimitedLine<'a, VariableDecl<'a>>> = Vec::new();
        let end_delimiter;

        loop {
            if self.check_next(TokenType::Semicolon) {
                end_delimiter = true;
                break;
            }

            let has_var = self.check_next_either(TokenType::Var, TokenType::GlobalVar);
            let mut say_var = None;
            let mut say_var_comments = None;
            if has_var {
                say_var = Some(self.scanner.next().unwrap());
                say_var_comments = Some(self.get_newlines_and_comments());
            }

            // If we've said var, and then had an expression, we deserve suffering.
            if has_var == false {
                if let Some(next) = self.scanner.peek() {
                    if let TokenType::Identifier(_) = next.token_type {
                    } else {
                        // A `,` (and possibly comments) was followed by something
                        // that cannot start a declaration: stop the list here.
                        end_delimiter = false;
                        break;
                    }
                } else {
                    // Token stream ended right after a `,`: close the list.
                    end_delimiter = true;
                    break;
                }
            }

            let var_expr = self.expression()?;
            match var_expr.expr {
                Expr::Identifier { .. } | Expr::Assign { .. } => {}
                _ => {
                    // Not a declaration at all: stash it as a standalone
                    // statement for build_ast to append, and end the list.
                    let has_semicolon = self.check_next_consume(TokenType::Semicolon);
                    self.check_leftovers = true;
                    self.leftover_stmts.push(StatementWrapper::new(
                        Statement::ExpresssionStatement { expression: var_expr },
                        has_semicolon,
                    ));
                    end_delimiter = true;
                    break;
                }
            };

            let var_decl = VariableDecl {
                say_var,
                say_var_comments,
                var_expr,
            };

            let do_break = self.check_next_consume(TokenType::Comma) == false;
            let trailing_comment = self.get_newlines_and_comments();

            arguments.push(DelimitedLine {
                expr: var_decl,
                trailing_comment,
            });

            if do_break {
                end_delimiter = false;
                break;
            }
        }

        Ok(DelimitedLines {
            lines: arguments,
            has_end_delimiter: end_delimiter,
        })
    }

    /// Parses `{ statements... }` (the `{` was already consumed by `statement`).
    fn block(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_lbrace = self.get_newlines_and_comments();

        let mut statements = Vec::new();
        while let Some(_) = self.scanner.peek() {
            if self.check_next_consume(TokenType::RightBrace) {
                break;
            } else {
                statements.push(self.statement()?);
            }
        }
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::Block {
                statements,
                comments_after_lbrace,
            },
            has_semicolon,
        ))
    }

    /// Parses `if cond stmt [else stmt]` (the `if` was already consumed).
    fn if_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();

        let condition = self.expression()?;
        let then_branch = self.statement()?;
        let comments_between = self.get_newlines_and_comments();
        let else_branch = if self.check_next_consume(TokenType::Else) {
            Some(self.statement()?)
        } else {
            None
        };
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::If {
                comments_after_control_word,
                condition,
                then_branch,
                comments_between,
                else_branch,
            },
            has_semicolon,
        ))
    }

    /// Parses `while`/`with`/`repeat` — all three share the shape
    /// `keyword condition body`; `token` is the already-consumed keyword.
    fn while_with_repeat(&mut self, token: Token<'a>) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();

        let condition = self.expression()?;
        let body = self.statement()?;
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::WhileWithRepeat {
                token,
                condition,
                body,
                comments_after_control_word,
            },
            has_semicolon,
        ))
    }

    /// Parses `do body until condition` (the `do` was already consumed).
    fn do_until_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();

        let body = self.statement()?;
        let comments_between = self.get_newlines_and_comments();
        self.check_next_consume(TokenType::Until);
        let condition = self.expression()?;
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::DoUntil {
                comments_after_control_word,
                comments_between,
                condition,
                body,
            },
            has_semicolon,
        ))
    }

    /// Parses `switch cond { case …: … default: … }`. Each case body runs
    /// until the next `case`/`default`/`}` token.
    fn switch_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();

        let condition = self.expression()?;
        self.check_next_consume(TokenType::LeftBrace);
        let comments_after_lbrace = self.get_newlines_and_comments();

        let mut cases: Vec<Case<'a>> = vec![];

        while let Some(token) = self.scanner.peek() {
            match token.token_type {
                TokenType::Case => {
                    self.consume_next();
                    let comments_after_control_word = self.get_newlines_and_comments();
                    let constant = self.expression()?;
                    self.check_next_consume(TokenType::Colon);
                    let comments_after_colon = self.get_newlines_and_comments();

                    let mut statements = Vec::new();
                    while let Some(token) = self.scanner.peek() {
                        match token.token_type {
                            TokenType::DefaultCase | TokenType::Case => {
                                break;
                            }
                            TokenType::RightBrace => {
                                break;
                            }
                            _ => {
                                statements.push(self.statement()?);
                            }
                        }
                    }

                    cases.push(Case {
                        comments_after_control_word,
                        control_word: CaseType::Case(constant),
                        comments_after_colon,
                        statements,
                    });
                }
                TokenType::DefaultCase => {
                    self.consume_next();
                    let comments_after_control_word = self.get_newlines_and_comments();
                    self.check_next_consume(TokenType::Colon);
                    let comments_after_colon = self.get_newlines_and_comments();

                    let mut statements = Vec::new();
                    while let Some(token) = self.scanner.peek() {
                        match token.token_type {
                            TokenType::DefaultCase | TokenType::Case | TokenType::RightBrace => {
                                break;
                            }
                            _ => {
                                statements.push(self.statement()?);
                            }
                        }
                    }

                    cases.push(Case {
                        comments_after_control_word,
                        control_word: CaseType::Default,
                        comments_after_colon,
                        statements,
                    });
                }
                TokenType::RightBrace => break,
                _ => {
                    anyhow::bail!("Unknown token {} in Switch statement", token);
                }
            }
        }
        self.check_next_consume(TokenType::RightBrace);
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::Switch {
                comments_after_control_word,
                comments_after_lbrace,
                cases,
                condition,
            },
            has_semicolon,
        ))
    }

    /// Parses `for (init; cond; incr) body`. Each clause is optional.
    fn for_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();
        self.check_next_consume(TokenType::LeftParen);
        let comments_after_lparen = self.get_newlines_and_comments();

        let initializer = if self.check_next_consume(TokenType::Semicolon) {
            None
        } else if self.check_next(TokenType::Var) {
            Some(self.series_var_declaration()?)
        } else {
            Some(self.expression_statement()?)
        };
        let comments_after_initializer = self.get_newlines_and_comments();

        let condition = if self.check_next_consume(TokenType::Semicolon) {
            None
        } else {
            Some(self.expression()?)
        };
        self.check_next_consume(TokenType::Semicolon);
        let comments_after_condition = self.get_newlines_and_comments();

        let increment = if self.check_next(TokenType::RightParen) {
            None
        } else {
            Some(self.expression()?)
        };
        // NOTE(review): GML tolerates a trailing `;` after the increment
        // (`for (…; …; i++;)`), which this consume allows — confirm intended.
        self.check_next_consume(TokenType::Semicolon);
        let comments_after_increment = self.get_newlines_and_comments();

        self.check_next_consume(TokenType::RightParen);
        let comments_after_rparen = self.get_newlines_and_comments();

        let body = self.statement()?;
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);

        Ok(StatementWrapper::new(
            Statement::For {
                comments_after_control_word,
                comments_after_lparen,
                initializer,
                comments_after_initializer,
                condition,
                comments_after_condition,
                increment,
                comments_after_increment,
                comments_after_rparen,
                body,
            },
            has_semicolon,
        ))
    }

    /// Parses `return [expr]` (the `return` was already consumed).
    fn return_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let expression = if self.check_next(TokenType::Semicolon) {
            None
        } else {
            Some(self.expression()?)
        };
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);
        Ok(StatementWrapper::new(Statement::Return { expression }, has_semicolon))
    }

    /// Parses `break` (the keyword was already consumed).
    fn break_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);
        Ok(StatementWrapper::new(Statement::Break, has_semicolon))
    }

    /// Parses `exit` (the keyword was already consumed).
    /// NOTE(review): name is a typo for `exit_statement`.
    fn exit_statment(&mut self) -> AnyResult<StmtBox<'a>> {
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);
        Ok(StatementWrapper::new(Statement::Exit, has_semicolon))
    }

    /// Parses `enum Name { member, ... }` (the `enum` was already consumed).
    fn enum_declaration(&mut self) -> AnyResult<StmtBox<'a>> {
        let comments_after_control_word = self.get_newlines_and_comments();
        let name = self.expression()?;
        self.check_next_consume(TokenType::LeftBrace);
        let comments_after_lbrace = self.get_newlines_and_comments();

        let members = self.finish_call(TokenType::RightBrace, TokenType::Comma)?;

        let has_semicolon = self.check_next_consume(TokenType::Semicolon);
        Ok(StatementWrapper::new(
            Statement::EnumDeclaration {
                comments_after_control_word,
                name,
                comments_after_lbrace,
                members,
            },
            has_semicolon,
        ))
    }

    /// Parses a bare expression used as a statement.
    fn expression_statement(&mut self) -> AnyResult<StmtBox<'a>> {
        let expr = self.expression()?;
        let has_semicolon = self.check_next_consume(TokenType::Semicolon);
        Ok(StatementWrapper::new(
            Statement::ExpresssionStatement { expression: expr },
            has_semicolon,
        ))
    }

    /// Entry point of the precedence-climbing expression chain. Temporarily
    /// enables `allow_unidentified` so unknown tokens become literals instead
    /// of errors, and resets `can_pair` when done.
    fn expression(&mut self) -> AnyResult<ExprBox<'a>> {
        self.allow_unidentified = true;
        let ret = self.assignment()?;
        self.can_pair = true;
        self.allow_unidentified = false;
        Ok(ret)
    }

    /// Assignment (lowest precedence); right-associative via the recursive
    /// call for the right-hand side.
    fn assignment(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.ternary()?;

        if self.can_pair {
            if let Some(token) = self.scanner.peek() {
                match token.token_type {
                    TokenType::Equal
                    | TokenType::PlusEquals
                    | TokenType::MinusEquals
                    | TokenType::StarEquals
                    | TokenType::SlashEquals
                    | TokenType::BitXorEquals
                    | TokenType::BitOrEquals
                    | TokenType::BitAndEquals
                    | TokenType::ModEquals => {
                        let operator = self.scanner.next().unwrap();
                        let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                        let assignment_expr = self.assignment()?;

                        expr = self.create_expr_box_no_comment(Expr::Assign {
                            left: expr,
                            operator: operator,
                            comments_and_newlines_between_op_and_r,
                            right: assignment_expr,
                        });
                    }
                    _ => {}
                }
            }
        }

        Ok(expr)
    }

    /// Ternary `cond ? a : b`; both arms recurse so ternaries can nest.
    fn ternary(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.or()?;

        if self.check_next_consume(TokenType::Hook) {
            let comments_and_newlines_after_q = self.get_newlines_and_comments();
            let left = self.ternary()?;
            self.check_next_consume(TokenType::Colon);
            let comments_and_newlines_after_colon = self.get_newlines_and_comments();
            let right = self.ternary()?;

            expr = self.create_expr_box_no_comment(Expr::Ternary {
                conditional: expr,
                comments_and_newlines_after_q,
                left,
                comments_and_newlines_after_colon,
                right,
            });
        }

        Ok(expr)
    }

    // parse our Logical Operands here
    fn or(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut left = self.and()?;

        if self.check_next_either(TokenType::LogicalOr, TokenType::OrAlias) {
            let token = self.scanner.next().unwrap();
            let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
            let right = self.or()?;

            left = self.create_expr_box_no_comment(Expr::Binary {
                left,
                operator: token,
                comments_and_newlines_between_op_and_r,
                right,
            });
        }

        Ok(left)
    }

    /// Logical AND (`&&` or the `and` keyword alias).
    fn and(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut left = self.xor()?;

        if self.check_next_either(TokenType::LogicalAnd, TokenType::AndAlias) {
            let token = self.scanner.next().unwrap();
            let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
            let right = self.and()?;

            left = self.create_expr_box_no_comment(Expr::Binary {
                left,
                operator: token,
                comments_and_newlines_between_op_and_r,
                right,
            });
        }

        Ok(left)
    }

    /// Logical XOR (`^^` or the `xor` keyword alias).
    fn xor(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut left = self.equality()?;

        if self.check_next_either(TokenType::LogicalXor, TokenType::XorAlias) {
            let token = self.scanner.next().unwrap();
            let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
            let right = self.xor()?;

            left = self.create_expr_box_no_comment(Expr::Binary {
                left,
                operator: token,
                comments_and_newlines_between_op_and_r,
                right,
            })
        }

        Ok(left)
    }

    /// Equality operators `==`/`!=`; left-associative via the while loop.
    fn equality(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.comparison()?;

        if self.can_pair {
            while let Some(t) = self.scanner.peek() {
                if t.token_type == TokenType::EqualEqual || t.token_type == TokenType::BangEqual {
                    let token = self.scanner.next().unwrap();
                    let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                    let right = self.comparison()?;

                    expr = self.create_expr_box_no_comment(Expr::Binary {
                        left: expr,
                        operator: token,
                        comments_and_newlines_between_op_and_r,
                        right,
                    });
                } else {
                    break;
                }
            }
        }

        Ok(expr)
    }

    /// Relational operators `<`, `<=`, `>`, `>=`.
    fn comparison(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.binary()?;

        if self.can_pair {
            while let Some(t) = self.scanner.peek() {
                match t.token_type {
                    TokenType::Greater
                    | TokenType::GreaterEqual
                    | TokenType::Less
                    | TokenType::LessEqual => {
                        let t = self.scanner.next().unwrap();
                        let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                        let right = self.binary()?;

                        expr = self.create_expr_box_no_comment(Expr::Binary {
                            left: expr,
                            operator: t,
                            comments_and_newlines_between_op_and_r,
                            right,
                        });
                    }
                    _ => break,
                };
            }
        }

        Ok(expr)
    }

    /// Bitwise operators `&`, `|`, `^`.
    fn binary(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.bitshift()?;

        if self.can_pair {
            while let Some(t) = self.scanner.peek() {
                match t.token_type {
                    TokenType::BitAnd | TokenType::BitOr | TokenType::BitXor => {
                        let t = self.scanner.next().unwrap();
                        let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                        let right = self.bitshift()?;

                        expr = self.create_expr_box_no_comment(Expr::Binary {
                            left: expr,
                            operator: t,
                            comments_and_newlines_between_op_and_r,
                            right,
                        });
                    }
                    _ => break,
                }
            }
        }

        Ok(expr)
    }

    /// Shift operators `<<`, `>>`.
    fn bitshift(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.addition()?;

        if self.can_pair {
            while let Some(t) = self.scanner.peek() {
                match t.token_type {
                    TokenType::BitLeft | TokenType::BitRight => {
                        let t = self.scanner.next().unwrap();
                        let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                        let right = self.addition()?;

                        expr = self.create_expr_box_no_comment(Expr::Binary {
                            left: expr,
                            operator: t,
                            comments_and_newlines_between_op_and_r,
                            right,
                        });
                    }
                    _ => break,
                }
            }
        }

        Ok(expr)
    }

    /// Additive operators `+`, `-`.
    fn addition(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.multiplication()?;

        if self.can_pair {
            while let Some(t) = self.scanner.peek() {
                match t.token_type {
                    TokenType::Minus | TokenType::Plus => {
                        let token = self.scanner.next().unwrap();
                        let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                        let right = self.multiplication()?;

                        expr = self.create_expr_box_no_comment(Expr::Binary {
                            left: expr,
                            operator: token,
                            comments_and_newlines_between_op_and_r,
                            right,
                        });
                    }
                    _ => break,
                };
            }
        }

        Ok(expr)
    }

    /// Multiplicative operators `*`, `/`, `mod`, `div`.
    fn multiplication(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.unary()?;

        if self.can_pair {
            while let Some(t) = self.scanner.peek() {
                match t.token_type {
                    TokenType::Slash
                    | TokenType::Star
                    | TokenType::Mod
                    | TokenType::ModAlias
                    | TokenType::Div => {
                        let token = self.scanner.next().unwrap();
                        let comments_and_newlines_between_op_and_r = self.get_newlines_and_comments();
                        let right = self.unary()?;

                        expr = self.create_expr_box_no_comment(Expr::Binary {
                            left: expr,
                            operator: token,
                            comments_and_newlines_between_op_and_r,
                            right,
                        });
                    }
                    _ => break,
                };
            }
        }

        Ok(expr)
    }

    /// Prefix operators (`!`, `-`, `+`, `~`, `not`) and pre-increment/decrement.
    fn unary(&mut self) -> AnyResult<ExprBox<'a>> {
        if self.can_pair {
            if let Some(t) = self.scanner.peek() {
                match t.token_type {
                    TokenType::Bang
                    | TokenType::Minus
                    | TokenType::Plus
                    | TokenType::Tilde
                    | TokenType::NotAlias => {
                        let t = self.scanner.next().unwrap();
                        let comments_and_newlines_between = self.get_newlines_and_comments();
                        let right = self.unary()?;

                        return Ok(self.create_expr_box_no_comment(Expr::Unary {
                            operator: t,
                            comments_and_newlines_between,
                            right,
                        }));
                    }
                    TokenType::Incrementer | TokenType::Decrementer => {
                        let t = self.scanner.next().unwrap();
                        let comments_and_newlines_between = self.get_newlines_and_comments();
                        let right = self.unary()?;

                        return Ok(self.create_expr_box_no_comment(Expr::Unary {
                            operator: t,
                            comments_and_newlines_between,
                            right,
                        }));
                    }
                    _ => {}
                }
            }
        }

        self.postfix()
    }

    /// Postfix `++`/`--`.
    fn postfix(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expr = self.call()?;

        if self.check_next_either(TokenType::Incrementer, TokenType::Decrementer) {
            let t = self.scanner.next().unwrap();
            let comments_and_newlines_between = self.get_newlines_and_comments();

            expr = self.create_expr_box_no_comment(Expr::Postfix {
                operator: t,
                comments_and_newlines_between,
                expr,
            });
        }

        Ok(expr)
    }

    /// Function calls, dot access, and data-structure accessors
    /// (`[..]`, `[@..]`, `[?..]`, `[|..]`, `[#..]`), chained left to right.
    fn call(&mut self) -> AnyResult<ExprBox<'a>> {
        let mut expression = self.primary()?;

        if self.check_next_consume(TokenType::LeftParen) {
            let comments_and_newlines_after_lparen = self.get_newlines_and_comments();

            let arguments = self.finish_call(TokenType::RightParen, TokenType::Comma)?;

            expression = self.create_comment_expr_box(Expr::Call {
                procedure_name: expression,
                arguments,
                comments_and_newlines_after_lparen,
            });
        }

        while let Some(token) = self.scanner.peek() {
            match token.token_type {
                TokenType::Dot => {
                    self.consume_next();
                    let comments_between = self.get_newlines_and_comments();
                    let instance_variable = self.call()?;

                    expression = self.create_comment_expr_box(Expr::DotAccess {
                        object_name: expression,
                        comments_between,
                        instance_variable,
                    });
                }
                TokenType::LeftBracket
                | TokenType::ArrayIndexer
                | TokenType::MapIndexer
                | TokenType::ListIndexer
                | TokenType::GridIndexer => {
                    let access_type = self.scanner.next().unwrap();
                    let mut access_exprs = vec![];

                    while let Some(token) = self.scanner.peek() {
                        if token.token_type == TokenType::RightBracket {
                            break;
                        }

                        access_exprs.push((self.get_newlines_and_comments(), self.expression()?));

                        if self.check_next_consume(TokenType::Comma) == false {
                            break;
                        }
                    }
                    self.check_next_consume(TokenType::RightBracket);

                    expression = self.create_comment_expr_box(Expr::DataStructureAccess {
                        ds_name: expression,
                        access_type,
                        access_exprs,
                    });
                }
                _ => break,
            }
        }

        Ok(expression)
    }

    /// Atoms: literals, identifiers, groupings, array literals; also absorbs
    /// newline/comment tokens (clearing `can_pair` so operators don't pair
    /// across them). Unknown tokens become `UnidentifiedAsLiteral` only when
    /// called from inside `expression()`.
    fn primary(&mut self) -> AnyResult<ExprBox<'a>> {
        if let Some(t) = self.scanner.peek() {
            let output = match t.token_type {
                TokenType::Number(_) | TokenType::String(_) => {
                    let t = self.consume_next();
                    let comments = self.get_newlines_and_comments();
                    self.create_expr_box_no_comment(Expr::Literal {
                        literal_token: t,
                        comments,
                    })
                }
                TokenType::NumberStartDot(_) => {
                    let t = self.consume_next();
                    let comments = self.get_newlines_and_comments();
                    self.create_expr_box_no_comment(Expr::NumberStartDot {
                        literal_token: t,
                        comments,
                    })
                }
                TokenType::NumberEndDot(_) => {
                    let t = self.consume_next();
                    let comments = self.get_newlines_and_comments();
                    self.create_expr_box_no_comment(Expr::NumberEndDot {
                        literal_token: t,
                        comments,
                    })
                }
                TokenType::Identifier(_) => {
                    let t = self.consume_next();
                    let comments = self.get_newlines_and_comments();
                    self.create_expr_box_no_comment(Expr::Identifier { name: t, comments })
                }
                TokenType::LeftParen => {
                    self.consume_next();
                    let comments_and_newlines_after_lparen = self.get_newlines_and_comments();
                    let mut expressions = vec![];
                    expressions.push(self.expression()?);
                    while self.check_next_consume(TokenType::RightParen) == false {
                        expressions.push(self.expression()?);
                    }
                    let comments_and_newlines_after_rparen = self.get_newlines_and_comments();
                    self.create_expr_box_no_comment(Expr::Grouping {
                        expressions,
                        comments_and_newlines_after_lparen,
                        comments_and_newlines_after_rparen,
                    })
                }
                TokenType::LeftBracket => {
                    self.consume_next();
                    let comments_and_newlines_after_lbracket = self.get_newlines_and_comments();
                    let arguments = self.finish_call(TokenType::RightBracket, TokenType::Comma)?;
                    self.create_expr_box_no_comment(Expr::ArrayLiteral {
                        comments_and_newlines_after_lbracket,
                        arguments,
                    })
                }
                TokenType::Newline(_) => {
                    self.consume_next();
                    self.can_pair = false;
                    self.create_expr_box_no_comment(Expr::Newline)
                }
                TokenType::Comment(_) => {
                    let comment = self.consume_next();
                    self.can_pair = false;
                    self.create_expr_box_no_comment(Expr::Comment { comment })
                }
                TokenType::MultilineComment(_) => {
                    let multiline_comment = self.consume_next();
                    self.can_pair = false;
                    self.create_expr_box_no_comment(Expr::MultilineComment { multiline_comment })
                }
                _ => {
                    let literal_token = self.consume_next();
                    if self.allow_unidentified == false {
                        anyhow::bail!("Error parsing {}", literal_token);
                    }
                    self.create_comment_expr_box(Expr::UnidentifiedAsLiteral { literal_token })
                }
            };
            return Ok(output);
        }

        anyhow::bail!("Unexpected end!");
    }

    /// Parses a delimited list (call args, array literal, enum members) up to
    /// `end_token_type`, recording whether a trailing delimiter was present.
    fn finish_call(
        &mut self,
        end_token_type: TokenType,
        delimiter_type: TokenType,
    ) -> AnyResult<DelimitedLines<'a, ExprBox<'a>>> {
        let mut arguments = Vec::new();
        let mut end_delimiter = true;

        if self.check_next(end_token_type) == false {
            loop {
                if self.check_next(end_token_type) {
                    end_delimiter = true;
                    break;
                }
                let expr = self.expression()?;
                let do_break = self.check_next_consume(delimiter_type) == false;
                let trailing_comment = self.get_newlines_and_comments();
                arguments.push(DelimitedLine { expr, trailing_comment });

                if do_break {
                    end_delimiter = false;
                    break;
                }
            }
        };
        self.check_next_consume(end_token_type);

        Ok(DelimitedLines {
            lines: arguments,
            has_end_delimiter: end_delimiter,
        })
    }

    /// True if the next token is `token_type`. Always false when `can_pair`
    /// is cleared (a newline/comment interrupted the expression).
    fn check_next(&mut self, token_type: TokenType) -> bool {
        if self.can_pair == false {
            return false;
        }
        if let Some(t) = self.scanner.peek() {
            return t.token_type == token_type;
        }

        false
    }

    /// True if the next token is either of the two given types (subject to
    /// the same `can_pair` gate as `check_next`).
    fn check_next_either(&mut self, token_type1: TokenType, token_type2: TokenType) -> bool {
        if self.can_pair == false {
            return false;
        }
        if let Some(t) = self.scanner.peek() {
            return t.token_type == token_type1 || t.token_type == token_type2;
        }

        false
    }

    /// Consumes the next token if it matches; returns whether it did.
    fn check_next_consume(&mut self, token_type: TokenType) -> bool {
        if self.can_pair == false {
            return false;
        }
        if self.check_next(token_type) {
            self.consume_next();

            true
        } else {
            false
        }
    }

    /// Collects a run of newline/comment/region/`then` tokens, returning
    /// `None` when there were none (so empty runs allocate nothing).
    fn get_newlines_and_comments(&mut self) -> Option<Vec<Token<'a>>> {
        let mut ret: Option<Vec<Token<'a>>> = None;
        while let Some(token) = self.scanner.peek() {
            match token.token_type {
                TokenType::Newline(_)
                | TokenType::Comment(_)
                | TokenType::MultilineComment(_)
                | TokenType::RegionBegin(_)
                | TokenType::RegionEnd(_)
                | TokenType::Then => {
                    let token = self.scanner.next().unwrap();
                    if let Some(vec) = &mut ret {
                        vec.push(token);
                    } else {
                        ret = Some(vec![token]);
                    }
                }
                _ => break,
            }
        }
        ret
    }

    /// Unconditionally consumes the next token. Panics if the stream is
    /// empty — callers must have peeked first.
    fn consume_next(&mut self) -> Token<'a> {
        self.scanner.next().unwrap()
    }

    /// Boxes `expr` and attaches any trailing newline/comment tokens.
    fn create_comment_expr_box(&mut self, expr: Expr<'a>) -> ExprBox<'a> {
        Box::new(ExprBoxInterior {
            expr,
            trailing_comments: self.get_newlines_and_comments(),
        })
    }

    /// Boxes `expr` with no trailing comments.
    fn create_expr_box_no_comment(&self, expr: Expr<'a>) -> ExprBox<'a> {
        Box::new(ExprBoxInterior {
            expr,
            trailing_comments: None,
        })
    }
}
/* * Copyright (c) 2020 Adel Prokurov * All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use super::msg::{Msg, MsgWithPos}; use super::reader::Reader; use super::token::*; use std::collections::HashMap; pub struct Lexer { reader: Reader, keywords: HashMap<&'static str, TokenKind>, } use hmap::hmap; impl Lexer { pub fn from_str(code: &str) -> Lexer { let reader = Reader::from_string(code); Lexer::new(reader) } pub fn new(reader: Reader) -> Lexer { let keywords = hmap!( "this" => TokenKind::This, "function" => TokenKind::Fun, "let" => TokenKind::Let, "var" => TokenKind::Var, "while" => TokenKind::While, "for" => TokenKind::For, "try" => TokenKind::Try, "catch" => TokenKind::Catch, "if" => TokenKind::If, "else" => TokenKind::Else, "in" => TokenKind::In, "loop" => TokenKind::Loop, "match" => TokenKind::Match, "when" => TokenKind::When, "const" => TokenKind::Const, "return" => TokenKind::Return, "true" => TokenKind::True, "false" => TokenKind::False, "null" => TokenKind::Nil, "type" => TokenKind::Type, "throw" => TokenKind::Throw, "do" => TokenKind::Do, "import" => TokenKind::Import, "class" => TokenKind::Class, "implements" => TokenKind::Implements, "new" => TokenKind::New ); Lexer { reader: reader, keywords: keywords, } } pub fn filename(&self) -> &str { self.reader.filename() } fn read_multi_comment(&mut self) -> Result<(), MsgWithPos> { let pos = self.reader.pos(); self.read_char(); self.read_char(); while !self.cur().is_none() && !self.is_multi_comment_end() { 
self.read_char(); } if self.cur().is_none() { return Err(MsgWithPos::new(pos, Msg::UnclosedComment)); } self.read_char(); self.read_char(); Ok(()) } pub fn read_token(&mut self) -> Result<Token, MsgWithPos> { loop { self.skip_white(); let pos = self.reader.pos(); let ch = self.cur(); if let None = ch { return Ok(Token::new(TokenKind::End, pos)); } if is_digit(ch) { return self.read_number(); } else if self.is_comment_start() { self.read_comment()?; } else if self.is_multi_comment_start() { self.read_multi_comment()?; } else if is_identifier_start(ch) { return self.read_identifier(); } else if is_quote(ch) { return self.read_string(); } else if is_char_quote(ch) { return self.read_char_literal(); } else if is_operator(ch) { return self.read_operator(); } else { let ch = ch.unwrap(); return Err(MsgWithPos::new(pos, Msg::UnknownChar(ch))); } } } fn skip_white(&mut self) { while is_whitespace(self.cur()) { self.read_char(); } } fn read_identifier(&mut self) -> Result<Token, MsgWithPos> { let pos = self.reader.pos(); let mut value = String::new(); while is_identifier(self.cur()) { let ch = self.cur().unwrap(); self.read_char(); value.push(ch); } let lookup = self.keywords.get(&value[..]).cloned(); let ttype; if let Some(tok_type) = lookup { ttype = tok_type; } else if value == "_" { ttype = TokenKind::Underscore; } else { ttype = TokenKind::Identifier(value); } Ok(Token::new(ttype, pos)) } fn read_char_literal(&mut self) -> Result<Token, MsgWithPos> { let pos = self.reader.pos(); self.read_char(); let ch = self.read_escaped_char(pos, Msg::UnclosedChar)?; if is_char_quote(self.cur()) { self.read_char(); let ttype = TokenKind::LitChar(ch); Ok(Token::new(ttype, pos)) } else { Err(MsgWithPos::new(pos, Msg::UnclosedChar)) } } fn read_escaped_char(&mut self, pos: Position, unclosed: Msg) -> Result<char, MsgWithPos> { if let Some(ch) = self.cur() { self.read_char(); if ch == '\\' { let ch = if let Some(ch) = self.cur() { ch } else { return Err(MsgWithPos::new(pos, unclosed)); 
}; self.read_char(); match ch { '\\' => Ok('\\'), 'n' => Ok('\n'), 't' => Ok('\t'), 'r' => Ok('\r'), '\"' => Ok('\"'), '\'' => Ok('\''), '0' => Ok('\0'), _ => { let msg = Msg::InvalidEscapeSequence(ch); Err(MsgWithPos::new(pos, msg)) } } } else { Ok(ch) } } else { Err(MsgWithPos::new(pos, unclosed)) } } fn read_string(&mut self) -> Result<Token, MsgWithPos> { let pos = self.reader.pos(); let mut value = String::new(); self.read_char(); while !self.cur().is_none() && !is_quote(self.cur()) { let ch = self.read_escaped_char(pos, Msg::UnclosedString)?; value.push(ch); } if is_quote(self.cur()) { self.read_char(); let ttype = TokenKind::String(value); Ok(Token::new(ttype, pos)) } else { Err(MsgWithPos::new(pos, Msg::UnclosedString)) } } fn read_operator(&mut self) -> Result<Token, MsgWithPos> { let mut tok = self.build_token(TokenKind::End); let ch = self.cur().unwrap(); self.read_char(); let nch = self.cur().unwrap_or('x'); tok.kind = match ch { '+' => TokenKind::Add, '-' => { if nch == '>' { self.read_char(); TokenKind::Arrow } else { TokenKind::Sub } } '*' => TokenKind::Mul, '/' => TokenKind::Div, '%' => TokenKind::Mod, '(' => TokenKind::LParen, ')' => TokenKind::RParen, '[' => TokenKind::LBracket, ']' => TokenKind::RBracket, '{' => TokenKind::LBrace, '}' => TokenKind::RBrace, '|' => { if nch == '|' { self.read_char(); TokenKind::Or } else { TokenKind::BitOr } } '&' => { if nch == '&' { self.read_char(); TokenKind::And } else { TokenKind::BitAnd } } '^' => TokenKind::Caret, '~' => TokenKind::Tilde, ',' => TokenKind::Comma, ';' => TokenKind::Semicolon, ':' => { if nch == ':' { self.read_char(); TokenKind::Sep } else { TokenKind::Colon } } '.' => { if nch == '.' 
{ self.read_char(); TokenKind::DotDot } else { TokenKind::Dot } } '=' => { if nch == '=' { self.read_char(); TokenKind::EqEq //} else if nch == '>' { // self.read_char(); //TokenKind::Arrow } else { TokenKind::Eq } } '<' => match nch { '=' => { self.read_char(); TokenKind::Le } '<' => { self.read_char(); TokenKind::LtLt } _ => TokenKind::Lt, }, '>' => match nch { '=' => { self.read_char(); TokenKind::Ge } '>' => { self.read_char(); TokenKind::GtGt } _ => TokenKind::Gt, }, '!' => { if nch == '=' { self.read_char(); TokenKind::Ne } else { TokenKind::Not } } _ => { return Err(MsgWithPos::new(tok.position, Msg::UnknownChar(ch))); } }; Ok(tok) } fn read_comment(&mut self) -> Result<(), MsgWithPos> { while !self.cur().is_none() && !is_newline(self.cur()) { self.read_char(); } Ok(()) } fn read_digits(&mut self, buffer: &mut String, base: IntBase) { while is_digit_or_underscore(self.cur(), base) { let ch = self.cur().unwrap(); self.read_char(); buffer.push(ch); } } fn read_char(&mut self) { self.reader.advance(); } fn cur(&self) -> Option<char> { self.reader.cur() } fn next(&self) -> Option<char> { self.reader.next() } fn build_token(&self, kind: TokenKind) -> Token { Token::new(kind, self.reader.pos()) } fn is_comment_start(&self) -> bool { self.cur() == Some('/') && self.next() == Some('/') } fn is_multi_comment_start(&self) -> bool { self.cur() == Some('/') && self.next() == Some('*') } fn is_multi_comment_end(&self) -> bool { self.cur() == Some('*') && self.next() == Some('/') } fn read_number(&mut self) -> Result<Token, MsgWithPos> { let pos = self.reader.pos(); let mut value = String::new(); let base = if self.cur() == Some('0') { let next = self.next(); match next { Some('x') => { self.read_char(); self.read_char(); IntBase::Hex } Some('b') => { self.read_char(); self.read_char(); IntBase::Bin } _ => IntBase::Dec, } } else { IntBase::Dec }; self.read_digits(&mut value, base); if base == IntBase::Dec && self.cur() == Some('.') && is_digit(self.next()) { 
self.read_char(); value.push('.'); self.read_digits(&mut value, IntBase::Dec); if self.cur() == Some('e') || self.cur() == Some('E') { value.push(self.cur().unwrap()); self.read_char(); if self.cur() == Some('+') || self.cur() == Some('-') { value.push(self.cur().unwrap()); self.read_char(); } self.read_digits(&mut value, IntBase::Dec); } let ttype = TokenKind::LitFloat(value); return Ok(Token::new(ttype, pos)); } let ttype = TokenKind::LitInt(value, base, IntSuffix::Int); Ok(Token::new(ttype, pos)) } } fn is_digit(ch: Option<char>) -> bool { ch.map(|ch| ch.is_digit(10)).unwrap_or(false) } fn is_digit_or_underscore(ch: Option<char>, base: IntBase) -> bool { ch.map(|ch| ch.is_digit(base.num()) || ch == '_') .unwrap_or(false) } fn is_whitespace(ch: Option<char>) -> bool { ch.map(|ch| ch.is_whitespace()).unwrap_or(false) } fn is_newline(ch: Option<char>) -> bool { ch == Some('\n') } fn is_quote(ch: Option<char>) -> bool { ch == Some('\"') } fn is_char_quote(ch: Option<char>) -> bool { ch == Some('\'') } fn is_operator(ch: Option<char>) -> bool { ch.map(|ch| "^+-*/%&|,=!~;:.()[]{}<>".contains(ch)) .unwrap_or(false) } fn is_identifier_start(ch: Option<char>) -> bool { match ch { Some(ch) => (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_', _ => false, } } fn is_identifier(ch: Option<char>) -> bool { is_identifier_start(ch) || is_digit(ch) }
#[macro_export] macro_rules! profile_start { ($tag_count: expr) => { $crate::profiler::internal::begin($tag_count) }; } #[macro_export] macro_rules! profile_finish { ($writer: expr) => { $crate::profiler::internal::end($writer) }; } #[macro_export] macro_rules! profile_finish_to_file { ($filename: expr) => { $crate::profiler::internal::end_to_file($filename) }; } #[macro_export] macro_rules! profile_begin { ($tag: expr) => { $crate::profiler::internal::profile_begin($tag) }; } #[macro_export] macro_rules! profile_end { () => { $crate::profiler::internal::profile_end() }; } #[macro_export] macro_rules! profile_scope { ($tag: expr) => { let _profile_guard = $crate::profiler::internal::ProfileScope::new($tag); }; } #[macro_export] macro_rules! use_profile_memory_allocator { () => { #[global_allocator] static A: $crate::profiler::internal::MemTrackAllocator = $crate::profiler::internal::MemTrackAllocator; }; } #[cfg(windows)] mod sys { use std::mem; use std::cell::UnsafeCell; use winapi::um::winnt::*; use winapi::um::profileapi::*; use winapi::um::synchapi::*; use winapi::um::minwinbase::*; use winapi::um::dbghelp::*; use winapi::um::processthreadsapi::*; use std::ops::{Deref, DerefMut}; use winapi::shared::minwindef::{ TRUE, BOOL, DWORD, HMODULE, LPDWORD, PDWORD, PUCHAR, PULONG, UCHAR, ULONG, USHORT, WORD, }; #[derive(PartialEq, Clone, Copy)] pub struct TimePoint(i64); pub struct StopWatch { frequency : i64 } impl StopWatch { pub fn new() -> StopWatch { let frequency; unsafe { let mut l : LARGE_INTEGER = mem::zeroed(); QueryPerformanceFrequency(&mut l); frequency = *l.QuadPart(); } StopWatch { frequency } } pub fn get_time() -> TimePoint { unsafe { let mut l : LARGE_INTEGER = mem::zeroed(); QueryPerformanceCounter(&mut l); TimePoint(*l.QuadPart()) } } pub fn get_milliseconds(&self, a : &TimePoint, b : &TimePoint) -> i64 { mul_div_i64(b.0 - a.0, 1000_000, self.frequency) } } pub fn get_thread_id() -> u32 { unsafe { GetCurrentThreadId() } } pub struct ReentrantMutex<T: 
?Sized> { inner : Box<CRITICAL_SECTION>, lock_count : u32, data: UnsafeCell<T> } pub struct MutexGuard<'a, T: ?Sized + 'a> { // funny underscores due to how Deref/DerefMut currently work (they // disregard field privacy). __lock: &'a mut ReentrantMutex<T> } impl<T> ReentrantMutex<T> { pub fn new(t: T) -> ReentrantMutex<T> { unsafe { let mut ret = ReentrantMutex { inner: Box::new(mem::zeroed()), lock_count : 0, data: UnsafeCell::new(t) }; InitializeCriticalSection(&mut *ret.inner); ret } } } impl<T: ?Sized> Drop for ReentrantMutex<T> { fn drop(&mut self) { unsafe { DeleteCriticalSection(&mut *self.inner); } } } impl<'mutex, T: ?Sized> MutexGuard<'mutex, T> { pub fn new(lock: &'mutex mut ReentrantMutex<T>) -> Result<MutexGuard<'mutex, T>,()> { unsafe { EnterCriticalSection(&mut *lock.inner); lock.lock_count += 1; Ok(MutexGuard { __lock: lock }) } } } impl<'mutex, T: ?Sized> MutexGuard<'mutex, T> { pub fn new_no_recurse(lock: &'mutex mut ReentrantMutex<T>) -> Result<MutexGuard<'mutex, T>,()> { unsafe { EnterCriticalSection(&mut *lock.inner); if lock.lock_count > 0 { LeaveCriticalSection(&mut *lock.inner); return Err(()); } lock.lock_count += 1; Ok(MutexGuard { __lock: lock }) } } } impl<T: ?Sized> Drop for MutexGuard<'_, T> { #[inline] fn drop(&mut self) { unsafe { self.__lock.lock_count -= 1; LeaveCriticalSection(&mut *self.__lock.inner); } } } impl<T: ?Sized> Deref for MutexGuard<'_, T> { type Target = T; fn deref(&self) -> &T { unsafe { &*self.__lock.data.get() } } } impl<T: ?Sized> DerefMut for MutexGuard<'_, T> { fn deref_mut(&mut self) -> &mut T { unsafe { &mut *self.__lock.data.get() } } } // Computes (value*numer)/denom without overflow, as long as both // (numer*denom) and the overall result fit into i64 (which is the case // for our time conversions). 
pub fn mul_div_i64(value: i64, numer: i64, denom: i64) -> i64 { let q = value / denom; let r = value % denom; // Decompose value as (value/denom*denom + value%denom), // substitute into (value*numer)/denom and simplify. // r < denom, so (denom*numer) is the upper bound of (r*numer) q * numer + r * numer / denom } /* pub unsafe fn trace(cb: &mut FnMut(&super::Frame) -> bool) { // Allocate necessary structures for doing the stack walk let process = GetCurrentProcess(); let thread = GetCurrentThread(); let mut context = mem::zeroed::<MyContext>(); RtlCaptureContext(&mut context.0); // Attempt to use `StackWalkEx` if we can, but fall back to `StackWalk64` // since it's in theory supported on more systems. let mut frame = super::Frame { inner: Frame::New(mem::zeroed()), }; let image = init_frame(&mut frame.inner, &context.0); let frame_ptr = match &mut frame.inner { Frame::New(ptr) => ptr as *mut STACKFRAME_EX, _ => unreachable!(), }; while StackWalkEx( image as DWORD, process, thread, frame_ptr, &mut context.0 as *mut CONTEXT as *mut _, None, Some(SymFunctionTableAccess64()), Some(SymGetModuleBase64()), None, 0, ) == TRUE { if !cb(&frame) { break; } } } */ } pub mod internal { use super::sys; use backtrace::*; use std::io; use std::io::{Write, BufWriter}; use std::alloc::{System, GlobalAlloc, Layout}; use std::sync::Once; use std::sync::atomic::{AtomicBool, Ordering}; use std::collections::HashMap; use std::ops::DerefMut; enum TagType { Begin(&'static str), End, Complete(&'static str, i64), // A complete event holds a duration of the event Allocate(usize), Deallocate(usize) } struct ProfileRecord { time : sys::TimePoint, // The time of the profile data thread_id : u32, // The id of the thread tag : TagType, // The tag used in profiling - if empty is an end event } struct Tags { index : usize, // The index of the thread tags : Vec<&'static str> // The tag stack } pub struct ProfileData { stopwatch : sys::StopWatch, start_time : sys::TimePoint, // The start time of the 
profile enabled : bool, // If profiling is enabled records : Vec<ProfileRecord>, // The profiling records } impl ProfileData { pub fn new() -> ProfileData { ProfileData { stopwatch : sys::StopWatch::new(), start_time : sys::StopWatch::get_time(), enabled : false, records : vec![] } } fn add_record(&mut self, record : ProfileRecord) -> Option<usize> { if !self.enabled || self.records.len() >= self.records.capacity() { return None; } self.records.push(record); return Some(self.records.len() - 1); } } pub struct ProfileScope { index : Option<usize>, time : sys::TimePoint } impl ProfileScope { pub fn new(name: &'static str) -> ProfileScope { let thread_id = sys::get_thread_id(); // Start as a begin tag let mut ret = ProfileScope { index : None, time : sys::StopWatch::get_time() }; if let Ok(ref mut profile) = get_profile() { ret.index = profile.add_record(ProfileRecord { time : ret.time, thread_id, tag : TagType::Begin(name) }); } ret } } impl Drop for ProfileScope { fn drop(&mut self) { if let Some(index) = self.index { if let Ok(ref mut profile) = get_profile() { let profile = profile.deref_mut(); if index < profile.records.len() { let record = &mut profile.records[index]; if let TagType::Begin(name) = record.tag { if self.time == record.time { // If the time is different, it must have started in a different profile session // Change the tag type to complete let duration = profile.stopwatch.get_milliseconds(&record.time, &sys::StopWatch::get_time()); record.tag = TagType::Complete(name, duration); } } } } } } } pub struct MemTrackAllocator; static TRACK_ALLOCS : AtomicBool = AtomicBool::new(false); impl MemTrackAllocator { pub fn set_mem_tracking(new_val : bool) { TRACK_ALLOCS.store(new_val, Ordering::SeqCst); } pub fn get_mem_tracking() -> bool { TRACK_ALLOCS.load(Ordering::SeqCst) } } unsafe impl GlobalAlloc for MemTrackAllocator { unsafe fn alloc(&self, _layout: Layout) -> *mut u8 { if MemTrackAllocator::get_mem_tracking() { if let Ok(ref mut profile) = 
get_profile_no_recurse() { //let bt = Backtrace::new_unresolved(); //println!("Stack {:?}", bt); backtrace::trace_unsynchronized(|frame| { true }); //backtrace::trace(|frame| { true }); let time = sys::StopWatch::get_time(); let thread_id = sys::get_thread_id(); profile.add_record(ProfileRecord { thread_id, tag : TagType::Allocate(_layout.size()), time }); } } System.alloc(_layout) } unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) { if MemTrackAllocator::get_mem_tracking() { if let Ok(ref mut profile) = get_profile_no_recurse() { let time = sys::StopWatch::get_time(); let thread_id = sys::get_thread_id(); profile.add_record(ProfileRecord { thread_id, tag : TagType::Deallocate(_layout.size()), time }); } } System.dealloc(_ptr, _layout) } } pub fn profile_begin(tag : &'static str) { let thread_id = sys::get_thread_id(); if let Ok(ref mut profile) = get_profile() { profile.add_record(ProfileRecord { thread_id, tag : TagType::Begin(tag), time : sys::StopWatch::get_time() }); } } pub fn profile_end() { let time = sys::StopWatch::get_time(); // Always get time as soon as possible let thread_id = sys::get_thread_id(); if let Ok(ref mut profile) = get_profile() { profile.add_record(ProfileRecord { thread_id, tag : TagType::End, time }); } } pub fn begin(record_count : usize) { if let Ok(ref mut profile) = get_profile() { // Abort if already enabled if profile.enabled { return; } profile.records.clear(); profile.records.reserve(record_count); profile.start_time = sys::StopWatch::get_time(); profile.enabled = true; MemTrackAllocator::set_mem_tracking(true); } } fn clean_json_str<'a>(io_str : &'a str, str_buffer : &'a mut String) -> &'a str { // Check if there are any characters to replace if io_str.find(|c: char| (c == '\\') || (c == '"')) == None { return io_str; } // Escape json protected characters (not fast, but should be rare) *str_buffer = io_str.replace('\\', "\\\\").replace('"', "\\\""); return str_buffer; } pub fn end_to_file(filename : &str) -> 
io::Result<()> { MemTrackAllocator::set_mem_tracking(false); end(&mut BufWriter::new(std::fs::File::create(filename)?)) } pub fn end(w : &mut dyn Write) -> io::Result<()> { MemTrackAllocator::set_mem_tracking(false); if let Ok(ref mut profile) = get_profile() { // Abort if already enabled if !profile.enabled { return Err(io::Error::from(io::ErrorKind::InvalidData)); } profile.enabled = false; // DT_TODO: Parhaps use thread::current().name() ? let mut thread_stack = HashMap::new(); thread_stack.insert(sys::get_thread_id(), Tags { index : 0, tags : vec!()}); let mut first : bool = true; let mut clean_buffer : String = String::new(); let mut extra_buffer : String = String::new(); w.write(b"{\"traceEvents\":[\n")?; for entry in profile.records.iter() { // Assign a unique index to each thread let new_id = thread_stack.len(); let stack = thread_stack.entry(entry.thread_id).or_insert(Tags { index : new_id, tags : vec!()}); let tag; let type_tag; extra_buffer.clear(); match entry.tag { TagType::Begin(s) => { type_tag = "B"; tag = s; stack.tags.push(tag) }, TagType::End => { type_tag = "E"; if let Some(stack_tag) = stack.tags.pop() { tag = stack_tag; } else { tag = "Unknown"; } } TagType::Complete(t, d) => { type_tag = "X"; tag = t; extra_buffer = format!(",\"dur\":{}", d); } TagType::Allocate(a) => { type_tag = "O"; tag = "Allocate"; extra_buffer = format!(",\"id\":0,\"args\":{{\"snapshot\":{{\"amount\":{}}}}}", a); } TagType::Deallocate(a) => { type_tag = "O"; tag = "Deallocate"; extra_buffer = format!(",\"id\":1,\"args\":{{\"snapshot\":{{\"amount\":{}}}}}", a); } } if !first { w.write(b",\n")?; } first = false; // Ensure escaped json is written let tag = clean_json_str(tag, &mut clean_buffer); // Get the microsecond count let tag_time = profile.stopwatch.get_milliseconds(&profile.start_time, &entry.time); // Format the string write!(w, "{{\"name\":\"{}\",\"ph\":\"{}\",\"ts\":{},\"tid\":0,\"pid\":{}{}}}", tag, type_tag, tag_time, stack.index, extra_buffer)?; } 
w.write(b"\n]\n}\n")?; return Ok(()); } return Err(io::Error::from(io::ErrorKind::InvalidData)); /* // Write thread "names" if (!first) { for (auto& t : threadStack) { char indexString[64]; snprintf(indexString, sizeof(indexString), "%d", t.second.m_index); // Sort thread listing by the time that they appear in the profile (tool sorts by name) char indexSpaceString[64]; snprintf(indexSpaceString, sizeof(indexSpaceString), "%02d", t.second.m_index); // Ensure a clean json string std::stringstream ss; ss << t.first; std::string threadName = ss.str(); CleanJsonStr(threadName); o_outStream << ",\n{\"name\":\"thread_name\",\"ph\":\"M\",\"pid\":0,\"tid\":" << indexString << ",\"args\":{\"name\":\"Thread" << indexSpaceString << "_" << threadName << "\"}}"; } } */ } fn get_profile_mutex() -> &'static mut sys::ReentrantMutex<ProfileData> { static INIT : Once = Once::new(); static mut GPROFILE : Option<sys::ReentrantMutex<ProfileData>> = None; unsafe { INIT.call_once(|| { GPROFILE = Option::Some(sys::ReentrantMutex::new(ProfileData::new())); }); GPROFILE.as_mut().unwrap_or_else(|| {std::hint::unreachable_unchecked()}) } } fn get_profile() -> Result<sys::MutexGuard<'static, ProfileData>, ()> { sys::MutexGuard::new(get_profile_mutex()) } fn get_profile_no_recurse() -> Result<sys::MutexGuard<'static, ProfileData>, ()> { sys::MutexGuard::new_no_recurse(get_profile_mutex()) } }
#[doc = "Register `OPTR` reader"] pub type R = crate::R<OPTR_SPEC>; #[doc = "Field `RDPROT` reader - Read protection"] pub type RDPROT_R = crate::FieldReader<RDPROT_A>; #[doc = "Read protection\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[repr(u8)] pub enum RDPROT_A { #[doc = "0: Level 1"] Level1 = 0, #[doc = "170: Level 0"] Level0 = 170, #[doc = "204: Level 2"] Level2 = 204, } impl From<RDPROT_A> for u8 { #[inline(always)] fn from(variant: RDPROT_A) -> Self { variant as _ } } impl crate::FieldSpec for RDPROT_A { type Ux = u8; } impl RDPROT_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<RDPROT_A> { match self.bits { 0 => Some(RDPROT_A::Level1), 170 => Some(RDPROT_A::Level0), 204 => Some(RDPROT_A::Level2), _ => None, } } #[doc = "Level 1"] #[inline(always)] pub fn is_level1(&self) -> bool { *self == RDPROT_A::Level1 } #[doc = "Level 0"] #[inline(always)] pub fn is_level0(&self) -> bool { *self == RDPROT_A::Level0 } #[doc = "Level 2"] #[inline(always)] pub fn is_level2(&self) -> bool { *self == RDPROT_A::Level2 } } #[doc = "Field `WPRMOD` reader - Selection of protection mode of WPR bits"] pub type WPRMOD_R = crate::BitReader<WPRMOD_A>; #[doc = "Selection of protection mode of WPR bits\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum WPRMOD_A { #[doc = "0: PCROP disabled. The WRPROT bits are used as a write protection on a sector."] Disabled = 0, #[doc = "1: PCROP enabled. The WRPROT bits are used as a read protection on a sector."] Enabled = 1, } impl From<WPRMOD_A> for bool { #[inline(always)] fn from(variant: WPRMOD_A) -> Self { variant as u8 != 0 } } impl WPRMOD_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> WPRMOD_A { match self.bits { false => WPRMOD_A::Disabled, true => WPRMOD_A::Enabled, } } #[doc = "PCROP disabled. 
The WRPROT bits are used as a write protection on a sector."] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == WPRMOD_A::Disabled } #[doc = "PCROP enabled. The WRPROT bits are used as a read protection on a sector."] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == WPRMOD_A::Enabled } } #[doc = "Field `BOR_LEV` reader - BOR_LEV"] pub type BOR_LEV_R = crate::FieldReader<BOR_LEV_A>; #[doc = "BOR_LEV\n\nValue on reset: 8"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[repr(u8)] pub enum BOR_LEV_A { #[doc = "0: This is the reset threshold level for the 1.45 V - 1.55 V voltage range (power-down only)"] BorOff = 0, #[doc = "1: Reset threshold level for VBOR0 (around 1.8 V)"] BorLevel1 = 1, #[doc = "2: Reset threshold level for VBOR1 (around 2.0 V)"] BorLevel2 = 2, #[doc = "3: Reset threshold level for VBOR2 (around 2.5 V)"] BorLevel3 = 3, #[doc = "4: Reset threshold level for VBOR3 (around 2.7 V)"] BorLevel4 = 4, #[doc = "5: Reset threshold level for VBOR4 (around 3.0 V)"] BorLevel5 = 5, } impl From<BOR_LEV_A> for u8 { #[inline(always)] fn from(variant: BOR_LEV_A) -> Self { variant as _ } } impl crate::FieldSpec for BOR_LEV_A { type Ux = u8; } impl BOR_LEV_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<BOR_LEV_A> { match self.bits { 0 => Some(BOR_LEV_A::BorOff), 1 => Some(BOR_LEV_A::BorLevel1), 2 => Some(BOR_LEV_A::BorLevel2), 3 => Some(BOR_LEV_A::BorLevel3), 4 => Some(BOR_LEV_A::BorLevel4), 5 => Some(BOR_LEV_A::BorLevel5), _ => None, } } #[doc = "This is the reset threshold level for the 1.45 V - 1.55 V voltage range (power-down only)"] #[inline(always)] pub fn is_bor_off(&self) -> bool { *self == BOR_LEV_A::BorOff } #[doc = "Reset threshold level for VBOR0 (around 1.8 V)"] #[inline(always)] pub fn is_bor_level1(&self) -> bool { *self == BOR_LEV_A::BorLevel1 } #[doc = "Reset threshold level for VBOR1 (around 2.0 V)"] #[inline(always)] pub fn is_bor_level2(&self) -> bool { *self == 
BOR_LEV_A::BorLevel2 } #[doc = "Reset threshold level for VBOR2 (around 2.5 V)"] #[inline(always)] pub fn is_bor_level3(&self) -> bool { *self == BOR_LEV_A::BorLevel3 } #[doc = "Reset threshold level for VBOR3 (around 2.7 V)"] #[inline(always)] pub fn is_bor_level4(&self) -> bool { *self == BOR_LEV_A::BorLevel4 } #[doc = "Reset threshold level for VBOR4 (around 3.0 V)"] #[inline(always)] pub fn is_bor_level5(&self) -> bool { *self == BOR_LEV_A::BorLevel5 } } impl R { #[doc = "Bits 0:7 - Read protection"] #[inline(always)] pub fn rdprot(&self) -> RDPROT_R { RDPROT_R::new((self.bits & 0xff) as u8) } #[doc = "Bit 8 - Selection of protection mode of WPR bits"] #[inline(always)] pub fn wprmod(&self) -> WPRMOD_R { WPRMOD_R::new(((self.bits >> 8) & 1) != 0) } #[doc = "Bits 16:19 - BOR_LEV"] #[inline(always)] pub fn bor_lev(&self) -> BOR_LEV_R { BOR_LEV_R::new(((self.bits >> 16) & 0x0f) as u8) } } #[doc = "Option byte register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`optr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct OPTR_SPEC; impl crate::RegisterSpec for OPTR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`optr::R`](R) reader structure"] impl crate::Readable for OPTR_SPEC {} #[doc = "`reset()` method sets OPTR to value 0x00f8_0000"] impl crate::Resettable for OPTR_SPEC { const RESET_VALUE: Self::Ux = 0x00f8_0000; }
//! Implemented an [Object-Oriented Design Pattern] //! //! [object-oriented design pattern]: https://doc.rust-lang.org/book/ch17-03-oo-design-patterns.html use std::error::Error; use the_book::ch17::sec02::Post; fn main() -> Result<(), Box<dyn Error>> { let mut post = Post::new(); post.add_text("Let's start the blog\n"); post.add_text("oops, let's add more text to the first blog entry"); post.request_review(); post.approve(); println!("Here is the published content: '{}'", post.content()); Ok(()) }
use std; use thiserror::Error; use bson::document::ValueAccessError; use bson::oid::Error as BsonError; use mongodb::error::Error as MongoError; use crate::model::ErrorMessage; use crate::reject::get_internal_error_message; pub type Result<T> = std::result::Result<T, Error>; #[derive(Error, Debug)] pub enum Error { #[error("MongoDB: {source}")] MongoDB { #[from] source: MongoError, }, #[error("BSON: {source}")] Bson { #[from] source: BsonError, }, #[error("ValueAccess: {source}")] ValueAccess { #[from] source: ValueAccessError, }, #[error("Received an empty result")] EmptyResult, #[error("Persistence: Field not loaded: '{0}' is missing '{1}'")] FieldNotLoaded(&'static str, &'static str), #[error("Got poisoned mutex")] PoisonedMutex, #[error("Can't connect to database: Environment variable 'DATABASE_URL' not set!")] DatabaseURLNotSet, } impl Into<ErrorMessage> for &Error { fn into(self) -> ErrorMessage { match self { Error::EmptyResult => ErrorMessage { code: 200, message: "Empty Result".to_string(), }, _ => get_internal_error_message(), } } }
/// Demonstrates floating-point literals: the default-inferred type (f64)
/// and explicit f32 / f64 annotations, each printed with `Debug`.
fn main() {
    // Without an annotation a float literal defaults to f64.
    let default_float = 2.0;
    println!("{:?}", default_float);

    // Single-precision, annotated explicitly.
    let single: f32 = 2.0;
    println!("{:?}", single);

    // Double-precision, annotated explicitly.
    let double: f64 = 2.0;
    println!("{:?}", double);
}
// Problem 39 - Integer right triangles
//
// If p is the perimeter of a right angle triangle with integral length sides,
// {a,b,c}, there are exactly three solutions for p = 120:
//
// {20, 48, 52}, {24, 45, 51}, {30, 40, 50}
//
// For which value of p ≤ 1000, is the number of solutions maximised?

use std::collections::HashMap;

fn main() {
    println!("{}", solution());
}

/// Returns the perimeter p <= 1000 that admits the largest number of right
/// triangles with integral sides a < b < c and a + b + c = p.
fn solution() -> usize {
    let pmax: usize = 1000;

    // Map each perfect square n^2 (1 <= n < 1000) back to its root n, so that
    // "is c^2 - a^2 a perfect square?" becomes a single hash lookup.
    let mut squares: HashMap<usize, usize> = HashMap::new();
    for n in 1..1000 {
        squares.insert(n * n, n);
    }

    // perimeter -> number of distinct triangles with that perimeter
    let mut perimeters: HashMap<usize, usize> = HashMap::new();

    // Enumerate hypotenuse c and leg a; derive b from b^2 = c^2 - a^2.
    for c in 2..pmax {
        for a in 1..c {
            let bsq = c * c - a * a;
            if let Some(&b) = squares.get(&bsq) {
                // Count each triangle exactly once by requiring a < b.
                // BUG FIX: the previous bound `a in 1..(c/2)` silently dropped
                // every triple with c/2 <= a < b, e.g. {30, 40, 50} for p = 120
                // and {240, 252, 348} for p = 840, undercounting solutions.
                if a < b {
                    let p = a + b + c;
                    if p <= pmax {
                        *perimeters.entry(p).or_insert(0) += 1;
                    }
                }
            }
        }
    }

    // Pick the perimeter with the highest count; ties break toward larger p,
    // same as taking max over (count, p) tuples.
    perimeters
        .into_iter()
        .map(|(p, count)| (count, p))
        .max()
        .unwrap()
        .1
}
/// Prints a fixed marker line to stdout, signalling that setup ran.
pub fn setup() {
    let message = "During test";
    println!("{}", message);
}
use crate::Stargate;
use http::{HeaderMap, HeaderValue};
use serde::{Deserialize, Serialize};
use serde_json::Value;

/// An incoming GraphQL request body. The serde rename maps the JSON key
/// `operationName` onto the snake_case field.
#[derive(Serialize, Deserialize, Debug)]
pub struct GraphQLRequest {
    // The raw GraphQL query document.
    pub query: String,
    // Which operation to run when the document defines several.
    #[serde(rename = "operationName")]
    pub operation_name: Option<String>,
    // JSON map of variable values for the operation, if any.
    pub variables: Option<Value>,
}

/// A GraphQL response body. NOTE(review): only `data` is serialized for now;
/// the `errors` field below is still commented out.
#[derive(Serialize, Deserialize)]
pub struct GraphQLResponse {
    pub data: Option<Value>,
    // errors: 'a Option<async_graphql::http::GQLError>,
}

/// Per-request state: the parsed GraphQL request plus borrowed header values
/// from the incoming HTTP request (`'req` ties them to the request lifetime).
pub struct RequestContext<'req> {
    pub graphql_request: GraphQLRequest,
    pub header_map: HeaderMap<&'req HeaderValue>,
}

/// Application-wide server state shared across requests.
#[derive(Debug)]
pub struct ServerState<'app> {
    pub stargate: Stargate<'app>,
}
use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::ToTokens; use syn::parse::Parser; use syn::parse_macro_input::parse; use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{Attribute, Error, Path}; use syn::{ExprAssign, LitStr}; pub(crate) struct ProvidesAttr { pub profiles: Vec<Path>, } pub(crate) fn parse_provides_attr(attr: TokenStream) -> Result<ProvidesAttr, Error> { let profiles_syn = <Punctuated<Path, Comma>>::parse_terminated.parse(attr)?; let profiles: Vec<Path> = profiles_syn.iter().map(|p| p.clone()).collect(); Ok(ProvidesAttr { profiles }) } #[derive(Clone)] pub(crate) struct PropAttr { pub(crate) name: Option<String>, pub(crate) default_value: Option<TokenStream2>, } pub(crate) fn parse_prop_attr(attr: &Attribute) -> Result<PropAttr, Error> { if attr.tokens.is_empty() { Ok(PropAttr { name: None, default_value: None, }) } else { attr.parse_args::<ExprAssign>() .and_then(|with_default| { let name = parse::<LitStr>(with_default.left.to_token_stream().into())?; Ok(PropAttr { name: Some(name.value()), default_value: Some(with_default.right.to_token_stream()), }) }) .or_else(|_| { Ok(PropAttr { name: Some(attr.parse_args::<LitStr>()?.value()), default_value: None, }) }) } }
// NOTE: svd2rust-generated accessors for the RCC AHB3RSTR (AHB3 peripheral
// reset) register — see the docs.rs/svd2rust link in the spec doc below.
// Regenerate from the SVD rather than hand-editing.
#[doc = "Register `AHB3RSTR` reader"]
pub type R = crate::R<AHB3RSTR_SPEC>;
#[doc = "Register `AHB3RSTR` writer"]
pub type W = crate::W<AHB3RSTR_SPEC>;
#[doc = "Field `QSPIRST` reader - Quad SPI memory interface reset"]
pub type QSPIRST_R = crate::BitReader;
#[doc = "Field `QSPIRST` writer - Quad SPI memory interface reset"]
pub type QSPIRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PKARST` reader - PKA interface reset"]
pub type PKARST_R = crate::BitReader;
#[doc = "Field `PKARST` writer - PKA interface reset"]
pub type PKARST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AES2RST` reader - AES2 interface reset"]
pub type AES2RST_R = crate::BitReader;
#[doc = "Field `AES2RST` writer - AES2 interface reset"]
pub type AES2RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RNGRST` reader - RNG interface reset"]
pub type RNGRST_R = crate::BitReader;
#[doc = "Field `RNGRST` writer - RNG interface reset"]
pub type RNGRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HSEMRST` reader - HSEM interface reset"]
pub type HSEMRST_R = crate::BitReader;
#[doc = "Field `HSEMRST` writer - HSEM interface reset"]
pub type HSEMRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IPCCRST` reader - IPCC interface reset"]
pub type IPCCRST_R = crate::BitReader;
#[doc = "Field `IPCCRST` writer - IPCC interface reset"]
pub type IPCCRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLASHRST` reader - Flash interface reset"]
pub type FLASHRST_R = crate::BitReader;
#[doc = "Field `FLASHRST` writer - Flash interface reset"]
pub type FLASHRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts a single field bit from the cached register
// value (`self.bits`) at that field's bit offset.
impl R {
    #[doc = "Bit 8 - Quad SPI memory interface reset"]
    #[inline(always)]
    pub fn qspirst(&self) -> QSPIRST_R {
        QSPIRST_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 16 - PKA interface reset"]
    #[inline(always)]
    pub fn pkarst(&self) -> PKARST_R {
        PKARST_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - AES2 interface reset"]
    #[inline(always)]
    pub fn aes2rst(&self) -> AES2RST_R {
        AES2RST_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - RNG interface reset"]
    #[inline(always)]
    pub fn rngrst(&self) -> RNGRST_R {
        RNGRST_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - HSEM interface reset"]
    #[inline(always)]
    pub fn hsemrst(&self) -> HSEMRST_R {
        HSEMRST_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - IPCC interface reset"]
    #[inline(always)]
    pub fn ipccrst(&self) -> IPCCRST_R {
        IPCCRST_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 25 - Flash interface reset"]
    #[inline(always)]
    pub fn flashrst(&self) -> FLASHRST_R {
        FLASHRST_R::new(((self.bits >> 25) & 1) != 0)
    }
}
// Write accessors: each returns a bit-writer proxy parameterized (via const
// generic) with the field's bit offset within the register.
impl W {
    #[doc = "Bit 8 - Quad SPI memory interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn qspirst(&mut self) -> QSPIRST_W<AHB3RSTR_SPEC, 8> {
        QSPIRST_W::new(self)
    }
    #[doc = "Bit 16 - PKA interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn pkarst(&mut self) -> PKARST_W<AHB3RSTR_SPEC, 16> {
        PKARST_W::new(self)
    }
    #[doc = "Bit 17 - AES2 interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn aes2rst(&mut self) -> AES2RST_W<AHB3RSTR_SPEC, 17> {
        AES2RST_W::new(self)
    }
    #[doc = "Bit 18 - RNG interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn rngrst(&mut self) -> RNGRST_W<AHB3RSTR_SPEC, 18> {
        RNGRST_W::new(self)
    }
    #[doc = "Bit 19 - HSEM interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn hsemrst(&mut self) -> HSEMRST_W<AHB3RSTR_SPEC, 19> {
        HSEMRST_W::new(self)
    }
    #[doc = "Bit 20 - IPCC interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn ipccrst(&mut self) -> IPCCRST_W<AHB3RSTR_SPEC, 20> {
        IPCCRST_W::new(self)
    }
    #[doc = "Bit 25 - Flash interface reset"]
    #[inline(always)]
    #[must_use]
    pub fn flashrst(&mut self) -> FLASHRST_W<AHB3RSTR_SPEC, 25> {
        FLASHRST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "AHB3 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb3rstr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb3rstr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AHB3RSTR_SPEC;
impl crate::RegisterSpec for AHB3RSTR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ahb3rstr::R`](R) reader structure"]
impl crate::Readable for AHB3RSTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ahb3rstr::W`](W) writer structure"]
impl crate::Writable for AHB3RSTR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets AHB3RSTR to value 0"]
impl crate::Resettable for AHB3RSTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use super::{BaseMap, Point3};

/// Implement these for handling conversion to/from 3D coordinates (they are separate, because you might
/// want Dwarf Fortress style 3D!)
pub trait Algorithm3D: BaseMap {
    /// Convert a `Point3` (x/y/z) to an array index.
    fn point3d_to_index(&self, pt: Point3) -> usize;

    /// Convert an array index back to a `Point3`.
    fn index_to_point3d(&self, idx: usize) -> Point3;
}
// Public submodules of this module.
pub mod predefined;
pub mod init;
pub mod start;
//! Game Tree Search use crate::state::*; use crate::consts::*; use std::cmp; use std::collections::VecDeque; use crate::hashtables::*; // Evaluation Type #[derive(Copy,Clone,Debug,PartialEq,Eq)] pub enum EvalType { Alpha, Exact, } // Evaluation Result #[derive(Copy,Clone,Debug,PartialEq,Eq)] pub struct Eval { pub eval_type: EvalType, pub value: i32, } impl Default for Eval { fn default() -> Self { Eval { eval_type: EvalType::Alpha, value: -INF_VALUE, } } } pub struct Variation { pub eval: i32, pub move_list: VecDeque<Move>, } impl Variation { pub fn terminal( eval: i32 ) -> Self { Variation { eval: eval, move_list: VecDeque::new(), } } pub fn max_assign( &mut self, mv: &Move, var: Variation ) { if self.eval < -var.eval { self.eval = -var.eval; self.move_list = var.move_list; self.move_list.push_front( *mv ); } } } #[derive(Copy,Clone,Debug,PartialEq,Eq)] pub struct SearchStats { pub end: u64, pub middle: u64, pub max_depth: u64, pub end_qs: u64, pub middle_qs: u64, pub quiet_qs: u64, pub beta_cutoff_qs: u64, pub hash_hit: u64, pub hash_cutoff: u64, } impl SearchStats { pub fn new() -> Self { SearchStats { end: 0, middle: 0, max_depth: 0, end_qs: 0, middle_qs: 0, quiet_qs: 0, beta_cutoff_qs: 0, hash_hit: 0, hash_cutoff: 0, } } } pub fn profile( state: &mut State, depth: usize ) { if depth == 0 { let _ = state.pst_eval(); } else { let ( legal_moves, _ ) = state.node_info(); let irs = state.ir_state(); for mv in &legal_moves { state.make( mv ); profile( state, depth - 1 ); state.unmake( mv, &irs ); } } } pub fn quiescence( state: &mut State, mut alpha: i32, beta: i32, stats: &mut SearchStats ) -> i32 { let ( legal_moves, status ) = state.node_info(); if status == Status::Ongoing { let mut eval = state.pst_eval(); if beta <= eval { // Assuming that we are not in Zugzwang, the "Stand Pat" is a lower bound on the eval. // FIXME: Ideally, we might want to make a null_move and then return that score - given we might implement some sort of Tempo into the static_eval? 
// Failing soft stats.beta_cutoff_qs += 1; eval } else { alpha = cmp::max( alpha, eval ); let irs = state.ir_state(); let tactical_moves = state.tactical_moves( &legal_moves ); if tactical_moves.is_empty() { stats.quiet_qs += 1; } else { stats.middle_qs += 1; } for mv in &tactical_moves { state.make( mv ); eval = cmp::max( eval, -quiescence( state, -beta, -alpha, stats ) ); state.unmake( mv, &irs ); alpha = cmp::max( alpha, eval ); // Failing soft if beta <= alpha { break; } } eval } } else { stats.end_qs += 1; if status == Status::Checkmate { -MATE_VALUE } else { DRAW_VALUE } } } pub fn negamax( state: &mut State, depth: usize, mut alpha: i32, beta: i32, stats: &mut SearchStats, tt: &mut HashTable<Eval> ) -> Variation { let ( legal_moves, status ) = state.node_info(); if status == Status::Ongoing { if let Some( hashed ) = tt.get( state.hash, depth ) { stats.hash_hit += 1; if beta <= hashed.value || ( hashed.eval_type == EvalType::Exact && hashed.value <= alpha ) { stats.hash_cutoff += 1; return Variation::terminal( hashed.value ); } } if depth == 0 { stats.max_depth += 1; Variation::terminal( quiescence( state, alpha, beta, stats ) ) } else { stats.middle += 1; let irs = state.ir_state(); let mut var = Variation::terminal( -INF_VALUE ); let mut eval_type: EvalType = EvalType::Exact; for mv in &legal_moves { state.make( mv ); var.max_assign( mv, negamax( state, depth - 1, -beta, -alpha, stats, tt ) ); state.unmake( mv, &irs ); alpha = cmp::max( alpha, var.eval ); // Failing soft if beta <= alpha { eval_type = EvalType::Alpha; break; } } tt.set( state.hash, depth, Eval { eval_type: eval_type, value: var.eval } ); var } } else { stats.end += 1; if status == Status::Checkmate { Variation::terminal( -MATE_VALUE ) } else { Variation::terminal( DRAW_VALUE ) } } }
extern crate aws_kms_crypt; // Optional; needed only if EncryptedSecret is serialized into JSON string extern crate serde_json; use std::collections::HashMap; fn main() { let mut encryption_context = HashMap::new(); encryption_context.insert("entity".to_owned(), "admin".to_owned()); let options = aws_kms_crypt::EncryptOptions { encryption_context: encryption_context, key: "alias/common".into(), region: "eu-west-1".into() }; let data = "secret".into(); let res = aws_kms_crypt::encrypt(&data, &options); println!("{}", serde_json::to_string(&res.unwrap()).unwrap()); }
extern crate two_timer;

// for timing the cost savings of using a serialized matcher
fn main() {
    // NOTE(review): presumably MATCHER is built lazily on first use, so this
    // single parse exercises its construction cost for timing purposes — the
    // parse result itself is intentionally discarded; confirm against the
    // two_timer crate docs.
    two_timer::MATCHER.parse("yesterday");
}
//! A tiny crate of utilities for working with implicit Wasm codegen conventions //! (often established by LLVM and lld). //! //! Examples conventions include: //! //! * The shadow stack pointer //! * The canonical linear memory that contains the shadow stack #![deny(missing_docs, missing_debug_implementations)] use anyhow::{anyhow, bail, Error}; use walrus::{ir::Value, GlobalId, GlobalKind, InitExpr, MemoryId, Module, ValType}; /// Get a Wasm module's canonical linear memory. pub fn get_memory(module: &Module) -> Result<MemoryId, Error> { let mut memories = module.memories.iter().map(|m| m.id()); let memory = memories.next(); if memories.next().is_some() { bail!( "expected a single memory, found multiple; multiple memories \ currently not supported" ); } memory.ok_or_else(|| { anyhow!( "module does not have a memory; must have a memory \ to transform return pointers into Wasm multi-value" ) }) } /// Get the `__shadow_stack_pointer`. /// /// It must have been previously added to the module's exports via /// `export_shadow_stack_pointer`. pub fn get_shadow_stack_pointer(module: &Module) -> Option<GlobalId> { let candidates = module .globals .iter() .filter(|g| g.ty == ValType::I32) .filter(|g| g.mutable) // The stack pointer is guaranteed to not be initialized to 0, and it's // guaranteed to have an i32 initializer, so find globals which are // locally defined, are an i32, and have a nonzero initializer .filter(|g| match g.kind { GlobalKind::Local(InitExpr::Value(Value::I32(n))) => n != 0, _ => false, }) .collect::<Vec<_>>(); match candidates.len() { 0 => None, // TODO: have an actual check here. 1 => Some(candidates[0].id()), _ => None, } }
use specs::Join;

/// System driving the grappling hook: casts a ray when a hook is launched,
/// anchors it to the first body hit, and keeps the anchor's world position
/// updated every frame.
pub struct HookSystem {
    // Scratch buffer of (hit entity, ray time-of-impact) pairs, reused across
    // frames to avoid reallocating.
    collided: Vec<(::specs::Entity, f32)>,
}

impl HookSystem {
    /// Creates the system with an empty collision scratch buffer.
    pub fn new() -> Self {
        HookSystem { collided: vec![] }
    }
}

impl<'a> ::specs::System<'a> for HookSystem {
    type SystemData = (
        ::specs::ReadStorage<'a, ::component::PhysicBody>,
        ::specs::ReadStorage<'a, ::component::Aim>,
        ::specs::WriteStorage<'a, ::component::Hook>,
        ::specs::Fetch<'a, ::resource::PhysicWorld>,
        ::specs::Entities<'a>,
    );

    fn run(&mut self, (bodies, aims, mut hooks, physic_world, entities): Self::SystemData) {
        // Process every entity that has an aim, a physic body, and a hook.
        for (aim, body, hook, entity) in (&aims, &bodies, &mut hooks, &*entities).join() {
            // Delete anchor if entity doesn't exist anymore
            if let Some(false) = hook.anchor
                .as_ref()
                .map(|anchor| entities.is_alive(anchor.entity))
            {
                hook.anchor = None;
                hook.launch = false;
            }

            // Hook retracted: drop any existing anchor.
            if !hook.launch && hook.anchor.is_some() {
                hook.anchor = None;
            }

            // Hook launched but not yet attached: ray-cast along the aim
            // direction and anchor to the nearest eligible hit.
            if hook.launch && hook.anchor.is_none() {
                let body_pos = body.get(&physic_world).position().clone();
                let ray = ::ncollide::query::Ray {
                    origin: ::na::Point3::from_coordinates(body_pos.translation.vector),
                    dir: aim.rotation * ::na::Vector3::x(),
                };

                // TODO: resolve hack with membership nphysic #82
                let mut group = ::nphysics::object::RigidBodyCollisionGroups::new_dynamic();
                group.set_whitelist(&[::entity::MONSTER_GROUP, ::entity::WALL_GROUP]);

                self.collided.clear();
                for (other_body, collision) in physic_world
                    .collision_world()
                    .interferences_with_ray(&ray, &group.as_collision_groups())
                {
                    if let ::nphysics::object::WorldObject::RigidBody(other_body) = other_body.data {
                        let other_entity =
                            ::component::PhysicBody::entity(physic_world.rigid_body(other_body));
                        // Never anchor to the launching entity itself.
                        if entity != other_entity {
                            self.collided.push((other_entity, collision.toi));
                        }
                    }
                }
                // Sort hits by time of impact so the nearest is first.
                self.collided
                    .sort_by(|a, b| (a.1).partial_cmp(&b.1).unwrap());

                // Anchor to the nearest hit (the `break` makes this take only
                // the first element, if any).
                for collided in &self.collided {
                    let other_pos = bodies
                        .get(collided.0)
                        .unwrap()
                        .get(&physic_world)
                        .position();
                    let collision_pos = ray.origin + ray.dir * collided.1;
                    // Store the hit point in the hit body's local frame so the
                    // anchor follows that body as it moves.
                    let local_pos = other_pos.inverse() * collision_pos;
                    hook.anchor = Some(::component::Anchor {
                        entity: collided.0,
                        local_pos,
                        pos: ::na::zero(),
                    });
                    break;
                }
            }

            // compute position and draw
            if let Some(ref mut anchor) = hook.anchor {
                // World position = anchored body's current transform applied
                // to the stored local attachment point.
                anchor.pos = (bodies
                    .get(anchor.entity)
                    .unwrap()
                    .get(&physic_world)
                    .position() * anchor.local_pos)
                    .coords;
            }
        }
    }
}
use fltk::{app, button::*, enums::*, frame::*, group::*, prelude::*, window::*}; use std::cell::RefCell; use std::rc::Rc; /* Created:0.0.1 updated:0.0.1 description: As the file name explains. This is just example code for any new devs to go off of. */ fn _main() -> Result<(), Box<dyn std::error::Error>> { //remove the Underscore if you want to run this file. //create app & widgets let app = app::App::default(); let counter = Counter::new(0); let mut wind = Window::default() .with_size(160, 200) .center_screen() .with_label("Counter"); let mut frame = Frame::default() .with_size(100, 40) .center_of(&wind) .with_label("0"); frame.set_label_size(20); let mut but_inc = Button::default() .size_of(&frame) .above_of(&frame, 0) .with_label("+"); let mut but_dec = Button::default() .size_of(&frame) .below_of(&frame, 0) .with_label("-"); wind.make_resizable(true); wind.end(); wind.show(); // Theming wind.set_color(Color::from_u32(0xffebee)); but_inc.set_color(Color::from_u32(0x304FFE)); but_inc.set_selection_color(Color::Green); but_inc.set_label_size(20); but_inc.set_frame(FrameType::RFlatBox); but_inc.set_label_color(Color::White); but_dec.set_color(Color::from_u32(0x2962FF)); but_dec.set_selection_color(Color::Red); but_dec.set_frame(FrameType::RFlatBox); but_dec.set_label_size(20); but_dec.set_label_color(Color::White); //add functionality via callbacks and handlers but_inc.set_callback({ let mut c = counter.clone(); move |_| c.increment() }); but_dec.set_callback({ let mut c = counter.clone(); move |_| c.decrement() }); frame.handle(move |f, ev| { //f is a clone of what is being handled. In this case, &frame. EV is the Event code. //println!("ev: {} && f: {}",ev as i32,f.label()); if ev as i32 == MyEvent::CHANGED { f.set_label(&counter.clone().value().to_string()); return true; } else { return false; } }); //run the app Ok(app.run()?) } // create a counter Struct which contains all functionality for the counter app. 
It also Derives clone so that way we wont get errors when using move |_| #[derive(Clone)] pub struct Counter{ count: Rc<RefCell<i32>>, //creates a single cell in memory that contains an i32. } impl Counter { pub fn new(val: i32) -> Self{ return Counter { count: Rc::from(RefCell::from(val)) }; } pub fn increment(&mut self){ *self.count.borrow_mut() += 1; app::handle_main(MyEvent::CHANGED).unwrap(); } pub fn decrement(&mut self){ *self.count.borrow_mut() -= 1; app::handle_main(MyEvent::CHANGED).unwrap(); } pub fn value(&self) -> i32 { *self.count.borrow() } } //list of events you want to name. pub struct MyEvent; impl MyEvent{ const CHANGED: i32 = 42;//the value of this can be anything }
//! Implements the "Universal Chess Interface" protocol communication. //! //! "Universal Chess Interface" (UCI) is an open communication //! protocol for chess engines to play games automatically, that is to //! communicate with other programs including Graphical User //! Interfaces (GUI). UCI was designed and developed by Rudolf Huber //! and Stefan Meyer-Kahlen, released in November 2000. The protocol //! is independent of the operating system. For "Windows", the engine //! is a normal "exe" file, either a console or "real" windows //! application. All communication is done via standard input and //! output with text commands. //! //! This module handles the low-level details of the UCI protocol. It //! only requires the programmer to define a type that implements the //! `UciEngine` trait. Then `run_engine` will handle the communication //! with the GUI all by itself. use std::default::Default; use std::time::Duration; use std::thread::{spawn, sleep}; use std::io; use std::io::{Write, BufWriter, BufRead, ErrorKind}; use std::sync::mpsc::{channel, TryRecvError}; use regex::Regex; /// A command from the GUI to the engine. enum UciCommand { /// This is sent to the engine when the user wants to change the /// value of some configuration option supported by the engine. SetOption { name: String, value: String }, /// This is used to synchronize the engine with the GUI. IsReady, /// This is sent to the engine when the next search (started with /// `UciCommand::Position` and `UciCommand::Go`) will be from a /// different game. UciNewGame, /// Set up the position described in `fen` and play the suppied /// `moves` on the internal chess board. Position { fen: String, moves: String }, /// Start calculating on the current position set up with /// `UciCommand::Position`. Go(GoParams), /// Stop calculating as soon as possible and send /// `EngineReply::BestMove`. Stop, /// The user has played the expected move. 
This will be sent if /// the engine was told to ponder on the same move the user has /// played. PonderHit, /// Quit the program as soon as possible. Quit, } /// Parameters influencing engine's thinking. #[derive(Default)] pub struct GoParams { /// Restricts the search to a subset of moves only. /// /// No restrictions should be applied when the supplied list is /// empty. The move format is long algebraic notation. Examples: /// `e2e4`, `e7e5`, `e1g1` (white short castling), `e7e8q` (for /// promotion). pub searchmoves: Vec<String>, /// Whether to starts searching in pondering mode. /// /// When searching in pondering mode the last move sent in the /// "position" command is the ponder move. The engine can do what /// it wants to do, but after a "ponder hit" command it should /// execute the suggested move to ponder on. This means that the /// ponder move sent by the GUI can be interpreted as a /// recommendation about which move to ponder. However, if the /// engine decides to ponder on a different move, it should not /// display any mainlines as they are likely to be misinterpreted /// by the GUI because the GUI expects the engine to ponder on the /// suggested move. pub ponder: bool, /// Milliseconds left on white's clock. pub wtime: Option<u64>, /// Milliseconds left on black's clock. pub btime: Option<u64>, /// White increment per move in milliseconds. pub winc: Option<u64>, /// Black increment per move in milliseconds. pub binc: Option<u64>, /// The number of moves to the next time control. pub movestogo: Option<u64>, /// Search to this depth (plies) only. pub depth: Option<u64>, /// Search that many nodes only. pub nodes: Option<u64>, /// Search for a mate in that many moves. pub mate: Option<u64>, /// Search for exactly that many milliseconds. pub movetime: Option<u64>, /// Search until the "stop" command. /// /// The engine must not exit the search without being told so in /// this mode. 
pub infinite: bool, } /// An information item that the engine sends to the GUI. /// /// The GUI requires the engine to send various types of information /// during its working. Here are some standard ones: /// /// * `"depth"`: The search depth in half-moves. /// /// * `"time"`: The time searched in milliseconds, this should be sent /// together with the PV. /// /// * `"nodes"`: Nodes searched, the engine should send this info /// regularly. /// /// * `"pv"`: The best line found. /// /// * `"multipv"`: For the multi PV mode. /// /// * `"score"`: The score from the engine's point of view. /// /// * `"nps"`: Nodes per second searched, the engine should send this /// info regularly. /// /// * `"string"`: Any string that will be displayed. pub struct InfoItem { pub info_type: String, pub data: String, } /// A reply from the engine to the GUI. /// /// The engine reply is either a best move found, or new/updated /// information items. The move format is long algebraic /// notation. Examples: `e2e4`, `e7e5`, `e1g1` (white short castling), /// `e7e8q` (for promotion). If supplied, `ponder_move` is the /// response on which the engine would like to ponder. pub enum EngineReply { Info(Vec<InfoItem>), BestMove { best_move: String, ponder_move: Option<String>, }, } /// Describes a configuration option. /// /// Configurable options can be of several different types, depending /// on their intended appearance in the GUI: check box, spin box, /// combo box, string box, or button. #[derive(Clone, Debug)] pub enum OptionDescription { Check { default: bool }, Spin { min: i32, max: i32, default: i32 }, Combo { list: Vec<String>, default: String }, String { default: String }, Button, } /// A trait for announcing and changing configuration options. pub trait SetOption { /// Returns a list of supported configuration options (name and /// description). 
fn options() -> Vec<(&'static str, OptionDescription)> where Self: Sized { vec![] } /// Updates the internal state to keep up with the new value for a /// given configuration option. /// /// * `name` gives the name of the configuration option. /// /// * `value` is the new value for the configuration option. /// /// Does nothing when called with unsupported configuration option /// name. #[allow(unused_variables)] fn set_option(name: &str, value: &str) where Self: Sized {} } /// A trait for UCI-compatible chess engines. /// /// The methods in this trait, except the method `wait_for_reply`, /// must not block the current thread. pub trait UciEngine { /// Returns the name of the engine. fn name() -> &'static str; /// Returns the author of the engine. fn author() -> &'static str; /// Returns a list of supported configuration options (name and /// description). fn options() -> Vec<(&'static str, OptionDescription)>; /// Creates a new instance. /// /// `tt_size_mb` is the preferred size of the transposition table /// in Mbytes. fn new(tt_size_mb: Option<usize>) -> Self; /// Sets a new value for a given configuration option. fn set_option(&mut self, name: &str, value: &str); /// Tells the engine that the next position will be from a /// different game. /// /// In practice, this method will clear the transposition tables. fn new_game(&mut self); /// Loads a new chess position. /// /// `fen` will be the position represented in Forsyth–Edwards /// notation. `moves` is an iterator over the moves played from /// the given position. The move format is long algebraic /// notation. Examples: `e2e4`, `e7e5`, `e1g1` (white short /// castling), `e7e8q` (for promotion). fn position(&mut self, fen: &str, moves: &mut Iterator<Item = &str>); /// Tells the engine to start thinking. fn go(&mut self, params: &GoParams); /// Forces the engine to stop thinking and reply with the best /// move it had found. 
fn stop(&mut self); /// Tells the engine that the move it is pondering on was played /// on the board. /// /// Pondering is using the opponent's move time to consider likely /// opponent moves and thus gain a pre-processing advantage when /// it is our turn to move. fn ponder_hit(&mut self); /// Waits for an engine reply, timing out after a specified /// duration or earlier. fn wait_for_reply(&mut self, duration: Duration) -> Option<EngineReply>; /// Terminates the engine permanently. /// /// After calling `exit`, no other methods on this instance should /// be called. fn exit(&mut self); } /// Serves UCI commands until a "quit" command is received. /// /// The current thread will block until the UCI session is closed. /// /// Returns `Err` if the handshake was unsuccessful, or if an IO error /// occurred. pub fn run_engine<E: UciEngine>() -> io::Result<()> { let mut server = try!(Server::<E>::wait_for_hanshake()); server.serve() } /// A UCI protocol server. /// /// Connects the engine to the GUI. struct Server<E: UciEngine> { engine: Option<E>, } impl<E: UciEngine> Server<E> { /// Waits for UCI handshake from the GUI. /// /// Will return `Err` if the handshake was unsuccessful, or if an /// IO error has occurred. The current thread will be blocked /// until the handshake is finalized. pub fn wait_for_hanshake() -> io::Result<Self> { lazy_static! 
// Tail of the UCI handshake routine whose beginning lies outside this
// chunk: it reads the GUI's first line, verifies the "uci" greeting,
// writes the "id"/"option" preamble, and finishes with "uciok".
{ static ref RE: Regex = Regex::new(r"\buci(?:\s|$)").unwrap(); }
        let stdin = io::stdin();
        let mut reader = stdin.lock();
        let mut writer = BufWriter::new(io::stdout());
        let mut line = String::new();
        // A zero-byte read means the GUI closed the pipe before greeting us.
        if try!(reader.read_line(&mut line)) == 0 {
            return Err(io::Error::new(ErrorKind::UnexpectedEof, "EOF"));
        }
        // Anything other than a line containing "uci" is not our protocol.
        if !RE.is_match(line.as_str()) {
            return Err(io::Error::new(ErrorKind::Other, "unrecognized protocol"));
        }
        try!(write!(writer, "id name {}\n", E::name()));
        try!(write!(writer, "id author {}\n", E::author()));
        // Advertise every configurable engine option in UCI syntax.
        for (name, description) in E::options() {
            try!(write!(writer,
                        "option name {} type {}\n",
                        name,
                        match description {
                            OptionDescription::Check { default } => {
                                format!("check default {}", default)
                            }
                            OptionDescription::Spin { default, min, max } => {
                                format!("spin default {} min {} max {}", default, min, max)
                            }
                            OptionDescription::Combo { default, list } => {
                                format!("combo default {}{}",
                                        default,
                                        list.into_iter()
                                            .fold(String::new(), |mut acc, x| {
                                                acc.push_str(" var ");
                                                acc.push_str(x.as_str());
                                                acc
                                            }))
                            }
                            OptionDescription::String { default } => {
                                format!("string default {}", default)
                            }
                            OptionDescription::Button => "button".to_string(),
                        }));
        }
        try!(write!(writer, "uciok\n"));
        try!(writer.flush());
        Ok(Server { engine: None })
    }

    /// Blocks the current thread and serves UCI commands until a
    /// "quit" command is received.
    ///
    /// Will return `Err` if an IO error has occurred.
    pub fn serve(&mut self) -> io::Result<()> {
        let mut writer = BufWriter::new(io::stdout());
        let (tx, rx) = channel();

        // Spawn a thread that reads from `stdin` and writes to `tx`.
        let read_thread = spawn(move || -> io::Result<()> {
            let stdin = io::stdin();
            let mut reader = stdin.lock();
            let mut line = String::new();
            loop {
                if let Ok(cmd) = match try!(reader.read_line(&mut line)) {
                    0 => return Err(io::Error::new(ErrorKind::UnexpectedEof, "EOF")),
                    _ => parse_uci_command(line.as_str()),
                } {
                    // "quit" terminates the reader thread; every other
                    // successfully parsed command is forwarded to the
                    // main loop through the channel.  Unparsable lines
                    // are silently ignored.
                    if let UciCommand::Quit = cmd {
                        return Ok(());
                    }
                    tx.send(cmd).unwrap();
                }
                line.clear();
            }
        });

        'mainloop: loop {
            // Try to receive commands from the GUI, pass them to the engine.
            'read_commands: while let Some(cmd) = match rx.try_recv() {
                Ok(cmd) => Some(cmd),
                Err(TryRecvError::Empty) => None,
                Err(TryRecvError::Disconnected) => break 'mainloop,
            } {
                let engine = if let Some(ref mut e) = self.engine {
                    e
                } else {
                    // Initialize the engine. (The UCI specification
                    // states that the "Hash" "setoption" command
                    // should be the first command passed to the
                    // engine.)
                    if let UciCommand::SetOption {
                        ref name,
                        ref value,
                    } = cmd {
                        if name == "Hash" {
                            let hash_size_mb = value.parse::<usize>().ok();
                            self.engine = Some(E::new(hash_size_mb));
                            continue 'read_commands;
                        }
                    }
                    self.engine = Some(E::new(None));
                    self.engine.as_mut().unwrap()
                };

                // Pass the received command to the engine.
                match cmd {
                    UciCommand::IsReady => {
                        try!(write!(writer, "readyok\n"));
                        try!(writer.flush());
                    }
                    UciCommand::SetOption { name, value } => {
                        engine.set_option(name.as_str(), value.as_str());
                    }
                    UciCommand::Position { fen, moves } => {
                        engine.position(fen.as_str(), &mut moves.split_whitespace());
                    }
                    UciCommand::Stop => {
                        engine.stop();

                        // The "stop" command requires the engine to
                        // reply with a move immediately.
                        break 'read_commands;
                    }
                    UciCommand::UciNewGame => {
                        engine.new_game();
                    }
                    UciCommand::PonderHit => {
                        engine.ponder_hit();
                    }
                    UciCommand::Go(params) => {
                        engine.go(&params);
                    }
                    // "quit" never reaches the channel — it is handled
                    // inside the reader thread above.
                    UciCommand::Quit => unreachable!(),
                }
            } // 'read_commands

            if let Some(ref mut engine) = self.engine {
                // Wait for engine replies, fetch them to `stdout`.
                let mut reply_count = 0;
                while let Some(reply) = engine.wait_for_reply(Duration::from_millis(25)) {
                    reply_count += 1;
                    match reply {
                        EngineReply::BestMove {
                            best_move,
                            ponder_move,
                        } => {
                            try!(write!(writer,
                                        "bestmove {}{}",
                                        best_move,
                                        match ponder_move {
                                            None => "\n".to_string(),
                                            Some(m) => format!(" ponder {}\n", m),
                                        }))
                        }
                        EngineReply::Info(infos) => {
                            if infos.len() > 0 {
                                try!(write!(writer, "info"));
                                for InfoItem { info_type, data } in infos {
                                    try!(write!(writer, " {} {}", info_type, data));
                                }
                                try!(write!(writer, "\n"));
                            }
                        }
                    }
                    if reply_count >= 40 {
                        // The engine is sending lots of replies, but
                        // we should not forget to process GUI
                        // commands as well.
                        break;
                    }
                }
                try!(writer.flush());
            } else {
                // The engine is not initialized yet.
                sleep(Duration::from_millis(25));
            }
        } // 'mainloop

        // End the UCI session.
        if let Some(ref mut engine) = self.engine {
            engine.exit();
        }
        read_thread.join().unwrap()
    }
}

/// Represents a parse error.
struct ParseError;

// Splits a GUI line into a command keyword and its parameter string and
// dispatches to the per-command parameter parsers.  The `\b` search lets
// the keyword appear anywhere in the line (tests rely on " foo quit ").
fn parse_uci_command(s: &str) -> Result<UciCommand, ParseError> {
    lazy_static! {
        static ref RE: Regex = Regex::new(
            format!(r"\b({})\s*(?:\s(.*)|$)",
                    "setoption|isready|ucinewgame|\
                     position|go|stop|ponderhit|quit",
            ).as_str()
        ).unwrap();
    }
    if let Some(captures) = RE.captures(s) {
        let command_str = captures.get(1).unwrap().as_str();
        let params_str = captures.get(2).map_or("", |m| m.as_str());
        match command_str {
            "stop" => Ok(UciCommand::Stop),
            "quit" => Ok(UciCommand::Quit),
            "isready" => Ok(UciCommand::IsReady),
            "ponderhit" => Ok(UciCommand::PonderHit),
            "ucinewgame" => Ok(UciCommand::UciNewGame),
            "setoption" => parse_setoption_params(params_str),
            "position" => parse_position_params(params_str),
            "go" => parse_go_params(params_str),
            _ => Err(ParseError),
        }
    } else {
        Err(ParseError)
    }
}

// Parses `setoption` parameters: "name <id> [value <x>]".
fn parse_setoption_params(s: &str) -> Result<UciCommand, ParseError> {
    lazy_static!
    { static ref RE: Regex = Regex::new(
        r"^name\s+(\S.*?)(?:\s+value\s+(.*?))?\s*$").unwrap(); }
    if let Some(captures) = RE.captures(s) {
        Ok(UciCommand::SetOption {
            name: captures.get(1).unwrap().as_str().to_string(),
            // A missing "value" clause yields an empty string.
            value: captures.get(2).map_or("", |m| m.as_str()).to_string(),
        })
    } else {
        Err(ParseError)
    }
}

// Parses `position` parameters: "startpos" or "fen <FEN>", optionally
// followed by "moves <move list>".
fn parse_position_params(s: &str) -> Result<UciCommand, ParseError> {
    const STARTPOS: &'static str = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w QKqk - 0 1";
    lazy_static! {
        static ref RE: Regex = Regex::new(
            format!(
                r"^(?:fen\s+(?P<fen>{})|startpos)(?:\s+moves(?P<moves>{}))?\s*$",
                r"[1-8KQRBNPkqrbnp/]+\s+[wb]\s+(?:[KQkq]{1,4}|-)\s+(?:[a-h][1-8]|-)\s+\d+\s+\d+",
                r"(?:\s+[a-h][1-8][a-h][1-8][qrbn]?)*",  // a possibly empty list of moves
            ).as_str()
        ).unwrap();
    }
    if let Some(captures) = RE.captures(s) {
        Ok(UciCommand::Position {
            // "startpos" is translated to its FEN equivalent.
            fen: if let Some(fen) = captures.name("fen") {
                fen.as_str().to_string()
            } else {
                STARTPOS.to_string()
            },
            moves: captures
                .name("moves")
                .map_or("", |m| m.as_str())
                .to_string(),
        })
    } else {
        Err(ParseError)
    }
}

// Parses `go` parameters (time controls, search limits, "searchmoves").
// Unknown or malformed fragments are skipped rather than rejected.
fn parse_go_params(s: &str) -> Result<UciCommand, ParseError> {
    lazy_static! {
        static ref RE: Regex = Regex::new(
            format!(
                r"\b(?P<keyword>{})(?:\s+(?P<number>\d+)|(?P<moves>{}))?(?:\s+|$)",
                "wtime|btime|winc|binc|movestogo|depth|\
                 nodes|mate|movetime|ponder|infinite|searchmoves",
                r"(?:\s+[a-h][1-8][a-h][1-8][qrbn]?)+",  // a non-empty list of moves
            ).as_str()
        ).unwrap();
    }
    let mut params = GoParams::default();
    for captures in RE.captures_iter(s) {
        let keyword = captures.name("keyword").unwrap();
        match keyword.as_str() {
            "searchmoves" => {
                if let Some(moves) = captures.name("moves") {
                    params.searchmoves = moves
                        .as_str()
                        .split_whitespace()
                        .map(|x| x.to_string())
                        .collect();
                }
            }
            "infinite" => {
                params.infinite = true;
            }
            "ponder" => {
                params.ponder = true;
            }
            _ => {
                // All remaining keywords take a numeric argument.
                if let Some(number) = captures.name("number") {
                    let field = match keyword.as_str() {
                        "wtime" => &mut params.wtime,
                        "btime" => &mut params.btime,
                        "winc" => &mut params.winc,
                        "binc" => &mut params.binc,
                        "movestogo" => &mut params.movestogo,
                        "depth" => &mut params.depth,
                        "nodes" => &mut params.nodes,
                        "mate" => &mut params.mate,
                        "movetime" => &mut params.movetime,
                        _ => panic!("invalid keyword"),
                    };
                    // A value that overflows u64 leaves the field `None`.
                    *field = number.as_str().parse::<u64>().ok();
                }
            }
        }
    }
    Ok(UciCommand::Go(params))
}

#[cfg(test)]
mod tests {
    #[test]
    fn parse_go_params() {
        use super::{parse_go_params, UciCommand};
        let params = [" wtime22000 ",
                      " wtime 22000 ",
                      "wtime 22000",
                      "wtime 99999999999999998888888888999999999999999999",
                      "wtime 22000",
                      "searchmoves e2e4 c7c8q ",
                      "searchmoves e2e4 c7c8q,ponder ",
                      "searchmoves aabb",
                      "infinite wtime 22000",
                      "wtime 22000 infinite btime 11000",
                      "wtime fdfee / 22000 infinite btime 11000 fdfds",
                      "wtime 22000 infinite btime 11000 ponder",
                      "searchmoves"];
        for (i, s) in params.iter().enumerate() {
            if let Some(UciCommand::Go(p)) = parse_go_params(s).ok() {
                match i {
                    0 => {
                        // No whitespace between keyword and number.
                        assert_eq!(p.wtime, None);
                    }
                    1 => {
                        assert_eq!(p.wtime, Some(22000));
                        assert_eq!(p.ponder, false);
                    }
                    2 => {
                        assert_eq!(p.wtime, Some(22000));
                    }
                    3 => {
                        // Overflows u64, so the parse fails -> None.
                        assert_eq!(p.wtime, None);
                    }
                    4 => {
                        assert_eq!(p.infinite, false);
                    }
                    5
=> {
                        assert_eq!(p.searchmoves,
                                   vec!["e2e4".to_string(), "c7c8q".to_string()]);
                    }
                    6 => {
                        // The comma ends the move list after "e2e4".
                        assert_eq!(p.searchmoves, vec!["e2e4".to_string()]);
                    }
                    7 => {
                        assert!(p.searchmoves.is_empty());
                    }
                    8 => {
                        assert_eq!(p.wtime, Some(22000));
                        assert_eq!(p.infinite, true);
                    }
                    9 => {
                        assert_eq!(p.infinite, true);
                        assert_eq!(p.wtime, Some(22000));
                        assert_eq!(p.btime, Some(11000));
                    }
                    10 => {
                        assert_eq!(p.infinite, true);
                        assert_eq!(p.wtime, None);
                        assert_eq!(p.btime, Some(11000));
                    }
                    11 => {
                        assert_eq!(p.infinite, true);
                        assert_eq!(p.wtime, Some(22000));
                        assert_eq!(p.btime, Some(11000));
                        assert_eq!(p.ponder, true);
                        assert!(p.searchmoves.is_empty());
                    }
                    12 => {
                        assert!(p.searchmoves.is_empty());
                    }
                    _ => (),
                }
            } else {
                panic!("unsuccessful parsing: {}", s);
            }
        }
    }

    #[test]
    fn parse_setoption_params() {
        use super::{parse_setoption_params, UciCommand};
        let params = ["name xxx value yyy ",
                      "name xxx value yyy",
                      "name xxx value ",
                      "name xxx "];
        for (i, s) in params.iter().enumerate() {
            if let Some(UciCommand::SetOption { name, value }) = parse_setoption_params(s).ok() {
                match i {
                    0 => {
                        assert_eq!(name, "xxx");
                        assert_eq!(value, "yyy");
                    }
                    1 => {
                        assert_eq!(name, "xxx");
                        assert_eq!(value, "yyy");
                    }
                    2 => {
                        // "value" with nothing after it -> empty string.
                        assert_eq!(name, "xxx");
                        assert_eq!(value, "");
                    }
                    3 => {
                        assert_eq!(name, "xxx");
                        assert_eq!(value, "");
                    }
                    _ => (),
                }
            } else {
                panic!("unsuccessful parsing: {}", s);
            }
        }
        assert!(parse_setoption_params("name ").is_err());
        assert!(parse_setoption_params("namexxx ").is_err());
    }

    #[test]
    fn parse_position_params() {
        use super::{parse_position_params, UciCommand};
        let params = ["startpos ",
                      "startpos ",
                      "startpos moves ",
                      "startpos moves e2e4 d2d4 ",
                      "fen 8/8/8/8/8/8/8/k6K w KQk e6 0 1 moves e2e4",
                      "fen 8/8/8/8/8/8/8/k6K w - - 0 1 moves e2e4",
                      "fen 8/8/8/8/8/8/8/k6K w - - 0 1 moves e2e4",
                      "fen 8/8/8/8/8/8/8/k6K w - - 0 1 moves",
                      "fen 8/8/8/8/8/8/8/k6K w - - 0 1 "];
        for (i, s) in params.iter().enumerate() {
            if let Some(UciCommand::Position { fen, moves }) = parse_position_params(s).ok() {
                match i {
                    0 => {
                        // "startpos" expands to the standard initial FEN.
                        assert_eq!(fen,
                                   "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w QKqk - 0 1");
                        assert_eq!(moves.len(), 0);
                    }
                    1 => {
                        assert_eq!(fen,
                                   "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w QKqk - 0 1");
                        assert_eq!(moves.len(), 0);
                    }
                    2 => {
                        assert_eq!(fen,
                                   "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w QKqk - 0 1");
                        assert_eq!(moves.len(), 0);
                    }
                    3 => {
                        assert_eq!(fen,
                                   "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w QKqk - 0 1");
                        assert_eq!(moves.split_whitespace().count(), 2);
                    }
                    4 => {
                        assert_eq!(moves.split_whitespace().count(), 1);
                    }
                    5 => {
                        assert_eq!(fen, "8/8/8/8/8/8/8/k6K w - - 0 1".to_string());
                        assert_eq!(moves.split_whitespace().count(), 1);
                    }
                    6 => {
                        assert_eq!(fen, "8/8/8/8/8/8/8/k6K w - - 0 1".to_string());
                        assert_eq!(moves.split_whitespace().count(), 1);
                    }
                    7 => {
                        // Bare "moves" keyword -> empty move list.
                        assert_eq!(fen, "8/8/8/8/8/8/8/k6K w - - 0 1".to_string());
                        assert_eq!(moves.len(), 0);
                    }
                    8 => {
                        assert_eq!(fen, "8/8/8/8/8/8/8/k6K w - - 0 1".to_string());
                        assert_eq!(moves.len(), 0);
                    }
                    _ => (),
                }
            } else {
                panic!("unsuccessful parsing: {}", s);
            }
        }
    }

    #[test]
    fn parse_uci_command() {
        use super::{parse_uci_command, UciCommand};
        assert!(match parse_uci_command("isready").ok().unwrap() {
            UciCommand::IsReady => true,
            _ => false,
        });
        assert!(match parse_uci_command(" isready ").ok().unwrap() {
            UciCommand::IsReady => true,
            _ => false,
        });
        assert!(match parse_uci_command("isready ").ok().unwrap() {
            UciCommand::IsReady => true,
            _ => false,
        });
        // Trailing garbage after a parameterless command is ignored.
        assert!(match parse_uci_command("isready xxx").ok().unwrap() {
            UciCommand::IsReady => true,
            _ => false,
        });
        assert!(match parse_uci_command("ponderhit ").ok().unwrap() {
            UciCommand::PonderHit => true,
            _ => false,
        });
        // The command keyword may appear anywhere in the line.
        assert!(match parse_uci_command(" foo quit ").ok().unwrap() {
            UciCommand::Quit => true,
            _ => false,
        });
        assert!(match parse_uci_command(" stop ").ok().unwrap() {
            UciCommand::Stop => true,
            _ => false,
        });
        assert!(match parse_uci_command("ucinewgame").ok().unwrap() {
            UciCommand::UciNewGame => true,
            _ => false,
        });
        assert!(match parse_uci_command("position
                                         startpos").ok().unwrap() {
            UciCommand::Position { .. } => true,
            _ => false,
        });
        assert!(match parse_uci_command("position fen k7/8/8/8/8/8/8/7K w - - 0 1")
                    .ok()
                    .unwrap() {
            UciCommand::Position { .. } => true,
            _ => false,
        });
        assert!(match parse_uci_command("position fen k7/8/8/8/8/8/8/7K w - - 0 1 moves h1h2")
                    .ok()
                    .unwrap() {
            UciCommand::Position { .. } => true,
            _ => false,
        });
        // A malformed move ("aabb") invalidates the whole command.
        assert!(parse_uci_command("position fen k7/8/8/8/8/8/8/7K w - - 0 1 moves h1h2 aabb")
                    .is_err());
        assert!(match parse_uci_command("setoption name x value y")
                    .ok()
                    .unwrap() {
            UciCommand::SetOption { .. } => true,
            _ => false,
        });
        assert!(match parse_uci_command("go infinite").ok().unwrap() {
            UciCommand::Go(_) => true,
            _ => false,
        });
    }
}
use anyhow::{format_err, Error}; use rusoto_s3::Object; use stack_string::{format_sstr, StackString}; use std::path::Path; use time::{format_description::well_known::Rfc3339, OffsetDateTime}; use url::Url; use crate::{ file_info::{FileInfo, FileInfoTrait, FileStat, Md5Sum, Sha1Sum}, file_service::FileService, }; #[derive(Debug, Default, Clone)] pub struct FileInfoS3(FileInfo); impl FileInfoS3 { /// # Errors /// Return error if init fails pub fn from_url(url: &Url) -> Result<Self, Error> { if url.scheme() != "s3" { return Err(format_err!("Invalid URL")); } let bucket: StackString = url .host_str() .ok_or_else(|| format_err!("Parse error"))? .into(); let key = url.path(); let filepath = Path::new(&key); let filename = filepath .file_name() .ok_or_else(|| format_err!("Parse failure"))? .to_string_lossy() .into_owned() .into(); let fileurl = format_sstr!("s3://{bucket}/{key}"); let fileurl: Url = fileurl.parse()?; let serviceid = bucket.clone().into(); let servicesession = bucket.parse()?; let finfo = FileInfo::new( filename, filepath.to_path_buf().into(), fileurl.into(), None, None, FileStat::default(), serviceid, FileService::S3, servicesession, ); Ok(Self(finfo)) } } impl FileInfoTrait for FileInfoS3 { fn get_finfo(&self) -> &FileInfo { &self.0 } fn into_finfo(self) -> FileInfo { self.0 } fn get_md5(&self) -> Option<Md5Sum> { self.0.md5sum.clone() } fn get_sha1(&self) -> Option<Sha1Sum> { self.0.sha1sum.clone() } fn get_stat(&self) -> FileStat { self.0.filestat } } impl FileInfoS3 { /// # Errors /// Return error if init fails pub fn from_object(bucket: &str, item: Object) -> Result<Self, Error> { let key = item.key.as_ref().ok_or_else(|| format_err!("No key"))?; let filepath = Path::new(&key); let filename = filepath .file_name() .ok_or_else(|| format_err!("Parse failure"))? 
.to_os_string() .to_string_lossy() .into_owned() .into(); let md5sum = item.e_tag.and_then(|m| m.trim_matches('"').parse().ok()); let last_modified = item .last_modified .as_ref() .ok_or_else(|| format_err!("No last modified"))?; let st_mtime = OffsetDateTime::parse(last_modified, &Rfc3339)?.unix_timestamp(); let size = item.size.ok_or_else(|| format_err!("No file size"))?; let fileurl = format_sstr!("s3://{bucket}/{key}"); let fileurl: Url = fileurl.parse()?; let id_str: StackString = bucket.into(); let serviceid = id_str.into(); let servicesession = bucket.parse()?; let finfo = FileInfo::new( filename, filepath.to_path_buf().into(), fileurl.into(), md5sum, None, FileStat { st_mtime: st_mtime as u32, st_size: size as u32, }, serviceid, FileService::S3, servicesession, ); Ok(Self(finfo)) } } #[cfg(test)] mod tests { use rusoto_s3::{Object, Owner}; use crate::{file_info::FileInfoTrait, file_info_s3::FileInfoS3}; #[test] fn test_file_info_s3() { let test_owner = Owner { display_name: Some("me".to_string()), id: Some("8675309".to_string()), }; let test_object = Object { e_tag: Some(r#""6f90ebdaabef92a9f76be131037f593b""#.to_string()), key: Some("test_key".to_string()), last_modified: Some("2019-05-01T00:00:00+00:00".to_string()), owner: Some(test_owner), size: Some(100), storage_class: Some("Standard".to_string()), }; let finfo = FileInfoS3::from_object("test_bucket", test_object).unwrap(); assert_eq!( finfo.get_finfo().urlname.as_str(), "s3://test_bucket/test_key" ); assert_eq!(&finfo.get_finfo().filename, "test_key"); } }
use super::{CacheError, Cacheable, FileData, FileLists, LocalFile}; use crate::config::Config; use std::collections::BinaryHeap; use std::collections::HashMap; use std::ffi::OsStr; use std::sync::{ atomic::{AtomicBool, Ordering}, Arc, }; use std::time::UNIX_EPOCH; use std::fs; use tokio::sync::RwLock; // Contains various data structures to efficiently look up FileData #[derive(Clone)] pub struct FileIndex { // maps file_id to FileData pub file_id_map: Arc<RwLock<HashMap<u64, Arc<FileData>>>>, // (game, mod_id) -> BinaryHeap that keeps the modfiles sorted by timestamp. Used by the update checker. #[allow(clippy::type_complexity)] pub mod_file_map: Arc<RwLock<HashMap<(String, u32), BinaryHeap<Arc<FileData>>>>>, // used by the UI pub files_sorted: Arc<RwLock<Vec<Arc<FileData>>>>, // should the list be re-rendered pub has_changed: Arc<AtomicBool>, // reference to FileLists (which uses Arc internally) file_lists: FileLists, } impl FileIndex { pub async fn new(config: &Config, file_lists: FileLists) -> Result<Self, CacheError> { // It's unexpected but possible that FileDetails is missing let mut file_index: HashMap<u64, Arc<FileData>> = HashMap::new(); let mut mod_files: HashMap<(String, u32), BinaryHeap<Arc<FileData>>> = HashMap::new(); let mut files_sorted: Vec<Arc<FileData>> = vec![]; /* 1. Iterates through all <mod_file>.json files in the download directory for the current game, skipping those * where the corresponding <mod_file> is missing. * 2. Serialize the json files into LocalFile's. * 3. Use the file id to map each LocalFile to a FileDetails, stored in the FileData struct. * 4. Store the FileData's in a timestamp-sorted binary heap because the update algorithm depends on it. 
*/ // Sort files by creation time let mut dir_entries: Vec<_> = match fs::read_dir(config.download_dir()) { Ok(rd) => rd.map(|f| f.unwrap()).collect(), Err(_) => vec![], }; dir_entries.sort_by_key(|f| match f.metadata() { Ok(md) => md.created().unwrap(), Err(_) => UNIX_EPOCH, }); for f in dir_entries { if f.path().is_file() && f.path().extension().and_then(OsStr::to_str) != Some("json") { let json_file = f.path().with_file_name(format!("{}.json", f.file_name().to_string_lossy())); if let Ok(lf) = LocalFile::load(json_file).await { if let Some(file_list) = file_lists.get((&lf.game, lf.mod_id)).await { let file_details = file_list.files.iter().find(|fd| fd.file_id == lf.file_id).unwrap(); let file_data = Arc::new(FileData::new(lf.clone(), file_details.clone())); file_index.insert(lf.file_id, file_data.clone()); files_sorted.push(file_data.clone()); match mod_files.get_mut(&(lf.game.to_string(), lf.mod_id)) { Some(heap) => { heap.push(file_data); } None => { let mut heap = BinaryHeap::new(); heap.push(file_data); mod_files.insert((lf.game.to_string(), lf.mod_id), heap); } } } } } } Ok(Self { file_id_map: Arc::new(RwLock::new(file_index)), mod_file_map: Arc::new(RwLock::new(mod_files)), files_sorted: Arc::new(RwLock::new(files_sorted)), has_changed: Arc::new(AtomicBool::new(false)), file_lists, }) } pub async fn add(&self, lf: LocalFile) { // TODO handle missing FileDetails gracefully let file_details = self.file_lists.filedetails_for(&lf).await.unwrap(); let fdata: Arc<FileData> = FileData::new(lf.clone(), file_details).into(); self.file_id_map.write().await.insert(lf.file_id, fdata.clone()); let mut mfm_lock = self.mod_file_map.write().await; match mfm_lock.get_mut(&(lf.game.to_owned(), lf.mod_id)) { Some(heap) => { heap.push(fdata.clone()); } None => { let mut heap = BinaryHeap::new(); heap.push(fdata.clone()); mfm_lock.insert((lf.game, lf.mod_id), heap); } } self.files_sorted.write().await.push(fdata); self.has_changed.store(true, Ordering::Relaxed); } }
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
// NOTE(review): this module is machine-generated (AutoRust).  Prefer
// regenerating from the service spec over hand-editing these models.
use serde::{Deserialize, Serialize};

/// Common envelope of a top-level (tracked) Azure resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}

/// Resource referenced only by its ARM id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}

/// Opaque, schema-less JSON payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Object {}

/// Reference to another resource by id/name/type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceReference {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}

/// A Logic Apps workflow resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Workflow {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WorkflowProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<ManagedServiceIdentity>,
}

/// Properties bag of a `Workflow` (continues past this chunk).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<WorkflowProvisioningState>,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(rename =
"changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<WorkflowState>, #[serde(default, skip_serializing_if = "Option::is_none")] pub version: Option<String>, #[serde(rename = "accessEndpoint", default, skip_serializing_if = "Option::is_none")] pub access_endpoint: Option<String>, #[serde(rename = "endpointsConfiguration", default, skip_serializing_if = "Option::is_none")] pub endpoints_configuration: Option<FlowEndpointsConfiguration>, #[serde(rename = "accessControl", default, skip_serializing_if = "Option::is_none")] pub access_control: Option<FlowAccessControlConfiguration>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<Sku>, #[serde(rename = "integrationAccount", default, skip_serializing_if = "Option::is_none")] pub integration_account: Option<ResourceReference>, #[serde(rename = "integrationServiceEnvironment", default, skip_serializing_if = "Option::is_none")] pub integration_service_environment: Option<ResourceReference>, #[serde(default, skip_serializing_if = "Option::is_none")] pub definition: Option<Object>, #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowFilter { #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<WorkflowState>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Workflow>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowVersion { #[serde(flatten)] pub resource: Resource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<WorkflowVersionProperties>, 
} #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowVersionProperties { #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<WorkflowProvisioningState>, #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>, #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<WorkflowState>, #[serde(default, skip_serializing_if = "Option::is_none")] pub version: Option<String>, #[serde(rename = "accessEndpoint", default, skip_serializing_if = "Option::is_none")] pub access_endpoint: Option<String>, #[serde(rename = "endpointsConfiguration", default, skip_serializing_if = "Option::is_none")] pub endpoints_configuration: Option<FlowEndpointsConfiguration>, #[serde(rename = "accessControl", default, skip_serializing_if = "Option::is_none")] pub access_control: Option<FlowAccessControlConfiguration>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<Sku>, #[serde(rename = "integrationAccount", default, skip_serializing_if = "Option::is_none")] pub integration_account: Option<ResourceReference>, #[serde(default, skip_serializing_if = "Option::is_none")] pub definition: Option<Object>, #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowVersionListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<WorkflowVersion>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTrigger { #[serde(flatten)] pub sub_resource: SubResource, #[serde(default, skip_serializing_if = 
"Option::is_none")] pub properties: Option<WorkflowTriggerProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerProperties { #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<WorkflowTriggerProvisioningState>, #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>, #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<WorkflowState>, #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>, #[serde(rename = "lastExecutionTime", default, skip_serializing_if = "Option::is_none")] pub last_execution_time: Option<String>, #[serde(rename = "nextExecutionTime", default, skip_serializing_if = "Option::is_none")] pub next_execution_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub recurrence: Option<WorkflowTriggerRecurrence>, #[serde(default, skip_serializing_if = "Option::is_none")] pub workflow: Option<ResourceReference>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerFilter { #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<WorkflowState>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<WorkflowTrigger>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerCallbackUrl { 
#[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub method: Option<String>, #[serde(rename = "basePath", default, skip_serializing_if = "Option::is_none")] pub base_path: Option<String>, #[serde(rename = "relativePath", default, skip_serializing_if = "Option::is_none")] pub relative_path: Option<String>, #[serde(rename = "relativePathParameters", default, skip_serializing_if = "Vec::is_empty")] pub relative_path_parameters: Vec<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub queries: Option<WorkflowTriggerListCallbackUrlQueries>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerListCallbackUrlQueries { #[serde(rename = "api-version", default, skip_serializing_if = "Option::is_none")] pub api_version: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sp: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sv: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sig: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub se: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerHistory { #[serde(flatten)] pub sub_resource: SubResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<WorkflowTriggerHistoryProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerHistoryProperties { #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, 
#[serde(rename = "scheduledTime", default, skip_serializing_if = "Option::is_none")] pub scheduled_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>, #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<Object>, #[serde(rename = "trackingId", default, skip_serializing_if = "Option::is_none")] pub tracking_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub correlation: Option<Correlation>, #[serde(rename = "inputsLink", default, skip_serializing_if = "Option::is_none")] pub inputs_link: Option<ContentLink>, #[serde(rename = "outputsLink", default, skip_serializing_if = "Option::is_none")] pub outputs_link: Option<ContentLink>, #[serde(default, skip_serializing_if = "Option::is_none")] pub fired: Option<bool>, #[serde(default, skip_serializing_if = "Option::is_none")] pub run: Option<ResourceReference>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerHistoryListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<WorkflowTriggerHistory>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowTriggerHistoryFilter { #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct WorkflowRun { #[serde(flatten)] pub sub_resource: SubResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<WorkflowRunProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, 
Serialize, Deserialize)]
/// Properties of a single workflow run: timing, status, correlation and the trigger that fired it.
pub struct WorkflowRunProperties {
    #[serde(rename = "waitEndTime", default, skip_serializing_if = "Option::is_none")] pub wait_end_time: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<Object>,
    #[serde(rename = "correlationId", default, skip_serializing_if = "Option::is_none")] pub correlation_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub correlation: Option<Correlation>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub workflow: Option<ResourceReference>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub trigger: Option<WorkflowRunTrigger>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub outputs: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub response: Option<WorkflowRunTrigger>,
}

/// Filter used when listing workflow runs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunFilter {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>,
}

/// One page of workflow runs plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<WorkflowRun>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// A single action executed within a workflow run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunAction {
    #[serde(flatten)] pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<WorkflowRunActionProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>,
}

/// Execution details for one workflow-run action.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunActionProperties {
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<Object>,
    #[serde(rename = "trackingId", default, skip_serializing_if = "Option::is_none")] pub tracking_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub correlation: Option<RunActionCorrelation>,
    #[serde(rename = "inputsLink", default, skip_serializing_if = "Option::is_none")] pub inputs_link: Option<ContentLink>,
    #[serde(rename = "outputsLink", default, skip_serializing_if = "Option::is_none")] pub outputs_link: Option<ContentLink>,
    #[serde(rename = "trackedProperties", default, skip_serializing_if = "Option::is_none")] pub tracked_properties: Option<Object>,
    #[serde(rename = "retryHistory", default, skip_serializing_if = "Vec::is_empty")] pub retry_history: Vec<RetryHistory>,
}

/// Filter used when listing workflow-run actions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunActionFilter {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>,
}

/// One page of workflow-run actions plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunActionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<WorkflowRunAction>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// SKU pricing tier names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SkuName { NotSpecified, Free, Shared, Basic, Standard, Premium }

/// Lifecycle state of a workflow resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum WorkflowState { NotSpecified, Completed, Enabled, Disabled, Deleted, Suspended }

/// Execution status of a workflow run, action or trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum WorkflowStatus { NotSpecified, Paused, Running, Waiting, Succeeded, Skipped, Suspended, Cancelled, Failed, Faulted, TimedOut, Aborted, Ignored }

/// Data type of a workflow parameter.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ParameterType { NotSpecified, String, SecureString, Int, Float, Bool, Array, Object, SecureObject }

/// Which access key a regenerate operation targets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum KeyType { NotSpecified, Primary, Secondary }

/// SKU of a workflow, with an optional plan reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    pub name: SkuName,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub plan: Option<ResourceReference>,
}

/// Link to externally stored content (inputs/outputs), with version, size and hash.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContentLink {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub uri: Option<String>,
    #[serde(rename = "contentVersion", default, skip_serializing_if = "Option::is_none")] pub content_version: Option<String>,
    #[serde(rename = "contentSize", default, skip_serializing_if = "Option::is_none")] pub content_size: Option<i64>,
    #[serde(rename = "contentHash", default, skip_serializing_if = "Option::is_none")] pub content_hash: Option<ContentHash>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub metadata: Option<Object>,
}

/// Hash algorithm and value for linked content.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContentHash {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub algorithm: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>,
}

/// Request body for regenerating an access key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegenerateActionParameter {
    #[serde(rename = "keyType", default, skip_serializing_if = "Option::is_none")] pub key_type: Option<KeyType>,
}

/// One entry in an action's retry history.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RetryHistory {
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(rename = "clientRequestId", default, skip_serializing_if = "Option::is_none")] pub client_request_id: Option<String>,
    #[serde(rename = "serviceRequestId", default, skip_serializing_if = "Option::is_none")] pub service_request_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<ErrorResponse>,
}

/// Client-supplied correlation data for a run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Correlation {
    #[serde(rename = "clientTrackingId", default, skip_serializing_if = "Option::is_none")] pub client_tracking_id: Option<String>,
}

/// A workflow input parameter: type, value and metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowParameter {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<ParameterType>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<Object>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub metadata: Option<Object>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
}

/// A workflow output parameter; extends `WorkflowParameter` with an error slot.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowOutputParameter {
    #[serde(flatten)] pub workflow_parameter: WorkflowParameter,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<Object>,
}

/// Recurrence interval unit.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum RecurrenceFrequency { NotSpecified, Second, Minute, Hour, Day, Week, Month, Year }

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Fine-grained schedule within a recurrence (minutes/hours/days).
pub struct RecurrenceSchedule {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub minutes: Vec<i32>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub hours: Vec<i32>,
    #[serde(rename = "weekDays", default, skip_serializing_if = "Vec::is_empty")] pub week_days: Vec<String>,
    #[serde(rename = "monthDays", default, skip_serializing_if = "Vec::is_empty")] pub month_days: Vec<i32>,
    #[serde(rename = "monthlyOccurrences", default, skip_serializing_if = "Vec::is_empty")] pub monthly_occurrences: Vec<RecurrenceScheduleOccurrence>,
}

/// A (weekday, occurrence) pair for monthly schedules.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecurrenceScheduleOccurrence {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub day: Option<DayOfWeek>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub occurrence: Option<i32>,
}

/// Recurrence settings for a workflow trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowTriggerRecurrence {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub frequency: Option<RecurrenceFrequency>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub interval: Option<i32>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(rename = "timeZone", default, skip_serializing_if = "Option::is_none")] pub time_zone: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub schedule: Option<RecurrenceSchedule>,
}

/// The trigger invocation that started a workflow run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunTrigger {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub inputs: Option<Object>,
    #[serde(rename = "inputsLink", default, skip_serializing_if = "Option::is_none")] pub inputs_link: Option<ContentLink>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub outputs: Option<Object>,
    #[serde(rename = "outputsLink", default, skip_serializing_if = "Option::is_none")] pub outputs_link: Option<ContentLink>,
    #[serde(rename = "scheduledTime", default, skip_serializing_if = "Option::is_none")] pub scheduled_time: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(rename = "trackingId", default, skip_serializing_if = "Option::is_none")] pub tracking_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub correlation: Option<Correlation>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<Object>,
    #[serde(rename = "trackedProperties", default, skip_serializing_if = "Option::is_none")] pub tracked_properties: Option<Object>,
}

/// Provisioning state of a workflow trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum WorkflowTriggerProvisioningState { NotSpecified, Accepted, Running, Ready, Creating, Created, Deleting, Deleted, Canceled, Failed, Succeeded, Moving, Updating, Registering, Registered, Unregistering, Unregistered, Completed }

/// Provisioning state of a workflow; superset of the trigger states.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum WorkflowProvisioningState { NotSpecified, Accepted, Running, Ready, Creating, Created, Deleting, Deleted, Canceled, Failed, Succeeded, Moving, Updating, Registering, Registered, Unregistering, Unregistered, Completed, Renewing, Pending, Waiting, InProgress }

/// Day of the week for recurrence schedules.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DayOfWeek { Sunday, Monday, Tuesday, Wednesday, Thursday, Friday, Saturday }

/// Request body for generating an upgraded workflow definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GenerateUpgradedDefinitionParameters {
    #[serde(rename = "targetSchemaVersion", default, skip_serializing_if = "Option::is_none")] pub target_schema_version: Option<String>,
}

/// Tier of a managed API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ApiTier { NotSpecified, Enterprise, Standard, Premium }

/// Annotation metadata for an API operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiOperationAnnotation {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<StatusAnnotation>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub family: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub revision: Option<i64>,
}

/// Swagger `xml` object: how a schema maps to XML.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerXml {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub namespace: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub prefix: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub attribute: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub wrapped: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub extensions: Option<serde_json::Value>,
}

/// Swagger external documentation reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerExternalDocumentation {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub extensions: Option<serde_json::Value>,
}

/// Swagger extension: schema resolved dynamically via an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicSchema {
    #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>,
    #[serde(rename = "valuePath", default, skip_serializing_if = "Option::is_none")] pub value_path: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<serde_json::Value>,
}

/// Swagger extension: properties resolved dynamically via an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicProperties {
    #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>,
    #[serde(rename = "valuePath", default, skip_serializing_if = "Option::is_none")] pub value_path: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<serde_json::Value>,
}

/// Swagger extension: a value list populated dynamically via an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicList {
    #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>,
    #[serde(rename = "builtInOperation", default, skip_serializing_if = "Option::is_none")] pub built_in_operation: Option<String>,
    #[serde(rename = "itemsPath", default, skip_serializing_if = "Option::is_none")] pub items_path: Option<String>,
    #[serde(rename = "itemValuePath", default, skip_serializing_if = "Option::is_none")] pub item_value_path: Option<String>,
    #[serde(rename = "itemTitlePath", default, skip_serializing_if = "Option::is_none")] pub item_title_path: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<serde_json::Value>,
}

/// Swagger extension: a tree picker with open/browse commands.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicTree {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub settings: Option<SwaggerCustomDynamicTreeSettings>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub open: Option<SwaggerCustomDynamicTreeCommand>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub browse: Option<SwaggerCustomDynamicTreeCommand>,
}

/// Parameter binding for a dynamic-tree command.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicTreeParameter {
    #[serde(rename = "selectedItemValuePath", default, skip_serializing_if = "Option::is_none")] pub selected_item_value_path: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<Object>,
    #[serde(rename = "parameterReference", default, skip_serializing_if = "Option::is_none")] pub parameter_reference: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub required: Option<bool>,
}

/// A dynamic-tree command (open or browse) and its item paths.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicTreeCommand {
    #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")] pub operation_id: Option<String>,
    #[serde(rename = "itemsPath", default, skip_serializing_if = "Option::is_none")] pub items_path: Option<String>,
    #[serde(rename = "itemValuePath", default, skip_serializing_if = "Option::is_none")] pub item_value_path: Option<String>,
    #[serde(rename = "itemTitlePath", default, skip_serializing_if = "Option::is_none")] pub item_title_path: Option<String>,
    #[serde(rename = "itemFullTitlePath", default, skip_serializing_if = "Option::is_none")] pub item_full_title_path: Option<String>,
    #[serde(rename = "itemIsParent", default, skip_serializing_if = "Option::is_none")] pub item_is_parent: Option<String>,
    #[serde(rename = "selectableFilter", default, skip_serializing_if = "Option::is_none")] pub selectable_filter: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub parameters: Option<serde_json::Value>,
}

/// Selection settings for a dynamic tree.
/// NOTE(review): the wire names are PascalCase ("CanSelectParentNodes") — intentional per the service contract; do not "fix" to camelCase.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerCustomDynamicTreeSettings {
    #[serde(rename = "CanSelectParentNodes", default, skip_serializing_if = "Option::is_none")] pub can_select_parent_nodes: Option<bool>,
    #[serde(rename = "CanSelectLeafNodes", default, skip_serializing_if = "Option::is_none")] pub can_select_leaf_nodes: Option<bool>,
}

/// Swagger primitive schema types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SwaggerSchemaType { String, Number, Integer, Boolean, Array, File, Object, Null }

/// A (recursive) Swagger schema definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SwaggerSchema {
    #[serde(rename = "ref", default, skip_serializing_if = "Option::is_none")] pub ref_: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<SwaggerSchemaType>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub title: Option<String>,
    // Box breaks the recursive type cycle. NOTE(review): `Box<Option<T>>` (rather than the
    // more idiomatic `Option<Box<T>>`) is kept — the field type is public API; the
    // `Option::is_none` predicate still applies via deref coercion.
    #[serde(default, skip_serializing_if = "Option::is_none")] pub items: Box<Option<SwaggerSchema>>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<serde_json::Value>,
    #[serde(rename = "additionalProperties", default, skip_serializing_if = "Option::is_none")] pub additional_properties: Option<Object>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub required: Vec<String>,
    #[serde(rename = "maxProperties", default, skip_serializing_if = "Option::is_none")] pub max_properties: Option<i64>,
    #[serde(rename = "minProperties", default, skip_serializing_if = "Option::is_none")] pub min_properties: Option<i64>,
    #[serde(rename = "allOf", default, skip_serializing_if = "Vec::is_empty")] pub all_of: Vec<SwaggerSchema>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub discriminator: Option<String>,
    #[serde(rename = "readOnly", default, skip_serializing_if = "Option::is_none")] pub read_only: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub xml: Option<SwaggerXml>,
    #[serde(rename = "externalDocs", default, skip_serializing_if = "Option::is_none")] pub external_docs: Option<SwaggerExternalDocumentation>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub example: Option<Object>,
    #[serde(rename = "notificationUrlExtension", default, skip_serializing_if = "Option::is_none")] pub notification_url_extension: Option<bool>,
    #[serde(rename = "dynamicSchemaOld", default, skip_serializing_if = "Option::is_none")] pub dynamic_schema_old: Option<SwaggerCustomDynamicSchema>,
    #[serde(rename = "dynamicSchemaNew", default, skip_serializing_if = "Option::is_none")] pub dynamic_schema_new: Option<SwaggerCustomDynamicProperties>,
    #[serde(rename = "dynamicListNew", default, skip_serializing_if = "Option::is_none")] pub dynamic_list_new: Option<SwaggerCustomDynamicList>,
    #[serde(rename = "dynamicTree", default, skip_serializing_if = "Option::is_none")] pub dynamic_tree: Option<SwaggerCustomDynamicTree>,
}

/// Descriptive properties of an API operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiOperationPropertiesDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub summary: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub visibility: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub trigger: Option<String>,
    #[serde(rename = "triggerHint", default, skip_serializing_if = "Option::is_none")] pub trigger_hint: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub pageable: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub annotation: Option<ApiOperationAnnotation>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub api: Option<ApiReference>,
    #[serde(rename = "inputsDefinition", default, skip_serializing_if = "Option::is_none")] pub inputs_definition: Option<SwaggerSchema>,
    #[serde(rename = "responsesDefinition", default, skip_serializing_if = "Option::is_none")] pub responses_definition: Option<serde_json::Value>,
    #[serde(rename = "isWebhook", default, skip_serializing_if = "Option::is_none")] pub is_webhook: Option<bool>,
    #[serde(rename = "isNotification", default, skip_serializing_if = "Option::is_none")] pub is_notification: Option<bool>,
}

/// An API operation resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiOperation {
    #[serde(flatten)] pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ApiOperationPropertiesDefinition>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// One page of API operations plus the continuation link.
pub struct ApiOperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<ApiOperation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Release status annotation for an API operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum StatusAnnotation { NotSpecified, Preview, Production }

/// Reference to a specific trigger on a workflow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowTriggerReference {
    #[serde(flatten)] pub resource_reference: ResourceReference,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(rename = "flowName", default, skip_serializing_if = "Option::is_none")] pub flow_name: Option<String>,
    #[serde(rename = "triggerName", default, skip_serializing_if = "Option::is_none")] pub trigger_name: Option<String>,
}

/// Reference to a workflow resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowReference {
    #[serde(flatten)] pub resource_reference: ResourceReference,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
}

/// Reference to a managed API, with display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiReference {
    #[serde(flatten)] pub resource_reference: ResourceReference,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")] pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
    #[serde(rename = "iconUri", default, skip_serializing_if = "Option::is_none")] pub icon_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub swagger: Option<Object>,
    #[serde(rename = "brandColor", default, skip_serializing_if = "Option::is_none")] pub brand_color: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub category: Option<ApiTier>,
    #[serde(rename = "integrationServiceEnvironment", default, skip_serializing_if = "Option::is_none")] pub integration_service_environment: Option<ResourceReference>,
}

/// One page of managed APIs plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedApiListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<ManagedApi>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Properties of a managed API resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiResourceProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(rename = "connectionParameters", default, skip_serializing_if = "Option::is_none")] pub connection_parameters: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub metadata: Option<ApiResourceMetadata>,
    #[serde(rename = "runtimeUrls", default, skip_serializing_if = "Vec::is_empty")] pub runtime_urls: Vec<String>,
    #[serde(rename = "generalInformation", default, skip_serializing_if = "Option::is_none")] pub general_information: Option<ApiResourceGeneralInformation>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub capabilities: Vec<String>,
    #[serde(rename = "backendService", default, skip_serializing_if = "Option::is_none")] pub backend_service: Option<ApiResourceBackendService>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub policies: Option<ApiResourcePolicies>,
    #[serde(rename = "apiDefinitionUrl", default, skip_serializing_if = "Option::is_none")] pub api_definition_url: Option<String>,
    #[serde(rename = "apiDefinitions", default, skip_serializing_if = "Option::is_none")] pub api_definitions: Option<ApiResourceDefinitions>,
    #[serde(rename = "integrationServiceEnvironment", default, skip_serializing_if = "Option::is_none")] pub integration_service_environment: Option<ResourceReference>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<WorkflowProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub category: Option<ApiTier>,
}

/// Policy content attached to an API resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiResourcePolicies {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option<String>,
    #[serde(rename = "contentLink", default, skip_serializing_if = "Option::is_none")] pub content_link: Option<String>,
}

/// URLs of the original and modified Swagger definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiResourceDefinitions {
    #[serde(rename = "originalSwaggerUrl", default, skip_serializing_if = "Option::is_none")] pub original_swagger_url: Option<String>,
    #[serde(rename = "modifiedSwaggerUrl", default, skip_serializing_if = "Option::is_none")] pub modified_swagger_url: Option<String>,
}

/// Named set of API deployment parameter metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiDeploymentParameterMetadataSet {
    #[serde(rename = "packageContentLink", default, skip_serializing_if = "Option::is_none")] pub package_content_link: Option<ApiDeploymentParameterMetadata>,
    #[serde(rename = "redisCacheConnectionString", default, skip_serializing_if = "Option::is_none")] pub redis_cache_connection_string: Option<ApiDeploymentParameterMetadata>,
}

/// Metadata describing one API deployment parameter.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiDeploymentParameterMetadata {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>,
    #[serde(rename = "isRequired", default, skip_serializing_if = "Option::is_none")] pub is_required: Option<bool>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")] pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub visibility: Option<ApiDeploymentParameterVisibility>,
}

/// Visibility of an API deployment parameter.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ApiDeploymentParameterVisibility { NotSpecified, Default, Internal }

/// Backend service location for a managed API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiResourceBackendService {
    #[serde(rename = "serviceUrl", default, skip_serializing_if = "Option::is_none")] pub service_url: Option<String>,
}

/// Provider metadata attached to a managed API.
/// NOTE(review): the wire name for `api_type` is PascalCase ("ApiType") — intentional per the service contract.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiResourceMetadata {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub source: Option<String>,
    #[serde(rename = "brandColor", default, skip_serializing_if = "Option::is_none")] pub brand_color: Option<String>,
    #[serde(rename = "hideKey", default, skip_serializing_if = "Option::is_none")] pub hide_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<serde_json::Value>,
    #[serde(rename = "ApiType", default, skip_serializing_if = "Option::is_none")] pub api_type: Option<ApiType>,
    #[serde(rename = "wsdlService", default, skip_serializing_if = "Option::is_none")] pub wsdl_service: Option<WsdlService>,
    #[serde(rename = "wsdlImportMethod", default, skip_serializing_if = "Option::is_none")] pub wsdl_import_method: Option<WsdlImportMethod>,
    #[serde(rename = "connectionType", default, skip_serializing_if = "Option::is_none")] pub connection_type: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<WorkflowProvisioningState>,
    #[serde(rename = "deploymentParameters", default, skip_serializing_if = "Option::is_none")] pub deployment_parameters: Option<ApiDeploymentParameterMetadataSet>,
}

/// Kind of API surface.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ApiType { NotSpecified, Rest, Soap }

/// A WSDL service plus its endpoint names.
/// NOTE(review): "EndpointQualifiedNames" is PascalCase on the wire — intentional per the service contract.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WsdlService {
    #[serde(rename = "qualifiedName", default, skip_serializing_if = "Option::is_none")] pub qualified_name: Option<String>,
    #[serde(rename = "EndpointQualifiedNames", default, skip_serializing_if = "Vec::is_empty")] pub endpoint_qualified_names: Vec<String>,
}

/// How a WSDL is imported.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum WsdlImportMethod { NotSpecified, SoapToRest, SoapPassThrough }

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// General display information for an API resource.
pub struct ApiResourceGeneralInformation {
    #[serde(rename = "iconUrl", default, skip_serializing_if = "Option::is_none")] pub icon_url: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")] pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
    #[serde(rename = "termsOfUseUrl", default, skip_serializing_if = "Option::is_none")] pub terms_of_use_url: Option<String>,
    #[serde(rename = "releaseTag", default, skip_serializing_if = "Option::is_none")] pub release_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub tier: Option<ApiTier>,
}

/// A managed API resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedApi {
    #[serde(flatten)] pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ApiResourceProperties>,
}

/// Accessibility state of an ISE network endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationServiceEnvironmentNetworkEndPointAccessibilityState { NotSpecified, Unknown, Available, NotAvailable }

/// A network endpoint the ISE depends on.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentNetworkEndpoint {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub accessibility: Option<IntegrationServiceEnvironmentNetworkEndPointAccessibilityState>,
    #[serde(rename = "domainName", default, skip_serializing_if = "Option::is_none")] pub domain_name: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub ports: Vec<String>,
}

/// Category of an ISE outbound network dependency.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationServiceEnvironmentNetworkDependencyCategoryType {
    NotSpecified,
    AzureStorage,
    AzureManagement,
    AzureActiveDirectory,
    #[serde(rename = "SSLCertificateVerification")] SslCertificateVerification,
    DiagnosticLogsAndMetrics,
    IntegrationServiceEnvironmentConnectors,
    RedisCache,
    AccessEndpoints,
    RecoveryService,
    #[serde(rename = "SQL")] Sql,
    RegionalService,
}

/// One outbound network dependency of an ISE and its endpoints.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentNetworkDependency {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub category: Option<IntegrationServiceEnvironmentNetworkDependencyCategoryType>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")] pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub endpoints: Vec<IntegrationServiceEnvironmentNetworkEndpoint>,
}

/// Health state of an ISE network dependency.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationServiceEnvironmentNetworkDependencyHealthState { NotSpecified, Healthy, Unhealthy, Unknown }

/// Health of an ISE network dependency, with error detail on failure.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentNetworkDependencyHealth {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<ExtendedErrorInfo>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<IntegrationServiceEnvironmentNetworkDependencyHealthState>,
}

/// Marker type for ISE network health (empty body in the service contract).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentNetworkHealth {}

/// Network health for one ISE subnet.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentSubnetNetworkHealth {
    #[serde(rename = "outboundNetworkDependencies", default, skip_serializing_if = "Vec::is_empty")] pub outbound_network_dependencies: Vec<IntegrationServiceEnvironmentNetworkDependency>,
    #[serde(rename = "outboundNetworkHealth", default, skip_serializing_if = "Option::is_none")] pub outbound_network_health: Option<IntegrationServiceEnvironmentNetworkDependencyHealth>,
    // Required on the wire: no `default`/`skip_serializing_if`.
    #[serde(rename = "networkDependencyHealthState")] pub network_dependency_health_state: IntegrationServiceEnvironmentNetworkEndPointAccessibilityState,
}

/// Error payload with nested details; `code` and `message` are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExtendedErrorInfo {
    pub code: ErrorResponseCode,
    pub message: String,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub details: Vec<ExtendedErrorInfo>,
    #[serde(rename = "innerError", default, skip_serializing_if = "Option::is_none")] pub inner_error: Option<Object>,
}

/// Service error codes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ErrorResponseCode { NotSpecified, IntegrationServiceEnvironmentNotFound, InternalServerError, InvalidOperationId }

/// Terminal/intermediate states of an Azure async operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AzureAsyncOperationState { Failed, Succeeded, Pending, Canceled }

/// One page of integration service environments plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<IntegrationServiceEnvironment>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// An integration service environment (ISE) resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironment {
    #[serde(flatten)] pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<IntegrationServiceEnvironmentProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<IntegrationServiceEnvironmentSku>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<ManagedServiceIdentity>,
}

/// SKU (name + capacity) of an ISE.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentSku {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<IntegrationServiceEnvironmentSkuName>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub capacity: Option<i32>,
}

/// Properties of an ISE: provisioning, network and encryption configuration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<WorkflowProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub state: Option<WorkflowState>,
    #[serde(rename = "integrationServiceEnvironmentId", default, skip_serializing_if = "Option::is_none")] pub integration_service_environment_id: Option<String>,
    #[serde(rename = "endpointsConfiguration", default, skip_serializing_if = "Option::is_none")] pub endpoints_configuration: Option<FlowEndpointsConfiguration>,
    #[serde(rename = "networkConfiguration", default, skip_serializing_if = "Option::is_none")] pub network_configuration: Option<NetworkConfiguration>,
    #[serde(rename = "encryptionConfiguration", default, skip_serializing_if = "Option::is_none")] pub encryption_configuration: Option<IntegrationServiceEnvironmenEncryptionConfiguration>,
}

/// ISE SKU names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationServiceEnvironmentSkuName { NotSpecified, Premium, Developer }

/// Virtual-network configuration for an ISE.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkConfiguration {
    #[serde(rename = "virtualNetworkAddressSpace", default, skip_serializing_if = "Option::is_none")] pub virtual_network_address_space: Option<String>,
    #[serde(rename = "accessEndpoint", default, skip_serializing_if = "Option::is_none")] pub access_endpoint: Option<IntegrationServiceEnvironmentAccessEndpoint>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub subnets: Vec<ResourceReference>,
}

/// Access endpoint of an ISE.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentAccessEndpoint {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<IntegrationServiceEnvironmentAccessEndpointType>,
}

/// Whether the ISE access endpoint is internal or external.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationServiceEnvironmentAccessEndpointType { NotSpecified, External, Internal }

/// Encryption configuration for an ISE.
/// NOTE(review): the missing "t" in `…Environmen…` is a known typo in the upstream
/// Azure spec/SDK; the name is public API, so it is kept for source compatibility.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmenEncryptionConfiguration {
    #[serde(rename = "encryptionKeyReference", default, skip_serializing_if = "Option::is_none")] pub encryption_key_reference: Option<IntegrationServiceEnvironmenEncryptionKeyReference>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Key Vault key reference used for ISE encryption (typo'd name kept — see
/// `IntegrationServiceEnvironmenEncryptionConfiguration`... the name is public API).
pub struct IntegrationServiceEnvironmenEncryptionKeyReference {
    #[serde(rename = "keyVault", default, skip_serializing_if = "Option::is_none")] pub key_vault: Option<ResourceReference>,
    #[serde(rename = "keyName", default, skip_serializing_if = "Option::is_none")] pub key_name: Option<String>,
    #[serde(rename = "keyVersion", default, skip_serializing_if = "Option::is_none")] pub key_version: Option<String>,
}

/// Access control policies for the different surfaces of a workflow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FlowAccessControlConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub triggers: Option<FlowAccessControlConfigurationPolicy>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub contents: Option<FlowAccessControlConfigurationPolicy>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub actions: Option<FlowAccessControlConfigurationPolicy>,
    #[serde(rename = "workflowManagement", default, skip_serializing_if = "Option::is_none")] pub workflow_management: Option<FlowAccessControlConfigurationPolicy>,
}

/// One access-control policy: allowed caller IPs and open-auth policies.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FlowAccessControlConfigurationPolicy {
    #[serde(rename = "allowedCallerIpAddresses", default, skip_serializing_if = "Vec::is_empty")] pub allowed_caller_ip_addresses: Vec<IpAddressRange>,
    #[serde(rename = "openAuthenticationPolicies", default, skip_serializing_if = "Option::is_none")] pub open_authentication_policies: Option<OpenAuthenticationAccessPolicies>,
}

/// A single IP address range.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IpAddressRange {
    #[serde(rename = "addressRange", default, skip_serializing_if = "Option::is_none")] pub address_range: Option<String>,
}

/// Map of named open-authentication policies.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenAuthenticationAccessPolicies {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub policies: Option<serde_json::Value>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// One open-authentication policy: provider type plus claims to match.
pub struct OpenAuthenticationAccessPolicy {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<OpenAuthenticationProviderType>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub claims: Vec<OpenAuthenticationPolicyClaim>,
}

/// Supported open-auth providers (serialized as "AAD").
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OpenAuthenticationProviderType {
    #[serde(rename = "AAD")] Aad,
}

/// A single claim (name/value) for an open-auth policy.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OpenAuthenticationPolicyClaim {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<String>,
}

/// Endpoint sets for the workflow and connector planes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FlowEndpointsConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub workflow: Option<FlowEndpoints>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub connector: Option<FlowEndpoints>,
}

/// Outgoing and access-endpoint IP addresses for one plane.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FlowEndpoints {
    #[serde(rename = "outgoingIpAddresses", default, skip_serializing_if = "Vec::is_empty")] pub outgoing_ip_addresses: Vec<IpAddress>,
    #[serde(rename = "accessEndpointIpAddresses", default, skip_serializing_if = "Vec::is_empty")] pub access_endpoint_ip_addresses: Vec<IpAddress>,
}

/// A single IP address.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IpAddress {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub address: Option<String>,
}

/// Managed service identity attached to a resource.
/// NOTE(review): the struct's remaining fields continue past this chunk of the file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedServiceIdentity {
    // Required on the wire: no `default`/`skip_serializing_if`.
    #[serde(rename = "type")] pub type_: managed_service_identity::Type,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")] pub tenant_id: Option<String>,
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>,
#[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")] pub user_assigned_identities: Option<serde_json::Value>, } pub mod managed_service_identity { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Type { SystemAssigned, UserAssigned, None, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct UserAssignedIdentity { #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>, #[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")] pub client_id: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct IntegrationServiceEnvironmentSkuList { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<IntegrationServiceEnvironmentSkuDefinition>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct IntegrationServiceEnvironmentSkuDefinition { #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")] pub resource_type: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sku: Option<integration_service_environment_sku_definition::Sku>, #[serde(default, skip_serializing_if = "Option::is_none")] pub capacity: Option<IntegrationServiceEnvironmentSkuCapacity>, } pub mod integration_service_environment_sku_definition { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Sku { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<IntegrationServiceEnvironmentSkuName>, #[serde(default, skip_serializing_if = "Option::is_none")] pub tier: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct IntegrationServiceEnvironmentSkuCapacity { #[serde(default, skip_serializing_if = "Option::is_none")] pub 
minimum: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub maximum: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub default: Option<i32>,
    #[serde(rename = "scaleType", default, skip_serializing_if = "Option::is_none")]
    pub scale_type: Option<IntegrationServiceEnvironmentSkuScaleType>,
}
/// How SKU capacity may be scaled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationServiceEnvironmentSkuScaleType {
    Manual,
    Automatic,
    None,
}
/// One page of managed APIs available in an integration service environment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentManagedApiListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<IntegrationServiceEnvironmentManagedApi>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A managed API deployed into an integration service environment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentManagedApi {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<IntegrationServiceEnvironmentManagedApiProperties>,
}
/// Properties of a managed API: shared API properties plus deployment parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentManagedApiProperties {
    #[serde(flatten)]
    pub api_resource_properties: ApiResourceProperties,
    #[serde(rename = "deploymentParameters", default, skip_serializing_if = "Option::is_none")]
    pub deployment_parameters: Option<IntegrationServiceEnvironmentManagedApiDeploymentParameters>,
}
/// Deployment parameters for a managed API (content-link definition).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationServiceEnvironmentManagedApiDeploymentParameters {
    #[serde(rename = "contentLinkDefinition", default, skip_serializing_if = "Option::is_none")]
    pub content_link_definition: Option<ContentLink>,
}
/// SKU names available for an integration account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum IntegrationAccountSkuName {
    NotSpecified,
    Free,
    Basic,
    Standard,
}
/// An integration account resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccount {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<IntegrationAccountProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<IntegrationAccountSku>,
}
/// Properties of an integration account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountProperties {
    #[serde(rename = "integrationServiceEnvironment", default, skip_serializing_if = "Option::is_none")]
    pub integration_service_environment: Option<ResourceReference>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<WorkflowState>,
}
/// One page of integration accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<IntegrationAccount>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Request parameters for obtaining a callback URL.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GetCallbackUrlParameters {
    #[serde(rename = "notAfter", default, skip_serializing_if = "Option::is_none")]
    pub not_after: Option<String>,
    #[serde(rename = "keyType", default, skip_serializing_if = "Option::is_none")]
    pub key_type: Option<KeyType>,
}
/// A callback URL returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CallbackUrl {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// An integration account schema; `properties` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSchema {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: IntegrationAccountSchemaProperties,
}
/// Properties of an integration account schema.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSchemaProperties {
    #[serde(rename = "schemaType")]
    pub schema_type: SchemaType,
    #[serde(rename = "targetNamespace", default, skip_serializing_if = "Option::is_none")]
    pub target_namespace: Option<String>,
    #[serde(rename = "documentName", default, skip_serializing_if = "Option::is_none")]
    pub document_name: Option<String>,
    #[serde(rename = "fileName", default, skip_serializing_if = "Option::is_none")]
    pub file_name: Option<String>,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")]
    pub changed_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    #[serde(rename = "contentType", default, skip_serializing_if = "Option::is_none")]
    pub content_type: Option<String>,
    #[serde(rename = "contentLink", default, skip_serializing_if = "Option::is_none")]
    pub content_link: Option<ContentLink>,
}
/// One page of integration account schemas.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSchemaListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<IntegrationAccountSchema>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Filter for listing schemas by type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSchemaFilter {
    #[serde(rename = "schemaType")]
    pub schema_type: SchemaType,
}
/// Supported schema types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SchemaType {
    NotSpecified,
    Xml,
}
/// An integration account map; `properties` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountMap {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: IntegrationAccountMapProperties,
}
/// Properties of an integration account map.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountMapProperties {
    #[serde(rename = "mapType")]
    pub map_type: MapType,
    #[serde(rename = "parametersSchema", default, skip_serializing_if = "Option::is_none")]
    pub parameters_schema: Option<integration_account_map_properties::ParametersSchema>,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")]
    pub changed_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    #[serde(rename = "contentType", default, skip_serializing_if = "Option::is_none")]
    pub content_type: Option<String>,
    #[serde(rename = "contentLink", default, skip_serializing_if = "Option::is_none")]
    pub content_link: Option<ContentLink>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
}
/// Namespacing module for the map properties' parameters-schema reference.
pub mod integration_account_map_properties {
    use super::*;
    /// Reference to the parameters schema of the map.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct ParametersSchema {
        // `ref` is a Rust keyword, hence the `ref_` field name with a rename.
        #[serde(rename = "ref", default, skip_serializing_if = "Option::is_none")]
        pub ref_: Option<String>,
    }
}
/// One page of integration account maps.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountMapListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<IntegrationAccountMap>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Filter for listing maps by type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountMapFilter {
    #[serde(rename = "mapType")]
    pub map_type: MapType,
}
/// Supported map types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum MapType {
    NotSpecified,
    Xslt,
    Xslt20,
    Xslt30,
    Liquid,
}
/// SKU of an integration account; `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSku {
    pub name: IntegrationAccountSkuName,
}
/// One page of integration account partners.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountPartnerListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<IntegrationAccountPartner>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Filter for listing partners by type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountPartnerFilter {
    #[serde(rename = "partnerType")]
    pub partner_type: PartnerType,
}
/// An integration account partner; `properties` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountPartner {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: IntegrationAccountPartnerProperties,
}
/// Properties of an integration account partner.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountPartnerProperties {
    #[serde(rename = "partnerType")]
    pub partner_type: PartnerType,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")]
    pub changed_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
    pub content: PartnerContent,
}
/// Kinds of partner (serialized "B2B" for the B2b variant).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PartnerType {
    NotSpecified,
    #[serde(rename = "B2B")]
    B2b,
}
/// Partner content; currently only B2B content is modeled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartnerContent {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub b2b: Option<B2bPartnerContent>,
}
/// B2B partner content: the partner's business identities.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct B2bPartnerContent {
    #[serde(rename = "businessIdentities", default, skip_serializing_if = "Vec::is_empty")]
    pub business_identities: Vec<BusinessIdentity>,
}
/// A qualified business identity (both fields required).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BusinessIdentity {
    pub qualifier: String,
    pub value: String,
}
/// One page of integration account agreements.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountAgreementListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<IntegrationAccountAgreement>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Filter for listing agreements by type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountAgreementFilter {
    #[serde(rename = "agreementType")]
    pub agreement_type: AgreementType,
}
#[derive(Clone, Debug,
PartialEq, Serialize, Deserialize)]
/// An integration account agreement; `properties` is required.
pub struct IntegrationAccountAgreement {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: IntegrationAccountAgreementProperties,
}
/// Properties of an agreement: partners, identities and protocol content.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountAgreementProperties {
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")]
    pub changed_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
    #[serde(rename = "agreementType")]
    pub agreement_type: AgreementType,
    #[serde(rename = "hostPartner")]
    pub host_partner: String,
    #[serde(rename = "guestPartner")]
    pub guest_partner: String,
    #[serde(rename = "hostIdentity")]
    pub host_identity: BusinessIdentity,
    #[serde(rename = "guestIdentity")]
    pub guest_identity: BusinessIdentity,
    pub content: AgreementContent,
}
/// Supported B2B agreement protocols (As2 serialized as "AS2").
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AgreementType {
    NotSpecified,
    #[serde(rename = "AS2")]
    As2,
    X12,
    Edifact,
}
/// Agreement content; exactly one of the protocol payloads is expected.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AgreementContent {
    #[serde(rename = "aS2", default, skip_serializing_if = "Option::is_none")]
    pub a_s2: Option<As2AgreementContent>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub x12: Option<X12AgreementContent>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub edifact: Option<EdifactAgreementContent>,
}
/// AS2 agreement content: one-way agreements for each direction.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2AgreementContent {
    #[serde(rename = "receiveAgreement")]
    pub receive_agreement: As2OneWayAgreement,
    #[serde(rename = "sendAgreement")]
    pub send_agreement: As2OneWayAgreement,
}
/// One direction of an AS2 agreement: both identities plus protocol settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2OneWayAgreement {
    #[serde(rename = "senderBusinessIdentity")]
    pub sender_business_identity: BusinessIdentity,
    #[serde(rename = "receiverBusinessIdentity")]
    pub receiver_business_identity: BusinessIdentity,
    #[serde(rename = "protocolSettings")]
    pub protocol_settings: As2ProtocolSettings,
}
/// Complete AS2 protocol settings; every settings group is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2ProtocolSettings {
    #[serde(rename = "messageConnectionSettings")]
    pub message_connection_settings: As2MessageConnectionSettings,
    #[serde(rename = "acknowledgementConnectionSettings")]
    pub acknowledgement_connection_settings: As2AcknowledgementConnectionSettings,
    #[serde(rename = "mdnSettings")]
    pub mdn_settings: As2MdnSettings,
    #[serde(rename = "securitySettings")]
    pub security_settings: As2SecuritySettings,
    #[serde(rename = "validationSettings")]
    pub validation_settings: As2ValidationSettings,
    #[serde(rename = "envelopeSettings")]
    pub envelope_settings: As2EnvelopeSettings,
    #[serde(rename = "errorSettings")]
    pub error_settings: As2ErrorSettings,
}
/// HTTP connection flags for the AS2 acknowledgement channel.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2AcknowledgementConnectionSettings {
    #[serde(rename = "ignoreCertificateNameMismatch")]
    pub ignore_certificate_name_mismatch: bool,
    #[serde(rename = "supportHttpStatusCodeContinue")]
    pub support_http_status_code_continue: bool,
    #[serde(rename = "keepHttpConnectionAlive")]
    pub keep_http_connection_alive: bool,
    #[serde(rename = "unfoldHttpHeaders")]
    pub unfold_http_headers: bool,
}
/// HTTP connection flags for the AS2 message channel (same shape as
/// the acknowledgement settings above).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2MessageConnectionSettings {
    #[serde(rename = "ignoreCertificateNameMismatch")]
    pub ignore_certificate_name_mismatch: bool,
    #[serde(rename = "supportHttpStatusCodeContinue")]
    pub support_http_status_code_continue: bool,
    #[serde(rename = "keepHttpConnectionAlive")]
    pub keep_http_connection_alive: bool,
    #[serde(rename = "unfoldHttpHeaders")]
    pub unfold_http_headers: bool,
}
/// AS2 MDN (message disposition notification) settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2MdnSettings {
    #[serde(rename = "needMDN")]
    pub need_mdn: bool,
    #[serde(rename = "signMDN")]
    pub sign_mdn: bool,
    #[serde(rename = "sendMDNAsynchronously")]
    pub send_mdn_asynchronously: bool,
    #[serde(rename = "receiptDeliveryUrl", default, skip_serializing_if = "Option::is_none")]
    pub receipt_delivery_url: Option<String>,
    #[serde(rename = "dispositionNotificationTo", default, skip_serializing_if = "Option::is_none")]
    pub disposition_notification_to: Option<String>,
    #[serde(rename = "signOutboundMDNIfOptional")]
    pub sign_outbound_mdn_if_optional: bool,
    #[serde(rename = "mdnText", default, skip_serializing_if = "Option::is_none")]
    pub mdn_text: Option<String>,
    #[serde(rename = "sendInboundMDNToMessageBox")]
    pub send_inbound_mdn_to_message_box: bool,
    #[serde(rename = "micHashingAlgorithm")]
    pub mic_hashing_algorithm: HashingAlgorithm,
}
/// AS2 security settings: certificates and non-repudiation (NRR) flags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2SecuritySettings {
    #[serde(rename = "overrideGroupSigningCertificate")]
    pub override_group_signing_certificate: bool,
    #[serde(rename = "signingCertificateName", default, skip_serializing_if = "Option::is_none")]
    pub signing_certificate_name: Option<String>,
    #[serde(rename = "encryptionCertificateName", default, skip_serializing_if = "Option::is_none")]
    pub encryption_certificate_name: Option<String>,
    #[serde(rename = "enableNRRForInboundEncodedMessages")]
    pub enable_nrr_for_inbound_encoded_messages: bool,
    #[serde(rename = "enableNRRForInboundDecodedMessages")]
    pub enable_nrr_for_inbound_decoded_messages: bool,
    #[serde(rename = "enableNRRForOutboundMDN")]
    pub enable_nrr_for_outbound_mdn: bool,
    #[serde(rename = "enableNRRForOutboundEncodedMessages")]
    pub enable_nrr_for_outbound_encoded_messages: bool,
    #[serde(rename = "enableNRRForOutboundDecodedMessages")]
    pub enable_nrr_for_outbound_decoded_messages: bool,
    #[serde(rename = "enableNRRForInboundMDN")]
    pub enable_nrr_for_inbound_mdn: bool,
    #[serde(rename = "sha2AlgorithmFormat", default, skip_serializing_if = "Option::is_none")]
    pub sha2_algorithm_format: Option<String>,
}
/// AS2 validation settings (signing, encryption, duplicate detection).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2ValidationSettings {
    #[serde(rename = "overrideMessageProperties")]
    pub override_message_properties: bool,
    #[serde(rename = "encryptMessage")]
    pub encrypt_message: bool,
    #[serde(rename = "signMessage")]
    pub sign_message: bool,
    #[serde(rename = "compressMessage")]
    pub compress_message: bool,
    #[serde(rename = "checkDuplicateMessage")]
    pub check_duplicate_message: bool,
    #[serde(rename = "interchangeDuplicatesValidityDays")]
    pub interchange_duplicates_validity_days: i32,
    #[serde(rename = "checkCertificateRevocationListOnSend")]
    pub check_certificate_revocation_list_on_send: bool,
    #[serde(rename = "checkCertificateRevocationListOnReceive")]
    pub check_certificate_revocation_list_on_receive: bool,
    #[serde(rename = "encryptionAlgorithm")]
    pub encryption_algorithm: EncryptionAlgorithm,
    #[serde(rename = "signingAlgorithm", default, skip_serializing_if = "Option::is_none")]
    pub signing_algorithm: Option<SigningAlgorithm>,
}
/// AS2 envelope settings (MIME content type and file naming).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2EnvelopeSettings {
    #[serde(rename = "messageContentType")]
    pub message_content_type: String,
    #[serde(rename = "transmitFileNameInMimeHeader")]
    pub transmit_file_name_in_mime_header: bool,
    #[serde(rename = "fileNameTemplate")]
    pub file_name_template: String,
    #[serde(rename = "suspendMessageOnFileNameGenerationError")]
    pub suspend_message_on_file_name_generation_error: bool,
    #[serde(rename = "autogenerateFileName")]
    pub autogenerate_file_name: bool,
}
/// AS2 error-handling settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct As2ErrorSettings {
    #[serde(rename = "suspendDuplicateMessage")]
    pub suspend_duplicate_message: bool,
    #[serde(rename = "resendIfMDNNotReceived")]
    pub resend_if_mdn_not_received: bool,
}
/// X12 agreement content: one-way agreements for each direction.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12AgreementContent {
    #[serde(rename = "receiveAgreement")]
    pub receive_agreement: X12OneWayAgreement,
    #[serde(rename = "sendAgreement")]
    pub send_agreement: X12OneWayAgreement,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// One direction of an X12 agreement: both identities plus protocol settings.
pub struct X12OneWayAgreement {
    #[serde(rename = "senderBusinessIdentity")]
    pub sender_business_identity: BusinessIdentity,
    #[serde(rename = "receiverBusinessIdentity")]
    pub receiver_business_identity: BusinessIdentity,
    #[serde(rename = "protocolSettings")]
    pub protocol_settings: X12ProtocolSettings,
}
/// Complete X12 protocol settings, including per-message overrides.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12ProtocolSettings {
    #[serde(rename = "validationSettings")]
    pub validation_settings: X12ValidationSettings,
    #[serde(rename = "framingSettings")]
    pub framing_settings: X12FramingSettings,
    #[serde(rename = "envelopeSettings")]
    pub envelope_settings: X12EnvelopeSettings,
    #[serde(rename = "acknowledgementSettings")]
    pub acknowledgement_settings: X12AcknowledgementSettings,
    #[serde(rename = "messageFilter")]
    pub message_filter: X12MessageFilter,
    #[serde(rename = "securitySettings")]
    pub security_settings: X12SecuritySettings,
    #[serde(rename = "processingSettings")]
    pub processing_settings: X12ProcessingSettings,
    #[serde(rename = "envelopeOverrides", default, skip_serializing_if = "Vec::is_empty")]
    pub envelope_overrides: Vec<X12EnvelopeOverride>,
    #[serde(rename = "validationOverrides", default, skip_serializing_if = "Vec::is_empty")]
    pub validation_overrides: Vec<X12ValidationOverride>,
    #[serde(rename = "messageFilterList", default, skip_serializing_if = "Vec::is_empty")]
    pub message_filter_list: Vec<X12MessageIdentifier>,
    // Required (no default/skip attribute), unlike the other list fields.
    #[serde(rename = "schemaReferences")]
    pub schema_references: Vec<X12SchemaReference>,
    #[serde(rename = "x12DelimiterOverrides", default, skip_serializing_if = "Vec::is_empty")]
    pub x12_delimiter_overrides: Vec<X12DelimiterOverrides>,
}
/// X12 validation settings (definition continues on the next source line).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12ValidationSettings {
    #[serde(rename = "validateCharacterSet")]
    pub validate_character_set: bool,
    #[serde(rename = "checkDuplicateInterchangeControlNumber")]
    pub check_duplicate_interchange_control_number: bool,
    #[serde(rename =
"interchangeControlNumberValidityDays")]
    pub interchange_control_number_validity_days: i32,
    #[serde(rename = "checkDuplicateGroupControlNumber")]
    pub check_duplicate_group_control_number: bool,
    #[serde(rename = "checkDuplicateTransactionSetControlNumber")]
    pub check_duplicate_transaction_set_control_number: bool,
    #[serde(rename = "validateEDITypes")]
    pub validate_edi_types: bool,
    #[serde(rename = "validateXSDTypes")]
    pub validate_xsd_types: bool,
    #[serde(rename = "allowLeadingAndTrailingSpacesAndZeroes")]
    pub allow_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "trimLeadingAndTrailingSpacesAndZeroes")]
    pub trim_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "trailingSeparatorPolicy")]
    pub trailing_separator_policy: TrailingSeparatorPolicy,
}
/// X12 framing settings (separators are carried as character codes).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12FramingSettings {
    #[serde(rename = "dataElementSeparator")]
    pub data_element_separator: i32,
    #[serde(rename = "componentSeparator")]
    pub component_separator: i32,
    #[serde(rename = "replaceSeparatorsInPayload")]
    pub replace_separators_in_payload: bool,
    #[serde(rename = "replaceCharacter")]
    pub replace_character: i32,
    #[serde(rename = "segmentTerminator")]
    pub segment_terminator: i32,
    #[serde(rename = "characterSet")]
    pub character_set: X12CharacterSet,
    #[serde(rename = "segmentTerminatorSuffix")]
    pub segment_terminator_suffix: SegmentTerminatorSuffix,
}
/// X12 envelope settings: control numbers, group headers and formats.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12EnvelopeSettings {
    #[serde(rename = "controlStandardsId")]
    pub control_standards_id: i32,
    #[serde(rename = "useControlStandardsIdAsRepetitionCharacter")]
    pub use_control_standards_id_as_repetition_character: bool,
    #[serde(rename = "senderApplicationId")]
    pub sender_application_id: String,
    #[serde(rename = "receiverApplicationId")]
    pub receiver_application_id: String,
    #[serde(rename = "controlVersionNumber")]
    pub control_version_number: String,
    #[serde(rename = "interchangeControlNumberLowerBound")]
    pub interchange_control_number_lower_bound: i32,
    #[serde(rename = "interchangeControlNumberUpperBound")]
    pub interchange_control_number_upper_bound: i32,
    #[serde(rename = "rolloverInterchangeControlNumber")]
    pub rollover_interchange_control_number: bool,
    #[serde(rename = "enableDefaultGroupHeaders")]
    pub enable_default_group_headers: bool,
    #[serde(rename = "functionalGroupId", default, skip_serializing_if = "Option::is_none")]
    pub functional_group_id: Option<String>,
    #[serde(rename = "groupControlNumberLowerBound")]
    pub group_control_number_lower_bound: i32,
    #[serde(rename = "groupControlNumberUpperBound")]
    pub group_control_number_upper_bound: i32,
    #[serde(rename = "rolloverGroupControlNumber")]
    pub rollover_group_control_number: bool,
    #[serde(rename = "groupHeaderAgencyCode")]
    pub group_header_agency_code: String,
    #[serde(rename = "groupHeaderVersion")]
    pub group_header_version: String,
    #[serde(rename = "transactionSetControlNumberLowerBound")]
    pub transaction_set_control_number_lower_bound: i32,
    #[serde(rename = "transactionSetControlNumberUpperBound")]
    pub transaction_set_control_number_upper_bound: i32,
    #[serde(rename = "rolloverTransactionSetControlNumber")]
    pub rollover_transaction_set_control_number: bool,
    #[serde(rename = "transactionSetControlNumberPrefix", default, skip_serializing_if = "Option::is_none")]
    pub transaction_set_control_number_prefix: Option<String>,
    #[serde(rename = "transactionSetControlNumberSuffix", default, skip_serializing_if = "Option::is_none")]
    pub transaction_set_control_number_suffix: Option<String>,
    #[serde(rename = "overwriteExistingTransactionSetControlNumber")]
    pub overwrite_existing_transaction_set_control_number: bool,
    #[serde(rename = "groupHeaderDateFormat")]
    pub group_header_date_format: X12DateFormat,
    #[serde(rename = "groupHeaderTimeFormat")]
    pub group_header_time_format: X12TimeFormat,
    #[serde(rename = "usageIndicator")]
    pub usage_indicator: UsageIndicator,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// X12 acknowledgement settings (TA1/997/999 behaviour and control numbers).
pub struct X12AcknowledgementSettings {
    #[serde(rename = "needTechnicalAcknowledgement")]
    pub need_technical_acknowledgement: bool,
    #[serde(rename = "batchTechnicalAcknowledgements")]
    pub batch_technical_acknowledgements: bool,
    #[serde(rename = "needFunctionalAcknowledgement")]
    pub need_functional_acknowledgement: bool,
    #[serde(rename = "functionalAcknowledgementVersion", default, skip_serializing_if = "Option::is_none")]
    pub functional_acknowledgement_version: Option<String>,
    #[serde(rename = "batchFunctionalAcknowledgements")]
    pub batch_functional_acknowledgements: bool,
    #[serde(rename = "needImplementationAcknowledgement")]
    pub need_implementation_acknowledgement: bool,
    #[serde(rename = "implementationAcknowledgementVersion", default, skip_serializing_if = "Option::is_none")]
    pub implementation_acknowledgement_version: Option<String>,
    #[serde(rename = "batchImplementationAcknowledgements")]
    pub batch_implementation_acknowledgements: bool,
    #[serde(rename = "needLoopForValidMessages")]
    pub need_loop_for_valid_messages: bool,
    #[serde(rename = "sendSynchronousAcknowledgement")]
    pub send_synchronous_acknowledgement: bool,
    #[serde(rename = "acknowledgementControlNumberPrefix", default, skip_serializing_if = "Option::is_none")]
    pub acknowledgement_control_number_prefix: Option<String>,
    #[serde(rename = "acknowledgementControlNumberSuffix", default, skip_serializing_if = "Option::is_none")]
    pub acknowledgement_control_number_suffix: Option<String>,
    #[serde(rename = "acknowledgementControlNumberLowerBound")]
    pub acknowledgement_control_number_lower_bound: i32,
    #[serde(rename = "acknowledgementControlNumberUpperBound")]
    pub acknowledgement_control_number_upper_bound: i32,
    #[serde(rename = "rolloverAcknowledgementControlNumber")]
    pub rollover_acknowledgement_control_number: bool,
}
/// X12 message filter (filter type only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12MessageFilter {
    #[serde(rename = "messageFilterType")]
    pub message_filter_type: MessageFilterType,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// X12 security settings (ISA authorization/security qualifiers and values).
pub struct X12SecuritySettings {
    #[serde(rename = "authorizationQualifier")]
    pub authorization_qualifier: String,
    #[serde(rename = "authorizationValue", default, skip_serializing_if = "Option::is_none")]
    pub authorization_value: Option<String>,
    #[serde(rename = "securityQualifier")]
    pub security_qualifier: String,
    #[serde(rename = "passwordValue", default, skip_serializing_if = "Option::is_none")]
    pub password_value: Option<String>,
}
/// X12 processing settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12ProcessingSettings {
    #[serde(rename = "maskSecurityInfo")]
    pub mask_security_info: bool,
    #[serde(rename = "convertImpliedDecimal")]
    pub convert_implied_decimal: bool,
    #[serde(rename = "preserveInterchange")]
    pub preserve_interchange: bool,
    #[serde(rename = "suspendInterchangeOnError")]
    pub suspend_interchange_on_error: bool,
    #[serde(rename = "createEmptyXmlTagsForTrailingSeparators")]
    pub create_empty_xml_tags_for_trailing_separators: bool,
    #[serde(rename = "useDotAsDecimalSeparator")]
    pub use_dot_as_decimal_separator: bool,
}
/// Per-message X12 envelope override.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12EnvelopeOverride {
    #[serde(rename = "targetNamespace")]
    pub target_namespace: String,
    #[serde(rename = "protocolVersion")]
    pub protocol_version: String,
    #[serde(rename = "messageId")]
    pub message_id: String,
    #[serde(rename = "responsibleAgencyCode")]
    pub responsible_agency_code: String,
    #[serde(rename = "headerVersion")]
    pub header_version: String,
    #[serde(rename = "senderApplicationId")]
    pub sender_application_id: String,
    #[serde(rename = "receiverApplicationId")]
    pub receiver_application_id: String,
    #[serde(rename = "functionalIdentifierCode", default, skip_serializing_if = "Option::is_none")]
    pub functional_identifier_code: Option<String>,
    #[serde(rename = "dateFormat")]
    pub date_format: X12DateFormat,
    #[serde(rename = "timeFormat")]
    pub time_format: X12TimeFormat,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Per-message X12 validation override.
pub struct X12ValidationOverride {
    #[serde(rename = "messageId")]
    pub message_id: String,
    #[serde(rename = "validateEDITypes")]
    pub validate_edi_types: bool,
    #[serde(rename = "validateXSDTypes")]
    pub validate_xsd_types: bool,
    #[serde(rename = "allowLeadingAndTrailingSpacesAndZeroes")]
    pub allow_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "validateCharacterSet")]
    pub validate_character_set: bool,
    #[serde(rename = "trimLeadingAndTrailingSpacesAndZeroes")]
    pub trim_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "trailingSeparatorPolicy")]
    pub trailing_separator_policy: TrailingSeparatorPolicy,
}
/// Identifies an X12 message by id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12MessageIdentifier {
    #[serde(rename = "messageId")]
    pub message_id: String,
}
/// Reference to an X12 schema by message id, version and name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12SchemaReference {
    #[serde(rename = "messageId")]
    pub message_id: String,
    #[serde(rename = "senderApplicationId", default, skip_serializing_if = "Option::is_none")]
    pub sender_application_id: Option<String>,
    #[serde(rename = "schemaVersion")]
    pub schema_version: String,
    #[serde(rename = "schemaName")]
    pub schema_name: String,
}
/// Per-message/version X12 delimiter overrides
/// (definition continues on the next source line).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct X12DelimiterOverrides {
    #[serde(rename = "protocolVersion", default, skip_serializing_if = "Option::is_none")]
    pub protocol_version: Option<String>,
    #[serde(rename = "messageId", default, skip_serializing_if = "Option::is_none")]
    pub message_id: Option<String>,
    #[serde(rename = "dataElementSeparator")]
    pub data_element_separator: i32,
    #[serde(rename = "componentSeparator")]
    pub component_separator: i32,
    #[serde(rename = "segmentTerminator")]
    pub segment_terminator: i32,
    #[serde(rename = "segmentTerminatorSuffix")]
    pub segment_terminator_suffix: SegmentTerminatorSuffix,
    #[serde(rename = "replaceCharacter")]
    pub replace_character: i32,
    #[serde(rename = "replaceSeparatorsInPayload")]
    pub replace_separators_in_payload: bool,
    #[serde(rename
= "targetNamespace", default, skip_serializing_if = "Option::is_none")] pub target_namespace: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum X12CharacterSet { NotSpecified, Basic, Extended, #[serde(rename = "UTF8")] Utf8, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum SegmentTerminatorSuffix { NotSpecified, None, #[serde(rename = "CR")] Cr, #[serde(rename = "LF")] Lf, #[serde(rename = "CRLF")] Crlf, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum X12DateFormat { NotSpecified, #[serde(rename = "CCYYMMDD")] Ccyymmdd, #[serde(rename = "YYMMDD")] Yymmdd, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum X12TimeFormat { NotSpecified, #[serde(rename = "HHMM")] Hhmm, #[serde(rename = "HHMMSS")] Hhmmss, #[serde(rename = "HHMMSSdd")] HhmmsSdd, #[serde(rename = "HHMMSSd")] HhmmsSd, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum UsageIndicator { NotSpecified, Test, Information, Production, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum MessageFilterType { NotSpecified, Include, Exclude, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum HashingAlgorithm { NotSpecified, None, #[serde(rename = "MD5")] Md5, #[serde(rename = "SHA1")] Sha1, #[serde(rename = "SHA2256")] Sha2256, #[serde(rename = "SHA2384")] Sha2384, #[serde(rename = "SHA2512")] Sha2512, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum EncryptionAlgorithm { NotSpecified, None, #[serde(rename = "DES3")] Des3, #[serde(rename = "RC2")] Rc2, #[serde(rename = "AES128")] Aes128, #[serde(rename = "AES192")] Aes192, #[serde(rename = "AES256")] Aes256, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum SigningAlgorithm { NotSpecified, Default, #[serde(rename = "SHA1")] Sha1, #[serde(rename = "SHA2256")] Sha2256, #[serde(rename = "SHA2384")] Sha2384, #[serde(rename = "SHA2512")] Sha2512, } #[derive(Clone, Debug, 
PartialEq, Serialize, Deserialize)] pub enum TrailingSeparatorPolicy { NotSpecified, NotAllowed, Optional, Mandatory, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct EdifactAgreementContent { #[serde(rename = "receiveAgreement")] pub receive_agreement: EdifactOneWayAgreement, #[serde(rename = "sendAgreement")] pub send_agreement: EdifactOneWayAgreement, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct EdifactOneWayAgreement { #[serde(rename = "senderBusinessIdentity")] pub sender_business_identity: BusinessIdentity, #[serde(rename = "receiverBusinessIdentity")] pub receiver_business_identity: BusinessIdentity, #[serde(rename = "protocolSettings")] pub protocol_settings: EdifactProtocolSettings, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct EdifactProtocolSettings { #[serde(rename = "validationSettings")] pub validation_settings: EdifactValidationSettings, #[serde(rename = "framingSettings")] pub framing_settings: EdifactFramingSettings, #[serde(rename = "envelopeSettings")] pub envelope_settings: EdifactEnvelopeSettings, #[serde(rename = "acknowledgementSettings")] pub acknowledgement_settings: EdifactAcknowledgementSettings, #[serde(rename = "messageFilter")] pub message_filter: EdifactMessageFilter, #[serde(rename = "processingSettings")] pub processing_settings: EdifactProcessingSettings, #[serde(rename = "envelopeOverrides", default, skip_serializing_if = "Vec::is_empty")] pub envelope_overrides: Vec<EdifactEnvelopeOverride>, #[serde(rename = "messageFilterList", default, skip_serializing_if = "Vec::is_empty")] pub message_filter_list: Vec<EdifactMessageIdentifier>, #[serde(rename = "schemaReferences")] pub schema_references: Vec<EdifactSchemaReference>, #[serde(rename = "validationOverrides", default, skip_serializing_if = "Vec::is_empty")] pub validation_overrides: Vec<EdifactValidationOverride>, #[serde(rename = "edifactDelimiterOverrides", default, skip_serializing_if = 
"Vec::is_empty")] pub edifact_delimiter_overrides: Vec<EdifactDelimiterOverride>, }
// ^ completes EdifactProtocolSettings, whose opening half sits on an earlier line.

/// EDIFACT validation switches: duplicate control-number checks, EDI/XSD type
/// validation, and whitespace/trailing-separator handling.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactValidationSettings {
    #[serde(rename = "validateCharacterSet")] pub validate_character_set: bool,
    #[serde(rename = "checkDuplicateInterchangeControlNumber")] pub check_duplicate_interchange_control_number: bool,
    #[serde(rename = "interchangeControlNumberValidityDays")] pub interchange_control_number_validity_days: i32,
    #[serde(rename = "checkDuplicateGroupControlNumber")] pub check_duplicate_group_control_number: bool,
    #[serde(rename = "checkDuplicateTransactionSetControlNumber")] pub check_duplicate_transaction_set_control_number: bool,
    #[serde(rename = "validateEDITypes")] pub validate_edi_types: bool,
    #[serde(rename = "validateXSDTypes")] pub validate_xsd_types: bool,
    #[serde(rename = "allowLeadingAndTrailingSpacesAndZeroes")] pub allow_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "trimLeadingAndTrailingSpacesAndZeroes")] pub trim_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "trailingSeparatorPolicy")] pub trailing_separator_policy: TrailingSeparatorPolicy,
}

/// EDIFACT framing: protocol version, separator/indicator fields (`i32` on the
/// wire), character set, and segment-terminator suffix.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactFramingSettings {
    #[serde(rename = "serviceCodeListDirectoryVersion", default, skip_serializing_if = "Option::is_none")] pub service_code_list_directory_version: Option<String>,
    #[serde(rename = "characterEncoding", default, skip_serializing_if = "Option::is_none")] pub character_encoding: Option<String>,
    #[serde(rename = "protocolVersion")] pub protocol_version: i32,
    #[serde(rename = "dataElementSeparator")] pub data_element_separator: i32,
    #[serde(rename = "componentSeparator")] pub component_separator: i32,
    #[serde(rename = "segmentTerminator")] pub segment_terminator: i32,
    #[serde(rename = "releaseIndicator")] pub release_indicator: i32,
    #[serde(rename = "repetitionSeparator")] pub repetition_separator: i32,
    #[serde(rename = "characterSet")] pub character_set: EdifactCharacterSet,
    #[serde(rename = "decimalPointIndicator")] pub decimal_point_indicator: EdifactDecimalIndicator,
    #[serde(rename = "segmentTerminatorSuffix")] pub segment_terminator_suffix: SegmentTerminatorSuffix,
}

/// EDIFACT envelope configuration: interchange, group and transaction-set
/// control-number bounds/prefixes/suffixes plus sender/receiver identification.
/// Control-number bounds here are `i64` (the X12 counterparts use `i32`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactEnvelopeSettings {
    #[serde(rename = "groupAssociationAssignedCode", default, skip_serializing_if = "Option::is_none")] pub group_association_assigned_code: Option<String>,
    #[serde(rename = "communicationAgreementId", default, skip_serializing_if = "Option::is_none")] pub communication_agreement_id: Option<String>,
    #[serde(rename = "applyDelimiterStringAdvice")] pub apply_delimiter_string_advice: bool,
    #[serde(rename = "createGroupingSegments")] pub create_grouping_segments: bool,
    #[serde(rename = "enableDefaultGroupHeaders")] pub enable_default_group_headers: bool,
    #[serde(rename = "recipientReferencePasswordValue", default, skip_serializing_if = "Option::is_none")] pub recipient_reference_password_value: Option<String>,
    #[serde(rename = "recipientReferencePasswordQualifier", default, skip_serializing_if = "Option::is_none")] pub recipient_reference_password_qualifier: Option<String>,
    #[serde(rename = "applicationReferenceId", default, skip_serializing_if = "Option::is_none")] pub application_reference_id: Option<String>,
    #[serde(rename = "processingPriorityCode", default, skip_serializing_if = "Option::is_none")] pub processing_priority_code: Option<String>,
    #[serde(rename = "interchangeControlNumberLowerBound")] pub interchange_control_number_lower_bound: i64,
    #[serde(rename = "interchangeControlNumberUpperBound")] pub interchange_control_number_upper_bound: i64,
    #[serde(rename = "rolloverInterchangeControlNumber")] pub rollover_interchange_control_number: bool,
    #[serde(rename = "interchangeControlNumberPrefix", default, skip_serializing_if = "Option::is_none")] pub interchange_control_number_prefix: Option<String>,
    #[serde(rename = "interchangeControlNumberSuffix", default, skip_serializing_if = "Option::is_none")] pub interchange_control_number_suffix: Option<String>,
    #[serde(rename = "senderReverseRoutingAddress", default, skip_serializing_if = "Option::is_none")] pub sender_reverse_routing_address: Option<String>,
    #[serde(rename = "receiverReverseRoutingAddress", default, skip_serializing_if = "Option::is_none")] pub receiver_reverse_routing_address: Option<String>,
    #[serde(rename = "functionalGroupId", default, skip_serializing_if = "Option::is_none")] pub functional_group_id: Option<String>,
    #[serde(rename = "groupControllingAgencyCode", default, skip_serializing_if = "Option::is_none")] pub group_controlling_agency_code: Option<String>,
    #[serde(rename = "groupMessageVersion", default, skip_serializing_if = "Option::is_none")] pub group_message_version: Option<String>,
    #[serde(rename = "groupMessageRelease", default, skip_serializing_if = "Option::is_none")] pub group_message_release: Option<String>,
    #[serde(rename = "groupControlNumberLowerBound")] pub group_control_number_lower_bound: i64,
    #[serde(rename = "groupControlNumberUpperBound")] pub group_control_number_upper_bound: i64,
    #[serde(rename = "rolloverGroupControlNumber")] pub rollover_group_control_number: bool,
    #[serde(rename = "groupControlNumberPrefix", default, skip_serializing_if = "Option::is_none")] pub group_control_number_prefix: Option<String>,
    #[serde(rename = "groupControlNumberSuffix", default, skip_serializing_if = "Option::is_none")] pub group_control_number_suffix: Option<String>,
    #[serde(rename = "groupApplicationReceiverQualifier", default, skip_serializing_if = "Option::is_none")] pub group_application_receiver_qualifier: Option<String>,
    #[serde(rename = "groupApplicationReceiverId", default, skip_serializing_if = "Option::is_none")] pub group_application_receiver_id: Option<String>,
    #[serde(rename = "groupApplicationSenderQualifier", default, skip_serializing_if = "Option::is_none")] pub group_application_sender_qualifier: Option<String>,
    #[serde(rename = "groupApplicationSenderId", default, skip_serializing_if = "Option::is_none")] pub group_application_sender_id: Option<String>,
    #[serde(rename = "groupApplicationPassword", default, skip_serializing_if = "Option::is_none")] pub group_application_password: Option<String>,
    #[serde(rename = "overwriteExistingTransactionSetControlNumber")] pub overwrite_existing_transaction_set_control_number: bool,
    #[serde(rename = "transactionSetControlNumberPrefix", default, skip_serializing_if = "Option::is_none")] pub transaction_set_control_number_prefix: Option<String>,
    #[serde(rename = "transactionSetControlNumberSuffix", default, skip_serializing_if = "Option::is_none")] pub transaction_set_control_number_suffix: Option<String>,
    #[serde(rename = "transactionSetControlNumberLowerBound")] pub transaction_set_control_number_lower_bound: i64,
    #[serde(rename = "transactionSetControlNumberUpperBound")] pub transaction_set_control_number_upper_bound: i64,
    #[serde(rename = "rolloverTransactionSetControlNumber")] pub rollover_transaction_set_control_number: bool,
    #[serde(rename = "isTestInterchange")] pub is_test_interchange: bool,
    #[serde(rename = "senderInternalIdentification", default, skip_serializing_if = "Option::is_none")] pub sender_internal_identification: Option<String>,
    #[serde(rename = "senderInternalSubIdentification", default, skip_serializing_if = "Option::is_none")] pub sender_internal_sub_identification: Option<String>,
    #[serde(rename = "receiverInternalIdentification", default, skip_serializing_if = "Option::is_none")] pub receiver_internal_identification: Option<String>,
    #[serde(rename = "receiverInternalSubIdentification", default, skip_serializing_if = "Option::is_none")] pub receiver_internal_sub_identification: Option<String>,
}

/// EDIFACT acknowledgement configuration (mirrors X12AcknowledgementSettings,
/// minus the implementation-acknowledgement fields). Continues on the next line.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactAcknowledgementSettings {
    #[serde(rename = "needTechnicalAcknowledgement")] pub
need_technical_acknowledgement: bool,
    // ^ first field of EdifactAcknowledgementSettings; the struct header and
    //   this field's serde attribute sit on the preceding line.
    #[serde(rename = "batchTechnicalAcknowledgements")] pub batch_technical_acknowledgements: bool,
    #[serde(rename = "needFunctionalAcknowledgement")] pub need_functional_acknowledgement: bool,
    #[serde(rename = "batchFunctionalAcknowledgements")] pub batch_functional_acknowledgements: bool,
    #[serde(rename = "needLoopForValidMessages")] pub need_loop_for_valid_messages: bool,
    #[serde(rename = "sendSynchronousAcknowledgement")] pub send_synchronous_acknowledgement: bool,
    #[serde(rename = "acknowledgementControlNumberPrefix", default, skip_serializing_if = "Option::is_none")] pub acknowledgement_control_number_prefix: Option<String>,
    #[serde(rename = "acknowledgementControlNumberSuffix", default, skip_serializing_if = "Option::is_none")] pub acknowledgement_control_number_suffix: Option<String>,
    #[serde(rename = "acknowledgementControlNumberLowerBound")] pub acknowledgement_control_number_lower_bound: i32,
    #[serde(rename = "acknowledgementControlNumberUpperBound")] pub acknowledgement_control_number_upper_bound: i32,
    #[serde(rename = "rolloverAcknowledgementControlNumber")] pub rollover_acknowledgement_control_number: bool,
}

/// Selects how the EDIFACT message-filter list is interpreted.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactMessageFilter {
    #[serde(rename = "messageFilterType")] pub message_filter_type: MessageFilterType,
}

/// Boolean switches controlling EDIFACT interchange processing behaviour.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactProcessingSettings {
    #[serde(rename = "maskSecurityInfo")] pub mask_security_info: bool,
    #[serde(rename = "preserveInterchange")] pub preserve_interchange: bool,
    #[serde(rename = "suspendInterchangeOnError")] pub suspend_interchange_on_error: bool,
    #[serde(rename = "createEmptyXmlTagsForTrailingSeparators")] pub create_empty_xml_tags_for_trailing_separators: bool,
    #[serde(rename = "useDotAsDecimalSeparator")] pub use_dot_as_decimal_separator: bool,
}

/// Per-message override of EDIFACT envelope values; every field is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactEnvelopeOverride {
    #[serde(rename = "messageId", default, skip_serializing_if = "Option::is_none")] pub message_id: Option<String>,
    #[serde(rename = "messageVersion", default, skip_serializing_if = "Option::is_none")] pub message_version: Option<String>,
    #[serde(rename = "messageRelease", default, skip_serializing_if = "Option::is_none")] pub message_release: Option<String>,
    #[serde(rename = "messageAssociationAssignedCode", default, skip_serializing_if = "Option::is_none")] pub message_association_assigned_code: Option<String>,
    #[serde(rename = "targetNamespace", default, skip_serializing_if = "Option::is_none")] pub target_namespace: Option<String>,
    #[serde(rename = "functionalGroupId", default, skip_serializing_if = "Option::is_none")] pub functional_group_id: Option<String>,
    #[serde(rename = "senderApplicationQualifier", default, skip_serializing_if = "Option::is_none")] pub sender_application_qualifier: Option<String>,
    #[serde(rename = "senderApplicationId", default, skip_serializing_if = "Option::is_none")] pub sender_application_id: Option<String>,
    #[serde(rename = "receiverApplicationQualifier", default, skip_serializing_if = "Option::is_none")] pub receiver_application_qualifier: Option<String>,
    #[serde(rename = "receiverApplicationId", default, skip_serializing_if = "Option::is_none")] pub receiver_application_id: Option<String>,
    #[serde(rename = "controllingAgencyCode", default, skip_serializing_if = "Option::is_none")] pub controlling_agency_code: Option<String>,
    #[serde(rename = "groupHeaderMessageVersion", default, skip_serializing_if = "Option::is_none")] pub group_header_message_version: Option<String>,
    #[serde(rename = "groupHeaderMessageRelease", default, skip_serializing_if = "Option::is_none")] pub group_header_message_release: Option<String>,
    #[serde(rename = "associationAssignedCode", default, skip_serializing_if = "Option::is_none")] pub association_assigned_code: Option<String>,
    #[serde(rename = "applicationPassword", default, skip_serializing_if = "Option::is_none")] pub application_password: Option<String>,
}

/// Identifies a single EDIFACT message by its message id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactMessageIdentifier {
    #[serde(rename = "messageId")] pub message_id: String,
}

/// Binds an EDIFACT message id/version/release (optionally per sender) to a schema name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactSchemaReference {
    #[serde(rename = "messageId")] pub message_id: String,
    #[serde(rename = "messageVersion")] pub message_version: String,
    #[serde(rename = "messageRelease")] pub message_release: String,
    #[serde(rename = "senderApplicationId", default, skip_serializing_if = "Option::is_none")] pub sender_application_id: Option<String>,
    #[serde(rename = "senderApplicationQualifier", default, skip_serializing_if = "Option::is_none")] pub sender_application_qualifier: Option<String>,
    #[serde(rename = "associationAssignedCode", default, skip_serializing_if = "Option::is_none")] pub association_assigned_code: Option<String>,
    #[serde(rename = "schemaName")] pub schema_name: String,
}

/// Per-message override of EDIFACT validation behaviour.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactValidationOverride {
    #[serde(rename = "messageId")] pub message_id: String,
    #[serde(rename = "enforceCharacterSet")] pub enforce_character_set: bool,
    #[serde(rename = "validateEDITypes")] pub validate_edi_types: bool,
    #[serde(rename = "validateXSDTypes")] pub validate_xsd_types: bool,
    #[serde(rename = "allowLeadingAndTrailingSpacesAndZeroes")] pub allow_leading_and_trailing_spaces_and_zeroes: bool,
    #[serde(rename = "trailingSeparatorPolicy")] pub trailing_separator_policy: TrailingSeparatorPolicy,
    #[serde(rename = "trimLeadingAndTrailingSpacesAndZeroes")] pub trim_leading_and_trailing_spaces_and_zeroes: bool,
}

/// Per-message override of the EDIFACT delimiter configuration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdifactDelimiterOverride {
    #[serde(rename = "messageId", default, skip_serializing_if = "Option::is_none")] pub message_id: Option<String>,
    #[serde(rename = "messageVersion", default, skip_serializing_if = "Option::is_none")] pub message_version: Option<String>,
    #[serde(rename = "messageRelease", default, skip_serializing_if = "Option::is_none")] pub message_release: Option<String>,
    #[serde(rename = "dataElementSeparator")] pub data_element_separator: i32,
    #[serde(rename = "componentSeparator")] pub component_separator: i32,
    #[serde(rename = "segmentTerminator")] pub segment_terminator: i32,
    #[serde(rename = "repetitionSeparator")] pub repetition_separator: i32,
    #[serde(rename = "segmentTerminatorSuffix")] pub segment_terminator_suffix: SegmentTerminatorSuffix,
    #[serde(rename = "decimalPointIndicator")] pub decimal_point_indicator: EdifactDecimalIndicator,
    #[serde(rename = "releaseIndicator")] pub release_indicator: i32,
    #[serde(rename = "messageAssociationAssignedCode", default, skip_serializing_if = "Option::is_none")] pub message_association_assigned_code: Option<String>,
    #[serde(rename = "targetNamespace", default, skip_serializing_if = "Option::is_none")] pub target_namespace: Option<String>,
}

/// EDIFACT syntax-level character sets (UNOA..UNOK/UNOX/UNOY/KECA on the wire).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EdifactCharacterSet {
    NotSpecified,
    #[serde(rename = "UNOB")] Unob,
    #[serde(rename = "UNOA")] Unoa,
    #[serde(rename = "UNOC")] Unoc,
    #[serde(rename = "UNOD")] Unod,
    #[serde(rename = "UNOE")] Unoe,
    #[serde(rename = "UNOF")] Unof,
    #[serde(rename = "UNOG")] Unog,
    #[serde(rename = "UNOH")] Unoh,
    #[serde(rename = "UNOI")] Unoi,
    #[serde(rename = "UNOJ")] Unoj,
    #[serde(rename = "UNOK")] Unok,
    #[serde(rename = "UNOX")] Unox,
    #[serde(rename = "UNOY")] Unoy,
    #[serde(rename = "KECA")] Keca,
}

/// Decimal-point indicator used in EDIFACT numeric values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EdifactDecimalIndicator {
    NotSpecified,
    Comma,
    Decimal,
}

/// Paged list of integration-account certificates.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountCertificateListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<IntegrationAccountCertificate>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Integration-account certificate resource: base `Resource` fields flattened
/// alongside the certificate-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountCertificate {
    #[serde(flatten)] pub resource: Resource,
    pub properties: IntegrationAccountCertificateProperties,
}

/// Certificate payload: timestamps, free-form metadata, and either a Key Vault
/// key reference and/or a public certificate string.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountCertificateProperties {
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub metadata: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub key: Option<KeyVaultKeyReference>,
    #[serde(rename = "publicCertificate", default, skip_serializing_if = "Option::is_none")] pub public_certificate: Option<String>,
}

/// Reference to a key stored in a Key Vault (vault + key name, optional version).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultKeyReference {
    #[serde(rename = "keyVault")] pub key_vault: key_vault_key_reference::KeyVault,
    #[serde(rename = "keyName")] pub key_name: String,
    #[serde(rename = "keyVersion", default, skip_serializing_if = "Option::is_none")] pub key_version: Option<String>,
}

/// Nested type for `KeyVaultKeyReference::key_vault`.
pub mod key_vault_key_reference {
    use super::*;
    /// Minimal Key Vault resource reference (id/name/type, all optional).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct KeyVault {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
        // `type` is a Rust keyword, hence the `type_` field name.
        #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>,
    }
}

/// Filter for listing integration-account sessions by change time.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSessionFilter {
    #[serde(rename = "changedTime")] pub changed_time: String,
}

/// Paged list of integration-account sessions. Continues on the next line.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSessionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<IntegrationAccountSession>,
    #[serde(rename = "nextLink", default,
skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, }
// ^ completes IntegrationAccountSessionListResult, whose opening half sits on an earlier line.

/// Integration-account session resource: base `Resource` fields flattened
/// alongside the session-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSession {
    #[serde(flatten)] pub resource: Resource,
    pub properties: IntegrationAccountSessionProperties,
}

/// Session payload: timestamps and an opaque content object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IntegrationAccountSessionProperties {
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option<Object>,
}

/// REST API operation descriptor (origin, name, display metadata).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub display: Option<operation::Display>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<Object>,
}

/// Nested type for `Operation::display`.
pub mod operation {
    use super::*;
    /// Human-readable operation metadata.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>,
    }
}

/// Paged list of operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
}

/// Standard error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<ErrorProperties>,
}

/// Error code/message pair carried inside `ErrorResponse`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>,
}

/// Request body for listing Key Vault keys, with optional paging skip token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ListKeyVaultKeysDefinition {
    #[serde(rename = "keyVault")] pub key_vault: KeyVaultReference,
    #[serde(rename = "skipToken", default, skip_serializing_if = "Option::is_none")] pub skip_token: Option<String>,
}

/// Resource reference specialised for a Key Vault (adds an optional name).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultReference {
    #[serde(flatten)] pub resource_reference: ResourceReference,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>,
}

/// Page of Key Vault keys plus the continuation token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultKeyCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<KeyVaultKey>,
    #[serde(rename = "skipToken", default, skip_serializing_if = "Option::is_none")] pub skip_token: Option<String>,
}

/// A Key Vault key: key identifier plus its attributes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultKey {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub kid: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub attributes: Option<key_vault_key::Attributes>,
}

/// Nested type for `KeyVaultKey::attributes`.
pub mod key_vault_key {
    use super::*;
    /// Key state and timestamps (`created`/`updated` are `i64` —
    /// presumably epoch seconds; confirm against the Key Vault API).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Attributes {
        #[serde(default, skip_serializing_if = "Option::is_none")] pub enabled: Option<bool>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub created: Option<i64>,
        #[serde(default, skip_serializing_if = "Option::is_none")] pub updated: Option<i64>,
    }
}

/// Batch of tracking events submitted from one source.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackingEventsDefinition {
    #[serde(rename = "sourceType")] pub source_type: String,
    #[serde(rename = "trackEventsOptions", default, skip_serializing_if = "Option::is_none")] pub track_events_options: Option<TrackEventsOperationOptions>,
    pub events: Vec<TrackingEvent>,
}

/// Options for the track-events operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum TrackEventsOperationOptions {
    None,
    DisableSourceInfoEnrich,
}

/// A single tracking event: severity, timestamp, record type, and the
/// record/error payloads.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackingEvent {
    #[serde(rename = "eventLevel")] pub event_level: EventLevel,
    #[serde(rename = "eventTime")] pub event_time: String,
    #[serde(rename = "recordType")] pub record_type: TrackingRecordType,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub record: Option<Object>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<TrackingEventErrorInfo>,
}

/// Severity level of a tracking event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EventLevel {
    LogAlways,
    Critical,
    Error,
    Warning,
    Informational,
    Verbose,
}

/// Kind of record carried by a tracking event (AS2, X12 or EDIFACT artefacts).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum TrackingRecordType {
    NotSpecified,
    Custom,
    #[serde(rename = "AS2Message")] As2Message,
    #[serde(rename = "AS2MDN")] As2mdn,
    X12Interchange,
    X12FunctionalGroup,
    X12TransactionSet,
    X12InterchangeAcknowledgment,
    X12FunctionalGroupAcknowledgment,
    X12TransactionSetAcknowledgment,
    EdifactInterchange,
    EdifactFunctionalGroup,
    EdifactTransactionSet,
    EdifactInterchangeAcknowledgment,
    EdifactFunctionalGroupAcknowledgment,
    EdifactTransactionSetAcknowledgment,
}

/// Error details attached to a tracking event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackingEventErrorInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
}

/// Request body for setting a workflow trigger's state from a source trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SetTriggerStateActionDefinition {
    pub source: WorkflowTriggerReference,
}

/// Collection of expression evaluation traces.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExpressionTraces {
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub inputs: Vec<ExpressionRoot>,
}

/// An expression trace annotated with the path it was evaluated at.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExpressionRoot {
    #[serde(flatten)] pub expression: Expression,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub path: Option<String>,
}

/// A (recursively nested) evaluated expression: source text, resulting value,
/// sub-expressions, and any evaluation error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Expression {
    #[serde(default, skip_serializing_if = "Option::is_none")] pub text: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub value: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub subexpressions: Vec<Expression>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<AzureResourceErrorInfo>,
}

/// Azure resource error: code (from flattened `ErrorInfo`), message, and
/// recursively nested detail errors.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureResourceErrorInfo {
    #[serde(flatten)] pub error_info: ErrorInfo,
    pub message: String,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub details: Vec<AzureResourceErrorInfo>,
}

/// Bare error code, flattened into richer error types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorInfo {
    pub code: String,
}

/// Paged list of workflow run-action repetitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunActionRepetitionDefinitionCollection {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<WorkflowRunActionRepetitionDefinition>,
}

/// A single run-action repetition resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunActionRepetitionDefinition {
    #[serde(flatten)] pub resource: Resource,
    pub properties: WorkflowRunActionRepetitionProperties,
}

/// Repetition payload: the flattened operation result plus the loop indexes
/// identifying this repetition within nested scopes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkflowRunActionRepetitionProperties {
    #[serde(flatten)] pub operation_result: OperationResult,
    #[serde(rename = "repetitionIndexes", default, skip_serializing_if = "Vec::is_empty")] pub repetition_indexes: Vec<RepetitionIndex>,
}

/// Position of a repetition inside a named scope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RepetitionIndex {
    #[serde(rename = "scopeName", default, skip_serializing_if = "Option::is_none")] pub scope_name: Option<String>,
    #[serde(rename = "itemIndex")] pub item_index: i32,
}

/// Result of an executed operation: inputs/outputs (inline or as content
/// links), tracked properties, retry history and iteration count, on top of the
/// flattened timing/status properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationResult {
    #[serde(flatten)] pub operation_result_properties: OperationResultProperties,
    #[serde(rename = "trackingId", default, skip_serializing_if = "Option::is_none")] pub tracking_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub inputs: Option<Object>,
    #[serde(rename = "inputsLink", default, skip_serializing_if = "Option::is_none")] pub inputs_link: Option<ContentLink>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub outputs: Option<Object>,
    #[serde(rename = "outputsLink", default, skip_serializing_if = "Option::is_none")] pub outputs_link: Option<ContentLink>,
    #[serde(rename = "trackedProperties", default, skip_serializing_if = "Option::is_none")] pub tracked_properties: Option<Object>,
    #[serde(rename = "retryHistory", default, skip_serializing_if = "Vec::is_empty")] pub retry_history: Vec<RetryHistory>,
    #[serde(rename = "iterationCount", default, skip_serializing_if = "Option::is_none")] pub iteration_count: Option<i32>,
}

/// Timing, correlation, status and error information for an operation run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationResultProperties {
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub correlation: Option<RunActionCorrelation>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<WorkflowStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RunActionCorrelation { #[serde(flatten)] pub run_correlation: RunCorrelation, #[serde(rename = "actionTrackingId", default, skip_serializing_if = "Option::is_none")] pub action_tracking_id: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RunCorrelation { #[serde(rename = "clientTrackingId", default, skip_serializing_if = "Option::is_none")] pub client_tracking_id: Option<String>, #[serde(rename = "clientKeywords", default, skip_serializing_if = "Vec::is_empty")] pub client_keywords: Vec<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct JsonSchema { #[serde(default, skip_serializing_if = "Option::is_none")] pub title: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AssemblyCollection { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<AssemblyDefinition>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AssemblyDefinition { #[serde(flatten)] pub resource: Resource, pub properties: AssemblyProperties, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AssemblyProperties { #[serde(flatten)] pub artifact_content_properties_definition: ArtifactContentPropertiesDefinition, #[serde(rename = "assemblyName")] pub assembly_name: String, #[serde(rename = "assemblyVersion", default, skip_serializing_if = "Option::is_none")] pub assembly_version: Option<String>, #[serde(rename = "assemblyCulture", default, skip_serializing_if = "Option::is_none")] pub assembly_culture: Option<String>, #[serde(rename = "assemblyPublicKeyToken", default, skip_serializing_if = "Option::is_none")] pub assembly_public_key_token: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ArtifactContentPropertiesDefinition { #[serde(flatten)] 
pub artifact_properties: ArtifactProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option<serde_json::Value>, #[serde(rename = "contentType", default, skip_serializing_if = "Option::is_none")] pub content_type: Option<String>, #[serde(rename = "contentLink", default, skip_serializing_if = "Option::is_none")] pub content_link: Option<ContentLink>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ArtifactProperties { #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>, #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub metadata: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BatchConfigurationCollection { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<BatchConfiguration>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BatchConfiguration { #[serde(flatten)] pub resource: Resource, pub properties: BatchConfigurationProperties, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BatchConfigurationProperties { #[serde(flatten)] pub artifact_properties: ArtifactProperties, #[serde(rename = "batchGroupName")] pub batch_group_name: String, #[serde(rename = "releaseCriteria")] pub release_criteria: BatchReleaseCriteria, #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")] pub created_time: Option<String>, #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")] pub changed_time: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BatchReleaseCriteria { #[serde(rename = "messageCount", default, skip_serializing_if = "Option::is_none")] pub message_count: Option<i32>, #[serde(rename = "batchSize", default, 
skip_serializing_if = "Option::is_none")] pub batch_size: Option<i32>, #[serde(default, skip_serializing_if = "Option::is_none")] pub recurrence: Option<WorkflowTriggerRecurrence>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RequestHistoryListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<RequestHistory>, #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")] pub next_link: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RequestHistory { #[serde(flatten)] pub resource: Resource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<RequestHistoryProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RequestHistoryProperties { #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")] pub end_time: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub request: Option<Request>, #[serde(default, skip_serializing_if = "Option::is_none")] pub response: Option<Response>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Request { #[serde(default, skip_serializing_if = "Option::is_none")] pub headers: Option<Object>, #[serde(default, skip_serializing_if = "Option::is_none")] pub uri: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub method: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Response { #[serde(default, skip_serializing_if = "Option::is_none")] pub headers: Option<Object>, #[serde(rename = "statusCode", default, skip_serializing_if = "Option::is_none")] pub status_code: Option<i32>, #[serde(rename = "bodyLink", default, skip_serializing_if = "Option::is_none")] pub body_link: Option<ContentLink>, }
use std::io; use std::path::PathBuf; use thiserror::Error; #[derive(Error, Debug)] pub enum EnvReaderError { #[error("io env reader error")] Io { #[from] source: io::Error, }, #[error("space on var name `{0}`")] SpaceOnVarName(String), #[error("unknown env error")] Unknown, } #[derive(Error, Debug)] pub enum EnvError { #[error("io env error")] Io { #[from] source: io::Error, }, #[error("fs_extra env error")] FsExtra { #[from] source: fs_extra::error::Error, }, #[error("fail to parse `{file:?}`")] FailToParse { #[source] source: EnvReaderError, file: PathBuf, }, #[error("env var `{0}` not found in `{1:?}`")] EnvVarNotFound(String, PathBuf), #[error("env file `{0:?}` has no file name")] EnvFileHasNoFileName(PathBuf), #[error("env file `{0:?}` has an empty file name")] EnvFileNameIsEmpty(PathBuf), #[error("env file `{0:?}` has incorrect file name : it must begin with `.` char")] EnvFileNameIncorrect(PathBuf), }
use crate::config;
use crate::maze::maze_genotype::MazeGenome;
use crate::mcc::maze::maze_species::MazeSpecies;

/// A population of mazes partitioned into species, used by the MCC
/// (minimal-criterion coevolution) loop. Each seed maze founds one species.
pub struct SpeciatedMazeQueue {
    // One species per seed maze, in creation order.
    pub species: Vec<MazeSpecies>,
    // Running count of species ever created (also used as the species id seed).
    pub species_added: u32,
}

impl SpeciatedMazeQueue {
    /// Builds a queue with one species per seed maze. The per-species capacity
    /// is the global population capacity split evenly across the seeds.
    ///
    /// NOTE(review): divides by `mazes.len()` — this panics (division by zero)
    /// if `mazes` is empty; callers must guarantee at least one seed.
    pub fn new(mazes: Vec<MazeGenome>) -> SpeciatedMazeQueue {
        let mut queue = SpeciatedMazeQueue {
            species: vec![],
            species_added: 0,
        };

        let species_max_mazes_limit: u32 =
            config::MCC.maze_population_capacity / mazes.len() as u32;

        for (i, maze) in mazes.iter().enumerate() {
            let species = MazeSpecies::new(maze.clone(), species_max_mazes_limit, i as u32);
            queue.species.push(species);
            queue.species_added += 1;
        }

        queue
    }

    /// Total number of mazes across all species.
    pub fn len(&self) -> usize {
        let mut length = 0;

        for species in self.species.iter() {
            length += species.len();
        }

        length
    }

    /// Iterates species by shared reference.
    pub fn iter_species(&self) -> impl Iterator<Item = &MazeSpecies> {
        self.species.iter()
    }

    /// Iterates species by mutable reference.
    pub fn iter_species_mut(&mut self) -> impl Iterator<Item = &mut MazeSpecies> {
        self.species.iter_mut()
    }

    /// Iterates every individual maze across all species.
    pub fn iter_individuals(&self) -> impl Iterator<Item = &MazeGenome> {
        self.species.iter().map(|species| species.iter()).flatten()
    }

    /// Assigns `maze` to the species with the highest distance score.
    ///
    /// NOTE(review): if every distance is <= 0.0 the maze silently falls into
    /// species 0; panics if `species` is empty. `distance` semantics (higher =
    /// better match?) are defined in `MazeSpecies` — confirm there.
    pub fn push(&mut self, maze: MazeGenome) {
        let mut distances: Vec<f64> = vec![];

        for species in self.species.iter() {
            distances.push(species.distance(&maze));
        }

        let mut highest = 0.0;
        let mut index: usize = 0;

        for (i, value) in distances.iter().enumerate() {
            if *value > highest {
                highest = *value;
                index = i;
            }
        }

        self.species[index].push(maze);
    }

    /// Collects offspring from every species (an even split of the global
    /// selection limit), then mutates them and marks them non-viable.
    ///
    /// NOTE(review): divides by `self.species.len()` — panics if empty.
    pub fn get_children(&mut self) -> Vec<MazeGenome> {
        let mut children: Vec<MazeGenome> = vec![];

        // should always be 1
        let amount: usize = config::MCC.maze_selection_limit / self.species.len();

        for species in self.species.iter_mut() {
            for child in species.get_children(amount) {
                children.push(child);
            }
        }

        for child in children.iter_mut() {
            child.mutate();
            // Offspring must re-satisfy the minimal criterion before re-entry.
            child.viable = false;
        }

        children
    }

    /// Returns a clone of the maze with the smallest `width`, or `None`
    /// when the queue is empty.
    pub fn get_smallest_maze(&self) -> Option<MazeGenome> {
        let mut smallest_found: Option<MazeGenome> = Option::None;

        for maze in self.iter_individuals() {
            if smallest_found.is_none() {
                smallest_found = Some(maze.clone());
            } else {
                // NOTE(review): clones the current best on every comparison;
                // comparing via `as_ref()` would avoid the per-iteration clone.
                let small = smallest_found.clone().unwrap();
                if small.width > maze.width {
                    smallest_found = Some(maze.clone());
                }
            }
        }

        smallest_found
    }

    // The four size/path-size getters below unwrap the max/min over species
    // and therefore panic when `species` is empty.

    /// Largest maze size over all species.
    pub fn get_largest_size(&self) -> u32 {
        let max = self
            .species
            .iter()
            .max_by_key(|p| p.maze_queue.get_largest_size());
        max.unwrap().maze_queue.get_largest_size()
    }

    /// Smallest maze size over all species.
    pub fn get_smallest_size(&self) -> u32 {
        let min = self
            .species
            .iter()
            .min_by_key(|p| p.maze_queue.get_smallest_size());
        min.unwrap().maze_queue.get_smallest_size()
    }

    /// Mean maze width over all individuals (NaN when the queue is empty).
    pub fn get_average_size(&self) -> f64 {
        let mut sum = 0;
        for s in self.species.iter() {
            for m in s.maze_queue.iter() {
                sum += m.width;
            }
        }
        sum as f64 / self.len() as f64
    }

    /// Largest path complexity (junctures) over all species.
    pub fn get_largest_path_size(&self) -> u32 {
        let max = self
            .species
            .iter()
            .max_by_key(|p| p.maze_queue.get_largest_path_size());
        max.unwrap().maze_queue.get_largest_path_size() as u32
    }

    /// Smallest path complexity (junctures) over all species.
    pub fn get_smallest_path_size(&self) -> u32 {
        let min = self
            .species
            .iter()
            .min_by_key(|p| p.maze_queue.get_smallest_path_size());
        min.unwrap().maze_queue.get_smallest_path_size() as u32
    }

    /// Mean juncture count over all individuals (NaN when the queue is empty).
    pub fn get_average_path_size(&self) -> f64 {
        let mut sum = 0;
        for s in self.species.iter() {
            for m in s.maze_queue.iter() {
                sum += m.get_amount_of_junctures();
            }
        }
        sum as f64 / self.len() as f64
    }

    /// Mean (per species) of the overall average size increase statistic.
    pub fn get_overall_average_size_increase(&self) -> f64 {
        let mut sum = 0.0;
        for s in self.species.iter() {
            sum += s.statistics.get_overall_average_increase();
        }
        sum as f64 / self.species.len() as f64
    }

    /// Mean (per species) of the overall average path complexity increase.
    pub fn get_overall_average_complexity_increase(&self) -> f64 {
        let mut sum = 0.0;
        for s in self.species.iter() {
            sum += s.statistics.get_overall_average_path_complexity_increase();
        }
        sum as f64 / self.species.len() as f64
    }

    /// Mean (per species) of the most recent average size increase.
    pub fn get_last_average_size_increase(&self) -> f64 {
        let mut sum = 0.0;
        for s in self.species.iter() {
            sum += s.statistics.get_current_average_size_increase();
        }
        sum as f64 / self.species.len() as f64
    }

    /// Mean (per species) of the most recent average complexity increase.
    pub fn get_last_average_complexity_increase(&self) -> f64 {
        let mut sum = 0.0;
        for s in self.species.iter() {
            sum += s.statistics.get_current_average_complexity_increase();
        }
        sum as f64 / self.species.len() as f64
    }

    /// Snapshots statistics on every species.
    pub fn save_state(&mut self) {
        for s in self.species.iter_mut() {
            s.save_state();
        }
    }
}
/// Fixed-capacity ring buffer of `Copy` values.
///
/// `seqno` counts every write ever made; only the last `data.len()` writes are
/// retained, with the value for sequence number `i` stored at `i % data.len()`.
struct CircularBuffer<T: Copy> {
    /// Total number of `put` calls so far (monotonically increasing).
    seqno: usize,
    /// Backing storage, pre-filled with the default value.
    data: Vec<T>,
}

/// Iterator over the live window of a `CircularBuffer`, oldest element first.
/// When the window wraps around the end of the backing slice, `wrap` records
/// that a second pass over the slice head is still pending.
struct CircularBufferIterator<'a, T: 'a + Copy> {
    slice: &'a [T],
    start: usize,
    end: usize,
    pos: usize,
    wrap: bool,
}

impl<T: Copy> CircularBuffer<T> {
    /// Creates a buffer retaining at most `size` elements, pre-filled with
    /// `default_value`.
    ///
    /// # Panics
    ///
    /// Panics if `size` is zero (a zero-capacity ring is meaningless and
    /// would make `seqno % len` divide by zero).
    fn new(size: usize, default_value: T) -> CircularBuffer<T> {
        if size == 0 {
            panic!("size cannot be zero");
        }
        CircularBuffer {
            seqno: 0,
            data: vec![default_value; size],
        }
    }

    /// Sequence number of the oldest element still retained.
    fn min_pos(&self) -> usize {
        if self.seqno < self.data.len() {
            0
        } else {
            // Already usize; the original's `as usize` cast was redundant.
            self.seqno - self.data.len()
        }
    }

    /// Returns an iterator over the retained elements in insertion order.
    fn iter(&self) -> CircularBufferIterator<T> {
        let min = self.min_pos();
        let max = self.seqno;
        let sz = self.data.len();
        let min_pos = min % sz;
        let max_pos = max % sz;

        if self.seqno == 0 {
            // No data yet: pos > end produces an immediately-exhausted iterator.
            CircularBufferIterator {
                slice: self.data.as_slice(),
                start: 0,
                end: 0,
                pos: 1,
                wrap: false,
            }
        } else if min_pos < max_pos {
            // Live window is contiguous: [min_pos, max_pos).
            CircularBufferIterator {
                slice: self.data.as_slice(),
                start: min_pos,
                end: max_pos,
                pos: min_pos,
                wrap: false,
            }
        } else {
            // Window wraps: first [max_pos, sz), then [0, max_pos).
            // When max_pos == 0 the window is exactly [0, sz) — no wrap pass.
            CircularBufferIterator {
                slice: self.data.as_slice(),
                start: max_pos,
                end: sz,
                pos: max_pos,
                wrap: (max_pos != 0),
            }
        }
    }

    /// Writes the next element in place via `setter` (called on the slot being
    /// overwritten) and returns the new sequence number.
    fn put<F>(&mut self, mut setter: F) -> usize
    where
        F: FnMut(&mut T),
    {
        // Slot for this sequence number.
        let pos = self.seqno % self.data.len();
        match self.data.get_mut(pos) {
            Some(v) => setter(v),
            // Unreachable: pos < data.len() by construction.
            None => panic!("out of bounds {}", pos),
        }
        self.seqno += 1;
        self.seqno
    }
}

// BUG FIX: the original read `impl <'_, T: '_ + Copy> Iterator for
// CircularBufferIterator<'_, T>`, which does not compile — `'_` (the
// anonymous lifetime) cannot be declared as a generic lifetime parameter.
// A named lifetime is required here.
impl<'a, T: 'a + Copy> Iterator for CircularBufferIterator<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        if self.pos >= self.end {
            if self.wrap {
                // Switch to the second pass over the slice head: yield
                // slice[0] now, then continue from index 1 up to `start`.
                self.pos = 1;
                self.end = self.start;
                self.wrap = false;
                Some(self.slice[0])
            } else {
                None
            }
        } else {
            let at = self.pos;
            self.pos += 1;
            Some(self.slice[at])
        }
    }
}

/// Manual smoke test: fills a 2-slot buffer past capacity and prints the
/// retained window.
pub fn tests() {
    let mut x = CircularBuffer::new(2, 0 as i32);
    x.put(|v| *v = 1);
    let mut y: i32 = 2;
    x.put(|v| {
        *v = y;
        y += 1;
    });
    x.put(|v| {
        *v = y;
        y += 1;
    });
    let it = x.iter();
    for i in it {
        println!("CB: {}", i);
    }
}

#[cfg(test)]
mod tests {
    use super::CircularBuffer;

    #[test]
    #[should_panic]
    fn create_zero_sized() {
        let _x = CircularBuffer::new(0, 0 as i32);
    }

    #[test]
    fn empty_buffer() {
        let x = CircularBuffer::new(1, 0 as i32);
        let count = x.iter().count();
        assert_eq!(count, 0);
    }

    #[test]
    fn overload_buffer() {
        let mut x = CircularBuffer::new(2, 0 as i32);
        x.put(|v| *v = 1);
        x.put(|v| *v = 2);
        x.put(|v| *v = 3);
        assert_eq!(x.iter().count(), 2);
        assert_eq!(x.iter().last().unwrap(), 3);
        assert_eq!(x.iter().take(1).last().unwrap(), 2);
    }

    #[test]
    fn sum_available() {
        let mut x = CircularBuffer::new(4, 0 as i32);
        x.put(|v| *v = 2);
        x.put(|v| *v = 4);
        x.put(|v| *v = 6);
        x.put(|v| *v = 8);
        x.put(|v| *v = 10);
        assert_eq!(x.iter().count(), 4);
        let sum = x.iter().take(3).fold(0, |acc, num| acc + num);
        assert_eq!(sum, 18);
    }

    #[test]
    fn can_put_with_env() {
        let mut x = CircularBuffer::new(1, 0 as i32);
        let mut y = 0;
        {
            let my_fn = |v: &mut i32| {
                *v = y;
                y += 1;
            };
            x.put(my_fn);
        }
        {
            // the other way is
            x.put(|v| {
                *v = y;
                y += 1;
            });
            // TODO : check if I need this at all
            //x.put(&my_fn);
        }
    }
}
use crate::rtb_type; rtb_type! { NativeImageAssetType, 500, Icon=1 }
use super::*;

/// Writer that serializes a graph's edge list to a CSV-like file.
///
/// Holds the column names and zero-based positions for the source,
/// destination, edge-type and weight columns, plus output options
/// (numeric ids, directedness) and the underlying CSV writer.
pub struct EdgeFileWriter {
    // Underlying CSV writer (path, separator, header flag, verbosity).
    pub(crate) writer: CSVFileWriter,
    // Name and position of the source-nodes column.
    pub(crate) sources_column: String,
    pub(crate) sources_column_number: usize,
    // Name and position of the destination-nodes column.
    pub(crate) destinations_column: String,
    pub(crate) destinations_column_number: usize,
    // Name and position of the edge-types column (written only when present).
    pub(crate) edge_types_column: String,
    pub(crate) edge_types_column_number: usize,
    // Name and position of the weights column (written only when present).
    pub(crate) weights_column: String,
    pub(crate) weights_column_number: usize,
    // When true, write numeric node ids instead of node names.
    pub(crate) numeric_node_ids: bool,
    // Directedness override; falls back to the graph's own flag when None.
    pub(crate) directed: Option<bool>,
}

impl EdgeFileWriter {
    /// Return new EdgeFileWriter object with default column names
    /// (`subject`, `object`, `label`, `weight`) in positions 0..=3.
    ///
    /// # Arguments
    ///
    /// * path: String - Path where to store/load the file.
    ///
    pub fn new(path: String) -> EdgeFileWriter {
        EdgeFileWriter {
            writer: CSVFileWriter::new(path),
            sources_column: "subject".to_string(),
            sources_column_number: 0,
            destinations_column: "object".to_string(),
            destinations_column_number: 1,
            edge_types_column: "label".to_string(),
            edge_types_column_number: 2,
            weights_column: "weight".to_string(),
            weights_column_number: 3,
            numeric_node_ids: false,
            directed: None,
        }
    }

    /// Set the column name of the source nodes.
    ///
    /// # Arguments
    ///
    /// * sources_column: Option<String> - The source nodes column to use for the file.
    ///
    pub fn set_sources_column(mut self, sources_column: Option<String>) -> EdgeFileWriter {
        if let Some(column) = sources_column {
            self.sources_column = column;
        }
        self
    }

    /// Set the column position of the source nodes.
    ///
    /// # Arguments
    ///
    /// * sources_column_number: Option<usize> - The source nodes column position to use for the file.
    ///
    pub fn set_sources_column_number(
        mut self,
        sources_column_number: Option<usize>,
    ) -> EdgeFileWriter {
        if let Some(column_number) = sources_column_number {
            self.sources_column_number = column_number;
        }
        self
    }

    /// Set the column name of the destination nodes.
    ///
    /// # Arguments
    ///
    /// * destinations_column: Option<String> - The destination nodes column to use for the file.
    ///
    pub fn set_destinations_column(
        mut self,
        destinations_column: Option<String>,
    ) -> EdgeFileWriter {
        if let Some(column) = destinations_column {
            self.destinations_column = column;
        }
        self
    }

    /// Set the column position of the destination nodes.
    ///
    /// # Arguments
    ///
    /// * destinations_column_number: Option<usize> - The destination nodes column position to use for the file.
    ///
    pub fn set_destinations_column_number(
        mut self,
        destinations_column_number: Option<usize>,
    ) -> EdgeFileWriter {
        if let Some(column_number) = destinations_column_number {
            self.destinations_column_number = column_number;
        }
        self
    }

    /// Set the column name of the edge types.
    ///
    /// # Arguments
    ///
    /// * edge_type_column: Option<String> - The edge types column to use for the file.
    ///
    pub fn set_edge_types_column(mut self, edge_type_column: Option<String>) -> EdgeFileWriter {
        if let Some(column) = edge_type_column {
            self.edge_types_column = column;
        }
        self
    }

    /// Set the column position of the edge types.
    ///
    /// # Arguments
    ///
    /// * edge_type_column_number: Option<usize> - The edge types column position to use for the file.
    ///
    pub fn set_edge_types_column_number(
        mut self,
        edge_type_column_number: Option<usize>,
    ) -> EdgeFileWriter {
        if let Some(column_number) = edge_type_column_number {
            self.edge_types_column_number = column_number;
        }
        self
    }

    /// Set the column name of the weights.
    ///
    /// # Arguments
    ///
    /// * weights_column: Option<String> - The weights column to use for the file.
    ///
    pub fn set_weights_column(mut self, weights_column: Option<String>) -> EdgeFileWriter {
        if let Some(column) = weights_column {
            self.weights_column = column;
        }
        self
    }

    /// Set the column position of the weights.
    ///
    /// # Arguments
    ///
    /// * weights_column_number: Option<usize> - The weights column position to use for the file.
    ///
    pub fn set_weights_column_number(
        mut self,
        weights_column_number: Option<usize>,
    ) -> EdgeFileWriter {
        if let Some(column_number) = weights_column_number {
            self.weights_column_number = column_number;
        }
        self
    }

    /// Set the verbosity.
    ///
    /// # Arguments
    ///
    /// * verbose: Option<bool> - Whether to show the loading bar or not.
    ///
    pub fn set_verbose(mut self, verbose: Option<bool>) -> EdgeFileWriter {
        if let Some(v) = verbose {
            self.writer.verbose = v;
        }
        self
    }

    /// Set whether to write numeric node ids instead of node names.
    ///
    /// # Arguments
    ///
    /// * numeric_node_ids: Option<bool> - Whether to write numeric node ids.
    ///
    pub fn set_numeric_node_ids(mut self, numeric_node_ids: Option<bool>) -> EdgeFileWriter {
        if let Some(nni) = numeric_node_ids {
            self.numeric_node_ids = nni;
        }
        self
    }

    /// Set the separator.
    ///
    /// # Arguments
    ///
    /// * separator: Option<String> - The separator to use for the file.
    ///
    pub fn set_separator(mut self, separator: Option<String>) -> EdgeFileWriter {
        if let Some(v) = separator {
            self.writer.separator = v;
        }
        self
    }

    /// Set the header.
    ///
    /// # Arguments
    ///
    /// * header: Option<bool> - Whether to write out an header or not.
    ///
    pub fn set_header(mut self, header: Option<bool>) -> EdgeFileWriter {
        if let Some(v) = header {
            self.writer.header = v;
        }
        self
    }

    /// Set the directedness. Unlike the other setters, `None` clears any
    /// previous override (the graph's own directedness is then used).
    ///
    /// # Arguments
    ///
    /// * directed: Option<bool> - Whether to write out the graph as directed or not.
    ///
    pub fn set_directed(mut self, directed: Option<bool>) -> EdgeFileWriter {
        self.directed = directed;
        self
    }

    /// Write the edge file, one line per edge, with edge-type and weight
    /// columns included only when the graph has them.
    ///
    /// # Arguments
    ///
    /// * `graph`: &Graph - the graph to write out.
    pub fn dump(&self, graph: &Graph) -> Result<(), String> {
        // Use the explicit override when set, otherwise the graph's own flag.
        let directed: bool = self.directed.unwrap_or_else(|| graph.is_directed());

        // build the header
        let mut header = vec![
            (self.sources_column.clone(), self.sources_column_number),
            (
                self.destinations_column.clone(),
                self.destinations_column_number,
            ),
        ];

        if graph.has_edge_types() {
            header.push((
                self.edge_types_column.clone(),
                self.edge_types_column_number,
            ));
        }

        if graph.has_weights() {
            header.push((self.weights_column.clone(), self.weights_column_number));
        }

        // Columns are placed by position, so the line width is one past the
        // highest configured column index.
        let number_of_columns = 1 + header.iter().map(|(_, i)| i).max().unwrap();

        self.writer.write_lines(
            graph.get_edges_number() as usize,
            compose_lines(number_of_columns, header),
            graph
                .get_edges_quadruples(directed)
                .map(|(_, src, dst, edge_type, weight)| {
                    let mut line = vec![
                        (
                            match self.numeric_node_ids {
                                true => src.to_string(),
                                false => graph.nodes.translate(src).to_string(),
                            },
                            self.sources_column_number,
                        ),
                        (
                            match self.numeric_node_ids {
                                true => dst.to_string(),
                                false => graph.nodes.translate(dst).to_string(),
                            },
                            self.destinations_column_number,
                        ),
                    ];

                    if let Some(ets) = &graph.edge_types {
                        // NOTE(review): unwraps the per-edge type whenever the
                        // graph has an edge-type vocabulary — panics if any
                        // single edge lacks a type; confirm that invariant.
                        line.push((
                            ets.translate(edge_type.unwrap()).to_string(),
                            self.edge_types_column_number,
                        ));
                    }

                    if let Some(w) = weight {
                        line.push((w.to_string(), self.weights_column_number));
                    }

                    compose_lines(number_of_columns, line)
                }),
        )
    }
}
use octocrab::models::repos::Object; use octocrab::models::Repository; use octocrab::Octocrab; use octocrab::params::repos::Reference; use serde_json::Value; use warp::{Rejection, Reply}; use warp::http::StatusCode; pub async fn run() -> Result<impl Reply, Rejection> { let token = read_env_var("GITHUB_TOKEN"); let octocrab = Octocrab::builder().personal_token(token).build().unwrap(); let repo_list = vec!["Jackett"]; for repo_name in repo_list { let repo = octocrab.repos("htynkn", repo_name).get().await.unwrap(); match repo.parent { None => { info!("current repo:{} not have parent info, skip", repo_name) } Some(parent_repo) => { match parent_repo.full_name { None => { info!("current repo:{} not have full name, skip", repo_name) } Some(full_name) => { let parent_repo: Repository = octocrab.get(format!("/repos/{}", full_name), None::<&()>).await.unwrap(); info!("find parent:{} with branch:{:?}",full_name,parent_repo.default_branch); let default_branch = parent_repo.default_branch.unwrap(); let parent_ref = octocrab.repos( parent_repo.owner.unwrap().login, parent_repo.name, ).get_ref(&Reference::Branch(default_branch.to_string())).await.unwrap(); let sha = if let Object::Commit { sha, .. } = parent_ref.object { sha } else { panic!() }; info!("find ref {:?} to repo:{}", sha.to_string(), full_name); let x: Value = octocrab.patch(format!("/repos/htynkn/{}/git/refs/heads/{}", repo_name, &default_branch), Some(&serde_json::json!({ "sha": sha.to_string() }))).await.unwrap(); info!("update success for {} with {:?}", full_name,x); } } } } } Ok(StatusCode::OK) } fn read_env_var(var_name: &str) -> String { let err = format!("Missing environment variable: {}", var_name); std::env::var(var_name).expect(&err) }
// Copyright (C) 2021 Subspace Labs, Inc. // SPDX-License-Identifier: GPL-3.0-or-later // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. #![cfg_attr(not(feature = "std"), no_std)] #![feature(const_option)] // `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256. #![recursion_limit = "256"] // Make the WASM binary available. #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); use codec::{Compact, CompactLen, Decode, Encode, MaxEncodedLen}; use core::num::NonZeroU64; use domain_runtime_primitives::{BlockNumber as DomainNumber, Hash as DomainHash}; use frame_support::traits::{ ConstU128, ConstU16, ConstU32, ConstU64, ConstU8, Currency, ExistenceRequirement, Get, Imbalance, WithdrawReasons, }; use frame_support::weights::constants::{RocksDbWeight, WEIGHT_REF_TIME_PER_SECOND}; use frame_support::weights::{ConstantMultiplier, IdentityFee, Weight}; use frame_support::{construct_runtime, parameter_types, PalletId}; use frame_system::limits::{BlockLength, BlockWeights}; use frame_system::EnsureNever; use pallet_balances::NegativeImbalance; use pallet_feeds::feed_processor::{FeedMetadata, FeedObjectMapping, FeedProcessor}; use pallet_grandpa_finality_verifier::chain::Chain; pub use pallet_subspace::AllowAuthoringBy; use scale_info::TypeInfo; use sp_api::{impl_runtime_apis, BlockT, HashT, HeaderT}; use sp_consensus_slots::SlotDuration; 
use sp_consensus_subspace::digests::CompatibleDigestItem; use sp_consensus_subspace::{ ChainConstants, EquivocationProof, FarmerPublicKey, GlobalRandomnesses, SignedVote, SolutionRanges, Vote, }; use sp_core::crypto::{ByteArray, KeyTypeId}; use sp_core::{Hasher, OpaqueMetadata, H256}; use sp_domains::bundle_producer_election::BundleProducerElectionParams; use sp_domains::fraud_proof::FraudProof; use sp_domains::transaction::PreValidationObject; use sp_domains::{ DomainId, DomainInstanceData, DomainsHoldIdentifier, ExecutionReceipt, OpaqueBundle, OpaqueBundles, OperatorId, OperatorPublicKey, StakingHoldIdentifier, }; use sp_runtime::traits::{ AccountIdConversion, AccountIdLookup, BlakeTwo256, DispatchInfoOf, NumberFor, PostDispatchInfoOf, Zero, }; use sp_runtime::transaction_validity::{ InvalidTransaction, TransactionSource, TransactionValidity, TransactionValidityError, }; use sp_runtime::{ create_runtime_str, generic, AccountId32, ApplyExtrinsicResult, DispatchError, Perbill, }; use sp_std::iter::Peekable; use sp_std::marker::PhantomData; use sp_std::prelude::*; #[cfg(feature = "std")] use sp_version::NativeVersion; use sp_version::RuntimeVersion; use subspace_core_primitives::objects::{BlockObject, BlockObjectMapping}; use subspace_core_primitives::{ HistorySize, Piece, Randomness, SegmentCommitment, SegmentHeader, SegmentIndex, SolutionRange, U256, }; use subspace_runtime_primitives::{ opaque, AccountId, Balance, BlockNumber, Hash, Index, Moment, Signature, MIN_REPLICATION_FACTOR, STORAGE_FEES_ESCROW_BLOCK_REWARD, STORAGE_FEES_ESCROW_BLOCK_TAX, }; use subspace_verification::derive_randomness; sp_runtime::impl_opaque_keys! 
{
    // Body of `sp_runtime::impl_opaque_keys!` (the macro path is on the
    // preceding line): this runtime declares no session keys.
    pub struct SessionKeys {
    }
}

// Smaller value for testing purposes
const MAX_PIECES_IN_SECTOR: u16 = 32;

// To learn more about runtime versioning and what each of the following value means:
// https://substrate.dev/docs/en/knowledgebase/runtime/upgrades#runtime-versioning
#[sp_version::runtime_version]
pub const VERSION: RuntimeVersion = RuntimeVersion {
    spec_name: create_runtime_str!("subspace"),
    impl_name: create_runtime_str!("subspace"),
    authoring_version: 1,
    // The version of the runtime specification. A full node will not attempt to use its native
    // runtime in substitute for the on-chain Wasm runtime unless all of `spec_name`,
    // `spec_version`, and `authoring_version` are the same between Wasm and native.
    // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use
    // the compatible custom types.
    spec_version: 100,
    impl_version: 1,
    apis: RUNTIME_API_VERSIONS,
    transaction_version: 1,
    state_version: 1,
};

/// The version information used to identify this runtime when compiled natively.
#[cfg(feature = "std")]
pub fn native_version() -> NativeVersion {
    NativeVersion {
        runtime_version: VERSION,
        can_author_with: Default::default(),
    }
}

/// The smallest unit of the token is called Shannon.
pub const SHANNON: Balance = 1;
/// Subspace Credits have 18 decimal places.
pub const DECIMAL_PLACES: u8 = 18;
/// One Subspace Credit (10^18 Shannon).
pub const SSC: Balance = (10 * SHANNON).pow(DECIMAL_PLACES as u32);

// TODO: Many of below constants should probably be updatable but currently they are not

/// Since Subspace is probabilistic this is the average expected block time that
/// we are targeting. Blocks will be produced at a minimum duration defined
/// by `SLOT_DURATION`, but some slots will not be allocated to any
/// farmer and hence no block will be produced. We expect to have this
/// block time on average following the defined slot duration and the value
/// of `c` configured for Subspace (where `1 - c` represents the probability of
/// a slot being empty).
/// This value is only used indirectly to define the unit constants below
/// that are expressed in blocks. The rest of the code should use
/// `SLOT_DURATION` instead (like the Timestamp pallet for calculating the
/// minimum period).
///
/// Based on:
/// <https://research.web3.foundation/en/latest/polkadot/block-production/Babe.html#-6.-practical-results>
pub const MILLISECS_PER_BLOCK: u64 = 2000;

// NOTE: Currently it is not possible to change the slot duration after the chain has started.
// Attempting to do so will brick block production.
pub const SLOT_DURATION: u64 = 2000;

/// 1 in 6 slots (on average, not counting collisions) will have a block.
/// Must match ratio between block and slot duration in constants above.
// NOTE(review): the doc above says "1 in 6" but the value is (1, 1) — which
// does match the equal block/slot durations (2000/2000); the "1 in 6" prose
// appears stale. Confirm and update the comment, not the value.
const SLOT_PROBABILITY: (u64, u64) = (1, 1);

/// The amount of time, in blocks, between updates of global randomness.
const GLOBAL_RANDOMNESS_UPDATE_INTERVAL: BlockNumber = 256;

/// Era duration in blocks.
const ERA_DURATION_IN_BLOCKS: BlockNumber = 2016;

// Presumably how long (in blocks) an equivocation report stays valid for
// inclusion — usage is outside this chunk; confirm where it is consumed.
const EQUIVOCATION_REPORT_LONGEVITY: BlockNumber = 256;

/// Any solution range is valid in the test environment.
const INITIAL_SOLUTION_RANGE: SolutionRange = SolutionRange::MAX;

/// A ratio of `Normal` dispatch class within block, for `BlockWeight` and `BlockLength`.
const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);

/// The block weight for 2 seconds of compute
const BLOCK_WEIGHT_FOR_2_SEC: Weight =
    Weight::from_parts(WEIGHT_REF_TIME_PER_SECOND.saturating_mul(2), u64::MAX);

/// Maximum block length for non-`Normal` extrinsic is 5 MiB.
const MAX_BLOCK_LENGTH: u32 = 5 * 1024 * 1024;

// Recursion-depth cap — presumably bounds nested object-mapping extraction;
// consumer is outside this chunk, confirm.
const MAX_OBJECT_MAPPING_RECURSION_DEPTH: u16 = 5;

parameter_types!
{
    pub const Version: RuntimeVersion = VERSION;
    // NOTE(review): `BlockHashCount` is declared here as 2400 but the
    // `frame_system::Config` impl below uses `ConstU32<250>` instead, leaving
    // this parameter apparently unused — one of the two looks stale; confirm
    // which value is intended before changing either.
    pub const BlockHashCount: BlockNumber = 2400;
    /// We allow for 2 seconds of compute with a 6 second average block time.
    // NOTE(review): "6 second average block time" conflicts with
    // MILLISECS_PER_BLOCK = 2000 (2 s) defined above — stale comment.
    pub SubspaceBlockWeights: BlockWeights = BlockWeights::with_sensible_defaults(BLOCK_WEIGHT_FOR_2_SEC, NORMAL_DISPATCH_RATIO);
    /// We allow for 3.75 MiB for `Normal` extrinsic with 5 MiB maximum block length.
    pub SubspaceBlockLength: BlockLength = BlockLength::max_with_normal_ratio(MAX_BLOCK_LENGTH, NORMAL_DISPATCH_RATIO);
}

// SS58 address-format prefix for this chain.
pub type SS58Prefix = ConstU16<2254>;

// Configure FRAME pallets to include in runtime.
impl frame_system::Config for Runtime {
    /// The basic call filter to use in dispatchable.
    type BaseCallFilter = frame_support::traits::Everything;
    /// Block & extrinsics weights: base values and limits.
    type BlockWeights = SubspaceBlockWeights;
    /// The maximum length of a block (in bytes).
    type BlockLength = SubspaceBlockLength;
    /// The identifier used to distinguish between accounts.
    type AccountId = AccountId;
    /// The aggregated dispatch type that is available for extrinsics.
    type RuntimeCall = RuntimeCall;
    /// The lookup mechanism to get account ID from whatever is passed in dispatchers.
    type Lookup = AccountIdLookup<AccountId, ()>;
    /// The index type for storing how many extrinsics an account has signed.
    type Index = Index;
    /// The index type for blocks.
    type BlockNumber = BlockNumber;
    /// The type for hashing blocks and tries.
    type Hash = Hash;
    /// The hashing algorithm used.
    type Hashing = BlakeTwo256;
    /// The header type.
    type Header = Header;
    /// The ubiquitous event type.
    type RuntimeEvent = RuntimeEvent;
    /// The ubiquitous origin type.
    type RuntimeOrigin = RuntimeOrigin;
    /// Maximum number of block number to block hash mappings to keep (oldest pruned first).
    type BlockHashCount = ConstU32<250>;
    /// The weight of database operations that the runtime can invoke.
    type DbWeight = RocksDbWeight;
    /// Version of the runtime.
    type Version = Version;
    /// Converts a module to the index of the module in `construct_runtime!`.
    ///
    /// This type is being generated by `construct_runtime!`.
    type PalletInfo = PalletInfo;
    /// What to do if a new account is created.
    type OnNewAccount = ();
    /// What to do if an account is fully reaped from the system.
    type OnKilledAccount = ();
    /// The data to be stored in an account.
    type AccountData = pallet_balances::AccountData<Balance>;
    /// Weight information for the extrinsics of this pallet.
    type SystemWeightInfo = ();
    /// This is used as an identifier of the chain.
    type SS58Prefix = SS58Prefix;
    /// The set code logic, just the default since we're not a parachain.
    type OnSetCode = ();
    type MaxConsumers = ConstU32<16>;
}

parameter_types! {
    pub const SlotProbability: (u64, u64) = SLOT_PROBABILITY;
    pub const ExpectedBlockTime: Moment = MILLISECS_PER_BLOCK;
    pub const ShouldAdjustSolutionRange: bool = false;
    pub const ExpectedVotesPerBlock: u32 = 9;
    pub const ConfirmationDepthK: u32 = 100;
    pub const RecentSegments: HistorySize = HistorySize::new(NonZeroU64::new(5).unwrap());
    pub const RecentHistoryFraction: (HistorySize, HistorySize) = (
        HistorySize::new(NonZeroU64::new(1).unwrap()),
        HistorySize::new(NonZeroU64::new(10).unwrap()),
    );
    pub const MinSectorLifetime: HistorySize = HistorySize::new(NonZeroU64::new(4).unwrap());
}

// Consensus pallet configuration (impl continues past this chunk).
impl pallet_subspace::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type GlobalRandomnessUpdateInterval = ConstU32<GLOBAL_RANDOMNESS_UPDATE_INTERVAL>;
    type EraDuration = ConstU32<ERA_DURATION_IN_BLOCKS>;
    type InitialSolutionRange = ConstU64<INITIAL_SOLUTION_RANGE>;
    type SlotProbability = SlotProbability;
    type ExpectedBlockTime = ExpectedBlockTime;
    type ConfirmationDepthK = ConfirmationDepthK;
    type RecentSegments = RecentSegments;
    type RecentHistoryFraction = RecentHistoryFraction;
    type MinSectorLifetime = MinSectorLifetime;
    type ExpectedVotesPerBlock = ExpectedVotesPerBlock;
    type MaxPiecesInSector = ConstU16<{ MAX_PIECES_IN_SECTOR }>;
type ShouldAdjustSolutionRange = ShouldAdjustSolutionRange; type GlobalRandomnessIntervalTrigger = pallet_subspace::NormalGlobalRandomnessInterval; type EraChangeTrigger = pallet_subspace::NormalEraChange; type HandleEquivocation = pallet_subspace::equivocation::EquivocationHandler< OffencesSubspace, ConstU64<{ EQUIVOCATION_REPORT_LONGEVITY as u64 }>, >; type WeightInfo = (); } impl pallet_timestamp::Config for Runtime { /// A timestamp: milliseconds since the unix epoch. type Moment = Moment; type OnTimestampSet = Subspace; type MinimumPeriod = ConstU64<{ SLOT_DURATION / 2 }>; type WeightInfo = (); } #[derive( PartialEq, Eq, Clone, Encode, Decode, TypeInfo, MaxEncodedLen, Ord, PartialOrd, Copy, Debug, )] pub enum HoldIdentifier { Domains(DomainsHoldIdentifier), } impl pallet_domains::HoldIdentifier<Runtime> for HoldIdentifier { fn staking_pending_deposit(operator_id: OperatorId) -> Self { Self::Domains(DomainsHoldIdentifier::Staking( StakingHoldIdentifier::PendingDeposit(operator_id), )) } fn staking_staked(operator_id: OperatorId) -> Self { Self::Domains(DomainsHoldIdentifier::Staking( StakingHoldIdentifier::Staked(operator_id), )) } fn staking_pending_unlock(operator_id: OperatorId) -> Self { Self::Domains(DomainsHoldIdentifier::Staking( StakingHoldIdentifier::PendingUnlock(operator_id), )) } fn domain_instantiation_id(domain_id: DomainId) -> Self { Self::Domains(DomainsHoldIdentifier::DomainInstantiation(domain_id)) } } parameter_types! { pub const MaxHolds: u32 = 10; } impl pallet_balances::Config for Runtime { type MaxLocks = ConstU32<50>; type MaxReserves = (); type ReserveIdentifier = [u8; 8]; /// The type for recording an account's balance. type Balance = Balance; /// The ubiquitous event type. 
type RuntimeEvent = RuntimeEvent; type DustRemoval = (); // TODO: Correct value type ExistentialDeposit = ConstU128<{ 500 * SHANNON }>; type AccountStore = System; type WeightInfo = pallet_balances::weights::SubstrateWeight<Runtime>; type FreezeIdentifier = (); type MaxFreezes = (); type RuntimeHoldReason = HoldIdentifier; type MaxHolds = MaxHolds; } parameter_types! { pub const StorageFeesEscrowBlockReward: (u64, u64) = STORAGE_FEES_ESCROW_BLOCK_REWARD; pub const StorageFeesEscrowBlockTax: (u64, u64) = STORAGE_FEES_ESCROW_BLOCK_TAX; } pub struct CreditSupply; impl Get<Balance> for CreditSupply { fn get() -> Balance { Balances::total_issuance() } } pub struct TotalSpacePledged; impl Get<u128> for TotalSpacePledged { fn get() -> u128 { // Operations reordered to avoid data loss, but essentially are: // u64::MAX * SlotProbability / (solution_range / PIECE_SIZE) u128::from(u64::MAX) .saturating_mul(Piece::SIZE as u128) .saturating_mul(u128::from(SlotProbability::get().0)) / u128::from(Subspace::solution_ranges().current) / u128::from(SlotProbability::get().1) } } pub struct BlockchainHistorySize; impl Get<u128> for BlockchainHistorySize { fn get() -> u128 { u128::from(Subspace::archived_history_size()) } } impl pallet_transaction_fees::Config for Runtime { type RuntimeEvent = RuntimeEvent; type MinReplicationFactor = ConstU16<MIN_REPLICATION_FACTOR>; type StorageFeesEscrowBlockReward = StorageFeesEscrowBlockReward; type StorageFeesEscrowBlockTax = StorageFeesEscrowBlockTax; type CreditSupply = CreditSupply; type TotalSpacePledged = TotalSpacePledged; type BlockchainHistorySize = BlockchainHistorySize; type Currency = Balances; type FindBlockRewardAddress = Subspace; type WeightInfo = (); } pub struct TransactionByteFee; impl Get<Balance> for TransactionByteFee { fn get() -> Balance { if cfg!(feature = "do-not-enforce-cost-of-storage") { 1 } else { TransactionFees::transaction_byte_fee() } } } pub struct LiquidityInfo { storage_fee: Balance, imbalance: 
NegativeImbalance<Runtime>, } /// Implementation of [`pallet_transaction_payment::OnChargeTransaction`] that charges transaction /// fees and distributes storage/compute fees and tip separately. pub struct OnChargeTransaction; impl pallet_transaction_payment::OnChargeTransaction<Runtime> for OnChargeTransaction { type LiquidityInfo = Option<LiquidityInfo>; type Balance = Balance; fn withdraw_fee( who: &AccountId, call: &RuntimeCall, _info: &DispatchInfoOf<RuntimeCall>, fee: Self::Balance, tip: Self::Balance, ) -> Result<Self::LiquidityInfo, TransactionValidityError> { if fee.is_zero() { return Ok(None); } let withdraw_reason = if tip.is_zero() { WithdrawReasons::TRANSACTION_PAYMENT } else { WithdrawReasons::TRANSACTION_PAYMENT | WithdrawReasons::TIP }; let withdraw_result = <Balances as Currency<AccountId>>::withdraw( who, fee, withdraw_reason, ExistenceRequirement::KeepAlive, ); let imbalance = withdraw_result.map_err(|_error| InvalidTransaction::Payment)?; // Separate storage fee while we have access to the call data structure to calculate it. let storage_fee = TransactionByteFee::get() * Balance::try_from(call.encoded_size()) .expect("Size of the call never exceeds balance units; qed"); Ok(Some(LiquidityInfo { storage_fee, imbalance, })) } fn correct_and_deposit_fee( who: &AccountId, _dispatch_info: &DispatchInfoOf<RuntimeCall>, _post_info: &PostDispatchInfoOf<RuntimeCall>, corrected_fee: Self::Balance, tip: Self::Balance, liquidity_info: Self::LiquidityInfo, ) -> Result<(), TransactionValidityError> { if let Some(LiquidityInfo { storage_fee, imbalance, }) = liquidity_info { // Calculate how much refund we should return let refund_amount = imbalance.peek().saturating_sub(corrected_fee); // Refund to the the account that paid the fees. If this fails, the account might have // dropped below the existential balance. In that case we don't refund anything. 
let refund_imbalance = Balances::deposit_into_existing(who, refund_amount) .unwrap_or_else(|_| <Balances as Currency<AccountId>>::PositiveImbalance::zero()); // Merge the imbalance caused by paying the fees and refunding parts of it again. let adjusted_paid = imbalance .offset(refund_imbalance) .same() .map_err(|_| TransactionValidityError::Invalid(InvalidTransaction::Payment))?; // Split the tip from the total fee that ended up being paid. let (tip, fee) = adjusted_paid.split(tip); // Split paid storage and compute fees so that they can be distributed separately. let (paid_storage_fee, paid_compute_fee) = fee.split(storage_fee); TransactionFees::note_transaction_fees( paid_storage_fee.peek(), paid_compute_fee.peek(), tip.peek(), ); } Ok(()) } } impl pallet_transaction_payment::Config for Runtime { type RuntimeEvent = RuntimeEvent; type OnChargeTransaction = OnChargeTransaction; type OperationalFeeMultiplier = ConstU8<5>; type WeightToFee = IdentityFee<Balance>; type LengthToFee = ConstantMultiplier<Balance, TransactionByteFee>; type FeeMultiplierUpdate = (); } impl pallet_utility::Config for Runtime { type RuntimeEvent = RuntimeEvent; type RuntimeCall = RuntimeCall; type PalletsOrigin = OriginCaller; type WeightInfo = pallet_utility::weights::SubstrateWeight<Runtime>; } impl pallet_sudo::Config for Runtime { type RuntimeEvent = RuntimeEvent; type RuntimeCall = RuntimeCall; type WeightInfo = pallet_sudo::weights::SubstrateWeight<Runtime>; } impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime where RuntimeCall: From<C>, { type Extrinsic = UncheckedExtrinsic; type OverarchingCall = RuntimeCall; } impl pallet_offences_subspace::Config for Runtime { type RuntimeEvent = RuntimeEvent; type OnOffenceHandler = Subspace; } parameter_types! 
{ pub const MaximumReceiptDrift: BlockNumber = 2; pub const InitialDomainTxRange: u64 = 10; pub const DomainTxRangeAdjustmentInterval: u64 = 100; pub const DomainRuntimeUpgradeDelay: BlockNumber = 10; pub const MinOperatorStake: Balance = 100 * SSC; /// Use the consensus chain's `Normal` extrinsics block size limit as the domain block size limit pub MaxDomainBlockSize: u32 = NORMAL_DISPATCH_RATIO * MAX_BLOCK_LENGTH; /// Use the consensus chain's `Normal` extrinsics block weight limit as the domain block weight limit pub MaxDomainBlockWeight: Weight = NORMAL_DISPATCH_RATIO * BLOCK_WEIGHT_FOR_2_SEC; pub const MaxBundlesPerBlock: u32 = 10; pub const DomainInstantiationDeposit: Balance = 100 * SSC; pub const MaxDomainNameLength: u32 = 32; pub const BlockTreePruningDepth: u32 = 16; pub const StakeWithdrawalLockingPeriod: BlockNumber = 20; pub const StakeEpochDuration: DomainNumber = 5; pub TreasuryAccount: AccountId = PalletId(*b"treasury").into_account_truncating(); pub const MaxPendingStakingOperation: u32 = 100; } impl pallet_domains::Config for Runtime { type RuntimeEvent = RuntimeEvent; type DomainNumber = DomainNumber; type DomainHash = DomainHash; type ConfirmationDepthK = ConfirmationDepthK; type DomainRuntimeUpgradeDelay = DomainRuntimeUpgradeDelay; type Currency = Balances; type HoldIdentifier = HoldIdentifier; type WeightInfo = pallet_domains::weights::SubstrateWeight<Runtime>; type InitialDomainTxRange = InitialDomainTxRange; type DomainTxRangeAdjustmentInterval = DomainTxRangeAdjustmentInterval; type MinOperatorStake = MinOperatorStake; type MaxDomainBlockSize = MaxDomainBlockSize; type MaxDomainBlockWeight = MaxDomainBlockWeight; type MaxBundlesPerBlock = MaxBundlesPerBlock; type DomainInstantiationDeposit = DomainInstantiationDeposit; type MaxDomainNameLength = MaxDomainNameLength; type Share = Balance; type BlockTreePruningDepth = BlockTreePruningDepth; type StakeWithdrawalLockingPeriod = StakeWithdrawalLockingPeriod; type StakeEpochDuration = 
StakeEpochDuration; type TreasuryAccount = TreasuryAccount; type DomainBlockReward = BlockReward; type MaxPendingStakingOperation = MaxPendingStakingOperation; } parameter_types! { pub const BlockReward: Balance = SSC / (ExpectedVotesPerBlock::get() as Balance + 1); pub const VoteReward: Balance = SSC / (ExpectedVotesPerBlock::get() as Balance + 1); } impl pallet_rewards::Config for Runtime { type RuntimeEvent = RuntimeEvent; type Currency = Balances; type BlockReward = BlockReward; type VoteReward = VoteReward; type FindBlockRewardAddress = Subspace; type FindVotingRewardAddresses = Subspace; type WeightInfo = (); type OnReward = (); } /// Polkadot-like chain. struct PolkadotLike; impl Chain for PolkadotLike { type BlockNumber = u32; type Hash = <BlakeTwo256 as Hasher>::Out; type Header = generic::Header<u32, BlakeTwo256>; type Hasher = BlakeTwo256; } /// Type used to represent a FeedId or ChainId pub type FeedId = u64; pub struct GrandpaValidator<C>(PhantomData<C>); impl<C: Chain> FeedProcessor<FeedId> for GrandpaValidator<C> { fn init(&self, feed_id: FeedId, data: &[u8]) -> sp_runtime::DispatchResult { pallet_grandpa_finality_verifier::initialize::<Runtime, C>(feed_id, data) } fn put(&self, feed_id: FeedId, object: &[u8]) -> Result<Option<FeedMetadata>, DispatchError> { Ok(Some( pallet_grandpa_finality_verifier::validate_finalized_block::<Runtime, C>( feed_id, object, )? 
.encode(), )) } fn object_mappings(&self, _feed_id: FeedId, object: &[u8]) -> Vec<FeedObjectMapping> { let block = match C::decode_block::<Runtime>(object) { Ok(block) => block, // we just return empty if we failed to decode as this is not called in runtime Err(_) => return vec![], }; // for substrate, we store the height and block hash at that height let key = (*block.block.header.number(), block.block.header.hash()).encode(); vec![FeedObjectMapping::Custom { key, offset: 0 }] } fn delete(&self, feed_id: FeedId) -> sp_runtime::DispatchResult { pallet_grandpa_finality_verifier::purge::<Runtime>(feed_id) } } parameter_types! { pub const MaxFeeds: u32 = 10; } impl pallet_feeds::Config for Runtime { type RuntimeEvent = RuntimeEvent; type FeedId = FeedId; type FeedProcessorKind = (); type MaxFeeds = MaxFeeds; fn feed_processor( _feed_processor_id: Self::FeedProcessorKind, ) -> Box<dyn FeedProcessor<Self::FeedId>> { Box::new(GrandpaValidator(PhantomData::<PolkadotLike>)) } } impl pallet_grandpa_finality_verifier::Config for Runtime { type ChainId = FeedId; } impl pallet_object_store::Config for Runtime { type RuntimeEvent = RuntimeEvent; } parameter_types! { // This value doesn't matter, we don't use it (`VestedTransferOrigin = EnsureNever` below). 
pub const MinVestedTransfer: Balance = 0; } impl orml_vesting::Config for Runtime { type RuntimeEvent = RuntimeEvent; type Currency = Balances; type MinVestedTransfer = MinVestedTransfer; type VestedTransferOrigin = EnsureNever<AccountId>; type WeightInfo = (); type MaxVestingSchedules = ConstU32<2>; type BlockNumberProvider = System; } construct_runtime!( pub struct Runtime where Block = Block, NodeBlock = opaque::Block, UncheckedExtrinsic = UncheckedExtrinsic { System: frame_system = 0, Timestamp: pallet_timestamp = 1, Subspace: pallet_subspace = 2, OffencesSubspace: pallet_offences_subspace = 3, Rewards: pallet_rewards = 9, Balances: pallet_balances = 4, TransactionFees: pallet_transaction_fees = 12, TransactionPayment: pallet_transaction_payment = 5, Utility: pallet_utility = 8, Feeds: pallet_feeds = 6, GrandpaFinalityVerifier: pallet_grandpa_finality_verifier = 13, ObjectStore: pallet_object_store = 10, Domains: pallet_domains = 11, Vesting: orml_vesting = 7, // Reserve some room for other pallets as we'll remove sudo pallet eventually. Sudo: pallet_sudo = 100, } ); /// The address format for describing accounts. pub type Address = sp_runtime::MultiAddress<AccountId, ()>; /// Block header type as expected by this runtime. pub type Header = generic::Header<BlockNumber, BlakeTwo256>; /// Block type as expected by this runtime. pub type Block = generic::Block<Header, UncheckedExtrinsic>; /// The SignedExtension to the basic transaction logic. pub type SignedExtra = ( frame_system::CheckNonZeroSender<Runtime>, frame_system::CheckSpecVersion<Runtime>, frame_system::CheckTxVersion<Runtime>, frame_system::CheckGenesis<Runtime>, frame_system::CheckMortality<Runtime>, frame_system::CheckNonce<Runtime>, frame_system::CheckWeight<Runtime>, pallet_transaction_payment::ChargeTransactionPayment<Runtime>, ); /// Unchecked extrinsic type as expected by this runtime. 
pub type UncheckedExtrinsic =
    generic::UncheckedExtrinsic<Address, RuntimeCall, Signature, SignedExtra>;
/// Executive: handles dispatch to the various modules.
pub type Executive = frame_executive::Executive<
    Runtime,
    Block,
    frame_system::ChainContext<Runtime>,
    Runtime,
    AllPalletsWithSystem,
>;
/// The payload being signed in transactions.
pub type SignedPayload = generic::SignedPayload<RuntimeCall, SignedExtra>;

/// Returns the segment headers carried by a `Subspace::store_segment_headers` extrinsic,
/// or `None` for any other call.
fn extract_segment_headers(ext: &UncheckedExtrinsic) -> Option<Vec<SegmentHeader>> {
    match &ext.function {
        RuntimeCall::Subspace(pallet_subspace::Call::store_segment_headers { segment_headers }) => {
            Some(segment_headers.clone())
        }
        _ => None,
    }
}

/// Appends object mappings for a `pallet_feeds` call, but only when the call's hash matches
/// the next expected hash in `successful_calls` (i.e. the call actually succeeded).
fn extract_feeds_block_object_mapping<I: Iterator<Item = Hash>>(
    base_offset: u32,
    objects: &mut Vec<BlockObject>,
    call: &pallet_feeds::Call<Runtime>,
    successful_calls: &mut Peekable<I>,
) {
    let call_hash = successful_calls.peek();
    match call_hash {
        Some(hash) => {
            // Skip calls that did not succeed (their hash is not next in the queue).
            if <BlakeTwo256 as HashT>::hash(call.encode().as_slice()) != *hash {
                return;
            }
            // remove the hash and fetch the object mapping for this call
            successful_calls.next();
        }
        None => return,
    }

    call.extract_call_objects()
        .into_iter()
        .for_each(|object_map| {
            objects.push(BlockObject::V0 {
                hash: object_map.key,
                offset: base_offset + object_map.offset,
            })
        })
}

/// Appends the object mapping for a `pallet_object_store` call, if it carries one.
fn extract_object_store_block_object_mapping(
    base_offset: u32,
    objects: &mut Vec<BlockObject>,
    call: &pallet_object_store::Call<Runtime>,
) {
    if let Some(call_object) = call.extract_call_object() {
        objects.push(BlockObject::V0 {
            hash: call_object.hash,
            offset: base_offset + call_object.offset,
        });
    }
}

/// Recurses into `pallet_utility` wrapper calls (batches, `as_derivative`, `dispatch_as`,
/// `with_weight`), tracking the SCALE-encoding offset of each inner call so that object
/// mappings point at the right byte positions. Recursion is bounded by
/// `recursion_depth_left`.
fn extract_utility_block_object_mapping<I: Iterator<Item = Hash>>(
    mut base_offset: u32,
    objects: &mut Vec<BlockObject>,
    call: &pallet_utility::Call<Runtime>,
    mut recursion_depth_left: u16,
    successful_calls: &mut Peekable<I>,
) {
    if recursion_depth_left == 0 {
        return;
    }

    recursion_depth_left -= 1;

    // Add enum variant to the base offset.
    base_offset += 1;

    match call {
        pallet_utility::Call::batch { calls }
        | pallet_utility::Call::batch_all { calls }
        | pallet_utility::Call::force_batch { calls } => {
            // The batch's `Vec` length prefix is compact-encoded before the calls.
            base_offset += Compact::compact_len(&(calls.len() as u32)) as u32;

            for call in calls {
                extract_call_block_object_mapping(
                    base_offset,
                    objects,
                    call,
                    recursion_depth_left,
                    successful_calls,
                );

                base_offset += call.encoded_size() as u32;
            }
        }
        pallet_utility::Call::as_derivative { index, call } => {
            base_offset += index.encoded_size() as u32;

            extract_call_block_object_mapping(
                base_offset,
                objects,
                call.as_ref(),
                recursion_depth_left,
                successful_calls,
            );
        }
        pallet_utility::Call::dispatch_as { as_origin, call } => {
            base_offset += as_origin.encoded_size() as u32;

            extract_call_block_object_mapping(
                base_offset,
                objects,
                call.as_ref(),
                recursion_depth_left,
                successful_calls,
            );
        }
        pallet_utility::Call::with_weight { call, .. } => {
            extract_call_block_object_mapping(
                base_offset,
                objects,
                call.as_ref(),
                recursion_depth_left,
                successful_calls,
            );
        }
        pallet_utility::Call::__Ignore(_, _) => {
            // Ignore.
        }
    }
}

/// Dispatches on the runtime call variant and delegates to the pallet-specific object
/// mapping extractor (Feeds, ObjectStore, or recursively via Utility).
fn extract_call_block_object_mapping<I: Iterator<Item = Hash>>(
    mut base_offset: u32,
    objects: &mut Vec<BlockObject>,
    call: &RuntimeCall,
    recursion_depth_left: u16,
    successful_calls: &mut Peekable<I>,
) {
    // Add enum variant to the base offset.
    base_offset += 1;

    match call {
        RuntimeCall::Feeds(call) => {
            extract_feeds_block_object_mapping(base_offset, objects, call, successful_calls);
        }
        RuntimeCall::ObjectStore(call) => {
            extract_object_store_block_object_mapping(base_offset, objects, call);
        }
        RuntimeCall::Utility(call) => {
            extract_utility_block_object_mapping(
                base_offset,
                objects,
                call,
                recursion_depth_left,
                successful_calls,
            );
        }
        _ => {}
    }
}

/// Walks all extrinsics of `block`, computing the byte offset of each call within the
/// SCALE-encoded block and collecting object mappings for calls listed in
/// `successful_calls`.
fn extract_block_object_mapping(block: Block, successful_calls: Vec<Hash>) -> BlockObjectMapping {
    let mut block_object_mapping = BlockObjectMapping::default();
    let mut successful_calls = successful_calls.into_iter().peekable();
    // Offsets start after the encoded header and the compact extrinsic-count prefix.
    let mut base_offset =
        block.header.encoded_size() + Compact::compact_len(&(block.extrinsics.len() as u32));
    for extrinsic in block.extrinsics {
        let signature_size = extrinsic
            .signature
            .as_ref()
            .map(|s| s.encoded_size())
            .unwrap_or_default();
        // Extrinsic starts with vector length and version byte, followed by optional signature and
        // `function` encoding.
        let base_extrinsic_offset = base_offset
            + Compact::compact_len(
                &((1 + signature_size + extrinsic.function.encoded_size()) as u32),
            )
            + 1
            + signature_size;

        extract_call_block_object_mapping(
            base_extrinsic_offset as u32,
            &mut block_object_mapping.objects,
            &extrinsic.function,
            MAX_OBJECT_MAPPING_RECURSION_DEPTH,
            &mut successful_calls,
        );

        base_offset += extrinsic.encoded_size();
    }

    block_object_mapping
}

/// Filters `extrinsics` down to the opaque bundles for `domain_id` whose hashes are
/// recorded as successful by the Domains pallet.
fn extract_successful_bundles(
    domain_id: DomainId,
    extrinsics: Vec<UncheckedExtrinsic>,
) -> OpaqueBundles<Block, DomainNumber, DomainHash, Balance> {
    let successful_bundles = Domains::successful_bundles(domain_id);
    extrinsics
        .into_iter()
        .filter_map(|uxt| match uxt.function {
            RuntimeCall::Domains(pallet_domains::Call::submit_bundle { opaque_bundle })
                if opaque_bundle.domain_id() == domain_id
                    && successful_bundles.contains(&opaque_bundle.hash()) =>
            {
                Some(opaque_bundle)
            }
            _ => None,
        })
        .collect()
}

// TODO: Remove when proceeding to fraud proof v2.
#[allow(unused)]
/// Converts the successful bundles for `domain_id` found in `extrinsics` into their
/// execution receipts.
fn extract_receipts(
    extrinsics: Vec<UncheckedExtrinsic>,
    domain_id: DomainId,
) -> Vec<ExecutionReceipt<BlockNumber, Hash, DomainNumber, DomainHash, Balance>> {
    let successful_bundles = Domains::successful_bundles(domain_id);
    extrinsics
        .into_iter()
        .filter_map(|uxt| match uxt.function {
            RuntimeCall::Domains(pallet_domains::Call::submit_bundle { opaque_bundle })
                if opaque_bundle.domain_id() == domain_id
                    && successful_bundles.contains(&opaque_bundle.hash()) =>
            {
                Some(opaque_bundle.into_receipt())
            }
            _ => None,
        })
        .collect()
}

// TODO: Remove when proceeding to fraud proof v2.
#[allow(unused)]
/// Collects all fraud proofs targeting `domain_id` from the given extrinsics.
fn extract_fraud_proofs(
    extrinsics: Vec<UncheckedExtrinsic>,
    domain_id: DomainId,
) -> Vec<FraudProof<NumberFor<Block>, Hash>> {
    // TODO: Ensure fraud proof extrinsic is infallible.
    extrinsics
        .into_iter()
        .filter_map(|uxt| match uxt.function {
            RuntimeCall::Domains(pallet_domains::Call::submit_fraud_proof { fraud_proof })
                if fraud_proof.domain_id() == domain_id =>
            {
                Some(*fraud_proof)
            }
            _ => None,
        })
        .collect()
}

/// Classifies an extrinsic into the object that must be validated before the transaction
/// is accepted: a fraud proof, a bundle, or nothing.
fn extract_pre_validation_object(
    extrinsic: UncheckedExtrinsic,
) -> PreValidationObject<Block, DomainNumber, DomainHash> {
    match extrinsic.function {
        RuntimeCall::Domains(pallet_domains::Call::submit_fraud_proof { fraud_proof }) => {
            PreValidationObject::FraudProof(*fraud_proof)
        }
        RuntimeCall::Domains(pallet_domains::Call::submit_bundle { opaque_bundle }) => {
            PreValidationObject::Bundle(opaque_bundle)
        }
        _ => PreValidationObject::Null,
    }
}

/// Derives the deterministic seed used to shuffle extrinsics from the Subspace
/// pre-runtime digest of `header`; the genesis header gets a default seed.
/// Panics if the header carries more than one Subspace pre-runtime digest or none at all.
fn extrinsics_shuffling_seed<Block: BlockT>(header: Block::Header) -> Randomness {
    if header.number().is_zero() {
        Randomness::default()
    } else {
        // Scan the digest logs, insisting on exactly one Subspace pre-digest.
        let mut pre_digest: Option<_> = None;
        for log in header.digest().logs() {
            match (
                log.as_subspace_pre_digest::<FarmerPublicKey>(),
                pre_digest.is_some(),
            ) {
                (Some(_), true) => panic!("Multiple Subspace pre-runtime digests in a header"),
                (None, _) => {}
                (s, false) => pre_digest = s,
            }
        }

        let pre_digest = pre_digest.expect("Header must contain one pre-runtime digest; qed");

        // Hash a domain-separation tag together with the slot randomness.
        let seed: &[u8] = b"extrinsics-shuffling-seed";
        let randomness = derive_randomness(&pre_digest.solution, pre_digest.slot.into());
        let mut data = Vec::with_capacity(seed.len() + randomness.len());
        data.extend_from_slice(seed);
        data.extend_from_slice(randomness.as_ref());

        Randomness::from(BlakeTwo256::hash_of(&data).to_fixed_bytes())
    }
}

/// Newtype bridging a farmer public key to an `AccountId32` reward address.
struct RewardAddress([u8; 32]);

impl From<FarmerPublicKey> for RewardAddress {
    #[inline]
    fn from(farmer_public_key: FarmerPublicKey) -> Self {
        Self(
            farmer_public_key
                .as_slice()
                .try_into()
                .expect("Public key is always of correct size; qed"),
        )
    }
}

impl From<RewardAddress> for AccountId32 {
    #[inline]
    fn from(reward_address: RewardAddress) -> Self {
        reward_address.0.into()
    }
}

// Runtime API implementations; most entries delegate to the pallets or the free
// helper functions defined above.
impl_runtime_apis! {
    impl sp_api::Core<Block> for Runtime {
        fn version() -> RuntimeVersion {
            VERSION
        }

        fn execute_block(block: Block) {
            Executive::execute_block(block);
        }

        fn initialize_block(header: &<Block as BlockT>::Header) {
            Executive::initialize_block(header)
        }
    }

    impl sp_api::Metadata<Block> for Runtime {
        fn metadata() -> OpaqueMetadata {
            OpaqueMetadata::new(Runtime::metadata().into())
        }

        fn metadata_at_version(version: u32) -> Option<OpaqueMetadata> {
            Runtime::metadata_at_version(version)
        }

        fn metadata_versions() -> sp_std::vec::Vec<u32> {
            Runtime::metadata_versions()
        }
    }

    impl sp_block_builder::BlockBuilder<Block> for Runtime {
        fn apply_extrinsic(extrinsic: <Block as BlockT>::Extrinsic) -> ApplyExtrinsicResult {
            Executive::apply_extrinsic(extrinsic)
        }

        fn finalize_block() -> <Block as BlockT>::Header {
            Executive::finalize_block()
        }

        fn inherent_extrinsics(data: sp_inherents::InherentData) -> Vec<<Block as BlockT>::Extrinsic> {
            data.create_extrinsics()
        }

        fn check_inherents(
            block: Block,
            data: sp_inherents::InherentData,
        ) -> sp_inherents::CheckInherentsResult {
            data.check_extrinsics(&block)
        }
    }

    impl sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block> for Runtime {
        fn validate_transaction(
            source: TransactionSource,
            tx: <Block as BlockT>::Extrinsic,
            block_hash: <Block as BlockT>::Hash,
        ) -> TransactionValidity {
            Executive::validate_transaction(source, tx, block_hash)
        }
    }

    impl sp_offchain::OffchainWorkerApi<Block> for Runtime {
        fn offchain_worker(header: &<Block as BlockT>::Header) {
            Executive::offchain_worker(header)
        }
    }

    impl sp_objects::ObjectsApi<Block> for Runtime {
        fn extract_block_object_mapping(block: Block, successful_calls: Vec<Hash>) -> BlockObjectMapping {
            extract_block_object_mapping(block, successful_calls)
        }

        fn validated_object_call_hashes() -> Vec<Hash> {
            Feeds::successful_puts()
        }
    }

    impl sp_consensus_subspace::SubspaceApi<Block, FarmerPublicKey> for Runtime {
        fn history_size() -> HistorySize {
            <pallet_subspace::Pallet<Runtime>>::history_size()
        }

        fn max_pieces_in_sector() -> u16 {
            MAX_PIECES_IN_SECTOR
        }

        fn slot_duration() -> SlotDuration {
            SlotDuration::from_millis(Subspace::slot_duration())
        }

        fn global_randomnesses() -> GlobalRandomnesses {
            Subspace::global_randomnesses()
        }

        fn solution_ranges() -> SolutionRanges {
            Subspace::solution_ranges()
        }

        fn submit_report_equivocation_extrinsic(
            equivocation_proof: EquivocationProof<<Block as BlockT>::Header>,
        ) -> Option<()> {
            Subspace::submit_equivocation_report(equivocation_proof)
        }

        fn submit_vote_extrinsic(
            signed_vote: SignedVote<NumberFor<Block>, <Block as BlockT>::Hash, FarmerPublicKey>,
        ) {
            // Re-encode the vote's solution into the AccountId32 reward-address format
            // before handing it to the pallet.
            let SignedVote { vote, signature } = signed_vote;
            let Vote::V0 {
                height,
                parent_hash,
                slot,
                solution,
            } = vote;

            Subspace::submit_vote(SignedVote {
                vote: Vote::V0 {
                    height,
                    parent_hash,
                    slot,
                    solution: solution.into_reward_address_format::<RewardAddress, AccountId32>(),
                },
                signature,
            })
        }

        fn is_in_block_list(farmer_public_key: &FarmerPublicKey) -> bool {
            // TODO: Either check tx pool too for pending equivocations or replace equivocation
            // mechanism with an alternative one, so that blocking happens faster
            Subspace::is_in_block_list(farmer_public_key)
        }

        fn segment_commitment(segment_index: SegmentIndex) -> Option<SegmentCommitment> {
            Subspace::segment_commitment(segment_index)
        }

        fn extract_segment_headers(ext: &<Block as BlockT>::Extrinsic) -> Option<Vec<SegmentHeader >> {
            extract_segment_headers(ext)
        }

        fn root_plot_public_key() -> Option<FarmerPublicKey> {
            Subspace::root_plot_public_key()
        }

        fn should_adjust_solution_range() -> bool {
            Subspace::should_adjust_solution_range()
        }

        fn chain_constants() -> ChainConstants {
            Subspace::chain_constants()
        }
    }

    impl sp_domains::transaction::PreValidationObjectApi<Block, DomainNumber, DomainHash> for Runtime {
        fn extract_pre_validation_object(
            extrinsic: <Block as BlockT>::Extrinsic,
        ) -> sp_domains::transaction::PreValidationObject<Block, DomainNumber, DomainHash> {
            extract_pre_validation_object(extrinsic)
        }
    }

    impl sp_domains::DomainsApi<Block, DomainNumber, DomainHash> for Runtime {
        fn submit_bundle_unsigned(
            opaque_bundle: OpaqueBundle<NumberFor<Block>, <Block as BlockT>::Hash, DomainNumber, DomainHash, Balance>,
        ) {
            Domains::submit_bundle_unsigned(opaque_bundle)
        }

        fn extract_successful_bundles(
            domain_id: DomainId,
            extrinsics: Vec<<Block as BlockT>::Extrinsic>,
        ) -> OpaqueBundles<Block, DomainNumber, DomainHash, Balance> {
            extract_successful_bundles(domain_id, extrinsics)
        }

        fn extrinsics_shuffling_seed(header: <Block as BlockT>::Header) -> Randomness {
            extrinsics_shuffling_seed::<Block>(header)
        }

        fn domain_runtime_code(domain_id: DomainId) -> Option<Vec<u8>> {
            Domains::domain_runtime_code(domain_id)
        }

        fn runtime_id(domain_id: DomainId) -> Option<sp_domains::RuntimeId> {
            Domains::runtime_id(domain_id)
        }

        fn domain_instance_data(domain_id: DomainId) -> Option<(DomainInstanceData, NumberFor<Block>)> {
            Domains::domain_instance_data(domain_id)
        }

        fn timestamp() -> Moment{
            Timestamp::now()
        }

        fn domain_tx_range(_: DomainId) -> U256 {
            // Test runtime: every transaction is always in range.
            U256::MAX
        }

        fn genesis_state_root(domain_id: DomainId) -> Option<H256> {
            Domains::genesis_state_root(domain_id)
        }

        fn head_receipt_number(domain_id: DomainId) -> NumberFor<Block> {
            Domains::head_receipt_number(domain_id)
        }

        fn oldest_receipt_number(domain_id: DomainId) -> NumberFor<Block> {
            Domains::oldest_receipt_number(domain_id)
        }

        fn block_tree_pruning_depth() -> NumberFor<Block> {
            Domains::block_tree_pruning_depth()
        }

        fn domain_block_limit(domain_id: DomainId) -> Option<sp_domains::DomainBlockLimit> {
            Domains::domain_block_limit(domain_id)
        }

        fn non_empty_er_exists(domain_id: DomainId) -> bool {
            Domains::non_empty_er_exists(domain_id)
        }
    }

    impl sp_domains::BundleProducerElectionApi<Block, Balance> for Runtime {
        fn bundle_producer_election_params(domain_id: DomainId) -> Option<BundleProducerElectionParams<Balance>> {
            Domains::bundle_producer_election_params(domain_id)
        }

        fn operator(operator_id: OperatorId) -> Option<(OperatorPublicKey, Balance)> {
            Domains::operator(operator_id)
        }
    }

    impl sp_session::SessionKeys<Block> for Runtime {
        fn generate_session_keys(seed: Option<Vec<u8>>) -> Vec<u8> {
            SessionKeys::generate(seed)
        }

        fn decode_session_keys(
            encoded: Vec<u8>,
        ) -> Option<Vec<(Vec<u8>, KeyTypeId)>> {
            SessionKeys::decode_into_raw_public_keys(&encoded)
        }
    }

    impl frame_system_rpc_runtime_api::AccountNonceApi<Block, AccountId, Index> for Runtime {
        fn account_nonce(account: AccountId) -> Index {
            System::account_nonce(account)
        }
    }

    impl pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi<Block, Balance> for Runtime {
        fn query_info(
            uxt: <Block as BlockT>::Extrinsic,
            len: u32,
        ) -> pallet_transaction_payment_rpc_runtime_api::RuntimeDispatchInfo<Balance> {
            TransactionPayment::query_info(uxt, len)
        }

        fn query_fee_details(
            uxt: <Block as BlockT>::Extrinsic,
            len: u32,
        ) -> pallet_transaction_payment::FeeDetails<Balance> {
            TransactionPayment::query_fee_details(uxt, len)
        }

        fn query_weight_to_fee(weight: Weight) -> Balance {
            TransactionPayment::weight_to_fee(weight)
        }

        fn query_length_to_fee(length: u32) -> Balance {
            TransactionPayment::length_to_fee(length)
        }
    }
}
use super::client::PlayerClient;
use super::{IncomingMessage, LobbyState, OutgoingMessage};
use super::{LobbyChannel, LobbyCommand, LobbyError, LobbyResponse, ResponseChannel};
use crate::database::games::{DBGameError, DBGameStatus, DatabaseGame};
use crate::game::{
    builder::GameBuilder,
    snapshot::{GameSnapshot, Snapshots},
};
use crate::utils;
use futures::future::AbortHandle;
use tokio::{
    sync::mpsc::{self, Receiver},
    sync::oneshot,
    task,
};
use warp::filters::ws::WebSocket;

use std::collections::HashMap;

// Upper bound used to pre-size the per-lobby maps of players and clients.
const MAX_NUM_PLAYERS: usize = 10;

/// A lobby for an individual game. The Lobby acts as an interface between the
/// Thavalon game instance, the DatabaseGame which keeps the game state in sync
/// with the database, and all players connected to the game.
pub struct Lobby {
    // One-shot channel used to tell the lobby manager this lobby is done;
    // `Option` because it is consumed (taken) when the game ends.
    game_over_channel: Option<oneshot::Sender<bool>>,
    // Database-backed game record that mirrors lobby membership and status.
    database_game: DatabaseGame,
    // Human-shareable code players use to find this lobby.
    friend_code: String,
    // Map of player IDs to their generated client IDs.
    player_ids_to_client_ids: HashMap<String, String>,
    // Map of client IDs to player ID and display name.
    client_ids_to_player_info: HashMap<String, (String, String)>,
    // Active client connections, keyed by client ID.
    clients: HashMap<String, PlayerClient>,
    // Current phase of the lobby (Lobby / Game / Finished).
    status: LobbyState,
    // Game builder used while still in the lobby phase; taken when the game starts.
    builder: Option<GameBuilder>,
    // Per-player game snapshots, populated once the game has started.
    snapshots: Option<Snapshots>,
    // Handle used to abort the spawned game task when the lobby ends.
    game_abort_handle: Option<AbortHandle>,
    // Sending half of this lobby's own command channel, cloned into each client.
    to_lobby: LobbyChannel,
}

impl Lobby {
    /// Creates a new lobby instance on a separate Tokio thread.
    ///
    /// # Arguments
    ///
    /// * `game_over_channel` A channel this lobby should publish to when it's finished running.
    ///
    /// # Returns
    ///
    /// * `LobbyChannel` A channel for sending messages to this lobby.
pub async fn new(game_over_channel: oneshot::Sender<bool>) -> LobbyChannel { let (tx, rx) = mpsc::channel(10); let to_lobby = tx.clone(); task::spawn(async move { let database_game = DatabaseGame::new().await.unwrap(); let friend_code = database_game.get_friend_code().clone(); let lobby = Lobby { game_over_channel: Some(game_over_channel), database_game, friend_code, player_ids_to_client_ids: HashMap::with_capacity(MAX_NUM_PLAYERS), client_ids_to_player_info: HashMap::with_capacity(MAX_NUM_PLAYERS), clients: HashMap::with_capacity(MAX_NUM_PLAYERS), status: LobbyState::Lobby, builder: Some(GameBuilder::new()), snapshots: None, game_abort_handle: None, to_lobby, }; lobby.listen(rx).await }); tx } /// Gets the friend code for the lobby in question. fn get_friend_code(&self) -> LobbyResponse { LobbyResponse::FriendCode(self.friend_code.clone()) } /// Adds a player to the lobby and all associated games. async fn add_player(&mut self, player_id: String, display_name: String) -> LobbyResponse { log::info!( "Attempting to add player {} to lobby {}.", player_id, self.friend_code ); // First, check if this player is already in game. If so, this is a reconnect. Otherwise, // this is a new player. if self.player_ids_to_client_ids.contains_key(&player_id) { return self.reconnect_player(&player_id, &display_name); } // Unlike reconnecting, new players may only join when the game is in Lobby. if self.status != LobbyState::Lobby { log::warn!( "Player {} attempted to join in-progress or finished game {}.", player_id, self.friend_code ); return LobbyResponse::Standard(Err(LobbyError::InvalidStateError)); } // The checks passed. Try adding the player into the game. if let Err(e) = self .database_game .add_player(player_id.clone(), display_name.clone()) .await { log::error!( "Error while adding player {} to game {}. 
{}", player_id, self.friend_code, e ); let return_err = match e { DBGameError::UpdateError => Err(LobbyError::DatabaseError), DBGameError::InvalidStateError => Err(LobbyError::InvalidStateError), DBGameError::DuplicateDisplayName => Err(LobbyError::DuplicateDisplayName), _ => { log::error!("An unknown error occurred in game {}.", self.friend_code); Err(LobbyError::UnknownError) } }; return LobbyResponse::Standard(return_err); } // Player added to the database game. Now add the player to the game instance. let (sender, receiver) = self .builder .as_mut() .unwrap() .add_player(display_name.clone()); // Generate a unique client ID for the player and update all our dictionaries. let client_id = utils::generate_random_string(32, false); let client = PlayerClient::new(client_id.clone(), self.to_lobby.clone(), sender, receiver); log::info!( "Successfully added player {} to game {} with unique client ID {}.", player_id, self.friend_code, client_id ); self.player_ids_to_client_ids .insert(player_id.clone(), client_id.clone()); self.client_ids_to_player_info .insert(client_id.clone(), (player_id, display_name)); self.clients.insert(client_id.clone(), client); LobbyResponse::JoinGame(Ok(client_id)) } /// Reconnect a player to an existing game in progress. Helper for add_player. fn reconnect_player(&self, player_id: &str, display_name: &str) -> LobbyResponse { log::info!( "Player {} is already in game {}, reconnecting.", player_id, self.friend_code ); // Reconnecting can only happen for games in progress, as lobbies just kick disconnected players // and finished games are finished. 
if self.status != LobbyState::Game { log::warn!( "Player {} attempted to join game not in progress {}.", player_id, self.friend_code ); return LobbyResponse::Standard(Err(LobbyError::InvalidStateError)); } let client_id = self .player_ids_to_client_ids .get(player_id) .unwrap() .clone(); let existing_display_name = &self.client_ids_to_player_info.get(&client_id).unwrap().1; if existing_display_name != &display_name { log::warn!( "Player {} attempted to reconnect with display name {}, but previously had display name {}.", player_id, display_name, existing_display_name); return LobbyResponse::Standard(Err(LobbyError::NameChangeOnReconnectError)); } return LobbyResponse::JoinGame(Ok(client_id)); } /// Removes a player from the lobby and game. async fn remove_player(&mut self, client_id: String) { log::info!( "Removing client {} from game {}.", client_id, self.friend_code ); let player_id = match self.client_ids_to_player_info.remove(&client_id) { Some((player_id, _)) => player_id, None => { log::warn!("No player ID found matching client ID {}.", client_id); return; } }; log::info!( "Found player {} for client {}. Removing player.", player_id, client_id ); let display_name = self.database_game.remove_player(&player_id).await.unwrap(); if display_name == None { log::warn!("No player display name found for player {}.", player_id); return; } let display_name = display_name.unwrap(); self.builder.as_mut().unwrap().remove_player(&display_name); self.player_ids_to_client_ids.remove(&player_id); self.clients.remove(&client_id); self.on_player_list_change().await; log::info!("Successfully removed player {} from the game.", player_id); } /// Updates a player's connections to and from the game and to and from the /// client. 
async fn update_player_connections( &mut self, client_id: String, ws: WebSocket, ) -> LobbyResponse { log::info!("Updating connections for client {}.", client_id); let client = match self.clients.get_mut(&client_id) { Some(client) => client, None => { log::warn!( "Client {} tried to connect to lobby {} but is not registered", client_id, self.friend_code ); let _ = ws.close().await; return LobbyResponse::Standard(Err(LobbyError::InvalidClientID)); } }; client.update_websocket(ws).await; self.on_player_list_change().await; LobbyResponse::Standard(Ok(())) } /// Sends a pong back to the client that requested it. async fn send_pong(&mut self, client_id: String) -> LobbyResponse { let client = match self.clients.get_mut(&client_id) { Some(client) => client, None => { log::error!("Client {} does not exist. Cannot send Pong.", client_id); return LobbyResponse::Standard(Err(LobbyError::InvalidClientID)); } }; let message = OutgoingMessage::Pong("Pong".to_string()); let message = serde_json::to_string(&message).unwrap(); client.send_message(message).await; LobbyResponse::Standard(Ok(())) } /// Handles a change to the player list, due to a player joining or leaving the game. async fn on_player_list_change(&mut self) { // Only broadcast to players if the game hasn't started yet. // TODO: Maybe a helpful message to players that someone has disconnected. // Would need to have a way to lookup name by the disconnected client ID. if self.status == LobbyState::Lobby { let current_players = self.builder.as_ref().unwrap().get_player_list(); self.broadcast_message(&OutgoingMessage::PlayerList(current_players.to_vec())) .await; } } // Handles dealing with a disconnected player. // If the lobby isn't in progress or done, a disconnect should remove the player. // Otherwise, nothing happens. 
async fn on_player_disconnect(&mut self, client_id: String) -> LobbyResponse { log::info!( "Client {} has disconnected from game {}.", client_id, self.friend_code ); // If we're in the lobby phase, a disconnect counts as leaving the game. if self.status == LobbyState::Lobby { self.remove_player(client_id).await; } LobbyResponse::Standard(Ok(())) } /// Starts the game and updates statuses async fn start_game(&mut self) -> LobbyResponse { // The only thing that can fail is updating the database. In this case, // the lobby is probably dead, so panic to blow up everything. if let Err(e) = self.database_game.start_game().await { log::error!("Error while starting game {}. {}", self.friend_code, e); panic!(); } let builder = self.builder.take().unwrap(); let (abort_handle, abort_registration) = AbortHandle::new_pair(); self.game_abort_handle = Some(abort_handle); match builder.start(self.to_lobby.clone(), abort_registration) { Ok((snapshots, _)) => { self.snapshots = Some(snapshots); // Tell the players the game is about to start to move to the game page. self.broadcast_message(&OutgoingMessage::LobbyState(LobbyState::Game)) .await; self.status = LobbyState::Game; LobbyResponse::None } Err(err) => { // Starting the game can fail, for example if there are too many players in the lobby // Since that isn't necessarily a fatal error, don't close the lobby log::error!("Error creating game {}: {}", self.friend_code, err); LobbyResponse::Standard(Err(LobbyError::InvalidStateError)) } } } // End the lobby, including ending the database game and aborting the game thread. async fn end_game(&mut self) -> LobbyResponse { self.status = LobbyState::Finished; self.database_game .end_game() .await .expect("Failed to end database game!"); // game_abort_handle is None if the game has not been started. In that case, do nothing to end it. 
if let Some(handle) = self.game_abort_handle.take() { handle.abort() } self.game_over_channel .take() .unwrap() .send(true) .expect("Failed to notify lobby manager!"); LobbyResponse::None } /// Sends the current player list to the client. async fn send_player_list(&mut self, client_id: String) -> LobbyResponse { let mut client = self.clients.get_mut(&client_id).unwrap(); let player_list = self.builder.as_ref().unwrap().get_player_list().to_vec(); let player_list = OutgoingMessage::PlayerList(player_list); let player_list = serde_json::to_string(&player_list).unwrap(); client.send_message(player_list).await; LobbyResponse::None } /// Sends the current state of the lobby to the client. async fn send_current_state(&mut self, client_id: String) -> LobbyResponse { let mut client = self.clients.get_mut(&client_id).unwrap(); let state = OutgoingMessage::LobbyState(self.status.clone()); let message = serde_json::to_string(&state).unwrap(); client.send_message(message).await; LobbyResponse::None } /// Gets all snapshots that have occurred for a given client ID. async fn get_snapshots(&mut self, client_id: String) -> LobbyResponse { let (_, display_name) = &self.client_ids_to_player_info[&client_id]; let snapshot = self .snapshots .as_ref() .unwrap() .get(display_name) .unwrap() .lock() .unwrap() .clone(); let mut client = self.clients.get_mut(&client_id).unwrap(); let message = OutgoingMessage::Snapshot(snapshot); let message = serde_json::to_string(&message).unwrap(); client.send_message(message).await; LobbyResponse::None } /// Handles a player focus change event by telling all clients that a player's /// visibility has changed. 
async fn player_focus_changed( &mut self, client_id: String, is_tabbed_out: bool, ) -> LobbyResponse { let (_, display_name) = &self.client_ids_to_player_info[&client_id]; let display_name = display_name.clone(); let message = OutgoingMessage::PlayerFocusChange { displayName: display_name, isTabbedOut: is_tabbed_out, }; self.broadcast_message(&message).await; LobbyResponse::None } /// Broadcasts a message to all clients in the lobby. async fn broadcast_message(&mut self, message: &OutgoingMessage) { let message = serde_json::to_string(&message).unwrap(); for client in self.clients.values_mut() { client.send_message(message.clone()).await; } } /// Begins a loop for the lobby to listen for incoming commands. /// This function should only return when the game ends or when a fatal /// error occurs. async fn listen(mut self, mut receiver: Receiver<(LobbyCommand, Option<ResponseChannel>)>) { while let Some(msg) = receiver.recv().await { if self.status == LobbyState::Finished { break; } let (msg_contents, result_channel) = msg; let results = match msg_contents { LobbyCommand::AddPlayer { player_id, display_name, } => self.add_player(player_id, display_name).await, LobbyCommand::GetFriendCode => self.get_friend_code(), LobbyCommand::IsClientRegistered { client_id } => { LobbyResponse::IsClientRegistered(self.clients.contains_key(&client_id)) } LobbyCommand::ConnectClientChannels { client_id, ws } => { self.update_player_connections(client_id, ws).await } LobbyCommand::Ping { client_id } => self.send_pong(client_id).await, LobbyCommand::GetLobbyState { client_id } => { self.send_current_state(client_id).await } LobbyCommand::StartGame => self.start_game().await, LobbyCommand::EndGame => self.end_game().await, LobbyCommand::PlayerDisconnect { client_id } => { self.on_player_disconnect(client_id).await } LobbyCommand::GetPlayerList { client_id } => self.send_player_list(client_id).await, LobbyCommand::GetSnapshots { client_id } => self.get_snapshots(client_id).await, 
LobbyCommand::PlayerFocusChange { client_id, is_tabbed_out, } => self.player_focus_changed(client_id, is_tabbed_out).await, }; if let Some(channel) = result_channel { channel .send(results) .expect("Could not send a result message back to caller."); } } } }
//! # Feature Table //! //! Data model and parsers for the DDBJ/ENA/GenBank Feature Table. //! //! See: http://www.insdc.org/files/feature_table.html use nom::{ IResult, branch::{ alt, }, bytes::complete::{ tag, take_while_m_n, take_while, take_while1, }, character::{ is_alphanumeric, }, combinator::{ cut, map, opt, verify, }, error::{ ParseError, }, multi::{ // many1, separated_list, }, sequence::{ tuple, }, }; use super::parser::Nommed; #[derive(Debug, PartialEq, Eq)] pub struct FeatureTable { features: Vec<FeatureRecord> } #[derive(Debug, PartialEq, Eq)] pub struct FeatureRecord { key: String, location: LocOp, qualifiers: Vec<Qualifier> } // impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for FeatureRecord { // fn nom(input: &'a str) -> IResult<&'a str, FeatureRecord, E> { // } // } /// An ID that's valid within the feature table. /// /// This is: /// * At least one letter /// * Upper case, lower case letters /// * Numbers 0..9 /// * Underscore (_) /// * Hyphen (-) /// * Single quote (') /// * Asterisk (*) /// The maximum length is 20 characters. #[derive(Debug, PartialEq, Eq)] pub struct FtString(String); // litle utility for ranges. // // Note: couldn't use 'a'..='b' because this is an iterator, so doesn't // implement `Copy`. 
#[derive(Clone, Copy)] struct Interval<T>(T, T); impl <T : PartialOrd> Interval<T> { fn contains(&self, e: &T) -> bool { self.0 <= *e && *e <= self.1 } } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for FtString { fn nom(input: &'a str) -> IResult<&'a str, FtString, E> { let uc = Interval('A', 'Z'); let lc = Interval('a', 'z'); let di = Interval('0', '9'); let misc = "_-'*"; let ft_char = { move |c: char| uc.contains(&c) || lc.contains(&c) || di.contains(&c) || misc.contains(c) }; let alpha = { move |c: char| uc.contains(&c) || lc.contains(&c) }; map( verify( take_while_m_n(1, 20, ft_char), move |s: &str| s.chars().any(alpha) ), |s: &str| FtString(s.to_string()) )(input) } } #[derive(Debug, PartialEq, Eq)] pub struct Qualifier { name: FtString, value: Option<QualifierValue> } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Qualifier { fn nom(input: &'a str) -> IResult<&'a str, Qualifier, E> { let parse_name = map(tuple((tag("/"), FtString::nom)), |(_, n)| n); let parse_value = map(tuple((tag("="), QualifierValue::nom)), |(_, v)| v); map( tuple((parse_name, opt(parse_value))), |(name, value)| Qualifier{ name, value } )(input) } } #[derive(Debug, PartialEq, Eq)] pub enum QualifierValue { QuotedText(String), VocabularyTerm(FtString), ReferenceNumber(u32), } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for QualifierValue{ fn nom(input: &'a str) -> IResult<&'a str, QualifierValue, E> { let parse_quoted_text = map( tuple((tag("\""), take_while(|c| c != '"'), tag("\""))), |(_, v, _): (&str, &str, &str)| QualifierValue::QuotedText(v.to_string())); let parse_vocabulary_term = map( FtString::nom, QualifierValue::VocabularyTerm); let parse_reference_number = map( tuple((tag("["), u32::nom, tag("]"))), |(_, d, _)| QualifierValue::ReferenceNumber(d)); alt(( parse_quoted_text, parse_vocabulary_term, parse_reference_number ))(input) } } // // // Location data model starts here // // Should really be in a sub-module I guess // // /// A point within a 
sequence, representing a specific nucleotide. Counts from 1. #[derive(Debug, PartialEq, Eq)] pub struct Point(u32); impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Point { fn nom(input: &'a str) -> IResult<&'a str, Point, E> { map(u32::nom, Point)(input) } } /// A position between two bases in a sequence. /// pub /// For example, 122^123. The locations must be consecutive. /// /// For example, 100^1 for a circular sequence of length 100. #[derive(Debug, PartialEq, Eq)] pub struct Between(u32, u32); impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Between { fn nom(input: &'a str) -> IResult<&'a str, Between, E> { map( tuple(( u32::nom, tag("^"), u32::nom )), |(from, _, to)| Between(from, to) )(input) } } #[derive(Debug, PartialEq, Eq)] pub enum Position { Point(Point), Between(Between) } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Position { fn nom(input: &'a str) -> IResult<&'a str, Position, E> { alt(( map(Between::nom, Position::Between), map(Point::nom, Position::Point) ))(input) } } #[derive(Debug, PartialEq, Eq)] pub enum Local { Point(Point), Between(Between), Within { from: Point, to: Point }, Span { from: Position, to: Position, before_from: bool, after_to: bool }, } impl Local { pub fn span(from: u32, to: u32) -> Local { Local::Span { from: Position::Point(Point(from)), to: Position::Point(Point(to)), before_from: false, after_to: false } } } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Local { fn nom(input: &'a str) -> IResult<&'a str, Local, E> { let parse_within = map( tuple((Point::nom, tag("."), Point::nom)), |(from, _, to)| Local::Within { from, to }); let parse_span = map( tuple(( opt(tag("<")), Position::nom, tag(".."), opt(tag(">")), Position::nom)), |(before_from, from, _, after_to, to)| Local::Span { from, to, before_from: before_from.is_some(), after_to: after_to.is_some() } ); alt(( map(Between::nom, Local::Between), parse_within, parse_span, map(Point::nom, Local::Point), // must do this last as 
it's a prefix of the others ))(input) } } #[derive(Debug, PartialEq, Eq)] pub enum Loc { Remote { within: String, at: Local }, Local(Local) } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for Loc { fn nom(input: &'a str) -> IResult<&'a str, Loc, E> { let parse_accession = take_while1(|c| { let b = c as u8; is_alphanumeric(b) || b == b'.' }); alt(( map( tuple((parse_accession, tag(":"), Local::nom)), |(within, _, at)| Loc::Remote { within: within.to_string(), at } ), map(Local::nom, Loc::Local) ))(input) } } #[derive(Debug, PartialEq, Eq)] pub enum LocOp { Loc(Loc), Complement(Box<LocOp>), Join(Vec<LocOp>), Order(Vec<LocOp>) } impl <'a, E : ParseError<&'a str>> Nommed<&'a str, E> for LocOp { fn nom(input: &'a str) -> IResult<&'a str, LocOp, E> { let parse_complement = map( tuple(( tag("complement("), cut(LocOp::nom), tag(")") )), |(_, loc, _)| loc ); let parse_join = map( tuple(( tag("join("), cut(separated_list(tag(","), LocOp::nom)), tag(")") )), |(_, locs, _)| locs ); let parse_order = map( tuple(( tag("order("), cut(separated_list(tag(","), LocOp::nom)), tag(")") )), |(_, locs, _)| locs ); alt(( map(Loc::nom, LocOp::Loc), map(parse_complement, |loc| LocOp::Complement(Box::new(loc))), map(parse_join, LocOp::Join), map(parse_order, LocOp::Order) ))(input) } } #[cfg(test)] mod tests { use super::*; use nom::error::{ convert_error, VerboseError, }; fn assert_nom_to_expected<'a, T>() -> impl Fn(&'a str, T) -> () where T: Nommed<&'a str, VerboseError<&'a str>> + std::fmt::Debug + PartialEq { move |input: &str, expected: T| { match T::nom(input) { Ok((rem, ref res)) if !rem.is_empty() => panic!("Non-empty remaining input {}, parsed out {:?}", rem, res), Ok((_, res)) => assert_eq!(res, expected, "Got result {:?} but expected {:?}", res, expected), Err(nom::Err::Error(e)) | Err(nom::Err::Failure(e)) => panic!("Problem: {}", convert_error(input, e)), e => panic!("Unknown error: {:?}", e) } } } // #[test] // fn test_parse_feature_record_from_spec() { // let expect 
= assert_nom_to_expected::<FeatureRecord>(); // expect( // r#" // source 1..1000 // /culture_collection="ATCC:11775" // /culture_collection="CECT:515" // "#, // FeatureRecord { // key: "source".to_string(), // location: LocOp::Loc(Loc::Local(Local::span(1, 1000))), // qualifiers: vec![] // } // ) // } #[test] fn test_parse_qualifiers_from_spec() { let expect = assert_nom_to_expected::<Qualifier>(); expect( "/pseudo", Qualifier { name: FtString("pseudo".to_string()), value: None }); expect( "/citation=[1]", Qualifier { name: FtString("citation".to_string()), value: Some(QualifierValue::ReferenceNumber(1)) }); expect( "/gene=\"arsC\"", Qualifier { name: FtString("gene".to_string()), value: Some(QualifierValue::QuotedText("arsC".to_string()))}); expect( "/rpt_type=DISPERSED", Qualifier { name: FtString("rpt_type".to_string()), value: Some(QualifierValue::VocabularyTerm(FtString("DISPERSED".to_string())))}); } #[test] fn test_parse_locations_from_spec() { let expect = assert_nom_to_expected::<LocOp>(); expect( "467", LocOp::Loc(Loc::Local(Local::Point(Point(467))))); expect( "340..565", LocOp::Loc(Loc::Local(Local::Span { from: Position::Point(Point(340)), to: Position::Point(Point(565)), before_from: false, after_to: false }))); expect( "<345..500", LocOp::Loc(Loc::Local(Local::Span { from: Position::Point(Point(345)), to: Position::Point(Point(500)), before_from: true, after_to: false }))); expect( "<1..888", LocOp::Loc(Loc::Local(Local::Span { from: Position::Point(Point(1)), to: Position::Point(Point(888)), before_from: true, after_to: false }))); expect( "1..>888", LocOp::Loc(Loc::Local(Local::Span { from: Position::Point(Point(1)), to: Position::Point(Point(888)), before_from: false, after_to: true }))); expect( "102.110", LocOp::Loc(Loc::Local(Local::Within { from: Point(102), to: Point(110) }))); expect( "123^124", LocOp::Loc(Loc::Local(Local::Between(Between(123, 124))))); expect( "join(12..78)", LocOp::Join(vec![ LocOp::Loc(Loc::Local(Local::span(12, 
78)))])); expect( "join(12..78,134..202)", LocOp::Join(vec![ LocOp::Loc(Loc::Local(Local::span(12, 78))), LocOp::Loc(Loc::Local(Local::span(134, 202)))])); expect( "complement(34..126)", LocOp::Complement(Box::new(LocOp::Loc(Loc::Local(Local::span(34, 126)))))); expect( "complement(join(2691..4571,4918..5163))", LocOp::Complement(Box::new(LocOp::Join(vec![ LocOp::Loc(Loc::Local(Local::span(2691, 4571))), LocOp::Loc(Loc::Local(Local::span(4918, 5163))) ])))); expect( "join(complement(4918..5163),complement(2691..4571))", LocOp::Join(vec![ LocOp::Complement(Box::new(LocOp::Loc(Loc::Local(Local::span(4918, 5163))))), LocOp::Complement(Box::new(LocOp::Loc(Loc::Local(Local::span(2691, 4571))))) ])); expect( "J00194.1:100..202", LocOp::Loc(Loc::Remote{ within: String::from("J00194.1"), at: Local::span(100, 202) })); expect( "join(1..100,J00194.1:100..202)", LocOp::Join(vec![ LocOp::Loc(Loc::Local(Local::span(1, 100))), LocOp::Loc(Loc::Remote { within: String::from("J00194.1"), at: Local::span(100, 202)}) ])); } }
use parquet::basic::LogicalType;
use parquet::file::reader::{FileReader, SerializedFileReader};
use parquet::record::Field;
use std::{collections::HashMap, convert::TryFrom, path::Path};

/// Column metadata and 64-bit-integer column values extracted from a Parquet file.
pub struct Data {
    /// Logical type of each column, keyed by column name.
    pub types: HashMap<String, LogicalType>,
    /// Values of each 64-bit-integer column, keyed by column name.
    pub data: HashMap<String, Vec<i64>>,
}

impl Data {
    /// Creates an empty `Data` with no columns.
    pub fn new() -> Self {
        Data {
            types: HashMap::new(),
            data: HashMap::new(),
        }
    }
}

/// Loads column logical types and 64-bit-integer column values from the
/// Parquet file at `path`.
///
/// Non-`i64` columns have their type recorded in `types` but contribute no
/// entries to `data`.
///
/// # Errors
///
/// Returns a `parquet` error if the file cannot be opened or read.
pub fn load(path: &Path) -> parquet::errors::Result<Data> {
    let reader = SerializedFileReader::try_from(path)?;
    let mut data = Data::new();

    // Record the logical type of every top-level schema field.
    let fields = reader.metadata().file_metadata().schema().get_fields();
    for field in fields {
        let info = field.get_basic_info();
        let name = info.name().to_string();
        data.types.insert(name, info.logical_type());
    }

    for row in reader.into_iter() {
        for (name, value) in row.get_column_iter() {
            // BUG FIX: the previous code used `entry(..).and_modify(..)` with no
            // `or_insert*`, so no column vector was ever created and `data.data`
            // always stayed empty. It also pushed the raw `Field` into `Vec<i64>`.
            // Only `Field::Long` carries an i64, so extract that variant here.
            if let Field::Long(v) = value {
                data.data.entry(name.to_string()).or_default().push(*v);
            }
        }
    }

    Ok(data)
}

#[cfg(test)]
mod tests {
    use crate::load::*;
    use std::path::Path;

    #[test]
    fn test_load() {
        let path = Path::new("./test/data/example.parquet");
        match load(path) {
            Ok(data) => {
                for (name, value) in data.data.iter() {
                    println!("{} : {:?}", name, value);
                }
                // BUG FIX: this branch previously ended with `assert!(false)`,
                // so the test failed even when loading succeeded. Assert that
                // the schema was actually read instead.
                assert!(!data.types.is_empty());
            }
            Err(e) => panic!("failed to load example parquet file: {}", e),
        }
    }
}
// Copyright (c) 2021, Roel Schut. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. use gdnative::api::CPUParticles2D; use crate::*; #[derive(NativeClass)] #[inherit(CPUParticles2D)] pub struct PlayerDestroyParticles {} #[methods] impl PlayerDestroyParticles { fn new(_owner: &CPUParticles2D) -> Self { PlayerDestroyParticles {} } #[export] fn _ready(&self, owner: &CPUParticles2D) { owner.set_one_shot(true); } #[allow(non_snake_case)] #[export] fn _on_Timer_timeout(&self, owner: &CPUParticles2D) { owner.get_tree() .map(|tree| unsafe { tree.assume_safe() }) .and_then(|tree| { tree.change_scene("res://ui/TitleScreen.tscn").ok() }) .expect("Failed to reload"); godot_print!("-----------"); } }
//! Architecture-specific paging: selects the x86 or x86_64 implementation at
//! compile time via `#[path]` and re-exports its contents.

pub use self::arch::*;

// 32-bit x86 paging implementation.
#[cfg(target_arch = "x86")]
#[path="x86/paging.rs"]
mod arch;

// 64-bit x86_64 paging implementation.
#[cfg(target_arch = "x86_64")]
#[path="x86_64/paging.rs"]
mod arch;
use assembly_fdb::{mem::Database, store}; use color_eyre::eyre::{self, WrapErr}; use mapr::Mmap; use std::{fs::File, io::BufWriter, path::PathBuf, time::Instant}; use structopt::StructOpt; #[derive(StructOpt)] /// Creates a FDB file with the same table structure as the input file, but without any rows struct Options { /// Input FDB file src: PathBuf, /// Destination for template file dest: PathBuf, } fn main() -> eyre::Result<()> { color_eyre::install()?; let opts = Options::from_args(); let start = Instant::now(); let src_file = File::open(&opts.src) .wrap_err_with(|| format!("Failed to open input file '{}'", opts.src.display()))?; let mmap = unsafe { Mmap::map(&src_file)? }; let buffer: &[u8] = &mmap; let dest_file = File::create(&opts.dest) .wrap_err_with(|| format!("Failed to crate output file '{}'", opts.dest.display()))?; let mut dest_out = BufWriter::new(dest_file); println!("Creating template, this may take a few milliseconds..."); let src_db = Database::new(buffer); let mut dest_db = store::Database::new(); for src_table in src_db.tables()?.iter() { let src_table = src_table?; let mut dest_table = store::Table::new(0); for src_column in src_table.column_iter() { dest_table.push_column(src_column.name_raw(), src_column.value_type()); } dest_db.push_table(src_table.name_raw(), dest_table); } dest_db .write(&mut dest_out) .wrap_err("Could not write output file")?; let duration = start.elapsed(); println!( "Finished in {}.{:#03}s", duration.as_secs(), duration.subsec_millis(), ); Ok(()) }
#![allow(missing_docs)]

// SDRAM device definitions, each gated behind the `sdram` feature and
// re-exported flat from this module.

// Alliance AS4C16M32MSA.
#[cfg(feature = "sdram")]
mod as4c16m32msa;
#[cfg(feature = "sdram")]
pub use as4c16m32msa::*;

// ISSI IS42S16400J.
#[cfg(feature = "sdram")]
mod is42s16400j;
#[cfg(feature = "sdram")]
pub use is42s16400j::*;

// ISSI IS42S32800G.
#[cfg(feature = "sdram")]
mod is42s32800g;
#[cfg(feature = "sdram")]
pub use is42s32800g::*;

// Micron MT48LC4M32B2.
#[cfg(feature = "sdram")]
mod mt48lc4m32b2;
#[cfg(feature = "sdram")]
pub use mt48lc4m32b2::*;
// Build script: embeds the Windows executable icon into release builds.
// Debug builds skip this to avoid requiring the winres toolchain step.
fn main() {
    #[cfg(all(windows, not(debug_assertions)))]
    {
        // Set application icon.
        let mut res = winres::WindowsResource::new();
        res.set_icon("resources/icon/infinite_minesweeper.ico");
        res.compile().unwrap();
    }
}
use super::{Result, Settings, KV};
use reqwest::Client;

// User-Agent sent with every request, built from Cargo package metadata,
// e.g. "name/1.2.3 (+https://homepage)".
const USER_AGENT: &str = concat!(
    env!("CARGO_PKG_NAME"),
    "/",
    env!("CARGO_PKG_VERSION"),
    " (+",
    env!("CARGO_PKG_HOMEPAGE"),
    ")"
);

/// PUTs each key/value pair to the Consul KV HTTP API at
/// `{consul_addr}/v1/kv{key}`, stopping at the first failed request.
///
/// # Errors
///
/// Returns an error if the client cannot be built, a request fails to send,
/// or Consul responds with a non-success status.
pub async fn update_consul(settings: &Settings, kvs: Vec<KV>) -> Result<()> {
    let client = Client::builder().user_agent(USER_AGENT).build()?;
    for kv in kvs {
        let mut req = client
            .request(
                reqwest::Method::PUT,
                // NOTE: kv.key is joined directly after "/v1/kv" — presumably
                // keys already start with "/"; verify against the callers.
                &format!("{}/v1/kv{}", settings.consul_addr, kv.key),
            )
            .body(kv.value.to_string());
        if let Some(token) = settings.consul_token.as_deref() {
            // Sends the ACL token as `Authorization: Bearer <token>`.
            // NOTE(review): older Consul versions expect the `X-Consul-Token`
            // header instead — confirm the target version accepts bearer auth.
            req = req.bearer_auth(token);
        }
        let req = req.build()?;
        // NOTE(review): these println!s of the request and response body look
        // like leftover debugging — consider log/tracing or removal.
        println!("{:?}", req);
        let resp = client.execute(req).await?;
        // Fail fast on non-2xx before consuming the body.
        resp.error_for_status_ref()?;
        println!("{}", resp.text().await?);
    }
    Ok(())
}
// Generated by `scripts/generate.js` use std::os::raw::c_char; use std::ops::Deref; use std::ptr; use std::cmp; use std::mem; use utils::c_bindings::*; use utils::vk_convert::*; use utils::vk_null::*; use utils::vk_ptr::*; use utils::vk_traits::*; use vulkan::vk::*; /// Wrapper for [VkAabbPositionsKHR](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkAabbPositionsKHR.html). #[derive(Debug, Clone)] pub struct VkAabbPositions { pub min_x: f32, pub min_y: f32, pub min_z: f32, pub max_x: f32, pub max_y: f32, pub max_z: f32, } #[doc(hidden)] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct RawVkAabbPositions { pub min_x: f32, pub min_y: f32, pub min_z: f32, pub max_x: f32, pub max_y: f32, pub max_z: f32, } impl VkWrappedType<RawVkAabbPositions> for VkAabbPositions { fn vk_to_raw(src: &VkAabbPositions, dst: &mut RawVkAabbPositions) { dst.min_x = src.min_x; dst.min_y = src.min_y; dst.min_z = src.min_z; dst.max_x = src.max_x; dst.max_y = src.max_y; dst.max_z = src.max_z; } } impl VkRawType<VkAabbPositions> for RawVkAabbPositions { fn vk_to_wrapped(src: &RawVkAabbPositions) -> VkAabbPositions { VkAabbPositions { min_x: src.min_x, min_y: src.min_y, min_z: src.min_z, max_x: src.max_x, max_y: src.max_y, max_z: src.max_z, } } } impl Default for VkAabbPositions { fn default() -> VkAabbPositions { VkAabbPositions { min_x: 0.0, min_y: 0.0, min_z: 0.0, max_x: 0.0, max_y: 0.0, max_z: 0.0, } } } impl VkSetup for VkAabbPositions { fn vk_setup(&mut self, fn_table: *mut VkFunctionTable) { } } impl VkFree for RawVkAabbPositions { fn vk_free(&self) { } }
use std::fmt;
use std::hash::{Hash, Hasher};
use std::mem::{MaybeUninit, size_of};
use std::ptr::{read, drop_in_place};
use std::ops::{Deref, DerefMut};
use std::slice::{self, from_raw_parts, from_raw_parts_mut};
use std::iter::FromIterator;

use crate::vector::Vector;
use crate::array::{Array, ArrayIndex};
use crate::util::PointerExt;

/// A vector backed by inline (stack) storage of a fixed-capacity array `T`.
///
/// Invariant: the first `len` slots of `array` are initialized; the rest are
/// uninitialized and must never be read or dropped.
pub struct ArrayVec<T: Array> {
    array: MaybeUninit<T>,
    len: T::Index,
}

impl<T: Array> fmt::Debug for ArrayVec<T> where T::Item: fmt::Debug {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Delegate to the slice of initialized elements (via Deref).
        fmt::Debug::fmt(&self[..], fmt)
    }
}

impl<T: Array> Hash for ArrayVec<T> where T::Item: Hash {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash only the initialized prefix, same as a slice would.
        self[..].hash(h)
    }
}

impl<T: Array> ArrayVec<T> {
    /// Extract the backing array if (and only if) the vector is full;
    /// otherwise return the vector unchanged.
    pub fn into_inner(mut self) -> Result<T, Self> {
        if self.len() == self.capacity() {
            // Zero the length first so Drop (which still runs on `self`)
            // does not drop the elements we are about to move out.
            self.len = Default::default();
            Ok(unsafe { read(self.array.as_ptr()) })
        } else {
            Err(self)
        }
    }
}

unsafe impl<T: Array> Vector for ArrayVec<T> {
    type Item = T::Item;

    /// Capacity is fixed by `T`; only validates the request fits.
    #[inline]
    fn with_capacity(cap: usize) -> Self {
        assert!(cap <= T::len());
        ArrayVec {
            array: MaybeUninit::uninit(),
            len: Default::default(),
        }
    }

    #[inline]
    fn capacity(&self) -> usize {
        T::len()
    }

    #[inline]
    fn reserve(&mut self, additional: usize) {
        self.reserve_exact(additional);
    }

    /// Cannot actually grow: merely asserts the remaining space suffices.
    #[inline]
    fn reserve_exact(&mut self, additional: usize) {
        assert!(T::len() - self.len() >= additional)
    }

    // Inline storage: shrinking is meaningless, so this is a no-op.
    #[inline]
    fn shrink_to_fit(&mut self) { }

    fn into_boxed_slice(self) -> Box<[T::Item]> {
        unimplemented!()
    }

    /// Caller must guarantee the first `len` slots are initialized.
    #[inline]
    unsafe fn set_len(&mut self, len: usize) {
        self.len = ArrayIndex::from_usize(len);
    }

    #[inline]
    fn len(&self) -> usize {
        ArrayIndex::to_usize(self.len)
    }

    #[inline]
    fn as_ptr(&self) -> *const T::Item {
        Array::as_uninit(&self.array).as_ptr() as *const _
    }

    #[inline]
    fn as_mut_ptr(&mut self) -> *mut T::Item {
        Array::as_uninit_mut(&mut self.array).as_mut_ptr() as *mut _
    }
}

impl<T: Array> Drop for ArrayVec<T> {
    fn drop(&mut self) {
        let len = ArrayIndex::to_usize(self.len);
        if len > 0 {
            let ptr = Array::as_uninit_mut(&mut self.array).as_mut_ptr() as *mut T::Item;
            unsafe {
                // Clear len first so a panicking element Drop cannot cause
                // a double drop of the remaining elements.
                self.set_len(0);
                for i in 0..len {
                    drop_in_place(ptr.uoffset(i));
                }
            }
        }
    }
}

impl<T: Array> From<T> for ArrayVec<T> {
    /// Build a full vector that owns `array`; length == capacity.
    fn from(array: T) -> Self {
        ArrayVec {
            array: MaybeUninit::new(array),
            len: ArrayIndex::from_usize(T::len()),
        }
    }
}

/// By-value iterator for `ArrayVec`. Elements in `start..end` are still
/// initialized and owned by the iterator; the inner vector's len is 0.
pub struct ArrayVecIntoIter<T: Array> {
    inner: ArrayVec<T>,
    start: T::Index,
    end: T::Index,
}

impl<T: Array> ArrayVecIntoIter<T> {
    fn new(mut inner: ArrayVec<T>) -> Self {
        let (start, end) = unsafe {
            let len = inner.len();
            // Transfer ownership of the elements to the iterator: the vec's
            // Drop must not run over them again.
            inner.set_len(0);
            (ArrayIndex::from_usize(0), ArrayIndex::from_usize(len))
        };
        ArrayVecIntoIter {
            inner,
            start,
            end,
        }
    }

    // Element stride in bytes; ZSTs use a stride of 1 so index arithmetic
    // still produces distinct (if meaningless) addresses.
    fn ptr_size() -> usize {
        let size = size_of::<<Self as Iterator>::Item>();
        if size == 0 { 1 } else { size }
    }

    fn ptr(&mut self, index: T::Index) -> *mut <Self as Iterator>::Item {
        (self.inner.as_mut_ptr() as usize + ArrayIndex::to_usize(index) * Self::ptr_size()) as *mut _
    }
}

impl<T: Array> Iterator for ArrayVecIntoIter<T> {
    type Item = <ArrayVec<T> as Vector>::Item;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start < self.end {
            unsafe {
                let start = self.start;
                self.start = ArrayIndex::from_usize(ArrayIndex::to_usize(start) + 1);
                // Move the element out; the advanced `start` records that
                // this slot is now logically uninitialized.
                Some(read(self.ptr(start)))
            }
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = ArrayIndex::to_usize(self.end) - ArrayIndex::to_usize(self.start);
        (len, Some(len))
    }

    #[inline]
    fn count(self) -> usize {
        self.size_hint().0
    }
}

impl<T: Array> DoubleEndedIterator for ArrayVecIntoIter<T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start < self.end {
            unsafe {
                self.end = ArrayIndex::from_usize(ArrayIndex::to_usize(self.end) - 1);
                let end = self.end;
                Some(read(self.ptr(end)))
            }
        } else {
            None
        }
    }
}

impl<T: Array> ExactSizeIterator for ArrayVecIntoIter<T> { }

impl<T: Array> IntoIterator for ArrayVec<T> {
    type Item = T::Item;
    type IntoIter = ArrayVecIntoIter<T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        ArrayVecIntoIter::new(self)
    }
}

// Comparable against anything that derefs to a slice (Vec, arrays, &[R], ...).
impl<T: Array, R, RHS: Deref<Target=[R]>> PartialEq<RHS> for ArrayVec<T> where T::Item: PartialEq<R> {
    fn eq(&self, other: &RHS) -> bool {
        **self == *other.deref()
    }
}

impl<T: Array> Eq for ArrayVec<T> where T::Item: Eq { }

impl<T: Array> Drop for ArrayVecIntoIter<T> {
    fn drop(&mut self) {
        // Drain the remaining elements so their destructors run; the inner
        // ArrayVec has len 0 and will not drop anything itself.
        for _ in self { }
    }
}

impl<T: Array> Clone for ArrayVec<T> where T::Item: Clone {
    fn clone(&self) -> Self {
        self.iter().cloned().collect()
    }
}

impl<T: Array> Default for ArrayVec<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: Array> Extend<T::Item> for ArrayVec<T> {
    fn extend<I: IntoIterator<Item=T::Item>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        // reserve() only asserts there is room (capacity is fixed).
        self.reserve(iter.size_hint().0);
        for v in iter {
            self.push(v);
        }
    }
}

impl<T: Array> FromIterator<T::Item> for ArrayVec<T> {
    fn from_iter<I: IntoIterator<Item=T::Item>>(iter: I) -> Self {
        let mut s = Self::new();
        s.extend(iter);
        s
    }
}

impl<'a, T: Array> IntoIterator for &'a ArrayVec<T> {
    type Item = &'a T::Item;
    type IntoIter = slice::Iter<'a, T::Item>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T: Array> IntoIterator for &'a mut ArrayVec<T> {
    type Item = &'a mut T::Item;
    type IntoIter = slice::IterMut<'a, T::Item>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T: Array> Deref for ArrayVec<T> {
    type Target = [T::Item];

    fn deref(&self) -> &Self::Target {
        // Safe per the struct invariant: the first `len` slots are initialized.
        unsafe { from_raw_parts(Array::as_uninit(&self.array).as_ptr() as *const _, ArrayIndex::to_usize(self.len)) }
    }
}

impl<T: Array> DerefMut for ArrayVec<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // Safe per the struct invariant: the first `len` slots are initialized.
        unsafe { from_raw_parts_mut(Array::as_uninit_mut(&mut self.array).as_mut_ptr() as *mut _, ArrayIndex::to_usize(self.len)) }
    }
}
use priv_prelude::*;
use future_utils;

#[derive(Clone, PartialEq)]
/// Represents an ethernet frame.
///
/// Stored as a single immutable `Bytes` buffer laid out as:
/// bytes 0..6 dest MAC, 6..12 source MAC, 12..14 ethertype (big-endian),
/// 14.. payload.
pub struct EtherFrame {
    buffer: Bytes,
}

impl fmt::Debug for EtherFrame {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let payload = self.payload();

        f
        .debug_struct("EtherFrame")
        .field("source_mac", &self.source_mac())
        .field("dest_mac", &self.dest_mac())
        // Known payloads are printed as their parsed packet type; unknown
        // ones fall back to the EtherPayload enum's own Debug.
        .field("payload", match payload {
            EtherPayload::Arp(ref arp) => arp,
            EtherPayload::Ipv4(ref ipv4) => ipv4,
            EtherPayload::Unknown { .. } => &payload,
        })
        .finish()
    }
}

/// The header fields of an ethernet packet.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct EtherFields {
    /// The frame's source MAC address.
    pub source_mac: MacAddr,
    /// The frame's destination MAC address.
    pub dest_mac: MacAddr,
}

#[derive(Clone, Debug)]
/// The payload of an ethernet frame.
pub enum EtherPayload {
    /// An ARP packet
    Arp(ArpPacket),
    /// An Ipv4 packet
    Ipv4(Ipv4Packet),
    /// A packet with an unrecognised protocol.
    Unknown {
        /// The ethertype of the protocol.
        ethertype: u16,
        /// The packet's payload data.
        payload: Bytes,
    },
}

/// The fields of the payload of an ethernet frame. Can be used along with `EtherFields` to
/// describe/construct an ethernet frame and its contents.
pub enum EtherPayloadFields {
    /// An ARP packet
    Arp {
        /// The ARP packet's fields.
        fields: ArpFields,
    },
    /// An Ipv4 packet
    Ipv4 {
        /// The Ipv4 packet's header fields.
        fields: Ipv4Fields,
        /// The Ipv4 packet's payload
        payload_fields: Ipv4PayloadFields,
    },
}

impl EtherPayloadFields {
    /// The total length of an ethernet frame with this payload
    pub fn total_frame_len(&self) -> usize {
        // 14 == ethernet header: 6 (dest MAC) + 6 (source MAC) + 2 (ethertype).
        14 + match *self {
            EtherPayloadFields::Arp { .. } => 28,
            EtherPayloadFields::Ipv4 { ref payload_fields, .. } => {
                payload_fields.total_packet_len()
            },
        }
    }
}

// Write dest then source MAC into the first 12 bytes of an ethernet header.
fn set_fields(buffer: &mut [u8], fields: EtherFields) {
    buffer[0..6].clone_from_slice(fields.dest_mac.as_bytes());
    buffer[6..12].clone_from_slice(fields.source_mac.as_bytes());
}

impl EtherFrame {
    /// Construct a new `EthernetFrame`. Using `new_from_fields_recursive` can avoid an extra
    /// allocation if you are also constructing the frame's payload.
    pub fn new_from_fields(
        fields: EtherFields,
        payload: EtherPayload,
    ) -> EtherFrame {
        let len = 14 + match payload {
            EtherPayload::Arp(ref arp) => arp.as_bytes().len(),
            EtherPayload::Ipv4(ref ipv4) => ipv4.as_bytes().len(),
            EtherPayload::Unknown { ref payload, .. } => payload.len(),
        };
        // Uninitialized buffer is safe here: every byte (0..12 MACs,
        // 12..14 ethertype, 14.. payload) is overwritten before freeze().
        let mut buffer = unsafe { BytesMut::uninit(len) };
        set_fields(&mut buffer, fields);

        let ethertype = match payload {
            EtherPayload::Arp(..) => 0x0806,
            EtherPayload::Ipv4(..) => 0x0800,
            EtherPayload::Unknown { ethertype, .. } => ethertype,
        };
        NetworkEndian::write_u16(&mut buffer[12..14], ethertype);

        buffer[14..].clone_from_slice(match payload {
            EtherPayload::Arp(ref arp) => arp.as_bytes(),
            EtherPayload::Ipv4(ref ipv4) => ipv4.as_bytes(),
            EtherPayload::Unknown { ref payload, .. } => payload,
        });

        EtherFrame {
            buffer: buffer.freeze(),
        }
    }

    /// Construct a new `EthernetFrame`.
    pub fn new_from_fields_recursive(
        fields: EtherFields,
        payload_fields: EtherPayloadFields,
    ) -> EtherFrame {
        let len = payload_fields.total_frame_len();
        // Uninitialized buffer: write_to_buffer fills the entire frame.
        let mut buffer = unsafe { BytesMut::uninit(len) };
        EtherFrame::write_to_buffer(&mut buffer, fields, payload_fields);
        EtherFrame {
            buffer: buffer.freeze(),
        }
    }

    /// Create a new ethernet frame, writing it to the given buffer.
    pub fn write_to_buffer(
        buffer: &mut [u8],
        fields: EtherFields,
        payload_fields: EtherPayloadFields,
    ) {
        let ethertype = match payload_fields {
            EtherPayloadFields::Arp { .. } => 0x0806,
            EtherPayloadFields::Ipv4 { .. } => 0x0800,
        };
        NetworkEndian::write_u16(&mut buffer[12..14], ethertype);

        set_fields(buffer, fields);

        // Payload serializers write everything after the 14-byte header.
        match payload_fields {
            EtherPayloadFields::Arp { fields } => {
                ArpPacket::write_to_buffer(&mut buffer[14..], fields);
            },
            EtherPayloadFields::Ipv4 { fields, payload_fields } => {
                Ipv4Packet::write_to_buffer(&mut buffer[14..], fields, payload_fields);
            },
        }
    }

    /// Get the fields of this ethernet frame.
    pub fn fields(&self) -> EtherFields {
        EtherFields {
            source_mac: self.source_mac(),
            dest_mac: self.dest_mac(),
        }
    }

    /// Set the fields of this ethernet frame.
    pub fn set_fields(&mut self, fields: EtherFields) {
        // Bytes is immutable; temporarily take it back as a BytesMut,
        // patch the header in place, then freeze again.
        let buffer = mem::replace(&mut self.buffer, Bytes::new());
        let mut buffer = BytesMut::from(buffer);

        set_fields(&mut buffer, fields);

        self.buffer = buffer.freeze();
    }

    /// Construct a new ethernet frame from the given buffer.
    pub fn from_bytes(buffer: Bytes) -> EtherFrame {
        EtherFrame {
            buffer,
        }
    }

    /// Get the frame's sender MAC address.
    pub fn source_mac(&self) -> MacAddr {
        MacAddr::from_bytes(&self.buffer[6..12])
    }

    /// Get the frame's destination MAC address.
    pub fn dest_mac(&self) -> MacAddr {
        MacAddr::from_bytes(&self.buffer[0..6])
    }

    /// Get the frame's payload
    pub fn payload(&self) -> EtherPayload {
        // Dispatch on the big-endian ethertype at offset 12.
        match NetworkEndian::read_u16(&self.buffer[12..14]) {
            0x0806 => EtherPayload::Arp(ArpPacket::from_bytes(self.buffer.slice_from(14))),
            0x0800 => EtherPayload::Ipv4(Ipv4Packet::from_bytes(self.buffer.slice_from(14))),
            p => EtherPayload::Unknown {
                ethertype: p,
                payload: self.buffer.slice_from(14),
            },
        }
    }

    /// Returns the underlying buffer.
    pub fn as_bytes(&self) -> &Bytes {
        &self.buffer
    }

    /// Consume the frame and return the underlying buffer
    pub fn into_bytes(self) -> Bytes {
        self.buffer
    }
}

/// An ethernet connection, used to send/receive ethernet frames to/from the plug at the other end.
#[derive(Debug)]
pub struct EtherPlug {
    /// The sender.
    pub tx: UnboundedSender<EtherFrame>,
    /// The receiver.
    pub rx: UnboundedReceiver<EtherFrame>,
}

impl EtherPlug {
    /// Construct an ethernet 'wire': a connected pair of plugs, each of which
    /// receives what the other sends.
    pub fn new_wire() -> (EtherPlug, EtherPlug) {
        let (a_tx, b_rx) = future_utils::mpsc::unbounded();
        let (b_tx, a_rx) = future_utils::mpsc::unbounded();
        let a = EtherPlug {
            tx: a_tx,
            rx: a_rx,
        };
        let b = EtherPlug {
            tx: b_tx,
            rx: b_rx,
        };
        (a, b)
    }
}
use std::convert::Infallible;
use std::io::Read;
use warp::filters::path::FullPath;
use warp::Filter;

/// Render the requested path with headless Chrome and return the PNG bytes.
///
/// Spawns `google-chrome --headless --screenshot` in a fresh temp directory,
/// pointing it back at this same server (port 9000) via the flag-prefixed
/// route, then reads the `screenshot.png` Chrome writes into its working
/// directory. Returns an empty Vec when the screenshot is missing/unreadable.
///
/// NOTE(review): the URL prefix embeds a percent-encoded flag-shaped token —
/// this looks like CTF challenge code. The `unwrap`s/`expect`s and the
/// `assert!` on the child's exit status will abort the task on any failure;
/// presumably acceptable for this deployment — confirm before reuse.
async fn request_screenshot(path: FullPath) -> Result<Vec<u8>, Infallible> {
    // Temp dir doubles as Chrome's cwd; screenshot.png lands here and the
    // whole directory is removed when `dir` drops.
    let dir = tempfile::tempdir().unwrap();
    let cmd = tokio::process::Command::new("google-chrome")
        .arg("--no-sandbox")
        .arg("--screenshot")
        .arg("--headless")
        .arg(
            "http://localhost:9000/chrome_gigem%7BlmA0_N1c3_10c4t1oN_buddy%7D".to_string()
                + path.as_str(),
        )
        .current_dir(dir.path())
        .spawn()
        .expect("Process failed to spawn. Dying fast.");
    // Wait for Chrome to finish; a non-zero exit status panics.
    assert!(cmd.await.expect("Child did not die?").success());
    let path = dir.path().join("screenshot.png");
    println!("{}", path.as_os_str().to_str().unwrap());
    // Best-effort read: any open/read failure degrades to an empty body.
    Ok(std::fs::File::open(path)
        .map(|mut file| {
            let mut data = Vec::new();
            if file.read_to_end(&mut data).is_err() {
                Vec::new()
            } else {
                data
            }
        })
        .unwrap_or(Vec::new()))
}

/// Entry point: serves two routes on 0.0.0.0:9000 —
/// 1. the flag-prefixed route Chrome is pointed at, which base64-decodes the
///    path segment and echoes it back as HTML, and
/// 2. a catch-all that screenshots any other requested path via Chrome.
#[tokio::main]
async fn main() {
    let chrome_available = warp::path!("chrome_gigem%7BlmA0_N1c3_10c4t1oN_buddy%7D" / String)
        .map(|b64: String| {
            println!("{}", b64);
            // assumes the segment is valid base64 of valid UTF-8 —
            // unwrap panics otherwise.
            let decoded = base64::decode(&b64).unwrap();
            String::from_utf8(decoded).unwrap()
        })
        .map(|html| warp::reply::html(html));
    let render = warp::path::full().and_then(request_screenshot);
    warp::serve(chrome_available.or(render))
        .run(([0, 0, 0, 0], 9000))
        .await;
}
// This file is part of libfringe, a low-level green threading library.
// Copyright (c) Nathan Zadoks <nathan@nathan7.eu>,
// whitequark <whitequark@whitequark.org>
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.

// To understand the machine code in this file, keep in mind these facts:
// * OR1K C ABI has a "red zone": 128 bytes under the top of the stack
//   that is defined to be unmolested by signal handlers, interrupts, etc.
//   Leaf functions can use the red zone without adjusting r1 or r2.
// * OR1K C ABI passes the first argument in r3. We also use r3 to pass a value
//   while swapping context; this is an arbitrary choice
//   (we clobber all registers and could use any of them) but this allows us
//   to reuse the swap function to perform the initial call.
use stack::Stack;

pub const STACK_ALIGNMENT: usize = 4;

/// Saved stack pointer of a suspended context; points into that context's stack.
#[derive(Debug, Clone, Copy)]
pub struct StackPointer(*mut usize);

/// Prepare the initial stack frame for a new context that will enter `f`.
/// Returns the stack pointer to hand to `swap` for the first switch into it.
pub unsafe fn init(stack: &Stack, f: unsafe extern "C" fn(usize) -> !) -> StackPointer {
  #[naked]
  unsafe extern "C" fn trampoline() {
    asm!(
      r#"
        # gdb has a hardcoded check that rejects backtraces where frame addresses
        # do not monotonically decrease. It is turned off if the function is called
        # "__morestack" and that is hardcoded. So, to make gdb backtraces match
        # the actual unwinder behavior, we call ourselves "__morestack" and mark
        # the symbol as local; it shouldn't interfere with anything.
        __morestack:
        .local __morestack

        # When a normal function is entered, the return address is pushed onto the stack,
        # and the first thing it does is pushing the frame pointer. The init trampoline
        # is not a normal function; on entry the stack pointer is one word above the place
        # where the return address should be, and right under it the return address as
        # well as the stack pointer are already pre-filled. So, simply move the stack
        # pointer where it belongs; and add CFI just like in any other function prologue.
        l.addi r1, r1, -8
        .cfi_def_cfa_offset 8
        .cfi_offset r2, -8
        l.or r2, r1, r0
        .cfi_def_cfa_register r2

        # Call f.
        l.lwz r9, 8(r1)
        l.jr r9
        l.nop

        .Lend:
        .size __morestack, .Lend-__morestack
      "#
      :
      :
      :
      : "volatile")
  }

  // Pre-decrement push: words grow downward from the stack base.
  unsafe fn push(sp: &mut StackPointer, val: usize) {
    sp.0 = sp.0.offset(-1);
    *sp.0 = val
  }

  let mut sp = StackPointer(stack.base() as *mut usize);
  push(&mut sp, f as usize); // function
  let rsp = sp;
  push(&mut sp, trampoline as usize); // trampoline / linked return address
  // Dummy initial frame pointer / linked frame pointer slot (r2 on OR1K;
  // the "%ebp" wording elsewhere in libfringe comes from the x86 port).
  push(&mut sp, 0xdead0bbb);
  rsp
}

#[inline(always)]
pub unsafe fn swap(arg: usize, old_sp: *mut StackPointer, new_sp: StackPointer,
                   new_stack: &Stack) -> usize {
  // Address of the topmost CFA stack slot.
  let new_cfa = (new_stack.base() as *mut usize).offset(-3);

  #[naked]
  unsafe extern "C" fn trampoline() {
    asm!(
      r#"
        # Remember the frame and instruction pointers in the callee, to link
        # the stacks together later.
        l.or r18, r2, r0
        l.or r19, r9, r0

        # Save instruction pointer of the old context.
        l.sw -4(r1), r9

        # Save frame pointer explicitly; the unwinder uses it to find CFA of
        # the caller, and so it has to have the correct value immediately after
        # the call instruction that invoked the trampoline.
        l.sw -8(r1), r2

        # Save stack pointer of the old context.
        l.sw 0(r4), r1

        # Load stack pointer of the new context.
        l.or r1, r0, r5

        # Load frame and instruction pointers of the new context.
        l.lwz r2, -8(r1)
        l.lwz r9, -4(r1)

        # Put the frame and instruction pointers into the trampoline stack frame,
        # making it appear to return right after the call instruction that invoked
        # this trampoline. This is done after the loads above, since on the very first
        # swap, the saved r2/r9 intentionally alias 0(r6)/4(r6).
        l.sw 0(r6), r18
        l.sw 4(r6), r19

        # Return into new context.
        l.jr r9
        l.nop
      "#
      :
      :
      :
      : "volatile")
  }

  let ret: usize;
  asm!(
    r#"
      # Push instruction pointer of the old context and switch to
      # the new context.
      l.jal ${1}
      l.nop
    "#
    : "={r3}" (ret)
    : "s" (trampoline as usize)
      "{r3}" (arg)
      "{r4}" (old_sp)
      "{r5}" (new_sp.0)
      "{r6}" (new_cfa)
    :/*"r0", "r1", "r2", "r3",*/"r4", "r5", "r6", "r7",
      "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
      "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
      "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
      "flags", "memory"
    : "volatile");
  ret
}
use crate::{ bit_set, bit_set::{word, Word}, }; /// A constant value of [Access::size](trait.Access.html#tymethod.size). pub trait Capacity { const CAPACITY: u64; } pub trait Access { /// The potential bit size of the container. /// /// But the container is not guaranteed to be able to reach that size: /// It can fail to allocate at any point before that size is reached. fn size(&self) -> u64; /// Check whether bit at `i` is enabled. fn access(&self, i: u64) -> bool; } macro_rules! impl_Access_for_words { ($($ty:ty),*) => ($( #[cfg_attr(feature = "cargo-clippy", allow(cast_lossless))] impl Capacity for $ty { const CAPACITY: u64 = std::mem::size_of::<$ty>() as u64 * 8; } impl Access for $ty { #[inline] fn size(&self) -> u64 { Self::CAPACITY } #[inline] fn access(&self, i: u64) -> bool { (*self & Self::bit(word::cast(i))) != Self::ZERO } } )*) } impl_Access_for_words!(u8, u16, u32, u64, u128, usize); impl<T: Capacity + Access> Access for [T] { fn size(&self) -> u64 { T::CAPACITY * word::cast::<usize, u64>(self.len()) } fn access(&self, i: u64) -> bool { let (index, offset) = bit_set::address(i, T::CAPACITY); self.get(index).map_or(false, |w| w.access(offset)) } }
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::process::Command;

/// Build script: asks `git describe --always --tags` for the current version
/// and writes it into `$OUT_DIR/gnvim_version.rs` as a `VERSION` constant for
/// inclusion at compile time. Panics (failing the build) if git is missing,
/// the command fails, or the output directory is unusable.
fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("gnvim_version.rs");
    let mut f = File::create(&dest_path).unwrap();

    let mut cmd = Command::new("git");
    cmd.arg("describe").arg("--always").arg("--tags");
    let version = cmd.output().unwrap();
    if !version.status.success() {
        panic!("Failed to get version from git");
    }

    // `git describe` terminates its output with a newline. Trimming trailing
    // whitespace (rather than pop()ping a single byte) also handles CRLF
    // output, which would otherwise leave a stray '\r' inside the constant.
    let version_str = String::from_utf8(version.stdout).unwrap();
    let version_str = version_str.trim_end();

    f.write_all(
        format!("const VERSION: &str = \"{}\";", version_str)
            .into_bytes()
            .as_slice(),
    )
    .unwrap();
}
// TODO use predicate instead of equijoin // This is an inner equijoin only for now. use super::{DbIterator}; use super::tuple::{Tuple}; #[derive(Debug, Clone)] pub struct NestedLoopsJoin<I> { // intitialize input_l: I, input_r: I, current_l: Option<Tuple>, col_l: usize, col_r: usize, } impl<I: DbIterator> NestedLoopsJoin<I> { pub fn new( mut input_l: I, input_r: I, col_l: usize, col_r: usize, ) -> Self { let current_l = input_l.next(); NestedLoopsJoin { input_l: input_l, input_r: input_r, current_l: current_l, col_l: col_l, col_r: col_r, } } } impl <I: DbIterator> DbIterator for NestedLoopsJoin<I> where Self: Sized, { fn next(&mut self) -> Option<Tuple> { while let Some(mut tuple_r) = self.input_r.next() { let current_l = self.current_l.clone().unwrap(); if current_l[self.col_l] == tuple_r[self.col_r] { return Some(current_l.append(&mut tuple_r)); } } self.input_r.reset(); self.current_l = self.input_l.next(); if self.current_l.is_none() { return None } else { self.next() } } fn reset(&mut self) { self.input_l.reset(); self.input_r.reset(); } }
//! Message Module
//!
//! Unit tests for (de)serializing Steam protocol messages and their
//! standard/extended headers from captured packet bytes.
//!
//! Check link below for more info:
//! https://github.com/ValvePython/steam/blob/09f4f51a287ee7aec1f159c7e8098add5f14bed3/steam/core/msg/headers.py

#[cfg(test)]
mod tests {
    use protobuf::Message;
    use steam_language_gen::{MessageHeader, MessageHeaderExt, SerializableBytes, DeserializableBytes};
    use steam_language_gen::generated::enums::{EMsg, EUniverse};
    use steam_language_gen::generated::headers::{ExtendedMessageHeader, StandardMessageHeader};
    use steam_language_gen::generated::messages::{MsgChannelEncryptRequest, MsgClientChatEnter};

    /// ChannelEncryptRequest
    /// This has standard header
    fn get_channel_encrypt_request() -> Vec<u8> {
        // Captured on-connection packet bytes (EMsg id + standard header + body).
        let on_connection_packet = vec![
            23, 5, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
            255, 255, 255, 255, 1, 0, 0, 0, 1, 0, 0, 0, 66, 126, 251, 245, 88, 122,
            243, 123, 102, 163, 11, 54, 151, 145, 31, 54,
        ];
        on_connection_packet
    }

    /// ClientChatEnter, EMsg(807)
    fn get_client_chat_enter() -> Vec<u8> {
        // Captured struct-message bytes with extended header and
        // MessageObject payload entries.
        let struct_msg_data = vec![
            0x27, 0x03, 0x00, 0x00, 0x24, 0x02, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
            0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xEF,
            0xAC, 0x15, 0x89, 0x00, 0x01, 0x00, 0x10, 0x01, 0x8E, 0x56, 0x11, 0x00,
            0xBC, 0x4E, 0x2A, 0x00, 0x00, 0x00, 0x88, 0x01, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0xBC, 0x4E, 0x2A, 0x00,
            0x00, 0x00, 0x70, 0x01, 0xBC, 0x4E, 0x2A, 0x00, 0x00, 0x00, 0x70, 0x01,
            0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x53, 0x61, 0x78,
            0x74, 0x6F, 0x6E, 0x20, 0x48, 0x65, 0x6C, 0x6C, 0x00, 0x00, 0x4D, 0x65,
            0x73, 0x73, 0x61, 0x67, 0x65, 0x4F, 0x62, 0x6A, 0x65, 0x63, 0x74, 0x00,
            0x07, 0x73, 0x74, 0x65, 0x61, 0x6D, 0x69, 0x64, 0x00, 0xAC, 0x15, 0x89,
            0x00, 0x01, 0x00, 0x10, 0x01, 0x02, 0x70, 0x65, 0x72, 0x6D, 0x69, 0x73,
            0x73, 0x69, 0x6F, 0x6E, 0x73, 0x00, 0x7B, 0x03, 0x00, 0x00, 0x02, 0x44,
            0x65, 0x74, 0x61, 0x69, 0x6C, 0x73, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08,
            0x08, 0x00, 0x4D, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4F, 0x62, 0x6A,
            0x65, 0x63, 0x74, 0x00, 0x07, 0x73, 0x74, 0x65, 0x61, 0x6D, 0x69, 0x64,
            0x00, 0x00, 0x28, 0x90, 0x00, 0x01, 0x00, 0x10, 0x01, 0x02, 0x70, 0x65,
            0x72, 0x6D, 0x69, 0x73, 0x73, 0x69, 0x6F, 0x6E, 0x73, 0x00, 0x08, 0x00,
            0x00, 0x00, 0x02, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6C, 0x73, 0x00, 0x04,
            0x00, 0x00, 0x00, 0x08, 0x08, 0x00, 0x4D, 0x65, 0x73, 0x73, 0x61, 0x67,
            0x65, 0x4F, 0x62, 0x6A, 0x65, 0x63, 0x74, 0x00, 0x07, 0x73, 0x74, 0x65,
            0x61, 0x6D, 0x69, 0x64, 0x00, 0xB0, 0xDC, 0x5B, 0x04, 0x01, 0x00, 0x10,
            0x01, 0x02, 0x70, 0x65, 0x72, 0x6D, 0x69, 0x73, 0x73, 0x69, 0x6F, 0x6E,
            0x73, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x44, 0x65, 0x74, 0x61, 0x69,
            0x6C, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08, 0x00, 0x4D, 0x65,
            0x73, 0x73, 0x61, 0x67, 0x65, 0x4F, 0x62, 0x6A, 0x65, 0x63, 0x74, 0x00,
            0x07, 0x73, 0x74, 0x65, 0x61, 0x6D, 0x69, 0x64, 0x00, 0x39, 0xCB, 0x77,
            0x05, 0x01, 0x00, 0x10, 0x01, 0x02, 0x70, 0x65, 0x72, 0x6D, 0x69, 0x73,
            0x73, 0x69, 0x6F, 0x6E, 0x73, 0x00, 0x1A, 0x03, 0x00, 0x00, 0x02, 0x44,
            0x65, 0x74, 0x61, 0x69, 0x6C, 0x73, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08,
            0x08, 0xE8, 0x03, 0x00, 0x00,
        ];
        struct_msg_data
    }

    #[test]
    fn deserialize_client_chat_enter() {
        let message = get_client_chat_enter();

        // Identify the message kind from the leading EMsg id, strip it,
        // then split the remainder into (extended header, body).
        let emsg = EMsg::from_raw_message(&message).unwrap();
        let message_complete = EMsg::strip_message(&message);
        let (header, message): (&[u8], &[u8]) =
            ExtendedMessageHeader::split_from_bytes(message_complete);

        assert_eq!(EMsg::ClientChatEnter, emsg);

        let msg = MsgClientChatEnter::from_bytes(message);
        println!(": {:#?}", msg);
    }

    #[test]
    fn deserialize_msg_encrypt_request() {
        let message = b"\x17\x05\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\
        \xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00".to_vec();

        let emsg = EMsg::from_raw_message(&message).unwrap();
        let message_complete = EMsg::strip_message(&message);
        let (header, message): (&[u8], &[u8]) =
            StandardMessageHeader::split_from_bytes(message_complete);
        let msgheader_default: StandardMessageHeader = StandardMessageHeader::new();

        assert_eq!(EMsg::ChannelEncryptRequest, emsg);
        // A freshly-constructed standard header round-trips to/from bytes.
        assert_eq!(msgheader_default.to_bytes(), header);
        assert_eq!(StandardMessageHeader::from_bytes(header), msgheader_default);

        let msg = MsgChannelEncryptRequest { protocol_version: 1, universe: EUniverse::Public };
        assert_eq!(MsgChannelEncryptRequest::from_bytes(message), msg);
    }
}

// if message is proto: emsg_enum, raw_data from packet
// new MessageHeaderProtobuf
// steammessages_base_pb2. CMSGProtobufHeader
// if not proto: emsg_enum, raw_data from packet -> extender
// new ExtendedMessageHeader
//////////////////////////////////////////////////
// General notes                                //
// - Rust is an expression-based language       //
//////////////////////////////////////////////////

// Functions
//
// - Every parameter must have a declared type
// - A function body is a series of statements, optionally
//   finishing with an expression
// - Expressions DO NOT end with semicolons;
//   only statements end with semicolons

// Demonstrates typed parameters; just prints both values.
fn another_function(a: i32, b: i32) {
    println!("x is {}, y is {}", a, b);
}

// Demonstrates an implicit return: the final expression
// (no semicolon!) is the function's value — always 6 here.
fn expression_example() -> i32 {
    let base = 5;
    base + 1
}

fn main() {
    another_function(5, 6);

    let x = expression_example();
    println!("x = {}", x);

    // Shadowing: this new `x` replaces the one above.
    let x = 5;

    // A block is an expression too — its trailing expression (4) becomes `y`.
    // The `inner` binding is scoped to the block and does not leak out.
    let y = {
        let inner = 3;
        inner + 1
    };
    println!("y = {}", y);
}
use std::fs::File;
use std::io::Read;

/// Line-oriented parser for a Hack VM (`.vm`) source file.
///
/// Holds the file's lines plus a cursor (`idx`); `advance` moves the cursor
/// and `command_type` classifies the current line.
#[derive(Debug)]
pub struct Parser {
    name: String,       // class name: the file name without its ".vm" suffix
    lines: Vec<String>, // raw source lines (comments/blank lines included)
    idx: usize,         // cursor into `lines`
}

impl Parser {
    /// Open `path + name`, read it fully, and split it into lines.
    ///
    /// Panics if the file cannot be opened or read, or if `name` does not
    /// end in ".vm".
    pub fn new(path: &str, name: &str) -> Self {
        let file_name = format!("{}{}", path, name);
        let mut file = File::open(file_name).expect("File not found!");
        let class_name = name.strip_suffix(".vm").unwrap();
        println!("Class name -> {}", class_name);

        let mut strings = String::new();
        file.read_to_string(&mut strings)
            .expect("Something went wrong reading the file!");

        let lines = strings
            .split('\n')
            .map(|str| str.to_string())
            .collect::<Vec<String>>();

        Parser {
            name: class_name.to_string(),
            lines,
            idx: 0,
        }
    }

    /// True while the cursor still points at a line (including blank ones,
    /// which classify as `NotCommand`).
    pub fn has_more_commands(&self) -> bool {
        self.lines.get(self.idx).is_some()
    }

    /// Move the cursor to the next line.
    pub fn advance(&mut self) {
        self.idx += 1;
    }

    /// Classify the current line. Panics if the cursor is past the end
    /// (guard with `has_more_commands`).
    pub fn command_type(&self) -> CommandType {
        let line = self.lines.get(self.idx).unwrap();
        // Strip an inline `//` comment, then tokenize on whitespace.
        let line = line.split("//").collect::<Vec<&str>>();
        let words = line[0].split_whitespace().collect::<Vec<&str>>();
        CommandType::new(self.name.clone(), words)
    }
}

/// The nine VM arithmetic/logic commands.
#[derive(Debug, Eq, PartialEq)]
pub enum Arithmetic {
    Add,
    Sub,
    And,
    Or,
    Not,
    Neg,
    Eq,
    Lt,
    Gt,
}

/// Parsed VM command; `NotCommand` covers blanks, comments and malformed lines.
#[derive(Debug, Eq, PartialEq)]
pub enum CommandType {
    CArithmetic(Arithmetic),
    CPush(String, String, usize), // (class, segment, index)
    CPop(String, String, usize),  // (class, segment, index)
    CLabel(String),
    CGoto(String),
    CIf(String),
    CFunction(String, usize), // (name, n_locals)
    CReturn,
    CCall(String, usize), // (name, n_args)
    NotCommand,
}

impl CommandType {
    /// Classify a tokenized VM command, dispatching on token count.
    ///
    /// Malformed input (unknown keyword, wrong arity, or a non-numeric index
    /// argument) yields `NotCommand` instead of panicking — previously a line
    /// such as `push constant x` would crash on `parse().unwrap()`.
    fn new(class_name: String, words: Vec<&str>) -> Self {
        println!("PARSER {:?}", words);
        match words.len() {
            1 => {
                let command = words[0];
                match command {
                    "add" => CommandType::CArithmetic(Arithmetic::Add),
                    "sub" => CommandType::CArithmetic(Arithmetic::Sub),
                    "and" => CommandType::CArithmetic(Arithmetic::And),
                    "or" => CommandType::CArithmetic(Arithmetic::Or),
                    "not" => CommandType::CArithmetic(Arithmetic::Not),
                    "neg" => CommandType::CArithmetic(Arithmetic::Neg),
                    "eq" => CommandType::CArithmetic(Arithmetic::Eq),
                    "lt" => CommandType::CArithmetic(Arithmetic::Lt),
                    "gt" => CommandType::CArithmetic(Arithmetic::Gt),
                    "return" => CommandType::CReturn,
                    _ => CommandType::NotCommand,
                }
            },
            2 => {
                let command = words[0];
                let arg = words[1].to_string();
                match command {
                    "label" => CommandType::CLabel(arg),
                    "goto" => CommandType::CGoto(arg),
                    "if-goto" => CommandType::CIf(arg),
                    _ => CommandType::NotCommand,
                }
            },
            3 => {
                let command = words[0];
                let arg1 = words[1].to_string();
                // Non-numeric index → NotCommand rather than a panic.
                let arg2: usize = match words[2].parse() {
                    Ok(n) => n,
                    Err(_) => return CommandType::NotCommand,
                };
                match command {
                    "push" => CommandType::CPush(class_name, arg1, arg2),
                    "pop" => CommandType::CPop(class_name, arg1, arg2),
                    "function" => CommandType::CFunction(arg1, arg2),
                    "call" => CommandType::CCall(arg1, arg2),
                    _ => CommandType::NotCommand,
                }
            },
            _ => CommandType::NotCommand,
        }
    }
}
use hilbert_qexp::diff_op::rankin_cohen;
use hilbert_qexp::elements::{div_mut, relations_over_q, HmfGen};
use hilbert_qexp::bignum::Sqrt2Q;
use hilbert_qexp::bignum::RealQuadElement;
use parallel_wt::*;
use mixed_wt::*;
use flint::fmpq::Fmpq;
use std::fs::File;
use serde;
use serde_pickle;
use std::io::Write;

/// Corresponds to s2^a * s4^b * s6^c where (a, b, c) = idx.
#[derive(Clone, Debug)]
pub struct MonomFormal {
    pub idx: (usize, usize, usize),
}

// Computes the actual q-expansion (to precision `prec`) of the monomial
// s2^e2 * s4^e4 * s6^e6 in the generators s2, s4, s6.
fn monom_s2_s4_s6(prec: usize, expts: (usize, usize, usize)) -> HmfGen<Fmpq> {
    let mut tmp = HmfGen::new(2, prec);
    let mut res = HmfGen::new(2, prec);
    let s2 = s2_form(prec);
    let s4 = s4_form(prec);
    let s6 = s6_form(prec);
    let (e2, e4, e6) = expts;
    // res = s2^e2, then multiply in s6^e6 and s4^e4 (skipping zero exponents).
    res.pow_mut(&s2, e2);
    tmp.pow_mut(&s6, e6);
    if e6 > 0 {
        res *= &tmp;
    }
    tmp.pow_mut(&s4, e4);
    if e4 > 0 {
        res *= &tmp;
    }
    res
}

impl MonomFormal {
    /// Evaluates this formal monomial into a concrete form of precision `prec`.
    pub fn to_form(&self, prec: usize) -> HmfGen<Fmpq> {
        monom_s2_s4_s6(prec, self.idx)
    }
}

/// Return a vector of (a, b, c) s.t. 2*a + 4*b + 6*c = k.
pub fn tpls_of_wt(k: usize) -> Vec<(usize, usize, usize)> {
    let mut res = Vec::new();
    let c_max = k / 6;
    // Enumerate exponents of s6 and s4 from high to low; the remaining weight
    // must be even to be absorbed by a power of s2.
    for c in (0..(c_max + 1)).rev() {
        let b_max = (k - 6 * c) / 4;
        for b in (0..(b_max + 1)).rev() {
            let rem = k - (6 * c + 4 * b);
            if is_even!(rem) {
                res.push((rem >> 1, b, c));
            }
        }
    }
    res
}

/// All formal monomials in s2, s4, s6 of total weight `k`.
pub fn monoms_of_s2_s4_s6(k: usize) -> Vec<MonomFormal> {
    tpls_of_wt(k)
        .into_iter()
        .map(|x| MonomFormal { idx: x })
        .collect()
}

/// Tries to express `f` as a rational polynomial in s2, s4, s6.
///
/// Returns the (monomial, coefficient) pairs when a unique linear relation
/// between `f` and the weight-matching monomials exists, `None` otherwise.
pub fn r_elt_as_pol_over_q(f: &HmfGen<Fmpq>) -> Option<Vec<(MonomFormal, Fmpq)>> {
    let prec = f.prec;
    let monoms = monoms_of_s2_s4_s6(f.weight.unwrap().0);
    let mut forms: Vec<_> = monoms.iter().map(|x| x.to_form(prec)).collect();
    // Prepend f itself so a relation has the shape c0*f + sum(ci*monom_i) = 0.
    forms.insert(0, f.clone());
    let rels = relations_over_q(&forms);
    if rels.len() == 1 && !rels[0][0].is_zero() {
        // Solve for f: coefficient of monom_i is -ci/c0.
        let cfs: Vec<_> = rels[0]
            .iter()
            .skip(1)
            .map(|x| -&(x / &rels[0][0]))
            .collect();
        Some(monoms.into_iter().zip(cfs.into_iter()).collect())
    } else {
        None
    }
}

/// Three Rankin-Cohen brackets of the even generators (the "a1 = 0" case).
pub fn three_forms_a1_0(prec: usize) -> Vec<HmfGen<Sqrt2Q>> {
    let s2 = Into::<HmfGen<Sqrt2Q>>::into(&s2_form(prec));
    let s4 = Into::<HmfGen<Sqrt2Q>>::into(&s4_form(prec));
    let s6 = Into::<HmfGen<Sqrt2Q>>::into(&s6_form(prec));
    vec![
        rankin_cohen(1, &s2, &s4).unwrap(),
        rankin_cohen(1, &s2, &s6).unwrap(),
        rankin_cohen(1, &s4, &s6).unwrap(),
    ]
}

/// Three Rankin-Cohen brackets against the odd generator s5 (the "a1 = 1" case).
pub fn three_forms_a1_1(prec: usize) -> Vec<HmfGen<Sqrt2Q>> {
    let s2 = Into::<HmfGen<Sqrt2Q>>::into(&s2_form(prec));
    let s4 = Into::<HmfGen<Sqrt2Q>>::into(&s4_form(prec));
    let s6 = Into::<HmfGen<Sqrt2Q>>::into(&s6_form(prec));
    let s5 = Into::<HmfGen<Sqrt2Q>>::into(&s5_form(prec));
    vec![
        rankin_cohen(1, &s2, &s5).unwrap(),
        rankin_cohen(1, &s4, &s5).unwrap(),
        rankin_cohen(1, &s6, &s5).unwrap(),
    ]
}

/// Exact division of `f` by the odd generator s5.
/// NOTE(review): assumes `f` is divisible by s5 — confirm callers guarantee this.
pub fn div_by_s5(f: &HmfGen<Fmpq>) -> HmfGen<Fmpq> {
    let s5 = s5_form(f.prec);
    let mut res = HmfGen::new(2, f.prec);
    div_mut(&mut res, f, &s5);
    res
}

type Tuple3 = (usize, usize, usize);

/// Dispatches to the odd or even enumeration of Rankin-Cohen brackets.
/// Returns up to `len` non-zero forms along with the monomial exponents of
/// the two bracket operands.
pub fn mixed_weight_forms(
    df: usize,
    is_odd: bool,
    prec: usize,
    len: usize,
) -> Vec<(HmfGen<Sqrt2Q>, Tuple3, Tuple3)> {
    if is_odd {
        mixed_weight_forms_odd(df, prec, len)
    } else {
        mixed_weight_forms_even(df, prec, len)
    }
}

/// Enumerates non-zero degree-`df` Rankin-Cohen brackets of pairs of even
/// monomials (weights 2, 3, 4, ...), stopping after `len` forms.
pub fn mixed_weight_forms_even(
    df: usize,
    prec: usize,
    len: usize,
) -> Vec<(HmfGen<Sqrt2Q>, Tuple3, Tuple3)> {
    let mut num = 0;
    let mut res = Vec::with_capacity(len);
    // Pairs (m, n) with n no later than m in the enumeration (take(i + 1))
    // so each unordered pair is visited once.
    for (i, m) in (2..).flat_map(monoms_of_s2_s4_s6).enumerate() {
        for n in (2..).flat_map(monoms_of_s2_s4_s6).take(i + 1) {
            if num >= len {
                return res;
            }
            let f_m = m.to_form(prec);
            let f_n = n.to_form(prec);
            if let Ok(f) = rankin_cohen(df, &From::from(&f_m), &From::from(&f_n)) {
                // Brackets can vanish identically; only keep non-zero ones.
                if !f.is_zero() {
                    num += 1;
                    res.push((f, m.idx, n.idx));
                }
            } else {
                panic!();
            }
        }
    }
    res
}

/// Enumerates non-zero degree-`df` Rankin-Cohen brackets of s5 against even
/// monomials, stopping after `len` forms. `(0, 0, 0)` marks the s5 slot.
pub fn mixed_weight_forms_odd(
    df: usize,
    prec: usize,
    len: usize,
) -> Vec<(HmfGen<Sqrt2Q>, Tuple3, Tuple3)> {
    let mut num = 0;
    let mut res = Vec::with_capacity(len);
    let s5 = s5_form(prec);
    for n in (2..).flat_map(monoms_of_s2_s4_s6) {
        if num >= len {
            return res;
        }
        let f_n = n.to_form(prec);
        if let Ok(f) = rankin_cohen(df, &From::from(&s5.clone()), &From::from(&f_n)) {
            if !f.is_zero() {
                num += 1;
                res.push((f, (0, 0, 0), n.idx));
            }
        } else {
            panic!();
        }
    }
    res
}

// A parallel-weight form expressed as a rational polynomial in s2, s4, s6.
pub type ParaWtPolyQ = Vec<(MonomFormal, Fmpq)>;

/// If the bracket of f and g is of odd weight, this returns a modular form
/// divided by s5.
pub fn bracket_inner_prod_as_pol_over_q(
    f: &HmfGen<Sqrt2Q>,
    g: &HmfGen<Sqrt2Q>,
) -> Option<ParaWtPolyQ> {
    let h = bracket_inner_prod(f, g);
    if h.is_zero() {
        return Some(vec![]);
    }
    // Only brackets with vanishing sqrt(2)-part are representable over Q.
    if !h.rt_part().is_zero() {
        None
    } else {
        let h_ir = h.ir_part();
        if is_even!(h_ir.weight.unwrap().0) {
            r_elt_as_pol_over_q(&h_ir)
        } else {
            // Odd weight: divide out s5 first, then express over Q.
            let f = div_by_s5(&h_ir);
            r_elt_as_pol_over_q(&f)
        }
    }
}

/// All pairwise brackets (i < j) of `forms`, each expressed as a polynomial.
/// Panics (via unwrap) if any bracket is not expressible.
pub fn brackets(forms: &[HmfGen<Sqrt2Q>]) -> Vec<ParaWtPolyQ> {
    forms
        .iter()
        .enumerate()
        .flat_map(|(i, f)| {
            forms
                .iter()
                .skip(i + 1)
                .map(|g| bracket_inner_prod_as_pol_over_q(f, g).unwrap())
                .collect::<Vec<_>>()
        })
        .collect()
}

/// Serializes `a` to `f` in Python-pickle format (protocol-2 flag false).
/// NOTE(review): uses `write()`, not `write_all()` — a short write would
/// silently truncate; confirm this is acceptable for these file sizes.
pub fn save_as_pickle<T>(a: T, f: &mut File)
where
    T: serde::Serialize,
{
    let v = serde_pickle::to_vec(&a, false).unwrap();
    f.write(&v).unwrap();
}

/// For each (i, parity) candidate, computes up to `len` mixed-weight forms
/// and pickles their monomials, weights, and pairwise brackets under ./data/.
pub fn save_brackets_for_candidates<'a, I>(vals_iter: I, len: usize)
where
    I: Iterator<Item = &'a (usize, u32)>,
{
    for i_parity in vals_iter {
        let i = i_parity.0;
        let parity = i_parity.1;
        println!("{}, {}", i, parity);
        // Working precision heuristic — presumably enough q-expansion terms
        // for the relations to be detected; TODO confirm the bound.
        let prec = (2 * i + 6) / 5 + 10;
        let forms_w_monoms = mixed_weight_forms(i, !is_even!(parity), prec, len);
        {
            // The monomial exponent pairs of each bracket operand.
            let monoms = forms_w_monoms
                .iter()
                .map(|f_t| (f_t.1, f_t.2))
                .collect::<Vec<_>>();
            let ref mut monms_file =
                File::create(format!("./data/str{}_{}_monoms.sobj", i, parity)).unwrap();
            save_as_pickle(&monoms, monms_file);
        }
        {
            // The weights of the generated forms.
            let weights = forms_w_monoms
                .iter()
                .map(|f| f.0.weight.unwrap().0)
                .collect::<Vec<_>>();
            println!("{:?}", weights);
            let ref mut weights_file =
                File::create(format!("./data/str{}_{}_weights.sobj", i, parity)).unwrap();
            save_as_pickle(&weights, weights_file);
        }
        {
            // The brackets themselves, flattened to plain (idx, coeff) pairs.
            let forms = forms_w_monoms
                .clone()
                .into_iter()
                .map(|f| f.0)
                .collect::<Vec<_>>();
            let brs = brackets(&forms);
            let brs = brs.iter()
                .map(|br| {
                    br.iter()
                        .map(|x| (x.0.idx, x.1.clone()))
                        .collect::<Vec<_>>()
                })
                .collect::<Vec<_>>();
            let ref mut br_file =
                File::create(format!("./data/str{}_{}_brs.sobj", i, parity)).unwrap();
            save_as_pickle(&brs, br_file);
        }
    }
}
/**********************************************************\ | | | hprose | | | | Official WebSite: http://www.hprose.com/ | | http://www.hprose.org/ | | | \**********************************************************/ /**********************************************************\ * * * io/writer_refer.rs * * * * hprose writer reference struct for Rust * * * * LastModified: Sep 28, 2016 * * Author: Chen Fei <cf@hprose.com> * * * \**********************************************************/ use std::collections::HashMap; use super::ByteWriter; use super::tags::*; use super::util::get_uint_bytes; pub struct WriterRefer { refs: HashMap<isize, u32>, lastref: u32 } impl WriterRefer { #[inline] pub fn new() -> WriterRefer { WriterRefer { refs: HashMap::new(), lastref: 0 } } pub fn set<T>(&mut self, p: *const T) { let i = p as isize; if i > 0 { self.refs.insert(p as isize, self.lastref); } self.lastref += 1; } pub fn write<T>(&mut self, w: &mut ByteWriter, p: *const T) -> bool { let i = p as isize; self.refs.get(&i).map_or(false, |n| { w.write_byte(TAG_REF); w.write(get_uint_bytes(&mut [0; 20], *n as u64)); w.write_byte(TAG_SEMICOLON); true }) } #[inline] pub fn reset(&mut self) { self.refs.clear(); } }
use ranges::Ranges; pub struct CPUCache { line_size: u8, line_size_bits: u8, sets_min_1: u64, assoc: u64, tags: Vec<u64>, } /* Returns the base-2 logarithm of x. Returns None if x is not a power of two. */ fn log2(x: u32) -> Option<u8> { /* Any more than 32 and we overflow anyway... */ for i in 0..32 { if (1u32 << i) == x { return Some(i); } } None } impl CPUCache { pub fn new(size: u32, line_size: u8, assoc: u32) -> CPUCache { let sets = ((size / line_size as u32) / assoc) as u64; CPUCache { // size: size as u64, line_size: line_size, line_size_bits: log2(line_size as u32).unwrap(), // sets, sets_min_1: sets - 1, assoc: assoc as u64, tags: vec![0; (size / line_size as u32) as usize], } } pub fn exchange(&mut self, new_addr: u64, old_addr: u64) { let old_tag = old_addr >> self.line_size_bits; let new_tag = new_addr >> self.line_size_bits; let old_set_no = old_tag & self.sets_min_1; let new_set_no = new_tag & self.sets_min_1; if old_tag != 0 && old_set_no != new_set_no { panic!( "Expected to only exchange cache lines inside the same set! old_addr={:x} new_addr={:x} old_set_no={} new_set_no={}", old_addr, new_addr, old_set_no, new_set_no ); } let set_no = new_set_no; let tag_index_start = (set_no * self.assoc) as usize; let tag_index_end = ((set_no + 1) * self.assoc) as usize; let set = &mut self.tags[tag_index_start..tag_index_end]; for tag in set.iter_mut() { if *tag == old_tag { *tag = new_tag; return; } } panic!("Couldn't find tag {:x} in set {}", old_tag, set_no); } pub fn get_cached_ranges(&self) -> Vec<(u64, u64)> { let mut ranges = Ranges::new(); for tag in &self.tags { if *tag != 0 { let start = tag << self.line_size_bits; ranges.add(start, self.line_size as u64); } } ranges.get() } }
use super::regex::{Regex, Region};
use super::scope::*;
use super::syntax_definition::*;
use yaml_rust::{YamlLoader, Yaml, ScanError};
use yaml_rust::yaml::Hash;

use std::collections::HashMap;
use std::error::Error;
use std::path::Path;
use std::ops::DerefMut;

#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
pub enum ParseSyntaxError {
    /// Invalid YAML file syntax, or at least something yaml_rust can't handle
    #[error("Invalid YAML file syntax: {0}")]
    InvalidYaml(#[from] ScanError),
    /// The file must contain at least one YAML document
    #[error("The file must contain at least one YAML document")]
    EmptyFile,
    /// Some keys are required for something to be a valid `.sublime-syntax`
    #[error("Missing mandatory key in YAML file: {0}")]
    MissingMandatoryKey(&'static str),
    /// Invalid regex
    #[error("Error while compiling regex '{0}': {1}")]
    RegexCompileError(String, #[source] Box<dyn Error + Send + Sync + 'static>),
    /// A scope that syntect's scope implementation can't handle
    #[error("Invalid scope: {0}")]
    InvalidScope(ParseScopeError),
    /// A reference to another file that is invalid
    #[error("Invalid file reference")]
    BadFileRef,
    /// Syntaxes must have a context named "main"
    #[error("Context 'main' is missing")]
    MainMissing,
    /// Some part of the YAML file is the wrong type (e.g a string but should be a list)
    /// Sorry this doesn't give you any way to narrow down where this is.
    /// Maybe use Sublime Text to figure it out.
    #[error("Type mismatch")]
    TypeMismatch,
}

// Looks up `key` in `map` and converts the value with `f`; a missing key or
// a wrong YAML type is reported as the corresponding error.
fn get_key<'a, R, F: FnOnce(&'a Yaml) -> Option<R>>(
    map: &'a Hash,
    key: &'static str,
    f: F,
) -> Result<R, ParseSyntaxError> {
    map.get(&Yaml::String(key.to_owned()))
        .ok_or(ParseSyntaxError::MissingMandatoryKey(key))
        .and_then(|x| f(x).ok_or(ParseSyntaxError::TypeMismatch))
}

// Parses a whitespace-separated scope string (e.g. "source.c meta.block")
// into scopes, registering each in `repo`.
fn str_to_scopes(s: &str, repo: &mut ScopeRepository) -> Result<Vec<Scope>, ParseSyntaxError> {
    s.split_whitespace()
        .map(|scope| repo.build(scope).map_err(ParseSyntaxError::InvalidScope))
        .collect()
}

// Mutable state threaded through the parsing of one syntax definition.
struct ParserState<'a> {
    scope_repo: &'a mut ScopeRepository,
    // The `variables:` section of the YAML, used for {{name}} substitution.
    variables: HashMap<String, String>,
    // Matches {{name}} variable references inside a pattern.
    variable_regex: Regex,
    // Matches backreferences like \1 inside a pattern.
    backref_regex: Regex,
    lines_include_newline: bool,
}

// `__start` must not include prototypes from the actual syntax definition,
// otherwise it's possible that a prototype makes us pop out of `__start`.
static START_CONTEXT: &str = "
__start:
  - meta_include_prototype: false
  - match: ''
    push: __main
__main:
  - include: main
";

impl SyntaxDefinition {
    /// In case you want to create your own SyntaxDefinition's in memory from strings.
    ///
    /// Generally you should use a [`SyntaxSet`].
    ///
    /// `fallback_name` is an optional name to use when the YAML doesn't provide a `name` key.
    ///
    /// [`SyntaxSet`]: ../struct.SyntaxSet.html
    pub fn load_from_str(
        s: &str,
        lines_include_newline: bool,
        fallback_name: Option<&str>,
    ) -> Result<SyntaxDefinition, ParseSyntaxError> {
        let docs = match YamlLoader::load_from_str(s) {
            Ok(x) => x,
            Err(e) => return Err(ParseSyntaxError::InvalidYaml(e)),
        };
        if docs.is_empty() {
            return Err(ParseSyntaxError::EmptyFile);
        }
        // Only the first YAML document in the file is used.
        let doc = &docs[0];
        let mut scope_repo = SCOPE_REPO.lock().unwrap();
        SyntaxDefinition::parse_top_level(doc, scope_repo.deref_mut(), lines_include_newline, fallback_name)
    }

    // Parses the whole top-level YAML hash into a SyntaxDefinition.
    fn parse_top_level(doc: &Yaml,
                       scope_repo: &mut ScopeRepository,
                       lines_include_newline: bool,
                       fallback_name: Option<&str>)
                       -> Result<SyntaxDefinition, ParseSyntaxError> {
        let h = doc.as_hash().ok_or(ParseSyntaxError::TypeMismatch)?;

        // Collect `variables:` (string keys/values only; others are skipped).
        let mut variables = HashMap::new();
        if let Ok(map) = get_key(h, "variables", |x| x.as_hash()) {
            for (key, value) in map.iter() {
                if let (Some(key_str), Some(val_str)) = (key.as_str(), value.as_str()) {
                    variables.insert(key_str.to_owned(), val_str.to_owned());
                }
            }
        }
        // `contexts:` and `scope:` are mandatory keys.
        let contexts_hash = get_key(h, "contexts", |x| x.as_hash())?;
        let top_level_scope = scope_repo.build(get_key(h, "scope", |x| x.as_str())?)
            .map_err(ParseSyntaxError::InvalidScope)?;

        let mut state = ParserState {
            scope_repo,
            variables,
            variable_regex: Regex::new(r"\{\{([A-Za-z0-9_]+)\}\}".into()),
            backref_regex: Regex::new(r"\\\d".into()),
            lines_include_newline,
        };

        let mut contexts = SyntaxDefinition::parse_contexts(contexts_hash, &mut state)?;
        if !contexts.contains_key("main") {
            return Err(ParseSyntaxError::MainMissing);
        }

        // Install the synthetic __start/__main wrapper contexts.
        SyntaxDefinition::add_initial_contexts(
            &mut contexts,
            &mut state,
            top_level_scope,
        );

        // Both visible and hidden extensions are collected into one list.
        let mut file_extensions = Vec::new();
        for extension_key in &["file_extensions", "hidden_file_extensions"] {
            if let Ok(v) = get_key(h, extension_key, |x| x.as_vec()) {
                file_extensions.extend(v.iter().filter_map(|y| y.as_str().map(|s| s.to_owned())))
            }
        }

        let defn = SyntaxDefinition {
            name: get_key(h, "name", |x| x.as_str()).unwrap_or_else(|_| fallback_name.unwrap_or("Unnamed")).to_owned(),
            scope: top_level_scope,
            file_extensions,
            // TODO maybe cache a compiled version of this Regex
            first_line_match: get_key(h, "first_line_match", |x| x.as_str())
                .ok()
                .map(|s| s.to_owned()),
            hidden: get_key(h, "hidden", |x| x.as_bool()).unwrap_or(false),
            variables: state.variables,
            contexts,
        };
        Ok(defn)
    }

    // Parses every named context in the `contexts:` hash. The context named
    // "prototype" is special-cased (it must not include itself).
    fn parse_contexts(map: &Hash,
                      state: &mut ParserState<'_>)
                      -> Result<HashMap<String, Context>, ParseSyntaxError> {
        let mut contexts = HashMap::new();
        for (key, value) in map.iter() {
            if let (Some(name), Some(val_vec)) = (key.as_str(), value.as_vec()) {
                let is_prototype = name == "prototype";
                let mut namer = ContextNamer::new(name);
                SyntaxDefinition::parse_context(val_vec, state, &mut contexts, is_prototype, &mut namer)?;
            }
        }
        Ok(contexts)
    }

    fn parse_context(vec: &[Yaml],
                     // TODO: Maybe just pass the scope repo if that's all that's needed?
                     state: &mut ParserState<'_>,
                     contexts: &mut HashMap<String, Context>,
                     is_prototype: bool,
                     namer: &mut ContextNamer)
                     -> Result<String, ParseSyntaxError> {
        let mut context = Context::new(!is_prototype);
        let name = namer.next();

        for y in vec.iter() {
            let map = y.as_hash().ok_or(ParseSyntaxError::TypeMismatch)?;

            // "Special" entries configure the context itself; anything else
            // is a pattern (include or match).
            let mut is_special = false;
            if let Ok(x) = get_key(map, "meta_scope", |x| x.as_str()) {
                context.meta_scope = str_to_scopes(x, state.scope_repo)?;
                is_special = true;
            }
            if let Ok(x) = get_key(map, "meta_content_scope", |x| x.as_str()) {
                context.meta_content_scope = str_to_scopes(x, state.scope_repo)?;
                is_special = true;
            }
            if let Ok(x) = get_key(map, "meta_include_prototype", |x| x.as_bool()) {
                context.meta_include_prototype = x;
                is_special = true;
            }
            // `clear_scopes` may be a boolean (clear all) or an integer
            // (clear the top N).
            if let Ok(true) = get_key(map, "clear_scopes", |x| x.as_bool()) {
                context.clear_scopes = Some(ClearAmount::All);
                is_special = true;
            }
            if let Ok(x) = get_key(map, "clear_scopes", |x| x.as_i64()) {
                context.clear_scopes = Some(ClearAmount::TopN(x as usize));
                is_special = true;
            }
            if !is_special {
                if let Ok(x) = get_key(map, "include", Some) {
                    let reference = SyntaxDefinition::parse_reference(
                        x, state, contexts, namer, false)?;
                    context.patterns.push(Pattern::Include(reference));
                } else {
                    let pattern = SyntaxDefinition::parse_match_pattern(
                        map, state, contexts, namer)?;
                    if pattern.has_captures {
                        context.uses_backrefs = true;
                    }
                    context.patterns.push(Pattern::Match(pattern));
                }
            }
        }

        contexts.insert(name.clone(), context);
        Ok(name)
    }

    // Parses a reference to another context: a plain name, a "scope:..."
    // cross-syntax reference, a "Foo.sublime-syntax" file reference, or an
    // inline anonymous context (a YAML list).
    fn parse_reference(y: &Yaml,
                       state: &mut ParserState<'_>,
                       contexts: &mut HashMap<String, Context>,
                       namer: &mut ContextNamer,
                       with_escape: bool)
                       -> Result<ContextReference, ParseSyntaxError> {
        if let Some(s) = y.as_str() {
            // An optional "#sub_context" suffix selects a context inside the
            // referenced syntax.
            let parts: Vec<&str> = s.split('#').collect();
            let sub_context = if parts.len() > 1 {
                Some(parts[1].to_owned())
            } else {
                None
            };
            if parts[0].starts_with("scope:") {
                Ok(ContextReference::ByScope {
                    // skip the "scope:" prefix (6 bytes)
                    scope: state.scope_repo
                        .build(&parts[0][6..])
                        .map_err(ParseSyntaxError::InvalidScope)?,
                    sub_context,
                    with_escape,
                })
            } else if parts[0].ends_with(".sublime-syntax") {
                let stem = Path::new(parts[0])
                    .file_stem()
                    .and_then(|x| x.to_str())
                    .ok_or(ParseSyntaxError::BadFileRef)?;
                Ok(ContextReference::File {
                    name: stem.to_owned(),
                    sub_context,
                    with_escape,
                })
            } else {
                Ok(ContextReference::Named(parts[0].to_owned()))
            }
        } else if let Some(v) = y.as_vec() {
            // Inline anonymous context: parse it and refer to it by the
            // generated name.
            let subname = SyntaxDefinition::parse_context(v, state, contexts, false, namer)?;
            Ok(ContextReference::Inline(subname))
        } else {
            Err(ParseSyntaxError::TypeMismatch)
        }
    }

    // Parses one `- match: ...` pattern entry, including its operation
    // (pop/push/set/embed), captures, and with_prototype/escape contexts.
    fn parse_match_pattern(map: &Hash,
                           state: &mut ParserState<'_>,
                           contexts: &mut HashMap<String, Context>,
                           namer: &mut ContextNamer)
                           -> Result<MatchPattern, ParseSyntaxError> {
        let raw_regex = get_key(map, "match", |x| x.as_str())?;
        let regex_str = Self::parse_regex(raw_regex, state)?;
        // println!("{:?}", regex_str);

        let scope = get_key(map, "scope", |x| x.as_str())
            .ok()
            .map(|s| str_to_scopes(s, state.scope_repo))
            .unwrap_or_else(|| Ok(vec![]))?;

        let captures = if let Ok(map) = get_key(map, "captures", |x| x.as_hash()) {
            Some(Self::parse_captures(map, &regex_str, state)?)
        } else {
            None
        };

        let mut has_captures = false;
        let operation = if get_key(map, "pop", Some).is_ok() {
            // Thanks @wbond for letting me know this is the correct way to check for captures
            has_captures = state.backref_regex.search(&regex_str, 0, regex_str.len(), None);
            MatchOperation::Pop
        } else if let Ok(y) = get_key(map, "push", Some) {
            MatchOperation::Push(SyntaxDefinition::parse_pushargs(y, state, contexts, namer)?)
        } else if let Ok(y) = get_key(map, "set", Some) {
            MatchOperation::Set(SyntaxDefinition::parse_pushargs(y, state, contexts, namer)?)
        } else if let Ok(y) = get_key(map, "embed", Some) {
            // Same as push so we translate it to what it would be
            // (build a synthetic escape context in YAML and parse it).
            let mut embed_escape_context_yaml = vec!();
            let mut commands = Hash::new();
            commands.insert(Yaml::String("meta_include_prototype".to_string()), Yaml::Boolean(false));
            embed_escape_context_yaml.push(Yaml::Hash(commands));
            if let Ok(s) = get_key(map, "embed_scope", Some) {
                commands = Hash::new();
                commands.insert(Yaml::String("meta_content_scope".to_string()), s.clone());
                embed_escape_context_yaml.push(Yaml::Hash(commands));
            }
            if let Ok(v) = get_key(map, "escape", Some) {
                let mut match_map = Hash::new();
                match_map.insert(Yaml::String("match".to_string()), v.clone());
                match_map.insert(Yaml::String("pop".to_string()), Yaml::Boolean(true));
                if let Ok(y) = get_key(map, "escape_captures", Some) {
                    match_map.insert(Yaml::String("captures".to_string()), y.clone());
                }
                embed_escape_context_yaml.push(Yaml::Hash(match_map));
                let escape_context = SyntaxDefinition::parse_context(
                    &embed_escape_context_yaml,
                    state,
                    contexts,
                    false,
                    namer,
                )?;
                MatchOperation::Push(vec![ContextReference::Inline(escape_context),
                                          SyntaxDefinition::parse_reference(y, state, contexts, namer, true)?])
            } else {
                // `embed` without `escape` is invalid.
                return Err(ParseSyntaxError::MissingMandatoryKey("escape"));
            }
        } else {
            MatchOperation::None
        };

        let with_prototype = if let Ok(v) = get_key(map, "with_prototype", |x| x.as_vec()) {
            // should a with_prototype include the prototype? I don't think so.
            let subname = Self::parse_context(v, state, contexts, true, namer)?;
            Some(ContextReference::Inline(subname))
        } else if let Ok(v) = get_key(map, "escape", Some) {
            // For `embed`, synthesize a with_prototype context containing a
            // lookahead for the escape pattern that pops.
            let subname = namer.next();

            let mut context = Context::new(false);
            let mut match_map = Hash::new();
            match_map.insert(Yaml::String("match".to_string()), Yaml::String(format!("(?={})", v.as_str().unwrap())));
            match_map.insert(Yaml::String("pop".to_string()), Yaml::Boolean(true));
            let pattern = SyntaxDefinition::parse_match_pattern(&match_map, state, contexts, namer)?;
            if pattern.has_captures {
                context.uses_backrefs = true;
            }
            context.patterns.push(Pattern::Match(pattern));

            contexts.insert(subname.clone(), context);
            Some(ContextReference::Inline(subname))
        } else {
            None
        };

        let pattern = MatchPattern::new(
            has_captures,
            regex_str,
            scope,
            captures,
            operation,
            with_prototype,
        );

        Ok(pattern)
    }

    // `push`/`set` may take a single reference or a list of references.
    fn parse_pushargs(y: &Yaml,
                      state: &mut ParserState<'_>,
                      contexts: &mut HashMap<String, Context>,
                      namer: &mut ContextNamer)
                      -> Result<Vec<ContextReference>, ParseSyntaxError> {
        // check for a push of multiple items
        if y.as_vec().map_or(false, |v| !v.is_empty() && (v[0].as_str().is_some() || (v[0].as_vec().is_some() && v[0].as_vec().unwrap()[0].as_hash().is_some()))) {
            // this works because Result implements FromIterator to handle the errors
            y.as_vec()
                .unwrap()
                .iter()
                .map(|x| SyntaxDefinition::parse_reference(x, state, contexts, namer, false))
                .collect()
        } else {
            let reference = SyntaxDefinition::parse_reference(y, state, contexts, namer, false)?;
            Ok(vec![reference])
        }
    }

    // Expands variables and POSIX classes, rewrites newline handling, and
    // verifies the final pattern compiles.
    fn parse_regex(raw_regex: &str, state: &ParserState<'_>) -> Result<String, ParseSyntaxError> {
        let regex = Self::resolve_variables(raw_regex, state);
        let regex = replace_posix_char_classes(regex);
        let regex = if state.lines_include_newline {
            regex_for_newlines(regex)
        } else {
            // If the passed in strings don't include newlines (unlike Sublime) we can't match on
            // them using the original regex. So this tries to rewrite the regex in a way that
            // allows matching against lines without newlines (essentially replacing `\n` with `$`).
            regex_for_no_newlines(regex)
        };
        Self::try_compile_regex(&regex)?;
        Ok(regex)
    }

    // Recursively substitutes {{name}} references with their values from
    // the `variables:` section. Unknown variables expand to "".
    fn resolve_variables(raw_regex: &str, state: &ParserState<'_>) -> String {
        let mut result = String::new();
        let mut index = 0;
        let mut region = Region::new();

        while state.variable_regex.search(raw_regex, index, raw_regex.len(), Some(&mut region)) {
            let (begin, end) = region.pos(0).unwrap();

            // Copy the text before the match verbatim.
            result.push_str(&raw_regex[index..begin]);

            let var_pos = region.pos(1).unwrap();
            let var_name = &raw_regex[var_pos.0..var_pos.1];
            let var_raw = state.variables.get(var_name).map(String::as_ref).unwrap_or("");
            // Variables may themselves contain {{...}} references.
            let var_resolved = Self::resolve_variables(var_raw, state);
            result.push_str(&var_resolved);

            index = end;
        }

        if index < raw_regex.len() {
            result.push_str(&raw_regex[index..]);
        }

        result
    }

    // Compiles the pattern once to surface syntax errors at load time.
    fn try_compile_regex(regex_str: &str) -> Result<(), ParseSyntaxError> {
        // Replace backreferences with a placeholder value that will also appear in errors
        let regex_str = substitute_backrefs_in_regex(regex_str, |i| Some(format!("<placeholder_{}>", i)));

        if let Some(error) = Regex::try_compile(&regex_str) {
            Err(ParseSyntaxError::RegexCompileError(regex_str, error))
        } else {
            Ok(())
        }
    }

    // Parses the `captures:` hash, keeping only capture-group numbers that
    // actually consume input (i.e. are not inside lookarounds).
    fn parse_captures(
        map: &Hash,
        regex_str: &str,
        state: &mut ParserState<'_>,
    ) -> Result<CaptureMapping, ParseSyntaxError> {
        let valid_indexes = get_consuming_capture_indexes(regex_str);
        let mut captures = Vec::new();
        for (key, value) in map.iter() {
            if let (Some(key_int), Some(val_str)) = (key.as_i64(), value.as_str()) {
                if valid_indexes.contains(&(key_int as usize)) {
                    captures.push((key_int as usize, str_to_scopes(val_str, state.scope_repo)?));
                }
            }
        }
        Ok(captures)
    }

    /// Sublime treats the top level context slightly differently from
    /// including the main context from other syntaxes. When main is popped
    /// it is immediately re-added and when it is `set` over the file level
    /// scope remains. This behaviour is emulated through some added contexts
    /// that are the actual top level contexts used in parsing.
    /// See <https://github.com/trishume/syntect/issues/58> for more.
    fn add_initial_contexts(
        contexts: &mut HashMap<String, Context>,
        state: &mut ParserState<'_>,
        top_level_scope: Scope,
    ) {
        // START_CONTEXT is trusted, static YAML, so unwrap is safe here.
        let yaml_docs = YamlLoader::load_from_str(START_CONTEXT).unwrap();
        let yaml = &yaml_docs[0];

        let start_yaml : &[Yaml] = yaml["__start"].as_vec().unwrap();
        SyntaxDefinition::parse_context(start_yaml, state, contexts, false, &mut ContextNamer::new("__start")).unwrap();
        if let Some(start) = contexts.get_mut("__start") {
            start.meta_content_scope = vec![top_level_scope];
        }

        let main_yaml : &[Yaml] = yaml["__main"].as_vec().unwrap();
        SyntaxDefinition::parse_context(main_yaml, state, contexts, false, &mut ContextNamer::new("__main")).unwrap();

        // Mirror main's meta settings onto the synthetic wrapper so it
        // behaves like main from the outside.
        let meta_include_prototype = contexts["main"].meta_include_prototype;
        let meta_scope = contexts["main"].meta_scope.clone();
        let meta_content_scope = contexts["main"].meta_content_scope.clone();

        if let Some(outer_main) = contexts.get_mut("__main") {
            outer_main.meta_include_prototype = meta_include_prototype;
            outer_main.meta_scope = meta_scope;
            outer_main.meta_content_scope = meta_content_scope;
        }

        // add the top_level_scope as a meta_content_scope to main so
        // pushes from other syntaxes add the file scope
        // TODO: this order is not quite correct if main also has a meta_scope
        if let Some(main) = contexts.get_mut("main") {
            main.meta_content_scope.insert(0, top_level_scope);
        }
    }
}

// Generates context names: the base name first, then "#anon_<name>_<i>" for
// each subsequent (inline/anonymous) context parsed under it.
struct ContextNamer {
    name: String,
    // None until the base name has been handed out.
    anonymous_index: Option<usize>,
}

impl ContextNamer {
    fn new(name: &str) -> ContextNamer {
        ContextNamer {
            name: name.to_string(),
            anonymous_index: None,
        }
    }

    fn next(&mut self) -> String {
        let name = if let Some(index) = self.anonymous_index {
            format!("#anon_{}_{}", self.name, index)
        } else {
            self.name.clone()
        };

        self.anonymous_index = Some(self.anonymous_index.map(|i| i + 1).unwrap_or(0));
        name
    }
}

/// In fancy-regex, POSIX character classes only match ASCII characters.
///
/// Sublime's syntaxes expect them to match Unicode characters as well, so transform them to
/// corresponding Unicode character classes.
fn replace_posix_char_classes(regex: String) -> String {
    regex.replace("[:alpha:]", r"\p{L}")
        .replace("[:alnum:]", r"\p{L}\p{N}")
        .replace("[:lower:]", r"\p{Ll}")
        .replace("[:upper:]", r"\p{Lu}")
        .replace("[:digit:]", r"\p{Nd}")
}

/// Some of the regexes include `$` and expect it to match end of line,
/// e.g. *before* the `\n` in `test\n`.
///
/// In fancy-regex, `$` means end of text by default, so that would
/// match *after* `\n`. Using `(?m:$)` instead means it matches end of line.
///
/// Note that we don't want to add a `(?m)` in the beginning to change the
/// whole regex because that would also change the meaning of `^`. In
/// fancy-regex, that also matches at the end of e.g. `test\n` which is
/// different from onig. It would also change `.` to match more.
fn regex_for_newlines(regex: String) -> String {
    if !regex.contains('$') {
        return regex;
    }

    let rewriter = RegexRewriterForNewlines {
        parser: Parser::new(regex.as_bytes()),
    };
    rewriter.rewrite()
}

// Byte-level rewriter that replaces every unescaped `$` outside a character
// class with `(?m:$)`.
struct RegexRewriterForNewlines<'a> {
    parser: Parser<'a>,
}

impl<'a> RegexRewriterForNewlines<'a> {
    fn rewrite(mut self) -> String {
        let mut result = Vec::new();

        while let Some(c) = self.parser.peek() {
            match c {
                b'$' => {
                    self.parser.next();
                    result.extend_from_slice(br"(?m:$)");
                }
                b'\\' => {
                    // Copy the escape and the escaped byte untouched.
                    self.parser.next();
                    result.push(c);
                    if let Some(c2) = self.parser.peek() {
                        self.parser.next();
                        result.push(c2);
                    }
                }
                b'[' => {
                    // `$` inside a character class is literal; copy the class.
                    let (mut content, _) = self.parser.parse_character_class();
                    result.append(&mut content);
                }
                _ => {
                    self.parser.next();
                    result.push(c);
                }
            }
        }
        String::from_utf8(result).unwrap()
    }
}

/// Rewrite a regex that matches `\n` to one that matches `$` (end of line) instead.
/// That allows the regex to be used to match lines that don't include a trailing newline character.
/// /// The reason we're doing this is because the regexes in the syntax definitions assume that the /// lines that are being matched on include a trailing newline. /// /// Note that the rewrite is just an approximation and there's a couple of cases it can not handle, /// due to `$` being an anchor whereas `\n` matches a character. fn regex_for_no_newlines(regex: String) -> String { if !regex.contains(r"\n") { return regex; } // A special fix to rewrite a pattern from the `Rd` syntax that the RegexRewriter can not // handle properly. let regex = regex.replace("(?:\\n)?", "(?:$|)"); let rewriter = RegexRewriterForNoNewlines { parser: Parser::new(regex.as_bytes()), }; rewriter.rewrite() } struct RegexRewriterForNoNewlines<'a> { parser: Parser<'a>, } impl<'a> RegexRewriterForNoNewlines<'a> { fn rewrite(mut self) -> String { let mut result = Vec::new(); while let Some(c) = self.parser.peek() { match c { b'\\' => { self.parser.next(); if let Some(c2) = self.parser.peek() { self.parser.next(); // Replacing `\n` with `$` in `\n?` or `\n+` would make parsing later fail // with "target of repeat operator is invalid" let c3 = self.parser.peek(); if c2 == b'n' && c3 != Some(b'?') && c3 != Some(b'+') && c3 != Some(b'*') { result.extend_from_slice(b"$"); } else { result.push(c); result.push(c2); } } else { result.push(c); } } b'[' => { let (mut content, matches_newline) = self.parser.parse_character_class(); if matches_newline && self.parser.peek() != Some(b'?') { result.extend_from_slice(b"(?:"); result.append(&mut content); result.extend_from_slice(br"|$)"); } else { result.append(&mut content); } } _ => { self.parser.next(); result.push(c); } } } String::from_utf8(result).unwrap() } } fn get_consuming_capture_indexes(regex: &str) -> Vec<usize> { let parser = ConsumingCaptureIndexParser { parser: Parser::new(regex.as_bytes()), }; parser.get_consuming_capture_indexes() } struct ConsumingCaptureIndexParser<'a> { parser: Parser<'a>, } impl<'a> ConsumingCaptureIndexParser<'a> { 
    /// Find capture groups which are not inside lookarounds.
    ///
    /// If, in a YAML syntax definition, a scope stack is applied to a capture group inside a
    /// lookaround, (i.e. "captures:\n x: scope.stack goes.here", where "x" is the number of a
    /// capture group in a lookahead/behind), those scopes are not applied, so no need to
    /// even parse them.
    fn get_consuming_capture_indexes(mut self) -> Vec<usize> {
        let mut result = Vec::new();
        // Stack of `in_lookaround` flags, one entry per open paren.
        let mut stack = Vec::new();
        let mut cap_num = 0;
        let mut in_lookaround = false;
        stack.push(in_lookaround);
        // Group 0 (the whole match) always consumes.
        result.push(cap_num);

        while let Some(c) = self.parser.peek() {
            match c {
                b'\\' => {
                    // Skip the escape and the escaped byte.
                    self.parser.next();
                    self.parser.next();
                }
                b'[' => {
                    // Skip over character classes; parens inside them are literal.
                    self.parser.parse_character_class();
                }
                b'(' => {
                    self.parser.next();
                    // add the current lookaround state to the stack so we can just pop at a closing paren
                    stack.push(in_lookaround);
                    if let Some(c2) = self.parser.peek() {
                        if c2 != b'?' {
                            // simple numbered capture group
                            cap_num += 1;
                            // if we are not currently in a lookaround,
                            // add this capture group number to the valid ones
                            if !in_lookaround {
                                result.push(cap_num);
                            }
                        } else {
                            self.parser.next();
                            if let Some(c3) = self.parser.peek() {
                                self.parser.next();
                                if c3 == b'=' || c3 == b'!' {
                                    // lookahead
                                    in_lookaround = true;
                                } else if c3 == b'<' {
                                    if let Some(c4) = self.parser.peek() {
                                        if c4 == b'=' || c4 == b'!' {
                                            self.parser.next();
                                            // lookbehind
                                            in_lookaround = true;
                                        }
                                    }
                                } else if c3 == b'P' {
                                    if let Some(c4) = self.parser.peek() {
                                        if c4 == b'<' {
                                            // named capture group
                                            cap_num += 1;
                                            // if we are not currently in a lookaround,
                                            // add this capture group number to the valid ones
                                            if !in_lookaround {
                                                result.push(cap_num);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                b')' => {
                    // Restore the lookaround state of the enclosing group.
                    if let Some(value) = stack.pop() {
                        in_lookaround = value;
                    }
                    self.parser.next();
                }
                _ => {
                    self.parser.next();
                }
            }
        }
        result
    }
}

/// Minimal byte cursor over a regex pattern.
struct Parser<'a> {
    bytes: &'a [u8],
    index: usize,
}

impl<'a> Parser<'a> {
    fn new(bytes: &[u8]) -> Parser {
        Parser {
            bytes,
            index: 0,
        }
    }

    /// Current byte without advancing; `None` at end of input.
    fn peek(&self) -> Option<u8> {
        self.bytes.get(self.index).copied()
    }

    /// Advance the cursor one byte.
    fn next(&mut self) {
        self.index += 1;
    }

    /// Consume a character class starting at `[`. Returns the raw bytes of the
    /// class (including brackets) and whether it can match `\n` (only tracked
    /// for non-negated classes at the top nesting level).
    fn parse_character_class(&mut self) -> (Vec<u8>, bool) {
        let mut content = Vec::new();
        let mut negated = false;
        let mut nesting = 0;
        let mut matches_newline = false;

        self.next();
        content.push(b'[');
        if let Some(b'^') = self.peek() {
            self.next();
            content.push(b'^');
            negated = true;
        }
        // An unescaped `]` is allowed after `[` or `[^` and doesn't mean the end of the class.
        if let Some(b']') = self.peek() {
            self.next();
            content.push(b']');
        }
        while let Some(c) = self.peek() {
            match c {
                b'\\' => {
                    self.next();
                    content.push(c);
                    if let Some(c2) = self.peek() {
                        self.next();
                        // A literal `\n` inside a non-negated top-level class
                        // means the class can match a newline.
                        if c2 == b'n' && !negated && nesting == 0 {
                            matches_newline = true;
                        }
                        content.push(c2);
                    }
                }
                b'[' => {
                    // Nested class (e.g. in `[[a]&&[b]]` intersections).
                    self.next();
                    content.push(b'[');
                    nesting += 1;
                }
                b']' => {
                    self.next();
                    content.push(b']');
                    if nesting == 0 {
                        break;
                    }
                    nesting -= 1;
                }
                _ => {
                    self.next();
                    content.push(c);
                }
            }
        }
        (content, matches_newline)
    }
}

#[cfg(test)]
mod tests {
    use crate::parsing::syntax_definition::*;
    use crate::parsing::Scope;
    use super::*;

    #[test]
    fn can_parse() {
        let defn: SyntaxDefinition =
            SyntaxDefinition::load_from_str("name: C\nscope: source.c\ncontexts: {main: []}", false, None)
            .unwrap();
        assert_eq!(defn.name, "C");
        assert_eq!(defn.scope, Scope::new("source.c").unwrap());
        let exts_empty: Vec<String> = Vec::new();
        assert_eq!(defn.file_extensions, exts_empty);
        assert!(!defn.hidden);
        assert!(defn.variables.is_empty());
        let defn2: SyntaxDefinition = SyntaxDefinition::load_from_str(" name: C scope: source.c file_extensions: [c, h] hidden_file_extensions: [k, l] hidden: true variables: ident: '[QY]+' contexts: prototype: - match: lol scope: source.php main: - match: \\b(if|else|for|while|{{ident}})\\b scope: keyword.control.c keyword.looping.c captures: 1: meta.preprocessor.c++ 2: keyword.control.include.c++ push: [string, 'scope:source.c#main', 'CSS.sublime-syntax#rule-list-body'] with_prototype: - match: wow pop: true - match: '\"' push: string string: - meta_scope: string.quoted.double.c - meta_include_prototype: false - match: \\\\. scope: constant.character.escape.c - match: '\"' pop: true ", false, None)
            .unwrap();
        assert_eq!(defn2.name, "C");
        let top_level_scope = Scope::new("source.c").unwrap();
        assert_eq!(defn2.scope, top_level_scope);
        let exts: Vec<String> = vec!["c", "h", "k", "l"].into_iter().map(String::from).collect();
        assert_eq!(defn2.file_extensions, exts);
        assert!(defn2.hidden);
        assert_eq!(defn2.variables.get("ident").unwrap(), "[QY]+");

        let n: Vec<Scope> = Vec::new();
        println!("{:?}", defn2);
        // unreachable!();
        let main = &defn2.contexts["main"];
        assert_eq!(main.meta_content_scope, vec![top_level_scope]);
        assert_eq!(main.meta_scope, n);
        assert!(main.meta_include_prototype);

        assert_eq!(defn2.contexts["__main"].meta_content_scope, n);
        assert_eq!(defn2.contexts["__start"].meta_content_scope, vec![top_level_scope]);

        assert_eq!(defn2.contexts["string"].meta_scope, vec![Scope::new("string.quoted.double.c").unwrap()]);
        let first_pattern: &Pattern = &main.patterns[0];
        match *first_pattern {
            Pattern::Match(ref match_pat) => {
                let m: &CaptureMapping = match_pat.captures.as_ref().expect("test failed");
                assert_eq!(&m[0], &(1, vec![Scope::new("meta.preprocessor.c++").unwrap()]));
                use crate::parsing::syntax_definition::ContextReference::*;

                // this is sadly necessary because Context is not Eq because of the Regex
                let expected = MatchOperation::Push(vec![
                    Named("string".to_owned()),
                    ByScope {
                        scope: Scope::new("source.c").unwrap(),
                        sub_context: Some("main".to_owned()),
                        with_escape: false,
                    },
                    File {
                        name: "CSS".to_owned(),
                        sub_context: Some("rule-list-body".to_owned()),
                        with_escape: false,
                    },
                ]);
                assert_eq!(format!("{:?}", match_pat.operation), format!("{:?}", expected));

                assert_eq!(
                    match_pat.scope,
                    vec![Scope::new("keyword.control.c").unwrap(), Scope::new("keyword.looping.c").unwrap()]
                );
                assert!(match_pat.with_prototype.is_some());
            }
            _ => unreachable!(),
        }
    }

    #[test]
    fn can_parse_embed_as_with_prototypes() {
        let old_def = SyntaxDefinition::load_from_str(r#" name: C scope: source.c file_extensions: [c, h] variables: ident: '[QY]+' contexts: main: - match: '(>)\s*' captures: 1: meta.tag.style.begin.html punctuation.definition.tag.end.html push: - [{ meta_include_prototype: false }, { meta_content_scope: 'source.css.embedded.html' }, { match: '(?i)(?=</style)', pop: true }] - scope:source.css with_prototype: - match: (?=(?i)(?=</style)) pop: true "#, false, None).unwrap();

        let mut def_with_embed = SyntaxDefinition::load_from_str(r#" name: C scope: source.c file_extensions: [c, h] variables: ident: '[QY]+' contexts: main: - match: '(>)\s*' captures: 1: meta.tag.style.begin.html punctuation.definition.tag.end.html embed: scope:source.css embed_scope: source.css.embedded.html escape: (?i)(?=</style) "#, false, None).unwrap();

        // We will soon do an `assert_eq!()`. But there is one difference we must expect, namely
        // that for `def_with_embed`, the value of `ContextReference::ByScope::with_escape` will be
        // `true`, whereas for `old_def` it will be `false`. So manually adjust `with_escape` to
        // `false` so that `assert_eq!()` will work.
        let def_with_embed_context = def_with_embed.contexts.get_mut("main").unwrap();
        if let Pattern::Match(ref mut match_pattern) = def_with_embed_context.patterns[0] {
            if let MatchOperation::Push(ref mut context_references) = match_pattern.operation {
                if let ContextReference::ByScope {
                    ref mut with_escape, ..
                } = context_references[1]
                {
                    *with_escape = false;
                }
            }
        }

        assert_eq!(old_def.contexts["main"], def_with_embed.contexts["main"]);
    }

    #[test]
    fn errors_on_embed_without_escape() {
        // An `embed` without a matching `escape` key must be rejected.
        let def = SyntaxDefinition::load_from_str(r#" name: C scope: source.c file_extensions: [c, h] variables: ident: '[QY]+' contexts: main: - match: '(>)\s*' captures: 1: meta.tag.style.begin.html punctuation.definition.tag.end.html embed: scope:source.css embed_scope: source.css.embedded.html "#, false, None);
        assert!(def.is_err());
        match def.unwrap_err() {
            ParseSyntaxError::MissingMandatoryKey(key) => assert_eq!(key, "escape"),
            _ => unreachable!("Got unexpected ParseSyntaxError"),
        }
    }

    #[test]
    fn errors_on_regex_compile_error() {
        // `[a` is an unterminated character class and must fail compilation.
        let def = SyntaxDefinition::load_from_str(r#" name: C scope: source.c file_extensions: [test] contexts: main: - match: '[a' scope: keyword.name "#, false, None);
        assert!(def.is_err());
        match def.unwrap_err() {
            ParseSyntaxError::RegexCompileError(ref regex, _) => assert_eq!("[a", regex),
            _ => unreachable!("Got unexpected ParseSyntaxError"),
        }
    }

    #[test]
    fn can_parse_ugly_yaml() {
        let defn: SyntaxDefinition = SyntaxDefinition::load_from_str(" name: LaTeX scope: text.tex.latex contexts: main: - match: '((\\\\)(?:framebox|makebox))\\b' captures: 1: support.function.box.latex 2: punctuation.definition.backslash.latex push: - [{meta_scope: meta.function.box.latex}, {match: '', pop: true}] - argument - optional-arguments argument: - match: '\\{' scope: punctuation.definition.group.brace.begin.latex - match: '(?=\\S)' pop: true optional-arguments: - match: '(?=\\S)' pop: true ", false, None)
            .unwrap();
        assert_eq!(defn.name, "LaTeX");
        let top_level_scope = Scope::new("text.tex.latex").unwrap();
        assert_eq!(defn.scope, top_level_scope);

        let first_pattern: &Pattern = &defn.contexts["main"].patterns[0];
        match *first_pattern {
            Pattern::Match(ref match_pat) => {
                let m: &CaptureMapping = match_pat.captures.as_ref().expect("test failed");
                assert_eq!(&m[0], &(1, vec![Scope::new("support.function.box.latex").unwrap()]));
                //use parsing::syntax_definition::ContextReference::*;
                // TODO: check the first pushed reference is Inline(...) and has a meta_scope of meta.function.box.latex
                // TODO: check the second pushed reference is Named("argument".to_owned())
                // TODO: check the third pushed reference is Named("optional-arguments".to_owned())
                assert!(match_pat.with_prototype.is_none());
            }
            _ => unreachable!(),
        }
    }

    #[test]
    fn names_anonymous_contexts() {
        // Anonymous pushed contexts get synthesized `#anon_<parent>_<n>` names.
        let def = SyntaxDefinition::load_from_str(
            r#" scope: source.c contexts: main: - match: a push: a a: - meta_scope: a - match: x push: - meta_scope: anonymous_x - match: anything push: - meta_scope: anonymous_x_2 - match: y push: - meta_scope: anonymous_y - match: z escape: 'test' "#,
            false,
            None
        ).unwrap();
        assert_eq!(def.contexts["a"].meta_scope, vec![Scope::new("a").unwrap()]);
        assert_eq!(def.contexts["#anon_a_0"].meta_scope, vec![Scope::new("anonymous_x").unwrap()]);
        assert_eq!(def.contexts["#anon_a_1"].meta_scope, vec![Scope::new("anonymous_x_2").unwrap()]);
        assert_eq!(def.contexts["#anon_a_2"].meta_scope, vec![Scope::new("anonymous_y").unwrap()]);
        assert_eq!(def.contexts["#anon_a_3"].patterns.len(), 1); // escape
    }

    #[test]
    fn can_use_fallback_name() {
        let def = SyntaxDefinition::load_from_str(r#" scope: source.c contexts: main: - match: '' "#, false, Some("C"));
        assert_eq!(def.unwrap().name, "C");
    }

    #[test]
    fn can_rewrite_regex_for_newlines() {
        fn rewrite(s: &str) -> String {
            regex_for_newlines(s.to_string())
        }

        assert_eq!(&rewrite(r"a"), r"a");
        assert_eq!(&rewrite(r"\b"), r"\b");
        assert_eq!(&rewrite(r"(a)"), r"(a)");
        assert_eq!(&rewrite(r"[a]"), r"[a]");
        assert_eq!(&rewrite(r"[^a]"), r"[^a]");
        assert_eq!(&rewrite(r"[]a]"), r"[]a]");
        assert_eq!(&rewrite(r"[[a]]"), r"[[a]]");

        assert_eq!(&rewrite(r"^"), r"^");
        assert_eq!(&rewrite(r"$"), r"(?m:$)");
        assert_eq!(&rewrite(r"^ab$"), r"^ab(?m:$)");
        assert_eq!(&rewrite(r"\^ab\$"), r"\^ab\$");
        assert_eq!(&rewrite(r"(//).*$"), r"(//).*(?m:$)");

        // Do not rewrite this `$` because it's in a char class and doesn't mean end of line
        assert_eq!(&rewrite(r"[a$]"), r"[a$]");
    }

    #[test]
    fn can_rewrite_regex_for_no_newlines() {
        fn rewrite(s: &str) -> String {
            regex_for_no_newlines(s.to_string())
        }

        assert_eq!(&rewrite(r"a"), r"a");
        assert_eq!(&rewrite(r"\b"), r"\b");
        assert_eq!(&rewrite(r"(a)"), r"(a)");
        assert_eq!(&rewrite(r"[a]"), r"[a]");
        assert_eq!(&rewrite(r"[^a]"), r"[^a]");
        assert_eq!(&rewrite(r"[]a]"), r"[]a]");
        assert_eq!(&rewrite(r"[[a]]"), r"[[a]]");

        assert_eq!(&rewrite(r"\n"), r"$");
        assert_eq!(&rewrite(r"\[\n"), r"\[$");
        assert_eq!(&rewrite(r"a\n?"), r"a\n?");
        assert_eq!(&rewrite(r"a\n+"), r"a\n+");
        assert_eq!(&rewrite(r"a\n*"), r"a\n*");
        assert_eq!(&rewrite(r"[abc\n]"), r"(?:[abc\n]|$)");
        assert_eq!(&rewrite(r"[^\n]"), r"[^\n]");
        assert_eq!(&rewrite(r"[^]\n]"), r"[^]\n]");
        assert_eq!(&rewrite(r"[\n]?"), r"[\n]?");
        // Removing the `\n` might result in an empty character class, so we should leave it.
        assert_eq!(&rewrite(r"[\n]"), r"(?:[\n]|$)");
        assert_eq!(&rewrite(r"[]\n]"), r"(?:[]\n]|$)");
        // In order to properly understand nesting, we'd have to have a full parser, so ignore it.
        assert_eq!(&rewrite(r"[[a]&&[\n]]"), r"[[a]&&[\n]]");
        assert_eq!(&rewrite(r"ab(?:\n)?"), r"ab(?:$|)");
        assert_eq!(&rewrite(r"(?<!\n)ab"), r"(?<!$)ab");
        assert_eq!(&rewrite(r"(?<=\n)ab"), r"(?<=$)ab");
    }

    #[test]
    fn can_get_valid_captures_from_regex() {
        let regex = "hello(test)(?=(world))(foo(?P<named>bar))";
        println!("{:?}", regex);
        let valid_indexes = get_consuming_capture_indexes(regex);
        println!("{:?}", valid_indexes);
        assert_eq!(valid_indexes, [0, 1, 3, 4]);
    }

    #[test]
    fn can_get_valid_captures_from_regex2() {
        let regex = "hello(test)[(?=tricked](foo(bar))";
        println!("{:?}", regex);
        let valid_indexes = get_consuming_capture_indexes(regex);
        println!("{:?}", valid_indexes);
        assert_eq!(valid_indexes, [0, 1, 2, 3]);
    }

    #[test]
    fn can_get_valid_captures_from_nested_regex() {
        let regex = "hello(test)(?=(world(?!(te(?<=(st))))))(foo(bar))";
        println!("{:?}", regex);
        let valid_indexes = get_consuming_capture_indexes(regex);
        println!("{:?}", valid_indexes);
        assert_eq!(valid_indexes, [0, 1, 5, 6]);
    }
}
#[macro_use] extern crate log; #[macro_use] extern crate rustful; extern crate env_logger; extern crate rust_captcha; use rustful::{Server, TreeRouter}; use std::error::Error; use std::env; use rust_captcha::requesthandler::{RequestHandler, CaptchaMethod}; fn precondition_checks() -> bool { match env::var("REDIS_HOST") { Err(_) => { error!("Environment variable REDIS_HOST not set."); false }, Ok(_) => true } } fn main() { env_logger::init(); let port: u16 = env::var_os("PORT").unwrap().into_string().unwrap().parse::<u16>().unwrap(); if !precondition_checks() { error!("Failed to start server."); return; } info!("Starting service on port {} ...", port); let ret = Server { handlers: insert_routes! { TreeRouter::new() => { "/new/:difficulty/:max_tries/:ttl" => Post: RequestHandler::new(CaptchaMethod::New), "/solution/:id/:solution" => Post: RequestHandler::new(CaptchaMethod::Solution) } }, host: port.into(), ..Server::default() }.run(); match ret { Ok(_) => { }, Err(e) => error!("Could not start server: {}", e.description()) } }
use std::io::Read;
use std::io::BufReader;
use std::io::Error as IoError;
// NOTE(review): `AsciiExt` has been deprecated since Rust 1.26 (`is_ascii` is
// inherent now) — TODO confirm the minimum supported toolchain before removing.
use std::ascii::AsciiExt;

use color::{ColorType};
use image::{DecodingResult, ImageDecoder, ImageResult, ImageError};

extern crate byteorder;
use self::byteorder::{BigEndian, ByteOrder};

/// PPM decoder
pub struct PPMDecoder<R> {
    reader: BufReader<R>,
    // Image width in pixels, parsed from the header.
    width: u32,
    // Image height in pixels, parsed from the header.
    height: u32,
    // Maximum sample value (maxval); <= 65535 is enforced in `new`.
    maxwhite: u32,
}

impl<R: Read> PPMDecoder<R> {
    /// Create a new decoder that decodes from the stream ```r```
    ///
    /// Parses the header (magic, width, height, maxval) eagerly; only the
    /// binary `P6` format is accepted, plain `P3` is rejected for now.
    pub fn new(read: R) -> ImageResult<PPMDecoder<R>> {
        let mut buf = BufReader::new(read);
        let mut magic: [u8; 2] = [0, 0];
        try!(buf.read_exact(&mut magic[..])); // Skip magic constant
        if magic[0] != b'P' || (magic[1] != b'3' && magic[1] != b'6') {
            return Err(ImageError::FormatError("Expected magic constant for ppm, P3 or P6".to_string()));
        }
        // Remove this once the reader can read plain ppm
        if magic[1] == b'3' {
            return Err(ImageError::FormatError("Plain format is not yet supported".to_string()))
        }

        let width = try!(PPMDecoder::read_next_u32(&mut buf));
        let height = try!(PPMDecoder::read_next_u32(&mut buf));
        let maxwhite = try!(PPMDecoder::read_next_u32(&mut buf));

        if !(maxwhite <= u16::max_value() as u32) {
            return Err(ImageError::FormatError("Image maxval is not less or equal to 65535".to_string()))
        }

        Ok(PPMDecoder {
            reader: buf,
            width: width,
            height: height,
            maxwhite: maxwhite,
        })
    }

    /// Reads a string as well as a single whitespace after it, ignoring comments
    fn read_next_string(reader: &mut BufReader<R>) -> ImageResult<String> {
        let mut bytes = Vec::new();
        // pair input bytes with a bool mask to remove comments:
        // once a `#` is seen, bytes are masked out until the next CR or LF.
        let mark_comments = reader
            .bytes()
            .scan(true, |partof, read| {
                let byte = match read {
                    Err(err) => return Some((*partof, Err(err))),
                    Ok(byte) => byte,
                };
                let cur_enabled = *partof && byte != b'#';
                let next_enabled = cur_enabled || (byte == b'\r' || byte == b'\n');
                *partof = next_enabled;
                return Some((cur_enabled, Ok(byte)));
            });

        for (_, byte) in mark_comments.filter(|ref e| e.0) {
            match byte {
                // Any PPM whitespace terminates the token — but only once we
                // have collected at least one content byte.
                Ok(b'\t') | Ok(b'\n') | Ok(b'\x0b') | Ok(b'\x0c') | Ok(b'\r') | Ok(b' ') => {
                    if !bytes.is_empty() {
                        break // We're done as we already have some content
                    }
                },
                Ok(byte) => {
                    bytes.push(byte);
                },
                Err(_) => break,
            }
        }

        if bytes.is_empty() {
            return Err(ImageError::FormatError("Unexpected eof".to_string()))
        }

        if !bytes.as_slice().is_ascii() {
            return Err(ImageError::FormatError("Non ascii character in preamble".to_string()))
        }

        String::from_utf8(bytes).map_err(|_| ImageError::FormatError("Couldn't read preamble".to_string()))
    }

    /// Reads the next whitespace-delimited token and parses it as a `u32`.
    fn read_next_u32(reader: &mut BufReader<R>) -> ImageResult<u32> {
        let s = try!(PPMDecoder::read_next_string(reader));
        s.parse::<u32>().map_err(|_| ImageError::FormatError("Invalid number in preamble".to_string()))
    }
}

impl<R: Read> ImageDecoder for PPMDecoder<R> {
    fn dimensions(&mut self) -> ImageResult<(u32, u32)> {
        Ok((self.width, self.height))
    }

    fn colortype(&mut self) -> ImageResult<ColorType> {
        // Sample width follows from maxval: 1 byte up to 255, 2 bytes above.
        match self.bytewidth() {
            1 => Ok(ColorType::RGB(8)),
            2 => Ok(ColorType::RGB(16)),
            _ => Err(ImageError::FormatError("Don't know how to decode PPM with more than 16 bits".to_string())),
        }
    }

    fn row_len(&mut self) -> ImageResult<usize> {
        // 3 samples (RGB) per pixel, `bytewidth` bytes per sample.
        Ok((self.width*3*self.bytewidth()) as usize)
    }

    fn read_scanline(&mut self, _buf: &mut [u8]) -> ImageResult<u32> {
        unimplemented!();
    }

    fn read_image(&mut self) -> ImageResult<DecodingResult> {
        // Checked multiplication so a malicious header cannot overflow the
        // buffer-size computation.
        let opt_size = self.width.checked_mul(self.height)
            .map_or(None, |v| v.checked_mul(3))
            .map_or(None, |v| v.checked_mul(self.bytewidth()));
        let size = match opt_size {
            Some(v) => v,
            None => return Err(ImageError::DimensionError),
        };
        let mut data = vec![0 as u8; size as usize];
        match self.reader.read_exact(&mut data) {
            Ok(_) => {},
            Err(e) => return Err(ImageError::IoError(e)),
        };
        if self.bytewidth() == 1 {
            Ok(DecodingResult::U8(data))
        } else {
            // 16-bit samples are stored big-endian per pair of input bytes.
            let mut out = vec![0 as u16; (self.width*self.height*3) as usize];
            for (o, i) in out.chunks_mut(1).zip(data.chunks(2)) {
                o[0] = BigEndian::read_u16(i);
            }
            Ok(DecodingResult::U16(out))
        }
    }
}

impl<R: Read>
PPMDecoder<R> {
    /// Bytes per sample: 1 when maxval fits in a byte, 2 otherwise.
    fn bytewidth(&self) -> u32 {
        if self.maxwhite < 256 { 1 } else { 2 }
    }
}

/// Tests parsing binary buffers were written based on and validated against `pamfile` from
/// netpbm (http://netpbm.sourceforge.net/).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn minimal_form() {
        // Violates current specification (October 2016 ) but accepted by both netpbm and ImageMagick
        decode_minimal_image(&b"P61 1 255 123"[..]);
        decode_minimal_image(&b"P6 1 1 255 123"[..]);
        decode_minimal_image(&b"P6 1 1 255 123\xFF"[..]); // Too long should not be an issue
    }

    #[test]
    fn comment_in_token() {
        decode_minimal_image(&b"P6 1 1 2#comment\n55 123"[..]); // Terminating LF
        decode_minimal_image(&b"P6 1 1 2#comment\r55 123"[..]); // Terminating CR
        decode_minimal_image(&b"P6 1 1#comment\n 255 123"[..]); // Comment after token
        decode_minimal_image(&b"P6 1 1 #comment\n255 123"[..]); // Comment before token
        decode_minimal_image(&b"P6#comment\n 1 1 255 123"[..]); // Begin of header
        decode_minimal_image(&b"P6 1 1 255#comment\n 123"[..]); // End of header
    }

    #[test]
    fn whitespace() {
        decode_minimal_image(&b"P6\x091\x091\x09255\x09123"[..]); // TAB
        decode_minimal_image(&b"P6\x0a1\x0a1\x0a255\x0a123"[..]); // LF
        decode_minimal_image(&b"P6\x0b1\x0b1\x0b255\x0b123"[..]); // VT
        decode_minimal_image(&b"P6\x0c1\x0c1\x0c255\x0c123"[..]); // FF
        decode_minimal_image(&b"P6\x0d1\x0d1\x0d255\x0d123"[..]); // CR
        // Spaces tested before
        decode_minimal_image(&b"P61\x09\x0a\x0b\x0c\x0d1 255 123"[..]); // All whitespace, combined
    }

    /// Decodes `encoded`, which is expected to be the ppm encoding of a 1x1
    /// image whose pixel data is the bytes b'1', b'2', b'3' (i.e. 49, 50, 51),
    /// and asserts header, color type, row length and decoded content.
    fn decode_minimal_image(encoded: &[u8]) {
        let content = vec![49 as u8, 50, 51];
        let mut decoder = PPMDecoder::new(encoded).unwrap();

        assert_eq!(decoder.dimensions().unwrap(), (1, 1));
        assert_eq!(decoder.colortype().unwrap(), ColorType::RGB(8));
        assert_eq!(decoder.row_len().unwrap(), 3);
        assert_eq!(decoder.bytewidth(), 1);

        match decoder.read_image().unwrap() {
            DecodingResult::U8(image) => assert_eq!(image, content),
            _ => assert!(false),
        }
    }

    #[test]
    fn wrong_tag() {
        assert!(PPMDecoder::new(&b"P5 1 1 255 1"[..]).is_err());
    }

    #[test]
    fn invalid_characters() {
        assert!(PPMDecoder::new(&b"P6 1chars1 255 1"[..]).is_err()); // No text outside of comments
        assert!(PPMDecoder::new(&b"P6 1\xFF1 255 1"[..]).is_err()); // No invalid ascii chars
        assert!(PPMDecoder::new(&b"P6 0x01 1 255 1"[..]).is_err()); // Numbers only as decimal
    }

    /// These violate the narrow specification of ppm but are commonly supported in other programs.
    /// Fail fast and concise is important here as these might be received as input files.
    #[test]
    fn unsupported_extensions() {
        assert!(PPMDecoder::new(&b"P6 1 1 65536 1"[..]).is_err()); // No bitwidth above 16
    }
}
// NOTE(review): this module follows the svd2rust register-accessor layout
// (R/W wrappers plus per-field read/write proxies), so it is presumably
// machine-generated — prefer regenerating from the SVD over hand-editing.
#[doc = "Reader of register BIST_ADDR_START"]
pub type R = crate::R<u32, super::BIST_ADDR_START>;
#[doc = "Writer for register BIST_ADDR_START"]
pub type W = crate::W<u32, super::BIST_ADDR_START>;
#[doc = "Register BIST_ADDR_START `reset()`'s with value 0"]
impl crate::ResetValue for super::BIST_ADDR_START {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `COL_ADDR_START`"]
pub type COL_ADDR_START_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `COL_ADDR_START`"]
pub struct COL_ADDR_START_W<'a> {
    w: &'a mut W,
}
impl<'a> COL_ADDR_START_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits [15:0], then OR in the masked new value.
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
#[doc = "Reader of field `ROW_ADDR_START`"]
pub type ROW_ADDR_START_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `ROW_ADDR_START`"]
pub struct ROW_ADDR_START_W<'a> {
    w: &'a mut W,
}
impl<'a> ROW_ADDR_START_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits [31:16], then OR in the masked new value shifted into place.
        self.w.bits = (self.w.bits & !(0xffff << 16)) | (((value as u32) & 0xffff) << 16);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15 - Column start address. Useful to apply BIST to a part of an Flash. The value of this field should be in a legal range (a value outside of the legal range has an undefined result, and may lock up the BIST state machine). This legal range is dependent on the number of columns of the SRAM the BIST is applied to (as specified by BIST_CTL.SRAMS_ENABLED). E.g. for a Flash with n columns, the legal range is \\[0, n-1\\]."]
    #[inline(always)]
    pub fn col_addr_start(&self) -> COL_ADDR_START_R {
        COL_ADDR_START_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bits 16:31 - Row start address. Useful to apply BIST to a part of an Flash. The value of this field should be in a legal range (a value outside of the legal range has an undefined result, and may lock up the BIST state machine). This legal range is dependent on the number of rows of the SRAM the BIST is applied to (as specified by BIST_CTL.SRAMS_ENABLED). E.g. for a Flash with m columns, the legal range is \\[0, m-1\\]."]
    #[inline(always)]
    pub fn row_addr_start(&self) -> ROW_ADDR_START_R {
        ROW_ADDR_START_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - Column start address. Useful to apply BIST to a part of an Flash. The value of this field should be in a legal range (a value outside of the legal range has an undefined result, and may lock up the BIST state machine). This legal range is dependent on the number of columns of the SRAM the BIST is applied to (as specified by BIST_CTL.SRAMS_ENABLED). E.g. for a Flash with n columns, the legal range is \\[0, n-1\\]."]
    #[inline(always)]
    pub fn col_addr_start(&mut self) -> COL_ADDR_START_W {
        COL_ADDR_START_W { w: self }
    }
    #[doc = "Bits 16:31 - Row start address. Useful to apply BIST to a part of an Flash. The value of this field should be in a legal range (a value outside of the legal range has an undefined result, and may lock up the BIST state machine). This legal range is dependent on the number of rows of the SRAM the BIST is applied to (as specified by BIST_CTL.SRAMS_ENABLED). E.g. for a Flash with m columns, the legal range is \\[0, m-1\\]."]
    #[inline(always)]
    pub fn row_addr_start(&mut self) -> ROW_ADDR_START_W {
        ROW_ADDR_START_W { w: self }
    }
}
use std::{ convert::TryInto, net::{IpAddr, SocketAddr}, }; use bytes::{BufMut, BytesMut}; use num_enum::{FromPrimitive, IntoPrimitive, TryFromPrimitive}; use std::net::ToSocketAddrs; use tokio::{ io::{AsyncRead, AsyncReadExt}, net::TcpStream, }; // Version of SOCKS proxy protocol pub const SOCKS_VERSION: u8 = 0x05; // The reserved bit defined per RFC pub const RESERVED: u8 = 0x00; // SOCK5 CMD Type #[derive(Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u8)] pub enum Command { Connect = 0x01, // Currently unsupported Bind = 0x02, // Currently unsupported UdpAssosiate = 0x3, } // Client Authentication Methods #[derive(Debug, Eq, PartialEq, FromPrimitive, IntoPrimitive)] #[repr(u8)] pub enum AuthMethods { // No Authentication NoAuth = 0x00, // UNSUPPORTED GssApi = 0x01, // Authenticate with a username / password UserPass = 0x02, // Methods other than these #[num_enum(default)] Others, // Cannot authenticate NoMethods = 0xFF, } // Data structure for the client's init message pub struct InitReq { pub version: u8, // Methods pub mtds: Vec<AuthMethods>, } impl InitReq { pub async fn read<T>(reader: &mut T) -> anyhow::Result<Self> where T: AsyncRead + Unpin, { let version = reader.read_u8().await?; let mtd_count = reader.read_u8().await?; let mut mtds = vec![0; mtd_count as usize]; reader.read_exact(&mut mtds).await?; let mtds = mtds .into_iter() .map(|x| x.into()) .collect::<Vec<AuthMethods>>(); Ok(Self { version, mtds }) } } // Data structure for the client's init message pub struct InitReply { pub version: u8, // Methods pub mtd: AuthMethods, } impl InitReply { pub fn no_method() -> Self { Self { version: SOCKS_VERSION, mtd: AuthMethods::NoMethods, } } pub fn method(mtd: AuthMethods) -> Self { Self { version: SOCKS_VERSION, mtd: mtd, } } } impl From<InitReply> for BytesMut { fn from(reply: InitReply) -> Self { let mut buf = BytesMut::new(); buf.put_u8(reply.version); buf.put_u8(reply.mtd.into()); buf } } // The response code used by cmd reply 
#[derive(Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u8)] pub enum RespCode { Success = 0x00, // general SOCKS server failure GeneralFailure = 0x01, // connection not allowed by ruleset RuleFailure = 0x02, NetworkUnreachable = 0x03, HostUnreachable = 0x04, ConnectionRefused = 0x05, TtlExpired = 0x06, CommandNotSupported = 0x07, AddrTypeNotSupported = 0x08, } // addr variant types #[derive(Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u8)] pub enum AddrType { V4 = 0x01, Domain = 0x03, V6 = 0x04, } impl From<IpAddr> for AddrType { fn from(ip: IpAddr) -> Self { match ip { IpAddr::V4(_) => Self::V4, IpAddr::V6(_) => Self::V6, } } } // The aggregated type for address (e.g. IPv4 and IPv6 addresses). #[derive(Debug, Eq, PartialEq)] pub struct Addr { // Address Type pub atyp: AddrType, pub addr: SocketAddr, } impl Addr { pub async fn read<T>(reader: &mut T) -> anyhow::Result<Self> where T: AsyncRead + Unpin, { let atyp: AddrType = reader.read_u8().await?.try_into()?; let addr: SocketAddr = match atyp { AddrType::V4 => { let mut ipaddr = [0u8; 4]; reader.read_exact(&mut ipaddr).await?; let ipaddr: IpAddr = ipaddr.into(); (ipaddr, reader.read_u16().await?).into() } // TODO: Can we do it better here? 
AddrType::Domain => { let len = reader.read_u8().await?; let mut domain = vec![0u8; len as usize]; reader.read_exact(&mut domain).await?; let mut domain = String::from_utf8_lossy(&domain[..]).to_string(); domain.push_str(&":"); domain.push_str(&reader.read_u16().await?.to_string()); domain.to_socket_addrs()?.collect::<Vec<SocketAddr>>()[0] } AddrType::V6 => { let mut ipaddr = [0u8; 16]; reader.read_exact(&mut ipaddr).await?; let ipaddr: IpAddr = ipaddr.into(); (ipaddr, reader.read_u16().await?).into() } }; Ok(Self { atyp, addr }) } } impl From<SocketAddr> for Addr { fn from(socket: SocketAddr) -> Self { Self { atyp: socket.ip().into(), addr: socket, } } } impl From<Addr> for BytesMut { fn from(addr: Addr) -> BytesMut { let mut buf = BytesMut::new(); buf.put_u8(addr.atyp.into()); match addr.addr.ip() { IpAddr::V4(ip) => buf.put_slice(&mut ip.octets()), IpAddr::V6(ip) => buf.put_slice(&mut ip.octets()), }; buf.put_u16(addr.addr.port()); buf } } // Command request, last step before transmiting data! 
#[derive(Debug, Eq, PartialEq)] pub struct CmdRequest { pub version: u8, pub command: Command, pub addr: Addr, } impl CmdRequest { pub async fn read<T>(reader: &mut T) -> anyhow::Result<Self> where T: AsyncRead + Unpin, { let version = reader.read_u8().await?; let command = reader.read_u8().await?.try_into()?; // read the reserved bit reader.read_u8().await?; let addr = Addr::read(reader).await?; Ok(Self { version, command, addr, }) } } impl From<CmdRequest> for BytesMut { fn from(req: CmdRequest) -> Self { let mut buf = BytesMut::new(); let addr: BytesMut = req.addr.into(); buf.put_u8(req.version); buf.put_u8(RESERVED); buf.put_u8(req.command.into()); buf.extend_from_slice(&addr); buf } } // Reply for the Cmd request #[derive(Debug, Eq, PartialEq)] pub struct CmdReply { pub version: u8, pub code: RespCode, pub addr: Addr, } impl CmdReply { pub fn failure(code: RespCode) -> Self { Self { version: SOCKS_VERSION, code, addr: Addr { atyp: AddrType::V4, addr: "127.0.0.1:1080".parse().unwrap(), }, } } pub fn success(addr: Addr) -> Self { Self { version: SOCKS_VERSION, code: RespCode::Success, addr, } } pub async fn read(reader: &mut TcpStream) -> anyhow::Result<Self> { Ok(Self { version: reader.read_u8().await?, code: reader.read_u8().await?.try_into()?, addr: Addr::read(reader).await?, }) } } impl From<CmdReply> for BytesMut { fn from(req: CmdReply) -> Self { let mut buf = BytesMut::new(); let addr: BytesMut = req.addr.into(); buf.put_u8(req.version); buf.put_u8(req.code.into()); buf.put_u8(RESERVED); buf.extend_from_slice(&addr); buf } } #[cfg(test)] mod tests { use super::{Addr, AddrType}; use bytes::BytesMut; #[test] fn addr_parse() { let buf: BytesMut = Addr { atyp: AddrType::V4, addr: ([203, 107, 42, 43], 443).into(), } .into(); let buf: Vec<u8> = buf.to_vec(); assert_eq!(vec![1, 203, 107, 42, 43, 1, 187], buf) } }