text stringlengths 8 4.13M |
|---|
// VGA library for BIOS-mode
use prelude::*;
use core::fmt;
use core::intrinsics::{volatile_store, volatile_load};
/// The 16 standard VGA text-mode colors; the discriminant is the 4-bit
/// hardware color code used in a cell's attribute byte.
#[repr(u8)] #[derive(Clone, Copy, PartialEq, Eq)]
pub enum VgaColor {
Black = 0,
Blue = 1,
Green = 2,
Cyan = 3,
Red = 4,
Magenta = 5,
Brown = 6,
LightGrey = 7,
DarkGrey = 8,
LightBlue = 9,
LightGreen = 10,
LightCyan = 11,
LightRed = 12,
LightMagenta = 13,
LightBrown = 14,
White = 15
}
#[repr(u16)]
#[packed]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct VgaEntry {
pub attr: u8,
pub ch: u8,
}
impl fmt::Display for VgaEntry {
    /// Formats the entry as its decimal 16-bit VGA value:
    /// attribute byte in the high 8 bits, character byte in the low 8 bits.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let packed = ((self.attr as u16) << 8) | self.ch as u16;
        write!(f, "{}", packed)
    }
}
//= 0xb8000 as *mut VgaEntry
// Screen information
// Standard BIOS text-mode screen: 80 columns by 25 rows.
const VGA_HEIGHT: isize = 25;
const VGA_WIDTH: isize = 80;
// Row-major offset of the final cell on screen.
const LAST_OFFSET: isize = VGA_HEIGHT * VGA_WIDTH - 1;
// Cursor positioning
/// Writer over the memory-mapped VGA text buffer, tracking a linear cursor
/// and the current foreground/background colors.
pub struct VgaFrameBuffer {
// Raw pointer to the memory-mapped text buffer at 0xb8000.
screen: *mut VgaEntry,
// Linear cursor position in cells (row-major); next cell to be written.
offset: isize,
// Foreground color applied to newly written characters.
pub fg: VgaColor,
// Background color applied to newly written characters.
pub bg: VgaColor
}
impl VgaFrameBuffer {
/// Creates a frame buffer over the BIOS text region at 0xb8000 with the
/// cursor at the top-left corner and green-on-black colors.
pub fn new() -> VgaFrameBuffer {
VgaFrameBuffer {screen: 0xb8000 as *mut VgaEntry,
offset: 0,
fg: VgaColor::Green,
bg: VgaColor::Black,
}
}
// Clear the VGA Frame Buffer
/// Blanks every cell (attribute 0, character 0) and resets the cursor.
pub fn clear(&mut self) {
let empty = VgaEntry {attr: 0, ch: 0};
for i in 0..VGA_HEIGHT {
for j in 0..VGA_WIDTH {
let offs = i * VGA_WIDTH + j;
unsafe {
// Volatile store: the buffer is memory-mapped hardware, so
// the compiler must not elide or reorder the write.
volatile_store(self.screen.offset(offs), empty)
}
}
}
self.offset = 0;
}
// Scroll the VGA Frame Buffer by one line
/// Copies every row one row up, blanks the bottom row, and moves the
/// cursor back one full row so output continues on the new last line.
pub fn scroll(&mut self) {
for i in 1..VGA_HEIGHT {
for j in 0..VGA_WIDTH {
let offs = i * VGA_WIDTH + j;
let prior = (i-1)* VGA_WIDTH + j;
unsafe {
// Volatile load+store keeps the hardware accesses intact.
let entry = volatile_load(self.screen.offset(offs));
volatile_store(self.screen.offset(prior), entry);
}
}
}
// Clear the final row
let i = VGA_HEIGHT - 1;
let empty = VgaEntry {attr: 0, ch: 0};
for j in 0..VGA_WIDTH {
let offs = i * VGA_WIDTH + j;
unsafe {
volatile_store(self.screen.offset(offs), empty);
}
}
// set the cursor back to the beginning of a row
self.offset -= VGA_WIDTH;
}
}
impl fmt::Write for VgaFrameBuffer {
    /// Writes a whole string by forwarding every character to `write_char`.
    ///
    /// BUGFIX: the original body was an empty `unsafe {}` stub that silently
    /// discarded the string, so `write!`/`writeln!` never reached the screen.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        for c in s.chars() {
            self.write_char(c)?;
        }
        Ok(())
    }
    /// Writes one character at the cursor using the current colors, advances
    /// the cursor, and scrolls once the end of the screen is passed.
    fn write_char(&mut self, c: char) -> fmt::Result {
        // Attribute byte: low nibble = foreground, high nibble = background.
        let colr = (self.fg as u8) | ((self.bg as u8) << 4);
        let entry = VgaEntry{attr: colr, ch: c as u8};
        unsafe {
            // Volatile: the frame buffer is memory-mapped hardware.
            volatile_store(self.screen.offset(self.offset), entry);
        }
        self.offset += 1;
        if self.offset > LAST_OFFSET {
            self.scroll();
        }
        Ok(())
    }
}
|
use std::fs;
use std::collections::VecDeque;
use std::collections::BTreeMap;
/// An Intcode virtual machine (Advent of Code 2019).
struct Computer{
// Instruction pointer: address of the opcode currently being executed.
ip: usize,
// Sparse memory; absent addresses read as 0.
program: BTreeMap<usize, i64>,
// Relative base used by addressing mode 2.
relbase: i64,
// Queue of pending input values for opcode 3.
input: VecDeque<i64>
}
impl Computer {
    /// Creates a machine with the instruction pointer and relative base at 0.
    fn new(program: BTreeMap<usize, i64>, input: VecDeque<i64>) -> Computer {
        Computer { ip: 0, program: program, relbase: 0, input: input}
    }
    /// Reads the *value* of the parameter at `ip + shift`, honoring its
    /// addressing mode (0 = position, 1 = immediate, 2 = relative).
    /// Unwritten memory reads as 0.
    fn mode(&mut self, shift: usize) -> i64 {
        // The mode digit for parameter `shift` sits at 10^(shift + 1) of the opcode.
        let modes_num = self.program.get(&self.ip).unwrap_or(&0) / 10_i64.pow(shift as u32 + 1) % 10;
        match modes_num {
            0 => { *(self.program.get(&(*(self.program.get(&(self.ip + shift)).unwrap_or(&0)) as usize)).unwrap_or(&0)) }
            1 => { *(self.program.get(&(self.ip + shift)).unwrap()) }
            2 => { *(self.program.get(&((*(self.program.get(&(self.ip + shift)).unwrap_or(&0)) + self.relbase) as usize)).unwrap_or(&0)) }
            _ => { panic!(); }
        }
    }
    /// Resolves the *destination address* of the parameter at `ip + shift`.
    /// Write targets are never in immediate mode, hence only modes 0 and 2.
    fn pos_mode(&mut self, shift: usize) -> i64 {
        let modes_num = self.program.get(&self.ip).unwrap_or(&0) / 10_i64.pow(shift as u32 + 1) % 10;
        match modes_num {
            0 => { *(self.program.get(&(self.ip + shift)).unwrap()) }
            2 => { (*(self.program.get(&(self.ip + shift)).unwrap_or(&0)) + self.relbase) as i64 }
            _ => { panic!(); }
        }
    }
    /// Runs until halt (opcode 99) and returns everything the program
    /// emitted via opcode 4.
    fn execute(&mut self) -> Vec<i64> {
        let mut result: Vec<i64> = Vec::new();
        loop {
            match self.program.get(&self.ip).unwrap()%100 {
                // add
                1 => {
                    let (op1, op2, op3) = (self.mode(1), self.mode(2), self.pos_mode(3));
                    self.program.insert(op3 as usize, op1 + op2);
                    self.ip += 4;
                },
                // multiply
                2 => {
                    let (op1, op2, op3) = (self.mode(1), self.mode(2), self.pos_mode(3));
                    self.program.insert(op3 as usize, op1 * op2);
                    self.ip += 4;
                }
                // input (defaults to 1 when the queue is empty)
                3 => {
                    // BUGFIX: the destination is the *first* parameter of this
                    // two-cell instruction; the original read `pos_mode(3)`,
                    // which decoded the wrong cell and the wrong mode digit.
                    let dest = self.pos_mode(1);
                    self.program.insert(dest as usize, self.input.pop_front().unwrap_or(1));
                    self.ip += 2;
                }
                // output
                4 => {
                    let op1 = self.mode(1);
                    self.ip += 2;
                    result.push(op1);
                }
                // jump-if-true
                5 => {
                    let (op1, op2) = (self.mode(1), self.mode(2));
                    self.ip = if op1 != 0 { op2 as usize } else { self.ip + 3 }
                }
                // jump-if-false
                6 => {
                    let (op1, op2) = (self.mode(1), self.mode(2));
                    self.ip = if op1 == 0 { op2 as usize } else { self.ip + 3 }
                }
                // less-than
                7 => {
                    let (op1, op2, op3) = (self.mode(1), self.mode(2), self.pos_mode(3));
                    self.program.insert(op3 as usize, if op1 < op2 { 1 } else { 0 });
                    self.ip += 4;
                }
                // equals
                8 => {
                    let (op1, op2, op3) = (self.mode(1), self.mode(2), self.pos_mode(3));
                    self.program.insert(op3 as usize, if op1 == op2 { 1 } else { 0 });
                    self.ip += 4;
                }
                // adjust relative base
                9 => {
                    let op1 = self.mode(1);
                    self.ip += 2;
                    self.relbase += op1;
                }
                // halt
                99 => {
                    return result;
                }
                _ => { panic!() }
            }
        }
    }
}
/// Part 1: run the program with the single input value 1 and return its output.
fn part1(programvec: Vec<i64>) -> Vec<i64> {
    let program = programvec
        .iter()
        .enumerate()
        .map(|(i, x)| (i, *x))
        .collect::<BTreeMap<usize, i64>>();
    let mut input = VecDeque::new();
    input.push_back(1);
    Computer::new(program, input).execute()
}
/// Part 2: run the program with the single input value 2 and return its output.
fn part2(programvec: Vec<i64>) -> Vec<i64> {
    let program = programvec
        .iter()
        .enumerate()
        .map(|(i, x)| (i, *x))
        .collect::<BTreeMap<usize, i64>>();
    let mut input = VecDeque::new();
    input.push_back(2);
    Computer::new(program, input).execute()
}
fn main() {
let contents = fs::read_to_string("day9.txt").expect("Could not read file");
let program : Vec<i64> = contents.split(",").map(|i| i.parse::<i64>().unwrap()).collect();
println!("{:?}", part1(program.clone()));
println!("{:?}", part2(program.clone()));
} |
use alloc::boxed::Box;
use collections::vec::Vec;
use core::intrinsics::volatile_load;
use core::mem;
use drivers::io::{Io, Mmio};
use drivers::pci::config::PciConfig;
use arch::context::context_switch;
use fs::KScheme;
use super::{Hci, Packet, Pipe, Setup};
/// OHCI general transfer descriptor (TD): a hardware-defined record holding
/// condition/direction flags, the physical data-buffer range, and a link to
/// the next TD. `repr(packed)` keeps the exact layout the controller expects.
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Gtd {
/// Condition code and control bits (see how `msg` builds and polls them).
flags: u32,
/// Physical address of the first data byte, or 0 for a zero-length transfer.
buffer: u32,
/// Physical address of the next TD in the chain; 0 terminates the chain.
next: u32,
/// Physical address of the last byte of the data buffer.
end: u32,
}
/// OHCI endpoint descriptor (ED): addresses one device endpoint and brackets
/// its queued TD chain via `head`/`tail`. Layout must match the hardware.
#[repr(packed)]
#[derive(Copy, Clone, Debug, Default)]
struct Ed {
/// Endpoint/address and control bits (see construction in `msg`).
flags: u32,
/// Physical address one past the last queued TD.
tail: u32,
/// Physical address of the first queued TD.
head: u32,
/// Physical address of the next ED; 0 terminates the list.
next: u32,
}
// Bits of the `control` register (HcControl in the OHCI spec).
const CTRL_CBSR: u32 = 0b11;
const CTRL_PLE: u32 = 1 << 2;
const CTRL_IE: u32 = 1 << 3;
const CTRL_CLE: u32 = 1 << 4;
const CTRL_BLE: u32 = 1 << 5;
const CTRL_HCFS: u32 = 0b11 << 6;
const CTRL_IR: u32 = 1 << 8;
const CTRL_RWC: u32 = 1 << 9;
const CTRL_RWE: u32 = 1 << 10;
// Bits of the `cmd_sts` register (HcCommandStatus).
const CMD_STS_HCR: u32 = 1;
const CMD_STS_CLF: u32 = 1 << 1;
const CMD_STS_BLF: u32 = 1 << 2;
const CMD_STS_OCR: u32 = 1 << 3;
// Bits of the per-port `port_sts` registers (HcRhPortStatus).
const PORT_STS_CCS: u32 = 1;
const PORT_STS_PES: u32 = 1 << 1;
const PORT_STS_PSS: u32 = 1 << 2;
const PORT_STS_POCI: u32 = 1 << 3;
const PORT_STS_PPS: u32 = 1 << 8;
const PORT_STS_LSDA: u32 = 1 << 9;
const PORT_STS_CSC: u32 = 1 << 16;
const PORT_STS_PESC: u32 = 1 << 17;
const PORT_STS_PSSC: u32 = 1 << 18;
const PORT_STS_OCIC: u32 = 1 << 19;
const PORT_STS_PRSC: u32 = 1 << 20;
/// The OHCI operational register file, memory-mapped at the address taken
/// from PCI BAR0. Field order mirrors the device layout, hence `repr(packed)`.
#[repr(packed)]
pub struct OhciRegs {
pub revision: Mmio<u32>,
pub control: Mmio<u32>,
pub cmd_sts: Mmio<u32>,
pub int_sts: Mmio<u32>,
pub int_en: Mmio<u32>,
pub int_dis: Mmio<u32>,
/// Physical address of the HCCA shared-memory block.
pub hcca: Mmio<u32>,
pub period_current: Mmio<u32>,
/// Head of the control ED list (written by `msg`).
pub control_head: Mmio<u32>,
pub control_current: Mmio<u32>,
pub bulk_head: Mmio<u32>,
pub bulk_current: Mmio<u32>,
pub done_head: Mmio<u32>,
pub fm_interval: Mmio<u32>,
pub fm_remain: Mmio<u32>,
pub fm_num: Mmio<u32>,
pub periodic_start: Mmio<u32>,
pub ls_thresh: Mmio<u32>,
pub rh_desc_a: Mmio<u32>,
pub rh_desc_b: Mmio<u32>,
pub rh_sts: Mmio<u32>,
/// Root-hub port status/control, one register per port.
pub port_sts: [Mmio<u32>; 15],
}
/// Host Controller Communications Area: shared memory the controller uses for
/// the periodic ED table, frame number, and done-queue head.
#[repr(packed)]
pub struct OhciHcca {
pub interrupt_table: [u32; 32],
pub frame_number: u16,
pub padding: u16,
pub done_head: u32,
/// Remaining space of the region reserved for the controller's use.
pub reserved: [u8; 116],
}
/// Driver state for a single OHCI controller instance.
pub struct Ohci {
/// Memory-mapped operational registers (from PCI BAR0).
pub regs: &'static mut OhciRegs,
/// The HCCA block shared with the controller.
pub hcca: Box<OhciHcca>,
/// PCI interrupt line assigned to this device.
pub irq: u8,
}
impl KScheme for Ohci {
/// IRQ hook: reacts only to this controller's own interrupt line.
/// Currently a stub — the debug print is commented out.
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
// d("OHCI IRQ\n");
}
}
}
impl Ohci {
/// Enables PCI bus mastering, maps the register file from BAR0, allocates
/// the HCCA, reads the IRQ line from PCI config, and runs `init`.
pub unsafe fn new(mut pci: PciConfig) -> Box<Self> {
pci.flag(4, 4, true); // Bus mastering
// BAR0's low 4 bits are flag bits; mask them off to get the MMIO base.
let base = pci.read(0x10) as usize & 0xFFFFFFF0;
let regs = &mut *(base as *mut OhciRegs);
let mut module = box Ohci {
regs: regs,
hcca: box OhciHcca {
interrupt_table: [0; 32],
frame_number: 0,
padding: 0,
done_head: 0,
reserved: [0; 116],
},
// Interrupt line lives in PCI config offset 0x3C.
irq: pci.read(0x3C) as u8 & 0xF,
};
module.init();
return module;
}
/// Logs the controller's location and IRQ. The actual bring-up sequence
/// (HCCA registration, functional-state transition, port enumeration) is
/// still disabled in the comment block below.
pub unsafe fn init(&mut self) {
syslog_info!(" + OHCI on: {:X}, IRQ: {:X}", (self.regs as *mut OhciRegs) as usize, self.irq);
/*
self.regs.hcca.write((&*self.hcca as *const OhciHcca) as u32);
debugln!("Enable: {:X}", self.regs.control.read());
loop {
let ctrl = self.regs.control.read();
let desired_ctrl = (ctrl & (0xFFFFFFFF - CTRL_HCFS)) | 0b10 << 6;
if ctrl != desired_ctrl {
self.regs.control.write(desired_ctrl);
} else {
break;
}
}
debugln!("CTRL: {:X} CMDSTS: {:X} HCCA: {:X}", self.regs.control.read(), self.regs.cmd_sts.read(), self.regs.hcca.read());
let ndp = self.regs.rh_desc_a.read() & 0xF;
for i in 0..ndp as usize {
debugln!("Port {}: {:X}", i, self.regs.port_sts[i].read());
if self.regs.port_sts[i].readf(PORT_STS_CCS) {
debugln!(" Device Found");
while ! self.regs.port_sts[i].readf(PORT_STS_PES) {
self.regs.port_sts[i].writef(PORT_STS_PES, true);
}
self.device(i as u8 + 1);
}
}
*/
}
}
impl Hci for Ohci {
/// Builds a transfer-descriptor chain for `msgs`, queues it on the control
/// list for `address`/`endpoint`, and waits (yielding via `context_switch`)
/// until the controller retires every TD. Returns the total number of
/// buffer bytes spanned by successfully completed TDs.
fn msg(&mut self, address: u8, endpoint: u8, _pipe: Pipe, msgs: &[Packet]) -> usize {
let mut tds = Vec::new();
// One TD per packet. `0b1111 << 28` initializes the condition-code field
// to "not accessed"; the controller rewrites it on completion. The bits
// at << 19 differ per packet kind (setup/in/out), and `buffer`/`end`
// hold the first/last byte addresses of the data (0/0 for empty data).
for msg in msgs.iter() {
match *msg {
Packet::Setup(setup) => tds.push(Gtd {
flags: 0b1111 << 28 | 0b00 << 19 | 1 << 18,
buffer: (setup as *const Setup) as u32,
next: 0,
end: (setup as *const Setup) as u32 + mem::size_of::<Setup>() as u32 - 1
}),
Packet::In(ref data) => tds.push(Gtd {
flags: 0b1111 << 28 | 0b10 << 19 | 1 << 18,
buffer: if data.is_empty() {
0
} else {
data.as_ptr() as u32
},
next: 0,
end: if data.is_empty() {
0
} else {
data.as_ptr() as u32 + data.len() as u32 - 1
}
}),
Packet::Out(ref data) => tds.push(Gtd {
flags: 0b1111 << 28 | 0b01 << 19 | 1 << 18,
buffer: if data.is_empty() {
0
} else {
data.as_ptr() as u32
},
next: 0,
end: if data.is_empty() {
0
} else {
data.as_ptr() as u32 + data.len() as u32 - 1
}
})
}
}
let mut count = 0;
if ! tds.is_empty() {
// Link the TDs into a singly linked chain of physical addresses.
for i in 0..tds.len() - 1 {
tds[i].next = (&tds[i + 1] as *const Gtd) as u32;
}
// Endpoint descriptor targeting this endpoint/address; head points at
// the first TD and tail one past the last (the hardware stops there).
// NOTE(review): `8 << 16` is presumably the max-packet-size field —
// check against the OHCI ED format before changing.
let ed = box Ed {
flags: 8 << 16 | (endpoint as u32) << 7 | address as u32,
tail: (tds.last().unwrap() as *const Gtd) as u32 + mem::size_of::<Gtd>() as u32,
head: (tds.first().unwrap() as *const Gtd) as u32,
next: 0
};
//debugln!("ED: {:X}, FLG: {:X}, TAIL: {:X}, HEAD: {:X}, NEXT: {:X}", (&*ed as *const Ed) as usize, ed.flags, ed.tail, ed.head, ed.next);
// Enable control-list processing and mark the control list filled.
while ! self.regs.control.readf(CTRL_CLE) {
self.regs.control.writef(CTRL_CLE, true);
}
self.regs.control_head.write((&*ed as *const Ed) as u32);
while ! self.regs.cmd_sts.readf(CMD_STS_CLF) {
self.regs.cmd_sts.writef(CMD_STS_CLF, true);
}
for td in tds.iter() {
//debugln!(" TD: {:X}, FLG: {:X}, BUF: {:X}, NEXT: {:X}, END: {:X}", (td as *const Gtd) as usize, td.flags, td.buffer, td.next, td.end);
// Spin (yielding the CPU each pass) while the condition code still
// reads "not accessed", i.e. the controller hasn't retired this TD.
while unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28 == 0b1111 << 28 {
unsafe { context_switch() };
}
let condition = (unsafe { volatile_load(td as *const Gtd).flags } & 0b1111 << 28) >> 28;
if condition != 0 {
// Non-zero condition code = transfer error; stop processing TDs.
//debugln!(" /TD: {:X}, FLG: {:X}, BUF: {:X}, NEXT: {:X}, END: {:X}", (td as *const Gtd) as usize, td.flags, td.buffer, td.next, td.end);
debugln!("Condition: {:X}", condition);
break;
} else {
// NOTE(review): this credits the TD's full buffer span, not any
// residual the controller reports — confirm for short packets.
count += (td.end - td.buffer) as usize;
}
}
/*
while self.regs.cmd_sts.readf(CMD_STS_CLF) {
self.regs.cmd_sts.writef(CMD_STS_CLF, false);
}
*/
// Tear down: disable the control list and clear its head pointer.
while self.regs.control.readf(CTRL_CLE) {
self.regs.control.writef(CTRL_CLE, false);
}
self.regs.control_head.write(0);
//debugln!("/ED: {:X}, FLG: {:X}, TAIL: {:X}, HEAD: {:X}, NEXT: {:X}", (&*ed as *const Ed) as usize, ed.flags, ed.tail, ed.head, ed.next);
}
count
}
}
|
/// Deserialized response body of a launch request; field names map to the
/// server's `Result`/`Message` JSON keys via serde renames.
#[derive(Deserialize)]
pub struct LaunchResponse {
#[serde(rename = "Result")]
pub result: ResultState,
#[serde(rename = "Message")]
pub message: String,
}
/// Outcome reported by the server in a [`LaunchResponse`].
#[derive(Debug,PartialEq,Deserialize)]
pub enum ResultState {
Success,
Failure,
}
/// Client-side interface to the launch service.
pub trait Api {
// fn status() -> Option<bool>;
/// Attempts a launch with the given password, returning the parsed
/// response on success or an error message string on failure.
fn launch(password: &str) -> Result<LaunchResponse, String>;
}
|
extern crate queues;
use queues::*;
/// Fixed-capacity, in-memory log of recent messages.
/// NOTE(review): Rust convention is UpperCamelCase (`Logger`); renaming would
/// break callers, so it is only flagged here.
pub struct logger{
// Ring buffer of the most recent messages (capacity set in `new`).
buf: queues::CircularBuffer<String>
}
impl logger{
    /// Creates a logger backed by a 35-entry circular buffer.
    pub fn new() -> Self {
        logger {buf : queues::CircularBuffer::<String>::new(35) }
    }
    /// Appends a message to the buffer.
    pub fn write(&mut self, message: String) {
        // The original ignored this Result implicitly (an unused-must-use
        // hazard); discard it explicitly to document the best-effort intent.
        let _ = self.buf.add(message);
    }
    /// Drains the buffer to stdout, oldest message first.
    pub fn to_console(&mut self) {
        // `remove` returns Err once the buffer is empty, ending the loop —
        // same termination condition as the old `done` flag, without it.
        while let Ok(txt) = self.buf.remove() {
            println!("{}", txt);
        }
    }
}
|
//! The module contains a [`Locator`] trait and implementations for it.
use core::ops::Bound;
use std::{
iter::{self, Once},
ops::{Range, RangeBounds},
};
use crate::{
grid::config::Entity,
grid::records::{ExactRecords, PeekableRecords, Records},
settings::object::{
Column, Columns, FirstColumn, FirstRow, LastColumn, LastRow, Object, Row, Rows,
},
};
/// Locator is an interface which searches for a particular thing in the [`Records`],
/// and returns coordinate of the foundings if any.
pub trait Locator<Records> {
/// A coordinate of the finding.
type Coordinate;
/// An iterator of the coordinates.
/// If it's empty it's considered that nothing is found.
type IntoIter: IntoIterator<Item = Self::Coordinate>;
/// Search for the thing in [`Records`], returning a list of coordinates.
/// Takes `&mut self` so stateful locators can cache or update themselves.
fn locate(&mut self, records: Records) -> Self::IntoIter;
}
impl<B, R> Locator<R> for Columns<B>
where
B: RangeBounds<usize>,
R: Records,
{
type Coordinate = usize;
type IntoIter = Range<usize>;
/// Resolves the stored column range against the actual column count,
/// clamping unbounded ends to the table width.
fn locate(&mut self, records: R) -> Self::IntoIter {
let range = self.get_range();
let max = records.count_columns();
let (from, to) = bounds_to_usize(range.start_bound(), range.end_bound(), max);
from..to
}
}
impl<R> Locator<R> for Column {
type Coordinate = usize;
type IntoIter = Once<usize>;
/// Yields the single column index this `Column` converts into.
fn locate(&mut self, _: R) -> Self::IntoIter {
iter::once((*self).into())
}
}
impl<R> Locator<R> for FirstColumn {
type Coordinate = usize;
type IntoIter = Once<usize>;
/// The first column is always index 0, regardless of the records.
fn locate(&mut self, _: R) -> Self::IntoIter {
iter::once(0)
}
}
impl<R> Locator<R> for LastColumn
where
    R: Records,
{
    type Coordinate = usize;
    type IntoIter = Once<usize>;
    /// Yields the index of the final column; an empty table yields 0.
    fn locate(&mut self, records: R) -> Self::IntoIter {
        // saturating_sub folds the old `if count > 0 { count - 1 } else { 0 }`.
        iter::once(records.count_columns().saturating_sub(1))
    }
}
impl<B, R> Locator<R> for Rows<B>
where
R: Records,
B: RangeBounds<usize>,
{
type Coordinate = usize;
type IntoIter = Range<usize>;
/// Resolves the stored row range to concrete indices.
///
/// NOTE(review): the unbounded end is clamped to `count_columns()`, i.e.
/// the *column* count, even though this locates rows. Upstream versions
/// clamp to the row count (which requires an `ExactRecords` bound). This
/// looks like a bug for non-square tables — confirm before relying on it.
fn locate(&mut self, records: R) -> Self::IntoIter {
let (from, to) = bounds_to_usize(
self.get_range().start_bound(),
self.get_range().end_bound(),
records.count_columns(),
);
from..to
}
}
impl<R> Locator<R> for Row {
type Coordinate = usize;
type IntoIter = Once<usize>;
/// Yields the single row index this `Row` converts into.
fn locate(&mut self, _: R) -> Self::IntoIter {
iter::once((*self).into())
}
}
impl<R> Locator<R> for FirstRow {
type Coordinate = usize;
type IntoIter = Once<usize>;
/// The first row is always index 0, regardless of the records.
fn locate(&mut self, _: R) -> Self::IntoIter {
iter::once(0)
}
}
impl<R> Locator<R> for LastRow
where
    R: ExactRecords,
{
    type Coordinate = usize;
    type IntoIter = Once<usize>;
    /// Yields the index of the final row; an empty table yields 0.
    fn locate(&mut self, records: R) -> Self::IntoIter {
        // saturating_sub folds the old `if count > 0 { count - 1 } else { 0 }`.
        iter::once(records.count_rows().saturating_sub(1))
    }
}
/// The structure is an implementation of [`Locator`] to search for a column by it's name.
/// A name is considered be a value in a first row.
///
/// So even if in reality there's no header, the first row will be considered to be one.
#[derive(Debug, Clone, Copy)]
pub struct ByColumnName<S>(S);
impl<S> ByColumnName<S> {
/// Constructs a new object of the structure.
/// `text` is the column name to match against first-row cells.
pub fn new(text: S) -> Self
where
S: AsRef<str>,
{
Self(text)
}
}
impl<R, S> Locator<R> for ByColumnName<S>
where
    S: AsRef<str>,
    R: Records + ExactRecords + PeekableRecords,
{
    type Coordinate = usize;
    type IntoIter = Vec<usize>;
    /// Collects the indices of every column whose first-row cell equals the
    /// stored name.
    fn locate(&mut self, records: R) -> Self::IntoIter {
        // todo: can be optimized by creating Iterator
        let name = self.0.as_ref();
        let mut found = Vec::new();
        for col in 0..records.count_columns() {
            if records.get_text((0, col)) == name {
                found.push(col);
            }
        }
        found
    }
}
impl<S, R> Object<R> for ByColumnName<S>
where
    S: AsRef<str>,
    R: Records + PeekableRecords + ExactRecords,
{
    type Iter = std::vec::IntoIter<Entity>;
    /// Produces an [`Entity::Column`] for every column whose first-row cell
    /// equals the stored name.
    fn cells(&self, records: &R) -> Self::Iter {
        // todo: can be optimized by creating Iterator
        let name = self.0.as_ref();
        let mut cells = Vec::new();
        for col in 0..records.count_columns() {
            if records.get_text((0, col)) == name {
                cells.push(Entity::Column(col));
            }
        }
        cells.into_iter()
    }
}
/// Resolves a pair of range bounds into a concrete half-open `(from, to)`
/// pair, clamping an unbounded end to `count_elements`.
///
/// Panics (`unreachable!`) on an `Excluded` start bound, which the range
/// types used by the locators never produce.
fn bounds_to_usize(
    left: Bound<&usize>,
    right: Bound<&usize>,
    count_elements: usize,
) -> (usize, usize) {
    let from = match left {
        Bound::Included(x) => *x,
        Bound::Unbounded => 0,
        Bound::Excluded(_) => unreachable!("A start bound can't be excluded"),
    };
    let to = match right {
        Bound::Included(y) => y + 1,
        Bound::Excluded(y) => *y,
        Bound::Unbounded => count_elements,
    };
    (from, to)
}
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use core::{mem};
use crate::{
kty::{clockid_t, c_int, TIMER_ABSTIME, TFD_NONBLOCK},
syscall::{
clock_gettime, clock_settime, clock_getres, clock_nanosleep, timerfd_create
},
time::{timer::{Timer}, Time, time_from_timespec, time_to_timespec},
util::retry::{retry},
result::{Result},
lmem,
fd::{FdContainer},
};
/// A clock that can be used to measure time.
///
/// [field, 1]
/// The integer representing the clock.
/// The newtype wraps a raw kernel clock id (`clockid_t`).
#[derive(Pod, Copy, Clone, PartialEq, Eq)]
pub struct Clock(pub clockid_t);
// The numeric ids below appear to match the Linux `CLOCK_*` constants.
/// Real ("wall time") clock that measures the time since 1970-01-01T00:00:00+00:00.
pub const REAL: Clock = Clock(0);
/// Real coarse ("wall time") clock that measures the time since
/// 1970-01-01T00:00:00+00:00.
///
/// = Remarks
///
/// This is less precise but more efficient than `REAL`.
pub const REAL_COARSE: Clock = Clock(5);
/// A monotonic clock since some arbitrary point in the past which isn't affected by time
/// jumps.
pub const MONO: Clock = Clock(1);
/// A coarse monotonic clock since some arbitrary point in the past which isn't affected
/// by time jumps.
///
/// = Remarks
///
/// This is less precise but more efficient than `MONO`.
pub const MONO_COARSE: Clock = Clock(6);
/// A monotonic clock since some arbitrary point in the past which isn't affected by time
/// jumps or time adjustments.
pub const MONO_RAW: Clock = Clock(4);
/// A clock that measures the CPU time used by this process.
pub const PROCESS: Clock = Clock(2);
/// A clock that measures the CPU time used by this thread.
pub const THREAD: Clock = Clock(3);
/// A monotonic clock since some arbitrary point in the past which isn't affected by time
/// jumps and continues to run while the system is suspended.
pub const BOOT: Clock = Clock(7);
// TODO: Names for the clocks below
// pub const CLOCK_REALTIME_ALARM : Clock = Clock(8);
// pub const CLOCK_BOOTTIME_ALARM : Clock = Clock(9);
// pub const CLOCK_SGI_CYCLE : Clock = Clock(10);
// pub const CLOCK_TAI : Clock = Clock(11);
impl Clock {
/// Returns the current time of the clock.
pub fn get_time(self) -> Result<Time> {
let mut timespec = lmem::zeroed();
rv!(clock_gettime(self.0, &mut timespec))?;
Ok(time_from_timespec(timespec))
}
/// Sets the time of the clock.
///
/// [argument, t]
/// The new time of the clock.
pub fn set_time(self, t: Time) -> Result {
let timespec = time_to_timespec(t);
rv!(clock_settime(self.0, ×pec))
}
/// Returns the resolution of the clock.
pub fn resolution(self) -> Result<Time> {
let mut timespec = lmem::zeroed();
rv!(clock_getres(self.0, &mut timespec))?;
Ok(time_from_timespec(timespec))
}
/// Sleeps until an absolute time.
///
/// [argument, t]
/// The time until which to sleep.
pub fn sleep_to(self, t: Time) -> Result {
let time = time_to_timespec(t);
let mut rem = lmem::zeroed();
retry(|| clock_nanosleep(self.0, TIMER_ABSTIME, &time, &mut rem)).map(|_| ())
}
/// Sleeps for an amount of time.
///
/// [argument, t]
/// The amount of time to sleep.
pub fn sleep_for(self, t: Time) -> Result {
let now = self.get_time()?;
self.sleep_to(now + t)
}
/// Creates a new timer.
pub fn timer(self) -> Result<Timer> {
let timer = rv!(timerfd_create(self.0, 0), -> c_int)?;
Ok(Timer::from_owned(timer))
}
/// Creates a new non-blocking timer.
pub fn timer_non_blocking(self) -> Result<Timer> {
let timer = rv!(timerfd_create(self.0, TFD_NONBLOCK), -> c_int)?;
Ok(Timer::from_owned(timer))
}
}
|
use lunatic::channel::{Receiver, Sender};
use puck::{
body::{
mime::{HTML, PLAIN},
Body,
},
Request, Response,
};
use serde::{Deserialize, Serialize};
/// Handler for `/`: returns a fixed HTML greeting with a 200 status.
fn home(_: Request) -> Response {
Response::build()
.header("Content-Type", HTML)
.body(Body::from_string("Hello World!"))
.status(200, "success")
.build()
}
/// Handler for `/hello/<name>`: greets the last path segment of the URL.
fn hello(req: Request) -> Response {
// The last `/`-separated segment is the name captured by the route.
let name = req.url().path().split('/').last().unwrap();
Response::build()
.header("Content-Type", HTML)
.body(Body::from_string(format!("<h1>Hello {}!</h1>", name)))
.status(200, "success")
.build()
}
/// Message passed over the "echo" channel between handlers.
#[derive(Serialize, Deserialize, Debug)]
enum Msg {
Send(String),
}
/// Handler for `/submit/<value>`: pushes the last path segment onto the
/// channel and confirms with a plain-text response.
fn submit_info(req: Request, sender: Sender<Msg>) -> Response {
sender
.send(Msg::Send(
req.url().path().split('/').last().unwrap().to_string(),
))
.unwrap();
Response::build()
.header("Content-Type", PLAIN)
.body(Body::from_string("Submitted".to_string()))
.status(200, "success")
.build()
}
/// Handler for `/read`: blocks on the channel and echoes the next submitted
/// value as plain text.
fn read_info(_: Request, reader: Receiver<Msg>) -> Response {
Response::build()
.header("Content-Type", PLAIN)
.body(Body::from_string(match reader.receive().unwrap() {
Msg::Send(msg) => msg,
}))
.status(200, "success")
.build()
}
/// Supervisor for the "echo" channel: drains and debug-prints every message
/// until the channel closes.
fn echo_echo((_send, receive): (Sender<Msg>, Receiver<Msg>)) {
    loop {
        match receive.receive() {
            Ok(msg) => println!("{:#?}", msg),
            Err(_) => break,
        }
    }
}
// Route table: the puck proc-macro wires each path to its handler and
// connects the "echo" channel (Msg) to the supervisor `echo_echo`.
#[puck::handler(
handle(at = "/", call = "home"),
handle(at = "/hello/<string>", call = "hello"),
handle(at = "/submit/<string>", call = "submit_info", send = "echo"),
handle(at = "/read", call = "read_info", receive = "echo"),
channel(name = "echo", message_type = "Msg", supervisor = "echo_echo")
)]
pub struct App;
/// Serves the app on localhost:5050 (blocks until the server stops).
fn main() {
puck::serve::<App, &str>("127.0.0.1:5050").unwrap()
}
#[cfg(test)]
mod test {
use std::io::{Read, Write};
/// Wrapper so the server's `main` can run inside a spawned lunatic process.
fn proc(_: ()) {
super::main()
}
#[test]
fn test_channels() {
// Start the server in the background.
lunatic::Process::spawn_with((), proc).detach();
fn inner(_: ()) {
let mut stream = lunatic::net::TcpStream::connect("127.0.0.1:5050").unwrap();
write!(
stream,
"GET /submit/hello HTTP/1.1\r\nHost: localhost:5050\r\nUser-Agent: curl/7.64.1\r\nAccept: */*\r\n\r\n"
)
.unwrap();
let mut string = String::new();
stream.read_to_string(&mut string).unwrap();
assert_eq!(
string,
"HTTP/1.1 200 success\r\nContent-Type: text/plain;charset=utf-8\r\n\r\nSubmitted"
);
let mut stream = lunatic::net::TcpStream::connect("127.0.0.1:5050").unwrap();
write!(
stream,
"GET /read HTTP/1.1\r\nHost: localhost:5050\r\nUser-Agent: curl/7.64.1\r\nAccept: */*\r\n\r\n"
)
.unwrap();
let mut string = String::new();
stream.read_to_string(&mut string).unwrap();
// NOTE(review): `read_info` sets the PLAIN content type, but this
// expects text/html — one of the two looks wrong; confirm which.
assert_eq!(
string,
"HTTP/1.1 200 success\r\nContent-Type: text/html;charset=utf-8\r\n\r\nhello"
);
}
// The client requests also run in their own lunatic process.
lunatic::Process::spawn_with((), inner).detach();
}
}
|
use anyhow::{anyhow, Context};
use std::{convert::TryInto, fs, path::PathBuf};
use structopt::StructOpt;
use crate::config::{self, Config};
const DEFAULT_CONFIG_PATH: &'static str = ".rl-config";
/// Command-line options, parsed by structopt.
#[derive(Debug, StructOpt)]
#[structopt(
name = "bracketlib-book",
about = "An implementation of the Bracketlib Roguelike Tutorial using hecs"
)]
struct Opt {
/// Use an external config file
// NOTE(review): `required_if("create-config", "true")` reads as "config is
// required when --create-config is set", yet parse_opt_args falls back to
// DEFAULT_CONFIG_PATH in that case — confirm the constraint is intended.
#[structopt(
short,
long,
parse(from_os_str),
required_if("create-config", "true"),
help = "Specify a config file to load"
)]
config: Option<PathBuf>,
/// Create a default config file
#[structopt(short = "z", long, help = "create a default config file")]
create_config: bool,
}
/// The fully resolved result of CLI parsing: a validated configuration.
pub struct ParsedOpt {
pub config: Config,
}
pub fn parse_opt_args() -> anyhow::Result<ParsedOpt> {
let opt = Opt::from_args();
let user_config = if opt.create_config {
let path = opt
.config
.unwrap_or_else(|| PathBuf::from(DEFAULT_CONFIG_PATH));
if path.exists() {
return Err(anyhow!(format!(
"Could not write config '{}'. File already exists.",
path.to_string_lossy()
)));
}
let user_config = config::default_user_config();
let user_config_string = toml::to_string_pretty(&user_config)?;
fs::write(path, user_config_string)?;
user_config
} else {
match opt.config {
Some(path) => {
let config_string_string = fs::read_to_string(path)?;
toml::from_str(&config_string_string).context("Error parsing config file")?
}
None => match fs::read_to_string(DEFAULT_CONFIG_PATH) {
Ok(s) => toml::from_str(&s)?,
Err(_) => config::default_user_config(),
},
}
};
let config = user_config.try_into()?;
Ok(ParsedOpt { config })
}
|
//! `FromCast` and `IntoCast` implementations for portable 32-bit wide vectors
#![rustfmt::skip]
use crate::*;
// Casts for the 4-lane 8-bit vectors (and their mask type).
impl_from_cast!(
i8x4[test_v32]: u8x4, m8x4, i16x4, u16x4, m16x4, i32x4, u32x4, f32x4, m32x4,
i64x4, u64x4, f64x4, m64x4, i128x4, u128x4, m128x4, isizex4, usizex4, msizex4
);
impl_from_cast!(
u8x4[test_v32]: i8x4, m8x4, i16x4, u16x4, m16x4, i32x4, u32x4, f32x4, m32x4,
i64x4, u64x4, f64x4, m64x4, i128x4, u128x4, m128x4, isizex4, usizex4, msizex4
);
impl_from_cast_mask!(
m8x4[test_v32]: i8x4, u8x4, i16x4, u16x4, m16x4, i32x4, u32x4, f32x4, m32x4,
i64x4, u64x4, f64x4, m64x4, i128x4, u128x4, m128x4, isizex4, usizex4, msizex4
);
// Casts for the 2-lane 16-bit vectors (and their mask type).
impl_from_cast!(
i16x2[test_v32]: i8x2, u8x2, m8x2, u16x2, m16x2, i32x2, u32x2, f32x2, m32x2,
i64x2, u64x2, f64x2, m64x2, i128x2, u128x2, m128x2, isizex2, usizex2, msizex2
);
impl_from_cast!(
u16x2[test_v32]: i8x2, u8x2, m8x2, i16x2, m16x2, i32x2, u32x2, f32x2, m32x2,
i64x2, u64x2, f64x2, m64x2, i128x2, u128x2, m128x2, isizex2, usizex2, msizex2
);
impl_from_cast_mask!(
m16x2[test_v32]: i8x2, u8x2, m8x2, i16x2, u16x2, i32x2, u32x2, f32x2, m32x2,
i64x2, u64x2, f64x2, m64x2, i128x2, u128x2, m128x2, isizex2, usizex2, msizex2
);
|
use std::option::Option;
use crate::bknode::BkNode;
use crate::keyquery::KeyQuery;
use crate::metric::Metric;
use crate::Dist;
/// A stack entry for the BK-tree search: a node together with its already
/// computed distance from the needle.
#[derive(Debug, Clone)]
struct BkFindEntry<'a, N: 'a + BkNode> {
dist: Dist,
node: &'a N,
}
/// A prepared within-tolerance search over a BK-tree.
pub struct BkFind<'a, KQ, N: 'a, M>
where
KQ: KeyQuery + Default,
N: 'a + BkNode<Key = <KQ as KeyQuery>::Key>,
M: Metric<<KQ as KeyQuery>::Query>,
{
// Adapts stored keys to the query type for distance computation.
kq: &'a KQ,
// The distance metric used for the search.
metric: &'a M,
// The query value being searched for.
needle: &'a KQ::Query,
// Maximum distance for a node to be reported.
tolerance: Dist,
// Root of the tree; taken (set to None) once the search starts.
root: Option<&'a N>,
// Explicit DFS stack of nodes still to visit.
stack: Vec<BkFindEntry<'a, N>>,
}
impl<'a, KQ, N, M> BkFind<'a, KQ, N, M>
where
KQ: 'a + KeyQuery + Default,
N: 'a + BkNode<Key = <KQ as KeyQuery>::Key>,
M: 'a + Metric<<KQ as KeyQuery>::Query>,
{
/// Prepares a search; `max_depth_hint` pre-sizes the DFS stack to avoid
/// reallocation during traversal.
pub fn new(
kq: &'a KQ,
metric: &'a M,
max_depth_hint: usize,
root: Option<&'a N>,
tolerance: Dist,
needle: &'a KQ::Query,
) -> Self {
let stack = Vec::with_capacity(max_depth_hint);
BkFind {
kq,
metric,
needle,
tolerance,
root,
stack,
}
}
}
impl<'a, KQ, N, M> BkFind<'a, KQ, N, M>
where
    KQ: 'a + KeyQuery + Default,
    N: 'a + BkNode<Key = <KQ as KeyQuery>::Key>,
    M: 'a + Metric<<KQ as KeyQuery>::Query>,
{
    /// Walks the BK-tree depth-first, invoking `callback` with the distance
    /// and key of every node within `tolerance` of the needle.
    ///
    /// (The `FnMut` bound was previously declared twice — inline and again in
    /// a `where` clause; the redundant duplicate has been removed.)
    pub fn each<F: FnMut(Dist, &'a KQ::Key)>(mut self, mut callback: F) {
        // Temporary: we can cheat because the root node is the same type as the children for now.
        if let Some(root) = self.root.take() {
            let dist = self.kq.distance(self.metric, root.key(), self.needle);
            self.stack.push(BkFindEntry { dist, node: root })
        }
        while let Some(candidate) = self.stack.pop() {
            // Enqueue the children. By the BK-tree invariant, only children
            // whose edge label lies in [dist - tol, dist + tol] can contain
            // matches, so the rest are pruned without computing distances.
            let min: Dist = candidate.dist.saturating_sub(self.tolerance);
            let max: Dist = candidate.dist.saturating_add(self.tolerance);
            let children = candidate.node.children_vector();
            for (dist, child) in children.iter() {
                if min <= *dist && *dist <= max {
                    let child_dist = self.kq.distance(self.metric, child.key(), self.needle);
                    self.stack.push(BkFindEntry {
                        dist: child_dist,
                        node: *child,
                    })
                }
            }
            // And maybe yield this node.
            if candidate.dist <= self.tolerance {
                callback(candidate.dist, candidate.node.key());
            }
        }
    }
}
/* Needs GATs to define this as an iterator. RFC 1598.
impl<'a, KQ, N, M> Iterator for BkFind<'a, KQ, N, M>
where
KQ: 'a + KeyQuery + Default,
N: 'a + BkNode<Key = <KQ as KeyQuery>::Key>,
M: 'a + Metric<<KQ as KeyQuery>::Query>,
{
type Item = (Dist, KQ::Key);
fn next(&mut self) -> Option<(Dist, KQ::Key)> {
while let Some(candidate) = self.stack.pop() {
// Enqueue the children.
let min: usize = candidate.dist.saturating_sub(self.tolerance);
let max: usize = candidate.dist.saturating_add(self.tolerance);
for (dist, ref child) in candidate.node.children_vector().iter() {
if min <= dist && dist <= max {
let child_dist = self.kq.distance(self.metric, &child.key(), self.needle);
self.stack.push(BkFindEntry {
dist: child_dist,
node: child,
})
}
}
// And maybe yield this node.
if candidate.dist <= self.tolerance {
return Some((candidate.dist, candidate.node.key().clone()));
}
}
return None;
}
}
*/
|
#![no_std]
mod font_tbl;
pub use font_tbl::font_tbl; |
mod distance_field;
pub use distance_field::*;
|
/// Reads one line from stdin with the trailing newline removed.
fn read_line() -> String {
    let mut buf = String::new();
    std::io::stdin().read_line(&mut buf).unwrap();
    String::from(buf.trim_end())
}
/// Reads `n` followed by `n` operator lines, solves, and prints each output
/// line.
fn main() {
    let n: i32 = read_line().parse().unwrap();
    let mut ss = Vec::new();
    for _ in 0..n {
        let s = read_line().parse().unwrap();
        ss.push(s);
    }
    let solver = Solver::new(ss);
    for line in solver.solve() {
        println!("{}", line);
    }
}
/// Counts satisfying assignments for a boolean chain x0 OP1 x1 OP2 x2 ...
struct Solver {
    // The operator between position i and i+1 ("AND" or "OR").
    ss: Vec<String>,
}
impl Solver {
    /// Builds a solver over the given operator list.
    fn new(ss: Vec<String>) -> Solver {
        Solver { ss }
    }
    /// Returns (as a single-element Vec of strings) the number of variable
    /// assignments that make the whole expression true.
    ///
    /// Folding left to right: an AND keeps the running count, while the OR at
    /// 1-based position i contributes 2^i additional satisfying patterns.
    fn solve(&self) -> Vec<String> {
        let total = self.ss.iter().enumerate().fold(1_u64, |acc, (idx, op)| {
            if op.as_str() == "AND" {
                acc
            } else {
                acc + 2_u64.pow((idx + 1) as u32)
            }
        });
        vec![total.to_string()]
    }
}
// Expected values follow the recurrence in `Solver::solve`: an AND keeps the
// count, the OR at 1-based position i adds 2^i.
#[test]
fn test_solve_1() {
let solver = Solver::new(vec!["AND".to_owned()]);
assert_eq!(solver.solve(), vec!("1"));
}
#[test]
fn test_solve_2() {
let solver = Solver::new(vec!["OR".to_owned()]);
assert_eq!(solver.solve(), vec!("3"));
}
#[test]
fn test_solve_3() {
let solver = Solver::new(vec!["AND".to_owned(), "OR".to_owned()]);
assert_eq!(solver.solve(), vec!("5"));
}
|
struct Container(i32, i32);
/// A container whose element types are exposed as associated types, so
/// `contains` needs no generic parameters on the trait itself.
trait Contains {
    /// Type of the first stored value.
    type A;
    /// Type of the second stored value.
    type B;
    /// Returns true when the container holds exactly `a` and `b`.
    ///
    /// (The reference parameters were previously anonymous — `&Self::A,
    /// &Self::B` — a pre-2018 form rejected by current editions; they are
    /// now named, which callers and implementors are unaffected by.)
    fn contains(&self, a: &Self::A, b: &Self::B) -> bool;
    /// Returns the first value.
    fn first(&self) -> i32;
    /// Returns the last value.
    fn last(&self) -> i32;
}
/// `Container` stores two `i32`s, so both associated types are `i32`.
impl Contains for Container {
    type A = i32;
    type B = i32;
    /// True when the stored pair is exactly `(n1, n2)`.
    fn contains(&self, n1: &i32, n2: &i32) -> bool {
        self.0 == *n1 && self.1 == *n2
    }
    /// The first stored number.
    fn first(&self) -> i32 {
        self.0
    }
    /// The last stored number.
    fn last(&self) -> i32 {
        self.1
    }
}
/// Difference between the container's last and first values.
fn diff<C: Contains>(container: &C) -> i32 {
    let (first, last) = (container.first(), container.last());
    last - first
}
fn main() {
let n1 = 4;
let n2 = 15;
let container = Container(n1, n2);
println!("Does container contain {} and {}: {}",
&n1, &n2, container.contains(&n1, &n2));
println!("First: {}", container.first());
println!("Last: {}", container.last());
println!("The difference is: {}", diff(&container));
} |
use crate::utils::formatted_strings::APP_VERSION;
/// Parse CLI arguments, and exit if `--help`, `--version`, or an
/// unknown argument was supplied
///
/// NOTE(review): the unconditional `exit(0)` at the end of the loop body
/// means only the *first* argument is ever examined; when no arguments are
/// given the function returns normally and the app proceeds to start.
pub fn parse_cli_args() {
let args = std::env::args().skip(1);
for arg in args {
match arg.as_str() {
"--help" | "-h" => print_help(),
"--version" | "-v" => print_version(),
_ => {
unknown_argument(&arg);
// Unknown option: signal failure to the shell.
std::process::exit(1);
}
}
// A recognized option was handled; exit successfully.
std::process::exit(0);
}
}
/// Prints the usage/help text to stdout.
fn print_help() {
println!(
"Application to comfortably monitor your Internet traffic\n\
Usage: sniffnet [OPTIONS]\n\
Options:\n\
\t-h, --help Print help\n\
\t-v, --version Print version info\n\
(Run without options to start the app)"
);
}
/// Prints the app name and version (from `APP_VERSION`) to stdout.
fn print_version() {
println!("sniffnet {APP_VERSION}");
}
/// Reports an unrecognized option to stderr, pointing the user at `--help`.
fn unknown_argument(arg: &str) {
eprintln!(
"sniffnet: unknown option '{arg}'\n\
For more information, try 'sniffnet --help'"
);
}
|
use std::{cell::RefCell, collections::HashMap, fmt::Display, rc::Rc, time::Instant};
use crate::{
parser::Operator, Block, BlockId, Call, Expr, Expression, ParserState, Span, Statement, VarId,
};
/// Errors surfaced while evaluating parsed expressions.
#[derive(Debug)]
pub enum ShellError {
    /// Expected a certain kind of value (named by the `String`, e.g.
    /// "operator", "bool") at the given span, but found something else.
    Mismatch(String, Span),
    /// The expression form at this span is not supported by the evaluator.
    Unsupported(Span),
    /// An evaluator invariant was violated; the message describes it.
    InternalError(String),
    /// A variable lookup failed; the span points at the reference site.
    VariableNotFound(Span),
}
/// A runtime value produced by the evaluator.
#[derive(Debug, Clone)]
pub enum Value {
    /// Boolean value, with the span it originated from.
    Bool { val: bool, span: Span },
    /// 64-bit integer value.
    Int { val: i64, span: Span },
    /// Owned string value.
    String { val: String, span: Span },
    /// List of values.
    List(Vec<Value>),
    /// A parsed block, referenced by id.
    Block(BlockId),
    /// Placeholder for results the evaluator cannot (yet) produce.
    Unknown,
}
impl PartialEq for Value {
    // Structural equality on payloads only: spans are deliberately ignored,
    // and values of different variants never compare equal.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Value::Bool { val: l, .. }, Value::Bool { val: r, .. }) => l == r,
            (Value::Int { val: l, .. }, Value::Int { val: r, .. }) => l == r,
            (Value::String { val: l, .. }, Value::String { val: r, .. }) => l == r,
            (Value::List(l), Value::List(r)) => l == r,
            (Value::Block(l), Value::Block(r)) => l == r,
            _ => false,
        }
    }
}
impl Display for Value {
    // User-facing rendering; aggregate values print as placeholders.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Value::Bool { val, .. } => write!(f, "{}", val),
            Value::Int { val, .. } => write!(f, "{}", val),
            Value::String { val, .. } => write!(f, "{}", val),
            Value::List(..) => write!(f, "<list>"),
            Value::Block(..) => write!(f, "<block>"),
            Value::Unknown => write!(f, "<unknown>"),
        }
    }
}
impl Value {
    /// Binary `+`: integer addition or string concatenation.
    /// Any other operand combination yields `Value::Unknown` (not an error).
    pub fn add(&self, rhs: &Value) -> Result<Value, ShellError> {
        if let (Value::Int { val: a, .. }, Value::Int { val: b, .. }) = (self, rhs) {
            return Ok(Value::Int {
                val: a + b,
                span: Span::unknown(),
            });
        }
        if let (Value::String { val: a, .. }, Value::String { val: b, .. }) = (self, rhs) {
            return Ok(Value::String {
                val: format!("{}{}", a, b),
                span: Span::unknown(),
            });
        }
        Ok(Value::Unknown)
    }
}
/// Evaluation context: read-only access to the parser's output
/// (declarations and blocks).
pub struct State<'a> {
    pub parser_state: &'a ParserState,
}
/// One lexical scope of variables, optionally chained to a parent scope.
pub struct StackFrame {
    pub vars: HashMap<VarId, Value>,
    /// Enclosing scope; `None` for the root frame.
    pub parent: Option<Stack>,
}
/// Shared, cheaply clonable handle to a scope-chain node.
#[derive(Clone)]
pub struct Stack(Rc<RefCell<StackFrame>>);
impl Default for Stack {
    // Delegates to `new()`: an empty root scope.
    fn default() -> Self {
        Self::new()
    }
}
impl Stack {
    /// Create an empty root scope (no parent).
    pub fn new() -> Stack {
        let frame = StackFrame {
            vars: HashMap::new(),
            parent: None,
        };
        Stack(Rc::new(RefCell::new(frame)))
    }
    /// Look up a variable, walking outward through parent scopes.
    pub fn get_var(&self, var_id: VarId) -> Result<Value, ShellError> {
        let frame = self.0.borrow();
        if let Some(value) = frame.vars.get(&var_id) {
            return Ok(value.clone());
        }
        match &frame.parent {
            Some(parent) => parent.get_var(var_id),
            None => Err(ShellError::InternalError("variable not found".into())),
        }
    }
    /// Bind (or overwrite) a variable in the current scope.
    pub fn add_var(&self, var_id: VarId, value: Value) {
        self.0.borrow_mut().vars.insert(var_id, value);
    }
    /// Create a child scope whose parent is this stack.
    pub fn enter_scope(self) -> Stack {
        let frame = StackFrame {
            vars: HashMap::new(),
            parent: Some(self),
        };
        Stack(Rc::new(RefCell::new(frame)))
    }
    /// Debug helper: dump every frame from innermost to outermost.
    pub fn print_stack(&self) {
        println!("===frame===");
        let frame = self.0.borrow();
        for (var, val) in &frame.vars {
            println!("{}: {:?}", var, val);
        }
        if let Some(parent) = &frame.parent {
            parent.print_stack()
        }
    }
}
/// Extract the operator from an operator expression; any other expression
/// kind is a `Mismatch` error at the expression's span.
pub fn eval_operator(
    _state: &State,
    _stack: Stack,
    op: &Expression,
) -> Result<Operator, ShellError> {
    if let Expr::Operator(operator) = &op.expr {
        Ok(operator.clone())
    } else {
        Err(ShellError::Mismatch("operator".to_string(), op.span))
    }
}
// Evaluate a call to a declared command. Custom (user-defined) commands run
// their body in a fresh child scope; a handful of built-ins ("let", "if",
// "build-string", "benchmark", "for") are special-cased by name; anything
// else evaluates to `Value::Unknown`.
fn eval_call(state: &State, stack: Stack, call: &Call) -> Result<Value, ShellError> {
    let decl = state.parser_state.get_decl(call.decl_id);
    if let Some(block_id) = decl.body {
        // Custom command: bind each positional argument to its declared
        // parameter in a new child scope, then run the body block.
        let stack = stack.enter_scope();
        for (arg, param) in call
            .positional
            .iter()
            .zip(decl.signature.required_positional.iter())
        {
            let result = eval_expression(state, stack.clone(), arg)?;
            let var_id = param
                .var_id
                .expect("internal error: all custom parameters must have var_ids");
            stack.add_var(var_id, result);
        }
        let block = state.parser_state.get_block(block_id);
        eval_block(state, stack, block)
    } else if decl.signature.name == "let" {
        // `let <var> = <expr>`: evaluate the keyword's expression and bind
        // it in the CURRENT scope (no new scope is entered).
        let var_id = call.positional[0]
            .as_var()
            .expect("internal error: missing variable");
        let keyword_expr = call.positional[1]
            .as_keyword()
            .expect("internal error: missing keyword");
        let rhs = eval_expression(state, stack.clone(), keyword_expr)?;
        //println!("Adding: {:?} to {}", rhs, var_id);
        stack.add_var(var_id, rhs);
        Ok(Value::Unknown)
    } else if decl.signature.name == "if" {
        // `if <cond> <then-block> [<else ...>]`.
        let cond = &call.positional[0];
        let then_block = call.positional[1]
            .as_block()
            .expect("internal error: expected block");
        let else_case = call.positional.get(2);
        let result = eval_expression(state, stack.clone(), cond)?;
        match result {
            Value::Bool { val, .. } => {
                if val {
                    // Condition true: run the then-block in a child scope.
                    let block = state.parser_state.get_block(then_block);
                    let stack = stack.enter_scope();
                    eval_block(state, stack, block)
                } else if let Some(else_case) = else_case {
                    if let Some(else_expr) = else_case.as_keyword() {
                        if let Some(block_id) = else_expr.as_block() {
                            // `else { ... }`: run the block in a child scope.
                            let block = state.parser_state.get_block(block_id);
                            let stack = stack.enter_scope();
                            eval_block(state, stack, block)
                        } else {
                            // `else <expr>` (covers e.g. `else if ...`).
                            eval_expression(state, stack, else_expr)
                        }
                    } else {
                        // Else argument that is not keyword-wrapped.
                        eval_expression(state, stack, else_case)
                    }
                } else {
                    // Condition false and no else branch supplied.
                    Ok(Value::Unknown)
                }
            }
            // The condition must evaluate to a Bool.
            _ => Err(ShellError::Mismatch("bool".into(), Span::unknown())),
        }
    } else if decl.signature.name == "build-string" {
        // Concatenate the display form of every positional argument.
        let mut output = vec![];
        for expr in &call.positional {
            let val = eval_expression(state, stack.clone(), expr)?;
            output.push(val.to_string());
        }
        Ok(Value::String {
            val: output.join(""),
            span: call.head,
        })
    } else if decl.signature.name == "benchmark" {
        // Run the given block once and print its wall-clock duration.
        let block = call.positional[0]
            .as_block()
            .expect("internal error: expected block");
        let block = state.parser_state.get_block(block);
        let stack = stack.enter_scope();
        let start_time = Instant::now();
        eval_block(state, stack, block)?;
        let end_time = Instant::now();
        println!("{} ms", (end_time - start_time).as_millis());
        Ok(Value::Unknown)
    } else if decl.signature.name == "for" {
        // `for <var> in <end> <block>`: counts <var> from 0 up to (but not
        // including) the evaluated end value, re-running the block in one
        // shared child scope per iteration.
        let var_id = call.positional[0]
            .as_var()
            .expect("internal error: missing variable");
        let keyword_expr = call.positional[1]
            .as_keyword()
            .expect("internal error: missing keyword");
        let end_val = eval_expression(state, stack.clone(), keyword_expr)?;
        let block = call.positional[2]
            .as_block()
            .expect("internal error: expected block");
        let block = state.parser_state.get_block(block);
        let stack = stack.enter_scope();
        let mut x = Value::Int {
            val: 0,
            span: Span::unknown(),
        };
        loop {
            // NOTE(review): if `end_val` is not an Int this equality never
            // holds, and the loop has no other exit — confirm callers
            // guarantee an integer end value.
            if x == end_val {
                break;
            } else {
                stack.add_var(var_id, x.clone());
                eval_block(state, stack.clone(), block)?;
            }
            if let Value::Int { ref mut val, .. } = x {
                *val += 1
            }
        }
        Ok(Value::Unknown)
    } else {
        // Built-in with no body and no special handling: nothing to do.
        Ok(Value::Unknown)
    }
}
// Evaluate a single parsed expression to a runtime `Value`.
pub fn eval_expression(
    state: &State,
    stack: Stack,
    expr: &Expression,
) -> Result<Value, ShellError> {
    match &expr.expr {
        Expr::Bool(b) => Ok(Value::Bool {
            val: *b,
            span: expr.span,
        }),
        Expr::Int(i) => Ok(Value::Int {
            val: *i,
            span: expr.span,
        }),
        // Variable lookup; the stack's internal error is re-labelled with
        // the span of the reference site.
        Expr::Var(var_id) => stack
            .get_var(*var_id)
            .map_err(move |_| ShellError::VariableNotFound(expr.span)),
        Expr::Call(call) => eval_call(state, stack, call),
        Expr::ExternalCall(_, _) => Err(ShellError::Unsupported(expr.span)),
        // A bare operator has no value of its own.
        Expr::Operator(_) => Ok(Value::Unknown),
        Expr::BinaryOp(lhs, op, rhs) => {
            // Evaluate operands left-to-right, then apply the operator.
            let lhs = eval_expression(state, stack.clone(), lhs)?;
            let op = eval_operator(state, stack.clone(), op)?;
            let rhs = eval_expression(state, stack, rhs)?;
            match op {
                Operator::Plus => lhs.add(&rhs),
                // Every other operator is not implemented yet.
                _ => Ok(Value::Unknown),
            }
        }
        Expr::Subexpression(block_id) => {
            // `( ... )`: run the inner block in a child scope.
            let block = state.parser_state.get_block(*block_id);
            let stack = stack.enter_scope();
            eval_block(state, stack, block)
        }
        // Blocks are first-class values, passed around by id.
        Expr::Block(block_id) => Ok(Value::Block(*block_id)),
        Expr::List(x) => {
            let mut output = vec![];
            for expr in x {
                output.push(eval_expression(state, stack.clone(), expr)?);
            }
            Ok(Value::List(output))
        }
        Expr::Table(_, _) => Err(ShellError::Unsupported(expr.span)),
        // Keywords (e.g. `in`, `=`) evaluate to their inner expression.
        Expr::Keyword(_, _, expr) => eval_expression(state, stack, expr),
        Expr::String(s) => Ok(Value::String {
            val: s.clone(),
            span: expr.span,
        }),
        Expr::Signature(_) => Ok(Value::Unknown),
        Expr::Garbage => Ok(Value::Unknown),
    }
}
/// Run every expression statement in `block`, returning the value of the
/// last one (or `Unknown` for an empty block). Errors abort immediately.
pub fn eval_block(state: &State, stack: Stack, block: &Block) -> Result<Value, ShellError> {
    let mut last = Ok(Value::Unknown);
    for stmt in &block.stmts {
        if let Statement::Expression(expression) = stmt {
            let value = eval_expression(state, stack.clone(), expression)?;
            last = Ok(value);
        }
    }
    last
}
|
pub const WORDLIST: &'static [&'static str] = &[
"Abakus",
"Abart",
"abbilden",
"Abbruch",
"Abdrift",
"Abendrot",
"Abfahrt",
"abfeuern",
"Abflug",
"abfragen",
"Abglanz",
"abhärten",
"abheben",
"Abhilfe",
"Abitur",
"Abkehr",
"Ablauf",
"ablecken",
"Ablösung",
"Abnehmer",
"abnutzen",
"Abonnent",
"Abrasion",
"Abrede",
"abrüsten",
"Absicht",
"Absprung",
"Abstand",
"absuchen",
"Abteil",
"Abundanz",
"abwarten",
"Abwurf",
"Abzug",
"Achse",
"Achtung",
"Acker",
"Aderlass",
"Adler",
"Admiral",
"Adresse",
"Affe",
"Affront",
"Afrika",
"Aggregat",
"Agilität",
"ähneln",
"Ahnung",
"Ahorn",
"Akazie",
"Akkord",
"Akrobat",
"Aktfoto",
"Aktivist",
"Albatros",
"Alchimie",
"Alemanne",
"Alibi",
"Alkohol",
"Allee",
"Allüre",
"Almosen",
"Almweide",
"Aloe",
"Alpaka",
"Alpental",
"Alphabet",
"Alpinist",
"Alraune",
"Altbier",
"Alter",
"Altflöte",
"Altruist",
"Alublech",
"Aludose",
"Amateur",
"Amazonas",
"Ameise",
"Amnesie",
"Amok",
"Ampel",
"Amphibie",
"Ampulle",
"Amsel",
"Amulett",
"Anakonda",
"Analogie",
"Ananas",
"Anarchie",
"Anatomie",
"Anbau",
"Anbeginn",
"anbieten",
"Anblick",
"ändern",
"andocken",
"Andrang",
"anecken",
"Anflug",
"Anfrage",
"Anführer",
"Angebot",
"Angler",
"Anhalter",
"Anhöhe",
"Animator",
"Anis",
"Anker",
"ankleben",
"Ankunft",
"Anlage",
"anlocken",
"Anmut",
"Annahme",
"Anomalie",
"Anonymus",
"Anorak",
"anpeilen",
"Anrecht",
"Anruf",
"Ansage",
"Anschein",
"Ansicht",
"Ansporn",
"Anteil",
"Antlitz",
"Antrag",
"Antwort",
"Anwohner",
"Aorta",
"Apfel",
"Appetit",
"Applaus",
"Aquarium",
"Arbeit",
"Arche",
"Argument",
"Arktis",
"Armband",
"Aroma",
"Asche",
"Askese",
"Asphalt",
"Asteroid",
"Ästhetik",
"Astronom",
"Atelier",
"Athlet",
"Atlantik",
"Atmung",
"Audienz",
"aufatmen",
"Auffahrt",
"aufholen",
"aufregen",
"Aufsatz",
"Auftritt",
"Aufwand",
"Augapfel",
"Auktion",
"Ausbruch",
"Ausflug",
"Ausgabe",
"Aushilfe",
"Ausland",
"Ausnahme",
"Aussage",
"Autobahn",
"Avocado",
"Axthieb",
"Bach",
"backen",
"Badesee",
"Bahnhof",
"Balance",
"Balkon",
"Ballett",
"Balsam",
"Banane",
"Bandage",
"Bankett",
"Barbar",
"Barde",
"Barett",
"Bargeld",
"Barkasse",
"Barriere",
"Bart",
"Bass",
"Bastler",
"Batterie",
"Bauch",
"Bauer",
"Bauholz",
"Baujahr",
"Baum",
"Baustahl",
"Bauteil",
"Bauweise",
"Bazar",
"beachten",
"Beatmung",
"beben",
"Becher",
"Becken",
"bedanken",
"beeilen",
"beenden",
"Beere",
"befinden",
"Befreier",
"Begabung",
"Begierde",
"begrüßen",
"Beiboot",
"Beichte",
"Beifall",
"Beigabe",
"Beil",
"Beispiel",
"Beitrag",
"beizen",
"bekommen",
"beladen",
"Beleg",
"bellen",
"belohnen",
"Bemalung",
"Bengel",
"Benutzer",
"Benzin",
"beraten",
"Bereich",
"Bergluft",
"Bericht",
"Bescheid",
"Besitz",
"besorgen",
"Bestand",
"Besuch",
"betanken",
"beten",
"betören",
"Bett",
"Beule",
"Beute",
"Bewegung",
"bewirken",
"Bewohner",
"bezahlen",
"Bezug",
"biegen",
"Biene",
"Bierzelt",
"bieten",
"Bikini",
"Bildung",
"Billard",
"binden",
"Biobauer",
"Biologe",
"Bionik",
"Biotop",
"Birke",
"Bison",
"Bitte",
"Biwak",
"Bizeps",
"blasen",
"Blatt",
"Blauwal",
"Blende",
"Blick",
"Blitz",
"Blockade",
"Blödelei",
"Blondine",
"Blues",
"Blume",
"Blut",
"Bodensee",
"Bogen",
"Boje",
"Bollwerk",
"Bonbon",
"Bonus",
"Boot",
"Bordarzt",
"Börse",
"Böschung",
"Boudoir",
"Boxkampf",
"Boykott",
"Brahms",
"Brandung",
"Brauerei",
"Brecher",
"Breitaxt",
"Bremse",
"brennen",
"Brett",
"Brief",
"Brigade",
"Brillanz",
"bringen",
"brodeln",
"Brosche",
"Brötchen",
"Brücke",
"Brunnen",
"Brüste",
"Brutofen",
"Buch",
"Büffel",
"Bugwelle",
"Bühne",
"Buletten",
"Bullauge",
"Bumerang",
"bummeln",
"Buntglas",
"Bürde",
"Burgherr",
"Bursche",
"Busen",
"Buslinie",
"Bussard",
"Butangas",
"Butter",
"Cabrio",
"campen",
"Captain",
"Cartoon",
"Cello",
"Chalet",
"Charisma",
"Chefarzt",
"Chiffon",
"Chipsatz",
"Chirurg",
"Chor",
"Chronik",
"Chuzpe",
"Clubhaus",
"Cockpit",
"Codewort",
"Cognac",
"Coladose",
"Computer",
"Coupon",
"Cousin",
"Cracking",
"Crash",
"Curry",
"Dach",
"Dackel",
"daddeln",
"daliegen",
"Dame",
"Dammbau",
"Dämon",
"Dampflok",
"Dank",
"Darm",
"Datei",
"Datsche",
"Datteln",
"Datum",
"Dauer",
"Daunen",
"Deckel",
"Decoder",
"Defekt",
"Degen",
"Dehnung",
"Deiche",
"Dekade",
"Dekor",
"Delfin",
"Demut",
"denken",
"Deponie",
"Design",
"Desktop",
"Dessert",
"Detail",
"Detektiv",
"Dezibel",
"Diadem",
"Diagnose",
"Dialekt",
"Diamant",
"Dichter",
"Dickicht",
"Diesel",
"Diktat",
"Diplom",
"Direktor",
"Dirne",
"Diskurs",
"Distanz",
"Docht",
"Dohle",
"Dolch",
"Domäne",
"Donner",
"Dorade",
"Dorf",
"Dörrobst",
"Dorsch",
"Dossier",
"Dozent",
"Drachen",
"Draht",
"Drama",
"Drang",
"Drehbuch",
"Dreieck",
"Dressur",
"Drittel",
"Drossel",
"Druck",
"Duell",
"Duft",
"Düne",
"Dünung",
"dürfen",
"Duschbad",
"Düsenjet",
"Dynamik",
"Ebbe",
"Echolot",
"Echse",
"Eckball",
"Edding",
"Edelweiß",
"Eden",
"Edition",
"Efeu",
"Effekte",
"Egoismus",
"Ehre",
"Eiablage",
"Eiche",
"Eidechse",
"Eidotter",
"Eierkopf",
"Eigelb",
"Eiland",
"Eilbote",
"Eimer",
"einatmen",
"Einband",
"Eindruck",
"Einfall",
"Eingang",
"Einkauf",
"einladen",
"Einöde",
"Einrad",
"Eintopf",
"Einwurf",
"Einzug",
"Eisbär",
"Eisen",
"Eishöhle",
"Eismeer",
"Eiweiß",
"Ekstase",
"Elan",
"Elch",
"Elefant",
"Eleganz",
"Element",
"Elfe",
"Elite",
"Elixier",
"Ellbogen",
"Eloquenz",
"Emigrant",
"Emission",
"Emotion",
"Empathie",
"Empfang",
"Endzeit",
"Energie",
"Engpass",
"Enkel",
"Enklave",
"Ente",
"entheben",
"Entität",
"entladen",
"Entwurf",
"Episode",
"Epoche",
"erachten",
"Erbauer",
"erblühen",
"Erdbeere",
"Erde",
"Erdgas",
"Erdkunde",
"Erdnuss",
"Erdöl",
"Erdteil",
"Ereignis",
"Eremit",
"erfahren",
"Erfolg",
"erfreuen",
"erfüllen",
"Ergebnis",
"erhitzen",
"erkalten",
"erkennen",
"erleben",
"Erlösung",
"ernähren",
"erneuern",
"Ernte",
"Eroberer",
"eröffnen",
"Erosion",
"Erotik",
"Erpel",
"erraten",
"Erreger",
"erröten",
"Ersatz",
"Erstflug",
"Ertrag",
"Eruption",
"erwarten",
"erwidern",
"Erzbau",
"Erzeuger",
"erziehen",
"Esel",
"Eskimo",
"Eskorte",
"Espe",
"Espresso",
"essen",
"Etage",
"Etappe",
"Etat",
"Ethik",
"Etikett",
"Etüde",
"Eule",
"Euphorie",
"Europa",
"Everest",
"Examen",
"Exil",
"Exodus",
"Extrakt",
"Fabel",
"Fabrik",
"Fachmann",
"Fackel",
"Faden",
"Fagott",
"Fahne",
"Faible",
"Fairness",
"Fakt",
"Fakultät",
"Falke",
"Fallobst",
"Fälscher",
"Faltboot",
"Familie",
"Fanclub",
"Fanfare",
"Fangarm",
"Fantasie",
"Farbe",
"Farmhaus",
"Farn",
"Fasan",
"Faser",
"Fassung",
"fasten",
"Faulheit",
"Fauna",
"Faust",
"Favorit",
"Faxgerät",
"Fazit",
"fechten",
"Federboa",
"Fehler",
"Feier",
"Feige",
"feilen",
"Feinripp",
"Feldbett",
"Felge",
"Fellpony",
"Felswand",
"Ferien",
"Ferkel",
"Fernweh",
"Ferse",
"Fest",
"Fettnapf",
"Feuer",
"Fiasko",
"Fichte",
"Fiktion",
"Film",
"Filter",
"Filz",
"Finanzen",
"Findling",
"Finger",
"Fink",
"Finnwal",
"Fisch",
"Fitness",
"Fixpunkt",
"Fixstern",
"Fjord",
"Flachbau",
"Flagge",
"Flamenco",
"Flanke",
"Flasche",
"Flaute",
"Fleck",
"Flegel",
"flehen",
"Fleisch",
"fliegen",
"Flinte",
"Flirt",
"Flocke",
"Floh",
"Floskel",
"Floß",
"Flöte",
"Flugzeug",
"Flunder",
"Flusstal",
"Flutung",
"Fockmast",
"Fohlen",
"Föhnlage",
"Fokus",
"folgen",
"Foliant",
"Folklore",
"Fontäne",
"Förde",
"Forelle",
"Format",
"Forscher",
"Fortgang",
"Forum",
"Fotograf",
"Frachter",
"Fragment",
"Fraktion",
"fräsen",
"Frauenpo",
"Freak",
"Fregatte",
"Freiheit",
"Freude",
"Frieden",
"Frohsinn",
"Frosch",
"Frucht",
"Frühjahr",
"Fuchs",
"Fügung",
"fühlen",
"Füller",
"Fundbüro",
"Funkboje",
"Funzel",
"Furnier",
"Fürsorge",
"Fusel",
"Fußbad",
"Futteral",
"Gabelung",
"gackern",
"Gage",
"gähnen",
"Galaxie",
"Galeere",
"Galopp",
"Gameboy",
"Gamsbart",
"Gandhi",
"Gang",
"Garage",
"Gardine",
"Garküche",
"Garten",
"Gasthaus",
"Gattung",
"gaukeln",
"Gazelle",
"Gebäck",
"Gebirge",
"Gebräu",
"Geburt",
"Gedanke",
"Gedeck",
"Gedicht",
"Gefahr",
"Gefieder",
"Geflügel",
"Gefühl",
"Gegend",
"Gehirn",
"Gehöft",
"Gehweg",
"Geige",
"Geist",
"Gelage",
"Geld",
"Gelenk",
"Gelübde",
"Gemälde",
"Gemeinde",
"Gemüse",
"genesen",
"Genuss",
"Gepäck",
"Geranie",
"Gericht",
"Germane",
"Geruch",
"Gesang",
"Geschenk",
"Gesetz",
"Gesindel",
"Gesöff",
"Gespan",
"Gestade",
"Gesuch",
"Getier",
"Getränk",
"Getümmel",
"Gewand",
"Geweih",
"Gewitter",
"Gewölbe",
"Geysir",
"Giftzahn",
"Gipfel",
"Giraffe",
"Gitarre",
"glänzen",
"Glasauge",
"Glatze",
"Gleis",
"Globus",
"Glück",
"glühen",
"Glutofen",
"Goldzahn",
"Gondel",
"gönnen",
"Gottheit",
"graben",
"Grafik",
"Grashalm",
"Graugans",
"greifen",
"Grenze",
"grillen",
"Groschen",
"Grotte",
"Grube",
"Grünalge",
"Gruppe",
"gruseln",
"Gulasch",
"Gummibär",
"Gurgel",
"Gürtel",
"Güterzug",
"Haarband",
"Habicht",
"hacken",
"hadern",
"Hafen",
"Hagel",
"Hähnchen",
"Haifisch",
"Haken",
"Halbaffe",
"Halsader",
"halten",
"Halunke",
"Handbuch",
"Hanf",
"Harfe",
"Harnisch",
"härten",
"Harz",
"Hasenohr",
"Haube",
"hauchen",
"Haupt",
"Haut",
"Havarie",
"Hebamme",
"hecheln",
"Heck",
"Hedonist",
"Heiler",
"Heimat",
"Heizung",
"Hektik",
"Held",
"helfen",
"Helium",
"Hemd",
"hemmen",
"Hengst",
"Herd",
"Hering",
"Herkunft",
"Hermelin",
"Herrchen",
"Herzdame",
"Heulboje",
"Hexe",
"Hilfe",
"Himbeere",
"Himmel",
"Hingabe",
"hinhören",
"Hinweis",
"Hirsch",
"Hirte",
"Hitzkopf",
"Hobel",
"Hochform",
"Hocker",
"hoffen",
"Hofhund",
"Hofnarr",
"Höhenzug",
"Hohlraum",
"Hölle",
"Holzboot",
"Honig",
"Honorar",
"horchen",
"Hörprobe",
"Höschen",
"Hotel",
"Hubraum",
"Hufeisen",
"Hügel",
"huldigen",
"Hülle",
"Humbug",
"Hummer",
"Humor",
"Hund",
"Hunger",
"Hupe",
"Hürde",
"Hurrikan",
"Hydrant",
"Hypnose",
"Ibis",
"Idee",
"Idiot",
"Igel",
"Illusion",
"Imitat",
"impfen",
"Import",
"Inferno",
"Ingwer",
"Inhalte",
"Inland",
"Insekt",
"Ironie",
"Irrfahrt",
"Irrtum",
"Isolator",
"Istwert",
"Jacke",
"Jade",
"Jagdhund",
"Jäger",
"Jaguar",
"Jahr",
"Jähzorn",
"Jazzfest",
"Jetpilot",
"jobben",
"Jochbein",
"jodeln",
"Jodsalz",
"Jolle",
"Journal",
"Jubel",
"Junge",
"Junimond",
"Jupiter",
"Jutesack",
"Juwel",
"Kabarett",
"Kabine",
"Kabuff",
"Käfer",
"Kaffee",
"Kahlkopf",
"Kaimauer",
"Kajüte",
"Kaktus",
"Kaliber",
"Kaltluft",
"Kamel",
"kämmen",
"Kampagne",
"Kanal",
"Känguru",
"Kanister",
"Kanone",
"Kante",
"Kanu",
"kapern",
"Kapitän",
"Kapuze",
"Karneval",
"Karotte",
"Käsebrot",
"Kasper",
"Kastanie",
"Katalog",
"Kathode",
"Katze",
"kaufen",
"Kaugummi",
"Kauz",
"Kehle",
"Keilerei",
"Keksdose",
"Kellner",
"Keramik",
"Kerze",
"Kessel",
"Kette",
"keuchen",
"kichern",
"Kielboot",
"Kindheit",
"Kinnbart",
"Kinosaal",
"Kiosk",
"Kissen",
"Klammer",
"Klang",
"Klapprad",
"Klartext",
"kleben",
"Klee",
"Kleinod",
"Klima",
"Klingel",
"Klippe",
"Klischee",
"Kloster",
"Klugheit",
"Klüngel",
"kneten",
"Knie",
"Knöchel",
"knüpfen",
"Kobold",
"Kochbuch",
"Kohlrabi",
"Koje",
"Kokosöl",
"Kolibri",
"Kolumne",
"Kombüse",
"Komiker",
"kommen",
"Konto",
"Konzept",
"Kopfkino",
"Kordhose",
"Korken",
"Korsett",
"Kosename",
"Krabbe",
"Krach",
"Kraft",
"Krähe",
"Kralle",
"Krapfen",
"Krater",
"kraulen",
"Kreuz",
"Krokodil",
"Kröte",
"Kugel",
"Kuhhirt",
"Kühnheit",
"Künstler",
"Kurort",
"Kurve",
"Kurzfilm",
"kuscheln",
"küssen",
"Kutter",
"Labor",
"lachen",
"Lackaffe",
"Ladeluke",
"Lagune",
"Laib",
"Lakritze",
"Lammfell",
"Land",
"Langmut",
"Lappalie",
"Last",
"Laterne",
"Latzhose",
"Laubsäge",
"laufen",
"Laune",
"Lausbub",
"Lavasee",
"Leben",
"Leder",
"Leerlauf",
"Lehm",
"Lehrer",
"leihen",
"Lektüre",
"Lenker",
"Lerche",
"Leseecke",
"Leuchter",
"Lexikon",
"Libelle",
"Libido",
"Licht",
"Liebe",
"liefern",
"Liftboy",
"Limonade",
"Lineal",
"Linoleum",
"List",
"Liveband",
"Lobrede",
"locken",
"Löffel",
"Logbuch",
"Logik",
"Lohn",
"Loipe",
"Lokal",
"Lorbeer",
"Lösung",
"löten",
"Lottofee",
"Löwe",
"Luchs",
"Luder",
"Luftpost",
"Luke",
"Lümmel",
"Lunge",
"lutschen",
"Luxus",
"Macht",
"Magazin",
"Magier",
"Magnet",
"mähen",
"Mahlzeit",
"Mahnmal",
"Maibaum",
"Maisbrei",
"Makel",
"malen",
"Mammut",
"Maniküre",
"Mantel",
"Marathon",
"Marder",
"Marine",
"Marke",
"Marmor",
"Märzluft",
"Maske",
"Maßanzug",
"Maßkrug",
"Mastkorb",
"Material",
"Matratze",
"Mauerbau",
"Maulkorb",
"Mäuschen",
"Mäzen",
"Medium",
"Meinung",
"melden",
"Melodie",
"Mensch",
"Merkmal",
"Messe",
"Metall",
"Meteor",
"Methode",
"Metzger",
"Mieze",
"Milchkuh",
"Mimose",
"Minirock",
"Minute",
"mischen",
"Missetat",
"mitgehen",
"Mittag",
"Mixtape",
"Möbel",
"Modul",
"mögen",
"Möhre",
"Molch",
"Moment",
"Monat",
"Mondflug",
"Monitor",
"Monokini",
"Monster",
"Monument",
"Moorhuhn",
"Moos",
"Möpse",
"Moral",
"Mörtel",
"Motiv",
"Motorrad",
"Möwe",
"Mühe",
"Mulatte",
"Müller",
"Mumie",
"Mund",
"Münze",
"Muschel",
"Muster",
"Mythos",
"Nabel",
"Nachtzug",
"Nackedei",
"Nagel",
"Nähe",
"Nähnadel",
"Namen",
"Narbe",
"Narwal",
"Nasenbär",
"Natur",
"Nebel",
"necken",
"Neffe",
"Neigung",
"Nektar",
"Nenner",
"Neptun",
"Nerz",
"Nessel",
"Nestbau",
"Netz",
"Neubau",
"Neuerung",
"Neugier",
"nicken",
"Niere",
"Nilpferd",
"nisten",
"Nocke",
"Nomade",
"Nordmeer",
"Notdurft",
"Notstand",
"Notwehr",
"Nudismus",
"Nuss",
"Nutzhanf",
"Oase",
"Obdach",
"Oberarzt",
"Objekt",
"Oboe",
"Obsthain",
"Ochse",
"Odyssee",
"Ofenholz",
"öffnen",
"Ohnmacht",
"Ohrfeige",
"Ohrwurm",
"Ökologie",
"Oktave",
"Ölberg",
"Olive",
"Ölkrise",
"Omelett",
"Onkel",
"Oper",
"Optiker",
"Orange",
"Orchidee",
"ordnen",
"Orgasmus",
"Orkan",
"Ortskern",
"Ortung",
"Ostasien",
"Ozean",
"Paarlauf",
"Packeis",
"paddeln",
"Paket",
"Palast",
"Pandabär",
"Panik",
"Panorama",
"Panther",
"Papagei",
"Papier",
"Paprika",
"Paradies",
"Parka",
"Parodie",
"Partner",
"Passant",
"Patent",
"Patzer",
"Pause",
"Pavian",
"Pedal",
"Pegel",
"peilen",
"Perle",
"Person",
"Pfad",
"Pfau",
"Pferd",
"Pfleger",
"Physik",
"Pier",
"Pilotwal",
"Pinzette",
"Piste",
"Plakat",
"Plankton",
"Platin",
"Plombe",
"plündern",
"Pobacke",
"Pokal",
"polieren",
"Popmusik",
"Porträt",
"Posaune",
"Postamt",
"Pottwal",
"Pracht",
"Pranke",
"Preis",
"Primat",
"Prinzip",
"Protest",
"Proviant",
"Prüfung",
"Pubertät",
"Pudding",
"Pullover",
"Pulsader",
"Punkt",
"Pute",
"Putsch",
"Puzzle",
"Python",
"quaken",
"Qualle",
"Quark",
"Quellsee",
"Querkopf",
"Quitte",
"Quote",
"Rabauke",
"Rache",
"Radclub",
"Radhose",
"Radio",
"Radtour",
"Rahmen",
"Rampe",
"Randlage",
"Ranzen",
"Rapsöl",
"Raserei",
"rasten",
"Rasur",
"Rätsel",
"Raubtier",
"Raumzeit",
"Rausch",
"Reaktor",
"Realität",
"Rebell",
"Rede",
"Reetdach",
"Regatta",
"Regen",
"Rehkitz",
"Reifen",
"Reim",
"Reise",
"Reizung",
"Rekord",
"Relevanz",
"Rennboot",
"Respekt",
"Restmüll",
"retten",
"Reue",
"Revolte",
"Rhetorik",
"Rhythmus",
"Richtung",
"Riegel",
"Rindvieh",
"Rippchen",
"Ritter",
"Robbe",
"Roboter",
"Rockband",
"Rohdaten",
"Roller",
"Roman",
"röntgen",
"Rose",
"Rosskur",
"Rost",
"Rotahorn",
"Rotglut",
"Rotznase",
"Rubrik",
"Rückweg",
"Rufmord",
"Ruhe",
"Ruine",
"Rumpf",
"Runde",
"Rüstung",
"rütteln",
"Saaltür",
"Saatguts",
"Säbel",
"Sachbuch",
"Sack",
"Saft",
"sagen",
"Sahneeis",
"Salat",
"Salbe",
"Salz",
"Sammlung",
"Samt",
"Sandbank",
"Sanftmut",
"Sardine",
"Satire",
"Sattel",
"Satzbau",
"Sauerei",
"Saum",
"Säure",
"Schall",
"Scheitel",
"Schiff",
"Schlager",
"Schmied",
"Schnee",
"Scholle",
"Schrank",
"Schulbus",
"Schwan",
"Seeadler",
"Seefahrt",
"Seehund",
"Seeufer",
"segeln",
"Sehnerv",
"Seide",
"Seilzug",
"Senf",
"Sessel",
"Seufzer",
"Sexgott",
"Sichtung",
"Signal",
"Silber",
"singen",
"Sinn",
"Sirup",
"Sitzbank",
"Skandal",
"Skikurs",
"Skipper",
"Skizze",
"Smaragd",
"Socke",
"Sohn",
"Sommer",
"Songtext",
"Sorte",
"Spagat",
"Spannung",
"Spargel",
"Specht",
"Speiseöl",
"Spiegel",
"Sport",
"spülen",
"Stadtbus",
"Stall",
"Stärke",
"Stativ",
"staunen",
"Stern",
"Stiftung",
"Stollen",
"Strömung",
"Sturm",
"Substanz",
"Südalpen",
"Sumpf",
"surfen",
"Tabak",
"Tafel",
"Tagebau",
"takeln",
"Taktung",
"Talsohle",
"Tand",
"Tanzbär",
"Tapir",
"Tarantel",
"Tarnname",
"Tasse",
"Tatnacht",
"Tatsache",
"Tatze",
"Taube",
"tauchen",
"Taufpate",
"Taumel",
"Teelicht",
"Teich",
"teilen",
"Tempo",
"Tenor",
"Terrasse",
"Testflug",
"Theater",
"Thermik",
"ticken",
"Tiefflug",
"Tierart",
"Tigerhai",
"Tinte",
"Tischler",
"toben",
"Toleranz",
"Tölpel",
"Tonband",
"Topf",
"Topmodel",
"Torbogen",
"Torlinie",
"Torte",
"Tourist",
"Tragesel",
"trampeln",
"Trapez",
"Traum",
"treffen",
"Trennung",
"Treue",
"Trick",
"trimmen",
"Trödel",
"Trost",
"Trumpf",
"tüfteln",
"Turban",
"Turm",
"Übermut",
"Ufer",
"Uhrwerk",
"umarmen",
"Umbau",
"Umfeld",
"Umgang",
"Umsturz",
"Unart",
"Unfug",
"Unimog",
"Unruhe",
"Unwucht",
"Uranerz",
"Urlaub",
"Urmensch",
"Utopie",
"Vakuum",
"Valuta",
"Vandale",
"Vase",
"Vektor",
"Ventil",
"Verb",
"Verdeck",
"Verfall",
"Vergaser",
"verhexen",
"Verlag",
"Vers",
"Vesper",
"Vieh",
"Viereck",
"Vinyl",
"Virus",
"Vitrine",
"Vollblut",
"Vorbote",
"Vorrat",
"Vorsicht",
"Vulkan",
"Wachstum",
"Wade",
"Wagemut",
"Wahlen",
"Wahrheit",
"Wald",
"Walhai",
"Wallach",
"Walnuss",
"Walzer",
"wandeln",
"Wanze",
"wärmen",
"Warnruf",
"Wäsche",
"Wasser",
"Weberei",
"wechseln",
"Wegegeld",
"wehren",
"Weiher",
"Weinglas",
"Weißbier",
"Weitwurf",
"Welle",
"Weltall",
"Werkbank",
"Werwolf",
"Wetter",
"wiehern",
"Wildgans",
"Wind",
"Wohl",
"Wohnort",
"Wolf",
"Wollust",
"Wortlaut",
"Wrack",
"Wunder",
"Wurfaxt",
"Wurst",
"Yacht",
"Yeti",
"Zacke",
"Zahl",
"zähmen",
"Zahnfee",
"Zäpfchen",
"Zaster",
"Zaumzeug",
"Zebra",
"zeigen",
"Zeitlupe",
"Zellkern",
"Zeltdach",
"Zensor",
"Zerfall",
"Zeug",
"Ziege",
"Zielfoto",
"Zimteis",
"Zobel",
"Zollhund",
"Zombie",
"Zöpfe",
"Zucht",
"Zufahrt",
"Zugfahrt",
"Zugvogel",
"Zündung",
"Zweck",
"Zyklop",
];
|
#[macro_use]
extern crate lazy_static;
extern crate regex;
use std::collections::HashSet;
use std::env;
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::result;
use std::str::FromStr;
use regex::Regex;
// Fabric grid dimensions; claims are assumed to fit within this grid
// (TODO confirm against the puzzle input).
const HEIGHT: usize = 1000;
const WIDTH: usize = 1000;
/// One rectangular claim, parsed from a line like `#1 @ 3,2: 5x4`.
#[derive(Debug, PartialEq)]
struct Area {
    // Claim id (`#1`).
    id: usize,
    // Offset from the left edge (`3` in the example).
    from_left: usize,
    // Offset from the top edge (`2` in the example).
    from_top: usize,
    // Rectangle width (`5`).
    width: usize,
    // Rectangle height (`4`).
    height: usize,
}
/// File-local result alias with a boxed dynamic error.
type Result<T> = result::Result<T, Box<Error>>;
impl FromStr for Area {
    type Err = Box<Error>;
    /// Parse a claim line of the form `#id @ left,top: widthxheight`.
    ///
    /// Returns an error — instead of panicking, as the previous version
    /// did — when the line does not match the expected shape or a field
    /// fails to parse.
    fn from_str(s: &str) -> Result<Area> {
        lazy_static! {
            static ref RE: Regex = Regex::new(
                r"(?x)
                \#
                (?P<id>[0-9]+)
                \s+@\s+
                (?P<fl>[0-9]+),(?P<ft>[0-9]+):
                \s+
                (?P<w>[0-9]+)x(?P<h>[0-9]+)
                "
            )
            .unwrap();
        }
        // A `from_str` contract is to report bad input via Err, not panic.
        let caps = RE
            .captures(s)
            .ok_or_else(|| format!("malformed claim line: {:?}", s))?;
        Ok(Area {
            id: caps["id"].parse()?,
            from_left: caps["fl"].parse()?,
            from_top: caps["ft"].parse()?,
            width: caps["w"].parse()?,
            height: caps["h"].parse()?,
        })
    }
}
/// Count the number of square inches of fabric claimed by two or more areas.
fn part_1(lines: Vec<String>) -> usize {
    // Allocate the fabric on the heap: a stack `[[usize; 1000]; 1000]`
    // is ~8 MB and risks overflowing the thread stack.
    let mut fabric = vec![[0usize; WIDTH]; HEIGHT];
    // Stamp every claim onto the grid, counting claims per square.
    for line in lines {
        let area = Area::from_str(&line).unwrap();
        for i in area.from_top..(area.from_top + area.height) {
            for j in area.from_left..(area.from_left + area.width) {
                fabric[i][j] += 1;
            }
        }
    }
    // A square overlaps when claimed more than once; sum directly instead
    // of collecting into an intermediate Vec and folding.
    fabric
        .iter()
        .map(|row| row.iter().filter(|&&claims| claims > 1).count())
        .sum()
}
/// Find the id of the single claim that overlaps no other claim.
fn part_2(lines: Vec<String>) -> usize {
    // For every square, record the ids of all claims covering it.
    let mut fabric: Vec<Vec<Vec<usize>>> = vec![vec![Vec::new(); WIDTH]; HEIGHT];
    // All claim ids seen (ids are not guaranteed to be consecutive).
    let mut original_ids: HashSet<usize> = HashSet::new();
    // Fill the structure.
    for line in lines {
        let area = Area::from_str(&line).unwrap();
        original_ids.insert(area.id);
        for i in area.from_top..(area.from_top + area.height) {
            for j in area.from_left..(area.from_left + area.width) {
                fabric[i][j].push(area.id);
            }
        }
    }
    // Every id sharing a square with another id is overlapped: drop it.
    // (`HashSet::remove` is a no-op for absent keys, so no `contains`
    // pre-check is needed.)
    for row in &fabric {
        for cell in row {
            if cell.len() > 1 {
                for id in cell {
                    original_ids.remove(id);
                }
            }
        }
    }
    // Exactly one id should survive; panic (as before) if the input
    // violates that assumption.
    *original_ids
        .iter()
        .next()
        .expect("no claim is free of overlaps")
}
/// Entry point: read the input file named as the first CLI argument and
/// print the answers to both puzzle parts.
fn main() {
    // Process the file
    let args: Vec<String> = env::args().collect();
    if args.len() == 1 {
        // Fixed the broken grammar of the original message
        // ("Please to set a file name").
        eprintln!("Please provide a file name");
        std::process::exit(1);
    }
    let f_pointer = File::open(&args[1]).expect("Unable to open the given file");
    let f_lines: Vec<String> = BufReader::new(f_pointer)
        .lines()
        .map(|line| line.unwrap())
        .collect();
    // Part 1: number of squares claimed at least twice.
    let nb_occupied_squares = part_1(f_lines.clone());
    println!("Nb occupied squares is {}", nb_occupied_squares);
    // Part 2: id of the only claim with no overlap.
    let free_area_id = part_2(f_lines);
    println!("The ID of the free area is {}", free_area_id);
}
|
/*!
* Sylphrena AI input program - https://github.com/ShardAi
* Version - 1.0.0.0
*
* Copyright (c) 2017 Eirik Skjeggestad Dale
*/
use input::syl_wordgram::Unigram;
use input::syl_wordgram::Bigram;
use std::fs::File;
use std::io::prelude::*;
/// Simple n-gram model trained over whitespace-separated words.
pub struct Sylnlp{
    // Human-readable identifier used in log output.
    id: String,
    // Distinct single words seen during training.
    unigrams: Vec<Unigram>,
    // Distinct adjacent word pairs seen during training.
    bigrams: Vec<Bigram>
}
impl Sylnlp {
pub fn new(id: &str) -> Sylnlp{
let nlp = Sylnlp {
id: id.to_string(),
unigrams: Vec::new(),
bigrams: Vec::new()
};
println!("Created {}!", nlp.id.to_string());
return nlp;
}
pub fn train_nlp(&mut self, training_text: &str) {
println!("Training {0} using training text: {1}!", self.id.to_string(), training_text.to_string());
let mut f = File::open(training_text).expect("file not found");
let mut contents = String::new();
f.read_to_string(&mut contents).expect("Something went wrong reading the file");
contents = contents.replace("_", "");
contents = contents.replace("--", " ");
contents = contents.replace(".", "");
contents = contents.replace(",", "");
contents = contents.replace("=", "");
contents = contents.replace("+", "");
contents = contents.replace("?", "");
contents = contents.replace("!", "");
contents = contents.replace(":", "");
contents = contents.replace(";", "");
contents = contents.replace("[", "");
contents = contents.replace("]", "");
contents = contents.replace("/", "");
contents = contents.replace("{", "");
contents = contents.replace("}", "");
contents = contents.replace("(", "");
contents = contents.replace(")", "");
contents = contents.replace("\n", " ");
contents = contents.replace(" ", " ");
let training_set: Vec<&str> = contents.split(" ").collect();
let number_of_words = training_set.len();
let mut prev_word = "";
for word in training_set.iter() {
let mut unigrams_exist = false;
for i in 0..self.unigrams.len() {
if self.unigrams[i].matches(word) {
unigrams_exist = true;
break;
}
}
if !unigrams_exist {
self.unigrams.push(Unigram::new(word));
}
if prev_word != "" && word.to_string() != "" {
let mut bigrams_exist = false;
for i in 0..self.bigrams.len() {
if self.bigrams[i].matches(prev_word, word) {
bigrams_exist = true;
break;
}
}
if !bigrams_exist {
self.bigrams.push(Bigram::new(prev_word, word));
}
}
prev_word = word;
}
let mut top_prio = 0.0;
let mut top_i = 0;
for i in 0..self.unigrams.len() {
self.unigrams[i].calc_probability(number_of_words as f64);
if self.unigrams[i].get_probability() > top_prio {
top_prio = self.unigrams[i].get_probability();
top_i = i;
}
}
println!("Total of {0} unigrams found. Top unigram is number {1}; wordgram: \"{2}\", probability: {3}", self.unigrams.len(),
top_i, self.unigrams[top_i].get_word(), self.unigrams[top_i].get_probability());
top_prio = 0.0;
top_i = 0;
for i in 0..self.bigrams.len() {
self.bigrams[i].calc_probability(number_of_words as f64);
if self.bigrams[i].get_probability() > top_prio {
top_prio = self.bigrams[i].get_probability();
top_i = i;
}
}
println!("Total of {0} bigrams found. Top bigram is number {1}; wordgram: \"{2} {3}\", probability: {4}", self.unigrams.len(),
top_i, self.bigrams[top_i].get_word1(), self.bigrams[top_i].get_word2(), self.bigrams[top_i].get_probability());
}
} |
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::io::{self};
use std::io::{BufReader, BufWriter};
use std::process;
// syscall version
/// `cat` clone entry point: with no arguments, copy stdin to stdout;
/// otherwise concatenate every named file, in order.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        // No file arguments: stream stdin.
        let mut buf_in = BufReader::new(io::stdin());
        do_cat(&mut buf_in);
    } else {
        for i in 1..args.len() {
            let f = match File::open(&args[i]) {
                Ok(file) => file,
                // Typo fix in the panic message: "couln't" -> "couldn't".
                Err(why) => panic!("couldn't open {}: {}", &args[i], why.to_string()),
            };
            let mut buf_in = BufReader::new(f);
            do_cat(&mut buf_in);
        }
    }
    process::exit(0);
}
// Chunk size for the read/write loop.
const BUFFER_SIZE: usize = 2048;
/// Copy everything from `buf_in` to stdout.
///
/// The stream (and the file it may wrap) is closed automatically when the
/// caller drops it; `BufWriter` flushes on drop.
fn do_cat(buf_in: &mut dyn BufRead) {
    let mut buf_out = BufWriter::new(io::stdout());
    copy_stream(buf_in, &mut buf_out);
}
/// Pump `input` into `output` in `BUFFER_SIZE` chunks until EOF.
///
/// BUG FIX: the previous version wrote the WHOLE buffer on every iteration
/// (`write(&buffer)`), so any read shorter than `BUFFER_SIZE` — in
/// particular the final chunk of every stream — emitted stale/NUL padding
/// bytes. Writing only the freshly-read prefix with `write_all` fixes the
/// corruption and also guards against short writes, which bare `write`
/// permits.
fn copy_stream(input: &mut dyn BufRead, output: &mut dyn Write) {
    let mut buffer = [0u8; BUFFER_SIZE];
    loop {
        let n = match input.read(&mut buffer) {
            Ok(len) => len,
            Err(why) => panic!("couldn't read file: {}", why.to_string()),
        };
        if n == 0 {
            break; // EOF
        }
        // Only the first `n` bytes are valid output.
        output.write_all(&buffer[..n]).unwrap();
    }
}
|
extern crate rand;
use std::io;
use rand::Rng;
/// Guessing game: picks a secret number, prints it (debug aid), and echoes
/// the player's input.
fn main() {
    // BUGFIX: the rand method is `gen_range`, not `get_range` (which does
    // not exist); the old two-argument form yields a value in [1, 100).
    let secret = rand::thread_rng()
        .gen_range(1, 100);
    println!("загаданное число: {}", secret);
    println!("угадай число");
    println!("введите число");
    let mut s = String::new();
    io::stdin()
        .read_line(&mut s)
        .expect("не удалось прочитать ввод");
    println!("вы ввели: {}", s);
}
|
// http://stackoverflow.com/questions/26945853/a-built-in-object-in-rust
// Intentionally empty: the linked StackOverflow answer only needs a
// compilable stub program.
fn main() {
}
extern crate bitcoinrs_bytes;
extern crate bitcoinrs_net;
use std::env::args;
use bitcoinrs_net::{NetworkType, socket::open_connection};
/// Connects to the peer given as the first CLI argument and sends a version
/// message on the Bitcoin main network.
fn main() {
    // `nth(1)` is the idiomatic form of `skip(1).next()`; expect() gives a
    // usable diagnostic when the address argument is missing.
    let first_arg = args().nth(1).expect("usage: <program> <peer-address>");
    let handshake = open_connection(first_arg.parse().unwrap(), NetworkType::Main).unwrap();
    println!("connected");
    handshake.send_version_msg().unwrap();
    println!("Sent version msg");
}
|
use std::time::Instant;
const INPUT: &str = include_str!("../input.txt");
/// AoC 2015 day 8 part 1: difference between the number of characters of
/// code and the number of characters in memory, summed over all lines.
fn part1() -> usize {
    let mut encoded = 0;
    let mut decoded = 0;
    for line in INPUT.lines() {
        // The surrounding quotes count in code but not in memory.
        encoded += 2;
        let mut inner = line.chars().skip(1).take(line.len() - 2);
        while let Some(ch) = inner.next() {
            encoded += 1;
            decoded += 1;
            if ch != '\\' {
                continue;
            }
            // Escape sequence: consume and account for the remainder.
            match inner.next().unwrap() {
                '\\' | '"' => encoded += 1, // two code chars, one in memory
                'x' => {
                    encoded += 3; // 'x' plus two hex digits
                    inner.next().unwrap();
                    inner.next().unwrap();
                }
                _ => unreachable!(),
            }
        }
    }
    encoded - decoded
}
/// AoC 2015 day 8 part 2: extra characters needed to re-encode each line
/// (escaping `\` and `"`, plus the new surrounding/escaped quotes).
fn part2() -> usize {
    INPUT
        .lines()
        .map(|line| {
            // Each inner `\` or `"` doubles; everything else stays one char.
            let inner: usize = line
                .chars()
                .skip(1)
                .take(line.len() - 2)
                .map(|c| if c == '\\' || c == '"' { 2 } else { 1 })
                .sum();
            // 6 = opening/closing quotes (2) + escaped original quotes (4).
            6 + inner - line.len()
        })
        .sum()
}
/// Runs both parts and reports wall-clock timings.
fn main() {
    // `start.elapsed()` is the idiomatic replacement for
    // `(Instant::now() - start)`.
    let start = Instant::now();
    println!("part 1: {}", part1());
    println!("part 1 took {}ms", start.elapsed().as_millis());
    let start = Instant::now();
    println!("part 2: {}", part2());
    println!("part 2 took {}ms", start.elapsed().as_millis());
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values are specific to this puzzle's bundled input.txt; they
    // will differ for any other input file.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 1350);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 2085);
    }
}
|
// NOTE(review): this file appears to be svd2rust-generated; prefer
// regenerating from the SVD description over hand-editing these aliases.
#[doc = "Register `MTLRxQOMR` reader"]
pub type R = crate::R<MTLRX_QOMR_SPEC>;
#[doc = "Register `MTLRxQOMR` writer"]
pub type W = crate::W<MTLRX_QOMR_SPEC>;
#[doc = "Field `RTC` reader - Receive Queue Threshold Control"]
pub type RTC_R = crate::FieldReader;
#[doc = "Field `RTC` writer - Receive Queue Threshold Control"]
pub type RTC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `FUP` reader - Forward Undersized Good Packets"]
pub type FUP_R = crate::BitReader;
#[doc = "Field `FUP` writer - Forward Undersized Good Packets"]
pub type FUP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FEP` reader - Forward Error Packets"]
pub type FEP_R = crate::BitReader;
#[doc = "Field `FEP` writer - Forward Error Packets"]
pub type FEP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RSF` reader - Receive Queue Store and Forward"]
pub type RSF_R = crate::BitReader;
#[doc = "Field `RSF` writer - Receive Queue Store and Forward"]
pub type RSF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DIS_TCP_EF` reader - Disable Dropping of TCP"]
pub type DIS_TCP_EF_R = crate::BitReader;
#[doc = "Field `DIS_TCP_EF` writer - Disable Dropping of TCP"]
pub type DIS_TCP_EF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EHFC` reader - Enable Hardware Flow Control"]
pub type EHFC_R = crate::BitReader;
#[doc = "Field `EHFC` writer - Enable Hardware Flow Control"]
pub type EHFC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RFA` reader - Threshold for Activating Flow Control"]
pub type RFA_R = crate::FieldReader;
#[doc = "Field `RFA` writer - Threshold for Activating Flow Control"]
pub type RFA_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `RFD` reader - Threshold for Deactivating Flow Control"]
pub type RFD_R = crate::FieldReader;
#[doc = "Field `RFD` writer - Threshold for Deactivating Flow Control"]
pub type RFD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `RQS` reader - Receive Queue Size"]
pub type RQS_R = crate::FieldReader;
#[doc = "Field `RQS` writer - Receive Queue Size"]
pub type RQS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
// Generated read accessors: each extracts its field with a shift/mask
// matching the bit range in the #[doc] attribute.
impl R {
    #[doc = "Bits 0:1 - Receive Queue Threshold Control"]
    #[inline(always)]
    pub fn rtc(&self) -> RTC_R {
        RTC_R::new((self.bits & 3) as u8)
    }
    #[doc = "Bit 3 - Forward Undersized Good Packets"]
    #[inline(always)]
    pub fn fup(&self) -> FUP_R {
        FUP_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Forward Error Packets"]
    #[inline(always)]
    pub fn fep(&self) -> FEP_R {
        FEP_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Receive Queue Store and Forward"]
    #[inline(always)]
    pub fn rsf(&self) -> RSF_R {
        RSF_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Disable Dropping of TCP"]
    #[inline(always)]
    pub fn dis_tcp_ef(&self) -> DIS_TCP_EF_R {
        DIS_TCP_EF_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Enable Hardware Flow Control"]
    #[inline(always)]
    pub fn ehfc(&self) -> EHFC_R {
        EHFC_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bits 8:10 - Threshold for Activating Flow Control"]
    #[inline(always)]
    pub fn rfa(&self) -> RFA_R {
        RFA_R::new(((self.bits >> 8) & 7) as u8)
    }
    #[doc = "Bits 14:16 - Threshold for Deactivating Flow Control"]
    #[inline(always)]
    pub fn rfd(&self) -> RFD_R {
        RFD_R::new(((self.bits >> 14) & 7) as u8)
    }
    #[doc = "Bits 20:22 - Receive Queue Size"]
    #[inline(always)]
    pub fn rqs(&self) -> RQS_R {
        RQS_R::new(((self.bits >> 20) & 7) as u8)
    }
}
// Generated write accessors: each field writer is parameterized by the
// register SPEC and its bit offset (the const generic argument).
impl W {
    #[doc = "Bits 0:1 - Receive Queue Threshold Control"]
    #[inline(always)]
    #[must_use]
    pub fn rtc(&mut self) -> RTC_W<MTLRX_QOMR_SPEC, 0> {
        RTC_W::new(self)
    }
    #[doc = "Bit 3 - Forward Undersized Good Packets"]
    #[inline(always)]
    #[must_use]
    pub fn fup(&mut self) -> FUP_W<MTLRX_QOMR_SPEC, 3> {
        FUP_W::new(self)
    }
    #[doc = "Bit 4 - Forward Error Packets"]
    #[inline(always)]
    #[must_use]
    pub fn fep(&mut self) -> FEP_W<MTLRX_QOMR_SPEC, 4> {
        FEP_W::new(self)
    }
    #[doc = "Bit 5 - Receive Queue Store and Forward"]
    #[inline(always)]
    #[must_use]
    pub fn rsf(&mut self) -> RSF_W<MTLRX_QOMR_SPEC, 5> {
        RSF_W::new(self)
    }
    #[doc = "Bit 6 - Disable Dropping of TCP"]
    #[inline(always)]
    #[must_use]
    pub fn dis_tcp_ef(&mut self) -> DIS_TCP_EF_W<MTLRX_QOMR_SPEC, 6> {
        DIS_TCP_EF_W::new(self)
    }
    #[doc = "Bit 7 - Enable Hardware Flow Control"]
    #[inline(always)]
    #[must_use]
    pub fn ehfc(&mut self) -> EHFC_W<MTLRX_QOMR_SPEC, 7> {
        EHFC_W::new(self)
    }
    #[doc = "Bits 8:10 - Threshold for Activating Flow Control"]
    #[inline(always)]
    #[must_use]
    pub fn rfa(&mut self) -> RFA_W<MTLRX_QOMR_SPEC, 8> {
        RFA_W::new(self)
    }
    #[doc = "Bits 14:16 - Threshold for Deactivating Flow Control"]
    #[inline(always)]
    #[must_use]
    pub fn rfd(&mut self) -> RFD_W<MTLRX_QOMR_SPEC, 14> {
        RFD_W::new(self)
    }
    #[doc = "Bits 20:22 - Receive Queue Size"]
    #[inline(always)]
    #[must_use]
    pub fn rqs(&mut self) -> RQS_W<MTLRX_QOMR_SPEC, 20> {
        RQS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Rx queue operating mode register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mtlrx_qomr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mtlrx_qomr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MTLRX_QOMR_SPEC;
impl crate::RegisterSpec for MTLRX_QOMR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mtlrx_qomr::R`](R) reader structure"]
impl crate::Readable for MTLRX_QOMR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mtlrx_qomr::W`](W) writer structure"]
impl crate::Writable for MTLRX_QOMR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MTLRxQOMR to value 0x0070_0000"]
// Reset value 0x0070_0000 sets RQS (bits 20:22) to 0b111; all other fields 0.
impl crate::Resettable for MTLRX_QOMR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0070_0000;
}
|
use amethyst::{
core::{
Transform,
//SystemDesc
},
//derive::SystemDesc,
ecs::{
Join, ReadStorage, System,
//SystemData, World,
WriteStorage
}
};
use crate::game::*;
pub struct CollisionSystem;

impl<'s> System<'s> for CollisionSystem {
    type SystemData = (
        WriteStorage< 's, Ball >,
        ReadStorage< 's, Paddle >,
        ReadStorage< 's, Transform >,
    );

    /// Reflects ball velocities on contact with the arena's top/bottom edges
    /// and with paddles.
    fn run ( &mut self, ( mut balls, paddles, transforms ): Self::SystemData ) {
        for (ball, ball_transform) in (&mut balls, &transforms).join() {
            let bx = ball_transform.translation().x;
            let by = ball_transform.translation().y;

            // Bounce vertically only while still travelling INTO the edge,
            // so the ball can't get stuck oscillating on the boundary.
            let leaving_bottom = by <= ball.radius && ball.velocity[1] < 0.0;
            let leaving_top = by >= ARENA_HEIGHT - ball.radius && ball.velocity[1] > 0.0;
            if leaving_bottom || leaving_top {
                ball.velocity[1] = -ball.velocity[1];
            }

            for (paddle, paddle_transform) in (&paddles, &transforms).join() {
                // Bottom-left corner of this paddle's rectangle.
                let px = paddle_transform.translation().x - (paddle.width * 0.5);
                let py = paddle_transform.translation().y - (paddle.height * 0.5);

                // Ball centre inside the paddle rect expanded by the radius.
                let overlaps = bx >= (px - ball.radius)
                    && bx <= (px + paddle.width + ball.radius)
                    && by >= (py - ball.radius)
                    && by <= (py + paddle.height + ball.radius);

                // Only reflect when moving toward this paddle's side.
                let approaching = (paddle.side == Side::Left && ball.velocity[0] < 0.0)
                    || (paddle.side == Side::Right && ball.velocity[0] > 0.0);

                if overlaps && approaching {
                    ball.velocity[0] = -ball.velocity[0];
                }
            }
        }
    }
}
|
use crate::config::DefaultConfig;
use crate::site::Site;
use crate::site::Site::{Found, Visited};
use std::future::Future;
use std::pin::Pin;
/// Pluggable crawl policy: which URLs to visit, how to extract links, and
/// how to fetch page text.
pub trait CrawlConfig {
    // Returns true if `url` should be fetched and recursed into.
    fn should_crawl_site(&self, url: &str) -> bool;
    // Extracts candidate URLs from a page body.
    fn url_extractor(&self, text: &str) -> Vec<String>;
    // Fetches the page body; boxed future so the trait stays object-safe.
    fn network_fetch_text(&self, url: &str) -> Pin<Box<dyn Future<Output=Result<String, ()>>>>;
}
/// Recursive sitemap crawler rooted at `starting_point`, parameterized by a
/// boxed `CrawlConfig` policy.
pub struct Crawler<'a> {
    pub starting_point: &'a str,
    pub config: Box<dyn CrawlConfig + 'a>,
}
impl<'a> Crawler<'a> {
    /// Creates a crawler rooted at `starting_point` using the default config.
    pub fn new(starting_point: &'a str) -> Crawler {
        Crawler {
            starting_point,
            config: Box::new(DefaultConfig::new(starting_point)),
        }
    }

    /// Builds the sitemap starting from `starting_point`.
    ///
    /// NOTE(review): an `async fn` returning a boxed future means callers
    /// must await twice; dropping `async` would be cleaner but would break
    /// existing callers, so the signature is kept as-is.
    pub async fn create_sitemap(&'a self) -> Pin<Box<dyn Future<Output=Site> + 'a>> {
        self.sitemap_of(self.starting_point)
    }

    /// Depth-first recursive visit of `url`: `Found` for sites the policy
    /// skips, `Visited` (with children / error flag) otherwise. Boxed
    /// because async recursion requires an indirection.
    fn sitemap_of(&'a self, url: &'a str) -> Pin<Box<dyn Future<Output=Site> + 'a>> {
        let sitemap = async move {
            // (fixed) the original re-bound `url` via `url.clone()`, which on
            // a `&str` just copies the reference (clippy::clone_on_copy).
            if !self.config.should_crawl_site(url) {
                return Found { url: url.to_string() };
            }
            let urls = self.config.network_fetch_text(url).await
                .map(|text| self.config.url_extractor(text.as_str()));
            match urls {
                Ok(urls) => {
                    let mut children = vec![];
                    for u in urls {
                        // Sequential crawl; each child may recurse further.
                        children.push(self.sitemap_of(u.as_str()).await);
                    }
                    Visited {
                        url: url.to_string(),
                        children,
                        is_error: false,
                    }
                }
                Err(_) => Visited {
                    url: url.to_string(),
                    children: vec![],
                    is_error: true,
                },
            }
        };
        Box::pin(sitemap)
    }
}
|
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
#[cfg(test)]
mod tests {
    use std::ffi::c_void;
    use hound;
    use id666_rs::ID666;
    use super::*;

    /// Decodes a bundled SPC file via the C library bindings and renders the
    /// audio to a WAV file.
    #[test]
    fn it_works() {
        unsafe {
            let mut data = include_bytes!("../test_fixtures/twinbeeeeeeeee.spc").to_vec();
            let spc = spc_new();
            let filter = spc_filter_new();
            // Parse the ID666 tag before handing the buffer to the decoder.
            let id6 = ID666::from(&mut data).unwrap();
            spc_load_spc(
                spc,
                data.as_mut_ptr() as *mut c_void,
                data.len() as ::std::os::raw::c_long,
            );
            spc_clear_echo(spc);
            spc_filter_clear(filter);
            spc_filter_set_gain(filter, 0x180);
            // total_len appears to be in half-frames -- TODO confirm against
            // the id666-rs documentation.
            let mut total_frames = id6.total_len.unwrap() / 2;
            let mut buf = [0i16; 2 * 4096];
            let spec = hound::WavSpec {
                channels: 2,
                sample_rate: 32000,
                bits_per_sample: 16,
                sample_format: hound::SampleFormat::Int,
            };
            let mut writer = hound::WavWriter::create("twinbeeeeeeeee.wav", spec).unwrap();
            while total_frames != 0 {
                let fc = if total_frames < 4096 {
                    total_frames
                } else {
                    4096
                };
                spc_play(spc, (fc * 2) as i32, buf.as_mut_ptr());
                spc_filter_run(filter, buf.as_mut_ptr(), (fc * 2) as i32);
                // BUGFIX: write only the `fc * 2` samples rendered this pass.
                // Previously the whole buffer was written each iteration, so
                // the final short chunk appended stale samples left over from
                // the preceding iteration.
                for &b in buf[..(fc * 2) as usize].iter() {
                    writer.write_sample(b).unwrap();
                }
                total_frames -= fc;
            }
            spc_delete(spc);
            spc_filter_delete(filter);
            writer.finalize().unwrap();
        }
    }
}
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "alloc_system"]
#![crate_type = "rlib"]
#![no_std]
#![cfg_attr(not(stage0), allocator)]
#![unstable(feature = "alloc_system",
reason = "this library is unlikely to be stabilized in its current \
form or name",
issue = "27783")]
#![feature(allocator)]
#![feature(asm)]
#![feature(staged_api)]
// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a
// constant at the call site and the branch will be optimized out.
#[cfg(all(any(target_arch = "arm",
              target_arch = "mips",
              target_arch = "mipsel",
              target_arch = "powerpc")))]
const MIN_ALIGN: usize = 8;
#[cfg(all(any(target_arch = "x86",
              target_arch = "x86_64",
              target_arch = "aarch64")))]
const MIN_ALIGN: usize = 16;
// Allocator entry points supplied by whatever allocator crate the final
// program links against; declared here so this crate can call them.
extern "C" {
    // NOTE(review): C's memmove returns void*; declaring it here with no
    // return value relies on the ABI tolerating an ignored return slot --
    // confirm for the targets above.
    fn memmove(dst: *mut u8, src: *const u8, size: usize);
    fn __rust_allocate(size: usize, align: usize) -> *mut u8;
    fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
    fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
    fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> usize;
    fn __rust_usable_size(size: usize, align: usize) -> usize;
}
|
use crate::config::{Configuration, ConfigurationChangeRequest};
use crate::errors::Error;
use crate::log::Log;
use crossbeam::{
channel::{bounded, Receiver, Sender, SendError, RecvError},
select,
};
use crate::raft::Raft;
/// Future is used to represent an action that may occur in the future.
/// NOTE(review): this shares only its name with `std::future::Future` --
/// it is a blocking, Go/hashicorp-raft-style future, not an async one.
pub trait Future {
    /// Error blocks until the future arrives and then returns the error status
    /// of the future. This may be called any number of times - all calls will
    /// return the same value, however is not OK to call this method twice
    /// concurrently on the same Future instance.
    /// Error will only return generic errors related to raft, such
    /// as ErrLeadershipLost, or ErrRaftShutdown. Some operations, such as
    /// ApplyLog, may also return errors from other methods.
    // The Ok variant carries the raft status; the outer Err is reserved for
    // transport/channel failures.
    fn error(&mut self) -> Result<Error, Box<dyn std::error::Error>>;
}
/// IndexFuture is used for future actions that can result in a raft log entry
/// being created.
pub trait IndexFuture: Future {
    /// Index holds the index of the newly applied log entry.
    /// This must not be called until after the Error method has returned.
    fn index(&self) -> u64;
}
/// ApplyFuture is used for Apply and can return the FSM response.
pub trait ApplyFuture<T>: IndexFuture {
    /// Response returns the FSM response as returned by the FSM.Apply method. This
    /// must not be called until after the Error method has returned.
    /// Note that if FSM.Apply returns an error, it will be returned by Response,
    /// and not by the Error method, so it is always important to check Response
    /// for errors from the FSM.
    fn response(&self) -> T;
}
/// ConfigurationFuture is used for GetConfiguration and can return the
/// latest configuration in use by Raft.
pub trait ConfigurationFuture: IndexFuture {
    /// Configuration contains the latest configuration. This must
    /// not be called until after the Error method has returned.
    fn configuration(&self) -> Configuration;
}
/// SnapshotFuture is used for waiting on a user-triggered snapshot to complete.
pub trait SnapshotFuture: Future {
    // TODO: implement snapshot future
    // fn open(&self) -> ;
}
/// LeadershipTransferFuture is used for waiting on a user-triggered leadership
/// transfer to complete.
pub trait LeadershipTransferFuture: Future {}
/// ErrorFuture is used to return a static error.
pub struct ErrorFuture {
    // Fixed status handed back on every error() call.
    error: Error,
}
impl Future for ErrorFuture {
    // Resolves immediately with the stored status (no blocking).
    fn error(&mut self) -> Result<Error, Box<dyn std::error::Error>> {
        Ok(self.error.clone())
    }
}
impl IndexFuture for ErrorFuture {
    // A static error never produces a log entry; 0 is used as a sentinel.
    fn index(&self) -> u64 {
        0
    }
}
impl ApplyFuture<()> for ErrorFuture {
    // No FSM response exists for a failed apply.
    fn response(&self) -> () {
        ()
    }
}
/// DeferError can be embedded to allow a future
/// to provide an error in the future
pub struct DeferError {
    // Cached result after the first completed error() call.
    err: Option<Error>,
    // Sender half; present until a response has been delivered.
    err_tx_ch: Option<Sender<Error>>,
    err_rx_ch: Receiver<Error>,
    // True once respond() has successfully delivered a value.
    responded: bool,
    // Signalled to abort waiters on raft shutdown.
    shutdown_tx_ch: Sender<()>,
    shutdown_rx_ch: Receiver<()>,
}
impl Default for DeferError {
    fn default() -> Self {
        // Capacity-1 channels: exactly one response and one shutdown signal.
        let (etx, erx) = bounded::<Error>(1);
        let (stx, srx) = bounded::<()>(1);
        Self {
            err: None,
            err_tx_ch: Some(etx),
            err_rx_ch: erx,
            responded: false,
            shutdown_tx_ch: stx,
            shutdown_rx_ch: srx,
        }
    }
}
impl DeferError {
pub fn new() -> Self {
Self::default()
}
pub fn respond(&mut self, e: Option<Error>) -> Result<(), Box<dyn std::error::Error>> {
match self.err_tx_ch.clone() {
None => Ok(()),
Some(tx) => {
if self.responded {
return Ok(());
}
match e {
None => {
tx.send(Error::None);
self.responded = true;
self.err_tx_ch = None;
Ok(())
}
Some(e) => {
match tx.send(e) {
Ok(_) => {
self.err_tx_ch = None;
self.responded = true;
Ok(())
},
Err(e) => {
Err(Box::new(e))
}
}
}
}
}
}
}
}
impl Future for DeferError {
    // Blocks until either a response arrives or shutdown is signalled, then
    // caches and returns the status; subsequent calls return the cache.
    fn error(&mut self) -> Result<Error, Box<dyn std::error::Error>> {
        let err = self.err.clone();
        match err {
            None => {
                // First call: wait on whichever channel fires first.
                select! {
                    recv(self.err_rx_ch) -> result => {
                        match result {
                            Ok(e) => {
                                self.err = Some(e);
                            },
                            Err(e) => {
                                // Sender dropped without responding.
                                return Err(Box::new(e));
                            },
                        }
                    },
                    recv(self.shutdown_rx_ch) -> _ => {
                        self.err = Some(Error::RaftShutdown);
                    },
                }
                Ok(self.err.clone().unwrap())
            },
            // Cached result from an earlier call.
            Some(e) => Ok(e),
        }
    }
}
/// There are several types of requests that cause a configuration entry to
/// be appended to the log. These are encoded here for leaderLoop() to process.
/// This is internal to a single server.
pub struct ConfigurationChangeFuture<T> {
    // Underlying log future tracking commitment of the configuration entry.
    lf: LogFuture<T>,
    // The requested membership change.
    req: ConfigurationChangeRequest,
}
/// `BootstrapFuture` is used to attempt a live bootstrap of the cluster. See the
/// `Raft` object's `BootstrapCluster` member function for more details.
pub struct BootStrapFuture {
    defer_error: DeferError,
    /// configuration is the proposed bootstrap configuration to apply.
    configuration: Configuration
}
impl Future for BootStrapFuture {
    // Delegates blocking/error semantics to the embedded DeferError.
    fn error(&mut self) -> Result<Error, Box<dyn std::error::Error>> {
        self.defer_error.error()
    }
}
// NOTE(review): marking the bootstrap future as a leadership-transfer future
// looks surprising -- confirm this impl is intentional.
impl LeadershipTransferFuture for BootStrapFuture {}
/// `LogFuture` is used to apply a log entry and waits until
/// the log is considered committed.
pub struct LogFuture<T> {
    defer_err: DeferError,
    log: Log,
    // FSM response surfaced through ApplyFuture::response().
    response: T,
    // Timestamp recorded at dispatch -- presumably for latency metrics;
    // confirm at the write site.
    dispatch: std::time::Instant,
}
impl<T: Clone> Future for LogFuture<T> {
    fn error(&mut self) -> Result<Error, Box<dyn std::error::Error>> {
        self.defer_err.error()
    }
}
impl<T: Clone> IndexFuture for LogFuture<T> {
    // Index of the log entry this future tracks.
    fn index(&self) -> u64 {
        self.log.index
    }
}
impl<T: Clone> ApplyFuture<T> for LogFuture<T> {
    // Clone so repeated calls keep returning the response.
    fn response(&self) -> T {
        self.response.clone()
    }
}
impl<T: Clone> LeadershipTransferFuture for LogFuture<T> {}
// Future resolved when the referenced Raft instance has shut down.
pub struct ShutdownFuture<'a, FSM, LF> {
    // None when there is no raft instance to wait on -- confirm intent.
    raft: Option<&'a Raft<FSM, LF>>
}
impl<'a, FSM, LF> Future for ShutdownFuture<'a, FSM, LF> {
    fn error(&mut self) -> Result<Error, Box<dyn std::error::Error>> {
        match self.raft {
            None => Err(Box::new(Error::None)),
            // NOTE(review): `mut` on a shared-reference binding has no
            // effect and is likely a leftover; verify wait_shutdown's
            // receiver type.
            Some(mut r) => {
                r.wait_shutdown()
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::future::{DeferError, Future};
    use crate::errors::Error;

    /// respond(None) resolves to Error::None, and the result is cached so
    /// error() may be called repeatedly.
    #[test]
    fn test_defer_error_future_success() {
        let mut de = DeferError::new();
        de.respond(None).unwrap();
        assert_eq!(Error::None, de.error().unwrap());
        assert_eq!(Error::None, de.error().unwrap());
    }

    /// A concrete error is delivered and cached likewise.
    #[test]
    fn test_defer_error_future_error() {
        let want = Error::NonVoter;
        let mut de = DeferError::new();
        // FIX: check the respond() Result instead of silently dropping it.
        de.respond(Some(want)).unwrap();
        assert_eq!(Error::NonVoter, de.error().unwrap());
        assert_eq!(Error::NonVoter, de.error().unwrap());
    }

    /// respond() from another thread unblocks error() on this one.
    #[test]
    fn test_defer_error_future_concurrent() {
        let want = Some(Error::NonVoter);
        let mut de = DeferError::new();
        // FIX: scope() returns a Result (thread panics); propagate it.
        crossbeam::thread::scope(|s| {
            s.spawn(|_| {
                de.respond(want).unwrap();
            });
        })
        .unwrap();
        assert_eq!(Error::NonVoter, de.error().unwrap());
    }
}
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE-BSD-3-Clause file.
//
// SPDX-License-Identifier: Apache-2.0 AND BSD-3-Clause
extern crate criterion;
extern crate linux_loader;
extern crate vm_memory;
use criterion::{criterion_group, criterion_main, Criterion};
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod x86_64;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
use x86_64::*;
#[cfg(target_arch = "aarch64")]
mod aarch64;
#[cfg(target_arch = "aarch64")]
use aarch64::*;
// No-op benchmark target used to keep `criterion_main!` satisfied when the
// bzimage benchmarks are compiled out (see the cfg'd groups below).
pub fn criterion_benchmark_nop(_: &mut Criterion) {}
criterion_group! {
    name = benches;
    config = Criterion::default().sample_size(500);
    targets = criterion_benchmark
}
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "bzimage"))]
// Explicit (arch, feature) tuple required as clippy complains about
// `criterion_benchmark_bzimage` missing on aarch64.
criterion_group! {
    name = benches_bzimage;
    // Only ~125 runs fit in 5 seconds. Either extend the duration, or reduce
    // the number of iterations.
    config = Criterion::default().sample_size(100);
    targets = criterion_benchmark_bzimage
}
// NOP because the `criterion_main!` macro doesn't support cfg(feature)
// macro expansions.
#[cfg(any(target_arch = "aarch64", not(feature = "bzimage")))]
criterion_group! {
    name = benches_bzimage;
    // Sample size must be >= 10.
    // https://github.com/bheisler/criterion.rs/blob/0.3.0/src/lib.rs#L757
    config = Criterion::default().sample_size(10);
    targets = criterion_benchmark_nop
}
criterion_main! {
    benches,
    benches_bzimage
}
|
pub mod alloc;
pub mod data_structs;
use atag;
use mem::alloc::first_fit::FirstFitAlloc;
// Global var for heap allocator
// SAFETY note: mutable static -- init() must run once, before any
// allocation, with no concurrent access (TODO confirm single-core boot).
static mut HEAP_ALLOC: FirstFitAlloc = FirstFitAlloc {begin: 0, end: 0};
// Initializes the global first-fit heap allocator from the ATAG memory tag.
// NOTE(review): the second argument is `mem_tag.size - mem_tag.start`; if
// `size` is a length (not an end address) this subtraction looks wrong --
// confirm against FirstFitAlloc::new's contract.
pub fn init (mem_tag: atag::AtagMem) {
    unsafe {
        HEAP_ALLOC = FirstFitAlloc::new(mem_tag.start, mem_tag.size - mem_tag.start);
    }
}
// These functions below provide definitions for symbols libcore
// expects which are not present on our bare metal target.

/// Byte-wise memcpy for the ARM EABI. `n` is the number of BYTES to copy;
/// the source and destination regions must not overlap.
#[no_mangle]
pub extern fn __aeabi_memcpy (dest: *mut u8, src: *const u8, n: usize) -> *mut u8{
    let mut i = 0;
    while i < n {
        // SAFETY: caller guarantees both regions are valid for `n` bytes.
        unsafe { *dest.offset(i as isize) = *src.offset(i as isize); }
        i += 1;
    }
    dest
}

/// 4-byte-aligned variant. Per the ARM RT ABI, `n` is STILL a byte count --
/// the guaranteed alignment merely permits word-sized copies as an
/// optimization. BUGFIX: the previous implementation forwarded `n / 4`,
/// copying only a quarter of the requested bytes.
#[no_mangle]
pub extern fn __aeabi_memcpy4 (dest: *mut u8, src: *const u8, n: usize) -> *mut u8{
    __aeabi_memcpy (dest, src, n)
}
|
use actix::prelude::*;
use kosem_webapi::pairing_messages::*;
use kosem_webapi::{KosemResult, Uuid};
use crate::protocol_handlers::websocket_jsonrpc::WsJrpc;
use crate::role_actors::{HumanActor, PairingActor};
use crate::internal_messages::connection::{ConnectionClosed, RpcMessage};
use crate::internal_messages::pairing::{
CreateNewHumanActor, HumanAvailable, HumanJoiningProcedure, PairingPerformed,
ProcedureRequestingHuman, RemoveAvailableHuman, RemoveRequestForHuman,
};
/// Actor for a human client that has logged in but has not yet been paired
/// with a procedure (the "joiner" state).
#[derive(typed_builder::TypedBuilder)]
pub struct JoinerActor {
    // Websocket/JSON-RPC connection actor for this client.
    con_actor: actix::Addr<WsJrpc>,
    uid: Uuid,
    name: String,
}
impl actix::Actor for JoinerActor {
    type Context = actix::Context<Self>;
    // On start: confirm the login to the client, then advertise this human
    // to the global PairingActor.
    fn started(&mut self, ctx: &mut Self::Context) {
        log::info!("Starting JoinerActor {} - {}", self.uid, self.name);
        let response = kosem_webapi::handshake_messages::LoginConfirmed { uid: self.uid };
        let message = RpcMessage::new("LoginConfirmed", response);
        self.con_actor.do_send(message);
        PairingActor::from_registry().do_send(HumanAvailable {
            uid: self.uid,
            addr: ctx.address(),
            name: self.name.clone(),
        });
    }
    // On stop: withdraw this human from the pairing pool.
    fn stopped(&mut self, _ctx: &mut Self::Context) {
        log::info!("Ending JoinerActor {}", self.uid);
        PairingActor::from_registry().do_send(RemoveAvailableHuman { uid: self.uid });
    }
}
impl actix::Handler<ConnectionClosed> for JoinerActor {
    type Result = ();
    // Websocket dropped: stop the actor; `stopped` then deregisters this
    // human from the PairingActor.
    fn handle(&mut self, _msg: ConnectionClosed, ctx: &mut actix::Context<Self>) -> Self::Result {
        ctx.stop();
    }
}
impl actix::Handler<ProcedureRequestingHuman> for JoinerActor {
    type Result = ();
    // Relay a newly available procedure to the connected client.
    fn handle(&mut self, msg: ProcedureRequestingHuman, _ctx: &mut Self::Context) -> Self::Result {
        self.con_actor.do_send(RpcMessage::new(
            "AvailableProcedure",
            kosem_webapi::pairing_messages::AvailableProcedure {
                uid: msg.uid,
                name: msg.orig_request.name,
            },
        ));
    }
}
impl actix::Handler<RemoveRequestForHuman> for JoinerActor {
    type Result = ();
    // Relay that a previously offered procedure is no longer available.
    fn handle(&mut self, msg: RemoveRequestForHuman, _ctx: &mut Self::Context) -> Self::Result {
        self.con_actor.do_send(RpcMessage::new(
            "UnavailableProcedure",
            kosem_webapi::pairing_messages::UnavailableProcedure { uid: msg.uid },
        ));
    }
}
impl actix::Handler<JoinProcedure> for JoinerActor {
    type Result = ResponseActFuture<Self, KosemResult<()>>;
    // Forwards the join request to the global PairingActor and resolves the
    // RPC with whatever result pairing produces.
    fn handle(&mut self, msg: JoinProcedure, _ctx: &mut actix::Context<Self>) -> Self::Result {
        log::info!("Human {} joined procedure {}", self.name, msg.uid);
        Box::pin(
            PairingActor::from_registry()
                .send(HumanJoiningProcedure {
                    human_uid: self.uid,
                    request_uid: msg.uid,
                })
                .into_actor(self)
                .then(|result, _actor, _ctx| {
                    // Outer unwrap: a mailbox delivery failure here is
                    // treated as a bug, not a recoverable condition.
                    let result = result.unwrap();
                    log::warn!("Join result is {:?}", result);
                    fut::result(result)
                }),
        )
    }
}
impl actix::Handler<CreateNewHumanActor> for JoinerActor {
    type Result = <CreateNewHumanActor as actix::Message>::Result;
    // Spins up the HumanActor that takes over this connection for the
    // paired procedure.
    fn handle(
        &mut self,
        msg: CreateNewHumanActor,
        _ctx: &mut actix::Context<Self>,
    ) -> Self::Result {
        HumanActor::builder()
            .con_actor(self.con_actor.clone())
            .procedure_actor(msg.procedure_addr)
            .uid(self.uid)
            .request_uid(msg.request_uid)
            .name(self.name.clone())
            .build()
            .start()
    }
}
impl actix::Handler<PairingPerformed> for JoinerActor {
    type Result = <PairingPerformed as actix::Message>::Result;
    fn handle(&mut self, msg: PairingPerformed, _ctx: &mut actix::Context<Self>) -> Self::Result {
        // Clone the address first: do_send consumes `msg`, which owns it.
        msg.human_addr.clone().do_send(msg);
    }
}
|
#[doc = "
Removes the common level of indention from description strings. For
instance, if an entire doc comment is indented 8 spaces we want to
remove those 8 spaces from every line.
The first line of a string is allowed to be indented less than
subsequent lines in the same paragraph in order to account for
instances where the string containing the doc comment is opened in the
middle of a line, and each of the following lines is indented.
"];
export mk_pass;
// Builds the unindent pass on top of the generic description-transform pass.
fn mk_pass() -> pass {
    desc_pass::mk_pass(unindent)
}
// Finds the minimum indentation across the lines of `s` (ignoring the first
// line once a paragraph has started) and strips it from every line.
// NOTE: pre-1.0 Rust (circa 2012) -- `str::lines_any`, `vec::foldl`,
// stack-closure `{|x| ...}` syntax and `check` expressions belong to that
// dialect; the code is left byte-for-byte intact.
fn unindent(s: str) -> str {
    let lines = str::lines_any(s);
    let saw_first_line = false;
    let saw_second_line = false;
    // Fold to the smallest indent over all non-whitespace lines.
    let min_indent = vec::foldl(uint::max_value, lines) {|min_indent, line|
        // After we see the first non-whitespace line, look at
        // the line we have. If it is not whitespace, and therefore
        // part of the first paragraph, then ignore the indentation
        // level of the first line
        let ignore_previous_indents =
            saw_first_line &&
            !saw_second_line &&
            !str::is_whitespace(line);
        let min_indent = if ignore_previous_indents {
            uint::max_value
        } else {
            min_indent
        };
        if saw_first_line {
            saw_second_line = true;
        }
        if str::is_whitespace(line) {
            min_indent
        } else {
            saw_first_line = true;
            // Count leading spaces on this line.
            let spaces = 0u;
            str::loop_chars(line) {|char|
                // Only comparing against space because I wouldn't
                // know what to do with mixed whitespace chars
                if char == ' ' {
                    spaces += 1u;
                    true
                } else {
                    false
                }
            };
            math::min(min_indent, spaces)
        }
    };
    if check vec::is_not_empty(lines) {
        // Trim the first line fully; strip min_indent chars from the rest.
        let unindented = [str::trim(vec::head(lines))]
            + vec::map(vec::tail(lines)) {|line|
                if str::is_whitespace(line) {
                    line
                } else {
                    assert str::byte_len(line) >= min_indent;
                    str::char_slice(line, min_indent, str::char_len(line))
                }
            };
        str::connect(unindented, "\n")
    } else {
        s
    }
}
#[test]
fn should_unindent() {
    let s = "    line1\n    line2";
    let r = unindent(s);
    assert r == "line1\nline2";
}
#[test]
fn should_unindent_multiple_paragraphs() {
    let s = "    line1\n\n    line2";
    let r = unindent(s);
    assert r == "line1\n\nline2";
}
#[test]
fn should_leave_multiple_indent_levels() {
    // Line 2 is indented another level beyond the
    // base indentation and should be preserved
    let s = "    line1\n\n        line2";
    let r = unindent(s);
    assert r == "line1\n\n    line2";
}
#[test]
fn should_ignore_first_line_indent() {
    // The first line of the first paragraph may not be indented as
    // far due to the way the doc string was written:
    //
    // #[doc = "Start way over here
    //          and continue here"]
    let s = "line1\n    line2";
    let r = unindent(s);
    assert r == "line1\nline2";
}
#[test]
fn should_not_ignore_first_line_indent_in_a_single_line_para() {
    let s = "line1\n\n    line2";
    let r = unindent(s);
    assert r == "line1\n\n    line2";
}
use std::fs;
use nom::{
bytes::complete::tag,
character::streaming::{i64 as parse_i64, line_ending},
multi::many1,
IResult,
};
/// A 2-D integer coordinate (sensor or beacon position).
#[derive(Debug)]
struct Point {
    x: i64,
    y: i64,
}

impl Point {
    /// L1 (taxicab) distance between two points.
    fn manhattan_distance(&self, other: &Point) -> i64 {
        let dx = (self.x - other.x).abs();
        let dy = (self.y - other.y).abs();
        dx + dy
    }
}
// A sensor together with the closest beacon it reported.
struct Pair {
    sensor: Point,
    beacon: Point,
}
// nom parser for "x=<i64>, y=<i64>" -> Point.
fn parse_point(input: &str) -> IResult<&str, Point> {
    let (input, _) = tag("x=")(input)?;
    let (input, x) = parse_i64(input)?;
    let (input, _) = tag(", y=")(input)?;
    let (input, y) = parse_i64(input)?;
    Ok((input, Point { x, y }))
}
// nom parser for one full "Sensor at ...: closest beacon is at ..." line,
// including its trailing line ending.
fn parse_pair(input: &str) -> IResult<&str, Pair> {
    let (input, _) = tag("Sensor at ")(input)?;
    let (input, sensor) = parse_point(input)?;
    let (input, _) = tag(": closest beacon is at ")(input)?;
    let (input, beacon) = parse_point(input)?;
    let (input, _) = line_ending(input)?;
    Ok((input, Pair { sensor, beacon }))
}
// Inclusive [start, end] interval on one row.
// NOTE(review): shadows `std::ops::RangeInclusive`; a distinct name would
// avoid confusion, but renaming would ripple through the rest of the file.
#[derive(Clone, Debug)]
struct RangeInclusive<T> {
    start: T,
    end: T,
}
// AoC 2022 day 15 part 2: finds the single cell in [0, max_xy] x [0, max_xy]
// not covered by any sensor's exclusion diamond and returns its tuning
// frequency (x * 4_000_000 + y). Builds per-row coverage ranges, then merges
// them row by row until a gap appears.
fn do_the_thing(input: &str, max_xy: i64) -> i64 {
    let start = std::time::Instant::now();
    let (input, pairs) = many1(parse_pair)(input).unwrap();
    assert!(input.is_empty());
    println!("Parsing took {:?}", start.elapsed());
    let start = std::time::Instant::now();
    // Start with 0..=max_xy empty Vecs
    let mut ranges: Vec<Vec<RangeInclusive<i64>>> = Vec::with_capacity(max_xy as usize + 1);
    ranges.resize_with((max_xy + 1) as usize, Vec::new);
    println!("Making Vecs took {:?}", start.elapsed());
    let start = std::time::Instant::now();
    for pair in pairs {
        let distance = pair.sensor.manhattan_distance(&pair.beacon);
        // Find all the Y coordinates that are in range of the sensor
        let y_start = pair.sensor.y - distance;
        let y_end = pair.sensor.y + distance;
        // Constrain both start and end to 0 and max_xy
        let y_start = y_start.max(0).min(max_xy);
        let y_end = y_end.max(0).min(max_xy);
        for y in y_start..=y_end {
            // Now find all the X coordinates of points on the Y coordinate that are
            // in range example beacon = (0,0), sensor = (2,0), y = 1 would have 3
            // poimts in range of the beacon on y = 1: (-1,1), (0,1), (1,1)
            // distance would be 2, remainder would be 1
            let remainder = distance - (y - pair.sensor.y).abs();
            let mut x_start = pair.sensor.x - remainder;
            let mut x_end = pair.sensor.x + remainder;
            // Constrain both start and end to 0 and max_xy
            x_start = x_start.max(0).min(max_xy);
            x_end = x_end.max(0).min(max_xy);
            // No longer filter known beacons, because those now count
            // as "can't be what we're looking for"
            // Add the range to the list of ranges for this Y coordinate
            ranges[y as usize].push(RangeInclusive {
                start: x_start,
                end: x_end,
            });
        }
    }
    let duration = start.elapsed();
    println!("Time to build ranges: {:?}", duration);
    let start = std::time::Instant::now();
    // Merge all the ranges for each Y coordinate, use find to return early if we find a row with a gap
    let (x, y) = ranges
        .into_iter()
        .enumerate()
        .find_map(|(y, mut x_coords)| {
            x_coords.sort_unstable_by_key(|r| r.start);
            x_coords =
                x_coords
                    .into_iter()
                    .fold(Vec::<RangeInclusive<i64>>::new(), |mut acc, r| {
                        if let Some(last) = acc.last_mut() {
                            // Options are:
                            // Consecutive ranges: 1..=2, 3..=4 -> 1..=4
                            // Overlapping ranges: 1..=2, 2..=3 -> 1..=3
                            // Completely contained: 1..=3, 2..=2 -> 1..=3
                            // Completely separate: 1..=2, 4..=5 -> 1..=2, 4..=5
                            // This only works because the ranges are sorted by start
                            if last.end + 1 >= r.start {
                                // This should cover all except completely separate
                                last.end = last.end.max(r.end);
                                return acc;
                            }
                        }
                        acc.push(r);
                        acc
                    });
            match x_coords.len() {
                // One merged range: the gap (if any) is at one of the edges.
                1 => {
                    if x_coords[0].start == 0 && x_coords[0].end == max_xy {
                        // This is not the row we're looking for
                        None
                    } else {
                        // If we start at 0, then max_xy is the only possible value
                        Some((
                            if x_coords[0].start == 0 {
                                max_xy
                            } else {
                                // If we don't start at 0, x == 0
                                0
                            },
                            y,
                        ))
                    }
                }
                2 => {
                    // x is 1 more than end of first range
                    Some(((x_coords[0].end + 1), y))
                }
                // The puzzle guarantees a unique uncovered cell, so more than
                // two disjoint ranges per row should be impossible.
                _ => panic!("too many ranges"),
            }
        })
        .expect("didn't find a row");
    let duration = start.elapsed();
    println!("Time to find row while merging: {:?}", duration);
    x * 4000000 + (y as i64)
}
/// Reads the puzzle input and prints the part-2 answer for the full
/// 0..=4_000_000 search space.
fn main() {
    let input = fs::read_to_string("input.txt").unwrap();
    let answer = do_the_thing(&input, 4000000);
    println!("{:?}", answer);
}
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;
    // Sample input from the puzzle statement; with the search space capped
    // at 20, the expected tuning frequency is 56000011.
    #[test_case("Sensor at x=2, y=18: closest beacon is at x=-2, y=15
Sensor at x=9, y=16: closest beacon is at x=10, y=16
Sensor at x=13, y=2: closest beacon is at x=15, y=3
Sensor at x=12, y=14: closest beacon is at x=10, y=16
Sensor at x=10, y=20: closest beacon is at x=10, y=16
Sensor at x=14, y=17: closest beacon is at x=10, y=16
Sensor at x=8, y=7: closest beacon is at x=2, y=10
Sensor at x=2, y=0: closest beacon is at x=2, y=10
Sensor at x=0, y=11: closest beacon is at x=2, y=10
Sensor at x=20, y=14: closest beacon is at x=25, y=17
Sensor at x=17, y=20: closest beacon is at x=21, y=22
Sensor at x=16, y=7: closest beacon is at x=15, y=3
Sensor at x=14, y=3: closest beacon is at x=15, y=3
Sensor at x=20, y=1: closest beacon is at x=15, y=3
", 20 => 56000011)]
    fn test(input: &str, max_xy: i64) -> i64 {
        do_the_thing(&input, max_xy)
    }
}
|
//! Slicing by value
#![deny(missing_docs)]
#![deny(rust_2018_compatibility)]
#![deny(rust_2018_idioms)]
#![deny(warnings)]
#![no_std]
mod from;
mod sealed;
#[cfg(test)]
mod tests;
mod to;
mod traits;
use core::{ops, slice};
use as_slice::{AsMutSlice, AsSlice};
use stable_deref_trait::StableDeref;
pub use {
from::OwningSliceFrom,
to::OwningSliceTo,
traits::{IntoSlice, IntoSliceFrom, IntoSliceTo, Truncate},
};
/// Owning slice of a `BUFFER`
///
/// Logically equivalent to `&buffer[start..start + length]`, but holds the
/// buffer by value instead of borrowing it.
#[derive(Clone, Copy)]
pub struct OwningSlice<BUFFER, INDEX>
where
    BUFFER: AsSlice,
    INDEX: sealed::Index,
{
    // The wholly owned backing storage.
    pub(crate) buffer: BUFFER,
    // Offset of the slice's first element within `buffer`.
    pub(crate) start: INDEX,
    // Number of elements in the slice.
    pub(crate) length: INDEX,
}
/// Equivalent to `buffer[start..start+length]` but by value
///
/// # Panics
///
/// Panics if `start + length` exceeds the buffer's length.
#[allow(non_snake_case)]
pub fn OwningSlice<B, I>(buffer: B, start: I, length: I) -> OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    let blen = buffer.as_slice().len();
    let ustart = start.into();
    let ulength = length.into();
    // Validate the window once here; the unsafe pointer arithmetic in
    // `as_slice` / `as_mut_slice` relies on this invariant.
    assert!(ustart + ulength <= blen);
    OwningSlice {
        buffer,
        start,
        length,
    }
}
impl<B, I> OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    /// Destroys the owning slice and returns the original buffer
    /// (the whole buffer, not just the sliced window).
    pub fn unslice(self) -> B {
        self.buffer
    }
}
impl<B, I> AsSlice for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    type Element = B::Element;
    // Borrows the `start..start+length` window of the buffer.
    fn as_slice(&self) -> &[B::Element] {
        // SAFETY: every constructor / slicing operation asserts
        // `start + length <= buffer.len()`, so the offset pointer and the
        // reconstructed slice stay inside the backing buffer.
        unsafe {
            let p = self.buffer.as_slice().as_ptr().add(self.start.into());
            let len = self.length.into();
            slice::from_raw_parts(p, len)
        }
    }
}
impl<B, I> AsMutSlice for OwningSlice<B, I>
where
    B: AsMutSlice,
    I: sealed::Index,
{
    // Mutably borrows the `start..start+length` window of the buffer.
    fn as_mut_slice(&mut self) -> &mut [B::Element] {
        // SAFETY: same invariant as `as_slice` — the window was bounds-checked
        // when this slice was created, and `&mut self` guarantees exclusivity.
        unsafe {
            let p = self
                .buffer
                .as_mut_slice()
                .as_mut_ptr()
                .add(self.start.into());
            let len = self.length.into();
            slice::from_raw_parts_mut(p, len)
        }
    }
}
// Deref to the borrowed slice so indexing and all slice methods work
// directly on an `OwningSlice`.
impl<B, I> ops::Deref for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    type Target = [B::Element];
    fn deref(&self) -> &[B::Element] {
        self.as_slice()
    }
}
impl<B, I> ops::DerefMut for OwningSlice<B, I>
where
    B: AsMutSlice,
    I: sealed::Index,
{
    fn deref_mut(&mut self) -> &mut [B::Element] {
        self.as_mut_slice()
    }
}
impl<B, I> From<OwningSliceFrom<B, I>> for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    // A suffix slice (`buffer[start..]`) keeps its start offset; its length
    // is whatever the suffix currently reports.
    fn from(slice: OwningSliceFrom<B, I>) -> OwningSlice<B, I> {
        let length = I::from_usize(slice.len());
        OwningSlice {
            buffer: slice.buffer,
            start: slice.start,
            length,
        }
    }
}
impl<B, I> From<OwningSliceTo<B, I>> for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    // A prefix slice (`buffer[..end]`) always begins at index zero, and its
    // `end` doubles as its length.
    fn from(slice: OwningSliceTo<B, I>) -> OwningSlice<B, I> {
        OwningSlice {
            buffer: slice.buffer,
            start: I::zero(),
            length: slice.end,
        }
    }
}
// SAFETY: `deref` returns a pointer derived from `buffer`'s own deref target
// at a fixed offset; the `B: StableDeref` bound guarantees that target does
// not move when the `OwningSlice` itself is moved.
unsafe impl<B, I> StableDeref for OwningSlice<B, I>
where
    B: AsSlice + StableDeref,
    I: sealed::Index,
{
}
impl<B, I> IntoSlice<I> for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    type Slice = OwningSlice<B, I>;
    /// Re-slices by value; `start`/`length` are relative to this slice,
    /// so the new start is `self.start + start`. Panics if the window
    /// exceeds the current length.
    fn into_slice(self, start: I, length: I) -> Self::Slice {
        let len = self.len();
        let ustart = start.into();
        let ulength = length.into();
        assert!(ustart + ulength <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start + start,
            length,
        }
    }
}
// Mixed-width indices: u16 window into a u8-indexed slice (narrowing cast,
// justified by the bounds assert).
impl<B> IntoSlice<u16> for OwningSlice<B, u8>
where
    B: AsSlice,
{
    type Slice = OwningSlice<B, u8>;
    fn into_slice(self, start: u16, length: u16) -> Self::Slice {
        let len = self.len();
        assert!(usize::from(start) + usize::from(length) <= len);
        // NOTE(cast) start, length < self.len() (self.length) <= u8::MAX
        OwningSlice {
            buffer: self.buffer,
            start: self.start + start as u8,
            length: length as u8,
        }
    }
}
// Mixed-width indices: u8 window into a u16-indexed slice (widening, no
// lossy cast needed).
impl<B> IntoSlice<u8> for OwningSlice<B, u16>
where
    B: AsSlice,
{
    type Slice = OwningSlice<B, u16>;
    fn into_slice(self, start: u8, length: u8) -> Self::Slice {
        let len = self.len();
        assert!(usize::from(start) + usize::from(length) <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start + u16::from(start),
            length: u16::from(length),
        }
    }
}
impl<B, I> IntoSliceFrom<I> for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    type SliceFrom = OwningSlice<B, I>;
    /// Equivalent of `self[start..]` by value; panics if `start > len`.
    fn into_slice_from(self, start: I) -> Self::SliceFrom {
        let len = self.len();
        let ustart = start.into();
        assert!(ustart <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start + start,
            length: self.length - start,
        }
    }
}
// Mixed-width index: u16 start into a u8-indexed slice.
impl<B> IntoSliceFrom<u16> for OwningSlice<B, u8>
where
    B: AsSlice,
{
    type SliceFrom = OwningSlice<B, u8>;
    fn into_slice_from(self, start: u16) -> Self::SliceFrom {
        let len = self.len();
        assert!(usize::from(start) <= len);
        // NOTE(cast) start < len (self.length) <= u8::MAX
        OwningSlice {
            buffer: self.buffer,
            start: self.start + start as u8,
            length: self.length - start as u8,
        }
    }
}
// Mixed-width index: u8 start into a u16-indexed slice.
impl<B> IntoSliceFrom<u8> for OwningSlice<B, u16>
where
    B: AsSlice,
{
    type SliceFrom = OwningSlice<B, u16>;
    fn into_slice_from(self, start: u8) -> Self::SliceFrom {
        let len = self.len();
        assert!(usize::from(start) <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start + u16::from(start),
            length: self.length - u16::from(start),
        }
    }
}
impl<B, I> IntoSliceTo<I> for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    type SliceTo = OwningSlice<B, I>;
    /// Equivalent of `self[..end]` by value; the start offset is kept and
    /// `end` becomes the new length. Panics if `end > len`.
    fn into_slice_to(self, end: I) -> Self::SliceTo {
        let len = self.len();
        let uend = end.into();
        assert!(uend <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start,
            length: end,
        }
    }
}
// Mixed-width index: u16 end into a u8-indexed slice.
impl<B> IntoSliceTo<u16> for OwningSlice<B, u8>
where
    B: AsSlice,
{
    type SliceTo = OwningSlice<B, u8>;
    fn into_slice_to(self, end: u16) -> Self::SliceTo {
        let len = self.len();
        assert!(usize::from(end) <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start,
            // NOTE(cast) end <= len (self.length) <= u8::MAX
            length: end as u8,
        }
    }
}
// Mixed-width index: u8 end into a u16-indexed slice.
impl<B> IntoSliceTo<u8> for OwningSlice<B, u16>
where
    B: AsSlice,
{
    type SliceTo = OwningSlice<B, u16>;
    fn into_slice_to(self, end: u8) -> Self::SliceTo {
        let len = self.len();
        assert!(usize::from(end) <= len);
        OwningSlice {
            buffer: self.buffer,
            start: self.start,
            // NOTE(cast) end <= len <= u8::MAX
            length: u16::from(end),
        }
    }
}
impl<B, I> Truncate<I> for OwningSlice<B, I>
where
    B: AsSlice,
    I: sealed::Index,
{
    /// Shortens the slice to `len` elements; a no-op when `len` is not
    /// smaller than the current length (mirroring `Vec::truncate`).
    fn truncate(&mut self, len: I) {
        if len < self.length {
            self.length = len;
        }
    }
}
// Mixed-width index: a u16 length applied to a u8-indexed slice.
impl<B> Truncate<u16> for OwningSlice<B, u8>
where
    B: AsSlice,
{
    fn truncate(&mut self, len: u16) {
        if len < u16::from(self.length) {
            // NOTE(cast) `len < self.length <= u8::MAX`
            self.length = len as u8;
        }
    }
}
// Mixed-width index: a u8 length applied to a u16-indexed slice.
impl<B> Truncate<u8> for OwningSlice<B, u16>
where
    B: AsSlice,
{
    fn truncate(&mut self, len: u8) {
        if u16::from(len) < self.length {
            self.length = u16::from(len);
        }
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// UsageTopAvgMetricsMetadata : The object containing document metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageTopAvgMetricsMetadata {
    /// The day value from the user request that contains the returned usage data. (If day was used in the request)
    #[serde(rename = "day", skip_serializing_if = "Option::is_none")]
    pub day: Option<serde_json::Value>,
    /// The month value from the user request that contains the returned usage data. (If month was used in the request)
    #[serde(rename = "month", skip_serializing_if = "Option::is_none")]
    pub month: Option<serde_json::Value>,
    /// Pagination metadata for the response, when present.
    #[serde(rename = "pagination", skip_serializing_if = "Option::is_none")]
    pub pagination: Option<Box<crate::models::UsageAttributionPagination>>,
}
impl UsageTopAvgMetricsMetadata {
/// The object containing document metadata.
pub fn new() -> UsageTopAvgMetricsMetadata {
UsageTopAvgMetricsMetadata {
day: None,
month: None,
pagination: None,
}
}
}
|
#![crate_name = "shared"]
#![crate_type = "lib"]
#![feature(zero_one)]
extern crate num;
pub mod triangles;
pub mod infinity;
pub mod primes;
pub mod fibonacci;
|
use std::io::prelude::*;
use http::*;
use std::fs::*;
/// A static-file server rooted at a filesystem path.
pub struct FileSystem {
    // Root directory that request URIs are resolved against.
    path: String,
}
impl FileSystem {
    // Chunk size used when streaming file bodies to the client (10 MiB).
    const BUF_SIZE: usize = 10 * 1024 * 1024;

    /// Creates a file server rooted at `path`.
    pub fn new(path: &str) -> FileSystem {
        FileSystem { path: path.to_string() }
    }

    /// Serves the file named by `uri` (appended to the root path) on `resp`,
    /// or sends a 404 when the path does not name a regular readable file.
    ///
    /// SECURITY(review): `uri` is concatenated onto the root without any
    /// normalization, so a request containing `..` segments can escape the
    /// root directory. Callers should sanitize `uri` (or this method should
    /// reject paths containing "..") before serving untrusted requests.
    pub fn serve(&mut self, uri: &str, resp: &mut Response) {
        let mut full_path = self.path.clone();
        full_path.push_str(uri);
        if let Ok(m) = metadata(&full_path) {
            if m.is_file() {
                if let Ok(ref mut f) = File::open(&full_path) {
                    resp.set_status(Status::ok());
                    resp.set_header("Content-Type", Self::get_mime(&full_path));
                    resp.set_length(m.len());
                    resp.send();
                    // Stream the file in fixed-size chunks. Stops on EOF
                    // (n == 0) or on a read error; the error is best-effort
                    // ignored because the status line was already sent.
                    let mut buf = vec![0u8; Self::BUF_SIZE];
                    while let Ok(n) = f.read(&mut buf) {
                        if n == 0 {
                            break;
                        }
                        resp.send_data(&buf[0..n]);
                    }
                    return;
                }
            }
        }
        println!("Not found: {}", full_path);
        resp.set_not_found().send();
    }

    /// Maps a file extension to a MIME type; unknown or missing extensions
    /// default to `application/octet-stream`.
    fn get_mime(path: &str) -> &str {
        if let Some(idx) = path.rfind('.') {
            let ext = &path[idx + 1..];
            match ext {
                "html" => "text/html",
                "css" => "text/css",
                "js" => "text/javascript",
                "jpg" | "jpeg" => "image/jpeg",
                "png" => "image/png",
                "svg" => "image/svg+xml",
                "woff" | "woff2" => "application/x-font-woff",
                _ => "application/octet-stream",
            }
        } else {
            // Default to binary data.
            "application/octet-stream"
        }
    }
}
|
use std::fs;
/// A single navigation instruction from the puzzle input.
enum Instr {
    N, S, E, W,
    L, R, F,
}
/// An instruction paired with its magnitude (distance or degrees).
struct Move(Instr, i32);
/// Parses one `Move` per input line: the first character selects the
/// instruction, the rest of the line is its numeric argument.
fn parse(s: &str) -> Vec<Move> {
    let mut moves = Vec::new();
    for line in s.lines() {
        use Instr::*;
        let value: i32 = line[1..].parse().unwrap();
        let instr = match &line[..1] {
            "N" => N,
            "S" => S,
            "E" => E,
            "W" => W,
            "L" => L,
            "R" => R,
            "F" => F,
            _ => unreachable!(),
        };
        moves.push(Move(instr, value));
    }
    moves
}
/// The part-1 ship: a position plus a heading in degrees
/// (0 = north, 90 = east, 180 = south, 270 = west).
struct Ferry {
    x: i32,
    y: i32,
    dir: i32,
}
impl Ferry {
    /// Starts at the origin facing east (90 degrees).
    fn new() -> Self {
        Self {
            x: 0,
            y: 0,
            dir: 90,
        }
    }
    /// Rotates the heading by `angle` degrees (positive = clockwise),
    /// keeping `dir` normalized to `[0, 360)`.
    ///
    /// Uses `rem_euclid` so any multiple of 90 is handled; the previous
    /// single add/subtract correction left `dir` out of range for angles
    /// outside (-360, 720), which would later hit the `unreachable!()`
    /// in `step`.
    fn turn(&mut self, angle: i32) {
        self.dir = (self.dir + angle).rem_euclid(360);
    }
    /// Applies one movement instruction to the ship.
    fn step(&mut self, m: &Move) {
        use Instr::*;
        match m.0 {
            N => self.y += m.1,
            S => self.y -= m.1,
            E => self.x += m.1,
            W => self.x -= m.1,
            L => self.turn(-m.1),
            R => self.turn(m.1),
            F => {
                // Move forward along the current heading.
                match self.dir {
                    0 => self.y += m.1,
                    90 => self.x += m.1,
                    180 => self.y -= m.1,
                    270 => self.x -= m.1,
                    _ => unreachable!(),
                }
            }
        }
    }
    /// Manhattan distance from the starting position.
    fn manhattan_distance(&self) -> i32 {
        self.x.abs() + self.y.abs()
    }
}
/// Part 1: run every move on a plain ferry and print the Manhattan
/// distance of its final position.
fn part1(input: &[Move]) {
    let mut ferry = Ferry::new();
    input.iter().for_each(|m| ferry.step(m));
    println!("{}", ferry.manhattan_distance());
}
/// The part-2 ship: a ferry plus a waypoint at offset `(dx, dy)` relative
/// to the ship.
struct WaypointFerry {
    ferry: Ferry,
    dx: i32,
    dy: i32,
}
impl WaypointFerry {
    /// Waypoint starts 10 units east and 1 unit north of the ship.
    fn new() -> Self {
        Self {
            ferry: Ferry::new(),
            dx: 10,
            dy: 1,
        }
    }
    /// Moves the ship `n` times toward the waypoint (the waypoint is
    /// relative, so it travels with the ship).
    fn move_ferry(&mut self, n: i32) {
        self.ferry.x += n * self.dx;
        self.ferry.y += n * self.dy;
    }
    /// Rotates the waypoint around the ship by `angle` degrees
    /// (positive = clockwise), in quarter-turn steps.
    ///
    /// `rem_euclid` normalizes any multiple of 90 into `[0, 360)`;
    /// the previous `360 + angle` correction only undid one wrap, so an
    /// angle such as -450 would have rotated zero times. This also keeps
    /// the normalization consistent with `Ferry::turn`.
    fn turn(&mut self, angle: i32) {
        for _ in (0..angle.rem_euclid(360)).step_by(90) {
            // One clockwise quarter turn: (dx, dy) -> (dy, -dx).
            std::mem::swap(&mut self.dx, &mut self.dy);
            self.dy = -self.dy;
        }
    }
    /// Applies one instruction; N/S/E/W move the waypoint, not the ship.
    fn step(&mut self, m: &Move) {
        use Instr::*;
        match m.0 {
            N => self.dy += m.1,
            S => self.dy -= m.1,
            E => self.dx += m.1,
            W => self.dx -= m.1,
            L => self.turn(-m.1),
            R => self.turn(m.1),
            F => self.move_ferry(m.1),
        }
    }
    /// Manhattan distance of the ship (not the waypoint) from the origin.
    fn manhattan_distance(&self) -> i32 {
        self.ferry.manhattan_distance()
    }
}
/// Part 2: drive the waypoint ferry through every move and print its
/// final Manhattan distance.
fn part2(input: &[Move]) {
    let mut ferry = WaypointFerry::new();
    input.iter().for_each(|m| ferry.step(m));
    println!("{}", ferry.manhattan_distance());
}
/// Reads the puzzle input and runs both parts.
fn main() {
    let input = fs::read_to_string("input").unwrap();
    let moves = parse(&input);
    part1(&moves);
    part2(&moves);
}
|
use wiring_rs;
use wiring_rs::thick2ofn as thick2ofn;
use wiring_rs::ofn2thick as ofn2thick;
use serde_json::{Value};
// Round-trip test coverage plan:
// - axioms: subclass, equivalence class (binary + n-ary), disjoint classes,
//   disjoint union
// - class constructors: complement, restrictions (someValuesFrom, hasSelf,
//   hasValue, cardinalities), intersection, oneOf, union
// - property expressions: TODO (not yet covered)
/// Translates a thick triple to OFN and back, then checks that the result
/// and the original input are equal as parsed JSON values.
fn round_trip(input: &str) -> bool {
    // thick triple -> OFN S-expression
    let ofn = thick2ofn::thick_triple_parser::parse_triple(input);
    // OFN S-expression -> thick triple
    let thick = ofn2thick::ofn_parser::parse_ofn(&ofn);
    // Compare as JSON so whitespace/key-order differences don't matter.
    let original: Value = serde_json::from_str(input).unwrap();
    let translated: Value = serde_json::from_str(&thick).unwrap();
    original == translated
}
// Axiom-level round trips.
#[test]
fn sub_class_of_axiom() {
    let subclass_of = r#" {"subject": "ex:subClass", "predicate": "rdfs:subClassOf", "object": "ex:superClass"} "#;
    assert!(round_trip(subclass_of));
}
// Disabled: n-ary equivalence does not round-trip yet.
//#[test]
//fn test_n_ary_equivalence_axiom() {
//    let equivalence = r#" {"subject": "_:genid1", "predicate": "owl:equivalentClass", "object": {"rdf:rest": [{"object": {"rdf:rest": [{"object": {"rdf:rest": [{"object": "rdf:nil"}], "rdf:first": [{"object": "ex:equivalent2"}]}}], "rdf:first": [{"object": "ex:equivalent1"}]}}], "rdf:first": [{"object": "ex:equivalent3"}]}} "#;
//    assert!(round_trip(equivalence));
//}
#[test]
fn binary_equivalence_axiom() {
    let equivalence = r#" {"subject": "ex:lhs", "predicate": "owl:equivalentClass", "object": "ex:rhs" } "#;
    assert!(round_trip(equivalence));
}
#[test]
fn disjoint_union_axiom() {
    let disjoint_union = r#" {"subject": "ex:disjointUnion", "predicate": "owl:disjointUnionOf", "object": {"rdf:first": [{"object": "ex:u1"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:u2"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:u3"}], "rdf:rest": [{"object": "rdf:nil"}]}}]}}]}} "#;
    assert!(round_trip(disjoint_union));
}
// Class-constructor round trips: complement and simple restrictions.
#[test]
fn complement_of_expression() {
    let complement_of = r#" {"subject": "ex:complementOf", "predicate": "owl:equivalentClass", "object": {"rdf:type": [{"object": "owl:Class"}], "owl:complementOf": [{"object": "ex:complement"}]}} "#;
    assert!(round_trip(complement_of));
}
#[test]
fn some_values_from() {
    let existential_restriction = r#" {"subject": "ex:existential", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pExistential"}], "owl:someValuesFrom": [{"object": "ex:existentialFiller"}]}} "#;
    assert!(round_trip(existential_restriction));
}
#[test]
fn has_self() {
    let has_self = r#" {"subject": "ex:hasSelf", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pHasSelf"}], "owl:hasSelf": [{"object": "true^^xsd:boolean"}]}} "#;
    assert!(round_trip(has_self));
}
#[test]
fn has_value() {
    let has_value = r#" {"subject": "ex:hasValue", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pHasValue"}], "owl:hasValue": [{"object": "ex:a1"}]}} "#;
    assert!(round_trip(has_value));
}
#[test]
fn max_cardinality() {
    let max_cardinality = r#" {"subject": "ex:maxCardinality", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pMaxCardinality"}], "owl:maxCardinality": [{"object": "1^^xsd:nonNegativeInteger"}]}} "#;
    assert!(round_trip(max_cardinality));
}
// Cardinality restrictions, qualified (with owl:onClass) and unqualified.
#[test]
fn max_qualified_cardinality() {
    let max_qualified_cardinality = r#" {"subject": "ex:maxQualifiedCardinality", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pMaxQualifiedCardinality"}], "owl:maxQualifiedCardinality": [{"object": "1^^xsd:nonNegativeInteger"}], "owl:onClass": [{"object": "ex:maxCardinalityFiller"}]}} "#;
    assert!(round_trip(max_qualified_cardinality));
}
#[test]
fn min_cardinality() {
    let min_cardinality = r#" {"subject": "ex:minCardinality", "predicate": "owl:equivalentClass", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pMinCardinality"}], "owl:minCardinality": [{"object": "1^^xsd:nonNegativeInteger"}]}} "#;
    assert!(round_trip(min_cardinality));
}
#[test]
fn min_qualified_cardinality() {
    let min_qualified_cardinality = r#" {"subject": "ex:minQualifiedCardinality", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pMinQualifiedCardinality"}], "owl:minQualifiedCardinality": [{"object": "1^^xsd:nonNegativeInteger"}], "owl:onClass": [{"object": "ex:minCardinalityFiller"}]}} "#;
    assert!(round_trip(min_qualified_cardinality));
}
#[test]
fn exact_cardinality_expression() {
    let exact_cardinality = r#" {"subject": "ex:exactCardinality", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pExactCardinality"}], "owl:cardinality": [{"object": "2^^xsd:nonNegativeInteger"}]}} "#;
    assert!(round_trip(exact_cardinality));
}
#[test]
fn exact_qualified_cardinality_expression() {
    let exact_qualified_cardinality = r#" {"subject": "ex:exactQualifiedCardinality", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Restriction"}], "owl:onProperty": [{"object": "ex:pExactQualifiedCardinality"}], "owl:qualifiedCardinality": [{"object": "2^^xsd:nonNegativeInteger"}], "owl:onClass": [{"object": "ex:exactQualifiedCardinalityFiller"}]}} "#;
    assert!(round_trip(exact_qualified_cardinality));
}
// Boolean / enumeration class constructors built from RDF lists.
#[test]
fn intersection_expression() {
    let intersection = r#" {"subject": "ex:intersection", "predicate": "rdfs:subClassOf", "object": {"owl:intersectionOf": [{"object": {"rdf:first": [{"object": "ex:I1"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:I2"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:I3"}], "rdf:rest": [{"object": "rdf:nil"}]}}]}}]}}], "rdf:type": [{"object": "owl:Class"}]}} "#;
    assert!(round_trip(intersection));
}
#[test]
fn one_of() {
    let one_of = r#" {"subject": "ex:oneOf", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Class"}], "owl:oneOf": [{"object": {"rdf:first": [{"object": "ex:a1"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:a2"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:a3"}], "rdf:rest": [{"object": "rdf:nil"}]}}]}}]}}]}} "#;
    assert!(round_trip(one_of));
}
#[test]
fn union_of() {
    let union_of = r#" {"subject": "ex:union", "predicate": "rdfs:subClassOf", "object": {"rdf:type": [{"object": "owl:Class"}], "owl:unionOf": [{"object": {"rdf:first": [{"object": "ex:u1"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:u2"}], "rdf:rest": [{"object": {"rdf:first": [{"object": "ex:u3"}], "rdf:rest": [{"object": "rdf:nil"}]}}]}}]}}]}} "#;
    assert!(round_trip(union_of));
}
|
use std::fs;
use regex::{Regex, Captures};
use std::fmt;
/// Reads `filename` and returns one `String` per passport record.
///
/// In the input, records are separated by blank lines but a single record's
/// fields may span several adjacent lines. Replacing every newline that is
/// preceded by a non-newline character with a space joins each record onto
/// one line while keeping blank lines as record separators.
///
/// The parameter is `&str` rather than `&String` (idiomatic; existing
/// callers passing `&String` still compile via deref coercion).
///
/// # Panics
///
/// Panics if the file cannot be read.
pub fn get_passport_lines(filename: &str) -> Vec<String> {
    let contents = fs::read_to_string(filename)
        .expect("Something went wrong reading the file");
    // normalize the input by replacing only the first occurance of an endline with a space
    let re = Regex::new(r"([^\n])\n").unwrap();
    re.replace_all(&contents, |caps: &Captures| {
        format!("{} ", &caps[1])
    })
    .lines()
    .map(|l| l.trim().to_string())
    .collect()
}
/// A single passport field, e.g. `iyr:2013` split into key and value.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct KeyValue {
    // Field name, e.g. "byr".
    key: String,
    // Raw field value, validated lazily by `is_valid`.
    value: String,
}
impl KeyValue {
pub fn parse(text: &String) -> KeyValue {
lazy_static! {
static ref RE: Regex = Regex::new(r"([a-z]*):([^:]*)").unwrap();
}
// Split out key value (ex: "iyr:2013")
let caps = RE.captures(&text).unwrap();
let key = caps.get(1).unwrap().as_str().to_string();
let value = caps.get(2).unwrap().as_str().to_string();
KeyValue { key, value }
}
pub fn is_valid(&self) -> bool {
lazy_static! {
static ref HGT: Regex = Regex::new(r"^([\d]+)(cm|in)$").unwrap();
static ref HCL: Regex = Regex::new(r"^#[0-9a-f]{6}$").unwrap();
static ref ECL: Regex = Regex::new(r"^(amb|blu|brn|gry|grn|hzl|oth)$").unwrap();
static ref PID: Regex = Regex::new(r"^[\d]{9}$").unwrap();
}
match self.key.as_str() {
"byr" => {
match self.value.parse::<usize>() {
Ok(n) => (n >= 1920 && n <= 2002),
Err(_) => false
}
},
"iyr" => {
match self.value.parse::<usize>() {
Ok(n) => (n >= 2010 && n <= 2020),
Err(_) => false
}
},
"eyr" => {
match self.value.parse::<usize>() {
Ok(n) => (n >= 2020 && n <= 2030),
Err(_) => false
}
},
"hgt" => {
let caps = HGT.captures(&self.value);
match caps {
Some(s) => {
let value = s.get(1).unwrap().as_str();
let units = s.get(2).unwrap().as_str();
KeyValue::is_valid_height(value, units)
},
None => false
}
},
"hcl" => {
HCL.is_match(&self.value)
},
"ecl" => {
ECL.is_match(&self.value)
},
"pid" => {
PID.is_match(&self.value)
},
"cid" => true,
_ => false,
}
}
// validate height
fn is_valid_height(value: &str, units: &str) -> bool {
match value.parse::<usize>() {
Ok(n) => {
match units {
"cm" => (n >= 150 && n <= 193),
"in" => (n >= 59 && n <= 76),
_ => false,
}
},
Err(_) => false
}
}
}
// Renders the field back in its input form, "key:value".
impl fmt::Display for KeyValue {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}:{}", self.key, self.value)
    }
}
/// One passport record: the collection of its `key:value` fields.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Passport {
    entries: Vec<KeyValue>
}
impl Passport {
    /// Parses a normalized single-line record into its space-separated
    /// `key:value` fields.
    ///
    /// Takes `&str` instead of `&String` (idiomatic; `&String` callers
    /// still compile via deref coercion).
    pub fn parse(line: &str) -> Passport {
        Passport {
            entries: line.split(' ').map(|kv| KeyValue::parse(&kv.to_string())).collect()
        }
    }
    /// Returns the keys of all fields present on this passport.
    pub fn keys(&self) -> Vec<String> {
        self.entries.iter().map(|kv| kv.key.clone()).collect()
    }
    /// The keys required on a valid (North Pole) passport.
    pub fn north_pole_keys() -> Vec<&'static str> {
        vec![
            "byr", // (Birth Year)
            "iyr", // (Issue Year)
            "eyr", // (Expiration Year)
            "hgt", // (Height)
            "hcl", // (Hair Color)
            "ecl", // (Eye Color)
            "pid", // (Passport ID)
            // cid not required
        ]
    }
    /// Part 1 check: every required key is present (values not inspected).
    pub fn north_pole_keys_present(&self) -> bool {
        let keys = self.keys();
        Passport::north_pole_keys()
            .iter()
            .all(|np| keys.contains(&np.to_string()))
    }
    /// Part 2 check: all required keys present AND every field's value is
    /// individually valid.
    pub fn is_valid(&self) -> bool {
        self.north_pole_keys_present() && self.entries.iter().all(|e| e.is_valid())
    }
}
/// Day 4 driver: counts passports with all required fields (part 1) and
/// passports whose fields also validate (part 2).
/// Returns 0 on success, -1 when no input file argument was supplied.
pub fn day4(args: &[String]) -> i32 {
    println!("Day 4");
    let filename = match args {
        [f] => f,
        _ => {
            println!("Missing input file");
            return -1;
        }
    };
    println!("In file {}", filename);
    let lines = get_passport_lines(filename);
    let complete = lines
        .iter()
        .filter(|p| Passport::parse(&p).north_pole_keys_present())
        .count();
    println!("Part 1: {}", complete);
    let valid = lines
        .iter()
        .filter(|p| Passport::parse(&p).is_valid())
        .count();
    println!("Part 2: {}", valid);
    0
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Aggregate error type for this generated client: one transparent variant
// per API operation, each wrapping that operation module's own error type
// via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    ApiTokens_List(#[from] api_tokens::list::Error),
    #[error(transparent)]
    ApiTokens_Get(#[from] api_tokens::get::Error),
    #[error(transparent)]
    ApiTokens_Create(#[from] api_tokens::create::Error),
    #[error(transparent)]
    ApiTokens_Remove(#[from] api_tokens::remove::Error),
    #[error(transparent)]
    ContinuousDataExports_List(#[from] continuous_data_exports::list::Error),
    #[error(transparent)]
    ContinuousDataExports_Get(#[from] continuous_data_exports::get::Error),
    #[error(transparent)]
    ContinuousDataExports_Create(#[from] continuous_data_exports::create::Error),
    #[error(transparent)]
    ContinuousDataExports_Update(#[from] continuous_data_exports::update::Error),
    #[error(transparent)]
    ContinuousDataExports_Remove(#[from] continuous_data_exports::remove::Error),
    #[error(transparent)]
    DeviceGroups_List(#[from] device_groups::list::Error),
    #[error(transparent)]
    DeviceTemplates_List(#[from] device_templates::list::Error),
    #[error(transparent)]
    DeviceTemplates_Get(#[from] device_templates::get::Error),
    #[error(transparent)]
    DeviceTemplates_Create(#[from] device_templates::create::Error),
    #[error(transparent)]
    DeviceTemplates_Update(#[from] device_templates::update::Error),
    #[error(transparent)]
    DeviceTemplates_Remove(#[from] device_templates::remove::Error),
    #[error(transparent)]
    DeviceTemplates_ListDevices(#[from] device_templates::list_devices::Error),
    #[error(transparent)]
    DeviceTemplates_GetMerged(#[from] device_templates::get_merged::Error),
    #[error(transparent)]
    Devices_List(#[from] devices::list::Error),
    #[error(transparent)]
    Devices_Get(#[from] devices::get::Error),
    #[error(transparent)]
    Devices_Create(#[from] devices::create::Error),
    #[error(transparent)]
    Devices_Update(#[from] devices::update::Error),
    #[error(transparent)]
    Devices_Remove(#[from] devices::remove::Error),
    #[error(transparent)]
    Devices_GetAttestation(#[from] devices::get_attestation::Error),
    #[error(transparent)]
    Devices_CreateAttestation(#[from] devices::create_attestation::Error),
    #[error(transparent)]
    Devices_UpdateAttestation(#[from] devices::update_attestation::Error),
    #[error(transparent)]
    Devices_RemoveAttestation(#[from] devices::remove_attestation::Error),
    #[error(transparent)]
    Devices_GetCloudProperties(#[from] devices::get_cloud_properties::Error),
    #[error(transparent)]
    Devices_ReplaceCloudProperties(#[from] devices::replace_cloud_properties::Error),
    #[error(transparent)]
    Devices_UpdateCloudProperties(#[from] devices::update_cloud_properties::Error),
    #[error(transparent)]
    Devices_GetCommandHistory(#[from] devices::get_command_history::Error),
    #[error(transparent)]
    Devices_RunCommand(#[from] devices::run_command::Error),
    #[error(transparent)]
    Devices_ListComponents(#[from] devices::list_components::Error),
    #[error(transparent)]
    Devices_GetComponentCommandHistory(#[from] devices::get_component_command_history::Error),
    #[error(transparent)]
    Devices_RunComponentCommand(#[from] devices::run_component_command::Error),
    #[error(transparent)]
    Devices_GetComponentProperties(#[from] devices::get_component_properties::Error),
    #[error(transparent)]
    Devices_ReplaceComponentProperties(#[from] devices::replace_component_properties::Error),
    #[error(transparent)]
    Devices_UpdateComponentProperties(#[from] devices::update_component_properties::Error),
    #[error(transparent)]
    Devices_GetComponentTelemetryValue(#[from] devices::get_component_telemetry_value::Error),
    #[error(transparent)]
    Devices_GetCredentials(#[from] devices::get_credentials::Error),
    #[error(transparent)]
    Devices_ListModules(#[from] devices::list_modules::Error),
    #[error(transparent)]
    Devices_GetModuleCommandHistory(#[from] devices::get_module_command_history::Error),
    #[error(transparent)]
    Devices_RunModuleCommand(#[from] devices::run_module_command::Error),
    #[error(transparent)]
    Devices_ListModuleComponents(#[from] devices::list_module_components::Error),
    #[error(transparent)]
    Devices_GetModuleComponentCommandHistory(#[from] devices::get_module_component_command_history::Error),
    #[error(transparent)]
    Devices_RunModuleComponentCommand(#[from] devices::run_module_component_command::Error),
    #[error(transparent)]
    Devices_GetModuleComponentProperties(#[from] devices::get_module_component_properties::Error),
    #[error(transparent)]
    Devices_ReplaceModuleComponentProperties(#[from] devices::replace_module_component_properties::Error),
    #[error(transparent)]
    Devices_UpdateModuleComponentProperties(#[from] devices::update_module_component_properties::Error),
    #[error(transparent)]
    Devices_GetModuleComponentTelemetryValue(#[from] devices::get_module_component_telemetry_value::Error),
    #[error(transparent)]
    Devices_GetModuleProperties(#[from] devices::get_module_properties::Error),
    #[error(transparent)]
    Devices_ReplaceModuleProperties(#[from] devices::replace_module_properties::Error),
    #[error(transparent)]
    Devices_UpdateModuleProperties(#[from] devices::update_module_properties::Error),
    #[error(transparent)]
    Devices_GetModuleTelemetryValue(#[from] devices::get_module_telemetry_value::Error),
    #[error(transparent)]
    Devices_GetProperties(#[from] devices::get_properties::Error),
    #[error(transparent)]
    Devices_ReplaceProperties(#[from] devices::replace_properties::Error),
    #[error(transparent)]
    Devices_UpdateProperties(#[from] devices::update_properties::Error),
    #[error(transparent)]
    Devices_GetTelemetryValue(#[from] devices::get_telemetry_value::Error),
    #[error(transparent)]
    Jobs_List(#[from] jobs::list::Error),
    #[error(transparent)]
    Jobs_Get(#[from] jobs::get::Error),
    #[error(transparent)]
    Jobs_Create(#[from] jobs::create::Error),
    #[error(transparent)]
    Jobs_GetDevices(#[from] jobs::get_devices::Error),
    #[error(transparent)]
    Jobs_Rerun(#[from] jobs::rerun::Error),
    #[error(transparent)]
    Jobs_Resume(#[from] jobs::resume::Error),
    #[error(transparent)]
    Jobs_Stop(#[from] jobs::stop::Error),
    #[error(transparent)]
    Roles_List(#[from] roles::list::Error),
    #[error(transparent)]
    Roles_Get(#[from] roles::get::Error),
    #[error(transparent)]
    Users_List(#[from] users::list::Error),
    #[error(transparent)]
    Users_Get(#[from] users::get::Error),
    #[error(transparent)]
    Users_Create(#[from] users::create::Error),
    #[error(transparent)]
    Users_Update(#[from] users::update::Error),
    #[error(transparent)]
    Users_Remove(#[from] users::remove::Error),
}
pub mod api_tokens {
use super::{models, API_VERSION};
// Lists the API tokens in the application.
//
// Builds a GET request for `{base_path}/apiTokens`, attaching a bearer token
// when the operation config supplies a credential, then deserializes a 200
// response into `models::ApiTokenCollection`. Any other status is returned as
// `list::Error::UnexpectedResponse` together with the raw body.
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::ApiTokenCollection, list::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/apiTokens", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET has no payload; send an empty body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::ApiTokenCollection =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(operation_config: &crate::OperationConfig, token_id: &str) -> std::result::Result<models::ApiToken, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/apiTokens/{}", operation_config.base_path(), token_id);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ApiToken =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
token_id: &str,
body: &models::ApiToken,
) -> std::result::Result<models::ApiToken, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/apiTokens/{}", operation_config.base_path(), token_id);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ApiToken =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn remove(operation_config: &crate::OperationConfig, token_id: &str) -> std::result::Result<(), remove::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/apiTokens/{}", operation_config.base_path(), token_id);
let mut url = url::Url::parse(url_str).map_err(remove::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(remove::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(remove::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod remove {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Auto-generated client operations for the `/continuousDataExports` resource:
/// list, get, create (PUT), update (PATCH) and remove (DELETE) exports.
pub mod continuous_data_exports {
    use super::{models, API_VERSION};
    /// Lists all continuous data exports via `GET {base_path}/continuousDataExports`.
    ///
    /// Returns the deserialized [`models::ContinuousDataExportCollection`] on
    /// HTTP 200; other status codes map to `list::Error::UnexpectedResponse`.
    pub async fn list(
        operation_config: &crate::OperationConfig,
    ) -> std::result::Result<models::ContinuousDataExportCollection, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/continuousDataExports", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ContinuousDataExportCollection =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches a single continuous data export by `export_id` via
    /// `GET {base_path}/continuousDataExports/{export_id}`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        export_id: &str,
    ) -> std::result::Result<models::ContinuousDataExport, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/continuousDataExports/{}", operation_config.base_path(), export_id);
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ContinuousDataExport =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates (or replaces) the export identified by `export_id` via
    /// `PUT {base_path}/continuousDataExports/{export_id}` with `body` as JSON.
    pub async fn create(
        operation_config: &crate::OperationConfig,
        export_id: &str,
        body: &models::ContinuousDataExport,
    ) -> std::result::Result<models::ContinuousDataExport, create::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/continuousDataExports/{}", operation_config.base_path(), export_id);
        let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ContinuousDataExport =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(create::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`create`].
    pub mod create {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Partially updates the export identified by `export_id` via
    /// `PATCH {base_path}/continuousDataExports/{export_id}`.
    ///
    /// `body` is a raw JSON merge document (hence `serde_json::Value` rather
    /// than a typed model).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        export_id: &str,
        body: &serde_json::Value,
    ) -> std::result::Result<models::ContinuousDataExport, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/continuousDataExports/{}", operation_config.base_path(), export_id);
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ContinuousDataExport =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`update`].
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes the export identified by `export_id` via
    /// `DELETE {base_path}/continuousDataExports/{export_id}`.
    ///
    /// Succeeds (returns `()`) only on HTTP 204 No Content.
    pub async fn remove(operation_config: &crate::OperationConfig, export_id: &str) -> std::result::Result<(), remove::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/continuousDataExports/{}", operation_config.base_path(), export_id);
        let mut url = url::Url::parse(url_str).map_err(remove::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(remove::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(remove::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(remove::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                Err(remove::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`remove`].
    pub mod remove {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Auto-generated client operations for the `/deviceGroups` resource.
/// Only `list` is exposed for this resource.
pub mod device_groups {
    use super::{models, API_VERSION};
    /// Lists all device groups via `GET {base_path}/deviceGroups`.
    ///
    /// Returns the deserialized [`models::DeviceGroupCollection`] on HTTP 200;
    /// other status codes map to `list::Error::UnexpectedResponse`.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::DeviceGroupCollection, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/deviceGroups", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceGroupCollection =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod device_templates {
use super::{models, API_VERSION};
    /// Lists all device templates via `GET {base_path}/deviceTemplates`.
    ///
    /// Returns the deserialized [`models::DeviceTemplateCollection`] on HTTP
    /// 200; other status codes map to `list::Error::UnexpectedResponse`.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::DeviceTemplateCollection, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/deviceTemplates", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceTemplateCollection =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches a single device template by `device_template_id` via
    /// `GET {base_path}/deviceTemplates/{device_template_id}`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        device_template_id: &str,
    ) -> std::result::Result<models::DeviceTemplate, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/deviceTemplates/{}", operation_config.base_path(), device_template_id);
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceTemplate =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates (or replaces) the device template identified by
    /// `device_template_id` via `PUT {base_path}/deviceTemplates/{device_template_id}`
    /// with `body` serialized as JSON.
    pub async fn create(
        operation_config: &crate::OperationConfig,
        device_template_id: &str,
        body: &models::DeviceTemplate,
    ) -> std::result::Result<models::DeviceTemplate, create::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/deviceTemplates/{}", operation_config.base_path(), device_template_id);
        let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceTemplate =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(create::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`create`].
    pub mod create {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Partially updates the device template identified by
    /// `device_template_id` via `PATCH {base_path}/deviceTemplates/{device_template_id}`.
    ///
    /// `body` is a raw JSON merge document (hence `serde_json::Value` rather
    /// than a typed model).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        device_template_id: &str,
        body: &serde_json::Value,
    ) -> std::result::Result<models::DeviceTemplate, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/deviceTemplates/{}", operation_config.base_path(), device_template_id);
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::DeviceTemplate =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for [`update`].
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn remove(operation_config: &crate::OperationConfig, device_template_id: &str) -> std::result::Result<(), remove::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deviceTemplates/{}", operation_config.base_path(), device_template_id);
let mut url = url::Url::parse(url_str).map_err(remove::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(remove::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(remove::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `remove` device-template operation.
pub mod remove {
    use super::{models, API_VERSION};
    /// Failure modes for the `remove` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_devices(
operation_config: &crate::OperationConfig,
device_template_id: &str,
) -> std::result::Result<models::DeviceCollection, list_devices::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deviceTemplates/{}/devices", operation_config.base_path(), device_template_id);
let mut url = url::Url::parse(url_str).map_err(list_devices::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_devices::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_devices::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_devices::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCollection =
serde_json::from_slice(rsp_body).map_err(|source| list_devices::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_devices::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `list_devices` operation.
pub mod list_devices {
    use super::{models, API_VERSION};
    /// Failure modes for the `list_devices` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_merged(
operation_config: &crate::OperationConfig,
device_template_id: &str,
) -> std::result::Result<models::DeviceTemplate, get_merged::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/deviceTemplates/{}/merged", operation_config.base_path(), device_template_id);
let mut url = url::Url::parse(url_str).map_err(get_merged::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_merged::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_merged::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_merged::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceTemplate =
serde_json::from_slice(rsp_body).map_err(|source| get_merged::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_merged::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `get_merged` operation.
pub mod get_merged {
    use super::{models, API_VERSION};
    /// Failure modes for the `get_merged` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod devices {
use super::{models, API_VERSION};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::DeviceCollection, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCollection =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `list` devices operation.
pub mod list {
    use super::{models, API_VERSION};
    /// Failure modes for the `list` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get(operation_config: &crate::OperationConfig, device_id: &str) -> std::result::Result<models::Device, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `get` device operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes for the `get` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create(
operation_config: &crate::OperationConfig,
device_id: &str,
body: &models::Device,
) -> std::result::Result<models::Device, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `create` device operation.
pub mod create {
    use super::{models, API_VERSION};
    /// Failure modes for the `create` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
device_id: &str,
body: &serde_json::Value,
) -> std::result::Result<models::Device, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `update` device operation.
pub mod update {
    use super::{models, API_VERSION};
    /// Failure modes for the `update` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn remove(operation_config: &crate::OperationConfig, device_id: &str) -> std::result::Result<(), remove::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(remove::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(remove::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(remove::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `remove` device operation.
pub mod remove {
    use super::{models, API_VERSION};
    /// Failure modes for the `remove` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_attestation(
operation_config: &crate::OperationConfig,
device_id: &str,
) -> std::result::Result<models::Attestation, get_attestation::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}/attestation", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(get_attestation::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_attestation::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_attestation::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_attestation::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Attestation = serde_json::from_slice(rsp_body)
.map_err(|source| get_attestation::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_attestation::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `get_attestation` operation.
pub mod get_attestation {
    use super::{models, API_VERSION};
    /// Failure modes for the `get_attestation` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create_attestation(
operation_config: &crate::OperationConfig,
device_id: &str,
body: &models::Attestation,
) -> std::result::Result<models::Attestation, create_attestation::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}/attestation", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(create_attestation::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_attestation::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_attestation::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_attestation::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_attestation::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Attestation = serde_json::from_slice(rsp_body)
.map_err(|source| create_attestation::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(create_attestation::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `create_attestation` operation.
pub mod create_attestation {
    use super::{models, API_VERSION};
    /// Failure modes for the `create_attestation` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update_attestation(
operation_config: &crate::OperationConfig,
device_id: &str,
body: &serde_json::Value,
) -> std::result::Result<models::Attestation, update_attestation::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}/attestation", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(update_attestation::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_attestation::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update_attestation::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_attestation::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_attestation::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Attestation = serde_json::from_slice(rsp_body)
.map_err(|source| update_attestation::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update_attestation::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `update_attestation` operation.
pub mod update_attestation {
    use super::{models, API_VERSION};
    /// Failure modes for the `update_attestation` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn remove_attestation(
operation_config: &crate::OperationConfig,
device_id: &str,
) -> std::result::Result<(), remove_attestation::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}/attestation", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(remove_attestation::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_attestation::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove_attestation::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_attestation::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(remove_attestation::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `remove_attestation` operation.
pub mod remove_attestation {
    use super::{models, API_VERSION};
    /// Failure modes for the `remove_attestation` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_cloud_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
) -> std::result::Result<models::DeviceCloudProperties, get_cloud_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}/cloudProperties", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(get_cloud_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_cloud_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_cloud_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_cloud_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCloudProperties = serde_json::from_slice(rsp_body)
.map_err(|source| get_cloud_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_cloud_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `get_cloud_properties` operation.
pub mod get_cloud_properties {
    use super::{models, API_VERSION};
    /// Failure modes for the `get_cloud_properties` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn replace_cloud_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
body: &models::DeviceCloudProperties,
) -> std::result::Result<models::DeviceCloudProperties, replace_cloud_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/devices/{}/cloudProperties", operation_config.base_path(), device_id);
let mut url = url::Url::parse(url_str).map_err(replace_cloud_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(replace_cloud_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(replace_cloud_properties::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(replace_cloud_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(replace_cloud_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCloudProperties = serde_json::from_slice(rsp_body)
.map_err(|source| replace_cloud_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(replace_cloud_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error types for the `replace_cloud_properties` operation.
pub mod replace_cloud_properties {
    use super::{models, API_VERSION};
    /// Failure modes for the `replace_cloud_properties` request, one variant per pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Partially updates the cloud properties of device `device_id`.
///
/// Issues `PATCH {base_path}/devices/{device_id}/cloudProperties` with the
/// given JSON merge payload and the `api-version` query parameter. On HTTP 200
/// the body is deserialized into `models::DeviceCloudProperties`; any other
/// status is returned as `Error::UnexpectedResponse` carrying the raw body.
pub async fn update_cloud_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    body: &serde_json::Value,
) -> std::result::Result<models::DeviceCloudProperties, update_cloud_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/cloudProperties", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(update_cloud_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_cloud_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(update_cloud_properties::Error::SerializeError)?;
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(update_cloud_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_cloud_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCloudProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| update_cloud_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(update_cloud_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `update_cloud_properties` operation.
pub mod update_cloud_properties {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the execution history of command `command_name` on device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/commands/{command_name}` with
/// the `api-version` query parameter. On HTTP 200 the body is deserialized
/// into `models::DeviceCommandCollection`; any other status yields
/// `Error::UnexpectedResponse` carrying the raw body.
pub async fn get_command_history(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    command_name: &str,
) -> std::result::Result<models::DeviceCommandCollection, get_command_history::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/commands/{}", operation_config.base_path(), device_id, command_name);
    let mut url = url::Url::parse(url_str).map_err(get_command_history::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_command_history::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_command_history::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_command_history::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCommandCollection = serde_json::from_slice(rsp_body)
                .map_err(|source| get_command_history::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(get_command_history::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_command_history` operation.
pub mod get_command_history {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Runs command `command_name` on device `device_id`.
///
/// Issues `POST {base_path}/devices/{device_id}/commands/{command_name}` with
/// the serialized `models::DeviceCommand` payload and the `api-version` query
/// parameter. Note the success code here is HTTP 201 (Created), not 200; the
/// created command is deserialized back into `models::DeviceCommand`. Any
/// other status yields `Error::UnexpectedResponse` carrying the raw body.
pub async fn run_command(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    command_name: &str,
    body: &models::DeviceCommand,
) -> std::result::Result<models::DeviceCommand, run_command::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/commands/{}", operation_config.base_path(), device_id, command_name);
    let mut url = url::Url::parse(url_str).map_err(run_command::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(run_command::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(run_command::Error::SerializeError)?;
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(run_command::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(run_command::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCommand =
                serde_json::from_slice(rsp_body).map_err(|source| run_command::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(run_command::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `run_command` operation.
pub mod run_command {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists the components of device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/components` with the
/// `api-version` query parameter. On HTTP 200 the body is deserialized into
/// `models::Collection`; any other status yields `Error::UnexpectedResponse`
/// carrying the raw body.
pub async fn list_components(
    operation_config: &crate::OperationConfig,
    device_id: &str,
) -> std::result::Result<models::Collection, list_components::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/components", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(list_components::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_components::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_components::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_components::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Collection = serde_json::from_slice(rsp_body)
                .map_err(|source| list_components::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(list_components::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `list_components` operation.
pub mod list_components {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the history of command `command_name` on component `component_name`
/// of device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/components/{component_name}/commands/{command_name}`
/// with the `api-version` query parameter. On HTTP 200 the body is
/// deserialized into `models::DeviceCommandCollection`; any other status
/// yields `Error::UnexpectedResponse` carrying the raw body.
pub async fn get_component_command_history(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    component_name: &str,
    command_name: &str,
) -> std::result::Result<models::DeviceCommandCollection, get_component_command_history::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/components/{}/commands/{}",
        operation_config.base_path(),
        device_id,
        component_name,
        command_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_component_command_history::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_component_command_history::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_component_command_history::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_component_command_history::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCommandCollection = serde_json::from_slice(rsp_body)
                .map_err(|source| get_component_command_history::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(get_component_command_history::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_component_command_history` operation.
pub mod get_component_command_history {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Runs command `command_name` on component `component_name` of device `device_id`.
///
/// Issues `POST {base_path}/devices/{device_id}/components/{component_name}/commands/{command_name}`
/// with the serialized `models::DeviceCommand` payload and the `api-version`
/// query parameter. Note the success code here is HTTP 201 (Created); the
/// created command is deserialized back into `models::DeviceCommand`. Any
/// other status yields `Error::UnexpectedResponse` carrying the raw body.
pub async fn run_component_command(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    component_name: &str,
    command_name: &str,
    body: &models::DeviceCommand,
) -> std::result::Result<models::DeviceCommand, run_component_command::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/components/{}/commands/{}",
        operation_config.base_path(),
        device_id,
        component_name,
        command_name
    );
    let mut url = url::Url::parse(url_str).map_err(run_component_command::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(run_component_command::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(run_component_command::Error::SerializeError)?;
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(run_component_command::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(run_component_command::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCommand = serde_json::from_slice(rsp_body)
                .map_err(|source| run_component_command::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(run_component_command::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `run_component_command` operation.
pub mod run_component_command {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the properties of component `component_name` on device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/components/{component_name}/properties`
/// with the `api-version` query parameter. On HTTP 200 the body is
/// deserialized into `models::DeviceProperties`; any other status yields
/// `Error::UnexpectedResponse` carrying the raw body.
pub async fn get_component_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    component_name: &str,
) -> std::result::Result<models::DeviceProperties, get_component_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/components/{}/properties",
        operation_config.base_path(),
        device_id,
        component_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_component_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_component_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_component_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_component_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| get_component_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(get_component_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_component_properties` operation.
pub mod get_component_properties {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Replaces (full overwrite) the properties of component `component_name` on
/// device `device_id`.
///
/// Issues `PUT {base_path}/devices/{device_id}/components/{component_name}/properties`
/// with the serialized `models::DeviceProperties` payload and the
/// `api-version` query parameter. Note the success code here is HTTP 202
/// (Accepted), reflecting an asynchronous acceptance by the service; the
/// response body is still deserialized into `models::DeviceProperties`. Any
/// other status yields `Error::UnexpectedResponse` carrying the raw body.
pub async fn replace_component_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    component_name: &str,
    body: &models::DeviceProperties,
) -> std::result::Result<models::DeviceProperties, replace_component_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/components/{}/properties",
        operation_config.base_path(),
        device_id,
        component_name
    );
    let mut url = url::Url::parse(url_str).map_err(replace_component_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(replace_component_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(replace_component_properties::Error::SerializeError)?;
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(replace_component_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(replace_component_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| replace_component_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(replace_component_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `replace_component_properties` operation.
pub mod replace_component_properties {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Partially updates the properties of component `component_name` on device
/// `device_id`.
///
/// Issues `PATCH {base_path}/devices/{device_id}/components/{component_name}/properties`
/// with the given JSON merge payload and the `api-version` query parameter.
/// Note the success code here is HTTP 202 (Accepted); the response body is
/// deserialized into `models::DeviceProperties`. Any other status yields
/// `Error::UnexpectedResponse` carrying the raw body.
pub async fn update_component_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    component_name: &str,
    body: &serde_json::Value,
) -> std::result::Result<models::DeviceProperties, update_component_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/components/{}/properties",
        operation_config.base_path(),
        device_id,
        component_name
    );
    let mut url = url::Url::parse(url_str).map_err(update_component_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_component_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(update_component_properties::Error::SerializeError)?;
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(update_component_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_component_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| update_component_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(update_component_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `update_component_properties` operation.
pub mod update_component_properties {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the latest value of telemetry `telemetry_name` from component
/// `component_name` on device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/components/{component_name}/telemetry/{telemetry_name}`
/// with the `api-version` query parameter. On HTTP 200 the body is
/// deserialized into `models::DeviceTelemetry`; any other status yields
/// `Error::UnexpectedResponse` carrying the raw body.
pub async fn get_component_telemetry_value(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    component_name: &str,
    telemetry_name: &str,
) -> std::result::Result<models::DeviceTelemetry, get_component_telemetry_value::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/components/{}/telemetry/{}",
        operation_config.base_path(),
        device_id,
        component_name,
        telemetry_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_component_telemetry_value::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_component_telemetry_value::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_component_telemetry_value::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_component_telemetry_value::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceTelemetry = serde_json::from_slice(rsp_body)
                .map_err(|source| get_component_telemetry_value::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(get_component_telemetry_value::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_component_telemetry_value` operation.
pub mod get_component_telemetry_value {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the credentials of device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/credentials` with the
/// `api-version` query parameter. On HTTP 200 the body is deserialized into
/// `models::DeviceCredentials`; any other status yields
/// `Error::UnexpectedResponse` carrying the raw body.
pub async fn get_credentials(
    operation_config: &crate::OperationConfig,
    device_id: &str,
) -> std::result::Result<models::DeviceCredentials, get_credentials::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/credentials", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(get_credentials::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_credentials::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_credentials::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_credentials::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCredentials = serde_json::from_slice(rsp_body)
                .map_err(|source| get_credentials::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(get_credentials::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_credentials` operation.
pub mod get_credentials {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists the modules of device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/modules` with the `api-version`
/// query parameter. On HTTP 200 the body is deserialized into
/// `models::Collection`; any other status yields `Error::UnexpectedResponse`
/// carrying the raw body.
pub async fn list_modules(
    operation_config: &crate::OperationConfig,
    device_id: &str,
) -> std::result::Result<models::Collection, list_modules::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/modules", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(list_modules::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_modules::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_modules::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_modules::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::Collection =
                serde_json::from_slice(rsp_body).map_err(|source| list_modules::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(list_modules::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `list_modules` operation.
pub mod list_modules {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches the history of command `command_name` on module `module_name` of
/// device `device_id`.
///
/// Issues `GET {base_path}/devices/{device_id}/modules/{module_name}/commands/{command_name}`
/// with the `api-version` query parameter. On HTTP 200 the body is
/// deserialized into `models::DeviceCommandCollection`; any other status
/// yields `Error::UnexpectedResponse` carrying the raw body.
pub async fn get_module_command_history(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    module_name: &str,
    command_name: &str,
) -> std::result::Result<models::DeviceCommandCollection, get_module_command_history::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/modules/{}/commands/{}",
        operation_config.base_path(),
        device_id,
        module_name,
        command_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_module_command_history::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config supplies a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_module_command_history::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: no payload.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // The URI is read from `url` only after the api-version pair is appended.
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(get_module_command_history::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_module_command_history::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceCommandCollection = serde_json::from_slice(rsp_body)
                .map_err(|source| get_module_command_history::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            // Preserve the raw body so callers can inspect the service error payload.
            let rsp_body = rsp.body();
            Err(get_module_command_history::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for the `get_module_command_history` operation.
pub mod get_module_command_history {
    // NOTE(review): neither import is referenced inside this module; kept to
    // match the uniform shape of every generated operation module.
    use super::{models, API_VERSION};
    /// One variant per fallible step of the request pipeline (URL parsing,
    /// auth, build/serialize, execute, deserialize) plus an
    /// unexpected-status catch-all that retains the raw response body.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn run_module_command(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
command_name: &str,
body: &models::DeviceCommand,
) -> std::result::Result<models::DeviceCommand, run_module_command::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/commands/{}",
operation_config.base_path(),
device_id,
module_name,
command_name
);
let mut url = url::Url::parse(url_str).map_err(run_module_command::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(run_module_command::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(run_module_command::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(run_module_command::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(run_module_command::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCommand = serde_json::from_slice(rsp_body)
.map_err(|source| run_module_command::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(run_module_command::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `run_module_command` operation.
pub mod run_module_command {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_module_components(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
) -> std::result::Result<models::Collection, list_module_components::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components",
operation_config.base_path(),
device_id,
module_name
);
let mut url = url::Url::parse(url_str).map_err(list_module_components::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_module_components::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_module_components::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_module_components::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Collection = serde_json::from_slice(rsp_body)
.map_err(|source| list_module_components::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_module_components::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `list_module_components` operation.
pub mod list_module_components {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // NOTE(review): unused by this GET operation — kept for generator
        // uniformity across operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_module_component_command_history(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
component_name: &str,
command_name: &str,
) -> std::result::Result<models::DeviceCommandCollection, get_module_component_command_history::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components/{}/commands/{}",
operation_config.base_path(),
device_id,
module_name,
component_name,
command_name
);
let mut url = url::Url::parse(url_str).map_err(get_module_component_command_history::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_module_component_command_history::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_module_component_command_history::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_module_component_command_history::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCommandCollection = serde_json::from_slice(rsp_body)
.map_err(|source| get_module_component_command_history::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_module_component_command_history::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `get_module_component_command_history` operation.
pub mod get_module_component_command_history {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // NOTE(review): unused by this GET operation — kept for generator
        // uniformity across operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn run_module_component_command(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
component_name: &str,
command_name: &str,
body: &models::DeviceCommand,
) -> std::result::Result<models::DeviceCommand, run_module_component_command::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components/{}/commands/{}",
operation_config.base_path(),
device_id,
module_name,
component_name,
command_name
);
let mut url = url::Url::parse(url_str).map_err(run_module_component_command::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(run_module_component_command::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(run_module_component_command::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(run_module_component_command::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(run_module_component_command::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceCommand = serde_json::from_slice(rsp_body)
.map_err(|source| run_module_component_command::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(run_module_component_command::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `run_module_component_command` operation.
pub mod run_module_component_command {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_module_component_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
component_name: &str,
) -> std::result::Result<models::DeviceProperties, get_module_component_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components/{}/properties",
operation_config.base_path(),
device_id,
module_name,
component_name
);
let mut url = url::Url::parse(url_str).map_err(get_module_component_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_module_component_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_module_component_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_module_component_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
.map_err(|source| get_module_component_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_module_component_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `get_module_component_properties` operation.
pub mod get_module_component_properties {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // NOTE(review): unused by this GET operation — kept for generator
        // uniformity across operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn replace_module_component_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
component_name: &str,
body: &models::DeviceProperties,
) -> std::result::Result<models::DeviceProperties, replace_module_component_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components/{}/properties",
operation_config.base_path(),
device_id,
module_name,
component_name
);
let mut url = url::Url::parse(url_str).map_err(replace_module_component_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(replace_module_component_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(replace_module_component_properties::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(replace_module_component_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(replace_module_component_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
.map_err(|source| replace_module_component_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(replace_module_component_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `replace_module_component_properties` operation.
pub mod replace_module_component_properties {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update_module_component_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
component_name: &str,
body: &serde_json::Value,
) -> std::result::Result<models::DeviceProperties, update_module_component_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components/{}/properties",
operation_config.base_path(),
device_id,
module_name,
component_name
);
let mut url = url::Url::parse(url_str).map_err(update_module_component_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_module_component_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update_module_component_properties::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_module_component_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_module_component_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
.map_err(|source| update_module_component_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update_module_component_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `update_module_component_properties` operation.
pub mod update_module_component_properties {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_module_component_telemetry_value(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
component_name: &str,
telemetry_name: &str,
) -> std::result::Result<models::DeviceTelemetry, get_module_component_telemetry_value::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/components/{}/telemetry/{}",
operation_config.base_path(),
device_id,
module_name,
component_name,
telemetry_name
);
let mut url = url::Url::parse(url_str).map_err(get_module_component_telemetry_value::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_module_component_telemetry_value::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_module_component_telemetry_value::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_module_component_telemetry_value::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceTelemetry = serde_json::from_slice(rsp_body)
.map_err(|source| get_module_component_telemetry_value::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_module_component_telemetry_value::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `get_module_component_telemetry_value` operation.
pub mod get_module_component_telemetry_value {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // NOTE(review): unused by this GET operation — kept for generator
        // uniformity across operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_module_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
) -> std::result::Result<models::DeviceProperties, get_module_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/properties",
operation_config.base_path(),
device_id,
module_name
);
let mut url = url::Url::parse(url_str).map_err(get_module_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_module_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_module_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_module_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
.map_err(|source| get_module_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_module_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `get_module_properties` operation.
pub mod get_module_properties {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // NOTE(review): unused by this GET operation — kept for generator
        // uniformity across operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn replace_module_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
body: &models::DeviceProperties,
) -> std::result::Result<models::DeviceProperties, replace_module_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/properties",
operation_config.base_path(),
device_id,
module_name
);
let mut url = url::Url::parse(url_str).map_err(replace_module_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(replace_module_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(replace_module_properties::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(replace_module_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(replace_module_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
.map_err(|source| replace_module_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(replace_module_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `replace_module_properties` operation.
pub mod replace_module_properties {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update_module_properties(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
body: &serde_json::Value,
) -> std::result::Result<models::DeviceProperties, update_module_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/properties",
operation_config.base_path(),
device_id,
module_name
);
let mut url = url::Url::parse(url_str).map_err(update_module_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_module_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update_module_properties::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(update_module_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_module_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
.map_err(|source| update_module_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update_module_properties::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `update_module_properties` operation.
pub mod update_module_properties {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_module_telemetry_value(
operation_config: &crate::OperationConfig,
device_id: &str,
module_name: &str,
telemetry_name: &str,
) -> std::result::Result<models::DeviceTelemetry, get_module_telemetry_value::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/devices/{}/modules/{}/telemetry/{}",
operation_config.base_path(),
device_id,
module_name,
telemetry_name
);
let mut url = url::Url::parse(url_str).map_err(get_module_telemetry_value::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_module_telemetry_value::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_module_telemetry_value::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_module_telemetry_value::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceTelemetry = serde_json::from_slice(rsp_body)
.map_err(|source| get_module_telemetry_value::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_module_telemetry_value::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
/// Error namespace for the `get_module_telemetry_value` operation.
pub mod get_module_telemetry_value {
    use super::{models, API_VERSION};
    /// One variant per failure stage of the request pipeline:
    /// URL parse, token fetch, request build, execute, and (de)serialize.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Service replied with a status other than the expected success code;
        // the raw response body is preserved for diagnostics.
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        // NOTE(review): unused by this GET operation — kept for generator
        // uniformity across operations.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches a device's property bag via
/// `GET {base_path}/devices/{device_id}/properties`.
///
/// Attaches a bearer token when a credential is configured, appends the
/// `api-version` query parameter, and deserializes a 200 response into
/// `models::DeviceProperties`; any other status becomes
/// `Error::UnexpectedResponse` with the raw body.
pub async fn get_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
) -> std::result::Result<models::DeviceProperties, get_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/properties", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(get_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: body is intentionally empty.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceProperties =
                serde_json::from_slice(rsp_body).map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(get_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for [`get_properties`]; one variant per pipeline stage.
pub mod get_properties {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Replaces a device's property bag wholesale via
/// `PUT {base_path}/devices/{device_id}/properties` with a JSON body.
///
/// The service acknowledges with 202 ACCEPTED (the write is applied
/// asynchronously); that response body is deserialized into
/// `models::DeviceProperties`. Any other status becomes
/// `Error::UnexpectedResponse` with the raw body.
pub async fn replace_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    body: &models::DeviceProperties,
) -> std::result::Result<models::DeviceProperties, replace_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/properties", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(replace_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(replace_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // JSON-encode the caller-supplied properties as the request body.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(replace_properties::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(replace_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(replace_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        // 202: the replacement was accepted for asynchronous processing.
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| replace_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(replace_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for [`replace_properties`]; one variant per pipeline stage.
pub mod replace_properties {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Partially updates a device's properties via
/// `PATCH {base_path}/devices/{device_id}/properties`.
///
/// `body` is free-form JSON (a merge patch of the fields to change), which
/// is why it is typed `serde_json::Value` rather than a model struct.
/// The service responds 202 ACCEPTED; that body is deserialized into
/// `models::DeviceProperties`. Any other status becomes
/// `Error::UnexpectedResponse` with the raw body.
pub async fn update_properties(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    body: &serde_json::Value,
) -> std::result::Result<models::DeviceProperties, update_properties::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/devices/{}/properties", operation_config.base_path(), device_id);
    let mut url = url::Url::parse(url_str).map_err(update_properties::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_properties::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(body).map_err(update_properties::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update_properties::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_properties::Error::ExecuteRequestError)?;
    match rsp.status() {
        // 202: the patch was accepted for asynchronous processing.
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceProperties = serde_json::from_slice(rsp_body)
                .map_err(|source| update_properties::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(update_properties::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for [`update_properties`]; one variant per pipeline stage.
pub mod update_properties {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Reads the current value of one telemetry field reported directly by a
/// device, via `GET {base_path}/devices/{device_id}/telemetry/{telemetry_name}`.
///
/// Same pipeline as the sibling operations: optional bearer token,
/// mandatory `api-version` query parameter, 200 deserialized into
/// `models::DeviceTelemetry`, anything else surfaced as
/// `Error::UnexpectedResponse` with the raw body.
pub async fn get_telemetry_value(
    operation_config: &crate::OperationConfig,
    device_id: &str,
    telemetry_name: &str,
) -> std::result::Result<models::DeviceTelemetry, get_telemetry_value::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/devices/{}/telemetry/{}",
        operation_config.base_path(),
        device_id,
        telemetry_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_telemetry_value::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_telemetry_value::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: body is intentionally empty.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_telemetry_value::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_telemetry_value::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::DeviceTelemetry = serde_json::from_slice(rsp_body)
                .map_err(|source| get_telemetry_value::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(get_telemetry_value::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
/// Error types for [`get_telemetry_value`]; one variant per pipeline stage.
pub mod get_telemetry_value {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Operations on the `/jobs` resource: list, get, create, inspect per-device
/// status, rerun, resume and stop. Each operation follows the same generated
/// pipeline (optional bearer token, `api-version` query parameter, typed
/// deserialization of the expected status, `UnexpectedResponse` otherwise).
pub mod jobs {
    use super::{models, API_VERSION};
    /// Lists all jobs via `GET {base_path}/jobs`, returning the full
    /// `models::JobCollection` from a 200 response.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::JobCollection, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::JobCollection =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list`]; one variant per pipeline stage.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches a single job via `GET {base_path}/jobs/{job_id}`.
    pub async fn get(operation_config: &crate::OperationConfig, job_id: &str) -> std::result::Result<models::Job, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs/{}", operation_config.base_path(), job_id);
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Job =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get`]; one variant per pipeline stage.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates (or replaces — PUT semantics with a caller-chosen id) a job
    /// via `PUT {base_path}/jobs/{job_id}` with a JSON body; a 200 response
    /// is deserialized into the created `models::Job`.
    pub async fn create(
        operation_config: &crate::OperationConfig,
        job_id: &str,
        body: &models::Job,
    ) -> std::result::Result<models::Job, create::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs/{}", operation_config.base_path(), job_id);
        let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // JSON-encode the caller-supplied job definition as the request body.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Job =
                    serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(create::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`create`]; one variant per pipeline stage.
    pub mod create {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches per-device status for a job via
    /// `GET {base_path}/jobs/{job_id}/devices`.
    pub async fn get_devices(
        operation_config: &crate::OperationConfig,
        job_id: &str,
    ) -> std::result::Result<models::JobDeviceStatusCollection, get_devices::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs/{}/devices", operation_config.base_path(), job_id);
        let mut url = url::Url::parse(url_str).map_err(get_devices::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_devices::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_devices::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_devices::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::JobDeviceStatusCollection =
                    serde_json::from_slice(rsp_body).map_err(|source| get_devices::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get_devices::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get_devices`]; one variant per pipeline stage.
    pub mod get_devices {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Reruns an existing job under a new id via
    /// `PUT {base_path}/jobs/{job_id}/rerun/{rerun_id}`; a 200 response is
    /// deserialized into the new `models::Job`. The PUT carries no body.
    pub async fn rerun(
        operation_config: &crate::OperationConfig,
        job_id: &str,
        rerun_id: &str,
    ) -> std::result::Result<models::Job, rerun::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs/{}/rerun/{}", operation_config.base_path(), job_id, rerun_id);
        let mut url = url::Url::parse(url_str).map_err(rerun::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(rerun::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(rerun::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(rerun::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Job =
                    serde_json::from_slice(rsp_body).map_err(|source| rerun::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(rerun::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`rerun`]; one variant per pipeline stage.
    pub mod rerun {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Resumes a stopped job via `POST {base_path}/jobs/{job_id}/resume`.
    /// Success is 204 NO_CONTENT and yields `()`.
    pub async fn resume(operation_config: &crate::OperationConfig, job_id: &str) -> std::result::Result<(), resume::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs/{}/resume", operation_config.base_path(), job_id);
        let mut url = url::Url::parse(url_str).map_err(resume::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(resume::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: send an explicit Content-Length of 0.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(resume::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(resume::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                Err(resume::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`resume`]; one variant per pipeline stage.
    pub mod resume {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Stops a running job via `POST {base_path}/jobs/{job_id}/stop`.
    /// Success is 204 NO_CONTENT and yields `()`.
    pub async fn stop(operation_config: &crate::OperationConfig, job_id: &str) -> std::result::Result<(), stop::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/jobs/{}/stop", operation_config.base_path(), job_id);
        let mut url = url::Url::parse(url_str).map_err(stop::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(stop::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // POST with an empty body: send an explicit Content-Length of 0.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(stop::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(stop::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::NO_CONTENT => Ok(()),
            status_code => {
                let rsp_body = rsp.body();
                Err(stop::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`stop`]; one variant per pipeline stage.
    pub mod stop {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Read-only operations on the `/roles` resource: list all roles and fetch
/// one by id. Same generated pipeline as the sibling modules.
pub mod roles {
    use super::{models, API_VERSION};
    /// Lists all roles via `GET {base_path}/roles`, returning
    /// `models::RoleCollection` from a 200 response.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::RoleCollection, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/roles", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::RoleCollection =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`list`]; one variant per pipeline stage.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches a single role via `GET {base_path}/roles/{role_id}`.
    pub async fn get(operation_config: &crate::OperationConfig, role_id: &str) -> std::result::Result<models::Role, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/roles/{}", operation_config.base_path(), role_id);
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::Role =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for [`get`]; one variant per pipeline stage.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod users {
use super::{models, API_VERSION};
/// Lists all users via `GET {base_path}/users`, returning
/// `models::UserCollection` from a 200 response. Same generated pipeline as
/// the sibling operations: optional bearer token, mandatory `api-version`
/// query parameter, `UnexpectedResponse` for any non-200 status.
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::UserCollection, list::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!("{}/users", operation_config.base_path(),);
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET request: body is intentionally empty.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::UserCollection =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            Err(list::Error::UnexpectedResponse {
                status_code,
                body: rsp_body.clone(),
            })
        }
    }
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(operation_config: &crate::OperationConfig, user_id: &str) -> std::result::Result<models::User, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::User =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
user_id: &str,
body: &models::User,
) -> std::result::Result<models::User, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::User =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
user_id: &str,
body: &serde_json::Value,
) -> std::result::Result<models::User, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::User =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn remove(operation_config: &crate::OperationConfig, user_id: &str) -> std::result::Result<(), remove::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/users/{}", operation_config.base_path(), user_id);
let mut url = url::Url::parse(url_str).map_err(remove::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(remove::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(remove::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod remove {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
//! The submodule that defines user threads.
use std::net::{ Shutdown, TcpStream };
use std::thread;
use std::thread::JoinHandle;
use protocol;
use state::server::common::{ UserList, SayTx, HearRx };
/// Spawn a user thread.
///
/// Moves the accepted connection together with the shared server state
/// into a freshly spawned OS thread and returns its join handle.
pub fn spawn(
    client: TcpStream,
    users: UserList,
    say_tx: SayTx,
    hear_rx: HearRx ) -> JoinHandle<()> {
    thread::spawn(move || init_user(client, users, say_tx, hear_rx))
}
/// Perform initialization for the user thread.
///
/// Waits for the client's first protocol message, which must be `Connect`.
/// The requested name is test-and-reserved under the shared user-list lock;
/// a taken name earns the client a `Reject`, otherwise an `Accept` is sent,
/// a join notice is broadcast, and control passes to `handle_user`.
fn init_user(mut client: TcpStream, users: UserList, say_tx: SayTx, hear_rx: HearRx ) {
    match protocol::receive(&mut client) {
        Ok(protocol::Connect { name }) => {
            let name_taken;
            {
                // Hold the lock only long enough to check and reserve the name.
                let mut _users = users.lock().unwrap();
                name_taken = _users.contains(&name.to_string());
                if !name_taken { _users.push(name.to_string()); }
            }
            if name_taken {
                if let Err(e) = protocol::send(&mut client, protocol::Reject) {
                    println!("Error sending Reject message to client: {}", e);
                }
            }
            else {
                match protocol::send(&mut client, protocol::Accept) {
                    Ok(()) => {
                        let join_msg = format!("{} joined.", name);
                        println!("{}", join_msg);
                        // Only enter the main loop if the join notice could be
                        // broadcast; a send failure means the server side is gone.
                        if let Ok(()) = say_tx.send(protocol::Notice(join_msg)) {
                            handle_user(client, name, users, say_tx, hear_rx);
                        }
                    },
                    Err(e) => println!("Error sending Accept message to client: {}", e)
                }
            }
        },
        Ok(m) => println!("Initial message from client was not Connect! (got {:?})", m),
        Err(e) => println!("Invalid initial message: {}", e)
    }
}
/// The main loop for user threads.
///
/// Forwards each `Say` from the client to the server as a `Hear` broadcast
/// until the client disconnects, deviates from the protocol, or the server
/// channel closes; then shuts the socket down, removes the user from the
/// shared list, and joins the companion hear thread.
fn handle_user(
    mut client: TcpStream,
    name: String,
    users: UserList,
    say_tx: SayTx,
    hear_rx: HearRx ) {
    // Companion thread pushing server broadcasts back down this client's socket.
    let hear_thread = spawn_hear_thread(hear_rx, client.try_clone().unwrap());
    loop {
        match protocol::receive(&mut client) {
            Ok(protocol::Say(message)) => {
                println!("{}: {}", name, message);
                let broadcast = protocol::Hear { author: name.clone(), payload: message.clone() };
                // A failed send means the server hung up; stop the loop.
                if let Err(_) = say_tx.send(broadcast) { break; }
            },
            Ok(m) => {
                println!("Received protocol-deviant message from {}: {:?}", name, m);
                break;
            },
            Err(e) => {
                // A receive error doubles as the client-disconnect signal.
                let quit_msg = format!("{} quit. ({})", name, e);
                println!("{}", quit_msg);
                let _ = say_tx.send(protocol::Notice(quit_msg));
                break;
            }
        }
    }
    let _ = client.shutdown(Shutdown::Both);
    { // scoping to release lock
        let mut _users = users.lock().unwrap();
        let (i, _) = _users.iter().enumerate().find(|&(_, item)| *item == name).unwrap();
        _users.remove(i);
    }
    // NOTE(review): say_tx is dropped explicitly before the blocking join —
    // presumably the server tracks sender disconnects to tear down this
    // user's hear channel so the hear thread can exit; confirm in the server.
    drop(say_tx);
    let _ = hear_thread.join();
}
/// Spawn a thread that listens for messages from the server thread.
///
/// Forwards every received broadcast to the client's socket; when the
/// channel closes (all senders dropped) the socket is shut down so the
/// reader side unblocks, and the thread exits.
fn spawn_hear_thread(hear_rx: HearRx, mut client: TcpStream) -> JoinHandle<()> {
    thread::spawn(move || {
        while let Ok(message) = hear_rx.recv() {
            let _ = protocol::send(&mut client, message);
        }
        let _ = client.shutdown(Shutdown::Both);
    })
}
|
extern crate web_sys;
/// Implements [Program] to read shader files.
pub mod program;
pub use program::Program;
/// Implements [ProgramWrapper] to load from a Program.
pub mod program_wrapper;
pub use program_wrapper::ProgramWrapper;
|
use rand::distributions::{Distribution, Uniform};
use ray_tracer::camera::Camera;
use ray_tracer::hitable;
use ray_tracer::ray::Ray;
use ray_tracer::vec3::Vec3;
use std::io::{self, Write};
/// Shade a ray: surfaces get a normal-based color, misses get the
/// classic white-to-blue vertical sky gradient.
fn color(ray: Ray, world: &[&dyn hitable::Hitable]) -> Vec3 {
    match hitable::hit(world, ray, 0., std::f32::MAX) {
        // Map the unit normal's components from [-1, 1] into [0, 1].
        Some(rec) => (rec.normal + 1.) * 0.5,
        None => {
            let dir = ray.direction().make_unit_vector();
            let t = 0.5 * (dir.y + 1.0);
            Vec3::new(1., 1., 1.) * (1.0 - t) + Vec3::new(0.5, 0.7, 1.0) * t
        }
    }
}
/// Render a two-sphere scene to stdout as a plain-text PPM ("P3") image,
/// anti-aliased with `ns` jittered samples per pixel.
fn main() {
    // Lock stdout once up front: every `write!` on the unlocked handle
    // re-acquires the global lock, i.e. once per pixel in the hot loop.
    let stdout = io::stdout();
    let mut stdout = stdout.lock();
    // Image dimensions and samples per pixel.
    let nx = 200;
    let ny = 100;
    let ns = 100;
    // PPM header: magic, width/height, maximum channel value.
    write!(stdout, "P3\n{} {}\n255\n", nx, ny).unwrap();
    let cam = Camera::new();
    // Scene: a small sphere resting on a very large "ground" sphere.
    let s1 = hitable::Sphere::new(Vec3::new(0., 0., -1.), 0.5);
    let s2 = hitable::Sphere::new(Vec3::new(0., -100.5, -1.), 100.);
    let world: Vec<&dyn hitable::Hitable> = vec![&s1, &s2];
    let between = Uniform::new(0., 1.);
    let mut rng = rand::thread_rng();
    // Rows top to bottom, columns left to right, as PPM expects.
    for j in (0..ny).rev() {
        for i in 0..nx {
            let mut col = Vec3::new(0., 0., 0.);
            // Average `ns` jittered samples inside the pixel footprint.
            for _ in 0..ns {
                let ray = cam.get_ray(
                    (i as f32 + between.sample(&mut rng)) / nx as f32,
                    (j as f32 + between.sample(&mut rng)) / ny as f32,
                );
                col += color(ray, &world);
            }
            col /= ns as f32;
            let ir = (255.99 * col.x) as u32;
            let ig = (255.99 * col.y) as u32;
            let ib = (255.99 * col.z) as u32;
            writeln!(stdout, "{} {} {}", ir, ig, ib).unwrap();
        }
    }
}
|
/// Shadows the standard `println!`: formats via `core::fmt::Write` into the
/// serial debug writer, tagged with the calling module's path. The `true`
/// flag presumably requests a trailing newline (contrast `printf!` below) —
/// confirm against `debug::SerialWriter::get`'s signature.
macro_rules! println {
    ($($arg:tt)*) => ({
        use core::fmt::Write;
        let _ = write!(&mut ::debug::SerialWriter::get(module_path!(), true), $($arg)*);
    })
}
/// Like the local `println!` but with the boolean flag set to `false`
/// (by the naming, no trailing newline — confirm in the `debug` module).
macro_rules! printf {
    ($($arg:tt)*) => ({
        use core::fmt::Write;
        let _ = write!(&mut ::debug::SerialWriter::get(module_path!(), false), $($arg)*);
    })
}
/// Shorthand for naming a variant of `common::error::Error`,
/// e.g. `err!(InvalidArgument)` expands to `Error::InvalidArgument`.
macro_rules! err {
    ($x:ident) => {{
        use common::error::Error;
        Error::$x
    }};
}
/// Round `$x` up to the next multiple of `$align`.
///
/// Only valid for power-of-two alignments: adding `align - 1` bumps the
/// value past the previous boundary and the mask clears the low bits.
macro_rules! align {
    ($x:expr, $align:expr) => {{
        ($x + $align - 1) & !($align - 1)
    }};
}
|
extern crate getopts;
use getopts::Options;
use std::env;
use std::io;
use std::io::prelude::*;
use std::fs::File;
/// Read the entire file at `filename` into a `String`.
///
/// # Errors
///
/// Returns any `io::Error` from opening or reading the file (including
/// invalid-UTF-8 content).
fn read_file(filename: String) -> Result<String, io::Error> {
    // `?` replaces the deprecated `try!` macro, which no longer compiles
    // on edition 2021.
    let mut file = File::open(filename)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    Ok(contents)
}
/// Minimal `cat` clone: print each file named on the command line,
/// optionally prefixing a running line number (`-n`).
fn main() {
    let args: Vec<String> = env::args().collect();
    let mut opts = Options::new();
    opts.optflag("n","number","print line number");
    opts.optflag("h","help","print this help menu");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        // `panic!` with a non-literal first argument is a hard error on
        // edition 2021; route the message through a format string.
        Err(f) => panic!("{}", f),
    };
    if matches.opt_present("h") {
        println!("{}", opts.usage(""));
        return;
    };
    if args.len() > 1 {
        // Line counter shared across all files (like `cat -n`).
        let mut count = 0;
        for file in &args[1..] {
            // Skip option arguments. `starts_with` is also safe on empty
            // strings, where the old `&file[0..1]` slice would panic.
            if file.starts_with('-') {
                continue;
            }
            println!("{}", match read_file(file.clone()) {
                Ok(concat) => concat.lines()
                    .map(|line| if matches.opt_present("n") {
                        // Number lines only when -n was given.
                        count += 1;
                        format!("{}: {}", count, line)
                    } else {
                        format!("{}", line)
                    })
                    .fold(String::new(), |result, line| format!("{}\n{}", result, line)),
                Err(_) => format!("{} does not exist", file),
            });
        }
    }
}
|
// The MD2 Message-Digest Algorithm
// https://tools.ietf.org/html/rfc1319
// The S-table's values are derived from Pi
// (a fixed pseudo-random permutation of the byte values 0..=255,
// reproduced verbatim from RFC 1319).
const S: [u8; 256] = [
    0x29, 0x2E, 0x43, 0xC9, 0xA2, 0xD8, 0x7C, 0x01, 0x3D, 0x36, 0x54, 0xA1, 0xEC, 0xF0, 0x06, 0x13,
    0x62, 0xA7, 0x05, 0xF3, 0xC0, 0xC7, 0x73, 0x8C, 0x98, 0x93, 0x2B, 0xD9, 0xBC, 0x4C, 0x82, 0xCA,
    0x1E, 0x9B, 0x57, 0x3C, 0xFD, 0xD4, 0xE0, 0x16, 0x67, 0x42, 0x6F, 0x18, 0x8A, 0x17, 0xE5, 0x12,
    0xBE, 0x4E, 0xC4, 0xD6, 0xDA, 0x9E, 0xDE, 0x49, 0xA0, 0xFB, 0xF5, 0x8E, 0xBB, 0x2F, 0xEE, 0x7A,
    0xA9, 0x68, 0x79, 0x91, 0x15, 0xB2, 0x07, 0x3F, 0x94, 0xC2, 0x10, 0x89, 0x0B, 0x22, 0x5F, 0x21,
    0x80, 0x7F, 0x5D, 0x9A, 0x5A, 0x90, 0x32, 0x27, 0x35, 0x3E, 0xCC, 0xE7, 0xBF, 0xF7, 0x97, 0x03,
    0xFF, 0x19, 0x30, 0xB3, 0x48, 0xA5, 0xB5, 0xD1, 0xD7, 0x5E, 0x92, 0x2A, 0xAC, 0x56, 0xAA, 0xC6,
    0x4F, 0xB8, 0x38, 0xD2, 0x96, 0xA4, 0x7D, 0xB6, 0x76, 0xFC, 0x6B, 0xE2, 0x9C, 0x74, 0x04, 0xF1,
    0x45, 0x9D, 0x70, 0x59, 0x64, 0x71, 0x87, 0x20, 0x86, 0x5B, 0xCF, 0x65, 0xE6, 0x2D, 0xA8, 0x02,
    0x1B, 0x60, 0x25, 0xAD, 0xAE, 0xB0, 0xB9, 0xF6, 0x1C, 0x46, 0x61, 0x69, 0x34, 0x40, 0x7E, 0x0F,
    0x55, 0x47, 0xA3, 0x23, 0xDD, 0x51, 0xAF, 0x3A, 0xC3, 0x5C, 0xF9, 0xCE, 0xBA, 0xC5, 0xEA, 0x26,
    0x2C, 0x53, 0x0D, 0x6E, 0x85, 0x28, 0x84, 0x09, 0xD3, 0xDF, 0xCD, 0xF4, 0x41, 0x81, 0x4D, 0x52,
    0x6A, 0xDC, 0x37, 0xC8, 0x6C, 0xC1, 0xAB, 0xFA, 0x24, 0xE1, 0x7B, 0x08, 0x0C, 0xBD, 0xB1, 0x4A,
    0x78, 0x88, 0x95, 0x8B, 0xE3, 0x63, 0xE8, 0x6D, 0xE9, 0xCB, 0xD5, 0xFE, 0x3B, 0x00, 0x1D, 0x39,
    0xF2, 0xEF, 0xB7, 0x0E, 0x66, 0x58, 0xD0, 0xE4, 0xA6, 0x77, 0x72, 0xF8, 0xEB, 0x75, 0x4B, 0x0A,
    0x31, 0x44, 0x50, 0xB4, 0x8F, 0xED, 0x1F, 0x1A, 0xDB, 0x99, 0x8D, 0x33, 0x9F, 0x11, 0x83, 0x14,
];
/// One MD2 compression step over a single 16-byte block.
///
/// `state_and_checksum` packs the RFC 1319 working buffers into 64 bytes:
/// bytes 0..16 are the digest state `X[0..16]`, 16..48 are scratch filled
/// from `block` below, and 48..64 hold the running checksum `C`.
#[inline]
fn transform(state_and_checksum: &mut [u8; 64], block: &[u8]) {
    // First assert is trivially true for the fixed-size array; the second
    // documents the caller contract that `block` is one full 16-byte block.
    debug_assert_eq!(state_and_checksum.len(), 64);
    debug_assert_eq!(block.len(), Md2::BLOCK_LEN);
    // X[16..32] = block, X[32..48] = block XOR state (RFC 1319, step 3.2).
    for j in 0..16 {
        state_and_checksum[16 + j] = block[j];
        state_and_checksum[32 + j] = state_and_checksum[16 + j] ^ state_and_checksum[j];
    }
    // 18 mixing rounds over the 48-byte X buffer (RFC 1319, step 3.2).
    let mut t = 0u8;
    for j in 0u8..18 {
        for k in 0..48 {
            state_and_checksum[k] ^= S[t as usize];
            t = state_and_checksum[k];
        }
        t = t.wrapping_add(j);
    }
    // Checksum update (RFC 1319, step 2): C[j] ^= S[block[j] ^ L],
    // where L starts as the last checksum byte.
    t = state_and_checksum[48 + 15];
    for j in 0..16 {
        state_and_checksum[48 + j] ^= S[(block[j] ^ t) as usize];
        t = state_and_checksum[48 + j];
    }
}
/// Compute the MD2 digest of `data` in a single call.
pub fn md2<T: AsRef<[u8]>>(data: T) -> [u8; Md2::DIGEST_LEN] {
    Md2::oneshot(data)
}
/// Incremental MD2 hasher (RFC 1319).
#[derive(Clone)]
pub struct Md2 {
    buffer: [u8; Self::BLOCK_LEN], // partially filled input block
    state: [u8; 64],               // digest state + scratch + checksum (see `transform`)
    offset: usize,                 // number of bytes currently buffered
}
impl Md2 {
    /// Input block size in bytes.
    pub const BLOCK_LEN: usize = 16;
    /// Output digest size in bytes.
    pub const DIGEST_LEN: usize = 16;

    /// Creates a hasher with all-zero state, per RFC 1319.
    pub fn new() -> Self {
        Self {
            buffer: [0u8; Self::BLOCK_LEN],
            state: [0u8; 64],
            offset: 0usize,
        }
    }

    /// Absorb `data`, compressing a block whenever the buffer fills.
    pub fn update(&mut self, data: &[u8]) {
        for i in 0..data.len() {
            // Flush a previously filled buffer before accepting the next byte.
            if self.offset == Self::BLOCK_LEN {
                transform(&mut self.state, &self.buffer);
                self.offset = 0;
            }
            self.buffer[self.offset] = data[i];
            self.offset += 1;
        }
        // Eagerly flush when the input ended exactly on a block boundary,
        // so `offset` stays in 0..16 for `finalize`'s padding math.
        if self.offset == Self::BLOCK_LEN {
            transform(&mut self.state, &self.buffer);
            self.offset = 0;
        }
    }

    /// Pad, fold in the checksum, and return the 16-byte digest.
    pub fn finalize(mut self) -> [u8; Self::DIGEST_LEN] {
        // RFC 1319 padding: append `pad_byte` copies of the byte value
        // `pad_byte` (always between 1 and 16 inclusive).
        let pad_byte = (Self::BLOCK_LEN - self.offset) as u8;
        for i in self.offset..Self::BLOCK_LEN {
            self.buffer[i] = pad_byte;
        }
        // last block
        transform(&mut self.state, &self.buffer);
        // Process the 16-byte checksum (state[48..]) as one final block.
        self.buffer.copy_from_slice(&self.state[48..]);
        transform(&mut self.state, &self.buffer);
        let mut output = [0u8; Self::DIGEST_LEN];
        output.copy_from_slice(&self.state[..Self::DIGEST_LEN]);
        output
    }

    /// One-shot convenience: hash `data` with a throwaway hasher.
    pub fn oneshot<T: AsRef<[u8]>>(data: T) -> [u8; Self::DIGEST_LEN] {
        let mut m = Self::new();
        m.update(data.as_ref());
        m.finalize()
    }
}
#[test]
fn test_md2() {
    // A.5 Test suite
    // https://tools.ietf.org/html/rfc1319#appendix-A.5
    let cases: &[(&str, &str)] = &[
        ("", "8350e5a3e24c153df2275c9f80692773"),
        ("a", "32ec01ec4a6dac72c0ab96fb34c0b5d1"),
        ("abc", "da853b0d3f88d99b30283a69e6ded6bb"),
        ("message digest", "ab4f496bfb2a530b219ff33031fe06b0"),
        ("abcdefghijklmnopqrstuvwxyz", "4e8ddff3650292ab5a4108c3aa47940b"),
        ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", "da33def2a42df13975352846c30338cd"),
        ("12345678901234567890123456789012345678901234567890123456789012345678901234567890", "d5976f79d83d3a0dc9806c3c66f3efd8"),
    ];
    for &(input, expected) in cases {
        assert_eq!(&md2(input)[..], &hex::decode(expected).unwrap()[..]);
    }
}
|
use crate::prelude::*;
/// Names re-exported for glob import (`use …::prelude::*`) by consumers
/// of this module.
pub mod prelude {
    pub use super::{ArrayRead, ArrayReadChildren};
}
/// Array read-from-index expression.
///
/// Evaluates to the value stored in the child array expression at the
/// position given by the child index expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ArrayRead {
    /// The two child expressions of this array read expression.
    pub children: P<ArrayReadChildren>,
    /// The bit width of this read expression.
    ///
    /// This is a cache for the value bit width of the child
    /// array expression to prevent the indirection over the
    /// children structure if this value is used often.
    pub bitvec_ty: BitvecTy,
}
/// The child expressions of a `Read` expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ArrayReadChildren {
    /// The array expression.
    ///
    /// This must be of array type.
    pub array: AnyExpr,
    /// The index where the array shall be read.
    ///
    /// This must be of bitvec type, matching the array's index type.
    pub index: AnyExpr,
}
impl ArrayReadChildren {
    /// Creates a new `ArrayReadChildren` pair from an array expression and
    /// an index expression.
    ///
    /// Performs no validation of the `ArrayRead` invariants; callers are
    /// responsible for upholding them (which is why this could arguably be
    /// marked `unsafe`).
    pub fn new(array: AnyExpr, index: AnyExpr) -> ArrayReadChildren {
        Self { array, index }
    }

    /// Boxed convenience variant of [`ArrayReadChildren::new`].
    pub fn new_boxed(array: AnyExpr, index: AnyExpr) -> P<ArrayReadChildren> {
        P::new(Self::new(array, index))
    }
}
impl ArrayRead {
    /// Returns a new `ArrayRead` expression for the given array expression
    /// and reading at the given term expression index.
    ///
    /// # Errors
    ///
    /// - If the given `array` is not of array type.
    /// - If the given `index` is not of bitvec type and does not match the
    ///   index bit width of the given array.
    pub fn new<E1, E2>(array: E1, index: E2) -> ExprResult<ArrayRead>
    where
        E1: Into<AnyExpr>,
        E2: Into<AnyExpr>,
    {
        let array = array.into();
        let index = index.into();
        // The left child must be an array; attach a user-facing explanation
        // to the type error.
        let array_ty = expect_array_ty(&array)
            .map_err(ExprError::from)
            .map_err(|e| {
                e.context_msg(
                    "Expected the left hand-side expression of the ArrayRead \
                    expression to be of array type.",
                )
            })?;
        // The index must have exactly the array's index bitvector type.
        expect_type(array_ty.index_ty(), &index)
            .map_err(ExprError::from)
            .map_err(|e| {
                e.context_msg(
                    "Expected the right hand-side expression of the ArrayRead \
                    expression to be of the same bitvector type as the index-type \
                    of the left hand-side array expression.",
                )
            })?;
        Ok(ArrayRead {
            // Cache the array's value width so `ty()` needs no indirection.
            bitvec_ty: array_ty.value_ty(),
            children: ArrayReadChildren::new_boxed(array, index),
        })
    }
}
// Immutable child traversal over the (array, index) pair.
impl Children for ArrayReadChildren {
    fn children(&self) -> ChildrenIter {
        ChildrenIter::binary(&self.array, &self.index)
    }
}
// Mutable child traversal over the (array, index) pair.
impl ChildrenMut for ArrayReadChildren {
    fn children_mut(&mut self) -> ChildrenIterMut {
        ChildrenIterMut::binary(&mut self.array, &mut self.index)
    }
}
// Consuming child traversal over the (array, index) pair.
impl IntoChildren for ArrayReadChildren {
    fn into_children(self) -> IntoChildrenIter {
        IntoChildrenIter::binary(self.array, self.index)
    }
}
// The expression's type is the cached value type of the child array.
impl HasType for ArrayRead {
    fn ty(&self) -> Type {
        self.bitvec_ty.ty()
    }
}
impl HasKind for ArrayRead {
    fn kind(&self) -> ExprKind {
        ExprKind::ArrayRead
    }
}
// An array read always has exactly two children: array and index.
impl HasArity for ArrayRead {
    fn arity(&self) -> usize {
        2
    }
}
impl From<ArrayRead> for AnyExpr {
    fn from(array_read: ArrayRead) -> AnyExpr {
        AnyExpr::ArrayRead(array_read)
    }
}
// The iteration impls on `ArrayRead` forward to its children struct.
impl Children for ArrayRead {
    fn children(&self) -> ChildrenIter {
        self.children.children()
    }
}
impl ChildrenMut for ArrayRead {
    fn children_mut(&mut self) -> ChildrenIterMut {
        self.children.children_mut()
    }
}
impl IntoChildren for ArrayRead {
    fn into_children(self) -> IntoChildrenIter {
        self.children.into_children()
    }
}
|
use crate::distribution::{Discrete, DiscreteCDF};
use crate::statistics::*;
use crate::{Result, StatsError};
use rand::Rng;
/// Implements the [Discrete
/// Uniform](https://en.wikipedia.org/wiki/Discrete_uniform_distribution)
/// distribution
///
/// # Examples
///
/// ```
/// use statrs::distribution::{DiscreteUniform, Discrete};
/// use statrs::statistics::Distribution;
///
/// let n = DiscreteUniform::new(0, 5).unwrap();
/// assert_eq!(n.mean().unwrap(), 2.5);
/// assert_eq!(n.pmf(3), 1.0 / 6.0);
/// ```
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct DiscreteUniform {
    // Inclusive lower bound of the support.
    min: i64,
    // Inclusive upper bound of the support.
    max: i64,
}
impl DiscreteUniform {
    /// Constructs a new discrete uniform distribution with a minimum value
    /// of `min` and a maximum value of `max` (both inclusive).
    ///
    /// # Errors
    ///
    /// Returns an error if `max < min`
    ///
    /// # Examples
    ///
    /// ```
    /// use statrs::distribution::DiscreteUniform;
    ///
    /// let mut result = DiscreteUniform::new(0, 5);
    /// assert!(result.is_ok());
    ///
    /// result = DiscreteUniform::new(5, 0);
    /// assert!(result.is_err());
    /// ```
    pub fn new(min: i64, max: i64) -> Result<DiscreteUniform> {
        if min <= max {
            Ok(DiscreteUniform { min, max })
        } else {
            Err(StatsError::BadParams)
        }
    }
}
impl ::rand::distributions::Distribution<f64> for DiscreteUniform {
    /// Draws an integer uniformly from `min..=max` (both ends inclusive)
    /// and widens it to `f64`.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> f64 {
        rng.gen_range(self.min..=self.max) as f64
    }
}
impl DiscreteCDF<i64, f64> for DiscreteUniform {
    /// Calculates the cumulative distribution function for the
    /// discrete uniform distribution at `x`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (floor(x) - min + 1) / (max - min + 1)
    /// ```
    fn cdf(&self, x: i64) -> f64 {
        // Guard clauses for the two saturated tails.
        if x < self.min {
            return 0.0;
        }
        if x >= self.max {
            return 1.0;
        }
        // Arithmetic is done in f64 so wide bounds cannot overflow i64.
        let lo = self.min as f64;
        let hi = self.max as f64;
        let ratio = (x as f64 - lo + 1.0) / (hi - lo + 1.0);
        // Clamp any floating-point overshoot above 1.
        if ratio > 1.0 {
            1.0
        } else {
            ratio
        }
    }
}
impl Min<i64> for DiscreteUniform {
    /// Returns the minimum value in the domain of the discrete uniform
    /// distribution
    ///
    /// # Remarks
    ///
    /// This is the same value as the minimum passed into the constructor
    /// (the bound is inclusive).
    fn min(&self) -> i64 {
        self.min
    }
}
impl Max<i64> for DiscreteUniform {
    /// Returns the maximum value in the domain of the discrete uniform
    /// distribution
    ///
    /// # Remarks
    ///
    /// This is the same value as the maximum passed into the constructor
    /// (the bound is inclusive).
    fn max(&self) -> i64 {
        self.max
    }
}
impl Distribution<f64> for DiscreteUniform {
    /// Returns the mean of the discrete uniform distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (min + max) / 2
    /// ```
    fn mean(&self) -> Option<f64> {
        // Sum in f64: `self.min + self.max` in i64 overflows (panicking in
        // debug builds) for bounds near the i64 limits.
        Some((self.min as f64 + self.max as f64) / 2.0)
    }
    /// Returns the variance of the discrete uniform distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ((max - min + 1)^2 - 1) / 12
    /// ```
    fn variance(&self) -> Option<f64> {
        // Subtract in f64 for the same overflow reason as `mean`.
        let diff = self.max as f64 - self.min as f64;
        Some(((diff + 1.0) * (diff + 1.0) - 1.0) / 12.0)
    }
    /// Returns the entropy of the discrete uniform distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln(max - min + 1)
    /// ```
    fn entropy(&self) -> Option<f64> {
        // Subtract in f64 for the same overflow reason as `mean`.
        let diff = self.max as f64 - self.min as f64;
        Some((diff + 1.0).ln())
    }
    /// Returns the skewness of the discrete uniform distribution,
    /// which is always zero by symmetry
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 0
    /// ```
    fn skewness(&self) -> Option<f64> {
        Some(0.0)
    }
}
impl Median<f64> for DiscreteUniform {
    /// Returns the median of the discrete uniform distribution
    ///
    /// # Formula
    ///
    /// ```ignore
    /// (max + min) / 2
    /// ```
    fn median(&self) -> f64 {
        // Convert before adding: `min + max` in i64 can overflow (and panic
        // in debug builds) for bounds near the i64 limits.
        (self.min as f64 + self.max as f64) / 2.0
    }
}
impl Mode<Option<i64>> for DiscreteUniform {
    /// Returns the mode for the discrete uniform distribution
    ///
    /// # Remarks
    ///
    /// Since every element has an equal probability, mode simply
    /// returns the middle element
    ///
    /// # Formula
    ///
    /// ```ignore
    /// N/A // (max + min) / 2 for the middle element
    /// ```
    fn mode(&self) -> Option<i64> {
        // Widen to i128 so `min + max` cannot overflow, and use flooring
        // division (`div_euclid`) to match the previous float `.floor()`
        // behavior on negative sums — while staying exact for magnitudes
        // beyond f64's 2^53 integer range. The result is always between
        // `min` and `max`, so the narrowing cast back to i64 is lossless.
        Some((self.min as i128 + self.max as i128).div_euclid(2) as i64)
    }
}
impl Discrete<i64, f64> for DiscreteUniform {
    /// Calculates the probability mass function for the discrete uniform
    /// distribution at `x`
    ///
    /// # Remarks
    ///
    /// Returns `0.0` if `x` is not in `[min, max]`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// 1 / (max - min + 1)
    /// ```
    fn pmf(&self, x: i64) -> f64 {
        if (self.min..=self.max).contains(&x) {
            1.0 / (self.max - self.min + 1) as f64
        } else {
            0.0
        }
    }
    /// Calculates the log probability mass function for the discrete uniform
    /// distribution at `x`
    ///
    /// # Remarks
    ///
    /// Returns `f64::NEG_INFINITY` if `x` is not in `[min, max]`
    ///
    /// # Formula
    ///
    /// ```ignore
    /// ln(1 / (max - min + 1))
    /// ```
    fn ln_pmf(&self, x: i64) -> f64 {
        if (self.min..=self.max).contains(&x) {
            -((self.max - self.min + 1) as f64).ln()
        } else {
            f64::NEG_INFINITY
        }
    }
}
#[rustfmt::skip]
#[cfg(test)]
mod tests {
    use std::fmt::Debug;
    use crate::statistics::*;
    use crate::distribution::{DiscreteCDF, Discrete, DiscreteUniform};
    use crate::consts::ACC;

    // Construct a distribution that is expected to be valid, panicking
    // (failing the test) otherwise.
    fn try_create(min: i64, max: i64) -> DiscreteUniform {
        let n = DiscreteUniform::new(min, max);
        assert!(n.is_ok());
        n.unwrap()
    }

    // Valid construction must echo back the given bounds.
    fn create_case(min: i64, max: i64) {
        let n = try_create(min, max);
        assert_eq!(min, n.min());
        assert_eq!(max, n.max());
    }

    // Invalid bounds (max < min) must be rejected.
    fn bad_create_case(min: i64, max: i64) {
        let n = DiscreteUniform::new(min, max);
        assert!(n.is_err());
    }

    // Evaluate `eval` against a freshly created distribution.
    fn get_value<T, F>(min: i64, max: i64, eval: F) -> T
        where T: PartialEq + Debug,
              F: Fn(DiscreteUniform) -> T
    {
        let n = try_create(min, max);
        eval(n)
    }

    // Assert that `eval` on DiscreteUniform::new(min, max) equals `expected`.
    fn test_case<T, F>(min: i64, max: i64, expected: T, eval: F)
        where T: PartialEq + Debug,
              F: Fn(DiscreteUniform) -> T
    {
        let x = get_value(min, max, eval);
        assert_eq!(expected, x);
    }

    #[test]
    fn test_create() {
        create_case(-10, 10);
        create_case(0, 4);
        create_case(10, 20);
        // Degenerate single-point support is allowed.
        create_case(20, 20);
    }

    #[test]
    fn test_bad_create() {
        bad_create_case(-1, -2);
        bad_create_case(6, 5);
    }

    #[test]
    fn test_mean() {
        let mean = |x: DiscreteUniform| x.mean().unwrap();
        test_case(-10, 10, 0.0, mean);
        test_case(0, 4, 2.0, mean);
        test_case(10, 20, 15.0, mean);
        test_case(20, 20, 20.0, mean);
    }

    #[test]
    fn test_variance() {
        let variance = |x: DiscreteUniform| x.variance().unwrap();
        test_case(-10, 10, 36.66666666666666666667, variance);
        test_case(0, 4, 2.0, variance);
        test_case(10, 20, 10.0, variance);
        test_case(20, 20, 0.0, variance);
    }

    #[test]
    fn test_entropy() {
        let entropy = |x: DiscreteUniform| x.entropy().unwrap();
        test_case(-10, 10, 3.0445224377234229965005979803657054342845752874046093, entropy);
        test_case(0, 4, 1.6094379124341003746007593332261876395256013542685181, entropy);
        test_case(10, 20, 2.3978952727983705440619435779651292998217068539374197, entropy);
        test_case(20, 20, 0.0, entropy);
    }

    #[test]
    fn test_skewness() {
        let skewness = |x: DiscreteUniform| x.skewness().unwrap();
        test_case(-10, 10, 0.0, skewness);
        test_case(0, 4, 0.0, skewness);
        test_case(10, 20, 0.0, skewness);
        test_case(20, 20, 0.0, skewness);
    }

    #[test]
    fn test_median() {
        let median = |x: DiscreteUniform| x.median();
        test_case(-10, 10, 0.0, median);
        test_case(0, 4, 2.0, median);
        test_case(10, 20, 15.0, median);
        test_case(20, 20, 20.0, median);
    }

    #[test]
    fn test_mode() {
        let mode = |x: DiscreteUniform| x.mode().unwrap();
        test_case(-10, 10, 0, mode);
        test_case(0, 4, 2, mode);
        test_case(10, 20, 15, mode);
        test_case(20, 20, 20, mode);
    }

    #[test]
    fn test_pmf() {
        let pmf = |arg: i64| move |x: DiscreteUniform| x.pmf(arg);
        // 21 equally likely outcomes in [-10, 10] -> 1/21 each.
        test_case(-10, 10, 0.04761904761904761904762, pmf(-5));
        test_case(-10, 10, 0.04761904761904761904762, pmf(1));
        test_case(-10, 10, 0.04761904761904761904762, pmf(10));
        test_case(-10, -10, 0.0, pmf(0));
        test_case(-10, -10, 1.0, pmf(-10));
    }

    #[test]
    fn test_ln_pmf() {
        let ln_pmf = |arg: i64| move |x: DiscreteUniform| x.ln_pmf(arg);
        test_case(-10, 10, -3.0445224377234229965005979803657054342845752874046093, ln_pmf(-5));
        test_case(-10, 10, -3.0445224377234229965005979803657054342845752874046093, ln_pmf(1));
        test_case(-10, 10, -3.0445224377234229965005979803657054342845752874046093, ln_pmf(10));
        test_case(-10, -10, f64::NEG_INFINITY, ln_pmf(0));
        test_case(-10, -10, 0.0, ln_pmf(-10));
    }

    #[test]
    fn test_cdf() {
        let cdf = |arg: i64| move |x: DiscreteUniform| x.cdf(arg);
        test_case(-10, 10, 0.2857142857142857142857, cdf(-5));
        test_case(-10, 10, 0.5714285714285714285714, cdf(1));
        test_case(-10, 10, 1.0, cdf(10));
        test_case(-10, -10, 1.0, cdf(-10));
    }

    #[test]
    fn test_cdf_lower_bound() {
        let cdf = |arg: i64| move |x: DiscreteUniform| x.cdf(arg);
        test_case(0, 3, 0.0, cdf(-1));
    }

    #[test]
    fn test_cdf_upper_bound() {
        let cdf = |arg: i64| move |x: DiscreteUniform| x.cdf(arg);
        test_case(0, 3, 1.0, cdf(5));
    }
}
|
//! # Parsers for parts of the file
use crate::common::{
parser::{parse_crc_node, parse_u32_string},
CRCTreeNode,
};
use super::core::*;
use nom::{
bytes::complete::tag,
combinator::map_res,
multi::{fold_many_m_n, length_count},
number::complete::le_u32,
IResult,
};
use std::collections::BTreeMap;
use std::convert::TryFrom;
type FileRefData = CRCTreeNode<FileRef>;
/// Fold step used while parsing the file table: merges one parsed CRC tree
/// node into the accumulating map, keyed by the node's CRC value.
fn extend_map(mut map: BTreeMap<u32, FileRef>, data: FileRefData) -> BTreeMap<u32, FileRef> {
    let key = data.crc;
    let value = data.data;
    map.insert(key, value);
    map
}
/// Parse a single `FileRef`: two consecutive little-endian u32 values
/// (pack file id followed by category).
fn parse_file_ref(input: &[u8]) -> IResult<&[u8], FileRef> {
    let (rest, pack_file) = le_u32(input)?;
    let (rest, category) = le_u32(rest)?;
    let file_ref = FileRef { pack_file, category };
    Ok((rest, file_ref))
}
/// Parse one CRC-tree node whose payload is a `FileRef`
/// (delegates payload parsing to `parse_file_ref`).
fn parse_file_ref_node(input: &[u8]) -> IResult<&[u8], FileRefData> {
    parse_crc_node(parse_file_ref)(input)
}
/// Parse one pack-file reference: a length-prefixed (u32) string path.
fn parse_pack_file_ref(input: &[u8]) -> IResult<&[u8], PackFileRef> {
    let (input, path) = parse_u32_string(input)?;
    Ok((input, PackFileRef { path }))
}
// Expected file-format version (3) as little-endian bytes, used as a magic tag.
const LE_THREE: [u8; 4] = u32::to_le_bytes(3);
/// Parse a complete PKI file from an in-memory buffer
///
/// Layout: version tag (u32 == 3), u32-counted list of pack-file refs,
/// then a u32 file count followed by that many CRC/FileRef nodes which are
/// folded into a `BTreeMap` keyed by CRC.
pub fn parse_pki_file(input: &[u8]) -> IResult<&[u8], PackIndexFile> {
    let (input, _version) = tag(LE_THREE)(input)?;
    let (input, archives) = length_count(le_u32, parse_pack_file_ref)(input)?;
    // Checked conversion: a count that does not fit usize fails the parse
    // instead of panicking.
    let (input, file_count) = map_res(le_u32, usize::try_from)(input)?;
    let (input, files) = fold_many_m_n(
        file_count,
        file_count,
        parse_file_ref_node,
        BTreeMap::new,
        extend_map,
    )(input)?;
    Ok((input, PackIndexFile { archives, files }))
}
|
extern crate colored;
use colored::*;
mod impel;
/// Shell-prompt entry point: prints a single line assembled from the
/// `impel` helpers (hostname, working directory, VCS status, vim marker).
fn main() {
    println!(
        // "{hostname}: {cwd} {vcs}{vim}{pchar} ",
        "{hostname}: {cwd} {vcs}{vim}",
        hostname = impel::hostname().color("red").bold(),
        cwd = impel::working_directory(),
        vcs = impel::vcs().color("blue").bold(),
        vim = impel::vim(),
        // pchar = impel::pchar()
    );
}
|
// This file is part of Substrate.
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#![cfg(unix)]
use assert_cmd::cargo::cargo_bin;
use nix::sys::signal::{kill, Signal::SIGINT};
use nix::unistd::Pid;
use std::{convert::TryInto, process::Command};
use std::{
path::Path,
process::{Child, ExitStatus},
thread,
time::Duration,
};
/// Wait for the given `child` the given number of `secs`.
///
/// Returns the `Some(exit status)` or `None` if the process did not finish in the given time.
/// Polls once per second; after the timeout the child is killed and reaped.
pub fn wait_for(child: &mut Child, secs: usize) -> Option<ExitStatus> {
    for elapsed in 0..secs {
        if let Some(status) = child.try_wait().unwrap() {
            // Surface slow-but-successful shutdowns in the test log.
            if elapsed > 5 {
                eprintln!("Child process took {} seconds to exit gracefully", elapsed);
            }
            return Some(status);
        }
        thread::sleep(Duration::from_secs(1));
    }
    eprintln!("Took too long to exit (> {} seconds). Killing...", secs);
    // Best-effort kill, then reap to avoid leaving a zombie.
    let _ = child.kill();
    child.wait().unwrap();
    None
}
/// Run the node for a while (30 seconds)
///
/// Spawns `substrate --dev -d <base_path>`, lets it produce blocks, then
/// sends SIGINT and asserts it shuts down cleanly within 40 seconds.
pub fn run_dev_node_for_a_while(base_path: &Path) {
    let mut cmd = Command::new(cargo_bin("substrate"));
    let mut cmd = cmd.args(&["--dev"]).arg("-d").arg(base_path).spawn().unwrap();
    // Let it produce some blocks.
    thread::sleep(Duration::from_secs(30));
    // The node must still be alive before we ask it to stop.
    assert!(cmd.try_wait().unwrap().is_none(), "the process should still be running");
    // Stop the process
    kill(Pid::from_raw(cmd.id().try_into().unwrap()), SIGINT).unwrap();
    // A graceful SIGINT shutdown must complete with a success exit status.
    assert!(wait_for(&mut cmd, 40).map(|x| x.success()).unwrap_or_default());
}
|
use super::{ConfigAware, TestConfig};
// Placeholder for a standalone (non-containerized) test cluster; every
// member is still unimplemented (`todo!`).
pub struct StandaloneCluster {}
impl StandaloneCluster {
    // Start the standalone cluster. Not yet implemented.
    pub fn start() -> Self {
        todo!();
    }
}
impl ConfigAware for StandaloneCluster {
    // Produce the test configuration for this cluster. Not yet implemented.
    fn config(&self) -> TestConfig {
        todo!();
    }
}
impl Drop for StandaloneCluster {
    // Teardown hook for the cluster. Not yet implemented.
    fn drop(&mut self) {
        todo!();
    }
}
|
mod ivec2;
mod ivec3;
mod ivec4;
pub use ivec2::{ivec2, IVec2};
pub use ivec3::{ivec3, IVec3};
pub use ivec4::{ivec4, IVec4};
#[cfg(not(target_arch = "spirv"))]
mod test {
    use super::*;
    // Compile-time size/alignment guarantees for the integer vector types.
    // The "cuda" feature raises alignment so the types match CUDA's
    // aligned vector types; IVec3 has no such requirement.
    mod const_test_ivec2 {
        #[cfg(not(feature = "cuda"))]
        const_assert_eq!(
            core::mem::align_of::<i32>(),
            core::mem::align_of::<super::IVec2>()
        );
        #[cfg(feature = "cuda")]
        const_assert_eq!(8, core::mem::align_of::<super::IVec2>());
        const_assert_eq!(8, core::mem::size_of::<super::IVec2>());
    }
    mod const_test_ivec3 {
        const_assert_eq!(
            core::mem::align_of::<i32>(),
            core::mem::align_of::<super::IVec3>()
        );
        const_assert_eq!(12, core::mem::size_of::<super::IVec3>());
    }
    mod const_test_ivec4 {
        #[cfg(not(feature = "cuda"))]
        const_assert_eq!(
            core::mem::align_of::<i32>(),
            core::mem::align_of::<super::IVec4>()
        );
        #[cfg(feature = "cuda")]
        const_assert_eq!(16, core::mem::align_of::<super::IVec4>());
        const_assert_eq!(16, core::mem::size_of::<super::IVec4>());
    }
}
|
// NOTE(review): this is pre-1.0 Rust (`~str`, `println`, `fmt!`, crate-level
// `#[desc]`/`#[license]` attributes) and will not compile on any modern
// toolchain; kept verbatim as a historical stub.
#[desc = "Redis bindings."];
#[license = "MIT"];
// Stub client type; methods only log instead of talking to a server.
struct Redis;
// TODO: These types are all wrong, but they are all the types
// Wire-protocol reply variants (RESP): status line, error, integer,
// bulk string, multi-bulk.
enum Reply {
    Status(~str),
    Error(~str),
    Integer(int),
    Bulk(~str),
    MultiBulk(~str),
}
impl Redis {
    // Pretend to SET a key; only prints the arguments.
    fn set(&self, key: &str, value: &str) {
        println(fmt!("set: %? %?", key, value));
    }
    // Pretend to GET a key; always returns the placeholder "yeah".
    fn get(&self, key: &str) -> ~str {
        println(fmt!("get: %?", key));
        ~"yeah"
    }
}
|
use actix_web::{App, HttpServer};
use log::Level;
mod admin_meta;
mod config;
mod internal_meta;
mod public_transaction;
pub mod response;
#[actix_rt::main]
/// HTTP entry point: wires the database pool and config into the app state
/// and serves the certificate and transaction endpoints on 127.0.0.1:8808.
async fn main() -> std::io::Result<()> {
    //Initialize the log and set the print level
    simple_logger::init_with_level(Level::Warn).unwrap();
    HttpServer::new(|| {
        App::new()
            // Shared application state: DB handle and config paths.
            .data(config::get_db())
            .data(config::ConfigPath::default())
            // Admin certificate management endpoints.
            .service(admin_meta::new_cert)
            .service(admin_meta::update_cert)
            .service(admin_meta::get_cert)
            // Public and internal transaction endpoints.
            .service(public_transaction::digital_transaction)
            .service(internal_meta::digital_meta)
    })
    .bind("127.0.0.1:8808")?
    .run()
    .await
}
|
use std::ffi::{CStr, CString};
use std::os::raw::c_char;
use serde_json::{Value, json};
pub use stardog_function::*;
#[no_mangle]
/// FFI entry point called by the Stardog UDF host: receives a SPARQL-results
/// JSON document, checks whether binding `value_0` is all-numeric, and
/// returns a new results JSON document with a boolean literal.
///
/// NOTE(review): `subject` is assumed to be a valid, NUL-terminated UTF-8
/// C string owned by the caller; the returned pointer transfers ownership
/// to the caller (to be released via `CString::from_raw`). Malformed JSON
/// or a missing binding panics (`unwrap`) rather than returning an error.
pub extern fn evaluate(subject: *mut c_char) -> *mut c_char {
    let subject = unsafe { CStr::from_ptr(subject).to_str().unwrap() };
    let values: Value = serde_json::from_str(subject).unwrap();
    let value_0 = values["results"]["bindings"][0]["value_0"]["value"].as_str().unwrap();
    let result = json!({
        "head": {"vars":["value_0"]}, "results":{"bindings":[{"value_0":{"type":"literal","datatype": "http://www.w3.org/2001/XMLSchema#boolean", "value": is_string_numeric(value_0)}}]}
    }).to_string().into_bytes();
    // SAFETY: serde_json output is valid UTF-8 JSON and contains no NUL bytes.
    unsafe { CString::from_vec_unchecked(result) }.into_raw()
}
/// Returns `true` when every character of `s` satisfies `char::is_numeric`.
///
/// The empty string vacuously returns `true`, matching the original
/// loop-based behavior. Note this accepts any Unicode numeric character,
/// not just ASCII digits, and rejects signs/decimal points.
fn is_string_numeric(s: &str) -> bool {
    // Idiomatic replacement for the manual loop + early return; also renames
    // the parameter, which previously shadowed the `str` type name.
    s.chars().all(char::is_numeric)
}
#[no_mangle]
/// FFI entry point returning the function's usage text to the host.
/// Ownership of the returned C string transfers to the caller.
pub extern fn doc() -> *mut c_char {
    // Fix: "arguemnts" was a typo in the user-facing help text.
    let output = b"
function description here
arguments:
value_0:literal first string to compare
value_1:literal second string to compare
".to_vec();
    // SAFETY: the byte literal above contains no interior NUL bytes.
    unsafe { CString::from_vec_unchecked(output) }.into_raw()
}
#[no_mangle]
/// FFI entry point: echoes the host-provided estimate (binding `value_0`)
/// back as a two-binding result, tagging it as an "ACCURATE" cardinality.
///
/// NOTE(review): same ownership/validity assumptions as `evaluate` —
/// `subject` must be a valid NUL-terminated UTF-8 C string, and the
/// returned allocation is owned by the caller.
pub extern fn cardinality_estimate(subject: *mut c_char) -> *mut c_char {
    let subject = unsafe { CStr::from_ptr(subject).to_str().unwrap() };
    let values: Value = serde_json::from_str(subject).unwrap();
    let estimate = values["results"]["bindings"][0]["value_0"]["value"].as_str().unwrap();
    let result = json!({
        "head": {"vars":["value_0", "value_1"]}, "results":{"bindings":[
            {"value_0":{"type":"literal","value": estimate}, "value_1":{"type":"literal","value": "ACCURATE"}}
        ]}
    }).to_string().into_bytes();
    // SAFETY: serde_json output is valid UTF-8 JSON and contains no NUL bytes.
    unsafe { CString::from_vec_unchecked(result) }.into_raw()
}
use log::*;
use tokio::time::Duration;
use crate::network::message::{ClientMessage, Message, ServerMessage};
use crate::network::ws_event::WsEvent;
use crate::network::ws_server;
use crate::server::pong_server::PongServer;
use quicksilver::Timer;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
/// Server main loop: accepts websocket clients into a pre-lobby, assigns
/// them random 5-character names, promotes them into the pong game when
/// they send `EnterGame`, and ticks the game at the timer's rate.
pub async fn start() {
    // Best-effort logger init; ignore the error if a logger is already installed.
    let _ = env_logger::builder()
        .filter_module("webpong", log::LevelFilter::Trace)
        .try_init();
    let mut ws_server = ws_server::WebsocketServer::start()
        .await
        .expect("start ws server");
    // Clients that connected but have not yet asked to enter the game.
    let mut pre_lobby_players = vec![];
    // let mut players_in_game = vec![];
    let mut new_player_in_game = false; //set to true when a new player joins to signal gamestate sync
    let mut pong_server = PongServer::default();
    // Fixed-rate update clock; `tick()` fires for each elapsed update slot.
    let mut update_timer = Timer::time_per_second(100.0);
    //lobby
    loop {
        while update_timer.tick() {
            // next_tick = Instant::now() + Duration::from_millis(10);
            // Drain newly-connected clients, give each a random name, and
            // park them in the pre-lobby.
            while let Some(mut client) = ws_server.event_stream.next_event().await {
                info!("Client {} connected", client.id);
                let rand_string: String = thread_rng()
                    .sample_iter(&Alphanumeric)
                    .take(5)
                    .map(char::from)
                    .collect();
                client.name = Some(rand_string.clone());
                client
                    .send(&Message::ServerMessage(ServerMessage::SetName(rand_string)))
                    .await;
                pre_lobby_players.push(client);
            }
            // Process one buffered event per pre-lobby player. Returning
            // `None` from the closure drops the player from the pre-lobby
            // (either promoted into the game or disconnected).
            pre_lobby_players = pre_lobby_players
                .into_iter()
                .filter_map(|player| {
                    let event = player
                        .event_stream
                        .buffer()
                        .lock()
                        .expect("asd")
                        .pop_front();
                    if let Some(WsEvent::Error(e)) = event {
                        // Errors are logged but the player stays in the lobby.
                        warn!("Err: {:?}", e);
                        Some(player)
                    } else if let Some(WsEvent::Message(Message::ClientMessage(
                        ClientMessage::EnterGame,
                    ))) = event
                    {
                        // Hand the player over to the game server.
                        info!("Player {} entering game", player);
                        pong_server.add_player(player);
                        new_player_in_game = true;
                        None
                    } else if let Some(WsEvent::Closed) = event {
                        // Connection closed: silently drop the player.
                        None
                    } else {
                        Some(player)
                    }
                })
                .collect();
            // for player in &mut players_in_game {
            //     if new_player_in_game {
            //         player
            //             .send(&Message::ServerMessage(ServerMessage::GameState(
            //                 GameState::default(),
            //             )))
            //             .await;
            //     }
            // }
            // NOTE(review): the sync flag is currently reset without being
            // consumed anywhere (the consumer above is commented out).
            new_player_in_game = false;
            pong_server.tick().await;
            // info!("players, pre-game: {} game: {}", pre_lobby_players.len(), players_in_game.len());
        }
        // tokio::time::sleep_until(next_tick).await;
        // Short sleep so the busy-wait around the timer yields to the runtime.
        tokio::time::sleep(Duration::from_millis(1)).await;
    }
}
|
// Copyright (c) 2016, <daggerbot@gmail.com>
// This software is available under the terms of the zlib license.
// See COPYING.md for more information.
use std::any::Any;
use std::borrow::Cow;
use std::char::DecodeUtf16Error;
use std::error;
use std::ffi::NulError;
use std::fmt::{self, Display, Formatter};
use std::num::TryFromIntError;
use std::result;
use std::str::Utf8Error;
use std::sync::Arc;
use std::sync::mpsc::{RecvError, SendError, TryRecvError};
/// Terse macro which constructs an `Error`.
///
/// Arms, in order: kind only; kind + detail string; kind + format string;
/// kind + cause; kind + detail + cause; kind + format string + cause.
/// All paths delegate to `::error::err`, wrapping the cause in an `Arc`.
macro_rules! err {
    ($kind:ident) =>
        (::error::err(::error::ErrorKind::$kind,
                      None,
                      None));
    ($kind:ident ($detail:expr)) =>
        (::error::err(::error::ErrorKind::$kind,
                      Some(::std::borrow::Cow::from($detail)),
                      None));
    ($kind:ident ($fmt:expr, $($arg:expr),*)) =>
        (::error::err(::error::ErrorKind::$kind,
                      Some(::std::borrow::Cow::from(format!($fmt, $($arg),*))),
                      None));
    ($kind:ident : $cause:expr) =>
        (::error::err(::error::ErrorKind::$kind,
                      None,
                      Some(::std::sync::Arc::new($cause))));
    ($kind:ident ($detail:expr) : $cause:expr) =>
        (::error::err(::error::ErrorKind::$kind,
                      Some(::std::borrow::Cow::from($detail)),
                      Some(::std::sync::Arc::new($cause))));
    ($kind:ident ($fmt:expr, $($arg:expr),*) : $cause:expr) =>
        (::error::err(::error::ErrorKind::$kind,
                      Some(::std::borrow::Cow::from(format!($fmt, $($arg),*))),
                      Some(::std::sync::Arc::new($cause))));
}
/// Result type for `Error`.
pub type Result<T> = result::Result<T, Error>;
/// Enumeration of all error kinds that may occur within the library.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum ErrorKind {
    BadIntegerCast,
    ConnectionFailed,
    EncodingError,
    IncompatibleResource,
    InvalidArgument,
    IpcError,
    LibraryError,
    NoMatch,
    RequestFailed,
    ResourceExpired,
    SystemError,
}
impl ErrorKind {
    /// Static human-readable label for the kind; used as the error's
    /// `Display` prefix and `description()`.
    pub fn as_str (self) -> &'static str {
        match self {
            ErrorKind::BadIntegerCast => "bad integer cast",
            ErrorKind::ConnectionFailed => "connection failed",
            ErrorKind::EncodingError => "encoding error",
            ErrorKind::IncompatibleResource => "incompatible resource",
            ErrorKind::InvalidArgument => "invalid argument",
            ErrorKind::IpcError => "ipc error",
            ErrorKind::LibraryError => "library error",
            ErrorKind::NoMatch => "no match",
            ErrorKind::RequestFailed => "request failed",
            ErrorKind::ResourceExpired => "resource expired",
            ErrorKind::SystemError => "system error",
        }
    }
}
/// Function for constructing an `Error`.
/// Used by the `err!` macro.
///
/// The cause is stored as an `Arc` trait object so `Error` stays `Clone`.
pub fn err (kind: ErrorKind,
            detail: Option<Cow<'static, str>>,
            cause: Option<Arc<error::Error + 'static + Send + Sync>>)
    -> Error
{
    Error {
        kind: kind,
        detail: detail,
        cause: cause,
    }
}
/// Struct for all errors that may occur within the library.
///
/// Carries a machine-readable `kind`, an optional human-readable `detail`
/// string, and an optional shared underlying `cause`.
#[derive(Clone, Debug)]
pub struct Error {
    kind: ErrorKind,
    detail: Option<Cow<'static, str>>,
    cause: Option<Arc<error::Error + 'static + Send + Sync>>,
}
impl Error {
    /// Borrow the optional detail message, if one was attached.
    pub fn detail (&self) -> Option<&str> {
        match self.detail {
            Some(ref detail) => Some(detail.as_ref()),
            None => None,
        }
    }
    /// The broad category of this error.
    pub fn kind (&self) -> ErrorKind {
        self.kind
    }
}
impl Display for Error {
    // Renders as "<kind> (<detail>): <cause>", with the detail and cause
    // parts included only when present. (`try!` because this file predates
    // the `?` operator.)
    fn fmt (&self, f: &mut Formatter) -> fmt::Result {
        try!(f.write_str(self.kind.as_str()));
        if let Some(ref detail) = self.detail {
            try!(write!(f, " ({})", detail));
        }
        if let Some(ref cause) = self.cause {
            try!(write!(f, ": {}", *cause));
        }
        Ok(())
    }
}
impl error::Error for Error {
    // Expose the wrapped cause (if any) through the std error chain.
    fn cause (&self) -> Option<&error::Error> {
        match self.cause {
            Some(ref cause) => Some(cause.as_ref()),
            None => None,
        }
    }
    // Static description mirrors the kind label.
    fn description (&self) -> &str {
        self.kind.as_str()
    }
}
// Conversions from std error types, mapping each to the closest ErrorKind
// (text/encoding failures -> EncodingError, channel failures -> IpcError,
// numeric conversions -> BadIntegerCast) while preserving the cause.
impl From<DecodeUtf16Error> for Error {
    fn from (cause: DecodeUtf16Error) -> Error {
        err!(EncodingError: cause)
    }
}
impl From<NulError> for Error {
    fn from (cause: NulError) -> Error {
        err!(EncodingError: cause)
    }
}
impl From<RecvError> for Error {
    fn from (cause: RecvError) -> Error {
        err!(IpcError: cause)
    }
}
impl<T: Any + Send + Sync> From<SendError<T>> for Error {
    // `T: Any + Send + Sync` so the undelivered payload inside SendError can
    // be stored as a shareable cause.
    fn from (cause: SendError<T>) -> Error {
        err!(IpcError: cause)
    }
}
impl From<TryFromIntError> for Error {
    fn from (cause: TryFromIntError) -> Error {
        err!(BadIntegerCast: cause)
    }
}
impl From<TryRecvError> for Error {
    fn from (cause: TryRecvError) -> Error {
        err!(IpcError: cause)
    }
}
impl From<Utf8Error> for Error {
    fn from (cause: Utf8Error) -> Error {
        err!(EncodingError: cause)
    }
}
|
use std::fmt;
use std::convert::From;
use std::io;
use std::error::Error as StdError;
use std::result;
use nix;
pub type Result<T> = result::Result<T, Error>;
/// An error arising from terminal operations.
///
/// The lower-level cause of the error, if any, will be returned by calling `cause()`.
#[derive(Debug)]
pub struct Error {
    // Boxed trait object so any error type (or plain message) can be wrapped.
    err: Box<StdError + Send + Sync>,
}
impl Error {
    /// Wrap anything convertible into a boxed std error (including `&str`
    /// and `String` messages).
    pub fn new<E>(error: E) -> Error
        where E: Into<Box<StdError + Send + Sync>>
    {
        let err = error.into();
        Error {
            err: err,
        }
    }
}
impl StdError for Error {
    // Delegate description/cause straight to the wrapped error.
    fn description(&self) -> &str {
        self.err.description()
    }
    fn cause(&self) -> Option<&StdError> {
        self.err.cause()
    }
}
impl From<nix::Error> for Error {
    // nix errors are flattened to their errno description string; the
    // original nix::Error value is not retained as a cause.
    fn from(err: nix::Error) -> Self {
        Error::new(err.errno().desc())
    }
}
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Self {
        Error::new(err)
    }
}
impl fmt::Display for Error {
    // Display is the wrapped error's display.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.err.fmt(f)
    }
}
|
use std::num::ParseIntError;
use std::num::ParseFloatError;
// Unified error for parsing either integer or float literals; the `failure`
// derive forwards Display to the wrapped std parse error.
#[derive(Debug, Fail)]
pub enum ParseNumberError {
    #[fail(display = "{}", _0)]
    ParseIntError(ParseIntError),
    #[fail(display = "{}", _0)]
    ParseFloatError(ParseFloatError),
}
// `From` impls let `?` convert std parse errors directly.
impl From<ParseFloatError> for ParseNumberError {
    fn from(error: ParseFloatError) -> Self {
        ParseNumberError::ParseFloatError(error)
    }
}
impl From<ParseIntError> for ParseNumberError {
    fn from(error: ParseIntError) -> Self {
        ParseNumberError::ParseIntError(error)
    }
}
|
#![no_std]
#![feature(in_band_lifetimes)]
pub mod alarm;
pub mod console;
pub mod crc;
pub mod debug_writer;
pub mod isl29035;
pub mod lldb;
pub mod nrf51822;
pub mod process_console;
pub mod rng;
pub mod si7021;
pub mod spi;
|
use crate::commands::list::list_roots;
use crate::config::Config;
use crate::utils;
use dialoguer;
use std::path::PathBuf;
/// Stop tracking a directory of projects.
///
/// With no `path` the user is prompted to pick one of the tracked roots;
/// an explicit `path` is normalized against the current working directory.
/// Persists the config only when something was actually removed.
pub fn remove(path: Option<PathBuf>, mut config: Config) -> Result<(), failure::Error> {
    // Nothing to remove and nothing to prompt for: print usage help and bail.
    // (`is_none()` replaces the non-idiomatic `path == None` comparison.)
    if path.is_none() && config.paths.is_empty() {
        println!("You are not currently tracking any directories. You can track projects in a");
        println!("directory by using the `projects track` command:");
        println!();
        println!("    projects track ~/code/");
        println!();
        return Ok(());
    }
    let p = match path {
        Some(path) => path,
        None => select_root(&config)?,
    };
    let full_path = utils::normalize_path(&std::env::current_dir()?.join(p));
    let removed = config.paths.remove(&full_path);
    if removed {
        config.store()?;
        println!("No longer tracking {:?}", full_path);
    } else {
        println!("Not currently tracking {:?}", full_path);
    }
    Ok(())
}
/// Interactively pick one of the tracked root directories to remove.
///
/// Lists the sorted roots in a `dialoguer` select menu and returns the
/// chosen path. Errors propagate from root listing or terminal interaction.
pub fn select_root(config: &Config) -> Result<PathBuf, failure::Error> {
    println!("You are currently tracking projects in the following directories.");
    println!("Which would you like to remove?");
    println!("");
    let rootset = list_roots(config)?;
    // Stable, sorted presentation order so the menu is deterministic.
    let roots = utils::sort_set(rootset);
    let path_strings: Vec<String> = roots.iter().map(|root| {
        format!("{}", root.display())
    }).collect();
    let mut selector = dialoguer::Select::new();
    selector.items(&path_strings);
    selector.default(0);
    let result = selector.interact()?;
    Ok(roots[result].clone())
}
|
use crate::attestations::{attestations::AttestableBlock, *};
use crate::rewards_and_penalties::rewards_and_penalties::StakeholderBlock;
use helper_functions::beacon_state_accessors::*;
use helper_functions::{
beacon_state_accessors::{get_randao_mix, get_total_active_balance, get_validator_churn_limit},
beacon_state_mutators::*,
crypto::{bls_verify, hash, hash_tree_root, signed_root},
misc::compute_activation_exit_epoch,
predicates::is_active_validator,
};
use itertools::{Either, Itertools};
use ssz_types::VariableList;
use std::cmp;
use typenum::Unsigned as _;
use types::consts::*;
use types::primitives::*;
use types::primitives::{Gwei, ValidatorIndex};
use types::types::{Eth1Data, HistoricalBatch};
use types::{
beacon_state::*,
config::{Config, MainnetConfig},
types::{Checkpoint, PendingAttestation, Validator},
};
/// Run all per-epoch state transitions in their required order.
///
/// NOTE(review): the fallible sub-steps return `Result` but their errors are
/// discarded here — confirm this is intentional.
pub fn process_epoch<T: Config>(state: &mut BeaconState<T>) {
    process_justification_and_finalization(state);
    process_rewards_and_penalties(state);
    process_registry_updates(state);
    process_slashings(state);
    process_final_updates(state);
}
/// Update justification bits and the finalized checkpoint from the previous
/// and current epochs' target attestations (appears to follow the eth2
/// phase-0 Casper FFG rules — verify against the spec before changing).
fn process_justification_and_finalization<T: Config>(
    state: &mut BeaconState<T>,
) -> Result<(), Error> {
    // Skip the first two epochs: there is no history to justify yet.
    if get_current_epoch(state) <= T::genesis_epoch() + 1 {
        return Ok(());
    }
    let previous_epoch = get_previous_epoch(state);
    let current_epoch = get_current_epoch(state);
    // Snapshot the old checkpoints before shifting; the finality rules below
    // compare against these pre-update values.
    let old_previous_justified_checkpoint = state.previous_justified_checkpoint.clone();
    let old_current_justified_checkpoint = state.current_justified_checkpoint.clone();
    // Process justifications
    state.previous_justified_checkpoint = state.current_justified_checkpoint.clone();
    // Bit 0 becomes "current epoch"; older epochs shift up one slot.
    state.justification_bits.shift_up(1)?;
    //Previous epoch
    // A 2/3 supermajority of active balance attesting to the previous
    // epoch's target justifies it (bit 1).
    let matching_target_attestations = state.get_matching_target_attestations(previous_epoch);
    if state.get_attesting_balance(matching_target_attestations) * 3
        >= get_total_active_balance(state)? * 2
    {
        state.current_justified_checkpoint = Checkpoint {
            epoch: previous_epoch,
            root: get_block_root(state, previous_epoch)?,
        };
        state.justification_bits.set(1, true)?;
    }
    // Current epoch
    // Same supermajority rule for the current epoch's target (bit 0).
    let matching_target_attestations = state.get_matching_target_attestations(current_epoch);
    if state.get_attesting_balance(matching_target_attestations) * 3
        >= get_total_active_balance(state)? * 2
    {
        state.current_justified_checkpoint = Checkpoint {
            epoch: current_epoch,
            root: get_block_root(state, current_epoch)?,
        };
        state.justification_bits.set(0, true)?;
    }
    // The 2nd/3rd/4th most recent epochs are all justified, the 2nd using the 4th as source.
    if (1..4).all(|i| state.justification_bits.get(i).unwrap_or(false))
        && old_previous_justified_checkpoint.epoch + 3 == current_epoch
    {
        state.finalized_checkpoint = old_previous_justified_checkpoint;
    }
    // The 2nd/3rd most recent epochs are both justified, the 2nd using the 3rd as source.
    else if (1..3).all(|i| state.justification_bits.get(i).unwrap_or(false))
        && old_previous_justified_checkpoint.epoch + 2 == current_epoch
    {
        state.finalized_checkpoint = old_previous_justified_checkpoint;
    }
    // The 1st/2nd/3rd most recent epochs are all justified, the 1st using the 3nd as source.
    // (This is a separate `if`, not `else if`: a later rule may override an
    // earlier finalization within the same epoch.)
    if (0..3).all(|i| state.justification_bits.get(i).unwrap_or(false))
        && old_current_justified_checkpoint.epoch + 2 == current_epoch
    {
        state.finalized_checkpoint = old_current_justified_checkpoint;
    }
    // The 1st/2nd most recent epochs are both justified, the 1st using the 2nd as source.
    else if (0..2).all(|i| state.justification_bits.get(i).unwrap_or(false))
        && old_current_justified_checkpoint.epoch + 1 == current_epoch
    {
        state.finalized_checkpoint = old_current_justified_checkpoint;
    }
    Ok(())
}
/// Update the validator registry: mark newly eligible validators, eject
/// under-balance validators, and activate queued validators up to the
/// churn limit.
fn process_registry_updates<T: Config>(state: &mut BeaconState<T>) {
    // Snapshot for read-only queries while `state` is mutably borrowed below.
    let state_copy = state.clone();
    // Eligible for activation queue: never marked eligible before and at
    // the maximum effective balance.
    let is_eligible = |validator: &Validator| {
        validator.activation_eligibility_epoch == FAR_FUTURE_EPOCH
            && validator.effective_balance == T::max_effective_balance()
    };
    // Active but drained below the ejection balance: must be exited.
    let is_exiting_validator = |validator: &Validator| {
        is_active_validator(validator, get_current_epoch(&state_copy))
            && validator.effective_balance <= T::ejection_balance()
    };
    // Split matching validator indices into newly-eligible vs to-be-ejected.
    let (eligible, exiting): (Vec<_>, Vec<_>) = state
        .validators
        .iter_mut()
        .enumerate()
        .filter(|(_, validator)| is_eligible(validator) || is_exiting_validator(validator))
        .partition_map(|(i, validator)| {
            if is_eligible(validator) {
                Either::Left(i)
            } else {
                Either::Right(i)
            }
        });
    for index in eligible {
        state.validators[index].activation_eligibility_epoch = get_current_epoch(&state_copy);
    }
    for index in exiting {
        initiate_validator_exit(state, index as u64).unwrap();
    }
    // Queue validators eligible for activation and not dequeued for activation prior to finalized epoch
    let activation_queue = state
        .validators
        .iter()
        .enumerate()
        .filter(|(index, validator)| {
            validator.activation_eligibility_epoch != FAR_FUTURE_EPOCH
                && validator.activation_epoch
                    >= compute_activation_exit_epoch::<T>(state.finalized_checkpoint.epoch)
        })
        // FIFO by when each validator became eligible.
        .sorted_by_key(|(_, validator)| validator.activation_eligibility_epoch)
        .map(|(i, _)| i)
        .collect_vec();
    // Dequeued validators for activation up to churn limit (without resetting activation epoch)
    let churn_limit = get_validator_churn_limit(&state).unwrap();
    let delayed_activation_epoch =
        compute_activation_exit_epoch::<T>(get_current_epoch(state) as u64);
    for index in activation_queue.into_iter().take(churn_limit as usize) {
        let validator = &mut state.validators[index];
        // Only set the activation epoch if it has not been assigned already.
        if validator.activation_epoch == FAR_FUTURE_EPOCH {
            validator.activation_epoch = delayed_activation_epoch;
        }
    }
}
/// Apply per-validator attestation rewards and penalties for the epoch.
/// No-op at genesis, where there is no previous epoch to evaluate.
fn process_rewards_and_penalties<T: Config>(state: &mut BeaconState<T>) -> Result<(), Error> {
    if get_current_epoch(state) == T::genesis_epoch() {
        return Ok(());
    }
    // Deltas are computed per validator index, parallel to `state.validators`.
    let (rewards, penalties) = state.get_attestation_deltas();
    for index in 0..state.validators.len() {
        increase_balance(state, index as ValidatorIndex, rewards[index]).unwrap();
        decrease_balance(state, index as ValidatorIndex, penalties[index]).unwrap();
    }
    Ok(())
}
/// Apply correlated slashing penalties to slashed validators that have
/// reached the midpoint of their withdrawability delay.
fn process_slashings<T: Config>(state: &mut BeaconState<T>) {
    let epoch = get_current_epoch(state);
    let total_balance = get_total_active_balance(state).unwrap();
    // Clone so we can read validator data while mutating balances below.
    for (index, validator) in state.validators.clone().iter().enumerate() {
        if validator.slashed
            && epoch + T::EpochsPerSlashingsVector::U64 / 2 == validator.withdrawable_epoch
        {
            let increment = T::effective_balance_increment();
            // Sum over the whole slashings ring buffer; the 3x multiplier
            // scales the penalty with how correlated the slashings were.
            let slashings_sum = state.slashings.iter().sum::<u64>();
            let penalty_numerator = validator.effective_balance / increment
                * cmp::min(slashings_sum * 3, total_balance);
            // Integer division order keeps the penalty a multiple of `increment`.
            let penalty = penalty_numerator / total_balance * increment;
            decrease_balance(state, index as u64, penalty).unwrap();
        }
    }
}
/// End-of-epoch bookkeeping: reset eth1 votes, apply effective-balance
/// hysteresis, rotate the slashings and randao ring buffers, accumulate
/// historical roots, and rotate the epoch attestation lists.
fn process_final_updates<T: Config>(state: &mut BeaconState<T>) {
    let current_epoch = get_current_epoch(&state);
    let next_epoch = current_epoch + 1 as Epoch;
    //# Reset eth1 data votes
    if (state.slot + 1) % T::SlotsPerEth1VotingPeriod::U64 == 0 {
        state.eth1_data_votes = VariableList::from(vec![]);
    }
    //# Update effective balances with hysteresis
    // Only move the effective balance when the actual balance has drifted
    // past the hysteresis band, then round down to an increment multiple
    // and cap at the maximum.
    for (index, validator) in state.validators.iter_mut().enumerate() {
        let balance = state.balances[index];
        let half_increment = T::effective_balance_increment() / 2;
        if balance < validator.effective_balance
            || validator.effective_balance + 3 * half_increment < balance
        {
            validator.effective_balance = cmp::min(
                balance - balance % T::effective_balance_increment(),
                T::max_effective_balance(),
            );
        }
    }
    //# Reset slashings
    // Fix: the slashings ring buffer is EpochsPerSlashingsVector entries long
    // (see process_slashings); indexing it modulo EpochsPerHistoricalVector
    // cleared the wrong slot (or indexed out of bounds) whenever the two
    // lengths differ.
    state.slashings[(next_epoch % T::EpochsPerSlashingsVector::U64) as usize] = 0 as Gwei;
    //# Set randao mix
    state.randao_mixes[(next_epoch % T::EpochsPerHistoricalVector::U64) as usize] =
        get_randao_mix(&state, current_epoch).unwrap();
    //# Set historical root accumulator
    if next_epoch % (T::SlotsPerHistoricalRoot::U64 / T::SlotsPerEpoch::U64) == 0 {
        let historical_batch = HistoricalBatch::<T> {
            block_roots: state.block_roots.clone(),
            state_roots: state.state_roots.clone(),
        };
        state
            .historical_roots
            .push(hash_tree_root(&historical_batch))
            .unwrap();
    }
    //# Rotate current/previous epoch attestations
    state.previous_epoch_attestations = state.current_epoch_attestations.clone();
    state.current_epoch_attestations = VariableList::from(vec![]);
}
// #[cfg(test)]
// mod process_epoch_tests {
// use super::*;
// use mockall::mock;
// use types::{beacon_state::*, config::MainnetConfig};
// mock! {
// BeaconState<C: Config + 'static> {}
// trait BeaconStateAccessor {
// fn get_current_epoch(&self) -> Epoch;
// fn get_previous_epoch(&self) -> Epoch;
// fn get_block_root(&self, _epoch: Epoch) -> Result<H256, hfError>;
// }
// }
// // #[test]
// // fn test() {
// // // let mut bs: BeaconState<MainnetConfig> = BeaconState {
// // // ..BeaconState::default()
// // // };
// // let mut bs = MockBeaconState::<MainnetConfig>::new();
// // bs.expect_get_current_epoch().return_const(5_u64);
// // assert_eq!(5, bs.get_current_epoch());
// // }
// }
|
use crate::calendar::Calendar;
use crate::config::TConfig;
use crate::table::{Row, Table, TableState};
use std::cmp::Ordering;
use std::convert::TryInto;
use std::error::Error;
use std::process::Command;
use std::result::Result;
use task_hookrs::date::Date;
use task_hookrs::import::import;
use task_hookrs::status::TaskStatus;
use task_hookrs::task::Task;
use task_hookrs::uda::UDAValue;
use uuid::Uuid;
use chrono::{Datelike, Local, NaiveDate, NaiveDateTime, TimeZone};
use std::sync::{Arc, Mutex};
use std::{sync::mpsc, thread, time::Duration};
use tui::{
backend::Backend,
layout::{Alignment, Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style},
terminal::Frame,
text::{Span, Spans, Text},
widgets::{Block, BorderType, Borders, Clear, Paragraph},
};
use rustyline::error::ReadlineError;
use rustyline::line_buffer::LineBuffer;
use rustyline::At;
use rustyline::Editor;
use rustyline::Word;
use crate::util::Events;
use crate::util::Key;
use std::io::{self};
use tui::{backend::CrosstermBackend, Terminal};
const MAX_LINE: usize = 4096;
/// Order tasks by descending urgency (higher urgency sorts first).
/// Missing urgency is treated as 0.0; incomparable values (e.g. NaN)
/// fall back to `Ordering::Less`.
pub fn cmp(t1: &Task, t2: &Task) -> Ordering {
    let urgency1 = match t1.urgency() {
        Some(f) => *f,
        None => 0.0,
    };
    let urgency2 = match t2.urgency() {
        Some(f) => *f,
        None => 0.0,
    };
    // Arguments reversed so the sort is descending.
    urgency2.partial_cmp(&urgency1).unwrap_or(Ordering::Less)
}
// Coarse classification of a timestamp relative to "now", used for styling.
pub enum DateState {
    BeforeToday,
    EarlierToday,
    LaterToday,
    AfterToday,
}
/// Classify `reference` (a UTC naive datetime) against the current local
/// day and time.
pub fn get_date_state(reference: &Date) -> DateState {
    let now = Local::now();
    // Interpret both timestamps in the local offset so date comparisons are
    // calendar-day comparisons in local time.
    let reference = TimeZone::from_utc_datetime(now.offset(), reference);
    let now = TimeZone::from_utc_datetime(now.offset(), &now.naive_utc());
    if reference.date() < now.date() {
        return DateState::BeforeToday;
    }
    if reference.date() == now.date() {
        if reference.time() < now.time() {
            return DateState::EarlierToday;
        } else {
            return DateState::LaterToday;
        }
    }
    DateState::AfterToday
}
/// Render the span between two datetimes as a short human unit, e.g.
/// "3d", "2w", "5min", with a leading "-" when `to_dt` precedes `from_dt`.
/// Thresholds: >=365d -> years, >=90d -> months (of 30 days), >=14d ->
/// weeks, then days, hours, minutes, seconds.
pub fn vague_format_date_time(from_dt: NaiveDateTime, to_dt: NaiveDateTime) -> String {
    const MINUTE: i64 = 60;
    const HOUR: i64 = 60 * MINUTE;
    const DAY: i64 = 24 * HOUR;
    let delta = (to_dt - from_dt).num_seconds();
    // Fold the sign out so the bucketing below only sees a magnitude.
    let (minus, seconds) = if delta < 0 { ("-", -delta) } else { ("", delta) };
    if seconds >= 365 * DAY {
        format!("{}{}y", minus, seconds / DAY / 365)
    } else if seconds >= 90 * DAY {
        format!("{}{}mo", minus, seconds / DAY / 30)
    } else if seconds >= 14 * DAY {
        format!("{}{}w", minus, seconds / DAY / 7)
    } else if seconds >= DAY {
        format!("{}{}d", minus, seconds / DAY)
    } else if seconds >= HOUR {
        format!("{}{}h", minus, seconds / HOUR)
    } else if seconds >= MINUTE {
        format!("{}{}min", minus, seconds / MINUTE)
    } else {
        format!("{}{}s", minus, seconds)
    }
}
/// Compute a rectangle covering `percent_x` x `percent_y` of `r`, centered,
/// by splitting vertically then horizontally with symmetric margins.
/// Used to position popup dialogs.
fn centered_rect(percent_x: u16, percent_y: u16, r: Rect) -> Rect {
    let popup_layout = Layout::default()
        .direction(Direction::Vertical)
        .constraints(
            [
                Constraint::Percentage((100 - percent_y) / 2),
                Constraint::Percentage(percent_y),
                Constraint::Percentage((100 - percent_y) / 2),
            ]
            .as_ref(),
        )
        .split(r);
    // Split the middle vertical band horizontally; the center cell is the popup.
    Layout::default()
        .direction(Direction::Horizontal)
        .constraints(
            [
                Constraint::Percentage((100 - percent_x) / 2),
                Constraint::Percentage(percent_x),
                Constraint::Percentage((100 - percent_x) / 2),
            ]
            .as_ref(),
        )
        .split(popup_layout[1])[1]
}
// UI input mode: which view/prompt currently owns keyboard input.
pub enum AppMode {
    TaskReport,
    TaskFilter,
    TaskAdd,
    TaskAnnotate,
    TaskSubprocess,
    TaskLog,
    TaskModify,
    TaskHelpPopup,
    TaskError,
    Calendar,
}
// Tabular representation of the task report:
// `labels` are the column headers, `columns` the taskwarrior column names,
// `tasks` each task rendered as one string per column, and `virtual_tags`
// the taskwarrior-internal tags excluded from tag counts/lists.
pub struct TaskReportTable {
    pub labels: Vec<String>,
    pub columns: Vec<String>,
    pub tasks: Vec<Vec<String>>,
    virtual_tags: Vec<String>,
}
impl TaskReportTable {
/// Build an empty table and immediately load the report's column/label
/// configuration from `task show` (see `export_headers`).
///
/// Errors propagate from running the `task` binary or decoding its output.
pub fn new() -> Result<Self, Box<dyn Error>> {
    // Taskwarrior's built-in virtual tags; these are filtered out wherever
    // user tags are counted or displayed.
    let virtual_tags = vec![
        "PROJECT",
        "BLOCKED",
        "UNBLOCKED",
        "BLOCKING",
        "DUE",
        "DUETODAY",
        "TODAY",
        "OVERDUE",
        "WEEK",
        "MONTH",
        "QUARTER",
        "YEAR",
        "ACTIVE",
        "SCHEDULED",
        "PARENT",
        "CHILD",
        "UNTIL",
        "WAITING",
        "ANNOTATED",
        "READY",
        "YESTERDAY",
        "TOMORROW",
        "TAGGED",
        "PENDING",
        "COMPLETED",
        "DELETED",
        "UDA",
        "ORPHAN",
        "PRIORITY",
        "PROJECT",
        "LATEST",
    ];
    let mut task_report_table = Self {
        labels: vec![],
        columns: vec![],
        tasks: vec![vec![]],
        virtual_tags: virtual_tags.iter().map(|s| s.to_string()).collect::<Vec<_>>(),
    };
    task_report_table.export_headers()?;
    Ok(task_report_table)
}
/// (Re)load `self.columns` and `self.labels` from the user's taskwarrior
/// configuration by running `task show report.next.columns` /
/// `task show report.next.labels` and parsing the comma-separated values.
///
/// Errors propagate from spawning `task` or from non-UTF-8 output.
pub fn export_headers(&mut self) -> Result<(), Box<dyn Error>> {
    self.columns = vec![];
    self.labels = vec![];
    // Output lines look like "report.next.columns id,description,...".
    let output = Command::new("task").arg("show").arg("report.next.columns").output()?;
    let data = String::from_utf8(output.stdout)?;
    for line in data.split('\n') {
        if line.starts_with("report.next.columns") {
            // `nth(1)` instead of `split(' ')...[1]`: a matching line with
            // no value token (or padded with multiple spaces) no longer
            // panics or yields an empty token — it is handled gracefully.
            if let Some(column_names) = line.split_whitespace().nth(1) {
                for column in column_names.split(',') {
                    self.columns.push(column.to_string());
                }
            }
        }
    }
    let output = Command::new("task").arg("show").arg("report.next.labels").output()?;
    let data = String::from_utf8(output.stdout)?;
    for line in data.split('\n') {
        if line.starts_with("report.next.labels") {
            if let Some(label_names) = line.split_whitespace().nth(1) {
                for label in label_names.split(',') {
                    self.labels.push(label.to_string());
                }
            }
        }
    }
    Ok(())
}
/// Re-render `self.tasks`: one row per task, one cell per configured
/// column, each cell produced by `get_string_attribute`.
pub fn generate_table(&mut self, tasks: &[Task]) {
    self.tasks = tasks
        .iter()
        .map(|task| {
            self.columns
                .iter()
                .map(|column| self.get_string_attribute(column, task))
                .collect()
        })
        .collect();
}
/// Produce (rows, headers) with all-empty columns removed.
///
/// A column is "empty" when every task's cell in it is the empty string.
/// Returns `(vec![], vec![])` when there are no tasks at all.
pub fn simplify_table(&mut self) -> (Vec<Vec<String>>, Vec<String>) {
    if self.tasks.is_empty() {
        return (vec![], vec![]);
    }
    // Total cell length per column; zero means the column is empty everywhere.
    let mut content_totals = vec![0usize; self.tasks[0].len()];
    for row in &self.tasks {
        for (col, cell) in row.iter().enumerate() {
            content_totals[col] += cell.len();
        }
    }
    // Keep only columns with at least one non-empty cell.
    let rows: Vec<Vec<String>> = self
        .tasks
        .iter()
        .map(|row| {
            row.iter()
                .enumerate()
                .filter(|&(col, _)| content_totals[col] != 0)
                .map(|(_, cell)| cell.to_owned())
                .collect()
        })
        .collect();
    // Apply the same column mask to the header labels.
    let headers: Vec<String> = self
        .labels
        .iter()
        .enumerate()
        .filter(|&(col, _)| content_totals[col] != 0)
        .map(|(_, label)| label.to_owned())
        .collect();
    (rows, headers)
}
/// Stringify a single report cell: the value of `attribute` for `task`.
/// Unknown attribute names render as an empty string.
pub fn get_string_attribute(&self, attribute: &str, task: &Task) -> String {
match attribute {
"id" => task.id().unwrap_or_default().to_string(),
// Relative time from now until the due date (empty when no due date).
"due.relative" => match task.due() {
Some(v) => vague_format_date_time(Local::now().naive_utc(), NaiveDateTime::new(v.date(), v.time())),
None => "".to_string(),
},
// Age of the task since it was entered.
"entry.age" => vague_format_date_time(
NaiveDateTime::new(task.entry().date(), task.entry().time()),
Local::now().naive_utc(),
),
"start.age" => match task.start() {
Some(v) => vague_format_date_time(NaiveDateTime::new(v.date(), v.time()), Local::now().naive_utc()),
None => "".to_string(),
},
"project" => match task.project() {
Some(p) => p.to_string(),
None => "".to_string(),
},
// Number of dependencies; rendered empty when there are none.
"depends.count" => match task.depends() {
Some(v) => {
if v.is_empty() {
"".to_string()
} else {
format!("{}", v.len())
}
}
None => "".to_string(),
},
// Count of real (non-virtual) tags; rendered empty when zero.
"tags.count" => match task.tags() {
Some(v) => {
let t = v.iter().filter(|t| !self.virtual_tags.contains(t)).cloned().count();
if t == 0 {
"".to_string()
} else {
t.to_string()
}
}
None => "".to_string(),
},
// Comma-joined list of real (non-virtual) tags.
"tags" => match task.tags() {
Some(v) => v
.iter()
.filter(|t| !self.virtual_tags.contains(t))
.cloned()
.collect::<Vec<_>>()
.join(","),
None => "".to_string(),
},
// NOTE(review): "description.count" currently renders the full
// description, identical to "description" — confirm this is intended.
"description.count" => task.description().to_string(),
"description" => task.description().to_string(),
// Urgency with two decimals; "0.00" when taskwarrior reports none.
"urgency" => match &task.urgency() {
Some(f) => format!("{:.2}", *f),
None => "0.00".to_string(),
},
_ => "".to_string(),
}
}
}
/// Application state for the terminal UI.
pub struct TTApp {
// Set when the user quits; the main loop exits on it.
pub should_quit: bool,
// Table selection state for the task report.
pub state: TableState,
// Filter expression of the active taskwarrior context ("" when none).
pub context_filter: String,
// Name of the active taskwarrior context ("" when none).
pub context_name: String,
// Line buffer shared by add/annotate/log/shell inputs.
pub command: LineBuffer,
// Line buffer holding the task filter.
pub filter: LineBuffer,
// Line buffer for `task modify` input.
pub modify: LineBuffer,
// Last error message, displayed in TaskError mode.
pub error: String,
// Shared task list: written by `export_tasks`, read by the draw code.
pub tasks: Arc<Mutex<Vec<Task>>>,
// Stringified report rows derived from `tasks`.
pub task_report_table: TaskReportTable,
// Year currently shown in the calendar view.
pub calendar_year: i32,
// Current UI mode.
pub mode: AppMode,
// Parsed application configuration.
pub config: TConfig,
// When set, the detail panel is hidden and the report takes full height.
pub hide_task_detail: bool
}
impl TTApp {
/// Construct the application: load configuration, read the active
/// taskwarrior context, and perform an initial task export.
///
/// # Errors
/// Fails when configuration loading, the report-table setup, or any of
/// the `task` invocations fail.
pub fn new() -> Result<Self, Box<dyn Error>> {
let mut app = Self {
should_quit: false,
state: TableState::default(),
tasks: Arc::new(Mutex::new(vec![])),
context_filter: "".to_string(),
context_name: "".to_string(),
command: LineBuffer::with_capacity(MAX_LINE),
filter: LineBuffer::with_capacity(MAX_LINE),
modify: LineBuffer::with_capacity(MAX_LINE),
error: "".to_string(),
mode: AppMode::TaskReport,
config: TConfig::default()?,
task_report_table: TaskReportTable::new()?,
calendar_year: Local::today().year(),
hide_task_detail: false,
};
// Seed the filter line buffer with the default pending-tasks filter.
for c in "status:pending ".chars() {
app.filter.insert(c, 1);
}
app.get_context()?;
app.update()?;
Ok(app)
}
/// Query taskwarrior for the active context name and its filter
/// expression, storing them on `self`.
///
/// # Errors
/// Fails when the `task` binary cannot be run or prints non-UTF-8 output.
pub fn get_context(&mut self) -> Result<(), Box<dyn Error>> {
    let output = Command::new("task").arg("_get").arg("rc.context").output()?;
    let name = String::from_utf8(output.stdout)?;
    // Trim only the trailing newline. The old `.unwrap_or("")` discarded
    // the entire value whenever the output lacked a final newline.
    self.context_name = name.strip_suffix('\n').unwrap_or(&name).to_string();
    let output = Command::new("task")
        .arg("_get")
        .arg(format!("rc.context.{}", self.context_name))
        .output()?;
    let filter = String::from_utf8(output.stdout)?;
    self.context_filter = filter.strip_suffix('\n').unwrap_or(&filter).to_string();
    Ok(())
}
/// Top-level draw dispatch: every task-related mode renders the report
/// screen; calendar mode renders the calendar.
pub fn draw(&mut self, f: &mut Frame<impl Backend>) {
match self.mode {
AppMode::TaskReport
| AppMode::TaskFilter
| AppMode::TaskAdd
| AppMode::TaskAnnotate
| AppMode::TaskError
| AppMode::TaskHelpPopup
| AppMode::TaskSubprocess
| AppMode::TaskLog
| AppMode::TaskModify => self.draw_task(f),
AppMode::Calendar => self.draw_calendar(f),
}
}
/// Render the yearly calendar view, styling dates that have due tasks.
pub fn draw_calendar(&mut self, f: &mut Frame<impl Backend>) {
    let dates_with_styles = self.get_dates_with_styles();
    let rects = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Min(0)].as_ref())
        .split(f.size());
    // (The previous `let today = Local::today();` here was never used.)
    let c = Calendar::default()
        .block(
            Block::default()
                .title(Spans::from(vec![
                    Span::styled("Task", Style::default().add_modifier(Modifier::DIM)),
                    Span::from("|"),
                    Span::styled("Calendar", Style::default().add_modifier(Modifier::BOLD)),
                ]))
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded),
        )
        .year(self.calendar_year)
        .date_style(dates_with_styles)
        .months_per_row(self.config.uda_calendar_months_per_row);
    f.render_widget(c, rects[0]);
}
/// Collect `(due date, style)` pairs for every task that has a due date,
/// for the calendar view.
pub fn get_dates_with_styles(&self) -> Vec<(NaiveDate, Style)> {
    // Lock once (the old code took the lock three times and computed an
    // unused `tasks_len`); `filter_map` also removes the `unwrap`.
    let tasks = self.tasks.lock().unwrap();
    tasks
        .iter()
        .filter_map(|t| t.due().map(|d| (d.clone().date(), self.style_for_task(t))))
        .collect()
}
pub fn draw_task(&mut self, f: &mut Frame<impl Backend>) {
let tasks_is_empty = self.tasks.lock().unwrap().is_empty();
let tasks_len = self.tasks.lock().unwrap().len();
while !tasks_is_empty && self.state.selected().unwrap_or_default() >= tasks_len {
self.previous();
}
let rects = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Min(0), Constraint::Length(3)].as_ref())
.split(f.size());
if self.hide_task_detail {
let full_table_layout = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Percentage(100)].as_ref())
.split(rects[0]);
self.draw_task_report(f, full_table_layout[0]);
}
else {
let split_task_layout = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref())
.split(rects[0]);
self.draw_task_report(f, split_task_layout[0]);
self.draw_task_details(f, split_task_layout[1]);
}
let selected = self.state.selected().unwrap_or_default();
let task_id = if tasks_len == 0 {
0
} else {
self.tasks.lock().unwrap()[selected].id().unwrap_or_default()
};
match self.mode {
AppMode::TaskReport => self.draw_command(f, rects[1], self.filter.as_str(), "Filter Tasks"),
AppMode::TaskFilter => {
f.set_cursor(rects[1].x + self.filter.pos() as u16 + 1, rects[1].y + 1);
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.filter.as_str(),
Span::styled("Filter Tasks", Style::default().add_modifier(Modifier::BOLD)),
);
}
AppMode::TaskModify => {
f.set_cursor(rects[1].x + self.modify.pos() as u16 + 1, rects[1].y + 1);
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.modify.as_str(),
Span::styled(
format!("Modify Task {}", task_id).as_str(),
Style::default().add_modifier(Modifier::BOLD),
),
);
}
AppMode::TaskLog => {
f.set_cursor(rects[1].x + self.command.pos() as u16 + 1, rects[1].y + 1);
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.command.as_str(),
Span::styled("Log Tasks", Style::default().add_modifier(Modifier::BOLD)),
);
}
AppMode::TaskSubprocess => {
f.set_cursor(rects[1].x + self.command.pos() as u16 + 1, rects[1].y + 1);
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.command.as_str(),
Span::styled("Shell Command", Style::default().add_modifier(Modifier::BOLD)),
);
}
AppMode::TaskAnnotate => {
f.set_cursor(rects[1].x + self.command.pos() as u16 + 1, rects[1].y + 1);
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.command.as_str(),
Span::styled(
format!("Annotate Task {}", task_id).as_str(),
Style::default().add_modifier(Modifier::BOLD),
),
);
}
AppMode::TaskAdd => {
f.set_cursor(rects[1].x + self.command.pos() as u16 + 1, rects[1].y + 1);
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.command.as_str(),
Span::styled("Add Task", Style::default().add_modifier(Modifier::BOLD)),
);
}
AppMode::TaskError => {
f.render_widget(Clear, rects[1]);
self.draw_command(
f,
rects[1],
self.error.as_str(),
Span::styled("Error", Style::default().add_modifier(Modifier::BOLD)),
);
}
AppMode::TaskHelpPopup => {
self.draw_command(f, rects[1], self.filter.as_str(), "Filter Tasks");
self.draw_help_popup(f, f.size());
}
AppMode::Calendar => {
panic!("Reached unreachable code. Something went wrong");
}
}
}
fn draw_help_popup(&self, f: &mut Frame<impl Backend>, rect: Rect) {
let text = vec![
Spans::from(""),
Spans::from(vec![
Span::from(" /"),
Span::from(" "),
Span::styled(
"task {string} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Filter task report"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" a"),
Span::from(" "),
Span::styled(
"task add {string} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Add new task"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" A"),
Span::from(" "),
Span::styled(
"task annotate {string} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Annotate task"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" d"),
Span::from(" "),
Span::styled(
"task done {selected} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Mark task as done"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" e"),
Span::from(" "),
Span::styled(
"task edit {selected} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Open selected task in editor"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" j"),
Span::from(" "),
Span::styled(
"{selected+=1} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Move down in task report"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" k"),
Span::from(" "),
Span::styled(
"{selected-=1} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Move up in task report"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" l"),
Span::from(" "),
Span::styled(
"task log {string} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Log new task"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" m"),
Span::from(" "),
Span::styled(
"task modify {string} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Modify selected task"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" q"),
Span::from(" "),
Span::styled(
"exit ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Quit"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" s"),
Span::from(" "),
Span::styled(
"task start/stop {selected} ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Toggle start and stop"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" u"),
Span::from(" "),
Span::styled(
"task undo ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Undo"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" x"),
Span::from(" "),
Span::styled(
"task delete ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Delete task"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" ?"),
Span::from(" "),
Span::styled(
"help ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Show this help menu"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" !"),
Span::from(" "),
Span::styled(
"shell ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Custom shell command"),
]),
Spans::from(""),
Spans::from(vec![
Span::from(" v"),
Span::from(" "),
Span::styled(
"toggle details ",
Style::default().add_modifier(Modifier::BOLD),
),
Span::from(" "),
Span::from("- Toggle task detail panel"),
]),
Spans::from(""),
];
let paragraph = Paragraph::new(text)
.block(
Block::default()
.title(Span::styled("Help", Style::default().add_modifier(Modifier::BOLD)))
.borders(Borders::ALL)
.border_type(BorderType::Rounded),
)
.alignment(Alignment::Left);
let area = centered_rect(80, 90, rect);
f.render_widget(Clear, area);
f.render_widget(paragraph, area);
}
/// Render `text` inside a rounded, bordered box titled `title`.
fn draw_command<'a, T>(&self, f: &mut Frame<impl Backend>, rect: Rect, text: &str, title: T)
where
    T: Into<Spans<'a>>,
{
    let frame_block = Block::default()
        .borders(Borders::ALL)
        .border_type(BorderType::Rounded)
        .title(title.into());
    let paragraph = Paragraph::new(Text::from(text)).block(frame_block);
    f.render_widget(paragraph, rect);
}
/// Render the detail panel for the selected task by shelling out to
/// `task <id>` and showing its textual report.
fn draw_task_details(&mut self, f: &mut Frame<impl Backend>, rect: Rect) {
    if self.tasks.lock().unwrap().is_empty() {
        f.render_widget(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title("Task not found"),
            rect,
        );
        return;
    }
    let selected = self.state.selected().unwrap_or_default();
    let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
    let output = Command::new("task").arg(format!("{}", task_id)).output();
    if let Ok(output) = output {
        // `unwrap_or_else` keeps the fallback message from being formatted
        // (and allocated) on the common success path.
        let data = String::from_utf8(output.stdout).unwrap_or_else(|_| {
            format!(
                "Unable to get description of task with id: {}. Please report as an issue on github.",
                task_id
            )
        });
        let p = Paragraph::new(Text::from(&data[..])).block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(format!("Task {}", task_id)),
        );
        f.render_widget(p, rect);
    }
}
/// Clone of the task at report row `i`, or `None` when out of bounds.
fn task_by_index(&self, i: usize) -> Option<Task> {
    // The old check used `i > tasks.len()`, which let `i == len` through
    // and panicked on the subsequent index; `get` handles the bound.
    self.tasks.lock().unwrap().get(i).cloned()
}
/// Clone of the task with the given UUID, or `None` when absent.
fn task_by_uuid(&self, uuid: Uuid) -> Option<Task> {
    let tasks = self.tasks.lock().unwrap();
    // `.cloned()` replaces the manual `match Some/None` re-wrap.
    tasks.iter().find(|t| *t.uuid() == uuid).cloned()
}
/// Pick the display style for a task from the first matching rule in the
/// configured colour-precedence list.
fn style_for_task(&self, task: &Task) -> Style {
let virtual_tag_names_in_precedence = &self.config.rule_precedence_color;
let mut style = Style::default();
for tag_name in virtual_tag_names_in_precedence {
// Rule names appear dotted in config (e.g. "due.today"); the matching
// virtual tag is the undotted upper-case form ("DUETODAY").
if task
.tags()
.unwrap_or(&vec![])
.contains(&tag_name.to_string().replace(".", "").to_uppercase())
{
let color_tag_name = format!("color.{}", tag_name);
let c = self.config.color.get(&color_tag_name).cloned().unwrap_or_default();
style = style.fg(c.fg).bg(c.bg);
for modifier in c.modifiers {
style = style.add_modifier(modifier);
}
// First match wins: earlier rules take precedence.
break;
}
}
style
}
/// Render the task report table, sizing columns to their content and
/// highlighting the selected row.
fn draw_task_report(&mut self, f: &mut Frame<impl Backend>, rect: Rect) {
    let (tasks, headers) = self.task_report();
    if tasks.is_empty() {
        // Nothing matched the filter: draw just the titled border.
        let mut style = Style::default();
        match self.mode {
            AppMode::TaskReport => style = style.add_modifier(Modifier::BOLD),
            _ => style = style.add_modifier(Modifier::DIM),
        }
        f.render_widget(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Spans::from(vec![
                    Span::styled("Task", style),
                    Span::from("|"),
                    Span::styled("Calendar", Style::default().add_modifier(Modifier::DIM)),
                ])),
            rect,
        );
        return;
    }
    let maximum_column_width = rect.width as i16 / tasks[0].len() as i16;
    let mut description_column_width = rect.width as i16 / tasks[0].len() as i16;
    let mut description_column_index = 0;
    // set widths proportional to the content
    let mut widths: Vec<i16> = vec![0; tasks[0].len()];
    for (i, header) in headers.iter().enumerate() {
        widths[i] = header.len() as i16 + 2;
        if header != "Description" {
            // Width freed up by narrow columns is donated to the description.
            description_column_width += std::cmp::max(0, maximum_column_width - widths[i]);
        } else {
            description_column_index = i;
        }
    }
    // Extra space in front of the first column for the highlight symbol.
    widths[0] += 1;
    // (Unused closure bindings replaced with `_` to silence warnings.)
    let sum_of_remaining_widths: i16 = widths
        .iter()
        .enumerate()
        .filter(|(i, _)| i != &description_column_index)
        .map(|(_, w)| w)
        .sum();
    if description_column_width + sum_of_remaining_widths >= rect.width as i16 - 4 {
        description_column_width = rect.width as i16 - sum_of_remaining_widths - 10;
    }
    for task in &tasks {
        for (i, attr) in task.iter().enumerate() {
            if i == description_column_index {
                widths[i] = std::cmp::max(
                    widths[i],
                    std::cmp::min(attr.len() as i16 + 2, description_column_width),
                );
            } else {
                widths[i] = std::cmp::max(widths[i], std::cmp::min(attr.len() as i16, maximum_column_width));
            }
        }
    }
    let selected = self.state.selected().unwrap_or_default();
    let header = headers.iter();
    let mut rows = vec![];
    let mut highlight_style = Style::default();
    for (i, task) in tasks.iter().enumerate() {
        let style = self.style_for_task(&self.tasks.lock().unwrap()[i]);
        if i == selected {
            // The selected row keeps its task style plus the configured
            // selection modifiers.
            highlight_style = style;
            if self.config.uda_selection_bold {
                highlight_style = highlight_style.add_modifier(Modifier::BOLD);
            }
            if self.config.uda_selection_italic {
                highlight_style = highlight_style.add_modifier(Modifier::ITALIC);
            }
            if self.config.uda_selection_dim {
                highlight_style = highlight_style.add_modifier(Modifier::DIM);
            }
            if self.config.uda_selection_blink {
                highlight_style = highlight_style.add_modifier(Modifier::SLOW_BLINK);
            }
        }
        rows.push(Row::StyledData(task.iter(), style));
    }
    let constraints: Vec<Constraint> = widths
        .iter()
        .map(|i| Constraint::Min((*i).try_into().unwrap_or(10)))
        .collect();
    let mut style = Style::default();
    match self.mode {
        AppMode::TaskReport => style = style.add_modifier(Modifier::BOLD),
        _ => style = style.add_modifier(Modifier::DIM),
    }
    let t = Table::new(header, rows.into_iter())
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Spans::from(vec![
                    Span::styled("Task", style),
                    Span::from("|"),
                    Span::styled("Calendar", Style::default().add_modifier(Modifier::DIM)),
                ])),
        )
        .highlight_style(highlight_style)
        .highlight_symbol(&self.config.uda_selection_indicator)
        .widths(&constraints);
    f.render_stateful_widget(t, rect, &mut self.state);
}
/// Regenerate the report table from the current task list and return the
/// simplified `(rows, headers)` pair.
pub fn task_report(&mut self) -> (Vec<Vec<String>>, Vec<String>) {
    let guard = self.tasks.lock().unwrap();
    self.task_report_table.generate_table(&guard);
    drop(guard);
    self.task_report_table.simplify_table()
}
/// Re-export tasks from taskwarrior and recompute their virtual tags.
///
/// # Errors
/// Propagates failures from `export_tasks` (running `task export` or
/// parsing its JSON output).
pub fn update(&mut self) -> Result<(), Box<dyn Error>> {
self.export_tasks()?;
self.update_tags();
Ok(())
}
/// Move the selection one row down, wrapping to the top at the end.
/// No-op when the task list is empty.
pub fn next(&mut self) {
    let len = self.tasks.lock().unwrap().len();
    if len == 0 {
        return;
    }
    let next_index = match self.state.selected() {
        Some(i) if i >= len - 1 => 0,
        Some(i) => i + 1,
        None => 0,
    };
    self.state.select(Some(next_index));
}
/// Move the selection one row up, wrapping to the bottom at the top.
/// No-op when the task list is empty.
pub fn previous(&mut self) {
    let len = self.tasks.lock().unwrap().len();
    if len == 0 {
        return;
    }
    let prev_index = match self.state.selected() {
        Some(0) => len - 1,
        Some(i) => i - 1,
        None => 0,
    };
    self.state.select(Some(prev_index));
}
/// Run `task export` with the current filter (plus the active context
/// filter, when set) and replace the in-memory task list with the result.
///
/// # Errors
/// Fails when the `task` binary cannot be run, prints non-UTF-8 output,
/// or the JSON export cannot be parsed.
pub fn export_tasks(&mut self) -> Result<(), Box<dyn Error>> {
    let mut task = Command::new("task");
    task.arg("rc.json.array=on");
    task.arg("export");
    // `is_empty` replaces the obfuscated `self.context_filter != *""`.
    let filter = if self.context_filter.is_empty() {
        self.filter.as_str().into()
    } else {
        format!("{} {}", self.filter.as_str(), self.context_filter)
    };
    match shlex::split(&filter) {
        Some(cmd) => {
            for s in cmd {
                task.arg(&s);
            }
        }
        None => {
            task.arg("");
        }
    }
    let output = task.output()?;
    let data = String::from_utf8(output.stdout)?;
    let imported = import(data.as_bytes())?;
    // Assign and sort under a single lock acquisition.
    let mut tasks = self.tasks.lock().unwrap();
    *tasks = imported;
    tasks.sort_by(cmp);
    Ok(())
}
/// Run the user-entered shell command (first word is the binary, the
/// rest are its arguments) and clear the command buffer on success.
///
/// # Errors
/// Returns a user-facing message when the command line cannot be split
/// or the process cannot be launched.
pub fn task_subprocess(&mut self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let shell = self.command.as_str().replace("'", "\\'");
    match shlex::split(&shell) {
        Some(cmd) => match cmd.split_first() {
            // `split_first` replaces the old `cmd[0]` index, which panicked
            // when shlex produced an empty vector (blank command line).
            Some((program, args)) => {
                let mut command = Command::new(program);
                command.args(args);
                match command.output() {
                    Ok(_) => {
                        self.command.update("", 0);
                        Ok(())
                    }
                    Err(_) => Err(format!("Shell command `{}` exited with non-zero output", shell,)),
                }
            }
            None => Err(format!("Cannot run subprocess. Unable to shlex split `{}`", shell)),
        },
        None => Err(format!("Cannot run subprocess. Unable to shlex split `{}`", shell)),
    }
}
/// Run `task log <args>` with the contents of the command buffer,
/// clearing the buffer on success.
pub fn task_log(&mut self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let shell = self.command.as_str().replace("'", "\\'");
    let cmd = match shlex::split(&shell) {
        Some(cmd) => cmd,
        None => {
            return Err(format!(
                "Unable to run `task log`. Cannot shlex split `{}`",
                shell.as_str()
            ))
        }
    };
    let mut command = Command::new("task");
    command.arg("log");
    command.args(&cmd);
    match command.output() {
        Ok(_) => {
            self.command.update("", 0);
            Ok(())
        }
        Err(_) => Err(format!(
            "Cannot run `task log {}`. Check documentation for more information",
            shell
        )),
    }
}
/// Run `task <id> modify <args>` for the selected task with the contents
/// of the modify buffer, clearing the buffer on success.
pub fn task_modify(&mut self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let selected = self.state.selected().unwrap_or_default();
    let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
    let shell = self.modify.as_str().replace("'", "\\'");
    let cmd = match shlex::split(&shell) {
        Some(cmd) => cmd,
        None => {
            return Err(format!(
                "Unable to run `task {} modify`. Cannot shlex split `{}`",
                task_id, shell,
            ))
        }
    };
    let mut command = Command::new("task");
    command.arg(format!("{}", task_id)).arg("modify").args(&cmd);
    match command.output() {
        Ok(_) => {
            self.modify.update("", 0);
            Ok(())
        }
        Err(_) => Err(format!(
            "Cannot run `task {} modify {}`. Check documentation for more information",
            task_id, shell,
        )),
    }
}
/// Run `task <id> annotate <args>` for the selected task with the
/// contents of the command buffer, clearing the buffer on success.
pub fn task_annotate(&mut self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let selected = self.state.selected().unwrap_or_default();
    let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
    let shell = self.command.as_str().replace("'", "\\'");
    let cmd = match shlex::split(&shell) {
        Some(cmd) => cmd,
        None => {
            return Err(format!(
                "Unable to run `task {} annotate`. Cannot shlex split `{}`",
                task_id, shell
            ))
        }
    };
    let mut command = Command::new("task");
    command.arg(format!("{}", task_id)).arg("annotate").args(&cmd);
    match command.output() {
        Ok(_) => {
            self.command.update("", 0);
            Ok(())
        }
        Err(_) => Err(format!(
            "Cannot run `task {} annotate {}`. Check documentation for more information",
            task_id, shell
        )),
    }
}
/// Run `task add <args>` with the contents of the command buffer,
/// clearing the buffer on success.
pub fn task_add(&mut self) -> Result<(), String> {
    let shell = self.command.as_str().replace("'", "\\'");
    let cmd = match shlex::split(&shell) {
        Some(cmd) => cmd,
        None => return Err(format!("Unable to run `task add`. Cannot shlex split `{}`", shell)),
    };
    let mut command = Command::new("task");
    command.arg("add").args(&cmd);
    match command.output() {
        Ok(_) => {
            self.command.update("", 0);
            Ok(())
        }
        Err(_) => Err(format!(
            "Cannot run `task add {}`. Check documentation for more information",
            shell
        )),
    }
}
/// Fetch the "Virtual tags" line from `task <id>` output and return its
/// value (a space-separated tag list).
///
/// # Errors
/// Returns a user-facing message when the command cannot run or no
/// "Virtual tags" line is present.
pub fn task_virtual_tags(task_id: u64) -> Result<String, String> {
    let output = Command::new("task").arg(format!("{}", task_id)).output();
    match output {
        Ok(output) => {
            let data = String::from_utf8(output.stdout).unwrap_or_default();
            for line in data.split('\n') {
                if line.starts_with("Virtual tags") {
                    // `replace` works on &str directly; the old intermediate
                    // `to_string()` was a wasted allocation.
                    return Ok(line.replace("Virtual tags", ""));
                }
            }
            Err(format!(
                "Cannot find any tags for `task {}`. Check documentation for more information",
                task_id
            ))
        }
        Err(_) => Err(format!(
            "Cannot run `task {}`. Check documentation for more information",
            task_id
        )),
    }
}
/// Toggle the selected task: `task <id> stop` when it carries the ACTIVE
/// virtual tag, `task <id> start` otherwise.
pub fn task_start_or_stop(&mut self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let selected = self.state.selected().unwrap_or_default();
    let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
    let is_active = TTApp::task_virtual_tags(task_id)?
        .split(' ')
        .any(|tag| tag == "ACTIVE");
    let command = if is_active { "stop" } else { "start" };
    let output = Command::new("task").arg(format!("{}", task_id)).arg(command).output();
    match output {
        Ok(_) => Ok(()),
        Err(_) => Err(format!(
            "Cannot run `task {}` for task `{}`. Check documentation for more information",
            command, task_id,
        )),
    }
}
/// Delete the selected task via `task rc.confirmation=off <id> delete`.
pub fn task_delete(&self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let selected = self.state.selected().unwrap_or_default();
    let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
    Command::new("task")
        .arg("rc.confirmation=off")
        .arg(format!("{}", task_id))
        .arg("delete")
        .output()
        .map(|_| ())
        .map_err(|_| {
            format!(
                "Cannot run `task delete` for task `{}`. Check documentation for more information",
                task_id
            )
        })
}
/// Mark the selected task as done via `task <id> done`.
pub fn task_done(&mut self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    let selected = self.state.selected().unwrap_or_default();
    let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
    Command::new("task")
        .arg(format!("{}", task_id))
        .arg("done")
        .output()
        .map(|_| ())
        .map_err(|_| {
            format!(
                "Cannot run `task done` for task `{}`. Check documentation for more information",
                task_id
            )
        })
}
/// Undo the last taskwarrior change via `task rc.confirmation=off undo`.
pub fn task_undo(&self) -> Result<(), String> {
    if self.tasks.lock().unwrap().is_empty() {
        return Ok(());
    }
    Command::new("task")
        .arg("rc.confirmation=off")
        .arg("undo")
        .output()
        .map(|_| ())
        .map_err(|_| "Cannot run `task undo`. Check documentation for more information".to_string())
}
/// Open the selected task in the user's editor via `task <id> edit`.
///
/// The spawned child inherits stdio, so the editor takes over the
/// terminal; the caller is expected to pause the TUI event loop around
/// this call.
pub fn task_edit(&self) -> Result<(), String> {
if self.tasks.lock().unwrap().is_empty() {
return Ok(());
}
let selected = self.state.selected().unwrap_or_default();
let task_id = self.tasks.lock().unwrap()[selected].id().unwrap_or_default();
let r = Command::new("task").arg(format!("{}", task_id)).arg("edit").spawn();
match r {
Ok(child) => {
// Block until the editor exits, capturing anything it printed.
let output = child.wait_with_output();
match output {
Ok(output) => {
if !output.status.success() {
Err(format!(
"`task edit` for task `{}` failed. {}{}",
task_id,
String::from_utf8(output.stdout).unwrap_or_default(),
String::from_utf8(output.stderr).unwrap_or_default()
))
} else {
Ok(())
}
}
Err(err) => Err(format!("Cannot run `task edit` for task `{}`. {}", task_id, err)),
}
}
_ => Err(format!(
"Cannot start `task edit` for task `{}`. Check documentation for more information",
task_id
)),
}
}
/// Clone of the currently selected task, or `None` when the list is empty.
pub fn task_current(&self) -> Option<Task> {
    let selected = self.state.selected().unwrap_or_default();
    // `get` also guards against a stale selection index pointing past the
    // end of the list, where the old direct index would panic.
    self.tasks.lock().unwrap().get(selected).cloned()
}
/// Recompute taskwarrior-style virtual tags (BLOCKED, ACTIVE, DUE, ...)
/// for every task in the in-memory list.
pub fn update_tags(&mut self) {
    let tasks = &mut *self.tasks.lock().unwrap();
    // dependency scan: an incomplete task that depends on another
    // incomplete task is BLOCKED, and the dependency is BLOCKING.
    for l_i in 0..tasks.len() {
        let default_deps = vec![];
        let deps = tasks[l_i].depends().unwrap_or(&default_deps).clone();
        for dep in deps {
            for r_i in 0..tasks.len() {
                if tasks[r_i].uuid() == &dep {
                    let lstatus = tasks[l_i].status();
                    let rstatus = tasks[r_i].status();
                    if lstatus != &TaskStatus::Completed
                        && lstatus != &TaskStatus::Deleted
                        && rstatus != &TaskStatus::Completed
                        && rstatus != &TaskStatus::Deleted
                    {
                        add_tag(&mut tasks[l_i], "BLOCKED".to_string());
                        add_tag(&mut tasks[r_i], "BLOCKING".to_string());
                    }
                    break;
                }
            }
        }
    }
    // other virtual tags
    // TODO: support all virtual tags that taskwarrior supports
    for mut task in tasks.iter_mut() {
        match task.status() {
            TaskStatus::Waiting => add_tag(&mut task, "WAITING".to_string()),
            TaskStatus::Completed => add_tag(&mut task, "COMPLETED".to_string()),
            TaskStatus::Pending => add_tag(&mut task, "PENDING".to_string()),
            TaskStatus::Deleted => add_tag(&mut task, "DELETED".to_string()),
            TaskStatus::Recurring => (),
        }
        if task.start().is_some() {
            add_tag(&mut task, "ACTIVE".to_string());
        }
        if task.scheduled().is_some() {
            add_tag(&mut task, "SCHEDULED".to_string());
        }
        if task.parent().is_some() {
            add_tag(&mut task, "INSTANCE".to_string());
        }
        if task.until().is_some() {
            add_tag(&mut task, "UNTIL".to_string());
        }
        if task.annotations().is_some() {
            add_tag(&mut task, "ANNOTATED".to_string());
        }
        // TAGGED: the task carries at least one real (non-virtual) tag.
        // `.any` replaces the old double negative `!....next().is_none()`.
        let has_real_tag = task
            .tags()
            .map(|tags| tags.iter().any(|s| !self.task_report_table.virtual_tags.contains(s)))
            .unwrap_or(false);
        if has_real_tag {
            add_tag(&mut task, "TAGGED".to_string());
        }
        if task.mask().is_some() {
            add_tag(&mut task, "TEMPLATE".to_string());
        }
        if task.project().is_some() {
            add_tag(&mut task, "PROJECT".to_string());
        }
        // Fixed: this branch previously added a second "PROJECT" tag
        // instead of "PRIORITY".
        if task.priority().is_some() {
            add_tag(&mut task, "PRIORITY".to_string());
        }
        if task.due().is_some() {
            add_tag(&mut task, "DUE".to_string());
        }
        if let Some(d) = task.due() {
            let status = task.status();
            // due today (skip tasks that are already finished or deleted)
            if status != &TaskStatus::Completed && status != &TaskStatus::Deleted {
                match get_date_state(d) {
                    DateState::EarlierToday | DateState::LaterToday => {
                        add_tag(&mut task, "TODAY".to_string());
                        add_tag(&mut task, "DUETODAY".to_string());
                    }
                    _ => (),
                }
            }
        }
        if let Some(d) = task.due() {
            let status = task.status();
            // overdue
            if status != &TaskStatus::Completed
                && status != &TaskStatus::Deleted
                && status != &TaskStatus::Recurring
            {
                let now = Local::now().naive_utc();
                let d = NaiveDateTime::new(d.date(), d.time());
                if d < now {
                    add_tag(&mut task, "OVERDUE".to_string());
                }
            }
        }
    }
}
/// Route one key press to the handler for the current UI mode.
///
/// # Errors
/// Propagates failures from `update()` (the task re-export that follows
/// most mutating actions).
pub fn handle_input(
&mut self,
input: Key,
terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
events: &Events,
) -> Result<(), Box<dyn Error>> {
match self.mode {
// Report view: navigation plus single-key task operations. Failing
// operations switch to TaskError mode with the message to display.
AppMode::TaskReport => match input {
Key::Ctrl('c') | Key::Char('q') => self.should_quit = true,
Key::Char(']') => {
self.mode = AppMode::Calendar;
}
Key::Char('r') => self.update()?,
Key::Down | Key::Char('j') => self.next(),
Key::Up | Key::Char('k') => self.previous(),
Key::Char('d') => match self.task_done() {
Ok(_) => self.update()?,
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Char('x') => match self.task_delete() {
Ok(_) => self.update()?,
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Char('s') => match self.task_start_or_stop() {
Ok(_) => self.update()?,
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Char('u') => match self.task_undo() {
Ok(_) => self.update()?,
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Char('e') => {
// `task edit` launches an external editor, so the event loop is
// paused while the editor owns the terminal.
events.pause_event_loop(terminal);
let r = self.task_edit();
events.resume_event_loop(terminal);
match r {
Ok(_) => self.update()?,
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
}
}
Key::Char('m') => {
self.mode = AppMode::TaskModify;
// Pre-fill the modify buffer with the selected task's description.
match self.task_current() {
Some(t) => {
let s = format!("{} ", t.description());
self.modify.update(&s, s.len())
}
None => self.modify.update("", 0),
}
}
Key::Char('!') => {
self.mode = AppMode::TaskSubprocess;
}
Key::Char('l') => {
self.mode = AppMode::TaskLog;
}
Key::Char('a') => {
self.mode = AppMode::TaskAdd;
}
Key::Char('A') => {
self.mode = AppMode::TaskAnnotate;
}
Key::Char('?') => {
self.mode = AppMode::TaskHelpPopup;
}
Key::Char('/') => {
self.mode = AppMode::TaskFilter;
}
Key::Char('v') => {
self.hide_task_detail = !self.hide_task_detail;
}
_ => {}
},
AppMode::TaskHelpPopup => match input {
Key::Esc => {
self.mode = AppMode::TaskReport;
}
_ => {}
},
// Text-entry modes: Enter submits, Esc cancels, everything else
// edits the corresponding line buffer.
AppMode::TaskModify => match input {
Key::Char('\n') => match self.task_modify() {
Ok(_) => {
self.mode = AppMode::TaskReport;
self.update()?;
}
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Esc => {
self.modify.update("", 0);
self.mode = AppMode::TaskReport;
}
_ => handle_movement(&mut self.modify, input),
},
AppMode::TaskSubprocess => match input {
Key::Char('\n') => match self.task_subprocess() {
Ok(_) => {
self.mode = AppMode::TaskReport;
self.update()?;
}
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Esc => {
self.command.update("", 0);
self.mode = AppMode::TaskReport;
}
_ => handle_movement(&mut self.command, input),
},
AppMode::TaskLog => match input {
Key::Char('\n') => match self.task_log() {
Ok(_) => {
self.mode = AppMode::TaskReport;
self.update()?;
}
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Esc => {
self.command.update("", 0);
self.mode = AppMode::TaskReport;
}
_ => handle_movement(&mut self.command, input),
},
AppMode::TaskAnnotate => match input {
Key::Char('\n') => match self.task_annotate() {
Ok(_) => {
self.mode = AppMode::TaskReport;
self.update()?;
}
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Esc => {
self.command.update("", 0);
self.mode = AppMode::TaskReport;
}
_ => handle_movement(&mut self.command, input),
},
AppMode::TaskAdd => match input {
Key::Char('\n') => match self.task_add() {
Ok(_) => {
self.mode = AppMode::TaskReport;
self.update()?;
}
Err(e) => {
self.mode = AppMode::TaskError;
self.error = e;
}
},
Key::Esc => {
self.command.update("", 0);
self.mode = AppMode::TaskReport;
}
_ => handle_movement(&mut self.command, input),
},
AppMode::TaskFilter => match input {
Key::Char('\n') | Key::Esc => {
self.mode = AppMode::TaskReport;
self.update()?;
}
_ => handle_movement(&mut self.filter, input),
},
// Any key dismisses the error banner.
AppMode::TaskError => self.mode = AppMode::TaskReport,
AppMode::Calendar => match input {
Key::Char('[') => {
self.mode = AppMode::TaskReport;
}
Key::Up | Key::Char('k') => {
if self.calendar_year > 0 {
self.calendar_year -= 1
}
}
Key::Down | Key::Char('j') => self.calendar_year += 1,
Key::Ctrl('c') | Key::Char('q') => self.should_quit = true,
_ => {}
},
}
Ok(())
}
}
/// Apply an emacs-style editing key to a line buffer (cursor movement,
/// insertion, deletion, word operations). Unrecognized keys are ignored.
pub fn handle_movement(linebuffer: &mut LineBuffer, input: Key) {
match input {
Key::Ctrl('f') | Key::Right => {
linebuffer.move_forward(1);
}
Key::Ctrl('b') | Key::Left => {
linebuffer.move_backward(1);
}
Key::Char(c) => {
linebuffer.insert(c, 1);
}
Key::Ctrl('h') | Key::Backspace => {
linebuffer.backspace(1);
}
Key::Ctrl('d') | Key::Delete => {
linebuffer.delete(1);
}
Key::Ctrl('a') | Key::Home => {
linebuffer.move_home();
}
Key::Ctrl('e') | Key::End => {
linebuffer.move_end();
}
// Kill from the cursor to the end of the line.
Key::Ctrl('k') => {
linebuffer.kill_line();
}
// Discard the entire line.
Key::Ctrl('u') => {
linebuffer.discard_line();
}
Key::Ctrl('w') => {
linebuffer.delete_prev_word(Word::Emacs, 1);
}
Key::Alt('d') => {
linebuffer.delete_word(At::AfterEnd, Word::Emacs, 1);
}
Key::Alt('f') => {
linebuffer.move_to_next_word(At::AfterEnd, Word::Emacs, 1);
}
Key::Alt('b') => {
linebuffer.move_to_prev_word(Word::Emacs, 1);
}
Key::Alt('t') => {
linebuffer.transpose_words(1);
}
_ => {}
}
}
/// Append `tag` to the task's tag list, creating the list if the task
/// has no tags yet.
pub fn add_tag(task: &mut Task, tag: String) {
    if let Some(tags) = task.tags_mut() {
        tags.push(tag);
    } else {
        task.set_tags(Some(vec![tag]));
    }
}
#[cfg(test)]
mod tests {
    use crate::app::TTApp;

    // NOTE: this test shells out to the `task` binary, so it only passes on
    // a machine with taskwarrior installed and at least one task present.
    #[test]
    fn test_app() {
        // `TTApp::new()` returns a Result; the old test accessed fields on
        // the Result directly, which did not compile.
        let app = TTApp::new().unwrap();
        assert_eq!(app.context_name, "".to_string());
        println!("{:?}", app.tasks.lock().unwrap()[0]);
        dbg!(&app.task_current().unwrap().tags());
        dbg!(app.style_for_task(&app.task_current().unwrap()));
    }
}
|
use std::fs;
use test_case::test_case;
use bitbuffer::{BitRead, BitReadBuffer, BitReadStream, BitWrite, BitWriteStream, LittleEndian};
use std::collections::HashMap;
use tf_demo_parser::demo::header::Header;
use tf_demo_parser::demo::message::Message;
use tf_demo_parser::demo::packet::datatable::SendTableName;
use tf_demo_parser::demo::packet::Packet;
use tf_demo_parser::demo::parser::{DemoHandler, Encode, NullHandler, RawPacketStream};
use tf_demo_parser::demo::sendprop::{RawSendPropDefinition, SendPropIdentifier, SendPropName};
use tf_demo_parser::{Demo, Parse};
// Round-trip test: decode every packet of a demo file, re-encode it with the
// same parser state, decode the re-encoded bytes, and require the result to
// match the original packet (the fixed-size header is compared byte-for-byte).
#[test_case("test_data/small.dem"; "small.dem")]
#[test_case("test_data/gully.dem"; "gully.dem")]
#[test_case("test_data/comp.dem"; "comp.dem")]
#[test_case("test_data/malformed_cvar.dem"; "malformed_cvar.dem")]
#[test_case("test_data/unicode-saytext.dem"; "unicode-saytext.dem")]
#[test_case("test_data/nousers.dem"; "nousers.dem")]
#[test_case("test_data/decal.dem"; "decal.dem")]
#[test_case("test_data/saytext2.dem"; "saytext2.dem")]
#[test_case("test_data/emptysaytext.dem"; "emptysaytext.dem")]
fn re_encode_test(input_file: &str) {
    let file = fs::read(input_file).expect("Unable to read file");
    let demo = Demo::new(&file);
    let mut out_buffer = Vec::with_capacity(file.len());
    let mut stream = demo.get_stream();
    let header = Header::read(&mut stream).unwrap();
    // `pos()` counts bits, hence the division by 8.
    let header_size = stream.pos() / 8;
    assert_eq!(1072, header_size);
    let mut packets = RawPacketStream::new(stream);
    let mut handler = DemoHandler::parse_all_with_analyser(NullHandler);
    {
        let mut out_stream = BitWriteStream::new(&mut out_buffer, LittleEndian);
        header.write(&mut out_stream).unwrap();
    }
    // The re-encoded header must be byte-identical to the original file.
    assert_eq!(file[0..header_size], out_buffer);
    // Maps prop identifiers back to (table, name, definition) so entity-prop
    // mismatches below produce a readable diff.
    let mut prop_names: HashMap<
        SendPropIdentifier,
        (SendTableName, SendPropName, RawSendPropDefinition),
    > = HashMap::new();
    while let Some(packet) = packets.next(&handler.state_handler).unwrap() {
        if let Packet::DataTables(data_table) = &packet {
            for table in data_table.tables.iter() {
                for prop_def in &table.props {
                    prop_names.insert(
                        prop_def.identifier(),
                        (table.name.clone(), prop_def.name.clone(), prop_def.clone()),
                    );
                }
            }
        }
        // Encode this packet with the current parser state...
        out_buffer.clear();
        {
            let mut out_stream = BitWriteStream::new(&mut out_buffer, LittleEndian);
            packet
                .encode(&mut out_stream, &handler.state_handler)
                .unwrap();
        }
        // ...then decode it again and compare piecewise for better diagnostics.
        let mut re_read = BitReadStream::new(BitReadBuffer::new(&out_buffer, LittleEndian));
        let re_decoded = Packet::parse(&mut re_read, &handler.state_handler)
            .unwrap_or_else(|_| panic!("while parsing {:?}", packet.packet_type()));
        assert_eq!(packet.packet_type(), re_decoded.packet_type());
        match (&packet, &re_decoded) {
            (
                Packet::Message(msg) | Packet::Signon(msg),
                Packet::Message(re_msg) | Packet::Signon(re_msg),
            ) => {
                assert_eq!(msg.tick, re_msg.tick);
                assert_eq!(msg.meta, re_msg.meta);
                // Compare the message-type lists first for a readable failure.
                assert_eq!(
                    msg.messages
                        .iter()
                        .map(|msg| msg.get_message_type())
                        .collect::<Vec<_>>(),
                    re_msg
                        .messages
                        .iter()
                        .map(|msg| msg.get_message_type())
                        .collect::<Vec<_>>()
                );
                assert_eq!(msg.messages.len(), re_msg.messages.len());
                for (msg, re_msg) in msg.messages.iter().zip(re_msg.messages.iter()) {
                    assert_eq!(msg.get_message_type(), re_msg.get_message_type());
                    match (msg, re_msg) {
                        (Message::PacketEntities(msg), Message::PacketEntities(re_msg)) => {
                            assert_eq!(msg.updated_base_line, re_msg.updated_base_line);
                            assert_eq!(msg.base_line, re_msg.base_line);
                            assert_eq!(msg.delta, re_msg.delta);
                            assert_eq!(msg.max_entries, re_msg.max_entries);
                            assert_eq!(msg.removed_entities, re_msg.removed_entities);
                            assert_eq!(msg.entities.len(), re_msg.entities.len());
                            for (ent, re_ent) in msg.entities.iter().zip(re_msg.entities.iter()) {
                                // Resolve prop identifiers to names so the
                                // pretty-printed diff is human readable.
                                let props = ent
                                    .props
                                    .iter()
                                    .map(|prop| {
                                        (
                                            prop_names.get(&prop.identifier).unwrap(),
                                            prop.value.clone(),
                                        )
                                    })
                                    .collect::<Vec<_>>();
                                let re_props = re_ent
                                    .props
                                    .iter()
                                    .map(|prop| {
                                        (
                                            prop_names.get(&prop.identifier).unwrap(),
                                            prop.value.clone(),
                                        )
                                    })
                                    .collect::<Vec<_>>();
                                pretty_assertions::assert_eq!(props, re_props);
                            }
                        }
                        (msg, re_msg) => assert_eq!(msg, re_msg),
                    }
                }
            }
            (Packet::StringTables(packet), Packet::StringTables(re_packet)) => {
                assert_eq!(packet.tick, re_packet.tick);
                assert_eq!(packet.tables.len(), re_packet.tables.len());
                for (table, re_table) in packet.tables.iter().zip(re_packet.tables.iter()) {
                    assert_eq!(table, re_table);
                }
            }
            (packet, re_decoded) => {
                assert_eq!(packet, re_decoded);
            }
        }
        // Advance parser state (needed for stateful packets like DataTables).
        handler.handle_packet(packet).unwrap();
    }
}
|
//! This module contains feature tests: minimalistic tests which check features in isolation
//! and their combination.
mod breaks;
mod dispatch;
mod fleet;
mod format;
mod limits;
mod multjob;
mod pickdev;
mod priorities;
mod relations;
mod reload;
mod skills;
mod timing;
mod work_balance;
|
use futures::channel::mpsc::{unbounded, UnboundedReceiver};
use futures::executor::ThreadPool;
use futures::task::SpawnExt;
use futures::StreamExt;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
// use tokio::runtime::Runtime;
use tokio::sync::{
oneshot::{self, Sender},
watch,
};
use crate::proto::kvraftpb::*;
use crate::raft::{self, ApplyMsg};
// Consider taking a snapshot once every SNAPSHOT_INTERVAL applied entries
// (checked in `KvServer::log_compaction`).
pub const SNAPSHOT_INTERVAL: u64 = 20;
/// Replicated key/value server: a state machine driven by a raft node.
pub struct KvServer {
    /// The raft node used to replicate commands to peers.
    pub rf: raft::Node,
    /// Index of this server among its peers.
    me: usize,
    /// Log-size threshold for snapshotting; `None` disables snapshots.
    /// (Units are whatever `raft::Node::check_log_size` expects —
    /// presumably bytes; confirm against the raft implementation.)
    maxraftstate: Option<usize>,
    /// The key/value data itself.
    kv_store: HashMap<String, String>,
    /// Last request sequence number recorded per client identifier; used by
    /// `apply_command` to suppress duplicate (retried) writes.
    client_seq: HashMap<String, u64>,
    /// Receiver for raft apply messages; taken once by `get_apply_channel`.
    apply_ch: Option<UnboundedReceiver<ApplyMsg>>,
    /// Per-log-index oneshot senders; `apply_command` replies with
    /// `(term, optional value)` once the entry at that index is applied.
    result_ch: HashMap<u64, Sender<(u64, Option<String>)>>,
}
impl KvServer {
    /// Build a `KvServer` on top of a fresh raft instance, restoring any
    /// state previously captured in the persisted snapshot.
    pub fn new(
        servers: Vec<crate::proto::raftpb::RaftClient>,
        me: usize,
        persister: Box<dyn raft::persister::Persister>,
        maxraftstate: Option<usize>,
    ) -> KvServer {
        let (apply_tx, apply_rx) = unbounded();
        // Grab the snapshot before handing the persister over to raft.
        let snapshot = persister.snapshot();
        let node = raft::Node::new(raft::Raft::new(servers, me, persister, apply_tx));
        let mut server = KvServer {
            rf: node,
            me,
            maxraftstate,
            kv_store: HashMap::new(),
            client_seq: HashMap::new(),
            apply_ch: Some(apply_rx),
            result_ch: HashMap::new(),
        };
        server.restore(&snapshot);
        server
    }
}
impl KvServer {
    /// Hand the raft layer a snapshot once the log has grown too large.
    ///
    /// Only checks every `SNAPSHOT_INTERVAL` applied entries, amortizing the
    /// cost of serializing the store.
    fn log_compaction(&mut self, applied_index: u64) {
        if let Some(log_size) = self.maxraftstate {
            if !self.rf.check_log_size(log_size) && (applied_index + 1) % SNAPSHOT_INTERVAL == 0 {
                info!("[kvserver] -----------------start snapshot---------------------");
                let snapshot = self.gen_snapshot();
                self.rf.snapshot(applied_index, &snapshot);
            }
        }
    }
    /// Serialize the kv store and per-client sequence table as parallel
    /// key/value vectors.
    ///
    /// Iterating the same map's `keys()` and `values()` visits entries in a
    /// consistent order, so there is no need to clone the whole maps first
    /// (the previous version cloned both `HashMap`s wholesale).
    fn gen_snapshot(&self) -> Vec<u8> {
        let snapshot = SnapShot {
            key: self.kv_store.keys().cloned().collect(),
            value: self.kv_store.values().cloned().collect(),
            client_name: self.client_seq.keys().cloned().collect(),
            seqs: self.client_seq.values().copied().collect(),
        };
        let mut data = vec![];
        labcodec::encode(&snapshot, &mut data).unwrap();
        data
    }
    /// Apply a snapshot delivered through the apply channel; empty payloads
    /// are ignored.
    fn install_snapshot(&mut self, snapshot: &[u8], term: u64, index: u64) {
        if snapshot.is_empty() {
            return;
        }
        self.rf.cond_install_snapshot(term, index, snapshot);
        self.restore(snapshot);
    }
    /// Rebuild in-memory state from an encoded snapshot; undecodable input
    /// (e.g. an empty initial snapshot) is silently ignored.
    fn restore(&mut self, snapshot: &[u8]) {
        if let Ok(snapshot) = labcodec::decode::<SnapShot>(snapshot) {
            self.kv_store = snapshot.key.into_iter().zip(snapshot.value).collect();
            self.client_seq = snapshot.client_name.into_iter().zip(snapshot.seqs).collect();
        }
    }
}
impl KvServer {
    /// Only for suppressing deadcode warnings.
    #[doc(hidden)]
    pub fn __suppress_deadcode(&mut self) {
        let _ = &self.me;
        let _ = &self.maxraftstate;
    }
    /// Whether the underlying raft node currently believes it is leader.
    pub fn is_leader(&self) -> bool {
        self.rf.is_leader()
    }
    /// Take ownership of the apply channel; panics if already taken.
    pub fn get_apply_channel(&mut self) -> UnboundedReceiver<ApplyMsg> {
        self.apply_ch.take().unwrap()
    }
    /// Look up `key`, returning an empty string for missing keys.
    pub fn get(&self, key: String) -> String {
        self.kv_store.get(&key).cloned().unwrap_or_default()
    }
    /// Append `value` to the entry for `key`, creating it when absent.
    pub fn append(&mut self, key: String, value: String) {
        self.kv_store.entry(key).or_default().push_str(&value);
    }
    /// Insert or overwrite the entry for `key`.
    pub fn put(&mut self, key: String, value: String) {
        self.kv_store.insert(key, value);
    }
}
/// Background loop: drain raft's apply channel, apply each committed entry
/// to the kv state machine, and wake the RPC handler (if any) waiting on
/// that log index. Exits once `stop_signal` flips to true.
async fn apply_command(server: Arc<Mutex<KvServer>>, stop_signal: Arc<watch::Receiver<bool>>) {
    // Take the receiver out of the server so this loop owns it.
    let mut rx = {
        let mut server = server.lock().unwrap();
        server.get_apply_channel()
    };
    while !*stop_signal.borrow() {
        if let Some(apply_msg) = rx.next().await {
            match apply_msg {
                ApplyMsg::Command { data, index } => {
                    // Raft may deliver empty no-op entries; skip them.
                    if data.is_empty() {
                        continue;
                    }
                    // Undecodable payloads are skipped as well.
                    let command = match labcodec::decode::<Command>(&data) {
                        Ok(cmd) => cmd,
                        _ => continue,
                    };
                    let (value, ch) = {
                        let mut server = server.lock().unwrap();
                        let ch = server.result_ch.remove(&index);
                        let entry = server
                            .client_seq
                            .entry(command.identifier.clone())
                            .or_insert(0);
                        // Ops: 1 = put, 2 = append, 3 = get (see the RPC
                        // handlers below). Writes are applied only when the
                        // request seq is newer than the last one seen from
                        // this client — duplicate suppression for retries.
                        let value = match (command.op, *entry < command.seq) {
                            (3, _) => Some(server.get(command.key)),
                            (2, true) => {
                                server.append(command.key, command.value);
                                None
                            }
                            (1, true) => {
                                server.put(command.key, command.value);
                                None
                            }
                            _ => None,
                        };
                        // NOTE(review): this overwrites the stored seq even
                        // when `command.seq` is older than the recorded one,
                        // moving the dedup watermark backwards — confirm raft
                        // ordering makes that impossible.
                        server.client_seq.insert(command.identifier, command.seq);
                        // Maybe snapshot now that this index is applied.
                        server.log_compaction(index);
                        (value, ch)
                    };
                    // Reply after releasing the server lock above.
                    // NOTE(review): `unwrap` assumes the RPC side never drops
                    // its receiver first (e.g. on timeout) — verify.
                    if let Some(ch) = ch {
                        ch.send((server.lock().unwrap().rf.term(), value)).unwrap();
                    }
                }
                ApplyMsg::Snapshot { data, term, index } => {
                    let mut server = server.lock().unwrap();
                    server.install_snapshot(&data, term, index);
                }
            }
        }
    }
}
// Choose concurrency paradigm.
//
// You can either drive the kv server by the rpc framework,
//
// ```rust
// struct Node { server: Arc<Mutex<KvServer>> }
// ```
//
// or spawn a new thread runs the kv server and communicate via
// a channel.
//
// ```rust
// struct Node { sender: Sender<Msg> }
// ```
/// Clonable RPC-facing handle around a `KvServer`.
#[derive(Clone)]
pub struct Node {
    /// The server state, shared with the background `apply_command` task.
    server: Arc<Mutex<KvServer>>,
    /// Pool running the apply task and the per-RPC futures.
    thread_pool: Arc<ThreadPool>,
    /// Watch sender flipped to `true` by `kill()` to stop the apply task.
    stop_channel: Arc<watch::Sender<bool>>,
}
impl Node {
    /// Wrap `kv` and spawn the apply loop on a fresh thread pool.
    pub fn new(kv: KvServer) -> Node {
        let server = Arc::new(Mutex::new(kv));
        let pool = Arc::new(ThreadPool::new().unwrap());
        let (stop_tx, stop_rx) = watch::channel(false);
        pool.spawn(apply_command(Arc::clone(&server), Arc::new(stop_rx)))
            .unwrap();
        Node {
            server,
            thread_pool: pool,
            stop_channel: Arc::new(stop_tx),
        }
    }
    /// the tester calls kill() when a KVServer instance won't
    /// be needed again. you are not required to do anything
    /// in kill(), but it might be convenient to (for example)
    /// turn off debug output from this instance.
    pub fn kill(&self) {
        // Shut down the raft node, then signal the apply task to stop.
        // (The server lock is intentionally held across both calls, matching
        // the previous borrow-extended temporary.)
        let guard = self.server.lock().unwrap();
        guard.rf.kill();
        self.stop_channel.send(true).unwrap();
    }
    /// The current term of this peer.
    pub fn term(&self) -> u64 {
        self.get_state().term()
    }
    /// Whether this peer believes it is the leader.
    pub fn is_leader(&self) -> bool {
        self.get_state().is_leader()
    }
    /// Snapshot of the raft term/leadership, read under the server lock.
    pub fn get_state(&self) -> raft::State {
        let guard = self.server.lock().unwrap();
        raft::State {
            term: guard.rf.term(),
            is_leader: guard.rf.is_leader(),
        }
    }
}
#[async_trait::async_trait]
impl KvService for Node {
    // CAVEATS: Please avoid locking or sleeping here, it may jam the network.
    /// Get RPC: replicate a read through raft (op 3) and reply with the value
    /// once the entry is applied. The work runs on the thread pool so this
    /// handler itself does not hold locks across the reply await.
    async fn get(&self, arg: GetRequest) -> labrpc::Result<GetReply> {
        let server = self.server.clone();
        self.thread_pool
            .spawn_with_handle(async move {
                let GetRequest {
                    key,
                    identifier,
                    seq,
                } = arg;
                let mut reply = GetReply {
                    wrong_leader: false,
                    err: "".to_string(),
                    value: "".to_string(),
                };
                // `apply_command` pushes `(term, value)` into `tx` once the
                // entry at the index returned by `start` is applied.
                let (tx, rx) = oneshot::channel();
                let current_term = {
                    let mut server = server.lock().unwrap();
                    if !server.is_leader() {
                        reply.wrong_leader = true;
                        return Ok(reply);
                    }
                    // let server_seq = server.client_seq.entry(identifier.clone()).or_insert(0);
                    let current_term;
                    // if seq > *server_seq {
                    let cmd = Command {
                        op: 3, // 3 = get (see apply_command)
                        key,
                        value: "".to_string(),
                        identifier,
                        seq,
                    };
                    match server.rf.start(&cmd) {
                        Err(_) => {
                            // start() fails when we are not (or no longer)
                            // leader; tell the client to retry elsewhere.
                            reply.wrong_leader = true;
                            return Ok(reply);
                        }
                        Ok((index, term)) => {
                            server.result_ch.insert(index, tx);
                            current_term = term;
                        }
                    }
                    // }
                    current_term
                };
                match rx.await {
                    Ok((term, value)) => {
                        // A term change between start() and apply means a
                        // different leader may have committed the slot.
                        if current_term != term {
                            reply.err = "leader changed".to_string();
                        } else {
                            reply.value = value.unwrap_or_else(|| "".to_string());
                        }
                    }
                    Err(_) => {
                        return Err(labrpc::Error::Recv(futures::channel::oneshot::Canceled));
                    }
                }
                Ok(reply)
            })
            .unwrap()
            .await
    }
    // CAVEATS: Please avoid locking or sleeping here, it may jam the network.
    /// Put/Append RPC: replicate the write through raft and acknowledge once
    /// applied. `op` distinguishes put (1) from append (2); deduplication by
    /// (identifier, seq) happens in `apply_command`.
    async fn put_append(&self, arg: PutAppendRequest) -> labrpc::Result<PutAppendReply> {
        let server = self.server.clone();
        self.thread_pool
            .spawn_with_handle(async move {
                let PutAppendRequest {
                    key,
                    value,
                    op,
                    identifier,
                    seq,
                } = arg;
                let mut reply = PutAppendReply {
                    wrong_leader: false,
                    err: "".to_string(),
                };
                let (tx, rx) = oneshot::channel();
                let current_term = {
                    let mut server = server.lock().unwrap();
                    if !server.is_leader() {
                        reply.wrong_leader = true;
                        return Ok(reply);
                    }
                    // let server_seq = server.client_seq.entry(identifier.clone()).or_insert(0);
                    let current_term;
                    // if seq > *server_seq {
                    let cmd = Command {
                        op,
                        key,
                        value,
                        identifier,
                        seq,
                    };
                    match server.rf.start(&cmd) {
                        Err(_) => {
                            reply.wrong_leader = true;
                            return Ok(reply);
                        }
                        Ok((index, term)) => {
                            // apply_command answers through this sender once
                            // the entry at `index` is applied.
                            server.result_ch.insert(index, tx);
                            current_term = term;
                        }
                    }
                    // }
                    current_term
                };
                match rx.await {
                    Ok((term, _)) => {
                        if current_term != term {
                            reply.err = "leader changed".to_string();
                        }
                    }
                    Err(_e) => {
                        return Err(labrpc::Error::Recv(futures::channel::oneshot::Canceled));
                    }
                }
                Ok(reply)
            })
            .unwrap()
            .await
    }
}
|
use core::panic;
use std::io::{self, Read};
use bytes::buf::Reader;
use bytes::{Buf, Bytes};
use chunked_bytes::ChunkedBytes;
use lazy_static::lazy_static;
use parquet::data_type::AsBytes;
use parquet::errors::Result;
use parquet::file::reader::{ChunkReader, Length};
use regex::Regex;
use rusoto_core::Region;
use rusoto_s3::{GetObjectOutput, GetObjectRequest, S3Client, S3};
use tokio::io::AsyncReadExt;
use tokio::sync::mpsc::{channel, Receiver, Sender};
use tokio_stream::StreamExt;
/// Byte-range selector for an S3 ranged GET (HTTP `Range` header).
enum Range {
    /// Inclusive range `bytes=start-end`; the second field is the end
    /// *offset*, not a length (see `fetch_range` and `get_read`).
    FromPositionTo(u64, u64),
    /// Suffix range `bytes=-n`: the last `n` bytes of the object.
    FromEnd(u64),
}
/// Parsed subset of an HTTP `Content-Range` response header; only the total
/// object length is currently needed.
struct ContentRange {
    // start_pos: u64,
    // end_pos: u64,
    total_length: u64,
}
/// Extract the total object length from the `Content-Range` header of a
/// ranged GET response (e.g. `bytes 0-3/1024` -> 1024).
///
/// Panics if the header is missing or does not match the expected format.
fn get_content_range(response: &GetObjectOutput) -> ContentRange {
    lazy_static! {
        static ref BYTES_REGEX: Regex = Regex::new(r"bytes (\d+)-(\d+)/([0-9*]+)").unwrap();
    };
    let content_range_captures = BYTES_REGEX
        .captures(response.content_range.as_ref().unwrap().as_str())
        .unwrap();
    ContentRange {
        // start_pos: content_range_captures.get(1).unwrap().as_str().parse::<u64>().unwrap(),
        // end_pos: content_range_captures.get(2).unwrap().as_str().parse::<u64>().unwrap(),
        // Capture 3 is the total length after the '/'.
        total_length: content_range_captures
            .get(3)
            .unwrap()
            .as_str()
            .parse::<u64>()
            .unwrap(),
    }
}
/// Issue a ranged GET for `url` (`(bucket, key)`) and return the response.
///
/// Panics on request failure; callers treat S3 errors as fatal.
async fn fetch_range(client: S3Client, url: (String, String), range: Range) -> GetObjectOutput {
    let range_str = match range {
        // Inclusive byte range "bytes=start-end". The second field is the
        // end offset — the previous binding name (`length`) was misleading,
        // as callers pass `start_pos + length - 1` here.
        Range::FromPositionTo(start_pos, end_pos) => format!("bytes={}-{}", start_pos, end_pos),
        // Suffix range: the last `length` bytes of the object.
        Range::FromEnd(length) => format!("bytes=-{}", length),
    };
    let get_obj_req = GetObjectRequest {
        bucket: url.0,
        key: url.1,
        range: Some(range_str),
        ..Default::default()
    };
    client.get_object(get_obj_req).await.unwrap()
}
/// Request handed to the coordinator task: fetch `length` bytes starting at
/// `start_pos` and stream the response body chunks into `reader_channel`.
struct DownloadPart {
    start_pos: u64,
    length: u64,
    reader_channel: Sender<Bytes>,
}
/// Parquet `ChunkReader` backed by an S3 object, downloading byte ranges on
/// demand through a coordinator task (see `start` / `get_read`).
pub struct S3ChunkReader {
    /// `(bucket, key)` of the object.
    url: (String, String),
    /// Bytes exposed by this reader: whole object for the root reader, one
    /// chunk for readers produced by `get_read`.
    length: u64,
    /// Bytes received from the download channel so far.
    read_size: u64,
    /// Total object size in bytes.
    total_size: u64,
    /// Request channel to the coordinator task; `None` until `start` runs.
    coordinator: Option<Sender<Option<DownloadPart>>>,
    /// Incoming body chunks for this reader; `None` for the root reader.
    reader_channel: Option<Receiver<Bytes>>,
    /// Overflow buffer for chunk bytes not yet consumed by `read`.
    buf: Reader<ChunkedBytes>,
}
impl S3ChunkReader {
    /// Reader over a whole object whose size is already known.
    pub fn new(url: (String, String), total_size: u64) -> S3ChunkReader {
        S3ChunkReader {
            url,
            length: total_size,
            read_size: 0,
            total_size,
            coordinator: None,
            reader_channel: None,
            buf: ChunkedBytes::new().reader(),
        }
    }
    /// Probe the object by requesting its last 4 bytes: the `Content-Range`
    /// header yields the total length, and the bytes themselves must be
    /// parquet's trailing magic `PAR1`.
    pub async fn new_unknown_size(url: (String, String), region: Region) -> S3ChunkReader {
        let client = S3Client::new(region);
        let response = fetch_range(client.clone(), url.clone(), Range::FromEnd(4)).await;
        let content_range = get_content_range(&response);
        let mut magic_number: Vec<u8> = vec![];
        response
            .body
            .unwrap()
            .into_async_read()
            .read_to_end(&mut magic_number)
            .await
            .unwrap();
        if magic_number.as_bytes() != "PAR1".as_bytes() {
            panic!("Not a parquet file");
        }
        Self::new(url, content_range.total_length)
    }
    /// Spawn the coordinator task: it receives `DownloadPart` requests and
    /// launches one ranged GET per part, streaming body chunks into that
    /// part's channel. A `None` message (or a closed channel) stops it.
    pub async fn start(&mut self, region: Region) {
        let (s, mut r) = channel(1);
        let url = self.url.clone();
        self.coordinator = Some(s);
        tokio::spawn(async move {
            while let Some(download_part) = r.recv().await.unwrap_or(None) {
                let client = S3Client::new(region.clone());
                let url = url.clone();
                tokio::spawn(async move {
                    let response = fetch_range(
                        client,
                        url,
                        Range::FromPositionTo(
                            download_part.start_pos,
                            // end offset is inclusive, hence the -1
                            download_part.start_pos + download_part.length - 1,
                        ),
                    )
                    .await;
                    let mut body = response.body.unwrap();
                    // Forward chunks until the body ends; send errors (the
                    // reader went away) are deliberately ignored.
                    while let Ok(Some(data)) = body.try_next().await {
                        let reader_channel = download_part.reader_channel.clone();
                        reader_channel.send(data).await.unwrap_or(());
                    }
                });
            }
        });
    }
}
impl Length for S3ChunkReader {
    /// Number of bytes this reader exposes (whole object for the root
    /// reader, one chunk for readers produced by `get_read`).
    fn len(&self) -> u64 {
        self.length
    }
}
impl Read for S3ChunkReader {
    /// Pull the next downloaded chunk while bytes remain, staging overflow
    /// in the internal buffer; once exhausted, drain that buffer.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let remaining_size = (self.length - self.read_size) as usize;
        if remaining_size > 0 {
            match self.reader_channel.take() {
                Some(mut reader_channel) => {
                    let self_buf = self.buf.get_mut();
                    // Blocking receive — must not run on the async runtime's
                    // worker threads (TODO(review): confirm caller context).
                    let data = reader_channel.blocking_recv().unwrap();
                    let added_size = data.len();
                    self.read_size += added_size as u64;
                    // Fast path: nothing buffered and the chunk fits — copy
                    // straight into the caller's buffer.
                    if self_buf.is_empty() && buf.len() >= added_size {
                        buf[0..added_size].copy_from_slice(&data);
                        self.reader_channel = Some(reader_channel);
                        return Ok(added_size);
                    }
                    // Slow path: stage the chunk; the final `self.buf.read`
                    // below serves the caller from the buffer.
                    self_buf.put_bytes(data);
                    self.reader_channel = Some(reader_channel);
                }
                // Only readers produced by `get_read` carry a channel;
                // reading the root reader directly is unsupported.
                None => unimplemented!(),
            };
        }
        self.buf.read(buf)
    }
}
impl ChunkReader for S3ChunkReader {
    type T = S3ChunkReader;
    /// Ask the coordinator to download `length` bytes at `start_pos` and
    /// return a child reader that consumes the streamed chunks.
    ///
    /// Panics (via `unimplemented!`) if `start` was never called or the
    /// coordinator task has gone away.
    fn get_read(&self, start_pos: u64, length: usize) -> Result<Self::T> {
        let (s, r) = channel(16);
        self.coordinator
            .clone()
            .unwrap()
            .blocking_send(Some(DownloadPart {
                start_pos,
                length: length as u64,
                reader_channel: s,
            }))
            .unwrap_or_else(|err| {
                eprintln!("Error {}", err);
                unimplemented!()
            });
        Ok(S3ChunkReader {
            url: self.url.clone(),
            // The child reader only spans the requested chunk.
            length: length as u64,
            read_size: 0,
            total_size: self.total_size,
            coordinator: self.coordinator.clone(),
            reader_channel: Some(r),
            buf: ChunkedBytes::new().reader(),
        })
    }
}
|
/// Demo of owned `String` vs borrowed `&str`, and of iterating a string by
/// chars versus by raw UTF-8 bytes.
fn main() {
    // A growable, heap-owned String versus a borrowed string slice.
    let mut owned: String = String::from("나는 도비에요 ㅎㅎ");
    let slice: &str = "^_^";
    owned.push_str(slice);
    // char literals use single quotes.
    let at_sign: char = '@';
    owned.push(at_sign);
    let literal: &str = "나는 성필이에요";
    let converted: String = literal.to_string();
    // Strings cannot be indexed directly; iterate chars or bytes instead.
    for ch in owned.chars() {
        println!("{}", ch)
    }
    for byte in converted.bytes() {
        println!("{}", byte)
    }
}
|
use emulator_rs;
/// Load an RV32I ELF test binary into the emulator and run it to completion.
fn main() {
    // CPU with memory based at address 0, 4 KiB in size.
    let mut cpu = emulator_rs::frontend::rv32i::cpu::CPU::new(0,4096);
    let entry_point = cpu.load_elf("src/bin/rv32i-sb".to_string());
    println!("Entry Point loaded");
    println!("{:?}", cpu);
    // NOTE(review): PC is set 0x100 past the ELF entry point — confirm this
    // offset matches the test binary's layout.
    cpu.get_registers().set_pc(entry_point + 0x100);
    cpu.run().unwrap();
    // Register x10 (a0) conventionally holds a result/return value.
    println!("{:?}", cpu.get_registers()[10]);
}
|
mod cons_list;
mod drop;
mod memory_leak;
mod my_box;
mod tree;
/// Run each smart-pointer example module in turn.
fn main() {
    drop::drop_main();
    my_box::my_box_main();
    cons_list::list::list_main();
    cons_list::rc_list::list_main();
    cons_list::rc_refcell_list::list_main();
    memory_leak::main();
    tree::main();
}
|
extern crate parser_c;
extern crate walkdir;
use std::fs::read_to_string;
use parser_c::parse_str;
use walkdir::WalkDir;
/// Walk the `smoke/` tree and parse every `.c` file, panicking on the first
/// parse error. Directory-walk errors are skipped, matching the previous
/// `if let Ok` behavior.
#[test]
fn eval_smoke() {
    for entry in WalkDir::new("smoke").into_iter().flatten() {
        if entry.path().extension().map_or(false, |ext| ext == "c") {
            let source = read_to_string(entry.path()).unwrap();
            match parse_str(&source, &entry.path().display().to_string()) {
                Err(err) => panic!("error: {}", err),
                Ok(_) => println!("smoke test passed: {}", entry.path().display()),
            }
        }
    }
}
|
use serde_json;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::collections::HashMap;
/// A file's stem (name without extension) paired with its full text content.
struct JsonFile {
    stem: String,
    content: String,
}
/// Read every entry of directory `dir` into a `JsonFile`, propagating I/O
/// errors instead of panicking (the previous version `unwrap()`ed inside the
/// iterator chain and then discarded the collected results).
///
/// The parsed files are currently dropped; the call is kept for its
/// validation side effect.
fn get_dir_json(dir: &Path) -> io::Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        get_file_json(path)?;
    }
    Ok(())
}
fn serialize_json_file(input: Vec<JsonFile>) -> io::Result<serde_json::Value> {
let hash: HashMap<String, serde_json::Value> = HashMap::new();
in
Ok()
}
/// Load `file` into a `JsonFile` (stem + full content).
///
/// Returns an `InvalidInput` error for paths whose stem is missing or not
/// valid UTF-8 — the previous version panicked on those cases, which is too
/// harsh for a function that already returns `io::Result`.
fn get_file_json(file: PathBuf) -> io::Result<JsonFile> {
    let stem = file
        .file_stem()
        .and_then(|s| s.to_str())
        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "invalid file stem"))?
        .to_string();
    let content = fs::read_to_string(&file)?;
    Ok(JsonFile { stem, content })
}
fn main() {
    // NOTE(review): hard-coded absolute path — consider reading the
    // directory from program arguments instead.
    get_dir_json(Path::new("/home/zexa/Projects/get-files/src/examples"))
        .expect("Could not read directory");
}
|
// Register accessor in svd2rust's generated style: a read-only view of the
// 32-bit BIST_ADDR register split into two 16-bit address fields.
#[doc = "Reader of register BIST_ADDR"]
pub type R = crate::R<u32, super::BIST_ADDR>;
#[doc = "Reader of field `COL_ADDR`"]
pub type COL_ADDR_R = crate::R<u16, u16>;
#[doc = "Reader of field `ROW_ADDR`"]
pub type ROW_ADDR_R = crate::R<u16, u16>;
impl R {
    #[doc = "Bits 0:15 - Current column address."]
    #[inline(always)]
    pub fn col_addr(&self) -> COL_ADDR_R {
        // Low half-word.
        COL_ADDR_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bits 16:31 - Current row address."]
    #[inline(always)]
    pub fn row_addr(&self) -> ROW_ADDR_R {
        // High half-word.
        ROW_ADDR_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
|
//! Rendering Base
use crate::color::*;
use crate::Pixel;
use crate::Color;
use std::cmp::min;
use std::cmp::max;
/// Rendering base: a thin clipping layer over a pixel format `T`.
///
/// All blend operations clip their coordinates against the image bounds
/// before forwarding to the pixel format.
#[derive(Debug)]
pub struct RenderingBase<T> {
    /// Pixel Format
    pub pixf: T,
}
impl<T> RenderingBase<T> where T: Pixel {
    /// Create new Rendering Base from Pixel Format
    pub fn new(pixf: T) -> RenderingBase<T> {
        RenderingBase { pixf }
    }
    /// Raw view of the underlying pixel bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.pixf.as_bytes()
    }
    /// Write the image to `filename` via the pixel format's encoder.
    pub fn to_file<P: AsRef<std::path::Path>>(&self, filename: P) -> Result<(),std::io::Error> {
        self.pixf.to_file(filename)
    }
    /// Set Image to a single color
    pub fn clear(&mut self, color: Rgba8) {
        self.pixf.fill(color);
    }
    /// Inclusive pixel bounds as (xmin, xmax, ymin, ymax).
    pub fn limits(&self) -> (i64,i64,i64,i64) {
        let w = self.pixf.width() as i64;
        let h = self.pixf.height() as i64;
        (0, w-1, 0, h-1)
    }
    /// Blend a color along y-row from x1 to x2
    pub fn blend_hline<C: Color>(&mut self, x1: i64, y: i64, x2: i64, c: C, cover: u64) {
        let (xmin,xmax,ymin,ymax) = self.limits();
        // Normalize so x1 <= x2, then reject spans fully outside the image.
        let (x1,x2) = if x2 > x1 { (x1,x2) } else { (x2,x1) };
        if y > ymax || y < ymin || x1 > xmax || x2 < xmin {
            return;
        }
        let x1 = max(x1, xmin);
        let x2 = min(x2, xmax);
        self.pixf.blend_hline(x1, y, x2 - x1 + 1, c, cover);
    }
    /// Blend a color from (x,y) with variable covers
    pub fn blend_solid_hspan<C: Color>(&mut self, x: i64, y: i64, len: i64, c: C, covers: &[u64]) {
        let (xmin,xmax,ymin,ymax) = self.limits();
        if y > ymax || y < ymin {
            return;
        }
        let (mut x, mut len, mut off) = (x,len, 0);
        // Clip on the left: shrink the span and advance into `covers`.
        if x < xmin {
            len -= xmin - x;
            if len <= 0 {
                return;
            }
            off = off + xmin - x; // skip covers for the clipped-off pixels
            x = xmin;
        }
        // Clip on the right.
        if x + len > xmax {
            len = xmax - x + 1;
            if len <= 0 {
                return;
            }
        }
        let covers_win = &covers[off as usize .. (off+len) as usize];
        assert!(len as usize <= covers[off as usize ..].len());
        self.pixf.blend_solid_hspan(x, y, len, c, covers_win);
    }
    /// Blend a color from (x,y) with variable covers
    pub fn blend_solid_vspan<C: Color>(&mut self, x: i64, y: i64, len: i64, c: C, covers: &[u64]) {
        let (xmin,xmax,ymin,ymax) = self.limits();
        if x > xmax || x < xmin {
            return;
        }
        let (mut y, mut len, mut off) = (y,len, 0);
        // Clip at the top: shrink the span and advance into `covers`.
        if y < ymin {
            len -= ymin - y;
            if len <= 0 {
                return;
            }
            off = off + ymin - y; // skip covers for the clipped-off pixels
            y = ymin;
        }
        // Clip at the bottom.
        if y + len > ymax {
            len = ymax - y + 1;
            if len <= 0 {
                return;
            }
        }
        let covers_win = &covers[off as usize .. (off+len) as usize];
        assert!(len as usize <= covers[off as usize ..].len());
        self.pixf.blend_solid_vspan(x, y, len, c, covers_win);
    }
    /// Blend per-pixel colors down a column from (x,y); `covers` may be
    /// empty, in which case the single `cover` value applies throughout.
    pub fn blend_color_vspan<C: Color>(&mut self, x: i64, y: i64, len: i64, colors: &[C], covers: &[u64], cover: u64) {
        let (xmin,xmax,ymin,ymax) = self.limits();
        if x > xmax || x < xmin {
            return;
        }
        let (mut y, mut len, mut off) = (y,len, 0);
        // Clip at the top.
        if y < ymin {
            len -= ymin - y;
            if len <= 0 {
                return;
            }
            off = off + ymin - y; // skip colors/covers for clipped pixels
            y = ymin;
        }
        // Clip at the bottom.
        if y + len > ymax {
            len = ymax - y + 1;
            if len <= 0 {
                return;
            }
        }
        let covers_win = if covers.is_empty() {
            &[]
        } else {
            &covers[off as usize .. (off+len) as usize]
        };
        let colors_win = &colors[off as usize .. (off+len) as usize];
        self.pixf.blend_color_vspan(x, y, len, colors_win, covers_win, cover);
    }
    /// Blend per-pixel colors along a row from (x,y); `covers` may be empty,
    /// in which case the single `cover` value applies throughout.
    pub fn blend_color_hspan<C: Color>(&mut self, x: i64, y: i64, len: i64, colors: &[C], covers: &[u64], cover: u64) {
        let (xmin,xmax,ymin,ymax) = self.limits();
        if y > ymax || y < ymin {
            return;
        }
        let (mut x, mut len, mut off) = (x,len, 0);
        // Clip on the left.
        if x < xmin {
            len -= xmin - x;
            if len <= 0 {
                return;
            }
            off = off + xmin - x; // skip colors/covers for clipped pixels
            x = xmin;
        }
        // Clip on the right.
        if x + len > xmax {
            len = xmax - x + 1;
            if len <= 0 {
                return;
            }
        }
        let covers_win = if covers.is_empty() {
            &[]
        } else {
            &covers[off as usize .. (off+len) as usize]
        };
        let colors_win = &colors[off as usize .. (off+len) as usize];
        self.pixf.blend_color_hspan(x, y, len, colors_win, covers_win, cover);
    }
}
|
/// This type exists for one purpose and one purpose only. To help you ensure you're mutating
/// the correct thing, and not mutating a copy. Put `Copy` types in here to rob them of their `Copy` implementation.
///
/// This wrapper tries to be transparent in every way, of course it can't implement every trait in existence,
/// but we'll try to get the important ones. Crate feature "serde" will include `Serialize` and `Deserialize`.
///
/// Major omissions from the trait implementations include common math traits, because consuming a `NotCopy`
/// usually isn't helpful, and those traits consume. However, traits such as `AddAssign` are implemented because
/// they're just a mutation.
#[derive(Default, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct NotCopy<T>(pub T);
impl<T> From<T> for NotCopy<T>
{
    /// Wrap a value. (The previous body called `t.into()`, a redundant
    /// identity conversion from `T` to `T`; wrapping directly is equivalent.)
    fn from(t: T) -> Self {
        NotCopy(t)
    }
}
impl<T, Rhs> std::ops::AddAssign<Rhs> for NotCopy<T>
where
T: std::ops::AddAssign<Rhs>,
{
fn add_assign(&mut self, rhs: Rhs) {
self.0 += rhs
}
}
impl<T, Rhs> std::ops::SubAssign<Rhs> for NotCopy<T>
where
T: std::ops::SubAssign<Rhs>,
{
fn sub_assign(&mut self, rhs: Rhs) {
self.0 -= rhs
}
}
impl<T, Rhs> std::ops::MulAssign<Rhs> for NotCopy<T>
where
T: std::ops::MulAssign<Rhs>,
{
fn mul_assign(&mut self, rhs: Rhs) {
self.0 *= rhs
}
}
impl<T, Rhs> std::ops::DivAssign<Rhs> for NotCopy<T>
where
T: std::ops::DivAssign<Rhs>,
{
fn div_assign(&mut self, rhs: Rhs) {
self.0 /= rhs
}
}
impl<T, Rhs> std::ops::RemAssign<Rhs> for NotCopy<T>
where
T: std::ops::RemAssign<Rhs>,
{
fn rem_assign(&mut self, rhs: Rhs) {
self.0 %= rhs
}
}
impl<T, Rhs> std::ops::ShlAssign<Rhs> for NotCopy<T>
where
T: std::ops::ShlAssign<Rhs>,
{
fn shl_assign(&mut self, rhs: Rhs) {
self.0 <<= rhs
}
}
impl<T, Rhs> std::ops::ShrAssign<Rhs> for NotCopy<T>
where
T: std::ops::ShrAssign<Rhs>,
{
fn shr_assign(&mut self, rhs: Rhs) {
self.0 >>= rhs
}
}
impl<T, Rhs> std::ops::BitAndAssign<Rhs> for NotCopy<T>
where
T: std::ops::BitAndAssign<Rhs>,
{
fn bitand_assign(&mut self, rhs: Rhs) {
self.0 &= rhs
}
}
impl<T, Rhs> std::ops::BitOrAssign<Rhs> for NotCopy<T>
where
T: std::ops::BitOrAssign<Rhs>,
{
fn bitor_assign(&mut self, rhs: Rhs) {
self.0 |= rhs
}
}
impl<T, Rhs> std::ops::BitXorAssign<Rhs> for NotCopy<T>
where
T: std::ops::BitXorAssign<Rhs>,
{
fn bitxor_assign(&mut self, rhs: Rhs) {
self.0 ^= rhs
}
}
impl<T, Idx> std::ops::Index<Idx> for NotCopy<T>
where
    T: std::ops::Index<Idx>,
{
    type Output = T::Output;
    /// Forward indexing to the wrapped value.
    fn index(&self, index: Idx) -> &Self::Output {
        &self.0[index]
    }
}
impl<T, Idx> std::ops::IndexMut<Idx> for NotCopy<T>
where
    T: std::ops::IndexMut<Idx>,
{
    /// Forward mutable indexing to the wrapped value.
    fn index_mut(&mut self, index: Idx) -> &mut Self::Output {
        &mut self.0[index]
    }
}
impl<T> std::fmt::Display for NotCopy<T>
where
    T: std::fmt::Display,
{
    /// Render exactly like the wrapped value — the wrapper is invisible.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.fmt(f)
    }
}
impl<T> std::fmt::Debug for NotCopy<T>
where
    T: std::fmt::Debug,
{
    /// Debug-format exactly like the wrapped value — the wrapper is invisible.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.0.fmt(f)
    }
}
#[cfg(feature = "serde")]
impl<'de, T> serde::Deserialize<'de> for NotCopy<T>
where
    T: serde::Deserialize<'de>,
{
    /// Deserialize as the wrapped type and wrap — transparent, mirroring
    /// `Serialize` below.
    ///
    /// The `Deserializer` bound was previously declared twice (inline on `D`
    /// and again in the `where` clause); it is stated once now.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        T::deserialize(deserializer).map(NotCopy)
    }
}
#[cfg(feature = "serde")]
impl<T> serde::Serialize for NotCopy<T>
where
    T: serde::Serialize,
{
    /// Serialize exactly as the wrapped value (transparent).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        T::serialize(&self.0, serializer)
    }
}
|
use crate::*;
use detail::constants::UNREACHABLE;
/// Represents a tile in the map: static terrain info plus mutable per-tile
/// state (occupying unit, movement bookkeeping).
pub struct Tile<'a, P: Platform> {
    /// Sprite for this tile, if the platform has one loaded for its type.
    pub image: Option<&'a P::Image>,
    /// Static terrain data this tile was built from.
    pub info: &'a serialization::TileType<'a>,
    /// Unit currently occupying the tile, if any.
    pub unit: std::cell::Cell<Option<&'a serialization::Unit<'a>>>,
    /// Remaining movement when reaching this tile — presumably scratch state
    /// for range computation; initialized to UNREACHABLE (see `make_tile`).
    pub remaining_move: std::cell::Cell<numeric_types::MapDistance>,
}
/// Construct a tile of the given terrain type with no occupying unit and an
/// UNREACHABLE movement marker.
pub fn make_tile<'a, P: Platform>(
    image: Option<&'a P::Image>,
    info: &'a serialization::TileType<'a>,
) -> Tile<'a, P> {
    Tile {
        info,
        image,
        unit: std::cell::Cell::default(),
        remaining_move: std::cell::Cell::new(UNREACHABLE),
    }
}
/// Look up tile type `type_id` and build a tile for it, resolving its image
/// from `image_map`. Returns `None` for an out-of-range id.
pub fn get_tile<'a, P: Platform>(
    image_map: &'a std::collections::HashMap<&str, P::Image>,
    tile_types: &'a [serialization::TileType],
    type_id: usize,
) -> Option<Tile<'a, P>> {
    tile_types
        .get(type_id)
        .map(|tile_type| make_tile(image_map.get(tile_type.image), tile_type))
}
|
// https://blog.rust-lang.org/2015/05/11/traits.html
/// Anything that can walk reports a distance walked.
pub trait Walk {
    fn walk(&self) -> u32;
}

struct Human {
    legs: u32,
}

impl Human {
    /// Placeholder inherent method (no-op).
    fn eat() {}
}

struct Lion {
    legs: u32,
}

impl Lion {
    /// Placeholder inherent method (no-op).
    fn hunt() {}
}

impl Walk for Human {
    fn walk(&self) -> u32 {
        3
    }
}

impl Walk for Lion {
    fn walk(&self) -> u32 {
        15
    }
}

/// Static dispatch over any `Walk` implementor.
fn print_walk<T: Walk>(t: &T) {
    println!("This specie walked {}", t.walk())
}
fn main() {
let h = Human{ legs: 2 };
print_walk(&h);
Human::eat();
let l = Lion { legs : 4 };
print_walk(&l);
Lion::hunt();
} |
use std::{convert::TryInto, io};
use tokio::{net::TcpListener, task};
// `u32` (32-bit unsigned int) is used for storing a length of a message.
// Size of `u32`: 4 bytes.
// https://doc.rust-lang.org/std/mem/fn.size_of.html
// Number of prefix bytes that encode the payload length (big-endian u32,
// see `u32::from_be_bytes` in `main`).
const MESSAGE_LENGTH_BUFFER_SIZE: usize = 4;
/// Length-prefixed TCP message server: each message starts with a 4-byte
/// big-endian length, followed by that many bytes of UTF-8 payload.
#[tokio::main]
async fn main() -> io::Result<()> {
    let listener = TcpListener::bind("127.0.0.1:7080").await?;
    loop {
        // New connection
        let (socket, addr) = listener.accept().await?;
        // Spawning a new task to handle each connection asynchronously
        task::spawn(async move {
            println!("[{}] new connection", addr);
            // Buffer for an incoming message
            let mut buf = Vec::new();
            // Length of the current message
            let mut len = None;
            loop {
                // Waiting for the socket to be readable
                socket.readable().await.unwrap();
                match socket.try_read_buf(&mut buf) {
                    Ok(0) => {
                        // Ok(0) indicates the stream’s read half is closed and will no longer yield data
                        println!("[{}] nothing left to read. we're done here.", addr);
                        break;
                    }
                    Ok(_) => {
                        // Some bytes were read and placed in the buffer.
                        // First, figuring out the length of the whole message.
                        let message_len = match len {
                            None => {
                                // No current length set.
                                // It means that either this is the very first message from this client,
                                // or the previous message was received and `buf` + `len` have been reset.
                                // Taking first 4 bytes out of the buffer.
                                // This is the length of the whole message.
                                // NOTE(review): if fewer than 4 bytes have
                                // arrived so far, this panics (splice range /
                                // try_into) — should wait for buf.len() >= 4.
                                let len_bytes = buf
                                    .splice(..MESSAGE_LENGTH_BUFFER_SIZE, vec![])
                                    .collect::<Vec<u8>>()
                                    .try_into()
                                    .unwrap();
                                // Converting these bytes into u32
                                u32::from_be_bytes(len_bytes)
                            }
                            Some(n) => {
                                // `len` is already set,
                                // which means a head of the message was already received.
                                n
                            }
                        };
                        if message_len as usize == buf.len() {
                            // Buffer length is equal to message length,
                            // means the whole message has been received
                            let message = std::str::from_utf8(&buf).unwrap();
                            println!("[{}] message: {}", addr, message);
                            // Resetting the buffer and the current length
                            buf.clear();
                            len = None;
                        } else if message_len as usize > buf.len() {
                            // Buffer length is less then message length,
                            // means the buffer contains only a part of the message
                            len = Some(message_len);
                        } else {
                            // NOTE(review): this branch also fires when two
                            // messages arrive back-to-back in a single read —
                            // pipelined clients will crash this task.
                            panic!("Message length < current buffer");
                        }
                    }
                    // If for whatever reason socket is unreadable, retrying
                    Err(ref err) if err.kind() == io::ErrorKind::WouldBlock => continue,
                    Err(err) => panic!("[{}] {}", addr, err),
                }
            }
        });
    }
}
|
use std::env;
use std::fs;
use std::process;
/// Execute an Intcode program (opcode 1 = add, 2 = multiply, 99 = halt),
/// mutating memory in place, and return the final memory state.
///
/// Panics on an unknown opcode or an out-of-range address, matching the
/// behavior of the original inline loop.
fn run_intcode(mut program: Vec<usize>) -> Vec<usize> {
    let mut pc = 0;
    loop {
        match program[pc] {
            1 => {
                let i_first = program[pc + 1];
                let i_second = program[pc + 2];
                let i_target = program[pc + 3];
                program[i_target] = program[i_first] + program[i_second];
                println!("Sum! Wrote {:} to pos {:}", program[i_target], i_target);
            }
            2 => {
                let i_first = program[pc + 1];
                let i_second = program[pc + 2];
                let i_target = program[pc + 3];
                program[i_target] = program[i_first] * program[i_second];
                println!("Multiply! Wrote {:} to pos {:}", program[i_target], i_target);
            }
            99 => {
                println!("End!");
                break;
            }
            _ => panic!("unexpected op code {:}", program[pc]),
        }
        // Every non-halt instruction is 4 cells wide: opcode + 3 operands.
        pc += 4;
    }
    program
}

/// Read a comma-separated Intcode program from the file named by the single
/// command-line argument, patch cells 1 and 2, run it, and print cell 0.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        println!("Exactly one argument is required; the input file");
        process::exit(1);
    }
    let contents = fs::read_to_string(&args[1])?;
    let mut program: Vec<usize> = contents
        .trim()
        .split(',')
        .map(|x| {
            // unwrap_or_else avoids building the panic string on the happy path
            // (clippy: expect_fun_call).
            x.parse::<usize>()
                .unwrap_or_else(|e| panic!("{:} was not a number!: {}", x, e))
        })
        .collect();
    // Fixed inputs for this puzzle — presumably the Advent of Code day-2
    // "1202 program alarm" noun/verb setup; confirm against the problem text.
    program[1] = 12;
    program[2] = 2;
    let program = run_intcode(program);
    println!("Result: {:}", program[0]);
    Ok(())
}
|
#[macro_use]
extern crate criterion;
extern crate serde_json;
extern crate slack_api;
use criterion::Criterion;
use slack_api::channels::HistoryResponse;
/// Criterion benchmark: deserialize a Slack channel-history JSON dump
/// (presumably 1000 messages, per the benchmark name — confirm fixture).
/// Panics at startup if the `general.txt` fixture is missing from the CWD.
fn history_1000(c: &mut Criterion) {
    let the_json = std::fs::read_to_string("general.txt").unwrap();
    c.bench_function("history_1000", move |b| {
        // Each iteration parses the full document; `unwrap` makes parse
        // failures abort the benchmark instead of skewing timings.
        b.iter(|| ::serde_json::from_str::<HistoryResponse>(&the_json).unwrap())
    });
}
criterion_group!(benches, history_1000);
criterion_main!(benches);
|
// NOTE(review): svd2rust-generated read-only accessors for the DSI Host ISR0
// register (see the doc link below): AE0..AE15 occupy bits 0-15 and PE0..PE4
// occupy bits 16-20. Do not hand-edit; regenerate from the SVD instead.
#[doc = "Register `ISR0` reader"]
pub type R = crate::R<ISR0_SPEC>;
#[doc = "Field `AE0` reader - AE0"]
pub type AE0_R = crate::BitReader;
#[doc = "Field `AE1` reader - AE1"]
pub type AE1_R = crate::BitReader;
#[doc = "Field `AE2` reader - AE2"]
pub type AE2_R = crate::BitReader;
#[doc = "Field `AE3` reader - AE3"]
pub type AE3_R = crate::BitReader;
#[doc = "Field `AE4` reader - AE4"]
pub type AE4_R = crate::BitReader;
#[doc = "Field `AE5` reader - AE5"]
pub type AE5_R = crate::BitReader;
#[doc = "Field `AE6` reader - AE6"]
pub type AE6_R = crate::BitReader;
#[doc = "Field `AE7` reader - AE7"]
pub type AE7_R = crate::BitReader;
#[doc = "Field `AE8` reader - AE8"]
pub type AE8_R = crate::BitReader;
#[doc = "Field `AE9` reader - AE9"]
pub type AE9_R = crate::BitReader;
#[doc = "Field `AE10` reader - AE10"]
pub type AE10_R = crate::BitReader;
#[doc = "Field `AE11` reader - AE11"]
pub type AE11_R = crate::BitReader;
#[doc = "Field `AE12` reader - AE12"]
pub type AE12_R = crate::BitReader;
#[doc = "Field `AE13` reader - AE13"]
pub type AE13_R = crate::BitReader;
#[doc = "Field `AE14` reader - AE14"]
pub type AE14_R = crate::BitReader;
#[doc = "Field `AE15` reader - AE15"]
pub type AE15_R = crate::BitReader;
#[doc = "Field `PE0` reader - PE0"]
pub type PE0_R = crate::BitReader;
#[doc = "Field `PE1` reader - PE1"]
pub type PE1_R = crate::BitReader;
#[doc = "Field `PE2` reader - PE2"]
pub type PE2_R = crate::BitReader;
#[doc = "Field `PE3` reader - PE3"]
pub type PE3_R = crate::BitReader;
#[doc = "Field `PE4` reader - PE4"]
pub type PE4_R = crate::BitReader;
// Read accessors: one method per field, each extracting a single bit.
impl R {
    #[doc = "Bit 0 - AE0"]
    #[inline(always)]
    pub fn ae0(&self) -> AE0_R {
        AE0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - AE1"]
    #[inline(always)]
    pub fn ae1(&self) -> AE1_R {
        AE1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - AE2"]
    #[inline(always)]
    pub fn ae2(&self) -> AE2_R {
        AE2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - AE3"]
    #[inline(always)]
    pub fn ae3(&self) -> AE3_R {
        AE3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - AE4"]
    #[inline(always)]
    pub fn ae4(&self) -> AE4_R {
        AE4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - AE5"]
    #[inline(always)]
    pub fn ae5(&self) -> AE5_R {
        AE5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - AE6"]
    #[inline(always)]
    pub fn ae6(&self) -> AE6_R {
        AE6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - AE7"]
    #[inline(always)]
    pub fn ae7(&self) -> AE7_R {
        AE7_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - AE8"]
    #[inline(always)]
    pub fn ae8(&self) -> AE8_R {
        AE8_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - AE9"]
    #[inline(always)]
    pub fn ae9(&self) -> AE9_R {
        AE9_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - AE10"]
    #[inline(always)]
    pub fn ae10(&self) -> AE10_R {
        AE10_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - AE11"]
    #[inline(always)]
    pub fn ae11(&self) -> AE11_R {
        AE11_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - AE12"]
    #[inline(always)]
    pub fn ae12(&self) -> AE12_R {
        AE12_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - AE13"]
    #[inline(always)]
    pub fn ae13(&self) -> AE13_R {
        AE13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - AE14"]
    #[inline(always)]
    pub fn ae14(&self) -> AE14_R {
        AE14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - AE15"]
    #[inline(always)]
    pub fn ae15(&self) -> AE15_R {
        AE15_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - PE0"]
    #[inline(always)]
    pub fn pe0(&self) -> PE0_R {
        PE0_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - PE1"]
    #[inline(always)]
    pub fn pe1(&self) -> PE1_R {
        PE1_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - PE2"]
    #[inline(always)]
    pub fn pe2(&self) -> PE2_R {
        PE2_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - PE3"]
    #[inline(always)]
    pub fn pe3(&self) -> PE3_R {
        PE3_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - PE4"]
    #[inline(always)]
    pub fn pe4(&self) -> PE4_R {
        PE4_R::new(((self.bits >> 20) & 1) != 0)
    }
}
#[doc = "DSI Host interrupt and status register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr0::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ISR0_SPEC;
impl crate::RegisterSpec for ISR0_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`isr0::R`](R) reader structure"]
impl crate::Readable for ISR0_SPEC {}
#[doc = "`reset()` method sets ISR0 to value 0"]
impl crate::Resettable for ISR0_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United State of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================
// This file was auto-created by LmcpGen. Modifications will be overwritten.
use avtas::lmcp::{Error, ErrorType, Lmcp, LmcpSubscription, SrcLoc, Struct, StructInfo};
use std::fmt::Debug;
// NOTE(review): LmcpGen-generated message type (see file header) — edits here
// will be overwritten on regeneration; fix the generator template instead.
#[derive(Clone, Debug, Default)]
#[repr(C)]
pub struct SubTaskExecution {
    pub sub_tasks: Vec<Box<::afrl::cmasi::task::TaskT>>,
    pub strict_order: bool,
}
impl PartialEq for SubTaskExecution {
    fn eq(&self, _other: &SubTaskExecution) -> bool {
        // Generated field-by-field comparison; the leading `true &&` is a
        // codegen idiom that also works for zero-field structs.
        true
            && &self.sub_tasks == &_other.sub_tasks
            && &self.strict_order == &_other.strict_order
    }
}
impl LmcpSubscription for SubTaskExecution {
    // Topic string clients subscribe to for this message type.
    fn subscription() -> &'static str { "uxas.messages.uxnative.SubTaskExecution" }
}
impl Struct for SubTaskExecution {
    // Wire-format identity (series/version/type) used to tag serialized structs.
    fn struct_info() -> StructInfo {
        StructInfo {
            exist: 1,
            series: 6149751333668345413u64,
            version: 8,
            struct_ty: 10,
        }
    }
}
impl Lmcp for SubTaskExecution {
    // Serialize: struct header first, then each field in declaration order.
    // Returns the total number of bytes written.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        let mut pos = 0;
        {
            let x = Self::struct_info().ser(buf)?;
            pos += x;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.sub_tasks.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.strict_order.ser(r)?;
            pos += writeb;
        }
        Ok(pos)
    }
    // Deserialize: reject any payload whose StructInfo header does not match
    // this type's identity, then read fields in declaration order.
    fn deser(buf: &[u8]) -> Result<(SubTaskExecution, usize), Error> {
        let mut pos = 0;
        let (si, u) = StructInfo::deser(buf)?;
        pos += u;
        if si == SubTaskExecution::struct_info() {
            let mut out: SubTaskExecution = Default::default();
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<Box<::afrl::cmasi::task::TaskT>>, usize) = Lmcp::deser(r)?;
                out.sub_tasks = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (bool, usize) = Lmcp::deser(r)?;
                out.strict_order = x;
                pos += readb;
            }
            Ok((out, pos))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    // Serialized size in bytes: 15 fixed bytes (presumably the StructInfo
    // header — NOTE(review): confirm against LmcpGen) plus the field sizes.
    fn size(&self) -> usize {
        let mut size = 15;
        size += self.sub_tasks.size();
        size += self.strict_order.size();
        size
    }
}
// Object-safe accessor trait so this message can be handled behind a trait
// object; the `as_*` downcast hooks default to None for other implementors.
pub trait SubTaskExecutionT: Debug + Send {
    fn as_uxas_messages_uxnative_sub_task_execution(&self) -> Option<&SubTaskExecution> { None }
    fn as_mut_uxas_messages_uxnative_sub_task_execution(&mut self) -> Option<&mut SubTaskExecution> { None }
    fn sub_tasks(&self) -> &Vec<Box<::afrl::cmasi::task::TaskT>>;
    fn sub_tasks_mut(&mut self) -> &mut Vec<Box<::afrl::cmasi::task::TaskT>>;
    fn strict_order(&self) -> bool;
    fn strict_order_mut(&mut self) -> &mut bool;
}
// NOTE(review): bare trait-object syntax (`Box<SubTaskExecutionT>`) is
// pre-2018-edition Rust; modern editions would write `Box<dyn …>`.
impl Clone for Box<SubTaskExecutionT> {
    fn clone(&self) -> Box<SubTaskExecutionT> {
        if let Some(x) = SubTaskExecutionT::as_uxas_messages_uxnative_sub_task_execution(self.as_ref()) {
            Box::new(x.clone())
        } else {
            // SubTaskExecution is the only generated implementor, so the
            // downcast cannot fail here.
            unreachable!()
        }
    }
}
impl Default for Box<SubTaskExecutionT> {
    fn default() -> Box<SubTaskExecutionT> { Box::new(SubTaskExecution::default()) }
}
impl PartialEq for Box<SubTaskExecutionT> {
    fn eq(&self, other: &Box<SubTaskExecutionT>) -> bool {
        if let (Some(x), Some(y)) =
            (SubTaskExecutionT::as_uxas_messages_uxnative_sub_task_execution(self.as_ref()),
             SubTaskExecutionT::as_uxas_messages_uxnative_sub_task_execution(other.as_ref())) {
            x == y
        } else {
            false
        }
    }
}
// Forward Lmcp (de)serialization through the trait object to the concrete type.
impl Lmcp for Box<SubTaskExecutionT> {
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        if let Some(x) = SubTaskExecutionT::as_uxas_messages_uxnative_sub_task_execution(self.as_ref()) {
            x.ser(buf)
        } else {
            unreachable!()
        }
    }
    fn deser(buf: &[u8]) -> Result<(Box<SubTaskExecutionT>, usize), Error> {
        let (si, _) = StructInfo::deser(buf)?;
        if si == SubTaskExecution::struct_info() {
            let (x, readb) = SubTaskExecution::deser(buf)?;
            Ok((Box::new(x), readb))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    fn size(&self) -> usize {
        if let Some(x) = SubTaskExecutionT::as_uxas_messages_uxnative_sub_task_execution(self.as_ref()) {
            x.size()
        } else {
            unreachable!()
        }
    }
}
impl SubTaskExecutionT for SubTaskExecution {
    fn as_uxas_messages_uxnative_sub_task_execution(&self) -> Option<&SubTaskExecution> { Some(self) }
    fn as_mut_uxas_messages_uxnative_sub_task_execution(&mut self) -> Option<&mut SubTaskExecution> { Some(self) }
    fn sub_tasks(&self) -> &Vec<Box<::afrl::cmasi::task::TaskT>> { &self.sub_tasks }
    fn sub_tasks_mut(&mut self) -> &mut Vec<Box<::afrl::cmasi::task::TaskT>> { &mut self.sub_tasks }
    fn strict_order(&self) -> bool { self.strict_order }
    fn strict_order_mut(&mut self) -> &mut bool { &mut self.strict_order }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::*;
    impl Arbitrary for SubTaskExecution {
        fn arbitrary<G: Gen>(_g: &mut G) -> SubTaskExecution {
            SubTaskExecution {
                sub_tasks: Vec::<::afrl::cmasi::task::Task>::arbitrary(_g).into_iter().map(|x| Box::new(x) as Box<::afrl::cmasi::task::TaskT>).collect(),
                strict_order: Arbitrary::arbitrary(_g),
            }
        }
    }
    quickcheck! {
        // ser must fill exactly size() bytes.
        fn serializes(x: SubTaskExecution) -> Result<TestResult, Error> {
            use std::u16;
            if x.sub_tasks.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            Ok(TestResult::from_bool(sx == x.size()))
        }
        // deser(ser(x)) must reproduce x and consume the same byte count.
        fn roundtrips(x: SubTaskExecution) -> Result<TestResult, Error> {
            use std::u16;
            if x.sub_tasks.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            let (y, sy) = SubTaskExecution::deser(&buf)?;
            Ok(TestResult::from_bool(sx == sy && x == y))
        }
    }
}
|
/// a color in rgba channel format. These should all be in the range 0.0-1.0
/// **Not in 0-255** use `new_from_rgb` or `new_from_rgba`
#[allow(missing_docs)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct Color {
    pub r: f32,
    pub g: f32,
    pub b: f32,
    pub a: f32,
}

impl Color {
    /// Scale one 8-bit channel value into the 0.0-1.0 range.
    fn channel(value: u8) -> f32 {
        f32::from(value) / 255.0
    }
    /// returns a new color in standard rgb format (0-255); alpha is fully opaque
    pub fn new_from_rgb(r: u8, g: u8, b: u8) -> Color {
        Color::new_from_rgba(r, g, b, 1.0)
    }
    /// returns a new color in standard rgba format (rgb: 0-255, a: 0.0-1.0)
    pub fn new_from_rgba(r: u8, g: u8, b: u8, a: f32) -> Color {
        Color {
            r: Color::channel(r),
            g: Color::channel(g),
            b: Color::channel(b),
            a,
        }
    }
    /// converts the color to a value that a uniform uploader will accept.
    pub fn to_uniform(&self) -> [f32; 4] {
        [self.r, self.g, self.b, self.a]
    }
}
|
#!/usr/bin/env rustx
// NOTE(review): pre-1.0 Rust (~0.x era). `import`, `~str`/`~[…]`, and bare
// `assert` were all removed before Rust 1.0, so this script only builds with
// a historical toolchain; it is kept as-is for reference.
use std;
import std::json;
import std::json::Json;
import std::json::Error;
// Pretty-print the JSON file named by the single command-line argument.
fn main (args: ~[~str]) {
    if vec::len(args) != 2 { io::println(~"Usage: ./pp.rs JSON-FILE"); return; }
    let result_data: Result<~str, ~str> = io::read_whole_file_str(&path::Path(args[1]));
    let data = result::unwrap(result_data);
    let result_json: Result<Json, Error> = json::from_str(data);
    let json = result::get(result_json);
    io::println(json::to_str_pretty(json));
}
#[test]
fn test () {
    assert 2 + 2 == 4;
}
|
$NetBSD: patch-vendor_nix_src_sys_signal.rs,v 1.2 2023/05/03 22:39:09 he Exp $
Narrow the conditional on mips to only apply to Linux.
--- vendor/nix/src/sys/signal.rs.orig 2023-01-25 01:49:16.000000000 +0000
+++ vendor/nix/src/sys/signal.rs
@@ -1069,7 +1069,7 @@ mod sigevent {
SigevNotify::SigevThreadId{..} => libc::SIGEV_THREAD_ID,
#[cfg(all(target_os = "linux", target_env = "uclibc"))]
SigevNotify::SigevThreadId{..} => libc::SIGEV_THREAD_ID,
- #[cfg(any(all(target_os = "linux", target_env = "musl"), target_arch = "mips"))]
+ #[cfg(all(target_os = "linux", target_env = "musl", target_arch = "mips"))]
SigevNotify::SigevThreadId{..} => 4 // No SIGEV_THREAD_ID defined
};
sev.sigev_signo = match sigev_notify {
|
mod document_file;
mod document_message_handler;
mod layer_panel;
mod movement_handler;
#[doc(inline)]
pub use document_file::LayerData;
#[doc(inline)]
pub use document_file::{AlignAggregate, AlignAxis, DocumentMessage, DocumentMessageDiscriminant, DocumentMessageHandler, FlipAxis};
#[doc(inline)]
pub use document_message_handler::{DocumentsMessage, DocumentsMessageDiscriminant, DocumentsMessageHandler};
#[doc(inline)]
pub use movement_handler::{MovementMessage, MovementMessageDiscriminant};
|
pub mod config;
pub mod local;
pub mod loopback;
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Testing-related utilities.
use rand::{SeedableRng, XorShiftRng};
/// Create a new deterministic RNG from a seed.
pub fn new_rng(seed: u64) -> impl SeedableRng<[u32; 4]> {
    // XorShiftRng can't take an all-zero seed, so remap 0 to 1.
    let seed = if seed == 0 { 1 } else { seed };
    let low = seed as u32;
    let high = (seed >> 32) as u32;
    // Repeat the two 32-bit halves of the seed to fill all four lanes.
    XorShiftRng::from_seed([low, high, low, high])
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsStepType : Step type used in your Synthetic test.
/// Step type used in your Synthetic test.
// NOTE(review): OpenAPI-generated enum (see file header); each variant maps
// 1:1 to its `serde(rename)` wire string — keep the two in sync.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SyntheticsStepType {
    #[serde(rename = "assertCurrentUrl")]
    ASSERT_CURRENT_URL,
    #[serde(rename = "assertElementAttribute")]
    ASSERT_ELEMENT_ATTRIBUTE,
    #[serde(rename = "assertElementContent")]
    ASSERT_ELEMENT_CONTENT,
    #[serde(rename = "assertElementPresent")]
    ASSERT_ELEMENT_PRESENT,
    #[serde(rename = "assertEmail")]
    ASSERT_EMAIL,
    #[serde(rename = "assertFileDownload")]
    ASSERT_FILE_DOWNLOAD,
    #[serde(rename = "assertFromJavascript")]
    ASSERT_FROM_JAVASCRIPT,
    #[serde(rename = "assertPageContains")]
    ASSERT_PAGE_CONTAINS,
    #[serde(rename = "assertPageLacks")]
    ASSERT_PAGE_LACKS,
    #[serde(rename = "click")]
    CLICK,
    #[serde(rename = "extractFromJavascript")]
    EXTRACT_FROM_JAVASCRIPT,
    #[serde(rename = "extractVariable")]
    EXTRACT_VARIABLE,
    #[serde(rename = "goToEmailLink")]
    GO_TO_EMAIL_LINK,
    #[serde(rename = "goToUrl")]
    GO_TO_URL,
    #[serde(rename = "goToUrlAndMeasureTti")]
    GO_TO_URL_AND_MEASURE_TTI,
    #[serde(rename = "hover")]
    HOVER,
    #[serde(rename = "playSubTest")]
    PLAY_SUB_TEST,
    #[serde(rename = "pressKey")]
    PRESS_KEY,
    #[serde(rename = "refresh")]
    REFRESH,
    #[serde(rename = "runApiTest")]
    RUN_API_TEST,
    #[serde(rename = "scroll")]
    SCROLL,
    #[serde(rename = "selectOption")]
    SELECT_OPTION,
    #[serde(rename = "typeText")]
    TYPE_TEXT,
    #[serde(rename = "uploadFiles")]
    UPLOAD_FILES,
    #[serde(rename = "wait")]
    WAIT,
}
impl ToString for SyntheticsStepType {
    /// Render the step type as its wire-format name — the same string used by
    /// the `serde(rename)` attribute on the corresponding variant.
    fn to_string(&self) -> String {
        let name = match self {
            Self::ASSERT_CURRENT_URL => "assertCurrentUrl",
            Self::ASSERT_ELEMENT_ATTRIBUTE => "assertElementAttribute",
            Self::ASSERT_ELEMENT_CONTENT => "assertElementContent",
            Self::ASSERT_ELEMENT_PRESENT => "assertElementPresent",
            Self::ASSERT_EMAIL => "assertEmail",
            Self::ASSERT_FILE_DOWNLOAD => "assertFileDownload",
            Self::ASSERT_FROM_JAVASCRIPT => "assertFromJavascript",
            Self::ASSERT_PAGE_CONTAINS => "assertPageContains",
            Self::ASSERT_PAGE_LACKS => "assertPageLacks",
            Self::CLICK => "click",
            Self::EXTRACT_FROM_JAVASCRIPT => "extractFromJavascript",
            Self::EXTRACT_VARIABLE => "extractVariable",
            Self::GO_TO_EMAIL_LINK => "goToEmailLink",
            Self::GO_TO_URL => "goToUrl",
            Self::GO_TO_URL_AND_MEASURE_TTI => "goToUrlAndMeasureTti",
            Self::HOVER => "hover",
            Self::PLAY_SUB_TEST => "playSubTest",
            Self::PRESS_KEY => "pressKey",
            Self::REFRESH => "refresh",
            Self::RUN_API_TEST => "runApiTest",
            Self::SCROLL => "scroll",
            Self::SELECT_OPTION => "selectOption",
            Self::TYPE_TEXT => "typeText",
            Self::UPLOAD_FILES => "uploadFiles",
            Self::WAIT => "wait",
        };
        name.to_owned()
    }
}
|
/// Competitive-programming solution: reads `2n` values and repeatedly removes
/// pairs from the middle of the remaining list, accumulating into `sum` the
/// picks that are at least the current median.
/// NOTE(review): O(n^2) overall because of the repeated `Vec::remove` calls.
fn main() {
    proconio::input! {
        n: usize,
        v: [u32; 2*n],
    }
    let mut v = v.clone();
    // Sorted copy used only to locate the median value.
    let mut s_v = v.clone();
    s_v.sort();
    // println!("{:?}", v);
    // println!("{:?}", s_v);
    // `centor` [sic] holds the current median (upper of the two middle values).
    let mut centor = s_v[n];
    // println!("centor {}", centor);
    let mut sum: u32 = 0;
    let mut j: usize = 0;
    while v.len() > 0 {
        // println!("{:?}", v);
        let i = v.len() / 2 - 1;
        if v[i] >= centor || v[i + 1] >= centor {
            // If at least one of the middle pair is >= the median,
            // take the larger of the two
            if v[i] < v[i + 1] {
                sum += v[i + 1];
                v.remove(i + 1);
                v.remove(i);
            } else {
                sum += v[i];
                v.remove(i + 1);
                v.remove(i);
            }
            continue;
        }
        // Both middle values are below the median:
        // scan ahead for some value that is >= the median
        while j < v.len() {
            if v[j] >= centor {
                sum += v[j];
                v.remove(j);
                v.remove(i);
                if j > i {
                    j -= 1;
                }
                break;
            }
            j += 1;
        }
        if i == v.len() / 2 - 1 {
            // No value >= the median was found via `j`,
            // so recompute the median from what remains
            s_v = v.clone();
            s_v.sort();
            centor = s_v[i];
            j = 0;
        }
        // println!("{} {}", i, sum);
    }
    println!("{}", sum);
}
#[doc = "Register `AHB4LPENR` reader"]
pub type R = crate::R<AHB4LPENR_SPEC>;
#[doc = "Register `AHB4LPENR` writer"]
pub type W = crate::W<AHB4LPENR_SPEC>;
#[doc = "Field `GPIOALPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub type GPIOALPEN_R = crate::BitReader<GPIOALPEN_A>;
// NOTE(review): svd2rust-generated enum plus reader/writer plumbing for the
// GPIOALPEN bit; the other GPIO*/CRC/BDMA/ADC3/BKPRAM/SRAM4 fields below
// reuse these types through `pub use` aliases.
#[doc = "GPIO peripheral clock enable during CSleep mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GPIOALPEN_A {
    #[doc = "0: The selected clock is disabled during csleep mode"]
    Disabled = 0,
    #[doc = "1: The selected clock is enabled during csleep mode"]
    Enabled = 1,
}
impl From<GPIOALPEN_A> for bool {
    #[inline(always)]
    fn from(variant: GPIOALPEN_A) -> Self {
        variant as u8 != 0
    }
}
impl GPIOALPEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> GPIOALPEN_A {
        match self.bits {
            false => GPIOALPEN_A::Disabled,
            true => GPIOALPEN_A::Enabled,
        }
    }
    #[doc = "The selected clock is disabled during csleep mode"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == GPIOALPEN_A::Disabled
    }
    #[doc = "The selected clock is enabled during csleep mode"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == GPIOALPEN_A::Enabled
    }
}
#[doc = "Field `GPIOALPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub type GPIOALPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, GPIOALPEN_A>;
impl<'a, REG, const O: u8> GPIOALPEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "The selected clock is disabled during csleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOALPEN_A::Disabled)
    }
    #[doc = "The selected clock is enabled during csleep mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOALPEN_A::Enabled)
    }
}
#[doc = "Field `GPIOBLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOBLPEN_R;
#[doc = "Field `GPIOCLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOCLPEN_R;
#[doc = "Field `GPIODLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIODLPEN_R;
#[doc = "Field `GPIOELPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOELPEN_R;
#[doc = "Field `GPIOFLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOFLPEN_R;
#[doc = "Field `GPIOGLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOGLPEN_R;
#[doc = "Field `GPIOHLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOHLPEN_R;
#[doc = "Field `GPIOILPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOILPEN_R;
#[doc = "Field `GPIOJLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOJLPEN_R;
#[doc = "Field `GPIOKLPEN` reader - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as GPIOKLPEN_R;
#[doc = "Field `CRCLPEN` reader - CRC peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_R as CRCLPEN_R;
#[doc = "Field `BDMALPEN` reader - BDMA Clock Enable During CSleep Mode"]
pub use GPIOALPEN_R as BDMALPEN_R;
#[doc = "Field `ADC3LPEN` reader - ADC3 Peripheral Clocks Enable During CSleep Mode"]
pub use GPIOALPEN_R as ADC3LPEN_R;
#[doc = "Field `BKPRAMLPEN` reader - Backup RAM Clock Enable During CSleep Mode"]
pub use GPIOALPEN_R as BKPRAMLPEN_R;
#[doc = "Field `SRAM4LPEN` reader - SRAM4 Clock Enable During CSleep Mode"]
pub use GPIOALPEN_R as SRAM4LPEN_R;
#[doc = "Field `GPIOBLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOBLPEN_W;
#[doc = "Field `GPIOCLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOCLPEN_W;
#[doc = "Field `GPIODLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIODLPEN_W;
#[doc = "Field `GPIOELPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOELPEN_W;
#[doc = "Field `GPIOFLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOFLPEN_W;
#[doc = "Field `GPIOGLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOGLPEN_W;
#[doc = "Field `GPIOHLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOHLPEN_W;
#[doc = "Field `GPIOILPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOILPEN_W;
#[doc = "Field `GPIOJLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOJLPEN_W;
#[doc = "Field `GPIOKLPEN` writer - GPIO peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as GPIOKLPEN_W;
#[doc = "Field `CRCLPEN` writer - CRC peripheral clock enable during CSleep mode"]
pub use GPIOALPEN_W as CRCLPEN_W;
#[doc = "Field `BDMALPEN` writer - BDMA Clock Enable During CSleep Mode"]
pub use GPIOALPEN_W as BDMALPEN_W;
#[doc = "Field `ADC3LPEN` writer - ADC3 Peripheral Clocks Enable During CSleep Mode"]
pub use GPIOALPEN_W as ADC3LPEN_W;
#[doc = "Field `BKPRAMLPEN` writer - Backup RAM Clock Enable During CSleep Mode"]
pub use GPIOALPEN_W as BKPRAMLPEN_W;
#[doc = "Field `SRAM4LPEN` writer - SRAM4 Clock Enable During CSleep Mode"]
pub use GPIOALPEN_W as SRAM4LPEN_W;
// Read accessors: one method per register field, each extracting its bit.
impl R {
    #[doc = "Bit 0 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioalpen(&self) -> GPIOALPEN_R {
        GPIOALPEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioblpen(&self) -> GPIOBLPEN_R {
        GPIOBLPEN_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioclpen(&self) -> GPIOCLPEN_R {
        GPIOCLPEN_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpiodlpen(&self) -> GPIODLPEN_R {
        GPIODLPEN_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioelpen(&self) -> GPIOELPEN_R {
        GPIOELPEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioflpen(&self) -> GPIOFLPEN_R {
        GPIOFLPEN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioglpen(&self) -> GPIOGLPEN_R {
        GPIOGLPEN_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpiohlpen(&self) -> GPIOHLPEN_R {
        GPIOHLPEN_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioilpen(&self) -> GPIOILPEN_R {
        GPIOILPEN_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpiojlpen(&self) -> GPIOJLPEN_R {
        GPIOJLPEN_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn gpioklpen(&self) -> GPIOKLPEN_R {
        GPIOKLPEN_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 19 - CRC peripheral clock enable during CSleep mode"]
    #[inline(always)]
    pub fn crclpen(&self) -> CRCLPEN_R {
        CRCLPEN_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 21 - BDMA Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn bdmalpen(&self) -> BDMALPEN_R {
        BDMALPEN_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 24 - ADC3 Peripheral Clocks Enable During CSleep Mode"]
    #[inline(always)]
    pub fn adc3lpen(&self) -> ADC3LPEN_R {
        ADC3LPEN_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 28 - Backup RAM Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn bkpramlpen(&self) -> BKPRAMLPEN_R {
        BKPRAMLPEN_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - SRAM4 Clock Enable During CSleep Mode"]
    #[inline(always)]
    pub fn sram4lpen(&self) -> SRAM4LPEN_R {
        SRAM4LPEN_R::new(((self.bits >> 29) & 1) != 0)
    }
}
// Write accessors: one proxy per field; the const generic is the bit offset.
impl W {
    #[doc = "Bit 0 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioalpen(&mut self) -> GPIOALPEN_W<AHB4LPENR_SPEC, 0> {
        GPIOALPEN_W::new(self)
    }
    #[doc = "Bit 1 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioblpen(&mut self) -> GPIOBLPEN_W<AHB4LPENR_SPEC, 1> {
        GPIOBLPEN_W::new(self)
    }
    #[doc = "Bit 2 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioclpen(&mut self) -> GPIOCLPEN_W<AHB4LPENR_SPEC, 2> {
        GPIOCLPEN_W::new(self)
    }
    #[doc = "Bit 3 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpiodlpen(&mut self) -> GPIODLPEN_W<AHB4LPENR_SPEC, 3> {
        GPIODLPEN_W::new(self)
    }
    #[doc = "Bit 4 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioelpen(&mut self) -> GPIOELPEN_W<AHB4LPENR_SPEC, 4> {
        GPIOELPEN_W::new(self)
    }
    #[doc = "Bit 5 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioflpen(&mut self) -> GPIOFLPEN_W<AHB4LPENR_SPEC, 5> {
        GPIOFLPEN_W::new(self)
    }
    #[doc = "Bit 6 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioglpen(&mut self) -> GPIOGLPEN_W<AHB4LPENR_SPEC, 6> {
        GPIOGLPEN_W::new(self)
    }
    #[doc = "Bit 7 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpiohlpen(&mut self) -> GPIOHLPEN_W<AHB4LPENR_SPEC, 7> {
        GPIOHLPEN_W::new(self)
    }
    #[doc = "Bit 8 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioilpen(&mut self) -> GPIOILPEN_W<AHB4LPENR_SPEC, 8> {
        GPIOILPEN_W::new(self)
    }
    #[doc = "Bit 9 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpiojlpen(&mut self) -> GPIOJLPEN_W<AHB4LPENR_SPEC, 9> {
        GPIOJLPEN_W::new(self)
    }
    #[doc = "Bit 10 - GPIO peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn gpioklpen(&mut self) -> GPIOKLPEN_W<AHB4LPENR_SPEC, 10> {
        GPIOKLPEN_W::new(self)
    }
    #[doc = "Bit 19 - CRC peripheral clock enable during CSleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn crclpen(&mut self) -> CRCLPEN_W<AHB4LPENR_SPEC, 19> {
        CRCLPEN_W::new(self)
    }
    #[doc = "Bit 21 - BDMA Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn bdmalpen(&mut self) -> BDMALPEN_W<AHB4LPENR_SPEC, 21> {
        BDMALPEN_W::new(self)
    }
    #[doc = "Bit 24 - ADC3 Peripheral Clocks Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn adc3lpen(&mut self) -> ADC3LPEN_W<AHB4LPENR_SPEC, 24> {
        ADC3LPEN_W::new(self)
    }
    #[doc = "Bit 28 - Backup RAM Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn bkpramlpen(&mut self) -> BKPRAMLPEN_W<AHB4LPENR_SPEC, 28> {
        BKPRAMLPEN_W::new(self)
    }
    #[doc = "Bit 29 - SRAM4 Clock Enable During CSleep Mode"]
    #[inline(always)]
    #[must_use]
    pub fn sram4lpen(&mut self) -> SRAM4LPEN_W<AHB4LPENR_SPEC, 29> {
        SRAM4LPEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "RCC AHB4 Sleep Clock Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb4lpenr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb4lpenr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AHB4LPENR_SPEC;
impl crate::RegisterSpec for AHB4LPENR_SPEC {
// The register is accessed as a 32-bit word.
type Ux = u32;
}
#[doc = "`read()` method returns [`ahb4lpenr::R`](R) reader structure"]
// Marker impl: enables the generic `read()` API for this register.
impl crate::Readable for AHB4LPENR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ahb4lpenr::W`](W) writer structure"]
// Marker impl: enables the generic `write()`/`modify()` API. No bits in this
// register are write-one-to-clear or write-zero-to-clear (both bitmaps are 0).
impl crate::Writable for AHB4LPENR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets AHB4LPENR to value 0"]
// Reset state of the register: all clock-enable bits cleared.
impl crate::Resettable for AHB4LPENR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use std::convert::TryInto;
use std::error::Error;
use zbus::{dbus_interface, fdo};
struct Foo;
#[dbus_interface(name = "com.grahamc.Foo1")]
impl Foo {
    /// D-Bus method `Bar`: returns a greeting built from both arguments.
    fn bar(&self, name: &str, arg: &str) -> String {
        let greeting = format!("Hello {} {}!", name, arg);
        greeting
    }
}
fn main() -> Result<(), Box<dyn Error>> {
    // Connect to the session bus and claim our well-known name.
    let conn = zbus::Connection::new_session()?;
    fdo::DBusProxy::new(&conn)?.request_name(
        "com.grahamc.Foo",
        fdo::RequestNameFlags::ReplaceExisting.into(),
    )?;
    // Serve `Foo` at its object path and pump incoming messages forever.
    let mut server = zbus::ObjectServer::new(&conn);
    server.at(&"/com/grahamc/Foo".try_into()?, Foo)?;
    loop {
        println!("hello");
        match server.try_handle_next() {
            Ok(_) => {}
            Err(err) => eprintln!("{}", err),
        }
    }
}
|
#[macro_use] extern crate log;
#[macro_use] extern crate rustc_serialize;
extern crate log4rs;
extern crate regex;
extern crate win32_bindgen;
use std::collections::HashMap;
use win32_bindgen as bg;
const LOGGER_CONFIG_PATH: &'static str = "local/log.toml";
fn main() {
    println!("Loading logger config...");
    // Prefer the on-disk config; fall back to the built-in defaults when it
    // is missing or invalid.
    if let Err(err) = logger_config() {
        println!("Error loading logging config from {}: {}", LOGGER_CONFIG_PATH, err);
        println!("Loading defaults instead.");
        log4rs::init_config(default_logger_config().unwrap()).unwrap();
    }
    println!("Starting...");
    try_main().unwrap();
}
/// Initialize log4rs from the local TOML file; `Err` if the file is missing
/// or the config fails to load.
fn logger_config() -> Result<(), String> {
    // Probe for the file first so a missing config is reported before init.
    if let Err(e) = std::fs::metadata(LOGGER_CONFIG_PATH) {
        return Err(e.to_string());
    }
    log4rs::init_file(LOGGER_CONFIG_PATH, log4rs::toml::Creator::default())
        .map_err(|e| e.to_string())
}
/// Build the fallback logging config: Info-level root logging to stderr and
/// to a truncating `err.log` file.
fn default_logger_config() -> Result<log4rs::config::Config, log4rs::config::Errors> {
    use log::LogLevelFilter;
    use log4rs::{appender, config};
    use log4rs::pattern::PatternLayout;
    // Console appender with the default layout.
    let stderr_appender = config::Appender::builder(
        "stderr".into(),
        Box::new(
            appender::ConsoleAppender::builder()
                .pattern(PatternLayout::default())
                .build(),
        ),
    )
    .build();
    // File appender; `append(false)` truncates err.log on startup.
    let trace_appender = config::Appender::builder(
        "trace-log".into(),
        Box::new(
            appender::FileAppender::builder("err.log")
                .pattern(PatternLayout::default())
                .append(false)
                .build()
                .unwrap(),
        ),
    )
    .build();
    // Root logger routes Info and above to both appenders.
    let root = config::Root::builder(LogLevelFilter::Info)
        .appender("stderr".into())
        .appender("trace-log".into())
        .build();
    config::Config::builder(root)
        .appender(stderr_appender)
        .appender(trace_appender)
        .build()
}
fn try_main() -> Result<(), String> {
info!("Running with local\\config.json...");
let json_config: Config = rustc_serialize::json::decode(&read_file("local/config.json")).unwrap();
let header = json_config.header;
let gen_config = json_config.generation.into_gen_config();
let out_config = json_config.output.into_out_config();
bg::process_header(&header, &gen_config, &out_config);
Ok(())
}
/// Compile `re` into a `Regex`, panicking on an invalid pattern.
fn re(re: &str) -> regex::Regex {
    let compiled = regex::Regex::new(re);
    compiled.unwrap()
}
#[derive(Clone, Debug, RustcDecodable, RustcEncodable)]
// Top-level shape of local/config.json.
pub struct Config {
// Path of the header to process.
pub header: String,
// Code-generation options (see `GenConfig`).
pub generation: GenConfig,
// Output locations and symbol-map options (see `OutConfig`).
pub output: OutConfig,
}
#[derive(Clone, Debug, RustcDecodable, RustcEncodable)]
// JSON-level generation config. The `Vec<String>` fields below hold regex
// pattern strings; they are compiled in `into_gen_config`.
pub struct GenConfig {
pub expansion_configs: Vec<ExpConfig>,
pub dont_ignore_decl_spelling: Vec<String>,
// (file, kind, spelling) triples; `kind` is converted via `.into()` later.
pub ignore_decls: Vec<(String, u32, String)>,
pub ignore_decl_spellings: Vec<String>,
pub ignore_file_paths: Vec<String>,
pub switches: Vec<String>,
pub non_canonical_tag_names: Vec<String>,
}
impl GenConfig {
pub fn into_gen_config(self) -> bg::GenConfig {
bg::GenConfig {
exp_configs: self.expansion_configs.into_iter().map(ExpConfig::into_exp_config).collect(),
dont_ignore_decl_spelling: self.dont_ignore_decl_spelling.into_iter().map(|s| re(&s)).collect(),
ignore_decls: self.ignore_decls.into_iter().map(|(f,k,s)| (f,k.into(),s)).collect(),
ignore_decl_spellings: self.ignore_decl_spellings.into_iter().map(|s| re(&s)).collect(),
ignore_file_paths: self.ignore_file_paths.into_iter().map(|s| re(&s)).collect(),
switches: self.switches,
non_canonical_tag_names: self.non_canonical_tag_names.into_iter().map(|s| re(&s)).collect(),
}
}
}
#[derive(Clone, Debug, RustcDecodable, RustcEncodable)]
// JSON-level output config: where generated code goes and how symbols map
// to libraries.
pub struct OutConfig {
pub output_dir: String,
pub header_path: String,
pub library_path: String,
// Path of the symbol->libraries list file (parsed by `read_symbol_list`).
pub function_library_map: String,
pub function_library_fallback: String,
}
impl OutConfig {
    /// Lower into `bg::OutConfig`, loading the symbol→library map from disk.
    pub fn into_out_config(self) -> bg::OutConfig {
        // Resolve the on-disk symbol map before moving the remaining fields.
        let symbol_map = read_symbol_list(&self.function_library_map);
        bg::OutConfig {
            function_library_map: symbol_map,
            function_library_fallbacks: vec![self.function_library_fallback],
            output_dir: self.output_dir,
            header_path: self.header_path,
            library_path: self.library_path,
        }
    }
}
#[derive(Clone, Debug, RustcDecodable, RustcEncodable)]
// JSON-level expansion config: one target (architecture, Windows version,
// calling convention) combination.
pub struct ExpConfig {
pub architecture: Architecture,
// Version names resolved via `bg::WinVersion::from_name` in `wv`.
pub windows_version_short: String,
pub windows_version_full: String,
pub native_calling_conv: NativeCallConv,
}
impl ExpConfig {
    /// Lower into `bg::ExpConfig`. The short version name contributes only
    /// its high 16 bits; the full version value is kept whole.
    pub fn into_exp_config(self) -> bg::ExpConfig {
        let short = wv(&self.windows_version_short);
        let full = wv(&self.windows_version_full);
        bg::ExpConfig {
            arch: self.architecture.into_architecture(),
            winver: ((short >> 16) as u16, full),
            native_cc: self.native_calling_conv.into_native_call_conv(),
        }
    }
}
// Look up a Windows version constant by name; panics if the name is unknown.
fn wv(s: &str) -> u32 {
bg::WinVersion::from_name(s).unwrap() as u32
}
#[allow(non_camel_case_types)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcDecodable, RustcEncodable)]
// JSON-level target architecture; mirrors `bg::Architecture`.
pub enum Architecture {
X86_32,
X86_64,
Arm,
}
impl Architecture {
pub fn into_architecture(self) -> bg::Architecture {
use self::Architecture::*;
match self {
X86_32 => bg::Architecture::X86_32,
X86_64 => bg::Architecture::X86_64,
Arm => bg::Architecture::Arm,
}
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcDecodable, RustcEncodable)]
// JSON-level native calling convention; mirrors `bg::NativeCallConv`.
pub enum NativeCallConv {
C,
Stdcall,
}
impl NativeCallConv {
pub fn into_native_call_conv(self) -> bg::NativeCallConv {
use self::NativeCallConv::*;
match self {
C => bg::NativeCallConv::C,
Stdcall => bg::NativeCallConv::Stdcall,
}
}
}
/// Parse a symbol→libraries map from the file at `path`.
///
/// Each non-empty, non-`#`-comment line has the form `symbol: lib1 lib2 …`.
/// Panics with context if the file cannot be opened or a line lacks a `:`
/// (previously these failure modes were bare `unwrap()`s with no context).
pub fn read_symbol_list(path: &str) -> HashMap<String, Vec<String>> {
    use std::fs;
    use std::io;
    use std::io::prelude::*;
    let mut map = HashMap::new();
    let file = fs::File::open(path)
        .unwrap_or_else(|e| panic!("could not open symbol list {}: {}", path, e));
    for line in io::BufReader::new(file).lines() {
        let line = line.unwrap();
        let line = line.trim();
        // Skip blank lines and comments.
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        // Split only on the first ':' so library names may contain ':'.
        let mut parts = line.splitn(2, ':');
        let sym_name = parts.next().unwrap();
        let sym_libs = parts
            .next()
            .unwrap_or_else(|| panic!("malformed symbol list line (missing ':'): {:?}", line));
        let sym_libs = sym_libs.split_whitespace().map(|s| s.into()).collect();
        map.insert(sym_name.into(), sym_libs);
    }
    map
}
/// Read the entire file at `path` into a `String`, panicking on any error.
pub fn read_file(path: &str) -> String {
    use std::fs;
    use std::io::prelude::*;
    let mut contents = String::new();
    let mut file = fs::File::open(path).unwrap();
    file.read_to_string(&mut contents).unwrap();
    contents
}
|
use crate::{
configuration::WebsocketSettings,
error::WebsocketError,
message::{ClientMessage, ResultMessage, TaskMessage, WebsocketMessage},
subsystems::{
pc_usage::PcUsageSystem, python_repo::PythonRepoSystem, Subsystem, WebsocketSystem,
},
telemetry::tokio_spawn,
};
use anyhow::Context;
use axum::extract::ws::{Message, WebSocket};
use futures::{
stream::{SplitSink, SplitStream},
SinkExt, StreamExt,
};
use std::{
sync::{Arc, Mutex},
time::Instant,
};
use tokio::sync::mpsc;
// Per-connection state shared between the heartbeat and receiver tasks.
pub struct Session {
// Timestamp of the last ping/pong seen from the client.
hb: Mutex<Instant>,
// Heartbeat interval / client timeout configuration.
settings: WebsocketSettings,
}
impl Session {
    /// Create a session whose heartbeat clock starts now.
    pub fn new(settings: &WebsocketSettings) -> Self {
        Self {
            hb: Mutex::new(Instant::now()),
            settings: settings.clone(),
        }
    }
    /// Sends ping to client every x seconds.
    /// Also checks heartbeats from client.
    #[tracing::instrument(name = "Heartbeat task", level = "trace", skip(self, sender))]
    async fn hb(&self, sender: mpsc::Sender<WebsocketMessage>) -> Result<(), WebsocketError> {
        let mut ticker = tokio::time::interval(self.settings.heartbeat_interval);
        loop {
            ticker.tick().await;
            // How long has the client been silent?
            let silent_for = Instant::now().duration_since(*self.hb.lock().unwrap());
            if silent_for > self.settings.client_timeout {
                // Heartbeat timed out: ask the writer to close and stop.
                tracing::info!("Websocket client heartbeat failed, disconnecting.");
                sender.send(WebsocketMessage::Close).await?;
                return Ok(());
            }
            tracing::trace!("Sending ping.");
            sender.send(WebsocketMessage::Ping(vec![])).await?;
        }
    }
}
#[tracing::instrument(name = "Handling websocket message", skip(socket, settings))]
// Orchestrates one websocket connection: spawns the writer, heartbeat,
// subsystem, and reader tasks, then waits for the first of them to finish.
pub async fn handle_socket(socket: WebSocket, settings: Arc<WebsocketSettings>) {
let session = Arc::new(Session::new(&settings));
// Split the socket into sink/stream halves; all outgoing traffic funnels
// through the `tx` channel into `receive_message`.
let (socket_sender, socket_receiver) = socket.split();
let (tx, rx) = mpsc::channel(32);
// Writer task: drains `rx` and forwards messages to the socket sink.
let mut recv_task = tokio_spawn(receive_message(rx, socket_sender));
// Heartbeat task: pings the client and detects timeouts.
let mut hb_task = tokio_spawn({
let tx = tx.clone();
let session = session.clone();
async move { session.hb(tx).await }
});
// One message channel + task per subsystem.
let python_repo_system = PythonRepoSystem {};
let (python_repo_tx, python_repo_rx) = mpsc::channel(32);
let mut python_repo_task = tokio_spawn({
let tx = tx.clone();
async move { python_repo_system.handle_messages(python_repo_rx, tx).await }
});
let pc_usage_system = PcUsageSystem {};
let (pc_usage_tx, pc_usage_rx) = mpsc::channel(32);
let mut pc_usage_task = tokio_spawn({
let tx = tx.clone();
async move { pc_usage_system.handle_messages(pc_usage_rx, tx).await }
});
// Reader task: routes client messages to the subsystem channels.
let mut client_recv_task = tokio_spawn({
let session = session.clone();
let tx = tx.clone();
async move {
client_receive_task(socket_receiver, session, tx, python_repo_tx, pc_usage_tx).await
}
});
// The first task to complete ends the connection handling.
// NOTE(review): the remaining tasks are not explicitly aborted here —
// presumably they stop once their channels close; confirm.
let (result, _, _) = futures::future::select_all(vec![
&mut client_recv_task,
&mut recv_task,
&mut python_repo_task,
&mut pc_usage_task,
&mut hb_task,
])
.await;
match result {
Ok(Err(e)) => tracing::info!("Got WebsocketError: {:?}", e),
Err(e) => tracing::info!("Got JoinError: {:?}", e),
_ => {}
}
}
/// Pump messages from the websocket: route task requests to the matching
/// subsystem channel, answer pings, record pongs, and stop on close.
#[tracing::instrument(
    name = "Client receiver task",
    level = "trace",
    skip(socket_receiver, session, sender, python_repo_tx, pc_usage_tx)
)]
async fn client_receive_task(
    mut socket_receiver: SplitStream<WebSocket>,
    session: Arc<Session>,
    sender: mpsc::Sender<WebsocketMessage>,
    python_repo_tx: mpsc::Sender<TaskMessage>,
    pc_usage_tx: mpsc::Sender<TaskMessage>,
) -> Result<(), WebsocketError> {
    while let Some(msg) = socket_receiver.next().await {
        match msg {
            Err(e) => tracing::info!("Client disconnected: {:?}", e),
            Ok(msg) => {
                tracing::trace!("Received: {:?}", msg);
                match msg {
                    Message::Text(msg) => match serde_json::from_str::<ClientMessage>(&msg) {
                        Ok(msg) => {
                            // Route to the subsystem the client addressed.
                            let tx = match msg.system {
                                WebsocketSystem::PythonRepo => &python_repo_tx,
                                WebsocketSystem::PcUsage => &pc_usage_tx,
                            };
                            tx.send(msg.into()).await?;
                        }
                        Err(e) => {
                            // Bad payload: report the error back to the client.
                            tracing::info!("Failed to deserialize message: {:?}", e);
                            sender
                                .send(WebsocketMessage::TaskResult(ResultMessage::from_error(
                                    e, None,
                                )))
                                .await?;
                        }
                    },
                    Message::Binary(_) => {
                        tracing::info!("Invalid binary message from client.");
                    }
                    Message::Ping(msg) => {
                        *session.hb.lock().unwrap() = Instant::now();
                        sender.send(WebsocketMessage::Ping(msg)).await?;
                    }
                    Message::Pong(_) => {
                        *session.hb.lock().unwrap() = Instant::now();
                    }
                    Message::Close(_) => {
                        // BUG FIX: this arm was `todo!()`, which panicked the
                        // task whenever a client closed the connection
                        // cleanly. Log and stop reading instead.
                        tracing::info!("Client sent close frame.");
                        return Ok(());
                    }
                }
            }
        }
    }
    Ok(())
}
/// Forward internal `WebsocketMessage`s from `rx` to the websocket sink;
/// stops after sending a `Close` frame or when the channel is drained.
#[tracing::instrument(
    name = "Internal receiver task",
    level = "trace",
    // BUG FIX: a comma was missing after `level = "trace"`, which made the
    // attribute arguments unparsable.
    skip(rx, socket_sender)
)]
async fn receive_message(
    mut rx: mpsc::Receiver<WebsocketMessage>,
    mut socket_sender: SplitSink<WebSocket, Message>,
) -> Result<(), WebsocketError> {
    while let Some(msg) = rx.recv().await {
        tracing::trace!("Received: {:?}", msg);
        match msg {
            WebsocketMessage::Ping(msg) => {
                socket_sender
                    .send(Message::Ping(msg))
                    .await
                    .context("Failed to send Ping message to socket.")?;
            }
            WebsocketMessage::Close => {
                socket_sender
                    .send(Message::Close(None))
                    .await
                    .context("Failed to send Close message to socket.")?;
                break;
            }
            WebsocketMessage::TaskResult(msg) => {
                let msg =
                    serde_json::to_string(&msg).context("Failed to serialize ClientMessage")?;
                socket_sender
                    .send(Message::Text(msg))
                    .await
                    .context("Failed to send ClientMessage to socket.")?;
            }
        }
    }
    Ok(())
}
|
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
/// Druid dimension specs, internally tagged with `"type"` (the camelCase of
/// the variant name unless renamed).
pub enum Dimension {
    #[serde(rename_all = "camelCase")]
    Default {
        dimension: String,
        output_name: String,
        output_type: OutputType,
    },
    #[serde(rename_all = "camelCase")]
    Extraction {
        // BUG FIX: the misspelled field name leaked into the wire format as
        // "dimenstion". Serialize/deserialize it as "dimension" while still
        // accepting the old spelling on input via `alias`; the Rust field
        // name is kept so existing callers compile unchanged.
        #[serde(rename = "dimension", alias = "dimenstion")]
        dimenstion: String,
        output_name: String,
        output_type: OutputType,
        extraction_fn: ExtractFN,
    },
    #[serde(rename_all = "camelCase")]
    ListFiltered {
        delegate: Box<Dimension>,
        values: Vec<String>,
        is_whitelist: bool,
    },
    #[serde(rename_all = "camelCase")]
    RegexFiltered {
        delegate: Box<Dimension>,
        pattern: String,
    },
    #[serde(rename_all = "camelCase")]
    PrefixFiltered {
        delegate: Box<Dimension>,
        prefix: String,
    },
    // NOTE(review): serializing this variant's tag as "lookup" collides with
    // the `Lookup` variant below, so a serialized `LookupMap` cannot
    // round-trip back into `LookupMap` — confirm the intended tag.
    #[serde(rename_all = "camelCase")]
    #[serde(rename(serialize = "lookup"))]
    LookupMap {
        dimension: String,
        output_name: String,
        replace_missing_value_with: String,
        retain_missing_value: bool,
        lookup: LookupMap,
    },
    Lookup {
        dimension: String,
        output_name: String,
        name: String,
    },
}
#[derive(Debug, Serialize, Deserialize)]
// Output type of a dimension spec; serialized as the literal variant name
// ("STRING" | "LONG" | "FLOAT").
pub enum OutputType {
STRING,
LONG,
FLOAT,
}
impl Dimension {
    /// Shorthand for a `default` dimension spec: passes `dimension` through
    /// under the same output name, typed as a string.
    pub fn default(dimension: &str) -> Dimension {
        Dimension::Default {
            output_type: OutputType::STRING,
            dimension: String::from(dimension),
            output_name: String::from(dimension),
        }
    }
    /// Wrap `dimension` in a `regexFiltered` spec that keeps only values
    /// matching `pattern`.
    pub fn regex(dimension: Dimension, pattern: &str) -> Dimension {
        Dimension::RegexFiltered {
            delegate: Box::new(dimension),
            pattern: String::from(pattern),
        }
    }
    /// Wrap `dimension` in a `prefixFiltered` spec that keeps only values
    /// starting with `prefix`.
    pub fn prefix(dimension: Dimension, prefix: &str) -> Dimension {
        Dimension::PrefixFiltered {
            delegate: Box::new(dimension),
            prefix: String::from(prefix),
        }
    }
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", rename = "map")]
// Inline lookup table: serialized with `"type": "map"` plus `map` and
// `isOneToOne` fields.
pub struct LookupMap {
map: std::collections::HashMap<String, String>,
is_one_to_one: bool,
}
#[rustfmt::skip]
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
// Druid aggregator specs, internally tagged with `"type"` (camelCase of the
// variant, e.g. `longSum`); fields also serialize in camelCase
// (`fieldName`, `maxStringBytes`, ...).
pub enum Aggregation {
Count { name: String },
#[serde(rename_all = "camelCase")]
LongSum { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
DoubleSum { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
FloatSum { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
LongMax { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
DoubleMax { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
FloatMax { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
LongMin { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
FloatMin { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
DoubleMin { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
LongFirst { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
FloatFirst { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
DoubleFirst { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
LongLast { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
FloatLast { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
DoubleLast { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
StringFirst { name: String, field_name: String, max_string_bytes: usize },
#[serde(rename_all = "camelCase")]
StringLast { name: String, field_name: String, max_string_bytes: usize },
#[serde(rename_all = "camelCase")]
DoubleAny { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
FloatAny { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
LongAny { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
StringAny { name: String, field_name: String },
#[serde(rename_all = "camelCase")]
Javascript { name: String, field_names: Vec<String>, fn_aggregate: String, fn_combine: String, fn_reset: String},
#[serde(rename_all = "camelCase")]
ThetaSketch {name: String, field_name: String, is_input_theta_sketch: bool, size: usize},
#[serde(rename_all = "camelCase")]
HLLSketchBuild { name: String, field_name: String, lg_k: usize, lgt_hll_type: HllType, round: bool},
#[serde(rename_all = "camelCase")]
Cardinality { name: String, fields: Vec<String>, by_row: bool, round: bool},
#[serde(rename_all = "camelCase")]
HyperUnique { name: String, field_name: String, is_input_hyper_unique: bool, round: bool},
// Wraps another aggregator so it only sees rows matching `filter`.
Filtered { filter: Filter, aggregator: Box<Aggregation>}
}
#[derive(Serialize, Deserialize, Debug)]
// Target HLL sketch variant; serialized as the literal variant name
// ("HLL_4" | "HLL_6" | "HLL_8").
pub enum HllType {
#[allow(non_camel_case_types)]
HLL_4,
#[allow(non_camel_case_types)]
HLL_6,
#[allow(non_camel_case_types)]
HLL_8,
}
// TODO: these near-identical constructors could be generated by a macro.
impl Aggregation {
    /// `count` aggregator.
    pub fn count(name: &str) -> Aggregation {
        Aggregation::Count {
            name: name.to_string(),
        }
    }
    /// `longSum` aggregator over `field_name`.
    pub fn long_sum(name: &str, field_name: &str) -> Aggregation {
        Aggregation::LongSum {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `doubleSum` aggregator over `field_name`.
    pub fn double_sum(name: &str, field_name: &str) -> Aggregation {
        Aggregation::DoubleSum {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `floatSum` aggregator over `field_name`.
    pub fn float_sum(name: &str, field_name: &str) -> Aggregation {
        Aggregation::FloatSum {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `longMax` aggregator over `field_name`.
    pub fn long_max(name: &str, field_name: &str) -> Aggregation {
        Aggregation::LongMax {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `doubleMax` aggregator over `field_name`.
    ///
    /// BUG FIX: the parameter was declared `&&str` (stray double reference);
    /// `&str` matches every sibling constructor, and any existing `&&str`
    /// call sites still compile via deref coercion.
    pub fn double_max(name: &str, field_name: &str) -> Aggregation {
        Aggregation::DoubleMax {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `floatMax` aggregator over `field_name`.
    pub fn float_max(name: &str, field_name: &str) -> Aggregation {
        Aggregation::FloatMax {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `longMin` aggregator over `field_name`.
    pub fn long_min(name: &str, field_name: &str) -> Aggregation {
        Aggregation::LongMin {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `floatMin` aggregator over `field_name`.
    pub fn float_min(name: &str, field_name: &str) -> Aggregation {
        Aggregation::FloatMin {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `doubleMin` aggregator over `field_name`.
    pub fn double_min(name: &str, field_name: &str) -> Aggregation {
        Aggregation::DoubleMin {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `longFirst` aggregator over `field_name`.
    pub fn long_first(name: &str, field_name: &str) -> Aggregation {
        Aggregation::LongFirst {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    /// `floatFirst` aggregator over `field_name`.
    pub fn float_first(name: &str, field_name: &str) -> Aggregation {
        Aggregation::FloatFirst {
            name: name.to_string(),
            field_name: field_name.to_string(),
        }
    }
    // pub fn double_first(name: &str, field_name: &str) -> Aggregation {}
    // pub fn long_last(name: &str, field_name: &str) -> Aggregation {}
    // pub fn float_last(name: &str, field_name: &str) -> Aggregation {}
    // pub fn double_last(name: &str, field_name: &str) -> Aggregation {}
    // pub fn string_first(name: &str, field_name: &str, max_string_bytes: usize) -> Aggregation {}
    // pub fn string_last(name: &str, field_name: &str, max_string_bytes: usize) -> Aggregation {}
    // pub fn double_any(name: &str, field_name: &str) -> Aggregation {}
    // pub fn float_any(name: &str, field_name: &str) -> Aggregation {}
    // pub fn long_any(name: &str, field_name: &str) -> Aggregation {}
    // pub fn string_any(name: &str, field_name: &str) -> Aggregation {}
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "snake_case")]
// Druid query granularity, serialized in snake_case (e.g. "fifteen_minute").
pub enum Granularity {
All,
None,
Second,
Minute,
FifteenMinute,
ThirtyMinute,
Hour,
Day,
Week,
Month,
Quarter,
Year,
// Arbitrary granularity carrying its own duration value.
Duration { duration: usize },
// Sentinel used as the `Default` ("not specified").
Null,
}
impl Default for Granularity {
fn default() -> Self {
Granularity::Null
}
}
#[rustfmt::skip]
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
// Druid extraction functions applied to dimension values.
// NOTE(review): unlike `Dimension`/`Filter` there is no `#[serde(tag)]` here,
// so variants serialize externally tagged — confirm this matches the wire
// format the queries expect.
pub enum ExtractFN {
#[serde(rename_all = "camelCase")]
Regex { expr: String, index: usize, replace_missing_value: bool, replace_missing_value_with: Option<String>},
#[serde(rename_all = "camelCase")]
Partial { expr: String },
// SearchQuery { query: SearchQuerySpec }
#[serde(rename_all = "camelCase")]
Substring { index: usize, length: Option<usize> },
#[serde(rename_all = "camelCase")]
Strlen,
#[serde(rename_all = "camelCase")]
TimeFormat { format: Option<String>, time_zone: Option<String>, locale: Option<String>, granularity: Option<Granularity>, as_millis: bool },
#[serde(rename_all = "camelCase")]
Time { time_format: String, result_format: String, joda: bool },
#[serde(rename_all = "camelCase")]
Javascript { function: String },
#[serde(rename_all = "camelCase")]
RegisteredLookup { lookup: String, retain_missing_value: bool },
#[serde(rename_all = "camelCase")]
Lookup { lookup: LookupMap, retain_missing_value: bool, injective: bool, replace_missing_value_with: String },
#[serde(rename_all = "camelCase")]
Cascade { extraction_fns: Vec<ExtractFN> },
#[serde(rename_all = "camelCase")]
StringFormat {format: String, null_handling: Option<NullHandling>},
#[serde(rename_all = "camelCase")]
Upper { locale: Option<String> },
//todo
#[serde(rename_all = "camelCase")]
Lower { locale: Option<String> },
#[serde(rename_all = "camelCase")]
Bucket { size: usize, offset: usize },
}
/// How a `stringFormat` extraction treats null input values.
// BUG FIX: the `#[serde(...)]` helper attribute was placed BEFORE the
// `#[derive(...)]` that introduces it (rustc's `legacy_derive_helpers`
// lint: accepted today, scheduled to become a hard error). Derive first.
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub enum NullHandling {
    NullString,
    EmptyString,
    ReturnNull,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
// Druid query filters, internally tagged with `"type"`.
// NOTE(review): `Selector` uses the field name `extract_fn` while the other
// variants use `extraction_fn` — verify which spelling the consumer expects.
pub enum Filter {
#[serde(rename_all = "camelCase")]
Selector {
dimension: String,
value: String,
extract_fn: Option<ExtractFN>,
},
ColumnComparison {
dimensions: Vec<String>,
},
Regex {
dimension: String,
pattern: String,
},
// Logical conjunction of `fields`.
And {
fields: Vec<Filter>,
},
// Logical disjunction of `fields`.
Or {
fields: Vec<Filter>,
},
// Negation of `field`.
Not {
field: Box<Filter>,
},
Javascript {
dimension: String,
function: String,
},
Search {
dimension: String,
query: FilterQuerySpec,
},
In {
dimension: String,
values: Vec<String>,
},
#[serde(rename_all = "camelCase")]
Like {
dimension: String,
pattern: String,
escape: Option<String>,
extraction_fn: Option<ExtractFN>,
},
#[serde(rename_all = "camelCase")]
Bound {
dimension: String,
lower: String,
upper: String,
lower_strict: bool,
upper_strict: bool,
ordering: SortingOrder,
extraction_fn: Option<ExtractFN>,
},
#[serde(rename_all = "camelCase")]
Interval {
dimension: String,
intervals: Vec<String>,
extraction_fn: Option<ExtractFN>,
},
// Matches every row.
True,
}
impl Filter {
    /// `selector` filter: exact match of one dimension against `value`.
    pub fn selector(dimension: &str, value: &str) -> Filter {
        Filter::Selector {
            dimension: String::from(dimension),
            value: String::from(value),
            extract_fn: None,
        }
    }
    /// `columnComparison` filter over the named dimensions.
    pub fn column_comparison(dimensions: Vec<&str>) -> Self {
        let dimensions = dimensions.into_iter().map(String::from).collect();
        Filter::ColumnComparison { dimensions }
    }
    /// `regex` filter matching `dimension` values against `pattern`.
    pub fn regex(dimension: &str, pattern: &str) -> Self {
        Filter::Regex {
            dimension: String::from(dimension),
            pattern: String::from(pattern),
        }
    }
    /// `javascript` filter evaluating `javascript` per dimension value.
    pub fn javascript(dimension: &str, javascript: &str) -> Self {
        Filter::Javascript {
            dimension: String::from(dimension),
            function: String::from(javascript),
        }
    }
    /// `in` filter keeping rows whose dimension is one of `values`.
    pub fn in_values(dimension: &str, values: Vec<&str>) -> Self {
        let values = values.into_iter().map(String::from).collect();
        Filter::In {
            dimension: String::from(dimension),
            values,
        }
    }
    /// `like` filter (SQL LIKE semantics) with no escape char or extraction.
    pub fn like(dimension: &str, pattern: &str) -> Self {
        Filter::Like {
            dimension: String::from(dimension),
            pattern: String::from(pattern),
            escape: None,
            extraction_fn: None,
        }
    }
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
// Search-query specs for `Filter::Search`, tagged with `"type"` in
// snake_case (e.g. "insensitive_contains").
pub enum FilterQuerySpec {
#[serde(rename_all = "camelCase")]
Contains { value: String, case_sensitive: bool },
#[serde(rename_all = "camelCase")]
InsensitiveContains { value: String },
#[serde(rename_all = "camelCase")]
Fragment {
values: Vec<String>,
case_sensitive: bool,
},
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
// Result ordering direction ("ascending" | "descending" | "none").
// Unrelated to `std::cmp::Ordering`; this is a query-level field.
pub enum Ordering {
Ascending,
Descending,
None,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
// Comparison semantics used by `Filter::Bound` ("lexicographic" |
// "alphanumeric" | "strlen" | "numeric").
pub enum SortingOrder {
Lexicographic,
Alphanumeric,
Strlen,
Numeric,
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.