text stringlengths 8 4.13M |
|---|
// svd2rust-generated accessor API for the LPTIM control register (CR).
// Standard svd2rust layout: per-field reader/writer proxies, the register
// `R`/`W` types, and the `CR_SPEC` marker tying them together.
#[doc = "Register `CR` reader"]
pub type R = crate::R<CR_SPEC>;
#[doc = "Register `CR` writer"]
pub type W = crate::W<CR_SPEC>;
#[doc = "Field `ENABLE` reader - LPTIM Enable"]
pub type ENABLE_R = crate::BitReader<ENABLE_A>;
#[doc = "LPTIM Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ENABLE_A {
    #[doc = "0: LPTIM is disabled"]
    Disabled = 0,
    #[doc = "1: LPTIM is enabled"]
    Enabled = 1,
}
impl From<ENABLE_A> for bool {
    #[inline(always)]
    fn from(variant: ENABLE_A) -> Self {
        // Discriminant is the hardware bit value, so non-zero == enabled.
        variant as u8 != 0
    }
}
impl ENABLE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ENABLE_A {
        match self.bits {
            false => ENABLE_A::Disabled,
            true => ENABLE_A::Enabled,
        }
    }
    #[doc = "LPTIM is disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == ENABLE_A::Disabled
    }
    #[doc = "LPTIM is enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == ENABLE_A::Enabled
    }
}
#[doc = "Field `ENABLE` writer - LPTIM Enable"]
pub type ENABLE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ENABLE_A>;
impl<'a, REG, const O: u8> ENABLE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "LPTIM is disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENABLE_A::Disabled)
    }
    #[doc = "LPTIM is enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENABLE_A::Enabled)
    }
}
#[doc = "Field `SNGSTRT` reader - LPTIM start in single mode"]
pub type SNGSTRT_R = crate::BitReader<SNGSTRTW_A>;
// Note: only the `1` value is enumerated — writing 0 has no effect in
// hardware, so the reader's `variant()` returns an `Option`.
#[doc = "LPTIM start in single mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SNGSTRTW_A {
    #[doc = "1: LPTIM start in Single mode"]
    Start = 1,
}
impl From<SNGSTRTW_A> for bool {
    #[inline(always)]
    fn from(variant: SNGSTRTW_A) -> Self {
        variant as u8 != 0
    }
}
impl SNGSTRT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SNGSTRTW_A> {
        match self.bits {
            true => Some(SNGSTRTW_A::Start),
            _ => None,
        }
    }
    #[doc = "LPTIM start in Single mode"]
    #[inline(always)]
    pub fn is_start(&self) -> bool {
        *self == SNGSTRTW_A::Start
    }
}
#[doc = "Field `SNGSTRT` writer - LPTIM start in single mode"]
pub type SNGSTRT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SNGSTRTW_A>;
impl<'a, REG, const O: u8> SNGSTRT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "LPTIM start in Single mode"]
    #[inline(always)]
    pub fn start(self) -> &'a mut crate::W<REG> {
        self.variant(SNGSTRTW_A::Start)
    }
}
#[doc = "Field `CNTSTRT` reader - Timer start in continuous mode"]
pub type CNTSTRT_R = crate::BitReader<CNTSTRTW_A>;
#[doc = "Timer start in continuous mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CNTSTRTW_A {
    #[doc = "1: Timer start in Continuous mode"]
    Start = 1,
}
impl From<CNTSTRTW_A> for bool {
    #[inline(always)]
    fn from(variant: CNTSTRTW_A) -> Self {
        variant as u8 != 0
    }
}
impl CNTSTRT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CNTSTRTW_A> {
        match self.bits {
            true => Some(CNTSTRTW_A::Start),
            _ => None,
        }
    }
    #[doc = "Timer start in Continuous mode"]
    #[inline(always)]
    pub fn is_start(&self) -> bool {
        *self == CNTSTRTW_A::Start
    }
}
#[doc = "Field `CNTSTRT` writer - Timer start in continuous mode"]
pub type CNTSTRT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CNTSTRTW_A>;
impl<'a, REG, const O: u8> CNTSTRT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Timer start in Continuous mode"]
    #[inline(always)]
    pub fn start(self) -> &'a mut crate::W<REG> {
        self.variant(CNTSTRTW_A::Start)
    }
}
// Register-level read access: extracts each field from the raw 32-bit value.
impl R {
    #[doc = "Bit 0 - LPTIM Enable"]
    #[inline(always)]
    pub fn enable(&self) -> ENABLE_R {
        ENABLE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - LPTIM start in single mode"]
    #[inline(always)]
    pub fn sngstrt(&self) -> SNGSTRT_R {
        SNGSTRT_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Timer start in continuous mode"]
    #[inline(always)]
    pub fn cntstrt(&self) -> CNTSTRT_R {
        CNTSTRT_R::new(((self.bits >> 2) & 1) != 0)
    }
}
// Register-level write access: each method returns a field-writer proxy whose
// const generic parameter is the field's bit offset.
impl W {
    #[doc = "Bit 0 - LPTIM Enable"]
    #[inline(always)]
    #[must_use]
    pub fn enable(&mut self) -> ENABLE_W<CR_SPEC, 0> {
        ENABLE_W::new(self)
    }
    #[doc = "Bit 1 - LPTIM start in single mode"]
    #[inline(always)]
    #[must_use]
    pub fn sngstrt(&mut self) -> SNGSTRT_W<CR_SPEC, 1> {
        SNGSTRT_W::new(self)
    }
    #[doc = "Bit 2 - Timer start in continuous mode"]
    #[inline(always)]
    #[must_use]
    pub fn cntstrt(&mut self) -> CNTSTRT_W<CR_SPEC, 2> {
        CNTSTRT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Control Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR_SPEC;
impl crate::RegisterSpec for CR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr::R`](R) reader structure"]
impl crate::Readable for CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr::W`](W) writer structure"]
impl crate::Writable for CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR to value 0"]
impl crate::Resettable for CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// svd2rust-generated (pre-0.18 style) accessor API for register
// MMMS_ADVCH_NI_ENABLE: three single-bit enable fields at bits 0..=2,
// each with a hand-rolled write-proxy struct.
#[doc = "Reader of register MMMS_ADVCH_NI_ENABLE"]
pub type R = crate::R<u32, super::MMMS_ADVCH_NI_ENABLE>;
#[doc = "Writer for register MMMS_ADVCH_NI_ENABLE"]
pub type W = crate::W<u32, super::MMMS_ADVCH_NI_ENABLE>;
#[doc = "Register MMMS_ADVCH_NI_ENABLE `reset()`'s with value 0"]
impl crate::ResetValue for super::MMMS_ADVCH_NI_ENABLE {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `ADV_NI_ENABLE`"]
pub type ADV_NI_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ADV_NI_ENABLE`"]
pub struct ADV_NI_ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> ADV_NI_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `SCAN_NI_ENABLE`"]
pub type SCAN_NI_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SCAN_NI_ENABLE`"]
pub struct SCAN_NI_ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> SCAN_NI_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `INIT_NI_ENABLE`"]
pub type INIT_NI_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INIT_NI_ENABLE`"]
pub struct INIT_NI_ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> INIT_NI_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - This bit is used to enable the Advertisement NI timer and is valid when MMMS_ENABLE=1. 0 - ADV_NI timer is disabled 1 - ADV_NI timer is enabled In this mode, the adv engine next instant is scheduled by firmware"]
    #[inline(always)]
    pub fn adv_ni_enable(&self) -> ADV_NI_ENABLE_R {
        ADV_NI_ENABLE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - This bit is used to enable the SCAN NI timer and is valid when MMMS_ENABLE=1. 0 - SCAN_NI timer is disabled 1 - SCAN_NI timer is enabled In this mode, the scan engine next instant is scheduled by firmware"]
    #[inline(always)]
    pub fn scan_ni_enable(&self) -> SCAN_NI_ENABLE_R {
        SCAN_NI_ENABLE_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - This bit is used to enable the INIT NI timer and is valid when MMMS_ENABLE=1. 0 - INIT_NI timer is disabled 1 - INIT_NI timer is enabled In this mode, the init engine next instant is scheduled by firmware"]
    #[inline(always)]
    pub fn init_ni_enable(&self) -> INIT_NI_ENABLE_R {
        INIT_NI_ENABLE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - This bit is used to enable the Advertisement NI timer and is valid when MMMS_ENABLE=1. 0 - ADV_NI timer is disabled 1 - ADV_NI timer is enabled In this mode, the adv engine next instant is scheduled by firmware"]
    #[inline(always)]
    pub fn adv_ni_enable(&mut self) -> ADV_NI_ENABLE_W {
        ADV_NI_ENABLE_W { w: self }
    }
    #[doc = "Bit 1 - This bit is used to enable the SCAN NI timer and is valid when MMMS_ENABLE=1. 0 - SCAN_NI timer is disabled 1 - SCAN_NI timer is enabled In this mode, the scan engine next instant is scheduled by firmware"]
    #[inline(always)]
    pub fn scan_ni_enable(&mut self) -> SCAN_NI_ENABLE_W {
        SCAN_NI_ENABLE_W { w: self }
    }
    #[doc = "Bit 2 - This bit is used to enable the INIT NI timer and is valid when MMMS_ENABLE=1. 0 - INIT_NI timer is disabled 1 - INIT_NI timer is enabled In this mode, the init engine next instant is scheduled by firmware"]
    #[inline(always)]
    pub fn init_ni_enable(&mut self) -> INIT_NI_ENABLE_W {
        INIT_NI_ENABLE_W { w: self }
    }
}
|
use board::Board;
use marker::Marker;
use marker;
/// Renders the board as a flat vector of cell strings.
///
/// Every cell starts as a single space; each recorded move is then painted
/// in, alternating X (even turn) and O (odd turn) in move order.
pub fn expand_board(board: &Board) -> Vec<String> {
    let size = board.get_size();
    let cell_count = (size * size) as usize;
    let mut cells = vec![" ".to_string(); cell_count];
    for (turn, position) in board.get_spaces().iter().enumerate() {
        cells[*position as usize] = marker::inspect(&set_marker(turn));
    }
    cells
}
/// Maps a move index to its player's marker: even turns are X, odd are O.
fn set_marker(index: usize) -> Marker {
    match is_even(index) {
        true => Marker::X,
        false => Marker::O,
    }
}
/// Returns `true` when `index` is even (lowest bit clear).
fn is_even(index: usize) -> bool {
    index & 1 == 0
}
// Fix: the module itself was not gated with `#[cfg(test)]`, so it was
// compiled into non-test builds (only the `use` items were gated). Gating
// the whole module makes the per-item cfgs unnecessary.
#[cfg(test)]
pub mod tests {
    use super::*;
    use board::tests::set_up_board;

    #[test]
    fn convert_empty_board() {
        let board = set_up_board(3, vec![]);
        // A 3x3 board with no moves expands to nine blank cells.
        let expanded_board: Vec<String> = vec![" ".to_string(); 9];
        assert_eq!(expanded_board, expand_board(&board));
    }

    #[test]
    fn convert_in_progress_board() {
        // X plays space 0, O plays space 4; all other cells stay blank.
        let board = set_up_board(3, vec![0, 4]);
        let expanded_board: Vec<String> = vec![
            "X".to_string(),
            " ".to_string(),
            " ".to_string(),
            " ".to_string(),
            "O".to_string(),
            " ".to_string(),
            " ".to_string(),
            " ".to_string(),
            " ".to_string(),
        ];
        assert_eq!(expanded_board, expand_board(&board));
    }

    #[test]
    fn convert_full_board() {
        // Moves alternate X/O in the order given.
        let board = set_up_board(3, vec![0, 4, 8, 2, 6, 7, 1, 3, 5]);
        let expanded_board: Vec<String> = vec![
            "X".to_string(),
            "X".to_string(),
            "O".to_string(),
            "O".to_string(),
            "O".to_string(),
            "X".to_string(),
            "X".to_string(),
            "O".to_string(),
            "X".to_string(),
        ];
        assert_eq!(expanded_board, expand_board(&board));
    }
}
|
use super::data::*;
use nom::{
character::complete::{digit1, hex_digit1, line_ending, not_line_ending, space1},
bytes::complete::tag,
delimited, do_parse, eof, many0, map, map_res, named, opt, preceded,
return_error, separated_list, switch, take, value,
};
/// Marker type returned when a mapfile fails to parse.
#[derive(Debug)]
pub struct ParseError;

/// Parses a hexadecimal string (digits only, no `0x` prefix) into a `u64`.
fn from_hex(input: &str) -> Result<u64, std::num::ParseIntError> {
    u64::from_str_radix(input, 16)
}

/// Parses a decimal string into a `u64`.
fn from_dec(input: &str) -> Result<u64, std::num::ParseIntError> {
    // `parse()` is the idiomatic base-10 path (clippy: from_str_radix_10).
    input.parse()
}
// Leaf and composite parsers built with nom's `named!` macro family.
// Each parses from `&str` into the newtypes from `super::data`.
named!(dec_u64<&str, u64>, map_res!(digit1, from_dec));
// Hex values always carry a `0x` prefix in the mapfile format.
named!(hex_u64<&str, u64>, preceded!(tag("0x"), map_res!(hex_digit1, from_hex)));
named!(address<&str, Address>, map!(hex_u64, Address));
named!(size<&str, Size>, map!(hex_u64, Size));
named!(pass<&str, Pass>, map!(dec_u64, Pass));
// One-character status code of the in-progress operation.
named!(current_status<&str, CurrentStatus>, switch!(take!(1),
    "?" => value!(CurrentStatus::CopyNonTriedBlock) |
    "*" => value!(CurrentStatus::TrimmingBlock) |
    "/" => value!(CurrentStatus::ScrapingBlock) |
    "-" => value!(CurrentStatus::RetryBadSector) |
    "F" => value!(CurrentStatus::Filling) |
    "G" => value!(CurrentStatus::Approximate) |
    "+" => value!(CurrentStatus::Finished)
));
// One-character status code of a recorded block.
named!(block_status<&str, BlockStatus>, switch!(take!(1),
    "?" => value!(BlockStatus::Untried) |
    "*" => value!(BlockStatus::NonTrimmed) |
    "/" => value!(BlockStatus::NonScraped) |
    "-" => value!(BlockStatus::BadSector) |
    "+" => value!(BlockStatus::Finished)
));
// Status line: `<pos> <status> [<pass>]` — the pass number is optional.
named!(current_state<&str, CurrentState>, do_parse!(
    current_pos: address >>
    space1 >>
    current_status: current_status >>
    current_pass: opt!(preceded!(space1, pass)) >>
    (CurrentState{current_pos, current_status, current_pass})
));
// Block line: `<pos> <size> <status>`.
named!(block<&str, Block>, do_parse!(
    pos: address >>
    space1 >>
    size: size >>
    space1 >>
    status: block_status >>
    (Block{pos, size, status})
));
// A `#`-prefixed comment running to the end of its line.
named!(comment<&str, &str>, delimited!(tag("#"), not_line_ending, line_ending));
named!(comment_lines<&str, ()>, value!((), many0!(comment)));
// Whole file: comments, the status line, comments, block lines, then EOF
// (an optional trailing newline is accepted; anything else is an error).
named!(pub parse_mapfile<&str, MapFile>, do_parse!(
    comment_lines >>
    current_state: current_state >>
    line_ending >>
    comment_lines >>
    blocks: separated_list!(line_ending, block) >>
    opt!(line_ending) >>
    eof!() >>
    (
    MapFile {
        current_state,
        blocks,
    })
));
// Unit tests for the mapfile parsers. Fixture strings are byte-sensitive:
// data lines must start at column 0 for the parsers to accept them.
#[cfg(test)]
mod tests {
    use super::*;
    use nom::multi::many0;

    // Leaf parsers: each returns the unconsumed remainder plus the value.
    #[test]
    fn test_basic() {
        assert_eq!(comment("# comment\r\n"), Ok(("", " comment".into())));
        assert_eq!(hex_u64("0xdEaDbEeF "), Ok((" ", 0xdeadbeef)));
        assert_eq!(address("0xdEaDbEeF;"), Ok((";", Address(0xdeadbeef))));
        assert_eq!(size("0xdEaDbEeF;"), Ok((";", Size(0xdeadbeef))));
        assert_eq!(dec_u64("5;"), Ok((";", 5)));
        assert_eq!(pass("5;"), Ok((";", Pass(5))));
    }

    #[test]
    fn test_block_status() {
        use BlockStatus::*;
        assert_eq!(
            many0(block_status)("?*/-+;"),
            Ok((
                ";",
                vec![Untried, NonTrimmed, NonScraped, BadSector, Finished,]
            ))
        );
    }

    #[test]
    fn test_current_status() {
        use CurrentStatus::*;
        assert_eq!(
            many0(current_status)("?*/-FG+;"),
            Ok((
                ";",
                vec![
                    CopyNonTriedBlock,
                    TrimmingBlock,
                    ScrapingBlock,
                    RetryBadSector,
                    Filling,
                    Approximate,
                    Finished,
                ]
            ))
        );
    }

    // The trailing pass number is optional — both forms must parse.
    #[test]
    fn test_current_state() {
        assert_eq!(
            current_state("0x24F35400 +\r\n"),
            Ok((
                "\r\n",
                CurrentState {
                    current_pos: Address(0x24f35400),
                    current_status: CurrentStatus::Finished,
                    current_pass: None,
                }
            ))
        );
        assert_eq!(
            current_state("0x24F35400 + 1\r\n"),
            Ok((
                "\r\n",
                CurrentState {
                    current_pos: Address(0x24f35400),
                    current_status: CurrentStatus::Finished,
                    current_pass: Some(Pass(1)),
                }
            ))
        );
    }

    #[test]
    fn test_block() {
        assert_eq!(
            block("0x00000001 0x2237B000 +;"),
            Ok((
                ";",
                Block {
                    pos: Address(0x1),
                    size: Size(0x2237B000),
                    status: BlockStatus::Finished,
                }
            ))
        );
    }

    // Whole-file parse without a trailing newline.
    #[test]
    fn test_file() {
        assert_eq!(
            parse_mapfile(
                "# Rescue Logfile.
# current_pos current_status
0x24F35400 +
# pos size status
0x00000000 0x2237B000 +
0x2237B000 0x02BBA800 -"
            ),
            Ok((
                "",
                MapFile {
                    current_state: CurrentState {
                        current_pos: Address(0x24f35400),
                        current_status: CurrentStatus::Finished,
                        current_pass: None,
                    },
                    blocks: vec![
                        Block {
                            pos: Address(0x0),
                            size: Size(0x2237B000),
                            status: BlockStatus::Finished,
                        },
                        Block {
                            pos: Address(0x2237B000),
                            size: Size(0x02BBA800),
                            status: BlockStatus::BadSector,
                        },
                    ],
                },
            )),
        );
    }

    // Same file with a trailing newline — also accepted.
    #[test]
    fn test_file2() {
        assert_eq!(
            parse_mapfile(
                "# Rescue Logfile.
# current_pos current_status
0x24F35400 +
# pos size status
0x00000000 0x2237B000 +
0x2237B000 0x02BBA800 -
"
            ),
            Ok((
                "",
                MapFile {
                    current_state: CurrentState {
                        current_pos: Address(0x24f35400),
                        current_status: CurrentStatus::Finished,
                        current_pass: None,
                    },
                    blocks: vec![
                        Block {
                            pos: Address(0x0),
                            size: Size(0x2237B000),
                            status: BlockStatus::Finished,
                        },
                        Block {
                            pos: Address(0x2237B000),
                            size: Size(0x02BBA800),
                            status: BlockStatus::BadSector,
                        },
                    ],
                },
            )),
        );
    }

    // Trailing junk after the last block must make the parse fail (eof!).
    #[test]
    fn test_mapfile_eof() {
        assert!(parse_mapfile(
            "# Rescue Logfile.
# current_pos current_status
0x24F35400 +
# pos size status
0x00000000 0x2237B000 +
0x2237B000 0x02BBA800 -;"
        )
        .is_err());
    }
}
|
use std::fmt;
use std::rc::Rc;
use std::sync::Arc;

use crate::builder::factories::SubsystemFactory;
use crate::mechatronics::dumper::Dumper;
use crate::motor_controllers::motor_group::MotorGroup;
use crate::motor_controllers::print_motor::PrintMotor;
use crate::motor_controllers::roboclaw::RoboClaw;
use crate::motor_controllers::test_motor::TestMotor;
use crate::pinouts::factories::IoFactory;
use crate::robot_map::DUMPER_PWM_CHIP;
use crate::robot_map::DUMPER_PWM_NUM;
use crate::status::robot_state::GlobalRobotState;
/// Builds a `Dumper` backed by real RoboClaw hardware over PWM.
pub struct ProductionDumperFactory {
    // Shared robot state handed to the constructed subsystem.
    state: Arc<GlobalRobotState>,
    // Factory for pinout resources (PWM channels).
    io: Rc<IoFactory>,
}
/// Builds a `Dumper` backed by an in-memory test motor.
pub struct TestDumperFactory {
    state: Arc<GlobalRobotState>
}
/// Builds a `Dumper` that logs motor commands to stdout.
pub struct PrintDumperFactory {
    state: Arc<GlobalRobotState>
}
impl ProductionDumperFactory {
    /// Creates a production factory from shared state and an IO factory.
    pub fn new(state: Arc<GlobalRobotState>, io: Rc<IoFactory>) -> Self {
        Self {
            state,
            io,
        }
    }
}
impl TestDumperFactory {
    /// Creates a test factory from shared state.
    pub fn new(state: Arc<GlobalRobotState>) -> Self {
        Self {
            state
        }
    }
}
impl PrintDumperFactory {
    /// Creates a print (logging) factory from shared state.
    pub fn new(state: Arc<GlobalRobotState>) -> Self {
        Self {
            state
        }
    }
}
impl ToString for ProductionDumperFactory {
fn to_string(&self) -> String {
"production dumper".to_owned()
}
}
impl ToString for TestDumperFactory {
fn to_string(&self) -> String {
"test dumper".to_owned()
}
}
impl ToString for PrintDumperFactory {
fn to_string(&self) -> String {
"print dumper".to_owned()
}
}
impl SubsystemFactory<Dumper> for ProductionDumperFactory {
    /// Wires a `Dumper` to a RoboClaw controller on the dumper PWM channel.
    fn produce(self: Box<Self>) -> Dumper {
        let state = &self.state;
        let pwm = self.io.generate_pwm(DUMPER_PWM_CHIP, DUMPER_PWM_NUM);
        let dumper_motor = Box::new(RoboClaw::new(pwm, state.get_dumper().get_motor()));
        Dumper::new(state.get_life(), dumper_motor, state.get_dumper())
    }
}
impl SubsystemFactory<Dumper> for TestDumperFactory {
    /// Wires a `Dumper` to an in-memory `TestMotor` (no hardware).
    fn produce(self: Box<Self>) -> Dumper {
        let state = &self.state;
        let dumper_motor = Box::new(TestMotor::new(state.get_dumper().get_motor()));
        Dumper::new(state.get_life(), dumper_motor, state.get_dumper())
    }
}
impl SubsystemFactory<Dumper> for PrintDumperFactory {
    /// Wires a `Dumper` to a stdout-logging `PrintMotor`, wrapped in a
    /// single-element `MotorGroup` to match the expected motor interface.
    fn produce(self: Box<Self>) -> Dumper {
        let state = &self.state;
        let dumper_motor = Box::new(PrintMotor::new("Dumper", state.get_dumper().get_motor()));
        let dumper_group = Box::new(MotorGroup::new(vec![dumper_motor], state.get_dumper().get_motor()));
        Dumper::new(state.get_life(), dumper_group, state.get_dumper())
    }
}
// svd2rust-generated accessor API for the SBS CCSWCR register: four 4-bit
// I/O compensation-cell code fields (NMOS/PMOS for VDD and VDDIO rails).
#[doc = "Register `CCSWCR` reader"]
pub type R = crate::R<CCSWCR_SPEC>;
#[doc = "Register `CCSWCR` writer"]
pub type W = crate::W<CCSWCR_SPEC>;
#[doc = "Field `SW_ANSRC1` reader - NMOS compensation code for VDD power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
pub type SW_ANSRC1_R = crate::FieldReader;
#[doc = "Field `SW_ANSRC1` writer - NMOS compensation code for VDD power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
pub type SW_ANSRC1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `SW_APSRC1` reader - PMOS compensation code for the VDD power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
pub type SW_APSRC1_R = crate::FieldReader;
#[doc = "Field `SW_APSRC1` writer - PMOS compensation code for the VDD power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
pub type SW_APSRC1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `SW_ANSRC2` reader - NMOS compensation code for VDDIO power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
pub type SW_ANSRC2_R = crate::FieldReader;
#[doc = "Field `SW_ANSRC2` writer - NMOS compensation code for VDDIO power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
pub type SW_ANSRC2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `SW_APSRC2` reader - PMOS compensation code for the V<sub>DDIO</sub> power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
pub type SW_APSRC2_R = crate::FieldReader;
#[doc = "Field `SW_APSRC2` writer - PMOS compensation code for the V<sub>DDIO</sub> power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
pub type SW_APSRC2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
// Read access: each field is 4 bits wide, masked and shifted from bits.
impl R {
    #[doc = "Bits 0:3 - NMOS compensation code for VDD power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
    #[inline(always)]
    pub fn sw_ansrc1(&self) -> SW_ANSRC1_R {
        SW_ANSRC1_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - PMOS compensation code for the VDD power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
    #[inline(always)]
    pub fn sw_apsrc1(&self) -> SW_APSRC1_R {
        SW_APSRC1_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - NMOS compensation code for VDDIO power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
    #[inline(always)]
    pub fn sw_ansrc2(&self) -> SW_ANSRC2_R {
        SW_ANSRC2_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - PMOS compensation code for the V<sub>DDIO</sub> power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
    #[inline(always)]
    pub fn sw_apsrc2(&self) -> SW_APSRC2_R {
        SW_APSRC2_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
}
// Write access: each method returns a 4-bit field-writer proxy at the
// field's bit offset (const generic parameter).
impl W {
    #[doc = "Bits 0:3 - NMOS compensation code for VDD power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
    #[inline(always)]
    #[must_use]
    pub fn sw_ansrc1(&mut self) -> SW_ANSRC1_W<CCSWCR_SPEC, 0> {
        SW_ANSRC1_W::new(self)
    }
    #[doc = "Bits 4:7 - PMOS compensation code for the VDD power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS1 is set in SBS_CCSR."]
    #[inline(always)]
    #[must_use]
    pub fn sw_apsrc1(&mut self) -> SW_APSRC1_W<CCSWCR_SPEC, 4> {
        SW_APSRC1_W::new(self)
    }
    #[doc = "Bits 8:11 - NMOS compensation code for VDDIO power rails This bitfield is written by software to define an I/O compensation cell code for NMOS transistors of the VDD power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
    #[inline(always)]
    #[must_use]
    pub fn sw_ansrc2(&mut self) -> SW_ANSRC2_W<CCSWCR_SPEC, 8> {
        SW_ANSRC2_W::new(self)
    }
    #[doc = "Bits 12:15 - PMOS compensation code for the V<sub>DDIO</sub> power rails This bitfield is written by software to define an I/O compensation cell code for PMOS transistors of the VDDIO power rail. This code is applied to the I/O when CS2 is set in SBS_CCSR."]
    #[inline(always)]
    #[must_use]
    pub fn sw_apsrc2(&mut self) -> SW_APSRC2_W<CCSWCR_SPEC, 12> {
        SW_APSRC2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SBS compensation cell for I/Os software code register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccswcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccswcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CCSWCR_SPEC;
impl crate::RegisterSpec for CCSWCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ccswcr::R`](R) reader structure"]
impl crate::Readable for CCSWCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ccswcr::W`](W) writer structure"]
impl crate::Writable for CCSWCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CCSWCR to value 0x7878"]
impl crate::Resettable for CCSWCR_SPEC {
    const RESET_VALUE: Self::Ux = 0x7878;
}
|
//! Handles COM initialization and cleanup.
use super::helpers::*;
use winapi::um::combaseapi::{CoInitializeEx, CoUninitialize};
use winapi::um::objbase::COINIT_MULTITHREADED;
use std::ptr;
/// RAII object that guards the fact that COM is initialized.
///
/// We store a raw pointer because it's the only way at the moment to remove
/// `Send`/`Sync` from the object.
struct ComInitialized(*mut ());
impl Drop for ComInitialized {
    #[inline]
    fn drop(&mut self) {
        // Balances the `CoInitializeEx` performed when this thread first
        // touched `COM_INITIALIZED`; runs at thread exit.
        unsafe { CoUninitialize() };
    }
}
thread_local! {
    // One COM initialization per thread, uninitialized on thread exit via
    // the `Drop` impl above.
    static COM_INITIALIZED: ComInitialized = {
        unsafe {
            // This call can fail if another library initialized COM in
            // single-threaded mode.
            // TODO: handle this situation properly.
            check_result(CoInitializeEx(ptr::null_mut(), COINIT_MULTITHREADED)).unwrap();
            ComInitialized(ptr::null_mut())
        }
    }
}
/// Ensures that COM is initialized in this thread.
#[inline]
pub fn ensure_com_initialized() {
    // Touching the thread-local forces its initializer to run exactly once
    // per thread; the closure itself does nothing.
    COM_INITIALIZED.with(|_| {});
}
|
// svd2rust-generated accessor API for the read-only TZC_CID3 register
// (component ID byte 3); no writer type is generated for read-only registers.
#[doc = "Register `TZC_CID3` reader"]
pub type R = crate::R<TZC_CID3_SPEC>;
#[doc = "Field `COMP_ID_3` reader - COMP_ID_3"]
pub type COMP_ID_3_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:7 - COMP_ID_3"]
    #[inline(always)]
    pub fn comp_id_3(&self) -> COMP_ID_3_R {
        COMP_ID_3_R::new((self.bits & 0xff) as u8)
    }
}
#[doc = "Component ID 3.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tzc_cid3::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TZC_CID3_SPEC;
impl crate::RegisterSpec for TZC_CID3_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`tzc_cid3::R`](R) reader structure"]
impl crate::Readable for TZC_CID3_SPEC {}
#[doc = "`reset()` method sets TZC_CID3 to value 0xb1"]
impl crate::Resettable for TZC_CID3_SPEC {
    const RESET_VALUE: Self::Ux = 0xb1;
}
|
extern crate byteorder;
use std::env::args;
use std::fs::File;
use std::io::Read;
use std::io::Write;
use std::io::stdin;
use std::io::stdout;
use std::path::Path;
/// The virtual machine memory is 0x8000 elements followed by 8 registers.
type Memory = [u16; 0x8008];

/// The default memory image name used when no arguments are provided.
// `'static` is implied for string-literal consts (clippy:
// redundant_static_lifetimes).
const DEFAULT_BIN_NAME: &str = "challenge.bin";

/// Loads the memory image from the file at the given path.
///
/// Panics if the file cannot be opened.
fn open_program<P: AsRef<Path>>(path: P) -> Memory {
    let mut file = File::open(path).expect("open_program");
    read_program(&mut file)
}
/// Loads the memory image from the given file.
fn read_program<F: Read>(file: &mut F) -> Memory {
use byteorder::{LittleEndian, ReadBytesExt};
let mut mem = [0; 0x8008];
for i in 0 .. 0x8000 {
match file.read_u16::<LittleEndian>().ok() {
Some(x) => mem[i] = x,
None => break
}
}
mem
}
/// Read memory at the given index. If the value is less than 0x8000 then
/// it is a literal and is returned. Otherwise it is a register address and
/// the value of the register is returned.
fn get(mem: &Memory, i: u16) -> u16 {
    let raw = mem[i as usize];
    match raw & 0x8000 {
        0 => raw,
        _ => mem[raw as usize],
    }
}
/// Assign the value `x` into the register specified by the value in memory
/// at index `i`.
fn set(mem: &mut Memory, i: u16, x: u16) {
    let target = mem[i as usize] as usize;
    mem[target] = x
}
/// Attempt to read a single byte of input from stdin.
///
/// Returns `None` on end-of-input or on a read error.
fn read_input() -> Option<u8> {
    stdin().bytes().next().and_then(Result::ok)
}
/// Start executing program loaded in memory at PC 0
///
/// Dispatch loop for the 16-bit VM: opcodes 0..=21, 15-bit arithmetic
/// (results reduced mod 0x8000), a call/return stack, and byte I/O on
/// stdin/stdout. Returns on HALT, on RET/POP with an empty stack, on a
/// failed write to stdout, on exhausted stdin, or on an unknown opcode.
fn vm(mem: &mut Memory) {
    let mut pc = 0;
    let mut stack = Vec::new();
    loop {
        match mem[pc as usize] {
            // HALT
            0 => return,
            // SET a b — register a := value of b
            1 => {
                let b = get(mem, pc+2);
                set(mem, pc+1, b);
                pc += 3
            },
            // PUSH a
            2 => {
                stack.push(get(mem, pc+1));
                pc += 2
            },
            // POP a — empty stack halts the machine
            3 =>
                match stack.pop() {
                    Some(x) => {
                        set(mem, pc+1, x);
                        pc += 2
                    },
                    None => return
                },
            // EQ a b c — a := (b == c) as 0/1
            4 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                let r = if b == c { 1 } else { 0 };
                set(mem, pc+1, r);
                pc += 4
            },
            // GT a b c — a := (b > c) as 0/1
            5 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                let r = if b > c { 1 } else { 0 };
                set(mem, pc+1, r);
                pc += 4
            },
            // JMP a
            6 => pc = get(mem, pc+1),
            // JT a b — jump to b if a is nonzero
            7 => pc = if get(mem, pc+1) > 0 { get(mem, pc+2) } else { pc + 3 },
            // JF a b — jump to b if a is zero
            8 => pc = if get(mem, pc+1) == 0 { get(mem, pc+2) } else { pc + 3 },
            // ADD a b c — mod 0x8000; b+c cannot overflow u16 since both < 0x8000
            9 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                set(mem, pc+1, (b+c) % 0x8000);
                pc += 4
            },
            // MULT a b c — wrapping product mod 0x8000 equals the true
            // product mod 0x8000 because 0x8000 divides 0x10000
            10 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                set(mem, pc+1, b.wrapping_mul(c) % 0x8000);
                pc += 4
            },
            // MOD a b c
            // NOTE(review): panics if c == 0 — assumed not to occur in
            // valid programs; confirm against the VM spec.
            11 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                set(mem, pc+1, b%c);
                pc += 4
            },
            // AND a b c
            12 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                set(mem, pc+1, b&c);
                pc += 4
            },
            // OR a b c
            13 => {
                let b = get(mem, pc+2);
                let c = get(mem, pc+3);
                set(mem, pc+1, b|c);
                pc += 4
            },
            // NOT a b — 15-bit bitwise inverse
            14 => {
                let b = get(mem, pc+2);
                set(mem, pc+1, b ^ 0x7fff);
                pc += 3
            },
            // RMEM a b — a := mem[b]
            15 => {
                let x = mem[get(mem, pc+2) as usize];
                set(mem, pc+1, x);
                pc += 3
            },
            // WMEM a b — mem[a] := b
            16 => {
                let a = get(mem, pc+1);
                let b = get(mem, pc+2);
                mem[a as usize] = b;
                pc += 3
            },
            // CALL a — push return address, jump
            17 => {
                stack.push(pc+2);
                pc = get(mem, pc+1)
            },
            // RET — empty stack halts the machine
            18 =>
                match stack.pop() {
                    Some(addr) => pc = addr,
                    None => return
                },
            // OUT a — write low byte to stdout; an I/O error halts
            19 => {
                let c = get(mem,pc+1);
                if stdout().write_all(&[c as u8]).is_err() {
                    return
                }
                pc += 2
            },
            // IN a — read one byte from stdin; EOF halts
            20 => {
                match read_input() {
                    Some(x) => set(mem, pc+1, x as u16),
                    None => return,
                };
                pc += 2
            },
            // NO OP
            21 => pc += 1,
            // BAD — unknown opcode halts
            _ => return
        }
    }
}
/// Entry point: loads the image named by the first CLI argument
/// (default `challenge.bin`) and runs it to completion.
fn main() {
    // `unwrap_or_else` avoids allocating the default string when an
    // argument was supplied (clippy: or_fun_call).
    let path = args().nth(1).unwrap_or_else(|| DEFAULT_BIN_NAME.to_string());
    vm(&mut open_program(path));
}
|
/// A node property.
///
/// # Semantics
///
/// A property contained in a [`greater_elements::PropertyDrawer`].
///
/// # Syntax
///
/// Follows one of these patterns:
///
/// - `:NAME: VALUE`
/// - `:NAME+: VALUE`
/// - `:NAME:`
/// - `:NAME+:`
///
/// `NAME` can contain any non-whitespace character but can't be an empty string or end with a
/// plus sign (`+`).
///
/// `VALUE` can contain anything but a newline character.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NodeProperty {
    /// The property name (`NAME` above, possibly including a trailing `+`
    /// marker depending on how the parser stores it — see parser).
    pub name: String,
    /// The property value (`VALUE` above); empty for the value-less forms.
    pub value: String,
}
|
use std::{fs::File, io::{BufRead, BufReader}};
/// Seat-occupation cellular automaton (Advent of Code 2020 day 11 layout):
/// part 1 uses adjacent neighbours with a tolerance of 4, part 2 uses the
/// first visible seat in each direction with a tolerance of 5. Each part
/// iterates until a full pass produces no changes, then prints the count
/// of occupied (`#`) seats.
fn main() {
    let file = File::open("inputs/input-11.txt").unwrap();
    let mut seats: Vec<Vec<char>> = BufReader::new(file).lines().map(|l| l.unwrap().chars().collect::<Vec<char>>()).collect();
    // Keep a pristine copy for part 2 before part 1 mutates `seats`.
    let mut seats2 = seats.clone();
    let mut change = true;
    while change {
        change = false;
        // All rules apply simultaneously: read from `seats`, write `cloned`.
        let mut cloned = seats.clone();
        for (r, row) in seats.iter().enumerate() {
            for (c, col) in row.iter().enumerate() {
                let neighbours = get_occ_neighbours(&seats, r, c);
                // Empty seat with no occupied neighbours becomes occupied.
                if col == &'L' && neighbours == 0 {
                    cloned[r][c] = '#';
                    change = true;
                }
                // Occupied seat with 4+ occupied neighbours empties.
                if col == &'#' && neighbours >= 4 {
                    cloned[r][c] = 'L';
                    change = true;
                }
            }
        }
        seats = cloned;
    }
    println!("occupied {:?}", seats.iter().map(|v| v.iter().filter(|&x| x == &'#').count()).sum::<usize>());
    let mut change = true;
    while change {
        change = false;
        let mut cloned = seats2.clone();
        for (r, row) in seats2.iter().enumerate() {
            for (c, col) in row.iter().enumerate() {
                // Part 2: line-of-sight neighbours, tolerance of 5.
                let neighbours = get_see_occ_neighbours(&seats2, r, c);
                if col == &'L' && neighbours == 0 {
                    cloned[r][c] = '#';
                    change = true;
                }
                if col == &'#' && neighbours >= 5 {
                    cloned[r][c] = 'L';
                    change = true;
                }
            }
        }
        seats2 = cloned;
    }
    println!("occupied seen {:?}", seats2.iter().map(|v| v.iter().filter(|&x| x == &'#').count()).sum::<usize>());
}
#[inline(always)]
/// Counts occupied (`'#'`) seats among the up-to-8 immediate neighbours of
/// `(row, col)`. Assumes a rectangular grid (every row as wide as row 0),
/// the same assumption the previous `in_bounds` helper made.
///
/// Counts directly instead of collecting neighbours into an intermediate
/// `Vec` and filtering it afterwards.
fn get_occ_neighbours(seats: &Vec<Vec<char>>, row: usize, col: usize) -> usize {
    let max_row = seats.len() as i32;
    let max_col = seats[0].len() as i32;
    let mut occupied = 0;
    for dr in &[-1i32, 0, 1] {
        for dc in &[-1i32, 0, 1] {
            if *dr == 0 && *dc == 0 {
                continue;
            }
            let (r, c) = (row as i32 + dr, col as i32 + dc);
            if r >= 0 && c >= 0 && r < max_row && c < max_col
                && seats[r as usize][c as usize] == '#'
            {
                occupied += 1;
            }
        }
    }
    occupied
}
#[inline(always)]
/// Counts occupied (`'#'`) seats visible from `(row, col)`: in each of the
/// 8 directions, the first non-floor (`'.'`) cell blocks the view, and is
/// counted when it is occupied. Assumes a rectangular grid.
///
/// Counts directly instead of pushing every first-seen seat into an
/// intermediate `Vec` and filtering it afterwards.
fn get_see_occ_neighbours(seats: &Vec<Vec<char>>, row: usize, col: usize) -> usize {
    let max_row = seats.len() as i32;
    let max_col = seats[0].len() as i32;
    let mut occupied = 0;
    for dr in &[-1i32, 0, 1] {
        for dc in &[-1i32, 0, 1] {
            if *dr == 0 && *dc == 0 {
                continue;
            }
            // Walk along the ray until the grid edge or a seat.
            let (mut r, mut c) = (row as i32 + dr, col as i32 + dc);
            while r >= 0 && c >= 0 && r < max_row && c < max_col {
                let seat = seats[r as usize][c as usize];
                if seat != '.' {
                    if seat == '#' {
                        occupied += 1;
                    }
                    break;
                }
                r += dr;
                c += dc;
            }
        }
    }
    occupied
}
#[inline(always)]
/// True when `(row, col)` addresses a cell of the (rectangular) grid.
/// Negative coordinates are rejected before the grid is inspected.
fn in_bounds(seats: &Vec<Vec<char>>, row: i32, col: i32) -> bool {
    row >= 0
        && col >= 0
        && row < seats.len() as i32
        && col < seats[0].len() as i32
}
|
extern crate irc;
use irc::client::prelude::*;
// Minimal IRC bot: connects using config.json, logs every incoming
// message, and replies "Hi!" to any channel PRIVMSG.
fn main() {
    let server = IrcServer::new("config.json").unwrap();
    // Registers with the server (NICK/USER handshake).
    server.identify().unwrap();
    for message in server.iter() {
        let message = message.unwrap(); // We'll just panic if there's an error.
        println!("{}", message.into_string());
        match message.command {
            // Only answer in channels (targets starting with '#'),
            // never in private queries.
            Command::PRIVMSG(ref target, ref msg) => if target.starts_with("#") {
                server.send_privmsg(target, "Hi!").unwrap();
            },
            _ => (),
        }
    }
}
|
//! TODO docs
use crate::{Read, ReadError, Write, WriteError};
use std::ops::Deref;
/// A stream of bytes
/// A stream of bytes
pub struct DataStream {
    /// Backing buffer holding every byte of the stream.
    bytes: Vec<u8>,
    /// Cursor: index into `bytes` where the next read/write happens;
    /// advanced by the `Read`/`Write` implementations.
    pos: usize,
}
impl DataStream {
    /// Read something from the stream
    ///
    /// Decodes a `T` starting at the current position; the `Read`
    /// implementation advances `pos` past the consumed bytes.
    ///
    /// # Errors
    /// Returns `ReadError::NotEnoughBytes` when the position is past the
    /// end of the buffer, plus whatever `T::read` reports.
    #[inline]
    pub fn read<T: Read>(&mut self) -> Result<T, ReadError> {
        let bytes = self
            .bytes
            .get(self.pos..)
            .ok_or(ReadError::NotEnoughBytes)?;
        T::read(bytes, &mut self.pos)
    }
    /// Write something to the stream
    ///
    /// # Errors
    /// Returns `WriteError::NotEnoughSpace` when the position is past the
    /// end of the buffer, plus whatever `thing.write` reports.
    #[inline]
    pub fn write<T: Write>(&mut self, thing: &T) -> Result<(), WriteError> {
        let bytes = self
            .bytes
            .get_mut(self.pos..)
            .ok_or(WriteError::NotEnoughSpace)?;
        thing.write(bytes, &mut self.pos)
    }
    /// Gets bytes as slice
    ///
    /// Returns the bytes from the current position onwards. If `pos` has
    /// somehow overrun the buffer, the whole buffer is returned as a
    /// best-effort fallback (preserving the previous behaviour).
    #[inline]
    pub fn as_bytes(&self) -> &[u8] {
        // `&self.bytes` is free to produce, so the lazy closure the previous
        // version used was unnecessary (clippy: unnecessary_lazy_evaluations).
        self.bytes.get(self.pos..).unwrap_or(&self.bytes)
    }
    /// Resets the data stream position
    #[inline]
    pub fn reset(&mut self) {
        self.pos = 0;
    }
    /// Get the current position
    #[inline]
    pub const fn position(&self) -> usize {
        self.pos
    }
    /// Gets the remaining number of bytes
    ///
    /// Saturates at zero instead of panicking (debug) / wrapping (release)
    /// if `pos` ever overruns the buffer length.
    #[inline]
    pub fn remaining(&self) -> usize {
        self.bytes.len().saturating_sub(self.pos)
    }
}
impl From<Vec<u8>> for DataStream {
fn from(bytes: Vec<u8>) -> Self {
Self { bytes, pos: 0 }
}
}
impl Deref for DataStream {
type Target = [u8];
fn deref(&self) -> &Self::Target {
self.as_bytes()
}
}
|
// Teaching example: deliberately rejected by the borrow checker to show
// why references must not outlive the value they borrow.
fn main() {
    // Define y of type reference to
    // an immutable int.
    let y: &i32;
    {
        let x = 5;
        // This is not allowed: `y` would borrow `x`, which only lives
        // until the end of this inner scope.
        y = &x;
        // x is freed here.
    }
    // We would now have a reference to an invalid
    // memory location, which is why rustc refuses to compile this.
    println!("{}", y);
    // y is freed here.
}
|
//给定一个字符串 s,找到 s 中最长的回文子串。你可以假设 s 的最大长度为 1000。
//
//示例 1:
//
//输入: "babad"
//输出: "bab"
//注意: "aba" 也是一个有效答案。
//示例 2:
//
//输入: "cbbd"
//输出: "bb"
use std::cmp::max;
type MaxStr = (usize, String);
struct Solution {
}
impl Solution {
    /// Returns the longest palindromic substring of `s` using
    /// expand-around-center (O(n^2) time, O(1) extra space).
    ///
    /// Fixes two defects of the previous version: it panicked on the empty
    /// string (`0..s.len()-1` underflow) and it never recorded length-1
    /// palindromes, returning "" for inputs like "abc".
    ///
    /// NOTE(review): indexes by byte, so it assumes ASCII input — the same
    /// limitation the previous slicing code had; confirm callers never pass
    /// multi-byte UTF-8.
    fn longest_palindrome(s:&String) -> String {
        let bytes = s.as_bytes();
        let mut best = (0usize, 0usize); // (start, length)
        for centre in 0..bytes.len() {
            // Odd-length palindromes centred on `centre`.
            let odd = Self::expand(bytes, centre, centre);
            if odd.1 > best.1 {
                best = odd;
            }
            // Even-length palindromes centred between `centre` and `centre+1`.
            let even = Self::expand(bytes, centre, centre + 1);
            if even.1 > best.1 {
                best = even;
            }
        }
        s[best.0..best.0 + best.1].to_string()
    }
    /// Grows a candidate palindrome outwards from the seed `[lo, hi]` while
    /// the boundary bytes match; returns `(start, length)` (length 0 when
    /// the seed itself is not a palindrome or `hi` is out of range).
    fn expand(bytes: &[u8], mut lo: usize, mut hi: usize) -> (usize, usize) {
        if hi >= bytes.len() || bytes[lo] != bytes[hi] {
            return (lo, 0);
        }
        while lo > 0 && hi + 1 < bytes.len() && bytes[lo - 1] == bytes[hi + 1] {
            lo -= 1;
            hi += 1;
        }
        (lo, hi - lo + 1)
    }
}
/// True when the one-byte slices of `s` at positions `i` and `j` are equal.
/// Positions at or past the end of the string never match.
fn judge_char_equal(s:&String, i:usize, j:usize) -> bool {
    if i >= s.len() || j >= s.len() {
        return false;
    }
    s.get(i..=i) == s.get(j..=j)
}
// Smoke tests for `Solution::longest_palindrome` on two fixed inputs.
fn main() {
    assert_eq!("abacaba", Solution::longest_palindrome(&"abacabac".to_string()));
    assert_eq!("abaccaba", Solution::longest_palindrome(&"abaccabac".to_string()));
    println!("{}", "finish");
}
|
extern crate pest;
extern crate rust_orgmode;
use pest::Parser;
use rust_orgmode::parsing::{OrgModeParser, Rule};
use std::fs::{self, File};
use std::io::Read;
/// Iterates over every regular file in `tests/correct`, opened for reading.
///
/// Unreadable directory entries and unopenable files are silently skipped
/// (same behaviour as before, with the nested match/if-let collapsed into
/// `Option` combinators). Panics only when the directory itself is missing.
fn test_files() -> impl Iterator<Item = File> {
    fs::read_dir("tests/correct").unwrap().filter_map(|entry| {
        let path = entry.ok()?.path();
        if path.is_file() {
            File::open(path).ok()
        } else {
            None
        }
    })
}
#[test]
// Every sample document in tests/correct must be accepted by the pest
// grammar's top-level `document` rule.
fn parsing_succeeds() {
    test_files().for_each(|mut file| {
        let mut contents = String::new();
        file.read_to_string(&mut contents).unwrap();
        OrgModeParser::parse(Rule::document, &contents).unwrap();
    })
}
#[test]
// Beyond raw grammar acceptance, every sample must also convert into the
// crate's document model without error.
fn parsing_produces_document() {
    test_files().for_each(|mut file| {
        let mut contents = String::new();
        file.read_to_string(&mut contents).unwrap();
        rust_orgmode::parsing::parse_document(&contents).unwrap();
    })
}
|
use std::os::raw::{c_double, c_int, c_uint};
use crate::point::Point;
use crate::size::WorldSize;
use crate::game::{Game, GameEvent, GameState};
use crate::bullet::BulletType;
use crate::swarm::Swarm;
use crate::shield::Shield;
use crate::particle::{make_explosion, Particle};
use std::sync::mpsc;
use std::sync::mpsc::{Receiver,Sender};
// Rendering and HUD callbacks implemented on the host (JavaScript) side of
// the FFI boundary; coordinates are screen-space values.
extern "C" {
    fn clear_screen();
    fn draw_player(_: c_double, _: c_double, _: c_double);
    fn draw_bullet(_: c_double, _: c_double);
    fn draw_player_bullet(_: c_double, _: c_double);
    fn draw_particle(_: c_double, _: c_double, _: c_double, _: c_int);
    fn draw_ufo(_: c_double, _: c_double);
    fn draw_hud(_: c_int, _: c_int, _: c_int);
    fn draw_intro();
    fn draw_game_over(_: c_int);
    // fn draw_debug(_: c_double, _: c_double, _: c_double, _: c_double);
    // id, x,y, dim
    fn draw_shield(_: c_int, _: c_double, _: c_double, _: c_double);
    // sprite id, frame index, x, y
    fn draw_sprite(_: c_uint, _: c_uint, _: c_uint, _: c_uint);
    fn update_local_score(_: c_int);
}
/// Rendering state: the world-to-screen transform, live particles, and a
/// channel over which the game pushes events to the renderer.
pub struct RenderData {
    /// Letterbox offset (world units) applied before scaling.
    pub screen_top_left_offset: Point,
    /// Scale factor from world units to screen pixels.
    pub game_to_screen: f64,
    /// Screen width in pixels.
    pub width: usize,
    /// Screen height in pixels.
    pub height: usize,
    /// Particles currently being animated (explosions etc.).
    pub particles: Vec<Particle>,
    // Renderer-side end of the game-event channel.
    receiver: Receiver<GameEvent>,
    /// Game-side end of the channel; cloneable sender handed to the game.
    pub sender: Sender<GameEvent>,
}
impl RenderData {
    /// Creates render state for a 1024x768 screen, a 1:1 world-to-screen
    /// scale, and a fresh game-event channel.
    pub fn new() -> Self {
        let (tx,rx) = mpsc::channel();
        RenderData{
            screen_top_left_offset: Point::new(0.0,0.0),
            game_to_screen: 1.,
            width: 1024,
            height: 768,
            particles: Vec::with_capacity(1000),
            receiver: rx,
            sender: tx,
        }
    }
    /// Maps a world-space point to screen pixels: apply the letterbox
    /// offset first, then the scale factor.
    pub fn world_to_screen(&self, in_point: &Point) -> Point {
        Point{
            x: (in_point.x + self.screen_top_left_offset.x) * self.game_to_screen,
            y: (in_point.y + self.screen_top_left_offset.y) * self.game_to_screen,
        }
    }
    /// Recomputes the transform for a new screen size and returns the
    /// resulting scale factor. When the world fits, it is centred at 1:1;
    /// otherwise it is scaled down along the overflowing axis.
    pub fn resize(&mut self, world_size: WorldSize, width: f64, height: f64) -> f64 {
        self.width = width.trunc() as usize;
        self.height = height.trunc() as usize;
        if world_size.width < width && world_size.height < height {
            self.screen_top_left_offset.x = (width - world_size.width) / 2.;
            self.screen_top_left_offset.y = (height - world_size.height) / 2.;
            self.game_to_screen = 1.;
            return self.game_to_screen;
        }
        // this stuff doesn't work very well...
        if world_size.width > width {
            self.game_to_screen = width / world_size.width;
            // this isn't quite right; it needs some sort of scaling
            self.screen_top_left_offset.y = (height - world_size.height) / 2.;
        }
        else if world_size.height > height {
            self.game_to_screen = height / world_size.height;
            // this isn't quite right; it needs some sort of scaling
            self.screen_top_left_offset.x = (width - world_size.width) / 2.;
        }
        self.game_to_screen
    }
    /// Draws every living enemy in the swarm, choosing a sprite by row.
    unsafe fn draw_swarm(&self, swarm: &Swarm) {
        // enable to draw bounds
        // let br = swarm.get_bottom_right();
        // draw_bounds(data.screen_top_left_offset.x + swarm.top_left.x * data.game_to_screen, data.screen_top_left_offset.y + swarm.top_left.y * data.game_to_screen,
        // br.x * data.game_to_screen, br.y * data.game_to_screen);
        // is there a better iterator way to do this?
        for i in 0..swarm.num_x {
            for j in 0..swarm.num_y {
                // `alive` is a row-major flattened grid of num_x * num_y flags.
                if swarm.alive[j*swarm.num_x+i] {
                    let p = self.world_to_screen(&swarm.get_enemy_location(i,j));
                    // Sprite index depends on the enemy's row.
                    let index = match j {
                        0 => 1,
                        1|2 => 2,
                        _ => 0, // 3|4
                    };
                    draw_sprite(index, swarm.frame, p.x as u32,p.y as u32);
                }
            }
        }
    }
    /// Reacts to a single game event: update the HUD score or spawn an
    /// explosion's particles.
    unsafe fn handle_game_event(&mut self, event: GameEvent) {
        match event {
            GameEvent::ScoreChanged(i) => {
                update_local_score(i);
            },
            GameEvent::EntityDied(p,c) => {
                let particles = &mut self.particles;
                make_explosion(particles, &p, 6, c);
            }
        }
    }
    /// Renders one frame: drains at most one pending game event, ages and
    /// draws particles, then draws the scene for the current game state.
    pub unsafe fn draw(&mut self, game_state: GameState, game: &Game, dt: f64) {
        let world = &game.world;
        clear_screen();
        // Non-blocking receive: at most one event is processed per frame.
        match self.receiver.try_recv() {
            Ok(event) => {
                self.handle_game_event(event);
            },
            Err(_) => {
            }
        }
        {
            // Drop expired particles before updating/drawing the rest.
            let particles = &mut self.particles;
            particles.retain(|particle| {
                particle.ttl > 0.0
            });
        }
        for particle in &mut self.particles {
            particle.update(dt);
        }
        for particle in &self.particles {
            let world_pos = self.world_to_screen(&particle.vector.position);
            // Particle size shrinks with its remaining time-to-live.
            draw_particle(world_pos.x, world_pos.y, 5.0 * particle.ttl, particle.get_colour_index());
        }
        match game_state {
            GameState::Intro(_) => {
                draw_intro();
            },
            GameState::Playing | GameState::Death(_) | GameState::Win(_) => {
                for bullet in &world.bullets {
                    let bp = self.world_to_screen(&bullet.location.position);
                    draw_bullet(bp.x, bp.y);
                }
                if let BulletType::Player(alive) = world.player_bullet.bullet_type {
                    if alive {
                        let bp = self.world_to_screen(&world.player_bullet.location.position);
                        draw_player_bullet(bp.x, bp.y);
                    }
                }
                let p = self.world_to_screen(&Point{x: world.player.x(), y: world.player.y()});
                if world.player.alive {
                    draw_player(p.x, p.y, world.player.dir());
                }
                self.draw_swarm(&world.swarm);
                for (index,shield) in world.shields.iter().enumerate() {
                    let screen_pos = self.world_to_screen(&shield.top_left);
                    draw_shield(index as i32, screen_pos.x, screen_pos.y, Shield::BLOCK_DIM * self.game_to_screen);
                }
                if world.ufo.active {
                    let screen_pos = self.world_to_screen(&world.ufo.position);
                    draw_ufo(screen_pos.x, screen_pos.y);
                }
            },
            GameState::GameOver(_) => {
                draw_game_over(game.score);
            },
        }
        // HUD is drawn last, on top of everything else.
        draw_hud(game.score, game.lives, game.wave);
    }
}
|
use std::collections::HashMap;
/// Reads one line from stdin and parses it into `T`; panics when the
/// trimmed line does not parse.
fn read<T: std::str::FromStr>() -> T {
    let mut line = String::new();
    std::io::stdin().read_line(&mut line).ok();
    line.trim().parse().ok().unwrap()
}
/// Reads one line of whitespace-separated values from stdin; panics when
/// any token fails to parse.
fn read_vec<T: std::str::FromStr>() -> Vec<T> {
    read::<String>()
        .split_whitespace()
        .map(|token| token.parse().ok().unwrap())
        .collect()
}
/// Counts contiguous subarrays of the first `n` elements of `a` whose sum
/// is divisible by `m`.
///
/// Classic prefix-sum trick: two equal prefix remainders delimit a
/// divisible subarray, so for each prefix we add the number of earlier
/// prefixes with the same remainder. The remainder is normalised into
/// `0..m`, so negative elements are now handled correctly as well (the
/// previous version relied on non-negative input).
fn solve(n : i32, m : i32, a : Vec<i32>) -> i64 {
    let mut counts = HashMap::<i32, i32>::new();
    counts.reserve((n + 1) as usize);
    let mut ans: i64 = 0;
    let mut acc: i32 = 0;
    // The empty prefix has remainder 0.
    counts.insert(acc, 1);
    for &x in a.iter().take(n as usize) {
        acc = ((acc + x) % m + m) % m;
        let seen = counts.entry(acc).or_insert(0);
        ans += *seen as i64;
        *seen += 1;
    }
    ans
}
/// Reads "n m" on the first line and the n array elements on the second,
/// then prints the answer.
fn main() {
    let params = read_vec::<i32>();
    let values = read_vec::<i32>();
    println!("{}", solve(params[0], params[1], values));
}
|
// Copyright 2015-2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::{
errors::Error,
token::{StrictTokenizer, Tokenizer},
Uint,
};
use std::borrow::Cow;
use once_cell::sync::Lazy;
// Matches "<integer>[.<fraction>][spaces]<unit>", e.g. "0.1 gwei".
// Group 1: integer part, group 2: optional ".fraction", group 3: unit.
static RE: Lazy<regex::Regex> =
    Lazy::new(|| regex::Regex::new(r"^([0-9]+)(\.[0-9]+)?\s*(ether|gwei|nanoether|nano|wei)$").expect("invalid regex"));
/// Tries to parse string as a token. Does not require string to clearly represent the value.
///
/// Delegates to [`StrictTokenizer`] for every type; for integers it
/// additionally accepts decimal values with ether/gwei/wei unit suffixes.
pub struct LenientTokenizer;
impl Tokenizer for LenientTokenizer {
    // All types except the integers below delegate to the strict tokenizer.
    fn tokenize_address(value: &str) -> Result<[u8; 20], Error> {
        StrictTokenizer::tokenize_address(value)
    }
    fn tokenize_string(value: &str) -> Result<String, Error> {
        StrictTokenizer::tokenize_string(value)
    }
    fn tokenize_bool(value: &str) -> Result<bool, Error> {
        StrictTokenizer::tokenize_bool(value)
    }
    fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error> {
        StrictTokenizer::tokenize_bytes(value)
    }
    fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error> {
        StrictTokenizer::tokenize_fixed_bytes(value, len)
    }
    // Accepts strict uints, plain decimal strings, and decimals with a
    // unit suffix ("1.5 ether"); the value is scaled into wei.
    fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> {
        let result = StrictTokenizer::tokenize_uint(value);
        if result.is_ok() {
            return result;
        }
        // Tries to parse it as is first. If it fails, tries to check for
        // expectable units with the following format: 'Number[Spaces]Unit'.
        // If regex fails, then the original FromDecStrErr should take priority
        let uint = match Uint::from_dec_str(value) {
            Ok(_uint) => _uint,
            Err(dec_error) => {
                let original_dec_error = dec_error.to_string();
                match RE.captures(value) {
                    Some(captures) => {
                        // Group 1: integer part, group 2: optional ".fraction",
                        // group 3: the unit word.
                        let integer = captures.get(1).expect("capture group does not exist").as_str();
                        let fract = captures.get(2).map(|c| c.as_str().trim_start_matches('.')).unwrap_or_else(|| "");
                        let units = captures.get(3).expect("capture group does not exist").as_str();
                        // Decimal exponent of the unit (wei = 10^0 ... ether = 10^18).
                        let units = Uint::from(match units.to_lowercase().as_str() {
                            "ether" => 18,
                            "gwei" | "nano" | "nanoether" => 9,
                            "wei" => 0,
                            _ => return Err(dec_error.into()),
                        });
                        let integer = Uint::from_dec_str(integer)?.checked_mul(Uint::from(10u32).pow(units));
                        if fract.is_empty() {
                            integer.ok_or(dec_error)?
                        } else {
                            // makes sure we don't go beyond 18 decimals
                            let fract_pow = units.checked_sub(Uint::from(fract.len())).ok_or(dec_error)?;
                            let fract = Uint::from_dec_str(fract)?
                                .checked_mul(Uint::from(10u32).pow(fract_pow))
                                .ok_or_else(|| Error::Other(Cow::Owned(original_dec_error.clone())))?;
                            integer
                                .and_then(|integer| integer.checked_add(fract))
                                .ok_or(Error::Other(Cow::Owned(original_dec_error)))?
                        }
                    }
                    None => return Err(dec_error.into()),
                }
            }
        };
        Ok(uint.into())
    }
    // We don't have a proper signed int 256-bit long type, so here we're cheating. We build a U256
    // out of it and check that it's within the lower/upper bound of a hypothetical I256 type: half
    // the `U256::max_value().
    fn tokenize_int(value: &str) -> Result<[u8; 32], Error> {
        let result = StrictTokenizer::tokenize_int(value);
        if result.is_ok() {
            return result;
        }
        let abs = Uint::from_dec_str(value.trim_start_matches('-'))?;
        let max = Uint::max_value() / 2;
        let int = if value.starts_with('-') {
            if abs.is_zero() {
                return Ok(abs.into());
            } else if abs > max + 1 {
                // The most negative representable value is -(max + 1) = -2^255.
                return Err(Error::Other(Cow::Borrowed("int256 parse error: Underflow")));
            }
            !abs + 1 // two's complement
        } else {
            if abs > max {
                return Err(Error::Other(Cow::Borrowed("int256 parse error: Overflow")));
            }
            abs
        };
        Ok(int.into())
    }
}
#[cfg(test)]
mod tests {
    use ethereum_types::FromDecStrErr;
    use crate::{
        errors::Error,
        token::{LenientTokenizer, Token, Tokenizer},
        ParamType, Uint,
    };
    #[test]
    fn tokenize_uint() {
        assert_eq!(
            LenientTokenizer::tokenize(
                &ParamType::Uint(256),
                "1111111111111111111111111111111111111111111111111111111111111111"
            )
            .unwrap(),
            Token::Uint([0x11u8; 32].into())
        );
    }
    #[test]
    fn tokenize_uint_wei() {
        assert_eq!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1wei").unwrap(), Token::Uint(Uint::from(1)));
        assert_eq!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1 wei").unwrap(), Token::Uint(Uint::from(1)));
    }
    #[test]
    fn tokenize_uint_gwei() {
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "1nano").unwrap(),
            Token::Uint(Uint::from_dec_str("1000000000").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "1nanoether").unwrap(),
            Token::Uint(Uint::from_dec_str("1000000000").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "1gwei").unwrap(),
            Token::Uint(Uint::from_dec_str("1000000000").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "0.1 gwei").unwrap(),
            Token::Uint(Uint::from_dec_str("100000000").unwrap())
        );
    }
    #[test]
    fn tokenize_uint_ether() {
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "10000000000ether").unwrap(),
            Token::Uint(Uint::from_dec_str("10000000000000000000000000000").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "1ether").unwrap(),
            Token::Uint(Uint::from_dec_str("1000000000000000000").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "0.01 ether").unwrap(),
            Token::Uint(Uint::from_dec_str("10000000000000000").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "0.000000000000000001ether").unwrap(),
            Token::Uint(Uint::from_dec_str("1").unwrap())
        );
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "0.000000000000000001ether").unwrap(),
            LenientTokenizer::tokenize(&ParamType::Uint(256), "1wei").unwrap(),
        );
    }
    #[test]
    fn tokenize_uint_array_ether() {
        assert_eq!(
            LenientTokenizer::tokenize(&ParamType::Array(Box::new(ParamType::Uint(256))), "[1ether,0.1 ether]")
                .unwrap(),
            Token::Array(vec![
                Token::Uint(Uint::from_dec_str("1000000000000000000").unwrap()),
                Token::Uint(Uint::from_dec_str("100000000000000000").unwrap())
            ])
        );
    }
    #[test]
    fn tokenize_uint_invalid_units() {
        // The previous version used `matches!(…, Err(_error))`, but inside a
        // pattern `_error` is a fresh binding that matches *any* error — it
        // never compared against the value bound above. `Err(_)` now states
        // that honestly; the binding below only documents the expected kind.
        let _error = Error::from(FromDecStrErr::InvalidCharacter);
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "0.1 wei"), Err(_)));
        // 0.1 wei
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "0.0000000000000000001ether"), Err(_)));
        // 1 ether + 0.1 wei
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1.0000000000000000001ether"), Err(_)));
        // 1_000_000_000 ether + 0.1 wei
        assert!(matches!(
            LenientTokenizer::tokenize(&ParamType::Uint(256), "1000000000.0000000000000000001ether"),
            Err(_)
        ));
        // Malformed decimal syntax is rejected by the regex / parser.
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "0..1 gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "..1 gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1. gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), ".1 gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "2.1.1 gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), ".1.1 gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1abc"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1 gwei "), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "g 1 gwei"), Err(_)));
        assert!(matches!(LenientTokenizer::tokenize(&ParamType::Uint(256), "1gwei 1 gwei"), Err(_)));
    }
}
|
use libc::{c_void, c_char, c_int, c_uint};
use std::{mem, ptr};
use nix::sys::socket;
use std::net::{SocketAddr, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr};
// nix doesn't have this const
pub const AF_PACKET: i32 = 17;
/// Interface-flag bits reported through the SIOCGIFFLAGS ioctl
/// (`ifa_flags` in `struct ifaddrs`); see netdevice(7).
#[allow(dead_code)]
#[repr(C)]
pub enum SIOCGIFFLAGS {
    IFF_UP = 0x1, /* Interface is up. */
    IFF_BROADCAST = 0x2, /* Broadcast address valid. */
    IFF_DEBUG = 0x4, /* Turn on debugging. */
    IFF_LOOPBACK = 0x8, /* Is a loopback net. */
    IFF_POINTOPOINT = 0x10, /* Interface is point-to-point link. */
    IFF_NOTRAILERS = 0x20, /* Avoid use of trailers. */
    IFF_RUNNING = 0x40, /* Resources allocated. */
    IFF_NOARP = 0x80, /* No address resolution protocol. */
    IFF_PROMISC = 0x100, /* Receive all packets. */
    /* Not supported */
    IFF_ALLMULTI = 0x200, /* Receive all multicast packets. */
    IFF_MASTER = 0x400, /* Master of a load balancer. */
    IFF_SLAVE = 0x800, /* Slave of a load balancer. */
    IFF_MULTICAST = 0x1000, /* Supports multicast. */
    IFF_PORTSEL = 0x2000, /* Can set media type. */
    IFF_AUTOMEDIA = 0x4000, /* Auto media select active. */
    IFF_DYNAMIC = 0x8000 /* Dialup device with changing addresses. */
}
/// Mirrors the anonymous `ifa_ifu` union inside C's `struct ifaddrs`:
/// one pointer that is either the broadcast or the point-to-point
/// destination address, depending on the interface flags.
#[repr(C)]
pub struct union_ifa_ifu {
    pub data: *mut c_void,
}
impl union_ifa_ifu {
    /// Interprets the union as the broadcast address
    /// (meaningful when IFF_BROADCAST is set).
    pub fn ifu_broadaddr (&mut self) -> *mut socket::sockaddr {
        self.data as *mut socket::sockaddr
    }
    /// Interprets the union as the point-to-point destination address
    /// (meaningful when IFF_POINTOPOINT is set).
    pub fn ifu_dstaddr (&mut self) -> *mut socket::sockaddr {
        self.data as *mut socket::sockaddr
    }
}
/// Rust layout of C's `struct ifaddrs` (one node of the linked list
/// produced by `getifaddrs(3)`).
#[repr(C)]
pub struct ifaddrs {
    /// Next entry in the list, or null at the end.
    pub ifa_next: *mut ifaddrs,
    /// NUL-terminated interface name.
    pub ifa_name: *mut c_char,
    /// SIOCGIFFLAGS flag bits for this interface.
    pub ifa_flags: c_uint,
    /// Interface address (may be null).
    pub ifa_addr: *mut socket::sockaddr,
    /// Netmask for `ifa_addr` (may be null).
    pub ifa_netmask: *mut socket::sockaddr,
    /// Broadcast or destination address, depending on flags.
    pub ifa_ifu: union_ifa_ifu,
    /// Address-family specific data (may be null).
    pub ifa_data: *mut c_void,
}
extern "C" {
    /// Fills `ifap` with a heap-allocated linked list of interface
    /// addresses; returns 0 on success, -1 on failure (errno set).
    pub fn getifaddrs (ifap: *mut *mut ifaddrs) -> c_int;
    /// Frees a list previously obtained from `getifaddrs`.
    // The C prototype returns `void`; the previous `-> c_void` declaration
    // did not match the C ABI, so the return type is now unit.
    pub fn freeifaddrs (ifa: *mut ifaddrs);
}
/// Converts a raw C `sockaddr` pointer into a Rust `SocketAddr`.
///
/// Returns `None` for null pointers and for address families other than
/// AF_INET / AF_INET6. The caller must pass a pointer that is either null
/// or valid for reads of the corresponding sockaddr structure.
pub fn convert_sockaddr (sa: *mut socket::sockaddr) -> Option<SocketAddr> {
    if sa.is_null() { return None; }
    match unsafe { *sa }.sa_family as i32 {
        socket::AF_INET => {
            let sa = unsafe { &*(sa as *const socket::sockaddr_in) };
            // s_addr holds the address in network byte order, so its raw
            // in-memory bytes are already most-significant first.
            let addr = sa.sin_addr.s_addr.to_ne_bytes();
            Some(SocketAddr::V4(SocketAddrV4::new(
                Ipv4Addr::new(
                    addr[0],
                    addr[1],
                    addr[2],
                    addr[3],
                ),
                // sin_port is network byte order; SocketAddrV4 expects host
                // order — the previous version passed it through unconverted.
                u16::from_be(sa.sin_port)
            )))
        },
        socket::AF_INET6 => {
            let sa = unsafe { &*(sa as *const socket::sockaddr_in6) };
            // SAFETY: s6_addr is a plain [u8; 16]; reinterpret it as eight
            // 16-bit groups, each stored big-endian.
            let addr: [u16; 8] = unsafe { mem::transmute(sa.sin6_addr.s6_addr) };
            Some(SocketAddr::V6(SocketAddrV6::new(
                Ipv6Addr::new(
                    u16::from_be(addr[0]),
                    u16::from_be(addr[1]),
                    u16::from_be(addr[2]),
                    u16::from_be(addr[3]),
                    u16::from_be(addr[4]),
                    u16::from_be(addr[5]),
                    u16::from_be(addr[6]),
                    u16::from_be(addr[7]),
                ),
                // sin6_port is network byte order, like sin_port above.
                u16::from_be(sa.sin6_port),
                // NOTE(review): flowinfo and scope_id are passed through
                // unchanged, as before — confirm the expected byte order.
                sa.sin6_flowinfo,
                sa.sin6_scope_id
            )))
        },
        _ => None,
    }
}
|
use core::{marker::PhantomData, pin::Pin};
use std::{
collections::HashSet,
sync::{
atomic::{AtomicU64, Ordering},
Arc,
},
};
use futures::{Future, FutureExt};
use tokio::sync::Mutex;
use crate::{
error::StdSyncSendError,
receiver::{
BusPollerCallback, Receiver, ReciveTypedReceiver, SendTypedReceiver, SendUntypedReceiver,
UntypedPollerCallback,
},
receivers, AsyncBatchHandler, AsyncBatchSynchronizedHandler, AsyncHandler,
AsyncSynchronizedHandler, BatchHandler, BatchSynchronizedHandler, Bus, BusInner, Handler,
Message, Relay, SynchronizedHandler, Untyped,
};
static RECEVIER_ID_SEQ: AtomicU64 = AtomicU64::new(1);
/// Factory trait: builds a receiver for messages `M` with responses `R`
/// (error `E`) wrapping a handler of type `T`, together with the callback
/// used to start polling the handler.
pub trait ReceiverSubscriberBuilder<T, M, R, E>:
    SendUntypedReceiver + SendTypedReceiver<M> + ReciveTypedReceiver<R, E>
where
    T: 'static,
    M: Message,
    R: Message,
    E: StdSyncSendError,
{
    /// Receiver-specific configuration (queue sizes, batching, ...).
    type Config: Default;
    /// Builds the receiver plus the poller-start callback.
    fn build(cfg: Self::Config) -> (Self, UntypedPollerCallback)
    where
        Self: Sized;
}
/// Marker for registrations of `Sync` handlers (stored as `Arc<T>`).
pub struct SyncEntry;
/// Marker for registrations of non-`Sync` handlers
/// (stored as `Arc<Mutex<T>>`, see `register_unsync`).
pub struct UnsyncEntry;
/// In-progress registration of one handler: accumulates receivers and
/// pollers from `subscribe*` calls until `done()` flushes them into the
/// payload (a `Module` or `BusBuilder`).
#[must_use]
pub struct RegisterEntry<K, T, F, P, B> {
    // Type-erased handler instance shared with every subscription.
    item: Untyped,
    // The builder object that receives the results in `done()`.
    payload: B,
    // Callback that adds a finished receiver to the payload.
    builder: F,
    // Callback that adds a poller callback to the payload.
    poller: P,
    // Receivers collected so far.
    receivers: HashSet<Receiver>,
    // Poller callbacks collected so far.
    pollers: Vec<BusPollerCallback>,
    _m: PhantomData<(K, T)>,
}
impl<K, T: 'static, F, P, B> RegisterEntry<K, T, F, P, B>
where
    F: FnMut(&mut B, Receiver),
    P: FnMut(&mut B, BusPollerCallback),
{
    /// Consumes the entry, handing every collected receiver and poller to
    /// the payload's callbacks, and returns the finished payload.
    pub fn done(mut self) -> B {
        for receiver in self.receivers {
            (self.builder)(&mut self.payload, receiver);
        }
        for polling in self.pollers {
            (self.poller)(&mut self.payload, polling);
        }
        self.payload
    }
}
impl<T, F, P, B> RegisterEntry<UnsyncEntry, T, F, P, B> {
    /// Registers a receiver built by `S` for messages `M`; `queue` bounds
    /// the receiver's message queue.
    pub fn subscribe<M, S, R, E>(mut self, queue: u64, cfg: S::Config) -> Self
    where
        T: Send + 'static,
        M: Message,
        R: Message,
        E: StdSyncSendError,
        S: ReceiverSubscriberBuilder<T, M, R, E> + 'static,
    {
        let (inner, poller) = S::build(cfg);
        let receiver = Receiver::new::<M, R, E, S>(
            RECEVIER_ID_SEQ.fetch_add(1, Ordering::Relaxed),
            queue,
            true,
            inner,
        );
        // Two pollers per subscription: one drives the handler item, the
        // other drives the receiver itself.
        let poller2 = receiver.start_polling();
        self.receivers.insert(receiver);
        self.pollers.push(poller(self.item.clone()));
        self.pollers.push(poller2);
        self
    }
    /// Blocking handler processed one message at a time.
    #[inline]
    pub fn subscribe_sync<M>(self, queue: u64, cfg: receivers::SynchronizedConfig) -> Self
    where
        T: SynchronizedHandler<M> + Send + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::SynchronizedSync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
    /// Async handler processed one message at a time.
    #[inline]
    pub fn subscribe_async<M>(self, queue: u64, cfg: receivers::SynchronizedConfig) -> Self
    where
        T: AsyncSynchronizedHandler<M> + Send + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::SynchronizedAsync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
    /// Blocking handler receiving messages in batches.
    #[inline]
    pub fn subscribe_batch_sync<M>(
        self,
        queue: u64,
        cfg: receivers::SynchronizedBatchedConfig,
    ) -> Self
    where
        T: BatchSynchronizedHandler<M> + Send + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::SynchronizedBatchedSync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
    /// Async handler receiving messages in batches.
    #[inline]
    pub fn subscribe_batch_async<M>(
        self,
        queue: u64,
        cfg: receivers::SynchronizedBatchedConfig,
    ) -> Self
    where
        T: AsyncBatchSynchronizedHandler<M> + Send + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::SynchronizedBatchedAsync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
}
impl<T, F, P, B> RegisterEntry<SyncEntry, T, F, P, B> {
    /// Registers a receiver built by `S` for messages `M`; `queue` bounds
    /// the receiver's message queue. Requires a `Sync` handler.
    pub fn subscribe<M, S, R, E>(mut self, queue: u64, cfg: S::Config) -> Self
    where
        T: Send + Sync + 'static,
        M: Message,
        R: Message,
        E: StdSyncSendError,
        S: ReceiverSubscriberBuilder<T, M, R, E> + 'static,
    {
        let (inner, poller) = S::build(cfg);
        let receiver = Receiver::new::<M, R, E, S>(
            RECEVIER_ID_SEQ.fetch_add(1, Ordering::Relaxed),
            queue,
            true,
            inner,
        );
        // Two pollers per subscription: one drives the handler item, the
        // other drives the receiver itself.
        let poller2 = receiver.start_polling();
        self.receivers.insert(receiver);
        self.pollers.push(poller(self.item.clone()));
        self.pollers.push(poller2);
        self
    }
    /// Blocking handler; messages may be processed concurrently
    /// (buffer-unordered).
    #[inline]
    pub fn subscribe_sync<M>(self, queue: u64, cfg: receivers::BufferUnorderedConfig) -> Self
    where
        T: Handler<M> + Send + Sync + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::BufferUnorderedSync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
    /// Async handler; messages may be processed concurrently.
    #[inline]
    pub fn subscribe_async<M>(self, queue: u64, cfg: receivers::BufferUnorderedConfig) -> Self
    where
        T: AsyncHandler<M> + Send + Sync + 'static,
        M: Message,
        T::Response: Message,
        T::Error: StdSyncSendError,
    {
        self.subscribe::<M, receivers::BufferUnorderedAsync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
    /// Blocking handler receiving messages in batches.
    #[inline]
    pub fn subscribe_batch_sync<M>(
        self,
        queue: u64,
        cfg: receivers::BufferUnorderedBatchedConfig,
    ) -> Self
    where
        T: BatchHandler<M> + Send + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::BufferUnorderedBatchedSync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
    /// Async handler receiving messages in batches.
    #[inline]
    pub fn subscribe_batch_async<M>(
        self,
        queue: u64,
        cfg: receivers::BufferUnorderedBatchedConfig,
    ) -> Self
    where
        T: AsyncBatchHandler<M> + Send + 'static,
        M: Message,
        T::Response: Message,
    {
        self.subscribe::<M, receivers::BufferUnorderedBatchedAsync<M, T::Response, T::Error>, T::Response, T::Error>(queue, cfg)
    }
}
/// A reusable group of receivers and pollers that can be merged into a
/// `BusBuilder` (or another `Module`) with `add_module`.
#[derive(Default)]
pub struct Module {
    receivers: HashSet<Receiver>,
    pollings: Vec<BusPollerCallback>,
}
impl Module {
    /// Creates an empty module.
    pub fn new() -> Self {
        Self {
            receivers: HashSet::new(),
            pollings: Vec::new(),
        }
    }
    /// Registers a relay receiver and immediately queues its poller.
    pub fn register_relay<S: Relay + Send + Sync + 'static>(mut self, inner: S) -> Self {
        let receiver =
            Receiver::new_relay::<S>(RECEVIER_ID_SEQ.fetch_add(1, Ordering::Relaxed), inner);
        self.pollings.push(receiver.start_polling());
        self.receivers.insert(receiver);
        self
    }
    /// Starts registering a `Sync` handler; finish with
    /// `subscribe*` calls and `done()`.
    pub fn register<T: Send + Sync + 'static>(
        self,
        item: T,
    ) -> RegisterEntry<
        SyncEntry,
        T,
        impl FnMut(&mut Self, Receiver),
        impl FnMut(&mut Self, Box<dyn FnOnce(Bus) -> Pin<Box<dyn Future<Output = ()> + Send>>>),
        Self,
    > {
        RegisterEntry {
            item: Arc::new(item) as Untyped,
            payload: self,
            builder: |p: &mut Self, r| {
                p.receivers.insert(r);
            },
            poller: |p: &mut Self, poller| p.pollings.push(poller),
            receivers: HashSet::new(),
            pollers: Vec::new(),
            _m: Default::default(),
        }
    }
    /// Starts registering a non-`Sync` handler; it is wrapped in a
    /// `tokio::sync::Mutex` so access is serialized.
    pub fn register_unsync<T: Send + 'static>(
        self,
        item: T,
    ) -> RegisterEntry<
        UnsyncEntry,
        T,
        impl FnMut(&mut Self, Receiver),
        impl FnMut(&mut Self, Box<dyn FnOnce(Bus) -> Pin<Box<dyn Future<Output = ()> + Send>>>),
        Self,
    > {
        let item = Arc::new(Mutex::new(item)) as Untyped;
        RegisterEntry {
            item,
            payload: self,
            builder: |p: &mut Self, r| {
                p.receivers.insert(r);
            },
            poller: |p: &mut Self, poller| p.pollings.push(poller),
            receivers: HashSet::new(),
            pollers: Vec::new(),
            _m: Default::default(),
        }
    }
    /// Merges another module's receivers and pollers into this one.
    pub fn add_module(mut self, module: Module) -> Self {
        self.pollings.extend(module.pollings);
        self.receivers.extend(module.receivers);
        self
    }
}
/// Builder for a [`Bus`]: a thin wrapper around a root [`Module`].
pub struct BusBuilder {
    inner: Module,
}
impl BusBuilder {
    /// Creates a builder with an empty root module.
    pub(crate) fn new() -> Self {
        Self {
            inner: Module::new(),
        }
    }
    /// Registers a relay receiver; see [`Module::register_relay`].
    pub fn register_relay<S: Relay + Send + Sync + 'static>(self, inner: S) -> Self {
        let inner = self.inner.register_relay(inner);
        BusBuilder { inner }
    }
    /// Starts registering a `Sync` handler; finish with `subscribe*` calls
    /// and `done()`.
    pub fn register<T: Send + Sync + 'static>(
        self,
        item: T,
    ) -> RegisterEntry<
        SyncEntry,
        T,
        impl FnMut(&mut Self, Receiver),
        impl FnMut(&mut Self, Box<dyn FnOnce(Bus) -> Pin<Box<dyn Future<Output = ()> + Send>>>),
        Self,
    > {
        RegisterEntry {
            item: Arc::new(item) as Untyped,
            payload: self,
            builder: |p: &mut Self, r| {
                p.inner.receivers.insert(r);
            },
            poller: |p: &mut Self, poller| p.inner.pollings.push(poller),
            receivers: HashSet::new(),
            pollers: Vec::new(),
            _m: Default::default(),
        }
    }
    /// Starts registering a non-`Sync` handler (wrapped in a mutex).
    pub fn register_unsync<T: Send + 'static>(
        self,
        item: T,
    ) -> RegisterEntry<
        UnsyncEntry,
        T,
        impl FnMut(&mut Self, Receiver),
        impl FnMut(&mut Self, Box<dyn FnOnce(Bus) -> Pin<Box<dyn Future<Output = ()> + Send>>>),
        Self,
    > {
        RegisterEntry {
            item: Arc::new(Mutex::new(item)) as Untyped,
            payload: self,
            builder: |p: &mut Self, r| {
                p.inner.receivers.insert(r);
            },
            poller: |p: &mut Self, poller| p.inner.pollings.push(poller),
            receivers: HashSet::new(),
            pollers: Vec::new(),
            _m: Default::default(),
        }
    }
    /// Merges a prepared module into the builder's root module.
    pub fn add_module(mut self, module: Module) -> Self {
        self.inner = self.inner.add_module(module);
        self
    }
    /// Finalizes the bus: spawns every poller on the tokio runtime and
    /// returns the bus together with a future that completes when all
    /// pollers have finished.
    pub fn build(self) -> (Bus, impl Future<Output = ()>) {
        let bus = Bus {
            inner: Arc::new(BusInner::new(self.inner.receivers)),
        };
        let mut futs = Vec::with_capacity(self.inner.pollings.len() * 2);
        for poller in self.inner.pollings {
            futs.push(tokio::task::spawn(poller(bus.clone())));
        }
        // The previous version chained `.map(|_| ())` twice; once suffices
        // to discard the join results.
        let poller = futures::future::join_all(futs).map(|_| ());
        bus.init();
        (bus, poller)
    }
}
|
use ring::rand::{SecureRandom, SystemRandom};
use ring::{digest, pbkdf2};
use std::num::NonZeroU32;
// PBKDF2 over HMAC-SHA256; credential length matches the digest output (32 bytes).
static DIGEST_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256;
const CREDENTIAL_LEN: usize = digest::SHA256_OUTPUT_LEN;
/// Derives a PBKDF2-HMAC-SHA256 hash of `secret` with the given salt and
/// iteration count; the output is `CREDENTIAL_LEN` bytes.
///
/// # Panics
/// Panics when `iterations` is zero.
pub fn hash_password(secret: &[u8], salt: &[u8], iterations: u32) -> Vec<u8> {
    let mut out = vec![0u8; CREDENTIAL_LEN];
    let iterations = NonZeroU32::new(iterations).expect("Iterations can't be zero");
    // `salt` is already a slice; the previous `&salt` created a needless
    // `&&[u8]` that only worked through auto-deref (clippy: needless_borrow).
    pbkdf2::derive(DIGEST_ALG, iterations, salt, secret, &mut out);
    out
}
/// Checks `secret` against a previously derived PBKDF2 hash.
///
/// # Panics
/// Panics when `iterations` is zero.
pub fn verify_password(secret: &[u8], salt: &[u8], previous: &[u8], iterations: u32) -> bool {
    let rounds = NonZeroU32::new(iterations).expect("Iterations can't be zero");
    pbkdf2::verify(DIGEST_ALG, rounds, salt, secret, previous).is_ok()
}
/// Returns 64 cryptographically secure random bytes.
pub fn get_random_64() -> Vec<u8> {
    get_random(vec![0u8; 64])
}
/// Fills the supplied buffer with cryptographically secure random bytes
/// and returns it; panics when the system RNG fails.
pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
    let rng = SystemRandom::new();
    rng.fill(&mut array).expect("Error generating random values");
    array
}
|
#![allow(dead_code)]
extern crate fantoccini;
extern crate futures_util;
use fantoccini::{error, Client};
use std::future::Future;
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::path::PathBuf;
use warp::Filter;
/// Builds a WebDriver `Client` for the named browser backend.
///
/// `"firefox"` connects to a driver on port 4444 with headless Firefox
/// options; `"chrome"` connects to port 9515 with headless Chrome options and
/// picks a browser binary by probing well-known install paths. Any other
/// value aborts via `unimplemented!`.
pub async fn select_client_type(s: &str) -> Result<Client, error::NewSessionError> {
    match s {
        "firefox" => {
            let mut caps = serde_json::map::Map::new();
            let opts = serde_json::json!({ "args": ["--headless"] });
            caps.insert("moz:firefoxOptions".to_string(), opts.clone());
            Client::with_capabilities("http://localhost:4444", caps).await
        }
        "chrome" => {
            let mut caps = serde_json::map::Map::new();
            let opts = serde_json::json!({
                "args": ["--headless", "--disable-gpu", "--no-sandbox", "--disable-dev-shm-usage"],
                "binary":
                    if std::path::Path::new("/usr/bin/chromium-browser").exists() {
                        // on Ubuntu, it's called chromium-browser
                        "/usr/bin/chromium-browser"
                    } else if std::path::Path::new("/Applications/Google Chrome.app/Contents/MacOS/Google Chrome").exists() {
                        // macOS
                        "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome"
                    } else {
                        // elsewhere, it's just called chromium
                        "/usr/bin/chromium"
                    }
            });
            caps.insert("goog:chromeOptions".to_string(), opts.clone());
            Client::with_capabilities("http://localhost:9515", caps).await
        }
        browser => unimplemented!("unsupported browser backend {}", browser),
    }
}
pub fn handle_test_error(
res: Result<Result<(), fantoccini::error::CmdError>, Box<dyn std::any::Any + Send>>,
) -> bool {
match res {
Ok(Ok(_)) => true,
Ok(Err(e)) => {
eprintln!("test future failed to resolve: {:?}", e);
false
}
Err(e) => {
if let Some(e) = e.downcast_ref::<error::CmdError>() {
eprintln!("test future panicked: {:?}", e);
} else if let Some(e) = e.downcast_ref::<error::NewSessionError>() {
eprintln!("test future panicked: {:?}", e);
} else {
eprintln!("test future panicked; an assertion probably failed");
}
false
}
}
}
#[macro_export]
/// Runs test function `$f` against a fresh WebDriver client for `$endpoint`.
///
/// The test runs in its own OS thread with its own single-threaded tokio
/// runtime so that a panicking assertion can be caught via `join()`; the
/// client is always closed before the result is propagated.
macro_rules! tester {
    ($f:ident, $endpoint:expr) => {{
        use std::sync::{Arc, Mutex};
        use std::thread;
        let c = common::select_client_type($endpoint);
        // we'll need the session_id from the thread
        // NOTE: even if it panics, so can't just return it
        let session_id = Arc::new(Mutex::new(None));
        // run test in its own thread to catch panics
        let sid = session_id.clone();
        let res = thread::spawn(move || {
            // Single-threaded runtime (tokio 0.2-era `basic_scheduler` API).
            let mut rt = tokio::runtime::Builder::new()
                .enable_all()
                .basic_scheduler()
                .build()
                .unwrap();
            let mut c = rt.block_on(c).expect("failed to construct test client");
            *sid.lock().unwrap() = rt.block_on(c.session_id()).unwrap();
            // make sure we close, even if an assertion fails
            let x = rt.block_on(async move {
                let r = tokio::spawn($f(c.clone())).await;
                let _ = c.close().await;
                r
            });
            drop(rt);
            x.expect("test panicked")
        })
        .join();
        let success = common::handle_test_error(res);
        assert!(success);
    }};
}
#[macro_export]
/// Like `tester!`, but first starts the local fixture web server and passes
/// its bound port to the test function alongside the client.
macro_rules! local_tester {
    ($f:ident, $endpoint:expr) => {{
        let port: u16 = common::setup_server();
        // Adapt `$f(c, port)` to the single-argument shape `tester!` expects.
        let f = move |c: Client| async move { $f(c, port).await };
        tester!(f, $endpoint)
    }};
}
/// Sets up the server and returns the port it bound to.
///
/// Spawns a dedicated thread with its own single-threaded tokio runtime
/// (tokio 0.2-era `basic_scheduler` API) so the warp server keeps running
/// after this call returns; the ephemeral port is reported back over a
/// channel before the server future is awaited.
pub fn setup_server() -> u16 {
    let (tx, rx) = std::sync::mpsc::channel();
    std::thread::spawn(move || {
        let mut rt = tokio::runtime::Builder::new()
            .enable_all()
            .basic_scheduler()
            .build()
            .unwrap();
        let _ = rt.block_on(async {
            let (socket_addr, server) = start_server();
            // Send the port first so the caller can proceed while the
            // server future runs forever on this thread.
            tx.send(socket_addr.port())
                .expect("To be able to send port");
            server.await
        });
    });
    rx.recv().expect("To get the bound port.")
}
/// Configures and starts the server.
///
/// Binds the static-file routes to an ephemeral port on 127.0.0.1 and
/// returns the bound address together with the (not yet awaited) server
/// future.
fn start_server() -> (SocketAddr, impl Future<Output = ()> + 'static) {
    const ASSETS_DIR: &str = "tests/test_html";
    let routes = fileserver(PathBuf::from(ASSETS_DIR));
    let bind_to = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0);
    warp::serve(routes).bind_ephemeral(bind_to)
}
/// Serves files under this directory.
///
/// NOTE(review): `warp::path::end()` is chained *after* `warp::fs::dir`;
/// presumably `fs::dir` consumes the remaining path segments so nested files
/// are still served — confirm, otherwise only the directory root matches.
fn fileserver(
    assets_dir: PathBuf,
) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
    warp::get()
        .and(warp::fs::dir(assets_dir))
        .and(warp::path::end())
}
|
use clap::Clap;
use futures::future::join_all;
use rayon::prelude::*;
use std::fs::File;
use std::io::copy;
use std::io::{self, BufRead};
use std::path::Path;
#[tokio::main]
/// Reads one URL per line from the input file and downloads, concurrently,
/// every URL whose target file does not already exist in the output folder.
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let opts: Opts = Opts::parse();
    // The original used `panic!(err)` with a non-string payload, which is
    // deprecated in the 2018 edition and a hard error in 2021. Propagate the
    // open error via `?` (main returns Result) and format the per-line error.
    let urls: Vec<String> = read_lines(opts.input_file)?
        .map(|l| l.unwrap_or_else(|err| panic!("failed to read line: {}", err)))
        .collect();
    let output_path = Path::new(&opts.output_folder);
    // Filter out already-downloaded files in parallel, then build one
    // download future per remaining URL.
    let urls_to_download: Vec<_> = urls
        .par_iter()
        .filter_map(|url| {
            let file_name = get_url_file_name(url);
            if !output_path.join(&file_name).exists() {
                Some((url, file_name))
            } else {
                None
            }
        })
        .map(|(url, file_name)| download_url(output_path, (url, file_name)))
        .collect();
    // Await all downloads concurrently.
    join_all(urls_to_download).await;
    Ok(())
}
/// Fetches `url` and writes the response body into `output_path`.
///
/// The file name is taken from the last path segment of the *response* URL
/// when present and non-empty, otherwise `file_name` is used. A failed fetch
/// is skipped silently; file-system or body errors panic.
async fn download_url(output_path: &Path, (url, file_name): (&String, String)) {
    println!("Downloading : '{}' - '{}'", url, file_name);
    let response = match reqwest::get(url).await {
        Ok(r) => r,
        Err(_) => return,
    };
    let mut target_file = {
        let chosen_name = response
            .url()
            .path_segments()
            .and_then(|segments| segments.last())
            .filter(|name| !name.is_empty())
            .unwrap_or(&file_name);
        File::create(output_path.join(chosen_name)).unwrap()
    };
    let body = response.bytes().await.unwrap();
    let buffered = body.to_vec();
    let mut reader = buffered.as_slice();
    copy(&mut reader, &mut target_file).unwrap();
    println!("Downloaded : '{}' - '{}'", url, file_name);
}
/// Returns the substring after the last `/` in `url`, used as the local
/// file name for the download.
///
/// Takes `&str` instead of the original `&String` (callers passing
/// `&String` still work via deref coercion). `split('/')` always yields at
/// least one segment, so the panic branch is unreachable for ordinary
/// input and is kept only for defensive clarity.
pub fn get_url_file_name(url: &str) -> String {
    match url.split('/').last() {
        Some(name) => name.to_string(),
        None => panic!("Could not find filename for url: {}", url),
    }
}
// Command-line options for the downloader.
// (Plain `//` comments are used on purpose: clap derives help text from
// `///` doc comments, and adding them could change the generated --help.)
#[derive(Clap)]
#[clap(
    version = "1.0",
    author = "Philip Kristoffersen <philipkristoffersen@gmail.com>"
)]
struct Opts {
    // Positional argument: path to the URL list file.
    #[clap(about = "Text file with one url pr line that should be downloaded")]
    input_file: String,
    // Optional flag (-o/--output-folder); defaults to the current directory.
    #[clap(
        short,
        long,
        default_value = ".",
        about = "the output folder to download the files to"
    )]
    output_folder: String,
}
/// Opens `filename` and returns a lazy iterator over its lines.
///
/// The open error is returned immediately; each yielded item is an
/// `io::Result<String>`, so per-line read errors surface to the caller.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|file| io::BufReader::new(file).lines())
}
|
use futures::{
future::{self, Ready},
prelude::*,
};
use service::World;
use std::{
io,
net::{IpAddr, SocketAddr},
};
use tarpc::{
context,
server::{self, Channel, Handler},
};
use tokio_serde::formats::Json;
/// Per-connection RPC server state: holds the peer's socket address.
#[derive(Clone)]
pub struct HelloServer(SocketAddr);
impl World for HelloServer {
    // Each defined rpc generates two items in the trait, a fn that serves the RPC, and
    // an associated type representing the future output by the fn.
    type HelloFut = Ready<String>;
    /// Serves the `hello` RPC: greets `name` and echoes the peer address
    /// captured at connection time.
    fn hello(self, _: context::Context, name: String) -> Self::HelloFut {
        future::ready(format!(
            "Hello, {}! You are connected from {:?}.",
            name, self.0
        ))
    }
}
#[tokio::main]
/// Starts eight tarpc JSON-over-TCP listeners on ports 8000..8008, each in
/// its own tokio task, and waits for all of them.
async fn main() -> io::Result<()> {
    let mut ths = vec![];
    for port in 8000..8008 {
        let server_addr = format!("127.0.0.1:{}", port);
        let server_addr = server_addr.parse::<SocketAddr>().unwrap();
        let th = tokio::spawn(async move {
            tarpc::serde_transport::tcp::listen(&server_addr, Json::default)
                .await
                .unwrap()
                // Ignore accept errors.
                .filter_map(|r| future::ready(r.ok()))
                .map(server::BaseChannel::with_defaults)
                // Limit channels to 1 per IP.
                .max_channels_per_key(1, |t| t.as_ref().peer_addr().unwrap().ip())
                // serve is generated by the service attribute. It takes as input any type implementing
                // the generated World trait.
                .map(|channel| {
                    let server = HelloServer(channel.as_ref().as_ref().peer_addr().unwrap());
                    channel.respond_with(server.serve()).execute()
                })
                // Handle up to 10 channels concurrently per listener.
                .buffer_unordered(10)
                .for_each(|_| async {
                    println!("Got collection.");
                })
                .await;
        });
        ths.push(th);
    }
    // Propagate the first join error, if any.
    for th in ths.into_iter() {
        th.await?;
    }
    Ok(())
}
|
// #[cfg(test)]
// mod tests {
// #[test]
// fn it_works() {
// assert_eq!(2 + 2, 4);
// }
// }
/// Returns `a + 2`.
pub fn add_two(a: i32) -> i32 {
    internal_adder(a, 2)
}

/// Sums two integers; private helper exercised by the unit tests.
fn internal_adder(a: i32, b: i32) -> i32 {
    b + a
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn internal() {
        // Re-enabled: a child test module can exercise private functions
        // such as `internal_adder` directly, which is the point of this
        // example — the assertion was needlessly commented out.
        assert_eq!(4, internal_adder(2, 2));
        assert_eq!(5, add_two(3));
    }
} |
use std::convert::Infallible;
use syscall::{
Result,
Error,
close,
EIO,
pipe2,
read,
write,
SchemeMut,
Packet,
O_CREAT,
O_RDWR,
O_CLOEXEC, EINTR,
};
#[must_use = "Daemon::ready must be called"]
/// Handle held by a forked daemon child; `ready` signals the parent over a
/// pipe that initialization finished.
pub struct Daemon {
    // Write end of the readiness pipe (child side).
    write_pipe: usize,
}
impl Daemon {
    /// Forks the process. The child runs `f` (which must never return) with
    /// a `Daemon` handle; the parent blocks until the child writes one
    /// readiness byte and returns it, or `EIO` if the pipe closed early.
    pub fn new<F: FnOnce(Daemon) -> Infallible>(f: F) -> Result<u8> {
        let mut pipes = [0; 2];
        pipe2(&mut pipes, 0)?;
        let [read_pipe, write_pipe] = pipes;
        let res = unsafe { libc::fork() };
        assert!(res >= 0);
        if res == 0 {
            // Child: keep only the write end and hand control to `f`.
            let _ = close(read_pipe);
            f(Daemon {
                write_pipe,
            });
            // TODO: Replace Infallible with the never type once it is stabilized.
            unreachable!();
        } else {
            // Parent: keep only the read end and wait for the ready byte.
            let _ = close(write_pipe);
            let mut data = [0];
            let res = read(read_pipe, &mut data);
            let _ = close(read_pipe);
            if res? == 1 {
                //exit(data[0] as usize)?;
                //unreachable!();
                Ok(data[0])
            } else {
                Err(Error::new(EIO))
            }
        }
    }
    /// Signals the parent that the daemon is ready by writing a single zero
    /// byte, then closes the pipe. Returns `EIO` if the write was short.
    pub fn ready(self) -> Result<()> {
        let res = write(self.write_pipe, &[0]);
        let _ = close(self.write_pipe);
        if res? == 1 {
            Ok(())
        } else {
            Err(Error::new(EIO))
        }
    }
}
/// Daemonizes and runs `scheme` as the handler for the scheme named
/// `scheme_name`: opens the `:{scheme_name}` socket, signals readiness, then
/// loops reading packets, dispatching them, and writing replies until the
/// socket reports EOF. `name` is only used in error messages.
pub fn scheme(name: &str, scheme_name: &str, mut scheme: impl SchemeMut) -> Result<()> {
    Daemon::new(move |daemon: Daemon| -> std::convert::Infallible {
        let error_handler = |error| -> ! {
            eprintln!("error in {} daemon: {}", name, error);
            std::process::exit(1)
        };
        let socket = syscall::open(format!(":{}", scheme_name), O_CREAT | O_RDWR | O_CLOEXEC).unwrap_or_else(|error| error_handler(error));
        daemon.ready().unwrap_or_else(|error| error_handler(error));
        let mut packet = Packet::default();
        'outer: loop {
            // Retry reads interrupted by signals; a 0-byte read means the
            // scheme socket was closed, so shut down cleanly.
            'read: loop {
                match syscall::read(socket, &mut packet) {
                    Ok(0) => break 'outer,
                    Ok(_) => break 'read,
                    Err(Error { errno: EINTR }) => continue 'read,
                    Err(other) => error_handler(other),
                }
            }
            scheme.handle(&mut packet);
            // Same EINTR-retry discipline for the reply write.
            'write: loop {
                match syscall::write(socket, &packet) {
                    Ok(0) => break 'outer,
                    Ok(_) => break 'write,
                    Err(Error { errno: EINTR }) => continue 'write,
                    Err(other) => error_handler(other),
                }
            }
        }
        let _ = syscall::close(socket);
        std::process::exit(0);
    })?;
    Ok(())
}
|
use axum::response::IntoResponse;
use miette::Diagnostic;
use reqwest::StatusCode;
use thiserror::Error;
/// Newtype over `miette::Report` so diagnostic errors can be returned from
/// axum handlers.
#[derive(Debug, Diagnostic, Error)]
#[error("MietteError")]
pub struct MietteError(pub(crate) miette::Report);
impl IntoResponse for MietteError {
    /// Reports the error to Sentry, then responds with 500 and the report's
    /// display text as the body.
    fn into_response(self) -> axum::response::Response {
        sentry::capture_error(&self);
        (StatusCode::INTERNAL_SERVER_ERROR, self.0.to_string()).into_response()
    }
}
impl From<miette::Report> for MietteError {
fn from(err: miette::Report) -> Self {
MietteError(err)
}
}
|
mod database;
mod model;
pub use self::database::Database;
pub use self::model::{CustomEmoji, Emoji};
|
{
"id": "7d304d1d-da59-4419-9d97-af59fb4cb5e7",
"$type": "BaseTypesResource",
"BoolField": "true",
"ByteField": "123",
"DoubleField": "123",
"FloatField": "1231.2211",
"IntField": "123",
"LongField": "123",
"StringField": "21312dsadas"
} |
use std::fs::File;
use std::io::Read;
use zip::ZipArchive;
/// Placeholder for a single archive entry (currently holds no data).
pub struct ArchiveFile {}
/// Owns an opened ZIP archive and serves entry contents by name.
pub struct ArchiveManager {
    // The underlying ZIP archive backed by a file handle.
    pub files: ZipArchive<File>,
}
impl ArchiveManager {
    /// Creates a manager over an already-opened ZIP archive.
    pub fn new(files: ZipArchive<File>) -> ArchiveManager {
        ArchiveManager { files }
    }

    /// Reads and returns the full contents of the entry called `name`.
    ///
    /// # Panics
    /// Panics if the entry does not exist or cannot be read.
    pub fn get(&mut self, name: String) -> Vec<u8> {
        let mut resources_content = Vec::new();
        // The original prefixed this statement with a stray `&`, producing an
        // immediately discarded `&usize` (and a must-use/unused-borrow
        // warning); the borrow served no purpose and is removed.
        self.files
            .by_name(name.as_ref())
            .unwrap()
            .read_to_end(&mut resources_content)
            .unwrap();
        resources_content
    }
}
|
use std::fmt::Debug;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Error;
use std::str::FromStr;
/// Reads `filename`, skipping empty lines, and parses each remaining line
/// (trimmed) into a `T`.
///
/// # Panics
/// Panics if a line cannot be read or fails to parse.
pub fn read_data<T>(filename: &str) -> Result<Vec<T>, Error>
where
    T: FromStr,
    <T as FromStr>::Err: Debug,
{
    let reader = BufReader::new(File::open(filename)?);
    let mut parsed = Vec::new();
    for line in reader.lines() {
        let line = line.unwrap();
        if line.is_empty() {
            continue;
        }
        parsed.push(line.trim().parse::<T>().unwrap());
    }
    Ok(parsed)
}
/// Reads `filename` as blank-line-separated sections, joining each section's
/// lines with spaces and parsing the joined, trimmed text into a `T`.
///
/// Fixes over the original: an empty section (leading/consecutive/trailing
/// blank lines, or an empty file) is skipped instead of being parsed — the
/// original called `"".parse()` and panicked for most `T`. Line read errors
/// now propagate via `?` instead of panicking.
///
/// # Panics
/// Panics if a non-empty section fails to parse.
pub fn read_data_space_saperator<T>(filename: &str) -> Result<Vec<T>, Error>
where
    T: FromStr,
    <T as FromStr>::Err: Debug,
{
    let file = File::open(filename)?;
    let mut v: Vec<T> = Vec::new();
    let mut s = String::new();
    for line in BufReader::new(file).lines() {
        let l = line?;
        if l == "" {
            // Section boundary: flush only if we accumulated anything.
            if !s.is_empty() {
                v.push(s.trim().parse().unwrap());
                s.clear();
            }
        } else {
            s.push_str(&(" ".to_owned() + l.trim()));
        }
    }
    // Flush the final section, if any.
    if !s.is_empty() {
        v.push(s.trim().parse().unwrap());
    }
    Ok(v)
}
/// Like `read_data_space_saperator`, but joins a section's lines with `sep`
/// instead of a space, preserving internal spacing within sections.
///
/// Same robustness fix as its sibling: empty sections are skipped rather
/// than parsed (the original panicked on `"".parse()` for most `T`), and
/// line read errors propagate via `?`.
///
/// NOTE(review): each line is *prefixed* with `sep`, so for non-whitespace
/// separators the parsed text starts with `sep` (trim only strips
/// whitespace). Preserved as-is — confirm callers rely on / tolerate this.
///
/// # Panics
/// Panics if a non-empty section fails to parse.
pub fn read_data_space_saperator2<T>(filename: &str, sep: char) -> Result<Vec<T>, Error>
where
    T: FromStr,
    <T as FromStr>::Err: Debug,
{
    let file = File::open(filename)?;
    let mut v: Vec<T> = Vec::new();
    let mut s = String::new();
    for line in BufReader::new(file).lines() {
        let l = line?;
        if l == "" {
            // Section boundary: flush only if we accumulated anything.
            if !s.is_empty() {
                v.push(s.trim().parse().unwrap());
                s.clear();
            }
        } else {
            s.push_str(&(sep.to_string().to_owned() + l.trim()));
        }
    }
    // Flush the final section, if any.
    if !s.is_empty() {
        v.push(s.trim().parse().unwrap());
    }
    Ok(v)
}
|
pub mod board;
pub mod nodeman;
pub mod nodelet;
#[cfg(test)]
mod tests {
    use crate::nodeman::NodeMan;
    use crate::nodelet::Nodelet;
    use crate::board::Board;
    use bitop::b16::B16;
    #[test]
    fn test_board_16() {
        let mut board: Board<B16> = Board::<B16>::new();
        // Search from the initial position.
        let ret1: i32 = board.get_best_result_from_start();
        assert_eq!(ret1, -10);
        // Search again starting from the second move.
        let ret2: i32 = board.get_best_result(3648, 32, 1);
        assert_eq!(ret2, -10);
    }
    #[test]
    fn test_nodeman() {
        let mut man: NodeMan = NodeMan::new();
        man.clear(10);
        // The original bound the first `set_move` result to a `mut idlast`
        // and then unconditionally overwrote it (unused-assignment warning);
        // only the id of the last move is needed.
        man.set_move(11, 0);
        man.set_move(12, 0);
        man.set_move(13, 1);
        man.set_move(14, 0);
        let idlast: usize = man.set_move(15, 1);
        assert!(man.len() == idlast + 1);
        man.unset_moves(3);
        assert!(man.len() == 3);
        let moves = man.get_move_list();
        assert!(moves.len() == 4);
    }
    /*
    #[test]
    fn test_nodelet() {
        let mut nodelet : Nodelet = Nodelet::new();
        assert!(nodelet.get_black() == 0u64);
    }
    */
}
|
//! A simple Driver for the Waveshare 2.9" E-Ink Display via SPI
//!
//!
//! # Example for the 2.9 in E-Ink Display
//!
//!```rust, no_run
//!# use embedded_hal_mock::*;
//!# fn main() -> Result<(), MockError> {
//!use embedded_graphics::{
//! pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle},
//!};
//!use epd_waveshare::{epd2in9::*, prelude::*};
//!#
//!# let expectations = [];
//!# let mut spi = spi::Mock::new(&expectations);
//!# let expectations = [];
//!# let cs_pin = pin::Mock::new(&expectations);
//!# let busy_in = pin::Mock::new(&expectations);
//!# let dc = pin::Mock::new(&expectations);
//!# let rst = pin::Mock::new(&expectations);
//!# let mut delay = delay::MockNoop::new();
//!
//!// Setup EPD
//!let mut epd = Epd2in9::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?;
//!
//!// Use display graphics from embedded-graphics
//!let mut display = Display2in9::default();
//!
//!// Use embedded graphics for drawing a line
//!let _ = Line::new(Point::new(0, 120), Point::new(0, 295))
//! .into_styled(PrimitiveStyle::with_stroke(Color::Black, 1))
//! .draw(&mut display);
//!
//! // Display updated frame
//!epd.update_frame(&mut spi, &display.buffer(), &mut delay)?;
//!epd.display_frame(&mut spi, &mut delay)?;
//!
//!// Set the EPD to sleep
//!epd.sleep(&mut spi, &mut delay)?;
//!# Ok(())
//!# }
//!```
/// Width of epd2in9 in pixels
pub const WIDTH: u32 = 128;
/// Height of epd2in9 in pixels
pub const HEIGHT: u32 = 296;
/// Default Background Color (white)
pub const DEFAULT_BACKGROUND_COLOR: Color = Color::White;
/// `false`: on this panel the BUSY pin is active-high, not active-low.
const IS_BUSY_LOW: bool = false;
use embedded_hal::{
blocking::{delay::*, spi::Write},
digital::v2::*,
};
use crate::type_a::{
command::Command,
constants::{LUT_FULL_UPDATE, LUT_PARTIAL_UPDATE},
};
use crate::color::Color;
use crate::traits::*;
use crate::buffer_len;
use crate::interface::DisplayInterface;
/// Display with Fullsize buffer for use with the 2in9 EPD
///
/// Buffer length is computed from the panel's pixel dimensions.
#[cfg(feature = "graphics")]
pub type Display2in9 = crate::graphics::Display<
    WIDTH,
    HEIGHT,
    false,
    { buffer_len(WIDTH as usize, HEIGHT as usize) },
    Color,
>;
/// Epd2in9 driver
///
/// Bundles the SPI display interface with the current background color and
/// the refresh LUT (full or quick) selected for updates.
pub struct Epd2in9<SPI, CS, BUSY, DC, RST, DELAY> {
    /// SPI
    interface: DisplayInterface<SPI, CS, BUSY, DC, RST, DELAY>,
    /// Color
    background_color: Color,
    /// Refresh LUT
    refresh: RefreshLut,
}
impl<SPI, CS, BUSY, DC, RST, DELAY> Epd2in9<SPI, CS, BUSY, DC, RST, DELAY>
where
    SPI: Write<u8>,
    CS: OutputPin,
    BUSY: InputPin,
    DC: OutputPin,
    RST: OutputPin,
    DELAY: DelayUs<u32>,
{
    /// Resets the controller and sends the panel's power-up command sequence
    /// (driver output, booster soft start, VCOM, dummy lines, gate line
    /// width, data entry mode), ending with the currently selected LUT.
    /// The command order follows the reference/Arduino implementation.
    fn init(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        self.interface.reset(delay, 10_000, 10_000);
        self.wait_until_idle(spi, delay)?;
        // 3 Databytes:
        // A[7:0]
        // 0.. A[8]
        // 0.. B[2:0]
        // Default Values: A = Height of Screen (0x127), B = 0x00 (GD, SM and TB=0?)
        self.interface
            .cmd_with_data(spi, Command::DriverOutputControl, &[0x27, 0x01, 0x00])?;
        // 3 Databytes: (and default values from datasheet and arduino)
        // 1 .. A[6:0] = 0xCF | 0xD7
        // 1 .. B[6:0] = 0xCE | 0xD6
        // 1 .. C[6:0] = 0x8D | 0x9D
        //TODO: test
        self.interface
            .cmd_with_data(spi, Command::BoosterSoftStartControl, &[0xD7, 0xD6, 0x9D])?;
        // One Databyte with value 0xA8 for 7V VCOM
        self.interface
            .cmd_with_data(spi, Command::WriteVcomRegister, &[0xA8])?;
        // One Databyte with default value 0x1A for 4 dummy lines per gate
        self.interface
            .cmd_with_data(spi, Command::SetDummyLinePeriod, &[0x1A])?;
        // One Databyte with default value 0x08 for 2us per line
        self.interface
            .cmd_with_data(spi, Command::SetGateLineWidth, &[0x08])?;
        // One Databyte with default value 0x03
        // -> address: x increment, y increment, address counter is updated in x direction
        self.interface
            .cmd_with_data(spi, Command::DataEntryModeSetting, &[0x03])?;
        self.set_lut(spi, delay, None)
    }
}
impl<SPI, CS, BUSY, DC, RST, DELAY> WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY>
    for Epd2in9<SPI, CS, BUSY, DC, RST, DELAY>
where
    SPI: Write<u8>,
    CS: OutputPin,
    BUSY: InputPin,
    DC: OutputPin,
    RST: OutputPin,
    DELAY: DelayUs<u32>,
{
    type DisplayColor = Color;
    /// Panel width in pixels.
    fn width(&self) -> u32 {
        WIDTH
    }
    /// Panel height in pixels.
    fn height(&self) -> u32 {
        HEIGHT
    }
    /// Builds the driver from its pins, runs the full init sequence, and
    /// starts with the white background and the full-refresh LUT.
    fn new(
        spi: &mut SPI,
        cs: CS,
        busy: BUSY,
        dc: DC,
        rst: RST,
        delay: &mut DELAY,
        delay_us: Option<u32>,
    ) -> Result<Self, SPI::Error> {
        let interface = DisplayInterface::new(cs, busy, dc, rst, delay_us);
        let mut epd = Epd2in9 {
            interface,
            background_color: DEFAULT_BACKGROUND_COLOR,
            refresh: RefreshLut::Full,
        };
        epd.init(spi, delay)?;
        Ok(epd)
    }
    /// Puts the controller into deep sleep; call `wake_up` to use it again.
    fn sleep(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        // 0x00 for Normal mode (Power on Reset), 0x01 for Deep Sleep Mode
        //TODO: is 0x00 needed here? (see also epd1in54)
        self.interface
            .cmd_with_data(spi, Command::DeepSleepMode, &[0x00])?;
        Ok(())
    }
    /// Wakes the controller by re-running the full init sequence.
    fn wake_up(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        self.init(spi, delay)?;
        Ok(())
    }
    /// Writes `buffer` into the controller's RAM for the full frame; call
    /// `display_frame` afterwards to actually refresh the panel.
    fn update_frame(
        &mut self,
        spi: &mut SPI,
        buffer: &[u8],
        delay: &mut DELAY,
    ) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        self.use_full_frame(spi, delay)?;
        self.interface
            .cmd_with_data(spi, Command::WriteRam, buffer)?;
        Ok(())
    }
    //TODO: update description: last 3 bits will be ignored for width and x_pos
    /// Writes `buffer` into a sub-rectangle of the controller's RAM.
    fn update_partial_frame(
        &mut self,
        spi: &mut SPI,
        delay: &mut DELAY,
        buffer: &[u8],
        x: u32,
        y: u32,
        width: u32,
        height: u32,
    ) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        self.set_ram_area(spi, x, y, x + width, y + height)?;
        self.set_ram_counter(spi, delay, x, y)?;
        self.interface
            .cmd_with_data(spi, Command::WriteRam, buffer)?;
        Ok(())
    }
    /// Triggers a panel refresh from the RAM contents written earlier.
    fn display_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        // enable clock signal, enable cp, display pattern -> 0xC4 (tested with the arduino version)
        //TODO: test control_1 or control_2 with default value 0xFF (from the datasheet)
        self.interface
            .cmd_with_data(spi, Command::DisplayUpdateControl2, &[0xC4])?;
        self.interface.cmd(spi, Command::MasterActivation)?;
        // Master Activation should not be interrupted to avoid corruption of
        // panel images, therefore a terminate command (NOP) is sent.
        self.interface.cmd(spi, Command::Nop)?;
        Ok(())
    }
    /// Convenience: `update_frame` followed by `display_frame`.
    fn update_and_display_frame(
        &mut self,
        spi: &mut SPI,
        buffer: &[u8],
        delay: &mut DELAY,
    ) -> Result<(), SPI::Error> {
        self.update_frame(spi, buffer, delay)?;
        self.display_frame(spi, delay)?;
        Ok(())
    }
    /// Fills the whole RAM frame with the current background color (does not
    /// refresh the panel by itself).
    fn clear_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        self.use_full_frame(spi, delay)?;
        // clear the ram with the background color
        let color = self.background_color.get_byte_value();
        self.interface.cmd(spi, Command::WriteRam)?;
        self.interface
            .data_x_times(spi, color, WIDTH / 8 * HEIGHT)?;
        Ok(())
    }
    /// Sets the color used by `clear_frame`.
    fn set_background_color(&mut self, background_color: Color) {
        self.background_color = background_color;
    }
    /// Returns the color used by `clear_frame`.
    fn background_color(&self) -> &Color {
        &self.background_color
    }
    /// Uploads the LUT for the requested refresh mode; `None` keeps the
    /// currently selected mode.
    fn set_lut(
        &mut self,
        spi: &mut SPI,
        delay: &mut DELAY,
        refresh_rate: Option<RefreshLut>,
    ) -> Result<(), SPI::Error> {
        if let Some(refresh_lut) = refresh_rate {
            self.refresh = refresh_lut;
        }
        match self.refresh {
            RefreshLut::Full => self.set_lut_helper(spi, delay, &LUT_FULL_UPDATE),
            RefreshLut::Quick => self.set_lut_helper(spi, delay, &LUT_PARTIAL_UPDATE),
        }
    }
    /// Blocks until the BUSY pin reports idle (SPI is unused here).
    fn wait_until_idle(&mut self, _spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        self.interface.wait_until_idle(delay, IS_BUSY_LOW);
        Ok(())
    }
}
impl<SPI, CS, BUSY, DC, RST, DELAY> Epd2in9<SPI, CS, BUSY, DC, RST, DELAY>
where
    SPI: Write<u8>,
    CS: OutputPin,
    BUSY: InputPin,
    DC: OutputPin,
    RST: OutputPin,
    DELAY: DelayUs<u32>,
{
    /// Selects the whole panel as the RAM window and resets the address
    /// counter to the origin.
    fn use_full_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> {
        // choose full frame/ram
        self.set_ram_area(spi, 0, 0, WIDTH - 1, HEIGHT - 1)?;
        // start from the beginning
        self.set_ram_counter(spi, delay, 0, 0)
    }
    /// Programs the RAM window (start/end X and Y). X coordinates are sent
    /// in byte units, so their lowest 3 bits are dropped.
    ///
    /// # Panics
    /// Asserts `start_x < end_x` and `start_y < end_y`.
    fn set_ram_area(
        &mut self,
        spi: &mut SPI,
        start_x: u32,
        start_y: u32,
        end_x: u32,
        end_y: u32,
    ) -> Result<(), SPI::Error> {
        assert!(start_x < end_x);
        assert!(start_y < end_y);
        // x is positioned in bytes, so the last 3 bits which show the position inside a byte in the ram
        // aren't relevant
        self.interface.cmd_with_data(
            spi,
            Command::SetRamXAddressStartEndPosition,
            &[(start_x >> 3) as u8, (end_x >> 3) as u8],
        )?;
        // 2 Databytes: A[7:0] & 0..A[8] for each - start and end
        self.interface.cmd_with_data(
            spi,
            Command::SetRamYAddressStartEndPosition,
            &[
                start_y as u8,
                (start_y >> 8) as u8,
                end_y as u8,
                (end_y >> 8) as u8,
            ],
        )
    }
    /// Positions the RAM address counter at (`x`, `y`); X in byte units.
    fn set_ram_counter(
        &mut self,
        spi: &mut SPI,
        delay: &mut DELAY,
        x: u32,
        y: u32,
    ) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        // x is positioned in bytes, so the last 3 bits which show the position inside a byte in the ram
        // aren't relevant
        self.interface
            .cmd_with_data(spi, Command::SetRamXAddressCounter, &[(x >> 3) as u8])?;
        // 2 Databytes: A[7:0] & 0..A[8]
        self.interface.cmd_with_data(
            spi,
            Command::SetRamYAddressCounter,
            &[y as u8, (y >> 8) as u8],
        )?;
        Ok(())
    }
    /// Set your own LUT, this function is also used internally for set_lut
    ///
    /// # Panics
    /// Asserts that the LUT buffer is exactly 30 bytes.
    fn set_lut_helper(
        &mut self,
        spi: &mut SPI,
        delay: &mut DELAY,
        buffer: &[u8],
    ) -> Result<(), SPI::Error> {
        self.wait_until_idle(spi, delay)?;
        assert!(buffer.len() == 30);
        self.interface
            .cmd_with_data(spi, Command::WriteLutRegister, buffer)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sanity check that the public panel constants match the 2.9" hardware.
    #[test]
    fn epd_size() {
        assert_eq!(WIDTH, 128);
        assert_eq!(HEIGHT, 296);
        assert_eq!(DEFAULT_BACKGROUND_COLOR, Color::White);
    }
}
|
use storm::{Entity, MssqlDelete};
// Test fixture: derives `MssqlDelete` against table "t" with key column
// "id" (tests disabled via `no_test`). Fields are deliberately declared in
// a different order than the key to exercise the derive.
#[derive(MssqlDelete)]
#[storm(table = "t", keys = "id", no_test = true)]
pub struct EntityWithDuplicateKey {
    pub name: String,
    pub id: i32,
}
impl Entity for EntityWithDuplicateKey {
    // The key type matches the `id` column.
    type Key = i32;
    // No tracking context is needed for this fixture.
    type TrackCtx = ();
}
|
use crate::defs::*;
use crate::exceptions;
use crate::symbols;
use crate::error::RuntimeError;
use libc::{c_char, c_void};
use std::ffi::{CStr, CString};
use std::io::Write;
use std::mem;
/// Converts a slice of string slices into a C array of NUL-terminated
/// strings for FFI. Both the strings and the backing array are deliberately
/// leaked: the runtime keeps them alive for the life of the process.
fn arr_to_raw(arr: &[&str]) -> *const *const c_char {
    let raw: Vec<*mut c_char> = arr
        .iter()
        .map(|s| CString::new(*s).unwrap().into_raw())
        .collect();
    // `Vec::leak` replaces the original `as_ptr` + `mem::forget` pair with
    // the same leak-and-keep-valid semantics.
    raw.leak().as_ptr() as *const *const c_char
}
/// Interns `name` as a symbol and attaches a native `Function` to it, built
/// from the given invoke/apply entry points, argument list, and vararg flag.
/// The `Function` is heap-allocated and leaked into the symbol table.
fn init_symbol_fn(
    invoke_fn: *const c_void,
    apply_to_fn: *const c_void,
    name: &str,
    arglist: &[&str],
    vararg: bool,
) {
    let sym = symbols::get_or_intern_symbol(name.to_string());
    let func = Function {
        ty: FunctionType::Function,
        name: CString::new(name).unwrap().into_raw(),
        arglist: arr_to_raw(arglist),
        arg_count: (arglist.len() as u64),
        is_macro: false,
        invoke_f_ptr: invoke_fn,
        apply_to_f_ptr: apply_to_fn,
        has_restarg: vararg,
    };
    // Leak the Function; the symbol table owns it for the process lifetime.
    let func = Box::into_raw(Box::new(func));
    unsafe { (*sym).function = func };
}
/// Variadic `+`: sums `n` integer arguments from the C va_list.
unsafe extern "C" fn native_add_invoke(_: *const Function, n: u64, mut args: ...) -> Object {
    let args = va_list_to_obj_array(n, args.as_va_list());
    let mut sum = 0;
    for i in 0..n {
        sum += (*args.offset(i as isize)).unpack_int();
    }
    Object::from_int(sum)
}
/// List-application form of `+`: walks the argument list summing integers.
unsafe extern "C" fn native_add_apply(_: *const Function, args: List) -> Object {
    let mut sum = 0;
    let args_count = args.len;
    let mut cur_args = args;
    for _ in 0..args_count {
        sum += cur_args.first().unpack_int();
        cur_args = cur_args.rest();
    }
    Object::from_int(sum)
}
/// Variadic `-`: subtracts each of the `n` varargs from the first argument
/// `x` (so `(- x)` with no varargs returns `x` unchanged).
unsafe extern "C" fn native_sub_invoke(
    _: *const Function,
    n: u64,
    x: Object,
    mut args: ...
) -> Object {
    let args = va_list_to_obj_array(n, args.as_va_list());
    let mut result = x.unpack_int();
    for i in 0..n {
        result -= (*args.offset(i as isize)).unpack_int();
    }
    Object::from_int(result)
}
/// List-application form of `-`: first element is the minuend, the rest are
/// subtracted in order.
unsafe extern "C" fn native_sub_apply(_: *const Function, args: List) -> Object {
    let mut result = args.first().unpack_int();
    let mut cur_args = args.rest();
    let args_len = cur_args.len;
    for _ in 0..args_len {
        result -= cur_args.first().unpack_int();
        cur_args = cur_args.rest();
    }
    Object::from_int(result)
}
/// `equal`: returns `x` (truthy) when the two objects compare equal,
/// otherwise nil.
extern "C" fn native_equal_invoke(_: *const Function, x: Object, y: Object) -> Object {
    if x == y {
        x
    } else {
        Object::nil()
    }
}
/// List-application form of `equal`: compares the first two list elements.
unsafe extern "C" fn native_equal_apply(f: *const Function, args: List) -> Object {
    native_equal_invoke(f, args.first(), args.rest().first())
}
/// `set-fn`: installs `func` as the function slot of symbol `sym`; returns nil.
extern "C" fn native_set_fn_invoke(_: *const Function, sym: Object, func: Object) -> Object {
    let sym = sym.unpack_symbol();
    let func = func.unpack_function();
    unsafe { (*sym).function = func };
    Object::nil()
}
/// List-application form of `set-fn`.
unsafe extern "C" fn native_set_fn_apply(f: *const Function, args: List) -> Object {
    native_set_fn_invoke(f, args.first(), args.rest().first())
}
/// `cons`: prepends `x` to `list`, returning a new list that shares the tail
/// with the original (nodes are leaked via `Box::into_raw`).
extern "C" fn native_cons_invoke(_: *const Function, x: Object, list: Object) -> Object {
    let list = list.unpack_list();
    let len = unsafe { (*list).len };
    let node = Node {
        val: Box::into_raw(Box::new(x)),
        next: list,
    };
    let new_list = List {
        node: Box::into_raw(Box::new(node)),
        len: len + 1,
    };
    Object::from_list(Box::into_raw(Box::new(new_list)))
}
/// List-application form of `cons`.
unsafe extern "C" fn native_cons_apply(f: *const Function, args: List) -> Object {
    native_cons_invoke(f, args.first(), args.rest().first())
}
/// `rest`: returns the list's tail, or nil for an empty list.
extern "C" fn native_rest_invoke(_: *const Function, list: Object) -> Object {
    let list = list.unpack_list();
    let len = unsafe { (*list).len };
    if len == 0 {
        Object::nil()
    } else {
        let rest = unsafe { (*(*list).node).next };
        Object::from_list(rest)
    }
}
/// List-application form of `rest`.
unsafe extern "C" fn native_rest_apply(f: *const Function, args: List) -> Object {
    native_rest_invoke(f, args.first())
}
/// `first`: returns a clone of the list's head; raises a runtime error for
/// an empty list.
unsafe extern "C" fn native_first_invoke(_: *const Function, list: Object) -> Object {
    let list = list.unpack_list();
    let len = (*list).len;
    if len == 0 {
        exceptions::raise_error("cannot do first on an empty list".to_string());
    } else {
        (*(*(*list).node).val).clone()
    }
}
/// List-application form of `first`.
unsafe extern "C" fn native_first_apply(f: *const Function, args: List) -> Object {
    native_first_invoke(f, args.first())
}
/// Checks arity, then invokes `f`'s apply entry point on `args`.
/// Raises an arity error before calling when the argument count is wrong.
unsafe fn apply_to_list(f: *const Function, args: List) -> Object {
    if !unlisp_rt_check_arity(f, args.len) {
        exceptions::raise_arity_error((*f).name, (*f).arg_count, args.len);
    }
    // The stored pointer is type-erased; reinterpret it as the apply ABI.
    let apply_fn: unsafe extern "C" fn(*const Function, List) -> Object =
        mem::transmute((*f).apply_to_f_ptr);
    apply_fn(f, args)
}
/// `apply` (variadic form): the last vararg must be a list; the preceding
/// varargs are consed onto it, and `f` is applied to the combined list.
unsafe extern "C" fn native_apply_invoke(
    _: *const Function,
    n: u64,
    f: Object,
    mut args: ...
) -> Object {
    let f = f.unpack_function();
    let args_arr = va_list_to_obj_array(n, args.as_va_list());
    let last_arg = (*args_arr.offset((n as isize) - 1)).unpack_list();
    let args_list = obj_array_to_list(n - 1, args_arr, Some(last_arg));
    apply_to_list(f, (*args_list).clone())
}
/// List-application form of `apply`: walks to the final element (the base
/// list), then re-conses the intermediate arguments onto it in order.
unsafe extern "C" fn native_apply_apply(_: *const Function, args: List) -> Object {
    let f = args.first().unpack_function();
    let mut to_cons = vec![];
    let mut f_args = args.rest();
    while f_args.len != 1 {
        to_cons.push(f_args.first());
        f_args = f_args.rest();
    }
    let cons_base = f_args.first().unpack_list();
    // Reverse so the earliest argument ends up at the head of the list.
    let reconsed_args = to_cons
        .into_iter()
        .rev()
        .fold((*cons_base).clone(), |acc, item| acc.cons(item));
    apply_to_list(f, reconsed_args)
}
/// `symbol-function`: returns the function stored in the symbol's slot.
unsafe extern "C" fn native_symbol_fn_invoke(_: *const Function, sym: Object) -> Object {
    let sym = sym.unpack_symbol();
    let f = (*sym).function;
    Object::from_function(f)
}
/// List-application form of `symbol-function`.
unsafe extern "C" fn native_symbol_fn_apply(f: *const Function, args: List) -> Object {
    native_symbol_fn_invoke(f, args.first())
}
/// `set-macro`: flags the given function object as a macro; returns nil.
unsafe extern "C" fn native_set_macro_invoke(_: *const Function, f: Object) -> Object {
    let f = f.unpack_function();
    (*f).is_macro = true;
    Object::nil()
}
/// List-application form of `set-macro`.
unsafe extern "C" fn native_set_macro_apply(f: *const Function, args: List) -> Object {
    native_set_macro_invoke(f, args.first())
}
/// `listp`: returns the symbol `true` when `x` is a list, otherwise nil.
unsafe extern "C" fn native_listp_invoke(_: *const Function, x: Object) -> Object {
    if x.ty == ObjType::List {
        Object::from_symbol(symbols::get_or_intern_symbol("true".to_string()))
    } else {
        Object::nil()
    }
}
unsafe extern "C" fn native_listp_apply(f: *const Function, args: List) -> Object {
native_set_macro_invoke(f, args.first())
}
/// `symbolp`: returns the symbol `true` when `x` is a symbol, otherwise nil.
unsafe extern "C" fn native_symbolp_invoke(_: *const Function, x: Object) -> Object {
    if x.ty == ObjType::Symbol {
        Object::from_symbol(symbols::get_or_intern_symbol("true".to_string()))
    } else {
        Object::nil()
    }
}
/// List-application form of `symbolp`.
unsafe extern "C" fn native_symbolp_apply(f: *const Function, args: List) -> Object {
    native_symbolp_invoke(f, args.first())
}
/// Invokes macro `f` on `args` under the global exception handler so macro
/// errors surface as `RuntimeError` instead of unwinding. Arity is checked
/// inside the handler so arity failures are also converted to errors.
///
/// # Safety
/// `f` must point to a valid `Function` whose `apply_to_f_ptr` has the
/// `(*const Function, List) -> Object` ABI.
pub unsafe fn call_macro(f: *mut Function, args: List) -> Result<Object, RuntimeError> {
    assert!((*f).is_macro);
    let apply_fn: unsafe extern "C" fn(*const Function, List) -> Object =
        mem::transmute((*f).apply_to_f_ptr);
    exceptions::run_with_global_ex_handler(|| {
        if !unlisp_rt_check_arity(f, args.len) {
            exceptions::raise_arity_error((*f).name, (*f).arg_count, args.len);
        }
        apply_fn(f, args)
    })
}
/// `macroexpand-1`: expands `form` one step when it is a non-empty list
/// whose head symbol names a macro; every other shape is returned unchanged.
/// Macro errors are re-raised as runtime errors.
unsafe extern "C" fn native_macroexpand_1_invoke(_: *const Function, form: Object) -> Object {
    match &form.ty {
        ObjType::List => {
            let list = form.unpack_list();
            if (*list).len == 0 {
                form
            } else {
                let first = (*list).first();
                match first.ty {
                    ObjType::Symbol => {
                        let sym = first.unpack_symbol();
                        let sym_fn = (*sym).function;
                        // Only expand when the head symbol has a function
                        // slot and that function is flagged as a macro.
                        if sym_fn.is_null() || !(*sym_fn).is_macro {
                            form
                        } else {
                            match call_macro(sym_fn, (*list).rest()) {
                                Ok(expanded) => expanded,
                                Err(e) => exceptions::raise_error(format!("{}", e)),
                            }
                        }
                    }
                    _ => form,
                }
            }
        }
        _ => form,
    }
}
/// List-application form of `macroexpand-1`.
unsafe extern "C" fn native_macroexpand_1_apply(f: *const Function, args: List) -> Object {
    native_macroexpand_1_invoke(f, args.first())
}
/// `error`: raises a runtime error whose message is the given string object;
/// never returns.
unsafe extern "C" fn native_error_invoke(_: *const Function, msg: Object) -> ! {
    let s = msg.unpack_string();
    let rust_str = CStr::from_ptr(s).to_str().unwrap().to_string();
    exceptions::raise_error(rust_str)
}
/// List-application form of `error`; never returns.
unsafe extern "C" fn native_error_apply(f: *const Function, args: List) -> ! {
    native_error_invoke(f, args.first())
}
/// `print`: writes `x`'s display form to stdout (no newline); returns `x`.
unsafe extern "C" fn native_print_invoke(_: *const Function, x: Object) -> Object {
    print!("{}", x);
    x
}
/// List-application form of `print`.
unsafe extern "C" fn native_print_apply(f: *const Function, args: List) -> Object {
    native_print_invoke(f, args.first())
}
/// `println`: writes `x`'s display form plus a newline; returns `x`.
unsafe extern "C" fn native_println_invoke(_: *const Function, x: Object) -> Object {
    println!("{}", x);
    x
}
/// List-application form of `println`.
unsafe extern "C" fn native_println_apply(f: *const Function, args: List) -> Object {
    native_println_invoke(f, args.first())
}
/// Native `stdout-write`: writes the string `s` verbatim to stdout, returns nil.
unsafe extern "C" fn native_stdout_write_invoke(_: *const Function, s: Object) -> Object {
    let s = s.unpack_string();
    let rust_str = CStr::from_ptr(s).to_str().unwrap().to_string();
    // On an I/O error, raise_error diverges inside map_err, so the discarded
    // Result here is only ever Ok.
    let _ = write!(std::io::stdout(), "{}", rust_str)
        .map_err(|e| exceptions::raise_error(format!("{}", e)));
    Object::nil()
}
/// List-calling shim for `stdout-write`: unpacks the single string argument.
unsafe extern "C" fn native_stdout_write_apply(f: *const Function, args: List) -> Object {
    native_stdout_write_invoke(f, args.first())
}
/// Registers every built-in native function with the runtime symbol table.
///
/// Each entry supplies the invoke/apply function-pointer pair, the Lisp name,
/// the fixed parameter names, and whether the function is variadic (the final
/// `bool`).
pub fn init() {
    // Arithmetic / comparison.
    init_symbol_fn(
        native_add_invoke as *const c_void,
        native_add_apply as *const c_void,
        "+",
        &[],
        true,
    );
    init_symbol_fn(
        native_sub_invoke as *const c_void,
        native_sub_apply as *const c_void,
        "-",
        &["x"],
        true,
    );
    init_symbol_fn(
        native_equal_invoke as *const c_void,
        native_equal_apply as *const c_void,
        "equal",
        &["x", "y"],
        false,
    );
    // Symbol/function plumbing.
    init_symbol_fn(
        native_set_fn_invoke as *const c_void,
        native_set_fn_apply as *const c_void,
        "set-fn",
        &["sym", "func"],
        false,
    );
    init_symbol_fn(
        native_symbol_fn_invoke as *const c_void,
        native_symbol_fn_apply as *const c_void,
        "symbol-function",
        &["sym"],
        false,
    );
    // List primitives.
    init_symbol_fn(
        native_cons_invoke as *const c_void,
        native_cons_apply as *const c_void,
        "cons",
        &["x", "list"],
        false,
    );
    init_symbol_fn(
        native_rest_invoke as *const c_void,
        native_rest_apply as *const c_void,
        "rest",
        &["list"],
        false,
    );
    init_symbol_fn(
        native_first_invoke as *const c_void,
        native_first_apply as *const c_void,
        "first",
        &["list"],
        false,
    );
    init_symbol_fn(
        native_apply_invoke as *const c_void,
        native_apply_apply as *const c_void,
        "apply",
        &["f"],
        true,
    );
    // Macros and predicates.
    init_symbol_fn(
        native_set_macro_invoke as *const c_void,
        native_set_macro_apply as *const c_void,
        "set-macro",
        &["f"],
        false,
    );
    init_symbol_fn(
        native_listp_invoke as *const c_void,
        native_listp_apply as *const c_void,
        "listp",
        &["x"],
        false,
    );
    init_symbol_fn(
        native_symbolp_invoke as *const c_void,
        native_symbolp_apply as *const c_void,
        "symbolp",
        &["x"],
        false,
    );
    init_symbol_fn(
        native_macroexpand_1_invoke as *const c_void,
        native_macroexpand_1_apply as *const c_void,
        "macroexpand-1",
        &["form"],
        false,
    );
    // Errors and I/O.
    init_symbol_fn(
        native_error_invoke as *const c_void,
        native_error_apply as *const c_void,
        "error",
        &["msg"],
        false,
    );
    init_symbol_fn(
        native_print_invoke as *const c_void,
        native_print_apply as *const c_void,
        "print",
        &["x"],
        false,
    );
    init_symbol_fn(
        native_println_invoke as *const c_void,
        native_println_apply as *const c_void,
        "println",
        &["x"],
        false,
    );
    init_symbol_fn(
        native_stdout_write_invoke as *const c_void,
        native_stdout_write_apply as *const c_void,
        "stdout-write",
        &["s"],
        false,
    );
}
|
use std::collections::hash_map::Entry;
use std::collections::{HashMap, VecDeque};
use std::iter::FromIterator;
use anyhow::*;
use bevy_asset::{Handle, LoadContext, LoadedAsset};
use bevy_ecs::{
bundle::Bundle,
entity::Entity,
reflect::ReflectComponent,
system::{Commands, Query},
world::{EntityMut, World},
};
use bevy_math::*;
use bevy_reflect::{Reflect, TypeUuid};
use bevy_render::{
color::Color,
draw::{Draw, Visible},
mesh::{Indices, Mesh},
pipeline::{PrimitiveTopology, RenderPipeline, RenderPipelines},
render_graph::base::MainPass,
texture::Texture,
};
use bevy_scene::Scene;
use bevy_sprite::{ColorMaterial, Sprite, QUAD_HANDLE, SPRITE_PIPELINE_HANDLE};
use bevy_transform::components::{GlobalTransform, Transform};
use crate::parallax::Parallax;
use crate::tmx::{Layer, Map, Object, Texture as TmxTexture, TexturePtr, Tile};
/// Callback invoked for every spawned map object, letting callers attach extra components.
pub type ObjectVisitor = dyn for<'w> Fn(&Object, &mut EntityMut<'w>) + Send + Sync;
/// Callback invoked for every spawned image-layer entity.
pub type ImageVisitor = dyn for<'w> Fn(&mut EntityMut<'w>) + Send + Sync;
/// Callback invoked once with the finished world after all layers are built.
pub type MapVisitor = dyn for<'w> Fn(&Map, &mut World) + Send + Sync;
/// Builds a Bevy [`Scene`] from a parsed TMX [`Map`], caching textures,
/// materials, and per-tile sprite prototypes along the way.
pub struct SceneBuilder<'a, 'b> {
    // World the scene entities are spawned into.
    world: World,
    // Asset-load context used to register labeled sub-assets (meshes, materials).
    context: &'a mut LoadContext<'b>,
    map: &'a Map,
    // Cache: source texture -> loaded texture handle.
    texture_handles: HashMap<TexturePtr, Handle<Texture>>,
    // Cache: (texture, RGBA8 tint) -> material handle.
    material_handles: HashMap<(Handle<Texture>, [u8; 4]), Handle<ColorMaterial>>,
    // Cache: tile gid -> prototype sprite bundle for object layers.
    object_sprites: HashMap<u32, ProtoSpriteBundle>,
    // Monotonic counter used to generate unique asset labels.
    label_counter: usize,
    // Z offset, incremented per layer so later layers render on top.
    offset_z: f32,
    scale: Vec3,
    visit_object: Option<&'a ObjectVisitor>,
    visit_image: Option<&'a ImageVisitor>,
    visit_map: Option<&'a MapVisitor>,
}
/// Placeholder sprite carrying only a size; upgraded to a real `Sprite`
/// by `proto_sprite_upgrade_system` once the scene is instantiated.
#[derive(Debug, Default, Clone, TypeUuid, Reflect)]
#[reflect(Component)]
#[uuid = "39eb4ed0-d44e-4ed5-8676-2e0c148f96c4"]
pub struct ProtoSprite(Vec2);
/// Renderable bundle for a map layer mesh, mirroring Bevy's sprite bundle but
/// with a [`ProtoSprite`] placeholder instead of a concrete `Sprite`.
#[derive(Bundle, Clone)]
struct ProtoSpriteBundle {
    pub sprite: ProtoSprite,
    pub mesh: Handle<Mesh>,
    pub material: Handle<ColorMaterial>,
    pub main_pass: MainPass,
    pub draw: Draw,
    pub visible: Visible,
    pub render_pipelines: RenderPipelines,
    pub transform: Transform,
    pub global_transform: GlobalTransform,
}
impl<'a, 'b> SceneBuilder<'a, 'b> {
    /// Creates a builder over `map` with empty caches and `offset_z` at 0.
    pub fn new(
        load_context: &'a mut LoadContext<'b>,
        map: &'a Map,
        visit_object: Option<&'a ObjectVisitor>,
        visit_image: Option<&'a ImageVisitor>,
        visit_map: Option<&'a MapVisitor>,
        scale: Vec3,
    ) -> Self {
        Self {
            world: World::default(),
            context: load_context,
            map,
            texture_handles: HashMap::default(),
            material_handles: HashMap::default(),
            object_sprites: HashMap::default(),
            label_counter: 0,
            offset_z: 0.0,
            visit_object,
            visit_image,
            visit_map,
            scale,
        }
    }
    /// Walks every layer (groups are flattened depth-first via the queue),
    /// spawning entities into the world, then wraps the world in a `Scene`.
    /// `offset_z` advances by `scale.z` per processed layer so later layers
    /// draw above earlier ones.
    pub async fn build(mut self) -> Result<Scene> {
        let mut layer_queue = VecDeque::from_iter(self.map.layers.iter());
        while let Some(layer) = layer_queue.pop_front() {
            match layer {
                Layer::TileLayer {
                    position,
                    size,
                    color,
                    visible: _,
                    offset,
                    parallax,
                    data,
                } => {
                    // Group tiles by source image so each image becomes one
                    // batched mesh with a single material.
                    let mut images_to_meshes =
                        HashMap::<TexturePtr, (Handle<ColorMaterial>, Vec<_>)>::new();
                    for (i, &gid) in data.iter().enumerate() {
                        if let Some(&Tile {
                            image: Some(ref image),
                            top_left,
                            bottom_right,
                            width: tile_width,
                            height: tile_height,
                            ..
                        }) = self.map.get_tile(gid)
                        {
                            // Convert the flat tile index into map coordinates.
                            let (x, y) = self.map.tile_type.coord_to_pos(
                                size.y as i32,
                                (i as i32 % size.x as i32) + position.x,
                                (i as i32 / size.x as i32) + position.y,
                            );
                            let tile = (x, y, tile_width, tile_height, top_left, bottom_right);
                            match images_to_meshes.entry(TexturePtr::from(image)) {
                                Entry::Occupied(mut value) => value.get_mut().1.push(tile),
                                // Vacant: load texture/material once per image.
                                vacant => {
                                    let texture = self.texture_handle(image).await?;
                                    let material = self.texture_material_handle(texture, color);
                                    vacant.or_insert((material, Vec::new())).1.push(tile);
                                }
                            };
                        }
                    }
                    // Emit one quad (4 vertices, 2 triangles) per tile.
                    for (_, (material, tiles)) in images_to_meshes.into_iter() {
                        let mut vertices = Vec::with_capacity(tiles.len() * 4);
                        let mut normals = Vec::with_capacity(tiles.len() * 4);
                        let mut uvs = Vec::with_capacity(tiles.len() * 4);
                        let mut indices = Vec::with_capacity(tiles.len() * 6);
                        for (x, y, w, h, top_left, bottom_right) in tiles {
                            let i = vertices.len() as u16;
                            indices.extend_from_slice(&[i, i + 1, i + 2, i + 2, i + 1, i + 3]);
                            vertices.push([x as f32, y as f32, 0.0]);
                            vertices.push([(x + w) as f32, y as f32, 0.0]);
                            vertices.push([x as f32, (y + h) as f32, 0.0]);
                            vertices.push([(x + w) as f32, (y + h) as f32, 0.0]);
                            normals.push([0.0, 0.0, 1.0]);
                            normals.push([0.0, 0.0, 1.0]);
                            normals.push([0.0, 0.0, 1.0]);
                            normals.push([0.0, 0.0, 1.0]);
                            uvs.push([top_left.x, top_left.y]);
                            uvs.push([bottom_right.x, top_left.y]);
                            uvs.push([top_left.x, bottom_right.y]);
                            uvs.push([bottom_right.x, bottom_right.y]);
                        }
                        let mut mesh = Mesh::new(PrimitiveTopology::TriangleList);
                        mesh.set_attribute(Mesh::ATTRIBUTE_POSITION, vertices);
                        mesh.set_attribute(Mesh::ATTRIBUTE_NORMAL, normals);
                        mesh.set_attribute(Mesh::ATTRIBUTE_UV_0, uvs);
                        mesh.set_indices(Some(Indices::U16(indices)));
                        self.label_counter += 1;
                        let mesh = self.context.set_labeled_asset(
                            format!("mesh#{}", self.label_counter).as_str(),
                            LoadedAsset::new(mesh),
                        );
                        let mut entity = self.world.spawn();
                        let transform = Transform::from_xyz(
                            offset.x as f32 * self.scale.x,
                            offset.y as f32 * self.scale.y,
                            self.offset_z,
                        );
                        entity.insert_bundle(ProtoSpriteBundle {
                            sprite: ProtoSprite(self.scale.xy()),
                            mesh,
                            material,
                            transform,
                            ..ProtoSpriteBundle::default()
                        });
                        // Only attach a Parallax component for non-identity factors.
                        if parallax != &Vec2::new(1.0, 1.0) {
                            entity.insert(Parallax::new(*parallax, transform));
                        }
                    }
                }
                Layer::ObjectLayer {
                    objects,
                    offset,
                    parallax,
                    visible,
                    color,
                    ..
                } => {
                    for (i, object) in objects.iter().enumerate() {
                        // Tile-backed objects get a cached prototype sprite.
                        let object_sprite = if let Some(gid) = object.tile {
                            self.object_sprite(gid, color).await?
                        } else {
                            None
                        };
                        let mut entity = self.world.spawn();
                        // Spread objects across the layer's z slice so later
                        // objects draw above earlier ones.
                        let mut transform = Transform::from_xyz(
                            (offset.x as f32 + object.x) * self.scale.x,
                            (offset.y as f32 + object.y) * self.scale.y,
                            self.offset_z as f32 + (i as f32 / objects.len() as f32) * self.scale.z,
                        );
                        transform.rotation = Quat::from_rotation_z(-object.rotation.to_radians());
                        if let Some(object_sprite) = object_sprite {
                            entity.insert_bundle(ProtoSpriteBundle {
                                sprite: ProtoSprite(
                                    Vec2::new(object.width, object.height) * self.scale.xy(),
                                ),
                                transform,
                                visible: Visible {
                                    is_transparent: true,
                                    is_visible: *visible && object.visible,
                                },
                                ..object_sprite
                            });
                        } else {
                            // Shapeless object: just a positioned entity.
                            entity.insert_bundle((transform, GlobalTransform::default()));
                        }
                        if parallax != &Vec2::new(1.0, 1.0) {
                            entity.insert(Parallax::new(*parallax, transform));
                        }
                        if let Some(handler) = self.visit_object.as_ref() {
                            (*handler)(object, &mut entity);
                        }
                    }
                }
                Layer::ImageLayer {
                    color,
                    visible: _,
                    offset,
                    parallax,
                    image,
                } => {
                    let texture = self.texture_handle(image).await?;
                    let material = self.texture_material_handle(texture, color);
                    let transform = Transform::from_xyz(
                        offset.x as f32 * self.scale.x,
                        offset.y as f32 * self.scale.y,
                        self.offset_z,
                    );
                    let mut entity = self.world.spawn();
                    entity.insert_bundle(ProtoSpriteBundle {
                        sprite: ProtoSprite(
                            Vec2::new(image.width() as f32, image.height() as f32)
                                * self.scale.xy(),
                        ),
                        material,
                        transform,
                        ..ProtoSpriteBundle::default()
                    });
                    if parallax != &Vec2::new(1.0, 1.0) {
                        entity.insert(Parallax::new(*parallax, transform));
                    }
                    if let Some(handler) = self.visit_image.as_ref() {
                        (*handler)(&mut entity);
                    }
                }
                Layer::Group { layers } => {
                    // Push children to the queue front (reversed) to preserve
                    // the group's internal layer order.
                    for layer in layers.iter().rev() {
                        layer_queue.push_front(layer);
                    }
                }
            }
            self.offset_z += self.scale.z;
        }
        if let Some(visit_map) = self.visit_map {
            (*visit_map)(&self.map, &mut self.world);
        }
        Ok(Scene::new(self.world))
    }
    /// Loads `image` as a texture, caching by pointer identity.
    async fn texture_handle(&mut self, image: &TmxTexture) -> Result<Handle<Texture>> {
        let handle: Handle<Texture> = match self.texture_handles.entry(TexturePtr::from(image)) {
            Entry::Occupied(value) => value.get().clone(),
            vacant => vacant.or_insert(image.load(self.context).await?).clone(),
        };
        Ok(handle)
    }
    /// Returns a modulated material for (texture, tint), creating and caching
    /// it under a fresh "material#N" label on first use. The color is
    /// quantized to RGBA8 for the cache key.
    fn texture_material_handle(
        &mut self,
        texture: Handle<Texture>,
        color: &Vec4,
    ) -> Handle<ColorMaterial> {
        let material_handles = &mut self.material_handles;
        let label_counter = &mut self.label_counter;
        let context = &mut *self.context;
        let color_u8 = [
            (color.x * 255.0) as u8,
            (color.y * 255.0) as u8,
            (color.z * 255.0) as u8,
            (color.w * 255.0) as u8,
        ];
        material_handles
            .entry((texture.clone(), color_u8))
            .or_insert_with(|| {
                *label_counter += 1;
                context.set_labeled_asset(
                    format!("material#{}", *label_counter).as_str(),
                    LoadedAsset::new(ColorMaterial::modulated_texture(
                        texture,
                        Color::from(*color),
                    )),
                )
            })
            .clone()
    }
    /// Builds (or fetches from cache) the prototype bundle for tile `gid`:
    /// a unit quad anchored at its top-left with the tile's UV rect.
    /// Returns `Ok(None)` when the gid has no tile or the tile has no image.
    async fn object_sprite(&mut self, gid: u32, color: &Vec4) -> Result<Option<ProtoSpriteBundle>> {
        if self.object_sprites.contains_key(&gid) {
            Ok(self.object_sprites.get(&gid).cloned())
        } else {
            let tile = if let Some(tile) = self.map.get_tile(gid) {
                tile
            } else {
                return Ok(None);
            };
            let image = if let Some(image) = tile.image.as_ref() {
                image
            } else {
                return Ok(None);
            };
            let texture = self.texture_handle(image).await?;
            let material = self.texture_material_handle(texture, color);
            let mut mesh = Mesh::new(PrimitiveTopology::TriangleList);
            mesh.set_attribute(
                Mesh::ATTRIBUTE_POSITION,
                vec![
                    [0.0, -1.0, 0.0],
                    [1.0, -1.0, 0.0],
                    [0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0],
                ],
            );
            mesh.set_attribute(Mesh::ATTRIBUTE_NORMAL, vec![[0.0, 0.0, 1.0]; 4]);
            mesh.set_attribute(
                Mesh::ATTRIBUTE_UV_0,
                vec![
                    [tile.top_left.x, tile.top_left.y],
                    [tile.bottom_right.x, tile.top_left.y],
                    [tile.top_left.x, tile.bottom_right.y],
                    [tile.bottom_right.x, tile.bottom_right.y],
                ],
            );
            mesh.set_indices(Some(Indices::U16(vec![0, 1, 2, 2, 1, 3])));
            self.label_counter += 1;
            let mesh = self.context.set_labeled_asset(
                format!("object#{}", self.label_counter).as_str(),
                LoadedAsset::new(mesh),
            );
            Ok(Some(
                self.object_sprites
                    .entry(gid)
                    .or_insert(ProtoSpriteBundle {
                        sprite: ProtoSprite(self.scale.xy()),
                        mesh,
                        material,
                        ..ProtoSpriteBundle::default()
                    })
                    .clone(),
            ))
        }
    }
}
impl Default for ProtoSpriteBundle {
    /// Preset matching Bevy's sprite defaults: the shared quad mesh, the
    /// sprite render pipeline, and a transparent `Visible`.
    fn default() -> Self {
        let visible = Visible {
            is_transparent: true,
            ..Default::default()
        };
        let render_pipelines = RenderPipelines::from_pipelines(vec![RenderPipeline::new(
            SPRITE_PIPELINE_HANDLE.typed(),
        )]);
        ProtoSpriteBundle {
            sprite: Default::default(),
            mesh: QUAD_HANDLE.typed(),
            material: Default::default(),
            main_pass: MainPass,
            draw: Default::default(),
            visible,
            render_pipelines,
            transform: Default::default(),
            global_transform: Default::default(),
        }
    }
}
/// System that replaces every `ProtoSprite` placeholder with a real `Sprite`
/// of the same size, removing the placeholder component afterwards.
pub fn proto_sprite_upgrade_system(mut commands: Commands, sprites: Query<(Entity, &ProtoSprite)>) {
    for (entity, proto) in sprites.iter() {
        let mut entity_commands = commands.entity(entity);
        entity_commands.insert(Sprite::new(proto.0));
        entity_commands.remove::<ProtoSprite>();
    }
}
|
#![doc = include_str!("../README.md")]
use std::iter::Iterator;
use proc_macro::TokenStream;
use quote::{quote, quote_spanned};
/// A test platform / environment selectable in `#[multiplatform_test(...)]`.
#[derive(Clone, Copy)]
enum Platform {
    /// Plain `#[test]`.
    Default,
    /// `#[tokio::test]`.
    Tokio,
    /// `#[async_std::test]`.
    AsyncStd,
    /// `#[hydroflow::test]`.
    Hydroflow,
    /// `#[wasm_bindgen_test::wasm_bindgen_test]`.
    Wasm,
    /// No attribute; injects `env_logger` init code.
    EnvLogging,
    /// No attribute; injects `tracing_subscriber` init code.
    EnvTracing,
}
impl Platform {
    // All platforms.
    const ALL: [Self; 7] = [
        Self::Default,
        Self::Tokio,
        Self::AsyncStd,
        Self::Hydroflow,
        Self::Wasm,
        Self::EnvLogging,
        Self::EnvTracing,
    ];
    // Default when no platforms are specified.
    const DEFAULT: [Self; 2] = [Self::Default, Self::Wasm];
    /// Name of platform ident in attribute.
    const fn name(self) -> &'static str {
        match self {
            Self::Default => "test",
            Self::Tokio => "tokio",
            Self::AsyncStd => "async_std",
            Self::Hydroflow => "hydroflow",
            Self::Wasm => "wasm",
            Self::EnvLogging => "env_logging",
            Self::EnvTracing => "env_tracing",
        }
    }
    /// Generate the attribute for this platform (if any).
    /// The logging/tracing pseudo-platforms contribute no attribute, only
    /// init code (see `make_init_code`).
    fn make_attribute(self) -> proc_macro2::TokenStream {
        // Fully specify crate names so that the consumer does not need to add another
        // use statement. They still need to depend on the crate in their `Cargo.toml`,
        // though.
        // TODO(mingwei): use `proc_macro_crate::crate_name(...)` to handle renames.
        match self {
            Platform::Default => quote! { #[test] },
            Platform::Tokio => quote! { #[tokio::test ] },
            Platform::AsyncStd => quote! { #[async_std::test] },
            Platform::Hydroflow => quote! { #[hydroflow::test] },
            Platform::Wasm => {
                quote! { #[wasm_bindgen_test::wasm_bindgen_test] }
            }
            Platform::EnvLogging | Platform::EnvTracing => Default::default(),
        }
    }
    /// Generate the initialization code statements for this platform (if any).
    /// Uses `try_init`/`let _` so repeated initialization across tests is not an error.
    fn make_init_code(self) -> proc_macro2::TokenStream {
        match self {
            Platform::EnvLogging => quote! {
                let _ = env_logger::builder().is_test(true).try_init();
            },
            Platform::EnvTracing => quote! {
                let subscriber = tracing_subscriber::FmtSubscriber::builder()
                    .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
                    .with_test_writer()
                    .finish();
                let _ = tracing::subscriber::set_global_default(subscriber);
            },
            _ => Default::default(),
        }
    }
}
/// See the [crate] docs for usage information.
#[proc_macro_attribute]
pub fn multiplatform_test(attr: TokenStream, body: TokenStream) -> TokenStream {
    // Bridge proc_macro -> proc_macro2 token streams and back.
    let attr = proc_macro2::TokenStream::from(attr);
    let body = proc_macro2::TokenStream::from(body);
    multiplatform_test_impl(attr, body).into()
}
/// Core implementation: parses the comma-separated platform list from `attr`,
/// then emits one test attribute per platform ahead of `body`, optionally
/// wrapping the function body with platform init code.
///
/// Malformed input produces a `compile_error!` token stream spanned at the
/// offending token rather than panicking.
fn multiplatform_test_impl(
    attr: proc_macro2::TokenStream,
    body: proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
    let mut attr = attr.into_iter();
    let mut platforms = Vec::<Platform>::new();
    // Parse `ident (, ident)*` from the attribute tokens.
    while let Some(token) = attr.next() {
        let proc_macro2::TokenTree::Ident(i) = &token else {
            return quote_spanned! {token.span()=>
                compile_error!("malformed #[multiplatform_test] attribute; expected identifier.");
            };
        };
        let name = i.to_string();
        let Some(&platform) = Platform::ALL
            .iter()
            .find(|platform| name == platform.name())
        else {
            let msg = proc_macro2::Literal::string(&format!(
                "unknown platform {}; expected one of [{}]",
                name,
                Platform::ALL.map(Platform::name).join(", "),
            ));
            return quote_spanned! {token.span()=> compile_error!(#msg); };
        };
        platforms.push(platform);
        // After each platform name, expect a comma separator or end of input.
        match &attr.next() {
            Some(proc_macro2::TokenTree::Punct(op)) if op.as_char() == ',' => {}
            Some(other) => {
                return quote_spanned! {other.span()=>
                    compile_error!("malformed `#[multiplatform_test]` attribute; expected `,`.");
                };
            }
            None => break,
        }
    }
    if platforms.is_empty() {
        platforms.extend(Platform::DEFAULT.iter());
    }
    let mut output = proc_macro2::TokenStream::new();
    let mut init_code = proc_macro2::TokenStream::new();
    for p in platforms {
        output.extend(p.make_attribute());
        init_code.extend(p.make_init_code());
    }
    if init_code.is_empty() {
        // No init code: pass the function through unchanged.
        output.extend(body);
    } else {
        // Splice init code ahead of the original function body: the last
        // token tree of a `fn` item is its `{ ... }` block.
        let mut body_head = body.into_iter().collect::<Vec<_>>();
        let Some(proc_macro2::TokenTree::Group(body_code)) = body_head.pop() else {
            panic!();
        };
        output.extend(body_head);
        output.extend(quote! {
            {
                { #init_code };
                #body_code
            }
        });
    }
    output
}
#[cfg(test)]
mod tests {
    use super::*;
    // No attribute list: should expand to the DEFAULT platforms (test + wasm).
    #[test]
    fn test_default_platforms() {
        let test_fn: proc_macro2::TokenStream = quote! { fn test() { } };
        let attrs = proc_macro2::TokenStream::new();
        let got: proc_macro2::TokenStream = multiplatform_test_impl(attrs, test_fn);
        let want = quote! {
            #[test]
            #[wasm_bindgen_test::wasm_bindgen_test]
            fn test() { }
        };
        assert_eq!(want.to_string(), got.to_string());
    }
    #[test]
    fn test_host_platform() {
        let test_fn = quote! { fn test() { } };
        let attrs = quote! { test };
        let got = multiplatform_test_impl(attrs, test_fn);
        let want = quote! {
            #[test]
            fn test() { }
        };
        assert_eq!(want.to_string(), got.to_string());
    }
    #[test]
    fn test_wasm_platform() {
        let test_fn = quote! { fn test() { } };
        let attrs = quote! { wasm };
        let got = multiplatform_test_impl(attrs, test_fn);
        let want = quote! {
            #[wasm_bindgen_test::wasm_bindgen_test]
            fn test() { }
        };
        assert_eq!(want.to_string(), got.to_string());
    }
    #[test]
    fn test_host_wasm_platform() {
        let test_fn = quote! { fn test() { } };
        let attrs = quote! { test, wasm };
        let got = multiplatform_test_impl(attrs, test_fn);
        let want = quote! {
            #[test]
            #[wasm_bindgen_test::wasm_bindgen_test]
            fn test() { }
        };
        assert_eq!(want.to_string(), got.to_string());
    }
    // Error cases below only check that a compile_error! stream is produced
    // (token streams render with a space: "compile_error !").
    #[test]
    fn test_unknown_platform() {
        let test_fn = quote! { fn test() { } };
        let attrs = quote! { hello };
        let got = multiplatform_test_impl(attrs, test_fn);
        assert!(got.to_string().starts_with("compile_error !"));
    }
    #[test]
    fn test_invalid_attr_nocomma_platform() {
        let test_fn = quote! { fn test() { } };
        let attrs = quote! { wasm() };
        let got = multiplatform_test_impl(attrs, test_fn);
        assert!(got.to_string().starts_with("compile_error !"));
    }
    #[test]
    fn test_invalid_attr_noident_platform() {
        let test_fn = quote! { fn test() { } };
        let attrs = quote! { () };
        let got = multiplatform_test_impl(attrs, test_fn);
        assert!(got.to_string().starts_with("compile_error !"));
    }
}
|
use nannou::prelude::*;
/// Entry point: runs the nannou sketch with `view` as the per-frame draw function.
fn main() {
    nannou::sketch(view).run()
}
/// Per-frame draw callback: renders an animated tessellating pattern of
/// circles, hexagons, and triangles driven by elapsed time `t`.
fn view(app: &App, frame: Frame) {
    // Begin drawing
    let draw = app.draw();
    // Clear the background to blue.
    draw.background().color(BLACK);
    // NOTE(review): `win` is unused — left over from an earlier iteration?
    let win = app.window_rect();
    // Draw an ellipse to follow the mouse.
    let t = app.time;
    // Construct the tescellating pattern
    let radius = 200.0;
    // Drifting circle: radius scaled by time, wrapped at 1200 via `%`.
    let points = (0..=360).map(|i| {
        let radian = deg_to_rad((i % 360) as f32);
        let x = radian.sin() * radius * t % 1200.0; // + radian.cos() * radius;
        let y = radian.cos() * radius * t % 1200.0; // + radian.sin() * radius;
        pt2(x,y)
    });
    draw.polyline() // create a PathStroke Builder object
        .weight(4.0)
        .rgb(t.sin().abs(), 1.0, t.sin().abs())
        // .hsv((0.2 * t) % 1000.0, 1.0, 1.0)
        // .color(GREEN)
        .points_closed(points);
    // Construct the main circle
    let points = (0..=360).map(|i| {
        let radian = deg_to_rad((i % 360) as f32);
        let x = radian.sin() * radius;
        let y = radian.cos() * radius;
        pt2(x,y)
    });
    draw.polyline() // create a PathStroke Builder object
        .weight(5.0)
        .color(WHITE)
        .points_closed(points);
    // Pink Hexagons (6 vertices via 60-degree steps), counter-rotating.
    let radius = 240.0;
    let points = (0..=360).step_by(60).map(|i| {
        let radian = deg_to_rad((i % 360) as f32);
        let x = radian.sin() * radius;
        let y = radian.cos() * radius;
        pt2(x,y)
    });
    draw.polyline() // create a PathStroke Builder object
        .weight(5.0)
        .color(PINK)
        .rotate(2.0 * t)
        .points_closed(points);
    let points = (0..=360).step_by(60).map(|i| {
        let radian = deg_to_rad((i % 360) as f32);
        let x = radian.sin() * radius;
        let y = radian.cos() * radius;
        pt2(x,y)
    });
    draw.polyline() // create a PathStroke Builder object
        .weight(5.0)
        .color(PINK)
        .rotate(-2.0 * t)
        .points_closed(points);
    // Green triangles (120-degree steps), paired to form a star.
    let points = (0..=360).step_by(120).map(|i| {
        let radian = deg_to_rad((i % 360) as f32);
        let x = radian.sin() * radius + t.sin();
        let y = radian.cos() * radius + t.sin();
        pt2(x,y)
    });
    draw.polygon() // create a PathStroke Builder object
        .stroke_weight(5.0)
        .color(rgba(0.0, 255.0, 0.0, 0.5))
        .rotate(t)
        .points(points);
    // NOTE(review): `i + 180 % 360` parses as `i + (180 % 360)` = `i + 180`;
    // harmless here since sin/cos are 360-degree periodic, but confirm intent.
    let points = (0..=360).step_by(120).map(|i| {
        let radian = deg_to_rad((i + 180 % 360) as f32);
        let x = radian.sin() * radius + t.sin();
        let y = radian.cos() * radius + t.sin();
        pt2(x,y)
    });
    draw.polygon() // create a PathStroke Builder object
        .stroke_weight(5.0)
        .color(rgba(0.0, 255.0, 0.0, 0.5))
        .rotate(-t)
        .points(points);
    // Inner, smaller triangle pair.
    let radius = 100.0;
    let points = (0..=360).step_by(120).map(|i| {
        let radian = deg_to_rad((i % 360) as f32);
        let x = radian.sin() * radius + t.sin();
        let y = radian.cos() * radius + t.sin();
        pt2(x,y)
    });
    draw.polygon() // create a PathStroke Builder object
        .stroke_weight(5.0)
        .color(rgba(0.0, 255.0, 0.0, 0.5))
        .rotate(2.0 * t)
        .points(points);
    let points = (0..=360).step_by(120).map(|i| {
        let radian = deg_to_rad((i + 180 % 360) as f32);
        let x = radian.sin() * radius + t.sin();
        let y = radian.cos() * radius + t.sin();
        pt2(x,y)
    });
    draw.polygon() // create a PathStroke Builder object
        .stroke_weight(5.0)
        .color(rgba(0.0, 255.0, 0.0, 0.5))
        .rotate(2.0 * t)
        .points(points);
    // Pink spirograph-style curves: angle multiplied by time.
    let radius = 100.0;
    let points = (0..=360).step_by(60).map(|i| {
        let radian = deg_to_rad((i + 180 % 360) as f32);
        let x = (t * radian).sin() * 2.0 * radius;
        let y = (t * radian).cos() * 2.0 * radius;
        pt2(x,y)
    });
    draw.polyline() // create a PathStroke Builder object
        .stroke_weight(5.0)
        .color(PINK)
        .rotate(2.0 * (0.5 * t).sin())
        .points(points);
    let radius = 50.0;
    let points = (0..=360).step_by(60).map(|i| {
        let radian = deg_to_rad((i + 180 % 360) as f32);
        let x = (-t * radian).sin() * 2.0 * radius;
        let y = (-t * radian).cos() * 2.0 * radius;
        pt2(x,y)
    });
    draw.polyline() // create a PathStroke Builder object
        .stroke_weight(3.0)
        .color(PINK)
        .rotate(-4.0 * (0.5 * t).sin())
        .points(points);
    // NOTE(review): this reassignment is never read afterwards.
    let radius = 240.0;
    // Write the result of our drawing to the window's frame.
    draw.to_frame(app, &frame).unwrap();
}
|
//! JSON API
mod call;
mod deploy;
mod error;
mod inputdata;
mod receipt;
mod spawn;
pub(crate) mod serde_types;
pub use call::{decode_call, encode_call, encode_call_raw};
pub use deploy::deploy_template;
pub use error::JsonError;
pub use inputdata::{decode_inputdata, encode_inputdata};
pub use receipt::decode_receipt;
pub use spawn::{decode_spawn, encode_spawn};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value as Json};
use svm_types::{Gas, ReceiptLog};
/// Provides very simple utility functions to working with [`serde_json::Value`]
/// in an easy way.
/// Provides very simple utility functions to working with [`serde_json::Value`]
/// in an easy way.
pub(crate) trait JsonSerdeUtils: Serialize + for<'a> Deserialize<'a> {
    /// Serializes `self` into a [`Json`] value.
    /// Panics if serialization fails (the implementing types are expected to
    /// always be serializable).
    fn to_json(self) -> Json {
        serde_json::to_value(self).unwrap()
    }
    /// Deserializes `self` from a JSON string, mapping failures (with the
    /// offending JSON path attached via `serde_path_to_error`) into [`JsonError`].
    fn from_json_str(json_str: &str) -> Result<Self, JsonError> {
        let json_deserializer = &mut serde_json::Deserializer::from_str(json_str);
        let item = serde_path_to_error::deserialize(json_deserializer)?;
        Ok(item)
    }
}
/// Converts a [`Json`] value to a UTF-8 valid [`Vec<u8>`] JSON representation.
///
/// # Panics
///
/// Panics if serialization type implementations fail or `json` contains a map
/// with non-string keys.
pub(crate) fn to_bytes(json: &Json) -> Vec<u8> {
serde_json::to_string(&json)
.expect("JSON serialization error")
.into_bytes()
}
/// Encodes a [`Gas`] amount for JSON: the gas units when present, `-1` when absent.
pub(crate) fn gas_to_json(gas: &Gas) -> i64 {
    if !gas.is_some() {
        -1
    } else {
        gas.unwrap() as _
    }
}
/// Encodes receipt logs as JSON objects of the form `{"data": <log bytes>}`.
///
/// Log bytes are decoded as UTF-8 with invalid sequences replaced by U+FFFD.
/// The previous implementation used `String::from_utf8_unchecked`, which is
/// undefined behavior whenever a log contains non-UTF-8 bytes; `from_utf8_lossy`
/// is behaviorally identical for valid UTF-8 and safe for everything else.
pub(crate) fn logs_to_json(logs: &[ReceiptLog]) -> Vec<Json> {
    logs.iter()
        .map(|log| {
            let data = String::from_utf8_lossy(log.as_bytes()).into_owned();
            json!({
                "data": data,
            })
        })
        .collect()
}
|
#[macro_use]
extern crate log;
extern crate async_std;
extern crate getopts;
extern crate stunnel;
use std::env;
use std::net::Shutdown;
use std::net::ToSocketAddrs;
use std::str::from_utf8;
use std::vec::Vec;
use async_std::net::TcpListener;
use async_std::net::TcpStream;
use async_std::prelude::*;
use async_std::task;
use stunnel::client::*;
use stunnel::cryptor::Cryptor;
use stunnel::logger;
use stunnel::socks5;
/// Pumps bytes from the local TCP stream into the tunnel write port.
///
/// - On EOF (`Ok(0)`): shuts down the stream's read half and signals the
///   tunnel peer that writing is done, then releases the port.
/// - On data: forwards exactly the bytes read.
/// - On read error: tears down both stream halves and closes the port.
async fn process_read(stream: &mut &TcpStream, mut write_port: TunnelWritePort) {
    loop {
        // Fresh 1 KiB buffer per read; truncated to the actual byte count.
        let mut buf = vec![0; 1024];
        match stream.read(&mut buf).await {
            Ok(0) => {
                let _ = stream.shutdown(Shutdown::Read);
                write_port.shutdown_write().await;
                write_port.drop().await;
                break;
            }
            Ok(n) => {
                buf.truncate(n);
                write_port.write(buf).await;
            }
            Err(_) => {
                let _ = stream.shutdown(Shutdown::Both);
                write_port.close().await;
                break;
            }
        }
    }
}
/// Pumps tunnel messages from the read port into the local TCP stream.
///
/// - `Data`: forwarded verbatim to the stream.
/// - `ShutdownWrite`: shuts down the stream's write half, drains and releases
///   the port.
/// - Any other message or a write failure: tears down both stream halves and
///   closes the port.
async fn process_write(stream: &mut &TcpStream, mut read_port: TunnelReadPort) {
    loop {
        let buf = match read_port.read().await {
            TunnelPortMsg::Data(buf) => buf,
            TunnelPortMsg::ShutdownWrite => {
                let _ = stream.shutdown(Shutdown::Write);
                read_port.drain();
                read_port.drop().await;
                break;
            }
            _ => {
                let _ = stream.shutdown(Shutdown::Both);
                read_port.drain();
                read_port.close().await;
                break;
            }
        };
        if stream.write_all(&buf).await.is_err() {
            let _ = stream.shutdown(Shutdown::Both);
            read_port.drain();
            read_port.close().await;
            break;
        }
    }
}
/// Services one SOCKS5 client connection over a tunnel port pair.
///
/// Performs the SOCKS5 handshake, asks the tunnel to connect to the requested
/// destination, reports success/failure back to the client, then (on success)
/// runs the two pump tasks concurrently until both finish.
async fn run_tunnel_port(
    mut stream: TcpStream,
    mut read_port: TunnelReadPort,
    mut write_port: TunnelWritePort,
) {
    match socks5::handshake(&mut stream).await {
        Ok(socks5::Destination::Address(addr)) => {
            // Send the destination as its textual form ("ip:port").
            let mut buf = Vec::new();
            let _ = std::io::Write::write_fmt(&mut buf, format_args!("{}", addr));
            write_port.connect(buf).await;
        }
        Ok(socks5::Destination::DomainName(domain_name, port)) => {
            write_port.connect_domain_name(domain_name, port).await;
        }
        _ => {
            // Handshake failed (or unknown destination): abandon the port.
            return write_port.close().await;
        }
    }
    // Wait for the server's ConnectOk carrying the resolved peer address.
    let addr = match read_port.read().await {
        TunnelPortMsg::ConnectOk(buf) => from_utf8(&buf).unwrap().to_socket_addrs().unwrap().nth(0),
        _ => None,
    };
    let success = match addr {
        Some(addr) => socks5::destination_connected(&mut stream, addr)
            .await
            .is_ok(),
        // `&& false` forces failure: destination_unreached is awaited only for
        // its side effect of notifying the SOCKS5 client.
        None => socks5::destination_unreached(&mut stream).await.is_ok() && false,
    };
    if success {
        // Two shared borrows of the same stream: one for each pump direction.
        let (reader, writer) = &mut (&stream, &stream);
        let r = process_read(reader, write_port);
        let w = process_write(writer, read_port);
        let _ = r.join(w).await;
    } else {
        let _ = stream.shutdown(Shutdown::Both);
        read_port.drain();
        write_port.close().await;
    }
}
/// Accept loop: opens `count` TCP tunnels (or a single UCP tunnel) to the
/// server, then assigns each incoming client connection to a tunnel in
/// round-robin order, spawning a task per connection. Blocks forever.
fn run_tunnels(
    listen_addr: String,
    server_addr: String,
    count: u32,
    key: Vec<u8>,
    enable_ucp: bool,
) {
    task::block_on(async move {
        let mut tunnels = Vec::new();
        if enable_ucp {
            // UCP mode uses exactly one tunnel regardless of `count`.
            let tunnel = UcpTunnel::new(0, server_addr.clone(), key.clone());
            tunnels.push(tunnel);
        } else {
            for i in 0..count {
                let tunnel = TcpTunnel::new(i, server_addr.clone(), key.clone());
                tunnels.push(tunnel);
            }
        }
        let mut index = 0;
        let listener = TcpListener::bind(listen_addr.as_str()).await.unwrap();
        let mut incoming = listener.incoming();
        while let Some(stream) = incoming.next().await {
            match stream {
                Ok(stream) => {
                    {
                        // Open a port pair on the next tunnel (round-robin).
                        let tunnel: &mut Tunnel = tunnels.get_mut(index).unwrap();
                        let (write_port, read_port) = tunnel.open_port().await;
                        task::spawn(async move {
                            run_tunnel_port(stream, read_port, write_port).await;
                        });
                    }
                    index = (index + 1) % tunnels.len();
                }
                // Accept errors are ignored; keep serving other clients.
                Err(_) => {}
            }
        }
    });
}
/// Entry point: parses CLI options, validates the secret key, initializes
/// logging, then starts the SOCKS5 client tunnels (blocks forever).
///
/// Improvements over the original: `unwrap_or_default`/`unwrap_or_else`
/// replace eagerly-allocating `unwrap_or(String::new())` /
/// `unwrap_or("...".to_string())`, and the key-length message grammar is fixed.
fn main() {
    let args: Vec<_> = env::args().collect();
    let program = args[0].clone();
    let mut opts = getopts::Options::new();
    opts.reqopt("s", "server", "server address", "server-address");
    opts.reqopt("k", "key", "secret key", "key");
    opts.optopt("c", "tunnel-count", "tunnel count", "tunnel-count");
    opts.optopt("l", "listen", "listen address", "listen-address");
    opts.optopt("", "log", "log path", "log-path");
    opts.optflag("", "enable-ucp", "enable ucp");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        Err(_) => {
            println!("{}", opts.short_usage(&program));
            return;
        }
    };
    // Required options are guaranteed present by `reqopt` once parse succeeds.
    let server_addr = matches.opt_str("s").unwrap();
    let tunnel_count = matches.opt_str("c").unwrap_or_default();
    let key = matches.opt_str("k").unwrap().into_bytes();
    let log_path = matches.opt_str("log").unwrap_or_default();
    let enable_ucp = matches.opt_present("enable-ucp");
    let listen_addr = matches
        .opt_str("l")
        .unwrap_or_else(|| "127.0.0.1:1080".to_string());
    // Reject keys outside the cryptor's supported size range up front.
    let (min, max) = Cryptor::key_size_range();
    if key.len() < min || key.len() > max {
        println!("key length must be in range [{}, {}]", min, max);
        return;
    }
    // Fall back to a single tunnel when the count is missing, zero, or unparsable.
    let count: u32 = match tunnel_count.parse() {
        Err(_) | Ok(0) => 1,
        Ok(count) => count,
    };
    logger::init(log::Level::Info, log_path, 1, 2000000).unwrap();
    info!("starting up");
    run_tunnels(listen_addr, server_addr, count, key, enable_ucp);
}
|
use super::super::HasTable;
use super::{Component, FromWorldMut, TableId};
use crate::prelude::World;
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
/// Fetch read-write table reference from a Storage.
/// This is a pretty unsafe way to obtain mutable references. Use with caution.
/// Do not store UnsafeViews for longer than the function scope, that's just asking for trouble.
///
/// Internally a non-null raw pointer to the component's table; see the
/// `Deref`/`DerefMut` impls for access.
pub struct UnsafeView<Id: TableId, C: Component<Id>>(NonNull<C::Table>);
// SAFETY: NOTE(review): these impls let the raw table pointer cross threads
// unconditionally; soundness relies on callers honoring the "function scope
// only" discipline documented on `UnsafeView` — confirm before relying on it.
unsafe impl<Id: TableId, C: Component<Id>> Send for UnsafeView<Id, C> {}
unsafe impl<Id: TableId, C: Component<Id>> Sync for UnsafeView<Id, C> {}
impl<Id: TableId, C: Component<Id>> UnsafeView<Id, C> {
    /// Raw mutable pointer to the underlying table.
    pub fn as_ptr(&mut self) -> *mut C::Table {
        self.0.as_ptr()
    }

    /// Wraps a mutable table reference in an unchecked view.
    pub fn from_table(t: &mut C::Table) -> Self {
        // `NonNull::from` on a reference is guaranteed non-null, so the
        // original `unsafe { NonNull::new_unchecked(t) }` is unnecessary.
        Self(NonNull::from(t))
    }
}
// Lets systems fetch an `UnsafeView` directly from a mutable `World`,
// delegating to the world's `HasTable` implementation.
impl<Id: TableId, C: Component<Id>> FromWorldMut for UnsafeView<Id, C>
where
    crate::world::World: HasTable<Id, C>,
{
    fn from_world_mut(w: &mut World) -> Self {
        <World as HasTable<Id, C>>::unsafe_view(w)
    }
}
// Manual impl: `#[derive(Clone)]` would wrongly require `C: Clone`;
// the view is just a pointer copy (see the `Copy` impl below).
impl<Id: TableId, C: Component<Id>> Clone for UnsafeView<Id, C> {
    fn clone(&self) -> Self {
        *self
    }
}
// Pointer-sized, so freely copyable regardless of `C`.
impl<Id: TableId, C: Component<Id>> Copy for UnsafeView<Id, C> {}
impl<Id: TableId, C: Component<Id>> Deref for UnsafeView<Id, C> {
    type Target = C::Table;
    fn deref(&self) -> &Self::Target {
        // SAFETY: NOTE(review): sound only while the table outlives the view
        // and no conflicting &mut exists — the caller contract on `UnsafeView`.
        unsafe { self.0.as_ref() }
    }
}
impl<Id: TableId, C: Component<Id>> DerefMut for UnsafeView<Id, C> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: NOTE(review): aliasing is not checked here; multiple copies
        // of the same view could yield overlapping &mut — caller must prevent it.
        unsafe { self.0.as_mut() }
    }
}
|
#![feature(test)]
extern crate test;
extern crate delight_book;
extern crate rand;
use delight_book::chapter15::*;
use rand::Rng;
// Benchmarks (and sanity-checks) the chapter-15 Hamming encode/perturb/correct
// round trip over ~780 random 64-bit words per iteration.
// NOTE(review): the println! calls inside the timed closure dominate the
// measurement — consider removing them (or gating behind a flag) so the bench
// reflects the Hamming routines, not stdout I/O.
#[bench]
fn bench_hamming_code(b: &mut test::Bencher) {
    b.iter(||
    for i in 0..(39 * 40) / 2 + 1 {
        let us = rand::thread_rng().gen::<i64>() * 3; // Generate random information bits
                                                      // (rand() always has the msb = 0).
        let mut ps = hamming_checkbits(us); // Compute their 6 check bits
        ps = ps | (hamming_parity(us ^ ps) << 6); // and prepend the overall
                                                  // parity bit.
        let mut ur = us; // Set up the received data.
        let mut pr = ps;
        let e = hamming_perturb(&mut pr, &mut ur); // Alter 0, 1, or 2 bits of pr and ur.
        let mut uc = ur;
        let c = hamming_correct(pr, &mut uc); // Correct ur if 1 error occurred.
        println!("{} {} {} {} ", ps, us, pr, ur); // Program
        println!("{} {} {}\n", e, c, uc); // trace.
        // Detected error count must match the injected count, and any
        // single-bit error must have been fully corrected.
        assert_eq!(e ,c);
        assert_eq!(e <= 1 && uc != us,false)
    });
}
use std::{
convert::{TryFrom, TryInto},
fmt::Display,
io::{BufReader, Read},
};
use thiserror::Error;
use super::{
lookup::{LookupError, LookupRef, LookupValue},
raw::{CelesteIo, NonRleString, RleString, StringReadError},
};
/// A dynamically-typed attribute value as stored in the binary map format.
#[derive(Clone, Debug)]
pub enum Value {
    Bool(bool),
    Int(i32),
    Float(f32),
    String(String),
}
impl Display for Value {
    /// Formats the wrapped primitive exactly as its own `Display` output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let inner: &dyn Display = match self {
            Value::Bool(x) => x,
            Value::Int(x) => x,
            Value::Float(x) => x,
            Value::String(x) => x,
        };
        write!(f, "{}", inner)
    }
}
/// Error returned by the `TryFrom<Value>` conversions below when the variant
/// does not match the requested target type.
#[derive(Error, Debug)]
#[error("value does not match target conversion type")]
pub struct ValueConversionError;
impl TryFrom<Value> for i32 {
    type Error = ValueConversionError;

    /// Succeeds only for `Value::Int`.
    fn try_from(value: Value) -> Result<Self, Self::Error> {
        if let Value::Int(x) = value {
            Ok(x)
        } else {
            Err(ValueConversionError)
        }
    }
}
impl TryFrom<&Value> for i32 {
    type Error = ValueConversionError;

    /// Borrowing variant: copies the integer out of `Value::Int`.
    fn try_from(value: &Value) -> Result<Self, Self::Error> {
        if let Value::Int(x) = value {
            Ok(*x)
        } else {
            Err(ValueConversionError)
        }
    }
}
impl TryFrom<Value> for u32 {
    type Error = ValueConversionError;
    /// Succeeds only for a `Value::Int` whose payload is non-negative
    /// (i.e. fits in a `u32`).
    fn try_from(value: Value) -> Result<Self, Self::Error> {
        if let Value::Int(n) = value {
            u32::try_from(n).map_err(|_| ValueConversionError)
        } else {
            Err(ValueConversionError)
        }
    }
}
impl TryFrom<&Value> for u32 {
    type Error = ValueConversionError;
    /// Borrowed variant of the `u32` conversion: succeeds for an in-range
    /// `Value::Int`, fails otherwise.
    fn try_from(value: &Value) -> Result<Self, Self::Error> {
        if let Value::Int(n) = value {
            u32::try_from(*n).map_err(|_| ValueConversionError)
        } else {
            Err(ValueConversionError)
        }
    }
}
impl TryFrom<Value> for String {
    type Error = ValueConversionError;
    /// Moves the owned `String` out of a `Value::String`; fails otherwise.
    fn try_from(value: Value) -> Result<Self, Self::Error> {
        if let Value::String(s) = value {
            Ok(s)
        } else {
            Err(ValueConversionError)
        }
    }
}
// Generates a `From<$x> for Value` impl per listed pair: the source type `$x`
// is converted with `.into()` and wrapped in the `Value::$var` variant (so
// e.g. `u8` and `i16` widen into the `i32` stored by `Value::Int`).
macro_rules! impl_value_from {
    ( $(( $var:ident, $x:ty )),* ) => {
        $(
            impl From<$x> for Value {
                fn from(x: $x) -> Self {
                    Value::$var((x).into())
                }
            }
        )*
    };
}
// Register the conversions for every primitive payload the reader produces.
impl_value_from!(
    (Bool, bool),
    (Int, u8),
    (Int, i16),
    (Int, i32),
    (Float, f32),
    (String, String)
);
/// Errors that can occur while decoding a serialized `Value`.
#[derive(Error, Debug)]
pub enum ReadValueError {
    /// Underlying reader failure.
    #[error("failed to read value bytes")]
    Io(#[from] std::io::Error),
    /// A string payload could not be decoded.
    #[error("failed to read string")]
    String(#[from] StringReadError),
    /// The one-byte type tag was outside the known range (0..=7).
    #[error("unknown value type {0}")]
    UnknownValueType(u8),
    /// A string-lookup reference failed to resolve.
    #[error("lookup error")]
    LookupError(#[from] LookupError),
}
impl CelesteIo for Value {
    type Error = ReadValueError;
    /// Reads one tagged value: a single type-tag byte followed by the payload.
    ///
    /// Tag table (as implemented here):
    /// 0 = bool, 1 = u8, 2 = i16, 3 = i32, 4 = f32,
    /// 5 = lookup-table string, 6 = plain string, 7 = run-length-encoded string.
    /// Each payload is converted into `Value` via its `Into` impl.
    fn read<R: Read>(
        reader: &mut BufReader<R>,
        lookup: Option<LookupRef<'_>>,
    ) -> Result<Self, Self::Error> {
        match u8::read(reader, lookup)? {
            0 => Ok(bool::read(reader, lookup)?.into()),
            1 => Ok(u8::read(reader, lookup)?.into()),
            2 => Ok(i16::read(reader, lookup)?.into()),
            3 => Ok(i32::read(reader, lookup)?.into()),
            4 => Ok(f32::read(reader, lookup)?.into()),
            5 => Ok(LookupValue::read(reader, lookup)?.into()),
            6 => Ok(NonRleString::read(reader, lookup)?.into()),
            7 => Ok(RleString::read(reader, lookup)?.into()),
            // Any other tag means a corrupt or unsupported stream.
            x => Err(ReadValueError::UnknownValueType(x)),
        }
    }
}
|
use anyhow::Error;
use anyhow::Result;
const RAM_SIZE: usize = 4096;
#[derive(PartialEq, Eq, Debug)]
pub struct Memory([u8; RAM_SIZE]);
impl Memory {
pub fn new() -> Self {
let mut mem = [0; RAM_SIZE];
Self::load_font(&mut mem);
Self(mem)
}
pub fn get_byte(&self, addr: u16) -> Result<&u8> {
self.0.get(addr as usize).ok_or_else(|| {
Error::msg(format!(
"Attempting to access memory at {:X}, but {:X} is the maximum. Exiting.",
addr, RAM_SIZE,
))
})
}
pub fn get_byte_mut(&mut self, addr: u16) -> Result<&mut u8> {
self.0.get_mut(addr as usize).ok_or_else(|| {
Error::msg(format!(
"Attempting to access memory at {:X}, but {:X} is the maximum. Exiting.",
addr, RAM_SIZE,
))
})
}
pub fn get_word(&self, addr: u16) -> Result<u16> {
Ok((u16::from(*self.get_byte(addr)?) << 8) | u16::from(*self.get_byte(addr + 1)?))
}
pub fn index_of_font_char(byte: u8) -> Result<u16> {
if byte < 0x10 {
Ok(u16::from(0x50 + (byte * 5)))
} else {
Err(Error::msg(format!(
"'{}' is not a character within the current font. Exiting.",
byte
)))
}
}
pub fn load_rom(&mut self, rom: &[u8]) -> Result<()> {
if rom.len() >= (self.0.len() - 0x200) {
return Err(Error::msg(format!(
"The rom that you are attempting to load is too large ({}). {} bytes is the maximum.",
rom.len(),
self.0.len(),
)));
}
for (i, byte) in rom.iter().copied().enumerate() {
self.0[0x200 + i] = byte;
}
Ok(())
}
fn load_font(memory: &mut [u8]) {
const FONT: [u8; 80] = [
0xF0, 0x90, 0x90, 0x90, 0xF0, // 0
0x20, 0x60, 0x20, 0x20, 0x70, // 1
0xF0, 0x10, 0xF0, 0x80, 0xF0, // 2
0xF0, 0x10, 0xF0, 0x10, 0xF0, // 3
0x90, 0x90, 0xF0, 0x10, 0x10, // 4
0xF0, 0x80, 0xF0, 0x10, 0xF0, // 5
0xF0, 0x80, 0xF0, 0x90, 0xF0, // 6
0xF0, 0x10, 0x20, 0x40, 0x40, // 7
0xF0, 0x90, 0xF0, 0x90, 0xF0, // 8
0xF0, 0x90, 0xF0, 0x10, 0xF0, // 9
0xF0, 0x90, 0xF0, 0x90, 0x90, // A
0xE0, 0x90, 0xE0, 0x90, 0xE0, // B
0xF0, 0x80, 0x80, 0x80, 0xF0, // C
0xE0, 0x90, 0x90, 0x90, 0xE0, // D
0xF0, 0x80, 0xF0, 0x80, 0xF0, // E
0xF0, 0x80, 0xF0, 0x80, 0x80, // F
];
for (i, byte) in FONT.iter().copied().enumerate() {
memory[0x50 + i] = byte;
}
}
}
|
mod p1qa;
use p1qa::build_vector_verbose;
/// Entry point: builds the chapter's demo vector and prints it with `Debug`.
fn main() {
    let values = build_vector_verbose();
    println!("{:?}", values)
}
|
use super::helpers::edits::get_random_edit;
use super::helpers::fixtures::{fixtures_dir, get_language, get_test_language};
use super::helpers::random::Rand;
use crate::generate::generate_parser_for_grammar;
use crate::parse::perform_edit;
use std::fs;
use tree_sitter::{Node, Parser, Point, Tree};
/// JSON document shared by most node tests; byte offsets and row/column
/// positions asserted below are relative to this exact layout.
/// `'static` is implied on a `const` reference, so the explicit lifetime
/// was redundant (clippy: redundant_static_lifetimes).
const JSON_EXAMPLE: &str = r#"
[
123,
false,
{
"x": null
}
]
"#;
/// Grammar JSON used by `test_node_named_child_with_aliases_and_extras`:
/// `b` is aliased to `B`, hidden `_c` is aliased to `C`, and `comment` is an
/// extra. `'static` is implied on a `const` reference, so the explicit
/// lifetime was redundant (clippy: redundant_static_lifetimes).
const GRAMMAR_WITH_ALIASES_AND_EXTRAS: &str = r#"{
"name": "aliases_and_extras",
"extras": [
{"type": "PATTERN", "value": "\\s+"},
{"type": "SYMBOL", "name": "comment"}
],
"rules": {
"a": {
"type": "SEQ",
"members": [
{"type": "SYMBOL", "name": "b"},
{
"type": "ALIAS",
"value": "B",
"named": true,
"content": {"type": "SYMBOL", "name": "b"}
},
{
"type": "ALIAS",
"value": "C",
"named": true,
"content": {"type": "SYMBOL", "name": "_c"}
}
]
},
"b": {"type": "STRING", "value": "b"},
"_c": {"type": "STRING", "value": "c"},
"comment": {"type": "STRING", "value": "..."}
}
}"#;
// Walks the JSON example with `child`/`child_count`, checking kinds,
// namedness, byte offsets, row/column positions, and parent links.
#[test]
fn test_node_child() {
    let tree = parse_json_example();
    let array_node = tree.root_node().child(0).unwrap();
    assert_eq!(array_node.kind(), "array");
    assert_eq!(array_node.named_child_count(), 3);
    assert_eq!(array_node.start_byte(), JSON_EXAMPLE.find("[").unwrap());
    assert_eq!(array_node.end_byte(), JSON_EXAMPLE.find("]").unwrap() + 1);
    assert_eq!(array_node.start_position(), Point::new(2, 0));
    assert_eq!(array_node.end_position(), Point::new(8, 1));
    // `child` includes anonymous tokens: brackets and commas count.
    assert_eq!(array_node.child_count(), 7);
    let left_bracket_node = array_node.child(0).unwrap();
    let number_node = array_node.child(1).unwrap();
    let comma_node1 = array_node.child(2).unwrap();
    let false_node = array_node.child(3).unwrap();
    let comma_node2 = array_node.child(4).unwrap();
    let object_node = array_node.child(5).unwrap();
    let right_bracket_node = array_node.child(6).unwrap();
    assert_eq!(left_bracket_node.kind(), "[");
    assert_eq!(number_node.kind(), "number");
    assert_eq!(comma_node1.kind(), ",");
    assert_eq!(false_node.kind(), "false");
    assert_eq!(comma_node2.kind(), ",");
    assert_eq!(object_node.kind(), "object");
    assert_eq!(right_bracket_node.kind(), "]");
    // Punctuation tokens are anonymous; rule nodes are named.
    assert_eq!(left_bracket_node.is_named(), false);
    assert_eq!(number_node.is_named(), true);
    assert_eq!(comma_node1.is_named(), false);
    assert_eq!(false_node.is_named(), true);
    assert_eq!(comma_node2.is_named(), false);
    assert_eq!(object_node.is_named(), true);
    assert_eq!(right_bracket_node.is_named(), false);
    assert_eq!(number_node.start_byte(), JSON_EXAMPLE.find("123").unwrap());
    assert_eq!(
        number_node.end_byte(),
        JSON_EXAMPLE.find("123").unwrap() + 3
    );
    assert_eq!(number_node.start_position(), Point::new(3, 2));
    assert_eq!(number_node.end_position(), Point::new(3, 5));
    assert_eq!(false_node.start_byte(), JSON_EXAMPLE.find("false").unwrap());
    assert_eq!(
        false_node.end_byte(),
        JSON_EXAMPLE.find("false").unwrap() + 5
    );
    assert_eq!(false_node.start_position(), Point::new(4, 2));
    assert_eq!(false_node.end_position(), Point::new(4, 7));
    assert_eq!(object_node.start_byte(), JSON_EXAMPLE.find("{").unwrap());
    assert_eq!(object_node.start_position(), Point::new(5, 2));
    assert_eq!(object_node.end_position(), Point::new(7, 3));
    // Descend into the object: braces plus one pair.
    assert_eq!(object_node.child_count(), 3);
    let left_brace_node = object_node.child(0).unwrap();
    let pair_node = object_node.child(1).unwrap();
    let right_brace_node = object_node.child(2).unwrap();
    assert_eq!(left_brace_node.kind(), "{");
    assert_eq!(pair_node.kind(), "pair");
    assert_eq!(right_brace_node.kind(), "}");
    assert_eq!(left_brace_node.is_named(), false);
    assert_eq!(pair_node.is_named(), true);
    assert_eq!(right_brace_node.is_named(), false);
    assert_eq!(pair_node.start_byte(), JSON_EXAMPLE.find("\"x\"").unwrap());
    assert_eq!(pair_node.end_byte(), JSON_EXAMPLE.find("null").unwrap() + 4);
    assert_eq!(pair_node.start_position(), Point::new(6, 4));
    assert_eq!(pair_node.end_position(), Point::new(6, 13));
    assert_eq!(pair_node.child_count(), 3);
    let string_node = pair_node.child(0).unwrap();
    let colon_node = pair_node.child(1).unwrap();
    let null_node = pair_node.child(2).unwrap();
    assert_eq!(string_node.kind(), "string");
    assert_eq!(colon_node.kind(), ":");
    assert_eq!(null_node.kind(), "null");
    assert_eq!(string_node.is_named(), true);
    assert_eq!(colon_node.is_named(), false);
    assert_eq!(null_node.is_named(), true);
    assert_eq!(
        string_node.start_byte(),
        JSON_EXAMPLE.find("\"x\"").unwrap()
    );
    assert_eq!(
        string_node.end_byte(),
        JSON_EXAMPLE.find("\"x\"").unwrap() + 3
    );
    assert_eq!(string_node.start_position(), Point::new(6, 4));
    assert_eq!(string_node.end_position(), Point::new(6, 7));
    assert_eq!(null_node.start_byte(), JSON_EXAMPLE.find("null").unwrap());
    assert_eq!(null_node.end_byte(), JSON_EXAMPLE.find("null").unwrap() + 4);
    assert_eq!(null_node.start_position(), Point::new(6, 9));
    assert_eq!(null_node.end_position(), Point::new(6, 13));
    // Parent pointers are consistent all the way up to the root.
    assert_eq!(string_node.parent().unwrap(), pair_node);
    assert_eq!(null_node.parent().unwrap(), pair_node);
    assert_eq!(pair_node.parent().unwrap(), object_node);
    assert_eq!(number_node.parent().unwrap(), array_node);
    assert_eq!(false_node.parent().unwrap(), array_node);
    assert_eq!(object_node.parent().unwrap(), array_node);
    assert_eq!(array_node.parent().unwrap(), tree.root_node());
    assert_eq!(tree.root_node().parent(), None);
}
// The `children` iterator yields every child (anonymous included), while
// `named_children` filters to named nodes only.
#[test]
fn test_node_children() {
    let tree = parse_json_example();
    let mut cursor = tree.walk();
    let array_node = tree.root_node().child(0).unwrap();
    assert_eq!(
        array_node
            .children(&mut cursor)
            .map(|n| n.kind())
            .collect::<Vec<_>>(),
        &["[", "number", ",", "false", ",", "object", "]",]
    );
    assert_eq!(
        array_node
            .named_children(&mut cursor)
            .map(|n| n.kind())
            .collect::<Vec<_>>(),
        &["number", "false", "object"]
    );
    // The same cursor can be reused for a nested node's children.
    let object_node = array_node
        .named_children(&mut cursor)
        .find(|n| n.kind() == "object")
        .unwrap();
    assert_eq!(
        object_node
            .children(&mut cursor)
            .map(|n| n.kind())
            .collect::<Vec<_>>(),
        &["{", "pair", "}",]
    );
}
// `children_by_field_name` yields every child bound to the same field —
// here, the three `alternative` (elif) clauses of a Python if-statement.
#[test]
fn test_node_children_by_field_name() {
    let mut parser = Parser::new();
    parser.set_language(get_language("python")).unwrap();
    let source = "
if one:
a()
elif two:
b()
elif three:
c()
elif four:
d()
";
    let tree = parser.parse(source, None).unwrap();
    let node = tree.root_node().child(0).unwrap();
    assert_eq!(node.kind(), "if_statement");
    let mut cursor = tree.walk();
    let alternatives = node.children_by_field_name("alternative", &mut cursor);
    // Slice each elif clause's condition text out of the source.
    let alternative_texts =
        alternatives.map(|n| &source[n.child_by_field_name("condition").unwrap().byte_range()]);
    assert_eq!(
        alternative_texts.collect::<Vec<_>>(),
        &["two", "three", "four",]
    );
}
// Regression test: looking up a field that points through a hidden node must
// still report the visible node as the child's parent.
#[test]
fn test_node_parent_of_child_by_field_name() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let tree = parser.parse("foo(a().b[0].c.d.e())", None).unwrap();
    let call_node = tree
        .root_node()
        .named_child(0)
        .unwrap()
        .named_child(0)
        .unwrap();
    assert_eq!(call_node.kind(), "call_expression");
    // Regression test - when a field points to a hidden node (in this case, `_expression`)
    // the hidden node should not be added to the node parent cache.
    assert_eq!(
        call_node.child_by_field_name("function").unwrap().parent(),
        Some(call_node)
    );
}
// `field_name_for_child` maps child indexes of a C binary expression to the
// grammar's `left` / `operator` / `right` field names.
#[test]
fn test_node_field_name_for_child() {
    let mut parser = Parser::new();
    parser.set_language(get_language("c")).unwrap();
    let tree = parser.parse("int w = x + y;", None).unwrap();
    let translation_unit_node = tree.root_node();
    let declaration_node = translation_unit_node.named_child(0).unwrap();
    let binary_expression_node = declaration_node
        .child_by_field_name("declarator")
        .unwrap()
        .child_by_field_name("value")
        .unwrap();
    assert_eq!(binary_expression_node.field_name_for_child(0), Some("left"));
    assert_eq!(
        binary_expression_node.field_name_for_child(1),
        Some("operator")
    );
    assert_eq!(
        binary_expression_node.field_name_for_child(2),
        Some("right")
    );
    // Negative test - Not a valid child index
    assert_eq!(binary_expression_node.field_name_for_child(3), None);
}
#[test]
fn test_node_child_by_field_name_with_extra_hidden_children() {
    let mut parser = Parser::new();
    parser.set_language(get_language("python")).unwrap();
    // In the Python grammar, some fields are applied to `suite` nodes,
    // which consist of an invisible `indent` token followed by a block.
    // Check that when searching for a child with a field name, we don't
    // get thrown off by invisible tokens like `indent` and still resolve
    // the field to the visible child.
    let tree = parser.parse("while a:\n  pass", None).unwrap();
    let while_node = tree.root_node().child(0).unwrap();
    assert_eq!(while_node.kind(), "while_statement");
    assert_eq!(
        while_node.child_by_field_name("body").unwrap(),
        while_node.child(3).unwrap(),
    );
}
// Mirror of `test_node_child`, but navigating with `named_child`, which
// skips anonymous punctuation tokens entirely.
#[test]
fn test_node_named_child() {
    let tree = parse_json_example();
    let array_node = tree.root_node().child(0).unwrap();
    let number_node = array_node.named_child(0).unwrap();
    let false_node = array_node.named_child(1).unwrap();
    let object_node = array_node.named_child(2).unwrap();
    assert_eq!(number_node.kind(), "number");
    assert_eq!(number_node.start_byte(), JSON_EXAMPLE.find("123").unwrap());
    assert_eq!(
        number_node.end_byte(),
        JSON_EXAMPLE.find("123").unwrap() + 3
    );
    assert_eq!(number_node.start_position(), Point::new(3, 2));
    assert_eq!(number_node.end_position(), Point::new(3, 5));
    assert_eq!(false_node.kind(), "false");
    assert_eq!(false_node.start_byte(), JSON_EXAMPLE.find("false").unwrap());
    assert_eq!(
        false_node.end_byte(),
        JSON_EXAMPLE.find("false").unwrap() + 5
    );
    assert_eq!(false_node.start_position(), Point::new(4, 2));
    assert_eq!(false_node.end_position(), Point::new(4, 7));
    assert_eq!(object_node.kind(), "object");
    assert_eq!(object_node.start_byte(), JSON_EXAMPLE.find("{").unwrap());
    assert_eq!(object_node.start_position(), Point::new(5, 2));
    assert_eq!(object_node.end_position(), Point::new(7, 3));
    // Braces don't count: the object has a single named child, the pair.
    assert_eq!(object_node.named_child_count(), 1);
    let pair_node = object_node.named_child(0).unwrap();
    assert_eq!(pair_node.kind(), "pair");
    assert_eq!(pair_node.start_byte(), JSON_EXAMPLE.find("\"x\"").unwrap());
    assert_eq!(pair_node.end_byte(), JSON_EXAMPLE.find("null").unwrap() + 4);
    assert_eq!(pair_node.start_position(), Point::new(6, 4));
    assert_eq!(pair_node.end_position(), Point::new(6, 13));
    let string_node = pair_node.named_child(0).unwrap();
    let null_node = pair_node.named_child(1).unwrap();
    assert_eq!(string_node.kind(), "string");
    assert_eq!(null_node.kind(), "null");
    assert_eq!(
        string_node.start_byte(),
        JSON_EXAMPLE.find("\"x\"").unwrap()
    );
    assert_eq!(
        string_node.end_byte(),
        JSON_EXAMPLE.find("\"x\"").unwrap() + 3
    );
    assert_eq!(string_node.start_position(), Point::new(6, 4));
    assert_eq!(string_node.end_position(), Point::new(6, 7));
    assert_eq!(null_node.start_byte(), JSON_EXAMPLE.find("null").unwrap());
    assert_eq!(null_node.end_byte(), JSON_EXAMPLE.find("null").unwrap() + 4);
    assert_eq!(null_node.start_position(), Point::new(6, 9));
    assert_eq!(null_node.end_position(), Point::new(6, 13));
    // Parent pointers are consistent all the way up to the root.
    assert_eq!(string_node.parent().unwrap(), pair_node);
    assert_eq!(null_node.parent().unwrap(), pair_node);
    assert_eq!(pair_node.parent().unwrap(), object_node);
    assert_eq!(number_node.parent().unwrap(), array_node);
    assert_eq!(false_node.parent().unwrap(), array_node);
    assert_eq!(object_node.parent().unwrap(), array_node);
    assert_eq!(array_node.parent().unwrap(), tree.root_node());
    assert_eq!(tree.root_node().parent(), None);
}
// Aliased nodes (`B`, `C`) and extras (`comment`) all appear as named
// children in order, using the grammar defined at the top of this file.
#[test]
fn test_node_named_child_with_aliases_and_extras() {
    let (parser_name, parser_code) =
        generate_parser_for_grammar(GRAMMAR_WITH_ALIASES_AND_EXTRAS).unwrap();
    let mut parser = Parser::new();
    parser
        .set_language(get_test_language(&parser_name, &parser_code, None))
        .unwrap();
    let tree = parser.parse("b ... b ... c", None).unwrap();
    let root = tree.root_node();
    assert_eq!(root.to_sexp(), "(a (b) (comment) (B) (comment) (C))");
    assert_eq!(root.named_child_count(), 5);
    assert_eq!(root.named_child(0).unwrap().kind(), "b");
    assert_eq!(root.named_child(1).unwrap().kind(), "comment");
    assert_eq!(root.named_child(2).unwrap().kind(), "B");
    assert_eq!(root.named_child(3).unwrap().kind(), "comment");
    assert_eq!(root.named_child(4).unwrap().kind(), "C");
}
// `descendant_count` matches a manual traversal, and `goto_descendant`
// reaches every node by index — in both forward and reverse order.
#[test]
fn test_node_descendant_count() {
    let tree = parse_json_example();
    let value_node = tree.root_node();
    let all_nodes = get_all_nodes(&tree);
    assert_eq!(value_node.descendant_count(), all_nodes.len());
    let mut cursor = value_node.walk();
    for (i, node) in all_nodes.iter().enumerate() {
        cursor.goto_descendant(i);
        assert_eq!(cursor.node(), *node, "index {i}");
    }
    for (i, node) in all_nodes.iter().enumerate().rev() {
        cursor.goto_descendant(i);
        assert_eq!(cursor.node(), *node, "rev index {i}");
    }
}
// Minimal tree (root plus one leaf): descendant indexing and cursor depth
// behave correctly at both levels.
#[test]
fn test_descendant_count_single_node_tree() {
    let mut parser = Parser::new();
    parser
        .set_language(get_language("embedded-template"))
        .unwrap();
    let tree = parser.parse("hello", None).unwrap();
    let nodes = get_all_nodes(&tree);
    assert_eq!(nodes.len(), 2);
    assert_eq!(tree.root_node().descendant_count(), 2);
    let mut cursor = tree.root_node().walk();
    cursor.goto_descendant(0);
    assert_eq!(cursor.depth(), 0);
    assert_eq!(cursor.node(), nodes[0]);
    cursor.goto_descendant(1);
    assert_eq!(cursor.depth(), 1);
    assert_eq!(cursor.node(), nodes[1]);
}
// `descendant_for_byte_range` / `descendant_for_point_range` return the
// smallest node spanning the query, across exact, partial-overlap,
// zero-width, and multi-leaf cases.
#[test]
fn test_node_descendant_for_range() {
    let tree = parse_json_example();
    let array_node = tree.root_node();
    // Leaf node exactly matches the given bounds - byte query
    let colon_index = JSON_EXAMPLE.find(":").unwrap();
    let colon_node = array_node
        .descendant_for_byte_range(colon_index, colon_index + 1)
        .unwrap();
    assert_eq!(colon_node.kind(), ":");
    assert_eq!(colon_node.start_byte(), colon_index);
    assert_eq!(colon_node.end_byte(), colon_index + 1);
    assert_eq!(colon_node.start_position(), Point::new(6, 7));
    assert_eq!(colon_node.end_position(), Point::new(6, 8));
    // Leaf node exactly matches the given bounds - point query
    let colon_node = array_node
        .descendant_for_point_range(Point::new(6, 7), Point::new(6, 8))
        .unwrap();
    assert_eq!(colon_node.kind(), ":");
    assert_eq!(colon_node.start_byte(), colon_index);
    assert_eq!(colon_node.end_byte(), colon_index + 1);
    assert_eq!(colon_node.start_position(), Point::new(6, 7));
    assert_eq!(colon_node.end_position(), Point::new(6, 8));
    // The given point is between two adjacent leaf nodes - byte query
    let colon_index = JSON_EXAMPLE.find(":").unwrap();
    let colon_node = array_node
        .descendant_for_byte_range(colon_index, colon_index)
        .unwrap();
    assert_eq!(colon_node.kind(), ":");
    assert_eq!(colon_node.start_byte(), colon_index);
    assert_eq!(colon_node.end_byte(), colon_index + 1);
    assert_eq!(colon_node.start_position(), Point::new(6, 7));
    assert_eq!(colon_node.end_position(), Point::new(6, 8));
    // The given point is between two adjacent leaf nodes - point query
    let colon_node = array_node
        .descendant_for_point_range(Point::new(6, 7), Point::new(6, 7))
        .unwrap();
    assert_eq!(colon_node.kind(), ":");
    assert_eq!(colon_node.start_byte(), colon_index);
    assert_eq!(colon_node.end_byte(), colon_index + 1);
    assert_eq!(colon_node.start_position(), Point::new(6, 7));
    assert_eq!(colon_node.end_position(), Point::new(6, 8));
    // Leaf node starts at the lower bound, ends after the upper bound - byte query
    let string_index = JSON_EXAMPLE.find("\"x\"").unwrap();
    let string_node = array_node
        .descendant_for_byte_range(string_index, string_index + 2)
        .unwrap();
    assert_eq!(string_node.kind(), "string");
    assert_eq!(string_node.start_byte(), string_index);
    assert_eq!(string_node.end_byte(), string_index + 3);
    assert_eq!(string_node.start_position(), Point::new(6, 4));
    assert_eq!(string_node.end_position(), Point::new(6, 7));
    // Leaf node starts at the lower bound, ends after the upper bound - point query
    let string_node = array_node
        .descendant_for_point_range(Point::new(6, 4), Point::new(6, 6))
        .unwrap();
    assert_eq!(string_node.kind(), "string");
    assert_eq!(string_node.start_byte(), string_index);
    assert_eq!(string_node.end_byte(), string_index + 3);
    assert_eq!(string_node.start_position(), Point::new(6, 4));
    assert_eq!(string_node.end_position(), Point::new(6, 7));
    // Leaf node starts before the lower bound, ends at the upper bound - byte query
    let null_index = JSON_EXAMPLE.find("null").unwrap();
    let null_node = array_node
        .descendant_for_byte_range(null_index + 1, null_index + 4)
        .unwrap();
    assert_eq!(null_node.kind(), "null");
    assert_eq!(null_node.start_byte(), null_index);
    assert_eq!(null_node.end_byte(), null_index + 4);
    assert_eq!(null_node.start_position(), Point::new(6, 9));
    assert_eq!(null_node.end_position(), Point::new(6, 13));
    // Leaf node starts before the lower bound, ends at the upper bound - point query
    let null_node = array_node
        .descendant_for_point_range(Point::new(6, 11), Point::new(6, 13))
        .unwrap();
    assert_eq!(null_node.kind(), "null");
    assert_eq!(null_node.start_byte(), null_index);
    assert_eq!(null_node.end_byte(), null_index + 4);
    assert_eq!(null_node.start_position(), Point::new(6, 9));
    assert_eq!(null_node.end_position(), Point::new(6, 13));
    // The bounds span multiple leaf nodes - return the smallest node that does span it.
    let pair_node = array_node
        .descendant_for_byte_range(string_index + 2, string_index + 4)
        .unwrap();
    assert_eq!(pair_node.kind(), "pair");
    assert_eq!(pair_node.start_byte(), string_index);
    assert_eq!(pair_node.end_byte(), string_index + 9);
    assert_eq!(pair_node.start_position(), Point::new(6, 4));
    assert_eq!(pair_node.end_position(), Point::new(6, 13));
    assert_eq!(colon_node.parent(), Some(pair_node));
    // no leaf spans the given range - return the smallest node that does span it.
    let pair_node = array_node
        .named_descendant_for_point_range(Point::new(6, 6), Point::new(6, 8))
        .unwrap();
    assert_eq!(pair_node.kind(), "pair");
    assert_eq!(pair_node.start_byte(), string_index);
    assert_eq!(pair_node.end_byte(), string_index + 9);
    assert_eq!(pair_node.start_position(), Point::new(6, 4));
    assert_eq!(pair_node.end_position(), Point::new(6, 13));
}
// After random edits, `Node::edit` on stale nodes must produce the same
// kinds/positions as nodes taken from the freshly edited tree.
#[test]
fn test_node_edit() {
    let mut code = JSON_EXAMPLE.as_bytes().to_vec();
    let mut tree = parse_json_example();
    let mut rand = Rand::new(0);
    for _ in 0..10 {
        let mut nodes_before = get_all_nodes(&tree);
        let edit = get_random_edit(&mut rand, &mut code);
        let mut tree2 = tree.clone();
        let edit = perform_edit(&mut tree2, &mut code, &edit);
        // Manually shift each stale node by the edit...
        for node in nodes_before.iter_mut() {
            node.edit(&edit);
        }
        // ...and compare against the re-walked, edited tree.
        let nodes_after = get_all_nodes(&tree2);
        for (i, node) in nodes_before.into_iter().enumerate() {
            assert_eq!(
                (node.kind(), node.start_byte(), node.start_position()),
                (
                    nodes_after[i].kind(),
                    nodes_after[i].start_byte(),
                    nodes_after[i].start_position()
                ),
            );
        }
        tree = tree2;
    }
}
// `root_node_with_offset` shifts every byte offset and point in the tree,
// including nodes reached through children and cursors.
#[test]
fn test_root_node_with_offset() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let tree = parser.parse("  if (a) b", None).unwrap();
    let node = tree.root_node_with_offset(6, Point::new(2, 2));
    assert_eq!(node.byte_range(), 8..16);
    assert_eq!(node.start_position(), Point::new(2, 4));
    assert_eq!(node.end_position(), Point::new(2, 12));
    let child = node.child(0).unwrap().child(2).unwrap();
    assert_eq!(child.kind(), "expression_statement");
    assert_eq!(child.byte_range(), 15..16);
    assert_eq!(child.start_position(), Point::new(2, 11));
    assert_eq!(child.end_position(), Point::new(2, 12));
    // Cursor navigation must observe the same offset.
    let mut cursor = node.walk();
    cursor.goto_first_child();
    cursor.goto_first_child();
    cursor.goto_next_sibling();
    let child = cursor.node();
    assert_eq!(child.kind(), "parenthesized_expression");
    assert_eq!(child.byte_range(), 11..14);
    assert_eq!(child.start_position(), Point::new(2, 7));
    assert_eq!(child.end_position(), Point::new(2, 10));
}
// Comments are grammar "extras": `is_extra` is true for the comment node
// and false for ordinary nodes like the program root.
#[test]
fn test_node_is_extra() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let tree = parser.parse("foo(/* hi */);", None).unwrap();
    let root_node = tree.root_node();
    let comment_node = root_node.descendant_for_byte_range(7, 7).unwrap();
    assert_eq!(root_node.kind(), "program");
    assert_eq!(comment_node.kind(), "comment");
    assert!(!root_node.is_extra());
    assert!(comment_node.is_extra());
}
// `to_sexp` quotes anonymous tokens (like `"if"` and `"("`) and leaves
// named nodes (like `identifier`) bare.
#[test]
fn test_node_sexp() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let tree = parser.parse("if (a) b", None).unwrap();
    let root_node = tree.root_node();
    let if_node = root_node.descendant_for_byte_range(0, 0).unwrap();
    let paren_node = root_node.descendant_for_byte_range(3, 3).unwrap();
    let identifier_node = root_node.descendant_for_byte_range(4, 4).unwrap();
    assert_eq!(if_node.kind(), "if");
    assert_eq!(if_node.to_sexp(), "(\"if\")");
    assert_eq!(paren_node.kind(), "(");
    assert_eq!(paren_node.to_sexp(), "(\"(\")");
    assert_eq!(identifier_node.kind(), "identifier");
    assert_eq!(identifier_node.to_sexp(), "(identifier)");
}
// Fields through hidden rules: a field on a hidden node resolves to its
// visible child, and fields declared inside hidden rules are reachable from
// the parent. Verified via both `child_by_field_name` and cursor traversal.
#[test]
fn test_node_field_names() {
    let (parser_name, parser_code) = generate_parser_for_grammar(
        r#"
{
"name": "test_grammar_with_fields",
"extras": [
{"type": "PATTERN", "value": "\\s+"}
],
"rules": {
"rule_a": {
"type": "SEQ",
"members": [
{
"type": "FIELD",
"name": "field_1",
"content": {"type": "STRING", "value": "child-0"}
},
{
"type": "CHOICE",
"members": [
{"type": "STRING", "value": "child-1"},
{"type": "BLANK"},
// This isn't used in the test, but prevents `_hidden_rule1`
// from being eliminated as a unit reduction.
{
"type": "ALIAS",
"value": "x",
"named": true,
"content": {
"type": "SYMBOL",
"name": "_hidden_rule1"
}
}
]
},
{
"type": "FIELD",
"name": "field_2",
"content": {"type": "SYMBOL", "name": "_hidden_rule1"}
},
{"type": "SYMBOL", "name": "_hidden_rule2"}
]
},
// Fields pointing to hidden nodes with a single child resolve to the child.
"_hidden_rule1": {
"type": "CHOICE",
"members": [
{"type": "STRING", "value": "child-2"},
{"type": "STRING", "value": "child-2.5"}
]
},
// Fields within hidden nodes can be referenced through the parent node.
"_hidden_rule2": {
"type": "SEQ",
"members": [
{"type": "STRING", "value": "child-3"},
{
"type": "FIELD",
"name": "field_3",
"content": {"type": "STRING", "value": "child-4"}
}
]
}
}
}
"#,
    )
    .unwrap();
    let mut parser = Parser::new();
    let language = get_test_language(&parser_name, &parser_code, None);
    parser.set_language(language).unwrap();
    let tree = parser
        .parse("child-0 child-1 child-2 child-3 child-4", None)
        .unwrap();
    let root_node = tree.root_node();
    assert_eq!(root_node.child_by_field_name("field_1"), root_node.child(0));
    assert_eq!(root_node.child_by_field_name("field_2"), root_node.child(2));
    assert_eq!(root_node.child_by_field_name("field_3"), root_node.child(4));
    // The field belongs to the parent, not to the child itself.
    assert_eq!(
        root_node.child(0).unwrap().child_by_field_name("field_1"),
        None
    );
    assert_eq!(root_node.child_by_field_name("not_a_real_field"), None);
    // The cursor reports the same field names while walking siblings.
    let mut cursor = root_node.walk();
    assert_eq!(cursor.field_name(), None);
    cursor.goto_first_child();
    assert_eq!(cursor.node().kind(), "child-0");
    assert_eq!(cursor.field_name(), Some("field_1"));
    cursor.goto_next_sibling();
    assert_eq!(cursor.node().kind(), "child-1");
    assert_eq!(cursor.field_name(), None);
    cursor.goto_next_sibling();
    assert_eq!(cursor.node().kind(), "child-2");
    assert_eq!(cursor.field_name(), Some("field_2"));
    cursor.goto_next_sibling();
    assert_eq!(cursor.node().kind(), "child-3");
    assert_eq!(cursor.field_name(), None);
    cursor.goto_next_sibling();
    assert_eq!(cursor.node().kind(), "child-4");
    assert_eq!(cursor.field_name(), Some("field_3"));
}
// Field lookups on a grammar that declares no fields must return None
// instead of misbehaving.
#[test]
fn test_node_field_calls_in_language_without_fields() {
    let (parser_name, parser_code) = generate_parser_for_grammar(
        r#"
{
"name": "test_grammar_with_no_fields",
"extras": [
{"type": "PATTERN", "value": "\\s+"}
],
"rules": {
"a": {
"type": "SEQ",
"members": [
{
"type": "STRING",
"value": "b"
},
{
"type": "STRING",
"value": "c"
},
{
"type": "STRING",
"value": "d"
}
]
}
}
}
"#,
    )
    .unwrap();
    let mut parser = Parser::new();
    let language = get_test_language(&parser_name, &parser_code, None);
    parser.set_language(language).unwrap();
    let tree = parser.parse("b c d", None).unwrap();
    let root_node = tree.root_node();
    assert_eq!(root_node.kind(), "a");
    assert_eq!(root_node.child_by_field_name("something"), None);
    let mut cursor = root_node.walk();
    assert_eq!(cursor.field_name(), None);
    assert_eq!(cursor.goto_first_child(), true);
    assert_eq!(cursor.field_name(), None);
}
// A named rule aliased to an anonymous name must report `is_named() == false`
// and be skipped by `named_child`.
#[test]
fn test_node_is_named_but_aliased_as_anonymous() {
    let (parser_name, parser_code) = generate_parser_for_grammar(
        &fs::read_to_string(
            &fixtures_dir()
                .join("test_grammars")
                .join("named_rule_aliased_as_anonymous")
                .join("grammar.json"),
        )
        .unwrap(),
    )
    .unwrap();
    let mut parser = Parser::new();
    let language = get_test_language(&parser_name, &parser_code, None);
    parser.set_language(language).unwrap();
    let tree = parser.parse("B C B", None).unwrap();
    let root_node = tree.root_node();
    assert!(!root_node.has_error());
    assert_eq!(root_node.child_count(), 3);
    // Only the two non-aliased children count as named.
    assert_eq!(root_node.named_child_count(), 2);
    let aliased = root_node.child(0).unwrap();
    assert!(!aliased.is_named());
    assert_eq!(aliased.kind(), "the-alias");
    assert_eq!(root_node.named_child(0).unwrap().kind(), "c");
}
// Nodes aliased to the same visible name must share a numeric `kind_id`,
// both for rule aliases (Python) and external-token aliases (Ruby).
#[test]
fn test_node_numeric_symbols_respect_simple_aliases() {
    let mut parser = Parser::new();
    parser.set_language(get_language("python")).unwrap();
    // Example 1:
    // Python argument lists can contain "splat" arguments, which are not allowed within
    // other expressions. This includes `parenthesized_list_splat` nodes like `(*b)`. These
    // `parenthesized_list_splat` nodes are aliased as `parenthesized_expression`. Their numeric
    // `symbol`, aka `kind_id` should match that of a normal `parenthesized_expression`.
    let tree = parser.parse("(a((*b)))", None).unwrap();
    let root = tree.root_node();
    assert_eq!(
        root.to_sexp(),
        "(module (expression_statement (parenthesized_expression (call function: (identifier) arguments: (argument_list (parenthesized_expression (list_splat (identifier))))))))",
    );
    let outer_expr_node = root.child(0).unwrap().child(0).unwrap();
    assert_eq!(outer_expr_node.kind(), "parenthesized_expression");
    let inner_expr_node = outer_expr_node
        .named_child(0)
        .unwrap()
        .child_by_field_name("arguments")
        .unwrap()
        .named_child(0)
        .unwrap();
    assert_eq!(inner_expr_node.kind(), "parenthesized_expression");
    assert_eq!(inner_expr_node.kind_id(), outer_expr_node.kind_id());
    // Example 2:
    // Ruby handles the unary (negative) and binary (minus) `-` operators using two different
    // tokens. One or more of these is an external token that's aliased as `-`. Their numeric
    // kind ids should match.
    parser.set_language(get_language("ruby")).unwrap();
    let tree = parser.parse("-a - b", None).unwrap();
    let root = tree.root_node();
    assert_eq!(
        root.to_sexp(),
        "(program (binary left: (unary operand: (identifier)) right: (identifier)))",
    );
    let binary_node = root.child(0).unwrap();
    assert_eq!(binary_node.kind(), "binary");
    let unary_minus_node = binary_node
        .child_by_field_name("left")
        .unwrap()
        .child(0)
        .unwrap();
    assert_eq!(unary_minus_node.kind(), "-");
    let binary_minus_node = binary_node.child_by_field_name("operator").unwrap();
    assert_eq!(binary_minus_node.kind(), "-");
    assert_eq!(unary_minus_node.kind_id(), binary_minus_node.kind_id());
}
/// Collects every node of `tree` (named and anonymous) in pre-order, using a
/// cursor-based traversal that never recurses.
///
/// Cleanups: the `else { if .. }` ladder is now an `else if` chain, and the
/// non-idiomatic trailing `return result;` became a tail expression.
fn get_all_nodes(tree: &Tree) -> Vec<Node> {
    let mut result = Vec::new();
    // Tracks whether the current node's subtree has already been emitted.
    let mut visited_children = false;
    let mut cursor = tree.walk();
    loop {
        if !visited_children {
            result.push(cursor.node());
            if !cursor.goto_first_child() {
                visited_children = true;
            }
        } else if cursor.goto_next_sibling() {
            visited_children = false;
        } else if !cursor.goto_parent() {
            // Back at the root with no siblings left: traversal complete.
            break;
        }
    }
    result
}
/// Parses `JSON_EXAMPLE` with a freshly constructed JSON parser.
fn parse_json_example() -> Tree {
    let mut json_parser = Parser::new();
    json_parser.set_language(get_language("json")).unwrap();
    json_parser.parse(JSON_EXAMPLE, None).unwrap()
}
|
use std::sync::{
Arc,
Weak,
// Mutex, MutexGuard
};
use parking_lot::{
Mutex, MutexGuard
};
use serenity::prelude::*;
use serenity::model::channel::Message;
use serenity::framework::{
Framework,
standard::{
StandardFramework,
Configuration,
}
};
use crate::dynamic_loading::{
GroupLib,
PluginManager
};
use threadpool::ThreadPool;
/// Discord id the bot treats as itself for mention-triggered commands.
/// These are plain immutable values, so `const` is the idiomatic choice
/// over `static` (no shared address is needed).
const TOASTER_ID: u64 = 601092364181962762;
/// Command prefix the framework is configured with.
const TOASTER_PREFIX: &str = "t>";
type RawInnerFactory = fn() -> StandardFramework;
/// Default `RawInnerFactory`: a brand-new, unconfigured `StandardFramework`.
pub fn default_raw_inner_factory() -> StandardFramework {
    StandardFramework::new()
}
/// Hot-swappable command framework: wraps serenity's `StandardFramework`
/// behind a mutex so plugin groups can be added/removed at runtime.
pub struct ToasterFramework
{
    // The actual serenity framework; locked for every dispatch and mutation.
    inner: Arc<Mutex<StandardFramework>>,
    // Loads/unloads dynamically-linked command group libraries.
    plugin_manager: Arc<PluginManager>
}
// Ensures it clones correctly
impl Clone for ToasterFramework {
    /// Cheap clone: both fields are `Arc`s, so only reference counts bump.
    fn clone(&self) -> Self {
        Self {
            inner: Arc::clone(&self.inner),
            plugin_manager: Arc::clone(&self.plugin_manager),
        }
    }
}
// Lets the framework be stored in serenity's client data TypeMap,
// keyed by its own type.
impl TypeMapKey for ToasterFramework
{
    type Value = ToasterFramework;
}
/// Function that tweaks a serenity `Configuration` in place and returns it for chaining.
type ConfigFn = fn(&mut Configuration) -> &mut Configuration;
impl ToasterFramework
{
    // Baseline configuration: `t>` prefix, react to direct mentions of the
    // bot, tolerate whitespace after the prefix, and ignore DMs.
    const DEFAULT_CONFIG: ConfigFn = |conf| { conf
        .prefix(TOASTER_PREFIX)
        .on_mention(Some(TOASTER_ID.into()))
        .with_whitespace(true)
        .allow_dm(false)
    };
    /// Builds a framework around `plugin_manager`.
    ///
    /// The inner framework receives `DEFAULT_CONFIG` first (via
    /// `create_raw_inner`), then the caller's `config` on top, so callers
    /// can override individual defaults.
    pub fn new<F>(plugin_manager: PluginManager, config: F) -> ToasterFramework
        where F: FnOnce(&mut Configuration) -> &mut Configuration
    {
        let inner = Arc::new(Mutex::new(
            Self::create_raw_inner(default_raw_inner_factory)
                .configure(config)
        ));
        let plugin_manager = Arc::new(plugin_manager);
        ToasterFramework {
            inner,
            plugin_manager,
        }
    }
    /// Loads every plugin group and registers each with the inner framework.
    /// Stops at the first group that fails to load or register.
    pub fn add_all_groups(&self) -> Result<(), String>
    {
        // Lock mutex now since the entire process should be protected
        let mut lock = self.inner.lock();
        let group_lib_vec = self.plugin_manager.load_all_groups()?;
        for group_lib in group_lib_vec
        {
            self.add_group_impl(group_lib, &mut lock)?;
        }
        Ok(())
    }
    /// Loads the named plugin group and registers it with the inner framework.
    pub fn add_group(&self, group: &str) -> Result<(), String>
    {
        // Lock mutex now since the entire process should be protected
        let mut lock = self.inner.lock();
        let group_lib = self.plugin_manager.load_group(group)?;
        self.add_group_impl(group_lib, &mut lock)
    }
    // Shared tail of add_group/add_all_groups: upgrade the weak handle the
    // plugin manager returned and hand its group to the locked framework.
    // Takes the caller's already-held guard so the whole load+register
    // sequence stays under one critical section.
    fn add_group_impl(&self, group_lib: Weak<GroupLib>, lock: &mut MutexGuard<StandardFramework>) -> Result<(), String>
    {
        let group = match group_lib.upgrade()
        {
            Some(group_lib) => group_lib.group,
            None => return Err("[ToasterFramework::add_group] weak pointer from load_group has expired!".to_owned())
        };
        println!("[ToasterFramework::add_group_impl] Adding group: '{}'", group.name);
        lock.group_add(group);
        Ok(())
    }
    /// Unloads the named plugin group and removes it from the inner framework.
    pub fn remove_group(&self, group: &str) -> Result<(), String>
    {
        // Lock mutex now since the entire process should be protected
        let mut lock = self.inner.lock();
        let group_lib = self.plugin_manager.unload_group(group);
        let group = match group_lib
        {
            Some(group_lib) => group_lib.group,
            None => return Err("[ToasterFramework::remove_group] tried to remove a group that wasn't loaded!".to_owned())
        };
        // Normally it might make more sense to remove the group from being usable
        // before we unload it. Luckily, the only "fail" case for unloading is if
        // the group was never loaded in the first place. That should make it safe.
        println!("[ToasterFramework::remove_group] Removing group: '{}'", group.name);
        lock.group_remove(group);
        Ok(())
    }
    /// Drops libraries the plugin manager has queued for unloading.
    pub fn flush_lib_buffer(&self)
    {
        self.plugin_manager.flush_unload_buffer();
    }
    /// Names of the currently loaded plugin groups.
    pub fn get_group_list(&self) -> Vec<String>
    {
        self.plugin_manager.list_groups()
    }
    /// Runs `raw_inner_factory` and applies `DEFAULT_CONFIG` to the result.
    pub fn create_raw_inner(raw_inner_factory: RawInnerFactory) -> StandardFramework
    {
        raw_inner_factory()
            .configure(Self::DEFAULT_CONFIG)
    }
    // pub fn get_inner(&self) -> MutexGuard<StandardFramework>
    // {
    //     self.inner.lock()
    // }
}
impl Framework for ToasterFramework
{
    /// Forwards one incoming message to the wrapped `StandardFramework`,
    /// serializing through the mutex so groups can be swapped concurrently.
    #[inline]
    fn dispatch(&mut self, ctx: Context, msg: Message, threadpool: &ThreadPool)
    {
        // let mut lock = self.inner.lock().expect("[ToasterFramework::dispatch] Poisoned mutex!");
        let mut lock = self.inner.lock();
        lock.dispatch(ctx, msg, threadpool);
    }
} |
use std::fs;
use std::io::{self, Write};
use std::collections::HashMap;
// Parameter addressing modes (encoded in the digits above the 2-digit opcode).
const MODE_POS: i64 = 0;
const MODE_IMM: i64 = 1;
const MODE_REL: i64 = 2;
// Intcode opcodes.
const OP_EXIT: i64 = 99;
const OP_ADD: i64 = 1;
const OP_MULTIPLY: i64 = 2;
const OP_INPUT: i64 = 3;
const OP_OUTPUT: i64 = 4;
const OP_JUMP_IF_TRUE: i64 = 5;
const OP_JUMP_IF_FALSE: i64 = 6;
const OP_LESS_THAN: i64 = 7;
const OP_EQUALS: i64 = 8;
const OP_REL_BASE_OFFSET: i64 = 9;
/// Executes an Intcode program (Advent of Code 2019 day 9 virtual machine).
///
/// Memory is a sparse `address -> value` map seeded from `program`, so the
/// program may read/write addresses beyond its own length; missing cells
/// read as 0. `OP_INPUT` reads an integer from stdin, `OP_OUTPUT` prints to
/// stdout. Panics on a bad parameter mode or non-integer input.
fn run_program(program: &Vec<i64>) {
    let mut memory = HashMap::<i64, i64>::new();
    for i in 0..program.len() {
        memory.insert(i as i64, program[i]);
    }
    // Instruction pointer.
    let mut address: i64 = 0;
    // Base address added to relative-mode (mode 2) parameters.
    let mut rel_base: i64 = 0;
    while address < program.len() as i64 {
        let instr = memory[&address];
        let opcode = read_opcode(instr);
        address += 1;
        match opcode {
            OP_EXIT => {
                println!("Program exited");
                break;
            },
            // All four are binary ops: two value params and one write address.
            OP_ADD | OP_MULTIPLY | OP_LESS_THAN | OP_EQUALS => {
                let param1 = read_param_value(address, &memory, rel_base, 0);
                let param2 = read_param_value(address, &memory, rel_base, 1);
                let result_address = read_param_value_out(address, &memory, rel_base, 2);
                address += 3;
                match opcode {
                    OP_ADD => {
                        memory.insert(result_address, param1 + param2);
                    },
                    OP_MULTIPLY => {
                        memory.insert(result_address, param1 * param2);
                    },
                    OP_LESS_THAN => {
                        memory.insert(result_address, if param1 < param2 { 1 } else { 0 });
                    },
                    OP_EQUALS => {
                        memory.insert(result_address, if param1 == param2 { 1 } else { 0 });
                    }
                    // Unreachable: the outer arm already restricted the opcode set.
                    _ => panic!()
                }
            },
            OP_INPUT => {
                // Prompt and read one integer from stdin into the target cell.
                let mut input_text = String::new();
                print!("> ");
                io::stdout().flush().unwrap();
                io::stdin().read_line(&mut input_text).unwrap();
                let input_value = input_text
                    .trim()
                    .parse::<i64>()
                    .expect("Input value is not an integer");
                let result_address = read_param_value_out(address, &memory, rel_base, 0);
                memory.insert(result_address, input_value);
                address += 1;
            },
            OP_OUTPUT => {
                let param = read_param_value(address, &memory, rel_base, 0);
                address += 1;
                println!("{}", param);
            },
            OP_JUMP_IF_TRUE | OP_JUMP_IF_FALSE => {
                let param1 = read_param_value(address, &memory, rel_base, 0);
                let param2 = read_param_value(address, &memory, rel_base, 1);
                // Advance past both params first; a taken jump overwrites this.
                address += 2;
                if (opcode == OP_JUMP_IF_TRUE && param1 != 0)
                    || (opcode == OP_JUMP_IF_FALSE && param1 == 0) {
                    address = param2;
                }
            },
            OP_REL_BASE_OFFSET => {
                rel_base += read_param_value(address, &memory, rel_base, 0);
                address += 1;
            }
            // NOTE(review): unknown opcodes are silently skipped, and only the
            // opcode itself was stepped over — operands would then be decoded
            // as instructions. Consider panicking here instead; confirm intent.
            _ => {}
        }
    }
    // Extracts the two-digit opcode from a full instruction value.
    fn read_opcode(instr: i64) -> i64 {
        instr % 100
    }
    // Extracts the addressing-mode digit for the `index`-th parameter.
    fn read_param_mode(instr: i64, index: u32) -> i64 {
        instr % 10_i64.pow(index + 3) / 10_i64.pow(index + 2)
    }
    // Resolves the value of the `index`-th parameter of the instruction whose
    // opcode sits at `start - 1`, honoring position/immediate/relative modes.
    fn read_param_value(start: i64, memory: &HashMap::<i64, i64>, rel_base: i64, index: u32) -> i64 {
        let mode = read_param_mode(memory[&(start - 1)], index);
        let param = *memory.get(&(start + index as i64)).unwrap_or(&0);
        match mode {
            MODE_POS => *memory.get(&param).unwrap_or(&0),
            MODE_IMM => param,
            MODE_REL => *memory.get(&(param + rel_base)).unwrap_or(&0),
            _ => panic!()
        }
    }
    // Resolves the destination address for a write parameter; immediate mode
    // is not valid for writes, hence the panic on any other mode.
    fn read_param_value_out(start: i64, memory: &HashMap::<i64, i64>, rel_base: i64, index: u32) -> i64 {
        let mode = read_param_mode(memory[&(start - 1)], index);
        let param = *memory.get(&(start + index as i64)).unwrap_or(&0);
        match mode {
            MODE_POS => param,
            MODE_REL => param + rel_base,
            _ => panic!()
        }
    }
}
/// Reads the comma-separated Intcode program from `9_input.txt` and runs it.
fn main() {
    let source = fs::read_to_string("9_input.txt").unwrap();
    let program: Vec<i64> = source
        .trim()
        .split(',')
        .map(|token| token.parse().unwrap())
        .collect();
    run_program(&program);
}
|
// Copyright 2019 EinsteinDB Project Authors. Licensed under Apache-2.0.
use criterion::{BatchSize, Bencher, BenchmarkId, Criterion, Throughput};
use violetabft::evioletabftpb::{ConfState, Entry, Message, Snapshot, SnapshotMetadata};
use violetabft::{storage::MemStorage, Config, RawNode};
use std::time::Duration;
pub fn bench_raw_node(c: &mut Criterion) {
bench_raw_node_new(c);
bench_raw_node_leader_propose(c);
bench_raw_node_new_ready(c);
}
/// Builds a minimal single-voter raft node backed by in-memory storage.
fn quick_raw_node(logger: &slog::Logger) -> RawNode<MemStorage> {
    let node_id = 1;
    let voters = (vec![1], vec![]);
    let storage = MemStorage::new_with_conf_state(ConfState::from(voters));
    RawNode::new(&Config::new(node_id), storage, logger).unwrap()
}
/// Measures the cost of constructing a `RawNode` from scratch.
pub fn bench_raw_node_new(c: &mut Criterion) {
    c.bench_function("RawNode::new", |b: &mut Bencher| {
        let logger = violetabft::default_logger();
        b.iter(|| quick_raw_node(&logger));
    });
}
/// Benchmarks `RawNode::propose` on a leader for payloads from 0 B to 1 MiB.
pub fn bench_raw_node_leader_propose(c: &mut Criterion) {
    const KB: usize = 1024;
    // Payload sizes under test. A plain array avoids the needless
    // `mut Vec` + `drain(..)` the original used just to iterate once.
    let test_sets = [0, 32, 128, 512, KB, 4 * KB, 16 * KB, 128 * KB, 512 * KB, KB * KB];
    let mut group = c.benchmark_group("RawNode::leader_propose");
    for &size in test_sets.iter() {
        // Calculate measurement time in seconds according to the input size.
        // The approximate time might not be the best but should work fine.
        let mtime = if size < KB {
            1
        } else if size < 128 * KB {
            3
        } else {
            7
        };
        group
            .measurement_time(Duration::from_secs(mtime))
            .throughput(Throughput::Bytes(size as u64))
            .bench_with_input(
                BenchmarkId::from_parameter(size),
                &size,
                |b: &mut Bencher, size| {
                    // A fresh node promoted to leader so proposals are accepted.
                    let logger = violetabft::default_logger();
                    let mut node = quick_raw_node(&logger);
                    node.violetabft.become_candidate();
                    node.violetabft.become_leader();
                    b.iter_batched(
                        || (vec![0; 8], vec![0; *size]),
                        // A leader must accept its own proposal; a failure here
                        // is a benchmark-setup bug (was `expect("")`).
                        |(context, value)| {
                            node.propose(context, value).expect("leader failed to propose")
                        },
                        BatchSize::SmallInput,
                    );
                },
            );
    }
}
/// Measures one `ready()` call on a node preloaded with entries, messages
/// and a snapshot (see `test_ready_violetabft_node`).
pub fn bench_raw_node_new_ready(c: &mut Criterion) {
    let logger = violetabft::default_logger();
    let mut group = c.benchmark_group("RawNode::ready");
    // TODO: The proper measurement time could be affected by the system and machine.
    group.measurement_time(Duration::from_secs(20));
    group.bench_function("Default", |b: &mut Bencher| {
        b.iter_batched(
            || test_ready_violetabft_node(&logger),
            |mut node| {
                let _ = node.ready();
            },
            // NOTICE: SmallInput accumulates (iters + 10 - 1) / 10 samples per batch
            BatchSize::SmallInput,
        );
    });
}
// Create a violetabft node calling `ready()` with things below:
// - 100 new entries with 32KB data each
// - 100 committed entries with 32KB data each
// - 100 violetabft messages
// - A snapshot with 8MB data
// TODO: Maybe gathering all the things we need into a struct(e.g. something like `ReadyBenchOption`) and use it
// to customize the output.
fn test_ready_violetabft_node(logger: &slog::Logger) -> RawNode<MemStorage> {
    let mut node = quick_raw_node(logger);
    // Promote to leader so appended entries commit in the 1-node quorum.
    node.violetabft.become_candidate();
    node.violetabft.become_leader();
    node.violetabft.violetabft_log.stable_to(1, 1);
    node.violetabft.commit_apply(1);
    // Build 100 entries of 32 KiB each, indices 1..=100, term 1.
    let mut entries = vec![];
    for i in 1..101 {
        let mut e = Entry::default();
        e.data = vec![0; 32 * 1024];
        e.context = vec![];
        e.index = i;
        e.term = 1;
        entries.push(e);
    }
    let mut unstable_entries = entries.clone();
    node.violetabft.violetabft_log.store.wl().append(&entries).expect("");
    node.violetabft.violetabft_log.unstable.offset = 102;
    // This increases 'committed_index' to `last_index` because there is only one node in quorum.
    node.violetabft.append_entry(&mut unstable_entries);
    let mut snap = Snapshot::default();
    snap.set_data(vec![0; 8 * 1024 * 1024]);
    // We don't care about the contents in snapshot here since it won't be applied.
    snap.set_metadata(SnapshotMetadata::default());
    for _ in 0..100 {
        node.violetabft.msgs.push(Message::default());
    }
    // Force reverting committed index to provide us some entries to be stored from next `Ready`
    node.violetabft.violetabft_log.committed = 101;
    node
}
|
//! Clients for services.
mod device_auth;
mod user_auth;
pub use device_auth::*;
pub use user_auth::*;
use drogue_client::error::{ClientError, ErrorInformation};
use http::StatusCode;
use reqwest::Response;
pub(crate) async fn default_error<T>(
code: StatusCode,
response: Response,
) -> Result<T, ClientError<reqwest::Error>> {
match response.json::<ErrorInformation>().await {
Ok(result) => {
log::debug!("Service reported error ({}): {}", code, result);
Err(ClientError::Service(result))
}
Err(err) => {
log::debug!(
"Service call failed ({}). Result couldn't be decoded: {:?}",
code,
err
);
Err(ClientError::Request(format!(
"Failed to decode service error response: {}",
err
)))
}
}
}
|
#![deny(rustdoc::broken_intra_doc_links)]
use partition::Partition;
use proc_macro::TokenStream;
use syn::{parse_macro_input, ItemMod, TypePath};
mod generate;
mod parse;
mod partition;
/// Convenience macro for simpler partition development with less pitfalls
///
/// For using this macro a module is annotated with the [`partition()`] attribute.
/// Inside of this module, start functions, processes, as well as channels can be defined using attributes.
///
/// [`partition()`]: macro@partition#attribute-partition
///
/// # Example
/// ```no_run
/// use a653rs::prelude::PartitionExt;
/// use a653rs_macros::partition;
///
/// # // TODO include example/partition.rs
/// # #[path = "../../examples/deps/dummy.rs"]
/// mod dummy;
///
/// fn main() {
/// example::Partition.run();
/// }
///
/// #[partition(crate::dummy::DummyHypervisor)]
/// mod example {
/// #[sampling_out(name = "Ch1", msg_size = "10KB")]
/// struct Channel1;
///
/// #[sampling_in(refresh_period = "500ms")]
/// #[sampling_in(msg_size = "25KB")]
/// struct ChannelTwo;
///
/// #[queuing_out(msg_count = 20, msg_size = "12KB", discipline = "FIFO")]
/// struct Channel3;
///
/// #[start(cold)]
/// fn cold_start(ctx: start::Context) {
/// warm_start(ctx);
/// }
///
/// #[start(warm)]
/// fn warm_start(mut ctx: start::Context) {
/// ctx.create_aperiodic2().unwrap().start().unwrap();
/// ctx.create_periodic3().unwrap().start().unwrap();
/// ctx.create_channel_1().unwrap();
/// ctx.create_channel_two().unwrap();
///
/// // Maybe we do not always want to initialize channel3
/// // ctx.create_channel_3().unwrap();
/// }
///
/// #[aperiodic(
/// name = "ap2",
/// time_capacity = "2ms",
/// stack_size = "10KB",
/// base_priority = 1,
/// deadline = "Soft"
/// )]
/// fn aperiodic2(ctx: aperiodic2::Context) {
/// ctx.get_time();
/// }
///
/// #[periodic(
/// period = "10ms",
/// time_capacity = "5ms",
/// stack_size = "10KB",
/// base_priority = 1,
/// deadline = "Hard"
/// )]
/// fn periodic3(ctx: periodic3::Context) {}
/// }
/// ```
///
/// # Attribute `#[partition()]`
///
/// The [`partition()`] attribute marks the entry point of this macro.
/// It is meant to be used on a module containing the partition.
///
/// When the attribute is used correctly, inside of the module a `Partition` struct is made available.
/// This `Partition` struct can then be used in i.e the `main` function for running the partition.
///
/// ## Requirements
///
/// #### #[partition(HYPERVISOR)]
///
/// - *HYPERVISOR*: the full path to the used hypervisor
///
/// #### Module
/// - [`start(cold)`] and [`start(warm)`]
/// - For calling `run()` on the `Partition` struct, HYPERVISOR must implement `a653rs::prelude::PartitionExt`
///
/// [`start(cold)`]: macro@partition#attributes-startcold-and-startwarm
/// [`start(warm)`]: macro@partition#attributes-startcold-and-startwarm
///
/// ## Flexibility
///
/// - The module name can be anything
///
/// ## Example
/// ```no_run
/// use a653rs::prelude::PartitionExt;
/// use a653rs_macros::partition;
/// # #[path = "../../examples/deps/dummy.rs"]
/// # mod dummy;
///
/// fn main() {
/// example::Partition.run();
/// }
///
/// #[partition(crate::dummy::DummyHypervisor)]
/// mod example {
/// #[start(cold)]
/// fn cold_start(ctx: start::Context) { }
///
/// #[start(warm)]
/// fn warm_start(ctx: start::Context) { }
/// }
/// ```
///
/// ## Attributes `start(cold)` and `start(warm)`
///
/// [`start(cold)`] and [`start(warm)`] are used for the start functions of the partition.
/// Inside these functions, the `start::Context` provides simple functions
/// for initializing processes, channels and using apex functionalities of the provided hypervisor.
///
/// ## Requirements
///
/// - the start functions must require solely the `Context` parameter
///
/// ## Flexibility
///
/// - The identifier of the functions can be anything
/// - The identifier of the `start::Context` can be anything
///
/// ## Example
/// ```no_run
/// # use a653rs::prelude::PartitionExt;
/// # use a653rs_macros::partition;
/// # #[path = "../../examples/deps/dummy.rs"]
/// # mod dummy;
/// # fn main() {
/// # example::Partition.run();
/// # }
/// # #[partition(crate::dummy::DummyHypervisor)]
/// # mod example {
/// #[start(cold)]
/// fn cold_start(ctx: start::Context) {
/// let status = ctx.get_partition_status();
/// }
///
/// #[start(warm)]
/// fn warm_start(ctx: start::Context) {
/// cold_start(ctx);
/// }
/// # }
/// ```
///
/// # Attributes `periodic()` and `aperiodic()`
///
/// Two types of processes are available: periodic and aperiodic processes.
///
/// Functions with either the [`periodic()`] or [`aperiodic()`] attribute use a `Context` parameter for interacting with the rest of the partition.
/// This `Context` contains fields for all defined channels and processes as well as functions provided by the used hypervisor.
///
/// When a process is defined, a `create_NAME()` function is made available on the `start::Context` struct in [`start(cold)`] and [`start(warm)`].
/// This function must be called in order to initialize the process.
/// Also, these create functions return a reference to the process on success.
/// For the process to be scheduled, the `start()` function must be called on this reference.
///
/// [`periodic()`]: macro@partition#attributes-periodic-and-aperiodic
/// [`aperiodic()`]: macro@partition#attributes-periodic-and-aperiodic
///
/// ## Requirements
///
/// - the functions must require solely the `Context` parameter
/// - the module path of the `Context` is the name of the function
///
/// #### #[periodic(NAME, PERIOD, TIME_CAPACITY, STACK_SIZE, BASE_PRIORITY, DEADLINE)]
///
/// - **NAME**: name used for internal apex calls (optional)
/// - **PERIOD**: time like ["10ms", "16s", "18m", ...](https://crates.io/crates/humantime)
/// - Suggested value for P4: equal to the partition period
/// - **TIME_CAPACITY**: either "Infinite" or a time like ["10ms", "16s", "18m", ...](https://crates.io/crates/humantime)
/// - Suggested value for P4: equal to the partition duration
/// - **STACK_SIZE**: size like ["10KB", "16kiB", "12Mb", ...](https://crates.io/crates/bytesize)
/// - **BASE_PRIORITY**: [i32]
/// - Suggested value for P4: lower than the base priority of the aperiodic process
/// - **DEADLINE**: either "Hard" or "Soft"
///
/// #### #[aperiodic(NAME, TIME_CAPACITY, STACK_SIZE, BASE_PRIORITY, DEADLINE)]
///
/// - **NAME**: name used for internal apex calls (optional)
/// - **TIME_CAPACITY**: either "Infinite" or a time like ["10ms", "16s", "18m", ...](https://crates.io/crates/humantime)
/// - Suggested value for P4: equal to the partition duration
/// - **STACK_SIZE**: size like ["10KB", "16kiB", "12Mb", ...](https://crates.io/crates/bytesize)
/// - **BASE_PRIORITY**: [i32]
/// - Suggested value for P4: higher than the base priority of the periodic process
/// - **DEADLINE**: either "Hard" or "Soft"
///
/// ## Flexibility
///
/// - The identifier of the functions can be anything
/// - The identifier of the `Context` can be anything
///
/// ## Example
/// ```no_run
/// # use a653rs::prelude::PartitionExt;
/// # use a653rs_macros::partition;
/// # #[path = "../../examples/deps/dummy.rs"]
/// # mod dummy;
/// # fn main() {
/// # example::Partition.run();
/// # }
/// # #[partition(crate::dummy::DummyHypervisor)]
/// # mod example {
/// #[start(cold)]
/// fn cold_start(ctx: start::Context) {
/// warm_start(ctx);
/// }
///
/// #[start(warm)]
/// fn warm_start(mut ctx: start::Context) {
/// ctx.create_aperiodic2().unwrap().start().unwrap();
/// ctx.create_periodic3().unwrap().start().unwrap();
/// }
///
/// #[aperiodic(
/// name = "ap2",
/// time_capacity = "Infinite",
/// stack_size = "10KB",
/// base_priority = 1,
/// deadline = "Soft"
/// )]
/// fn aperiodic2(ctx: aperiodic2::Context) {
/// ctx.get_time();
/// ctx.periodic3.unwrap().stop();
/// }
///
/// #[periodic(
/// period = "10ms",
/// time_capacity = "Infinite",
/// stack_size = "10KB",
/// base_priority = 1,
/// deadline = "Hard"
/// )]
/// fn periodic3(ctx: periodic3::Context) {
/// let status = ctx.proc_self.status();
/// ctx.report_application_message(b"Hello World").unwrap()
/// }
/// # }
/// ```
///
/// # Attributes `sampling_out()`, `sampling_in()`, `queuing_out()` and `queuing_in()`
///
/// Two types of channel are available: sampling and queuing ports.
///
/// Structs with the [`sampling_out()`], [`sampling_in()`], [`queuing_out()`] or [`queuing_in()`] attribute define channels.
///
/// When a channel is defined, a `create_NAME()` function is made available on the `start::Context` struct in [`start(cold)`] and [`start(warm)`].
/// This function must be called in order to initialize the channel.
/// Also a field for each created channel is made available on the `Context` of each [`periodic()`] and [`aperiodic()`] process.
///
/// [`sampling_out()`]: macro@partition#attributes-sampling_out-sampling_in-queuing_out-and-queuing_in
/// [`sampling_in()`]: macro@partition#attributes-sampling_out-sampling_in-queuing_out-and-queuing_in
/// [`queuing_out()`]: macro@partition#attributes-sampling_out-sampling_in-queuing_out-and-queuing_in
/// [`queuing_in()`]: macro@partition#attributes-sampling_out-sampling_in-queuing_out-and-queuing_in
///
/// ## Requirements
///
/// #### #[sampling_out(NAME, MSG_SIZE)]
///
/// - **NAME**: name used for internal apex calls (optional)
/// - **MSG_SIZE**: size like ["10KB", "16kiB", "12Mb", ...](https://crates.io/crates/bytesize)
///
/// #### #[sampling_in(NAME, MSG_SIZE, REFRESH_PERIOD)]
///
/// - **NAME**: name used for internal apex calls (optional)
/// - **MSG_SIZE**: size like ["10KB", "16kiB", "12Mb", ...](https://crates.io/crates/bytesize)
/// - **REFRESH_PERIOD**: time like ["10ms", "16s", "18m", ...](https://crates.io/crates/humantime)
///
/// #### #[queuing_out(NAME, MSG_COUNT, MSG_SIZE, DISCIPLINE)]
///
/// - **NAME**: name used for internal apex calls (optional)
/// - **MSG_COUNT**: [u32]
/// - **MSG_SIZE**: size like ["10KB", "16kiB", "12Mb", ...](https://crates.io/crates/bytesize)
/// - **DISCIPLINE**: either "FIFO" or "Priority"
///
/// #### #[queuing_in(NAME, MSG_COUNT, MSG_SIZE, DISCIPLINE)]
///
/// - **NAME**: name used for internal apex calls (optional)
/// - **MSG_COUNT**: [u32]
/// - **MSG_SIZE**: size like ["10KB", "16kiB", "12Mb", ...](https://crates.io/crates/bytesize)
/// - **REFRESH_PERIOD**: time like ["10ms", "16s", "18m", ...](https://crates.io/crates/humantime)
/// - **DISCIPLINE**: either "FIFO" or "Priority"
///
/// ## Flexibility
///
/// - The identifier of the struct can be anything
///
/// ## Example
/// ```no_run
/// # use a653rs::prelude::PartitionExt;
/// # use a653rs_macros::partition;
/// # #[path = "../../examples/deps/dummy.rs"]
/// # mod dummy;
/// # fn main() {
/// # example::Partition.run();
/// # }
/// # #[partition(crate::dummy::DummyHypervisor)]
/// # mod example {
/// #[sampling_out(name = "Ch1", msg_size = "10KB")]
/// struct Channel1;
///
/// #[sampling_in(refresh_period = "500ms")]
/// #[sampling_in(msg_size = "25KB")]
/// struct ChannelTwo;
///
/// #[queuing_out(msg_count = 20, msg_size = "12KB", discipline = "FIFO")]
/// struct Channel3;
///
/// #[queuing_in(name = "ch_3", msg_count = 20, msg_size = "12KB", discipline = "Priority")]
/// struct LastChannel;
///
/// #[start(cold)]
/// fn cold_start(ctx: start::Context) {
/// warm_start(ctx);
/// }
///
/// #[start(warm)]
/// fn warm_start(mut ctx: start::Context) {
/// ctx.create_channel_1().unwrap();
/// ctx.create_channel_two().unwrap();
/// ctx.create_channel_3().unwrap();
/// ctx.create_last_channel().unwrap();
/// }
/// # }
/// ```
///
///
///
#[proc_macro_attribute]
pub fn partition(args: TokenStream, input: TokenStream) -> TokenStream {
    // The annotated module that contains the partition definition.
    let module = parse_macro_input!(input as ItemMod);
    // Right now we only expect the Identifier of the used Hypervisor here
    let hypervisor = parse_macro_input!(args as TypePath);
    // TODO allow only for a single partition per project
    match Partition::expand_partition(module, hypervisor) {
        Ok(expanded) => expanded.into(),
        Err(err) => err.into_compile_error().into(),
    }
}
|
extern crate libc;
use libc::{c_int, c_void, c_char, time_t, ssize_t, size_t, uint8_t, uint32_t};
pub type Tls = *mut c_void;
pub type Config = *mut c_void;
pub const WANT_POLLIN: i64 = -2;
pub const WANT_POLLOUT: i64 = -3;
// Raw FFI bindings to libtls (LibreSSL). All functions follow the C API:
// `c_int` returns are 0 on success / -1 on error unless noted otherwise.
extern "C" {
    // Library/context lifecycle.
    pub fn tls_init() -> c_int;
    pub fn tls_free(ctx: Tls);
    pub fn tls_error(ctx: Tls) -> *const c_char;
    pub fn tls_configure(ctx: Tls, cfg: Config) -> c_int;
    // Configuration objects.
    pub fn tls_config_new() -> Config;
    pub fn tls_config_free(cfg: Config);
    pub fn tls_config_set_ca_file(cfg: Config, ca_file: *const c_char) -> c_int;
    pub fn tls_config_set_ca_path(cfg: Config, ca_file: *const c_char) -> c_int;
    pub fn tls_config_set_ca_mem(cfg: Config, ca: *const uint8_t, len: size_t) -> c_int;
    pub fn tls_config_set_verify_depth(cfg: Config, depth: c_int);
    pub fn tls_config_set_key_file(cfg: Config, key_file: *const c_char) -> c_int;
    pub fn tls_config_set_cert_file(cfg: Config, key_file: *const c_char) -> c_int;
    pub fn tls_config_insecure_noverifyname(cfg: Config);
    pub fn tls_config_insecure_noverifycert(cfg: Config);
    pub fn tls_config_set_protocols(cfg: Config, protocols: uint32_t);
    // Fix: both parameters were named `protocols`, which rustc rejects
    // (E0415: identifier bound more than once in a parameter list).
    // `protostr` matches the C prototype:
    //   int tls_config_parse_protocols(uint32_t *protocols, const char *protostr);
    pub fn tls_config_parse_protocols(protocols: *mut uint32_t, protostr: *const c_char) -> c_int;
    pub fn tls_config_set_ciphers(cfg: Config, ciphers: *const c_char) -> c_int;
    // Connection/peer-certificate introspection.
    pub fn tls_conn_version(ctx: Tls) -> *const c_char;
    pub fn tls_conn_cipher(ctx: Tls) -> *const c_char;
    pub fn tls_peer_cert_notbefore(ctx: Tls) -> time_t;
    pub fn tls_peer_cert_notafter(ctx: Tls) -> time_t;
    pub fn tls_peer_cert_issuer(ctx: Tls) -> *const c_char;
    pub fn tls_peer_cert_subject(ctx: Tls) -> *const c_char;
    pub fn tls_peer_cert_hash(ctx: Tls) -> *const c_char;
    pub fn tls_peer_cert_contains_name(ctx: Tls, name: *const c_char) -> c_int;
    pub fn tls_peer_cert_provided(ctx: Tls) -> c_int;
    // Client-side connection establishment.
    pub fn tls_client() -> Tls;
    pub fn tls_connect(ctx: Tls, hostname: *const c_char, port: *const c_char) -> c_int;
    pub fn tls_connect_servername(ctx: Tls,
                                  hostname: *const c_char,
                                  port: *const c_char,
                                  servername: *const c_char)
                                  -> c_int;
    pub fn tls_connect_fds(ctx: Tls,
                           fd_read: c_int,
                           fd_write: c_int,
                           servername: *const c_char)
                           -> c_int;
    pub fn tls_connect_socket(ctx: Tls, fd: c_int, servername: *const c_char) -> c_int;
    // I/O; read/write may return WANT_POLLIN/WANT_POLLOUT for retry.
    pub fn tls_handshake(ctx: Tls) -> c_int;
    pub fn tls_read(ctx: Tls, buf: *mut c_void, buflen: size_t) -> ssize_t;
    pub fn tls_write(ctx: Tls, buf: *const c_void, buflen: size_t) -> ssize_t;
    pub fn tls_close(ctx: Tls) -> c_int;
    // Server side.
    pub fn tls_server() -> Tls;
    pub fn tls_accept_socket(ctx: Tls, cctx: *mut Tls, fd: c_int) -> c_int;
}
// A minimal test, enough to force a sanity check on the linkage
#[test]
fn test_init() {
    // SAFETY: plain FFI call with no arguments or pointers; only checks
    // that the libtls symbol resolves and the call returns.
    unsafe {
        tls_init();
    }
}
|
use std::path::PathBuf;
use crate::{
HttpdArgs,
Result,
mqtt::{self, Mqtt},
};
use std::{
collections::HashMap,
sync::{Arc, Mutex}
};
use async_std::{fs as async_fs, io as async_io, task as async_task};
use log::{debug, info};
type ServerState = Arc<Mutex<Mqtt>>;
/// Starts the HTTP server (and its companion WebSocket server).
///
/// Every incoming MQTT message is re-broadcast onto an in-process `bus`
/// consumed by the WebSocket server. The tide app serves the web UI either
/// from `httpd_webapp_dir` on disk or from assets embedded at build time,
/// plus a JSON `POST /api/publish` endpoint. Blocks until the listener
/// stops or fails.
pub fn start(args: HttpdArgs, mut mqtt: Mqtt) -> Result<()> {
    // Fan-out bus (capacity 255) feeding WebSocket subscribers.
    let bus = Arc::new(Mutex::new(bus::Bus::new(255)));
    mqtt.register_subscriber({
        let bus = bus.clone();
        move |event| {
            if let mqtt::Event::Message(msg) = event {
                bus.lock().unwrap().broadcast(msg.clone());
            }
        }
    });
    super::ws_server::start(args.httpd_host.clone(), bus)?;
    // The MQTT handle becomes shared server state for the publish endpoint.
    let mut app = tide::with_state(Arc::new(Mutex::new(mqtt)));
    app.at("/").get(|_| redirect("index.html"));
    app.at("/api/publish").post(publish);
    // Static assets: prefer an on-disk webapp dir when configured, otherwise
    // fall back to the assets embedded by the build script into OUT_DIR.
    match args.httpd_webapp_dir.clone() {
        Some(path) => app.at("/*").get(move |req: tide::Request<ServerState>| {
            let path = path.join(req.uri().path().trim_start_matches('/'));
            serve_asset_from_path(path)
        }),
        None => {
            let assets: HashMap<&str, Vec<u8>> = include!(concat!(env!("OUT_DIR"), "/assets.rs"));
            debug!("embedded assets: {:#?}", assets.keys());
            app.at("/*").get(move |req: tide::Request<ServerState>| {
                let path = req.uri().path();
                serve_embedded_asset(path.to_string(), assets.get(path).cloned())
            })
        },
    };
    async_task::block_on(
        async {
            let addr = format!("{}:{}", args.httpd_host, args.httpd_port);
            info!("listen on addr: {}", addr);
            app.listen(addr).await?;
            Ok(())
        },
    )
}
/// Builds a 307 (temporary redirect) response pointing at `target`.
async fn redirect(target: impl AsRef<str>) -> tide::Response {
    let response = tide::Response::new(307);
    response.set_header("Location", target)
}
async fn serve_asset_from_path(path: PathBuf) -> tide::Response {
debug!("serve asset from path: {}", path.display());
match async_fs::metadata(&path).await.ok() {
Some(meta) => {
let mime = mime_guess::from_path(&path).first_or_octet_stream();
debug!("{} Content-Type: {:?}", path.display(), mime);
let file = async_fs::File::open(path).await.unwrap();
let reader = async_io::BufReader::new(file);
tide::Response::new(200)
.set_header("Content-Length", meta.len().to_string())
.body(reader)
.set_mime(mime)
}
None => {
debug!("asset not found: {}", path.display());
tide::Response::new(404).body_string("Not found".into())
}
}
}
/// Serves a build-time embedded asset, or 404 when `asset` is `None`.
///
/// The MIME type is guessed from the path's extension, since embedded
/// assets carry no filesystem metadata.
async fn serve_embedded_asset(path: String, asset: Option<Vec<u8>>) -> tide::Response {
    debug!("serve embedded asset: {}", path);
    match asset {
        None => {
            debug!("embedded asset not found: {}", path);
            tide::Response::new(404).body_string("Not found".into())
        },
        Some(bytes) => {
            let extension = path.rsplit('.').next().unwrap_or("");
            let mime = mime_guess::from_ext(extension).first_or_octet_stream();
            debug!("{} Content-Type: {:?}", path, mime);
            tide::Response::new(200)
                .set_header("Content-Length", bytes.len().to_string())
                .body(async_io::Cursor::new(bytes))
                .set_mime(mime)
        },
    }
}
/// `POST /api/publish`: publishes a JSON `{topic, message}` body over MQTT.
/// Responds 201 on success, 400 on a malformed body, 500 on a publish error.
async fn publish(mut req: tide::Request<ServerState>) -> tide::Response {
    // Expected JSON body shape.
    #[derive(serde::Deserialize)]
    struct Payload {
        topic: String,
        message: String,
    }
    let payload = match req.body_json::<Payload>().await {
        Ok(payload) => payload,
        Err(err) => return tide::Response::new(400).body_string(err.to_string()),
    };
    let outcome = req.state().lock().unwrap().publish(payload.topic, payload.message);
    match outcome {
        Ok(_) => tide::Response::new(201),
        Err(err) => tide::Response::new(500).body_string(err.to_string()),
    }
}
|
use crate::hal::{
gpio::{p0, Floating, Input, Output, Pin, PushPull},
gpiote::GpioteChannel,
prelude::{InputPin, OutputPin},
};
use rtic::time::duration::Milliseconds;
pub type ButtonEnablePin = p0::P0_15<Output<PushPull>>;
pub type ButtonPin = p0::P0_13<Input<Floating>>;
/// Push button powered through a dedicated enable pin, with an interrupt
/// armed on its rising edge.
pub struct Button {
    // Held only so the enable pin keeps driving the button circuit high
    // for the lifetime of this struct.
    _enable_pin: ButtonEnablePin,
    input_pin: Pin<Input<Floating>>,
}
impl Button {
    // Debounce interval consumers should wait after an edge interrupt.
    pub const DEBOUNCE_MS: Milliseconds<u32> = Milliseconds(75);
    /// Sets up the button: drives the enable pin high, degrades the input
    /// pin to a generic `Pin`, and arms a GPIOTE interrupt on its
    /// low-to-high transition.
    // NOTE(review): enable is raised before the interrupt is armed —
    // presumably to avoid a spurious edge during power-up; confirm.
    pub fn new(
        mut enable_pin: ButtonEnablePin,
        input_pin: ButtonPin,
        channel: &GpioteChannel<'_>,
    ) -> Self {
        enable_pin.set_high().unwrap();
        let input_pin = input_pin.degrade();
        channel.input_pin(&input_pin).lo_to_hi().enable_interrupt();
        Button {
            _enable_pin: enable_pin,
            input_pin,
        }
    }
    /// Whether the button currently reads pressed (input pin high).
    pub fn is_pressed(&self) -> bool {
        self.input_pin.is_high().unwrap()
    }
}
|
#![no_std]
#![no_main]
#![feature(lang_items)]
#![feature(alloc_error_handler)]
#![feature(panic_info_message)]
// Import from `core` instead of from `std` since we are in no-std mode
use core::result::Result;
// Import heap related library from `alloc`
// https://doc.rust-lang.org/alloc/index.html
use alloc::vec::Vec;
// Import CKB syscalls and structures
// https://nervosnetwork.github.io/ckb-std/riscv64imac-unknown-none-elf/doc/ckb_std/index.html
use bitcoin_spv::{
btcspv,
types::{HeaderArray, MerkleArray, SPVError, Vin, Vout},
validatespv,
};
use ckb_std::{
ckb_constants::Source,
ckb_types::{bytes::Bytes, prelude::*},
debug, default_alloc, entry,
error::SysError,
high_level::{load_cell_data, load_witness_args},
};
use num::bigint::BigUint;
mod types;
use types::{Difficulty, DifficultyReader, SPVProof, SPVProofReader};
const TX_PROOF_DIFFICULTY_FACTOR: u8 = 6;
pub type RawBytes = Vec<u8>;
entry!(entry);
default_alloc!();
/// Program entry
///
/// Maps the script result onto CKB's exit-code convention: 0 on success,
/// otherwise the error's `i8` discriminant.
fn entry() -> i8 {
    if let Err(err) = main() {
        err as i8
    } else {
        0
    }
}
/// Error codes of this script; the discriminant is returned as the
/// process exit code from `entry`.
#[repr(i8)]
enum Error {
    // One-to-one mirrors of `ckb_std::error::SysError` variants.
    IndexOutOfBound = 1,
    ItemMissing,
    LengthNotEnough,
    Encoding,
    // Add customized errors here...
    WitnessInvalidEncoding,
    WitnessMissInputType,
    // The difficulty cell-dep failed molecule verification.
    DifficultyDataInvalid,
    InvalidVin,
    InvalidVout,
    // Recomputed txid does not match the one claimed in the proof.
    WrongTxId,
    // Catch-all for bitcoin-spv errors (see `From<SPVError>`).
    SpvError,
    NotAtCurrentOrPreviousDifficulty,
    InsufficientDifficulty,
    BadMerkleProof,
}
/// Maps CKB syscall errors onto this script's error codes one-to-one.
impl From<SysError> for Error {
    fn from(err: SysError) -> Self {
        use SysError::*;
        match err {
            IndexOutOfBound => Self::IndexOutOfBound,
            ItemMissing => Self::ItemMissing,
            LengthNotEnough(_) => Self::LengthNotEnough,
            Encoding => Self::Encoding,
            // An unknown syscall error has no meaningful exit code to map to.
            Unknown(err_code) => panic!("unexpected sys error {}", err_code),
        }
    }
}
/// Collapses every bitcoin-spv error into the single `SpvError` code.
impl From<SPVError> for Error {
    fn from(_err: SPVError) -> Self {
        Self::SpvError
    }
}
fn parse_difficulty() -> Result<Difficulty, Error> {
// TODO: get this index from witness args
let difficulty_cell_dep_index = 1;
let dep_data = load_cell_data(difficulty_cell_dep_index, Source::CellDep)?;
debug!("dep data is {:?}", &dep_data);
if DifficultyReader::verify(&dep_data, false).is_err() {
return Err(Error::DifficultyDataInvalid);
}
let difficulty = Difficulty::new_unchecked(dep_data.into());
Ok(difficulty)
}
/// parse proof from witness
fn parse_witness() -> Result<SPVProof, Error> {
let witness_args = load_witness_args(0, Source::Input)?.input_type();
if witness_args.is_none() {
return Err(Error::WitnessMissInputType);
}
let witness_args: Bytes = witness_args.to_opt().unwrap().unpack();
if SPVProofReader::verify(&witness_args, false).is_err() {
return Err(Error::WitnessInvalidEncoding);
}
let proof = SPVProof::new_unchecked(witness_args.into());
Ok(proof)
}
/// Validates the SPV proof against the tracked Bitcoin difficulty data.
///
/// Checks, in order:
/// 1. vin/vout encodings are well-formed,
/// 2. the tx id recomputed from (version, vin, vout, locktime) matches the
///    id claimed in the proof,
/// 3. the header chain is internally valid and starts at either the current
///    or the previous network difficulty,
/// 4. the chain's accumulated work covers `TX_PROOF_DIFFICULTY_FACTOR`
///    blocks of the required difficulty,
/// 5. the merkle inclusion proof ties the tx id to a header's tx root.
fn verify(proof: &SPVProof, difficulty: &Difficulty) -> Result<(), Error> {
    if !btcspv::validate_vin(proof.vin().as_slice()) {
        return Err(Error::InvalidVin);
    }
    if !btcspv::validate_vout(proof.vout().as_slice()) {
        return Err(Error::InvalidVout);
    }
    let mut ver = [0u8; 4];
    ver.copy_from_slice(proof.version().as_slice());
    let mut lock = [0u8; 4];
    lock.copy_from_slice(proof.locktime().as_slice());
    let tx_id = validatespv::calculate_txid(
        &ver,
        &Vin::new(proof.vin().as_slice())?,
        &Vout::new(proof.vout().as_slice())?,
        &lock,
    );
    if tx_id.as_ref() != proof.tx_id().as_slice() {
        return Err(Error::WrongTxId);
    }
    // verify difficulty
    let raw_headers = proof.headers();
    let headers = HeaderArray::new(raw_headers.as_slice())?;
    let observed_diff = validatespv::validate_header_chain(&headers, false)?;
    let previous_diff = BigUint::from_bytes_be(difficulty.previous().as_slice());
    let current_diff = BigUint::from_bytes_be(difficulty.current().as_slice());
    let first_header_diff = headers.index(0).difficulty();
    let req_diff = if first_header_diff == current_diff {
        current_diff
    } else if first_header_diff == previous_diff {
        previous_diff
    } else {
        return Err(Error::NotAtCurrentOrPreviousDifficulty);
    };
    if observed_diff < req_diff * TX_PROOF_DIFFICULTY_FACTOR {
        return Err(Error::InsufficientDifficulty);
    }
    // verify tx inclusion.
    // BUG FIX: `headers.index(headers.len())` indexes one past the last
    // header and can never succeed; use the last header of the proof chain.
    // NOTE(review): confirm the tx-bearing header is intended to be the last
    // one of the chain (not the first) against the proof format spec.
    let header = headers.index(headers.len() - 1);
    let mut idx = [0u8; 8];
    idx.copy_from_slice(proof.index().as_slice());
    if !validatespv::prove(
        tx_id,
        header.tx_root(),
        &MerkleArray::new(proof.intermediate_nodes().as_slice())?,
        u64::from_le_bytes(idx),
    ) {
        return Err(Error::BadMerkleProof);
    }
    Ok(())
}
/// Contract body: decode the proof and difficulty, then run verification.
fn main() -> Result<(), Error> {
    let proof = parse_witness()?;
    let difficulty = parse_difficulty()?;
    verify(&proof, &difficulty)
}
|
use std::borrow::Cow;
use std::cmp;
use std::io::Write;
use std::mem;
use byteorder::{ByteOrder, WriteBytesExt};
use nom::*;
use errors::{PcapError, Result};
use pcapng::options::pad_to;
use pcapng::{Block, BlockType};
use traits::WriteTo;
pub const BLOCK_TYPE: u32 = 0x0000_0003;
/// The Simple Packet Block (SPB) is a lightweight container for storing the packets coming from the network.
#[derive(Clone, Debug, PartialEq)]
pub struct SimplePacket<'a> {
/// actual length of the packet when it was transmitted on the network.
pub original_len: u32,
/// the data coming from the network, including link-layer headers.
pub data: Cow<'a, [u8]>,
}
impl<'a> SimplePacket<'a> {
    /// Block type code of a Simple Packet Block.
    pub fn block_type() -> BlockType {
        BlockType::SimplePacket
    }
    /// Size of the block body: the 4-byte original-length field plus the
    /// packet data padded to a 32-bit boundary.
    pub fn size(&self) -> usize {
        pad_to::<u32>(self.data.len()) + mem::size_of::<u32>()
    }
    /// Parses a Simple Packet Block body with the given endianness.
    pub fn parse(buf: &'a [u8], endianness: Endianness) -> Result<(&'a [u8], Self)> {
        match parse_simple_packet(buf, endianness) {
            Ok(parsed) => Ok(parsed),
            Err(err) => Err(PcapError::from(err).into()),
        }
    }
}
/// 0 1 2 3
/// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
/// +---------------------------------------------------------------+
/// 0 | Block Type = 0x00000003 |
/// +---------------------------------------------------------------+
/// 4 | Block Total Length |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// 8 | Original Packet Length |
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// 12 / /
/// / Packet Data /
/// / variable length, padded to 32 bits /
/// / /
/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
/// | Block Total Length |
/// +---------------------------------------------------------------+
// Body layout: a 4-byte original-length field followed by the packet data
// (padded to 32 bits on the wire). The data slice is trimmed to
// `original_len` bytes when that is shorter than the remaining buffer, so
// padding bytes are excluded.
// NOTE(review): `dbg_dmp!` hex-dumps the input on parse failure — presumably
// a leftover debugging aid; confirm it is intended in release builds.
named_args!(parse_simple_packet(endianness: Endianness)<SimplePacket>,
    dbg_dmp!(do_parse!(
        original_len: u32!(endianness) >>
        data: rest >>
        (
            SimplePacket {
                original_len,
                data: Cow::from(&data[..cmp::min(original_len as usize, data.len())]),
            }
        )
    ))
);
impl<'a> WriteTo for SimplePacket<'a> {
    /// Serializes the block body: original length, packet data, then zero
    /// padding up to a 32-bit boundary. Returns the padded body size.
    fn write_to<T: ByteOrder, W: Write>(&self, w: &mut W) -> Result<usize> {
        w.write_u32::<T>(self.original_len)?;
        w.write_all(&self.data)?;
        let pad = pad_to::<u32>(self.data.len()) - self.data.len();
        if pad != 0 {
            w.write_all(&vec![0u8; pad])?;
        }
        Ok(self.size())
    }
}
impl<'a> Block<'a> {
    /// Whether this block's type code marks it as a Simple Packet Block.
    pub fn is_simple_packet(&self) -> bool {
        BLOCK_TYPE == self.ty
    }
    /// Interprets the block body as a `SimplePacket`.
    ///
    /// Returns `None` (after logging a warning and a hexdump of the body)
    /// when the type does not match or the body fails to parse.
    pub fn as_simple_packet(&'a self, endianness: Endianness) -> Option<SimplePacket<'a>> {
        if !self.is_simple_packet() {
            return None;
        }
        match SimplePacket::parse(&self.body, endianness) {
            Ok((_, packet)) => Some(packet),
            Err(err) => {
                warn!("fail to parse simple packet: {:?}", err);
                hexdump!(self.body);
                None
            }
        }
    }
}
#[cfg(test)]
pub mod tests {
    use byteorder::LittleEndian;
    use super::*;
    use pcapng::Block;
    // A full little-endian Simple Packet Block capture: type (0x03), total
    // length (0x54 = 84), original length (0x42 = 66), 66 data bytes + 2
    // padding bytes, and the trailing total-length copy.
    pub const LE_SIMPLE_PACKET: &[u8] = b"\x03\x00\x00\x00\
    \x54\x00\x00\x00\
    \x42\x00\x00\x00\
    \x8C\x85\x90\x0B\xCB\x9E\x20\x4E\x71\xFC\x92\x14\x08\x00\x45\x00\
    \x00\x34\xE8\xA8\x40\x00\xEF\x06\xC1\x0B\x11\xA7\xC0\x80\x0A\x06\
    \x05\xE2\x01\xBB\xC8\xF3\x0A\x30\x41\xDC\xD0\x4D\x17\xA5\x80\x10\
    \x01\x3F\xC7\xDC\x00\x00\x01\x01\x05\x0A\xD0\x4D\x17\xA4\xD0\x4D\
    \x17\xA5\x00\x00\
    \x54\x00\x00\x00";
    lazy_static! {
        // The expected parse of LE_SIMPLE_PACKET's body: padding trimmed,
        // only the 66 original bytes retained.
        static ref SIMPLE_PACKET: SimplePacket<'static> = SimplePacket {
            original_len: 66,
            data: Cow::from(
                &b"\x8C\x85\x90\x0B\xCB\x9E\x20\x4E\x71\xFC\x92\x14\x08\x00\x45\x00\
                \x00\x34\xE8\xA8\x40\x00\xEF\x06\xC1\x0B\x11\xA7\xC0\x80\x0A\x06\
                \x05\xE2\x01\xBB\xC8\xF3\x0A\x30\x41\xDC\xD0\x4D\x17\xA5\x80\x10\
                \x01\x3F\xC7\xDC\x00\x00\x01\x01\x05\x0A\xD0\x4D\x17\xA4\xD0\x4D\
                \x17\xA5"[..]
            ),
        };
    }
    // Round-trip check: the raw bytes parse into the expected block and packet.
    #[test]
    fn test_parse() {
        let (remaining, block) = Block::parse(LE_SIMPLE_PACKET, Endianness::Little).unwrap();
        assert_eq!(remaining, b"");
        assert_eq!(block.ty, BLOCK_TYPE);
        assert_eq!(block.size(), LE_SIMPLE_PACKET.len());
        let simple_packet = block.as_simple_packet(Endianness::Little).unwrap();
        assert_eq!(simple_packet, *SIMPLE_PACKET);
    }
    // Serializing the packet must reproduce the body bytes (without the
    // 8-byte block header and 4-byte trailing length).
    #[test]
    fn test_write() {
        let mut buf = vec![];
        let wrote = SIMPLE_PACKET.write_to::<LittleEndian, _>(&mut buf).unwrap();
        assert_eq!(wrote, SIMPLE_PACKET.size());
        assert_eq!(
            buf.as_slice(),
            &LE_SIMPLE_PACKET[8..LE_SIMPLE_PACKET.len() - 4]
        );
    }
}
|
use crate::fungible_token::FungibleToken;
use crate::storage_management::{StorageBalance, StorageBalanceBounds, StorageManagement};
use near_sdk::json_types::{ValidAccountId, U128};
use near_sdk::{assert_one_yocto, env, log, AccountId, Balance, Promise};
impl FungibleToken {
    /// Internal method that returns the Account ID and the balance in case the account was
    /// unregistered.
    ///
    /// Requires exactly 1 yoctoNEAR attached. Without `force`, only accounts
    /// with a zero token balance may unregister; with `force`, a remaining
    /// balance is burned by subtracting it from `total_supply`.
    pub fn internal_storage_unregister(
        &mut self,
        force: Option<bool>,
    ) -> Option<(AccountId, Balance)> {
        assert_one_yocto();
        let account_id = env::predecessor_account_id();
        let force = force.unwrap_or(false);
        if let Some(balance) = self.accounts.get(&account_id) {
            if balance == 0 || force {
                self.accounts.remove(&account_id);
                self.total_supply -= balance;
                // Refund the storage deposit; the extra 1 yoctoNEAR is
                // presumably returning the deposit required by
                // assert_one_yocto() — confirm against NEP-145.
                Promise::new(account_id.clone()).transfer(self.storage_balance_bounds().min.0 + 1);
                Some((account_id, balance))
            } else {
                env::panic(b"Can't unregister the account with the positive balance without force")
            }
        } else {
            log!("The account {} is not registered", &account_id);
            None
        }
    }
    /// Storage balance for a registered account: `total` is pinned at the
    /// minimum bound and `available` is always 0. `None` if unregistered.
    fn internal_storage_balance_of(&self, account_id: &AccountId) -> Option<StorageBalance> {
        if self.accounts.contains_key(account_id) {
            Some(StorageBalance { total: self.storage_balance_bounds().min, available: 0.into() })
        } else {
            None
        }
    }
}
impl StorageManagement for FungibleToken {
    // `registration_only` doesn't affect the implementation for vanilla fungible token.
    /// Registers `account_id` (default: the caller), consuming the minimum
    /// storage balance from the attached deposit and refunding any excess.
    /// Re-registering refunds the whole deposit.
    #[allow(unused_variables)]
    fn storage_deposit(
        &mut self,
        account_id: Option<ValidAccountId>,
        registration_only: Option<bool>,
    ) -> StorageBalance {
        let amount: Balance = env::attached_deposit();
        let account_id =
            account_id.map(|a| a.into()).unwrap_or_else(|| env::predecessor_account_id());
        if self.accounts.contains_key(&account_id) {
            log!("The account is already registered, refunding the deposit");
            if amount > 0 {
                Promise::new(env::predecessor_account_id()).transfer(amount);
            }
        } else {
            let min_balance = self.storage_balance_bounds().min.0;
            if amount < min_balance {
                // Fixed typo in the panic message ("mimimum" -> "minimum").
                env::panic(b"The attached deposit is less than the minimum storage balance");
            }
            self.internal_register_account(&account_id);
            // Anything above the required minimum goes back to the payer.
            let refund = amount - min_balance;
            if refund > 0 {
                Promise::new(env::predecessor_account_id()).transfer(refund);
            }
        }
        self.internal_storage_balance_of(&account_id).unwrap()
    }
    /// While storage_withdraw normally allows the caller to retrieve `available` balance, the basic
    /// Fungible Token implementation sets storage_balance_bounds.min == storage_balance_bounds.max,
    /// which means available balance will always be 0. So this implementation:
    /// * panics if `amount > 0`
    /// * never transfers Ⓝ to caller
    /// * returns a `storage_balance` struct if `amount` is 0
    fn storage_withdraw(&mut self, amount: Option<U128>) -> StorageBalance {
        assert_one_yocto();
        let predecessor_account_id = env::predecessor_account_id();
        if let Some(storage_balance) = self.internal_storage_balance_of(&predecessor_account_id) {
            match amount {
                Some(amount) if amount.0 > 0 => {
                    env::panic(b"The amount is greater than the available storage balance");
                }
                _ => storage_balance,
            }
        } else {
            env::panic(
                format!("The account {} is not registered", &predecessor_account_id).as_bytes(),
            );
        }
    }
    /// Unregisters the caller; returns whether an account was removed.
    fn storage_unregister(&mut self, force: Option<bool>) -> bool {
        self.internal_storage_unregister(force).is_some()
    }
    /// Min and max are identical: the cost of one account's storage at the
    /// current per-byte price.
    fn storage_balance_bounds(&self) -> StorageBalanceBounds {
        let required_storage_balance =
            Balance::from(self.account_storage_usage) * env::storage_byte_cost();
        StorageBalanceBounds {
            min: required_storage_balance.into(),
            max: Some(required_storage_balance.into()),
        }
    }
    /// Storage balance of `account_id`, or `None` if it is not registered.
    fn storage_balance_of(&self, account_id: ValidAccountId) -> Option<StorageBalance> {
        self.internal_storage_balance_of(account_id.as_ref())
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
mod auth;
mod controller;
mod model;
mod repository;
mod service;
mod db;
#[macro_use]
extern crate rocket;
use crate::controller::*;
use crate::repository::{CommentsRepository, PostsRepository, UserRepository};
use rocket::http::Method;
use rocket::response::{NamedFile, Redirect};
use rocket_cors::{AllowedOrigins, CorsOptions};
use std::io;
use std::path::{Path, PathBuf};
#[get("/")]
fn index() -> Redirect {
    // Send root traffic to the static entry page with a permanent redirect.
    let entry_page = "/index.html";
    Redirect::permanent(entry_page)
}
#[get("/<file..>", rank = 2)]
fn build_dir(file: PathBuf) -> io::Result<NamedFile> {
    // Serve static assets out of the frontend build output; rank 2 keeps
    // this catch-all from shadowing more specific routes.
    let asset = Path::new("../frontend/build/").join(file);
    NamedFile::open(asset)
}
fn main() {
    // Create backing tables up front; the app cannot run without them.
    UserRepository::init_tables().unwrap();
    PostsRepository::init_tables().unwrap();
    CommentsRepository::init_tables().unwrap();
    // CORS: any origin, credentials allowed, GET/POST/PATCH only.
    let cors_options = CorsOptions::default()
        .allowed_origins(AllowedOrigins::all())
        .allowed_methods(
            vec![Method::Get, Method::Post, Method::Patch]
                .into_iter()
                .map(From::from)
                .collect(),
        )
        .allow_credentials(true);
    let cors = cors_options.to_cors().unwrap();
    rocket::ignite()
        .mount("/", routes![index, build_dir])
        .mount(
            "/api",
            routes![
                all_posts,
                posts_page,
                new_post,
                get_comments,
                login,
                register,
                new_comment
            ],
        )
        .attach(cors)
        .launch();
}
|
use crate::rand3;
/// Hermite smoothstep: saturates `x` into [0, 1] relative to the edges and
/// applies the polynomial 3t^2 - 2t^3.
fn smoothstep(edge0: f64, edge1: f64, mut x: f64) -> f64 {
    x = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0);
    x * x * (3.0 - 2.0 * x)
}
/// Saturates `x` into the closed interval [`lowerlimit`, `upperlimit`].
fn clamp(x: f64, lowerlimit: f64, upperlimit: f64) -> f64 {
    if x < lowerlimit {
        lowerlimit
    } else if x > upperlimit {
        upperlimit
    } else {
        x
    }
}
/// Blends from `a` to `b` driven by the smoothstepped parameter `x`.
fn simple_interpolate(a: f64, b: f64, x: f64) -> f64 {
    let t = smoothstep(0.0, 1.0, x);
    a + t * (b - a)
}
/// Trilinear value noise: samples `rand3` at the 8 corners of the unit
/// lattice cell containing (x, y, z) and smoothly interpolates between them.
fn interpolatedNoise3D( x: f64, y: f64, z: f64) -> f64
{
    // x - fract(x) == trunc(x): integer lattice coordinate of the cell,
    // with the fractional remainder as the position inside the cell.
    let integer_x: f64 = x - x.fract();
    let fractional_x: f64 = x - integer_x;
    let integer_y: f64 = y - y.fract();
    let fractional_y: f64 = y - integer_y;
    let integer_z: f64 = z - z.fract();
    let fractional_z: f64 = z - integer_z;
    // Pseudo-random values at the 8 corners of the cell.
    let v1: f64 = rand3(integer_x, integer_y, integer_z);
    let v2: f64 = rand3(integer_x+1.0, integer_y, integer_z);
    let v3: f64 = rand3(integer_x, integer_y+1.0, integer_z);
    let v4: f64 = rand3(integer_x+1.0, integer_y +1.0, integer_z);
    let v5: f64 = rand3(integer_x, integer_y, integer_z+1.0);
    let v6: f64 = rand3(integer_x+1.0, integer_y, integer_z+1.0);
    let v7: f64 = rand3(integer_x, integer_y+1.0, integer_z+1.0);
    let v8: f64 = rand3(integer_x+1.0, integer_y +1.0, integer_z+1.0);
    // Collapse one axis at a time: first z, then x, finally y.
    let i1: f64 = simple_interpolate(v1,v5, fractional_z);
    let i2: f64 = simple_interpolate(v2,v6, fractional_z);
    let i3: f64 = simple_interpolate(v3,v7, fractional_z);
    let i4: f64 = simple_interpolate(v4,v8, fractional_z);
    let ii1: f64 = simple_interpolate(i1,i2,fractional_x);
    let ii2: f64 = simple_interpolate(i3,i4,fractional_x);
    return simple_interpolate(ii1 , ii2 , fractional_y);
}
pub fn perlin_noise (x: f64, y : f64, z: f64, wavelength: f64) -> f64
{
return interpolatedNoise3D(x/wavelength, y/wavelength, z/wavelength);
} |
pub mod neuron;
pub mod rl;
|
use gtk::prelude::GtkMenuItemExt;
use gtk::prelude::MenuShellExt;
use gtk::prelude::WidgetExt;
use gtk;
use clipboard::{ClipboardContext, ClipboardProvider};
use libappindicator::{AppIndicator, AppIndicatorStatus};
use super::super::seeds::Seeds;
/// System-tray applet that exposes one menu entry per TOTP seed.
pub struct Applet {
    // The configured TOTP seeds whose codes can be copied to the clipboard.
    seeds: Seeds,
}
impl Applet {
    /// Creates an applet serving codes for the given seeds.
    pub fn new(seeds: Seeds) -> Self {
        // Field-init shorthand (was the redundant `seeds: seeds`).
        Applet { seeds }
    }
    /// Builds the tray indicator and menu, then blocks in the GTK main loop.
    /// Activating a menu entry copies that seed's current TOTP code to the
    /// clipboard.
    pub fn run(&self) {
        gtk::init().unwrap();
        let mut indicator = AppIndicator::new("totp-clipboard", "");
        indicator.set_icon_full(
            "/usr/share/icons/Adwaita/22x22/emblems/emblem-readonly.png",
            "Open the totpd menu",
        );
        let mut m = gtk::Menu::new();
        for seed in self.seeds.get_seeds() {
            let mi = gtk::MenuItem::with_label(seed.name());
            // `move` so each handler owns its seed and can generate a fresh
            // code at activation time.
            mi.connect_activate(move |_| {
                let code = seed.code();
                let mut clipboard: ClipboardContext = ClipboardProvider::new().unwrap();
                clipboard.set_contents(code).unwrap();
            });
            m.append(&mi);
        }
        indicator.set_menu(&mut m);
        m.show_all();
        indicator.set_status(AppIndicatorStatus::Active);
        gtk::main();
    }
}
|
/// Declares strongly-typed index/id types. Arms:
/// - `@base_no_salsa`: plain `NonZeroU32`-backed index (niche-optimized, so
///   `Option<Id>` costs nothing); stores `value + 1` internally.
/// - `@base`: index backed by a salsa `InternId`.
/// - `@no_owner`: standalone id allocated from a global atomic counter.
/// - `@owner`: salsa-interned id with a `lookup` convenience method.
/// - `@projected`: a `Copy` projection wrapper over a salsa DB plus
///   `Intern`/`Untern` impls for each (key, data) pair.
#[macro_export]
macro_rules! typed_index {
    // NonZeroU32-backed index without salsa. `MAX_U32` bounds valid values.
    (@base_no_salsa $outer_vis:vis $name:ident $max_u32:expr) => {
        #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
        #[repr(transparent)]
        $outer_vis struct $name(core::num::NonZeroU32);
        $crate::debug_fallback_impl!($name);
        impl $name {
            const MAX_U32: u32 = $max_u32;
            #[inline]
            pub const fn as_u32(&self) -> u32 {
                // Undo the +1 bias used to keep the inner value non-zero.
                self.0.get() - 1
            }
            #[inline]
            pub const fn as_usize(&self) -> usize {
                self.as_u32() as usize
            }
            #[inline]
            pub fn from_u32(val: u32) -> Self {
                assert!(val < $name::MAX_U32);
                unsafe { $name::from_u32_unchecked(val) }
            }
            #[inline]
            pub fn from_usize(val: usize) -> Self {
                assert!(val < ($name::MAX_U32 as usize));
                unsafe { $name::from_u32_unchecked(val as u32) }
            }
            #[inline]
            const unsafe fn from_u32_unchecked(v: u32) -> Self {
                // SAFETY-relevant: caller guarantees v < MAX_U32, so v + 1
                // cannot wrap to zero.
                $name(core::num::NonZeroU32::new_unchecked(v + 1))
            }
        }
        impl core::fmt::Debug for $name {
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                f.debug_tuple(stringify!($name)).field(&self.as_u32()).finish()
            }
        }
        impl From<u32> for $name {
            fn from(val: u32) -> Self {
                Self::from_u32(val)
            }
        }
        impl From<usize> for $name {
            fn from(val: usize) -> Self {
                Self::from_usize(val)
            }
        }
    };
    // Salsa InternId-backed index.
    (@base $outer_vis:vis $name:ident) => {
        use $crate::salsa::InternKey as _;
        #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
        #[repr(transparent)]
        $outer_vis struct $name($crate::salsa::InternId);
        $crate::debug_fallback_impl!($name);
        impl $crate::salsa::InternKey for $name {
            #[inline]
            fn from_intern_id(v: $crate::salsa::InternId) -> Self {
                $name(v)
            }
            #[inline]
            fn as_intern_id(&self) -> $crate::salsa::InternId {
                self.0
            }
        }
        impl $name {
            #[inline]
            pub fn as_u32(&self) -> u32 {
                self.0.as_u32()
            }
            #[inline]
            pub fn from_u32(v: u32) -> Self {
                $name(v.into())
            }
        }
        impl From<$crate::salsa::InternId> for $name {
            #[inline]
            fn from(src: $crate::salsa::InternId) -> Self {
                Self::from_intern_id(src)
            }
        }
        impl From<u32> for $name {
            #[inline]
            fn from(src: u32) -> Self {
                Self::from_intern_id($crate::salsa::InternId::from(src))
            }
        }
        impl From<usize> for $name {
            #[inline]
            fn from(src: usize) -> Self {
                Self::from_intern_id($crate::salsa::InternId::from(src))
            }
        }
    };
    // Globally-unique id minted from a process-wide atomic counter.
    (@no_owner $outer_vis:vis $name:ident) => {
        $crate::typed_index!(@base_no_salsa $outer_vis $name core::u32::MAX);
        impl $name {
            $outer_vis fn new() -> Self {
                static NEXT_ID: core::sync::atomic::AtomicU32 = core::sync::atomic::AtomicU32::new(0);
                NEXT_ID.fetch_add(1, core::sync::atomic::Ordering::Relaxed).into()
            }
        }
    };
    // Salsa-interned id owned by a query database, with a `lookup` helper.
    (@owner $db:ident $outer_vis:vis $lookup_func:ident $name:ident $data:ident) => {
        $crate::typed_index!(@base $outer_vis $name);
        impl $name {
            #[allow(dead_code)]
            #[inline]
            $outer_vis fn lookup(self, db: &impl $db) -> $data {
                db.$lookup_func(self)
            }
        }
    };
    // Projection over a salsa DB: a cheap Copy handle through which each
    // listed data type can be interned/unterned.
    (@projected $db:ident $outer_vis:vis $projection:ident [$(($lookup_func:ident $intern_func:ident $name:ident $data:ident), )*]) => {
        #[derive(Debug)]
        $outer_vis struct $projection<'a, DB: $db> {
            db: &'a DB
        }
        impl<'a, DB> $projection<'a, DB> where DB: $db {
            $outer_vis fn new(db: &'a DB) -> Self {
                $projection {
                    db
                }
            }
        }
        impl<'a, DB> From<&'a DB> for $projection<'a, DB> where DB: $db {
            fn from(db: &'a DB) -> Self {
                $projection::new(db)
            }
        }
        impl<'a, DB> Copy for $projection<'a, DB> where DB: $db {}
        impl<'a, DB> Clone for $projection<'a, DB> where DB: $db {
            #[inline]
            fn clone(&self) -> Self {
                $projection {
                    db: self.db
                }
            }
        }
        impl<'a, DB> AsRef<DB> for $projection<'a, DB> where DB: $db {
            #[inline]
            fn as_ref(&self) -> &DB {
                self.db
            }
        }
        $(
            $crate::typed_index!(@base $outer_vis $name);
            impl<'a, DB> $crate::salsa_intern::Intern<$projection<'a, DB>, $name> for $data where DB: $db {
                #[inline]
                fn intern(self, table: $projection<'a, DB>) -> $name {
                    table.db.$intern_func(self)
                }
            }
            impl<'a, DB> $crate::salsa_intern::Untern<$projection<'a, DB>, $data> for $name where DB: $db {
                #[inline]
                fn untern(self, table: $projection<'a, DB>) -> $data {
                    table.db.$lookup_func(self)
                }
            }
        )*
    };
}
/// Interns `self` into `table`, producing the key type `Key`.
pub trait Intern<Table, Key> {
    fn intern(self, table: Table) -> Key;
}
/// Resolves an interned key back to its `Data` via `table`.
pub trait Untern<Table, Data> {
    fn untern(self, table: Table) -> Data;
}
#[cfg(test)]
mod tests {
    #![allow(dead_code)]
    // Expands to `Foo` (intern key), `FooInternTable` (projection) and the
    // Intern/Untern impls tying them to `FooQueries::foo_data`.
    typed_index!(@projected FooQueries pub FooInternTable [(lookup_foo_data foo_data Foo FooData), ]);
    #[derive(Debug, Hash, Clone, PartialEq, Eq)]
    pub struct FooData {
        text: String,
    }
    #[salsa::query_group(FooQueriesStorage)]
    pub trait FooQueries {
        #[salsa::interned]
        fn foo_data(&self, data: FooData) -> Foo;
    }
    // Minimal salsa database hosting the interning query group.
    #[salsa::database(FooQueriesStorage)]
    pub struct FooDatabase {
        runtime: salsa::Runtime<FooDatabase>,
    }
    impl salsa::Database for FooDatabase {
        fn salsa_runtime(&self) -> &salsa::Runtime<FooDatabase> {
            &self.runtime
        }
    }
    impl Default for FooDatabase {
        fn default() -> Self {
            FooDatabase {
                runtime: Default::default(),
            }
        }
    }
    use crate::salsa_intern::{Intern, Untern};
    // Round trip: intern a FooData, untern the key, get the same data back.
    #[test]
    fn intern_foo_stuff() {
        let db = FooDatabase::default();
        let table: FooInternTable<FooDatabase> = FooInternTable::from(&db);
        let foo1: Foo = FooData {
            text: String::from("hello my name is bob"),
        }
        .intern(table);
        assert_eq!(foo1.untern(table).text, "hello my name is bob");
    }
}
|
//! Zellij utilities.
pub mod consts;
pub mod logging;
pub mod shared;
|
// NOTE(review): empty placeholder struct with no fields or impls in view —
// presumably a marker type or a stub awaiting implementation; confirm intent.
struct Kagome{
}
use player::*;
use slog::Logger;
/// Mutable game state shared by the mafia-game engine.
pub struct InternalState {
    /// All players in the game.
    pub players: Vec<Player>,
    // NOTE(review): presumably the index of the mafia's kill target (isize
    // may allow a "no target" sentinel) — confirm against the game logic.
    pub mafia_kill: isize,
    /// Whether the game is still in its first night phase.
    pub first_night: bool,
    /// Logger for game events.
    pub logger: Logger,
}
|
// compile-args: --crate-type lib
#![deny(broken_intra_doc_links)]
//~^ WARNING renamed
//! [x]
//~^ ERROR unresolved link
|
extern crate ocl;
use ocl::prm::{Uchar8, Float16};
/// An object renderable by the OpenCL scene kernel.
///
/// NOTE(review): the Uchar8/Float16 payload layout is defined by the kernel
/// side — document the per-lane meaning once the kernel source is at hand.
pub trait SceneObject{
    fn get_integer_data(&self) -> Uchar8;
    fn get_float_data(&self) -> Float16;
}
extern crate bytecodec;
#[macro_use]
extern crate clap;
extern crate fibers;
extern crate fibers_http_client;
extern crate futures;
#[macro_use]
extern crate trackable;
extern crate url;
use bytecodec::bytes::Utf8Decoder;
use clap::Arg;
use fibers::sync::oneshot::MonitorError;
use fibers::{Executor, InPlaceExecutor, Spawn};
use fibers_http_client::connection::Oneshot;
use fibers_http_client::Client;
use std::time::Duration;
use trackable::error::MainError;
use url::Url;
/// CLI entry: GET the given URL (with an optional timeout in milliseconds)
/// and print the response body, propagating any failure as a `MainError`.
fn main() -> Result<(), MainError> {
    let matches = app_from_crate!()
        .arg(Arg::with_name("URL").index(1).required(true))
        .arg(
            Arg::with_name("TIMEOUT_MILLIS")
                .long("timeout")
                .takes_value(true),
        )
        .get_matches();
    let url: Url = track_any_err!(matches.value_of("URL").unwrap().parse())?;
    let mut client = Client::new(Oneshot);
    let mut request = client.request(&url).decoder(Utf8Decoder::new());
    if let Some(ms) = matches.value_of("TIMEOUT_MILLIS") {
        request = request.timeout(Duration::from_millis(track_any_err!(ms.parse())?));
    }
    let future = request.get();
    let mut executor = track_any_err!(InPlaceExecutor::new())?;
    let monitor = executor.spawn_monitor(future);
    // Drive the fiber to completion and unpack the monitored result.
    match track_any_err!(executor.run_fiber(monitor))? {
        Ok(response) => {
            println!("{}", response.body());
            Ok(())
        }
        Err(MonitorError::Failed(e)) => Err(track!(e).into()),
        Err(MonitorError::Aborted) => panic!(),
    }
}
|
#[doc = "Register `DINR11` reader"]
pub type R = crate::R<DINR11_SPEC>;
#[doc = "Field `DIN11` reader - Input data received from MDIO Master during write frames"]
pub type DIN11_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:15 - Input data received from MDIO Master during write frames"]
    #[inline(always)]
    pub fn din11(&self) -> DIN11_R {
        // The data field occupies the low 16 bits of the 32-bit register.
        DIN11_R::new((self.bits & 0xffff) as u16)
    }
}
#[doc = "MDIOS input data register 11\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dinr11::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DINR11_SPEC;
// 32-bit register.
impl crate::RegisterSpec for DINR11_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dinr11::R`](R) reader structure"]
impl crate::Readable for DINR11_SPEC {}
#[doc = "`reset()` method sets DINR11 to value 0"]
impl crate::Resettable for DINR11_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::container::Container;
use azure_core::incompletevector::IncompleteVector;
use azure_core::RequestId;
/// Response of a "list containers" request: one page of containers plus the
/// id the service assigned to the request.
#[derive(Debug, Clone)]
pub struct ListContainersResponse {
    /// The containers returned by this call; may be a partial (paged) list.
    pub incomplete_vector: IncompleteVector<Container>,
    /// Server-assigned id of the request that produced this response.
    pub request_id: RequestId,
}
impl ListContainersResponse {
    /// `true` when this page contains the full listing (no further pages).
    pub fn is_complete(&self) -> bool {
        self.incomplete_vector.is_complete()
    }
}
|
extern crate extended_collections;
extern crate rand;
use extended_collections::radix::RadixMap;
use self::rand::{thread_rng, Rng};
use std::iter;
use std::vec::Vec;
const NUM_OF_OPERATIONS: usize = 100_000;
// Randomized integration test: inserts random byte-string keys, then checks
// lookup, overwrite, in-place mutation, min/max ordering, and removal
// against a sorted, deduplicated Vec as the reference model.
#[test]
fn int_test_radix_map() {
    let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed([1, 1, 1, 1]);
    let mut map = RadixMap::new();
    let mut expected = Vec::new();
    for _ in 0..NUM_OF_OPERATIONS {
        // generate a random length from [10, 99)
        let len = rng.gen_range(10, 99);
        let key = iter::repeat(())
            .map(|()| rng.gen::<u8>())
            .take(len)
            .collect::<Vec<u8>>();
        let val = rng.gen::<u32>();
        map.insert(key.as_slice(), val);
        expected.push((key, val));
    }
    // Later inserts overwrite earlier ones in the map. Reversing puts later
    // inserts first; the stable sort keeps that order among equal keys, so
    // dedup (keeping the first of each run) retains the latest value per key.
    expected.reverse();
    expected.sort_by(|l, r| l.0.cmp(&r.0));
    expected.dedup_by_key(|pair| pair.0.clone());
    assert_eq!(map.len(), expected.len());
    assert_eq!(map.min(), Some(expected[0].0.clone()));
    assert_eq!(map.max(), Some(expected[expected.len() - 1].0.clone()));
    for entry in &expected {
        assert!(map.contains_key(&entry.0));
        assert_eq!(map.get(&entry.0), Some(&entry.1));
    }
    // Overwrite via insert (checking the returned old entry), then mutate
    // in place through get_mut.
    for entry in &mut expected {
        let val_1 = rng.gen::<u32>();
        let val_2 = rng.gen::<u32>();
        let old_entry = map.insert(entry.0.as_slice(), val_1);
        assert_eq!(old_entry, Some((entry.0.clone(), entry.1)));
        {
            let old_val = map.get_mut(&entry.0);
            *old_val.unwrap() = val_2;
        }
        entry.1 = val_2;
        assert_eq!(map.get(&entry.0), Some(&val_2));
    }
    // Remove in random order, tracking the shrinking length.
    thread_rng().shuffle(&mut expected);
    let mut expected_len = expected.len();
    for entry in expected {
        let old_entry = map.remove(&entry.0);
        expected_len -= 1;
        assert_eq!(old_entry, Some((entry.0, entry.1)));
        assert_eq!(map.len(), expected_len);
    }
}
|
/// Reads an n x n character grid `a` and an m x m pattern `b` from stdin and
/// prints "Yes" if the pattern occurs anywhere in the grid, else "No".
fn main() {
    let mut line = String::new();
    std::io::stdin().read_line(&mut line).ok();
    let dims: Vec<i32> = line
        .trim()
        .split_whitespace()
        .map(|t| t.parse().ok().unwrap())
        .collect();
    let (n, m) = (dims[0], dims[1]);
    let mut a: Vec<Vec<char>> = Vec::new();
    for _ in 0..n {
        line = String::new();
        std::io::stdin().read_line(&mut line).ok();
        a.push(line.trim().chars().collect());
    }
    let mut b: Vec<Vec<char>> = Vec::new();
    for _ in 0..m {
        line = String::new();
        std::io::stdin().read_line(&mut line).ok();
        b.push(line.trim().chars().collect());
    }
    // Slide the m x m window over every valid offset of the grid.
    let found = (0..n - m + 1).any(|y| (0..n - m + 1).any(|x| diff(&a, &b, x, y, m)));
    println!("{}", if found { "Yes" } else { "No" });
}
/// Returns `true` when the m-by-m window of grid `a` anchored at offset
/// (`x`, `j`-axis `y`) equals pattern `b` exactly.
///
/// Takes slices instead of `&Vec` (clippy `ptr_arg`); existing `&Vec`
/// call sites still compile via deref coercion. Short-circuits on the
/// first mismatch, matching the original early-return behavior.
fn diff(a: &[Vec<char>], b: &[Vec<char>], x: i32, y: i32, m: i32) -> bool {
    (0..m).all(|j| {
        (0..m).all(|i| a[(x + i) as usize][(y + j) as usize] == b[i as usize][j as usize])
    })
}
|
#[doc = "Register `EECR3` reader"]
pub type R = crate::R<EECR3_SPEC>;
#[doc = "Register `EECR3` writer"]
pub type W = crate::W<EECR3_SPEC>;
#[doc = "Field `EE6F` reader - EE6F"]
pub type EE6F_R = crate::FieldReader<EE6F_A>;
#[doc = "EE6F\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
// External-event filter settings: sampling-clock divider and window length N.
pub enum EE6F_A {
    #[doc = "0: Filter disabled"]
    Disabled = 0,
    #[doc = "1: f_SAMPLING=f_HRTIM, N=2"]
    Div1N2 = 1,
    #[doc = "2: f_SAMPLING=f_HRTIM, N=4"]
    Div1N4 = 2,
    #[doc = "3: f_SAMPLING=f_HRTIM, N=8"]
    Div1N8 = 3,
    #[doc = "4: f_SAMPLING=f_HRTIM/2, N=6"]
    Div2N6 = 4,
    #[doc = "5: f_SAMPLING=f_HRTIM/2, N=8"]
    Div2N8 = 5,
    #[doc = "6: f_SAMPLING=f_HRTIM/4, N=6"]
    Div4N6 = 6,
    #[doc = "7: f_SAMPLING=f_HRTIM/4, N=8"]
    Div4N8 = 7,
    #[doc = "8: f_SAMPLING=f_HRTIM/8, N=6"]
    Div8N6 = 8,
    #[doc = "9: f_SAMPLING=f_HRTIM/8, N=8"]
    Div8N8 = 9,
    #[doc = "10: f_SAMPLING=f_HRTIM/16, N=5"]
    Div16N5 = 10,
    #[doc = "11: f_SAMPLING=f_HRTIM/16, N=6"]
    Div16N6 = 11,
    #[doc = "12: f_SAMPLING=f_HRTIM/16, N=8"]
    Div16N8 = 12,
    #[doc = "13: f_SAMPLING=f_HRTIM/32, N=5"]
    Div32N5 = 13,
    #[doc = "14: f_SAMPLING=f_HRTIM/32, N=6"]
    Div32N6 = 14,
    #[doc = "15: f_SAMPLING=f_HRTIM/32, N=8"]
    Div32N8 = 15,
}
impl From<EE6F_A> for u8 {
    #[inline(always)]
    fn from(variant: EE6F_A) -> Self {
        // The enum is `repr(u8)`: its discriminant is the register value.
        variant as u8
    }
}
// The 4-bit field value transfers as a `u8`.
impl crate::FieldSpec for EE6F_A {
    type Ux = u8;
}
impl EE6F_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EE6F_A {
        // All 16 values of the 4-bit field are covered, so the catch-all
        // arm is genuinely unreachable.
        match self.bits {
            0 => EE6F_A::Disabled,
            1 => EE6F_A::Div1N2,
            2 => EE6F_A::Div1N4,
            3 => EE6F_A::Div1N8,
            4 => EE6F_A::Div2N6,
            5 => EE6F_A::Div2N8,
            6 => EE6F_A::Div4N6,
            7 => EE6F_A::Div4N8,
            8 => EE6F_A::Div8N6,
            9 => EE6F_A::Div8N8,
            10 => EE6F_A::Div16N5,
            11 => EE6F_A::Div16N6,
            12 => EE6F_A::Div16N8,
            13 => EE6F_A::Div32N5,
            14 => EE6F_A::Div32N6,
            15 => EE6F_A::Div32N8,
            _ => unreachable!(),
        }
    }
    #[doc = "Filter disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == EE6F_A::Disabled
    }
    #[doc = "f_SAMPLING=f_HRTIM, N=2"]
    #[inline(always)]
    pub fn is_div1_n2(&self) -> bool {
        *self == EE6F_A::Div1N2
    }
    #[doc = "f_SAMPLING=f_HRTIM, N=4"]
    #[inline(always)]
    pub fn is_div1_n4(&self) -> bool {
        *self == EE6F_A::Div1N4
    }
    #[doc = "f_SAMPLING=f_HRTIM, N=8"]
    #[inline(always)]
    pub fn is_div1_n8(&self) -> bool {
        *self == EE6F_A::Div1N8
    }
    #[doc = "f_SAMPLING=f_HRTIM/2, N=6"]
    #[inline(always)]
    pub fn is_div2_n6(&self) -> bool {
        *self == EE6F_A::Div2N6
    }
    #[doc = "f_SAMPLING=f_HRTIM/2, N=8"]
    #[inline(always)]
    pub fn is_div2_n8(&self) -> bool {
        *self == EE6F_A::Div2N8
    }
    #[doc = "f_SAMPLING=f_HRTIM/4, N=6"]
    #[inline(always)]
    pub fn is_div4_n6(&self) -> bool {
        *self == EE6F_A::Div4N6
    }
    #[doc = "f_SAMPLING=f_HRTIM/4, N=8"]
    #[inline(always)]
    pub fn is_div4_n8(&self) -> bool {
        *self == EE6F_A::Div4N8
    }
    #[doc = "f_SAMPLING=f_HRTIM/8, N=6"]
    #[inline(always)]
    pub fn is_div8_n6(&self) -> bool {
        *self == EE6F_A::Div8N6
    }
    #[doc = "f_SAMPLING=f_HRTIM/8, N=8"]
    #[inline(always)]
    pub fn is_div8_n8(&self) -> bool {
        *self == EE6F_A::Div8N8
    }
    #[doc = "f_SAMPLING=f_HRTIM/16, N=5"]
    #[inline(always)]
    pub fn is_div16_n5(&self) -> bool {
        *self == EE6F_A::Div16N5
    }
    #[doc = "f_SAMPLING=f_HRTIM/16, N=6"]
    #[inline(always)]
    pub fn is_div16_n6(&self) -> bool {
        *self == EE6F_A::Div16N6
    }
    #[doc = "f_SAMPLING=f_HRTIM/16, N=8"]
    #[inline(always)]
    pub fn is_div16_n8(&self) -> bool {
        *self == EE6F_A::Div16N8
    }
    #[doc = "f_SAMPLING=f_HRTIM/32, N=5"]
    #[inline(always)]
    pub fn is_div32_n5(&self) -> bool {
        *self == EE6F_A::Div32N5
    }
    #[doc = "f_SAMPLING=f_HRTIM/32, N=6"]
    #[inline(always)]
    pub fn is_div32_n6(&self) -> bool {
        *self == EE6F_A::Div32N6
    }
    #[doc = "f_SAMPLING=f_HRTIM/32, N=8"]
    #[inline(always)]
    pub fn is_div32_n8(&self) -> bool {
        *self == EE6F_A::Div32N8
    }
}
#[doc = "Field `EE6F` writer - EE6F"]
pub type EE6F_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O, EE6F_A>;
// Writer proxy: one setter per enumerated filter value.
impl<'a, REG, const O: u8> EE6F_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Filter disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Disabled)
    }
    #[doc = "f_SAMPLING=f_HRTIM, N=2"]
    #[inline(always)]
    pub fn div1_n2(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div1N2)
    }
    #[doc = "f_SAMPLING=f_HRTIM, N=4"]
    #[inline(always)]
    pub fn div1_n4(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div1N4)
    }
    #[doc = "f_SAMPLING=f_HRTIM, N=8"]
    #[inline(always)]
    pub fn div1_n8(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div1N8)
    }
    #[doc = "f_SAMPLING=f_HRTIM/2, N=6"]
    #[inline(always)]
    pub fn div2_n6(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div2N6)
    }
    #[doc = "f_SAMPLING=f_HRTIM/2, N=8"]
    #[inline(always)]
    pub fn div2_n8(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div2N8)
    }
    #[doc = "f_SAMPLING=f_HRTIM/4, N=6"]
    #[inline(always)]
    pub fn div4_n6(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div4N6)
    }
    #[doc = "f_SAMPLING=f_HRTIM/4, N=8"]
    #[inline(always)]
    pub fn div4_n8(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div4N8)
    }
    #[doc = "f_SAMPLING=f_HRTIM/8, N=6"]
    #[inline(always)]
    pub fn div8_n6(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div8N6)
    }
    #[doc = "f_SAMPLING=f_HRTIM/8, N=8"]
    #[inline(always)]
    pub fn div8_n8(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div8N8)
    }
    #[doc = "f_SAMPLING=f_HRTIM/16, N=5"]
    #[inline(always)]
    pub fn div16_n5(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div16N5)
    }
    #[doc = "f_SAMPLING=f_HRTIM/16, N=6"]
    #[inline(always)]
    pub fn div16_n6(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div16N6)
    }
    #[doc = "f_SAMPLING=f_HRTIM/16, N=8"]
    #[inline(always)]
    pub fn div16_n8(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div16N8)
    }
    #[doc = "f_SAMPLING=f_HRTIM/32, N=5"]
    #[inline(always)]
    pub fn div32_n5(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div32N5)
    }
    #[doc = "f_SAMPLING=f_HRTIM/32, N=6"]
    #[inline(always)]
    pub fn div32_n6(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div32N6)
    }
    #[doc = "f_SAMPLING=f_HRTIM/32, N=8"]
    #[inline(always)]
    pub fn div32_n8(self) -> &'a mut crate::W<REG> {
        self.variant(EE6F_A::Div32N8)
    }
}
#[doc = "Field `EE7F` reader - EE7F"]
pub use EE6F_R as EE7F_R;
#[doc = "Field `EE8F` reader - EE8F"]
pub use EE6F_R as EE8F_R;
#[doc = "Field `EE9F` reader - EE9F"]
pub use EE6F_R as EE9F_R;
#[doc = "Field `EE10F` reader - EE10F"]
pub use EE6F_R as EE10F_R;
#[doc = "Field `EE7F` writer - EE7F"]
pub use EE6F_W as EE7F_W;
#[doc = "Field `EE8F` writer - EE8F"]
pub use EE6F_W as EE8F_W;
#[doc = "Field `EE9F` writer - EE9F"]
pub use EE6F_W as EE9F_W;
#[doc = "Field `EE10F` writer - EE10F"]
pub use EE6F_W as EE10F_W;
#[doc = "Field `EEVSD` reader - EEVSD"]
pub type EEVSD_R = crate::FieldReader<EEVSD_A>;
#[doc = "EEVSD\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
// External-event sampling-clock division ratio.
pub enum EEVSD_A {
    #[doc = "0: f_EEVS=f_HRTIM"]
    Div1 = 0,
    #[doc = "1: f_EEVS=f_HRTIM/2"]
    Div2 = 1,
    #[doc = "2: f_EEVS=f_HRTIM/4"]
    Div4 = 2,
    #[doc = "3: f_EEVS=f_HRTIM/8"]
    Div8 = 3,
}
impl From<EEVSD_A> for u8 {
#[inline(always)]
fn from(variant: EEVSD_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for EEVSD_A {
type Ux = u8;
}
impl EEVSD_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EEVSD_A {
        // `self.bits` holds only the 2-bit field (the register reader masks
        // with 0x3), so values 0..=3 are exhaustive and the catch-all arm
        // can never be taken.
        match self.bits {
            0 => EEVSD_A::Div1,
            1 => EEVSD_A::Div2,
            2 => EEVSD_A::Div4,
            3 => EEVSD_A::Div8,
            _ => unreachable!(),
        }
    }
    #[doc = "f_EEVS=f_HRTIM"]
    #[inline(always)]
    pub fn is_div1(&self) -> bool {
        *self == EEVSD_A::Div1
    }
    #[doc = "f_EEVS=f_HRTIM/2"]
    #[inline(always)]
    pub fn is_div2(&self) -> bool {
        *self == EEVSD_A::Div2
    }
    #[doc = "f_EEVS=f_HRTIM/4"]
    #[inline(always)]
    pub fn is_div4(&self) -> bool {
        *self == EEVSD_A::Div4
    }
    #[doc = "f_EEVS=f_HRTIM/8"]
    #[inline(always)]
    pub fn is_div8(&self) -> bool {
        *self == EEVSD_A::Div8
    }
}
// Writer proxy for the 2-bit EEVSD field at bit offset O. FieldWriterSafe is
// used because every 2-bit pattern maps to a valid EEVSD_A variant.
#[doc = "Field `EEVSD` writer - EEVSD"]
pub type EEVSD_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, EEVSD_A>;
impl<'a, REG, const O: u8> EEVSD_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    // One convenience setter per enumerated value; each delegates to
    // `variant` and returns the register writer for call chaining.
    #[doc = "f_EEVS=f_HRTIM"]
    #[inline(always)]
    pub fn div1(self) -> &'a mut crate::W<REG> {
        self.variant(EEVSD_A::Div1)
    }
    #[doc = "f_EEVS=f_HRTIM/2"]
    #[inline(always)]
    pub fn div2(self) -> &'a mut crate::W<REG> {
        self.variant(EEVSD_A::Div2)
    }
    #[doc = "f_EEVS=f_HRTIM/4"]
    #[inline(always)]
    pub fn div4(self) -> &'a mut crate::W<REG> {
        self.variant(EEVSD_A::Div4)
    }
    #[doc = "f_EEVS=f_HRTIM/8"]
    #[inline(always)]
    pub fn div8(self) -> &'a mut crate::W<REG> {
        self.variant(EEVSD_A::Div8)
    }
}
impl R {
    // Field accessors for reads. Note the 4-bit filter fields are NOT
    // contiguous: they start every 6 bits (0, 6, 12, 18, 24), leaving 2-bit
    // gaps between them; the shift/mask below must match the documented
    // bit ranges exactly.
    #[doc = "Bits 0:3 - EE6F"]
    #[inline(always)]
    pub fn ee6f(&self) -> EE6F_R {
        EE6F_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 6:9 - EE7F"]
    #[inline(always)]
    pub fn ee7f(&self) -> EE7F_R {
        EE7F_R::new(((self.bits >> 6) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - EE8F"]
    #[inline(always)]
    pub fn ee8f(&self) -> EE8F_R {
        EE8F_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 18:21 - EE9F"]
    #[inline(always)]
    pub fn ee9f(&self) -> EE9F_R {
        EE9F_R::new(((self.bits >> 18) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - EE10F"]
    #[inline(always)]
    pub fn ee10f(&self) -> EE10F_R {
        EE10F_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    // EEVSD occupies the top two bits (30:31), hence the mask of 3.
    #[doc = "Bits 30:31 - EEVSD"]
    #[inline(always)]
    pub fn eevsd(&self) -> EEVSD_R {
        EEVSD_R::new(((self.bits >> 30) & 3) as u8)
    }
}
impl W {
    // Field writer proxies; the const generic is the field's bit offset and
    // must stay in sync with the bit ranges in `impl R` above.
    #[doc = "Bits 0:3 - EE6F"]
    #[inline(always)]
    #[must_use]
    pub fn ee6f(&mut self) -> EE6F_W<EECR3_SPEC, 0> {
        EE6F_W::new(self)
    }
    #[doc = "Bits 6:9 - EE7F"]
    #[inline(always)]
    #[must_use]
    pub fn ee7f(&mut self) -> EE7F_W<EECR3_SPEC, 6> {
        EE7F_W::new(self)
    }
    #[doc = "Bits 12:15 - EE8F"]
    #[inline(always)]
    #[must_use]
    pub fn ee8f(&mut self) -> EE8F_W<EECR3_SPEC, 12> {
        EE8F_W::new(self)
    }
    #[doc = "Bits 18:21 - EE9F"]
    #[inline(always)]
    #[must_use]
    pub fn ee9f(&mut self) -> EE9F_W<EECR3_SPEC, 18> {
        EE9F_W::new(self)
    }
    #[doc = "Bits 24:27 - EE10F"]
    #[inline(always)]
    #[must_use]
    pub fn ee10f(&mut self) -> EE10F_W<EECR3_SPEC, 24> {
        EE10F_W::new(self)
    }
    #[doc = "Bits 30:31 - EEVSD"]
    #[inline(always)]
    #[must_use]
    pub fn eevsd(&mut self) -> EEVSD_W<EECR3_SPEC, 30> {
        EEVSD_W::new(self)
    }
    // Escape hatch: callers take responsibility for writing a valid value.
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Timer External Event Control Register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eecr3::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eecr3::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct EECR3_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for EECR3_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eecr3::R`](R) reader structure"]
impl crate::Readable for EECR3_SPEC {}
#[doc = "`write(|w| ..)` method takes [`eecr3::W`](W) writer structure"]
impl crate::Writable for EECR3_SPEC {
    // Both bitmaps are zero: no field in this register has
    // write-0-to-modify / write-1-to-modify semantics, so `modify` is a
    // plain read-modify-write.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets EECR3 to value 0"]
impl crate::Resettable for EECR3_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[doc = r" Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - Pad Configuration Register A (Pads 0-3)"]
pub padrega: PADREGA,
#[doc = "0x04 - Pad Configuration Register B (Pads 4-7)"]
pub padregb: PADREGB,
#[doc = "0x08 - Pad Configuration Register C (Pads 8-11)"]
pub padregc: PADREGC,
#[doc = "0x0c - Pad Configuration Register D (Pads 12-15)"]
pub padregd: PADREGD,
#[doc = "0x10 - Pad Configuration Register E (Pads 16-19)"]
pub padrege: PADREGE,
#[doc = "0x14 - Pad Configuration Register F (Pads 20-23)"]
pub padregf: PADREGF,
#[doc = "0x18 - Pad Configuration Register G (Pads 24-27)"]
pub padregg: PADREGG,
#[doc = "0x1c - Pad Configuration Register H (Pads 28-31)"]
pub padregh: PADREGH,
#[doc = "0x20 - Pad Configuration Register I (Pads 32-25)"]
pub padregi: PADREGI,
#[doc = "0x24 - Pad Configuration Register J (Pads 36-39)"]
pub padregj: PADREGJ,
#[doc = "0x28 - Pad Configuration Register K (Pads 40-43)"]
pub padregk: PADREGK,
#[doc = "0x2c - Pad Configuration Register L (Pads 44-47)"]
pub padregl: PADREGL,
#[doc = "0x30 - Pad Configuration Register M (Pads 47-48)"]
pub padregm: PADREGM,
_reserved0: [u8; 12usize],
#[doc = "0x40 - GPIO Configuration Register A (Pads 0-7)"]
pub cfga: CFGA,
#[doc = "0x44 - GPIO Configuration Register B (Pads 8-15)"]
pub cfgb: CFGB,
#[doc = "0x48 - GPIO Configuration Register C (Pads 16-23)"]
pub cfgc: CFGC,
#[doc = "0x4c - GPIO Configuration Register D (Pads 24-31)"]
pub cfgd: CFGD,
#[doc = "0x50 - GPIO Configuration Register E (Pads 32-39)"]
pub cfge: CFGE,
#[doc = "0x54 - GPIO Configuration Register F (Pads 40 -47)"]
pub cfgf: CFGF,
#[doc = "0x58 - GPIO Configuration Register G (Pads 48-49)"]
pub cfgg: CFGG,
_reserved1: [u8; 4usize],
#[doc = "0x60 - Key Register for all pad configuration registers"]
pub padkey: PADKEY,
_reserved2: [u8; 28usize],
#[doc = "0x80 - GPIO Input Register A"]
pub rda: RDA,
#[doc = "0x84 - GPIO Input Register B"]
pub rdb: RDB,
#[doc = "0x88 - GPIO Output Register A"]
pub wta: WTA,
#[doc = "0x8c - GPIO Output Register B"]
pub wtb: WTB,
#[doc = "0x90 - GPIO Output Register A Set"]
pub wtsa: WTSA,
#[doc = "0x94 - GPIO Output Register B Set"]
pub wtsb: WTSB,
#[doc = "0x98 - GPIO Output Register A Clear"]
pub wtca: WTCA,
#[doc = "0x9c - GPIO Output Register B Clear"]
pub wtcb: WTCB,
#[doc = "0xa0 - GPIO Enable Register A"]
pub ena: ENA,
#[doc = "0xa4 - GPIO Enable Register B"]
pub enb: ENB,
#[doc = "0xa8 - GPIO Enable Register A Set"]
pub ensa: ENSA,
#[doc = "0xac - GPIO Enable Register B Set"]
pub ensb: ENSB,
_reserved3: [u8; 4usize],
#[doc = "0xb4 - GPIO Enable Register A Clear"]
pub enca: ENCA,
#[doc = "0xb8 - GPIO Enable Register B Clear"]
pub encb: ENCB,
#[doc = "0xbc - STIMER Capture Control"]
pub stmrcap: STMRCAP,
#[doc = "0xc0 - IOM0 Flow Control IRQ Select"]
pub iom0irq: IOM0IRQ,
#[doc = "0xc4 - IOM1 Flow Control IRQ Select"]
pub iom1irq: IOM1IRQ,
#[doc = "0xc8 - IOM2 Flow Control IRQ Select"]
pub iom2irq: IOM2IRQ,
#[doc = "0xcc - IOM3 Flow Control IRQ Select"]
pub iom3irq: IOM3IRQ,
#[doc = "0xd0 - IOM4 Flow Control IRQ Select"]
pub iom4irq: IOM4IRQ,
#[doc = "0xd4 - IOM5 Flow Control IRQ Select"]
pub iom5irq: IOM5IRQ,
#[doc = "0xd8 - BLEIF Flow Control IRQ Select"]
pub bleifirq: BLEIFIRQ,
#[doc = "0xdc - GPIO Observation Mode Sample register"]
pub gpioobs: GPIOOBS,
#[doc = "0xe0 - Alternate Pad Configuration reg0 (Pads 3,2,1,0)"]
pub altpadcfga: ALTPADCFGA,
#[doc = "0xe4 - Alternate Pad Configuration reg1 (Pads 7,6,5,4)"]
pub altpadcfgb: ALTPADCFGB,
#[doc = "0xe8 - Alternate Pad Configuration reg2 (Pads 11,10,9,8)"]
pub altpadcfgc: ALTPADCFGC,
#[doc = "0xec - Alternate Pad Configuration reg3 (Pads 15,14,13,12)"]
pub altpadcfgd: ALTPADCFGD,
#[doc = "0xf0 - Alternate Pad Configuration reg4 (Pads 19,18,17,16)"]
pub altpadcfge: ALTPADCFGE,
#[doc = "0xf4 - Alternate Pad Configuration reg5 (Pads 23,22,21,20)"]
pub altpadcfgf: ALTPADCFGF,
#[doc = "0xf8 - Alternate Pad Configuration reg6 (Pads 27,26,25,24)"]
pub altpadcfgg: ALTPADCFGG,
#[doc = "0xfc - Alternate Pad Configuration reg7 (Pads 31,30,29,28)"]
pub altpadcfgh: ALTPADCFGH,
#[doc = "0x100 - Alternate Pad Configuration reg8 (Pads 35,34,33,32)"]
pub altpadcfgi: ALTPADCFGI,
#[doc = "0x104 - Alternate Pad Configuration reg9 (Pads 39,38,37,36)"]
pub altpadcfgj: ALTPADCFGJ,
#[doc = "0x108 - Alternate Pad Configuration reg10 (Pads 43,42,41,40)"]
pub altpadcfgk: ALTPADCFGK,
#[doc = "0x10c - Alternate Pad Configuration reg11 (Pads 47,46,45,44)"]
pub altpadcfgl: ALTPADCFGL,
#[doc = "0x110 - Alternate Pad Configuration reg12 (Pads 49,48)"]
pub altpadcfgm: ALTPADCFGM,
#[doc = "0x114 - SCARD Card Detect select"]
pub scdet: SCDET,
#[doc = "0x118 - Counter/Timer Enable Config"]
pub ctencfg: CTENCFG,
_reserved4: [u8; 228usize],
#[doc = "0x200 - GPIO Interrupt Registers 31-0: Enable"]
pub int0en: INT0EN,
#[doc = "0x204 - GPIO Interrupt Registers 31-0: Status"]
pub int0stat: INT0STAT,
#[doc = "0x208 - GPIO Interrupt Registers 31-0: Clear"]
pub int0clr: INT0CLR,
#[doc = "0x20c - GPIO Interrupt Registers 31-0: Set"]
pub int0set: INT0SET,
#[doc = "0x210 - GPIO Interrupt Registers 49-32: Enable"]
pub int1en: INT1EN,
#[doc = "0x214 - GPIO Interrupt Registers 49-32: Status"]
pub int1stat: INT1STAT,
#[doc = "0x218 - GPIO Interrupt Registers 49-32: Clear"]
pub int1clr: INT1CLR,
#[doc = "0x21c - GPIO Interrupt Registers 49-32: Set"]
pub int1set: INT1SET,
}
// Each register below is a zero-overhead wrapper around a volatile 32-bit
// cell; the matching `pub mod` (a separate generated file) holds its field
// reader/writer API. Generated code — keep struct layout untouched.
#[doc = "Pad Configuration Register A (Pads 0-3)"]
pub struct PADREGA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register A (Pads 0-3)"]
pub mod padrega;
#[doc = "Pad Configuration Register B (Pads 4-7)"]
pub struct PADREGB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register B (Pads 4-7)"]
pub mod padregb;
#[doc = "Pad Configuration Register C (Pads 8-11)"]
pub struct PADREGC {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register C (Pads 8-11)"]
pub mod padregc;
#[doc = "Pad Configuration Register D (Pads 12-15)"]
pub struct PADREGD {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register D (Pads 12-15)"]
pub mod padregd;
#[doc = "Pad Configuration Register E (Pads 16-19)"]
pub struct PADREGE {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register E (Pads 16-19)"]
pub mod padrege;
#[doc = "Pad Configuration Register F (Pads 20-23)"]
pub struct PADREGF {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register F (Pads 20-23)"]
pub mod padregf;
#[doc = "Pad Configuration Register G (Pads 24-27)"]
pub struct PADREGG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register G (Pads 24-27)"]
pub mod padregg;
#[doc = "Pad Configuration Register H (Pads 28-31)"]
pub struct PADREGH {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register H (Pads 28-31)"]
pub mod padregh;
// Doc typo fixed: SVD said "Pads 32-25"; register I covers pads 32-35.
#[doc = "Pad Configuration Register I (Pads 32-35)"]
pub struct PADREGI {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register I (Pads 32-35)"]
pub mod padregi;
#[doc = "Pad Configuration Register J (Pads 36-39)"]
pub struct PADREGJ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register J (Pads 36-39)"]
pub mod padregj;
#[doc = "Pad Configuration Register K (Pads 40-43)"]
pub struct PADREGK {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register K (Pads 40-43)"]
pub mod padregk;
#[doc = "Pad Configuration Register L (Pads 44-47)"]
pub struct PADREGL {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register L (Pads 44-47)"]
pub mod padregl;
// Doc typo fixed: SVD said "Pads 47-48"; after PADREGL (44-47) register M
// covers pads 48-49 (matches CFGG / ALTPADCFGM).
#[doc = "Pad Configuration Register M (Pads 48-49)"]
pub struct PADREGM {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Pad Configuration Register M (Pads 48-49)"]
pub mod padregm;
#[doc = "GPIO Configuration Register A (Pads 0-7)"]
pub struct CFGA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register A (Pads 0-7)"]
pub mod cfga;
#[doc = "GPIO Configuration Register B (Pads 8-15)"]
pub struct CFGB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register B (Pads 8-15)"]
pub mod cfgb;
#[doc = "GPIO Configuration Register C (Pads 16-23)"]
pub struct CFGC {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register C (Pads 16-23)"]
pub mod cfgc;
#[doc = "GPIO Configuration Register D (Pads 24-31)"]
pub struct CFGD {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register D (Pads 24-31)"]
pub mod cfgd;
#[doc = "GPIO Configuration Register E (Pads 32-39)"]
pub struct CFGE {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register E (Pads 32-39)"]
pub mod cfge;
#[doc = "GPIO Configuration Register F (Pads 40-47)"]
pub struct CFGF {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register F (Pads 40-47)"]
pub mod cfgf;
#[doc = "GPIO Configuration Register G (Pads 48-49)"]
pub struct CFGG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Configuration Register G (Pads 48-49)"]
pub mod cfgg;
#[doc = "Key Register for all pad configuration registers"]
pub struct PADKEY {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Key Register for all pad configuration registers"]
pub mod padkey;
#[doc = "GPIO Input Register A"]
pub struct RDA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Input Register A"]
pub mod rda;
#[doc = "GPIO Input Register B"]
pub struct RDB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Input Register B"]
pub mod rdb;
#[doc = "GPIO Output Register A"]
pub struct WTA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Output Register A"]
pub mod wta;
#[doc = "GPIO Output Register B"]
pub struct WTB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Output Register B"]
pub mod wtb;
#[doc = "GPIO Output Register A Set"]
pub struct WTSA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Output Register A Set"]
pub mod wtsa;
#[doc = "GPIO Output Register B Set"]
pub struct WTSB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Output Register B Set"]
pub mod wtsb;
#[doc = "GPIO Output Register A Clear"]
pub struct WTCA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Output Register A Clear"]
pub mod wtca;
#[doc = "GPIO Output Register B Clear"]
pub struct WTCB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Output Register B Clear"]
pub mod wtcb;
#[doc = "GPIO Enable Register A"]
pub struct ENA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Enable Register A"]
pub mod ena;
#[doc = "GPIO Enable Register B"]
pub struct ENB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Enable Register B"]
pub mod enb;
#[doc = "GPIO Enable Register A Set"]
pub struct ENSA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Enable Register A Set"]
pub mod ensa;
#[doc = "GPIO Enable Register B Set"]
pub struct ENSB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Enable Register B Set"]
pub mod ensb;
#[doc = "GPIO Enable Register A Clear"]
pub struct ENCA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Enable Register A Clear"]
pub mod enca;
#[doc = "GPIO Enable Register B Clear"]
pub struct ENCB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Enable Register B Clear"]
pub mod encb;
#[doc = "STIMER Capture Control"]
pub struct STMRCAP {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "STIMER Capture Control"]
pub mod stmrcap;
#[doc = "IOM0 Flow Control IRQ Select"]
pub struct IOM0IRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "IOM0 Flow Control IRQ Select"]
pub mod iom0irq;
#[doc = "IOM1 Flow Control IRQ Select"]
pub struct IOM1IRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "IOM1 Flow Control IRQ Select"]
pub mod iom1irq;
#[doc = "IOM2 Flow Control IRQ Select"]
pub struct IOM2IRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "IOM2 Flow Control IRQ Select"]
pub mod iom2irq;
#[doc = "IOM3 Flow Control IRQ Select"]
pub struct IOM3IRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "IOM3 Flow Control IRQ Select"]
pub mod iom3irq;
#[doc = "IOM4 Flow Control IRQ Select"]
pub struct IOM4IRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "IOM4 Flow Control IRQ Select"]
pub mod iom4irq;
#[doc = "IOM5 Flow Control IRQ Select"]
pub struct IOM5IRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "IOM5 Flow Control IRQ Select"]
pub mod iom5irq;
#[doc = "BLEIF Flow Control IRQ Select"]
pub struct BLEIFIRQ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "BLEIF Flow Control IRQ Select"]
pub mod bleifirq;
#[doc = "GPIO Observation Mode Sample register"]
pub struct GPIOOBS {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Observation Mode Sample register"]
pub mod gpioobs;
#[doc = "Alternate Pad Configuration reg0 (Pads 3,2,1,0)"]
pub struct ALTPADCFGA {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg0 (Pads 3,2,1,0)"]
pub mod altpadcfga;
#[doc = "Alternate Pad Configuration reg1 (Pads 7,6,5,4)"]
pub struct ALTPADCFGB {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg1 (Pads 7,6,5,4)"]
pub mod altpadcfgb;
#[doc = "Alternate Pad Configuration reg2 (Pads 11,10,9,8)"]
pub struct ALTPADCFGC {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg2 (Pads 11,10,9,8)"]
pub mod altpadcfgc;
#[doc = "Alternate Pad Configuration reg3 (Pads 15,14,13,12)"]
pub struct ALTPADCFGD {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg3 (Pads 15,14,13,12)"]
pub mod altpadcfgd;
#[doc = "Alternate Pad Configuration reg4 (Pads 19,18,17,16)"]
pub struct ALTPADCFGE {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg4 (Pads 19,18,17,16)"]
pub mod altpadcfge;
#[doc = "Alternate Pad Configuration reg5 (Pads 23,22,21,20)"]
pub struct ALTPADCFGF {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg5 (Pads 23,22,21,20)"]
pub mod altpadcfgf;
#[doc = "Alternate Pad Configuration reg6 (Pads 27,26,25,24)"]
pub struct ALTPADCFGG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg6 (Pads 27,26,25,24)"]
pub mod altpadcfgg;
#[doc = "Alternate Pad Configuration reg7 (Pads 31,30,29,28)"]
pub struct ALTPADCFGH {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg7 (Pads 31,30,29,28)"]
pub mod altpadcfgh;
#[doc = "Alternate Pad Configuration reg8 (Pads 35,34,33,32)"]
pub struct ALTPADCFGI {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg8 (Pads 35,34,33,32)"]
pub mod altpadcfgi;
#[doc = "Alternate Pad Configuration reg9 (Pads 39,38,37,36)"]
pub struct ALTPADCFGJ {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg9 (Pads 39,38,37,36)"]
pub mod altpadcfgj;
#[doc = "Alternate Pad Configuration reg10 (Pads 43,42,41,40)"]
pub struct ALTPADCFGK {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg10 (Pads 43,42,41,40)"]
pub mod altpadcfgk;
#[doc = "Alternate Pad Configuration reg11 (Pads 47,46,45,44)"]
pub struct ALTPADCFGL {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg11 (Pads 47,46,45,44)"]
pub mod altpadcfgl;
#[doc = "Alternate Pad Configuration reg12 (Pads 49,48)"]
pub struct ALTPADCFGM {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Alternate Pad Configuration reg12 (Pads 49,48)"]
pub mod altpadcfgm;
#[doc = "SCARD Card Detect select"]
pub struct SCDET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "SCARD Card Detect select"]
pub mod scdet;
#[doc = "Counter/Timer Enable Config"]
pub struct CTENCFG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Counter/Timer Enable Config"]
pub mod ctencfg;
#[doc = "GPIO Interrupt Registers 31-0: Enable"]
pub struct INT0EN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 31-0: Enable"]
pub mod int0en;
#[doc = "GPIO Interrupt Registers 31-0: Status"]
pub struct INT0STAT {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 31-0: Status"]
pub mod int0stat;
#[doc = "GPIO Interrupt Registers 31-0: Clear"]
pub struct INT0CLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 31-0: Clear"]
pub mod int0clr;
#[doc = "GPIO Interrupt Registers 31-0: Set"]
pub struct INT0SET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 31-0: Set"]
pub mod int0set;
#[doc = "GPIO Interrupt Registers 49-32: Enable"]
pub struct INT1EN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 49-32: Enable"]
pub mod int1en;
#[doc = "GPIO Interrupt Registers 49-32: Status"]
pub struct INT1STAT {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 49-32: Status"]
pub mod int1stat;
#[doc = "GPIO Interrupt Registers 49-32: Clear"]
pub struct INT1CLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 49-32: Clear"]
pub mod int1clr;
#[doc = "GPIO Interrupt Registers 49-32: Set"]
pub struct INT1SET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "GPIO Interrupt Registers 49-32: Set"]
pub mod int1set;
|
// 10 and 100 don't change the sum.
// 2, 20 doubles the first digit
// 3, 30 triples the first digit
// 4, 40 quadruples ...
// combinatorics is lost on me...
use num::{BigUint};
/// Project Euler 20: print the sum of the decimal digits of 100!.
fn main() {
    let result = factorial(BigUint::from(100_u32));
    // Render the big integer as decimal text and add up its digits.
    // `.sum()` replaces the manual `fold(0, |sum, x| sum + x)`.
    let sum: u32 = result
        .to_string()
        .chars()
        .map(|c| c.to_digit(10).unwrap())
        .sum();
    println!("{}", sum);
}
fn factorial(x: BigUint) -> BigUint {
let one = BigUint::from(1_u32);
let zero = BigUint::from(0_u32);
if x == zero || x == one {
return BigUint::from(1_u32);
} else {
return factorial(x.clone() - BigUint::from(1_u32)) * x;
}
} |
// Crate-internal plumbing: re-export the parent's prelude and declare the
// submodules that make up this component. All are `pub(crate)` — nothing
// here is part of the public API.
pub(crate) use super::prelude;
pub(crate) mod base;
pub(crate) mod helper;
pub(crate) mod session;
|
//! A camera for viewing our world.
use crate::{
ray::Ray,
util::deg_to_rad,
vec3,
vec3::{Axis::*, Vec3},
};
use rand::Rng;
/// A simple axis-aligned camera.
#[derive(Debug, Copy, Clone)]
pub struct Camera {
    /// The lower-left corner of our "screen", in relation to the camera's
    /// `origin`.
    pub lower_left_corner: Vec3,
    /// The horizontal width of our "screen".
    pub horizontal: Vec3,
    /// The vertical height of our "screen".
    pub vertical: Vec3,
    /// The location of our camera.
    pub origin: Vec3,
    /// Horizontal component of orthogonal basis.
    u: Vec3,
    /// Vertical component of orthogonal basis.
    v: Vec3,
    /// Depth-wise component of orthogonal basis (the camera looks in -w).
    w: Vec3,
    /// The radius of the lens (half the aperture passed to `new`).
    lens_radius: f32,
    /// The time that the camera starts capturing an image.
    pub time0: f32,
    /// The time that the camera stops capturing an image.
    pub time1: f32,
}
impl Camera {
    /// Create a new camera.
    ///
    /// - `lookfrom` is the point where the camera is in the world.
    /// - `lookat` is the point that the camera is looking at.
    /// - `vup` is the camera's upwards vector, which can change things like the
    ///   angle the camera is rolled at.
    /// - `vfov` is the top-to-bottom field of view, in degrees.
    /// - `aspect` is the aspect ratio, width:height.
    /// - `aperture` is the camera's aperture.
    /// - `focus_distance` is the distance from the camera that is in focus.
    /// - `time0` is the time that the camera starts capturing an image.
    /// - `time1` is the time that the camera stops capturing an image.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        lookfrom: Vec3,
        lookat: Vec3,
        vup: Vec3,
        vfov: f32,
        aspect: f32,
        aperture: f32,
        focus_distance: f32,
        time0: f32,
        time1: f32,
    ) -> Self {
        // Half the screen height/width at unit distance, from the vertical
        // field of view; width follows from the aspect ratio.
        let theta = deg_to_rad(vfov);
        let half_height = (theta / 2.0).tan();
        let half_width = aspect * half_height;
        // Find an orthonormal basis {u,v,w} to describe our camera's
        // orientation. Note that vup, v, and w are all in the same plane. Our
        // camera will face point lookat, which is in the -w direction.
        let w = (lookfrom - lookat).unit_vector();
        let u = vup.cross(&w).unit_vector();
        let v = w.cross(&u);
        Self {
            // The screen is placed `focus_distance` in front of the camera,
            // scaled so objects at that distance are in sharp focus.
            lower_left_corner: lookfrom
                - half_width * focus_distance * u
                - half_height * focus_distance * v
                - focus_distance * w,
            horizontal: 2.0 * half_width * focus_distance * u,
            vertical: 2.0 * half_height * focus_distance * v,
            origin: lookfrom,
            u,
            v,
            w,
            lens_radius: aperture / 2.0,
            // Guard against an inverted interval: if the caller passes a
            // start time after the end time, clamp the start down to time1.
            time0: if time0 > time1 { time1 } else { time0 },
            time1,
        }
    }
    /// Returns a ray that starts at the camera's origin and passes through
    /// screen coordinate (s, t). Will change starting location based on
    /// aperture of the camera and focal length.
    pub fn get_ray<R: Rng + ?Sized>(&self, rng: &mut R, s: f32, t: f32) -> Ray {
        // Jitter the ray origin within the lens disk to model depth of field.
        let rd = self.lens_radius * Vec3::random_in_unit_disk(rng);
        let offset = self.u * rd[X] + self.v * rd[Y];
        // Send the ray out at a random time between time0 and time1:
        // a (near-)zero-length exposure uses time0 directly, since sampling
        // an empty range would panic.
        // NOTE(review): the two-argument `gen_range(lo, hi)` is the
        // pre-0.8 `rand` API — keep the crate version pinned accordingly.
        let time = if (self.time1 - self.time0).abs() < f32::EPSILON {
            self.time0
        } else {
            rng.gen_range(self.time0, self.time1)
        };
        Ray::new(
            self.origin + offset,
            self.lower_left_corner + (s * self.horizontal) + (t * self.vertical)
                - self.origin
                - offset,
            time,
        )
    }
}
impl Default for Camera {
fn default() -> Self {
Camera::new(
vec3!(0.0, 0.0, 0.0),
vec3!(0.0, 0.0, -1.0),
vec3!(0.0, 1.0, 0.0),
90.0,
2.0,
1.0,
1.0,
0.0,
0.0,
)
}
}
|
extern crate serde_json;
use std::env;
use std::fs;
use std::io::prelude::*;
use std::io::BufReader;
use std::process::Command;
use self::serde_json::{Error, Value};
/// Compiles the C-API test harness, runs it for `tx_name`, and returns the
/// text the harness wrote to its output file.
///
/// Reads the fixture from `ctest/inputs/<tx_name>.json` (relative to the
/// current working directory), writes results to `<tmp>/<tx_name>.txt`, and
/// deletes that scratch file before returning.
///
/// # Panics
/// Panics if compilation, execution, or any file operation fails — this is
/// test-support code, so failing fast is the intended behavior.
pub fn run(tx_name: &str) -> String {
    let current_dir = env::current_dir().unwrap();
    let current_dir = current_dir.as_path();
    let input_file = current_dir
        .join("ctest")
        .join("inputs")
        .join(tx_name.to_owned() + ".json");
    // Scratch file in the OS temp dir ("outpit" typo fixed).
    let mut output_file = env::temp_dir();
    output_file.push(tx_name.to_owned() + ".txt");
    let output = Command::new("./compile_tests.sh")
        .current_dir(current_dir.join("ctest"))
        .output()
        .expect("failed to compile capi test executable.");
    // `assert!(cond, format!(..))` is deprecated and rejected by the 2021
    // edition; pass the format string and arguments directly instead.
    assert!(output.status.success(), "compilation failed {:?}", output);
    let output = Command::new("./test")
        .current_dir(current_dir.join("ctest"))
        .arg(tx_name)
        .arg(input_file)
        .arg(&output_file)
        .output()
        .expect("failed to run test executable");
    assert!(output.status.success(), "running test failed {:?}", output);
    // One-shot read replaces the manual File::open + BufReader dance and
    // preserves the underlying error in the panic message.
    let contents = fs::read_to_string(&output_file).expect("failed to read test output file");
    assert!(fs::remove_file(&output_file).is_ok());
    contents
}
/// Loads `ctest/inputs/<tx_name>.json` (relative to the current working
/// directory) and parses it as JSON.
///
/// Returns the parse error if the file is not valid JSON.
///
/// # Panics
/// Panics if the file cannot be read; the original asserted `is_ok()` and
/// then `unwrap`ped, which discarded the underlying I/O error — the panic
/// message now includes the path and cause.
pub fn read_inputs(tx_name: &str) -> Result<Value, Error> {
    let current_dir = env::current_dir().unwrap();
    let file_path = current_dir
        .join("ctest")
        .join("inputs")
        .join(tx_name.to_owned() + ".json");
    // One-shot read replaces File::open + BufReader + read_to_string.
    let contents = fs::read_to_string(&file_path)
        .unwrap_or_else(|e| panic!("failed to read {:?}: {}", file_path, e));
    let v: Value = serde_json::from_str(&contents)?;
    Ok(v)
}
/// Renders `bytes` as a lowercase hex string, two digits per byte
/// (e.g. `[0x00, 0xff]` -> `"00ff"`).
pub fn hex_string(bytes: Vec<u8>) -> String {
    // Collect the per-byte "%02x" pieces straight into one String instead
    // of building an intermediate Vec<String> and joining it.
    bytes.iter().map(|byte| format!("{:02x}", byte)).collect()
}
|
use std::fs::File;
use std::io::ErrorKind;
use std::io;
use std::io::Read;
use std::fs;
/// Reads the username from `hello.txt` and prints it, aborting with a
/// clear message on any I/O error.
fn main() {
    let username = read_username_shortest("hello.txt").expect("Failed to read username");
    println!("{}", username);
}
/// Demonstrates an unconditional `panic!`: calling this always aborts the
/// current thread with the message below.
fn call_panic() {
    panic!("Crash and burn!");
}
/// Demonstrates a panic from out-of-bounds indexing: the vector has three
/// elements, so `v[42]` always panics and this function never returns a value.
fn panic_index() -> i32 {
    let v = vec![1, 2, 3];
    v[42]
}
/// Opens `file_name`, creating the file first if it does not exist
/// (nested-`match` style of error recovery).
///
/// # Panics
/// Panics if the open fails for any reason other than `NotFound`, or if
/// creating the missing file also fails.
fn open_file(file_name: String) {
    // Bound as `_f` to silence the unused-variable warning: the handle is
    // dropped immediately — this function only demonstrates error handling.
    // ("Tryed" typo in the panic message fixed to "Tried".)
    let _f = match File::open(&file_name) {
        Ok(file) => file,
        Err(error) => match error.kind() {
            // File is missing: create it and use the fresh handle.
            ErrorKind::NotFound => match File::create(&file_name) {
                Ok(fc) => fc,
                Err(error) => panic!("Tried to create the file but there was a problem {:?}", error)
            },
            // Anything else (permissions, etc.) is unrecoverable here.
            other_error => panic!("There was a problem opening the file {:?}", other_error)
        }
    };
}
/// Same behavior as `open_file` — open `file_name`, creating it when it is
/// missing — but written with closures (`map_err`/`unwrap_or_else`) instead
/// of nested `match`es.
///
/// # Panics
/// Panics if the open fails for any reason other than `NotFound`, or if
/// creating the missing file also fails.
fn seasoned_open_file(file_name: String) {
    // ("Tryed" typo in the panic message fixed to "Tried".)
    let _f = File::open(&file_name).map_err( |error| {
        if error.kind() == ErrorKind::NotFound {
            File::create(&file_name).unwrap_or_else( |error| {
                panic!("Tried to create the file but there was a problem {:?}", error)
            })
        } else {
            panic!("There was a problem opening the file {:?}", error)
        }
    });
}
/// Demonstrates `unwrap()`: panics with the default `Result::unwrap`
/// message if the file cannot be opened; the handle is dropped immediately.
fn unwrap_open_file(file_name: &str) {
    File::open(file_name).unwrap();
}
/// Demonstrates `expect()`: like `unwrap_open_file`, but the panic carries
/// the custom message below instead of the generic unwrap message.
fn expect_open_file(file_name: &str) {
    File::open(file_name).expect("Failed to open file");
}
/// Reads the entire contents of `file_name`, propagating every error to the
/// caller with explicit `match` statements (no panics, no `?`).
fn read_username(file_name: &str) -> Result<String, io::Error> {
    match File::open(file_name) {
        Err(error) => Err(error),
        Ok(mut file) => {
            let mut username = String::new();
            match file.read_to_string(&mut username) {
                Err(e) => Err(e),
                Ok(_) => Ok(username),
            }
        }
    }
}
/// Reads the entire contents of `file_name`, propagating open and read
/// errors to the caller with the `?` operator.
fn read_username_short(file_name: &str) -> Result<String, io::Error> {
    let mut username = String::new();
    File::open(file_name)?.read_to_string(&mut username)?;
    Ok(username)
}
/// The most concise form: `fs::read_to_string` bundles open + read into a
/// single call and propagates any I/O error to the caller.
fn read_username_shortest(file_name: &str) -> Result<String, io::Error> {
    fs::read_to_string(file_name)
}
use socketcan_isotp::{self, IsoTpSocket, StandardId};
/// Opens an ISO-TP socket on `vcan0` (rx id 0x123, tx id 0x321), blocks for
/// one incoming transfer, and hex-dumps the received payload.
///
/// # Errors
/// Propagates any `socketcan_isotp::Error` from opening or reading the
/// socket. Panics only if the hard-coded CAN ids were invalid.
fn main() -> Result<(), socketcan_isotp::Error> {
    let mut tp_socket = IsoTpSocket::open(
        "vcan0",
        StandardId::new(0x123).expect("Invalid rx id"),
        StandardId::new(0x321).expect("Invalid tx id"),
    )?;
    let buffer = tp_socket.read()?;
    println!("read {} bytes", buffer.len());
    for x in buffer {
        print!("{:X?} ", x);
    }
    // `println!()` is the idiomatic way to emit a bare newline;
    // `println!("")` trips clippy's println_empty_string lint.
    println!();
    Ok(())
}
|
use pattern::{file_name_only, strip_grep_filepath, tag_name_only};
/// A tuple of match text piece (matching_text, offset_of_matching_text).
pub type FuzzyText<'a> = (&'a str, usize);
/// Strategy selecting which portion of a line the fuzzy matcher runs on
/// (see [`SourceItem::get_fuzzy_text`] for how each variant is applied).
#[derive(Debug, Clone, Copy)]
pub enum MatchType {
    // Match against the entire raw line.
    Full,
    // Match only the tag-name portion of the line.
    TagName,
    // Match only the file-name portion of the line.
    FileName,
    // Match the line with its grep-style file path stripped.
    IgnoreFilePath,
}
impl std::str::FromStr for MatchType {
    type Err = ();
    /// Parsing never fails: unrecognized strings fall back to `Full` via
    /// the `From<T: AsRef<str>>` conversion below.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self::from(s))
    }
}
impl<T: AsRef<str>> From<T> for MatchType {
    fn from(match_type: T) -> Self {
        // Case-insensitive keyword lookup; anything unrecognized degrades
        // to `Full` rather than erroring.
        match match_type.as_ref().to_lowercase().as_str() {
            "full" => Self::Full,
            "tagname" => Self::TagName,
            "filename" => Self::FileName,
            "ignorefilepath" => Self::IgnoreFilePath,
            _ => Self::Full,
        }
    }
}
/// Text used in the matching algorithm.
pub trait MatchingText<'a> {
    /// Initial full text.
    fn full_text(&self) -> &str;
    /// Text for calculating the bonus score.
    ///
    /// Defaults to the full text; implementors may narrow it.
    fn bonus_text(&self) -> &str {
        self.full_text()
    }
    /// Text for applying the fuzzy match algorithm.
    ///
    /// The fuzzy matching process only happens when Some(_) is returned.
    fn fuzzy_text(&self, match_ty: &MatchType) -> Option<FuzzyText>;
}
// A SourceItem matches on its raw line; the fuzzy text is resolved through
// `get_fuzzy_text`, which honors any pre-computed fuzzy text.
impl<'a> MatchingText<'a> for SourceItem {
    fn full_text(&self) -> &str {
        &self.raw
    }
    fn fuzzy_text(&self, match_type: &MatchType) -> Option<FuzzyText> {
        self.get_fuzzy_text(match_type)
    }
}
// A plain &str is its own full text and always fuzzy-matches on the whole
// string at offset 0, regardless of the requested match type.
impl<'a> MatchingText<'a> for &'a str {
    fn full_text(&self) -> &str {
        self
    }
    fn fuzzy_text(&self, _match_type: &MatchType) -> Option<FuzzyText> {
        Some((self, 0))
    }
}
/// This type represents the item for doing the filtering pipeline.
#[derive(Debug, Clone)]
pub struct SourceItem {
    /// Raw line from the initial input stream.
    pub raw: String,
    /// Text for performing the fuzzy match algorithm.
    ///
    /// Could be initialized on creating a new [`SourceItem`].
    /// The `usize` is the offset of that text within `raw`.
    pub fuzzy_text: Option<(String, usize)>,
    /// Text for displaying on a window with limited size.
    pub display_text: Option<String>,
}
// NOTE: do not use it when you are dealing with a large number of items.
impl From<&str> for SourceItem {
    fn from(s: &str) -> Self {
        // Allocate an owned copy, then defer to the `From<String>` impl.
        s.to_string().into()
    }
}
// The canonical constructor from an owned line: no fuzzy or display text is
// pre-computed; both are derived lazily when needed.
impl From<String> for SourceItem {
    fn from(raw: String) -> Self {
        Self {
            raw,
            fuzzy_text: None,
            display_text: None,
        }
    }
}
impl SourceItem {
/// Constructs a new instance of [`SourceItem`].
pub fn new(
raw: String,
fuzzy_text: Option<(String, usize)>,
display_text: Option<String>,
) -> Self {
Self {
raw,
fuzzy_text,
display_text,
}
}
pub fn display_text(&self) -> &str {
if let Some(ref text) = self.display_text {
text
} else {
&self.raw
}
}
pub fn fuzzy_text_or_default(&self) -> &str {
if let Some((ref text, _)) = self.fuzzy_text {
text
} else {
&self.raw
}
}
pub fn get_fuzzy_text(&self, match_ty: &MatchType) -> Option<FuzzyText> {
if let Some((ref text, offset)) = self.fuzzy_text {
return Some((text, offset));
}
match match_ty {
MatchType::Full => Some((&self.raw, 0)),
MatchType::TagName => tag_name_only(self.raw.as_str()).map(|s| (s, 0)),
MatchType::FileName => file_name_only(self.raw.as_str()),
MatchType::IgnoreFilePath => strip_grep_filepath(self.raw.as_str()),
}
}
}
/// This struct represents the filtered result of [`SourceItem`].
#[derive(Debug, Clone)]
pub struct FilteredItem<T = i64> {
    /// The original source item that survived the filtering pipeline.
    pub source_item: SourceItem,
    /// Filtering score.
    pub score: T,
    /// Indices of matched elements.
    ///
    /// The indices may be truncated when truncating the text.
    pub match_indices: Vec<usize>,
    /// Text for showing the final filtered result.
    ///
    /// Usually in a truncated form for fitting into the display window.
    pub display_text: Option<String>,
}
impl<I: Into<SourceItem>, T> From<(I, T, Vec<usize>)> for FilteredItem<T> {
fn from((item, score, match_indices): (I, T, Vec<usize>)) -> Self {
Self {
source_item: item.into(),
score,
match_indices,
display_text: None,
}
}
}
impl<I: Into<SourceItem>, T: Default> From<I> for FilteredItem<T> {
fn from(item: I) -> Self {
Self {
source_item: item.into(),
score: Default::default(),
match_indices: Default::default(),
display_text: None,
}
}
}
impl<T> FilteredItem<T> {
pub fn new<I: Into<SourceItem>>(item: I, score: T, match_indices: Vec<usize>) -> Self {
(item, score, match_indices).into()
}
/// Untruncated display text.
pub fn source_item_display_text(&self) -> &str {
self.source_item.display_text()
}
/// Maybe truncated display text.
pub fn display_text(&self) -> &str {
if let Some(ref text) = self.display_text {
text
} else {
self.source_item.display_text()
}
}
/// Returns the match indices shifted by `offset`.
pub fn shifted_indices(&self, offset: usize) -> Vec<usize> {
self.match_indices.iter().map(|x| x + offset).collect()
}
}
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code, clippy::char_lit_as_u8, clippy::needless_bool)]
/// Should not trigger an ICE in `SpanlessHash` / `consts::constant`
///
/// Issue: https://github.com/rust-lang/rust-clippy/issues/2499
// NOTE(review): this is a compile-only regression fixture — the exact shape of
// the match arms, slice comparison, and `['0' as u8; 4]` repeat-array is what
// exercises the historical ICE, so the expressions must not be "cleaned up"
// (the file-level allow() covers the deliberate lints).
fn f(s: &[u8]) -> bool {
    let t = s[0] as char;
    match t {
        'E' | 'W' => {},
        'T' => {
            if s[0..4] != ['0' as u8; 4] {
                return false;
            } else {
                return true;
            }
        },
        _ => {
            return false;
        },
    }
    true
}
// Empty entry point: the fixture only needs to compile and be linted.
fn main() {}
|
/// Builds the acronym of `phrase`: the first character, every character that
/// follows a space or hyphen, and every uppercase letter that follows a
/// lowercase one (camelCase boundary), all uppercased.
///
/// Works on bytes, so only ASCII input is handled meaningfully.
/// Returns an empty string for an empty phrase.
pub fn abbreviate(phrase: &str) -> String {
    let bytes = phrase.as_bytes();
    let mut acronym = String::new();
    // Iterating an empty slice naturally yields "", so no explicit
    // empty-input early return is needed.
    for (i, &b) in bytes.iter().enumerate() {
        let starts_word = i == 0 || bytes[i - 1] == b' ' || bytes[i - 1] == b'-';
        let camel_boundary = i > 0 && b.is_ascii_uppercase() && bytes[i - 1].is_ascii_lowercase();
        if starts_word || camel_boundary {
            acronym.push(b as char);
        }
    }
    acronym.to_uppercase()
}
|
/* these are some handy x86 functions that do important, manipulat-y hardware-y things */
/// Writes `val` to the x86 I/O `port` (the `out` instruction).
///
/// # Safety
/// Port I/O is outside Rust's memory model; the caller must ensure writing to
/// this port is sound on the current hardware.
pub unsafe fn outportb(port: u16, val: u8) {
    // Rewritten in the stable `core::arch::asm!` syntax (Intel operand order);
    // the original pre-2018 `asm!("outb %al, %dx" : : ...)` form no longer
    // compiles on any supported Rust toolchain.
    core::arch::asm!(
        "out dx, al",
        in("dx") port,
        in("al") val,
        options(nomem, nostack, preserves_flags),
    );
}
/// Reads one byte from the x86 I/O `port` (the `in` instruction).
///
/// # Safety
/// Port I/O is outside Rust's memory model; the caller must ensure reading
/// from this port is sound on the current hardware.
pub unsafe fn inportb(port: u16) -> u8 {
    let ret: u8;
    // Stable `core::arch::asm!` replacement for the obsolete pre-2018
    // `asm!("inb %dx, %al" : "={ax}"(ret) : ...)` form.
    core::arch::asm!(
        "in al, dx",
        in("dx") port,
        out("al") ret,
        options(nomem, nostack, preserves_flags),
    );
    ret
}
|
// Auto-generated (svd2rust-style) register declarations. Each register gets:
// a `Reg` type alias, a hidden zero-sized marker struct, `Readable`/`Writable`
// marker impls matching its hardware access rights (read-only registers lack
// `Writable`, the write-only FIFO write register lacks `Readable`), and a
// field-accessor module. ("avaliable" typo in the generated docs fixed.)
#[doc = "RCB control register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ctrl](ctrl) module"]
pub type CTRL = crate::Reg<u32, _CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CTRL;
#[doc = "`read()` method returns [ctrl::R](ctrl::R) reader structure"]
impl crate::Readable for CTRL {}
#[doc = "`write(|w| ..)` method takes [ctrl::W](ctrl::W) writer structure"]
impl crate::Writable for CTRL {}
#[doc = "RCB control register."]
pub mod ctrl;
#[doc = "RCB status register.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [status](status) module"]
pub type STATUS = crate::Reg<u32, _STATUS>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _STATUS;
#[doc = "`read()` method returns [status::R](status::R) reader structure"]
impl crate::Readable for STATUS {}
#[doc = "RCB status register."]
pub mod status;
#[doc = "Transmitter control register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tx_ctrl](tx_ctrl) module"]
pub type TX_CTRL = crate::Reg<u32, _TX_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TX_CTRL;
#[doc = "`read()` method returns [tx_ctrl::R](tx_ctrl::R) reader structure"]
impl crate::Readable for TX_CTRL {}
#[doc = "`write(|w| ..)` method takes [tx_ctrl::W](tx_ctrl::W) writer structure"]
impl crate::Writable for TX_CTRL {}
#[doc = "Transmitter control register."]
pub mod tx_ctrl;
#[doc = "Transmitter FIFO control register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tx_fifo_ctrl](tx_fifo_ctrl) module"]
pub type TX_FIFO_CTRL = crate::Reg<u32, _TX_FIFO_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TX_FIFO_CTRL;
#[doc = "`read()` method returns [tx_fifo_ctrl::R](tx_fifo_ctrl::R) reader structure"]
impl crate::Readable for TX_FIFO_CTRL {}
#[doc = "`write(|w| ..)` method takes [tx_fifo_ctrl::W](tx_fifo_ctrl::W) writer structure"]
impl crate::Writable for TX_FIFO_CTRL {}
#[doc = "Transmitter FIFO control register."]
pub mod tx_fifo_ctrl;
#[doc = "Transmitter FIFO status register.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tx_fifo_status](tx_fifo_status) module"]
pub type TX_FIFO_STATUS = crate::Reg<u32, _TX_FIFO_STATUS>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TX_FIFO_STATUS;
#[doc = "`read()` method returns [tx_fifo_status::R](tx_fifo_status::R) reader structure"]
impl crate::Readable for TX_FIFO_STATUS {}
#[doc = "Transmitter FIFO status register."]
pub mod tx_fifo_status;
#[doc = "Transmitter FIFO write register.\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tx_fifo_wr](tx_fifo_wr) module"]
pub type TX_FIFO_WR = crate::Reg<u32, _TX_FIFO_WR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TX_FIFO_WR;
#[doc = "`write(|w| ..)` method takes [tx_fifo_wr::W](tx_fifo_wr::W) writer structure"]
impl crate::Writable for TX_FIFO_WR {}
#[doc = "Transmitter FIFO write register."]
pub mod tx_fifo_wr;
#[doc = "Receiver control register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rx_ctrl](rx_ctrl) module"]
pub type RX_CTRL = crate::Reg<u32, _RX_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RX_CTRL;
#[doc = "`read()` method returns [rx_ctrl::R](rx_ctrl::R) reader structure"]
impl crate::Readable for RX_CTRL {}
#[doc = "`write(|w| ..)` method takes [rx_ctrl::W](rx_ctrl::W) writer structure"]
impl crate::Writable for RX_CTRL {}
#[doc = "Receiver control register."]
pub mod rx_ctrl;
#[doc = "Receiver FIFO control register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rx_fifo_ctrl](rx_fifo_ctrl) module"]
pub type RX_FIFO_CTRL = crate::Reg<u32, _RX_FIFO_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RX_FIFO_CTRL;
#[doc = "`read()` method returns [rx_fifo_ctrl::R](rx_fifo_ctrl::R) reader structure"]
impl crate::Readable for RX_FIFO_CTRL {}
#[doc = "`write(|w| ..)` method takes [rx_fifo_ctrl::W](rx_fifo_ctrl::W) writer structure"]
impl crate::Writable for RX_FIFO_CTRL {}
#[doc = "Receiver FIFO control register."]
pub mod rx_fifo_ctrl;
#[doc = "Receiver FIFO status register.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rx_fifo_status](rx_fifo_status) module"]
pub type RX_FIFO_STATUS = crate::Reg<u32, _RX_FIFO_STATUS>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RX_FIFO_STATUS;
#[doc = "`read()` method returns [rx_fifo_status::R](rx_fifo_status::R) reader structure"]
impl crate::Readable for RX_FIFO_STATUS {}
#[doc = "Receiver FIFO status register."]
pub mod rx_fifo_status;
#[doc = "Receiver FIFO read register.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rx_fifo_rd](rx_fifo_rd) module"]
pub type RX_FIFO_RD = crate::Reg<u32, _RX_FIFO_RD>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RX_FIFO_RD;
#[doc = "`read()` method returns [rx_fifo_rd::R](rx_fifo_rd::R) reader structure"]
impl crate::Readable for RX_FIFO_RD {}
#[doc = "Receiver FIFO read register."]
pub mod rx_fifo_rd;
#[doc = "Receiver FIFO read register.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rx_fifo_rd_silent](rx_fifo_rd_silent) module"]
pub type RX_FIFO_RD_SILENT = crate::Reg<u32, _RX_FIFO_RD_SILENT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RX_FIFO_RD_SILENT;
#[doc = "`read()` method returns [rx_fifo_rd_silent::R](rx_fifo_rd_silent::R) reader structure"]
impl crate::Readable for RX_FIFO_RD_SILENT {}
#[doc = "Receiver FIFO read register."]
pub mod rx_fifo_rd_silent;
#[doc = "Master interrupt request register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr](intr) module"]
pub type INTR = crate::Reg<u32, _INTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR;
#[doc = "`read()` method returns [intr::R](intr::R) reader structure"]
impl crate::Readable for INTR {}
#[doc = "`write(|w| ..)` method takes [intr::W](intr::W) writer structure"]
impl crate::Writable for INTR {}
#[doc = "Master interrupt request register."]
pub mod intr;
#[doc = "Master interrupt set request register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr_set](intr_set) module"]
pub type INTR_SET = crate::Reg<u32, _INTR_SET>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR_SET;
#[doc = "`read()` method returns [intr_set::R](intr_set::R) reader structure"]
impl crate::Readable for INTR_SET {}
#[doc = "`write(|w| ..)` method takes [intr_set::W](intr_set::W) writer structure"]
impl crate::Writable for INTR_SET {}
#[doc = "Master interrupt set request register"]
pub mod intr_set;
#[doc = "Master interrupt mask register.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr_mask](intr_mask) module"]
pub type INTR_MASK = crate::Reg<u32, _INTR_MASK>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR_MASK;
#[doc = "`read()` method returns [intr_mask::R](intr_mask::R) reader structure"]
impl crate::Readable for INTR_MASK {}
#[doc = "`write(|w| ..)` method takes [intr_mask::W](intr_mask::W) writer structure"]
impl crate::Writable for INTR_MASK {}
#[doc = "Master interrupt mask register."]
pub mod intr_mask;
#[doc = "Master interrupt masked request register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr_masked](intr_masked) module"]
pub type INTR_MASKED = crate::Reg<u32, _INTR_MASKED>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR_MASKED;
#[doc = "`read()` method returns [intr_masked::R](intr_masked::R) reader structure"]
impl crate::Readable for INTR_MASKED {}
#[doc = "Master interrupt masked request register"]
pub mod intr_masked;
#[doc = r"Register block"]
// NOTE(review): `#[repr(C)]` plus the `_reservedN` padding arrays pin each
// field to the byte offset named in its doc comment; do not reorder fields.
#[repr(C)]
pub struct RCBLL {
    #[doc = "0x00 - RCB LL control register."]
    pub ctrl: self::rcbll::CTRL,
    // Padding for offsets 0x04..=0x0f (3 unused 32-bit words).
    _reserved1: [u8; 12usize],
    #[doc = "0x10 - Master interrupt request register."]
    pub intr: self::rcbll::INTR,
    #[doc = "0x14 - Master interrupt set request register"]
    pub intr_set: self::rcbll::INTR_SET,
    #[doc = "0x18 - Master interrupt mask register."]
    pub intr_mask: self::rcbll::INTR_MASK,
    #[doc = "0x1c - Master interrupt masked request register"]
    pub intr_masked: self::rcbll::INTR_MASKED,
    #[doc = "0x20 - Address of Register#1 in Radio (MDON)"]
    pub radio_reg1_addr: self::rcbll::RADIO_REG1_ADDR,
    #[doc = "0x24 - Address of Register#2 in Radio (RSSI)"]
    pub radio_reg2_addr: self::rcbll::RADIO_REG2_ADDR,
    #[doc = "0x28 - Address of Register#3 in Radio (ACCL)"]
    pub radio_reg3_addr: self::rcbll::RADIO_REG3_ADDR,
    #[doc = "0x2c - Address of Register#4 in Radio (ACCH)"]
    pub radio_reg4_addr: self::rcbll::RADIO_REG4_ADDR,
    #[doc = "0x30 - Address of Register#5 in Radio (RSSI ENERGY)"]
    pub radio_reg5_addr: self::rcbll::RADIO_REG5_ADDR,
    // Padding for offsets 0x34..=0x3f (3 unused 32-bit words).
    _reserved10: [u8; 12usize],
    #[doc = "0x40 - N/A"]
    pub cpu_write_reg: self::rcbll::CPU_WRITE_REG,
    #[doc = "0x44 - N/A"]
    pub cpu_read_reg: self::rcbll::CPU_READ_REG,
}
#[doc = r"Register block"]
#[doc = "Radio Control Bus (RCB) & Link Layer controller"]
pub mod rcbll;
|
#![no_std]
#![allow(clippy::unused_unit)]
extern crate alloc;
use rand_core::{CryptoRng, RngCore};
use wasm_bindgen::prelude::*;
use alloc::{boxed::Box, string::ToString};
use pwbox::{pure::PureCrypto, ErasedPwBox, Eraser, Suite};
// Binding to a JavaScript CSPRNG.
// The `structural` binding looks the method up by name on the object at call
// time, so any JS object exposing `fillBytes(Uint8Array)` works.
#[wasm_bindgen]
extern "C" {
    pub type Rng;
    // Invokes the JS object's `fillBytes` method to fill `dest` with random bytes.
    #[wasm_bindgen(structural, method, js_name = "fillBytes")]
    fn random_bytes(this: &Rng, dest: &mut [u8]);
}
/// RNG based on `crypto.randomBytes()`.
struct CallbackRng(Rng);

impl RngCore for CallbackRng {
    fn next_u32(&mut self) -> u32 {
        let mut bytes = [0_u8; 4];
        self.0.random_bytes(&mut bytes);
        // Stdlib little-endian decode replaces the hand-rolled
        // shift-and-add fold (bit-identical result).
        u32::from_le_bytes(bytes)
    }

    fn next_u64(&mut self) -> u64 {
        let mut bytes = [0_u8; 8];
        self.0.random_bytes(&mut bytes);
        u64::from_le_bytes(bytes)
    }

    fn fill_bytes(&mut self, dest: &mut [u8]) {
        self.0.random_bytes(dest);
    }

    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {
        // The JS callback has no error channel on this side, so delegation
        // is treated as infallible.
        self.fill_bytes(dest);
        Ok(())
    }
}

impl CryptoRng for CallbackRng {}
/// Passphrase encryption utilities.
///
/// Owns the JS-backed RNG used when sealing new boxes; see [`Pwbox::new`]
/// for how the RNG object is supplied from JavaScript.
#[wasm_bindgen]
pub struct Pwbox {
    rng: CallbackRng,
}
#[wasm_bindgen]
impl Pwbox {
/// Initializes utils with the provided RNG.
///
/// `{ fillBytes: crypto.randomFillSync }` should be passed to the constructor in Node,
/// and `{ fillBytes: crypto.getRandomValues }` in browsers.
#[wasm_bindgen(constructor)]
pub fn new(rng: Rng) -> Self {
Self {
rng: CallbackRng(rng),
}
}
/// Encrypts `data` using a provided `passphrase`.
pub fn encrypt(&mut self, passphrase: &str, data: &[u8]) -> JsValue {
let pwbox = PureCrypto::build_box(&mut self.rng)
.seal(passphrase, data)
.unwrap();
let mut eraser = Eraser::new();
eraser.add_suite::<PureCrypto>();
let pwbox = eraser.erase(&pwbox).unwrap();
JsValue::from_serde(&pwbox).unwrap()
}
/// Decrypts encrypted box using the provided `passphrase`.
pub fn decrypt(&self, passphrase: &str, encryption: &JsValue) -> Result<Box<[u8]>, JsValue> {
let encryption: ErasedPwBox = encryption.into_serde().map_err(convert_err)?;
let mut eraser = Eraser::new();
eraser.add_suite::<PureCrypto>();
let plaintext = eraser
.restore(&encryption)
.map_err(convert_err)?
.open(passphrase)
.map_err(convert_err)?;
Ok(plaintext.to_vec().into_boxed_slice())
}
}
/// Converts any string-convertible error into a `JsValue` carrying its message.
fn convert_err<E: ToString>(error: E) -> JsValue {
    let message = error.to_string();
    JsValue::from_str(&message)
}
|
#![feature(core_intrinsics, custom_derive, plugin, custom_attribute, box_syntax)]
#![allow(unused_variables, unused_imports)]
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde;
extern crate time;
#[macro_use] extern crate itertools;
extern crate permutohedron;
pub mod traffic_protocol;
pub mod traffic_controls;
pub mod crossroad;
pub mod default_crossroad;
pub mod error;
pub mod cartesian;
pub mod signal_group;
/// Converts a boolean into an `Option<()>` so it can participate in
/// `Option` combinator chains.
trait BoolToOpt {
    fn to_opt(&self) -> Option<()>;
}

impl BoolToOpt for bool {
    fn to_opt(&self) -> Option<()> {
        match *self {
            true => Some(()),
            false => None,
        }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Aggregate error type for every operation in this generated client: each
// variant wraps (via `#[from]`) the error type of one operation module, so
// `?` converts any operation error into this enum.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    NetworkFunctions_Get(#[from] network_functions::get::Error),
    #[error(transparent)]
    NetworkFunctions_CreateOrUpdate(#[from] network_functions::create_or_update::Error),
    #[error(transparent)]
    NetworkFunctions_UpdateTags(#[from] network_functions::update_tags::Error),
    #[error(transparent)]
    NetworkFunctions_Delete(#[from] network_functions::delete::Error),
    #[error(transparent)]
    NetworkFunctions_ListBySubscription(#[from] network_functions::list_by_subscription::Error),
    #[error(transparent)]
    NetworkFunctions_ListByResourceGroup(#[from] network_functions::list_by_resource_group::Error),
    #[error(transparent)]
    Devices_Get(#[from] devices::get::Error),
    #[error(transparent)]
    Devices_CreateOrUpdate(#[from] devices::create_or_update::Error),
    #[error(transparent)]
    Devices_UpdateTags(#[from] devices::update_tags::Error),
    #[error(transparent)]
    Devices_Delete(#[from] devices::delete::Error),
    #[error(transparent)]
    Devices_ListBySubscription(#[from] devices::list_by_subscription::Error),
    #[error(transparent)]
    Devices_ListByResourceGroup(#[from] devices::list_by_resource_group::Error),
    #[error(transparent)]
    Devices_ListRegistrationKey(#[from] devices::list_registration_key::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    Vendors_Get(#[from] vendors::get::Error),
    #[error(transparent)]
    Vendors_CreateOrUpdate(#[from] vendors::create_or_update::Error),
    #[error(transparent)]
    Vendors_Delete(#[from] vendors::delete::Error),
    #[error(transparent)]
    Vendors_ListBySubscription(#[from] vendors::list_by_subscription::Error),
    #[error(transparent)]
    VendorSkus_Get(#[from] vendor_skus::get::Error),
    #[error(transparent)]
    VendorSkus_CreateOrUpdate(#[from] vendor_skus::create_or_update::Error),
    #[error(transparent)]
    VendorSkus_Delete(#[from] vendor_skus::delete::Error),
    #[error(transparent)]
    VendorSkus_List(#[from] vendor_skus::list::Error),
    #[error(transparent)]
    VendorSkuPreview_List(#[from] vendor_sku_preview::list::Error),
    #[error(transparent)]
    VendorSkuPreview_Get(#[from] vendor_sku_preview::get::Error),
    #[error(transparent)]
    VendorSkuPreview_CreateOrUpdate(#[from] vendor_sku_preview::create_or_update::Error),
    #[error(transparent)]
    VendorSkuPreview_Delete(#[from] vendor_sku_preview::delete::Error),
    #[error(transparent)]
    NetworkFunctionVendors_List(#[from] network_function_vendors::list::Error),
    #[error(transparent)]
    NetworkFunctionVendorSkus_ListByVendor(#[from] network_function_vendor_skus::list_by_vendor::Error),
    #[error(transparent)]
    NetworkFunctionVendorSkus_ListBySku(#[from] network_function_vendor_skus::list_by_sku::Error),
    #[error(transparent)]
    VendorNetworkFunctions_Get(#[from] vendor_network_functions::get::Error),
    #[error(transparent)]
    VendorNetworkFunctions_CreateOrUpdate(#[from] vendor_network_functions::create_or_update::Error),
    #[error(transparent)]
    RoleInstances_Start(#[from] role_instances::start::Error),
    #[error(transparent)]
    RoleInstances_Stop(#[from] role_instances::stop::Error),
    #[error(transparent)]
    RoleInstances_Restart(#[from] role_instances::restart::Error),
    #[error(transparent)]
    RoleInstances_Get(#[from] role_instances::get::Error),
    #[error(transparent)]
    VendorNetworkFunctions_List(#[from] vendor_network_functions::list::Error),
    #[error(transparent)]
    RoleInstances_List(#[from] role_instances::list::Error),
}
pub mod network_functions {
use super::{models, API_VERSION};
/// Fetches a single `NetworkFunction` resource by name (HTTP GET).
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    network_function_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::NetworkFunction, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/networkFunctions/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        network_function_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    // Query pair is appended before the URL is stamped into the builder below.
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::NetworkFunction =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status is surfaced as DefaultResponse with the parsed error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the `get` operation above.
pub mod get {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // Non-200 service reply, with the parsed `ErrorResponse` body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates or updates a `NetworkFunction` (HTTP PUT); the service answers
/// 200 for an update and 201 for a creation — see `create_or_update::Response`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    network_function_name: &str,
    parameters: &models::NetworkFunction,
    subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/networkFunctions/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        network_function_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token only when the config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // PUT carries the resource serialized as JSON.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::NetworkFunction = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::NetworkFunction = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        // Any other status is surfaced as DefaultResponse with the parsed error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_or_update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `create_or_update` operation above.
pub mod create_or_update {
    use super::{models, API_VERSION};
    // Both success statuses return the resulting resource body.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::NetworkFunction),
        Created201(models::NetworkFunction),
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Updates only the tags of a `NetworkFunction` (HTTP PATCH) and returns the
/// updated resource.
pub async fn update_tags(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    network_function_name: &str,
    parameters: &models::TagsObject,
    subscription_id: &str,
) -> std::result::Result<models::NetworkFunction, update_tags::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/networkFunctions/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        network_function_name
    );
    let mut url = url::Url::parse(url_str).map_err(update_tags::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when the config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update_tags::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // PATCH carries the tags object serialized as JSON.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(update_tags::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update_tags::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(update_tags::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::NetworkFunction =
                serde_json::from_slice(rsp_body).map_err(|source| update_tags::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status is surfaced as DefaultResponse with the parsed error payload.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| update_tags::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update_tags::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the `update_tags` operation above.
pub mod update_tags {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes a `NetworkFunction` (HTTP DELETE); 200/202/204 map onto the
/// `delete::Response` variants.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    network_function_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/networkFunctions/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        network_function_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Attach a bearer token only when the config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        // Success bodies are not parsed — only the status matters.
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `delete` operation above.
pub mod delete {
    use super::{models, API_VERSION};
    // 200 = deleted, 202 = deletion accepted (async), 204 = nothing to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::NetworkFunctionListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/networkFunctions",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::NetworkFunctionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<models::NetworkFunctionListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/networkFunctions",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::NetworkFunctionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod devices {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
device_name: &str,
subscription_id: &str,
) -> std::result::Result<models::Device, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
device_name: &str,
parameters: &models::Device,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Device = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Device),
Created201(models::Device),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update_tags(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
device_name: &str,
parameters: &models::TagsObject,
subscription_id: &str,
) -> std::result::Result<models::Device, update_tags::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(update_tags::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_tags::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update_tags::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_tags::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_tags::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Device =
serde_json::from_slice(rsp_body).map_err(|source| update_tags::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update_tags::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_tags::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_tags {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
device_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/devices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::DeviceListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/devices",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<models::DeviceListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/devices",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_registration_key(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
device_name: &str,
subscription_id: &str,
) -> std::result::Result<models::DeviceRegistrationKey, list_registration_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HybridNetwork/devices/{}/listRegistrationKey",
operation_config.base_path(),
subscription_id,
resource_group_name,
device_name
);
let mut url = url::Url::parse(url_str).map_err(list_registration_key::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_registration_key::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_registration_key::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_registration_key::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DeviceRegistrationKey = serde_json::from_slice(rsp_body)
.map_err(|source| list_registration_key::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_registration_key::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_registration_key::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_registration_key {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod operations {
use super::{models, API_VERSION};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.HybridNetwork/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod vendors {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
vendor_name: &str,
subscription_id: &str,
) -> std::result::Result<models::Vendor, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}",
operation_config.base_path(),
subscription_id,
vendor_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Vendor =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
vendor_name: &str,
parameters: Option<&models::Vendor>,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}",
operation_config.base_path(),
subscription_id,
vendor_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = if let Some(parameters) = parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Vendor = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Vendor = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for the vendor `create_or_update` operation.
pub mod create_or_update {
use super::{models, API_VERSION};
// Successful outcomes: the service replies 200 (updated) or 201 (created),
// both carrying the resulting vendor resource.
#[derive(Debug)]
pub enum Response {
Ok200(models::Vendor),
Created201(models::Vendor),
}
// All failure modes of the operation: the service's default error response
// plus local failures (URL parsing, request building, transport, JSON, auth).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
vendor_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}",
operation_config.base_path(),
subscription_id,
vendor_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for the vendor `delete` operation.
pub mod delete {
use super::{models, API_VERSION};
// Successful outcomes; the service may answer 200, 202 or 204,
// none of which carries a body.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
// All failure modes of the operation: the service's default error response
// plus local failures (URL parsing, request building, transport, JSON, auth).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::VendorListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VendorListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the vendor `list_by_subscription` operation
// (the success path returns `models::VendorListResult` directly).
pub mod list_by_subscription {
use super::{models, API_VERSION};
// The service's default error response plus local failures
// (URL parsing, request building, transport, JSON, auth).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
// Operations on vendor SKUs:
// {base}/subscriptions/{subscriptionId}/providers/Microsoft.HybridNetwork/vendors/{vendorName}/vendorSkus[/{skuName}]
// Every function follows the same generated pattern: build the URL, optionally
// attach a bearer token, append the api-version query parameter, execute, and
// decode the expected statuses; any other status is parsed as ErrorResponse.
pub mod vendor_skus {
use super::{models, API_VERSION};
// GET a single vendor SKU by name.
pub async fn get(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
subscription_id: &str,
) -> std::result::Result<models::VendorSku, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is only attached when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VendorSku =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status is decoded as the service's error response.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `get`.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// PUT a vendor SKU: 200 = updated, 201 = created. Body is required here
// (unlike the vendor-level create_or_update, whose body is optional).
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
parameters: &models::VendorSku,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VendorSku = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::VendorSku = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for `create_or_update`.
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::VendorSku),
Created201(models::VendorSku),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// DELETE a vendor SKU: 200/202/204 are successful, body-less outcomes.
pub async fn delete(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for `delete`.
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// GET all SKUs belonging to a vendor.
pub async fn list(
operation_config: &crate::OperationConfig,
vendor_name: &str,
subscription_id: &str,
) -> std::result::Result<models::VendorSkuListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus",
operation_config.base_path(),
subscription_id,
vendor_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VendorSkuListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `list`.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
// Operations on preview subscriptions of a vendor SKU:
// {base}/.../vendors/{vendorName}/vendorSkus/{skuName}/previewSubscriptions[/{previewSubscription}]
// Same generated request pattern as the sibling modules: build the URL,
// optionally attach a bearer token, append api-version, execute, decode.
pub mod vendor_sku_preview {
use super::{models, API_VERSION};
// GET all preview subscriptions registered for a SKU.
pub async fn list(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
subscription_id: &str,
) -> std::result::Result<models::PreviewSubscriptionsList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}/previewSubscriptions",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is only attached when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PreviewSubscriptionsList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status is decoded as the service's error response.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `list`.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// GET a single preview subscription by name.
pub async fn get(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
preview_subscription: &str,
subscription_id: &str,
) -> std::result::Result<models::PreviewSubscription, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}/previewSubscriptions/{}",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name,
preview_subscription
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PreviewSubscription =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `get`.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// PUT a preview subscription: 201 = created, 200 = updated. Body is required.
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
preview_subscription: &str,
parameters: &models::PreviewSubscription,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}/previewSubscriptions/{}",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name,
preview_subscription
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::PreviewSubscription = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PreviewSubscription = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for `create_or_update`.
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Created201(models::PreviewSubscription),
Ok200(models::PreviewSubscription),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
// DELETE a preview subscription: 200/202/204 are successful, body-less outcomes.
pub async fn delete(
operation_config: &crate::OperationConfig,
vendor_name: &str,
sku_name: &str,
preview_subscription: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/vendors/{}/vendorSkus/{}/previewSubscriptions/{}",
operation_config.base_path(),
subscription_id,
vendor_name,
sku_name,
preview_subscription
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for `delete`.
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
// Read-only listing of network function vendors:
// {base}/subscriptions/{subscriptionId}/providers/Microsoft.HybridNetwork/networkFunctionVendors
pub mod network_function_vendors {
use super::{models, API_VERSION};
// GET all network function vendors visible to the subscription.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::NetworkFunctionVendorListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HybridNetwork/networkFunctionVendors",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is only attached when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::NetworkFunctionVendorListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Any non-200 status is decoded as the service's error response.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for `list`.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Generated operations for the `networkFunctionVendors/{vendor}/vendorSkus` resource.
pub mod network_function_vendor_skus {
    use super::{models, API_VERSION};
    /// Lists all SKUs registered under the given network-function vendor.
    ///
    /// Sends `GET .../networkFunctionVendors/{vendor_name}/vendorSkus`.
    /// A 200 response is deserialized into `NetworkFunctionSkuListResult`;
    /// any other status is decoded as `ErrorResponse` and surfaced as
    /// [`list_by_vendor::Error::DefaultResponse`].
    pub async fn list_by_vendor(
        operation_config: &crate::OperationConfig,
        vendor_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::NetworkFunctionSkuListResult, list_by_vendor::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/networkFunctionVendors/{}/vendorSkus",
            operation_config.base_path(),
            subscription_id,
            vendor_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_vendor::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_vendor::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET: no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_vendor::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_vendor::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetworkFunctionSkuListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_vendor::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                // Any non-200 status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_vendor::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_vendor::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`list_by_vendor`].
    pub mod list_by_vendor {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Gets the details of a single vendor SKU.
    ///
    /// Sends `GET .../networkFunctionVendors/{vendor_name}/vendorSkus/{vendor_sku_name}`
    /// and deserializes a 200 response into `NetworkFunctionSkuDetails`.
    pub async fn list_by_sku(
        operation_config: &crate::OperationConfig,
        vendor_name: &str,
        vendor_sku_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::NetworkFunctionSkuDetails, list_by_sku::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/networkFunctionVendors/{}/vendorSkus/{}",
            operation_config.base_path(),
            subscription_id,
            vendor_name,
            vendor_sku_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_sku::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_sku::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET: no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_sku::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_sku::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetworkFunctionSkuDetails =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_sku::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                // Any non-200 status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_sku::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_sku::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`list_by_sku`].
    pub mod list_by_sku {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Generated operations for the `locations/{location}/vendors/{vendor}/networkFunctions` resource.
pub mod vendor_network_functions {
    use super::{models, API_VERSION};
    /// Gets a single vendor network function identified by its service key.
    ///
    /// Sends `GET .../locations/{location_name}/vendors/{vendor_name}/networkFunctions/{service_key}`
    /// and deserializes a 200 response into `VendorNetworkFunction`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::VendorNetworkFunction, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET: no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VendorNetworkFunction =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                // Any non-200 status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a vendor network function.
    ///
    /// Sends `PUT .../networkFunctions/{service_key}` with `parameters`
    /// serialized as the JSON request body. A 200 response maps to
    /// [`create_or_update::Response::Ok200`] (updated) and a 201 to
    /// [`create_or_update::Response::Created201`] (created).
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        parameters: &models::VendorNetworkFunction,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // PUT: JSON body built from `parameters`.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VendorNetworkFunction = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::VendorNetworkFunction = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                // Any other status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        /// Success outcomes: 200 (updated) or 201 (created), both carrying
        /// the resulting resource.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::VendorNetworkFunction),
            Created201(models::VendorNetworkFunction),
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists vendor network functions in a location, optionally filtered.
    ///
    /// Sends `GET .../locations/{location_name}/vendors/{vendor_name}/networkFunctions`;
    /// when `filter` is `Some`, it is forwarded as the OData `$filter`
    /// query parameter.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        filter: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<models::VendorNetworkFunctionListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // The filter is optional; omit the query parameter entirely when absent.
        if let Some(filter) = filter {
            url.query_pairs_mut().append_pair("$filter", filter);
        }
        // GET: no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VendorNetworkFunctionListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                // Any non-200 status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Generated operations for role instances of a vendor network function.
pub mod role_instances {
    use super::{models, API_VERSION};
    /// Starts a role instance.
    ///
    /// Sends `POST .../roleInstances/{role_instance_name}/start`.
    /// 200 means the operation completed ([`start::Response::Ok200`]);
    /// 202 means it was accepted for asynchronous processing
    /// ([`start::Response::Accepted202`]).
    pub async fn start(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        role_instance_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<start::Response, start::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}/roleInstances/{}/start",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key,
            role_instance_name
        );
        let mut url = url::Url::parse(url_str).map_err(start::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(start::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // POST with an empty body still sends an explicit Content-Length: 0.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(start::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(start::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(start::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
            status_code => {
                // Any other status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| start::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(start::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`start`].
    pub mod start {
        use super::{models, API_VERSION};
        /// Success outcomes: 200 (completed) or 202 (accepted, in progress).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Stops a role instance.
    ///
    /// Sends `POST .../roleInstances/{role_instance_name}/stop`.
    /// 200 means completed; 202 means accepted for asynchronous processing.
    pub async fn stop(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        role_instance_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<stop::Response, stop::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}/roleInstances/{}/stop",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key,
            role_instance_name
        );
        let mut url = url::Url::parse(url_str).map_err(stop::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(stop::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // POST with an empty body still sends an explicit Content-Length: 0.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(stop::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(stop::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(stop::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(stop::Response::Accepted202),
            status_code => {
                // Any other status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| stop::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(stop::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`stop`].
    pub mod stop {
        use super::{models, API_VERSION};
        /// Success outcomes: 200 (completed) or 202 (accepted, in progress).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Restarts a role instance.
    ///
    /// Sends `POST .../roleInstances/{role_instance_name}/restart`.
    /// 200 means completed; 202 means accepted for asynchronous processing.
    pub async fn restart(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        role_instance_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<restart::Response, restart::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}/roleInstances/{}/restart",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key,
            role_instance_name
        );
        let mut url = url::Url::parse(url_str).map_err(restart::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(restart::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // POST with an empty body still sends an explicit Content-Length: 0.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(restart::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(restart::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(restart::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
            status_code => {
                // Any other status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| restart::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(restart::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`restart`].
    pub mod restart {
        use super::{models, API_VERSION};
        /// Success outcomes: 200 (completed) or 202 (accepted, in progress).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Gets a single role instance.
    ///
    /// Sends `GET .../roleInstances/{role_instance_name}` and deserializes a
    /// 200 response into `RoleInstance`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        role_instance_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::RoleInstance, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}/roleInstances/{}",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key,
            role_instance_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET: no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::RoleInstance =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                // Any non-200 status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all role instances of a vendor network function.
    ///
    /// Sends `GET .../networkFunctions/{service_key}/roleInstances` and
    /// deserializes a 200 response into `NetworkFunctionRoleInstanceListResult`.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        location_name: &str,
        vendor_name: &str,
        service_key: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::NetworkFunctionRoleInstanceListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HybridNetwork/locations/{}/vendors/{}/networkFunctions/{}/roleInstances",
            operation_config.base_path(),
            subscription_id,
            location_name,
            vendor_name,
            service_key
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when a credential was configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET: no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::NetworkFunctionRoleInstanceListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                // Any non-200 status carries an `ErrorResponse` payload.
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
|
use super::build::BuildSystem;
use super::build::BuildSystemBase;
use super::file;
use fs::File;
use std::env;
use std::fs;
use std::io::Write;
use std::path::PathBuf;
/// Build-system backend that emits a Soong `Android.bp` file for an APK
/// (see the `BuildSystem::generate` implementation).
#[derive(Debug, Clone)]
pub struct BluePrint {
    /// The base build system struct
    pub build_system: BuildSystemBase,
}
impl BluePrint {
    /// Architectures supplied explicitly (via CLI), delegated to the base config.
    pub fn get_default_architectures(&self) -> Vec<String> {
        self.build_system.get_default_architectures().clone()
    }
    /// Module name used in the generated `Android.bp` stanza.
    pub fn get_name(&self) -> String {
        self.build_system.get_name().clone()
    }
    /// Whether explicit (override) architectures were provided.
    pub fn has_default_architecture(&self) -> bool {
        self.build_system.has_default_architecture()
    }
    /// Architectures detected from the APK, delegated to the base config.
    pub fn get_architectures(&self) -> Vec<String> {
        self.build_system.get_architectures().clone()
    }
}
impl BuildSystem for BluePrint {
    /// Generates an `Android.bp` (Soong) module file in the current working
    /// directory.
    ///
    /// Emits an `android_app_import` stanza containing the module name, the
    /// source APK file, an optional `privileged` flag, an optional
    /// `dex_preopt` block and, when native libraries are present, a
    /// per-architecture `arch` block pointing at `prebuilt/<arch>/<name>.apk`.
    ///
    /// Returns 0 on success.
    ///
    /// # Panics
    /// Panics when the current directory cannot be determined or when
    /// `Android.bp` cannot be created or written.
    fn generate(&self) -> i32 {
        let apk_dir = env::current_dir().expect("Couldn't determine current working directory");
        let file_name = "Android.bp";
        let android_gen_path: PathBuf = apk_dir.join(file_name);
        let display = android_gen_path.display();
        let file_name_ext: PathBuf = file::file_name_ext(&self.build_system.get_input()).into();
        // Borrow the build-system description; cloning the whole struct is unnecessary.
        let build_system = &self.build_system;
        // Open a file in write-only mode, returns `io::Result<File>`.
        let mut file = match File::create(&android_gen_path) {
            Err(why) => panic!("Couldn't create {}: {}", display, why),
            Ok(file) => file,
        };
        let mut jni_libs: String = String::new();
        let lib_size = build_system.get_libraries().len();
        // If we passed some architectures via cli, prioritize those.
        // Else, use the architectures we found in APK.
        let arch = if self.has_default_architecture() {
            self.get_default_architectures()
        } else {
            self.get_architectures()
        };
        // TODO: Clean this up, it's unreadable, use r#
        if lib_size > 0 {
            println!("Please place APK's inside /prebuilt/<arch>/ \nSee [https://github.com/bensadiku/genapkbuild/issues/6] \n");
            jni_libs.push_str("\n\tarch: {");
            for archi in &arch {
                jni_libs.push_str(&format!("\n\t\t{}: {}", archi, "{"));
                jni_libs.push_str("\n\t\t");
                let path = &format!("\t{}/{}/{}.apk\",\n", "prebuilt", archi, self.get_name());
                jni_libs.push_str(path);
                jni_libs.push_str("\t\t},");
            }
            jni_libs.push_str("\n\t},");
        }
        // Optional dex_preopt stanza, only emitted when explicitly configured.
        let dex = if build_system.get_preopt_dex().0 {
            format!(
                r#"dex_preopt: {{
        enabled: {},
    }},"#,
                build_system.get_preopt_dex().1
            )
        } else {
            String::new()
        };
        // Optional privileged flag for priv-app installs.
        let priv_app = if build_system.privileged() {
            "privileged: true,"
        } else {
            ""
        };
        let bp = format!(
            r#"android_app_import {{
    name: {:#?},
    srcs: [{:#?}],
    certificate: "presigned",
    {}
    {}
    {}
}}
"#,
            build_system.get_name(),
            file_name_ext.display(),
            priv_app,
            dex,
            jni_libs
        );
        // Write everything and report success (0) back to the caller;
        // returning the match expression directly avoids the `let ret = …; ret` detour.
        match file.write_all(bp.as_bytes()) {
            Err(why) => panic!("Couldn't write to {}: {}", display, why),
            Ok(_) => {
                println!("Successfully created Android.bp to {}", display);
                0
            }
        }
    }
}
|
use std::collections::HashMap;
// think I should've used a heap here
/// A graph vertex identified by `id`, holding the ids of its neighbours.
///
/// The original design stored `&Vertex` references to stack locals inside the
/// graph, which cannot compile (dangling references); vertices are now owned
/// by the graph and edges refer to vertices by id.
struct Vertex {
    id: u64,
    /// Ids of adjacent vertices.
    edges: Vec<u64>,
}
/// An edge between two vertex ids.
struct Edge {
    first: u64,
    second: u64,
}
/// Adjacency-list graph: owned vertices keyed by id plus a flat edge list.
struct AdjacencyList {
    vertices: HashMap<u64, Vertex>,
    edges: Vec<Edge>,
}
impl AdjacencyList {
    /// Creates an empty graph.
    pub fn new() -> Self {
        AdjacencyList {
            vertices: HashMap::new(),
            edges: Vec::new(),
        }
    }
    /// Returns true when a vertex with this id exists.
    pub fn node_present(&mut self, id: u64) -> bool {
        self.vertices.contains_key(&id)
    }
    /// Inserts a vertex with this id if absent and returns a reference to it.
    pub fn insert_node(&mut self, id: u64) -> &Vertex {
        // Entry API: single lookup for the check-then-insert.
        self.vertices.entry(id).or_insert_with(|| Vertex {
            id,
            edges: Vec::new(),
        })
    }
    /// Takes a vector where the first item is the node id and the remaining
    /// items are the other nodes it is connected to. Missing vertices are
    /// created; each connection is recorded on both endpoints and in the
    /// graph-wide edge list.
    pub fn insert_adj_list(&mut self, list: Vec<u64>) {
        if list.is_empty() {
            return;
        }
        let source = list[0];
        self.insert_node(source);
        for &target in &list[1..] {
            // If the node is not in the graph, add it.
            self.insert_node(target);
            // Record the adjacency on both endpoints and the edge itself.
            self.vertices.get_mut(&source).unwrap().edges.push(target);
            self.vertices.get_mut(&target).unwrap().edges.push(source);
            self.edges.push(Edge {
                first: source,
                second: target,
            });
        }
    }
}
// Smoke test: building a graph from an adjacency list should succeed.
#[test]
fn test_new_adjacency_list() {
    let mut graph = AdjacencyList::new();
    let list = vec![1,2,3,4];
    graph.insert_adj_list(list);
}
// Inserted node ids are reported present; an absent id is not.
#[test]
fn test_insert_node() {
    let mut graph = AdjacencyList::new();
    graph.insert_node(1);
    graph.insert_node(2);
    graph.insert_node(5);
    assert!(graph.node_present(1));
    assert!(graph.node_present(2));
    assert!(graph.node_present(5));
    assert!(!graph.node_present(4));
}
|
use ::errors::*;
use config::Config;
/// Reads the file at `path` and deserializes its TOML contents into a
/// `Config`, chaining a descriptive context message onto each failure
/// (open, read, deserialize).
pub fn load_config_from_file(path: &str) -> Result<Config> {
    use std::fs::File;
    use std::io::Read;
    let mut raw = String::new();
    File::open(path)
        .chain_err(|| "Failed to open config file")?
        .read_to_string(&mut raw)
        .chain_err(|| "Failed to read file")?;
    ::toml::de::from_str(&raw).chain_err(|| "Failed to deserialize config")
}
/// Render an error-chain `Error` and all of its causes as one multi-line
/// string: "Error: ..." followed by a "Cause: ..." line per underlying cause.
pub fn get_error_trace(e: &Error) -> String {
    let mut trace = format!("Error: {}", e);
    // Skip the first iterator item — it is `e` itself, already printed above.
    for cause in e.iter().skip(1) {
        trace.push_str(&format!("\nCause: {}", cause));
    }
    trace
}
|
use libc::c_int;
use H5public::herr_t;
// Raw FFI declarations for the HDF5 plugin-loading (H5PL) interface.
// NOTE(review): per HDF5 convention `herr_t` is negative on failure —
// confirm against the H5PL reference manual.
extern "C" {
    /// Sets the dynamic plugin loading state from the `plugin_flags` bitmask.
    pub fn H5PLset_loading_state(plugin_flags: c_int) -> herr_t;
    /// Writes the current plugin loading state into `*plugin_flags`.
    pub fn H5PLget_loading_state(plugin_flags: *mut c_int) -> herr_t;
}
|
use amethyst::ecs::{storage::NullStorage, Component};
/// Marker component (zero-sized, hence `NullStorage`): presumably tags
/// entities that are currently selected — confirm against the systems
/// that query it.
#[derive(Component, Default)]
#[storage(NullStorage)]
pub struct Selected;
/// Marker component (zero-sized, hence `NullStorage`): presumably tags
/// entities that accept player control input — confirm against the
/// systems that query it.
#[derive(Component, Default)]
#[storage(NullStorage)]
pub struct Controllable;
|
use crate::gui::ImCgVec2;
use cgmath::num_traits::zero;
use cgmath::Vector2;
use imgui_inspect_derive::*;
use serde::{Deserialize, Serialize};
use specs::{Component, VecStorage};
/// Physics state for an entity: velocity, acceleration, and mass.
/// Inspectable in the imgui editor via the `Inspect` derive; `Vector2`
/// fields are shown through the `ImCgVec2` proxy.
#[derive(Component, Debug, Inspect, Clone, Serialize, Deserialize)]
#[storage(VecStorage)]
pub struct Kinematics {
    /// Current velocity (units/time base not stated here — TODO confirm).
    #[inspect(proxy_type = "ImCgVec2")]
    pub velocity: Vector2<f32>,
    /// Current acceleration; hidden from the inspector (`skip = true`).
    #[inspect(proxy_type = "ImCgVec2", skip = true)]
    pub acceleration: Vector2<f32>,
    /// Mass of the entity.
    pub mass: f32,
}
impl Kinematics {
    /// Build a body at rest (zero velocity and acceleration) with the
    /// given `mass`.
    pub fn from_mass(mass: f32) -> Self {
        Self {
            mass,
            velocity: zero(),
            acceleration: zero(),
        }
    }
}
|
use anyhow::Result;
use sightglass_analysis::{effect_size, summarize};
use sightglass_data::Format;
use std::{
fs::File,
io::{self, BufReader},
};
use structopt::StructOpt;
/// Calculate the effect size (and associated confidence interval) between the
/// results for two different engines.
/// Calculate the effect size (and associated confidence interval) between the
/// results for two different engines.
#[derive(Debug, StructOpt)]
#[structopt(name = "effect-size")]
pub struct EffectSizeCommand {
    /// Path to the file(s) that will be read from, or none to indicate stdin (default).
    // NOTE: `-f` may be repeated; measurements from all files are concatenated.
    #[structopt(short = "f")]
    input_file: Option<Vec<String>>,
    /// The format of the input data. Either 'json' or 'csv'.
    #[structopt(short = "i", long = "input-format", default_value = "json")]
    input_format: Format,
    /// The format of the output data. Either 'json' or 'csv'; if unspecified, print the output in
    /// human-readable form.
    #[structopt(short = "o", long = "output-format")]
    output_format: Option<Format>,
    /// The significance level for the confidence interval. Typical values are
    /// 0.01 and 0.05, which correspond to 99% and 95% confidence respectively.
    #[structopt(short, long, default_value = "0.01")]
    significance_level: f64,
}
impl EffectSizeCommand {
    /// Collect measurements (from the given files, or stdin when none were
    /// given), compute effect sizes at the configured significance level,
    /// and write them in the requested format — or in human-readable form
    /// (with per-engine summaries) when no output format was specified.
    pub fn execute(&self) -> Result<()> {
        let measurements = match self.input_file.as_ref() {
            Some(files) => {
                let mut collected = Vec::new();
                for path in files {
                    let reader = BufReader::new(File::open(path)?);
                    collected.extend(self.input_format.read(reader)?);
                }
                collected
            }
            None => self.input_format.read(io::stdin())?,
        };
        let effects = effect_size::calculate(self.significance_level, &measurements)?;
        match &self.output_format {
            Some(format) => format.write(&effects, io::stdout()),
            None => {
                // Human-readable output includes per-engine summaries for context.
                let summaries = summarize::calculate(&measurements);
                effect_size::write(
                    effects,
                    &summaries,
                    self.significance_level,
                    &mut io::stdout(),
                )
            }
        }
    }
}
|
use super::api::{Project, Sample};
use console::style;
use failure::bail;
use number_prefix::NumberPrefix;
use std::io::{Read, Write};
use tabwriter::TabWriter;
/// Return the top 5 closest matching hits for a given query
///
/// derived from clap/src/parse/features/suggestions.rs
pub fn did_you_mean<T, I>(v: &str, possible_values: I) -> Vec<String>
where
    T: AsRef<str>,
    I: IntoIterator<Item = T>,
{
    // Score every candidate with Jaro-Winkler similarity, keeping only
    // reasonably confident matches (>= 0.80).
    let mut scored: Vec<(f64, String)> = possible_values
        .into_iter()
        .filter_map(|pv| {
            let confidence = strsim::jaro_winkler(v, pv.as_ref());
            if confidence >= 0.80 {
                Some((confidence, pv.as_ref().to_owned()))
            } else {
                None
            }
        })
        .collect();
    // Best matches first; incomparable (NaN) scores are treated as equal.
    scored.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap_or(std::cmp::Ordering::Equal));
    scored.into_iter().take(5).map(|(_, name)| name).collect()
}
/// When there are duplicate projects, the user needs
/// to resolve the conflict. This function prompts the user to pick
/// the desired project
// NOTE(review): assumes `projects` is non-empty — `projects.len() - 1`
// underflows (panics) on an empty vector; verify callers guarantee this.
pub fn resolve_duplicate_projects(mut projects: Vec<&Project>) -> &Project {
    eprintln!(
        "{} Found {} projects with the same name.",
        style("warning:").bold().yellow(),
        projects.len()
    );
    // Render an index / name / creation-date table on stderr (aligned with
    // TabWriter) so stdout stays clean for machine-readable output.
    let stderr = std::io::stderr();
    let mut stderr = stderr.lock();
    let mut writer = TabWriter::new(&mut stderr);
    writeln!(&mut writer, "#\tname\tdate created").unwrap();
    for (index, project) in projects.iter().enumerate() {
        writeln!(
            &mut writer,
            "{}\t{}\t{}",
            index, project.user_owned_by.name, project.date_created
        )
        .unwrap();
    }
    writer.flush().unwrap();
    let invalid_input = format!(
        "{} Please enter an integer from 0 to {}",
        style("error:").bold().red(),
        projects.len() - 1
    );
    // Re-prompt until the user enters a parsable, in-range index.
    // `try_read!` (text_io-style macro — TODO confirm crate) reads one
    // whitespace-terminated token from stdin.
    let user_index = loop {
        eprint!("Enter the project index [0..{}]: ", projects.len() - 1);
        let response: Result<usize, _> = try_read!();
        break match response {
            Ok(response) => {
                if response > projects.len() - 1 {
                    eprintln!("{}", invalid_input);
                    continue;
                }
                response
            }
            Err(_) => {
                eprintln!("{}", invalid_input);
                continue;
            }
        };
    };
    // Move the chosen project out of the vector and return it.
    projects.remove(user_index)
}
/// If a project is rerun, a separate set of Undetermined files
/// is created under the same project name. The only way to differentiate
/// is by data, so we need the user to make the decision.
// NOTE(review): assumes `samples` is non-empty — `samples.len() - 1`
// underflows (panics) on an empty vector; verify callers guarantee this.
pub fn resolve_duplicate_unindexed_reads(mut samples: Vec<&Sample>) -> &Sample {
    eprintln!(
        "{} Found {} \"Unindexed Reads\" with the same project name.",
        style("warning:").bold().yellow(),
        samples.len()
    );
    // Render an index / name / creation-date table on stderr (aligned with
    // TabWriter) so stdout stays clean for machine-readable output.
    let stderr = std::io::stderr();
    let mut stderr = stderr.lock();
    let mut writer = TabWriter::new(&mut stderr);
    writeln!(&mut writer, "#\tname\tdate created").unwrap();
    for (index, sample) in samples.iter().enumerate() {
        writeln!(
            &mut writer,
            "{}\t{}\t{}",
            index, sample.name, sample.date_created
        )
        .unwrap();
    }
    writer.flush().unwrap();
    let invalid_input = format!(
        "{} Please enter an integer from 0 to {}",
        style("error:").bold().red(),
        samples.len() - 1
    );
    // Re-prompt until the user enters a parsable, in-range index
    // (same interactive loop shape as resolve_duplicate_projects).
    let user_index = loop {
        eprint!("Enter the sample index [0..{}]: ", samples.len() - 1);
        let response: Result<usize, _> = try_read!();
        break match response {
            Ok(response) => {
                if response > samples.len() - 1 {
                    eprintln!("{}", invalid_input);
                    continue;
                }
                response
            }
            Err(_) => {
                eprintln!("{}", invalid_input);
                continue;
            }
        };
    };
    // Move the chosen sample out of the vector and return it.
    samples.remove(user_index)
}
/// Fill `buf` as far as possible, retrying on short reads, so a part is only
/// shorter than requested at end-of-stream. `Read::read` is allowed to return
/// fewer bytes than asked for at any time, which the original single-call
/// code mishandled.
fn read_full_part(rdr: &mut impl Read, buf: &mut [u8]) -> std::io::Result<usize> {
    let mut filled = 0;
    while filled < buf.len() {
        match rdr.read(&mut buf[filled..]) {
            Ok(0) => break, // EOF
            Ok(n) => filled += n,
            Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => continue,
            Err(e) => return Err(e),
        }
    }
    Ok(filled)
}
/// Calculate s3 etag from known part size
///
/// Files no larger than one part get a plain MD5 digest; larger files get
/// the multipart form `md5(concatenated part digests)-partcount`.
///
/// # Errors
/// Returns an error on I/O failure (previously these were `unwrap`ed), or
/// when fewer than two parts could be read for a multipart etag.
pub fn s3_etag(
    mut rdr: impl Read,
    file_size: usize,
    part_size: usize,
) -> Result<String, failure::Error> {
    if file_size <= part_size {
        // Single-part upload: the etag is just the MD5 of the content.
        let mut buffer = vec![0; file_size];
        rdr.read_exact(&mut buffer[..])?;
        return Ok(format!("{:?}", md5::compute(&buffer)));
    }
    let mut digests: Vec<u8> = Vec::new();
    let mut parts = 0;
    loop {
        let mut buffer = vec![0; part_size];
        // A bare `rdr.read(..)` may legally return a short count mid-stream,
        // which would desynchronise the part boundaries and corrupt the etag;
        // read_full_part only returns short at EOF.
        let bcount = read_full_part(&mut rdr, &mut buffer[..])?;
        if bcount == 0 {
            break;
        }
        buffer.truncate(bcount);
        digests.extend(&md5::compute(&buffer).0);
        parts += 1;
        if bcount < part_size {
            // A short part can only mean EOF; skip the redundant final read.
            break;
        }
    }
    if digests.is_empty() || parts < 2 {
        bail!("Could not calculate etag.");
    }
    Ok(format!("{:?}-{}", md5::compute(digests.as_slice()), parts))
}
/// Calculate the s3 etag from path, and compare it with
/// the expected etag.
///
/// The advantage of this function is that we don't need to know
/// the etag part size (if we already know the file size and known etag).
///
/// TODO: part size calculation is not working. Do not use this function
pub fn verify_s3_etag(
    mut rdr: impl Read,
    expected_etag: &str,
    file_size: u64,
) -> Result<bool, failure::Error> {
    // Multipart etags look like "<hex>-<parts>"; a plain etag has no dash.
    let num_parts = match expected_etag.find('-') {
        Some(index) => expected_etag
            .chars()
            .skip(index + 1)
            .collect::<String>()
            .parse::<usize>()?,
        None => {
            // Single-part etag: MD5 the whole stream and compare directly.
            let mut buffer = vec![0; file_size as usize];
            rdr.read_exact(&mut buffer[..]).unwrap(); // NOTE(review): unwrap on I/O — should propagate
            let digest = md5::compute(&buffer);
            let actual_etag = format!("{:?}", digest);
            return Ok(actual_etag == expected_etag);
        }
    };
    // Assumes AWS part sizes are a factor of one megabyte
    static ONE_MEGABYTE: f64 = 1024.0 * 1024.0;
    // TODO: does not work 100% of the time.
    // Guess the part size by rounding file_size / num_parts up to the next MiB.
    let x = file_size as f64 / num_parts as f64;
    let y = x % (ONE_MEGABYTE);
    let part_size = (x - y + (ONE_MEGABYTE)) as usize;
    let mut digests: Vec<u8> = Vec::new();
    let mut parts = 0;
    loop {
        let mut buffer = vec![0; part_size];
        // NOTE(review): a single `read` may return a short count mid-stream,
        // desynchronising part boundaries — another reason for the
        // "do not use" warning above.
        let bcount = rdr.read(&mut buffer[..]).unwrap();
        if bcount == 0 {
            break;
        }
        buffer.truncate(bcount);
        let digest = md5::compute(&buffer);
        digests.extend(&digest.0);
        parts += 1;
        // NOTE(review): dead branch — bcount == 0 already broke out above,
        // so after truncate(bcount) the buffer cannot be empty here.
        if buffer.is_empty() {
            break;
        }
    }
    let actual_etag = if digests.is_empty() || parts < 2 {
        bail!("Could not calculate etag.");
    } else {
        format!("{:?}-{}", md5::compute(digests.as_slice()), parts)
    };
    Ok(actual_etag == expected_etag)
}
/// Convert bytes to human readable form.
///
/// Trying to match format of unix's "ls -lh" command
pub fn convert_bytes(num: f64) -> String {
    match NumberPrefix::decimal(num) {
        // Below the first prefix threshold: print the raw count, no suffix.
        NumberPrefix::Standalone(bytes) => bytes.to_string(),
        NumberPrefix::Prefixed(prefix, scaled) => {
            // One decimal place for small magnitudes, none once >= 10.
            let precision = if scaled >= 10.0 { 0 } else { 1 };
            format!("{:.*}{}", precision, scaled, prefix.symbol())
        }
    }
}
|
use proconio::{fastout, input};
// Modulus for the answer: 1e9 + 7, the usual competitive-programming prime.
const MOD: i64 = 1_000_000_000 + 7;
#[fastout]
fn main() {
    input! {
        n: i64,
    };
    // Inclusion–exclusion: 10^n - 9^n - 9^n + 8^n (mod MOD).
    // Presumably counts length-n digit strings containing both of two
    // required digits (e.g. a 0 and a 9) — confirm against the problem text.
    let ans = mpow(10, n as u64) - mpow(9, n as u64) - mpow(9, n as u64) + mpow(8, n as u64);
    // The subtractions can go negative; normalize into [0, MOD).
    let ans = ans % MOD;
    let ans = (ans + MOD) % MOD;
    println!("{}", ans);
}
/// Binary (square-and-multiply) exponentiation: n^k mod `MOD`.
fn mpow(n: i64, mut k: u64) -> i64 {
    let mut acc: i64 = 1;
    let mut base = n;
    loop {
        if k == 0 {
            break acc;
        }
        // Multiply in the current power of `base` for each set bit of k.
        if k & 1 != 0 {
            acc = acc * base % MOD;
        }
        base = base * base % MOD;
        k >>= 1;
    }
}
|
use franz::Client;
/// Smoke test for the client wire protocol: connects, sends an API-version
/// request, and waits for a reply. Requires a broker listening on
/// 127.0.0.1:9092 (the standard Kafka port — presumably a Kafka-compatible
/// server), so this is an integration test, not a unit test.
#[tokio::test]
async fn client_simple() {
    let mut client = Client::connect("127.0.0.1:9092").await.unwrap();
    client.send_api_version_request().await.unwrap();
    // Receive path is work-in-progress ("wip"); just assert it doesn't error.
    client.wip_recv().await.unwrap();
}
|
// Crate-internal modules; `system` is the only module exposed directly.
mod artifact;
mod compiler;
mod console;
mod diagnostic;
mod directory;
mod extensions;
mod hasher;
mod processor;
mod refs;
mod result;
mod rule;
mod scope;
mod store;
pub mod system;
mod utils;
mod variable;
// Blanket re-exports of the crate's core helper items.
pub use refs::*;
pub use result::*;
pub use utils::*;
// Third-party and std names re-exported as part of the public API.
pub use rquickjs as qjs;
pub use std::time::{Duration, SystemTime as Time};
pub use weak_table::traits::{WeakElement, WeakKey};
// Selected public types from the internal modules.
pub use artifact::{Actual, Artifact, ArtifactStore, Input, Output, Phony, WeakArtifact};
pub use diagnostic::{
    Diagnostic, Diagnostics, FixingSuggestion, Location, Severity, TextPoint, TextSpan,
};
pub use directory::Directory;
pub use hasher::DataHasher;
pub use processor::RuleStateChange;
pub use rule::{JsRule, NoRule, Rule, RuleApi, RuleId, RuleState};
pub use scope::Scope;
pub use store::Store;
pub use variable::{
    Value, ValueDef, ValueError, ValueResult, ValueStore, Variable, VariableDef, VariableStore,
    WeakVariable, WeakVariableSet,
};
// Per-module JS bindings; each module's `Js` type is disambiguated by alias.
pub use console::Js as ConsoleJs;
pub use extensions::Js as ExtensionsJs;
pub use system::Js as SystemJs;
pub use artifact::Js as ArtifactJs;
pub use directory::Js as DirectoryJs;
pub use rule::Js as RuleJs;
pub use scope::Js as ScopeJs;
pub use variable::Js as VariableJs;
pub use compiler::{CompilerJs, SymbolInfo, SymbolsJs};
// Private dependencies backing the aliases below.
use futures::future::LocalBoxFuture;
use fxhash::FxBuildHasher;
use indexmap::{IndexMap, IndexSet};
use weak_table::WeakHashSet;
// Insertion-ordered map/set aliases using the fast FxHash hasher
// (non-cryptographic; presumably keys are not attacker-controlled here).
pub type Set<T> = IndexSet<T, FxBuildHasher>;
pub type Map<K, V> = IndexMap<K, V, FxBuildHasher>;
pub type WeakSet<T> = WeakHashSet<T, FxBuildHasher>;
// Single-threaded boxed futures ('static lifetime, !Send).
pub type BoxedFuture<T> = LocalBoxFuture<'static, T>;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.