text stringlengths 8 4.13M |
|---|
#[macro_use]
extern crate serde;
extern crate serde_json;
pub mod config;
pub mod protocol;
pub mod rules;
pub mod serializer;
use std::collections::HashMap;
pub use rules::{PwState, Dispatch};
pub use protocol::CommandError;
pub use config::Config as PwConfig;
/// A Planet Wars game: the rules-level state plus a name→id index used
/// to resolve planet names appearing in player commands.
pub struct PlanetWars {
    /// Game state maintained by the rules module
    state: rules::PwState,
    /// Map planet names to their ids
    // Built once in `create` from `state.planets`; never updated afterwards.
    planet_map: HashMap<String, usize>
}
impl PlanetWars {
pub fn create(config: PwConfig, num_players: usize) -> Self {
let state = config.create_state(num_players);
let planet_map = state
.planets
.iter()
.map(|p| (p.name.clone(), p.id))
.collect();
PlanetWars { state, planet_map }
}
/// Proceed to next turn
pub fn step(&mut self) {
self.state.repopulate();
self.state.step();
}
pub fn is_finished(&self) -> bool {
self.state.is_finished()
}
pub fn serialize_state(&self) -> protocol::State {
serializer::serialize(&self.state)
}
pub fn serialize_player_state(&self, player_id: usize) -> protocol::State {
serializer::serialize_rotated(&self.state, player_id - 1)
}
pub fn state<'a>(&'a self) -> &'a PwState {
&self.state
}
/// Execute a command
pub fn execute_command(
&mut self,
player_num: usize,
cmd: &protocol::Command
) -> Result<(), CommandError>
{
let dispatch = self.parse_command(player_num, cmd)?;
self.state.dispatch(&dispatch);
return Ok(());
}
/// Check the given command for validity.
/// If it is valid, return an internal representation of the dispatch
/// described by the command.
pub fn parse_command(&self, player_id: usize, cmd: &protocol::Command)
-> Result<Dispatch, CommandError>
{
let origin_id = *self
.planet_map
.get(&cmd.origin)
.ok_or(CommandError::OriginDoesNotExist)?;
let target_id = *self
.planet_map
.get(&cmd.destination)
.ok_or(CommandError::DestinationDoesNotExist)?;
if self.state.planets[origin_id].owner() != Some(player_id - 1) {
println!("owner was {:?}", self.state.planets[origin_id].owner());
return Err(CommandError::OriginNotOwned);
}
if self.state.planets[origin_id].ship_count() < cmd.ship_count {
return Err(CommandError::NotEnoughShips);
}
if cmd.ship_count == 0 {
return Err(CommandError::ZeroShipMove);
}
Ok(Dispatch {
origin: origin_id,
target: target_id,
ship_count: cmd.ship_count,
})
}
/// Execute a dispatch.
/// This assumes the dispatch is valid. You should check this yourself
/// or use `parse_command` to obtain a valid dispatch.
pub fn execute_dispatch(&mut self, dispatch: &Dispatch) {
self.state.dispatch(dispatch);
}
} |
use web_sys::{WebGl2RenderingContext};
/// Buffer usage hints corresponding to the WebGL2 `*_DRAW` constants.
#[derive(Clone, Copy, Debug)]
pub enum DrawKind {
    StaticDraw,
    DynamicDraw,
    StreamDraw,
}
/// Anything that can be lowered to a WebGL2 usage-hint constant.
pub trait HasDrawKind {
    fn draw_kind_constant(&self) -> u32;
}
impl HasDrawKind for DrawKind {
    fn draw_kind_constant(&self) -> u32 {
        match *self {
            DrawKind::StreamDraw => WebGl2RenderingContext::STREAM_DRAW,
            DrawKind::DynamicDraw => WebGl2RenderingContext::DYNAMIC_DRAW,
            DrawKind::StaticDraw => WebGl2RenderingContext::STATIC_DRAW,
        }
    }
}
/// Primitive topologies accepted by `drawArrays`/`drawElements`.
#[derive(Clone, Copy, Debug)]
pub enum DrawArrayKind {
    Points,
    LineStrip,
    LineLoop,
    Lines,
    TriangleStrip,
    TriangleFan,
    Triangles,
}
/// Anything that can be lowered to a WebGL2 primitive-topology constant.
pub trait HasDrawArrayKind {
    fn draw_array_kind_constant(&self) -> u32;
}
impl HasDrawArrayKind for DrawArrayKind {
    fn draw_array_kind_constant(&self) -> u32 {
        match *self {
            DrawArrayKind::Triangles => WebGl2RenderingContext::TRIANGLES,
            DrawArrayKind::TriangleFan => WebGl2RenderingContext::TRIANGLE_FAN,
            DrawArrayKind::TriangleStrip => WebGl2RenderingContext::TRIANGLE_STRIP,
            DrawArrayKind::Lines => WebGl2RenderingContext::LINES,
            DrawArrayKind::LineLoop => WebGl2RenderingContext::LINE_LOOP,
            DrawArrayKind::LineStrip => WebGl2RenderingContext::LINE_STRIP,
            DrawArrayKind::Points => WebGl2RenderingContext::POINTS,
        }
    }
}
/// Bit-plane selectors for `clear`, matching the `*_BUFFER_BIT` constants.
#[derive(Clone, Copy, Debug)]
pub enum ClearMask {
    ColorBufferBit,
    DepthBufferBit,
    StencilBufferBit,
}
/// Anything that can be lowered to a WebGL2 clear-mask constant.
pub trait HasClearMaskKind {
    fn clear_mask_constant(&self) -> u32;
}
impl HasClearMaskKind for ClearMask {
    fn clear_mask_constant(&self) -> u32 {
        match *self {
            ClearMask::StencilBufferBit => WebGl2RenderingContext::STENCIL_BUFFER_BIT,
            ClearMask::DepthBufferBit => WebGl2RenderingContext::DEPTH_BUFFER_BIT,
            ClearMask::ColorBufferBit => WebGl2RenderingContext::COLOR_BUFFER_BIT,
        }
    }
}
/// Buffer bind targets for `bindBuffer`/`bufferData`.
#[derive(Clone, Copy, Debug)]
pub enum BufferKind {
    /// Vertex attribute data (`gl.ARRAY_BUFFER`).
    ArrayBuffer,
    /// Element index data (`gl.ELEMENT_ARRAY_BUFFER`).
    ElementBuffer,
}
/// Anything that can be lowered to a WebGL2 buffer bind-target constant.
pub trait HasBufferKind {
    fn buffer_kind_constant(&self) -> u32;
}
impl HasBufferKind for BufferKind {
    fn buffer_kind_constant(&self) -> u32 {
        match self {
            BufferKind::ArrayBuffer => WebGl2RenderingContext::ARRAY_BUFFER,
            // Fixed: was ELEMENT_ARRAY_BUFFER_BINDING, which is the
            // getParameter() query name for the *current* binding, not a
            // valid bind target — using it here makes every index-buffer
            // bind fail with INVALID_ENUM.
            BufferKind::ElementBuffer => WebGl2RenderingContext::ELEMENT_ARRAY_BUFFER,
        }
    }
}
/// Component data types for vertex attribute pointers.
#[derive(Clone, Copy, Debug)]
pub enum ViewPrecision {
    Byte,
    Short,
    UnsignedByte,
    UnsignedShort,
    Float,
}
/// Anything that can be lowered to a WebGL2 component-type constant.
pub trait HasViewPrecision {
    fn view_precision_constant(&self) -> u32;
}
impl HasViewPrecision for ViewPrecision {
    fn view_precision_constant(&self) -> u32 {
        match *self {
            ViewPrecision::Float => WebGl2RenderingContext::FLOAT,
            ViewPrecision::UnsignedShort => WebGl2RenderingContext::UNSIGNED_SHORT,
            ViewPrecision::UnsignedByte => WebGl2RenderingContext::UNSIGNED_BYTE,
            ViewPrecision::Short => WebGl2RenderingContext::SHORT,
            ViewPrecision::Byte => WebGl2RenderingContext::BYTE,
        }
    }
}
#[derive(Clone, Copy, Debug)]
pub enum BlendFuncFactor {
    /**
     * Multiplies all colors by 0.
     */
    Zero,
    /**
     * Multiplies all colors by 1.
     */
    One,
    /**
     * Multiplies all colors by the source colors.
     */
    SrcColor,
    /**
     * Multiplies all colors by 1 minus each source color.
     */
    OneMinusSrcColor,
    /**
     * Multiplies all colors by the destination color.
     */
    DstColor,
    /**
     * Multiplies all colors by 1 minus each destination color.
     */
    OneMinusDstColor,
    /**
     * Multiplies all colors by the source alpha value.
     */
    SrcAlpha,
    /**
     * Multiplies all colors by 1 minus the source alpha value.
     */
    OneMinusSrcAlpha,
    /**
     * Multiplies all colors by the destination alpha value.
     */
    DstAlpha,
    /**
     * Multiplies all colors by 1 minus the destination alpha value.
     */
    OneMinusDstAlpha,
    /**
     * Multiplies all colors by a constant color.
     */
    ConstantColor,
    /**
     * Multiplies all colors by 1 minus a constant color.
     */
    OneMinusConstantColor,
    /**
     * Multiplies all colors by a constant alpha value.
     */
    ConstantAlpha,
    /**
     * Multiplies all colors by 1 minus a constant alpha value.
     */
    OneMinusConstantAlpha,
    /**
     * Multiplies the RGB colors by the smaller of either the
     * source alpha value or the value of 1 minus the destination
     * alpha value. The alpha value is multiplied by 1.
     */
    SrcAlphaSaturate,
}
pub trait HasBlendFuncFactor {
    fn blend_func_factor_constant(&self) -> u32;
}
impl HasBlendFuncFactor for BlendFuncFactor {
    fn blend_func_factor_constant(&self) -> u32 {
        match self {
            BlendFuncFactor::One => WebGl2RenderingContext::ONE,
            BlendFuncFactor::Zero => WebGl2RenderingContext::ZERO,
            BlendFuncFactor::SrcColor => WebGl2RenderingContext::SRC_COLOR,
            BlendFuncFactor::OneMinusSrcColor => WebGl2RenderingContext::ONE_MINUS_SRC_COLOR,
            BlendFuncFactor::DstColor => WebGl2RenderingContext::DST_COLOR,
            BlendFuncFactor::OneMinusDstColor => WebGl2RenderingContext::ONE_MINUS_DST_COLOR,
            BlendFuncFactor::SrcAlpha => WebGl2RenderingContext::SRC_ALPHA,
            BlendFuncFactor::OneMinusSrcAlpha => WebGl2RenderingContext::ONE_MINUS_SRC_ALPHA,
            BlendFuncFactor::DstAlpha => WebGl2RenderingContext::DST_ALPHA,
            BlendFuncFactor::OneMinusDstAlpha => WebGl2RenderingContext::ONE_MINUS_DST_ALPHA,
            BlendFuncFactor::ConstantColor => WebGl2RenderingContext::CONSTANT_COLOR,
            BlendFuncFactor::OneMinusConstantColor => WebGl2RenderingContext::ONE_MINUS_CONSTANT_COLOR,
            BlendFuncFactor::ConstantAlpha => WebGl2RenderingContext::CONSTANT_ALPHA,
            BlendFuncFactor::OneMinusConstantAlpha => WebGl2RenderingContext::ONE_MINUS_CONSTANT_ALPHA,
            BlendFuncFactor::SrcAlphaSaturate => WebGl2RenderingContext::SRC_ALPHA_SATURATE,
        }
    }
}
|
use util::*;
const LEN: usize = 25;
/// Advent-of-Code day 9 style puzzle: part 1 finds the first number that is
/// not the sum of two of the previous `LEN` numbers; part 2 finds a
/// contiguous range summing to that target and prints min+max of the range.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let timer = Timer::new();
    let input = input::lines::<usize>(&std::env::args().nth(1).unwrap());
    let mut target = 0;
    // Part 1: for each number, two-pointer search over the sorted
    // trailing window of LEN values.
    for (i, n) in input.iter().enumerate().skip(LEN) {
        // skip(LEN) guarantees i >= LEN, so the window is always full
        // (the original's `if i < LEN` branch was dead code).
        let mut window: Vec<usize> = input[i - LEN..i].to_vec();
        window.sort_unstable();
        let mut lo = 0;
        let mut hi = window.len() - 1;
        let mut found = false;
        while lo != hi {
            let sum = window[lo] + window[hi];
            if sum > *n {
                hi -= 1;
            } else if sum < *n {
                lo += 1;
            } else {
                found = true;
                break;
            }
        }
        if !found {
            timer.print();
            println!("{}", n);
            target = *n;
            break;
        }
    }
    let timer = Timer::new();
    // Part 2: keep a running sum instead of re-summing the slice for
    // every end index (the original recomputed each sum, O(n^3) overall).
    'outer: for start in 0..input.len() {
        let mut sum = input[start];
        for end in start + 1..input.len() {
            sum += input[end];
            if sum == target {
                let range = &input[start..=end];
                let min = range.iter().min().unwrap();
                let max = range.iter().max().unwrap();
                timer.print();
                println!("{}", min + max);
                break 'outer;
            }
            if sum > target {
                // Values are unsigned, so the sum can only grow;
                // extending this range further cannot match.
                break;
            }
        }
    }
    Ok(())
}
|
#[doc = "Register `APBSMENR2` reader"]
pub type R = crate::R<APBSMENR2_SPEC>;
#[doc = "Register `APBSMENR2` writer"]
pub type W = crate::W<APBSMENR2_SPEC>;
#[doc = "Field `SYSCFGSMEN` reader - SYSCFG, COMP and VREFBUF clock enable during Sleep mode"]
pub type SYSCFGSMEN_R = crate::BitReader;
#[doc = "Field `SYSCFGSMEN` writer - SYSCFG, COMP and VREFBUF clock enable during Sleep mode"]
pub type SYSCFGSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM1SMEN` reader - TIM1 timer clock enable during Sleep mode"]
pub type TIM1SMEN_R = crate::BitReader;
#[doc = "Field `TIM1SMEN` writer - TIM1 timer clock enable during Sleep mode"]
pub type TIM1SMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI1SMEN` reader - SPI1 clock enable during Sleep mode"]
pub type SPI1SMEN_R = crate::BitReader;
#[doc = "Field `SPI1SMEN` writer - SPI1 clock enable during Sleep mode"]
pub type SPI1SMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART1SMEN` reader - USART1 clock enable during Sleep mode"]
pub type USART1SMEN_R = crate::BitReader;
#[doc = "Field `USART1SMEN` writer - USART1 clock enable during Sleep mode"]
pub type USART1SMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM14SMEN` reader - TIM14 timer clock enable during Sleep mode"]
pub type TIM14SMEN_R = crate::BitReader;
#[doc = "Field `TIM14SMEN` writer - TIM14 timer clock enable during Sleep mode"]
pub type TIM14SMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM16SMEN` reader - TIM16 timer clock enable during Sleep mode"]
pub type TIM16SMEN_R = crate::BitReader;
#[doc = "Field `TIM16SMEN` writer - TIM16 timer clock enable during Sleep mode"]
pub type TIM16SMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM17SMEN` reader - TIM17 timer clock enable during Sleep mode"]
pub type TIM17SMEN_R = crate::BitReader;
#[doc = "Field `TIM17SMEN` writer - TIM17 timer clock enable during Sleep mode"]
pub type TIM17SMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ADCSMEN` reader - ADC clock enable during Sleep mode"]
pub type ADCSMEN_R = crate::BitReader;
#[doc = "Field `ADCSMEN` writer - ADC clock enable during Sleep mode"]
pub type ADCSMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - SYSCFG, COMP and VREFBUF clock enable during Sleep mode"]
    #[inline(always)]
    pub fn syscfgsmen(&self) -> SYSCFGSMEN_R {
        SYSCFGSMEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 11 - TIM1 timer clock enable during Sleep mode"]
    #[inline(always)]
    pub fn tim1smen(&self) -> TIM1SMEN_R {
        TIM1SMEN_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - SPI1 clock enable during Sleep mode"]
    #[inline(always)]
    pub fn spi1smen(&self) -> SPI1SMEN_R {
        SPI1SMEN_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - USART1 clock enable during Sleep mode"]
    #[inline(always)]
    pub fn usart1smen(&self) -> USART1SMEN_R {
        USART1SMEN_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - TIM14 timer clock enable during Sleep mode"]
    #[inline(always)]
    pub fn tim14smen(&self) -> TIM14SMEN_R {
        TIM14SMEN_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 17 - TIM16 timer clock enable during Sleep mode"]
    #[inline(always)]
    pub fn tim16smen(&self) -> TIM16SMEN_R {
        TIM16SMEN_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - TIM17 timer clock enable during Sleep mode"]
    #[inline(always)]
    pub fn tim17smen(&self) -> TIM17SMEN_R {
        TIM17SMEN_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 20 - ADC clock enable during Sleep mode"]
    #[inline(always)]
    pub fn adcsmen(&self) -> ADCSMEN_R {
        ADCSMEN_R::new(((self.bits >> 20) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - SYSCFG, COMP and VREFBUF clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn syscfgsmen(&mut self) -> SYSCFGSMEN_W<APBSMENR2_SPEC, 0> {
        SYSCFGSMEN_W::new(self)
    }
    #[doc = "Bit 11 - TIM1 timer clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn tim1smen(&mut self) -> TIM1SMEN_W<APBSMENR2_SPEC, 11> {
        TIM1SMEN_W::new(self)
    }
    #[doc = "Bit 12 - SPI1 clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn spi1smen(&mut self) -> SPI1SMEN_W<APBSMENR2_SPEC, 12> {
        SPI1SMEN_W::new(self)
    }
    #[doc = "Bit 14 - USART1 clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn usart1smen(&mut self) -> USART1SMEN_W<APBSMENR2_SPEC, 14> {
        USART1SMEN_W::new(self)
    }
    #[doc = "Bit 15 - TIM14 timer clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn tim14smen(&mut self) -> TIM14SMEN_W<APBSMENR2_SPEC, 15> {
        TIM14SMEN_W::new(self)
    }
    #[doc = "Bit 17 - TIM16 timer clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn tim16smen(&mut self) -> TIM16SMEN_W<APBSMENR2_SPEC, 17> {
        TIM16SMEN_W::new(self)
    }
    #[doc = "Bit 18 - TIM17 timer clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn tim17smen(&mut self) -> TIM17SMEN_W<APBSMENR2_SPEC, 18> {
        TIM17SMEN_W::new(self)
    }
    #[doc = "Bit 20 - ADC clock enable during Sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn adcsmen(&mut self) -> ADCSMEN_W<APBSMENR2_SPEC, 20> {
        ADCSMEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "APB peripheral clock enable in Sleep mode register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apbsmenr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apbsmenr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APBSMENR2_SPEC;
impl crate::RegisterSpec for APBSMENR2_SPEC {
    // The register is accessed as a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`apbsmenr2::R`](R) reader structure"]
impl crate::Readable for APBSMENR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apbsmenr2::W`](W) writer structure"]
impl crate::Writable for APBSMENR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APBSMENR2 to value 0"]
impl crate::Resettable for APBSMENR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// Prints the lengths of two fixed arrays.
fn main() {
    let flags = [true, false];
    let numbers = [1, 2, 3, 4, 5];
    println!("{}, {}.", flags.len(), numbers.len());
}
|
use engine::Registry;
// Generates a module-level `register` function for an output component:
// `$name` is the component's registry key and `$ctor` is a block that
// produces the boxed `Output` instance.
// NOTE(review): this uses pre-1.0 `macro_rules!` syntax (parenthesised
// body, no trailing semicolon) and `Box<Output+'a>` trait-object syntax —
// it only compiles on a very old Rust toolchain.
macro_rules! declare_output(
    ($name:expr $ctor:block) => (
        pub fn register(r: &mut Registry) {
            fn make_it<'a>() -> Box<Output+'a> $ctor
            debug!("  registering {} component", $name);
            r.add_output($name.to_string(), make_it);
        }
    )
)
mod stdout;
/// Registers every output component in this module tree with the engine
/// registry. Currently only `stdout` is wired up.
pub fn register(r: &mut Registry) {
    debug!("Registering output components");
    stdout::register(r);
}
use crate::endpoint;
use crate::list::model::{RequestParameters, Response};
/// Fetches the list endpoint with the given query parameters and
/// deserializes the response body into a `Response`.
///
/// The upstream service returns structurally broken JSON, so the raw
/// body text is patched with literal string replacements before parsing.
pub async fn get(query: &RequestParameters) -> Result<Response, Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();
    let resp = client.get(endpoint::LIST).query(query).send().await?;
    let text: String = resp.text().await?;
    // --- Patch up the response because its JSON format is malformed ---
    // Unwrap the extra object layer around each `giteki` entry.
    let text = text.replace(r#""giteki":[{"#, r#""giteki":["#);
    let text = text.replace(r#""gitekiInfo":{"#, "{");
    let text = text.replace("}},{{", "},{");
    let text = text.replace("}}]", "}]");
    // --- End of malformed-JSON workaround ---
    Ok(serde_json::from_str(text.as_str())?)
}
// Integration test: performs a real request against the live API, so it
// needs network access and a reachable service to pass.
#[tokio::test]
async fn get_test() {
    let mut req_params = RequestParameters::new();
    // Query a known device registration so the result set is non-empty.
    req_params.set_nam("google");
    req_params.set_num("003-180123");
    req_params.set_tn("g013d");
    req_params.set_dc(1);
    let resp: Response = get(&req_params).await.unwrap();
    assert!(resp.giteki_information.last_update_date.is_ascii());
    assert!(resp.giteki_information.total_count > 0);
    assert!(resp.giteki.len() > 0);
}
|
/// NO. 8: String to Integer (atoi)
pub struct Solution;
// ----- submission codes start here -----
impl Solution {
    /// Parses a leading integer from `s`, atoi style: skip leading
    /// whitespace, accept one optional sign, read digits until the first
    /// non-digit, and clamp to `i32::MAX`/`i32::MIN` on overflow.
    pub fn my_atoi(s: String) -> i32 {
        let mut chars = s.chars().skip_while(|c| c.is_whitespace()).peekable();
        // Consume an optional explicit sign.
        let sign = match chars.peek() {
            Some('+') => {
                chars.next();
                1
            }
            Some('-') => {
                chars.next();
                -1
            }
            _ => 1,
        };
        // Accumulate ASCII digits with checked arithmetic; `try_fold`
        // short-circuits on the first overflow, in which case we clamp to
        // the i32 limit matching the sign.
        //
        // Fixed: the original used `is_numeric()`, which also accepts
        // non-ASCII digits (e.g. '٣') for which `to_digit(10)` returns
        // `None`, so the `unwrap()` below would panic.
        chars
            .take_while(|c| c.is_ascii_digit())
            .try_fold(0i32, |acc, c| {
                acc.checked_mul(10)
                    .and_then(|acc| acc.checked_add(c.to_digit(10).unwrap() as i32))
            })
            .map(|v| v * sign)
            .unwrap_or(if sign > 0 { i32::MAX } else { i32::MIN })
    }
}
// ----- submission codes end here -----
#[cfg(test)]
mod tests {
    use super::*;
    // Covers the four examples from the problem statement: plain digits,
    // leading whitespace with a sign, trailing garbage, and input that
    // does not start with a number.
    #[test]
    fn test() {
        assert_eq!(Solution::my_atoi("42".to_string()), 42);
        assert_eq!(Solution::my_atoi(" -42".to_string()), -42);
        assert_eq!(Solution::my_atoi("4193 with words".to_string()), 4193);
        assert_eq!(Solution::my_atoi("words and 987".to_string()), 0);
    }
}
|
// The ~engine~ (the thing that handles rules & stuff)
pub mod board;
pub mod game;
pub mod movement;
pub mod piece;
pub mod player;
pub mod rule;
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot taken from the register.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value that will be written back to the register.
    bits: u32,
}
impl super::PADREGK {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed the writer with the current value so
        // fields the closure does not touch are preserved.
        let bits = self.register.get();
        let r = R { bits };
        let mut w = W { bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value, so
        // fields not set by the closure end up at their reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
/// Possible values of the field `PAD43RSEL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD43RSELR {
    /// Pullup is ~1.5 KOhms value.
    PULL1_5K,
    /// Pullup is ~6 KOhms value.
    PULL6K,
    /// Pullup is ~12 KOhms value.
    PULL12K,
    /// Pullup is ~24 KOhms value.
    PULL24K,
}
impl PAD43RSELR {
    /// Value of the field as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            PAD43RSELR::PULL1_5K => 0,
            PAD43RSELR::PULL6K => 1,
            PAD43RSELR::PULL12K => 2,
            PAD43RSELR::PULL24K => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD43RSELR {
        match value {
            0 => PAD43RSELR::PULL1_5K,
            1 => PAD43RSELR::PULL6K,
            2 => PAD43RSELR::PULL12K,
            3 => PAD43RSELR::PULL24K,
            _ => unreachable!(),
        }
    }
    /// Checks if the value of the field is `PULL1_5K`
    #[inline]
    pub fn is_pull1_5k(&self) -> bool {
        matches!(self, PAD43RSELR::PULL1_5K)
    }
    /// Checks if the value of the field is `PULL6K`
    #[inline]
    pub fn is_pull6k(&self) -> bool {
        matches!(self, PAD43RSELR::PULL6K)
    }
    /// Checks if the value of the field is `PULL12K`
    #[inline]
    pub fn is_pull12k(&self) -> bool {
        matches!(self, PAD43RSELR::PULL12K)
    }
    /// Checks if the value of the field is `PULL24K`
    #[inline]
    pub fn is_pull24k(&self) -> bool {
        matches!(self, PAD43RSELR::PULL24K)
    }
}
/// Possible values of the field `PAD43FNCSEL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD43FNCSELR {
    /// Configure as the UART1 RX input signal value.
    UART1RX,
    /// IOM/MSPI nCE group 43 value.
    NCE43,
    /// CTIMER connection 18 value.
    CT18,
    /// Configure as GPIO43 value.
    GPIO43,
    /// Configure as the IOMSTR3 I2C SDA or SPI WIR3 signal value.
    M3SDAWIR3,
    /// Configure as the IOMSTR3 SPI MISO signal value.
    M3MISO,
    /// Reserved
    _Reserved(u8),
}
impl PAD43FNCSELR {
    /// Value of the field as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            PAD43FNCSELR::UART1RX => 0,
            PAD43FNCSELR::NCE43 => 1,
            PAD43FNCSELR::CT18 => 2,
            PAD43FNCSELR::GPIO43 => 3,
            PAD43FNCSELR::M3SDAWIR3 => 4,
            PAD43FNCSELR::M3MISO => 5,
            PAD43FNCSELR::_Reserved(bits) => *bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD43FNCSELR {
        match value {
            0 => PAD43FNCSELR::UART1RX,
            1 => PAD43FNCSELR::NCE43,
            2 => PAD43FNCSELR::CT18,
            3 => PAD43FNCSELR::GPIO43,
            4 => PAD43FNCSELR::M3SDAWIR3,
            5 => PAD43FNCSELR::M3MISO,
            other => PAD43FNCSELR::_Reserved(other),
        }
    }
    /// Checks if the value of the field is `UART1RX`
    #[inline]
    pub fn is_uart1rx(&self) -> bool {
        matches!(self, PAD43FNCSELR::UART1RX)
    }
    /// Checks if the value of the field is `NCE43`
    #[inline]
    pub fn is_nce43(&self) -> bool {
        matches!(self, PAD43FNCSELR::NCE43)
    }
    /// Checks if the value of the field is `CT18`
    #[inline]
    pub fn is_ct18(&self) -> bool {
        matches!(self, PAD43FNCSELR::CT18)
    }
    /// Checks if the value of the field is `GPIO43`
    #[inline]
    pub fn is_gpio43(&self) -> bool {
        matches!(self, PAD43FNCSELR::GPIO43)
    }
    /// Checks if the value of the field is `M3SDAWIR3`
    #[inline]
    pub fn is_m3sdawir3(&self) -> bool {
        matches!(self, PAD43FNCSELR::M3SDAWIR3)
    }
    /// Checks if the value of the field is `M3MISO`
    #[inline]
    pub fn is_m3miso(&self) -> bool {
        matches!(self, PAD43FNCSELR::M3MISO)
    }
}
/// Possible values of the field `PAD43STRNG`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD43STRNGR {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD43STRNGR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD43STRNGR::HIGH)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD43STRNGR {
        if value { PAD43STRNGR::HIGH } else { PAD43STRNGR::LOW }
    }
    /// Checks if the value of the field is `LOW`
    #[inline]
    pub fn is_low(&self) -> bool {
        matches!(self, PAD43STRNGR::LOW)
    }
    /// Checks if the value of the field is `HIGH`
    #[inline]
    pub fn is_high(&self) -> bool {
        matches!(self, PAD43STRNGR::HIGH)
    }
}
/// Possible values of the field `PAD43INPEN`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD43INPENR {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD43INPENR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD43INPENR::EN)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD43INPENR {
        if value { PAD43INPENR::EN } else { PAD43INPENR::DIS }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(self, PAD43INPENR::DIS)
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(self, PAD43INPENR::EN)
    }
}
/// Possible values of the field `PAD43PULL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD43PULLR {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD43PULLR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD43PULLR::EN)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD43PULLR {
        if value { PAD43PULLR::EN } else { PAD43PULLR::DIS }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(self, PAD43PULLR::DIS)
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(self, PAD43PULLR::EN)
    }
}
/// Possible values of the field `PAD42RSEL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD42RSELR {
    /// Pullup is ~1.5 KOhms value.
    PULL1_5K,
    /// Pullup is ~6 KOhms value.
    PULL6K,
    /// Pullup is ~12 KOhms value.
    PULL12K,
    /// Pullup is ~24 KOhms value.
    PULL24K,
}
impl PAD42RSELR {
    /// Value of the field as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            PAD42RSELR::PULL1_5K => 0,
            PAD42RSELR::PULL6K => 1,
            PAD42RSELR::PULL12K => 2,
            PAD42RSELR::PULL24K => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD42RSELR {
        match value {
            0 => PAD42RSELR::PULL1_5K,
            1 => PAD42RSELR::PULL6K,
            2 => PAD42RSELR::PULL12K,
            3 => PAD42RSELR::PULL24K,
            _ => unreachable!(),
        }
    }
    /// Checks if the value of the field is `PULL1_5K`
    #[inline]
    pub fn is_pull1_5k(&self) -> bool {
        matches!(self, PAD42RSELR::PULL1_5K)
    }
    /// Checks if the value of the field is `PULL6K`
    #[inline]
    pub fn is_pull6k(&self) -> bool {
        matches!(self, PAD42RSELR::PULL6K)
    }
    /// Checks if the value of the field is `PULL12K`
    #[inline]
    pub fn is_pull12k(&self) -> bool {
        matches!(self, PAD42RSELR::PULL12K)
    }
    /// Checks if the value of the field is `PULL24K`
    #[inline]
    pub fn is_pull24k(&self) -> bool {
        matches!(self, PAD42RSELR::PULL24K)
    }
}
/// Possible values of the field `PAD42FNCSEL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD42FNCSELR {
    /// Configure as the UART1 TX output signal value.
    UART1TX,
    /// IOM/MSPI nCE group 42 value.
    NCE42,
    /// CTIMER connection 16 value.
    CT16,
    /// Configure as GPIO42 value.
    GPIO42,
    /// Configure as the IOMSTR3 I2C SCL clock I/O signal value.
    M3SCL,
    /// Configure as the IOMSTR3 SPI SCK output value.
    M3SCK,
    /// Reserved
    _Reserved(u8),
}
impl PAD42FNCSELR {
    /// Value of the field as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            PAD42FNCSELR::UART1TX => 0,
            PAD42FNCSELR::NCE42 => 1,
            PAD42FNCSELR::CT16 => 2,
            PAD42FNCSELR::GPIO42 => 3,
            PAD42FNCSELR::M3SCL => 4,
            PAD42FNCSELR::M3SCK => 5,
            PAD42FNCSELR::_Reserved(bits) => *bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD42FNCSELR {
        match value {
            0 => PAD42FNCSELR::UART1TX,
            1 => PAD42FNCSELR::NCE42,
            2 => PAD42FNCSELR::CT16,
            3 => PAD42FNCSELR::GPIO42,
            4 => PAD42FNCSELR::M3SCL,
            5 => PAD42FNCSELR::M3SCK,
            other => PAD42FNCSELR::_Reserved(other),
        }
    }
    /// Checks if the value of the field is `UART1TX`
    #[inline]
    pub fn is_uart1tx(&self) -> bool {
        matches!(self, PAD42FNCSELR::UART1TX)
    }
    /// Checks if the value of the field is `NCE42`
    #[inline]
    pub fn is_nce42(&self) -> bool {
        matches!(self, PAD42FNCSELR::NCE42)
    }
    /// Checks if the value of the field is `CT16`
    #[inline]
    pub fn is_ct16(&self) -> bool {
        matches!(self, PAD42FNCSELR::CT16)
    }
    /// Checks if the value of the field is `GPIO42`
    #[inline]
    pub fn is_gpio42(&self) -> bool {
        matches!(self, PAD42FNCSELR::GPIO42)
    }
    /// Checks if the value of the field is `M3SCL`
    #[inline]
    pub fn is_m3scl(&self) -> bool {
        matches!(self, PAD42FNCSELR::M3SCL)
    }
    /// Checks if the value of the field is `M3SCK`
    #[inline]
    pub fn is_m3sck(&self) -> bool {
        matches!(self, PAD42FNCSELR::M3SCK)
    }
}
/// Possible values of the field `PAD42STRNG`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD42STRNGR {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD42STRNGR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD42STRNGR::HIGH)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD42STRNGR {
        if value { PAD42STRNGR::HIGH } else { PAD42STRNGR::LOW }
    }
    /// Checks if the value of the field is `LOW`
    #[inline]
    pub fn is_low(&self) -> bool {
        matches!(self, PAD42STRNGR::LOW)
    }
    /// Checks if the value of the field is `HIGH`
    #[inline]
    pub fn is_high(&self) -> bool {
        matches!(self, PAD42STRNGR::HIGH)
    }
}
/// Possible values of the field `PAD42INPEN`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD42INPENR {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD42INPENR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD42INPENR::EN)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD42INPENR {
        if value { PAD42INPENR::EN } else { PAD42INPENR::DIS }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(self, PAD42INPENR::DIS)
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(self, PAD42INPENR::EN)
    }
}
/// Possible values of the field `PAD42PULL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD42PULLR {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD42PULLR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD42PULLR::EN)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD42PULLR {
        if value { PAD42PULLR::EN } else { PAD42PULLR::DIS }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(self, PAD42PULLR::DIS)
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(self, PAD42PULLR::EN)
    }
}
/// Possible values of the field `PAD41PWRDN`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD41PWRDNR {
    /// Power switch disabled value.
    DIS,
    /// Power switch enabled (Switch pad to VSS) value.
    EN,
}
impl PAD41PWRDNR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(self, PAD41PWRDNR::EN)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD41PWRDNR {
        if value { PAD41PWRDNR::EN } else { PAD41PWRDNR::DIS }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(self, PAD41PWRDNR::DIS)
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(self, PAD41PWRDNR::EN)
    }
}
/// Possible values of the field `PAD41FNCSEL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD41FNCSELR {
    /// IOM/MSPI nCE group 41 value.
    NCE41,
    /// Configure as the serial wire debug SWO signal value.
    SWO,
    /// Configure as GPIO41 value.
    GPIO41,
    /// I2S word clock input value.
    I2SWCLK,
    /// Configure as the UART1 RTS output signal value.
    UA1RTS,
    /// Configure as the UART0 TX output signal value.
    UART0TX,
    /// Configure as the UART0 RTS output signal value.
    UA0RTS,
    /// Reserved
    _Reserved(u8),
}
impl PAD41FNCSELR {
    /// Value of the field as raw bits
    // Note the gap at 1: that encoding is reserved for this pad.
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            PAD41FNCSELR::NCE41 => 0,
            PAD41FNCSELR::SWO => 2,
            PAD41FNCSELR::GPIO41 => 3,
            PAD41FNCSELR::I2SWCLK => 4,
            PAD41FNCSELR::UA1RTS => 5,
            PAD41FNCSELR::UART0TX => 6,
            PAD41FNCSELR::UA0RTS => 7,
            PAD41FNCSELR::_Reserved(bits) => *bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD41FNCSELR {
        match value {
            0 => PAD41FNCSELR::NCE41,
            2 => PAD41FNCSELR::SWO,
            3 => PAD41FNCSELR::GPIO41,
            4 => PAD41FNCSELR::I2SWCLK,
            5 => PAD41FNCSELR::UA1RTS,
            6 => PAD41FNCSELR::UART0TX,
            7 => PAD41FNCSELR::UA0RTS,
            other => PAD41FNCSELR::_Reserved(other),
        }
    }
    /// Checks if the value of the field is `NCE41`
    #[inline]
    pub fn is_nce41(&self) -> bool {
        matches!(self, PAD41FNCSELR::NCE41)
    }
    /// Checks if the value of the field is `SWO`
    #[inline]
    pub fn is_swo(&self) -> bool {
        matches!(self, PAD41FNCSELR::SWO)
    }
    /// Checks if the value of the field is `GPIO41`
    #[inline]
    pub fn is_gpio41(&self) -> bool {
        matches!(self, PAD41FNCSELR::GPIO41)
    }
    /// Checks if the value of the field is `I2SWCLK`
    #[inline]
    pub fn is_i2swclk(&self) -> bool {
        matches!(self, PAD41FNCSELR::I2SWCLK)
    }
    /// Checks if the value of the field is `UA1RTS`
    #[inline]
    pub fn is_ua1rts(&self) -> bool {
        matches!(self, PAD41FNCSELR::UA1RTS)
    }
    /// Checks if the value of the field is `UART0TX`
    #[inline]
    pub fn is_uart0tx(&self) -> bool {
        matches!(self, PAD41FNCSELR::UART0TX)
    }
    /// Checks if the value of the field is `UA0RTS`
    #[inline]
    pub fn is_ua0rts(&self) -> bool {
        matches!(self, PAD41FNCSELR::UA0RTS)
    }
}
#[doc = "Possible values of the field `PAD41STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD41STRNGR {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD41STRNGR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // `HIGH` is the set (1) encoding; `LOW` is the cleared (0) encoding.
        *self == PAD41STRNGR::HIGH
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD41STRNGR {
        if value {
            PAD41STRNGR::HIGH
        } else {
            PAD41STRNGR::LOW
        }
    }
    #[doc = "Checks if the value of the field is `LOW`"]
    #[inline]
    pub fn is_low(&self) -> bool {
        *self == PAD41STRNGR::LOW
    }
    #[doc = "Checks if the value of the field is `HIGH`"]
    #[inline]
    pub fn is_high(&self) -> bool {
        *self == PAD41STRNGR::HIGH
    }
}
#[doc = "Possible values of the field `PAD41INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD41INPENR {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD41INPENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // `EN` is the set (1) encoding; `DIS` is the cleared (0) encoding.
        *self == PAD41INPENR::EN
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD41INPENR {
        if value {
            PAD41INPENR::EN
        } else {
            PAD41INPENR::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        *self == PAD41INPENR::DIS
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        *self == PAD41INPENR::EN
    }
}
#[doc = "Possible values of the field `PAD41PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD41PULLR {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD41PULLR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // `EN` is the set (1) encoding; `DIS` is the cleared (0) encoding.
        *self == PAD41PULLR::EN
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD41PULLR {
        if value {
            PAD41PULLR::EN
        } else {
            PAD41PULLR::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        *self == PAD41PULLR::DIS
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        *self == PAD41PULLR::EN
    }
}
#[doc = "Possible values of the field `PAD40RSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD40RSELR {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD40RSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            PAD40RSELR::PULL1_5K => 0,
            PAD40RSELR::PULL6K => 1,
            PAD40RSELR::PULL12K => 2,
            PAD40RSELR::PULL24K => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD40RSELR {
        match value {
            0 => PAD40RSELR::PULL1_5K,
            1 => PAD40RSELR::PULL6K,
            2 => PAD40RSELR::PULL12K,
            3 => PAD40RSELR::PULL24K,
            // The field is 2 bits wide, so callers can never pass a value > 3.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `PULL1_5K`"]
    #[inline]
    pub fn is_pull1_5k(&self) -> bool {
        *self == PAD40RSELR::PULL1_5K
    }
    #[doc = "Checks if the value of the field is `PULL6K`"]
    #[inline]
    pub fn is_pull6k(&self) -> bool {
        *self == PAD40RSELR::PULL6K
    }
    #[doc = "Checks if the value of the field is `PULL12K`"]
    #[inline]
    pub fn is_pull12k(&self) -> bool {
        *self == PAD40RSELR::PULL12K
    }
    #[doc = "Checks if the value of the field is `PULL24K`"]
    #[inline]
    pub fn is_pull24k(&self) -> bool {
        *self == PAD40RSELR::PULL24K
    }
}
#[doc = "Possible values of the field `PAD40FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD40FNCSELR {
    #[doc = "Configure as the UART0 RX input signal value."]
    UART0RX,
    #[doc = "Configure as the UART1 RX input signal value."]
    UART1RX,
    #[doc = "Configure as the ADC Trigger 0 signal value."]
    TRIG0,
    #[doc = "Configure as GPIO40 value."]
    GPIO40,
    #[doc = "Configure as the IOMSTR4 I2C SDA or SPI WIR3 signal value."]
    M4SDAWIR3,
    #[doc = "Configure as the IOMSTR4 SPI MISO input signal value."]
    M4MISO,
    #[doc = r" Reserved"]
    _Reserved(u8),
}
impl PAD40FNCSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        // Encodings 6 and 7 are unnamed for this pad; they round-trip
        // through the `_Reserved` variant.
        match self {
            PAD40FNCSELR::UART0RX => 0,
            PAD40FNCSELR::UART1RX => 1,
            PAD40FNCSELR::TRIG0 => 2,
            PAD40FNCSELR::GPIO40 => 3,
            PAD40FNCSELR::M4SDAWIR3 => 4,
            PAD40FNCSELR::M4MISO => 5,
            PAD40FNCSELR::_Reserved(bits) => *bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> PAD40FNCSELR {
        match value {
            0 => PAD40FNCSELR::UART0RX,
            1 => PAD40FNCSELR::UART1RX,
            2 => PAD40FNCSELR::TRIG0,
            3 => PAD40FNCSELR::GPIO40,
            4 => PAD40FNCSELR::M4SDAWIR3,
            5 => PAD40FNCSELR::M4MISO,
            other => PAD40FNCSELR::_Reserved(other),
        }
    }
    #[doc = "Checks if the value of the field is `UART0RX`"]
    #[inline]
    pub fn is_uart0rx(&self) -> bool {
        *self == PAD40FNCSELR::UART0RX
    }
    #[doc = "Checks if the value of the field is `UART1RX`"]
    #[inline]
    pub fn is_uart1rx(&self) -> bool {
        *self == PAD40FNCSELR::UART1RX
    }
    #[doc = "Checks if the value of the field is `TRIG0`"]
    #[inline]
    pub fn is_trig0(&self) -> bool {
        *self == PAD40FNCSELR::TRIG0
    }
    #[doc = "Checks if the value of the field is `GPIO40`"]
    #[inline]
    pub fn is_gpio40(&self) -> bool {
        *self == PAD40FNCSELR::GPIO40
    }
    #[doc = "Checks if the value of the field is `M4SDAWIR3`"]
    #[inline]
    pub fn is_m4sdawir3(&self) -> bool {
        *self == PAD40FNCSELR::M4SDAWIR3
    }
    #[doc = "Checks if the value of the field is `M4MISO`"]
    #[inline]
    pub fn is_m4miso(&self) -> bool {
        *self == PAD40FNCSELR::M4MISO
    }
}
#[doc = "Possible values of the field `PAD40STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD40STRNGR {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD40STRNGR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // `HIGH` is the set (1) encoding; `LOW` is the cleared (0) encoding.
        *self == PAD40STRNGR::HIGH
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD40STRNGR {
        if value {
            PAD40STRNGR::HIGH
        } else {
            PAD40STRNGR::LOW
        }
    }
    #[doc = "Checks if the value of the field is `LOW`"]
    #[inline]
    pub fn is_low(&self) -> bool {
        *self == PAD40STRNGR::LOW
    }
    #[doc = "Checks if the value of the field is `HIGH`"]
    #[inline]
    pub fn is_high(&self) -> bool {
        *self == PAD40STRNGR::HIGH
    }
}
#[doc = "Possible values of the field `PAD40INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD40INPENR {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD40INPENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // `EN` is the set (1) encoding; `DIS` is the cleared (0) encoding.
        *self == PAD40INPENR::EN
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD40INPENR {
        if value {
            PAD40INPENR::EN
        } else {
            PAD40INPENR::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        *self == PAD40INPENR::DIS
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        *self == PAD40INPENR::EN
    }
}
#[doc = "Possible values of the field `PAD40PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD40PULLR {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD40PULLR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // `EN` is the set (1) encoding; `DIS` is the cleared (0) encoding.
        *self == PAD40PULLR::EN
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD40PULLR {
        if value {
            PAD40PULLR::EN
        } else {
            PAD40PULLR::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        *self == PAD40PULLR::DIS
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        *self == PAD40PULLR::EN
    }
}
#[doc = "Values that can be written to the field `PAD43RSEL`"]
pub enum PAD43RSELW {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD43RSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw register encoding for each pull-up resistor selection.
        match self {
            PAD43RSELW::PULL24K => 3,
            PAD43RSELW::PULL12K => 2,
            PAD43RSELW::PULL6K => 1,
            PAD43RSELW::PULL1_5K => 0,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the 2-bit `PAD43RSEL` field (MASK 3 at bit offset 30).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD43RSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD43RSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD43RSELW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Pullup is ~1.5 KOhms value."]
    #[inline]
    pub fn pull1_5k(self) -> &'a mut W {
        self.variant(PAD43RSELW::PULL1_5K)
    }
    #[doc = "Pullup is ~6 KOhms value."]
    #[inline]
    pub fn pull6k(self) -> &'a mut W {
        self.variant(PAD43RSELW::PULL6K)
    }
    #[doc = "Pullup is ~12 KOhms value."]
    #[inline]
    pub fn pull12k(self) -> &'a mut W {
        self.variant(PAD43RSELW::PULL12K)
    }
    #[doc = "Pullup is ~24 KOhms value."]
    #[inline]
    pub fn pull24k(self) -> &'a mut W {
        self.variant(PAD43RSELW::PULL24K)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 3;
        const OFFSET: u8 = 30;
        // Read-modify-write: clear the field bits, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD43FNCSEL`"]
pub enum PAD43FNCSELW {
    #[doc = "Configure as the UART1 RX input signal value."]
    UART1RX,
    #[doc = "IOM/MSPI nCE group 43 value."]
    NCE43,
    #[doc = "CTIMER connection 18 value."]
    CT18,
    #[doc = "Configure as GPIO43 value."]
    GPIO43,
    #[doc = "Configure as the IOMSTR3 I2C SDA or SPI WIR3 signal value."]
    M3SDAWIR3,
    #[doc = "Configure as the IOMSTR3 SPI MISO signal value."]
    M3MISO,
}
impl PAD43FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw function-select encoding written to the register.
        match self {
            PAD43FNCSELW::M3MISO => 5,
            PAD43FNCSELW::M3SDAWIR3 => 4,
            PAD43FNCSELW::GPIO43 => 3,
            PAD43FNCSELW::CT18 => 2,
            PAD43FNCSELW::NCE43 => 1,
            PAD43FNCSELW::UART1RX => 0,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the 3-bit `PAD43FNCSEL` field (MASK 7 at bit offset 27).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD43FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD43FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD43FNCSELW) -> &'a mut W {
        // `bits` is unsafe because the field has reserved encodings; enum
        // variants only produce documented values.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Configure as the UART1 RX input signal value."]
    #[inline]
    pub fn uart1rx(self) -> &'a mut W {
        self.variant(PAD43FNCSELW::UART1RX)
    }
    #[doc = "IOM/MSPI nCE group 43 value."]
    #[inline]
    pub fn nce43(self) -> &'a mut W {
        self.variant(PAD43FNCSELW::NCE43)
    }
    #[doc = "CTIMER connection 18 value."]
    #[inline]
    pub fn ct18(self) -> &'a mut W {
        self.variant(PAD43FNCSELW::CT18)
    }
    #[doc = "Configure as GPIO43 value."]
    #[inline]
    pub fn gpio43(self) -> &'a mut W {
        self.variant(PAD43FNCSELW::GPIO43)
    }
    #[doc = "Configure as the IOMSTR3 I2C SDA or SPI WIR3 signal value."]
    #[inline]
    pub fn m3sdawir3(self) -> &'a mut W {
        self.variant(PAD43FNCSELW::M3SDAWIR3)
    }
    #[doc = "Configure as the IOMSTR3 SPI MISO signal value."]
    #[inline]
    pub fn m3miso(self) -> &'a mut W {
        self.variant(PAD43FNCSELW::M3MISO)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 27;
        // Read-modify-write: clear the field bits, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD43STRNG`"]
pub enum PAD43STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD43STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `HIGH` writes 1, `LOW` writes 0.
        match self {
            PAD43STRNGW::HIGH => true,
            PAD43STRNGW::LOW => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD43STRNG` field (bit offset 26).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD43STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD43STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD43STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD43STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD43STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 26;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD43INPEN`"]
pub enum PAD43INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD43INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD43INPENW::EN => true,
            PAD43INPENW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD43INPEN` field (bit offset 25).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD43INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD43INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD43INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD43INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD43INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 25;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD43PULL`"]
pub enum PAD43PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD43PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD43PULLW::EN => true,
            PAD43PULLW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD43PULL` field (bit offset 24).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD43PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD43PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD43PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD43PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD43PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 24;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD42RSEL`"]
pub enum PAD42RSELW {
    #[doc = "Pullup is ~1.5 KOhms value."]
    PULL1_5K,
    #[doc = "Pullup is ~6 KOhms value."]
    PULL6K,
    #[doc = "Pullup is ~12 KOhms value."]
    PULL12K,
    #[doc = "Pullup is ~24 KOhms value."]
    PULL24K,
}
impl PAD42RSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw register encoding for each pull-up resistor selection.
        match self {
            PAD42RSELW::PULL24K => 3,
            PAD42RSELW::PULL12K => 2,
            PAD42RSELW::PULL6K => 1,
            PAD42RSELW::PULL1_5K => 0,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the 2-bit `PAD42RSEL` field (MASK 3 at bit offset 22).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD42RSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD42RSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD42RSELW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Pullup is ~1.5 KOhms value."]
    #[inline]
    pub fn pull1_5k(self) -> &'a mut W {
        self.variant(PAD42RSELW::PULL1_5K)
    }
    #[doc = "Pullup is ~6 KOhms value."]
    #[inline]
    pub fn pull6k(self) -> &'a mut W {
        self.variant(PAD42RSELW::PULL6K)
    }
    #[doc = "Pullup is ~12 KOhms value."]
    #[inline]
    pub fn pull12k(self) -> &'a mut W {
        self.variant(PAD42RSELW::PULL12K)
    }
    #[doc = "Pullup is ~24 KOhms value."]
    #[inline]
    pub fn pull24k(self) -> &'a mut W {
        self.variant(PAD42RSELW::PULL24K)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 3;
        const OFFSET: u8 = 22;
        // Read-modify-write: clear the field bits, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD42FNCSEL`"]
pub enum PAD42FNCSELW {
    #[doc = "Configure as the UART1 TX output signal value."]
    UART1TX,
    #[doc = "IOM/MSPI nCE group 42 value."]
    NCE42,
    #[doc = "CTIMER connection 16 value."]
    CT16,
    #[doc = "Configure as GPIO42 value."]
    GPIO42,
    #[doc = "Configure as the IOMSTR3 I2C SCL clock I/O signal value."]
    M3SCL,
    #[doc = "Configure as the IOMSTR3 SPI SCK output value."]
    M3SCK,
}
impl PAD42FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw function-select encoding written to the register.
        match self {
            PAD42FNCSELW::M3SCK => 5,
            PAD42FNCSELW::M3SCL => 4,
            PAD42FNCSELW::GPIO42 => 3,
            PAD42FNCSELW::CT16 => 2,
            PAD42FNCSELW::NCE42 => 1,
            PAD42FNCSELW::UART1TX => 0,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the 3-bit `PAD42FNCSEL` field (MASK 7 at bit offset 19).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD42FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD42FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD42FNCSELW) -> &'a mut W {
        // `bits` is unsafe because the field has reserved encodings; enum
        // variants only produce documented values.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Configure as the UART1 TX output signal value."]
    #[inline]
    pub fn uart1tx(self) -> &'a mut W {
        self.variant(PAD42FNCSELW::UART1TX)
    }
    #[doc = "IOM/MSPI nCE group 42 value."]
    #[inline]
    pub fn nce42(self) -> &'a mut W {
        self.variant(PAD42FNCSELW::NCE42)
    }
    #[doc = "CTIMER connection 16 value."]
    #[inline]
    pub fn ct16(self) -> &'a mut W {
        self.variant(PAD42FNCSELW::CT16)
    }
    #[doc = "Configure as GPIO42 value."]
    #[inline]
    pub fn gpio42(self) -> &'a mut W {
        self.variant(PAD42FNCSELW::GPIO42)
    }
    #[doc = "Configure as the IOMSTR3 I2C SCL clock I/O signal value."]
    #[inline]
    pub fn m3scl(self) -> &'a mut W {
        self.variant(PAD42FNCSELW::M3SCL)
    }
    #[doc = "Configure as the IOMSTR3 SPI SCK output value."]
    #[inline]
    pub fn m3sck(self) -> &'a mut W {
        self.variant(PAD42FNCSELW::M3SCK)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 19;
        // Read-modify-write: clear the field bits, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD42STRNG`"]
pub enum PAD42STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD42STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `HIGH` writes 1, `LOW` writes 0.
        match self {
            PAD42STRNGW::HIGH => true,
            PAD42STRNGW::LOW => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD42STRNG` field (bit offset 18).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD42STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD42STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD42STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD42STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD42STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 18;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD42INPEN`"]
pub enum PAD42INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD42INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD42INPENW::EN => true,
            PAD42INPENW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD42INPEN` field (bit offset 17).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD42INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD42INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD42INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD42INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD42INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 17;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD42PULL`"]
pub enum PAD42PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD42PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD42PULLW::EN => true,
            PAD42PULLW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD42PULL` field (bit offset 16).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD42PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD42PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD42PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD42PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD42PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 16;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD41PWRDN`"]
pub enum PAD41PWRDNW {
    #[doc = "Power switch disabled value."]
    DIS,
    #[doc = "Power switch enabled (Switch pad to VSS) value."]
    EN,
}
impl PAD41PWRDNW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD41PWRDNW::EN => true,
            PAD41PWRDNW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD41PWRDN` field (bit offset 15).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD41PWRDNW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD41PWRDNW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD41PWRDNW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Power switch disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD41PWRDNW::DIS)
    }
    #[doc = "Power switch enabled (Switch pad to VSS) value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD41PWRDNW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 15;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD41FNCSEL`"]
pub enum PAD41FNCSELW {
    #[doc = "IOM/MSPI nCE group 41 value."]
    NCE41,
    #[doc = "Configure as the serial wire debug SWO signal value."]
    SWO,
    #[doc = "Configure as GPIO41 value."]
    GPIO41,
    #[doc = "I2S word clock input value."]
    I2SWCLK,
    #[doc = "Configure as the UART1 RTS output signal value."]
    UA1RTS,
    #[doc = "Configure as the UART0 TX output signal value."]
    UART0TX,
    #[doc = "Configure as the UART0 RTS output signal value."]
    UA0RTS,
}
impl PAD41FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw function-select encoding; 1 is a reserved encoding for this
        // pad, so no variant maps to it.
        match self {
            PAD41FNCSELW::UA0RTS => 7,
            PAD41FNCSELW::UART0TX => 6,
            PAD41FNCSELW::UA1RTS => 5,
            PAD41FNCSELW::I2SWCLK => 4,
            PAD41FNCSELW::GPIO41 => 3,
            PAD41FNCSELW::SWO => 2,
            PAD41FNCSELW::NCE41 => 0,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the 3-bit `PAD41FNCSEL` field (MASK 7 at bit offset 11).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD41FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD41FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD41FNCSELW) -> &'a mut W {
        // `bits` is unsafe because the field has reserved encodings; enum
        // variants only produce documented values.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "IOM/MSPI nCE group 41 value."]
    #[inline]
    pub fn nce41(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::NCE41)
    }
    #[doc = "Configure as the serial wire debug SWO signal value."]
    #[inline]
    pub fn swo(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::SWO)
    }
    #[doc = "Configure as GPIO41 value."]
    #[inline]
    pub fn gpio41(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::GPIO41)
    }
    #[doc = "I2S word clock input value."]
    #[inline]
    pub fn i2swclk(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::I2SWCLK)
    }
    #[doc = "Configure as the UART1 RTS output signal value."]
    #[inline]
    pub fn ua1rts(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::UA1RTS)
    }
    #[doc = "Configure as the UART0 TX output signal value."]
    #[inline]
    pub fn uart0tx(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::UART0TX)
    }
    #[doc = "Configure as the UART0 RTS output signal value."]
    #[inline]
    pub fn ua0rts(self) -> &'a mut W {
        self.variant(PAD41FNCSELW::UA0RTS)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 11;
        // Read-modify-write: clear the field bits, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD41STRNG`"]
pub enum PAD41STRNGW {
    #[doc = "Low drive strength value."]
    LOW,
    #[doc = "High drive strength value."]
    HIGH,
}
impl PAD41STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `HIGH` writes 1, `LOW` writes 0.
        match self {
            PAD41STRNGW::HIGH => true,
            PAD41STRNGW::LOW => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD41STRNG` field (bit offset 10).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD41STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD41STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD41STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD41STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD41STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 10;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD41INPEN`"]
pub enum PAD41INPENW {
    #[doc = "Pad input disabled value."]
    DIS,
    #[doc = "Pad input enabled value."]
    EN,
}
impl PAD41INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD41INPENW::EN => true,
            PAD41INPENW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD41INPEN` field (bit offset 9).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD41INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD41INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD41INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD41INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD41INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 9;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD41PULL`"]
pub enum PAD41PULLW {
    #[doc = "Pullup disabled value."]
    DIS,
    #[doc = "Pullup enabled value."]
    EN,
}
impl PAD41PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // `EN` writes 1, `DIS` writes 0.
        match self {
            PAD41PULLW::EN => true,
            PAD41PULLW::DIS => false,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit `PAD41PULL` field (bit offset 8).
// Consumes itself and returns the borrowed register writer `W` for chaining.
pub struct _PAD41PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD41PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD41PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD41PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD41PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 8;
        // Read-modify-write: clear the bit, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Values that can be written to the field `PAD40RSEL`
pub enum PAD40RSELW {
    /// Pullup is ~1.5 KOhms value.
    PULL1_5K,
    /// Pullup is ~6 KOhms value.
    PULL6K,
    /// Pullup is ~12 KOhms value.
    PULL12K,
    /// Pullup is ~24 KOhms value.
    PULL24K,
}
impl PAD40RSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Variants encode in declaration order: 0..=3.
        match self {
            &PAD40RSELW::PULL1_5K => 0,
            &PAD40RSELW::PULL6K => 1,
            &PAD40RSELW::PULL12K => 2,
            &PAD40RSELW::PULL24K => 3,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the two-bit PAD40RSEL field (bits 6:7 of the register).
pub struct _PAD40RSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD40RSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD40RSELW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Pullup is ~1.5 KOhms value."]
    #[inline]
    pub fn pull1_5k(self) -> &'a mut W {
        self.variant(PAD40RSELW::PULL1_5K)
    }
    #[doc = "Pullup is ~6 KOhms value."]
    #[inline]
    pub fn pull6k(self) -> &'a mut W {
        self.variant(PAD40RSELW::PULL6K)
    }
    #[doc = "Pullup is ~12 KOhms value."]
    #[inline]
    pub fn pull12k(self) -> &'a mut W {
        self.variant(PAD40RSELW::PULL12K)
    }
    #[doc = "Pullup is ~24 KOhms value."]
    #[inline]
    pub fn pull24k(self) -> &'a mut W {
        self.variant(PAD40RSELW::PULL24K)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 3;
        const OFFSET: u8 = 6;
        // Read-modify-write: clear the 2-bit field, then OR in the new value.
        // `bits` is safe here because every value in 0..=3 is a defined variant.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Values that can be written to the field `PAD40FNCSEL`
pub enum PAD40FNCSELW {
    /// Configure as the UART0 RX input signal value.
    UART0RX,
    /// Configure as the UART1 RX input signal value.
    UART1RX,
    /// Configure as the ADC Trigger 0 signal value.
    TRIG0,
    /// Configure as GPIO40 value.
    GPIO40,
    /// Configure as the IOMSTR4 I2C SDA or SPI WIR3 signal value.
    M4SDAWIR3,
    /// Configure as the IOMSTR4 SPI MISO input signal value.
    M4MISO,
}
impl PAD40FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Variants encode in declaration order: 0..=5 (6 and 7 are reserved).
        match self {
            &PAD40FNCSELW::UART0RX => 0,
            &PAD40FNCSELW::UART1RX => 1,
            &PAD40FNCSELW::TRIG0 => 2,
            &PAD40FNCSELW::GPIO40 => 3,
            &PAD40FNCSELW::M4SDAWIR3 => 4,
            &PAD40FNCSELW::M4MISO => 5,
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the three-bit PAD40FNCSEL field (bits 3:5 of the register).
pub struct _PAD40FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD40FNCSELW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD40FNCSELW) -> &'a mut W {
        // Safe because `_bits` only produces values of defined variants.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Configure as the UART0 RX input signal value."]
    #[inline]
    pub fn uart0rx(self) -> &'a mut W {
        self.variant(PAD40FNCSELW::UART0RX)
    }
    #[doc = "Configure as the UART1 RX input signal value."]
    #[inline]
    pub fn uart1rx(self) -> &'a mut W {
        self.variant(PAD40FNCSELW::UART1RX)
    }
    #[doc = "Configure as the ADC Trigger 0 signal value."]
    #[inline]
    pub fn trig0(self) -> &'a mut W {
        self.variant(PAD40FNCSELW::TRIG0)
    }
    #[doc = "Configure as GPIO40 value."]
    #[inline]
    pub fn gpio40(self) -> &'a mut W {
        self.variant(PAD40FNCSELW::GPIO40)
    }
    #[doc = "Configure as the IOMSTR4 I2C SDA or SPI WIR3 signal value."]
    #[inline]
    pub fn m4sdawir3(self) -> &'a mut W {
        self.variant(PAD40FNCSELW::M4SDAWIR3)
    }
    #[doc = "Configure as the IOMSTR4 SPI MISO input signal value."]
    #[inline]
    pub fn m4miso(self) -> &'a mut W {
        self.variant(PAD40FNCSELW::M4MISO)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    // `unsafe` (unlike the 2-bit fields' `bits`): the 3-bit mask admits
    // values 6 and 7, which have no enumerated variant above.
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 3;
        // Read-modify-write: clear the 3-bit field, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Values that can be written to the field `PAD40STRNG`
pub enum PAD40STRNGW {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD40STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH is the only variant encoded as a set bit.
        if let PAD40STRNGW::HIGH = *self {
            true
        } else {
            false
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit PAD40STRNG field (bit 2 of the register).
pub struct _PAD40STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD40STRNGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD40STRNGW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Low drive strength value."]
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD40STRNGW::LOW)
    }
    #[doc = "High drive strength value."]
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD40STRNGW::HIGH)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 2;
        // Read-modify-write: clear the field, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Values that can be written to the field `PAD40INPEN`
pub enum PAD40INPENW {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD40INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN is the only variant encoded as a set bit.
        if let PAD40INPENW::EN = *self {
            true
        } else {
            false
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit PAD40INPEN field (bit 1 of the register).
pub struct _PAD40INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD40INPENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD40INPENW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pad input disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD40INPENW::DIS)
    }
    #[doc = "Pad input enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD40INPENW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 1;
        // Read-modify-write: clear the field, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Values that can be written to the field `PAD40PULL`
pub enum PAD40PULLW {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD40PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN is the only variant encoded as a set bit.
        if let PAD40PULLW::EN = *self {
            true
        } else {
            false
        }
    }
}
#[doc = r" Proxy"]
// Write proxy for the single-bit PAD40PULL field (bit 0 of the register).
pub struct _PAD40PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD40PULLW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD40PULLW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Pullup disabled value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD40PULLW::DIS)
    }
    #[doc = "Pullup enabled value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD40PULLW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        // Read-modify-write: clear the field, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// Read accessors: each getter shifts the cached register value right by the
// field's offset, masks it to the field width, and wraps it in the field's
// reader type. Note pad 41 exposes a power-switch bit (bit 15) instead of a
// pullup-resistor selection; bit 14 is not mapped by any accessor here.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 30:31 - Pad 43 pullup resistor selection."]
    #[inline]
    pub fn pad43rsel(&self) -> PAD43RSELR {
        PAD43RSELR::_from({
            const MASK: u8 = 3;
            const OFFSET: u8 = 30;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bits 27:29 - Pad 43 function select"]
    #[inline]
    pub fn pad43fncsel(&self) -> PAD43FNCSELR {
        PAD43FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 27;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 26 - Pad 43 drive strength"]
    #[inline]
    pub fn pad43strng(&self) -> PAD43STRNGR {
        PAD43STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 26;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 25 - Pad 43 input enable"]
    #[inline]
    pub fn pad43inpen(&self) -> PAD43INPENR {
        PAD43INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 25;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 24 - Pad 43 pullup enable"]
    #[inline]
    pub fn pad43pull(&self) -> PAD43PULLR {
        PAD43PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 24;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 22:23 - Pad 42 pullup resistor selection."]
    #[inline]
    pub fn pad42rsel(&self) -> PAD42RSELR {
        PAD42RSELR::_from({
            const MASK: u8 = 3;
            const OFFSET: u8 = 22;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bits 19:21 - Pad 42 function select"]
    #[inline]
    pub fn pad42fncsel(&self) -> PAD42FNCSELR {
        PAD42FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 19;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 18 - Pad 42 drive strength"]
    #[inline]
    pub fn pad42strng(&self) -> PAD42STRNGR {
        PAD42STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 18;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 17 - Pad 42 input enable"]
    #[inline]
    pub fn pad42inpen(&self) -> PAD42INPENR {
        PAD42INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 17;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 16 - Pad 42 pullup enable"]
    #[inline]
    pub fn pad42pull(&self) -> PAD42PULLR {
        PAD42PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 16;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 15 - Pad 41 power switch enable"]
    #[inline]
    pub fn pad41pwrdn(&self) -> PAD41PWRDNR {
        PAD41PWRDNR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 15;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 11:13 - Pad 41 function select"]
    #[inline]
    pub fn pad41fncsel(&self) -> PAD41FNCSELR {
        PAD41FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 11;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 10 - Pad 41 drive strength"]
    #[inline]
    pub fn pad41strng(&self) -> PAD41STRNGR {
        PAD41STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 10;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 9 - Pad 41 input enable"]
    #[inline]
    pub fn pad41inpen(&self) -> PAD41INPENR {
        PAD41INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 9;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 8 - Pad 41 pullup enable"]
    #[inline]
    pub fn pad41pull(&self) -> PAD41PULLR {
        PAD41PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 8;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 6:7 - Pad 40 pullup resistor selection."]
    #[inline]
    pub fn pad40rsel(&self) -> PAD40RSELR {
        PAD40RSELR::_from({
            const MASK: u8 = 3;
            const OFFSET: u8 = 6;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bits 3:5 - Pad 40 function select"]
    #[inline]
    pub fn pad40fncsel(&self) -> PAD40FNCSELR {
        PAD40FNCSELR::_from({
            const MASK: u8 = 7;
            const OFFSET: u8 = 3;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        })
    }
    #[doc = "Bit 2 - Pad 40 drive strength"]
    #[inline]
    pub fn pad40strng(&self) -> PAD40STRNGR {
        PAD40STRNGR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 2;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 1 - Pad 40 input enable"]
    #[inline]
    pub fn pad40inpen(&self) -> PAD40INPENR {
        PAD40INPENR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 0 - Pad 40 pullup enable"]
    #[inline]
    pub fn pad40pull(&self) -> PAD40PULLR {
        PAD40PULLR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
}
// Write-side accessors: each method returns a write proxy that mutates this
// writer's `bits` field for one register field, then hands `&mut W` back for
// chaining.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // 404232216 == 0x1818_1818: each pad byte resets with the same pattern.
        W { bits: 404232216 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 30:31 - Pad 43 pullup resistor selection."]
    #[inline]
    pub fn pad43rsel(&mut self) -> _PAD43RSELW {
        _PAD43RSELW { w: self }
    }
    #[doc = "Bits 27:29 - Pad 43 function select"]
    #[inline]
    pub fn pad43fncsel(&mut self) -> _PAD43FNCSELW {
        _PAD43FNCSELW { w: self }
    }
    #[doc = "Bit 26 - Pad 43 drive strength"]
    #[inline]
    pub fn pad43strng(&mut self) -> _PAD43STRNGW {
        _PAD43STRNGW { w: self }
    }
    #[doc = "Bit 25 - Pad 43 input enable"]
    #[inline]
    pub fn pad43inpen(&mut self) -> _PAD43INPENW {
        _PAD43INPENW { w: self }
    }
    #[doc = "Bit 24 - Pad 43 pullup enable"]
    #[inline]
    pub fn pad43pull(&mut self) -> _PAD43PULLW {
        _PAD43PULLW { w: self }
    }
    #[doc = "Bits 22:23 - Pad 42 pullup resistor selection."]
    #[inline]
    pub fn pad42rsel(&mut self) -> _PAD42RSELW {
        _PAD42RSELW { w: self }
    }
    #[doc = "Bits 19:21 - Pad 42 function select"]
    #[inline]
    pub fn pad42fncsel(&mut self) -> _PAD42FNCSELW {
        _PAD42FNCSELW { w: self }
    }
    #[doc = "Bit 18 - Pad 42 drive strength"]
    #[inline]
    pub fn pad42strng(&mut self) -> _PAD42STRNGW {
        _PAD42STRNGW { w: self }
    }
    #[doc = "Bit 17 - Pad 42 input enable"]
    #[inline]
    pub fn pad42inpen(&mut self) -> _PAD42INPENW {
        _PAD42INPENW { w: self }
    }
    #[doc = "Bit 16 - Pad 42 pullup enable"]
    #[inline]
    pub fn pad42pull(&mut self) -> _PAD42PULLW {
        _PAD42PULLW { w: self }
    }
    #[doc = "Bit 15 - Pad 41 power switch enable"]
    #[inline]
    pub fn pad41pwrdn(&mut self) -> _PAD41PWRDNW {
        _PAD41PWRDNW { w: self }
    }
    #[doc = "Bits 11:13 - Pad 41 function select"]
    #[inline]
    pub fn pad41fncsel(&mut self) -> _PAD41FNCSELW {
        _PAD41FNCSELW { w: self }
    }
    #[doc = "Bit 10 - Pad 41 drive strength"]
    #[inline]
    pub fn pad41strng(&mut self) -> _PAD41STRNGW {
        _PAD41STRNGW { w: self }
    }
    #[doc = "Bit 9 - Pad 41 input enable"]
    #[inline]
    pub fn pad41inpen(&mut self) -> _PAD41INPENW {
        _PAD41INPENW { w: self }
    }
    #[doc = "Bit 8 - Pad 41 pullup enable"]
    #[inline]
    pub fn pad41pull(&mut self) -> _PAD41PULLW {
        _PAD41PULLW { w: self }
    }
    #[doc = "Bits 6:7 - Pad 40 pullup resistor selection."]
    #[inline]
    pub fn pad40rsel(&mut self) -> _PAD40RSELW {
        _PAD40RSELW { w: self }
    }
    #[doc = "Bits 3:5 - Pad 40 function select"]
    #[inline]
    pub fn pad40fncsel(&mut self) -> _PAD40FNCSELW {
        _PAD40FNCSELW { w: self }
    }
    #[doc = "Bit 2 - Pad 40 drive strength"]
    #[inline]
    pub fn pad40strng(&mut self) -> _PAD40STRNGW {
        _PAD40STRNGW { w: self }
    }
    #[doc = "Bit 1 - Pad 40 input enable"]
    #[inline]
    pub fn pad40inpen(&mut self) -> _PAD40INPENW {
        _PAD40INPENW { w: self }
    }
    #[doc = "Bit 0 - Pad 40 pullup enable"]
    #[inline]
    pub fn pad40pull(&mut self) -> _PAD40PULLW {
        _PAD40PULLW { w: self }
    }
}
|
use std::cmp;
use std::io::{Write, Seek, SeekFrom};
use byteorder::{BigEndian, WriteBytesExt};
use bytecode::*;
use cst::ConstantTable;
use io;
use function::{Function, Sizes};
/// Streams one function's bytecode to a seekable sink. The header is written
/// up front with placeholder sizes/instruction count, which `finish`
/// back-patches once the real values are known.
pub struct FunctionWriter<'a, W: 'a> where W: Write + Seek {
    write: &'a mut W, // Destination sink.
    sizes_offset: u64, // Byte offset of the reserved sizes block (after the name).
    instructions_offset: u64, // Byte offset of the reserved instruction-count u32.
    instruction_count: u32, // Number of instructions emitted so far.
    pub sizes: Sizes, // Running return/argument/locals/operand-stack sizes.
    current_op_size: u16, // Current simulated operand-stack depth.
}
/// Serializes a `ConstantTable` to a seekable sink.
pub struct ConstantTableWriter<'a, W: 'a> where W: Write + Seek {
    write: &'a mut W, // Destination sink.
}
impl<'a, W: Write + Seek> FunctionWriter<'a, W> {
    /// Creates a writer for one function and emits its header: name,
    /// placeholder sizes, constant table name, and a placeholder instruction
    /// count. Call `finish` after emitting instructions to back-patch the
    /// placeholders.
    // Note: the redundant `where W: Write + Seek` clause was dropped; the
    // bound is already stated on the impl block.
    pub fn new(write: &'a mut W, name: &str, constant_table_name: &str, argument_count: u8) -> FunctionWriter<'a, W> {
        let mut writer = FunctionWriter {
            write: write,
            // The sizes block sits directly after the encoded name.
            sizes_offset: io::string_disk_size(name) as u64,
            instructions_offset: Function::calculate_instructions_offset(name, constant_table_name),
            instruction_count: 0,
            sizes: Sizes::new(0, argument_count, 0, 0),
            current_op_size: 0,
        };
        // Write name.
        io::write_string(writer.write, name).unwrap();
        // Reserve space for the sizes: return count (u8), argument count (u8),
        // locals count (u16), max operands (u16) — patched by `finish`.
        writer.write.write_u8(0).unwrap();
        writer.write.write_u8(0).unwrap();
        writer.write.write_u16::<BigEndian>(0).unwrap();
        writer.write.write_u16::<BigEndian>(0).unwrap();
        // Write constant table name.
        io::write_string(writer.write, constant_table_name).unwrap();
        // Reserve 4 bytes for the instruction count.
        writer.write.write_u32::<BigEndian>(0).unwrap();
        writer
    }
    /// Seeks back and writes the correct sizes and instruction count over the
    /// placeholders reserved by `new`. Leaves the cursor at the
    /// instruction-count word, not at the end of the stream.
    pub fn finish(&mut self) {
        // Write all sizes.
        self.write.seek(SeekFrom::Start(self.sizes_offset)).unwrap();
        self.write.write_u8(self.sizes.return_count).unwrap();
        self.write.write_u8(self.sizes.argument_count).unwrap();
        self.write.write_u16::<BigEndian>(self.sizes.locals_count).unwrap();
        self.write.write_u16::<BigEndian>(self.sizes.max_operands).unwrap();
        // Write instruction count.
        self.write.seek(SeekFrom::Start(self.instructions_offset)).unwrap();
        self.write.write_u32::<BigEndian>(self.instruction_count).unwrap();
    }
    /// Emits a zero-operand instruction (`Nop`, `Pop` or `Dup`) and updates
    /// the simulated operand-stack depth.
    ///
    /// # Panics
    /// Panics on any other opcode, or on simulated stack underflow.
    pub fn write_operation(&mut self, opcode: Opcode) {
        match opcode {
            Opcode::Nop => { },
            Opcode::Pop => {
                self.sizes_pop_operands(1);
            },
            Opcode::Dup => {
                self.sizes_pop_operands(1); // Dup requires at least one element on the stack.
                self.sizes_push_operands(2);
            },
            // BUG FIX: this message previously named 'write_typed' (copy-paste
            // from the sibling method below).
            _ => panic!("Opcode {:?} not supported for 'write_operation' function.", opcode),
        }
        self.write.write_u8(opcode as u8).unwrap();
        self.instruction_count += 1;
    }
    /// Emits an instruction that carries a type operand (arithmetic ops and
    /// `Print`) and updates the simulated operand-stack depth.
    ///
    /// # Panics
    /// Panics on any other opcode, or on simulated stack underflow.
    pub fn write_typed(&mut self, opcode: Opcode, t: Type) {
        match opcode {
            Opcode::Add | Opcode::Sub | Opcode::Mul | Opcode::Div => {
                // These instructions pop 2 elements from the stack,
                // then push 1 element to the stack.
                self.sizes_pop_operands(2);
                self.sizes_push_operands(1);
            },
            Opcode::Print => {
                self.sizes_pop_operands(1);
            },
            _ => panic!("Opcode {:?} not supported for 'write_typed' function.", opcode),
        }
        self.write.write_u8(opcode as u8).unwrap();
        self.write.write_u8(t as u8).unwrap();
        self.instruction_count += 1;
    }
    /// Emits `Cst <index>`: pushes the constant at `index` onto the stack.
    pub fn write_cst(&mut self, index: ConstantTableIndex) {
        self.sizes_push_operands(1);
        self.write.write_u8(Opcode::Cst as u8).unwrap();
        self.write.write_u16::<BigEndian>(index).unwrap();
        self.instruction_count += 1;
    }
    /// Emits `Load <var>`: pushes local `var` onto the stack.
    pub fn write_load(&mut self, var: VariableIndex) {
        self.sizes_push_operands(1);
        self.sizes_used_var(var);
        self.write.write_u8(Opcode::Load as u8).unwrap();
        self.write.write_u16::<BigEndian>(var).unwrap();
        self.instruction_count += 1;
    }
    /// Emits `Store <var>`: pops the top of the stack into local `var`.
    pub fn write_store(&mut self, var: VariableIndex) {
        self.sizes_pop_operands(1);
        self.sizes_used_var(var);
        self.write.write_u8(Opcode::Store as u8).unwrap();
        self.write.write_u16::<BigEndian>(var).unwrap();
        self.instruction_count += 1;
    }
    /// Emits `Ret <count>` and records the widest return seen so far.
    pub fn write_ret(&mut self, count: u8) {
        self.sizes_pop_operands(count as u16);
        if self.sizes.return_count < count {
            self.sizes.return_count = count;
        }
        self.write.write_u8(Opcode::Ret as u8).unwrap();
        self.write.write_u8(count).unwrap();
        self.instruction_count += 1;
    }
    /// Records that local `var` is used, growing the locals count if needed.
    fn sizes_used_var(&mut self, var: VariableIndex) {
        self.sizes.locals_count = cmp::max(self.sizes.locals_count, var + 1);
    }
    /// Simulates pushing `amount` operands.
    fn sizes_push_operands(&mut self, amount: u16) {
        assert!(amount < 0x8000); // Should be in range for a safe conversion to i16.
        self.sizes_change_operand_stack_size(amount as i16);
    }
    /// Simulates popping `amount` operands; panics on underflow.
    fn sizes_pop_operands(&mut self, amount: u16) {
        assert!(amount < 0x8000); // Should be in range for a safe conversion to i16.
        self.sizes_change_operand_stack_size(-(amount as i16));
    }
    // Should NOT be used directly, because push and pop sizes could cancel out,
    // although an operation might first pop, for example, 2 elements and then
    // push 1. If we only look at the change after an instruction, we might allow
    // an 'add' operation on a stack of 1 element, which would lead to a buffer
    // underflow when the bytecode is loaded.
    fn sizes_change_operand_stack_size(&mut self, change: i16) {
        let diff = self.current_op_size as i32 + change as i32;
        if diff < 0 {
            // BUG FIX: `diff` is negative here; report the underflow magnitude
            // instead of the raw negative value ("underflow by -2 elements").
            panic!("Operand stack underflow by {} elements detected.", -diff);
        }
        self.current_op_size = diff as u16;
        if self.sizes.max_operands < self.current_op_size {
            self.sizes.max_operands = self.current_op_size;
        }
    }
}
impl<'a, W: Write + Seek> ConstantTableWriter<'a, W> {
    /// Wraps `write` in a constant-table writer.
    pub fn new(write: &'a mut W) -> ConstantTableWriter<'a, W> {
        ConstantTableWriter { write: write }
    }
    /// Serializes the table: a big-endian `u16` entry count followed by each
    /// constant in table order.
    pub fn write_constant_table(&mut self, constant_table: &ConstantTable) {
        let entry_count = constant_table.table.len() as u16;
        self.write.write_u16::<BigEndian>(entry_count).unwrap();
        for entry in constant_table.table.iter() {
            self.write_constant(entry);
        }
    }
    /// Serializes a single constant as a one-byte tag followed by its
    /// big-endian payload (strings use the shared string encoding).
    fn write_constant(&mut self, constant: &Constant) {
        match constant {
            &Constant::U64(num) => {
                self.write.write_u8(ConstantTag::U64 as u8).unwrap();
                self.write.write_u64::<BigEndian>(num).unwrap();
            },
            &Constant::U32(num) => {
                self.write.write_u8(ConstantTag::U32 as u8).unwrap();
                self.write.write_u32::<BigEndian>(num).unwrap();
            },
            &Constant::I64(num) => {
                self.write.write_u8(ConstantTag::I64 as u8).unwrap();
                self.write.write_i64::<BigEndian>(num).unwrap();
            },
            &Constant::I32(num) => {
                self.write.write_u8(ConstantTag::I32 as u8).unwrap();
                self.write.write_i32::<BigEndian>(num).unwrap();
            },
            &Constant::F64(num) => {
                self.write.write_u8(ConstantTag::F64 as u8).unwrap();
                self.write.write_f64::<BigEndian>(num).unwrap();
            },
            &Constant::F32(num) => {
                self.write.write_u8(ConstantTag::F32 as u8).unwrap();
                self.write.write_f32::<BigEndian>(num).unwrap();
            },
            &Constant::Str(ref string) => {
                self.write.write_u8(ConstantTag::Str as u8).unwrap();
                io::write_string(self.write, string).unwrap();
            },
        }
    }
}
|
use std::collections::HashMap;
use std::marker::PhantomData;
use analyser::interface::*;
use ops::prelude::*;
use tensor::Datum;
use Result;
/// Stacks `n` input tensors of identical shape into one output tensor along
/// a new `axis` dimension (the TensorFlow `Pack`/`tf.stack` operation).
#[derive(Debug, Clone, Default, new)]
pub struct Pack<T: Datum> {
    n: usize, // The number of inputs
    axis: usize, // Index of the new dimension inserted in the output.
    _phantom: PhantomData<T>, // Ties the op to element type T without storing one.
}
/// Builds a boxed `Pack` op from a TensorFlow `NodeDef`: element type from
/// the node's `T` attribute, input count from the node's inputs, and the
/// stacking axis from the `axis` attribute.
pub fn pack(pb: &::tfpb::node_def::NodeDef) -> Result<Box<Op>> {
    let dtype = pb.get_attr_datatype("T")?;
    let n = pb.get_input().len();
    // NOTE(review): `axis` is used as-is; negative axes (permitted by TF's
    // Pack) are not normalized here — confirm upstream guarantees axis >= 0.
    let axis = pb.get_attr_int("axis")?;
    Ok(boxed_new!(Pack(dtype)(n, axis)))
}
impl<T> Op for Pack<T>
where
    T: Datum,
{
    /// Evaluates the operation given the input tensors.
    fn eval(&self, inputs: Vec<TensorView>) -> Result<Vec<TensorView>> {
        use ndarray::Axis;
        // Give every input an extra unit dimension at `axis`, so stacking
        // along that axis concatenates them into the packed output.
        let views = inputs
            .iter()
            .map(|m| Ok(T::tensor_to_view(&*m)?.insert_axis(Axis(self.axis))))
            .collect::<Result<Vec<_>>>()?;
        let array = ::ndarray::stack(Axis(self.axis), &*views)?;
        Ok(vec![T::array_into_tensor(array).into()])
    }
    /// Returns the attributes of the operation and their values.
    fn get_attributes(&self) -> HashMap<&'static str, Attr> {
        hashmap!{
            "T" => Attr::DataType(T::datatype()),
            "n" => Attr::Usize(self.n),
            "axis" => Attr::Usize(self.axis),
        }
    }
}
impl<T: Datum> InferenceRulesOp for Pack<T> {
    /// Registers shape/rank constraints for Pack:
    /// - exactly `n` inputs and 1 output;
    /// - all inputs share the same rank and per-dimension shape;
    /// - output rank = input rank + 1 (the `equals_zero` row encodes
    ///   `-output.rank + 1 + inputs[0].rank == 0`);
    /// - output dims below `axis` copy the input dims, dims above `axis`
    ///   are the input dims shifted up by one, and `output.shape[axis] == n`.
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        solver: &mut Solver<'r>,
        inputs: &'p TensorsProxy,
        outputs: &'p TensorsProxy,
    ) {
        let output = &outputs[0];
        let n = self.n;
        let axis = self.axis;
        solver
            .equals(&inputs.len, n as isize)
            .equals(&outputs.len, 1)
            .equals_all((0..n).map(|i| bexp(&inputs[i].rank)).collect())
            .equals_zero(wrap!((-1, &output.rank), (1isize, 1), (1, &inputs[0].rank)))
            .given(&inputs[0].rank, move |solver, r: usize| {
                // Every input must agree with input 0 on every dimension.
                (0..r).for_each(|d| {
                    solver.equals_all((0..n).map(|i| bexp(&inputs[i].shape[d])).collect());
                })
            })
            .given(&inputs[0].rank, move |solver, r: usize| {
                // Dimensions before the packing axis map through unchanged.
                (0..axis).for_each(|d| {
                    solver.equals(&output.shape[d], &inputs[0].shape[d]);
                });
                // Dimensions at/after the axis shift up by one in the output.
                // The `r > 0` guard avoids `r - 1` underflowing for scalars.
                if r > 0 {
                    (axis..(r - 1)).for_each(|d| {
                        solver.equals(&output.shape[d + 1], &inputs[0].shape[d]);
                    });
                }
            })
            .equals(&output.shape[axis], n as isize);
    }
}
#[cfg(test)]
mod tests {
    #![allow(non_snake_case)]
    use super::*;
    use ndarray::arr2;
    use Tensor;
    // Packs three length-2 vectors along axis 0 and axis 1 and checks the
    // resulting 2-D tensors.
    #[test]
    fn pack_0() {
        let inputs = vec![
            Tensor::i32s(&[2], &[1, 4]).unwrap().into(),
            Tensor::i32s(&[2], &[2, 5]).unwrap().into(),
            Tensor::i32s(&[2], &[3, 6]).unwrap().into(),
        ];
        assert_eq!(
            Pack::<i32>::new(3, 0)
                .eval(inputs.clone())
                .unwrap()
                .remove(0)
                .into_tensor(),
            Tensor::from(arr2(&[[1, 4], [2, 5], [3, 6]]))
        );
        assert_eq!(
            Pack::<i32>::new(3, 1)
                .eval(inputs.clone())
                .unwrap()
                .remove(0)
                .into_tensor(),
            Tensor::from(arr2(&[[1, 2, 3], [4, 5, 6]]))
        );
    }
    // Packing a single empty vector must yield an empty [1, 0] tensor.
    #[test]
    fn pack_1() {
        let pack = Pack::<i32>::new(3, 0);
        let input = Tensor::i32s(&[0], &[]).unwrap();
        let exp: Tensor = Tensor::i32s(&[1, 0], &[]).unwrap();
        let found = pack.eval(vec![input.into()]).unwrap();
        assert!(
            exp.close_enough(&found[0]),
            "expected: {:?} found: {:?}",
            exp,
            found[0]
        )
    }
}
|
// svd2rust-generated reader/writer aliases for the PKA control register (CR).
#[doc = "Register `CR` reader"]
pub type R = crate::R<CR_SPEC>;
#[doc = "Register `CR` writer"]
pub type W = crate::W<CR_SPEC>;
#[doc = "Field `EN` reader - PKA enable."]
pub type EN_R = crate::BitReader<EN_A>;
/// PKA enable.
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EN_A {
    /// 0: Disable PKA
    Disabled = 0,
    /// 1: Enable PKA
    Enabled = 1,
}
impl From<EN_A> for bool {
    #[inline(always)]
    fn from(variant: EN_A) -> Self {
        // Single-bit field: Enabled maps to a set bit.
        match variant {
            EN_A::Disabled => false,
            EN_A::Enabled => true,
        }
    }
}
// Reader convenience methods for the single-bit EN field.
impl EN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EN_A {
        match self.bits {
            false => EN_A::Disabled,
            true => EN_A::Enabled,
        }
    }
    #[doc = "Disable PKA"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == EN_A::Disabled
    }
    #[doc = "Enable PKA"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == EN_A::Enabled
    }
}
#[doc = "Field `EN` writer - PKA enable."]
pub type EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EN_A>;
// Writer convenience methods for the single-bit EN field.
impl<'a, REG, const O: u8> EN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Disable PKA"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(EN_A::Disabled)
    }
    #[doc = "Enable PKA"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(EN_A::Enabled)
    }
}
#[doc = "Field `START` reader - start the operation"]
pub type START_R = crate::BitReader<STARTW_A>;
#[doc = "start the operation\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// Only the "set" state is enumerated: the hardware doc states this bit always
// reads as 0, so a cleared bit has no named variant.
pub enum STARTW_A {
    #[doc = "1: Writing 1 to this bit starts the operation which is selected by MODE\\[5:0\\], using the operands and data already written to the PKA RAM - This bit is always read as 0"]
    Start = 1,
}
impl From<STARTW_A> for bool {
    #[inline(always)]
    fn from(variant: STARTW_A) -> Self {
        variant as u8 != 0
    }
}
// Reader for START: partial enumeration, so `variant` returns Option.
impl START_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<STARTW_A> {
        match self.bits {
            true => Some(STARTW_A::Start),
            // A cleared bit has no enumerated value (bit always reads as 0).
            _ => None,
        }
    }
    #[doc = "Writing 1 to this bit starts the operation which is selected by MODE\\[5:0\\], using the operands and data already written to the PKA RAM - This bit is always read as 0"]
    #[inline(always)]
    pub fn is_start(&self) -> bool {
        *self == STARTW_A::Start
    }
}
#[doc = "Field `START` writer - start the operation"]
pub type START_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, STARTW_A>;
// Writer convenience method for the START trigger bit.
impl<'a, REG, const O: u8> START_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Writing 1 to this bit starts the operation which is selected by MODE\\[5:0\\], using the operands and data already written to the PKA RAM - This bit is always read as 0"]
    #[inline(always)]
    pub fn start(self) -> &'a mut crate::W<REG> {
        self.variant(STARTW_A::Start)
    }
}
#[doc = "Field `MODE` reader - PKA operation code"]
pub type MODE_R = crate::FieldReader<MODE_A>;
#[doc = "PKA operation code\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
// Sparse 6-bit opcode space: only the listed values are defined operations.
pub enum MODE_A {
    #[doc = "0: Montgomery parameter computation then modular exponentiation"]
    MontgomeryCompExp = 0,
    #[doc = "1: Montgomery parameter computation only"]
    MontgomeryComp = 1,
    #[doc = "2: Modular exponentiation only (Montgomery parameter must be loaded first)"]
    MontgomeryExp = 2,
    #[doc = "7: RSA CRT exponentiation"]
    Rsa = 7,
    #[doc = "8: Modular inversion"]
    ModularInv = 8,
    #[doc = "9: Arithmetic addition"]
    ArithmeticAdd = 9,
    #[doc = "10: Arithmetic subtraction"]
    ArithmeticSub = 10,
    #[doc = "11: Arithmetic multiplication"]
    ArithmeticMul = 11,
    #[doc = "12: Arithmetic comparison"]
    ArithmeticComp = 12,
    #[doc = "13: Modular reduction"]
    ModularRed = 13,
    #[doc = "14: Modular addition"]
    ModularAdd = 14,
    #[doc = "15: Modular subtraction"]
    ModularSub = 15,
    #[doc = "16: Montgomery multiplication"]
    ModularMul = 16,
    #[doc = "32: Montgomery parameter computation then ECC scalar multiplication"]
    MontgomeryCompScalar = 32,
    #[doc = "34: ECC scalar multiplication only (Montgomery parameter must be loaded first)"]
    MontgomeryScalar = 34,
    #[doc = "36: ECDSA sign"]
    Ecdsasign = 36,
    #[doc = "38: ECDSA verification"]
    Ecdsaverif = 38,
    #[doc = "40: Point on elliptic curve Fp check"]
    Elliptic = 40,
}
impl From<MODE_A> for u8 {
    #[inline(always)]
    fn from(variant: MODE_A) -> Self {
        variant as _
    }
}
// Marks the field's raw storage type as u8 for the generic FieldReader/Writer.
impl crate::FieldSpec for MODE_A {
    type Ux = u8;
}
// Reader for MODE: the opcode space is sparse, so `variant` returns Option
// (None for reserved/undefined values).
impl MODE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<MODE_A> {
        match self.bits {
            0 => Some(MODE_A::MontgomeryCompExp),
            1 => Some(MODE_A::MontgomeryComp),
            2 => Some(MODE_A::MontgomeryExp),
            7 => Some(MODE_A::Rsa),
            8 => Some(MODE_A::ModularInv),
            9 => Some(MODE_A::ArithmeticAdd),
            10 => Some(MODE_A::ArithmeticSub),
            11 => Some(MODE_A::ArithmeticMul),
            12 => Some(MODE_A::ArithmeticComp),
            13 => Some(MODE_A::ModularRed),
            14 => Some(MODE_A::ModularAdd),
            15 => Some(MODE_A::ModularSub),
            16 => Some(MODE_A::ModularMul),
            32 => Some(MODE_A::MontgomeryCompScalar),
            34 => Some(MODE_A::MontgomeryScalar),
            36 => Some(MODE_A::Ecdsasign),
            38 => Some(MODE_A::Ecdsaverif),
            40 => Some(MODE_A::Elliptic),
            _ => None,
        }
    }
    #[doc = "Montgomery parameter computation then modular exponentiation"]
    #[inline(always)]
    pub fn is_montgomery_comp_exp(&self) -> bool {
        *self == MODE_A::MontgomeryCompExp
    }
    #[doc = "Montgomery parameter computation only"]
    #[inline(always)]
    pub fn is_montgomery_comp(&self) -> bool {
        *self == MODE_A::MontgomeryComp
    }
    #[doc = "Modular exponentiation only (Montgomery parameter must be loaded first)"]
    #[inline(always)]
    pub fn is_montgomery_exp(&self) -> bool {
        *self == MODE_A::MontgomeryExp
    }
    #[doc = "RSA CRT exponentiation"]
    #[inline(always)]
    pub fn is_rsa(&self) -> bool {
        *self == MODE_A::Rsa
    }
    #[doc = "Modular inversion"]
    #[inline(always)]
    pub fn is_modular_inv(&self) -> bool {
        *self == MODE_A::ModularInv
    }
    #[doc = "Arithmetic addition"]
    #[inline(always)]
    pub fn is_arithmetic_add(&self) -> bool {
        *self == MODE_A::ArithmeticAdd
    }
    #[doc = "Arithmetic subtraction"]
    #[inline(always)]
    pub fn is_arithmetic_sub(&self) -> bool {
        *self == MODE_A::ArithmeticSub
    }
    #[doc = "Arithmetic multiplication"]
    #[inline(always)]
    pub fn is_arithmetic_mul(&self) -> bool {
        *self == MODE_A::ArithmeticMul
    }
    #[doc = "Arithmetic comparison"]
    #[inline(always)]
    pub fn is_arithmetic_comp(&self) -> bool {
        *self == MODE_A::ArithmeticComp
    }
    #[doc = "Modular reduction"]
    #[inline(always)]
    pub fn is_modular_red(&self) -> bool {
        *self == MODE_A::ModularRed
    }
    #[doc = "Modular addition"]
    #[inline(always)]
    pub fn is_modular_add(&self) -> bool {
        *self == MODE_A::ModularAdd
    }
    #[doc = "Modular subtraction"]
    #[inline(always)]
    pub fn is_modular_sub(&self) -> bool {
        *self == MODE_A::ModularSub
    }
    #[doc = "Montgomery multiplication"]
    #[inline(always)]
    pub fn is_modular_mul(&self) -> bool {
        *self == MODE_A::ModularMul
    }
    #[doc = "Montgomery parameter computation then ECC scalar multiplication"]
    #[inline(always)]
    pub fn is_montgomery_comp_scalar(&self) -> bool {
        *self == MODE_A::MontgomeryCompScalar
    }
    #[doc = "ECC scalar multiplication only (Montgomery parameter must be loaded first)"]
    #[inline(always)]
    pub fn is_montgomery_scalar(&self) -> bool {
        *self == MODE_A::MontgomeryScalar
    }
    #[doc = "ECDSA sign"]
    #[inline(always)]
    pub fn is_ecdsasign(&self) -> bool {
        *self == MODE_A::Ecdsasign
    }
    #[doc = "ECDSA verification"]
    #[inline(always)]
    pub fn is_ecdsaverif(&self) -> bool {
        *self == MODE_A::Ecdsaverif
    }
    #[doc = "Point on elliptic curve Fp check"]
    #[inline(always)]
    pub fn is_elliptic(&self) -> bool {
        *self == MODE_A::Elliptic
    }
}
#[doc = "Field `MODE` writer - PKA operation code"]
// 6-bit field writer (width 6 is the second const parameter of FieldWriter).
pub type MODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O, MODE_A>;
impl<'a, REG, const O: u8> MODE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Montgomery parameter computation then modular exponentiation"]
    #[inline(always)]
    pub fn montgomery_comp_exp(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::MontgomeryCompExp)
    }
    #[doc = "Montgomery parameter computation only"]
    #[inline(always)]
    pub fn montgomery_comp(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::MontgomeryComp)
    }
    #[doc = "Modular exponentiation only (Montgomery parameter must be loaded first)"]
    #[inline(always)]
    pub fn montgomery_exp(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::MontgomeryExp)
    }
    #[doc = "RSA CRT exponentiation"]
    #[inline(always)]
    pub fn rsa(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::Rsa)
    }
    #[doc = "Modular inversion"]
    #[inline(always)]
    pub fn modular_inv(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ModularInv)
    }
    #[doc = "Arithmetic addition"]
    #[inline(always)]
    pub fn arithmetic_add(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ArithmeticAdd)
    }
    #[doc = "Arithmetic subtraction"]
    #[inline(always)]
    pub fn arithmetic_sub(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ArithmeticSub)
    }
    #[doc = "Arithmetic multiplication"]
    #[inline(always)]
    pub fn arithmetic_mul(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ArithmeticMul)
    }
    #[doc = "Arithmetic comparison"]
    #[inline(always)]
    pub fn arithmetic_comp(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ArithmeticComp)
    }
    #[doc = "Modular reduction"]
    #[inline(always)]
    pub fn modular_red(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ModularRed)
    }
    #[doc = "Modular addition"]
    #[inline(always)]
    pub fn modular_add(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ModularAdd)
    }
    #[doc = "Modular subtraction"]
    #[inline(always)]
    pub fn modular_sub(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ModularSub)
    }
    #[doc = "Montgomery multiplication"]
    #[inline(always)]
    pub fn modular_mul(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::ModularMul)
    }
    #[doc = "Montgomery parameter computation then ECC scalar multiplication"]
    #[inline(always)]
    pub fn montgomery_comp_scalar(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::MontgomeryCompScalar)
    }
    #[doc = "ECC scalar multiplication only (Montgomery parameter must be loaded first)"]
    #[inline(always)]
    pub fn montgomery_scalar(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::MontgomeryScalar)
    }
    #[doc = "ECDSA sign"]
    #[inline(always)]
    pub fn ecdsasign(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::Ecdsasign)
    }
    #[doc = "ECDSA verification"]
    #[inline(always)]
    pub fn ecdsaverif(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::Ecdsaverif)
    }
    #[doc = "Point on elliptic curve Fp check"]
    #[inline(always)]
    pub fn elliptic(self) -> &'a mut crate::W<REG> {
        self.variant(MODE_A::Elliptic)
    }
}
#[doc = "Field `PROCENDIE` reader - PROCENDIE"]
pub type PROCENDIE_R = crate::BitReader<PROCENDIE_A>;
#[doc = "PROCENDIE\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
// Interrupt-enable bit for the end-of-operation (PROCENDF) flag.
pub enum PROCENDIE_A {
    #[doc = "0: No interrupt is generated when PROCENDF flag is set in PKA_SR"]
    Disabled = 0,
    #[doc = "1: An interrupt is generated when PROCENDF flag is set in PKA_SR"]
    Enabled = 1,
}
impl From<PROCENDIE_A> for bool {
    #[inline(always)]
    fn from(variant: PROCENDIE_A) -> Self {
        variant as u8 != 0
    }
}
impl PROCENDIE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PROCENDIE_A {
match self.bits {
false => PROCENDIE_A::Disabled,
true => PROCENDIE_A::Enabled,
}
}
#[doc = "No interrupt is generated when PROCENDF flag is set in PKA_SR"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == PROCENDIE_A::Disabled
}
#[doc = "An interrupt is generated when PROCENDF flag is set in PKA_SR"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == PROCENDIE_A::Enabled
}
}
#[doc = "Field `PROCENDIE` writer - PROCENDIE"]
pub type PROCENDIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PROCENDIE_A>;
impl<'a, REG, const O: u8> PROCENDIE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "No interrupt is generated when PROCENDF flag is set in PKA_SR"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(PROCENDIE_A::Disabled)
}
#[doc = "An interrupt is generated when PROCENDF flag is set in PKA_SR"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(PROCENDIE_A::Enabled)
}
}
#[doc = "Field `RAMERRIE` reader - RAM error interrupt enable"]
pub type RAMERRIE_R = crate::BitReader<RAMERRIE_A>;
#[doc = "RAM error interrupt enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RAMERRIE_A {
#[doc = "0: No interrupt is generated when RAMERRF flag is set in PKA_SR"]
Disabled = 0,
#[doc = "1: An interrupt is generated when RAMERRF flag is set in PKA_SR"]
Enabled = 1,
}
impl From<RAMERRIE_A> for bool {
#[inline(always)]
fn from(variant: RAMERRIE_A) -> Self {
variant as u8 != 0
}
}
impl RAMERRIE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RAMERRIE_A {
match self.bits {
false => RAMERRIE_A::Disabled,
true => RAMERRIE_A::Enabled,
}
}
#[doc = "No interrupt is generated when RAMERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == RAMERRIE_A::Disabled
}
#[doc = "An interrupt is generated when RAMERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == RAMERRIE_A::Enabled
}
}
#[doc = "Field `RAMERRIE` writer - RAM error interrupt enable"]
pub type RAMERRIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RAMERRIE_A>;
impl<'a, REG, const O: u8> RAMERRIE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "No interrupt is generated when RAMERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(RAMERRIE_A::Disabled)
}
#[doc = "An interrupt is generated when RAMERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(RAMERRIE_A::Enabled)
}
}
#[doc = "Field `ADDRERRIE` reader - Address error interrupt enable"]
pub type ADDRERRIE_R = crate::BitReader<ADDRERRIE_A>;
#[doc = "Address error interrupt enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ADDRERRIE_A {
#[doc = "0: No interrupt is generated when ADDRERRF flag is set in PKA_SR"]
Disabled = 0,
#[doc = "1: An interrupt is generated when ADDRERRF flag is set in PKA_SR"]
Enabled = 1,
}
impl From<ADDRERRIE_A> for bool {
#[inline(always)]
fn from(variant: ADDRERRIE_A) -> Self {
variant as u8 != 0
}
}
impl ADDRERRIE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ADDRERRIE_A {
match self.bits {
false => ADDRERRIE_A::Disabled,
true => ADDRERRIE_A::Enabled,
}
}
#[doc = "No interrupt is generated when ADDRERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == ADDRERRIE_A::Disabled
}
#[doc = "An interrupt is generated when ADDRERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == ADDRERRIE_A::Enabled
}
}
#[doc = "Field `ADDRERRIE` writer - Address error interrupt enable"]
pub type ADDRERRIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ADDRERRIE_A>;
impl<'a, REG, const O: u8> ADDRERRIE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "No interrupt is generated when ADDRERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(ADDRERRIE_A::Disabled)
}
#[doc = "An interrupt is generated when ADDRERRF flag is set in PKA_SR"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(ADDRERRIE_A::Enabled)
}
}
// NOTE(review): svd2rust-generated reader/writer accessors and register spec
// for PKA_CR. Regenerate from the SVD rather than hand-editing.
impl R {
    #[doc = "Bit 0 - PKA enable."]
    #[inline(always)]
    pub fn en(&self) -> EN_R {
        EN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - start the operation"]
    #[inline(always)]
    pub fn start(&self) -> START_R {
        START_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bits 8:13 - PKA operation code"]
    #[inline(always)]
    pub fn mode(&self) -> MODE_R {
        // 6-bit field, hence the 0x3f mask.
        MODE_R::new(((self.bits >> 8) & 0x3f) as u8)
    }
    #[doc = "Bit 17 - PROCENDIE"]
    #[inline(always)]
    pub fn procendie(&self) -> PROCENDIE_R {
        PROCENDIE_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 19 - RAM error interrupt enable"]
    #[inline(always)]
    pub fn ramerrie(&self) -> RAMERRIE_R {
        RAMERRIE_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - Address error interrupt enable"]
    #[inline(always)]
    pub fn addrerrie(&self) -> ADDRERRIE_R {
        ADDRERRIE_R::new(((self.bits >> 20) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - PKA enable."]
    #[inline(always)]
    #[must_use]
    pub fn en(&mut self) -> EN_W<CR_SPEC, 0> {
        EN_W::new(self)
    }
    #[doc = "Bit 1 - start the operation"]
    #[inline(always)]
    #[must_use]
    pub fn start(&mut self) -> START_W<CR_SPEC, 1> {
        START_W::new(self)
    }
    #[doc = "Bits 8:13 - PKA operation code"]
    #[inline(always)]
    #[must_use]
    pub fn mode(&mut self) -> MODE_W<CR_SPEC, 8> {
        MODE_W::new(self)
    }
    #[doc = "Bit 17 - PROCENDIE"]
    #[inline(always)]
    #[must_use]
    pub fn procendie(&mut self) -> PROCENDIE_W<CR_SPEC, 17> {
        PROCENDIE_W::new(self)
    }
    #[doc = "Bit 19 - RAM error interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn ramerrie(&mut self) -> RAMERRIE_W<CR_SPEC, 19> {
        RAMERRIE_W::new(self)
    }
    #[doc = "Bit 20 - Address error interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn addrerrie(&mut self) -> ADDRERRIE_W<CR_SPEC, 20> {
        ADDRERRIE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR_SPEC;
impl crate::RegisterSpec for CR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr::R`](R) reader structure"]
impl crate::Readable for CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr::W`](W) writer structure"]
impl crate::Writable for CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR to value 0"]
impl crate::Resettable for CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// NOTE(review): svd2rust-generated definitions for the IWDG window register.
// Regenerate from the SVD rather than hand-editing.
#[doc = "Register `IWDG_WINR` reader"]
pub type R = crate::R<IWDG_WINR_SPEC>;
#[doc = "Register `IWDG_WINR` writer"]
pub type W = crate::W<IWDG_WINR_SPEC>;
#[doc = "Field `WIN` reader - WIN"]
pub type WIN_R = crate::FieldReader<u16>;
#[doc = "Field `WIN` writer - WIN"]
pub type WIN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>;
impl R {
    #[doc = "Bits 0:11 - WIN"]
    #[inline(always)]
    pub fn win(&self) -> WIN_R {
        // 12-bit window value, hence the 0x0fff mask.
        WIN_R::new((self.bits & 0x0fff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:11 - WIN"]
    #[inline(always)]
    #[must_use]
    pub fn win(&mut self) -> WIN_W<IWDG_WINR_SPEC, 0> {
        WIN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "Window register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`iwdg_winr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`iwdg_winr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IWDG_WINR_SPEC;
impl crate::RegisterSpec for IWDG_WINR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`iwdg_winr::R`](R) reader structure"]
impl crate::Readable for IWDG_WINR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`iwdg_winr::W`](W) writer structure"]
impl crate::Writable for IWDG_WINR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IWDG_WINR to value 0x0fff"]
impl crate::Resettable for IWDG_WINR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0fff;
}
|
use std::collections::HashMap;
use std::io;
use std::str::FromStr;
use crate::base::Part;
/// Solves part 1: total severity accumulated when leaving at picosecond 0.
pub fn part1(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::One)
}
/// Solves part 2: the smallest delay that crosses the firewall undetected.
pub fn part2(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::Two)
}
/// Reads the firewall description from `r` and computes the answer for the
/// requested puzzle part as a string.
fn solve(r: &mut dyn io::Read, part: Part) -> Result<String, String> {
    let mut input = String::new();
    r.read_to_string(&mut input).map_err(|e| e.to_string())?;
    let layers = parse_input(&input);
    let answer = match part {
        // Part one: sum the severity of every layer that catches us at delay 0.
        Part::One => layers
            .iter()
            .map(|(&layer, &depth)| severity(layer, depth, 0))
            .sum::<u64>()
            .to_string(),
        // Part two: smallest delay that avoids every scanner.
        Part::Two => find_min_delay(&layers).to_string(),
    };
    Ok(answer)
}
/// Parses the puzzle input into a map from layer index to scanner range.
fn parse_input(input: &str) -> HashMap<u64, u64> {
    input.lines().map(parse_line).collect()
}

/// Parses a single `"layer: depth"` line.
///
/// Panics with a descriptive message on malformed input — same fail-fast
/// behaviour as before, but without an opaque index-out-of-bounds panic and
/// without allocating an intermediate `Vec`.
fn parse_line(line: &str) -> (u64, u64) {
    let (layer, depth) = line
        .split_once(": ")
        .unwrap_or_else(|| panic!("malformed input line: {:?}", line));
    (
        layer.parse().expect("layer is not a valid u64"),
        depth.parse().expect("depth is not a valid u64"),
    )
}
/// Returns whether the packet is caught when it enters this layer at time
/// `picosecond + delay`.
///
/// A scanner sweeps a column of `depth` positions and is back at the top
/// every `2 * (depth - 1)` picoseconds; the packet is caught exactly when it
/// arrives while the scanner sits at position 0.
fn detected_when_entering(picosecond: u64, depth: u64, delay: u64) -> bool {
    match depth {
        // No scanner positions at all: nothing can catch the packet.
        // (The old code underflowed `depth - 1` here.)
        0 => false,
        // A range-1 scanner never leaves the top row, so it always catches
        // the packet. (The old code divided by zero here.)
        1 => true,
        _ => (picosecond + delay) % ((depth - 1) * 2) == 0,
    }
}
/// Severity contributed by one layer: `layer * depth` when the packet is
/// caught there, zero otherwise.
fn severity(layer: u64, depth: u64, delay: u64) -> u64 {
    match detected_when_entering(layer, depth, delay) {
        true => layer * depth,
        false => 0,
    }
}
/// Returns whether any layer catches the packet for the given departure delay.
fn any_detection_with_delay(layers: &HashMap<u64, u64>, delay: u64) -> bool {
    for (&layer, &depth) in layers {
        if detected_when_entering(layer, depth, delay) {
            return true;
        }
    }
    false
}
/// Smallest departure delay for which no scanner detects the packet.
fn find_min_delay(layers: &HashMap<u64, u64>) -> u64 {
    let mut delay = 0;
    while any_detection_with_delay(layers, delay) {
        delay += 1;
    }
    delay
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test;
    // Each `test!` invocation runs a solver against an input file and checks
    // the known-good answer string.
    mod part1 {
        use super::*;
        test!(example, file "testdata/day13/ex", "24", part1);
        test!(actual, file "../../../inputs/2017/13", "2508", part1);
    }
    mod part2 {
        use super::*;
        test!(example, file "testdata/day13/ex", "10", part2);
        test!(actual, file "../../../inputs/2017/13", "3913186", part2);
    }
}
|
use std::{
borrow::Borrow, cell::RefCell, collections::VecDeque, fmt::Display, fs::File, io::Read,
path::Path, str::FromStr,
};
use crate::NamespacedId;
use ahash::{AHashMap, AHashSet};
use blocks::BlockId;
use generated::{BlockKind, EntityKind, Item};
use protocol::{
packets::server::{AllTags, Tag},
VarInt,
};
use serde::Deserialize;
use smartstring::{Compact, SmartString};
use thiserror::Error;
use walkdir::WalkDir;
pub use generated::vanilla_tags::*;
/// The tag registry builder's purpose is to serve as a stepping stone to construct the full tag registry.
/// Once all datapacks are loaded, the builder resolves all tag "symlinks".
/// An example of this behaviour is the tag `#minecraft:fences`, which includes `minecraft:nether_brick_fence` and `#minecraft:wooden_fences`.
#[derive(Debug, Default)]
pub struct TagRegistryBuilder {
    // Each map goes from a tag id to its raw string entries; entries that
    // start with '#' reference other tags and are resolved by `build()`.
    block_map: AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
    entity_map: AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
    fluid_map: AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
    item_map: AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
}
impl TagRegistryBuilder {
    /// Creates an empty builder with no tags registered.
    pub fn new() -> Self {
        Self::default()
    }

    /// Loads every tag file found under `dir` (expected to contain the
    /// `blocks`, `entity_types`, `fluids` and `items` sub-directories of a
    /// datapack) into this builder, namespacing each tag with `namespace`.
    ///
    /// Panics if `dir` is not a directory.
    pub fn add_tags_from_dir(
        &mut self,
        dir: &Path,
        namespace: &str,
    ) -> Result<(), crate::TagLoadError> {
        assert!(dir.is_dir());
        // Each registry kind lives in its own sub-directory; a missing
        // directory simply contributes no tags.
        let blocks = dir.join("blocks");
        let entity_types = dir.join("entity_types");
        let fluids = dir.join("fluids");
        let items = dir.join("items");
        if blocks.exists() {
            Self::fill_map(&blocks, &mut self.block_map, namespace)?;
        }
        if entity_types.exists() {
            Self::fill_map(&entity_types, &mut self.entity_map, namespace)?;
        }
        if fluids.exists() {
            Self::fill_map(&fluids, &mut self.fluid_map, namespace)?;
        }
        if items.exists() {
            Self::fill_map(&items, &mut self.item_map, namespace)?;
        }
        Ok(())
    }

    /// Convenience constructor: a new builder pre-populated from `dir`.
    pub fn from_dir(dir: &Path, namespace: &str) -> Result<Self, crate::TagLoadError> {
        let mut this = Self::new();
        this.add_tags_from_dir(dir, namespace)?;
        Ok(this)
    }

    /// Walks `dir` recursively and merges every tag file found into `map`,
    /// keyed by the tag's namespaced id.
    fn fill_map(
        dir: &Path,
        map: &mut AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
        namespace: &str,
    ) -> Result<(), crate::TagLoadError> {
        for entry in WalkDir::new(dir).into_iter() {
            let entry = entry?;
            let entry = entry.path();
            if !entry.is_file() {
                continue;
            }
            log::trace!("{}", entry.to_string_lossy());
            let path_to_file = entry.parent().unwrap();
            let file_name = entry.file_stem().unwrap();
            // Tag name = directory path relative to `dir` (backslashes
            // normalized) plus the file stem.
            // NOTE(review): for nested directories this concatenates without
            // a separator ("sub" + "name" -> "subname"); confirm that is the
            // intended naming for tags in sub-directories.
            let tag_name = std::borrow::Cow::Owned(
                path_to_file
                    .strip_prefix(dir)
                    .unwrap()
                    .to_string_lossy()
                    .replace("\\", "/"),
            ) + file_name.to_string_lossy();
            let namespaced = NamespacedId::from_parts(namespace, &tag_name[..])?;
            // Entry API: one lookup instead of the old
            // contains_key + insert + get_mut triple, and no key clone.
            Self::fill_set(entry, map.entry(namespaced).or_default())?;
        }
        Ok(())
    }

    /// Reads a single tag JSON file and merges its values into `set`.
    /// A file with `"replace": true` discards the previously collected
    /// entries for that tag.
    fn fill_set(
        file: &Path,
        set: &mut AHashSet<SmartString<Compact>>,
    ) -> Result<(), crate::TagLoadError> {
        assert!(file.is_file());
        let mut s = String::new();
        // The walker just reported this path as a file; failure here is a
        // race or a permissions bug, so fail fast with a clear message.
        File::open(file)
            .expect("tag file vanished between discovery and open")
            .read_to_string(&mut s)
            .expect("tag file could not be read as UTF-8");
        let file: TagFile = serde_json::from_str(&s[..])?;
        if file.replace {
            set.clear();
        }
        set.extend(file.values.into_iter().map(SmartString::from));
        Ok(())
    }

    /// Resolves every tag in `source` into `target`, flattening `#`-links.
    fn parse(
        source: &AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
        target: &mut AHashMap<NamespacedId, AHashSet<NamespacedId>>,
    ) -> Result<(), crate::TagLoadError> {
        let mut stack = VecDeque::new();
        for tag in source.keys().cloned() {
            if target.contains_key(&tag) {
                // Already resolved as a dependency of an earlier tag.
                continue;
            }
            Self::parse_rec(tag, &mut stack, source, target)?;
        }
        Ok(())
    }

    /// Recursively resolves `tag`, first resolving every `#`-prefixed child
    /// tag it references. `stack` holds the chain of tags currently being
    /// resolved and is used to detect reference cycles.
    fn parse_rec(
        tag: NamespacedId,
        stack: &mut VecDeque<NamespacedId>,
        source: &AHashMap<NamespacedId, AHashSet<SmartString<Compact>>>,
        target: &mut AHashMap<NamespacedId, AHashSet<NamespacedId>>,
    ) -> Result<(), crate::TagLoadError> {
        if stack.contains(&tag) {
            // The tag references itself (possibly transitively).
            return Err(LoopError(stack.iter().cloned().collect()).into());
        }
        let set = match source.get(&tag) {
            Some(s) => s,
            None => {
                // A tag referenced a child no datapack defines.
                return Err(crate::TagLoadError::InvalidLink(
                    stack.pop_back().unwrap(),
                    tag,
                ))
            }
        };
        assert!(target.insert(tag.clone(), Default::default()).is_none());
        // Resolve all child tags (entries starting with '#') first.
        for child in set.iter().filter_map(|s| s.strip_prefix('#')) {
            let child = NamespacedId::from_str(child)?;
            if !target.contains_key(&child) {
                // Not resolved yet — recurse with this tag on the stack.
                stack.push_back(tag.clone());
                Self::parse_rec(child.clone(), stack, source, target)?;
            }
            // Copy the child's resolved entries into this tag.
            for element in target.get(&child).unwrap().clone() {
                target.get_mut(&tag).unwrap().insert(element);
            }
        }
        let target_entry = target.get_mut(&tag).unwrap();
        // Insert the tag's direct (non-'#') entries.
        for i in source
            .get(&tag)
            .unwrap()
            .iter()
            .filter(|e| !e.starts_with('#'))
        {
            target_entry.insert(NamespacedId::from_str(i)?);
        }
        // Pop the frame our caller pushed before recursing into us
        // (`pop_back` on the empty top-level stack is a harmless no-op).
        stack.pop_back();
        Ok(())
    }

    /// Consumes the builder and resolves all links into a [`TagRegistry`].
    pub fn build(self) -> Result<TagRegistry, crate::TagLoadError> {
        let mut res = TagRegistry::new();
        Self::parse(&self.block_map, &mut res.block_map)?;
        Self::parse(&self.entity_map, &mut res.entity_map)?;
        Self::parse(&self.fluid_map, &mut res.fluid_map)?;
        Self::parse(&self.item_map, &mut res.item_map)?;
        Ok(res)
    }
}
/// A registry for keeping track of tags.
#[derive(Debug, Default)]
pub struct TagRegistry {
    // Fully resolved maps: tag id -> set of concrete (non-tag) entries.
    block_map: AHashMap<NamespacedId, AHashSet<NamespacedId>>,
    entity_map: AHashMap<NamespacedId, AHashSet<NamespacedId>>,
    fluid_map: AHashMap<NamespacedId, AHashSet<NamespacedId>>,
    item_map: AHashMap<NamespacedId, AHashSet<NamespacedId>>,
    // Lazily built `AllTags` packet; `RefCell` lets `all_tags(&self)` cache
    // the packet through a shared reference.
    cached_packet: RefCell<Option<Box<AllTags>>>,
}
impl TagRegistry {
pub fn new() -> Self {
Self {
..Default::default()
}
}
pub fn check_block_tag<T>(&self, block: BlockKind, tag: &T) -> bool
where
T: Into<NamespacedId> + Clone,
{
self.block_map
.get(&tag.clone().into())
.map(|set| set.get(&NamespacedId::from_str(block.name()).unwrap()))
.flatten()
.is_some()
}
pub fn check_entity_tag<T>(&self, entity: EntityKind, tag: &T) -> bool
where
T: Into<NamespacedId> + Clone,
{
self.entity_map
.get(&tag.clone().into())
.map(|set| set.get(&NamespacedId::from_str(entity.name()).unwrap()))
.flatten()
.is_some()
}
pub fn check_fluid_tag<T>(&self, fluid: BlockKind, tag: &T) -> bool
where
T: Into<NamespacedId> + Clone,
{
self.fluid_map
.get(&tag.clone().into())
.map(|set| set.get(&NamespacedId::from_str(fluid.name()).unwrap()))
.flatten()
.is_some()
}
pub fn check_item_tag<T>(&self, item: Item, tag: &T) -> bool
where
T: Into<NamespacedId> + Clone,
{
self.item_map
.get(&tag.clone().into())
.map(|set| set.get(&NamespacedId::from_str(item.name()).unwrap()))
.flatten()
.is_some()
}
pub fn check_for_any_tag<T>(&self, thing: impl Borrow<str>, tag: &T) -> bool
where
T: Into<NamespacedId> + Clone,
{
let thing = NamespacedId::from_str(thing.borrow()).unwrap();
let tag = tag.clone().into();
self.block_map.get(&tag).map(|s| s.get(&thing)).is_some()
| self.entity_map.get(&tag).map(|s| s.get(&thing)).is_some()
| self.fluid_map.get(&tag).map(|s| s.get(&thing)).is_some()
| self.item_map.get(&tag).map(|s| s.get(&thing)).is_some()
}
/// Provides an `AllTags` packet for sending to the client. This tag is cached to save some performance.
pub fn all_tags(&self) -> AllTags {
let mut inner = self.cached_packet.borrow_mut();
if inner.is_some() {
inner.as_ref().unwrap().as_ref().to_owned()
} else {
let tags = self.build_tags_packet();
*inner = Some(Box::new(tags.clone()));
tags
}
}
fn build_tags_packet(&self) -> AllTags {
let mut block_tags = vec![];
let mut entity_tags = vec![];
let mut fluid_tags = vec![];
let mut item_tags = vec![];
for (tag_name, block_names) in &self.block_map {
block_tags.push(Tag {
name: tag_name.to_string(),
entries: block_names
.iter()
.map(|e| VarInt(generated::BlockKind::from_name(e.name()).unwrap().id() as i32))
.collect(),
});
}
for (tag_name, entity_names) in &self.entity_map {
entity_tags.push(Tag {
name: tag_name.to_string(),
entries: entity_names
.iter()
.map(
|e| VarInt(generated::EntityKind::from_name(e.name()).unwrap().id() as i32),
)
.collect(),
});
}
for (tag_name, fluid_names) in &self.fluid_map {
let mut entries = vec![];
for entry in fluid_names {
let block = match BlockId::from_identifier(&entry.to_string()) {
Some(s) => s,
None => BlockId::from_identifier(&entry.to_string().replace("flowing_", ""))
.unwrap()
.with_water_level(1),
};
entries.push(VarInt(block.vanilla_fluid_id().unwrap() as i32));
}
fluid_tags.push(Tag {
name: tag_name.to_string(),
entries,
});
}
for (tag_name, item_names) in &self.item_map {
item_tags.push(Tag {
name: tag_name.to_string(),
entries: item_names
.iter()
.map(|e| VarInt(generated::Item::from_name(e.name()).unwrap().id() as i32))
.collect(),
});
}
AllTags {
block_tags,
item_tags,
fluid_tags,
entity_tags,
}
}
fn display_helper(
map: &AHashMap<NamespacedId, AHashSet<NamespacedId>>,
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
let mut m = map.iter().collect::<Vec<_>>();
m.sort_by(|a, b| a.0.cmp(b.0));
for (a, b) in m {
writeln!(f, "{}: ", a)?;
let mut n = b.iter().collect::<Vec<_>>();
n.sort();
for c in n {
writeln!(f, " {}", c)?;
}
}
Ok(())
}
}
impl Display for TagRegistry {
    /// Renders the four registries in a fixed order: blocks, entities,
    /// fluids, items.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for map in [
            &self.block_map,
            &self.entity_map,
            &self.fluid_map,
            &self.item_map,
        ] {
            Self::display_helper(map, f)?;
        }
        Ok(())
    }
}
/// On-disk JSON schema of a single datapack tag file.
#[derive(Deserialize)]
struct TagFile {
    // When true, this file replaces (rather than extends) the entries
    // collected so far for the same tag.
    pub replace: bool,
    pub values: Vec<String>,
}
/// Error raised when tag references form a cycle; holds the chain of tags
/// that was being resolved when the cycle was detected.
#[derive(Debug, Error)]
pub struct LoopError(Vec<NamespacedId>);
impl Display for LoopError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // One tag of the reference chain per line.
        for entry in &self.0 {
            writeln!(f, "{}", entry)?;
        }
        Ok(())
    }
}
impl From<VanillaBlockTags> for crate::NamespacedId {
fn from(tag: VanillaBlockTags) -> Self {
NamespacedId::from_str(tag.name()).unwrap()
}
}
impl From<VanillaEntityTypes> for crate::NamespacedId {
fn from(tag: VanillaEntityTypes) -> Self {
NamespacedId::from_str(tag.name()).unwrap()
}
}
impl From<VanillaFluidTags> for crate::NamespacedId {
fn from(tag: VanillaFluidTags) -> Self {
NamespacedId::from_str(tag.name()).unwrap()
}
}
impl From<VanillaItemTags> for crate::NamespacedId {
fn from(tag: VanillaItemTags) -> Self {
NamespacedId::from_str(tag.name()).unwrap()
}
}
|
use serde::{Serialize, Deserialize};
/// Walks the Basecamp API from the auth endpoint down to the answers of the
/// "Did you get to run today?" question in the "Family" project, following
/// `Link`-header pagination, and returns every answer.
///
/// Network and deserialization failures are returned as `reqwest::Error`;
/// a missing project/dock/question or unexpected JSON shape panics.
pub fn collect_answer_data(client: &reqwest::blocking::Client) -> Result<Vec<Answer>, reqwest::Error> {
    // 1. Get the user information (identity + accounts).
    let user_info = client.get("https://launchpad.37signals.com/authorization.json")
        .send()?
        .json::<AuthEndpoint>()?;
    // 2. List the projects of the first account and locate "Family".
    let base_url = &user_info.accounts[0].href;
    let project_url = format!("{}/projects.json", base_url);
    let projects = client.get(&project_url).send()?.text()?;
    let projects_json: serde_json::Value = serde_json::from_str(&projects).unwrap();
    let family_project = projects_json.as_array().unwrap().iter()
        .find(|project| project["name"].as_str().unwrap() == "Family").unwrap();
    // Use the project's dock to find the questionnaire tool.
    let questionnaire = family_project["dock"].as_array().unwrap().iter()
        .find(|dock| dock["name"].as_str().unwrap() == "questionnaire").unwrap();
    // 3. Fetch the questionnaire so we have the questions URL.
    let questionnaire_url = questionnaire["url"].as_str().unwrap().to_string();
    let questionnaire_info = client.get(&questionnaire_url).send()?.text()?;
    let questionnaire_json: serde_json::Value = serde_json::from_str(&questionnaire_info).unwrap();
    // 4. Fetch the questions and find the running question.
    let questions_url = questionnaire_json["questions_url"].as_str().unwrap().to_string();
    let questions = client.get(&questions_url).send()?.text()?;
    let questions_json: serde_json::Value = serde_json::from_str(&questions).unwrap();
    let run_question = questions_json.as_array().unwrap().iter()
        .find(|question| question["title"].as_str().unwrap() == "Did you get to run today?").unwrap();
    // 5. Fetch the answers, following the paginated `Link` headers until
    //    there are no more pages.
    let answer_url = run_question["answers_url"].as_str().unwrap().to_string();
    let answers_resp: reqwest::blocking::Response = client.get(&answer_url).send()?;
    let mut answers_headers = answers_resp.headers().clone();
    let mut answers_body = answers_resp.json::<Vec<Answer>>()?;
    loop {
        // Copy the next-page URL out of the header so the borrow of
        // `answers_headers` ends before we overwrite it below.
        let next_url = match answers_headers.get("link") {
            Some(value) => extract_link_header(value).to_string(),
            None => break,
        };
        let new_page = client.get(&next_url).send()?;
        answers_headers = new_page.headers().clone();
        answers_body.append(&mut new_page.json::<Vec<Answer>>()?);
    }
    Ok(answers_body)
}
/// Manual extraction of the next-page URL from a `Link` header value of the
/// form `<url>; rel="next"`: everything before the first `;`, with the
/// surrounding angle brackets stripped.
///
/// Panics (with a descriptive message) if the header is not UTF-8 or the URL
/// is not wrapped in `<...>`.
fn extract_link_header(link_header_value: &reqwest::header::HeaderValue) -> &str {
    // `split(';').next()` replaces the old
    // `split_terminator(";").take(1).collect::<Vec<_>>().first()` chain,
    // which allocated a Vec just to read its first element.
    link_header_value
        .to_str()
        .expect("Link header is not valid UTF-8")
        .split(';')
        .next()
        .expect("split always yields at least one part")
        .strip_prefix('<')
        .expect("Link header URL missing '<' prefix")
        .strip_suffix('>')
        .expect("Link header URL missing '>' suffix")
}
/// struct used to parse the answers endpoint in step 5 of the API calls
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Answer {
    id: u64,
    status: String,
    visible_to_clients: bool,
    created_at: String,
    updated_at: String,
    title: String,
    inherits_status: bool,
    r#type: String,
    url: String,
    app_url: String,
    bookmark_url: String,
    subscription_url: String,
    comments_count: u64,
    comments_url: String,
    parent: Parent,
    bucket: Bucket,
    creator: Creator,
    // The answer body; the only field exposed to callers.
    // NOTE(review): presumably HTML as returned by Basecamp — confirm.
    pub content: String,
    group_on: String,
}
/// The question an [`Answer`] belongs to.
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Parent {
    id: u64,
    title: String,
    r#type: String,
    url: String,
    app_url: String,
}
/// The project ("bucket") containing the answer.
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Bucket {
    id: u64,
    name: String,
    r#type: String,
}
/// The person who wrote the answer.
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Creator {
    id: u64,
    attachable_sgid: String,
    name: String,
    email_address: String,
    personable_type: String,
    title: Option<String>,
    bio: Option<String>,
    created_at: String,
    updated_at: String,
    admin: bool,
    owner: bool,
    client: bool,
    time_zone: String,
    avatar_url: String,
    company: Company,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Company {
    id: u64,
    name: String,
}
/// Shape of the authorization endpoint response (step 1).
#[derive(Serialize, Deserialize, Debug)]
struct AuthEndpoint {
    expires_at: String,
    identity: Identity,
    accounts: Vec<Account>,
}
#[derive(Serialize, Deserialize, Debug)]
struct Identity {
    id: u64,
    first_name: String,
    last_name: String,
    email_address: String,
}
/// A Basecamp account; `href` is the API base URL for its resources.
#[derive(Serialize, Deserialize, Debug)]
struct Account {
    product: String,
    id: u64,
    name: String,
    href: String,
    app_href: String,
}
use proconio::{fastout, input};
#[fastout]
fn main() {
    input! {
        x: i64,
        n: usize,
        p_vec: [usize; n],
    }
    // Mark which candidate values (0..=101) are disallowed.
    let mut taken: [i64; 102] = [-1; 102];
    for &p in &p_vec {
        taken[p] = 1;
    }
    // Track (distance, value); a candidate wins on strictly smaller distance,
    // or on equal distance with a smaller value.
    let mut best: (i64, i64) = (9999999, -1);
    for candidate in 0..=101 {
        if taken[candidate] == 1 {
            continue;
        }
        let dist = (x - candidate as i64).abs();
        if best.0 > dist || (best.0 == dist && best.1 > candidate as i64) {
            best = (dist, candidate as i64);
        }
    }
    println!("{}", best.1)
}
|
mod core;
fn main() {
    // Build the 4-node example graph edge by edge: (from, to, weight).
    let mut graph = core::Dj::new(4);
    let edges = [(0, 1, 2), (0, 2, 3), (0, 3, 9), (1, 3, 1), (2, 3, 4)];
    for &(from, to, weight) in &edges {
        graph.distance_set(from, to, weight);
    }
    // Shortest path from node 0 to node 3.
    let path = graph.run(0, 3);
    print!("{:?}", path);
    println!("End Dijkstra")
}
|
//! Tsukuyomi is an asynchronous Web framework for Rust.
#![doc(html_root_url = "https://docs.rs/tsukuyomi/0.6.0-dev")]
#![deny(
missing_debug_implementations,
nonstandard_style,
rust_2018_idioms,
rust_2018_compatibility,
unused
)]
#![cfg_attr(test, deny(warnings))]
#![deny(clippy::unimplemented)]
#[macro_use]
pub mod util;
mod generic;
mod uri;
pub mod app;
pub mod endpoint;
pub mod error;
pub mod extractor;
pub mod fs;
pub mod future;
pub mod handler;
pub mod input;
pub mod output;
pub mod server;
pub mod test;
pub mod upgrade;
#[doc(inline)]
pub use crate::{
app::App,
endpoint::Endpoint,
error::{
Error, //
HttpError,
Result,
},
extractor::Extractor,
handler::{Handler, ModifyHandler},
input::Input,
output::{IntoResponse, Responder},
};
/// Re-export of crates used within the framework and frequently used on the user side.
pub mod vendor {
    // The framework is built on futures 0.1; re-exported under the name
    // `futures` so users depend on the same version.
    pub use futures01 as futures;
    pub use http;
}
|
#![feature(async_await)]
#![feature(async_closure)]
use byteorder::{ByteOrder, NetworkEndian};
use futures::prelude::*;
use runtime::net::{TcpListener, TcpStream};
use std::io;
use std::net::Shutdown;
#[runtime::main]
async fn main() -> io::Result<()> {
    // Accept TCP connections on a fixed local port and handshake with each
    // client; connections are handled concurrently.
    let mut listener = TcpListener::bind("127.0.0.1:15000")?;
    println!("listening on {}", listener.local_addr()?);
    listener
        .incoming()
        .try_for_each_concurrent(None, async move |mut stream| {
            // Each connection runs in its own spawned task so one slow
            // client cannot stall the others.
            runtime::spawn(async move {
                println!("accepting from: {}", stream.peer_addr()?);
                handshake(&mut stream).await?;
                stream.shutdown(Shutdown::Both)?;
                Ok::<(), io::Error>(())
            })
            .await
        })
        .await
}
/// Performs the toy handshake: the client must send four zero bytes, and the
/// server replies with the magic number 42 in network (big-endian) order.
/// A bad greeting is logged and the connection is left to be closed.
async fn handshake(stream: &mut TcpStream) -> io::Result<()> {
    let mut client_hello = [0u8; 4];
    stream.read_exact(&mut client_hello).await?;
    if client_hello != [0u8; 4] {
        println!("incorrect handshake");
        return Ok(());
    }
    let mut reply = [0u8; 4];
    NetworkEndian::write_u32(&mut reply, 42);
    stream.write_all(&reply).await?;
    Ok(())
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use core::fmt;
use nix::fcntl::AtFlags;
use nix::fcntl::OFlag;
use nix::sched::CloneFlags;
use nix::sys::epoll::EpollCreateFlags;
use nix::sys::eventfd::EfdFlags;
use nix::sys::inotify::InitFlags;
use nix::sys::mman::MapFlags;
use nix::sys::mman::ProtFlags;
use nix::sys::signalfd::SfdFlags;
use nix::sys::socket::AddressFamily;
use nix::sys::socket::SockFlag;
use nix::sys::socket::SockProtocol;
use nix::sys::stat::Mode;
use nix::sys::stat::SFlag;
use nix::sys::timerfd::TimerFlags;
use nix::sys::wait::WaitPidFlag;
use nix::unistd::Pid;
use crate::memory::Addr;
use crate::memory::AddrMut;
use crate::memory::MemoryAccess;
use crate::Errno;
/// A wrapper that combines an address space and a syscall. This is useful for
/// displaying the contents of syscall pointer inputs.
pub struct Display<'a, M, T> {
    /// How we access memory.
    memory: &'a M,
    /// The syscall arguments we need to display.
    syscall: &'a T,
    /// Whether or not to display output arguments.
    outputs: bool,
}
impl<'a, M, T> Display<'a, M, T> {
    /// Allocate a new display struct from a memory and a syscall whose
    /// arguments read from that memory.
    pub fn new(memory: &'a M, syscall: &'a T, outputs: bool) -> Self {
        Self {
            memory,
            syscall,
            outputs,
        }
    }
}
impl<'a, M, T> fmt::Display for Display<'a, M, T>
where
M: MemoryAccess,
T: Displayable,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.syscall.fmt(self.memory, self.outputs, f)
}
}
/// Trait that all syscalls and their arguments need to implement in order to be
/// printed out.
pub trait Displayable {
    /// Displays a syscall with all of its arguments.
    fn fmt<M: MemoryAccess>(
        &self,
        memory: &M,
        outputs: bool,
        f: &mut fmt::Formatter,
    ) -> fmt::Result;
    /// Returns an object that implements `std::fmt::Display` and displays only
    /// syscall inputs.
    fn display<'a, M>(&'a self, memory: &'a M) -> Display<'a, M, Self>
    where
        M: MemoryAccess,
        Self: Sized,
    {
        // outputs = false: render inputs only.
        Display::new(memory, self, false)
    }
    /// Returns an object that implements `std::fmt::Display` and displays
    /// syscall inputs as well as outputs. Useful for displaying pointer
    /// arguments that are only valid after a syscall has been executed.
    fn display_with_outputs<'a, M>(&'a self, memory: &'a M) -> Display<'a, M, Self>
    where
        M: MemoryAccess,
        Self: Sized,
    {
        // outputs = true: render inputs and outputs.
        Display::new(memory, self, true)
    }
}
impl<'a, T> Displayable for Option<Addr<'a, T>> {
    /// A present address prints via `Debug`; an absent one prints as `NULL`,
    /// matching C pointer conventions.
    fn fmt<M: MemoryAccess>(
        &self,
        _memory: &M,
        _outputs: bool,
        f: &mut fmt::Formatter,
    ) -> fmt::Result {
        if let Some(addr) = self {
            write!(f, "{:?}", addr)
        } else {
            write!(f, "NULL")
        }
    }
}
impl<'a, T> Displayable for Option<AddrMut<'a, T>> {
    /// A present address prints via `Debug`; an absent one prints as `NULL`,
    /// matching C pointer conventions.
    fn fmt<M: MemoryAccess>(
        &self,
        _memory: &M,
        _outputs: bool,
        f: &mut fmt::Formatter,
    ) -> fmt::Result {
        if let Some(addr) = self {
            write!(f, "{:?}", addr)
        } else {
            write!(f, "NULL")
        }
    }
}
/// Renders open(2)-style flags; `memory` is never read.
impl Displayable for OFlag {
    fn fmt<M: MemoryAccess>(
        &self,
        _memory: &M,
        _outputs: bool,
        f: &mut fmt::Formatter,
    ) -> fmt::Result {
        if self.is_empty() {
            // Without this special case, the default Debug implementation will
            // print "O_LARGEFILE | O_RDONLY" because both of those flags are
            // zeros.
            f.write_str("0")
        } else {
            fmt::Debug::fmt(self, f)
        }
    }
}
impl<T> Displayable for Result<T, Errno>
where
    T: fmt::Display,
{
    /// Renders `Ok` payloads and `Errno` values through their `Display`
    /// impls. `fmt::Display::fmt` is called directly (rather than `write!`)
    /// so the formatter's width/precision flags are forwarded unchanged.
    fn fmt<M: MemoryAccess>(
        &self,
        _memory: &M,
        _outputs: bool,
        f: &mut fmt::Formatter,
    ) -> fmt::Result {
        match *self {
            Ok(ref value) => fmt::Display::fmt(value, f),
            Err(ref errno) => fmt::Display::fmt(errno, f),
        }
    }
}
/// Macro that implements ['Displayable'] trait for a given type based on
/// another trait implementation e.g. ['Debug'] or ['Display'], etc
///
/// Usage: `impl_displayable!(Debug SomeType)` forwards `Displayable`
/// rendering to `core::fmt::Debug for SomeType`; memory is never read.
#[macro_export]
macro_rules! impl_displayable {
    ($fmt:ident $t:ty) => {
        impl $crate::Displayable for $t {
            fn fmt<M: $crate::MemoryAccess>(
                &self,
                _memory: &M,
                _outputs: bool,
                f: &mut ::core::fmt::Formatter,
            ) -> ::core::fmt::Result {
                // Forward to the chosen formatting trait (`Debug`/`Display`).
                ::core::fmt::$fmt::fmt(self, f)
            }
        }
    };
}
/// Macro that implements a wrapper for a pointer e.g. ['AddrMut']
/// or ['Addr] with custom ['Displayable'] implementation
///
/// Generates a newtype `$type` around `$pointer<$value>` plus raw
/// conversions, a `ReadAddr` impl, nullable-pointer display, and `From`
/// conversions back to the plain pointer types.
#[macro_export]
macro_rules! displayable_ptr {
    ($type:ident, $pointer:ident<$value:ident>) => {
        /// A typed pointer into the traced address space.
        /// (Generated by `displayable_ptr!`.)
        #[derive(Copy, Clone, Debug, Eq, PartialEq)]
        #[allow(missing_docs)]
        pub struct $type<'a>(pub $crate::$pointer<'a, $value>);
        impl<'a> $crate::FromToRaw for std::option::Option<$type<'a>> {
            fn from_raw(raw: usize) -> Self {
                // A raw value of 0 yields `None` (NULL pointer).
                $crate::$pointer::from_ptr(raw as *const $value).map($type)
            }
            fn into_raw(self) -> usize {
                self.map(|p| p.0).into_raw()
            }
        }
        impl<'a> $crate::ReadAddr for $type<'a> {
            type Target = $value;
            type Error = $crate::Errno;
            fn read<M: $crate::MemoryAccess>(
                &self,
                memory: &M,
            ) -> Result<Self::Target, Self::Error> {
                memory.read_value(self.0)
            }
        }
        impl<'a> $crate::Displayable for std::option::Option<$type<'a>> {
            fn fmt<M: $crate::MemoryAccess>(
                &self,
                memory: &M,
                outputs: bool,
                f: &mut std::fmt::Formatter,
            ) -> std::fmt::Result {
                $crate::fmt_nullable_ptr(f, &self.map(|x| x.0), memory, outputs)
            }
        }
        impl<'a> From<$type<'a>> for $crate::AddrMut<'a, $value> {
            fn from(time_ptr: $type<'a>) -> Self {
                time_ptr.0
            }
        }
        impl<'a> From<$type<'a>> for $crate::Addr<'a, $value> {
            fn from(time_ptr: $type<'a>) -> Self {
                time_ptr.0.into()
            }
        }
    };
}
// Flag/enum types (from `nix`) are rendered via their `Debug` impls.
impl_displayable!(Debug AtFlags);
impl_displayable!(Debug CloneFlags);
impl_displayable!(Debug Mode);
impl_displayable!(Debug SFlag);
impl_displayable!(Debug WaitPidFlag);
impl_displayable!(Debug MapFlags);
impl_displayable!(Debug ProtFlags);
impl_displayable!(Debug EpollCreateFlags);
impl_displayable!(Debug EfdFlags);
impl_displayable!(Debug SfdFlags);
impl_displayable!(Debug InitFlags);
impl_displayable!(Debug SockFlag);
impl_displayable!(Debug AddressFamily);
impl_displayable!(Debug SockProtocol);
impl_displayable!(Debug Option<SockProtocol>);
impl_displayable!(Debug TimerFlags);
// Scalar-like values are rendered via `Display`.
impl_displayable!(Display Pid);
impl_displayable!(Display i32);
impl_displayable!(Display u32);
impl_displayable!(Display i64);
impl_displayable!(Display u64);
impl_displayable!(Display isize);
impl_displayable!(Display usize);
|
use specs::*;
use types::*;
use component::channel::OnPlayerTerrainCollision;
use component::channel::OnPlayerTerrainCollisionReader;
use component::time::{StartTime, ThisFrame};
use airmash_protocol::server::EventBounce;
use airmash_protocol::{to_bytes, ServerPacket};
use websocket::OwnedMessage;
/// ECS system that bounces planes off terrain when a player/terrain
/// collision event fires.
pub struct BounceSystem {
    // Reader handle on the collision event channel; `None` until `setup`
    // registers it.
    reader: Option<OnPlayerTerrainCollisionReader>,
}
/// Resources and component storages the bounce system needs each frame.
#[derive(SystemData)]
pub struct BounceSystemData<'a> {
    pub entity: Entities<'a>,
    pub vel: WriteStorage<'a, Velocity>,
    pub pos: ReadStorage<'a, Position>,
    pub rot: ReadStorage<'a, Rotation>,
    pub plane: ReadStorage<'a, Plane>,
    pub keystate: ReadStorage<'a, KeyState>,
    pub conns: Read<'a, Connections>,
    pub config: Read<'a, Config>,
    pub channel: Read<'a, OnPlayerTerrainCollision>,
    pub thisframe: Read<'a, ThisFrame>,
    pub starttime: Read<'a, StartTime>,
}
impl BounceSystem {
pub fn new() -> Self {
Self { reader: None }
}
}
impl<'a> System<'a> for BounceSystem {
    type SystemData = BounceSystemData<'a>;
    fn setup(&mut self, res: &mut Resources) {
        // Register our reader on the collision channel before default setup.
        self.reader = Some(
            res.fetch_mut::<OnPlayerTerrainCollision>()
                .register_reader(),
        );
        Self::SystemData::setup(res);
    }
    fn run(&mut self, mut data: Self::SystemData) {
        // `setup` always runs first, so the reader is present here.
        let channel_reader = data
            .channel
            .read(self.reader.as_mut().unwrap())
            .map(|x| x.0);
        for evt in channel_reader {
            // Layer 0 appears to be terrain; exactly one side of the pair
            // should be terrain. TODO confirm layer semantics.
            if evt.0.layer == 0 || evt.1.layer == 0 {
                assert!(evt.1.layer != evt.0.layer);
                let rel;
                let maxspd;
                let ent;
                // Pick the non-terrain participant; `rel` points from the
                // terrain contact toward the entity (bounce direction).
                if evt.0.layer == 0 {
                    ent = evt.1.ent;
                    rel = (evt.1.pos - evt.0.pos).normalized();
                    // NOTE(review): this unwrap panics for entities without a
                    // Velocity, yet the `get_mut` below handles that case with
                    // a warning — the two paths are inconsistent.
                    maxspd = *data.vel.get(evt.1.ent).unwrap();
                } else {
                    ent = evt.0.ent;
                    rel = (evt.0.pos - evt.1.pos).normalized();
                    maxspd = *data.vel.get(evt.0.ent).unwrap();
                };
                // Bounce away at the entity's current speed, but at least 1.0.
                let vel = rel * Speed::max(maxspd.length(), Speed::new(1.0));
                match data.vel.get_mut(ent) {
                    Some(v) => *v = vel,
                    None => {
                        warn!(
                            target: "server",
                            "EventBounce triggered for non-player entity {:?}",
                            ent
                        );
                        continue;
                    }
                }
                let pos = data.pos.get(ent).unwrap();
                let rot = data.rot.get(ent).unwrap();
                let keystate = data.keystate.get(ent).unwrap();
                let plane = data.plane.get(ent).unwrap();
                let state = keystate.to_server(&plane);
                // Broadcast the bounce to every connected client.
                let packet = EventBounce {
                    clock: (data.thisframe.0 - data.starttime.0).to_clock(),
                    id: ent,
                    pos: *pos,
                    rot: *rot,
                    speed: vel,
                    keystate: state,
                };
                data.conns.send_to_all(OwnedMessage::Binary(
                    to_bytes(&ServerPacket::EventBounce(packet)).unwrap(),
                ));
            }
        }
    }
}
use super::PlaneCollisionSystem;
use dispatch::SystemInfo;
impl SystemInfo for BounceSystem {
    type Dependencies = PlaneCollisionSystem;
    fn name() -> &'static str {
        // NOTE(review): `line!()` makes this "<module>::<line-number>", not
        // the type name — confirm that is intentional.
        concat!(module_path!(), "::", line!())
    }
    fn new() -> Self {
        // Resolves to the inherent `BounceSystem::new` (inherent methods take
        // precedence over trait methods), so this is not recursive.
        Self::new()
    }
}
|
use std::{error, fmt};
use serde_json::Value;
use util::JsonType;
pub use self::generated::*;
/// A error that occurs during validation.
#[derive(Debug)]
pub struct ValidationError<'json> {
    /// Reason for the error.
    pub reason: ErrorKind,
    /// Pointer to the relevant JSON node.
    pub node: &'json Value,
}
impl<'json> ValidationError<'json> {
    /// Builds a `TypeMismatch` error pointing at `node`, recording the type
    /// the schema `expected` versus the type actually `found`.
    pub(crate) fn type_mismatch(
        node: &'json Value,
        expected: JsonType,
        found: JsonType,
    ) -> ValidationError<'json> {
        ValidationError {
            reason: ErrorKind::TypeMismatch { expected, found },
            // Field-init shorthand; `node: node` was redundant.
            node,
        }
    }
}
impl<'json> fmt::Display for ValidationError<'json> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Error at JSON value `{}`: {}", self.node, self.reason)
}
}
/// A list of errors.
///
/// Newtype over the individual `ValidationError`s collected in one pass.
#[derive(Debug)]
pub struct ValidationErrors<'json>(pub Vec<ValidationError<'json>>);
impl<'json> fmt::Display for ValidationErrors<'json> {
    /// Writes one line per collected error.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for error in &self.0 {
            // `writeln!` supplies the trailing newline; identical output to
            // the previous `write!(.., "...\n")` (clippy: write_with_newline).
            writeln!(f, "Error at {}: {}", error.node, error.reason)?;
        }
        Ok(())
    }
}
impl<'json> error::Error for ValidationErrors<'json> {
    // NOTE(review): `Error::description` is deprecated in modern Rust in
    // favor of `Display`; kept for compatibility with this crate's MSRV.
    fn description(&self) -> &str {
        "Errors occurred during validation"
    }
}
impl<'json> From<ValidationErrors<'json>> for Error {
fn from(err: ValidationErrors<'json>) -> Error {
Error::from(format!("{}", err))
}
}
#[allow(missing_docs)]
mod generated {
    //! Error types generated by `error_chain!`: an `Error` struct, the
    //! `ErrorKind` enum below, a `Result` alias, and conversions from the
    //! listed foreign error types.
    use util::JsonType;
    use string::Format;
    use regex::Regex;
    error_chain! {
        foreign_links {
            Io(::std::io::Error);
            Serde(::serde_json::Error);
        }
        errors {
            TypeMismatch { expected: JsonType, found: JsonType } {
                description("Type mismatch")
                display("Type mismatch: expected {}, found {}", expected, found)
            }
            TupleLengthMismatch { schemas: usize, tuple: usize } {
                description("Tuple length mismatch")
                display(
                    "Tuple length mismatch: expected {}, found {}",
                    schemas, tuple)
            }
            MaxLength { expected: usize, found: usize } {
                description("Maximum length exceeded")
                display(
                    "Length mismatch: Expected a maximum of {}, found {}",
                    expected, found)
            }
            MinLength { expected: usize, found: usize } {
                // Fixed typo: "minumum" -> "minimum".
                description("Value below minimum length")
                display(
                    "Length mismatch: Expected a minimum of {}, found {}",
                    expected, found)
            }
            MissingProperty(prop: String) {
                description("Missing object property")
                display("Missing object property: `{}`", prop)
            }
            ArrayItemNotUnique {
                description("Array items are not unique")
                display("Array items are not unique")
            }
            NumberRange { bound: f64, value: f64 } {
                description("Number out of range")
                display("Number out of range: bound is {}, value is {}", bound, value)
            }
            PropertyCount { bound: usize, found: usize } {
                description("Property count out of range")
                display(
                    "Property count out of range: bound is {}, value is {}",
                    bound, found)
            }
            InvalidRegex(regex: String) {
                description("Invalid regex")
                display("Invalid regex: {}", regex)
            }
            InvalidFormat(format: Format) {
                description("Error parsing with format")
                display("Error parsing with format: {:?}", format)
            }
            RegexMismatch { regex: Regex } {
                description("Regex did not match")
                display("Regex did not match: {}", regex)
            }
        }
    }
}
|
// Register accessor — appears to be svd2rust-generated; avoid hand edits.
#[doc = "Register `ETH_MACTxTSSNR` reader"]
pub type R = crate::R<ETH_MACTX_TSSNR_SPEC>;
#[doc = "Field `TXTSSLO` reader - TXTSSLO"]
pub type TXTSSLO_R = crate::FieldReader<u32>;
#[doc = "Field `TXTSSMIS` reader - TXTSSMIS"]
pub type TXTSSMIS_R = crate::BitReader;
impl R {
    #[doc = "Bits 0:30 - TXTSSLO"]
    #[inline(always)]
    pub fn txtsslo(&self) -> TXTSSLO_R {
        // 0x7fff_ffff masks bits 0..=30, matching the documented range.
        TXTSSLO_R::new(self.bits & 0x7fff_ffff)
    }
    #[doc = "Bit 31 - TXTSSMIS"]
    #[inline(always)]
    pub fn txtssmis(&self) -> TXTSSMIS_R {
        TXTSSMIS_R::new(((self.bits >> 31) & 1) != 0)
    }
}
#[doc = "This register contains the nanosecond part of timestamp captured for Transmit packets when Tx status is disabled.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_mactx_tssnr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_MACTX_TSSNR_SPEC;
impl crate::RegisterSpec for ETH_MACTX_TSSNR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eth_mactx_tssnr::R`](R) reader structure"]
impl crate::Readable for ETH_MACTX_TSSNR_SPEC {}
#[doc = "`reset()` method sets ETH_MACTxTSSNR to value 0"]
impl crate::Resettable for ETH_MACTX_TSSNR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
fn main() {
    let input: &str = include_str!("input.txt");
    let mut grid = Grid::from(input);
    // Step until the herds lock up; the answer is the number of the first
    // step on which no cucumber moves (that final, zero-move step included).
    let mut steps = 0;
    loop {
        steps += 1;
        if do_move(&mut grid) == 0 {
            break;
        }
    }
    println!("Count {}", steps);
}
fn do_move(grid: &mut Grid) -> usize {
let mut total_moves = 0;
let mut moves = Vec::new();
for x in 0..grid.grid.len() {
for y in 0..grid.grid[x].len() {
if grid.grid[x][y].is_some() {
if grid.grid[x][y].unwrap() == Cucumber::Right {
let new_coords = grid.make_safe(x + 1, y);
if grid.grid[new_coords.x][new_coords.y].is_none() {
moves.push(Move {
move_type: Some(Cucumber::Right),
coord: new_coords
});
moves.push(Move {
move_type: None,
coord: Coord { x, y }
});
}
}
}
}
}
total_moves += moves.len();
for the_move in moves {
grid.grid[the_move.coord.x][the_move.coord.y] = the_move.move_type;
}
let mut moves = Vec::new();
for x in 0..grid.grid.len() {
for y in 0..grid.grid[x].len() {
if grid.grid[x][y].is_some() {
if grid.grid[x][y].unwrap() == Cucumber::Down {
let new_coords = grid.make_safe(x, y + 1);
if grid.grid[new_coords.x][new_coords.y].is_none() {
moves.push(Move {
move_type: Some(Cucumber::Down),
coord: new_coords
});
moves.push(Move {
move_type: None,
coord: Coord { x, y }
});
}
}
}
}
}
total_moves += moves.len();
for the_move in moves {
grid.grid[the_move.coord.x][the_move.coord.y] = the_move.move_type;
}
total_moves
}
/// A pending grid update: place `move_type` at `coord`.
/// `move_type == None` clears the cell (a cucumber's old position).
struct Move {
    move_type: Option<Cucumber>,
    coord: Coord
}
/// Grid coordinate; `x` is the column, `y` is the row (see `Grid::from`).
struct Coord {
    x: usize,
    y: usize
}
/// Sea-cucumber facing: `Right` moves east (+x), `Down` moves south (+y).
#[derive(Debug, Clone, PartialEq, Copy)]
enum Cucumber {
    Right,
    Down
}
/// Column-major grid of cells: `grid[x][y]`, `None` when the cell is empty.
#[derive(Debug)]
struct Grid {
    grid: Vec<Vec<Option<Cucumber>>>
}
impl Grid {
fn from(str: &str) -> Grid {
let mut grid = Vec::new();
let mut y = 0;
for line in str.lines() {
let mut x = 0;
for char in line.chars() {
if grid.len() < x + 1 {
grid.resize(x + 1, vec![None]);
}
if grid[x].len() < y + 1 {
grid[x].resize(y + 1, None);
}
let mut c = None;
if char == 'v' {
c = Some(Cucumber::Down)
} else if char == '>' {
c = Some(Cucumber::Right)
}
grid[x][y] = c;
x += 1;
}
y += 1;
}
Grid { grid }
}
fn make_safe(&self, x: usize, y: usize) -> Coord {
let mut x = x;
let mut y = y;
while x >= self.grid.len() {
x = x - self.grid.len();
}
while y >= self.grid[x].len() {
y = y - self.grid[x].len();
}
Coord{ x, y }
}
} |
use super::http_verbs::HttpVerbs;
/// A parsed HTTP request line.
pub struct Request {
    // Request path, e.g. "/index.html".
    path: String,
    // Raw query string after '?', if any.
    query_string: Option<String>,
    // HTTP method of the request.
    method: HttpVerbs,
}
use proconio::input;
use proconio::marker::*;
use std::cmp::*;
fn main() {
    // Reads three integers from stdin via proconio's `input!` macro.
    // NOTE(review): `n`, `u`, and `v` are read but never used — the solution
    // appears unfinished.
    input! {
        n: u32,
        u: u32,
        v: u32,
    }
}
|
pub mod sink;
use crate::tokens::TokenInfo;
use crate::parser::event::ParseEvent;
use crate::syntax_kind::SyntaxKindId;
use crate::parser::parser_impl::sink::ParseEventSink;
use crate::parser::event::tree_builder::TreeBuilder;
use crate::parser::parser_impl::sink::GreenTreeEventSink;
use errors::TextDiagnostic;
use crate::language::LanguageId;
use crate::parser::parser_api::TriviaHandler;
/// Event-based parser core: records `ParseEvent`s over a pre-lexed token
/// stream; the events are later replayed into a tree-building sink.
pub(crate) struct ParserImpl<'a, T: TriviaHandler> {
    // Recorded parse events, in emission order.
    events: Vec<ParseEvent>,
    // Source text plus its token stream.
    input: ParserInput<'a>,
    // Index of the current token within `input.tokens`.
    position: u32,
    language_id: LanguageId,
    // Decides which token kinds count as trivia (whitespace, comments, ...).
    trivia_handler: &'a T
}
impl<'a, T: TriviaHandler> ParserImpl<'a, T> {
    /// Creates a parser over pre-lexed `tokens` backed by the original `text`.
    pub fn new(tokens: Vec<TokenInfo>, text: &'a str, language_id: LanguageId, trivia_handler: &'a T) -> Self {
        ParserImpl { events: vec![], input: ParserInput { text, tokens }, position: 0, language_id, trivia_handler }
    }
    /// Consumes the parser, yielding the raw event stream.
    pub fn into_events(self) -> Vec<ParseEvent> {
        self.events
    }
    /// Start internal node.
    ///
    /// Emits a placeholder `Start` (TOMBSTONE) and returns its index so
    /// `finish` can patch in the real kind later.
    pub fn start(&mut self) -> u32 {
        let pos = self.events.len() as u32;
        self.event(ParseEvent::Start { kind: SyntaxKindId::TOMBSTONE, forward_parent: None });
        pos
    }
    /// Finish internal node
    ///
    /// Replaces the placeholder kind recorded by `start` at index `pos`,
    /// then emits the matching `Finish` event.
    pub fn finish(&mut self, pos: u32, kind: SyntaxKindId) {
        match self.events[pos as usize] {
            ParseEvent::Start {
                kind: ref mut slot, ..
            } => {
                *slot = kind;
            }
            _ => unreachable!(),
        }
        self.event(ParseEvent::Finish {});
    }
    /// Records an error event carrying `error` as the diagnostic text.
    pub fn error(&mut self, error: String) {
        self.event(ParseEvent::Error { diagnostic: error });
    }
    /// nth element's type from current position of parser
    pub fn nth(&self, offset: u32) -> SyntaxKindId {
        self.input.token_type(self.position + offset)
    }
    // TODO bump must skip whitespaces and other trivias
    // NOTE(review): `position` is incremented once before the trivia loop and
    // once after, and `trivia` advances it again per trivia token — this
    // looks like it can skip a non-trivia token. Confirm against `leaf`.
    pub fn bump(&mut self) {
        self.position += 1;
        while self.trivia_handler.is_trivia(self.nth(0)) {
            let id = self.nth(0);
            self.trivia(id)
        }
        self.position += 1;
    }
    /// Emits a trivia token event and advances past it.
    pub fn trivia(&mut self, token_type: SyntaxKindId) {
        self.position += 1;
        self.event(ParseEvent::Token { is_trivia: true, token_type })
    }
    /// Emits a leaf (non-trivia) token event, then advances via `bump`.
    pub fn leaf(&mut self, token_type: SyntaxKindId) {
        self.event(ParseEvent::Token { is_trivia: false, token_type, });
        self.bump();
    }
    fn event(&mut self, event: ParseEvent) {
        self.events.push(event)
    }
    /// Replays the recorded events into `sink` via a `TreeBuilder`,
    /// producing the sink's result type.
    pub fn build<TR, S: ParseEventSink<TR>>(mut self, sink: S) -> TR {
        let builder: TreeBuilder<TR, S> = TreeBuilder::new(
            sink,
            &self.input.tokens,
            &mut self.events,
            self.input.text,
            self.language_id
        );
        builder.build().finish()
    }
}
// Placeholder result type; not yet used.
pub struct Tree {}
/// Source text together with its lexed token stream.
pub struct ParserInput<'a> {
    tokens: Vec<TokenInfo>,
    text: &'a str,
}
impl<'a> ParserInput<'a> {
fn token_type(&self, position: u32) -> SyntaxKindId {
if position >= self.tokens.len() as u32 {
SyntaxKindId::END
} else {
self.tokens[position as usize].token_type
}
}
} |
use std::fmt;
/// Application-level status codes, convertible to `tonic::Status`.
#[derive(Debug)]
pub enum Statusx {
    // Pass-through for a raw gRPC status code.
    Native(tonic::Code),
    UserNotFound,
    TelOrPasswordIncorrect,
    UnameExist,
}
impl Statusx {
    /// Placeholder for detail assembly.
    /// NOTE(review): currently only prints a stub message.
    pub fn assemble_details(&self) {
        println!("This is message impl");
    }
    /// Human-readable message for this status.
    pub fn message(&self) -> &'static str {
        match self {
            Statusx::UserNotFound => "user not found",
            Statusx::TelOrPasswordIncorrect => "tel or password incorrect",
            Statusx::UnameExist => "user name exist",
            Statusx::Native(code) => code.description(),
        }
    }
}
impl From<Statusx> for tonic::Status {
    /// Converts an application status into a gRPC `tonic::Status`.
    /// NOTE(review): every non-native variant maps to `not_found`;
    /// `TelOrPasswordIncorrect`/`UnameExist` might be better served by
    /// `unauthenticated`/`already_exists` — confirm intent before changing.
    fn from(s: Statusx) -> Self {
        match s {
            Statusx::UserNotFound => tonic::Status::not_found(s.message()),
            Statusx::TelOrPasswordIncorrect => tonic::Status::not_found(s.message()),
            Statusx::UnameExist => tonic::Status::not_found(s.message()),
            Statusx::Native(code) => tonic::Status::new(code, code.description()),
        }
    }
}
|
use super::*;
use crate::construction::Quota;
use crate::models::examples::create_example_problem;
use crate::solver::TelemetryMode;
use crate::utils::Environment;
use std::sync::Arc;
// One generated test per case; each case supplies a telemetry mode plus the
// expected number of recorded evolution entries.
parameterized_test! {can_enable_telemetry_metrics, (mode, evolution_size), {
    can_enable_telemetry_metrics_impl(mode, evolution_size);
}}
can_enable_telemetry_metrics! {
    case01: (TelemetryMode::OnlyMetrics { track_population: 100 }, 31),
    case02: (TelemetryMode::OnlyMetrics { track_population: 99 }, 32),
    case03: (TelemetryMode::All {
        logger: Arc::new(|_| {}), log_best: 100, log_population: 1000,
        track_population: 100, dump_population: false,
    }, 31),
}
/// Runs a full evolution with metrics enabled and checks the collected
/// metrics are populated and sized as expected.
fn can_enable_telemetry_metrics_impl(mode: TelemetryMode, evolution_size: usize) {
    let config = EvolutionConfig {
        telemetry: Telemetry::new(mode),
        ..EvolutionConfig::new(create_example_problem(), Arc::new(Environment::default()))
    };
    let evolution = EvolutionSimulator::new(config).unwrap();
    let (_, metrics) = evolution.run().unwrap();
    let metrics = metrics.expect("metrics are empty");
    assert_eq!(metrics.generations, 3000);
    assert_eq!(metrics.evolution.len(), evolution_size);
    assert!(metrics.duration > 0);
    assert!(metrics.speed > 0.);
}
// Modes that must not collect metrics.
parameterized_test! {can_disable_telemetry_metrics, mode, {
    can_disable_telemetry_metrics_impl(mode);
}}
can_disable_telemetry_metrics! {
    case01: TelemetryMode::None,
    case02: TelemetryMode::OnlyLogging {
        logger: Arc::new(|_| {}), log_best: 100, log_population: 1000, dump_population: false
    },
}
/// Runs a full evolution and asserts no metrics were produced.
fn can_disable_telemetry_metrics_impl(mode: TelemetryMode) {
    let config = EvolutionConfig {
        telemetry: Telemetry::new(mode),
        ..EvolutionConfig::new(create_example_problem(), Arc::new(Environment::default()))
    };
    let evolution = EvolutionSimulator::new(config).unwrap();
    let (_, metrics) = evolution.run().unwrap();
    assert!(metrics.is_none())
}
#[test]
fn can_use_quota() {
    // A quota that is exhausted immediately, so evolution stops at once.
    struct FullQuota {}
    impl Quota for FullQuota {
        fn is_reached(&self) -> bool {
            true
        }
    }
    let config = EvolutionConfig {
        quota: Some(Arc::new(FullQuota {})),
        telemetry: Telemetry::new(TelemetryMode::OnlyMetrics { track_population: 100 }),
        ..EvolutionConfig::new(create_example_problem(), Arc::new(Environment::default()))
    };
    let evolution = EvolutionSimulator::new(config).unwrap();
    let (_, metrics) = evolution.run().unwrap();
    let metrics = metrics.expect("metrics are empty");
    // With the quota reached up-front, no generations run and only the
    // initial population snapshot is recorded.
    assert_eq!(metrics.generations, 0);
    assert_eq!(metrics.evolution.len(), 1);
}
|
#![no_std]
#![feature(alloc)]
#[macro_use]
extern crate alloc;
use alloc::string::String;
use alloc::vec::Vec;
extern crate common;
use common::bytesrepr::ToBytes;
use common::contract_api::{call_contract, new_uref};
use common::contract_api::pointers::ContractPointer;
use common::value::Value;
// Contract entry point: calls the contract stored at a fixed hash with one
// string argument and stores the returned greeting under a fresh uref so the
// call has a visible effect on global state.
#[no_mangle]
pub extern "C" fn call() {
    //This hash comes from blake2b256( [0;32] ++ [0;8] ++ [0;4] )
    let hash = ContractPointer::Hash([
        164, 102, 153, 51, 236, 214, 169, 167, 126, 44, 250, 247, 179, 214, 203, 229, 239, 69, 145,
        25, 5, 153, 113, 55, 255, 188, 176, 201, 7, 4, 42, 100,
    ]);
    let arg = "World";
    let args = vec![arg.to_bytes()];
    let result: String = call_contract(hash, &args, &Vec::new());
    // The called contract is expected to prepend "Hello, ".
    assert_eq!("Hello, World", result);
    //store the result at a uref so it can be seen as an effect on the global state
    let _uref = new_uref(Value::String(result));
}
|
use super::{Trader, Order, Action};
use crate::indicators::{Value, Indicator};
use crate::economy::Monetary;
/// Which side of the trading pair matches the `SAFE` currency, if any.
enum Safe {
    Base,
    Quote,
    None
}
/// Trader decorator parameterized by a "safe" currency symbol.
/// NOTE(review): `const SAFE: &'static str` requires an unstable
/// const-generics feature (`adt_const_params`) — nightly-only.
pub struct FullStop<T, const SAFE: &'static str>
where
    T: Trader
{
    // The wrapped trader whose orders get rewritten.
    trader: T,
    // Cached classification of the pair against `SAFE` (set in `initialize`).
    safe: Safe,
}
impl<T, const SAFE: &'static str> Trader for FullStop<T, SAFE>
where
    T: Trader
{
    type Indicators = T::Indicators;
    /// Wraps the inner trader, remembering which side of the pair (if any)
    /// matches the `SAFE` currency parameter.
    fn initialize(base: &str, quote: &str) -> FullStop<T, SAFE> {
        FullStop {
            trader: T::initialize(base, quote),
            safe: if base == SAFE {
                Safe::Base
            } else
            if quote == SAFE {
                Safe::Quote
            } else {
                Safe::None
            }
        }
    }
    /// Forwards evaluation to the inner trader, then rewrites orders that
    /// trade against the safe side of the pair.
    /// NOTE(review): the inner order's `quantity` is discarded and replaced
    /// with a fixed 1.0 — confirm this "full stop" cap is intentional.
    fn evaluate(&mut self, output: <Self::Indicators as Indicator>::Output) -> Option<Order> {
        if let Some(order) = self.trader.evaluate(output) {
            Some(match (&self.safe, order) {
                (Safe::Base, Order::Limit(Action::Buy, quantity, value)) => {
                    Order::Limit(Action::Buy, 1.0, value)
                },
                (Safe::Quote, Order::Limit(Action::Sell, quantity, value)) => {
                    Order::Limit(Action::Sell, 1.0, value)
                },
                (_, order) => {
                    order
                }
            })
        } else {
            None
        }
    }
}
extern crate ray_tracer_challenge;
use ray_tracer_challenge::camera::Camera;
use ray_tracer_challenge::lighting::PointLight;
use ray_tracer_challenge::materials::Material;
use ray_tracer_challenge::matrices::Matrix4;
use ray_tracer_challenge::shapes::{Plane, Sphere};
use ray_tracer_challenge::transforms::view_transform;
use ray_tracer_challenge::tuples::Tuple;
use ray_tracer_challenge::world::World;
use std::f32::consts::PI;
use std::fs::File;
use std::io::prelude::*;
/// Renders a hexagonal room (floor plus six rotated wall planes) holding
/// three spheres, viewed top-down, and writes the image to
/// `plane_example.ppm`.
fn main() -> std::io::Result<()> {
    let light = PointLight::new(
        Tuple::point(-1.0, 2.5, -1.0),
        Tuple::color(1.0, 1.0, 1.0),
    );
    let mut world = World::new();
    world.light_source = Some(light);
    // Shared matte material for the floor and all walls.
    // NOTE(review): `side_color` is assigned to the floor and to each of the
    // six walls, which compiles only if `Material` is `Copy` — confirm.
    let mut side_color = Material::default();
    side_color.color = Tuple::color(1.0, 0.9, 0.9);
    side_color.specular = 0.0;
    {
        let mut floor = Plane::new();
        floor.material = side_color;
        world.objects.push(floor);
    }
    // hexagon angles
    // 120 degrees around y-axis
    // 2 PI / 3.0 radians
    // if 90 degrees is PI/2.0
    // Each wall: stand a plane upright (rotate about x), spin it hex*PI/3
    // around y, and push it 4 units out from the origin.
    for hex in 0..6 {
        let mut wall = Plane::new();
        wall.transform = Matrix4::translation(0.0, 0.0, 4.0)
            * Matrix4::rotation_y(hex as f32 * (PI / 3.0))
            * Matrix4::rotation_x(PI / 2.0);
        wall.material = side_color;
        world.objects.push(wall);
    }
    {
        let mut middle = Sphere::new();
        middle.transform = Matrix4::translation(-0.5, -0.1, 0.5);
        middle.material = Material::default();
        middle.material.color = Tuple::color(0.1, 1.0, 0.5);
        middle.material.diffuse = 0.7;
        middle.material.specular = 0.3;
        world.objects.push(middle);
    }
    {
        let mut right = Sphere::new();
        right.transform = Matrix4::translation(1.5, 0.5, -0.5)
            * Matrix4::scaling(0.5, 0.5, 0.5);
        right.material = Material::default();
        right.material.color = Tuple::color(0.5, 1.0, 0.1);
        right.material.diffuse = 0.7;
        right.material.specular = 0.3;
        world.objects.push(right);
    }
    {
        let mut left = Sphere::new();
        left.transform = Matrix4::translation(-1.5, 0.33, -0.75)
            * Matrix4::scaling(0.33, 0.33, 0.33);
        left.material = Material::default();
        left.material.color = Tuple::color(1.0, 0.8, 0.1);
        left.material.diffuse = 0.7;
        left.material.specular = 0.3;
        world.objects.push(left);
    }
    // Derive the horizontal resolution from a 16:9-ish aspect ratio.
    let aspect = 2560.0 / 1440.0;
    let vsize = 100;
    let hsize = (aspect * vsize as f32).round() as u32;
    let mut camera = Camera::new(hsize, vsize, PI / 3.0);
    // Camera sits above the scene looking straight down; +z is "up" on film.
    camera.transform = view_transform(
        Tuple::point(0.0, 10.5, 0.0),
        Tuple::point(0.0, 1.0, 0.0),
        Tuple::vector(0.0, 0.0, 1.0),
    );
    println!("Rendering world with {} pixels", camera.num_pixels());
    let image = camera.render(world);
    let ppm = image.to_ppm();
    let filename = "plane_example.ppm";
    let mut file = File::create(filename)?;
    file.write_all(ppm.as_bytes())?;
    Ok(())
}
|
#![allow(dead_code)]
extern crate serde_json;
extern crate serde_yaml;
extern crate ureq;
extern crate obofoundry;
// NOTE(review): these tests hit the live obofoundry.org registry over HTTP,
// so they require network access and can fail for external reasons.
#[test]
fn yaml() {
    let url = "http://www.obofoundry.org/registry/ontologies.yml";
    let res = ureq::get(url).call();
    let reader = res.unwrap().into_reader();
    // Round-trip check: the published YAML registry deserializes cleanly.
    let _foundry: obofoundry::Foundry = serde_yaml::from_reader(reader).unwrap();
}
#[test]
fn json() {
    let url = "http://www.obofoundry.org/registry/ontologies.jsonld";
    let res = ureq::get(url).call();
    let reader = res.unwrap().into_reader();
    // Round-trip check: the published JSON-LD registry deserializes cleanly.
    let _foundry: obofoundry::Foundry = serde_json::from_reader(reader).unwrap();
}
|
#[macro_use]
extern crate serde;
pub mod client;
pub mod user;
use client::InnerClient;
use std::{convert::Infallible, sync::Arc};
use warp::{reply::json, Filter, Reply};
/// GET /users handler: returns all users as JSON.
/// NOTE(review): `unwrap` turns a DB failure into a panic — consider a 500.
async fn fetch_users(db: Arc<InnerClient>) -> Result<impl Reply, Infallible> {
    Ok(json(&db.users().await.unwrap()))
}
/// GET /big handler: returns the "big users" query result as JSON.
async fn fetch_big(db: Arc<InnerClient>) -> Result<impl Reply, Infallible> {
    Ok(json(&db.big_users().await.unwrap()))
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Shared DB client; one clone is moved into the /users filter closure,
    // the original Arc into /big.
    let db = Arc::new(InnerClient::new("file:../test.db").await?);
    let users_db = db.clone();
    let users = warp::path("users")
        .and(warp::any().map(move || users_db.clone()))
        .and_then(fetch_users);
    let big = warp::path("big")
        .and(warp::any().map(move || db.clone()))
        .and_then(fetch_big);
    // GET-only routing over the two endpoints, served on localhost:3030.
    let routes = warp::get().and(users.or(big));
    warp::serve(routes).run(([127, 0, 0, 1], 3030)).await;
    Ok(())
}
|
use multiplatform_test::multiplatform_test;
// Smoke tests for the `multiplatform_test` attribute macro itself: default
// expansion, and expansion with tracing/logging environment setup.
#[multiplatform_test]
fn test_default() {}
#[multiplatform_test(test, env_tracing)]
fn test_tracing() {
    tracing::warn!("This is a tracing warning!");
}
#[multiplatform_test(test, env_logging)]
fn test_logging() {
    log::warn!("This is a logging warning!");
}
|
//! Scenes from the first book, *Raytracing in One Weekend*.
use std::sync::Arc;
use crate::camera::Camera;
use crate::hittable::{HittableList, Sphere};
use crate::material::Material::{Dielectric, Lambertian, Metallic};
use crate::material::{Diel, Lambert, Metal};
use crate::texture::SolidColor;
use crate::vec3::{Color, Point3, Vec3};
/// The first scene in the book with multiple balls and materials.
///
/// Returns the camera (aspect ratio derived from `img_w`/`img_h`) plus a
/// world of two lambertian and two metallic spheres.
pub fn base_metal_lambert(img_w: u32, img_h: u32) -> (crate::camera::Camera, HittableList) {
    let mut world = HittableList::with_capacity(4);
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, 0.0, -1.0),
        0.5,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.7, 0.3, 0.3)))),
    )));
    // The "ground": a huge sphere just below the scene.
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, -100.5, -1.0),
        100.0,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.8, 0.8, 0.0)))),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(1.0, 0.0, -1.0),
        0.5,
        Metallic(Metal::new(Color::new(0.8, 0.6, 0.2), 0.0)),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(-1.0, 0.0, -1.0),
        0.5,
        Metallic(Metal::new(Color::new(0.8, 0.8, 0.8), 0.0)),
    )));
    // `w * h.recip()` computes the aspect ratio w/h.
    let cam = Camera::new(
        Point3::new_with(0.0),
        Point3::new(0.0, 0.0, -1.0),
        Vec3::new(0.0, 1.0, 0.0),
        90.0,
        f64::from(img_w) * f64::from(img_h).recip(),
        0.0,
        1.0,
        0.0,
        1.0,
    );
    (cam, world)
}
/// Scene for "all objects refract", includes first parts of dielectrics.
pub fn all_refract() -> HittableList {
    let mut world = HittableList::with_capacity(4);
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, 0.0, -1.0),
        0.5,
        Dielectric(Diel::new(1.5)),
    )));
    // Ground sphere.
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, -100.5, -1.0),
        100.0,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.8, 0.8, 0.0)))),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(1.0, 0.0, -1.0),
        0.5,
        Dielectric(Diel::new(1.5)),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(-1.0, 0.0, -1.0),
        0.5,
        Metallic(Metal::new(Color::new(0.8, 0.8, 0.8), 0.0)),
    )));
    world
}
/// Scene for the section where refraction was added to dielectrics.
pub fn sometimes_refract() -> HittableList {
    let mut world = HittableList::with_capacity(4);
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, 0.0, -1.0),
        0.5,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.1, 0.2, 0.5)))),
    )));
    // Ground sphere.
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, -100.5, -1.0),
        100.0,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.8, 0.8, 0.0)))),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(1.0, 0.0, -1.0),
        0.5,
        Metallic(Metal::new(Color::new(0.8, 0.6, 0.2), 0.0)),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(-1.0, 0.0, -1.0),
        0.5,
        Dielectric(Diel::new(1.5)),
    )));
    world
}
/// Scene where one ball was made into a glass bubble.
pub fn bubble() -> HittableList {
    let mut world = HittableList::with_capacity(4);
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, 0.0, -1.0),
        0.5,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.1, 0.2, 0.5)))),
    )));
    // Ground sphere.
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, -100.5, -1.0),
        100.0,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.8, 0.8, 0.0)))),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(1.0, 0.0, -1.0),
        0.5,
        Metallic(Metal::new(Color::new(0.8, 0.6, 0.2), 0.3)),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(-1.0, 0.0, -1.0),
        0.5,
        Dielectric(Diel::new(1.5)),
    )));
    // Nested sphere with a negative radius hollows out the glass ball,
    // turning it into a thin-walled bubble.
    world.add(Arc::new(Sphere::new(
        Point3::new(-1.0, 0.0, -1.0),
        -0.45,
        Dielectric(Diel::new(1.5)),
    )));
    world
}
/// Two balls side by side for zoomed in scene.
pub fn wide_view() -> HittableList {
    let mut world = HittableList::with_capacity(2);
    // Radius pi/4 makes the two spheres exactly touch at the origin.
    let r = core::f64::consts::FRAC_PI_4;
    world.add(Arc::new(Sphere::new(
        Point3::new(-r, 0.0, -1.0),
        r,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.0, 1.0, 1.0)))),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(r, 0.0, -1.0),
        r,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(1.0, 0.2, 0.0)))),
    )));
    world
}
/// Book cover scene.
///
/// A large ground sphere, a 30x30 grid of randomized small spheres, and
/// three feature spheres, viewed through a 20-degree camera.
pub fn final_scene<R: rand::Rng>(
    rng: &mut R,
    img_w: u32,
    img_h: u32,
) -> (crate::camera::Camera, HittableList) {
    let mut world = HittableList::new();
    world.add(Arc::new(Sphere::new(
        Point3::new(0.5, -1000.0, 0.0),
        1000.0,
        Lambertian(Lambert::new(Arc::new(SolidColor::new_with(0.5)))),
    )));
    // Add more balls to the scene and randomize the radius of the smaller ones
    // NOTE(review): two-argument `gen_range(lo, hi)` is the pre-0.8 `rand`
    // API — newer versions take a range (`lo..hi`).
    let bound = 15;
    for (a, b) in (-bound..bound).flat_map(|x| core::iter::repeat(x).zip(-bound..bound)) {
        let radius = rng.gen_range(0.1, 0.3);
        let choose_mat = rng.gen::<f64>();
        let center = Point3::new(
            f64::from(a) + 0.9 * rng.gen::<f64>(),
            radius,
            f64::from(b) + 0.9 * rng.gen::<f64>(),
        );
        // Skip positions that would collide with the big metal sphere at x=4.
        if (center - Point3::new(4.0, radius, 0.0)).length() > 0.9 {
            if choose_mat < 0.8 {
                // diffuse
                world.add(Arc::new(Sphere::new(
                    center,
                    radius,
                    Lambertian(Lambert::new(Arc::new(SolidColor::from_color(
                        Color::random(rng) * Color::random(rng),
                    )))),
                )));
            } else if choose_mat < 0.95 {
                // metal
                world.add(Arc::new(Sphere::new(
                    center,
                    radius,
                    Metallic(Metal::new(
                        Color::random_range(rng, 0.3, 1.0),
                        rng.gen_range(0.0, 0.5),
                    )),
                )));
            } else {
                // glass
                world.add(Arc::new(Sphere::new(
                    center,
                    radius,
                    Dielectric(Diel::new(1.5)),
                )));
            }
        }
    }
    // The three hero spheres: glass, lambertian, and metal.
    world.add(Arc::new(Sphere::new(
        Point3::new(0.0, 1.0, 0.0),
        1.0,
        Dielectric(Diel::new(1.5)),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(-4.0, 1.0, 0.0),
        1.0,
        Lambertian(Lambert::new(Arc::new(SolidColor::new(0.4, 0.2, 0.1)))),
    )));
    world.add(Arc::new(Sphere::new(
        Point3::new(4.0, 1.0, 0.0),
        1.0,
        Metallic(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0)),
    )));
    let lookfrom = Point3::new(13.0, 2.0, 3.0);
    let lookat = Point3::new(0.0, 0.0, 0.0);
    let vup = Vec3::new(0.0, 1.0, 0.0);
    let vfov = 20.0;
    // `w * h.recip()` computes the aspect ratio w/h.
    let aspect_ratio = f64::from(img_w) * f64::from(img_h).recip();
    let focus_dist = 10.0;
    let aperture = 0.0;
    let cam = crate::camera::Camera::new(
        lookfrom,
        lookat,
        vup,
        vfov,
        aspect_ratio,
        aperture,
        focus_dist,
        0.0,
        1.0,
    );
    (cam, world)
}
|
extern crate syntax;
use syntax::ast;
use syntax::parse::{new_parse_sess};
use syntax::parse::{ParseSess};
use syntax::parse::{new_parser_from_source_str};
use syntax::parse::parser::Parser;
use syntax::parse::token;
use syntax::visit;
use syntax::codemap;
use std::gc::Gc;
use std::task;
use racer::Match;
use racer;
use racer::util;
use racer::nameres::{resolve_path};
use racer::typeinf;
use syntax::visit::Visitor;
use racer::nameres;
// NOTE(review): this file uses pre-1.0 Rust (`#[deriving]`, `uint`, `Gc`,
// closure-type syntax) and will not build on any modern toolchain.
/// A position (`point`, byte offset) within a source file.
#[deriving(Clone)]
struct Scope {
    pub filepath: Path,
    pub point: uint
}
// This code ripped from libsyntax::util::parser_testing
/// Builds a libsyntax parser over `source_str`, attributed to the dummy
/// filename "bogofile".
pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> {
    new_parser_from_source_str(ps,
                               Vec::new(),
                               "bogofile".to_string(),
                               source_str)
}
// pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: ~str) -> Parser<'a> {
// new_parser_from_source_str(ps, Vec::new(), "bogofile".to_owned(), source_str)
// }
/// Runs `f` with a fresh parser over `s`, aborting if any parse errors were
/// recorded. (Old closure-type syntax: `|&mut Parser| -> T`.)
fn with_error_checking_parse<T>(s: String, f: |&mut Parser| -> T) -> T {
    let ps = new_parse_sess();
    let mut p = string_to_parser(&ps, s);
    let x = f(&mut p);
    p.abort_if_errors();
    x
}
// Thin wrappers over `with_error_checking_parse` for each AST node kind.
// parse a string, return an expr
pub fn string_to_expr (source_str : String) -> Gc<ast::Expr> {
    with_error_checking_parse(source_str, |p| {
        p.parse_expr()
    })
}
// parse a string, return an item
pub fn string_to_item (source_str : String) -> Option<Gc<ast::Item>> {
    with_error_checking_parse(source_str, |p| {
        p.parse_item(Vec::new())
    })
}
// parse a string, return a stmt
pub fn string_to_stmt(source_str : String) -> Gc<ast::Stmt> {
    with_error_checking_parse(source_str, |p| {
        p.parse_stmt(Vec::new())
    })
}
// parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate {
    with_error_checking_parse(source_str, |p| {
        p.parse_crate_mod()
    })
}
// Collects the paths named by `use` / `extern crate` view items while
// walking a crate AST.
pub struct ViewItemVisitor {
    pub ident : Option<String>,    // alias ident, when the item binds one
    pub paths : Vec<Vec<String>>   // each imported path as segment strings
}

impl visit::Visitor<()> for ViewItemVisitor {
    fn visit_view_item(&mut self, i: &ast::ViewItem, e: ()) {
        match i.node {
            ast::ViewItemUse(ref path) => {
                match path.node {
                    // `use a::b::c;` or `use x = a::b::c;`
                    ast::ViewPathSimple(ident, ref path, _) => {
                        let mut v = Vec::new();
                        for seg in path.segments.iter() {
                            v.push(token::get_ident(seg.identifier).get().to_string())
                        }
                        self.paths.push(v);
                        self.ident = Some(token::get_ident(ident).get().to_string());
                    },
                    // `use a::b::{c, d};` — record one full path per listed ident
                    ast::ViewPathList(ref pth, ref paths, _) => {
                        let mut v = Vec::new();
                        for seg in pth.segments.iter() {
                            v.push(token::get_ident(seg.identifier).get().to_string())
                        }
                        for path in paths.iter() {
                            //debug!("PHIL view path list item {}",token::get_ident(path.node.name));
                            match path.node {
                                ast::PathListIdent{name, ..} => {
                                    let mut vv = v.clone();
                                    vv.push(token::get_ident(name).get().to_string());
                                    self.paths.push(vv);
                                }
                                ast::PathListMod{..} => (), // TODO
                            }
                        }
                    }
                    // `use a::b::*;` — globs are only logged, not recorded
                    ast::ViewPathGlob(_, id) => {
                        debug!("PHIL got glob {:?}",id);
                    }
                }
            },
            // `extern crate foo = "bar";` — record the alias and, when
            // present, the real crate name from the string location
            ast::ViewItemExternCrate(ident, ref loc, _) => {
                self.ident = Some(token::get_ident(ident).get().to_string());
                let ll = loc.clone();
                ll.map(|(ref istr, _ /* str style */)| {
                    let mut v = Vec::new();
                    v.push(istr.get().to_string());
                    self.paths.push(v);
                });
            }
        }
        visit::walk_view_item(self, i, e)
    }
}
// Visitor state for locating a `let` binding and (optionally) inferring
// the type of its initializer.
struct LetVisitor {
    scope: Scope,       // file/offset context used to resolve names
    parseinit: bool,    // when true, also attempt to type the initializer
    result: Option<LetResult>
}

// Outcome of a `let` search: the bound name, its byte offset, and the
// resolved type of the initializer when one could be determined.
pub struct LetResult {
    pub name: String,
    pub point: uint,
    pub inittype: Option<Match>
}

// Flatten a path AST (`a::b::c`) into its segment strings.
fn path_to_vec(pth: &ast::Path) -> Vec<String> {
    let mut v = Vec::new();
    for seg in pth.segments.iter() {
        v.push(token::get_ident(seg.identifier).get().to_string());
    }
    return v;
}

// Visitor state for inferring the type of an expression.
struct ExprTypeVisitor {
    scope: Scope,
    result: Option<Match>   // best match found so far, if any
}
// Resolve `fqn` relative to `fpath`/`pos` in both namespaces and return
// the first exact match.
fn find_match(fqn: &Vec<String>, fpath: &Path, pos: uint) -> Option<Match> {
    let myfqn = util::to_refs(fqn);
    return resolve_path(myfqn.as_slice(), fpath, pos, racer::ExactMatch,
                        racer::BothNamespaces).nth(0);
}

// As find_match, but restricted to the type namespace.
fn find_type_match(fqn: &Vec<String>, fpath: &Path, pos: uint) -> Option<Match> {
    let myfqn = util::to_refs(fqn);
    return resolve_path(myfqn.as_slice(), fpath, pos,
                        racer::ExactMatch, racer::TypeNamespace).nth(0);
}

// Resolve `fqn`, then ask typeinf for the type of the resolved match.
// The matched file is loaded with comments masked out first.
fn get_type_of_path(fqn: &Vec<String>, fpath: &Path, pos: uint) -> Option<Match> {
    let om = find_match(fqn, fpath, pos);
    if om.is_some() {
        let m = om.unwrap();
        let msrc = racer::load_file_and_mask_comments(&m.filepath);
        return typeinf::get_type_of_match(m, msrc.as_slice())
    } else {
        return None;
    }
}
// Infers the type of an expression by pattern-matching on its AST node.
// Only paths, calls, struct literals and method calls are handled.
impl visit::Visitor<()> for ExprTypeVisitor {
    fn visit_expr(&mut self, expr: &ast::Expr, _: ()) {
        debug!("PHIL visit_expr {:?}",expr);
        //walk_expr(self, ex, e)
        match expr.node {
            // A bare path: resolve it and take the type of what it names.
            ast::ExprPath(ref path) => {
                debug!("PHIL expr is a path {}",path_to_vec(path));
                let pathvec = path_to_vec(path);
                self.result = get_type_of_path(&pathvec,
                                               &self.scope.filepath,
                                               self.scope.point);
            }
            // A call: type the callee, then resolve the function's
            // declared return type.
            ast::ExprCall(callee_expression, _/*ref arguments*/) => {
                self.visit_expr(&*callee_expression, ());
                let mut newres: Option<Match> = None;
                {
                    // borrow self.result in an inner scope so it can be
                    // overwritten once the borrow ends
                    let res = &self.result;
                    match *res {
                        Some(ref m) => {
                            let fqn = racer::typeinf::get_return_type_of_function(m);
                            debug!("PHIL found exprcall return type: {}",fqn);
                            newres = find_match(&fqn, &m.filepath, m.point);
                        },
                        None => {}
                    }
                }
                self.result = newres;
            }
            // A struct literal: its type is the struct path itself.
            ast::ExprStruct(ref path, _, _) => {
                let pathvec = path_to_vec(path);
                self.result = find_type_match(&pathvec,
                                              &self.scope.filepath,
                                              self.scope.point);
            }
            // A method call: type the receiver, search its impls for the
            // named method, then resolve that method's return type.
            ast::ExprMethodCall(ref spannedident, ref types, ref arguments) => {
                // spannedident.node is an ident I think
                let methodname = token::get_ident(spannedident.node).get().to_string();
                debug!("PHIL method call ast name {}",methodname);
                debug!("PHIL method call ast types {:?} {}",types, types.len());
                // the receiver is passed as the first argument in this AST
                let objexpr = arguments[0];
                //println!("PHIL obj expr is {:?}",objexpr);
                self.visit_expr(&*objexpr, ());
                let mut newres: Option<Match> = None;
                match self.result {
                    Some(ref m) => {
                        debug!("PHIL obj expr type is {:?}",m);
                        // locate the method
                        let omethod = nameres::search_for_impl_methods(
                            m.matchstr.as_slice(),
                            methodname.as_slice(),
                            m.point,
                            &m.filepath,
                            m.local,
                            racer::ExactMatch).nth(0);
                        match omethod {
                            Some(ref m) => {
                                let fqn = racer::typeinf::get_return_type_of_function(m);
                                debug!("PHIL found exprcall return type: {}",fqn);
                                newres = find_match(&fqn, &m.filepath, m.point);
                            }
                            None => {}
                        }
                    }
                    None => {}
                }
                self.result = newres;
            }
            // Unhandled expression kinds leave any previous result as-is.
            _ => {}
        }
    }
}
impl LetVisitor {
    // Record a `let` binding of `name` at byte offset `point`; when
    // parseinit is set, also walk the initializer to infer its type.
    fn visit_let_initializer(&mut self, name: &str, point: uint, init: Option<Gc<ast::Expr>> ) {
        // chances are we can't parse the init yet, so the default is to leave blank
        self.result = Some(LetResult{name: name.to_string(),
                                     point: point,
                                     inittype: None});
        if !self.parseinit {
            return;
        }
        debug!("PHIL result before is {:?}",self.result);
        // attempt to parse the init
        init.map(|initexpr| {
            debug!("PHIL init node is {:?}",initexpr.node);
            let mut v = ExprTypeVisitor{ scope: self.scope.clone(),
                                         result: None};
            v.visit_expr(&*initexpr, ());
            // overwrite the placeholder with the typed result
            self.result = Some(LetResult{name: name.to_string(), point: point,
                                         inittype: v.result});
        });
        debug!("PHIL result is {:?}",self.result);
    }
}

impl visit::Visitor<()> for LetVisitor {
    fn visit_decl(&mut self, decl: &ast::Decl, e: ()) {
        debug!("PHIL decl {:?}",decl);
        match decl.node {
            ast::DeclLocal(local) => {
                match local.pat.node {
                    // only plain `let name = ...` ident patterns are handled
                    ast::PatIdent(_ , ref spannedident, _) => {
                        let codemap::BytePos(point) = spannedident.span.lo;
                        let name = token::get_ident(spannedident.node).get().to_string();
                        self.visit_let_initializer(name.as_slice(),
                                                   point.to_uint().unwrap(),
                                                   local.init);
                    },
                    _ => {}
                }
            }
            ast::DeclItem(_) => {}
        }
        visit::walk_decl(self, decl, e);
    }
}
// Collects (field name, byte offset) pairs from a struct definition.
struct StructVisitor {
    pub fields: Vec<(String, uint)>
}

impl visit::Visitor<()> for StructVisitor {
    fn visit_struct_def(&mut self, s: &ast::StructDef, _: ast::Ident, _: &ast::Generics, _: ast::NodeId, e: ()) {
        visit::walk_struct_def(self, s, e)
    }
    fn visit_struct_field(&mut self, field: &ast::StructField, _: ()) {
        let codemap::BytePos(point) = field.span.lo;
        match field.node.kind {
            // only named fields are recorded; tuple-struct fields are skipped
            ast::NamedField(name, _) => {
                //visitor.visit_ident(struct_field.span, name, env.clone())
                let n = String::from_str(token::get_ident(name).get());
                self.fields.push((n, point as uint));
            }
            _ => {}
        }
        //visit::walk_struct_field(self, s, e)
    }
}
// Records the path of the type that an `impl` block implements.
struct ImplVisitor {
    name_path: Vec<String>
}

impl visit::Visitor<()> for ImplVisitor {
    fn visit_item(&mut self, item: &ast::Item, _: ()) {
        match item.node {
            ast::ItemImpl(_, _,typ,_) => {
                match typ.node {
                    // `impl Foo { ... }`
                    ast::TyPath(ref path, _, _) => {
                        self.name_path = path_to_vec(path);
                    }
                    ast::TyRptr(_, ref ty) => {
                        // HACK for now, treat refs the same as unboxed types
                        // so that we can match '&str' to 'str'
                        match ty.ty.node {
                            ast::TyPath(ref path, _, _) => {
                                self.name_path = path_to_vec(path);
                            }
                            _ => {}
                        }
                    }
                    _ => {}
                }
            },
            _ => {}
        }
    }
}
// Extracts a function's name, its argument names/offsets/type paths,
// its declared output type, and whether it is a method.
pub struct FnVisitor {
    pub name: String,
    pub output: Vec<String>,                  // return type as path segments
    pub args: Vec<(String, uint, Vec<String>)>, // (name, byte offset, type path)
    pub is_method: bool
}

impl visit::Visitor<()> for FnVisitor {
    fn visit_fn(&mut self, fk: &visit::FnKind, fd: &ast::FnDecl, _: &ast::Block, _: codemap::Span, _: ast::NodeId, _: ()) {
        self.name = token::get_ident(visit::name_of_fn(fk)).get().to_string();
        for arg in fd.inputs.iter() {
            debug!("PHIL fn arg ast is {:?}",arg);
            // only plain ident patterns are supported as argument names
            let res =
                match arg.pat.node {
                    ast::PatIdent(_ , ref spannedident, _) => {
                        let codemap::BytePos(point) = spannedident.span.lo;
                        let argname = token::get_ident(spannedident.node).get().to_string();
                        Some((String::from_str(argname.as_slice()), point as uint))
                    }
                    _ => None
                };
            if res.is_none() {
                // NOTE(review): bails out of the entire visit on the first
                // unsupported pattern, dropping remaining args, output and
                // is_method — presumably deliberate; confirm.
                return;
            }
            let (name, pos) = res.unwrap();
            // extract the argument's type path; a `&T` reference is
            // unwrapped and treated as `T`
            let typepath = match arg.ty.node {
                ast::TyRptr(_, ref ty) => {
                    match ty.ty.node {
                        ast::TyPath(ref path, _, _) => {
                            let type_ = path_to_vec(path);
                            debug!("PHIL arg type is {}", type_);
                            type_
                        }
                        _ => Vec::new()
                    }
                }
                ast::TyPath(ref path, _, _) => {
                    let type_ = path_to_vec(path);
                    debug!("PHIL arg type is {}", type_);
                    type_
                }
                _ => Vec::new()
            };
            debug!("PHIL typepath {}", typepath);
            self.args.push((name, pos, typepath))
        }
        debug!("PHIL parsed args: {}", self.args);
        // record the declared return type when it is a plain path
        match fd.output.node {
            ast::TyPath(ref path, _, _) => {
                self.output = path_to_vec(path);
            }
            _ => {}
        }
        self.is_method = match *fk {
            visit::FkMethod(_, _, _) => true,
            _ => false
        }
    }
}
// Records the name of a `mod` item, if one is visited.
pub struct ModVisitor {
    pub name: Option<String>
}

impl visit::Visitor<()> for ModVisitor {
    fn visit_item(&mut self, item: &ast::Item, _: ()) {
        match item.node {
            ast::ItemMod(_) => {
                self.name = Some(String::from_str(token::get_ident(item.ident).get()));
            }
            _ => {}
        }
    }
}

// Records an enum's name plus each variant's name and byte offset.
pub struct EnumVisitor {
    pub name: String,
    pub values: Vec<(String, uint)>,
}

impl visit::Visitor<()> for EnumVisitor {
    fn visit_item(&mut self, i: &ast::Item, _: ()) {
        match i.node {
            ast::ItemEnum(ref enum_definition, _) => {
                self.name = String::from_str(token::get_ident(i.ident).get());
                //visitor.visit_generics(type_parameters, env.clone());
                //visit::walk_enum_def(self, enum_definition, type_parameters, e)
                let codemap::BytePos(point) = i.span.lo;
                let codemap::BytePos(point2) = i.span.hi;
                debug!("PHIL name point is {} {}",point,point2);
                for &variant in enum_definition.variants.iter() {
                    let codemap::BytePos(point) = variant.span.lo;
                    self.values.push((String::from_str(token::get_ident(variant.node.name).get()), point as uint));
                }
            },
            _ => {}
        }
    }
}
// Parse `s` as a crate and collect its use/extern-crate paths. The
// parser can fail!(), so parsing is isolated in a child task; a parse
// failure yields an empty visitor.
pub fn parse_view_item(s: String) -> ViewItemVisitor {
    // parser can fail!() so isolate it in another task
    let result = task::try(proc() {
        let cr = string_to_crate(s);
        let mut v = ViewItemVisitor{ident: None, paths: Vec::new()};
        visit::walk_crate(&mut v, &cr, ());
        return v;
    });
    match result {
        Ok(s) => {return s;},
        Err(_) => {
            return ViewItemVisitor{ident: None, paths: Vec::new()};
        }
    }
}

// Parse `s` as a statement and extract its `let` binding, optionally
// typing the initializer. Returns None on parse failure.
pub fn parse_let(s: String, fpath: Path, pos: uint, parseinit: bool) -> Option<LetResult> {
    let result = task::try(proc() {
        debug!("PHIL parse_let s=|{}|",s);
        let stmt = string_to_stmt(s);
        debug!("PHIL parse_let stmt={:?}",stmt);
        let scope = Scope{filepath: fpath, point: pos};
        let mut v = LetVisitor{ scope: scope, result: None, parseinit: parseinit};
        visit::walk_stmt(&mut v, &*stmt, ());
        return v.result;
    });
    match result {
        Ok(s) => {return s;},
        Err(_) => {
            return None;
        }
    }
}

// Parse a struct definition and list its (field name, offset) pairs;
// an unparsable input yields an empty list.
pub fn parse_struct_fields(s: String) -> Vec<(String, uint)> {
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let mut v = StructVisitor{ fields: Vec::new() };
        visit::walk_stmt(&mut v, &*stmt, ());
        return v.fields;
    }).ok().unwrap_or(Vec::new());
}

// Parse an impl block and return the implemented type's name, but only
// when the type path is a single segment.
pub fn parse_impl_name(s: String) -> Option<String> {
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let mut v = ImplVisitor{ name_path: Vec::new() };
        visit::walk_stmt(&mut v, &*stmt, ());
        if v.name_path.len() == 1 {
            return Some(v.name_path.pop().unwrap());
        } else {
            return None;
        }
    }).ok().unwrap();
}

// Parse a fn declaration and return its output type path.
// NOTE(review): unlike parse_view_item/parse_let above, this and the
// functions below call .ok().unwrap(), so a parse failure in the child
// task panics the caller instead of degrading gracefully — confirm
// whether that inconsistency is intentional.
pub fn parse_fn_output(s: String) -> Vec<String> {
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let mut v = FnVisitor { name: "".to_string(), args: Vec::new(), output: Vec::new(), is_method: false };
        visit::walk_stmt(&mut v, &*stmt, ());
        return v.output;
    }).ok().unwrap();
}

// Parse a fn declaration and return the fully populated FnVisitor.
pub fn parse_fn(s: String) -> FnVisitor {
    debug!("PHIL parse_fn |{}|",s);
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let mut v = FnVisitor { name: "".to_string(), args: Vec::new(), output: Vec::new(), is_method: false };
        visit::walk_stmt(&mut v, &*stmt, ());
        return v;
    }).ok().unwrap();
}

// Parse a mod declaration and return its name, if any.
pub fn parse_mod(s: String) -> ModVisitor {
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let mut v = ModVisitor { name: None };
        visit::walk_stmt(&mut v, &*stmt, ());
        return v;
    }).ok().unwrap();
}

// Parse an enum definition and return its name and variants.
pub fn parse_enum(s: String) -> EnumVisitor {
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let mut v = EnumVisitor { name: String::new(), values: Vec::new()};
        visit::walk_stmt(&mut v, &*stmt, ());
        return v;
    }).ok().unwrap();
}

// Infer the type of expression `s`, resolving names relative to
// `fpath`/`pos`. The path is cloned so the child task owns its copy.
pub fn get_type_of(s: String, fpath: &Path, pos: uint) -> Option<Match> {
    let myfpath = fpath.clone();
    return task::try(proc() {
        let stmt = string_to_stmt(s);
        let startscope = Scope {
            filepath: myfpath,
            point: pos
        };
        let mut v = ExprTypeVisitor{ scope: startscope,
                                     result: None};
        visit::walk_stmt(&mut v, &*stmt, ());
        return v.result;
    }).ok().unwrap();
}
// Scratchpad kept for manual AST experiments: every statement is
// commented out, so this test only verifies that the module compiles.
#[test]
fn ast_sandbox() {
    // let src = "pub mod foo {}";
    // let stmt = string_to_stmt(String::from_str(src));
    // let mut v = ModVisitor{ name: None };
    // visit::walk_stmt(&mut v, stmt, ());
    // println!("PHIL {:?}", stmt);
    // println!("PHIL {}", v.name);
    // fail!("");
    // let mut v = ExprTypeVisitor{ scope: startscope,
    //                              result: None};
    // let src = ~"fn myfn(a: uint) -> Foo {}";
    // let src = ~"impl blah{    fn visit_item(&mut self, item: &ast::Item, _: ()) {} }";
    // let src = "Foo::Bar().baz(32)";
    //let src = "std::vec::Vec::new().push_all()";
    // let src = "impl visit::Visitor<()> for ExprTypeVisitor {}";
    // let src = "impl<'a> StrSlice<'a> for &'a str {}";
    //let src = "a(|b|{});";
    // let src = "(a,b) = (3,4);";
    // let src = "fn foo() -> (a,b) {}";
    // let stmt = string_to_stmt(String::from_str(src));
    // let src = "extern crate core_collections = \"collections\";";
    // let src = "use foo = bar;";
    // let src = "use bar::baz;";
    // let src = "use bar::{baz,blah};";
    // let src = "extern crate collections;";
    // let cr = string_to_crate(String::from_str(src));
    // let mut v = ViewItemVisitor{ ident: None, paths: Vec::new() };
    // visit::walk_crate(&mut v, &cr, ());
    // println!("PHIL v {} {}",v.ident, v.paths);
    //visit::walk_stmt(&mut v, stmt, ());
    // println!("PHIL stmt {:?}",stmt);
    // let mut v = ImplVisitor{ name_path: Vec::new() };
    // visit::walk_stmt(&mut v, stmt, ());
    // println!("v {}",v.name_path);
    // pub struct Match {
    //     pub matchstr: ~str,
    //     pub filepath: Path,
    //     pub point: uint,
    //     pub linetxt: ~str,
    //     pub local: bool,
    //     pub mtype: MatchType
    // }
    // let startscope = Match{
    //     matchstr: "".to_owned(),
    //     filepath: Path::new("./ast.rs"),
    //     point: 0,
    //     linetxt: "".to_owned(),
    //     local: true,
    //     mtype: racer::Module
    // };
    // let startscope = Scope {
    //     filepath: Path::new("./ast.rs"),
    //     point: 0
    // };
    // let mut v = ExprTypeVisitor{ scope: startscope,
    //                              result: None};
    // visit::walk_stmt(&mut v, stmt, ());
    // println!("PHIL result was {:?}",v.result);
    //return v.result;
    // let mut v = EnumVisitor { name: String::new(), values: Vec::new()};
    // visit::walk_stmt(&mut v, cr, ());
    // println!("PHIL {} {}", v.name, v.values);
    // let src = "let v = Foo::new();".to_owned();
    // let res = parse_let(src);
    // debug!("PHIL res {}",res.unwrap().init);
}
|
use num::abs;
use num_integer::gcd;
use num_rational::Ratio;
use std::error;
use std::io;
use std::io::BufRead;
use crate::day;
/// Convenience alias: a result carrying a boxed dynamic error.
pub type BoxResult<T> = Result<T, Box<dyn error::Error>>;
/// Stateless solver for Advent of Code 2019, day 10.
pub struct Day10 {}
impl day::Day for Day10 {
    // Puzzle day identifier used by the runner.
    fn tag(&self) -> &str { "10" }
    // Solve part 1 on a fresh reader and print the Result.
    fn part1(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
        println!("{:?}", self.part1_impl(&mut *input()));
    }
    // Solve part 2 (the 200th vaporized asteroid) and print the Result.
    fn part2(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
        println!("{:?}", self.part2_impl(&mut *input(), 200));
    }
}
impl Day10 {
    /// Read the ASCII grid from `input` and return the coordinates of
    /// every asteroid cell (`#`) as `(x, y)`, with `y` increasing per
    /// input row. Empty rows are skipped.
    ///
    /// NOTE(review): per-segment read errors are unwrapped, matching the
    /// original behavior — a failing reader panics instead of returning
    /// `Err`.
    fn parse_asteroids(input: &mut dyn io::Read) -> Vec<(usize, usize)> {
        let reader = io::BufReader::new(input);
        reader.split(b'\n')
            .filter(|l| !l.as_ref().unwrap().is_empty())
            .enumerate()
            .flat_map(|(y, r)| {
                r.unwrap().into_iter()
                    .map(|b| b == b'#')
                    .enumerate()
                    .filter_map(move |(x, b)| if b { Some((x, y)) } else { None })
            })
            .collect()
    }

    /// Return the asteroid that can see the most other asteroids, with
    /// its visibility count. A target is visible unless some third
    /// asteroid lies exactly on the segment between the two, which is
    /// checked by stepping along the gcd-reduced direction vector.
    fn find_best<'a>(&self, asteroids: &'a Vec<(usize, usize)>)
                     -> (&'a (usize, usize), usize) {
        asteroids.iter().map(|p| {
            let others = asteroids.iter().filter(|&a| p != a).collect::<Vec<_>>();
            (p,
             others.iter().map(|&(x, y)| {
                 let x = *x as i64;
                 let y = *y as i64;
                 let px = p.0 as i64;
                 let py = p.1 as i64;
                 let dx = x - px;
                 let dy = y - py;
                 // Smallest integer step along the line of sight; `steps`
                 // is how many of those steps reach the target.
                 let n = gcd(dx, dy);
                 let sx = dx / n;
                 let sy = dy / n;
                 let steps = if dx != 0 { dx / sx } else { dy / sy };
                 // Visit each interior lattice point and test whether any
                 // of them holds an asteroid.
                 let (_, _, obstructed) = (1..steps).fold(
                     (px + sx, py + sy, false),
                     |(x, y, obstructed), _|
                         (x + sx, y + sy,
                          obstructed || asteroids.contains(
                              &(x as usize, y as usize))));
                 if !obstructed { 1 } else { 0 }
             }).sum())
        }).max_by_key(|&(_, s)| s).unwrap()
    }

    /// Part 1: the visibility count of the best monitoring location.
    fn part1_impl(self: &Self, input: &mut dyn io::Read) -> BoxResult<usize> {
        let asteroids = Self::parse_asteroids(input);
        Ok(self.find_best(&asteroids).1)
    }

    /// Part 2: vaporize asteroids with a clockwise laser starting
    /// straight up from the best location; return `x * 100 + y` of the
    /// `n`-th asteroid destroyed.
    fn part2_impl(self: &Self, input: &mut dyn io::Read, n: usize) -> BoxResult<usize> {
        let asteroids = Self::parse_asteroids(input);
        let &p = self.find_best(&asteroids).0;
        let (px, py) = p;
        // Key each remaining asteroid by (quadrant, slope-within-quadrant,
        // manhattan distance): sorting on that key yields
        // clockwise-from-up, nearest-first order. The distance entry also
        // doubles as an "alive" flag (0 = already vaporized).
        let mut others = asteroids.iter().filter(|&a| p != *a).map(|&a| {
            let (x, y) = a;
            let (dx, dy) = (x as i64 - px as i64, y as i64 - py as i64);
            if x >= px && y < py {
                (0, Ratio::new(dx, -dy), abs(dx) + abs(dy), a)
            } else if x > px && y >= py {
                (1, Ratio::new(dy, dx), abs(dx) + abs(dy), a)
            } else if x <= px && y > py {
                (2, abs(Ratio::new(-dx, dy)), abs(dx) + abs(dy), a)
            } else {
                (3, abs(Ratio::new(-dy, -dx)), abs(dx) + abs(dy), a)
            }
        }).collect::<Vec<_>>();
        others.sort_by(|&(aq, ar, ad, _), &(bq, br, bd, _)|
            aq.cmp(&bq).then(ar.cmp(&br)).then(ad.cmp(&bd)));
        // Sweep circularly over the sorted list; each distinct
        // (quadrant, slope) ray loses at most one asteroid per pass.
        let mut lq = -1;
        let mut lr = Ratio::new(1, 1);
        let mut i = 0;
        let mut s = 0;
        let (x, y);
        loop {
            let (q, r, d, (ax, ay)) = &mut others[i];
            if *d > 0 && (*q != lq || *r != lr) {
                s += 1;
                if s == n { x = *ax; y = *ay; break; };
                *d = 0;
                lq = *q;
                lr = *r;
            }
            i = (i + 1) % others.len();
        }
        Ok(x * 100 + y)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Assert that part 1 (best visibility count) for map `s` equals `o`.
    fn test1(s: &str, o: usize) {
        assert_eq!(Day10 {}.part1_impl(&mut s.as_bytes()).unwrap(), o);
    }
    // Worked examples from the puzzle statement.
    #[test]
    fn part1() {
        test1("\
.#..#
.....
#####
....#
...##", 8);
        test1("\
......#.#.
#..#.#....
..#######.
.#.#.###..
.#..#.....
..#....#.#
#..#....#.
.##.#..###
##...#..#.
.#....####", 33);
        test1("\
#.#...#.#.
.###....#.
.#....#...
##.#.#.#.#
....#.#.#.
.##..###.#
..#...##..
..##....##
......#...
.####.###.", 35);
        test1("\
.#..#..###
####.###.#
....###.#.
..###.##.#
##.##.#.#.
....###..#
..#.#..#.#
#..#.#.###
.##...##.#
.....#.#..", 41);
        test1("\
.#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##", 210);
    }
    // Assert that part 2 (n-th vaporized asteroid) for map `s` equals `o`.
    fn test2(s: &str, n: usize, o: usize) {
        assert_eq!(Day10 {}.part2_impl(&mut s.as_bytes(), n).unwrap(), o);
    }
    #[test]
    fn part2() {
        test2("\
.#....#####...#..
##...##.#####..##
##...#...#.#####.
..#.....X...###..
..#.#.....#....##", 36, 1403);
        test2("\
.#..##.###...#######
##.############..##.
.#.######.########.#
.###.#######.####.#.
#####.##.#.##.###.##
..#####..#.#########
####################
#.####....###.#.#.##
##.#################
#####.##.###..####..
..######..##.#######
####.##.####...##..#
.#####..#.######.###
##...#.##########...
#.##########.#######
.####.#.###.###.#.##
....##.##.###..#####
.#.#.###########.###
#.#.#.#####.####.###
###.##.####.##.#..##", 200, 802)
    }
}
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate diesel;
extern crate rand;
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
pub mod db;
pub mod model;
pub mod schema;
|
mod bg_game;
mod higherlower_game;
mod minesweeper;
pub use bg_game::*;
pub use higherlower_game::*;
pub use minesweeper::*;
|
use super::*;
// Raw value of the sound bias register, stored as a transparent u16.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct SoundBias(u16);

impl SoundBias {
    const_new!();
    // Bits 1..=9: the bias level, with get/with/set accessors.
    bitfield_int!(u16; 1..=9: u16, bias, with_bias, set_bias);
    // Bits 14..=15: output sample bit depth (see SampleBits).
    bitfield_enum!(u16; 14..=15: SampleBits, sample_bits, with_sample_bits, set_sample_bits);
}
// Sample bit-depth selector for bits 14..=15 of SoundBias. The
// discriminants are pre-shifted into bit position 14 so a variant can
// be OR-ed directly into the register value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u16)]
pub enum SampleBits {
    _9 = 0 << 14,
    _8 = 1 << 14,
    _7 = 2 << 14,
    _6 = 3 << 14,
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
use std::cmp::Ordering;
/// Neighbor node: one candidate entry in a nearest-neighbor search.
#[derive(Debug, Clone, Copy)]
pub struct Neighbor {
    /// The id of the node
    pub id: u32,
    /// The distance from the query node to current node
    pub distance: f32,
    /// Whether the current is visited or not
    pub visited: bool,
}

impl Neighbor {
    /// Build a neighbor for `id` at `distance`; it starts out unvisited.
    pub fn new(id: u32, distance: f32) -> Self {
        Neighbor { id, distance, visited: false }
    }
}

impl Default for Neighbor {
    /// The zero neighbor: id 0, distance 0.0, not yet visited.
    fn default() -> Self {
        Neighbor::new(0, 0.0)
    }
}

impl PartialEq for Neighbor {
    /// Identity comparison: neighbors are equal iff their ids match,
    /// regardless of distance or visited state.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}

impl Eq for Neighbor {}

impl Ord for Neighbor {
    /// Total order: by distance first, falling back to id when the
    /// distances compare equal (or are incomparable, e.g. NaN).
    fn cmp(&self, other: &Self) -> Ordering {
        match self.distance.partial_cmp(&other.distance) {
            Some(Ordering::Less) => Ordering::Less,
            Some(Ordering::Greater) => Ordering::Greater,
            _ => self.id.cmp(&other.id),
        }
    }
}

impl PartialOrd for Neighbor {
    /// Hand-rolled `<`: distance-major, id-minor.
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        if self.distance == other.distance {
            self.id < other.id
        } else {
            self.distance < other.distance
        }
    }
    // Reason for allowing panic = "Does not support comparing Neighbor with partial_cmp"
    #[allow(clippy::panic)]
    fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
        panic!("Neighbor only allows eq and lt")
    }
}
#[cfg(test)]
mod neighbor_test {
    use super::*;

    // Equality is id-based; `<` orders by distance, then id.
    #[test]
    fn eq_lt_works() {
        let n1 = Neighbor::new(1, 1.1);
        let n2 = Neighbor::new(2, 2.0);
        let n3 = Neighbor::new(1, 1.1);
        assert!(n1 != n2);
        assert!(n1 < n2);
        assert!(n1 == n3);
    }
    // `>` routes through partial_cmp, which deliberately panics.
    #[test]
    #[should_panic]
    fn gt_should_panic() {
        let n1 = Neighbor::new(1, 1.1);
        let n2 = Neighbor::new(2, 2.0);
        assert!(n2 > n1);
    }
    // `<=` also routes through partial_cmp and therefore panics.
    #[test]
    #[should_panic]
    fn le_should_panic() {
        let n1 = Neighbor::new(1, 1.1);
        let n2 = Neighbor::new(2, 2.0);
        assert!(n1 <= n2);
    }
}
|
// svd2rust-generated reader/writer aliases for RCC_MP_AHB5LPENSETR,
// plus its reset value 0x0171 (bits 0, 4, 5, 6 and 8 set — matching the
// GPIOZ/CRYP1/HASH1/RNG1/BKPSRAM fields below, all of which reset to 1).
#[doc = "Reader of register RCC_MP_AHB5LPENSETR"]
pub type R = crate::R<u32, super::RCC_MP_AHB5LPENSETR>;
#[doc = "Writer for register RCC_MP_AHB5LPENSETR"]
pub type W = crate::W<u32, super::RCC_MP_AHB5LPENSETR>;
#[doc = "Register RCC_MP_AHB5LPENSETR `reset()`'s with value 0x0171"]
impl crate::ResetValue for super::RCC_MP_AHB5LPENSETR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0171
    }
}
// Field GPIOZLPEN (bit 0, reset = 1): per the generated docs, writing 1
// enables the GPIOZ peripheral clocks in CSLEEP; writing 0 has no effect.
#[doc = "GPIOZLPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GPIOZLPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
// Enum -> bool: the discriminant is the field's single bit.
impl From<GPIOZLPEN_A> for bool {
    #[inline(always)]
    fn from(variant: GPIOZLPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `GPIOZLPEN`"]
pub type GPIOZLPEN_R = crate::R<bool, GPIOZLPEN_A>;
impl GPIOZLPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> GPIOZLPEN_A {
        match self.bits {
            false => GPIOZLPEN_A::B_0X0,
            true => GPIOZLPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == GPIOZLPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == GPIOZLPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `GPIOZLPEN`"]
pub struct GPIOZLPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> GPIOZLPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: GPIOZLPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(GPIOZLPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(GPIOZLPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 0, then insert the new value
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// Field CRYP1LPEN (bit 4, reset = 1): per the generated docs, writing 1
// enables the CRYP1 peripheral clocks in CSLEEP; writing 0 has no effect.
#[doc = "CRYP1LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CRYP1LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
// Enum -> bool: the discriminant is the field's single bit.
impl From<CRYP1LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: CRYP1LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `CRYP1LPEN`"]
pub type CRYP1LPEN_R = crate::R<bool, CRYP1LPEN_A>;
impl CRYP1LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CRYP1LPEN_A {
        match self.bits {
            false => CRYP1LPEN_A::B_0X0,
            true => CRYP1LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == CRYP1LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == CRYP1LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `CRYP1LPEN`"]
pub struct CRYP1LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> CRYP1LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: CRYP1LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(CRYP1LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(CRYP1LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 4, then insert the new value shifted into place
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// Field HASH1LPEN (bit 5, reset = 1): per the generated docs, writing 1
// enables the HASH1 peripheral clocks in CSLEEP; writing 0 has no effect.
#[doc = "HASH1LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HASH1LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
// Enum -> bool: the discriminant is the field's single bit.
impl From<HASH1LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: HASH1LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `HASH1LPEN`"]
pub type HASH1LPEN_R = crate::R<bool, HASH1LPEN_A>;
impl HASH1LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HASH1LPEN_A {
        match self.bits {
            false => HASH1LPEN_A::B_0X0,
            true => HASH1LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == HASH1LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == HASH1LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `HASH1LPEN`"]
pub struct HASH1LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> HASH1LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: HASH1LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(HASH1LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(HASH1LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 5, then insert the new value shifted into place
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// Field RNG1LPEN (bit 6, reset = 1): per the generated docs, writing 1
// enables the RNG1 peripheral clocks in CSLEEP; writing 0 has no effect.
#[doc = "RNG1LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RNG1LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
// Enum -> bool: the discriminant is the field's single bit.
impl From<RNG1LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: RNG1LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `RNG1LPEN`"]
pub type RNG1LPEN_R = crate::R<bool, RNG1LPEN_A>;
impl RNG1LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RNG1LPEN_A {
        match self.bits {
            false => RNG1LPEN_A::B_0X0,
            true => RNG1LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == RNG1LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == RNG1LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `RNG1LPEN`"]
pub struct RNG1LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> RNG1LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: RNG1LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(RNG1LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(RNG1LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // clear bit 6, then insert the new value shifted into place
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "BKPSRAMLPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BKPSRAMLPEN_A {
#[doc = "0: Writing has no effect, reading means\r\n that the clock is disabled in\r\n CSLEEP"]
B_0X0 = 0,
#[doc = "1: Writing enables the peripheral clock\r\n in CSLEEP, reading means that the clock is\r\n enabled in CSLEEP"]
B_0X1 = 1,
}
impl From<BKPSRAMLPEN_A> for bool {
#[inline(always)]
fn from(variant: BKPSRAMLPEN_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `BKPSRAMLPEN`"]
pub type BKPSRAMLPEN_R = crate::R<bool, BKPSRAMLPEN_A>;
impl BKPSRAMLPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> BKPSRAMLPEN_A {
        // The field is a single bit, so the raw value maps directly.
        if self.bits {
            BKPSRAMLPEN_A::B_0X1
        } else {
            BKPSRAMLPEN_A::B_0X0
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        matches!(self.variant(), BKPSRAMLPEN_A::B_0X0)
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        matches!(self.variant(), BKPSRAMLPEN_A::B_0X1)
    }
}
#[doc = "Write proxy for field `BKPSRAMLPEN`"]
pub struct BKPSRAMLPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> BKPSRAMLPEN_W<'a> {
    /// Bit position of the `BKPSRAMLPEN` field inside the register.
    const OFFSET: u32 = 8;
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: BKPSRAMLPEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Writing has no effect, reading means that the clock is disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(BKPSRAMLPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clock in CSLEEP, reading means that the clock is enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(BKPSRAMLPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the target bit first, then OR in the (masked) new value.
        let mask = 0x01 << Self::OFFSET;
        let cleared = self.w.bits & !mask;
        self.w.bits = cleared | (((value as u32) & 0x01) << Self::OFFSET);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - GPIOZLPEN"]
    #[inline(always)]
    pub fn gpiozlpen(&self) -> GPIOZLPEN_R {
        let raw = self.bits & 0x01;
        GPIOZLPEN_R::new(raw != 0)
    }
    #[doc = "Bit 4 - CRYP1LPEN"]
    #[inline(always)]
    pub fn cryp1lpen(&self) -> CRYP1LPEN_R {
        let raw = (self.bits >> 4) & 0x01;
        CRYP1LPEN_R::new(raw != 0)
    }
    #[doc = "Bit 5 - HASH1LPEN"]
    #[inline(always)]
    pub fn hash1lpen(&self) -> HASH1LPEN_R {
        let raw = (self.bits >> 5) & 0x01;
        HASH1LPEN_R::new(raw != 0)
    }
    #[doc = "Bit 6 - RNG1LPEN"]
    #[inline(always)]
    pub fn rng1lpen(&self) -> RNG1LPEN_R {
        let raw = (self.bits >> 6) & 0x01;
        RNG1LPEN_R::new(raw != 0)
    }
    #[doc = "Bit 8 - BKPSRAMLPEN"]
    #[inline(always)]
    pub fn bkpsramlpen(&self) -> BKPSRAMLPEN_R {
        let raw = (self.bits >> 8) & 0x01;
        BKPSRAMLPEN_R::new(raw != 0)
    }
}
// Write-proxy constructors: each method hands out a single-field proxy that
// mutably borrows this register writer, so only one field can be staged at a
// time and the borrow ends when the proxy's `bit`/`variant` call returns `w`.
impl W {
    #[doc = "Bit 0 - GPIOZLPEN"]
    #[inline(always)]
    pub fn gpiozlpen(&mut self) -> GPIOZLPEN_W {
        GPIOZLPEN_W { w: self }
    }
    #[doc = "Bit 4 - CRYP1LPEN"]
    #[inline(always)]
    pub fn cryp1lpen(&mut self) -> CRYP1LPEN_W {
        CRYP1LPEN_W { w: self }
    }
    #[doc = "Bit 5 - HASH1LPEN"]
    #[inline(always)]
    pub fn hash1lpen(&mut self) -> HASH1LPEN_W {
        HASH1LPEN_W { w: self }
    }
    #[doc = "Bit 6 - RNG1LPEN"]
    #[inline(always)]
    pub fn rng1lpen(&mut self) -> RNG1LPEN_W {
        RNG1LPEN_W { w: self }
    }
    #[doc = "Bit 8 - BKPSRAMLPEN"]
    #[inline(always)]
    pub fn bkpsramlpen(&mut self) -> BKPSRAMLPEN_W {
        BKPSRAMLPEN_W { w: self }
    }
}
|
#[cfg(target_os = "macos")]
pub mod osx_clipboard {
use common::*;
use core_graphics::color_space::{CGColorSpace, CGColorSpaceRef};
use core_graphics::image::{CGImage, CGImageAlphaInfo};
use core_graphics::{
base::{kCGRenderingIntentDefault, CGFloat},
data_provider::{CGDataProvider, CustomData},
};
use objc::runtime::{Class, Object};
use objc_foundation::{INSArray, INSObject, INSString};
use objc_foundation::{NSArray, NSDictionary, NSObject, NSString};
use objc_id::{Id, Owned};
use std::error::Error;
use std::{mem::transmute, ops::Deref};
    /// Clipboard context backed by the macOS general `NSPasteboard`
    /// (see `new()` below, which fetches `[NSPasteboard generalPasteboard]`).
    pub struct OSXClipboardContext {
        // Retained Objective-C handle to the general pasteboard.
        pasteboard: Id<Object>,
    }
    // Link against AppKit so the Objective-C classes used below
    // (NSPasteboard, NSImage, ...) are available at runtime.
    #[link(name = "AppKit", kind = "framework")]
    extern "C" {}
    /// FFI mirror of AppKit's `NSSize`: a width/height pair of `CGFloat`s,
    /// laid out C-compatibly so it can be passed in Objective-C messages.
    #[repr(C)]
    pub struct NSSize {
        pub width: CGFloat,
        pub height: CGFloat,
    }
    // Pre-expanded `#[derive(Copy)]` for NSSize.
    #[automatically_derived]
    #[allow(unused_qualifications)]
    impl ::core::marker::Copy for NSSize {}
    // Pre-expanded `#[derive(Clone)]` for NSSize: since the type is Copy,
    // clone is just a dereference.
    #[automatically_derived]
    #[allow(unused_qualifications)]
    impl ::core::clone::Clone for NSSize {
        #[inline]
        fn clone(&self) -> NSSize {
            {
                // Compile-time assertions that both fields implement Clone.
                let _: ::core::clone::AssertParamIsClone<CGFloat>;
                let _: ::core::clone::AssertParamIsClone<CGFloat>;
                *self
            }
        }
    }
struct PixelArray {
data: Vec<u8>,
}
    // Pre-expanded `#[derive(Debug)]` for PixelArray.
    #[automatically_derived]
    #[allow(unused_qualifications)]
    impl ::core::fmt::Debug for PixelArray {
        fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
            match *self {
                PixelArray {
                    data: ref __self_0_0,
                } => {
                    let mut debug_trait_builder = f.debug_struct("PixelArray");
                    let _ = debug_trait_builder.field("data", &&(*__self_0_0));
                    debug_trait_builder.finish()
                }
            }
        }
    }
    // Pre-expanded `#[derive(Clone)]` for PixelArray: deep-clones the Vec.
    #[automatically_derived]
    #[allow(unused_qualifications)]
    impl ::core::clone::Clone for PixelArray {
        #[inline]
        fn clone(&self) -> PixelArray {
            match *self {
                PixelArray {
                    data: ref __self_0_0,
                } => PixelArray {
                    data: ::core::clone::Clone::clone(&(*__self_0_0)),
                },
            }
        }
    }
    // Exposes the owned Vec to CoreGraphics as a raw (ptr, len) pair.
    // NOTE(review): the returned pointer borrows `self.data`; presumably the
    // `CustomData` contract requires it to stay valid for the provider's
    // lifetime — confirm against the core_graphics crate docs.
    impl CustomData for PixelArray {
        unsafe fn ptr(&self) -> *const u8 {
            self.data.as_ptr()
        }
        unsafe fn len(&self) -> usize {
            self.data.len()
        }
    }
    /// Returns an NSImage object on success.
    ///
    /// Builds a `CGImage` (8 bits/component, 32 bits/pixel, alpha-last,
    /// device-RGB, row stride `4 * width`) over the given pixel bytes, then
    /// wraps it in an `NSImage` via `alloc`/`initWithCGImage:size:`.
    /// The verbose selector blocks below appear to be pre-expanded `objc`
    /// `msg_send!` invocations: each caches the registered selector in a
    /// static and panics if message sending fails.
    fn image_from_pixels(
        pixels: Vec<u8>,
        width: usize,
        height: usize,
    ) -> Result<Id<NSObject>, Box<dyn Error>> {
        let colorspace = CGColorSpace::create_device_rgb();
        let bitmap_info: u32 = CGImageAlphaInfo::CGImageAlphaLast as u32;
        // Double-box so the provider owns a trait object behind a thin pointer.
        let pixel_data: Box<Box<dyn CustomData>> = Box::new(Box::new(PixelArray { data: pixels }));
        let provider = unsafe { CGDataProvider::from_custom_data(pixel_data) };
        let rendering_intent = kCGRenderingIntentDefault;
        let cg_image = CGImage::new(
            width,
            height,
            8,
            32,
            4 * width,
            &colorspace,
            bitmap_info,
            &provider,
            false,
            rendering_intent,
        );
        let NSImage_class = Class::get("NSImage").ok_or(err("Class::get(\"NSImage\")"))?;
        let size = NSSize {
            width: width as CGFloat,
            height: height as CGFloat,
        };
        // msg_send![NSImage_class, alloc] (expanded)
        let image: Id<NSObject> = unsafe {
            Id::from_ptr({
                let sel = {
                    {
                        #[allow(deprecated)]
                        #[inline(always)]
                        fn register_sel(name: &str) -> ::objc::runtime::Sel {
                            unsafe {
                                static SEL: ::std::sync::atomic::AtomicUsize =
                                    ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                    as *const ::std::os::raw::c_void;
                                if ptr.is_null() {
                                    let sel = ::objc::runtime::sel_registerName(
                                        name.as_ptr() as *const _
                                    );
                                    SEL.store(
                                        sel.as_ptr() as usize,
                                        ::std::sync::atomic::Ordering::Relaxed,
                                    );
                                    sel
                                } else {
                                    ::objc::runtime::Sel::from_ptr(ptr)
                                }
                            }
                        }
                        register_sel("alloc\u{0}")
                    }
                };
                let result;
                match ::objc::__send_message(&*NSImage_class, sel, ()) {
                    Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                        &[""],
                        &match (&s,) {
                            (arg0,) => [::core::fmt::ArgumentV1::new(
                                arg0,
                                ::core::fmt::Display::fmt,
                            )],
                        },
                    )),
                    Ok(r) => result = r,
                }
                result
            })
        };
        // NOTE(review): `ptr` and `a` below look unused — possibly leftover
        // debugging aids; confirm before removing.
        let ptr: *const std::ffi::c_void = unsafe { transmute(&*cg_image) };
        let a = &*cg_image;
        // msg_send![image, initWithCGImage: cg_image, size: &size] (expanded)
        let image: Id<NSObject> = unsafe {
            Id::from_ptr({
                let sel = {
                    {
                        #[allow(deprecated)]
                        #[inline(always)]
                        fn register_sel(name: &str) -> ::objc::runtime::Sel {
                            unsafe {
                                static SEL: ::std::sync::atomic::AtomicUsize =
                                    ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                    as *const ::std::os::raw::c_void;
                                if ptr.is_null() {
                                    let sel = ::objc::runtime::sel_registerName(
                                        name.as_ptr() as *const _
                                    );
                                    SEL.store(
                                        sel.as_ptr() as usize,
                                        ::std::sync::atomic::Ordering::Relaxed,
                                    );
                                    sel
                                } else {
                                    ::objc::runtime::Sel::from_ptr(ptr)
                                }
                            }
                        }
                        register_sel("initWithCGImage:size:\u{0}")
                    }
                };
                let result;
                match ::objc::__send_message(&*image, sel, (cg_image, &size)) {
                    Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                        &[""],
                        &match (&s,) {
                            (arg0,) => [::core::fmt::ArgumentV1::new(
                                arg0,
                                ::core::fmt::Display::fmt,
                            )],
                        },
                    )),
                    Ok(r) => result = r,
                }
                result
            })
        };
        Ok(image)
    }
    // ClipboardProvider backed by the macOS general pasteboard. The verbose
    // selector blocks throughout appear to be pre-expanded `objc` `msg_send!`
    // invocations (cached selector registration + message send, panicking on
    // messaging errors); the code is left byte-identical and only annotated.
    impl ClipboardProvider for OSXClipboardContext {
        /// Acquires `[NSPasteboard generalPasteboard]` and retains it.
        fn new() -> Result<OSXClipboardContext, Box<Error>> {
            let cls = match Class::get("NSPasteboard").ok_or(err("Class::get(\"NSPasteboard\")")) {
                ::core::result::Result::Ok(val) => val,
                ::core::result::Result::Err(err) => {
                    return ::core::result::Result::Err(::core::convert::From::from(err));
                }
            };
            // msg_send![cls, generalPasteboard] (expanded)
            let pasteboard: *mut Object = unsafe {
                {
                    let sel = {
                        {
                            #[allow(deprecated)]
                            #[inline(always)]
                            fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                unsafe {
                                    static SEL: ::std::sync::atomic::AtomicUsize =
                                        ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                    let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                        as *const ::std::os::raw::c_void;
                                    if ptr.is_null() {
                                        let sel = ::objc::runtime::sel_registerName(
                                            name.as_ptr() as *const _
                                        );
                                        SEL.store(
                                            sel.as_ptr() as usize,
                                            ::std::sync::atomic::Ordering::Relaxed,
                                        );
                                        sel
                                    } else {
                                        ::objc::runtime::Sel::from_ptr(ptr)
                                    }
                                }
                            }
                            register_sel("generalPasteboard\u{0}")
                        }
                    };
                    let result;
                    match ::objc::__send_message(&*cls, sel, ()) {
                        Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                            &[""],
                            &match (&s,) {
                                (arg0,) => [::core::fmt::ArgumentV1::new(
                                    arg0,
                                    ::core::fmt::Display::fmt,
                                )],
                            },
                        )),
                        Ok(r) => result = r,
                    }
                    result
                }
            };
            if pasteboard.is_null() {
                return Err(err("NSPasteboard#generalPasteboard returned null"));
            }
            let pasteboard: Id<Object> = unsafe { Id::from_ptr(pasteboard) };
            Ok(OSXClipboardContext {
                pasteboard: pasteboard,
            })
        }
        /// Reads the first NSString from the pasteboard and returns it as an
        /// owned Rust `String`; errors when the pasteboard yields nothing.
        fn get_text(&mut self) -> Result<String, Box<Error>> {
            let string_class: Id<NSObject> = {
                let cls: Id<Class> = unsafe { Id::from_ptr(class("NSString")) };
                unsafe { transmute(cls) }
            };
            let classes: Id<NSArray<NSObject, Owned>> =
                NSArray::from_vec(<[_]>::into_vec(box [string_class]));
            let options: Id<NSDictionary<NSObject, NSObject>> = NSDictionary::new();
            // msg_send![self.pasteboard, readObjectsForClasses: classes
            //           options: options] (expanded)
            let string_array: Id<NSArray<NSString>> = unsafe {
                let obj: *mut NSArray<NSString> = {
                    let sel = {
                        {
                            #[allow(deprecated)]
                            #[inline(always)]
                            fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                unsafe {
                                    static SEL: ::std::sync::atomic::AtomicUsize =
                                        ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                    let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                        as *const ::std::os::raw::c_void;
                                    if ptr.is_null() {
                                        let sel = ::objc::runtime::sel_registerName(
                                            name.as_ptr() as *const _
                                        );
                                        SEL.store(
                                            sel.as_ptr() as usize,
                                            ::std::sync::atomic::Ordering::Relaxed,
                                        );
                                        sel
                                    } else {
                                        ::objc::runtime::Sel::from_ptr(ptr)
                                    }
                                }
                            }
                            register_sel("readObjectsForClasses:options:\u{0}")
                        }
                    };
                    let result;
                    match ::objc::__send_message(&*self.pasteboard, sel, (&*classes, &*options)) {
                        Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                            &[""],
                            &match (&s,) {
                                (arg0,) => [::core::fmt::ArgumentV1::new(
                                    arg0,
                                    ::core::fmt::Display::fmt,
                                )],
                            },
                        )),
                        Ok(r) => result = r,
                    }
                    result
                };
                if obj.is_null() {
                    return Err(err(
                        "pasteboard#readObjectsForClasses:options: returned null",
                    ));
                }
                Id::from_ptr(obj)
            };
            if string_array.count() == 0 {
                Err(err(
                    "pasteboard#readObjectsForClasses:options: returned empty",
                ))
            } else {
                Ok(string_array[0].as_str().to_owned())
            }
        }
        /// Clears the pasteboard, then writes `data` as a single NSString.
        fn set_text(&mut self, data: String) -> Result<(), Box<Error>> {
            let string_array = NSArray::from_vec(<[_]>::into_vec(box [NSString::from_str(&data)]));
            // msg_send![self.pasteboard, clearContents] (expanded); the
            // returned change-count is deliberately discarded.
            let _: usize = unsafe {
                {
                    let sel = {
                        {
                            #[allow(deprecated)]
                            #[inline(always)]
                            fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                unsafe {
                                    static SEL: ::std::sync::atomic::AtomicUsize =
                                        ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                    let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                        as *const ::std::os::raw::c_void;
                                    if ptr.is_null() {
                                        let sel = ::objc::runtime::sel_registerName(
                                            name.as_ptr() as *const _
                                        );
                                        SEL.store(
                                            sel.as_ptr() as usize,
                                            ::std::sync::atomic::Ordering::Relaxed,
                                        );
                                        sel
                                    } else {
                                        ::objc::runtime::Sel::from_ptr(ptr)
                                    }
                                }
                            }
                            register_sel("clearContents\u{0}")
                        }
                    };
                    let result;
                    match ::objc::__send_message(&*self.pasteboard, sel, ()) {
                        Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                            &[""],
                            &match (&s,) {
                                (arg0,) => [::core::fmt::ArgumentV1::new(
                                    arg0,
                                    ::core::fmt::Display::fmt,
                                )],
                            },
                        )),
                        Ok(r) => result = r,
                    }
                    result
                }
            };
            // msg_send![self.pasteboard, writeObjects: string_array] (expanded)
            let success: bool = unsafe {
                {
                    let sel = {
                        {
                            #[allow(deprecated)]
                            #[inline(always)]
                            fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                unsafe {
                                    static SEL: ::std::sync::atomic::AtomicUsize =
                                        ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                    let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                        as *const ::std::os::raw::c_void;
                                    if ptr.is_null() {
                                        let sel = ::objc::runtime::sel_registerName(
                                            name.as_ptr() as *const _
                                        );
                                        SEL.store(
                                            sel.as_ptr() as usize,
                                            ::std::sync::atomic::Ordering::Relaxed,
                                        );
                                        sel
                                    } else {
                                        ::objc::runtime::Sel::from_ptr(ptr)
                                    }
                                }
                            }
                            register_sel("writeObjects:\u{0}")
                        }
                    };
                    let result;
                    match ::objc::__send_message(&*self.pasteboard, sel, (string_array,)) {
                        Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                            &[""],
                            &match (&s,) {
                                (arg0,) => [::core::fmt::ArgumentV1::new(
                                    arg0,
                                    ::core::fmt::Display::fmt,
                                )],
                            },
                        )),
                        Ok(r) => result = r,
                    }
                    result
                }
            };
            return if success {
                Ok(())
            } else {
                Err(err("NSPasteboard#writeObjects: returned false"))
            };
        }
        /// Reads the first pasteboard object among NSURL/NSImage/NSString and
        /// converts it: strings and URLs become UTF-8 text, images become
        /// their TIFF representation; `Ok(None)` when the pasteboard is empty.
        fn get_binary_contents(&mut self) -> Result<Option<ClipboardContent>, Box<Error>> {
            let string_class: Id<NSObject> = {
                let cls: Id<Class> = unsafe { Id::from_ptr(class("NSString")) };
                unsafe { transmute(cls) }
            };
            let image_class: Id<NSObject> = {
                let cls: Id<Class> = unsafe { Id::from_ptr(class("NSImage")) };
                unsafe { transmute(cls) }
            };
            let url_class: Id<NSObject> = {
                let cls: Id<Class> = unsafe { Id::from_ptr(class("NSURL")) };
                unsafe { transmute(cls) }
            };
            let classes = <[_]>::into_vec(box [url_class, image_class, string_class]);
            let classes: Id<NSArray<NSObject, Owned>> = NSArray::from_vec(classes);
            let options: Id<NSDictionary<NSObject, NSObject>> = NSDictionary::new();
            // msg_send![self.pasteboard, readObjectsForClasses: classes
            //           options: options] (expanded)
            let contents: Id<NSArray<NSObject>> = unsafe {
                let obj: *mut NSArray<NSObject> = {
                    let sel = {
                        {
                            #[allow(deprecated)]
                            #[inline(always)]
                            fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                unsafe {
                                    static SEL: ::std::sync::atomic::AtomicUsize =
                                        ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                    let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                        as *const ::std::os::raw::c_void;
                                    if ptr.is_null() {
                                        let sel = ::objc::runtime::sel_registerName(
                                            name.as_ptr() as *const _
                                        );
                                        SEL.store(
                                            sel.as_ptr() as usize,
                                            ::std::sync::atomic::Ordering::Relaxed,
                                        );
                                        sel
                                    } else {
                                        ::objc::runtime::Sel::from_ptr(ptr)
                                    }
                                }
                            }
                            register_sel("readObjectsForClasses:options:\u{0}")
                        }
                    };
                    let result;
                    match ::objc::__send_message(&*self.pasteboard, sel, (&*classes, &*options)) {
                        Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                            &[""],
                            &match (&s,) {
                                (arg0,) => [::core::fmt::ArgumentV1::new(
                                    arg0,
                                    ::core::fmt::Display::fmt,
                                )],
                            },
                        )),
                        Ok(r) => result = r,
                    }
                    result
                };
                if obj.is_null() {
                    return Err(err(
                        "pasteboard#readObjectsForClasses:options: returned null",
                    ));
                }
                Id::from_ptr(obj)
            };
            if contents.count() == 0 {
                Ok(None)
            } else {
                let obj = &contents[0];
                if obj.is_kind_of(Class::get("NSString").unwrap()) {
                    let s: &NSString = unsafe { transmute(obj) };
                    Ok(Some(ClipboardContent::Utf8(s.as_str().to_owned())))
                } else if obj.is_kind_of(Class::get("NSImage").unwrap()) {
                    // msg_send![obj, TIFFRepresentation] (expanded): NSData
                    // holding the image's TIFF bytes.
                    let tiff: &NSArray<NSObject> = unsafe {
                        {
                            let sel = {
                                {
                                    #[allow(deprecated)]
                                    #[inline(always)]
                                    fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                        unsafe {
                                            static SEL: ::std::sync::atomic::AtomicUsize =
                                                ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                            let ptr = SEL
                                                .load(::std::sync::atomic::Ordering::Relaxed)
                                                as *const ::std::os::raw::c_void;
                                            if ptr.is_null() {
                                                let sel = ::objc::runtime::sel_registerName(
                                                    name.as_ptr() as *const _,
                                                );
                                                SEL.store(
                                                    sel.as_ptr() as usize,
                                                    ::std::sync::atomic::Ordering::Relaxed,
                                                );
                                                sel
                                            } else {
                                                ::objc::runtime::Sel::from_ptr(ptr)
                                            }
                                        }
                                    }
                                    register_sel("TIFFRepresentation\u{0}")
                                }
                            };
                            let result;
                            match ::objc::__send_message(&*obj, sel, ()) {
                                Err(s) => {
                                    ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                                        &[""],
                                        &match (&s,) {
                                            (arg0,) => [::core::fmt::ArgumentV1::new(
                                                arg0,
                                                ::core::fmt::Display::fmt,
                                            )],
                                        },
                                    ))
                                }
                                Ok(r) => result = r,
                            }
                            result
                        }
                    };
                    // msg_send![tiff, length] (expanded)
                    let len: usize = unsafe {
                        {
                            let sel = {
                                {
                                    #[allow(deprecated)]
                                    #[inline(always)]
                                    fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                        unsafe {
                                            static SEL: ::std::sync::atomic::AtomicUsize =
                                                ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                            let ptr = SEL
                                                .load(::std::sync::atomic::Ordering::Relaxed)
                                                as *const ::std::os::raw::c_void;
                                            if ptr.is_null() {
                                                let sel = ::objc::runtime::sel_registerName(
                                                    name.as_ptr() as *const _,
                                                );
                                                SEL.store(
                                                    sel.as_ptr() as usize,
                                                    ::std::sync::atomic::Ordering::Relaxed,
                                                );
                                                sel
                                            } else {
                                                ::objc::runtime::Sel::from_ptr(ptr)
                                            }
                                        }
                                    }
                                    register_sel("length\u{0}")
                                }
                            };
                            let result;
                            match ::objc::__send_message(&*tiff, sel, ()) {
                                Err(s) => {
                                    ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                                        &[""],
                                        &match (&s,) {
                                            (arg0,) => [::core::fmt::ArgumentV1::new(
                                                arg0,
                                                ::core::fmt::Display::fmt,
                                            )],
                                        },
                                    ))
                                }
                                Ok(r) => result = r,
                            }
                            result
                        }
                    };
                    // msg_send![tiff, bytes] (expanded)
                    let bytes: *const u8 = unsafe {
                        {
                            let sel = {
                                {
                                    #[allow(deprecated)]
                                    #[inline(always)]
                                    fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                        unsafe {
                                            static SEL: ::std::sync::atomic::AtomicUsize =
                                                ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                            let ptr = SEL
                                                .load(::std::sync::atomic::Ordering::Relaxed)
                                                as *const ::std::os::raw::c_void;
                                            if ptr.is_null() {
                                                let sel = ::objc::runtime::sel_registerName(
                                                    name.as_ptr() as *const _,
                                                );
                                                SEL.store(
                                                    sel.as_ptr() as usize,
                                                    ::std::sync::atomic::Ordering::Relaxed,
                                                );
                                                sel
                                            } else {
                                                ::objc::runtime::Sel::from_ptr(ptr)
                                            }
                                        }
                                    }
                                    register_sel("bytes\u{0}")
                                }
                            };
                            let result;
                            match ::objc::__send_message(&*tiff, sel, ()) {
                                Err(s) => {
                                    ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                                        &[""],
                                        &match (&s,) {
                                            (arg0,) => [::core::fmt::ArgumentV1::new(
                                                arg0,
                                                ::core::fmt::Display::fmt,
                                            )],
                                        },
                                    ))
                                }
                                Ok(r) => result = r,
                            }
                            result
                        }
                    };
                    // Copy the NSData bytes out into an owned Vec.
                    let vec = unsafe { std::slice::from_raw_parts(bytes, len) };
                    Ok(Some(ClipboardContent::Tiff(vec.into())))
                } else if obj.is_kind_of(Class::get("NSURL").unwrap()) {
                    // msg_send![obj, absoluteString] (expanded)
                    let s: &NSString = unsafe {
                        {
                            let sel = {
                                {
                                    #[allow(deprecated)]
                                    #[inline(always)]
                                    fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                        unsafe {
                                            static SEL: ::std::sync::atomic::AtomicUsize =
                                                ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                            let ptr = SEL
                                                .load(::std::sync::atomic::Ordering::Relaxed)
                                                as *const ::std::os::raw::c_void;
                                            if ptr.is_null() {
                                                let sel = ::objc::runtime::sel_registerName(
                                                    name.as_ptr() as *const _,
                                                );
                                                SEL.store(
                                                    sel.as_ptr() as usize,
                                                    ::std::sync::atomic::Ordering::Relaxed,
                                                );
                                                sel
                                            } else {
                                                ::objc::runtime::Sel::from_ptr(ptr)
                                            }
                                        }
                                    }
                                    register_sel("absoluteString\u{0}")
                                }
                            };
                            let result;
                            match ::objc::__send_message(&*obj, sel, ()) {
                                Err(s) => {
                                    ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                                        &[""],
                                        &match (&s,) {
                                            (arg0,) => [::core::fmt::ArgumentV1::new(
                                                arg0,
                                                ::core::fmt::Display::fmt,
                                            )],
                                        },
                                    ))
                                }
                                Ok(r) => result = r,
                            }
                            result
                        }
                    };
                    Ok(Some(ClipboardContent::Utf8(s.as_str().to_owned())))
                } else {
                    Err(err(
                        "pasteboard#readObjectsForClasses:options: returned unknown class",
                    ))
                }
            }
        }
        /// Image reading is not supported by this backend.
        fn get_image(&mut self) -> Result<ImageData, Box<dyn Error>> {
            Err("Not implemented".into())
        }
        /// Converts `data` into an NSImage (via `image_from_pixels`) and
        /// writes it to the pasteboard.
        fn set_image(&mut self, data: ImageData) -> Result<(), Box<dyn Error>> {
            let pixels = data.bytes.into();
            let image = image_from_pixels(pixels, data.width, data.height)?;
            let objects: Id<NSArray<NSObject, Owned>> =
                NSArray::from_vec(<[_]>::into_vec(box [image]));
            // msg_send![self.pasteboard, writeObjects: objects] (expanded);
            // NOTE(review): the result is discarded as `usize`, so a failed
            // write is not reported here (unlike set_text) — confirm intent.
            let _: usize = unsafe {
                {
                    let sel = {
                        {
                            #[allow(deprecated)]
                            #[inline(always)]
                            fn register_sel(name: &str) -> ::objc::runtime::Sel {
                                unsafe {
                                    static SEL: ::std::sync::atomic::AtomicUsize =
                                        ::std::sync::atomic::ATOMIC_USIZE_INIT;
                                    let ptr = SEL.load(::std::sync::atomic::Ordering::Relaxed)
                                        as *const ::std::os::raw::c_void;
                                    if ptr.is_null() {
                                        let sel = ::objc::runtime::sel_registerName(
                                            name.as_ptr() as *const _
                                        );
                                        SEL.store(
                                            sel.as_ptr() as usize,
                                            ::std::sync::atomic::Ordering::Relaxed,
                                        );
                                        sel
                                    } else {
                                        ::objc::runtime::Sel::from_ptr(ptr)
                                    }
                                }
                            }
                            register_sel("writeObjects:\u{0}")
                        }
                    };
                    let result;
                    match ::objc::__send_message(&*self.pasteboard, sel, (&*objects,)) {
                        Err(s) => ::std::rt::begin_panic_fmt(&::core::fmt::Arguments::new_v1(
                            &[""],
                            &match (&s,) {
                                (arg0,) => [::core::fmt::ArgumentV1::new(
                                    arg0,
                                    ::core::fmt::Display::fmt,
                                )],
                            },
                        )),
                        Ok(r) => result = r,
                    }
                    result
                }
            };
            Ok(())
        }
    }
    /// Looks up an Objective-C class by name, returning a raw class pointer.
    ///
    /// NOTE(review): this transmutes `Option<&Class>` to `*mut Class`,
    /// relying on the null-pointer niche so `None` becomes null — callers
    /// (e.g. the `Id::from_ptr(class(...))` sites above) appear to assume the
    /// class exists; confirm that is always the case.
    #[inline]
    pub fn class(name: &str) -> *mut Class {
        unsafe { transmute(Class::get(name)) }
    }
}
|
use actix_web::{web};
mod session;
mod user;
/// Registers this module tree's routes on the actix-web service config by
/// delegating to the `session` and `user` submodules.
pub fn init(cfg: &mut web::ServiceConfig) {
    session::init(cfg);
    user::init(cfg);
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error for every operation in this generated client: each variant
/// transparently wraps the dedicated error type of one operation module
/// (e.g. `services::get::Error`), so `?` conversions work via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Services_Get(#[from] services::get::Error),
    #[error(transparent)]
    Services_CreateOrUpdate(#[from] services::create_or_update::Error),
    #[error(transparent)]
    Services_Update(#[from] services::update::Error),
    #[error(transparent)]
    Services_Delete(#[from] services::delete::Error),
    #[error(transparent)]
    Services_List(#[from] services::list::Error),
    #[error(transparent)]
    Services_ListByResourceGroup(#[from] services::list_by_resource_group::Error),
    #[error(transparent)]
    Services_CheckNameAvailability(#[from] services::check_name_availability::Error),
    #[error(transparent)]
    PrivateEndpointConnections_ListByService(#[from] private_endpoint_connections::list_by_service::Error),
    #[error(transparent)]
    PrivateEndpointConnections_Get(#[from] private_endpoint_connections::get::Error),
    #[error(transparent)]
    PrivateEndpointConnections_CreateOrUpdate(#[from] private_endpoint_connections::create_or_update::Error),
    #[error(transparent)]
    PrivateEndpointConnections_Delete(#[from] private_endpoint_connections::delete::Error),
    #[error(transparent)]
    PrivateLinkResources_ListByService(#[from] private_link_resources::list_by_service::Error),
    #[error(transparent)]
    PrivateLinkResources_Get(#[from] private_link_resources::get::Error),
    #[error(transparent)]
    Workspaces_ListBySubscription(#[from] workspaces::list_by_subscription::Error),
    #[error(transparent)]
    Workspaces_ListByResourceGroup(#[from] workspaces::list_by_resource_group::Error),
    #[error(transparent)]
    Workspaces_Get(#[from] workspaces::get::Error),
    #[error(transparent)]
    Workspaces_CreateOrUpdate(#[from] workspaces::create_or_update::Error),
    #[error(transparent)]
    Workspaces_Update(#[from] workspaces::update::Error),
    #[error(transparent)]
    Workspaces_Delete(#[from] workspaces::delete::Error),
    #[error(transparent)]
    DicomServices_ListByWorkspace(#[from] dicom_services::list_by_workspace::Error),
    #[error(transparent)]
    DicomServices_Get(#[from] dicom_services::get::Error),
    #[error(transparent)]
    DicomServices_CreateOrUpdate(#[from] dicom_services::create_or_update::Error),
    #[error(transparent)]
    DicomServices_Update(#[from] dicom_services::update::Error),
    #[error(transparent)]
    DicomServices_Delete(#[from] dicom_services::delete::Error),
    #[error(transparent)]
    IotConnectors_ListByWorkspace(#[from] iot_connectors::list_by_workspace::Error),
    #[error(transparent)]
    IotConnectors_Get(#[from] iot_connectors::get::Error),
    #[error(transparent)]
    IotConnectors_CreateOrUpdate(#[from] iot_connectors::create_or_update::Error),
    #[error(transparent)]
    IotConnectors_Update(#[from] iot_connectors::update::Error),
    #[error(transparent)]
    IotConnectors_Delete(#[from] iot_connectors::delete::Error),
    #[error(transparent)]
    FhirDestinations_ListByIotConnector(#[from] fhir_destinations::list_by_iot_connector::Error),
    #[error(transparent)]
    IotConnectorFhirDestination_Get(#[from] iot_connector_fhir_destination::get::Error),
    #[error(transparent)]
    IotConnectorFhirDestination_CreateOrUpdate(#[from] iot_connector_fhir_destination::create_or_update::Error),
    #[error(transparent)]
    IotConnectorFhirDestination_Delete(#[from] iot_connector_fhir_destination::delete::Error),
    #[error(transparent)]
    FhirServices_ListByWorkspace(#[from] fhir_services::list_by_workspace::Error),
    #[error(transparent)]
    FhirServices_Get(#[from] fhir_services::get::Error),
    #[error(transparent)]
    FhirServices_CreateOrUpdate(#[from] fhir_services::create_or_update::Error),
    #[error(transparent)]
    FhirServices_Update(#[from] fhir_services::update::Error),
    #[error(transparent)]
    FhirServices_Delete(#[from] fhir_services::delete::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    OperationResults_Get(#[from] operation_results::get::Error),
}
pub mod services {
use super::{models, API_VERSION};
    /// Fetches a HealthcareApis service instance.
    ///
    /// Issues `GET .../Microsoft.HealthcareApis/services/{resource_name}`;
    /// a 200 body is deserialized into `models::ServicesDescription`, any
    /// other status is deserialized as `models::ErrorDetails` and surfaced
    /// as `get::Error::DefaultResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        resource_name: &str,
    ) -> std::result::Result<models::ServicesDescription, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: attached only when the operation config
        // carries a token credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ServicesDescription =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status carries an ErrorDetails payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`services::get`]: one variant per failure stage
    /// (URL parse, auth, request build/execute, (de)serialization) plus
    /// `DefaultResponse` for non-200 service replies.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a HealthcareApis service instance.
    ///
    /// Issues `PUT .../services/{resource_name}` with `service_description`
    /// as the JSON body. 200 maps to `Response::Ok200` (updated) and 201 to
    /// `Response::Created201` (created); any other status becomes
    /// `create_or_update::Error::DefaultResponse`.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        resource_name: &str,
        service_description: &models::ServicesDescription,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Bearer auth is optional: attached only when the operation config
        // carries a token credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(service_description).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ServicesDescription = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::ServicesDescription = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            // Any other status carries an ErrorDetails payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`services::create_or_update`]:
    /// `Response` distinguishes HTTP 200 (updated) from 201 (created);
    /// `Error` has one variant per failure stage plus `DefaultResponse`
    /// for other service replies.
    pub mod create_or_update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ServicesDescription),
            Created201(models::ServicesDescription),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Patches an existing HealthcareApis service instance.
    ///
    /// Issues `PATCH .../services/{resource_name}` with
    /// `service_patch_description` as the JSON body; a 200 body is
    /// deserialized into `models::ServicesDescription`, any other status
    /// becomes `update::Error::DefaultResponse`.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        resource_name: &str,
        service_patch_description: &models::ServicesPatchDescription,
    ) -> std::result::Result<models::ServicesDescription, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        // Bearer auth is optional: attached only when the operation config
        // carries a token credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(service_patch_description).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ServicesDescription =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status carries an ErrorDetails payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`services::update`]: one variant per failure stage
    /// (URL parse, auth, request build/execute, (de)serialization) plus
    /// `DefaultResponse` for non-200 service replies.
    pub mod update {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
/// Delete the HealthcareApis service instance identified by `resource_name`.
///
/// Sends `DELETE {base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.HealthcareApis/services/{resource_name}`
/// with the `api-version` query pair and an empty body.
/// Success mapping: 202 -> `delete::Response::Accepted202`, 204 -> `delete::Response::NoContent204`;
/// any other status is decoded as `models::ErrorDetails` and returned as `delete::Error::DefaultResponse`.
/// NOTE(review): 200 OK is not a success arm here, unlike the private endpoint
/// connection delete below — presumably per the service spec; confirm against the swagger.
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Bearer auth is optional: attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// DELETE carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the service `delete` operation.
pub mod delete {
use super::{models, API_VERSION};
/// Success statuses of the delete call, one variant per accepted HTTP code.
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
/// Failure modes of the delete call; `DefaultResponse` carries a non-success HTTP answer.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): SerializeError is unused by a body-less DELETE; kept for generator uniformity.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// List all HealthcareApis services under the subscription.
///
/// Sends `GET {base_path}/subscriptions/{subscription_id}/providers/Microsoft.HealthcareApis/services`.
/// 200 is decoded as `models::ServicesDescriptionListResult`; any other status is
/// decoded as `models::ErrorDetails` and returned as `list::Error::DefaultResponse`.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::ServicesDescriptionListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HealthcareApis/services",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ServicesDescriptionListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the service `list` operation.
pub mod list {
use super::{models, API_VERSION};
/// Failure modes of the list call; `DefaultResponse` carries a non-success HTTP answer.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): SerializeError is unused by a body-less GET; kept for generator uniformity.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// List all HealthcareApis services in the given resource group.
///
/// Sends `GET {base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.HealthcareApis/services`.
/// 200 is decoded as `models::ServicesDescriptionListResult`; any other status is
/// decoded as `models::ErrorDetails` and returned as `DefaultResponse`.
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::ServicesDescriptionListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ServicesDescriptionListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the service `list_by_resource_group` operation.
pub mod list_by_resource_group {
use super::{models, API_VERSION};
/// Failure modes of the call; `DefaultResponse` carries a non-success HTTP answer.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): SerializeError is unused by a body-less GET; kept for generator uniformity.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Check whether a service name is available in the subscription.
///
/// Sends `POST {base_path}/subscriptions/{subscription_id}/providers/Microsoft.HealthcareApis/checkNameAvailability`
/// with `check_name_availability_inputs` serialized as the JSON body.
/// 200 is decoded as `models::ServicesNameAvailabilityInfo`; any other status is
/// decoded as `models::ErrorDetails` and returned as `DefaultResponse`.
pub async fn check_name_availability(
operation_config: &crate::OperationConfig,
subscription_id: &str,
check_name_availability_inputs: &models::CheckNameAvailabilityParameters,
) -> std::result::Result<models::ServicesNameAvailabilityInfo, check_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HealthcareApis/checkNameAvailability",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer auth is optional: attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(check_name_availability::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// JSON payload: the name/type pair to probe for availability.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(check_name_availability_inputs).map_err(check_name_availability::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(check_name_availability::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(check_name_availability::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ServicesNameAvailabilityInfo = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
Err(check_name_availability::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `check_name_availability` operation.
pub mod check_name_availability {
use super::{models, API_VERSION};
/// Failure modes of the call; `DefaultResponse` carries a non-success HTTP answer.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Operations on private endpoint connections of a HealthcareApis service:
/// list, get, create-or-update, and delete. Each function builds the ARM URL
/// from `operation_config.base_path()`, optionally attaches a bearer token,
/// appends the `api-version` query pair, and maps non-success statuses to a
/// `DefaultResponse` error carrying the decoded `models::ErrorDetails`.
pub mod private_endpoint_connections {
use super::{models, API_VERSION};
/// List all private endpoint connections for a service (GET, 200 -> list result).
pub async fn list_by_service(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<models::PrivateEndpointConnectionListResultDescription, list_by_service::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}/privateEndpointConnections",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_service::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_service::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_service::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_service::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnectionListResultDescription = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_service::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_service::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_service::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for `list_by_service`.
pub mod list_by_service {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetch a single private endpoint connection by name (GET, 200 -> description).
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
private_endpoint_connection_name: &str,
) -> std::result::Result<models::PrivateEndpointConnectionDescription, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnectionDescription =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for `get`.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Create or update a private endpoint connection (PUT with JSON body, 200 -> description).
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
private_endpoint_connection_name: &str,
properties: &models::PrivateEndpointConnection,
) -> std::result::Result<models::PrivateEndpointConnectionDescription, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(properties).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateEndpointConnectionDescription = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for `create_or_update`.
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Delete a private endpoint connection.
/// Success mapping: 200 -> `Ok200`, 202 -> `Accepted202`, 204 -> `NoContent204`.
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
private_endpoint_connection_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}/privateEndpointConnections/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name,
private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for `delete`.
pub mod delete {
use super::{models, API_VERSION};
/// Success statuses, one variant per accepted HTTP code.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Read-only operations on the private link resources of a HealthcareApis
/// service: list and get. Same request-building pattern as the sibling modules:
/// URL from `operation_config.base_path()`, optional bearer token, `api-version`
/// query pair, non-success statuses mapped to `DefaultResponse`.
pub mod private_link_resources {
use super::{models, API_VERSION};
/// List private link resources for a service (GET, 200 -> list result).
pub async fn list_by_service(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<models::PrivateLinkResourceListResultDescription, list_by_service::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}/privateLinkResources",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_service::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_service::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_service::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_service::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateLinkResourceListResultDescription = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_service::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_service::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_service::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for `list_by_service`.
pub mod list_by_service {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetch a single private link resource by group name (GET, 200 -> description).
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
group_name: &str,
) -> std::result::Result<models::PrivateLinkResourceDescription, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/services/{}/privateLinkResources/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name,
group_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PrivateLinkResourceDescription =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for `get`.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod workspaces {
use super::{models, API_VERSION};
/// List all HealthcareApis workspaces in the subscription.
///
/// Sends `GET {base_path}/subscriptions/{subscription_id}/providers/Microsoft.HealthcareApis/workspaces`.
/// 200 is decoded as `models::WorkspaceList`; any other status is decoded as
/// `models::ErrorDetails` and returned as `DefaultResponse`.
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::WorkspaceList, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.HealthcareApis/workspaces",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::WorkspaceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the workspace `list_by_subscription` operation.
pub mod list_by_subscription {
use super::{models, API_VERSION};
/// Failure modes of the call; `DefaultResponse` carries a non-success HTTP answer.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// NOTE(review): SerializeError is unused by a body-less GET; kept for generator uniformity.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// List all HealthcareApis workspaces in the given resource group.
///
/// Sends `GET {base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.HealthcareApis/workspaces`.
/// 200 is decoded as `models::WorkspaceList`; any other status is decoded as
/// `models::ErrorDetails` and returned as `DefaultResponse`.
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::WorkspaceList, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::WorkspaceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
/// Error type for the `list_by_resource_group` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
) -> std::result::Result<models::Workspace, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
/// Error type for the `get` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
workspace: &models::Workspace,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(workspace).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
/// Success responses for the `create_or_update` operation, keyed by HTTP status.
#[derive(Debug)]
pub enum Response {
Ok200(models::Workspace),
Created201(models::Workspace),
Accepted202(models::Workspace),
}
/// Error type for the `create_or_update` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
workspace_patch_resource: &models::WorkspacePatchResource,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(workspace_patch_resource).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::Workspace =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
/// Success responses for the `update` operation, keyed by HTTP status.
#[derive(Debug)]
pub enum Response {
Ok200(models::Workspace),
Accepted202(models::Workspace),
}
/// Error type for the `update` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
workspace_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
/// Success responses for the `delete` operation; no payload accompanies any of them.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
/// Error type for the `delete` operation. Unlike the sibling operations,
/// the default-response payload is `models::Error` (per the service spec).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod dicom_services {
use super::{models, API_VERSION};
pub async fn list_by_workspace(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
) -> std::result::Result<models::DicomServiceCollection, list_by_workspace::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/dicomservices",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_workspace::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_workspace::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_workspace::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_workspace::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DicomServiceCollection = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_workspace::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_workspace {
use super::{models, API_VERSION};
/// Error type for the `list_by_workspace` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
dicom_service_name: &str,
) -> std::result::Result<models::DicomService, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/dicomservices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
dicom_service_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DicomService =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
/// Error type for the `get` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
dicom_service_name: &str,
dicomservice: &models::DicomService,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/dicomservices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
dicom_service_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(dicomservice).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DicomService = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::DicomService = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::DicomService = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
/// Success responses for the `create_or_update` operation, keyed by HTTP status.
#[derive(Debug)]
pub enum Response {
Ok200(models::DicomService),
Created201(models::DicomService),
Accepted202(models::DicomService),
}
/// Error type for the `create_or_update` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
dicom_service_name: &str,
workspace_name: &str,
dicomservice_patch_resource: &models::DicomServicePatchResource,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/dicomservices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
dicom_service_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(dicomservice_patch_resource).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::DicomService =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::DicomService =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
/// Success responses for the `update` operation, keyed by HTTP status.
#[derive(Debug)]
pub enum Response {
Ok200(models::DicomService),
Accepted202(models::DicomService),
}
/// Error type for the `update` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
dicom_service_name: &str,
workspace_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/dicomservices/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
dicom_service_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
/// Success responses for the `delete` operation; no payload accompanies any of them.
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
/// Error type for the `delete` operation. Unlike the sibling operations,
/// the default-response payload is `models::Error` (per the service spec).
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod iot_connectors {
use super::{models, API_VERSION};
pub async fn list_by_workspace(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
) -> std::result::Result<models::IotConnectorCollection, list_by_workspace::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_workspace::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_workspace::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_workspace::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_workspace::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IotConnectorCollection = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_workspace::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_workspace {
use super::{models, API_VERSION};
/// Error type for the `list_by_workspace` operation.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
/// The service answered with an unexpected HTTP status; `value` carries the service-reported error payload.
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorDetails,
},
/// The request URL could not be parsed.
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
/// The HTTP request could not be constructed.
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
/// Sending the request failed at the transport level.
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
/// The request body could not be serialized to JSON.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
/// The response body could not be deserialized; raw bytes are retained for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
/// Acquiring an access token from the credential failed.
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
workspace_name: &str,
iot_connector_name: &str,
) -> std::result::Result<models::IotConnector, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
iot_connector_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::IotConnector =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorDetails =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get` operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes of `get`; `DefaultResponse` carries the service's own
    /// error payload, the remaining variants are client-side failures.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Create or update an IoT connector (PUT).
///
/// Serializes `iot_connector` as JSON and PUTs it to
/// `.../workspaces/{workspace_name}/iotconnectors/{iot_connector_name}`.
/// 200/201/202 responses each carry the resulting `models::IotConnector`
/// in a dedicated `create_or_update::Response` variant; any other status
/// is returned as `create_or_update::Error::DefaultResponse`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
    workspace_name: &str,
    iot_connector_name: &str,
    iot_connector: &models::IotConnector,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name,
        iot_connector_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(iot_connector).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    // Map each documented success status onto its own Response variant.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::IotConnector = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::IotConnector = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::IotConnector = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Accepted202(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_or_update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `create_or_update` operation.
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Success outcomes: 200 (updated), 201 (created), 202 (accepted, async
    /// provisioning in progress); each carries the returned connector.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::IotConnector),
        Created201(models::IotConnector),
        Accepted202(models::IotConnector),
    }
    /// Failure modes of `create_or_update`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Patch an existing IoT connector (PATCH).
///
/// Serializes `iot_connector_patch_resource` as JSON and PATCHes it to
/// `.../workspaces/{workspace_name}/iotconnectors/{iot_connector_name}`.
/// 200/202 responses carry the updated `models::IotConnector`; any other
/// status is returned as `update::Error::DefaultResponse`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
    iot_connector_name: &str,
    workspace_name: &str,
    iot_connector_patch_resource: &models::IotConnectorPatchResource,
) -> std::result::Result<update::Response, update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name,
        iot_connector_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(iot_connector_patch_resource).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::IotConnector =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(update::Response::Ok200(rsp_value))
        }
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::IotConnector =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(update::Response::Accepted202(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorDetails =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `update` operation.
pub mod update {
    use super::{models, API_VERSION};
    /// Success outcomes: 200 (updated) or 202 (accepted, async update in
    /// progress); each carries the returned connector.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::IotConnector),
        Accepted202(models::IotConnector),
    }
    /// Failure modes of `update`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
iot_connector_name: &str,
workspace_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
workspace_name,
iot_connector_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the `delete` operation.
pub mod delete {
    use super::{models, API_VERSION};
    /// Success outcomes: 200 (deleted), 202 (accepted, async delete in
    /// progress), 204 (resource did not exist). No payload in any case.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Failure modes of `delete`. NOTE: the default-response payload is
    /// `models::Error` here, unlike the other operations' `ErrorDetails`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Operations for FHIR destinations attached to an IoT connector.
pub mod fhir_destinations {
    use super::{models, API_VERSION};
    /// List the FHIR destinations of one IoT connector.
    ///
    /// Issues `GET .../iotconnectors/{iot_connector_name}/fhirdestinations`.
    /// A 200 response is deserialized into
    /// `models::IotFhirDestinationCollection`; any other status is returned
    /// as `list_by_iot_connector::Error::DefaultResponse`.
    pub async fn list_by_iot_connector(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        subscription_id: &str,
        workspace_name: &str,
        iot_connector_name: &str,
    ) -> std::result::Result<models::IotFhirDestinationCollection, list_by_iot_connector::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}/fhirdestinations",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            workspace_name,
            iot_connector_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_iot_connector::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_iot_connector::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_by_iot_connector::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_iot_connector::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::IotFhirDestinationCollection = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_iot_connector::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_iot_connector::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_iot_connector::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for the `list_by_iot_connector` operation.
    pub mod list_by_iot_connector {
        use super::{models, API_VERSION};
        /// Failure modes of `list_by_iot_connector`.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// CRUD operations on a single FHIR destination of an IoT connector.
pub mod iot_connector_fhir_destination {
    use super::{models, API_VERSION};
    /// Fetch one FHIR destination by name.
    ///
    /// `GET .../iotconnectors/{iot_connector_name}/fhirdestinations/{fhir_destination_name}`.
    /// A 200 response yields `models::IotFhirDestination`; anything else is
    /// surfaced as `get::Error::DefaultResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        subscription_id: &str,
        workspace_name: &str,
        iot_connector_name: &str,
        fhir_destination_name: &str,
    ) -> std::result::Result<models::IotFhirDestination, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}/fhirdestinations/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            workspace_name,
            iot_connector_name,
            fhir_destination_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::IotFhirDestination =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error types for the `get` operation.
    pub mod get {
        use super::{models, API_VERSION};
        /// Failure modes of `get`.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Create or update one FHIR destination (PUT).
    ///
    /// Serializes `iot_fhir_destination` as JSON; 200/201/202 responses each
    /// carry the resulting `models::IotFhirDestination` in a dedicated
    /// `create_or_update::Response` variant.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        subscription_id: &str,
        workspace_name: &str,
        iot_connector_name: &str,
        fhir_destination_name: &str,
        iot_fhir_destination: &models::IotFhirDestination,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}/fhirdestinations/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            workspace_name,
            iot_connector_name,
            fhir_destination_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(iot_fhir_destination).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        // Map each documented success status onto its own Response variant.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::IotFhirDestination = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::IotFhirDestination = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            http::StatusCode::ACCEPTED => {
                let rsp_body = rsp.body();
                let rsp_value: models::IotFhirDestination = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Accepted202(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for the `create_or_update` operation.
    pub mod create_or_update {
        use super::{models, API_VERSION};
        /// Success outcomes: 200 (updated), 201 (created), 202 (accepted).
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::IotFhirDestination),
            Created201(models::IotFhirDestination),
            Accepted202(models::IotFhirDestination),
        }
        /// Failure modes of `create_or_update`.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Delete one FHIR destination.
    ///
    /// 200/202/204 map onto `delete::Response`; any other status is parsed
    /// as `models::Error` and returned as `delete::Error::DefaultResponse`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        iot_connector_name: &str,
        fhir_destination_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/iotconnectors/{}/fhirdestinations/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            workspace_name,
            iot_connector_name,
            fhir_destination_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for the `delete` operation.
    pub mod delete {
        use super::{models, API_VERSION};
        /// Success outcomes: 200, 202 (async delete), 204 (not present).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        /// Failure modes of `delete`. NOTE: the default-response payload is
        /// `models::Error` here, unlike the other operations' `ErrorDetails`.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod fhir_services {
use super::{models, API_VERSION};
/// List all FHIR services in a workspace.
///
/// Issues `GET .../workspaces/{workspace_name}/fhirservices`. A 200 response
/// is deserialized into `models::FhirServiceCollection`; any other status is
/// returned as `list_by_workspace::Error::DefaultResponse`.
pub async fn list_by_workspace(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
    workspace_name: &str,
) -> std::result::Result<models::FhirServiceCollection, list_by_workspace::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/fhirservices",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_workspace::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_workspace::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_by_workspace::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_workspace::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirServiceCollection = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_workspace::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list_by_workspace::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `list_by_workspace` operation.
pub mod list_by_workspace {
    use super::{models, API_VERSION};
    /// Failure modes of `list_by_workspace`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetch a single FHIR service from the given workspace.
///
/// Issues `GET .../workspaces/{workspace_name}/fhirservices/{fhir_service_name}`.
/// A 200 response is deserialized into `models::FhirService`; any other
/// status is returned as `get::Error::DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
    workspace_name: &str,
    fhir_service_name: &str,
) -> std::result::Result<models::FhirService, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/fhirservices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name,
        fhir_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirService =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorDetails =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error types for the `get` operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes of `get`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Create or update a FHIR service (PUT).
///
/// Serializes `fhirservice` as JSON and PUTs it to
/// `.../workspaces/{workspace_name}/fhirservices/{fhir_service_name}`.
/// 200/201/202 responses each carry the resulting `models::FhirService`
/// in a dedicated `create_or_update::Response` variant; any other status
/// is returned as `create_or_update::Error::DefaultResponse`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
    workspace_name: &str,
    fhir_service_name: &str,
    fhirservice: &models::FhirService,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/fhirservices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name,
        fhir_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Bearer auth is optional: only attached when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(fhirservice).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    // Map each documented success status onto its own Response variant.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirService = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirService = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirService = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Accepted202(rsp_value))
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorDetails = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_or_update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `create_or_update` operation.
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Success outcomes: 200 (updated), 201 (created), 202 (accepted, async
    /// provisioning in progress); each carries the returned FHIR service.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::FhirService),
        Created201(models::FhirService),
        Accepted202(models::FhirService),
    }
    /// Failure modes of `create_or_update`.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Patches an existing FHIR service in a workspace
/// (PATCH {base}/subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.HealthcareApis/workspaces/{ws}/fhirservices/{name}).
///
/// Returns the updated resource on 200, the in-progress resource on 202, and
/// maps every other status to `update::Error::DefaultResponse`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
    fhir_service_name: &str,
    workspace_name: &str,
    fhirservice_patch_resource: &models::FhirServicePatchResource,
) -> std::result::Result<update::Response, update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/fhirservices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name,
        fhir_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Bearer-token auth is optional: attached only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    // JSON-encode the PATCH document as the request body.
    let req_body = azure_core::to_json(fhirservice_patch_resource).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirService =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(update::Response::Ok200(rsp_value))
        }
        // 202: the update was accepted and completes asynchronously.
        http::StatusCode::ACCEPTED => {
            let rsp_body = rsp.body();
            let rsp_value: models::FhirService =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(update::Response::Accepted202(rsp_value))
        }
        // Anything else: parse the service's error body and surface it.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorDetails =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `update` operation.
pub mod update {
    use super::{models, API_VERSION};
    /// Success responses: 200 with the updated resource or 202 while the
    /// update completes asynchronously.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::FhirService),
        Accepted202(models::FhirService),
    }
    /// Every failure mode of the operation, from URL construction through
    /// response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// Any HTTP status not explicitly handled, carrying the service's error payload.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorDetails,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw body bytes so malformed payloads can be inspected.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes a FHIR service from a workspace
/// (DELETE {base}/subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.HealthcareApis/workspaces/{ws}/fhirservices/{name}).
///
/// 200, 202, and 204 are all success variants; any other status is surfaced
/// as `delete::Error::DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    fhir_service_name: &str,
    workspace_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.HealthcareApis/workspaces/{}/fhirservices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        workspace_name,
        fhir_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Bearer-token auth is optional: attached only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // DELETE carries no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        // Success bodies are ignored; only the status code matters.
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => {
            let rsp_body = rsp.body();
            // NOTE: unlike the sibling operations, the error payload here is
            // `models::Error` rather than `models::ErrorDetails`.
            let rsp_value: models::Error =
                serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Response and error types for the `delete` operation.
pub mod delete {
    use super::{models, API_VERSION};
    /// Success responses; deletion carries no body, so the variants are unit-like.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Every failure mode of the operation, from URL construction through
    /// response deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// Any HTTP status not explicitly handled, carrying the service's error payload.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Keeps the raw body bytes so malformed payloads can be inspected.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Provider-level `operations` endpoint.
pub mod operations {
    use super::{models, API_VERSION};
    /// Lists the REST operations exposed by the Microsoft.HealthcareApis
    /// provider (GET {base}/providers/Microsoft.HealthcareApis/operations).
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::ListOperations, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.HealthcareApis/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer-token auth is optional: attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET carries no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ListOperations =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200: parse the service's error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for `list`.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Any HTTP status other than 200, with the service's error payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            /// Keeps the raw body bytes so malformed payloads can be inspected.
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Polling endpoint for long-running operation results.
pub mod operation_results {
    use super::{models, API_VERSION};
    /// Fetches the result of a long-running operation by location and id
    /// (GET {base}/subscriptions/{sub}/providers/Microsoft.HealthcareApis/locations/{loc}/operationresults/{id}).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location_name: &str,
        operation_result_id: &str,
    ) -> std::result::Result<models::OperationResultsDescription, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.HealthcareApis/locations/{}/operationresults/{}",
            operation_config.base_path(),
            subscription_id,
            location_name,
            operation_result_id
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer-token auth is optional: attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // GET carries no request body.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationResultsDescription =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200: parse the service's error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorDetails =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for `get`.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Any HTTP status other than 200, with the service's error payload.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorDetails,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            /// Keeps the raw body bytes so malformed payloads can be inspected.
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
|
use yasl_macro::yasl_vert;
// Vertex-shader definition in the YASL DSL: the `yasl_vert` attribute macro
// expands the `note_vert!` body into shader code. Most of the body is kept as
// commented-out examples of DSL features (inputs, helper fns, vectors, etc.).
#[yasl_vert]
note_vert! {
    // layout<input,0> i_color: vec3<f32>;
    // layout<input,1> i_uv: vec2<f32>;
    // layout<input,2> i_size: vec2<f32>;
    // layout<input,3> i_radius: f32;
    // Module-level constant, visible inside `main`.
    static c: f32 = 0.0;
    // fn add(a: f32, b: f32) -> f32{
    // return a + b;
    // }
    fn main() {
        // let b = c + 1.0;
        // let d = add(1.0,2.0);
        let a = 1.0;
        let b = a;
        // Inner scope: shadows `a`/`b` with fresh `u32` bindings.
        {
            let b: u32 = 0;
            let a = b;
        }
        // {
        // let b = c;
        // }
        // let color: vec3<f32> = i_color;
        // let alpha: f32 = 1.0;
        // let pos: vec2<f32> = i_uv * i_size;
        // let xMax: f32 = i_size.x - i_radius;
        // let a = 0.0 + 1.0;
        // let b = c;
        // let b = a;
        //
        // let a: f32 = add(1.0,1.0);
        // let num: i32 = 1.0 as i32;
        // {
        // let num3: f32 = 0.0;
        // }
        // let num2: f32 = a;
        // num2 = 1.0;
        // if num2 == 1.0 {
        // num2 += 1.0;
        // } else if true{
        // num2 = 1.0;
        // } else{
        // num2 = 0.0;
        // }
        // let f : vec2<f32> = vec2(1.0,1.0);
        // let f1 : vec2<f32> = f32::vec2(1.0,1.0);
        // let f2 : vec2<f64> = f64::vec2(0.0,0.0);
        // let i1 : vec2<i32> = i32::vec2(0,0);
        // let i2 : vec2<u32> = u32::vec2(0,0);
        // glsl::gl_Position = vec4(0.0,0.0,0.0,1.0);
    }
}
/// Program entry point: prints the canonical greeting.
fn main() {
    print_greeting();
}

/// Writes "Hello, world!" followed by a newline to stdout.
fn print_greeting() {
    println!("Hello, world!");
}
|
/// Demonstrates Rust closures: type inference, capture by reference, `move`
/// closures, closures as generic arguments, trait objects, and returning
/// boxed closures. Fix: the statement `let _y = #` was mangled source
/// (the HTML entity for `&num;` collapsed to `#`) and is restored below;
/// bare trait objects now use the explicit `dyn` syntax.
fn main() {
    // Parameter type annotated; return type inferred.
    let plus_one = |x: i32| x + 1;
    assert_eq!(2, plus_one(1));

    // Multi-statement body; `x` is inferred as i32 from `result`.
    let plus_two = |x| {
        let mut result: i32 = x;
        result += 1;
        result += 1;
        result
    };
    assert_eq!(4, plus_two(2));

    // Equivalent function / closure spellings.
    fn _plus_one_v1(x: i32) -> i32 { x + 1 }
    let _plus_one_v2 = |x: i32| -> i32 { x + 1 };
    let _plus_one_v3 = |x: i32| x + 1;
    // this is error! (never called, so `x` cannot be inferred)
    //let _plus_one_v4 = |x| x + 1 ;

    let mut num = 5;
    num += 1;
    // Borrows `num` immutably for as long as the closure lives.
    let plus_num = |x: i32| x + num;
    // already borrowed: a mutable borrow would conflict with the closure's borrow
    //let y = &mut num;
    let _y = &num; // a second shared borrow is fine
    assert_eq!(11, plus_num(5));

    // move closure: takes ownership of (here: copies) `num`.
    let num = 5;
    let owns_num = move |x: i32| num + x;
    println!("{}", owns_num(100));

    // Mutating capture: the closure borrows `num` mutably until it is dropped.
    let mut num = 5;
    {
        let mut add_num = |x: i32| num += x;
        add_num(5);
    }
    assert_eq!(10, num);

    let mut num = 5;
    {
        // A move closure takes ownership of a copy; mutations affect only
        // that copy, not the original `num`.
        let mut add_num = move |x: i32| num += x;
        add_num(5);
    }
    assert_eq!(5, num);

    // closure as args: generic bound, statically dispatched.
    fn call_with_one<F>(some_closure: F) -> i32
    where F: Fn(i32) -> i32 {
        some_closure(1)
    }
    let answer = call_with_one(|x| x + 2);
    assert_eq!(3, answer);

    // Function pointers and closures: both coerce to a `dyn Fn` trait object.
    fn call_with_two(some_closure: &dyn Fn(i32) -> i32) -> i32 {
        some_closure(2)
    }
    fn add_one(i: i32) -> i32 {
        i + 1
    }
    let f = add_one;
    let answer = call_with_two(&f);
    assert_eq!(3, answer);

    // return closure: box it and `move` the captured state in.
    fn factory() -> Box<dyn Fn(i32) -> i32> {
        let num = 5;
        Box::new(move |x| x + num)
    }
    let f = factory();
    let answer = f(1);
    assert_eq!(6, answer);
}
|
//! Tests auto-converted from "sass-spec/spec/core_functions/meta/get_function"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/core_functions/meta/get_function/different_module.hrx"
// From "sass-spec/spec/core_functions/meta/get_function/different_module.hrx"
// Exercises `get-function()` across module boundaries: via the `$module:`
// argument, through `@use`, and through `@forward`.
mod different_module {
    #[allow(unused)]
    use super::rsass;
    #[test]
    fn chosen_prefix() {
        // `$module:` may name the alias chosen with `@use ... as a`.
        assert_eq!(
            rsass(
                "@use \"sass:color\" as a;\
                \nb {c: call(get-function(\"red\", $module: \"a\"), #abcdef)}\
                \n"
            )
            .unwrap(),
            "b {\
            \n  c: 171;\
            \n}\
            \n"
        );
    }
    #[test]
    fn defined() {
        assert_eq!(
            rsass(
                "@use \"sass:color\";\
                \na {b: call(get-function(\"red\", $module: \"color\"), #abcdef)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 171;\
            \n}\
            \n"
        );
    }
    #[test]
    fn named() {
        // Both arguments passed by name.
        assert_eq!(
            rsass(
                "@use \"sass:color\";\
                \na {b: call(get-function($name: \"red\", $module: \"color\"), #abcdef)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 171;\
            \n}\
            \n"
        );
    }
    // Lookups through a `@forward`-ing midstream file; currently failing in
    // rsass, so all four are ignored.
    mod through_forward {
        #[allow(unused)]
        use super::rsass;
        #[test]
        #[ignore] // unexepected error
        fn test_as() {
            assert_eq!(
                rsass(
                    "@use \"midstream\" as *;\
                    \na {\
                    \n  b: call(get-function(c-d));\
                    \n}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: d;\
                \n}\
                \n"
            );
        }
        #[test]
        #[ignore] // unexepected error
        fn bare() {
            assert_eq!(
                rsass(
                    "@use \"midstream\" as *;\
                    \na {b: call(get-function(c))}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: c;\
                \n}\
                \n"
            );
        }
        #[test]
        #[ignore] // unexepected error
        fn hide() {
            assert_eq!(
                rsass(
                    "@use \"midstream\" as *;\
                    \na {\
                    \n  b: call(get-function(d));\
                    \n}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: d;\
                \n}\
                \n"
            );
        }
        #[test]
        #[ignore] // unexepected error
        fn show() {
            assert_eq!(
                rsass(
                    "@use \"midstream\" as *;\
                    \na {\
                    \n  b: call(get-function(c));\
                    \n}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: c;\
                \n}\
                \n"
            );
        }
    }
    #[test]
    #[ignore] // unexepected error
    fn through_use() {
        assert_eq!(
            rsass(
                "@use \"other\" as *;\
                \na {b: call(get-function(add-two), 10)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 12;\
            \n}\
            \n"
        );
    }
}
// From "sass-spec/spec/core_functions/meta/get_function/equality.hrx"
// From "sass-spec/spec/core_functions/meta/get_function/equality.hrx"
// Equality semantics of first-class function values.
mod equality {
    #[allow(unused)]
    use super::rsass;
    mod built_in {
        #[allow(unused)]
        use super::rsass;
        #[test]
        fn different() {
            assert_eq!(
                rsass(
                    "a {b: get-function(lighten) == get-function(darken)}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: false;\
                \n}\
                \n"
            );
        }
        #[test]
        fn same() {
            assert_eq!(
                rsass(
                    "a {b: get-function(lighten) == get-function(lighten)}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: true;\
                \n}\
                \n"
            );
        }
    }
    #[test]
    fn same_value() {
        assert_eq!(
            rsass(
                "$lighten-fn: get-function(lighten);\
                \na {b: $lighten-fn == $lighten-fn}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: true;\
            \n}\
            \n"
        );
    }
    mod user_defined {
        #[allow(unused)]
        use super::rsass;
        #[test]
        fn different() {
            assert_eq!(
                rsass(
                    "@function user-defined-1() {@return null}\
                    \n@function user-defined-2() {@return null}\
                    \na {b: get-function(user-defined-1) == get-function(user-defined-2)}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: false;\
                \n}\
                \n"
            );
        }
        #[test]
        #[ignore] // wrong result
        fn redefined() {
            // Redefining a function should produce a distinct function value.
            assert_eq!(
                rsass(
                    "@function user-defined() {@return null}\
                    \n$first-reference: get-function(user-defined);\
                    \n\
                    \n@function user-defined() {@return null}\
                    \n$second-reference: get-function(user-defined);\
                    \na {b: $first-reference == $second-reference}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: false;\
                \n}\
                \n"
            );
        }
        #[test]
        fn same() {
            assert_eq!(
                rsass(
                    "@function user-defined() {@return null}\
                    \na {b: get-function(user-defined) == get-function(user-defined)}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: true;\
                \n}\
                \n"
            );
        }
    }
}
// From "sass-spec/spec/core_functions/meta/get_function/error.hrx"
// From "sass-spec/spec/core_functions/meta/get_function/error.hrx"
// Placeholder module: every test in this spec file checks error output, which
// the converter cannot express yet, so all are recorded as ignored stubs.
mod error {
    #[allow(unused)]
    use super::rsass;
    mod argument {
        #[allow(unused)]
        use super::rsass;
        // Ignoring "function_ref", error tests are not supported yet.
        // Ignoring "too_few", error tests are not supported yet.
        // Ignoring "too_many", error tests are not supported yet.
        mod test_type {
            #[allow(unused)]
            use super::rsass;
            // Ignoring "module", error tests are not supported yet.
            // Ignoring "name", error tests are not supported yet.
        }
    }
    // Ignoring "conflict", error tests are not supported yet.
    // Ignoring "division", error tests are not supported yet.
    // Ignoring "function_exists", error tests are not supported yet.
    mod module {
        #[allow(unused)]
        use super::rsass;
        // Ignoring "and_css", error tests are not supported yet.
        // Ignoring "built_in_but_not_loaded", error tests are not supported yet.
        // Ignoring "dash_sensitive", error tests are not supported yet.
        // Ignoring "non_existent", error tests are not supported yet.
        // Ignoring "undefined", error tests are not supported yet.
    }
    // Ignoring "non_existent", error tests are not supported yet.
    mod through_forward {
        #[allow(unused)]
        use super::rsass;
        // Ignoring "hide", error tests are not supported yet.
        // Ignoring "show", error tests are not supported yet.
    }
}
// From "sass-spec/spec/core_functions/meta/get_function/meta.hrx"
// From "sass-spec/spec/core_functions/meta/get_function/meta.hrx"
// How function values interact with the other meta functions.
mod meta {
    #[allow(unused)]
    use super::rsass;
    #[test]
    fn inspect() {
        assert_eq!(
            rsass(
                "a {b: inspect(get-function(lighten))};\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: get-function(\"lighten\");\
            \n}\
            \n"
        );
    }
    #[test]
    fn type_of() {
        assert_eq!(
            rsass(
                "a {b: type-of(get-function(lighten))};\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: function;\
            \n}\
            \n"
        );
    }
}
// From "sass-spec/spec/core_functions/meta/get_function/same_module.hrx"
// From "sass-spec/spec/core_functions/meta/get_function/same_module.hrx"
// `get-function()` looked up within the defining stylesheet itself.
mod same_module {
    #[allow(unused)]
    use super::rsass;
    #[test]
    fn built_in() {
        assert_eq!(
            rsass(
                "$lighten-fn: get-function(lighten);\
                \n\
                \na {b: call($lighten-fn, red, 30%)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: #ff9999;\
            \n}\
            \n"
        );
    }
    // Function names treat `-` and `_` as interchangeable in both directions.
    mod dash_insensitive {
        #[allow(unused)]
        use super::rsass;
        #[test]
        fn dash_to_underscore() {
            assert_eq!(
                rsass(
                    "@function add_two($v) {@return $v + 2}\
                    \n\
                    \na {b: call(get-function(add-two), 10)}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: 12;\
                \n}\
                \n"
            );
        }
        #[test]
        fn underscore_to_dash() {
            assert_eq!(
                rsass(
                    "@function add-two($v) {@return $v + 2}\
                    \n\
                    \na {b: call(get-function(add_two), 10)}\
                    \n"
                )
                .unwrap(),
                "a {\
                \n  b: 12;\
                \n}\
                \n"
            );
        }
    }
    #[test]
    fn plain_css() {
        // `$css: true` yields a plain-CSS function that is emitted verbatim.
        assert_eq!(
            rsass(
                "$sass-fn: get-function(lighten);\
                \n$css-fn: get-function(lighten, $css: true);\
                \n\
                \na {\
                \n  sass-fn: call($sass-fn, red, 30%);\
                \n  css-fn: call($css-fn, red, 30%);\
                \n}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  sass-fn: #ff9999;\
            \n  css-fn: lighten(red, 30%);\
            \n}\
            \n"
        );
    }
    #[test]
    fn redefined() {
        assert_eq!(
            rsass(
                "@function add-two($v) {@return $v + 2}\
                \n$add-two-fn: get-function(add-two);\
                \n\
                \n// The function returned by `get-function()` is locked in place when it\'s\
                \n// called. Redefining the function after the fact shouldn\'t affect the stored\
                \n// value.\
                \n@function add-two($v) {@error \"Should not be called\"}\
                \n\
                \na {b: call($add-two-fn, 10)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 12;\
            \n}\
            \n"
        );
    }
    #[test]
    #[ignore] // unexepected error
    fn through_import() {
        assert_eq!(
            rsass(
                "@import \"other\";\
                \na {b: call(get-function(add-two), 10)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 12;\
            \n}\
            \n"
        );
    }
    #[test]
    fn user_defined() {
        assert_eq!(
            rsass(
                "@function add-two($v) {@return $v + 2}\
                \n$add-two-fn: get-function(add-two);\
                \n\
                \na {b: call($add-two-fn, 10)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 12;\
            \n}\
            \n"
        );
    }
}
// From "sass-spec/spec/core_functions/meta/get_function/scope.hrx"
// From "sass-spec/spec/core_functions/meta/get_function/scope.hrx"
// Scoping rules: lookup uses the innermost definition, and a captured
// reference survives its defining scope.
mod scope {
    #[allow(unused)]
    use super::rsass;
    #[test]
    fn captures_inner_scope() {
        assert_eq!(
            rsass(
                "@function add-two($v) {@error \"Should not be called\"}\
                \n.scope1 {\
                \n  @function add-two($v) {@error \"Should not be called\"}\
                \n  .scope2 {\
                \n    @function add-two($v) {@error \"Should not be called\"}\
                \n    .scope3 {\
                \n      @function add-two($v) {@return $v + 2}\
                \n\
                \n      // Like a normal function call, get-function() will always use the\
                \n      // innermost definition of a function.\
                \n      a: call(get-function(add-two), 10);\
                \n    }\
                \n  }\
                \n}\
                \n"
            )
            .unwrap(),
            ".scope1 .scope2 .scope3 {\
            \n  a: 12;\
            \n}\
            \n"
        );
    }
    #[test]
    fn stores_local_scope() {
        assert_eq!(
            rsass(
                "$add-two-fn: null;\
                \n\
                \n.scope {\
                \n  @function add-two($v) {@return $v + 2}\
                \n\
                \n  // This function reference will still refer to this nested `add-two` function\
                \n  // even when it goes out of scope.\
                \n  $add-two-fn: get-function(add-two) !global;\
                \n}\
                \n\
                \na {b: call($add-two-fn, 10)}\
                \n"
            )
            .unwrap(),
            "a {\
            \n  b: 12;\
            \n}\
            \n"
        );
    }
}
|
use crate::endpoint::TcpAccept;
use indexmap::IndexSet;
use linkerd_app_core::{svc::stack::Predicate, Error};
use std::sync::Arc;
/// A connection policy that fails accepted connections targeting one of the
/// configured ports when the peer presented no identity (see the
/// `Predicate<TcpAccept>` impl for the enforcement logic).
#[derive(Clone, Debug)]
pub struct RequireIdentityForPorts {
    // Ports on which a peer identity is mandatory; shared cheaply via `Arc`.
    ports: Arc<IndexSet<u16>>,
}
/// Error returned when a port requires a peer identity but none was presented.
#[derive(Debug)]
pub struct IdentityRequired(());
impl<T: IntoIterator<Item = u16>> From<T> for RequireIdentityForPorts {
    /// Builds the policy from any iterable of port numbers.
    fn from(ports: T) -> Self {
        let ports = Arc::new(ports.into_iter().collect());
        Self { ports }
    }
}
impl Predicate<TcpAccept> for RequireIdentityForPorts {
    type Request = TcpAccept;

    /// Passes the accepted connection through unless its target port requires
    /// an identity and the peer did not present one, in which case an
    /// `IdentityRequired` error is returned.
    fn check(&mut self, meta: TcpAccept) -> Result<TcpAccept, Error> {
        let port = meta.target_addr.port();
        let id_required = self.ports.contains(&port);
        tracing::debug!(%port, peer.id = ?meta.peer_id, %id_required);
        if id_required && meta.peer_id.is_none() {
            return Err(IdentityRequired(()).into());
        }
        Ok(meta)
    }
}
impl std::fmt::Display for IdentityRequired {
    /// Human-readable error text.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("identity required")
    }
}
// Marker impl: enables the `.into()` conversion (in `check`) from
// `IdentityRequired` into the boxed `Error` type used by the stack.
impl std::error::Error for IdentityRequired {}
|
use crate::{
ec_cycle_pcd::ECCyclePCDConfig,
variable_length_crh::{constraints::VariableLengthCRHGadget, VariableLengthCRH},
PCDPredicate,
};
use ark_crypto_primitives::snark::{FromFieldElementsGadget, SNARKGadget, SNARK};
use ark_ff::PrimeField;
use ark_r1cs_std::{
alloc::AllocVar, bits::boolean::Boolean, bits::uint8::UInt8, fields::fp::FpVar, prelude::*,
};
use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError};
use ark_std::vec::Vec;
/// Proving-side key material for the EC-cycle PCD scheme: CRH parameters plus
/// SNARK keys for the circuits on both curves of the cycle.
pub struct ECCyclePCDPK<
    MainField: PrimeField,
    HelpField: PrimeField,
    IC: ECCyclePCDConfig<MainField, HelpField>,
> {
    /// Parameters of the variable-length CRH (used in-circuit to hash the
    /// help vk and messages).
    pub crh_pp: <IC::CRH as VariableLengthCRH<MainField>>::Parameters,
    /// Proving key for the main-curve SNARK.
    pub main_pk: <IC::MainSNARK as SNARK<MainField>>::ProvingKey,
    /// Processed main-curve verifying key (embedded in `HelpCircuit`).
    pub main_pvk: <IC::MainSNARK as SNARK<MainField>>::ProcessedVerifyingKey,
    /// Proving key for the help-curve SNARK.
    pub help_pk: <IC::HelpSNARK as SNARK<HelpField>>::ProvingKey,
    /// Help-curve verifying key (witnessed inside `MainCircuit`).
    pub help_vk: <IC::HelpSNARK as SNARK<HelpField>>::VerifyingKey,
}
impl<MainField: PrimeField, HelpField: PrimeField, IC: ECCyclePCDConfig<MainField, HelpField>> Clone
    for ECCyclePCDPK<MainField, HelpField, IC>
{
    /// Deep-copies every key component.
    /// (Manual impl: a derive would place unwanted bounds on the generics.)
    fn clone(&self) -> Self {
        let crh_pp = self.crh_pp.clone();
        let main_pk = self.main_pk.clone();
        let main_pvk = self.main_pvk.clone();
        let help_pk = self.help_pk.clone();
        let help_vk = self.help_vk.clone();
        Self {
            crh_pp,
            main_pk,
            main_pvk,
            help_pk,
            help_vk,
        }
    }
}
/// Verifier-side key material: the CRH parameters and the help-curve
/// verifying key (sufficient because help proofs attest to main proofs).
pub struct ECCyclePCDVK<
    MainField: PrimeField,
    HelpField: PrimeField,
    IC: ECCyclePCDConfig<MainField, HelpField>,
> {
    /// Parameters of the variable-length CRH.
    pub crh_pp: <IC::CRH as VariableLengthCRH<MainField>>::Parameters,
    /// Help-curve verifying key.
    pub help_vk: <IC::HelpSNARK as SNARK<HelpField>>::VerifyingKey,
}
impl<MainField: PrimeField, HelpField: PrimeField, IC: ECCyclePCDConfig<MainField, HelpField>> Clone
    for ECCyclePCDVK<MainField, HelpField, IC>
{
    /// Deep-copies both components.
    /// (Manual impl: a derive would place unwanted bounds on the generics.)
    fn clone(&self) -> Self {
        let crh_pp = self.crh_pp.clone();
        let help_vk = self.help_vk.clone();
        Self { crh_pp, help_vk }
    }
}
/// Placeholder circuit whose only job is to allocate a given number of public
/// inputs; used to generate dummy keys and proofs of the right shape for the
/// base case / setup paths of `MainCircuit` and `HelpCircuit`.
pub struct DefaultCircuit {
    /// Number of public field elements the circuit allocates.
    pub public_input_size: usize,
}
impl Clone for DefaultCircuit {
    /// `DefaultCircuit` is `Copy`, so clone is just a bitwise copy; the
    /// previous field-by-field body triggered clippy's
    /// `expl_impl_clone_on_copy` pattern.
    fn clone(&self) -> Self {
        *self
    }
}
impl Copy for DefaultCircuit {}
impl<F: PrimeField> ConstraintSynthesizer<F> for DefaultCircuit {
    /// Allocates `public_input_size` public inputs (each fixed to `F::one()`)
    /// and bit-decomposes each one, so the resulting key/proof shapes match
    /// those expected by the real circuits that embed the dummies.
    fn generate_constraints(self, cs: ConstraintSystemRef<F>) -> Result<(), SynthesisError> {
        for _ in 0..self.public_input_size {
            // `to_bits_le` adds constraints; the value itself is unused.
            let gadget = FpVar::<F>::new_input(ark_relations::ns!(cs, "alloc"), || Ok(F::one()))?;
            gadget.to_bits_le()?;
        }
        Ok(())
    }
}
/// Main-curve circuit: enforces the user PCD predicate over the current
/// message and verifies the help-curve proofs attesting to the prior messages.
pub struct MainCircuit<
    MainField: PrimeField,
    HelpField: PrimeField,
    IC: ECCyclePCDConfig<MainField, HelpField>,
    P: PCDPredicate<MainField>,
> {
    /// CRH parameters, baked into the circuit as constants.
    pub crh_pp: <IC::CRH as VariableLengthCRH<MainField>>::Parameters,
    /// The user-supplied PCD predicate to enforce.
    pub predicate: P,
    /// Public input: H(H(help_vk) || msg). `None` during setup.
    pub input_hash: Option<<IC::CRH as VariableLengthCRH<MainField>>::Output>,
    /// Help-curve verifying key (witnessed); `None` during setup, in which
    /// case a dummy is substituted.
    pub help_vk: Option<<IC::HelpSNARK as SNARK<HelpField>>::VerifyingKey>,
    /// Current message. `None` during setup.
    pub msg: Option<P::Message>,
    /// Local witness for the predicate. `None` during setup.
    pub witness: Option<P::LocalWitness>,
    /// Prior messages; ignored (defaults used) when the base-case bit is set.
    pub prior_msgs: Vec<P::Message>,
    /// Help-SNARK proofs for each prior message; dummies in the base case.
    pub prior_proofs: Vec<<IC::HelpSNARK as SNARK<HelpField>>::Proof>,
    /// `Some(true)` marks the base case; `None` during setup.
    pub base_case_bit: Option<bool>,
}
impl<
        MainField: PrimeField,
        HelpField: PrimeField,
        IC: ECCyclePCDConfig<MainField, HelpField>,
        P: PCDPredicate<MainField>,
    > ConstraintSynthesizer<MainField> for MainCircuit<MainField, HelpField, IC, P>
{
    /// Synthesizes the main-curve constraints:
    /// 1. allocate the public input hash and all witnesses;
    /// 2. recompute `H(H(help_vk) || msg)` in-circuit and enforce equality
    ///    with the public input;
    /// 3. enforce the user predicate on (msg, witness, prior_msgs);
    /// 4. verify every prior help proof, unless the base-case bit is set.
    fn generate_constraints(
        self,
        cs: ConstraintSystemRef<MainField>,
    ) -> Result<(), SynthesisError> {
        // Outside the base case the prover must supply exactly PRIOR_MSG_LEN
        // prior messages and proofs.
        assert!(self.base_case_bit != Some(false) || self.prior_msgs.len() == P::PRIOR_MSG_LEN);
        assert!(self.base_case_bit != Some(false) || self.prior_proofs.len() == P::PRIOR_MSG_LEN);
        /*
         * allocation
         */
        // Public input: the hash binding the help vk and the current message.
        let input_hash_gadget =
            <IC::CRHGadget as VariableLengthCRHGadget<IC::CRH, MainField>>::OutputVar::new_input(
                ark_relations::ns!(cs, "alloc#x"),
                || Ok(self.input_hash.clone().unwrap_or_default()),
            )?;
        // Field-element form of the hash; only its (repacked) length is needed
        // to size the placeholder circuit below.
        let main_public_input =
            IC::CRH::convert_output_to_field_elements(self.input_hash.unwrap_or_default()).unwrap();
        let help_public_input = <IC::MainSNARKGadget as SNARKGadget<
            MainField,
            HelpField,
            IC::MainSNARK,
        >>::InputVar::repack_input(&main_public_input);
        // Placeholder help-SNARK vk/proof of the right shape, standing in for
        // the real ones during setup and in the base case.
        // NOTE(review): `ark_std::test_rng()` is a fixed-seed RNG — presumably
        // intentional so all parties derive identical defaults; confirm this
        // determinism is acceptable outside tests.
        let default_circ = DefaultCircuit {
            public_input_size: help_public_input.len(),
        };
        let mut default_rng = ark_std::test_rng();
        let (default_pk, default_vk) =
            IC::HelpSNARK::circuit_specific_setup(default_circ, &mut default_rng).unwrap();
        let default_proof =
            <IC::HelpSNARK as SNARK<HelpField>>::prove(&default_pk, default_circ, &mut default_rng)
                .unwrap();
        // CRH parameters enter the circuit as constants.
        let crh_pp_gadget = <IC::CRHGadget as VariableLengthCRHGadget<
            IC::CRH,
            MainField,
        >>::ParametersVar::new_constant(
            ark_relations::ns!(cs, "alloc_crh_for_cycle_ivc"),
            self.crh_pp.clone(),
        )?;
        // The help vk is a *witness*: the public input only commits to its hash.
        let help_vk = self.help_vk.unwrap_or(default_vk);
        let help_vk_gadget = <IC::HelpSNARKGadget as SNARKGadget<
            HelpField,
            MainField,
            IC::HelpSNARK,
        >>::new_verification_key_unchecked(
            ark_relations::ns!(cs, "alloc#vk"),
            || Ok(help_vk),
            AllocationMode::Witness,
        )?;
        let msg = self.msg.unwrap_or_default();
        let msg_gadget = P::MessageVar::new_witness(ark_relations::ns!(cs, "alloc_z"), || Ok(msg))?;
        let witness = self.witness.unwrap_or_default();
        let witness_gadget =
            P::LocalWitnessVar::new_witness(ark_relations::ns!(cs, "alloc_z_loc"), || Ok(witness))?;
        // Prior messages: defaults in the (possible) base case, real ones otherwise.
        let mut prior_msg_gadgets = Vec::new();
        if self.base_case_bit != Some(false) {
            let default_msg = P::Message::default();
            for _ in 0..P::PRIOR_MSG_LEN {
                prior_msg_gadgets.push(P::MessageVar::new_witness(
                    ark_relations::ns!(cs, "alloc_z_in"),
                    || Ok(default_msg.clone()),
                )?);
            }
        } else {
            for prior_msg in self.prior_msgs.iter() {
                prior_msg_gadgets.push(P::MessageVar::new_witness(
                    ark_relations::ns!(cs, "alloc_z_in"),
                    || Ok(prior_msg),
                )?);
            }
        }
        // Prior proofs: dummy proofs in the base case, real ones otherwise.
        let mut prior_proof_gadgets = Vec::new();
        if self.base_case_bit != Some(false) {
            for _ in 0..P::PRIOR_MSG_LEN {
                prior_proof_gadgets.push(<IC::HelpSNARKGadget as SNARKGadget<
                    HelpField,
                    MainField,
                    IC::HelpSNARK,
                >>::ProofVar::new_witness(
                    ark_relations::ns!(cs, "alloc_prior_proof"),
                    || Ok(default_proof.clone()),
                )?);
            }
        } else {
            for prior_proof in self.prior_proofs.iter() {
                prior_proof_gadgets.push(<IC::HelpSNARKGadget as SNARKGadget<
                    HelpField,
                    MainField,
                    IC::HelpSNARK,
                >>::ProofVar::new_witness(
                    ark_relations::ns!(cs, "alloc_prior_proof"),
                    || Ok(prior_proof),
                )?);
            }
        }
        let base_case_bit = self.base_case_bit.unwrap_or_default();
        let b_base_gadget =
            Boolean::new_witness(ark_relations::ns!(cs, "alloc_b_base"), || Ok(base_case_bit))?;
        /*
         * compute vk hash
         */
        // H(vk): commits to the witnessed help verifying key.
        let help_vk_bytes_gadget = help_vk_gadget.to_bytes()?;
        let mut committed_vk = Vec::<UInt8<MainField>>::new();
        for byte in &help_vk_bytes_gadget {
            committed_vk.push(byte.clone());
        }
        let vk_hash_gadget = IC::CRHGadget::check_evaluation_gadget(&crh_pp_gadget, &committed_vk)?;
        let vk_hash_bytes_gadget = vk_hash_gadget.to_bytes()?;
        /*
         * check input
         */
        // Enforce input_hash == H(H(vk) || msg), binding vk and message to the
        // public input.
        let msg_bytes_gadget = msg_gadget.to_bytes()?;
        let mut committed_input = Vec::<UInt8<MainField>>::new();
        for byte in &vk_hash_bytes_gadget {
            committed_input.push(byte.clone());
        }
        for byte in &msg_bytes_gadget {
            committed_input.push(byte.clone());
        }
        let input_hash_supposed_gadget =
            IC::CRHGadget::check_evaluation_gadget(&crh_pp_gadget, &committed_input)?;
        input_hash_supposed_gadget.enforce_equal(&input_hash_gadget)?;
        /*
         * check the predicate
         */
        self.predicate.generate_constraints(
            ark_relations::ns!(cs, "check_predicate").cs(),
            &msg_gadget,
            &witness_gadget,
            &prior_msg_gadgets,
            &b_base_gadget,
        )?;
        /*
         * check each prior proof
         */
        // Each prior proof is verified against H(H(vk) || prior_msg) — the
        // same public-input format this circuit itself exposes, closing the
        // recursion cycle.
        let mut prior_proofs_verified = Boolean::Constant(true);
        for (prior_msg_gadget, prior_proof_gadget) in
            prior_msg_gadgets.iter().zip(prior_proof_gadgets.iter())
        {
            let prior_msg_bytes_gadget = prior_msg_gadget.to_bytes()?;
            let mut committed_prior_input = Vec::<UInt8<MainField>>::new();
            for byte in vk_hash_bytes_gadget.iter() {
                committed_prior_input.push(byte.clone());
            }
            for byte in &prior_msg_bytes_gadget {
                committed_prior_input.push(byte.clone());
            }
            let prior_input_hash_gadget =
                IC::CRHGadget::check_evaluation_gadget(&crh_pp_gadget, &committed_prior_input)?;
            let prior_input_hash_gadget_field_gadgets =
                IC::CRHGadget::convert_output_to_field_gadgets(&prior_input_hash_gadget)?;
            let prior_input_hash_converted_gadget = <IC::HelpSNARKGadget as SNARKGadget<
                HelpField,
                MainField,
                IC::HelpSNARK,
            >>::InputVar::from_field_elements(
                &prior_input_hash_gadget_field_gadgets
            )?;
            let verification_result =
                <IC::HelpSNARKGadget as SNARKGadget<HelpField, MainField, IC::HelpSNARK>>::verify(
                    &help_vk_gadget,
                    &prior_input_hash_converted_gadget,
                    &prior_proof_gadget,
                )?;
            prior_proofs_verified = prior_proofs_verified.and(&verification_result)?;
        }
        // Either this is the base case, or every prior proof verified.
        b_base_gadget
            .or(&prior_proofs_verified)?
            .enforce_equal(&Boolean::constant(true))?;
        Ok(())
    }
}
/// Help-curve circuit: verifies a single main-curve SNARK proof over the
/// shared input hash, with the processed main vk baked in as a constant.
pub struct HelpCircuit<
    MainField: PrimeField,
    HelpField: PrimeField,
    IC: ECCyclePCDConfig<MainField, HelpField>,
> {
    /// Processed main-curve verifying key (circuit constant).
    pub main_pvk: <IC::MainSNARK as SNARK<MainField>>::ProcessedVerifyingKey,
    /// The hash serving as public input. `None` during setup.
    pub input_hash: Option<<IC::CRH as VariableLengthCRH<MainField>>::Output>,
    /// Main-curve proof to verify; `None` during setup (dummy substituted).
    pub main_proof: Option<<IC::MainSNARK as SNARK<MainField>>::Proof>,
}
impl<MainField: PrimeField, HelpField: PrimeField, IC: ECCyclePCDConfig<MainField, HelpField>>
    ConstraintSynthesizer<HelpField> for HelpCircuit<MainField, HelpField, IC>
{
    /// Synthesizes the help-curve constraints: verify one main-curve proof
    /// over the input hash and require the verification bit to be `true`.
    fn generate_constraints(
        self,
        cs: ConstraintSystemRef<HelpField>,
    ) -> Result<(), SynthesisError> {
        let input_hash = self.input_hash.unwrap_or_default();
        // Number of main-field elements the hash occupies — sizes the
        // placeholder circuit below.
        let main_public_input_num_of_field_elements =
            IC::CRH::convert_output_to_field_elements(input_hash.clone())
                .unwrap()
                .len();
        // Placeholder main-curve proof of the right shape for setup, when no
        // real proof is supplied.
        // NOTE(review): `ark_std::test_rng()` is fixed-seed — presumably
        // intentional so all parties derive the same dummy artifacts; confirm.
        let default_circ = DefaultCircuit {
            public_input_size: main_public_input_num_of_field_elements,
        };
        let mut default_rng = ark_std::test_rng();
        let (default_pk, _) = <IC::MainSNARK as SNARK<MainField>>::circuit_specific_setup(
            default_circ,
            &mut default_rng,
        )
        .unwrap();
        let default_proof =
            <IC::MainSNARK as SNARK<MainField>>::prove(&default_pk, default_circ, &mut default_rng)
                .unwrap();
        let main_proof = self.main_proof.unwrap_or(default_proof);
        // Public input: the hash, re-exposed on the help curve.
        let hash_field_elements = IC::CRH::convert_output_to_field_elements(input_hash).unwrap();
        let input_hash_gadget = <IC::MainSNARKGadget as SNARKGadget<
            MainField,
            HelpField,
            IC::MainSNARK,
        >>::InputVar::new_input(
            ark_relations::ns!(cs, "verifier"),
            || Ok(hash_field_elements),
        )?;
        // The processed main vk is a hard-wired constant of this circuit.
        let main_pvk_gadget = <IC::MainSNARKGadget as SNARKGadget<
            MainField,
            HelpField,
            IC::MainSNARK,
        >>::ProcessedVerifyingKeyVar::new_constant(
            ark_relations::ns!(cs, "alloc_pvk"),
            self.main_pvk,
        )?;
        let main_proof_gadget = <IC::MainSNARKGadget as SNARKGadget<
            MainField,
            HelpField,
            IC::MainSNARK,
        >>::ProofVar::new_witness(
            ark_relations::ns!(cs, "alloc_pi_alpha"), || Ok(main_proof)
        )?;
        // The in-circuit verification result must be exactly `true`.
        <IC::MainSNARKGadget as SNARKGadget<
            MainField,
            HelpField,
            IC::MainSNARK,
        >>::verify_with_processed_vk(
            &main_pvk_gadget,
            &input_hash_gadget,
            &main_proof_gadget,
        )?.enforce_equal(&Boolean::Constant(true))?;
        Ok(())
    }
}
|
// Copyright (c) 2021, Roel Schut. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use std::borrow::Borrow;
use gdnative::prelude::*;
pub const SIGNAL: &str = "instance_node";
/// Helper trait for node classes that emit the `instance_node` signal.
pub trait InstanceNodeEmitter<T>
where T: NativeClass {
    /// Register the `instance_node` signal (scene + spawn location) on the builder.
    fn add_instance_node_signal(builder: &ClassBuilder<T>) {
        // A default `PackedScene` variant supplies both the default value and
        // the variant type for the "node" argument.
        let scene_default = Variant::from_object::<Ref<PackedScene>>(PackedScene::new().into_shared());
        let node_arg = SignalArgument {
            name: "node",
            default: scene_default.clone(),
            export_info: ExportInfo::new(scene_default.get_type()),
            usage: PropertyUsage::DEFAULT,
        };
        let location_arg = SignalArgument {
            name: "location",
            default: Variant::from_vector2(Vector2::zero().borrow()),
            export_info: ExportInfo::new(VariantType::Vector2),
            usage: PropertyUsage::DEFAULT,
        };
        builder.add_signal(Signal {
            name: SIGNAL,
            args: &[node_arg, location_arg],
        });
    }
    /// Emit the `instance_node` signal with the scene to instance and its location.
    fn emit_instance_node(&self, owner: &Object, node: &Ref<PackedScene>, location: Vector2) {
        owner.emit_signal(SIGNAL, &[
            Variant::from_object(node),
            Variant::from_vector2(location.borrow()),
        ]);
    }
}
|
/// Demonstrates storing an ASCII character's code point in unsigned integers
/// of various widths. The original reassigned immutable bindings and assigned
/// `char` values to integer types, which does not compile; each binding is now
/// mutable and the character is converted to the annotated type.
fn main() {
    {
        let mut aa: u8 = 3;
        // A byte literal (`b'a'`) already has type u8.
        aa = b'a';
        println!("{:?}", aa);
        // Same as `aa`, except the type comes from the literal's suffix.
        let mut bb = 3u8;
        bb = b'a';
        println!("{:?}", bb);
    }
    {
        let mut aa: u32 = 3;
        // `as u32` converts the char to its Unicode scalar value.
        aa = 'a' as u32;
        println!("{:?}", aa);
        let mut bb = 3u32;
        bb = 'a' as u32;
        println!("{:?}", bb);
    }
    {
        let mut aa: u128 = 3;
        aa = 'a' as u128;
        println!("{:?}", aa);
        let mut bb = 3u128;
        bb = 'a' as u128;
        println!("{:?}", bb);
    }
}
|
// Copyright 2019 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Traits for handling value types.
use std::rc::Rc;
use std::sync::Arc;
pub use druid_derive_data::Data;
/// A trait used to represent value types.
///
/// These should be cheap to compare and cheap to clone.
///
/// See <https://sinusoid.es/lager/model.html#id2> for a well-written
/// explanation of value types (albeit within a C++ context).
///
/// ## Derive macro
///
/// For simple types where each field implements `Data`, one should
/// use the `derive(Data)` attribute on the type in question instead of
/// writing the implementation by hand.
///
/// ```
/// # use std::sync::Arc;
/// # use druid::Data;
/// #[derive(Clone, Data)]
/// enum Foo {
/// Case1(i32, f32),
/// Case2 { a: String, b: Arc<i32> }
/// }
/// ```
///
/// Note that in the case of a union that only contains tags without
/// fields, the implementation that is generated checks for
/// equality. Therefore, such types must also implement the [`Eq`]
/// trait.
pub trait Data: Clone {
    /// Determine whether two values are the same.
    ///
    /// This is intended to always be a fast operation. If it returns
    /// `true`, the two values *must* be equal, but two equal values
    /// need not be considered the same here, as will often be the
    /// case when two copies are separately allocated.
    ///
    /// Note that "equal" above has a slightly different meaning than
    /// `PartialEq`, for example two floating point NaN values should
    /// be considered equal when they have the same bit representation.
    ///
    /// For types whose fields all implement [`Data`], this method can be
    /// generated with `#[derive(Data)]` (see the trait-level docs).
    fn same(&self, other: &Self) -> bool;
}
/// An impl of `Data` suitable for simple types.
///
/// The `same` method is implemented with equality, so the type should
/// implement `Eq` at least.
macro_rules! impl_data_simple {
    // `$t` must implement `PartialEq`; `same` is plain equality.
    ($t:ty) => {
        impl Data for $t {
            fn same(&self, other: &Self) -> bool {
                self == other
            }
        }
    };
}
// Primitive integer, char, bool and String values are cheap to compare directly.
impl_data_simple!(i8);
impl_data_simple!(i16);
impl_data_simple!(i32);
impl_data_simple!(i64);
impl_data_simple!(isize);
impl_data_simple!(u8);
impl_data_simple!(u16);
impl_data_simple!(u32);
impl_data_simple!(u64);
impl_data_simple!(usize);
impl_data_simple!(char);
impl_data_simple!(bool);
impl_data_simple!(String);
impl Data for f32 {
    /// Bit-level comparison, so NaNs with identical bit patterns are "same".
    fn same(&self, other: &Self) -> bool {
        let lhs = self.to_bits();
        let rhs = other.to_bits();
        lhs == rhs
    }
}
impl Data for f64 {
    /// Bit-level comparison, so NaNs with identical bit patterns are "same".
    fn same(&self, other: &Self) -> bool {
        let lhs = self.to_bits();
        let rhs = other.to_bits();
        lhs == rhs
    }
}
// Pointer identity: two `Arc`s are the same only when they share an allocation.
// The pointed-to value is never inspected, so this works for `T: ?Sized` and is O(1).
impl<T: ?Sized> Data for Arc<T> {
    fn same(&self, other: &Self) -> bool {
        Arc::ptr_eq(self, other)
    }
}
// Pointer identity, mirroring the `Arc` impl above in spirit: same allocation
// means same value, and the contents are never inspected.
impl<T: ?Sized> Data for Rc<T> {
    fn same(&self, other: &Self) -> bool {
        Rc::ptr_eq(self, other)
    }
}
impl<T: Data> Data for Option<T> {
    /// Two `Option`s are the same when both are `None`, or both are `Some`
    /// holding values that are the same.
    fn same(&self, other: &Self) -> bool {
        match (self.as_ref(), other.as_ref()) {
            (Some(lhs), Some(rhs)) => lhs.same(rhs),
            (None, None) => true,
            (Some(_), None) | (None, Some(_)) => false,
        }
    }
}
// Forwarding impl: borrowed values are the same exactly when their referents are.
impl<T: Data> Data for &T {
    fn same(&self, other: &Self) -> bool {
        Data::same(*self, *other)
    }
}
impl<T: Data, U: Data> Data for Result<T, U> {
    /// Matching variants compare their payloads; mixed variants are never the same.
    fn same(&self, other: &Self) -> bool {
        match (self, other) {
            (Ok(lhs), Ok(rhs)) => lhs.same(rhs),
            (Err(lhs), Err(rhs)) => lhs.same(rhs),
            (Ok(_), Err(_)) | (Err(_), Ok(_)) => false,
        }
    }
}
// `()` has exactly one value, so any two instances are trivially the same.
impl Data for () {
    fn same(&self, _other: &Self) -> bool {
        true
    }
}
// Tuples (up to arity 6) are the same when their components are pairwise the same.
impl<T0: Data> Data for (T0,) {
    fn same(&self, other: &Self) -> bool {
        self.0.same(&other.0)
    }
}
impl<T0: Data, T1: Data> Data for (T0, T1) {
    fn same(&self, other: &Self) -> bool {
        self.0.same(&other.0) && self.1.same(&other.1)
    }
}
impl<T0: Data, T1: Data, T2: Data> Data for (T0, T1, T2) {
    fn same(&self, other: &Self) -> bool {
        self.0.same(&other.0) && self.1.same(&other.1) && self.2.same(&other.2)
    }
}
impl<T0: Data, T1: Data, T2: Data, T3: Data> Data for (T0, T1, T2, T3) {
    fn same(&self, other: &Self) -> bool {
        self.0.same(&other.0)
            && self.1.same(&other.1)
            && self.2.same(&other.2)
            && self.3.same(&other.3)
    }
}
impl<T0: Data, T1: Data, T2: Data, T3: Data, T4: Data> Data for (T0, T1, T2, T3, T4) {
    fn same(&self, other: &Self) -> bool {
        self.0.same(&other.0)
            && self.1.same(&other.1)
            && self.2.same(&other.2)
            && self.3.same(&other.3)
            && self.4.same(&other.4)
    }
}
impl<T0: Data, T1: Data, T2: Data, T3: Data, T4: Data, T5: Data> Data for (T0, T1, T2, T3, T4, T5) {
    fn same(&self, other: &Self) -> bool {
        self.0.same(&other.0)
            && self.1.same(&other.1)
            && self.2.same(&other.2)
            && self.3.same(&other.3)
            && self.4.same(&other.4)
            && self.5.same(&other.5)
    }
}
|
pub mod leisure_parameters;
pub use leisure_parameters::LeisureParameters;
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsApiStepSubtype : The subtype of the Synthetic multistep API test step, currently only supporting `http`.
/// The subtype of the Synthetic multistep API test step, currently only supporting `http`.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SyntheticsApiStepSubtype {
    // Serialized as the string "http" on the wire.
    #[serde(rename = "http")]
    HTTP,
}
// Implementing `Display` (rather than `ToString` directly) still provides
// `to_string()` via the std blanket impl, and additionally enables `{}`
// formatting; this is the idiom clippy recommends.
impl std::fmt::Display for SyntheticsApiStepSubtype {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::HTTP => f.write_str("http"),
        }
    }
}
|
#![forbid(unsafe_code)]
use crate::cli::{help, SudoAction, SudoOptions};
use crate::common::{resolve::resolve_current_user, Context, Error};
use crate::log::dev_info;
use crate::system::timestamp::RecordScope;
use crate::system::{time::Duration, timestamp::SessionRecordFile, Process};
use pam::PamAuthenticator;
use pipeline::{Pipeline, PolicyPlugin};
use std::os::unix::fs::MetadataExt;
use std::path::Path;
use std::{env, fs};
mod diagnostic;
mod pam;
mod pipeline;
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Pick the sudoers file to use: the sudo-rs specific `/etc/sudoers-rs` when
/// present, otherwise the traditional `/etc/sudoers`.
fn candidate_sudoers_file() -> &'static Path {
    let rs_specific: &'static Path = Path::new("/etc/sudoers-rs");
    if rs_specific.exists() {
        dev_info!("Running with /etc/sudoers-rs file");
        return rs_specific;
    }
    dev_info!("Running with /etc/sudoers file");
    Path::new("/etc/sudoers")
}
/// `PolicyPlugin` implementation that sources its policy from the sudoers file.
#[derive(Default)]
pub(crate) struct SudoersPolicy {}
impl PolicyPlugin for SudoersPolicy {
    type PreJudgementPolicy = crate::sudoers::Sudoers;
    type Policy = crate::sudoers::Judgement;
    /// Parse the sudoers file. Syntax errors are reported as diagnostics but
    /// do not abort; only failing to open/parse the file at all is fatal.
    fn init(&mut self) -> Result<Self::PreJudgementPolicy, Error> {
        let sudoers_path = candidate_sudoers_file();
        let (sudoers, syntax_errors) = crate::sudoers::Sudoers::open(sudoers_path)
            .map_err(|e| Error::Configuration(format!("{e}")))?;
        for crate::sudoers::Error(pos, error) in syntax_errors {
            diagnostic::diagnostic!("{error}", sudoers_path @ pos);
        }
        Ok(sudoers)
    }
    /// Evaluate the parsed policy against the requesting user, host, target
    /// user/group and the command being run.
    fn judge(
        &mut self,
        pre: Self::PreJudgementPolicy,
        context: &Context,
    ) -> Result<Self::Policy, Error> {
        Ok(pre.check(
            &context.current_user,
            &context.hostname,
            crate::sudoers::Request {
                user: &context.target_user,
                group: &context.target_group,
                command: &context.command.command,
                arguments: &context.command.arguments,
            },
        ))
    }
}
/// Run the sudo pipeline: set up logging, verify the binary is installed
/// setuid-root, parse the CLI options and dispatch on the requested action.
fn sudo_process() -> Result<(), Error> {
    crate::log::SudoLogger::new("sudo: ").into_global_logger();
    dev_info!("development logs are enabled");
    // Refuse to run unless installed setuid-root (see `self_check`).
    self_check()?;
    let pipeline = Pipeline {
        policy: SudoersPolicy::default(),
        authenticator: PamAuthenticator::new_cli(),
    };
    // parse cli options
    match SudoOptions::from_env() {
        Ok(options) => match options.action {
            SudoAction::Help => {
                eprintln_ignore_io_error!("{}", help::long_help_message());
                std::process::exit(0);
            }
            SudoAction::Version => {
                eprintln_ignore_io_error!("sudo-rs {VERSION}");
                std::process::exit(0);
            }
            SudoAction::RemoveTimestamp => {
                // Clear the cached-authentication records for the invoking user.
                let user = resolve_current_user()?;
                let mut record_file =
                    SessionRecordFile::open_for_user(&user.name, Duration::seconds(0))?;
                record_file.reset()?;
                Ok(())
            }
            SudoAction::ResetTimestamp => {
                // Disable only the record matching the current session scope, if any.
                if let Some(scope) = RecordScope::for_process(&Process::new()) {
                    let user = resolve_current_user()?;
                    let mut record_file =
                        SessionRecordFile::open_for_user(&user.name, Duration::seconds(0))?;
                    record_file.disable(scope, None)?;
                }
                Ok(())
            }
            SudoAction::Validate => pipeline.run_validate(options),
            SudoAction::Run(ref cmd) => {
                // special case for when no command is given
                if cmd.is_empty() && !options.shell && !options.login {
                    eprintln_ignore_io_error!("{}", help::USAGE_MSG);
                    std::process::exit(1);
                } else {
                    pipeline.run(options)
                }
            }
            SudoAction::List(_) => pipeline.run_list(options),
            SudoAction::Edit(_) => {
                // sudoedit is not implemented yet.
                unimplemented!();
            }
        },
        Err(e) => {
            eprintln_ignore_io_error!("{e}\n{}", help::USAGE_MSG);
            std::process::exit(1);
        }
    }
}
fn self_check() -> Result<(), Error> {
const ROOT: u32 = 0;
const SETUID_BIT: u32 = 0o4000;
let path = env::current_exe().map_err(|e| Error::IoError(None, e))?;
let metadata = fs::metadata(path).map_err(|e| Error::IoError(None, e))?;
let owned_by_root = metadata.uid() == ROOT;
let setuid_bit_is_set = metadata.mode() & SETUID_BIT != 0;
if owned_by_root && setuid_bit_is_set {
Ok(())
} else {
Err(Error::SelfCheck)
}
}
pub fn main() {
match sudo_process() {
Ok(()) => (),
Err(error) => {
if !error.is_silent() {
diagnostic::diagnostic!("{error}");
}
std::process::exit(1);
}
}
}
|
/// Print the area of a 30x50 rectangle.
fn main() {
    let width1 = 30;
    let height1 = 50;
    // Fixed formatting: the original string was missing the space before `{}`.
    println!(
        "The area of the rectangle is {} square pixels.",
        area(width1, height1)
    );
}
/// Compute the area of a `width` x `height` rectangle in square pixels.
fn area(width: u32, height: u32) -> u32 {
    height * width
}
use crate::task::Task;
use colored::*;
use std::fs::{metadata, read_dir};
use std::path::PathBuf;
use std::process::Command;
use std::str;
/// Task that links object files from `out/` into a single executable.
pub struct LinkTask {
    // Object files (`out/*.o`) passed to the linker.
    inputs: Vec<PathBuf>,
    // Path of the linked executable.
    output: PathBuf,
}
impl LinkTask {
    /// Collect every `.o` file in `out/` as a link input, targeting `out/target`.
    ///
    /// Panics if the `out/` directory does not exist or an entry cannot be read.
    pub fn new() -> LinkTask {
        let files = read_dir("out/").expect("out directory does not exist");
        let objects: Vec<PathBuf> = files
            .map(|entry| entry.expect("cannot read directory entry").path())
            // `map_or(false, ..)` covers extension-less files in one step,
            // replacing the original is_some()/expect() double lookup.
            .filter(|path| path.extension().map_or(false, |ext| ext == "o"))
            .collect();
        LinkTask {
            inputs: objects,
            output: PathBuf::from("out/target"),
        }
    }
}
impl Task for LinkTask {
    /// Link all collected object files with `g++` into `out/target`,
    /// skipping the link entirely when the output is already up to date.
    fn run(&self) {
        println!("{}", "Linking".bold());
        if !self.is_stale() {
            println!("{}", "Done".bright_green().bold());
            return;
        }
        let mut command = Command::new("g++");
        self.inputs.iter().for_each(|file| {
            command.arg(&file);
        });
        command.arg("-o").arg(&self.output);
        // Echo the full command line before running it.
        println!("{:?}", command);
        let result = command.output().expect("Link failed");
        let stderr = str::from_utf8(&result.stderr);
        println!("{}", result.status);
        println!("{:#?}", stderr);
        println!("{}", "Done".bright_green().bold());
    }
    /// A build is stale when the output is missing or older than any input.
    fn is_stale(&self) -> bool {
        let output_metadata = metadata(&self.output);
        if output_metadata.is_err() {
            // Missing output: always relink.
            return true;
        }
        let output_time = output_metadata
            .expect("Cannot read output metadata")
            .modified()
            .expect("Cannot read file modified time");
        // Stale if any input was modified after the output was produced.
        let stale = self
            .inputs
            .iter()
            .map(|x| {
                metadata(&x)
                    .expect("Cannot read metadata")
                    .modified()
                    .expect("Cannot read file modified time")
            })
            .any(|dependency_time| dependency_time > output_time);
        stale
    }
}
|
/// Parse a hex string into byte values (stored as `u32`), two characters per value.
///
/// A trailing unpaired nibble is silently dropped; panics on non-hex input.
pub fn str_to_hex_val(buf: String) -> Vec<u32> {
    let mut out: Vec<u32> = Vec::new();
    let mut acc: u32 = 0;
    let mut have_high_nibble = false;
    for ch in buf.chars() {
        let nibble = ch.to_digit(16).unwrap();
        if have_high_nibble {
            // The second character completes the byte.
            out.push(acc | nibble);
            acc = 0;
            have_high_nibble = false;
        } else {
            // The first character becomes the high nibble.
            acc = nibble << 4;
            have_high_nibble = true;
        }
    }
    out
}
/// Parse a hex string into bytes, two characters per byte.
///
/// A trailing unpaired nibble is silently dropped; panics on non-hex input
/// (via `translate_char_to_hex_val`).
pub fn str_to_hex_u8_buf(buf: &str) -> Vec<u8> {
    let mut out: Vec<u8> = Vec::new();
    let mut acc: u8 = 0;
    let mut have_high_nibble = false;
    for ch in buf.chars() {
        let nibble = translate_char_to_hex_val(ch) as u8;
        if have_high_nibble {
            // The second character completes the byte.
            out.push(acc | nibble);
            acc = 0;
            have_high_nibble = false;
        } else {
            // The first character becomes the high nibble.
            acc = nibble << 4;
            have_high_nibble = true;
        }
    }
    out
}
/// Convert a single hexadecimal digit (case-insensitive) to its numeric value.
///
/// Panics with the original error message if `x` is not a valid hex digit.
pub fn translate_char_to_hex_val(x: char) -> usize {
    // `char::to_digit(16)` accepts 0-9, a-f and A-F, replacing the
    // hand-rolled lowercase-then-match table.
    x.to_digit(16)
        .expect("Error in translating hex val to usize val") as usize
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Decoding a full 16-byte pattern must produce the exact bytes.
    /// (The original test only printed the result and asserted nothing.)
    #[test]
    pub fn test_str_to_hex_u8() {
        let input = "00112233445566778899aabbccddeeff";
        let output: Vec<u8> = str_to_hex_u8_buf(input);
        let expected: Vec<u8> = vec![
            0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd,
            0xee, 0xff,
        ];
        assert_eq!(output, expected);
    }
    /// Every hex digit must translate to its numeric value, in both cases.
    /// (The original test only printed the alphabet.)
    #[test]
    pub fn test_alpha_to_hex() {
        for (expected, ch) in "0123456789abcdef".chars().enumerate() {
            assert_eq!(translate_char_to_hex_val(ch), expected);
            assert_eq!(translate_char_to_hex_val(ch.to_ascii_uppercase()), expected);
        }
    }
}
extern crate csv;
extern crate itertools;
extern crate rayon;
#[macro_use]
extern crate serde_derive;
use std::io::BufRead;
use itertools::Itertools;
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use std::sync::mpsc;
mod paf;
mod utils;
use paf::Reader;
use utils::*;
/// Build the read-to-mapping table sequentially from a PAF file.
/// Each record contributes one interval for each read of the pair.
pub fn basic(filename: &str) -> Read2Mapping {
    let mut result: Read2Mapping = std::collections::HashMap::new();
    let input = std::io::BufReader::new(std::fs::File::open(filename).unwrap());
    for parsed in Reader::new(input).records() {
        let record = parsed.unwrap();
        let pairs = vec![
            (
                NameLen { name: record.read_a, len: record.length_a },
                Interval { begin: record.begin_a, end: record.end_a },
            ),
            (
                NameLen { name: record.read_b, len: record.length_b },
                Interval { begin: record.begin_b, end: record.end_b },
            ),
        ];
        for (key, value) in pairs {
            result.entry(key).or_insert_with(Vec::new).push(value);
        }
    }
    result
}
/// Build the read-to-mapping table in parallel: chunks of `nb_record` lines are
/// parsed on a rayon pool of `nb_thread` workers that insert directly into a
/// shared `HashMap` behind a `Mutex`.
pub fn mutex(filename: &str, nb_record: usize, nb_thread: usize) -> Read2Mapping {
    let result = Arc::new(Mutex::new(HashMap::new()));
    let file = std::io::BufReader::new(std::fs::File::open(filename).unwrap());
    let pool = rayon::ThreadPoolBuilder::new().num_threads(nb_thread).build().unwrap();
    pool.install(|| {
        rayon::scope(|s| {
            for chunk in file.lines().chunks(nb_record*1).into_iter() {
                let result_ = Arc::clone(&result);
                // Re-join the chunk's lines into one byte buffer the parser can own.
                let buffer = chunk.map(|x| x.unwrap()).collect::<Vec<String>>().join("\n").into_bytes();
                s.spawn(move |_| {
                    for r in Reader::new(buffer.as_slice()).records() {
                        let record = r.unwrap();
                        let key_a = NameLen {
                            name: record.read_a,
                            len: record.length_a,
                        };
                        let val_a = Interval {
                            begin: record.begin_a,
                            end: record.end_a,
                        };
                        let key_b = NameLen {
                            name: record.read_b,
                            len: record.length_b,
                        };
                        let val_b = Interval {
                            begin: record.begin_b,
                            end: record.end_b,
                        };
                        // Keep the critical section as small as possible:
                        // lock only for the two insertions.
                        {
                            let mut re = result_.lock().unwrap();
                            re.entry(key_a).or_insert(Vec::new()).push(val_a);
                            re.entry(key_b).or_insert(Vec::new()).push(val_b);
                        }
                    }
                });
            }
        })
    });
    // All workers are done (the scope has ended), so the Arc/Mutex can be unwrapped.
    let lock = Arc::try_unwrap(result).expect("Lock still has multiple owners");
    lock.into_inner().expect("Mutex cannot be locked")
}
/// Build the read-to-mapping table in parallel using message passing: workers
/// parse chunks of `nb_record` lines and send (key, value) pairs over an mpsc
/// channel; this thread aggregates them into the map.
pub fn message(filename: &str, nb_record: usize, nb_thread: usize) -> Read2Mapping {
    let mut result: Read2Mapping = HashMap::new();
    let file = std::io::BufReader::new(std::fs::File::open(filename).unwrap());
    let (sender, receiver) = mpsc::channel();
    let pool = rayon::ThreadPoolBuilder::new().num_threads(nb_thread).build().unwrap();
    pool.install(|| {
        rayon::scope(|s| {
            for chunk in file.lines().chunks(nb_record*1).into_iter() {
                // Re-join the chunk's lines into one byte buffer the parser can own.
                let buffer = chunk.map(|x| x.unwrap()).collect::<Vec<String>>().join("\n").into_bytes();
                let sender = sender.clone();
                s.spawn(move |_| {
                    for r in Reader::new(buffer.as_slice()).records() {
                        let record = r.unwrap();
                        let key_a = NameLen {
                            name: record.read_a,
                            len: record.length_a,
                        };
                        let val_a = Interval {
                            begin: record.begin_a,
                            end: record.end_a,
                        };
                        let key_b = NameLen {
                            name: record.read_b,
                            len: record.length_b,
                        };
                        let val_b = Interval {
                            begin: record.begin_b,
                            end: record.end_b,
                        };
                        sender.send((Some(key_a), Some(val_a))).unwrap();
                        sender.send((Some(key_b), Some(val_b))).unwrap();
                    }
                });
            }
            // Drop the original sender so the receiver loop below terminates
            // once every worker's clone has been dropped.
            drop(sender);
        })
    });
    for (k, v) in receiver.iter() {
        result.entry(k.unwrap()).or_insert(Vec::new()).push(v.unwrap());
    }
    return result;
}
#[cfg(test)]
mod tests {
    use super::*;
    /// All three strategies must produce the same mapping (up to ordering),
    /// using the `1.paf` fixture file.
    #[test]
    fn it_works() {
        let ba = basic("1.paf");
        let mut me = message("1.paf", 128, 4);
        let mut mu = mutex("1.paf", 128, 4);
        // Key sets must agree across implementations.
        {
            let mut ba_key = ba.keys().collect::<Vec<&NameLen>>();
            let mut me_key = me.keys().collect::<Vec<&NameLen>>();
            let mut mu_key = mu.keys().collect::<Vec<&NameLen>>();
            ba_key.sort();
            me_key.sort();
            mu_key.sort();
            assert_eq!(ba_key, me_key);
            assert_eq!(me_key, mu_key);
        }
        // Interval lists are sorted before comparison because the parallel
        // versions do not preserve insertion order.
        for (k, v) in ba {
            let mut a = v;
            let mut b = me.get_mut(&k).unwrap();
            let mut c = mu.get_mut(&k).unwrap();
            a.sort();
            b.sort();
            c.sort();
            assert_eq!(&mut a, b);
            assert_eq!(b, c);
        }
    }
}
|
use super::*;
use crate::{mock::*, Error};
use frame_support::{assert_noop, assert_ok};
// Test creating a kitty
#[test]
fn create_kitty() {
    // Build the mock runtime and check that a signed origin can create a kitty.
    kitty_test_ext().execute_with(|| {
        assert_ok!(KittiesModule::create(Origin::signed(1)));
    })
}
// TODO: test transferring a kitty
|
use std::f32::consts::PI;
use std::ops::{Add, Index, IndexMut, Mul, Sub};
pub const TAU: f32 = 2.0 * PI;
// Generates a fixed-size f32 column-vector type `$name` with `$size` components,
// plus basic arithmetic, indexing and norm operations.
macro_rules! define_vec {
    ($name:ident, $size:expr) => (
        /// A column vector.
        #[derive(Copy, Clone, Debug, PartialEq)]
        pub struct $name(pub [f32; $size]);
        impl $name {
            /// Create a vector with all fields set to zero.
            pub fn zero() -> Self {
                $name([0.0; $size])
            }
            /// Calculate the square of the length (or norm) of the vector. Slightly faster than
            /// `length`, since it avoids the square root.
            pub fn length_squared(self) -> f32 {
                self.dot(self)
            }
            /// Calculate the length (or norm) of the vector.
            pub fn length(self) -> f32 {
                self.length_squared().sqrt()
            }
            /// Normalize the vector so that it has the same orientation but a length of 1.
            ///
            /// NOTE(review): a zero-length vector yields NaN components here -- confirm callers
            /// never normalize the zero vector.
            pub fn normalize(&mut self) {
                let length = self.length();
                for i in 0..$size {
                    self[i] /= length;
                }
            }
            /// Calculate the vector dot product.
            pub fn dot(self, other: Self) -> f32 {
                let mut result = 0.0;
                for i in 0..$size {
                    result += self[i] * other[i];
                }
                result
            }
        }
        impl Index<usize> for $name {
            type Output = f32;
            fn index(&self, i: usize) -> &f32 {
                &self.0[i]
            }
        }
        impl IndexMut<usize> for $name {
            fn index_mut(&mut self, i: usize) -> &mut f32 {
                &mut self.0[i]
            }
        }
        impl Add for $name {
            type Output = Self;
            fn add(self, other: Self) -> Self {
                let mut result = $name::zero();
                for i in 0..$size {
                    result[i] = self[i] + other[i];
                }
                result
            }
        }
        impl Sub for $name {
            type Output = Self;
            fn sub(self, other: Self) -> Self {
                let mut result = $name::zero();
                for i in 0..$size {
                    result[i] = self[i] - other[i];
                }
                result
            }
        }
    );
}
// Generate the 3- and 4-component vector types used throughout this module.
define_vec!(Vec3, 3);
define_vec!(Vec4, 4);
impl Vec3 {
    /// Calculate the vector cross product.
    pub fn cross(self, other: Self) -> Self {
        let (a, b) = (self.0, other.0);
        Vec3([
            a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0],
        ])
    }
}
/// A matrix stored in column-major order.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Mat4(pub [[f32; 4]; 4]);
impl Mat4 {
    /// The zero matrix.
    pub fn zero() -> Self {
        Mat4([
            [0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0],
        ])
    }
    /// The identity matrix.
    pub fn identity() -> Self {
        Mat4([
            [1.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [0.0, 0.0, 0.0, 1.0],
        ])
    }
    /// Build a matrix representing a scaling by the given factors.
    pub fn scale(x: f32, y: f32, z: f32) -> Self {
        Mat4([
            [x, 0.0, 0.0, 0.0],
            [0.0, y, 0.0, 0.0],
            [0.0, 0.0, z, 0.0],
            [0.0, 0.0, 0.0, 1.0],
        ])
    }
    /// Build a matrix representing a translation.
    /// (Column-major storage: the translation lives in the fourth column.)
    pub fn translate(x: f32, y: f32, z: f32) -> Self {
        Mat4([
            [1.0, 0.0, 0.0, 0.0],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [x, y, z, 1.0],
        ])
    }
    /// Build a matrix representing a rotation around the X-axis by the given angle (in radians).
    pub fn rotate_x(angle: f32) -> Self {
        let cos = angle.cos();
        let sin = angle.sin();
        Mat4([
            [1.0, 0.0, 0.0, 0.0],
            [0.0, cos, -sin, 0.0],
            [0.0, sin, cos, 0.0],
            [0.0, 0.0, 0.0, 1.0],
        ])
    }
    /// Build a matrix representing a rotation around the Y-axis by the given angle (in radians).
    pub fn rotate_y(angle: f32) -> Self {
        let cos = angle.cos();
        let sin = angle.sin();
        Mat4([
            [ cos, 0.0, sin, 0.0],
            [ 0.0, 1.0, 0.0, 0.0],
            [-sin, 0.0, cos, 0.0],
            [ 0.0, 0.0, 0.0, 1.0],
        ])
    }
    /// Build a matrix representing a rotation around the Z-axis by the given angle (in radians).
    pub fn rotate_z(angle: f32) -> Self {
        let cos = angle.cos();
        let sin = angle.sin();
        Mat4([
            [cos, -sin, 0.0, 0.0],
            [sin, cos, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.0],
            [0.0, 0.0, 0.0, 1.0],
        ])
    }
    /// Build a camera view matrix with the camera at `eye` looking toward `center` with `up` as
    /// the vertical direction.
    pub fn look_at(eye: Vec3, center: Vec3, up: Vec3) -> Self {
        // Set the Z-axis to the unit vector pointing from the center toward the eye (the depth
        // axis).
        let mut z = eye - center;
        z.normalize();
        // Make the Y-axis the vertical direction.
        let mut y = up;
        // Make the X-axis perpendicular to Y and Z, pointing to the right.
        let mut x = y.cross(z);
        // Make the Y-axis perpendicular to Z and X.
        y = z.cross(x);
        // Normalize the axes to unit vectors.
        x.normalize();
        y.normalize();
        // Build the rotation/translation matrix that transforms coordinates to the new coordinate
        // system. The dot products place `eye` at the origin of the view space.
        Mat4([
            [       x[0],        y[0],        z[0], 0.0],
            [       x[1],        y[1],        z[1], 0.0],
            [       x[2],        y[2],        z[2], 0.0],
            [-x.dot(eye), -y.dot(eye), -z.dot(eye), 1.0],
        ])
    }
    /// Build a perspective projection matrix with the given vertical field of view (in radians),
    /// aspect ratio, and Z-axis clipping distances.
    ///
    /// NOTE(review): the entries match the classic OpenGL-style projection
    /// (w' = -z via `result[2][3] = -1.0`) -- confirm the intended clip-space convention.
    pub fn perspective(fov_y: f32, aspect: f32, z_near: f32, z_far: f32) -> Self {
        assert!(aspect != 0.0);
        assert!(z_near != z_far);
        let f = 1.0 / (fov_y / 2.0).tan();
        let z_diff = z_near - z_far;
        let mut result = Mat4::zero();
        result[0][0] = f / aspect;
        result[1][1] = f;
        result[2][2] = (z_near + z_far) / z_diff;
        result[2][3] = -1.0;
        result[3][2] = (2.0 * z_near * z_far) / z_diff;
        result
    }
}
// Indexing yields a whole column (the matrix is stored column-major).
impl Index<usize> for Mat4 {
    type Output = [f32; 4];
    fn index(&self, col: usize) -> &[f32; 4] {
        &self.0[col]
    }
}
impl IndexMut<usize> for Mat4 {
    fn index_mut(&mut self, col: usize) -> &mut [f32; 4] {
        &mut self.0[col]
    }
}
impl Mul<Mat4> for Mat4 {
    type Output = Mat4;
    /// Standard matrix product; `self` is applied after `other`.
    fn mul(self, other: Mat4) -> Mat4 {
        let mut product = Mat4::zero();
        for c in 0..4 {
            for r in 0..4 {
                // Dot the r-th row of `self` with the c-th column of `other`.
                let mut sum = 0.0;
                for k in 0..4 {
                    sum += self[k][r] * other[c][k];
                }
                product[c][r] = sum;
            }
        }
        product
    }
}
impl Mul<Vec4> for Mat4 {
    type Output = Vec4;
    /// Matrix-vector product: accumulate each column scaled by the matching component.
    fn mul(self, vec: Vec4) -> Vec4 {
        let mut out = Vec4::zero();
        for c in 0..4 {
            let component = vec[c];
            for r in 0..4 {
                out[r] += self[c][r] * component;
            }
        }
        out
    }
}
#[test]
fn test_math() {
    // Scale-then-translate: (3,3,3) * 2 = (6,6,6), then + (1,2,3) = (7,8,9).
    let scale = Mat4::scale(2.0, 2.0, 2.0);
    let trans = Mat4::translate(1.0, 2.0, 3.0);
    // Matrix multiplication applies right-to-left: `scale` first, then `trans`.
    let combined = trans * scale;
    let original = Vec4([3.0, 3.0, 3.0, 1.0]);
    let expected = Vec4([7.0, 8.0, 9.0, 1.0]);
    assert_eq!(expected, combined * original);
}
|
//! Trait defining a generalized linear model for common functionality.
//! Models are fit such that <Y> = g^-1(X*B) where g is the link function.
use crate::link::{Link, Transform};
use crate::{
error::RegressionResult,
fit::{options::FitOptions, Fit},
irls::Irls,
model::Model,
num::Float,
regularization::IrlsReg,
};
use ndarray::{Array1, Array2};
use ndarray_linalg::SolveH;
/// Trait describing generalized linear model that enables the IRLS algorithm
/// for fitting.
pub trait Glm: Sized {
    /// The link function type of the GLM instantiation. Implementations specify
    /// this manually so that the provided methods can be called in this trait
    /// without necessitating a trait parameter.
    type Link: Link<Self>;
    /// The link function which maps the expected value of the response variable
    /// to the linear predictor.
    fn link<F: Float>(y: Array1<F>) -> Array1<F> {
        y.mapv(Self::Link::func)
    }
    /// The inverse of the link function which maps the linear predictors to the
    /// expected value of the prediction.
    fn mean<F: Float>(lin_pred: &Array1<F>) -> Array1<F> {
        lin_pred.mapv(Self::Link::func_inv)
    }
    /// The logarithm of the partition function in terms of the natural parameter.
    /// This can be used to calculate the likelihood generally. All input terms
    /// are summed over in the result.
    fn log_partition<F: Float>(nat_par: &Array1<F>) -> F;
    /// The variance as a function of the mean. This should be related to the
    /// Laplacian of the log-partition function, or in other words, the
    /// derivative of the inverse link function mu = g^{-1}(eta). This is unique
    /// to each response function, but should not depend on the link function.
    fn variance<F: Float>(mean: F) -> F;
    /// Returns the likelihood function of the response distribution as a
    /// function of the response variable y and the natural parameters of each
    /// observation. Terms that depend only on the response variable `y` are
    /// dropped. This dispersion parameter is taken to be 1, as it does not
    /// affect the IRLS steps.
    /// The default implementation can be overwritten for performance or numerical
    /// accuracy, but should be mathematically equivalent to the default implementation.
    fn log_like_natural<F>(y: &Array1<F>, nat: &Array1<F>) -> F
    where
        F: Float,
    {
        // NOTE: the saturated likelihood is deliberately NOT subtracted here.
        // Subtracting it would keep the value closer to zero, but it would
        // break some null-likelihood tests as written and make the current
        // deviance calculation incorrect.
        (y * nat).sum() - Self::log_partition(nat)
    }
    /// Returns the likelihood of a saturated model where every observation can
    /// be fit exactly.
    fn log_like_sat<F>(y: &Array1<F>) -> F
    where
        F: Float;
    /// Returns the likelihood function including regularization terms.
    fn log_like_reg<F>(
        data: &Model<Self, F>,
        regressors: &Array1<F>,
        regularization: &dyn IrlsReg<F>,
    ) -> F
    where
        F: Float,
    {
        let lin_pred = data.linear_predictor(&regressors);
        // the likelihood prior to regularization
        let l_unreg = Self::log_like_natural(&data.y, &Self::Link::nat_param(lin_pred));
        (*regularization).likelihood(l_unreg, regressors)
    }
    /// Provide an initial guess for the parameters. This can be overridden
    /// but this should provide a decent general starting point. The y data is
    /// averaged with its mean to prevent infinities resulting from application
    /// of the link function:
    /// X * beta_0 ~ g(0.5*(y + y_avg))
    /// This is equivalent to minimizing half the sum of squared differences
    /// between X*beta and g(0.5*(y + y_avg)).
    // TODO: consider incorporating weights and/or correlations.
    fn init_guess<F>(data: &Model<Self, F>) -> Array1<F>
    where
        F: Float,
        Array2<F>: SolveH<F>,
    {
        let y_bar: F = data.y.mean().unwrap_or_else(F::zero);
        let mu_y: Array1<F> = data.y.mapv(|y| F::from(0.5).unwrap() * (y + y_bar))
;
        let link_y = mu_y.mapv(Self::Link::func);
        // Compensate for linear offsets if they are present
        let link_y: Array1<F> = if let Some(off) = &data.linear_offset {
            &link_y - off
        } else {
            link_y
        };
        // Solve the normal equations (X^T X) beta = X^T g(mu_y).
        let x_mat: Array2<F> = data.x.t().dot(&data.x);
        let init_guess: Array1<F> =
            x_mat
                .solveh_into(data.x.t().dot(&link_y))
                .unwrap_or_else(|err| {
                    // A singular X^T X is not fatal: warn and start IRLS from zero.
                    eprintln!("WARNING: failed to get initial guess for IRLS. Will begin at zero.");
                    eprintln!("{}", err);
                    Array1::<F>::zeros(data.x.ncols())
                });
        init_guess
    }
    /// Do the regression and return a result. Returns object holding fit result.
    fn regression<F>(
        data: &Model<Self, F>,
        options: FitOptions<F>,
    ) -> RegressionResult<Fit<Self, F>>
    where
        F: Float,
        Self: Sized,
    {
        let initial: Array1<F> = options
            .init_guess
            .clone()
            .unwrap_or_else(|| Self::init_guess(&data));
        // This represents the number of overall iterations
        let mut n_iter: usize = 0;
        // This is the number of steps tried, which includes those arising from step halving.
        let mut n_steps: usize = 0;
        // initialize the result and likelihood in case no steps are taken.
        let mut result: Array1<F> = initial.clone();
        let mut model_like: F = Self::log_like_reg(&data, &initial, options.reg.as_ref());
        let irls: Irls<Self, F> = Irls::new(&data, initial, &options, model_like);
        // Each IRLS iteration refines the guess; errors abort the fit via `?`.
        for iteration in irls {
            let it_result = iteration?;
            result.assign(&it_result.guess);
            model_like = it_result.like;
            // This number of iterations does not include any extras from step halving.
            n_iter += 1;
            n_steps += it_result.steps;
        }
        Ok(Fit::new(data, result, options, model_like, n_iter, n_steps))
    }
}
/// Describes the domain of the response variable for a GLM, e.g. integer for
/// Poisson, float for Linear, bool for logistic. Implementing this trait for a
/// type Y shows how to convert to a floating point type and allows that type to
/// be used as a response variable.
pub trait Response<M: Glm> {
    /// Converts the domain to a floating-point value for IRLS.
    /// May fail when the value cannot be represented in `F` -- see implementors.
    fn to_float<F: Float>(self) -> RegressionResult<F>;
}
|
#![allow(clippy::excessive_precision)]
#[macro_use]
mod support;
/// Generates the common 2D-vector test suite shared by all element types
/// (float, signed int, unsigned int).
///
/// Parameters:
/// - `$t`:    scalar element type (e.g. `f32`, `i32`, `u64`)
/// - `$new`:  free constructor function (e.g. `vec2`)
/// - `$vec2`: 2D vector type under test
/// - `$vec3`: matching 3D vector type (for extend/truncate tests)
/// - `$mask`: matching 2-lane bool mask type (e.g. `BVec2`)
macro_rules! impl_vec2_tests {
    ($t:ident, $new:ident, $vec2:ident, $vec3:ident, $mask:ident) => {
        // Constructors must be usable in const context.
        glam_test!(test_const, {
            const V0: $vec2 = $vec2::splat(1 as $t);
            const V1: $vec2 = $vec2::new(1 as $t, 2 as $t);
            const V2: $vec2 = $vec2::from_array([1 as $t, 2 as $t]);
            assert_eq!([1 as $t, 1 as $t], *V0.as_ref());
            assert_eq!([1 as $t, 2 as $t], *V1.as_ref());
            assert_eq!([1 as $t, 2 as $t], *V2.as_ref());
        });
        // Associated constants ZERO/ONE/X/Y/MIN/MAX.
        glam_test!(test_vec2_consts, {
            assert_eq!($vec2::ZERO, $new(0 as $t, 0 as $t));
            assert_eq!($vec2::ONE, $new(1 as $t, 1 as $t));
            assert_eq!($vec2::X, $new(1 as $t, 0 as $t));
            assert_eq!($vec2::Y, $new(0 as $t, 1 as $t));
            assert_eq!($vec2::MIN, $new($t::MIN, $t::MIN));
            assert_eq!($vec2::MAX, $new($t::MAX, $t::MAX));
        });
        // Construction and round-trip conversions: tuple, array, as_ref/as_mut.
        glam_test!(test_new, {
            let v = $new(1 as $t, 2 as $t);

            assert_eq!(v.x, 1 as $t);
            assert_eq!(v.y, 2 as $t);

            let t = (1 as $t, 2 as $t);
            let v = $vec2::from(t);
            assert_eq!(t, v.into());

            let a = [1 as $t, 2 as $t];
            let v = $vec2::from(a);
            let a1: [$t; 2] = v.into();
            assert_eq!(a, a1);
            assert_eq!(a, v.to_array());
            assert_eq!(a, *v.as_ref());

            let mut v2 = $vec2::default();
            *v2.as_mut() = a;
            assert_eq!(a, v2.to_array());

            let v = $vec2::new(t.0, t.1);
            assert_eq!(t, v.into());

            assert_eq!($vec2::new(1 as $t, 0 as $t), $vec2::X);
            assert_eq!($vec2::new(0 as $t, 1 as $t), $vec2::Y);
        });
        // Debug ({:?} and {:#?}) and Display formatting.
        glam_test!(test_fmt, {
            let a = $vec2::new(1 as $t, 2 as $t);
            assert_eq!(
                format!("{:?}", a),
                format!("{}({:?}, {:?})", stringify!($vec2), a.x, a.y)
            );
            assert_eq!(
                format!("{:#?}", a),
                format!(
                    "{}(\n    {:#?},\n    {:#?},\n)",
                    stringify!($vec2),
                    a.x,
                    a.y
                )
            );
            assert_eq!(format!("{}", a), "[1, 2]");
        });
        // Default is the zero vector.
        glam_test!(test_zero, {
            let v = $vec2::ZERO;
            assert_eq!($new(0 as $t, 0 as $t), v);
            assert_eq!(v, $vec2::default());
        });
        glam_test!(test_splat, {
            let v = $vec2::splat(1 as $t);
            assert_eq!($vec2::ONE, v);
        });
        // Field access and Index/IndexMut.
        glam_test!(test_accessors, {
            let mut a = $vec2::ZERO;
            a.x = 1 as $t;
            a.y = 2 as $t;
            assert_eq!(1 as $t, a.x);
            assert_eq!(2 as $t, a.y);
            assert_eq!($vec2::new(1 as $t, 2 as $t), a);

            let mut a = $vec2::ZERO;
            a[0] = 1 as $t;
            a[1] = 2 as $t;
            assert_eq!(1 as $t, a[0]);
            assert_eq!(2 as $t, a[1]);
            assert_eq!($vec2::new(1 as $t, 2 as $t), a);
        });
        // Dot product restricted to non-negative operands (valid for unsigned types).
        glam_test!(test_dot_unsigned, {
            let x = $new(1 as $t, 0 as $t);
            let y = $new(0 as $t, 1 as $t);
            assert_eq!(1 as $t, x.dot(x));
            assert_eq!(0 as $t, x.dot(y));
            assert_eq!(
                $new(8 as $t, 8 as $t),
                $new(1 as $t, 2 as $t).dot_into_vec($new(4 as $t, 2 as $t))
            );
        });
        glam_test!(test_length_squared_unsigned, {
            let x = $new(1 as $t, 0 as $t);
            assert_eq!(4 as $t, (2 as $t * x).length_squared());
            assert_eq!(
                2 as $t * 2 as $t + 3 as $t * 3 as $t,
                $new(2 as $t, 3 as $t).length_squared()
            );
        });
        // Component-wise +, -, *, /, % with vector and scalar operands on both sides.
        glam_test!(test_ops, {
            let a = $new(2 as $t, 4 as $t);
            assert_eq!($new(4 as $t, 8 as $t), (a + a));
            assert_eq!($new(2 as $t, 4 as $t), 0 as $t + a);
            assert_eq!($new(0 as $t, 0 as $t), (a - a));
            assert_eq!($new(14 as $t, 12 as $t), 16 as $t - a);
            assert_eq!($new(4 as $t, 16 as $t), (a * a));
            assert_eq!($new(4 as $t, 8 as $t), (a * 2 as $t));
            assert_eq!($new(4 as $t, 8 as $t), (2 as $t * a));
            assert_eq!($new(1 as $t, 1 as $t), (a / a));
            assert_eq!($new(1 as $t, 2 as $t), (a / 2 as $t));
            assert_eq!($new(2 as $t, 1 as $t), (4 as $t / a));
            assert_eq!($new(0 as $t, 0 as $t), a % a);
            assert_eq!($new(0 as $t, 1 as $t), a % (a - 1 as $t));
            assert_eq!($new(0 as $t, 0 as $t), a % 1 as $t);
            assert_eq!($new(2 as $t, 1 as $t), a % 3 as $t);
            assert_eq!($new(1 as $t, 1 as $t), 17 as $t % a);
            assert_eq!($new(2 as $t, 4 as $t), a % 8 as $t);
        });
        // Compound assignment operators with scalar and vector right-hand sides.
        glam_test!(test_assign_ops, {
            let a = $new(1 as $t, 2 as $t);
            let mut b = a;

            b += 2 as $t;
            assert_eq!($new(3 as $t, 4 as $t), b);
            b -= 2 as $t;
            assert_eq!($new(1 as $t, 2 as $t), b);
            b *= 2 as $t;
            assert_eq!($new(2 as $t, 4 as $t), b);
            b /= 2 as $t;
            assert_eq!($new(1 as $t, 2 as $t), b);
            b %= 2 as $t;
            assert_eq!($new(1 as $t, 0 as $t), b);

            b = a;
            b += a;
            assert_eq!($new(2 as $t, 4 as $t), b);
            b -= a;
            assert_eq!($new(1 as $t, 2 as $t), b);
            b *= a;
            assert_eq!($new(1 as $t, 4 as $t), b);
            b /= a;
            assert_eq!($new(1 as $t, 2 as $t), b);
            b *= 2 as $t;
            assert_eq!($new(2 as $t, 4 as $t), b);
            b /= 2 as $t;
            assert_eq!($new(1 as $t, 2 as $t), b);
            b %= (b + 1 as $t);
            assert_eq!($new(1 as $t, 2 as $t), b);
            b %= b;
            assert_eq!($new(0 as $t, 0 as $t), b);
        });
        // min/max are component-wise and commutative.
        glam_test!(test_min_max, {
            let a = $new(0 as $t, 2 as $t);
            let b = $new(1 as $t, 1 as $t);
            assert_eq!($new(0 as $t, 1 as $t), a.min(b));
            assert_eq!($new(0 as $t, 1 as $t), b.min(a));
            assert_eq!($new(1 as $t, 2 as $t), a.max(b));
            assert_eq!($new(1 as $t, 2 as $t), b.max(a));
        });
        // clamp is component-wise; min > max must trigger a glam_assert.
        glam_test!(test_clamp, {
            fn vec(x: i32, y: i32) -> $vec2 {
                $vec2::new(x as $t, y as $t)
            }
            let min = vec(1, 3);
            let max = vec(6, 8);
            assert_eq!(vec(0, 0).clamp(min, max), vec(1, 3));
            assert_eq!(vec(2, 2).clamp(min, max), vec(2, 3));
            assert_eq!(vec(4, 5).clamp(min, max), vec(4, 5));
            assert_eq!(vec(6, 6).clamp(min, max), vec(6, 6));
            assert_eq!(vec(7, 7).clamp(min, max), vec(6, 7));
            assert_eq!(vec(9, 9).clamp(min, max), vec(6, 8));
            should_glam_assert!({ $vec2::clamp($vec2::ZERO, $vec2::ONE, $vec2::ZERO) });
        });
        // Horizontal min/max over the two lanes.
        glam_test!(test_hmin_hmax, {
            assert_eq!(1 as $t, $new(1 as $t, 2 as $t).min_element());
            assert_eq!(1 as $t, $new(2 as $t, 1 as $t).min_element());
            assert_eq!(2 as $t, $new(1 as $t, 2 as $t).max_element());
            assert_eq!(2 as $t, $new(2 as $t, 1 as $t).max_element());
        });
        // cmpeq/cmpne produce per-lane masks; any/all reduce them.
        glam_test!(test_eq, {
            let a = $new(1 as $t, 1 as $t);
            let b = $new(1 as $t, 2 as $t);
            assert!(a.cmpeq(a).all());
            assert!(b.cmpeq(b).all());
            assert!(a.cmpne(b).any());
            assert!(b.cmpne(a).any());
            // a and b share a first lane, so at least one lane compares equal.
            assert!(b.cmpeq(a).any());
        });
        // Ordered comparisons and the bitmask lane encoding (bit 0 = x, bit 1 = y).
        glam_test!(test_cmp, {
            assert!(!$mask::default().any());
            assert!(!$mask::default().all());
            assert_eq!($mask::default().bitmask(), 0x0);
            let a = $new(1 as $t, 1 as $t);
            let b = $new(2 as $t, 2 as $t);
            let c = $new(1 as $t, 1 as $t);
            let d = $new(2 as $t, 1 as $t);
            assert_eq!(a.cmplt(a).bitmask(), 0x0);
            assert_eq!(a.cmplt(b).bitmask(), 0x3);
            assert_eq!(a.cmplt(d).bitmask(), 0x1);
            assert_eq!(c.cmple(a).bitmask(), 0x3);
            assert!(a.cmplt(b).all());
            assert!(a.cmplt(d).any());
            assert!(a.cmple(b).all());
            assert!(a.cmple(a).all());
            assert!(b.cmpgt(a).all());
            assert!(b.cmpge(a).all());
            assert!(b.cmpge(b).all());
            assert!(!(a.cmpge(d).all()));
            assert!(c.cmple(c).all());
            assert!(c.cmpge(c).all());
            assert!(a == a);
        });
        // extend() appends a z component, producing the matching Vec3 type.
        glam_test!(test_extend_truncate, {
            let a = $new(1 as $t, 2 as $t);
            let b = a.extend(3 as $t);
            assert_eq!($vec3::new(1 as $t, 2 as $t, 3 as $t), b);
        });
        glam_test!(test_vec2mask, {
            // make sure the unused 'z' value doesn't break $vec2 behaviour
            let a = $vec3::ZERO;
            let mut b = a.truncate();
            b.x = 1 as $t;
            b.y = 1 as $t;
            assert!(!b.cmpeq($vec2::ZERO).any());
            assert!(b.cmpeq($vec2::splat(1 as $t)).all());
        });
        // Mask lanes convert to u32 as all-ones (!0) for true, 0 for false.
        glam_test!(test_mask_into_array_u32, {
            assert_eq!(Into::<[u32; 2]>::into($mask::new(false, false)), [0, 0]);
            assert_eq!(Into::<[u32; 2]>::into($mask::new(true, false)), [!0, 0]);
            assert_eq!(Into::<[u32; 2]>::into($mask::new(false, true)), [0, !0]);
            assert_eq!(Into::<[u32; 2]>::into($mask::new(true, true)), [!0, !0]);
        });
        glam_test!(test_mask_into_array_bool, {
            assert_eq!(
                Into::<[bool; 2]>::into($mask::new(false, false)),
                [false, false]
            );
            assert_eq!(
                Into::<[bool; 2]>::into($mask::new(true, false)),
                [true, false]
            );
            assert_eq!(
                Into::<[bool; 2]>::into($mask::new(false, true)),
                [false, true]
            );
            assert_eq!(
                Into::<[bool; 2]>::into($mask::new(true, true)),
                [true, true]
            );
        });
        glam_test!(test_mask_splat, {
            assert_eq!($mask::splat(false), $mask::new(false, false));
            assert_eq!($mask::splat(true), $mask::new(true, true));
        });
        // bitmask(): bit 0 carries x, bit 1 carries y.
        glam_test!(test_mask_bitmask, {
            assert_eq!($mask::new(false, false).bitmask(), 0b00);
            assert_eq!($mask::new(true, false).bitmask(), 0b01);
            assert_eq!($mask::new(false, true).bitmask(), 0b10);
            assert_eq!($mask::new(true, true).bitmask(), 0b11);
        });
        glam_test!(test_mask_any, {
            assert_eq!($mask::new(false, false).any(), false);
            assert_eq!($mask::new(true, false).any(), true);
            assert_eq!($mask::new(false, true).any(), true);
            assert_eq!($mask::new(true, true).any(), true);
        });
        glam_test!(test_mask_all, {
            assert_eq!($mask::new(false, false).all(), false);
            assert_eq!($mask::new(true, false).all(), false);
            assert_eq!($mask::new(false, true).all(), false);
            assert_eq!($mask::new(true, true).all(), true);
        });
        // select(mask, a, b): per lane, true picks from a, false picks from b.
        glam_test!(test_mask_select, {
            let a = $vec2::new(1 as $t, 2 as $t);
            let b = $vec2::new(3 as $t, 4 as $t);
            assert_eq!(
                $vec2::select($mask::new(true, true), a, b),
                $vec2::new(1 as $t, 2 as $t),
            );
            assert_eq!(
                $vec2::select($mask::new(true, false), a, b),
                $vec2::new(1 as $t, 4 as $t),
            );
            assert_eq!(
                $vec2::select($mask::new(false, true), a, b),
                $vec2::new(3 as $t, 2 as $t),
            );
            assert_eq!(
                $vec2::select($mask::new(false, false), a, b),
                $vec2::new(3 as $t, 4 as $t),
            );
        });
        // Bitwise AND (& and &=) on masks.
        glam_test!(test_mask_and, {
            assert_eq!(
                ($mask::new(false, false) & $mask::new(false, false)).bitmask(),
                0b00,
            );
            assert_eq!(
                ($mask::new(true, true) & $mask::new(true, false)).bitmask(),
                0b01,
            );
            assert_eq!(
                ($mask::new(true, false) & $mask::new(false, true)).bitmask(),
                0b00,
            );
            assert_eq!(
                ($mask::new(true, true) & $mask::new(true, true)).bitmask(),
                0b11,
            );

            let mut mask = $mask::new(true, true);
            mask &= $mask::new(true, false);
            assert_eq!(mask.bitmask(), 0b01);
        });
        // Bitwise OR (| and |=) on masks.
        glam_test!(test_mask_or, {
            assert_eq!(
                ($mask::new(false, false) | $mask::new(false, false)).bitmask(),
                0b00,
            );
            assert_eq!(
                ($mask::new(false, false) | $mask::new(false, true)).bitmask(),
                0b10,
            );
            assert_eq!(
                ($mask::new(true, false) | $mask::new(false, true)).bitmask(),
                0b11,
            );
            assert_eq!(
                ($mask::new(true, true) | $mask::new(true, true)).bitmask(),
                0b11,
            );

            let mut mask = $mask::new(true, true);
            mask |= $mask::new(true, false);
            assert_eq!(mask.bitmask(), 0b11);
        });
        // Bitwise XOR (^ and ^=) on masks.
        glam_test!(test_mask_xor, {
            assert_eq!(
                ($mask::new(false, false) ^ $mask::new(false, false)).bitmask(),
                0b00,
            );
            assert_eq!(
                ($mask::new(false, false) ^ $mask::new(false, true)).bitmask(),
                0b10,
            );
            assert_eq!(
                ($mask::new(true, false) ^ $mask::new(false, true)).bitmask(),
                0b11,
            );
            assert_eq!(
                ($mask::new(true, true) ^ $mask::new(true, true)).bitmask(),
                0b00,
            );

            let mut mask = $mask::new(false, true);
            mask ^= $mask::new(true, false);
            assert_eq!(mask.bitmask(), 0b11);
        });
        glam_test!(test_mask_not, {
            assert_eq!((!$mask::new(false, false)).bitmask(), 0b11);
            assert_eq!((!$mask::new(true, false)).bitmask(), 0b10);
            assert_eq!((!$mask::new(false, true)).bitmask(), 0b01);
            assert_eq!((!$mask::new(true, true)).bitmask(), 0b00);
        });
        // Debug prints lanes as 0xffffffff/0x0; Display as [true, false].
        glam_test!(test_mask_fmt, {
            let a = $mask::new(true, false);

            assert_eq!(
                format!("{:?}", a),
                format!("{}(0xffffffff, 0x0)", stringify!($mask))
            );
            assert_eq!(format!("{}", a), "[true, false]");
        });
        glam_test!(test_mask_eq, {
            let a = $mask::new(true, false);
            let b = $mask::new(true, false);
            let c = $mask::new(false, true);

            assert_eq!(a, b);
            assert_eq!(b, a);
            assert_ne!(a, c);
            assert_ne!(b, c);
        });
        // test(i) reads lane i as a bool.
        glam_test!(test_mask_test, {
            let a = $mask::new(true, false);
            assert_eq!(a.test(0), true);
            assert_eq!(a.test(1), false);

            let b = $mask::new(false, true);
            assert_eq!(b.test(0), false);
            assert_eq!(b.test(1), true);
        });
        // set(i, v) writes lane i.
        glam_test!(test_mask_set, {
            let mut a = $mask::new(false, true);
            a.set(0, true);
            assert_eq!(a.test(0), true);
            a.set(1, false);
            assert_eq!(a.test(1), false);

            let mut b = $mask::new(true, false);
            b.set(0, false);
            assert_eq!(b.test(0), false);
            b.set(1, true);
            assert_eq!(b.test(1), true);
        });
        // Hash must agree with equality: equal masks hash equal.
        glam_test!(test_mask_hash, {
            use std::collections::hash_map::DefaultHasher;
            use std::hash::Hash;
            use std::hash::Hasher;

            let a = $mask::new(true, false);
            let b = $mask::new(true, false);
            let c = $mask::new(false, true);

            let mut hasher = DefaultHasher::new();
            a.hash(&mut hasher);
            let a_hashed = hasher.finish();

            let mut hasher = DefaultHasher::new();
            b.hash(&mut hasher);
            let b_hashed = hasher.finish();

            let mut hasher = DefaultHasher::new();
            c.hash(&mut hasher);
            let c_hashed = hasher.finish();

            assert_eq!(a, b);
            assert_eq!(a_hashed, b_hashed);
            assert_ne!(a, c);
            // NOTE: distinct hashes for distinct values are not guaranteed in
            // general (collisions are legal); this asserts the current behavior.
            assert_ne!(a_hashed, c_hashed);
        });
        // write_to_slice/from_slice round-trip; short slices must panic.
        glam_test!(test_to_from_slice, {
            let v = $vec2::new(1 as $t, 2 as $t);
            let mut a = [0 as $t, 0 as $t];
            v.write_to_slice(&mut a);
            assert_eq!(v, $vec2::from_slice(&a));

            should_panic!({ $vec2::ONE.write_to_slice(&mut [0 as $t]) });
            should_panic!({ $vec2::from_slice(&[0 as $t]) });
        });
        // Sum/Product are implemented for both &$vec2 and $vec2 iterators.
        glam_test!(test_sum, {
            let one = $vec2::ONE;
            assert_eq!([one, one].iter().sum::<$vec2>(), one + one);
            assert_eq!([one, one].into_iter().sum::<$vec2>(), one + one);
        });
        glam_test!(test_product, {
            let two = $vec2::new(2 as $t, 2 as $t);
            assert_eq!([two, two].iter().product::<$vec2>(), two * two);
            assert_eq!([two, two].into_iter().product::<$vec2>(), two * two);
        });
    };
}
/// Extends [`impl_vec2_tests`] with tests that require signed arithmetic:
/// negation, abs, perp/perp_dot, rotate, and Euclidean div/rem.
/// Instantiated for both signed-integer and float vector types.
macro_rules! impl_vec2_signed_tests {
    ($t:ident, $new:ident, $vec2:ident, $vec3:ident, $mask:ident) => {
        impl_vec2_tests!($t, $new, $vec2, $vec3, $mask);

        // is_negative_bitmask: bit 0 = x negative, bit 1 = y negative.
        glam_test!(test_is_negative_bitmask, {
            assert_eq!($vec2::ZERO.is_negative_bitmask(), 0b00);
            assert_eq!($vec2::ONE.is_negative_bitmask(), 0b00);
            assert_eq!((-$vec2::ONE).is_negative_bitmask(), 0b11);
            assert_eq!($vec2::new(-1 as $t, 2 as $t).is_negative_bitmask(), 0b01);
            assert_eq!($vec2::new(8 as $t, 3 as $t).is_negative_bitmask(), 0b00);
            assert_eq!($vec2::new(3 as $t, -4 as $t).is_negative_bitmask(), 0b10);
            assert_eq!($vec2::new(-2 as $t, -6 as $t).is_negative_bitmask(), 0b11);
        });
        glam_test!(test_abs, {
            assert_eq!($vec2::ZERO.abs(), $vec2::ZERO);
            assert_eq!($vec2::ONE.abs(), $vec2::ONE);
            assert_eq!((-$vec2::ONE).abs(), $vec2::ONE);
        });
        glam_test!(test_dot_signed, {
            let x = $new(1 as $t, 0 as $t);
            let y = $new(0 as $t, 1 as $t);
            assert_eq!(1 as $t, x.dot(x));
            assert_eq!(0 as $t, x.dot(y));
            assert_eq!(-1 as $t, x.dot(-x));
        });
        glam_test!(test_length_squared_signed, {
            let x = $new(1 as $t, 0 as $t);
            let y = $new(0 as $t, 1 as $t);
            assert_eq!(9 as $t, (-3 as $t * y).length_squared());
            assert_eq!(2 as $t, x.distance_squared(y));
            assert_eq!(13 as $t, (2 as $t * x).distance_squared(-3 as $t * y));
        });
        glam_test!(test_neg, {
            let a = $new(1 as $t, 2 as $t);
            assert_eq!($new(-1 as $t, -2 as $t), (-a));
            // The signed-zero cases below are meaningful for float $t; for
            // integer $t the `-0.0 as $t` literals all collapse to plain 0.
            assert_eq!($new(-0.0 as $t, -0.0 as $t), -$new(0.0 as $t, 0.0 as $t));
            assert_eq!($new(0.0 as $t, -0.0 as $t), -$new(-0.0 as $t, 0.0 as $t));
        });
        // perp() is the 90° counter-clockwise rotation, so v.perp().dot(v) == 0.
        glam_test!(test_perp, {
            let v1 = $vec2::new(1 as $t, 2 as $t);
            let v2 = $vec2::new(1 as $t, 1 as $t);
            let v1_perp = $vec2::new(-2 as $t, 1 as $t);

            assert_eq!(v1_perp, v1.perp());
            assert_eq!(v1.perp().dot(v1), 0 as $t);
            assert_eq!(v2.perp().dot(v2), 0 as $t);
            assert_eq!(v1.perp().dot(v2), v1.perp_dot(v2));
        });
        // rotate() treats self as a complex number / rotation and applies it.
        glam_test!(test_rotate, {
            assert_eq!(
                $vec2::new(0 as $t, 1 as $t).rotate($vec2::new(1 as $t, 1 as $t)),
                $vec2::new(-1 as $t, 1 as $t)
            );
        });
        // Euclidean division: quotient rounds toward negative infinity...
        glam_test!(test_div_euclid, {
            let one = $vec2::ONE;
            let two = one + one;
            let three = two + one;
            assert_eq!(three.div_euclid(two), one);
            assert_eq!((-three).div_euclid(two), -two);
            assert_eq!(three.div_euclid(-two), -one);
            assert_eq!((-three).div_euclid(-two), two);
        });
        // ...and the remainder is always non-negative.
        glam_test!(test_rem_euclid, {
            let one = $vec2::ONE;
            let two = one + one;
            let three = two + one;
            let four = three + one;
            assert_eq!(four.rem_euclid(three), one);
            assert_eq!((-four).rem_euclid(three), two);
            assert_eq!(four.rem_euclid(-three), one);
            assert_eq!((-four).rem_euclid(-three), two);
        });
    };
}
/// Extends [`impl_vec2_signed_tests`] with tests specific to signed *integer*
/// vector types (where `signum` of zero is zero, unlike floats).
///
/// Parameters mirror the other vec2 test macros; `$vec3` is only forwarded to
/// the inner macro invocation.
macro_rules! impl_vec2_signed_integer_tests {
    ($t:ident, $new:ident, $vec2:ident, $vec3:ident, $mask:ident) => {
        impl_vec2_signed_tests!($t, $new, $vec2, $vec3, $mask);

        glam_test!(test_signum, {
            // Fixed copy-paste: this is the vec2 suite, but the assertions
            // previously exercised $vec3. Integer signum(0) == 0.
            assert_eq!($vec2::ZERO.signum(), $vec2::ZERO);
            assert_eq!($vec2::ONE.signum(), $vec2::ONE);
            assert_eq!((-$vec2::ONE).signum(), -$vec2::ONE);
        });
    };
}
/// Generates a test verifying that `Hash` agrees with `Eq` for integer vec2
/// types: equal vectors hash identically (and, for these specific values,
/// unequal vectors hash differently — a property of the current impl, not a
/// general guarantee of `Hash`).
macro_rules! impl_vec2_eq_hash_tests {
    ($t:ident, $new:ident) => {
        // Fixed test-name typo: was `test_ve2_hash`.
        glam_test!(test_vec2_hash, {
            use std::collections::hash_map::DefaultHasher;
            use std::hash::Hash;
            use std::hash::Hasher;

            let a = $new(1 as $t, 2 as $t);
            let b = $new(1 as $t, 2 as $t);
            let c = $new(3 as $t, 2 as $t);

            let mut hasher = DefaultHasher::new();
            a.hash(&mut hasher);
            let a_hashed = hasher.finish();

            let mut hasher = DefaultHasher::new();
            b.hash(&mut hasher);
            let b_hashed = hasher.finish();

            let mut hasher = DefaultHasher::new();
            c.hash(&mut hasher);
            let c_hashed = hasher.finish();

            assert_eq!(a, b);
            assert_eq!(a_hashed, b_hashed);
            assert_ne!(a, c);
            assert_ne!(a_hashed, c_hashed);
        });
    };
}
macro_rules! impl_vec2_float_tests {
($t:ident, $new:ident, $vec2:ident, $vec3:ident, $mask:ident) => {
impl_vec2_signed_tests!($t, $new, $vec2, $vec3, $mask);
impl_vec_float_normalize_tests!($t, $vec2);
use core::$t::INFINITY;
use core::$t::NAN;
use core::$t::NEG_INFINITY;
glam_test!(test_vec2_nan, {
assert!($vec2::NAN.is_nan());
assert!(!$vec2::NAN.is_finite());
});
glam_test!(test_length, {
let x = $new(1.0, 0.0);
let y = $new(0.0, 1.0);
assert_eq!(2.0, (-2.0 * x).length());
assert_eq!(3.0, (3.0 * y).length());
assert_eq!((2.0 as $t).sqrt(), x.distance(y));
assert_eq!(5.0, (3.0 * x).distance(-4.0 * y));
assert_eq!(13.0, (-5.0 * x).distance(12.0 * y));
assert_eq!(x, (2.0 * x).normalize());
assert_eq!(1.0 * 3.0 + 2.0 * 4.0, $new(1.0, 2.0).dot($new(3.0, 4.0)));
assert_eq!(
(2.0 as $t * 2.0 + 3.0 * 3.0).sqrt(),
$new(2.0, 3.0).length()
);
assert_eq!(
1.0 / (2.0 as $t * 2.0 + 3.0 * 3.0).sqrt(),
$new(2.0, 3.0).length_recip()
);
assert!($new(2.0, 3.0).normalize().is_normalized());
assert_eq!(
$new(2.0, 3.0) / (2.0 as $t * 2.0 + 3.0 * 3.0).sqrt(),
$new(2.0, 3.0).normalize()
);
assert_eq!($new(0.5, 0.25), $new(2.0, 4.0).recip());
});
glam_test!(test_project_reject, {
assert_eq!($new(0.0, 1.0), $new(1.0, 1.0).project_onto($new(0.0, 2.0)));
assert_eq!($new(1.0, 0.0), $new(1.0, 1.0).reject_from($new(0.0, 2.0)));
assert_eq!(
$new(0.0, 1.0),
$new(1.0, 1.0).project_onto_normalized($new(0.0, 1.0))
);
assert_eq!(
$new(1.0, 0.0),
$new(1.0, 1.0).reject_from_normalized($new(0.0, 1.0))
);
should_glam_assert!({ $vec2::ONE.project_onto($vec2::ZERO) });
should_glam_assert!({ $vec2::ONE.reject_from($vec2::ZERO) });
should_glam_assert!({ $vec2::ONE.project_onto_normalized($vec2::ONE) });
should_glam_assert!({ $vec2::ONE.reject_from_normalized($vec2::ONE) });
});
glam_test!(test_signum, {
assert_eq!($vec2::ZERO.signum(), $vec2::ONE);
assert_eq!((-$vec2::ZERO).signum(), -$vec2::ONE);
assert_eq!($vec2::ONE.signum(), $vec2::ONE);
assert_eq!((-$vec2::ONE).signum(), -$vec2::ONE);
assert_eq!($vec2::INFINITY.signum(), $vec2::ONE);
assert_eq!($vec2::NEG_INFINITY.signum(), -$vec2::ONE);
assert!($vec2::NAN.signum().is_nan_mask().all());
});
glam_test!(test_copysign, {
assert_eq!($vec2::ZERO.copysign(-$vec2::ZERO), -$vec2::ZERO);
assert_eq!((-$vec2::ZERO).copysign(-$vec2::ZERO), -$vec2::ZERO);
assert_eq!($vec2::ZERO.copysign($vec2::ZERO), $vec2::ZERO);
assert_eq!((-$vec2::ZERO).copysign($vec2::ZERO), $vec2::ZERO);
assert_eq!($vec2::ONE.copysign(-$vec2::ZERO), -$vec2::ONE);
assert_eq!((-$vec2::ONE).copysign(-$vec2::ZERO), -$vec2::ONE);
assert_eq!($vec2::ONE.copysign($vec2::ZERO), $vec2::ONE);
assert_eq!((-$vec2::ONE).copysign($vec2::ZERO), $vec2::ONE);
assert_eq!($vec2::ZERO.copysign(-$vec2::ONE), -$vec2::ZERO);
assert_eq!((-$vec2::ZERO).copysign(-$vec2::ONE), -$vec2::ZERO);
assert_eq!($vec2::ZERO.copysign($vec2::ONE), $vec2::ZERO);
assert_eq!((-$vec2::ZERO).copysign($vec2::ONE), $vec2::ZERO);
assert_eq!($vec2::ONE.copysign(-$vec2::ONE), -$vec2::ONE);
assert_eq!((-$vec2::ONE).copysign(-$vec2::ONE), -$vec2::ONE);
assert_eq!($vec2::ONE.copysign($vec2::ONE), $vec2::ONE);
assert_eq!((-$vec2::ONE).copysign($vec2::ONE), $vec2::ONE);
assert_eq!($vec2::INFINITY.copysign($vec2::ONE), $vec2::INFINITY);
assert_eq!($vec2::INFINITY.copysign(-$vec2::ONE), $vec2::NEG_INFINITY);
assert_eq!($vec2::NEG_INFINITY.copysign($vec2::ONE), $vec2::INFINITY);
assert_eq!(
$vec2::NEG_INFINITY.copysign(-$vec2::ONE),
$vec2::NEG_INFINITY
);
assert!($vec2::NAN.copysign($vec2::ONE).is_nan_mask().all());
assert!($vec2::NAN.copysign(-$vec2::ONE).is_nan_mask().all());
});
glam_test!(test_float_is_negative_bitmask, {
assert_eq!($vec2::ZERO.is_negative_bitmask(), 0b00);
assert_eq!((-$vec2::ZERO).is_negative_bitmask(), 0b11);
assert_eq!($vec2::ONE.is_negative_bitmask(), 0b00);
assert_eq!((-$vec2::ONE).is_negative_bitmask(), 0b11);
assert_eq!($vec2::new(-1.0, 2.0).is_negative_bitmask(), 0b01);
assert_eq!($vec2::new(8.0, 3.0).is_negative_bitmask(), 0b00);
assert_eq!($vec2::new(3.0, -4.0).is_negative_bitmask(), 0b10);
assert_eq!($vec2::new(-2.0, -6.0).is_negative_bitmask(), 0b11);
});
glam_test!(test_round, {
assert_eq!($vec2::new(1.35, 0.0).round().x, 1.0);
assert_eq!($vec2::new(0.0, 1.5).round().y, 2.0);
assert_eq!($vec2::new(0.0, -15.5).round().y, -16.0);
assert_eq!($vec2::new(0.0, 0.0).round().y, 0.0);
assert_eq!($vec2::new(0.0, 21.1).round().y, 21.0);
assert_eq!($vec2::new(0.0, 11.123).round().y, 11.0);
assert_eq!($vec2::new(0.0, 11.499).round().y, 11.0);
assert_eq!(
$vec2::new(NEG_INFINITY, INFINITY).round(),
$vec2::new(NEG_INFINITY, INFINITY)
);
assert!($vec2::new(NAN, 0.0).round().x.is_nan());
});
glam_test!(test_floor, {
assert_eq!($vec2::new(1.35, -1.5).floor(), $vec2::new(1.0, -2.0));
assert_eq!(
$vec2::new(INFINITY, NEG_INFINITY).floor(),
$vec2::new(INFINITY, NEG_INFINITY)
);
assert!($vec2::new(NAN, 0.0).floor().x.is_nan());
assert_eq!(
$vec2::new(-2000000.123, 10000000.123).floor(),
$vec2::new(-2000001.0, 10000000.0)
);
});
glam_test!(test_fract, {
assert_approx_eq!($vec2::new(1.35, -1.5).fract(), $vec2::new(0.35, 0.5));
assert_approx_eq!(
$vec2::new(-2000000.123, 1000000.123).fract(),
$vec2::new(0.877, 0.123),
0.002
);
});
glam_test!(test_ceil, {
assert_eq!($vec2::new(1.35, -1.5).ceil(), $vec2::new(2.0, -1.0));
assert_eq!(
$vec2::new(INFINITY, NEG_INFINITY).ceil(),
$vec2::new(INFINITY, NEG_INFINITY)
);
assert!($vec2::new(NAN, 0.0).ceil().x.is_nan());
assert_eq!(
$vec2::new(-2000000.123, 1000000.123).ceil(),
$vec2::new(-2000000.0, 1000001.0)
);
});
glam_test!(test_trunc, {
assert_eq!($vec2::new(1.35, -1.5).trunc(), $vec2::new(1.0, -1.0));
assert_eq!(
$vec2::new(INFINITY, NEG_INFINITY).trunc(),
$vec2::new(INFINITY, NEG_INFINITY)
);
assert!($vec2::new(0.0, NAN).trunc().y.is_nan());
assert_eq!(
$vec2::new(-0.0, -2000000.123).trunc(),
$vec2::new(-0.0, -2000000.0)
);
});
glam_test!(test_lerp, {
let v0 = $vec2::new(-1.0, -1.0);
let v1 = $vec2::new(1.0, 1.0);
assert_approx_eq!(v0, v0.lerp(v1, 0.0));
assert_approx_eq!(v1, v0.lerp(v1, 1.0));
assert_approx_eq!($vec2::ZERO, v0.lerp(v1, 0.5));
});
glam_test!(test_is_finite, {
assert!($vec2::new(0.0, 0.0).is_finite());
assert!($vec2::new(-1e-10, 1e10).is_finite());
assert!(!$vec2::new(INFINITY, 0.0).is_finite());
assert!(!$vec2::new(0.0, NAN).is_finite());
assert!(!$vec2::new(0.0, NEG_INFINITY).is_finite());
assert!(!$vec2::new(INFINITY, NEG_INFINITY).is_finite());
assert!(!$vec2::INFINITY.is_finite());
assert!(!$vec2::NEG_INFINITY.is_finite());
});
glam_test!(test_powf, {
assert_eq!($vec2::new(2.0, 4.0).powf(2.0), $vec2::new(4.0, 16.0));
});
glam_test!(test_exp, {
assert_approx_eq!(
$vec2::new(1.0, 2.0).exp(),
$vec2::new((1.0 as $t).exp(), (2.0 as $t).exp())
);
});
glam_test!(test_angle_between, {
let angle = $vec2::new(1.0, 0.0).angle_between($vec2::new(0.0, 1.0));
assert_approx_eq!(core::$t::consts::FRAC_PI_2, angle, 1e-6);
let angle = $vec2::new(10.0, 0.0).angle_between($vec2::new(0.0, 5.0));
assert_approx_eq!(core::$t::consts::FRAC_PI_2, angle, 1e-6);
let angle = $vec2::new(-1.0, 0.0).angle_between($vec2::new(0.0, 1.0));
assert_approx_eq!(-core::$t::consts::FRAC_PI_2, angle, 1e-6);
});
glam_test!(test_clamp_length, {
// Too long gets shortened
assert_eq!(
$vec2::new(12.0, 16.0).clamp_length(7.0, 10.0),
$vec2::new(6.0, 8.0) // shortened to length 10.0
);
// In the middle is unchanged
assert_eq!(
$vec2::new(2.0, 1.0).clamp_length(0.5, 5.0),
$vec2::new(2.0, 1.0) // unchanged
);
// Too short gets lengthened
assert_eq!(
$vec2::new(0.6, 0.8).clamp_length(10.0, 20.0),
$vec2::new(6.0, 8.0) // lengthened to length 10.0
);
should_glam_assert!({ $vec2::ONE.clamp_length(1.0, 0.0) });
});
glam_test!(test_clamp_length_max, {
// Too long gets shortened
assert_eq!(
$vec2::new(12.0, 16.0).clamp_length_max(10.0),
$vec2::new(6.0, 8.0) // shortened to length 10.0
);
// Not too long is unchanged
assert_eq!(
$vec2::new(2.0, 1.0).clamp_length_max(5.0),
$vec2::new(2.0, 1.0) // unchanged
);
});
glam_test!(test_clamp_length_min, {
// Not too short is unchanged
assert_eq!(
$vec2::new(2.0, 1.0).clamp_length_min(0.5),
$vec2::new(2.0, 1.0) // unchanged
);
// Too short gets lengthened
assert_eq!(
$vec2::new(0.6, 0.8).clamp_length_min(10.0),
$vec2::new(6.0, 8.0) // lengthened to length 10.0
);
});
#[cfg(any(feature = "glam-assert", feature = "debug-glam-assert"))]
glam_test!(test_float_glam_assert, {
use std::panic::catch_unwind;
assert!(catch_unwind(|| $vec2::ZERO.normalize()).is_err());
});
glam_test!(test_mul_add, {
assert_eq!(
$vec2::new(1.0, 1.0).mul_add($vec2::new(0.5, 2.0), $vec2::new(-1.0, -1.0)),
$vec2::new(-0.5, 1.0)
);
});
glam_test!(test_from_angle, {
assert_approx_eq!($vec2::from_angle(0.0), $vec2::new(1.0, 0.0));
assert_approx_eq!(
$vec2::from_angle(core::$t::consts::FRAC_PI_2),
$vec2::new(0.0, 1.0)
);
assert_approx_eq!(
$vec2::from_angle(core::$t::consts::PI),
$vec2::new(-1.0, 0.0)
);
assert_approx_eq!(
$vec2::from_angle(-core::$t::consts::FRAC_PI_2),
$vec2::new(0.0, -1.0)
);
});
};
}
/// Generates an exhaustive check that shifting a `$vec2` by a scalar amount
/// is equivalent to shifting each lane individually, over the small ranges
/// `[$t_min, $t_max)` for the lanes and `[$rhs_min, $rhs_max)` for the shift.
macro_rules! impl_vec2_scalar_shift_op_test {
    ($vec2:ident, $t_min:literal, $t_max:literal, $rhs_min:literal, $rhs_max:literal) => {
        glam_test!(test_vec2_scalar_shift_ops, {
            for lhs_x in $t_min..$t_max {
                for lhs_y in $t_min..$t_max {
                    for amount in $rhs_min..$rhs_max {
                        // Vector shift must match the per-lane scalar shift.
                        let lhs = $vec2::new(lhs_x, lhs_y);
                        assert_eq!(lhs << amount, $vec2::new(lhs_x << amount, lhs_y << amount));
                        assert_eq!(lhs >> amount, $vec2::new(lhs_x >> amount, lhs_y >> amount));
                    }
                }
            }
        });
    };
}
/// Instantiates [`impl_vec2_scalar_shift_op_test`] once per supported scalar
/// shift-amount type (i8..i64, u8..u64), each in its own module so the
/// generated `test_vec2_scalar_shift_ops` names do not collide.
macro_rules! impl_vec2_scalar_shift_op_tests {
    ($vec2:ident, $t_min:literal, $t_max:literal) => {
        mod shift_by_i8 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0i8, 2);
        }
        mod shift_by_i16 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0i16, 2);
        }
        mod shift_by_i32 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0i32, 2);
        }
        mod shift_by_i64 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0i64, 2);
        }
        mod shift_by_u8 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0u8, 2);
        }
        mod shift_by_u16 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0u16, 2);
        }
        mod shift_by_u32 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0u32, 2);
        }
        mod shift_by_u64 {
            use glam::$vec2;
            impl_vec2_scalar_shift_op_test!($vec2, $t_min, $t_max, 0u64, 2);
        }
    };
}
/// Generates an exhaustive check that shifting a `$vec2` by another vector
/// (`$rhs`) shifts each lane by the corresponding lane of the right-hand
/// side, over the small range `[$t_min, $t_max)` for all four lane values.
macro_rules! impl_vec2_shift_op_test {
    ($vec2:ident, $rhs:ident, $t_min:literal, $t_max:literal) => {
        glam_test!(test_vec2_shift_ops, {
            for lx in $t_min..$t_max {
                for ly in $t_min..$t_max {
                    for rx in $t_min..$t_max {
                        for ry in $t_min..$t_max {
                            // Per-lane shifts: x by x, y by y.
                            let lhs = $vec2::new(lx, ly);
                            let amount = $rhs::new(rx, ry);
                            assert_eq!(lhs << amount, $vec2::new(lx << rx, ly << ry));
                            assert_eq!(lhs >> amount, $vec2::new(lx >> rx, ly >> ry));
                        }
                    }
                }
            }
        });
    };
}
/// Instantiates [`impl_vec2_shift_op_test`] for vector-by-vector shifts with
/// both `IVec2` and `UVec2` right-hand sides, each in its own module to keep
/// the generated test names distinct.
macro_rules! impl_vec2_shift_op_tests {
    ($vec2:ident) => {
        mod shift_ivec2_by_ivec2 {
            use super::*;
            impl_vec2_shift_op_test!($vec2, IVec2, 0, 2);
        }
        mod shift_ivec2_by_uvec2 {
            use super::*;
            impl_vec2_shift_op_test!($vec2, UVec2, 0, 2);
        }
    };
}
/// Generates an exhaustive check that `&`, `|` and `^` between a `$vec2` and
/// a scalar apply the operation to each lane, over `[$t_min, $t_max)`.
macro_rules! impl_vec2_scalar_bit_op_tests {
    ($vec2:ident, $t_min:literal, $t_max:literal) => {
        glam_test!(test_vec2_scalar_bit_ops, {
            for lhs_x in $t_min..$t_max {
                for lhs_y in $t_min..$t_max {
                    for scalar in $t_min..$t_max {
                        // Each bitwise op must distribute over the lanes.
                        let lhs = $vec2::new(lhs_x, lhs_y);
                        assert_eq!(lhs & scalar, $vec2::new(lhs_x & scalar, lhs_y & scalar));
                        assert_eq!(lhs | scalar, $vec2::new(lhs_x | scalar, lhs_y | scalar));
                        assert_eq!(lhs ^ scalar, $vec2::new(lhs_x ^ scalar, lhs_y ^ scalar));
                    }
                }
            }
        });
    };
}
/// Generates exhaustive checks for vector-by-vector bitwise ops (`!`, `&`,
/// `|`, `^`): each must apply lane-wise, over `[$t_min, $t_max)` per lane.
macro_rules! impl_vec2_bit_op_tests {
    ($vec2:ident, $t_min:literal, $t_max:literal) => {
        glam_test!(test_vec2_bit_ops, {
            for x1 in $t_min..$t_max {
                for y1 in $t_min..$t_max {
                    // Unary NOT only depends on the left-hand side.
                    assert_eq!(!$vec2::new(x1, y1), $vec2::new(!x1, !y1));

                    for x2 in $t_min..$t_max {
                        for y2 in $t_min..$t_max {
                            assert_eq!(
                                $vec2::new(x1, y1) & $vec2::new(x2, y2),
                                $vec2::new(x1 & x2, y1 & y2)
                            );
                            assert_eq!(
                                $vec2::new(x1, y1) | $vec2::new(x2, y2),
                                $vec2::new(x1 | x2, y1 | y2)
                            );
                            assert_eq!(
                                $vec2::new(x1, y1) ^ $vec2::new(x2, y2),
                                $vec2::new(x1 ^ x2, y1 ^ y2)
                            );
                        }
                    }
                }
            }
        });
    };
}
/// Tests for `Vec2` (f32): layout, `as_*` casts, and the full float suite.
mod vec2 {
    use glam::{vec2, BVec2, Vec2, Vec3};

    // Size/alignment depend on the "cuda" feature (CUDA requires 8-byte
    // alignment for float2-compatible types).
    glam_test!(test_align, {
        use core::mem;
        assert_eq!(8, mem::size_of::<Vec2>());
        #[cfg(not(feature = "cuda"))]
        assert_eq!(4, mem::align_of::<Vec2>());
        #[cfg(feature = "cuda")]
        assert_eq!(8, mem::align_of::<Vec2>());
        assert_eq!(2, mem::size_of::<BVec2>());
        assert_eq!(1, mem::align_of::<BVec2>());
    });

    // as_dvec2/as_ivec2/... casts between all vec2 element types; note the
    // unsigned targets are only fed non-negative values here.
    glam_test!(test_as, {
        use glam::{DVec2, I64Vec2, IVec2, U64Vec2, UVec2};
        assert_eq!(DVec2::new(-1.0, -2.0), Vec2::new(-1.0, -2.0).as_dvec2());
        assert_eq!(IVec2::new(-1, -2), Vec2::new(-1.0, -2.0).as_ivec2());
        assert_eq!(UVec2::new(1, 2), Vec2::new(1.0, 2.0).as_uvec2());
        assert_eq!(I64Vec2::new(-1, -2), Vec2::new(-1.0, -2.0).as_i64vec2());
        assert_eq!(U64Vec2::new(1, 2), Vec2::new(1.0, 2.0).as_u64vec2());

        assert_eq!(IVec2::new(-1, -2), DVec2::new(-1.0, -2.0).as_ivec2());
        assert_eq!(UVec2::new(1, 2), DVec2::new(1.0, 2.0).as_uvec2());
        assert_eq!(Vec2::new(-1.0, -2.0), DVec2::new(-1.0, -2.0).as_vec2());
        assert_eq!(I64Vec2::new(-1, -2), DVec2::new(-1.0, -2.0).as_i64vec2());
        assert_eq!(U64Vec2::new(1, 2), DVec2::new(1.0, 2.0).as_u64vec2());

        assert_eq!(DVec2::new(-1.0, -2.0), IVec2::new(-1, -2).as_dvec2());
        assert_eq!(UVec2::new(1, 2), IVec2::new(1, 2).as_uvec2());
        assert_eq!(Vec2::new(-1.0, -2.0), IVec2::new(-1, -2).as_vec2());
        assert_eq!(I64Vec2::new(-1, -2), IVec2::new(-1, -2).as_i64vec2());
        assert_eq!(U64Vec2::new(1, 2), IVec2::new(1, 2).as_u64vec2());

        assert_eq!(DVec2::new(1.0, 2.0), UVec2::new(1, 2).as_dvec2());
        assert_eq!(IVec2::new(1, 2), UVec2::new(1, 2).as_ivec2());
        assert_eq!(Vec2::new(1.0, 2.0), UVec2::new(1, 2).as_vec2());
        assert_eq!(I64Vec2::new(1, 2), UVec2::new(1, 2).as_i64vec2());
        assert_eq!(U64Vec2::new(1, 2), UVec2::new(1, 2).as_u64vec2());

        assert_eq!(DVec2::new(-1.0, -2.0), I64Vec2::new(-1, -2).as_dvec2());
        assert_eq!(UVec2::new(1, 2), I64Vec2::new(1, 2).as_uvec2());
        assert_eq!(Vec2::new(-1.0, -2.0), I64Vec2::new(-1, -2).as_vec2());
        assert_eq!(IVec2::new(-1, -2), I64Vec2::new(-1, -2).as_ivec2());
        assert_eq!(U64Vec2::new(1, 2), I64Vec2::new(1, 2).as_u64vec2());

        assert_eq!(DVec2::new(1.0, 2.0), U64Vec2::new(1, 2).as_dvec2());
        assert_eq!(IVec2::new(1, 2), U64Vec2::new(1, 2).as_ivec2());
        assert_eq!(Vec2::new(1.0, 2.0), U64Vec2::new(1, 2).as_vec2());
        assert_eq!(I64Vec2::new(1, 2), U64Vec2::new(1, 2).as_i64vec2());
        assert_eq!(UVec2::new(1, 2), U64Vec2::new(1, 2).as_uvec2());
    });

    impl_vec2_float_tests!(f32, vec2, Vec2, Vec3, BVec2);
}
/// Tests for `DVec2` (f64): layout, widening `From` conversions, and the
/// full float suite.
mod dvec2 {
    use glam::{dvec2, BVec2, DVec2, DVec3, IVec2, UVec2, Vec2};

    glam_test!(test_align, {
        use core::mem;
        assert_eq!(16, mem::size_of::<DVec2>());
        #[cfg(not(feature = "cuda"))]
        assert_eq!(mem::align_of::<f64>(), mem::align_of::<DVec2>());
        #[cfg(feature = "cuda")]
        assert_eq!(16, mem::align_of::<DVec2>());
        assert_eq!(2, mem::size_of::<BVec2>());
        assert_eq!(1, mem::align_of::<BVec2>());
    });

    // These conversions are lossless widenings, hence infallible `From`.
    glam_test!(test_try_from, {
        assert_eq!(DVec2::new(1.0, 2.0), DVec2::from(Vec2::new(1.0, 2.0)));
        assert_eq!(DVec2::new(1.0, 2.0), DVec2::from(IVec2::new(1, 2)));
        assert_eq!(DVec2::new(1.0, 2.0), DVec2::from(UVec2::new(1, 2)));
    });

    impl_vec2_float_tests!(f64, dvec2, DVec2, DVec3, BVec2);
}
/// Tests for `IVec2` (i32): layout, fallible narrowing conversions, and the
/// signed-integer / hash / shift / bitwise suites.
mod ivec2 {
    use glam::{ivec2, BVec2, I64Vec2, IVec2, IVec3, U64Vec2, UVec2};

    glam_test!(test_align, {
        use core::mem;
        assert_eq!(8, mem::size_of::<IVec2>());
        #[cfg(not(feature = "cuda"))]
        assert_eq!(4, mem::align_of::<IVec2>());
        #[cfg(feature = "cuda")]
        assert_eq!(8, mem::align_of::<IVec2>());
        assert_eq!(2, mem::size_of::<BVec2>());
        assert_eq!(1, mem::align_of::<BVec2>());
    });

    // TryFrom fails per-lane when any lane is out of i32 range.
    glam_test!(test_try_from, {
        assert_eq!(IVec2::new(1, 2), IVec2::try_from(UVec2::new(1, 2)).unwrap());
        assert!(IVec2::try_from(UVec2::new(u32::MAX, 2)).is_err());
        assert!(IVec2::try_from(UVec2::new(1, u32::MAX)).is_err());

        assert_eq!(
            IVec2::new(1, 2),
            IVec2::try_from(I64Vec2::new(1, 2)).unwrap()
        );
        assert!(IVec2::try_from(I64Vec2::new(i64::MAX, 2)).is_err());
        assert!(IVec2::try_from(I64Vec2::new(1, i64::MAX)).is_err());

        assert_eq!(
            IVec2::new(1, 2),
            IVec2::try_from(U64Vec2::new(1, 2)).unwrap()
        );
        assert!(IVec2::try_from(U64Vec2::new(u64::MAX, 2)).is_err());
        assert!(IVec2::try_from(U64Vec2::new(1, u64::MAX)).is_err());
    });

    impl_vec2_signed_integer_tests!(i32, ivec2, IVec2, IVec3, BVec2);
    impl_vec2_eq_hash_tests!(i32, ivec2);
    impl_vec2_scalar_shift_op_tests!(IVec2, -2, 2);
    impl_vec2_shift_op_tests!(IVec2);
    impl_vec2_scalar_bit_op_tests!(IVec2, -2, 2);
    impl_vec2_bit_op_tests!(IVec2, -2, 2);
}
/// Tests for `UVec2` (u32): layout, fallible conversions (negative or
/// out-of-range lanes fail), and the unsigned / hash / shift / bitwise suites.
mod uvec2 {
    use glam::{uvec2, BVec2, I64Vec2, IVec2, U64Vec2, UVec2, UVec3};

    glam_test!(test_align, {
        use core::mem;
        assert_eq!(8, mem::size_of::<UVec2>());
        #[cfg(not(feature = "cuda"))]
        assert_eq!(4, mem::align_of::<UVec2>());
        #[cfg(feature = "cuda")]
        assert_eq!(8, mem::align_of::<UVec2>());
        assert_eq!(2, mem::size_of::<BVec2>());
        assert_eq!(1, mem::align_of::<BVec2>());
    });

    glam_test!(test_try_from, {
        assert_eq!(UVec2::new(1, 2), UVec2::try_from(IVec2::new(1, 2)).unwrap());
        assert!(UVec2::try_from(IVec2::new(-1, 2)).is_err());
        assert!(UVec2::try_from(IVec2::new(1, -2)).is_err());

        assert_eq!(
            UVec2::new(1, 2),
            UVec2::try_from(I64Vec2::new(1, 2)).unwrap()
        );
        assert!(UVec2::try_from(I64Vec2::new(-1, 2)).is_err());
        assert!(UVec2::try_from(I64Vec2::new(1, -2)).is_err());
        assert!(UVec2::try_from(I64Vec2::new(i64::MAX, 2)).is_err());
        assert!(UVec2::try_from(I64Vec2::new(1, i64::MAX)).is_err());

        assert_eq!(
            UVec2::new(1, 2),
            UVec2::try_from(U64Vec2::new(1, 2)).unwrap()
        );
        assert!(UVec2::try_from(U64Vec2::new(u64::MAX, 2)).is_err());
        assert!(UVec2::try_from(U64Vec2::new(1, u64::MAX)).is_err());
    });

    impl_vec2_tests!(u32, uvec2, UVec2, UVec3, BVec2);
    impl_vec2_eq_hash_tests!(u32, uvec2);
    impl_vec2_scalar_shift_op_tests!(UVec2, 0, 2);
    impl_vec2_shift_op_tests!(UVec2);
    impl_vec2_scalar_bit_op_tests!(UVec2, 0, 2);
    impl_vec2_bit_op_tests!(UVec2, 0, 2);
}
/// Tests for `I64Vec2` (i64): layout, conversions (only u64 lanes above
/// i64::MAX fail), and the signed-integer / hash / shift / bitwise suites.
mod i64vec2 {
    use glam::{i64vec2, BVec2, I64Vec2, I64Vec3, IVec2, U64Vec2, UVec2};

    glam_test!(test_align, {
        use core::mem;
        assert_eq!(16, mem::size_of::<I64Vec2>());
        #[cfg(not(feature = "cuda"))]
        assert_eq!(8, mem::align_of::<I64Vec2>());
        #[cfg(feature = "cuda")]
        assert_eq!(16, mem::align_of::<I64Vec2>());
        assert_eq!(2, mem::size_of::<BVec2>());
        assert_eq!(1, mem::align_of::<BVec2>());
    });

    glam_test!(test_try_from, {
        // i32 -> i64 always fits.
        assert_eq!(
            I64Vec2::new(1, 2),
            I64Vec2::try_from(IVec2::new(1, 2)).unwrap()
        );

        assert_eq!(
            I64Vec2::new(1, 2),
            I64Vec2::try_from(U64Vec2::new(1, 2)).unwrap()
        );
        assert!(I64Vec2::try_from(U64Vec2::new(u64::MAX, 2)).is_err());
        assert!(I64Vec2::try_from(U64Vec2::new(1, u64::MAX)).is_err());
    });

    impl_vec2_signed_integer_tests!(i64, i64vec2, I64Vec2, I64Vec3, BVec2);
    impl_vec2_eq_hash_tests!(i64, i64vec2);
    impl_vec2_scalar_shift_op_tests!(I64Vec2, -2, 2);
    impl_vec2_shift_op_tests!(I64Vec2);
    impl_vec2_scalar_bit_op_tests!(I64Vec2, -2, 2);
    impl_vec2_bit_op_tests!(I64Vec2, -2, 2);
}
mod u64vec2 {
    use glam::{u64vec2, BVec2, I64Vec2, IVec2, U64Vec2, U64Vec3, UVec2};
    // Size/alignment contract: U64Vec2 is two u64s (16 bytes); the `cuda`
    // feature raises the alignment from 8 to 16.
    glam_test!(test_align, {
        use core::mem;
        assert_eq!(16, mem::size_of::<U64Vec2>());
        #[cfg(not(feature = "cuda"))]
        assert_eq!(8, mem::align_of::<U64Vec2>());
        #[cfg(feature = "cuda")]
        assert_eq!(16, mem::align_of::<U64Vec2>());
        assert_eq!(2, mem::size_of::<BVec2>());
        assert_eq!(1, mem::align_of::<BVec2>());
    });
    // Widening from u32 always fits; negative i64 components must be rejected.
    glam_test!(test_try_from, {
        assert_eq!(
            U64Vec2::new(1, 2),
            U64Vec2::try_from(UVec2::new(1, 2)).unwrap()
        );
        assert_eq!(
            U64Vec2::new(1, 2),
            U64Vec2::try_from(I64Vec2::new(1, 2)).unwrap()
        );
        assert!(U64Vec2::try_from(I64Vec2::new(-1, 2)).is_err());
        assert!(U64Vec2::try_from(I64Vec2::new(1, -2)).is_err());
    });
    // Shared macro-generated suites for the unsigned-integer vec2 API.
    impl_vec2_tests!(u64, u64vec2, U64Vec2, U64Vec3, BVec2);
    impl_vec2_eq_hash_tests!(u64, u64vec2);
    impl_vec2_scalar_shift_op_tests!(U64Vec2, 0, 2);
    impl_vec2_shift_op_tests!(U64Vec2);
    impl_vec2_scalar_bit_op_tests!(U64Vec2, 0, 2);
    impl_vec2_bit_op_tests!(U64Vec2, 0, 2);
}
|
//Restarted script log at Fri 12 Jul 2013 12:04:45 CEST
getBody(1).select();
|
// svd2rust-generated reader/writer plumbing for power control register CR1:
// one FieldReader/FieldWriter (or BitReader/BitWriter) alias per field.
// Writer aliases carry the field width and the bit offset `O` as generics.
#[doc = "Register `CR1` reader"]
pub type R = crate::R<CR1_SPEC>;
#[doc = "Register `CR1` writer"]
pub type W = crate::W<CR1_SPEC>;
#[doc = "Field `LPMS` reader - Low-power mode selection"]
pub type LPMS_R = crate::FieldReader;
#[doc = "Field `LPMS` writer - Low-power mode selection"]
pub type LPMS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `FPD_STOP` reader - Flash memory powered down during Stop mode"]
pub type FPD_STOP_R = crate::BitReader;
#[doc = "Field `FPD_STOP` writer - Flash memory powered down during Stop mode"]
pub type FPD_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPD_LPRUN` reader - Flash memory powered down during Low-power run mode"]
pub type FPD_LPRUN_R = crate::BitReader;
#[doc = "Field `FPD_LPRUN` writer - Flash memory powered down during Low-power run mode"]
pub type FPD_LPRUN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPD_LPSLP` reader - Flash memory powered down during Low-power sleep mode"]
pub type FPD_LPSLP_R = crate::BitReader;
#[doc = "Field `FPD_LPSLP` writer - Flash memory powered down during Low-power sleep mode"]
pub type FPD_LPSLP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBP` reader - Disable backup domain write protection"]
pub type DBP_R = crate::BitReader;
#[doc = "Field `DBP` writer - Disable backup domain write protection"]
pub type DBP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VOS` reader - Voltage scaling range selection"]
pub type VOS_R = crate::FieldReader;
#[doc = "Field `VOS` writer - Voltage scaling range selection"]
pub type VOS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `LPR` reader - Low-power run"]
pub type LPR_R = crate::BitReader;
#[doc = "Field `LPR` writer - Low-power run"]
pub type LPR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each getter shifts the raw register value down to the
// field's offset and masks off the field's width.
impl R {
    /// Bits 0:2 - Low-power mode selection
    #[inline(always)]
    pub fn lpms(&self) -> LPMS_R {
        LPMS_R::new((self.bits & 0x07) as u8)
    }
    /// Bit 3 - Flash memory powered down during Stop mode
    #[inline(always)]
    pub fn fpd_stop(&self) -> FPD_STOP_R {
        FPD_STOP_R::new(((self.bits >> 3) & 0x01) == 0x01)
    }
    /// Bit 4 - Flash memory powered down during Low-power run mode
    #[inline(always)]
    pub fn fpd_lprun(&self) -> FPD_LPRUN_R {
        FPD_LPRUN_R::new(((self.bits >> 4) & 0x01) == 0x01)
    }
    /// Bit 5 - Flash memory powered down during Low-power sleep mode
    #[inline(always)]
    pub fn fpd_lpslp(&self) -> FPD_LPSLP_R {
        FPD_LPSLP_R::new(((self.bits >> 5) & 0x01) == 0x01)
    }
    /// Bit 8 - Disable backup domain write protection
    #[inline(always)]
    pub fn dbp(&self) -> DBP_R {
        DBP_R::new(((self.bits >> 8) & 0x01) == 0x01)
    }
    /// Bits 9:10 - Voltage scaling range selection
    #[inline(always)]
    pub fn vos(&self) -> VOS_R {
        VOS_R::new(((self.bits >> 9) & 0x03) as u8)
    }
    /// Bit 14 - Low-power run
    #[inline(always)]
    pub fn lpr(&self) -> LPR_R {
        LPR_R::new(((self.bits >> 14) & 0x01) == 0x01)
    }
}
// Write accessors: each method returns a typed field-writer proxy whose bit
// offset within CR1 is carried as the const generic argument; the proxy does
// the actual masking/shifting when the closure passed to `write`/`modify`
// finishes.
impl W {
    #[doc = "Bits 0:2 - Low-power mode selection"]
    #[inline(always)]
    #[must_use]
    pub fn lpms(&mut self) -> LPMS_W<CR1_SPEC, 0> {
        LPMS_W::new(self)
    }
    #[doc = "Bit 3 - Flash memory powered down during Stop mode"]
    #[inline(always)]
    #[must_use]
    pub fn fpd_stop(&mut self) -> FPD_STOP_W<CR1_SPEC, 3> {
        FPD_STOP_W::new(self)
    }
    #[doc = "Bit 4 - Flash memory powered down during Low-power run mode"]
    #[inline(always)]
    #[must_use]
    pub fn fpd_lprun(&mut self) -> FPD_LPRUN_W<CR1_SPEC, 4> {
        FPD_LPRUN_W::new(self)
    }
    #[doc = "Bit 5 - Flash memory powered down during Low-power sleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn fpd_lpslp(&mut self) -> FPD_LPSLP_W<CR1_SPEC, 5> {
        FPD_LPSLP_W::new(self)
    }
    #[doc = "Bit 8 - Disable backup domain write protection"]
    #[inline(always)]
    #[must_use]
    pub fn dbp(&mut self) -> DBP_W<CR1_SPEC, 8> {
        DBP_W::new(self)
    }
    #[doc = "Bits 9:10 - Voltage scaling range selection"]
    #[inline(always)]
    #[must_use]
    pub fn vos(&mut self) -> VOS_W<CR1_SPEC, 9> {
        VOS_W::new(self)
    }
    #[doc = "Bit 14 - Low-power run"]
    #[inline(always)]
    #[must_use]
    pub fn lpr(&mut self) -> LPR_W<CR1_SPEC, 14> {
        LPR_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Power control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR1_SPEC;
// CR1 is a 32-bit register.
impl crate::RegisterSpec for CR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr1::R`](R) reader structure"]
impl crate::Readable for CR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr1::W`](W) writer structure"]
impl crate::Writable for CR1_SPEC {
    // No write-1-to-clear / write-0-to-set fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR1 to value 0x0200"]
impl crate::Resettable for CR1_SPEC {
    const RESET_VALUE: Self::Ux = 0x0200;
}
|
use crate::{Device, Raster};
use js_sys::Error;
use zerocopy::{AsBytes, FromBytes};
// Uniform-style data block describing the raster dimensions.
// repr(C) + 16-byte alignment presumably match the GPU buffer layout the
// shader expects — TODO confirm against the shader side. zerocopy's
// AsBytes/FromBytes allow the struct to be written to the buffer verbatim.
#[repr(align(16), C)]
#[derive(AsBytes, FromBytes, Debug, Default)]
pub struct RasterData {
    width: f32,      // raster width in pixels (as f32 for the shader)
    height: f32,     // raster height in pixels
    inv_width: f32,  // 1.0 / width, precomputed on the CPU
    inv_height: f32, // 1.0 / height, precomputed on the CPU
}
impl Device {
pub(crate) fn update_raster(&mut self, raster: &Raster) -> Result<(), Error> {
let mut data = RasterData::default();
data.width = raster.width as f32;
data.height = raster.height as f32;
data.inv_width = 1.0 / data.width;
data.inv_height = 1.0 / data.height;
self.raster_buffer.write(&data)
}
}
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The CodeMap tracks all the source code used within a single crate, mapping
//! from integer byte positions to the original source code location. Each bit
//! of source parsed during crate parsing (typically files, in-memory strings,
//! or various bits of macro expansion) cover a continuous range of bytes in the
//! CodeMap and are represented by FileMaps. Byte positions are stored in
//! `spans` and used pervasively in the compiler. They are absolute positions
//! within the CodeMap, which upon request can be converted to line and column
//! information, source code snippets, etc.
pub use self::ExpnFormat::*;
use std::cell::RefCell;
use std::path::{Path,PathBuf};
use std::rc::Rc;
use std::env;
use std::fs;
use std::io::{self, Read};
pub use syntax_pos::*;
use errors::CodeMapper;
use ast::Name;
/// Return the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span {
    // The enclosing span's call site never changes while walking, so
    // compute it once up front and iterate instead of recursing.
    let enclosing_call_site =
        cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site));
    let mut cur = sp;
    loop {
        let call_site = cm.with_expn_info(cur.expn_id, |ei| ei.map(|ei| ei.call_site));
        match call_site {
            // Not from an expansion: this is the original span.
            None => return cur,
            Some(cs) => {
                // Stop once we reach the same call site as `enclosing_sp`.
                if enclosing_call_site == Some(cs) {
                    return cur;
                }
                cur = cs;
            }
        }
    }
}
/// The source of expansion.
#[derive(Clone, Hash, Debug, PartialEq, Eq)]
pub enum ExpnFormat {
    /// e.g. #[derive(...)] <item>
    MacroAttribute(Name),
    /// e.g. `format!()`
    MacroBang(Name),
}
/// A value of type `T` together with the source span it came from.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Spanned<T> {
    pub node: T,
    pub span: Span,
}
pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
respan(mk_sp(lo, hi), t)
}
/// Pair the value `t` with the given span `sp`.
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
    Spanned {
        span: sp,
        node: t,
    }
}
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
respan(DUMMY_SP, t)
}
/// Build a span that covers the two provided spans.
///
/// Dummy spans are treated as "absent": combining with one yields the
/// other. The combined span keeps the expansion id only when both inputs
/// agree on it.
pub fn combine_spans(sp1: Span, sp2: Span) -> Span {
    match (sp1 == DUMMY_SP, sp2 == DUMMY_SP) {
        (true, true) => DUMMY_SP,
        (true, false) => sp2,
        (false, true) => sp1,
        (false, false) => {
            let lo = if sp2.lo < sp1.lo { sp2.lo } else { sp1.lo };
            let hi = if sp2.hi > sp1.hi { sp2.hi } else { sp1.hi };
            let expn_id = if sp1.expn_id == sp2.expn_id {
                sp1.expn_id
            } else {
                NO_EXPANSION
            };
            Span { lo: lo, hi: hi, expn_id: expn_id }
        }
    }
}
/// Metadata about a macro callee: how it was invoked, its stability
/// privileges, and (when known) where it was defined.
#[derive(Clone, Hash, Debug)]
pub struct NameAndSpan {
    /// The format with which the macro was invoked.
    pub format: ExpnFormat,
    /// Whether the macro is allowed to use #[unstable]/feature-gated
    /// features internally without forcing the whole crate to opt-in
    /// to them.
    pub allow_internal_unstable: bool,
    /// The span of the macro definition itself. The macro may not
    /// have a sensible definition span (e.g. something defined
    /// completely inside libsyntax) in which case this is None.
    pub span: Option<Span>
}
impl NameAndSpan {
    /// The macro's name, regardless of how it was invoked.
    pub fn name(&self) -> Name {
        match self.format {
            ExpnFormat::MacroAttribute(name) |
            ExpnFormat::MacroBang(name) => name,
        }
    }
}
/// Extra information for tracking spans of macro and syntax sugar expansion
#[derive(Hash, Debug)]
pub struct ExpnInfo {
    /// The location of the actual macro invocation or syntax sugar , e.g.
    /// `let x = foo!();` or `if let Some(y) = x {}`
    ///
    /// This may recursively refer to other macro invocations, e.g. if
    /// `foo!()` invoked `bar!()` internally, and there was an
    /// expression inside `bar!`; the call_site of the expression in
    /// the expansion would point to the `bar!` invocation; that
    /// call_site span would have its own ExpnInfo, with the call_site
    /// pointing to the `foo!` invocation.
    pub call_site: Span,
    /// Information about the expansion.
    pub callee: NameAndSpan
}
// _____________________________________________________________________________
// FileMap, MultiByteChar, FileName, FileLines
//
/// An abstraction over the fs operations used by the Parser.
/// Implementations other than `RealFileLoader` let tests or embedders
/// supply file contents without touching the real filesystem.
pub trait FileLoader {
    /// Query the existence of a file.
    fn file_exists(&self, path: &Path) -> bool;
    /// Return an absolute path to a file, if possible.
    fn abs_path(&self, path: &Path) -> Option<PathBuf>;
    /// Read the contents of an UTF-8 file into memory.
    fn read_file(&self, path: &Path) -> io::Result<String>;
}
/// A FileLoader that uses std::fs to load real files.
pub struct RealFileLoader;
impl FileLoader for RealFileLoader {
    /// A file "exists" if its metadata can be queried.
    fn file_exists(&self, path: &Path) -> bool {
        fs::metadata(path).is_ok()
    }
    /// Absolute paths pass through; relative paths are resolved against
    /// the current working directory (None if that cannot be read).
    fn abs_path(&self, path: &Path) -> Option<PathBuf> {
        if path.is_absolute() {
            return Some(path.to_path_buf());
        }
        env::current_dir().ok().map(|cwd| cwd.join(path))
    }
    /// Read the whole file into a String (must be valid UTF-8).
    fn read_file(&self, path: &Path) -> io::Result<String> {
        let mut file = try!(fs::File::open(path));
        let mut contents = String::new();
        try!(file.read_to_string(&mut contents));
        Ok(contents)
    }
}
// _____________________________________________________________________________
// CodeMap
//
/// Maps absolute byte positions back to files, lines and columns.
pub struct CodeMap {
    /// All registered filemaps, in order of increasing start position.
    pub files: RefCell<Vec<Rc<FileMap>>>,
    /// Recorded macro expansions, indexed by `ExpnId`.
    expansions: RefCell<Vec<ExpnInfo>>,
    /// Filesystem abstraction used by `load_file`.
    file_loader: Box<FileLoader>
}
impl CodeMap {
pub fn new() -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
expansions: RefCell::new(Vec::new()),
file_loader: Box::new(RealFileLoader)
}
}
/// Create an empty CodeMap that reads files through the supplied loader
/// (e.g. an in-memory loader in tests).
pub fn with_file_loader(file_loader: Box<FileLoader>) -> CodeMap {
    CodeMap {
        files: RefCell::new(Vec::new()),
        expansions: RefCell::new(Vec::new()),
        file_loader: file_loader
    }
}
/// True if `path` exists according to the configured file loader.
pub fn file_exists(&self, path: &Path) -> bool {
    self.file_loader.file_exists(path)
}
/// Read `path` via the file loader and register it as a new FileMap.
// NOTE(review): `to_str().unwrap()` panics on non-UTF-8 paths.
pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
    let src = try!(self.file_loader.read_file(path));
    let abs_path = self.file_loader.abs_path(path).map(|p| p.to_str().unwrap().to_string());
    Ok(self.new_filemap(path.to_str().unwrap().to_string(), abs_path, src))
}
/// Absolute start position for the next filemap to be registered:
/// 0 for the first file, otherwise one past the previous file's end.
fn next_start_pos(&self) -> usize {
    let files = self.files.borrow();
    match files.last() {
        None => 0,
        // Add one so there is some space between files. This lets us distinguish
        // positions in the codemap, even in the presence of zero-length files.
        Some(last) => last.end_pos.to_usize() + 1,
    }
}
/// Creates a new filemap without setting its line information. If you don't
/// intend to set the line information yourself, you should use new_filemap_and_lines.
pub fn new_filemap(&self, filename: FileName, abs_path: Option<FileName>,
                   mut src: String) -> Rc<FileMap> {
    let start_pos = self.next_start_pos();
    let mut files = self.files.borrow_mut();
    // Remove utf-8 BOM if any.
    // (The BOM is exactly 3 bytes in UTF-8, hence `drain(..3)`.)
    if src.starts_with("\u{feff}") {
        src.drain(..3);
    }
    let end_pos = start_pos + src.len();
    let filemap = Rc::new(FileMap {
        name: filename,
        abs_path: abs_path,
        src: Some(Rc::new(src)),
        start_pos: Pos::from_usize(start_pos),
        end_pos: Pos::from_usize(end_pos),
        lines: RefCell::new(Vec::new()),
        multibyte_chars: RefCell::new(Vec::new()),
    });
    files.push(filemap.clone());
    filemap
}
/// Creates a new filemap and sets its line information.
pub fn new_filemap_and_lines(&self, filename: &str, abs_path: Option<&str>,
                             src: &str) -> Rc<FileMap> {
    let fm = self.new_filemap(filename.to_string(),
                              abs_path.map(|s| s.to_owned()),
                              src.to_owned());
    // Line starts are recorded as absolute positions (fm.start_pos-based).
    let mut byte_pos: u32 = fm.start_pos.0;
    for line in src.lines() {
        // register the start of this line
        fm.next_line(BytePos(byte_pos));
        // update byte_pos to include this line and the \n at the end
        byte_pos += line.len() as u32 + 1;
    }
    fm
}
/// Allocates a new FileMap representing a source file from an external
/// crate. The source code of such an "imported filemap" is not available,
/// but we still know enough to generate accurate debuginfo location
/// information for things inlined from other crates.
pub fn new_imported_filemap(&self,
                            filename: FileName,
                            abs_path: Option<FileName>,
                            source_len: usize,
                            mut file_local_lines: Vec<BytePos>,
                            mut file_local_multibyte_chars: Vec<MultiByteChar>)
                            -> Rc<FileMap> {
    let start_pos = self.next_start_pos();
    let mut files = self.files.borrow_mut();
    let end_pos = Pos::from_usize(start_pos + source_len);
    let start_pos = Pos::from_usize(start_pos);
    // Rebase the file-local positions into this codemap's global space.
    for pos in &mut file_local_lines {
        *pos = *pos + start_pos;
    }
    for mbc in &mut file_local_multibyte_chars {
        mbc.pos = mbc.pos + start_pos;
    }
    let filemap = Rc::new(FileMap {
        name: filename,
        abs_path: abs_path,
        src: None,
        start_pos: start_pos,
        end_pos: end_pos,
        lines: RefCell::new(file_local_lines),
        multibyte_chars: RefCell::new(file_local_multibyte_chars),
    });
    files.push(filemap.clone());
    filemap
}
/// Build a `<file:line:col>` style label for the start of `sp`.
/// Columns are reported 1-based.
pub fn mk_substr_filename(&self, sp: Span) -> String {
    let pos = self.lookup_char_pos(sp.lo);
    // `format!` already yields a String; the previous
    // `(format!(..)).to_string()` re-allocated the result for nothing.
    format!("<{}:{}:{}>",
            pos.file.name,
            pos.line,
            pos.col.to_usize() + 1)
}
/// Lookup source information about a BytePos
// Column arithmetic is done in character positions, so the absolute byte
// position is converted first via bytepos_to_file_charpos.
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
    let chpos = self.bytepos_to_file_charpos(pos);
    match self.lookup_line(pos) {
        Ok(FileMapAndLine { fm: f, line: a }) => {
            let line = a + 1; // Line numbers start at 1
            let linebpos = (*f.lines.borrow())[a];
            let linechpos = self.bytepos_to_file_charpos(linebpos);
            debug!("byte pos {:?} is on the line at byte pos {:?}",
                   pos, linebpos);
            debug!("char pos {:?} is on the line at char pos {:?}",
                   chpos, linechpos);
            debug!("byte is on line: {}", line);
            assert!(chpos >= linechpos);
            Loc {
                file: f,
                line: line,
                col: chpos - linechpos,
            }
        }
        // Filemap with no recorded lines (e.g. an empty file): report
        // line 0 and the file-relative char position as the column.
        Err(f) => {
            Loc {
                file: f,
                line: 0,
                col: chpos,
            }
        }
    }
}
// If the relevant filemap is empty, we don't return a line number.
// Otherwise, binary-search for the greatest recorded line start <= pos
// (0-based line index).
fn lookup_line(&self, pos: BytePos) -> Result<FileMapAndLine, Rc<FileMap>> {
    let idx = self.lookup_filemap_idx(pos);
    let files = self.files.borrow();
    let f = (*files)[idx].clone();
    let len = f.lines.borrow().len();
    if len == 0 {
        return Err(f);
    }
    let mut a = 0;
    {
        let lines = f.lines.borrow();
        let mut b = lines.len();
        while b - a > 1 {
            let m = (a + b) / 2;
            if (*lines)[m] > pos {
                b = m;
            } else {
                a = m;
            }
        }
        // NOTE(review): `a < lines.len()` always holds here; this assert
        // is trivially true since `a` only moves below `b <= len`.
        assert!(a <= lines.len());
    }
    Ok(FileMapAndLine { fm: f, line: a })
}
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
filename: loc.file.name.to_string(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
}
/// Render `sp` as `file:line:col: line:col` (1-based columns), with
/// special cases for the command-line span and the dummy span on an
/// empty codemap.
pub fn span_to_string(&self, sp: Span) -> String {
    if sp == COMMAND_LINE_SP {
        return "<command line option>".to_string();
    }
    if self.files.borrow().is_empty() && sp.source_equal(&DUMMY_SP) {
        return "no-location".to_string();
    }
    let lo = self.lookup_char_pos_adj(sp.lo);
    let hi = self.lookup_char_pos_adj(sp.hi);
    // `format!` already returns a String; the previous
    // `return (format!(..)).to_string()` performed a useless extra
    // allocation and copy.
    format!("{}:{}:{}: {}:{}",
            lo.filename,
            lo.line,
            lo.col.to_usize() + 1,
            hi.line,
            hi.col.to_usize() + 1)
}
// Returns true if two spans have the same callee
// (Assumes the same ExpnFormat implies same callee)
fn match_callees(&self, sp_a: &Span, sp_b: &Span) -> bool {
    // Factor the lookup into a local closure instead of spelling it twice.
    let callee_fmt = |sp: &Span| {
        self.with_expn_info(sp.expn_id,
                            |ei| ei.map(|ei| ei.callee.format.clone()))
    };
    callee_fmt(sp_a) == callee_fmt(sp_b)
}
/// Returns a formatted string showing the expansion chain of a span
///
/// Spans are printed in the following format:
///
/// filename:start_line:col: end_line:col
/// snippet
/// Callee:
/// Callee span
/// Callsite:
/// Callsite span
///
/// Callees and callsites are printed recursively (if available, otherwise header
/// and span is omitted), expanding into their own callee/callsite spans.
/// Each layer of recursion has an increased indent, and snippets are truncated
/// to at most 50 characters. Finally, recursive calls to the same macro are squashed,
/// with '...' used to represent any number of recursive calls.
pub fn span_to_expanded_string(&self, sp: Span) -> String {
    // Entry point: the recursion (with a growing indent) happens in the
    // _internal variant below.
    self.span_to_expanded_string_internal(sp, "")
}
// Recursive worker for span_to_expanded_string: prints this span plus,
// indented one level deeper, its callee and callsite chains.
fn span_to_expanded_string_internal(&self, sp:Span, indent: &str) -> String {
    let mut indent = indent.to_owned();
    let mut output = "".to_owned();
    let span_str = self.span_to_string(sp);
    let mut span_snip = self.span_to_snippet(sp)
        .unwrap_or("Snippet unavailable".to_owned());
    // Truncate by code points - in worst case this will be more than 50 characters,
    // but ensures at least 50 characters and respects byte boundaries.
    let char_vec: Vec<(usize, char)> = span_snip.char_indices().collect();
    if char_vec.len() > 50 {
        span_snip.truncate(char_vec[49].0);
        span_snip.push_str("...");
    }
    output.push_str(&format!("{}{}\n{}`{}`\n", indent, span_str, indent, span_snip));
    // No expansion info: this span is the end of the chain.
    if sp.expn_id == NO_EXPANSION || sp.expn_id == COMMAND_LINE_EXPN {
        return output;
    }
    let mut callee = self.with_expn_info(sp.expn_id,
                                         |ei| ei.and_then(|ei| ei.callee.span.clone()));
    let mut callsite = self.with_expn_info(sp.expn_id,
                                           |ei| ei.map(|ei| ei.call_site.clone()));
    indent.push_str(" ");
    let mut is_recursive = false;
    // Squash consecutive recursive invocations of the same macro; the run
    // is rendered as a single "..." marker below.
    while callee.is_some() && self.match_callees(&sp, &callee.unwrap()) {
        callee = self.with_expn_info(callee.unwrap().expn_id,
                                     |ei| ei.and_then(|ei| ei.callee.span.clone()));
        is_recursive = true;
    }
    if let Some(span) = callee {
        output.push_str(&indent);
        output.push_str("Callee:\n");
        if is_recursive {
            output.push_str(&indent);
            output.push_str("...\n");
        }
        output.push_str(&(self.span_to_expanded_string_internal(span, &indent)));
    }
    is_recursive = false;
    // Same squashing for the callsite chain.
    while callsite.is_some() && self.match_callees(&sp, &callsite.unwrap()) {
        callsite = self.with_expn_info(callsite.unwrap().expn_id,
                                       |ei| ei.map(|ei| ei.call_site.clone()));
        is_recursive = true;
    }
    if let Some(span) = callsite {
        output.push_str(&indent);
        output.push_str("Callsite:\n");
        if is_recursive {
            output.push_str(&indent);
            output.push_str("...\n");
        }
        output.push_str(&(self.span_to_expanded_string_internal(span, &indent)));
    }
    output
}
/// Return the source span - this is either the supplied span, or the span for
/// the macro callsite that expanded to it.
pub fn source_callsite(&self, sp: Span) -> Span {
    let mut span = sp;
    // Special case - if a macro is parsed as an argument to another macro, the source
    // callsite is the first callsite, which is also source-equivalent to the span.
    let mut first = true;
    while span.expn_id != NO_EXPANSION && span.expn_id != COMMAND_LINE_EXPN {
        if let Some(callsite) = self.with_expn_info(span.expn_id,
                                                    |ei| ei.map(|ei| ei.call_site.clone())) {
            if first && span.source_equal(&callsite) {
                if self.lookup_char_pos(span.lo).file.is_real_file() {
                    // Strip the expansion id so the result reads as plain source.
                    return Span { expn_id: NO_EXPANSION, .. span };
                }
            }
            first = false;
            span = callsite;
        }
        else {
            break;
        }
    }
    span
}
/// Return the source callee.
///
/// Returns None if the supplied span has no expansion trace,
/// else returns the NameAndSpan for the macro definition
/// corresponding to the source callsite.
pub fn source_callee(&self, sp: Span) -> Option<NameAndSpan> {
    let mut span = sp;
    // Special case - if a macro is parsed as an argument to another macro, the source
    // callsite is source-equivalent to the span, and the source callee is the first callee.
    let mut first = true;
    while let Some(callsite) = self.with_expn_info(span.expn_id,
                                                   |ei| ei.map(|ei| ei.call_site.clone())) {
        if first && span.source_equal(&callsite) {
            if self.lookup_char_pos(span.lo).file.is_real_file() {
                return self.with_expn_info(span.expn_id,
                                           |ei| ei.map(|ei| ei.callee.clone()));
            }
        }
        first = false;
        // Advance only while the next callsite itself has expansion info,
        // so the callee reported is the one at the top of the chain.
        if let Some(_) = self.with_expn_info(callsite.expn_id,
                                             |ei| ei.map(|ei| ei.call_site.clone())) {
            span = callsite;
        }
        else {
            return self.with_expn_info(span.expn_id,
                                       |ei| ei.map(|ei| ei.callee.clone()));
        }
    }
    None
}
/// Name of the file containing the start of `sp`.
pub fn span_to_filename(&self, sp: Span) -> FileName {
    let loc = self.lookup_char_pos(sp.lo);
    loc.file.name.clone()
}
/// Compute the per-line extents covered by `sp`.
///
/// Errors if the span is ill-formed (lo > hi) or straddles two
/// different filemaps.
pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
    debug!("span_to_lines(sp={:?})", sp);
    if sp.lo > sp.hi {
        return Err(SpanLinesError::IllFormedSpan(sp));
    }
    let lo = self.lookup_char_pos(sp.lo);
    debug!("span_to_lines: lo={:?}", lo);
    let hi = self.lookup_char_pos(sp.hi);
    debug!("span_to_lines: hi={:?}", hi);
    if lo.file.start_pos != hi.file.start_pos {
        return Err(SpanLinesError::DistinctSources(DistinctSources {
            begin: (lo.file.name.clone(), lo.file.start_pos),
            end: (hi.file.name.clone(), hi.file.start_pos),
        }));
    }
    assert!(hi.line >= lo.line);
    let mut lines = Vec::with_capacity(hi.line - lo.line + 1);
    // The span starts partway through the first line,
    // but after that it starts from offset 0.
    let mut start_col = lo.col;
    // For every line but the last, it extends from `start_col`
    // and to the end of the line. Be careful because the line
    // numbers in Loc are 1-based, so we subtract 1 to get 0-based
    // lines.
    for line_index in lo.line-1 .. hi.line-1 {
        let line_len = lo.file.get_line(line_index)
                              .map(|s| s.chars().count())
                              .unwrap_or(0);
        lines.push(LineInfo { line_index: line_index,
                              start_col: start_col,
                              end_col: CharPos::from_usize(line_len) });
        start_col = CharPos::from_usize(0);
    }
    // For the last line, it extends from `start_col` to `hi.col`:
    lines.push(LineInfo { line_index: hi.line - 1,
                          start_col: start_col,
                          end_col: hi.col });
    Ok(FileLines {file: lo.file, lines: lines})
}
/// Extract the source text covered by `sp`.
///
/// Errors if the span is ill-formed, crosses filemaps, is out of range,
/// or the containing filemap has no source (imported from another crate).
pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
    if sp.lo > sp.hi {
        return Err(SpanSnippetError::IllFormedSpan(sp));
    }
    let local_begin = self.lookup_byte_offset(sp.lo);
    let local_end = self.lookup_byte_offset(sp.hi);
    if local_begin.fm.start_pos != local_end.fm.start_pos {
        return Err(SpanSnippetError::DistinctSources(DistinctSources {
            begin: (local_begin.fm.name.clone(),
                    local_begin.fm.start_pos),
            end: (local_end.fm.name.clone(),
                  local_end.fm.start_pos)
        }));
    } else {
        match local_begin.fm.src {
            Some(ref src) => {
                let start_index = local_begin.pos.to_usize();
                let end_index = local_end.pos.to_usize();
                let source_len = (local_begin.fm.end_pos -
                                  local_begin.fm.start_pos).to_usize();
                // Guard the slice below against stale or corrupt positions.
                if start_index > end_index || end_index > source_len {
                    return Err(SpanSnippetError::MalformedForCodemap(
                        MalformedCodemapPositions {
                            name: local_begin.fm.name.clone(),
                            source_len: source_len,
                            begin_pos: local_begin.pos,
                            end_pos: local_end.pos,
                        }));
                }
                return Ok((&src[start_index..end_index]).to_string())
            }
            None => {
                return Err(SpanSnippetError::SourceNotAvailable {
                    filename: local_begin.fm.name.clone()
                });
            }
        }
    }
}
/// Look up a registered filemap by name.
pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
    self.files
        .borrow()
        .iter()
        .find(|fm| fm.name == filename)
        .map(|fm| fm.clone())
}
/// For a global BytePos compute the local offset within the containing FileMap
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
    let idx = self.lookup_filemap_idx(bpos);
    let fm = (*self.files.borrow())[idx].clone();
    // Offset is relative to the filemap's own start position.
    let offset = bpos - fm.start_pos;
    FileMapAndBytePos {fm: fm, pos: offset}
}
/// Converts an absolute BytePos to a CharPos relative to the filemap.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
    let idx = self.lookup_filemap_idx(bpos);
    let files = self.files.borrow();
    let map = &(*files)[idx];
    // The number of extra bytes due to multibyte chars in the FileMap
    let mut total_extra_bytes = 0;
    // multibyte_chars is iterated in order; once we pass bpos there can
    // be no earlier multibyte char left, so we can stop.
    for mbc in map.multibyte_chars.borrow().iter() {
        debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
        if mbc.pos < bpos {
            // every character is at least one byte, so we only
            // count the actual extra bytes.
            total_extra_bytes += mbc.bytes - 1;
            // We should never see a byte position in the middle of a
            // character
            assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
        } else {
            break;
        }
    }
    assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
    CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
}
// Return the index of the filemap (in self.files) which contains pos.
// Relies on self.files being sorted by start_pos (see next_start_pos).
fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
    let files = self.files.borrow();
    let files = &*files;
    let count = files.len();
    // Binary search for the filemap.
    let mut a = 0;
    let mut b = count;
    while b - a > 1 {
        let m = (a + b) / 2;
        if files[m].start_pos > pos {
            b = m;
        } else {
            a = m;
        }
    }
    assert!(a < count, "position {} does not resolve to a source location", pos.to_usize());
    return a;
}
/// Register expansion info and return its id. ExpnIds are indices into
/// the `expansions` table, assigned in insertion order.
pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId {
    let mut expansions = self.expansions.borrow_mut();
    expansions.push(expn_info);
    let len = expansions.len();
    // ExpnId is a u32 index; bail out rather than silently truncate.
    if len > u32::max_value() as usize {
        panic!("too many ExpnInfo's!");
    }
    ExpnId(len as u32 - 1)
}
/// Run `f` with the ExpnInfo recorded for `id`, or with None for the
/// sentinel ids that carry no recorded expansion.
pub fn with_expn_info<T, F>(&self, id: ExpnId, f: F) -> T where
    F: FnOnce(Option<&ExpnInfo>) -> T,
{
    match id {
        NO_EXPANSION | COMMAND_LINE_EXPN => f(None),
        ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize]))
    }
}
/// Check if a span is "internal" to a macro in which #[unstable]
/// items can be used (that is, a macro marked with
/// `#[allow_internal_unstable]`).
pub fn span_allows_unstable(&self, span: Span) -> bool {
    debug!("span_allows_unstable(span = {:?})", span);
    let mut allows_unstable = false;
    let mut expn_id = span.expn_id;
    // Walk up the expansion chain until we find the expansion the span
    // actually comes from, then read that callee's flag.
    loop {
        let quit = self.with_expn_info(expn_id, |expninfo| {
            debug!("span_allows_unstable: expninfo = {:?}", expninfo);
            expninfo.map_or(/* hit the top level */ true, |info| {
                let span_comes_from_this_expansion =
                    info.callee.span.map_or(span.source_equal(&info.call_site), |mac_span| {
                        mac_span.contains(span)
                    });
                debug!("span_allows_unstable: span: {:?} call_site: {:?} callee: {:?}",
                       (span.lo, span.hi),
                       (info.call_site.lo, info.call_site.hi),
                       info.callee.span.map(|x| (x.lo, x.hi)));
                debug!("span_allows_unstable: from this expansion? {}, allows unstable? {}",
                       span_comes_from_this_expansion,
                       info.callee.allow_internal_unstable);
                if span_comes_from_this_expansion {
                    allows_unstable = info.callee.allow_internal_unstable;
                    // we've found the right place, stop looking
                    true
                } else {
                    // not the right place, keep looking
                    expn_id = info.call_site.expn_id;
                    false
                }
            })
        });
        if quit {
            break
        }
    }
    debug!("span_allows_unstable? {}", allows_unstable);
    allows_unstable
}
/// Total number of lines recorded across all filemaps.
pub fn count_lines(&self) -> usize {
    let mut total = 0;
    for fm in self.files.borrow().iter() {
        total += fm.count_lines();
    }
    total
}
/// Collect the chain of macro invocations that produced `span`,
/// innermost first; directly repeated invocations are not re-pushed.
pub fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace> {
    let mut last_span = DUMMY_SP;
    let mut span = span;
    let mut result = vec![];
    loop {
        let span_name_span = self.with_expn_info(span.expn_id, |expn_info| {
            expn_info.map(|ei| {
                // Render the macro name in its invocation syntax
                // (`#[attr]` vs `name!`).
                let (pre, post) = match ei.callee.format {
                    MacroAttribute(..) => ("#[", "]"),
                    MacroBang(..) => ("", "!"),
                };
                let macro_decl_name = format!("{}{}{}",
                                              pre,
                                              ei.callee.name(),
                                              post);
                let def_site_span = ei.callee.span;
                (ei.call_site, macro_decl_name, def_site_span)
            })
        });
        match span_name_span {
            None => break,
            Some((call_site, macro_decl_name, def_site_span)) => {
                // Don't print recursive invocations
                if !call_site.source_equal(&last_span) {
                    result.push(MacroBacktrace {
                        call_site: call_site,
                        macro_decl_name: macro_decl_name,
                        def_site_span: def_site_span,
                    });
                }
                last_span = span;
                span = call_site;
            }
        }
    }
    result
}
}
// The CodeMapper trait (consumed by the `errors` machinery) is satisfied
// by forwarding to the inherent CodeMap methods of the same names.
impl CodeMapper for CodeMap {
    fn lookup_char_pos(&self, pos: BytePos) -> Loc {
        self.lookup_char_pos(pos)
    }
    fn span_to_lines(&self, sp: Span) -> FileLinesResult {
        self.span_to_lines(sp)
    }
    fn span_to_string(&self, sp: Span) -> String {
        self.span_to_string(sp)
    }
    fn span_to_filename(&self, sp: Span) -> FileName {
        self.span_to_filename(sp)
    }
    fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace> {
        self.macro_backtrace(span)
    }
}
// _____________________________________________________________________________
// Tests
//
#[cfg(test)]
mod tests {
    // Unit tests for `CodeMap`/`FileMap`: byte/char position arithmetic,
    // multi-byte handling, span rendering, and macro-expansion formatting.
    use super::*;
    use std::rc::Rc;

    #[test]
    fn t1 () {
        let cm = CodeMap::new();
        let fm = cm.new_filemap("blork.rs".to_string(),
                                None,
                                "first line.\nsecond line".to_string());
        fm.next_line(BytePos(0));
        // Test we can get lines with partial line info.
        assert_eq!(fm.get_line(0), Some("first line."));
        // TESTING BROKEN BEHAVIOR: line break declared before actual line break.
        fm.next_line(BytePos(10));
        assert_eq!(fm.get_line(1), Some("."));
        fm.next_line(BytePos(12));
        assert_eq!(fm.get_line(2), Some("second line"));
    }

    #[test]
    #[should_panic]
    fn t2 () {
        let cm = CodeMap::new();
        let fm = cm.new_filemap("blork.rs".to_string(),
                                None,
                                "first line.\nsecond line".to_string());
        // TESTING *REALLY* BROKEN BEHAVIOR:
        fm.next_line(BytePos(0));
        fm.next_line(BytePos(10));
        fm.next_line(BytePos(2));
    }

    /// Builds a `CodeMap` containing three filemaps — two with content and
    /// one empty — with line boundaries registered, for the lookup tests.
    fn init_code_map() -> CodeMap {
        let cm = CodeMap::new();
        let fm1 = cm.new_filemap("blork.rs".to_string(),
                                 None,
                                 "first line.\nsecond line".to_string());
        let fm2 = cm.new_filemap("empty.rs".to_string(),
                                 None,
                                 "".to_string());
        let fm3 = cm.new_filemap("blork2.rs".to_string(),
                                 None,
                                 "first line.\nsecond line".to_string());
        fm1.next_line(BytePos(0));
        fm1.next_line(BytePos(12));
        fm2.next_line(fm2.start_pos);
        fm3.next_line(fm3.start_pos);
        fm3.next_line(fm3.start_pos + BytePos(12));
        cm
    }

    #[test]
    fn t3() {
        // Test lookup_byte_offset
        let cm = init_code_map();
        let fmabp1 = cm.lookup_byte_offset(BytePos(23));
        assert_eq!(fmabp1.fm.name, "blork.rs");
        assert_eq!(fmabp1.pos, BytePos(23));
        let fmabp1 = cm.lookup_byte_offset(BytePos(24));
        assert_eq!(fmabp1.fm.name, "empty.rs");
        assert_eq!(fmabp1.pos, BytePos(0));
        let fmabp2 = cm.lookup_byte_offset(BytePos(25));
        assert_eq!(fmabp2.fm.name, "blork2.rs");
        assert_eq!(fmabp2.pos, BytePos(0));
    }

    #[test]
    fn t4() {
        // Test bytepos_to_file_charpos
        let cm = init_code_map();
        let cp1 = cm.bytepos_to_file_charpos(BytePos(22));
        assert_eq!(cp1, CharPos(22));
        let cp2 = cm.bytepos_to_file_charpos(BytePos(25));
        assert_eq!(cp2, CharPos(0));
    }

    #[test]
    fn t5() {
        // Test zero-length filemaps.
        let cm = init_code_map();
        let loc1 = cm.lookup_char_pos(BytePos(22));
        assert_eq!(loc1.file.name, "blork.rs");
        assert_eq!(loc1.line, 2);
        assert_eq!(loc1.col, CharPos(10));
        let loc2 = cm.lookup_char_pos(BytePos(25));
        assert_eq!(loc2.file.name, "blork2.rs");
        assert_eq!(loc2.line, 1);
        assert_eq!(loc2.col, CharPos(0));
    }

    /// Builds a `CodeMap` whose filemaps contain multi-byte (3-byte UTF-8)
    /// characters, with each multibyte position recorded.
    fn init_code_map_mbc() -> CodeMap {
        let cm = CodeMap::new();
        // € is a three byte utf8 char.
        let fm1 =
            cm.new_filemap("blork.rs".to_string(),
                           None,
                           "fir€st €€€€ line.\nsecond line".to_string());
        let fm2 = cm.new_filemap("blork2.rs".to_string(),
                                 None,
                                 "first line€€.\n€ second line".to_string());
        fm1.next_line(BytePos(0));
        fm1.next_line(BytePos(28));
        fm2.next_line(fm2.start_pos);
        fm2.next_line(fm2.start_pos + BytePos(20));
        fm1.record_multibyte_char(BytePos(3), 3);
        fm1.record_multibyte_char(BytePos(9), 3);
        fm1.record_multibyte_char(BytePos(12), 3);
        fm1.record_multibyte_char(BytePos(15), 3);
        fm1.record_multibyte_char(BytePos(18), 3);
        fm2.record_multibyte_char(fm2.start_pos + BytePos(10), 3);
        fm2.record_multibyte_char(fm2.start_pos + BytePos(13), 3);
        fm2.record_multibyte_char(fm2.start_pos + BytePos(18), 3);
        cm
    }

    #[test]
    fn t6() {
        // Test bytepos_to_file_charpos in the presence of multi-byte chars
        let cm = init_code_map_mbc();
        let cp1 = cm.bytepos_to_file_charpos(BytePos(3));
        assert_eq!(cp1, CharPos(3));
        let cp2 = cm.bytepos_to_file_charpos(BytePos(6));
        assert_eq!(cp2, CharPos(4));
        let cp3 = cm.bytepos_to_file_charpos(BytePos(56));
        assert_eq!(cp3, CharPos(12));
        let cp4 = cm.bytepos_to_file_charpos(BytePos(61));
        assert_eq!(cp4, CharPos(15));
    }

    #[test]
    fn t7() {
        // Test span_to_lines for a span ending at the end of filemap
        let cm = init_code_map();
        let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION};
        let file_lines = cm.span_to_lines(span).unwrap();
        assert_eq!(file_lines.file.name, "blork.rs");
        assert_eq!(file_lines.lines.len(), 1);
        assert_eq!(file_lines.lines[0].line_index, 1);
    }

    /// Given a string like "      ~~~~~~~~~~~~ ", produces a span
    /// covering that range. The idea is that the string has the same
    /// length as the input, and we uncover the byte positions. Note
    /// that this can span lines and so on.
    fn span_from_selection(input: &str, selection: &str) -> Span {
        assert_eq!(input.len(), selection.len());
        let left_index = selection.find('~').unwrap() as u32;
        let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
        Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION }
    }

    /// Test span_to_snippet and span_to_lines for a span covering 3
    /// lines in the middle of a file.
    #[test]
    fn span_to_snippet_and_lines_spanning_multiple_lines() {
        let cm = CodeMap::new();
        let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
        let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
        cm.new_filemap_and_lines("blork.rs", None, inputtext);
        let span = span_from_selection(inputtext, selection);
        // check that we are extracting the text we thought we were extracting
        assert_eq!(&cm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");
        // check that span_to_lines gives us the complete result with the lines/cols we expected
        let lines = cm.span_to_lines(span).unwrap();
        let expected = vec![
            LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) },
            LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) },
            LineInfo { line_index: 3, start_col: CharPos(0), end_col: CharPos(5) }
            ];
        assert_eq!(lines.lines, expected);
    }

    #[test]
    fn t8() {
        // Test span_to_snippet for a span ending at the end of filemap
        let cm = init_code_map();
        let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION};
        let snippet = cm.span_to_snippet(span);
        assert_eq!(snippet, Ok("second line".to_string()));
    }

    #[test]
    fn t9() {
        // Test span_to_str for a span ending at the end of filemap
        let cm = init_code_map();
        let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION};
        let sstr = cm.span_to_string(span);
        assert_eq!(sstr, "blork.rs:2:1: 2:12");
    }

    #[test]
    fn t10() {
        // Test span_to_expanded_string works in base case (no expansion)
        let cm = init_code_map();
        let span = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
        let sstr = cm.span_to_expanded_string(span);
        assert_eq!(sstr, "blork.rs:1:1: 1:12\n`first line.`\n");
        let span = Span { lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION };
        let sstr = cm.span_to_expanded_string(span);
        assert_eq!(sstr, "blork.rs:2:1: 2:12\n`second line`\n");
    }

    #[test]
    fn t11() {
        // Test span_to_expanded_string works with expansion
        use ast::Name;
        let cm = init_code_map();
        let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
        let format = ExpnFormat::MacroBang(Name(0u32));
        let callee = NameAndSpan { format: format,
                                   allow_internal_unstable: false,
                                   span: None };
        let info = ExpnInfo { call_site: root, callee: callee };
        let id = cm.record_expansion(info);
        let sp = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id };
        let sstr = cm.span_to_expanded_string(sp);
        assert_eq!(sstr,
                   "blork.rs:2:1: 2:12\n`second line`\n  Callsite:\n  \
                   blork.rs:1:1: 1:12\n  `first line.`\n");
    }

    /// Returns the span corresponding to the `n`th occurrence of
    /// `substring` in `source_text`.
    trait CodeMapExtension {
        fn span_substr(&self,
                       file: &Rc<FileMap>,
                       source_text: &str,
                       substring: &str,
                       n: usize)
                       -> Span;
    }

    impl CodeMapExtension for CodeMap {
        fn span_substr(&self,
                       file: &Rc<FileMap>,
                       source_text: &str,
                       substring: &str,
                       n: usize)
                       -> Span
        {
            println!("span_substr(file={:?}/{:?}, substring={:?}, n={})",
                     file.name, file.start_pos, substring, n);
            let mut i = 0;
            let mut hi = 0;
            loop {
                let offset = source_text[hi..].find(substring).unwrap_or_else(|| {
                    panic!("source_text `{}` does not have {} occurrences of `{}`, only {}",
                           source_text, n, substring, i);
                });
                let lo = hi + offset;
                hi = lo + substring.len();
                if i == n {
                    let span = Span {
                        lo: BytePos(lo as u32 + file.start_pos.0),
                        hi: BytePos(hi as u32 + file.start_pos.0),
                        expn_id: NO_EXPANSION,
                    };
                    assert_eq!(&self.span_to_snippet(span).unwrap()[..],
                               substring);
                    return span;
                }
                i += 1;
            }
        }
    }

    /// Builds a nested macro-expansion chain and returns the innermost span.
    fn init_expansion_chain(cm: &CodeMap) -> Span {
        // Creates an expansion chain containing two recursive calls
        // root -> expA -> expA -> expB -> expB -> end
        use ast::Name;
        let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
        let format_root = ExpnFormat::MacroBang(Name(0u32));
        let callee_root = NameAndSpan { format: format_root,
                                        allow_internal_unstable: false,
                                        span: Some(root) };
        let info_a1 = ExpnInfo { call_site: root, callee: callee_root };
        let id_a1 = cm.record_expansion(info_a1);
        let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 };
        let format_a = ExpnFormat::MacroBang(Name(1u32));
        let callee_a = NameAndSpan { format: format_a,
                                     allow_internal_unstable: false,
                                     span: Some(span_a1) };
        let info_a2 = ExpnInfo { call_site: span_a1, callee: callee_a.clone() };
        let id_a2 = cm.record_expansion(info_a2);
        let span_a2 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a2 };
        let info_b1 = ExpnInfo { call_site: span_a2, callee: callee_a };
        let id_b1 = cm.record_expansion(info_b1);
        let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 };
        let format_b = ExpnFormat::MacroBang(Name(2u32));
        let callee_b = NameAndSpan { format: format_b,
                                     allow_internal_unstable: false,
                                     span: None };
        let info_b2 = ExpnInfo { call_site: span_b1, callee: callee_b.clone() };
        let id_b2 = cm.record_expansion(info_b2);
        let span_b2 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b2 };
        let info_end = ExpnInfo { call_site: span_b2, callee: callee_b };
        let id_end = cm.record_expansion(info_end);
        Span { lo: BytePos(37), hi: BytePos(48), expn_id: id_end }
    }

    #[test]
    fn t12() {
        // Test span_to_expanded_string collapses recursive macros and handles
        // recursive callsite and callee expansions
        let cm = init_code_map();
        let end = init_expansion_chain(&cm);
        let sstr = cm.span_to_expanded_string(end);
        let res_str =
r"blork2.rs:2:1: 2:12
`second line`
Callsite:
...
blork2.rs:1:1: 1:12
`first line.`
Callee:
blork.rs:2:1: 2:12
`second line`
Callee:
blork.rs:1:1: 1:12
`first line.`
Callsite:
blork.rs:1:1: 1:12
`first line.`
Callsite:
...
blork.rs:2:1: 2:12
`second line`
Callee:
blork.rs:1:1: 1:12
`first line.`
Callsite:
blork.rs:1:1: 1:12
`first line.`
";
        assert_eq!(sstr, res_str);
    }
}
|
mod args;
pub use args::Args;
mod error;
pub use error::{Error, ErrorKind};
mod simple;
mod traverse;
pub use {simple::simple_reduce, traverse::traverse_reduce};
#[cfg(test)]
mod tests;
/// Returns the name of the currently checked-out git branch, or `None` when
/// `git` is not installed, the command fails, or the working directory is
/// not inside a repository.
pub fn get_git_branch() -> Option<String> {
    let output = std::process::Command::new("git")
        .args(&["rev-parse", "--abbrev-ref", "HEAD"])
        .output()
        .ok()?;
    // A failing invocation (e.g. outside a repo) used to yield `Some("")`
    // from the empty stdout; treat non-zero exit as no branch instead.
    if !output.status.success() {
        return None;
    }
    // git terminates the branch name with a newline; strip it so callers
    // get just the branch name.
    String::from_utf8(output.stdout)
        .ok()
        .map(|name| name.trim_end().to_string())
}
/// Lifts a boolean into an `Option`: `true` wraps the supplied value,
/// `false` discards it.
pub trait BoolAsOptional {
    fn as_opt<E>(&self, el: E) -> Option<E>;
}

impl BoolAsOptional for bool {
    /// Returns `Some(el)` when the receiver is `true`, `None` otherwise.
    fn as_opt<E>(&self, el: E) -> Option<E> {
        match *self {
            true => Some(el),
            false => None,
        }
    }
}
/// Normalizes a path-like string in place: cuts the string at the last
/// backslash (removing it and everything after it), rewrites any remaining
/// backslashes to forward slashes, and guarantees a trailing `/`.
pub(crate) fn fix_trailing(input: &mut String) {
    debug_assert!(!input.is_empty());
    // this is a hack: drop the final path component after the last backslash
    if let Some(index) = input.rfind('\\') {
        input.truncate(index);
    }
    // Convert the remaining separators in one pass instead of collecting
    // byte indices and patching each one with `replace_range`.
    *input = input.replace('\\', "/");
    if !input.ends_with('/') {
        input.push('/');
    }
}
|
use structopt::StructOpt;
use std::fs::File;
use std::io::{BufRead, BufReader, Result};
use std::str::FromStr;
-#[derive(StructOpt)]
struct Cli {
/// Columns pattern. e.g 1:id,10-15:amt,...
pattern: String ,
/// The path to the file to read
#[structopt(parse(from_os_str))]
path: std::path::PathBuf,
}
/// One fixed-width column: 1-based start/end positions and a display name,
/// parsed from a spec like `10-15:amt`.
struct Field{
    start:u8,
    end:u8,
    name:String,
}
/// Error returned when a field spec cannot be parsed.
// Fixed: `impl Error for FieldParseError` referenced an unresolved `Error`
// name, and `std::error::Error` requires `Debug + Display`, which the type
// did not implement.
#[derive(Debug)]
struct FieldParseError {}

impl std::fmt::Display for FieldParseError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "failed to parse field specification")
    }
}

impl std::error::Error for FieldParseError {}
impl FromStr for Field{
type Err = FieldParseError;
fn from_str(s:&str)->Result<Self,Self::Err>{
x
}
}
/// Entry point: parses CLI args, splits the column pattern, and echoes each
/// line of the input file.
fn main() -> Result<()> {
    let args = Cli::from_args();
    let field_specs: Vec<&str> = args.pattern.split(',').collect();
    let reader = BufReader::new(File::open(args.path)?);
    for line in reader.lines() {
        println!("{}", line?);
    }
    Ok(())
}
|
// Copyright 2014 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::sync::Mutex;
use rustc_serialize::{Encoder, Encodable};
/// A single interner event recorded into the global `LOG`; see the
/// `Encodable` impl below for the serialized tags.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Debug)]
pub enum Event {
    Intern(u64),
    Insert(u64, String),
    Remove(u64),
}
lazy_static! {
    // Global, append-only event log; pre-sized so typical runs avoid
    // reallocating while recording.
    pub static ref LOG: Mutex<Vec<Event>>
        = Mutex::new(Vec::with_capacity(50_000));
}
/// Appends an event to the global log.
///
/// # Panics
/// Panics if the log mutex has been poisoned.
pub fn log(e: Event) {
    let mut events = LOG.lock().unwrap();
    events.push(e);
}
// Shorthand so call sites can write `log!(event)` instead of the full path.
macro_rules! log (($e:expr) => (::event::log($e)));
// Serialize by converting to this private struct,
// which produces more convenient output.
#[derive(RustcEncodable)]
struct SerializeEvent<'a> {
    event: &'static str,        // tag: "intern" / "insert" / "remove"
    id: u64,
    string: Option<&'a String>, // payload, present only for inserts
}
impl Encodable for Event {
    /// Encodes the event through the flat `SerializeEvent` mirror struct,
    /// which produces friendlier output than the derived enum encoding.
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        let (event, id, string) = match *self {
            Event::Intern(id) => ("intern", id, None),
            Event::Insert(id, ref s) => ("insert", id, Some(s)),
            Event::Remove(id) => ("remove", id, None),
        };
        let mirror = SerializeEvent {
            event: event,
            id: id,
            string: string,
        };
        mirror.encode(s)
    }
}
|
extern crate libc;
use libc::c_char;
use libc::c_int;
use libc::c_void;
use libc::size_t;
use std::ffi::CStr;
use std::ffi::CString;
use std::str;
// Raw FFI declarations for the OpenCC C API.
// NOTE(review): the link name embeds a path-like value ("clib/opencc");
// confirm this matches what the build actually links against.
#[link(name = "clib/opencc")]
extern "C" {
    // Opens a converter for the given config file; returns an opaque handle.
    fn opencc_open(configFileName: *const c_char) -> *mut c_void;
    // Converts `length` bytes of UTF-8 input; returns a malloc'd C string.
    fn opencc_convert_utf8(
        opencc: *mut c_void,
        input: *const c_char,
        length: size_t,
    ) -> *mut c_char;
    // Frees a buffer returned by `opencc_convert_utf8`.
    fn opencc_convert_utf8_free(str_converted: *mut c_char);
    // Returns the library's last-error message.
    fn opencc_error() -> *mut c_char;
    // Releases the converter handle.
    fn opencc_close(opencc: *mut c_void) -> c_int;
}
/// Converts `text` using the OpenCC configuration at `config_filepath` and
/// returns the converted string.
///
/// # Panics
/// Panics if either argument contains an interior NUL byte, if the config
/// file cannot be opened, or if OpenCC returns invalid UTF-8.
pub fn convert(config_filepath: &str, text: &str) -> String {
    let conf_filepath = CString::new(config_filepath).unwrap();
    let conf_filepath_ccharp: *const c_char = conf_filepath.as_ptr();
    let od = unsafe { opencc_open(conf_filepath_ccharp) };

    // Fetch the library's last-error string (kept for debugging purposes).
    // NOTE(review): assumes opencc_error never returns NULL — confirm
    // against the OpenCC headers.
    let r_err_raw: *const c_char = unsafe { opencc_error() };
    let r_err: &CStr = unsafe { CStr::from_ptr(r_err_raw) };
    let _r_err_slice: &str = r_err.to_str().unwrap();

    // opencc_open signals failure by returning (opencc_t)-1. The previous
    // check truncated the pointer to 8 bits (`od as i8`), which would also
    // match any valid address ending in 0xFF; compare the full pointer width.
    if od as isize == -1 {
        panic!("Config file cannot be found!");
    }

    let input = CString::new(text).unwrap();
    let r_raw = unsafe { opencc_convert_utf8(od, input.as_ptr(), text.len() as size_t) };
    let r_str: &CStr = unsafe { CStr::from_ptr(r_raw) };
    let r_slice: &str = r_str.to_str().unwrap();
    let result = r_slice.to_owned();

    // Free the buffer OpenCC allocated and close the handle.
    unsafe {
        opencc_convert_utf8_free(r_raw);
        let _exit_status = opencc_close(od);
    }
    result
}
|
mod query;
mod user;
use std::{cmp::Reverse, str::FromStr, sync::Arc};
use twilight_model::application::{
command::CommandOptionChoice,
interaction::{application_command::CommandOptionValue, ApplicationCommand},
};
use crate::{
commands::{parse_discord, DoubleResultCow, MyCommand, MyCommandOption},
core::Context,
custom_client::OsekaiBadge,
database::UserConfig,
error::Error,
util::{
constants::common_literals::{DISCORD, NAME, SORT},
InteractionExt, MessageExt,
},
BotResult,
};
pub use query::handle_autocomplete as handle_badge_autocomplete;
use super::{option_discord, option_name, require_link};
/// Parsed arguments of the `/badges` slash command.
struct BadgeArgs {
    // Which subcommand was invoked (query by name, or a user's badges).
    kind: BadgeCommandKind,
    // Requested ordering of the resulting badge list.
    sort_by: BadgeOrder,
}
/// Supported orderings for a badge list; see `apply` for the sort keys.
enum BadgeOrder {
    Alphabet,
    Date,
    OwnerCount,
}
impl BadgeOrder {
fn apply(self, badges: &mut [OsekaiBadge]) {
match self {
Self::Alphabet => badges.sort_unstable_by(|a, b| a.name.cmp(&b.name)),
Self::Date => badges.sort_unstable_by_key(|badge| Reverse(badge.awarded_at)),
Self::OwnerCount => badges.sort_unstable_by_key(|badge| Reverse(badge.users.len())),
}
}
}
impl FromStr for BadgeOrder {
    type Err = Error;

    /// Parses the slash-command choice values (`alphabet`, `date`,
    /// `owner_count`) into an ordering.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "alphabet" => Ok(Self::Alphabet),
            "date" => Ok(Self::Date),
            "owner_count" => Ok(Self::OwnerCount),
            _ => Err(Error::InvalidCommandOptions),
        }
    }
}
impl Default for BadgeOrder {
    /// Badges are ordered by date unless the user picks otherwise.
    fn default() -> Self {
        Self::Date
    }
}
/// The two `/badges` subcommands.
enum BadgeCommandKind {
    // `/badges query`: look up badges matching a name/acronym.
    Query { name: String },
    // `/badges user`: list the badges of a (possibly linked) user.
    User { config: UserConfig },
}
impl BadgeArgs {
    /// Extracts the subcommand and sort order from a received slash command.
    ///
    /// Returns `Ok(Err(content))` when user-facing feedback should be sent
    /// instead of proceeding (e.g. an unlinked Discord account).
    async fn slash(ctx: &Context, command: &mut ApplicationCommand) -> DoubleResultCow<Self> {
        // A slash command always carries exactly one subcommand option here.
        let option = command
            .data
            .options
            .pop()
            .ok_or(Error::InvalidCommandOptions)?;
        let mut kind = None;
        let mut sort_by = None;
        match option.value {
            CommandOptionValue::SubCommand(options) => match option.name.as_str() {
                "query" => {
                    for option in options {
                        match option.value {
                            CommandOptionValue::String(name) => match option.name.as_str() {
                                NAME => kind = Some(BadgeCommandKind::Query { name }),
                                SORT => sort_by = Some(name.parse()?),
                                _ => return Err(Error::InvalidCommandOptions),
                            },
                            _ => return Err(Error::InvalidCommandOptions),
                        }
                    }
                }
                "user" => {
                    // Start from the invoker's stored config; the options
                    // below may override the osu! username.
                    let mut config = ctx.user_config(command.user_id()?).await?;
                    for option in options {
                        match option.value {
                            CommandOptionValue::String(value) => match option.name.as_str() {
                                NAME => config.osu = Some(value.into()),
                                SORT => sort_by = Some(value.parse()?),
                                _ => return Err(Error::InvalidCommandOptions),
                            },
                            CommandOptionValue::User(value) => match option.name.as_str() {
                                DISCORD => match parse_discord(ctx, value).await? {
                                    Ok(osu) => config.osu = Some(osu),
                                    Err(content) => return Ok(Err(content)),
                                },
                                _ => return Err(Error::InvalidCommandOptions),
                            },
                            _ => return Err(Error::InvalidCommandOptions),
                        }
                    }
                    kind = Some(BadgeCommandKind::User { config });
                }
                _ => return Err(Error::InvalidCommandOptions),
            },
            _ => return Err(Error::InvalidCommandOptions),
        }
        let args = Self {
            kind: kind.ok_or(Error::InvalidCommandOptions)?,
            sort_by: sort_by.unwrap_or_default(),
        };
        Ok(Ok(args))
    }
}
/// Handler for the `/badges` command: parses the arguments, then delegates
/// to the query or user subcommand implementation.
pub async fn slash_badges(ctx: Arc<Context>, mut command: ApplicationCommand) -> BotResult<()> {
    let BadgeArgs { kind, sort_by } = match BadgeArgs::slash(&ctx, &mut command).await? {
        Ok(args) => args,
        // Argument parsing produced user-facing feedback; report and stop.
        Err(content) => return command.error(&ctx, content).await,
    };
    match kind {
        BadgeCommandKind::Query { name } => query::query_(ctx, command, name, sort_by).await,
        BadgeCommandKind::User { config } => {
            user::user_(ctx, command.into(), config, sort_by).await
        }
    }
}
/// Builds the shared `sort` option with its three choices; used by both the
/// `query` and `user` subcommands.
fn sort_option() -> MyCommandOption {
    let choice = |name: &str, value: &str| CommandOptionChoice::String {
        name: name.to_owned(),
        value: value.to_owned(),
    };

    let sort_choices = vec![
        choice("Alphabet", "alphabet"),
        choice("Date", "date"),
        choice("Owner count", "owner_count"),
    ];

    MyCommandOption::builder(SORT, "Choose how the badges should be ordered, defaults to date")
        .string(sort_choices, false)
}
/// Defines the `/badges` slash command with its `query` and `user`
/// subcommands; both carry the shared sort option.
pub fn define_badges() -> MyCommand {
    let name = MyCommandOption::builder(NAME, "Specify the badge name or acronym")
        .autocomplete()
        .string(Vec::new(), true);
    let query = MyCommandOption::builder("query", "Display all badges matching the query")
        .subcommand(vec![name, sort_option()]);
    // `name` is deliberately shadowed: this one is the generic osu!-name option.
    let name = option_name();
    let discord = option_discord();
    let options = vec![name, sort_option(), discord];
    let user = MyCommandOption::builder("user", "Display all badges of a user").subcommand(options);
    MyCommand::new("badges", "Display info about badges").options(vec![query, user])
}
|
use std::{fs, str, u64};
use regex::Regex;
/// One `MIN-MAX LETTER: PASSWORD` line of the day-2 puzzle input.
#[derive(Clone, Debug)]
struct PasswordRule {
    // The constrained letter.
    pub rule_letter: char,
    // Task 1: minimum occurrence count. Task 2: first 1-based position.
    pub rule_min: u64,
    // Task 1: maximum occurrence count. Task 2: second 1-based position.
    pub rule_max: u64,
    pub password: String,
}
impl PasswordRule {
    /// Attempts to parse a password rule from the specified string.
    /// Logs and returns `None` on lines that do not match the
    /// `MIN-MAX LETTER: PASSWORD` format (the old doc claimed it panics).
    fn try_parse_str(input: &str) -> Option<Self> {
        // NOTE: the regex is recompiled on every call; hoist it into a
        // `std::sync::OnceLock` if this ever becomes hot.
        let re = Regex::new(r"(\d+)-(\d+)\s([a-z]):\s([a-z]+)").unwrap();
        // only consider first match:
        match re.captures(input) {
            Some(cap) => Some(Self {
                rule_min: cap[1].parse().unwrap(),
                rule_max: cap[2].parse().unwrap(),
                rule_letter: cap[3].chars().next().unwrap(),
                password: cap[4].to_owned(),
            }),
            None => {
                println!("Failed to parse {}", input);
                None
            }
        }
    }

    /// Task 1: the letter must occur between `rule_min` and `rule_max`
    /// times (inclusive).
    fn validate_task_1(&self) -> bool {
        let occurrences = self
            .password
            .chars()
            .filter(|c| c == &self.rule_letter)
            .count() as u64;
        (self.rule_min..=self.rule_max).contains(&occurrences)
    }

    /// Task 2: exactly one of the two 1-based positions holds the letter.
    /// Out-of-range positions (including 0) now count as "no match" instead
    /// of panicking on the slice index.
    fn validate_task_2(&self) -> bool {
        let chars: Vec<char> = self.password.chars().collect();
        let matches_at = |pos: u64| {
            (pos as usize)
                .checked_sub(1)
                .and_then(|i| chars.get(i))
                == Some(&self.rule_letter)
        };
        matches_at(self.rule_min) ^ matches_at(self.rule_max)
    }
}
/// Solves both parts of AoC 2020 day 2 from `inputs/d2` and prints the
/// count of valid passwords for each policy interpretation.
pub fn day2() {
    let input = fs::read_to_string("inputs/d2").unwrap();
    let values: Vec<PasswordRule> = input
        .lines()
        .filter_map(PasswordRule::try_parse_str)
        .collect();
    // Count by reference: the previous version cloned the entire Vec (and
    // every String inside it) just to run the filter.
    let solution1 = values.iter().filter(|rule| rule.validate_task_1()).count();
    println!("Count of rule-abiding passwords for task1: {}", solution1);
    let solution2 = values.iter().filter(|rule| rule.validate_task_2()).count();
    println!("Count of rule-abiding passwords for task2: {}", solution2);
}
// I'd been looking for a reason to try out the parser combinator library nom for a while.
// In the end, I found it rather unergonomic and verbose compared to Haskell's parsec (and its derivatives).
// So I just used regex to get on with the next challenges.
// use nom::{IResult, bytes::complete::{tag, take_while}, character::{complete::{anychar}}, combinator::map_res};
// fn from_limit(input: &str) -> Result<u64, ParseIntError> {
// u64::from_str_radix(input, 10)
// }
// fn parse_limit(input: &str) -> IResult<&str, u64> {
// map_res(
// take_while( char::is_ascii_digit),
// from_limit
// )(input)
// // let (input, digits) = take_while(char::is_ascii_digit)(input);
// // let u = from_limit(digits);
// // Ok((input, ))
// }
// fn parse_password(input: &str) -> IResult<&str, String> {
// let (input, alphabetic) = take_while(char::is_alphabetic)(input)?;
// Ok((input, alphabetic.to_string()))
// }
// fn parse_password_rule(input: &str) -> IResult<&str, PasswordRule> {
// let (input, rule_min) = parse_limit(input)?;
// let (input, _) = tag("-")(input)?;
// let (input, rule_max) = parse_limit(input)?;
// let (input, _) = tag(" ")(input)?;
// let (input, rule_letter) = anychar(input)?;
// let (input, _) = tag(": ")(input)?;
// let (input, password) = parse_password(input)?;
// Ok((input,
// PasswordRule {
// rule_letter,
// rule_min,
// rule_max,
// password,
// }
// ))
// } |
//! http://cache.nxp.com/documents/data_sheet/PCA9685.pdf?pspll=1
extern crate tessel;
use std::io;
use std::thread;
use std::time::Duration;
use std::ops::Range;
/// PCA9685 register addresses (see the NXP datasheet linked above).
#[repr(u8)]
#[allow(dead_code)]
#[derive(Copy, Clone)]
enum Command {
    MODE1 = 0x0,
    LED0_ON_L = 0x06,
    LED0_ON_H = 0x07,
    LED0_OFF_L = 0x08,
    LED0_OFF_H = 0x09,
    PRESCALE = 0xFE,
}
// 12-bit PWM resolution of the PCA9685 (4096 ticks per period).
const MAX: u16 = 4096;
//const MODE1 = 0x0;
//const PRE_SCALE = 0xFE;
// NOTE(review): assumes the module answers at I2C address 0x73 — confirm
// against the address pins wired on this board.
const I2C_ID: u8 = 0x73;
/// Driver state for a PCA9685-based 16-channel servo/PWM module on a
/// Tessel port.
#[allow(dead_code)]
pub struct ServoArray<'a> {
    i2c: tessel::I2cPort<'a>,
    addr2: tessel::Pin<'a>,          // address pin A2
    addr3: tessel::Pin<'a>,          // address pin A3
    output_enable: tessel::Pin<'a>,  // active-low output enable
    range: Range<f64>,               // usable duty-cycle range
    i2c_id: u8,
}
impl<'a> ServoArray<'a> {
    /// Claims the port's I2C bus plus GPIO pins 5, 6 and 7 (addr2, addr3,
    /// output-enable) and returns the driver with its default 0..1 range.
    ///
    /// NOTE(review): the boolean `addr2`/`addr3` parameters are shadowed by
    /// the pin handles and never read, and `i2c_id` is always `I2C_ID` —
    /// confirm whether address selection was meant to use them.
    pub fn new<'b>(port: tessel::Port, addr2: bool, addr3: bool) -> ServoArray<'b> {
        let (i2c, gpio) = port.i2c();
        let (addr2, addr3, output_enable) = gpio.pin_select((5, 6, 7));
        ServoArray {
            i2c: i2c,
            addr2: addr2,
            addr3: addr3,
            output_enable: output_enable,
            range: 0.0..1.0,
            i2c_id: I2C_ID, // TODO
        }
    }
    /// Reads sequential buffers.
    //fn read(&mut self, values: &[Command], buf: &mut [u8]) -> io::Result<()> {
    //    let a: Vec<u8> = values.iter().map(|x| *x as u8).collect();
    //    try!(self.i2c.transfer(I2C_ID, &a, buf));
    //    Ok(())
    //}
    //fn write(&mut self, values: &[Command]) -> io::Result<()> {
    //    let mut a: Vec<u8> = values.iter().map(|x| *x as u8).collect();
    //    a.push(value);
    //    self.i2c.send(I2C_ID, &a);
    //    Ok(())
    //}
    /// Drives the address and output-enable pins low, then programs a 50 Hz
    /// PWM frequency (standard for hobby servos).
    pub fn connect(&mut self) -> io::Result<()> {
        // Enable the outputs.
        self.output_enable.output(false);
        //TODO
        self.addr2.output(false);
        self.addr3.output(false);
        //let mut buf = [0; 6];
        //println!("hi");
        //try!(self.read(&[Command::ReadId3, Command::ReadId4], &mut buf));
        //println!("hey");
        //println!("hi {:?}", buf);
        //if buf[0] != 0x14 {
        //    return Err(io::Error::new(io::ErrorKind::InvalidData, "Invalid connection code."))
        //}
        self.set_module_frequency(50);
        Ok(())
    }
    /// Programs the chip prescaler for the requested PWM `frequency` by
    /// entering sleep mode (MODE1 bit 4), writing PRESCALE, restoring the
    /// previous mode, then writing 0xA1.
    // prescale ≈ 25 MHz / (4096 * frequency) - 1; integer division here, and
    // the transfer/send io::Results are silently ignored.
    // NOTE(review): confirm 0xA1 (restart + auto-increment bits) against the
    // MODE1 register description in the datasheet.
    pub fn set_module_frequency(&mut self, frequency: u64) {
        let prescale: u8 = (((25000000 / (MAX as u64)) / frequency) - 1) as u8;
        let mut buf = [0; 1];
        self.i2c.transfer(self.i2c_id, &[Command::MODE1 as u8], &mut buf);
        let mode = buf[0];
        self.i2c.send(self.i2c_id, &[Command::MODE1 as u8, mode | 0x10]);
        self.i2c.send(self.i2c_id, &[Command::PRESCALE as u8, prescale]);
        self.i2c.send(self.i2c_id, &[Command::MODE1 as u8, mode]);
        self.i2c.send(self.i2c_id, &[Command::MODE1 as u8, 0xA1]);
    }
    /// Set duty cycle for entry 1 to 16.
    ///
    /// `value` is clamped to [0, 1]; the ON tick is fixed at 0 and the OFF
    /// tick encodes the duty cycle over the 4096-tick period.
    // NOTE(review): `i = 0` underflows `(i - 1)` — callers must pass 1..=16.
    pub fn set_duty_cycle(&mut self, i: usize, value: f64) {
        let offset = ((i - 1) * 4) as u8;
        let reg = (((MAX - 1) as f64) * f64::max(f64::min(value, 1.0), 0.0)) as u16;
        println!("0 0 {:?} {:?}", (reg & 0xFF) as u8, ((reg >> 8) & 0xFF) as u8);
        self.i2c.send(self.i2c_id, &[Command::LED0_ON_L as u8 + offset, 0]);
        self.i2c.send(self.i2c_id, &[Command::LED0_ON_H as u8 + offset, 0]);
        self.i2c.send(self.i2c_id, &[Command::LED0_OFF_L as u8 + offset, (reg & 0xFF) as u8]);
        self.i2c.send(self.i2c_id, &[Command::LED0_OFF_H as u8 + offset, ((reg >> 8) & 0xFF) as u8]);
    }
}
|
pub mod buffer;
pub mod core_proxy;
pub mod dispatch;
pub mod lsp;
pub mod plugin;
use dispatch::Dispatcher;
/// Proxy entry point: wires the stdio-based RPC channels into a
/// `Dispatcher` and blocks on its main loop.
pub fn mainloop() {
    let (sender, receiver, io_threads) = lapce_rpc::stdio();
    let dispatcher = Dispatcher::new(sender);
    // NOTE(review): `io_threads` is never joined here — confirm whether
    // lapce_rpc expects an explicit join on shutdown.
    dispatcher.mainloop(receiver);
}
|
use std::sync::mpsc::Sender;
use std::mem::{uninitialized};
use libc::{c_long};
// Locations of the hot-reloadable game library (currently unused on Linux;
// see `watch_for_updated_game_lib` below).
pub static GAME_LIB_DIR: &'static str = "./af/target/debug/";
pub static GAME_LIB_PATH: &'static str = "./af/target/debug/libaf.so";
pub static GAME_LIB_FILE: &'static str = "./libaf.so";
/// C-layout mirror of `struct timespec` used by the `clock_*` syscalls.
#[repr(C)]
struct Timespec {
    tv_sec: usize, // time_t
    tv_nsec: c_long
}
// Raw bindings to the POSIX clock API.
extern "C" {
    fn clock_getcpuclockid(pid: i32, clock_id: *mut usize) -> isize;
    fn clock_getres(clock_id: usize, res: *mut Timespec) -> isize;
    fn clock_gettime(clock_id: usize, tp: *mut Timespec) -> isize;
}
// Linux CLOCK_MONOTONIC_RAW clock id (= 4). The misspelled name
// ("MONOTOMIC") is kept because other code in this file refers to it.
const CLOCK_MONOTOMIC_RAW: usize = 4;
/// Placeholder: hot code reloading is not implemented on Linux, so the
/// sender is never used.
pub fn watch_for_updated_game_lib(ref sender: &Sender<()>) {
    println!("on linux - no hot code update for now!");
}
pub fn query_performance_counter(counter: &mut i64) {
unsafe {
let mut time: Timespec = uninitialized();
if clock_gettime(CLOCK_MONOTOMIC_RAW, &mut time) != 0 { panic!("Error retrieving clock time") }
*counter = ((time.tv_sec * 1_000_000_000) as i64 + time.tv_nsec) as i64;
}
}
/// Returns the counter frequency in ticks per second; the clock reports
/// nanoseconds, so this is a fixed constant.
pub fn query_performance_frequency() -> i64 {
    const NANOS_PER_SECOND: i64 = 1_000_000_000;
    NANOS_PER_SECOND
}
|
extern crate tini;
extern crate backlight;
use std::fs::File;
use std::io::prelude::*;
use std::{thread, time};
use std::io::SeekFrom;
use std::fs::create_dir_all;
use std::cmp;
use std::env;
use std::path::Path;
use tini::Ini;
use backlight::Backlight;
/// Handle on the ambient-light sensor's sysfs file; re-read on each sample.
#[derive(Debug)]
struct Illuminance {
    file: File,
}
impl Illuminance {
    /// Opens the sensor file named by the `[illuminance] file` config entry.
    fn from_config(config: &Ini) -> Illuminance {
        let filename: String = config.get("illuminance", "file").unwrap();
        Illuminance {
            file: File::open(filename).unwrap(),
        }
    }

    /// Rewinds the sensor file and parses its contents as an integer.
    ///
    /// # Panics
    /// Panics when the file contents are not a valid `i32`.
    fn get(&mut self) -> i32 {
        self.file.seek(SeekFrom::Start(0)).ok();
        let mut raw = String::new();
        self.file.read_to_string(&mut raw).ok();
        raw.trim()
            .parse::<i32>()
            .unwrap_or_else(|_| panic!("can't parse `{}` value", raw))
    }
}
/// Smooth backlight transition: interpolates from `start` to `end` over
/// `steps` ticks, sleeping `step` between ticks.
#[derive(Debug, Clone, Copy)]
struct Transition {
    step: time::Duration,  // delay between interpolation steps
    sleep: time::Duration, // delay after a transition completes
    start: f32,
    end: f32,
    steps: i32,
    cur: i32,              // current step index
}
impl Transition {
    /// Reads the step/sleep timings from the `[transition]` config section;
    /// the interpolation bounds stay zeroed until `set` is called.
    fn from_config(config: &Ini) -> Transition {
        let step = time::Duration::from_millis(config.get("transition", "step").unwrap());
        let sleep = time::Duration::from_millis(config.get("transition", "sleep").unwrap());
        let start = 0f32;
        let end = 0f32;
        let steps = 0i32;
        let cur = 0i32;
        Transition {
            step,
            sleep,
            start,
            end,
            steps,
            cur,
        }
    }
    /// Transition function. f(0) = 0, f(1) = 1
    // Logistic (sigmoid) curve centered at 0.5; endpoints are only
    // approximately 0 and 1.
    fn f(x: f32) -> f32 {
        1.0 / ((-15.0 * (x - 0.5)).exp() + 1.0)
    }
    /// Current progress of transition
    fn progress(&self) -> f32 {
        Transition::f(self.cur as f32 / self.steps as f32)
    }
    /// Re-arms the transition between two backlight fractions. Differences
    /// of at most 0.1 are applied without animation (zero steps); otherwise
    /// up to 30 steps, proportional to the distance.
    pub fn set(&mut self, start: f32, end: f32) {
        self.start = start;
        self.end = end;
        self.steps = if (self.end - self.start).abs() > 0.1 {
            cmp::min(30, ((self.end - self.start).abs() * 100f32) as i32)
        } else {
            0i32
        };
        self.cur = 0i32;
    }
}
impl Iterator for Transition {
    type Item = f32;

    /// Yields the next interpolated value, sleeping `step` between items
    /// and `sleep` once before reporting exhaustion.
    // NOTE(review): `Transition` is `Copy`, so `for v in transition`
    // iterates over a copy and never advances the caller's `cur` field —
    // confirm this is intended.
    fn next(&mut self) -> Option<f32> {
        if self.cur >= self.steps - 1 {
            thread::sleep(self.sleep);
            return None;
        }
        thread::sleep(self.step);
        let v = self.start + (self.end - self.start) * self.progress();
        self.cur += 1;
        Some(v)
    }
}
/// Illuminance-to-backlight mapping table (breakpoints from the config's
/// `i2b` entry).
struct Transform {
    i2b: Vec<i32>,
}
impl Transform {
    /// Loads the `i2b` breakpoint list from the `[illuminance]` section.
    fn from_config(config: &Ini) -> Transform {
        let i2b: Vec<i32> = config.get_vec("illuminance", "i2b").unwrap();
        Transform { i2b }
    }

    /// Maps a raw illuminance reading onto a backlight fraction in [0, 1]
    /// using the configured breakpoints.
    pub fn to_backlight(&self, value: i32) -> f32 {
        let last = self.i2b.len() - 1;
        let step = 1f32 / last as f32;
        // Index of the first breakpoint strictly above `value`
        // (stays at last + 1 when value >= every breakpoint).
        let mut r = last + 1;
        for (n, i) in self.i2b.iter().enumerate() {
            if value < *i {
                r = n;
                break;
            }
        }
        if r == 0 {
            0f32
        } else if r > last {
            1f32
        } else {
            // NOTE(review): `value - self.i2b[r]` is negative here (value is
            // below breakpoint r), so this subtracts a partial step — confirm
            // whether `self.i2b[r - 1]` was intended as the reference point.
            (r as f32 + (value - self.i2b[r]) as f32 / (self.i2b[r] - self.i2b[r - 1]) as f32) *
                step
        }
    }
}
/// Default configuration written on first run: the ambient-light sensor
/// path, the illuminance-to-backlight breakpoints, and transition timings.
fn create_default_config() -> Ini {
    Ini::new()
        .section("illuminance")
        .item(
            "file",
            "/sys/bus/acpi/devices/ACPI0008:00/iio:device0/in_illuminance_raw",
        )
        .item("i2b", "-5,20,300,700,1100,7100")
        .section("transition")
        .item("step", "50")
        .item("sleep", "1000")
}
/// Daemon entry point: ensures a config exists under the XDG config dir,
/// then continuously tracks the ambient light sensor, animating the
/// backlight toward each new target level.
fn main() {
    let default_config = create_default_config();
    let user_home = env::var("HOME").unwrap();
    let user_path = match env::var("XDG_CONFIG_HOME") {
        Ok(path) => Path::new(&path).join("lumos/config.ini"),
        Err(_) => Path::new(&user_home).join(".config/lumos/config.ini"),
    };
    if !user_path.exists() {
        create_dir_all(user_path.parent().unwrap()).unwrap();
        default_config.to_file(&user_path).unwrap();
    }
    let config = Ini::from_file(&user_path).unwrap();
    let backlight = Backlight::new();
    let mut illuminance = Illuminance::from_config(&config);
    let transform = Transform::from_config(&config);
    let mut transition = Transition::from_config(&config);
    loop {
        // dirty hack to avoid blinking: a zero reading is often transient,
        // so sample once more before acting on it.
        let mut value = illuminance.get();
        if value == 0 {
            // maybe sleep here?
            value = illuminance.get();
        }
        transition.set(backlight.get(), transform.to_backlight(value));
        for v in transition {
            backlight.set(v);
        }
    }
}
|
extern crate actix;
extern crate actix_prometheus;
extern crate actix_web;
use actix_prometheus::register_default_counters;
use actix_prometheus::PrometheusTransform;
use actix_web::{http, web, HttpResponse};
use actix_web::{App, HttpServer};
/// Example server: wraps all routes in the Prometheus middleware and
/// exposes the collected metrics at `/metrics`, listening on port 8088.
fn main() {
    println!("Running prometheus example");
    register_default_counters();
    HttpServer::new(|| {
        App::new()
            .wrap(PrometheusTransform)
            .service(
                web::resource("/test")
                    .route(web::get().to(|| HttpResponse::Ok().body("Well done!"))),
            )
            .route("/metrics", web::get().to(actix_prometheus::metric_export))
    })
    .bind("0.0.0.0:8088")
    .unwrap()
    .run()
    .unwrap()
}
|
use metrics::{counter, gauge};
use std::time::Instant;
/// Increments the `storm.gc` counter by one.
pub fn inc_storm_gc() {
    counter!("storm.gc", 1);
}
/// Increments the `storm.cache.island.gc` counter by one.
pub fn inc_storm_cache_island_gc() {
    counter!("storm.cache.island.gc", 1);
}
/// Records one executed operation and the wall time elapsed since `instant`
/// (in nanoseconds), labeled by operation and type name.
pub fn inc_storm_execute_time(instant: Instant, op: &'static str, ty: &'static str) {
    counter!("storm.execute.count", 1, "op" => op, "type" => ty);
    counter!("storm.execute.time", instant.elapsed().as_nanos() as u64, "op" => op, "type" => ty);
}
/// Records the current row count of a table as the `storm.table.rows`
/// gauge, clamped to `u32::MAX` (preserving the original bound) before the
/// `f64` conversion.
pub fn update_storm_table_rows(len: usize, ty: &'static str) {
    // Clamp with `min` instead of the previous manual if/assign.
    let len = len.min(u32::MAX as usize);
    gauge!("storm.table.rows", len as f64, "type" => ty);
}
|
mod phi;
pub use self::phi::phi; |
use std::*;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("That is not a valid email address")]
InvalidEmailAddressError,
#[error("That email address already exists, try logging in")]
EmailAlreadyExists,
#[error("The mutex guarding the Sqlite connection was posioned")]
MutexPoisonError,
#[error("An error occured trying to retrieve the current time")]
SystemTimeError(#[from] time::SystemTimeError),
#[error("Could not find any user that fits the specified requirements")]
UserNotFoundError,
#[error("RusqliteError: {0}")]
RusqliteError(#[from] rusqlite::Error),
#[error("Argon2ParsingError: {0}")]
Argon2ParsingError(#[from] argon2::Error),
#[error("Unspecified")]
Unspecified,
#[error("Unspecified")]
QueryError,
#[error("UnmanagedStateError")]
UnmanagedStateError,
#[error("FormValidationError")]
FormValidationError,
#[error("UnauthenticatedError: The operation failed because the client is not authenticated")]
UnauthenticatedError,
#[error("Incorrect email or password")]
InvalidCredentialsError,
#[error("The password must be at least 8 characters long")]
UnsafePasswordTooShort,
#[error("The password must include a digit")]
UnsafePasswordHasNoDigit,
#[error("The password must include an upper case character")]
UnsafePasswordHasNoUpper,
#[error("The password must include a lower case character")]
UnsafePasswordHasNoLower,
#[error("Incorrect email or password")]
UnauthorizedError,
#[error("SerdeError: {0}")]
SerdeError(#[from] serde_json::Error),
}
use std::sync::PoisonError;
/// Collapse any poisoned-lock error into `MutexPoisonError`; the poisoned
/// guard payload is discarded.
impl<T> From<PoisonError<T>> for Error {
    fn from(_error: PoisonError<T>) -> Error {
        Error::MutexPoisonError
    }
}
/// Allow `?` on functions returning `Result<_, ()>` by mapping the unit
/// error to the generic `Unspecified` variant.
impl From<()> for Error {
    fn from(_: ()) -> Error {
        Error::Unspecified
    }
}
use self::Error::*;
impl Error {
    /// User-facing message for this error.
    ///
    /// Only the whitelisted variants render their `Display` text; every other
    /// variant is collapsed to "undefined", which keeps internal details
    /// (SQL, serialization, hashing errors) out of responses.
    fn message(&self) -> String {
        match self {
            InvalidEmailAddressError
            | InvalidCredentialsError
            | EmailAlreadyExists
            | UnauthorizedError
            | UserNotFoundError
            | UnsafePasswordTooShort
            | UnsafePasswordHasNoDigit
            | UnsafePasswordHasNoLower
            | UnsafePasswordHasNoUpper => self.to_string(), // idiomatic for format!("{}", self)
            _ => "undefined".into(),
        }
    }
}
use rocket::http::ContentType;
use rocket::request::Request;
use rocket::response::{self, Responder, Response};
use std::io::Cursor;
impl<'r> Responder<'r, 'static> for Error {
    /// Render the error as a plain-text HTTP response whose body is
    /// `self.message()`.
    fn respond_to(self, _: &'r Request<'_>) -> response::Result<'static> {
        // Build the message once; the original called `self.message()` twice,
        // allocating two identical strings.
        let body = self.message();
        Response::build()
            .header(ContentType::Plain)
            .sized_body(body.len(), Cursor::new(body))
            .ok()
    }
}
|
use std::fs::File;
#[cfg(test)]
pub mod gen;
pub mod pipelines;
#[cfg(test)]
pub mod parse;
// Placeholder test: currently asserts nothing. TODO: add real assertions.
#[test]
pub fn token() {
}
|
use iced_futures::futures;
// Just a little utility function
/// Start an upload subscription that POSTs `body` to `url` and reports
/// `Progress` events as the transfer advances.
pub fn post<T: ToString>(url: T, body: Box<[u8]>) -> iced::Subscription<Progress> {
    let recipe = Download {
        url: url.to_string(),
        body,
    };
    iced::Subscription::from_recipe(recipe)
}
/// An HTTP POST transfer tracked as an iced subscription.
pub struct Download {
    url: String,      // target endpoint for the POST
    body: Box<[u8]>,  // raw request body, sent as application/octet-stream
}
// Make sure iced can use our download stream
impl<H, I> iced_native::subscription::Recipe<H, I> for Download
where
    H: std::hash::Hasher,
{
    type Output = Progress;
    /// Identify this subscription: one stream per (recipe type, URL) pair.
    fn hash(&self, state: &mut H) {
        use std::hash::Hash;
        std::any::TypeId::of::<Self>().hash(state);
        self.url.hash(state);
    }
    /// Drive the POST and response read as a stream of `Progress` events.
    ///
    /// Implemented as an unfold over `State`: Ready -> Downloading -> Finished.
    /// Leftover `dbg!` calls and the unused hex dump of the request body were
    /// removed (they printed debug output on every poll in production).
    fn stream(
        self: Box<Self>,
        _input: futures::stream::BoxStream<'static, I>,
    ) -> futures::stream::BoxStream<'static, Self::Output> {
        Box::pin(futures::stream::unfold(
            State::Ready(self.url, self.body),
            |state| async move {
                match state {
                    State::Ready(url, body) => {
                        let client = reqwest::Client::new();
                        let response = client
                            .post(&url)
                            .header("Content-Type", "application/octet-stream")
                            .body(body.to_vec())
                            .send()
                            .await;
                        match response {
                            Ok(response) => {
                                if let Some(total) = response.content_length() {
                                    Some((
                                        Progress::Started,
                                        State::Downloading {
                                            response,
                                            total,
                                            downloaded: 0,
                                            bytes: Vec::with_capacity(total as usize),
                                        },
                                    ))
                                } else {
                                    // NOTE(review): a missing Content-Length is
                                    // reported as Errored even for success
                                    // statuses — confirm this is intended.
                                    Some((
                                        Progress::Errored {
                                            status_code: response.status(),
                                        },
                                        State::Finished,
                                    ))
                                }
                            }
                            Err(error) => Some((
                                Progress::Failure {
                                    error: error.to_string(),
                                },
                                State::Finished,
                            )),
                        }
                    }
                    State::Downloading {
                        mut response,
                        total,
                        downloaded,
                        mut bytes,
                    } => {
                        let status = response.status();
                        if status != reqwest::StatusCode::OK {
                            return Some((
                                Progress::Errored {
                                    status_code: status,
                                },
                                State::Finished,
                            ));
                        }
                        match response.chunk().await {
                            Ok(Some(chunk)) => {
                                let downloaded = downloaded + chunk.len() as u64;
                                let percentage = (downloaded as f32 / total as f32) * 100.0;
                                bytes.extend_from_slice(&chunk);
                                Some((
                                    Progress::Advanced(percentage),
                                    State::Downloading {
                                        response,
                                        total,
                                        downloaded,
                                        bytes,
                                    },
                                ))
                            }
                            Ok(None) => {
                                // Body complete: surface it as (lossy) UTF-8 text.
                                let id = String::from_utf8_lossy(&bytes).to_string();
                                Some((Progress::Finished { id }, State::Finished))
                            }
                            Err(error) => Some((
                                Progress::Failure {
                                    error: error.to_string(),
                                },
                                State::Finished,
                            )),
                        }
                    }
                    State::Finished => {
                        // We do not let the stream die, as it would start a
                        // new download repeatedly if the user is not careful
                        // in case of errors.
                        let _: () = iced::futures::future::pending().await;
                        None
                    }
                }
            },
        ))
    }
}
/// Progress events emitted by the `Download` subscription stream.
#[derive(Debug, Clone)]
pub enum Progress {
    /// Request accepted and the response advertised a content length.
    Started,
    /// Percentage of the response body received so far (0.0..=100.0).
    Advanced(f32),
    /// Body fully received; `id` is the body decoded as (lossy) UTF-8.
    Finished { id: String },
    /// Non-OK response status, or a response without a content length.
    Errored { status_code: reqwest::StatusCode },
    /// Transport-level failure (send/read error), rendered as a string.
    Failure { error: String },
}
/// Internal state machine driven by `futures::stream::unfold` in
/// `Download::stream`.
pub enum State {
    /// Not yet sent: target URL and request body.
    Ready(String, Box<[u8]>),
    /// Response is being read chunk by chunk.
    Downloading {
        response: reqwest::Response,
        total: u64,       // expected body length (from Content-Length)
        downloaded: u64,  // bytes received so far
        bytes: Vec<u8>,   // accumulated body
    },
    /// Terminal state; the stream pends forever instead of ending.
    Finished,
}
|
extern crate io;
pub use self::io::*;
use arch::memory::LOGICAL_OFFSET;
/// A wrapper for physical addresses.
///
/// `I` is the I/O primitive the physical address is stored in; its width
/// determines the size of the stored address.
// NOTE(review): `repr(packed)` on a generic single-field struct — confirm the
// surrounding hardware structures actually require packing.
#[repr(packed)]
pub struct PhysAddr<I: Io> {
    inner: I
}
impl<I: Io> PhysAddr<I> {
    /// Wrap a raw I/O location as a physical address.
    pub fn new(inner: I) -> PhysAddr<I> {
        // Field-init shorthand replaces the redundant `inner: inner`.
        PhysAddr { inner }
    }
}
impl<I: Io<Value=u32>> PhysAddr<I> {
    /// Write a value that may be a logical address.
    ///
    /// Values at or above `LOGICAL_OFFSET` are treated as logical addresses
    /// and translated to physical by subtracting the offset before writing.
    pub fn write(&mut self, mut value: u32) {
        if value >= LOGICAL_OFFSET as u32 {
            value -= LOGICAL_OFFSET as u32;
        }
        self.inner.write(value);
    }
}
|
// svd2rust-style generated accessors for the CFGR2 register's PVD_LOCK bit.
#[doc = "Register `CFGR2` reader"]
pub type R = crate::R<CFGR2_SPEC>;
#[doc = "Register `CFGR2` writer"]
pub type W = crate::W<CFGR2_SPEC>;
#[doc = "Field `PVD_LOCK` reader - PVD lock enable bit"]
pub type PVD_LOCK_R = crate::BitReader<PVD_LOCK_A>;
#[doc = "PVD lock enable bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PVD_LOCK_A {
    #[doc = "0: PVD interrupt disconnected from TIM15/16/17 Break input"]
    Disconnected = 0,
    #[doc = "1: PVD interrupt connected to TIM15/16/17 Break input"]
    Connected = 1,
}
impl From<PVD_LOCK_A> for bool {
#[inline(always)]
fn from(variant: PVD_LOCK_A) -> Self {
variant as u8 != 0
}
}
// Reader wrapper: converts the raw PVD_LOCK bit into its enum variant.
impl PVD_LOCK_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PVD_LOCK_A {
        match self.bits {
            false => PVD_LOCK_A::Disconnected,
            true => PVD_LOCK_A::Connected,
        }
    }
    #[doc = "PVD interrupt disconnected from TIM15/16/17 Break input"]
    #[inline(always)]
    pub fn is_disconnected(&self) -> bool {
        *self == PVD_LOCK_A::Disconnected
    }
    #[doc = "PVD interrupt connected to TIM15/16/17 Break input"]
    #[inline(always)]
    pub fn is_connected(&self) -> bool {
        *self == PVD_LOCK_A::Connected
    }
}
#[doc = "Field `PVD_LOCK` writer - PVD lock enable bit"]
pub type PVD_LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PVD_LOCK_A>;
// Writer wrapper: named setters for the two PVD_LOCK values.
impl<'a, REG, const O: u8> PVD_LOCK_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PVD interrupt disconnected from TIM15/16/17 Break input"]
    #[inline(always)]
    pub fn disconnected(self) -> &'a mut crate::W<REG> {
        self.variant(PVD_LOCK_A::Disconnected)
    }
    #[doc = "PVD interrupt connected to TIM15/16/17 Break input"]
    #[inline(always)]
    pub fn connected(self) -> &'a mut crate::W<REG> {
        self.variant(PVD_LOCK_A::Connected)
    }
}
impl R {
    #[doc = "Bit 2 - PVD lock enable bit"]
    #[inline(always)]
    pub fn pvd_lock(&self) -> PVD_LOCK_R {
        // PVD_LOCK occupies bit 2 of CFGR2.
        PVD_LOCK_R::new(((self.bits >> 2) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 2 - PVD lock enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn pvd_lock(&mut self) -> PVD_LOCK_W<CFGR2_SPEC, 2> {
        PVD_LOCK_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: bypasses the typed field writers; the caller is responsible for
    // writing only values that are valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR2_SPEC;
impl crate::RegisterSpec for CFGR2_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr2::R`](R) reader structure"]
impl crate::Readable for CFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr2::W`](W) writer structure"]
impl crate::Writable for CFGR2_SPEC {
    // No fields need a forced 0/1 written during modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR2 to value 0"]
impl crate::Resettable for CFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// A buffered CGI-style HTTP response: header lines, body lines, and the
/// status code (0 while unset).
#[derive(Default)]
pub struct Response {
    headers: Vec<String>,  // emitted verbatim, one per line, before the body
    content: Vec<String>,  // body lines, printed after a blank separator line
    status_code: u16,      // 0 = not yet set (see set_status_code)
}
impl Response {
    /// Create an empty response: no headers, no content, status code unset.
    pub fn new() -> Response {
        Default::default()
    }
    /// Queue a raw header line, emitted before the blank separator line.
    pub fn add_header(&mut self, header: &str) {
        self.headers.push(header.to_string())
    }
    /// Queue a line of body content.
    pub fn add_content(&mut self, content: &str) {
        self.content.push(content.to_string())
    }
    /// Set the HTTP status code once; later calls are ignored with a warning
    /// on stderr. Unknown codes are normalized to 500 by `status_code_text`.
    pub fn set_status_code(&mut self, status_code: u16) {
        if self.status_code == 0 {
            // Shadowing is deliberate: `status_code_text` may rewrite an
            // unknown code to 500.
            let (status_code, status_text) = status_code_text(status_code);
            self.headers.push(format!("Status: {} {}", status_code, status_text));
            self.status_code = status_code;
        } else {
            eprintln!("Status code has already been set to {}! Ignoring.", self.status_code);
        }
    }
    /// Write the response to stdout, CGI-style: headers, blank line, content.
    pub fn send(self) {
        for h in self.headers {
            println!("{}", h);
        }
        // Blank line separates headers from the body (idiomatic println!()).
        println!();
        for c in self.content {
            println!("{}", c);
        }
    }
}
/// Map an HTTP status code to `(code, reason phrase)`.
///
/// Recognized codes are returned unchanged with their standard reason text;
/// anything else collapses to `(500, "Internal Server Error")`.
fn status_code_text(status_code: u16) -> (u16, &'static str) {
    match status_code {
        200 => (status_code, "OK"),
        201 => (status_code, "Created"),
        202 => (status_code, "Accepted"),
        204 => (status_code, "No Content"),
        304 => (status_code, "Not Modified"),
        400 => (status_code, "Bad Request"),
        401 => (status_code, "Unauthorized"),
        403 => (status_code, "Forbidden"),
        404 => (status_code, "Not Found"),
        405 => (status_code, "Method Not Allowed"),
        501 => (status_code, "Not Implemented"),
        503 => (status_code, "Service Unavailable"),
        _ => (500, "Internal Server Error"),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A known code is returned unchanged with its reason phrase.
    #[test]
    fn valid_status_code() {
        assert_eq!((200, "OK"), status_code_text(200));
    }
    // An unknown code is normalized to 500.
    #[test]
    fn invalid_status_code() {
        assert_eq!((500, "Internal Server Error"), status_code_text(1000));
    }
}
|
//! Serialization and de-serialization for TAC code.
use std::{fmt::Display, writeln};
use ty::FuncTy;
use util::ListFormatter;
use crate::*;
/// Like `std::fmt::Display`, but formatting needs an extra context value
/// (e.g. the destination variable id a TAC instruction is bound to).
pub trait FormatContext<C> {
    fn fmt_ctx(&self, f: &mut std::fmt::Formatter<'_>, ctx: C) -> std::fmt::Result;
}
/// Blanket impl: a reference formats exactly like the value it points to.
impl<T, C> FormatContext<C> for &T
where
    T: FormatContext<C>,
{
    fn fmt_ctx(&self, f: &mut std::fmt::Formatter<'_>, ctx: C) -> std::fmt::Result {
        (*self).fmt_ctx(f, ctx)
    }
}
impl Display for Ty {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // Unit type prints as the empty tuple.
            Ty::Unit => {
                write!(f, "()")
            }
            // Function types delegate to FuncTy's Display.
            Ty::Func(func) => func.fmt(f),
            // Pointer types append `*` to the pointee.
            Ty::Ptr(tgt) => {
                write!(f, "{}*", tgt)
            }
            Ty::Numeric(ty) => ty.fmt(f),
        }
    }
}
impl Display for FuncTy {
    /// Format as `(fn (p1 p2 ...) ret)` with space-separated parameters.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "(fn (")?;
        for (idx, param) in self.params.iter().enumerate() {
            // Space separator between parameters, not before the first.
            if idx != 0 {
                write!(f, " ")?;
            }
            param.fmt(f)?;
        }
        write!(f, ") {})", &self.return_type)
    }
}
impl Display for NumericTy {
    /// Format as a kind letter followed by the bit size, e.g. `i32`, `b1`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.kind {
            TyKind::Bool => {
                write!(f, "b")
            }
            TyKind::Int => {
                write!(f, "i")
            }
        }?;
        write!(f, "{}", self.size)
    }
}
impl Display for BinaryOp {
    /// Render the operator as its lower-case TAC mnemonic.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            BinaryOp::Add => "add",
            BinaryOp::Sub => "sub",
            BinaryOp::Mul => "mul",
            BinaryOp::Div => "div",
            BinaryOp::Lt => "lt",
            BinaryOp::Gt => "gt",
            BinaryOp::Le => "le",
            BinaryOp::Ge => "ge",
            BinaryOp::Eq => "eq",
            BinaryOp::Ne => "ne",
        })
    }
}
/// Display wrapper for a TAC variable id; `u32::MAX` is the "unused" sentinel.
struct VarId(u32);
impl Display for VarId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `u32::MAX` replaces the deprecated `u32::max_value()`.
        if self.0 != u32::MAX {
            write!(f, "%{}", self.0)
        } else {
            // Sentinel id renders as a discard slot.
            write!(f, "_")
        }
    }
}
impl Display for Value {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // A reference to another instruction's destination.
            Value::Dest(i) => {
                write!(f, "{}", i)
            }
            // An immediate constant.
            Value::Imm(imm) => {
                write!(f, "{}", imm)
            }
        }
    }
}
impl FormatContext<VarId> for Tac {
    /// Format one instruction as `(<id> <ty> <kind...>)`; `ctx` is the
    /// destination variable id of this instruction.
    // NOTE(review): this prints `ctx.0` directly, so the `_` sentinel
    // rendering in VarId's Display is bypassed here — confirm intended.
    fn fmt_ctx(&self, f: &mut std::fmt::Formatter<'_>, ctx: VarId) -> std::fmt::Result {
        write!(f, "({} {} ", ctx.0, self.inst.ty)?;
        match &self.inst.kind {
            InstKind::Binary(i) => {
                write!(f, "{} {} {}", i.op, i.lhs, i.rhs)?;
            }
            InstKind::FunctionCall(call) => {
                // `call <name> (<args...>)`, arguments space-separated.
                write!(f, "call {} (", &call.name)?;
                for (idx, param) in call.params.iter().enumerate() {
                    if idx != 0 {
                        write!(f, " ")?;
                    }
                    param.fmt(f)?;
                }
                write!(f, ")")?;
            }
            InstKind::Assign(i) => {
                i.fmt(f)?;
            }
            InstKind::Param(id) => {
                write!(f, "param {}", id)?;
            }
            InstKind::Phi(phi) => {
                // `phi (<value> <block>)...`, one pair per incoming edge.
                write!(f, "phi ")?;
                let mut first = true;
                for (&bb, &val) in phi {
                    if !first {
                        write!(f, " ")?;
                    } else {
                        first = false;
                    }
                    write!(f, "({} {})", val, bb)?;
                }
            }
        }
        write!(f, ")")?;
        Ok(())
    }
}
impl Display for Branch {
    /// Format the block terminator as an s-expression: `(return [val])`,
    /// `(br <target>)`, `(brif <cond> bb<t> bb<f>)`, or `(unreachable)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "(")?;
        match self {
            Branch::Return(v) => {
                write!(f, "return")?;
                // Optional return value.
                if let Some(val) = v {
                    write!(f, " {}", val)?;
                }
            }
            Branch::Jump(target) => {
                write!(f, "br {}", target)?;
            }
            Branch::CondJump {
                cond,
                if_true,
                if_false,
            } => {
                write!(
                    f,
                    "brif {} bb{} bb{}",
                    cond,
                    if_true.slot(),
                    if_false.slot()
                )?;
            }
            Branch::Unreachable => {
                write!(f, "unreachable")?;
            }
        }
        write!(f, ")")?;
        Ok(())
    }
}
impl std::fmt::Display for TacFunc {
    /// Pretty-print the whole function: a `(fn <name> (<params>) <ret>`
    /// header, then one `(bb<n> (...insts) <branch>)` per basic block.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Panics if `self.ty` is not a function type (invariant of TacFunc).
        let ty = self.ty.as_func().unwrap();
        let param_fmt = ListFormatter::new(ty.params.iter());
        write!(f, "(fn {} ({}) {}", &self.name, param_fmt, &ty.return_type)?;
        for (k, v) in self.bb_iter() {
            writeln!(f)?;
            write!(f, "\t(bb{} (", k.slot())?;
            // Walk the block's instruction chain (intrusive list via `next`).
            if let Some(x) = v.head {
                let mut cur_idx = x;
                loop {
                    let i = self.instructions_arena.get(cur_idx).unwrap();
                    let cur_id = cur_idx.slot();
                    writeln!(f)?;
                    write!(f, "\t\t")?;
                    // The arena slot doubles as the instruction's variable id.
                    i.fmt_ctx(f, VarId(cur_id))?;
                    match i.next {
                        Some(x) => cur_idx = x,
                        None => {
                            break;
                        }
                    }
                }
            }
            writeln!(f, ")")?;
            write!(f, "\t\t")?;
            v.branch.fmt(f)?;
            write!(f, ")")?;
        }
        writeln!(f, ")")?;
        Ok(())
    }
}
|
use uuid::Uuid;
use std::process::Command;
/**
* Get printers on windows using wmic
*/
/// Get printers on Windows by parsing `wmic printer get DriverName, Name`.
///
/// Returns an empty vector when `wmic` cannot be executed or reports failure.
pub fn get_printers() -> Vec<super::Printer> {
    let out = match Command::new("wmic")
        .arg("printer")
        .arg("get")
        .arg("DriverName, Name")
        .output()
    {
        Ok(out) => out,
        // wmic missing or not executable: report no printers instead of
        // panicking (the function already returns empty on command failure).
        Err(_) => return Vec::new(),
    };
    if !out.status.success() {
        return Vec::new();
    }
    // from_utf8_lossy replaces the previous unsafe `from_utf8_unchecked`:
    // wmic output is not guaranteed to be valid UTF-8, and feeding invalid
    // bytes to from_utf8_unchecked is undefined behavior.
    let out_str = String::from_utf8_lossy(&out.stdout);
    let mut printers = Vec::new();
    // The first line is the column header ("DriverName  Name"); skip it.
    for line in out_str.lines().skip(1) {
        let printer_data: Vec<&str> = line.split_ascii_whitespace().collect();
        // Skip blank/malformed rows instead of panicking on a missing column.
        // NOTE(review): names containing spaces are still truncated at the
        // first token, as in the original parsing — confirm against real
        // wmic output.
        if printer_data.len() < 2 {
            continue;
        }
        printers.push(super::Printer {
            id: Uuid::new_v5(&Uuid::NAMESPACE_DNS, printer_data[0].as_bytes()).to_string(),
            name: String::from(printer_data[1]),
            system_name: String::from(printer_data[0]),
        });
    }
    printers
}
/**
* Print on windows using lpr
*/
/// Print `file_path` on the named printer via `lpr`; returns whether the
/// spawned command exited successfully.
pub fn print(printer_system_name: &String, file_path: &std::path::PathBuf) -> bool {
    let output = Command::new("lpr")
        .arg("-P")
        .arg(printer_system_name)
        .arg(file_path)
        .output()
        .unwrap();
    output.status.success()
}
// TODO: Remove these
/*#![allow(dead_code)]
extern crate mr;
extern crate rand;
use rand::Rng;
use std::sync;
use std::thread;
struct MyInputter;
mod bin {
extern crate byteorder;
use self::byteorder::{BigEndian, ByteOrder};
pub fn write_u32(buf: &mut [u8], n: u32) { <BigEndian as ByteOrder>::write_u32(buf, n) }
pub fn read_u32(buf: &[u8]) -> u32 { <BigEndian as ByteOrder>::read_u32(buf) }
}
impl mr::Inputter for MyInputter {
fn input(&self, emit: &mr::KeyValueEmitter, map_tasks: i32, map_task_index: i32) {
let seed = (map_task_index + 1) as u32;
let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed(
[seed, seed << 5, seed << 10, seed << 20]);
println!("Thread {}: input map_task_index={}/{}",
thread::current().name().unwrap(),
map_task_index, map_tasks);
let mut key_data = [0u8; 4];
for _ in 0..10 {
{
let mut keyw = key_data.as_mut_slice();
bin::write_u32(&mut keyw, rng.next_u32());
}
// For now, key and value are the same.
emit(key_data.as_slice(), key_data.as_slice());
}
}
}
struct MyMapper;
impl mr::Mapper for MyMapper {
fn map(&self, _emit: &mr::KeyValueEmitter, key: &[u8], _value: &[u8]) {
let key_u32 = bin::read_u32(key);
println!("Thread {}: got key {}", thread::current().name().unwrap(), key_u32);
}
}
struct MyReducer;
impl mr::Reducer for MyReducer {
fn reduce(&self, _emit: &mr::ValueEmitter, _key: &[u8], _value: &Iterator<Item=&[u8]>) {
// TODO
}
}
fn main() {
mr::local_threaded::run(sync::Arc::new(mr::MapReduceOptions{
inputter: Box::new(MyInputter),
mapper: Box::new(MyMapper),
reducer: Box::new(MyReducer),
num_workers: 4,
num_map_shards: 10,
num_reduce_shards: 12,
}));
}
*/
|
/*
* Copyright Stalwart Labs Ltd. See the COPYING
* file at the top-level directory of this distribution.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
* https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
* <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
* option. This file may not be copied, modified, or distributed
* except according to those terms.
*/
pub mod get;
pub mod helpers;
pub mod set;
use std::fmt::Display;
use crate::core::changes::ChangesObject;
use crate::core::set::date_not_set;
use crate::core::set::string_not_set;
use crate::core::Object;
use crate::Get;
use crate::Set;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// JMAP-style vacation-response object; the `State` type parameter selects
/// the Get vs Set typestate for this model.
// NOTE(review): field semantics inferred from the serde renames — confirm
// against the server's vacationResponse schema.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VacationResponse<State = Get> {
    #[serde(skip)]
    _create_id: Option<usize>,  // local creation id, never serialized
    #[serde(skip)]
    _state: std::marker::PhantomData<State>,  // zero-size typestate marker
    #[serde(rename = "id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    id: Option<String>,
    #[serde(rename = "isEnabled")]
    #[serde(skip_serializing_if = "Option::is_none")]
    is_enabled: Option<bool>,
    #[serde(rename = "fromDate")]
    #[serde(skip_serializing_if = "date_not_set")]
    from_date: Option<DateTime<Utc>>,
    #[serde(rename = "toDate")]
    #[serde(skip_serializing_if = "date_not_set")]
    to_date: Option<DateTime<Utc>>,
    #[serde(rename = "subject")]
    #[serde(skip_serializing_if = "string_not_set")]
    subject: Option<String>,
    #[serde(rename = "textBody")]
    #[serde(skip_serializing_if = "string_not_set")]
    text_body: Option<String>,
    #[serde(rename = "htmlBody")]
    #[serde(skip_serializing_if = "string_not_set")]
    html_body: Option<String>,
}
/// Property names of `VacationResponse`; serialized with the same camelCase
/// names as the corresponding struct fields.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, Copy)]
pub enum Property {
    #[serde(rename = "id")]
    Id,
    #[serde(rename = "isEnabled")]
    IsEnabled,
    #[serde(rename = "fromDate")]
    FromDate,
    #[serde(rename = "toDate")]
    ToDate,
    #[serde(rename = "subject")]
    Subject,
    #[serde(rename = "textBody")]
    TextBody,
    #[serde(rename = "htmlBody")]
    HtmlBody,
}
impl Display for Property {
    /// Render the camelCase property name (matches the serde renames).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Property::Id => "id",
            Property::IsEnabled => "isEnabled",
            Property::FromDate => "fromDate",
            Property::ToDate => "toDate",
            Property::Subject => "subject",
            Property::TextBody => "textBody",
            Property::HtmlBody => "htmlBody",
        };
        write!(f, "{}", name)
    }
}
// Marker impls: VacationResponse participates in both Get and Set requests,
// always requires an account id, and change tracking carries no extra payload.
impl Object for VacationResponse<Set> {
    type Property = Property;
    fn requires_account_id() -> bool {
        true
    }
}
impl Object for VacationResponse<Get> {
    type Property = Property;
    fn requires_account_id() -> bool {
        true
    }
}
impl ChangesObject for VacationResponse<Set> {
    type ChangesResponse = ();
}
impl ChangesObject for VacationResponse<Get> {
    type ChangesResponse = ();
}
|
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os::raw::c_void;
use base::{CFOptionFlags, CFIndex, CFAllocatorRef, Boolean, CFTypeID, CFTypeRef, SInt32, CFRange};
use data::CFDataRef;
use array::CFArrayRef;
use dictionary::CFDictionaryRef;
use string::{CFStringRef, CFStringEncoding};
use error::CFErrorRef;
// FFI mirror of CoreFoundation's CFURL.h: opaque URL type, path-style enum
// values, and bookmark creation/resolution option bit masks.
#[repr(C)]
pub struct __CFURL(c_void);
// CFURLRef is an opaque pointer; ownership follows CF retain/release rules.
pub type CFURLRef = *const __CFURL;
pub type CFURLBookmarkCreationOptions = CFOptionFlags;
pub type CFURLBookmarkResolutionOptions = CFOptionFlags;
pub type CFURLBookmarkFileCreationOptions = CFOptionFlags;
pub type CFURLPathStyle = CFIndex;
/* typedef CF_ENUM(CFIndex, CFURLPathStyle) */
pub const kCFURLPOSIXPathStyle: CFURLPathStyle = 0;
pub const kCFURLHFSPathStyle: CFURLPathStyle = 1;
pub const kCFURLWindowsPathStyle: CFURLPathStyle = 2;
/* Bookmark Data Creation Options */
pub static kCFURLBookmarkCreationMinimalBookmarkMask: CFURLBookmarkCreationOptions =
    (1u32 << 9) as usize;
pub static kCFURLBookmarkCreationSuitableForBookmarkFile: CFURLBookmarkCreationOptions =
    (1u32 << 10) as usize;
#[cfg(target_os="macos")]
pub static kCFURLBookmarkCreationWithSecurityScope: CFURLBookmarkCreationOptions =
    (1u32 << 11) as usize;
#[cfg(target_os="macos")]
pub static kCFURLBookmarkCreationSecurityScopeAllowOnlyReadAccess: CFURLBookmarkCreationOptions =
    (1u32 << 12) as usize;
pub static kCFURLBookmarkCreationWithoutImplicitSecurityScope: CFURLBookmarkCreationOptions =
    (1u32 << 29) as usize;
pub static kCFURLBookmarkCreationPreferFileIDResolutionMask: CFURLBookmarkCreationOptions =
    (1u32 << 8) as usize; // deprecated
/* The types of components in a URL. */
pub type CFURLComponentType = CFIndex;
pub const kCFURLComponentScheme: CFIndex = 1;
pub const kCFURLComponentNetLocation: CFIndex = 2;
pub const kCFURLComponentPath: CFIndex = 3;
pub const kCFURLComponentResourceSpecifier: CFIndex = 4;
pub const kCFURLComponentUser: CFIndex = 5;
pub const kCFURLComponentPassword: CFIndex = 6;
pub const kCFURLComponentUserInfo: CFIndex = 7;
pub const kCFURLComponentHost: CFIndex = 8;
pub const kCFURLComponentPort: CFIndex = 9;
pub const kCFURLComponentParameterString: CFIndex= 10;
pub const kCFURLComponentQuery: CFIndex = 11;
pub const kCFURLComponentFragment: CFIndex = 12;
/* Bookmark Data Resolution Options */
pub const kCFURLBookmarkResolutionWithoutUIMask: CFURLBookmarkResolutionOptions = ( 1u32 << 8 ) as usize;
pub const kCFURLBookmarkResolutionWithoutMountingMask: CFURLBookmarkResolutionOptions = ( 1u32 << 9 ) as usize;
#[cfg(target_os="macos")]
pub const kCFURLBookmarkResolutionWithSecurityScope: CFURLBookmarkResolutionOptions = ( 1u32 << 10 ) as usize;
//pub const kCFURLBookmarkResolutionWithoutImplicitStartAccessing: CFURLBookmarkResolutionOptions = ( 1u32 << 15 ) as usize; // macos(11.2)+
// Legacy aliases for the resolution masks (same bit values as above).
pub const kCFBookmarkResolutionWithoutUIMask: CFURLBookmarkResolutionOptions = ( 1u32 << 8 ) as usize;
pub const kCFBookmarkResolutionWithoutMountingMask: CFURLBookmarkResolutionOptions = ( 1u32 << 9 ) as usize;
extern {
/*
* CFURL.h
*/
/* Common File System Resource Keys */
pub static kCFURLNameKey: CFStringRef;
pub static kCFURLLocalizedNameKey: CFStringRef;
pub static kCFURLIsRegularFileKey: CFStringRef;
pub static kCFURLIsDirectoryKey: CFStringRef;
pub static kCFURLIsSymbolicLinkKey: CFStringRef;
pub static kCFURLIsVolumeKey: CFStringRef;
pub static kCFURLIsPackageKey: CFStringRef;
pub static kCFURLIsApplicationKey: CFStringRef;
// pub static kCFURLApplicationIsScriptableKey: CFStringRef; //macos(10.11)+
pub static kCFURLIsSystemImmutableKey: CFStringRef;
pub static kCFURLIsUserImmutableKey: CFStringRef;
pub static kCFURLIsHiddenKey: CFStringRef;
pub static kCFURLHasHiddenExtensionKey: CFStringRef;
pub static kCFURLCreationDateKey: CFStringRef;
pub static kCFURLContentAccessDateKey: CFStringRef;
pub static kCFURLContentModificationDateKey: CFStringRef;
pub static kCFURLAttributeModificationDateKey: CFStringRef;
// pub static kCFURLFileIdentifierKey: CFStringRef; //macos(13.3)+
// pub static kCFURLFileContentIdentifierKey: CFStringRef; //macos(11.0)+
// pub static kCFURLMayShareFileContentKey: CFStringRef; //macos(11.0)+
// pub static kCFURLMayHaveExtendedAttributesKey: CFStringRef; //macos(11.0)+
// pub static kCFURLIsPurgeableKey: CFStringRef; //macos(11.0)+
// pub static kCFURLIsSparseKey: CFStringRef; //macos(11.0)+
pub static kCFURLLinkCountKey: CFStringRef;
pub static kCFURLParentDirectoryURLKey: CFStringRef;
pub static kCFURLVolumeURLKey: CFStringRef;
pub static kCFURLTypeIdentifierKey: CFStringRef; //deprecated
pub static kCFURLLocalizedTypeDescriptionKey: CFStringRef;
pub static kCFURLLabelNumberKey: CFStringRef;
pub static kCFURLLabelColorKey: CFStringRef; //deprecated
pub static kCFURLLocalizedLabelKey: CFStringRef;
pub static kCFURLEffectiveIconKey: CFStringRef; //deprecated
pub static kCFURLCustomIconKey: CFStringRef; //deprecated
pub static kCFURLFileResourceIdentifierKey: CFStringRef;
pub static kCFURLVolumeIdentifierKey: CFStringRef;
pub static kCFURLPreferredIOBlockSizeKey: CFStringRef;
pub static kCFURLIsReadableKey: CFStringRef;
pub static kCFURLIsWritableKey: CFStringRef;
pub static kCFURLIsExecutableKey: CFStringRef;
pub static kCFURLFileSecurityKey: CFStringRef;
#[cfg(feature="mac_os_10_8_features")]
#[cfg_attr(feature = "mac_os_10_7_support", linkage = "extern_weak")]
pub static kCFURLIsExcludedFromBackupKey: CFStringRef;
// pub static kCFURLTagNamesKey: CFStringRef; //macos(10.9)+
#[cfg(feature="mac_os_10_8_features")]
#[cfg_attr(feature = "mac_os_10_7_support", linkage = "extern_weak")]
pub static kCFURLPathKey: CFStringRef; // macos(10.8)+
pub static kCFURLCanonicalPathKey: CFStringRef; // macos(10.12)+
pub static kCFURLIsMountTriggerKey: CFStringRef;
// pub static kCFURLGenerationIdentifierKey: CFStringRef; // macos(10.10)+
// pub static kCFURLDocumentIdentifierKey: CFStringRef; // macos(10.10)+
// pub static kCFURLAddedToDirectoryDateKey: CFStringRef; // macos(10.10)+
// pub static kCFURLQuarantinePropertiesKey: CFStringRef; // macos(10.10)+
pub static kCFURLFileResourceTypeKey: CFStringRef;
/* File Resource Types. The file system object type values returned for the kCFURLFileResourceTypeKey */
pub static kCFURLFileResourceTypeNamedPipe: CFStringRef;
pub static kCFURLFileResourceTypeCharacterSpecial: CFStringRef;
pub static kCFURLFileResourceTypeDirectory: CFStringRef;
pub static kCFURLFileResourceTypeBlockSpecial: CFStringRef;
pub static kCFURLFileResourceTypeRegular: CFStringRef;
pub static kCFURLFileResourceTypeSymbolicLink: CFStringRef;
pub static kCFURLFileResourceTypeSocket: CFStringRef;
pub static kCFURLFileResourceTypeUnknown: CFStringRef;
/* File Property Keys */
pub static kCFURLFileSizeKey: CFStringRef;
pub static kCFURLFileAllocatedSizeKey: CFStringRef;
pub static kCFURLTotalFileSizeKey: CFStringRef;
pub static kCFURLTotalFileAllocatedSizeKey: CFStringRef;
pub static kCFURLIsAliasFileKey: CFStringRef;
// pub static kCFURLFileProtectionKey: CFStringRef; // ios(9.0)+
/* The protection level values returned for the kCFURLFileProtectionKey */
// pub static kCFURLFileProtectionNone: CFStringRef; // ios(9.0)+
// pub static kCFURLFileProtectionComplete: CFStringRef; // ios(9.0)+
// pub static kCFURLFileProtectionCompleteUnlessOpen: CFStringRef; // ios(9.0)+
// pub static kCFURLFileProtectionCompleteUntilFirstUserAuthentication: CFStringRef; // ios(9.0)+
/* Volume Property Keys */
pub static kCFURLVolumeLocalizedFormatDescriptionKey: CFStringRef;
pub static kCFURLVolumeTotalCapacityKey: CFStringRef;
pub static kCFURLVolumeAvailableCapacityKey: CFStringRef;
//pub static kCFURLVolumeAvailableCapacityForImportantUsageKey: CFStringRef; //macos(10.13)+
//pub static kCFURLVolumeAvailableCapacityForOpportunisticUsageKey: CFStringRef; //macos(10.13)+
pub static kCFURLVolumeResourceCountKey: CFStringRef;
pub static kCFURLVolumeSupportsPersistentIDsKey: CFStringRef;
pub static kCFURLVolumeSupportsSymbolicLinksKey: CFStringRef;
pub static kCFURLVolumeSupportsHardLinksKey: CFStringRef;
pub static kCFURLVolumeSupportsJournalingKey: CFStringRef;
pub static kCFURLVolumeIsJournalingKey: CFStringRef;
pub static kCFURLVolumeSupportsSparseFilesKey: CFStringRef;
pub static kCFURLVolumeSupportsZeroRunsKey: CFStringRef;
pub static kCFURLVolumeSupportsCaseSensitiveNamesKey: CFStringRef;
pub static kCFURLVolumeSupportsCasePreservedNamesKey: CFStringRef;
pub static kCFURLVolumeSupportsRootDirectoryDatesKey: CFStringRef;
pub static kCFURLVolumeSupportsVolumeSizesKey: CFStringRef;
pub static kCFURLVolumeSupportsRenamingKey: CFStringRef;
pub static kCFURLVolumeSupportsAdvisoryFileLockingKey: CFStringRef;
pub static kCFURLVolumeSupportsExtendedSecurityKey: CFStringRef;
pub static kCFURLVolumeIsBrowsableKey: CFStringRef;
pub static kCFURLVolumeMaximumFileSizeKey: CFStringRef;
pub static kCFURLVolumeIsEjectableKey: CFStringRef;
pub static kCFURLVolumeIsRemovableKey: CFStringRef;
pub static kCFURLVolumeIsInternalKey: CFStringRef;
pub static kCFURLVolumeIsAutomountedKey: CFStringRef;
pub static kCFURLVolumeIsLocalKey: CFStringRef;
pub static kCFURLVolumeIsReadOnlyKey: CFStringRef;
pub static kCFURLVolumeCreationDateKey: CFStringRef;
pub static kCFURLVolumeURLForRemountingKey: CFStringRef;
pub static kCFURLVolumeUUIDStringKey: CFStringRef;
pub static kCFURLVolumeNameKey: CFStringRef;
pub static kCFURLVolumeLocalizedNameKey: CFStringRef;
// pub static kCFURLVolumeIsEncryptedKey: CFStringRef; //macos(10.12)+
// pub static kCFURLVolumeIsRootFileSystemKey: CFStringRef; //macos(10.12)+
// pub static kCFURLVolumeSupportsCompressionKey: CFStringRef; //macos(10.12)+
// pub static kCFURLVolumeSupportsFileCloningKey: CFStringRef; //macos(10.12)+
// pub static kCFURLVolumeSupportsSwapRenamingKey: CFStringRef; //macos(10.12)+
// pub static kCFURLVolumeSupportsExclusiveRenamingKey: CFStringRef; //macos(10.12)+
// pub static kCFURLVolumeSupportsImmutableFilesKey: CFStringRef; //macos(10.13)+
// pub static kCFURLVolumeSupportsAccessPermissionsKey: CFStringRef; //macos(10.13)+
// pub static kCFURLVolumeSupportsFileProtectionKey: CFStringRef; //macos(11.0)+
// pub static kCFURLVolumeTypeNameKey: CFStringRef; //macos(13.3)+
// pub static kCFURLVolumeSubtypeKey: CFStringRef; //macos(13.3)+
// pub static kCFURLVolumeMountFromLocationKey: CFStringRef; //macos(13.3)+
/* iCloud Constants */
pub static kCFURLIsUbiquitousItemKey: CFStringRef;
pub static kCFURLUbiquitousItemHasUnresolvedConflictsKey: CFStringRef;
pub static kCFURLUbiquitousItemIsDownloadedKey: CFStringRef; // deprecated
pub static kCFURLUbiquitousItemIsDownloadingKey: CFStringRef;
pub static kCFURLUbiquitousItemIsUploadedKey: CFStringRef;
pub static kCFURLUbiquitousItemIsUploadingKey: CFStringRef;
pub static kCFURLUbiquitousItemPercentDownloadedKey: CFStringRef; // deprecated
pub static kCFURLUbiquitousItemPercentUploadedKey: CFStringRef; // deprecated
// pub static kCFURLUbiquitousItemDownloadingStatusKey: CFStringRef; // macos(10.9)+
// pub static kCFURLUbiquitousItemDownloadingErrorKey: CFStringRef; // macos(10.9)+
// pub static kCFURLUbiquitousItemUploadingErrorKey: CFStringRef; // macos(10.9)+
// pub static kCFURLUbiquitousItemIsExcludedFromSyncKey: CFStringRef; // macos(11.3)+
/* The values returned for kCFURLUbiquitousItemDownloadingStatusKey */
// pub static kCFURLUbiquitousItemDownloadingStatusNotDownloaded: CFStringRef; // macos(10.9)+
// pub static kCFURLUbiquitousItemDownloadingStatusDownloaded: CFStringRef; // macos(10.9)+
// pub static kCFURLUbiquitousItemDownloadingStatusCurrent: CFStringRef; // macos(10.9)+
/* CFError userInfo Dictionary Keys */
pub static kCFURLKeysOfUnsetValuesKey: CFStringRef;
/* Creating a CFURL */
pub fn CFURLCopyAbsoluteURL(anURL: CFURLRef) -> CFURLRef;
pub fn CFURLCreateAbsoluteURLWithBytes(allocator: CFAllocatorRef, relativeURLBytes: *const u8, length: CFIndex, encoding: CFStringEncoding, baseURL: CFURLRef, useCompatibilityMode: Boolean) -> CFURLRef;
pub fn CFURLCreateByResolvingBookmarkData(allocator: CFAllocatorRef, bookmark: CFDataRef, options: CFURLBookmarkResolutionOptions, relativeToURL: CFURLRef, resourcePropertiesToInclude: CFArrayRef, isStale: *mut Boolean, error: *mut CFErrorRef) -> CFURLRef;
pub fn CFURLCreateCopyAppendingPathComponent(allocator: CFAllocatorRef, url: CFURLRef, pathComponent: CFStringRef, isDirectory: Boolean) -> CFURLRef;
pub fn CFURLCreateCopyAppendingPathExtension(allocator: CFAllocatorRef, url: CFURLRef, extension: CFStringRef) -> CFURLRef;
pub fn CFURLCreateCopyDeletingLastPathComponent(allocator: CFAllocatorRef, url: CFURLRef) -> CFURLRef;
pub fn CFURLCreateCopyDeletingPathExtension(allocator: CFAllocatorRef, url: CFURLRef) -> CFURLRef;
pub fn CFURLCreateFilePathURL(allocator: CFAllocatorRef, url: CFURLRef, error: *mut CFErrorRef) -> CFURLRef;
pub fn CFURLCreateFileReferenceURL(allocator: CFAllocatorRef, url: CFURLRef, error: *mut CFErrorRef) -> CFURLRef;
pub fn CFURLCreateFromFileSystemRepresentation(allocator: CFAllocatorRef, buffer: *const u8, bufLen: CFIndex, isDirectory: Boolean) -> CFURLRef;
pub fn CFURLCreateFromFileSystemRepresentationRelativeToBase(allocator: CFAllocatorRef, buffer: *const u8, bufLen: CFIndex, isDirectory: Boolean, baseURL: CFURLRef) -> CFURLRef;
//pub fn CFURLCreateFromFSRef(allocator: CFAllocatorRef, fsRef: *const FSRef) -> CFURLRef
pub fn CFURLCreateWithBytes(allocator: CFAllocatorRef, URLBytes: *const u8, length: CFIndex, encoding: CFStringEncoding, baseURL: CFURLRef) -> CFURLRef;
pub fn CFURLCreateWithFileSystemPath(allocator: CFAllocatorRef, filePath: CFStringRef, pathStyle: CFURLPathStyle, isDirectory: Boolean) -> CFURLRef;
pub fn CFURLCreateWithFileSystemPathRelativeToBase(allocator: CFAllocatorRef, filePath: CFStringRef, pathStyle: CFURLPathStyle, isDirectory: Boolean, baseURL: CFURLRef) -> CFURLRef;
pub fn CFURLCreateWithString(allocator: CFAllocatorRef, URLString: CFStringRef, baseURL: CFURLRef) -> CFURLRef;
/* Accessing the Parts of a URL */
pub fn CFURLCanBeDecomposed(anURL: CFURLRef) -> Boolean;
pub fn CFURLCopyFileSystemPath(anURL: CFURLRef, pathStyle: CFURLPathStyle) -> CFStringRef;
pub fn CFURLCopyFragment(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef;
pub fn CFURLCopyHostName(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyLastPathComponent(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyNetLocation(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyParameterString(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef; // deprecated
pub fn CFURLCopyPassword(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyPath(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyPathExtension(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyQueryString(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef;
pub fn CFURLCopyResourceSpecifier(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyScheme(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLCopyStrictPath(anURL: CFURLRef, isAbsolute: *mut Boolean) -> CFStringRef;
pub fn CFURLCopyUserName(anURL: CFURLRef) -> CFStringRef;
pub fn CFURLGetPortNumber(anURL: CFURLRef) -> SInt32;
pub fn CFURLHasDirectoryPath(anURL: CFURLRef) -> Boolean;
/* Converting URLs to Other Representations */
pub fn CFURLCreateData(allocator: CFAllocatorRef, url: CFURLRef, encoding: CFStringEncoding, escapeWhitespace: Boolean) -> CFDataRef;
pub fn CFURLCreateStringByAddingPercentEscapes(allocator: CFAllocatorRef, originalString: CFStringRef, charactersToLeaveUnescaped: CFStringRef, legalURLCharactersToBeEscaped: CFStringRef, encoding: CFStringEncoding) -> CFStringRef; // API_DEPRECATED("Use [NSString stringByAddingPercentEncodingWithAllowedCharacters:] instead, which always uses the recommended UTF-8 encoding, and which encodes for a specific URL component or subcomponent (since each URL component or subcomponent has different rules for what characters are valid).", macos(10.0,10.11), ios(2.0,9.0), watchos(2.0,2.0), tvos(9.0,9.0));
pub fn CFURLCreateStringByReplacingPercentEscapes(allocator: CFAllocatorRef, originalString: CFStringRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef;
pub fn CFURLCreateStringByReplacingPercentEscapesUsingEncoding(allocator: CFAllocatorRef, origString: CFStringRef, charsToLeaveEscaped: CFStringRef, encoding: CFStringEncoding) -> CFStringRef; // deprecated
pub fn CFURLGetFileSystemRepresentation(anURL: CFURLRef, resolveAgainstBase: Boolean, buffer: *mut u8, maxBufLen: CFIndex) -> Boolean;
//pub fn CFURLIsFileReferenceURL(url: CFURLRef) -> Boolean; // macos(10.9)+
//pub fn CFURLGetFSRef(url: CFURLRef, fsRef: *mut FSRef) -> Boolean;
pub fn CFURLGetString(anURL: CFURLRef) -> CFStringRef;
/* Getting URL Properties */
pub fn CFURLGetBaseURL(anURL: CFURLRef) -> CFURLRef;
pub fn CFURLGetBytes(anURL: CFURLRef, buffer: *mut u8, bufferLength: CFIndex) -> CFIndex;
pub fn CFURLGetByteRangeForComponent(url: CFURLRef, component:CFURLComponentType, rangeIncludingSeparators: *mut CFRange) -> CFRange;
pub fn CFURLGetTypeID() -> CFTypeID;
pub fn CFURLResourceIsReachable(url: CFURLRef, error: *mut CFErrorRef) -> Boolean;
/* Getting and Setting File System Resource Properties */
pub fn CFURLClearResourcePropertyCache(url: CFURLRef);
pub fn CFURLClearResourcePropertyCacheForKey(url: CFURLRef, key: CFStringRef);
pub fn CFURLCopyResourcePropertiesForKeys(url: CFURLRef, keys: CFArrayRef, error: *mut CFErrorRef) -> CFDictionaryRef;
//pub fn CFURLCopyResourcePropertyForKey(url: CFURLRef, key: CFStringRef, propertyValueTypeRefPtr: *mut c_void, error: *mut CFErrorRef) -> Boolean
pub fn CFURLCreateResourcePropertiesForKeysFromBookmarkData(allocator: CFAllocatorRef, resourcePropertiesToReturn: CFArrayRef, bookmark: CFDataRef) -> CFDictionaryRef;
pub fn CFURLCreateResourcePropertyForKeyFromBookmarkData(allocator: CFAllocatorRef, resourcePropertyKey: CFStringRef, bookmark: CFDataRef) -> CFTypeRef;
pub fn CFURLSetResourcePropertiesForKeys(url: CFURLRef, keyedPropertyValues: CFDictionaryRef, error: *mut CFErrorRef) -> Boolean;
pub fn CFURLSetResourcePropertyForKey(url: CFURLRef, key: CFStringRef, value: CFTypeRef, error: *mut CFErrorRef) -> Boolean;
pub fn CFURLSetTemporaryResourcePropertyForKey(url: CFURLRef, key: CFStringRef, propertyValue: CFTypeRef);
/* Working with Bookmark Data */
pub fn CFURLCreateBookmarkData(allocator: CFAllocatorRef, url: CFURLRef, options: CFURLBookmarkCreationOptions, resourcePropertiesToInclude: CFArrayRef, relativeToURL: CFURLRef, error: *mut CFErrorRef) -> CFDataRef;
#[cfg(target_os="macos")]
pub fn CFURLCreateBookmarkDataFromAliasRecord(allocator: CFAllocatorRef, aliasRecordDataRef: CFDataRef) -> CFDataRef; // deprecated
pub fn CFURLCreateBookmarkDataFromFile(allocator: CFAllocatorRef, fileURL: CFURLRef, errorRef: *mut CFErrorRef) -> CFDataRef;
pub fn CFURLWriteBookmarkDataToFile(bookmarkRef: CFDataRef, fileURL: CFURLRef, options: CFURLBookmarkFileCreationOptions, errorRef: *mut CFErrorRef) -> Boolean;
pub fn CFURLStartAccessingSecurityScopedResource(url: CFURLRef) -> Boolean;
pub fn CFURLStopAccessingSecurityScopedResource(url: CFURLRef);
}
/// Link-time smoke test: merely evaluating the static forces the linker to
/// resolve `kCFURLIsExcludedFromBackupKey`, proving the symbol is exported by
/// the system framework when the 10.8 feature set is enabled.
#[test]
#[cfg(feature="mac_os_10_8_features")]
fn can_see_excluded_from_backup_key() {
    // `unsafe` is required to read an extern static; the value is discarded.
    let _ = unsafe { kCFURLIsExcludedFromBackupKey };
}
|
use chrono::{DateTime, Utc};
/// One registration record as exchanged with the upstream data source.
/// Datetime fields use the custom text formats defined in the sibling
/// `*_format` modules; `url` and `ip` travel as comma-separated strings and
/// are split into vectors on (de)serialization.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Register {
    // Presumably the database primary key; `None` before persistence — TODO confirm.
    pub id: Option<i32>,
    pub rsoc_id: String,
    // Serialized under the JSON key "includeTime" as "%Y-%m-%d %H:%M:%S".
    #[serde(rename = "includeTime")]
    #[serde(with = "mysql_date_format")]
    pub include_time: DateTime<Utc>,
    // Serialized under the JSON key "rsocDate" as a date-only "%Y-%m-%d" string.
    #[serde(rename = "rsocDate")]
    #[serde(with = "date_without_time_format")]
    pub rsoc_date: DateTime<Utc>,
    pub org: String,
    pub org_act: String,
    // Comma-separated upstream; one vector entry per URL.
    #[serde(with = "string_split_in_vector")] pub url: Vec<String>,
    pub domain: String,
    // Comma-separated upstream; one vector entry per address.
    #[serde(with = "string_split_in_vector")] pub ip: Vec<String>,
    pub country: String,
    pub proof: String,
}
/// Aggregated detail for one record: last update time, originating source,
/// and an optional list of registration entries.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DetailInfo {
    // Serialized under the JSON key "updateTime" as "%Y-%m-%d %H:%M:%S".
    #[serde(rename = "updateTime")]
    #[serde(with = "mysql_date_format")]
    pub update_time: DateTime<Utc>,
    pub source: String,
    // `None` when the source carried no registration entries.
    pub register: Option<Vec<Register>>,
}
impl DetailInfo {
pub fn get_ip_addresses(&self) -> Vec<String> {
let mut result: Vec<String> = Vec::new();
if let Some(ref regs) = self.register {
for reg in regs.iter() {
result.extend_from_slice(®.ip);
}
}
result
}
pub fn get_urls(&self) -> Vec<String> {
let mut result: Vec<String> = Vec::new();
if let Some(ref regs) = self.register {
for reg in regs.iter() {
result.extend_from_slice(®.url);
}
}
result
}
}
/// Serde (de)serialization helpers for `DateTime<Utc>` fields exchanged in
/// the MySQL `DATETIME` text layout (`YYYY-MM-DD HH:MM:SS`).
mod mysql_date_format {
    use chrono::{DateTime, TimeZone, Utc};
    use serde::{self, Deserialize, Deserializer, Serializer};
    // MySQL DATETIME layout; carries no timezone, values are treated as UTC.
    // (`'static` is redundant on a `const` and removed per clippy.)
    const FORMAT: &str = "%Y-%m-%d %H:%M:%S";
    /// Serializes `date` as a `FORMAT`-shaped string.
    ///
    /// The signature follows serde's `serialize_with` contract:
    /// `fn serialize<S>(&T, S) -> Result<S::Ok, S::Error> where S: Serializer`.
    pub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", date.format(FORMAT));
        serializer.serialize_str(&s)
    }
    /// Deserializes a `FORMAT`-shaped string into a UTC datetime, reporting
    /// parse failures as custom serde errors.
    ///
    /// The signature follows serde's `deserialize_with` contract:
    /// `fn deserialize<D>(D) -> Result<T, D::Error> where D: Deserializer`.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Utc.datetime_from_str(&s, FORMAT)
            .map_err(serde::de::Error::custom)
    }
}
/// Serde helpers for date-only fields (`YYYY-MM-DD`) stored as text but
/// surfaced in memory as `DateTime<Utc>` at midnight UTC.
mod date_without_time_format {
    use chrono::{DateTime, NaiveDate, TimeZone, Utc};
    use serde::{self, Deserialize, Deserializer, Serializer};
    // Date-only layout; no time-of-day or timezone component.
    const FORMAT: &str = "%Y-%m-%d";
    /// Serializes only the calendar date of `date`; the time of day is dropped.
    ///
    /// The signature follows serde's `serialize_with` contract:
    /// `fn serialize<S>(&T, S) -> Result<S::Ok, S::Error> where S: Serializer`.
    pub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", date.format(FORMAT));
        serializer.serialize_str(&s)
    }
    /// Deserializes a `YYYY-MM-DD` string into midnight UTC of that day.
    ///
    /// An empty string is treated as "no date" and mapped to the sentinel
    /// 0001-01-01 00:00:00 UTC (pre-existing behavior, preserved).
    ///
    /// Bug fix: the previous code used `Utc.datetime_from_str`, which cannot
    /// succeed with a date-only format because the parsed fields lack a time
    /// of day; we now parse a `NaiveDate` and extend it to midnight.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        if s.is_empty() {
            return Ok(Utc.ymd(1, 1, 1).and_hms(0, 0, 0));
        }
        let date = NaiveDate::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom)?;
        Ok(Utc.from_utc_date(&date).and_hms(0, 0, 0))
    }
}
/// Serde helpers mapping a comma-separated string column to `Vec<String>`.
mod string_split_in_vector {
    use serde::{self, Deserialize, Deserializer, Serializer};
    /// Serializes `vector` as a single comma-joined string.
    ///
    /// Bug fix: the previous hand-rolled fold prefixed every element with a
    /// comma and then called `String::remove(0)`, which panics when the
    /// vector is empty. `join` handles the empty case and is O(n).
    ///
    /// The signature follows serde's `serialize_with` contract:
    /// `fn serialize<S>(&T, S) -> Result<S::Ok, S::Error> where S: Serializer`.
    pub fn serialize<S>(vector: &Vec<String>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&vector.join(","))
    }
    /// Deserializes a comma-separated string into its parts.
    ///
    /// An empty string yields an empty vector — the exact inverse of
    /// `serialize` — rather than the `vec![""]` a bare `split` would produce.
    ///
    /// The signature follows serde's `deserialize_with` contract:
    /// `fn deserialize<D>(D) -> Result<T, D::Error> where D: Deserializer`.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        if s.is_empty() {
            return Ok(Vec::new());
        }
        Ok(s.split(',').map(str::to_string).collect())
    }
}
|
// svd2rust-generated reader API for the DMACCARXBR register; do not hand-edit
// beyond comments — regenerate from the SVD instead.
#[doc = "Register `DMACCARXBR` reader"]
pub type R = crate::R<DMACCARXBR_SPEC>;
#[doc = "Field `CURRBUFAPTR` reader - Application Receive Buffer Address Pointer The DMA updates this pointer during Rx operation. This pointer is cleared on reset."]
pub type CURRBUFAPTR_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - Application Receive Buffer Address Pointer The DMA updates this pointer during Rx operation. This pointer is cleared on reset."]
    #[inline(always)]
    pub fn currbufaptr(&self) -> CURRBUFAPTR_R {
        // The field spans all 32 bits, so the raw register value is the pointer.
        CURRBUFAPTR_R::new(self.bits)
    }
}
#[doc = "Channel current application receive buffer register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dmaccarxbr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DMACCARXBR_SPEC;
// 32-bit wide register.
impl crate::RegisterSpec for DMACCARXBR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dmaccarxbr::R`](R) reader structure"]
impl crate::Readable for DMACCARXBR_SPEC {}
// Read-only in practice (no Writable impl is generated); reset clears it.
#[doc = "`reset()` method sets DMACCARXBR to value 0"]
impl crate::Resettable for DMACCARXBR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// Returns an infinite iterator over the Padovan sequence
/// (P(0) = P(1) = P(2) = 1, P(n) = P(n-2) + P(n-3)).
///
/// Improvement over the original: the sliding window is a stack-allocated
/// tuple instead of a heap `Vec`, and the `n < 3` special-case branch is
/// gone — seeding the window with the first three terms makes the recurrence
/// uniform from the first call.
fn padovan_recur() -> impl std::iter::Iterator<Item = usize> {
    // (a, b, c) always holds the next three terms to be emitted:
    // (P(n), P(n+1), P(n+2)).
    let mut window = (1usize, 1, 1);
    std::iter::from_fn(move || {
        let (a, b, c) = window;
        // P(n+3) = P(n+1) + P(n), i.e. b + a with the window above.
        window = (b, c, a + b);
        Some(a)
    })
}
/// Padovan numbers via the closed-form floor formula
/// P(n) = floor(p^(n-1) / s + 1/2), where p is the plastic number
/// (the real root of x^3 = x + 1).
fn padovan_floor() -> impl std::iter::Iterator<Item = usize> {
    const P: f64 = 1.324717957244746025960908854;
    const S: f64 = 1.0453567932525329623;
    (0..).map(|n: i32| {
        let rounded = P.powf(f64::from(n - 1)) / S + 0.5;
        rounded.floor() as usize
    })
}
/// Iterator over the words of the Padovan L-system:
/// axiom "A"; rewrite rules A -> B, B -> C, C -> AB.
/// The word lengths reproduce the Padovan sequence.
fn padovan_lsystem() -> impl std::iter::Iterator<Item = String> {
    let mut word = String::from("A");
    std::iter::from_fn(move || {
        let current = word.clone();
        // Rewrite every symbol of the current word to form the next one.
        word = current
            .chars()
            .map(|symbol| match symbol {
                'A' => "B",
                'B' => "C",
                _ => "AB",
            })
            .collect();
        Some(current)
    })
}
fn main() {
println!("First 20 terms of the Padovan sequence:");
for p in padovan_recur().take(20) {
print!("{} ", p);
}
println!();
println!(
"\nRecurrence and floor functions agree for first 64 terms? {}",
padovan_recur().take(64).eq(padovan_floor().take(64))
);
println!("\nFirst 10 strings produced from the L-system:");
for p in padovan_lsystem().take(10) {
print!("{} ", p);
}
println!();
println!(
"\nLength of first 32 strings produced from the L-system = Padovan sequence? {}",
padovan_lsystem()
.map(|x| x.len())
.take(32)
.eq(padovan_recur().take(32))
);
} |
use std::{
convert::TryFrom,
path::{Path, PathBuf},
};
use sha2::digest::Digest;
use sha2::Sha256;
use types::{package_key::TryFromError, repo::RepoUrl, DependencyKey, PackageKey};
/// Extension methods on [`Path`] for building content-addressed paths.
pub(crate) trait PathExt {
    /// Joins the SHA-256 hex digest of `bytes` onto `self` as a three-level
    /// fan-out path: `<self>/<hex[0..2]>/<hex[2..4]>/<hex[4..]>`.
    fn join_sha256(&self, bytes: &[u8]) -> PathBuf;
}
impl PathExt for Path {
    /// Appends `<hex[0..2]>/<hex[2..4]>/<hex[4..]>` of the SHA-256 digest of
    /// `bytes` — the usual fan-out layout that keeps directories small.
    fn join_sha256(&self, bytes: &[u8]) -> PathBuf {
        let hex = format!("{:x}", Sha256::digest(bytes));
        // 64 hex chars split as 2 + 2 + 60.
        let (head, tail) = hex.split_at(2);
        let (mid, rest) = tail.split_at(2);
        self.join(head).join(mid).join(rest)
    }
}
/// Conversions from a [`DependencyKey`] to a concrete [`PackageKey`],
/// resolving local dependencies against a repository URL.
pub(crate) trait DependencyKeyExt {
    // Consuming variant: moves the local id into the key without copying.
    fn into_package_key(self, repo_url: &RepoUrl) -> Result<PackageKey, TryFromError>;
    // Borrowing variant: copies the local id.
    fn to_package_key(&self, repo_url: &RepoUrl) -> Result<PackageKey, TryFromError>;
}
impl DependencyKeyExt for DependencyKey {
    /// Consumes the key. Remote keys are converted from their URL (which can
    /// fail with `TryFromError`); local keys are paired with `repo_url`
    /// without further validation (`new_unchecked`).
    fn into_package_key(self, repo_url: &RepoUrl) -> Result<PackageKey, TryFromError> {
        match self {
            DependencyKey::Remote(url) => PackageKey::try_from(url),
            DependencyKey::Local(id) => Ok(PackageKey::new_unchecked(repo_url.clone(), id, None)),
        }
    }
    /// Borrowing variant of `into_package_key`; copies the local id via
    /// `to_string` instead of moving it.
    fn to_package_key(&self, repo_url: &RepoUrl) -> Result<PackageKey, TryFromError> {
        match self {
            DependencyKey::Remote(url) => PackageKey::try_from(url),
            DependencyKey::Local(id) => Ok(PackageKey::new_unchecked(
                repo_url.clone(),
                id.to_string(),
                None,
            )),
        }
    }
}
|
extern crate hyper;
extern crate rustc_serialize;
mod api;
// not to be included in library
// for testing purposes and iterating on API only
/// Ad-hoc driver for exercising the Plaid API client; not part of the library.
fn main() {
    // Shared HTTP transport; the API client below borrows it.
    let hyper = hyper::client::Client::new();
    // Build a client given your current credentials.
    // NOTE(review): endpoint and credentials here are placeholders.
    let client = api::client::Client {
        endpoint: "https://tartan.plaid.com",
        client_id: "yourclientid",
        secret: "yourclientsecret",
        hyper: &hyper
    };
    // Authenticate the user for Plaid Connect.
    // `response` will be `Authenticated(..)` if successful, which includes a `User`
    // NOTE(review): `client` is both the receiver of `request` and moved into
    // the payload; this compiles only if `Client` is `Copy`/`Clone`-by-value
    // here — confirm against `api::client`. The two trailing `None`s look like
    // optional PIN / options arguments — verify.
    let response = client.request(api::product::Connect,
        api::client::Payload::Authenticate(client,
            "Chase".to_string(),
            "username".to_string(),
            "password".to_string(),
            None,
            None));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.