text
stringlengths 8
4.13M
|
|---|
use crate::*;
/// Sprites are conceptually both a reference to an image, and the sub region of the image
/// which represents the logical sprite.
pub trait CommonSprite<R: CommonRenderer>: Sized + Clone {
/// Construct a new sprite from an image. The image contents are copied to a texture
/// in RGBA8 format. The entire image will be used
fn new_from_image(
renderer: &R,
img: &Image,
init_args: &SpriteInitArgs,
) -> Result<Self, R::Error>;
/// Build a sprite that shares the same underlying texture but represents a different portion
/// of the texture.
///
/// # Arguments
///
/// * **source_rect** - The portion of the texture that the new sprite will render, relative to
/// the current sprite's bounds. The bounds of the output sprite will be
/// the intersection of the sprite's rect and the source_rect, so the dimensions
/// of the output sprite may not match the `source_rect` dimensions.
///
/// # Example
///
/// ```no_run
/// # use riddle::{common::Color, image::*, platform::*, renderer::*, math::*, *};
/// # fn main() -> Result<(), RiddleError> {
/// # let rdl = RiddleLib::new()?; let window = WindowBuilder::new().build(rdl.context())?;
/// let renderer = Renderer::new_from_window(&window)?;
///
/// // Load an image and create a sprite from it
/// let img = Image::new(100, 100);
/// let sprite = Sprite::new_from_image(&renderer, &img, &SpriteInitArgs::new())?;
///
/// // Take a portion of the sprite as a new sprite.
/// let subsprite = sprite.subsprite(&Rect::new(vec2(75.0, 75.0), vec2(50.0, 50.0)));
///
/// // The subsprite dimensions will be the size of the intersection between the
/// // source sprite and the new bounds.
/// assert_eq!(vec2(25.0, 25.0), subsprite.dimensions());
/// # Ok(()) }
/// ```
fn subsprite(&self, source_rect: &Rect<f32>) -> Self;
/// Get the dimensions of the sprite
///
/// # Example
///
/// ```no_run
/// # use riddle::{common::Color, image::*, platform::*, renderer::*, math::*, *};
/// # fn main() -> Result<(), RiddleError> {
/// # let rdl = RiddleLib::new()?; let window = WindowBuilder::new().build(rdl.context())?;
/// let renderer = Renderer::new_from_window(&window)?;
///
/// // Load an image and create a sprite from it
/// let img = Image::new(100, 100);
/// let sprite = Sprite::new_from_image(&renderer, &img, &SpriteInitArgs::new())?;
///
/// // The sprite dimensions will be the same of the source image
/// assert_eq!(vec2(100.0, 100.0), sprite.dimensions());
/// # Ok(()) }
/// ```
fn dimensions(&self) -> Vector2<f32>;
/// Render multiple sub regions of the sprite at once.
///
/// The regions are defined by pairs of the region of the sprite to draw in texels, and where
/// to position the region relative to the [`SpriteRenderArgs::location`].
///
/// The pivot and rotation are relative to the location arg. A change in rotation will
/// transform all rendered regions as one, not individually.
fn render_regions<Ctx: RenderContext<R> + ?Sized>(
&self,
render_ctx: &mut Ctx,
args: &SpriteRenderArgs,
parts: &[(Rect<f32>, Vector2<f32>)],
) -> Result<(), R::Error>;
/// Render the entire sprite.
fn render<Ctx: RenderContext<R> + ?Sized>(
&self,
render_ctx: &mut Ctx,
args: &SpriteRenderArgs,
) -> Result<(), R::Error> {
self.render_regions(
render_ctx,
args,
&[(
Rect::new([0.0, 0.0], self.dimensions().into()),
Vector2::new(0.0, 0.0),
)],
)
}
/// Utility function to simply render the sprite at a given location
///
/// See [`SpriteRenderArgs`] for how to render the sprite with more control.
fn render_at<Ctx: RenderContext<R> + ?Sized>(
&self,
render_ctx: &mut Ctx,
location: Vector2<f32>,
) -> Result<(), R::Error> {
self.render(
render_ctx,
&SpriteRenderArgs {
location,
..Default::default()
},
)
}
}
/// Texture sampling filter used when a sprite is drawn at a size other than
/// its native texel size.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FilterMode {
    /// Sample the single nearest texel (crisp, pixelated scaling).
    Nearest,
    /// Interpolate between neighbouring texels (smooth scaling).
    Linear,
}

impl Default for FilterMode {
    /// Nearest-neighbour filtering is the default.
    fn default() -> Self {
        Self::Nearest
    }
}
/// Construction parameters used when building a sprite from an image.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct SpriteInitArgs {
    /// Filter applied when the sprite is rendered larger than its texture.
    pub mag_filter: FilterMode,
    /// Filter applied when the sprite is rendered smaller than its texture.
    pub min_filter: FilterMode,
}

impl SpriteInitArgs {
    /// Create a new default init args.
    pub fn new() -> Self {
        Self::default()
    }

    /// Specify the min and mag filters used when rendering the sprite
    pub fn with_filter_modes(mut self, mag_filter: FilterMode, min_filter: FilterMode) -> Self {
        self.min_filter = min_filter;
        self.mag_filter = mag_filter;
        self
    }
}
/// Parameters controlling how a sprite is drawn by `CommonSprite::render`.
#[derive(Clone, Debug)]
pub struct SpriteRenderArgs {
    // Where the pivot point is placed on the render target.
    pub location: Vector2<f32>,
    // Pivot, relative to the sprite's top left, about which rotation/placement happens.
    pub pivot: Vector2<f32>,
    // Per-axis scale factor applied to the sprite.
    pub scale: Vector2<f32>,
    // Rotation angle in radians (see `with_angle`).
    pub angle: f32,
    // Color multiplied with the sprite's own colors when rendering.
    pub diffuse_color: Color<f32>,
}
impl SpriteRenderArgs {
    /// New render args at the given location, with every other field defaulted.
    pub fn new<T: Into<Vector2<f32>>>(location: T) -> Self {
        let mut args = Self::default();
        args.location = location.into();
        args
    }

    /// Set the location of the sprite, specifying where the pivot should
    /// be placed.
    #[inline]
    pub fn at<T: Into<Vector2<f32>>>(&mut self, location: T) -> &mut Self {
        self.location = location.into();
        self
    }

    /// Set the pivot of the sprite, relative to the top left of the sprite
    #[inline]
    pub fn with_pivot<T: Into<Vector2<f32>>>(&mut self, pivot: T) -> &mut Self {
        self.pivot = pivot.into();
        self
    }

    /// Set the scale at which the sprite will be rendered
    pub fn with_scale<T: Into<Vector2<f32>>>(&mut self, scale: T) -> &mut Self {
        self.scale = scale.into();
        self
    }

    /// Set the angle at which the sprite will be rendered, in radians.
    pub fn with_angle(&mut self, angle: f32) -> &mut Self {
        self.angle = angle;
        self
    }

    /// Set the diffuse color of the sprite, which will be multiplied by the
    /// sprite colors.
    pub fn with_color(&mut self, color: Color<f32>) -> &mut Self {
        self.diffuse_color = color;
        self
    }
}
impl Default for SpriteRenderArgs {
    /// Identity placement: render at the origin with a top-left pivot,
    /// unit scale, no rotation, and a pass-through (white) diffuse color.
    fn default() -> Self {
        Self {
            location: [0.0, 0.0].into(),
            pivot: [0.0, 0.0].into(),
            scale: [1.0, 1.0].into(),
            angle: 0.0,
            diffuse_color: Color::WHITE,
        }
    }
}
|
use std;
use bincode;
use mio_more;
/// Error type aggregating the failure modes of this crate's event loop:
/// socket I/O, (de)serialization, and timer registration.
#[derive(Debug)]
pub enum Error {
    /// Underlying socket/file I/O failure.
    Io(std::io::Error),
    /// Message (de)serialization failure.
    Bincode(bincode::Error),
    /// Timer registration/scheduling failure.
    Timer(mio_more::timer::TimerError),
    /// An identifier did not refer to a known entity.
    InvalidId,
}

// Implement Display so the error can be printed with `{}` and boxed as a
// standard error by callers; the From impls below keep `?` ergonomic.
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            Error::Io(ref e) => write!(f, "I/O error: {}", e),
            Error::Bincode(ref e) => write!(f, "serialization error: {}", e),
            Error::Timer(ref e) => write!(f, "timer error: {}", e),
            Error::InvalidId => write!(f, "invalid id"),
        }
    }
}

impl From<std::io::Error> for Error {
    fn from(other: std::io::Error) -> Self {
        Error::Io(other)
    }
}

impl From<bincode::Error> for Error {
    fn from(other: bincode::Error) -> Self {
        Error::Bincode(other)
    }
}

impl From<mio_more::timer::TimerError> for Error {
    fn from(other: mio_more::timer::TimerError) -> Self {
        Error::Timer(other)
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// FormulaAndFunctionEventsDataSource : Data source for event platform-based queries.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FormulaAndFunctionEventsDataSource {
    // Log events.
    #[serde(rename = "logs")]
    LOGS,
    // APM spans.
    #[serde(rename = "spans")]
    SPANS,
    // Network events.
    #[serde(rename = "network")]
    NETWORK,
    // Real User Monitoring events.
    #[serde(rename = "rum")]
    RUM,
    // Security Monitoring signals.
    #[serde(rename = "security_signals")]
    SECURITY_SIGNALS,
    // Profiling events.
    #[serde(rename = "profiles")]
    PROFILES,
}
// Implement Display rather than ToString directly: the standard library's
// blanket `impl<T: Display> ToString for T` keeps `.to_string()` working for
// all existing callers, while also enabling `{}` formatting.
impl std::fmt::Display for FormulaAndFunctionEventsDataSource {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // These strings must stay in sync with the serde rename attributes above.
        let s = match self {
            Self::LOGS => "logs",
            Self::SPANS => "spans",
            Self::NETWORK => "network",
            Self::RUM => "rum",
            Self::SECURITY_SIGNALS => "security_signals",
            Self::PROFILES => "profiles",
        };
        f.write_str(s)
    }
}
|
use boxfnonce::BoxFnOnce;
use entity_template::EntityTemplate;
use std::any::Any;
use std::collections::HashMap;
use worker::schema::{Command, CommandRequestInterface, CommandResponseInterface, Component,
GeneratedSchema};
use worker::{CommandStatus, ComponentId, Connection, EntityId, RequestId};
use world::World;
// One-shot callback invoked when a command/entity request completes.
// Receives a raw World pointer (dereferenced in `register_callback`), the
// success payload `T`, the command status, and a status message.
type Callback<S, T> = BoxFnOnce<'static, (*mut World<S>, T, CommandStatus, String)>;
// Handler for an incoming entity command request; the `Box<Any>` carries the
// type-erased request which the handler downcasts to the concrete type.
type CommandHandler<S> = Box<FnMut(&mut World<S>, &mut Connection, RequestId, EntityId, Box<Any>)>;
/// Registry of entity-command handlers plus the pending callbacks for
/// in-flight command / create-entity / delete-entity requests.
pub struct Commands<S: GeneratedSchema> {
    // Handlers keyed by (component id, command index).
    entity_command_handlers: HashMap<(ComponentId, u32), CommandHandler<S>>,
    // Callbacks awaiting a response to an entity command we sent.
    entity_command_callbacks: HashMap<RequestId, Callback<S, (EntityId, Option<Box<Any>>)>>,
    // Callbacks awaiting a create-entity response.
    create_entity_callbacks: HashMap<RequestId, Callback<S, EntityId>>,
    // Callbacks awaiting a delete-entity response.
    delete_entity_callbacks: HashMap<RequestId, Callback<S, EntityId>>,
}
impl<S: 'static + GeneratedSchema> Commands<S> {
    /// Creates an empty registry with no handlers and no pending callbacks.
    pub fn new() -> Commands<S> {
        Commands {
            entity_command_handlers: HashMap::new(),
            entity_command_callbacks: HashMap::new(),
            create_entity_callbacks: HashMap::new(),
            delete_entity_callbacks: HashMap::new(),
        }
    }

    /// Registers `handler` to service incoming requests for command `C`.
    ///
    /// # Panics
    ///
    /// Panics if a handler for the same (component id, command index) pair
    /// has already been registered.
    pub fn register_handler<C: 'static + Command<S>, H: 'static>(&mut self, handler: H)
    where
        H: Fn(&mut World<S>, EntityId, &C::Request) -> C::Response,
    {
        let id = (C::Component::component_id(), C::command_index());
        if self.entity_command_handlers.contains_key(&id) {
            panic!("Command handler for component {} and command with index {} has already been registered.", id.0, id.1);
        }
        self.entity_command_handlers.insert(
            id,
            Box::new(move |world, connection, request_id, entity_id, request| {
                // Requests are stored type-erased; recover the concrete
                // request type for this command before invoking the handler.
                let request = request.downcast_ref::<C::Request>().unwrap();
                let response = handler(world, entity_id, request);
                // (fixed typo: was `reponse`)
                let response = response.serialise_response();
                connection.send_command_response(request_id, C::Component::component_id(), response);
            }),
        );
    }

    /// Dispatches an incoming command request to its registered handler,
    /// if any; unknown (component, command) pairs are silently ignored.
    pub fn on_command_request(
        &mut self,
        world: &mut World<S>,
        connection: &mut Connection,
        request_id: RequestId,
        entity_id: EntityId,
        component_id: ComponentId,
        command_id: u32,
        request: Box<Any>,
    ) {
        if let Some(handler) = self.entity_command_handlers
            .get_mut(&(component_id, command_id))
        {
            handler(world, connection, request_id, entity_id, request);
        }
    }

    /// Sends command `C` to `entity_id` and remembers `success`/`failure` to
    /// run when the matching response arrives via `on_command_response`.
    pub fn send_command<C: 'static + Command<S>, A: 'static, F: 'static>(
        &mut self,
        connection: &mut Connection,
        entity_id: EntityId,
        request: C::Request,
        success: A,
        failure: F,
    ) where
        A: FnOnce(&mut World<S>, EntityId, &C::Response),
        F: FnOnce(&mut World<S>, CommandStatus, String),
    {
        let request_ptr = request.serialise_request();
        let request_id = connection.send_command_request(
            entity_id,
            C::Component::component_id(),
            request_ptr,
            C::command_index(),
            None,
        );
        Commands::<S>::register_callback(
            &mut self.entity_command_callbacks,
            request_id,
            |world, (entity_id, response)| {
                // A successful command always carries a response payload of
                // the command's concrete response type.
                let response = response
                    .as_ref()
                    .unwrap()
                    .downcast_ref::<C::Response>()
                    .unwrap();
                success(world, entity_id, response);
            },
            failure,
        )
    }

    /// Completes a pending entity command, invoking the stored callback.
    pub fn on_command_response(
        &mut self,
        world: &mut World<S>,
        request_id: RequestId,
        entity_id: EntityId,
        response: Option<Box<Any>>,
        success_code: CommandStatus,
        message: &str,
    ) {
        if let Some(callback) = self.entity_command_callbacks.remove(&request_id) {
            callback.call(
                world,
                (entity_id, response),
                success_code,
                message.to_string(),
            );
        }
    }

    /// Requests creation of an entity from `entity_template`, registering
    /// `success`/`failure` for the eventual response.
    pub fn create_entity<A: 'static, F: 'static>(
        &mut self,
        connection: &mut Connection,
        mut entity_template: EntityTemplate,
        success: A,
        failure: F,
    ) where
        A: FnOnce(&mut World<S>, EntityId),
        F: FnOnce(&mut World<S>, CommandStatus, String),
    {
        // The ACL component is derived from the template's access lists and
        // must be part of the initial component data.
        let (entity_acl_id, entity_acl_data) =
            S::serialise_entity_acl(entity_template.read_access, entity_template.write_access);
        entity_template.data.insert(entity_acl_id, entity_acl_data);
        let request_id = connection.send_create_entity_request(
            entity_template.data,
            entity_template.entity_id,
            None,
        );
        Commands::<S>::register_callback(
            &mut self.create_entity_callbacks,
            request_id,
            |world, entity_id| {
                success(world, entity_id);
            },
            failure,
        )
    }

    /// Completes a pending create-entity request.
    pub fn on_create_entity_response(
        &mut self,
        world: &mut World<S>,
        request_id: RequestId,
        entity_id: EntityId,
        success_code: CommandStatus,
        message: &str,
    ) {
        if let Some(callback) = self.create_entity_callbacks.remove(&request_id) {
            callback.call(world, entity_id, success_code, message.to_string());
        }
    }

    /// Requests deletion of `entity_id`, registering `success`/`failure` for
    /// the eventual response.
    pub fn delete_entity<A: 'static, F: 'static>(
        &mut self,
        connection: &mut Connection,
        entity_id: EntityId,
        success: A,
        failure: F,
    ) where
        A: FnOnce(&mut World<S>, EntityId),
        F: FnOnce(&mut World<S>, CommandStatus, String),
    {
        let request_id = connection.send_delete_entity_request(entity_id, None);
        Commands::<S>::register_callback(
            &mut self.delete_entity_callbacks,
            request_id,
            |world, entity_id| {
                success(world, entity_id);
            },
            failure,
        )
    }

    /// Completes a pending delete-entity request.
    pub fn on_delete_entity_response(
        &mut self,
        world: &mut World<S>,
        request_id: RequestId,
        entity_id: EntityId,
        success_code: CommandStatus,
        message: &str,
    ) {
        if let Some(callback) = self.delete_entity_callbacks.remove(&request_id) {
            callback.call(world, entity_id, success_code, message.to_string());
        }
    }

    /// Wraps a success/failure pair into a single stored callback that picks
    /// which one to run based on the response's `CommandStatus`.
    fn register_callback<T: 'static, A, F>(
        callbacks: &mut HashMap<RequestId, Callback<S, T>>,
        request_id: RequestId,
        success: A,
        failure: F,
    ) where
        A: 'static + FnOnce(&mut World<S>, T),
        F: 'static + FnOnce(&mut World<S>, CommandStatus, String),
    {
        callbacks.insert(
            request_id,
            BoxFnOnce::from(move |world_ptr: *mut World<S>, object, status, message| {
                // SAFETY: assumes `call` is always given a pointer to a live,
                // uniquely-borrowed World for the duration of the callback —
                // TODO(review): confirm at the call sites that drive these callbacks.
                let world = unsafe { &mut (*world_ptr) };
                if status == CommandStatus::Success {
                    success(world, object);
                } else {
                    failure(world, status, message);
                }
            }),
        );
    }
}
|
#![allow(clippy::identity_op)]
use crate::encoder::JpegColorType;
/// Conversion from RGB to YCbCr
///
/// Uses 16-bit fixed-point arithmetic (everything scaled by 2^16, giving
/// roughly 4 decimal digits of precision) to avoid floating point math.
///
/// The unscaled JFIF conversion being approximated:
///   Y  =  0.29900 * R + 0.58700 * G + 0.11400 * B
///   Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + 128
///   Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + 128
#[inline]
pub fn rgb_to_ycbcr(r: u8, g: u8, b: u8) -> (u8, u8, u8) {
    // Rounding bias: adding 0.5 (in 16.16 fixed point) before the shift.
    const HALF: i32 = 1 << 15;
    // +128 chroma offset, pre-scaled by 2^16.
    const CHROMA_OFFSET: i32 = 128 << 16;

    let (r, g, b) = (i32::from(r), i32::from(g), i32::from(b));

    // Coefficients are round(c * 65536) of the matrix above.
    let y = 19595 * r + 38470 * g + 7471 * b;
    let cb = -11059 * r - 21709 * g + 32768 * b + CHROMA_OFFSET;
    let cr = 32768 * r - 27439 * g - 5329 * b + CHROMA_OFFSET;

    (
        ((y + HALF) >> 16) as u8,
        ((cb + HALF) >> 16) as u8,
        ((cr + HALF) >> 16) as u8,
    )
}
/// Conversion from CMYK to YCCK (YCbCrK)
///
/// The C, M and Y channels are run through the RGB→YCbCr transform and the
/// K channel is inverted, matching how CMYK data is stored in YCCK JPEGs.
#[inline]
pub fn cmyk_to_ycck(c: u8, m: u8, y: u8, k: u8) -> (u8, u8, u8, u8) {
    let (luma, cb, cr) = rgb_to_ycbcr(c, m, y);
    let k_inverted = 255 - k;
    (luma, cb, cr, k_inverted)
}
/// # Buffer used as input value for image encoding
///
/// Image encoding with [Encoder::encode_image](crate::Encoder::encode_image) needs an ImageBuffer
/// as input for the image data. For convenience the [Encoder::encode](crate::Encoder::encode)
/// function contains implementations for common byte based pixel formats.
/// Users that needs other pixel formats or don't have the data available as byte slices
/// can create their own buffer implementations.
///
/// ## Example: ImageBuffer implementation for RgbImage from the `image` crate
/// ```no_run
/// use image::RgbImage;
/// use jpeg_encoder::{ImageBuffer, JpegColorType, rgb_to_ycbcr};
///
/// pub struct RgbImageBuffer {
///     image: RgbImage,
/// }
///
/// impl ImageBuffer for RgbImageBuffer {
///     fn get_jpeg_color_type(&self) -> JpegColorType {
///         // Rgb images are encoded as YCbCr in JFIF files
///         JpegColorType::Ycbcr
///     }
///
///     fn width(&self) -> u16 {
///         self.image.width() as u16
///     }
///
///     fn height(&self) -> u16 {
///         self.image.height() as u16
///     }
///
///     fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]){
///         let pixel = self.image.get_pixel(x as u32 ,y as u32);
///
///         let (y,cb,cr) = rgb_to_ycbcr(pixel[0], pixel[1], pixel[2]);
///
///         // For YCbCr the 4th buffer is not used
///         buffers[0].push(y);
///         buffers[1].push(cb);
///         buffers[2].push(cr);
///     }
/// }
///
/// ```
pub trait ImageBuffer {
    /// The color type used in the image encoding
    fn get_jpeg_color_type(&self) -> JpegColorType;

    /// Width of the image
    fn width(&self) -> u16;

    /// Height of the image
    fn height(&self) -> u16;

    /// Add color values for the position to color component buffers
    ///
    /// One value per color component is pushed; buffers for components the
    /// color type does not use are left untouched.
    fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]);
}
/// Single-channel (luma) image buffer: (pixel data, width, height).
pub(crate) struct GrayImage<'a>(pub &'a [u8], pub u16, pub u16);

impl<'a> ImageBuffer for GrayImage<'a> {
    fn get_jpeg_color_type(&self) -> JpegColorType {
        JpegColorType::Luma
    }

    fn width(&self) -> u16 {
        self.1
    }

    fn height(&self) -> u16 {
        self.2
    }

    fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]) {
        // Row-major layout, one byte per pixel; only the luma buffer is used.
        let idx = usize::from(y) * usize::from(self.1) + usize::from(x);
        buffers[0].push(self.0[idx]);
    }
}
// Generates an interleaved RGB-like ImageBuffer that converts to YCbCr.
//
// * `$name`       - name of the generated tuple struct (data, width, height)
// * `$num_colors` - bytes per pixel in the interleaved input
// * `$o1`/`$o2`/`$o3` - byte offsets of the R, G and B channels within a pixel,
//   which lets the same macro cover both RGB(A) and BGR(A) orderings.
macro_rules! ycbcr_image {
    ($name:ident, $num_colors:expr, $o1:expr, $o2:expr, $o3:expr) => {
        pub(crate) struct $name<'a>(pub &'a [u8], pub u16, pub u16);

        impl<'a> ImageBuffer for $name<'a> {
            fn get_jpeg_color_type(&self) -> JpegColorType {
                JpegColorType::Ycbcr
            }

            fn width(&self) -> u16 {
                self.1
            }

            fn height(&self) -> u16 {
                self.2
            }

            #[inline(always)]
            fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]) {
                // Row-major, $num_colors bytes per pixel.
                let offset = (usize::from(y) * usize::from(self.1) + usize::from(x)) * $num_colors;
                let (y, cb, cr) = rgb_to_ycbcr(self.0[offset + $o1], self.0[offset + $o2], self.0[offset + $o3]);
                buffers[0].push(y);
                buffers[1].push(cb);
                buffers[2].push(cr);
            }
        }
    }
}
// Channel orders: RGB(A) read channels in order; BGR(A) read them reversed.
ycbcr_image!(RgbImage, 3, 0, 1, 2);
ycbcr_image!(RgbaImage, 4, 0, 1, 2);
ycbcr_image!(BgrImage, 3, 2, 1, 0);
ycbcr_image!(BgraImage, 4, 2, 1, 0);
/// Pre-converted, interleaved 3-byte YCbCr image buffer: (data, width, height).
pub(crate) struct YCbCrImage<'a>(pub &'a [u8], pub u16, pub u16);

impl<'a> ImageBuffer for YCbCrImage<'a> {
    fn get_jpeg_color_type(&self) -> JpegColorType {
        JpegColorType::Ycbcr
    }

    fn width(&self) -> u16 {
        self.1
    }

    fn height(&self) -> u16 {
        self.2
    }

    fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]) {
        // Data is already YCbCr: copy the three components straight through.
        let base = (usize::from(y) * usize::from(self.1) + usize::from(x)) * 3;
        for (channel, buffer) in buffers.iter_mut().take(3).enumerate() {
            buffer.push(self.0[base + channel]);
        }
    }
}
/// Interleaved 4-byte CMYK image buffer: (data, width, height).
pub(crate) struct CmykImage<'a>(pub &'a [u8], pub u16, pub u16);

impl<'a> ImageBuffer for CmykImage<'a> {
    fn get_jpeg_color_type(&self) -> JpegColorType {
        JpegColorType::Cmyk
    }

    fn width(&self) -> u16 {
        self.1
    }

    fn height(&self) -> u16 {
        self.2
    }

    fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]) {
        let base = (usize::from(y) * usize::from(self.1) + usize::from(x)) * 4;
        // All four channels are stored inverted (255 - value) in the JPEG stream.
        for (channel, buffer) in buffers.iter_mut().enumerate() {
            buffer.push(255 - self.0[base + channel]);
        }
    }
}
/// Interleaved CMYK input that is converted to YCCK while filling buffers:
/// (data, width, height).
pub(crate) struct CmykAsYcckImage<'a>(pub &'a [u8], pub u16, pub u16);

impl<'a> ImageBuffer for CmykAsYcckImage<'a> {
    fn get_jpeg_color_type(&self) -> JpegColorType {
        JpegColorType::Ycck
    }

    fn width(&self) -> u16 {
        self.1
    }

    fn height(&self) -> u16 {
        self.2
    }

    fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]) {
        let base = (usize::from(y) * usize::from(self.1) + usize::from(x)) * 4;
        let (luma, cb, cr, k) = cmyk_to_ycck(
            self.0[base],
            self.0[base + 1],
            self.0[base + 2],
            self.0[base + 3],
        );
        buffers[0].push(luma);
        buffers[1].push(cb);
        buffers[2].push(cr);
        buffers[3].push(k);
    }
}
/// Pre-converted, interleaved 4-byte YCCK image buffer: (data, width, height).
pub(crate) struct YcckImage<'a>(pub &'a [u8], pub u16, pub u16);

impl<'a> ImageBuffer for YcckImage<'a> {
    fn get_jpeg_color_type(&self) -> JpegColorType {
        JpegColorType::Ycck
    }

    fn width(&self) -> u16 {
        self.1
    }

    fn height(&self) -> u16 {
        self.2
    }

    fn fill_buffers(&self, x: u16, y: u16, buffers: &mut [Vec<u8>; 4]) {
        // Data is already YCCK: copy all four components straight through.
        let base = (usize::from(y) * usize::from(self.1) + usize::from(x)) * 4;
        for (channel, buffer) in buffers.iter_mut().enumerate() {
            buffer.push(self.0[base + channel]);
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::rgb_to_ycbcr;

    // Helper: assert that an RGB triple converts to the expected YCbCr triple.
    fn assert_rgb_to_ycbcr(rgb: [u8; 3], ycbcr: [u8; 3]) {
        let (y, cb, cr) = rgb_to_ycbcr(rgb[0], rgb[1], rgb[2]);
        assert_eq!([y, cb, cr], ycbcr);
    }

    // Regression test: expected outputs were captured from libjpeg's
    // conversion of the same pixels, so our fixed-point transform must
    // match it byte-for-byte.
    #[test]
    fn test_rgb_to_ycbcr() {
        // Values taken from libjpeg for a common image
        assert_rgb_to_ycbcr([59, 109, 6], [82, 85, 111]);
        assert_rgb_to_ycbcr([29, 60, 11], [45, 109, 116]);
        assert_rgb_to_ycbcr([57, 114, 26], [87, 94, 107]);
        assert_rgb_to_ycbcr([30, 60, 6], [45, 106, 117]);
        assert_rgb_to_ycbcr([41, 75, 11], [58, 102, 116]);
        assert_rgb_to_ycbcr([145, 184, 108], [164, 97, 115]);
        assert_rgb_to_ycbcr([33, 85, 7], [61, 98, 108]);
        assert_rgb_to_ycbcr([61, 90, 40], [76, 108, 118]);
        assert_rgb_to_ycbcr([75, 127, 45], [102, 96, 109]);
        assert_rgb_to_ycbcr([30, 56, 14], [43, 111, 118]);
        assert_rgb_to_ycbcr([106, 142, 81], [124, 104, 115]);
        assert_rgb_to_ycbcr([35, 59, 11], [46, 108, 120]);
        assert_rgb_to_ycbcr([170, 203, 123], [184, 94, 118]);
        assert_rgb_to_ycbcr([45, 87, 16], [66, 100, 113]);
        assert_rgb_to_ycbcr([59, 109, 21], [84, 92, 110]);
        assert_rgb_to_ycbcr([100, 167, 36], [132, 74, 105]);
        assert_rgb_to_ycbcr([17, 53, 5], [37, 110, 114]);
        assert_rgb_to_ycbcr([226, 244, 220], [236, 119, 121]);
        assert_rgb_to_ycbcr([192, 214, 120], [197, 85, 125]);
        assert_rgb_to_ycbcr([63, 107, 22], [84, 93, 113]);
        assert_rgb_to_ycbcr([44, 78, 19], [61, 104, 116]);
        assert_rgb_to_ycbcr([72, 106, 54], [90, 108, 115]);
        assert_rgb_to_ycbcr([99, 123, 73], [110, 107, 120]);
        assert_rgb_to_ycbcr([188, 216, 148], [200, 99, 120]);
        assert_rgb_to_ycbcr([19, 46, 7], [33, 113, 118]);
        assert_rgb_to_ycbcr([56, 95, 40], [77, 107, 113]);
        assert_rgb_to_ycbcr([81, 120, 56], [101, 103, 114]);
        assert_rgb_to_ycbcr([9, 30, 0], [20, 117, 120]);
        assert_rgb_to_ycbcr([90, 118, 46], [101, 97, 120]);
        assert_rgb_to_ycbcr([24, 52, 0], [38, 107, 118]);
        assert_rgb_to_ycbcr([32, 69, 9], [51, 104, 114]);
        assert_rgb_to_ycbcr([74, 134, 33], [105, 88, 106]);
        assert_rgb_to_ycbcr([37, 74, 7], [55, 101, 115]);
        assert_rgb_to_ycbcr([69, 119, 31], [94, 92, 110]);
        assert_rgb_to_ycbcr([63, 112, 21], [87, 91, 111]);
        assert_rgb_to_ycbcr([90, 148, 17], [116, 72, 110]);
        assert_rgb_to_ycbcr([50, 97, 30], [75, 102, 110]);
        assert_rgb_to_ycbcr([99, 129, 72], [114, 105, 118]);
        assert_rgb_to_ycbcr([161, 196, 57], [170, 64, 122]);
        assert_rgb_to_ycbcr([10, 26, 1], [18, 118, 122]);
        assert_rgb_to_ycbcr([87, 128, 68], [109, 105, 112]);
        assert_rgb_to_ycbcr([111, 155, 73], [132, 94, 113]);
        assert_rgb_to_ycbcr([33, 75, 11], [55, 103, 112]);
        assert_rgb_to_ycbcr([70, 122, 51], [98, 101, 108]);
        assert_rgb_to_ycbcr([22, 74, 3], [50, 101, 108]);
        assert_rgb_to_ycbcr([88, 142, 45], [115, 89, 109]);
        assert_rgb_to_ycbcr([66, 107, 40], [87, 101, 113]);
        assert_rgb_to_ycbcr([18, 45, 0], [32, 110, 118]);
        assert_rgb_to_ycbcr([163, 186, 88], [168, 83, 124]);
        assert_rgb_to_ycbcr([47, 104, 4], [76, 88, 108]);
        assert_rgb_to_ycbcr([147, 211, 114], [181, 90, 104]);
        assert_rgb_to_ycbcr([42, 77, 18], [60, 104, 115]);
        assert_rgb_to_ycbcr([37, 72, 6], [54, 101, 116]);
        assert_rgb_to_ycbcr([84, 140, 55], [114, 95, 107]);
        assert_rgb_to_ycbcr([46, 98, 25], [74, 100, 108]);
        assert_rgb_to_ycbcr([48, 97, 20], [74, 98, 110]);
        assert_rgb_to_ycbcr([189, 224, 156], [206, 100, 116]);
        assert_rgb_to_ycbcr([36, 83, 0], [59, 94, 111]);
        assert_rgb_to_ycbcr([159, 186, 114], [170, 97, 120]);
        assert_rgb_to_ycbcr([75, 118, 46], [97, 99, 112]);
        assert_rgb_to_ycbcr([193, 233, 158], [212, 97, 114]);
        assert_rgb_to_ycbcr([76, 116, 48], [96, 101, 114]);
        assert_rgb_to_ycbcr([108, 157, 79], [133, 97, 110]);
        assert_rgb_to_ycbcr([180, 208, 155], [194, 106, 118]);
        assert_rgb_to_ycbcr([74, 126, 53], [102, 100, 108]);
        assert_rgb_to_ycbcr([72, 123, 46], [99, 98, 109]);
        assert_rgb_to_ycbcr([71, 123, 34], [97, 92, 109]);
        assert_rgb_to_ycbcr([130, 184, 72], [155, 81, 110]);
        assert_rgb_to_ycbcr([30, 61, 17], [47, 111, 116]);
        assert_rgb_to_ycbcr([27, 71, 0], [50, 100, 112]);
        assert_rgb_to_ycbcr([45, 73, 24], [59, 108, 118]);
        assert_rgb_to_ycbcr([139, 175, 93], [155, 93, 117]);
        assert_rgb_to_ycbcr([11, 38, 0], [26, 114, 118]);
        assert_rgb_to_ycbcr([34, 87, 15], [63, 101, 107]);
        assert_rgb_to_ycbcr([43, 76, 35], [61, 113, 115]);
        assert_rgb_to_ycbcr([18, 35, 7], [27, 117, 122]);
        assert_rgb_to_ycbcr([69, 97, 48], [83, 108, 118]);
        assert_rgb_to_ycbcr([139, 176, 50], [151, 71, 120]);
        assert_rgb_to_ycbcr([21, 51, 7], [37, 111, 117]);
        assert_rgb_to_ycbcr([209, 249, 189], [230, 105, 113]);
        assert_rgb_to_ycbcr([32, 66, 14], [50, 108, 115]);
        assert_rgb_to_ycbcr([100, 143, 67], [121, 97, 113]);
        assert_rgb_to_ycbcr([40, 96, 14], [70, 96, 107]);
        assert_rgb_to_ycbcr([88, 130, 64], [110, 102, 112]);
        assert_rgb_to_ycbcr([52, 112, 14], [83, 89, 106]);
        assert_rgb_to_ycbcr([49, 72, 25], [60, 108, 120]);
        assert_rgb_to_ycbcr([144, 193, 75], [165, 77, 113]);
        assert_rgb_to_ycbcr([49, 94, 1], [70, 89, 113]);
    }
}
|
use crate::types::buffer::ebo::EBO;
use crate::types::buffer::vao::VAO;
use crate::types::buffer::vbo::VBO;
use crate::types::data::data_layout::DataLayout;
use gl::types::*;
/// Short alias for [`VertexArrayObjectBuilder`].
pub type VAOBuilder<'a> = VertexArrayObjectBuilder<'a>;

/// Builder that uploads a VBO (and optionally an EBO) and records the
/// attribute layout into a freshly created VAO.
pub struct VertexArrayObjectBuilder<'a> {
    // Vertex buffer object and the vertex data to upload into it.
    pub vbo: VBO,
    pub vbo_data: &'a [f32],
    // GL usage hint for the VBO upload (e.g. GL_STATIC_DRAW — TODO confirm expected values).
    pub vbo_draw_type: GLenum,
    // Vertex attribute layout recorded into the VAO via `vertex_attrib_pointer`.
    pub data_layout: DataLayout,
    // Optional element (index) buffer; the three fields are set together by `add_ebo`.
    pub ebo: Option<EBO>,
    pub ebo_data: Option<&'a [u32]>,
    pub ebo_draw_type: Option<GLenum>,
}
impl<'a> VAOBuilder<'a> {
    /// Starts a builder from a VBO, its data, its GL usage hint and the
    /// attribute layout to record.
    pub fn from_vbo(
        vbo: VBO,
        vbo_data: &'a [f32],
        vbo_draw_type: GLenum,
        data_layout: DataLayout,
    ) -> Self {
        VAOBuilder {
            vbo,
            vbo_data,
            vbo_draw_type,
            data_layout,
            ebo: None,
            ebo_data: None,
            ebo_draw_type: None,
        }
    }

    /// Attaches an element (index) buffer to be bound and uploaded while the
    /// VAO is being recorded.
    pub fn add_ebo(mut self, ebo: EBO, ebo_data: &'a [u32], ebo_draw_type: GLenum) -> Self {
        self.ebo = Some(ebo);
        self.ebo_data = Some(ebo_data);
        self.ebo_draw_type = Some(ebo_draw_type);
        self
    }

    /// Binds the buffers, uploads their data, records the attribute layout
    /// into a new VAO, then unbinds everything (VAO first, preserving the
    /// original unbind order) and returns the created objects.
    pub fn compile(self) -> (VAO, VBO, Option<EBO>) {
        let vao = VAO::default();
        vao.bind();
        self.vbo.bind();
        self.vbo.buffer_data(self.vbo_data, self.vbo_draw_type);
        // `add_ebo` always sets `ebo_data` and `ebo_draw_type` together with
        // `ebo`, so the unwraps below cannot fail.
        if let Some(ebo) = self.ebo.as_ref() {
            ebo.bind();
            ebo.buffer_data(self.ebo_data.unwrap(), self.ebo_draw_type.unwrap());
        }
        self.data_layout.vertex_attrib_pointer();
        vao.unbind();
        self.vbo.unbind();
        if let Some(ebo) = self.ebo.as_ref() {
            ebo.unbind();
        }
        (vao, self.vbo, self.ebo)
    }
}
|
use std::fs;
/// Counts trees ('#') hit while sledding through `input` on the slope
/// (`right`, `down`), with each row repeating infinitely to the right
/// (AoC 2020 day 3). Assumes an ASCII grid.
///
/// Takes `&[&str]` instead of `&Vec<&str>`; existing `solve(&vec, ..)` call
/// sites still compile via deref coercion. The column is tracked as a running
/// offset and wrapped with a per-row modulo, which replaces the original
/// O(width) `chars().nth()` lookup and step-by-step wrap loop.
fn solve(input: &[&str], right: usize, down: usize) -> usize {
    let mut count = 0;
    let mut x = 0;
    for row in input.iter().step_by(down) {
        let bytes = row.as_bytes();
        if bytes[x % bytes.len()] == b'#' {
            count += 1;
        }
        x += right;
    }
    count
}
/// Reads the grid from `input.txt` and prints both puzzle answers:
/// part 1 is the (3, 1) slope, part 2 is the product over all five slopes.
fn main() {
    let inputfile = fs::read_to_string("input.txt").expect("File could not be read.");
    let input: Vec<&str> = inputfile.lines().collect();

    println!("Part 1: {}", solve(&input, 3, 1));

    let slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)];
    let solution2: usize = slopes.iter().map(|&(r, d)| solve(&input, r, d)).product();
    println!("Part 2: {}", solution2);
}
|
//!
//! Rust Firebird Client
//!
//! Transaction struct tests
//!
mk_tests_default! {
    use crate::{FbError, Connection, Transaction};
    use rsfbclient_core::FirebirdClient;

    // DDL/DML templates; the table name is substituted via `format!` so each
    // test can use its own table and run independently.
    macro_rules! recreate_tbl_fmtstring{
        () => {"recreate table {} ( id INT NOT NULL PRIMARY KEY, description VARCHAR(20) );"};
    }
    macro_rules! drop_tbl_fmtstring{
        () => {"drop table {};"};
    }
    macro_rules! insert_stmt_fmtstring{
        () => {"insert into {} (id, description) values (543210, 'testing');"};
    }

    // (Re)creates the test table in its own committed transaction.
    fn setup<C: FirebirdClient>( conn: &mut Connection<C>, table_name: &str ) -> Result<(), FbError>{
        let mut setup_transaction = Transaction::new(conn)?;
        setup_transaction.execute_immediate( format!(recreate_tbl_fmtstring!(), table_name).as_str() )?;
        setup_transaction.commit()
    }

    // Drops the test table and closes the connection, consuming it.
    fn teardown<C: FirebirdClient>( conn: Connection<C>, table_name: &str ) -> Result<(), FbError> {
        let mut conn = conn;
        let mut setup_transaction = Transaction::new(&mut conn)?;
        setup_transaction.execute_immediate( format!(drop_tbl_fmtstring!(), table_name ).as_str() )?;
        setup_transaction.commit()?;
        conn.close()
    }

    // Each test exercises one transaction-finalization path (commit,
    // commit_retaining, rollback, rollback_retaining). `cbuilder()` is
    // provided by the surrounding mk_tests_default! machinery.
    #[test]
    fn recreate_insert_drop_with_commit() -> Result<(), FbError> {
        const TABLE_NAME: &str = "RSFBCLIENT_TEST_TRANS0";
        let mut conn = cbuilder().connect()?;
        setup(&mut conn, TABLE_NAME)?;
        let mut transaction = Transaction::new(&mut conn)?;
        let _insert_result = transaction.execute_immediate( format!(insert_stmt_fmtstring!(), TABLE_NAME).as_str() );
        let commit_result = transaction.commit();
        teardown(conn, TABLE_NAME)?;
        commit_result
    }

    #[test]
    fn recreate_insert_drop_with_commit_retaining() -> Result<(), FbError> {
        const TABLE_NAME: &str = "RSFBCLIENT_TEST_TRANS1";
        let mut conn = cbuilder().connect()?;
        setup(&mut conn, TABLE_NAME)?;
        let mut transaction = Transaction::new(&mut conn)?;
        let _insert_result = transaction.execute_immediate( format!(insert_stmt_fmtstring!(), TABLE_NAME).as_str() );
        let commit_result = transaction.commit_retaining();
        // retaining keeps the transaction alive; drop it before teardown
        // so the connection is free to start a new transaction
        drop(transaction);
        teardown(conn, TABLE_NAME)?;
        commit_result
    }

    #[test]
    fn recreate_insert_drop_with_rollback() -> Result<(), FbError> {
        const TABLE_NAME: &str = "RSFBCLIENT_TEST_TRANS2";
        let mut conn = cbuilder().connect()?;
        setup(&mut conn, TABLE_NAME)?;
        let mut transaction = Transaction::new(&mut conn)?;
        let _insert_result = transaction.execute_immediate( format!(insert_stmt_fmtstring!(), TABLE_NAME).as_str() );
        let rollback_result = transaction.rollback();
        teardown(conn, TABLE_NAME)?;
        rollback_result
    }

    #[test]
    fn recreate_insert_drop_with_rollback_retaining() -> Result<(), FbError> {
        const TABLE_NAME: &str = "RSFBCLIENT_TEST_TRANS3";
        let mut conn = cbuilder().connect()?;
        setup(&mut conn, TABLE_NAME)?;
        let mut transaction = Transaction::new(&mut conn)?;
        let _insert_result = transaction.execute_immediate( format!(insert_stmt_fmtstring!(), TABLE_NAME).as_str() );
        let rollback_result = transaction.rollback_retaining();
        // retaining keeps the transaction alive; drop it before teardown
        drop(transaction);
        teardown(conn, TABLE_NAME)?;
        rollback_result
    }
}
|
/// Handle type for the player component; currently stateless.
pub struct Player {}

impl Player {
    /// Creates a new, empty `Player`.
    pub fn new() -> Player {
        Player {}
    }

    /// Returns the player's version string.
    pub fn version(&self) -> String {
        "0.1.0".to_string()
    }
}
|
use error::Err;
/// Exponential Moving Average
///
/// Seeds with the simple moving average of the first `period` values, then
/// applies, with multiplier k = 2 / (period + 1):
///   EMA(t) = (Price(t) - EMA(t-1)) * k + EMA(t-1)
///
/// Returns `data.len() - period + 1` values (the SMA seed followed by one EMA
/// per remaining data point).
///
/// # Errors
///
/// Returns `Err::NotEnoughtData` when `period == 0` or when fewer than
/// `period + 1` data points are available. (The previous guard allowed
/// `period == data.len()`, which then panicked indexing `data[period]`,
/// and `period == 0` produced NaNs from a 0/0 seed.)
pub fn ema(data: &[f64], period: usize) -> Result<Vec<f64>, Err> {
    if period == 0 || period >= data.len() {
        return Err(Err::NotEnoughtData);
    }
    let multiplier: f64 = 2.0 / (1.0 + period as f64);
    // Seed: SMA over the first `period` values.
    let sma = data[..period].iter().sum::<f64>() / period as f64;
    let mut ema = Vec::with_capacity(data.len() - period + 1);
    ema.push(sma);
    // Fold each remaining price into the running EMA.
    let mut prev = sma;
    for &price in &data[period..] {
        prev = (price - prev) * multiplier + prev;
        ema.push(prev);
    }
    Ok(ema)
}
|
use crate::{
document::Document,
schema::{self, Collection, CollectionName, InvalidNameError, Name, Schematic, View},
Error,
};
/// A database stored in `BonsaiDb`.
#[derive(Debug)]
pub struct Database;

impl Collection for Database {
    // Databases live in the reserved `bonsaidb.databases` collection.
    fn collection_name() -> Result<CollectionName, InvalidNameError> {
        CollectionName::new("bonsaidb", "databases")
    }

    // Registers the collection's views; currently only the by-name index.
    fn define_views(schema: &mut Schematic) -> Result<(), Error> {
        schema.define_view(ByName)
    }
}
/// View indexing [`Database`] documents by their lowercased name.
#[derive(Debug)]
pub struct ByName;

impl View for ByName {
    type Collection = Database;
    // Key: the database name, lowercased in `map`.
    type Key = String;
    // Value: the schema the database was created with.
    type Value = schema::SchemaName;

    fn unique(&self) -> bool {
        // Enforce uniqueness of database names; combined with the lowercased
        // key this makes names unique case-insensitively.
        true
    }

    fn version(&self) -> u64 {
        1
    }

    fn name(&self) -> Result<Name, InvalidNameError> {
        Name::new("by-name")
    }

    fn map(&self, document: &Document<'_>) -> schema::MapResult<Self::Key, Self::Value> {
        let database = document.contents::<crate::connection::Database>()?;
        // Lowercase the key so lookups and the uniqueness check above are
        // case-insensitive.
        Ok(Some(document.emit_key_and_value(
            database.name.to_ascii_lowercase(),
            database.schema,
        )))
    }
}
|
#[cfg(target_arch = "wasm32")]
use wasm_bindgen::prelude::*;
// Browser entry point: wasm-bindgen invokes this automatically when the
// module is instantiated on wasm32 targets.
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen(start)]
pub fn wasm_main() {
    // Route Rust panic messages to the browser console before running the
    // shared entry point.
    std::panic::set_hook(Box::new(console_error_panic_hook::hook));
    main();
}
use instant;
#[cfg(target_arch = "android")]
use ndk_glue;
use std::iter;
/// Per-draw constants pushed to the graphics pipeline.
///
/// `#[repr(C)]` fixes the field order/layout so the raw bytes can be handed
/// to the GPU; assumes the shader-side push-constant block declares the same
/// members in the same order (color, pos, scale) — TODO confirm against the
/// shader source.
#[repr(C)]
#[derive(Debug, Clone, Copy)]
struct PushConstants {
    color: [f32; 4],
    pos: [f32; 2],
    scale: [f32; 2],
}
/// Desktop/Android/wasm entry point: opens a window, initializes gfx-hal,
/// builds a single render pass + graphics pipeline, then runs the winit
/// event loop, re-recording and submitting one command buffer per frame.
#[cfg_attr(target_os = "android", ndk_glue::main(backtrace = "on"))]
pub fn main() {
    // Logging backend differs per target: browser console on wasm,
    // env_logger elsewhere.
    #[cfg(target_arch = "wasm32")]
    console_log::init_with_level(log::Level::Debug).unwrap();
    #[cfg(not(target_arch = "wasm32"))]
    env_logger::init();
    #[allow(unused_imports)]
    use gfx_hal::{
        adapter::{Adapter, MemoryType},
        buffer, command,
        format::{self as f, AsFormat},
        image as i, memory as m, pass, pool,
        prelude::*,
        pso,
        queue::QueueGroup,
        window, Backend,
    };
    use std::mem::ManuallyDrop;
    // NOTE(review): the title says "Part 1" but the embedded shaders below
    // are the part-2 SPIR-V files — confirm which tutorial step this is.
    const APP_NAME: &'static str = "Part 1: Drawing a triangle";
    const WINDOW_SIZE: [u32; 2] = [512, 512];
    let event_loop = winit::event_loop::EventLoop::new();
    // Convert the fixed logical size into physical pixels using the
    // primary monitor's scale factor.
    let (logical_window_size, physical_window_size) = {
        use winit::dpi::{LogicalSize, PhysicalSize};
        let dpi = event_loop
            .primary_monitor()
            .expect("No primary monitor")
            .scale_factor();
        let logical: LogicalSize<u32> = WINDOW_SIZE.into();
        let physical: PhysicalSize<u32> = logical.to_physical(dpi);
        (logical, physical)
    };
    // Current swapchain extent in physical pixels; updated on resize.
    // (NOTE(review): "extend" looks like a typo for "extent".)
    let mut surface_extend = window::Extent2D {
        width: physical_window_size.width,
        height: physical_window_size.height,
    };
    let window = winit::window::WindowBuilder::new()
        .with_title(APP_NAME)
        .with_inner_size(logical_window_size)
        .build(&event_loop)
        .expect("Failed to create window");
    // On the web, the winit canvas must be attached to the DOM manually.
    #[cfg(target_arch = "wasm32")]
    {
        #[allow(unused_imports)]
        use web_sys::{WebGlProgram, WebGlRenderingContext, WebGlShader, Window};
        web_sys::window()
            .unwrap()
            .document()
            .unwrap()
            .body()
            .unwrap()
            .append_child(&winit::platform::web::WindowExtWebSys::canvas(&window))
            .unwrap();
        web_sys::console::log_1(&"Hello using web-sys".into());
    }
    // Backend instance, window surface, and the first available adapter.
    let (instance, surface, adapter) = {
        let instance = backend::Instance::create(APP_NAME, 1).expect("Backend not supported");
        let surface = unsafe {
            instance
                .create_surface(&window)
                .expect("Failed to create surface for window")
        };
        let adapter = instance.enumerate_adapters().remove(0);
        // NOTE(review): enumerate_adapters() is called a second time just
        // for logging; the first result could be cached and reused.
        for adapter in &instance.enumerate_adapters() {
            println!("{:?}", adapter.info);
        }
        (instance, surface, adapter)
    };
    // Open a logical device on a queue family that supports both graphics
    // and presentation to our surface; take its one queue group.
    let (device, mut queue_group) = {
        let queue_family = adapter
            .queue_families
            .iter()
            .find(|family| {
                surface.supports_queue_family(family) && family.queue_type().supports_graphics()
            })
            .expect("No compatible queue family found");
        let mut gpu = unsafe {
            adapter
                .physical_device
                .open(&[(queue_family, &[1.0])], gfx_hal::Features::empty())
                .expect("Failed to open device")
        };
        (gpu.device, gpu.queue_groups.pop().unwrap())
    };
    // One command pool and a single primary command buffer, reset and
    // re-recorded every frame.
    let (command_pool, mut command_buffer) = unsafe {
        use gfx_hal::command::Level;
        use gfx_hal::pool::CommandPoolCreateFlags;
        let mut command_pool = device
            .create_command_pool(queue_group.family, CommandPoolCreateFlags::empty())
            .expect("Out of memory");
        let command_buffer = command_pool.allocate_one(Level::Primary);
        (command_pool, command_buffer)
    };
    // Render passes
    // Prefer an sRGB surface format; otherwise fall back to the first
    // supported format (or Rgba8Srgb if the list is empty).
    let surface_color_format = {
        use gfx_hal::format::{ChannelType, Format};
        let supported_formats = surface
            .supported_formats(&adapter.physical_device)
            .unwrap_or(vec![]);
        let default_format = *supported_formats.get(0).unwrap_or(&Format::Rgba8Srgb);
        supported_formats
            .into_iter()
            .find(|format| format.base_format().1 == ChannelType::Srgb)
            .unwrap_or(default_format)
    };
    // Single-subpass render pass: clear the color attachment on load,
    // store it, and transition Undefined -> Present.
    let render_pass = {
        use gfx_hal::image::Layout;
        use gfx_hal::pass::{
            Attachment, AttachmentLoadOp, AttachmentOps, AttachmentStoreOp, SubpassDesc,
        };
        let color_attachment = Attachment {
            format: Some(surface_color_format),
            samples: 1,
            ops: AttachmentOps::new(AttachmentLoadOp::Clear, AttachmentStoreOp::Store),
            stencil_ops: AttachmentOps::DONT_CARE,
            layouts: Layout::Undefined..Layout::Present,
        };
        let subpass = SubpassDesc {
            colors: &[(0, Layout::ColorAttachmentOptimal)],
            depth_stencil: None,
            inputs: &[],
            resolves: &[],
            preserves: &[],
        };
        unsafe {
            device
                .create_render_pass(
                    iter::once(color_attachment),
                    iter::once(subpass),
                    iter::empty(),
                )
                .expect("Out of memory")
        }
    };
    // Pipeline layout: no descriptor sets, just a single vertex-stage push
    // constant range sized to PushConstants.
    let pipeline_layout = unsafe {
        use gfx_hal::pso::ShaderStageFlags;
        let push_constant_bytes = std::mem::size_of::<PushConstants>() as u32;
        device
            .create_pipeline_layout(
                iter::empty(),
                iter::once((ShaderStageFlags::VERTEX, 0..push_constant_bytes)),
            )
            .expect("Out of memory")
    };
    // Precompiled SPIR-V blobs baked into the binary at compile time.
    let vertex_shader = include_bytes!("shaders/part-2.vert.spv");
    let fragment_shader = include_bytes!("shaders/part-2.frag.spv");
    // Create a pipeline with the given layout and shaders.
    /// Builds a graphics pipeline from SPIR-V vertex/fragment shader
    /// bytes. The shader modules only live for the duration of pipeline
    /// creation and are destroyed before returning.
    ///
    /// # Safety
    /// Caller must supply valid SPIR-V, and must destroy the returned
    /// pipeline before destroying `device`.
    unsafe fn make_pipeline<B: gfx_hal::Backend>(
        device: &B::Device,
        render_pass: &B::RenderPass,
        pipeline_layout: &B::PipelineLayout,
        vertex_shader: &[u8],
        fragment_shader: &[u8],
    ) -> B::GraphicsPipeline {
        use gfx_hal::pass::Subpass;
        use gfx_hal::pso::{
            BlendState, ColorBlendDesc, ColorMask, EntryPoint, Face, GraphicsPipelineDesc,
            InputAssemblerDesc, Primitive, PrimitiveAssemblerDesc, Rasterizer, Specialization,
        };
        let spirv: Vec<u32> = auxil::read_spirv(std::io::Cursor::new(vertex_shader)).unwrap();
        let vertex_shader_module = device
            .create_shader_module(&spirv)
            .expect("Failed to create vertex shader module");
        let spirv: Vec<u32> = auxil::read_spirv(std::io::Cursor::new(fragment_shader)).unwrap();
        let fragment_shader_module = device
            .create_shader_module(&spirv)
            .expect("Failed to create fragment shader module");
        let (vs_entry, fs_entry) = (
            EntryPoint::<B> {
                entry: "main",
                module: &vertex_shader_module,
                specialization: Specialization::default(),
            },
            EntryPoint::<B> {
                entry: "main",
                module: &fragment_shader_module,
                specialization: Specialization::default(),
            },
        );
        // No vertex buffers or attributes: the vertex shader is expected
        // to generate vertex positions on its own.
        let primitive_assembler = PrimitiveAssemblerDesc::Vertex {
            buffers: &[],
            attributes: &[],
            input_assembler: InputAssemblerDesc::new(Primitive::TriangleList),
            vertex: vs_entry,
            tessellation: None,
            geometry: None,
        };
        let mut pipeline_desc = GraphicsPipelineDesc::new(
            primitive_assembler,
            Rasterizer {
                cull_face: Face::BACK,
                ..Rasterizer::FILL
            },
            Some(fs_entry),
            pipeline_layout,
            Subpass {
                index: 0,
                main_pass: render_pass,
            },
        );
        pipeline_desc.blender.targets.push(ColorBlendDesc {
            mask: ColorMask::ALL,
            blend: Some(BlendState::ALPHA),
        });
        let pipeline = device
            .create_graphics_pipeline(&pipeline_desc, None)
            .expect("Failed to create graphics pipeline");
        // Shader modules are only needed while creating the pipeline.
        device.destroy_shader_module(vertex_shader_module);
        device.destroy_shader_module(fragment_shader_module);
        pipeline
    }
    let pipeline = unsafe {
        make_pipeline::<backend::Backend>(
            &device,
            &render_pass,
            &pipeline_layout,
            vertex_shader,
            fragment_shader,
        )
    };
    // Every GPU object that must be destroyed in a specific order on exit.
    struct Resources<B: gfx_hal::Backend> {
        instance: B::Instance,
        surface: B::Surface,
        device: B::Device,
        render_passes: Vec<B::RenderPass>,
        pipeline_layouts: Vec<B::PipelineLayout>,
        pipelines: Vec<B::GraphicsPipeline>,
        command_pool: B::CommandPool,
        submission_complete_fence: B::Fence,
        rendering_complete_semaphore: B::Semaphore,
    }
    // ManuallyDrop wrapper so Drop can move the fields out and destroy
    // them in order: children first, then the command pool, then the
    // surface/instance.
    struct ResourceHolder<B: gfx_hal::Backend>(ManuallyDrop<Resources<B>>);
    impl<B: gfx_hal::Backend> Drop for ResourceHolder<B> {
        fn drop(&mut self) {
            unsafe {
                let Resources {
                    instance,
                    mut surface,
                    device,
                    command_pool,
                    render_passes,
                    pipeline_layouts,
                    pipelines,
                    submission_complete_fence,
                    rendering_complete_semaphore,
                } = ManuallyDrop::take(&mut self.0);
                device.destroy_semaphore(rendering_complete_semaphore);
                device.destroy_fence(submission_complete_fence);
                for pipeline in pipelines {
                    device.destroy_graphics_pipeline(pipeline);
                }
                for pipeline_layout in pipeline_layouts {
                    device.destroy_pipeline_layout(pipeline_layout);
                }
                for render_pass in render_passes {
                    device.destroy_render_pass(render_pass);
                }
                device.destroy_command_pool(command_pool);
                surface.unconfigure_swapchain(&device);
                instance.destroy_surface(surface);
            }
        }
    }
    // Fence starts signalled so the first frame's wait doesn't deadlock.
    let submission_complete_fence = device.create_fence(true).expect("Out of memory");
    let rendering_complete_semaphore = device.create_semaphore().expect("Out of memory");
    let mut _should_configure_swapchain = true;
    let mut resource_holder: ResourceHolder<backend::Backend> =
        ResourceHolder(ManuallyDrop::new(Resources {
            instance,
            surface,
            device,
            command_pool,
            render_passes: vec![render_pass],
            pipeline_layouts: vec![pipeline_layout],
            pipelines: vec![pipeline],
            submission_complete_fence,
            rendering_complete_semaphore,
        }));
    let start_time = instant::Instant::now();
    // Event loop: resize events mark the swapchain dirty; each
    // RedrawEventsCleared records and submits exactly one frame.
    event_loop.run(move |event, _, control_flow| {
        use winit::event::{Event, WindowEvent};
        use winit::event_loop::ControlFlow;
        match event {
            Event::WindowEvent { event, .. } => match event {
                WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
                WindowEvent::Resized(dims) => {
                    surface_extend = window::Extent2D {
                        width: dims.width,
                        height: dims.height,
                    };
                    _should_configure_swapchain = true;
                }
                WindowEvent::ScaleFactorChanged { new_inner_size, .. } => {
                    surface_extend = window::Extent2D {
                        height: new_inner_size.height,
                        width: new_inner_size.width,
                    };
                    _should_configure_swapchain = true;
                }
                _ => (),
            },
            Event::MainEventsCleared => window.request_redraw(),
            winit::event::Event::RedrawEventsCleared => {
                let res: &mut Resources<_> = &mut resource_holder.0;
                let render_pass = &res.render_passes[0];
                let pipeline_layout = &res.pipeline_layouts[0];
                let pipeline = &res.pipelines[0];
                // NOTE(review): `anim` and `triangles` are computed but
                // never uploaded — no push_graphics_constants call is
                // recorded below, so the shader's push-constant block is
                // never written this frame. A push loop over `triangles`
                // (using `pipeline_layout`) appears to be missing.
                let anim = start_time.elapsed().as_secs_f32().sin();
                let small = [0.33, 0.33];
                let triangles = &[
                    // red triangles
                    PushConstants {
                        color: [1.0, 0.0, 0.0, 1.0],
                        pos: [-0.5, -0.5],
                        scale: small,
                    },
                ];
                // Wait for the previous frame's submission to finish, then
                // recycle the fence and the command pool.
                unsafe {
                    #[allow(unused_variables)]
                    let render_timeout_ns = 1_000_000_000;
                    // wasm has no blocking fence wait, hence the cfg gate.
                    #[cfg(not(target_arch = "wasm32"))]
                    {
                        res.device
                            .wait_for_fence(&res.submission_complete_fence, render_timeout_ns)
                            .expect("Out of memory or device lost");
                    }
                    res.device
                        .reset_fence(&mut res.submission_complete_fence)
                        .expect("Out of memory");
                    res.command_pool.reset(false);
                }
                // (Re)configure the swapchain after init/resize, asking
                // for triple buffering when the surface supports it.
                if _should_configure_swapchain {
                    use gfx_hal::window::SwapchainConfig;
                    let caps = res.surface.capabilities(&adapter.physical_device);
                    let mut swapchain_config =
                        SwapchainConfig::from_caps(&caps, surface_color_format, surface_extend);
                    if caps.image_count.contains(&3) {
                        swapchain_config.image_count = 3;
                    }
                    surface_extend = swapchain_config.extent;
                    unsafe {
                        res.surface
                            .configure_swapchain(&res.device, swapchain_config)
                            .expect("Failed re configure swapchain");
                    }
                    _should_configure_swapchain = false;
                }
                // Acquire the next backbuffer; on failure, rebuild the
                // swapchain next frame and skip this one.
                let surface_image = unsafe {
                    let acquire_timeout_ns = 1_000_000_000;
                    match res.surface.acquire_image(acquire_timeout_ns) {
                        Ok((image, _)) => image,
                        Err(_) => {
                            _should_configure_swapchain = true;
                            return;
                        }
                    }
                };
                // A fresh framebuffer is created every frame from the
                // current swapchain attachment description.
                // NOTE(review): it is only destroyed on present failure
                // below — on the success path this looks like a per-frame
                // leak; confirm.
                let framebuffer = unsafe {
                    use gfx_hal::image::Extent;
                    use gfx_hal::window::SwapchainConfig;
                    let caps = res.surface.capabilities(&adapter.physical_device);
                    let swapchain_config =
                        SwapchainConfig::from_caps(&caps, surface_color_format, surface_extend);
                    res.device
                        .create_framebuffer(
                            render_pass,
                            iter::once(swapchain_config.framebuffer_attachment()),
                            Extent {
                                width: surface_extend.width,
                                height: surface_extend.height,
                                depth: 1,
                            },
                        )
                        .unwrap()
                };
                // Full-surface viewport; scissor reuses the same rect.
                let viewport = {
                    use gfx_hal::pso::{Rect, Viewport};
                    Viewport {
                        rect: Rect {
                            x: 0,
                            y: 0,
                            w: surface_extend.width as i16,
                            h: surface_extend.height as i16,
                        },
                        depth: 0.0..1.0,
                    }
                };
                // Record, submit, and present one frame.
                unsafe {
                    use gfx_hal::command::{CommandBufferFlags, SubpassContents};
                    command_buffer.begin_primary(CommandBufferFlags::ONE_TIME_SUBMIT);
                    command_buffer.set_viewports(0, iter::once(viewport.clone()));
                    command_buffer.set_scissors(0, iter::once(viewport.rect));
                    command_buffer.begin_render_pass(
                        render_pass,
                        &framebuffer,
                        viewport.rect,
                        iter::once(command::RenderAttachmentInfo {
                            image_view: std::borrow::Borrow::borrow(&surface_image),
                            clear_value: command::ClearValue {
                                color: command::ClearColor {
                                    float32: [0.0, 0.0, 0.0, 1.0],
                                },
                            },
                        }),
                        SubpassContents::Inline,
                    );
                    command_buffer.bind_graphics_pipeline(pipeline);
                    // NOTE(review): 0..4 emits 4 vertices for a
                    // TriangleList; only the first 3 form a triangle and
                    // the 4th is dropped — 0..3 is likely intended.
                    command_buffer.draw(0..4, 0..1);
                    command_buffer.end_render_pass();
                    command_buffer.finish();
                    queue_group.queues[0].submit(
                        iter::once(&command_buffer),
                        iter::empty(),
                        iter::once(&res.rendering_complete_semaphore),
                        Some(&mut res.submission_complete_fence),
                    );
                    // present frame
                    if let Err(_) = queue_group.queues[0].present(
                        &mut res.surface,
                        surface_image,
                        Some(&mut res.rendering_complete_semaphore),
                    ) {
                        _should_configure_swapchain = true;
                        res.device.destroy_framebuffer(framebuffer);
                    }
                }
            }
            _ => (),
        }
    })
}
|
use crate::r#type::Type;
/// A named bound: a type parameter name together with the list of types
/// constraining it.
#[derive(Debug, Clone)]
pub struct Bound {
    // The bounded identifier (e.g. the type parameter's name).
    pub name: String,
    // The types this name is bounded by.
    pub bound: Vec<Type>,
}
|
use std::thread;
use std::net::{TcpListener, TcpStream};
use std::io::{Read, Write};
use std::env;
use std::str;
/// Size in bytes of the per-connection read buffer.
const BUFFERBYTES: usize = 20;
/// Serves a single client connection: reads messages of the form
/// "<word> <number> ...", echoes them to stdout, and replies with
/// "Hello <number + 1>" until the peer disconnects or an I/O error occurs.
///
/// Panics (like the original) if the bytes are not valid UTF-8 or the
/// message does not carry a parsable number.
fn client_handshake(mut stream: TcpStream) {
    let mut data = [0u8; BUFFERBYTES]; // using 20 byte buffer
    loop {
        match stream.read(&mut data) {
            // Ok(0) means the peer closed the connection. The previous
            // version treated EOF as "keep reading", which spun forever
            // at 100% CPU once the client hung up.
            Ok(0) => break,
            Ok(size) => {
                // echo what's received
                let val = str::from_utf8(&data[0..size]).unwrap();
                println!("{}", val);
                let resp = cycle_client_response(val);
                // write_all retries short writes; a bare write() could
                // silently send only part of the response.
                stream.write_all(resp.as_bytes()).unwrap();
            }
            Err(_) => {
                eprintln!("An error occurred while connecting to socket");
                break;
            }
        }
    }
}
/// Builds the reply for one client message.
///
/// The message format is "<word> <number> ...": the second
/// space-separated token is parsed as an `i32`, incremented, and echoed
/// back as "Hello <n+1>". Panics if the token is missing or not a number.
fn cycle_client_response(value: &str) -> String {
    let counter: i32 = value
        .split(' ')
        .nth(1)
        .unwrap()
        .parse()
        .unwrap();
    format!("Hello {}", counter + 1)
}
/// Binds a TCP listener on 127.0.0.1:<port> (port from argv[1]) and
/// serves each incoming connection on its own thread via
/// `client_handshake`.
fn main() {
    // read user input which should contain port; fail with a usage hint
    // instead of an opaque index-out-of-bounds panic when it's missing.
    let port = env::args().nth(1).expect("usage: <program> <port>");
    let addr = format!("127.0.0.1:{}", port);
    let listener = TcpListener::bind(&addr)
        .expect("failed to bind listener (is the port free and numeric?)");
    // accept connections and process them,
    // spawning a new thread for each connection
    for stream in listener.incoming() {
        match stream {
            Ok(stream) => {
                thread::spawn(move || {
                    // handle successful connection
                    client_handshake(stream);
                });
            }
            Err(e) => eprintln!("Error: {}", e),
        }
    }
    // close the socket server (explicit; Drop would do this anyway)
    drop(listener);
}
|
use std::error::Error;
use futures::{Async, Poll};
use futures::future::{Future, IntoFuture, Shared, SharedError, SharedItem};
use rayon::ThreadPool;
use specs::{Component, DenseVecStorage};
use {BoxedErr, SharedAssetError, StoreId};
/// One of the three core traits of this crate.
///
/// You want to implement this for every type of asset like
///
/// * `Mesh`
/// * `Texture`
/// * `Terrain`
///
/// and so on. Now, an asset may be available in different formats.
/// That's why we have the `Data` associated type here. You can specify
/// an intermediate format here, like the vertex data for a mesh or the samples
/// for audio data.
///
/// This data is then generated by the `Format` trait.
pub trait Asset: Sized {
    /// The `Context` type that can produce this asset.
    /// (The context owns the `Data` type and the creation logic; the
    /// asset type itself carries no behavior.)
    type Context: Context<Asset = Self>;
}
/// A future for an asset
///
/// Wraps a `Shared` boxed future so many consumers can await the same
/// in-flight load; each successful poll hands out a clone of the asset.
pub struct AssetFuture<A>(pub Shared<Box<Future<Item = A, Error = BoxedErr>>>);

impl<A> AssetFuture<A> {
    /// Wrap another future into `AssetFuture`
    pub fn from_future<F>(f: F) -> Self
    where
        F: IntoFuture<Item = A, Error = BoxedErr> + 'static,
    {
        // Box first to erase the concrete future type, then share it.
        let f: Box<Future<Item = A, Error = BoxedErr>> = Box::new(f.into_future());
        AssetFuture(f.shared())
    }
}

/// Storing an `AssetFuture` as a specs component lets entities reference
/// assets that are still loading.
impl<A> Component for AssetFuture<A>
where
    A: Component,
    Self: 'static,
{
    type Storage = DenseVecStorage<Self>;
}

impl<A> AssetFuture<A> {
    /// If any clone of this future has completed execution, returns its result immediately
    /// without blocking.
    /// Otherwise, returns None without triggering the work represented by this future.
    pub fn peek(&self) -> Option<Result<SharedItem<A>, SharedError<BoxedErr>>> {
        self.0.peek()
    }
}

/// Cloning only bumps the `Shared` handle; the underlying work runs once.
impl<A> Clone for AssetFuture<A> {
    fn clone(&self) -> Self {
        AssetFuture(self.0.clone())
    }
}

impl<A> Future for AssetFuture<A>
where
    A: Clone,
{
    type Item = A;
    type Error = BoxedErr;

    /// Polls the shared future. On success the shared item is cloned out
    /// (hence the `A: Clone` bound); on failure the `SharedError` is
    /// re-boxed as a `BoxedErr`.
    fn poll(&mut self) -> Poll<A, BoxedErr> {
        match self.0.poll() {
            Ok(Async::NotReady) => Ok(Async::NotReady),
            Ok(Async::Ready(asset)) => Ok(Async::Ready((*asset).clone())),
            Err(err) => Err(BoxedErr(Box::new(SharedAssetError::from(err)))),
        }
    }
}

impl<A> From<Shared<Box<Future<Item = A, Error = BoxedErr>>>> for AssetFuture<A> {
    fn from(inner: Shared<Box<Future<Item = A, Error = BoxedErr>>>) -> Self {
        AssetFuture(inner)
    }
}
/// A specifier for an asset, uniquely identifying it by
///
/// * the extension (the format it was provided in)
/// * its name
/// * the storage it was loaded from
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AssetSpec {
    /// The possible extensions of this asset
    pub exts: &'static [&'static str],
    /// The name of this asset.
    pub name: String,
    /// Unique identifier indicating the Storage from which the asset was loaded.
    pub store: StoreId,
}

impl AssetSpec {
    /// Creates a new asset specifier from the given parameters.
    pub fn new(name: String, exts: &'static [&'static str], store: StoreId) -> Self {
        AssetSpec { exts, name, store }
    }
}
/// The context type which manages assets of one type.
/// It is responsible for caching
pub trait Context: Send + Sync + 'static {
    /// The asset type this context can produce.
    type Asset: Asset;
    /// The `Data` type the asset can be created from.
    type Data;
    /// The error that may be returned from `create_asset`.
    type Error: Error + Send + Sync;
    /// The result type for loading an asset. This can also be a future
    /// (or anything that implements `IntoFuture`).
    type Result: IntoFuture<Item = Self::Asset, Error = Self::Error>;
    /// A small keyword for which category these assets belongs to.
    ///
    /// ## Examples
    ///
    /// * `"mesh"` for `Mesh`
    /// * `"data"` for `Level`
    ///
    /// The storage may use this information, to e.g. search the identically-named
    /// subfolder.
    fn category(&self) -> &str;
    /// Provides the conversion from the data format to the actual asset.
    fn create_asset(&self, data: Self::Data, pool: &ThreadPool) -> Self::Result;
    /// Notifies about an asset load. This can be used to cache the asset.
    /// To return a cached asset, see the `retrieve` function.
    fn cache(&self, _spec: AssetSpec, _asset: AssetFuture<Self::Asset>) {}
    /// Returns `Some` cached value if possible, otherwise `None`.
    ///
    /// For a basic implementation of a cache, please take a look at the `Cache` type.
    fn retrieve(&self, _spec: &AssetSpec) -> Option<AssetFuture<Self::Asset>> {
        None
    }
    /// Updates an asset after it's been reloaded.
    ///
    /// This usually just puts the new asset into a queue;
    /// the actual update happens by calling `update` on the
    /// asset.
    fn update(&self, spec: &AssetSpec, asset: AssetFuture<Self::Asset>);
    /// Gives a hint that several assets may have been released recently.
    ///
    /// This is useful if your assets are reference counted, because you are
    /// now able to remove unique assets from the cache, leaving the shared
    /// ones there.
    fn clear(&self) {}
    /// Request for clearing the whole cache.
    fn clear_all(&self) {}
}
/// A format, providing a conversion from bytes to asset data, which is then
/// in turn accepted by `Asset::from_data`. Examples for formats are
/// `Png`, `Obj` and `Wave`.
///
/// NOTE(review): the `Asset` trait visible in this module has no
/// `from_data` — data is consumed by `Context::create_asset`; confirm and
/// update this reference.
pub trait Format {
    /// A list of the extensions (without `.`).
    ///
    /// ## Examples
    ///
    /// * `"png"`
    /// * `"obj"`
    /// * `"wav"`
    const EXTENSIONS: &'static [&'static str];
    /// The data type this format is able to load.
    type Data;
    /// The error that may be returned from `Format::parse`.
    type Error: Error + Send + Sync;
    /// The result of the `parse` method. Can be anything that implements
    /// `IntoFuture`.
    type Result: IntoFuture<Item = Self::Data, Error = Self::Error>;
    /// Reads the given bytes and produces asset data.
    fn parse(&self, bytes: Vec<u8>, pool: &ThreadPool) -> Self::Result;
}
|
/// Horizontal scale: screen pixels per unit of musical time.
pub const PIXELS_PER_TIME: f64 = 100.0;
/// Vertical scale: screen pixels per semitone of pitch.
pub const PIXELS_PER_SEMITONE: f64 = 8.0;
use misc::Vector;
use data::{State, DragType};
use edited_note::NoteDrawingInfo;
impl State {
    /// Client-space x coordinate (pixels) -> musical time.
    pub fn client_to_time (&self, client: f64)->f64 {
        client / PIXELS_PER_TIME
    }
    /// Client-space y coordinate (pixels) -> pitch. The scale is negated
    /// because screen y grows downward; 101.5 is the pitch that maps to
    /// y = 0 (NOTE(review): confirm this anchor against the UI layout).
    pub fn client_to_pitch (&self, client: f64)->f64 {
        (client / -PIXELS_PER_SEMITONE) + 101.5
    }
    /// Inverse of `client_to_time`.
    pub fn time_to_client (&self, time: f64)->f64 {
        time * PIXELS_PER_TIME
    }
    /// Inverse of `client_to_pitch`.
    pub fn pitch_to_client (&self, pitch: f64)->f64 {
        (pitch - 101.5) * -PIXELS_PER_SEMITONE
    }
    /// Vector versions of the scalar conversions: index 0 = time axis,
    /// index 1 = pitch axis.
    pub fn music_to_client (&self, music: Vector)->Vector {
        Vector::new (self.time_to_client (music [0]), self.pitch_to_client (music [1]))
    }
    pub fn client_to_music (&self, client: Vector)->Vector {
        Vector::new (self.client_to_time (client[0]), self.client_to_pitch (client[1]))
    }
    /// Redraws the dynamic DOM elements via stdweb `js!`: resizes #notes,
    /// rebuilds the drag-selection rectangle when a drag-select is in
    /// progress, and refreshes every note element.
    pub fn update_elements (&self) {
        js!{ $("#notes").height (@{PIXELS_PER_SEMITONE*80.0 }); }
        let info = NoteDrawingInfo {
            drag_type: self.drag_type(),
            state: & self,
        };
        // Remove any stale selection rectangle before (maybe) redrawing it.
        js!{ $(".drag_select").remove() ;}
        if let Some(DragType::DragSelect {minima, maxima, ..}) = info.drag_type {
            let minima = self.music_to_client (minima);
            let maxima = self.music_to_client (maxima);
            let size = maxima - minima;
            // Height is negated because the pitch axis is flipped on screen,
            // so size[1] comes out negative in client space.
            js!{ $("<div>", {class: "drag_select"}).appendTo ($("#notes")).css ({
                left:@{minima [0]},
                top:@{maxima [1]},
                width:@{size[0]},
                height:@{-size[1]},
            });}
        }
        for note in &self.notes {note.update_element(& info)}
    }
    /// One-time setup: draws grey background stripes for the black keys
    /// over 10 octaves. The bool pattern marks black keys in a 12-note
    /// octave starting at pitch 21 (presumably MIDI A0 — confirm).
    pub fn init_elements (&self) {
        for octave in 0..10 {
            for (index, black) in vec![false, true, false, false, true, false, true, false, false, true, false, true].into_iter().enumerate() {
                let pitch = (octave*12 + index + 21) as f64;
                if black {
                    js!{
                        $("#notes").append ($("<div>", {class: "key"}).css({top: @{self.pitch_to_client (pitch+0.5)}, height:@{PIXELS_PER_SEMITONE}, "background-color": "#ddd"}));
                    }
                }
            }
        }
    }
}
|
extern crate bitmap_io;
use bitmap_io::*;
use std::fs::File;
#[allow(unused_must_use)]
fn main() {
let mut bmp_file = File::open("test_24-uncompressed.bmp").unwrap();
let test_24_uncompressed = Bitmap::from_file(&mut bmp_file).unwrap();
let mut bmp_file = File::open("test_32-uncompressed.bmp").unwrap();
let test_32_uncompressed = Bitmap::from_file(&mut bmp_file).unwrap();
let mut bmp_file = File::open("test_32-bitfield.bmp").unwrap();
let test_32_bitfield = Bitmap::from_file(&mut bmp_file).unwrap();
let mut bmp_file = File::open("test_16-bitfield.bmp").unwrap();
let test_16_bitfield = Bitmap::from_file(&mut bmp_file).unwrap();
let mut bmp_file = File::open("8bpp.bmp").unwrap();
let test_8_uncompressed = Bitmap::from_file(&mut bmp_file).unwrap();
// let mut bmp_file = File::open("test.bmp").unwrap();
// let bitmap = Bitmap::from_file(&mut bmp_file).unwrap();
// println!("{}", bitmap.file_header);
// println!("{}", bitmap.info_header);
if let Ok(mut out_file) = File::create("test_24-uncompressed-result.bmp") {
test_24_uncompressed.into_file(&mut out_file);
}
if let Ok(mut out_file) = File::create("test_32-uncompressed-result.bmp") {
test_32_uncompressed.into_file(&mut out_file);
}
if let Ok(mut out_file) = File::create("test_32-bitfield-result.bmp") {
test_32_bitfield.into_file(&mut out_file);
}
if let Ok(mut out_file) = File::create("test_16-bitfield-result.bmp") {
test_16_bitfield.into_file(&mut out_file);
}
if let Ok(mut out_file) = File::create("test_8-uncompressed-result.bmp") {
test_8_uncompressed.into_file(&mut out_file);
}
println!("Hello world");
}
|
use exonum::crypto::{PublicKey, Hash};
// A pending exchange offer, stored with Exonum's fixed binary encoding.
encoding_struct! {
    #[derive(Eq, PartialOrd, Ord)]
    struct Offer {
        // Public key of the wallet making the offer.
        wallet: &PublicKey,
        // Amount currently on offer.
        amount: u64,
        // Hash of the transaction that created this offer.
        tx_hash: &Hash,
    }
}
impl Offer {
    /// Decreases the offer's amount by `amount`. `encoding_struct!` values
    /// are immutable, so mutation is implemented as rebuild-and-replace.
    ///
    /// NOTE(review): `self.amount() - amount` underflows (panics in debug
    /// builds) when `amount > self.amount()` — callers must validate first.
    pub fn remove_amount(&mut self, amount: u64) {
        *self = Offer::new(self.wallet(), self.amount() - amount, &self.tx_hash());
    }
    /// Increases the offer's amount by `amount` via the same
    /// rebuild-and-replace pattern (overflow would likewise panic in debug).
    pub fn add_amount(&mut self, amount: u64) {
        *self = Offer::new(self.wallet(), self.amount() + amount, &self.tx_hash());
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// ---------------------------------------------------------------------
// Generated model types (AutoRust, see crate-level attributes): Azure
// Monitor action-group resources and their receiver variants. Field
// names mirror the REST wire format via serde renames; avoid hand-edits
// that would drift from the upstream OpenAPI spec.
// ---------------------------------------------------------------------

/// Common ARM resource envelope (id/name/type/location/tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}

/// An action group as an ARM resource: envelope + properties payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActionGroupResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ActionGroup>,
}

/// Paged list of action groups.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActionGroupList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ActionGroupResource>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// The action group body: a short name, an enabled flag, and one list
/// per receiver kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActionGroup {
    #[serde(rename = "groupShortName")]
    pub group_short_name: String,
    pub enabled: bool,
    #[serde(rename = "emailReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub email_receivers: Vec<EmailReceiver>,
    #[serde(rename = "smsReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub sms_receivers: Vec<SmsReceiver>,
    #[serde(rename = "webhookReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub webhook_receivers: Vec<WebhookReceiver>,
    #[serde(rename = "itsmReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub itsm_receivers: Vec<ItsmReceiver>,
    #[serde(rename = "azureAppPushReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub azure_app_push_receivers: Vec<AzureAppPushReceiver>,
    #[serde(rename = "automationRunbookReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub automation_runbook_receivers: Vec<AutomationRunbookReceiver>,
    #[serde(rename = "voiceReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub voice_receivers: Vec<VoiceReceiver>,
    #[serde(rename = "logicAppReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub logic_app_receivers: Vec<LogicAppReceiver>,
    #[serde(rename = "azureFunctionReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub azure_function_receivers: Vec<AzureFunctionReceiver>,
    #[serde(rename = "armRoleReceivers", default, skip_serializing_if = "Vec::is_empty")]
    pub arm_role_receivers: Vec<ArmRoleReceiver>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EmailReceiver {
    pub name: String,
    #[serde(rename = "emailAddress")]
    pub email_address: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<ReceiverStatus>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SmsReceiver {
    pub name: String,
    #[serde(rename = "countryCode")]
    pub country_code: String,
    #[serde(rename = "phoneNumber")]
    pub phone_number: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<ReceiverStatus>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebhookReceiver {
    pub name: String,
    #[serde(rename = "serviceUri")]
    pub service_uri: String,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ItsmReceiver {
    pub name: String,
    #[serde(rename = "workspaceId")]
    pub workspace_id: String,
    #[serde(rename = "connectionId")]
    pub connection_id: String,
    // Serialized ticket configuration blob, passed through as a string.
    #[serde(rename = "ticketConfiguration")]
    pub ticket_configuration: String,
    pub region: String,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureAppPushReceiver {
    pub name: String,
    #[serde(rename = "emailAddress")]
    pub email_address: String,
}

// NOTE(review): unlike the other receivers, `name` here is optional and
// listed after the required fields — this mirrors the generated order
// from the upstream spec, not an accident introduced locally.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutomationRunbookReceiver {
    #[serde(rename = "automationAccountId")]
    pub automation_account_id: String,
    #[serde(rename = "runbookName")]
    pub runbook_name: String,
    #[serde(rename = "webhookResourceId")]
    pub webhook_resource_id: String,
    #[serde(rename = "isGlobalRunbook")]
    pub is_global_runbook: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "serviceUri", default, skip_serializing_if = "Option::is_none")]
    pub service_uri: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VoiceReceiver {
    pub name: String,
    #[serde(rename = "countryCode")]
    pub country_code: String,
    #[serde(rename = "phoneNumber")]
    pub phone_number: String,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogicAppReceiver {
    pub name: String,
    #[serde(rename = "resourceId")]
    pub resource_id: String,
    #[serde(rename = "callbackUrl")]
    pub callback_url: String,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureFunctionReceiver {
    pub name: String,
    #[serde(rename = "functionAppResourceId")]
    pub function_app_resource_id: String,
    #[serde(rename = "functionName")]
    pub function_name: String,
    #[serde(rename = "httpTriggerUrl")]
    pub http_trigger_url: String,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ArmRoleReceiver {
    pub name: String,
    #[serde(rename = "roleId")]
    pub role_id: String,
}

/// Receiver activation state as reported by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ReceiverStatus {
    NotSpecified,
    Enabled,
    Disabled,
}

/// Request body for re-enabling a single receiver by name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnableRequest {
    #[serde(rename = "receiverName")]
    pub receiver_name: String,
}
// ---------------------------------------------------------------------
// Generated model types (AutoRust, continued): error/patch bodies and
// metric-baseline responses.
// ---------------------------------------------------------------------

/// Generic service error payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

/// PATCH body for an action group: tags and a properties payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActionGroupPatchBody {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ActionGroupPatch>,
}

/// The only patchable property: the enabled flag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActionGroupPatch {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
}

/// A string plus its display localization.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocalizableString {
    pub value: String,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")]
    pub localized_value: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BaselineMetadataValue {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<LocalizableString>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BaselineResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<LocalizableString>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub timestamps: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub baseline: Vec<Baseline>,
    // NOTE(review): "metdata" (no rename) looks like a typo for
    // "metadata", but it presumably mirrors the upstream swagger and is
    // the actual wire key — renaming the field or adding a serde rename
    // would change the JSON contract; confirm against the REST spec
    // before touching it.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub metdata: Vec<BaselineMetadataValue>,
    #[serde(rename = "predictionResultType", default, skip_serializing_if = "Option::is_none")]
    pub prediction_result_type: Option<baseline_response::PredictionResultType>,
    #[serde(rename = "errorType", default, skip_serializing_if = "Option::is_none")]
    pub error_type: Option<baseline_response::ErrorType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BaselineProperties>,
}

pub mod baseline_response {
    use super::*;
    // NOTE(review): generated with no variants — these enums cannot be
    // constructed or deserialized; the source spec apparently declared
    // the types without enumerating values.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PredictionResultType {}
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ErrorType {}
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BaselineProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timespan: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub interval: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub aggregation: Option<String>,
    #[serde(rename = "internalOperationId", default, skip_serializing_if = "Option::is_none")]
    pub internal_operation_id: Option<String>,
}

/// One baseline band: per-timestamp low/high thresholds at a sensitivity.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Baseline {
    pub sensitivity: baseline::Sensitivity,
    #[serde(rename = "lowThresholds")]
    pub low_thresholds: Vec<f64>,
    #[serde(rename = "highThresholds")]
    pub high_thresholds: Vec<f64>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub timestamps: Vec<String>,
    // Note the PascalCase wire keys here, unlike the camelCase keys on
    // BaselineResponse — kept as generated.
    #[serde(rename = "PredictionResultType", default, skip_serializing_if = "Option::is_none")]
    pub prediction_result_type: Option<baseline::PredictionResultType>,
    #[serde(rename = "ErrorType", default, skip_serializing_if = "Option::is_none")]
    pub error_type: Option<baseline::ErrorType>,
}

pub mod baseline {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Sensitivity {
        Low,
        Medium,
        High,
    }
    // Empty generated enums; see the note in `baseline_response`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PredictionResultType {}
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ErrorType {}
}

/// Input series for a baseline calculation request.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TimeSeriesInformation {
    pub sensitivities: Vec<String>,
    pub values: Vec<f64>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub timestamps: Vec<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CalculateBaselineResponse {
    #[serde(rename = "type")]
    pub type_: String,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub timestamps: Vec<String>,
    pub baseline: Vec<Baseline>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub statistics: Option<calculate_baseline_response::Statistics>,
    #[serde(rename = "internalOperationId", default, skip_serializing_if = "Option::is_none")]
    pub internal_operation_id: Option<String>,
    #[serde(rename = "errorType", default, skip_serializing_if = "Option::is_none")]
    pub error_type: Option<calculate_baseline_response::ErrorType>,
}

pub mod calculate_baseline_response {
    use super::*;
    /// Diagnostics about the baseline computation itself.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Statistics {
        #[serde(rename = "isEligible", default, skip_serializing_if = "Option::is_none")]
        pub is_eligible: Option<bool>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub status: Vec<String>,
        #[serde(rename = "seasonalityPeriod", default, skip_serializing_if = "Option::is_none")]
        pub seasonality_period: Option<i32>,
    }
    // Empty generated enum; see the note in `baseline_response`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ErrorType {}
}
|
// Auto-generated (svd2rust) accessors for the read-only ETH_MACVR version
// register. Do not hand-edit logic; regenerate from the SVD instead.
#[doc = "Register `ETH_MACVR` reader"]
pub type R = crate::R<ETH_MACVR_SPEC>;
#[doc = "Field `SNPSVER` reader - SNPSVER"]
pub type SNPSVER_R = crate::FieldReader;
#[doc = "Field `USERVER` reader - USERVER"]
pub type USERVER_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:7 - SNPSVER"]
    #[inline(always)]
    pub fn snpsver(&self) -> SNPSVER_R {
        // Low byte of the register (bits 0..=7).
        SNPSVER_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - USERVER"]
    #[inline(always)]
    pub fn userver(&self) -> USERVER_R {
        // Second byte of the register (bits 8..=15).
        USERVER_R::new(((self.bits >> 8) & 0xff) as u8)
    }
}
#[doc = "The version register identifies the version of the Ethernet peripheral.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_macvr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_MACVR_SPEC;
impl crate::RegisterSpec for ETH_MACVR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eth_macvr::R`](R) reader structure"]
impl crate::Readable for ETH_MACVR_SPEC {}
#[doc = "`reset()` method sets ETH_MACVR to value 0x4042"]
impl crate::Resettable for ETH_MACVR_SPEC {
    const RESET_VALUE: Self::Ux = 0x4042;
}
|
use std::io;
use std::net::SocketAddr;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
use bytes::Bytes;
use failure::{format_err, Error};
use futures::prelude::*;
use futures::ready;
use log::{debug, info, warn};
use tokio::time::{delay_until, interval, Delay, Interval};
use crate::packet::Packet;
use crate::protocol::handshake::Handshake;
use crate::protocol::sender;
use crate::protocol::sender::{SenderAlgorithmAction, SenderMetrics};
use crate::{CongestCtrl, ConnectionSettings, SrtCongestCtrl};
/// A `Sink` of `(Instant, Bytes)` payloads that drives the SRT sender state
/// machine over an underlying packet transport `T`. Also a `Stream` of
/// periodic [`SenderMetrics`] reports.
pub struct SenderSink<T, CC> {
    /// The underlying transport: both a stream and a sink of
    /// `(Packet, SocketAddr)` pairs.
    sock: T,
    /// The SRT sender state machine that produces/consumes packets.
    sender: sender::Sender,
    /// The congestion control
    /// NOTE(review): stored but never consulted — `new` hands a fresh
    /// `SrtCongestCtrl` to the sender instead; confirm intent.
    _congest_ctrl: CC,
    /// The send timer
    snd_wait: Option<Delay>,
    /// The interval to report stats with
    stats_interval: Interval,
}
impl<T, CC> SenderSink<T, CC>
where
    T: Stream<Item = Result<(Packet, SocketAddr), Error>>
        + Sink<(Packet, SocketAddr), Error = Error>
        + Unpin,
    CC: CongestCtrl + Unpin,
{
    /// Create a sender over `sock` for an already-established connection.
    ///
    /// NOTE(review): `congest_ctrl` is stored but not passed to the sender —
    /// `SrtCongestCtrl` is hard-coded below; confirm intent.
    pub fn new(
        sock: T,
        congest_ctrl: CC,
        settings: ConnectionSettings,
        handshake: Handshake,
    ) -> SenderSink<T, CC> {
        info!(
            "Sending started to {:?}, with latency={:?}",
            settings.remote, settings.tsbpd_latency
        );
        SenderSink {
            sock,
            sender: sender::Sender::new(settings, handshake, SrtCongestCtrl),
            _congest_ctrl: congest_ctrl,
            snd_wait: None,
            stats_interval: interval(Duration::from_secs(1)),
        }
    }
    /// Set the interval to get statistics on
    /// Defaults to one second
    pub fn set_stats_interval(&mut self, ivl: Duration) {
        self.stats_interval = interval(ivl);
    }
    /// The connection settings this sender was created with.
    pub fn settings(&self) -> &ConnectionSettings {
        // Fixed: was `&self.sender.settings()`, a needless double reference
        // (`&&ConnectionSettings`) that only compiled via deref coercion
        // (clippy::needless_borrow).
        self.sender.settings()
    }
    /// The remote address packets are sent to.
    pub fn remote(&self) -> SocketAddr {
        self.sender.settings().remote
    }
    /// A snapshot of the sender's metrics.
    pub fn metrics(&self) -> SenderMetrics {
        self.sender.metrics()
    }
    /// Pin the underlying transport for polling.
    fn sock(&mut self) -> Pin<&mut T> {
        Pin::new(&mut self.sock)
    }
    /// Drain queued output packets from the sender into the transport while
    /// the transport is ready, then kick off a flush.
    fn send_packets(&mut self, cx: &mut Context) -> Result<(), Error> {
        while let Poll::Ready(()) = self.sock().as_mut().poll_ready(cx)? {
            match self.sender.pop_output() {
                Some(packet) => self.sock().start_send(packet)?,
                None => break,
            }
        }
        // Flush best-effort; Pending just means the transport will finish later.
        let _ = self.sock().poll_flush(cx)?;
        Ok(())
    }
    /// Returns `Ok(true)` only when both the transport and the sender state
    /// machine have nothing left to flush.
    fn check_sender_flushed(&mut self, cx: &mut Context) -> Result<bool, Error> {
        if let Poll::Ready(_) = self.sock().poll_flush(cx)? {
            // if everything is flushed, return Ok
            if self.sender.is_flushed() {
                return Ok(true);
            }
        }
        Ok(false)
    }
    /// Feed every packet currently available on the transport into the sender.
    ///
    /// Errors if the underlying stream ends (treated as an unexpected EOF).
    fn receive_packets(&mut self, cx: &mut Context) -> Result<(), Error> {
        // do we have any packets to handle?
        while let Poll::Ready(a) = self.sock().poll_next(cx) {
            match a {
                Some(Ok(packet)) => {
                    debug!("Got packet: {:?}", packet);
                    // NOTE(review): a malformed-but-decodable packet panics
                    // here instead of surfacing an error; consider propagating.
                    self.sender.handle_packet(packet, Instant::now()).unwrap();
                }
                Some(Err(e)) => warn!("Failed to decode packet: {:?}", e),
                // stream has ended, means shutdown
                None => {
                    return Err(format_err!("Unexpected EOF of underlying stream"));
                }
            }
        }
        Ok(())
    }
    /// Poll the send timer; returns `false` while it is still pending.
    /// When it fires, notify the sender and clear it.
    fn check_snd_timer(&mut self, cx: &mut Context) -> bool {
        if let Some(timer) = &mut self.snd_wait {
            match Pin::new(timer).poll(cx) {
                Poll::Pending => return false,
                Poll::Ready(_) => {
                    self.snd_wait = None;
                    self.sender.handle_snd_timer(Instant::now());
                }
            }
        }
        true
    }
    /// Translate the sender's next requested action into a poll result,
    /// arming the send timer or scheduling a re-poll as needed.
    fn process_next_action(&mut self, cx: &mut Context) -> Poll<Result<(), Error>> {
        use SenderAlgorithmAction::*;
        match self.sender.next_action() {
            WaitForData | WaitUntilAck => {
                // Nothing to do until more input/acks arrive; re-poll promptly.
                cx.waker().wake_by_ref();
                Poll::Pending
            }
            WaitUntil(t) => {
                self.snd_wait = Some(delay_until(t.into()));
                cx.waker().wake_by_ref();
                Poll::Pending
            }
            Close => Poll::Ready(Err(io::Error::new(
                io::ErrorKind::ConnectionAborted,
                "Connection received shutdown",
            )
            .into())),
        }
    }
    /// One step of the sink's flush loop: handle incoming packets, push
    /// outgoing ones, service the timer, then ask the sender what's next.
    fn poll_sink_flushed(&mut self, cx: &mut Context) -> Poll<Result<(), Error>> {
        if self.check_sender_flushed(cx)? {
            // TODO: this is wrong for KeepAlive
            debug!("Returning ready");
            return Poll::Ready(Ok(()));
        }
        self.receive_packets(cx)?;
        self.send_packets(cx)?;
        if !self.check_snd_timer(cx) {
            return Poll::Pending;
        }
        self.process_next_action(cx)
    }
    /// Ask the sender to close, push any final packets, then close the transport.
    fn poll_sink_closed(&mut self, cx: &mut Context) -> Poll<Result<(), Error>> {
        self.sender.handle_close(Instant::now());
        self.send_packets(cx)?;
        self.sock().poll_close(cx)
    }
}
/// Sink of timestamped payloads: each item is handed straight to the SRT
/// sender; `poll_flush` drives the whole send loop.
impl<T, CC> Sink<(Instant, Bytes)> for SenderSink<T, CC>
where
    T: Stream<Item = Result<(Packet, SocketAddr), Error>>
        + Sink<(Packet, SocketAddr), Error = Error>
        + Unpin,
    CC: CongestCtrl + Unpin,
{
    type Error = Error;
    fn start_send(mut self: Pin<&mut Self>, item: (Instant, Bytes)) -> Result<(), Error> {
        // Buffering happens inside the sender state machine; nothing is sent
        // on the wire until poll_flush runs.
        self.sender.handle_data(item);
        Ok(())
    }
    fn poll_ready(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Result<(), Error>> {
        // The sender accepts data unconditionally, so the sink is always ready.
        Poll::Ready(Ok(()))
    }
    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Error>> {
        self.get_mut().poll_sink_flushed(cx)
    }
    fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Error>> {
        // Flush everything first, then perform the shutdown handshake.
        ready!(self.as_mut().poll_sink_flushed(cx))?;
        self.as_mut().poll_sink_closed(cx)
    }
}
// Stats streaming: yields a metrics snapshot every `stats_interval` tick.
impl<T, CC> Stream for SenderSink<T, CC>
where
    T: Stream<Item = Result<(Packet, SocketAddr), Error>>
        + Sink<(Packet, SocketAddr), Error = Error>
        + Unpin,
    CC: CongestCtrl + Unpin,
{
    type Item = Result<SenderMetrics, Error>;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        // Wait for the next interval tick, then emit a fresh snapshot.
        // This stream never terminates (interval never yields None).
        ready!(Pin::new(&mut self.stats_interval).poll_next(cx));
        Poll::Ready(Some(Ok(self.metrics())))
    }
}
|
use esolangs::brainfuck::{brainfuck, brainfuck_include};
#[test]
// Runs Erik Bosman's classic mandelbrot.b — a heavy stress test of the
// brainfuck interpreter/compiler macro.
fn mandelbrot() {
    brainfuck_include!("tests/bf/mandelbrot.b");
}
#[test]
// Runs numwarp.b to exercise input-driven brainfuck programs.
fn numwarp() {
    brainfuck_include!("tests/bf/numwarp.b");
}
#[test]
// Fixed: the test was named `dfbi`, transposing the letters of the
// `dbfi.b` (brainfuck self-interpreter) program it actually runs.
fn dbfi() {
    brainfuck_include!("tests/bf/dbfi.b");
}
// http://brainfuck.org/tests.b
#[test]
// First program from tests.b: exercises basic I/O (`,`/`.`) and cell math.
fn test_1() {
    brainfuck! {
        >,>+++++++++,>+++++++++++[<++++++<++++++<+>>>-]<<.>.< < -.>.>.<<.
    }
}
#[test]
// Second program from tests.b: deeply nested loops and pointer movement.
fn test_2() {
    brainfuck! {
        ++++[>++++++<-]>[>+++++>+++++++<<-]>>++++<[[>[[>>+<<-]<]>>>-]>-[>+>+<<-]>]
        +++++[>+++++++<<++>-]>.<<.
    }
}
#[test]
// Third program from tests.b: starts with an unentered loop `[]` and mixes
// in non-command characters, checking they are ignored as comments.
fn test_3() {
    brainfuck! {
        []++++++++++[>>+>+>++++++[<<+<+++>>>-]<<<<-]
        "A*$";?@![#>>+<<]>[>>]<<<<[>++<[-]]>.>.
    }
}
|
use super::bit;
use super::timer::{Timers};
use super::cartridge::{Cartridge};
/// The Game Boy system bus: routes CPU reads/writes to RAM, the cartridge,
/// timer registers, and the interrupt registers (see the memory-map comment
/// at the bottom of this file).
pub struct Bus {
    // Work RAM bank C000-CFFF (4 KiB).
    ram1: [u8; 4 * 1024],
    // Work RAM bank D000-DFFF (4 KiB).
    ram2: [u8; 4 * 1024],
    // High RAM FF80-FFFE (127 bytes).
    highRam: [u8; 127],
    // Inserted cartridge; reads panic while this is `None`.
    cart: Option<Cartridge>,
    // IE register (FFFF).
    pub interruptEnableRegister: u8,
    // IF register (FF0F).
    pub interruptRequestRegister: u8,
    // Timer/divider registers (FF04-FF07).
    pub timerRegisters: Timers
}
/// Interrupt sources; the discriminant is the bit index used in the
/// IE/IF registers.
pub enum IntrFlags {
    VBlank = 0,
    LCD = 1,
    Timer = 2,
    Serial = 3,
    Joypad = 4
}
impl Bus {
    /// Create a bus with zeroed RAM, no cartridge, and fresh timer state.
    pub fn new() -> Self {
        Self {
            ram1: [0; 4 * 1024],
            ram2: [0; 4 * 1024],
            highRam: [0; 127],
            cart: None,
            interruptEnableRegister: 0,
            interruptRequestRegister: 0,
            timerRegisters: Timers::new(),
        }
    }
    /// Attach a cartridge; reads/writes to cartridge regions panic until this
    /// is called.
    pub fn insertCartridge(&mut self, c: Cartridge) {
        self.cart = Some(c);
    }
    /// Read one byte from the CPU address space.
    ///
    /// # Panics
    /// Panics on unimplemented regions (VRAM, OAM, LCD, ...), on cartridge
    /// access with no cartridge inserted, and on unusable memory.
    pub fn cpuRead(&self, addr: u16) -> u8 {
        match addr {
            // Fixed ROM bank.
            0x0000..= 0x3FFF => {
                match &self.cart {
                    Some(x) => x.readRom(addr),
                    None => panic!("Cartridge not inserted"),
                }
            },
            // Switchable ROM bank.
            0x4000..= 0x7FFF => {
                match &self.cart {
                    Some(x) => x.readRom(addr),
                    None => panic!("Cartridge not inserted"),
                }
            },
            0x8000..= 0x9FFF => {
                todo!("Vram not implemented")
            },
            // External (cartridge) RAM.
            0xA000..= 0xBFFF => {
                match &self.cart {
                    Some(x) => x.readRam(addr),
                    None => panic!("Cartridge not inserted"),
                }
            },
            0xC000..= 0xCFFF => {self.ram1[(addr & 0x0fff) as usize]},
            0xD000..= 0xDFFF => {self.ram2[(addr & 0x0fff) as usize]},
            // Echo RAM: mirrors C000-DDFF.
            0xE000..= 0xFDFF => {
                if addr <= 0xEFFF {
                    self.ram1[(addr & 0x0fff) as usize]
                } else {
                    self.ram2[(addr & 0x0fff) as usize]
                }
            },
            0xFE00..= 0xFE9F => {
                todo!("Sprite table not implemented");
            },
            0xFEA0..= 0xFEFF => {
                panic!("Unusable memory")
            },
            // I/O registers, dispatched on the low byte.
            0xFF00..= 0xFF7F => {
                match addr & 0x00FF {
                    0x00 => {todo!("Controller not implemented")},
                    0x01..= 0x02 => {todo!("Communication not implemented")},
                    // Timer registers DIV/TIMA/TMA/TAC.
                    0x04..= 0x07 => {
                        match addr & 0x000F {
                            // DIV exposes only the high byte of the internal counter.
                            0x4 => {((self.timerRegisters.divRegister & 0xFF00) >> 8) as u8},
                            0x5 => {self.timerRegisters.timaRegister},
                            0x6 => {self.timerRegisters.tmaRegister},
                            0x7 => {self.timerRegisters.tacRegister},
                            _ => {0}
                        }},
                    0x0F => {self.interruptRequestRegister},
                    0x10..= 0x26 => {/* Sound, not implementing*/0},
                    0x30..= 0x3F => {/* Waveform RAM, not implementing*/0},
                    0x40..= 0x4B => {todo!("LCD register not implemented")},
                    0x4F => {/* GBC VRAM Bank Select */0},
                    0x50 => {/* Set to disable boot ROM ??*/0},
                    0x51..= 0x55 => {/* GBC HDMA */0},
                    0x68..= 0x69 => {/* GBC BCP/OCP */0},
                    0x70 => {/* GBC WRAM Bank Select */0}
                    // Fixed: this is the read path, but the message said
                    // "Unknown write to".
                    _ => {panic!("Unknown read from {}", addr)}
                }
            },
            0xFF80..= 0xFFFE => {
                self.highRam[((addr & 0x00ff) - 0x0080) as usize]
            },
            0xFFFF => {
                self.interruptEnableRegister
            }
        }
    }
    /// Write one byte to the CPU address space.
    ///
    /// # Panics
    /// Panics on ROM writes, unimplemented regions, cartridge access with no
    /// cartridge inserted, and unusable memory.
    pub fn cpuWrite(&mut self, addr: u16, data: u8) {
        match addr {
            0x0000..= 0x3FFF => {panic!("Tried to write to ROM")},
            0x4000..= 0x7FFF => {panic!("Tried to write to ROM")},
            0x8000..= 0x9FFF => {
                todo!("Vram not implemented")
            },
            // External (cartridge) RAM.
            0xA000..= 0xBFFF => {
                match &mut self.cart {
                    Some(x) => x.writeRam(addr, data),
                    None => panic!("Cartridge not inserted"),
                }
            },
            0xC000..= 0xCFFF => {
                self.ram1[(addr & 0x0fff) as usize] = data;
            },
            0xD000..= 0xDFFF => {
                self.ram2[(addr & 0x0fff) as usize] = data;
            },
            // Echo RAM: mirrors C000-DDFF.
            0xE000..= 0xFDFF => {
                if addr <= 0xEFFF {
                    self.ram1[(addr & 0x0fff) as usize] = data;
                } else {
                    self.ram2[(addr & 0x0fff) as usize] = data;
                }
            },
            0xFE00..= 0xFE9F => {
                todo!("Sprite table not implemented");
            },
            0xFEA0..= 0xFEFF => {
                panic!("Unusable memory");
            },
            // I/O registers, dispatched on the low byte.
            0xFF00..= 0xFF7F => {
                match addr & 0x00FF {
                    0x00 => {todo!("Controller not implemented")},
                    0x01..= 0x02 => {todo!("Communication not implemented")},
                    // Timer registers DIV/TIMA/TMA/TAC.
                    0x04..= 0x07 => {
                        match addr & 0x000F {
                            // Any write to DIV resets the counter.
                            0x4 => {self.timerRegisters.divRegister = 0},
                            0x5 => {self.timerRegisters.timaWrite(data);},
                            0x6 => {self.timerRegisters.tmaWrite(data)},
                            0x7 => {self.timerRegisters.tacRegister = data},
                            _ => {}
                        }},
                    0x0F => {self.interruptRequestRegister = data},
                    0x10..= 0x26 => {/* Sound, not implementing*/},
                    0x30..= 0x3F => {/* Waveform RAM, not implementing*/},
                    0x40..= 0x4B => {todo!("LCD register not implemented")},
                    0x4F => {/* GBC VRAM Bank Select */},
                    0x50 => {/* Set to disable boot ROM ??*/},
                    0x51..= 0x55 => {/* GBC HDMA */},
                    0x68..= 0x69 => {/* GBC BCP/OCP */},
                    0x70 => {/* GBC WRAM Bank Select */}
                    _ => {panic!("Unknown write to {}", addr)}
                }
            },
            0xFF80..= 0xFFFE => {
                self.highRam[((addr & 0x00ff) - 0x0080) as usize] = data;
            },
            0xFFFF => {
                self.interruptEnableRegister = data;
            }
        }
    }
    /// Set interrupt `i`'s bit in the IF register.
    pub fn requestInterrupt(&mut self, i: IntrFlags) {
        self.interruptRequestRegister = bit::set(self.interruptRequestRegister, i as usize);
    }
    /// Test interrupt `i`'s bit in the IF register.
    pub fn getInterruptRequest(&self, i: IntrFlags) -> bool {
        bit::get(self.interruptRequestRegister, i as usize)
    }
    /// Clear interrupt `i`'s bit in the IF register.
    pub fn resetInterruptRequest(&mut self, i: IntrFlags) {
        self.interruptRequestRegister = bit::clr(self.interruptRequestRegister, i as usize);
    }
    /// Test interrupt `i`'s bit in the IE register.
    pub fn getInterruptEnable(&self, i: IntrFlags) -> bool{
        bit::get(self.interruptEnableRegister, i as usize)
    }
}
/*
0000 3FFF 16 KiB ROM bank 00 From cartridge, usually a fixed bank
4000 7FFF 16 KiB ROM Bank 01~NN From cartridge, switchable bank via mapper (if any)
8000 9FFF 8 KiB Video RAM (VRAM) In CGB mode, switchable bank 0/1
A000 BFFF 8 KiB External RAM From cartridge, switchable bank if any
C000 CFFF 4 KiB Work RAM (WRAM)
D000 DFFF 4 KiB Work RAM (WRAM) In CGB mode, switchable bank 1~7
E000 FDFF Mirror of C000~DDFF (ECHO RAM) Nintendo says use of this area is prohibited.
FE00 FE9F Sprite attribute table (OAM)
FEA0 FEFF Not Usable Nintendo says use of this area is prohibited
FF00 FF7F I/O Registers
FF80 FFFE High RAM (HRAM)
FFFF FFFF Interrupts Enable Register (IE)
*/
|
/*
Copyright 2019-2023 Didier Plaindoux
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std::marker::PhantomData;
use crate::parser::parser::Combine;
use crate::parser::parser::Parse;
use crate::parser::response::Response::Reject;
use crate::parser::response::Response::Success;
use crate::stream::position::Position;
use crate::stream::stream::Stream;
/// Adapts a parser `P` over an input stream `S` into a stream of parsed
/// values `A`: each `next` runs the parser once against the remaining input.
/// The `PhantomData` fields pin down the otherwise-unused `A` and `L` params.
pub struct ParserStream<'a, P, A, S, L>(&'a P, S, PhantomData<A>, PhantomData<L>)
where
    P: Combine<A> + Parse<A, S>,
    S: Stream<Pos = L>,
    L: Position;
impl<'a, P, A, S, L> ParserStream<'a, P, A, S, L>
where
    P: Combine<A> + Parse<A, S>,
    S: Stream<Pos = L>,
    L: Position,
{
    /// Builds a parser-driven stream from the parser `p` and the underlying
    /// input stream `s`.
    #[inline]
    pub fn new(p: &'a P, s: S) -> Self {
        Self(p, s, PhantomData, PhantomData)
    }
}
impl<'a, P, A, S, L> Clone for ParserStream<'a, P, A, S, L>
where
P: Combine<A> + Parse<A, S>,
S: Stream<Pos = L>,
L: Position,
{
fn clone(&self) -> Self {
ParserStream(self.0, self.1.clone(), PhantomData, PhantomData)
}
}
impl<'a, P, A, S, L> Stream for ParserStream<'a, P, A, S, L>
where
    P: Combine<A> + Parse<A, S>,
    S: Stream<Pos = L>,
    L: Position,
{
    type Item = A;
    type Pos = L;
    /// Delegates to the position of the underlying input stream.
    fn position(&self) -> Self::Pos {
        self.1.position()
    }
    /// Runs the parser once against the current input. On success, yields the
    /// parsed value together with a stream advanced past the consumed input;
    /// on rejection, yields `None` and an unchanged copy of this stream.
    fn next(&self) -> (Option<Self::Item>, Self) {
        let outcome = self.0.parse(self.1.clone());
        match outcome {
            Success(value, remaining, _) => (Some(value), ParserStream::new(self.0, remaining)),
            Reject(_, _) => (None, self.clone()),
        }
    }
}
|
/// A smoothed ("squircle") corner defined by a corner point and its two
/// neighbors, with a fillet radius and a smoothing factor.
#[derive(Debug, Copy, Clone)]
pub struct Squircle {
    /// Point preceding the corner.
    pub p0: kurbo::Point,
    /// The corner point itself.
    pub p1: kurbo::Point,
    /// Point following the corner.
    pub p2: kurbo::Point,
    /// Fillet radius; values <= 0 disable rounding (see `to_curve`).
    pub radius: f64,
    /// Extra smoothing factor `xi` — presumably in `0..=1` as in Figma-style
    /// corner smoothing; TODO confirm expected range.
    pub smoothness: f64,
}
impl Squircle {
    /// Expands the corner into a list of control points describing a smoothed
    /// fillet: an entry smoothing segment, a circular-arc section approximated
    /// by two cubic Béziers, and an exit smoothing segment (returned in order
    /// from the `p0` side to the `p2` side).
    ///
    /// NOTE(review): the construction looks like Figma-style "corner
    /// smoothing" (straight + smoothing Bézier + arc); the exact spline
    /// interpretation of the returned points is defined by the caller —
    /// confirm there which points are on-curve vs. control points.
    pub fn to_curve(&self) -> Vec<kurbo::Point> {
        let Squircle {
            p0,
            p1,
            p2,
            radius,
            smoothness: xi,
        } = *self;
        // No rounding requested: the corner degenerates to the single point p1.
        if radius <= 0.0 {
            return vec![p1];
        }
        // Unit vectors from the corner towards each neighbor.
        let v0 = p0 - p1;
        let v1 = p2 - p1;
        let length0 = v0.hypot();
        let length1 = v1.hypot();
        let n0 = v0 / length0;
        let n1 = v1 / length1;
        // Opening angle of the corner at p1.
        let alpha = n0.dot(n1).acos();
        // Distance from p1 to the fillet tangent point, per unit radius.
        let t = 1.0 / (alpha / 2.0).tan();
        // clamp radius and amount of smoothing
        // so the fillet never extends past either adjacent segment.
        let smoothing_length = (t + xi) * radius;
        let smoothing = smoothing_length.min(length0).min(length1);
        let (radius, xi) = if smoothing < t * radius {
            // Not enough room even for the plain fillet: shrink the radius
            // and drop the extra smoothing entirely.
            (smoothing / t, 0.0)
        } else {
            // Keep the radius, reduce smoothing to whatever room remains.
            (radius, (smoothing / radius - t).max(0.0))
        };
        // Tangent point on the p0 side and the arc center, offset
        // perpendicular to n0 on the inside of the turn.
        let pc = p1 + n0 * radius * t;
        let ccw = v0.cross(v1) > 0.0;
        let n = if ccw {
            kurbo::Vec2 { x: -n0.y, y: n0.x }
        } else {
            kurbo::Vec2 { x: n0.y, y: -n0.x }
        };
        let center = pc + n * radius;
        // Split the total turn angle: phi0 on each side is absorbed by the
        // smoothing Béziers, phi1 remains as a true circular arc.
        let phi = std::f64::consts::PI - alpha;
        let phi0 = 0.5 * phi * xi;
        let phi1 = phi - 2.0 * phi0;
        // Signs depend on turn direction.
        let phi0 = if ccw { -phi0 } else { phi0 };
        let phi1 = if ccw { -phi1 } else { phi1 };
        let (s0, c0) = phi0.sin_cos();
        let (s1, c1) = (phi0 + phi1).sin_cos();
        let vc = (0.5 * phi0.abs()).tan();
        let ab = (vc + xi) * radius;
        // Unit vectors from the arc center to the arc's two endpoints.
        let sp0 = kurbo::Vec2 {
            x: (c0 * -n.x + s0 * n.y),
            y: (s0 * -n.x - c0 * n.y),
        };
        let sp1 = kurbo::Vec2 {
            x: (c1 * -n.x + s1 * n.y),
            y: (s1 * -n.x - c1 * n.y),
        };
        // Entry (c0x, from the p0 side) and exit (c1x, towards p2) smoothing
        // control points.
        let c00 = p1 + n0 * smoothing;
        let c01 = p1 + n0 * (smoothing - 2.0 * ab / 3.0);
        let c02 = p1 + n0 * ((t - vc) * radius);
        let c03 = center + radius * sp0;
        let c10 = p1 + n1 * smoothing;
        let c11 = p1 + n1 * (smoothing - 2.0 * ab / 3.0);
        let c12 = p1 + n1 * ((t - vc) * radius);
        let c13 = center + radius * sp1;
        // Midpoint of the arc; the arc is rendered as two cubic Béziers with
        // the standard 4/3*tan(theta/4) control-point offset.
        let phi2 = (phi1) / 2.0;
        let (s2, c2) = (phi0 + phi2).sin_cos();
        let sp2 = kurbo::Vec2 {
            x: (c2 * -n.x + s2 * n.y),
            y: (s2 * -n.x - c2 * n.y),
        };
        let alp = 4.0 / 3.0 * (phi2 / 4.0).tan();
        let x02 = center + radius * sp2;
        let x00 = c03
            + radius
                * kurbo::Vec2 {
                    x: -alp * sp0.y,
                    y: alp * sp0.x,
                };
        let x01 = x02
            + radius
                * kurbo::Vec2 {
                    x: alp * sp2.y,
                    y: -alp * sp2.x,
                };
        let x03 = x02
            + radius
                * kurbo::Vec2 {
                    x: -alp * sp2.y,
                    y: alp * sp2.x,
                };
        let x04 = c13
            + radius
                * kurbo::Vec2 {
                    x: alp * sp1.y,
                    y: -alp * sp1.x,
                };
        vec![
            c00, c01, c02, c03, x00, x01, x02, x03, x04, c13, c12, c11, c10,
        ]
    }
}
|
use crate::Number;
use super::Term;
///A term which divides one stored term by another
///A term which divides one stored term by another
pub struct FractionTerm<T: Number>
{
    // Term evaluated as the dividend.
    numerator: Box<dyn Term<T> + Send + Sync>,
    // Term evaluated as the divisor; must never evaluate to zero.
    denominator: Box<dyn Term<T> + Send + Sync>
}
impl<T: Number> FractionTerm<T>
{
    ///Creates a fraction term from the given numerator and denominator terms
    ///
    /// # Examples
    ///
    /// ```
    /// use crate::parametrizer::term::fractionterm::FractionTerm;
    /// use crate::parametrizer::term::variableterm::VariableTerm;
    /// use crate::parametrizer::term::constantterm::ConstantTerm;
    /// use crate::parametrizer::term::Term;
    ///
    /// let const1 = ConstantTerm::new(6);
    /// let const2 = ConstantTerm::new(2);
    /// let const3 = ConstantTerm::new(10);
    /// let variable = VariableTerm::new();
    ///
    /// let frac1 = FractionTerm::new(Box::new(const1), Box::new(const2));
    /// let frac2 = FractionTerm::new(Box::new(const3), Box::new(variable));
    ///
    /// assert_eq!(3, frac1.evaluate(1));
    /// assert_eq!(2, frac2.evaluate(5));
    /// ```
    ///
    /// ```should_panic
    /// use crate::parametrizer::term::fractionterm::FractionTerm;
    /// use crate::parametrizer::term::variableterm::VariableTerm;
    /// use crate::parametrizer::term::constantterm::ConstantTerm;
    /// use crate::parametrizer::term::Term;
    ///
    /// let const1 = ConstantTerm::new(6);
    /// let variable = VariableTerm::new();
    ///
    /// let frac1 = FractionTerm::new(Box::new(const1), Box::new(variable));
    /// frac1.evaluate(0);
    /// ```
    pub fn new(numerator: Box<dyn Term<T> + Send + Sync>, denominator: Box<dyn Term<T> + Send + Sync>) -> FractionTerm<T>
    {
        Self { numerator, denominator }
    }
}
impl<T: Number> Term<T> for FractionTerm<T>
{
    ///Divides the numerator by the denominator.
    ///
    /// # Panics
    /// Panics if the denominator evaluates to 0
    fn evaluate(&self, t: T) -> T
    {
        let d = self.denominator.evaluate(t);
        if d == T::zero() //If the denominator is 0, panic
        {
            // Fixed: the message previously said "InverseTerm", which is not
            // this type's name and would mislead anyone debugging the panic.
            panic!("Cannot divide by 0 in parametrized FractionTerm. Make sure the function you set as your denominator is never zero on your inputs.");
        }
        else
        {
            return self.numerator.evaluate(t) / d;
        }
    }
}
|
// Auto-generated (svd2rust) accessors for the HASH_CSR19 context-swap
// register. Do not hand-edit logic; regenerate from the SVD instead.
#[doc = "Register `HASH_CSR19` reader"]
pub type R = crate::R<HASH_CSR19_SPEC>;
#[doc = "Register `HASH_CSR19` writer"]
pub type W = crate::W<HASH_CSR19_SPEC>;
#[doc = "Field `CS19` reader - CS19"]
pub type CS19_R = crate::FieldReader<u32>;
#[doc = "Field `CS19` writer - CS19"]
pub type CS19_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - CS19"]
    #[inline(always)]
    pub fn cs19(&self) -> CS19_R {
        // The single field spans the whole 32-bit register; no masking needed.
        CS19_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - CS19"]
    #[inline(always)]
    #[must_use]
    pub fn cs19(&mut self) -> CS19_W<HASH_CSR19_SPEC, 0> {
        CS19_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "HASH context swap registers\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hash_csr19::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hash_csr19::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HASH_CSR19_SPEC;
impl crate::RegisterSpec for HASH_CSR19_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`hash_csr19::R`](R) reader structure"]
impl crate::Readable for HASH_CSR19_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hash_csr19::W`](W) writer structure"]
impl crate::Writable for HASH_CSR19_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HASH_CSR19 to value 0"]
impl crate::Resettable for HASH_CSR19_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use super::{traits::*, transaction::Transaction};
use crate::{Geometry, TableFlags, WriteMap, RO, RW};
use anyhow::Context;
use std::{
collections::BTreeMap,
fs::DirBuilder,
ops::Deref,
path::{Path, PathBuf},
};
use tempfile::tempdir;
/// Where the database lives on disk: either a caller-supplied path or a
/// temporary directory that is removed when dropped.
#[derive(Debug)]
enum DbFolder {
    Persisted(std::path::PathBuf),
    Temporary(tempfile::TempDir),
}
impl DbFolder {
    /// The on-disk directory backing this database, regardless of variant.
    fn path(&self) -> &Path {
        match self {
            Self::Temporary(dir) => dir.path(),
            Self::Persisted(path) => path,
        }
    }
}
/// An opened MDBX environment together with the folder that backs it, so a
/// temporary folder lives exactly as long as the database.
#[derive(Debug)]
pub struct Database {
    inner: crate::Database<WriteMap>,
    folder: DbFolder,
}
impl Database {
    /// The on-disk directory backing this database.
    pub fn path(&self) -> &Path {
        self.folder.path()
    }
    /// Configure the builder's table count and access flags, then open the
    /// environment at `folder`.
    fn open_db(
        mut builder: crate::DatabaseBuilder<WriteMap>,
        folder: DbFolder,
        chart: &DatabaseChart,
        read_only: bool,
    ) -> anyhow::Result<Self> {
        // At least one table slot even for an empty chart.
        builder.set_max_tables(std::cmp::max(chart.len(), 1));
        builder.set_flags(crate::DatabaseFlags {
            mode: if read_only {
                crate::Mode::ReadOnly
            } else {
                crate::Mode::ReadWrite {
                    sync_mode: crate::SyncMode::Durable,
                }
            },
            no_rdahead: true,
            coalesce: true,
            ..Default::default()
        });
        Ok(Self {
            inner: builder.open(folder.path()).with_context(|| {
                format!("failed to open database at {}", folder.path().display())
            })?,
            folder,
        })
    }
    /// Open (read-only) or create-and-initialize (read-write) the database,
    /// creating every table listed in `chart` in the read-write case.
    fn new(chart: &DatabaseChart, folder: DbFolder, read_only: bool) -> anyhow::Result<Self> {
        let mut builder = crate::Database::<WriteMap>::new();
        // NOTE(review): this value is overwritten by `open_db`'s
        // `set_max_tables(max(len, 1))` — presumably redundant; confirm.
        builder.set_max_tables(chart.len());
        builder.set_geometry(Geometry {
            size: Some(..isize::MAX as usize),
            growth_step: None,
            shrink_threshold: None,
            page_size: None,
        });
        builder.set_rp_augment_limit(16 * 256 * 1024);
        if read_only {
            Self::open_db(builder, folder, chart, true)
        } else {
            // Best-effort directory creation; `open` reports the real error.
            let _ = DirBuilder::new().recursive(true).create(folder.path());
            let this = Self::open_db(builder, folder, chart, false)?;
            // Create all chart tables up front in a single transaction.
            let tx = this.inner.begin_rw_txn()?;
            for (table, settings) in chart {
                tx.create_table(
                    Some(table),
                    if settings.dup_sort {
                        TableFlags::DUP_SORT
                    } else {
                        TableFlags::default()
                    },
                )?;
            }
            tx.commit()?;
            Ok(this)
        }
    }
    /// Create a read-write database at `path`, or in a temporary directory
    /// (deleted on drop) when `path` is `None`.
    pub fn create(chart: &DatabaseChart, path: Option<PathBuf>) -> anyhow::Result<Database> {
        let folder = if let Some(path) = path {
            DbFolder::Persisted(path)
        } else {
            let path = tempdir()?;
            DbFolder::Temporary(path)
        };
        Self::new(chart, folder, false)
    }
    /// Open an existing database at `path` read-only.
    pub fn open(chart: &DatabaseChart, path: &Path) -> anyhow::Result<Database> {
        Self::new(chart, DbFolder::Persisted(path.to_path_buf()), true)
    }
}
/// Expose the raw MDBX environment's API directly on `Database`.
impl Deref for Database {
    type Target = crate::Database<WriteMap>;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl Database {
    /// Start a read-only transaction.
    pub fn begin_read(&self) -> anyhow::Result<Transaction<'_, RO>> {
        let inner = self.inner.begin_ro_txn()?;
        Ok(Transaction { inner })
    }
    /// Start a read-write transaction.
    pub fn begin_readwrite(&self) -> anyhow::Result<Transaction<'_, RW>> {
        let inner = self.inner.begin_rw_txn()?;
        Ok(Transaction { inner })
    }
}
/// Wraps a typed [`Table`] so its rows can be accessed as raw byte vectors
/// while keeping the same table name.
#[derive(Debug)]
pub struct UntypedTable<T>(pub T)
where
    T: Table;
/// Same table name as `T`, but keys/values/seek-keys are raw `Vec<u8>`.
impl<T> Table for UntypedTable<T>
where
    T: Table,
{
    const NAME: &'static str = T::NAME;
    type Key = Vec<u8>;
    type Value = Vec<u8>;
    type SeekKey = Vec<u8>;
}
impl<T> UntypedTable<T>
where
    T: Table,
{
    /// Encode a typed key into the wire representation used by the raw table.
    pub fn encode_key(key: T::Key) -> <<T as Table>::Key as Encodable>::Encoded {
        key.encode()
    }
    /// Decode raw bytes back into the typed key.
    pub fn decode_key(encoded: &[u8]) -> anyhow::Result<T::Key>
    where
        <T as Table>::Key: Decodable,
    {
        <T::Key as Decodable>::decode(encoded)
    }
    /// Encode a typed value into its wire representation.
    pub fn encode_value(value: T::Value) -> <<T as Table>::Value as Encodable>::Encoded {
        value.encode()
    }
    /// Decode raw bytes back into the typed value.
    pub fn decode_value(encoded: &[u8]) -> anyhow::Result<T::Value> {
        <T::Value as Decodable>::decode(encoded)
    }
    /// Encode a typed seek key into its wire representation.
    pub fn encode_seek_key(value: T::SeekKey) -> <<T as Table>::SeekKey as Encodable>::Encoded {
        value.encode()
    }
}
/// Declares a unit struct implementing [`Table`](crate::orm::Table) with the
/// given key/seek-key/value types, plus `Display` and an `untyped()` helper.
/// The two-type form (`$key => $value`) reuses the key as the seek key.
#[macro_export]
macro_rules! table {
    // Full form: explicit seek-key type.
    ($(#[$docs:meta])+ ( $name:ident ) $key:ty [ $seek_key:ty ] => $value:ty) => {
        $(#[$docs])+
        ///
        #[doc = concat!("Takes [`", stringify!($key), "`] as a key and returns [`", stringify!($value), "`]")]
        #[derive(Clone, Copy, Debug, Default)]
        pub struct $name;
        impl $crate::orm::Table for $name {
            const NAME: &'static str = stringify!($name);
            type Key = $key;
            type SeekKey = $seek_key;
            type Value = $value;
        }
        impl $name {
            pub const fn untyped(self) -> $crate::orm::UntypedTable<Self> {
                $crate::orm::UntypedTable(self)
            }
        }
        impl std::fmt::Display for $name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "{}", <Self as $crate::orm::Table>::NAME)
            }
        }
    };
    // Short form: the seek key defaults to the key type.
    ($(#[$docs:meta])+ ( $name:ident ) $key:ty => $value:ty) => {
        table!(
            $(#[$docs])+
            ( $name ) $key [ $key ] => $value
        );
    };
}
/// Like [`table!`], but additionally implements `DupSort` with the given seek
/// value type. The shorter forms default the seek key to the key type and the
/// seek value to the value type.
#[macro_export]
macro_rules! dupsort {
    // Full form: explicit seek key and seek value.
    ($(#[$docs:meta])+ ( $table_name:ident ) $key:ty [$seek_key:ty] => $value:ty [$seek_value:ty] ) => {
        table!(
            $(#[$docs])+
            ///
            #[doc = concat!("`DUPSORT` table with seek value type being: [`", stringify!($seek_value), "`].")]
            ( $table_name ) $key [$seek_key] => $value
        );
        impl $crate::orm::DupSort for $table_name {
            type SeekValue = $seek_value;
        }
    };
    // Seek value defaults to the value type.
    ($(#[$docs:meta])+ ( $table_name:ident ) $key:ty [$seek_key:ty] => $value:ty ) => {
        dupsort!(
            $(#[$docs])+
            ( $table_name ) $key [$seek_key] => $value [$value]
        );
    };
    // Seek key defaults to the key type.
    ($(#[$docs:meta])+ ( $table_name:ident ) $key:ty => $value:ty [$seek_value:ty] ) => {
        dupsort!(
            $(#[$docs])+
            ( $table_name ) $key [$key] => $value [$seek_value]
        );
    };
    // Both default.
    ($(#[$docs:meta])+ ( $table_name:ident ) $key:ty => $value:ty ) => {
        dupsort!(
            $(#[$docs])+
            ( $table_name ) $key [$key] => $value [$value]
        );
    };
}
/// Per-table creation options.
#[derive(Clone, Debug, Default)]
pub struct TableSettings {
    /// Whether the table is created with MDBX's `DUP_SORT` flag.
    pub dup_sort: bool,
}
/// Contains settings for each table in the database to be created or opened.
pub type DatabaseChart = BTreeMap<&'static str, TableSettings>;
/// Produces the `(name, settings)` pair for a table type, suitable for
/// collecting into a [`DatabaseChart`]; `dup_sort` is detected at compile
/// time from whether the type implements `DupSort`.
#[macro_export]
macro_rules! table_info {
    ($t:ty) => {
        (
            <$t as $crate::orm::Table>::NAME,
            $crate::orm::TableSettings {
                dup_sort: $crate::impls::impls!($t: $crate::orm::DupSort),
            },
        )
    };
}
|
// Auto-generated (svd2rust) reader/writer type aliases for the GINTSTS core
// interrupt status register. Fields with only a `_R` alias are read-only;
// fields with both `_R` and `_W` are write-one-to-clear style status bits.
// Do not hand-edit; regenerate from the SVD instead.
#[doc = "Register `GINTSTS` reader"]
pub type R = crate::R<GINTSTS_SPEC>;
#[doc = "Register `GINTSTS` writer"]
pub type W = crate::W<GINTSTS_SPEC>;
#[doc = "Field `CMOD` reader - Current mode of operation"]
pub type CMOD_R = crate::BitReader;
#[doc = "Field `MMIS` reader - Mode mismatch interrupt"]
pub type MMIS_R = crate::BitReader;
#[doc = "Field `MMIS` writer - Mode mismatch interrupt"]
pub type MMIS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OTGINT` reader - OTG interrupt"]
pub type OTGINT_R = crate::BitReader;
#[doc = "Field `SOF` reader - Start of frame"]
pub type SOF_R = crate::BitReader;
#[doc = "Field `SOF` writer - Start of frame"]
pub type SOF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RXFLVL` reader - RxFIFO non-empty"]
pub type RXFLVL_R = crate::BitReader;
#[doc = "Field `NPTXFE` reader - Non-periodic TxFIFO empty"]
pub type NPTXFE_R = crate::BitReader;
#[doc = "Field `GINAKEFF` reader - Global IN non-periodic NAK effective"]
pub type GINAKEFF_R = crate::BitReader;
#[doc = "Field `GOUTNAKEFF` reader - Global OUT NAK effective"]
pub type GOUTNAKEFF_R = crate::BitReader;
#[doc = "Field `ESUSP` reader - Early suspend"]
pub type ESUSP_R = crate::BitReader;
#[doc = "Field `ESUSP` writer - Early suspend"]
pub type ESUSP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USBSUSP` reader - USB suspend"]
pub type USBSUSP_R = crate::BitReader;
#[doc = "Field `USBSUSP` writer - USB suspend"]
pub type USBSUSP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USBRST` reader - USB reset"]
pub type USBRST_R = crate::BitReader;
#[doc = "Field `USBRST` writer - USB reset"]
pub type USBRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ENUMDNE` reader - Enumeration done"]
pub type ENUMDNE_R = crate::BitReader;
#[doc = "Field `ENUMDNE` writer - Enumeration done"]
pub type ENUMDNE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ISOODRP` reader - Isochronous OUT packet dropped interrupt"]
pub type ISOODRP_R = crate::BitReader;
#[doc = "Field `ISOODRP` writer - Isochronous OUT packet dropped interrupt"]
pub type ISOODRP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EOPF` reader - End of periodic frame interrupt"]
pub type EOPF_R = crate::BitReader;
#[doc = "Field `EOPF` writer - End of periodic frame interrupt"]
pub type EOPF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IEPINT` reader - IN endpoint interrupt"]
pub type IEPINT_R = crate::BitReader;
#[doc = "Field `OEPINT` reader - OUT endpoint interrupt"]
pub type OEPINT_R = crate::BitReader;
#[doc = "Field `IISOIXFR` reader - Incomplete isochronous IN transfer"]
pub type IISOIXFR_R = crate::BitReader;
#[doc = "Field `IISOIXFR` writer - Incomplete isochronous IN transfer"]
pub type IISOIXFR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IPXFR_INCOMPISOOUT` reader - Incomplete periodic transfer(Host mode)/Incomplete isochronous OUT transfer(Device mode)"]
pub type IPXFR_INCOMPISOOUT_R = crate::BitReader;
#[doc = "Field `IPXFR_INCOMPISOOUT` writer - Incomplete periodic transfer(Host mode)/Incomplete isochronous OUT transfer(Device mode)"]
pub type IPXFR_INCOMPISOOUT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RSTDET` reader - Reset detected interrupt"]
pub type RSTDET_R = crate::BitReader;
#[doc = "Field `RSTDET` writer - Reset detected interrupt"]
pub type RSTDET_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HPRTINT` reader - Host port interrupt"]
pub type HPRTINT_R = crate::BitReader;
#[doc = "Field `HCINT` reader - Host channels interrupt"]
pub type HCINT_R = crate::BitReader;
#[doc = "Field `PTXFE` reader - Periodic TxFIFO empty"]
pub type PTXFE_R = crate::BitReader;
#[doc = "Field `CIDSCHG` reader - Connector ID status change"]
pub type CIDSCHG_R = crate::BitReader;
#[doc = "Field `CIDSCHG` writer - Connector ID status change"]
pub type CIDSCHG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DISCINT` reader - Disconnect detected interrupt"]
pub type DISCINT_R = crate::BitReader;
#[doc = "Field `DISCINT` writer - Disconnect detected interrupt"]
pub type DISCINT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SRQINT` reader - Session request/new session detected interrupt"]
pub type SRQINT_R = crate::BitReader;
#[doc = "Field `SRQINT` writer - Session request/new session detected interrupt"]
pub type SRQINT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WKUPINT` reader - Resume/remote wakeup detected interrupt"]
pub type WKUPINT_R = crate::BitReader;
#[doc = "Field `WKUPINT` writer - Resume/remote wakeup detected interrupt"]
pub type WKUPINT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // Each accessor extracts one status bit from the captured register value
    // (`self.bits`) by shifting the documented bit position down to bit 0 and
    // masking. The gaps in bit numbers (8-9, 16-17, 22, 27) are reserved bits
    // with no accessor.
    #[doc = "Bit 0 - Current mode of operation"]
    #[inline(always)]
    pub fn cmod(&self) -> CMOD_R {
        CMOD_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Mode mismatch interrupt"]
    #[inline(always)]
    pub fn mmis(&self) -> MMIS_R {
        MMIS_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - OTG interrupt"]
    #[inline(always)]
    pub fn otgint(&self) -> OTGINT_R {
        OTGINT_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Start of frame"]
    #[inline(always)]
    pub fn sof(&self) -> SOF_R {
        SOF_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - RxFIFO non-empty"]
    #[inline(always)]
    pub fn rxflvl(&self) -> RXFLVL_R {
        RXFLVL_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Non-periodic TxFIFO empty"]
    #[inline(always)]
    pub fn nptxfe(&self) -> NPTXFE_R {
        NPTXFE_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Global IN non-periodic NAK effective"]
    #[inline(always)]
    pub fn ginakeff(&self) -> GINAKEFF_R {
        GINAKEFF_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Global OUT NAK effective"]
    #[inline(always)]
    pub fn goutnakeff(&self) -> GOUTNAKEFF_R {
        GOUTNAKEFF_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 10 - Early suspend"]
    #[inline(always)]
    pub fn esusp(&self) -> ESUSP_R {
        ESUSP_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - USB suspend"]
    #[inline(always)]
    pub fn usbsusp(&self) -> USBSUSP_R {
        USBSUSP_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - USB reset"]
    #[inline(always)]
    pub fn usbrst(&self) -> USBRST_R {
        USBRST_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Enumeration done"]
    #[inline(always)]
    pub fn enumdne(&self) -> ENUMDNE_R {
        ENUMDNE_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Isochronous OUT packet dropped interrupt"]
    #[inline(always)]
    pub fn isoodrp(&self) -> ISOODRP_R {
        ISOODRP_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - End of periodic frame interrupt"]
    #[inline(always)]
    pub fn eopf(&self) -> EOPF_R {
        EOPF_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 18 - IN endpoint interrupt"]
    #[inline(always)]
    pub fn iepint(&self) -> IEPINT_R {
        IEPINT_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - OUT endpoint interrupt"]
    #[inline(always)]
    pub fn oepint(&self) -> OEPINT_R {
        OEPINT_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - Incomplete isochronous IN transfer"]
    #[inline(always)]
    pub fn iisoixfr(&self) -> IISOIXFR_R {
        IISOIXFR_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - Incomplete periodic transfer(Host mode)/Incomplete isochronous OUT transfer(Device mode)"]
    #[inline(always)]
    pub fn ipxfr_incompisoout(&self) -> IPXFR_INCOMPISOOUT_R {
        IPXFR_INCOMPISOOUT_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 23 - Reset detected interrupt"]
    #[inline(always)]
    pub fn rstdet(&self) -> RSTDET_R {
        RSTDET_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - Host port interrupt"]
    #[inline(always)]
    pub fn hprtint(&self) -> HPRTINT_R {
        HPRTINT_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - Host channels interrupt"]
    #[inline(always)]
    pub fn hcint(&self) -> HCINT_R {
        HCINT_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - Periodic TxFIFO empty"]
    #[inline(always)]
    pub fn ptxfe(&self) -> PTXFE_R {
        PTXFE_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 28 - Connector ID status change"]
    #[inline(always)]
    pub fn cidschg(&self) -> CIDSCHG_R {
        CIDSCHG_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - Disconnect detected interrupt"]
    #[inline(always)]
    pub fn discint(&self) -> DISCINT_R {
        DISCINT_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Session request/new session detected interrupt"]
    #[inline(always)]
    pub fn srqint(&self) -> SRQINT_R {
        SRQINT_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Resume/remote wakeup detected interrupt"]
    #[inline(always)]
    pub fn wkupint(&self) -> WKUPINT_R {
        WKUPINT_R::new(((self.bits >> 31) & 1) != 0)
    }
}
impl W {
    // Each method returns a single-bit write proxy for the corresponding
    // field; the const generic on the `*_W` type pins the bit offset. Only
    // writable fields get a method here — read-only status bits (CMOD,
    // RXFLVL, HPRTINT, HCINT, ...) are intentionally absent.
    #[doc = "Bit 1 - Mode mismatch interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn mmis(&mut self) -> MMIS_W<GINTSTS_SPEC, 1> {
        MMIS_W::new(self)
    }
    #[doc = "Bit 3 - Start of frame"]
    #[inline(always)]
    #[must_use]
    pub fn sof(&mut self) -> SOF_W<GINTSTS_SPEC, 3> {
        SOF_W::new(self)
    }
    #[doc = "Bit 10 - Early suspend"]
    #[inline(always)]
    #[must_use]
    pub fn esusp(&mut self) -> ESUSP_W<GINTSTS_SPEC, 10> {
        ESUSP_W::new(self)
    }
    #[doc = "Bit 11 - USB suspend"]
    #[inline(always)]
    #[must_use]
    pub fn usbsusp(&mut self) -> USBSUSP_W<GINTSTS_SPEC, 11> {
        USBSUSP_W::new(self)
    }
    #[doc = "Bit 12 - USB reset"]
    #[inline(always)]
    #[must_use]
    pub fn usbrst(&mut self) -> USBRST_W<GINTSTS_SPEC, 12> {
        USBRST_W::new(self)
    }
    #[doc = "Bit 13 - Enumeration done"]
    #[inline(always)]
    #[must_use]
    pub fn enumdne(&mut self) -> ENUMDNE_W<GINTSTS_SPEC, 13> {
        ENUMDNE_W::new(self)
    }
    #[doc = "Bit 14 - Isochronous OUT packet dropped interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn isoodrp(&mut self) -> ISOODRP_W<GINTSTS_SPEC, 14> {
        ISOODRP_W::new(self)
    }
    #[doc = "Bit 15 - End of periodic frame interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn eopf(&mut self) -> EOPF_W<GINTSTS_SPEC, 15> {
        EOPF_W::new(self)
    }
    #[doc = "Bit 20 - Incomplete isochronous IN transfer"]
    #[inline(always)]
    #[must_use]
    pub fn iisoixfr(&mut self) -> IISOIXFR_W<GINTSTS_SPEC, 20> {
        IISOIXFR_W::new(self)
    }
    #[doc = "Bit 21 - Incomplete periodic transfer(Host mode)/Incomplete isochronous OUT transfer(Device mode)"]
    #[inline(always)]
    #[must_use]
    pub fn ipxfr_incompisoout(&mut self) -> IPXFR_INCOMPISOOUT_W<GINTSTS_SPEC, 21> {
        IPXFR_INCOMPISOOUT_W::new(self)
    }
    #[doc = "Bit 23 - Reset detected interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn rstdet(&mut self) -> RSTDET_W<GINTSTS_SPEC, 23> {
        RSTDET_W::new(self)
    }
    #[doc = "Bit 28 - Connector ID status change"]
    #[inline(always)]
    #[must_use]
    pub fn cidschg(&mut self) -> CIDSCHG_W<GINTSTS_SPEC, 28> {
        CIDSCHG_W::new(self)
    }
    #[doc = "Bit 29 - Disconnect detected interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn discint(&mut self) -> DISCINT_W<GINTSTS_SPEC, 29> {
        DISCINT_W::new(self)
    }
    #[doc = "Bit 30 - Session request/new session detected interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn srqint(&mut self) -> SRQINT_W<GINTSTS_SPEC, 30> {
        SRQINT_W::new(self)
    }
    #[doc = "Bit 31 - Resume/remote wakeup detected interrupt"]
    #[inline(always)]
    #[must_use]
    pub fn wkupint(&mut self) -> WKUPINT_W<GINTSTS_SPEC, 31> {
        WKUPINT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Marked `unsafe` — presumably because a raw value bypasses the per-field
    // proxies and may set reserved bits (svd2rust convention); confirm against
    // the generator's documentation.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "OTG_FS core interrupt register (OTG_FS_GINTSTS)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gintsts::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gintsts::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GINTSTS_SPEC;
// GINTSTS is a 32-bit register.
impl crate::RegisterSpec for GINTSTS_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gintsts::R`](R) reader structure"]
impl crate::Readable for GINTSTS_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gintsts::W`](W) writer structure"]
impl crate::Writable for GINTSTS_SPEC {
    // No fields use write-0-to-modify / write-1-to-modify semantics according
    // to these bitmaps.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GINTSTS to value 0x0400_0020"]
impl crate::Resettable for GINTSTS_SPEC {
    const RESET_VALUE: Self::Ux = 0x0400_0020;
}
|
//! Small program that converts uint to hex (and vice versa)
// NOTE(review): this lint list targets an older toolchain; several of these
// lints (e.g. `box_pointers`, `unaligned_references`,
// `missing_doc_code_examples`, `missing_crate_level_docs`) have since been
// removed or renamed and may emit renamed/unknown-lint warnings on newer
// compilers — confirm against the project's pinned toolchain.
#![warn(
    missing_docs,
    absolute_paths_not_starting_with_crate,
    anonymous_parameters,
    box_pointers,
    clashing_extern_declarations,
    deprecated_in_future,
    elided_lifetimes_in_paths,
    explicit_outlives_requirements,
    indirect_structural_match,
    keyword_idents,
    macro_use_extern_crate,
    meta_variable_misuse,
    missing_copy_implementations,
    missing_crate_level_docs,
    missing_debug_implementations,
    missing_doc_code_examples,
    non_ascii_idents,
    private_doc_tests,
    single_use_lifetimes,
    trivial_casts,
    trivial_numeric_casts,
    unaligned_references,
    unreachable_pub,
    unsafe_code,
    unstable_features,
    unused_crate_dependencies,
    unused_extern_crates,
    unused_import_braces,
    unused_lifetimes,
    unused_qualifications,
    unused_results,
    variant_size_differences
)] // unsafe_op_in_unsafe_fn is unstable
#![warn(clippy::all)]
/// Convert a string from unsigned integer to hex format OR from hex to
/// unsigned integer format.
///
/// Inputs starting with `0x` are parsed as hexadecimal and returned as a
/// decimal string; any other input is parsed as a `u128` and returned as
/// `0x`-prefixed lowercase hex. Returns a descriptive `Err(String)` when the
/// input parses as neither.
///
/// Example:
/// ```
/// let res = hex_uint_converter::convert_uint_or_hex("1").unwrap();
/// ```
pub fn convert_uint_or_hex(str_to_parse: &str) -> Result<String, String> {
    // Use `strip_prefix` rather than `contains` + `trim_start_matches`: the
    // old check misrouted values that merely *contain* "0x" (e.g. "10x2") to
    // the hex branch, and `trim_start_matches` silently accepted doubled
    // prefixes ("0x0x1" parsed as 1).
    if let Some(hex_digits) = str_to_parse.strip_prefix("0x") {
        // Hex -> decimal.
        match u128::from_str_radix(hex_digits, 16) {
            Ok(number) => Ok(number.to_string()),
            Err(_) => Err(format!(
                "Error, the value {:?} is not an hexadecimal value!",
                str_to_parse
            )),
        }
    } else {
        // Decimal -> hex.
        match str_to_parse.parse::<u128>() {
            Ok(number) => Ok(format!("0x{:x}", number)),
            Err(_) => Err(format!(
                "Error, the value {:?} is not an unsigned integer!",
                str_to_parse
            )),
        }
    }
}
#[test]
fn basic_tests() {
    // Conversions that must round-trip successfully.
    let ok_cases = vec![("1", "0x1"), ("0x1", "1"), ("0xF", "15"), ("15", "0xf")];
    for (input, expected) in ok_cases {
        assert_eq!(convert_uint_or_hex(input).unwrap(), expected);
    }
    // Garbage in either format must be rejected.
    for bad_input in vec!["bad_value", "0xbad_value"] {
        assert!(convert_uint_or_hex(bad_input).is_err());
    }
}
|
#![cfg_attr(test, feature(test))]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
use bytes::Bytes;
mod byte_buffer;
mod compression_type;
mod encoding_type;
mod error;
mod node;
mod node_types;
mod options;
mod printer;
mod reader;
mod sixbit;
mod text_reader;
mod to_text_xml;
mod types;
mod value;
mod writer;
use crate::error::Result;
use crate::text_reader::TextXmlReader;
use crate::to_text_xml::TextXmlWriter;
// Public exports
pub use crate::compression_type::CompressionType;
pub use crate::encoding_type::EncodingType;
pub use crate::error::KbinError;
pub use crate::node::{Node, NodeCollection};
pub use crate::node_types::StandardType;
pub use crate::options::{Options, OptionsBuilder};
pub use crate::printer::Printer;
pub use crate::reader::Reader;
pub use crate::to_text_xml::ToTextXml;
pub use crate::value::{Value, ValueArray};
pub use crate::writer::{Writeable, Writer};
// Magic bytes identifying a binary kbin document header.
const SIGNATURE: u8 = 0xA0;
const SIG_COMPRESSED: u8 = 0x42;
const SIG_UNCOMPRESSED: u8 = 0x45;
// Mask for bit 6 (decimal 64).
const ARRAY_MASK: u8 = 1 << 6;
/// Returns `true` when `input` looks like a binary kbin document: at least
/// three bytes long, starting with the signature byte followed by one of the
/// two compression markers.
pub fn is_binary_xml(input: &[u8]) -> bool {
    if input.len() < 3 {
        return false;
    }
    input[0] == SIGNATURE && matches!(input[1], SIG_COMPRESSED | SIG_UNCOMPRESSED)
}
/// Decode a binary kbin document, returning the node tree together with the
/// text encoding reported by the reader.
pub fn from_binary(input: Bytes) -> Result<(NodeCollection, EncodingType)> {
    let node_reader = Reader::new(input)?;
    // Capture the encoding before `collect` consumes the reader.
    let encoding = node_reader.encoding();
    match node_reader.collect::<Option<_>>() {
        Some(nodes) => Ok((nodes, encoding)),
        None => Err(KbinError::NoNodeCollection.into()),
    }
}
/// Parse a text-XML document, returning the node tree together with the
/// encoding the reader detected while parsing.
pub fn from_text_xml(input: &[u8]) -> Result<(NodeCollection, EncodingType)> {
    let mut xml_reader = TextXmlReader::new(input);
    // Parse first; the encoding is read from the reader afterwards, matching
    // the order the reader expects.
    let parsed = xml_reader.as_node_collection()?;
    match parsed {
        Some(nodes) => Ok((nodes, xml_reader.encoding())),
        None => Err(KbinError::NoNodeCollection.into()),
    }
}
/// Decode a document of either flavor: binary kbin input is routed to the
/// binary decoder, everything else is treated as text XML.
pub fn from_bytes(input: Bytes) -> Result<(NodeCollection, EncodingType)> {
    if !is_binary_xml(&input) {
        from_text_xml(&input)
    } else {
        from_binary(input)
    }
}
/// Decode a binary kbin document from a borrowed byte slice.
#[inline]
pub fn from_slice(input: &[u8]) -> Result<(NodeCollection, EncodingType)> {
    // The binary reader needs an owned buffer, so copy the slice into one.
    let owned: Vec<u8> = input.to_owned();
    from_binary(Bytes::from(owned))
}
/// Serialize `input` to binary kbin bytes using default writer options.
pub fn to_binary<T>(input: &T) -> Result<Vec<u8>>
where
    T: Writeable,
{
    let mut binary_writer = Writer::new();
    // `?` performs the same `From` conversion the previous
    // `map_err(Into::into)` did.
    Ok(binary_writer.to_binary(input)?)
}
/// Serialize `input` to binary kbin bytes using the caller-supplied
/// `options`.
pub fn to_binary_with_options<T>(options: Options, input: &T) -> Result<Vec<u8>>
where
    T: Writeable,
{
    let mut binary_writer = Writer::with_options(options);
    Ok(binary_writer.to_binary(input)?)
}
/// Serialize `input` to text-XML bytes.
pub fn to_text_xml<T>(input: &T) -> Result<Vec<u8>>
where
    T: ToTextXml,
{
    TextXmlWriter::new().into_text_xml(input)
}
|
use std::{self, env, fs};
mod parking_lot;
/// Where parking-lot commands come from for this run.
enum Mode {
    /// Commands are read from the file named by the first CLI argument.
    File,
    /// Commands are read line-by-line from stdin.
    Interactive,
}
/// Entry point: with no CLI argument, run an interactive stdin REPL;
/// otherwise treat the first argument as a path to a command file and replay
/// each line through the parking lot.
fn main() {
    let args: Vec<String> = env::args().collect();
    let mode = if args.len() == 1 {
        Mode::Interactive
    } else {
        Mode::File
    };
    let mut parking = parking_lot::ParkingLot::new();
    match mode {
        Mode::Interactive => loop {
            let mut command = String::new();
            // `read_line` returns the number of bytes read; 0 means stdin hit
            // EOF. Without this check the loop spins forever re-processing an
            // empty command once input is exhausted.
            let bytes_read = std::io::stdin()
                .read_line(&mut command)
                .expect("failed to read from stdin");
            if bytes_read == 0 {
                break;
            }
            println!("{}", parking_lot::stringify(parking.repl(&command)));
        },
        Mode::File => {
            let contents =
                fs::read_to_string(&args[1]).expect("Something went wrong reading the file");
            for command in contents.lines() {
                println!("\ncommand: '{}'", command);
                println!("{}", parking_lot::stringify(parking.repl(command)));
            }
        }
    }
}
|
struct Solution;
impl Solution {
    /// Returns `true` when `a` is a strict "mountain": at least three
    /// elements that strictly increase to a single interior peak and then
    /// strictly decrease to the end.
    pub fn valid_mountain_array(a: Vec<i32>) -> bool {
        let n = a.len();
        if n < 3 {
            return false;
        }
        // Walk up from the left as long as values strictly increase.
        let mut left = 0;
        while left + 1 < n && a[left] < a[left + 1] {
            left += 1;
        }
        // Walk up from the right as long as values strictly decrease
        // rightwards (i.e. increase when scanned leftwards).
        let mut right = n - 1;
        while right > 0 && a[right - 1] > a[right] {
            right -= 1;
        }
        // Valid iff both walks meet at one shared, interior peak.
        left > 0 && right < n - 1 && left == right
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_valid_mountain_array() {
        // Table-driven form of the same three checks.
        let cases = vec![
            (vec![2, 1], false),
            (vec![3, 5, 5], false),
            (vec![0, 3, 2, 1], true),
        ];
        for (input, expected) in cases {
            assert_eq!(Solution::valid_mountain_array(input), expected);
        }
    }
}
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Numeric traits for generic mathematics
use std::ops::{Add, Sub, Mul, Div, Rem, Neg};
use std::ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr};
use std::{usize, u8, u16, u32, u64};
use std::{isize, i8, i16, i32, i64};
use std::{f32, f64};
use std::mem::{self, size_of};
use std::num::FpCategory;
/// The base trait for numeric types
pub trait Num: PartialEq + Zero + One
    + Add<Output = Self> + Sub<Output = Self>
    + Mul<Output = Self> + Div<Output = Self> + Rem<Output = Self>
{
    /// Parse error for `from_str_radix`
    type FromStrRadixErr;
    /// Convert from a string and radix <= 36.
    fn from_str_radix(str: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr>;
}
// Implements `Num` for integer primitives by delegating to the inherent
// `from_str_radix`; the parse error type is `ParseIntError`.
macro_rules! int_trait_impl {
    ($name:ident for $($t:ty)*) => ($(
        impl $name for $t {
            type FromStrRadixErr = ::std::num::ParseIntError;
            fn from_str_radix(s: &str, radix: u32)
                -> Result<Self, ::std::num::ParseIntError>
            {
                <$t>::from_str_radix(s, radix)
            }
        }
    )*)
}
// Same delegation for floats, with `ParseFloatError`.
macro_rules! float_trait_impl {
    ($name:ident for $($t:ty)*) => ($(
        impl $name for $t {
            type FromStrRadixErr = ::std::num::ParseFloatError;
            fn from_str_radix(s: &str, radix: u32)
                -> Result<Self, ::std::num::ParseFloatError>
            {
                <$t>::from_str_radix(s, radix)
            }
        }
    )*)
}
int_trait_impl!(Num for usize u8 u16 u32 u64 isize i8 i16 i32 i64);
float_trait_impl!(Num for f32 f64);
/// Defines an additive identity element for `Self`.
///
/// # Deriving
///
/// This trait can be automatically be derived using `#[deriving(Zero)]`
/// attribute. If you choose to use this, make sure that the laws outlined in
/// the documentation for `Zero::zero` still hold.
pub trait Zero: Add<Self, Output = Self> {
    /// Returns the additive identity element of `Self`, `0`.
    ///
    /// # Laws
    ///
    /// ```{.text}
    /// a + 0 = a ∀ a ∈ Self
    /// 0 + a = a ∀ a ∈ Self
    /// ```
    ///
    /// # Purity
    ///
    /// This function should return the same result at all times regardless of
    /// external mutable state, for example values stored in TLS or in
    /// `static mut`s.
    // FIXME (#5527): This should be an associated constant
    fn zero() -> Self;
    /// Returns `true` if `self` is equal to the additive identity.
    // NOTE(review): `#[inline]` on a bodiless trait method is ignored by the
    // compiler — harmless but redundant.
    #[inline]
    fn is_zero(&self) -> bool;
}
// Implements `Zero` for a primitive with its literal zero value.
macro_rules! zero_impl {
    ($t:ty, $v:expr) => {
        impl Zero for $t {
            #[inline]
            fn zero() -> $t { $v }
            #[inline]
            fn is_zero(&self) -> bool { *self == $v }
        }
    }
}
zero_impl!(usize, 0usize);
zero_impl!(u8, 0u8);
zero_impl!(u16, 0u16);
zero_impl!(u32, 0u32);
zero_impl!(u64, 0u64);
zero_impl!(isize, 0isize);
zero_impl!(i8, 0i8);
zero_impl!(i16, 0i16);
zero_impl!(i32, 0i32);
zero_impl!(i64, 0i64);
zero_impl!(f32, 0.0f32);
zero_impl!(f64, 0.0f64);
/// Defines a multiplicative identity element for `Self`.
pub trait One: Mul<Self, Output = Self> {
    /// Returns the multiplicative identity element of `Self`, `1`.
    ///
    /// # Laws
    ///
    /// ```{.text}
    /// a * 1 = a ∀ a ∈ Self
    /// 1 * a = a ∀ a ∈ Self
    /// ```
    ///
    /// # Purity
    ///
    /// This function should return the same result at all times regardless of
    /// external mutable state, for example values stored in TLS or in
    /// `static mut`s.
    // FIXME (#5527): This should be an associated constant
    fn one() -> Self;
}
// Implements `One` for a primitive with its literal one value.
macro_rules! one_impl {
    ($t:ty, $v:expr) => {
        impl One for $t {
            #[inline]
            fn one() -> $t { $v }
        }
    }
}
one_impl!(usize, 1usize);
one_impl!(u8, 1u8);
one_impl!(u16, 1u16);
one_impl!(u32, 1u32);
one_impl!(u64, 1u64);
one_impl!(isize, 1isize);
one_impl!(i8, 1i8);
one_impl!(i16, 1i16);
one_impl!(i32, 1i32);
one_impl!(i64, 1i64);
one_impl!(f32, 1.0f32);
one_impl!(f64, 1.0f64);
/// Useful functions for signed numbers (i.e. numbers that can be negative).
pub trait Signed: Num + Neg<Output = Self> {
    /// Computes the absolute value.
    ///
    /// For `f32` and `f64`, `NaN` will be returned if the number is `NaN`.
    ///
    /// For signed integers, `::MIN` will be returned if the number is `::MIN`.
    fn abs(&self) -> Self;
    /// The positive difference of two numbers.
    ///
    /// Returns `zero` if the number is less than or equal to `other`, otherwise the difference
    /// between `self` and `other` is returned.
    fn abs_sub(&self, other: &Self) -> Self;
    /// Returns the sign of the number.
    ///
    /// For `f32` and `f64`:
    ///
    /// * `1.0` if the number is positive, `+0.0` or `INFINITY`
    /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
    /// * `NaN` if the number is `NaN`
    ///
    /// For signed integers:
    ///
    /// * `0` if the number is zero
    /// * `1` if the number is positive
    /// * `-1` if the number is negative
    fn signum(&self) -> Self;
    /// Returns true if the number is positive and false if the number is zero or negative.
    fn is_positive(&self) -> bool;
    /// Returns true if the number is negative and false if the number is zero or positive.
    fn is_negative(&self) -> bool;
}
// Implements `Signed` for the signed integer primitives.
macro_rules! signed_impl {
    ($($t:ty)*) => ($(
        impl Signed for $t {
            #[inline]
            // NOTE(review): `-*self` on `::MIN` overflows — it panics in
            // debug builds and wraps to `::MIN` in release, while the trait
            // doc above promises `::MIN` unconditionally. Confirm whether
            // `wrapping_neg` was intended here.
            fn abs(&self) -> $t {
                if self.is_negative() { -*self } else { *self }
            }
            #[inline]
            fn abs_sub(&self, other: &$t) -> $t {
                if *self <= *other { 0 } else { *self - *other }
            }
            #[inline]
            fn signum(&self) -> $t {
                match *self {
                    n if n > 0 => 1,
                    0 => 0,
                    _ => -1,
                }
            }
            #[inline]
            fn is_positive(&self) -> bool { *self > 0 }
            #[inline]
            fn is_negative(&self) -> bool { *self < 0 }
        }
    )*)
}
signed_impl!(isize i8 i16 i32 i64);
// Implements `Signed` for a float type; `$inf`/`$neg_inf` are used to detect
// signed zero via `1.0 / x`.
macro_rules! signed_float_impl {
    ($t:ty, $nan:expr, $inf:expr, $neg_inf:expr) => {
        impl Signed for $t {
            /// Computes the absolute value. Returns `NAN` if the number is `NAN`.
            #[inline]
            fn abs(&self) -> $t {
                <$t>::abs(*self)
            }
            /// The positive difference of two numbers. Returns `0.0` if the number is
            /// less than or equal to `other`, otherwise the difference between`self`
            /// and `other` is returned.
            // NOTE(review): the inherent float `abs_sub` was deprecated in
            // later std releases; this may warn on newer toolchains.
            #[inline]
            fn abs_sub(&self, other: &$t) -> $t {
                <$t>::abs_sub(*self, *other)
            }
            /// # Returns
            ///
            /// - `1.0` if the number is positive, `+0.0` or `INFINITY`
            /// - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
            /// - `NAN` if the number is NaN
            #[inline]
            fn signum(&self) -> $t {
                <$t>::signum(*self)
            }
            /// Returns `true` if the number is positive, including `+0.0` and `INFINITY`
            // `1.0 / +0.0 == INFINITY` distinguishes +0.0 from -0.0.
            #[inline]
            fn is_positive(&self) -> bool { *self > 0.0 || (1.0 / *self) == $inf }
            /// Returns `true` if the number is negative, including `-0.0` and `NEG_INFINITY`
            #[inline]
            fn is_negative(&self) -> bool { *self < 0.0 || (1.0 / *self) == $neg_inf }
        }
    }
}
signed_float_impl!(f32, f32::NAN, f32::INFINITY, f32::NEG_INFINITY);
signed_float_impl!(f64, f64::NAN, f64::INFINITY, f64::NEG_INFINITY);
/// A trait for values which cannot be negative
pub trait Unsigned: Num {}
// Implements an empty marker trait for a list of types.
macro_rules! empty_trait_impl {
    ($name:ident for $($t:ty)*) => ($(
        impl $name for $t {}
    )*)
}
empty_trait_impl!(Unsigned for usize u8 u16 u32 u64);
/// Numbers which have upper and lower bounds
pub trait Bounded {
    // FIXME (#5527): These should be associated constants
    /// returns the smallest finite number this type can represent
    fn min_value() -> Self;
    /// returns the largest finite number this type can represent
    fn max_value() -> Self;
}
// Implements `Bounded` from the primitive's MIN/MAX constants. For floats
// these are the finite extremes, not the infinities, matching the trait's
// "finite" wording.
macro_rules! bounded_impl {
    ($t:ty, $min:expr, $max:expr) => {
        impl Bounded for $t {
            #[inline]
            fn min_value() -> $t { $min }
            #[inline]
            fn max_value() -> $t { $max }
        }
    }
}
bounded_impl!(usize, usize::MIN, usize::MAX);
bounded_impl!(u8, u8::MIN, u8::MAX);
bounded_impl!(u16, u16::MIN, u16::MAX);
bounded_impl!(u32, u32::MIN, u32::MAX);
bounded_impl!(u64, u64::MIN, u64::MAX);
bounded_impl!(isize, isize::MIN, isize::MAX);
bounded_impl!(i8, i8::MIN, i8::MAX);
bounded_impl!(i16, i16::MIN, i16::MAX);
bounded_impl!(i32, i32::MIN, i32::MAX);
bounded_impl!(i64, i64::MIN, i64::MAX);
bounded_impl!(f32, f32::MIN, f32::MAX);
bounded_impl!(f64, f64::MIN, f64::MAX);
/// Saturating math operations
pub trait Saturating {
    /// Saturating addition operator.
    /// Returns a+b, saturating at the numeric bounds instead of overflowing.
    fn saturating_add(self, v: Self) -> Self;
    /// Saturating subtraction operator.
    /// Returns a-b, saturating at the numeric bounds instead of overflowing.
    fn saturating_sub(self, v: Self) -> Self;
}
// Blanket implementation: any checked-arithmetic type with known bounds
// saturates by clamping to the bound chosen by the sign of `v`.
impl<T: CheckedAdd + CheckedSub + Zero + PartialOrd + Bounded> Saturating for T {
    #[inline]
    fn saturating_add(self, v: T) -> T {
        // Adding a non-negative value can only overflow upwards, a negative
        // one only downwards.
        self.checked_add(&v).unwrap_or_else(|| {
            if v >= Zero::zero() {
                Bounded::max_value()
            } else {
                Bounded::min_value()
            }
        })
    }
    #[inline]
    fn saturating_sub(self, v: T) -> T {
        // Mirror image of `saturating_add`: subtracting a non-negative value
        // can only underflow, a negative one only overflow.
        self.checked_sub(&v).unwrap_or_else(|| {
            if v >= Zero::zero() {
                Bounded::min_value()
            } else {
                Bounded::max_value()
            }
        })
    }
}
/// Performs addition that returns `None` instead of wrapping around on
/// overflow.
pub trait CheckedAdd: Add<Self, Output = Self> {
    /// Adds two numbers, checking for overflow. If overflow happens, `None` is
    /// returned.
    fn checked_add(&self, v: &Self) -> Option<Self>;
}
// Forwards a checked trait method to the primitive's inherent method of the
// same name (inherent methods win resolution over the trait method, so this
// does not recurse). Reused below for CheckedSub and CheckedMul.
macro_rules! checked_impl {
    ($trait_name:ident, $method:ident, $t:ty) => {
        impl $trait_name for $t {
            #[inline]
            fn $method(&self, v: &$t) -> Option<$t> {
                <$t>::$method(*self, *v)
            }
        }
    }
}
checked_impl!(CheckedAdd, checked_add, u8);
checked_impl!(CheckedAdd, checked_add, u16);
checked_impl!(CheckedAdd, checked_add, u32);
checked_impl!(CheckedAdd, checked_add, u64);
checked_impl!(CheckedAdd, checked_add, usize);
checked_impl!(CheckedAdd, checked_add, i8);
checked_impl!(CheckedAdd, checked_add, i16);
checked_impl!(CheckedAdd, checked_add, i32);
checked_impl!(CheckedAdd, checked_add, i64);
checked_impl!(CheckedAdd, checked_add, isize);
/// Performs subtraction that returns `None` instead of wrapping around on underflow.
pub trait CheckedSub: Sub<Self, Output = Self> {
    /// Subtracts two numbers, checking for underflow. If underflow happens,
    /// `None` is returned.
    fn checked_sub(&self, v: &Self) -> Option<Self>;
}
// Delegates to the primitives' inherent `checked_sub` via `checked_impl!`.
checked_impl!(CheckedSub, checked_sub, u8);
checked_impl!(CheckedSub, checked_sub, u16);
checked_impl!(CheckedSub, checked_sub, u32);
checked_impl!(CheckedSub, checked_sub, u64);
checked_impl!(CheckedSub, checked_sub, usize);
checked_impl!(CheckedSub, checked_sub, i8);
checked_impl!(CheckedSub, checked_sub, i16);
checked_impl!(CheckedSub, checked_sub, i32);
checked_impl!(CheckedSub, checked_sub, i64);
checked_impl!(CheckedSub, checked_sub, isize);
/// Performs multiplication that returns `None` instead of wrapping around on underflow or
/// overflow.
pub trait CheckedMul: Mul<Self, Output = Self> {
    /// Multiplies two numbers, checking for underflow or overflow. If underflow
    /// or overflow happens, `None` is returned.
    fn checked_mul(&self, v: &Self) -> Option<Self>;
}
// Delegates to the primitives' inherent `checked_mul` via `checked_impl!`.
checked_impl!(CheckedMul, checked_mul, u8);
checked_impl!(CheckedMul, checked_mul, u16);
checked_impl!(CheckedMul, checked_mul, u32);
checked_impl!(CheckedMul, checked_mul, u64);
checked_impl!(CheckedMul, checked_mul, usize);
checked_impl!(CheckedMul, checked_mul, i8);
checked_impl!(CheckedMul, checked_mul, i16);
checked_impl!(CheckedMul, checked_mul, i32);
checked_impl!(CheckedMul, checked_mul, i64);
checked_impl!(CheckedMul, checked_mul, isize);
/// Performs division that returns `None` instead of panicking on division by zero and instead of
/// wrapping around on underflow and overflow.
pub trait CheckedDiv: Div<Self, Output = Self> {
    /// Divides two numbers, checking for underflow, overflow and division by
    /// zero. If any of that happens, `None` is returned.
    fn checked_div(&self, v: &Self) -> Option<Self>;
}
// Delegate to the primitives' inherent `checked_div`, which already returns
// `None` for a zero divisor and for the `MIN / -1` overflow on signed types —
// the same semantics the previous hand-rolled checks implemented. This
// matches the delegation style `checked_impl!` uses for add/sub/mul above.
// The `$min` parameter is no longer needed but is retained so the existing
// invocations stay valid.
macro_rules! checkeddiv_int_impl {
    ($t:ty, $min:expr) => {
        impl CheckedDiv for $t {
            #[inline]
            fn checked_div(&self, v: &$t) -> Option<$t> {
                <$t>::checked_div(*self, *v)
            }
        }
    }
}
checkeddiv_int_impl!(isize, isize::MIN);
checkeddiv_int_impl!(i8, i8::MIN);
checkeddiv_int_impl!(i16, i16::MIN);
checkeddiv_int_impl!(i32, i32::MIN);
checkeddiv_int_impl!(i64, i64::MIN);
// Unsigned division can only fail on a zero divisor; the inherent
// `checked_div` covers that too.
macro_rules! checkeddiv_uint_impl {
    ($($t:ty)*) => ($(
        impl CheckedDiv for $t {
            #[inline]
            fn checked_div(&self, v: &$t) -> Option<$t> {
                <$t>::checked_div(*self, *v)
            }
        }
    )*)
}
checkeddiv_uint_impl!(usize u8 u16 u32 u64);
/// Bit-level and endianness operations shared by all primitive integers.
pub trait PrimInt:
    Sized
    + Copy
    + Num
    + NumCast
    + Bounded
    + PartialOrd
    + Ord
    + Eq
    + Not<Output = Self>
    + BitAnd<Output = Self>
    + BitOr<Output = Self>
    + BitXor<Output = Self>
    + Shl<usize, Output = Self>
    + Shr<usize, Output = Self>
    + CheckedAdd<Output = Self>
    + CheckedSub<Output = Self>
    + CheckedMul<Output = Self>
    + CheckedDiv<Output = Self>
    + Saturating
{
    /// Returns the number of ones in the binary representation of `self`.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0b01001100u8;
    ///
    /// assert_eq!(n.count_ones(), 3);
    /// ```
    fn count_ones(self) -> u32;
    /// Returns the number of zeros in the binary representation of `self`.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0b01001100u8;
    ///
    /// assert_eq!(n.count_zeros(), 5);
    /// ```
    fn count_zeros(self) -> u32;
    /// Returns the number of leading zeros in the binary representation
    /// of `self`.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0b0101000u16;
    ///
    /// assert_eq!(n.leading_zeros(), 10);
    /// ```
    fn leading_zeros(self) -> u32;
    /// Returns the number of trailing zeros in the binary representation
    /// of `self`.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0b0101000u16;
    ///
    /// assert_eq!(n.trailing_zeros(), 3);
    /// ```
    fn trailing_zeros(self) -> u32;
    /// Shifts the bits to the left by a specified amount, `n`, wrapping
    /// the truncated bits to the end of the resulting integer.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    /// let m = 0x3456789ABCDEF012u64;
    ///
    /// assert_eq!(n.rotate_left(12), m);
    /// ```
    fn rotate_left(self, n: u32) -> Self;
    /// Shifts the bits to the right by a specified amount, `n`, wrapping
    /// the truncated bits to the beginning of the resulting integer.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    /// let m = 0xDEF0123456789ABCu64;
    ///
    /// assert_eq!(n.rotate_right(12), m);
    /// ```
    fn rotate_right(self, n: u32) -> Self;
    /// Reverses the byte order of the integer.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    /// let m = 0xEFCDAB8967452301u64;
    ///
    /// assert_eq!(n.swap_bytes(), m);
    /// ```
    fn swap_bytes(self) -> Self;
    /// Convert an integer from big endian to the target's endianness.
    ///
    /// On big endian this is a no-op. On little endian the bytes are swapped.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    ///
    /// if cfg!(target_endian = "big") {
    ///     assert_eq!(u64::from_be(n), n)
    /// } else {
    ///     assert_eq!(u64::from_be(n), n.swap_bytes())
    /// }
    /// ```
    fn from_be(x: Self) -> Self;
    /// Convert an integer from little endian to the target's endianness.
    ///
    /// On little endian this is a no-op. On big endian the bytes are swapped.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    ///
    /// if cfg!(target_endian = "little") {
    ///     assert_eq!(u64::from_le(n), n)
    /// } else {
    ///     assert_eq!(u64::from_le(n), n.swap_bytes())
    /// }
    /// ```
    fn from_le(x: Self) -> Self;
    /// Convert `self` to big endian from the target's endianness.
    ///
    /// On big endian this is a no-op. On little endian the bytes are swapped.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    ///
    /// if cfg!(target_endian = "big") {
    ///     assert_eq!(n.to_be(), n)
    /// } else {
    ///     assert_eq!(n.to_be(), n.swap_bytes())
    /// }
    /// ```
    fn to_be(self) -> Self;
    /// Convert `self` to little endian from the target's endianness.
    ///
    /// On little endian this is a no-op. On big endian the bytes are swapped.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// let n = 0x0123456789ABCDEFu64;
    ///
    /// if cfg!(target_endian = "little") {
    ///     assert_eq!(n.to_le(), n)
    /// } else {
    ///     assert_eq!(n.to_le(), n.swap_bytes())
    /// }
    /// ```
    fn to_le(self) -> Self;
    /// Raises self to the power of `exp`, using exponentiation by squaring.
    ///
    /// # Examples
    ///
    /// ```
    /// use num::traits::PrimInt;
    ///
    /// assert_eq!(2i32.pow(4), 16);
    /// ```
    // `mut exp` removed: a `mut` pattern in a trait method without a body
    // trips the deny-by-default `patterns_in_fns_without_body` lint on modern
    // rustc. Implementations may still declare their own parameter as `mut`.
    fn pow(self, exp: u32) -> Self;
}
// Implements `PrimInt` for each primitive integer by forwarding every method
// to the identically named inherent method.
macro_rules! prim_int_impl {
    ($($T:ty)*) => ($(
        impl PrimInt for $T {
            fn count_ones(self) -> u32 {
                <$T>::count_ones(self)
            }
            fn count_zeros(self) -> u32 {
                <$T>::count_zeros(self)
            }
            fn leading_zeros(self) -> u32 {
                <$T>::leading_zeros(self)
            }
            fn trailing_zeros(self) -> u32 {
                <$T>::trailing_zeros(self)
            }
            fn rotate_left(self, n: u32) -> Self {
                <$T>::rotate_left(self, n)
            }
            fn rotate_right(self, n: u32) -> Self {
                <$T>::rotate_right(self, n)
            }
            fn swap_bytes(self) -> Self {
                <$T>::swap_bytes(self)
            }
            fn from_be(x: Self) -> Self {
                <$T>::from_be(x)
            }
            fn from_le(x: Self) -> Self {
                <$T>::from_le(x)
            }
            fn to_be(self) -> Self {
                <$T>::to_be(self)
            }
            fn to_le(self) -> Self {
                <$T>::to_le(self)
            }
            fn pow(self, exp: u32) -> Self {
                <$T>::pow(self, exp)
            }
        }
    )*)
}
prim_int_impl!(u8 u16 u32 u64 usize i8 i16 i32 i64 isize);
/// A generic trait for converting a value to a number.
// Implementors only need `to_i64` and `to_u64`; every other default method
// funnels through those two and narrows with a further checked conversion.
pub trait ToPrimitive {
    /// Converts the value of `self` to an `isize`.
    #[inline]
    fn to_isize(&self) -> Option<isize> {
        self.to_i64().and_then(|x| x.to_isize())
    }
    /// Converts the value of `self` to an `i8`.
    #[inline]
    fn to_i8(&self) -> Option<i8> {
        self.to_i64().and_then(|x| x.to_i8())
    }
    /// Converts the value of `self` to an `i16`.
    #[inline]
    fn to_i16(&self) -> Option<i16> {
        self.to_i64().and_then(|x| x.to_i16())
    }
    /// Converts the value of `self` to an `i32`.
    #[inline]
    fn to_i32(&self) -> Option<i32> {
        self.to_i64().and_then(|x| x.to_i32())
    }
    /// Converts the value of `self` to an `i64`.
    fn to_i64(&self) -> Option<i64>;
    /// Converts the value of `self` to a `usize`.
    #[inline]
    fn to_usize(&self) -> Option<usize> {
        self.to_u64().and_then(|x| x.to_usize())
    }
    /// Converts the value of `self` to an `u8`.
    #[inline]
    fn to_u8(&self) -> Option<u8> {
        self.to_u64().and_then(|x| x.to_u8())
    }
    /// Converts the value of `self` to an `u16`.
    #[inline]
    fn to_u16(&self) -> Option<u16> {
        self.to_u64().and_then(|x| x.to_u16())
    }
    /// Converts the value of `self` to an `u32`.
    #[inline]
    fn to_u32(&self) -> Option<u32> {
        self.to_u64().and_then(|x| x.to_u32())
    }
    /// Converts the value of `self` to an `u64`.
    // NOTE(review): `#[inline]` on this bodiless required method is ignored
    // by the compiler (and is absent on `to_i64`) — redundant but harmless.
    #[inline]
    fn to_u64(&self) -> Option<u64>;
    /// Converts the value of `self` to an `f32`.
    #[inline]
    fn to_f32(&self) -> Option<f32> {
        self.to_f64().and_then(|x| x.to_f32())
    }
    /// Converts the value of `self` to an `f64`.
    #[inline]
    fn to_f64(&self) -> Option<f64> {
        self.to_i64().and_then(|x| x.to_f64())
    }
}
// Shared body for signed-int -> signed-int conversions in `ToPrimitive`
// impls. Widening (or same-size) casts always succeed; narrowing casts
// are range-checked against the destination type's bounds via an `i64`
// round-trip before casting.
macro_rules! impl_to_primitive_int_to_int {
    ($SrcT:ty, $DstT:ty, $slf:expr) => (
        {
            if size_of::<$SrcT>() <= size_of::<$DstT>() {
                // Widening conversion can never lose information.
                Some($slf as $DstT)
            } else {
                let n = $slf as i64;
                let min_value: $DstT = Bounded::min_value();
                let max_value: $DstT = Bounded::max_value();
                if min_value as i64 <= n && n <= max_value as i64 {
                    Some($slf as $DstT)
                } else {
                    None
                }
            }
        }
    )
}
// Shared body for signed-int -> unsigned-int conversions: the value
// must be non-negative and fit the destination's range (compared after
// widening both sides to u64).
macro_rules! impl_to_primitive_int_to_uint {
    ($SrcT:ty, $DstT:ty, $slf:expr) => (
        {
            let zero: $SrcT = Zero::zero();
            let max_value: $DstT = Bounded::max_value();
            if zero <= $slf && $slf as u64 <= max_value as u64 {
                Some($slf as $DstT)
            } else {
                None
            }
        }
    )
}
// Generates a complete `ToPrimitive` impl for a signed integer type.
// Narrowing conversions are range-checked by the helper macros above;
// conversions to floats are plain casts and always succeed (possibly
// losing precision, never returning `None`).
macro_rules! impl_to_primitive_int {
    ($T:ty) => (
        impl ToPrimitive for $T {
            #[inline]
            fn to_isize(&self) -> Option<isize> { impl_to_primitive_int_to_int!($T, isize, *self) }
            #[inline]
            fn to_i8(&self) -> Option<i8> { impl_to_primitive_int_to_int!($T, i8, *self) }
            #[inline]
            fn to_i16(&self) -> Option<i16> { impl_to_primitive_int_to_int!($T, i16, *self) }
            #[inline]
            fn to_i32(&self) -> Option<i32> { impl_to_primitive_int_to_int!($T, i32, *self) }
            #[inline]
            fn to_i64(&self) -> Option<i64> { impl_to_primitive_int_to_int!($T, i64, *self) }
            #[inline]
            fn to_usize(&self) -> Option<usize> { impl_to_primitive_int_to_uint!($T, usize, *self) }
            #[inline]
            fn to_u8(&self) -> Option<u8> { impl_to_primitive_int_to_uint!($T, u8, *self) }
            #[inline]
            fn to_u16(&self) -> Option<u16> { impl_to_primitive_int_to_uint!($T, u16, *self) }
            #[inline]
            fn to_u32(&self) -> Option<u32> { impl_to_primitive_int_to_uint!($T, u32, *self) }
            #[inline]
            fn to_u64(&self) -> Option<u64> { impl_to_primitive_int_to_uint!($T, u64, *self) }
            #[inline]
            fn to_f32(&self) -> Option<f32> { Some(*self as f32) }
            #[inline]
            fn to_f64(&self) -> Option<f64> { Some(*self as f64) }
        }
    )
}
// Checked conversions out of every signed primitive.
impl_to_primitive_int! { isize }
impl_to_primitive_int! { i8 }
impl_to_primitive_int! { i16 }
impl_to_primitive_int! { i32 }
impl_to_primitive_int! { i64 }
// Shared body for unsigned-int -> signed-int conversions: only an
// upper-bound check is needed because the source is never negative.
macro_rules! impl_to_primitive_uint_to_int {
    ($DstT:ty, $slf:expr) => (
        {
            let max_value: $DstT = Bounded::max_value();
            if $slf as u64 <= max_value as u64 {
                Some($slf as $DstT)
            } else {
                None
            }
        }
    )
}
// Shared body for unsigned-int -> unsigned-int conversions. Widening
// casts always succeed; narrowing casts are range-checked via u64.
// The `zero <= $slf` test is trivially true for unsigned sources; it is
// kept for symmetry with the signed helper above.
macro_rules! impl_to_primitive_uint_to_uint {
    ($SrcT:ty, $DstT:ty, $slf:expr) => (
        {
            if size_of::<$SrcT>() <= size_of::<$DstT>() {
                Some($slf as $DstT)
            } else {
                let zero: $SrcT = Zero::zero();
                let max_value: $DstT = Bounded::max_value();
                if zero <= $slf && $slf as u64 <= max_value as u64 {
                    Some($slf as $DstT)
                } else {
                    None
                }
            }
        }
    )
}
// Generates a complete `ToPrimitive` impl for an unsigned integer type,
// delegating to the checked-cast helper macros above. Conversions to
// floats are plain casts and always succeed.
macro_rules! impl_to_primitive_uint {
    ($T:ty) => (
        impl ToPrimitive for $T {
            #[inline]
            fn to_isize(&self) -> Option<isize> { impl_to_primitive_uint_to_int!(isize, *self) }
            #[inline]
            fn to_i8(&self) -> Option<i8> { impl_to_primitive_uint_to_int!(i8, *self) }
            #[inline]
            fn to_i16(&self) -> Option<i16> { impl_to_primitive_uint_to_int!(i16, *self) }
            #[inline]
            fn to_i32(&self) -> Option<i32> { impl_to_primitive_uint_to_int!(i32, *self) }
            #[inline]
            fn to_i64(&self) -> Option<i64> { impl_to_primitive_uint_to_int!(i64, *self) }
            #[inline]
            fn to_usize(&self) -> Option<usize> {
                impl_to_primitive_uint_to_uint!($T, usize, *self)
            }
            #[inline]
            fn to_u8(&self) -> Option<u8> { impl_to_primitive_uint_to_uint!($T, u8, *self) }
            #[inline]
            fn to_u16(&self) -> Option<u16> { impl_to_primitive_uint_to_uint!($T, u16, *self) }
            #[inline]
            fn to_u32(&self) -> Option<u32> { impl_to_primitive_uint_to_uint!($T, u32, *self) }
            #[inline]
            fn to_u64(&self) -> Option<u64> { impl_to_primitive_uint_to_uint!($T, u64, *self) }
            #[inline]
            fn to_f32(&self) -> Option<f32> { Some(*self as f32) }
            #[inline]
            fn to_f64(&self) -> Option<f64> { Some(*self as f64) }
        }
    )
}
// Checked conversions out of every unsigned primitive.
impl_to_primitive_uint! { usize }
impl_to_primitive_uint! { u8 }
impl_to_primitive_uint! { u16 }
impl_to_primitive_uint! { u32 }
impl_to_primitive_uint! { u64 }
// Shared body for float -> float conversions in `ToPrimitive` impls.
// Widening (or same-size) casts always succeed; a narrowing cast
// (f64 -> f32) must be range-checked against the *destination* type's
// finite bounds.
//
// Bug fix: the check previously compared against `$SrcT::MAX` (the
// source's own maximum), which is always satisfied by any finite source
// value, so e.g. `1e300f64.to_f32()` returned `Some(inf)` instead of
// `None`. The bound must come from `$DstT`.
macro_rules! impl_to_primitive_float_to_float {
    ($SrcT:ident, $DstT:ident, $slf:expr) => (
        if size_of::<$SrcT>() <= size_of::<$DstT>() {
            Some($slf as $DstT)
        } else {
            let n = $slf as f64;
            // Finite range of the destination type.
            let max_value: $DstT = ::std::$DstT::MAX;
            // NaN and out-of-range values (including infinities) fail
            // both comparisons and map to `None`, as before.
            if -max_value as f64 <= n && n <= max_value as f64 {
                Some($slf as $DstT)
            } else {
                None
            }
        }
    )
}
// Generates a complete `ToPrimitive` impl for a float type. Float ->
// float conversions are range-checked by the helper macro above.
// NOTE(review): the float -> integer conversions below are bare `as`
// casts with no range or NaN handling, so out-of-range input never
// yields `None` here (modern rustc saturates such casts; very old
// compilers left them undefined). Confirm callers accept that.
macro_rules! impl_to_primitive_float {
    ($T:ident) => (
        impl ToPrimitive for $T {
            #[inline]
            fn to_isize(&self) -> Option<isize> { Some(*self as isize) }
            #[inline]
            fn to_i8(&self) -> Option<i8> { Some(*self as i8) }
            #[inline]
            fn to_i16(&self) -> Option<i16> { Some(*self as i16) }
            #[inline]
            fn to_i32(&self) -> Option<i32> { Some(*self as i32) }
            #[inline]
            fn to_i64(&self) -> Option<i64> { Some(*self as i64) }
            #[inline]
            fn to_usize(&self) -> Option<usize> { Some(*self as usize) }
            #[inline]
            fn to_u8(&self) -> Option<u8> { Some(*self as u8) }
            #[inline]
            fn to_u16(&self) -> Option<u16> { Some(*self as u16) }
            #[inline]
            fn to_u32(&self) -> Option<u32> { Some(*self as u32) }
            #[inline]
            fn to_u64(&self) -> Option<u64> { Some(*self as u64) }
            #[inline]
            fn to_f32(&self) -> Option<f32> { impl_to_primitive_float_to_float!($T, f32, *self) }
            #[inline]
            fn to_f64(&self) -> Option<f64> { impl_to_primitive_float_to_float!($T, f64, *self) }
        }
    )
}
impl_to_primitive_float! { f32 }
impl_to_primitive_float! { f64 }
/// A generic trait for converting a number to a value.
///
/// Only `from_i64` and `from_u64` are required; the remaining
/// conversions widen the argument and delegate to those by default,
/// returning `None` when this type cannot represent the value.
pub trait FromPrimitive: Sized {
    /// Converts an `isize`; `None` if this type cannot represent it.
    #[inline]
    fn from_isize(n: isize) -> Option<Self> {
        Self::from_i64(n as i64)
    }
    /// Converts an `i8`; `None` if this type cannot represent it.
    #[inline]
    fn from_i8(n: i8) -> Option<Self> {
        Self::from_i64(n as i64)
    }
    /// Converts an `i16`; `None` if this type cannot represent it.
    #[inline]
    fn from_i16(n: i16) -> Option<Self> {
        Self::from_i64(n as i64)
    }
    /// Converts an `i32`; `None` if this type cannot represent it.
    #[inline]
    fn from_i32(n: i32) -> Option<Self> {
        Self::from_i64(n as i64)
    }
    /// Converts an `i64`; `None` if this type cannot represent it.
    fn from_i64(n: i64) -> Option<Self>;
    /// Converts a `usize`; `None` if this type cannot represent it.
    #[inline]
    fn from_usize(n: usize) -> Option<Self> {
        Self::from_u64(n as u64)
    }
    /// Converts an `u8`; `None` if this type cannot represent it.
    #[inline]
    fn from_u8(n: u8) -> Option<Self> {
        Self::from_u64(n as u64)
    }
    /// Converts an `u16`; `None` if this type cannot represent it.
    #[inline]
    fn from_u16(n: u16) -> Option<Self> {
        Self::from_u64(n as u64)
    }
    /// Converts an `u32`; `None` if this type cannot represent it.
    #[inline]
    fn from_u32(n: u32) -> Option<Self> {
        Self::from_u64(n as u64)
    }
    /// Converts an `u64`; `None` if this type cannot represent it.
    fn from_u64(n: u64) -> Option<Self>;
    /// Converts a `f32`; `None` if this type cannot represent it.
    #[inline]
    fn from_f32(n: f32) -> Option<Self> {
        Self::from_f64(n as f64)
    }
    /// Converts a `f64`; `None` if this type cannot represent it.
    ///
    /// The default truncates toward zero through `i64`.
    #[inline]
    fn from_f64(n: f64) -> Option<Self> {
        Self::from_i64(n as i64)
    }
}
// Generates a `FromPrimitive` impl for a primitive type by delegating
// each `from_*` method to the matching checked `ToPrimitive` conversion
// (`$to_ty`) on the argument. `from_isize`/`from_usize` keep the trait
// defaults, which route through i64/u64.
macro_rules! impl_from_primitive {
    ($T:ty, $to_ty:ident) => (
        #[allow(deprecated)]
        impl FromPrimitive for $T {
            #[inline] fn from_i8(n: i8) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_i16(n: i16) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_i32(n: i32) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_i64(n: i64) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_u8(n: u8) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_u16(n: u16) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_u32(n: u32) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_u64(n: u64) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_f32(n: f32) -> Option<$T> { n.$to_ty() }
            #[inline] fn from_f64(n: f64) -> Option<$T> { n.$to_ty() }
        }
    )
}
// `FromPrimitive` for every primitive numeric type.
impl_from_primitive! { isize, to_isize }
impl_from_primitive! { i8, to_i8 }
impl_from_primitive! { i16, to_i16 }
impl_from_primitive! { i32, to_i32 }
impl_from_primitive! { i64, to_i64 }
impl_from_primitive! { usize, to_usize }
impl_from_primitive! { u8, to_u8 }
impl_from_primitive! { u16, to_u16 }
impl_from_primitive! { u32, to_u32 }
impl_from_primitive! { u64, to_u64 }
impl_from_primitive! { f32, to_f32 }
impl_from_primitive! { f64, to_f64 }
/// Cast from one machine scalar to another.
///
/// # Examples
///
/// ```
/// use num;
///
/// let twenty: f32 = num::traits::cast(0x14).unwrap();
/// assert_eq!(twenty, 20f32);
/// ```
///
#[inline]
pub fn cast<T: NumCast,U: NumCast>(n: T) -> Option<U> {
NumCast::from(n)
}
/// An interface for casting between machine scalars.
pub trait NumCast: ToPrimitive {
    /// Creates a number from another value that can be converted into
    /// a primitive via the `ToPrimitive` trait.
    ///
    /// Returns `None` when the value cannot be represented by `Self`.
    fn from<T: ToPrimitive>(n: T) -> Option<Self>;
}
// Generates a `NumCast` impl for a primitive by delegating to the
// matching checked `ToPrimitive` conversion (`$conv`) on the argument.
macro_rules! impl_num_cast {
    ($T:ty, $conv:ident) => (
        impl NumCast for $T {
            #[inline]
            #[allow(deprecated)]
            fn from<N: ToPrimitive>(n: N) -> Option<$T> {
                // `$conv` could be generated using `concat_idents!`, but that
                // macro seems to be broken at the moment
                n.$conv()
            }
        }
    )
}
// `NumCast` for every primitive numeric type.
impl_num_cast! { u8, to_u8 }
impl_num_cast! { u16, to_u16 }
impl_num_cast! { u32, to_u32 }
impl_num_cast! { u64, to_u64 }
impl_num_cast! { usize, to_usize }
impl_num_cast! { i8, to_i8 }
impl_num_cast! { i16, to_i16 }
impl_num_cast! { i32, to_i32 }
impl_num_cast! { i64, to_i64 }
impl_num_cast! { isize, to_isize }
impl_num_cast! { f32, to_f32 }
impl_num_cast! { f64, to_f64 }
/// Operations available on primitive floating point types.
///
/// Implemented for `f32` and `f64` (see `float_impl!` below); every
/// method mirrors the inherent `std` method of the same name.
pub trait Float
    : Num
    + Copy
    + NumCast
    + PartialOrd
    + Neg<Output = Self>
{
    /// Returns the `NaN` value.
    fn nan() -> Self;
    /// Returns the positive infinite value.
    fn infinity() -> Self;
    /// Returns the negative infinite value.
    fn neg_infinity() -> Self;
    /// Returns `-0.0`.
    fn neg_zero() -> Self;
    /// Returns the smallest finite value that this type can represent.
    fn min_value() -> Self;
    /// Returns the largest finite value that this type can represent.
    fn max_value() -> Self;
    /// Returns `true` if this value is `NaN` and false otherwise.
    fn is_nan(self) -> bool;
    /// Returns `true` if this value is positive or negative infinity.
    fn is_infinite(self) -> bool;
    /// Returns `true` if this number is neither infinite nor `NaN`.
    fn is_finite(self) -> bool;
    /// Returns `true` if the number is neither zero, infinite,
    /// subnormal, nor `NaN`.
    fn is_normal(self) -> bool;
    /// Returns the floating point category of the number. If only one
    /// property is going to be tested, the specific predicate above is
    /// generally faster.
    fn classify(self) -> FpCategory;
    /// Returns the largest integer less than or equal to a number.
    fn floor(self) -> Self;
    /// Returns the smallest integer greater than or equal to a number.
    fn ceil(self) -> Self;
    /// Returns the nearest integer; half-way cases round away from
    /// `0.0`.
    fn round(self) -> Self;
    /// Returns the integer part of a number.
    fn trunc(self) -> Self;
    /// Returns the fractional part of a number.
    fn fract(self) -> Self;
    /// Computes the absolute value of `self`; `NaN` stays `NaN`.
    fn abs(self) -> Self;
    /// Returns `1.0` if the number is positive (including `+0.0` and
    /// `infinity()`), `-1.0` if it is negative (including `-0.0` and
    /// `neg_infinity()`), and `NaN` for `NaN`.
    fn signum(self) -> Self;
    /// Returns `true` if `self` is positive, including `+0.0` and
    /// `Float::infinity()`.
    fn is_sign_positive(self) -> bool;
    /// Returns `true` if `self` is negative, including `-0.0` and
    /// `Float::neg_infinity()`.
    fn is_sign_negative(self) -> bool;
    /// Fused multiply-add: computes `(self * a) + b` with only one
    /// rounding error.
    fn mul_add(self, a: Self, b: Self) -> Self;
    /// Takes the reciprocal (inverse) of a number, `1/x`.
    fn recip(self) -> Self;
    /// Raises a number to an integer power; generally faster than
    /// `powf`.
    fn powi(self, n: i32) -> Self;
    /// Raises a number to a floating point power.
    fn powf(self, n: Self) -> Self;
    /// Takes the square root; returns `NaN` for negative input.
    fn sqrt(self) -> Self;
    /// Returns `e^(self)`, the exponential function.
    fn exp(self) -> Self;
    /// Returns `2^(self)`.
    fn exp2(self) -> Self;
    /// Returns the natural logarithm of the number.
    fn ln(self) -> Self;
    /// Returns the logarithm of the number with respect to an arbitrary
    /// `base`.
    fn log(self, base: Self) -> Self;
    /// Returns the base 2 logarithm of the number.
    fn log2(self) -> Self;
    /// Returns the base 10 logarithm of the number.
    fn log10(self) -> Self;
    /// Returns the maximum of the two numbers.
    fn max(self, other: Self) -> Self;
    /// Returns the minimum of the two numbers.
    fn min(self, other: Self) -> Self;
    /// The positive difference of two numbers: `0.0` if
    /// `self <= other`, otherwise `self - other`.
    fn abs_sub(self, other: Self) -> Self;
    /// Takes the cubic root of a number.
    fn cbrt(self) -> Self;
    /// Length of the hypotenuse of a right-angle triangle with legs
    /// `self` and `other`: `sqrt(self^2 + other^2)`.
    fn hypot(self, other: Self) -> Self;
    /// Computes the sine of a number (in radians).
    fn sin(self) -> Self;
    /// Computes the cosine of a number (in radians).
    fn cos(self) -> Self;
    /// Computes the tangent of a number (in radians).
    fn tan(self) -> Self;
    /// Computes the arcsine, in `[-pi/2, pi/2]` radians; `NaN` if the
    /// number is outside `[-1, 1]`.
    fn asin(self) -> Self;
    /// Computes the arccosine, in `[0, pi]` radians; `NaN` if the
    /// number is outside `[-1, 1]`.
    fn acos(self) -> Self;
    /// Computes the arctangent, in `[-pi/2, pi/2]` radians.
    fn atan(self) -> Self;
    /// Computes the four quadrant arctangent of `self` (`y`) and
    /// `other` (`x`), in `(-pi, pi]` radians.
    fn atan2(self, other: Self) -> Self;
    /// Simultaneously computes the sine and cosine of the number,
    /// returning `(sin(x), cos(x))`.
    fn sin_cos(self) -> (Self, Self);
    /// Returns `e^(self) - 1`, accurate even when the number is close
    /// to zero.
    fn exp_m1(self) -> Self;
    /// Returns `ln(1+n)` (natural logarithm) more accurately than
    /// computing the operations separately.
    fn ln_1p(self) -> Self;
    /// Hyperbolic sine function.
    fn sinh(self) -> Self;
    /// Hyperbolic cosine function.
    fn cosh(self) -> Self;
    /// Hyperbolic tangent function.
    fn tanh(self) -> Self;
    /// Inverse hyperbolic sine function.
    fn asinh(self) -> Self;
    /// Inverse hyperbolic cosine function.
    fn acosh(self) -> Self;
    /// Inverse hyperbolic tangent function.
    fn atanh(self) -> Self;
    /// Returns the mantissa, base 2 exponent, and sign as integers such
    /// that `sign * mantissa * 2^exponent` reconstructs the original
    /// value.
    fn integer_decode(self) -> (u64, i16, i8);
}
// Generates the `Float` impl for a primitive float type `$T`. Every
// method forwards to the type's inherent `std` method (or constant) of
// the same name; `$decode` names the free function used for
// `integer_decode`.
macro_rules! float_impl {
    ($T:ident $decode:ident) => (
        impl Float for $T {
            fn nan() -> Self {
                ::std::$T::NAN
            }
            fn infinity() -> Self {
                ::std::$T::INFINITY
            }
            fn neg_infinity() -> Self {
                ::std::$T::NEG_INFINITY
            }
            fn neg_zero() -> Self {
                -0.0
            }
            fn min_value() -> Self {
                ::std::$T::MIN
            }
            fn max_value() -> Self {
                ::std::$T::MAX
            }
            fn is_nan(self) -> bool {
                <$T>::is_nan(self)
            }
            fn is_infinite(self) -> bool {
                <$T>::is_infinite(self)
            }
            fn is_finite(self) -> bool {
                <$T>::is_finite(self)
            }
            fn is_normal(self) -> bool {
                <$T>::is_normal(self)
            }
            fn classify(self) -> FpCategory {
                <$T>::classify(self)
            }
            fn floor(self) -> Self {
                <$T>::floor(self)
            }
            fn ceil(self) -> Self {
                <$T>::ceil(self)
            }
            fn round(self) -> Self {
                <$T>::round(self)
            }
            fn trunc(self) -> Self {
                <$T>::trunc(self)
            }
            fn fract(self) -> Self {
                <$T>::fract(self)
            }
            fn abs(self) -> Self {
                <$T>::abs(self)
            }
            fn signum(self) -> Self {
                <$T>::signum(self)
            }
            fn is_sign_positive(self) -> bool {
                <$T>::is_sign_positive(self)
            }
            fn is_sign_negative(self) -> bool {
                <$T>::is_sign_negative(self)
            }
            fn mul_add(self, a: Self, b: Self) -> Self {
                <$T>::mul_add(self, a, b)
            }
            fn recip(self) -> Self {
                <$T>::recip(self)
            }
            fn powi(self, n: i32) -> Self {
                <$T>::powi(self, n)
            }
            fn powf(self, n: Self) -> Self {
                <$T>::powf(self, n)
            }
            fn sqrt(self) -> Self {
                <$T>::sqrt(self)
            }
            fn exp(self) -> Self {
                <$T>::exp(self)
            }
            fn exp2(self) -> Self {
                <$T>::exp2(self)
            }
            fn ln(self) -> Self {
                <$T>::ln(self)
            }
            fn log(self, base: Self) -> Self {
                <$T>::log(self, base)
            }
            fn log2(self) -> Self {
                <$T>::log2(self)
            }
            fn log10(self) -> Self {
                <$T>::log10(self)
            }
            fn max(self, other: Self) -> Self {
                <$T>::max(self, other)
            }
            fn min(self, other: Self) -> Self {
                <$T>::min(self, other)
            }
            // NOTE(review): std's `abs_sub` is deprecated; forwarding is
            // kept to preserve the existing behaviour.
            fn abs_sub(self, other: Self) -> Self {
                <$T>::abs_sub(self, other)
            }
            fn cbrt(self) -> Self {
                <$T>::cbrt(self)
            }
            fn hypot(self, other: Self) -> Self {
                <$T>::hypot(self, other)
            }
            fn sin(self) -> Self {
                <$T>::sin(self)
            }
            fn cos(self) -> Self {
                <$T>::cos(self)
            }
            fn tan(self) -> Self {
                <$T>::tan(self)
            }
            fn asin(self) -> Self {
                <$T>::asin(self)
            }
            fn acos(self) -> Self {
                <$T>::acos(self)
            }
            fn atan(self) -> Self {
                <$T>::atan(self)
            }
            fn atan2(self, other: Self) -> Self {
                <$T>::atan2(self, other)
            }
            fn sin_cos(self) -> (Self, Self) {
                <$T>::sin_cos(self)
            }
            fn exp_m1(self) -> Self {
                <$T>::exp_m1(self)
            }
            fn ln_1p(self) -> Self {
                <$T>::ln_1p(self)
            }
            fn sinh(self) -> Self {
                <$T>::sinh(self)
            }
            fn cosh(self) -> Self {
                <$T>::cosh(self)
            }
            fn tanh(self) -> Self {
                <$T>::tanh(self)
            }
            fn asinh(self) -> Self {
                <$T>::asinh(self)
            }
            fn acosh(self) -> Self {
                <$T>::acosh(self)
            }
            fn atanh(self) -> Self {
                <$T>::atanh(self)
            }
            fn integer_decode(self) -> (u64, i16, i8) {
                $decode(self)
            }
        }
    )
}
/// Decomposes an `f32` into `(mantissa, exponent, sign)` such that
/// `sign * mantissa * 2^exponent` reconstructs the original value.
fn integer_decode_f32(f: f32) -> (u64, i16, i8) {
    // `to_bits` replaces the previous `unsafe { mem::transmute(f) }`;
    // it performs the same bit-level reinterpretation safely.
    let bits: u32 = f.to_bits();
    let sign: i8 = if bits >> 31 == 0 { 1 } else { -1 };
    let mut exponent: i16 = ((bits >> 23) & 0xff) as i16;
    let mantissa = if exponent == 0 {
        // Subnormal: no implicit leading bit; the shift keeps the
        // exponent arithmetic below uniform with the normal case.
        (bits & 0x7fffff) << 1
    } else {
        // Normal: restore the implicit leading 1 bit.
        (bits & 0x7fffff) | 0x800000
    };
    // Exponent bias (127) + mantissa shift (23)
    exponent -= 127 + 23;
    (mantissa as u64, exponent, sign)
}
/// Decomposes an `f64` into `(mantissa, exponent, sign)` such that
/// `sign * mantissa * 2^exponent` reconstructs the original value.
fn integer_decode_f64(f: f64) -> (u64, i16, i8) {
    // `to_bits` replaces the previous `unsafe { mem::transmute(f) }`;
    // it performs the same bit-level reinterpretation safely.
    let bits: u64 = f.to_bits();
    let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };
    let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;
    let mantissa = if exponent == 0 {
        // Subnormal: no implicit leading bit; the shift keeps the
        // exponent arithmetic below uniform with the normal case.
        (bits & 0xfffffffffffff) << 1
    } else {
        // Normal: restore the implicit leading 1 bit.
        (bits & 0xfffffffffffff) | 0x10000000000000
    };
    // Exponent bias (1023) + mantissa shift (52)
    exponent -= 1023 + 52;
    (mantissa, exponent, sign)
}
// Instantiate the trait impl for both primitive float types, wiring each
// to its matching integer-decode helper above.
float_impl!(f32 integer_decode_f32);
float_impl!(f64 integer_decode_f64);
|
// Generated register/field reader type aliases for DDRCTRL_STAT
// (svd2rust-style `#[doc]` attributes preserved verbatim).
#[doc = "Reader of register DDRCTRL_STAT"]
pub type R = crate::R<u32, super::DDRCTRL_STAT>;
#[doc = "Reader of field `OPERATING_MODE`"]
pub type OPERATING_MODE_R = crate::R<u8, u8>;
#[doc = "Reader of field `SELFREF_TYPE`"]
pub type SELFREF_TYPE_R = crate::R<u8, u8>;
#[doc = "Reader of field `SELFREF_CAM_NOT_EMPTY`"]
pub type SELFREF_CAM_NOT_EMPTY_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bits 0:2 - OPERATING_MODE"]
    #[inline(always)]
    pub fn operating_mode(&self) -> OPERATING_MODE_R {
        // Low three bits of the status word.
        let field = self.bits & 0x07;
        OPERATING_MODE_R::new(field as u8)
    }
    #[doc = "Bits 4:5 - SELFREF_TYPE"]
    #[inline(always)]
    pub fn selfref_type(&self) -> SELFREF_TYPE_R {
        // Two-bit field starting at bit 4.
        let field = (self.bits >> 4) & 0x03;
        SELFREF_TYPE_R::new(field as u8)
    }
    #[doc = "Bit 12 - SELFREF_CAM_NOT_EMPTY"]
    #[inline(always)]
    pub fn selfref_cam_not_empty(&self) -> SELFREF_CAM_NOT_EMPTY_R {
        // Single flag bit at position 12.
        let flag = (self.bits >> 12) & 0x01;
        SELFREF_CAM_NOT_EMPTY_R::new(flag != 0)
    }
}
|
// 17.7 Baby names
// Uses the UnionSet data structure, which has ammortized O(1)
// insertion and retrieval. O(n) time, O(n) extra space.
use std::collections::HashMap;
/// Minimal union-find (disjoint-set) interface.
trait UnionSet<Id> {
    /// Merge the sets containing `a` and `b`.
    fn join(&mut self, a: Id, b: Id);
    /// Return the representative (root) of the set containing `id`.
    fn get_union(&mut self, id: Id) -> Id;
}
impl UnionSet<usize> for HashMap<usize, usize> {
    fn join(&mut self, a: usize, b: usize) {
        // Ensure both elements exist, each initially its own parent.
        // (Previously `b` was seeded with parent `a`, and the link below was
        // made from b's *immediate* parent instead of its root — which could
        // detach part of an existing set once chains grew past one link.)
        self.entry(a).or_insert(a);
        self.entry(b).or_insert(b);
        // Union must link ROOTS, otherwise transitivity breaks.
        let a_root = self.get_union(a);
        let b_root = self.get_union(b);
        if a_root != b_root {
            self.insert(b_root, a_root);
        }
    }
    fn get_union(&mut self, id: usize) -> usize {
        // Follow parent pointers until reaching a self-parented root.
        // (No path compression; lookups stay O(chain length).)
        let mut id = id;
        loop {
            let parent = *self.get(&id).unwrap();
            if id == parent {
                break;
            }
            id = parent;
        }
        id
    }
}
/// Aggregates name frequencies, counting names declared equivalent together.
struct BabyNames {
    // Dense integer id assigned to each distinct name.
    name_ids: HashMap<&'static str, usize>,
    // Union-find parent map over name ids (see the `UnionSet` trait).
    union_set: HashMap<usize, usize>,
    // Total frequency keyed by each set's representative id.
    counts: HashMap<usize, usize>,
}
impl BabyNames {
    /// Build the lookup tables from frequency data plus equivalence pairs.
    /// Every name in `frequencies` must also appear in `equivalencies`;
    /// otherwise the id lookup below panics (unchanged behavior).
    fn from(frequencies: &[(&str, usize)], equivalencies: &'static [(&str, &str)]) -> Self {
        let mut name_ids = HashMap::new();
        let mut counts = HashMap::new();
        let mut union_set = HashMap::new();
        // Hand out dense ids in first-seen order.
        let mut next_id = 0;
        let mut fresh_id = || {
            let id = next_id;
            next_id += 1;
            id
        };
        // Register every name mentioned in an equivalence, then union the pair.
        for &(first, second) in equivalencies.iter() {
            let first_id = *name_ids.entry(first).or_insert_with(&mut fresh_id);
            let second_id = *name_ids.entry(second).or_insert_with(&mut fresh_id);
            union_set.join(first_id, second_id);
        }
        // Accumulate each frequency under its set representative.
        for (name, count) in frequencies {
            let raw_id = *name_ids.get(name).unwrap();
            let root = union_set.get_union(raw_id);
            *counts.entry(root).or_insert(0usize) += *count;
        }
        BabyNames {
            name_ids,
            union_set,
            counts,
        }
    }
    /// Total frequency of the equivalence class containing `name`.
    /// Panics if `name` was never registered.
    fn get_count(&mut self, name: &str) -> usize {
        let raw_id = *self.name_ids.get(name).unwrap();
        let root = self.union_set.get_union(raw_id);
        *self.counts.get(&root).unwrap()
    }
}
/// Table-driven check: frequencies + equivalencies in, expected merged
/// counts out.
#[test]
fn test() {
    const TESTS: &[(&[(&str, usize)], &[(&str, &str)], &[(&str, usize)])] = &[(
        &[
            ("John", 15),
            ("Jon", 12),
            ("Chris", 13),
            ("Kris", 4),
            ("Christopher", 19),
        ],
        &[
            ("Jon", "John"),
            ("John", "Johnny"),
            ("Chris", "Kris"),
            ("Chris", "Christopher"),
        ],
        &[("John", 27), ("Kris", 36)],
    )];
    for test in TESTS {
        let mut babynames = BabyNames::from(test.0, test.1);
        for (name, count) in test.2 {
            // assert_eq! reports both sides on failure, unlike assert!(a == b).
            assert_eq!(babynames.get_count(name), *count);
        }
    }
}
|
#![feature(test)]
// https://github.com/bastion-rs/numanji/issues/1
use std::fmt::{Debug, Error, Formatter};
use std::sync::Arc;
/// A persistent singly-linked list whose tail is shared via `Arc`.
enum ArcList<T> {
    /// A node holding one value and the (shared) rest of the list.
    Cons(T, Arc<Self>),
    /// The empty list.
    Nil,
}
impl<T> ArcList<T> {
    /// Returns `true` when this is the empty list.
    fn is_nil(&self) -> bool {
        // `matches!` replaces the manual match-with-catch-all arm.
        matches!(self, Self::Nil)
    }
}
impl<T: Debug> Debug for ArcList<T> {
    /// Formats the list as comma-separated elements.
    ///
    /// Quirks preserved from the original: a bare `Nil` prints nothing at
    /// all, and the last element carries no trailing separator.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        match self {
            Self::Nil => Ok(()),
            // Final node: print just the element.
            Self::Cons(e, t) if t.is_nil() => write!(f, "{:?}", e),
            // Recurse into the tail (one Debug call per node).
            Self::Cons(e, t) => write!(f, "{:?}, {:?}", e, *(*t)),
        }
    }
}
/// Shared pointer to a list node.
type Ptr<T> = Arc<ArcList<T>>;
/// Prepend `t` to `list`, returning the new head.
fn cons<T>(t: T, list: Ptr<T>) -> Ptr<T> {
    // `list` is owned here, so it moves straight into the node; the previous
    // `list.clone()` bumped (and immediately dropped) the refcount for nothing.
    Arc::new(ArcList::Cons(t, list))
}
fn count_inner<T>(acc: usize, list: Ptr<T>) -> usize {
match &*list {
ArcList::Nil => acc,
ArcList::Cons(_, t) => count_inner(acc + 1, t.clone()),
}
}
fn count<T>(list: Ptr<T>) -> usize {
count_inner(0, list)
}
/// Apply `f` to every element, building a new list; the input is unchanged.
///
/// NOTE(review): recurses once per node, so very long lists can overflow
/// the stack (the benchmarks mitigate with 512 MiB thread stacks). An
/// iterative rewrite would have to build the result in reverse.
fn map<T, U>(f: fn(&T) -> U, list: Ptr<T>) -> Ptr<U> {
    match &*list {
        ArcList::Nil => Arc::new(ArcList::Nil),
        ArcList::Cons(x, t) => Arc::new(ArcList::Cons(f(x), map(f, t.clone()))),
    }
}
// Benchmarks for the ArcList operations (cargo bench; requires the nightly
// `test` crate enabled at the top of this file, plus the `rand` crate).
#[cfg(test)]
mod list_bench {
    extern crate test;
    use super::*;
    use rand::Rng;
    use std::sync::Arc;
    use test::Bencher;
    // Number of nodes built per benchmark iteration.
    const SCALE: usize = 10000;
    // Build a SCALE-long list of random values, then count it.
    #[bench]
    fn long_cons_then_count(bencher: &mut Bencher) {
        bencher.iter(|| {
            let mut rng = rand::thread_rng();
            let mut a = Arc::new(ArcList::Nil);
            for _ in 0..SCALE {
                a = cons(rng.gen::<usize>(), a);
            }
            assert_eq!(count(a), SCALE)
        });
    }
    // Build a SCALE-long list, then map over it (result discarded).
    #[bench]
    fn long_cons_then_map(bencher: &mut Bencher) {
        bencher.iter(|| {
            let mut rng = rand::thread_rng();
            let mut a = Arc::new(ArcList::Nil);
            for _ in 0..SCALE {
                a = cons(rng.gen::<usize>(), a);
            }
            map(|x| x + 1, a);
        });
    }
    // Six threads each build and count their own list. The 512 MiB stacks
    // accommodate the recursive count/drop over 10k nodes.
    #[bench]
    fn long_cons_then_count_in_multi_threads(bencher: &mut Bencher) {
        bencher.iter(|| {
            let mut handles = Vec::new();
            for _ in 0..6 {
                handles.push(
                    std::thread::Builder::new()
                        .stack_size(512 * 1024 * 1024)
                        .spawn(|| {
                            let mut rng = rand::thread_rng();
                            let mut a = Arc::new(ArcList::Nil);
                            for _ in 0..SCALE {
                                a = cons(rng.gen::<usize>(), a);
                            }
                            assert_eq!(count(a), SCALE)
                        })
                        .unwrap(),
                );
            }
            for i in handles {
                i.join().unwrap();
            }
        });
    }
    // One shared list, mapped concurrently by six threads — exercises the
    // Arc sharing of a single structure across threads.
    #[bench]
    fn long_cons_then_map_across_multi_threads(bencher: &mut Bencher) {
        bencher.iter(|| {
            let mut rng = rand::thread_rng();
            let mut handles = Vec::new();
            let mut a = Arc::new(ArcList::Nil);
            for _ in 0..SCALE {
                a = cons(rng.gen::<usize>(), a);
            }
            for _ in 0..6 {
                let a = a.clone();
                handles.push(
                    std::thread::Builder::new()
                        .stack_size(512 * 1024 * 1024)
                        .spawn(move || {
                            map(|x| x + 1, a);
                        })
                        .unwrap(),
                );
            }
            for i in handles {
                i.join().unwrap();
            }
        });
    }
}
|
use super::{into_std_seek_pos, DynamicAssetImpl, DynamicDataRecorderImpl};
use rustzx_core::{
error::IoError,
host::{DataRecorder, LoadableAsset, SeekFrom, SeekableAsset},
};
use std::{
fs::File,
io::{Read, Seek, Write},
};
/// Host-side asset backed by a filesystem `File`, implementing the
/// rustzx-core asset/recorder traits below.
pub struct FileAsset {
    file: File,
}
impl From<File> for FileAsset {
    /// Wrap an already-opened file handle.
    fn from(file: File) -> Self {
        Self { file }
    }
}
impl SeekableAsset for FileAsset {
fn seek(&mut self, pos: SeekFrom) -> Result<usize, IoError> {
self.file
.seek(into_std_seek_pos(pos))
.map_err(|e| {
log::error!("Failed to seeek asset: {}", e);
IoError::HostAssetImplFailed
})
.map(|count| count as usize)
}
}
impl LoadableAsset for FileAsset {
    /// Read into `buf`, logging any I/O failure and mapping it to the
    /// opaque `IoError::HostAssetImplFailed`.
    fn read(&mut self, buf: &mut [u8]) -> Result<usize, IoError> {
        match self.file.read(buf) {
            Ok(count) => Ok(count),
            Err(e) => {
                log::error!("Failed to read asset: {}", e);
                Err(IoError::HostAssetImplFailed)
            }
        }
    }
}
impl DataRecorder for FileAsset {
    /// Write `buf` to the backing file, logging any I/O failure and mapping
    /// it to the opaque `IoError::HostAssetImplFailed`.
    fn write(&mut self, buf: &[u8]) -> Result<usize, IoError> {
        match self.file.write(buf) {
            Ok(count) => Ok(count),
            Err(e) => {
                log::error!("Failed to write data to file: {}", e);
                Err(IoError::HostAssetImplFailed)
            }
        }
    }
}
// Marker impls opting FileAsset into the dynamic asset/recorder wrappers
// imported from `super` (no methods to implement).
impl DynamicAssetImpl for FileAsset {}
impl DynamicDataRecorderImpl for FileAsset {}
|
use plotly::common::{DashType, Line, Mode, Title};
use plotly::layout::Axis;
use plotly::{Layout, Plot, Scatter};
/// Render a markers-only scatter plot of `y` against `x`, with trace label
/// `c` and the given `title`, and open it in the default browser.
pub fn scatter_plot(x: Vec<f32>, y: Vec<f32>, c: &str, title: &str) {
    let points = Scatter::new(x, y).mode(Mode::Markers).name(c);
    let x_axis = Axis::new().title(Title::new("X")).show_grid(true).zero_line(true);
    let y_axis = Axis::new().title(Title::new("Y")).show_line(false);
    let layout = Layout::new().title(Title::new(title)).xaxis(x_axis).yaxis(y_axis);
    let mut plot = Plot::new();
    plot.set_layout(layout);
    plot.add_trace(points);
    plot.show();
}
/// Render a lines+markers plot with a dash-dot line style, labelled `c`,
/// and open it in the default browser.
#[allow(dead_code)]
pub fn line_dash(x: Vec<f32>, y: Vec<f32>, c: &str, title: &str) {
    let styled_line = Line::new().dash(DashType::DashDot);
    let trace = Scatter::new(x, y)
        .mode(Mode::LinesMarkers)
        .line(styled_line)
        .name(c);
    let x_axis = Axis::new().title(Title::new("X")).show_grid(true).zero_line(true);
    let y_axis = Axis::new().title(Title::new("Y")).show_line(false);
    let layout = Layout::new().title(Title::new(title)).xaxis(x_axis).yaxis(y_axis);
    let mut plot = Plot::new();
    plot.set_layout(layout);
    plot.add_trace(trace);
    plot.show();
}
|
#![allow(non_snake_case)]
use std::collections::HashMap;
use std::path::Path;
use image;
use image::GenericImage;
use cgmath::{vec2, vec3, Rad, InnerSpace};
use super::mesh::{Mesh, Vertex, Texture};
use crate::utils::common::*;
use crate::entity::Entity;
use crate::types::*;
// Vertical scaling applied to normalised heightmap samples (world units).
const SCALE: f32 = 40.0;
// Normalisation reference for raw pixel values (see getHeightFromImage).
const MAX_PIXEL_COLOR: f32 = 128 as f32;
// Terrain edge length in world units.
pub const SIZE: f32 = 800.0;
// NOTE(review): presumably a margin kept at the terrain boundary — confirm
// against the callers of BOUND_MAX.
pub const DEADZONE: f32 = 5.0;
pub const BOUND_MAX: f32 = SIZE - DEADZONE;
// Cached height samples keyed by (x, z) heightmap grid coordinates.
type Heights = HashMap<(u32, u32), f32>;
/// Heightmap-based terrain: a renderable entity plus the per-grid-cell
/// height cache used for world-space height queries.
pub struct Terrain {
    pub entity: Entity,
    // Height sample per (x, z) grid coordinate of the heightmap.
    heights: Heights
}
impl Terrain {
    /// Build a terrain entity from the bundled heightmap texture, placed at
    /// `worldPos` with the given orientation and scale.
    pub fn new(worldPos: Point3, orientation: cgmath::Vector3<Rad<f32>>, scale: f32) -> Terrain {
        let (mesh, heightArr) = genTerrain("resources/textures/heightmap.png");
        let e = Entity::new(vec![mesh], Point3{ x: worldPos.x, y: worldPos.y, z: worldPos.z }, orientation, scale, 0.0);
        Terrain { entity: e, heights: heightArr }
    }
    /// Height of the terrain surface at world coordinates (worldX, worldZ),
    /// interpolated within the grid triangle that contains the point.
    ///
    /// NOTE(review): points left of / behind the terrain origin give a
    /// negative terrainX/terrainZ; the `as u32` casts then saturate to 0 and
    /// `%` keeps the sign, so values off the terrain edge are suspect.
    /// Callers appear expected to stay within [0, SIZE) — TODO confirm.
    pub fn getHeight(&self, worldX: f32, worldZ: f32) -> f32 {
        let terrainX = worldX - self.entity.worldPos.x;
        let terrainZ = worldZ - self.entity.worldPos.z;
        // heights holds gridSize^2 samples, so sqrt(len) recovers gridSize.
        let squareSize = SIZE / ((self.heights.len() as f64).sqrt() - 1.0) as f32;
        let x = (terrainX / squareSize).floor() as u32;
        let z = (terrainZ / squareSize).floor() as u32;
        // Fractional position inside the grid square, in [0, 1).
        let xCoordInSquare = (terrainX % squareSize)/squareSize;
        let zCoordInSquare = (terrainZ % squareSize)/squareSize;
        // Each square is split along its anti-diagonal into two triangles;
        // pick the triangle the query point falls in, with missing cache
        // entries defaulting to height 0.
        if xCoordInSquare <= (1.0 - zCoordInSquare) {
            barryCentric(
                vec3(0.0, *self.heights.get(&(x, z)).unwrap_or(&0.0), 0.0),
                vec3(1.0, *self.heights.get(&(x+1, z)).unwrap_or(&0.0), 0.0),
                vec3(0.0, *self.heights.get(&(x, z+1)).unwrap_or(&0.0), 1.0), vec2(xCoordInSquare, zCoordInSquare)
            )
        } else {
            barryCentric(
                vec3(1.0, *self.heights.get(&(x+1, z)).unwrap_or(&0.0), 0.0),
                vec3(1.0, *self.heights.get(&(x+1,z+1)).unwrap_or(&0.0), 1.0),
                vec3(0.0, *self.heights.get(&(x,z+1)).unwrap_or(&0.0), 1.0), vec2(xCoordInSquare, zCoordInSquare)
            )
        }
    }
}
/// Load the heightmap image, build the terrain mesh (vertices, indices and
/// two textures) and return it along with the per-vertex height cache.
fn genTerrain(heightMap: &str) -> (Mesh, Heights) {
    let img = image::open(&Path::new(&heightMap)).expect("Heightmap failed to load");
    // Grid resolution comes from the image height — a square map is assumed
    // throughout (width is never consulted here).
    let VERTEX_COUNT = img.height();
    let (vertices, heights) = genVertices(img, VERTEX_COUNT);
    let indices = genIndices(VERTEX_COUNT);
    let (grass, rock) = ("grass.png", "rock.jpg");
    let dir = "resources/textures";
    // NOTE(review): textureFromFile is an unsafe extern helper — presumably
    // it issues raw graphics calls and needs a live context; confirm.
    let grassTexture = Texture {
        id: unsafe { textureFromFile(grass, dir) },
        type_: "textureSampler".into(),
        path: grass.into()
    };
    let rockTexture = Texture {
        id: unsafe{ textureFromFile(rock, dir) },
        type_: "textureSampler".into(),
        path: rock.into()
    };
    let mesh = Mesh::new(vertices, indices, vec![grassTexture, rockTexture]);
    (mesh, heights)
}
/// Sample the heightmap's red channel at (x, z) and rescale it around
/// MAX_PIXEL_COLOR into world units; out-of-range coordinates yield 0.0.
fn getHeightFromImage(x: u32, z: u32, img: &image::DynamicImage) -> f32 {
    // Bounds-check each axis against its own dimension. The original
    // compared `x` against the image *height*, which let `get_pixel` panic
    // for non-square heightmaps taller than they are wide.
    if x >= img.width() || z >= img.height() {
        0.0
    } else {
        // Red channel carries the raw height sample.
        let p = img.get_pixel(x, z).data;
        let mut height = p[0] as f32;
        // Shift then normalise by MAX_PIXEL_COLOR/2, and scale vertically.
        height += MAX_PIXEL_COLOR/2.0;
        height /= MAX_PIXEL_COLOR/2.0;
        height *= SCALE;
        height
    }
}
/// Central-difference surface normal at grid cell (x, z), clamped at the
/// terrain edges. Freshly sampled heights are cached in `heights`.
fn calcNormal(x: u32, z: u32, gridSize: u32, heights: &mut Heights, img: &image::DynamicImage) -> Vector3 {
    // Neighbour coordinates, clamped so edge cells reuse themselves.
    let hLx = if x == 0 { x } else { x-1 };
    let hRx = if x == gridSize - 1 { x } else { x+1 };
    let hDz = if z == 0 { z } else { z-1 };
    let hUz = if z == gridSize - 1 { z } else { z+1 };
    // `or_insert_with` decodes the image pixel only on a cache miss; the
    // previous eager `or_insert(...)` sampled the image even when cached.
    let hL = *heights.entry((hLx, z)).or_insert_with(|| getHeightFromImage(hLx, z, &img));
    let hR = *heights.entry((hRx, z)).or_insert_with(|| getHeightFromImage(hRx, z, &img));
    let hD = *heights.entry((x, hDz)).or_insert_with(|| getHeightFromImage(x, hDz, &img));
    let hU = *heights.entry((x, hUz)).or_insert_with(|| getHeightFromImage(x, hUz, &img));
    // Height gradient in x/z with a fixed vertical component, normalised.
    vec3(hL-hR, 2.0, hD-hU).normalize()
}
/// Build the VERTEX_COUNT x VERTEX_COUNT vertex grid (position, normal and
/// texture coordinates) from the heightmap, returning it with the height cache.
fn genVertices(img: image::DynamicImage, VERTEX_COUNT: u32) -> (Vec<Vertex>, Heights) {
    let mut vertexVec: Vec<Vertex> = Vec::with_capacity((VERTEX_COUNT * VERTEX_COUNT) as usize);
    let mut heights: Heights = HashMap::default();
    for gz in 0..VERTEX_COUNT {
        for gx in 0..VERTEX_COUNT {
            // Lazily sample and cache the height (`or_insert_with` avoids the
            // eager image decode the previous `or_insert(...)` performed on
            // every lookup, hit or miss).
            let height = *heights.entry((gx, gz)).or_insert_with(|| getHeightFromImage(gx, gz, &img));
            // Spread grid coordinates across [0, SIZE] world units.
            let x = (gx as f32)/((VERTEX_COUNT - 1) as f32) * SIZE;
            let y = height;
            let z = (gz as f32)/((VERTEX_COUNT - 1) as f32) * SIZE;
            let n = calcNormal(gx, gz, VERTEX_COUNT, &mut heights, &img);
            // Texture coordinates span [0, 1] over the whole terrain.
            let tX = (gx as f32)/((VERTEX_COUNT - 1) as f32);
            let tZ = (gz as f32)/((VERTEX_COUNT - 1) as f32);
            vertexVec.push(Vertex { Position: vec3(x, y, z), Normal: n, TexCoords: vec2(tX, tZ), ..Vertex::default() });
        }
    }
    (vertexVec, heights)
}
/// Triangle indices for a VERTEX_COUNT x VERTEX_COUNT grid: two CCW
/// triangles per quad, six indices per quad, row-major vertex numbering.
fn genIndices(VERTEX_COUNT: u32) -> Vec<u32> {
    let quad_rows = VERTEX_COUNT - 1;
    let mut indices = Vec::with_capacity((6 * quad_rows * quad_rows) as usize);
    for row in 0..quad_rows {
        for col in 0..quad_rows {
            // Corner indices of the current quad.
            let top_left = row * VERTEX_COUNT + col;
            let top_right = top_left + 1;
            let bottom_left = (row + 1) * VERTEX_COUNT + col;
            let bottom_right = bottom_left + 1;
            // Two triangles: (tl, bl, tr) and (tr, bl, br).
            indices.extend_from_slice(&[
                top_left,
                bottom_left,
                top_right,
                top_right,
                bottom_left,
                bottom_right,
            ]);
        }
    }
    indices
}
/// Barycentric interpolation (name kept as-is for existing callers): given
/// a triangle whose vertices carry (x, z) plane positions and y heights,
/// return the interpolated y at the 2D point `pos` (pos.y is the z axis).
fn barryCentric(p1: Vector3, p2: Vector3, p3: Vector3, pos: Vector2) -> f32 {
    // Signed doubled area of the triangle projected onto the x/z plane.
    let det = (p2.z - p3.z) * (p1.x - p3.x) + (p3.x - p2.x) * (p1.z - p3.z);
    // Barycentric weights for p1 and p2; p3's weight is the remainder.
    let l1 = ((p2.z - p3.z) * (pos.x - p3.x) + (p3.x - p2.x) * (pos.y - p3.z)) / det;
    let l2 = ((p3.z - p1.z) * (pos.x - p3.x) + (p1.x - p3.x) * (pos.y - p3.z)) / det;
    let l3 = 1.0 - l1 - l2;
    l1 * p1.y + l2 * p2.y + l3 * p3.y
}
|
use std::io::{self, Read};
use std::string::FromUtf8Error;
/// Read a tar stream from stdin and print a short summary of each entry
/// (name and size), stopping at the terminating null header.
fn main() {
    let iter = EntryIterator::from_stream(io::stdin());
    for (i, entry) in iter.enumerate() {
        let header = entry.header;
        // Only the very first header is checked for the ustar magic.
        if i == 0 {
            if header.has_magic() {
                println!("This looks like a valid tar file.");
            } else {
                println!("Bad magic in the first header.");
                break;
            }
        }
        // A null header marks the end of the archive.
        if header.is_null() {
            println!("Entry {} is null", i);
            break;
        }
        println!("Entry {}", i);
        match header.name() {
            Ok(n) => println!("Name {}", n),
            Err(e) => println!("Invalid name: {}", e),
        };
        println!("Size {} bytes", header.size());
        println!("");
    }
}
/// One archive member; only the header is retained (data blocks are skipped).
struct Entry {
    header: Header,
}
/// A raw 512-byte tar header block; field accessors are defined below.
struct Header {
    block: [u8; 512],
}
// Byte widths of the ustar header fields, in on-disk order. Field offsets
// below are computed by summing the widths of the preceding fields.
const NAME_LEN: usize = 100;
const MODE_LEN: usize = 8;
const UID_LEN: usize = 8;
const GID_LEN: usize = 8;
const SIZE_LEN: usize = 12;
const MTIME_LEN: usize = 12;
const CHECKSUM_LEN: usize = 8;
const TYPEFLAG_LEN: usize = 1;
const LINKNAME_LEN: usize = 100;
const MAGIC_LEN: usize = 6;
impl Header {
    /// Wrap a raw 512-byte block as a header.
    fn from_block(block: [u8; 512]) -> Header {
        Header { block }
    }
    /// True when the magic field holds the ustar signature.
    fn has_magic(&self) -> bool {
        // Direct byte comparison is equivalent to the original UTF-8 round
        // trip: a field that isn't valid UTF-8 can't equal "ustar\0" either.
        self.magic_field() == &b"ustar\0"[..]
    }
    /// True when the byte just past the checksum (the typeflag slot) is 0,
    /// which this reader treats as the archive-terminating null header.
    fn is_null(&self) -> bool {
        let offset = NAME_LEN + MODE_LEN + UID_LEN + GID_LEN + SIZE_LEN +
            MTIME_LEN + CHECKSUM_LEN;
        self.block[offset] == 0
    }
    /// The entry name: bytes up to the first NUL (or the full 100-byte field).
    fn name(&self) -> Result<String, FromUtf8Error> {
        let end = find_zero(&self.block, NAME_LEN).unwrap_or(NAME_LEN);
        String::from_utf8(self.block[..end].to_vec())
    }
    /// Entry size in bytes, parsed from the octal size field.
    fn size(&self) -> usize {
        // Drop the final byte — a terminating space — before parsing.
        parse_octal(&self.size_field()[..SIZE_LEN - 1])
    }
    /// The six magic bytes following the linkname field.
    fn magic_field(&self) -> &[u8] {
        let start = NAME_LEN + MODE_LEN + UID_LEN + GID_LEN + SIZE_LEN +
            MTIME_LEN + CHECKSUM_LEN + TYPEFLAG_LEN + LINKNAME_LEN;
        &self.block[start..start + MAGIC_LEN]
    }
    /// The twelve-byte size field following the gid field.
    fn size_field(&self) -> &[u8] {
        let start = NAME_LEN + MODE_LEN + UID_LEN + GID_LEN;
        &self.block[start..start + SIZE_LEN]
    }
}
// TODO: don't panic if this fails,
// or at least panic somewhere else.
/// Interpret `bytes` as ASCII octal digits, most significant first.
/// Panics on any byte outside '0'..='7' (see the TODO above).
fn parse_octal(bytes: &[u8]) -> usize {
    let mut value = 0usize;
    for b in bytes {
        let digit = *b as usize;
        // 48..=55 is ASCII '0'..='7'.
        if digit < 48 || digit > 55 {
            panic!("Not an octal digit: {}", b);
        }
        value = value * 8 + digit - 48;
    }
    value
}
/// Index of the first zero byte within `buf[..maxlen]`, if any.
/// Panics (like the original's indexing) when `maxlen > 512`.
fn find_zero(buf: &[u8; 512], maxlen: usize) -> Option<usize> {
    buf[..maxlen].iter().position(|&b| b == 0)
}
// Accepts the ustar magic; rejects any other bytes in the magic field.
#[test]
fn test_has_magic() {
    let good = block_from_visual("somefile^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@000644 ^@000765 ^@000024 ^@00000000000 13124523641 013414^@ 0^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@ustar");
    let bad = block_from_visual("somefile^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@000644 ^@000765 ^@000024 ^@00000000000 13124523641 013414^@ 0^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@nope!");
    assert_eq!(true, Header::from_block(good).has_magic());
    assert_eq!(false, Header::from_block(bad).has_magic());
}
// An all-zero block is null; a populated header (typeflag '0') is not.
#[test]
fn test_is_null() {
    let null_block = [0; 512];
    let non_null_block = block_from_visual("somefile^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@000644 ^@000765 ^@000024 ^@00000000000 13124523641 013414^@ 0^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@ustar");
    assert_eq!(true, Header::from_block(null_block).is_null());
    assert_eq!(false, Header::from_block(non_null_block).is_null());
}
// A NUL-terminated name is truncated at the first NUL.
#[test]
fn test_name_short() {
    let block = block_from_visual("somefile^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@x");
    assert_eq!("somefile", Header::from_block(block).name().unwrap());
}
// A name using the whole 100-byte field has no terminator to find.
#[test]
fn test_name_exactly_100() {
    let block = block_from_visual("long________________________________________________________________________________________________x");
    assert_eq!("long________________________________________________________________________________________________",
               Header::from_block(block).name().unwrap());
}
// Octal "13" in the size field parses to 11 bytes.
#[test]
fn test_size_small() {
    let block = block_from_visual("11bytes^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@000644 ^@000765 ^@000024 ^@00000000013 ");
    assert_eq!(11, Header::from_block(block).size());
}
// Build a 512-byte block from a printable representation in which "^@"
// stands for a NUL byte and any other character is copied literally.
// NOTE: characters needing more than one UTF-8 byte would panic in
// `encode_utf8` (1-byte buffer); the tests only pass ASCII.
#[cfg(test)]
fn block_from_visual(visual: &str) -> [u8; 512] {
    let mut block = [0; 512];
    let chars: Vec<char> = visual.chars().collect();
    let mut i = 0;
    let mut j = 0;
    while i < chars.len() {
        // Guard `i + 1` so a trailing '^' is kept as a literal caret
        // instead of panicking with an out-of-bounds index.
        if chars[i] == '^' && i + 1 < chars.len() && chars[i + 1] == '@' {
            i += 2;
        } else {
            let mut buf: [u8; 1] = [0; 1];
            chars[i].encode_utf8(&mut buf);
            block[j] = buf[0];
            i += 1;
        }
        j += 1;
    }
    block
}
// Spot-checks the visual decoder: literal chars, NUL expansion, last byte.
#[test]
fn test_block_from_visual() {
    let block = block_from_visual("somefile^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@000644 ^@000765 ^@000024 ^@00000000000 13124523641 013414^@ 0^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@ustar^@00pivotal^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@staff^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@000000 ^@000000 ^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@^@X");
    assert_eq!(115, block[0]); // s ascii
    assert_eq!(101, block[7]); // e ascii
    assert_eq!(0, block[8]); // ^@ becomes 0
    assert_eq!(48, block[101]); // 0 ascii
    assert_eq!(88, block[511]);
}
/// Number of 512-byte data blocks needed to hold `entry_size` bytes.
fn num_data_blocks(entry_size: usize) -> usize {
    // Pure integer ceiling division. The previous `f32::ceil` round trip
    // lost precision for sizes above 2^24 bytes (e.g. 16_777_217 rounded
    // down a whole block). Sizes within 511 of usize::MAX would overflow,
    // but no real tar entry approaches that.
    (entry_size + 511) / 512
}
// Boundary cases: zero, under, exactly at, and just over one block.
#[test]
fn test_num_data_blocks() {
    assert_eq!(0, num_data_blocks(0));
    assert_eq!(1, num_data_blocks(1));
    assert_eq!(1, num_data_blocks(512));
    assert_eq!(2, num_data_blocks(513));
}
/// Yields successive 512-byte blocks from an underlying reader.
struct BlockIterator<T: Read> {
    stream: T,
}
impl<T: Read> BlockIterator<T> {
    /// Wrap any `Read` source.
    fn from_stream(stream: T) -> BlockIterator<T> {
        BlockIterator { stream }
    }
}
impl<T: Read> Iterator for BlockIterator<T> {
    type Item = [u8; 512];
    /// Read the next full block. `None` at clean EOF; panics on a truncated
    /// trailing block or an I/O error (as before), but now loops to fill the
    /// block — `Read::read` may legally return fewer than 512 bytes (pipes,
    /// sockets), which the original treated as a fatal error.
    fn next(&mut self) -> Option<[u8; 512]> {
        let mut block: [u8; 512] = [0; 512];
        let mut filled = 0;
        while filled < block.len() {
            match self.stream.read(&mut block[filled..]) {
                // EOF before any byte of this block: normal end of iteration.
                Ok(0) if filled == 0 => return None,
                // EOF mid-block: the archive is truncated.
                Ok(0) => panic!("Expected to read 512 bytes but got {}", filled),
                Ok(n) => filled += n,
                Err(e) => panic!("Read error: {}", e),
            }
        }
        Some(block)
    }
}
// The bundled fixture is a 7-block (3584-byte) archive.
#[test]
fn test_block_iterator() {
    let file = include_bytes!("../fixtures/simple.tar");
    let subject = BlockIterator::from_stream(file.as_ref());
    let blocks: Vec<[u8; 512]> = subject.collect();
    assert_eq!(7, blocks.len());
}
/// Iterates tar entries by reading headers and skipping their data blocks.
struct EntryIterator<T: Read> {
    iter: BlockIterator<T>,
    done: bool,
}
impl<T: Read> EntryIterator<T> {
    /// Wrap any `Read` source in a block iterator plus entry state.
    fn from_stream(stream: T) -> EntryIterator<T> {
        EntryIterator {
            iter: BlockIterator::from_stream(stream),
            done: false,
        }
    }
    /// Turn a header block into an `Entry`, consuming the data blocks that
    /// belong to it so the next call lands on the following header.
    fn _make_entry(&mut self, block: [u8; 512]) -> Entry {
        let header = Header::from_block(block);
        if header.is_null() {
            // Null header: yield it as the final entry, then stop.
            self.done = true;
        } else {
            // Skip this entry's payload.
            let blocks_to_skip = num_data_blocks(header.size());
            for _ in 0..blocks_to_skip {
                self.iter.next();
            }
        }
        Entry { header }
    }
}
impl<T: Read> Iterator for EntryIterator<T> {
    type Item = Entry;
    fn next(&mut self) -> Option<Entry> {
        if self.done {
            None
        } else {
            self.iter.next().map(|b| self._make_entry(b))
        }
    }
}
// The fixture contains two real entries plus the terminating null header,
// which is yielded as a third (null) entry.
#[test]
fn test_entry_iterator() {
    let file = include_bytes!("../fixtures/simple.tar");
    let subject = EntryIterator::from_stream(file.as_ref());
    let entries: Vec<Entry> = subject.collect();
    assert_eq!(3, entries.len());
    assert_eq!("1", entries[0].header.name().unwrap());
    assert_eq!("513", entries[1].header.name().unwrap());
    assert_eq!(true, entries[2].header.is_null());
}
|
use std::collections::HashMap;
// just under a minute to brute force pt 2 with the same solution...
// Part-2 target: report the 30,000,000th number spoken.
const TURNS: usize = 30000000;
/// Play the "memory game" (AoC 2020 day 15) for TURNS rounds and return the
/// last number spoken. `number_timestamps` maps each number to the turn on
/// which it was most recently spoken, excluding the current turn.
pub fn solve(input: &str) -> usize {
    let initial_numbers = process(input);
    let mut number_timestamps = HashMap::new();
    let mut previous_number: Option<usize> = None;
    // Seed history with the starting numbers. The most recent one is left
    // unrecorded: its age is computed on the following turn.
    for t in 0..initial_numbers.len() {
        if let Some(n) = previous_number {
            number_timestamps.insert(n, t);
        }
        previous_number = Some(initial_numbers[t]);
    }
    let mut previous_number: usize = previous_number.unwrap();
    for t in initial_numbers.len()..TURNS {
        // `insert` returns the prior timestamp, so a single hash lookup
        // replaces the original contains_key + get + insert (three lookups
        // per turn, and this loop runs ~30 million times).
        previous_number = match number_timestamps.insert(previous_number, t) {
            Some(last_spoken) => t - last_spoken,
            None => 0,
        };
    }
    previous_number
}
/// Parse the comma-separated numbers on the first line of the input.
/// Panics on an empty input or a non-numeric token, as before.
fn process(input: &str) -> Vec<usize> {
    let all_lines: Vec<&str> = input.lines().collect();
    all_lines[0]
        .split(",")
        .map(|token| token.parse().unwrap())
        .collect()
}
|
use std::thread;
use std::time::Duration;
use actix::*;
/// Holds two queues of pending messages: "a--" items on the left, "b--"
/// items on the right.
#[derive(Debug)]
struct c_struct {
    left: Vec<String>,
    right: Vec<String>
}
impl c_struct {
    /// Create an instance with both queues empty.
    fn new() -> Self {
        Self {
            left: Vec::new(),
            right: Vec::new(),
        }
    }
    /// Pop one item from each queue and print the completed pair, repeating
    /// until at least one queue runs dry.
    fn make_a_b(&mut self) {
        loop {
            if self.left.is_empty() || self.right.is_empty() {
                break;
            }
            println!("完成 c {:?},{:?} ",self.left.pop(),self.right.pop());
        }
    }
}
// Actix actor glue: run c_struct inside a plain (non-synchronous) context.
impl Actor for c_struct {
    type Context = Context<Self>;
    // Logs (in Chinese: "starting...") once the actor is started; the
    // context handle is unused.
    fn started(&mut self, ctx: &mut Self::Context){
        println!("开始...")
    }
}
// A text message routed to the actor. (Name keeps the original "CMesage"
// spelling — callers below depend on it.)
struct CMesage {
    content: String
}
impl Message for CMesage {
    // Handlers reply with a plain String.
    type Result = String;
}
impl Handler<CMesage> for c_struct{
    type Result = String;
    // Route the message into the matching queue by its prefix ("a--" left,
    // "b--" right; anything else is dropped), then drain completed pairs.
    // The reply string is Chinese for "already ok".
    fn handle(&mut self, msg: CMesage, ctx: &mut Context<Self>) -> Self::Result{
        if msg.content.starts_with("a--") {
            self.left.push(msg.content);
        }else if msg.content.starts_with("b--"){
            self.right.push(msg.content);
        }
        self.make_a_b();
        String::from("已经ok")
    }
}
// Start the actor system (named "stage" in Chinese), spawn a producer
// thread that sends an "a--"/"b--" pair every two seconds forever, and run
// the event loop on the main thread.
fn main(){
    let system = actix::System::new("舞台");
    let c_st = c_struct::new().start();
    thread::spawn(move||{
        loop{
            c_st.do_send(CMesage{
                content:String::from("a--")
            });
            c_st.do_send(CMesage{
                content:String::from("b--")
            });
            thread::sleep(Duration::from_secs(2));
        }
    });
    system.run();
}
|
/// Namespace struct in the LeetCode style.
struct Solution();
impl Solution {
    /// LeetCode 136 "Single Number": every element appears exactly twice
    /// except one; return that lone element.
    ///
    /// XOR across the whole array cancels paired values (x ^ x == 0 and
    /// 0 ^ x == x), leaving exactly the unique number — O(n) time, O(1)
    /// space. The earlier HashMap-based attempt survived only as
    /// commented-out dead code and has been removed.
    pub fn single_number(nums: Vec<i32>) -> i32 {
        nums.into_iter().fold(0, |acc, n| acc ^ n)
    }
}
// Demo entry point: prints the unique element of two sample arrays (1, 4).
fn main(){
    println!("{}",Solution::single_number(vec![2,2,1]));
    println!("{}",Solution::single_number(vec![4,1,2,1,2]));
}
|
use std::cell::RefCell;
/// Union-find whose parent array lives in a RefCell so that `root` (which
/// performs path compression) can be called through a shared reference.
/// The concrete integer impls are generated by the macro below.
#[derive(Debug, Clone)]
struct UnionFind<T> {
    cell: RefCell<Vec<T>>,
}
// Generates an inherent impl of UnionFind for each listed unsigned integer
// type. Representation: data[i] == 0 means "i is a root"; index 0 itself is
// reserved as the sentinel and must never be queried (see the assert).
macro_rules! impl_UnionFind {
    ($( $ty:ty ),*) => ($(
        impl UnionFind<$ty> {
            // All entries start at 0, i.e. every element is its own root.
            fn new(size: usize) -> Self {
                UnionFind {
                    cell: RefCell::new(vec![0; size]),
                }
            }
            // Root of i's set. Also points i directly at its root
            // (one-step path compression) to shorten later lookups.
            fn root(&self, i: $ty) -> $ty {
                assert_ne!(i, 0);
                let mut data = self.cell.borrow_mut();
                let mut root = i;
                while data[root as usize] != 0 {
                    root = data[root as usize];
                };
                if i != root {
                    data[i as usize] = root;
                }
                root
            }
            // True when i and j are in the same set.
            fn same(&self, i: $ty, j: $ty) -> bool {
                self.root(i) == self.root(j)
            }
            // Merge the sets of i and j; returns true if they were disjoint.
            fn join(&mut self, i: $ty, j: $ty) -> bool {
                let i = self.root(i);
                let j = self.root(j);
                if i != j {
                    self.cell.borrow_mut()[j as usize] = i;
                }
                i != j
            }
        }
    )*)
}
// Instantiate for every unsigned width used by callers.
impl_UnionFind!(u8, u16, u32, u64);
// Joins elements 2..=99 into element 1's set for every generated integer
// width, then verifies membership. (Element 0 is the reserved sentinel.)
#[test]
fn union_find() {
    macro_rules! check_UnionFind {
        ($( $ty:ty ),*) => ($(
            let mut uf = UnionFind::<$ty>::new(100);
            for i in 1..99 {
                assert!(uf.join(i + 1, 1));
            }
            for i in 1..99 {
                assert!(uf.same(1, i + 1));
            }
        )*)
    }
    check_UnionFind!(u8, u16, u32, u64);
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// AutoRust-generated umbrella error: one transparent variant per operation
// in this file, each delegating to the operation's own error enum.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    Accounts_CheckNameAvailability(#[from] accounts::check_name_availability::Error),
    #[error(transparent)]
    Accounts_ListByResourceGroup(#[from] accounts::list_by_resource_group::Error),
    #[error(transparent)]
    Extensions_ListByAccount(#[from] extensions::list_by_account::Error),
    #[error(transparent)]
    Extensions_Get(#[from] extensions::get::Error),
    #[error(transparent)]
    Extensions_Create(#[from] extensions::create::Error),
    #[error(transparent)]
    Extensions_Update(#[from] extensions::update::Error),
    #[error(transparent)]
    Extensions_Delete(#[from] extensions::delete::Error),
    #[error(transparent)]
    Accounts_Get(#[from] accounts::get::Error),
    #[error(transparent)]
    Accounts_CreateOrUpdate(#[from] accounts::create_or_update::Error),
    #[error(transparent)]
    Accounts_Update(#[from] accounts::update::Error),
    #[error(transparent)]
    Accounts_Delete(#[from] accounts::delete::Error),
    #[error(transparent)]
    Projects_ListByResourceGroup(#[from] projects::list_by_resource_group::Error),
    #[error(transparent)]
    Projects_Get(#[from] projects::get::Error),
    #[error(transparent)]
    Projects_Create(#[from] projects::create::Error),
    #[error(transparent)]
    Projects_Update(#[from] projects::update::Error),
    #[error(transparent)]
    Projects_GetJobStatus(#[from] projects::get_job_status::Error),
}
// AutoRust-generated client for the provider-level Operations API.
pub mod operations {
    use super::{models, API_VERSION};
    /// GET the provider's available operations and deserialize the result.
    /// Non-200 responses are returned as `UnexpectedResponse` with the raw
    /// body attached.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/microsoft.visualstudio/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // NOTE(review): unlike every other request in this file, no
        // `api-version` query parameter is appended here — verify against
        // the service specification.
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    // Per-operation error enum (generated): one variant per failure stage.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod accounts {
use super::{models, API_VERSION};
    /// POST the subscription-level checkNameAvailability request with a JSON
    /// body and deserialize the result. Non-200 responses come back as
    /// `UnexpectedResponse` with the raw body attached.
    pub async fn check_name_availability(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        body: &models::CheckNameAvailabilityParameter,
    ) -> std::result::Result<models::CheckNameAvailabilityResult, check_name_availability::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/microsoft.visualstudio/checkNameAvailability",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(check_name_availability::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(check_name_availability::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(check_name_availability::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(check_name_availability::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(check_name_availability::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::CheckNameAvailabilityResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| check_name_availability::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(check_name_availability::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    // Per-operation error enum (generated): one variant per failure stage.
    pub mod check_name_availability {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// GET every account in the given resource group and deserialize the
    /// list result. Non-200 responses come back as `UnexpectedResponse`
    /// with the raw body attached.
    pub async fn list_by_resource_group(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::AccountResourceListResult, list_by_resource_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account",
            operation_config.base_path(),
            subscription_id,
            resource_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_resource_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_by_resource_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::AccountResourceListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                Err(list_by_resource_group::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error type for the `list_by_resource_group` operation.
    pub mod list_by_resource_group {
        use super::{models, API_VERSION};
        /// Failure modes of `list_by_resource_group`: one variant per stage
        /// of the request pipeline plus a catch-all carrying the status code
        /// and raw body for unexpected responses.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
resource_name: &str,
) -> std::result::Result<models::AccountResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AccountResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::NOT_FOUND => Err(get::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
body: &models::AccountResourceRequest,
resource_name: &str,
) -> std::result::Result<models::AccountResource, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AccountResource = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::NOT_FOUND => Err(create_or_update::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
body: &models::AccountTagRequest,
resource_name: &str,
) -> std::result::Result<models::AccountResource, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AccountResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::NOT_FOUND => Err(update::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
resource_name: &str,
) -> std::result::Result<(), delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Error type for the `delete` operation.
    pub mod delete {
        use super::{models, API_VERSION};
        /// Failure modes of `delete`: one variant per stage of the request
        /// pipeline plus a catch-all carrying the status code and raw body
        /// for unexpected responses.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod extensions {
use super::{models, API_VERSION};
pub async fn list_by_account(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
account_resource_name: &str,
) -> std::result::Result<models::ExtensionResourceListResult, list_by_account::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}/extension",
operation_config.base_path(),
subscription_id,
resource_group_name,
account_resource_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_account::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_account::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_account::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_account::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ExtensionResourceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_account::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_account::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_by_account {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
account_resource_name: &str,
extension_resource_name: &str,
) -> std::result::Result<models::ExtensionResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}/extension/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
account_resource_name,
extension_resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ExtensionResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::NOT_FOUND => Err(get::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
body: &models::ExtensionResourceRequest,
account_resource_name: &str,
extension_resource_name: &str,
) -> std::result::Result<models::ExtensionResource, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}/extension/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
account_resource_name,
extension_resource_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ExtensionResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
body: &models::ExtensionResourceRequest,
account_resource_name: &str,
extension_resource_name: &str,
) -> std::result::Result<models::ExtensionResource, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}/extension/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
account_resource_name,
extension_resource_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ExtensionResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
account_resource_name: &str,
extension_resource_name: &str,
) -> std::result::Result<(), delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/microsoft.visualstudio/account/{}/extension/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
account_resource_name,
extension_resource_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod projects {
use super::{models, API_VERSION};
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
root_resource_name: &str,
) -> std::result::Result<models::ProjectResourceListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.visualstudio/account/{}/project",
operation_config.base_path(),
subscription_id,
resource_group_name,
root_resource_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProjectResourceListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_resource_group::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Error type for the `list_by_resource_group` operation.
    pub mod list_by_resource_group {
        use super::{models, API_VERSION};
        /// Failure modes of `list_by_resource_group`: one variant per stage
        /// of the request pipeline plus a catch-all carrying the status code
        /// and raw body for unexpected responses.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
root_resource_name: &str,
resource_name: &str,
) -> std::result::Result<models::ProjectResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.visualstudio/account/{}/project/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
root_resource_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProjectResource =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::NOT_FOUND => Err(get::Error::NotFound404 {}),
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
body: &models::ProjectResource,
resource_group_name: &str,
subscription_id: &str,
root_resource_name: &str,
resource_name: &str,
validating: Option<&str>,
) -> std::result::Result<create::Response, create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.visualstudio/account/{}/project/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
root_resource_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create::Error::SerializeError)?;
if let Some(validating) = validating {
url.query_pairs_mut().append_pair("validating", validating);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProjectResource =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(create::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create {
    use super::{models, API_VERSION};
    /// Successful outcomes of the `create` operation.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK: the created/updated project resource.
        Ok200(models::ProjectResource),
        /// 202 Accepted: the operation continues asynchronously.
        Accepted202,
    }
    /// Failure modes of the `create` operation, one variant per
    /// request/response pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
body: &models::ProjectResource,
root_resource_name: &str,
resource_name: &str,
) -> std::result::Result<models::ProjectResource, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.visualstudio/account/{}/project/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
root_resource_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProjectResource =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
    use super::{models, API_VERSION};
    /// Failure modes of the `update` operation, one variant per
    /// request/response pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get_job_status(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
root_resource_name: &str,
resource_name: &str,
sub_container_name: &str,
operation: &str,
job_id: Option<&str>,
) -> std::result::Result<get_job_status::Response, get_job_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/microsoft.visualstudio/account/{}/project/{}/subContainers/{}/status",
operation_config.base_path(),
subscription_id,
resource_group_name,
root_resource_name,
resource_name,
sub_container_name
);
let mut url = url::Url::parse(url_str).map_err(get_job_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_job_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("operation", operation);
if let Some(job_id) = job_id {
url.query_pairs_mut().append_pair("jobId", job_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_job_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_job_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProjectResource =
serde_json::from_slice(rsp_body).map_err(|source| get_job_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(get_job_status::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(get_job_status::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(get_job_status::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get_job_status {
    use super::{models, API_VERSION};
    /// Successful outcomes of the `get_job_status` operation.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK: the project resource describing the job state.
        Ok200(models::ProjectResource),
        /// 202 Accepted: the job is still in progress.
        Accepted202,
    }
    /// Failure modes of the `get_job_status` operation, one variant per
    /// request/response pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("Unexpected HTTP status code {}", status_code)]
        UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
|
mod generic;
mod point;
mod rect;
mod size;
pub use {
generic::{GeomUnit, NonNegativeFloat, NonNegativeGeomUnit},
point::{FloatPoint, IntPoint, Point},
rect::{FloatRect, IntRect, Rect},
size::{FloatSize, IntSize, Size},
};
|
use middle::middle::*;
use std::fmt;
/// Wrapper that gives any `Mangle` implementor a `Display`-able form.
pub struct Mangled<T>(pub T);
/// Produces the mangled (symbol-safe) name of a middle-end entity.
pub trait Mangle {
    /// Borrowing adapter: wrap `self` so it can be used with `format!`/`write!`.
    fn mangle(&self) -> Mangled<&Self> {
        Mangled(self)
    }
    /// Writes the mangled form of `self` into the formatter.
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error>;
}
// Displaying a `Mangled` wrapper delegates straight to `mangle_into`.
impl<'a, T: Mangle> fmt::Display for Mangled<&'a T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        self.0.mangle_into(f)
    }
}
impl<'a, 'ast> Mangle for TypeDefinition<'a, 'ast> {
    // Type definitions mangle to their fully-qualified name.
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{}", self.fq_name)
    }
}
impl<'a, 'ast> Mangle for SimpleType<'a, 'ast> {
    /// Primitive types mangle to their source-language keyword; any other
    /// simple type delegates to its underlying type definition.
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        use middle::middle::SimpleType::*;
        match *self {
            Boolean => write!(f, "boolean"),
            Int => write!(f, "int"),
            Short => write!(f, "short"),
            Char => write!(f, "char"),
            Byte => write!(f, "byte"),
            Other(tydef) => tydef.mangle_into(f),
        }
    }
}
impl<'a, 'ast> Mangle for Type<'a, 'ast> {
    /// Array types get an `a@` prefix before the element type's mangled
    /// name; any other non-simple type is considered a caller bug and panics.
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        use middle::middle::Type::*;
        match *self {
            SimpleType(ref ty) => ty.mangle_into(f),
            ArrayType(ref ty) => write!(f, "a@{}", ty.mangle()),
            _ => panic!("tried to mangle a bad type {}", *self),
        }
    }
}
impl<'a, 'ast> Mangle for Field<'a, 'ast> {
    // Fields get a `FIELD` prefix before their fully-qualified name.
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "FIELD{}", self.fq_name)
    }
}
impl<'a, 'ast> Mangle for MethodImpl<'a, 'ast> {
    /// Mangles a method implementation: native methods use the `NATIVE`
    /// prefix plus the fully-qualified name; regular methods use `METHOD`,
    /// the fully-qualified name, and a `#`-separated mangled argument-type
    /// list (which distinguishes overloads).
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        if self.is_native {
            write!(f, "NATIVE{}", self.fq_name)
        } else {
            // `?` replaces the deprecated `try!` macro; semantics identical.
            write!(f, "METHOD")?;
            write!(f, "{}", self.fq_name)?;
            for ty in self.arg_types.iter() {
                write!(f, "#{}", ty.mangle())?;
            }
            Ok(())
        }
    }
}
impl<'a, 'ast> Mangle for Constructor<'a, 'ast> {
    /// Mangles a constructor: `NEW`, the origin type's fully-qualified
    /// name, then a `#`-separated mangled argument-type list.
    fn mangle_into(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // `?` replaces the deprecated `try!` macro; semantics identical.
        write!(f, "NEW")?;
        write!(f, "{}", self.origin.fq_name)?;
        for ty in self.arg_types.iter() {
            write!(f, "#{}", ty.mangle())?;
        }
        Ok(())
    }
}
|
use std::{future::Future, marker::PhantomData, rc::Rc};
use super::engine::Engine;
use crate::rvals::{FromSteelVal, IntoSteelVal, Result, SteelVal};
use crate::stop;
use crate::{
rerrs::{ErrorKind, SteelErr},
rvals::FutureResult,
};
use futures::FutureExt;
/// Trait for allowing any function that satisfies the `Fn` trait to be embedded in the engine
/// This allows for clean embedding of function pointers as well as closures that capture immutable environments
pub trait RegisterFn<FN, ARGS, RET> {
    /// Registers `func` under `name`; returns `&mut Self` for chaining.
    fn register_fn(&mut self, name: &'static str, func: FN) -> &mut Self;
}
/// Async counterpart of [`RegisterFn`]: registers functions that return futures.
pub trait RegisterAsyncFn<FN, ARGS, RET> {
    /// Registers async `func` under `name`; returns `&mut Self` for chaining.
    fn register_async_fn(&mut self, name: &'static str, func: FN) -> &mut Self;
}
// Exists only to provide some bounds for the impl_register_fn implementation
// Without this, upstream crates could provide alternative implementations for (_,_), (_,_,_), etc.
// This allows us to get away with some funny business in the arguments
pub struct Wrapper<ARGS>(PhantomData<ARGS>);
impl<
    FUT: Future<Output = RET> + 'static,
    RET: IntoSteelVal + 'static,
    FN: Fn() -> FUT + 'static,
> RegisterAsyncFn<FN, Wrapper<()>, RET> for Engine
{
    /// Registers a zero-argument async function as a `SteelVal::FutureFunc`.
    fn register_async_fn(&mut self, name: &'static str, func: FN) -> &mut Self {
        let f = move |args: &[SteelVal]| -> Result<FutureResult> {
            // Nullary function: reject any arguments at call time.
            if !args.is_empty() {
                stop!(ArityMismatch => format!("{} expected 0 arguments, got {}", name, args.len()));
            }
            let res = func();
            // Convert the future's output into a SteelVal once it resolves.
            Ok(FutureResult::new(Box::pin(res.map(|x| x.into_steelval()))))
        };
        self.register_value(name, SteelVal::FutureFunc(Rc::new(f)))
    }
}
impl<RET: IntoSteelVal, FN: Fn() -> RET + 'static> RegisterFn<FN, Wrapper<()>, RET> for Engine {
    /// Registers a zero-argument native function as a `SteelVal::BoxedFunction`.
    fn register_fn(&mut self, name: &'static str, func: FN) -> &mut Self {
        let wrapped = move |args: &[SteelVal]| -> Result<SteelVal> {
            // Nullary function: reject any arguments at call time.
            if !args.is_empty() {
                stop!(ArityMismatch => format!("{} expected 0 arguments, got {}", name, args.len()));
            }
            func().into_steelval()
        };
        self.register_value(name, SteelVal::BoxedFunction(Rc::new(wrapped)))
    }
}
// Generates a `RegisterFn` impl for one fixed arity. `$arg_count` is the
// expected number of call-site arguments; each `$param: $idx` pair maps a
// generic type parameter to its positional slot in the `args` slice.
macro_rules! impl_register_fn {
    ($arg_count:expr => $($param:ident: $idx:expr),*) => {
        impl<
            $($param: FromSteelVal,)*
            FN: Fn($($param),*) -> RET + 'static,
            RET: IntoSteelVal
        > RegisterFn<FN, Wrapper<($($param,)*)>, RET> for Engine {
            fn register_fn(&mut self, name: &'static str, func: FN) -> &mut Self {
                let f = move |args: &[SteelVal]| -> Result<SteelVal> {
                    if args.len() != $arg_count {
                        stop!(ArityMismatch => format!("{} expected {} argument, got {}", name, $arg_count, args.len()));
                    }
                    // Convert each SteelVal argument to its Rust type, then call.
                    let res = func($(<$param>::from_steelval(args[$idx].clone())?,)*);
                    res.into_steelval()
                };
                self.register_value(name, SteelVal::BoxedFunction(Rc::new(f)))
            }
        }
    };
}
// Async twin of `impl_register_fn`: the registered function returns a
// future whose output is converted to a SteelVal when it resolves.
macro_rules! impl_register_async_fn {
    ($arg_count:expr => $($param:ident: $idx:expr),*) => {
        impl<
            FUT: Future<Output = RET> + 'static,
            $($param: FromSteelVal,)*
            FN: Fn($($param),*) -> FUT + 'static,
            RET: IntoSteelVal
        > RegisterAsyncFn<FN, Wrapper<($($param,)*)>, RET> for Engine {
            fn register_async_fn(&mut self, name: &'static str, func: FN) -> &mut Self {
                let f = move |args: &[SteelVal]| -> Result<FutureResult> {
                    if args.len() != $arg_count {
                        stop!(ArityMismatch => format!("{} expected {} argument, got {}", name, $arg_count, args.len()));
                    }
                    // Convert each SteelVal argument to its Rust type, then call.
                    let res = func($(<$param>::from_steelval(args[$idx].clone())?,)*);
                    Ok(FutureResult::new(Box::pin(res.map(|x| x.into_steelval()))))
                };
                self.register_value(name, SteelVal::FutureFunc(Rc::new(f)))
            }
        }
    };
}
// Synchronous function registration for arities 1 through 16. Each
// `LETTER:index` pair binds a type parameter to its argument slot.
impl_register_fn!(1 => A:0);
impl_register_fn!(2 => A:0, B:1);
impl_register_fn!(3 => A:0, B:1, C:2);
impl_register_fn!(4 => A:0, B:1, C:2, D:3);
impl_register_fn!(5 => A:0, B:1, C:2, D:3, E:4);
impl_register_fn!(6 => A:0, B:1, C:2, D:3, E:4, F:5);
impl_register_fn!(7 => A:0, B:1, C:2, D:3, E:4, F:5, G:6);
impl_register_fn!(8 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7);
impl_register_fn!(9 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8);
impl_register_fn!(10 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9);
impl_register_fn!(11 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10);
impl_register_fn!(12 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11);
impl_register_fn!(13 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12);
impl_register_fn!(14 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12, N: 13);
impl_register_fn!(15 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12, N: 13, O: 14);
// Bug fix: the 16-arity expansion previously mapped `N` to slot 14 (the
// same slot as `O`), so argument 13 was never read and argument 14 was
// passed twice. `N` now correctly reads slot 13.
impl_register_fn!(16 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12, N: 13, O: 14, P: 15);
// Asynchronous function registration for arities 1 through 16.
impl_register_async_fn!(1 => A:0);
impl_register_async_fn!(2 => A:0, B:1);
impl_register_async_fn!(3 => A:0, B:1, C:2);
impl_register_async_fn!(4 => A:0, B:1, C:2, D:3);
impl_register_async_fn!(5 => A:0, B:1, C:2, D:3, E:4);
impl_register_async_fn!(6 => A:0, B:1, C:2, D:3, E:4, F:5);
impl_register_async_fn!(7 => A:0, B:1, C:2, D:3, E:4, F:5, G:6);
impl_register_async_fn!(8 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7);
impl_register_async_fn!(9 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8);
impl_register_async_fn!(10 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9);
impl_register_async_fn!(11 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10);
impl_register_async_fn!(12 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11);
impl_register_async_fn!(13 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12);
impl_register_async_fn!(14 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12, N: 13);
impl_register_async_fn!(15 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12, N: 13, O: 14);
// Bug fix: same `N: 14` → `N: 13` correction as the synchronous 16-arity line.
impl_register_async_fn!(16 => A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M: 12, N: 13, O: 14, P: 15);
|
mod bitnot;
mod bitand;
mod bitor;
mod bitxor;
/// Convenience re-exports of the bitwise operator traits defined in the
/// sibling submodules.
pub mod prelude {
    pub use super::{
        BitNot,
        BitAnd,
        BitOr,
        BitXor
    };
}
pub use self::bitnot::prelude::*;
pub use self::bitand::prelude::*;
pub use self::bitor::prelude::*;
pub use self::bitxor::prelude::*;
|
use crate::{
ParseAssetError, ParseNameError,
ParseSymbolError, ReadError, WriteError
};
use keys::error as KeyError;
/// Crate-wide result alias using [`Error`].
pub type Result<T> = core::result::Result<T, Error>;
/// Errors arising while parsing, (de)serializing, or verifying chain data.
#[derive(Clone, Debug)]
pub enum Error {
    // Byte-level read/write (serialization) failures.
    BytesReadError(ReadError),
    BytesWriteError(WriteError),
    // Hex decoding failure.
    FromHexError(hex::FromHexError),
    // Wrapped key-handling error.
    Keys(KeyError::Error),
    // String-parsing failures for assets, names, and symbols.
    ParseAssetErr(ParseAssetError),
    ParseNameErr(ParseNameError),
    ParseSymbolError(ParseSymbolError),
    FromTrxKindsError,
    IncreMerkleError,
    InvalidLength,
    NoNewProducersList,
    // Key error raised during signature verification.
    VerificationError(KeyError::Error),
}
|
use crate::byte::Byte;
use crate::word::Word;
use crate::cpu::Cpu;
use crate::byte_le;
use crate::program::Program;
use crate::memory::MemorySpace;
use gdnative::prelude::*;
/// godot node wrapping the ternary CPU emulator.
#[derive(NativeClass)]
#[inherit(Node)]
pub struct Artefact {
    // Emulated CPU state (registers, memory spaces, breakpoints).
    cpu: Cpu,
    // Currently loaded program; `None` means `reset` installs a built-in demo.
    program: Option<Program>,
}
// godot-side representations: both trits and bytes travel as i32 arrays.
type GodotTrits = Int32Array;
type GodotBytes = Int32Array;
/// Flattens a `Word` into a godot `Int32Array` containing one i32 per trit,
/// walking the word's bytes in index order.
fn word_to_godot_trits(value: Word) -> GodotTrits {
    let mut trits = GodotTrits::new();
    for index in 0..Word::WIDTH {
        let byte = &value.bytes[index / Byte::WIDTH];
        trits.push(byte.trits[index % Byte::WIDTH].val.into());
    }
    trits
}
#[methods]
impl Artefact {
    /// godot constructor: fresh CPU with no program loaded; resets
    /// immediately so the node starts in a runnable state.
    fn new(owner: &Node) -> Self {
        let mut result = Artefact{cpu: Cpu::new(), program: None};
        result.reset(owner);
        result
    }
    /// Called by godot when the node enters the scene tree.
    #[export]
    fn _ready(&self, _owner: &Node) {
        godot_print!("Artefact Initialized");
    }
    /// Parses a JSON program description; on success stores it and resets
    /// the CPU, on failure clears any previously loaded program.
    #[export]
    fn load_program_from_json(&mut self, _owner: &Node, json: String) {
        let program_result = Program::load_from_json(json);
        match program_result {
            Err(err) => {
                godot_print!("Program load failed {:?}", err);
                self.program = None;
            },
            Ok(program) => {
                godot_print!("Program loaded successfully");
                self.program = Some(program);
                self.reset(_owner);
            }
        }
    }
    /// Restores the machine's initial state. With a program loaded this
    /// clears the 9 memory spaces and reinstalls the program's registers
    /// and data chunks; otherwise it installs a tiny built-in demo loop at
    /// the program counter.
    #[export]
    fn reset(&mut self, _owner: &Node) {
        if let Some(ref program) = self.program {
            self.cpu.mem = [None, None, None, None, None, None, None, None, None];
            self.cpu.regs = program.regs;
            self.cpu.load_data_chunks(&program.data_chunks);
        } else {
            self.cpu.init_default();
            let (pc_space, pc_offset) = self.cpu.get_mut_space_and_offset(self.cpu.regs.pc).unwrap();
            // Two-instruction demo: bump register B, then jump back one byte.
            let shellcode = [
                byte_le!(T,T,T,1,0,0,0,0,0), // add b, 1
                byte_le!(T,T,1,1,0,0,0,T,T), // sub pc, 1
            ];
            for (i, b) in shellcode.iter().enumerate() {
                pc_space.set_byte(pc_offset+(i as isize), *b).unwrap();
            }
        }
    }
    /// Runs up to `i` CPU steps; returns the interrupt code that stopped
    /// execution, or 0 when all steps completed.
    #[export]
    fn run(&mut self, _owner: &Node, i: usize) -> isize {
        if let Err(interrupt) = self.cpu.run(i) {
            return interrupt as isize;
        }
        return 0;
    }
    /// Adds an execution breakpoint at the given address.
    #[export]
    fn add_breakpoint(&mut self, _owner: &Node, addr_value: i64) {
        self.cpu.breakpoints.insert(Word::from(addr_value));
    }
    /// Removes the breakpoint at the given address, if present.
    #[export]
    fn del_breakpoint(&mut self, _owner: &Node, addr_value: i64) {
        self.cpu.breakpoints.remove(&Word::from(addr_value));
    }
    /// Returns all breakpoint addresses as i32 values.
    #[export]
    fn get_breakpoints(&mut self, _owner: &Node) -> Int32Array {
        let mut result = Int32Array::new();
        for a in &self.cpu.breakpoints {
            result.push(i64::from(*a) as i32);
        }
        result
    }
    /// Returns one permission bitmask per address in
    /// `[addr_value, addr_value + size)`. Without a program everything is
    /// permitted (all-ones); with a program, addresses default to
    /// no-permissions and are overlaid by intersecting permission chunks.
    #[export]
    fn get_mem_perm(&self, _owner: &Node, addr_value: i64, size: i64) -> Int32Array {
        let mut result = Int32Array::new();
        if let Some(ref program) = self.program {
            for _ in 0..size {
                result.push(0b000000000);
            }
            // Map the signed global address to a memory-space index and a
            // space-local offset. NOTE(review): assumes 9 spaces centred on
            // index 4, each MemorySpace::SIZE wide — confirm against the
            // `MemorySpace` definition.
            let memspace: usize = ((addr_value as isize + MemorySpace::MAX_ADDR) / MemorySpace::SIZE as isize + 4) as usize;
            let addr: isize = addr_value as isize - ((memspace-4) * MemorySpace::SIZE) as isize;
            for pc in &program.perm_chunks {
                if memspace != pc.memspace {
                    continue;
                }
                // Skip chunks that do not overlap the requested range.
                if addr + size as isize <= pc.addr
                    || pc.addr + pc.permissions.len() as isize <= addr {
                    continue;
                }
                // Align the output index and chunk index to the overlap start.
                let (mut i, mut pc_i) = if pc.addr < addr {
                    (0, addr - pc.addr)
                } else {
                    (pc.addr - addr, 0)
                };
                while i < size as isize && pc_i < pc.permissions.len() as isize {
                    result.set(i as i32, pc.permissions.get(pc_i as i32));
                    i += 1;
                    pc_i += 1;
                }
            }
        } else {
            for _ in 0..size {
                result.push(0b111111111);
            }
        };
        result
    }
    /// Returns a full-permissions mask for each of the 9 registers.
    #[export]
    fn get_reg_perm(&self, _owner: &Node) -> Int32Array {
        let mut result = Int32Array::new();
        result.push(0b111111111111111111); // PC
        result.push(0b111111111111111111); // SP
        result.push(0b111111111111111111); // FLAGS
        result.push(0b111111111111111111); // A
        result.push(0b111111111111111111); // B
        result.push(0b111111111111111111); // C
        result.push(0b111111111111111111); // D
        result.push(0b111111111111111111); // E
        result.push(0b111111111111111111); // F
        result
    }
    /// Reads up to `size` bytes starting at `addr_value`; stops early when
    /// a byte read past the end of the containing space fails.
    #[export]
    fn mem_read(&self, _owner: &Node, addr_value: i64, size: i64) -> GodotBytes {
        let mut result = GodotBytes::new();
        let addr: Word = Word::from(addr_value);
        if let Ok((space, offset)) = self.cpu.get_space_and_offset(addr) {
            for i in 0..size {
                if let Ok(byte) = space.get_byte(offset + i as isize) {
                    result.push(i64::from(byte) as i32);
                } else {
                    break;
                }
            }
        }
        result
    }
    /// Writes `data` starting at `addr_value`; stops at the first failed
    /// byte write (e.g. out of bounds).
    #[export]
    fn mem_write(&mut self, _owner: &Node, addr_value: i64, data: GodotBytes) {
        let addr: Word = Word::from(addr_value);
        if let Ok((space, offset)) = self.cpu.get_mut_space_and_offset(addr) {
            for i in 0..data.len() {
                if space.set_byte(offset + i as isize, Byte::from(data.get(i) as i64)).is_err() {
                    break;
                }
            }
        }
    }
    /// Returns the trits of the register selected by `index`
    /// (0=PC, 1=SP, 2=FLAGS, 3..=8 = A..F); logs an error and returns
    /// zero for an invalid index.
    #[export]
    fn get_reg_trits(&self, _owner: &Node, index: i64) -> GodotTrits {
        let value: Word = match index {
            0 => { self.cpu.regs.pc },
            1 => { self.cpu.regs.sp },
            2 => { self.cpu.regs.flags },
            3 => { self.cpu.regs.a },
            4 => { self.cpu.regs.b },
            5 => { self.cpu.regs.c },
            6 => { self.cpu.regs.d },
            7 => { self.cpu.regs.e },
            8 => { self.cpu.regs.f },
            _ => {
                godot_error!("Bad register index in get_reg_trits");
                Word::ZERO
            },
        };
        word_to_godot_trits(value)
    }
    /// Returns the numeric value of the register selected by `index`
    /// (same mapping as `get_reg_trits`); zero for an invalid index.
    #[export]
    fn get_reg_value(&self, _owner: &Node, index: i64) -> i64 {
        let value: Word = match index {
            0 => { self.cpu.regs.pc },
            1 => { self.cpu.regs.sp },
            2 => { self.cpu.regs.flags },
            3 => { self.cpu.regs.a },
            4 => { self.cpu.regs.b },
            5 => { self.cpu.regs.c },
            6 => { self.cpu.regs.d },
            7 => { self.cpu.regs.e },
            8 => { self.cpu.regs.f },
            _ => {
                godot_error!("Bad register index in get_reg_value");
                Word::ZERO
            },
        };
        i64::from(value)
    }
    /// Overwrites the trits of the selected register from `trits`;
    /// logs an error and leaves registers untouched for an invalid index.
    #[export]
    fn set_reg_trits(&mut self, _owner: &Node, index: i64, trits: GodotTrits) {
        let value: &mut Word = match index {
            0 => { &mut self.cpu.regs.pc },
            1 => { &mut self.cpu.regs.sp },
            2 => { &mut self.cpu.regs.flags },
            3 => { &mut self.cpu.regs.a },
            4 => { &mut self.cpu.regs.b },
            5 => { &mut self.cpu.regs.c },
            6 => { &mut self.cpu.regs.d },
            7 => { &mut self.cpu.regs.e },
            8 => { &mut self.cpu.regs.f },
            _ => {
                godot_error!("Bad register index in set_reg_trits");
                return ;
            },
        };
        for i in 0..(trits.len() as usize) {
            (*value).bytes[i/Byte::WIDTH].trits[i%Byte::WIDTH].val = trits.get(i as i32) as i8;
        }
    }
}
|
//! Write a function that returns the largest element in a list.
//! Basic implementation
fn main() {
    let numbers = vec![2, 6, 5, 0, 10, 23, 12];
    println!("The largest number is {} ", find_largest_num(numbers));
}
/// Returns the largest value in `list`, or 0 when the list is empty.
///
/// Uses the standard `Iterator::max` instead of the original manual scan;
/// `unwrap_or(0)` preserves the original behaviour of returning 0 for an
/// empty list (which was the initial value of the manual accumulator).
fn find_largest_num(list: Vec<u32>) -> u32 {
    list.into_iter().max().unwrap_or(0)
}
|
// svd2rust-generated reader/writer proxy aliases for the PWR_CR1 register.
#[doc = "Register `PWR_CR1` reader"]
pub type R = crate::R<PWR_CR1_SPEC>;
#[doc = "Register `PWR_CR1` writer"]
pub type W = crate::W<PWR_CR1_SPEC>;
// Per-field reader/writer aliases; the const generic `O` on each writer is
// the field's bit offset, and FieldWriter's extra const is the field width.
#[doc = "Field `LPDS` reader - LPDS"]
pub type LPDS_R = crate::BitReader;
#[doc = "Field `LPDS` writer - LPDS"]
pub type LPDS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPCFG` reader - LPCFG"]
pub type LPCFG_R = crate::BitReader;
#[doc = "Field `LPCFG` writer - LPCFG"]
pub type LPCFG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LVDS` reader - LVDS"]
pub type LVDS_R = crate::BitReader;
#[doc = "Field `LVDS` writer - LVDS"]
pub type LVDS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PVDEN` reader - PVDEN"]
pub type PVDEN_R = crate::BitReader;
#[doc = "Field `PVDEN` writer - PVDEN"]
pub type PVDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLS` reader - PLS"]
pub type PLS_R = crate::FieldReader;
#[doc = "Field `PLS` writer - PLS"]
pub type PLS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `DBP` reader - DBP"]
pub type DBP_R = crate::BitReader;
#[doc = "Field `DBP` writer - DBP"]
pub type DBP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AVDEN` reader - AVDEN"]
pub type AVDEN_R = crate::BitReader;
#[doc = "Field `AVDEN` writer - AVDEN"]
pub type AVDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ALS` reader - ALS"]
pub type ALS_R = crate::FieldReader;
#[doc = "Field `ALS` writer - ALS"]
pub type ALS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
// Read accessors: each extracts its field by shifting the cached register
// value right to the field's offset and masking to the field's width.
impl R {
    #[doc = "Bit 0 - LPDS"]
    #[inline(always)]
    pub fn lpds(&self) -> LPDS_R {
        LPDS_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - LPCFG"]
    #[inline(always)]
    pub fn lpcfg(&self) -> LPCFG_R {
        LPCFG_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - LVDS"]
    #[inline(always)]
    pub fn lvds(&self) -> LVDS_R {
        LVDS_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 4 - PVDEN"]
    #[inline(always)]
    pub fn pvden(&self) -> PVDEN_R {
        PVDEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bits 5:7 - PLS"]
    #[inline(always)]
    pub fn pls(&self) -> PLS_R {
        PLS_R::new(((self.bits >> 5) & 7) as u8)
    }
    #[doc = "Bit 8 - DBP"]
    #[inline(always)]
    pub fn dbp(&self) -> DBP_R {
        DBP_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 16 - AVDEN"]
    #[inline(always)]
    pub fn avden(&self) -> AVDEN_R {
        AVDEN_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bits 17:18 - ALS"]
    #[inline(always)]
    pub fn als(&self) -> ALS_R {
        ALS_R::new(((self.bits >> 17) & 3) as u8)
    }
}
// Write proxies: each returns a field writer parameterized on the field's
// bit offset; the writer mutates this `W`'s staged value.
impl W {
    #[doc = "Bit 0 - LPDS"]
    #[inline(always)]
    #[must_use]
    pub fn lpds(&mut self) -> LPDS_W<PWR_CR1_SPEC, 0> {
        LPDS_W::new(self)
    }
    #[doc = "Bit 1 - LPCFG"]
    #[inline(always)]
    #[must_use]
    pub fn lpcfg(&mut self) -> LPCFG_W<PWR_CR1_SPEC, 1> {
        LPCFG_W::new(self)
    }
    #[doc = "Bit 2 - LVDS"]
    #[inline(always)]
    #[must_use]
    pub fn lvds(&mut self) -> LVDS_W<PWR_CR1_SPEC, 2> {
        LVDS_W::new(self)
    }
    #[doc = "Bit 4 - PVDEN"]
    #[inline(always)]
    #[must_use]
    pub fn pvden(&mut self) -> PVDEN_W<PWR_CR1_SPEC, 4> {
        PVDEN_W::new(self)
    }
    #[doc = "Bits 5:7 - PLS"]
    #[inline(always)]
    #[must_use]
    pub fn pls(&mut self) -> PLS_W<PWR_CR1_SPEC, 5> {
        PLS_W::new(self)
    }
    #[doc = "Bit 8 - DBP"]
    #[inline(always)]
    #[must_use]
    pub fn dbp(&mut self) -> DBP_W<PWR_CR1_SPEC, 8> {
        DBP_W::new(self)
    }
    #[doc = "Bit 16 - AVDEN"]
    #[inline(always)]
    #[must_use]
    pub fn avden(&mut self) -> AVDEN_W<PWR_CR1_SPEC, 16> {
        AVDEN_W::new(self)
    }
    #[doc = "Bits 17:18 - ALS"]
    #[inline(always)]
    #[must_use]
    pub fn als(&mut self) -> ALS_W<PWR_CR1_SPEC, 17> {
        ALS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe contract (generated API): the caller must ensure the raw value
    // is valid for this register's reserved/undefined bits.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Reset on any system reset. This register provides write access security when enabled by TZEN register bit in Section10: Reset and clock control (RCC). When security is enabled a non-secure write access generates a bus error. Secure and non-secure read accesses are granted and return the register value.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pwr_cr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pwr_cr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PWR_CR1_SPEC;
// Marker type describing PWR_CR1's layout for the generic register API.
impl crate::RegisterSpec for PWR_CR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`pwr_cr1::R`](R) reader structure"]
impl crate::Readable for PWR_CR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pwr_cr1::W`](W) writer structure"]
impl crate::Writable for PWR_CR1_SPEC {
    // No fields use write-0-to-modify or write-1-to-modify semantics.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PWR_CR1 to value 0"]
impl crate::Resettable for PWR_CR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use serde::ser::{Serialize, Serializer, SerializeStruct};
use std::vec::Vec;
use std::fmt;
/// A chat button; each variant carries `(title, target)` strings.
#[derive(Clone)]
pub enum Button {
    // Postback button: (title, payload sent back on click).
    PAYLOAD(String,String),
    // Link button: (title, URL to open).
    URL(String,String),
    // Quick-reply: (title, payload sent back on tap).
    QUICKPAYLOAD(String,String),
}
impl fmt::Display for Button {
    // NOTE(review): placeholder output only — every variant displays as the
    // fixed string "(button, button)"; the title/payload are not shown.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(button, button)")
    }
}
impl Serialize for Button {
    /// Serializes each variant into its JSON wire shape: postback buttons
    /// (`type`/`title`/`payload`), web-URL buttons (with webview settings),
    /// and text quick replies (`content_type`/`title`/`payload`).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Button::PAYLOAD(name,payload) => {
                let mut state = serializer.serialize_struct("ButtonPayload", 3)?;
                state.serialize_field("type", "postback")?;
                state.serialize_field("title", name)?;
                state.serialize_field("payload", payload)?;
                state.end()
            }
            Button::URL(name,url) => {
                let mut state = serializer.serialize_struct("ButtonUrl", 5)?;
                state.serialize_field("type", "web_url")?;
                state.serialize_field("title", name)?;
                state.serialize_field("url", url)?;
                state.serialize_field("webview_height_ratio", "compact")?;
                // Serialized as a real boolean here, unlike the string
                // "false" emitted by the hand-rolled `to_json_str`.
                state.serialize_field("messenger_extensions", &false)?;
                state.end()
            },
            Button::QUICKPAYLOAD(name,payload) => {
                let mut state = serializer.serialize_struct("ButtonQuickPayload", 3)?;
                state.serialize_field("content_type", "text")?;
                state.serialize_field("title", name)?;
                state.serialize_field("payload", payload)?;
                state.end()
            }
        }
    }
}
impl Button {
    /// Creates a postback button with the given title and payload.
    pub fn new_button_pb(name: &str, postback: &str) -> Button {
        Button::PAYLOAD(String::from(name),String::from(postback))
    }
    /// Creates a quick-reply button with the given title and payload.
    pub fn new_button_quick_pb(name: &str, postback: &str) -> Button {
        Button::QUICKPAYLOAD(String::from(name),String::from(postback))
    }
    /// Creates a URL (web link) button.
    ///
    /// Bug fix: this previously constructed `Button::PAYLOAD`, silently
    /// producing a postback button whose payload was the URL.
    pub fn new_button_url(name: &str, url: &str) -> Button {
        Button::URL(String::from(name),String::from(url))
    }
    /// Hand-rolled JSON rendering (kept alongside the `Serialize` impl for
    /// existing callers).
    pub fn to_json_str(&self) -> String {
        match self {
            // NOTE(review): this renders PAYLOAD in the quick-reply shape
            // (`content_type`), while `Serialize` emits `"type":"postback"`
            // for the same variant — confirm the asymmetry is intentional.
            Button::PAYLOAD(name,payload) => format!("{{\"content_type\":\"text\",\"title\":\"{}\",\"payload\":\"{}\"}}",name,payload),
            Button::URL(name,url) => {
                // Bug fix: removed the trailing comma after the
                // "messenger_extensions" entry, which made the output
                // invalid JSON.
                format!("
            {{
            \"type\":\"web_url\",
            \"url\":\"{}\",
            \"title\":\"{}\",
            \"webview_height_ratio\": \"compact\",
            \"messenger_extensions\": \"false\"
            }}",url,name)
            },
            Button::QUICKPAYLOAD(name,payload) => format!("{{\"content_type\":\"text\",\"title\":\"{}\",\"payload\":\"{}\"}}",name,payload),
        }
    }
}
|
extern crate adventofcode;
use adventofcode::d10::{Map, Point};
use std::io;
use std::io::BufRead;
/// Returns the entry of `map.visible_from()` with the greatest count,
/// together with that count; `(Point(0, 0), 0)` when there are no entries.
fn best_point(map: &Map) -> (Point, usize) {
    let mut winner = (Point(0, 0), 0);
    for (&candidate, &count) in map.visible_from().iter() {
        // Strictly-greater keeps the earlier entry on ties, matching the
        // original fold.
        if count > winner.1 {
            winner = (candidate, count);
        }
    }
    winner
}
/// Reads the map from stdin and prints the best point with its count.
fn main() -> io::Result<()> {
    let b = io::BufReader::new(io::stdin());
    // `lines()` yields io::Result<String>; unwrap aborts on a read failure.
    let map = Map::new(b.lines().map(|r| r.unwrap()));
    println!("{:?}", best_point(&map));
    Ok(())
}
|
// LNP/BP Core Library implementing LNPBP specifications & standards
// Written in 2020 by
// Dr. Maxim Orlovsky <orlovsky@pandoracore.com>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the MIT License
// along with this software.
// If not, see <https://opensource.org/licenses/MIT>.
#![recursion_limit = "256"]
// Coding conventions
#![deny(
non_upper_case_globals,
non_camel_case_types,
non_snake_case,
unused_mut,
unused_imports,
dead_code,
//missing_docs
)]
#[macro_use]
extern crate amplify;
#[macro_use]
extern crate amplify_derive;
#[macro_use]
extern crate strict_encoding;
#[macro_use]
extern crate strict_encoding_derive;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate num_derive;
#[macro_use]
extern crate bitcoin_hashes;
#[cfg(feature = "serde")]
#[macro_use]
extern crate serde_with;
#[cfg(feature = "serde")]
extern crate serde_crate as serde;
pub use secp256k1zkp;
pub mod bech32;
pub mod contract;
pub mod schema;
pub mod stash;
pub mod validation;
pub mod vm;
#[macro_use]
mod macros;
/// Flat convenience re-exports of the crate's public API surface.
pub mod prelude {
    use super::*;
    pub use super::{bech32, schema, vm};
    pub use super::bech32::{Bech32, FromBech32, ToBech32};
    // Contract model: state, assignments, seals, nodes.
    pub use contract::{
        data, reveal, seal, value, AllocatedValue, Allocation, AllocationMap,
        AllocationValueMap, AllocationValueVec, Assignment, AssignmentVec,
        AtomicValue, ConcealSeals, ConcealState, ConfidentialDataError,
        ConfidentialState, ContractId, DeclarativeStrategy, EndpointValueMap,
        Extension, Genesis, HashStrategy, IntoSealValueMap, Metadata,
        NoDataError, Node, NodeId, NodeOutput, OutpointValue, OutpointValueMap,
        OutpointValueVec, OwnedRights, ParentOwnedRights, ParentPublicRights,
        PedersenStrategy, PublicRights, RevealedByMerge, RevealedState, Seal,
        SealDefinition, SealEndpoint, SealPoint, SealValueMap, State,
        StateRetrievalError, StateType, ToSealDefinition, Transition,
        UtxobValue,
    };
    // Schema definitions and ABI types.
    pub use schema::{
        script, AssignmentAbi, AssignmentAction, ExecutableCode, ExtensionAbi,
        ExtensionAction, ExtensionSchema, ExtensionType, GenesisAbi,
        GenesisAction, NodeSubtype, NodeType, PublicRightType,
        PublicRightsStructure, Schema, SchemaId, TransitionAbi,
        TransitionAction, VmType,
    };
    // Stash (storage/consignment) types and PSBT key constants.
    pub use stash::{
        Anchor, AnchorId, ConcealAnchors, Consignment, ConsignmentEndpoints,
        ConsistencyError, Disclosure, ExtensionData, GraphApi, Stash,
        TransitionData, PSBT_OUT_PUBKEY, PSBT_OUT_TWEAK, PSBT_PREFIX,
    };
    pub use validation::{Validator, Validity};
    pub use vm::VmApi;
}
pub use prelude::*;
|
use async_trait::async_trait;
use s3::creds::Credentials;
use s3::Bucket;
use crate::config::ServiceConfig;
use crate::file::{FileError, FileProvider, FileResult};
/// File provider backed by an S3-compatible object store.
#[derive(Clone)]
pub struct S3 {
    /// Service configuration; `path` is used as the provider's base path.
    config: ServiceConfig,
    /// Handle to the configured bucket (name, region and credentials baked in).
    bucket: Bucket,
}
impl S3 {
    /// Builds an S3 provider from the service configuration.
    ///
    /// # Panics
    ///
    /// Panics with a descriptive message when `s3_bucket` or `s3_region` is
    /// missing, when the region string fails to parse, or when the
    /// credentials/bucket handle cannot be constructed. (Previously these
    /// were bare `unwrap()`s, which panic without saying which config key
    /// was at fault.)
    pub fn new(config: ServiceConfig) -> Self {
        let region = config
            .s3_region
            .as_deref()
            .expect("s3_region is not configured")
            .parse()
            .expect("s3_region is not a valid S3 region");
        let credentials = Credentials::new(
            config.s3_access_key.as_deref(),
            config.s3_secret_key.as_deref(),
            None,
            None,
            None,
        )
        .expect("failed to build S3 credentials from configuration");
        let bucket = Bucket::new(
            config
                .s3_bucket
                .as_deref()
                .expect("s3_bucket is not configured"),
            region,
            credentials,
        )
        .expect("failed to construct S3 bucket handle");
        Self { bucket, config }
    }
}
#[async_trait]
impl FileProvider for S3 {
    /// Fetches the object stored at `file_path`, returning its raw bytes.
    async fn load(&self, file_path: &str) -> FileResult<Vec<u8>> {
        self.bucket
            .get_object(file_path)
            .await
            .map(|(data, _code)| data)
            .map_err(FileError::S3Error)
    }

    /// Uploads `data` to the bucket under `file_path`.
    async fn save(&self, file_path: &str, data: Vec<u8>) -> FileResult<()> {
        self.bucket
            .put_object(file_path, &data)
            .await
            .map(|(_data, _code)| ())
            .map_err(FileError::S3Error)
    }

    /// Base path prefix taken from the service configuration.
    fn base_path(&self) -> &str {
        &self.config.path
    }
}
|
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use crate::domain::ui_interaction::UiInteraction;
use crate::domain::ui_layout::UiLayout;
/// A named collection of reusable UI presets.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UiLibrary {
    /// Human-readable library name.
    pub name: String,
    /// Presets contained in this library.
    pub presets: Vec<UiLibraryPreset>
}
impl Default for UiLibrary {
fn default() -> Self {
UiLibrary {
name: "".to_string(),
presets: vec![]
}
}
}
/// A single preset: a key/value pair plus the calls and sub-properties
/// that make it up.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UiLibraryPreset {
    /// Lookup key identifying the preset.
    pub key: String,
    /// Preset value — exact semantics depend on the consumer; TODO confirm.
    pub value: String,
    /// References to other presets invoked by this one.
    pub preset_calls: Vec<PresetCall>,
    /// Nested key/value properties attached to this preset.
    pub sub_properties: Vec<UiProperty>
}
impl Default for UiLibraryPreset {
fn default() -> Self {
UiLibraryPreset {
key: "".to_string(),
value: "".to_string(),
preset_calls: vec![],
sub_properties: vec![]
}
}
}
/// A reference from one preset to another.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct PresetCall {
    /// Name of the call — presumably the target element; verify against callers.
    pub name: String,
    /// Name of the preset being invoked.
    pub preset: String
}
impl Default for PresetCall {
fn default() -> Self {
PresetCall {
name: "".to_string(),
preset: "".to_string()
}
}
}
/// A single key/value property on a preset.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UiProperty {
    /// Property key.
    pub key: String,
    /// Property value.
    pub value: String
}
/// A named UI flow: its interactions and layout elements.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct UiFlow {
    /// Flow name.
    pub name: String,
    /// Interactions belonging to this flow.
    pub interactions: Vec<UiInteraction>,
    /// Layout elements belonging to this flow.
    pub layout: Vec<UiLayout>
}
impl UiFlow {
pub fn new(name: String) -> Self {
UiFlow {
name,
interactions: vec![],
layout: vec![]
}
}
}
/// A UI component: its name, children and configuration map.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Component {
    /// Component name.
    pub name: String,
    /// Names of child components — presumably resolved elsewhere; confirm.
    pub child_components: Vec<String>,
    /// Free-form configuration key/value pairs.
    pub configs: HashMap<String, String>,
}
impl Default for Component {
fn default() -> Self {
Component {
name: "".to_string(),
child_components: vec![],
configs: Default::default(),
}
}
}
|
use serde_json::{Value};
use bytes::Bytes;
use warp::{Filter};
#[tokio::main]
async fn main() {
    // POST /v1/receive/:log_name
    //
    // Receives a log payload, echoes it to stdout, and reports whether the
    // body is a JSON object. The body is untrusted network input, so both
    // the UTF-8 and JSON decoding steps are handled explicitly: the previous
    // `unwrap()`s let a single malformed request panic the handler.
    let log_receiver = warp::post()
        .and(warp::path("v1"))
        .and(warp::path("receive"))
        .and(warp::path::param())
        .and(warp::body::bytes())
        .map(|log_name: String, body: Bytes| {
            // Reject bodies that are not valid UTF-8 instead of panicking.
            let body_msg = match std::str::from_utf8(&body) {
                Ok(s) => s,
                Err(e) => {
                    println!("{} Oops! body is not valid UTF-8: {}", log_name, e);
                    return warp::reply::with_status("", warp::http::StatusCode::BAD_REQUEST);
                }
            };
            match serde_json::from_str::<Value>(body_msg) {
                // Valid JSON object: the expected case.
                Ok(json) if json.is_object() => {
                    println!("{} {} {}", log_name, body_msg, json);
                }
                // Valid JSON, but not an object (array, number, string, ...).
                Ok(json) => {
                    println!("{} {} Oops! json body is invalid! : {}", log_name, body_msg, json);
                }
                // Not parseable as JSON at all.
                Err(e) => {
                    println!("{} {} Oops! json body is invalid! : {}", log_name, body_msg, e);
                    return warp::reply::with_status("", warp::http::StatusCode::BAD_REQUEST);
                }
            }
            warp::reply::with_status("", warp::http::StatusCode::OK)
        });
    warp::serve(log_receiver).run(([0, 0, 0, 0], 8080)).await
}
|
use variant_count::VariantCount;
use crate::commands::Command;
use crate::commands::ledger::LedgerCommand;
use crate::commands::anoncreds::AnoncredsCommand;
use crate::commands::anoncreds::issuer::IssuerCommand;
use crate::commands::anoncreds::prover::ProverCommand;
use crate::commands::anoncreds::verifier::VerifierCommand;
use crate::commands::blob_storage::BlobStorageCommand;
use crate::commands::crypto::CryptoCommand;
use crate::commands::pool::PoolCommand;
use crate::commands::did::DidCommand;
use crate::commands::wallet::WalletCommand;
use crate::commands::pairwise::PairwiseCommand;
use crate::commands::non_secrets::NonSecretsCommand;
use crate::commands::payments::PaymentsCommand;
use crate::commands::cache::CacheCommand;
use std::fmt;
use crate::commands::metrics::MetricsCommand;
impl fmt::Display for CommandMetric {
    /// Renders the metric as its variant name by delegating to the derived
    /// `Debug` implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
impl From<usize> for CommandMetric {
    /// Converts a raw metric index back into its `CommandMetric` variant via
    /// `FromPrimitive` (inverse of the `repr(usize)` discriminant).
    ///
    /// # Panics
    ///
    /// Panics when `i` does not correspond to any variant.
    fn from(i: usize) -> Self {
        // `unwrap_or_else` replaces the previous `is_some()` + `unwrap()` +
        // else-panic construction; the panic message is unchanged.
        num_traits::FromPrimitive::from_usize(i)
            .unwrap_or_else(|| panic!("Unable to convert from {}, unknown error code", i))
    }
}
impl From<&IssuerCommand> for CommandMetric {
    /// Maps an issuer command onto the metric counter that tracks it.
    fn from(cmd: &IssuerCommand) -> Self {
        // `(..)` ignores each variant's payload regardless of arity.
        match cmd {
            IssuerCommand::CreateSchema(..) => Self::IssuerCommandCreateSchema,
            IssuerCommand::CreateAndStoreCredentialDefinition(..) => {
                Self::IssuerCommandCreateAndStoreCredentialDefinition
            }
            IssuerCommand::CreateAndStoreCredentialDefinitionContinue(..) => {
                Self::IssuerCommandCreateAndStoreCredentialDefinitionContinue
            }
            IssuerCommand::RotateCredentialDefinitionStart(..) => {
                Self::IssuerCommandRotateCredentialDefinitionStart
            }
            IssuerCommand::RotateCredentialDefinitionStartComplete(..) => {
                Self::IssuerCommandRotateCredentialDefinitionStartComplete
            }
            IssuerCommand::RotateCredentialDefinitionApply(..) => {
                Self::IssuerCommandRotateCredentialDefinitionApply
            }
            IssuerCommand::CreateAndStoreRevocationRegistry(..) => {
                Self::IssuerCommandCreateAndStoreRevocationRegistry
            }
            IssuerCommand::CreateCredentialOffer(..) => Self::IssuerCommandCreateCredentialOffer,
            IssuerCommand::CreateCredential(..) => Self::IssuerCommandCreateCredential,
            IssuerCommand::RevokeCredential(..) => Self::IssuerCommandRevokeCredential,
            IssuerCommand::MergeRevocationRegistryDeltas(..) => {
                Self::IssuerCommandMergeRevocationRegistryDeltas
            }
        }
    }
}
impl From<&ProverCommand> for CommandMetric {
fn from(cmd: &ProverCommand) -> Self {
match cmd {
ProverCommand::CreateMasterSecret(_, _, _) => { CommandMetric::ProverCommandCreateMasterSecret }
ProverCommand::CreateCredentialRequest(_, _, _, _, _, _) => { CommandMetric::ProverCommandCreateCredentialRequest }
ProverCommand::SetCredentialAttrTagPolicy(_, _, _, _, _) => { CommandMetric::ProverCommandSetCredentialAttrTagPolicy }
ProverCommand::GetCredentialAttrTagPolicy(_, _, _) => { CommandMetric::ProverCommandGetCredentialAttrTagPolicy }
ProverCommand::StoreCredential(_, _, _, _, _, _, _) => { CommandMetric::ProverCommandStoreCredential }
ProverCommand::GetCredentials(_, _, _) => { CommandMetric::ProverCommandGetCredentials }
ProverCommand::GetCredential(_, _, _) => { CommandMetric::ProverCommandGetCredential }
ProverCommand::DeleteCredential(_, _, _) => { CommandMetric::ProverCommandDeleteCredential }
ProverCommand::SearchCredentials(_, _, _) => { CommandMetric::ProverCommandSearchCredentials }
ProverCommand::FetchCredentials(_, _, _) => { CommandMetric::ProverCommandFetchCredentials }
ProverCommand::CloseCredentialsSearch(_, _) => { CommandMetric::ProverCommandCloseCredentialsSearch }
ProverCommand::GetCredentialsForProofReq(_, _, _) => { CommandMetric::ProverCommandGetCredentialsForProofReq }
ProverCommand::SearchCredentialsForProofReq(_, _, _, _) => { CommandMetric::ProverCommandSearchCredentialsForProofReq }
ProverCommand::FetchCredentialForProofReq(_, _, _, _) => { CommandMetric::ProverCommandFetchCredentialForProofReq }
ProverCommand::CloseCredentialsSearchForProofReq(_, _) => { CommandMetric::ProverCommandCloseCredentialsSearchForProofReq }
ProverCommand::CreateProof(_, _, _, _, _, _, _, _) => { CommandMetric::ProverCommandCreateProof }
ProverCommand::CreateRevocationState(_, _, _, _, _, _) => { CommandMetric::ProverCommandCreateRevocationState }
ProverCommand::UpdateRevocationState(_, _, _, _, _, _, _) => { CommandMetric::ProverCommandUpdateRevocationState }
}
}
}
impl From<&VerifierCommand> for CommandMetric {
    /// Maps a verifier command onto the metric counter that tracks it.
    fn from(cmd: &VerifierCommand) -> Self {
        match cmd {
            VerifierCommand::VerifyProof(..) => Self::VerifierCommandVerifyProof,
            VerifierCommand::GenerateNonce(..) => Self::VerifierCommandGenerateNonce,
        }
    }
}
impl From<&Command> for CommandMetric {
    /// Maps any top-level `Command` onto its metric counter, dispatching to
    /// the subsystem-specific `From` impls where one exists (issuer, prover,
    /// verifier) and matching the remaining subsystems inline.
    fn from(cmd: &Command) -> Self {
        match cmd {
            Command::Exit => { CommandMetric::Exit }
            // Anoncreds: issuer/prover/verifier delegate to their own impls.
            Command::Anoncreds(cmd) => {
                match cmd {
                    AnoncredsCommand::Issuer(cmd) => { cmd.into() }
                    AnoncredsCommand::Prover(cmd) => { cmd.into() }
                    AnoncredsCommand::Verifier(cmd) => { cmd.into() }
                    AnoncredsCommand::ToUnqualified(_, _) => { CommandMetric::AnoncredsCommandToUnqualified }
                }
            }
            Command::BlobStorage(cmd) => {
                match cmd {
                    BlobStorageCommand::OpenReader(_, _, _) => { CommandMetric::BlobStorageCommandOpenReader }
                    BlobStorageCommand::OpenWriter(_, _, _) => { CommandMetric::BlobStorageCommandOpenWriter }
                }
            }
            Command::Crypto(cmd) => {
                match cmd {
                    CryptoCommand::CreateKey(_, _, _) => { CommandMetric::CryptoCommandCreateKey }
                    CryptoCommand::SetKeyMetadata(_, _, _, _) => { CommandMetric::CryptoCommandSetKeyMetadata }
                    CryptoCommand::GetKeyMetadata(_, _, _) => { CommandMetric::CryptoCommandGetKeyMetadata }
                    CryptoCommand::CryptoSign(_, _, _, _) => { CommandMetric::CryptoCommandCryptoSign }
                    CryptoCommand::CryptoVerify(_, _, _, _) => { CommandMetric::CryptoCommandCryptoVerify }
                    CryptoCommand::AuthenticatedEncrypt(_, _, _, _, _) => { CommandMetric::CryptoCommandAuthenticatedEncrypt }
                    CryptoCommand::AuthenticatedDecrypt(_, _, _, _) => { CommandMetric::CryptoCommandAuthenticatedDecrypt }
                    CryptoCommand::AnonymousEncrypt(_, _, _) => { CommandMetric::CryptoCommandAnonymousEncrypt }
                    CryptoCommand::AnonymousDecrypt(_, _, _, _) => { CommandMetric::CryptoCommandAnonymousDecrypt }
                    CryptoCommand::PackMessage(_, _, _, _, _) => { CommandMetric::CryptoCommandPackMessage }
                    CryptoCommand::UnpackMessage(_, _, _) => { CommandMetric::CryptoCommandUnpackMessage }
                }
            }
            Command::Ledger(cmd) => {
                match cmd {
                    LedgerCommand::SignAndSubmitRequest(_, _, _, _, _) => { CommandMetric::LedgerCommandSignAndSubmitRequest }
                    LedgerCommand::SubmitRequest(_, _, _) => { CommandMetric::LedgerCommandSubmitRequest }
                    LedgerCommand::SubmitAck(_, _) => { CommandMetric::LedgerCommandSubmitAck }
                    LedgerCommand::SubmitAction(_, _, _, _, _) => { CommandMetric::LedgerCommandSubmitAction }
                    LedgerCommand::SignRequest(_, _, _, _) => { CommandMetric::LedgerCommandSignRequest }
                    LedgerCommand::MultiSignRequest(_, _, _, _) => { CommandMetric::LedgerCommandMultiSignRequest }
                    LedgerCommand::BuildGetDdoRequest(_, _, _) => { CommandMetric::LedgerCommandBuildGetDdoRequest }
                    LedgerCommand::BuildNymRequest(_, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildNymRequest }
                    LedgerCommand::BuildAttribRequest(_, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildAttribRequest }
                    LedgerCommand::BuildGetAttribRequest(_, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildGetAttribRequest }
                    LedgerCommand::BuildGetNymRequest(_, _, _) => { CommandMetric::LedgerCommandBuildGetNymRequest }
                    LedgerCommand::ParseGetNymResponse(_, _) => { CommandMetric::LedgerCommandParseGetNymResponse }
                    LedgerCommand::BuildSchemaRequest(_, _, _) => { CommandMetric::LedgerCommandBuildSchemaRequest }
                    LedgerCommand::BuildGetSchemaRequest(_, _, _) => { CommandMetric::LedgerCommandBuildGetSchemaRequest }
                    LedgerCommand::ParseGetSchemaResponse(_, _) => { CommandMetric::LedgerCommandParseGetSchemaResponse }
                    LedgerCommand::BuildCredDefRequest(_, _, _) => { CommandMetric::LedgerCommandBuildCredDefRequest }
                    LedgerCommand::BuildGetCredDefRequest(_, _, _) => { CommandMetric::LedgerCommandBuildGetCredDefRequest }
                    LedgerCommand::ParseGetCredDefResponse(_, _) => { CommandMetric::LedgerCommandParseGetCredDefResponse }
                    LedgerCommand::BuildNodeRequest(_, _, _, _) => { CommandMetric::LedgerCommandBuildNodeRequest }
                    LedgerCommand::BuildGetValidatorInfoRequest(_, _) => { CommandMetric::LedgerCommandBuildGetValidatorInfoRequest }
                    LedgerCommand::BuildGetTxnRequest(_, _, _, _) => { CommandMetric::LedgerCommandBuildGetTxnRequest }
                    LedgerCommand::BuildPoolConfigRequest(_, _, _, _) => { CommandMetric::LedgerCommandBuildPoolConfigRequest }
                    LedgerCommand::BuildPoolRestartRequest(_, _, _, _) => { CommandMetric::LedgerCommandBuildPoolRestartRequest }
                    LedgerCommand::BuildPoolUpgradeRequest(_, _, _, _, _, _, _, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildPoolUpgradeRequest }
                    LedgerCommand::BuildRevocRegDefRequest(_, _, _) => { CommandMetric::LedgerCommandBuildRevocRegDefRequest }
                    LedgerCommand::BuildGetRevocRegDefRequest(_, _, _) => { CommandMetric::LedgerCommandBuildGetRevocRegDefRequest }
                    LedgerCommand::ParseGetRevocRegDefResponse(_, _) => { CommandMetric::LedgerCommandParseGetRevocRegDefResponse }
                    LedgerCommand::BuildRevocRegEntryRequest(_, _, _, _, _) => { CommandMetric::LedgerCommandBuildRevocRegEntryRequest }
                    LedgerCommand::BuildGetRevocRegRequest(_, _, _, _) => { CommandMetric::LedgerCommandBuildGetRevocRegRequest }
                    LedgerCommand::ParseGetRevocRegResponse(_, _) => { CommandMetric::LedgerCommandParseGetRevocRegResponse }
                    LedgerCommand::BuildGetRevocRegDeltaRequest(_, _, _, _, _) => { CommandMetric::LedgerCommandBuildGetRevocRegDeltaRequest }
                    LedgerCommand::ParseGetRevocRegDeltaResponse(_, _) => { CommandMetric::LedgerCommandParseGetRevocRegDeltaResponse }
                    LedgerCommand::RegisterSPParser(_, _, _, _) => { CommandMetric::LedgerCommandRegisterSPParser }
                    LedgerCommand::GetResponseMetadata(_, _) => { CommandMetric::LedgerCommandGetResponseMetadata }
                    LedgerCommand::BuildAuthRuleRequest(_, _, _, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildAuthRuleRequest }
                    LedgerCommand::BuildAuthRulesRequest(_, _, _) => { CommandMetric::LedgerCommandBuildAuthRulesRequest }
                    LedgerCommand::BuildGetAuthRuleRequest(_, _, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildGetAuthRuleRequest }
                    LedgerCommand::GetSchema(_, _, _, _) => { CommandMetric::LedgerCommandGetSchema }
                    LedgerCommand::GetSchemaContinue(_, _, _) => { CommandMetric::LedgerCommandGetSchemaContinue }
                    LedgerCommand::GetCredDef(_, _, _, _) => { CommandMetric::LedgerCommandGetCredDef }
                    LedgerCommand::GetCredDefContinue(_, _, _) => { CommandMetric::LedgerCommandGetCredDefContinue }
                    LedgerCommand::BuildTxnAuthorAgreementRequest(_, _, _, _, _, _) => { CommandMetric::LedgerCommandBuildTxnAuthorAgreementRequest }
                    LedgerCommand::BuildDisableAllTxnAuthorAgreementsRequest(_, _) => { CommandMetric::LedgerCommandBuildDisableAllTxnAuthorAgreementsRequest }
                    LedgerCommand::BuildGetTxnAuthorAgreementRequest(_, _, _) => { CommandMetric::LedgerCommandBuildGetTxnAuthorAgreementRequest }
                    LedgerCommand::BuildAcceptanceMechanismRequests(_, _, _, _, _) => { CommandMetric::LedgerCommandBuildAcceptanceMechanismRequests }
                    LedgerCommand::BuildGetAcceptanceMechanismsRequest(_, _, _, _) => { CommandMetric::LedgerCommandBuildGetAcceptanceMechanismsRequest }
                    LedgerCommand::AppendTxnAuthorAgreementAcceptanceToRequest(_, _, _, _, _, _, _) => { CommandMetric::LedgerCommandAppendTxnAuthorAgreementAcceptanceToRequest }
                    LedgerCommand::AppendRequestEndorser(_, _, _) => { CommandMetric::LedgerCommandAppendRequestEndorser }
                    LedgerCommand::BuildGetFrozenLedgersRequest(_,_,) => { CommandMetric::LedgerCommandBuildGetFrozenLedgersRequest }
                    LedgerCommand::BuildLedgersFreezeRequest(_,_,_,) => { CommandMetric::LedgerCommandBuildLedgersFreezeRequest }
                }
            }
            Command::Pool(cmd) => {
                match cmd {
                    PoolCommand::Create(_, _, _) => { CommandMetric::PoolCommandCreate }
                    PoolCommand::Delete(_, _) => { CommandMetric::PoolCommandDelete }
                    PoolCommand::Open(_, _, _) => { CommandMetric::PoolCommandOpen }
                    PoolCommand::OpenAck(_, _, _) => { CommandMetric::PoolCommandOpenAck }
                    PoolCommand::List(_) => { CommandMetric::PoolCommandList }
                    PoolCommand::Close(_, _) => { CommandMetric::PoolCommandClose }
                    PoolCommand::CloseAck(_, _) => { CommandMetric::PoolCommandCloseAck }
                    PoolCommand::Refresh(_, _) => { CommandMetric::PoolCommandRefresh }
                    PoolCommand::RefreshAck(_, _) => { CommandMetric::PoolCommandRefreshAck }
                    PoolCommand::SetProtocolVersion(_, _) => { CommandMetric::PoolCommandSetProtocolVersion }
                }
            }
            Command::Did(cmd) => {
                match cmd {
                    DidCommand::CreateAndStoreMyDid(_, _, _) => { CommandMetric::DidCommandCreateAndStoreMyDid }
                    DidCommand::ReplaceKeysStart(_, _, _, _) => { CommandMetric::DidCommandReplaceKeysStart }
                    DidCommand::ReplaceKeysApply(_, _, _) => { CommandMetric::DidCommandReplaceKeysApply }
                    DidCommand::StoreTheirDid(_, _, _) => { CommandMetric::DidCommandStoreTheirDid }
                    DidCommand::GetMyDidWithMeta(_, _, _) => { CommandMetric::DidCommandGetMyDidWithMeta }
                    DidCommand::ListMyDidsWithMeta(_, _) => { CommandMetric::DidCommandListMyDidsWithMeta }
                    DidCommand::KeyForDid(_, _, _, _) => { CommandMetric::DidCommandKeyForDid }
                    DidCommand::KeyForLocalDid(_, _, _) => { CommandMetric::DidCommandKeyForLocalDid }
                    DidCommand::SetEndpointForDid(_, _, _, _) => { CommandMetric::DidCommandSetEndpointForDid }
                    DidCommand::GetEndpointForDid(_, _, _, _) => { CommandMetric::DidCommandGetEndpointForDid }
                    DidCommand::SetDidMetadata(_, _, _, _) => { CommandMetric::DidCommandSetDidMetadata }
                    DidCommand::GetDidMetadata(_, _, _) => { CommandMetric::DidCommandGetDidMetadata }
                    DidCommand::AbbreviateVerkey(_, _, _) => { CommandMetric::DidCommandAbbreviateVerkey }
                    DidCommand::GetNymAck(_, _, _, _) => { CommandMetric::DidCommandGetNymAck }
                    DidCommand::GetAttribAck(_, _, _) => { CommandMetric::DidCommandGetAttribAck }
                    DidCommand::QualifyDid(_, _, _, _) => { CommandMetric::DidCommandQualifyDid }
                }
            }
            Command::Wallet(cmd) => {
                match cmd {
                    WalletCommand::RegisterWalletType(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => { CommandMetric::WalletCommandRegisterWalletType }
                    WalletCommand::Create(_, _, _) => { CommandMetric::WalletCommandCreate }
                    WalletCommand::CreateContinue(_, _, _, _, _) => { CommandMetric::WalletCommandCreateContinue }
                    WalletCommand::Open(_, _, _) => { CommandMetric::WalletCommandOpen }
                    WalletCommand::OpenContinue(_, _) => { CommandMetric::WalletCommandOpenContinue }
                    WalletCommand::Close(_, _) => { CommandMetric::WalletCommandClose }
                    WalletCommand::Delete(_, _, _) => { CommandMetric::WalletCommandDelete }
                    WalletCommand::DeleteContinue(_, _, _, _, _) => { CommandMetric::WalletCommandDeleteContinue }
                    WalletCommand::Export(_, _, _) => { CommandMetric::WalletCommandExport }
                    WalletCommand::ExportContinue(_, _, _, _, _) => { CommandMetric::WalletCommandExportContinue }
                    WalletCommand::Import(_, _, _, _) => { CommandMetric::WalletCommandImport }
                    WalletCommand::ImportContinue(_, _, _, _, _) => { CommandMetric::WalletCommandImportContinue }
                    WalletCommand::GenerateKey(_, _) => { CommandMetric::WalletCommandGenerateKey }
                    WalletCommand::DeriveKey(_, _) => { CommandMetric::WalletCommandDeriveKey }
                }
            }
            Command::Pairwise(cmd) => {
                match cmd {
                    PairwiseCommand::PairwiseExists(_, _, _) => { CommandMetric::PairwiseCommandPairwiseExists }
                    PairwiseCommand::CreatePairwise(_, _, _, _, _) => { CommandMetric::PairwiseCommandCreatePairwise }
                    PairwiseCommand::ListPairwise(_, _) => { CommandMetric::PairwiseCommandListPairwise }
                    PairwiseCommand::GetPairwise(_, _, _) => { CommandMetric::PairwiseCommandGetPairwise }
                    PairwiseCommand::SetPairwiseMetadata(_, _, _, _) => { CommandMetric::PairwiseCommandSetPairwiseMetadata }
                }
            }
            Command::NonSecrets(cmd) => {
                match cmd {
                    NonSecretsCommand::AddRecord(_, _, _, _, _, _) => { CommandMetric::NonSecretsCommandAddRecord }
                    NonSecretsCommand::UpdateRecordValue(_, _, _, _, _) => { CommandMetric::NonSecretsCommandUpdateRecordValue }
                    NonSecretsCommand::UpdateRecordTags(_, _, _, _, _) => { CommandMetric::NonSecretsCommandUpdateRecordTags }
                    NonSecretsCommand::AddRecordTags(_, _, _, _, _) => { CommandMetric::NonSecretsCommandAddRecordTags }
                    NonSecretsCommand::DeleteRecordTags(_, _, _, _, _) => { CommandMetric::NonSecretsCommandDeleteRecordTags }
                    NonSecretsCommand::DeleteRecord(_, _, _, _) => { CommandMetric::NonSecretsCommandDeleteRecord }
                    NonSecretsCommand::GetRecord(_, _, _, _, _) => { CommandMetric::NonSecretsCommandGetRecord }
                    NonSecretsCommand::OpenSearch(_, _, _, _, _) => { CommandMetric::NonSecretsCommandOpenSearch }
                    NonSecretsCommand::FetchSearchNextRecords(_, _, _, _) => { CommandMetric::NonSecretsCommandFetchSearchNextRecords }
                    NonSecretsCommand::CloseSearch(_, _) => { CommandMetric::NonSecretsCommandCloseSearch }
                }
            }
            Command::Payments(cmd) => {
                match cmd {
                    PaymentsCommand::RegisterMethod(_, _, _) => { CommandMetric::PaymentsCommandRegisterMethod }
                    PaymentsCommand::CreateAddress(_, _, _, _) => { CommandMetric::PaymentsCommandCreateAddress }
                    PaymentsCommand::CreateAddressAck(_, _, _) => { CommandMetric::PaymentsCommandCreateAddressAck }
                    PaymentsCommand::ListAddresses(_, _) => { CommandMetric::PaymentsCommandListAddresses }
                    PaymentsCommand::AddRequestFees(_, _, _, _, _, _, _) => { CommandMetric::PaymentsCommandAddRequestFees }
                    PaymentsCommand::AddRequestFeesAck(_, _) => { CommandMetric::PaymentsCommandAddRequestFeesAck }
                    PaymentsCommand::ParseResponseWithFees(_, _, _) => { CommandMetric::PaymentsCommandParseResponseWithFees }
                    PaymentsCommand::ParseResponseWithFeesAck(_, _) => { CommandMetric::PaymentsCommandParseResponseWithFeesAck }
                    PaymentsCommand::BuildGetPaymentSourcesRequest(_, _, _, _, _) => { CommandMetric::PaymentsCommandBuildGetPaymentSourcesRequest }
                    PaymentsCommand::BuildGetPaymentSourcesRequestAck(_, _) => { CommandMetric::PaymentsCommandBuildGetPaymentSourcesRequestAck }
                    PaymentsCommand::ParseGetPaymentSourcesResponse(_, _, _) => { CommandMetric::PaymentsCommandParseGetPaymentSourcesResponse }
                    PaymentsCommand::ParseGetPaymentSourcesResponseAck(_, _) => { CommandMetric::PaymentsCommandParseGetPaymentSourcesResponseAck }
                    PaymentsCommand::BuildPaymentReq(_, _, _, _, _, _) => { CommandMetric::PaymentsCommandBuildPaymentReq }
                    PaymentsCommand::BuildPaymentReqAck(_, _) => { CommandMetric::PaymentsCommandBuildPaymentReqAck }
                    PaymentsCommand::ParsePaymentResponse(_, _, _) => { CommandMetric::PaymentsCommandParsePaymentResponse }
                    PaymentsCommand::ParsePaymentResponseAck(_, _) => { CommandMetric::PaymentsCommandParsePaymentResponseAck }
                    PaymentsCommand::AppendTxnAuthorAgreementAcceptanceToExtra(_, _, _, _, _, _, _) => { CommandMetric::PaymentsCommandAppendTxnAuthorAgreementAcceptanceToExtra }
                    PaymentsCommand::BuildMintReq(_, _, _, _, _) => { CommandMetric::PaymentsCommandBuildMintReq }
                    PaymentsCommand::BuildMintReqAck(_, _) => { CommandMetric::PaymentsCommandBuildMintReqAck }
                    PaymentsCommand::BuildSetTxnFeesReq(_, _, _, _, _) => { CommandMetric::PaymentsCommandBuildSetTxnFeesReq }
                    PaymentsCommand::BuildSetTxnFeesReqAck(_, _) => { CommandMetric::PaymentsCommandBuildSetTxnFeesReqAck }
                    PaymentsCommand::BuildGetTxnFeesReq(_, _, _, _) => { CommandMetric::PaymentsCommandBuildGetTxnFeesReq }
                    PaymentsCommand::BuildGetTxnFeesReqAck(_, _) => { CommandMetric::PaymentsCommandBuildGetTxnFeesReqAck }
                    PaymentsCommand::ParseGetTxnFeesResponse(_, _, _) => { CommandMetric::PaymentsCommandParseGetTxnFeesResponse }
                    PaymentsCommand::ParseGetTxnFeesResponseAck(_, _) => { CommandMetric::PaymentsCommandParseGetTxnFeesResponseAck }
                    PaymentsCommand::BuildVerifyPaymentReq(_, _, _, _) => { CommandMetric::PaymentsCommandBuildVerifyPaymentReq }
                    PaymentsCommand::BuildVerifyPaymentReqAck(_, _) => { CommandMetric::PaymentsCommandBuildVerifyPaymentReqAck }
                    PaymentsCommand::ParseVerifyPaymentResponse(_, _, _) => { CommandMetric::PaymentsCommandParseVerifyPaymentResponse }
                    PaymentsCommand::ParseVerifyPaymentResponseAck(_, _) => { CommandMetric::PaymentsCommandParseVerifyPaymentResponseAck }
                    PaymentsCommand::GetRequestInfo(_, _, _, _) => { CommandMetric::PaymentsCommandGetRequestInfo }
                    PaymentsCommand::SignWithAddressReq(_, _, _, _) => { CommandMetric::PaymentsCommandSignWithAddressReq }
                    PaymentsCommand::SignWithAddressAck(_, _) => { CommandMetric::PaymentsCommandSignWithAddressAck }
                    PaymentsCommand::VerifyWithAddressReq(_, _, _, _) => { CommandMetric::PaymentsCommandVerifyWithAddressReq }
                    PaymentsCommand::VerifyWithAddressAck(_, _) => { CommandMetric::PaymentsCommandVerifyWithAddressAck }
                }
            }
            Command::Cache(cmd) => {
                match cmd {
                    CacheCommand::GetSchema(_, _, _, _, _, _) => { CommandMetric::CacheCommandGetSchema }
                    CacheCommand::GetSchemaContinue(_, _, _, _) => { CommandMetric::CacheCommandGetSchemaContinue }
                    CacheCommand::GetCredDef(_, _, _, _, _, _) => { CommandMetric::CacheCommandGetCredDef }
                    CacheCommand::GetCredDefContinue(_, _, _, _) => { CommandMetric::CacheCommandGetCredDefContinue }
                    CacheCommand::PurgeSchemaCache(_, _, _) => { CommandMetric::CacheCommandPurgeSchemaCache }
                    CacheCommand::PurgeCredDefCache(_, _, _) => { CommandMetric::CacheCommandPurgeCredDefCache }
                }
            }
            Command::Metrics(cmd) => {
                match cmd { MetricsCommand::CollectMetrics(_) => { CommandMetric::MetricsCommandCollectMetrics } }
            }
        }
    }
}
/// One counter per libindy command variant, used to index per-command metrics.
///
/// The `repr(usize)` discriminant is the metric's numeric index and
/// round-trips through `From<usize>` via `FromPrimitive`, so the variant
/// order must not change — reordering silently remaps recorded metrics.
#[derive(Debug, PartialEq, Copy, Clone, FromPrimitive, ToPrimitive, VariantCount)]
#[repr(usize)]
pub enum CommandMetric {
    // IssuerCommand
    IssuerCommandCreateSchema,
    IssuerCommandCreateAndStoreCredentialDefinition,
    IssuerCommandCreateAndStoreCredentialDefinitionContinue,
    IssuerCommandRotateCredentialDefinitionStart,
    IssuerCommandRotateCredentialDefinitionStartComplete,
    IssuerCommandRotateCredentialDefinitionApply,
    IssuerCommandCreateAndStoreRevocationRegistry,
    IssuerCommandCreateCredentialOffer,
    IssuerCommandCreateCredential,
    IssuerCommandRevokeCredential,
    IssuerCommandMergeRevocationRegistryDeltas,
    // ProverCommand
    ProverCommandCreateMasterSecret,
    ProverCommandCreateCredentialRequest,
    ProverCommandSetCredentialAttrTagPolicy,
    ProverCommandGetCredentialAttrTagPolicy,
    ProverCommandStoreCredential,
    ProverCommandGetCredentials,
    ProverCommandGetCredential,
    ProverCommandDeleteCredential,
    ProverCommandSearchCredentials,
    ProverCommandFetchCredentials,
    ProverCommandCloseCredentialsSearch,
    ProverCommandGetCredentialsForProofReq,
    ProverCommandSearchCredentialsForProofReq,
    ProverCommandFetchCredentialForProofReq,
    ProverCommandCloseCredentialsSearchForProofReq,
    ProverCommandCreateProof,
    ProverCommandCreateRevocationState,
    ProverCommandUpdateRevocationState,
    // VerifierCommand
    VerifierCommandVerifyProof,
    VerifierCommandGenerateNonce,
    // AnoncredsCommand
    AnoncredsCommandToUnqualified,
    // BlobStorage
    BlobStorageCommandOpenReader,
    BlobStorageCommandOpenWriter,
    // CryptoCommand
    CryptoCommandCreateKey,
    CryptoCommandSetKeyMetadata,
    CryptoCommandGetKeyMetadata,
    CryptoCommandCryptoSign,
    CryptoCommandCryptoVerify,
    CryptoCommandAuthenticatedEncrypt,
    CryptoCommandAuthenticatedDecrypt,
    CryptoCommandAnonymousEncrypt,
    CryptoCommandAnonymousDecrypt,
    CryptoCommandPackMessage,
    CryptoCommandUnpackMessage,
    // LedgerCommand
    LedgerCommandSignAndSubmitRequest,
    LedgerCommandSubmitRequest,
    LedgerCommandSubmitAck,
    LedgerCommandSubmitAction,
    LedgerCommandSignRequest,
    LedgerCommandMultiSignRequest,
    LedgerCommandBuildGetDdoRequest,
    LedgerCommandBuildNymRequest,
    LedgerCommandBuildAttribRequest,
    LedgerCommandBuildGetAttribRequest,
    LedgerCommandBuildGetNymRequest,
    LedgerCommandParseGetNymResponse,
    LedgerCommandBuildSchemaRequest,
    LedgerCommandBuildGetSchemaRequest,
    LedgerCommandParseGetSchemaResponse,
    LedgerCommandBuildCredDefRequest,
    LedgerCommandBuildGetCredDefRequest,
    LedgerCommandParseGetCredDefResponse,
    LedgerCommandBuildNodeRequest,
    LedgerCommandBuildGetValidatorInfoRequest,
    LedgerCommandBuildGetTxnRequest,
    LedgerCommandBuildPoolConfigRequest,
    LedgerCommandBuildPoolRestartRequest,
    LedgerCommandBuildPoolUpgradeRequest,
    LedgerCommandBuildRevocRegDefRequest,
    LedgerCommandBuildGetRevocRegDefRequest,
    LedgerCommandParseGetRevocRegDefResponse,
    LedgerCommandBuildRevocRegEntryRequest,
    LedgerCommandBuildGetRevocRegRequest,
    LedgerCommandParseGetRevocRegResponse,
    LedgerCommandBuildGetRevocRegDeltaRequest,
    LedgerCommandParseGetRevocRegDeltaResponse,
    LedgerCommandRegisterSPParser,
    LedgerCommandGetResponseMetadata,
    LedgerCommandBuildAuthRuleRequest,
    LedgerCommandBuildAuthRulesRequest,
    LedgerCommandBuildGetAuthRuleRequest,
    LedgerCommandGetSchema,
    LedgerCommandGetSchemaContinue,
    LedgerCommandGetCredDef,
    LedgerCommandGetCredDefContinue,
    LedgerCommandBuildTxnAuthorAgreementRequest,
    LedgerCommandBuildDisableAllTxnAuthorAgreementsRequest,
    LedgerCommandBuildGetTxnAuthorAgreementRequest,
    LedgerCommandBuildAcceptanceMechanismRequests,
    LedgerCommandBuildGetAcceptanceMechanismsRequest,
    LedgerCommandAppendTxnAuthorAgreementAcceptanceToRequest,
    LedgerCommandAppendRequestEndorser,
    LedgerCommandBuildGetFrozenLedgersRequest,
    LedgerCommandBuildLedgersFreezeRequest,
    // PoolCommand
    PoolCommandCreate,
    PoolCommandDelete,
    PoolCommandOpen,
    PoolCommandOpenAck,
    PoolCommandList,
    PoolCommandClose,
    PoolCommandCloseAck,
    PoolCommandRefresh,
    PoolCommandRefreshAck,
    PoolCommandSetProtocolVersion,
    // DidCommand
    DidCommandCreateAndStoreMyDid,
    DidCommandReplaceKeysStart,
    DidCommandReplaceKeysApply,
    DidCommandStoreTheirDid,
    DidCommandGetMyDidWithMeta,
    DidCommandListMyDidsWithMeta,
    DidCommandKeyForDid,
    DidCommandKeyForLocalDid,
    DidCommandSetEndpointForDid,
    DidCommandGetEndpointForDid,
    DidCommandSetDidMetadata,
    DidCommandGetDidMetadata,
    DidCommandAbbreviateVerkey,
    DidCommandGetNymAck,
    DidCommandGetAttribAck,
    DidCommandQualifyDid,
    // WalletCommand
    WalletCommandRegisterWalletType,
    WalletCommandCreate,
    WalletCommandCreateContinue,
    WalletCommandOpen,
    WalletCommandOpenContinue,
    WalletCommandClose,
    WalletCommandDelete,
    WalletCommandDeleteContinue,
    WalletCommandExport,
    WalletCommandExportContinue,
    WalletCommandImport,
    WalletCommandImportContinue,
    WalletCommandGenerateKey,
    WalletCommandDeriveKey,
    // PairwiseCommand
    PairwiseCommandPairwiseExists,
    PairwiseCommandCreatePairwise,
    PairwiseCommandListPairwise,
    PairwiseCommandGetPairwise,
    PairwiseCommandSetPairwiseMetadata,
    // NonSecretsCommand
    NonSecretsCommandAddRecord,
    NonSecretsCommandUpdateRecordValue,
    NonSecretsCommandUpdateRecordTags,
    NonSecretsCommandAddRecordTags,
    NonSecretsCommandDeleteRecordTags,
    NonSecretsCommandDeleteRecord,
    NonSecretsCommandGetRecord,
    NonSecretsCommandOpenSearch,
    NonSecretsCommandFetchSearchNextRecords,
    NonSecretsCommandCloseSearch,
    // PaymentsCommand
    PaymentsCommandRegisterMethod,
    PaymentsCommandCreateAddress,
    PaymentsCommandCreateAddressAck,
    PaymentsCommandListAddresses,
    PaymentsCommandAddRequestFees,
    PaymentsCommandAddRequestFeesAck,
    PaymentsCommandParseResponseWithFees,
    PaymentsCommandParseResponseWithFeesAck,
    PaymentsCommandBuildGetPaymentSourcesRequest,
    PaymentsCommandBuildGetPaymentSourcesRequestAck,
    PaymentsCommandParseGetPaymentSourcesResponse,
    PaymentsCommandParseGetPaymentSourcesResponseAck,
    PaymentsCommandBuildPaymentReq,
    PaymentsCommandBuildPaymentReqAck,
    PaymentsCommandParsePaymentResponse,
    PaymentsCommandParsePaymentResponseAck,
    PaymentsCommandAppendTxnAuthorAgreementAcceptanceToExtra,
    PaymentsCommandBuildMintReq,
    PaymentsCommandBuildMintReqAck,
    PaymentsCommandBuildSetTxnFeesReq,
    PaymentsCommandBuildSetTxnFeesReqAck,
    PaymentsCommandBuildGetTxnFeesReq,
    PaymentsCommandBuildGetTxnFeesReqAck,
    PaymentsCommandParseGetTxnFeesResponse,
    PaymentsCommandParseGetTxnFeesResponseAck,
    PaymentsCommandBuildVerifyPaymentReq,
    PaymentsCommandBuildVerifyPaymentReqAck,
    PaymentsCommandParseVerifyPaymentResponse,
    PaymentsCommandParseVerifyPaymentResponseAck,
    PaymentsCommandGetRequestInfo,
    PaymentsCommandSignWithAddressReq,
    PaymentsCommandSignWithAddressAck,
    PaymentsCommandVerifyWithAddressReq,
    PaymentsCommandVerifyWithAddressAck,
    // CacheCommand
    CacheCommandGetSchema,
    CacheCommandGetSchemaContinue,
    CacheCommandGetCredDef,
    CacheCommandGetCredDefContinue,
    CacheCommandPurgeSchemaCache,
    CacheCommandPurgeCredDefCache,
    // MetricsCommand
    MetricsCommandCollectMetrics,
    // Exit
    Exit,
}
|
/// Demonstration of closure and iterator-adaptor forms.
fn main() {
    // Iterator adaptors are lazy: with no consuming call (`collect`, `sum`,
    // a `for` loop, ...) this `map` builds an adaptor that never runs.
    (0..10).map(|x| x*x);
    // A closure body may contain a nested named function; still lazy and
    // never executed for the same reason.
    (0..10).map(|x| {
        fn f(y: u32) -> u32 {
            y*y
        }
        let z = f(x+1) * f(x+2);
        z*z
    });
    // A generic function accepting any `Fn(u32) -> u32` closure.
    fn f<T>(g: T, x: u32) -> u32
    where T: Fn(u32) -> u32
    {
        g(x+1) * g(x+2)
    }
    f(|x|{x*x}, 2);
    // Here `fold` is a consumer, so the whole chain runs: `inspect` prints
    // every square before `filter` drops values >= 3.
    (0..10).map(|x| x*x)
        .inspect(|x|{ println!("value {}", *x) })
        .filter(|x| *x<3)
        .filter_map(|x| Some(x))
        .fold(0, |x, y| x+y);
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::MutexGuard;
use linked_hash_map::Entry;
use linked_hash_map::LinkedHashMap;
use super::library::Library;
use super::symbols::Symbols;
/// Maximum cache size in bytes, including all of the loaded debug info.
///
/// When the cache starts to exceed this size, the least recently used library's
/// symbols will be removed.
const DEFAULT_MAX_CACHE_SIZE: usize = 1 << 30; // 1 GiB
lazy_static::lazy_static! {
    /// Process-global cache instance; obtain it via the `cache()` accessor.
    static ref CACHE: Mutex<DebugInfoCache> = Mutex::new(DebugInfoCache::new());
}
/// An LRU cache of loaded symbols. This is shared by all processes since we
/// only need to load symbols once for a particular inode. However, each process
/// will have mapped these libraries into memory differently, so that is tracked
/// on a per-process basis.
pub struct DebugInfoCache {
    /// Mapping of inode -> symbols.
    cache: LinkedHashMap<u64, Arc<Symbols>>,
    /// Total bytes currently held by all cached symbols (sum of `bytes_used()`).
    size: usize,
    /// Eviction threshold in bytes; entries are dropped front-first once
    /// `size` exceeds this.
    max_size: usize,
}
impl DebugInfoCache {
    /// Creates an empty cache with the default maximum size.
    fn new() -> Self {
        Self {
            cache: Default::default(),
            size: 0,
            max_size: DEFAULT_MAX_CACHE_SIZE,
        }
    }
    /// Loads the symbols for the given library. Returns an error if the symbols
    /// failed to load.
    ///
    /// A cache hit now refreshes the entry's recency via `get_refresh`, so
    /// eviction is genuinely least-recently-*used*. Previously a hit left the
    /// entry in its insertion-order slot (`linked_hash_map::Entry` does not
    /// refresh), which made eviction FIFO and could evict hot entries.
    pub fn load(&mut self, library: &Library) -> Result<Arc<Symbols>, anyhow::Error> {
        // Fast path: cache hit. `get_refresh` moves the entry to the back of
        // the list so recently used symbols survive eviction.
        if let Some(symbols) = self.cache.get_refresh(&library.inode) {
            return Ok(symbols.clone());
        }
        let symbols = Arc::new(Symbols::load(&library.path)?);
        self.size += symbols.bytes_used();
        self.cache.insert(library.inode, symbols.clone());
        // Evict older entries if we've exceeded the max cache size.
        // Even if we evict the entry we just inserted, that's fine
        // since we still return an `Arc`.
        while self.size > self.max_size {
            if let Some((_k, v)) = self.cache.pop_front() {
                self.size -= v.bytes_used();
            } else {
                // Defensive: nothing left to evict; avoid spinning if the
                // size accounting ever drifts.
                break;
            }
        }
        Ok(symbols)
    }
}
/// Locks and returns the process-global symbol cache.
///
/// Panics if the mutex has been poisoned by a panic on another thread.
pub fn cache() -> MutexGuard<'static, DebugInfoCache> {
    CACHE.lock().expect("lock poisoned")
}
|
mod components;
mod connection;
mod flags;
mod future;
mod keystate;
mod pingdata;
mod powerups;
mod units;
mod upgrades;
mod vector2;
mod connection_events;
pub mod collision;
pub mod config;
pub mod systemdata;
pub(crate) mod gamemode;
pub use self::components::*;
pub use self::config::Config;
pub use self::connection::*;
pub use self::flags::*;
pub use self::future::FutureDispatcher;
pub use self::keystate::*;
pub use self::pingdata::*;
pub use self::powerups::*;
pub use self::units::*;
pub use self::upgrades::*;
pub use self::vector2::*;
pub mod event {
pub use types::connection_events::*;
}
pub use self::gamemode::{GameMode, GameModeWriter};
|
// other library used for correctness checks
extern crate fid;
use fid::{FID, BitVector};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
/// One step of the xorshift64 PRNG: maps the current state to the next.
/// A state of 0 is a fixed point, so seeds must be nonzero to produce a
/// useful stream.
fn xorshift(state: u64) -> u64 {
    let a = state ^ (state << 13);
    let b = a ^ (a >> 7);
    b ^ (b << 17)
}
/// Number of pseudo-random bits pushed into each structure under test.
const SIZE: usize = 1_000_000;
#[test]
/// Check that the hash is consistent
fn test_hash() {
    // Build two structurally identical bit vectors from the same
    // deterministic pseudo-random stream.
    let mut first = rsdict::RsDict::new();
    let mut second = rsdict::RsDict::new();
    let mut state = 0xdeadbeef;
    for _ in 0..SIZE {
        state = xorshift(state);
        let bit = state & 1 == 1;
        first.push(bit);
        second.push(bit);
    }
    // Hash each with a fresh hasher; equal contents must hash equal.
    let digest = |r: &rsdict::RsDict| {
        let mut hasher = DefaultHasher::new();
        r.hash(&mut hasher);
        hasher.finish()
    };
    assert_eq!(digest(&first), digest(&second));
}
|
#![windows_subsystem = "windows"]
#[macro_use]
extern crate clap;
mod editor;
mod internal;
mod keyboard;
mod operations;
mod plugins;
mod property_creator;
mod property_editor;
mod support;
mod tools;
use drg::asset::*;
use editor::*;
use std::env;
use std::path::*;
/// Entry point: parses CLI arguments, loads struct patterns from the data
/// directory, then launches the editor (with or without an initial asset).
fn main() {
    let matches = clap_app!(DRGEditor =>
        (version: "0.1.0")
        (author: "Craig M. <craigmc08@gmail.com>")
        (@arg DATA: -d --data +takes_value "Directory for config files; default: ./data")
        (@arg ASSET: +takes_value "Asset to open; if not present, no asset is opened")
    )
    .get_matches();
    // Default data directory is `<exe dir>/data`.
    let mut default_data_dir = env::current_exe().expect("cannot determine executable path");
    default_data_dir.pop();
    default_data_dir.push("data");
    let data_dir: &Path = matches
        .value_of("DATA")
        .map(|x| x.as_ref())
        .unwrap_or(default_data_dir.as_ref());
    let struct_pattern_file = data_dir.join("struct-patterns.json");
    if let Err(err) = struct_pattern::StructPatterns::load(&struct_pattern_file) {
        // Report on stderr (was stdout) so the failure is visible even when
        // stdout is piped or captured.
        eprintln!("Failed to load struct patterns: {:?}", err);
        std::process::exit(-1);
    }
    if let Some(asset_loc) = matches.value_of("ASSET") {
        start_editor_with_path(asset_loc.as_ref());
    } else {
        start_editor_empty();
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod operations {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GETs `{base_path}/providers/Microsoft.Search/operations` and
    /// deserializes a 200 response as [`OperationListResult`]. Any other
    /// status is parsed as a `CloudError` and returned via
    /// `list::Error::DefaultResponse`.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Search/operations", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                // Non-200: the body is expected to be a CloudError payload.
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list`]; variants correspond to each fallible step.
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod admin_keys {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// POSTs to `.../searchServices/{name}/listAdminKeys` and deserializes a
    /// 200 response as [`AdminKeyResult`].
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<AdminKeyResult, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/listAdminKeys",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        // Body-less POST: send an explicit Content-Length of 0.
        req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: AdminKeyResult = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`]; variants correspond to each fallible step.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// POSTs to `.../searchServices/{name}/regenerateAdminKey/{key_kind}` and
    /// deserializes a 200 response as [`AdminKeyResult`].
    pub async fn regenerate(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        key_kind: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<AdminKeyResult, regenerate::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/regenerateAdminKey/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, key_kind
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(regenerate::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        // Body-less POST: send an explicit Content-Length of 0.
        req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
        let req = req_builder.build().context(regenerate::BuildRequestError)?;
        let rsp = client.execute(req).await.context(regenerate::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(regenerate::ResponseBytesError)?;
                let rsp_value: AdminKeyResult = serde_json::from_slice(&body).context(regenerate::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(regenerate::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(regenerate::DeserializeError { body })?;
                regenerate::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`regenerate`]; variants correspond to each fallible step.
    pub mod regenerate {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod query_keys {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// POSTs to `.../searchServices/{name}/createQueryKey/{key_name}` and
    /// deserializes a 200 response as [`QueryKey`].
    pub async fn create(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<QueryKey, create::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/createQueryKey/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, name
        );
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        // Body-less POST: send an explicit Content-Length of 0.
        req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
        let req = req_builder.build().context(create::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
                let rsp_value: QueryKey = serde_json::from_slice(&body).context(create::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(create::DeserializeError { body })?;
                create::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`create`]; variants correspond to each fallible step.
    pub mod create {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// POSTs to `.../searchServices/{name}/listQueryKeys` and deserializes a
    /// 200 response as [`ListQueryKeysResult`].
    pub async fn list_by_search_service(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<ListQueryKeysResult, list_by_search_service::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/listQueryKeys",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_search_service::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        // Body-less POST: send an explicit Content-Length of 0.
        req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
        let req = req_builder.build().context(list_by_search_service::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_search_service::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_search_service::ResponseBytesError)?;
                let rsp_value: ListQueryKeysResult =
                    serde_json::from_slice(&body).context(list_by_search_service::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_search_service::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_search_service::DeserializeError { body })?;
                list_by_search_service::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list_by_search_service`].
    pub mod list_by_search_service {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// DELETEs `.../searchServices/{name}/deleteQueryKey/{key}`.
    /// 200 and 204 map to [`delete::Response`] variants; 404 maps to
    /// `delete::Error::NotFound404`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        key: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/deleteQueryKey/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, key
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            StatusCode::NOT_FOUND => delete::NotFound404 {}.fail(),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Success and error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            NotFound404 {},
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod services {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
    /// GETs `.../searchServices/{name}` and deserializes a 200 response as
    /// [`SearchService`].
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<SearchService, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: SearchService = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`]; variants correspond to each fallible step.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// PUTs the given [`SearchService`] as a JSON body to
    /// `.../searchServices/{name}`. 200 and 201 responses are both
    /// deserialized as [`SearchService`] and distinguished by the
    /// [`create_or_update::Response`] variant.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        service: &SearchService,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Serialize the service definition as the request body.
        req_builder = req_builder.json(service);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: SearchService = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: SearchService = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Success and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(SearchService),
            Created201(SearchService),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// PATCHes the given [`SearchServiceUpdate`] as a JSON body to
    /// `.../searchServices/{name}` and deserializes a 200 response as
    /// [`SearchService`].
    pub async fn update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        service: &SearchServiceUpdate,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<SearchService, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.patch(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Serialize the partial update as the request body.
        req_builder = req_builder.json(service);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: SearchService = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`update`]; variants correspond to each fallible step.
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// DELETEs `.../searchServices/{name}`. 200 and 204 map to
    /// [`delete::Response`] variants; 404 maps to `delete::Error::NotFound404`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            StatusCode::NOT_FOUND => delete::NotFound404 {}.fail(),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Success and error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            NotFound404 {},
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// GETs `.../resourceGroups/{rg}/providers/Microsoft.Search/searchServices`
    /// and deserializes a 200 response as [`SearchServiceListResult`].
    pub async fn list_by_resource_group(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<SearchServiceListResult, list_by_resource_group::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices",
            &operation_config.base_path, subscription_id, resource_group_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_resource_group::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
                let rsp_value: SearchServiceListResult =
                    serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
                list_by_resource_group::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list_by_resource_group`].
    pub mod list_by_resource_group {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
/// Lists every Search service under the given subscription.
///
/// Sends `GET {base_path}/subscriptions/{subscription_id}/providers/Microsoft.Search/searchServices`,
/// attaching a bearer token when a credential is configured. A `200 OK`
/// body is deserialized into [`SearchServiceListResult`]; any other status
/// is surfaced as `list_by_subscription::Error::DefaultResponse` carrying
/// the service's `CloudError` payload.
pub async fn list_by_subscription(
    operation_config: &crate::OperationConfig,
    x_ms_client_request_id: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<SearchServiceListResult, list_by_subscription::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/providers/Microsoft.Search/searchServices",
        &operation_config.base_path, subscription_id
    );
    let mut builder = http_client.get(url.as_str());
    // Attach a bearer token only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_subscription::GetTokenError)?;
        builder = builder.bearer_auth(token.token.secret());
    }
    builder = builder.query(&[("api-version", &operation_config.api_version)]);
    if let Some(client_request_id) = x_ms_client_request_id {
        builder = builder.header("x-ms-client-request-id", client_request_id);
    }
    let request = builder.build().context(list_by_subscription::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(list_by_subscription::ExecuteRequestError)?;
    if response.status() == StatusCode::OK {
        let body: bytes::Bytes = response
            .bytes()
            .await
            .context(list_by_subscription::ResponseBytesError)?;
        serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })
    } else {
        // Non-success: decode the service's CloudError and fail with it.
        let status_code = response.status();
        let body: bytes::Bytes = response
            .bytes()
            .await
            .context(list_by_subscription::ResponseBytesError)?;
        let cloud_error: CloudError =
            serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })?;
        list_by_subscription::DefaultResponse {
            status_code,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error types for the sibling `list_by_subscription` operation.
pub mod list_by_subscription {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Errors that `list_by_subscription` can return; `DefaultResponse`
    /// carries the service's `CloudError` payload, the other variants wrap
    /// failures from individual request stages.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        /// Non-success HTTP status with the service's error payload.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        /// Building the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        /// Sending the HTTP request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        /// Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        /// JSON deserialization failed; `body` keeps the raw bytes.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        /// Acquiring an Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Checks whether the given Search service name is available.
///
/// Sends `POST {base_path}/subscriptions/{subscription_id}/providers/Microsoft.Search/checkNameAvailability`
/// with `check_name_availability_input` as the JSON body. A `200 OK`
/// response is deserialized into [`CheckNameAvailabilityOutput`]; any other
/// status becomes `check_name_availability::Error::DefaultResponse`.
pub async fn check_name_availability(
    operation_config: &crate::OperationConfig,
    check_name_availability_input: &CheckNameAvailabilityInput,
    x_ms_client_request_id: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<CheckNameAvailabilityOutput, check_name_availability::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/providers/Microsoft.Search/checkNameAvailability",
        &operation_config.base_path, subscription_id
    );
    let mut builder = http_client.post(url.as_str());
    // Attach a bearer token only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(check_name_availability::GetTokenError)?;
        builder = builder.bearer_auth(token.token.secret());
    }
    builder = builder.query(&[("api-version", &operation_config.api_version)]);
    builder = builder.json(check_name_availability_input);
    if let Some(client_request_id) = x_ms_client_request_id {
        builder = builder.header("x-ms-client-request-id", client_request_id);
    }
    let request = builder.build().context(check_name_availability::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(check_name_availability::ExecuteRequestError)?;
    if response.status() == StatusCode::OK {
        let body: bytes::Bytes = response
            .bytes()
            .await
            .context(check_name_availability::ResponseBytesError)?;
        serde_json::from_slice(&body).context(check_name_availability::DeserializeError { body })
    } else {
        // Non-success: decode the service's CloudError and fail with it.
        let status_code = response.status();
        let body: bytes::Bytes = response
            .bytes()
            .await
            .context(check_name_availability::ResponseBytesError)?;
        let cloud_error: CloudError =
            serde_json::from_slice(&body).context(check_name_availability::DeserializeError { body })?;
        check_name_availability::DefaultResponse {
            status_code,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error types for the sibling `check_name_availability` operation.
pub mod check_name_availability {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Errors that `check_name_availability` can return.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        /// Non-success HTTP status with the service's error payload.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        /// Building the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        /// Sending the HTTP request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        /// Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        /// JSON deserialization failed; `body` keeps the raw bytes.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        /// Acquiring an Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
/// Operations describing the private link resources supported by a search service.
pub mod private_link_resources {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists all supported private link resource types for the given search service.
    ///
    /// Sends a `GET` to the service's `privateLinkResources` collection. A
    /// `200 OK` body is deserialized into [`PrivateLinkResourcesResult`];
    /// any other status becomes `list_supported::Error::DefaultResponse`.
    pub async fn list_supported(
        operation_config: &crate::OperationConfig,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
        resource_group_name: &str,
        search_service_name: &str,
    ) -> std::result::Result<PrivateLinkResourcesResult, list_supported::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateLinkResources",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut builder = http_client.get(url.as_str());
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_supported::GetTokenError)?;
            builder = builder.bearer_auth(token.token.secret());
        }
        builder = builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(client_request_id) = x_ms_client_request_id {
            builder = builder.header("x-ms-client-request-id", client_request_id);
        }
        let request = builder.build().context(list_supported::BuildRequestError)?;
        let response = http_client
            .execute(request)
            .await
            .context(list_supported::ExecuteRequestError)?;
        if response.status() == StatusCode::OK {
            let body: bytes::Bytes = response
                .bytes()
                .await
                .context(list_supported::ResponseBytesError)?;
            serde_json::from_slice(&body).context(list_supported::DeserializeError { body })
        } else {
            // Non-success: decode the service's CloudError and fail with it.
            let status_code = response.status();
            let body: bytes::Bytes = response
                .bytes()
                .await
                .context(list_supported::ResponseBytesError)?;
            let cloud_error: CloudError =
                serde_json::from_slice(&body).context(list_supported::DeserializeError { body })?;
            list_supported::DefaultResponse {
                status_code,
                value: cloud_error,
            }
            .fail()
        }
    }
    /// Error types for [`list_supported`].
    pub mod list_supported {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `list_supported` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on a search service's private endpoint connections:
/// `get`, `update`, `delete`, `list_by_service`. All follow the same
/// generated shape: build a reqwest request, optionally bearer-auth it,
/// execute, and map non-success statuses to a `DefaultResponse` error
/// carrying the service's `CloudError` payload.
pub mod private_endpoint_connections {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches a single private endpoint connection by name (HTTP `GET`).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        private_endpoint_connection_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<PrivateEndpointConnection, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, private_endpoint_connection_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            // 200: deserialize the connection from the response body.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `get` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Updates (replaces) a private endpoint connection (HTTP `PUT` with a JSON body).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        private_endpoint_connection_name: &str,
        private_endpoint_connection: &PrivateEndpointConnection,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<PrivateEndpointConnection, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, private_endpoint_connection_name
        );
        let mut req_builder = client.put(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // The updated connection is sent as the JSON request body.
        req_builder = req_builder.json(private_endpoint_connection);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            // 200: deserialize the updated connection.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`update`].
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `update` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a private endpoint connection (HTTP `DELETE`).
    ///
    /// A `404` is reported as the dedicated `delete::Error::NotFound404`
    /// variant rather than `DefaultResponse`.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        private_endpoint_connection_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<PrivateEndpointConnection, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, private_endpoint_connection_name
        );
        let mut req_builder = client.delete(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            // 200: the service echoes the deleted connection back.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // 404 gets its own error variant (no CloudError body is read).
            StatusCode::NOT_FOUND => delete::NotFound404 {}.fail(),
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `delete` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// The connection does not exist (HTTP 404).
            NotFound404 {},
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists all private endpoint connections of a search service (HTTP `GET`).
    pub async fn list_by_service(
        operation_config: &crate::OperationConfig,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
        resource_group_name: &str,
        search_service_name: &str,
    ) -> std::result::Result<PrivateEndpointConnectionListResult, list_by_service::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_service::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(list_by_service::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_service::ExecuteRequestError)?;
        match rsp.status() {
            // 200: deserialize the connection list.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_service::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnectionListResult =
                    serde_json::from_slice(&body).context(list_by_service::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_service::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_service::DeserializeError { body })?;
                list_by_service::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`list_by_service`].
    pub mod list_by_service {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `list_by_service` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on a search service's shared private link resources:
/// `get`, `create_or_update`, `delete`, `list_by_service`. Long-running
/// operations (`create_or_update`, `delete`) expose a `Response` enum so
/// callers can distinguish an immediate result from an async `202 Accepted`.
pub mod shared_private_link_resources {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches a single shared private link resource by name (HTTP `GET`).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        shared_private_link_resource_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<SharedPrivateLinkResource, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/sharedPrivateLinkResources/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, shared_private_link_resource_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            // 200: deserialize the resource from the response body.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: SharedPrivateLinkResource = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `get` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces a shared private link resource (HTTP `PUT` with a JSON body).
    ///
    /// Returns `Response::Ok200` with the resource on `200`, or
    /// `Response::Accepted202` when the service starts an async operation.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        shared_private_link_resource_name: &str,
        shared_private_link_resource: &SharedPrivateLinkResource,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/sharedPrivateLinkResources/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, shared_private_link_resource_name
        );
        let mut req_builder = client.put(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // The desired resource state is sent as the JSON request body.
        req_builder = req_builder.json(shared_private_link_resource);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            // 200: operation completed synchronously; return the resource.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: SharedPrivateLinkResource =
                    serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            // 202: accepted for asynchronous processing; no body is read.
            StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Successful outcomes of `create_or_update`.
        #[derive(Debug)]
        pub enum Response {
            /// Completed synchronously; carries the resulting resource.
            Ok200(SharedPrivateLinkResource),
            /// Accepted for asynchronous processing.
            Accepted202,
        }
        /// Errors that `create_or_update` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a shared private link resource (HTTP `DELETE`).
    ///
    /// `202`/`204` map to the `Response` enum; `404` becomes the dedicated
    /// `delete::Error::NotFound404` variant.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        search_service_name: &str,
        shared_private_link_resource_name: &str,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/sharedPrivateLinkResources/{}",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name, shared_private_link_resource_name
        );
        let mut req_builder = client.delete(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            // 202: deletion accepted for asynchronous processing.
            StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            // 204: deletion completed with no body.
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            // 404 gets its own error variant (no CloudError body is read).
            StatusCode::NOT_FOUND => delete::NotFound404 {}.fail(),
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Successful outcomes of `delete`.
        #[derive(Debug)]
        pub enum Response {
            /// Accepted for asynchronous processing.
            Accepted202,
            /// Completed; nothing to return.
            NoContent204,
        }
        /// Errors that `delete` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// The resource does not exist (HTTP 404).
            NotFound404 {},
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists all shared private link resources of a search service (HTTP `GET`).
    pub async fn list_by_service(
        operation_config: &crate::OperationConfig,
        x_ms_client_request_id: Option<&str>,
        subscription_id: &str,
        resource_group_name: &str,
        search_service_name: &str,
    ) -> std::result::Result<SharedPrivateLinkResourceListResult, list_by_service::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/sharedPrivateLinkResources",
            &operation_config.base_path, subscription_id, resource_group_name, search_service_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_service::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        let req = req_builder.build().context(list_by_service::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_service::ExecuteRequestError)?;
        match rsp.status() {
            // 200: deserialize the resource list.
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_service::ResponseBytesError)?;
                let rsp_value: SharedPrivateLinkResourceListResult =
                    serde_json::from_slice(&body).context(list_by_service::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Anything else: decode the service's CloudError and fail.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_service::ResponseBytesError)?;
                let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_service::DeserializeError { body })?;
                list_by_service::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`list_by_service`].
    pub mod list_by_service {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors that `list_by_service` can return.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::CloudError,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
|
use hacspec_lib::prelude::*;
#[test]
fn test_unsigned_public_integers() {
    // Declare a 233-bit public (non-secret) big integer type and check
    // that literal construction and addition behave as expected.
    unsigned_public_integer!(LargeInteger, 233);
    let one = LargeInteger::from_literal(1);
    let two = LargeInteger::from_literal(2);
    let sum = one + two;
    assert_eq!(sum, LargeInteger::from_literal(3));
}
#[test]
#[should_panic]
fn test_unsigned_integer() {
    // Declare a 233-bit secret big integer type; secret values can only be
    // compared through `equal`, not `==`.
    unsigned_integer!(LargeSecretInteger, 233);
    let one = LargeSecretInteger::from_literal(1);
    let two = LargeSecretInteger::from_literal(2);
    let sum = one + two;
    // FIXME: `equal` is not implemented yet, so this panics (hence should_panic).
    assert!(sum.equal(LargeSecretInteger::from_literal(3)));
}
#[test]
fn test_signed_public_integers() {
    // Declare a 233-bit public signed big integer type and check
    // that literal construction and addition behave as expected.
    signed_public_integer!(LargeSignedInteger, 233);
    let one = LargeSignedInteger::from_literal(1);
    let two = LargeSignedInteger::from_literal(2);
    let sum = one + two;
    assert_eq!(sum, LargeSignedInteger::from_literal(3));
}
#[test]
#[should_panic]
fn test_signed_integer() {
    // Declare a 233-bit secret signed big integer type; secret values can
    // only be compared through `equal`, not `==`.
    signed_integer!(LargeSecretSignedInteger, 233);
    let one = LargeSecretSignedInteger::from_literal(1);
    let two = LargeSecretSignedInteger::from_literal(2);
    let sum = one + two;
    // FIXME: `equal` is not implemented yet, so this panics (hence should_panic).
    assert!(sum.equal(LargeSecretSignedInteger::from_literal(3)));
}
#[test]
fn test_public_nat_mod() {
    // Declare a public field-element type modulo the NIST P-256 prime.
    public_nat_mod!(
        type_name: Elem,
        type_of_canvas: P256Canvas,
        bit_size_of_field: 256,
        modulo_value: "ffffffff00000001000000000000000000000000ffffffffffffffffffffffff"
    );
    // Coordinates of the P-256 base point, parsed from hex.
    let x = Elem::from_hex("6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296");
    let y = Elem::from_hex("4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5");
    // Smoke test: modular multiplication must not panic.
    let _product = x * y;
}
#[test]
fn test_secret_nat_mod() {
    // Declare a secret field-element type modulo the NIST P-256 prime.
    nat_mod!(
        type_name: Elem,
        type_of_canvas: P256Canvas,
        bit_size_of_field: 256,
        modulo_value: "ffffffff00000001000000000000000000000000ffffffffffffffffffffffff"
    );
    // Coordinates of the P-256 base point, parsed from hex.
    let x = Elem::from_hex("6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296");
    let y = Elem::from_hex("4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5");
    // Smoke test: modular multiplication must not panic.
    let _product = x * y;
}
|
/// One target entry deserialized from the configuration file.
///
/// NOTE(review): field semantics are not visible in this chunk — the names
/// suggest `shared_secret` authenticates against the target and
/// `index_key`/`encrypt_key` are key material; confirm against the code
/// that consumes `Config`.
#[derive(Clone, Deserialize)]
pub struct Target {
    // Human-readable name of the target.
    pub name: String,
    // Secret shared with the target (presumably for authentication — confirm).
    pub shared_secret: String,
    // Key used for indexing (exact use not visible here).
    pub index_key: String,
    // Key used for encryption (exact use not visible here).
    pub encrypt_key: String,
}
/// Top-level configuration: the list of configured targets
/// (e.g. repeated `[[target]]` tables in a TOML file — format not visible here).
#[derive(Deserialize)]
pub struct Config {
    // All configured targets.
    pub target: Vec<Target>,
}
|
use std::ops::Bound;
use std::sync::Arc;
use anyhow::{Context, Result};
use async_trait::async_trait;
use chrono::{DateTime, Duration, Utc};
use serde_derive::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use sqlx::{postgres::PgConnection, Acquire};
use svc_agent::{
mqtt::{
IntoPublishableMessage, OutgoingEvent, OutgoingEventProperties, ShortTermTimingProperties,
},
AgentId,
};
use tracing::{error, warn};
use uuid::Uuid;
use crate::clients::tq::{
Priority, Task as TqTask, TranscodeMinigroupToHlsStream, TranscodeMinigroupToHlsSuccess,
};
use crate::db::class::Object as Class;
use crate::db::recording::Segments;
use crate::{app::AppContext, clients::conference::ConfigSnapshot};
use crate::{
clients::event::{Event, EventData, RoomAdjustResult},
db::class::ClassType,
};
use super::{
shared_helpers, MjrDumpsUploadReadyData, MjrDumpsUploadResult, TranscodeSuccess, UploadedStream,
};
/// Nanoseconds per millisecond (unit-conversion factor).
const NS_IN_MS: i64 = 1_000_000;
/// Event-service event type for "pin" events (consumed elsewhere in this module).
const PIN_EVENT_TYPE: &str = "pin";
/// Event-service event type used by `find_host` to determine the room host.
const HOST_EVENT_TYPE: &str = "host";
/// Post-processing strategy for "minigroup" classes: reacts to stream
/// uploads, room adjust results, transcoding completion, and MJR dump
/// uploads (see the `PostprocessingStrategy` impl below).
pub(super) struct MinigroupPostprocessingStrategy {
    // Shared application context (DB connections, service clients, config).
    ctx: Arc<dyn AppContext>,
    // The minigroup class being post-processed.
    minigroup: Class,
}
impl MinigroupPostprocessingStrategy {
pub(super) fn new(ctx: Arc<dyn AppContext>, minigroup: Class) -> Self {
Self { ctx, minigroup }
}
}
#[async_trait]
impl super::PostprocessingStrategy for MinigroupPostprocessingStrategy {
async fn handle_stream_upload(&self, stream: UploadedStream) -> Result<()> {
match stream.parsed_data {
Ok(stream_data) => {
let mut conn = self.ctx.get_conn().await?;
crate::db::recording::StreamUploadUpdateQuery::new(
self.minigroup.id(),
stream.id,
stream_data.segments,
stream_data.uri,
stream_data.started_at,
)
.execute(&mut conn)
.await?;
}
Err(err) => {
warn!(
stream_id = ?stream.id,
"Failed to transcode recording with stream_id: {}, err: {:?}", stream.id, err
);
let mut conn = self.ctx.get_conn().await?;
crate::db::recording::remove_recording(self.minigroup.id(), stream.id, &mut conn)
.await?;
}
}
let recordings = {
let mut conn = self.ctx.get_conn().await?;
crate::db::recording::RecordingListQuery::new(self.minigroup.id())
.execute(&mut conn)
.await?
};
let ready_recordings = recordings
.iter()
.filter_map(ReadyRecording::from_db_object)
.collect::<Vec<_>>();
if recordings.len() != ready_recordings.len() {
return Ok(());
}
let host_recording = {
// Find host stream id.
let host = match self.find_host(self.minigroup.event_room_id()).await? {
None => {
error!(
class_id = ?self.minigroup.id(),
event_room_id = ?self.minigroup.event_room_id(),
"No host in room",
);
return Ok(());
}
Some(agent_id) => agent_id,
};
let maybe_recording = ready_recordings
.into_iter()
.find(|recording| recording.created_by == host);
match maybe_recording {
None => {
error!(
class_id = ?self.minigroup.id(),
"No host recording in room",
);
return Ok(());
}
Some(recording) => recording,
}
};
call_adjust(
self.ctx.clone(),
self.minigroup.event_room_id(),
host_recording,
self.ctx.get_preroll_offset(self.minigroup.audience()),
)
.await?;
Ok(())
}
async fn handle_adjust(&self, room_adjust_result: RoomAdjustResult) -> Result<()> {
match room_adjust_result {
RoomAdjustResult::Success {
original_room_id,
modified_room_id,
cut_original_segments,
..
} => {
// Find host stream id.
let host = match self.find_host(modified_room_id).await? {
None => {
error!(
class_id = ?self.minigroup.id(),
"No host in room",
);
return Ok(());
}
Some(agent_id) => agent_id,
};
// Save adjust results to the DB and fetch recordings.
let recordings = {
let mut conn = self.ctx.get_conn().await?;
let mut txn = conn
.begin()
.await
.context("Failed to begin sqlx db transaction")?;
let q = crate::db::class::UpdateAdjustedRoomsQuery::new(
self.minigroup.id(),
original_room_id,
modified_room_id,
);
q.execute(&mut txn).await?;
let recordings = crate::db::recording::AdjustMinigroupUpdateQuery::new(
self.minigroup.id(),
cut_original_segments,
host.clone(),
)
.execute(&mut txn)
.await?;
txn.commit().await?;
recordings
};
send_transcoding_task(
&self.ctx,
&self.minigroup,
recordings,
modified_room_id,
Priority::Normal,
)
.await
}
RoomAdjustResult::Error { error } => {
bail!("Adjust failed, err = {:#?}", error);
}
}
}
    /// Handles the result of the tq transcoding task.
    ///
    /// For the expected minigroup-to-hls template it marks the recordings as
    /// transcoded and broadcasts a `minigroup.ready` event to the audience
    /// topic. A stream-to-hls result means a template mismatch and fails.
    async fn handle_transcoding_completion(
        &self,
        completion_result: TranscodeSuccess,
    ) -> Result<()> {
        match completion_result {
            TranscodeSuccess::TranscodeMinigroupToHls(TranscodeMinigroupToHlsSuccess {
                recording_duration,
                ..
            }) => {
                // The duration arrives as a stringified float; round it to a
                // whole number (units presumably seconds — confirm against
                // the tq contract).
                let stream_duration = recording_duration.parse::<f64>()?.round() as u64;
                {
                    let mut conn = self.ctx.get_conn().await?;
                    crate::db::recording::TranscodingUpdateQuery::new(self.minigroup.id())
                        .execute(&mut conn)
                        .await?;
                }
                // Broadcast readiness to the audience-wide events topic.
                let timing = ShortTermTimingProperties::new(Utc::now());
                let props = OutgoingEventProperties::new("minigroup.ready", timing);
                let path = format!("audiences/{}/events", self.minigroup.audience());
                let payload = MinigroupReady {
                    id: self.minigroup.id(),
                    scope: self.minigroup.scope().to_owned(),
                    tags: self.minigroup.tags().map(ToOwned::to_owned),
                    status: "success".to_string(),
                    stream_duration,
                };
                let event = OutgoingEvent::broadcast(payload, props, &path);
                let boxed_event = Box::new(event) as Box<dyn IntoPublishableMessage + Send>;
                self.ctx
                    .publisher()
                    .publish(boxed_event)
                    .context("Failed to publish minigroup.ready event")
            }
            TranscodeSuccess::TranscodeStreamToHls(success_result) => {
                bail!(
                    "Got transcoding success for an unexpected tq template; expected transcode-minigroup-to-hls for a minigroup, id = {}, result = {:#?}",
                    self.minigroup.id(),
                    success_result,
                );
            }
        }
    }
    /// Handles uploaded MJR dumps: persists a recording row per dump and
    /// schedules a dumps-to-stream conversion tq task for each RTC.
    async fn handle_mjr_dumps_upload(&self, dumps: Vec<MjrDumpsUploadResult>) -> Result<()> {
        if dumps.is_empty() {
            bail!("Expected at least 1 RTC");
        }
        // Keep only dumps that finished uploading.
        let ready_dumps = shared_helpers::extract_ready_dumps(dumps)?;
        {
            let mut conn = self.ctx.get_conn().await?;
            insert_recordings(&mut conn, self.minigroup.id(), &ready_dumps).await?;
        }
        let tq_client = self.ctx.tq_client();
        // One conversion task per ready dump.
        for dump in ready_dumps {
            tq_client
                .create_task(
                    &self.minigroup,
                    TqTask::ConvertMjrDumpsToStream {
                        mjr_dumps_uris: dump.mjr_dumps_uris,
                        stream_uri: dump.uri,
                        stream_id: dump.id,
                    },
                    Priority::Normal,
                )
                .await?
        }
        Ok(())
    }
}
impl MinigroupPostprocessingStrategy {
    /// Convenience wrapper around the free `find_host` function using this
    /// strategy's application context.
    async fn find_host(&self, event_room_id: Uuid) -> Result<Option<AgentId>> {
        find_host(self.ctx.clone(), event_room_id).await
    }
}
/// Looks up the host agent of the given event room, if any.
///
/// Fetches "host" events from the event service and inspects the first one.
/// Returns `Ok(None)` when the room has no host event; bails when the event
/// payload is not host data.
async fn find_host(ctx: Arc<dyn AppContext>, event_room_id: Uuid) -> Result<Option<AgentId>> {
    let events = ctx
        .event_client()
        .list_events(event_room_id, HOST_EVENT_TYPE)
        .await
        .context("Failed to get host events for room")?;

    let first = match events.first() {
        Some(event) => event.data(),
        None => return Ok(None),
    };

    match first {
        EventData::Host(data) => Ok(Some(data.agent_id().to_owned())),
        other => bail!("Got unexpected host event data: {:?}", other),
    }
}
/// Inserts one recording row per ready dump for the given class, all within
/// a single transaction so partially-inserted sets never become visible.
async fn insert_recordings(
    conn: &mut PgConnection,
    class_id: Uuid,
    dumps: &[MjrDumpsUploadReadyData],
) -> Result<()> {
    let mut transaction = conn
        .begin()
        .await
        .context("Failed to begin sqlx db transaction")?;

    for dump in dumps.iter() {
        crate::db::recording::RecordingInsertQuery::new(
            class_id,
            dump.id,
            dump.created_by.to_owned(),
        )
        .execute(&mut transaction)
        .await?;
    }

    transaction.commit().await?;
    Ok(())
}
async fn call_adjust(
ctx: Arc<dyn AppContext>,
room_id: Uuid,
host_recording: ReadyRecording,
offset: i64,
) -> Result<()> {
ctx.event_client()
.adjust_room(
room_id,
host_recording.started_at,
host_recording.segments,
offset,
)
.await
.map_err(|err| anyhow!("Failed to adjust room, id = {}: {}", room_id, err))?;
Ok(())
}
pub async fn restart_transcoding(
ctx: Arc<dyn AppContext>,
minigroup: Class,
priority: Priority,
) -> Result<()> {
if minigroup.kind() != ClassType::Minigroup {
bail!("Invalid class type");
}
let modified_event_room_id = match minigroup.modified_event_room_id() {
Some(id) => id,
None => bail!("Not adjusted yet"),
};
let mut conn = ctx.get_conn().await?;
let recordings = crate::db::recording::RecordingListQuery::new(minigroup.id())
.execute(&mut conn)
.await?;
send_transcoding_task(
&ctx,
&minigroup,
recordings,
modified_event_room_id,
priority,
)
.await
}
/// Builds and enqueues the `TranscodeMinigroupToHls` tq task for a minigroup.
///
/// Resolves the host stream of the modified event room, verifies that every
/// recording is ready, dumps the room's events, and assembles one stream
/// description per recording (offsets, pin segments, mute segments).
/// Silently returns `Ok(())` when the room has no host.
async fn send_transcoding_task(
    ctx: &Arc<dyn AppContext>,
    minigroup: &Class,
    recordings: Vec<crate::db::recording::Object>,
    modified_event_room_id: Uuid,
    priority: Priority,
) -> Result<()> {
    // Find host stream id.
    let host = match find_host(ctx.clone(), modified_event_room_id).await? {
        None => {
            error!(class_id = ?minigroup.id(), "No host in room");
            return Ok(());
        }
        Some(agent_id) => agent_id,
    };
    // All recordings must be complete (stream uri, segments, start time)
    // before transcoding can be scheduled.
    let recordings = recordings
        .into_iter()
        .map(|recording| ReadyRecording::from_db_object(&recording))
        .collect::<Option<Vec<_>>>()
        .ok_or_else(|| anyhow!("Not all recordings are ready"))?;
    let maybe_host_recording = recordings
        .iter()
        .find(|recording| recording.created_by == host);
    let host_stream = match maybe_host_recording {
        // Host has been set but there's no recording, skip transcoding.
        None => bail!("No host stream id in room"),
        Some(recording) => recording,
    };
    // Fetch event room opening time for events' offset calculation.
    let modified_event_room = ctx
        .event_client()
        .read_room(modified_event_room_id)
        .await
        .context("Failed to read modified event room")?;
    // The room must have a concrete opening bound.
    match modified_event_room.time {
        (Bound::Included(_), _) => (),
        _ => bail!("Wrong event room opening time"),
    };
    ctx.event_client()
        .dump_room(modified_event_room_id)
        .await
        .context("Dump room event failed")?;
    // Find the earliest recording.
    let earliest_recording = recordings
        .iter()
        .min_by(|a, b| a.started_at.cmp(&b.started_at))
        .ok_or_else(|| anyhow!("No recordings"))?;
    // Fetch pin events for building pin segments.
    let pin_events = ctx
        .event_client()
        .list_events(modified_event_room_id, PIN_EVENT_TYPE)
        .await
        .context("Failed to get pin events for room")?;
    // Fetch writer config snapshots for building muted segments.
    let mute_events = ctx
        .conference_client()
        .read_config_snapshots(minigroup.conference_room_id())
        .await
        .context("Failed to get writer config snapshots for room")?;
    // Build streams for template bindings.
    let streams = recordings
        .iter()
        .map(|recording| {
            // Offset of this recording relative to the host stream start,
            // shifted back by the audience preroll.
            let event_room_offset = recording.started_at
                - (host_stream.started_at
                    - Duration::milliseconds(ctx.get_preroll_offset(minigroup.audience())));
            let recording_offset = recording.started_at - earliest_recording.started_at;
            build_stream(
                recording,
                &pin_events,
                event_room_offset,
                recording_offset,
                &mute_events,
            )
        })
        .collect::<Result<Vec<_>, _>>()?;
    let host_stream_id = host_stream.rtc_id;
    // Create a tq task.
    let task = TqTask::TranscodeMinigroupToHls {
        streams,
        host_stream_id,
    };
    ctx.tq_client()
        .create_task(minigroup, task, priority)
        .await
        .context("TqClient create task failed")
}
/// Builds a single `TranscodeMinigroupToHlsStream` description for `recording`.
///
/// Derives pin segments from pin events and audio/video mute segments from
/// writer config snapshots. All segment bounds are expressed in milliseconds
/// relative to the recording start.
fn build_stream(
    recording: &ReadyRecording,
    pin_events: &[Event],
    event_room_offset: Duration,
    recording_offset: Duration,
    configs_changes: &[ConfigSnapshot],
) -> anyhow::Result<TranscodeMinigroupToHlsStream> {
    // End of the recording, taken from the upper bound of its last segment.
    let recording_end = match recording
        .segments
        .last()
        .map(|range| range.end)
        .ok_or_else(|| anyhow!("Recording segments have no end?"))?
    {
        Bound::Included(t) | Bound::Excluded(t) => t,
        Bound::Unbounded => bail!("Unbounded recording end"),
    };
    let pin_segments = collect_pin_segments(
        pin_events,
        event_room_offset,
        &recording.created_by,
        recording_end,
    );
    // We need only changes for the recording that fall into recording span
    let changes = configs_changes.iter().filter(|snapshot| {
        let m = (snapshot.created_at - recording.started_at).num_milliseconds();
        m > 0 && m < recording_end && snapshot.rtc_id == recording.rtc_id
    });
    // Open/close mute intervals as config snapshots toggle send flags.
    // `*_mute_start` holds the snapshot that opened the current interval.
    let mut video_mute_start = None;
    let mut audio_mute_start = None;
    let mut video_mute_segments = vec![];
    let mut audio_mute_segments = vec![];
    for change in changes {
        if change.send_video == Some(false) && video_mute_start.is_none() {
            video_mute_start = Some(change);
        }
        if change.send_video == Some(true) && video_mute_start.is_some() {
            let start = video_mute_start.take().unwrap();
            let muted_at = (start.created_at - recording.started_at).num_milliseconds();
            let unmuted_at = (change.created_at - recording.started_at).num_milliseconds();
            video_mute_segments.push((Bound::Included(muted_at), Bound::Excluded(unmuted_at)));
        }
        if change.send_audio == Some(false) && audio_mute_start.is_none() {
            audio_mute_start = Some(change);
        }
        if change.send_audio == Some(true) && audio_mute_start.is_some() {
            let start = audio_mute_start.take().unwrap();
            let muted_at = (start.created_at - recording.started_at).num_milliseconds();
            let unmuted_at = (change.created_at - recording.started_at).num_milliseconds();
            audio_mute_segments.push((Bound::Included(muted_at), Bound::Excluded(unmuted_at)));
        }
    }
    // If last mute segment was left open, close it with recording end
    if let Some(start) = video_mute_start {
        let muted_at = (start.created_at - recording.started_at).num_milliseconds();
        video_mute_segments.push((Bound::Included(muted_at), Bound::Excluded(recording_end)));
    }
    if let Some(start) = audio_mute_start {
        let muted_at = (start.created_at - recording.started_at).num_milliseconds();
        audio_mute_segments.push((Bound::Included(muted_at), Bound::Excluded(recording_end)));
    }
    let v = TranscodeMinigroupToHlsStream::new(recording.rtc_id, recording.stream_uri.to_owned())
        .offset(recording_offset.num_milliseconds() as u64)
        // We pass not modified but original segments here, this is done because:
        // First of all remember that:
        // - for non-hosts' streams modified segments == og segments
        // - for host's stream modified segments DO differ
        // All non-hosts' streams should be cutted out when there is a gap in host stream, for example:
        // host's str:     begin------------pause----pauseend------------end (notice pause in the middle)
        // non-host's str: begin-----------------------------------------end (no pause at all)
        // For a non-host's stream we must apply the pause in the host's stream
        // Tq does that but it needs pauses, and these pauses are only present in og segments, not in modified segments
        .modified_segments(recording.modified_segments.to_owned())
        .segments(recording.segments.to_owned())
        .pin_segments(pin_segments.into())
        .video_mute_segments(video_mute_segments.into())
        .audio_mute_segments(audio_mute_segments.into());
    Ok(v)
}
/// Collects the time segments (in ms, relative to the recording start)
/// during which the recording's author was pinned.
///
/// Out-of-range and zero-length segments are dropped; an unterminated pin
/// is extended to `recording_end`.
fn collect_pin_segments(
    pin_events: &[Event],
    event_room_offset: Duration,
    recording_created_by: &AgentId,
    recording_end: i64,
) -> Vec<(Bound<i64>, Bound<i64>)> {
    let mut pin_segments = vec![];
    let mut pin_start = None;
    // Push a segment only when it lies inside the recording span.
    let mut add_segment = |start, end| {
        if start <= end && start >= 0 && end <= recording_end {
            pin_segments.push((Bound::Included(start), Bound::Excluded(end)));
        }
    };
    for event in pin_events {
        if let EventData::Pin(data) = event.data() {
            // Shift from the event room's dimension to the recording's dimension.
            let occurred_at =
                event.occurred_at() as i64 / NS_IN_MS - event_room_offset.num_milliseconds();
            if data
                .agent_id()
                .map(|aid| *aid == *recording_created_by)
                .unwrap_or(false)
            {
                // Stream was pinned.
                // Its possible that teacher pins someone twice in a row
                // Do nothing in that case
                if pin_start.is_none() {
                    pin_start = Some(occurred_at);
                }
            } else if let Some(pinned_at) = pin_start {
                // stream was unpinned
                // its possible that pinned_at equals unpin's occurred_at after adjust
                // we skip segments like that
                if occurred_at > pinned_at {
                    add_segment(pinned_at, occurred_at);
                }
                pin_start = None;
            }
        }
    }
    // If the stream hasn't got unpinned since some moment then add a pin segment to the end
    // of the recording to keep it pinned.
    if let Some(start) = pin_start {
        add_segment(start, recording_end);
    }
    pin_segments
}
/// A recording whose post-processing prerequisites are all present.
#[derive(Debug)]
struct ReadyRecording {
    rtc_id: Uuid,
    stream_uri: String,
    // Original segments as captured.
    segments: Segments,
    // Segments after room adjustment (equal to `segments` when unmodified).
    modified_segments: Segments,
    started_at: DateTime<Utc>,
    created_by: AgentId,
}
impl ReadyRecording {
    /// Converts a DB recording row; returns `None` when any of the optional
    /// fields is still missing, i.e. the recording is not ready yet.
    fn from_db_object(recording: &crate::db::recording::Object) -> Option<Self> {
        Some(Self {
            rtc_id: recording.rtc_id(),
            stream_uri: recording.stream_uri().cloned()?,
            segments: recording.segments().cloned()?,
            modified_segments: recording.modified_or_segments().cloned()?,
            started_at: recording.started_at()?,
            created_by: recording.created_by().clone(),
        })
    }
}
/// Payload of the broadcast `minigroup.ready` event.
#[derive(Debug, PartialEq, Deserialize, Serialize)]
struct MinigroupReady {
    id: Uuid,
    scope: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tags: Option<JsonValue>,
    status: String,
    // Rounded duration reported by the transcoder.
    stream_duration: u64,
}
#[cfg(test)]
mod tests;
|
use std::net::IpAddr;
use std::str::FromStr;
use crate::error::{LookoutError, Result};
use crate::types::ASn;
/// Fully parsed output of a single traceroute run.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct TracerouteResult {
    pub header: TracerouteHeader,
    pub hops: Vec<TracerouteHop>,
}
/// Parsed first line of traceroute output: destination and run parameters.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct TracerouteHeader {
    pub destination: Host,
    pub max_hops: usize,
    pub packet_size: usize,
}
impl FromStr for TracerouteHeader {
    type Err = LookoutError;

    /// Parses the first line of traceroute output, e.g.
    /// `traceroute to one.one.one.one (1.1.1.1), 30 hops max, 60 byte packets`.
    ///
    /// Fixes over the previous version: "misssing" typos corrected, and the
    /// packet-size error paths now report "packet size missing" instead of a
    /// truncated message / a copy-pasted "max hops missing".
    fn from_str(s: &str) -> Result<TracerouteHeader> {
        let mut components = s.split(',');
        // "traceroute to <hostname> (<ip>)" — parsed as a `Host`.
        let destination = components.next()
            .ok_or("traceroute output: destination missing")?
            .trim_start_matches("traceroute to ")
            .trim()
            .parse()?;
        // "<n> hops max" — take the leading number.
        let max_hops = components.next()
            .ok_or("traceroute output: max hops missing")?
            .trim().split_whitespace()
            .next().ok_or("traceroute output: max hops missing")?
            .parse()?;
        // "<n> byte packets" — take the leading number.
        let packet_size = components.next()
            .ok_or("traceroute output: packet size missing")?
            .trim().split_whitespace()
            .next().ok_or("traceroute output: packet size missing")?
            .parse()?;
        Ok(TracerouteHeader {
            destination, max_hops, packet_size,
        })
    }
}
/// A single traceroute hop: up to three probes with their round-trip times
/// and the responding hosts (`None` where a probe got no answer).
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct TracerouteHop {
    pub rtts: [Option<isize>; 3],
    pub hosts: [Option<Host>; 3],
}
/// A host as printed by traceroute: hostname, IP address and optional ASN.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct Host {
    pub hostname: String,
    pub ip: IpAddr,
    pub asn: Option<ASn>,
}
impl Host {
    /// Builds a `Host` from its parts, taking ownership of the hostname.
    fn new(hostname: &str, ip: IpAddr, asn: Option<ASn>) -> Host {
        Host {
            // Field-init shorthand replaces the redundant `asn: asn`.
            hostname: hostname.to_owned(),
            ip,
            asn,
        }
    }
}
impl FromStr for Host {
    type Err = LookoutError;

    /// Parses a traceroute host field such as
    /// `one.one.one.one (1.1.1.1) [AS13335]`. The ASN is optional and `[*]`
    /// means it could not be resolved.
    ///
    /// Fixes over the previous version: "aroung"/"to many" typos corrected,
    /// delimiter checks use `strip_prefix`/`strip_suffix` instead of manual
    /// char peeking, and the `Err(..)?; unreachable!()` contortion is gone.
    fn from_str(s: &str) -> Result<Host> {
        let mut split = s.trim().split_whitespace();
        let hostname: &str = split.next().ok_or("traceroute output: missing hostname")?;
        // The IP is printed wrapped in parentheses: "(1.2.3.4)".
        let ip: IpAddr = {
            let raw = split.next().ok_or("traceroute output: missing ip address")?;
            raw.strip_prefix('(')
                .and_then(|inner| inner.strip_suffix(')'))
                .ok_or("traceroute output: need parentheses around ip address")?
                .parse()?
        };
        // The optional ASN is printed wrapped in brackets: "[AS13335]" or "[*]".
        let asn: Option<ASn> = match split.next() {
            None => None,
            // ASN could not be resolved.
            Some("[*]") => None,
            Some(raw) => {
                let inner = raw
                    .strip_prefix('[')
                    .and_then(|inner| inner.strip_suffix(']'))
                    .ok_or("traceroute output: need brackets around asn")?;
                Some(inner.parse()?)
            }
        };
        if split.next().is_some() {
            Err("traceroute output: host has too many fields")?;
        }
        Ok(Host::new(hostname, ip, asn))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Header line: destination host plus max-hops and packet-size numbers.
    #[test]
    fn test_parse_header() {
        assert_eq!(
            "traceroute to one.one.one.one (1.1.1.1), 30 hops max, 60 byte packets".parse::<TracerouteHeader>().unwrap(),
            TracerouteHeader {
                destination: Host::new("one.one.one.one", "1.1.1.1".parse().unwrap(), None),
                max_hops: 30,
                packet_size: 60,
            }
        );
    }
    // Host field: with ASN, with unresolved ASN, without ASN, and several
    // malformed variants that must be rejected.
    #[test]
    fn test_parse_host() {
        assert_eq!(
            "one.one.one.one (1.1.1.1) [AS13335]".parse::<Host>().unwrap(),
            Host::new("one.one.one.one", "1.1.1.1".parse().unwrap(), Some(ASn(13335)))
        );
        assert_eq!(
            " 100.127.1.7 (100.127.1.7) [*]\t".parse::<Host>().unwrap(),
            Host::new("100.127.1.7", "100.127.1.7".parse().unwrap(), None)
        );
        assert_eq!(
            "100.127.1.7 (100.127.1.7)".parse::<Host>().unwrap(),
            Host::new("100.127.1.7", "100.127.1.7".parse().unwrap(), None)
        );
        assert!("100.127.1.7 100.127.1.7".parse::<Host>().is_err());
        assert!("100.127.1.7 (100.127.1.7) AS1234".parse::<Host>().is_err());
        assert!("100.127.1.7 (100.127.1.7) [AS1234] Blah".parse::<Host>().is_err());
    }
}
|
#![warn(rust_2018_idioms)]
#![allow(dead_code)]
pub mod association;
pub mod chunk;
pub mod error;
pub mod error_cause;
pub mod packet;
pub mod param;
pub(crate) mod queue;
pub mod stream;
pub(crate) mod timer;
pub(crate) mod util;
|
#![recursion_limit = "1024"]
use crate::pages::{Login, Person, PersonsList};
use wasm_bindgen::prelude::*;
use yew::html;
use yew::prelude::*;
mod database;
mod pages;
mod person;
mod user;
/// The pages the application can show.
enum Page {
    Login,
    PersonsList,
    /// Single-person page; `None` appears to mean "create a new person"
    /// (the view passes `id = None` in that case) — confirm.
    OnePerson(Option<person::Person>),
}
/// Root application component state.
struct AuthApp {
    page: Page,
    current_user: Option<user::User>,
    // Whether the logged-in user holds the CanWrite privilege.
    can_write: bool,
    db_conn: database::DbConn,
    link: ComponentLink<Self>,
}
/// Messages driving authentication and page navigation.
enum Msg {
    LoggedIn(user::User),
    ChangeUser,
    GoToOnePersonPage(Option<person::Person>),
    GoToPersonsListPage,
}
impl Component for AuthApp {
    type Message = Msg;
    type Properties = ();
    /// Starts on the login page with no authenticated user.
    fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
        Self {
            page: Page::Login,
            current_user: None,
            can_write: false,
            db_conn: database::Database::new_thread_safe(),
            link,
        }
    }
    /// Applies auth/navigation messages; always triggers a re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::LoggedIn(user) => {
                self.page = Page::PersonsList;
                self.current_user = Some(user.clone());
                // Write access is derived once from the user's privileges.
                self.can_write = user.privileges.contains(&user::DbPrivilege::CanWrite);
            },
            Msg::ChangeUser => self.page = Page::Login,
            Msg::GoToOnePersonPage(person) => self.page = Page::OnePerson(person),
            Msg::GoToPersonsListPage => self.page = Page::PersonsList,
        }
        true
    }
    fn change(&mut self, _: Self::Properties) -> ShouldRender {
        true
    }
    /// Renders the shared header and the page selected by `self.page`.
    fn view(&self) -> Html {
        // Callbacks handed down to the child pages.
        let go_to_page = self.link.callback(|u: user::User| {
            Msg::LoggedIn(u)
        });
        let go_to_persons_list_page = self.link.callback(|_| {
            Msg::GoToPersonsListPage
        });
        let go_to_one_person_page = self.link.callback(|p: Option<person::Person>| {
            Msg::GoToOnePersonPage(p)
        });
        html! {
            <div>
                <style>
                {r#"
                .current-user {
                    color: #0000C0;
                }
                "#}
                </style>
                <header>
                    <h2>{"People Management"}</h2>
                    <p>
                        {"Current User: "}
                        <span class="current-user">
                        {
                            if let Some(user) = &self.current_user {
                                user.username.as_str()
                            } else {
                                "---"
                            }
                        }
                        </span>
                        {
                            match self.page {
                                Page::Login => html! {
                                    <div />
                                },
                                _ => html! {
                                    <span>
                                        {""}
                                        <button onclick=&self.link.callback(|_| Msg::ChangeUser)>
                                            {"Change User"}
                                        </button>
                                    </span>
                                }
                            }
                        }
                    </p>
                    <hr />
                </header>
                {
                    match &self.page {
                        Page::Login => html! {
                            <Login
                                user=self.current_user.clone()
                                on_log_in=go_to_page.clone()
                                db_conn=Some(self.db_conn.clone())
                            />
                        },
                        Page::PersonsList => html! {
                            <PersonsList
                                can_write=self.can_write
                                go_to_one_person_page=go_to_one_person_page.clone()
                                db_conn=Some(self.db_conn.clone())
                            />
                        },
                        Page::OnePerson(person) => html! {
                            <Person
                                can_write=self.can_write
                                id=if let Some(person) = person { Some(person.id) } else { None }
                                name=if let Some(person) = person { person.name.clone() } else { String::default() }
                                go_to_persons_list=go_to_persons_list_page.clone()
                                db_conn=Some(self.db_conn.clone())
                            />
                        },
                        _ => html! { <div></div> }
                    }
                }
            </div>
        }
    }
}
/// Wasm entry point: mounts the root component onto the document body.
#[wasm_bindgen(start)]
pub fn run_app() {
    App::<AuthApp>::new().mount_to_body();
}
|
mod actions;
pub mod keyboard;
mod mode_actions;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use self::actions::{Action, Response};
use self::keyboard::{KeyStroke, Keyboard};
use self::mode_actions::ModeActions;
use crate::core::ClientToClientWriter;
use failure::Error;
use xi_rpc::Peer;
lazy_static! {
    // Process-wide buffer for paste data; `None` until something is stored.
    // NOTE(review): not referenced in this file — presumably used by the
    // actions module; confirm.
    static ref PASTE_BUFFER: Arc<Mutex<Option<String>>> = Arc::new(Mutex::new(None));
}
/// Per-mode keybinding tables loaded from the user's config.
/// Each map goes from action name to keystroke description
/// (e.g. `move_down = "<key_up>"`).
#[derive(Debug, Default, Deserialize)]
pub struct Config {
    #[serde(default)]
    normal_mode: HashMap<String, String>,
    #[serde(default)]
    insert_mode: HashMap<String, String>,
    #[serde(default)]
    visual_mode: HashMap<String, String>,
    #[serde(default)]
    action_mode: HashMap<String, String>,
}
/// Editor input modes (vi-style).
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum Mode {
    Normal,
    Insert,
    Visual,
    Action,
}
impl Mode {
    /// Upper-case label of the mode, as shown in the status bar.
    #[allow(dead_code)]
    pub fn to_string(self) -> String {
        let label = match self {
            Mode::Normal => "NORMAL",
            Mode::Insert => "INSERT",
            Mode::Visual => "VISUAL",
            Mode::Action => "ACTION",
        };
        label.to_owned()
    }
}
/// Translates raw keystrokes into editor actions according to the current
/// mode and forwards resulting events to the core and the front client.
pub struct InputController {
    keyboard: Box<dyn Keyboard>,
    // Id of the xi view this controller drives; set by `open_file`.
    view_id: String,
    normal_mode: ModeActions,
    insert_mode: ModeActions,
    visual_mode: ModeActions,
    action_mode: ModeActions,
    mode: Mode,
    front_event_writer: ClientToClientWriter,
}
impl InputController {
    /// Builds the controller from a keyboard backend, a writer towards the
    /// front client and per-mode keybindings. Starts in normal mode.
    pub fn new(
        keyboard: Box<dyn Keyboard>,
        client_to_client_writer: ClientToClientWriter,
        config: &Config,
    ) -> Self {
        Self {
            keyboard,
            view_id: String::new(),
            normal_mode: ModeActions::setup(Mode::Normal, &config.normal_mode),
            insert_mode: ModeActions::setup(Mode::Insert, &config.insert_mode),
            visual_mode: ModeActions::setup(Mode::Visual, &config.visual_mode),
            action_mode: ModeActions::setup(Mode::Action, &config.action_mode),
            mode: Mode::Normal,
            front_event_writer: client_to_client_writer,
        }
    }
    /// Opens `file_path` in a new xi view, announces the path to the front
    /// client, sets the theme and seeds the mode indicator in the status bar.
    pub fn open_file(&mut self, core: &dyn Peer, file_path: &str) -> Result<(), Error> {
        let view_id = core
            .send_rpc_request("new_view", &json!({ "file_path": file_path }))
            .expect("failed to create the new view");
        self.view_id = view_id.as_str().unwrap().to_string();
        self.front_event_writer.send_rpc_notification(
            "set_path_for_view",
            &json!({
                "view_id": self.view_id,
                "path": file_path,
            }),
        );
        core.send_rpc_notification("set_theme", &json!({"theme_name": "Solarized (light)" }));
        self.front_event_writer.send_rpc_notification(
            "add_status_item",
            &json!({
                "key": "change-mode",
                "value": self.mode.to_string(),
                "alignment": "left",
            }),
        );
        Ok(())
    }
    /// Main input loop: maps each keystroke to an action for the current
    /// mode, executes it and reflects mode switches in the UI. Returns
    /// (sending an exit command to the front) once an action yields
    /// `Response::Stop`.
    pub fn start_keyboard_event_loop(&mut self, core: &dyn Peer) -> Result<(), Error> {
        loop {
            let key_res = self.keyboard.get_next_keystroke();
            if let Some(key) = key_res {
                let mut action = match self.mode {
                    Mode::Normal => self.normal_mode.get_action_from_keystroke(key),
                    Mode::Insert => self.insert_mode.get_action_from_keystroke(key),
                    Mode::Visual => self.visual_mode.get_action_from_keystroke(key),
                    Mode::Action => self.action_mode.get_action_from_keystroke(key),
                };
                // In insert mode an unbound key is inserted literally;
                // in every other mode unbound keys are ignored.
                if action.is_none() && self.mode == Mode::Insert {
                    action = Some(Action::InsertKeyStroke(key));
                } else if action.is_none() {
                    continue;
                }
                let res =
                    action
                        .unwrap()
                        .execute(&self.view_id, core, &mut self.front_event_writer);
                match res {
                    Response::Continue => continue,
                    Response::Stop => break,
                    Response::SwitchToInsertMode => self.mode = Mode::Insert,
                    Response::SwitchToNormalMode => self.mode = Mode::Normal,
                    Response::SwitchToVisualMode => self.mode = Mode::Visual,
                    Response::SwitchToActionMode => self.mode = Mode::Action,
                }
                // A mode switch happened: drop selections and refresh the
                // mode indicator.
                core.send_rpc_notification(
                    "edit",
                    &json!({ "method": "collapse_selections", "view_id": self.view_id}),
                );
                self.front_event_writer.send_rpc_notification(
                    "update_status_item",
                    &json!({
                        "key": "change-mode",
                        "value": self.mode.to_string(),
                    }),
                );
            }
        }
        self.front_event_writer
            .send_rpc_notification("command", &json!({"method": "exit"}));
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::Config;
    // Missing sections default to empty maps; present entries round-trip.
    #[test]
    fn test_config_deserialization() {
        let config: Config = toml::from_str(
            r#"
            [visual_mode]
            move_down = "<key_up>"
            "#,
        )
        .unwrap();
        assert_eq!(
            String::from("<key_up>"),
            config.visual_mode[&String::from("move_down")]
        );
    }
}
|
use crate::gpio::*;
use embedded_hal::spi::{FullDuplex, Mode, Phase, Polarity};
use esp8266::{IO_MUX, SPI1};
use void::Void;
/// Available SPI clock speeds.
///
/// The discriminant is the divider applied to the 80 MHz system clock
/// (e.g. `Spi2MHz` = 80 / 40); `Spi80MHz` feeds the system clock through.
#[derive(Copy, Clone, Eq, PartialEq)]
#[repr(u8)]
pub enum SpiClock {
    Spi2MHz = 40,
    Spi4MHz = 20,
    Spi5MHz = 16,
    Spi8MHz = 10,
    Spi10MHz = 8,
    Spi16MHz = 5,
    Spi20MHz = 4,
    Spi40MHz = 2,
    Spi80MHz = 1,
}
/// Extension trait turning the raw SPI1 peripheral into a master driver.
pub trait SPIExt {
    fn spi(
        self,
        sclk: Gpio14<HSPI>,
        miso: Gpio12<HSPI>,
        mosi: Gpio13<HSPI>,
        frequency: SpiClock,
    ) -> SPI1Master;
}
impl SPIExt for SPI1 {
    // Delegates to `SPI1Master::new`.
    fn spi(
        self,
        sclk: Gpio14<HSPI>,
        miso: Gpio12<HSPI>,
        mosi: Gpio13<HSPI>,
        frequency: SpiClock,
    ) -> SPI1Master {
        SPI1Master::new(self, sclk, miso, mosi, frequency)
    }
}
/// HSPI master driver owning the peripheral and its pins.
pub struct SPI1Master {
    spi: SPI1,
    sclk: Gpio14<HSPI>,
    miso: Gpio12<HSPI>,
    mosi: Gpio13<HSPI>,
}
impl SPI1Master {
    /// Initializes the HSPI peripheral as an SPI master on GPIO14/12/13.
    pub fn new(
        spi: SPI1,
        sclk: Gpio14<HSPI>,
        miso: Gpio12<HSPI>,
        mosi: Gpio13<HSPI>,
        frequency: SpiClock,
    ) -> Self {
        let mut spi = SPI1Master {
            spi,
            sclk,
            // NOTE(review): MISO is reconfigured as a push-pull output here,
            // which looks suspicious for a line driven by the slave — confirm.
            miso: miso.into_push_pull_output().into_hspi(),
            mosi,
        };
        // Reset the control register to its default state.
        unsafe { spi.spi.spi_ctrl.write_with_zero(|w| w) };
        spi.set_frequency(frequency);
        spi.spi.spi_user.write(|w| {
            w.spi_usr_mosi()
                .set_bit()
                .spi_duplex()
                .set_bit()
                .spi_ck_i_edge()
                .set_bit()
        });
        spi.set_data_bits(8);
        unsafe { spi.spi.spi_ctrl1.write_with_zero(|w| w) };
        spi
    }
    /// free up the spi device and return the pins used
    pub fn decompose(self) -> nb::Result<(SPI1, Gpio14<HSPI>, Gpio12<HSPI>, Gpio13<HSPI>), Void> {
        Ok((self.spi, self.sclk, self.miso, self.mosi))
    }
    /// Applies the embedded-hal SPI mode (clock phase and polarity).
    pub fn set_data_mode(&mut self, mode: Mode) {
        match mode.phase {
            Phase::CaptureOnFirstTransition => self
                .spi
                .spi_user
                .modify(|_, w| w.spi_ck_o_edge().clear_bit()),
            Phase::CaptureOnSecondTransition => {
                self.spi.spi_user.modify(|_, w| w.spi_ck_o_edge().set_bit())
            }
        }
        match mode.polarity {
            Polarity::IdleLow => self
                .spi
                .spi_pin
                .modify(|_, w| w.spi_idle_edge().clear_bit()),
            Polarity::IdleHigh => self.spi.spi_pin.modify(|_, w| w.spi_idle_edge().set_bit()),
        }
    }
    /// Programs the SPI clock divider.
    ///
    /// At 80 MHz the system clock is fed straight through; otherwise the
    /// divider counters are derived from the `SpiClock` divisor.
    pub fn set_frequency(&mut self, frequency: SpiClock) {
        let iomux = unsafe { &*IO_MUX::ptr() };
        if frequency == SpiClock::Spi80MHz {
            iomux
                .io_mux_conf
                .modify(|_, w| w.spi1_clk_equ_sys_clk().set_bit());
            unsafe {
                self.spi
                    .spi_clock
                    .write_with_zero(|w| w.spi_clk_equ_sysclk().set_bit())
            };
        } else {
            iomux
                .io_mux_conf
                .modify(|_, w| w.spi1_clk_equ_sys_clk().clear_bit());
            unsafe {
                self.spi.spi_clock.write_with_zero(|w| {
                    w.spi_clkcnt_n()
                        .bits(frequency as u8 - 1)
                        .spi_clkcnt_h()
                        .bits((frequency as u8) / 2 - 1)
                        // BUG FIX: this was `(frequency as u8 - 1) & 0x40`,
                        // which keeps only bit 6 and therefore always wrote 0
                        // for every supported divisor (max value 39). Per the
                        // ESP8266 TRM, SPI_CLKCNT_L must equal SPI_CLKCNT_N
                        // in master mode.
                        .spi_clkcnt_l()
                        .bits(frequency as u8 - 1)
                })
            };
        }
    }
    /// Sets the MOSI/MISO transfer length in bits (hardware stores len-1).
    fn set_data_bits(&mut self, bits: u16) {
        self.spi.spi_user1.write(|w| unsafe {
            w.reg_usr_mosi_bitlen()
                .bits(bits - 1)
                .reg_usr_miso_bitlen()
                .bits(bits - 1)
        });
    }
}
impl FullDuplex<u8> for SPI1Master {
    type Error = Void;
    /// Returns the last received byte once the peripheral is idle.
    ///
    /// NOTE(review): this only reads W0 after a prior `send`; it does not
    /// start a transfer on its own — confirm intended usage.
    fn read(&mut self) -> nb::Result<u8, Self::Error> {
        if self.spi.spi_cmd.read().spi_usr().bit_is_set() {
            return Err(nb::Error::WouldBlock);
        }
        Ok((self.spi.spi_w0.read().bits() & 0xFF) as u8)
    }
    /// Queues one byte for transmission; `WouldBlock` while a transfer is
    /// still in flight.
    fn send(&mut self, word: u8) -> nb::Result<(), Self::Error> {
        if self.spi.spi_cmd.read().spi_usr().bit_is_set() {
            return Err(nb::Error::WouldBlock);
        }
        self.set_data_bits(8);
        self.spi.spi_w0.write(|w| unsafe { w.bits(word as u32) });
        self.spi.spi_cmd.modify(|_, w| w.spi_usr().set_bit());
        Ok(())
    }
}
// Blanket blocking write/transfer implementations on top of FullDuplex.
impl embedded_hal::blocking::spi::write::Default<u8> for SPI1Master {}
impl embedded_hal::blocking::spi::transfer::Default<u8> for SPI1Master {}
|
pub mod msg;
pub mod constants;
|
use std::fmt::Debug;
use bevy::{
ecs::{entity::Entity, world::World},
math::{Quat, Vec3},
prelude::Transform,
reflect::{TypeUuid, Uuid},
};
use serde::{Deserialize, Serialize};
pub mod app;
pub mod builtin;
pub mod command;
pub mod data;
pub mod de;
pub mod loader;
pub mod manager;
pub mod registry;
use crate::data::{BoxedPrefabData, PrefabData};
/// Commonly used re-exports for downstream crates.
pub mod prelude {
    pub use crate::app::*;
    pub use crate::command::PrefabCommands;
    pub use crate::data::{BoxedPrefabData, PrefabData};
    pub use crate::Prefab;
}
use crate::registry::PrefabConstructFn;
///////////////////////////////////////////////////////////////////////////////
/// A loaded prefab asset: a self-contained `World` with a designated root
/// entity, its associated data and a base transform.
#[derive(Debug, TypeUuid)]
#[uuid = "58bc173f-8f5e-4200-88bc-9f12ae9f87cc"]
pub struct Prefab {
    // Entity inside `world` acting as the prefab's root.
    root_entity: Entity,
    data: BoxedPrefabData,
    transform: Transform,
    // Isolated world holding the prefab's entity hierarchy.
    world: World,
}
///////////////////////////////////////////////////////////////////////////////
// TODO: use `Override` trait instead
/// Optional per-instance transform overrides; `None` keeps the prefab's
/// own value for that component.
#[derive(Default, Debug, Serialize, Deserialize, Clone)]
#[serde(default)]
pub struct PrefabTransformOverride {
    translation: Option<Vec3>,
    rotation: Option<Quat>,
    scale: Option<Vec3>,
}
///////////////////////////////////////////////////////////////////////////////
/// Tags a prefab with pending instancing
#[derive(Debug, Clone)]
pub struct PrefabNotInstantiatedTag {
    // TODO: Tuple variant version doesn't work,
    // so a private unit field keeps construction inside this crate.
    _marker: (),
}
/// Reasons a prefab failed to instantiate.
#[derive(Debug, Clone, Copy)]
pub enum PrefabError {
    Missing,
    WrongExpectedSourcePrefab,
}
/// Tags a prefab as missing
#[derive(Debug, Clone)]
pub struct PrefabErrorTag(PrefabError);
impl PrefabErrorTag {
    /// The error that caused this tag to be attached.
    pub fn error(&self) -> PrefabError {
        self.0
    }
}
///////////////////////////////////////////////////////////////////////////////
#[derive(Clone)]
/// Overrides the prefab construct function, needed for procedural prefabs
pub struct PrefabConstruct(PrefabConstructFn);
/// Used internally to validate if the prefab match the expected type,
/// sadly this validation can't be done during deserialization
#[derive(Debug, Clone)]
struct PrefabTypeUuid(Uuid);
|
use ringbuf::Consumer;
use stainless_ffmpeg::prelude::*;
use std::collections::HashMap;
use std::ffi::{c_void, CStr, CString};
use std::io::{Cursor, Error, ErrorKind, Result};
use std::mem;
use std::ptr::null_mut;
use std::str::from_utf8_unchecked;
/// Converts a NUL-terminated C string pointer into an owned `String`.
///
/// Returns an empty string for a null pointer. Invalid UTF-8 bytes are
/// replaced with U+FFFD: the previous `from_utf8_unchecked` path was
/// undefined behavior whenever ffmpeg produced non-UTF-8 error text.
///
/// # Safety
/// `data` must be null or point to a valid NUL-terminated C string that
/// stays alive for the duration of the call.
unsafe fn to_string(data: *const i8) -> String {
    if data.is_null() {
        return "".to_string();
    }
    CStr::from_ptr(data).to_string_lossy().into_owned()
}
// Evaluates an ffmpeg call; if the return code is negative, converts it into
// an `std::io::Error` carrying the `av_strerror` message and returns early.
// The optional `$block` runs cleanup before the error is returned.
macro_rules! check_result {
    ($condition: expr, $block: block) => {
        let errnum = $condition;
        if errnum < 0 {
            let mut data = [0i8; AV_ERROR_MAX_STRING_SIZE];
            av_strerror(errnum, data.as_mut_ptr(), AV_ERROR_MAX_STRING_SIZE);
            $block;
            return Err(Error::new(
                ErrorKind::InvalidInput,
                to_string(data.as_ptr()),
            ));
        }
    };
    ($condition: expr) => {
        let errnum = $condition;
        if errnum < 0 {
            let mut data = [0i8; AV_ERROR_MAX_STRING_SIZE];
            av_strerror(errnum, data.as_mut_ptr(), AV_ERROR_MAX_STRING_SIZE);
            return Err(Error::new(
                ErrorKind::InvalidInput,
                to_string(data.as_ptr()),
            ));
        }
    };
}
/// An ffmpeg input opened over a custom ring-buffer AVIO source.
#[derive(Debug)]
pub struct MediaStream {
    pub format_context: *mut AVFormatContext,
    // Whether `avformat_find_stream_info` has been run.
    stream_info: bool,
    stream_ids: Vec<u8>,
    decoders: HashMap<u8, Decoder>,
    graph: Option<FilterGraph>,
}
/// Per-stream decoding state (raw ffmpeg handles plus the audio decoder).
#[derive(Debug)]
pub struct Decoder {
    codec: *mut AVCodec,
    context: *mut AVCodecContext,
    decoder: AudioDecoder,
}
/// ffmpeg AVIO read callback: pulls up to `buf_size` bytes from the ring
/// buffer consumer passed through `opaque`, returning the byte count.
///
/// NOTE(review): returns 0 when the consumer is empty — ffmpeg may treat 0
/// as EOF rather than "try again"; confirm this is intended for a live feed.
unsafe extern "C" fn read_data(opaque: *mut c_void, raw_buffer: *mut u8, buf_size: i32) -> i32 {
    log::trace!("Read more data: {} bytes", buf_size);
    // Reconstruct the consumer reference handed over in `MediaStream::new`.
    let consumer: &mut Consumer<u8> = &mut *(opaque as *mut Consumer<u8>);
    if consumer.is_empty() {
        log::warn!("Empty source stream");
        return 0;
    }
    let mut vec = std::slice::from_raw_parts_mut(raw_buffer, buf_size as usize);
    let size = consumer
        .write_into(&mut vec, Some(buf_size as usize))
        .unwrap();
    size as i32
}
impl MediaStream {
    /// Opens a media stream that reads `format`-formatted data from a
    /// ring-buffer consumer via a custom AVIO context.
    pub fn new(format: &str, consumer: Consumer<u8>) -> Result<Self> {
        unsafe {
            // Silence ffmpeg logging. (The previous additional call setting
            // AV_LOG_ERROR first was dead code: it was immediately
            // overwritten by this one.)
            av_log_set_level(AV_LOG_QUIET);
        }
        let buffer_size = 2048;
        let mut format_context = unsafe { avformat_alloc_context() };
        unsafe {
            let buffer = av_malloc(buffer_size);
            let cformat = CString::new(format).unwrap();
            let av_input_format = av_find_input_format(cformat.as_ptr());
            log::info!("Open dynamic buffer");
            // Read-only AVIO context.
            let writable_buffer = 0;
            // Ownership of the consumer is handed to ffmpeg via the opaque
            // pointer; it is never reclaimed here (NOTE(review): leaked for
            // the context's lifetime — confirm this is acceptable).
            let opaque = Box::new(consumer);
            let avio_context = avio_alloc_context(
                buffer as *mut u8,
                buffer_size as i32,
                writable_buffer,
                Box::into_raw(opaque) as *mut c_void,
                Some(read_data),
                None,
                None,
            );
            (*format_context).pb = avio_context;
            log::info!("Open Input");
            check_result!(avformat_open_input(
                &mut format_context,
                null_mut(),
                av_input_format,
                null_mut(),
            ));
        }
        log::info!("Created");
        Ok(MediaStream {
            decoders: HashMap::new(),
            format_context,
            stream_info: false,
            stream_ids: vec![],
            graph: None,
        })
    }
    /// Runs ffmpeg stream probing on the opened input.
    pub fn stream_info(&self) -> Result<()> {
        log::info!("Find stream info");
        unsafe {
            check_result!(avformat_find_stream_info(self.format_context, null_mut()));
            Ok(())
        }
    }
}
|
//! Handles the managment of an inactive page table.
use super::super::TEMPORARY_MAP_TABLE;
use super::current_page_table::CURRENT_PAGE_TABLE;
use super::frame_allocator::FRAME_ALLOCATOR;
use super::page_table::{Level4, PageTable};
use super::page_table_entry::*;
use super::page_table_manager::PageTableManager;
use super::PageFrame;
use core::ptr::Unique;
use memory::{Address, PhysicalAddress};
use sync::PreemptionState;
use x86_64::registers::control_regs::cr3;
/// The reference to the place where the level 4 table will be mapped.
const L4_TABLE: *mut PageTable<Level4> = 0xffffffffffffd000 as *mut PageTable<Level4>;
/// Represents a currently inactive page table that needs to be modified.
pub struct InactivePageTable {
    /// A reference to the level 4 table.
    l4_table: Unique<PageTable<Level4>>,
    /// The page frame of the level 4 table.
    l4_frame: PageFrame,
    /// Optionally contains the preemption state of the mapped entry in the
    /// current page table.
    ///
    /// `Some` means the table is currently mapped at `L4_TABLE` and must be
    /// unmapped on drop; `None` means no temporary mapping is held.
    preemption_state: Option<PreemptionState>
}
impl PageTableManager for InactivePageTable {
    /// Returns the level 4 table, first (re)mapping it into the temporary
    /// area so it is accessible even though the table is not active.
    fn get_l4(&mut self) -> &mut PageTable<Level4> {
        unsafe {
            self.map();
            self.l4_table.as_mut()
        }
    }
}
impl Drop for InactivePageTable {
    /// Removes the temporary mapping of this table, if one is held.
    fn drop(&mut self) {
        // Only tables holding a preemption state are still mapped at
        // `L4_TABLE` and need to be unmapped before going away.
        // (Idiomatic `is_some()` guard replaces the `match Some(_)/None=>()`.)
        if self.preemption_state.is_some() {
            self.unmap();
        }
    }
}
impl InactivePageTable {
    /// Creates a new inactive page table.
    ///
    /// Allocates a fresh frame, temporarily maps it into the current address
    /// space, zeroes it and installs the fixed entries: the temporary map
    /// table at index 510 and a recursive self-reference at index 511.
    ///
    /// # Safety
    /// - Should only be called during kernel setup.
    pub unsafe fn new() -> InactivePageTable {
        let frame = FRAME_ALLOCATOR.allocate();
        let preemption_state = CURRENT_PAGE_TABLE.lock().map_inactive(&frame);
        // Zero the page.
        let table = &mut *L4_TABLE;
        table.zero();
        // Set up some invariants.
        table[510]
            .set_address(TEMPORARY_MAP_TABLE)
            .set_flags(PRESENT | WRITABLE | NO_EXECUTE);
        // Entry 511 points back at this table's own frame (recursive mapping).
        table[511]
            .set_address(frame.get_address())
            .set_flags(PRESENT | WRITABLE | NO_EXECUTE);
        InactivePageTable {
            l4_table: Unique::new_unchecked(L4_TABLE),
            l4_frame: frame,
            preemption_state: Some(preemption_state)
        }
    }
/// Creates a copy of the current page table kernel part as an inactive
/// page table.
pub fn copy_from_current() -> InactivePageTable {
let frame = FRAME_ALLOCATOR.allocate();
let preemption_state = unsafe { CURRENT_PAGE_TABLE.lock().map_inactive(&frame) };
let table = unsafe { &mut *L4_TABLE };
table.zero();
table[256] = CURRENT_PAGE_TABLE.lock().get_l4()[256].clone();
table[257] = CURRENT_PAGE_TABLE.lock().get_l4()[257].clone();
table[506] = CURRENT_PAGE_TABLE.lock().get_l4()[506].clone();
table[507] = CURRENT_PAGE_TABLE.lock().get_l4()[507].clone();
unsafe {
table[510]
.set_address(TEMPORARY_MAP_TABLE)
.set_flags(PRESENT | WRITABLE | NO_EXECUTE);
}
table[511]
.set_address(frame.get_address())
.set_flags(PRESENT | WRITABLE | NO_EXECUTE);
CURRENT_PAGE_TABLE.lock().unmap_inactive(&preemption_state);
InactivePageTable {
l4_table: unsafe { Unique::new_unchecked(L4_TABLE) },
l4_frame: frame,
preemption_state: None
}
}
/// Creates an inactive page table at the given address.
///
/// The old_table parameter points to a table containing the preemption
/// state.
pub fn from_frame(frame: PageFrame, old_table: &InactivePageTable) -> InactivePageTable {
InactivePageTable {
l4_table: unsafe { Unique::new_unchecked(L4_TABLE) },
l4_frame: frame,
preemption_state: Some(unsafe {
old_table
.preemption_state
.as_ref()
.expect("The old table was not mapped.")
.copy()
})
}
}
/// Creates an inactive page table that points to the current page table.
pub fn from_current_table() -> InactivePageTable {
InactivePageTable {
l4_table: unsafe { Unique::new_unchecked(L4_TABLE) },
l4_frame: PageFrame::from_address(PhysicalAddress::from_usize(cr3().0 as usize)),
preemption_state: None
}
}
/// Maps the inactive page table to the current one.
///
/// # Safety
/// - Ensure that it is properly unmapped every time it's mapped.
unsafe fn map(&mut self) {
if self.preemption_state.is_none() {
let preemption_state = CURRENT_PAGE_TABLE.lock().map_inactive(&self.l4_frame);
self.preemption_state = Some(preemption_state);
}
}
/// Returns the page frame of this page table.
pub fn get_frame(&self) -> PageFrame {
unsafe { self.l4_frame.copy() }
}
/// Unmaps the currently loaded inactive page table.
pub fn unmap(&mut self) {
// TODO: Find something better than unmapping manually after every use.
if !self.preemption_state.is_none() {
CURRENT_PAGE_TABLE
.lock()
.unmap_inactive(self.preemption_state.as_ref().unwrap());
self.preemption_state = None;
}
}
}
|
pub mod allocator;
pub mod allocators;
mod bumpallocator;
pub mod dump_linear_scan;
pub mod embedded_meta_data;
pub mod large_object_allocator;
pub mod linear_scan;
pub mod malloc_allocator;
pub use self::allocator::Allocator;
pub use self::bumpallocator::BumpAllocator;
pub use self::large_object_allocator::LargeObjectAllocator;
pub use self::malloc_allocator::MallocAllocator;
pub use crate::policy::mallocspace::metadata::is_alloced_by_malloc;
|
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use common::result::Result;
use crate::domain::interaction::{Like, Reading, Review, View};
use crate::domain::publication::PublicationId;
use crate::domain::reader::ReaderId;
/// Persistence boundary for reader↔publication interactions
/// (views, readings, likes and reviews).
///
/// NOTE(review): the `Option` filters presumably mean "no filtering" when
/// `None` — confirm against the concrete implementations.
#[async_trait]
pub trait InteractionRepository: Sync + Send {
    /// Finds views, optionally filtered by reader, publication and date range.
    async fn find_views(
        &self,
        reader_id: Option<&ReaderId>,
        publication_id: Option<&PublicationId>,
        from: Option<&DateTime<Utc>>,
        to: Option<&DateTime<Utc>>,
    ) -> Result<Vec<View>>;
    /// Finds readings with the same optional filters as `find_views`.
    async fn find_readings(
        &self,
        reader_id: Option<&ReaderId>,
        publication_id: Option<&PublicationId>,
        from: Option<&DateTime<Utc>>,
        to: Option<&DateTime<Utc>>,
    ) -> Result<Vec<Reading>>;
    /// Finds likes with the same optional filters as `find_views`.
    async fn find_likes(
        &self,
        reader_id: Option<&ReaderId>,
        publication_id: Option<&PublicationId>,
        from: Option<&DateTime<Utc>>,
        to: Option<&DateTime<Utc>>,
    ) -> Result<Vec<Like>>;
    /// Finds reviews with the same optional filters as `find_views`.
    async fn find_reviews(
        &self,
        reader_id: Option<&ReaderId>,
        publication_id: Option<&PublicationId>,
        from: Option<&DateTime<Utc>>,
        to: Option<&DateTime<Utc>>,
    ) -> Result<Vec<Review>>;
    /// Persists a view (mutable so the implementation may assign ids/timestamps).
    async fn save_view(&self, view: &mut View) -> Result<()>;
    /// Persists a reading.
    async fn save_reading(&self, reading: &mut Reading) -> Result<()>;
    /// Persists a like.
    async fn save_like(&self, like: &mut Like) -> Result<()>;
    /// Persists a review.
    async fn save_review(&self, review: &mut Review) -> Result<()>;
    /// Removes the like a reader gave to a publication.
    async fn delete_like(&self, reader_id: &ReaderId, publication_id: &PublicationId)
        -> Result<()>;
    /// Removes the review a reader gave to a publication.
    async fn delete_review(
        &self,
        reader_id: &ReaderId,
        publication_id: &PublicationId,
    ) -> Result<()>;
}
|
use artell_domain::image::{Format, Image, ImageRepository};
use rusoto_core::{region::Region, ByteStream};
use rusoto_s3::{PutObjectRequest, S3Client, S3};
/// Image repository backed by an AWS S3 bucket.
pub struct S3ImageRepository {
    // S3 API client; region is fixed in `new` (ap-northeast-1).
    client: S3Client,
    // Name of the destination bucket.
    bucket: String,
}
impl S3ImageRepository {
    /// Builds a repository targeting `bucket` with a client pinned to the
    /// ap-northeast-1 region.
    pub fn new(bucket: String) -> Self {
        Self {
            client: S3Client::new(Region::ApNortheast1),
            bucket,
        }
    }
}
#[async_trait]
impl ImageRepository for S3ImageRepository {
    /// Builds the public URL for an object name.
    ///
    /// NOTE(review): the bucket/region are hard-coded here and must stay in
    /// sync with `new` (`ApNortheast1`) and the configured bucket name.
    fn url_to(&self, image_name: &str) -> String {
        format!(
            "https://artell.s3-ap-northeast-1.amazonaws.com/{}",
            image_name
        )
    }
    /// Uploads the image bytes to S3 with a content type derived from the
    /// image format, readable by everyone.
    async fn save(&self, image: Image) -> anyhow::Result<()> {
        log::debug!("start putting a new object to s3...");
        let content_type = match image.format() {
            Format::Png => "image/png",
            Format::Jpeg => "image/jpeg",
        };
        self.client
            .put_object(PutObjectRequest {
                bucket: self.bucket.clone(),
                key: image.name,
                // Due to a bug in rusoto_s3, calling ByteStream::new directly
                // makes the put-object request fail, so go through a Vec.
                // https://github.com/rusoto/rusoto/issues/1752
                body: Some(ByteStream::from(image.data.as_ref().to_vec())),
                content_type: Some(content_type.to_string()),
                // Grant read access to everyone (public object).
                grant_read: Some("uri=http://acs.amazonaws.com/groups/global/AllUsers".to_string()),
                ..Default::default()
            })
            .await?;
        log::debug!("completed to put a new object to s3");
        Ok(())
    }
}
|
use rocket::http::Status;
use rocket_contrib::json::Json;
use serde_json::Value;
use crate::models::author::{Author, NewAuthor};
use crate::models::client_wrapper::DBConnection;
use crate::requests::util;
#[get("/")]
/// Lists every author row; a query failure yields 404 with an empty body.
pub fn all(mut db_conn: DBConnection) -> Json<Value> {
    let query_result = db_conn.client.query("select * from author", &[]);
    let (status, authors) = if let Ok(rows) = query_result {
        (Status::Ok, Some(util::rows_to_values::<Author>(rows)))
    } else {
        (Status::NotFound, None)
    };
    util::build_response(status, authors)
}
#[get("/<id>")]
/// Fetches a single author by id; `query_one` fails when no row matches,
/// which maps to 404.
pub fn by_id(id: i32, mut db_conn: DBConnection) -> Json<Value> {
    let lookup = db_conn
        .client
        .query_one("select * from author where id = $1", &[&id]);
    let (status, author) = if let Ok(row) = lookup {
        (Status::Ok, Some(Author::from(row)))
    } else {
        (Status::NotFound, None)
    };
    util::build_response(status, author)
}
#[put("/add", format = "application/json", data = "<author>")]
/// Inserts a new author; `default` lets the database assign the id column.
pub fn add(author: Json<NewAuthor>, mut db_conn: DBConnection) -> Json<Value> {
    let inserted = db_conn.client.execute(
        "insert into author values (default, $1, $2)",
        &[&author.name, &author.country],
    );
    let status = if inserted.is_ok() {
        Status::Ok
    } else {
        Status::NotModified
    };
    util::build_simple_response(status)
}
#[post("/update/<id>", format = "application/json", data = "<author>")]
/// Overwrites name and country for the given author id.
pub fn update(id: i32, author: Json<NewAuthor>, mut db_conn: DBConnection) -> Json<Value> {
    let updated = db_conn.client.execute(
        "update author set (name, country) = ($1, $2) where id = $3",
        &[&author.name, &author.country, &id],
    );
    let status = if updated.is_ok() {
        Status::Ok
    } else {
        Status::NotModified
    };
    util::build_simple_response(status)
}
#[delete("/delete/<id>")]
/// Deletes the author with the given id; a database error maps to 304.
pub fn delete(id: i32, mut db_conn: DBConnection) -> Json<Value> {
    let deleted = db_conn
        .client
        .execute("delete from author where id = $1", &[&id]);
    let status = if deleted.is_ok() {
        Status::Ok
    } else {
        Status::NotModified
    };
    util::build_simple_response(status)
}
|
use crate::cartridge::{BankType, BankWindow, BaseMapper, Mapper, MemAttr};
use crate::cartridge::{ChrRom, NesHeader, PrgRom};
use serde::{Deserialize, Serialize};
/// Mapper state; all memory mapping is delegated to the shared `BaseMapper`.
#[derive(Serialize, Deserialize)]
pub struct State {
    inner: BaseMapper,
}
impl State {
    /// Builds the power-on mapper state from the cartridge header and ROMs.
    ///
    /// Layout: bank 0 of PRG at 0x8000 (switchable via `poke`), the last
    /// 16k PRG bank fixed at 0xC000, CHR mapped as one 8k window.
    /// NOTE(review): this matches UxROM-style (iNES mapper 2) behaviour —
    /// confirm which mapper number this module implements.
    pub fn new(header: &NesHeader, prg_rom: &PrgRom, chr_rom: &ChrRom) -> Self {
        let mut inner = BaseMapper::new();
        // NOTE(review): trailing args (0, 0x2000) presumably size PRG-RAM /
        // CHR memory — confirm against `BaseMapper::initialize`.
        inner.initialize(prg_rom, chr_rom, 0, 0x2000);
        inner.map_ppu_address(0x0000, BankType::CHR_MEM, 0, BankWindow::Size8k);
        let last_bank = inner.bank_num(BankType::PRG_ROM, BankWindow::Size16k) - 1;
        inner.map_cpu_address(0x8000, BankType::PRG_ROM, 0, BankWindow::Size16k);
        // The last bank stays fixed at 0xC000 and is never remapped.
        inner.map_cpu_address(0xC000, BankType::PRG_ROM, last_bank as u8, BankWindow::Size16k);
        match header.mirroring {
            super::MirrorMode::Vertical => {
                inner.initialize_and_map_nametable_vertical();
            }
            super::MirrorMode::Horizontal => {
                inner.initialize_and_map_nametable_horizontal();
            }
        };
        State { inner }
    }
}
impl Mapper for State {
    /// CPU read, delegated to the base mapper.
    fn peek(&mut self, addr: u16) -> u8 {
        self.inner.peek_cpu_memory(addr)
    }
    /// CPU write: 0x6000-0x7FFF goes to cartridge memory; any write in
    /// 0x8000-0xFFFF acts as the bank-select register.
    fn poke(&mut self, addr: u16, value: u8) {
        match addr {
            0x6000..=0x7FFF => {
                self.inner.poke_cpu_memory(addr, value)
            }
            0x8000..=0xFFFF => {
                // The low three bits select the 16k PRG bank at 0x8000.
                let selector = value & 0b0000_0111;
                self.inner.map_cpu_address(0x8000, BankType::PRG_ROM, selector, BankWindow::Size16k);
            }
            // Addresses below 0x6000 are not routed to the mapper.
            _ => unreachable!("CPU ADDRESS: 0x{:X}", addr)
        }
    }
    /// PPU read, delegated to the base mapper.
    fn vpeek(&mut self, addr: u16) -> u8 {
        self.inner.peek_ppu_memory(addr)
    }
    /// PPU write, delegated to the base mapper.
    fn vpoke(&mut self, addr: u16, value: u8) {
        self.inner.poke_ppu_memory(addr, value)
    }
    /// Replaces the whole mapper state from a bincode snapshot.
    /// Panics if the bytes are not a valid serialized `State`.
    fn load_state(&mut self, state: Vec<u8>) {
        let state: Self = bincode::deserialize(&state[..]).unwrap();
        *self = state;
    }
    /// Serializes the full mapper state with bincode.
    fn save_state(&self) -> Vec<u8> {
        bincode::serialize(&self).unwrap()
    }
}
|
// Auto-generated register API (svd2rust) for the DMACTxCR channel transmit
// control register. Do not hand-edit the accessors below — regenerate from
// the SVD instead.
#[doc = "Register `DMACTxCR` reader"]
pub type R = crate::R<DMACTX_CR_SPEC>;
#[doc = "Register `DMACTxCR` writer"]
pub type W = crate::W<DMACTX_CR_SPEC>;
#[doc = "Field `ST` reader - Start or Stop Transmission Command"]
pub type ST_R = crate::BitReader;
#[doc = "Field `ST` writer - Start or Stop Transmission Command"]
pub type ST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OSF` reader - Operate on Second Packet"]
pub type OSF_R = crate::BitReader;
#[doc = "Field `OSF` writer - Operate on Second Packet"]
pub type OSF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSE` reader - TCP Segmentation Enabled"]
pub type TSE_R = crate::BitReader;
#[doc = "Field `TSE` writer - TCP Segmentation Enabled"]
pub type TSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXPBL` reader - Transmit Programmable Burst Length"]
pub type TXPBL_R = crate::FieldReader;
#[doc = "Field `TXPBL` writer - Transmit Programmable Burst Length"]
pub type TXPBL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>;
impl R {
    #[doc = "Bit 0 - Start or Stop Transmission Command"]
    #[inline(always)]
    pub fn st(&self) -> ST_R {
        ST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 4 - Operate on Second Packet"]
    #[inline(always)]
    pub fn osf(&self) -> OSF_R {
        OSF_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 12 - TCP Segmentation Enabled"]
    #[inline(always)]
    pub fn tse(&self) -> TSE_R {
        TSE_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bits 16:21 - Transmit Programmable Burst Length"]
    #[inline(always)]
    pub fn txpbl(&self) -> TXPBL_R {
        TXPBL_R::new(((self.bits >> 16) & 0x3f) as u8)
    }
}
impl W {
    #[doc = "Bit 0 - Start or Stop Transmission Command"]
    #[inline(always)]
    #[must_use]
    pub fn st(&mut self) -> ST_W<DMACTX_CR_SPEC, 0> {
        ST_W::new(self)
    }
    #[doc = "Bit 4 - Operate on Second Packet"]
    #[inline(always)]
    #[must_use]
    pub fn osf(&mut self) -> OSF_W<DMACTX_CR_SPEC, 4> {
        OSF_W::new(self)
    }
    #[doc = "Bit 12 - TCP Segmentation Enabled"]
    #[inline(always)]
    #[must_use]
    pub fn tse(&mut self) -> TSE_W<DMACTX_CR_SPEC, 12> {
        TSE_W::new(self)
    }
    #[doc = "Bits 16:21 - Transmit Programmable Burst Length"]
    #[inline(always)]
    #[must_use]
    pub fn txpbl(&mut self) -> TXPBL_W<DMACTX_CR_SPEC, 16> {
        TXPBL_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Channel transmit control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dmactx_cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dmactx_cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DMACTX_CR_SPEC;
impl crate::RegisterSpec for DMACTX_CR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dmactx_cr::R`](R) reader structure"]
impl crate::Readable for DMACTX_CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dmactx_cr::W`](W) writer structure"]
impl crate::Writable for DMACTX_CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DMACTxCR to value 0"]
impl crate::Resettable for DMACTX_CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::sync::Once;
use std::error::Error;
use std::fs;
/// Reads a newline-separated list of integers from `filename`.
///
/// # Panics
/// - If the file cannot be read.
/// - If the file contains no values.
///
/// # Errors
/// Returns an error if any line cannot be parsed as an `i32`.
fn read_report_csv(filename: &str) -> Result<Vec<i32>, Box<dyn Error>> {
    // Panic if no file found (per the original stub's contract).
    let contents = fs::read_to_string(filename)
        .unwrap_or_else(|e| panic!("could not read file {}: {}", filename, e));
    // Parse each line; the first parse failure is propagated as an error.
    let values = contents
        .lines()
        .map(|line| line.trim().parse::<i32>())
        .collect::<Result<Vec<i32>, _>>()?;
    // Panic if no values read from file.
    if values.is_empty() {
        panic!("no values read from file {}", filename);
    }
    Ok(values)
}
/// Entry point; prints a greeting (the real work lives in the tests).
fn main() {
    let greeting = "Hello, world!";
    println!("{}", greeting);
}
#[cfg(test)]
mod tests {
    use super::*;

    // Guard so the fixture file is written at most once per test run.
    static INIT: Once = Once::new();
    static FILENAME: &str = "test_csv.csv";

    /// Writes the sample report fixture (idempotent across tests).
    fn init_test_csv() {
        INIT.call_once(|| {
            fs::write(FILENAME, b"1721\n979\n366\n299\n675\n1456").unwrap();
        });
    }

    /// `read_report_csv` returns the fixture values in file order.
    /// (Previously the test read the file but asserted nothing.)
    #[test]
    fn test_read_file() {
        init_test_csv();
        let result = read_report_csv(FILENAME).unwrap();
        assert_eq!(result, vec![1721, 979, 366, 299, 675, 1456]);
    }

    #[test]
    fn test_find_candidates() {
        // Still a stub: no `find_candidates` function exists yet to exercise.
        unimplemented!("Test Unimplemented");
    }
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![warn(missing_debug_implementations, missing_docs)]
//! Index configuration.
use vector::Metric;
use super::index_write_parameters::IndexWriteParameters;
/// The index configuration
/// The index configuration
#[derive(Debug, Clone)]
pub struct IndexConfiguration {
    /// Index write parameter
    pub index_write_parameter: IndexWriteParameters,
    /// Distance metric
    pub dist_metric: Metric,
    /// Dimension of the raw data
    pub dim: usize,
    /// Aligned dimension - round up dim to the nearest multiple of 8
    pub aligned_dim: usize,
    /// Total number of points in given data set
    pub max_points: usize,
    /// Number of points which are used as initial candidates when iterating to
    /// closest point(s). These are not visible externally and won't be returned
    /// by search. DiskANN forces at least 1 frozen point for dynamic index.
    /// The frozen points have consecutive locations.
    pub num_frozen_pts: usize,
    /// Calculate distance by PQ or not
    pub use_pq_dist: bool,
    /// Number of PQ chunks
    pub num_pq_chunks: usize,
    /// Use optimized product quantization
    /// Currently not supported
    pub use_opq: bool,
    /// Growth factor applied on top of `max_points`;
    /// potential for growth. 1.2 means the index can grow by up to 20%.
    pub growth_potential: f32,
    // TODO: below settings are not supported in current iteration
    // pub concurrent_consolidate: bool,
    // pub has_built: bool,
    // pub save_as_one_file: bool,
    // pub dynamic_index: bool,
    // pub enable_tags: bool,
    // pub normalize_vecs: bool,
}
impl IndexConfiguration {
    /// Create IndexConfiguration instance.
    ///
    /// A direct 1:1 mapping of arguments onto the public fields; see the
    /// field docs on the struct for each parameter's meaning.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        dist_metric: Metric,
        dim: usize,
        aligned_dim: usize,
        max_points: usize,
        use_pq_dist: bool,
        num_pq_chunks: usize,
        use_opq: bool,
        num_frozen_pts: usize,
        growth_potential: f32,
        index_write_parameter: IndexWriteParameters
    ) -> Self {
        Self {
            index_write_parameter,
            dist_metric,
            dim,
            aligned_dim,
            max_points,
            num_frozen_pts,
            use_pq_dist,
            num_pq_chunks,
            use_opq,
            growth_potential,
        }
    }
    /// Get the size of adjacency list that we build out.
    pub fn write_range(&self) -> usize {
        self.index_write_parameter.max_degree as usize
    }
}
|
use apllodb_shared_components::{ApllodbResult, SchemaIndex, ShortName};
use serde::{Deserialize, Serialize};
/// Column name.
///
/// Newtype over `ShortName`, inheriting its length validation (≤ 64 chars).
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)]
pub struct ColumnName(ShortName);
impl ColumnName {
    /// Constructor.
    ///
    /// # Failures
    /// - [NameTooLong](apllodb_shared_components::SqlState::NameTooLong) when:
    ///   - `name` length is longer than 64 (counted as UTF-8 character).
    pub fn new(name: impl ToString) -> ApllodbResult<Self> {
        ShortName::new(name).map(Self)
    }

    /// Borrows the column name as a string slice.
    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }

    /// Whether the given index refers to this column name.
    pub fn matches(&self, index: &SchemaIndex) -> bool {
        self.as_str() == index.attr()
    }
}
impl From<&ColumnName> for SchemaIndex {
fn from(cn: &ColumnName) -> Self {
SchemaIndex::from(cn.as_str())
}
}
|
//! # JsonRPC
//!
//! This module contains the utilities needed to build the JsonRPC api, both HTTP and
//! WebSockets, which can then directly served by `warp`.
//!
//! `warp` is a HTTP framework which is built around filters, this module is used to build filters
//! that mimics the Tendermint API.
mod api;
mod utils;
mod websockets;
pub use api::Jrpc;
pub use websockets::Ws;
use crate::node;
use crate::store::Storage;
use futures::future::FutureExt;
use warp::Filter as _;
pub const WEBSOCKET_PATH: &str = "websocket";
/// Serves the JsonRPC API over HTTP (at `/`) and WebSocket (at
/// `/websocket`) on the given address using `warp`.
///
/// The returned future runs forever; the `Infallible` error type only
/// exists to fit callers expecting a `Result`.
pub async fn serve<S: 'static + Storage + Sync + Send>(
    node: node::SharedNode<S>,
    addr: std::net::SocketAddr,
) -> Result<(), std::convert::Infallible> {
    let jrpc_api = warp::path::end().and(Jrpc::new_mimic(node));
    let ws = warp::path(WEBSOCKET_PATH).and(Ws::new_mimic());
    warp::serve(jrpc_api.or(ws))
        .run(addr)
        .then(|()| async { Ok(()) })
        .await
}
|
/// A row of the book table, serialized with camelCase keys for the API.
#[derive(Debug, Serialize)]
pub struct Book {
    pub id: i32,
    pub title: String,
    #[serde(rename = "authorId")]
    pub author_id: i32,
    pub published: bool,
}
impl From<postgres::Row> for Book {
    /// Maps a row by column position.
    /// NOTE(review): assumes column order `id, title, author_id, published`
    /// — keep in sync with the `select` statements that produce these rows.
    fn from(row: postgres::Row) -> Self {
        Book {
            id: row.get(0),
            title: row.get(1),
            author_id: row.get(2),
            published: row.get(3),
        }
    }
}
/// Payload for creating a book; the id is assigned by the database.
#[derive(Debug, Deserialize)]
pub struct NewBook {
    pub title: String,
    #[serde(rename = "authorId")]
    pub author_id: i32,
    pub published: bool,
}
|
#![cfg(test)]
use color_eyre::eyre::{eyre, Result};
use pretty_assertions::assert_eq;
use super::*;
use crate::template::block::{Block, BlockKind, If, IfExpr, IfOp, Var, VarEnv, VarEnvSet};
use crate::template::source::Source;
use crate::template::span::ByteSpan;
// A lone text block spans the whole input.
#[test]
fn parse_single_text() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"Hello World this is a text block"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(
        block,
        Block::new(ByteSpan::new(0usize, content.len()), BlockKind::Text)
    );
    Ok(())
}
// `{{!-- --}}` parses as a comment covering the whole input.
#[test]
fn parse_single_comment() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{!-- Hello World this is a comment block --}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(
        block,
        Block::new(ByteSpan::new(0usize, content.len()), BlockKind::Comment)
    );
    Ok(())
}
// `{{{ ... }}}` parses as an escaped block; the inner span excludes the braces.
#[test]
fn parse_single_escaped() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{{ Hello World this is a comment block }}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let inner = ByteSpan::new(3usize, content.len() - 3);
    assert_eq!(&content[inner], " Hello World this is a comment block ");
    assert_eq!(block.kind(), &BlockKind::Escaped(inner));
    Ok(())
}
// A bare `{{NAME}}` defaults to the Dotfile+Profile environment set.
#[test]
fn parse_single_var_default() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{OS}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let name = ByteSpan::new(2usize, content.len() - 2);
    assert_eq!(&content[name], "OS");
    let envs = VarEnvSet([Some(VarEnv::Dotfile), Some(VarEnv::Profile), None]);
    assert_eq!(block.kind(), &BlockKind::Var(Var { envs, name }));
    Ok(())
}
// `$` prefix selects only the Environment source.
#[test]
fn parse_single_var_env() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{$ENV}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let name = ByteSpan::new(3usize, content.len() - 2);
    assert_eq!(&content[name], "ENV");
    let envs = VarEnvSet([Some(VarEnv::Environment), None, None]);
    assert_eq!(block.kind(), &BlockKind::Var(Var { envs, name }));
    Ok(())
}
// `#` prefix selects only the Profile source.
#[test]
fn parse_single_var_profile() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{#PROFILE}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let name = ByteSpan::new(3usize, content.len() - 2);
    assert_eq!(&content[name], "PROFILE");
    let envs = VarEnvSet([Some(VarEnv::Profile), None, None]);
    assert_eq!(block.kind(), &BlockKind::Var(Var { envs, name }));
    Ok(())
}
// `&` prefix selects only the Dotfile source.
#[test]
fn parse_single_var_dotfile() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{&ITEM}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let name = ByteSpan::new(3usize, content.len() - 2);
    assert_eq!(&content[name], "ITEM");
    let envs = VarEnvSet([Some(VarEnv::Dotfile), None, None]);
    assert_eq!(block.kind(), &BlockKind::Var(Var { envs, name }));
    Ok(())
}
// Multiple prefixes combine, in prefix order ($ then & then #).
#[test]
fn parse_single_var_mixed() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{$&#MIXED}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let name = ByteSpan::new(5usize, content.len() - 2);
    assert_eq!(&content[name], "MIXED");
    let envs = VarEnvSet([
        Some(VarEnv::Environment),
        Some(VarEnv::Dotfile),
        Some(VarEnv::Profile),
    ]);
    assert_eq!(block.kind(), &BlockKind::Var(Var { envs, name }));
    Ok(())
}
// A duplicated environment prefix is a parse error.
#[test]
fn parse_single_vars() -> Result<()> {
    crate::tests::setup_test_env();
    // duplicate variable environment
    let content = r#"{{##OS}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .ok_or(eyre!("No block found"))?;
    assert!(block.is_err());
    Ok(())
}
// `{{@print ...}}` yields a Print block; inner span excludes the directive.
#[test]
fn parse_single_print() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@print FooBar}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let inner = ByteSpan::new(9usize, content.len() - 2);
    assert_eq!(&content[inner], "FooBar");
    assert_eq!(block.kind(), &BlockKind::Print(inner));
    Ok(())
}
// `{{@if ... == "..."}}{{@fi}}` parses to an If with a Compare(Eq) head.
#[test]
fn parse_single_if_eq() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{OS}} == "windows"}}{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let if_span = ByteSpan::new(0usize, 27usize);
    assert_eq!(&content[if_span], r#"{{@if {{OS}} == "windows"}}"#);
    let name = ByteSpan::new(8usize, 10usize);
    assert_eq!(&content[name], "OS");
    let envs = VarEnvSet([Some(VarEnv::Dotfile), Some(VarEnv::Profile), None]);
    let op = IfOp::Eq;
    let other = ByteSpan::new(17usize, 24usize);
    assert_eq!(&content[other], "windows");
    let end_span = ByteSpan::new(27usize, 34usize);
    assert_eq!(&content[end_span], r#"{{@fi}}"#);
    assert_eq!(
        block.kind(),
        &BlockKind::If(If {
            head: (
                if_span.span(IfExpr::Compare {
                    var: Var { envs, name },
                    op,
                    other
                }),
                vec![]
            ),
            elifs: vec![],
            els: None,
            end: end_span
        })
    );
    Ok(())
}
// Same shape as above but with `!=` producing IfOp::NotEq.
#[test]
fn parse_single_if_neq() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{OS}} != "windows"}}{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let if_span = ByteSpan::new(0usize, 27usize);
    assert_eq!(&content[if_span], r#"{{@if {{OS}} != "windows"}}"#);
    let name = ByteSpan::new(8usize, 10usize);
    assert_eq!(&content[name], "OS");
    let envs = VarEnvSet([Some(VarEnv::Dotfile), Some(VarEnv::Profile), None]);
    let op = IfOp::NotEq;
    let other = ByteSpan::new(17usize, 24usize);
    assert_eq!(&content[other], "windows");
    let end_span = ByteSpan::new(27usize, 34usize);
    assert_eq!(&content[end_span], r#"{{@fi}}"#);
    assert_eq!(
        block.kind(),
        &BlockKind::If(If {
            head: (
                if_span.span(IfExpr::Compare {
                    var: Var { envs, name },
                    op,
                    other
                }),
                vec![]
            ),
            elifs: vec![],
            els: None,
            end: end_span
        })
    );
    Ok(())
}
// An `@if` with only a variable parses to an Exists check.
#[test]
fn parse_single_if_exists() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{$#EXISTS}}}}{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let if_span = ByteSpan::new(0usize, 20usize);
    assert_eq!(&content[if_span], r#"{{@if {{$#EXISTS}}}}"#);
    let name = ByteSpan::new(10usize, 16usize);
    assert_eq!(&content[name], "EXISTS");
    let envs = VarEnvSet([Some(VarEnv::Environment), Some(VarEnv::Profile), None]);
    let end_span = ByteSpan::new(20usize, 27usize);
    assert_eq!(&content[end_span], r#"{{@fi}}"#);
    assert_eq!(
        block.kind(),
        &BlockKind::If(If {
            head: (
                if_span.span(IfExpr::Exists {
                    var: Var { envs, name }
                }),
                vec![]
            ),
            elifs: vec![],
            els: None,
            end: end_span
        })
    );
    Ok(())
}
// A leading `!` before the variable negates the existence check.
#[test]
fn parse_single_if_not_exists() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if !{{$#EXISTS}}}}{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let block = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(block.span(), &ByteSpan::new(0usize, content.len()));
    let if_span = ByteSpan::new(0usize, 21usize);
    assert_eq!(&content[if_span], r#"{{@if !{{$#EXISTS}}}}"#);
    let name = ByteSpan::new(11usize, 17usize);
    assert_eq!(&content[name], "EXISTS");
    let envs = VarEnvSet([Some(VarEnv::Environment), Some(VarEnv::Profile), None]);
    let end_span = ByteSpan::new(21usize, 28usize);
    assert_eq!(&content[end_span], r#"{{@fi}}"#);
    assert_eq!(
        block.kind(),
        &BlockKind::If(If {
            head: (
                if_span.span(IfExpr::NotExists {
                    var: Var { envs, name }
                }),
                vec![]
            ),
            elifs: vec![],
            els: None,
            end: end_span
        })
    );
    Ok(())
}
// BlockIter sees every top-level block plus the text runs between them.
#[test]
fn find_blocks() {
    crate::tests::setup_test_env();
    let content = r#"{{ Hello World }} {{{ Escaped {{ }} }} }}}
{{!-- Hello World {{}} {{{ asdf }}} this is a comment --}}
{{@if {{}} }} }}
"#;
    let iter = BlockIter::new(content);
    // Hello World
    // Text: SPACE
    // Escaped
    // Text: LF SPACES
    // Comment
    // Text: LF SPACES
    // If
    // Text: Closing LF SPACES
    assert_eq!(iter.count(), 8);
}
// Block boundaries are byte-based but must not split multi-byte chars.
#[test]
fn find_blocks_unicode() {
    crate::tests::setup_test_env();
    let content = "\u{1f600}{{{ \u{1f600} }}}\u{1f600}";
    let iter = BlockIter::new(content);
    // Text: Smiley
    // Escaped
    // Text: Smiley
    assert_eq!(iter.count(), 3);
}
// Braces inside a comment body don't terminate the comment.
#[test]
fn parse_comment() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{!-- Hello World this {{}} is a comment {{{{{{ }}}--}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let token = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(
        token,
        Block::new(ByteSpan::new(0usize, content.len()), BlockKind::Comment)
    );
    Ok(())
}
// Braces inside an escaped body don't terminate the escaped block.
#[test]
fn parse_escaped() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{{!-- Hello World this {{}} is a comment {{{{{{ }}--}}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let token = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(
        token,
        Block::new(
            ByteSpan::new(0usize, content.len()),
            BlockKind::Escaped(ByteSpan::new(3usize, content.len() - 3))
        )
    );
    Ok(())
}
// A full if / elif / else / fi chain parses as a single If block.
#[test]
fn parse_if_cmp() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{&OS}} == "windows" }}
DEMO
{{@elif {{&OS}} == "linux" }}
LINUX
{{@else}}
ASD
{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let token = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(token.span, ByteSpan::new(0usize, content.len()));
    assert!(matches!(token.kind, BlockKind::If(_)));
    Ok(())
}
// Comments, escapes and variables nested inside the branches still parse.
#[test]
fn parse_if_cmp_nested() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{&OS}} == "windows" }}
{{!-- This is a nested comment --}}
{{{ Escaped {{}} }}}
{{@elif {{&OS}} == "linux" }}
{{!-- Below is a nested variable --}}
{{ OS }}
{{@else}}
ASD
{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let token = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(token.span, ByteSpan::new(0usize, content.len()));
    assert!(matches!(token.kind, BlockKind::If(_)));
    Ok(())
}
// An existence-style if with a body also parses as an If block.
#[test]
fn parse_if_exists() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{&OS}} }}
DEMO
ASD
{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let token = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(token.span, ByteSpan::new(0usize, content.len()));
    assert!(matches!(token.kind, BlockKind::If(_)));
    Ok(())
}
// Exists head plus compare elifs plus nested comments/escapes all together.
#[test]
fn parse_if_mixed() -> Result<()> {
    crate::tests::setup_test_env();
    let content = r#"{{@if {{OS}}}}
print("No value for variable `OS` set")
{{@elif {{&OS}} != "windows"}}
print("OS is not windows")
{{@elif {{OS}} == "windows"}}
{{{!-- This is a nested comment. Below it is a nested variable block. --}}}
print("OS is {{OS}}")
{{@else}}
{{{!-- This is a nested comment. --}}}
print("Can never get here. {{{ {{OS}} is neither `windows` nor not `windows`. }}}")
{{@fi}}"#;
    let source = Source::anonymous(content);
    let mut parser = Parser::new(source);
    let token = parser
        .next_top_level_block()
        .expect("Found no block")
        .expect("Encountered a parse error");
    assert_eq!(token.span, ByteSpan::new(0usize, content.len()));
    assert!(matches!(token.kind, BlockKind::If(_)));
    Ok(())
}
#[test]
fn parse_print() -> Result<()> {
crate::tests::setup_test_env();
let content = r#"{{@print FooBar}}"#;
let source = Source::anonymous(content);
let mut parser = Parser::new(source);
let token = parser
.next_top_level_block()
.expect("Found no block")
.expect("Encountered a parse error");
assert_eq!(token.span, ByteSpan::new(0usize, content.len()));
assert!(matches!(token.kind, BlockKind::Print(_)));
Ok(())
}
#[test]
fn parse_variables() -> Result<()> {
    crate::tests::setup_test_env();
    // All three env sigils at once — `$` (environment), `#` (profile),
    // `&` (dotfile). The name span starts after the three sigil bytes.
    assert_eq!(
        parse_var("$#&FOO_BAR", 0)?,
        Var {
            envs: VarEnvSet([
                Some(VarEnv::Environment),
                Some(VarEnv::Profile),
                Some(VarEnv::Dotfile)
            ]),
            name: ByteSpan::new(3usize, 10usize),
        }
    );
    // Single sigil: only the dotfile env is set; the remaining slots stay None.
    assert_eq!(
        parse_var("&BAZ_1", 0)?,
        Var {
            envs: VarEnvSet([Some(VarEnv::Dotfile), None, None]),
            name: ByteSpan::new(1usize, 6usize),
        }
    );
    // Same input with a non-zero base offset: the name span is shifted by it.
    assert_eq!(
        parse_var("$#&FOO_BAR", 10)?,
        Var {
            envs: VarEnvSet([
                Some(VarEnv::Environment),
                Some(VarEnv::Profile),
                Some(VarEnv::Dotfile)
            ]),
            name: ByteSpan::new(13usize, 20usize),
        }
    );
    // invalid env / var_name
    assert!(parse_var("!FOO_BAR", 10).is_err());
    // duplicate env
    assert!(parse_var("&&FOO_BAR", 0).is_err());
    Ok(())
}
#[test]
fn parse_others() -> Result<()> {
    crate::tests::setup_test_env();
    // The returned span covers only the quoted content, excluding the quotes.
    assert_eq!(parse_other("\"BAZ_1\"", 0)?, ByteSpan::new(1usize, 6usize));
    assert_eq!(
        parse_other("This is a test \"Hello World How are you today\"", 0)?,
        ByteSpan::new(16usize, 45usize)
    );
    // An unterminated quote is a parse error...
    assert!(parse_other("This is a test \"Hello World How are you today", 0).is_err());
    // ...and so is input with no quoted section at all.
    assert!(parse_other("This is a test", 0).is_err());
    Ok(())
}
|
use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::num::TryFromIntError;
use chrono::{NaiveTime, Timelike};
/// A clock time that, unlike `chrono::NaiveTime`, may run past 24:00
/// (the hour is any `u8`); only the minute is constrained to `0..60`.
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct ExtendedTime {
    hour: u8,
    minute: u8,
}
impl fmt::Debug for ExtendedTime {
    // Renders as `H:MM` (hour unpadded, minute zero-padded), e.g. `9:05`.
    fn fmt(&self, f: &mut fmt::Formatter) -> std::result::Result<(), fmt::Error> {
        write!(f, "{}:{:02}", self.hour, self.minute)
    }
}
impl ExtendedTime {
    /// Build a time; the hour may exceed 23.
    ///
    /// # Panics
    /// Panics if `minute >= 60`.
    pub fn new(hour: u8, minute: u8) -> Self {
        assert!(minute < 60, "invalid time: minute is {}", minute);
        Self { hour, minute }
    }
    pub fn hour(self) -> u8 {
        self.hour
    }
    pub fn minute(self) -> u8 {
        self.minute
    }
    /// Offset by a signed number of minutes; `Err` when the result would be
    /// negative or the resulting hour would not fit in a `u8`.
    pub fn add_minutes(self, minutes: i16) -> Result<Self, TryFromIntError> {
        // mins_from_midnight() is at most 60 * 255 + 59 = 15_359, so the
        // cast to i16 cannot overflow.
        let as_minutes = self.mins_from_midnight() as i16 + minutes;
        Self::from_mins_from_midnight(as_minutes.try_into()?)
    }
    /// Offset by a signed number of hours; `Err` on `u8` over/underflow.
    pub fn add_hours(self, hours: i16) -> Result<Self, TryFromIntError> {
        Ok(Self {
            hour: (i16::from(self.hour) + hours).try_into()?,
            minute: self.minute,
        })
    }
    /// Inverse of [`mins_from_midnight`](Self::mins_from_midnight).
    ///
    /// Returns `Err` when `minute / 60` does not fit in a `u8` (i.e.
    /// `minute >= 15_360`); this case previously panicked via `unwrap`,
    /// defeating the purpose of the `Result` return type.
    pub fn from_mins_from_midnight(minute: u16) -> Result<Self, TryFromIntError> {
        let hour = (minute / 60).try_into()?;
        let minute = (minute % 60).try_into()?;
        Ok(Self { hour, minute })
    }
    /// Total minutes since 00:00 (never overflows: max is 15_359).
    pub fn mins_from_midnight(self) -> u16 {
        u16::from(self.minute) + 60 * u16::from(self.hour)
    }
}
impl TryInto<NaiveTime> for ExtendedTime {
type Error = ();
fn try_into(self) -> Result<NaiveTime, Self::Error> {
NaiveTime::from_hms_opt(self.hour.into(), self.minute.into(), 0).ok_or(())
}
}
impl From<NaiveTime> for ExtendedTime {
fn from(time: NaiveTime) -> ExtendedTime {
Self {
hour: time.hour().try_into().expect("invalid NaiveTime"),
minute: time.minute().try_into().expect("invalid NaiveTime"),
}
}
}
|
use simple_logger::SimpleLogger;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;
use std::process::{exit, Command, Stdio};
use structopt::StructOpt;
use log::{error, info};
/// Command line interface. The tool is invoked as `cargo remote …`, hence
/// the `cargo` bin name wrapping a single `remote` subcommand.
#[derive(StructOpt, Debug)]
#[structopt(name = "cargo-remote", bin_name = "cargo")]
enum Opts {
    #[structopt(name = "remote")]
    Remote {
        #[structopt(short = "r", long = "remote", help = "Remote ssh build server")]
        build_server: String,
        #[structopt(
            short = "b",
            long = "build-env",
            help = "Set remote environment variables. RUST_BACKTRACE, CC, LIB, etc. ",
            default_value = "RUST_BACKTRACE=1"
        )]
        build_env: String,
        #[structopt(
            short = "d",
            long = "rustup-default",
            help = "Rustup default (stable|beta|nightly)",
            default_value = "stable"
        )]
        rustup_default: String,
        #[structopt(
            short = "e",
            long = "env",
            help = "Environment profile. default_value = /etc/profile",
            default_value = "/etc/profile"
        )]
        env: String,
        #[structopt(
            short = "c",
            long = "copy-back",
            help = "Transfer specific files or folders from that folder back to the local machine"
        )]
        copy_back: Option<Vec<PathBuf>>,
        #[structopt(
            long = "no-copy-lock",
            help = "don't transfer the Cargo.lock file back to the local machine"
        )]
        no_copy_lock: bool,
        #[structopt(
            long = "manifest-path",
            help = "Path to the manifest to execute",
            default_value = "Cargo.toml",
            parse(from_os_str)
        )]
        manifest_path: PathBuf,
        // Remote parent directory under which the hashed build dir is created.
        #[structopt(
            long = "base-path",
            help = "the base dir of build path",
            default_value = "~"
        )]
        base_path: PathBuf,
        // When set, overrides the hash-derived directory entirely.
        #[structopt(
            long = "build-path",
            help = "Use this build_path instead of generating build_path from a hash."
        )]
        build_path: Option<PathBuf>,
        #[structopt(
            long = "transfer-hidden",
            help = "Transfer hidden files and directories to the build server"
        )]
        hidden: bool,
        #[structopt(
            long = "transfer-compress",
            help = "Compress file data during the transfer"
        )]
        compress: bool,
        #[structopt(help = "cargo command that will be executed remotely")]
        command: String,
        #[structopt(
            help = "cargo options and flags that will be applied remotely",
            name = "remote options"
        )]
        options: Vec<String>,
    },
}
// Entry point: sync the project to the build server, run cargo remotely,
// then optionally copy artifacts and Cargo.lock back. Fatal errors exit with
// distinct negative codes; the remote cargo's own exit status is mirrored.
fn main() {
    SimpleLogger::new().init().unwrap();
    // Only one subcommand exists, so destructure it irrefutably.
    let Opts::Remote {
        build_server,
        build_env,
        rustup_default,
        env,
        copy_back,
        no_copy_lock,
        manifest_path,
        hidden,
        build_path,
        command,
        options,
        compress,
        base_path,
    } = Opts::from_args();
    // Resolve the workspace root via `cargo metadata` (panics if the
    // manifest cannot be read or parsed).
    let project_dir = {
        let mut metadata_cmd = cargo_metadata::MetadataCommand::new();
        metadata_cmd.manifest_path(manifest_path).no_deps();
        let project_metadata = metadata_cmd.exec().unwrap();
        project_metadata.workspace_root
    };
    info!("Project dir: {:?}", project_dir);
    let build_path = build_path.unwrap_or_else(|| {
        // generate a unique build path by using the hashed project dir as folder on the remote machine
        let mut hasher = DefaultHasher::new();
        project_dir.hash(&mut hasher);
        // format!("{}/remote-builds/{}/", base_path, hasher.finish())
        let mut p = PathBuf::new();
        p.push(base_path);
        // "~" is allowed verbatim (expanded by the remote shell); anything
        // else must be absolute so the remote side is unambiguous.
        if p.to_string_lossy() != "~" {
            assert!(p.is_absolute(), "The base path must be absolute path.");
        }
        p.push("remote-builds");
        p.push(hasher.finish().to_string());
        p
    });
    info!("Transferring sources to build server.");
    // transfer project to build server
    // NOTE(review): the `.to_owned()` on "-a" is redundant — `arg` takes
    // anything AsRef<OsStr>.
    let mut rsync_to = Command::new("rsync");
    rsync_to
        .arg("-a".to_owned())
        .arg("--delete")
        .arg("--info=progress2")
        .arg("--exclude")
        .arg("target")
        .arg("--exclude")
        .arg("node_modules");
    if compress {
        rsync_to.arg("--compress");
    }
    if !hidden {
        // dotfiles/dirs are skipped unless --transfer-hidden was given
        rsync_to.arg("--exclude").arg(".*");
    }
    rsync_to
        // create the remote build dir on the fly before rsync writes into it
        .arg("--rsync-path")
        .arg(format!(
            "mkdir -p {} && rsync",
            build_path.to_string_lossy()
        ))
        .arg(format!("{}/", project_dir.to_string_lossy()))
        .arg(format!("{}:{}", build_server, build_path.to_string_lossy()))
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .stdin(Stdio::inherit())
        .output()
        .unwrap_or_else(|e| {
            error!("Failed to transfer project to build server (error: {})", e);
            exit(-4);
        });
    info!("Build ENV: {:?}", build_env);
    info!("Environment profile: {:?}", env);
    info!("Build path: {:?}", build_path.to_string_lossy());
    // Compose the remote shell command: source the profile, pin the rustup
    // toolchain, cd into the synced tree, then run cargo with the requested
    // env vars, subcommand and flags.
    let build_command = format!(
        "source {}; rustup default {}; cd {}; {} cargo {} {}",
        env,
        rustup_default,
        build_path.to_string_lossy(),
        build_env,
        command,
        options.join(" ")
    );
    info!("Starting build process. \n{}", build_command);
    // `-t` requests a pty so remote cargo keeps its interactive output.
    let output = Command::new("ssh")
        .arg("-t")
        .arg(&build_server)
        .arg(build_command)
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .stdin(Stdio::inherit())
        .output()
        .unwrap_or_else(|e| {
            error!("Failed to run cargo command remotely (error: {})", e);
            exit(-5);
        });
    // Optionally copy requested artifacts back into the local project tree.
    if let Some(file_names) = copy_back {
        assert!(file_names.len() > 0, "need at least a file or dir");
        for file_name in file_names {
            assert!(file_name.to_string_lossy().len() > 0, "file or dir that trans back cannot be empty!");
            let mut dir = project_dir.clone();
            dir.push(file_name.clone());
            let dir = dir.parent().unwrap().as_os_str();
            // ensure dirs.
            Command::new("mkdir")
                .arg("-p")
                .arg(dir)
                .stdout(Stdio::inherit())
                .stderr(Stdio::inherit())
                .stdin(Stdio::inherit())
                .output()
                .unwrap_or_else(|e| {
                    error!(
                        "Failed to create target dir on local machine (error: {})",
                        e
                    );
                    exit(-6);
                });
            info!(
                "Transferring {} back to client.",
                file_name.to_string_lossy()
            );
            let mut rsync_to = Command::new("rsync");
            if compress {
                rsync_to.arg("--compress");
            }
            rsync_to
                .arg("-a")
                .arg("-r")
                .arg("--delete")
                .arg("--info=progress2")
                .arg(format!(
                    "{}:{}/{}",
                    build_server,
                    build_path.to_string_lossy(),
                    file_name.to_string_lossy()
                ))
                .arg(format!(
                    "{}/{}",
                    project_dir.to_string_lossy(),
                    file_name.to_string_lossy()
                ))
                .stdout(Stdio::inherit())
                .stderr(Stdio::inherit())
                .stdin(Stdio::inherit())
                .output()
                .unwrap_or_else(|e| {
                    error!(
                        "Failed to transfer target back to local machine (error: {})",
                        e
                    );
                    exit(-6);
                });
        }
    }
    // Cargo.lock may have changed remotely (e.g. version resolution); copy
    // it back unless the user opted out.
    if !no_copy_lock {
        info!("Transferring Cargo.lock file back to client.");
        let mut rsync_to = Command::new("rsync");
        if compress {
            rsync_to.arg("--compress");
        }
        rsync_to
            .arg("-a")
            .arg("--delete")
            .arg("--info=progress2")
            .arg(format!(
                "{}:{}/Cargo.lock",
                build_server,
                build_path.to_string_lossy()
            ))
            .arg(format!("{}/Cargo.lock", project_dir.to_string_lossy()))
            .stdout(Stdio::inherit())
            .stderr(Stdio::inherit())
            .stdin(Stdio::inherit())
            .output()
            .unwrap_or_else(|e| {
                error!(
                    "Failed to transfer Cargo.lock back to local machine (error: {})",
                    e
                );
                exit(-7);
            });
    }
    // Mirror the remote cargo exit status locally so callers/CI see failures.
    if !output.status.success() {
        exit(output.status.code().unwrap_or(1))
    }
}
|
// Test modules, grouped by the language feature they exercise.
mod test_blocks;
mod test_functions;
mod test_operators;
mod test_primitives;
mod test_tables;
mod test_types;
mod test_variables;
mod test_statements;
// Shared helpers for the test modules above.
pub mod utils;
|
use rocket::catch;
use rocket_contrib::{json, json::JsonValue};
/// Rocket catcher for HTTP 504 (Gateway Timeout): returns a small JSON body
/// so API clients receive a structured error instead of Rocket's default
/// HTML error page.
#[catch(504)]
pub fn index() -> JsonValue {
    json!({ "message": "Evaluation timed out." })
}
|
extern crate advent_of_code_2017_day_x;
use advent_of_code_2017_day_x::*;
#[test]
fn part_1_example() {
    // FIXME: implement using the worked example from the puzzle statement;
    // currently an empty placeholder that always passes.
}
|
#![allow(proc_macro_derive_resolution_fallback)]
use crate::schema::passwords;
use chrono::NaiveDateTime;
use serde::Serialize;
use serde_derive::Deserialize;
/// A full row of the `passwords` table as read from the database.
#[derive(Debug, Queryable, Serialize, Deserialize, Identifiable, PartialEq, AsChangeset)]
#[table_name = "passwords"]
pub struct PasswordModel {
    pub id: u32,
    // Lookup key / entry name.
    pub key: String,
    // The stored password value.
    pub value: String,
    // Password length; presumably a generation parameter — confirm at call sites.
    pub length: i32,
    // Entry type discriminator; semantics defined by the application layer.
    pub type_: String,
    // Owning user's id.
    pub user_id: u32,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}
/// Insertable subset of `passwords` — `id`, `type_` and the timestamps are
/// left for the database/defaults to fill in.
#[derive(Insertable, Debug)]
#[table_name = "passwords"]
pub struct NewPassword<'a> {
    pub key: &'a str,
    pub value: &'a str,
    pub length: i32,
    pub user_id: u32,
}
/// Partial update for a `passwords` row. With diesel's `AsChangeset`,
/// `None` fields are skipped rather than written as NULL.
#[derive(Deserialize, AsChangeset, Default, Clone)]
#[table_name = "passwords"]
pub struct UpdatePassword<'a> {
    pub key: Option<&'a str>,
    pub value: Option<&'a str>,
    pub length: Option<i32>,
}
|
mod disease_sh;
mod gitlab;
use crate::{error::ProcessingError, fuzzers, metadata, search::read_runs};
use indicatif::ParallelProgressIterator;
use rayon::prelude::*;
use std::{fmt, fs, fs::File, path::Path, str::FromStr, time::Instant};
/// A supported fuzzing target (service/API under test), tagged with the
/// [`TargetKind`] parsed from the `name:kind` string form (see `FromStr`).
#[derive(Debug)]
pub enum Target {
    AgeOfEmpires2Api(TargetKind),
    CcccatalogApi(TargetKind),
    Covid19JapanWebApi(TargetKind),
    DiseaseSh(TargetKind),
    GitLab(TargetKind),
    HttpBin(TargetKind),
    JupyterServer(TargetKind),
    JupyterHub(TargetKind),
    MailHog(TargetKind),
    OpenFec(TargetKind),
    OpenTopoData(TargetKind),
    OttoParser(TargetKind),
    PslabWebapp(TargetKind),
    Pulpcore(TargetKind),
    RequestBaskets(TargetKind),
    RestlerDemo(TargetKind),
    Worklog(TargetKind),
}
impl fmt::Display for Target {
    /// Renders as `<target_name>:<kind>`, the same syntax `FromStr` accepts.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (name, kind) = match self {
            Target::AgeOfEmpires2Api(kind) => ("age_of_empires_2_api", kind),
            Target::CcccatalogApi(kind) => ("cccatalog_api", kind),
            Target::Covid19JapanWebApi(kind) => ("covid19_japan_web_api", kind),
            Target::DiseaseSh(kind) => ("disease_sh", kind),
            Target::GitLab(kind) => ("gitlab", kind),
            Target::HttpBin(kind) => ("httpbin", kind),
            Target::JupyterServer(kind) => ("jupyter_server", kind),
            Target::JupyterHub(kind) => ("jupyterhub", kind),
            Target::MailHog(kind) => ("mailhog", kind),
            Target::OpenFec(kind) => ("open_fec", kind),
            Target::OpenTopoData(kind) => ("opentopodata", kind),
            Target::OttoParser(kind) => ("otto_parser", kind),
            Target::PslabWebapp(kind) => ("pslab_webapp", kind),
            Target::Pulpcore(kind) => ("pulpcore", kind),
            Target::RequestBaskets(kind) => ("request_baskets", kind),
            Target::RestlerDemo(kind) => ("restler_demo", kind),
            Target::Worklog(kind) => ("worklog", kind),
        };
        write!(f, "{}:{}", name, kind)
    }
}
/// Flavour of a target configuration, parsed from the optional `:kind`
/// suffix of a target string (absent suffix means `Default`). The precise
/// semantics of `Linked` are defined by the run setup, not visible here.
#[derive(Debug)]
pub enum TargetKind {
    Default,
    Linked,
}
impl fmt::Display for TargetKind {
    /// Renders the exact variant name, matching what `FromStr` accepts.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            TargetKind::Default => "Default",
            TargetKind::Linked => "Linked",
        };
        f.write_str(label)
    }
}
/// Deserialize `Target` from a string via its `FromStr` implementation
/// (accepting the same `name` / `name:kind` forms).
impl<'de> serde::Deserialize<'de> for Target {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        FromStr::from_str(&s).map_err(serde::de::Error::custom)
    }
}
/// Error produced when parsing a [`Target`] or [`TargetKind`] from a string.
#[derive(Debug)]
pub enum TargetError {
    /// Carries the offending input (the full `name:kind` string for targets).
    UnknownTarget(String),
}
impl fmt::Display for TargetError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Single-variant enum: an irrefutable let replaces the match.
        let TargetError::UnknownTarget(name) = self;
        write!(f, "Unknown target: {}", name)
    }
}
impl FromStr for TargetKind {
type Err = TargetError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"Default" => Ok(Self::Default),
"Linked" => Ok(Self::Linked),
_ => Err(Self::Err::UnknownTarget(s.to_string())),
}
}
}
impl FromStr for Target {
type Err = TargetError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (target, kind) = s.split_once(':').unwrap_or_else(|| (s, "Default"));
let kind = TargetKind::from_str(kind)?;
match target {
"age_of_empires_2_api" => Ok(Target::AgeOfEmpires2Api(kind)),
"cccatalog_api" => Ok(Target::CcccatalogApi(kind)),
"covid19_japan_web_api" => Ok(Target::Covid19JapanWebApi(kind)),
"disease_sh" => Ok(Target::DiseaseSh(kind)),
"gitlab" => Ok(Target::GitLab(kind)),
"httpbin" => Ok(Target::HttpBin(kind)),
"jupyter_server" => Ok(Target::JupyterServer(kind)),
"jupyterhub" => Ok(Target::JupyterHub(kind)),
"mailhog" => Ok(Target::MailHog(kind)),
"open_fec" => Ok(Target::OpenFec(kind)),
"opentopodata" => Ok(Target::OpenTopoData(kind)),
"otto_parser" => Ok(Target::OttoParser(kind)),
"pslab_webapp" => Ok(Target::PslabWebapp(kind)),
"pulpcore" => Ok(Target::Pulpcore(kind)),
"request_baskets" => Ok(Target::RequestBaskets(kind)),
"restler_demo" => Ok(Target::RestlerDemo(kind)),
"worklog" => Ok(Target::Worklog(kind)),
_ => Err(Self::Err::UnknownTarget(s.to_string())),
}
}
}
/// Post-process every fuzzing run found in `in_directory` (filtered by
/// fuzzer/target/index), writing extracted failures to `out_directory`.
///
/// Runs are processed in parallel with a progress bar. A failing run is
/// reported to stderr but does not abort the batch; only `read_runs`
/// failures propagate as `Err`.
pub fn process(
    in_directory: &Path,
    out_directory: &Path,
    fuzzers: &[fuzzers::Fuzzer],
    targets: &[String],
    indices: &[String],
) -> Result<(), ProcessingError> {
    let start = Instant::now();
    let paths = read_runs(in_directory, fuzzers, targets, indices)?;
    let results: Vec<_> = paths
        .par_iter()
        .progress_count(paths.len() as u64)
        .map(|entry| process_run(entry, out_directory))
        .collect();
    // Report per-run errors after the parallel phase completes.
    for result in &results {
        if let Err(err) = result {
            eprintln!("Error: {}", err);
        }
    }
    println!(
        "TARGETS: Processed {} runs in {:.3} seconds",
        results.len(),
        Instant::now().duration_since(start).as_secs_f32()
    );
    Ok(())
}
/// Read the captured stdout for a run: `<run_dir>/target/stdout.txt`.
fn read_stdout(entry: &fs::DirEntry) -> std::io::Result<String> {
    let stdout_path = entry.path().join("target").join("stdout.txt");
    fs::read_to_string(stdout_path)
}
/// A single failure extracted from a run's captured stdout.
#[derive(Debug, serde::Serialize)]
pub(crate) struct Failure {
    // Presumably the HTTP method of the failing request, when the stdout
    // parser could recover it — confirm against the per-target parsers.
    method: Option<String>,
    // Request path of the failing request, when available.
    path: Option<String>,
    // Short description of the failure.
    title: String,
    // Full traceback / log excerpt associated with the failure.
    traceback: String,
}
/// Process one run directory: read its `metadata.json`, and for the targets
/// that have a stdout parser (disease.sh, GitLab) extract failures from the
/// captured stdout and persist them; all other targets are a no-op.
fn process_run(entry: &fs::DirEntry, out_directory: &Path) -> Result<(), ProcessingError> {
    let mut path = entry.path();
    path.push("metadata.json");
    let metadata = metadata::read_metadata(&path)?;
    match metadata.target {
        Target::DiseaseSh(_) => {
            let stdout = read_stdout(entry).expect("Failed to read stdout.txt");
            let failures = disease_sh::process_stdout(&stdout);
            store_failures(entry, &failures, out_directory)
        }
        Target::GitLab(_) => {
            let stdout = read_stdout(entry).expect("Failed to read stdout.txt");
            let failures = gitlab::process_stdout(&stdout);
            store_failures(entry, &failures, out_directory)
        }
        // Deliberately exhaustive (no `_` arm): adding a Target variant
        // forces a decision here about whether it gets stdout processing.
        Target::AgeOfEmpires2Api(_)
        | Target::CcccatalogApi(_)
        | Target::Covid19JapanWebApi(_)
        | Target::HttpBin(_)
        | Target::JupyterServer(_)
        | Target::JupyterHub(_)
        | Target::MailHog(_)
        | Target::OpenFec(_)
        | Target::OpenTopoData(_)
        | Target::OttoParser(_)
        | Target::PslabWebapp(_)
        | Target::Pulpcore(_)
        | Target::RequestBaskets(_)
        | Target::RestlerDemo(_)
        | Target::Worklog(_) => {}
    }
    Ok(())
}
/// Write `failures` as JSON to `<out_directory>/<run_dir_name>/failures.json`,
/// creating the directory if needed. Panics on any I/O or serialization
/// error (acceptable here: callers treat a run as all-or-nothing).
fn store_failures(entry: &fs::DirEntry, failures: &[Failure], out_directory: &Path) {
    let out_directory =
        out_directory.join(entry.path().file_name().expect("Missing directory name"));
    fs::create_dir_all(&out_directory).expect("Failed to create output directory");
    let output_path = out_directory.join("failures.json");
    let output_file = File::create(output_path).expect("Failed to create file");
    serde_json::to_writer(output_file, failures).expect("Failed to serialize failures");
}
|
use crate::rendering::Instance;
use cgmath::prelude::*;
use rand::prelude::*;
use std::ops::{Add, Div, DivAssign, Mul};
use web_sys::WebGlRenderingContext as GL;
use crate::{
quadtree::Quadtree, quadtree::Rectangle as Rect, rendering::Rectangle, rendering::Triangle,
utils::ScreenSpaceEncoder,
};
/// A single boid in the flock simulation. Positions/velocities are in
/// screen-space units matching the canvas dimensions.
#[derive(Debug, Copy, Clone)]
pub struct Boid {
    pub position: cgmath::Vector2<f32>,
    pub velocity: cgmath::Vector2<f32>,
    // Force accumulator, zeroed and rebuilt every `update`.
    pub acceleration: cgmath::Vector2<f32>,
    // Per-force magnitude caps for the three classic steering behaviours.
    pub alignment_force: f32,
    pub cohesion_force: f32,
    pub seperation_force: f32,
    // Neighbour sensing radius for alignment and cohesion.
    pub perception_size: f32,
    // Hard speed cap applied after integrating forces.
    pub max_speed: f32,
    // Stable index into the flock; used to skip self-comparison.
    pub index: usize,
}
impl Boid {
    /// Advance this boid one tick.
    ///
    /// `sensed_boids` holds the neighbours found via the quadtree, each
    /// paired with its precomputed distance to this boid. The three classic
    /// steering forces are accumulated into the acceleration, then velocity
    /// and position are integrated and speed is clamped to
    /// `[0.25 * max_speed, max_speed]`.
    pub fn update(&mut self, width: i32, height: i32, sensed_boids: &[(Boid, f32)]) {
        self.acceleration *= 0.0;
        let alignment = self.align(sensed_boids);
        let cohesion = self.cohesion(sensed_boids);
        let seperation = self.seperation(sensed_boids);
        self.edges(width, height); // wrap space into a torus
        self.acceleration = self.acceleration + seperation + cohesion + alignment;
        self.position = self.position.add(self.velocity);
        self.velocity = self.velocity.add(self.acceleration);
        self.velocity = self.limit(&self.velocity, self.max_speed);
        // Never let a boid stall: enforce a minimum cruising speed.
        if self.velocity.magnitude() < self.max_speed * 0.25 {
            self.velocity = self.set_mag(self.max_speed * 0.25, &self.velocity);
        }
    }

    /// Wrap positions so the world behaves like a torus instead of letting
    /// boids drain off the edges. The 11-unit margin lets the sprite leave
    /// the screen fully before reappearing on the opposite side.
    fn edges(&mut self, width: i32, height: i32) {
        if self.position.x > (width + 11) as f32 {
            self.position.x = -11.0;
        } else if self.position.x < -11.0 {
            self.position.x = (width + 11) as f32;
        }
        if self.position.y > (height + 11) as f32 {
            self.position.y = -11.0;
        } else if self.position.y < -11.0 {
            self.position.y = (height + 11) as f32;
        }
    }

    /// Steer towards the average velocity of neighbours within
    /// `perception_size`, capped at `alignment_force`.
    fn align(&mut self, boids: &[(Boid, f32)]) -> cgmath::Vector2<f32> {
        let mut steering = cgmath::Vector2::new(0.0, 0.0);
        let mut total = 0;
        for (other, distance) in boids {
            if other.index == self.index {
                continue;
            }
            if *distance < self.perception_size {
                steering += other.velocity;
                total += 1;
            }
        }
        if total > 0 && steering != cgmath::Vector2::new(0.0, 0.0) {
            steering /= total as f32;
            steering = self.set_mag(self.max_speed, &steering);
            steering -= self.velocity;
            steering = self.limit(&steering, self.alignment_force)
        }
        steering
    }

    /// Steer towards the neighbours' centre of mass, capped at
    /// `cohesion_force`. (Unlike `align`, the original intentionally or not
    /// includes this boid itself in the average; behaviour preserved.)
    fn cohesion(&mut self, boids: &[(Boid, f32)]) -> cgmath::Vector2<f32> {
        let mut steering = cgmath::Vector2::new(0.0, 0.0);
        let mut total = 0;
        for (other, distance) in boids {
            if *distance < self.perception_size {
                steering += other.position;
                total += 1;
            }
        }
        if total > 0 && steering != cgmath::Vector2::new(0.0, 0.0) {
            steering /= total as f32;
            steering -= self.position;
            steering = self.set_mag(self.max_speed, &steering);
            steering -= self.velocity;
            steering = self.limit(&steering, self.cohesion_force);
        }
        steering
    }

    /// Steer away from neighbours closer than a tighter radius than
    /// `perception_size`, weighting each repulsion by 1/distance so closer
    /// neighbours push harder; capped at `seperation_force`.
    fn seperation(&mut self, boids: &[(Boid, f32)]) -> cgmath::Vector2<f32> {
        let perception = 50.0 / 1.75;
        let mut steering = cgmath::Vector2::new(0.0, 0.0);
        let mut total = 0;
        for (other, distance) in boids {
            if *distance < perception {
                // Vector pointing from the neighbour towards us.
                let mut diff = self.position;
                diff -= other.position;
                diff /= *distance;
                steering += diff;
                total += 1;
            }
        }
        if total > 0 && steering != cgmath::Vector2::new(0.0, 0.0) {
            steering /= total as f32;
            steering = self.set_mag(self.max_speed, &steering);
            steering -= self.velocity;
            steering = self.limit(&steering, self.seperation_force);
        }
        steering
    }

    /// Clamp `vec` to at most `speed` without changing its direction.
    fn limit(&self, vec: &cgmath::Vector2<f32>, speed: f32) -> cgmath::Vector2<f32> {
        if vec.magnitude() > speed {
            self.set_mag(speed, vec)
        } else {
            *vec
        }
    }

    /// Rescale `vec` to magnitude `mag`; the zero vector (whose direction is
    /// undefined) maps to the zero vector.
    fn set_mag(&self, mag: f32, vec: &cgmath::Vector2<f32>) -> cgmath::Vector2<f32> {
        let current = vec.magnitude();
        if current == 0.0 {
            cgmath::Vector2::new(0.0, 0.0)
        } else {
            (vec * mag) / current
        }
    }
}
/// The whole simulation: all boids plus the spatial index and GL resources
/// used to draw them.
pub struct Flock {
    // Current world size (width, height) in screen units.
    dimensions: (u32, u32),
    // width / height; updated each frame but not read within this file —
    // confirm before removing.
    aspect: f32,
    boids: Vec<Boid>,
    // Shared triangle geometry; every boid is drawn as an instance of it.
    triangle: Triangle,
    // Spatial index over boid positions, rebuilt every update.
    quadtree: Quadtree,
    // Rectangle geometry used to visualise the quadtree cells.
    line: Rectangle,
    // Maps screen coordinates into the GL coordinate space (presumably clip
    // space — see `ScreenSpaceEncoder`).
    encoder: ScreenSpaceEncoder,
    // Frame counter; wraps at 101.
    count: u32,
}
impl Flock {
    /// Create a flock of 300 boids at random positions with random
    /// velocities in [-1, 1) per axis, and index their positions in a
    /// fresh quadtree.
    pub fn new(gl: &GL, width: u32, height: u32) -> Self {
        let encoder = ScreenSpaceEncoder {
            dimensions: (width, height),
        };
        // Unit triangle used as the boid sprite: (x, y) pairs of its corners.
        let boidshape = [0.0, 0.5, 0.34, -0.5, -0.34, -0.5];
        let mut rng = rand::thread_rng();
        let mut boids = Vec::<Boid>::new();
        let mut qt = Quadtree::new(
            2,
            Rect {
                x: 0.0,
                y: 0.0,
                width: width as f32,
                height: height as f32,
            },
        );
        for index in 0..300 {
            boids.push(Boid {
                position: cgmath::Vector2::new(
                    rng.gen::<f32>() * encoder.dimensions.0 as f32,
                    rng.gen::<f32>() * encoder.dimensions.1 as f32,
                ),
                velocity: cgmath::Vector2::new(
                    (rng.gen::<f32>() * 2.0) - 1.0,
                    (rng.gen::<f32>() * 2.0) - 1.0,
                ),
                acceleration: cgmath::Vector2::new(0.0, 0.0),
                alignment_force: 0.4,
                cohesion_force: 0.2,
                seperation_force: 0.4,
                perception_size: 75.0 / 2.0,
                max_speed: 7.0 / 2.0,
                index,
            });
            //TODO vectorlib
            let _ = qt.insert(boids[index].position, index);
        }
        Self {
            dimensions: (width, height),
            aspect: width as f32 / height as f32,
            triangle: Triangle::new(&gl, boidshape),
            boids,
            quadtree: qt,
            line: Rectangle::new(&gl),
            encoder,
            count: 0,
        }
    }
    /// Distance between two points, intended to respect the toroidal wrap.
    ///
    /// NOTE(review): a true torus metric wraps an axis when its delta exceeds
    /// *half* the world size (`dx > width as f32 / 2.0`, taking `width - dx`).
    /// As written the conditions are almost never true (deltas only reach
    /// ~width + 22 because of the edge margin), so this is effectively plain
    /// Euclidean distance — and `width - dx` would go negative if the branch
    /// did fire. Confirm intent before changing.
    fn wrapped_distance(
        vec1: cgmath::Vector2<f32>,
        vec2: cgmath::Vector2<f32>,
        width: u32,
        height: u32,
    ) -> f32 {
        let mut dx = (vec1.x - vec2.x).abs();
        let mut dy = (vec1.y - vec2.y).abs();
        if dx > width as f32 {
            dx = width as f32 - dx;
        }
        if dy > height as f32 {
            dy = height as f32 - dy;
        }
        return (dx.powi(2) + dy.powi(2)).sqrt();
    }
    /// NOTE(review): collects quadtree hits for `circle` (x, y, radius) into
    /// a local that is immediately dropped — no return value, no side
    /// effects. Appears to be dead/unfinished code; the `> 2.0` guard is
    /// unexplained.
    fn getLocalBoids(&self, circle: (f32, f32, f32)) {
        let mut boid_indexs: Vec<usize> = Vec::new();
        if circle.0 + circle.2 > 2.0 {
            boid_indexs.extend(self.quadtree.query(circle));
        }
    }
    /// Plain Euclidean distance between two `(x, y)` tuples.
    fn distance(vec1: (f32, f32), vec2: (f32, f32)) -> f32 {
        return ((vec2.0 - vec1.0).powi(2) + (vec2.1 - vec1.1).powi(2)).sqrt();
    }
    /// Advance the whole simulation one tick, adapting to the current canvas
    /// size and rebuilding the quadtree from the new positions.
    pub fn update(&mut self, width: i32, height: i32) {
        self.encoder.updateDimensions(width as u32, height as u32);
        self.count = (self.count + 1) % 101;
        self.aspect = width as f32 / height as f32;
        self.dimensions = (width as u32, height as u32);
        self.quadtree.set_dimensions(width as f32, height as f32);
        let mut newquadtree = Quadtree::new(
            2,
            Rect {
                x: 0.0,
                y: 0.0,
                width: width as f32,
                height: height as f32,
            },
        );
        //self.quadtree.reset();
        //got a feeling this needs to be in the loop, p sure it causes ghost boids or something when it isnt
        // Snapshot last tick's state: neighbours are sensed against the old
        // positions while each boid is mutated in place below.
        let test = self.boids.clone();
        for (pos, boid) in self.boids.iter_mut().enumerate() {
            let mut sensed: Vec<(Boid, f32)> = Vec::new();
            // Query the previous tick's quadtree for nearby candidates, then
            // pair each with its computed distance.
            let selected =
                self.quadtree
                    .query((boid.position.x, boid.position.y, boid.perception_size));
            for i in selected {
                if boid.index != test[i].index {
                    sensed.push((
                        test[i],
                        crate::Flock::wrapped_distance(
                            boid.position,
                            test[i].position,
                            self.dimensions.0,
                            self.dimensions.1,
                        ),
                    ))
                }
            }
            boid.update(width, height, &sensed);
            // NOTE(review): insert result silently dropped here, unlike the
            // explicit `let _ =` in `new`; failures go unnoticed either way.
            newquadtree.insert(boid.position, pos);
        }
        self.quadtree = newquadtree;
    }
    /// Draw the quadtree debug lines, then all boids in one instanced call.
    pub fn render(&self, gl: &GL) {
        /*let selected = self.quadtree.query((
            self.dimensions.0 as f32 / 2.0,
            self.dimensions.1 as f32 / 2.0,
            100.0,
        ));*/
        self.quadtree.renderroot(&gl, &self.line, self.encoder);
        let mut color = [1.0, 1.0, 1.0, 1.0];
        let mut instances = Vec::<Instance>::with_capacity(self.boids.len());
        for (index, boid) in self.boids.iter().enumerate() {
            // Point the sprite along the velocity vector; the sprite model
            // faces +y, hence the -PI/2 correction below.
            let ang = boid.velocity.y.atan2(boid.velocity.x);
            //if selected.iter().any(|&i| i == index) {
            //    color = [0.0, 1.0, 0.0, 1.0];
            //} else {
            color = [0.37, 0.22, 0.40, 1.0];
            //}
            let test = self.encoder.encode(boid.position.x, boid.position.y);
            instances.push(Instance {
                x: test.0,
                y: test.1,
                width: 0.05,
                height: 0.05,
                angle: ang - std::f32::consts::FRAC_PI_2,
                color,
            });
            /*self.triangle.render(
                &gl,
                test.0,
                test.1,
                0.05,
                0.05, // self.aspect,
                ang - std::f32::consts::FRAC_PI_2,
                color,
            );*/
        }
        self.triangle.render_instances(&gl, instances)
    }
}
|
/// "Best Time to Buy and Sell Stock": maximum profit from a single
/// buy-then-sell over `prices`, or 0 when no profitable trade exists
/// (including empty input).
///
/// Scans right-to-left tracking the highest sell price seen so far; the
/// answer is the best `future_max - price` over all earlier days.
/// O(n) time, O(1) extra space.
pub fn max_profit(prices: Vec<i32>) -> i32 {
    let mut best = 0;
    let mut future_max = i32::MIN;
    for &price in prices.iter().rev() {
        if price >= future_max {
            future_max = price;
        } else {
            best = best.max(future_max - price);
        }
    }
    best
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.