text stringlengths 8 4.13M |
|---|
use std::net::{ TcpStream, Shutdown };
use std::io::{ BufRead, BufReader };
use std::str;
/// Connects to a local server on 127.0.0.1:8888, reads a single
/// newline-terminated message, prints it, and shuts the socket down.
fn main() {
    let stream = TcpStream::connect("127.0.0.1:8888")
        .expect("Error connecting to the server");
    println!("Connected to server");
    // Read up to and including the first '\n' into a byte buffer.
    let mut buffer: Vec<u8> = Vec::new();
    BufReader::new(&stream)
        .read_until(b'\n', &mut buffer)
        .expect("Error reading into buffer");
    print!("{}", str::from_utf8(&buffer).expect("Error writing buffer as string"));
    stream.shutdown(Shutdown::Both).expect("Shutdown failed");
}
// Copyright © 2018–2019 Trevor Spiteri
// This library is free software: you can redistribute it and/or
// modify it under the terms of either
//
// * the Apache License, Version 2.0 or
// * the MIT License
//
// at your option.
//
// You should have received copies of the Apache License and the MIT
// License along with the library. If not, see
// <https://www.apache.org/licenses/LICENSE-2.0> and
// <https://opensource.org/licenses/MIT>.
/*!
This module contains sealed traits.
*/
#![deprecated(since = "0.4.2")]
#![allow(deprecated)]
use crate::{
traits::Fixed as TraitsFixed,
types::extra::{LeEqU128, LeEqU16, LeEqU32, LeEqU64, LeEqU8},
FixedI128, FixedI16, FixedI32, FixedI64, FixedI8, FixedU128, FixedU16, FixedU32, FixedU64,
FixedU8,
};
#[cfg(feature = "f16")]
use half::f16;
/// This trait is implemented for all the primitive integer types.
#[deprecated(since = "0.4.2", note = "do not use")]
pub trait Int: Copy {}
/// This trait is implemented for the primitive floating-point types,
/// and for [`f16`] if the [`f16` feature] is enabled.
///
/// [`f16`]: https://docs.rs/half/^1.2/half/struct.f16.html
/// [`f16` feature]: ../index.html#optional-features
#[deprecated(since = "0.4.2", note = "do not use")]
pub trait Float: Copy {}
/// This trait is implemented for all the fixed-point types.
#[deprecated(since = "0.4.2", note = "use `traits::Fixed` instead")]
pub trait Fixed: TraitsFixed {}
// Marker impls: every primitive integer type is `Int`.
impl Int for i8 {}
impl Int for i16 {}
impl Int for i32 {}
impl Int for i64 {}
impl Int for i128 {}
impl Int for isize {}
impl Int for u8 {}
impl Int for u16 {}
impl Int for u32 {}
impl Int for u64 {}
impl Int for u128 {}
impl Int for usize {}
// Marker impls: floating-point types (f16 only with the "f16" feature).
#[cfg(feature = "f16")]
impl Float for f16 {}
impl Float for f32 {}
impl Float for f64 {}
// Marker impls: every fixed-point type whose fractional-bit count fits its
// storage width is `Fixed`.
impl<Frac: LeEqU8> Fixed for FixedI8<Frac> {}
impl<Frac: LeEqU16> Fixed for FixedI16<Frac> {}
impl<Frac: LeEqU32> Fixed for FixedI32<Frac> {}
impl<Frac: LeEqU64> Fixed for FixedI64<Frac> {}
impl<Frac: LeEqU128> Fixed for FixedI128<Frac> {}
impl<Frac: LeEqU8> Fixed for FixedU8<Frac> {}
impl<Frac: LeEqU16> Fixed for FixedU16<Frac> {}
impl<Frac: LeEqU32> Fixed for FixedU32<Frac> {}
impl<Frac: LeEqU64> Fixed for FixedU64<Frac> {}
impl<Frac: LeEqU128> Fixed for FixedU128<Frac> {}
|
use crate::xml::read_xml;
use azure_core::headers::{date_from_headers, request_id_from_headers};
use azure_core::prelude::NextMarker;
use azure_core::RequestId;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use std::convert::TryFrom;
/// Typed result of a "find blobs by tags" listing, combining header-derived
/// metadata (request id, date) with the parsed XML body.
#[derive(Debug, Clone, PartialEq)]
pub struct ListBlobsByTagsResponse {
    pub max_results: Option<u32>,
    pub delimiter: Option<String>,
    // Continuation token for fetching the next page, if any.
    pub next_marker: Option<NextMarker>,
    // The tag filter expression echoed back by the service.
    pub r#where: Option<String>,
    pub blobs: Blobs,
    pub request_id: RequestId,
    pub date: DateTime<Utc>,
}
/// Wire shape of the XML body; converted into the public response type in
/// the `TryFrom` impl below.
// NOTE(review): `Deserialize` and `trace!` are used without visible imports
// in this chunk — presumably brought in at crate level; verify.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "PascalCase")]
struct ListBlobsByTagsResponseInternal {
    pub max_results: Option<u32>,
    pub delimiter: Option<String>,
    // Raw marker string; may be empty (see NextMarker conversion below).
    pub next_marker: Option<String>,
    pub r#where: Option<String>,
    pub blobs: Blobs,
}
/// Container mapping the `<Blobs>` XML element.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Blobs {
    // A missing `<Blob>` child list deserializes to an empty Vec.
    #[serde(rename = "Blob", default = "Vec::new")]
    pub blobs: Vec<Blob>,
}
/// A single blob entry returned by the tags query.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Blob {
    pub name: String,
    pub container_name: String,
    pub tag_value: String,
}
impl TryFrom<&http::Response<Bytes>> for ListBlobsByTagsResponse {
    type Error = crate::Error;
    /// Builds the public response from a raw HTTP response: headers supply
    /// the request id and date, the XML body supplies everything else.
    fn try_from(response: &http::Response<Bytes>) -> Result<Self, Self::Error> {
        let body = response.body();
        trace!("body == {:?}", body);
        let list_blobs_response_internal: ListBlobsByTagsResponseInternal = read_xml(body)?;
        Ok(Self {
            request_id: request_id_from_headers(response.headers())?,
            date: date_from_headers(response.headers())?,
            max_results: list_blobs_response_internal.max_results,
            delimiter: list_blobs_response_internal.delimiter,
            r#where: list_blobs_response_internal.r#where,
            blobs: list_blobs_response_internal.blobs,
            // An empty marker string is normalized to `None` here.
            next_marker: NextMarker::from_possibly_empty_string(
                list_blobs_response_internal.next_marker,
            ),
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: the internal response type deserializes from a sample
    /// Azure "find blobs by tags" XML payload without error. (No field
    /// values are asserted — this only checks the serde mapping.)
    #[test]
    fn deserde_azure() {
        const S: &'static str = "<?xml version=\"1.0\" encoding=\"utf-8\"?>
<EnumerationResults ServiceEndpoint=\"https://hsdgeventstoredev.blob.core.windows.net/\">
<Where>tag1='value1'</Where>
<Blobs>
<Blob>
<Name>test1</Name>
<ContainerName>container1</ContainerName>
<TagValue>value1</TagValue>
</Blob>
</Blobs>
<NextMarker/>
</EnumerationResults>";
        let bytes = Bytes::from(S);
        let _list_blobs_response_internal: ListBlobsByTagsResponseInternal =
            read_xml(&bytes).unwrap();
    }
}
|
use super::phys::GRAVITY;
use crate::{
comp::{
ActionState::*, CharacterState, Controller, Mounting, MovementState::*, Ori, PhysicsState,
Pos, Stats, Vel,
},
state::DeltaTime,
terrain::TerrainGrid,
};
use specs::prelude::*;
use std::time::Duration;
use vek::*;
/// How long a roll lasts before the character reverts to running.
pub const ROLL_DURATION: Duration = Duration::from_millis(600);
// Movement tuning constants. Speeds are caps checked against squared
// velocity magnitude; accels are applied per second of delta time.
const HUMANOID_ACCEL: f32 = 70.0;
const HUMANOID_SPEED: f32 = 120.0;
const HUMANOID_AIR_ACCEL: f32 = 10.0;
const HUMANOID_AIR_SPEED: f32 = 100.0;
const HUMANOID_WATER_ACCEL: f32 = 70.0;
const HUMANOID_WATER_SPEED: f32 = 120.0;
const HUMANOID_CLIMB_ACCEL: f32 = 5.0;
const ROLL_SPEED: f32 = 13.0;
const GLIDE_ACCEL: f32 = 15.0;
const GLIDE_SPEED: f32 = 45.0;
const BLOCK_ACCEL: f32 = 30.0;
const BLOCK_SPEED: f32 = 75.0;
// Base lift while gliding cancels 96% of gravity, leaving ~4% acting.
// NOTE(review): the original comment said this "makes gravity equal to
// .15", which does not match GRAVITY * 0.96 — confirm intended value.
const GLIDE_ANTIGRAV: f32 = GRAVITY * 0.96;
const CLIMB_SPEED: f32 = 5.0;
/// Minimum speed below which a character is not considered moving.
pub const MOVEMENT_THRESHOLD_VEL: f32 = 3.0;
/// This system applies forces and calculates new positions and velocities.
pub struct Sys;
impl<'a> System<'a> for Sys {
    // Read-only resources/storages, followed by the storages this system
    // mutates (character state, position, velocity, orientation).
    type SystemData = (
        Entities<'a>,
        ReadExpect<'a, TerrainGrid>,
        Read<'a, DeltaTime>,
        ReadStorage<'a, Stats>,
        ReadStorage<'a, Controller>,
        ReadStorage<'a, PhysicsState>,
        WriteStorage<'a, CharacterState>,
        WriteStorage<'a, Pos>,
        WriteStorage<'a, Vel>,
        WriteStorage<'a, Ori>,
        ReadStorage<'a, Mounting>,
    );
    /// Applies controller input to every living, unmounted entity:
    /// acceleration, orientation slerp, glide lift, roll timing, climbing,
    /// and movement-state transitions (stand/run/jump/swim/climb/glide).
    fn run(
        &mut self,
        (
            entities,
            _terrain,
            dt,
            stats,
            controllers,
            physics_states,
            mut character_states,
            mut positions,
            mut velocities,
            mut orientations,
            mountings,
        ): Self::SystemData,
    ) {
        // Apply movement inputs
        for (
            _entity,
            stats,
            controller,
            physics,
            mut character,
            mut _pos,
            mut vel,
            mut ori,
            mounting,
        ) in (
            &entities,
            &stats,
            &controllers,
            &physics_states,
            &mut character_states,
            &mut positions,
            &mut velocities,
            &mut orientations,
            mountings.maybe(),
        )
            .join()
        {
            // Dead entities do not move.
            if stats.is_dead {
                continue;
            }
            // Mounted entities sit and skip all further processing.
            if mounting.is_some() {
                character.movement = Sit;
                continue;
            }
            // Rolling replaces horizontal velocity with a fixed-speed dash
            // along the input direction (or current heading if no input).
            if character.movement.is_roll() {
                vel.0 = Vec3::new(0.0, 0.0, vel.0.z)
                    + controller
                        .move_dir
                        .try_normalized()
                        .unwrap_or(Vec2::from(vel.0).try_normalized().unwrap_or_default())
                        * ROLL_SPEED
            }
            if character.action.is_block() || character.action.is_attack() {
                // Blocking/attacking: reduced ground acceleration capped at
                // BLOCK_SPEED; no control while airborne.
                vel.0 += Vec2::broadcast(dt.0)
                    * controller.move_dir
                    * match physics.on_ground {
                        true if vel.0.magnitude_squared() < BLOCK_SPEED.powf(2.0) => BLOCK_ACCEL,
                        _ => 0.0,
                    }
            } else {
                // Move player according to move_dir
                // Acceleration depends on (on_ground, movement state) and is
                // zeroed once the matching speed cap is reached.
                vel.0 += Vec2::broadcast(dt.0)
                    * controller.move_dir
                    * match (physics.on_ground, &character.movement) {
                        (true, Run) if vel.0.magnitude_squared() < HUMANOID_SPEED.powf(2.0) => {
                            HUMANOID_ACCEL
                        }
                        (false, Climb) if vel.0.magnitude_squared() < HUMANOID_SPEED.powf(2.0) => {
                            HUMANOID_CLIMB_ACCEL
                        }
                        (false, Glide) if vel.0.magnitude_squared() < GLIDE_SPEED.powf(2.0) => {
                            GLIDE_ACCEL
                        }
                        (false, Jump)
                            if vel.0.magnitude_squared() < HUMANOID_AIR_SPEED.powf(2.0) =>
                        {
                            HUMANOID_AIR_ACCEL
                        }
                        (false, Swim)
                            if vel.0.magnitude_squared() < HUMANOID_WATER_SPEED.powf(2.0) =>
                        {
                            HUMANOID_WATER_ACCEL
                        }
                        _ => 0.0,
                    };
            }
            // Set direction based on move direction when on the ground
            let ori_dir = if character.action.is_wield()
                || character.action.is_attack()
                || character.action.is_block()
            {
                // Face the look direction while wielding/attacking/blocking.
                Vec2::from(controller.look_dir).normalized()
            } else if let (Climb, Some(wall_dir)) = (character.movement, physics.on_wall) {
                // Face the wall while climbing, falling back to velocity if
                // the wall direction is degenerate.
                if Vec2::<f32>::from(wall_dir).magnitude_squared() > 0.001 {
                    Vec2::from(wall_dir).normalized()
                } else {
                    Vec2::from(vel.0)
                }
            } else {
                Vec2::from(vel.0)
            };
            // Smoothly rotate toward the target direction; turning is faster
            // on the ground (12.0) than airborne (2.0).
            if ori_dir.magnitude_squared() > 0.0001
                && (ori.0.normalized() - Vec3::from(ori_dir).normalized()).magnitude_squared()
                    > 0.001
            {
                ori.0 = vek::ops::Slerp::slerp(
                    ori.0,
                    ori_dir.into(),
                    if physics.on_ground { 12.0 } else { 2.0 } * dt.0,
                );
            }
            // Glide
            // While descending under the glide speed cap, apply lift scaled
            // by horizontal speed, clamped to the [0.2, 1.0] factor range.
            if character.movement == Glide
                && Vec2::<f32>::from(vel.0).magnitude_squared() < GLIDE_SPEED.powf(2.0)
                && vel.0.z < 0.0
            {
                character.action = Idle;
                let lift = GLIDE_ANTIGRAV + vel.0.z.abs().powf(2.0) * 0.15;
                vel.0.z += dt.0
                    * lift
                    * (Vec2::<f32>::from(vel.0).magnitude() * 0.075)
                        .min(1.0)
                        .max(0.2);
            }
            // Roll
            // Count down the roll timer; end the roll when it expires or the
            // character has slowed below the threshold.
            if let Roll { time_left } = &mut character.movement {
                character.action = Idle;
                if *time_left == Duration::default() || vel.0.magnitude_squared() < 10.0 {
                    character.movement = Run;
                } else {
                    *time_left = time_left
                        .checked_sub(Duration::from_secs_f32(dt.0))
                        .unwrap_or_default();
                }
            }
            // Climb
            if let (true, Some(_wall_dir)) = (
                (controller.climb | controller.climb_down) && vel.0.z <= CLIMB_SPEED,
                physics.on_wall,
            ) {
                if controller.climb_down && !controller.climb {
                    // Climbing down: damp velocity nonlinearly.
                    vel.0 -= dt.0 * vel.0.map(|e| e.abs().powf(1.5) * e.signum() * 6.0);
                } else if controller.climb && !controller.climb_down {
                    // Climbing up: overcome gravity, capped at CLIMB_SPEED.
                    vel.0.z = (vel.0.z + dt.0 * GRAVITY * 1.25).min(CLIMB_SPEED);
                } else {
                    // Neither/both pressed while on a wall: cling in place.
                    vel.0.z = vel.0.z + dt.0 * GRAVITY * 1.5;
                    vel.0 = Lerp::lerp(
                        vel.0,
                        Vec3::zero(),
                        30.0 * dt.0 / (1.0 - vel.0.z.min(0.0) * 5.0),
                    );
                }
                character.movement = Climb;
                character.action = Idle;
            } else if let Climb = character.movement {
                // Left the wall: transition from climbing to falling.
                character.movement = Jump;
            }
            // Landing collapses airborne/water states back to standing.
            if physics.on_ground
                && (character.movement == Jump
                    || character.movement == Climb
                    || character.movement == Glide
                    || character.movement == Swim)
            {
                character.movement = Stand;
            }
            // Leaving the ground while standing/running/rolling becomes a fall.
            if !physics.on_ground
                && (character.movement == Stand
                    || character.movement.is_roll()
                    || character.movement == Run)
            {
                character.movement = Jump;
            }
            // Entering fluid toggles swimming; leaving it reverts to standing.
            if !physics.on_ground && physics.in_fluid {
                character.movement = Swim;
            } else if let Swim = character.movement {
                character.movement = Stand;
            }
        }
    }
}
|
// Finger trees for Rust
extern crate monoid;
|
//! Provides definitions of the vertex format for the game's
//! the vertex attributes. Each vertex attribute will be stored
//! in a Vertex Buffer Object (either by itself or interleaved
//! with others).
//!
//! This module provides the strongly-typed storage ``struct``s
//! that are employed to represent a vertex.
use luminance_derive::{Semantics, Vertex};
/// Vertex attribute semantics shared by the shader programs.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Semantics)]
pub enum Semantic {
    /// Vertex position ("pos", vec3).
    #[sem(name = "pos", repr = "[f32; 3]", type_name = "PosAttrib")]
    Pos,
    /// Texture coordinates ("uv", vec2).
    // NOTE(review): the variant is named `Color` but carries the "uv"
    // semantic and `UvAttrib` type — looks like a leftover name; renaming
    // to `Uv` would break callers, so it is only flagged here.
    #[sem(name = "uv", repr = "[f32; 2]", type_name = "UvAttrib")]
    Color,
}
/// One vertex of a voxel mesh: model-space position plus texture
/// coordinates, interleaved per the `Semantic` declaration above.
#[derive(Clone, Copy, Debug, PartialEq, Vertex)]
#[vertex(sem = "Semantic")]
pub struct VoxelVertex {
    pub pos: PosAttrib,
    pub uv: UvAttrib,
}
|
use std::io::{stdin, Read, StdinLock};
use std::str::FromStr;
/// Minimal whitespace-delimited token reader over locked stdin.
#[allow(dead_code)]
struct Scanner<'a> {
    cin: StdinLock<'a>,
}
#[allow(dead_code)]
impl<'a> Scanner<'a> {
    /// Wraps an already-locked stdin handle.
    fn new(cin: StdinLock<'a>) -> Scanner<'a> {
        Scanner { cin }
    }
    /// Reads the next whitespace-delimited token and tries to parse it,
    /// returning `None` when parsing fails.
    fn read<T: FromStr>(&mut self) -> Option<T> {
        let token: String = self
            .cin
            .by_ref()
            .bytes()
            .map(|b| b.unwrap() as char)
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect();
        token.parse().ok()
    }
    /// Reads the next token, panicking if it cannot be parsed as `T`.
    fn input<T: FromStr>(&mut self) -> T {
        self.read().unwrap()
    }
}
#[allow(dead_code)]
type Graph = Vec<Vec<(usize, i64)>>;
/// Reads `n` nodes and `m` signed distance constraints from stdin and
/// prints "Yes" when all constraints are mutually consistent, else "No".
fn main() {
    let cin = stdin();
    let mut sc = Scanner::new(cin.lock());
    let n: i64 = sc.input();
    let m: i64 = sc.input();
    let mut graph: Graph = vec![Vec::new(); n as usize];
    for _ in 0..m {
        let l: i64 = sc.input();
        let r: i64 = sc.input();
        let d: i64 = sc.input();
        // Convert to 0-based indices and store the constraint both ways.
        let (l, r) = ((l - 1) as usize, (r - 1) as usize);
        graph[l].push((r, d));
        graph[r].push((l, -d));
    }
    let verdict = if dfs(&graph) { "Yes" } else { "No" };
    println!("{}", verdict);
}
/// Checks whether the signed distance constraints encoded in `graph` are
/// consistent. Each edge `(next, d)` out of `cur` demands
/// `dist[next] == dist[cur] + d`; an iterative DFS assigns potentials per
/// connected component and returns `false` on the first contradiction.
///
/// Takes a slice so callers can pass `&Graph` (i.e. `&Vec<Vec<(usize,
/// i64)>>`) unchanged via deref coercion.
fn dfs(graph: &[Vec<(usize, i64)>]) -> bool {
    let mut dist: Vec<Option<i64>> = vec![None; graph.len()];
    // Explicit stack of (node, next edge index) pairs instead of recursion.
    let mut stack: Vec<(usize, usize)> = Vec::new();
    for start in 0..graph.len() {
        // Skip nodes already reached from an earlier component.
        if dist[start].is_some() {
            continue;
        }
        dist[start] = Some(0);
        stack.push((start, 0));
        while let Some((cur, index)) = stack.pop() {
            if index == graph[cur].len() {
                continue;
            }
            // Re-push with the next edge index so remaining edges are seen.
            stack.push((cur, index + 1));
            let (next, d) = graph[cur][index];
            let expected = dist[cur].unwrap() + d;
            match dist[next] {
                // Already labelled: the constraint must agree exactly.
                Some(existing) if existing != expected => return false,
                Some(_) => {}
                None => {
                    dist[next] = Some(expected);
                    stack.push((next, 0));
                }
            }
        }
    }
    true
}
|
use std::collections::HashMap;
use prometheus::{CounterVec, Encoder, Opts, Registry, TextEncoder};
/// Prometheus metrics holder: owns the registry and the per-request-type
/// request counter registered in it.
pub struct Metrics {
    registry: Registry,
    request_counter: CounterVec,
}
impl Metrics {
    /// Creates a metrics holder whose request counter carries a constant
    /// `id` label identifying this node.
    pub fn new(id: u64) -> Metrics {
        let request_counter_opts = Opts::new("bayard_requests_total", "Total number of requests.")
            .const_label("id", &id.to_string());
        let request_counter = CounterVec::new(request_counter_opts, &["request_type"]).unwrap();
        let registry = Registry::new();
        registry
            .register(Box::new(request_counter.clone()))
            .unwrap();
        Metrics {
            registry,
            request_counter,
        }
    }
    /// Increments the request counter for the given `request_type` label.
    pub fn inc_request_count(&mut self, request_type: &str) {
        let mut labels = HashMap::new();
        labels.insert("request_type", request_type);
        self.request_counter.with(&labels).inc();
    }
    /// Encodes all registered metrics in the Prometheus text format.
    pub fn get_metrics(&mut self) -> String {
        let mut buffer = Vec::<u8>::new();
        let encoder = TextEncoder::new();
        let metric_families = self.registry.gather();
        encoder.encode(&metric_families, &mut buffer).unwrap();
        // Consume the buffer directly — the previous version cloned it and
        // then cleared the original, paying for a needless full copy.
        String::from_utf8(buffer).unwrap()
    }
}
|
use std::collections::HashMap;
use ggez::graphics::{Mesh, MeshBuilder, DrawMode, Drawable, Rect, Color, TextFragment, Scale, Text};
use std::rc::Rc;
use ggez::{Context, GameResult, graphics};
use crate::{DPPoint, point};
/// Draws meshes and text scaled to the window size captured at
/// construction, caching built meshes under string keys.
pub struct Renderer {
    width: f32,   // window inner width in pixels
    height: f32,  // window inner height in pixels
    mesh_cache: HashMap<String, Rc<Mesh>>
}
impl Renderer {
    /// Builds a renderer sized to the window's current inner dimensions,
    /// starting with an empty mesh cache.
    pub fn new(ctx: &mut Context) -> Renderer {
        let (width, height) = Renderer::get_screen_size(ctx);
        let mesh_cache = HashMap::new();
        Renderer { width, height, mesh_cache }
    }
}
impl Renderer {
    /// Queries the window for its inner size in pixels, panicking if the
    /// size is unavailable.
    pub fn get_screen_size(ctx: &mut Context) -> (f32, f32) {
        graphics::window(ctx)
            .get_inner_size()
            .map(|physical| (physical.width as f32, physical.height as f32))
            .expect("Failed to get/convert window size")
    }
}
impl Renderer {
pub fn calc_percent_to_point(&self, x: f32, y: f32) -> DPPoint {
return point(x * self.width, y * self.height);
}
pub fn calc_percent_to_px(&self, x: f32, y: f32) -> (f32, f32) {
return (x * self.width, y * self.height);
}
pub fn calc_width(&self, percent: f32) -> f32 {
return self.width * percent;
}
pub fn calc_height(&self, percent: f32) -> f32 {
return self.height * percent;
}
pub fn make_grid_mesh(&mut self, ctx: &mut Context, cell_size: f32, horz_count: usize, vert_count: usize, intensity: u8) -> GameResult<Rc<Mesh>> {
let width = cell_size * horz_count as f32;
let height = cell_size * vert_count as f32;
let key = format!("grid_{}_{}_{}_{}", cell_size, horz_count, vert_count, intensity);
if self.mesh_cache.contains_key(&key) {
return Ok(self.mesh_cache[&key].clone());
} else {
let grid_line_width = 2.;
let grid_line_color = (intensity, intensity, intensity, 255).into();
let mut mesh_builder = MeshBuilder::new();
for x in 0..horz_count {
mesh_builder.line(&[point(x as f32 * cell_size, 0.), point(x as f32 * cell_size, height)], grid_line_width, grid_line_color)?;
}
for y in 0..vert_count {
mesh_builder.line(&[point(0., y as f32 * cell_size), point(width, y as f32 * cell_size)], grid_line_width, grid_line_color)?;
}
mesh_builder.rectangle(DrawMode::stroke(grid_line_width), Rect::new(0., 0., width, height), grid_line_color);
let mesh = Rc::new(mesh_builder.build(ctx)?);
self.mesh_cache.insert(key, mesh.clone());
return Ok(mesh);
}
}
pub fn make_rect_mesh(&mut self, ctx: &mut Context, width: f32, height: f32, filled: bool, thickness: f32) -> GameResult<Rc<Mesh>> {
let key = format!("rect_{}_{}_{}", width, height, filled);
if self.mesh_cache.contains_key(&key) {
return Ok(self.mesh_cache[&key].clone());
} else {
let mut mesh_builder = MeshBuilder::new();
let mode;
if filled {
mode = DrawMode::fill();
} else {
mode = DrawMode::stroke(thickness);
}
mesh_builder.rectangle(mode, Rect::new(0., 0., width, height), (0.8, 0.8, 0.8, 1.).into());
let mesh = Rc::new(mesh_builder.build(ctx)?);
self.mesh_cache.insert(key, mesh.clone());
return Ok(mesh);
}
}
pub fn make_cross_mesh(&mut self, ctx: &mut Context, size: f32) -> GameResult<Rc<Mesh>> {
let key = format!("cross_{}", size);
if self.mesh_cache.contains_key(&key) {
return Ok(self.mesh_cache[&key].clone());
} else {
let mut mesh_builder = MeshBuilder::new();
mesh_builder.line(&[point(0.,0.), point(size, size)], 4., (1.,0.,0.,1.).into())?;
mesh_builder.line(&[point(0.,size), point(size, 0.)], 4., (1.,0.,0.,1.).into())?;
let mesh = Rc::new(mesh_builder.build(ctx)?);
self.mesh_cache.insert(key, mesh.clone());
return Ok(mesh);
}
}
pub fn make_tick_mesh(&mut self, ctx: &mut Context, size: f32) -> GameResult<Rc<Mesh>> {
let key = format!("tick_{}", size);
if self.mesh_cache.contains_key(&key) {
return Ok(self.mesh_cache[&key].clone());
} else {
let mut mesh_builder = MeshBuilder::new();
mesh_builder.line(&[point(0.,size * 0.7), point(size * 0.3, size)], 4., (0.,1.,0.,1.).into())?;
mesh_builder.line(&[point(size * 0.3,size), point(size, 0.3 * size)], 4., (0.,1.,0.,1.).into())?;
let mesh = Rc::new(mesh_builder.build(ctx)?);
self.mesh_cache.insert(key, mesh.clone());
return Ok(mesh);
}
}
pub fn make_list_indicator_mesh(&mut self, ctx: &mut Context, size: f32) -> GameResult<Rc<Mesh>> {
let key = format!("list_indicator_{}", size);
if self.mesh_cache.contains_key(&key) {
return Ok(self.mesh_cache[&key].clone());
} else {
let mut mesh_builder = MeshBuilder::new();
mesh_builder.polygon(DrawMode::fill(), &[point(0.,0.), point(0., size), point(size,size * 0.5)], (107,200,255,255).into())?;
let mesh = Rc::new(mesh_builder.build(ctx)?);
self.mesh_cache.insert(key, mesh.clone());
return Ok(mesh);
}
}
pub fn make_square_mesh(&mut self, ctx: &mut Context, cell_size: f32, filled: bool, thickness: f32) -> GameResult<Rc<Mesh>> {
return self.make_rect_mesh(ctx, cell_size, cell_size, filled, thickness);
}
pub fn draw_mesh<D: Drawable>(&mut self, ctx: &mut Context, mesh: &D, xy: DPPoint) {
graphics::draw(ctx, mesh, (xy, )).expect("couldn't draw");
}
pub fn draw_coloured_mesh<D: Drawable>(&mut self, ctx: &mut Context, mesh: &D, xy: DPPoint, new_colour: Color) {
graphics::draw(ctx, mesh, (xy, new_colour)).expect("couldn't draw");
}
pub fn draw_white_text<S: Into<String>>(&mut self, ctx: &mut Context, text: S, position: DPPoint, font_size: f32, centered: bool) {
self.draw_text(ctx, text, position, (1., 1., 1., 1.).into(), font_size, centered);
}
pub fn draw_text<S: Into<String>>(&mut self, ctx: &mut Context, text: S, position: DPPoint, color: Color, font_size: f32, centered: bool) {
let text = Text::new(TextFragment {
text: text.into(),
color: Some(color),
scale: Some(Scale::uniform(font_size)),
..TextFragment::default()
});
let mut xy = position;
if centered {
xy = point(position.x - (text.width(ctx) as f32 / 2.), position.y);
}
self.draw_mesh(ctx, &text, xy);
}
}
|
/// ARINC653P1-5 3.7.2.2
pub mod blackboard;
/// ARINC653P1-5 3.7.2.1
pub mod buffer;
/// ARINC653P1-5 3.8
pub mod error;
/// ARINC653P1-5 3.7.2.4
pub mod event;
/// ARINC653P2-4 3.2
pub mod file_system;
/// ARINC653P2-4 3.13
pub mod interrupt;
/// Hypervisor dependent limits
pub mod limits;
/// ARINC653P2-4 3.5
pub mod logbook;
/// ARINC653P2-4 3.9
pub mod memory_block;
/// ARINC653P2-4 3.12
pub mod multicore;
/// ARINC653P1-5 3.7.2.5
pub mod mutex;
/// ARINC653P2-4 3.8
pub mod name_service;
/// ARINC653P1-5 3.2.2
pub mod partition;
/// ARINC653P1-5 3.3.2
pub mod process;
/// ARINC653P1-5 3.6.2.2
pub mod queuing;
/// ARINC653P2-4 3.11
pub mod queuing_list;
/// ARINC653P1-5 3.6.2.1
pub mod sampling;
/// ARINC653P2-4 3.7
pub mod sap;
/// ARINC653P2-4 3.4
pub mod schedules;
/// ARINC653P1-5 3.7.2.3
pub mod semaphore;
/// ARINC653P2-4 3.3
pub mod sp_data;
/// ARINC653P1-5 3.4.2
pub mod time;
/// ARINC653 specific types
pub mod types;
|
use crate::rbatis::Rbatis;
use crate::Error;
use rbs::Value;
/// SQL interceptor hook, invoked before a statement runs; implementations
/// may inspect or rewrite the SQL text and its arguments.
pub trait SqlIntercept: Send + Sync {
    /// Intercepts the SQL string and its arguments before execution.
    /// `is_prepared_sql` is true when running as a prepared statement.
    fn do_intercept(
        &self,
        rb: &Rbatis,
        sql: &mut String,
        args: &mut Vec<Value>,
        is_prepared_sql: bool,
    ) -> Result<(), Error>;
}
/// Prevent full table updates and deletions
/// (rejects `DELETE` statements that lack a `WHERE` clause).
#[derive(Debug)]
pub struct BlockAttackDeleteInterceptor {}
impl SqlIntercept for BlockAttackDeleteInterceptor {
    /// Rejects DELETE statements that carry no WHERE clause.
    fn do_intercept(
        &self,
        _rb: &Rbatis,
        sql: &mut String,
        _args: &mut Vec<Value>,
        _is_prepared_sql: bool,
    ) -> Result<(), Error> {
        let trimmed = sql.trim();
        let dangerous = trimmed.starts_with(crate::sql::TEMPLATE.delete_from.value)
            && !trimmed.contains(crate::sql::TEMPLATE.r#where.left_right_space);
        if dangerous {
            return Err(Error::from(format!(
                "[rbatis][BlockAttackDeleteInterceptor] not allow attack sql:{}",
                trimmed
            )));
        }
        Ok(())
    }
}
/// Prevent full table updates and deletions
/// (rejects `UPDATE` statements that lack a `WHERE` clause).
#[derive(Debug)]
pub struct BlockAttackUpdateInterceptor {}
impl SqlIntercept for BlockAttackUpdateInterceptor {
    /// Rejects UPDATE statements that carry no WHERE clause.
    fn do_intercept(
        &self,
        _rb: &Rbatis,
        sql: &mut String,
        _args: &mut Vec<Value>,
        _is_prepared_sql: bool,
    ) -> Result<(), Error> {
        let trimmed = sql.trim();
        let dangerous = trimmed.starts_with(crate::sql::TEMPLATE.update.value)
            && !trimmed.contains(crate::sql::TEMPLATE.r#where.left_right_space);
        if dangerous {
            return Err(Error::from(format!(
                "[rbatis][BlockAttackUpdateInterceptor] not allow attack sql:{}",
                trimmed
            )));
        }
        Ok(())
    }
}
|
use crate::config::Config;
use clap::crate_version;
use clap::crate_authors;
use clap::{Arg, App, SubCommand, AppSettings};
/// Parses the command line into a runtime [`Config`].
///
/// Builds a clap `App` with four subcommands (authoritative, recursive,
/// proxy, stub) sharing common verbosity/bind/TLS/HTTPS arguments, then
/// dispatches on whichever subcommand matched.
///
/// NOTE(review): this function is unfinished — after printing and splitting
/// the bind URL it hits `todo!()` instead of constructing a `Config`.
pub fn boot() -> Result<Config, Box<dyn std::error::Error>> {
    // Arguments shared (cloned) into every subcommand.
    let verbose_arg = Arg::with_name("verbose")
        .long("verbose")
        .possible_values(&["info", "warn", "error", "debug", "trace"])
        .default_value("info")
        .help("Sets the level of verbosity");
    let upstream_arg = Arg::with_name("upstream")
        .long("upstream")
        .multiple(true)
        .help("Upstream name server")
        .long_help("Example: --upstream tcp+udp+tls+https://8.8.8.8:53:53:853:443");
    let bind_arg = Arg::with_name("bind")
        .long("bind")
        .long_help("Example: --bind 'udp+tcp+tls+https://127.0.0.1:53:53:853:443'")
        .required(true)
        .takes_value(true)
        .default_value("udp+tcp://127.0.0.1:53:53");
    let tls_key_arg = Arg::with_name("tls-key")
        .long("tls-key")
        .help("TLS Server PKCS #12 Key");
    let tls_key_password_arg = Arg::with_name("tls-key-password")
        .long("tls-key-password");
    let https_key_arg = Arg::with_name("https-key")
        .long("https-key")
        .help("HTTPS Server PKCS #12 Key");
    let https_key_password_arg = Arg::with_name("https-key-password")
        .long("https-key-password");
    let mut app = App::new("Named")
        .version(crate_version!())
        .author(crate_authors!())
        .about("DNS Named/Proxy/Authoritative")
        .setting(AppSettings::ColorAuto)
        .setting(AppSettings::DisableHelpSubcommand)
        .subcommand(
            SubCommand::with_name("authoritative")
                .about("Authoritative DNS Server")
                .arg(verbose_arg.clone())
                .arg(bind_arg.clone())
                .arg(tls_key_arg.clone())
                .arg(tls_key_password_arg.clone())
                .arg(https_key_arg.clone())
                .arg(https_key_password_arg.clone())
        )
        .subcommand(
            SubCommand::with_name("recursive")
                .about("Recursive DNS Server")
                .arg(Arg::with_name("use-ipv4")
                    .long("use-ipv4")
                    .help("use ipv4"))
                .arg(Arg::with_name("use-ipv6")
                    .long("use-ipv6")
                    .help("use ipv6"))
                .arg(verbose_arg.clone())
                .arg(bind_arg.clone())
                .arg(tls_key_arg.clone())
                .arg(tls_key_password_arg.clone())
                .arg(https_key_arg.clone())
                .arg(https_key_password_arg.clone())
        )
        .subcommand(
            SubCommand::with_name("proxy")
                .about("Proxy DNS Server")
                .arg(verbose_arg.clone())
                .arg(upstream_arg.clone())
                .arg(bind_arg.clone())
                .arg(tls_key_arg.clone())
                .arg(tls_key_password_arg.clone())
                .arg(https_key_arg.clone())
                .arg(https_key_password_arg.clone())
        )
        .subcommand(
            SubCommand::with_name("stub")
                .about("stub resolver")
                // .arg(Arg::with_name("enable-mdns")
                // .long("enable-mdns")
                // .possible_values(&["true", "false"])
                // .default_value("true")
                // .help("enable mdns query"))
                // .arg(Arg::with_name("enable-hosts")
                // .long("enable-hosts")
                // .possible_values(&["true", "false"])
                // .default_value("true")
                // .help("query with system hosts file"))
                .arg(verbose_arg.clone())
                .arg(upstream_arg.clone())
                .arg(bind_arg.clone())
                .arg(tls_key_arg.clone())
                .arg(tls_key_password_arg.clone())
                .arg(https_key_arg.clone())
                .arg(https_key_password_arg.clone())
        );
    // Render the long help up front so it can be printed when no subcommand
    // matches below (get_matches consumes `app`).
    let mut usage = Vec::new();
    let _ = app.write_long_help(&mut usage);
    // NOTE(review): assumes clap emits valid UTF-8 help text; a checked
    // `String::from_utf8` would avoid the unsafe block — confirm.
    let usage = unsafe { String::from_utf8_unchecked(usage) };
    let matches = app.get_matches();
    let sub_matches = (
        matches.subcommand_matches("proxy"),
        matches.subcommand_matches("stub"),
        matches.subcommand_matches("recursive"),
        matches.subcommand_matches("authoritative"),
    );
    // Exactly one subcommand must match; otherwise print help and exit.
    let (subcommand, matches) = match sub_matches {
        (Some(matches), None, None, None) => ("proxy", matches),
        (None, Some(matches), None, None) => ("stub", matches),
        (None, None, Some(matches), None) => ("recursive", matches),
        (None, None, None, Some(matches)) => ("authoritative", matches),
        _ => {
            println!("{}", usage);
            std::process::exit(1);
        }
    };
    let verbose = matches.value_of("verbose").unwrap();
    // std::env::set_var("RUST_LOG", "debug");
    // tcp+udp+tls+https://8.8.8.8:53:53:853:443
    let bind_url = matches.value_of("bind").unwrap();
    println!("bind: {}", bind_url);
    let tmp = bind_url.split("://").collect::<Vec<&str>>();
    todo!()
}
// src/main.rs
mod db;
mod logger;
mod models;
mod routes;
type StdErr = Box<dyn std::error::Error>;
/// Root endpoint returning a static greeting (doubles as a liveness check).
#[actix_web::get("/")]
async fn hello_world() -> &'static str {
    "Hello, world!"
}
/// Entry point: loads `.env`, initializes logging in debug builds,
/// connects to the database, and serves the API on 127.0.0.1:8000.
#[actix_web::main]
async fn main() -> Result<(), StdErr> {
    dotenv::dotenv().ok();
    // Logger is only initialized in debug builds.
    #[cfg(debug_assertions)]
    logger::init()?;
    let db = db::Db::connect().await?;
    actix_web::HttpServer::new(move || {
        actix_web::App::new()
            // Each worker gets its own clone of the database handle.
            .data(db.clone())
            .service(hello_world)
            .service(routes::api())
    })
    .bind(("127.0.0.1", 8000))?
    .run()
    .await?;
    Ok(())
}
|
pub mod auth;
pub mod error;
pub mod models;
pub mod utils;
use crate::error::ServiceError;
use async_trait::async_trait;
use deadpool::managed::Object;
use deadpool_postgres::{ClientWrapper, Pool};
use std::ops::Deref;
use tokio_postgres::{
types::{BorrowToSql, ToSql, Type},
Error, Row, RowStream, Statement, ToStatement, Transaction,
};
/// Minimal database-client abstraction implemented by pooled clients,
/// wrapped clients, and transactions, so query code can be generic over
/// all of them.
#[async_trait]
pub trait Client: Sync {
    /// Prepares a statement, inferring parameter types.
    async fn prepare(&self, query: &str) -> Result<Statement, Error>;
    /// Prepares a statement with explicit parameter types.
    async fn prepare_typed(&self, query: &str, types: &[Type]) -> Result<Statement, Error>;
    /// Executes a statement, returning the number of affected rows.
    async fn execute<T>(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error>
    where
        T: ?Sized + ToStatement + Sync;
    /// Runs a query expected to yield zero or one row.
    async fn query_opt<T>(
        &self,
        statement: &T,
        params: &[&(dyn ToSql + Sync)],
    ) -> Result<Option<Row>, Error>
    where
        T: ?Sized + ToStatement + Sync;
    /// Runs a query, streaming rows back to the caller.
    async fn query_raw<T, P, I>(&self, statement: &T, params: I) -> Result<RowStream, Error>
    where
        T: ?Sized + ToStatement + Sync,
        P: BorrowToSql,
        I: IntoIterator<Item = P> + Sync + Send,
        I::IntoIter: ExactSizeIterator;
}
#[async_trait]
impl Client for Object<ClientWrapper, Error> {
    // Every method forwards to the pooled client reached via `Deref`.
    async fn prepare(&self, query: &str) -> Result<Statement, Error> {
        self.deref().prepare(query).await
    }
    async fn prepare_typed(&self, query: &str, types: &[Type]) -> Result<Statement, Error> {
        self.deref().prepare_typed(query, types).await
    }
    async fn execute<T>(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        self.deref().execute(statement, params).await
    }
    async fn query_opt<T>(
        &self,
        statement: &T,
        params: &[&(dyn ToSql + Sync)],
    ) -> Result<Option<Row>, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        self.deref().query_opt(statement, params).await
    }
    async fn query_raw<T, P, I>(&self, statement: &T, params: I) -> Result<RowStream, Error>
    where
        T: ?Sized + ToStatement + Sync,
        P: BorrowToSql,
        I: IntoIterator<Item = P> + Sync + Send,
        I::IntoIter: ExactSizeIterator,
    {
        self.deref().query_raw(statement, params).await
    }
}
#[async_trait]
impl Client for ClientWrapper {
    // Every method forwards to the inner `tokio_postgres` client via `Deref`.
    async fn prepare(&self, query: &str) -> Result<Statement, Error> {
        self.deref().prepare(query).await
    }
    async fn prepare_typed(&self, query: &str, types: &[Type]) -> Result<Statement, Error> {
        self.deref().prepare_typed(query, types).await
    }
    async fn execute<T>(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        self.deref().execute(statement, params).await
    }
    async fn query_opt<T>(
        &self,
        statement: &T,
        params: &[&(dyn ToSql + Sync)],
    ) -> Result<Option<Row>, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        self.deref().query_opt(statement, params).await
    }
    async fn query_raw<T, P, I>(&self, statement: &T, params: I) -> Result<RowStream, Error>
    where
        T: ?Sized + ToStatement + Sync,
        P: BorrowToSql,
        I: IntoIterator<Item = P> + Sync + Send,
        I::IntoIter: ExactSizeIterator,
    {
        self.deref().query_raw(statement, params).await
    }
}
#[async_trait]
impl<'a> Client for Transaction<'a> {
    // All methods delegate to the inherent `tokio_postgres::Transaction`
    // methods via fully-qualified calls. Consistency fix: `query_opt` and
    // `query_raw` previously passed `&self` (a `&&Transaction`), which only
    // compiled thanks to deref coercion — they now pass `self` like the
    // other methods.
    async fn prepare(&self, query: &str) -> Result<Statement, Error> {
        Transaction::prepare(self, query).await
    }
    async fn prepare_typed(&self, query: &str, types: &[Type]) -> Result<Statement, Error> {
        Transaction::prepare_typed(self, query, types).await
    }
    async fn execute<T>(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        Transaction::execute(self, statement, params).await
    }
    async fn query_opt<T>(
        &self,
        statement: &T,
        params: &[&(dyn ToSql + Sync)],
    ) -> Result<Option<Row>, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        Transaction::query_opt(self, statement, params).await
    }
    async fn query_raw<T, P, I>(&self, statement: &T, params: I) -> Result<RowStream, Error>
    where
        T: ?Sized + ToStatement + Sync,
        P: BorrowToSql,
        I: IntoIterator<Item = P> + Sync + Send,
        I::IntoIter: ExactSizeIterator,
    {
        Transaction::query_raw(self, statement, params).await
    }
}
#[async_trait]
impl<'a> Client for deadpool_postgres::Transaction<'a> {
    // NOTE(review): `prepare`/`prepare_typed` go through the deadpool
    // wrapper's own methods, while the remaining methods deref straight to
    // the inner `tokio_postgres::Transaction` — confirm this mix is
    // intentional (the wrapper may add statement caching).
    async fn prepare(&self, query: &str) -> Result<Statement, Error> {
        deadpool_postgres::Transaction::prepare(self, query).await
    }
    async fn prepare_typed(&self, query: &str, types: &[Type]) -> Result<Statement, Error> {
        deadpool_postgres::Transaction::prepare_typed(self, query, types).await
    }
    async fn execute<T>(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> Result<u64, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        self.deref().execute(statement, params).await
    }
    async fn query_opt<T>(
        &self,
        statement: &T,
        params: &[&(dyn ToSql + Sync)],
    ) -> Result<Option<Row>, Error>
    where
        T: ?Sized + ToStatement + Sync,
    {
        self.deref().query_opt(statement, params).await
    }
    async fn query_raw<T, P, I>(&self, statement: &T, params: I) -> Result<RowStream, Error>
    where
        T: ?Sized + ToStatement + Sync,
        P: BorrowToSql,
        I: IntoIterator<Item = P> + Sync + Send,
        I::IntoIter: ExactSizeIterator,
    {
        self.deref().query_raw(statement, params).await
    }
}
/// A database based service.
#[async_trait]
pub trait DatabaseService: Send + Sync + Sized {
    /// The connection pool backing this service.
    fn pool(&self) -> &Pool;
    /// Readiness probe: checks out a connection and runs `SELECT 1`.
    async fn is_ready(&self) -> Result<(), ServiceError> {
        self.pool().get().await?.simple_query("SELECT 1").await?;
        Ok(())
    }
}
|
use std::time::Duration;
use crate::echo_server::{Echo, EchoServer};
use crate::greeter_server::{Greeter, GreeterServer};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use tonic::{transport::Server, Request, Response, Status};
tonic::include_proto!("helloworld");
tonic::include_proto!("grpc.examples.echo");
/// Stateless greeter service implementation.
#[derive(Default)]
pub struct MyGreeter {}
#[tonic::async_trait]
impl Greeter for MyGreeter {
    /// Responds with a greeting that embeds the caller-supplied name,
    /// logging the peer address of each request.
    async fn say_hello(
        &self,
        request: Request<HelloRequest>,
    ) -> Result<Response<HelloReply>, Status> {
        println!("Got a request from {:?}", request.remote_addr());
        let name = request.into_inner().name;
        let reply = HelloReply {
            message: format!("Hello {}!", name),
        };
        Ok(Response::new(reply))
    }
}
/// Stream type produced by the server-streaming echo endpoints.
type ResponseStream = ReceiverStream<Result<EchoResponse, Status>>;
/// Stateless implementation of the generated `Echo` service.
#[derive(Default)]
pub struct MyEcho;
#[tonic::async_trait]
impl Echo for MyEcho {
    /// Echoes the request message back unchanged.
    async fn unary_echo(
        &self,
        request: Request<EchoRequest>,
    ) -> Result<Response<EchoResponse>, Status> {
        let message = request.into_inner().message;
        Ok(Response::new(EchoResponse { message }))
    }
    type ServerStreamingEchoStream = ResponseStream;
    /// Echoes the message twice over a stream, one second apart.
    async fn server_streaming_echo(
        &self,
        request: Request<EchoRequest>,
    ) -> Result<Response<Self::ServerStreamingEchoStream>, Status> {
        let payload = request.into_inner().message;
        // The producer task only ever sends two items, so a capacity of 4
        // can never block.
        let (sender, receiver) = mpsc::channel(4);
        tokio::spawn(async move {
            // First copy immediately; second copy after a one-second pause.
            sender
                .send(Ok(EchoResponse {
                    message: payload.clone(),
                }))
                .await
                .unwrap();
            tokio::time::sleep(Duration::from_secs(1)).await;
            sender.send(Ok(EchoResponse { message: payload })).await.unwrap();
        });
        Ok(Response::new(ReceiverStream::new(receiver)))
    }
    /// Client-streaming echo is not provided by this example.
    async fn client_streaming_echo(
        &self,
        _: Request<tonic::Streaming<EchoRequest>>,
    ) -> Result<Response<EchoResponse>, Status> {
        Err(Status::unimplemented("Not yet implemented"))
    }
    type BidirectionalStreamingEchoStream = ResponseStream;
    /// Bidirectional streaming echo is not provided by this example.
    async fn bidirectional_streaming_echo(
        &self,
        _: Request<tonic::Streaming<EchoRequest>>,
    ) -> Result<Response<Self::BidirectionalStreamingEchoStream>, Status> {
        Err(Status::unimplemented("Not yet implemented"))
    }
}
/// Boots a combined gRPC-Web server exposing the Greeter and Echo services
/// on 127.0.0.1:8080.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    pretty_env_logger::init();
    // gRPC-Web layer with a permissive CORS policy, shared by both services.
    let config = tonic_web::config().allow_all_origins();
    let addr = "127.0.0.1:8080".parse().unwrap();
    Server::builder()
        // gRPC-Web clients speak HTTP/1.1, so the server must accept it.
        .accept_http1(true)
        .add_service(config.enable(GreeterServer::new(MyGreeter::default())))
        .add_service(config.enable(EchoServer::new(MyEcho::default())))
        .serve(addr)
        .await?;
    Ok(())
}
|
pub mod hash;
pub mod os;
#[cfg(test)]
mod tests;
|
#![allow(bad_style, unused_variables)]
extern crate winapi;
mod sqltypes;
mod sql;
mod sqlext;
use sqltypes::*;
use sql::*;
#[no_mangle]
pub extern fn SQLAllocHandle(
handleType: SQLSMALLINT,
inputHandle: SQLHANDLE,
outputHandlePtr: *mut SQLHANDLE) -> SQLRETURN {
return SQL_SUCCESS;
}
#[no_mangle]
pub extern fn SQLBindCol(
statementHandle: SQLHSTMT,
columnNumber: SQLUSMALLINT,
targetType: SQLSMALLINT,
targetValuePtr: SQLPOINTER,
bufferLength: SQLLEN,
strLen_or_Ind: *const SQLLEN) -> SQLRETURN {
return SQL_SUCCESS;
}
//TODO: SQLBindParameter
//TODO: SQLBrowseConnectW
//TODO: SQLBulkOperations
#[no_mangle]
pub extern fn SQLCancel(
statementHandle: SQLHSTMT) -> SQLRETURN {
return SQL_SUCCESS;
}
//SQLCloseCursor
//SQLColAttributeW
//SQLColumnPrivilegesW
//SQLColumnsW
#[no_mangle]
pub extern fn SQLConnectW(
connectionHandle: SQLHDBC,
serverName: *const SQLCHAR,
nameLength1: SQLSMALLINT,
userName: *const SQLCHAR,
nameLength2: SQLSMALLINT,
authentication: *const SQLCHAR,
nameLength3: SQLSMALLINT) -> SQLRETURN {
return SQL_SUCCESS;
}
//SQLCopyDesc
//SQLDebug
//SQLDescribeColW
//SQLDescribeParam
//SQLDisconnect
#[no_mangle]
pub extern fn SQLDriverConnectW(
connectionHandle: SQLHDBC,
windowHandle: SQLHWND,
inConnectionString: *const SQLCHAR,
stringLength1: SQLSMALLINT,
outConnectionString: *mut SQLCHAR,
bufferLength: SQLSMALLINT,
stringLength2Ptr: *mut SQLSMALLINT,
driverCompletion: SQLUSMALLINT) -> SQLRETURN {
unsafe {
std::ptr::copy_nonoverlapping(inConnectionString, outConnectionString, stringLength1 as usize);
std::ptr::write(stringLength2Ptr, stringLength1);
}
return SQL_SUCCESS;
}
//SQLEndTran
//SQLExecDirectW
//SQLExecute
//SQLExtendedFetch
//SQLFetch
//SQLFetchScroll
//SQLForeignKeysW
//SQLFreeHandle
//SQLFreeStmt
//SQLGetConnectAttrW
//SQLGetConnectOptionW
//SQLGetCursorNameW
//SQLGetData
//SQLGetDescFieldW
//SQLGetDescRecW
//SQLGetDiagFieldW
//SQLGetDiagRecW
//SQLGetEnvAttr
//SQLGetFunctions
//SQLGetInfoW
//SQLGetStmtAttrW
//SQLGetTypeInfoW
//SQLMoreResults
//SQLNativeSqlW
//SQLNumParams
//SQLNumResultCols
//SQLParamData
//SQLParamOptions
//SQLPrepareW
//SQLPrimaryKeysW
//SQLProcedureColumnsW
//SQLProceduresW
//SQLPutData
//SQLRowCount
//SQLSetConnectAttrW
//SQLSetConnectOptionW
//SQLSetCursorNameW
//SQLSetDescFieldW
//SQLSetDescRec
//SQLSetEnvAttr
//SQLSetPos
//SQLSetScrollOptions
//SQLSetStmtAttrW
//SQLSpecialColumnsW
//SQLStatisticsW
//SQLTablePrivilegesW
//SQLTablesW
//TestDlgProc
//WizDSNDlgProc
//WizDatabaseDlgProc
//WizIntSecurityDlgProc
//WizLanguageDlgProc
//BCP_batch
//BCP_bind
//BCP_colfmt
//BCP_collen
//BCP_colptr
//BCP_columns
//BCP_control
//BCP_done
//BCP_exec
//BCP_getcolfmt
//BCP_init
//BCP_moretext
//BCP_readfmt
//BCP_sendrow
//BCP_setcolfmt
//BCP_writefmt
//ConfigDSNW
//ConfigDriverW
//ConnectDlgProc
//FinishDlgProc
//LibMain |
use std::cmp;
use std::collections::HashMap;
use minhashes::KmerCount;
use statistics::hist;
/// Used to pass around filter options for sketching
#[derive(Debug)]
pub struct FilterParams {
    /// Whether filtering is enabled at all; `None` means "not decided yet"
    /// and `filter_sketch` panics if it is still unset when called.
    pub filter_on: Option<bool>,
    /// (minimum, maximum) copy-number bounds; `None` disables that bound.
    pub abun_filter: (Option<u16>, Option<u16>),
    /// Fraction of the weighted k-mer histogram treated as likely
    /// sequencing error; 0 disables error filtering.
    pub err_filter: f32,
    /// Minimum minority-strand fraction a k-mer must have to be kept;
    /// 0 disables strand filtering.
    pub strand_filter: f32,
}
/// Applies the configured filters to a sketch, returning the surviving
/// hashes plus a map describing which filters were applied (used for
/// output metadata).
///
/// # Panics
/// Panics if `filters.filter_on` is still `None` — it must be set
/// (explicitly or during detection) before filtering.
pub fn filter_sketch(hashes: &[KmerCount], filters: &FilterParams) -> (Vec<KmerCount>, HashMap<String, String>) {
    let filter_on = filters.filter_on.expect("Sorry! Filter should have either been passed or set during detection");
    let mut filter_stats: HashMap<String, String> = HashMap::new();
    let mut low_abun_filter = filters.abun_filter.0;
    let mut filtered_hashes = hashes.to_vec();
    if filter_on && filters.strand_filter > 0f32 {
        filtered_hashes = filter_strands(&filtered_hashes, filters.strand_filter);
        filter_stats.insert(String::from("strandFilter"), filters.strand_filter.to_string());
    }
    if filter_on && filters.err_filter > 0f32 {
        let cutoff = guess_filter_threshold(&filtered_hashes, filters.err_filter);
        // An explicitly supplied minimum-abundance bound takes precedence
        // over the guessed error threshold.
        if low_abun_filter.is_none() {
            low_abun_filter = Some(cutoff);
            filter_stats.insert(String::from("errFilter"), filters.err_filter.to_string());
        }
    }
    if filter_on && (low_abun_filter.is_some() || filters.abun_filter.1.is_some()) {
        filtered_hashes = filter_abundance(&filtered_hashes, low_abun_filter, filters.abun_filter.1);
        if let Some(v) = low_abun_filter {
            filter_stats.insert(String::from("minCopies"), v.to_string());
        }
        if let Some(v) = filters.abun_filter.1 {
            filter_stats.insert(String::from("maxCopies"), v.to_string());
        }
    }
    (filtered_hashes, filter_stats)
}
/// Determines a dynamic filtering threshold for low abundance kmers
///
/// Useful for removing, e.g. kmers containing sequencing errors
///
pub fn guess_filter_threshold(sketch: &[KmerCount], filter_level: f32) -> u16 {
    // Copy-number histogram; from the weighting below, hist_data[i] is
    // assumed to count k-mers observed i + 1 times — confirm against
    // `statistics::hist`.
    let hist_data = hist(sketch);
    // Total observations, weighting each bin by its copy number (i + 1).
    let total_counts = hist_data.iter().enumerate().map(|t| (t.0 as u64 + 1) * t.1).sum::<u64>() as f32;
    let cutoff_amt = filter_level * total_counts;
    // calculate the coverage that N% of the weighted data is above
    let mut wgt_cutoff: usize = 1;
    let mut cum_count: u64 = 0;
    for count in &hist_data {
        cum_count += wgt_cutoff as u64 * *count as u64;
        if cum_count as f32 > cutoff_amt {
            break;
        }
        wgt_cutoff += 1;
    }
    // Tiny cutoffs are returned as-is; there is no room for a window scan.
    if wgt_cutoff <= 2 {
        return wgt_cutoff as u16;
    }
    // now find the right-most global maxima
    // NOTE(review): despite the comment above, the scan below tracks the
    // sliding window with the LOWEST sum (`<=` keeps the right-most tie),
    // i.e. a minimum/valley below wgt_cutoff — confirm which was intended.
    let win_size = cmp::max(1, wgt_cutoff / 20);
    let mut sum: u64 = hist_data[..win_size].iter().sum();
    let mut lowest_val = sum;
    let mut lowest_idx = win_size;
    for (i, j) in (0..wgt_cutoff - win_size).zip(win_size..wgt_cutoff) {
        // `sum` currently covers hist_data[i..i + win_size]; the recorded
        // index is the window's right edge `j`.
        if sum <= lowest_val {
            lowest_val = sum;
            lowest_idx = j;
        }
        // Slide the window one bin to the right.
        sum -= hist_data[i];
        sum += hist_data[j];
    }
    lowest_idx as u16
}
/// Exercises `guess_filter_threshold` on empty, trivial, and bimodal
/// sketches.
#[test]
fn test_guess_filter_threshold() {
    // Fixture helper: a hash/count pair with no kmer bytes or strand info.
    let kc = |hash, count| KmerCount { hash, kmer: vec![], count, extra_count: 0 };
    // Degenerate sketches all fall back to a cutoff of 1.
    assert_eq!(guess_filter_threshold(&[], 0.2), 1);
    assert_eq!(guess_filter_threshold(&[kc(1, 1)], 0.2), 1);
    assert_eq!(guess_filter_threshold(&[kc(1, 1), kc(2, 1)], 0.2), 1);
    // A low-abundance singleton next to abundant k-mers is cut just below
    // the abundant peak.
    assert_eq!(guess_filter_threshold(&[kc(1, 1), kc(2, 9)], 0.2), 8);
    assert_eq!(
        guess_filter_threshold(&[kc(1, 1), kc(2, 10), kc(3, 10), kc(4, 9)], 0.1),
        8
    );
    // When low-abundance k-mers dominate, the cutoff stays at 1.
    assert_eq!(
        guess_filter_threshold(&[kc(1, 1), kc(2, 1), kc(3, 2), kc(4, 4)], 0.1),
        1
    );
}
/// Keeps only the k-mers whose copy number lies within `[low, high]`
/// (both bounds inclusive; either may be omitted).
pub fn filter_abundance(sketch: &[KmerCount], low: Option<u16>, high: Option<u16>) -> Vec<KmerCount> {
    // Missing bounds degrade to the widest possible range.
    let lo_threshold = low.unwrap_or(0u16);
    let hi_threshold = high.unwrap_or(u16::max_value());
    sketch
        .iter()
        .filter(|kmer| lo_threshold <= kmer.count && kmer.count <= hi_threshold)
        .cloned()
        .collect()
}
/// Checks the inclusive lower/upper bounds of `filter_abundance`.
#[test]
fn test_filter_abundance() {
    let kc = |hash, count| KmerCount { hash, kmer: vec![], count, extra_count: 0 };
    // A lower bound equal to every count keeps everything (inclusive).
    let kept = filter_abundance(&[kc(1, 1), kc(2, 1)], Some(1), None);
    assert_eq!(kept.iter().map(|k| k.hash).collect::<Vec<_>>(), vec![1, 2]);
    let sketch = vec![kc(1, 1), kc(2, 10), kc(3, 10), kc(4, 9)];
    // Lower bound only: the singleton is dropped, order is preserved.
    let kept = filter_abundance(&sketch, Some(9), None);
    assert_eq!(kept.iter().map(|k| k.hash).collect::<Vec<_>>(), vec![2, 3, 4]);
    // Both bounds: only the count-9 entry survives.
    let kept = filter_abundance(&sketch, Some(2), Some(9));
    assert_eq!(kept.iter().map(|k| k.hash).collect::<Vec<_>>(), vec![4]);
}
/// Filter out kmers that have a large abundance difference between being seen in the
/// "forward" and "reverse" orientations (picked arbitrarily which is which).
///
/// These tend to be sequencing adapters.
pub fn filter_strands(sketch: &[KmerCount], ratio_cutoff: f32) -> Vec<KmerCount> {
    let keep = |kmer: &KmerCount| {
        // Anything with fewer than 16 observations is too stochastic to call
        // an adapter: the chance of randomly drawing <10% (0 or 1) reversed
        // copies in 16 is roughly 17 / 2^16 (~1/4000), so low-count k-mers
        // always pass to avoid dropping "good" ones.
        if kmer.count < 16u16 {
            return true;
        }
        // Otherwise the minority strand must carry at least `ratio_cutoff`
        // of the total observations.
        let minority = cmp::min(kmer.extra_count, kmer.count - kmer.extra_count);
        minority as f32 / kmer.count as f32 >= ratio_cutoff
    };
    sketch.iter().filter(|kmer| keep(kmer)).cloned().collect()
}
/// Checks the low-count pass-through and the minority-strand ratio cutoff.
#[test]
fn test_filter_strands() {
    let kc = |hash, count, extra_count| KmerCount { hash, kmer: vec![], count, extra_count };
    // Below 16 total observations everything is kept regardless of skew.
    let kept = filter_strands(&[kc(1, 10, 1), kc(2, 10, 2), kc(3, 10, 8), kc(4, 10, 9)], 0.15);
    assert_eq!(kept.iter().map(|k| k.hash).collect::<Vec<_>>(), vec![1, 2, 3, 4]);
    // At 16 observations the 15% minority-strand cutoff applies: 1/16 and
    // 2/16 fall below it; 8/16 and min(9, 7)/16 pass.
    let kept = filter_strands(&[kc(1, 16, 1), kc(2, 16, 2), kc(3, 16, 8), kc(4, 16, 9)], 0.15);
    assert_eq!(kept.iter().map(|k| k.hash).collect::<Vec<_>>(), vec![3, 4]);
}
|
extern crate colored;
use colored::*;
// Kept for API compatibility; currently unused.
#[allow(dead_code)]
const EMPTY_STRING: &'static str = "";
/// Pattern used when the caller supplies none: match everything.
pub const DEFAULT_PATTERN: &'static str = ".*";
/// Target used when the caller supplies none: the current directory.
pub const DEFAULT_TARGET: &'static str = ".";
/// Options controlling a filesystem search.
#[derive(Clone, Debug)]
pub struct SearchArgs<'a> {
    /// Whether matching path segments are colorized in the output.
    pub highlight: bool,
    // Raw regex source; private so it is only consumed via
    // `construct_pattern`.
    pattern: &'a str,
    /// Root directory the search walks.
    pub target: &'a str,
    pub case_sensitive: bool,
    pub include_dirs: bool,
    pub include_files: bool
}
impl<'a> SearchArgs<'a> {
    /// Returns a copy of these arguments with a different pattern.
    pub fn with_pattern(&self, pattern: &'a str) -> Self {
        SearchArgs { pattern, ..self.clone() }
    }
    /// Builds the final regex source, prefixing the case-insensitivity
    /// flag when requested.
    pub fn construct_pattern(&self) -> String {
        match self.case_sensitive {
            true => String::from(self.pattern),
            false => format!("(?i){}", self.pattern),
        }
    }
}
impl<'a> Default for SearchArgs<'a> {
    fn default() -> Self {
        SearchArgs {
            // Windows terminals historically mishandle ANSI colour codes,
            // so highlighting defaults off there.
            highlight: !cfg!(windows),
            pattern: DEFAULT_PATTERN,
            target: DEFAULT_TARGET,
            case_sensitive: true,
            include_dirs: false,
            include_files: true
        }
    }
}
/// Walks `args.target` and invokes `callback` with every path that matches
/// the configured pattern.
///
/// * Entries are pre-filtered by `include_dirs` / `include_files`.
/// * If the pattern fails to compile as a regex, every surviving entry is
///   reported (preserving the original fallback behaviour).
/// * With `highlight` set, the matching portion is wrapped in ANSI yellow.
pub fn search<'a>(args: &'a SearchArgs, callback: &mut dyn FnMut(&str)) {
    // Compile once, up front; the Result is inspected per entry below so a
    // bad pattern degrades to "report everything" rather than panicking.
    let re = regex::Regex::new(&args.construct_pattern());
    let filter: Box<dyn Fn(&walkdir::DirEntry) -> bool> = if args.include_dirs && args.include_files {
        Box::new(|_| true)
    } else if args.include_dirs {
        Box::new(|x| x.path().is_dir())
    } else if args.include_files {
        Box::new(|x| x.path().exists() && !x.path().is_dir())
    } else {
        Box::new(|_| false)
    };
    for entry in walkdir::WalkDir::new(args.target).into_iter().filter_map(|e| e.ok()).filter(filter.as_ref()) {
        // Non-UTF-8 paths degrade to the empty string, as before.
        let path = entry.path().to_str().unwrap_or("");
        match &re {
            Ok(reg) => {
                if reg.is_match(path) {
                    if args.highlight {
                        // "$0" re-inserts the whole match inside the colour codes.
                        let highlighted = reg.replace_all(path, "$0".yellow().to_string().as_str());
                        callback(&highlighted);
                    } else {
                        callback(path);
                    }
                }
            }
            Err(_) => callback(path),
        }
    }
}
#[cfg(test)]
mod tests {
    use std::path::Path;
    // NOTE(review): this test assumes the crate contains exactly
    // `src/lib.rs` and `src/main.rs` and that the directory walk yields
    // them in that order — it will break if source files are added or the
    // walk order changes. Confirm against the crate layout.
    #[test]
    fn test_on_self() {
        let mut results = Vec::new();
        // Highlighting is disabled so the collected paths are plain text.
        let mut args = super::SearchArgs::default().with_pattern("\\.rs$");
        args.highlight = false;
        super::search(&args, &mut |x| results.push(String::from(x)));
        assert_eq!(results.len(), 2);
        assert_eq!(Path::new(&results[0]), Path::new(".").join("src").join("lib.rs"));
        assert_eq!(Path::new(&results[1]), Path::new(".").join("src").join("main.rs"));
    }
}
|
/// Top-level mutable application state.
#[derive(Default)]
pub struct State {
    /// All tracked characters, in display order.
    pub characters: Vec<Character>,
    // Index into `characters`; presumably the currently selected entry —
    // name-derived, confirm against the UI code.
    pub selected_index: usize,
    /// Free-form log lines.
    pub log_messages: Vec<String>,
}
/// A single tracked character.
pub struct Character {
    /// Display name.
    pub name: String,
    /// Hit points kept as free-form text, e.g. "24/24".
    pub hp: String,
    /// Optional status note (e.g. "dazed").
    pub notes: Option<String>,
}
impl Character {
    /// Builds a character with the given name and hp text and no notes.
    fn new(name: &str, hp: &str) -> Self {
        Self {
            name: name.to_owned(),
            hp: hp.to_owned(),
            notes: None,
        }
    }
}
/// Seeds the application with a demo roster of players and monsters;
/// the third entry starts with a "dazed" note.
pub fn build_state() -> State {
    let roster = [
        "Player #1", "Player #2", "Monster #1", "Player #3",
        "Monster #2", "Player #4", "Monster #3", "Monster #4",
    ];
    let mut state = State::default();
    for (idx, name) in roster.iter().enumerate() {
        let mut member = Character::new(name, "24/24");
        // Mirror the original demo data: slot 2 ("Monster #1") is dazed.
        if idx == 2 {
            member.notes = Some("dazed".to_string());
        }
        state.characters.push(member);
    }
    state
}
|
// NOTE(review): svd2rust-style generated register map. With #[repr(C)],
// field order encodes the MMIO offsets listed in each field's doc string
// (30 x 8-byte GPIO clusters fill 0x00..0xf0, then the interrupt
// registers) — never reorder or insert fields by hand; regenerate from
// the device SVD instead.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Cluster GPIO%s, containing GPIO*_STATUS, GPIO*_CTRL"]
    pub gpio: [GPIO; 30],
    #[doc = "0xf0 - Raw Interrupts"]
    pub intr0: INTR0,
    #[doc = "0xf4 - Raw Interrupts"]
    pub intr1: INTR1,
    #[doc = "0xf8 - Raw Interrupts"]
    pub intr2: INTR2,
    #[doc = "0xfc - Raw Interrupts"]
    pub intr3: INTR3,
    #[doc = "0x100 - Interrupt Enable for proc0"]
    pub proc0_inte0: PROC0_INTE0,
    #[doc = "0x104 - Interrupt Enable for proc0"]
    pub proc0_inte1: PROC0_INTE1,
    #[doc = "0x108 - Interrupt Enable for proc0"]
    pub proc0_inte2: PROC0_INTE2,
    #[doc = "0x10c - Interrupt Enable for proc0"]
    pub proc0_inte3: PROC0_INTE3,
    #[doc = "0x110 - Interrupt Force for proc0"]
    pub proc0_intf0: PROC0_INTF0,
    #[doc = "0x114 - Interrupt Force for proc0"]
    pub proc0_intf1: PROC0_INTF1,
    #[doc = "0x118 - Interrupt Force for proc0"]
    pub proc0_intf2: PROC0_INTF2,
    #[doc = "0x11c - Interrupt Force for proc0"]
    pub proc0_intf3: PROC0_INTF3,
    #[doc = "0x120 - Interrupt status after masking & forcing for proc0"]
    pub proc0_ints0: PROC0_INTS0,
    #[doc = "0x124 - Interrupt status after masking & forcing for proc0"]
    pub proc0_ints1: PROC0_INTS1,
    #[doc = "0x128 - Interrupt status after masking & forcing for proc0"]
    pub proc0_ints2: PROC0_INTS2,
    #[doc = "0x12c - Interrupt status after masking & forcing for proc0"]
    pub proc0_ints3: PROC0_INTS3,
    #[doc = "0x130 - Interrupt Enable for proc1"]
    pub proc1_inte0: PROC1_INTE0,
    #[doc = "0x134 - Interrupt Enable for proc1"]
    pub proc1_inte1: PROC1_INTE1,
    #[doc = "0x138 - Interrupt Enable for proc1"]
    pub proc1_inte2: PROC1_INTE2,
    #[doc = "0x13c - Interrupt Enable for proc1"]
    pub proc1_inte3: PROC1_INTE3,
    #[doc = "0x140 - Interrupt Force for proc1"]
    pub proc1_intf0: PROC1_INTF0,
    #[doc = "0x144 - Interrupt Force for proc1"]
    pub proc1_intf1: PROC1_INTF1,
    #[doc = "0x148 - Interrupt Force for proc1"]
    pub proc1_intf2: PROC1_INTF2,
    #[doc = "0x14c - Interrupt Force for proc1"]
    pub proc1_intf3: PROC1_INTF3,
    #[doc = "0x150 - Interrupt status after masking & forcing for proc1"]
    pub proc1_ints0: PROC1_INTS0,
    #[doc = "0x154 - Interrupt status after masking & forcing for proc1"]
    pub proc1_ints1: PROC1_INTS1,
    #[doc = "0x158 - Interrupt status after masking & forcing for proc1"]
    pub proc1_ints2: PROC1_INTS2,
    #[doc = "0x15c - Interrupt status after masking & forcing for proc1"]
    pub proc1_ints3: PROC1_INTS3,
    #[doc = "0x160 - Interrupt Enable for dormant_wake"]
    pub dormant_wake_inte0: DORMANT_WAKE_INTE0,
    #[doc = "0x164 - Interrupt Enable for dormant_wake"]
    pub dormant_wake_inte1: DORMANT_WAKE_INTE1,
    #[doc = "0x168 - Interrupt Enable for dormant_wake"]
    pub dormant_wake_inte2: DORMANT_WAKE_INTE2,
    #[doc = "0x16c - Interrupt Enable for dormant_wake"]
    pub dormant_wake_inte3: DORMANT_WAKE_INTE3,
    #[doc = "0x170 - Interrupt Force for dormant_wake"]
    pub dormant_wake_intf0: DORMANT_WAKE_INTF0,
    #[doc = "0x174 - Interrupt Force for dormant_wake"]
    pub dormant_wake_intf1: DORMANT_WAKE_INTF1,
    #[doc = "0x178 - Interrupt Force for dormant_wake"]
    pub dormant_wake_intf2: DORMANT_WAKE_INTF2,
    #[doc = "0x17c - Interrupt Force for dormant_wake"]
    pub dormant_wake_intf3: DORMANT_WAKE_INTF3,
    #[doc = "0x180 - Interrupt status after masking & forcing for dormant_wake"]
    pub dormant_wake_ints0: DORMANT_WAKE_INTS0,
    #[doc = "0x184 - Interrupt status after masking & forcing for dormant_wake"]
    pub dormant_wake_ints1: DORMANT_WAKE_INTS1,
    #[doc = "0x188 - Interrupt status after masking & forcing for dormant_wake"]
    pub dormant_wake_ints2: DORMANT_WAKE_INTS2,
    #[doc = "0x18c - Interrupt status after masking & forcing for dormant_wake"]
    pub dormant_wake_ints3: DORMANT_WAKE_INTS3,
}
// NOTE(review): one generated 8-byte GPIO cluster (status at +0x00,
// control at +0x04); `RegisterBlock.gpio` holds 30 of these back-to-back.
// #[repr(C)] field order is the hardware layout — do not reorder.
#[doc = r"Register block"]
#[repr(C)]
pub struct GPIO {
    #[doc = "0x00 - GPIO status"]
    pub gpio_status: self::gpio::GPIO_STATUS,
    #[doc = "0x04 - GPIO control including function select and overrides."]
    pub gpio_ctrl: self::gpio::GPIO_CTRL,
}
// NOTE(review): svd2rust-generated register/type declarations — do not edit
// by hand; regenerate from the device SVD instead. Pattern per register: a
// hidden marker struct `_NAME`, a `crate::Reg<u32, _NAME>` alias, marker
// impls for `Readable` (and `Writable` for read-write registers; the INTS
// status registers are read-only), and a module with the field accessors.
#[doc = r"Register block"]
#[doc = "Cluster GPIO%s, containing GPIO*_STATUS, GPIO*_CTRL"]
pub mod gpio;
#[doc = "Raw Interrupts\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr0](intr0) module"]
pub type INTR0 = crate::Reg<u32, _INTR0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR0;
#[doc = "`read()` method returns [intr0::R](intr0::R) reader structure"]
impl crate::Readable for INTR0 {}
#[doc = "`write(|w| ..)` method takes [intr0::W](intr0::W) writer structure"]
impl crate::Writable for INTR0 {}
#[doc = "Raw Interrupts"]
pub mod intr0;
#[doc = "Raw Interrupts\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr1](intr1) module"]
pub type INTR1 = crate::Reg<u32, _INTR1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR1;
#[doc = "`read()` method returns [intr1::R](intr1::R) reader structure"]
impl crate::Readable for INTR1 {}
#[doc = "`write(|w| ..)` method takes [intr1::W](intr1::W) writer structure"]
impl crate::Writable for INTR1 {}
#[doc = "Raw Interrupts"]
pub mod intr1;
#[doc = "Raw Interrupts\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr2](intr2) module"]
pub type INTR2 = crate::Reg<u32, _INTR2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR2;
#[doc = "`read()` method returns [intr2::R](intr2::R) reader structure"]
impl crate::Readable for INTR2 {}
#[doc = "`write(|w| ..)` method takes [intr2::W](intr2::W) writer structure"]
impl crate::Writable for INTR2 {}
#[doc = "Raw Interrupts"]
pub mod intr2;
#[doc = "Raw Interrupts\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intr3](intr3) module"]
pub type INTR3 = crate::Reg<u32, _INTR3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTR3;
#[doc = "`read()` method returns [intr3::R](intr3::R) reader structure"]
impl crate::Readable for INTR3 {}
#[doc = "`write(|w| ..)` method takes [intr3::W](intr3::W) writer structure"]
impl crate::Writable for INTR3 {}
#[doc = "Raw Interrupts"]
pub mod intr3;
#[doc = "Interrupt Enable for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_inte0](proc0_inte0) module"]
pub type PROC0_INTE0 = crate::Reg<u32, _PROC0_INTE0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTE0;
#[doc = "`read()` method returns [proc0_inte0::R](proc0_inte0::R) reader structure"]
impl crate::Readable for PROC0_INTE0 {}
#[doc = "`write(|w| ..)` method takes [proc0_inte0::W](proc0_inte0::W) writer structure"]
impl crate::Writable for PROC0_INTE0 {}
#[doc = "Interrupt Enable for proc0"]
pub mod proc0_inte0;
#[doc = "Interrupt Enable for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_inte1](proc0_inte1) module"]
pub type PROC0_INTE1 = crate::Reg<u32, _PROC0_INTE1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTE1;
#[doc = "`read()` method returns [proc0_inte1::R](proc0_inte1::R) reader structure"]
impl crate::Readable for PROC0_INTE1 {}
#[doc = "`write(|w| ..)` method takes [proc0_inte1::W](proc0_inte1::W) writer structure"]
impl crate::Writable for PROC0_INTE1 {}
#[doc = "Interrupt Enable for proc0"]
pub mod proc0_inte1;
#[doc = "Interrupt Enable for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_inte2](proc0_inte2) module"]
pub type PROC0_INTE2 = crate::Reg<u32, _PROC0_INTE2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTE2;
#[doc = "`read()` method returns [proc0_inte2::R](proc0_inte2::R) reader structure"]
impl crate::Readable for PROC0_INTE2 {}
#[doc = "`write(|w| ..)` method takes [proc0_inte2::W](proc0_inte2::W) writer structure"]
impl crate::Writable for PROC0_INTE2 {}
#[doc = "Interrupt Enable for proc0"]
pub mod proc0_inte2;
#[doc = "Interrupt Enable for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_inte3](proc0_inte3) module"]
pub type PROC0_INTE3 = crate::Reg<u32, _PROC0_INTE3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTE3;
#[doc = "`read()` method returns [proc0_inte3::R](proc0_inte3::R) reader structure"]
impl crate::Readable for PROC0_INTE3 {}
#[doc = "`write(|w| ..)` method takes [proc0_inte3::W](proc0_inte3::W) writer structure"]
impl crate::Writable for PROC0_INTE3 {}
#[doc = "Interrupt Enable for proc0"]
pub mod proc0_inte3;
#[doc = "Interrupt Force for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_intf0](proc0_intf0) module"]
pub type PROC0_INTF0 = crate::Reg<u32, _PROC0_INTF0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTF0;
#[doc = "`read()` method returns [proc0_intf0::R](proc0_intf0::R) reader structure"]
impl crate::Readable for PROC0_INTF0 {}
#[doc = "`write(|w| ..)` method takes [proc0_intf0::W](proc0_intf0::W) writer structure"]
impl crate::Writable for PROC0_INTF0 {}
#[doc = "Interrupt Force for proc0"]
pub mod proc0_intf0;
#[doc = "Interrupt Force for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_intf1](proc0_intf1) module"]
pub type PROC0_INTF1 = crate::Reg<u32, _PROC0_INTF1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTF1;
#[doc = "`read()` method returns [proc0_intf1::R](proc0_intf1::R) reader structure"]
impl crate::Readable for PROC0_INTF1 {}
#[doc = "`write(|w| ..)` method takes [proc0_intf1::W](proc0_intf1::W) writer structure"]
impl crate::Writable for PROC0_INTF1 {}
#[doc = "Interrupt Force for proc0"]
pub mod proc0_intf1;
#[doc = "Interrupt Force for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_intf2](proc0_intf2) module"]
pub type PROC0_INTF2 = crate::Reg<u32, _PROC0_INTF2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTF2;
#[doc = "`read()` method returns [proc0_intf2::R](proc0_intf2::R) reader structure"]
impl crate::Readable for PROC0_INTF2 {}
#[doc = "`write(|w| ..)` method takes [proc0_intf2::W](proc0_intf2::W) writer structure"]
impl crate::Writable for PROC0_INTF2 {}
#[doc = "Interrupt Force for proc0"]
pub mod proc0_intf2;
#[doc = "Interrupt Force for proc0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_intf3](proc0_intf3) module"]
pub type PROC0_INTF3 = crate::Reg<u32, _PROC0_INTF3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTF3;
#[doc = "`read()` method returns [proc0_intf3::R](proc0_intf3::R) reader structure"]
impl crate::Readable for PROC0_INTF3 {}
#[doc = "`write(|w| ..)` method takes [proc0_intf3::W](proc0_intf3::W) writer structure"]
impl crate::Writable for PROC0_INTF3 {}
#[doc = "Interrupt Force for proc0"]
pub mod proc0_intf3;
#[doc = "Interrupt status after masking & forcing for proc0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_ints0](proc0_ints0) module"]
pub type PROC0_INTS0 = crate::Reg<u32, _PROC0_INTS0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTS0;
#[doc = "`read()` method returns [proc0_ints0::R](proc0_ints0::R) reader structure"]
impl crate::Readable for PROC0_INTS0 {}
#[doc = "Interrupt status after masking & forcing for proc0"]
pub mod proc0_ints0;
#[doc = "Interrupt status after masking & forcing for proc0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_ints1](proc0_ints1) module"]
pub type PROC0_INTS1 = crate::Reg<u32, _PROC0_INTS1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTS1;
#[doc = "`read()` method returns [proc0_ints1::R](proc0_ints1::R) reader structure"]
impl crate::Readable for PROC0_INTS1 {}
#[doc = "Interrupt status after masking & forcing for proc0"]
pub mod proc0_ints1;
#[doc = "Interrupt status after masking & forcing for proc0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_ints2](proc0_ints2) module"]
pub type PROC0_INTS2 = crate::Reg<u32, _PROC0_INTS2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTS2;
#[doc = "`read()` method returns [proc0_ints2::R](proc0_ints2::R) reader structure"]
impl crate::Readable for PROC0_INTS2 {}
#[doc = "Interrupt status after masking & forcing for proc0"]
pub mod proc0_ints2;
#[doc = "Interrupt status after masking & forcing for proc0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc0_ints3](proc0_ints3) module"]
pub type PROC0_INTS3 = crate::Reg<u32, _PROC0_INTS3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC0_INTS3;
#[doc = "`read()` method returns [proc0_ints3::R](proc0_ints3::R) reader structure"]
impl crate::Readable for PROC0_INTS3 {}
#[doc = "Interrupt status after masking & forcing for proc0"]
pub mod proc0_ints3;
#[doc = "Interrupt Enable for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_inte0](proc1_inte0) module"]
pub type PROC1_INTE0 = crate::Reg<u32, _PROC1_INTE0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTE0;
#[doc = "`read()` method returns [proc1_inte0::R](proc1_inte0::R) reader structure"]
impl crate::Readable for PROC1_INTE0 {}
#[doc = "`write(|w| ..)` method takes [proc1_inte0::W](proc1_inte0::W) writer structure"]
impl crate::Writable for PROC1_INTE0 {}
#[doc = "Interrupt Enable for proc1"]
pub mod proc1_inte0;
#[doc = "Interrupt Enable for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_inte1](proc1_inte1) module"]
pub type PROC1_INTE1 = crate::Reg<u32, _PROC1_INTE1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTE1;
#[doc = "`read()` method returns [proc1_inte1::R](proc1_inte1::R) reader structure"]
impl crate::Readable for PROC1_INTE1 {}
#[doc = "`write(|w| ..)` method takes [proc1_inte1::W](proc1_inte1::W) writer structure"]
impl crate::Writable for PROC1_INTE1 {}
#[doc = "Interrupt Enable for proc1"]
pub mod proc1_inte1;
#[doc = "Interrupt Enable for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_inte2](proc1_inte2) module"]
pub type PROC1_INTE2 = crate::Reg<u32, _PROC1_INTE2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTE2;
#[doc = "`read()` method returns [proc1_inte2::R](proc1_inte2::R) reader structure"]
impl crate::Readable for PROC1_INTE2 {}
#[doc = "`write(|w| ..)` method takes [proc1_inte2::W](proc1_inte2::W) writer structure"]
impl crate::Writable for PROC1_INTE2 {}
#[doc = "Interrupt Enable for proc1"]
pub mod proc1_inte2;
#[doc = "Interrupt Enable for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_inte3](proc1_inte3) module"]
pub type PROC1_INTE3 = crate::Reg<u32, _PROC1_INTE3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTE3;
#[doc = "`read()` method returns [proc1_inte3::R](proc1_inte3::R) reader structure"]
impl crate::Readable for PROC1_INTE3 {}
#[doc = "`write(|w| ..)` method takes [proc1_inte3::W](proc1_inte3::W) writer structure"]
impl crate::Writable for PROC1_INTE3 {}
#[doc = "Interrupt Enable for proc1"]
pub mod proc1_inte3;
#[doc = "Interrupt Force for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_intf0](proc1_intf0) module"]
pub type PROC1_INTF0 = crate::Reg<u32, _PROC1_INTF0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTF0;
#[doc = "`read()` method returns [proc1_intf0::R](proc1_intf0::R) reader structure"]
impl crate::Readable for PROC1_INTF0 {}
#[doc = "`write(|w| ..)` method takes [proc1_intf0::W](proc1_intf0::W) writer structure"]
impl crate::Writable for PROC1_INTF0 {}
#[doc = "Interrupt Force for proc1"]
pub mod proc1_intf0;
#[doc = "Interrupt Force for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_intf1](proc1_intf1) module"]
pub type PROC1_INTF1 = crate::Reg<u32, _PROC1_INTF1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTF1;
#[doc = "`read()` method returns [proc1_intf1::R](proc1_intf1::R) reader structure"]
impl crate::Readable for PROC1_INTF1 {}
#[doc = "`write(|w| ..)` method takes [proc1_intf1::W](proc1_intf1::W) writer structure"]
impl crate::Writable for PROC1_INTF1 {}
#[doc = "Interrupt Force for proc1"]
pub mod proc1_intf1;
#[doc = "Interrupt Force for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_intf2](proc1_intf2) module"]
pub type PROC1_INTF2 = crate::Reg<u32, _PROC1_INTF2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTF2;
#[doc = "`read()` method returns [proc1_intf2::R](proc1_intf2::R) reader structure"]
impl crate::Readable for PROC1_INTF2 {}
#[doc = "`write(|w| ..)` method takes [proc1_intf2::W](proc1_intf2::W) writer structure"]
impl crate::Writable for PROC1_INTF2 {}
#[doc = "Interrupt Force for proc1"]
pub mod proc1_intf2;
#[doc = "Interrupt Force for proc1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_intf3](proc1_intf3) module"]
pub type PROC1_INTF3 = crate::Reg<u32, _PROC1_INTF3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTF3;
#[doc = "`read()` method returns [proc1_intf3::R](proc1_intf3::R) reader structure"]
impl crate::Readable for PROC1_INTF3 {}
#[doc = "`write(|w| ..)` method takes [proc1_intf3::W](proc1_intf3::W) writer structure"]
impl crate::Writable for PROC1_INTF3 {}
#[doc = "Interrupt Force for proc1"]
pub mod proc1_intf3;
#[doc = "Interrupt status after masking & forcing for proc1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_ints0](proc1_ints0) module"]
pub type PROC1_INTS0 = crate::Reg<u32, _PROC1_INTS0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTS0;
#[doc = "`read()` method returns [proc1_ints0::R](proc1_ints0::R) reader structure"]
impl crate::Readable for PROC1_INTS0 {}
#[doc = "Interrupt status after masking & forcing for proc1"]
pub mod proc1_ints0;
#[doc = "Interrupt status after masking & forcing for proc1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_ints1](proc1_ints1) module"]
pub type PROC1_INTS1 = crate::Reg<u32, _PROC1_INTS1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTS1;
#[doc = "`read()` method returns [proc1_ints1::R](proc1_ints1::R) reader structure"]
impl crate::Readable for PROC1_INTS1 {}
#[doc = "Interrupt status after masking & forcing for proc1"]
pub mod proc1_ints1;
#[doc = "Interrupt status after masking & forcing for proc1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_ints2](proc1_ints2) module"]
pub type PROC1_INTS2 = crate::Reg<u32, _PROC1_INTS2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTS2;
#[doc = "`read()` method returns [proc1_ints2::R](proc1_ints2::R) reader structure"]
impl crate::Readable for PROC1_INTS2 {}
#[doc = "Interrupt status after masking & forcing for proc1"]
pub mod proc1_ints2;
#[doc = "Interrupt status after masking & forcing for proc1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [proc1_ints3](proc1_ints3) module"]
pub type PROC1_INTS3 = crate::Reg<u32, _PROC1_INTS3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PROC1_INTS3;
#[doc = "`read()` method returns [proc1_ints3::R](proc1_ints3::R) reader structure"]
impl crate::Readable for PROC1_INTS3 {}
#[doc = "Interrupt status after masking & forcing for proc1"]
pub mod proc1_ints3;
#[doc = "Interrupt Enable for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_inte0](dormant_wake_inte0) module"]
pub type DORMANT_WAKE_INTE0 = crate::Reg<u32, _DORMANT_WAKE_INTE0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTE0;
#[doc = "`read()` method returns [dormant_wake_inte0::R](dormant_wake_inte0::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTE0 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_inte0::W](dormant_wake_inte0::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTE0 {}
#[doc = "Interrupt Enable for dormant_wake"]
pub mod dormant_wake_inte0;
#[doc = "Interrupt Enable for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_inte1](dormant_wake_inte1) module"]
pub type DORMANT_WAKE_INTE1 = crate::Reg<u32, _DORMANT_WAKE_INTE1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTE1;
#[doc = "`read()` method returns [dormant_wake_inte1::R](dormant_wake_inte1::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTE1 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_inte1::W](dormant_wake_inte1::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTE1 {}
#[doc = "Interrupt Enable for dormant_wake"]
pub mod dormant_wake_inte1;
#[doc = "Interrupt Enable for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_inte2](dormant_wake_inte2) module"]
pub type DORMANT_WAKE_INTE2 = crate::Reg<u32, _DORMANT_WAKE_INTE2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTE2;
#[doc = "`read()` method returns [dormant_wake_inte2::R](dormant_wake_inte2::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTE2 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_inte2::W](dormant_wake_inte2::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTE2 {}
#[doc = "Interrupt Enable for dormant_wake"]
pub mod dormant_wake_inte2;
#[doc = "Interrupt Enable for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_inte3](dormant_wake_inte3) module"]
pub type DORMANT_WAKE_INTE3 = crate::Reg<u32, _DORMANT_WAKE_INTE3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTE3;
#[doc = "`read()` method returns [dormant_wake_inte3::R](dormant_wake_inte3::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTE3 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_inte3::W](dormant_wake_inte3::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTE3 {}
#[doc = "Interrupt Enable for dormant_wake"]
pub mod dormant_wake_inte3;
#[doc = "Interrupt Force for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_intf0](dormant_wake_intf0) module"]
pub type DORMANT_WAKE_INTF0 = crate::Reg<u32, _DORMANT_WAKE_INTF0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTF0;
#[doc = "`read()` method returns [dormant_wake_intf0::R](dormant_wake_intf0::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTF0 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_intf0::W](dormant_wake_intf0::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTF0 {}
#[doc = "Interrupt Force for dormant_wake"]
pub mod dormant_wake_intf0;
#[doc = "Interrupt Force for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_intf1](dormant_wake_intf1) module"]
pub type DORMANT_WAKE_INTF1 = crate::Reg<u32, _DORMANT_WAKE_INTF1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTF1;
#[doc = "`read()` method returns [dormant_wake_intf1::R](dormant_wake_intf1::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTF1 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_intf1::W](dormant_wake_intf1::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTF1 {}
#[doc = "Interrupt Force for dormant_wake"]
pub mod dormant_wake_intf1;
#[doc = "Interrupt Force for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_intf2](dormant_wake_intf2) module"]
pub type DORMANT_WAKE_INTF2 = crate::Reg<u32, _DORMANT_WAKE_INTF2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTF2;
#[doc = "`read()` method returns [dormant_wake_intf2::R](dormant_wake_intf2::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTF2 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_intf2::W](dormant_wake_intf2::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTF2 {}
#[doc = "Interrupt Force for dormant_wake"]
pub mod dormant_wake_intf2;
#[doc = "Interrupt Force for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_intf3](dormant_wake_intf3) module"]
pub type DORMANT_WAKE_INTF3 = crate::Reg<u32, _DORMANT_WAKE_INTF3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTF3;
#[doc = "`read()` method returns [dormant_wake_intf3::R](dormant_wake_intf3::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTF3 {}
#[doc = "`write(|w| ..)` method takes [dormant_wake_intf3::W](dormant_wake_intf3::W) writer structure"]
impl crate::Writable for DORMANT_WAKE_INTF3 {}
#[doc = "Interrupt Force for dormant_wake"]
pub mod dormant_wake_intf3;
#[doc = "Interrupt status after masking & forcing for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_ints0](dormant_wake_ints0) module"]
pub type DORMANT_WAKE_INTS0 = crate::Reg<u32, _DORMANT_WAKE_INTS0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTS0;
#[doc = "`read()` method returns [dormant_wake_ints0::R](dormant_wake_ints0::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTS0 {}
#[doc = "Interrupt status after masking & forcing for dormant_wake"]
pub mod dormant_wake_ints0;
#[doc = "Interrupt status after masking & forcing for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_ints1](dormant_wake_ints1) module"]
pub type DORMANT_WAKE_INTS1 = crate::Reg<u32, _DORMANT_WAKE_INTS1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTS1;
#[doc = "`read()` method returns [dormant_wake_ints1::R](dormant_wake_ints1::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTS1 {}
#[doc = "Interrupt status after masking & forcing for dormant_wake"]
pub mod dormant_wake_ints1;
#[doc = "Interrupt status after masking & forcing for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_ints2](dormant_wake_ints2) module"]
pub type DORMANT_WAKE_INTS2 = crate::Reg<u32, _DORMANT_WAKE_INTS2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTS2;
#[doc = "`read()` method returns [dormant_wake_ints2::R](dormant_wake_ints2::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTS2 {}
#[doc = "Interrupt status after masking & forcing for dormant_wake"]
pub mod dormant_wake_ints2;
#[doc = "Interrupt status after masking & forcing for dormant_wake\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dormant_wake_ints3](dormant_wake_ints3) module"]
pub type DORMANT_WAKE_INTS3 = crate::Reg<u32, _DORMANT_WAKE_INTS3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DORMANT_WAKE_INTS3;
#[doc = "`read()` method returns [dormant_wake_ints3::R](dormant_wake_ints3::R) reader structure"]
impl crate::Readable for DORMANT_WAKE_INTS3 {}
#[doc = "Interrupt status after masking & forcing for dormant_wake"]
pub mod dormant_wake_ints3;
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
use std::env;
extern crate prost_build;
/// Build script: locates the vcpkg-installed protobuf toolchain and compiles
/// `src/indexlog.proto` into Rust sources via `prost-build`.
///
/// Fails the build with a descriptive message when vcpkg, the bundled
/// `protoc`, or the proto compilation step cannot be resolved (the original
/// bare `unwrap()`s gave no hint which step broke).
fn main() {
    // Resolve the protobuf package installed through vcpkg.
    let protopkg = vcpkg::find_package("protobuf")
        .expect("vcpkg could not locate the `protobuf` package");
    let link_path = protopkg
        .link_paths
        .first()
        .expect("vcpkg `protobuf` package reported no link paths");
    let protobuf_path = link_path
        .parent()
        .expect("protobuf link path has no parent directory");

    // Point prost-build at the bundled protoc compiler.
    // NOTE(review): `protoc.exe` makes this script Windows-only — confirm
    // whether non-Windows targets are expected to use a system `protoc`.
    let protobuf_bin_path = protobuf_path
        .join("tools")
        .join("protobuf")
        .join("protoc.exe")
        .to_str()
        .expect("protoc path is not valid UTF-8")
        .to_string();
    env::set_var("PROTOC", protobuf_bin_path);

    // Expose the protobuf include directory for imports inside .proto files.
    let protobuf_inc_path = protobuf_path
        .join("include")
        .join("google")
        .join("protobuf")
        .to_str()
        .expect("protobuf include path is not valid UTF-8")
        .to_string();
    env::set_var("PROTOC_INCLUDE", protobuf_inc_path);

    prost_build::compile_protos(&["src/indexlog.proto"], &["src/"])
        .expect("failed to compile src/indexlog.proto");
}
|
use std::time::{Duration, Instant};
// NOTE: Rust doesn't have a built-in random number generator so I did need to use a package for that
use rand::distributions::{Distribution, Uniform};
fn main() {
let min = 1_000_000;
let max = 10_000_000;
let step = 1_000_000;
let mut durations = Vec::new();
for n in (min..=max).step_by(step) {
durations.push(build_and_sort_randos(n));
}
println!("\nRunning times for build_and_count_randos");
for d in durations {
println!("n={}, time (ns)={:?}", d.0, d.1.as_nanos());
}
}
// TODO: get a better understanding of Rust's ownership feature (references and
// borrowing) and then rewrite this function to accept the array as a parameter.
/// Builds an `n`-element vector of uniform random values in `1..=3`, then
/// sorts it by counting each value and rewriting the vector in place.
///
/// Returns `(n, elapsed)` where `elapsed` covers only the counting sort.
/// BUG FIX: the original captured `start.elapsed()` AFTER debug-printing the
/// entire (up to 10M element) vector, so the reported "sort time" was
/// dominated by I/O; the elapsed time is now taken before the print.
///
/// # Panics
/// Panics if a sampled value falls outside `1..=3` (impossible with the
/// distribution used; kept as a guard against future edits).
fn build_and_sort_randos(n: i32) -> (i32, Duration) {
    // build
    let mut rng = rand::thread_rng();
    // Assumption: range for uniform distribution intended to be inclusive
    let dist = Uniform::from(1..=3);
    let mut randos: Vec<i32> = (0..n).map(|_| dist.sample(&mut rng)).collect();

    // sort: count occurrences of each value, then overwrite in sorted order.
    let start = Instant::now();
    let mut ones = 0;
    let mut twos = 0;
    let mut threes = 0;
    for value in randos.iter() {
        match value {
            1 => ones += 1,
            2 => twos += 1,
            3 => threes += 1,
            _ => panic!("this isn't the value I'm looking for"),
        }
    }
    // Rewrite in place: `ones` 1s, then `twos` 2s, then `threes` 3s.
    for i in 0..ones {
        randos[i] = 1;
    }
    for j in ones..(ones + twos) {
        randos[j] = 2;
    }
    for k in (ones + twos)..(ones + twos + threes) {
        randos[k] = 3;
    }
    // Stop the clock before the debug print so I/O is not measured.
    let elapsed = start.elapsed();
    println!("{:?}", randos);
    (n, elapsed)
}
|
use std::ffi::{CString, CStr};
use std::os::raw::{c_int, c_char};
use std::slice;
use raw::{aw, vp};
use attributes::{AttribValue, Attrib};
use instance::Instance;
/// Reads the string attribute `vp_attribute` from the VP SDK instance and
/// copies it into an owned `CString`.
///
/// NOTE(review): no null check before `CStr::from_ptr` — presumably the SDK
/// never returns null for string attributes (a null here would be UB);
/// confirm against the VP SDK documentation.
fn vp_string(instance: &mut Instance, vp_attribute: vp::string_attribute_t) -> CString {
    unsafe {
        CStr::from_ptr(vp::string(instance.vp, vp_attribute) as *const c_char).to_owned()
    }
}
/// Identity pass-through for a `c_int` value.
fn cint(val: c_int) -> c_int {
    val
}
/// Converts an AW coordinate (integer, 1/1000 units) into a VP coordinate
/// (`f32`) by dividing by 1000.
fn coord_aw_to_vp(aw: c_int) -> f32 {
    let raw = aw as f32;
    raw / 1000.0
}
/// Converts a VP coordinate (`f32`) back to AW units (1/1000) by scaling up
/// by 1000 and truncating toward zero.
fn coord_vp_to_aw(vp: f32) -> c_int {
    let scaled = vp * 1000.0;
    scaled as c_int
}
/// Converts a yaw between the AW and VP conventions by reflecting it around
/// 5400 and reducing modulo 3600 (tenths of a degree).
///
/// Uses `rem_euclid` so the result always lies in `0..3600`; the previous
/// sign-preserving `%` produced a negative (invalid) angle for any input
/// above 5400. In-range inputs are unaffected.
fn flip_yaw(yaw: c_int) -> c_int {
    (5400 - yaw).rem_euclid(3600)
}
/// AW yaw (tenths of a degree, flipped convention) -> VP yaw (`f32` degrees).
fn yaw_aw_to_vp(aw: c_int) -> f32 {
    let flipped = flip_yaw(aw);
    flipped as f32 / 10.0
}
/// VP yaw (`f32` degrees) -> AW yaw (tenths of a degree, flipped convention).
fn yaw_vp_to_aw(vp: f32) -> c_int {
    let tenths = (vp * 10.0) as c_int;
    flip_yaw(tenths)
}
/// Like `vp::string`, but logs the attribute's value (or its nullness) at
/// debug level before returning the raw pointer unchanged.
///
/// # Safety
/// `vp` must be a live VP instance; the returned pointer is only valid for
/// as long as the SDK keeps the underlying buffer alive.
unsafe fn debug_vp_string(vp: vp::VPInstance, attribute: vp::string_attribute_t) -> *mut c_char {
    let vpstring = vp::string(vp, attribute);
    if vpstring.is_null() {
        debug!("vp_string({:?}, {:?}) is null", vp, attribute);
    } else {
        // Copy before logging so the log line owns its data independently of
        // the SDK buffer.
        let copy = CStr::from_ptr(vpstring as *const c_char).to_owned();
        debug!("vp_string({:?}, {:?}) = {:?}", vp, attribute, copy);
    }
    vpstring
}
/// Copies the raw byte payload of a VP data attribute into an owned `Vec<u8>`.
fn vp_data(vp: vp::VPInstance, attribute: vp::data_attribute_t) -> Vec<u8> {
    let mut length: c_int = 0;
    unsafe {
        // SAFETY: relies on `vp::data` writing the payload length to `length`
        // and returning a pointer valid for that many bytes.
        // NOTE(review): if the SDK can return null (even with length 0),
        // `slice::from_raw_parts` on a null pointer is UB — confirm and guard.
        let ptr = vp::data(vp, attribute, &mut length) as *const u8;
        let slice = slice::from_raw_parts(ptr, length as usize);
        slice.to_owned()
    }
}
/// Extension methods presenting an ActiveWorlds-style attribute API on top of
/// a VP `Instance`.
pub trait InstanceExt {
    /// Reads `attribute`: a local override of the right type wins, otherwise
    /// the value is fetched from the VP SDK; `None` when unavailable.
    fn get<T: Attrib>(&mut self, attribute: aw::ATTRIBUTE) -> Option<T>;
    /// Writes `attribute` through to the VP SDK, or to local storage for
    /// attributes with no SDK mapping.
    fn set<T: Attrib>(&mut self, attribute: aw::ATTRIBUTE, value: T);
    /// Installs (`Some`) or clears (`None`) a local override for `attribute`.
    fn set_override<T: Attrib>(&mut self, attribute: aw::ATTRIBUTE, value: Option<T>);
}
impl InstanceExt for Instance {
    /// Stores (`Some`) or removes (`None`) a local override; overrides take
    /// priority over SDK values in `get`.
    fn set_override<T: Attrib>(&mut self, attribute: aw::ATTRIBUTE, value: Option<T>) {
        match value {
            Some(value) => {
                self.overrides.insert(attribute, value.to_attrib());
            },
            None => {
                self.overrides.remove(&attribute);
            }
        }
    }
    /// Reads an AW attribute, checking local overrides first, then mapping to
    /// the matching VP SDK call (with unit conversion for coordinates/yaw),
    /// and finally falling back to locally stored attributes.
    fn get<T: Attrib>(&mut self, attribute: aw::ATTRIBUTE) -> Option<T> {
        // A stored override of the requested type short-circuits the SDK.
        if let Some(overridden) = self.overrides.get(&attribute).and_then(|attrib| T::from_attrib(attrib)) {
            return Some(overridden);
        }
        match attribute {
            aw::ATTRIBUTE::CITIZEN_NUMBER => unsafe { vp::int(self.vp, vp::USER_ID) }.into_req(),
            aw::ATTRIBUTE::CITIZEN_NAME => vp_string(self, vp::USER_NAME).into_req(),
            aw::ATTRIBUTE::WORLD_NAME => vp_string(self, vp::WORLD_NAME).into_req(),
            // Hard-coded build numbers for AW-protocol compatibility.
            aw::ATTRIBUTE::WORLD_BUILD_NUMBER => 69.into_req(), // Needed for Xelagot to whisper, 69 = latest 3.6
            aw::ATTRIBUTE::UNIVERSE_BUILD_NUMBER => 100.into_req(),
            aw::ATTRIBUTE::CHAT_MESSAGE => vp_string(self, vp::CHAT_MESSAGE).into_req(),
            aw::ATTRIBUTE::AVATAR_NAME => vp_string(self, vp::AVATAR_NAME).into_req(),
            aw::ATTRIBUTE::AVATAR_SESSION => unsafe { vp::int(self.vp, vp::AVATAR_SESSION) }.into_req(),
            // NOTE(review): CHAT_SESSION deliberately reads AVATAR_SESSION —
            // confirm the two are meant to be identical in VP.
            aw::ATTRIBUTE::CHAT_SESSION => unsafe { vp::int(self.vp, vp::AVATAR_SESSION) }.into_req(),
            // Permissive world capabilities: everything enabled, all rights
            // granted to everyone ("*").
            aw::ATTRIBUTE::WORLD_SPEAK_CAPABILITY => 1.into_req(),
            aw::ATTRIBUTE::WORLD_ALLOW_TOURIST_WHISPER => 1.into_req(),
            aw::ATTRIBUTE::WORLD_ALLOW_CITIZEN_WHISPER => 1.into_req(),
            aw::ATTRIBUTE::WORLD_SPEAK_RIGHT => CString::new("*").ok().and_then(|cstr| cstr.into_req()),
            aw::ATTRIBUTE::WORLD_BUILD_RIGHT => CString::new("*").ok().and_then(|cstr| cstr.into_req()),
            aw::ATTRIBUTE::WORLD_ENTER_RIGHT => CString::new("*").ok().and_then(|cstr| cstr.into_req()),
            aw::ATTRIBUTE::WORLD_SPECIAL_OBJECTS_RIGHT => CString::new("*").ok().and_then(|cstr| cstr.into_req()),
            aw::ATTRIBUTE::WORLD_BOTS_RIGHT => CString::new("*").ok().and_then(|cstr| cstr.into_req()),
            aw::ATTRIBUTE::WORLD_SPECIAL_COMMANDS_RIGHT => CString::new("*").ok().and_then(|cstr| cstr.into_req()),
            // Own position/orientation, converted from VP floats to AW ints.
            aw::ATTRIBUTE::MY_X => coord_vp_to_aw(unsafe { vp::float(self.vp, vp::MY_X) }).into_req(),
            aw::ATTRIBUTE::MY_Z => coord_vp_to_aw(unsafe { vp::float(self.vp, vp::MY_Z) }).into_req(),
            aw::ATTRIBUTE::MY_Y => coord_vp_to_aw(unsafe { vp::float(self.vp, vp::MY_Y) }).into_req(),
            aw::ATTRIBUTE::MY_YAW => yaw_vp_to_aw(unsafe { vp::float(self.vp, vp::MY_YAW) }).into_req(),
            aw::ATTRIBUTE::MY_TYPE => unsafe { vp::int(self.vp, vp::MY_TYPE) }.into_req(),
            // Other avatars' state, same conversions.
            aw::ATTRIBUTE::AVATAR_TYPE => unsafe { vp::int(self.vp, vp::AVATAR_TYPE) }.into_req(),
            aw::ATTRIBUTE::AVATAR_X => coord_vp_to_aw(unsafe { vp::float(self.vp, vp::AVATAR_X) }).into_req(),
            aw::ATTRIBUTE::AVATAR_Z => coord_vp_to_aw(unsafe { vp::float(self.vp, vp::AVATAR_Z) }).into_req(),
            aw::ATTRIBUTE::AVATAR_Y => coord_vp_to_aw(unsafe { vp::float(self.vp, vp::AVATAR_Y) }).into_req(),
            aw::ATTRIBUTE::AVATAR_YAW => yaw_vp_to_aw(unsafe { vp::float(self.vp, vp::AVATAR_YAW) }).into_req(),
            aw::ATTRIBUTE::AVATAR_PRIVILEGE => unsafe { vp::int(self.vp, vp::USER_ID) }.into_req(),
            // A leading '[' in the avatar name yields citizen number 0
            // (presumably marking a non-citizen) — confirm the convention.
            aw::ATTRIBUTE::AVATAR_CITIZEN => unsafe {
                let name = vp_string(self, vp::AVATAR_NAME);
                if name.as_bytes().first() == Some(&b'[') {
                    0
                } else {
                    vp::int(self.vp, vp::USER_ID)
                }
            }.into_req(),
            // Anything unmapped falls back to locally stored attributes.
            _ => self.attributes.get(attribute)
        }
    }
    /// Writes an AW attribute through to the VP SDK where a mapping exists
    /// (converting units), storing anything else locally.
    ///
    /// Panics via `expect` if `value` has the wrong type for the attribute.
    fn set<T: Attrib>(&mut self, attribute: aw::ATTRIBUTE, value: T) {
        match attribute {
            aw::ATTRIBUTE::WORLD_NAME => unsafe { vp::string_set(self.vp, vp::WORLD_NAME, value.into_req().expect("Wrong type for attribute!")); },
            aw::ATTRIBUTE::CHAT_MESSAGE => unsafe { vp::string_set(self.vp, vp::CHAT_MESSAGE, value.into_req().expect("Wrong type for attribute!")); },
            aw::ATTRIBUTE::MY_TYPE => unsafe { vp::int_set(self.vp, vp::MY_TYPE, value.into_req().expect("Wrong type for attribute!")); },
            aw::ATTRIBUTE::MY_X => unsafe { vp::float_set(self.vp, vp::MY_X, coord_aw_to_vp(value.into_req().expect("Wrong type for attribute!"))); },
            aw::ATTRIBUTE::MY_Z => unsafe { vp::float_set(self.vp, vp::MY_Z, coord_aw_to_vp(value.into_req().expect("Wrong type for attribute!"))); },
            aw::ATTRIBUTE::MY_Y => unsafe { vp::float_set(self.vp, vp::MY_Y, coord_aw_to_vp(value.into_req().expect("Wrong type for attribute!"))); },
            aw::ATTRIBUTE::MY_YAW => unsafe { vp::float_set(self.vp, vp::MY_YAW, yaw_aw_to_vp(value.into_req().expect("Wrong type for attribute!"))); },
            _ => self.attributes.set(attribute, value)
        }
    }
}
//! Uses a trait that has general page table managing functions.
use super::frame_allocator::FRAME_ALLOCATOR;
use super::page_table::{Level1, Level2, Level4, PageTable};
use super::page_table_entry::{PageTableEntry, PageTableEntryFlags, PRESENT};
use super::{Page, PageFrame};
use core::ops::{Deref, DerefMut};
use memory::{Address, PhysicalAddress, VirtualAddress};
use sync::PreemptionState;
use x86_64::instructions::tlb;
/// A reference to a locked level 1 page table.
///
/// Holds the lock on the parent level 2 entry for its whole lifetime; the
/// lock is released by the `Drop` implementation.
pub struct Level1TableReference<'a> {
    /// The reference to the level 2 table that contains the level 1 table.
    table: &'a mut PageTable<Level2>,
    /// The address of the level 1 table.
    address: VirtualAddress,
    /// The preemption state of the lock.
    preemption_state: PreemptionState
}
impl<'a> Deref for Level1TableReference<'a> {
    type Target = PageTable<Level1>;
    /// Resolves the level 1 table through the locked level 2 table.
    fn deref(&self) -> &PageTable<Level1> {
        // `unwrap` relies on the constructor having verified the level 2
        // entry points to a level 1 table before handing out this reference.
        self.table.get_next_level(self.address).unwrap()
    }
}
impl<'a> DerefMut for Level1TableReference<'a> {
    /// Mutable counterpart of `deref`; same existence invariant applies.
    fn deref_mut(&mut self) -> &mut PageTable<Level1> {
        self.table.get_next_level_mut(self.address).unwrap()
    }
}
impl<'a> Drop for Level1TableReference<'a> {
    /// Releases the lock on the parent level 2 entry, restoring the
    /// preemption state captured when the lock was taken.
    fn drop(&mut self) {
        let table_index = PageTable::<Level2>::table_index(self.address);
        let l2_entry = &mut self.table[table_index];
        l2_entry.unlock(&self.preemption_state);
    }
}
/// A reference to a page table entry in a locked level 1 page table.
pub struct PageTableEntryReference<'a> {
    // Keeps the level 1 table (and therefore the level 2 entry lock) alive
    // while the entry is borrowed.
    table_reference: Level1TableReference<'a>
}
impl<'a> Deref for PageTableEntryReference<'a> {
    type Target = PageTableEntry;
    /// Indexes the locked level 1 table at the entry for the stored address.
    fn deref(&self) -> &PageTableEntry {
        let index = PageTable::<Level1>::table_index(self.table_reference.address);
        &self.table_reference[index]
    }
}
impl<'a> DerefMut for PageTableEntryReference<'a> {
    /// Mutable counterpart of `deref`.
    fn deref_mut(&mut self) -> &mut PageTableEntry {
        let index = PageTable::<Level1>::table_index(self.table_reference.address);
        &mut self.table_reference[index]
    }
}
/// Structs managing a level 4 page table and it's decendants can implement
/// this to manage paging.
pub trait PageTableManager {
/// Returns a mutable reference to the level 4 page table.
fn get_l4(&mut self) -> &mut PageTable<Level4>;
/// Returns the corresponding physical address to a virtual address.
fn translate_address(&mut self, address: VirtualAddress) -> Option<PhysicalAddress> {
self.get_l1(address)
.and_then(|l1| l1[PageTable::<Level1>::table_index(address)].points_to())
.map(|page_address| page_address + (address.as_usize() & 0xfff))
}
/// Returns a mutable reference to the level 1 table corresponding to the
/// given address.
fn get_l1(&mut self, address: VirtualAddress) -> Option<Level1TableReference> {
assert!(valid_address!(address));
let table_index = PageTable::<Level2>::table_index(address);
let preemption_state = {
let l4 = self.get_l4();
let l2 = l4
.get_next_level_mut(address)
.and_then(|l3| l3.get_next_level_mut(address));
match l2 {
Some(table) => {
let l2_entry = &mut table[table_index];
if l2_entry.points_to().is_some() {
Some(l2_entry.lock())
} else {
None
}
},
None => None
}
};
match preemption_state {
Some(preemption_state) => Some(Level1TableReference {
table: self
.get_l4()
.get_next_level_mut(address)
.and_then(|l3| l3.get_next_level_mut(address))
.unwrap(),
address,
preemption_state
}),
None => None
}
}
/// Returns a reference to the level 1 table at the given address, possibly
/// creating it.
///
/// This creates new page tables if the parent tables for the wanted table
/// are not already mapped.
fn get_l1_and_map(&mut self, address: VirtualAddress) -> Level1TableReference {
assert!(valid_address!(address));
let table_index = PageTable::<Level2>::table_index(address);
let preemption_state = {
let l2 = self
.get_l4()
.next_level_and_map(address)
.next_level_and_map(address);
let l2_entry = &mut l2[table_index];
l2_entry.lock()
};
// Make sure the next level is mapped.
self.get_l4()
.next_level_and_map(address)
.next_level_and_map(address)
.next_level_and_map(address);
Level1TableReference {
table: self
.get_l4()
.next_level_and_map(address)
.next_level_and_map(address),
address,
preemption_state
}
}
/// Returns a reference to the page table entry corresponding to the given
/// address.
///
/// This creates new page tables if the parent tables for the wanted table
/// are not already mapped.
fn get_entry_and_map(&mut self, address: VirtualAddress) -> PageTableEntryReference {
let l1 = self.get_l1_and_map(address);
PageTableEntryReference {
table_reference: l1
}
}
/// Returns a mutable reference to the level 1 page table entry
/// corresponding to the given address.
fn get_entry(&mut self, address: VirtualAddress) -> Option<PageTableEntryReference> {
let l1 = self.get_l1(address);
l1.map(|l1| PageTableEntryReference {
table_reference: l1
})
}
/// Maps the given page to the given frame with the given flags.
fn map_page_at(&mut self, page: Page, frame: PageFrame, flags: PageTableEntryFlags) {
    let target_address = page.get_address();
    // In debug builds, catch attempts to map a page that is already present.
    if let Some(existing) = self.get_entry(target_address) {
        debug_assert!(
            !existing.flags().contains(PRESENT),
            "Trying to double map page {:?}",
            target_address
        );
    }
    // Create any missing parent tables, then point the entry at the frame.
    let mut entry = self.get_entry_and_map(target_address);
    entry
        .set_address(frame.get_address())
        .set_flags(flags | PRESENT);
}
/// Maps the given page to an allocated frame with the given flags.
fn map_page(&mut self, page: Page, flags: PageTableEntryFlags) {
    // In debug builds, catch attempts to map a page that is already present.
    if let Some(existing) = self.get_entry(page.get_address()) {
        debug_assert!(
            !existing.flags().contains(PRESENT),
            "Trying to double map page {:?}",
            page.get_address()
        );
    }
    // Back the page with a freshly allocated physical frame.
    let frame = FRAME_ALLOCATOR.allocate();
    self.map_page_at(page, frame, flags);
}
/// Changes the permissions of the page or map it, if it wasn't mapped.
fn change_permissions_or_map(&mut self, page: Page, flags: PageTableEntryFlags) {
    // The temporary entry reference is dropped at the end of this statement,
    // so `self` can be borrowed again below.
    let is_mapped = self
        .get_entry(page.get_address())
        .map_or(false, |entry| entry.flags().contains(PRESENT));
    if is_mapped {
        // Already present: just rewrite the flags.
        self.get_entry(page.get_address())
            .unwrap()
            .set_flags(PRESENT | flags);
    } else {
        self.map_page(page, flags);
    }
}
/// Unmaps the given page.
///
/// Panics if the page is not currently mapped.
///
/// # Safety
/// - Make sure the page isn't referenced anywhere anymore.
unsafe fn unmap_page(&mut self, page: Page) {
    // TODO: Consider multiple CPUs.
    // TODO: Consider that the page may still be in use elsewhere (don't free the
    // frame then).
    let entry = self.get_entry(page.get_address());
    entry
        .expect("Trying to unmap a page that isn't mapped.")
        .unmap();
    // Invalidate the stale translation for this address on the current CPU.
    tlb::flush(::x86_64::VirtualAddress(page.get_address().as_usize()));
}
/// Unmaps the given page, not checking if it was mapped.
///
/// Unlike `unmap_page`, a page whose parent tables (or entry) don't exist is
/// silently ignored instead of panicking.
///
/// # Safety
/// - Make sure the page isn't referenced anywhere anymore.
unsafe fn unmap_page_unchecked(&mut self, page: Page) {
    // TODO: Consider multiple CPUs.
    // TODO: Consider that the page may still be in use elsewhere (don't free the
    // frame then).
    let entry = self.get_entry(page.get_address());
    if let Some(mut entry) = entry {
        // Only unmap entries that actually point at a frame.
        if entry.points_to().is_some() {
            entry.unmap();
        }
        // Flush even if nothing was unmapped; the entry existed, so a stale
        // translation may be cached.
        tlb::flush(::x86_64::VirtualAddress(page.get_address().as_usize()));
    }
}
}
|
extern crate fuse;
extern crate libc;
extern crate time;
extern crate hyper;
extern crate rustc_serialize;
extern crate clap;
use std::collections::BTreeMap;
use std::path::Path;
use std::env;
use std::io::prelude::Read;
use libc::{ENOENT, ENOSYS};
use time::Timespec;
use fuse::{FileAttr, FileType, Filesystem, Request, ReplyAttr, ReplyData, ReplyEntry, ReplyDirectory, ReplyOpen};
use hyper::client::{Client, Response};
use rustc_serialize::json::Json;
use clap::{App, Arg};
/// In-memory FUSE filesystem that presents the Twitch API as a directory tree.
struct TwitchFileSystem {
    // File attributes keyed by inode number.
    attrs: BTreeMap<u64, FileAttr>,
    // Name -> inode lookup table.
    inodes: BTreeMap<String, u64>
}
impl TwitchFileSystem {
    /// Creates the filesystem with a single root directory at inode 1.
    fn new() -> TwitchFileSystem {
        let now = time::now().to_timespec();
        // Attributes for the root directory ("/").
        let root_attr = FileAttr {
            ino: 1,
            size: 0,
            blocks: 0,
            atime: now,
            mtime: now,
            ctime: now,
            crtime: now,
            kind: FileType::Directory,
            perm: 0o755,
            nlink: 0,
            uid: 0,
            gid: 0,
            rdev: 0,
            flags: 0
        };
        let mut attrs = BTreeMap::new();
        attrs.insert(1, root_attr);
        let mut inodes = BTreeMap::new();
        inodes.insert("/".to_owned(), 1);
        TwitchFileSystem { attrs, inodes }
    }
}
impl Filesystem for TwitchFileSystem {
    /// Returns the cached attributes for `ino`, or ENOENT if unknown.
    fn getattr(&mut self, _req: &Request, ino: u64, reply: ReplyAttr) {
        match self.attrs.get(&ino) {
            Some(attr) => {
                // Let the kernel cache these attributes for one second.
                let ttl = Timespec::new(1, 0);
                reply.attr(&ttl, attr);
            },
            None => reply.error(ENOENT)
        }
    }
    /// Resolves a name to its inode and attributes.
    ///
    /// NOTE(review): `parent` is ignored, so a name resolves to the same
    /// inode regardless of the directory it is looked up in — confirm this
    /// is acceptable for this tree.
    fn lookup(&mut self, _req: &Request, parent: u64, name: &Path, reply: ReplyEntry) {
        let inode = match self.inodes.get(name.to_str().unwrap()) {
            Some(inode) => inode,
            None => {
                reply.error(ENOENT);
                return;
            }
        };
        match self.attrs.get(inode) {
            Some(attr) => {
                let ttl = Timespec::new(1, 0);
                reply.entry(&ttl, attr, 0);
            },
            None => reply.error(ENOENT),
        };
    }
    /// Fills a directory listing from the Twitch API.
    ///
    /// On the first chunk of a listing (offset 0) the entire inode/attribute
    /// cache is rebuilt: the root (inode 1) is filled with the current top
    /// games; any other directory is filled with streams.
    fn readdir(&mut self, _req: &Request, ino: u64, fh: u64, offset: u64, mut reply: ReplyDirectory) {
        if offset == 0 {
            // Throw away all cached entries and re-create the root entry.
            self.attrs.clear();
            self.inodes.clear();
            let ts = time::now().to_timespec();
            let attr = FileAttr {
                ino: 1,
                size: 0,
                blocks: 0,
                atime: ts,
                mtime: ts,
                ctime: ts,
                crtime: ts,
                kind: FileType::Directory,
                perm: 0o755,
                nlink: 0,
                uid: 0,
                gid: 0,
                rdev: 0,
                flags: 0
            };
            self.attrs.insert(1, attr);
            self.inodes.insert("/".to_owned(), 1);
            if ino == 1 {
                // Root: one subdirectory per top game (inodes start at 2).
                let mut body = String::new();
                // NOTE(review): the Result of read_to_string is discarded;
                // a failed read leaves `body` empty/truncated and falls into
                // the invalid-json branch below.
                Client::new()
                    .get("https://api.twitch.tv/kraken/games/top")
                    .send()
                    .expect("Couldn't load twitch")
                    .read_to_string(&mut body);
                match Json::from_str(&body) {
                    Ok(data) => {
                        let games = data.find("top").unwrap().as_array().unwrap();
                        for (i, game) in games.iter().enumerate() {
                            let attr = FileAttr {
                                ino: i as u64 + 2,
                                size: 0,
                                blocks: 0,
                                atime: ts,
                                mtime: ts,
                                ctime: ts,
                                crtime: ts,
                                kind: FileType::Directory,
                                perm: 0o644,
                                nlink: 0,
                                uid: 0,
                                gid: 0,
                                rdev: 0,
                                flags: 0
                            };
                            let name = game
                                .find_path(&["game", "name"])
                                .unwrap()
                                .as_string()
                                .unwrap();
                            self.attrs.insert(attr.ino, attr);
                            self.inodes.insert(name.to_owned(), attr.ino);
                            // NOTE(review): reply.add's "buffer full" result
                            // is ignored; entries past a full buffer are lost.
                            reply.add(attr.ino, 2, FileType::Directory, &Path::new(name));
                        }
                    },
                    Err(_) => println!("Twitch returned invalid json")
                }
            } else {
                // Non-root directory: list streams for a game
                // (stream inodes start at 30).
                // just for testing
                let game = "Dota 2";
                /*for (name, inode) in self.inodes.clone().iter() {
                if inode == &ino {
                game = name;
                return;
                }
                }*/
                let mut body = String::new();
                let url = format!("https://api.twitch.tv/kraken/streams?game={}", game);
                // NOTE(review): Result of read_to_string ignored here as well.
                Client::new()
                    .get(&url)
                    .send()
                    .expect("Couldn't load twitch")
                    .read_to_string(&mut body);
                match Json::from_str(&body) {
                    Ok(data) => {
                        let streams = data.find("streams").unwrap().as_array().unwrap();
                        for (i, stream) in streams.iter().enumerate() {
                            let attr = FileAttr {
                                ino: i as u64 + 30,
                                size: 0,
                                blocks: 0,
                                atime: ts,
                                mtime: ts,
                                ctime: ts,
                                crtime: ts,
                                kind: FileType::RegularFile,
                                perm: 0o644,
                                nlink: 0,
                                uid: 0,
                                gid: 0,
                                rdev: 0,
                                flags: 0
                            };
                            let name = stream
                                .find_path(&["channel", "name"])
                                .unwrap()
                                .as_string()
                                .unwrap();
                            self.attrs.insert(attr.ino, attr);
                            self.inodes.insert(name.to_owned(), attr.ino);
                            reply.add(attr.ino, 2, FileType::RegularFile, &Path::new(name));
                        }
                    },
                    Err(_) => println!("Twitch returned invalid json")
                }
            }
            reply.ok();
        }
    }
    /// Accepts every open and reuses the inode as the file handle.
    fn opendir(&mut self, _req: &Request, ino: u64, _flags: u32, reply: ReplyOpen) {
        reply.opened(ino, 0);
    }
    // Stub: always returns "test" regardless of inode, offset or size.
    fn read(&mut self, _req: &Request, ino: u64, fh: u64, offset: u64, size: u32, reply: ReplyData) {
        reply.data("test".as_bytes());
    }
}
/// Validates that `mountpoint` names an existing directory.
///
/// Used as a `clap` argument validator, which dictates the
/// `String -> Result<(), String>` signature (argument taken by value).
fn is_valid_dir(mountpoint: String) -> Result<(), String> {
    // `match` on a bool is non-idiomatic (clippy::match_bool); a plain
    // if/else expresses the same check directly.
    if Path::new(&mountpoint).is_dir() {
        Ok(())
    } else {
        Err("Mountpoint must be a directory".to_string())
    }
}
/// Mounts the Twitch filesystem at the directory given as the sole
/// positional command-line argument.
fn main() {
    let matches = App::new("twitch-fs")
        .version(option_env!("CARGO_PKG_VERSION").unwrap_or("unknown version"))
        .arg(Arg::with_name("mountpoint")
            .validator(is_valid_dir)
            .index(1)
            .required(true))
        .get_matches();
    // unwrap() is safe here because the argument is set as required
    let mountpoint = matches.value_of_os("mountpoint").unwrap();
    let fs = TwitchFileSystem::new();
    // Blocks until the filesystem is unmounted.
    fuse::mount(fs , &mountpoint, &[])
}
|
use futures::stream::TryStreamExt;
use crate::{
bson::doc,
error::ErrorKind,
options::{CommitQuorum, CreateIndexOptions, IndexOptions},
test::{
log_uncaptured,
util::{EventClient, TestClient},
},
IndexModel,
};
// Test that creating indexes works as expected.
//
// Integration test: requires a running MongoDB deployment.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
#[function_name::named]
async fn index_management_creates() {
    // Fresh database/collection named after this test function.
    let client = TestClient::new().await;
    let coll = client
        .init_db_and_coll(function_name!(), function_name!())
        .await;
    // Test creating a single index with driver-generated name.
    let result = coll
        .create_index(
            IndexModel::builder().keys(doc! { "a": 1, "b": -1 }).build(),
            None,
        )
        .await
        .expect("Test failed to create index");
    // Generated names follow the "<field>_<direction>" convention.
    assert_eq!(result.index_name, "a_1_b_-1".to_string());
    // Test creating several indexes, with both specified and unspecified names.
    let result = coll
        .create_indexes(
            vec![
                IndexModel::builder().keys(doc! { "c": 1 }).build(),
                IndexModel::builder()
                    .keys(doc! { "d": 1 })
                    .options(
                        IndexOptions::builder()
                            .name("customname".to_string())
                            .build(),
                    )
                    .build(),
            ],
            None,
        )
        .await
        .expect("Test failed to create indexes");
    assert_eq!(
        result.index_names,
        vec!["c_1".to_string(), "customname".to_string()]
    );
    // Pull all index names from db to verify the _id_ index.
    let names = coll
        .list_index_names()
        .await
        .expect("Test failed to list index names");
    assert_eq!(names, vec!["_id_", "a_1_b_-1", "c_1", "customname"]);
}
// Test that creating a duplicate index works as expected.
//
// Re-creating an existing index must succeed (server treats it as a no-op)
// and still report the index name.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
#[function_name::named]
async fn index_management_handles_duplicates() {
    let client = TestClient::new().await;
    let coll = client
        .init_db_and_coll(function_name!(), function_name!())
        .await;
    let result = coll
        .create_index(IndexModel::builder().keys(doc! { "a": 1 }).build(), None)
        .await
        .expect("Test failed to create index");
    assert_eq!(result.index_name, "a_1".to_string());
    // Insert duplicate.
    let result = coll
        .create_index(IndexModel::builder().keys(doc! { "a": 1 }).build(), None)
        .await
        .expect("Test failed to create index");
    assert_eq!(result.index_name, "a_1".to_string());
    // Test partial duplication.
    let result = coll
        .create_indexes(
            vec![
                IndexModel::builder().keys(doc! { "a": 1 }).build(), // Duplicate
                IndexModel::builder().keys(doc! { "b": 1 }).build(), // Not duplicate
            ],
            None,
        )
        .await
        .expect("Test failed to create indexes");
    assert_eq!(
        result.index_names,
        vec!["a_1".to_string(), "b_1".to_string()]
    );
}
// Test that listing indexes works as expected.
//
// Indexes are listed in creation order, starting with the implicit _id_
// index, so the cursor is consumed in a fixed sequence below.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
#[function_name::named]
async fn index_management_lists() {
    let client = TestClient::new().await;
    let coll = client
        .init_db_and_coll(function_name!(), function_name!())
        .await;
    let insert_data = vec![
        IndexModel::builder().keys(doc! { "a": 1 }).build(),
        IndexModel::builder().keys(doc! { "b": 1, "c": 1 }).build(),
        // The last index is unique, to verify the flag round-trips.
        IndexModel::builder()
            .keys(doc! { "d": 1 })
            .options(IndexOptions::builder().unique(Some(true)).build())
            .build(),
    ];
    coll.create_indexes(insert_data.clone(), None)
        .await
        .expect("Test failed to create indexes");
    let expected_names = vec![
        "_id_".to_string(),
        "a_1".to_string(),
        "b_1_c_1".to_string(),
        "d_1".to_string(),
    ];
    let mut indexes = coll
        .list_indexes(None)
        .await
        .expect("Test failed to list indexes");
    let id = indexes.try_next().await.unwrap().unwrap();
    assert_eq!(id.get_name().unwrap(), expected_names[0]);
    assert!(!id.is_unique());
    let a = indexes.try_next().await.unwrap().unwrap();
    assert_eq!(a.get_name().unwrap(), expected_names[1]);
    assert!(!a.is_unique());
    let b_c = indexes.try_next().await.unwrap().unwrap();
    assert_eq!(b_c.get_name().unwrap(), expected_names[2]);
    assert!(!b_c.is_unique());
    // Unique index.
    let d = indexes.try_next().await.unwrap().unwrap();
    assert_eq!(d.get_name().unwrap(), expected_names[3]);
    assert!(d.is_unique());
    // The cursor must be exhausted after the four indexes above.
    assert!(indexes.try_next().await.unwrap().is_none());
    let names = coll
        .list_index_names()
        .await
        .expect("Test failed to list index names");
    assert_eq!(names, expected_names);
}
// Test that dropping indexes works as expected.
//
// Covers both dropping a single named index and dropping all indexes at
// once (which must preserve the implicit _id_ index).
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
#[function_name::named]
async fn index_management_drops() {
    let client = TestClient::new().await;
    let coll = client
        .init_db_and_coll(function_name!(), function_name!())
        .await;
    let result = coll
        .create_indexes(
            vec![
                IndexModel::builder().keys(doc! { "a": 1 }).build(),
                IndexModel::builder().keys(doc! { "b": 1 }).build(),
                IndexModel::builder().keys(doc! { "c": 1 }).build(),
            ],
            None,
        )
        .await
        .expect("Test failed to create multiple indexes");
    assert_eq!(
        result.index_names,
        vec!["a_1".to_string(), "b_1".to_string(), "c_1".to_string()]
    );
    // Test dropping single index.
    coll.drop_index("a_1", None)
        .await
        .expect("Test failed to drop index");
    let names = coll
        .list_index_names()
        .await
        .expect("Test failed to list index names");
    assert_eq!(names, vec!["_id_", "b_1", "c_1"]);
    // Test dropping several indexes.
    coll.drop_indexes(None)
        .await
        .expect("Test failed to drop indexes");
    let names = coll
        .list_index_names()
        .await
        .expect("Test failed to list index names");
    assert_eq!(names, vec!["_id_"]);
}
// Test that index management commands execute the expected database commands.
//
// Uses an EventClient to count command-started events; the counts below
// depend on the exact order of the operations, so do not reorder them.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
#[function_name::named]
async fn index_management_executes_commands() {
    let client = EventClient::new().await;
    let coll = client
        .init_db_and_coll(function_name!(), function_name!())
        .await;
    // Collection::create_index and Collection::create_indexes execute createIndexes.
    assert_eq!(
        client.get_command_started_events(&["createIndexes"]).len(),
        0
    );
    coll.create_index(IndexModel::builder().keys(doc! { "a": 1 }).build(), None)
        .await
        .expect("Create Index op failed");
    assert_eq!(
        client.get_command_started_events(&["createIndexes"]).len(),
        1
    );
    // A batched create_indexes call issues a single createIndexes command.
    coll.create_indexes(
        vec![
            IndexModel::builder().keys(doc! { "b": 1 }).build(),
            IndexModel::builder().keys(doc! { "c": 1 }).build(),
        ],
        None,
    )
    .await
    .expect("Create Indexes op failed");
    assert_eq!(
        client.get_command_started_events(&["createIndexes"]).len(),
        2
    );
    // Collection::list_indexes and Collection::list_index_names execute listIndexes.
    assert_eq!(client.get_command_started_events(&["listIndexes"]).len(), 0);
    coll.list_indexes(None).await.expect("List index op failed");
    assert_eq!(client.get_command_started_events(&["listIndexes"]).len(), 1);
    coll.list_index_names().await.expect("List index op failed");
    assert_eq!(client.get_command_started_events(&["listIndexes"]).len(), 2);
    // Collection::drop_index and Collection::drop_indexes execute dropIndexes.
    assert_eq!(client.get_command_started_events(&["dropIndexes"]).len(), 0);
    coll.drop_index("a_1", None)
        .await
        .expect("Drop index op failed");
    assert_eq!(client.get_command_started_events(&["dropIndexes"]).len(), 1);
    coll.drop_indexes(None)
        .await
        .expect("Drop indexes op failed");
    assert_eq!(client.get_command_started_events(&["dropIndexes"]).len(), 2);
}
// Test that an unsupported commitQuorum option surfaces as an error on old
// servers and succeeds on servers that support it.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
#[function_name::named]
async fn commit_quorum_error() {
    let client = TestClient::new().await;
    // commitQuorum only applies to replica sets / sharded clusters.
    if client.is_standalone() {
        log_uncaptured("skipping commit_quorum_error due to standalone topology");
        return;
    }
    let coll = client
        .init_db_and_coll(function_name!(), function_name!())
        .await;
    let model = IndexModel::builder().keys(doc! { "x": 1 }).build();
    let options = CreateIndexOptions::builder()
        .commit_quorum(CommitQuorum::Majority)
        .build();
    let result = coll.create_index(model, options).await;
    // commitQuorum was introduced in server 4.4; on older servers the driver
    // must reject the option as an invalid argument.
    if client.server_version_lt(4, 4) {
        let err = result.unwrap_err();
        assert!(matches!(*err.kind, ErrorKind::InvalidArgument { .. }));
    } else {
        assert!(result.is_ok());
    }
}
|
/// Check a Luhn checksum.
///
/// Spaces are ignored; any other non-digit character, or a stripped length
/// of one or fewer characters, makes the input invalid.
pub fn is_valid(code: &str) -> bool {
    // Parse every non-space character as a decimal digit; a single
    // unparsable character collapses the whole collection to `None`.
    let digits = code
        .chars()
        .filter(|&c| c != ' ')
        .map(|c| c.to_digit(10))
        .collect::<Option<Vec<u32>>>();
    let digits = match digits {
        Some(d) if d.len() > 1 => d,
        _ => return false,
    };
    // Walk from the rightmost digit: every second digit is doubled, and
    // doubled values above 9 have 9 subtracted.
    let checksum: u32 = digits
        .iter()
        .rev()
        .enumerate()
        .map(|(i, &d)| {
            if i % 2 == 0 {
                d
            } else {
                let doubled = d * 2;
                if doubled > 9 { doubled - 9 } else { doubled }
            }
        })
        .sum();
    checksum % 10 == 0
}
|
mod compiler;
use compiler::lifetime_elision::*;
// All references share the same scope here, so everything borrow-checks fine!
fn main() {
    // Both cities live in the same scope, so either can be borrowed by the
    // comparison and the result remains valid for the println below.
    let first_city = City::new(54);
    let second_city = City::new(30);
    let bigger = compare_size_with_lifetimes(&first_city, &second_city);
    println!("{}", bigger.size_in_sqm);
}
// Calling this means that city2 is in a different lifetime
// fn main() {
// let city = City::new(54);
// let want_to_do_something = true;
// let result;
// if(want_to_do_something) {
// let city2 = City::new(30);
// result = compare_size_with_lifetimes(&city, &city2);
// }
// println!("{}", result.size_in_sqm);
// }
// // Calling these will work since result, city and city2 are used in the same or greater lifetime
// fn main() {
// let city = City::new(54);
// let want_to_do_something = true;
// let result;
// if(want_to_do_something) {
// let city2 = City::new(30);
// result = compare_size_with_lifetimes(&city, &city2);
// println!("{}", result.size_in_sqm);
// }
// }
// // Calling this will work since result, city and city2 are used in the same or greater lifetime
// fn main() {
// let city = City::new(54);
// let want_to_do_something = true;
// let result;
// let city2;
// if(want_to_do_something) {
// city2 = City::new(30);
// result = compare_size_with_lifetimes(&city, &city2);
// } else {
// result = compare_size_with_lifetimes(&city, &city); // this is a work around
// }
// println!("{}", result.size_in_sqm);
// }
// // like wise if only one lifetime is relevant that is the only one enforces
// fn main() {
// let city = City::new(54);
// let city3 = City::new(54);
// let want_to_do_something = true;
// let result;
// if(want_to_do_something) {
// let city2 = City::new(30);
// result = do_something_with_cities(&city, &city2, &city);
// // --> println! here
// }
// println!("{}", result.size_in_sqm);
// }
// fn main() {
// let city2 = City::new(30);
// let want_to_do_something = true;
// let mut result = &City::new(0);
// if(want_to_do_something) {
// let city = City::new(54);
// let city3 = City::new(54);
// result = do_something_with_cities(&city, &city2, &city);
// // --> println! here
// }
// println!("{}", result.size_in_sqm);
// } |
#![feature(vec_remove_item)]
use std::collections::HashMap;
use std::collections::HashSet;
/// A node is identified by its name in the puzzle input.
type NodeID = &'static str;
/// Adjacency map: node id -> comma-separated list of neighbour ids.
type Graph = HashMap<NodeID, &'static str>;
/// A connected component of the graph.
type Group = HashSet<NodeID>;
fn main() {
    let input = include_str!("input.txt");
    let graph = parse_graph(input);
    // Part 1: size of the component containing node "0".
    let group_zero = get_group_by_root("0", &graph);
    println!("Answer #1: {}", group_zero.len());
    // Part 2: total number of connected components.
    let groups = get_all_groups(&graph);
    println!("Answer #2: {}", groups.len());
}
/// Collects every node reachable from `root_id` through the adjacency lists.
fn get_group_by_root(root_id: NodeID, graph: &Graph) -> Group {
    let mut queue = vec![root_id];
    let mut seen = HashSet::new();
    // Keep taking candidates from the queue until it is empty.
    while let Some(current) = queue.pop() {
        seen.insert(current);
        // Neighbours are stored as one comma-separated string.
        for neighbour in graph.get(current).unwrap().split(", ") {
            // Only enqueue nodes we have not visited yet.
            if !seen.contains(neighbour) {
                queue.push(neighbour);
            }
        }
    }
    seen
}
/// Partitions the graph into its connected components.
fn get_all_groups(graph: &Graph) -> Vec<Group> {
    // Nodes that have not yet been assigned to a component.
    let mut remaining: Vec<&str> = graph.keys().copied().collect();
    let mut groups: Vec<Group> = vec![];
    // Take the next unassigned node and flood-fill its component.
    while let Some(next) = remaining.pop() {
        let group = get_group_by_root(next, graph);
        // Drop every member of the component in one O(n) pass. The previous
        // code used the unstable `Vec::remove_item` (since removed from the
        // standard library) once per member, which was also O(n * m).
        remaining.retain(|node| !group.contains(node));
        groups.push(group);
    }
    groups
}
fn parse_graph(input: &'static str) -> Graph {
let mut graph = Graph::new();
for line in input.lines() {
let split: Vec<&'static str> = line.split(" <-> ").collect();
graph.insert(split[0], split[1]);
}
graph
}
#[cfg(test)]
mod tests {
    use super::*;
    // Both tests use the sample graph from the puzzle description
    // (test_input.txt).
    #[test]
    fn test_get_group_by_root() {
        let input = include_str!("test_input.txt");
        let graph = parse_graph(input);
        // Node "0" belongs to a six-node component in the sample.
        assert_eq!(6, get_group_by_root("0", &graph).len());
    }
    #[test]
    fn test_get_all_groups() {
        let input = include_str!("test_input.txt");
        let graph = parse_graph(input);
        // The sample graph splits into exactly two components.
        assert_eq!(2, get_all_groups(&graph).len());
    }
}
|
use std::fs;
use std::io::Error as IOError;
use std::path::Path;
/// A consuming reader over an in-memory byte buffer.
pub struct Reader {
    // Bytes not yet consumed; the front of the vector is the read position.
    underlying_buffer: Vec<u8>,
}
impl Reader {
    /// Creates a reader over any byte source convertible into a `Vec<u8>`.
    pub fn new<T: Into<Vec<u8>>>(buffer: T) -> Reader {
        Reader {
            underlying_buffer: buffer.into(),
        }
    }
    /// Creates a reader over the contents of the file at `path`.
    ///
    /// # Errors
    /// Returns any I/O error produced while reading the file.
    pub fn new_from_path<T: AsRef<Path>>(path: T) -> Result<Reader, IOError> {
        Ok(Reader {
            underlying_buffer: fs::read(path)?,
        })
    }
    /// Consumes the next `length` bytes and converts them into `T`.
    ///
    /// # Panics
    /// Panics if fewer than `length` bytes remain.
    pub fn read<T: From<Vec<u8>>>(&mut self, length: usize) -> T {
        // `drain(..length)` yields exactly `length` bytes, so the former
        // redundant `.take(length)` has been dropped.
        let buff = self
            .underlying_buffer
            .drain(..length)
            .collect::<Vec<u8>>()
            .into();
        // Keeps memory usage proportional to the unread remainder, at the
        // cost of a possible reallocation per call.
        self.underlying_buffer.shrink_to_fit();
        buff
    }
    /// Consumes and returns the next single byte.
    ///
    /// # Panics
    /// Panics if the buffer is empty.
    pub fn read_byte(&mut self) -> u8 {
        let byte = self.underlying_buffer.remove(0);
        self.underlying_buffer.shrink_to_fit();
        byte
    }
    /// Discards the next `length` bytes.
    ///
    /// # Panics
    /// Panics if fewer than `length` bytes remain.
    pub fn remove(&mut self, length: usize) {
        // Bug fix: the previous implementation called `remove(indx)` with
        // increasing indices while the vector shifted left after each
        // removal, so it skipped every other byte (and could panic) instead
        // of dropping the first `length` bytes.
        self.underlying_buffer.drain(..length);
        self.underlying_buffer.shrink_to_fit();
    }
}
|
use std::ffi::CStr;
use std::mem::forget;
use std::os::raw::c_char;
use crate::parser::Parser;
use crate::rasterbackend::RasterBackend;
use std::time::Duration;
/// C-compatible image buffer handed across the FFI boundary.
///
/// A null `data` pointer is the error sentinel produced by `render`.
/// Ownership of `data` passes to the caller, who must release it with
/// `free_picture_buffer`.
#[repr(C)]
pub struct PictureBuffer {
    /// data in rgba8888 format
    data: *const u8,
    /// length of the buffer
    len: u32,
    /// stride of the buffer
    stride: u32,
    /// depth of the buffer
    depth: u32,
}
/// C-compatible render parameters passed by the caller of `render`.
#[repr(C)]
pub struct RenderSettings {
    /// width of the image
    width: u32,
    /// height of the image
    height: u32,
    /// embed a size hint
    size_hint: bool,
    /// max duration of the rendering, 0 to disable
    ///
    /// NOTE(review): `render` in this file never reads this field — confirm
    /// whether the timeout should be wired through to the backend.
    timeout: u64,
}
#[no_mangle]
/// Renders a mesh to a picture
/// Free the buffer with free_picture_buffer
///
/// On any failure (file cannot be parsed or read) a sentinel buffer with a
/// null `data` pointer and zeroed fields is returned instead.
pub extern "C" fn render(path: *const c_char, settings: RenderSettings) -> PictureBuffer {
    // Caller must supply a valid, NUL-terminated, UTF-8 path; invalid UTF-8
    // makes the `unwrap` panic.
    let path = unsafe { CStr::from_ptr(path).to_str().unwrap() };
    let mut backend = RasterBackend::new(settings.width, settings.height);
    let parser = Parser::from_file(path, true);
    if let Ok(mut parser) = parser {
        let mesh = parser.read_all();
        if let Ok(mesh) = mesh {
            // Fit the whole mesh into the viewport.
            let (aabb, scale) = backend.fit_mesh_scale(&mesh);
            // set flags
            backend.render_options.draw_size_hint = settings.size_hint;
            // render
            let mut pic = backend.render(&mesh, scale, &aabb, None);
            let boxed_data = pic.data_as_boxed_slice();
            let data = boxed_data.as_ptr();
            let len = pic.data().len() as u32;
            let stride = pic.stride() as u32;
            let depth = pic.depth() as u32;
            // leak the memory owned by boxed_data; ownership transfers to
            // the caller, who must release it via `free_picture_buffer`.
            forget(boxed_data);
            return PictureBuffer {
                data,
                len,
                stride,
                depth,
            };
        }
    }
    // Error sentinel: null data pointer, zero sizes.
    PictureBuffer {
        data: std::ptr::null(),
        len: 0,
        stride: 0,
        depth: 0,
    }
}
#[no_mangle]
/// Frees the memory of a PictureBuffer previously returned by `render`.
///
/// Buffers carrying the null-`data` error sentinel are ignored, so this is
/// safe to call on any value `render` returned.
pub extern "C" fn free_picture_buffer(buffer: PictureBuffer) {
    // Bug fix: `render` returns a null `data` pointer on failure, and
    // reconstructing a slice from a null pointer is undefined behavior.
    if buffer.data.is_null() {
        return;
    }
    unsafe {
        // SAFETY: a non-null `data`/`len` pair originates from the
        // `Box<[u8]>` that `render` leaked via `forget`, so rebuilding and
        // dropping the box releases exactly that allocation.
        let s = std::slice::from_raw_parts_mut(buffer.data as *mut u8, buffer.len as usize);
        // put the memory back into the box such that it can be freed
        drop(Box::from_raw(s as *mut [u8]));
    }
}
|
// Copyright 2019 Steven Bosnick
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE-2.0 or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms
use sel4_build::{CMakeTarget, Platform};
fn main() {
    // Target the seL4 kernel for the Am335x platform.
    let kernel = CMakeTarget::Kernel(Platform::Am335x);
    // Compile the kernel, then generate the Rust bindings for the
    // platform-specific parts of the sel4 library.
    kernel.build();
    kernel.bindgen();
}
|
mod cpu;
mod debugger;
mod file;
mod instruction;
mod state;
use crate::debugger::Debugger;
use crate::state::State;
use std::error::Error;
/// Loads the ROM at `filename` into a fresh machine state and runs it.
///
/// When `debug` is true, execution is handed to the interactive debugger;
/// otherwise the machine free-runs until `state.running` becomes false.
///
/// # Errors
/// Propagates failures from reading the ROM file or loading it into the
/// machine state.
pub fn run(filename: String, debug: bool) -> Result<(), Box<dyn Error>> {
    let mut rom = file::read_rom(filename)?;
    let mut state = State::new();
    state.load_rom(&mut rom)?;
    if debug {
        let mut debugger = Debugger::new();
        debugger.step(state)
    } else {
        // Each step consumes the state and returns the successor state.
        while state.running {
            state = state.step()
        }
    }
    Ok(())
}
|
use crate::utils::copy_string;
use crate::{check_status, Error};
use std::ffi::CString;
use std::os::raw::c_int;
use std::ptr;
/// Information from the EEPROM of a daughter board
///
/// Newtype over the raw C handle; the handle is allocated in
/// `Default::default` and released in `Drop`.
pub struct DaughterBoardEeprom(uhd_sys::uhd_dboard_eeprom_handle);
impl DaughterBoardEeprom {
    /// Returns the board ID string stored in the EEPROM.
    pub fn id(&self) -> Result<String, Error> {
        copy_string(|buffer, length| unsafe {
            uhd_sys::uhd_dboard_eeprom_get_id(self.0, buffer, length as _)
        })
    }
    /// Stores a new board ID string.
    ///
    /// Fails if `id` contains an interior NUL byte or the C call errors.
    pub fn set_id(&mut self, id: &str) -> Result<(), Error> {
        let id_c = CString::new(id)?;
        check_status(unsafe { uhd_sys::uhd_dboard_eeprom_set_id(self.0, id_c.as_ptr()) })
    }
    /// Returns the serial number stored in the EEPROM.
    pub fn serial(&self) -> Result<String, Error> {
        copy_string(|buffer, length| unsafe {
            uhd_sys::uhd_dboard_eeprom_get_serial(self.0, buffer, length as _)
        })
    }
    /// Stores a new serial number.
    ///
    /// Fails if `serial` contains an interior NUL byte or the C call errors.
    pub fn set_serial(&mut self, serial: &str) -> Result<(), Error> {
        let serial_c = CString::new(serial)?;
        check_status(unsafe { uhd_sys::uhd_dboard_eeprom_set_serial(self.0, serial_c.as_ptr()) })
    }
    /// Returns the hardware revision stored in the EEPROM.
    pub fn revision(&self) -> Result<c_int, Error> {
        let mut revision = 0;
        check_status(unsafe { uhd_sys::uhd_dboard_eeprom_get_revision(self.0, &mut revision) })?;
        Ok(revision)
    }
    /// Stores a new hardware revision.
    pub fn set_revision(&mut self, revision: c_int) -> Result<(), Error> {
        check_status(unsafe { uhd_sys::uhd_dboard_eeprom_set_revision(self.0, revision) })
    }
    /// Exposes the raw C handle for other wrappers inside this crate.
    pub(crate) fn handle(&mut self) -> uhd_sys::uhd_dboard_eeprom_handle {
        self.0
    }
}
// SAFETY (review note): these impls assert the wrapped
// `uhd_dboard_eeprom_handle` may be moved to and shared across threads.
// That soundness rests on the UHD C API tolerating cross-thread use of a
// single handle — TODO confirm against the UHD documentation.
unsafe impl Send for DaughterBoardEeprom {}
unsafe impl Sync for DaughterBoardEeprom {}
impl Default for DaughterBoardEeprom {
    /// Allocates a fresh, empty EEPROM handle via the C API.
    ///
    /// Panics if `uhd_dboard_eeprom_make` fails, since `Default::default`
    /// has no way to report an error.
    fn default() -> Self {
        let mut handle: uhd_sys::uhd_dboard_eeprom_handle = ptr::null_mut();
        check_status(unsafe { uhd_sys::uhd_dboard_eeprom_make(&mut handle) }).unwrap();
        DaughterBoardEeprom(handle)
    }
}
impl Drop for DaughterBoardEeprom {
    fn drop(&mut self) {
        // Release the C-side handle; the status code is deliberately
        // ignored because `drop` cannot report failure.
        let _ = unsafe { uhd_sys::uhd_dboard_eeprom_free(&mut self.0) };
    }
}
/// Debug formatting for `DaughterBoardEeprom`.
mod fmt {
    use super::DaughterBoardEeprom;
    use std::fmt::{Debug, Formatter, Result};
    impl Debug for DaughterBoardEeprom {
        fn fmt(&self, f: &mut Formatter<'_>) -> Result {
            // Every field is fetched through the C API and may fail
            // independently; failures render as "<error>" instead of
            // aborting the whole Debug output.
            let revision = self.revision();
            let revision: &dyn Debug = revision
                .as_ref()
                .map(|rev| rev as &dyn Debug)
                .unwrap_or(&"<error>");
            f.debug_struct("DaughterBoardEeprom")
                .field("id", &self.id().as_deref().unwrap_or("<error>"))
                .field("serial", &self.serial().as_deref().unwrap_or("<error>"))
                .field("revision", revision)
                .finish()
        }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Result of listing the provider's REST operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationValue>,
}
/// A single REST API operation supported by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationValue {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // NOTE(review): generated as raw JSON even though `OperationValueDisplay`
    // below models this shape — confirm against the API spec before typing.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<serde_json::Value>,
}
/// Display metadata for an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationValueDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
}
/// Operating-system profile of the machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OsProfile {
    #[serde(rename = "computerName", default, skip_serializing_if = "Option::is_none")]
    pub computer_name: Option<String>,
}
/// Properties reported for a hybrid (Azure Arc) machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineProperties {
    #[serde(rename = "locationData", default, skip_serializing_if = "Option::is_none")]
    pub location_data: Option<LocationData>,
    // NOTE(review): raw JSON although `OsProfile` above models this shape.
    #[serde(rename = "osProfile", default, skip_serializing_if = "Option::is_none")]
    pub os_profile: Option<serde_json::Value>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<machine_properties::Status>,
    #[serde(rename = "lastStatusChange", default, skip_serializing_if = "Option::is_none")]
    pub last_status_change: Option<String>,
    #[serde(rename = "errorDetails", default, skip_serializing_if = "Vec::is_empty")]
    pub error_details: Vec<ErrorDetail>,
    #[serde(rename = "agentVersion", default, skip_serializing_if = "Option::is_none")]
    pub agent_version: Option<String>,
    #[serde(rename = "vmId", default, skip_serializing_if = "Option::is_none")]
    pub vm_id: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "machineFqdn", default, skip_serializing_if = "Option::is_none")]
    pub machine_fqdn: Option<String>,
    #[serde(rename = "clientPublicKey", default, skip_serializing_if = "Option::is_none")]
    pub client_public_key: Option<String>,
    #[serde(rename = "osName", default, skip_serializing_if = "Option::is_none")]
    pub os_name: Option<String>,
    #[serde(rename = "osVersion", default, skip_serializing_if = "Option::is_none")]
    pub os_version: Option<String>,
    #[serde(rename = "vmUuid", default, skip_serializing_if = "Option::is_none")]
    pub vm_uuid: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extensions: Vec<MachineExtensionInstanceView>,
    #[serde(rename = "osSku", default, skip_serializing_if = "Option::is_none")]
    pub os_sku: Option<String>,
    #[serde(rename = "domainName", default, skip_serializing_if = "Option::is_none")]
    pub domain_name: Option<String>,
    #[serde(rename = "adFqdn", default, skip_serializing_if = "Option::is_none")]
    pub ad_fqdn: Option<String>,
    #[serde(rename = "dnsFqdn", default, skip_serializing_if = "Option::is_none")]
    pub dns_fqdn: Option<String>,
    #[serde(rename = "privateLinkScopedResources", default, skip_serializing_if = "Vec::is_empty")]
    pub private_link_scoped_resources: Vec<String>,
}
/// Enum types nested under `MachineProperties`.
pub mod machine_properties {
    use super::*;
    /// Connectivity status of the hybrid machine agent.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Connected,
        Disconnected,
        Error,
    }
}
/// Properties that can be changed on a machine update.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineUpdateProperties {
    #[serde(rename = "locationData", default, skip_serializing_if = "Option::is_none")]
    pub location_data: Option<LocationData>,
}
/// A hybrid machine resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Machine {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    // NOTE(review): raw JSON although `MachineProperties` models this shape.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<serde_json::Value>,
}
/// Payload for a PATCH update of a machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// One page of machines, with a continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineListResult {
    pub value: Vec<Machine>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Common updatable fields (tags) shared by update payloads.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Standard ARM error detail; nests recursively via `details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    pub code: String,
    pub message: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
}
/// Standard ARM error response envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    pub error: ErrorDetail,
}
/// Managed identity attached to a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Identity {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
}
/// Outcome of a patch-assessment operation on a machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineAssessPatchesResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<machine_assess_patches_result::Status>,
    #[serde(rename = "assessmentActivityId", default, skip_serializing_if = "Option::is_none")]
    pub assessment_activity_id: Option<String>,
    #[serde(rename = "rebootPending", default, skip_serializing_if = "Option::is_none")]
    pub reboot_pending: Option<bool>,
    #[serde(rename = "availablePatchCountByClassification", default, skip_serializing_if = "Option::is_none")]
    pub available_patch_count_by_classification: Option<AvailablePatchCountByClassification>,
    #[serde(rename = "startDateTime", default, skip_serializing_if = "Option::is_none")]
    pub start_date_time: Option<String>,
    #[serde(rename = "lastModifiedDateTime", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_date_time: Option<String>,
    #[serde(rename = "startedBy", default, skip_serializing_if = "Option::is_none")]
    pub started_by: Option<machine_assess_patches_result::StartedBy>,
    #[serde(rename = "patchServiceUsed", default, skip_serializing_if = "Option::is_none")]
    pub patch_service_used: Option<machine_assess_patches_result::PatchServiceUsed>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<machine_assess_patches_result::OsType>,
    #[serde(rename = "errorDetails", default, skip_serializing_if = "Option::is_none")]
    pub error_details: Option<ErrorDetail>,
}
/// Enum types nested under `MachineAssessPatchesResult`.
pub mod machine_assess_patches_result {
    use super::*;
    /// Overall state of the assessment operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Unknown,
        InProgress,
        Failed,
        Succeeded,
        CompletedWithWarnings,
    }
    /// Who triggered the assessment.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum StartedBy {
        User,
        Platform,
    }
    /// Patch service used on the machine (wire values kept via renames).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PatchServiceUsed {
        Unknown,
        #[serde(rename = "WU")]
        Wu,
        #[serde(rename = "WU_WSUS")]
        WuWsus,
        #[serde(rename = "YUM")]
        Yum,
        #[serde(rename = "APT")]
        Apt,
        Zypper,
    }
    /// Operating-system family of the assessed machine.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailablePatchCountByClassification {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub security: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub critical: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub definition: Option<i32>,
#[serde(rename = "updateRollup", default, skip_serializing_if = "Option::is_none")]
pub update_rollup: Option<i32>,
#[serde(rename = "featurePack", default, skip_serializing_if = "Option::is_none")]
pub feature_pack: Option<i32>,
#[serde(rename = "servicePack", default, skip_serializing_if = "Option::is_none")]
pub service_pack: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tools: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub updates: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub other: Option<i32>,
}
/// Request body for installing patches on a machine.
///
/// `maximum_duration` and `reboot_setting` are required (no
/// `skip_serializing_if`); the per-OS parameter blocks are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineInstallPatchesParameters {
// NOTE(review): presumably an ISO-8601 duration string — confirm against the API spec.
#[serde(rename = "maximumDuration")]
pub maximum_duration: String,
#[serde(rename = "rebootSetting")]
pub reboot_setting: machine_install_patches_parameters::RebootSetting,
#[serde(rename = "windowsParameters", default, skip_serializing_if = "Option::is_none")]
pub windows_parameters: Option<WindowsParameters>,
#[serde(rename = "linuxParameters", default, skip_serializing_if = "Option::is_none")]
pub linux_parameters: Option<LinuxParameters>,
}
/// Enums scoped to [`MachineInstallPatchesParameters`].
pub mod machine_install_patches_parameters {
use super::*;
/// Reboot policy to apply after installing patches.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum RebootSetting {
IfRequired,
Never,
Always,
}
}
/// Windows-specific patch-selection filters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WindowsParameters {
#[serde(rename = "classificationsToInclude", default, skip_serializing_if = "Vec::is_empty")]
pub classifications_to_include: Vec<String>,
#[serde(rename = "kbNumbersToInclude", default, skip_serializing_if = "Vec::is_empty")]
pub kb_numbers_to_include: Vec<String>,
#[serde(rename = "kbNumbersToExclude", default, skip_serializing_if = "Vec::is_empty")]
pub kb_numbers_to_exclude: Vec<String>,
#[serde(rename = "excludeKbsRequiringReboot", default, skip_serializing_if = "Option::is_none")]
pub exclude_kbs_requiring_reboot: Option<bool>,
#[serde(rename = "maxPatchPublishDate", default, skip_serializing_if = "Option::is_none")]
pub max_patch_publish_date: Option<String>,
}
/// Linux-specific patch-selection filters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinuxParameters {
#[serde(rename = "classificationsToInclude", default, skip_serializing_if = "Vec::is_empty")]
pub classifications_to_include: Vec<String>,
#[serde(rename = "packageNameMasksToInclude", default, skip_serializing_if = "Vec::is_empty")]
pub package_name_masks_to_include: Vec<String>,
#[serde(rename = "packageNameMasksToExclude", default, skip_serializing_if = "Vec::is_empty")]
pub package_name_masks_to_exclude: Vec<String>,
}
/// Result of a patch-installation operation on a machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineInstallPatchesResult {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<machine_install_patches_result::Status>,
#[serde(rename = "installationActivityId", default, skip_serializing_if = "Option::is_none")]
pub installation_activity_id: Option<String>,
#[serde(rename = "rebootStatus", default, skip_serializing_if = "Option::is_none")]
pub reboot_status: Option<machine_install_patches_result::RebootStatus>,
#[serde(rename = "maintenanceWindowExceeded", default, skip_serializing_if = "Option::is_none")]
pub maintenance_window_exceeded: Option<bool>,
#[serde(rename = "excludedPatchCount", default, skip_serializing_if = "Option::is_none")]
pub excluded_patch_count: Option<i32>,
#[serde(rename = "notSelectedPatchCount", default, skip_serializing_if = "Option::is_none")]
pub not_selected_patch_count: Option<i32>,
#[serde(rename = "pendingPatchCount", default, skip_serializing_if = "Option::is_none")]
pub pending_patch_count: Option<i32>,
#[serde(rename = "installedPatchCount", default, skip_serializing_if = "Option::is_none")]
pub installed_patch_count: Option<i32>,
#[serde(rename = "failedPatchCount", default, skip_serializing_if = "Option::is_none")]
pub failed_patch_count: Option<i32>,
// Timestamps kept as raw strings, matching the rest of this module.
#[serde(rename = "startDateTime", default, skip_serializing_if = "Option::is_none")]
pub start_date_time: Option<String>,
#[serde(rename = "lastModifiedDateTime", default, skip_serializing_if = "Option::is_none")]
pub last_modified_date_time: Option<String>,
#[serde(rename = "startedBy", default, skip_serializing_if = "Option::is_none")]
pub started_by: Option<machine_install_patches_result::StartedBy>,
#[serde(rename = "patchServiceUsed", default, skip_serializing_if = "Option::is_none")]
pub patch_service_used: Option<machine_install_patches_result::PatchServiceUsed>,
#[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
pub os_type: Option<machine_install_patches_result::OsType>,
#[serde(rename = "errorDetails", default, skip_serializing_if = "Option::is_none")]
pub error_details: Option<ErrorDetail>,
}
/// Enums scoped to [`MachineInstallPatchesResult`].
pub mod machine_install_patches_result {
use super::*;
/// Overall status of the installation operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Status {
Unknown,
InProgress,
Failed,
Succeeded,
CompletedWithWarnings,
}
/// Reboot state of the machine after the installation run.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum RebootStatus {
Unknown,
NotNeeded,
Required,
Started,
Failed,
Completed,
}
/// Who initiated the installation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum StartedBy {
User,
Platform,
}
/// Patch backend used; mirrors the assess-patches equivalent.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PatchServiceUsed {
Unknown,
#[serde(rename = "WU")]
Wu,
#[serde(rename = "WU_WSUS")]
WuWsus,
#[serde(rename = "YUM")]
Yum,
#[serde(rename = "APT")]
Apt,
Zypper,
}
/// Operating system family of the patched machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
Windows,
Linux,
}
}
/// A machine extension resource: a [`TrackedResource`] (flattened into the
/// same JSON object) plus a free-form `properties` bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineExtension {
#[serde(flatten)]
pub tracked_resource: TrackedResource,
// NOTE(review): typed as raw JSON even though `MachineExtensionProperties`
// exists below — presumably intentional in the generator; confirm.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<serde_json::Value>,
}
/// PATCH body for updating a machine extension.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineExtensionUpdate {
#[serde(flatten)]
pub update_resource: UpdateResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<serde_json::Value>,
}
/// Typed properties of a machine extension.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineExtensionProperties {
#[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
pub force_update_tag: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub publisher: Option<String>,
// Serialized as `type`; suffixed because `type` is a Rust keyword.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
pub type_handler_version: Option<String>,
#[serde(rename = "autoUpgradeMinorVersion", default, skip_serializing_if = "Option::is_none")]
pub auto_upgrade_minor_version: Option<bool>,
// Extension settings are schemaless, hence raw JSON values.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub settings: Option<serde_json::Value>,
#[serde(rename = "protectedSettings", default, skip_serializing_if = "Option::is_none")]
pub protected_settings: Option<serde_json::Value>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
#[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
pub instance_view: Option<serde_json::Value>,
}
/// Typed properties for a machine-extension update; a subset of
/// [`MachineExtensionProperties`] (no provisioning state or instance view).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineExtensionUpdateProperties {
#[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
pub force_update_tag: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub publisher: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
pub type_handler_version: Option<String>,
#[serde(rename = "autoUpgradeMinorVersion", default, skip_serializing_if = "Option::is_none")]
pub auto_upgrade_minor_version: Option<bool>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub settings: Option<serde_json::Value>,
#[serde(rename = "protectedSettings", default, skip_serializing_if = "Option::is_none")]
pub protected_settings: Option<serde_json::Value>,
}
/// Runtime view of a machine extension instance.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineExtensionInstanceView {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
pub type_handler_version: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<machine_extension_instance_view::Status>,
}
/// Types scoped to [`MachineExtensionInstanceView`].
pub mod machine_extension_instance_view {
use super::*;
/// Status block of an extension instance view.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Status {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub level: Option<status::Level>,
#[serde(rename = "displayStatus", default, skip_serializing_if = "Option::is_none")]
pub display_status: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub time: Option<String>,
}
/// Types scoped to [`Status`].
pub mod status {
use super::*;
/// Severity of a status entry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Level {
Info,
Warning,
Error,
}
}
}
/// One page of machine extensions; `next_link` points at the following page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineExtensionsListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<MachineExtension>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Base (ARM-style) resource for private link scopes; only `location` is
/// required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkScopesResource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
pub location: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
/// Body carrying only a `tags` map (used for tag-only updates).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagsResource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
/// A Hybrid Compute private link scope: base resource (flattened) plus
/// optional typed properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HybridComputePrivateLinkScope {
#[serde(flatten)]
pub private_link_scopes_resource: PrivateLinkScopesResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<HybridComputePrivateLinkScopeProperties>,
}
/// Properties of a Hybrid Compute private link scope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HybridComputePrivateLinkScopeProperties {
#[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
pub public_network_access: Option<PublicNetworkAccessType>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
#[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
}
/// One page of private link scopes.
/// NOTE(review): unlike the other list results here, `value` has no
/// `default`/`skip_serializing_if` — it is required on the wire; confirm
/// this asymmetry is intentional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HybridComputePrivateLinkScopeListResult {
pub value: Vec<HybridComputePrivateLinkScope>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of private link resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PrivateLinkResource>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A private link resource: proxy resource (flattened) plus properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PrivateLinkResourceProperties>,
}
/// Properties of a private link resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
#[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
pub group_id: Option<String>,
#[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
pub required_members: Vec<String>,
#[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
pub required_zone_names: Vec<String>,
}
/// Properties of a private endpoint connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
#[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
pub private_endpoint: Option<PrivateEndpointProperty>,
#[serde(rename = "privateLinkServiceConnectionState", default, skip_serializing_if = "Option::is_none")]
pub private_link_service_connection_state: Option<PrivateLinkServiceConnectionStateProperty>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
}
/// Reference to a private endpoint by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointProperty {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
}
/// Connection state of a private link service connection;
/// `status` and `description` are required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionStateProperty {
pub status: String,
pub description: String,
#[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
pub actions_required: Option<String>,
}
/// A private endpoint connection: proxy resource (flattened) plus properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<PrivateEndpointConnectionProperties>,
}
/// One page of private endpoint connections.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<PrivateEndpointConnection>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// One page of scoped resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopedResourceListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ScopedResource>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// A scoped resource: proxy resource (flattened) plus properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopedResource {
#[serde(flatten)]
pub proxy_resource: ProxyResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ScopedResourceProperties>,
}
/// Properties of a scoped resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopedResourceProperties {
#[serde(rename = "linkedResourceId", default, skip_serializing_if = "Option::is_none")]
pub linked_resource_id: Option<String>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
}
/// Common error-response envelope: the v2 error (flattened) plus
/// recursively nested `details` and extra `additionalInfo` entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponseCommon {
#[serde(flatten)]
pub error_response_v2: ErrorResponseV2,
// Recursive: each detail is itself a full error response.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<ErrorResponseCommon>,
#[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// Typed-but-opaque supplementary error information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub info: Option<serde_json::Value>,
}
/// Error response with the error body nested under an `error` key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponseV2 {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<error_response_v2::Error>,
}
/// Types scoped to [`ErrorResponseV2`].
pub mod error_response_v2 {
use super::*;
/// The error body: code/message/target plus nested details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<ErrorResponseV2>,
#[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
pub additional_info: Vec<ErrorAdditionalInfo>,
}
}
/// Whether public network access is allowed for a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PublicNetworkAccessType {
Enabled,
Disabled,
}
/// Geographic location metadata; only `name` is required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocationData {
pub name: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub city: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub district: Option<String>,
#[serde(rename = "countryOrRegion", default, skip_serializing_if = "Option::is_none")]
pub country_or_region: Option<String>,
}
/// An ARM tracked resource: base [`Resource`] (flattened) plus tags and a
/// required `location`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
pub location: String,
}
/// Base ARM resource identity: id, name, and type (all read-only/optional).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
// Serialized as `type`; suffixed because `type` is a Rust keyword.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
/// An ARM proxy resource: identity only, no location or tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
#[serde(flatten)]
pub resource: Resource,
}
|
pub mod transistor;
pub mod and_gate;
pub mod wire;
pub use crate::components::transistor::Transistor;
pub use crate::components::and_gate::AndGate;
pub use crate::components::wire::wiring; |
use crate::commands::wallet::wallet_query;
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use clap::Clap;
use ic_utils::interfaces::wallet::AddressEntry;
/// Print wallet's address book.
///
/// Options for the `addresses` subcommand — it currently takes no
/// arguments, but the struct exists so clap can parse/reject flags.
#[derive(Clap)]
pub struct AddressesOpts {}
/// Queries the wallet canister's `list_addresses` method and prints each
/// address-book entry (id, kind, role, name) on its own line.
pub async fn exec(env: &dyn Environment, _opts: AddressesOpts) -> DfxResult {
    // The wallet returns a single-element tuple wrapping the entry list.
    let (address_book,): (Vec<AddressEntry>,) = wallet_query(env, "list_addresses", ()).await?;
    for address in address_book {
        // Entries without a name get a fixed placeholder string.
        let display_name = match address.name {
            Some(n) => n,
            None => "No name set.".to_string(),
        };
        println!(
            "Id: {}, Kind: {:?}, Role: {:?}, Name: {}",
            address.id, address.kind, address.role, display_name
        );
    }
    Ok(())
}
|
use super::range_wrapper::RangeStartWrapper;
use crate::std_ext::*;
use std::collections::BTreeMap;
use std::fmt::{self, Debug};
use std::ops::Range;
/// A map whose keys are stored as (half-open) ranges bounded
/// inclusively below and exclusively above `(start..end)`.
///
/// Contiguous and overlapping ranges that map to the same value
/// are coalesced into a single range.
///
/// Invariant: the stored ranges are pairwise disjoint, so ordering them by
/// start (via `RangeStartWrapper`) gives a total order.
#[derive(Clone)]
pub struct RangeMap<K, V> {
// Wrap ranges so that they are `Ord`.
// See `range_wrapper.rs` for explanation.
btm: BTreeMap<RangeStartWrapper<K>, V>,
}
// `Default` mirrors `new()`: an empty map. Bounds match the main impl
// block so `default()` can delegate to it.
impl<K, V> Default for RangeMap<K, V>
where
K: Ord + Clone,
V: Eq + Clone,
{
fn default() -> Self {
Self::new()
}
}
impl<K, V> RangeMap<K, V>
where
K: Ord + Clone,
V: Eq + Clone,
{
/// Makes a new empty `RangeMap`.
pub fn new() -> Self {
RangeMap {
btm: BTreeMap::new(),
}
}
/// Returns a reference to the value corresponding to the given key,
/// if the key is covered by any range in the map.
pub fn get(&self, key: &K) -> Option<&V> {
self.get_key_value(key).map(|(_range, value)| value)
}
/// Returns the range-value pair (as a pair of references) corresponding
/// to the given key, if the key is covered by any range in the map.
pub fn get_key_value(&self, key: &K) -> Option<(&Range<K>, &V)> {
use std::ops::Bound;
// The only stored range that could contain the given key is the
// last stored range whose start is less than or equal to this key.
// (Stored ranges are disjoint, so there is at most one candidate.)
let key_as_start = RangeStartWrapper::new(key.clone()..key.clone());
self.btm
.range((Bound::Unbounded, Bound::Included(key_as_start)))
.next_back()
.filter(|(range_start_wrapper, _value)| {
// Does the only candidate range contain
// the requested key?
range_start_wrapper.range.contains(key)
})
.map(|(range_start_wrapper, value)| (&range_start_wrapper.range, value))
}
/// Returns `true` if any range in the map covers the specified key.
pub fn contains_key(&self, key: &K) -> bool {
self.get(key).is_some()
}
/// Gets an iterator over all pairs of key range and value,
/// ordered by key range.
///
/// The iterator element type is `(&'a Range<K>, &'a V)`.
pub fn iter(&self) -> impl Iterator<Item = (&Range<K>, &V)> {
self.btm.iter().map(|(by_start, v)| (&by_start.range, v))
}
/// Insert a pair of key range and value into the map.
///
/// If the inserted range partially or completely overlaps any
/// existing range in the map, then the existing range (or ranges) will be
/// partially or completely replaced by the inserted range.
///
/// If the inserted range either overlaps or is immediately adjacent
/// any existing range _mapping to the same value_, then the ranges
/// will be coalesced into a single contiguous range.
///
/// # Panics
///
/// Panics if range `start >= end`.
pub fn insert(&mut self, range: Range<K>, value: V) {
use std::ops::Bound;
// We don't want to have to make empty ranges make sense;
// they don't represent anything meaningful in this structure.
assert!(range.start < range.end);
// Wrap up the given range so that we can "borrow"
// it as a wrapper reference to either its start or end.
// See `range_wrapper.rs` for explanation of these hacks.
let mut new_range_start_wrapper: RangeStartWrapper<K> = RangeStartWrapper::new(range);
let new_value = value;
// Is there a stored range either overlapping the start of
// the range to insert or immediately preceding it?
//
// If there is any such stored range, it will be the last
// whose start is less than or equal to the start of the range to insert,
// or the one before that if both of the above cases exist.
let mut candidates = self
.btm
.range((Bound::Unbounded, Bound::Included(&new_range_start_wrapper)))
.rev()
.take(2)
.filter(|(stored_range_start_wrapper, _stored_value)| {
// Does the candidate range either overlap
// or immediately precede the range to insert?
// (Remember that it might actually cover the _whole_
// range to insert and then some.)
stored_range_start_wrapper
.range
.touches(&new_range_start_wrapper.range)
});
if let Some(mut candidate) = candidates.next() {
// Or the one before it if both cases described above exist.
if let Some(another_candidate) = candidates.next() {
candidate = another_candidate;
}
// Clone out of the map before mutating it below; `adjust_touching_
// ranges_for_insert` may grow `new_range_start_wrapper.range`.
let (stored_range_start_wrapper, stored_value) =
(candidate.0.clone(), candidate.1.clone());
self.adjust_touching_ranges_for_insert(
stored_range_start_wrapper,
stored_value,
&mut new_range_start_wrapper.range,
&new_value,
);
}
// Are there any stored ranges whose heads overlap or immediately
// follow the range to insert?
//
// If there are any such stored ranges (that weren't already caught above),
// their starts will fall somewhere after the start of the range to insert,
// and on or before its end.
//
// This time around, if the latter holds, it also implies
// the former so we don't need to check here if they touch.
//
// REVISIT: Possible micro-optimisation: `impl Borrow<T> for RangeStartWrapper<T>`
// and use that to search here, to avoid constructing another `RangeStartWrapper`.
let new_range_end_as_start = RangeStartWrapper::new(
new_range_start_wrapper.range.end.clone()..new_range_start_wrapper.range.end.clone(),
);
while let Some((stored_range_start_wrapper, stored_value)) = self
.btm
.range((
Bound::Included(&new_range_start_wrapper),
Bound::Included(&new_range_end_as_start),
))
.next()
{
// One extra exception: if we have different values,
// and the stored range starts at the end of the range to insert,
// then we don't want to keep looping forever trying to find more!
if stored_range_start_wrapper.range.start == new_range_start_wrapper.range.end
&& *stored_value != new_value
{
// We're beyond the last stored range that could be relevant.
// Avoid wasting time on irrelevant ranges, or even worse, looping forever.
// (`adjust_touching_ranges_for_insert` below assumes that the given range
// is relevant, and behaves very poorly if it is handed a range that it
// shouldn't be touching.)
break;
}
let stored_range_start_wrapper = stored_range_start_wrapper.clone();
let stored_value = stored_value.clone();
self.adjust_touching_ranges_for_insert(
stored_range_start_wrapper,
stored_value,
&mut new_range_start_wrapper.range,
&new_value,
);
}
// Insert the (possibly expanded) new range, and we're done!
self.btm.insert(new_range_start_wrapper, new_value);
}
/// Removes a range from the map, if all or any of it was present.
///
/// If the range to be removed _partially_ overlaps any ranges
/// in the map, then those ranges will be contracted to no
/// longer cover the removed range.
///
///
/// # Panics
///
/// Panics if range `start >= end`.
pub fn remove(&mut self, range: Range<K>) {
use std::ops::Bound;
// We don't want to have to make empty ranges make sense;
// they don't represent anything meaningful in this structure.
assert!(range.start < range.end);
let range_start_wrapper: RangeStartWrapper<K> = RangeStartWrapper::new(range);
let range = &range_start_wrapper.range;
// Is there a stored range overlapping the start of
// the range to insert?
//
// If there is any such stored range, it will be the last
// whose start is less than or equal to the start of the range to insert.
if let Some((stored_range_start_wrapper, stored_value)) = self
.btm
.range((Bound::Unbounded, Bound::Included(&range_start_wrapper)))
.next_back()
.filter(|(stored_range_start_wrapper, _stored_value)| {
// Does the only candidate range overlap
// the range to insert?
stored_range_start_wrapper.range.overlaps(&range)
})
.map(|(stored_range_start_wrapper, stored_value)| {
// Clone so the borrow of `self.btm` ends before we mutate it.
(stored_range_start_wrapper.clone(), stored_value.clone())
})
{
self.adjust_overlapping_ranges_for_remove(
stored_range_start_wrapper,
stored_value,
&range,
);
}
// Are there any stored ranges whose heads overlap the range to insert?
//
// If there are any such stored ranges (that weren't already caught above),
// their starts will fall somewhere after the start of the range to insert,
// and before its end.
//
// REVISIT: Possible micro-optimisation: `impl Borrow<T> for RangeStartWrapper<T>`
// and use that to search here, to avoid constructing another `RangeStartWrapper`.
let new_range_end_as_start = RangeStartWrapper::new(range.end.clone()..range.end.clone());
while let Some((stored_range_start_wrapper, stored_value)) = self
.btm
.range((
Bound::Excluded(&range_start_wrapper),
Bound::Excluded(&new_range_end_as_start),
))
.next()
.map(|(stored_range_start_wrapper, stored_value)| {
(stored_range_start_wrapper.clone(), stored_value.clone())
})
{
self.adjust_overlapping_ranges_for_remove(
stored_range_start_wrapper,
stored_value,
&range,
);
}
}
/// Reconciles one stored range that touches (overlaps or abuts) the range
/// being inserted: same-value ranges are absorbed into `new_range` (which
/// is grown in place); different-value overlapping ranges are truncated,
/// leaving at most two remnant pieces outside `new_range`.
///
/// Precondition (see callers): the stored range actually touches
/// `new_range` — handing it an unrelated range corrupts the map.
fn adjust_touching_ranges_for_insert(
&mut self,
stored_range_start_wrapper: RangeStartWrapper<K>,
stored_value: V,
new_range: &mut Range<K>,
new_value: &V,
) {
use std::cmp::{max, min};
if stored_value == *new_value {
// The ranges have the same value, so we can "adopt"
// the stored range.
//
// This means that no matter how big or where the stored range is,
// we will expand the new range's bounds to subsume it,
// and then delete the stored range.
new_range.start =
min(&new_range.start, &stored_range_start_wrapper.range.start).clone();
new_range.end = max(&new_range.end, &stored_range_start_wrapper.range.end).clone();
self.btm.remove(&stored_range_start_wrapper);
} else {
// The ranges have different values.
if new_range.overlaps(&stored_range_start_wrapper.range) {
// The ranges overlap. This is a little bit more complicated.
// Delete the stored range, and then add back between
// 0 and 2 subranges at the ends of the range to insert.
self.btm.remove(&stored_range_start_wrapper);
if stored_range_start_wrapper.range.start < new_range.start {
// Insert the piece left of the range to insert.
self.btm.insert(
RangeStartWrapper::new(
stored_range_start_wrapper.range.start..new_range.start.clone(),
),
stored_value.clone(),
);
}
if stored_range_start_wrapper.range.end > new_range.end {
// Insert the piece right of the range to insert.
self.btm.insert(
RangeStartWrapper::new(
new_range.end.clone()..stored_range_start_wrapper.range.end,
),
stored_value,
);
}
} else {
// No-op; they're not overlapping,
// so we can just keep both ranges as they are.
}
}
}
/// Removes one stored range that overlaps `range_to_remove`, re-inserting
/// up to two remnant pieces (left and/or right of the removed span) that
/// fall outside it, each keeping the stored value.
fn adjust_overlapping_ranges_for_remove(
&mut self,
stored_range_start_wrapper: RangeStartWrapper<K>,
stored_value: V,
range_to_remove: &Range<K>,
) {
// Delete the stored range, and then add back between
// 0 and 2 subranges at the ends of the range to insert.
self.btm.remove(&stored_range_start_wrapper);
let stored_range = stored_range_start_wrapper.range;
if stored_range.start < range_to_remove.start {
// Insert the piece left of the range to insert.
self.btm.insert(
RangeStartWrapper::new(stored_range.start..range_to_remove.start.clone()),
stored_value.clone(),
);
}
if stored_range.end > range_to_remove.end {
// Insert the piece right of the range to insert.
self.btm.insert(
RangeStartWrapper::new(range_to_remove.end.clone()..stored_range.end),
stored_value,
);
}
}
/// Gets an iterator over all the maximally-sized ranges
/// contained in `outer_range` that are not covered by
/// any range stored in the map.
///
/// The iterator element type is `Range<K>`.
///
/// NOTE: Calling `gaps` eagerly finds the first gap,
/// even if the iterator is never consumed.
pub fn gaps<'a>(&'a self, outer_range: &'a Range<K>) -> Gaps<'a, K, V> {
let mut keys = self.btm.keys().peekable();
// Find the first potential gap.
let mut candidate_start = &outer_range.start;
while let Some(item) = keys.peek() {
if item.range.end <= outer_range.start {
// This range sits entirely before the start of
// the outer range; just skip it.
let _ = keys.next();
} else if item.range.start <= outer_range.start {
// This range overlaps the start of the
// outer range, so the first possible candidate
// range begins at its end.
candidate_start = &item.range.end;
let _ = keys.next();
} else {
// The rest of the items might contribute to gaps.
break;
}
}
Gaps {
outer_range,
keys,
candidate_start,
}
}
}
// We can't just derive this automatically, because that would
// expose irrelevant (and private) implementation details.
// Instead implement it in the same way that the underlying BTreeMap does.
// Output shape: `{start..end: value, ...}` via the standard map formatter.
impl<K: Debug, V: Debug> Debug for RangeMap<K, V>
where
K: Ord + Clone,
V: Eq + Clone,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map().entries(self.iter()).finish()
}
}
pub struct Gaps<'a, K, V> {
outer_range: &'a Range<K>,
keys: std::iter::Peekable<std::collections::btree_map::Keys<'a, RangeStartWrapper<K>, V>>,
candidate_start: &'a K,
}
// `Gaps` is always fused. (See definition of `next` below: once it returns
// `None`, `candidate_start` has reached `outer_range.end` and every later
// call short-circuits to `None` without touching `keys`.)
impl<'a, K, V> std::iter::FusedIterator for Gaps<'a, K, V> where K: Ord + Clone {}
impl<'a, K, V> Iterator for Gaps<'a, K, V>
where
    K: Ord + Clone,
{
    type Item = Range<K>;
    /// Yields the next non-empty uncovered range within `outer_range`.
    fn next(&mut self) -> Option<Self::Item> {
        // Loop so that zero-width candidates are skipped instead of being
        // yielded. Without this, two touching stored ranges (e.g. `1..3`
        // and `3..5`, kept separate because their values differ) made the
        // previous code yield the empty range `3..3`, contradicting the
        // documented contract of returning maximally-sized uncovered
        // ranges.
        loop {
            if *self.candidate_start >= self.outer_range.end {
                // We've already passed the end of the outer range;
                // there are no more gaps to find.
                return None;
            }
            // Figure out where this gap ends.
            let (end, next_candidate_start) = if let Some(item) = self.keys.next() {
                if item.range.start < self.outer_range.end {
                    // The gap goes up until the start of the next item,
                    // and the next candidate starts after it.
                    (&item.range.start, &item.range.end)
                } else {
                    // The item sits after the end of the outer range,
                    // so this gap ends at the end of the outer range.
                    // This also means there will be no more gaps.
                    (&self.outer_range.end, &self.outer_range.end)
                }
            } else {
                // There's no next item; the end is at the
                // end of the outer range.
                // This also means there will be no more gaps.
                (&self.outer_range.end, &self.outer_range.end)
            };
            if *end > *self.candidate_start {
                // Move the next candidate gap start past the end
                // of this gap, and yield the gap we found.
                let gap = self.candidate_start.clone()..end.clone();
                self.candidate_start = next_candidate_start;
                return Some(gap);
            }
            // Zero-width candidate (stored ranges touch): advance past it
            // and keep looking.
            self.candidate_start = next_candidate_start;
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
/// Test-only helper: flatten a map into owned `(range, value)` pairs so
/// assertions can compare against plain `Vec` literals.
trait RangeMapExt<K, V> {
    fn to_vec(&self) -> Vec<(Range<K>, V)>;
}
impl<K, V> RangeMapExt<K, V> for RangeMap<K, V>
where
    K: Ord + Clone,
    V: Eq + Clone,
{
    fn to_vec(&self) -> Vec<(Range<K>, V)> {
        self.iter().map(|(kr, v)| (kr.clone(), v.clone())).collect()
    }
}
//
// Insertion tests
//
// Diagram legend for the ASCII art throughout this module (presumed from
// usage — no explicit legend exists): each column is an integer key;
// ●/◆ mark a range's inclusive start, ◌/◇ its exclusive end, with ◆--◇
// used for the newer/overlapping range or (later) the gap query range.
#[test]
fn empty_map_is_empty() {
    let range_map: RangeMap<u32, bool> = RangeMap::new();
    assert_eq!(range_map.to_vec(), vec![]);
}
#[test]
fn insert_into_empty_map() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(0..50, false);
    assert_eq!(range_map.to_vec(), vec![(0..50, false)]);
}
#[test]
fn new_same_value_immediately_following_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ●---◌ ◌ ◌ ◌ ◌
    range_map.insert(3..5, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..5, false)]);
}
#[test]
fn new_different_value_immediately_following_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
    range_map.insert(3..5, true);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..3, false), (3..5, true)]);
}
#[test]
fn new_same_value_overlapping_end_of_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-----◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..4, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ●---◌ ◌ ◌ ◌ ◌
    range_map.insert(3..5, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..5, false)]);
}
#[test]
fn new_different_value_overlapping_end_of_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-----◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..4, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
    range_map.insert(3..5, true);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..3, false), (3..5, true)]);
}
#[test]
fn new_same_value_immediately_preceding_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ●---◌ ◌ ◌ ◌ ◌
    range_map.insert(3..5, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..5, false)]);
}
#[test]
fn new_different_value_immediately_preceding_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
    range_map.insert(3..5, true);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..3, false), (3..5, true)]);
}
#[test]
fn new_same_value_wholly_inside_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    range_map.insert(1..5, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ●---◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(2..4, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..5, false)]);
}
#[test]
fn new_different_value_wholly_inside_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆-------◇ ◌ ◌ ◌ ◌
    range_map.insert(1..5, true);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ●---◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(2..4, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ◌ ◌ ●-◌ ◌ ◌ ◌ ◌
    assert_eq!(
        range_map.to_vec(),
        vec![(1..2, true), (2..4, false), (4..5, true)]
    );
}
#[test]
fn replace_at_end_of_existing_range_should_coalesce() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ●---◌ ◌ ◌ ◌ ◌
    range_map.insert(3..5, true);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ●---◌ ◌ ◌ ◌ ◌
    range_map.insert(3..5, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..5, false)]);
}
#[test]
// Test every permutation of a bunch of touching and overlapping ranges.
fn lots_of_interesting_ranges() {
    use crate::stupid_range_map::StupidU32RangeMap;
    use permutator::Permutation;
    let mut ranges_with_values = [
        (2..3, false),
        // A duplicate duplicates
        (2..3, false),
        // Almost a duplicate, but with a different value
        (2..3, true),
        // A few small ranges, some of them overlapping others,
        // some of them touching others
        (3..5, true),
        (4..6, true),
        (5..7, true),
        // A really big range
        (2..6, true),
    ];
    ranges_with_values.permutation().for_each(|permutation| {
        let mut range_map: RangeMap<u32, bool> = RangeMap::new();
        let mut stupid: StupidU32RangeMap<bool> = StupidU32RangeMap::new();
        for (k, v) in permutation {
            // Insert it into both maps.
            range_map.insert(k.clone(), v);
            // NOTE: Clippy's `range_minus_one` lint is a bit overzealous here,
            // because we _can't_ pass an open-ended range to `insert`.
            #[allow(clippy::range_minus_one)]
            stupid.insert(k.start..=(k.end - 1), v);
            // At every step, both maps should contain the same stuff.
            let stupid2: StupidU32RangeMap<bool> = range_map.clone().into();
            assert_eq!(stupid, stupid2);
        }
    });
}
//
// Get* tests
//
#[test]
fn get() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(0..50, false);
    assert_eq!(range_map.get(&49), Some(&false));
    assert_eq!(range_map.get(&50), None);
}
#[test]
fn get_key_value() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(0..50, false);
    assert_eq!(range_map.get_key_value(&49), Some((&(0..50), &false)));
    assert_eq!(range_map.get_key_value(&50), None);
}
//
// Removal tests
//
// These cover every relative position of the removed range against a
// single stored range: disjoint before/after, overlapping either edge,
// strictly inside, exact match, and strict superset.
#[test]
fn remove_from_empty_map() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.remove(0..50);
    assert_eq!(range_map.to_vec(), vec![]);
}
#[test]
fn remove_non_covered_range_before_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(0..25);
    assert_eq!(range_map.to_vec(), vec![(25..75, false)]);
}
#[test]
fn remove_non_covered_range_after_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(75..100);
    assert_eq!(range_map.to_vec(), vec![(25..75, false)]);
}
#[test]
fn remove_overlapping_start_of_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(0..30);
    assert_eq!(range_map.to_vec(), vec![(30..75, false)]);
}
#[test]
fn remove_middle_of_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(30..70);
    assert_eq!(range_map.to_vec(), vec![(25..30, false), (70..75, false)]);
}
#[test]
fn remove_overlapping_end_of_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(70..100);
    assert_eq!(range_map.to_vec(), vec![(25..70, false)]);
}
#[test]
fn remove_exactly_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(25..75);
    assert_eq!(range_map.to_vec(), vec![]);
}
#[test]
fn remove_superset_of_stored() {
    let mut range_map: RangeMap<u32, bool> = RangeMap::new();
    range_map.insert(25..75, false);
    range_map.remove(0..100);
    assert_eq!(range_map.to_vec(), vec![]);
}
//
// Gaps tests
//
#[test]
fn whole_range_is_a_gap() {
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌
    let range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆-------------◇ ◌
    let outer_range = 1..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield the entire outer range.
    assert_eq!(gaps.next(), Some(1..8));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn whole_range_is_covered_exactly() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---------◌ ◌ ◌ ◌
    range_map.insert(1..6, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆---------◇ ◌ ◌ ◌
    let outer_range = 1..6;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield no gaps.
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_before_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---◌ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◆-----◇ ◌
    let outer_range = 5..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield the entire outer range.
    assert_eq!(gaps.next(), Some(5..8));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_touching_start_of_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------◌ ◌ ◌ ◌ ◌
    range_map.insert(1..5, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◆-----◇ ◌
    let outer_range = 5..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield the entire outer range.
    assert_eq!(gaps.next(), Some(5..8));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_overlapping_start_of_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●---------◌ ◌ ◌ ◌
    range_map.insert(1..6, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◆-----◇ ◌
    let outer_range = 5..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield from the end of the stored item
    // to the end of the outer range.
    assert_eq!(gaps.next(), Some(6..8));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_starting_at_start_of_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ●-◌ ◌ ◌ ◌
    range_map.insert(5..6, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◆-----◇ ◌
    let outer_range = 5..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield from the item onwards.
    assert_eq!(gaps.next(), Some(6..8));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn items_floating_inside_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ●-◌ ◌ ◌ ◌
    range_map.insert(5..6, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ●-◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(3..4, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆-------------◇ ◌
    let outer_range = 1..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield gaps at start, between items,
    // and at end.
    assert_eq!(gaps.next(), Some(1..3));
    assert_eq!(gaps.next(), Some(4..5));
    assert_eq!(gaps.next(), Some(6..8));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_ending_at_end_of_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◌ ◌ ●-◌ ◌
    range_map.insert(7..8, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◆-----◇ ◌
    let outer_range = 5..8;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield from the start of the outer range
    // up to the start of the stored item.
    assert_eq!(gaps.next(), Some(5..7));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_overlapping_end_of_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ●---◌ ◌ ◌ ◌
    range_map.insert(4..6, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◆-----◇ ◌ ◌ ◌ ◌
    let outer_range = 2..5;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield from the start of the outer range
    // up to the start of the stored item.
    assert_eq!(gaps.next(), Some(2..4));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_touching_end_of_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ●-------◌ ◌
    range_map.insert(4..8, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆-----◇ ◌ ◌ ◌ ◌ ◌
    let outer_range = 1..4;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield the entire outer range.
    assert_eq!(gaps.next(), Some(1..4));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn item_after_outer_range() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◌ ●---◌ ◌
    range_map.insert(6..7, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆-----◇ ◌ ◌ ◌ ◌ ◌
    let outer_range = 1..4;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield the entire outer range.
    assert_eq!(gaps.next(), Some(1..4));
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn empty_outer_range_with_items_away_from_both_sides() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆---◇ ◌ ◌ ◌ ◌ ◌ ◌
    range_map.insert(1..3, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◌ ◆---◇ ◌ ◌
    range_map.insert(5..7, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌ ◌
    let outer_range = 4..4;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield no gaps.
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn empty_outer_range_with_items_touching_both_sides() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌ ◌
    range_map.insert(2..4, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌
    range_map.insert(4..6, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌ ◌
    let outer_range = 4..4;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield no gaps.
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
#[test]
fn empty_outer_range_with_item_straddling() {
    let mut range_map: RangeMap<u32, ()> = RangeMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◆-----◇ ◌ ◌ ◌ ◌
    range_map.insert(2..5, ());
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌ ◌
    let outer_range = 4..4;
    let mut gaps = range_map.gaps(&outer_range);
    // Should yield no gaps.
    assert_eq!(gaps.next(), None);
    // Gaps iterator should be fused.
    assert_eq!(gaps.next(), None);
    assert_eq!(gaps.next(), None);
}
//
// impl Debug
//
#[test]
fn map_debug_repr_looks_right() {
    let mut map: RangeMap<u32, ()> = RangeMap::new();
    // Empty
    assert_eq!(format!("{:?}", map), "{}");
    // One entry
    map.insert(2..5, ());
    assert_eq!(format!("{:?}", map), "{2..5: ()}");
    // Many entries
    map.insert(6..7, ());
    map.insert(8..9, ());
    assert_eq!(format!("{:?}", map), "{2..5: (), 6..7: (), 8..9: ()}");
}
}
|
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::{HashMap, HashSet};
fn parse_line(line: &str) -> (String, Vec<(usize, String)>) {
const KEY_STR: &str = r"^(?P<color>\w+ \w+)";
const VAL_RE_STR: &str = r"(?P<amount>\d+) (?P<color>\w+ \w+) bags?";
lazy_static! {
static ref KEY_RE: Regex = Regex::new(KEY_STR).unwrap();
static ref VAL_RE: Regex = Regex::new(VAL_RE_STR).unwrap();
}
let key = Regex::captures(&KEY_RE, line).unwrap()["color"].into();
let values = Regex::captures_iter(&VAL_RE, line)
.map(|c| (c["amount"].parse().unwrap(), c["color"].into()))
.collect();
(key, values)
}
fn reverse_map(input: &str) -> HashMap<String, Vec<String>> {
let mut map = HashMap::new();
for (parent, children) in input.lines().map(parse_line) {
for (_, child) in children {
map.entry(child).or_insert(Vec::new()).push(parent.clone());
}
}
map
}
/// Count how many bag colors can (transitively) contain a shiny gold bag.
///
/// Walks the reversed containment graph from "shiny gold" (stack order is
/// irrelevant for reachability) and subtracts the start color itself.
fn part1() -> usize {
    let input = aoc2020::input_file!("07");
    let parents_of = reverse_map(input);
    let mut pending = vec!["shiny gold"];
    let mut seen = HashSet::new();
    while let Some(color) = pending.pop() {
        // `insert` returns false when the color was already visited.
        if !seen.insert(color) {
            continue;
        }
        if let Some(parents) = parents_of.get(color) {
            pending.extend(parents.iter().map(String::as_str));
        }
    }
    // "shiny gold" itself is in `seen` but doesn't count.
    seen.len() - 1
}
/// Count how many individual bags one "shiny gold" bag must contain.
///
/// Iterative post-order evaluation: a color's total is computed only once
/// the totals of all of its children are known, avoiding recursion.
fn part2() -> usize {
    let input = aoc2020::input_file!("07");
    // Forward map: color -> list of (count, child color).
    let map: HashMap<_, _> = input.lines().map(parse_line).collect();
    let mut stack = vec!["shiny gold"];
    // amount[color] = total number of bags inside one bag of `color`.
    let mut amount: HashMap<&str, usize> = Default::default();
    while !stack.is_empty() {
        let &parent = stack.last().unwrap();
        let children = map.get(parent).unwrap();
        if children.is_empty() {
            // Leaf bag: contains nothing.
            amount.insert(parent, 0);
            stack.pop();
            continue;
        }
        if children
            .iter()
            .all(|(_, child)| amount.contains_key(&child[..]))
        {
            // All children resolved: each child contributes itself
            // (the `1 +`) plus everything it contains, `count` times.
            let total = children
                .iter()
                .map(|(count, color)| count * (1 + amount[&color[..]]))
                .sum();
            amount.insert(parent, total);
            stack.pop();
            continue;
        }
        // Some children are still unresolved: push them and revisit
        // `parent` later (it stays on the stack).
        children
            .iter()
            .filter(|(_, child)| !amount.contains_key(&child[..]))
            .for_each(|(_, child)| stack.push(&child[..]));
    }
    amount["shiny gold"]
}
/// Print the answers to both parts of day 7.
fn main() {
    println!("Day 7:");
    let (one, two) = (part1(), part2());
    println!("1: {}", one);
    println!("2: {}", two);
}
|
pub use amethyst::core::nalgebra::{
Point1, Point2, Point3, UnitQuaternion, Rotation, Rotation2, Rotation3, Vector1, Vector2, Vector3,
Vector4,
};
pub use std::f32::consts::PI;
/// Full turn in radians (2π); complements the re-exported `PI`.
pub const TAU: f32 = ::std::f32::consts::PI * 2.0;
/// Restrict a value to the inclusive interval `[min, max]`.
///
/// Values below `min` map to `min`, values above `max` map to `max`, and
/// anything else (including NaN for the float impls, since both
/// comparisons are then false) is returned unchanged.
pub trait Clamp {
    fn clamp(self, min: Self, max: Self) -> Self;
}
impl Clamp for f32 {
    fn clamp(self, min: Self, max: Self) -> Self {
        if self < min {
            min
        } else if self > max {
            max
        } else {
            self
        }
    }
}
impl Clamp for f64 {
    fn clamp(self, min: Self, max: Self) -> Self {
        if self < min {
            min
        } else if self > max {
            max
        } else {
            self
        }
    }
}
|
use log::error;
use mpd::{Client, Stats};
use serenity::{
framework::standard::{macros::command, CommandResult},
model::channel::Message,
prelude::*,
};
use std::process::Command;
#[command]
#[description = "How much music does Phate have?"]
/// Reply with an embed summarizing the size of the music collection:
/// artist/album/song counts from the local MPD server, plus file count,
/// on-disk size, and play count from the `scripts/hmm` helper script.
fn hmm(ctx: &mut Context, msg: &Message) -> CommandResult {
    // `expect` instead of bare `unwrap` so a failing network call or
    // script names the step that died in the panic message.
    let mut c = Client::connect("127.0.0.1:6600")
        .expect("Could not connect to MPD on 127.0.0.1:6600");
    let stats: Stats = c.stats().expect("Could not fetch stats from MPD");
    let artists = stats.artists.to_string();
    let albums = stats.albums.to_string();
    let songs = stats.songs.to_string();
    let files = Command::new("scripts/hmm")
        .arg("files")
        .output()
        .expect("Could not obtain amount of files.");
    let size = Command::new("scripts/hmm")
        .arg("size")
        .output()
        .expect("Could not obtain size of music collection.");
    let amount = Command::new("scripts/hmm")
        .arg("amount")
        .output()
        .expect("Could not obtain amount of times played.");
    // Build and send a single embed with all six figures.
    let msg = msg.channel_id.send_message(&ctx.http, |m| {
        m.embed(|e| {
            e.title("`^hmm`");
            e.description("How Much Music (Does Phate Have?)");
            e.fields(vec![
                ("Artists", artists, true),
                ("Albums", albums, true),
                ("Songs", songs, true),
                (
                    "Files",
                    String::from_utf8_lossy(&files.stdout).to_string(),
                    true,
                ),
                (
                    "Size of Collection",
                    String::from_utf8_lossy(&size.stdout).to_string(),
                    true,
                ),
                (
                    "Amount of Songs Played",
                    String::from_utf8_lossy(&amount.stdout).to_string(),
                    true,
                ),
            ]);
            e
        });
        m
    });
    // A failed send is logged rather than propagated, matching the
    // best-effort style of the rest of this command.
    if let Err(why) = msg {
        error!("Error sending message: {:?}", why);
    }
    Ok(())
}
|
pub use system::error::*;
pub use system::syscall::*;
use arch::regs::Regs;
use arch::context::context_switch;
pub mod execute;
pub mod fs;
pub mod memory;
pub mod process;
pub mod time;
/// Get a human-readable name for the syscall with the given number,
/// for use in debugging/tracing output.
///
/// Returns `"unknown"` for numbers without a dedicated entry.
pub fn name(number: usize) -> &'static str {
    match number {
        // Redox
        SYS_SUPERVISE => "supervise",
        // Unix
        SYS_BRK => "brk",
        SYS_CHDIR => "chdir",
        SYS_CLONE => "clone",
        SYS_CLOSE => "close",
        SYS_CLOCK_GETTIME => "clock_gettime",
        SYS_DUP => "dup",
        SYS_EXECVE => "execve",
        SYS_EXIT => "exit",
        SYS_FPATH => "fpath",
        SYS_FSTAT => "fstat",
        SYS_FSYNC => "fsync",
        SYS_FTRUNCATE => "ftruncate",
        SYS_FUTEX => "futex",
        SYS_GETPID => "getpid",
        SYS_IOPL => "iopl",
        // TODO: link
        SYS_LSEEK => "lseek",
        SYS_MKDIR => "mkdir",
        SYS_NANOSLEEP => "nanosleep",
        SYS_OPEN => "open",
        SYS_PIPE2 => "pipe2",
        SYS_READ => "read",
        SYS_RMDIR => "rmdir",
        SYS_UNLINK => "unlink",
        SYS_WAITPID => "waitpid",
        SYS_WRITE => "write",
        SYS_YIELD => "yield",
        _ => "unknown",
    }
}
/// Handle the syscall defined by the given registers.
///
/// AX defines which syscall to use. The arguments are provided in other registers, as specified by
/// the specific syscall.
///
/// The return value is placed in AX, unless otherwise specified.
pub fn handle(regs: &mut Regs) {
    {
        // NOTE(review): dereferencing the global context list here is
        // presumably sound only while this handler runs without competing
        // mutators — confirm against the kernel's concurrency model.
        let contexts = unsafe { &mut *::env().contexts.get() };
        if let Ok(cur) = contexts.current_mut() {
            // Record the in-flight syscall so it can be inspected (e.g. by
            // a supervisor) while the process is blocked.
            cur.current_syscall = Some((regs.ip, regs.ax, regs.bx, regs.cx, regs.dx));
            // debugln!("PID {}: {} @ {:X}: {} {} {:X} {:X} {:X}", cur.pid, cur.name, regs.ip, regs.ax, name(regs.ax), regs.bx, regs.cx, regs.dx);
            if cur.supervised {
                // Block the process.
                cur.blocked_syscall = true;
                cur.block("syscall::handle Supervise");
                // Clear the timer.
                cur.wake = None;
                // Spin (yielding to other contexts) until the supervisor
                // unblocks this process.
                loop {
                    if cur.blocked > 0 {
                        unsafe { context_switch() };
                    } else {
                        return;
                    }
                }
            }
        }
    }
    let result = match regs.ax {
        // These are arranged in such a way that the most frequent syscalls precede less frequent
        // ones, to achieve the best performance.
        SYS_YIELD => process::sched_yield(),
        SYS_FUTEX => process::futex(regs.bx as *mut i32, regs.cx, (regs.dx as isize) as i32, regs.si, regs.di as *mut i32),
        SYS_WRITE => fs::write(regs.bx, regs.cx as *mut u8, regs.dx),
        SYS_READ => fs::read(regs.bx, regs.cx as *mut u8, regs.dx),
        SYS_LSEEK => fs::lseek(regs.bx, regs.cx as isize, regs.dx),
        SYS_OPEN => fs::open(regs.bx as *const u8, regs.cx, regs.dx),
        SYS_CLOSE => fs::close(regs.bx),
        SYS_CLONE => process::clone(regs),
        SYS_MKDIR => fs::mkdir(regs.bx as *const u8, regs.cx, regs.dx),
        SYS_NANOSLEEP => time::nanosleep(regs.bx as *const TimeSpec, regs.cx as *mut TimeSpec),
        SYS_FPATH => fs::fpath(regs.bx, regs.cx as *mut u8, regs.dx),
        SYS_FSTAT => fs::fstat(regs.bx, regs.cx as *mut Stat),
        SYS_FSYNC => fs::fsync(regs.bx),
        SYS_FTRUNCATE => fs::ftruncate(regs.bx, regs.cx),
        SYS_DUP => fs::dup(regs.bx),
        SYS_IOPL => process::iopl(regs),
        SYS_CLOCK_GETTIME => time::clock_gettime(regs.bx, regs.cx as *mut TimeSpec),
        SYS_EXECVE => process::execve(regs.bx as *const u8, regs.cx as *const *const u8),
        SYS_EXIT => process::exit(regs.bx),
        SYS_GETPID => process::getpid(),
        // TODO: link
        SYS_PIPE2 => fs::pipe2(regs.bx as *mut usize, regs.cx),
        SYS_RMDIR => fs::rmdir(regs.bx as *const u8, regs.cx),
        SYS_UNLINK => fs::unlink(regs.bx as *const u8, regs.cx),
        SYS_WAITPID => process::waitpid(regs.bx as isize, regs.cx as *mut usize, regs.dx),
        SYS_BRK => memory::brk(regs.bx),
        SYS_CHDIR => fs::chdir(regs.bx as *const u8, regs.cx),
        SYS_SUPERVISE => process::supervise(regs.bx),
        _ => Err(Error::new(ENOSYS)),
    };
    {
        let contexts = unsafe { &mut *::env().contexts.get() };
        if let Ok(cur) = contexts.current_mut() {
            // debugln!("PID {}: {} @ {:X}: {} {} {:X} {:X} {:X} = {:?}", cur.pid, cur.name, regs.ip, regs.ax, name(regs.ax), regs.bx, regs.cx, regs.dx, result);
            // The syscall has completed; clear the in-flight marker.
            cur.current_syscall = None;
        }
    }
    // Success and error are multiplexed into the single AX return value.
    regs.ax = Error::mux(result);
}
|
// Machine-generated-style (svd2rust-like) register access API for
// OA_RES1_CTRL; `R`/`W` wrap the raw 32-bit register value.
#[doc = "Reader of register OA_RES1_CTRL"]
pub type R = crate::R<u32, super::OA_RES1_CTRL>;
#[doc = "Writer for register OA_RES1_CTRL"]
pub type W = crate::W<u32, super::OA_RES1_CTRL>;
#[doc = "Register OA_RES1_CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::OA_RES1_CTRL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// OA1_PWR_MODE: 3-bit field at bits 0..=2 (mask 0x07).
#[doc = "Reader of field `OA1_PWR_MODE`"]
pub type OA1_PWR_MODE_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `OA1_PWR_MODE`"]
pub struct OA1_PWR_MODE_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_PWR_MODE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
// OA1_DRIVE_STR_SEL: single-bit field at bit 3.
#[doc = "Reader of field `OA1_DRIVE_STR_SEL`"]
pub type OA1_DRIVE_STR_SEL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_DRIVE_STR_SEL`"]
pub struct OA1_DRIVE_STR_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_DRIVE_STR_SEL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// OA1_COMP_EN: single-bit field at bit 4.
#[doc = "Reader of field `OA1_COMP_EN`"]
pub type OA1_COMP_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_COMP_EN`"]
pub struct OA1_COMP_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_COMP_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// OA1_HYST_EN: single-bit field at bit 5.
#[doc = "Reader of field `OA1_HYST_EN`"]
pub type OA1_HYST_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_HYST_EN`"]
pub struct OA1_HYST_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_HYST_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// OA1_BYPASS_DSI_SYNC: single-bit field at bit 6.
#[doc = "Reader of field `OA1_BYPASS_DSI_SYNC`"]
pub type OA1_BYPASS_DSI_SYNC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_BYPASS_DSI_SYNC`"]
pub struct OA1_BYPASS_DSI_SYNC_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_BYPASS_DSI_SYNC_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
// OA1_DSI_LEVEL: single-bit field at bit 7.
#[doc = "Reader of field `OA1_DSI_LEVEL`"]
pub type OA1_DSI_LEVEL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_DSI_LEVEL`"]
pub struct OA1_DSI_LEVEL_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_DSI_LEVEL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// OA1_COMPINT: 2-bit enumerated field at bits 8..=9 (mask 0x03 << 8).
#[doc = "Opamp1 comparator edge detect for interrupt and pulse mode of DSI (trigger)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum OA1_COMPINT_A {
    #[doc = "0: Disabled, no interrupts will be detected"]
    DISABLE,
    #[doc = "1: Rising edge"]
    RISING,
    #[doc = "2: Falling edge"]
    FALLING,
    #[doc = "3: Both rising and falling edges"]
    BOTH,
}
impl From<OA1_COMPINT_A> for u8 {
    #[inline(always)]
    fn from(variant: OA1_COMPINT_A) -> Self {
        match variant {
            OA1_COMPINT_A::DISABLE => 0,
            OA1_COMPINT_A::RISING => 1,
            OA1_COMPINT_A::FALLING => 2,
            OA1_COMPINT_A::BOTH => 3,
        }
    }
}
#[doc = "Reader of field `OA1_COMPINT`"]
pub type OA1_COMPINT_R = crate::R<u8, OA1_COMPINT_A>;
impl OA1_COMPINT_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OA1_COMPINT_A {
        // All four 2-bit patterns are covered, so the reader can never
        // hold a value outside the enum.
        match self.bits {
            0 => OA1_COMPINT_A::DISABLE,
            1 => OA1_COMPINT_A::RISING,
            2 => OA1_COMPINT_A::FALLING,
            3 => OA1_COMPINT_A::BOTH,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `DISABLE`"]
    #[inline(always)]
    pub fn is_disable(&self) -> bool {
        *self == OA1_COMPINT_A::DISABLE
    }
    #[doc = "Checks if the value of the field is `RISING`"]
    #[inline(always)]
    pub fn is_rising(&self) -> bool {
        *self == OA1_COMPINT_A::RISING
    }
    #[doc = "Checks if the value of the field is `FALLING`"]
    #[inline(always)]
    pub fn is_falling(&self) -> bool {
        *self == OA1_COMPINT_A::FALLING
    }
    #[doc = "Checks if the value of the field is `BOTH`"]
    #[inline(always)]
    pub fn is_both(&self) -> bool {
        *self == OA1_COMPINT_A::BOTH
    }
}
#[doc = "Write proxy for field `OA1_COMPINT`"]
pub struct OA1_COMPINT_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_COMPINT_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: OA1_COMPINT_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "Disabled, no interrupts will be detected"]
    #[inline(always)]
    pub fn disable(self) -> &'a mut W {
        self.variant(OA1_COMPINT_A::DISABLE)
    }
    #[doc = "Rising edge"]
    #[inline(always)]
    pub fn rising(self) -> &'a mut W {
        self.variant(OA1_COMPINT_A::RISING)
    }
    #[doc = "Falling edge"]
    #[inline(always)]
    pub fn falling(self) -> &'a mut W {
        self.variant(OA1_COMPINT_A::FALLING)
    }
    #[doc = "Both rising and falling edges"]
    #[inline(always)]
    pub fn both(self) -> &'a mut W {
        self.variant(OA1_COMPINT_A::BOTH)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
        self.w
    }
}
// OA1_PUMP_EN: single-bit field at bit 11.
#[doc = "Reader of field `OA1_PUMP_EN`"]
pub type OA1_PUMP_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_PUMP_EN`"]
pub struct OA1_PUMP_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_PUMP_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
// OA1_BOOST_EN: single-bit field at bit 12.
#[doc = "Reader of field `OA1_BOOST_EN`"]
pub type OA1_BOOST_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_BOOST_EN`"]
pub struct OA1_BOOST_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_BOOST_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:2 - Opamp1 power level: see description of OA0_PWR_MODE"]
    #[inline(always)]
    pub fn oa1_pwr_mode(&self) -> OA1_PWR_MODE_R {
        // Three-bit field at positions 2:0.
        OA1_PWR_MODE_R::new((self.bits & 0x07) as u8)
    }
    #[doc = "Bit 3 - Opamp1 output strength select 0=1x, 1=10x This setting sets specific requirements for OA1_BOOST_EN and OA1_COMP_TRIM"]
    #[inline(always)]
    pub fn oa1_drive_str_sel(&self) -> OA1_DRIVE_STR_SEL_R {
        OA1_DRIVE_STR_SEL_R::new(self.bits & (1 << 3) != 0)
    }
    #[doc = "Bit 4 - Opamp1 comparator enable"]
    #[inline(always)]
    pub fn oa1_comp_en(&self) -> OA1_COMP_EN_R {
        OA1_COMP_EN_R::new(self.bits & (1 << 4) != 0)
    }
    #[doc = "Bit 5 - Opamp1 hysteresis enable (10mV)"]
    #[inline(always)]
    pub fn oa1_hyst_en(&self) -> OA1_HYST_EN_R {
        OA1_HYST_EN_R::new(self.bits & (1 << 5) != 0)
    }
    #[doc = "Bit 6 - Opamp1 bypass comparator output synchronization for DSI output: 0=synchronize, 1=bypass"]
    #[inline(always)]
    pub fn oa1_bypass_dsi_sync(&self) -> OA1_BYPASS_DSI_SYNC_R {
        OA1_BYPASS_DSI_SYNC_R::new(self.bits & (1 << 6) != 0)
    }
    #[doc = "Bit 7 - Opamp1 comparator DSI (trigger) out level : 0=pulse, each time an edge is detected (see OA1_COMPINT) a pulse is sent out on DSI 1=level, DSI output is a synchronized version of the comparator output"]
    #[inline(always)]
    pub fn oa1_dsi_level(&self) -> OA1_DSI_LEVEL_R {
        OA1_DSI_LEVEL_R::new(self.bits & (1 << 7) != 0)
    }
    #[doc = "Bits 8:9 - Opamp1 comparator edge detect for interrupt and pulse mode of DSI (trigger)"]
    #[inline(always)]
    pub fn oa1_compint(&self) -> OA1_COMPINT_R {
        // Two-bit field at positions 9:8.
        OA1_COMPINT_R::new(((self.bits >> 8) as u8) & 0x03)
    }
    #[doc = "Bit 11 - Opamp1 pump enable"]
    #[inline(always)]
    pub fn oa1_pump_en(&self) -> OA1_PUMP_EN_R {
        OA1_PUMP_EN_R::new(self.bits & (1 << 11) != 0)
    }
    #[doc = "Bit 12 - Opamp1 gain booster enable for class A output, for risk mitigation only, not user selectable. Value depends on the drive strength setting - 1x mode: set to 1; 10x mode: set to 0"]
    #[inline(always)]
    pub fn oa1_boost_en(&self) -> OA1_BOOST_EN_R {
        OA1_BOOST_EN_R::new(self.bits & (1 << 12) != 0)
    }
}
impl W {
    // Each accessor returns a write proxy that mutates the corresponding
    // field of `self.bits` in place and hands back `&mut W` for chaining.
    #[doc = "Bits 0:2 - Opamp1 power level: see description of OA0_PWR_MODE"]
    #[inline(always)]
    pub fn oa1_pwr_mode(&mut self) -> OA1_PWR_MODE_W {
        OA1_PWR_MODE_W { w: self }
    }
    #[doc = "Bit 3 - Opamp1 output strength select 0=1x, 1=10x This setting sets specific requirements for OA1_BOOST_EN and OA1_COMP_TRIM"]
    #[inline(always)]
    pub fn oa1_drive_str_sel(&mut self) -> OA1_DRIVE_STR_SEL_W {
        OA1_DRIVE_STR_SEL_W { w: self }
    }
    #[doc = "Bit 4 - Opamp1 comparator enable"]
    #[inline(always)]
    pub fn oa1_comp_en(&mut self) -> OA1_COMP_EN_W {
        OA1_COMP_EN_W { w: self }
    }
    #[doc = "Bit 5 - Opamp1 hysteresis enable (10mV)"]
    #[inline(always)]
    pub fn oa1_hyst_en(&mut self) -> OA1_HYST_EN_W {
        OA1_HYST_EN_W { w: self }
    }
    #[doc = "Bit 6 - Opamp1 bypass comparator output synchronization for DSI output: 0=synchronize, 1=bypass"]
    #[inline(always)]
    pub fn oa1_bypass_dsi_sync(&mut self) -> OA1_BYPASS_DSI_SYNC_W {
        OA1_BYPASS_DSI_SYNC_W { w: self }
    }
    #[doc = "Bit 7 - Opamp1 comparator DSI (trigger) out level : 0=pulse, each time an edge is detected (see OA1_COMPINT) a pulse is sent out on DSI 1=level, DSI output is a synchronized version of the comparator output"]
    #[inline(always)]
    pub fn oa1_dsi_level(&mut self) -> OA1_DSI_LEVEL_W {
        OA1_DSI_LEVEL_W { w: self }
    }
    #[doc = "Bits 8:9 - Opamp1 comparator edge detect for interrupt and pulse mode of DSI (trigger)"]
    #[inline(always)]
    pub fn oa1_compint(&mut self) -> OA1_COMPINT_W {
        OA1_COMPINT_W { w: self }
    }
    #[doc = "Bit 11 - Opamp1 pump enable"]
    #[inline(always)]
    pub fn oa1_pump_en(&mut self) -> OA1_PUMP_EN_W {
        OA1_PUMP_EN_W { w: self }
    }
    #[doc = "Bit 12 - Opamp1 gain booster enable for class A output, for risk mitigation only, not user selectable. Value depends on the drive strength setting - 1x mode: set to 1; 10x mode: set to 0"]
    #[inline(always)]
    pub fn oa1_boost_en(&mut self) -> OA1_BOOST_EN_W {
        OA1_BOOST_EN_W { w: self }
    }
}
|
use std::fs::File;
use std::io::{BufReader, BufWriter};
use serde::{Deserialize, Serialize};
use dmc::Dmc;
use noise::Noise;
use square::Square;
use triangle::Triangle;
use crate::decay::Decay;
use crate::filters::{Filter, HighPass, LowPass};
use crate::savable::Savable;
// http://wiki.nesdev.com/w/index.php/APU_Length_Counter
/// Length counter values table
/// (32 entries, indexed by a 5-bit value)
const LENGTH_TABLE: [u8; 32] = [
    10, 254, 20, 2, 40, 4, 80, 6, 160, 8, 60, 10, 14, 12, 26, 14, 12, 16, 24, 18, 48, 20, 96, 22,
    192, 24, 72, 26, 16, 28, 32, 30,
];
// --- CPU-visible APU register addresses ($4000-$4017) ---
/// Square channel 1 volume register
const SQ1_VOL: u16 = 0x4000;
/// Square channel 1 sweep register
const SQ1_SWEEP: u16 = 0x4001;
/// Square channel 1 timer low register
const SQ1_LO: u16 = 0x4002;
/// Square channel 1 timer high register
const SQ1_HI: u16 = 0x4003;
/// Square channel 2 volume register
const SQ2_VOL: u16 = 0x4004;
/// Square channel 2 sweep register
const SQ2_SWEEP: u16 = 0x4005;
/// Square channel 2 timer low register
const SQ2_LO: u16 = 0x4006;
/// Square channel 2 timer high register
const SQ2_HI: u16 = 0x4007;
/// Triangle channel linear counter register
const TRI_LINEAR: u16 = 0x4008;
/// Triangle channel timer low register
const TRI_LO: u16 = 0x400A;
/// Triangle channel timer high register
const TRI_HI: u16 = 0x400B;
/// Noise channel volume register
const NOISE_VOL: u16 = 0x400C;
/// Noise channel timer low register
const NOISE_LO: u16 = 0x400E;
/// Noise channel timer high register
const NOISE_HI: u16 = 0x400F;
/// DMC frequency register
const DMC_FREQ: u16 = 0x4010;
/// DMC raw sample register
const DMC_RAW: u16 = 0x4011;
/// DMC start (address) register
const DMC_START: u16 = 0x4012;
/// DMC length register
const DMC_LEN: u16 = 0x4013;
/// Sound status / enable register
const SND_CHN: u16 = 0x4015;
/// Frame counter register
const FRAME_COUNTER: u16 = 0x4017;
mod dmc;
mod noise;
mod square;
mod triangle;
/// Sequencer stepping mode of the frame counter
#[derive(PartialEq, Serialize, Deserialize)]
enum SequencerMode {
    /// 4-step sequence; the only mode that can raise a frame IRQ
    FourStep,
    /// 5-step sequence; never raises a frame IRQ
    FiveStep,
}
/// NES audio processing unit
pub struct Apu {
    // Cpu cycles elapsed (wrapping); parity gates the square/noise timers
    cycles: u32,
    // divider deriving the ~240Hz frame-counter clock from the cpu clock
    hz240_counter: u16,
    // frame IRQ inhibit flag (bit 6 of $4017)
    irq_off: bool,
    // set when the frame counter requests an IRQ; cleared when polled
    pending_irq: Option<bool>,
    sq1: Square,
    sq2: Square,
    tri: Triangle,
    noise: Noise,
    dmc: Dmc,
    // current step of the frame sequencer (wraps at 4 or 5 depending on mode)
    sequencer: u8,
    mode: SequencerMode,
    // smooths the triangle output to reduce audio pops (see `output`)
    tri_decay: Decay,
    // output filter chain; not serialized, reset on load
    filters: Vec<Box<dyn Filter>>,
}
impl Savable for Apu {
    /// Serializes the Apu state field-by-field with bincode.
    ///
    /// NOTE: `tri_decay` and `filters` are not serialized; `load` instead
    /// resets the existing filters. The field order here must match `load`
    /// exactly, since bincode carries no field names.
    fn save(&self, output: &mut BufWriter<File>) -> bincode::Result<()> {
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.cycles)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.hz240_counter)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.irq_off)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.pending_irq)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.sq1)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.sq2)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.tri)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.noise)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.dmc)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.sequencer)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.mode)?;
        Ok(())
    }
    /// Restores the Apu state saved by `save`, in the same field order,
    /// then resets the (unserialized) filter state.
    fn load(&mut self, input: &mut BufReader<File>) -> bincode::Result<()> {
        self.cycles = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.hz240_counter = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.irq_off = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.pending_irq = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.sq1 = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.sq2 = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.tri = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.noise = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.dmc = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.sequencer = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.mode = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.filters.iter_mut().for_each(|f| f.reset());
        Ok(())
    }
}
impl Apu {
/// Builds the output filter chain for the given sample rate:
/// a 90Hz high-pass followed by a 14kHz low-pass (Q = sqrt(2)).
fn new_filters(sample_rate: f32) -> Vec<Box<dyn Filter>> {
    vec![
        Box::new(HighPass::new(90.0, sample_rate, 2.0f32.sqrt())),
        // The hardware also has a 440Hz high-pass; intentionally omitted
        // (see the comment in `output`: the bass sounds better without it).
        // Box::new(HighPass::new(440.0, sample_rate, 2.0f32.sqrt())),
        Box::new(LowPass::new(14000.0, sample_rate, 2.0f32.sqrt())),
    ]
}
/// Creates a new Apu in 4-step mode with all channels at their default
/// state; `sample_rate` is used to configure the output filters.
pub fn new(sample_rate: f32) -> Self {
    Self {
        cycles: 0,
        hz240_counter: 0,
        irq_off: false,
        pending_irq: None,
        sq1: Square::new(),
        sq2: Square::new(),
        tri: Triangle::new(),
        noise: Noise::new(),
        dmc: Dmc::new(),
        sequencer: 0,
        mode: SequencerMode::FourStep,
        tri_decay: Decay::new(0.1),
        filters: Self::new_filters(sample_rate),
    }
}
/// Reads an Apu register. Only the status register ($4015) is readable;
/// every other address returns 0.
///
/// NOTE: reading the status register has side effects — it clears the
/// frame IRQ flag (`take`) and the DMC IRQ flag (`poll_irq`).
pub fn read(&mut self, addr: u16) -> u8 {
    // The Apu can only be read from the status register
    match addr {
        SND_CHN => {
            // Returns IF-D NT21
            // I: DMC Interrupt requested and clears it if set
            // F: Apu interrupt flag and clears it if set
            // D: 1 if DMC length counter > 0
            // N: 1 if noise length counter > 0
            // T: 1 if triangle length counter > 0
            // 2: 1 if square 2 length counter > 0
            // 1: 1 if square 1 length counter > 0
            let sq1 = (self.sq1.length_counter() > 0) as u8;
            let sq2 = (self.sq2.length_counter() > 0) as u8;
            let tri = (self.tri.length_counter() > 0) as u8;
            let noise = (self.noise.length_counter() > 0) as u8;
            let dmc = (self.dmc.length_counter() > 0) as u8;
            let irq = self.pending_irq.take().is_some() as u8;
            let dmc_irq = self.dmc.poll_irq() as u8;
            dmc_irq << 7 | irq << 6 | dmc << 4 | noise << 3 | tri << 2 | sq2 << 1 | sq1
        }
        _ => 0,
    }
}
/// Handles a cpu write to one of the Apu registers ($4000-$4017).
/// Writes to unmapped addresses are ignored.
pub fn write(&mut self, addr: u16, data: u8) {
    match addr {
        SQ1_VOL => self.sq1.write_vol(data),
        SQ1_SWEEP => self.sq1.write_sweep(data),
        SQ1_LO => self.sq1.write_lo(data),
        SQ1_HI => self.sq1.write_hi(data),
        SQ2_VOL => self.sq2.write_vol(data),
        SQ2_SWEEP => self.sq2.write_sweep(data),
        SQ2_LO => self.sq2.write_lo(data),
        SQ2_HI => self.sq2.write_hi(data),
        TRI_LINEAR => self.tri.write_linear(data),
        TRI_LO => self.tri.write_lo(data),
        TRI_HI => self.tri.write_hi(data),
        NOISE_VOL => self.noise.write_vol(data),
        NOISE_LO => self.noise.write_lo(data),
        NOISE_HI => self.noise.write_hi(data),
        DMC_FREQ => self.dmc.write_freq(data),
        DMC_RAW => self.dmc.write_raw(data),
        DMC_START => self.dmc.write_start(data),
        DMC_LEN => self.dmc.write_len(data),
        SND_CHN => {
            // ---D NT21
            // Enables or disable a channel based on the bits of data
            self.sq1.set_enabled(data & 0x1 != 0);
            self.sq2.set_enabled(data & 0x2 != 0);
            self.tri.set_enabled(data & 0x4 != 0);
            self.noise.set_enabled(data & 0x8 != 0);
            self.dmc.set_enabled(data & 0x10 != 0);
        }
        FRAME_COUNTER => {
            // MI-- ----
            // Sets the stepping based on M (bit 7): 0 = 4-step, 1 = 5-step.
            // BUG FIX: the test was previously inverted (`data & 0x80 == 0`
            // selected FiveStep), swapping the two sequencer modes.
            self.mode = if data & 0x80 != 0 {
                SequencerMode::FiveStep
            } else {
                SequencerMode::FourStep
            };
            // Reset counter and sequencer
            self.hz240_counter = 0;
            self.sequencer = 0;
            // Sets the IRQ disable bit based on I (bit 6)
            self.irq_off = data & 0x40 != 0;
            // Clear the IRQ flag if set to disabled
            if self.irq_off {
                self.dmc.poll_irq();
                self.pending_irq = None;
            }
        }
        _ => {}
    }
}
/// Clocks the Apu once (called once per Cpu cycle)
pub fn clock(&mut self) {
    // Count the cycles
    self.cycles = self.cycles.wrapping_add(1);
    // The triangle channel's timer is clocked at Cpu rate
    // The DMC rate counter is also clocked at Cpu rate
    self.tri.tick_timer();
    self.dmc.tick();
    // Square and noise tick at half the cpu clock
    if self.cycles % 2 == 0 {
        self.sq1.tick_timer();
        self.sq2.tick_timer();
        self.noise.tick_timer();
    }
    // The frame counter runs at 240Hz.
    // We need to divide the cpu clock to get the right timing
    // (1,789,773Hz / 2) / 240Hz = ~14915
    self.hz240_counter += 2;
    if self.hz240_counter >= 14915 {
        self.hz240_counter -= 14915;
        self.sequencer += 1;
        match self.mode {
            SequencerMode::FourStep => self.sequencer %= 4,
            SequencerMode::FiveStep => self.sequencer %= 5,
        }
        // Four step mode can request an interrupt on the last step
        if !self.irq_off && self.mode == SequencerMode::FourStep && self.sequencer == 0 {
            self.pending_irq = Some(true);
        }
        // Half tick happens on step 1 and 3
        // NOTE(review): this tests hz240_counter, not the sequencer. After
        // the subtraction above the counter is always 0 or 1 and alternates
        // each frame tick (it advances by 2 and 14915 is odd), so this
        // fires on alternate ticks — which lines up with steps 1 and 3 in
        // 4-step mode, but the phase drifts in 5-step mode (odd period).
        // Presumably intended to be `(self.sequencer & 0x5) == 1`; TODO
        // confirm against NesDev frame-counter behavior before changing.
        let half_tick = (self.hz240_counter & 0x5) == 1;
        // Full tick happens on every step (Step 5 mode does nothing on last step)
        let full_tick = self.sequencer < 4;
        // Sweep tick and length tick
        if half_tick {
            self.sq1.tick_length();
            self.sq2.tick_length();
            self.sq1.tick_sweep(square::Channel::One);
            self.sq2.tick_sweep(square::Channel::Two);
            self.tri.tick_length();
            self.noise.tick_length();
        }
        // Envelope and linear (triangle only) tick
        if full_tick {
            self.sq1.tick_envelope();
            self.sq2.tick_envelope();
            self.noise.tick_envelope();
            self.tri.tick_counter();
        }
    }
}
/// Polls the IRQ flag, clearing it in the process
pub fn poll_irq(&mut self) -> bool {
    // IRQ can be requested by the Apu or the DMC.
    // Bitwise `|` (not `||`) is deliberate: both operands must be
    // evaluated so the frame IRQ (`take`) and the DMC IRQ are both cleared.
    self.pending_irq.take().is_some() | self.dmc.poll_irq()
}
/// Returns if the DMC needs a new audio sample or not
pub fn need_dmc_sample(&mut self) -> bool {
    self.dmc.need_sample()
}
/// Sets the audio sample of the DMC
pub fn set_dmc_sample(&mut self, sample: u8) {
    self.dmc.set_sample(sample);
}
/// Gets the address of the next DMC audio sample
pub fn dmc_sample_address(&self) -> u16 {
    self.dmc.address()
}
/// Resets the Apu and its channels
///
/// NOTE(review): `irq_off`, `tri_decay` and `filters` are left untouched
/// here — confirm that is intentional.
pub fn reset(&mut self) {
    self.cycles = 0;
    self.hz240_counter = 0;
    self.sequencer = 0;
    self.pending_irq = None;
    self.mode = SequencerMode::FourStep;
    self.sq1.reset();
    self.sq2.reset();
    self.tri.reset();
    self.noise.reset();
    self.dmc.reset();
}
/// Gets an audio sample (mixed, decayed and filtered)
pub fn output(&mut self) -> f32 {
    // Mix the audio according to NesDev
    // http://wiki.nesdev.com/w/index.php/APU_Mixer
    let sq1 = self.sq1.output();
    let sq2 = self.sq2.output();
    // When both squares are silent, 8128/0 = +inf and the term becomes
    // 95.88 / +inf = 0.0 — IEEE float semantics make the formula safe.
    let pulse = 95.88 / (100.0 + (8128.0 / (sq1 as f32 + sq2 as f32)));
    // I apply a "decay" on the triangle channel to reduce audio pops
    // Is only applied if the volume goes from a high value to zero
    let tri = self.tri_decay.decay(self.tri.output() as f32);
    let noise = self.noise.output() as f32;
    let dmc = self.dmc.output() as f32;
    // Same trick as above: an all-zero TND group yields 0.0, not NaN.
    let tnd = 159.79
        / (100.0 + (1.0 / ((tri as f32 / 8227.0) + (noise / 12241.0) + (dmc / 22638.0))));
    let sample = pulse + tnd;
    // Apply filters
    // The NES has 3 filters applied
    // High-pass at 90Hz
    // High-pass at 440Hz (I removed this one because the bass sounds way better without it)
    // Low-pass at 14000Hz
    self.filters
        .iter_mut()
        .fold(sample, |sample, filter| filter.filter(sample))
}
}
|
use quote::quote_spanned;
use syn::parse_quote;
use super::{
FlowProperties, FlowPropertyVal, OperatorCategory, OperatorConstraints, WriteContextArgs,
RANGE_1,
};
/// > 2 input streams of type S and T, 1 output stream of type (S, T)
///
/// Forms the cross-join (Cartesian product) of the items in the input streams, returning all
/// tupled pairs.
///
/// ```hydroflow
/// source_iter(vec!["happy", "sad"]) -> [0]my_join;
/// source_iter(vec!["dog", "cat"]) -> [1]my_join;
/// my_join = cross_join() -> assert_eq([("happy", "dog"), ("sad", "dog"), ("happy", "cat"), ("sad", "cat")]);
/// ```
///
/// `cross_join` can be provided with one or two generic lifetime persistence arguments
/// in the same way as [`join`](#join), see [`join`'s documentation](#join) for more info.
///
/// ```rustbook
/// let (input_send, input_recv) = hydroflow::util::unbounded_channel::<&str>();
/// let mut flow = hydroflow::hydroflow_syntax! {
///     my_join = cross_join::<'tick>();
///     source_iter(["hello", "bye"]) -> [0]my_join;
///     source_stream(input_recv) -> [1]my_join;
///     my_join -> for_each(|(s, t)| println!("({}, {})", s, t));
/// };
/// input_send.send("oakland").unwrap();
/// flow.run_tick();
/// input_send.send("san francisco").unwrap();
/// flow.run_tick();
/// ```
/// Prints only `"(hello, oakland)"` and `"(bye, oakland)"`. The `source_iter` is only included in
/// the first tick, then forgotten, so when `"san francisco"` arrives on input `[1]` in the second tick,
/// there is nothing for it to match with from input `[0]`, and therefore it does not appear in the output.
pub const CROSS_JOIN: OperatorConstraints = OperatorConstraints {
    name: "cross_join",
    categories: &[OperatorCategory::MultiIn],
    hard_range_inn: &(2..=2),
    soft_range_inn: &(2..=2),
    hard_range_out: RANGE_1,
    soft_range_out: RANGE_1,
    num_args: 0,
    persistence_args: &(0..=2),
    type_args: &(0..=1),
    is_external_input: false,
    ports_inn: Some(|| super::PortListSpec::Fixed(parse_quote! { 0, 1 })),
    ports_out: None,
    properties: FlowProperties {
        deterministic: FlowPropertyVal::Preserve,
        monotonic: FlowPropertyVal::Preserve,
        inconsistency_tainted: false,
    },
    input_delaytype_fn: |_| None,
    write_fn: |wc @ &WriteContextArgs {
        op_span,
        ident,
        inputs,
        ..
    },
    diagnostics| {
        // Implemented by delegating to `join` with a unit key: tag every
        // element on both sides with `()`, so the equi-join matches all
        // pairs, then strip the unit key off the output tuples.
        let mut output = (super::join::JOIN.write_fn)(wc, diagnostics)?;
        let lhs = &inputs[0];
        let rhs = &inputs[1];
        let write_iterator = output.write_iterator;
        output.write_iterator = quote_spanned!(op_span=>
            let #lhs = #lhs.map(|a| ((), a));
            let #rhs = #rhs.map(|b| ((), b));
            #write_iterator
            let #ident = #ident.map(|((), (a, b))| (a, b));
        );
        Ok(output)
    },
};
|
#[aoc(day1, part1)]
/// Sums the fuel requirement of every module: `mass / 3`, rounded down,
/// minus 2, one mass per input line.
pub fn day_1_p1(input: &str) -> u64 {
    input
        .lines()
        .map(|line| line.parse::<u64>().unwrap() / 3 - 2)
        .sum()
}
#[aoc(day1, part2)]
/// Sums the fuel requirement of every module, counting the fuel needed to
/// carry the fuel itself (repeatedly apply `m / 3 - 2` until it reaches 0).
pub fn day_1_p2(input: &str) -> u64 {
    fn calc_fuel(mass: u64) -> u64 {
        // BUG FIX: a plain `mass / 3 - 2` underflows u64 (panicking in
        // debug builds) for masses below 6; saturate to 0 instead.
        let mut additional_fuel = (mass / 3).saturating_sub(2);
        let mut total_fuel = additional_fuel;
        while additional_fuel > 0 {
            if let Some(x) = (additional_fuel / 3).checked_sub(2) {
                additional_fuel = x;
                total_fuel += x;
            } else {
                break;
            }
        }
        total_fuel
    }
    input
        .lines()
        .map(|n| n.parse::<u64>().unwrap())
        .map(calc_fuel)
        .sum::<u64>()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values presumably taken from the Advent of Code 2019 day 1
    // puzzle examples — TODO confirm against the puzzle text.
    #[test]
    fn p1_sample1() {
        assert_eq!(day_1_p1("12"), 2)
    }
    #[test]
    fn p1_sample2() {
        assert_eq!(day_1_p1("14"), 2)
    }
    #[test]
    fn p1_sample3() {
        assert_eq!(day_1_p1("1969"), 654)
    }
    #[test]
    fn p1_sample4() {
        assert_eq!(day_1_p1("100756"), 33583)
    }
    #[test]
    fn p2_sample1() {
        assert_eq!(day_1_p2("12"), 2)
    }
    #[test]
    fn p2_sample2() {
        assert_eq!(day_1_p2("1969"), 966)
    }
    #[test]
    fn p2_sample3() {
        assert_eq!(day_1_p2("100756"), 50346)
    }
}
|
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::ffi::OsStr;
use std::io::Cursor;
use std::path::{Path, PathBuf};
use ::image::{
codecs::png::{PngDecoder, PngEncoder},
ColorType, GenericImage, GenericImageView, ImageBuffer, ImageDecoder, Rgba, RgbaImage,
};
use serde::{Deserialize, Serialize};
use skulpin::skia_safe as skia;
use skulpin::skia_safe::*;
use crate::viewport::Viewport;
/// A paint tool: draws with a color, or erases (clears pixels).
#[derive(Clone, Debug)]
pub enum Brush {
    Draw { color: Color4f, stroke_width: f32 },
    Erase { stroke_width: f32 },
}
/// A single point of a stroke, together with the brush in effect at it.
#[derive(Debug)]
pub struct StrokePoint {
    pub point: Point,
    pub brush: Brush,
}
impl Brush {
    /// Builds a skia `Paint` configured for this brush.
    ///
    /// Both variants produce a round-capped, non-antialiased stroke;
    /// erasing is drawing with the `Clear` blend mode.
    pub fn as_paint(&self) -> Paint {
        let mut paint = Paint::new(Color4f::from(Color::TRANSPARENT), None);
        paint.set_anti_alias(false);
        paint.set_style(paint::Style::Stroke);
        paint.set_stroke_cap(paint::Cap::Round);
        let stroke_width = match self {
            Self::Draw { color, stroke_width } => {
                paint.set_color(color.to_color());
                *stroke_width
            },
            Self::Erase { stroke_width } => {
                paint.set_blend_mode(BlendMode::Clear);
                *stroke_width
            },
        };
        paint.set_stroke_width(stroke_width);
        paint
    }
}
/// A square portion of the canvas, backed by a single surface that is
/// subdivided into a 4x4 grid of individually addressable sub-chunks.
pub struct Chunk {
    // pixels of the whole master chunk; RefCell allows drawing through &self
    surface: RefCell<Surface>,
    // cached PNG encoding per sub-chunk; None = needs (re)encoding
    png_data: [Option<Vec<u8>>; Self::SUB_COUNT],
    // which sub-chunks contain any non-transparent pixels
    non_empty_subs: [bool; Self::SUB_COUNT],
    // which sub-chunks have been written to disk since last modification
    saved_subs: [bool; Self::SUB_COUNT],
}
impl Chunk {
// pixel size of a single sub-chunk
pub const SIZE: (u32, u32) = (256, 256);
// each master chunk holds a 4x4 grid of sub-chunks
const SUB_CHUNKS: (u32, u32) = (4, 4);
const SUB_COUNT: usize = (Self::SUB_CHUNKS.0 * Self::SUB_CHUNKS.1) as usize;
// pixel size of the backing surface (SIZE * SUB_CHUNKS = 1024x1024)
const SURFACE_SIZE: (u32, u32) = (
    (Self::SIZE.0 * Self::SUB_CHUNKS.0) as u32,
    (Self::SIZE.1 * Self::SUB_CHUNKS.1) as u32,
);
// creates a fresh, empty chunk with a surface compatible with `canvas`;
// panics if the surface cannot be created
fn new(canvas: &mut Canvas) -> Self {
    let surface = match canvas.new_surface(&Self::image_info(Self::SURFACE_SIZE), None) {
        Some(surface) => surface,
        None => panic!("failed to create a surface for storing the chunk"),
    };
    Self {
        surface: RefCell::new(surface),
        png_data: Default::default(),
        non_empty_subs: [false; Self::SUB_COUNT],
        saved_subs: [false; Self::SUB_COUNT],
    }
}
// top-left on-canvas pixel position of a master chunk
fn screen_position(chunk_position: (i32, i32)) -> Point {
    Point::new(
        (chunk_position.0 * Self::SURFACE_SIZE.0 as i32) as _,
        (chunk_position.1 * Self::SURFACE_SIZE.1 as i32) as _,
    )
}
// reads the whole surface back into an RGBA image buffer
fn download_image(&self) -> RgbaImage {
    let mut image_buffer = ImageBuffer::from_pixel(Self::SURFACE_SIZE.0, Self::SURFACE_SIZE.1, Rgba([0, 0, 0, 0]));
    self.surface.borrow_mut().read_pixels(
        &Self::image_info(Self::SURFACE_SIZE),
        &mut image_buffer,
        // row stride in bytes: 4 bytes (RGBA) per pixel
        Self::SURFACE_SIZE.0 as usize * 4,
        (0, 0),
    );
    image_buffer
}
// writes `image` into the surface at the given pixel offset
fn upload_image(&mut self, image: RgbaImage, offset: (u32, u32)) {
    let pixmap = Pixmap::new(
        &Self::image_info(image.dimensions()),
        &image,
        image.width() as usize * 4,
    );
    self.surface
        .borrow_mut()
        .write_pixels_from_pixmap(&pixmap, (offset.0 as i32, offset.1 as i32));
}
// get master chunk position from absolute position
fn master(chunk_position: (i32, i32)) -> (i32, i32) {
    (
        chunk_position.0.div_euclid(Self::SUB_CHUNKS.0 as i32),
        chunk_position.1.div_euclid(Self::SUB_CHUNKS.1 as i32),
    )
}
// get sub chunk position from absolute position
// packs the 2-bit x and y remainders as (x << 2) | y; relies on
// SUB_CHUNKS being (4, 4)
fn sub(chunk_position: (i32, i32)) -> usize {
    let x_bits = chunk_position.0.rem_euclid(Self::SUB_CHUNKS.0 as i32) as usize;
    let y_bits = chunk_position.1.rem_euclid(Self::SUB_CHUNKS.1 as i32) as usize;
    (x_bits << 2) | y_bits
}
// position of the given sub in a master chunk (inverse of the packing above)
fn sub_position(sub: usize) -> (u32, u32) {
    (((sub & 0b1100) >> 2) as u32, (sub & 0b11) as u32)
}
// on-image position of the given sub in a master chunk
fn sub_screen_position(sub: usize) -> (u32, u32) {
    (
        ((sub & 0b1100) >> 2) as u32 * Self::SIZE.0,
        (sub & 0b11) as u32 * Self::SIZE.1,
    )
}
// network position of a sub chunk in a master chunk
fn chunk_position(master_position: (i32, i32), sub: usize) -> (i32, i32) {
    let sub_position = Self::sub_position(sub);
    (
        (master_position.0 * Self::SUB_CHUNKS.0 as i32) + sub_position.0 as i32,
        (master_position.1 * Self::SUB_CHUNKS.1 as i32) + sub_position.1 as i32,
    )
}
// reencodes PNG data if necessary.
// PNG data is reencoded upon outside request, but invalidated if the chunk is modified
// NOTE: when the requested sub's cache is missing, ALL subs of this master
// chunk are downloaded and re-encoded in one pass, and `non_empty_subs` is
// refreshed as a side effect. Returns None for empty or failed subs.
fn png_data(&mut self, sub: usize) -> Option<&[u8]> {
    if self.png_data[sub].is_none() {
        eprintln!("   png data doesn't exist, encoding");
        let chunk_image = self.download_image();
        for sub in 0..Self::SUB_COUNT {
            let (x, y) = Self::sub_screen_position(sub);
            let sub_image = chunk_image.view(x, y, Self::SIZE.0, Self::SIZE.1).to_image();
            if Self::image_is_empty(&sub_image) {
                self.non_empty_subs[sub] = false;
                continue
            }
            let mut bytes: Vec<u8> = Vec::new();
            match PngEncoder::new(Cursor::new(&mut bytes)).encode(
                &sub_image,
                sub_image.width(),
                sub_image.height(),
                ColorType::Rgba8,
            ) {
                Ok(()) => (),
                Err(error) => {
                    // best-effort: skip this sub, keep encoding the others
                    eprintln!("error while encoding: {}", error);
                    continue
                },
            }
            self.png_data[sub] = Some(bytes);
            self.non_empty_subs[sub] = true;
        }
    }
    self.png_data[sub].as_deref()
}
// decodes received PNG data into the given sub-chunk.
// NOTE: non-RGBA data and wrongly-sized images are silently ignored
// (returning Ok), not treated as errors; only decoder failures propagate.
fn decode_png_data(&mut self, sub: usize, data: &[u8]) -> anyhow::Result<()> {
    let decoder = PngDecoder::new(Cursor::new(data))?;
    if decoder.color_type() != ColorType::Rgba8 {
        eprintln!("received non-RGBA image data, ignoring");
        return Ok(())
    }
    if decoder.dimensions() != Self::SIZE {
        eprintln!(
            "received chunk with invalid size. got: {:?}, expected: {:?}",
            decoder.dimensions(),
            Self::SIZE
        );
        return Ok(())
    }
    let mut image = RgbaImage::from_pixel(Self::SIZE.0, Self::SIZE.1, Rgba([0, 0, 0, 0]));
    decoder.read_image(&mut image)?;
    // fully transparent chunks are dropped: nothing to draw or cache
    if !Self::image_is_empty(&image) {
        self.upload_image(image, Self::sub_screen_position(sub));
        self.png_data[sub] = Some(Vec::from(data));
        self.non_empty_subs[sub] = true;
    }
    Ok(())
}
// marks a sub-chunk as modified: its cached PNG is stale and it needs saving
fn mark_dirty(&mut self, sub: usize) {
    self.png_data[sub] = None;
    self.non_empty_subs[sub] = true;
    self.saved_subs[sub] = false;
}
// marks a sub-chunk as written to disk
fn mark_saved(&mut self, sub: usize) {
    self.saved_subs[sub] = true;
}
// true if every byte (all RGBA components) is zero, i.e. fully transparent
fn image_is_empty(image: &RgbaImage) -> bool {
    image.iter().all(|x| *x == 0)
}
// skia image info matching the chunk pixel format (premultiplied RGBA8888)
fn image_info(size: (u32, u32)) -> ImageInfo {
    ImageInfo::new(
        ISize::new(size.0 as i32, size.1 as i32),
        skia::ColorType::RGBA8888,
        AlphaType::Premul,
        None,
    )
}
}
/// The paint canvas: a sparse collection of drawable chunks.
pub struct PaintCanvas {
    // master chunks, keyed by master-chunk position; created on demand
    chunks: HashMap<(i32, i32), Chunk>,
    // this set contains all chunks that have already been visited in the current stroke() call
    stroked_chunks: HashSet<(i32, i32)>,
    // path the canvas was last saved to / loaded from, if any
    filename: Option<PathBuf>,
}
/// Version of the `canvas.toml` manifest format written by `save_as_netcanv`
pub const CANVAS_TOML_VERSION: u32 = 1;
/// Contents of a `.netcanv` save's `canvas.toml` manifest
#[derive(Serialize, Deserialize)]
struct CanvasToml {
    version: u32,
    // more stuff to be added in the future
}
impl PaintCanvas {
/// Creates an empty canvas with no chunks and no associated file.
pub fn new() -> Self {
    Self {
        chunks: HashMap::new(),
        stroked_chunks: HashSet::new(),
        filename: None,
    }
}
/// Lazily creates the master chunk at `position` if it does not exist yet.
fn ensure_chunk_exists(&mut self, canvas: &mut Canvas, position: (i32, i32)) {
    // Entry API: one hash lookup instead of `contains_key` + `insert`,
    // and `Chunk::new` only runs when the slot is actually vacant.
    self.chunks
        .entry(position)
        .or_insert_with(|| Chunk::new(canvas));
}
/// Draws a line stroke from `from` to `to` with the given brush, creating
/// any chunks the stroke touches along the way.
pub fn stroke(&mut self, canvas: &mut Canvas, from: impl Into<Point>, to: impl Into<Point>, brush: &Brush) {
    let a = from.into();
    let b = to.into();
    // Walk the segment in ~4px steps so every chunk the stroke crosses
    // gets visited, even when `a` and `b` are far apart.
    let step_count = i32::max((Point::distance(a, b) / 4.0) as _, 2);
    let paint = brush.as_paint();
    let stroke_width = paint.stroke_width();
    let half_stroke_width = stroke_width / 2.0;
    let mut delta = b - a;
    delta.x /= step_count as f32;
    delta.y /= step_count as f32;
    let mut p = a;
    self.stroked_chunks.clear();
    for _ in 1..=step_count {
        // Expand the sample point by half the stroke width so that wide
        // brushes touch every chunk they overlap.
        let top_left = p - Point::new(half_stroke_width, half_stroke_width);
        let bottom_right = p + Point::new(half_stroke_width, half_stroke_width);
        let top_left_chunk = (
            (top_left.x / Chunk::SIZE.0 as f32).floor() as i32,
            (top_left.y / Chunk::SIZE.0 as f32).floor() as i32,
        );
        let bottom_right_chunk = (
            (bottom_right.x / Chunk::SIZE.1 as f32).ceil() as i32,
            (bottom_right.y / Chunk::SIZE.1 as f32).ceil() as i32,
        );
        for y in top_left_chunk.1..bottom_right_chunk.1 {
            for x in top_left_chunk.0..bottom_right_chunk.0 {
                let chunk_position = (x, y);
                let master = Chunk::master(chunk_position);
                let sub = Chunk::sub(chunk_position);
                // Each master chunk is drawn to at most once per stroke():
                // the whole line is rendered into it in local coordinates.
                if !self.stroked_chunks.contains(&master) {
                    self.ensure_chunk_exists(canvas, master);
                    let chunk = self.chunks.get_mut(&master).unwrap();
                    let screen_position = Chunk::screen_position(master);
                    chunk
                        .surface
                        .borrow_mut()
                        .canvas()
                        .draw_line(a - screen_position, b - screen_position, &paint);
                    chunk.mark_dirty(sub);
                }
                self.stroked_chunks.insert(master);
            }
        }
        // BUG FIX: advance the sample point once per *step*. Previously
        // this sat inside the innermost chunk loop, so `p` advanced once
        // per visited chunk and could overshoot, skipping chunks entirely.
        p.offset(delta);
    }
}
/// Draws every chunk currently visible in `viewport` onto `canvas`;
/// positions missing from the chunk map are simply skipped.
pub fn draw_to(&self, canvas: &mut Canvas, viewport: &Viewport, window_size: (f32, f32)) {
    for chunk_position in viewport.visible_tiles(Chunk::SURFACE_SIZE, window_size) {
        if let Some(chunk) = self.chunks.get(&chunk_position) {
            let screen_position = Chunk::screen_position(chunk_position);
            // why is the position parameter a Size? only rust-skia devs know.
            chunk
                .surface
                .borrow_mut()
                .draw(canvas, (screen_position.x, screen_position.y), None);
        }
    }
}
/// Returns the cached (or freshly encoded) PNG bytes for the sub-chunk at
/// `chunk_position`; None if the chunk doesn't exist or the sub is empty.
pub fn png_data(&mut self, chunk_position: (i32, i32)) -> Option<&[u8]> {
    eprintln!("fetching png data for {:?}", chunk_position);
    self.chunks
        .get_mut(&Chunk::master(chunk_position))?
        .png_data(Chunk::sub(chunk_position))
}
/// Decodes received PNG bytes into the sub-chunk at `to_chunk`, creating
/// the containing master chunk if needed.
pub fn decode_png_data(&mut self, canvas: &mut Canvas, to_chunk: (i32, i32), data: &[u8]) -> anyhow::Result<()> {
    self.ensure_chunk_exists(canvas, Chunk::master(to_chunk));
    let chunk = self.chunks.get_mut(&Chunk::master(to_chunk)).unwrap();
    chunk.decode_png_data(Chunk::sub(to_chunk), data)
}
/// Flattens all chunks into one RGBA image and writes it to `path`.
///
/// # Errors
/// Fails if the canvas has no chunks, or if stitching/encoding fails.
fn save_as_png(&self, path: &Path) -> anyhow::Result<()> {
    // bounding box of all master chunks, in chunk coordinates
    let (mut left, mut top, mut right, mut bottom) = (i32::MAX, i32::MAX, i32::MIN, i32::MIN);
    for (chunk_position, _) in &self.chunks {
        left = left.min(chunk_position.0);
        top = top.min(chunk_position.1);
        right = right.max(chunk_position.0);
        bottom = bottom.max(chunk_position.1);
    }
    eprintln!("left={}, top={}, right={}, bottom={}", left, top, right, bottom);
    // left is still i32::MAX iff the chunk map was empty
    if left == i32::MAX {
        anyhow::bail!("There's nothing to save! Draw something on the canvas and try again.");
    }
    let width = ((right - left + 1) * Chunk::SURFACE_SIZE.0 as i32) as u32;
    let height = ((bottom - top + 1) * Chunk::SURFACE_SIZE.1 as i32) as u32;
    eprintln!("size: {:?}", (width, height));
    let mut image = RgbaImage::from_pixel(width, height, Rgba([0, 0, 0, 0]));
    for (chunk_position, chunk) in &self.chunks {
        eprintln!("writing chunk {:?}", chunk_position);
        let pixel_position = (
            (Chunk::SURFACE_SIZE.0 as i32 * (chunk_position.0 - left)) as u32,
            (Chunk::SURFACE_SIZE.1 as i32 * (chunk_position.1 - top)) as u32,
        );
        eprintln!(" - pixel position: {:?}", pixel_position);
        let chunk_image = chunk.download_image();
        let mut sub_image = image.sub_image(
            pixel_position.0,
            pixel_position.1,
            Chunk::SURFACE_SIZE.0 as u32,
            Chunk::SURFACE_SIZE.1 as u32,
        );
        sub_image.copy_from(&chunk_image, 0, 0)?;
    }
    image.save(path)?;
    eprintln!("image {:?} saved successfully", path);
    Ok(())
}
/// Normalizes a user-chosen save path to the `.netcanv` directory it
/// refers to, rejecting paths that are not canvas folders.
fn validate_netcanv_save_path(path: &Path) -> anyhow::Result<PathBuf> {
    let mut result = path.to_path_buf();
    // condition #1: if the manifest file itself was picked, use its folder
    if result.file_name() == Some(OsStr::new("canvas.toml")) {
        result.pop();
    }
    // condition #2: a canvas folder is identified by its .netcanv suffix
    match result.extension() {
        Some(ext) if ext == OsStr::new("netcanv") => Ok(result),
        _ => anyhow::bail!("Please select a valid canvas folder (one whose name ends with .netcanv)"),
    }
}
/// Deletes all `.png` chunk files and the `canvas.toml` manifest from an
/// existing save folder; other files and subdirectories are left alone.
fn clear_netcanv_save(path: &Path) -> anyhow::Result<()> {
    eprintln!("clearing older netcanv save {:?}", path);
    for entry in std::fs::read_dir(path)? {
        let path = entry?.path();
        if path.is_file() {
            if path.extension() == Some(OsStr::new("png")) || path.file_name() == Some(OsStr::new("canvas.toml")) {
                std::fs::remove_file(path)?;
            }
        }
    }
    Ok(())
}
/// Saves the canvas as a `.netcanv` folder: a `canvas.toml` manifest plus
/// one `x,y.png` per non-empty sub-chunk.
///
/// Only sub-chunks that are non-empty AND not yet saved are written
/// (incremental save); saving to a *different* folder clears the old save
/// there first. On success, remembers the folder in `self.filename`.
fn save_as_netcanv(&mut self, path: &Path) -> anyhow::Result<()> {
    // create the directory
    eprintln!("creating or reusing existing directory ({:?})", path);
    let path = Self::validate_netcanv_save_path(path)?;
    std::fs::create_dir_all(path.clone())?; // use create_dir_all to not fail if the dir already exists
    if self.filename != Some(path.clone()) {
        Self::clear_netcanv_save(&path)?;
    }
    // save the canvas.toml manifest
    eprintln!("saving canvas.toml");
    let canvas_toml = CanvasToml {
        version: CANVAS_TOML_VERSION,
    };
    std::fs::write(path.join(Path::new("canvas.toml")), toml::to_string(&canvas_toml)?)?;
    // save all the chunks
    eprintln!("saving chunks");
    for (master_position, chunk) in &mut self.chunks {
        for sub in 0..Chunk::SUB_COUNT {
            if !chunk.non_empty_subs[sub] || chunk.saved_subs[sub] {
                continue
            }
            let chunk_position = Chunk::chunk_position(*master_position, sub);
            eprintln!("  chunk {:?}", chunk_position);
            let saved = if let Some(png_data) = chunk.png_data(sub) {
                let filename = format!("{},{}.png", chunk_position.0, chunk_position.1);
                let filepath = path.join(Path::new(&filename));
                eprintln!("  saving to {:?}", filepath);
                std::fs::write(filepath, png_data)?;
                true
            } else {
                false
            };
            if saved {
                chunk.mark_saved(sub);
            }
        }
    }
    self.filename = Some(path);
    Ok(())
}
/// Saves the canvas to `path`, or to the last used `self.filename` when
/// `path` is `None`. The format is chosen from the extension: `.png` for
/// a flat image, `.netcanv`/`.toml` for a chunked save folder.
///
/// # Panics
/// Panics if no path is given and the canvas has never been saved.
pub fn save(&mut self, path: Option<&Path>) -> anyhow::Result<()> {
    let path = path
        .map(|p| p.to_path_buf())
        // `or_else` keeps the clone of the stored filename lazy; plain
        // `or` would clone it even when an explicit path was provided.
        .or_else(|| self.filename.clone())
        .expect("no save path provided");
    if let Some(ext) = path.extension() {
        match ext.to_str() {
            Some("png") => self.save_as_png(&path),
            Some("netcanv") | Some("toml") => self.save_as_netcanv(&path),
            _ => anyhow::bail!("Unsupported save format. Please choose either .png or .netcanv"),
        }
    } else {
        anyhow::bail!("Can't save a canvas without an extension")
    }
}
/// Loads a flat image file into the canvas, slicing it into master chunks
/// anchored at (0, 0); fully transparent chunks are skipped.
fn load_from_image_file(&mut self, canvas: &mut Canvas, path: &Path) -> anyhow::Result<()> {
    use ::image::io::Reader as ImageReader;
    let image = ImageReader::open(path)?.decode()?.into_rgba8();
    eprintln!("image size: {:?}", image.dimensions());
    let chunks_x = (image.width() as f32 / Chunk::SURFACE_SIZE.0 as f32).ceil() as i32;
    let chunks_y = (image.height() as f32 / Chunk::SURFACE_SIZE.1 as f32).ceil() as i32;
    eprintln!("n. chunks: x={}, y={}", chunks_x, chunks_y);
    for y in 0..chunks_y {
        for x in 0..chunks_x {
            let chunk_position = (x, y);
            self.ensure_chunk_exists(canvas, chunk_position);
            let chunk = self.chunks.get_mut(&chunk_position).unwrap();
            let pixel_position = (
                (Chunk::SURFACE_SIZE.0 as i32 * chunk_position.0) as u32,
                (Chunk::SURFACE_SIZE.1 as i32 * chunk_position.1) as u32,
            );
            eprintln!("plopping chunk at {:?}", pixel_position);
            // BUG FIX: clamp to the image size, not size - 1 — the
            // previous `width() - 1` / `height() - 1` silently dropped the
            // last pixel column/row of every right/bottom edge chunk.
            let right = (pixel_position.0 + Chunk::SURFACE_SIZE.0).min(image.width());
            let bottom = (pixel_position.1 + Chunk::SURFACE_SIZE.1).min(image.height());
            eprintln!(" to {:?}", (right, bottom));
            let width = right - pixel_position.0;
            let height = bottom - pixel_position.1;
            let mut chunk_image =
                RgbaImage::from_pixel(Chunk::SURFACE_SIZE.0, Chunk::SURFACE_SIZE.1, Rgba([0, 0, 0, 0]));
            let sub_image = image.view(pixel_position.0, pixel_position.1, width, height);
            chunk_image.copy_from(&sub_image, 0, 0)?;
            if Chunk::image_is_empty(&chunk_image) {
                continue
            }
            chunk.non_empty_subs = [true; Chunk::SUB_COUNT];
            chunk.upload_image(chunk_image, (0, 0));
        }
    }
    Ok(())
}
fn parse_chunk_position(coords: &str) -> anyhow::Result<(i32, i32)> {
let mut iter = coords.split(',');
let x_str = iter.next();
let y_str = iter.next();
anyhow::ensure!(
x_str.is_some() && y_str.is_some(),
"Chunk position must follow the pattern: x,y"
);
anyhow::ensure!(iter.next().is_none(), "Trailing coordinates found after x,y");
let (x_str, y_str) = (x_str.unwrap(), y_str.unwrap());
let x: i32 = x_str.parse()?;
let y: i32 = y_str.parse()?;
Ok((x, y))
}
fn load_from_netcanv(&mut self, canvas: &mut Canvas, path: &Path) -> anyhow::Result<()> {
let path = Self::validate_netcanv_save_path(path)?;
eprintln!("loading canvas from {:?}", path);
// load canvas.toml
eprintln!("loading canvas.toml");
let canvas_toml_path = path.join(Path::new("canvas.toml"));
let canvas_toml: CanvasToml = toml::from_str(&std::fs::read_to_string(&canvas_toml_path)?)?;
if canvas_toml.version < CANVAS_TOML_VERSION {
anyhow::bail!("Version mismatch in canvas.toml. Try updating your client");
}
// load chunks
eprintln!("loading chunks");
for entry in std::fs::read_dir(path.clone())? {
let path = entry?.path();
if path.is_file() && path.extension() == Some(OsStr::new("png")) {
if let Some(position_osstr) = path.file_stem() {
if let Some(position_str) = position_osstr.to_str() {
let chunk_position = Self::parse_chunk_position(&position_str)?;
eprintln!("chunk {:?}", chunk_position);
let master = Chunk::master(chunk_position);
let sub = Chunk::sub(chunk_position);
self.ensure_chunk_exists(canvas, master);
let chunk = self.chunks.get_mut(&master).unwrap();
chunk.decode_png_data(sub, &std::fs::read(path)?)?;
chunk.mark_saved(sub);
}
}
}
}
self.filename = Some(path);
Ok(())
}
pub fn load(&mut self, canvas: &mut Canvas, path: &Path) -> anyhow::Result<()> {
if let Some(ext) = path.extension() {
match ext.to_str() {
Some("netcanv") | Some("toml") => self.load_from_netcanv(canvas, path),
_ => self.load_from_image_file(canvas, path),
}
} else {
self.load_from_image_file(canvas, path)
}
}
pub fn chunk_positions(&self) -> Vec<(i32, i32)> {
let mut result = Vec::new();
for (master_position, chunk) in &self.chunks {
for (sub, non_empty) in chunk.non_empty_subs.iter().enumerate() {
if *non_empty {
result.push(Chunk::chunk_position(*master_position, sub));
}
}
}
result
}
pub fn filename(&self) -> Option<&Path> {
self.filename.as_deref()
}
}
|
//! Improved cross-platform clipboard library
//!
//! Fork of https://github.com/aweinstock314/rust-clipboard with better error handling
#[cfg(target_os="windows")]
extern crate clipboard_win;
#[cfg(any(target_os="linux", target_os="openbsd"))]
extern crate x11_clipboard;
#[cfg(target_os="macos")]
#[macro_use]
extern crate objc;
#[cfg(target_os="macos")]
extern crate objc_id;
#[cfg(target_os="macos")]
extern crate objc_foundation;
mod errors;
pub mod clipboard_metadata;
pub use errors::ClipboardError;
pub use clipboard_metadata::ClipboardContentType;
/// Platform-agnostic clipboard access, implemented per-OS and re-exported
/// as `SystemClipboard` below.
pub trait Clipboard {
    /// Concrete clipboard type returned by [`Clipboard::new`].
    type Output;
    /// Opens a handle to the system clipboard.
    fn new() -> Result<Self::Output, ClipboardError>;
    /// Returns the raw clipboard bytes together with their content type.
    fn get_contents(&self) -> Result<(Vec<u8>, ClipboardContentType), ClipboardError>;
    /// Returns the clipboard contents decoded as a string.
    fn get_string_contents(&self) -> Result<String, ClipboardError>;
    /// Replaces the clipboard contents with raw bytes of the given format.
    fn set_contents(&self, contents: Vec<u8>, format: ClipboardContentType) -> Result<(), ClipboardError>;
    /// Replaces the clipboard contents with a string.
    fn set_string_contents(&self, contents: String) -> Result<(), ClipboardError>;
}
#[cfg(target_os="windows")]
pub mod win;
#[cfg(target_os="windows")]
pub use win::WindowsClipboard as SystemClipboard;
#[cfg(any(target_os="linux", target_os="openbsd"))]
pub mod x11;
#[cfg(any(target_os="linux", target_os="openbsd"))]
pub use x11::X11Clipboard as SystemClipboard;
#[cfg(target_os="macos")]
pub mod macos;
#[cfg(target_os="macos")]
pub use macos::MacOsClipboard as SystemClipboard;
|
use crate::bus::Bus;
/// A minimal big-endian, MIPS-like CPU model: a program counter, 32
/// general-purpose registers, a program ROM, and a data bus for loads/stores.
pub struct Cpu {
    pc: u32,         // program counter — byte address into `rom`
    regs: [u32; 32], // general-purpose registers; $0 is kept zero by write_reg
    rom: Vec<u8>,    // program memory, fetched 4 big-endian bytes at a time
    bus: Bus,        // data bus used by LW/SW (see crate::bus)
}
impl Cpu {
    /// Creates a CPU with `pc` at 0, all registers zeroed, and the given ROM.
    pub fn new(rom: Vec<u8>) -> Cpu {
        Cpu {
            pc: 0,
            regs: [0; 32],
            rom,
            bus: Bus::new(),
        }
    }
    /// Returns true while `pc` still points at a fetchable instruction word.
    pub fn can_run(&self) -> bool {
        // saturating_sub prevents the usize underflow panic the original
        // `self.rom.len() - 4` hit for ROMs shorter than 4 bytes.
        // NOTE(review): `<` stops one word before the end of ROM, so the
        // final instruction never executes — kept as-is; confirm intent.
        (self.pc as usize) < self.rom.len().saturating_sub(4)
    }
    /// Reads general-purpose register `rx`.
    fn read_reg(&self, rx: usize) -> u32 {
        self.regs[rx]
    }
    /// Writes `dt` to register `rx`; writes to $0 are discarded (it is
    /// hard-wired to zero, as on MIPS).
    fn write_reg(&mut self, rx: usize, dt: u32) {
        if rx != 0 {
            self.regs[rx] = dt;
        }
    }
    /// Fetches the big-endian 32-bit instruction word at `pc`.
    pub fn fetch(&self) -> u32 {
        let index = self.pc as usize;
        u32::from_be_bytes([
            self.rom[index],
            self.rom[index + 1],
            self.rom[index + 2],
            self.rom[index + 3],
        ])
    }
    /// Decodes and executes one instruction, updating `pc`. An all-zero word
    /// is treated as a NOP.
    pub fn execute(&mut self, instruction: u32) {
        if instruction != 0 {
            let opcode = (instruction >> 26) as u8;
            let jdir = instruction & 0x3FFFFFF; // J-type 26-bit target field
            let rs = ((instruction >> 21) & 0x1F) as usize;
            let rt = ((instruction >> 16) & 0x1F) as usize;
            let ct = instruction & 0xFFFF;
            // MIPS sign-extends I-type immediates; the previous zero-extension
            // broke negative ADDI values and negative branch/load/store offsets.
            let imm = ct as i16 as i32 as u32;
            print!("{:04X} - {:08X} -> ", self.pc, instruction);
            match opcode {
                0x2 => {
                    println!("J {}", jdir);
                    // MIPS J: keep the top 4 bits of pc+4 and replace the rest
                    // with target << 2. The previous `(((pc+4)>>26)|jdir)<<2`
                    // merged pc bits into the low end and computed a wrong
                    // address.
                    self.pc = (self.pc.wrapping_add(4) & 0xF000_0000) | (jdir << 2);
                },
                0x4 => {
                    println!("BEQ ${}, ${}, {:04X}", rs, rt, ct);
                    if self.read_reg(rs) == self.read_reg(rt) {
                        // Branch target: pc + 4 + sign-extended offset * 4.
                        self.pc = self.pc.wrapping_add(4).wrapping_add(imm << 2);
                    } else {
                        self.pc += 4;
                    }
                },
                0x0 => {
                    println!("ALU");
                    self.pc += 4;
                },
                0x8 => {
                    println!("ADDI {}, {}, {}", rt, rs, ct);
                    // wrapping_add avoids the debug-mode overflow panic of `+`.
                    self.write_reg(rt, self.read_reg(rs).wrapping_add(imm));
                    self.pc += 4;
                },
                0x23 => {
                    println!("LW ${}, {}(${})", rt, ct, rs);
                    self.write_reg(
                        rt,
                        self.bus.read_word(self.read_reg(rs).wrapping_add(imm) as usize)
                    );
                    self.pc += 4;
                },
                0x2B => {
                    println!("SW ${}, {}(${})", rt, ct, rs);
                    self.bus.write_word(
                        self.read_reg(rs).wrapping_add(imm) as usize,
                        self.read_reg(rt)
                    );
                    self.pc += 4;
                },
                0x3F => {
                    println!("HALT");
                    self.pc += 4;
                },
                _ => unimplemented!("OPCODE {:#02x} is not implemented", opcode)
            }
        } else {
            self.pc += 4;
        }
    }
    /// Hex-dumps bus memory from address 0 up to (and including) `to`,
    /// four bytes per row.
    pub fn print_mem(&self, to: usize) {
        for row in 0..(to / 4) + 1 {
            print!("{:02X} | ", row * 4);
            for col in 0..4 {
                print!("{:02X} ", self.bus.read_byte(4 * row + col));
            }
            println!();
        }
    }
}
|
#[doc = r"Register block"]
// NOTE(review): this looks svd2rust-generated — prefer regenerating from the
// SVD over hand-editing. #[repr(C)] keeps field order/offsets matching the
// hardware register map given in the per-field #[doc] offsets below
// (4 shared interrupt registers, then 6 registers per stream, 8 streams).
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - low interrupt status register"]
    pub lisr: LISR,
    #[doc = "0x04 - high interrupt status register"]
    pub hisr: HISR,
    #[doc = "0x08 - low interrupt flag clear register"]
    pub lifcr: LIFCR,
    #[doc = "0x0c - high interrupt flag clear register"]
    pub hifcr: HIFCR,
    #[doc = "0x10 - stream x configuration register"]
    pub s0cr: S0CR,
    #[doc = "0x14 - stream x number of data register"]
    pub s0ndtr: S0NDTR,
    #[doc = "0x18 - stream x peripheral address register"]
    pub s0par: S0PAR,
    #[doc = "0x1c - stream x memory 0 address register"]
    pub s0m0ar: S0M0AR,
    #[doc = "0x20 - stream x memory 1 address register"]
    pub s0m1ar: S0M1AR,
    #[doc = "0x24 - stream x FIFO control register"]
    pub s0fcr: S0FCR,
    #[doc = "0x28 - stream x configuration register"]
    pub s1cr: S1CR,
    #[doc = "0x2c - stream x number of data register"]
    pub s1ndtr: S1NDTR,
    #[doc = "0x30 - stream x peripheral address register"]
    pub s1par: S1PAR,
    #[doc = "0x34 - stream x memory 0 address register"]
    pub s1m0ar: S1M0AR,
    #[doc = "0x38 - stream x memory 1 address register"]
    pub s1m1ar: S1M1AR,
    #[doc = "0x3c - stream x FIFO control register"]
    pub s1fcr: S1FCR,
    #[doc = "0x40 - stream x configuration register"]
    pub s2cr: S2CR,
    #[doc = "0x44 - stream x number of data register"]
    pub s2ndtr: S2NDTR,
    #[doc = "0x48 - stream x peripheral address register"]
    pub s2par: S2PAR,
    #[doc = "0x4c - stream x memory 0 address register"]
    pub s2m0ar: S2M0AR,
    #[doc = "0x50 - stream x memory 1 address register"]
    pub s2m1ar: S2M1AR,
    #[doc = "0x54 - stream x FIFO control register"]
    pub s2fcr: S2FCR,
    #[doc = "0x58 - stream x configuration register"]
    pub s3cr: S3CR,
    #[doc = "0x5c - stream x number of data register"]
    pub s3ndtr: S3NDTR,
    #[doc = "0x60 - stream x peripheral address register"]
    pub s3par: S3PAR,
    #[doc = "0x64 - stream x memory 0 address register"]
    pub s3m0ar: S3M0AR,
    #[doc = "0x68 - stream x memory 1 address register"]
    pub s3m1ar: S3M1AR,
    #[doc = "0x6c - stream x FIFO control register"]
    pub s3fcr: S3FCR,
    #[doc = "0x70 - stream x configuration register"]
    pub s4cr: S4CR,
    #[doc = "0x74 - stream x number of data register"]
    pub s4ndtr: S4NDTR,
    #[doc = "0x78 - stream x peripheral address register"]
    pub s4par: S4PAR,
    #[doc = "0x7c - stream x memory 0 address register"]
    pub s4m0ar: S4M0AR,
    #[doc = "0x80 - stream x memory 1 address register"]
    pub s4m1ar: S4M1AR,
    #[doc = "0x84 - stream x FIFO control register"]
    pub s4fcr: S4FCR,
    #[doc = "0x88 - stream x configuration register"]
    pub s5cr: S5CR,
    #[doc = "0x8c - stream x number of data register"]
    pub s5ndtr: S5NDTR,
    #[doc = "0x90 - stream x peripheral address register"]
    pub s5par: S5PAR,
    #[doc = "0x94 - stream x memory 0 address register"]
    pub s5m0ar: S5M0AR,
    #[doc = "0x98 - stream x memory 1 address register"]
    pub s5m1ar: S5M1AR,
    #[doc = "0x9c - stream x FIFO control register"]
    pub s5fcr: S5FCR,
    #[doc = "0xa0 - stream x configuration register"]
    pub s6cr: S6CR,
    #[doc = "0xa4 - stream x number of data register"]
    pub s6ndtr: S6NDTR,
    #[doc = "0xa8 - stream x peripheral address register"]
    pub s6par: S6PAR,
    #[doc = "0xac - stream x memory 0 address register"]
    pub s6m0ar: S6M0AR,
    #[doc = "0xb0 - stream x memory 1 address register"]
    pub s6m1ar: S6M1AR,
    #[doc = "0xb4 - stream x FIFO control register"]
    pub s6fcr: S6FCR,
    #[doc = "0xb8 - stream x configuration register"]
    pub s7cr: S7CR,
    #[doc = "0xbc - stream x number of data register"]
    pub s7ndtr: S7NDTR,
    #[doc = "0xc0 - stream x peripheral address register"]
    pub s7par: S7PAR,
    #[doc = "0xc4 - stream x memory 0 address register"]
    pub s7m0ar: S7M0AR,
    #[doc = "0xc8 - stream x memory 1 address register"]
    pub s7m1ar: S7M1AR,
    #[doc = "0xcc - stream x FIFO control register"]
    pub s7fcr: S7FCR,
}
#[doc = "LISR (r) register accessor: low interrupt status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lisr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`lisr`]
module"]
pub type LISR = crate::Reg<lisr::LISR_SPEC>;
#[doc = "low interrupt status register"]
pub mod lisr;
#[doc = "HISR (r) register accessor: high interrupt status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hisr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`hisr`]
module"]
pub type HISR = crate::Reg<hisr::HISR_SPEC>;
#[doc = "high interrupt status register"]
pub mod hisr;
#[doc = "LIFCR (w) register accessor: low interrupt flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lifcr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`lifcr`]
module"]
pub type LIFCR = crate::Reg<lifcr::LIFCR_SPEC>;
#[doc = "low interrupt flag clear register"]
pub mod lifcr;
#[doc = "HIFCR (w) register accessor: high interrupt flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hifcr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`hifcr`]
module"]
pub type HIFCR = crate::Reg<hifcr::HIFCR_SPEC>;
#[doc = "high interrupt flag clear register"]
pub mod hifcr;
#[doc = "S0CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s0cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s0cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s0cr`]
module"]
pub type S0CR = crate::Reg<s0cr::S0CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s0cr;
#[doc = "S0NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s0ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s0ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s0ndtr`]
module"]
pub type S0NDTR = crate::Reg<s0ndtr::S0NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s0ndtr;
#[doc = "S0PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s0par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s0par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s0par`]
module"]
pub type S0PAR = crate::Reg<s0par::S0PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s0par;
#[doc = "S0M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s0m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s0m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s0m0ar`]
module"]
pub type S0M0AR = crate::Reg<s0m0ar::S0M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s0m0ar;
#[doc = "S0M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s0m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s0m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s0m1ar`]
module"]
pub type S0M1AR = crate::Reg<s0m1ar::S0M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s0m1ar;
#[doc = "S0FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s0fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s0fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s0fcr`]
module"]
pub type S0FCR = crate::Reg<s0fcr::S0FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s0fcr;
#[doc = "S1CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s1cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s1cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s1cr`]
module"]
pub type S1CR = crate::Reg<s1cr::S1CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s1cr;
#[doc = "S1NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s1ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s1ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s1ndtr`]
module"]
pub type S1NDTR = crate::Reg<s1ndtr::S1NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s1ndtr;
#[doc = "S1PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s1par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s1par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s1par`]
module"]
pub type S1PAR = crate::Reg<s1par::S1PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s1par;
#[doc = "S1M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s1m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s1m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s1m0ar`]
module"]
pub type S1M0AR = crate::Reg<s1m0ar::S1M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s1m0ar;
#[doc = "S1M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s1m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s1m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s1m1ar`]
module"]
pub type S1M1AR = crate::Reg<s1m1ar::S1M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s1m1ar;
#[doc = "S1FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s1fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s1fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s1fcr`]
module"]
pub type S1FCR = crate::Reg<s1fcr::S1FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s1fcr;
#[doc = "S2CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s2cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s2cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s2cr`]
module"]
pub type S2CR = crate::Reg<s2cr::S2CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s2cr;
#[doc = "S2NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s2ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s2ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s2ndtr`]
module"]
pub type S2NDTR = crate::Reg<s2ndtr::S2NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s2ndtr;
#[doc = "S2PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s2par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s2par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s2par`]
module"]
pub type S2PAR = crate::Reg<s2par::S2PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s2par;
#[doc = "S2M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s2m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s2m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s2m0ar`]
module"]
pub type S2M0AR = crate::Reg<s2m0ar::S2M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s2m0ar;
#[doc = "S2M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s2m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s2m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s2m1ar`]
module"]
pub type S2M1AR = crate::Reg<s2m1ar::S2M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s2m1ar;
#[doc = "S2FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s2fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s2fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s2fcr`]
module"]
pub type S2FCR = crate::Reg<s2fcr::S2FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s2fcr;
#[doc = "S3CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s3cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s3cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s3cr`]
module"]
pub type S3CR = crate::Reg<s3cr::S3CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s3cr;
#[doc = "S3NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s3ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s3ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s3ndtr`]
module"]
pub type S3NDTR = crate::Reg<s3ndtr::S3NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s3ndtr;
#[doc = "S3PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s3par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s3par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s3par`]
module"]
pub type S3PAR = crate::Reg<s3par::S3PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s3par;
#[doc = "S3M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s3m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s3m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s3m0ar`]
module"]
pub type S3M0AR = crate::Reg<s3m0ar::S3M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s3m0ar;
#[doc = "S3M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s3m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s3m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s3m1ar`]
module"]
pub type S3M1AR = crate::Reg<s3m1ar::S3M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s3m1ar;
#[doc = "S3FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s3fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s3fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s3fcr`]
module"]
pub type S3FCR = crate::Reg<s3fcr::S3FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s3fcr;
#[doc = "S4CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s4cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s4cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s4cr`]
module"]
pub type S4CR = crate::Reg<s4cr::S4CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s4cr;
#[doc = "S4NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s4ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s4ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s4ndtr`]
module"]
pub type S4NDTR = crate::Reg<s4ndtr::S4NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s4ndtr;
#[doc = "S4PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s4par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s4par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s4par`]
module"]
pub type S4PAR = crate::Reg<s4par::S4PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s4par;
#[doc = "S4M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s4m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s4m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s4m0ar`]
module"]
pub type S4M0AR = crate::Reg<s4m0ar::S4M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s4m0ar;
#[doc = "S4M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s4m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s4m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s4m1ar`]
module"]
pub type S4M1AR = crate::Reg<s4m1ar::S4M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s4m1ar;
#[doc = "S4FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s4fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s4fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s4fcr`]
module"]
pub type S4FCR = crate::Reg<s4fcr::S4FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s4fcr;
#[doc = "S5CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s5cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s5cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s5cr`]
module"]
pub type S5CR = crate::Reg<s5cr::S5CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s5cr;
#[doc = "S5NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s5ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s5ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s5ndtr`]
module"]
pub type S5NDTR = crate::Reg<s5ndtr::S5NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s5ndtr;
#[doc = "S5PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s5par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s5par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s5par`]
module"]
pub type S5PAR = crate::Reg<s5par::S5PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s5par;
#[doc = "S5M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s5m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s5m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s5m0ar`]
module"]
pub type S5M0AR = crate::Reg<s5m0ar::S5M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s5m0ar;
#[doc = "S5M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s5m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s5m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s5m1ar`]
module"]
pub type S5M1AR = crate::Reg<s5m1ar::S5M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s5m1ar;
#[doc = "S5FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s5fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s5fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s5fcr`]
module"]
pub type S5FCR = crate::Reg<s5fcr::S5FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s5fcr;
#[doc = "S6CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s6cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s6cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s6cr`]
module"]
pub type S6CR = crate::Reg<s6cr::S6CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s6cr;
#[doc = "S6NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s6ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s6ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s6ndtr`]
module"]
pub type S6NDTR = crate::Reg<s6ndtr::S6NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s6ndtr;
#[doc = "S6PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s6par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s6par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s6par`]
module"]
pub type S6PAR = crate::Reg<s6par::S6PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s6par;
#[doc = "S6M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s6m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s6m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s6m0ar`]
module"]
pub type S6M0AR = crate::Reg<s6m0ar::S6M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s6m0ar;
#[doc = "S6M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s6m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s6m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s6m1ar`]
module"]
pub type S6M1AR = crate::Reg<s6m1ar::S6M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s6m1ar;
#[doc = "S6FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s6fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s6fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s6fcr`]
module"]
pub type S6FCR = crate::Reg<s6fcr::S6FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s6fcr;
#[doc = "S7CR (rw) register accessor: stream x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s7cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s7cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s7cr`]
module"]
pub type S7CR = crate::Reg<s7cr::S7CR_SPEC>;
#[doc = "stream x configuration register"]
pub mod s7cr;
#[doc = "S7NDTR (rw) register accessor: stream x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s7ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s7ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s7ndtr`]
module"]
pub type S7NDTR = crate::Reg<s7ndtr::S7NDTR_SPEC>;
#[doc = "stream x number of data register"]
pub mod s7ndtr;
#[doc = "S7PAR (rw) register accessor: stream x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s7par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s7par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s7par`]
module"]
pub type S7PAR = crate::Reg<s7par::S7PAR_SPEC>;
#[doc = "stream x peripheral address register"]
pub mod s7par;
#[doc = "S7M0AR (rw) register accessor: stream x memory 0 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s7m0ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s7m0ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s7m0ar`]
module"]
pub type S7M0AR = crate::Reg<s7m0ar::S7M0AR_SPEC>;
#[doc = "stream x memory 0 address register"]
pub mod s7m0ar;
#[doc = "S7M1AR (rw) register accessor: stream x memory 1 address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s7m1ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s7m1ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s7m1ar`]
module"]
pub type S7M1AR = crate::Reg<s7m1ar::S7M1AR_SPEC>;
#[doc = "stream x memory 1 address register"]
pub mod s7m1ar;
#[doc = "S7FCR (rw) register accessor: stream x FIFO control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`s7fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`s7fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`s7fcr`]
module"]
pub type S7FCR = crate::Reg<s7fcr::S7FCR_SPEC>;
#[doc = "stream x FIFO control register"]
pub mod s7fcr;
|
mod cd;
mod ls;
mod echo;
mod set;
mod pwd;
mod exit;
use std::collections::HashMap;
use rsh::State;
pub use self::cd::cd;
pub use self::ls::ls;
pub use self::echo::echo;
pub use self::set::{set, unset, get};
pub use self::pwd::pwd;
pub use self::exit::exit;
pub type Builtin = fn(&mut State) -> i32;
/// Builds the lookup table mapping each builtin's name to its handler.
pub fn load() -> HashMap<String, Builtin> {
    // Name/handler pairs, collected into the map in one pass.
    let entries: [(&str, Builtin); 8] = [
        ("cd", cd),
        ("ls", ls),
        ("echo", echo),
        ("set", set),
        ("unset", unset),
        ("get", get),
        ("pwd", pwd),
        ("exit", exit),
    ];
    entries
        .iter()
        .map(|&(name, handler)| (name.to_string(), handler))
        .collect()
}
// TODO make these tests better once we can capture stdout
#[cfg(test)]
mod test {
    use super::*;

    /// `cd /` should succeed and update the tracked working directory.
    #[test]
    fn test_cd() {
        let mut s = State::default();
        s.argv = vec!["cd".to_string(), "/".to_string()];
        let i = cd(&mut s);
        assert_eq!(i, 0);
        assert_eq!(s.cwd.to_str().unwrap(), "/");
    }

    /// Changing into a nonexistent directory is expected to panic.
    #[test]
    #[should_panic]
    fn test_cd_bad_path() {
        let mut s = State::default();
        s.argv = vec!["cd".to_string(), "/123".to_string()];
        let i = cd(&mut s);
        assert_eq!(i, 0);
    }

    #[test]
    fn test_set() {
        let mut s = State::default();
        s.argv = vec!["set".to_string(), "test".to_string(), "hello world".to_string()];
        let i = set(&mut s);
        assert_eq!(i, 0);
        assert_eq!(s.variables.get("test").unwrap(), "hello world");
    }

    #[test]
    fn test_unset() {
        let mut s = State::default();
        s.variables.insert("test".to_string(), "hello world".to_string());
        s.argv = vec!["unset".to_string(), "test".to_string()];
        let i = unset(&mut s);
        assert_eq!(i, 0);
        assert_eq!(s.variables.get("test"), None);
    }

    #[test]
    fn test_get() {
        let mut s = State::default();
        s.variables.insert("test".to_string(), "hello world".to_string());
        s.argv = vec!["get".to_string(), "test".to_string()];
        let i = get(&mut s);
        assert_eq!(i, 0);
    }

    #[test]
    fn test_ls() {
        let mut s = State::default();
        s.argv = vec!["ls".to_string()];
        let i = ls(&mut s);
        assert_eq!(i, 0);
    }

    #[test]
    fn test_echo() {
        let mut s = State::default();
        s.argv = vec!["echo".to_string(), "\"Hello world!\"".to_string()];
        // BUG FIX: this test previously invoked `get` instead of `echo`,
        // so the echo builtin was never actually exercised.
        let i = echo(&mut s);
        assert_eq!(i, 0);
    }

    #[test]
    fn test_pwd() {
        let mut s = State::default();
        s.argv = vec!["pwd".to_string()];
        let i = pwd(&mut s);
        assert_eq!(i, 0);
    }
}
|
// use super::*;
use rocket::http::Status;
use rocket::local::Client;
// In order to get your test module to run, you need to add a mod <file.rs>.
// Note that .rs files in this directory are each given their own "module" that
// is named appropriately, so by adding a .rs file you add a module. You
// should really like that.
//
// Adding `mod` <filename sans .rs> will do.
mod healthz;
mod ping;
|
pub mod algor;
/// RSA key material: the two primes, the modulus, Euler's totient,
/// and the public/private exponent pair (all as `u128`).
pub struct RSAKey
{
    pub p: u128, pub q: u128, pub phi: u128,
    pub n: u128, pub e: u128, pub d: u128
}
impl RSAKey {
    /// Returns a key with every component zeroed; the fields are
    /// expected to be filled in afterwards.
    pub fn create() -> RSAKey {
        RSAKey {
            p: 0,
            q: 0,
            phi: 0,
            n: 0,
            e: 0,
            d: 0,
        }
    }
}
/// Wrapper owning the currently active `RSAKey`.
pub struct RSASystem
{
    pub key: RSAKey
}
impl RSASystem
{
pub fn create() -> RSASystem {
RSASystem { key: RSAKey::create() }
}
pub fn generate_key(p: u128, q: u128) -> RSAKey {
let mut key: RSAKey::create();
key.p = p;
key.q = q;
key.n = key.p * key.q;
key.phi = (key.p - 1) * (key.q - 1);
let mut gcd: u128 = 0;
let mut x: u128;
let mut y: u128;
key.e = algor::generate_e(key.phi);
gcd = algor::extended_euclidean_algorithm(key.e, key.phi, x, y);
while gcd != 1 || x < 0 {
key.e = generate_e(key.phi);
gcd = extended_euclidean_algorithm(key.e, key.phi, x, y);
}
key.d = x;
self.key = key;
return key;
}
}
|
use crypto::pbkdf2;
use mnemonic::Mnemonic;
use std::fmt;
/// The secret value used to derive HD wallet addresses from a [`Mnemonic`][Mnemonic] phrase.
///
/// Because it is not possible to create a [`Mnemonic`][Mnemonic] instance that is invalid, it is
/// therefore impossible to have a [`Seed`][Seed] instance that is invalid. This guarantees that only
/// a valid, intact mnemonic phrase can be used to derive HD wallet addresses.
///
/// To get the raw byte value use [`Seed::as_bytes()`][Seed::as_bytes()]. These can be used to derive
/// HD wallet addresses using another crate (deriving HD wallet addresses is outside the scope of this
/// crate and the BIP39 standard).
///
/// [Mnemonic]: ./mnemonic/struct.Mnemonic.html
/// [Seed]: ./seed/struct.Seed.html
/// [Seed::as_bytes()]: ./seed/struct.Seed.html#method.as_bytes
#[derive(Clone)]
pub struct Seed {
    // Raw seed bytes as produced by `pbkdf2` in `Seed::new`.
    bytes: Vec<u8>,
}
impl Seed {
/// Generates the seed from the [`Mnemonic`][Mnemonic] and the password.
///
/// [Mnemonic]: ./mnemonic/struct.Mnemonic.html
pub fn new(mnemonic: &Mnemonic, password: &str) -> Self {
let salt = format!("mnemonic{}", password);
let bytes = pbkdf2(mnemonic.entropy(), &salt);
Self {
bytes,
}
}
/// Get the seed value as a byte slice
pub fn as_bytes(&self) -> &[u8] {
&self.bytes
}
}
impl AsRef<[u8]> for Seed {
    /// Lets a `Seed` be borrowed anywhere a byte slice is accepted.
    fn as_ref(&self) -> &[u8] {
        &self.bytes
    }
}
impl fmt::Debug for Seed {
    // Debug delegates to the `UpperHex` impl via "{:#X}", so the seed
    // prints as a 0x-prefixed uppercase hex string rather than a byte list.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:#X}", self)
    }
}
impl fmt::LowerHex for Seed {
    /// Formats the seed bytes as lowercase hex; the `#` flag adds a `0x` prefix.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if f.alternate() {
            f.write_str("0x")?;
        }
        for byte in &self.bytes {
            // BUG FIX: "{:x}" drops the leading zero of bytes < 0x10, so
            // e.g. [0x01, 0x23] printed as "123" instead of "0123",
            // producing an ambiguous/short hex string. Pad to two digits.
            write!(f, "{:02x}", byte)?;
        }
        Ok(())
    }
}
impl fmt::UpperHex for Seed {
    /// Formats the seed bytes as uppercase hex; the `#` flag adds a `0x` prefix.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if f.alternate() {
            f.write_str("0x")?;
        }
        for byte in &self.bytes {
            // BUG FIX: "{:X}" drops the leading zero of bytes < 0x10;
            // pad every byte to exactly two hex digits.
            write!(f, "{:02X}", byte)?;
        }
        Ok(())
    }
}
|
use stm32f407;
use crate::hal::pin::*;
// Macro generating the `PinConfigure`/`PinWrite`/`PinRead` impls for one
// GPIO port (GPIOA..GPIOI) of the stm32f407.
macro_rules! add_control_pio {
    ($TARGET:ident, $PIOX:ident) => {
        impl<'a, ENABLED, DIRECTION> PinConfigure<stm32f407::$PIOX, ENABLED, DIRECTION>
            for Pin<'a, $TARGET::$PIOX, ENABLED, DIRECTION>
        {
            fn disable(&self) -> Pin<$TARGET::$PIOX, IsDisabled, Unknown> {
                // NOTE(review): this writes the raw pin *offset* (not a
                // one-bit mask) into ODR, which looks suspicious; preserved
                // as-is — confirm intended behavior against the hardware.
                self.port
                    .odr
                    .write_with_zero(|w| unsafe { w.bits(self.port_offset) });
                Pin {
                    port: self.port,
                    port_offset: self.port_offset,
                    direction: Unknown,
                    state: IsDisabled,
                }
            }
            fn as_output(&self) -> Pin<$TARGET::$PIOX, IsEnabled, IsOutput> {
                // MODER/PUPDR use two bits per pin, hence the doubled offset.
                let offset = 2 * self.port_offset;
                self.disable_pullup();
                // Clear the OTYPER bit for this pin (push-pull output).
                self.port.otyper.modify(|r, w| unsafe {
                    w.bits(r.bits() & !(0x01 << self.port_offset))
                });
                // MODER = 0b01: general-purpose output mode.
                self.port.moder.modify(|r, w| unsafe {
                    w.bits((r.bits() & !(0b11 << offset)) | (0b01 << offset))
                });
                Pin {
                    port: self.port,
                    port_offset: self.port_offset,
                    direction: IsOutput,
                    state: IsEnabled,
                }
            }
            // https://stackoverflow.com/questions/47759124/returning-a-generic-struct-from-new?rq=1
            fn as_input(&self) -> Pin<$TARGET::$PIOX, IsEnabled, IsInput> {
                let offset = 2 * self.port_offset;
                // MODER = 0b00: input mode (pull-up left untouched).
                self.port.moder.modify(|r, w| unsafe {
                    w.bits((r.bits() & !(0b11 << offset)) | (0b00 << offset))
                });
                Pin {
                    port: self.port,
                    port_offset: self.port_offset,
                    direction: IsInput,
                    state: IsEnabled,
                }
            }
            fn enable_pullup(&self) {
                let offset = 2 * self.port_offset;
                // PUPDR = 0b01: pull-up enabled for this pin.
                self.port.pupdr.modify(|r, w| unsafe {
                    w.bits((r.bits() & !(0b11 << offset)) | (0b01 << offset))
                });
            }
            fn disable_pullup(&self) {
                let offset = 2 * self.port_offset;
                // PUPDR = 0b00: no pull-up / pull-down.
                self.port.pupdr.modify(|r, w| unsafe {
                    w.bits((r.bits() & !(0b11 << offset)) | (0b00 << offset))
                });
            }
        }
        impl PinWrite for Pin<'_, $TARGET::$PIOX, IsEnabled, IsOutput> {
            fn set_high(&self) {
                // BSRR low half-word sets the pin atomically.
                self.port.bsrr.write(|w| unsafe { w.bits(1 << self.port_offset) });
            }
            fn set_low(&self) {
                // BSRR high half-word (bit + 16) resets the pin atomically.
                self.port.bsrr.write(|w| unsafe { w.bits(1 << (self.port_offset + 16)) });
            }
        }
        impl PinRead for Pin<'_, $TARGET::$PIOX, IsEnabled, IsInput> {
            fn get_state(&self) -> bool {
                // BUG FIX: the masked IDR value is (1 << port_offset) when
                // the pin is high, so the old `== 1` comparison was only
                // ever true for pin 0. Test for any set bit instead.
                self.port.idr.read().bits() & (1 << self.port_offset) != 0
            }
        }
    };
}
// Instantiate the pin-control impls for every GPIO port A-I of the STM32F407.
add_control_pio!(stm32f407, GPIOA);
add_control_pio!(stm32f407, GPIOB);
add_control_pio!(stm32f407, GPIOC);
add_control_pio!(stm32f407, GPIOD);
add_control_pio!(stm32f407, GPIOE);
add_control_pio!(stm32f407, GPIOF);
add_control_pio!(stm32f407, GPIOG);
add_control_pio!(stm32f407, GPIOH);
add_control_pio!(stm32f407, GPIOI);
/**
--- Day 1: Report Repair ---
After saving Christmas five years in a row, you've decided to take a vacation at a nice resort on a tropical island. Surely, Christmas will go on without you.
The tropical island has its own currency and is entirely cash-only. The gold coins used there have a little picture of a starfish; the locals just call them stars. None of the currency exchanges seem to have heard of them, but somehow, you'll need to find fifty of these coins by the time you arrive so you can pay the deposit on your room.
To save your vacation, you need to get all fifty stars by December 25th.
Collect stars by solving puzzles. Two puzzles will be made available on each day in the Advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
Before you leave, the Elves in accounting just need you to fix your expense report (your puzzle input); apparently, something isn't quite adding up.
Specifically, they need you to find the two entries that sum to 2020 and then multiply those two numbers together.
For example, suppose your expense report contained the following:
1721
979
366
299
675
1456
In this list, the two entries that sum to 2020 are 1721 and 299. Multiplying them together produces 1721 * 299 = 514579, so the correct answer is 514579.
Of course, your expense report is much larger. Find the two entries that sum to 2020; what do you get if you multiply them together?
*/
/*
--- Part Two ---
The Elves in accounting are thankful for your help; one of them even offers you a starfish coin they had left over from a past vacation. They offer you a second one if you can find three numbers in your expense report that meet the same criteria.
Using the above example again, the three entries that sum to 2020 are 979, 366, and 675. Multiplying them together produces the answer, 241861950.
In your expense report, what is the product of the three entries that sum to 2020?
*/
use std::collections::HashSet;
/// Advent of Code 2020 day 1: scan the expense report once, tracking every
/// value seen so far (for the pair sum) and every pair of values seen so
/// far (for the triple sum), printing each product as it is found.
fn main() -> std::io::Result<()> {
    let file = include_str!("../input");
    // Single values seen so far, and every pair formed from them.
    let mut seen = HashSet::new();
    let mut seen_pairs: HashSet<(u32, u32)> = HashSet::new();
    for line in file.lines() {
        let value: u32 = line.parse().expect("Expected lines to be ints");
        // Part 2: does `value` complete any earlier pair to 2020?
        for &(a, b) in &seen_pairs {
            if value + a + b == 2020 {
                println!("Answer(3): {:?}", value * a * b);
            }
        }
        // Part 1: does `value` complete any earlier single number to 2020?
        for &earlier in &seen {
            if earlier + value == 2020 {
                println!("Answer(2): {:?}", earlier * value);
            }
            seen_pairs.insert((value, earlier));
        }
        seen.insert(value);
    }
    Ok(())
}
|
use crate::datastructures::{
Entailment,
Pure::{And, True},
Rule,
Spatial::{Emp, SepConj},
};
/// Rule that replaces empty `And`/`SepConj` conjunctions with the
/// canonical `True`/`Emp` atoms on both sides of an entailment.
pub struct Cleanup;
impl Rule for Cleanup {
    /// `Cleanup` is applicable to every goal; `premisses` decides
    /// whether it actually changes anything.
    fn predicate(&self, _goal: &Entailment) -> bool {
        true
    }

    /// Normalizes empty conjunctions: an empty `And(..)` becomes `True`
    /// and an empty `SepConj(..)` becomes `Emp`, on both the antecedent
    /// and the consequent. Returns `None` when nothing changed.
    fn premisses(&self, goal: Entailment) -> Option<Vec<Entailment>> {
        let (mut antecedent, mut consequent) = goal.destroy();
        // Detect which of the four components is an empty conjunction.
        let empty_ante_pure = matches!(antecedent.get_pure(), And(v) if v.is_empty());
        let empty_ante_spatial = matches!(antecedent.get_spatial(), SepConj(v) if v.is_empty());
        let empty_cons_pure = matches!(consequent.get_pure(), And(v) if v.is_empty());
        let empty_cons_spatial = matches!(consequent.get_spatial(), SepConj(v) if v.is_empty());
        if empty_ante_pure {
            antecedent.0 = True;
        }
        if empty_ante_spatial {
            antecedent.1 = Emp;
        }
        if empty_cons_pure {
            consequent.0 = True;
        }
        if empty_cons_spatial {
            consequent.1 = Emp;
        }
        // Only produce a premiss when at least one side was rewritten.
        if !(empty_ante_pure || empty_ante_spatial || empty_cons_pure || empty_cons_spatial) {
            return None;
        }
        Some(vec![Entailment {
            antecedent,
            consequent,
        }])
    }
}
|
#![no_std]
//! A simple compile-time derive macro to create type-to-value maps.
//!
//! This approach in contrast to crates such as [`typemap`](https://crates.io/crates/typemap/)
//! or [`type-map`](https://crates.io/crates/type-map/) that perform run-time lookup.
//! The static typing brings compile-time safety and faster execution at the expense
//! of using a derive macro and generics.
//!
//! The crate is `no_std` compatible.
//!
//! # Example
//! ```
//! # use typemap_meta::*;
//! #[derive(Typemap)]
//! struct Test(i32, f32);
//!
//! let t = Test(1, 2.0);
//! assert_eq!(*get!(t, i32), 1);
//! assert_eq!(*get!(t, f32), 2.0);
//! ```
//!
//! To get mutable references, add the `#[typemap_mut]` attribute on your struct, and
//! use `get_mut!` instead of `get!`:
//! ```
//! # use typemap_meta::*;
//! #[derive(Typemap)]
//! #[typemap_mut]
//! struct Test(i32, f32);
//!
//! let mut t = Test(1, 2.0);
//! assert_eq!(*get!(t, i32), 1);
//! assert_eq!(*get!(t, f32), 2.0);
//!
//! *get_mut!(t, i32) = 3;
//! *get_mut!(t, f32) = 4.0;
//!
//! assert_eq!(*get!(t, i32), 3);
//! assert_eq!(*get!(t, f32), 4.0);
//! ```
pub use typemap_meta_derive::*;
/// Helper trait to get a specific type `T` from a tuple struct containing disjoint heterogeneous types
pub trait Get<T> {
    /// Returns a shared reference to the field of type `T`.
    fn get(&self) -> &T;
}
/// Helper trait to mutably get a specific type `T` from a tuple struct containing disjoint heterogeneous types
pub trait GetMut<T> {
    /// Returns an exclusive reference to the field of type `T`.
    fn get_mut(&mut self) -> &mut T;
}
/// Convenience macro to get a specific type `$t` from a tuple struct `$s` containing disjoint heterogeneous types
///
/// Passing a value is fine, as [`get`] will add a reference to `$t` before calling [`Get`].
#[macro_export]
macro_rules! get {
    ($s:expr, $t:ty) => {
        // Fully-qualified path so the macro works without `Get` imported.
        $crate::Get::<$t>::get(&$s)
    };
}
/// Convenience macro to mutably get a specific type `$t` from a tuple struct `$s` containing disjoint heterogeneous types
///
/// Passing a value is fine, as [`get_mut`] will add a reference to `$t` before calling [`GetMut`].
#[macro_export]
macro_rules! get_mut {
    ($s:expr, $t:ty) => {
        // Fully-qualified path so the macro works without `GetMut` imported.
        $crate::GetMut::<$t>::get_mut(&mut $s)
    };
}
#[cfg(test)]
mod tests {
use crate::{get, Get, GetMut};
// without using the generation macro
#[test]
fn impl_get() {
struct Test(i32, f32);
impl Get<i32> for Test {
fn get(&self) -> &i32 {
&self.0
}
}
impl Get<f32> for Test {
fn get(&self) -> &f32 {
&self.1
}
}
let t = Test(1, 2.0);
assert_eq!(*get!(t, i32), 1);
assert_eq!(*get!(t, f32), 2.0);
}
#[test]
fn impl_get_mut() {
struct Test(i32, f32);
impl Get<i32> for Test {
fn get(&self) -> &i32 {
&self.0
}
}
impl Get<f32> for Test {
fn get(&self) -> &f32 {
&self.1
}
}
impl GetMut<i32> for Test {
fn get_mut(&mut self) -> &mut i32 {
&mut self.0
}
}
impl GetMut<f32> for Test {
fn get_mut(&mut self) -> &mut f32 {
&mut self.1
}
}
let t = Test(1, 2.0);
assert_eq!(*get!(t, i32), 1);
assert_eq!(*get!(t, f32), 2.0);
let mut t = t;
*get_mut!(t, i32) = 3;
*get_mut!(t, f32) = 4.0;
assert_eq!(*get!(t, i32), 3);
assert_eq!(*get!(t, f32), 4.0);
}
#[test]
fn impl_get_ref() {
struct Test<'a>(&'a i32, &'a f32);
impl<'a> Get<&'a i32> for Test<'a> {
fn get(&self) -> &&'a i32 {
&self.0
}
}
impl<'a> Get<&'a f32> for Test<'a> {
fn get(&self) -> &&'a f32 {
&self.1
}
}
let t = Test(&1, &2.0);
assert_eq!(**get!(t, &i32), 1);
assert_eq!(**get!(t, &f32), 2.0);
}
#[test]
fn impl_get_mut_ref() {
struct Test<'a>(&'a i32, &'a f32);
impl<'a> Get<&'a i32> for Test<'a> {
fn get(&self) -> &&'a i32 {
&self.0
}
}
impl<'a> Get<&'a f32> for Test<'a> {
fn get(&self) -> &&'a f32 {
&self.1
}
}
impl<'a> GetMut<&'a i32> for Test<'a> {
fn get_mut(&mut self) -> &mut &'a i32 {
&mut self.0
}
}
impl<'a> GetMut<&'a f32> for Test<'a> {
fn get_mut(&mut self) -> &mut &'a f32 {
&mut self.1
}
}
let t = Test(&1, &2.0);
assert_eq!(**get!(t, &i32), 1);
assert_eq!(**get!(t, &f32), 2.0);
let mut t = t;
*get_mut!(t, &i32) = &3;
*get_mut!(t, &f32) = &4.0;
assert_eq!(**get!(t, &i32), 3);
assert_eq!(**get!(t, &f32), 4.0);
}
// with using the generation macro
#[test]
fn derive_scalar() {
extern crate std;
use std::marker::PhantomData;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct A<T> {
_f: PhantomData<T>,
}
#[derive(crate::Typemap)]
struct Test(i32, f32, A<u32>);
let a = A { _f: PhantomData };
let t = Test(1, 2.0, a);
assert_eq!(*get!(t, i32), 1);
assert_eq!(*get!(t, f32), 2.0);
assert_eq!(*get!(t, A<u32>), a);
}
#[test]
fn derive_scalar_mut() {
extern crate std;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct A<T>(T);
#[derive(crate::Typemap)]
#[typemap_mut]
struct Test(i32, f32, A<u32>);
let a = A(3);
let t = Test(1, 2.0, a);
assert_eq!(*get!(t, i32), 1);
assert_eq!(*get!(t, f32), 2.0);
assert_eq!(*get!(t, A<u32>), a);
let mut t = t;
let b = A(6);
*get_mut!(t, i32) = 4;
*get_mut!(t, f32) = 5.0;
*get_mut!(t, A<u32>) = b;
assert_eq!(*get!(t, i32), 4);
assert_eq!(*get!(t, f32), 5.0);
assert_eq!(*get!(t, A<u32>), b);
}
#[test]
fn derive_struct() {
    #[derive(Debug, PartialEq)]
    struct A {}
    #[derive(Debug, PartialEq)]
    struct B {}
    #[derive(crate::Typemap)]
    struct Test(A, B);
    let t = Test(A {}, B {});
    assert_eq!(*get!(t, A), A {});
    // FIX: `B` was constructed but never asserted — also verify lookup
    // of the second field so both generated impls are exercised.
    assert_eq!(*get!(t, B), B {});
}
#[test]
fn derive_struct_mut() {
#[derive(Debug, PartialEq)]
struct A {
x: u32,
}
#[derive(Debug, PartialEq)]
struct B {}
#[derive(crate::Typemap)]
#[typemap_mut]
struct Test(A, B);
let t = Test(A { x: 1 }, B {});
assert_eq!(*get!(t, A), A { x: 1 });
let mut t = t;
get_mut!(t, A).x = 2;
assert_eq!(*get!(t, A), A { x: 2 });
}
#[test]
fn derive_ref() {
#[derive(Debug, PartialEq)]
struct A {}
#[derive(Debug, PartialEq)]
struct B {}
#[derive(crate::Typemap)]
struct Test<'l>(&'l A, &'l B, i32, f32);
let a = A {};
let b = B {};
let t = Test(&a, &b, 1, 2.0);
assert_eq!(**get!(t, &A), A {});
assert_eq!(**get!(t, &B), B {});
assert_eq!(*get!(t, i32), 1);
assert_eq!(*get!(t, f32), 2.0);
}
#[test]
fn derive_ref_mut() {
#[derive(Debug, PartialEq)]
struct A {
x: u32,
}
#[derive(Debug, PartialEq)]
struct B {}
#[derive(crate::Typemap)]
#[typemap_mut]
struct Test<'l>(&'l A, &'l B, i32, f32);
let a = A { x: 1 };
let b = B {};
let t = Test(&a, &b, 1, 2.0);
assert_eq!(**get!(t, &A), A { x: 1 });
assert_eq!(**get!(t, &B), B {});
assert_eq!(*get!(t, i32), 1);
assert_eq!(*get!(t, f32), 2.0);
let mut t = t;
*get_mut!(t, &A) = &A { x: 2 };
*get_mut!(t, &B) = &B {};
*get_mut!(t, i32) = 3;
*get_mut!(t, f32) = 4.0;
assert_eq!(**get!(t, &A), A { x: 2 });
assert_eq!(**get!(t, &B), B {});
assert_eq!(*get!(t, i32), 3);
assert_eq!(*get!(t, f32), 4.0);
}
#[test]
fn derive_box_trait() {
extern crate std;
use std::{boxed::Box, fmt::Debug};
// trait TA and struct A
trait TA {
fn value_a(&self) -> i32;
}
#[derive(Debug, PartialEq)]
struct A {
v: i32,
}
impl TA for A {
fn value_a(&self) -> i32 {
self.v
}
}
// trait TB and struct B
trait TB {
fn value_b(&self) -> f32;
}
#[derive(Debug, PartialEq)]
struct B {
v: f32,
}
impl TB for B {
fn value_b(&self) -> f32 {
self.v
}
}
// instance and asserts
#[derive(crate::Typemap)]
struct Test(Box<dyn TA>, Box<dyn TB>);
let t = Test(Box::new(A { v: 1 }), Box::new(B { v: 2.0 }));
assert_eq!(get!(t, Box<dyn TA>).value_a(), 1);
assert_eq!(get!(t, Box<dyn TB>).value_b(), 2.0);
}
#[test]
fn derive_box_trait_mut() {
extern crate std;
use std::{boxed::Box, fmt::Debug};
// trait TA and struct A
trait TA {
fn value_a(&self) -> i32;
fn set_a(&mut self, a: i32);
}
#[derive(Debug, PartialEq)]
struct A {
v: i32,
}
impl TA for A {
fn value_a(&self) -> i32 {
self.v
}
fn set_a(&mut self, a: i32) {
self.v = a;
}
}
// trait TB and struct B
trait TB {
fn value_b(&self) -> f32;
fn set_b(&mut self, b: f32);
}
#[derive(Debug, PartialEq)]
struct B {
v: f32,
}
impl TB for B {
fn value_b(&self) -> f32 {
self.v
}
fn set_b(&mut self, b: f32) {
self.v = b;
}
}
// instance and asserts
#[derive(crate::Typemap)]
#[typemap_mut]
struct Test(Box<dyn TA>, Box<dyn TB>);
let t = Test(Box::new(A { v: 1 }), Box::new(B { v: 2.0 }));
assert_eq!(get!(t, Box<dyn TA>).value_a(), 1);
assert_eq!(get!(t, Box<dyn TB>).value_b(), 2.0);
let mut t = t;
get_mut!(t, Box<dyn TA>).set_a(3);
get_mut!(t, Box<dyn TB>).set_b(4.0);
assert_eq!(get!(t, Box<dyn TA>).value_a(), 3);
assert_eq!(get!(t, Box<dyn TB>).value_b(), 4.0);
}
#[test]
fn derive_static_trait() {
extern crate std;
use std::fmt::Debug;
// trait TA and struct A
trait TA {
fn value_a(&self) -> i32;
}
#[derive(Debug, PartialEq)]
struct A {
v: i32,
}
impl TA for A {
fn value_a(&self) -> i32 {
self.v
}
}
// trait TB and struct B
trait TB {
fn value_b(&self) -> f32;
}
#[derive(Debug, PartialEq)]
struct B {
v: f32,
}
impl TB for B {
fn value_b(&self) -> f32 {
self.v
}
}
// instance and asserts
#[derive(crate::Typemap)]
struct Test(&'static dyn TA, &'static dyn TB);
let a: &'static dyn TA = &A { v: 1 };
let b: &'static dyn TB = &B { v: 2.0 };
let t = Test(a, b);
assert_eq!(get!(t, &'static dyn TA).value_a(), 1);
assert_eq!(get!(t, &'static dyn TB).value_b(), 2.0);
}
#[test]
fn derive_static_trait_mut() {
extern crate std;
use std::fmt::Debug;
// trait TA and struct A
trait TA {
fn value_a(&self) -> i32;
}
#[derive(Debug, PartialEq)]
struct A {
v: i32,
}
impl TA for A {
fn value_a(&self) -> i32 {
self.v
}
}
// trait TB and struct B
trait TB {
fn value_b(&self) -> f32;
}
#[derive(Debug, PartialEq)]
struct B {
v: f32,
}
impl TB for B {
fn value_b(&self) -> f32 {
self.v
}
}
// instance and asserts
#[derive(crate::Typemap)]
#[typemap_mut]
struct Test(&'static dyn TA, &'static dyn TB);
let a: &'static dyn TA = &A { v: 1 };
let b: &'static dyn TB = &B { v: 2.0 };
let t = Test(a, b);
assert_eq!(get!(t, &'static dyn TA).value_a(), 1);
assert_eq!(get!(t, &'static dyn TB).value_b(), 2.0);
let mut t = t;
let c: &'static dyn TA = &A { v: 3 };
let d: &'static dyn TB = &B { v: 4.0 };
*get_mut!(t, &'static dyn TA) = c;
*get_mut!(t, &'static dyn TB) = d;
assert_eq!(get!(t, &'static dyn TA).value_a(), 3);
assert_eq!(get!(t, &'static dyn TB).value_b(), 4.0);
}
}
|
use std::cmp::Ordering;
use std::collections::BinaryHeap;
/// Solves the Day 15 Part 1 puzzle with respect to the given input.
pub fn part_1(input: String) {
    let grid = parse_input(input);
    let graph = make_adjacency_list(&grid);
    // Shortest path from the top-left node (0) to the bottom-right node.
    let target = graph.len() - 1;
    let total_risk = shortest_path(&graph, 0, target).unwrap();
    println!("{}", total_risk);
}
/// Solves the Day 15 Part 2 puzzle with respect to the given input.
pub fn part_2(input: String) {
    let tile = parse_input(input);
    let rows = tile.len();
    let cols = tile[0].len();
    // The full map is the parsed tile repeated in a 5x5 grid.
    let mut risks = vec![vec![0usize; 5 * cols]; 5 * rows];
    for r1 in 0..5 {
        for c1 in 0..5 {
            for r2 in 0..rows {
                for c2 in 0..cols {
                    let r = r1 * rows + r2;
                    let c = c1 * cols + c2;
                    // Each tile copy adds (r1 + c1) to the cell's risk,
                    // wrapping from 9 back to 1: the -1 / % 9 / +1 maps
                    // the 1..=9 range onto 0..=8 for the modulo and back.
                    risks[r][c] = (tile[r2][c2] + r1 + c1 - 1) % 9 + 1;
                }
            }
        }
    }
    let adjs = make_adjacency_list(&risks);
    let risk = shortest_path(&adjs, 0, adjs.len() - 1).unwrap();
    println!("{}", risk);
}
/// Parses a character representing a digit to an unsigned integer.
///
/// # Panics
/// Panics if `digit` is not a decimal (base-10) digit.
fn char_to_usize(digit: char) -> usize {
    digit.to_digit(10).expect("expected a decimal digit") as usize
}
/// Parses the input to the Day 15 puzzle: one row of single-digit risk
/// levels per line.
fn parse_input(input: String) -> Vec<Vec<usize>> {
    input
        .lines()
        .map(|line| {
            line.chars()
                .map(|c| c.to_digit(10).unwrap() as usize)
                .collect()
        })
        .collect()
}
/// Constructs an adjacency list of ``Edge``s from the given risk matrix.
/// Node (r, c) is flattened to index `r * cols + c`; each node links to its
/// in-bounds orthogonal neighbours, with the neighbour's risk as edge cost.
fn make_adjacency_list(risks: &Vec<Vec<usize>>) -> Vec<Vec<Edge>> {
    let rows = risks.len();
    let cols = risks[0].len();
    let mut adjacencies = Vec::with_capacity(rows * cols);
    for r in 0..rows {
        for c in 0..cols {
            // Candidate neighbours in order: up, down, left, right.
            // `wrapping_sub` turns an out-of-bounds -1 into usize::MAX,
            // which the bounds check below rejects.
            let candidates = [
                (r.wrapping_sub(1), c),
                (r + 1, c),
                (r, c.wrapping_sub(1)),
                (r, c + 1),
            ];
            let mut edges = Vec::new();
            for &(nr, nc) in candidates.iter() {
                if nr < rows && nc < cols {
                    edges.push(Edge {
                        node: nr * cols + nc,
                        cost: risks[nr][nc],
                    });
                }
            }
            adjacencies.push(edges);
        }
    }
    adjacencies
}
//------------------------------------------------------------------------------
// The code below this dashed line was copied from:
// https://doc.rust-lang.org/std/collections/binary_heap/index.html
/// Priority-queue entry for Dijkstra: a tentative total `cost` to reach
/// node `position`.
#[derive(Copy, Clone, Eq, PartialEq)]
struct State {
    // Total risk accumulated from the start node.
    cost: usize,
    // Flattened node index (row * cols + col).
    position: usize,
}
// The priority queue depends on `Ord`.
// Explicitly implement the trait so the queue becomes a min-heap
// instead of a max-heap.
impl Ord for State {
    fn cmp(&self, other: &Self) -> Ordering {
        // Compare costs in reverse so std's max-heap behaves as a min-heap;
        // break ties on position so `Ord` stays consistent with `PartialEq`.
        match other.cost.cmp(&self.cost) {
            Ordering::Equal => self.position.cmp(&other.position),
            unequal => unequal,
        }
    }
}
// `PartialOrd` needs to be implemented as well.
impl PartialOrd for State {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // A total order exists, so delegate to `Ord`.
        Some(Ord::cmp(self, other))
    }
}
// Each node is represented as a `usize`, for a shorter implementation.
struct Edge {
    // Flattened index of the neighbouring node.
    node: usize,
    // Risk incurred by stepping onto that node.
    cost: usize,
}
// Dijkstra's shortest path algorithm.
// Start at `start` and use `dist` to track the current shortest distance
// to each node. This implementation isn't memory-efficient as it may leave duplicate
// nodes in the queue. It also uses `usize::MAX` as a sentinel value,
// for a simpler implementation.
/// Returns the minimum total edge cost from `start` to `goal`, or `None`
/// when `goal` is unreachable.
fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
    // dist[node] = current shortest distance from `start` to `node`
    let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
    let mut heap = BinaryHeap::new();
    // We're at `start`, with a zero cost
    dist[start] = 0;
    heap.push(State {
        cost: 0,
        position: start,
    });
    // Examine the frontier with lower cost nodes first (min-heap)
    while let Some(State { cost, position }) = heap.pop() {
        // Alternatively we could have continued to find all shortest paths
        if position == goal {
            return Some(cost);
        }
        // Important as we may have already found a better way
        if cost > dist[position] {
            continue;
        }
        // For each node we can reach, see if we can find a way with
        // a lower cost going through this node
        for edge in &adj_list[position] {
            let next = State {
                cost: cost + edge.cost,
                position: edge.node,
            };
            // If so, add it to the frontier and continue
            if next.cost < dist[next.position] {
                heap.push(next);
                // Relaxation, we have now found a better way
                dist[next.position] = next.cost;
            }
        }
    }
    // Goal not reachable
    None
}
|
use crate::grammar::ast::{eq::AstEq, BooleanLit, Expression};
use crate::grammar::model::{HasSourceReference, WrightInput};
use crate::grammar::parsers::with_input;
use crate::grammar::tracing::parsers::alt;
use crate::grammar::tracing::parsers::tag;
use crate::grammar::tracing::{parsers::map, trace_result};
use nom::combinator::value;
use nom::IResult;
// String constants shared by the `BooleanLit` parser and its trace output.
impl<T: std::fmt::Debug + Clone> BooleanLit<T> {
    /// Literal representing a true value.
    pub const TRUE: &'static str = "true";
    /// Literal representing a false value.
    pub const FALSE: &'static str = "false";
    /// The name of this parser when appearing in function traces.
    pub const TRACE_NAME: &'static str = "BooleanLit";
}
impl<I: WrightInput> BooleanLit<I> {
    /// Constructs a literal from the source it was parsed from and its value.
    fn new(source: I, val: bool) -> Self {
        Self { source, inner: val }
    }
    /// Recognizes either the `true` or `false` keyword and yields the
    /// corresponding boolean.
    fn parser_inner(inp: I) -> IResult<I, bool> {
        alt((value(true, tag(Self::TRUE)), value(false, tag(Self::FALSE))))(inp)
    }
    /// Parses a boolean literal from wright source code.
    pub fn parse(input: I) -> IResult<I, Self> {
        // `with_input` pairs the parsed value with the exact input fragment
        // it consumed; that fragment becomes the literal's source reference.
        let res = map(with_input(Self::parser_inner), |(consumed, v)| {
            Self::new(consumed, v)
        })(input.trace_start_clone(Self::TRACE_NAME));
        trace_result(Self::TRACE_NAME, res)
    }
}
impl<I: std::fmt::Debug + Clone> HasSourceReference<I> for BooleanLit<I> {
    /// Returns the fragment of source code this literal was parsed from.
    fn get_source_ref(&self) -> &I {
        &self.source
    }
}
// Implement `From` rather than `Into`: the standard blanket impl derives
// `Into<Expression<I>> for BooleanLit<I>` automatically, so existing
// `.into()` call sites keep working while `Expression::from(lit)` also
// becomes available (clippy: from_over_into).
impl<I: std::fmt::Debug + Clone> From<BooleanLit<I>> for Expression<I> {
    /// Wraps this boolean literal in the expression enum.
    fn from(lit: BooleanLit<I>) -> Expression<I> {
        Expression::BooleanLit(lit)
    }
}
impl<I: std::fmt::Debug + Clone> AstEq for BooleanLit<I> {
    // AST equality compares the parsed boolean values only; the source
    // references are deliberately ignored.
    fn ast_eq(fst: &Self, snd: &Self) -> bool {
        fst.inner == snd.inner
    }
}
|
//! The repr module is concerned with the representation of parsed regular expressions. A Pattern
//! is compiled by the `compile` module into a state graph defined in `state`.
#![allow(dead_code)]
/// A Pattern is either a repeated pattern, a stored submatch, an alternation between two patterns,
/// two patterns following each other, or a character range or set.
#[derive(Clone, Debug, PartialEq)]
pub enum Pattern {
    /// A sequence of patterns following each other (ab).
    Concat(Vec<Pattern>),
    /// A repeated sub-pattern.
    Repeated(Box<Repetition>),
    /// A stored submatch.
    Submatch(Box<Pattern>),
    /// An alternation between patterns (a|bb|ccc)
    Alternate(Vec<Pattern>),
    /// A single character.
    Char(char),
    /// Any character (.).
    Any,
    /// A string.
    Str(String),
    /// A character range.
    CharRange(char, char),
    /// A set of characters.
    CharSet(Vec<char>),
    /// A position anchor.
    Anchor(AnchorLocation),
}
/// `AnchorLocation` encodes `^` and `$` anchors, respectively.
#[derive(Clone, Debug, PartialEq)]
pub enum AnchorLocation {
    /// Start-of-input anchor (`^`).
    Begin,
    /// End-of-input anchor (`$`).
    End,
}
/// A pattern can be repeated in various manners, which is represented by the pattern being wrapped
/// in a Repetition.
///
/// The inner type is a pattern, because a repetition is either concerned with only one pattern
/// (`/.?/`), or a submatch (`/(abc)?/`).
#[derive(Clone, Debug, PartialEq)]
pub enum Repetition {
    /// /P?/
    ZeroOrOnce(Pattern),
    /// /P*/
    ZeroOrMore(Pattern),
    /// /P+/
    OnceOrMore(Pattern),
    /// /P{min, (max)}/
    Specific(Pattern, u32, Option<u32>),
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::*;
    // /a(b|c)/
    fn simple_re0() -> Pattern {
        Pattern::Concat(vec![
            Pattern::CharRange('a', 'a'),
            Pattern::Alternate(vec![(Pattern::Char('b')), (Pattern::Char('c'))]),
        ])
    }
    // Returns the Pattern (parsed) form of /(a[bc])?(cd)*(e|f)+x{1,3}(g|hh|i)j{2,}klm/
    fn simple_re1() -> Pattern {
        Pattern::Concat(vec![
            // (a[bc])?
            Pattern::Repeated(Box::new(Repetition::ZeroOrOnce(Pattern::Submatch(
                Box::new(Pattern::Concat(vec![
                    Pattern::Char('a'),
                    Pattern::CharRange('b', 'c'),
                ])),
            )))),
            // (cd)*
            Pattern::Repeated(Box::new(Repetition::ZeroOrMore(Pattern::Submatch(
                Box::new(Pattern::Concat(vec![
                    Pattern::Char('c'),
                    Pattern::Char('d'),
                ])),
            )))),
            // (e|f)+ — note: submatch wraps the repetition here.
            Pattern::Submatch(Box::new(Pattern::Repeated(Box::new(
                Repetition::OnceOrMore(Pattern::Alternate(vec![
                    (Pattern::Char('e')),
                    (Pattern::Char('f')),
                ])),
            )))),
            // x{1,3}
            Pattern::Repeated(Box::new(Repetition::Specific(
                Pattern::Char('x'),
                1,
                Some(3),
            ))),
            // (g|hh|i) — `hh` expressed as h{2,2}
            Pattern::Alternate(vec![
                Pattern::Char('g'),
                Pattern::Repeated(Box::new(Repetition::Specific(
                    Pattern::Char('h'),
                    2,
                    Some(2),
                ))),
                (Pattern::Char('i')),
            ]),
            // j{2,}
            Pattern::Repeated(Box::new(Repetition::Specific(Pattern::Char('j'), 2, None))),
            // klm
            Pattern::Str("klm".to_string()),
        ])
    }
    use crate::compile::start_compile;
    // Smoke test: compiles the pattern and prints its graphviz rendering.
    // `dot` presumably comes from `crate::state::*` — TODO confirm.
    #[test]
    fn test_re1() {
        // println!("{:?}", start_compile(simple_re0()));
        let dot = dot(&start_compile(&simple_re1()));
        println!("digraph st {{ {} }}", dot);
    }
}
|
use proconio::{fastout, input};
#[fastout]
fn main() {
    input! {
        x_vec: [i64; 5],
    }
    // Print the 1-based position of the first zero entry, if any.
    if let Some(pos) = x_vec.iter().position(|&x| x == 0) {
        println!("{}", pos + 1);
    }
}
|
use super::DType;
pub type Scm = &'static ScmValue;
/// The value an `Scm` handle points at.
#[derive(Debug, Clone)]
pub enum ScmValue {
    /// Integer value.
    Int(i64),
    /// Floating-point value.
    Flt(f64),
    /// Vector of child handles.
    Vec(Vec<Scm>),
}
impl DType for Scm {
    // Constructors leak each boxed value to obtain the 'static lifetime that
    // `Scm = &'static ScmValue` requires; the memory is intentionally never
    // reclaimed.
    fn int(i: i64) -> Self {
        Box::leak(Box::new(ScmValue::Int(i)))
    }
    fn flt(f: f64) -> Self {
        Box::leak(Box::new(ScmValue::Flt(f)))
    }
    fn vec(v: Vec<Self>) -> Self {
        Box::leak(Box::new(ScmValue::Vec(v)))
    }
    // Checked downcasts: `Some` only when the value has the requested variant.
    fn as_int(&self) -> Option<i64> {
        match **self {
            ScmValue::Int(i) => Some(i),
            _ => None,
        }
    }
    fn as_flt(&self) -> Option<f64> {
        match **self {
            ScmValue::Flt(i) => Some(i),
            _ => None,
        }
    }
    fn as_vec(&self) -> Option<&Vec<Self>> {
        match *self {
            ScmValue::Vec(i) => Some(i),
            _ => None,
        }
    }
    // Identity semantics: two handles are equal only when they point at the
    // same leaked allocation, not when their contents match.
    fn eq(&self, other: &Self) -> bool {
        *self as *const ScmValue == *other as *const ScmValue
    }
    // Ordering is only defined between two Ints or two Flts; any other
    // combination is a programming error and panics.
    fn lt(&self, other: &Self) -> bool {
        match (self, other) {
            (ScmValue::Int(a), ScmValue::Int(b)) => a < b,
            (ScmValue::Flt(a), ScmValue::Flt(b)) => a < b,
            _ => panic!("invalid comparison"),
        }
    }
}
|
pub fn run() {
    let mut i: i32 = 0;
    // The same positional argument may be referenced more than once.
    println!("{0} and {0}", i);
    loop {
        i += 1;
        if i == 5 {
            // Skip printing the value 5.
            continue;
        }
        if i >= 10 {
            break;
        }
        println!("value i now {}", i);
    }
    // Counts 2..=31: the increment happens before the print.
    let mut n = 1;
    while n <= 30 {
        n += 1;
        println!("{0}", n);
    }
}
|
//! Attributes implementation.
use crate::Attribute;
use arctk::{
err::Error,
img::Gradient,
ord::{Link, Set},
};
use arctk_attr::input;
/// Surface attribute setup.
#[input]
pub enum AttributeLinker {
    /// Opaque coloured surface. Payload: gradient key.
    Opaque(String),
    /// Partially reflective mirror. Payload: gradient key, absorption fraction.
    Mirror(String, f64),
    /// Partially transparent surface. Payload: gradient key, absorption fraction.
    Transparent(String, f64),
    /// Refractive surface. Payload: gradient key, absorption fraction,
    /// inside and outside refractive indices.
    Refractive(String, f64, [f64; 2]),
    /// Luminous surface. Payload: gradient key, brightness multiplier.
    Luminous(String, f64),
}
impl<'a> Link<'a, Gradient> for AttributeLinker {
    type Inst = Attribute<'a>;

    /// Names of the gradients this attribute needs resolved before linking.
    #[inline]
    fn requires(&self) -> Vec<String> {
        match *self {
            Self::Opaque(ref grad)
            | Self::Mirror(ref grad, ..)
            | Self::Transparent(ref grad, ..)
            | Self::Refractive(ref grad, ..)
            | Self::Luminous(ref grad, ..) => vec![grad.clone()],
        }
    }

    /// Resolves the stored gradient key against `grads` and builds the
    /// linked [`Attribute`] instance.
    ///
    /// # Panics
    /// Panics when the gradient key is missing from `grads`.
    #[inline]
    fn link(self, grads: &'a Set<Gradient>) -> Result<Self::Inst, Error> {
        // Every variant resolves exactly one gradient key the same way;
        // deduplicate the lookup-or-panic into a single closure.
        let fetch = |grad: &String| {
            grads
                .get(grad)
                .unwrap_or_else(|| panic!("Failed to link attribute-gradient key: {}", grad))
        };
        Ok(match self {
            Self::Opaque(ref grad) => Attribute::Opaque(fetch(grad)),
            Self::Mirror(ref grad, abs_frac) => Attribute::Mirror(fetch(grad), abs_frac),
            Self::Transparent(ref grad, abs_frac) => {
                Attribute::Transparent(fetch(grad), abs_frac)
            }
            Self::Refractive(ref grad, abs_frac, ref_indices) => {
                Attribute::Refractive(fetch(grad), abs_frac, ref_indices)
            }
            Self::Luminous(ref grad, bright_mult) => {
                Attribute::Luminous(fetch(grad), bright_mult)
            }
        })
    }
}
|
// One parsed policy line: the password plus the rule it must satisfy.
struct ValidationInfo {
    // The candidate password string.
    password: String,
    // The character the policy constrains.
    required_char: char,
    // Policy numbers "X-Y": occurrence bounds in v1, 1-based positions in v2.
    chars_allowed: (usize, usize),
}
/// Parses a bounds string in the format "X-Y" into an `(X, Y)` tuple.
///
/// # Panics
/// Panics with a descriptive message if either bound is missing or is not
/// an unsigned integer.
fn get_num_range(s: &str) -> (usize, usize) {
    // Single pass over the '-'-separated parts; any trailing parts beyond
    // the first two are ignored, matching the original indexing behavior.
    let mut bounds = s.split('-').map(|part| {
        part.parse::<usize>()
            .expect("bound is not an unsigned integer")
    });
    (
        bounds.next().expect("missing lower bound"),
        bounds.next().expect("missing upper bound"),
    )
}
// Parses one "X-Y c: password" policy line into a ValidationInfo.
fn line_to_valid_info(l: &String) -> ValidationInfo {
    let mut sections = l.split(' ');
    let range = sections.next().unwrap();
    let char_spec = sections.next().unwrap();
    let pw = sections.next().unwrap();
    ValidationInfo {
        password: pw.to_string(),
        required_char: char_spec.chars().next().unwrap(),
        chars_allowed: get_num_range(range),
    }
}
fn password_is_valid(v: &ValidationInfo) -> bool {
let ValidationInfo {
password,
required_char,
chars_allowed,
} = v;
let num_matches = password.matches(*required_char).count();
num_matches >= chars_allowed.0 && num_matches <= chars_allowed.1
}
// In this rendition, pw is valid if exactly one of the required_chars exist
// at the indicated index
fn password_is_valid_v2(v: &ValidationInfo) -> bool {
let ValidationInfo {
password,
required_char,
chars_allowed,
} = v;
let mut total_occurrences: usize = 0;
let (lb, ub) = chars_allowed;
if password.chars().nth(lb - 1).unwrap() == *required_char {
total_occurrences += 1;
}
if password.chars().nth(ub - 1).unwrap() == *required_char {
total_occurrences += 1;
}
total_occurrences == 1
}
pub fn get_answers(input: Vec<String>) -> (usize, usize) {
let valid_info = input.iter().map(line_to_valid_info);
(
valid_info.clone().filter(password_is_valid).count(),
valid_info.filter(password_is_valid_v2).count(),
)
}
|
use std::collections::HashMap;
/// Tracks raw read-only request payloads awaiting an index.
#[derive(Default, Debug, Clone)]
pub struct ReadOnly {
    // Pending request payloads keyed by the index they wait on;
    // per-key insertion order is preserved.
    req_status: HashMap<u64, Vec<Vec<u8>>>,
}
impl ReadOnly {
    /// Creates an empty tracker.
    pub fn new() -> ReadOnly {
        ReadOnly {
            req_status: HashMap::default(),
        }
    }
    /// Discards all pending requests.
    pub fn reset(&mut self) {
        self.req_status.clear();
    }
    /// Queues `req` under `index`, preserving insertion order.
    pub fn add_request(&mut self, index: u64, req: Vec<u8>) {
        // Entry API: one hash lookup instead of the original
        // contains_key + insert + get_mut sequence (clippy: map_entry).
        self.req_status.entry(index).or_default().push(req);
    }
    /// Removes and returns every request queued under `index`
    /// (empty vector when none were queued).
    pub fn pop_requests(&mut self, index: u64) -> Vec<Vec<u8>> {
        self.req_status.remove(&index).unwrap_or_default()
    }
}
|
// 8.2.1
use std::cell::RefCell;
use std::sync::Arc;
use std::thread;
use rustc_serialize::json;
use std::str;
use mio::*;
use mio::udp::*;
use std::net::ToSocketAddrs;
use mio::buf::{RingBuf, SliceBuf, MutSliceBuf};
use std::collections::VecDeque;
use std::net::SocketAddr;
use std::thread::sleep_ms;
// RTPS sequence numbers.
type SeqNum = u64;
// Globally unique identifier of an RTPS entity.
#[derive(PartialEq, Clone)]
#[derive(RustcEncodable, RustcDecodable)]
struct Guid(pub u64);
// Network locator (address) of an endpoint.
#[derive(RustcEncodable, RustcDecodable)]
struct Locator(pub u64);
trait Entity {
    fn get_guid() -> Guid;
}
enum ProtocolId {
    Rtps,
}
// RTPS message header fields (all narrowed to u8 in this sketch).
struct Header {
    protocol:u8,
    version:u8,
    vendor_id:u8,
    guid_prefix:u8,
}
// The submessage kinds defined by the RTPS wire protocol.
#[derive(RustcEncodable, RustcDecodable)]
#[derive(Debug)]
enum SubmessageKind {
    Data,
    DataFrag,
    Gap,
    Heartbeat,
    AckNack,
    HeartbeatFrag,
    NackFrag,
    Pad,
    InfoTimestamp,
    InfoReply,
    InfoDestination,
    InfoSource,
}
struct SubmessageHeader {
    submessage_id:SubmessageKind,
    flags:u8,
    length:u8,
}
struct SubmessageElement;
struct Submessage {
    header:SubmessageHeader,
    element:SubmessageElement,
}
// A communication endpoint with its unicast/multicast locators.
struct EndPoint {
    topic_kind:u8,
    reliability_level:u8,
    unicast_locator_list:Vec<Locator>,
    multicast_locator_list:Vec<Locator>,
}
struct Participant {
    protocol_version:u8,
    vendor_id:u8,
    default_unicast_locator_list:Vec<Locator>,
    default_multicast_locator_list:Vec<Locator>,
}
// Publishing side: produces CacheChanges into its history cache.
struct Writer {
    guid:Guid,
    sequence_number:SeqNum,
    history_cache:HistoryCache,
    _target:Option<Arc<RefCell<Reader>>>,
}
impl Writer {
    // Creates a writer with a fixed (hard-coded) GUID and empty cache.
    fn new() -> Writer {
        Writer {
            guid: Guid(44),
            sequence_number: 0,
            history_cache: HistoryCache::new(),
            _target: None,
        }
    }
    // Allocates the next change, consuming the current sequence number and
    // incrementing it. NOTE(review): numbering starts at 0 here; RTPS
    // sequence numbers conventionally start at 1 — confirm intended.
    fn new_change(&mut self) -> CacheChange {
        let seq = self.sequence_number;
        self.sequence_number += 1;
        CacheChange {
            kind: ChangeKind::Kind,
            writer_guid: self.guid.clone(),
            sequence_number: seq,
            data: vec![],
        }
    }
}
// Subscribing side: accumulates received changes in its history cache.
struct Reader {
    history_cache:HistoryCache,
}
impl Reader {
    fn new() -> Reader {
        Reader {
            history_cache: HistoryCache::new(),
        }
    }
    // Handles an incoming submessage: Data submessages are recorded in the
    // history cache (with placeholder contents); everything else is ignored.
    // Returns the message unchanged so the caller can keep dispatching on it.
    fn _message(&mut self, message:SubmessageKind) -> SubmessageKind {
        match message {
            SubmessageKind::Data => {
                self.history_cache.add_change(CacheChange {
                    kind: ChangeKind::Kind,
                    writer_guid: Guid(0),
                    sequence_number: 0,
                    data: vec![],
                });
                // ...The DDS user is notified by one of the mechanisms described in the DDS Specification
                // RESPOND with AckNack { reader_guid, seq_num_change, }
            },
            _ => { }
        }
        message
    }
}
#[derive(PartialEq)]
#[derive(RustcEncodable, RustcDecodable)]
enum ChangeKind {
    Kind,
}
// One change to a data object, as stored in a history cache.
#[derive(PartialEq)]
#[derive(RustcEncodable, RustcDecodable)]
struct CacheChange {
    kind:ChangeKind,
    writer_guid:Guid,
    //instance_handle
    sequence_number:SeqNum,
    data:Vec<u8>,
}
// Unit-style status aliases mirroring the spec's Success/Failure returns.
type Status = Result<(), ()>;
#[allow(non_upper_case_globals)]
const Success:Status = Ok(());
#[allow(non_upper_case_globals)]
const Failure:Status = Err(());
// Ordered store of CacheChanges shared by readers and writers.
struct HistoryCache {
    changes:Vec<CacheChange>,
}
impl HistoryCache {
    fn new() -> HistoryCache {
        HistoryCache {
            changes: vec![]
        }
    }
    // Always succeeds; append preserves arrival order.
    fn add_change(&mut self, change:CacheChange) -> Status {
        self.changes.push(change);
        Success
    }
    // Removes the first equal change, if present; succeeds either way.
    fn remove_change(&mut self, change:CacheChange) -> Status {
        if let Some(pos) = self.changes.iter().position(|r| *r == change) {
            self.changes.remove(pos);
        }
        Success
    }
    // fn get_change(&self) {}
    fn get_seq_num_min(&self) -> Option<SeqNum> {
        self.changes.iter().map(|r| r.sequence_number).min()
    }
    fn get_seq_num_max(&self) -> Option<SeqNum> {
        self.changes.iter().map(|r| r.sequence_number).max()
    }
}
// JSON-encodes `msg` and multicasts it to the fixed group address.
// NOTE(review): both the buffer write and `send_to` results are ignored,
// so send failures are silently dropped.
fn send_socket(tx:&UdpSocket, msg:&SubmessageKind) {
    let mut buf = RingBuf::new(1024);
    buf.write_slice(json::encode(msg).unwrap().as_bytes());
    tx.send_to(&mut buf, &"227.1.1.100:7556".parse().unwrap());
}
// Blocking receive of one datagram, decoded from JSON.
// Panics on receive or decode failure.
fn recv_socket(rx:&UdpSocket) -> SubmessageKind {
    let mut buf = RingBuf::new(1024);
    rx.recv_from(&mut buf).unwrap();
    json::decode(str::from_utf8(buf.bytes()).unwrap()).unwrap()
}
const TOKEN_WRITER: Token = Token(0);
// Event-loop handler that drains a queue of submessages out of `tx`.
struct TxHandler {
    tx: UdpSocket,
    writer: Writer,
    queue: VecDeque<SubmessageKind>,
}
impl TxHandler {
    fn new(writer:Writer, tx: UdpSocket) -> TxHandler {
        TxHandler {
            tx: tx,
            writer: writer,
            queue: VecDeque::new(),
        }
    }
}
impl Handler for TxHandler {
    type Timeout = usize;
    type Message = ();
    // Writable-readiness callback: sends the next queued message and
    // re-registers for writability, or shuts the loop down once drained.
    fn ready(&mut self, event_loop: &mut EventLoop<TxHandler>, token: Token, events: EventSet) {
        if events.is_writable() {
            match self.queue.pop_front() {
                Some(msg) => {
                    debug!("We are writing a datagram now...");
                    send_socket(&self.tx, &msg);
                    // NOTE(review): re-registers with Token(1) although the
                    // initial registration used TOKEN_WRITER (Token(0)) —
                    // confirm the token mismatch is intentional.
                    event_loop.register_opt(&self.tx, Token(1), EventSet::writable(), PollOpt::edge()).unwrap();
                },
                None => {
                    // SHRUG
                    event_loop.shutdown();
                }
            }
        }
    }
}
// Event-loop handler that feeds received submessages into a Reader.
struct RxHandler {
    rx: UdpSocket,
    reader: Reader,
}
impl RxHandler {
    // Binds the receive socket to `addr` (first resolved address).
    fn new<A: ToSocketAddrs>(reader:Reader, addr: &A) -> RxHandler {
        RxHandler {
            rx: UdpSocket::bound(&addr.to_socket_addrs().unwrap().next().unwrap()).unwrap(),
            reader: reader,
        }
    }
    fn register(&mut self, event_loop: &mut EventLoop<RxHandler>) {
        event_loop.register_opt(&self.rx, Token(0), EventSet::readable(), PollOpt::edge()).unwrap();
    }
}
impl Handler for RxHandler {
    type Timeout = usize;
    type Message = ();
    // Readable-readiness callback: a Heartbeat ends the loop; Data is
    // recorded by `Reader::_message` as a side effect of the call.
    fn ready(&mut self, event_loop: &mut EventLoop<RxHandler>, _: Token, events: EventSet) {
        if events.is_readable() {
            debug!("We are receiving a datagram now...");
            let msg = self.reader._message(recv_socket(&self.rx));
            match msg {
                SubmessageKind::Data => {},
                SubmessageKind::Heartbeat => {
                    event_loop.shutdown();
                },
                _ => {},
            }
        }
    }
}
// End-to-end smoke test: one thread multicasts Data then Heartbeat, the
// other receives until the Heartbeat shuts its loop down. Requires
// multicast-capable loopback networking.
#[test]
fn test_8_4_1_1() {
    let a = thread::spawn(move || {
        let mut event_loop = EventLoop::new().unwrap();
        let mut writer = Writer::new();
        let change = writer.new_change();
        writer.history_cache.add_change(change);
        // on writer's thread...
        // TODO: history cache thread or writer thread?
        let tx = UdpSocket::bound(&"127.0.0.1:7555".parse().unwrap()).unwrap();
        event_loop.register_opt(&tx, TOKEN_WRITER, EventSet::writable(), PollOpt::edge()).unwrap();
        let mut handler = TxHandler::new(writer, tx);
        handler.queue.push_back(SubmessageKind::Data);
        handler.queue.push_back(SubmessageKind::Heartbeat);
        // Crude startup ordering: give the receiver time to register first.
        sleep_ms(200);
        event_loop.run(&mut handler).unwrap();
    });
    let b = thread::spawn(move || {
        let mut event_loop = EventLoop::new().unwrap();
        let reader = Reader::new();
        let mut handler = RxHandler::new(reader, &"0.0.0.0:7556");
        handler.rx.join_multicast(&"227.1.1.100".parse().unwrap()).unwrap();
        handler.register(&mut event_loop);
        event_loop.run(&mut handler).unwrap();
        // The StatefulWriter records that the RTPS Reader has received the CacheChange and adds it to the set of
        // acked_changes maintained by the ReaderProxy using the acked_changes_set operation
    });
    let _ = a.join();
    let _ = b.join();
}
|
// svd2rust-style generated accessors for the TIM16/TIM17 SR status register.
#[doc = "Register `SR` reader"]
pub type R = crate::R<SR_SPEC>;
#[doc = "Register `SR` writer"]
pub type W = crate::W<SR_SPEC>;
// --- UIF (bit 0): update interrupt flag, cleared by writing 0 ---
#[doc = "Field `UIF` reader - Update interrupt flag"]
pub type UIF_R = crate::BitReader<UIFR_A>;
#[doc = "Update interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum UIFR_A {
    #[doc = "0: No update occurred"]
    NoUpdateOccurred = 0,
    #[doc = "1: Update interrupt pending"]
    UpdatePending = 1,
}
impl From<UIFR_A> for bool {
    #[inline(always)]
    fn from(variant: UIFR_A) -> Self {
        variant as u8 != 0
    }
}
impl UIF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> UIFR_A {
        match self.bits {
            false => UIFR_A::NoUpdateOccurred,
            true => UIFR_A::UpdatePending,
        }
    }
    #[doc = "No update occurred"]
    #[inline(always)]
    pub fn is_no_update_occurred(&self) -> bool {
        *self == UIFR_A::NoUpdateOccurred
    }
    #[doc = "Update interrupt pending"]
    #[inline(always)]
    pub fn is_update_pending(&self) -> bool {
        *self == UIFR_A::UpdatePending
    }
}
#[doc = "Update interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum UIFW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<UIFW_AW> for bool {
    #[inline(always)]
    fn from(variant: UIFW_AW) -> Self {
        variant as u8 != 0
    }
}
// `BitWriter0C`: write-zero-to-clear semantics for this flag.
#[doc = "Field `UIF` writer - Update interrupt flag"]
pub type UIF_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, UIFW_AW>;
impl<'a, REG, const O: u8> UIF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(UIFW_AW::Clear)
    }
}
// --- CC1IF (bit 1): capture/compare 1 interrupt flag ---
#[doc = "Field `CC1IF` reader - Capture/Compare 1 interrupt flag"]
pub type CC1IF_R = crate::BitReader<CC1IFR_A>;
#[doc = "Capture/Compare 1 interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CC1IFR_A {
    #[doc = "0: No capture/compare has been detected"]
    NoMatch = 0,
    #[doc = "1: If CC1 is an output: The content of the counter TIMx_CNT matches the content of the TIMx_CCR1 register. If CC1 is an input: The counter value has been captured in TIMx_CCR1 register"]
    Match = 1,
}
impl From<CC1IFR_A> for bool {
    #[inline(always)]
    fn from(variant: CC1IFR_A) -> Self {
        variant as u8 != 0
    }
}
impl CC1IF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CC1IFR_A {
        match self.bits {
            false => CC1IFR_A::NoMatch,
            true => CC1IFR_A::Match,
        }
    }
    #[doc = "No capture/compare has been detected"]
    #[inline(always)]
    pub fn is_no_match(&self) -> bool {
        *self == CC1IFR_A::NoMatch
    }
    #[doc = "If CC1 is an output: The content of the counter TIMx_CNT matches the content of the TIMx_CCR1 register. If CC1 is an input: The counter value has been captured in TIMx_CCR1 register"]
    #[inline(always)]
    pub fn is_match(&self) -> bool {
        *self == CC1IFR_A::Match
    }
}
#[doc = "Capture/Compare 1 interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CC1IFW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<CC1IFW_AW> for bool {
    #[inline(always)]
    fn from(variant: CC1IFW_AW) -> Self {
        variant as u8 != 0
    }
}
// NOTE(review): unlike the other flags in this register this writer is a
// plain `BitWriter` (not `BitWriter0C`), and bit 1 is absent from
// ZERO_TO_MODIFY_FIELDS_BITMAP (0x02a1) below — confirm against the SVD
// that CC1IF really lacks write-zero-to-clear semantics.
#[doc = "Field `CC1IF` writer - Capture/Compare 1 interrupt flag"]
pub type CC1IF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CC1IFW_AW>;
impl<'a, REG, const O: u8> CC1IF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CC1IFW_AW::Clear)
    }
}
// --- COMIF (bit 5): COM interrupt flag, cleared by writing 0 ---
#[doc = "Field `COMIF` reader - COM interrupt flag"]
pub type COMIF_R = crate::BitReader<COMIFR_A>;
#[doc = "COM interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum COMIFR_A {
    #[doc = "0: No COM event occurred"]
    NoCom = 0,
    #[doc = "1: COM interrupt pending"]
    Com = 1,
}
impl From<COMIFR_A> for bool {
    #[inline(always)]
    fn from(variant: COMIFR_A) -> Self {
        variant as u8 != 0
    }
}
impl COMIF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> COMIFR_A {
        match self.bits {
            false => COMIFR_A::NoCom,
            true => COMIFR_A::Com,
        }
    }
    #[doc = "No COM event occurred"]
    #[inline(always)]
    pub fn is_no_com(&self) -> bool {
        *self == COMIFR_A::NoCom
    }
    #[doc = "COM interrupt pending"]
    #[inline(always)]
    pub fn is_com(&self) -> bool {
        *self == COMIFR_A::Com
    }
}
#[doc = "COM interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum COMIFW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<COMIFW_AW> for bool {
    #[inline(always)]
    fn from(variant: COMIFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `COMIF` writer - COM interrupt flag"]
pub type COMIF_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, COMIFW_AW>;
impl<'a, REG, const O: u8> COMIF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(COMIFW_AW::Clear)
    }
}
// --- BIF (bit 7): break interrupt flag, cleared by writing 0 ---
#[doc = "Field `BIF` reader - Break interrupt flag"]
pub type BIF_R = crate::BitReader<BIFR_A>;
#[doc = "Break interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BIFR_A {
    #[doc = "0: No break event occurred"]
    NoBreak = 0,
    #[doc = "1: Break interrupt pending"]
    Break = 1,
}
impl From<BIFR_A> for bool {
    #[inline(always)]
    fn from(variant: BIFR_A) -> Self {
        variant as u8 != 0
    }
}
impl BIF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> BIFR_A {
        match self.bits {
            false => BIFR_A::NoBreak,
            true => BIFR_A::Break,
        }
    }
    #[doc = "No break event occurred"]
    #[inline(always)]
    pub fn is_no_break(&self) -> bool {
        *self == BIFR_A::NoBreak
    }
    #[doc = "Break interrupt pending"]
    #[inline(always)]
    pub fn is_break(&self) -> bool {
        *self == BIFR_A::Break
    }
}
#[doc = "Break interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BIFW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<BIFW_AW> for bool {
    #[inline(always)]
    fn from(variant: BIFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `BIF` writer - Break interrupt flag"]
pub type BIF_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, BIFW_AW>;
impl<'a, REG, const O: u8> BIF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(BIFW_AW::Clear)
    }
}
// --- CC1OF (bit 9): capture/compare 1 overcapture flag, cleared by writing 0 ---
#[doc = "Field `CC1OF` reader - Capture/Compare 1 overcapture flag"]
pub type CC1OF_R = crate::BitReader<CC1OFR_A>;
#[doc = "Capture/Compare 1 overcapture flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CC1OFR_A {
    #[doc = "0: No overcapture has been detected"]
    NoOvercapture = 0,
    #[doc = "1: The counter value has been captured in TIMx_CCRx register while CCxIF flag was already set"]
    Overcapture = 1,
}
impl From<CC1OFR_A> for bool {
    #[inline(always)]
    fn from(variant: CC1OFR_A) -> Self {
        variant as u8 != 0
    }
}
impl CC1OF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CC1OFR_A {
        match self.bits {
            false => CC1OFR_A::NoOvercapture,
            true => CC1OFR_A::Overcapture,
        }
    }
    #[doc = "No overcapture has been detected"]
    #[inline(always)]
    pub fn is_no_overcapture(&self) -> bool {
        *self == CC1OFR_A::NoOvercapture
    }
    #[doc = "The counter value has been captured in TIMx_CCRx register while CCxIF flag was already set"]
    #[inline(always)]
    pub fn is_overcapture(&self) -> bool {
        *self == CC1OFR_A::Overcapture
    }
}
#[doc = "Capture/Compare 1 overcapture flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CC1OFW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<CC1OFW_AW> for bool {
    #[inline(always)]
    fn from(variant: CC1OFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `CC1OF` writer - Capture/Compare 1 overcapture flag"]
pub type CC1OF_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, CC1OFW_AW>;
impl<'a, REG, const O: u8> CC1OF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CC1OFW_AW::Clear)
    }
}
// Read-side accessors: extract each flag bit from the register snapshot.
impl R {
    #[doc = "Bit 0 - Update interrupt flag"]
    #[inline(always)]
    pub fn uif(&self) -> UIF_R {
        UIF_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Capture/Compare 1 interrupt flag"]
    #[inline(always)]
    pub fn cc1if(&self) -> CC1IF_R {
        CC1IF_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 5 - COM interrupt flag"]
    #[inline(always)]
    pub fn comif(&self) -> COMIF_R {
        COMIF_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 7 - Break interrupt flag"]
    #[inline(always)]
    pub fn bif(&self) -> BIF_R {
        BIF_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 9 - Capture/Compare 1 overcapture flag"]
    #[inline(always)]
    pub fn cc1of(&self) -> CC1OF_R {
        CC1OF_R::new(((self.bits >> 9) & 1) != 0)
    }
}
// Write-side proxies: each returns a field writer at its bit offset.
impl W {
    #[doc = "Bit 0 - Update interrupt flag"]
    #[inline(always)]
    #[must_use]
    pub fn uif(&mut self) -> UIF_W<SR_SPEC, 0> {
        UIF_W::new(self)
    }
    #[doc = "Bit 1 - Capture/Compare 1 interrupt flag"]
    #[inline(always)]
    #[must_use]
    pub fn cc1if(&mut self) -> CC1IF_W<SR_SPEC, 1> {
        CC1IF_W::new(self)
    }
    #[doc = "Bit 5 - COM interrupt flag"]
    #[inline(always)]
    #[must_use]
    pub fn comif(&mut self) -> COMIF_W<SR_SPEC, 5> {
        COMIF_W::new(self)
    }
    #[doc = "Bit 7 - Break interrupt flag"]
    #[inline(always)]
    #[must_use]
    pub fn bif(&mut self) -> BIF_W<SR_SPEC, 7> {
        BIF_W::new(self)
    }
    #[doc = "Bit 9 - Capture/Compare 1 overcapture flag"]
    #[inline(always)]
    #[must_use]
    pub fn cc1of(&mut self) -> CC1OF_W<SR_SPEC, 9> {
        CC1OF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TIM16/TIM17 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR_SPEC;
impl crate::RegisterSpec for SR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sr::R`](R) reader structure"]
impl crate::Readable for SR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sr::W`](W) writer structure"]
impl crate::Writable for SR_SPEC {
    // 0x02a1 = bits 0, 5, 7, 9 (UIF/COMIF/BIF/CC1OF): zero-to-clear fields.
    // NOTE(review): bit 1 (CC1IF) is excluded, matching its plain BitWriter
    // above — confirm against the device SVD.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0x02a1;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SR to value 0"]
impl crate::Resettable for SR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Error type for X11 calls. `quick_error!` expands this into an enum with
// `Display`/`Error` impls; `from()` additionally derives a `From` conversion
// for the variant's payload — NOTE(review): confirm the exact expansion
// against the quick-error crate docs.
quick_error! {
    #[derive(Debug)]
    pub enum X11Error {
        // A named X11 operation did not succeed.
        OperationFailed(operation: &'static str) {
            from()
        }
    }
}
|
use bevy::math::Vec3;
/// Spherical coordinates, matching the `From<Spherical> for Vec3` mapping:
/// x = r·cos(φ)·sin(θ), y = r·sin(φ)·sin(θ), z = r·cos(θ).
struct Spherical {
    /// Radial distance from the origin.
    pub r: f32,
    /// Azimuthal angle in the x-y plane, measured from the +x axis.
    pub φ: f32,
    /// Polar angle measured from the +z axis.
    pub θ: f32,
}
impl From<Vec3> for Spherical {
    /// Converts a Cartesian point into spherical coordinates; inverse of the
    /// `From<Spherical> for Vec3` mapping below.
    fn from(p: Vec3) -> Self {
        let r = p.length();
        Self {
            r,
            // Polar angle from +z. NOTE(review): yields NaN when r == 0.
            θ: (p.z / r).acos(),
            // Azimuth in the x-y plane. The Vec3 mapping sets
            // x = r·cosφ·sinθ and y = r·sinφ·sinθ, so φ = atan2(y, x).
            // The original `(p.x / p.y).atan()` had the operands swapped
            // (y/x, not x/y) and so failed to invert that mapping; `atan2`
            // also handles p.x == 0 and picks the correct quadrant without
            // the manual +π correction.
            φ: p.y.atan2(p.x),
        }
    }
}
impl From<Spherical> for Vec3 {
    /// Converts spherical coordinates back to a Cartesian point
    /// (θ polar from +z, φ azimuth from +x in the x-y plane).
    fn from(s: Spherical) -> Self {
        Vec3::new(
            s.r * s.φ.cos() * s.θ.sin(),
            s.r * s.φ.sin() * s.θ.sin(),
            s.r * s.θ.cos()
        )
    }
}
pub mod player_movement;
pub mod asset_loading; |
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Umbrella error for every operation in this AutoRust-generated client:
// one `#[error(transparent)]` variant per operation, each wrapping the
// error type of that operation's sibling module via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
ListOperations(#[from] list_operations::Error),
#[error(transparent)]
ListAddressesAtSubscriptionLevel(#[from] list_addresses_at_subscription_level::Error),
#[error(transparent)]
ListProductFamilies(#[from] list_product_families::Error),
#[error(transparent)]
ListConfigurations(#[from] list_configurations::Error),
#[error(transparent)]
ListProductFamiliesMetadata(#[from] list_product_families_metadata::Error),
#[error(transparent)]
ListOrderAtSubscriptionLevel(#[from] list_order_at_subscription_level::Error),
#[error(transparent)]
ListOrderItemsAtSubscriptionLevel(#[from] list_order_items_at_subscription_level::Error),
#[error(transparent)]
ListAddressesAtResourceGroupLevel(#[from] list_addresses_at_resource_group_level::Error),
#[error(transparent)]
GetAddressByName(#[from] get_address_by_name::Error),
#[error(transparent)]
CreateAddress(#[from] create_address::Error),
#[error(transparent)]
UpdateAddress(#[from] update_address::Error),
#[error(transparent)]
DeleteAddressByName(#[from] delete_address_by_name::Error),
#[error(transparent)]
ListOrderAtResourceGroupLevel(#[from] list_order_at_resource_group_level::Error),
#[error(transparent)]
GetOrderByName(#[from] get_order_by_name::Error),
#[error(transparent)]
ListOrderItemsAtResourceGroupLevel(#[from] list_order_items_at_resource_group_level::Error),
#[error(transparent)]
GetOrderItemByName(#[from] get_order_item_by_name::Error),
#[error(transparent)]
CreateOrderItem(#[from] create_order_item::Error),
#[error(transparent)]
UpdateOrderItem(#[from] update_order_item::Error),
#[error(transparent)]
DeleteOrderItemByName(#[from] delete_order_item_by_name::Error),
#[error(transparent)]
CancelOrderItem(#[from] cancel_order_item::Error),
#[error(transparent)]
ReturnOrderItem(#[from] return_order_item::Error),
}
/// GET `{base_path}/providers/Microsoft.EdgeOrder/operations` — lists the
/// provider's REST operations. Returns `OperationListResult` on 200; any
/// other status is surfaced as `Error::DefaultResponse` carrying the parsed
/// `ErrorResponse` body.
pub async fn list_operations(
operation_config: &crate::OperationConfig,
) -> std::result::Result<models::OperationListResult, list_operations::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.EdgeOrder/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list_operations::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_operations::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_operations::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_operations::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_operations::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_operations::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_operations::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_operations` (generated boilerplate;
// one such module per operation in this file).
pub mod list_operations {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.EdgeOrder/addresses`
/// — lists addresses at subscription scope. Optional `$filter` and
/// `$skipToken` query parameters are added only when provided.
pub async fn list_addresses_at_subscription_level(
operation_config: &crate::OperationConfig,
subscription_id: &str,
filter: Option<&str>,
skip_token: Option<&str>,
) -> std::result::Result<models::AddressResourceList, list_addresses_at_subscription_level::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EdgeOrder/addresses",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_addresses_at_subscription_level::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_addresses_at_subscription_level::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_addresses_at_subscription_level::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_addresses_at_subscription_level::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AddressResourceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_addresses_at_subscription_level::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_addresses_at_subscription_level::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_addresses_at_subscription_level::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_addresses_at_subscription_level`.
pub mod list_addresses_at_subscription_level {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// POST `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.EdgeOrder/listProductFamilies`
/// — lists product families. Sends `product_families_request` as a JSON
/// body; optional `$expand` and `$skipToken` query parameters.
pub async fn list_product_families(
operation_config: &crate::OperationConfig,
subscription_id: &str,
expand: Option<&str>,
skip_token: Option<&str>,
product_families_request: &models::ProductFamiliesRequest,
) -> std::result::Result<models::ProductFamilies, list_product_families::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EdgeOrder/listProductFamilies",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_product_families::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_product_families::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
// JSON request body, unlike the GET operations above.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(product_families_request).map_err(list_product_families::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_product_families::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_product_families::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProductFamilies = serde_json::from_slice(rsp_body)
.map_err(|source| list_product_families::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_product_families::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_product_families::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_product_families`.
pub mod list_product_families {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// POST `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.EdgeOrder/listConfigurations`
/// — lists configurations matching `configurations_request` (sent as JSON);
/// optional `$skipToken` query parameter.
pub async fn list_configurations(
operation_config: &crate::OperationConfig,
subscription_id: &str,
skip_token: Option<&str>,
configurations_request: &models::ConfigurationsRequest,
) -> std::result::Result<models::Configurations, list_configurations::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EdgeOrder/listConfigurations",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_configurations::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_configurations::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(configurations_request).map_err(list_configurations::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_configurations::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_configurations::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Configurations = serde_json::from_slice(rsp_body)
.map_err(|source| list_configurations::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_configurations::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_configurations::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_configurations`.
pub mod list_configurations {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// POST `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.EdgeOrder/productFamiliesMetadata`
/// — lists product-families metadata. A POST with an empty body, so an
/// explicit `Content-Length: 0` header is set; optional `$skipToken`.
pub async fn list_product_families_metadata(
operation_config: &crate::OperationConfig,
subscription_id: &str,
skip_token: Option<&str>,
) -> std::result::Result<models::ProductFamiliesMetadata, list_product_families_metadata::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EdgeOrder/productFamiliesMetadata",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_product_families_metadata::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_product_families_metadata::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
// Empty-body POST: some servers require Content-Length to be explicit.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_product_families_metadata::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_product_families_metadata::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ProductFamiliesMetadata = serde_json::from_slice(rsp_body)
.map_err(|source| list_product_families_metadata::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_product_families_metadata::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_product_families_metadata::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_product_families_metadata`.
pub mod list_product_families_metadata {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.EdgeOrder/orders`
/// — lists orders at subscription scope; optional `$skipToken`.
pub async fn list_order_at_subscription_level(
operation_config: &crate::OperationConfig,
subscription_id: &str,
skip_token: Option<&str>,
) -> std::result::Result<models::OrderResourceList, list_order_at_subscription_level::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EdgeOrder/orders",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_order_at_subscription_level::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_order_at_subscription_level::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_order_at_subscription_level::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_order_at_subscription_level::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OrderResourceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_order_at_subscription_level::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_order_at_subscription_level::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_order_at_subscription_level::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_order_at_subscription_level`.
pub mod list_order_at_subscription_level {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.EdgeOrder/orderItems`
/// — lists order items at subscription scope. Optional `$filter`, `$expand`
/// and `$skipToken` query parameters are added only when provided.
pub async fn list_order_items_at_subscription_level(
operation_config: &crate::OperationConfig,
subscription_id: &str,
filter: Option<&str>,
expand: Option<&str>,
skip_token: Option<&str>,
) -> std::result::Result<models::OrderItemResourceList, list_order_items_at_subscription_level::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.EdgeOrder/orderItems",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_order_items_at_subscription_level::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_order_items_at_subscription_level::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_order_items_at_subscription_level::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_order_items_at_subscription_level::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OrderItemResourceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_order_items_at_subscription_level::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_order_items_at_subscription_level::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_order_items_at_subscription_level::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_order_items_at_subscription_level`.
pub mod list_order_items_at_subscription_level {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.EdgeOrder/addresses`
/// — lists addresses scoped to a resource group; optional `$filter` and
/// `$skipToken`.
pub async fn list_addresses_at_resource_group_level(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
filter: Option<&str>,
skip_token: Option<&str>,
) -> std::result::Result<models::AddressResourceList, list_addresses_at_resource_group_level::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/addresses",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_addresses_at_resource_group_level::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_addresses_at_resource_group_level::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(skip_token) = skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_addresses_at_resource_group_level::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_addresses_at_resource_group_level::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AddressResourceList = serde_json::from_slice(rsp_body)
.map_err(|source| list_addresses_at_resource_group_level::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_addresses_at_resource_group_level::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_addresses_at_resource_group_level::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `list_addresses_at_resource_group_level`.
pub mod list_addresses_at_resource_group_level {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.EdgeOrder/addresses/{address_name}`
/// — fetches a single address resource by name.
pub async fn get_address_by_name(
operation_config: &crate::OperationConfig,
address_name: &str,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::AddressResource, get_address_by_name::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/addresses/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
address_name
);
let mut url = url::Url::parse(url_str).map_err(get_address_by_name::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_address_by_name::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_address_by_name::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_address_by_name::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AddressResource = serde_json::from_slice(rsp_body)
.map_err(|source| get_address_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200 statuses are expected to carry a JSON ErrorResponse payload.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| get_address_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_address_by_name::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Per-operation error type for `get_address_by_name`.
pub mod get_address_by_name {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// PUT `{base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.EdgeOrder/addresses/{address_name}`
/// — creates (or replaces) an address. 200 returns the created resource;
/// 202 means the creation was accepted asynchronously with no body.
pub async fn create_address(
operation_config: &crate::OperationConfig,
address_name: &str,
subscription_id: &str,
resource_group_name: &str,
address_resource: &models::AddressResource,
) -> std::result::Result<create_address::Response, create_address::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/addresses/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
address_name
);
let mut url = url::Url::parse(url_str).map_err(create_address::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_address::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(address_resource).map_err(create_address::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_address::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_address::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AddressResource =
serde_json::from_slice(rsp_body).map_err(|source| create_address::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_address::Response::Ok200(rsp_value))
}
// 202: long-running creation accepted; no response body to parse.
http::StatusCode::ACCEPTED => Ok(create_address::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create_address::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_address::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for `create_address`; `Response` distinguishes
// the synchronous 200 (resource in body) from the asynchronous 202.
pub mod create_address {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::AddressResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// PATCH `{base_path}/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.EdgeOrder/addresses/{address_name}`
/// — updates an address. An optional `If-Match` header enables optimistic
/// concurrency; 202 means the update was accepted asynchronously.
pub async fn update_address(
operation_config: &crate::OperationConfig,
address_name: &str,
subscription_id: &str,
resource_group_name: &str,
if_match: Option<&str>,
address_update_parameter: &models::AddressUpdateParameter,
) -> std::result::Result<update_address::Response, update_address::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/addresses/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
address_name
);
let mut url = url::Url::parse(url_str).map_err(update_address::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Bearer auth is optional: only attached when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_address::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(address_update_parameter).map_err(update_address::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_address::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_address::Error::ExecuteRequestError)?;
match rsp.status() {
// 202: long-running update accepted; no response body to parse.
http::StatusCode::ACCEPTED => Ok(update_address::Response::Accepted202),
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AddressResource =
serde_json::from_slice(rsp_body).map_err(|source| update_address::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update_address::Response::Ok200(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update_address::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update_address::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update_address {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(models::AddressResource),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `DELETE` request removing the named address resource.
        ///
        /// Returns `Ok200`, `Accepted202` (asynchronous continuation) or
        /// `NoContent204`; any other status is surfaced as
        /// `Error::DefaultResponse` carrying the decoded service error body.
        pub async fn delete_address_by_name(
            operation_config: &crate::OperationConfig,
            address_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
        ) -> std::result::Result<delete_address_by_name::Response, delete_address_by_name::Error> {
            let http_client = operation_config.http_client();
            // ARM resource URL for the address being deleted.
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/addresses/{}",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                address_name
            );
            let mut url = url::Url::parse(url_str).map_err(delete_address_by_name::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::DELETE);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(delete_address_by_name::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            // DELETE carries no payload.
            let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder
                .body(req_body)
                .map_err(delete_address_by_name::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(delete_address_by_name::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => Ok(delete_address_by_name::Response::Ok200),
                http::StatusCode::ACCEPTED => Ok(delete_address_by_name::Response::Accepted202),
                http::StatusCode::NO_CONTENT => Ok(delete_address_by_name::Response::NoContent204),
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                        .map_err(|source| delete_address_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(delete_address_by_name::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod delete_address_by_name {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `GET` request listing all orders in a resource group.
        ///
        /// `skip_token` is the service-issued continuation token for paging;
        /// returns the parsed `OrderResourceList` on 200, otherwise
        /// `Error::DefaultResponse` with the decoded service error body.
        pub async fn list_order_at_resource_group_level(
            operation_config: &crate::OperationConfig,
            subscription_id: &str,
            resource_group_name: &str,
            skip_token: Option<&str>,
        ) -> std::result::Result<models::OrderResourceList, list_order_at_resource_group_level::Error> {
            let http_client = operation_config.http_client();
            // Collection URL: {base}/subscriptions/{sub}/resourceGroups/{rg}/.../orders
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orders",
                operation_config.base_path(),
                subscription_id,
                resource_group_name
            );
            let mut url = url::Url::parse(url_str).map_err(list_order_at_resource_group_level::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::GET);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(list_order_at_resource_group_level::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            // Continuation token for subsequent pages, when supplied.
            if let Some(skip_token) = skip_token {
                url.query_pairs_mut().append_pair("$skipToken", skip_token);
            }
            let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder
                .body(req_body)
                .map_err(list_order_at_resource_group_level::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(list_order_at_resource_group_level::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::OrderResourceList = serde_json::from_slice(rsp_body)
                        .map_err(|source| list_order_at_resource_group_level::Error::DeserializeError(source, rsp_body.clone()))?;
                    Ok(rsp_value)
                }
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                        .map_err(|source| list_order_at_resource_group_level::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(list_order_at_resource_group_level::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod list_order_at_resource_group_level {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `GET` request fetching a single order by name and location.
        ///
        /// Returns the parsed `OrderResource` on 200, otherwise
        /// `Error::DefaultResponse` with the decoded service error body.
        pub async fn get_order_by_name(
            operation_config: &crate::OperationConfig,
            order_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
            location: &str,
        ) -> std::result::Result<models::OrderResource, get_order_by_name::Error> {
            let http_client = operation_config.http_client();
            // Orders are scoped per location: .../locations/{location}/orders/{name}
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/locations/{}/orders/{}",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                location,
                order_name
            );
            let mut url = url::Url::parse(url_str).map_err(get_order_by_name::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::GET);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(get_order_by_name::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder.body(req_body).map_err(get_order_by_name::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(get_order_by_name::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::OrderResource =
                        serde_json::from_slice(rsp_body).map_err(|source| get_order_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
                    Ok(rsp_value)
                }
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse =
                        serde_json::from_slice(rsp_body).map_err(|source| get_order_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(get_order_by_name::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod get_order_by_name {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `GET` request listing order items in a resource group.
        ///
        /// `filter`, `expand` and `skip_token` map to the OData `$filter`,
        /// `$expand` and `$skipToken` query parameters and are omitted when
        /// `None`. Returns `OrderItemResourceList` on 200, otherwise
        /// `Error::DefaultResponse` with the decoded service error body.
        pub async fn list_order_items_at_resource_group_level(
            operation_config: &crate::OperationConfig,
            subscription_id: &str,
            resource_group_name: &str,
            filter: Option<&str>,
            expand: Option<&str>,
            skip_token: Option<&str>,
        ) -> std::result::Result<models::OrderItemResourceList, list_order_items_at_resource_group_level::Error> {
            let http_client = operation_config.http_client();
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems",
                operation_config.base_path(),
                subscription_id,
                resource_group_name
            );
            let mut url = url::Url::parse(url_str).map_err(list_order_items_at_resource_group_level::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::GET);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(list_order_items_at_resource_group_level::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            // Optional OData query parameters.
            if let Some(filter) = filter {
                url.query_pairs_mut().append_pair("$filter", filter);
            }
            if let Some(expand) = expand {
                url.query_pairs_mut().append_pair("$expand", expand);
            }
            if let Some(skip_token) = skip_token {
                url.query_pairs_mut().append_pair("$skipToken", skip_token);
            }
            let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder
                .body(req_body)
                .map_err(list_order_items_at_resource_group_level::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(list_order_items_at_resource_group_level::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::OrderItemResourceList = serde_json::from_slice(rsp_body)
                        .map_err(|source| list_order_items_at_resource_group_level::Error::DeserializeError(source, rsp_body.clone()))?;
                    Ok(rsp_value)
                }
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                        .map_err(|source| list_order_items_at_resource_group_level::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(list_order_items_at_resource_group_level::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod list_order_items_at_resource_group_level {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `GET` request fetching a single order item by name.
        ///
        /// `expand` maps to the OData `$expand` query parameter and is omitted
        /// when `None`. Returns the parsed `OrderItemResource` on 200,
        /// otherwise `Error::DefaultResponse` with the decoded service error body.
        pub async fn get_order_item_by_name(
            operation_config: &crate::OperationConfig,
            order_item_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
            expand: Option<&str>,
        ) -> std::result::Result<models::OrderItemResource, get_order_item_by_name::Error> {
            let http_client = operation_config.http_client();
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems/{}",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                order_item_name
            );
            let mut url = url::Url::parse(url_str).map_err(get_order_item_by_name::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::GET);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(get_order_item_by_name::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            if let Some(expand) = expand {
                url.query_pairs_mut().append_pair("$expand", expand);
            }
            let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder
                .body(req_body)
                .map_err(get_order_item_by_name::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(get_order_item_by_name::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::OrderItemResource = serde_json::from_slice(rsp_body)
                        .map_err(|source| get_order_item_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
                    Ok(rsp_value)
                }
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                        .map_err(|source| get_order_item_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(get_order_item_by_name::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod get_order_item_by_name {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `PUT` request creating (or replacing) an order item.
        ///
        /// Returns `Ok200` with the created `OrderItemResource` or
        /// `Accepted202` when the service continues processing asynchronously;
        /// any other status is surfaced as `Error::DefaultResponse` carrying
        /// the decoded service error body.
        pub async fn create_order_item(
            operation_config: &crate::OperationConfig,
            order_item_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
            order_item_resource: &models::OrderItemResource,
        ) -> std::result::Result<create_order_item::Response, create_order_item::Error> {
            let http_client = operation_config.http_client();
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems/{}",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                order_item_name
            );
            let mut url = url::Url::parse(url_str).map_err(create_order_item::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::PUT);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(create_order_item::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            req_builder = req_builder.header("content-type", "application/json");
            // JSON-encode the full resource as the PUT payload.
            let req_body = azure_core::to_json(order_item_resource).map_err(create_order_item::Error::SerializeError)?;
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder.body(req_body).map_err(create_order_item::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(create_order_item::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::OrderItemResource =
                        serde_json::from_slice(rsp_body).map_err(|source| create_order_item::Error::DeserializeError(source, rsp_body.clone()))?;
                    Ok(create_order_item::Response::Ok200(rsp_value))
                }
                http::StatusCode::ACCEPTED => Ok(create_order_item::Response::Accepted202),
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse =
                        serde_json::from_slice(rsp_body).map_err(|source| create_order_item::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(create_order_item::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod create_order_item {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OrderItemResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `PATCH` request updating an existing order item.
        ///
        /// Returns `Accepted202` when the service continues processing
        /// asynchronously, or `Ok200` with the updated `OrderItemResource`;
        /// any other status is surfaced as `Error::DefaultResponse` carrying
        /// the decoded service error body.
        pub async fn update_order_item(
            operation_config: &crate::OperationConfig,
            order_item_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
            if_match: Option<&str>,
            order_item_update_parameter: &models::OrderItemUpdateParameter,
        ) -> std::result::Result<update_order_item::Response, update_order_item::Error> {
            let http_client = operation_config.http_client();
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems/{}",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                order_item_name
            );
            let mut url = url::Url::parse(url_str).map_err(update_order_item::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::PATCH);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(update_order_item::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            // Optional optimistic-concurrency precondition supplied by the caller.
            if let Some(if_match) = if_match {
                req_builder = req_builder.header("If-Match", if_match);
            }
            req_builder = req_builder.header("content-type", "application/json");
            let req_body = azure_core::to_json(order_item_update_parameter).map_err(update_order_item::Error::SerializeError)?;
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder.body(req_body).map_err(update_order_item::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(update_order_item::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::ACCEPTED => Ok(update_order_item::Response::Accepted202),
                http::StatusCode::OK => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::OrderItemResource =
                        serde_json::from_slice(rsp_body).map_err(|source| update_order_item::Error::DeserializeError(source, rsp_body.clone()))?;
                    Ok(update_order_item::Response::Ok200(rsp_value))
                }
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse =
                        serde_json::from_slice(rsp_body).map_err(|source| update_order_item::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(update_order_item::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod update_order_item {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Accepted202,
Ok200(models::OrderItemResource),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `DELETE` request removing the named order item.
        ///
        /// Returns `Ok200`, `Accepted202` (asynchronous continuation) or
        /// `NoContent204`; any other status is surfaced as
        /// `Error::DefaultResponse` carrying the decoded service error body.
        pub async fn delete_order_item_by_name(
            operation_config: &crate::OperationConfig,
            order_item_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
        ) -> std::result::Result<delete_order_item_by_name::Response, delete_order_item_by_name::Error> {
            let http_client = operation_config.http_client();
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems/{}",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                order_item_name
            );
            let mut url = url::Url::parse(url_str).map_err(delete_order_item_by_name::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::DELETE);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(delete_order_item_by_name::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            // DELETE carries no payload.
            let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder
                .body(req_body)
                .map_err(delete_order_item_by_name::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(delete_order_item_by_name::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => Ok(delete_order_item_by_name::Response::Ok200),
                http::StatusCode::ACCEPTED => Ok(delete_order_item_by_name::Response::Accepted202),
                http::StatusCode::NO_CONTENT => Ok(delete_order_item_by_name::Response::NoContent204),
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                        .map_err(|source| delete_order_item_by_name::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(delete_order_item_by_name::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod delete_order_item_by_name {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `POST` to the `/cancel` action of an order item.
        ///
        /// Returns `Ok200` or `NoContent204` on success; any other status is
        /// surfaced as `Error::DefaultResponse` carrying the decoded service
        /// error body.
        pub async fn cancel_order_item(
            operation_config: &crate::OperationConfig,
            order_item_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
            cancellation_reason: &models::CancellationReason,
        ) -> std::result::Result<cancel_order_item::Response, cancel_order_item::Error> {
            let http_client = operation_config.http_client();
            // Action endpoint: .../orderItems/{name}/cancel
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems/{}/cancel",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                order_item_name
            );
            let mut url = url::Url::parse(url_str).map_err(cancel_order_item::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::POST);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(cancel_order_item::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            req_builder = req_builder.header("content-type", "application/json");
            let req_body = azure_core::to_json(cancellation_reason).map_err(cancel_order_item::Error::SerializeError)?;
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder.body(req_body).map_err(cancel_order_item::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(cancel_order_item::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => Ok(cancel_order_item::Response::Ok200),
                http::StatusCode::NO_CONTENT => Ok(cancel_order_item::Response::NoContent204),
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse =
                        serde_json::from_slice(rsp_body).map_err(|source| cancel_order_item::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(cancel_order_item::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod cancel_order_item {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
        /// Sends a `POST` to the `/return` action of an order item.
        ///
        /// Returns `Ok200` or `Accepted202` (asynchronous continuation) on
        /// success; any other status is surfaced as `Error::DefaultResponse`
        /// carrying the decoded service error body.
        pub async fn return_order_item(
            operation_config: &crate::OperationConfig,
            order_item_name: &str,
            subscription_id: &str,
            resource_group_name: &str,
            return_order_item_details: &models::ReturnOrderItemDetails,
        ) -> std::result::Result<return_order_item::Response, return_order_item::Error> {
            let http_client = operation_config.http_client();
            // Action endpoint: .../orderItems/{name}/return
            let url_str = &format!(
                "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EdgeOrder/orderItems/{}/return",
                operation_config.base_path(),
                subscription_id,
                resource_group_name,
                order_item_name
            );
            let mut url = url::Url::parse(url_str).map_err(return_order_item::Error::ParseUrlError)?;
            let mut req_builder = http::request::Builder::new();
            req_builder = req_builder.method(http::Method::POST);
            // Attach a bearer token only when a credential is configured.
            if let Some(token_credential) = operation_config.token_credential() {
                let token_response = token_credential
                    .get_token(operation_config.token_credential_resource())
                    .await
                    .map_err(return_order_item::Error::GetTokenError)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
            }
            url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
            req_builder = req_builder.header("content-type", "application/json");
            let req_body = azure_core::to_json(return_order_item_details).map_err(return_order_item::Error::SerializeError)?;
            req_builder = req_builder.uri(url.as_str());
            let req = req_builder.body(req_body).map_err(return_order_item::Error::BuildRequestError)?;
            let rsp = http_client
                .execute_request(req)
                .await
                .map_err(return_order_item::Error::ExecuteRequestError)?;
            match rsp.status() {
                http::StatusCode::OK => Ok(return_order_item::Response::Ok200),
                http::StatusCode::ACCEPTED => Ok(return_order_item::Response::Accepted202),
                // Any other status: decode the service's standard error contract.
                status_code => {
                    let rsp_body = rsp.body();
                    let rsp_value: models::ErrorResponse =
                        serde_json::from_slice(rsp_body).map_err(|source| return_order_item::Error::DeserializeError(source, rsp_body.clone()))?;
                    Err(return_order_item::Error::DefaultResponse {
                        status_code,
                        value: rsp_value,
                    })
                }
            }
        }
pub mod return_order_item {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
|
// file: iterlimit.rs
//
// Copyright 2015-2017 The RsGenetic Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// An iteration limiter: counts iterations and reports when a fixed
/// maximum has been reached.
#[derive(Copy, Clone, Debug)]
pub struct IterLimit {
    /// Maximum number of iterations allowed.
    max: u64,
    /// Current number of iterations.
    cur: u64,
}
impl IterLimit {
    /// Create a new iteration limiter allowing at most `max` iterations.
    pub fn new(max: u64) -> IterLimit {
        // Field-init shorthand instead of the redundant `max: max`.
        IterLimit { max, cur: 0 }
    }
    /// Increment the number of iterations.
    pub fn inc(&mut self) {
        self.cur += 1;
    }
    /// Check if the maximum has been reached.
    /// Note: counting past the maximum keeps returning `true`.
    pub fn reached(&self) -> bool {
        self.cur >= self.max
    }
    /// Reset the number of iterations to zero.
    pub fn reset(&mut self) {
        self.cur = 0;
    }
    /// Get the current number of iterations.
    pub fn get(&self) -> u64 {
        self.cur
    }
}
#[cfg(test)]
mod tests {
    use super::IterLimit;

    /// Resetting below the limit keeps `reached` false.
    #[test]
    fn test_iter_limit_reset() {
        let mut counter = IterLimit::new(5);
        (0..4).for_each(|_| counter.inc());
        assert!(!counter.reached());
        counter.reset();
        assert!(!counter.reached());
    }

    /// Once the limit is hit it stays hit, and the counter keeps counting.
    #[test]
    fn test_iter_limit_reached() {
        let mut counter = IterLimit::new(5);
        (0..5).for_each(|_| counter.inc());
        assert!(counter.reached());
        (0..10).for_each(|_| counter.inc());
        assert!(counter.reached());
        assert_eq!(counter.get(), 15);
    }
}
|
use super::*;
/// Raw key bits as read from hardware, in *low-active* form: a set bit
/// means the key is **released**. Bits 0-9 are A, B, Select, Start,
/// Right, Left, Up, Down, R, L. Convert to `Keys` for "pressed" logic.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct KeysLowActive(u16);
impl KeysLowActive {
    const_new!();
    // One get/with/set accessor trio per key bit (macro-generated).
    bitfield_bool!(u16; 0, a_released, with_a_released, set_a_released);
    bitfield_bool!(u16; 1, b_released, with_b_released, set_b_released);
    bitfield_bool!(u16; 2, select_released, with_select_released, set_select_released);
    bitfield_bool!(u16; 3, start_released, with_start_released, set_start_released);
    bitfield_bool!(u16; 4, right_released, with_right_released, set_right_released);
    bitfield_bool!(u16; 5, left_released, with_left_released, set_left_released);
    bitfield_bool!(u16; 6, up_released, with_up_released, set_up_released);
    bitfield_bool!(u16; 7, down_released, with_down_released, set_down_released);
    bitfield_bool!(u16; 8, r_released, with_r_released, set_r_released);
    bitfield_bool!(u16; 9, l_released, with_l_released, set_l_released);
}
/// Key state in high-active form: a set bit means the key is **pressed**.
/// Same bit layout as `KeysLowActive` (bits 0-9: A, B, Select, Start,
/// Right, Left, Up, Down, R, L).
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct Keys(u16);
impl Keys {
    const_new!();
    // One get/with/set accessor trio per key bit (macro-generated).
    bitfield_bool!(u16; 0, a, with_a, set_a);
    bitfield_bool!(u16; 1, b, with_b, set_b);
    bitfield_bool!(u16; 2, select, with_select, set_select);
    bitfield_bool!(u16; 3, start, with_start, set_start);
    bitfield_bool!(u16; 4, right, with_right, set_right);
    bitfield_bool!(u16; 5, left, with_left, set_left);
    bitfield_bool!(u16; 6, up, with_up, set_up);
    bitfield_bool!(u16; 7, down, with_down, set_down);
    bitfield_bool!(u16; 8, r, with_r, set_r);
    bitfield_bool!(u16; 9, l, with_l, set_l);
    /// Horizontal direction: +1 for Right, -1 for Left, 0 for neither.
    /// Right wins if both are somehow set.
    pub const fn x_signum(self) -> i32 {
        if self.right() {
            1
        } else if self.left() {
            -1
        } else {
            0
        }
    }
    /// Vertical direction: +1 for Down, -1 for Up, 0 for neither
    /// (down is positive — presumably screen coordinates; confirm at call sites).
    pub const fn y_signum(self) -> i32 {
        if self.down() {
            1
        } else if self.up() {
            -1
        } else {
            0
        }
    }
}
// TODO: bit ops for keys
impl From<KeysLowActive> for Keys {
    /// Converts low-active hardware bits into high-active `Keys`.
    fn from(low_active: KeysLowActive) -> Self {
        // XOR flips exactly the ten used key bits, so a set bit now
        // means "pressed" instead of "released".
        const USED_KEY_BITS: u16 = 0b11_1111_1111;
        Self(low_active.0 ^ USED_KEY_BITS)
    }
}
impl From<Keys> for KeysLowActive {
    /// Converts high-active `Keys` back to the low-active hardware encoding.
    fn from(keys: Keys) -> Self {
        // Exact inverse of the `KeysLowActive -> Keys` conversion: XOR the
        // same ten-bit mask to flip the used bits back.
        const USED_KEY_BITS: u16 = 0b11_1111_1111;
        Self(keys.0 ^ USED_KEY_BITS)
    }
}
|
use std::rc::Rc;
use std::cell::RefCell;
use failure::Error;
use smpl::{parse_module, UnparsedModule};
use smpl::interpreter::*;
/// Interpreter state value: the script is still running.
pub const STATE_RUN: i32 = 0;
/// Interpreter state value: the script has ended.
pub const STATE_END: i32 = 1;
// Names of the native built-ins registered on the VM module.
// `&'static` is implied on const references, so the explicit lifetime
// is dropped (clippy: redundant_static_lifetimes).
const RT_CHOICE: &str = "choice";
const RT_CLEAR_CHOICES: &str = "clear_choices";
const RT_INIT_CTXT: &str = "init_ctxt";
const RT_NEW_DATA: &str = "new_data";
const RT_SET_STATE: &str = "set_state";
const RT_SET_DATA: &str = "set_data";
const RT_GET_DATA: &str = "get_data";
// Field names used on the script context struct.
pub const CTXT_STATE: &str = "state";
pub const CTXT_CHOICE: &str = "choice_list";
// Field names used on each choice struct.
pub const CHOICE_HANDLE: &str = "handle";
pub const CHOICE_DISPLAY: &str = "display";
/// The smpl runtime library source, embedded at compile time.
const RT_LIB: &str = include_str!("rt.smpl");
/// Builds the runtime VM module: parses the embedded `rt.smpl` source and
/// registers the native built-ins it calls.
///
/// Panics if the embedded runtime source fails to parse (a build-time
/// invariant, not a user error).
pub fn vm_module() -> VmModule {
    let parsed = parse_module(UnparsedModule::anonymous(RT_LIB)).unwrap();
    let vm_module = VmModule::new(parsed)
        .add_builtin(RT_INIT_CTXT, init_ctxt)
        .add_builtin(RT_CHOICE, choice)
        .add_builtin(RT_CLEAR_CHOICES, clear_choices)
        .add_builtin(RT_SET_STATE, set_state)
        .add_builtin(RT_SET_DATA, set_data)
        .add_builtin(RT_GET_DATA, get_data)
        .add_builtin(RT_NEW_DATA, new_data)
        ;
    vm_module
}
/// Built-in: initializes a context struct with the running state and an
/// empty choice list, returning the updated struct.
/// Panics if called without a struct argument (VM guarantees arity).
fn init_ctxt(args: Option<Vec<Value>>) -> Result<Value, Error> {
    let c = args.unwrap().pop().unwrap();
    let mut c = irmatch!(c; Value::Struct(c) => c);
    c.set_field(CTXT_STATE.to_string(), Value::Int(STATE_RUN));
    c.set_field(CTXT_CHOICE.to_string(), Value::Array(Vec::new()));
    Ok(Value::Struct(c))
}
/// Built-in: appends a choice (handler + display text) to the context's
/// choice list and returns the context.
fn choice(args: Option<Vec<Value>>) -> Result<Value, Error> {
    let mut args = args.unwrap();
    // Arguments pop off in reverse order: display, handler, then context.
    let display = args.pop().unwrap();
    let handler = args.pop().unwrap();
    let context = irmatch!(args.pop().unwrap(); Value::Struct(c) => c);
    let list = context.ref_field(CTXT_CHOICE).unwrap();
    let mut list = list.borrow_mut();
    let list = irmatch!(*list; Value::Array(ref mut vec) => vec);
    let mut choice = Struct::new();
    choice.set_field(CHOICE_HANDLE.to_owned(), handler);
    choice.set_field(CHOICE_DISPLAY.to_owned(), display);
    list.push(Rc::new(RefCell::new(Value::Struct(choice))));
    Ok(Value::Struct(context))
}
/// Built-in: empties the context's choice list in place and returns the
/// context.
fn clear_choices(args: Option<Vec<Value>>) -> Result<Value, Error> {
    let mut args = args.unwrap();
    let context = irmatch!(args.pop().unwrap(); Value::Struct(c) => c);
    let list = context.ref_field(CTXT_CHOICE).unwrap();
    let mut list = list.borrow_mut();
    let list = irmatch!(*list; Value::Array(ref mut vec) => vec);
    list.clear();
    Ok(Value::Struct(context))
}
/// Built-in: overwrites the context's state field (e.g. `STATE_RUN`,
/// `STATE_END`) and returns the context.
fn set_state(args: Option<Vec<Value>>) -> Result<Value, Error> {
    let mut args = args.unwrap();
    // Popped in reverse order: new state first, then the context.
    let new_state = args.pop().unwrap();
    let mut context = irmatch!(args.pop().unwrap(); Value::Struct(c) => c);
    context.set_field(CTXT_STATE.to_owned(), new_state);
    Ok(Value::Struct(context))
}
/// Built-in: returns a fresh, empty data struct. Ignores its arguments.
fn new_data(_args: Option<Vec<Value>>) -> Result<Value, Error> {
    Ok(Value::Struct(Struct::new()))
}
/// Built-in: sets `name` to `to_set` on the data struct and returns the
/// updated struct.
fn set_data(args: Option<Vec<Value>>) -> Result<Value, Error> {
    let mut args = args.unwrap();
    // Popped in reverse order: value, field name, then the data struct.
    let to_set = args.pop().unwrap();
    let name = irmatch!(args.pop().unwrap(); Value::String(s) => s);
    let mut data_s = irmatch!(args.pop().unwrap(); Value::Struct(c) => c);
    data_s.set_field(name, to_set);
    Ok(Value::Struct(data_s))
}
/// Built-in: looks up the field `name` on the data struct and returns its
/// value.
///
/// Panics with "Unknown flag {name}" if the field was never set — same
/// message as before.
fn get_data(args: Option<Vec<Value>>) -> Result<Value, Error> {
    let mut args = args.unwrap();
    // Popped in reverse order: field name, then the data struct.
    let name = irmatch!(args.pop().unwrap(); Value::String(s) => s);
    let data = irmatch!(args.pop().unwrap(); Value::Struct(c) => c);
    // `unwrap_or_else` replaces `.expect(&format!(...))`, which built the
    // panic message on every call even on success (clippy: expect_fun_call).
    let value = data
        .get_field(&name)
        .unwrap_or_else(|| panic!("Unknown flag {}", name));
    Ok(value)
}
|
/// Demo entry point: prints a greeting, then sums via a function and a
/// closure.
pub fn run() {
    greeting("hello", "naveen");
    println!("Value of 10+10 is {} ", add(10, 10));
    // Same addition, expressed as a closure.
    let closure_sum = |lhs: i32, rhs: i32| lhs + rhs;
    println!("C Sum:{}", closure_sum(3, 3));
}

/// Prints "<greet> <name>, nice to meet you".
fn greeting(greet: &str, name: &str) {
    println!("{} {}, nice to meet you", greet, name);
}

/// Returns the sum of two integers.
fn add(num1: i32, num2: i32) -> i32 {
    num1 + num2
}
|
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::prelude::*;
use std::path::PathBuf;
/// Marker type implementing the `ls` whole-stream command.
pub struct LS;
/// Arguments accepted by `ls`, deserialized from the evaluated call.
#[derive(Deserialize)]
pub struct LsArgs {
    // Optional path to list; per the command usage, the current path is
    // used when absent.
    path: Option<Tagged<PathBuf>>,
}
/// Command registration for `ls`.
impl WholeStreamCommand for LS {
    /// Name as typed at the prompt.
    fn name(&self) -> &str {
        "ls"
    }
    /// One optional positional `path` argument, declared as a pattern.
    fn signature(&self) -> Signature {
        Signature::build("ls").optional("path", SyntaxShape::Pattern)
    }
    fn usage(&self) -> &str {
        "View the contents of the current or given path."
    }
    /// Deserializes the raw arguments into `LsArgs` and dispatches to `ls`.
    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        args.process(registry, ls)?.run()
        // ls(args, registry)
    }
}
/// Runs `ls` by delegating to the active shell implementation.
fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
    context.shell_manager.ls(path, context.name)
}
|
pub mod ast;
use ast::AssignmentLHS;
use ast::EnumAlternative;
use ast::EnumDefinition;
use ast::EnumDestructure;
use ast::EnumItem;
use ast::Expr;
use ast::FunctionCall;
use ast::FunctionDefinition;
use ast::FunctionSignature;
use ast::Identifier;
use ast::IfElse;
use ast::Match;
use ast::MatchArm;
use ast::MatchPattern;
use ast::Op;
use ast::RustFunctionDefinition;
use ast::Statement;
use std::fmt;
use std::fmt::Debug;
/// A parse failure: a human-readable message plus an excerpt of the
/// offending source with a squiggle/caret marker (built by
/// `make_parse_error`).
#[derive(Debug, PartialEq)]
pub struct ParseError {
    // Formatted source excerpt pointing at the error location.
    context: String,
    // What the parser expected / what went wrong.
    message: String,
}
/// Parser cursor over the input. `remaining_input` is always a suffix of
/// `original_input`; their length difference gives the current byte
/// offset, used for error reporting.
#[derive(Debug)]
struct ParserState<'a> {
    original_input: &'a str,
    remaining_input: &'a str,
}
/// Prints a formatted line, but only in debug builds; compiles to nothing
/// observable in release builds. Requires at least one format argument.
macro_rules! debug_println {
    ($fmt:expr, $($arg:tt)*) => {
        if cfg!(debug_assertions) {
            // `println!` appends the newline the old `print!(concat!(...))`
            // form spliced in manually.
            println!($fmt, $($arg)*);
        }
    };
}
impl<'a> ParserState<'a> {
fn next_character(&self) -> Option<char> {
self.remaining_input.chars().nth(0)
}
fn consume_character(&mut self) {
self.consume_n_characters(1)
}
fn consume_n_characters(&mut self, n: usize) {
self.remaining_input = &self.remaining_input[n..]
}
fn consume_until<P: Fn(char) -> bool>(&mut self, predicate: P) {
loop {
while self.remaining_input.starts_with("//") {
self.consume_line_comment();
}
if self.next_character().is_some() && !predicate(self.next_character().unwrap()) {
self.consume_character();
} else {
break;
}
}
}
fn consume_until_nonwhitespace_or_newline(&mut self) {
self.consume_until(|c| c == '\n' || !c.is_whitespace())
}
fn consume_until_nonwhitespace(&mut self) {
self.consume_until(|c| !c.is_whitespace())
}
fn expect_character_and_consume(&mut self, character: char) -> Result<(), ParseError> {
if let Some(c) = self.next_character() {
if c == character {
self.consume_character();
return Ok(());
}
}
Err(make_parse_error(
self,
format!("Expected '{}'", character).as_str(),
))
}
fn consume_line_comment(&mut self) {
println!("consume_line_comment: {:?}", self);
debug_assert!(self.remaining_input.starts_with("//"));
let next_newline = self.remaining_input.find("\n");
match next_newline {
None => {
self.remaining_input = "";
}
Some(index_of_newline) => {
self.remaining_input = &self.remaining_input[index_of_newline + 1..];
}
}
}
}
/// True when `c` closes a block.
fn is_block_terminator(c: char) -> bool {
    matches!(c, '}')
}
// Does this character always terminate an expression?
fn is_expression_terminator(c: char) -> bool {
    // NOTE: '}' in this set might cause trouble once structs are added.
    matches!(c, ')' | '}' | ',' | '{')
}
/// True when `s` begins with `keyword` followed by whitespace or end of
/// input — so `"iffy"` does not match keyword `"if"`, and neither does
/// `"if("`.
fn starts_with_keyword(s: &str, keyword: &str) -> bool {
    match s.strip_prefix(keyword) {
        Some(rest) => rest.chars().next().map_or(true, char::is_whitespace),
        None => false,
    }
}
/// Reserved words of the language.
mod keywords {
    // `&'static` is implied on const references; the explicit lifetime is
    // dropped (clippy: redundant_static_lifetimes).
    pub const FUNCTION: &str = "fn";
    pub const IMPL: &str = "impl";
    pub const IF: &str = "if";
    pub const ELSE: &str = "else";
    pub const TRUE: &str = "true";
    pub const FALSE: &str = "false";
    // Two words: matched with `starts_with_keyword`, not tokenized.
    pub const RUST_FUNCTION: &str = "rust fn";
    pub const WHILE: &str = "while";
    pub const LET: &str = "let";
    pub const ENUM: &str = "enum";
    pub const MATCH: &str = "match";
}
impl fmt::Display for ParseError {
    /// Renders as the message on one line, then the indented source context.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "{}", self.message)?;
        write!(f, " {}", self.context)
    }
}
/// Shorthand for the ubiquitous expression-or-error result.
type ParseResult = Result<Expr, ParseError>;
fn make_parse_error(parser_state: &ParserState, msg: &str) -> ParseError {
let error_character_index =
parser_state.original_input.len() - parser_state.remaining_input.len();
let mut number_of_backwards_lines_of_context = 1;
let mut squiggle_count_start_index: Option<usize> = None;
let mut original_input_substring_start_index = error_character_index;
while original_input_substring_start_index > 0 && number_of_backwards_lines_of_context >= 0 {
original_input_substring_start_index -= 1;
if parser_state
.original_input
.chars()
.nth(original_input_substring_start_index)
== Some('\n')
{
if squiggle_count_start_index.is_none() {
squiggle_count_start_index = Some(original_input_substring_start_index);
}
number_of_backwards_lines_of_context -= 1;
}
}
let mut number_of_forwards_lines_of_context = 1;
let mut original_input_substring_end_index = error_character_index;
while original_input_substring_end_index < parser_state.original_input.len()
&& number_of_forwards_lines_of_context >= 0
{
original_input_substring_end_index += 1;
if parser_state
.original_input
.chars()
.nth(original_input_substring_end_index)
!= Some('\n')
{
number_of_forwards_lines_of_context -= 1;
}
}
let number_of_squiggles = error_character_index - squiggle_count_start_index.unwrap_or(0usize);
let squiggle_string = "~".repeat(number_of_squiggles);
let original_input_substring = &parser_state.original_input
[original_input_substring_start_index..original_input_substring_end_index];
let context = format!("\n{}\n{}^", original_input_substring, squiggle_string);
ParseError {
context: String::from(context),
message: String::from(msg),
}
}
/// Reads a binary operator at the cursor, if any: tries a one-character
/// operator first, then a two-character one. When `consume` is false this
/// only peeks and leaves the cursor in place.
/// NOTE(review): the `[0..1]`/`[0..2]` byte slices assume operators are
/// ASCII and will panic on a multi-byte first character — confirm input
/// is restricted to ASCII.
fn parse_binop(parser_state: &mut ParserState, consume: bool) -> Option<Op> {
    debug_assert!(parser_state.remaining_input.len() >= 1);
    if let Some(single_character_op) = Op::from_str(&parser_state.remaining_input[0..1]) {
        if consume {
            parser_state.consume_character();
        }
        return Some(single_character_op);
    }
    if parser_state.remaining_input.len() >= 2 {
        if let Some(double_character_op) = Op::from_str(&parser_state.remaining_input[0..2]) {
            if consume {
                parser_state.consume_n_characters(2);
            }
            return Some(double_character_op);
        }
    }
    return None;
}
// If you ever forget how the hell this works, go to the LLVM tutorial on
// parsing binary expressions (operator-precedence parsing). It's really
// confusing.
/// Folds `lhs <op> term <op> term ...` into a left-leaning `Expr::BinOp`
/// tree. Stops (returning the tree built so far) at end of input, at an
/// expression terminator, or when the next operator binds looser than
/// `minimum_precedence`; recurses when the operator after the next term
/// binds tighter than the current one.
fn parse_binop_rhs(
    parser_state: &mut ParserState,
    lhs: Expr,
    minimum_precedence: i32,
) -> ParseResult {
    debug_assert!(parser_state.remaining_input.len() > 0);
    debug_println!("parse_binop_rhs: {:?}", parser_state);
    let mut new_lhs = lhs;
    loop {
        parser_state.consume_until_nonwhitespace_or_newline();
        let maybe_next_character = parser_state.next_character();
        if maybe_next_character.is_none() {
            return Ok(new_lhs);
        }
        // Peek only — do not consume yet (see comment below).
        let maybe_op = parse_binop(parser_state, false);
        if maybe_op.is_none() {
            let next_character = maybe_next_character.unwrap();
            // Anything that is neither an operator, a terminator, nor
            // whitespace is a syntax error.
            if !is_expression_terminator(next_character) && !next_character.is_whitespace() {
                return Err(make_parse_error(parser_state, "Expected operator."));
            }
            return Ok(new_lhs);
        }
        let op = maybe_op.unwrap();
        let op_precedence = op.precedence();
        if op_precedence < minimum_precedence {
            // Our current expression has higher precedence than the next, so we're done collecting
            // the terms for it now.
            return Ok(new_lhs);
        }
        // Must delay consuming the op, because we might have chosen to not parse it in this call to
        // parse_binop_rhs.
        parse_binop(parser_state, true).unwrap();
        parser_state.consume_until_nonwhitespace_or_newline();
        let next_primary_expr = parse_primary(parser_state)?;
        debug_println!("next_primary_expr: {:?}", next_primary_expr);
        debug_println!("{:?}", parser_state);
        parser_state.consume_until_nonwhitespace_or_newline();
        // If the operator after this term binds tighter, let it grab the
        // term by recursing with a higher minimum precedence.
        let rhs = if parser_state.remaining_input.len() > 0 {
            let next_op = parse_binop(parser_state, false);
            if next_op.is_some() && next_op.unwrap().precedence() > op_precedence {
                parse_binop_rhs(
                    parser_state,
                    next_primary_expr,
                    next_op.unwrap().precedence(),
                )?
            } else {
                next_primary_expr
            }
        } else {
            next_primary_expr
        };
        new_lhs = Expr::BinOp {
            op: op,
            lhs: Box::new(new_lhs),
            rhs: Box::new(rhs),
        };
        debug_println!("new_lhs: {:?}", new_lhs);
    }
}
/// Parses a numeric literal (digits, '.', and '-') at the cursor into
/// `Expr::Number`. The cursor is advanced past the numeric run even when
/// parsing fails, so the error points just after the bad token.
/// NOTE(review): `position` yields a *char* index that is then used as a
/// *byte* slice boundary — correct only for ASCII input; confirm.
fn parse_number(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_number: {:?}", parser_state);
    // '-' is accepted anywhere in the run; `f64::parse` rejects misuse.
    let is_part_of_number = |c: char| c.is_digit(10) || c == '.' || c == '-';
    let maybe_last_nonnumeric_index = parser_state
        .remaining_input
        .chars()
        .position(|c| !is_part_of_number(c));
    let numeric_substring = match maybe_last_nonnumeric_index {
        Some(last_nonnumeric_index) => &parser_state.remaining_input[..last_nonnumeric_index],
        None => parser_state.remaining_input,
    };
    let number_parse_result = numeric_substring.parse::<f64>();
    // Advance before building any error so the context is past the token.
    parser_state.remaining_input = match maybe_last_nonnumeric_index {
        None => "",
        Some(last_nonnumeric_index) => &parser_state.remaining_input[last_nonnumeric_index..],
    };
    match number_parse_result {
        Ok(number) => Ok(Expr::Number(number)),
        Err(_) => Err(make_parse_error(parser_state, "Expected number.")),
    }
}
/// Parses an identifier (alphanumerics and '_') at the cursor. Errors if
/// the very next character cannot start an identifier.
/// NOTE(review): like `parse_number`, a char index from `position` is
/// used as a byte slice boundary — ASCII-only safe; confirm.
fn parse_identifier(parser_state: &mut ParserState) -> Result<Identifier, ParseError> {
    debug_println!("parse_identifier: {:?}", parser_state);
    let is_part_of_identifier = |c: char| c.is_alphanumeric() || c == '_';
    let maybe_last_non_identifier_index = parser_state
        .remaining_input
        .chars()
        .position(|c| !is_part_of_identifier(c));
    let identifier_substring = match maybe_last_non_identifier_index {
        Some(last_non_identifier_index) => {
            &parser_state.remaining_input[..last_non_identifier_index]
        }
        None => parser_state.remaining_input,
    };
    if identifier_substring.len() == 0 {
        return Err(make_parse_error(parser_state, "Expected identifier"));
    }
    // Advance past the identifier only on success.
    parser_state.remaining_input = match maybe_last_non_identifier_index {
        None => "",
        Some(last_non_identifier_index) => {
            &parser_state.remaining_input[last_non_identifier_index..]
        }
    };
    Ok(Identifier::from(identifier_substring))
}
/// Parses `= rhs` for an already-parsed `lhs` (the '=' is at the cursor),
/// then the rest of the program, producing a `Statement::Assignment`
/// chained onto that rest.
fn parse_assignment(parser_state: &mut ParserState, lhs: AssignmentLHS) -> ParseResult {
    debug_println!("parse_assignment: {:?}", parser_state);
    debug_assert!(parser_state.next_character() == Some('='));
    parser_state.consume_character();
    parser_state.consume_until_nonwhitespace();
    let rhs = parse_expr(parser_state)?;
    // We want to support both with newline and without newline at the
    // last statement of a Unit-returning Statement-expr.
    parser_state.consume_until_nonwhitespace_or_newline();
    if let Some(_) = parser_state.next_character() {
        parser_state.expect_character_and_consume('\n')?;
    }
    let rest = parse_expr(parser_state)?;
    Ok(Expr::Statement(
        Statement::Assignment {
            lhs: lhs,
            rhs: Box::new(rhs),
        },
        Box::new(rest),
    ))
}
/// Parses a parenthesized expression; the '(' is at the cursor.
fn parse_paren_expr(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_paren_expr: {:?}", parser_state);
    debug_assert!(parser_state.next_character() == Some('('));
    parser_state.consume_character();
    let inner = parse_expr(parser_state)?;
    // Delegates the closing-paren check; produces the same
    // "Expected ')'" error as the previous hand-rolled check.
    parser_state.expect_character_and_consume(')')?;
    Ok(inner)
}
/// Parses a comma-separated argument list `(expr, expr, ...)`; the '('
/// is at the cursor. Returns the argument expressions in order.
fn parse_function_call_args(parser_state: &mut ParserState) -> Result<Vec<Expr>, ParseError> {
    debug_println!("parse_function_call_args: {:?}", parser_state);
    debug_assert!(parser_state.next_character() == Some('('));
    parser_state.consume_character();
    let mut args: Vec<Expr> = Vec::new();
    while parser_state.next_character() != Some(')') {
        // A comma is required before every argument except the first.
        if args.len() > 0 {
            parser_state.expect_character_and_consume(',')?;
        }
        parser_state.consume_until_nonwhitespace();
        args.push(parse_expr(parser_state)?);
        parser_state.consume_until_nonwhitespace();
    }
    parser_state.expect_character_and_consume(')')?;
    Ok(args)
}
/// Parses a double-quoted string literal; the opening '"' is at the
/// cursor. No escape sequences are recognized.
fn parse_string_literal(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_string_literal: {:?}", parser_state);
    debug_assert!(parser_state.next_character() == Some('"'));
    parser_state.consume_character();
    if let Some(string_end_index) = parser_state.remaining_input.find('"') {
        let literal = String::from(&parser_state.remaining_input[..string_end_index]);
        // Skip the body, then the closing quote.
        parser_state.consume_n_characters(string_end_index);
        parser_state.consume_character();
        Ok(Expr::String(literal))
    } else {
        Err(make_parse_error(
            parser_state,
            "Unterminated string literal",
        ))
    }
}
/// Parses `.method(args)` applied to an already-parsed `target`
/// expression; the '.' is at the cursor.
fn parse_method_call(parser_state: &mut ParserState, target: Expr) -> ParseResult {
    debug_assert!(parser_state.next_character() == Some('.'));
    parser_state.consume_character();
    parser_state.consume_until_nonwhitespace();
    let name = parse_identifier(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    let args = parse_function_call_args(parser_state)?;
    Ok(Expr::MethodCall {
        name,
        target: Box::new(target),
        args,
    })
}
/// Promotes a function call to a statement followed by the rest of the
/// program; every other expression passes through unchanged.
fn parse_expr_as_statement(parser_state: &mut ParserState, primary_expr: Expr) -> ParseResult {
    if let Expr::FunctionCall(function_call) = primary_expr {
        let rest = parse_expr(parser_state)?;
        Ok(Expr::Statement(
            Statement::FunctionCall(function_call),
            Box::new(rest),
        ))
    } else {
        // Sometimes this is wrong, but the error will be caught later.
        Ok(primary_expr)
    }
}
/// Parses `true` or `false` at the cursor. The caller must have checked
/// that one of the two keywords is present (asserted in debug builds).
fn parse_bool_literal(parser_state: &mut ParserState) -> ParseResult {
    if starts_with_keyword(parser_state.remaining_input, keywords::TRUE) {
        parser_state.consume_n_characters(keywords::TRUE.len());
        Ok(Expr::Bool(true))
    } else {
        debug_assert!(starts_with_keyword(
            parser_state.remaining_input,
            keywords::FALSE
        ));
        parser_state.consume_n_characters(keywords::FALSE.len());
        Ok(Expr::Bool(false))
    }
}
/// Parses `{ expr }` into `Expr::Block`, skipping leading whitespace.
fn parse_block(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_block: {:?}", parser_state);
    parser_state.consume_until_nonwhitespace();
    match parser_state.next_character() {
        Some('{') => parser_state.consume_character(),
        _ => {
            return Err(make_parse_error(
                parser_state,
                "Expected '{' at beginning of block.",
            ))
        }
    }
    let body = parse_expr(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    parser_state.expect_character_and_consume('}')?;
    Ok(Expr::Block(Box::new(body)))
}
/// Parses one primary expression — the atoms binary operators combine:
/// if/else, bool literal, parenthesized expr, string literal, block,
/// identifier (possibly a call or `Enum::Alternative`), or number.
/// Returns `Expr::Unit` at end of input.
fn parse_primary(parser_state: &mut ParserState) -> ParseResult {
    // Base case: We parse the rhs of a binary or unary operation
    // Recursive case: We need to parse the rhs of a binary operation
    debug_println!("parse_primary: {:?}", parser_state);
    parser_state.consume_until_nonwhitespace();
    if starts_with_keyword(parser_state.remaining_input, keywords::IF) {
        return parse_if_else(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::TRUE)
        || starts_with_keyword(parser_state.remaining_input, keywords::FALSE)
    {
        return parse_bool_literal(parser_state);
    }
    let maybe_next_character = parser_state.next_character();
    if maybe_next_character.is_none() {
        return Ok(Expr::Unit);
    }
    let next_character = maybe_next_character.unwrap();
    if next_character == '(' {
        return parse_paren_expr(parser_state);
    }
    if next_character == '"' {
        return parse_string_literal(parser_state);
    }
    if next_character == '{' {
        return parse_block(parser_state);
    }
    if next_character.is_alphabetic() {
        let identifier = parse_identifier(parser_state)?;
        parser_state.consume_until_nonwhitespace_or_newline();
        // What follows the identifier decides what it is.
        match parser_state.next_character() {
            // '(' => a function call.
            Some('(') => {
                let function_call_args = parse_function_call_args(parser_state)?;
                return Ok(Expr::FunctionCall(FunctionCall {
                    name: identifier,
                    args: function_call_args,
                }));
            }
            // ':' => an `Enum::Alternative` use.
            Some(':') => {
                return Ok(Expr::EnumAlternative(parse_enum_alternative(
                    parser_state,
                    identifier,
                )?))
            }
            // Anything else => a plain variable reference.
            _ => return Ok(Expr::Ident(identifier)),
        }
    }
    // Fall through: only a number can start here.
    return parse_number(parser_state);
}
/// Parses `name(param, param, ...)` into a `FunctionSignature`; the
/// function name starts at the cursor.
fn parse_function_signature(
    parser_state: &mut ParserState,
) -> Result<FunctionSignature, ParseError> {
    debug_println!("parse_function_signature: {:?}", parser_state);
    let name = parse_identifier(parser_state)?;
    parser_state.expect_character_and_consume('(')?;
    let mut args: Vec<Identifier> = Vec::new();
    while parser_state.next_character() != Some(')') {
        // A comma is required before every parameter except the first.
        if args.len() > 0 {
            parser_state.expect_character_and_consume(',')?;
        }
        parser_state.consume_until_nonwhitespace();
        args.push(parse_identifier(parser_state)?);
        parser_state.consume_until_nonwhitespace();
    }
    parser_state.expect_character_and_consume(')')?;
    Ok(FunctionSignature {
        name: name,
        args: args,
    })
}
/// Parses `fn name(params) { body }` into a `FunctionDefinition`; the
/// `fn` keyword is at the cursor.
fn parse_function_definition(
    parser_state: &mut ParserState,
) -> Result<FunctionDefinition, ParseError> {
    debug_println!("parse_function_definition: {:?}", parser_state);
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::FUNCTION
    ));
    parser_state.consume_n_characters(keywords::FUNCTION.len());
    parser_state.consume_until_nonwhitespace();
    let signature = parse_function_signature(parser_state)?;
    let body = parse_block(parser_state)?;
    Ok(FunctionDefinition {
        signature,
        body: Box::new(body),
    })
}
/// Parses a function definition followed by the rest of the program,
/// chaining both into a `Statement` expression.
fn parse_function_definition_expr(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parser_function_definition_expr: {:?}", parser_state);
    let definition = parse_function_definition(parser_state)?;
    let rest = parse_expr(parser_state)?;
    Ok(Expr::Statement(
        Statement::FunctionDefinition(definition),
        Box::new(rest),
    ))
}
/// Parses `rust fn name(params) { <raw rust body> }`; the body is kept as
/// an uninterpreted string delimited by balanced braces.
fn parse_rust_function_definition(
    parser_state: &mut ParserState,
) -> Result<RustFunctionDefinition, ParseError> {
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::RUST_FUNCTION
    ));
    parser_state.consume_n_characters(keywords::RUST_FUNCTION.len());
    parser_state.consume_until_nonwhitespace();
    let signature = parse_function_signature(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    parser_state.expect_character_and_consume('{')?;
    // Dead stupid way to detect the end of the rust block: count curly braces.
    // If the depth ever turns negative, that's the enclosing '}'.
    // This breaks if there are comments with braces in them, but whatever,
    // those braces will most likely be balanced.
    //
    // BUG FIX: the old loop counted *characters* but then used the count
    // as a *byte* slice index, mis-slicing (or panicking) whenever the
    // Rust body contained non-ASCII text. `char_indices` yields the byte
    // offset directly.
    let mut number_of_braces = 0i32;
    let mut index_of_enclosing_brace = parser_state.remaining_input.len();
    for (byte_index, c) in parser_state.remaining_input.char_indices() {
        if c == '{' {
            number_of_braces += 1;
        } else if c == '}' {
            number_of_braces -= 1;
        }
        if number_of_braces < 0 {
            index_of_enclosing_brace = byte_index;
            break;
        }
    }
    let body = String::from(&parser_state.remaining_input[0..index_of_enclosing_brace]);
    parser_state.remaining_input = &parser_state.remaining_input[index_of_enclosing_brace..];
    debug_assert!(parser_state.next_character() == Some('}'));
    parser_state.expect_character_and_consume('}')?;
    Ok(RustFunctionDefinition {
        signature: signature,
        body: body,
    })
}
/// Parses a `rust fn` definition followed by the rest of the program,
/// chaining both into a `Statement` expression.
fn parse_rust_function_definition_expr(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_rust_function_definition_expr: {:?}", parser_state);
    let definition = parse_rust_function_definition(parser_state)?;
    let rest = parse_expr(parser_state)?;
    Ok(Expr::Statement(
        Statement::RustFunctionDefinition(definition),
        Box::new(rest),
    ))
}
/// Parses `impl TypeName { fn ... fn ... }` — a set of method definitions
/// attached to a type — followed by the rest of the program.
fn parse_impl(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_impl: {:?}", parser_state);
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::IMPL
    ));
    parser_state.consume_n_characters(keywords::IMPL.len());
    parser_state.consume_until_nonwhitespace();
    let type_identifier = parse_identifier(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    parser_state.expect_character_and_consume('{')?;
    let mut methods: Vec<FunctionDefinition> = Vec::new();
    // The `is_some()` guard stops the loop at end of input so a missing
    // '}' is reported by the expect below instead of looping forever.
    while parser_state.next_character().is_some() && parser_state.next_character() != Some('}') {
        parser_state.consume_until_nonwhitespace();
        methods.push(parse_function_definition(parser_state)?);
        parser_state.consume_until_nonwhitespace();
    }
    parser_state.expect_character_and_consume('}')?;
    let rest = parse_expr(parser_state)?;
    Ok(Expr::Statement(
        Statement::Impl {
            tid: type_identifier,
            methods: methods,
        },
        Box::new(rest),
    ))
}
/// Parses `enum Name { Alt, Alt(a, b), ... }` followed by the rest of the
/// program, producing a `Statement::EnumDefinition`.
fn parse_enum_definition(parser_state: &mut ParserState) -> ParseResult {
    /// Parses one alternative *declaration*: `Tag` or `Tag(a, b)`.
    /// (Hoisted out of the loop body below, where it was previously
    /// declared, and renamed so it no longer shadows the module-level
    /// `parse_enum_alternative`, which parses alternative *uses*.)
    fn parse_enum_item(parser_state: &mut ParserState) -> Result<EnumItem, ParseError> {
        let tag = parse_identifier(parser_state)?;
        let mut associated_values: Vec<Identifier> = Vec::new();
        // A bare tag has no parenthesized associated values.
        if parser_state.next_character() != Some('(') {
            return Ok(EnumItem {
                tag: tag,
                associated_values: associated_values,
            });
        }
        parser_state.consume_character();
        while parser_state.next_character() != Some(')') {
            if associated_values.len() > 0 {
                parser_state.expect_character_and_consume(',')?;
            }
            parser_state.consume_until_nonwhitespace();
            associated_values.push(parse_identifier(parser_state)?);
            parser_state.consume_until_nonwhitespace();
        }
        parser_state.consume_character();
        Ok(EnumItem {
            tag: tag,
            associated_values: associated_values,
        })
    }
    debug_println!("parse_enum_definition: {:?}", parser_state);
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::ENUM
    ));
    parser_state.consume_n_characters(keywords::ENUM.len());
    parser_state.consume_until_nonwhitespace();
    let type_identifier = parse_identifier(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    parser_state.expect_character_and_consume('{')?;
    parser_state.consume_until_nonwhitespace();
    let mut alternatives: Vec<EnumItem> = Vec::new();
    while parser_state.next_character().is_some() && parser_state.next_character() != Some('}') {
        // A comma is required before every alternative except the first.
        if alternatives.len() > 0 {
            parser_state.expect_character_and_consume(',')?;
        }
        parser_state.consume_until_nonwhitespace();
        alternatives.push(parse_enum_item(parser_state)?);
        parser_state.consume_until_nonwhitespace();
    }
    parser_state.expect_character_and_consume('}')?;
    let rest = parse_expr(parser_state)?;
    Ok(Expr::Statement(
        Statement::EnumDefinition(EnumDefinition {
            name: type_identifier,
            alternatives: alternatives,
        }),
        Box::new(rest),
    ))
}
/// Parses a *use* of an enum alternative: `Enum::Alt` or
/// `Enum::Alt(expr, ...)`. The enum name has already been consumed and is
/// passed in; the cursor sits on the first ':'.
fn parse_enum_alternative(
    parser_state: &mut ParserState,
    enum_name: Identifier,
) -> Result<EnumAlternative, ParseError> {
    debug_println!("parse_enum_alternative: {:?}", parser_state);
    parser_state.expect_character_and_consume(':')?;
    parser_state.expect_character_and_consume(':')?;
    let alternative_name = parse_identifier(parser_state)?;
    // Without '(' this is a bare alternative with no associated values.
    if parser_state.next_character() != Some('(') {
        return Ok(EnumAlternative {
            enum_name: enum_name,
            alternative_name: alternative_name,
            associated_values: vec![],
        });
    }
    parser_state.consume_character();
    let mut associated_values: Vec<Expr> = Vec::new();
    while parser_state.next_character() != Some(')') {
        // A comma is required before every value except the first.
        if associated_values.len() > 0 {
            parser_state.expect_character_and_consume(',')?;
        }
        parser_state.consume_until_nonwhitespace();
        associated_values.push(parse_expr(parser_state)?);
        parser_state.consume_until_nonwhitespace();
    }
    parser_state.consume_character();
    return Ok(EnumAlternative {
        enum_name: enum_name,
        alternative_name: alternative_name,
        associated_values: associated_values,
    });
}
/// Parses `match expr { Pattern => expr, ... }` into `Expr::Match`.
/// Every arm, including the last, must end with a comma.
fn parse_match(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_match: {:?}", parser_state);
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::MATCH
    ));
    parser_state.consume_n_characters(keywords::MATCH.len());
    let match_expr = parse_expr(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    parser_state.expect_character_and_consume('{')?;
    /// Parses one pattern; currently every pattern is an enum destructure
    /// whose associated values must all be plain identifiers.
    fn parse_match_pattern(parser_state: &mut ParserState) -> Result<MatchPattern, ParseError> {
        // TODO: this is kind of a hack, since technically a match pattern isn't necessarily an
        // enum alternative once we add non-enum patterns
        let enum_name = parse_identifier(parser_state)?;
        let enum_alternative = parse_enum_alternative(parser_state, enum_name.clone())?;
        let mut associated_values: Vec<Identifier> = Vec::new();
        for value in &enum_alternative.associated_values {
            if let Expr::Ident(identifier) = value {
                associated_values.push(identifier.clone());
            } else {
                return Err(make_parse_error(
                    parser_state,
                    "Expected identifier inside enum destructure",
                ));
            }
        }
        Ok(MatchPattern::EnumDestructure(EnumDestructure {
            enum_name: enum_name,
            alternative_name: enum_alternative.alternative_name,
            associated_values: associated_values,
        }))
    }
    /// Parses `pattern => expr`.
    fn parse_match_arm(parser_state: &mut ParserState) -> Result<MatchArm, ParseError> {
        let pattern = parse_match_pattern(parser_state)?;
        parser_state.consume_until_nonwhitespace();
        // BUG FIX: this condition used `&&`, which (a) panicked by slicing
        // past the end of input when fewer than two characters remained,
        // and (b) never fired otherwise, silently accepting any two
        // characters in place of "=>". `||` checks the length first and
        // then actually validates the arrow.
        if parser_state.remaining_input.len() < 2 || &parser_state.remaining_input[..2] != "=>" {
            return Err(make_parse_error(parser_state, "Expected '=>'"));
        }
        parser_state.consume_n_characters(2);
        let expr = parse_expr(parser_state)?;
        Ok(MatchArm {
            pattern: pattern,
            expr: expr,
        })
    }
    let mut arms: Vec<MatchArm> = Vec::new();
    while parser_state.next_character() != Some('}') {
        parser_state.consume_until_nonwhitespace();
        arms.push(parse_match_arm(parser_state)?);
        parser_state.consume_until_nonwhitespace();
        parser_state.expect_character_and_consume(',')?;
        parser_state.consume_until_nonwhitespace();
    }
    parser_state.consume_character();
    Ok(Expr::Match(Match {
        match_expr: Box::new(match_expr),
        arms: arms,
    }))
}
/// Parses an `if <cond> { ... } else { ... }` expression, including chained
/// `else if` (handled by recursing into `parse_if_else` for the else branch).
///
/// An `else` branch is mandatory in this language: every `if` is an
/// expression and must produce a value on both paths.
fn parse_if_else(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_if_else: {:?}", parser_state);
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::IF
    ));
    parser_state.consume_n_characters(keywords::IF.len());
    parser_state.consume_until_nonwhitespace();
    let condition = parse_expr(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    let if_branch = parse_block(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    if !starts_with_keyword(parser_state.remaining_input, keywords::ELSE) {
        return Err(make_parse_error(
            parser_state,
            format!("Expected keyword '{}'", keywords::ELSE).as_str(),
        ));
    }
    parser_state.consume_n_characters(keywords::ELSE.len());
    parser_state.consume_until_nonwhitespace();
    // After `else`, either a block (`{`) or a chained `else if` must follow.
    // IDIOM FIX: propagate errors with `?` instead of the previous
    // `is_err()` check followed by `unwrap()`.
    let else_branch = if parser_state.next_character() == Some('{') {
        // Not "if else"
        parse_block(parser_state)?
    } else if starts_with_keyword(parser_state.remaining_input, keywords::IF) {
        parse_if_else(parser_state)?
    } else {
        return Err(make_parse_error(
            parser_state,
            format!("Expected keyword '{}' or '{{'", keywords::IF).as_str(),
        ));
    };
    Ok(Expr::IfElse(IfElse {
        condition: Box::new(condition),
        if_branch: Box::new(if_branch),
        else_branch: Box::new(else_branch),
    }))
}
/// Parses a `while <cond> { ... }` loop.
///
/// A `while` is a statement, so the loop must be followed by a newline and is
/// chained with the rest of the program via `Expr::Statement`.
fn parse_while(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_while: {:?}", parser_state);
    debug_assert!(starts_with_keyword(
        parser_state.remaining_input,
        keywords::WHILE
    ));
    parser_state.consume_n_characters(keywords::WHILE.len());
    parser_state.consume_until_nonwhitespace();
    let loop_condition = parse_expr(parser_state)?;
    parser_state.consume_until_nonwhitespace();
    let loop_body = parse_block(parser_state)?;
    // The loop must be terminated by a newline before the program continues.
    parser_state.consume_until_nonwhitespace_or_newline();
    parser_state.expect_character_and_consume('\n')?;
    let continuation = parse_expr(parser_state)?;
    let while_statement = Statement::While {
        condition: Box::new(loop_condition),
        body: Box::new(loop_body),
    };
    Ok(Expr::Statement(while_statement, Box::new(continuation)))
}
/// Top-level expression parser: dispatches on the next keyword/character and
/// recursively builds the AST. This is the main entry used by `parse`.
fn parse_expr(parser_state: &mut ParserState) -> ParseResult {
    debug_println!("parse_expr: {:?}", parser_state);
    parser_state.consume_until_nonwhitespace();
    // Take care of Unit-returning blocks and top-levels
    match parser_state.next_character() {
        None => return Ok(Expr::Unit),
        Some(c) => {
            if is_block_terminator(c) {
                return Ok(Expr::Unit);
            }
        }
    }
    // Keyword-introduced forms, checked in priority order.
    if starts_with_keyword(parser_state.remaining_input, keywords::RUST_FUNCTION) {
        return parse_rust_function_definition_expr(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::FUNCTION) {
        return parse_function_definition_expr(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::IMPL) {
        return parse_impl(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::ENUM) {
        return parse_enum_definition(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::WHILE) {
        return parse_while(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::MATCH) {
        return parse_match(parser_state);
    }
    if starts_with_keyword(parser_state.remaining_input, keywords::LET) {
        // TODO: let currently is cosmetic to make typing mold code feel more natural
        // for rust users. Maybe it should be impued with variable declaration semantics as well?
        parser_state.consume_n_characters(keywords::LET.len());
        return parse_expr(parser_state);
    }
    let primary = parse_primary(parser_state)?;
    parser_state.consume_until_nonwhitespace_or_newline();
    // IDIOM FIX: bind-or-return in one step instead of the previous
    // `is_none()` check followed by a later `unwrap()`.
    let next_character = match parser_state.next_character() {
        Some(c) => c,
        None => return Ok(primary),
    };
    // IDIOM FIX: `.is_some()` instead of `if let Some(_) = ...`.
    if parse_binop(parser_state, false).is_some() {
        return parse_binop_rhs(parser_state, primary, -1);
    }
    if is_expression_terminator(next_character) {
        Ok(primary)
    } else if next_character == '=' {
        // TODO: don't parse assignment lhs from primary expression, since this is a hack
        match primary {
            Expr::Ident(identifier) => {
                parse_assignment(parser_state, AssignmentLHS::Single(identifier))
            }
            _ => Err(make_parse_error(
                parser_state,
                "Left-hand side of assignment must be a variable name",
            )),
        }
    } else if next_character == '.' {
        parse_method_call(parser_state, primary)
    } else if next_character == '\n' {
        parse_expr_as_statement(parser_state, primary)
    } else {
        Err(make_parse_error(
            parser_state,
            format!(
                "Expected a binary operator, '=', or end of expression, but found {}",
                next_character
            )
            .as_str(),
        ))
    }
}
/// Public entry point: parses a complete source string into an AST.
pub fn parse(input: &str) -> ParseResult {
    // Both fields start at the full input; `remaining_input` shrinks as the
    // parser consumes characters, while `original_input` stays fixed so
    // errors can report positions.
    let mut state = ParserState {
        original_input: input,
        remaining_input: input,
    };
    parse_expr(&mut state)
}
#[cfg(test)]
mod tests {
// End-to-end parser tests: each case feeds a source string to `parse` and
// asserts structural equality against the full expected AST.
use super::ast::*;
use super::*;
#[test]
fn parse_simple_binary_op() {
assert_eq!(
parse("1+2"),
Ok(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Number(1f64)),
rhs: Box::new(Expr::Number(2f64))
})
);
}
// Parentheses override the default association of `+`.
#[test]
fn parse_grouped_ops() {
assert_eq!(
parse("1+((2+3)+4)+5"),
Ok(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Number(1f64)),
rhs: Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Number(2f64)),
rhs: Box::new(Expr::Number(3f64))
}),
rhs: Box::new(Expr::Number(4f64))
}),
}),
rhs: Box::new(Expr::Number(5f64)),
})
);
}
// Newline-separated statements nest right-associatively as Expr::Statement.
#[test]
fn parse_non_void_statement() {
assert_eq!(
parse("a = 1\nb = 2\na + b"),
Ok(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::Number(1f64))
},
Box::new(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("b")),
rhs: Box::new(Expr::Number(2f64))
},
Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Ident(Identifier::from("a"))),
rhs: Box::new(Expr::Ident(Identifier::from("b")))
})
))
))
)
}
// A trailing statement with nothing after it yields Expr::Unit.
#[test]
fn parse_unit_returning_toplevel() {
assert_eq!(
parse("a = 1"),
Ok(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::Number(1f64))
},
Box::new(Expr::Unit)
))
)
}
#[test]
fn parse_unit_returning_block() {
assert_eq!(
parse(
r#"
{
a = 1
b = 2
}
"#
),
Ok(Expr::Block(Box::new(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::Number(1f64))
},
Box::new(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("b")),
rhs: Box::new(Expr::Number(2f64))
},
Box::new(Expr::Unit)
))
))))
);
}
#[test]
fn parse_empty_block() {
assert_eq!(
parse(
r#"
{
}
"#
),
Ok(Expr::Block(Box::new(Expr::Unit)))
)
}
#[test]
fn parse_cascading_assignments() {
assert_eq!(
parse("a = 1\nb = a\nb"),
Ok(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::Number(1f64))
},
Box::new(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("b")),
rhs: Box::new(Expr::Ident(Identifier::from("a")))
},
Box::new(Expr::Ident(Identifier::from("b")))
))
))
);
}
#[test]
fn parse_function_definition() {
assert_eq!(
parse(
r#"
fn f(a, b) {
a + b
}
"#
),
Ok(Expr::Statement(
Statement::FunctionDefinition(FunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("f"),
args: vec!(Identifier::from("a"), Identifier::from("b")),
},
body: Box::new(Expr::Block(Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Ident(Identifier::from("a"))),
rhs: Box::new(Expr::Ident(Identifier::from("b"))),
})))
}),
Box::new(Expr::Unit)
))
)
}
#[test]
fn parse_function_multiple_function_definitions() {
assert_eq!(
parse(
r#"
fn f(a) {
a
}
fn g(b) {
b
}
"#
),
Ok(Expr::Statement(
Statement::FunctionDefinition(FunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("f"),
args: vec!(Identifier::from("a")),
},
body: Box::new(Expr::Block(Box::new(Expr::Ident(Identifier::from("a")))))
}),
Box::new(Expr::Statement(
Statement::FunctionDefinition(FunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("g"),
args: vec!(Identifier::from("b")),
},
body: Box::new(Expr::Block(Box::new(Expr::Ident(Identifier::from("b")))))
}),
Box::new(Expr::Unit)
))
))
)
}
#[test]
fn parse_function_call() {
assert_eq!(
parse("a(1, 2)"),
Ok(Expr::FunctionCall(FunctionCall {
name: Identifier::from("a"),
args: vec!(Expr::Number(1f64), Expr::Number(2f64),)
}))
);
}
#[test]
fn parse_function_call_with_complex_expression() {
assert_eq!(
parse("a(1+2, (3 + 4) + 5)"),
Ok(Expr::FunctionCall(FunctionCall {
name: Identifier::from("a"),
args: vec!(
Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Number(1f64)),
rhs: Box::new(Expr::Number(2f64)),
},
Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Number(3f64)),
rhs: Box::new(Expr::Number(4f64)),
}),
rhs: Box::new(Expr::Number(5f64)),
}
)
}))
);
}
// A call followed by a newline becomes a statement, not an expression value.
#[test]
fn parse_function_call_statement() {
assert_eq!(
parse("a(1, 2)\n2"),
Ok(Expr::Statement(
Statement::FunctionCall(FunctionCall {
name: Identifier::from("a"),
args: vec!(Expr::Number(1f64), Expr::Number(2f64),)
}),
Box::new(Expr::Number(2f64))
))
);
}
#[test]
fn parse_function_definition_then_call() {
assert_eq!(
parse(
r#"
fn f(x) {
x + 1
}
f(1)"#
),
Ok(Expr::Statement(
Statement::FunctionDefinition(FunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("f"),
args: vec!(Identifier::from("x")),
},
body: Box::new(Expr::Block(Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Ident(Identifier::from("x"))),
rhs: Box::new(Expr::Number(1f64))
})))
}),
Box::new(Expr::FunctionCall(FunctionCall {
name: Identifier::from("f"),
args: vec!(Expr::Number(1f64))
}))
))
);
}
#[test]
fn parse_string_literal() {
assert_eq!(
parse("a = \"hello, mold\""),
Ok(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::String(String::from("hello, mold")))
},
Box::new(Expr::Unit)
))
);
}
#[test]
fn parse_method_call() {
assert_eq!(
parse("\"hello\".iter()"),
Ok(Expr::MethodCall {
name: Identifier::from("iter"),
target: Box::new(Expr::String(String::from("hello"))),
args: Vec::new()
})
)
}
#[test]
fn parse_impl() {
assert_eq!(
parse(
r#"impl String {
fn len(self) {
2
}
fn g() {
3
}
}"#
),
Ok(Expr::Statement(
Statement::Impl {
tid: TypeID::from("String"),
methods: vec!(
FunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("len"),
args: vec!(Identifier::from("self")),
},
body: Box::new(Expr::Block(Box::new(Expr::Number(2f64))))
},
FunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("g"),
args: Vec::new(),
},
body: Box::new(Expr::Block(Box::new(Expr::Number(3f64))))
}
)
},
Box::new(Expr::Unit)
),)
);
}
// Comments must be invisible to the parser: both programs produce equal ASTs.
#[test]
fn comments() {
assert_eq!(
parse(
r#"fn f(x) {
// Do the thing
x + 1
}
// Now call the thing
f(1)
"#
),
parse(
r#"fn f(x) {
x + 1
}
f(1)
"#
)
);
}
#[test]
fn parse_if_else() {
assert_eq!(
parse("if x { 1 } else { 2 }"),
Ok(Expr::IfElse(IfElse {
condition: Box::new(Expr::Ident(Identifier::from("x"))),
if_branch: Box::new(Expr::Block(Box::new(Expr::Number(1f64)))),
else_branch: Box::new(Expr::Block(Box::new(Expr::Number(2f64)))),
}))
)
}
#[test]
fn parse_if_else_if_else() {
assert_eq!(
parse("if x { 1 } else if true { 2 } else { false }"),
Ok(Expr::IfElse(IfElse {
condition: Box::new(Expr::Ident(Identifier::from("x"))),
if_branch: Box::new(Expr::Block(Box::new(Expr::Number(1f64)))),
else_branch: Box::new(Expr::IfElse(IfElse {
condition: Box::new(Expr::Bool(true)),
if_branch: Box::new(Expr::Block(Box::new(Expr::Number(2f64)))),
else_branch: Box::new(Expr::Block(Box::new(Expr::Bool(false)))),
})),
}))
)
}
#[test]
fn parse_bool_literal() {
assert_eq!(
parse(
r#"
a = true
b = false
"#
),
Ok(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::Bool(true))
},
Box::new(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("b")),
rhs: Box::new(Expr::Bool(false))
},
Box::new(Expr::Unit)
))
))
)
}
// Checks precedence: `&&` binds tighter than `||`, and `||` is left-associative.
#[test]
fn parse_complex_binops() {
assert_eq!(
parse("true && false || false && true || false"),
Ok(Expr::BinOp {
op: Op::Or,
lhs: Box::new(Expr::BinOp {
op: Op::Or,
lhs: Box::new(Expr::BinOp {
op: Op::And,
lhs: Box::new(Expr::Bool(true)),
rhs: Box::new(Expr::Bool(false)),
}),
rhs: Box::new(Expr::BinOp {
op: Op::And,
lhs: Box::new(Expr::Bool(false)),
rhs: Box::new(Expr::Bool(true)),
})
}),
rhs: Box::new(Expr::Bool(false))
})
);
}
// `rust fn` bodies are kept verbatim as a string, not parsed as mold code.
#[test]
fn parse_rust_function() {
assert_eq!(
parse(
r#"
rust fn print() {
println!("Hello, world!")
}
print()"#
),
Ok(Expr::Statement(
Statement::RustFunctionDefinition(RustFunctionDefinition {
signature: FunctionSignature {
name: Identifier::from("print"),
args: vec!()
},
body: String::from(
r#"
println!("Hello, world!")
"#
)
}),
Box::new(Expr::FunctionCall(FunctionCall {
name: Identifier::from("print"),
args: vec!()
}))
))
)
}
#[test]
fn parse_block() {
assert_eq!(
parse("{ a + b }"),
Ok(Expr::Block(Box::new(Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Ident(Identifier::from("a"))),
rhs: Box::new(Expr::Ident(Identifier::from("b"))),
})))
);
}
#[test]
fn parse_enum_definition() {
assert_eq!(
parse(
r#"
enum Option {
None,
Some(value)
}"#
),
Ok(Expr::Statement(
Statement::EnumDefinition(EnumDefinition {
name: TypeID::from("Option"),
alternatives: vec!(
EnumItem {
tag: Identifier::from("None"),
associated_values: vec!()
},
EnumItem {
tag: Identifier::from("Some"),
associated_values: vec!(Identifier::from("value"))
}
)
}),
Box::new(Expr::Unit)
))
);
}
#[test]
fn parse_enum_alternative() {
assert_eq!(
parse("a = MyEnum::True\nOtherEnum::Tuple(1 + 2, \"hello\")"),
Ok(Expr::Statement(
Statement::Assignment {
lhs: AssignmentLHS::Single(Identifier::from("a")),
rhs: Box::new(Expr::EnumAlternative(EnumAlternative {
enum_name: TypeID::from("MyEnum"),
alternative_name: Identifier::from("True"),
associated_values: vec!()
}))
},
Box::new(Expr::EnumAlternative(EnumAlternative {
enum_name: TypeID::from("OtherEnum"),
alternative_name: Identifier::from("Tuple"),
associated_values: vec!(
Expr::BinOp {
op: Op::Plus,
lhs: Box::new(Expr::Number(1f64)),
rhs: Box::new(Expr::Number(2f64)),
},
Expr::String(String::from("hello"))
)
}))
))
)
}
// Note the trailing comma after the last arm is required by the grammar here.
#[test]
fn parse_match() {
assert_eq!(
parse("match x { Option::None => 1, Option::Some(n) => n, }"),
Ok(Expr::Match(Match {
match_expr: Box::new(Expr::Ident(Identifier::from("x"))),
arms: vec!(
MatchArm {
pattern: MatchPattern::EnumDestructure(EnumDestructure {
enum_name: TypeID::from("Option"),
alternative_name: Identifier::from("None"),
associated_values: vec!()
}),
expr: Expr::Number(1f64)
},
MatchArm {
pattern: MatchPattern::EnumDestructure(EnumDestructure {
enum_name: TypeID::from("Option"),
alternative_name: Identifier::from("Some"),
associated_values: vec!(Identifier::from("n"))
}),
expr: Expr::Ident(Identifier::from("n"))
}
)
}))
)
}
}
|
use quote::quote_spanned;
use super::{
FlowProperties, FlowPropertyVal, OperatorCategory, OperatorConstraints, OperatorInstance,
OperatorWriteOutput, WriteContextArgs, RANGE_0, RANGE_1,
};
/// Filter outputs a subsequence of the items it receives at its input, according to a
/// Rust boolean closure passed in as an argument.
///
/// The closure receives a reference `&T` rather than an owned value `T` because filtering does
/// not modify or take ownership of the values. If you need to modify the values while filtering
/// use [`filter_map`](#filter_map) instead.
///
/// > Note: The closure has access to the [`context` object](surface_flows.md#the-context-object).
///
/// ```hydroflow
/// source_iter(vec!["hello", "world"]) -> filter(|x| x.starts_with('w'))
/// -> assert_eq(["world"]);
/// ```
pub const FILTER: OperatorConstraints = OperatorConstraints {
name: "filter",
categories: &[OperatorCategory::Filter],
// Exactly one input edge and one output edge, both hard requirements.
hard_range_inn: RANGE_1,
soft_range_inn: RANGE_1,
hard_range_out: RANGE_1,
soft_range_out: RANGE_1,
// The single operator argument is the filter predicate closure.
num_args: 1,
persistence_args: RANGE_0,
type_args: RANGE_0,
is_external_input: false,
// No named ports on either side.
ports_inn: None,
ports_out: None,
// `Preserve`: determinism and monotonicity of the output match whatever
// the upstream provides.
properties: FlowProperties {
deterministic: FlowPropertyVal::Preserve,
monotonic: FlowPropertyVal::Preserve,
inconsistency_tainted: false,
},
input_delaytype_fn: |_| None,
// Code generation: emit the filter either as a pull-side iterator adaptor
// or as a push-side pusherator wrapper, depending on `is_pull`.
write_fn: |&WriteContextArgs {
root,
op_span,
ident,
inputs,
outputs,
is_pull,
op_inst: OperatorInstance { arguments, .. },
..
},
_| {
let write_iterator = if is_pull {
// Pull side: plain `Iterator::filter` over the single input.
let input = &inputs[0];
quote_spanned! {op_span=>
let #ident = #input.filter(#arguments);
}
} else {
// Push side: wrap the single downstream output in a `Filter` pusherator.
let output = &outputs[0];
quote_spanned! {op_span=>
let #ident = #root::pusherator::filter::Filter::new(#arguments, #output);
}
};
Ok(OperatorWriteOutput {
write_iterator,
..Default::default()
})
},
};
|
use crate::kind::Kind;
use crate::resource::Resource;
// A named collection of resources (one entry per resource kind).
#[derive(Clone)]
pub struct Structure {
// Display name of the structure; also used in search (see `searchable_content`).
pub name: String,
// Resources held by this structure, in insertion order.
pub resources: Vec<Resource>,
}
impl Structure {
    /// Builds a `Structure` from a name and `(kind, amount)` pairs, converting
    /// each pair into a `Resource`.
    pub fn new(name: &str, resources: Vec<(Kind, f64)>) -> Structure {
        Structure {
            name: name.to_string(),
            // PERF FIX: consume the owned Vec with `into_iter` and destructure
            // the tuple, avoiding the per-element `Kind` clone the previous
            // `iter()` + `r.0.clone()` version paid for.
            resources: resources
                .into_iter()
                .map(|(kind, amount)| Resource::new(kind, amount))
                .collect(),
        }
    }
    /// Amount held of `kind`, or 0.0 when this structure has no such resource.
    pub fn amount_for(&self, kind: Kind) -> f64 {
        self.resources
            .iter()
            .find(|r| r.kind == kind)
            .map(|r| r.amount)
            .unwrap_or(0.0)
    }
    /// Lowercased "name + resource names" string used for text matching.
    pub fn searchable_content(&self) -> String {
        format!("{} {}", self.name, self.resource_names().join(" ")).to_lowercase()
    }
    /// Names of all resource kinds, in storage order.
    pub fn resource_names(&self) -> Vec<String> {
        self.resources.iter().map(|r| r.kind.name()).collect()
    }
}
|
#![allow(dead_code, non_camel_case_types)]
use libc::size_t;
// NOTE(review): machine-generated libgit2 FFI bindings (pre-1.0 Rust syntax).
// Values mirror the libgit2 C headers — do not hand-edit; regenerate instead.
// C enums are flattened to an integer type alias plus `static` constants.
// git_cap_t: library capability bitflags.
pub type Enum_Unnamed1 = ::libc::c_uint;
pub static GIT_CAP_THREADS: ::libc::c_uint = 1;
pub static GIT_CAP_HTTPS: ::libc::c_uint = 2;
pub static GIT_CAP_SSH: ::libc::c_uint = 4;
pub type git_cap_t = Enum_Unnamed1;
// git_libgit2_opt_t: option selectors for git_libgit2_opts().
pub type Enum_Unnamed2 = ::libc::c_uint;
pub static GIT_OPT_GET_MWINDOW_SIZE: ::libc::c_uint = 0;
pub static GIT_OPT_SET_MWINDOW_SIZE: ::libc::c_uint = 1;
pub static GIT_OPT_GET_MWINDOW_MAPPED_LIMIT: ::libc::c_uint = 2;
pub static GIT_OPT_SET_MWINDOW_MAPPED_LIMIT: ::libc::c_uint = 3;
pub static GIT_OPT_GET_SEARCH_PATH: ::libc::c_uint = 4;
pub static GIT_OPT_SET_SEARCH_PATH: ::libc::c_uint = 5;
pub static GIT_OPT_SET_CACHE_OBJECT_LIMIT: ::libc::c_uint = 6;
pub static GIT_OPT_SET_CACHE_MAX_SIZE: ::libc::c_uint = 7;
pub static GIT_OPT_ENABLE_CACHING: ::libc::c_uint = 8;
pub static GIT_OPT_GET_CACHED_MEMORY: ::libc::c_uint = 9;
pub static GIT_OPT_GET_TEMPLATE_PATH: ::libc::c_uint = 10;
pub static GIT_OPT_SET_TEMPLATE_PATH: ::libc::c_uint = 11;
pub type git_libgit2_opt_t = Enum_Unnamed2;
pub type git_off_t = i64;
pub type git_time_t = i64;
// git_otype: object types; negative values are sentinels (ANY/BAD).
pub type Enum_Unnamed3 = ::libc::c_int;
pub static GIT_OBJ_ANY: ::libc::c_int = -2;
pub static GIT_OBJ_BAD: ::libc::c_int = -1;
pub static GIT_OBJ__EXT1: ::libc::c_int = 0;
pub static GIT_OBJ_COMMIT: ::libc::c_int = 1;
pub static GIT_OBJ_TREE: ::libc::c_int = 2;
pub static GIT_OBJ_BLOB: ::libc::c_int = 3;
pub static GIT_OBJ_TAG: ::libc::c_int = 4;
pub static GIT_OBJ__EXT2: ::libc::c_int = 5;
pub static GIT_OBJ_OFS_DELTA: ::libc::c_int = 6;
pub static GIT_OBJ_REF_DELTA: ::libc::c_int = 7;
pub type git_otype = Enum_Unnamed3;
// Empty enums model opaque C structs: they cannot be constructed in Rust and
// are only ever used behind raw pointers.
pub enum Struct_git_odb { }
pub type git_odb = Struct_git_odb;
pub enum Struct_git_odb_backend { }
pub type git_odb_backend = Struct_git_odb_backend;
pub enum Struct_git_odb_object { }
pub type git_odb_object = Struct_git_odb_object;
pub enum Struct_git_odb_stream { }
pub type git_odb_stream = Struct_git_odb_stream;
pub enum Struct_git_odb_writepack { }
pub type git_odb_writepack = Struct_git_odb_writepack;
pub enum Struct_git_refdb { }
pub type git_refdb = Struct_git_refdb;
pub enum Struct_git_refdb_backend { }
pub type git_refdb_backend = Struct_git_refdb_backend;
pub enum Struct_git_repository { }
pub type git_repository = Struct_git_repository;
pub enum Struct_git_object { }
pub type git_object = Struct_git_object;
pub enum Struct_git_revwalk { }
pub type git_revwalk = Struct_git_revwalk;
pub enum Struct_git_tag { }
pub type git_tag = Struct_git_tag;
pub enum Struct_git_blob { }
pub type git_blob = Struct_git_blob;
pub enum Struct_git_commit { }
pub type git_commit = Struct_git_commit;
pub enum Struct_git_tree_entry { }
pub type git_tree_entry = Struct_git_tree_entry;
pub enum Struct_git_tree { }
pub type git_tree = Struct_git_tree;
pub enum Struct_git_treebuilder { }
pub type git_treebuilder = Struct_git_treebuilder;
pub enum Struct_git_index { }
pub type git_index = Struct_git_index;
pub enum Struct_git_index_conflict_iterator { }
pub type git_index_conflict_iterator = Struct_git_index_conflict_iterator;
pub enum Struct_git_config { }
pub type git_config = Struct_git_config;
pub enum Struct_git_config_backend { }
pub type git_config_backend = Struct_git_config_backend;
pub enum Struct_git_reflog_entry { }
pub type git_reflog_entry = Struct_git_reflog_entry;
pub enum Struct_git_reflog { }
pub type git_reflog = Struct_git_reflog;
pub enum Struct_git_note { }
pub type git_note = Struct_git_note;
pub enum Struct_git_packbuilder { }
pub type git_packbuilder = Struct_git_packbuilder;
// NOTE(review): machine-generated libgit2 FFI bindings; layout must match the
// C headers exactly (`#[repr(C)]`), so code here should not be hand-edited.
#[repr(C)]
pub struct Struct_git_time {
pub time: git_time_t,
// Timezone offset, presumably in minutes from UTC — TODO confirm against git2/types.h.
pub offset: ::libc::c_int,
}
pub type git_time = Struct_git_time;
#[repr(C)]
pub struct Struct_git_signature {
pub name: *mut ::libc::c_char,
pub email: *mut ::libc::c_char,
pub when: git_time,
}
pub type git_signature = Struct_git_signature;
// Opaque C structs (constructed and owned by libgit2, used via raw pointers).
pub enum Struct_git_reference { }
pub type git_reference = Struct_git_reference;
pub enum Struct_git_reference_iterator { }
pub type git_reference_iterator = Struct_git_reference_iterator;
pub enum Struct_git_merge_head { }
pub type git_merge_head = Struct_git_merge_head;
pub enum Struct_git_merge_result { }
pub type git_merge_result = Struct_git_merge_result;
pub enum Struct_git_status_list { }
pub type git_status_list = Struct_git_status_list;
// git_ref_t: reference kinds; LISTALL is OID|SYMBOLIC.
pub type Enum_Unnamed4 = ::libc::c_uint;
pub static GIT_REF_INVALID: ::libc::c_uint = 0;
pub static GIT_REF_OID: ::libc::c_uint = 1;
pub static GIT_REF_SYMBOLIC: ::libc::c_uint = 2;
pub static GIT_REF_LISTALL: ::libc::c_uint = 3;
pub type git_ref_t = Enum_Unnamed4;
pub type Enum_Unnamed5 = ::libc::c_uint;
pub static GIT_BRANCH_LOCAL: ::libc::c_uint = 1;
pub static GIT_BRANCH_REMOTE: ::libc::c_uint = 2;
pub type git_branch_t = Enum_Unnamed5;
// git_filemode_t: tree-entry modes, matching git's octal modes in decimal
// (e.g. 16384 == 0o40000 tree, 33188 == 0o100644 blob).
pub type Enum_Unnamed6 = ::libc::c_uint;
pub static GIT_FILEMODE_NEW: ::libc::c_uint = 0;
pub static GIT_FILEMODE_TREE: ::libc::c_uint = 16384;
pub static GIT_FILEMODE_BLOB: ::libc::c_uint = 33188;
pub static GIT_FILEMODE_BLOB_EXECUTABLE: ::libc::c_uint = 33261;
pub static GIT_FILEMODE_LINK: ::libc::c_uint = 40960;
pub static GIT_FILEMODE_COMMIT: ::libc::c_uint = 57344;
pub type git_filemode_t = Enum_Unnamed6;
pub enum Struct_git_refspec { }
pub type git_refspec = Struct_git_refspec;
pub enum Struct_git_remote { }
pub type git_remote = Struct_git_remote;
pub enum Struct_git_push { }
pub type git_push = Struct_git_push;
// NOTE(review): the underlying Struct_git_remote_head / Struct_git_remote_callbacks
// definitions are not in this chunk; these aliases assume they exist elsewhere in the file.
pub type git_remote_head = Struct_git_remote_head;
pub type git_remote_callbacks = Struct_git_remote_callbacks;
#[repr(C)]
pub struct Struct_git_transfer_progress {
pub total_objects: ::libc::c_uint,
pub indexed_objects: ::libc::c_uint,
pub received_objects: ::libc::c_uint,
pub local_objects: ::libc::c_uint,
pub total_deltas: ::libc::c_uint,
pub indexed_deltas: ::libc::c_uint,
pub received_bytes: size_t,
}
pub type git_transfer_progress = Struct_git_transfer_progress;
// Nullable C function pointer (Option<fn> maps to a possibly-NULL C callback).
pub type git_transfer_progress_callback =
::std::option::Option<unsafe extern "C" fn
(arg1: *const git_transfer_progress,
arg2: *mut ::libc::c_void) -> ::libc::c_int>;
pub enum Struct_git_submodule { }
pub type git_submodule = Struct_git_submodule;
pub type Enum_Unnamed7 = ::libc::c_int;
pub static GIT_SUBMODULE_UPDATE_RESET: ::libc::c_int = -1;
pub static GIT_SUBMODULE_UPDATE_CHECKOUT: ::libc::c_int = 1;
pub static GIT_SUBMODULE_UPDATE_REBASE: ::libc::c_int = 2;
pub static GIT_SUBMODULE_UPDATE_MERGE: ::libc::c_int = 3;
pub static GIT_SUBMODULE_UPDATE_NONE: ::libc::c_int = 4;
pub static GIT_SUBMODULE_UPDATE_DEFAULT: ::libc::c_int = 0;
pub type git_submodule_update_t = Enum_Unnamed7;
pub type Enum_Unnamed8 = ::libc::c_int;
pub static GIT_SUBMODULE_IGNORE_RESET: ::libc::c_int = -1;
pub static GIT_SUBMODULE_IGNORE_NONE: ::libc::c_int = 1;
pub static GIT_SUBMODULE_IGNORE_UNTRACKED: ::libc::c_int = 2;
pub static GIT_SUBMODULE_IGNORE_DIRTY: ::libc::c_int = 3;
pub static GIT_SUBMODULE_IGNORE_ALL: ::libc::c_int = 4;
pub static GIT_SUBMODULE_IGNORE_DEFAULT: ::libc::c_int = 0;
pub type git_submodule_ignore_t = Enum_Unnamed8;
pub type Enum_Unnamed9 = ::libc::c_uint;
pub static GIT_ATTR_UNSPECIFIED_T: ::libc::c_uint = 0;
pub static GIT_ATTR_TRUE_T: ::libc::c_uint = 1;
pub static GIT_ATTR_FALSE_T: ::libc::c_uint = 2;
pub static GIT_ATTR_VALUE_T: ::libc::c_uint = 3;
pub type git_attr_t = Enum_Unnamed9;
pub type git_attr_foreach_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const ::libc::c_char,
arg2: *const ::libc::c_char,
arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// NOTE(review): machine-generated libgit2 FFI bindings; uses pre-1.0 Rust
// fixed-size array syntax `[T, ..N]` — modern Rust would write `[T; N]`.
#[repr(C)]
pub struct Struct_git_oid {
// Raw 20-byte SHA-1 object id.
pub id: [::libc::c_uchar, ..20u],
}
pub type git_oid = Struct_git_oid;
pub enum Struct_git_oid_shorten { }
pub type git_oid_shorten = Struct_git_oid_shorten;
// git_buf: growable byte buffer owned by libgit2 (ptr/allocated-size/used-size).
#[repr(C)]
pub struct Struct_Unnamed10 {
pub ptr: *mut ::libc::c_char,
pub asize: size_t,
pub size: size_t,
}
pub type git_buf = Struct_Unnamed10;
pub type git_blob_chunk_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *mut ::libc::c_char, arg2: size_t,
arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// git_blame_flag_t: blame option bitflags.
pub type Enum_Unnamed11 = ::libc::c_uint;
pub static GIT_BLAME_NORMAL: ::libc::c_uint = 0;
pub static GIT_BLAME_TRACK_COPIES_SAME_FILE: ::libc::c_uint = 1;
pub static GIT_BLAME_TRACK_COPIES_SAME_COMMIT_MOVES: ::libc::c_uint = 2;
pub static GIT_BLAME_TRACK_COPIES_SAME_COMMIT_COPIES: ::libc::c_uint = 4;
pub static GIT_BLAME_TRACK_COPIES_ANY_COMMIT_COPIES: ::libc::c_uint = 8;
pub type git_blame_flag_t = Enum_Unnamed11;
#[repr(C)]
pub struct Struct_git_blame_options {
pub version: ::libc::c_uint,
pub flags: u32,
pub min_match_characters: u16,
pub newest_commit: git_oid,
pub oldest_commit: git_oid,
pub min_line: u32,
pub max_line: u32,
}
pub type git_blame_options = Struct_git_blame_options;
#[repr(C)]
pub struct Struct_git_blame_hunk {
pub lines_in_hunk: u16,
pub final_commit_id: git_oid,
pub final_start_line_number: u16,
pub final_signature: *mut git_signature,
pub orig_commit_id: git_oid,
pub orig_path: *const ::libc::c_char,
pub orig_start_line_number: u16,
pub orig_signature: *mut git_signature,
pub boundary: ::libc::c_char,
}
pub type git_blame_hunk = Struct_git_blame_hunk;
pub enum Struct_git_blame { }
pub type git_blame = Struct_git_blame;
pub enum Struct_git_branch_iterator { }
pub type git_branch_iterator = Struct_git_branch_iterator;
pub type git_treebuilder_filter_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const git_tree_entry,
arg2: *mut ::libc::c_void) -> ::libc::c_int>;
pub type git_treewalk_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const ::libc::c_char,
arg2: *const git_tree_entry,
arg3: *mut ::libc::c_void) -> ::libc::c_int>;
pub type Enum_Unnamed12 = ::libc::c_uint;
pub static GIT_TREEWALK_PRE: ::libc::c_uint = 0;
pub static GIT_TREEWALK_POST: ::libc::c_uint = 1;
pub type git_treewalk_mode = Enum_Unnamed12;
// git_strarray: C array-of-strings (strings/count pair).
#[repr(C)]
pub struct Struct_git_strarray {
pub strings: *mut *mut ::libc::c_char,
pub count: size_t,
}
pub type git_strarray = Struct_git_strarray;
pub type git_reference_foreach_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *mut git_reference,
arg2: *mut ::libc::c_void) -> ::libc::c_int>;
pub type git_reference_foreach_name_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const ::libc::c_char,
arg2: *mut ::libc::c_void) -> ::libc::c_int>;
pub type Enum_Unnamed13 = ::libc::c_uint;
pub static GIT_REF_FORMAT_NORMAL: ::libc::c_uint = 0;
pub static GIT_REF_FORMAT_ALLOW_ONELEVEL: ::libc::c_uint = 1;
pub static GIT_REF_FORMAT_REFSPEC_PATTERN: ::libc::c_uint = 2;
pub static GIT_REF_FORMAT_REFSPEC_SHORTHAND: ::libc::c_uint = 4;
pub type git_reference_normalize_t = Enum_Unnamed13;
// NOTE(review): machine-generated libgit2 diff bindings; values mirror the C
// headers and should not be hand-edited.
// git_diff_option_t: bitflags controlling diff generation.
pub type Enum_Unnamed14 = ::libc::c_uint;
pub static GIT_DIFF_NORMAL: ::libc::c_uint = 0;
pub static GIT_DIFF_REVERSE: ::libc::c_uint = 1;
pub static GIT_DIFF_INCLUDE_IGNORED: ::libc::c_uint = 2;
pub static GIT_DIFF_RECURSE_IGNORED_DIRS: ::libc::c_uint = 4;
pub static GIT_DIFF_INCLUDE_UNTRACKED: ::libc::c_uint = 8;
pub static GIT_DIFF_RECURSE_UNTRACKED_DIRS: ::libc::c_uint = 16;
pub static GIT_DIFF_INCLUDE_UNMODIFIED: ::libc::c_uint = 32;
pub static GIT_DIFF_INCLUDE_TYPECHANGE: ::libc::c_uint = 64;
pub static GIT_DIFF_INCLUDE_TYPECHANGE_TREES: ::libc::c_uint = 128;
pub static GIT_DIFF_IGNORE_FILEMODE: ::libc::c_uint = 256;
pub static GIT_DIFF_IGNORE_SUBMODULES: ::libc::c_uint = 512;
pub static GIT_DIFF_IGNORE_CASE: ::libc::c_uint = 1024;
pub static GIT_DIFF_DISABLE_PATHSPEC_MATCH: ::libc::c_uint = 4096;
pub static GIT_DIFF_SKIP_BINARY_CHECK: ::libc::c_uint = 8192;
pub static GIT_DIFF_ENABLE_FAST_UNTRACKED_DIRS: ::libc::c_uint = 16384;
pub static GIT_DIFF_FORCE_TEXT: ::libc::c_uint = 1048576;
pub static GIT_DIFF_FORCE_BINARY: ::libc::c_uint = 2097152;
pub static GIT_DIFF_IGNORE_WHITESPACE: ::libc::c_uint = 4194304;
pub static GIT_DIFF_IGNORE_WHITESPACE_CHANGE: ::libc::c_uint = 8388608;
pub static GIT_DIFF_IGNORE_WHITESPACE_EOL: ::libc::c_uint = 16777216;
pub static GIT_DIFF_SHOW_UNTRACKED_CONTENT: ::libc::c_uint = 33554432;
pub static GIT_DIFF_SHOW_UNMODIFIED: ::libc::c_uint = 67108864;
pub static GIT_DIFF_PATIENCE: ::libc::c_uint = 268435456;
pub static GIT_DIFF_MINIMAL: ::libc::c_uint = 536870912;
pub type git_diff_option_t = Enum_Unnamed14;
pub enum Struct_git_diff { }
pub type git_diff = Struct_git_diff;
pub type Enum_Unnamed15 = ::libc::c_uint;
pub static GIT_DIFF_FLAG_BINARY: ::libc::c_uint = 1;
pub static GIT_DIFF_FLAG_NOT_BINARY: ::libc::c_uint = 2;
pub static GIT_DIFF_FLAG_VALID_OID: ::libc::c_uint = 4;
pub type git_diff_flag_t = Enum_Unnamed15;
// git_delta_t: per-file change status codes.
pub type Enum_Unnamed16 = ::libc::c_uint;
pub static GIT_DELTA_UNMODIFIED: ::libc::c_uint = 0;
pub static GIT_DELTA_ADDED: ::libc::c_uint = 1;
pub static GIT_DELTA_DELETED: ::libc::c_uint = 2;
pub static GIT_DELTA_MODIFIED: ::libc::c_uint = 3;
pub static GIT_DELTA_RENAMED: ::libc::c_uint = 4;
pub static GIT_DELTA_COPIED: ::libc::c_uint = 5;
pub static GIT_DELTA_IGNORED: ::libc::c_uint = 6;
pub static GIT_DELTA_UNTRACKED: ::libc::c_uint = 7;
pub static GIT_DELTA_TYPECHANGE: ::libc::c_uint = 8;
pub type git_delta_t = Enum_Unnamed16;
#[repr(C)]
pub struct Struct_Unnamed17 {
pub oid: git_oid,
pub path: *const ::libc::c_char,
pub size: git_off_t,
pub flags: u32,
pub mode: u16,
}
pub type git_diff_file = Struct_Unnamed17;
// One delta = old-file/new-file pair plus status and rename similarity.
#[repr(C)]
pub struct Struct_Unnamed18 {
pub status: git_delta_t,
pub flags: u32,
pub similarity: u16,
pub nfiles: u16,
pub old_file: git_diff_file,
pub new_file: git_diff_file,
}
pub type git_diff_delta = Struct_Unnamed18;
pub type git_diff_notify_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const git_diff,
arg2: *const git_diff_delta,
arg3: *const ::libc::c_char,
arg4: *mut ::libc::c_void) -> ::libc::c_int>;
#[repr(C)]
pub struct Struct_Unnamed19 {
pub version: ::libc::c_uint,
pub flags: u32,
pub ignore_submodules: git_submodule_ignore_t,
pub pathspec: git_strarray,
pub notify_cb: git_diff_notify_cb,
pub notify_payload: *mut ::libc::c_void,
pub context_lines: u16,
pub interhunk_lines: u16,
pub oid_abbrev: u16,
pub max_size: git_off_t,
pub old_prefix: *const ::libc::c_char,
pub new_prefix: *const ::libc::c_char,
}
pub type git_diff_options = Struct_Unnamed19;
pub type git_diff_file_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const git_diff_delta,
arg2: ::libc::c_float,
arg3: *mut ::libc::c_void) -> ::libc::c_int>;
pub type git_diff_hunk = Struct_git_diff_hunk;
#[repr(C)]
pub struct Struct_git_diff_hunk {
pub old_start: ::libc::c_int,
pub old_lines: ::libc::c_int,
pub new_start: ::libc::c_int,
pub new_lines: ::libc::c_int,
pub header_len: size_t,
// Fixed-size header text buffer (pre-1.0 `[T, ..N]` array syntax).
pub header: [::libc::c_char, ..128u],
}
pub type git_diff_hunk_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const git_diff_delta,
arg2: *const git_diff_hunk,
arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// git_diff_line_t: line-origin markers; the values are the ASCII codes of the
// characters git prints in patches (32 ' ', 43 '+', 45 '-', ...).
pub type Enum_Unnamed20 = ::libc::c_uint;
pub static GIT_DIFF_LINE_CONTEXT: ::libc::c_uint = 32;
pub static GIT_DIFF_LINE_ADDITION: ::libc::c_uint = 43;
pub static GIT_DIFF_LINE_DELETION: ::libc::c_uint = 45;
pub static GIT_DIFF_LINE_CONTEXT_EOFNL: ::libc::c_uint = 61;
pub static GIT_DIFF_LINE_ADD_EOFNL: ::libc::c_uint = 62;
pub static GIT_DIFF_LINE_DEL_EOFNL: ::libc::c_uint = 60;
pub static GIT_DIFF_LINE_FILE_HDR: ::libc::c_uint = 70;
pub static GIT_DIFF_LINE_HUNK_HDR: ::libc::c_uint = 72;
pub static GIT_DIFF_LINE_BINARY: ::libc::c_uint = 66;
pub type git_diff_line_t = Enum_Unnamed20;
pub type git_diff_line = Struct_git_diff_line;
#[repr(C)]
pub struct Struct_git_diff_line {
pub origin: ::libc::c_char,
pub old_lineno: ::libc::c_int,
pub new_lineno: ::libc::c_int,
pub num_lines: ::libc::c_int,
pub content_len: size_t,
pub content_offset: git_off_t,
pub content: *const ::libc::c_char,
}
pub type git_diff_line_cb =
::std::option::Option<unsafe extern "C" fn
(arg1: *const git_diff_delta,
arg2: *const git_diff_hunk,
arg3: *const git_diff_line,
arg4: *mut ::libc::c_void) -> ::libc::c_int>;
// git_diff_find_t: bitflags for rename/copy detection (git_diff_find_similar).
pub type Enum_Unnamed21 = ::libc::c_uint;
pub static GIT_DIFF_FIND_RENAMES: ::libc::c_uint = 1;
pub static GIT_DIFF_FIND_RENAMES_FROM_REWRITES: ::libc::c_uint = 2;
pub static GIT_DIFF_FIND_COPIES: ::libc::c_uint = 4;
pub static GIT_DIFF_FIND_COPIES_FROM_UNMODIFIED: ::libc::c_uint = 8;
pub static GIT_DIFF_FIND_REWRITES: ::libc::c_uint = 16;
pub static GIT_DIFF_BREAK_REWRITES: ::libc::c_uint = 32;
pub static GIT_DIFF_FIND_AND_BREAK_REWRITES: ::libc::c_uint = 48;
pub static GIT_DIFF_FIND_FOR_UNTRACKED: ::libc::c_uint = 64;
pub static GIT_DIFF_FIND_ALL: ::libc::c_uint = 255;
pub static GIT_DIFF_FIND_IGNORE_LEADING_WHITESPACE: ::libc::c_uint = 0;
pub static GIT_DIFF_FIND_IGNORE_WHITESPACE: ::libc::c_uint = 4096;
pub static GIT_DIFF_FIND_DONT_IGNORE_WHITESPACE: ::libc::c_uint = 8192;
pub static GIT_DIFF_FIND_EXACT_MATCH_ONLY: ::libc::c_uint = 16384;
pub static GIT_DIFF_BREAK_REWRITES_FOR_RENAMES_ONLY: ::libc::c_uint = 32768;
pub type git_diff_find_t = Enum_Unnamed21;
#[repr(C)]
pub struct Struct_Unnamed22 {
pub file_signature: ::std::option::Option<unsafe extern "C" fn
(arg1:
*mut *mut ::libc::c_void,
arg2: *const git_diff_file,
arg3:
*const ::libc::c_char,
arg4: *mut ::libc::c_void)
-> ::libc::c_int>,
pub buffer_signature: ::std::option::Option<unsafe extern "C" fn
(arg1:
*mut *mut ::libc::c_void,
arg2:
*const git_diff_file,
arg3:
*const ::libc::c_char,
arg4: size_t,
arg5:
*mut ::libc::c_void)
-> ::libc::c_int>,
pub free_signature: ::std::option::Option<unsafe extern "C" fn
(arg1: *mut ::libc::c_void,
arg2:
*mut ::libc::c_void)>,
pub similarity: ::std::option::Option<unsafe extern "C" fn
(arg1: *mut ::libc::c_int,
arg2: *mut ::libc::c_void,
arg3: *mut ::libc::c_void,
arg4: *mut ::libc::c_void)
-> ::libc::c_int>,
pub payload: *mut ::libc::c_void,
}
pub type git_diff_similarity_metric = Struct_Unnamed22;
// C-layout mirror of libgit2's `git_diff_find_options` (versioned).
#[repr(C)]
pub struct Struct_Unnamed23 {
    pub version: ::libc::c_uint,
    pub flags: u32,
    pub rename_threshold: u16,
    pub rename_from_rewrite_threshold: u16,
    pub copy_threshold: u16,
    pub break_rewrite_threshold: u16,
    pub rename_limit: size_t,
    pub metric: *mut git_diff_similarity_metric,
}
pub type git_diff_find_options = Struct_Unnamed23;
// git_diff_format_t: output format selector (sequential values, not flags).
pub type Enum_Unnamed24 = ::libc::c_uint;
pub static GIT_DIFF_FORMAT_PATCH: ::libc::c_uint = 1;
pub static GIT_DIFF_FORMAT_PATCH_HEADER: ::libc::c_uint = 2;
pub static GIT_DIFF_FORMAT_RAW: ::libc::c_uint = 3;
pub static GIT_DIFF_FORMAT_NAME_ONLY: ::libc::c_uint = 4;
pub static GIT_DIFF_FORMAT_NAME_STATUS: ::libc::c_uint = 5;
pub type git_diff_format_t = Enum_Unnamed24;
// git_checkout_strategy_t: bit flags controlling checkout behavior.
pub type Enum_Unnamed25 = ::libc::c_uint;
pub static GIT_CHECKOUT_NONE: ::libc::c_uint = 0;
pub static GIT_CHECKOUT_SAFE: ::libc::c_uint = 1;
pub static GIT_CHECKOUT_SAFE_CREATE: ::libc::c_uint = 2;
pub static GIT_CHECKOUT_FORCE: ::libc::c_uint = 4;
pub static GIT_CHECKOUT_ALLOW_CONFLICTS: ::libc::c_uint = 16;
pub static GIT_CHECKOUT_REMOVE_UNTRACKED: ::libc::c_uint = 32;
pub static GIT_CHECKOUT_REMOVE_IGNORED: ::libc::c_uint = 64;
pub static GIT_CHECKOUT_UPDATE_ONLY: ::libc::c_uint = 128;
pub static GIT_CHECKOUT_DONT_UPDATE_INDEX: ::libc::c_uint = 256;
pub static GIT_CHECKOUT_NO_REFRESH: ::libc::c_uint = 512;
pub static GIT_CHECKOUT_SKIP_UNMERGED: ::libc::c_uint = 1024;
pub static GIT_CHECKOUT_USE_OURS: ::libc::c_uint = 2048;
pub static GIT_CHECKOUT_USE_THEIRS: ::libc::c_uint = 4096;
pub static GIT_CHECKOUT_DISABLE_PATHSPEC_MATCH: ::libc::c_uint = 8192;
pub static GIT_CHECKOUT_SKIP_LOCKED_DIRECTORIES: ::libc::c_uint = 262144;
pub static GIT_CHECKOUT_UPDATE_SUBMODULES: ::libc::c_uint = 65536;
pub static GIT_CHECKOUT_UPDATE_SUBMODULES_IF_CHANGED: ::libc::c_uint = 131072;
pub type git_checkout_strategy_t = Enum_Unnamed25;
// git_checkout_notify_t: bit flags selecting which checkout events to report.
pub type Enum_Unnamed26 = ::libc::c_uint;
pub static GIT_CHECKOUT_NOTIFY_NONE: ::libc::c_uint = 0;
pub static GIT_CHECKOUT_NOTIFY_CONFLICT: ::libc::c_uint = 1;
pub static GIT_CHECKOUT_NOTIFY_DIRTY: ::libc::c_uint = 2;
pub static GIT_CHECKOUT_NOTIFY_UPDATED: ::libc::c_uint = 4;
pub static GIT_CHECKOUT_NOTIFY_UNTRACKED: ::libc::c_uint = 8;
pub static GIT_CHECKOUT_NOTIFY_IGNORED: ::libc::c_uint = 16;
pub static GIT_CHECKOUT_NOTIFY_ALL: ::libc::c_uint = 65535;
pub type git_checkout_notify_t = Enum_Unnamed26;
// Checkout notification callback: event kind, path, then the baseline /
// target / workdir file descriptions and the caller payload.
pub type git_checkout_notify_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: git_checkout_notify_t,
                               arg2: *const ::libc::c_char,
                               arg3: *const git_diff_file,
                               arg4: *const git_diff_file,
                               arg5: *const git_diff_file,
                               arg6: *mut ::libc::c_void) -> ::libc::c_int>;
// Checkout progress callback (path, completed steps, total steps, payload);
// returns nothing.
pub type git_checkout_progress_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char, arg2: size_t,
                               arg3: size_t, arg4: *mut ::libc::c_void)>;
// C-layout mirror of libgit2's checkout options struct (versioned).
#[repr(C)]
pub struct Struct_git_checkout_opts {
    pub version: ::libc::c_uint,
    pub checkout_strategy: ::libc::c_uint,
    pub disable_filters: ::libc::c_int,
    pub dir_mode: ::libc::c_uint,
    pub file_mode: ::libc::c_uint,
    pub file_open_flags: ::libc::c_int,
    pub notify_flags: ::libc::c_uint,
    pub notify_cb: git_checkout_notify_cb,
    pub notify_payload: *mut ::libc::c_void,
    pub progress_cb: git_checkout_progress_cb,
    pub progress_payload: *mut ::libc::c_void,
    pub paths: git_strarray,
    pub baseline: *mut git_tree,
    pub target_directory: *const ::libc::c_char,
    pub our_label: *const ::libc::c_char,
    pub their_label: *const ::libc::c_char,
}
pub type git_checkout_opts = Struct_git_checkout_opts;
// Opaque handle: fieldless enum used as an uninstantiable FFI type.
pub enum Struct_git_indexer { }
pub type git_indexer = Struct_git_indexer;
// git_repository_open_flag_t: bit flags for repository-open behavior.
pub type Enum_Unnamed27 = ::libc::c_uint;
pub static GIT_REPOSITORY_OPEN_NO_SEARCH: ::libc::c_uint = 1;
pub static GIT_REPOSITORY_OPEN_CROSS_FS: ::libc::c_uint = 2;
pub static GIT_REPOSITORY_OPEN_BARE: ::libc::c_uint = 4;
pub type git_repository_open_flag_t = Enum_Unnamed27;
// git_repository_init_flag_t: bit flags for repository initialization.
pub type Enum_Unnamed28 = ::libc::c_uint;
pub static GIT_REPOSITORY_INIT_BARE: ::libc::c_uint = 1;
pub static GIT_REPOSITORY_INIT_NO_REINIT: ::libc::c_uint = 2;
pub static GIT_REPOSITORY_INIT_NO_DOTGIT_DIR: ::libc::c_uint = 4;
pub static GIT_REPOSITORY_INIT_MKDIR: ::libc::c_uint = 8;
pub static GIT_REPOSITORY_INIT_MKPATH: ::libc::c_uint = 16;
pub static GIT_REPOSITORY_INIT_EXTERNAL_TEMPLATE: ::libc::c_uint = 32;
pub type git_repository_init_flag_t = Enum_Unnamed28;
// git_repository_init_mode_t: shared-repository permission presets
// (1533 == 0o2775, 1535 == 0o2777).
pub type Enum_Unnamed29 = ::libc::c_uint;
pub static GIT_REPOSITORY_INIT_SHARED_UMASK: ::libc::c_uint = 0;
pub static GIT_REPOSITORY_INIT_SHARED_GROUP: ::libc::c_uint = 1533;
pub static GIT_REPOSITORY_INIT_SHARED_ALL: ::libc::c_uint = 1535;
pub type git_repository_init_mode_t = Enum_Unnamed29;
// C-layout mirror of libgit2's `git_repository_init_options` (versioned).
#[repr(C)]
pub struct Struct_Unnamed30 {
    pub version: ::libc::c_uint,
    pub flags: u32,
    pub mode: u32,
    pub workdir_path: *const ::libc::c_char,
    pub description: *const ::libc::c_char,
    pub template_path: *const ::libc::c_char,
    pub initial_head: *const ::libc::c_char,
    pub origin_url: *const ::libc::c_char,
}
pub type git_repository_init_options = Struct_Unnamed30;
// FETCH_HEAD iteration callback (ref name, remote URL, oid, is-merge flag,
// payload).
pub type git_repository_fetchhead_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char,
                               arg2: *const ::libc::c_char,
                               arg3: *const git_oid, arg4: ::libc::c_uint,
                               arg5: *mut ::libc::c_void) -> ::libc::c_int>;
// MERGE_HEAD iteration callback (oid, payload).
pub type git_repository_mergehead_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const git_oid,
                               arg2: *mut ::libc::c_void) -> ::libc::c_int>;
// git_repository_state_t: sequential in-progress-operation states.
pub type Enum_Unnamed31 = ::libc::c_uint;
pub static GIT_REPOSITORY_STATE_NONE: ::libc::c_uint = 0;
pub static GIT_REPOSITORY_STATE_MERGE: ::libc::c_uint = 1;
pub static GIT_REPOSITORY_STATE_REVERT: ::libc::c_uint = 2;
pub static GIT_REPOSITORY_STATE_CHERRY_PICK: ::libc::c_uint = 3;
pub static GIT_REPOSITORY_STATE_BISECT: ::libc::c_uint = 4;
pub static GIT_REPOSITORY_STATE_REBASE: ::libc::c_uint = 5;
pub static GIT_REPOSITORY_STATE_REBASE_INTERACTIVE: ::libc::c_uint = 6;
pub static GIT_REPOSITORY_STATE_REBASE_MERGE: ::libc::c_uint = 7;
pub static GIT_REPOSITORY_STATE_APPLY_MAILBOX: ::libc::c_uint = 8;
pub static GIT_REPOSITORY_STATE_APPLY_MAILBOX_OR_REBASE: ::libc::c_uint = 9;
pub type git_repository_state_t = Enum_Unnamed31;
// git_direction: fetch vs. push.
pub type Enum_Unnamed32 = ::libc::c_uint;
pub static GIT_DIRECTION_FETCH: ::libc::c_uint = 0;
pub static GIT_DIRECTION_PUSH: ::libc::c_uint = 1;
pub type git_direction = Enum_Unnamed32;
// C-layout mirror of libgit2's `git_remote_head`: a ref advertised by a
// remote (remote oid and local oid `loid`).
#[repr(C)]
pub struct Struct_git_remote_head {
    pub local: ::libc::c_int,
    pub oid: git_oid,
    pub loid: git_oid,
    pub name: *mut ::libc::c_char,
}
// Callback invoked per advertised remote head.
pub type git_headlist_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *mut git_remote_head,
                               arg2: *mut ::libc::c_void) -> ::libc::c_int>;
// git_credtype_t: bit flags describing supported credential kinds.
pub type Enum_Unnamed33 = ::libc::c_uint;
pub static GIT_CREDTYPE_USERPASS_PLAINTEXT: ::libc::c_uint = 1;
pub static GIT_CREDTYPE_SSH_KEY: ::libc::c_uint = 2;
pub static GIT_CREDTYPE_SSH_CUSTOM: ::libc::c_uint = 4;
pub static GIT_CREDTYPE_DEFAULT: ::libc::c_uint = 8;
pub type git_credtype_t = Enum_Unnamed33;
pub type git_cred = Struct_git_cred;
// Base credential "class": type tag plus a destructor; the concrete cred
// structs below embed this as their first field (`parent`).
#[repr(C)]
pub struct Struct_git_cred {
    pub credtype: git_credtype_t,
    pub free: ::std::option::Option<unsafe extern "C" fn(arg1: *mut git_cred)>,
}
// Plaintext username/password credential.
#[repr(C)]
pub struct Struct_Unnamed34 {
    pub parent: git_cred,
    pub username: *mut ::libc::c_char,
    pub password: *mut ::libc::c_char,
}
pub type git_cred_userpass_plaintext = Struct_Unnamed34;
// Variadic SSH signing callback.
pub type git_cred_sign_callback =
    ::std::option::Option<unsafe extern "C" fn(arg1: *mut ::libc::c_void, ...)
                              -> ::libc::c_int>;
// SSH key-file credential (paths plus optional passphrase).
#[repr(C)]
pub struct Struct_git_cred_ssh_key {
    pub parent: git_cred,
    pub username: *mut ::libc::c_char,
    pub publickey: *mut ::libc::c_char,
    pub privatekey: *mut ::libc::c_char,
    pub passphrase: *mut ::libc::c_char,
}
pub type git_cred_ssh_key = Struct_git_cred_ssh_key;
// SSH credential with a custom signing callback (`sign_callback` stored as a
// raw pointer here).
#[repr(C)]
pub struct Struct_git_cred_ssh_custom {
    pub parent: git_cred,
    pub username: *mut ::libc::c_char,
    pub publickey: *mut ::libc::c_char,
    pub publickey_len: size_t,
    pub sign_callback: *mut ::libc::c_void,
    pub sign_data: *mut ::libc::c_void,
}
pub type git_cred_ssh_custom = Struct_git_cred_ssh_custom;
// "Default" credential carries no extra data beyond the base struct.
pub type git_cred_default = Struct_git_cred;
// Credential acquisition callback: out-cred, URL, username-from-URL,
// allowed credtype bits, payload.
pub type git_cred_acquire_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *mut *mut git_cred,
                               arg2: *const ::libc::c_char,
                               arg3: *const ::libc::c_char,
                               arg4: ::libc::c_uint,
                               arg5: *mut ::libc::c_void) -> ::libc::c_int>;
// git_transport_flags_t.
pub type Enum_Unnamed35 = ::libc::c_uint;
pub static GIT_TRANSPORTFLAGS_NONE: ::libc::c_uint = 0;
pub static GIT_TRANSPORTFLAGS_NO_CHECK_CERT: ::libc::c_uint = 1;
pub type git_transport_flags_t = Enum_Unnamed35;
// Sideband/progress message callback (buffer, length, payload).
pub type git_transport_message_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char,
                               arg2: ::libc::c_int, arg3: *mut ::libc::c_void)
                              -> ::libc::c_int>;
pub type git_transport = Struct_git_transport;
// C-layout mirror of libgit2's `git_transport` vtable: a versioned struct of
// optional function pointers implementing a network transport. Every entry
// takes the transport itself as its first (or, for `ls`, last) argument.
#[repr(C)]
pub struct Struct_git_transport {
    pub version: ::libc::c_uint,
    // Install progress/error message callbacks and their payload.
    pub set_callbacks: ::std::option::Option<unsafe extern "C" fn
                                                 (arg1: *mut git_transport,
                                                  arg2:
                                                      git_transport_message_cb,
                                                  arg3:
                                                      git_transport_message_cb,
                                                  arg4: *mut ::libc::c_void)
                                                 -> ::libc::c_int>,
    // Connect to a URL with a credential callback.
    pub connect: ::std::option::Option<unsafe extern "C" fn
                                           (arg1: *mut git_transport,
                                            arg2: *const ::libc::c_char,
                                            arg3: git_cred_acquire_cb,
                                            arg4: *mut ::libc::c_void,
                                            arg5: ::libc::c_int,
                                            arg6: ::libc::c_int)
                                           -> ::libc::c_int>,
    // List advertised heads: writes an array pointer and its length.
    pub ls: ::std::option::Option<unsafe extern "C" fn
                                      (arg1: *mut *mut *const git_remote_head,
                                       arg2: *mut size_t,
                                       arg3: *mut git_transport)
                                      -> ::libc::c_int>,
    pub push: ::std::option::Option<unsafe extern "C" fn
                                        (arg1: *mut git_transport,
                                         arg2: *mut git_push)
                                        -> ::libc::c_int>,
    // Negotiate which objects to fetch against the given wanted heads.
    pub negotiate_fetch: ::std::option::Option<unsafe extern "C" fn
                                                   (arg1: *mut git_transport,
                                                    arg2: *mut git_repository,
                                                    arg3:
                                                        *const *const git_remote_head,
                                                    arg4: size_t)
                                                   -> ::libc::c_int>,
    // Download the packfile, reporting transfer progress.
    pub download_pack: ::std::option::Option<unsafe extern "C" fn
                                                 (arg1: *mut git_transport,
                                                  arg2: *mut git_repository,
                                                  arg3:
                                                      *mut git_transfer_progress,
                                                  arg4:
                                                      git_transfer_progress_callback,
                                                  arg5: *mut ::libc::c_void)
                                                 -> ::libc::c_int>,
    pub is_connected: ::std::option::Option<unsafe extern "C" fn
                                                (arg1: *mut git_transport)
                                                -> ::libc::c_int>,
    pub read_flags: ::std::option::Option<unsafe extern "C" fn
                                              (arg1: *mut git_transport,
                                               arg2: *mut ::libc::c_int)
                                              -> ::libc::c_int>,
    // `cancel` returns nothing; `close`/`free` tear the transport down.
    pub cancel: ::std::option::Option<unsafe extern "C" fn
                                          (arg1: *mut git_transport)>,
    pub close: ::std::option::Option<unsafe extern "C" fn(arg1: *mut git_transport)
                                         -> ::libc::c_int>,
    pub free: ::std::option::Option<unsafe extern "C" fn(arg1: *mut git_transport)>,
}
// Factory callback producing a transport for a remote.
pub type git_transport_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *mut *mut git_transport,
                               arg2: *mut git_remote,
                               arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// git_smart_service_t: which smart-protocol service is being invoked.
pub type Enum_Unnamed36 = ::libc::c_uint;
pub static GIT_SERVICE_UPLOADPACK_LS: ::libc::c_uint = 1;
pub static GIT_SERVICE_UPLOADPACK: ::libc::c_uint = 2;
pub static GIT_SERVICE_RECEIVEPACK_LS: ::libc::c_uint = 3;
pub static GIT_SERVICE_RECEIVEPACK: ::libc::c_uint = 4;
pub type git_smart_service_t = Enum_Unnamed36;
pub type git_smart_subtransport = Struct_git_smart_subtransport;
pub type git_smart_subtransport_stream = Struct_git_smart_subtransport_stream;
// Stream vtable for a smart subtransport: raw read/write plus free, with a
// back-pointer to the owning subtransport.
#[repr(C)]
pub struct Struct_git_smart_subtransport_stream {
    pub subtransport: *mut git_smart_subtransport,
    pub read: ::std::option::Option<unsafe extern "C" fn
                                        (arg1:
                                             *mut git_smart_subtransport_stream,
                                         arg2: *mut ::libc::c_char,
                                         arg3: size_t, arg4: *mut size_t)
                                        -> ::libc::c_int>,
    pub write: ::std::option::Option<unsafe extern "C" fn
                                         (arg1:
                                              *mut git_smart_subtransport_stream,
                                          arg2: *const ::libc::c_char,
                                          arg3: size_t) -> ::libc::c_int>,
    pub free: ::std::option::Option<unsafe extern "C" fn
                                        (arg1:
                                             *mut git_smart_subtransport_stream)>,
}
// Subtransport vtable: `action` opens a stream for a URL + service pair.
#[repr(C)]
pub struct Struct_git_smart_subtransport {
    pub action: ::std::option::Option<unsafe extern "C" fn
                                          (arg1:
                                               *mut *mut git_smart_subtransport_stream,
                                           arg2: *mut git_smart_subtransport,
                                           arg3: *const ::libc::c_char,
                                           arg4: git_smart_service_t)
                                          -> ::libc::c_int>,
    pub close: ::std::option::Option<unsafe extern "C" fn
                                         (arg1: *mut git_smart_subtransport)
                                         -> ::libc::c_int>,
    pub free: ::std::option::Option<unsafe extern "C" fn
                                        (arg1: *mut git_smart_subtransport)>,
}
// Factory callback producing a smart subtransport for a transport.
pub type git_smart_subtransport_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *mut *mut git_smart_subtransport,
                               arg2: *mut git_transport) -> ::libc::c_int>;
// Pairs the factory with an rpc flag.
#[repr(C)]
pub struct Struct_git_smart_subtransport_definition {
    pub callback: git_smart_subtransport_cb,
    pub rpc: ::libc::c_uint,
}
pub type git_smart_subtransport_definition =
    Struct_git_smart_subtransport_definition;
// Callback invoked for each refspec that cannot be renamed.
pub type git_remote_rename_problem_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char,
                               arg2: *mut ::libc::c_void) -> ::libc::c_int>;
// Remote-operation completion phases.
pub type Enum_git_remote_completion_type = ::libc::c_uint;
pub static GIT_REMOTE_COMPLETION_DOWNLOAD: ::libc::c_uint = 0;
pub static GIT_REMOTE_COMPLETION_INDEXING: ::libc::c_uint = 1;
pub static GIT_REMOTE_COMPLETION_ERROR: ::libc::c_uint = 2;
pub type git_remote_completion_type = Enum_git_remote_completion_type;
// C-layout mirror of libgit2's `git_remote_callbacks` (versioned): optional
// progress/completion/credential/transfer/update-tips callbacks sharing one
// `payload` pointer.
#[repr(C)]
pub struct Struct_git_remote_callbacks {
    pub version: ::libc::c_uint,
    pub progress: ::std::option::Option<unsafe extern "C" fn
                                            (arg1: *const ::libc::c_char,
                                             arg2: ::libc::c_int,
                                             arg3: *mut ::libc::c_void)
                                            -> ::libc::c_int>,
    pub completion: ::std::option::Option<unsafe extern "C" fn
                                              (arg1:
                                                   git_remote_completion_type,
                                               arg2: *mut ::libc::c_void)
                                              -> ::libc::c_int>,
    pub credentials: ::std::option::Option<unsafe extern "C" fn
                                               (arg1: *mut *mut git_cred,
                                                arg2: *const ::libc::c_char,
                                                arg3: *const ::libc::c_char,
                                                arg4: ::libc::c_uint,
                                                arg5: *mut ::libc::c_void)
                                               -> ::libc::c_int>,
    pub transfer_progress: ::std::option::Option<unsafe extern "C" fn
                                                     (arg1:
                                                          *const git_transfer_progress,
                                                      arg2:
                                                          *mut ::libc::c_void)
                                                     -> ::libc::c_int>,
    pub update_tips: ::std::option::Option<unsafe extern "C" fn
                                               (arg1: *const ::libc::c_char,
                                                arg2: *const git_oid,
                                                arg3: *const git_oid,
                                                arg4: *mut ::libc::c_void)
                                               -> ::libc::c_int>,
    pub payload: *mut ::libc::c_void,
}
// git_remote_autotag_option_t: tag-download policy when fetching.
pub type Enum_Unnamed37 = ::libc::c_uint;
pub static GIT_REMOTE_DOWNLOAD_TAGS_AUTO: ::libc::c_uint = 0;
pub static GIT_REMOTE_DOWNLOAD_TAGS_NONE: ::libc::c_uint = 1;
pub static GIT_REMOTE_DOWNLOAD_TAGS_ALL: ::libc::c_uint = 2;
pub type git_remote_autotag_option_t = Enum_Unnamed37;
// C-layout mirror of libgit2's `git_clone_options` (versioned); embeds
// checkout options and remote callbacks by value.
#[repr(C)]
pub struct Struct_git_clone_options {
    pub version: ::libc::c_uint,
    pub checkout_opts: git_checkout_opts,
    pub remote_callbacks: git_remote_callbacks,
    pub bare: ::libc::c_int,
    pub ignore_cert_errors: ::libc::c_int,
    pub remote_name: *const ::libc::c_char,
    pub checkout_branch: *const ::libc::c_char,
}
pub type git_clone_options = Struct_git_clone_options;
// git_config_level_t: config file priority levels; signed because
// GIT_CONFIG_HIGHEST_LEVEL is -1.
pub type Enum_Unnamed38 = ::libc::c_int;
pub static GIT_CONFIG_LEVEL_SYSTEM: ::libc::c_int = 1;
pub static GIT_CONFIG_LEVEL_XDG: ::libc::c_int = 2;
pub static GIT_CONFIG_LEVEL_GLOBAL: ::libc::c_int = 3;
pub static GIT_CONFIG_LEVEL_LOCAL: ::libc::c_int = 4;
pub static GIT_CONFIG_LEVEL_APP: ::libc::c_int = 5;
pub static GIT_CONFIG_HIGHEST_LEVEL: ::libc::c_int = -1;
pub type git_config_level_t = Enum_Unnamed38;
// One config entry: name/value strings plus the level it came from.
#[repr(C)]
pub struct Struct_Unnamed39 {
    pub name: *const ::libc::c_char,
    pub value: *const ::libc::c_char,
    pub level: git_config_level_t,
}
pub type git_config_entry = Struct_Unnamed39;
// Config iteration callback (entry, payload).
pub type git_config_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const git_config_entry,
                               arg2: *mut ::libc::c_void) -> ::libc::c_int>;
// Opaque handle type.
pub enum Struct_git_config_iterator { }
pub type git_config_iterator = Struct_git_config_iterator;
// git_cvar_t: value kinds for config-variable mapping.
pub type Enum_Unnamed40 = ::libc::c_uint;
pub static GIT_CVAR_FALSE: ::libc::c_uint = 0;
pub static GIT_CVAR_TRUE: ::libc::c_uint = 1;
pub static GIT_CVAR_INT32: ::libc::c_uint = 2;
pub static GIT_CVAR_STRING: ::libc::c_uint = 3;
pub type git_cvar_t = Enum_Unnamed40;
// One mapping entry from a config string to an integer value.
#[repr(C)]
pub struct Struct_Unnamed41 {
    pub cvar_type: git_cvar_t,
    pub str_match: *const ::libc::c_char,
    pub map_value: ::libc::c_int,
}
pub type git_cvar_map = Struct_Unnamed41;
// git_error_code: return codes; 0 is success, negatives are errors.
pub type Enum_Unnamed42 = ::libc::c_int;
pub static GIT_OK: ::libc::c_int = 0;
pub static GIT_ERROR: ::libc::c_int = -1;
pub static GIT_ENOTFOUND: ::libc::c_int = -3;
pub static GIT_EEXISTS: ::libc::c_int = -4;
pub static GIT_EAMBIGUOUS: ::libc::c_int = -5;
pub static GIT_EBUFS: ::libc::c_int = -6;
pub static GIT_EUSER: ::libc::c_int = -7;
pub static GIT_EBAREREPO: ::libc::c_int = -8;
pub static GIT_EUNBORNBRANCH: ::libc::c_int = -9;
pub static GIT_EUNMERGED: ::libc::c_int = -10;
pub static GIT_ENONFASTFORWARD: ::libc::c_int = -11;
pub static GIT_EINVALIDSPEC: ::libc::c_int = -12;
pub static GIT_EMERGECONFLICT: ::libc::c_int = -13;
pub static GIT_ELOCKED: ::libc::c_int = -14;
pub static GIT_PASSTHROUGH: ::libc::c_int = -30;
pub static GIT_ITEROVER: ::libc::c_int = -31;
pub type git_error_code = Enum_Unnamed42;
// Last-error record: message string plus an error class (`klass`, see the
// GITERR_* constants below).
#[repr(C)]
pub struct Struct_Unnamed43 {
    pub message: *mut ::libc::c_char,
    pub klass: ::libc::c_int,
}
pub type git_error = Struct_Unnamed43;
// git_error_t: error classes (sequential, not flags).
pub type Enum_Unnamed44 = ::libc::c_uint;
pub static GITERR_NONE: ::libc::c_uint = 0;
pub static GITERR_NOMEMORY: ::libc::c_uint = 1;
pub static GITERR_OS: ::libc::c_uint = 2;
pub static GITERR_INVALID: ::libc::c_uint = 3;
pub static GITERR_REFERENCE: ::libc::c_uint = 4;
pub static GITERR_ZLIB: ::libc::c_uint = 5;
pub static GITERR_REPOSITORY: ::libc::c_uint = 6;
pub static GITERR_CONFIG: ::libc::c_uint = 7;
pub static GITERR_REGEX: ::libc::c_uint = 8;
pub static GITERR_ODB: ::libc::c_uint = 9;
pub static GITERR_INDEX: ::libc::c_uint = 10;
pub static GITERR_OBJECT: ::libc::c_uint = 11;
pub static GITERR_NET: ::libc::c_uint = 12;
pub static GITERR_TAG: ::libc::c_uint = 13;
pub static GITERR_TREE: ::libc::c_uint = 14;
pub static GITERR_INDEXER: ::libc::c_uint = 15;
pub static GITERR_SSL: ::libc::c_uint = 16;
pub static GITERR_SUBMODULE: ::libc::c_uint = 17;
pub static GITERR_THREAD: ::libc::c_uint = 18;
pub static GITERR_STASH: ::libc::c_uint = 19;
pub static GITERR_CHECKOUT: ::libc::c_uint = 20;
pub static GITERR_FETCHHEAD: ::libc::c_uint = 21;
pub static GITERR_MERGE: ::libc::c_uint = 22;
pub static GITERR_SSH: ::libc::c_uint = 23;
pub static GITERR_FILTER: ::libc::c_uint = 24;
pub type git_error_t = Enum_Unnamed44;
// git_filter_mode_t: direction of filtering; SMUDGE/TO_WORKTREE and
// CLEAN/TO_ODB are aliases with the same values.
pub type Enum_Unnamed45 = ::libc::c_uint;
pub static GIT_FILTER_TO_WORKTREE: ::libc::c_uint = 0;
pub static GIT_FILTER_SMUDGE: ::libc::c_uint = 0;
pub static GIT_FILTER_TO_ODB: ::libc::c_uint = 1;
pub static GIT_FILTER_CLEAN: ::libc::c_uint = 1;
pub type git_filter_mode_t = Enum_Unnamed45;
// Opaque handle types.
pub enum Struct_git_filter { }
pub type git_filter = Struct_git_filter;
pub enum Struct_git_filter_list { }
pub type git_filter_list = Struct_git_filter_list;
// Index timestamp: seconds plus nanoseconds.
#[repr(C)]
pub struct Struct_Unnamed46 {
    pub seconds: git_time_t,
    pub nanoseconds: ::libc::c_uint,
}
pub type git_index_time = Struct_Unnamed46;
// C-layout mirror of libgit2's `git_index_entry`: stat-cache data, object
// id, flag words and path for one index slot.
#[repr(C)]
pub struct Struct_git_index_entry {
    pub ctime: git_index_time,
    pub mtime: git_index_time,
    pub dev: ::libc::c_uint,
    pub ino: ::libc::c_uint,
    pub mode: ::libc::c_uint,
    pub uid: ::libc::c_uint,
    pub gid: ::libc::c_uint,
    pub file_size: git_off_t,
    pub oid: git_oid,
    pub flags: ::libc::c_ushort,
    pub flags_extended: ::libc::c_ushort,
    pub path: *mut ::libc::c_char,
}
pub type git_index_entry = Struct_git_index_entry;
// git_indexcap_t: index capability bit flags.
pub type Enum_Unnamed47 = ::libc::c_uint;
pub static GIT_INDEXCAP_IGNORE_CASE: ::libc::c_uint = 1;
pub static GIT_INDEXCAP_NO_FILEMODE: ::libc::c_uint = 2;
pub static GIT_INDEXCAP_NO_SYMLINKS: ::libc::c_uint = 4;
// libgit2 defines GIT_INDEXCAP_FROM_OWNER as (int)-1, i.e. all bits set when
// viewed as unsigned. The original `= -1` does not typecheck for an unsigned
// Rust type; `!0` yields the same all-ones bit pattern.
pub static GIT_INDEXCAP_FROM_OWNER: ::libc::c_uint = !0;
pub type git_indexcap_t = Enum_Unnamed47;
// Callback invoked for each index path matched by a pathspec.
pub type git_index_matched_path_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char,
                               arg2: *const ::libc::c_char,
                               arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// git_index_add_option_t: bit flags for index add-all operations.
pub type Enum_Unnamed48 = ::libc::c_uint;
pub static GIT_INDEX_ADD_DEFAULT: ::libc::c_uint = 0;
pub static GIT_INDEX_ADD_FORCE: ::libc::c_uint = 1;
pub static GIT_INDEX_ADD_DISABLE_PATHSPEC_MATCH: ::libc::c_uint = 2;
pub static GIT_INDEX_ADD_CHECK_PATHSPEC: ::libc::c_uint = 4;
pub type git_index_add_option_t = Enum_Unnamed48;
// git_merge_tree_flag_t.
pub type Enum_Unnamed49 = ::libc::c_uint;
pub static GIT_MERGE_TREE_FIND_RENAMES: ::libc::c_uint = 1;
pub type git_merge_tree_flag_t = Enum_Unnamed49;
// git_merge_automerge_flags: automerge strategy selector.
pub type Enum_Unnamed50 = ::libc::c_uint;
pub static GIT_MERGE_AUTOMERGE_NORMAL: ::libc::c_uint = 0;
pub static GIT_MERGE_AUTOMERGE_NONE: ::libc::c_uint = 1;
pub static GIT_MERGE_AUTOMERGE_FAVOR_OURS: ::libc::c_uint = 2;
pub static GIT_MERGE_AUTOMERGE_FAVOR_THEIRS: ::libc::c_uint = 3;
pub type git_merge_automerge_flags = Enum_Unnamed50;
// C-layout mirror of libgit2's merge-tree options (versioned).
#[repr(C)]
pub struct Struct_Unnamed51 {
    pub version: ::libc::c_uint,
    pub flags: git_merge_tree_flag_t,
    pub rename_threshold: ::libc::c_uint,
    pub target_limit: ::libc::c_uint,
    pub metric: *mut git_diff_similarity_metric,
    pub automerge_flags: git_merge_automerge_flags,
}
pub type git_merge_tree_opts = Struct_Unnamed51;
// git_merge_flags_t: fast-forward policy bits.
pub type Enum_Unnamed52 = ::libc::c_uint;
pub static GIT_MERGE_NO_FASTFORWARD: ::libc::c_uint = 1;
pub static GIT_MERGE_FASTFORWARD_ONLY: ::libc::c_uint = 2;
pub type git_merge_flags_t = Enum_Unnamed52;
// Top-level merge options; embeds tree-merge and checkout options by value.
#[repr(C)]
pub struct Struct_Unnamed53 {
    pub version: ::libc::c_uint,
    pub merge_flags: git_merge_flags_t,
    pub merge_tree_opts: git_merge_tree_opts,
    pub checkout_opts: git_checkout_opts,
}
pub type git_merge_opts = Struct_Unnamed53;
// Notes iteration callback (note blob oid, annotated object oid, payload).
pub type git_note_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const git_oid, arg2: *const git_oid,
                               arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// Opaque iterator handle shared with the notes API.
pub enum Struct_git_iterator { }
pub type git_note_iterator = Struct_git_iterator;
// Object-database iteration callback (oid, payload).
pub type git_odb_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const git_oid,
                               arg2: *mut ::libc::c_void) -> ::libc::c_int>;
// git_packbuilder_stage_t: packbuilder progress phases.
pub type Enum_Unnamed54 = ::libc::c_uint;
pub static GIT_PACKBUILDER_ADDING_OBJECTS: ::libc::c_uint = 0;
pub static GIT_PACKBUILDER_DELTAFICATION: ::libc::c_uint = 1;
pub type git_packbuilder_stage_t = Enum_Unnamed54;
// Per-packed-object callback (object buffer, size, payload).
pub type git_packbuilder_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *mut ::libc::c_void, arg2: size_t,
                               arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// Packbuilder progress callback (stage, current, total, payload).
pub type git_packbuilder_progress =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: ::libc::c_int, arg2: ::libc::c_uint,
                               arg3: ::libc::c_uint,
                               arg4: *mut ::libc::c_void) -> ::libc::c_int>;
// Opaque handle types.
pub enum Struct_git_patch { }
pub type git_patch = Struct_git_patch;
pub enum Struct_git_pathspec { }
pub type git_pathspec = Struct_git_pathspec;
pub enum Struct_git_pathspec_match_list { }
pub type git_pathspec_match_list = Struct_git_pathspec_match_list;
// git_pathspec_flag_t: bit flags for pathspec matching.
pub type Enum_Unnamed55 = ::libc::c_uint;
pub static GIT_PATHSPEC_DEFAULT: ::libc::c_uint = 0;
pub static GIT_PATHSPEC_IGNORE_CASE: ::libc::c_uint = 1;
pub static GIT_PATHSPEC_USE_CASE: ::libc::c_uint = 2;
pub static GIT_PATHSPEC_NO_GLOB: ::libc::c_uint = 4;
pub static GIT_PATHSPEC_NO_MATCH_ERROR: ::libc::c_uint = 8;
pub static GIT_PATHSPEC_FIND_FAILURES: ::libc::c_uint = 16;
pub static GIT_PATHSPEC_FAILURES_ONLY: ::libc::c_uint = 32;
pub type git_pathspec_flag_t = Enum_Unnamed55;
// Push options (versioned): parallelism for packbuilding.
#[repr(C)]
pub struct Struct_Unnamed56 {
    pub version: ::libc::c_uint,
    pub pb_parallelism: ::libc::c_uint,
}
pub type git_push_options = Struct_Unnamed56;
// Push transfer-progress callback (current, total, bytes, payload).
pub type git_push_transfer_progress =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: ::libc::c_uint, arg2: ::libc::c_uint,
                               arg3: size_t, arg4: *mut ::libc::c_void)
                              -> ::libc::c_int>;
// git_reset_t: soft / mixed / hard reset.
pub type Enum_Unnamed57 = ::libc::c_uint;
pub static GIT_RESET_SOFT: ::libc::c_uint = 1;
pub static GIT_RESET_MIXED: ::libc::c_uint = 2;
pub static GIT_RESET_HARD: ::libc::c_uint = 3;
pub type git_reset_t = Enum_Unnamed57;
// git_revparse_mode_t: bit flags describing a parsed revspec.
pub type Enum_Unnamed58 = ::libc::c_uint;
pub static GIT_REVPARSE_SINGLE: ::libc::c_uint = 1;
pub static GIT_REVPARSE_RANGE: ::libc::c_uint = 2;
pub static GIT_REVPARSE_MERGE_BASE: ::libc::c_uint = 4;
pub type git_revparse_mode_t = Enum_Unnamed58;
// Parsed revspec: endpoint objects plus the GIT_REVPARSE_* flags.
#[repr(C)]
pub struct Struct_Unnamed59 {
    pub from: *mut git_object,
    pub to: *mut git_object,
    pub flags: ::libc::c_uint,
}
pub type git_revspec = Struct_Unnamed59;
// git_stash_flags: bit flags for stash creation.
pub type Enum_Unnamed60 = ::libc::c_uint;
pub static GIT_STASH_DEFAULT: ::libc::c_uint = 0;
pub static GIT_STASH_KEEP_INDEX: ::libc::c_uint = 1;
pub static GIT_STASH_INCLUDE_UNTRACKED: ::libc::c_uint = 2;
pub static GIT_STASH_INCLUDE_IGNORED: ::libc::c_uint = 4;
pub type git_stash_flags = Enum_Unnamed60;
// Stash iteration callback (index, message, stash commit oid, payload).
pub type git_stash_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: size_t, arg2: *const ::libc::c_char,
                               arg3: *const git_oid,
                               arg4: *mut ::libc::c_void) -> ::libc::c_int>;
// git_status_t: per-file status bit flags (index vs. worktree).
pub type Enum_Unnamed61 = ::libc::c_uint;
pub static GIT_STATUS_CURRENT: ::libc::c_uint = 0;
pub static GIT_STATUS_INDEX_NEW: ::libc::c_uint = 1;
pub static GIT_STATUS_INDEX_MODIFIED: ::libc::c_uint = 2;
pub static GIT_STATUS_INDEX_DELETED: ::libc::c_uint = 4;
pub static GIT_STATUS_INDEX_RENAMED: ::libc::c_uint = 8;
pub static GIT_STATUS_INDEX_TYPECHANGE: ::libc::c_uint = 16;
pub static GIT_STATUS_WT_NEW: ::libc::c_uint = 128;
pub static GIT_STATUS_WT_MODIFIED: ::libc::c_uint = 256;
pub static GIT_STATUS_WT_DELETED: ::libc::c_uint = 512;
pub static GIT_STATUS_WT_TYPECHANGE: ::libc::c_uint = 1024;
pub static GIT_STATUS_WT_RENAMED: ::libc::c_uint = 2048;
pub static GIT_STATUS_IGNORED: ::libc::c_uint = 16384;
pub type git_status_t = Enum_Unnamed61;
// Status iteration callback (path, GIT_STATUS_* bits, payload).
pub type git_status_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char,
                               arg2: ::libc::c_uint,
                               arg3: *mut ::libc::c_void) -> ::libc::c_int>;
// git_status_show_t: which sides of the status to report.
pub type Enum_Unnamed62 = ::libc::c_uint;
pub static GIT_STATUS_SHOW_INDEX_AND_WORKDIR: ::libc::c_uint = 0;
pub static GIT_STATUS_SHOW_INDEX_ONLY: ::libc::c_uint = 1;
pub static GIT_STATUS_SHOW_WORKDIR_ONLY: ::libc::c_uint = 2;
pub type git_status_show_t = Enum_Unnamed62;
// git_status_opt_t: bit flags tuning a status run.
pub type Enum_Unnamed63 = ::libc::c_uint;
pub static GIT_STATUS_OPT_INCLUDE_UNTRACKED: ::libc::c_uint = 1;
pub static GIT_STATUS_OPT_INCLUDE_IGNORED: ::libc::c_uint = 2;
pub static GIT_STATUS_OPT_INCLUDE_UNMODIFIED: ::libc::c_uint = 4;
pub static GIT_STATUS_OPT_EXCLUDE_SUBMODULES: ::libc::c_uint = 8;
pub static GIT_STATUS_OPT_RECURSE_UNTRACKED_DIRS: ::libc::c_uint = 16;
pub static GIT_STATUS_OPT_DISABLE_PATHSPEC_MATCH: ::libc::c_uint = 32;
pub static GIT_STATUS_OPT_RECURSE_IGNORED_DIRS: ::libc::c_uint = 64;
pub static GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX: ::libc::c_uint = 128;
pub static GIT_STATUS_OPT_RENAMES_INDEX_TO_WORKDIR: ::libc::c_uint = 256;
pub static GIT_STATUS_OPT_SORT_CASE_SENSITIVELY: ::libc::c_uint = 512;
pub static GIT_STATUS_OPT_SORT_CASE_INSENSITIVELY: ::libc::c_uint = 1024;
pub static GIT_STATUS_OPT_RENAMES_FROM_REWRITES: ::libc::c_uint = 2048;
pub static GIT_STATUS_OPT_NO_REFRESH: ::libc::c_uint = 4096;
pub type git_status_opt_t = Enum_Unnamed63;
// Status options (versioned): show selector, option bits and pathspec.
#[repr(C)]
pub struct Struct_Unnamed64 {
    pub version: ::libc::c_uint,
    pub show: git_status_show_t,
    pub flags: ::libc::c_uint,
    pub pathspec: git_strarray,
}
pub type git_status_options = Struct_Unnamed64;
// One status result: flags plus the head→index and index→workdir deltas
// (either pointer may be null).
#[repr(C)]
pub struct Struct_Unnamed65 {
    pub status: git_status_t,
    pub head_to_index: *mut git_diff_delta,
    pub index_to_workdir: *mut git_diff_delta,
}
pub type git_status_entry = Struct_Unnamed65;
// git_submodule_status_t: bit flags describing a submodule's state.
pub type Enum_Unnamed66 = ::libc::c_uint;
pub static GIT_SUBMODULE_STATUS_IN_HEAD: ::libc::c_uint = 1;
pub static GIT_SUBMODULE_STATUS_IN_INDEX: ::libc::c_uint = 2;
pub static GIT_SUBMODULE_STATUS_IN_CONFIG: ::libc::c_uint = 4;
pub static GIT_SUBMODULE_STATUS_IN_WD: ::libc::c_uint = 8;
pub static GIT_SUBMODULE_STATUS_INDEX_ADDED: ::libc::c_uint = 16;
pub static GIT_SUBMODULE_STATUS_INDEX_DELETED: ::libc::c_uint = 32;
pub static GIT_SUBMODULE_STATUS_INDEX_MODIFIED: ::libc::c_uint = 64;
pub static GIT_SUBMODULE_STATUS_WD_UNINITIALIZED: ::libc::c_uint = 128;
pub static GIT_SUBMODULE_STATUS_WD_ADDED: ::libc::c_uint = 256;
pub static GIT_SUBMODULE_STATUS_WD_DELETED: ::libc::c_uint = 512;
pub static GIT_SUBMODULE_STATUS_WD_MODIFIED: ::libc::c_uint = 1024;
pub static GIT_SUBMODULE_STATUS_WD_INDEX_MODIFIED: ::libc::c_uint = 2048;
pub static GIT_SUBMODULE_STATUS_WD_WD_MODIFIED: ::libc::c_uint = 4096;
pub static GIT_SUBMODULE_STATUS_WD_UNTRACKED: ::libc::c_uint = 8192;
pub type git_submodule_status_t = Enum_Unnamed66;
// Tag iteration callback (tag name, tag oid, payload).
pub type git_tag_foreach_cb =
    ::std::option::Option<unsafe extern "C" fn
                              (arg1: *const ::libc::c_char,
                               arg2: *mut git_oid, arg3: *mut ::libc::c_void)
                              -> ::libc::c_int>;
#[link(name = "git2")]
extern "C" {
pub fn git_libgit2_version(major: *mut ::libc::c_int,
minor: *mut ::libc::c_int,
rev: *mut ::libc::c_int);
pub fn git_libgit2_capabilities() -> ::libc::c_int;
pub fn git_libgit2_opts(option: ::libc::c_int, ...) -> ::libc::c_int;
pub fn git_attr_value(attr: *const ::libc::c_char) -> git_attr_t;
pub fn git_attr_get(value_out: *mut *const ::libc::c_char,
repo: *mut git_repository, flags: u32,
path: *const ::libc::c_char,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_attr_get_many(values_out: *mut *const ::libc::c_char,
repo: *mut git_repository, flags: u32,
path: *const ::libc::c_char, num_attr: size_t,
names: *mut *const ::libc::c_char) ->
::libc::c_int;
pub fn git_attr_foreach(repo: *mut git_repository, flags: u32,
path: *const ::libc::c_char,
callback: git_attr_foreach_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_attr_cache_flush(repo: *mut git_repository);
pub fn git_attr_add_macro(repo: *mut git_repository,
name: *const ::libc::c_char,
values: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_oid_fromstr(out: *mut git_oid, str: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_oid_fromstrp(out: *mut git_oid, str: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_oid_fromstrn(out: *mut git_oid, str: *const ::libc::c_char,
length: size_t) -> ::libc::c_int;
pub fn git_oid_fromraw(out: *mut git_oid, raw: *const ::libc::c_uchar);
pub fn git_oid_fmt(out: *mut ::libc::c_char, id: *const git_oid);
pub fn git_oid_nfmt(out: *mut ::libc::c_char, n: size_t,
id: *const git_oid);
pub fn git_oid_pathfmt(out: *mut ::libc::c_char, id: *const git_oid);
pub fn git_oid_allocfmt(id: *const git_oid) -> *mut ::libc::c_char;
pub fn git_oid_tostr(out: *mut ::libc::c_char, n: size_t,
id: *const git_oid) -> *mut ::libc::c_char;
pub fn git_oid_cpy(out: *mut git_oid, src: *const git_oid);
pub fn git_oid_cmp(a: *const git_oid, b: *const git_oid) -> ::libc::c_int;
pub fn git_oid_ncmp(a: *const git_oid, b: *const git_oid, len: size_t) ->
::libc::c_int;
pub fn git_oid_streq(id: *const git_oid, str: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_oid_strcmp(id: *const git_oid, str: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_oid_iszero(id: *const git_oid) -> ::libc::c_int;
pub fn git_oid_shorten_new(min_length: size_t) -> *mut git_oid_shorten;
pub fn git_oid_shorten_add(os: *mut git_oid_shorten,
text_id: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_oid_shorten_free(os: *mut git_oid_shorten);
pub fn git_object_lookup(object: *mut *mut git_object,
repo: *mut git_repository, id: *const git_oid,
_type: git_otype) -> ::libc::c_int;
pub fn git_object_lookup_prefix(object_out: *mut *mut git_object,
repo: *mut git_repository,
id: *const git_oid, len: size_t,
_type: git_otype) -> ::libc::c_int;
pub fn git_object_lookup_bypath(out: *mut *mut git_object,
treeish: *const git_object,
path: *const ::libc::c_char,
_type: git_otype) -> ::libc::c_int;
pub fn git_object_id(obj: *const git_object) -> *const git_oid;
pub fn git_object_type(obj: *const git_object) -> git_otype;
pub fn git_object_owner(obj: *const git_object) -> *mut git_repository;
pub fn git_object_free(object: *mut git_object);
pub fn git_object_type2string(_type: git_otype) -> *const ::libc::c_char;
pub fn git_object_string2type(str: *const ::libc::c_char) -> git_otype;
pub fn git_object_typeisloose(_type: git_otype) -> ::libc::c_int;
pub fn git_object__size(_type: git_otype) -> size_t;
pub fn git_object_peel(peeled: *mut *mut git_object,
object: *const git_object, target_type: git_otype)
-> ::libc::c_int;
pub fn git_object_dup(dest: *mut *mut git_object, source: *mut git_object)
-> ::libc::c_int;
pub fn git_buf_free(buffer: *mut git_buf);
pub fn git_buf_grow(buffer: *mut git_buf, target_size: size_t) ->
::libc::c_int;
pub fn git_buf_set(buffer: *mut git_buf, data: *const ::libc::c_void,
datalen: size_t) -> ::libc::c_int;
pub fn git_blob_lookup(blob: *mut *mut git_blob,
repo: *mut git_repository, id: *const git_oid) ->
::libc::c_int;
pub fn git_blob_lookup_prefix(blob: *mut *mut git_blob,
repo: *mut git_repository,
id: *const git_oid, len: size_t) ->
::libc::c_int;
pub fn git_blob_free(blob: *mut git_blob);
pub fn git_blob_id(blob: *const git_blob) -> *const git_oid;
pub fn git_blob_owner(blob: *const git_blob) -> *mut git_repository;
pub fn git_blob_rawcontent(blob: *const git_blob) ->
*const ::libc::c_void;
pub fn git_blob_rawsize(blob: *const git_blob) -> git_off_t;
pub fn git_blob_filtered_content(out: *mut git_buf, blob: *mut git_blob,
as_path: *const ::libc::c_char,
check_for_binary_data: ::libc::c_int) ->
::libc::c_int;
pub fn git_blob_create_fromworkdir(id: *mut git_oid,
repo: *mut git_repository,
relative_path: *const ::libc::c_char)
-> ::libc::c_int;
pub fn git_blob_create_fromdisk(id: *mut git_oid,
repo: *mut git_repository,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_blob_create_fromchunks(id: *mut git_oid,
repo: *mut git_repository,
hintpath: *const ::libc::c_char,
callback: git_blob_chunk_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_blob_create_frombuffer(oid: *mut git_oid,
repo: *mut git_repository,
buffer: *const ::libc::c_void,
len: size_t) -> ::libc::c_int;
pub fn git_blob_is_binary(blob: *mut git_blob) -> ::libc::c_int;
pub fn git_blame_get_hunk_count(blame: *mut git_blame) -> u32;
pub fn git_blame_get_hunk_byindex(blame: *mut git_blame, index: u32)
-> *const git_blame_hunk;
pub fn git_blame_get_hunk_byline(blame: *mut git_blame, lineno: u32)
-> *const git_blame_hunk;
pub fn git_blame_file(out: *mut *mut git_blame, repo: *mut git_repository,
path: *const ::libc::c_char,
options: *mut git_blame_options) -> ::libc::c_int;
pub fn git_blame_buffer(out: *mut *mut git_blame,
reference: *mut git_blame,
buffer: *const ::libc::c_char,
buffer_len: u32) -> ::libc::c_int;
pub fn git_blame_free(blame: *mut git_blame);
pub fn git_branch_create(out: *mut *mut git_reference,
repo: *mut git_repository,
branch_name: *const ::libc::c_char,
target: *const git_commit, force: ::libc::c_int)
-> ::libc::c_int;
pub fn git_branch_delete(branch: *mut git_reference) -> ::libc::c_int;
pub fn git_branch_iterator_new(out: *mut *mut git_branch_iterator,
repo: *mut git_repository,
list_flags: git_branch_t) -> ::libc::c_int;
pub fn git_branch_next(out: *mut *mut git_reference,
out_type: *mut git_branch_t,
iter: *mut git_branch_iterator) -> ::libc::c_int;
pub fn git_branch_iterator_free(iter: *mut git_branch_iterator);
pub fn git_branch_move(out: *mut *mut git_reference,
branch: *mut git_reference,
new_branch_name: *const ::libc::c_char,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_branch_lookup(out: *mut *mut git_reference,
repo: *mut git_repository,
branch_name: *const ::libc::c_char,
branch_type: git_branch_t) -> ::libc::c_int;
pub fn git_branch_name(out: *mut *const ::libc::c_char,
_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_branch_upstream(out: *mut *mut git_reference,
branch: *mut git_reference) -> ::libc::c_int;
pub fn git_branch_set_upstream(branch: *mut git_reference,
upstream_name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_branch_upstream_name(tracking_branch_name_out:
*mut ::libc::c_char,
buffer_size: size_t,
repo: *mut git_repository,
canonical_branch_name:
*const ::libc::c_char) ->
::libc::c_int;
pub fn git_branch_is_head(branch: *mut git_reference) -> ::libc::c_int;
pub fn git_branch_remote_name(remote_name_out: *mut ::libc::c_char,
buffer_size: size_t,
repo: *mut git_repository,
canonical_branch_name:
*const ::libc::c_char) -> ::libc::c_int;
pub fn git_tree_lookup(out: *mut *mut git_tree, repo: *mut git_repository,
id: *const git_oid) -> ::libc::c_int;
pub fn git_tree_lookup_prefix(out: *mut *mut git_tree,
repo: *mut git_repository,
id: *const git_oid, len: size_t) ->
::libc::c_int;
pub fn git_tree_free(tree: *mut git_tree);
pub fn git_tree_id(tree: *const git_tree) -> *const git_oid;
pub fn git_tree_owner(tree: *const git_tree) -> *mut git_repository;
pub fn git_tree_entrycount(tree: *const git_tree) -> size_t;
pub fn git_tree_entry_byname(tree: *const git_tree,
filename: *const ::libc::c_char) ->
*const git_tree_entry;
pub fn git_tree_entry_byindex(tree: *const git_tree, idx: size_t) ->
*const git_tree_entry;
pub fn git_tree_entry_byoid(tree: *const git_tree, oid: *const git_oid) ->
*const git_tree_entry;
pub fn git_tree_entry_bypath(out: *mut *mut git_tree_entry,
root: *const git_tree,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_tree_entry_dup(entry: *const git_tree_entry) ->
*mut git_tree_entry;
pub fn git_tree_entry_free(entry: *mut git_tree_entry);
pub fn git_tree_entry_name(entry: *const git_tree_entry) ->
*const ::libc::c_char;
pub fn git_tree_entry_id(entry: *const git_tree_entry) -> *const git_oid;
pub fn git_tree_entry_type(entry: *const git_tree_entry) -> git_otype;
pub fn git_tree_entry_filemode(entry: *const git_tree_entry) ->
git_filemode_t;
pub fn git_tree_entry_filemode_raw(entry: *const git_tree_entry) ->
git_filemode_t;
pub fn git_tree_entry_cmp(e1: *const git_tree_entry,
e2: *const git_tree_entry) -> ::libc::c_int;
pub fn git_tree_entry_to_object(object_out: *mut *mut git_object,
repo: *mut git_repository,
entry: *const git_tree_entry) ->
::libc::c_int;
pub fn git_treebuilder_create(out: *mut *mut git_treebuilder,
source: *const git_tree) -> ::libc::c_int;
pub fn git_treebuilder_clear(bld: *mut git_treebuilder);
pub fn git_treebuilder_entrycount(bld: *mut git_treebuilder) ->
::libc::c_uint;
pub fn git_treebuilder_free(bld: *mut git_treebuilder);
pub fn git_treebuilder_get(bld: *mut git_treebuilder,
filename: *const ::libc::c_char) ->
*const git_tree_entry;
pub fn git_treebuilder_insert(out: *mut *const git_tree_entry,
bld: *mut git_treebuilder,
filename: *const ::libc::c_char,
id: *const git_oid,
filemode: git_filemode_t) -> ::libc::c_int;
pub fn git_treebuilder_remove(bld: *mut git_treebuilder,
filename: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_treebuilder_filter(bld: *mut git_treebuilder,
filter: git_treebuilder_filter_cb,
payload: *mut ::libc::c_void);
pub fn git_treebuilder_write(id: *mut git_oid, repo: *mut git_repository,
bld: *mut git_treebuilder) -> ::libc::c_int;
pub fn git_tree_walk(tree: *const git_tree, mode: git_treewalk_mode,
callback: git_treewalk_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_strarray_free(array: *mut git_strarray);
pub fn git_strarray_copy(tgt: *mut git_strarray, src: *const git_strarray)
-> ::libc::c_int;
pub fn git_reference_lookup(out: *mut *mut git_reference,
repo: *mut git_repository,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_reference_name_to_id(out: *mut git_oid,
repo: *mut git_repository,
name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_reference_dwim(out: *mut *mut git_reference,
repo: *mut git_repository,
shorthand: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_reference_symbolic_create(out: *mut *mut git_reference,
repo: *mut git_repository,
name: *const ::libc::c_char,
target: *const ::libc::c_char,
force: ::libc::c_int) ->
::libc::c_int;
pub fn git_reference_create(out: *mut *mut git_reference,
repo: *mut git_repository,
name: *const ::libc::c_char,
id: *const git_oid, force: ::libc::c_int) ->
::libc::c_int;
pub fn git_reference_target(_ref: *const git_reference) -> *const git_oid;
pub fn git_reference_target_peel(_ref: *const git_reference) ->
*const git_oid;
pub fn git_reference_symbolic_target(_ref: *const git_reference) ->
*const ::libc::c_char;
pub fn git_reference_type(_ref: *const git_reference) -> git_ref_t;
pub fn git_reference_name(_ref: *const git_reference) ->
*const ::libc::c_char;
pub fn git_reference_resolve(out: *mut *mut git_reference,
_ref: *const git_reference) -> ::libc::c_int;
pub fn git_reference_owner(_ref: *const git_reference) ->
*mut git_repository;
pub fn git_reference_symbolic_set_target(out: *mut *mut git_reference,
_ref: *mut git_reference,
target: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_reference_set_target(out: *mut *mut git_reference,
_ref: *mut git_reference,
id: *const git_oid) -> ::libc::c_int;
pub fn git_reference_rename(new_ref: *mut *mut git_reference,
_ref: *mut git_reference,
new_name: *const ::libc::c_char,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_reference_delete(_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_reference_list(array: *mut git_strarray,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_reference_foreach(repo: *mut git_repository,
callback: git_reference_foreach_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_reference_foreach_name(repo: *mut git_repository,
callback: git_reference_foreach_name_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_reference_free(_ref: *mut git_reference);
pub fn git_reference_cmp(ref1: *mut git_reference,
ref2: *mut git_reference) -> ::libc::c_int;
pub fn git_reference_iterator_new(out: *mut *mut git_reference_iterator,
repo: *mut git_repository) ->
::libc::c_int;
pub fn git_reference_iterator_glob_new(out:
*mut *mut git_reference_iterator,
repo: *mut git_repository,
glob: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_reference_next(out: *mut *mut git_reference,
iter: *mut git_reference_iterator) ->
::libc::c_int;
pub fn git_reference_next_name(out: *mut *const ::libc::c_char,
iter: *mut git_reference_iterator) ->
::libc::c_int;
pub fn git_reference_iterator_free(iter: *mut git_reference_iterator);
pub fn git_reference_foreach_glob(repo: *mut git_repository,
glob: *const ::libc::c_char,
callback: git_reference_foreach_name_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_reference_has_log(_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_reference_is_branch(_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_reference_is_remote(_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_reference_is_tag(_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_reference_normalize_name(buffer_out: *mut ::libc::c_char,
buffer_size: size_t,
name: *const ::libc::c_char,
flags: ::libc::c_uint) ->
::libc::c_int;
pub fn git_reference_peel(out: *mut *mut git_object,
_ref: *mut git_reference, _type: git_otype) ->
::libc::c_int;
pub fn git_reference_is_valid_name(refname: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_reference_shorthand(_ref: *mut git_reference) ->
*const ::libc::c_char;
pub fn git_diff_free(diff: *mut git_diff);
pub fn git_diff_tree_to_tree(diff: *mut *mut git_diff,
repo: *mut git_repository,
old_tree: *mut git_tree,
new_tree: *mut git_tree,
opts: *const git_diff_options) ->
::libc::c_int;
pub fn git_diff_tree_to_index(diff: *mut *mut git_diff,
repo: *mut git_repository,
old_tree: *mut git_tree,
index: *mut git_index,
opts: *const git_diff_options) ->
::libc::c_int;
pub fn git_diff_index_to_workdir(diff: *mut *mut git_diff,
repo: *mut git_repository,
index: *mut git_index,
opts: *const git_diff_options) ->
::libc::c_int;
pub fn git_diff_tree_to_workdir(diff: *mut *mut git_diff,
repo: *mut git_repository,
old_tree: *mut git_tree,
opts: *const git_diff_options) ->
::libc::c_int;
pub fn git_diff_tree_to_workdir_with_index(diff: *mut *mut git_diff,
repo: *mut git_repository,
old_tree: *mut git_tree,
opts: *const git_diff_options)
-> ::libc::c_int;
pub fn git_diff_merge(onto: *mut git_diff, from: *const git_diff) ->
::libc::c_int;
pub fn git_diff_find_similar(diff: *mut git_diff,
options: *const git_diff_find_options) ->
::libc::c_int;
pub fn git_diff_options_init(options: *mut git_diff_options,
version: ::libc::c_uint) -> ::libc::c_int;
pub fn git_diff_num_deltas(diff: *const git_diff) -> size_t;
pub fn git_diff_num_deltas_of_type(diff: *const git_diff,
_type: git_delta_t) -> size_t;
pub fn git_diff_get_delta(diff: *const git_diff, idx: size_t) ->
*const git_diff_delta;
pub fn git_diff_is_sorted_icase(diff: *const git_diff) -> ::libc::c_int;
pub fn git_diff_foreach(diff: *mut git_diff, file_cb: git_diff_file_cb,
hunk_cb: git_diff_hunk_cb,
line_cb: git_diff_line_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_diff_status_char(status: git_delta_t) -> ::libc::c_char;
pub fn git_diff_print(diff: *mut git_diff, format: git_diff_format_t,
print_cb: git_diff_line_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_diff_blobs(old_blob: *const git_blob,
old_as_path: *const ::libc::c_char,
new_blob: *const git_blob,
new_as_path: *const ::libc::c_char,
options: *const git_diff_options,
file_cb: git_diff_file_cb,
hunk_cb: git_diff_hunk_cb,
line_cb: git_diff_line_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_diff_blob_to_buffer(old_blob: *const git_blob,
old_as_path: *const ::libc::c_char,
buffer: *const ::libc::c_char,
buffer_len: size_t,
buffer_as_path: *const ::libc::c_char,
options: *const git_diff_options,
file_cb: git_diff_file_cb,
hunk_cb: git_diff_hunk_cb,
line_cb: git_diff_line_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_checkout_head(repo: *mut git_repository,
opts: *const git_checkout_opts) -> ::libc::c_int;
pub fn git_checkout_index(repo: *mut git_repository,
index: *mut git_index,
opts: *const git_checkout_opts) ->
::libc::c_int;
pub fn git_checkout_tree(repo: *mut git_repository,
treeish: *const git_object,
opts: *const git_checkout_opts) -> ::libc::c_int;
pub fn git_indexer_new(out: *mut *mut git_indexer,
path: *const ::libc::c_char, mode: ::libc::c_uint,
odb: *mut git_odb,
progress_cb: git_transfer_progress_callback,
progress_cb_payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_indexer_append(idx: *mut git_indexer,
data: *const ::libc::c_void, size: size_t,
stats: *mut git_transfer_progress) ->
::libc::c_int;
pub fn git_indexer_commit(idx: *mut git_indexer,
stats: *mut git_transfer_progress) ->
::libc::c_int;
pub fn git_indexer_hash(idx: *const git_indexer) -> *const git_oid;
pub fn git_indexer_free(idx: *mut git_indexer);
pub fn git_repository_open(out: *mut *mut git_repository,
path: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_repository_wrap_odb(out: *mut *mut git_repository,
odb: *mut git_odb) -> ::libc::c_int;
pub fn git_repository_discover(path_out: *mut ::libc::c_char,
path_size: size_t,
start_path: *const ::libc::c_char,
across_fs: ::libc::c_int,
ceiling_dirs: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_repository_open_ext(out: *mut *mut git_repository,
path: *const ::libc::c_char,
flags: ::libc::c_uint,
ceiling_dirs: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_repository_open_bare(out: *mut *mut git_repository,
bare_path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_repository_free(repo: *mut git_repository);
pub fn git_repository_init(out: *mut *mut git_repository,
path: *const ::libc::c_char,
is_bare: ::libc::c_uint) -> ::libc::c_int;
pub fn git_repository_init_ext(out: *mut *mut git_repository,
repo_path: *const ::libc::c_char,
opts: *mut git_repository_init_options) ->
::libc::c_int;
pub fn git_repository_head(out: *mut *mut git_reference,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_head_detached(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_repository_head_unborn(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_repository_is_empty(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_repository_path(repo: *mut git_repository) ->
*const ::libc::c_char;
pub fn git_repository_workdir(repo: *mut git_repository) ->
*const ::libc::c_char;
pub fn git_repository_set_workdir(repo: *mut git_repository,
workdir: *const ::libc::c_char,
update_gitlink: ::libc::c_int) ->
::libc::c_int;
pub fn git_repository_is_bare(repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_config(out: *mut *mut git_config,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_odb(out: *mut *mut git_odb,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_refdb(out: *mut *mut git_refdb,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_index(out: *mut *mut git_index,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_message(out: *mut ::libc::c_char, len: size_t,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_message_remove(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_repository_merge_cleanup(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_repository_fetchhead_foreach(repo: *mut git_repository,
callback:
git_repository_fetchhead_foreach_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_repository_mergehead_foreach(repo: *mut git_repository,
callback:
git_repository_mergehead_foreach_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_repository_hashfile(out: *mut git_oid,
repo: *mut git_repository,
path: *const ::libc::c_char,
_type: git_otype,
as_path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_repository_set_head(repo: *mut git_repository,
refname: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_repository_set_head_detached(repo: *mut git_repository,
commitish: *const git_oid) ->
::libc::c_int;
pub fn git_repository_detach_head(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_repository_state(repo: *mut git_repository) -> ::libc::c_int;
pub fn git_repository_set_namespace(repo: *mut git_repository,
nmspace: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_repository_get_namespace(repo: *mut git_repository) ->
*const ::libc::c_char;
pub fn git_repository_is_shallow(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_refspec_src(refspec: *const git_refspec) ->
*const ::libc::c_char;
pub fn git_refspec_dst(refspec: *const git_refspec) ->
*const ::libc::c_char;
pub fn git_refspec_string(refspec: *const git_refspec) ->
*const ::libc::c_char;
pub fn git_refspec_force(refspec: *const git_refspec) -> ::libc::c_int;
pub fn git_refspec_direction(spec: *const git_refspec) -> git_direction;
pub fn git_refspec_src_matches(refspec: *const git_refspec,
refname: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_refspec_dst_matches(refspec: *const git_refspec,
refname: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_refspec_transform(out: *mut ::libc::c_char, outlen: size_t,
spec: *const git_refspec,
name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_refspec_rtransform(out: *mut ::libc::c_char, outlen: size_t,
spec: *const git_refspec,
name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_cred_has_username(cred: *mut git_cred) -> ::libc::c_int;
pub fn git_cred_userpass_plaintext_new(out: *mut *mut git_cred,
username: *const ::libc::c_char,
password: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_cred_ssh_key_new(out: *mut *mut git_cred,
username: *const ::libc::c_char,
publickey: *const ::libc::c_char,
privatekey: *const ::libc::c_char,
passphrase: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_cred_ssh_custom_new(out: *mut *mut git_cred,
username: *const ::libc::c_char,
publickey: *const ::libc::c_char,
publickey_len: size_t,
sign_fn: git_cred_sign_callback,
sign_data: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_cred_default_new(out: *mut *mut git_cred) -> ::libc::c_int;
pub fn git_transport_new(out: *mut *mut git_transport,
owner: *mut git_remote,
url: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_transport_register(prefix: *const ::libc::c_char,
priority: ::libc::c_uint,
cb: git_transport_cb,
param: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_transport_unregister(prefix: *const ::libc::c_char,
priority: ::libc::c_uint) ->
::libc::c_int;
pub fn git_transport_dummy(out: *mut *mut git_transport,
owner: *mut git_remote,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_transport_local(out: *mut *mut git_transport,
owner: *mut git_remote,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_transport_smart(out: *mut *mut git_transport,
owner: *mut git_remote,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_smart_subtransport_http(out: *mut *mut git_smart_subtransport,
owner: *mut git_transport) ->
::libc::c_int;
pub fn git_smart_subtransport_git(out: *mut *mut git_smart_subtransport,
owner: *mut git_transport) ->
::libc::c_int;
pub fn git_smart_subtransport_ssh(out: *mut *mut git_smart_subtransport,
owner: *mut git_transport) ->
::libc::c_int;
pub fn git_remote_create(out: *mut *mut git_remote,
repo: *mut git_repository,
name: *const ::libc::c_char,
url: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_remote_create_with_fetchspec(out: *mut *mut git_remote,
repo: *mut git_repository,
name: *const ::libc::c_char,
url: *const ::libc::c_char,
fetch: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_remote_create_inmemory(out: *mut *mut git_remote,
repo: *mut git_repository,
fetch: *const ::libc::c_char,
url: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_remote_load(out: *mut *mut git_remote,
repo: *mut git_repository,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_remote_save(remote: *const git_remote) -> ::libc::c_int;
pub fn git_remote_owner(remote: *const git_remote) -> *mut git_repository;
pub fn git_remote_name(remote: *const git_remote) ->
*const ::libc::c_char;
pub fn git_remote_url(remote: *const git_remote) -> *const ::libc::c_char;
pub fn git_remote_pushurl(remote: *const git_remote) ->
*const ::libc::c_char;
pub fn git_remote_set_url(remote: *mut git_remote,
url: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_remote_set_pushurl(remote: *mut git_remote,
url: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_remote_add_fetch(remote: *mut git_remote,
refspec: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_remote_get_fetch_refspecs(array: *mut git_strarray,
remote: *mut git_remote) ->
::libc::c_int;
pub fn git_remote_set_fetch_refspecs(remote: *mut git_remote,
array: *mut git_strarray) ->
::libc::c_int;
pub fn git_remote_add_push(remote: *mut git_remote,
refspec: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_remote_get_push_refspecs(array: *mut git_strarray,
remote: *mut git_remote) ->
::libc::c_int;
pub fn git_remote_set_push_refspecs(remote: *mut git_remote,
array: *mut git_strarray) ->
::libc::c_int;
pub fn git_remote_clear_refspecs(remote: *mut git_remote);
pub fn git_remote_refspec_count(remote: *mut git_remote) -> size_t;
pub fn git_remote_get_refspec(remote: *mut git_remote, n: size_t) ->
*const git_refspec;
pub fn git_remote_connect(remote: *mut git_remote,
direction: git_direction) -> ::libc::c_int;
pub fn git_remote_ls(out: *mut *mut *const git_remote_head,
size: *mut size_t, remote: *mut git_remote) ->
::libc::c_int;
pub fn git_remote_download(remote: *mut git_remote) -> ::libc::c_int;
pub fn git_remote_connected(remote: *mut git_remote) -> ::libc::c_int;
pub fn git_remote_stop(remote: *mut git_remote);
pub fn git_remote_disconnect(remote: *mut git_remote);
pub fn git_remote_free(remote: *mut git_remote);
pub fn git_remote_update_tips(remote: *mut git_remote) -> ::libc::c_int;
pub fn git_remote_fetch(remote: *mut git_remote) -> ::libc::c_int;
pub fn git_remote_valid_url(url: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_remote_supported_url(url: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_remote_list(out: *mut git_strarray, repo: *mut git_repository)
-> ::libc::c_int;
pub fn git_remote_check_cert(remote: *mut git_remote,
check: ::libc::c_int);
pub fn git_remote_set_transport(remote: *mut git_remote,
transport: *mut git_transport) ->
::libc::c_int;
pub fn git_remote_set_callbacks(remote: *mut git_remote,
callbacks: *const git_remote_callbacks) ->
::libc::c_int;
pub fn git_remote_stats(remote: *mut git_remote) ->
*const git_transfer_progress;
pub fn git_remote_autotag(remote: *mut git_remote) ->
git_remote_autotag_option_t;
pub fn git_remote_set_autotag(remote: *mut git_remote,
value: git_remote_autotag_option_t);
pub fn git_remote_rename(remote: *mut git_remote,
new_name: *const ::libc::c_char,
callback: git_remote_rename_problem_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_remote_update_fetchhead(remote: *mut git_remote) ->
::libc::c_int;
pub fn git_remote_set_update_fetchhead(remote: *mut git_remote,
value: ::libc::c_int);
pub fn git_remote_is_valid_name(remote_name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_clone(out: *mut *mut git_repository,
url: *const ::libc::c_char,
local_path: *const ::libc::c_char,
options: *const git_clone_options) -> ::libc::c_int;
pub fn git_clone_into(repo: *mut git_repository, remote: *mut git_remote,
co_opts: *const git_checkout_opts,
branch: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_commit_lookup(commit: *mut *mut git_commit,
repo: *mut git_repository, id: *const git_oid) ->
::libc::c_int;
pub fn git_commit_lookup_prefix(commit: *mut *mut git_commit,
repo: *mut git_repository,
id: *const git_oid, len: size_t) ->
::libc::c_int;
pub fn git_commit_free(commit: *mut git_commit);
pub fn git_commit_id(commit: *const git_commit) -> *const git_oid;
pub fn git_commit_owner(commit: *const git_commit) -> *mut git_repository;
pub fn git_commit_message_encoding(commit: *const git_commit) ->
*const ::libc::c_char;
pub fn git_commit_message(commit: *const git_commit) ->
*const ::libc::c_char;
pub fn git_commit_message_raw(commit: *const git_commit) ->
*const ::libc::c_char;
pub fn git_commit_time(commit: *const git_commit) -> git_time_t;
pub fn git_commit_time_offset(commit: *const git_commit) -> ::libc::c_int;
pub fn git_commit_committer(commit: *const git_commit) ->
*const git_signature;
pub fn git_commit_author(commit: *const git_commit) ->
*const git_signature;
pub fn git_commit_raw_header(commit: *const git_commit) ->
*const ::libc::c_char;
pub fn git_commit_tree(tree_out: *mut *mut git_tree,
commit: *const git_commit) -> ::libc::c_int;
pub fn git_commit_tree_id(commit: *const git_commit) -> *const git_oid;
pub fn git_commit_parentcount(commit: *const git_commit) ->
::libc::c_uint;
pub fn git_commit_parent(out: *mut *mut git_commit,
commit: *const git_commit, n: ::libc::c_uint) ->
::libc::c_int;
pub fn git_commit_parent_id(commit: *const git_commit, n: ::libc::c_uint)
-> *const git_oid;
pub fn git_commit_nth_gen_ancestor(ancestor: *mut *mut git_commit,
commit: *const git_commit,
n: ::libc::c_uint) -> ::libc::c_int;
pub fn git_commit_create(id: *mut git_oid, repo: *mut git_repository,
update_ref: *const ::libc::c_char,
author: *const git_signature,
committer: *const git_signature,
message_encoding: *const ::libc::c_char,
message: *const ::libc::c_char,
tree: *const git_tree,
parent_count: ::libc::c_int,
parents: *mut *const git_commit) ->
::libc::c_int;
pub fn git_commit_create_v(id: *mut git_oid, repo: *mut git_repository,
update_ref: *const ::libc::c_char,
author: *const git_signature,
committer: *const git_signature,
message_encoding: *const ::libc::c_char,
message: *const ::libc::c_char,
tree: *const git_tree,
parent_count: ::libc::c_int, ...) ->
::libc::c_int;
pub fn git_config_find_global(out: *mut ::libc::c_char, length: size_t) ->
::libc::c_int;
pub fn git_config_find_xdg(out: *mut ::libc::c_char, length: size_t) ->
::libc::c_int;
pub fn git_config_find_system(out: *mut ::libc::c_char, length: size_t) ->
::libc::c_int;
pub fn git_config_open_default(out: *mut *mut git_config) ->
::libc::c_int;
pub fn git_config_new(out: *mut *mut git_config) -> ::libc::c_int;
pub fn git_config_add_file_ondisk(cfg: *mut git_config,
path: *const ::libc::c_char,
level: git_config_level_t,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_config_open_ondisk(out: *mut *mut git_config,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_open_level(out: *mut *mut git_config,
parent: *const git_config,
level: git_config_level_t) -> ::libc::c_int;
pub fn git_config_open_global(out: *mut *mut git_config,
config: *mut git_config) -> ::libc::c_int;
pub fn git_config_refresh(cfg: *mut git_config) -> ::libc::c_int;
pub fn git_config_free(cfg: *mut git_config);
pub fn git_config_get_entry(out: *mut *const git_config_entry,
cfg: *const git_config,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_config_get_int32(out: *mut i32, cfg: *const git_config,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_config_get_int64(out: *mut i64, cfg: *const git_config,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_config_get_bool(out: *mut ::libc::c_int,
cfg: *const git_config,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_config_get_string(out: *mut *const ::libc::c_char,
cfg: *const git_config,
name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_get_multivar_foreach(cfg: *const git_config,
name: *const ::libc::c_char,
regexp: *const ::libc::c_char,
callback: git_config_foreach_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_config_multivar_iterator_new(out:
*mut *mut git_config_iterator,
cfg: *const git_config,
name: *const ::libc::c_char,
regexp: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_next(entry: *mut *mut git_config_entry,
iter: *mut git_config_iterator) -> ::libc::c_int;
pub fn git_config_iterator_free(iter: *mut git_config_iterator);
pub fn git_config_set_int32(cfg: *mut git_config,
name: *const ::libc::c_char, value: i32)
-> ::libc::c_int;
pub fn git_config_set_int64(cfg: *mut git_config,
name: *const ::libc::c_char, value: i64)
-> ::libc::c_int;
pub fn git_config_set_bool(cfg: *mut git_config,
name: *const ::libc::c_char,
value: ::libc::c_int) -> ::libc::c_int;
pub fn git_config_set_string(cfg: *mut git_config,
name: *const ::libc::c_char,
value: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_set_multivar(cfg: *mut git_config,
name: *const ::libc::c_char,
regexp: *const ::libc::c_char,
value: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_delete_entry(cfg: *mut git_config,
name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_delete_multivar(cfg: *mut git_config,
name: *const ::libc::c_char,
regexp: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_foreach(cfg: *const git_config,
callback: git_config_foreach_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_config_iterator_new(out: *mut *mut git_config_iterator,
cfg: *const git_config) -> ::libc::c_int;
pub fn git_config_iterator_glob_new(out: *mut *mut git_config_iterator,
cfg: *const git_config,
regexp: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_foreach_match(cfg: *const git_config,
regexp: *const ::libc::c_char,
callback: git_config_foreach_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_config_get_mapped(out: *mut ::libc::c_int,
cfg: *const git_config,
name: *const ::libc::c_char,
maps: *const git_cvar_map, map_n: size_t) ->
::libc::c_int;
pub fn git_config_lookup_map_value(out: *mut ::libc::c_int,
maps: *const git_cvar_map,
map_n: size_t,
value: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_parse_bool(out: *mut ::libc::c_int,
value: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_parse_int32(out: *mut i32,
value: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_parse_int64(out: *mut i64,
value: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_config_backend_foreach_match(backend: *mut git_config_backend,
regexp: *const ::libc::c_char,
_fn:
::std::option::Option<unsafe extern "C" fn
(arg1:
*const git_config_entry,
arg2:
*mut ::libc::c_void)
->
::libc::c_int>,
data: *mut ::libc::c_void) ->
::libc::c_int;
pub fn giterr_last() -> *const git_error;
pub fn giterr_clear();
pub fn giterr_detach(cpy: *mut git_error) -> ::libc::c_int;
pub fn giterr_set_str(error_class: ::libc::c_int,
string: *const ::libc::c_char);
pub fn giterr_set_oom();
pub fn git_filter_list_load(filters: *mut *mut git_filter_list,
repo: *mut git_repository,
blob: *mut git_blob,
path: *const ::libc::c_char,
mode: git_filter_mode_t) -> ::libc::c_int;
pub fn git_filter_list_apply_to_data(out: *mut git_buf,
filters: *mut git_filter_list,
_in: *mut git_buf) -> ::libc::c_int;
pub fn git_filter_list_apply_to_file(out: *mut git_buf,
filters: *mut git_filter_list,
repo: *mut git_repository,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_filter_list_apply_to_blob(out: *mut git_buf,
filters: *mut git_filter_list,
blob: *mut git_blob) ->
::libc::c_int;
pub fn git_filter_list_free(filters: *mut git_filter_list);
pub fn git_graph_ahead_behind(ahead: *mut size_t, behind: *mut size_t,
repo: *mut git_repository,
local: *const git_oid,
upstream: *const git_oid) -> ::libc::c_int;
pub fn git_ignore_add_rule(repo: *mut git_repository,
rules: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_ignore_clear_internal_rules(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_ignore_path_is_ignored(ignored: *mut ::libc::c_int,
repo: *mut git_repository,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_index_open(out: *mut *mut git_index,
index_path: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_index_new(out: *mut *mut git_index) -> ::libc::c_int;
pub fn git_index_free(index: *mut git_index);
pub fn git_index_owner(index: *const git_index) -> *mut git_repository;
pub fn git_index_caps(index: *const git_index) -> ::libc::c_uint;
pub fn git_index_set_caps(index: *mut git_index, caps: ::libc::c_uint) ->
::libc::c_int;
pub fn git_index_read(index: *mut git_index, force: ::libc::c_int) ->
::libc::c_int;
pub fn git_index_write(index: *mut git_index) -> ::libc::c_int;
pub fn git_index_path(index: *mut git_index) -> *const ::libc::c_char;
pub fn git_index_read_tree(index: *mut git_index, tree: *const git_tree)
-> ::libc::c_int;
pub fn git_index_write_tree(out: *mut git_oid, index: *mut git_index) ->
::libc::c_int;
pub fn git_index_write_tree_to(out: *mut git_oid, index: *mut git_index,
repo: *mut git_repository) ->
::libc::c_int;
pub fn git_index_entrycount(index: *const git_index) -> size_t;
pub fn git_index_clear(index: *mut git_index);
pub fn git_index_get_byindex(index: *mut git_index, n: size_t) ->
*const git_index_entry;
pub fn git_index_get_bypath(index: *mut git_index,
path: *const ::libc::c_char,
stage: ::libc::c_int) ->
*const git_index_entry;
pub fn git_index_remove(index: *mut git_index,
path: *const ::libc::c_char, stage: ::libc::c_int)
-> ::libc::c_int;
pub fn git_index_remove_directory(index: *mut git_index,
dir: *const ::libc::c_char,
stage: ::libc::c_int) -> ::libc::c_int;
pub fn git_index_add(index: *mut git_index,
source_entry: *const git_index_entry) ->
::libc::c_int;
pub fn git_index_entry_stage(entry: *const git_index_entry) ->
::libc::c_int;
pub fn git_index_add_bypath(index: *mut git_index,
path: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_index_remove_bypath(index: *mut git_index,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_index_add_all(index: *mut git_index,
pathspec: *const git_strarray,
flags: ::libc::c_uint,
callback: git_index_matched_path_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_index_remove_all(index: *mut git_index,
pathspec: *const git_strarray,
callback: git_index_matched_path_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_index_update_all(index: *mut git_index,
pathspec: *const git_strarray,
callback: git_index_matched_path_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_index_find(at_pos: *mut size_t, index: *mut git_index,
path: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_index_conflict_add(index: *mut git_index,
ancestor_entry: *const git_index_entry,
our_entry: *const git_index_entry,
their_entry: *const git_index_entry) ->
::libc::c_int;
pub fn git_index_conflict_get(ancestor_out: *mut *const git_index_entry,
our_out: *mut *const git_index_entry,
their_out: *mut *const git_index_entry,
index: *mut git_index,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_index_conflict_remove(index: *mut git_index,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_index_conflict_cleanup(index: *mut git_index);
pub fn git_index_has_conflicts(index: *const git_index) -> ::libc::c_int;
pub fn git_index_conflict_iterator_new(iterator_out:
*mut *mut git_index_conflict_iterator,
index: *mut git_index) ->
::libc::c_int;
pub fn git_index_conflict_next(ancestor_out: *mut *const git_index_entry,
our_out: *mut *const git_index_entry,
their_out: *mut *const git_index_entry,
iterator: *mut git_index_conflict_iterator)
-> ::libc::c_int;
pub fn git_index_conflict_iterator_free(iterator:
*mut git_index_conflict_iterator);
pub fn git_merge_base(out: *mut git_oid, repo: *mut git_repository,
one: *const git_oid, two: *const git_oid) ->
::libc::c_int;
pub fn git_merge_base_many(out: *mut git_oid, repo: *mut git_repository,
length: size_t, input_array: *const git_oid) ->
::libc::c_int;
pub fn git_merge_head_from_ref(out: *mut *mut git_merge_head,
repo: *mut git_repository,
_ref: *mut git_reference) -> ::libc::c_int;
pub fn git_merge_head_from_fetchhead(out: *mut *mut git_merge_head,
repo: *mut git_repository,
branch_name: *const ::libc::c_char,
remote_url: *const ::libc::c_char,
oid: *const git_oid) ->
::libc::c_int;
pub fn git_merge_head_from_oid(out: *mut *mut git_merge_head,
repo: *mut git_repository,
oid: *const git_oid) -> ::libc::c_int;
pub fn git_merge_head_free(head: *mut git_merge_head);
pub fn git_merge_trees(out: *mut *mut git_index,
repo: *mut git_repository,
ancestor_tree: *const git_tree,
our_tree: *const git_tree,
their_tree: *const git_tree,
opts: *const git_merge_tree_opts) -> ::libc::c_int;
pub fn git_merge(out: *mut *mut git_merge_result,
repo: *mut git_repository,
their_heads: *mut *const git_merge_head,
their_heads_len: size_t, opts: *const git_merge_opts) ->
::libc::c_int;
pub fn git_merge_result_is_uptodate(merge_result: *mut git_merge_result)
-> ::libc::c_int;
pub fn git_merge_result_is_fastforward(merge_result:
*mut git_merge_result) ->
::libc::c_int;
pub fn git_merge_result_fastforward_oid(out: *mut git_oid,
merge_result:
*mut git_merge_result) ->
::libc::c_int;
pub fn git_merge_result_free(merge_result: *mut git_merge_result);
pub fn git_message_prettify(out: *mut ::libc::c_char, out_size: size_t,
message: *const ::libc::c_char,
strip_comments: ::libc::c_int) ->
::libc::c_int;
pub fn git_note_iterator_new(out: *mut *mut git_note_iterator,
repo: *mut git_repository,
notes_ref: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_note_iterator_free(it: *mut git_note_iterator);
pub fn git_note_next(note_id: *mut git_oid, annotated_id: *mut git_oid,
it: *mut git_note_iterator) -> ::libc::c_int;
pub fn git_note_read(out: *mut *mut git_note, repo: *mut git_repository,
notes_ref: *const ::libc::c_char,
oid: *const git_oid) -> ::libc::c_int;
pub fn git_note_message(note: *const git_note) -> *const ::libc::c_char;
pub fn git_note_oid(note: *const git_note) -> *const git_oid;
pub fn git_note_create(out: *mut git_oid, repo: *mut git_repository,
author: *const git_signature,
committer: *const git_signature,
notes_ref: *const ::libc::c_char,
oid: *const git_oid, note: *const ::libc::c_char,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_note_remove(repo: *mut git_repository,
notes_ref: *const ::libc::c_char,
author: *const git_signature,
committer: *const git_signature,
oid: *const git_oid) -> ::libc::c_int;
pub fn git_note_free(note: *mut git_note);
pub fn git_note_default_ref(out: *mut *const ::libc::c_char,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_note_foreach(repo: *mut git_repository,
notes_ref: *const ::libc::c_char,
note_cb: git_note_foreach_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_odb_new(out: *mut *mut git_odb) -> ::libc::c_int;
pub fn git_odb_open(out: *mut *mut git_odb,
objects_dir: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_odb_add_disk_alternate(odb: *mut git_odb,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_odb_free(db: *mut git_odb);
pub fn git_odb_read(out: *mut *mut git_odb_object, db: *mut git_odb,
id: *const git_oid) -> ::libc::c_int;
pub fn git_odb_read_prefix(out: *mut *mut git_odb_object,
db: *mut git_odb, short_id: *const git_oid,
len: size_t) -> ::libc::c_int;
pub fn git_odb_read_header(len_out: *mut size_t, type_out: *mut git_otype,
db: *mut git_odb, id: *const git_oid) ->
::libc::c_int;
pub fn git_odb_exists(db: *mut git_odb, id: *const git_oid) ->
::libc::c_int;
pub fn git_odb_refresh(db: *mut Struct_git_odb) -> ::libc::c_int;
pub fn git_odb_foreach(db: *mut git_odb, cb: git_odb_foreach_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_odb_write(out: *mut git_oid, odb: *mut git_odb,
data: *const ::libc::c_void, len: size_t,
_type: git_otype) -> ::libc::c_int;
pub fn git_odb_open_wstream(out: *mut *mut git_odb_stream,
db: *mut git_odb, size: size_t,
_type: git_otype) -> ::libc::c_int;
pub fn git_odb_stream_write(stream: *mut git_odb_stream,
buffer: *const ::libc::c_char, len: size_t) ->
::libc::c_int;
pub fn git_odb_stream_finalize_write(out: *mut git_oid,
stream: *mut git_odb_stream) ->
::libc::c_int;
pub fn git_odb_stream_read(stream: *mut git_odb_stream,
buffer: *mut ::libc::c_char, len: size_t) ->
::libc::c_int;
pub fn git_odb_stream_free(stream: *mut git_odb_stream);
pub fn git_odb_open_rstream(out: *mut *mut git_odb_stream,
db: *mut git_odb, oid: *const git_oid) ->
::libc::c_int;
pub fn git_odb_write_pack(out: *mut *mut git_odb_writepack,
db: *mut git_odb,
progress_cb: git_transfer_progress_callback,
progress_payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_odb_hash(out: *mut git_oid, data: *const ::libc::c_void,
len: size_t, _type: git_otype) -> ::libc::c_int;
pub fn git_odb_hashfile(out: *mut git_oid, path: *const ::libc::c_char,
_type: git_otype) -> ::libc::c_int;
pub fn git_odb_object_dup(dest: *mut *mut git_odb_object,
source: *mut git_odb_object) -> ::libc::c_int;
pub fn git_odb_object_free(object: *mut git_odb_object);
pub fn git_odb_object_id(object: *mut git_odb_object) -> *const git_oid;
pub fn git_odb_object_data(object: *mut git_odb_object) ->
*const ::libc::c_void;
pub fn git_odb_object_size(object: *mut git_odb_object) -> size_t;
pub fn git_odb_object_type(object: *mut git_odb_object) -> git_otype;
pub fn git_odb_add_backend(odb: *mut git_odb,
backend: *mut git_odb_backend,
priority: ::libc::c_int) -> ::libc::c_int;
pub fn git_odb_add_alternate(odb: *mut git_odb,
backend: *mut git_odb_backend,
priority: ::libc::c_int) -> ::libc::c_int;
pub fn git_odb_num_backends(odb: *mut git_odb) -> size_t;
pub fn git_odb_get_backend(out: *mut *mut git_odb_backend,
odb: *mut git_odb, pos: size_t) ->
::libc::c_int;
pub fn git_packbuilder_new(out: *mut *mut git_packbuilder,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_packbuilder_set_threads(pb: *mut git_packbuilder,
n: ::libc::c_uint) -> ::libc::c_uint;
pub fn git_packbuilder_insert(pb: *mut git_packbuilder,
id: *const git_oid,
name: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_packbuilder_insert_tree(pb: *mut git_packbuilder,
id: *const git_oid) -> ::libc::c_int;
pub fn git_packbuilder_insert_commit(pb: *mut git_packbuilder,
id: *const git_oid) -> ::libc::c_int;
pub fn git_packbuilder_write(pb: *mut git_packbuilder,
path: *const ::libc::c_char,
mode: ::libc::c_uint,
progress_cb: git_transfer_progress_callback,
progress_cb_payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_packbuilder_hash(pb: *mut git_packbuilder) -> *const git_oid;
pub fn git_packbuilder_foreach(pb: *mut git_packbuilder,
cb: git_packbuilder_foreach_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_packbuilder_object_count(pb: *mut git_packbuilder) -> u32;
pub fn git_packbuilder_written(pb: *mut git_packbuilder) -> u32;
pub fn git_packbuilder_set_callbacks(pb: *mut git_packbuilder,
progress_cb:
git_packbuilder_progress,
progress_cb_payload:
*mut ::libc::c_void) ->
::libc::c_int;
pub fn git_packbuilder_free(pb: *mut git_packbuilder);
pub fn git_patch_from_diff(out: *mut *mut git_patch, diff: *mut git_diff,
idx: size_t) -> ::libc::c_int;
pub fn git_patch_from_blobs(out: *mut *mut git_patch,
old_blob: *const git_blob,
old_as_path: *const ::libc::c_char,
new_blob: *const git_blob,
new_as_path: *const ::libc::c_char,
opts: *const git_diff_options) ->
::libc::c_int;
pub fn git_patch_from_blob_and_buffer(out: *mut *mut git_patch,
old_blob: *const git_blob,
old_as_path: *const ::libc::c_char,
buffer: *const ::libc::c_char,
buffer_len: size_t,
buffer_as_path:
*const ::libc::c_char,
opts: *const git_diff_options) ->
::libc::c_int;
pub fn git_patch_free(patch: *mut git_patch);
pub fn git_patch_get_delta(patch: *mut git_patch) ->
*const git_diff_delta;
pub fn git_patch_num_hunks(patch: *mut git_patch) -> size_t;
pub fn git_patch_line_stats(total_context: *mut size_t,
total_additions: *mut size_t,
total_deletions: *mut size_t,
patch: *const git_patch) -> ::libc::c_int;
pub fn git_patch_get_hunk(out: *mut *const git_diff_hunk,
lines_in_hunk: *mut size_t,
patch: *mut git_patch, hunk_idx: size_t) ->
::libc::c_int;
pub fn git_patch_num_lines_in_hunk(patch: *mut git_patch,
hunk_idx: size_t) -> ::libc::c_int;
pub fn git_patch_get_line_in_hunk(out: *mut *const git_diff_line,
patch: *mut git_patch, hunk_idx: size_t,
line_of_hunk: size_t) -> ::libc::c_int;
pub fn git_patch_size(patch: *mut git_patch,
include_context: ::libc::c_int,
include_hunk_headers: ::libc::c_int,
include_file_headers: ::libc::c_int) -> size_t;
pub fn git_patch_print(patch: *mut git_patch, print_cb: git_diff_line_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_patch_to_str(string: *mut *mut ::libc::c_char,
patch: *mut git_patch) -> ::libc::c_int;
pub fn git_pathspec_new(out: *mut *mut git_pathspec,
pathspec: *const git_strarray) -> ::libc::c_int;
pub fn git_pathspec_free(ps: *mut git_pathspec);
pub fn git_pathspec_matches_path(ps: *const git_pathspec, flags: u32,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_pathspec_match_workdir(out: *mut *mut git_pathspec_match_list,
repo: *mut git_repository,
flags: u32, ps: *mut git_pathspec)
-> ::libc::c_int;
pub fn git_pathspec_match_index(out: *mut *mut git_pathspec_match_list,
index: *mut git_index, flags: u32,
ps: *mut git_pathspec) -> ::libc::c_int;
pub fn git_pathspec_match_tree(out: *mut *mut git_pathspec_match_list,
tree: *mut git_tree, flags: u32,
ps: *mut git_pathspec) -> ::libc::c_int;
pub fn git_pathspec_match_diff(out: *mut *mut git_pathspec_match_list,
diff: *mut git_diff, flags: u32,
ps: *mut git_pathspec) -> ::libc::c_int;
pub fn git_pathspec_match_list_free(m: *mut git_pathspec_match_list);
pub fn git_pathspec_match_list_entrycount(m:
*const git_pathspec_match_list)
-> size_t;
pub fn git_pathspec_match_list_entry(m: *const git_pathspec_match_list,
pos: size_t) ->
*const ::libc::c_char;
pub fn git_pathspec_match_list_diff_entry(m:
*const git_pathspec_match_list,
pos: size_t) ->
*const git_diff_delta;
pub fn git_pathspec_match_list_failed_entrycount(m:
*const git_pathspec_match_list)
-> size_t;
pub fn git_pathspec_match_list_failed_entry(m:
*const git_pathspec_match_list,
pos: size_t) ->
*const ::libc::c_char;
pub fn git_push_new(out: *mut *mut git_push, remote: *mut git_remote) ->
::libc::c_int;
pub fn git_push_set_options(push: *mut git_push,
opts: *const git_push_options) ->
::libc::c_int;
pub fn git_push_set_callbacks(push: *mut git_push,
pack_progress_cb: git_packbuilder_progress,
pack_progress_cb_payload:
*mut ::libc::c_void,
transfer_progress_cb:
git_push_transfer_progress,
transfer_progress_cb_payload:
*mut ::libc::c_void) -> ::libc::c_int;
pub fn git_push_add_refspec(push: *mut git_push,
refspec: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_push_update_tips(push: *mut git_push) -> ::libc::c_int;
pub fn git_push_finish(push: *mut git_push) -> ::libc::c_int;
pub fn git_push_unpack_ok(push: *mut git_push) -> ::libc::c_int;
pub fn git_push_status_foreach(push: *mut git_push,
cb:
::std::option::Option<unsafe extern "C" fn
(arg1:
*const ::libc::c_char,
arg2:
*const ::libc::c_char,
arg3:
*mut ::libc::c_void)
->
::libc::c_int>,
data: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_push_free(push: *mut git_push);
pub fn git_refdb_new(out: *mut *mut git_refdb, repo: *mut git_repository)
-> ::libc::c_int;
pub fn git_refdb_open(out: *mut *mut git_refdb, repo: *mut git_repository)
-> ::libc::c_int;
pub fn git_refdb_compress(refdb: *mut git_refdb) -> ::libc::c_int;
pub fn git_refdb_free(refdb: *mut git_refdb);
pub fn git_reflog_read(out: *mut *mut git_reflog,
repo: *mut git_repository,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_reflog_write(reflog: *mut git_reflog) -> ::libc::c_int;
pub fn git_reflog_append(reflog: *mut git_reflog, id: *const git_oid,
committer: *const git_signature,
msg: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_reflog_append_to(repo: *mut git_repository,
name: *const ::libc::c_char,
id: *const git_oid,
committer: *const git_signature,
msg: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_reflog_rename(repo: *mut git_repository,
old_name: *const ::libc::c_char,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_reflog_delete(repo: *mut git_repository,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_reflog_entrycount(reflog: *mut git_reflog) -> size_t;
pub fn git_reflog_entry_byindex(reflog: *mut git_reflog, idx: size_t) ->
*const git_reflog_entry;
pub fn git_reflog_drop(reflog: *mut git_reflog, idx: size_t,
rewrite_previous_entry: ::libc::c_int) ->
::libc::c_int;
pub fn git_reflog_entry_id_old(entry: *const git_reflog_entry) ->
*const git_oid;
pub fn git_reflog_entry_id_new(entry: *const git_reflog_entry) ->
*const git_oid;
pub fn git_reflog_entry_committer(entry: *const git_reflog_entry) ->
*const git_signature;
pub fn git_reflog_entry_message(entry: *const git_reflog_entry) ->
*const ::libc::c_char;
pub fn git_reflog_free(reflog: *mut git_reflog);
pub fn git_reset(repo: *mut git_repository, target: *mut git_object,
reset_type: git_reset_t) -> ::libc::c_int;
pub fn git_reset_default(repo: *mut git_repository,
target: *mut git_object,
pathspecs: *mut git_strarray) -> ::libc::c_int;
pub fn git_revparse_single(out: *mut *mut git_object,
repo: *mut git_repository,
spec: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_revparse_ext(object_out: *mut *mut git_object,
reference_out: *mut *mut git_reference,
repo: *mut git_repository,
spec: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_revparse(revspec: *mut git_revspec, repo: *mut git_repository,
spec: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_revwalk_new(out: *mut *mut git_revwalk,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_revwalk_reset(walker: *mut git_revwalk);
pub fn git_revwalk_push(walk: *mut git_revwalk, id: *const git_oid) ->
::libc::c_int;
pub fn git_revwalk_push_glob(walk: *mut git_revwalk,
glob: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_revwalk_push_head(walk: *mut git_revwalk) -> ::libc::c_int;
pub fn git_revwalk_hide(walk: *mut git_revwalk, commit_id: *const git_oid)
-> ::libc::c_int;
pub fn git_revwalk_hide_glob(walk: *mut git_revwalk,
glob: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_revwalk_hide_head(walk: *mut git_revwalk) -> ::libc::c_int;
pub fn git_revwalk_push_ref(walk: *mut git_revwalk,
refname: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_revwalk_hide_ref(walk: *mut git_revwalk,
refname: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_revwalk_next(out: *mut git_oid, walk: *mut git_revwalk) ->
::libc::c_int;
pub fn git_revwalk_sorting(walk: *mut git_revwalk,
sort_mode: ::libc::c_uint);
pub fn git_revwalk_push_range(walk: *mut git_revwalk,
range: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_revwalk_simplify_first_parent(walk: *mut git_revwalk);
pub fn git_revwalk_free(walk: *mut git_revwalk);
pub fn git_revwalk_repository(walk: *mut git_revwalk) ->
*mut git_repository;
pub fn git_signature_new(out: *mut *mut git_signature,
name: *const ::libc::c_char,
email: *const ::libc::c_char, time: git_time_t,
offset: ::libc::c_int) -> ::libc::c_int;
pub fn git_signature_now(out: *mut *mut git_signature,
name: *const ::libc::c_char,
email: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_signature_default(out: *mut *mut git_signature,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_signature_dup(sig: *const git_signature) -> *mut git_signature;
pub fn git_signature_free(sig: *mut git_signature);
pub fn git_stash_save(out: *mut git_oid, repo: *mut git_repository,
stasher: *const git_signature,
message: *const ::libc::c_char,
flags: ::libc::c_uint) -> ::libc::c_int;
pub fn git_stash_foreach(repo: *mut git_repository,
callback: git_stash_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_stash_drop(repo: *mut git_repository, index: size_t) ->
::libc::c_int;
pub fn git_status_foreach(repo: *mut git_repository,
callback: git_status_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_status_foreach_ext(repo: *mut git_repository,
opts: *const git_status_options,
callback: git_status_cb,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_status_file(status_flags: *mut ::libc::c_uint,
repo: *mut git_repository,
path: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_status_list_new(out: *mut *mut git_status_list,
repo: *mut git_repository,
opts: *const git_status_options) ->
::libc::c_int;
pub fn git_status_list_entrycount(statuslist: *mut git_status_list) ->
size_t;
pub fn git_status_byindex(statuslist: *mut git_status_list, idx: size_t)
-> *const git_status_entry;
pub fn git_status_list_free(statuslist: *mut git_status_list);
pub fn git_status_should_ignore(ignored: *mut ::libc::c_int,
repo: *mut git_repository,
path: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_submodule_lookup(submodule: *mut *mut git_submodule,
repo: *mut git_repository,
name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_submodule_foreach(repo: *mut git_repository,
callback:
::std::option::Option<unsafe extern "C" fn
(arg1:
*mut git_submodule,
arg2:
*const ::libc::c_char,
arg3:
*mut ::libc::c_void)
->
::libc::c_int>,
payload: *mut ::libc::c_void) ->
::libc::c_int;
pub fn git_submodule_add_setup(submodule: *mut *mut git_submodule,
repo: *mut git_repository,
url: *const ::libc::c_char,
path: *const ::libc::c_char,
use_gitlink: ::libc::c_int) ->
::libc::c_int;
pub fn git_submodule_add_finalize(submodule: *mut git_submodule) ->
::libc::c_int;
pub fn git_submodule_add_to_index(submodule: *mut git_submodule,
write_index: ::libc::c_int) ->
::libc::c_int;
pub fn git_submodule_save(submodule: *mut git_submodule) -> ::libc::c_int;
pub fn git_submodule_owner(submodule: *mut git_submodule) ->
*mut git_repository;
pub fn git_submodule_name(submodule: *mut git_submodule) ->
*const ::libc::c_char;
pub fn git_submodule_path(submodule: *mut git_submodule) ->
*const ::libc::c_char;
pub fn git_submodule_url(submodule: *mut git_submodule) ->
*const ::libc::c_char;
pub fn git_submodule_set_url(submodule: *mut git_submodule,
url: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_submodule_index_id(submodule: *mut git_submodule) ->
*const git_oid;
pub fn git_submodule_head_id(submodule: *mut git_submodule) ->
*const git_oid;
pub fn git_submodule_wd_id(submodule: *mut git_submodule) ->
*const git_oid;
pub fn git_submodule_ignore(submodule: *mut git_submodule) ->
git_submodule_ignore_t;
pub fn git_submodule_set_ignore(submodule: *mut git_submodule,
ignore: git_submodule_ignore_t) ->
git_submodule_ignore_t;
pub fn git_submodule_update(submodule: *mut git_submodule) ->
git_submodule_update_t;
pub fn git_submodule_set_update(submodule: *mut git_submodule,
update: git_submodule_update_t) ->
git_submodule_update_t;
pub fn git_submodule_fetch_recurse_submodules(submodule:
*mut git_submodule) ->
::libc::c_int;
pub fn git_submodule_set_fetch_recurse_submodules(submodule:
*mut git_submodule,
fetch_recurse_submodules:
::libc::c_int) ->
::libc::c_int;
pub fn git_submodule_init(submodule: *mut git_submodule,
overwrite: ::libc::c_int) -> ::libc::c_int;
pub fn git_submodule_sync(submodule: *mut git_submodule) -> ::libc::c_int;
pub fn git_submodule_open(repo: *mut *mut git_repository,
submodule: *mut git_submodule) -> ::libc::c_int;
pub fn git_submodule_reload(submodule: *mut git_submodule) ->
::libc::c_int;
pub fn git_submodule_reload_all(repo: *mut git_repository) ->
::libc::c_int;
pub fn git_submodule_status(status: *mut ::libc::c_uint,
submodule: *mut git_submodule) ->
::libc::c_int;
pub fn git_submodule_location(location_status: *mut ::libc::c_uint,
submodule: *mut git_submodule) ->
::libc::c_int;
pub fn git_tag_lookup(out: *mut *mut git_tag, repo: *mut git_repository,
id: *const git_oid) -> ::libc::c_int;
pub fn git_tag_lookup_prefix(out: *mut *mut git_tag,
repo: *mut git_repository,
id: *const git_oid, len: size_t) ->
::libc::c_int;
pub fn git_tag_free(tag: *mut git_tag);
pub fn git_tag_id(tag: *const git_tag) -> *const git_oid;
pub fn git_tag_owner(tag: *const git_tag) -> *mut git_repository;
pub fn git_tag_target(target_out: *mut *mut git_object,
tag: *const git_tag) -> ::libc::c_int;
pub fn git_tag_target_id(tag: *const git_tag) -> *const git_oid;
pub fn git_tag_target_type(tag: *const git_tag) -> git_otype;
pub fn git_tag_name(tag: *const git_tag) -> *const ::libc::c_char;
pub fn git_tag_tagger(tag: *const git_tag) -> *const git_signature;
pub fn git_tag_message(tag: *const git_tag) -> *const ::libc::c_char;
pub fn git_tag_create(oid: *mut git_oid, repo: *mut git_repository,
tag_name: *const ::libc::c_char,
target: *const git_object,
tagger: *const git_signature,
message: *const ::libc::c_char,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_tag_annotation_create(oid: *mut git_oid,
repo: *mut git_repository,
tag_name: *const ::libc::c_char,
target: *const git_object,
tagger: *const git_signature,
message: *const ::libc::c_char) ->
::libc::c_int;
pub fn git_tag_create_frombuffer(oid: *mut git_oid,
repo: *mut git_repository,
buffer: *const ::libc::c_char,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_tag_create_lightweight(oid: *mut git_oid,
repo: *mut git_repository,
tag_name: *const ::libc::c_char,
target: *const git_object,
force: ::libc::c_int) -> ::libc::c_int;
pub fn git_tag_delete(repo: *mut git_repository,
tag_name: *const ::libc::c_char) -> ::libc::c_int;
pub fn git_tag_list(tag_names: *mut git_strarray,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_tag_list_match(tag_names: *mut git_strarray,
pattern: *const ::libc::c_char,
repo: *mut git_repository) -> ::libc::c_int;
pub fn git_tag_foreach(repo: *mut git_repository,
callback: git_tag_foreach_cb,
payload: *mut ::libc::c_void) -> ::libc::c_int;
pub fn git_tag_peel(tag_target_out: *mut *mut git_object,
tag: *const git_tag) -> ::libc::c_int;
pub fn git_threads_init() -> ::libc::c_int;
pub fn git_threads_shutdown();
}
|
use crate::ast;
use std::collections::HashSet;
use syn;
use syn::parse::Result as ParseResult;
use syn::spanned::Spanned;
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use heck::SnakeCase;
/// Index identifying a property.
/// NOTE(review): other code here (`PropPath.prop_id`,
/// `ProcessedBinding.context_prop`) stores raw `usize` ids instead of this
/// newtype — presumably the same index space; confirm before relying on it.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
struct PropertyId(usize);
/*struct Property {
name: syn::Ident,
ty: syn::Type,
change_ty: Option<syn::Type>,
is_signal: bool,
}*/
/// A handler to run when a named property changes.
#[derive(Debug)]
struct ChangeHandler {
    /// Name of the watched property, as written in the input.
    prop_name: syn::Ident,
    /// Resolved id of the watched property; `None` until resolution has run.
    resolved_prop: Option<PropertyId>,
    /// Expression to evaluate when the property changes.
    body: ast::BindingExpr,
}
/// Like `ChangeHandler`, but for match-style change handlers.
/// NOTE(review): structurally identical to `ChangeHandler`; the distinction
/// is presumably in how the consumer generates code — confirm.
#[derive(Debug)]
struct MatchChangeHandler {
    /// Name of the watched property.
    prop_name: syn::Ident,
    /// Resolved id of the watched property; `None` until resolution has run.
    resolved_prop: Option<PropertyId>,
    /// Expression to evaluate when the property changes.
    body: ast::BindingExpr,
}
/// Anchor layout bindings for an item, plus margin values.
///
/// Each anchor is an optional binding expression (`None` means the anchor is
/// not set); margins are plain numbers. `Default` gives all anchors unset and
/// all margins `0.0`.
#[derive(Debug, Default)]
struct Anchors {
    fill: Option<ast::BindingExpr>,
    fill_width: Option<ast::BindingExpr>,
    fill_height: Option<ast::BindingExpr>,
    left: Option<ast::BindingExpr>,
    right: Option<ast::BindingExpr>,
    top: Option<ast::BindingExpr>,
    bottom: Option<ast::BindingExpr>,
    // NOTE(review): presumably a default applied to all four edges, with the
    // specific `margin_*` fields overriding it — confirm against the consumer.
    margins: f64,
    margin_left: f64,
    margin_right: f64,
    margin_top: f64,
    margin_bottom: f64,
}
/// Path to a (possibly nested) property: the id of the innermost property
/// known to the graph, followed by any remaining unresolved segments.
#[derive(Debug, Clone)]
struct PropPath {
    /// Id of the resolved base property.
    prop_id: usize,
    /// Trailing path segments that were not resolved to known properties.
    sub_prop_path: Vec<syn::Ident>,
}
/// Resolution results for a `ProcessedBinding`: the property paths the
/// binding's expression depends on, in expression order.
#[derive(Debug, Clone)]
struct ProcessedBindingResolved {
    deps: Vec<PropPath>,
}
/// A property binding after initial processing.
#[derive(Debug, Clone)]
struct ProcessedBinding {
    /// Id of the property in whose scope the expression is evaluated.
    context_prop: usize,
    /// The property the binding assigns to.
    for_prop: PropPath,
    /// The binding expression, split into literal-token and dependency parts.
    expr: Vec<ast::BindingExprPart>,
    /// Filled in by `resolve_deps`; `None` until resolution has run.
    resolved: Option<ProcessedBindingResolved>,
}
impl ProcessedBinding {
    /// Resolves the `Dep` parts of this binding's expression into concrete
    /// `PropPath`s, one per `Dep` part, in expression order.
    ///
    /// NOTE(review): the path-walking logic below is still commented out, so
    /// every dependency currently resolves to the binding's own
    /// `context_prop` with an empty sub-property path.
    fn resolve_deps(&self, _prop_graph: &PropertyGraph) -> ParseResult<ProcessedBindingResolved> {
        let mut deps = vec![];
        for expr_part in &self.expr {
            // Literal token parts contribute no dependencies.
            if let ast::BindingExprPart::Dep(_dep_part) = expr_part {
                let prop_id = self.context_prop;
                let sub_prop_path = vec![];
                /*let mut is_first = true;
                for path_part in &dep_part.path {
                    let mut found_known_sub = false;
                    if is_first {
                        if let Some(found_id) = prop_graph.lookup_prop(prop_id, &path_part.to_string()) {
                            prop_id = found_id;
                            found_known_sub = true;
                        }
                    } else if sub_prop_path.is_empty() {
                        if let Some(found_id) = prop_graph.lookup_local_prop(prop_id, &path_part.to_string()) {
                            prop_id = found_id;
                            found_known_sub = true;
                        }
                    }
                    if !found_known_sub {
                        sub_prop_path.push(path_part.clone());
                    }
                    is_first = false;
                }*/
                /*for path_part in &dep_path.inst_path {
                }*/
                deps.push(PropPath {
                    prop_id,
                    sub_prop_path,
                });
            }
        }
        Ok(ProcessedBindingResolved {
            deps,
        })
    }

    /// Expands the binding expression into a `TokenStream`, substituting each
    /// `Dep` part with the tokens produced by `resolver` for the matching
    /// resolved `PropPath` (dependencies are consumed in expression order).
    ///
    /// Panics if `resolve_deps` has not yet populated `self.resolved`.
    fn gen_expr(&self, resolver: &dyn Fn(&PropPath) -> TokenStream) -> TokenStream {
        let deps = &self.resolved.as_ref().expect("Resolved expression").deps;
        let mut final_expr = TokenStream::new();
        let mut dep_index = 0;
        for expr_part in &self.expr {
            match expr_part {
                ast::BindingExprPart::Dep(_dep_part) => {
                    final_expr.extend(resolver(&deps[dep_index]));
                    dep_index += 1;
                }
                ast::BindingExprPart::Tokens(toks) => {
                    final_expr.extend(toks.clone().into_iter());
                }
            }
        }
        final_expr
    }
}
/// Resolution results for a `ProcessedProperty`.
#[derive(Debug)]
struct ProcessedPropertyResolved {
    /// Backing field name chosen for the property; `None` for the root
    /// property, which gets no backing field.
    field_name: Option<syn::Ident>,
}
/// A property after initial processing.
#[derive(Debug)]
struct ProcessedProperty {
    /// Explicit name, if one was given; otherwise a name is derived from the
    /// tokens of `main_type` during resolution.
    name: Option<syn::Ident>,
    /// The property's value type.
    main_type: syn::Type,
    /// Presumably the type passed to change handlers — not used in the code
    /// visible here; confirm.
    change_type: syn::Type,
    //sub_properties: Vec<Property>,
    //bindings: Vec<Binding>,
    //binding_expr: Option<BindingExpr>,
    /// Id of the parent property; `None` marks the root property.
    parent: Option<usize>,
    /// Assumed to mark properties that are instantiations (child items)
    /// rather than plain values — TODO confirm; not used in visible code.
    is_inst: bool,
    //is_mouse_item: bool,
    //anchors: Anchors,
    /// Filled in by `resolve_locals`; `None` until resolution has run.
    resolved: Option<ProcessedPropertyResolved>,
}
impl ProcessedProperty {
    /// Picks the generated struct-field name for this property.
    ///
    /// The root property (no parent) maps to no field at all. Other properties
    /// use their declared name — or a snake_case name derived from their type
    /// tokens — prefixed with the parent's name when the parent is not the
    /// root, then de-duplicated through `ident_source`.
    fn resolve_locals(&self, prop_graph: &PropertyGraph, ident_source: &mut IdentSource) -> ParseResult<ProcessedPropertyResolved> {
        if self.parent.is_none() {
            // The root instance becomes the generated struct itself, not a field.
            return Ok(ProcessedPropertyResolved { field_name: None });
        }
        let mut name = self.name.clone().unwrap_or_else(|| {
            // Anonymous property: derive a name from the rendered type tokens.
            let mut gen_name = "".to_string();
            for tok in self.main_type.clone().into_token_stream() {
                gen_name += &tok.to_string();
            }
            syn::Ident::new(&gen_name.to_snake_case(), self.main_type.span())
        });
        // Prefix the parent's name (as long as the parent isn't the root).
        if let Some(parent) = self.parent {
            let parent_prop = &prop_graph.properties[parent];
            if parent_prop.parent.is_some() {
                if let Some(ref parent_name) = parent_prop.name {
                    name = syn::Ident::new(&(parent_name.to_string() + "_" + &name.to_string()), name.span());
                }
            }
        }
        // De-duplicate against every identifier handed out so far.
        let resolved_name = ident_source.get(&name, IdentType::Snake);
        Ok(ProcessedPropertyResolved {
            field_name: Some(resolved_name),
        })
    }
}
/*struct IdentStore {
stored: HashMap<String, String>,
}
impl IdentStore {
fn new() -> IdentStore {
IdentStore {
stored: HashMap::new(),
}
}
fn insert(&mut self, id: String, ident: String) -> Result<(), String> {
if let Some(val) = self.stored.get(&id) {
return Err(format!("Trying to overwrite {} with {}", val, ident));
}
self.stored.insert(id, ident);
Ok(())
}
fn contains(&self, id: &str) -> bool {
self.stored.contains_key(id)
}
fn get(&self, id: &str) -> Option<&str> {
self.stored.get(id).into()
}
}*/
/*struct NameScope {
/*names_to_ids: HashMap<String, usize>,
ids_to_names: HashMap<usize, String>,*/
taken_names: HashSet<String>,
}
impl NameScope {
/*fn new_ref(&mut self, name: String) -> NameRef {
let id = get_new_id();
self.names_to_ids.insert(name, id);
self.ids_to_names.insert(id, name);
NameRef(id)
}*/
}
struct NameRef(usize);*/
/// Casing transformation applied when generating an identifier.
enum IdentType {
    // Use the name exactly as given.
    None,
    // Convert the name to snake_case.
    Snake,
}
impl IdentType {
    /// Applies this casing rule to `name` and returns the transformed string.
    fn apply(&self, name: &str) -> String {
        if let IdentType::Snake = self {
            name.to_snake_case()
        } else {
            // `IdentType::None`: pass the name through unchanged.
            name.to_string()
        }
    }
}
/// Hands out collision-free identifiers, remembering every name taken so far.
struct IdentSource {
    // All names that have been reserved (via `take_all`) or returned (via `get`).
    store: HashSet<String>,
}
impl IdentSource {
    /// Creates a source with no reserved identifiers.
    fn new() -> IdentSource {
        IdentSource {
            store: HashSet::new(),
        }
    }
    /// Reserves every name in `names` so `get` will never return one of them.
    fn take_all(&mut self, names: &[&str]) {
        self.store.extend(names.iter().map(|name| name.to_string()));
    }
    /// Returns a unique identifier derived from `from` (cased per `ty`).
    /// On collision it tries `name_`, then `name__2`, `name__3`, … until a
    /// free name is found; the returned name is recorded as taken.
    fn get(&mut self, from: &syn::Ident, ty: IdentType) -> syn::Ident {
        let mut name = ty.apply(&from.to_string());
        if self.store.contains(&name) {
            let base_name = name.clone();
            name = format!("{}_", base_name);
            let mut current_num = 1;
            while self.store.contains(&name) {
                current_num += 1;
                // `current_num` formats directly; no intermediate String needed.
                name = format!("{}_{}", base_name, current_num);
            }
        }
        self.store.insert(name.clone());
        syn::Ident::new(&name, from.span())
    }
}
/// Flat arena of all properties; a property's id is its index in `properties`.
struct PropertyGraph {
    properties: Vec<ProcessedProperty>,
}
impl PropertyGraph {
fn add_property(&mut self, property: ProcessedProperty) -> usize {
let id = self.properties.len();
self.properties.push(property);
id
}
fn lookup_local_prop(&self, source_id: usize, find_name: &str) -> Option<usize> {
for (id, prop) in self.properties.iter().enumerate() {
if prop.parent == Some(source_id) {
if let Some(ref prop_name) = prop.name {
if &prop_name.to_string() == find_name {
//println!("looking for {:?} in {:?}, found {:?}", find_name, source_id, id);
return Some(id);
}
}
}
}
None
}
fn lookup_prop(&self, source_id: usize, find_name: &str) -> Option<usize> {
// Search for locals:
if let Some(id) = self.lookup_local_prop(source_id, find_name) {
return Some(id);
}
// Search for instances:
for (id, prop) in self.properties.iter().enumerate() {
if prop.is_inst {
if let Some(ref prop_name) = prop.name {
if &prop_name.to_string() == find_name {
return Some(id);
}
}
}
}
None
}
}
fn get_change_type(ty: &syn::Type) -> ParseResult<syn::Type> {
let mut change_ty = ty.clone();
if let syn::Type::Path(ref mut path) = change_ty {
if let Some(ref mut last_seg) = path.path.segments.last_mut() {
let mut ty_ident = last_seg.value_mut();
let change_name = ty_ident.ident.to_string() + "Change";
ty_ident.ident = syn::Ident::new(&change_name, ty_ident.span());
} else {
return Err(syn::parse::Error::new(ty.span(), "Type path has no parts"))
}
} else {
return Err(syn::parse::Error::new(ty.span(), "Can't generate change type for non-path type"))
}
Ok(change_ty)
}
/// Fully processed description of an instance tree, ready for code generation.
pub struct FinalDesc {
    // Name of the generated struct; set from the root instance's type name.
    struct_name: Option<syn::Ident>,
    // All properties discovered while walking the instance tree.
    prop_graph: PropertyGraph,
    // All binding expressions, in discovery order.
    bindings: Vec<ProcessedBinding>,
    // Source of collision-free field identifiers.
    ident_source: IdentSource,
}
/*fn get_change_type(from_type: &syn::Type) -> syn::Type {
/*match from_type {
syn::Type::
}*/
//syn::Ident::new(&(struct_name.to_string() + "Change"), struct_name.span())
println!("{:?}", from_type);
from_type.clone()
}*/
impl FinalDesc {
    /// Creates an empty description with every Rust keyword pre-reserved in
    /// the identifier source, so generated field names never collide with
    /// keywords.
    pub fn new() -> FinalDesc {
        let mut ident_source = IdentSource::new();
        ident_source.take_all(&[
            "Self", "abstract", "alignof", "as", "become", "box", "break", "const", "continue",
            "crate", "do", "else", "enum", "extern crate", "extern", "false", "final", "fn", "for",
            "for", "if let", "if", "if", "impl", "impl", "in", "let", "loop", "macro", "match",
            "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure", "ref",
            "return", "self", "sizeof", "static", "struct", "super", "trait", "true", "type",
            "typeof", "unsafe", "unsized", "use", "use", "virtual", "where", "while", "yield",
        ]);
        FinalDesc {
            struct_name: None,
            prop_graph: PropertyGraph {
                properties: vec![],
            },
            bindings: vec![],
            ident_source,
        }
    }
    /// Registers `instance` — and, recursively, its children — as properties
    /// in the graph. A `None` parent marks the root instance, whose type name
    /// doubles as the generated struct name.
    pub fn process_instance(&mut self, instance: &ast::Instance, parent: Option<usize>) -> ParseResult<()> {
        if parent.is_none() {
            let instance_name: ast::InstanceName = syn::parse2(instance.type_name.clone().into_token_stream())?;
            self.struct_name = Some(instance_name.name);
        }
        let name = if parent.is_none() {
            // TODO: Ensure root is not called something other than root.
            Some(syn::Ident::new("root", instance.type_name.span()))
        } else {
            instance.name.clone()
        };
        let change_type = get_change_type(&instance.type_name)?;
        let processed = ProcessedProperty {
            name,
            main_type: instance.type_name.clone(),
            change_type,
            parent,
            is_inst: true,
            resolved: None,
        };
        let id = self.prop_graph.add_property(processed);
        for property in &instance.body.properties {
            // Propagate parse failures instead of silently discarding them.
            self.process_property(property, id)?;
        }
        for binding in &instance.body.bindings {
            self.process_binding_expr(&binding.body, id, PropPath {
                prop_id: id,
                sub_prop_path: vec![binding.prop_name.clone()],
            })?;
        }
        for child in &instance.body.children {
            self.process_instance(child, Some(id))?;
        }
        Ok(())
    }
    /// Registers a plain property declared on the instance with graph id
    /// `parent`, defaulting its change type to `ValueChange<T>` when none was
    /// declared explicitly.
    fn process_property(&mut self, property: &ast::Property, parent: usize) -> ParseResult<()> {
        let change_type_toks = if let Some(ast::PropertyChangeType{ref change_ty}) = property.change_ty {
            quote! {
                #change_ty
            }
        } else {
            let ty = &property.ty;
            quote! {
                ::modelone::change_value::ValueChange<#ty>
            }
        };
        let change_type = syn::parse2(change_type_toks)?;
        let processed = ProcessedProperty {
            name: Some(property.name.clone()),
            main_type: property.ty.clone(),
            change_type,
            parent: Some(parent),
            is_inst: false,
            resolved: None,
        };
        let id = self.prop_graph.add_property(processed);
        if let Some((_, ref binding_expr)) = property.binding_expr {
            // The expression is evaluated in the parent's scope but assigns to
            // the new property. Propagate failures instead of dropping them.
            self.process_binding_expr(binding_expr, parent, PropPath {
                prop_id: id,
                sub_prop_path: vec![],
            })?;
        }
        Ok(())
    }
    /// Records a binding of `for_prop` to `binding_expr`, with names looked up
    /// relative to `context_prop`. Resolution itself happens later in
    /// `resolve`.
    fn process_binding_expr(&mut self, binding_expr: &ast::BindingExpr, context_prop: usize, for_prop: PropPath) -> ParseResult<()> {
        self.bindings.push(ProcessedBinding {
            context_prop,
            for_prop,
            expr: binding_expr.expr.clone(),
            resolved: None,
        });
        Ok(())
    }
    /// Resolves all property field names, then all binding dependencies.
    /// Must run after every instance has been processed and before `generate`.
    pub fn resolve(&mut self) -> ParseResult<()> {
        for i in 0 .. self.prop_graph.properties.len() {
            let resolved = self.prop_graph.properties[i].resolve_locals(&self.prop_graph, &mut self.ident_source)?;
            self.prop_graph.properties[i].resolved = Some(resolved);
        }
        for binding in &mut self.bindings {
            let resolved = binding.resolve_deps(&self.prop_graph)?;
            binding.resolved = Some(resolved);
        }
        Ok(())
    }
    /// Emits the complete generated module: the struct, its builder-args
    /// struct, private change/signal enums with public newtype wrappers, and
    /// the `Change`/`Changeable` trait implementations.
    ///
    /// # Panics
    /// Panics if `resolve` has not been run first.
    pub fn generate(&self) -> TokenStream {
        let struct_name = self.struct_name.clone().expect("Struct name");
        let change_name = syn::Ident::new(&(struct_name.to_string() + "ChangePrivate"), struct_name.span());
        let signal_name = syn::Ident::new(&(struct_name.to_string() + "SignalPrivate"), struct_name.span());
        let pub_change_name = syn::Ident::new(&(struct_name.to_string() + "Change"), struct_name.span());
        let pub_signal_name = syn::Ident::new(&(struct_name.to_string() + "Signal"), struct_name.span());
        let args_struct_name = syn::Ident::new(&(struct_name.to_string() + "Builder"), struct_name.span());
        let mut args_struct_fields = TokenStream::new();
        let mut struct_fields = TokenStream::new();
        let mut struct_impl = TokenStream::new();
        let mut change_entries = TokenStream::new();
        let mut signal_entries = TokenStream::new();
        let mut changeable_apply_entries = TokenStream::new();
        let mut reset_view_entries = TokenStream::new();
        for property in &self.prop_graph.properties {
            // The root property has no parent and produces no field of its own.
            let parent_id = if let Some(parent_id) = property.parent {
                parent_id
            } else {
                continue;
            };
            let parent = &self.prop_graph.properties[parent_id];
            let field_name = &property.resolved.as_ref().expect("Resolved property").field_name;
            let field_type = &property.main_type;
            let change_type = &property.change_type;
            let field_make_change = syn::Ident::new(&(field_name.as_ref().expect("Field name").to_string() + "_change"), field_name.span());
            // Only plain properties directly on the root appear in the builder.
            if parent.parent.is_none() && !property.is_inst {
                args_struct_fields.extend(quote! {
                    #field_name: Option<#field_type>,
                });
            }
            struct_fields.extend(quote! {
                #field_name: #field_type,
            });
            change_entries.extend(quote! {
                #field_name(#change_type),
            });
            signal_entries.extend(quote! {
                #field_name(<#change_type as ::modelone::model::Change>::SignalType),
            });
            struct_impl.extend(quote! {
                fn #field_make_change(change: #change_type) -> #pub_change_name {
                    #pub_change_name(#change_name::#field_name(change))
                }
            });
            // Route each field's sub-changes through a watcher that re-wraps
            // signals in the public signal type.
            changeable_apply_entries.extend(quote! {
                #change_name::#field_name(subchange) => {
                    let mut watcher_fn = |signal| {
                        watcher.send_signal(#pub_signal_name(#signal_name::#field_name(signal)));
                    };
                    self.#field_name.changeable_apply(subchange, &mut ::modelone::model::SubWatcher::new(&mut watcher_fn));
                },
            });
            reset_view_entries.extend(quote! {
                let changeable: &::modelone::model::Changeable<#change_type> = &self.#field_name;
                for subsignal in changeable.reset_view_signals() {
                    signals.push(#pub_signal_name(#signal_name::#field_name(subsignal)));
                }
            });
        }
        for binding in &self.bindings {
            let for_prop = &self.prop_graph.properties[binding.for_prop.prop_id];
            let for_field_name = for_prop.resolved.as_ref().expect("Resolved for_prop").field_name.as_ref().expect("Field name for for_prop");
            let update_fn = syn::Ident::new(&(for_field_name.to_string() + "_update"), for_field_name.span());
            // Substitute every dependency placeholder with a read of its
            // resolved field on `self`.
            let expr_toks = binding.gen_expr(&|prop_path| {
                let prop = &self.prop_graph.properties[prop_path.prop_id];
                let resolved_prop = prop.resolved.as_ref().expect("Resolved prop");
                let name = resolved_prop.field_name.as_ref().expect("Field name for prop");
                quote!{self.#name}
            });
            struct_impl.extend(quote! {
                fn #update_fn(&self) -> #pub_change_name {
                    #pub_change_name(#change_name::#for_field_name(#expr_toks))
                }
            });
        }
        quote! {
            pub struct #args_struct_name {
                #args_struct_fields
            }
            pub struct #struct_name {
                #struct_fields
            }
            impl #struct_name {
                #struct_impl
            }
            #[allow(non_camel_case_types)]
            #[derive(Debug, Clone, PartialEq)]
            enum #change_name {
                #change_entries
            }
            #[derive(Debug, Clone, PartialEq)]
            pub struct #pub_change_name(#change_name);
            #[allow(non_camel_case_types)]
            #[derive(Debug, Clone, PartialEq)]
            enum #signal_name {
                #signal_entries
            }
            #[derive(Debug, Clone, PartialEq)]
            pub struct #pub_signal_name(#signal_name);
            impl ::modelone::model::Change for #pub_change_name {
                type SignalType = #pub_signal_name;
            }
            impl ::modelone::model::Changeable<#pub_change_name> for #struct_name {
                fn changeable_apply(&mut self, change: #pub_change_name, watcher: &mut ::modelone::model::Watcher<#pub_signal_name>) {
                    match change.0 {
                        #changeable_apply_entries
                    }
                }
                fn reset_view_signals(&self) -> Vec<#pub_signal_name> {
                    let mut signals = vec![];
                    #reset_view_entries
                    signals
                }
            }
        }
    }
}
|
use apllodb_shared_components::ApllodbResult;
use apllodb_sql_parser::apllodb_ast;
use apllodb_storage_engine_interface::ColumnName;
use crate::ast_translator::AstTranslator;
impl AstTranslator {
    /// Converts an AST column name into an engine-level `ColumnName`,
    /// validating it through `ColumnName::new`.
    pub fn column_name(ast_column_name: apllodb_ast::ColumnName) -> ApllodbResult<ColumnName> {
        // Unwrap the two newtype layers to reach the raw name.
        let raw_name = ast_column_name.0 .0;
        ColumnName::new(raw_name)
    }
}
|
use crate::common::CodegenCx;
use crate::coverageinfo;
use crate::llvm;
use llvm::coverageinfo::CounterMappingRegion;
use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, FunctionCoverage};
use rustc_codegen_ssa::traits::ConstMethods;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_hir::def_id::{DefId, DefIdSet, LOCAL_CRATE};
use rustc_llvm::RustString;
use rustc_middle::mir::coverage::CodeRegion;
use rustc_middle::ty::{Instance, TyCtxt};
use rustc_span::Symbol;
use std::ffi::CString;
use tracing::debug;
/// Generates and exports the Coverage Map.
///
/// This Coverage Map complies with Coverage Mapping Format version 4 (zero-based encoded as 3),
/// as defined at [LLVM Code Coverage Mapping Format](https://github.com/rust-lang/llvm-project/blob/rustc/11.0-2020-10-12/llvm/docs/CoverageMappingFormat.rst#llvm-code-coverage-mapping-format)
/// and published in Rust's current (November 2020) fork of LLVM. This version is supported by the
/// LLVM coverage tools (`llvm-profdata` and `llvm-cov`) bundled with Rust's fork of LLVM.
///
/// Consequently, Rust's bundled version of Clang also generates Coverage Maps compliant with
/// version 3. Clang's implementation of Coverage Map generation was referenced when implementing
/// this Rust version, and though the format documentation is very explicit and detailed, some
/// undocumented details in Clang's implementation (that may or may not be important) were also
/// replicated for Rust's Coverage Map.
pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
    let tcx = cx.tcx;
    // Ensure LLVM supports Coverage Map Version 4 (encoded as a zero-based value: 3).
    // If not, the LLVM Version must be less than 11.
    let version = coverageinfo::mapping_version();
    if version != 3 {
        tcx.sess.fatal("rustc option `-Z instrument-coverage` requires LLVM 11 or higher.");
    }
    debug!("Generating coverage map for CodegenUnit: `{}`", cx.codegen_unit.name());
    let mut function_coverage_map = match cx.coverage_context() {
        Some(ctx) => ctx.take_function_coverage_map(),
        None => return,
    };
    if function_coverage_map.is_empty() {
        // This module has no functions with coverage instrumentation
        return;
    }
    add_unreachable_coverage(tcx, &mut function_coverage_map);
    let mut mapgen = CoverageMapGenerator::new();
    // Encode coverage mappings and generate function records
    let mut function_data = Vec::new();
    for (instance, function_coverage) in function_coverage_map {
        debug!("Generate function coverage for {}, {:?}", cx.codegen_unit.name(), instance);
        let mangled_function_name = tcx.symbol_name(instance).to_string();
        let function_source_hash = function_coverage.source_hash();
        let (expressions, counter_regions) =
            function_coverage.get_expressions_and_counter_regions();
        let coverage_mapping_buffer = llvm::build_byte_buffer(|coverage_mapping_buffer| {
            mapgen.write_coverage_mapping(expressions, counter_regions, coverage_mapping_buffer);
        });
        // Use `is_empty` rather than comparing `len()` against zero.
        debug_assert!(
            !coverage_mapping_buffer.is_empty(),
            "Every `FunctionCoverage` should have at least one counter"
        );
        function_data.push((mangled_function_name, function_source_hash, coverage_mapping_buffer));
    }
    // Encode all filenames referenced by counters/expressions in this module
    let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| {
        coverageinfo::write_filenames_section_to_buffer(&mapgen.filenames, filenames_buffer);
    });
    let filenames_size = filenames_buffer.len();
    let filenames_val = cx.const_bytes(&filenames_buffer[..]);
    let filenames_ref = coverageinfo::hash_bytes(filenames_buffer);
    // Generate the LLVM IR representation of the coverage map and store it in a well-known global
    let cov_data_val = mapgen.generate_coverage_map(cx, version, filenames_size, filenames_val);
    for (mangled_function_name, function_source_hash, coverage_mapping_buffer) in function_data {
        save_function_record(
            cx,
            mangled_function_name,
            function_source_hash,
            filenames_ref,
            coverage_mapping_buffer,
        );
    }
    // Save the coverage data value to LLVM IR
    coverageinfo::save_cov_data_to_mod(cx, cov_data_val);
}
/// Accumulates per-module state while encoding each function's coverage
/// mapping: the deduplicated set of all filenames referenced by any counter.
struct CoverageMapGenerator {
    // Insertion-ordered set; a filename's index here is its module-wide id.
    filenames: FxIndexSet<CString>,
}
impl CoverageMapGenerator {
    /// Creates a generator with an empty filenames table.
    fn new() -> Self {
        Self { filenames: FxIndexSet::default() }
    }
    /// Using the `expressions` and `counter_regions` collected for the current function, generate
    /// the `mapping_regions` and `virtual_file_mapping`, and capture any new filenames. Then use
    /// LLVM APIs to encode the `virtual_file_mapping`, `expressions`, and `mapping_regions` into
    /// the given `coverage_mapping` byte buffer, compliant with the LLVM Coverage Mapping format.
    fn write_coverage_mapping(
        &mut self,
        expressions: Vec<CounterExpression>,
        counter_regions: impl Iterator<Item = (Counter, &'a CodeRegion)>,
        coverage_mapping_buffer: &RustString,
    ) {
        let mut counter_regions = counter_regions.collect::<Vec<_>>();
        if counter_regions.is_empty() {
            // Nothing to encode for this function; leave the buffer untouched.
            return;
        }
        let mut virtual_file_mapping = Vec::new();
        let mut mapping_regions = Vec::new();
        let mut current_file_name = None;
        let mut current_file_id = 0;
        // Convert the list of (Counter, CodeRegion) pairs to an array of `CounterMappingRegion`, sorted
        // by filename and position. Capture any new files to compute the `CounterMappingRegion`s
        // `file_id` (indexing files referenced by the current function), and construct the
        // function-specific `virtual_file_mapping` from `file_id` to its index in the module's
        // `filenames` array.
        counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
        for (counter, region) in counter_regions {
            let CodeRegion { file_name, start_line, start_col, end_line, end_col } = *region;
            let same_file = current_file_name.as_ref().map_or(false, |p| *p == file_name);
            if !same_file {
                // Regions are sorted by filename, so a new filename means a
                // new function-local file id.
                if current_file_name.is_some() {
                    current_file_id += 1;
                }
                current_file_name = Some(file_name);
                let c_filename = CString::new(file_name.to_string())
                    .expect("null error converting filename to C string");
                debug!("  file_id: {} = '{:?}'", current_file_id, c_filename);
                // `insert_full` dedupes: an already-seen filename reuses its index.
                let (filenames_index, _) = self.filenames.insert_full(c_filename);
                virtual_file_mapping.push(filenames_index as u32);
            }
            debug!("Adding counter {:?} to map for {:?}", counter, region);
            mapping_regions.push(CounterMappingRegion::code_region(
                counter,
                current_file_id,
                start_line,
                start_col,
                end_line,
                end_col,
            ));
        }
        // Encode and append the current function's coverage mapping data
        coverageinfo::write_mapping_to_buffer(
            virtual_file_mapping,
            expressions,
            mapping_regions,
            coverage_mapping_buffer,
        );
    }
    /// Construct coverage map header and the array of function records, and combine them into the
    /// coverage map. Save the coverage map data into the LLVM IR as a static global using a
    /// specific, well-known section and name.
    fn generate_coverage_map(
        self,
        cx: &CodegenCx<'ll, 'tcx>,
        version: u32,
        filenames_size: usize,
        filenames_val: &'ll llvm::Value,
    ) -> &'ll llvm::Value {
        debug!("cov map: filenames_size = {}, 0-based version = {}", filenames_size, version);
        // Create the coverage data header (Note, fields 0 and 2 are now always zero,
        // as of `llvm::coverage::CovMapVersion::Version4`.)
        let zero_was_n_records_val = cx.const_u32(0);
        let filenames_size_val = cx.const_u32(filenames_size as u32);
        let zero_was_coverage_size_val = cx.const_u32(0);
        let version_val = cx.const_u32(version);
        let cov_data_header_val = cx.const_struct(
            &[zero_was_n_records_val, filenames_size_val, zero_was_coverage_size_val, version_val],
            /*packed=*/ false,
        );
        // Create the complete LLVM coverage data value to add to the LLVM IR
        cx.const_struct(&[cov_data_header_val, filenames_val], /*packed=*/ false)
    }
}
/// Builds one function record — combining the function's identity, source
/// hash, filenames reference, and its encoded coverage mapping — and saves it
/// into the LLVM IR as a static global in a specific, well-known section and
/// name.
fn save_function_record(
    cx: &CodegenCx<'ll, 'tcx>,
    mangled_function_name: String,
    function_source_hash: u64,
    filenames_ref: u64,
    coverage_mapping_buffer: Vec<u8>,
) {
    // Hash the mangled name and measure the encoded mapping before handing the
    // bytes over to an LLVM constant.
    let func_name_hash = coverageinfo::hash_str(&mangled_function_name);
    let coverage_mapping_size = coverage_mapping_buffer.len();
    let coverage_mapping_val = cx.const_bytes(&coverage_mapping_buffer[..]);
    // Assemble the packed record:
    // (name hash, mapping size, source hash, filenames ref, mapping bytes).
    let func_record_val = cx.const_struct(
        &[
            cx.const_u64(func_name_hash),
            cx.const_u32(coverage_mapping_size as u32),
            cx.const_u64(function_source_hash),
            cx.const_u64(filenames_ref),
            coverage_mapping_val,
        ],
        /*packed=*/ true,
    );
    // At the present time, the coverage map for Rust assumes every instrumented function `is_used`.
    // Note that Clang marks functions as "unused" in `CodeGenPGO::emitEmptyCounterMapping`. (See:
    // https://github.com/rust-lang/llvm-project/blob/de02a75e398415bad4df27b4547c25b896c8bf3b/clang%2Flib%2FCodeGen%2FCodeGenPGO.cpp#L877-L878
    // for example.)
    //
    // It's not yet clear if or how this may be applied to Rust in the future, but the `is_used`
    // argument is available and handled similarly.
    let is_used = true;
    coverageinfo::save_func_record_to_mod(cx, func_name_hash, func_record_val, is_used);
}
/// When finalizing the coverage map, `FunctionCoverage` only has the `CodeRegion`s and counters for
/// the functions that went through codegen; such as public functions and "used" functions
/// (functions referenced by other "used" or public items). Any other functions considered unused,
/// or "Unreachable" were still parsed and processed through the MIR stage.
///
/// We can find the unreachable functions by the set difference of all MIR `DefId`s (`tcx` query
/// `mir_keys`) minus the codegenned `DefId`s (`tcx` query `collect_and_partition_mono_items`).
///
/// *HOWEVER* the codegenned `DefId`s are partitioned across multiple `CodegenUnit`s (CGUs), and
/// this function is processing a `function_coverage_map` for the functions (`Instance`/`DefId`)
/// allocated to only one of those CGUs. We must NOT inject any "Unreachable" functions's
/// `CodeRegion`s more than once, so we have to pick which CGU's `function_coverage_map` to add
/// each "Unreachable" function to.
///
/// Some constraints:
///
/// 1. The file name of an "Unreachable" function must match the file name of the existing
///    codegenned (covered) function to which the unreachable code regions will be added.
/// 2. The function to which the unreachable code regions will be added must not be a generic
///    function (must not have type parameters) because the coverage tools will get confused
///    if the codegenned function has more than one instantiation and additional `CodeRegion`s
///    attached to only one of those instantiations.
fn add_unreachable_coverage<'tcx>(
    tcx: TyCtxt<'tcx>,
    function_coverage_map: &mut FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>,
) {
    // FIXME(#79622): Can this solution be simplified and/or improved? Are there other sources
    // of compiler state data that might help (or better sources that could be exposed, but
    // aren't yet)?
    // Note: If the crate *only* defines generic functions, there are no codegenerated non-generic
    // functions to add any unreachable code to. In this case, the unreachable code regions will
    // have no coverage, instead of having coverage with zero executions.
    //
    // This is probably still an improvement over Clang, which does not generate any coverage
    // for uninstantiated template functions.
    //
    // NOTE(review): this uses `attrs(tcx).len()` as a generic-ness proxy, while the
    // loop below uses `generics_of(def_id).count()` — confirm these agree.
    let has_non_generic_def_ids =
        function_coverage_map.keys().any(|instance| instance.def.attrs(tcx).len() == 0);
    if !has_non_generic_def_ids {
        // There are no non-generic functions to add unreachable `CodeRegion`s to
        return;
    }
    let all_def_ids: DefIdSet =
        tcx.mir_keys(LOCAL_CRATE).iter().map(|local_def_id| local_def_id.to_def_id()).collect();
    let (codegenned_def_ids, _) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
    let mut unreachable_def_ids_by_file: FxHashMap<Symbol, Vec<DefId>> = FxHashMap::default();
    for &non_codegenned_def_id in all_def_ids.difference(codegenned_def_ids) {
        // Make sure the non-codegenned (unreachable) function has a file_name
        if let Some(non_codegenned_file_name) = tcx.covered_file_name(non_codegenned_def_id) {
            let def_ids = unreachable_def_ids_by_file
                .entry(*non_codegenned_file_name)
                .or_insert_with(Vec::new);
            def_ids.push(non_codegenned_def_id);
        }
    }
    if unreachable_def_ids_by_file.is_empty() {
        // There are no unreachable functions with file names to add (in any CGU)
        return;
    }
    // Since there may be multiple `CodegenUnit`s, some codegenned_def_ids may be codegenned in a
    // different CGU, and will be added to the function_coverage_map for each CGU. Determine which
    // function_coverage_map has the responsibility for publishing unreachable coverage
    // based on file name:
    //
    // For each covered file name, sort ONLY the non-generic codegenned_def_ids, and if
    // covered_def_ids.contains(the first def_id) for a given file_name, add the unreachable code
    // region in this function_coverage_map. Otherwise, ignore it and assume another CGU's
    // function_coverage_map will be adding it (because it will be first for one, and only one,
    // of them).
    let mut sorted_codegenned_def_ids: Vec<DefId> =
        codegenned_def_ids.iter().copied().collect();
    sorted_codegenned_def_ids.sort_unstable();
    let mut first_covered_def_id_by_file: FxHashMap<Symbol, DefId> = FxHashMap::default();
    for &def_id in sorted_codegenned_def_ids.iter() {
        // Only consider non-generic functions, to potentially add unreachable code regions
        if tcx.generics_of(def_id).count() == 0 {
            if let Some(covered_file_name) = tcx.covered_file_name(def_id) {
                // Only add files known to have unreachable functions
                if unreachable_def_ids_by_file.contains_key(covered_file_name) {
                    first_covered_def_id_by_file.entry(*covered_file_name).or_insert(def_id);
                }
            }
        }
    }
    // Get the set of def_ids with coverage regions, known by *this* CoverageContext.
    let cgu_covered_def_ids: DefIdSet =
        function_coverage_map.keys().map(|instance| instance.def.def_id()).collect();
    let mut cgu_covered_files: FxHashSet<Symbol> = first_covered_def_id_by_file
        .iter()
        .filter_map(
            |(&file_name, def_id)| {
                if cgu_covered_def_ids.contains(def_id) { Some(file_name) } else { None }
            },
        )
        .collect();
    // Find the first covered, non-generic function (instance) for each cgu_covered_file. Take the
    // unreachable code regions for that file, and add them to the function.
    //
    // There are three `for` loops here, but (a) the lists have already been reduced to the minimum
    // required values, the lists are further reduced (by `remove()` calls) when elements are no
    // longer needed, and there are several opportunities to branch out of loops early.
    for (instance, function_coverage) in function_coverage_map.iter_mut() {
        if instance.def.attrs(tcx).len() > 0 {
            continue;
        }
        // The covered function is not generic...
        let covered_def_id = instance.def.def_id();
        if let Some(covered_file_name) = tcx.covered_file_name(covered_def_id) {
            if !cgu_covered_files.remove(&covered_file_name) {
                continue;
            }
            // The covered function's file is one of the files with unreachable code regions, so
            // all of the unreachable code regions for this file will be added to this function.
            for def_id in
                unreachable_def_ids_by_file.remove(&covered_file_name).into_iter().flatten()
            {
                // Note, this loop adds an unreachable code regions for each MIR-derived region.
                // Alternatively, we could add a single code region for the maximum span of all
                // code regions here.
                //
                // Observed downsides of this approach are:
                //
                // 1. The coverage results will appear inconsistent compared with the same (or
                //    similar) code in a function that is reached.
                // 2. If the function is unreachable from one crate but reachable when compiling
                //    another referencing crate (such as a cross-crate reference to a
                //    generic function or inlined function), actual coverage regions overlaid
                //    on a single larger code span of `Zero` coverage can appear confusing or
                //    wrong. Changing the unreachable coverage from a `code_region` to a
                //    `gap_region` can help, but still can look odd with `0` line counts for
                //    lines between executed (> 0) lines (such as for blank lines or comments).
                //
                // The original source here was corrupted by HTML-entity decoding
                // (`&region` -> `(R)ion`); restored to `for &region in …`.
                for &region in tcx.covered_code_regions(def_id) {
                    function_coverage.add_unreachable_region(region.clone());
                }
            }
            if cgu_covered_files.is_empty() {
                break;
            }
        }
    }
}
|
/// Constraint generation and solving; constraints represent the various type
/// relationships that must be preserved.
mod heeren;
mod heeren_tests;
// Unification machinery; re-exported below as this module's public surface.
mod unification;
mod unification_tests;
pub use self::unification::*;
|
//! Solver configuration.
#![allow(missing_docs)]
#[cfg(test)]
#[path = "../../../tests/unit/extensions/solve/config_test.rs"]
mod config_test;
extern crate serde_json;
use serde::Deserialize;
use std::io::{BufReader, Read};
use std::sync::Arc;
use vrp_core::models::common::SingleDimLoad;
use vrp_core::models::Problem;
use vrp_core::solver::hyper::*;
use vrp_core::solver::mutation::*;
use vrp_core::solver::population::*;
use vrp_core::solver::{Builder, Telemetry, TelemetryMode};
use vrp_core::utils::{Environment, Parallelism, Random};
/// An algorithm configuration.
///
/// Deserialized from a user-supplied document; every section is optional, so
/// a partial (or empty) configuration is accepted.
#[derive(Clone, Deserialize, Debug)]
pub struct Config {
    /// Specifies evolution configuration.
    pub evolution: Option<EvolutionConfig>,
    /// Specifies hyper heuristic type.
    pub hyper: Option<HyperType>,
    /// Specifies algorithm termination configuration.
    pub termination: Option<TerminationConfig>,
    /// Specifies environment configuration.
    pub environment: Option<EnvironmentConfig>,
    /// Specifies telemetry configuration.
    pub telemetry: Option<TelemetryConfig>,
}
/// An evolution configuration.
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct EvolutionConfig {
pub initial: Option<InitialConfig>,
pub population: Option<PopulationType>,
}
/// A population implementation selector, deserialized from the `type` tag
/// (one of "greedy", "elitism", "rosomaxa").
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
pub enum PopulationType {
    /// A greedy population keeps track only of one best-known individual.
    #[serde(rename(deserialize = "greedy"))]
    #[serde(rename_all = "camelCase")]
    Greedy {
        /// Selection size. Default is number of cpus.
        selection_size: Option<usize>,
    },
    /// A basic population which sorts individuals based on their
    /// dominance order.
    #[serde(rename(deserialize = "elitism"))]
    #[serde(rename_all = "camelCase")]
    Elitism {
        /// Max population size. Default is 4.
        max_size: Option<usize>,
        /// Selection size. Default is number of cpus.
        selection_size: Option<usize>,
    },
    /// A population algorithm based on SOM.
    #[serde(rename(deserialize = "rosomaxa"))]
    #[serde(rename_all = "camelCase")]
    Rosomaxa {
        /// Selection size. Default is number of cpus.
        selection_size: Option<usize>,
        /// Elite population size. Default is 2.
        max_elite_size: Option<usize>,
        /// Node population size. Default is 2.
        max_node_size: Option<usize>,
        /// Spread factor. Default is 0.25.
        spread_factor: Option<f64>,
        /// Distribution factor. Default is 0.25.
        distribution_factor: Option<f64>,
        /// Learning rate. Default is 0.1.
        learning_rate: Option<f64>,
        /// A rebalance memory. Default is 500.
        rebalance_memory: Option<usize>,
        /// A rebalance count. Default is 2.
        rebalance_count: Option<usize>,
        /// An exploration phase ratio. Default is 0.9.
        exploration_ratio: Option<f64>,
    },
}
/// An initial solution configuration.
#[derive(Clone, Deserialize, Debug)]
pub struct InitialConfig {
    /// A recreate method used to build the first initial solution.
    pub method: RecreateMethod,
    /// Alternative recreate methods used for the remaining initial solutions.
    pub alternatives: InitialAlternativesConfig,
}
/// An initial solution alternatives configuration.
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct InitialAlternativesConfig {
    /// Alternative recreate methods, combined with `InitialConfig::method`
    /// and passed to `Builder::with_init_params`.
    pub methods: Vec<RecreateMethod>,
    /// Max size parameter forwarded to `Builder::with_init_params`.
    pub max_size: usize,
    /// Quota parameter forwarded to `Builder::with_init_params`.
    pub quota: f64,
}
/// A selection operator configuration.
///
/// NOTE(review): not referenced by any builder function in this module —
/// presumably consumed elsewhere; confirm before removing.
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "camelCase")]
pub enum SelectionType {
    #[serde(rename(deserialize = "naive"))]
    Naive {
        /// A size of offspring.
        offspring_size: Option<usize>,
    },
}
/// A hyper heuristic configuration, deserialized from the `type` tag.
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum HyperType {
    /// A hyper heuristic which selects one mutation from the list based on its predefined probability.
    #[serde(rename(deserialize = "static-selective"))]
    StaticSelective {
        /// A collection of inner mutation operators (metaheuristics).
        /// When omitted, solver defaults are used (see `configure_from_hyper`).
        mutations: Option<Vec<MutationType>>,
    },
    /// A hyper heuristic which selects mutations from the predefined list using reinforcement
    /// learning techniques. Takes no parameters.
    #[serde(rename(deserialize = "dynamic-selective"))]
    DynamicSelective,
}
/// A mutation operator configuration, deserialized from the `type` tag.
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum MutationType {
    /// A metaheuristic which splits problem into smaller and solves them independently.
    #[serde(rename(deserialize = "decomposition"))]
    #[serde(rename_all = "camelCase")]
    Decomposition {
        /// Max routes to be selected in decomposed solution.
        /// `min` must be at least 2 (validated in `create_mutation`).
        routes: MinMaxConfig,
        /// Amount of attempts to repeat refinement.
        /// Must be at least 1 (validated in `create_mutation`).
        repeat: usize,
        /// Probability of mutation.
        probability: MutationProbabilityType,
    },
    /// A local search heuristic.
    #[serde(rename(deserialize = "local-search"))]
    LocalSearch {
        /// Probability of mutation.
        probability: MutationProbabilityType,
        /// Amount of times one of operators is applied.
        times: MinMaxConfig,
        /// Local search operator.
        operators: Vec<LocalOperatorType>,
    },
    /// A ruin and recreate metaheuristic settings.
    #[serde(rename(deserialize = "ruin-recreate"))]
    RuinRecreate {
        /// Probability.
        probability: MutationProbabilityType,
        /// Ruin methods.
        ruins: Vec<RuinGroupConfig>,
        /// Recreate methods.
        recreates: Vec<RecreateMethod>,
    },
}
/// A mutation method probability type.
///
/// Deserialized `untagged`: the variant is inferred from the JSON shape
/// (a `scalar` field vs. `threshold`/`phases` fields).
#[derive(Clone, Deserialize, Debug)]
#[serde(untagged)]
pub enum MutationProbabilityType {
    /// A scalar probability based type.
    Scalar {
        /// Probability value of the mutation.
        scalar: f64,
    },
    /// A context specific probability type.
    Context {
        /// Threshold parameters.
        threshold: ContextThreshold,
        /// Selection phase specific parameters.
        phases: Vec<ContextPhase>,
    },
}
/// A context condition for `MutationProbabilityType`.
#[derive(Clone, Deserialize, Debug)]
pub struct ContextThreshold {
    /// Min amount of jobs in individual.
    pub jobs: usize,
    /// Min amount of routes in individual.
    pub routes: usize,
}
/// A selection phase filter for `MutationProbabilityType`.
/// Each variant maps to a `SelectionPhase` in `create_mutation_probability`.
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum ContextPhase {
    /// Initial selection phase.
    #[serde(rename(deserialize = "initial"))]
    Initial {
        /// A chance defined by probability.
        chance: f64,
    },
    /// Exploration search phase.
    #[serde(rename(deserialize = "exploration"))]
    Exploration {
        /// A chance defined by probability.
        chance: f64,
    },
    /// Exploitation search phase.
    #[serde(rename(deserialize = "exploitation"))]
    Exploitation {
        /// A chance defined by probability.
        chance: f64,
    },
}
/// A ruin method configuration.
#[derive(Clone, Deserialize, Debug)]
pub struct RuinGroupConfig {
    /// Ruin methods.
    methods: Vec<RuinMethod>,
    /// Weight of the group.
    weight: usize,
}
/// Specifies ruin methods with their probability weight and specific parameters.
///
/// Each variant's `probability` is returned as the method's weight by
/// `create_ruin_method`; the remaining fields parameterize the concrete
/// removal strategy (most feed a `RuinLimits::new(min, max, threshold, 8)`).
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum RuinMethod {
    /// Adjusted string removal method.
    #[serde(rename(deserialize = "adjusted-string"))]
    AdjustedString { probability: f64, lmax: usize, cavg: usize, alpha: f64 },
    /// Neighbour jobs method
    #[serde(rename(deserialize = "neighbour"))]
    Neighbour { probability: f64, min: usize, max: usize, threshold: f64 },
    /// Random job removal method.
    #[serde(rename(deserialize = "random-job"))]
    RandomJob { probability: f64, min: usize, max: usize, threshold: f64 },
    /// Random route removal method.
    #[serde(rename(deserialize = "random-route"))]
    RandomRoute { probability: f64, min: usize, max: usize, threshold: f64 },
    /// Close route removal method.
    #[serde(rename(deserialize = "close-route"))]
    CloseRoute { probability: f64 },
    /// Random ruin removal method.
    #[serde(rename(deserialize = "random-ruin"))]
    RandomRuin { probability: f64 },
    /// Worst job removal method.
    #[serde(rename(deserialize = "worst-job"))]
    WorstJob { probability: f64, min: usize, max: usize, threshold: f64, skip: usize },
    /// Clustered jobs removal method.
    #[serde(rename(deserialize = "cluster"))]
    #[serde(rename_all = "camelCase")]
    Cluster { probability: f64, min: usize, max: usize, threshold: f64, min_items: usize },
}
/// Specifies recreate methods with their probability weight and specific parameters.
///
/// The `weight` of every variant is returned as the method's selection weight
/// by `create_recreate_method`.
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum RecreateMethod {
    /// Cheapest insertion method.
    #[serde(rename(deserialize = "cheapest"))]
    Cheapest { weight: usize },
    /// SkipBest insertion method.
    #[serde(rename(deserialize = "skip-best"))]
    SkipBest { weight: usize, start: usize, end: usize },
    #[serde(rename(deserialize = "blinks"))]
    /// Insertion with blinks method.
    Blinks { weight: usize },
    #[serde(rename(deserialize = "gaps"))]
    /// Insertion with gaps method.
    Gaps { weight: usize, min: usize },
    /// Nearest neighbour method.
    #[serde(rename(deserialize = "nearest"))]
    Nearest { weight: usize },
    #[serde(rename(deserialize = "skip-random"))]
    /// Insertion with skip random method.
    SkipRandom { weight: usize },
    /// Farthest insertion method.
    #[serde(rename(deserialize = "farthest"))]
    Farthest { weight: usize },
    /// Insertion with cost perturbation method (see `RecreateWithPerturbation`).
    #[serde(rename(deserialize = "perturbation"))]
    Perturbation { weight: usize, probability: f64, min: f64, max: f64 },
    /// Regret insertion method (see `RecreateWithRegret`).
    #[serde(rename(deserialize = "regret"))]
    Regret { weight: usize, start: usize, end: usize },
}
/// A local search configuration.
/// Variants map one-to-one onto the `Exchange*` operators in `create_local_search`.
#[derive(Clone, Deserialize, Debug)]
#[serde(tag = "type")]
pub enum LocalOperatorType {
    /// Best exchange between routes (see `ExchangeInterRouteBest`).
    #[serde(rename(deserialize = "inter-route-best"))]
    InterRouteBest { weight: usize, noise: NoiseConfig },
    /// Random exchange between routes (see `ExchangeInterRouteRandom`).
    #[serde(rename(deserialize = "inter-route-random"))]
    InterRouteRandom { weight: usize, noise: NoiseConfig },
    /// Random exchange within a route (see `ExchangeIntraRouteRandom`).
    #[serde(rename(deserialize = "intra-route-random"))]
    IntraRouteRandom { weight: usize, noise: NoiseConfig },
}
/// Noise parameters passed to the local search operators
/// as (probability, min, max).
#[derive(Clone, Deserialize, Debug)]
pub struct NoiseConfig {
    probability: f64,
    min: f64,
    max: f64,
}
/// A termination criteria configuration; each criterion is optional.
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct TerminationConfig {
    /// Max running time, forwarded to `Builder::with_max_time`.
    pub max_time: Option<usize>,
    /// Max amount of generations, forwarded to `Builder::with_max_generations`.
    pub max_generations: Option<usize>,
    /// Cost variation criterion, forwarded to `Builder::with_min_cv`.
    pub variation: Option<VariationConfig>,
}
/// A cost variation termination criterion; fields are forwarded to
/// `Builder::with_min_cv` as the tuple (interval_type, value, cv, is_global).
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct VariationConfig {
    interval_type: String,
    value: usize,
    // cv: coefficient of variation threshold — presumably; confirm against
    // `Builder::with_min_cv` documentation.
    cv: f64,
    is_global: bool,
}
/// A telemetry config. Both sections are optional; which `TelemetryMode`
/// is chosen from their combination is decided in `configure_from_telemetry`.
#[derive(Clone, Deserialize, Debug)]
pub struct TelemetryConfig {
    /// Progress logging settings.
    logging: Option<LoggingConfig>,
    /// Metrics collection settings.
    metrics: Option<MetricsConfig>,
}
/// A logging configuration.
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct LoggingConfig {
    /// Specifies whether logging is enabled. Default is false.
    enabled: bool,
    /// Prefix of logging messages.
    prefix: Option<String>,
    /// Specifies how often best individual is logged. Default is 100 (generations).
    log_best: Option<usize>,
    /// Specifies how often population is logged. Default is 1000 (generations).
    log_population: Option<usize>,
    /// Specifies whether population should be dumped.
    dump_population: Option<bool>,
}
/// A metrics configuration.
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MetricsConfig {
    /// Specifies whether metrics collection is enabled. Default is false.
    enabled: bool,
    /// Specifies how often population is tracked. Default is 1000 (generations).
    track_population: Option<usize>,
}
/// An environment specific configuration.
#[derive(Clone, Deserialize, Debug)]
pub struct EnvironmentConfig {
    /// Specifies a data parallelism configuration.
    pub parallelism: Option<ParallelismConfig>,
}
/// Data parallelism configuration.
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ParallelismConfig {
    /// Number of thread pools.
    pub num_thread_pools: usize,
    /// Specifies amount of threads in each thread pool.
    pub threads_per_pool: usize,
}
/// An inclusive min/max pair used by several operator configurations.
#[derive(Clone, Deserialize, Debug, Eq, PartialEq)]
pub struct MinMaxConfig {
    pub min: usize,
    pub max: usize,
}
/// A named weight pair.
///
/// NOTE(review): not referenced by any builder function in this module —
/// presumably consumed elsewhere; confirm.
#[derive(Clone, Deserialize, Debug, Eq, PartialEq)]
pub struct NameWeight {
    pub name: String,
    pub weight: usize,
}
impl Default for Config {
fn default() -> Self {
Self { evolution: None, hyper: None, termination: None, environment: None, telemetry: None }
}
}
/// Applies the evolution section of the config to the solver builder.
///
/// Wires up two independent aspects when present:
/// * `initial`: the primary recreate method plus all alternatives are handed
///   to `Builder::with_init_params` together with max size and quota;
/// * `population`: one of `Greedy`/`Elitism`/`Rosomaxa` is instantiated,
///   with unset options replaced by defaults.
///
/// Returns an error when `Rosomaxa::new` rejects its configuration.
fn configure_from_evolution(
    mut builder: Builder,
    population_config: &Option<EvolutionConfig>,
) -> Result<Builder, String> {
    if let Some(config) = population_config {
        if let Some(initial) = &config.initial {
            let environment = builder.config.environment.clone();
            builder = builder.with_init_params(
                initial.alternatives.max_size,
                initial.alternatives.quota,
                // The primary method goes first, followed by all alternatives.
                std::iter::once(create_recreate_method(&initial.method, environment.clone()))
                    .chain(
                        initial
                            .alternatives
                            .methods
                            .iter()
                            .map(|method| create_recreate_method(method, environment.clone())),
                    )
                    .collect(),
            );
        }
        if let Some(variation) = &config.population {
            let default_selection_size = get_default_selection_size(builder.config.environment.as_ref());
            let population = match &variation {
                PopulationType::Greedy { selection_size } => Box::new(Greedy::new(
                    builder.config.problem.clone(),
                    selection_size.unwrap_or(default_selection_size),
                    None,
                )),
                PopulationType::Elitism { max_size, selection_size } => Box::new(Elitism::new(
                    builder.config.problem.clone(),
                    builder.config.environment.random.clone(),
                    // Default max size is 4 (see `PopulationType::Elitism` docs).
                    max_size.unwrap_or(4),
                    selection_size.unwrap_or(default_selection_size),
                ))
                    as Box<dyn Population + Send + Sync>,
                PopulationType::Rosomaxa {
                    max_elite_size,
                    max_node_size,
                    spread_factor,
                    distribution_factor,
                    learning_rate,
                    selection_size,
                    rebalance_memory,
                    rebalance_count,
                    exploration_ratio,
                } => {
                    // Start from defaults and overwrite only the options
                    // explicitly present in the config.
                    let mut config = RosomaxaConfig::new_with_defaults(default_selection_size);
                    if let Some(selection_size) = selection_size {
                        config.selection_size = *selection_size;
                    }
                    if let Some(max_elite_size) = max_elite_size {
                        config.elite_size = *max_elite_size;
                    }
                    if let Some(max_node_size) = max_node_size {
                        config.node_size = *max_node_size;
                    }
                    if let Some(spread_factor) = spread_factor {
                        config.spread_factor = *spread_factor;
                    }
                    if let Some(distribution_factor) = distribution_factor {
                        config.distribution_factor = *distribution_factor;
                    }
                    if let Some(learning_rate) = learning_rate {
                        config.learning_rate = *learning_rate;
                    }
                    if let Some(rebalance_memory) = rebalance_memory {
                        config.rebalance_memory = *rebalance_memory;
                    }
                    if let Some(rebalance_count) = rebalance_count {
                        config.rebalance_count = *rebalance_count;
                    }
                    if let Some(exploration_ratio) = exploration_ratio {
                        config.exploration_ratio = *exploration_ratio;
                    }
                    // `?` propagates an invalid rosomaxa configuration as Err.
                    Box::new(Rosomaxa::new(builder.config.problem.clone(), builder.config.environment.clone(), config)?)
                }
            };
            builder = builder.with_population(population);
        }
    }
    Ok(builder)
}
/// Applies the hyper heuristic section of the config to the solver builder.
///
/// * `StaticSelective` with explicit mutations builds the mutation group from
///   config (failing if any mutation config is invalid); without mutations the
///   default static-selective heuristic is used.
/// * `DynamicSelective` always uses its defaults.
fn configure_from_hyper(mut builder: Builder, hyper_config: &Option<HyperType>) -> Result<Builder, String> {
    if let Some(config) = hyper_config {
        match config {
            HyperType::StaticSelective { mutations } => {
                let static_selective = if let Some(mutations) = mutations {
                    // collect::<Result<..>> short-circuits on the first invalid
                    // mutation configuration.
                    let mutation_group = mutations
                        .iter()
                        .map(|mutation| {
                            create_mutation(
                                builder.config.problem.clone(),
                                builder.config.environment.clone(),
                                mutation,
                            )
                        })
                        .collect::<Result<Vec<_>, _>>()?;
                    vrp_core::solver::hyper::StaticSelective::new(mutation_group)
                } else {
                    vrp_core::solver::hyper::StaticSelective::new_with_defaults(
                        builder.config.problem.clone(),
                        builder.config.environment.clone(),
                    )
                };
                builder = builder.with_hyper(Box::new(static_selective));
            }
            HyperType::DynamicSelective => {
                let dynamic_selective = vrp_core::solver::hyper::DynamicSelective::new_with_defaults(
                    builder.config.problem.clone(),
                    builder.config.environment.clone(),
                );
                builder = builder.with_hyper(Box::new(dynamic_selective));
            }
        }
    }
    Ok(builder)
}
/// Applies the termination section of the config to the solver builder:
/// max time, max generations and the cost-variation criterion. When the
/// section is absent, the builder is returned unchanged.
fn configure_from_termination(builder: Builder, termination_config: &Option<TerminationConfig>) -> Builder {
    match termination_config {
        Some(config) => {
            let min_cv = config
                .variation
                .as_ref()
                .map(|v| (v.interval_type.clone(), v.value, v.cv, v.is_global));
            builder
                .with_max_time(config.max_time)
                .with_max_generations(config.max_generations)
                .with_min_cv(min_cv)
        }
        None => builder,
    }
}
/// Instantiates a recreate operator from its configuration, returning the
/// operator together with its selection weight.
fn create_recreate_method(
    method: &RecreateMethod,
    environment: Arc<Environment>,
) -> (Arc<dyn Recreate + Send + Sync>, usize) {
    match method {
        RecreateMethod::Cheapest { weight } => (Arc::new(RecreateWithCheapest::default()), *weight),
        RecreateMethod::Farthest { weight } => (Arc::new(RecreateWithFarthest::default()), *weight),
        RecreateMethod::SkipBest { weight, start, end } => (Arc::new(RecreateWithSkipBest::new(*start, *end)), *weight),
        RecreateMethod::Blinks { weight } => {
            // Blinks is the only method fixed to `SingleDimLoad` capacity here.
            (Arc::new(RecreateWithBlinks::<SingleDimLoad>::new_with_defaults(environment.random.clone())), *weight)
        }
        RecreateMethod::SkipRandom { weight } => (Arc::new(RecreateWithSkipRandom::default()), *weight),
        RecreateMethod::Gaps { weight, min } => (Arc::new(RecreateWithGaps::new(*min)), *weight),
        RecreateMethod::Nearest { weight } => (Arc::new(RecreateWithNearestNeighbor::default()), *weight),
        RecreateMethod::Regret { weight, start, end } => (Arc::new(RecreateWithRegret::new(*start, *end)), *weight),
        RecreateMethod::Perturbation { weight, probability, min, max } => {
            (Arc::new(RecreateWithPerturbation::new(*probability, *min, *max, environment.random.clone())), *weight)
        }
    }
}
/// Instantiates a mutation operator (metaheuristic) from its configuration,
/// returning the operator paired with its application probability.
///
/// # Errors
/// Returns an error for `Decomposition` when `repeat` is less than 1 or
/// `routes.min` is less than 2.
fn create_mutation(
    problem: Arc<Problem>,
    environment: Arc<Environment>,
    mutation: &MutationType,
) -> Result<(Arc<dyn Mutation + Send + Sync>, MutationProbability), String> {
    Ok(match mutation {
        MutationType::RuinRecreate { probability, ruins, recreates } => {
            // Combine all configured ruin groups / recreate methods into
            // weighted composite operators.
            let ruin = Arc::new(WeightedRuin::new(
                ruins.iter().map(|g| create_ruin_group(&problem, environment.clone(), g)).collect(),
            ));
            let recreate = Arc::new(WeightedRecreate::new(
                recreates.iter().map(|r| create_recreate_method(r, environment.clone())).collect(),
            ));
            (
                Arc::new(RuinAndRecreate::new(ruin, recreate)),
                create_mutation_probability(probability, environment.random.clone()),
            )
        }
        MutationType::LocalSearch { probability, times, operators: inners } => {
            let operator = create_local_search(times, inners);
            (Arc::new(LocalSearch::new(operator)), create_mutation_probability(probability, environment.random.clone()))
        }
        MutationType::Decomposition { routes, repeat, probability } => {
            // Error messages match the checks: the original text claimed
            // "greater than 1"/"greater than 2" while the conditions actually
            // enforce >= 1 and >= 2.
            if *repeat < 1 {
                return Err(format!("repeat must be greater or equal to 1. Specified: {}", repeat));
            }
            if routes.min < 2 {
                return Err(format!("min routes must be greater or equal to 2. Specified: {}", routes.min));
            }
            let mutation =
                vrp_core::solver::hyper::StaticSelective::create_default_mutation(problem, environment.clone());
            (
                Arc::new(DecomposeSearch::new(mutation, (routes.min, routes.max), *repeat)),
                create_mutation_probability(probability, environment.random.clone()),
            )
        }
    })
}
/// Builds a `MutationProbability` from its configuration: either a plain
/// scalar probability, or a context-sensitive one parameterized by job/route
/// thresholds and per-selection-phase chances.
fn create_mutation_probability(
    probability: &MutationProbabilityType,
    random: Arc<dyn Random + Send + Sync>,
) -> MutationProbability {
    match probability {
        MutationProbabilityType::Scalar { scalar } => create_scalar_mutation_probability(*scalar, random),
        MutationProbabilityType::Context { threshold, phases } => create_context_mutation_probability(
            threshold.jobs,
            threshold.routes,
            // Translate each config phase into its `SelectionPhase` + chance pair.
            phases
                .iter()
                .map(|phase| match phase {
                    ContextPhase::Initial { chance } => (SelectionPhase::Initial, *chance),
                    ContextPhase::Exploration { chance } => (SelectionPhase::Exploration, *chance),
                    ContextPhase::Exploitation { chance } => (SelectionPhase::Exploitation, *chance),
                })
                .collect(),
            random,
        ),
    }
}
/// Builds a weighted ruin group: all configured methods plus the group weight.
fn create_ruin_group(problem: &Arc<Problem>, environment: Arc<Environment>, group: &RuinGroupConfig) -> RuinGroup {
    (group.methods.iter().map(|r| create_ruin_method(problem, environment.clone(), r)).collect(), group.weight)
}
/// Instantiates a ruin operator from its configuration, returning the
/// operator together with its probability weight.
fn create_ruin_method(
    problem: &Arc<Problem>,
    environment: Arc<Environment>,
    method: &RuinMethod,
) -> (Arc<dyn Ruin + Send + Sync>, f64) {
    match method {
        RuinMethod::AdjustedString { probability, lmax, cavg, alpha } => {
            (Arc::new(AdjustedStringRemoval::new(*lmax, *cavg, *alpha)), *probability)
        }
        // The trailing `8` below is a fixed RuinLimits parameter shared by all
        // limit-based methods in this module.
        RuinMethod::Neighbour { probability, min, max, threshold } => {
            (Arc::new(NeighbourRemoval::new(RuinLimits::new(*min, *max, *threshold, 8))), *probability)
        }
        RuinMethod::RandomJob { probability, min, max, threshold } => {
            (Arc::new(RandomJobRemoval::new(RuinLimits::new(*min, *max, *threshold, 8))), *probability)
        }
        RuinMethod::RandomRoute { probability, min, max, threshold } => {
            (Arc::new(RandomRouteRemoval::new(*min, *max, *threshold)), *probability)
        }
        RuinMethod::WorstJob { probability, min, max, threshold, skip: worst_skip } => {
            (Arc::new(WorstJobRemoval::new(*worst_skip, RuinLimits::new(*min, *max, *threshold, 8))), *probability)
        }
        RuinMethod::Cluster { probability, min, max, threshold, min_items } => (
            Arc::new(ClusterRemoval::new(
                problem.clone(),
                environment,
                *min_items,
                RuinLimits::new(*min, *max, *threshold, 8),
            )),
            *probability,
        ),
        RuinMethod::CloseRoute { probability } => (Arc::new(CloseRouteRemoval::default()), *probability),
        RuinMethod::RandomRuin { probability } => (StaticSelective::create_default_random_ruin(), *probability),
    }
}
/// Builds a composite local search operator from the configured inner
/// operators; `times` bounds how many times an operator is applied.
fn create_local_search(times: &MinMaxConfig, inners: &[LocalOperatorType]) -> Arc<dyn LocalOperator + Send + Sync> {
    let operators = inners
        .iter()
        .map::<(Arc<dyn LocalOperator + Send + Sync>, usize), _>(|op| match op {
            LocalOperatorType::InterRouteBest { weight, noise } => {
                (Arc::new(ExchangeInterRouteBest::new(noise.probability, noise.min, noise.max)), *weight)
            }
            LocalOperatorType::InterRouteRandom { weight, noise } => {
                (Arc::new(ExchangeInterRouteRandom::new(noise.probability, noise.min, noise.max)), *weight)
            }
            LocalOperatorType::IntraRouteRandom { weight, noise } => {
                (Arc::new(ExchangeIntraRouteRandom::new(noise.probability, noise.min, noise.max)), *weight)
            }
        })
        .collect::<Vec<_>>();
    Arc::new(CompositeLocalOperator::new(operators, times.min, times.max))
}
/// Applies the telemetry section of the config to the solver builder.
///
/// Chooses the `TelemetryMode` from which sections are present and enabled:
/// only metrics -> `OnlyMetrics`, only logging -> `OnlyLogging`, both enabled
/// -> `All`, anything else -> `None`.
fn configure_from_telemetry(builder: Builder, telemetry_config: &Option<TelemetryConfig>) -> Builder {
    // Defaults documented on `LoggingConfig`/`MetricsConfig`.
    const LOG_BEST: usize = 100;
    const LOG_POPULATION: usize = 1000;
    const TRACK_POPULATION: usize = 1000;
    // Logger prints to stdout, optionally prefixing every message.
    let create_logger = |prefix: Option<String>| -> Arc<dyn Fn(&str)> {
        if let Some(prefix) = prefix {
            Arc::new(move |msg: &str| println!("{}{}", prefix, msg))
        } else {
            Arc::new(|msg: &str| println!("{}", msg))
        }
    };
    let create_metrics = |track_population: &Option<usize>| TelemetryMode::OnlyMetrics {
        track_population: track_population.unwrap_or(TRACK_POPULATION),
    };
    let create_logging = |log_best: &Option<usize>,
                          log_population: &Option<usize>,
                          dump_population: &Option<bool>,
                          prefix: Option<String>| {
        TelemetryMode::OnlyLogging {
            logger: create_logger(prefix),
            log_best: log_best.unwrap_or(LOG_BEST),
            log_population: log_population.unwrap_or(LOG_POPULATION),
            dump_population: dump_population.unwrap_or(false),
        }
    };
    let telemetry_mode = match telemetry_config.as_ref().map(|t| (&t.logging, &t.metrics)) {
        // Metrics only, and enabled.
        Some((None, Some(MetricsConfig { enabled, track_population }))) if *enabled => create_metrics(track_population),
        // Logging only, and enabled.
        Some((Some(LoggingConfig { enabled, prefix, log_best, log_population, dump_population }), None))
            if *enabled =>
        {
            create_logging(log_best, log_population, dump_population, prefix.clone())
        }
        // Both sections present: combine according to their enabled flags.
        Some((
            Some(LoggingConfig { enabled: logging_enabled, prefix, log_best, log_population, dump_population }),
            Some(MetricsConfig { enabled: metrics_enabled, track_population }),
        )) => match (logging_enabled, metrics_enabled) {
            (true, true) => TelemetryMode::All {
                logger: create_logger(prefix.clone()),
                log_best: log_best.unwrap_or(LOG_BEST),
                log_population: log_population.unwrap_or(LOG_POPULATION),
                track_population: track_population.unwrap_or(TRACK_POPULATION),
                dump_population: dump_population.unwrap_or(false),
            },
            (true, false) => create_logging(log_best, log_population, dump_population, prefix.clone()),
            (false, true) => create_metrics(track_population),
            _ => TelemetryMode::None,
        },
        // Missing/disabled config disables telemetry entirely.
        _ => TelemetryMode::None,
    };
    builder.with_telemetry(Telemetry::new(telemetry_mode))
}
/// Builds the execution environment from an optional configuration.
/// Only parallelism settings are taken from the config; every other
/// environment value keeps its default.
fn configure_from_environment(environment_config: &Option<EnvironmentConfig>) -> Arc<Environment> {
    let mut environment = Environment::default();
    // TODO validate parameters
    let parallelism = environment_config.as_ref().and_then(|config| config.parallelism.as_ref());
    if let Some(config) = parallelism {
        environment.parallelism = Parallelism::new(config.num_thread_pools, config.threads_per_pool);
    }
    Arc::new(environment)
}
/// Reads the solver configuration from a JSON reader.
///
/// # Errors
/// Returns a human-readable message when deserialization fails.
pub fn read_config<R: Read>(reader: BufReader<R>) -> Result<Config, String> {
    match serde_json::from_reader(reader) {
        Ok(config) => Ok(config),
        Err(err) => Err(format!("cannot deserialize config: '{}'", err)),
    }
}
/// Creates a solver `Builder` from a config file reader: the JSON is parsed
/// first and the resulting config is then applied to a fresh builder.
pub fn create_builder_from_config_file<R: Read>(
    problem: Arc<Problem>,
    reader: BufReader<R>,
) -> Result<Builder, String> {
    let config = read_config(reader)?;
    create_builder_from_config(problem, &config)
}
/// Creates a solver `Builder` from a parsed config by applying each config
/// section in turn: environment, telemetry, evolution, hyper heuristic and
/// termination criteria.
pub fn create_builder_from_config(problem: Arc<Problem>, config: &Config) -> Result<Builder, String> {
    let environment = configure_from_environment(&config.environment);
    let builder = Builder::new(problem, environment);
    let builder = configure_from_telemetry(builder, &config.telemetry);
    let builder = configure_from_evolution(builder, &config.evolution)?;
    let builder = configure_from_hyper(builder, &config.hyper)?;
    Ok(configure_from_termination(builder, &config.termination))
}
|
use crate::EntityWithMetadata;
use azure_core::{
headers::{etag_from_headers, get_str_from_headers, CommonStorageResponseHeaders},
prelude::Etag,
util::HeaderMapExt,
};
use bytes::Bytes;
use http::Response;
use serde::de::DeserializeOwned;
use std::convert::{TryFrom, TryInto};
use url::Url;
/// Response of a table "insert entity" operation.
#[derive(Debug, Clone)]
pub struct InsertEntityResponse<E>
where
    E: DeserializeOwned,
{
    /// Common storage headers parsed from the response.
    pub common_storage_response_headers: CommonStorageResponseHeaders,
    /// Entity tag from the `ETag` response header.
    pub etag: Etag,
    /// Parsed `location` header, when present.
    pub location: Option<Url>,
    /// The inserted entity; only present when the service echoed the content
    /// back (`Preference-Applied: return-content`).
    pub entity_with_metadata: Option<EntityWithMetadata<E>>,
}
impl<E> TryFrom<&Response<Bytes>> for InsertEntityResponse<E>
where
    E: DeserializeOwned,
{
    type Error = crate::Error;
    /// Builds the typed response from a raw HTTP response.
    ///
    /// The `preference-applied` header decides whether the body contains the
    /// echoed entity ("return-content") or nothing ("return-no-content");
    /// any other value is an error.
    fn try_from(response: &Response<Bytes>) -> Result<Self, Self::Error> {
        debug!("{}", std::str::from_utf8(response.body())?);
        debug!("headers == {:#?}", response.headers());
        let entity_with_metadata =
            match get_str_from_headers(response.headers(), "preference-applied")? {
                "return-no-content" => None,
                "return-content" => Some(response.try_into()?),
                _ => {
                    return Err(crate::Error::GenericErrorWithText(
                        "Unexpected value for preference-applied header".to_owned(),
                    ))
                }
            };
        Ok(InsertEntityResponse {
            common_storage_response_headers: response.headers().try_into()?,
            etag: etag_from_headers(response.headers())?.into(),
            // The location header is optional; parse it only when present.
            location: response
                .headers()
                .get_as_str("location")
                .map(Url::parse)
                .transpose()?,
            entity_with_metadata,
        })
    }
}
|
use bson::{RawDocument, RawDocumentBuf};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use super::wire::Message;
use crate::{
bson::{rawdoc, Document},
bson_util::extend_raw_document_buf,
client::{options::ServerApi, ClusterTime, HELLO_COMMAND_NAMES, REDACTED_COMMANDS},
error::{Error, ErrorKind, Result},
hello::{HelloCommandResponse, HelloReply},
operation::{CommandErrorBody, CommandResponse},
options::{ReadConcern, ReadConcernInternal, ReadConcernLevel, ServerAddress},
selection_criteria::ReadPreference,
ClientSession,
};
/// A command that has been serialized to BSON.
#[derive(Debug)]
pub(crate) struct RawCommand {
    /// The command name (first key of the command document).
    pub(crate) name: String,
    /// The database the command targets.
    pub(crate) target_db: String,
    /// Whether or not the server may respond to this command multiple times via the moreToComeBit.
    pub(crate) exhaust_allowed: bool,
    /// The raw BSON bytes of the command body.
    pub(crate) bytes: Vec<u8>,
}
impl RawCommand {
    /// Whether this command's payload may be compressed on the wire.
    /// Returns `false` for sensitive (redacted) commands and for handshake
    /// ("hello") commands; compression is allowed for everything else.
    pub(crate) fn should_compress(&self) -> bool {
        let lowercase = self.name.to_lowercase();
        let name = lowercase.as_str();
        !(REDACTED_COMMANDS.contains(name) || HELLO_COMMAND_NAMES.contains(name))
    }
}
/// Driver-side model of a database command.
///
/// Serializes with the body flattened at the top level and the driver-managed
/// fields (`$db`, `lsid`, `$clusterTime`, etc.) alongside it; `None` fields
/// are skipped entirely.
#[serde_with::skip_serializing_none]
#[derive(Clone, Debug, Serialize, Default)]
#[serde(rename_all = "camelCase")]
pub(crate) struct Command<T = Document> {
    // Not serialized: used by the driver for bookkeeping only.
    #[serde(skip)]
    pub(crate) name: String,
    // Not serialized: controls the wire protocol moreToCome behavior.
    #[serde(skip)]
    pub(crate) exhaust_allowed: bool,
    /// The command-specific payload, flattened into the top-level document.
    #[serde(flatten)]
    pub(crate) body: T,
    #[serde(rename = "$db")]
    pub(crate) target_db: String,
    // Session id, set via `set_session`.
    lsid: Option<Document>,
    #[serde(rename = "$clusterTime")]
    cluster_time: Option<ClusterTime>,
    #[serde(flatten)]
    server_api: Option<ServerApi>,
    #[serde(rename = "$readPreference")]
    read_preference: Option<ReadPreference>,
    txn_number: Option<i64>,
    start_transaction: Option<bool>,
    autocommit: Option<bool>,
    read_concern: Option<ReadConcernInternal>,
    recovery_token: Option<Document>,
}
impl<T> Command<T> {
    /// Creates a command with the given name, target database and body;
    /// all driver-managed fields start unset.
    pub(crate) fn new(name: String, target_db: String, body: T) -> Self {
        Self {
            name,
            target_db,
            exhaust_allowed: false,
            body,
            lsid: None,
            cluster_time: None,
            server_api: None,
            read_preference: None,
            txn_number: None,
            start_transaction: None,
            autocommit: None,
            read_concern: None,
            recovery_token: None,
        }
    }
    /// Like `new`, but also sets an optional read concern.
    pub(crate) fn new_read(
        name: String,
        target_db: String,
        read_concern: Option<ReadConcern>,
        body: T,
    ) -> Self {
        Self {
            name,
            target_db,
            exhaust_allowed: false,
            body,
            lsid: None,
            cluster_time: None,
            server_api: None,
            read_preference: None,
            txn_number: None,
            start_transaction: None,
            autocommit: None,
            read_concern: read_concern.map(Into::into),
            recovery_token: None,
        }
    }
    /// Attaches the session id (`lsid`) of the given session.
    pub(crate) fn set_session(&mut self, session: &ClientSession) {
        self.lsid = Some(session.id().clone())
    }
    /// Sets the `$clusterTime` field for gossiping cluster time.
    pub(crate) fn set_cluster_time(&mut self, cluster_time: &ClusterTime) {
        self.cluster_time = Some(cluster_time.clone());
    }
    /// Sets the transaction recovery token.
    pub(crate) fn set_recovery_token(&mut self, recovery_token: &Document) {
        self.recovery_token = Some(recovery_token.clone());
    }
    /// Sets the transaction number (`txnNumber`).
    pub(crate) fn set_txn_number(&mut self, txn_number: i64) {
        self.txn_number = Some(txn_number);
    }
    /// Sets the stable/versioned server API fields.
    pub(crate) fn set_server_api(&mut self, server_api: &ServerApi) {
        self.server_api = Some(server_api.clone());
    }
    /// Sets the `$readPreference` field.
    pub(crate) fn set_read_preference(&mut self, read_preference: ReadPreference) {
        self.read_preference = Some(read_preference);
    }
    /// Marks this command as the first command of a transaction.
    pub(crate) fn set_start_transaction(&mut self) {
        self.start_transaction = Some(true);
    }
    /// Disables autocommit (always `false` when set, per transaction protocol).
    pub(crate) fn set_autocommit(&mut self) {
        self.autocommit = Some(false);
    }
    /// Sets the read concern level for this command.
    /// This does not overwrite any other read concern options.
    pub(crate) fn set_read_concern_level(&mut self, level: ReadConcernLevel) {
        let inner = self.read_concern.get_or_insert(ReadConcernInternal {
            level: None,
            at_cluster_time: None,
            after_cluster_time: None,
        });
        inner.level = Some(level);
    }
    /// Sets the read concern level for this command to "snapshot" and sets the `atClusterTime`
    /// field.
    pub(crate) fn set_snapshot_read_concern(&mut self, session: &ClientSession) {
        let inner = self.read_concern.get_or_insert(ReadConcernInternal {
            level: Some(ReadConcernLevel::Snapshot),
            at_cluster_time: None,
            after_cluster_time: None,
        });
        inner.at_cluster_time = session.snapshot_time;
    }
    /// Sets `afterClusterTime` from the session's operation time, when known,
    /// preserving any existing read concern options.
    pub(crate) fn set_after_cluster_time(&mut self, session: &ClientSession) {
        if let Some(operation_time) = session.operation_time {
            let inner = self.read_concern.get_or_insert(ReadConcernInternal {
                level: None,
                at_cluster_time: None,
                after_cluster_time: None,
            });
            inner.after_cluster_time = Some(operation_time);
        }
    }
}
impl Command<RawDocumentBuf> {
    /// Serializes the full command to BSON bytes by appending the serialized
    /// driver-managed fields to the already-raw body.
    pub(crate) fn into_bson_bytes(mut self) -> Result<Vec<u8>> {
        let mut command = self.body;
        // Clear the body of the command to avoid re-serializing.
        self.body = rawdoc! {};
        let rest_of_command = bson::to_raw_document_buf(&self)?;
        extend_raw_document_buf(&mut command, rest_of_command)?;
        Ok(command.into_bytes())
    }
}
/// A raw (undecoded) server response to a command.
#[derive(Debug, Clone)]
pub(crate) struct RawCommandResponse {
    /// The address of the server that produced this response.
    pub(crate) source: ServerAddress,
    // The raw BSON response document.
    raw: RawDocumentBuf,
}
impl RawCommandResponse {
    /// Test helper: builds a response from a `Document` and a server address.
    #[cfg(test)]
    pub(crate) fn with_document_and_address(source: ServerAddress, doc: Document) -> Result<Self> {
        let mut raw = Vec::new();
        doc.to_writer(&mut raw)?;
        Ok(Self {
            source,
            raw: RawDocumentBuf::from_bytes(raw)?,
        })
    }
    /// Initialize a response from a document.
    #[cfg(test)]
    pub(crate) fn with_document(doc: Document) -> Result<Self> {
        Self::with_document_and_address(
            ServerAddress::Tcp {
                host: "localhost".to_string(),
                port: None,
            },
            doc,
        )
    }
    /// Builds a response from a wire-protocol message, which must contain a
    /// single document.
    pub(crate) fn new(source: ServerAddress, message: Message) -> Result<Self> {
        let raw = message.single_document_response()?;
        Ok(Self::new_raw(source, RawDocumentBuf::from_bytes(raw)?))
    }
    /// Builds a response directly from a raw BSON document.
    pub(crate) fn new_raw(source: ServerAddress, raw: RawDocumentBuf) -> Self {
        Self { source, raw }
    }
    /// Deserializes the response body into `T`, mapping failures to
    /// `ErrorKind::InvalidResponse`.
    pub(crate) fn body<'a, T: Deserialize<'a>>(&'a self) -> Result<T> {
        bson::from_slice(self.raw.as_bytes()).map_err(|e| {
            Error::from(ErrorKind::InvalidResponse {
                message: format!("{}", e),
            })
        })
    }
    /// Used to handle decoding responses where the server may return invalid UTF-8 in error
    /// messages.
    pub(crate) fn body_utf8_lossy<'a, T: Deserialize<'a>>(&'a self) -> Result<T> {
        bson::from_slice_utf8_lossy(self.raw.as_bytes()).map_err(|e| {
            Error::from(ErrorKind::InvalidResponse {
                message: format!("{}", e),
            })
        })
    }
    /// Borrows the raw response document.
    pub(crate) fn raw_body(&self) -> &RawDocument {
        &self.raw
    }
    /// Borrows the raw response bytes.
    pub(crate) fn as_bytes(&self) -> &[u8] {
        self.raw.as_bytes()
    }
    /// Deserialize the body of this response, returning an authentication error if it fails.
    pub(crate) fn auth_response_body<T: DeserializeOwned>(
        &self,
        mechanism_name: &str,
    ) -> Result<T> {
        self.body()
            .map_err(|_| Error::invalid_authentication_response(mechanism_name))
    }
    /// Interprets this response as a reply to a "hello" handshake command.
    /// A successful body becomes a `HelloReply`; otherwise the response is
    /// re-parsed as a command error, falling back to `InvalidResponse`.
    pub(crate) fn into_hello_reply(self) -> Result<HelloReply> {
        match self.body::<CommandResponse<HelloCommandResponse>>() {
            Ok(response) if response.is_success() => {
                let server_address = self.source_address().clone();
                let cluster_time = response.cluster_time().cloned();
                Ok(HelloReply {
                    server_address,
                    command_response: response.body,
                    cluster_time,
                    raw_command_response: self.into_raw_document_buf(),
                })
            }
            _ => match self.body::<CommandResponse<CommandErrorBody>>() {
                Ok(command_error_body) => Err(Error::new(
                    ErrorKind::Command(command_error_body.body.command_error),
                    command_error_body.body.error_labels,
                )),
                Err(_) => Err(ErrorKind::InvalidResponse {
                    message: "invalid server response".into(),
                }
                .into()),
            },
        }
    }
    /// The address of the server that sent this response.
    pub(crate) fn source_address(&self) -> &ServerAddress {
        &self.source
    }
    /// Consumes the response, yielding the raw document.
    pub(crate) fn into_raw_document_buf(self) -> RawDocumentBuf {
        self.raw
    }
}
|
// box pointer
fn box_pointer() {
    // FIX: `num` must be a mutable binding — without `mut`, `*num += 1`
    // is a compile error (E0594: cannot assign through an immutable `Box`).
    fn add_one(mut num: Box<i32>) {
        *num += 1;
    }
    let p = Box::new(1i32);
    // Ownership of the heap allocation moves into `add_one`, which drops
    // (frees) it when it returns.
    add_one(p);
    // equivalent to following C code:
    //
    // {
    //     int *x = malloc(sizeof(int));
    //     add_one(x);
    //     free(x);
    // }
    // println!("{}", p); // `p` has been moved.
}
// Stub — presumably meant to demonstrate shared borrows (`&T`); empty for now.
fn borrow_pointer() {
}
// Stub — presumably meant to demonstrate mutable borrows (`&mut T`); empty for now.
fn borrow_mut_pointer() {
}
// Entry point; none of the demo functions above are invoked yet.
fn main() {
}
use assert_cmd::prelude::*; // Add methods on commands
use std::{env, process::Command, path::Path};
#[test]
fn positive_question_mark() {
    // Run the hacspec typechecker on the question-mark language test and
    // verify it succeeds and emits the expected F* file.
    let mut cmd = Command::cargo_bin("cargo-hacspec").expect("Error getting cargo hacspec command");
    cmd.envs(env::vars());
    cmd.args(&[
        "-e", "fst",
        "--dir", "tests/",
        "-f", "language-tests/question_mark.rs",
        "--vc-init",
    ]);
    let status = cmd.status().expect("Error running typechecker");
    assert!(status.success());
    assert!(Path::new("tests/_vc/Question.Mark.fst").exists());
}
|
//! This module contains everything related to graphics.
#![allow(missing_docs)]
// Public submodules. NOTE(review): judged by name only — `window`/`canvas`
// look like windowing/back-end concerns and the rest drawing primitives;
// the actual contents live in the sibling files, outside this view.
pub mod window;
pub mod canvas;
pub mod image;
pub mod font;
pub mod text;
pub mod sprite;
pub mod drawable;
pub mod color;
pub mod shape;
use tokio::stream::{Stream, StreamExt};
use pin_project::pin_project;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Adapter that exposes a `Vec<R>` as a `Stream` yielding fixed-size
/// windows (chunks) of its items.
#[pin_project]
pub struct StreamReader<R> {
    // Backing items that the stream yields in windows.
    source: Vec<R>,
    // Number of items currently held in `source`.
    buffer_length: usize,
    // Read cursor: index of the next item to yield.
    ptr_location: usize,
    // Maximum number of items returned per poll.
    window_size: usize
}
use tokio::net::TcpListener;
impl<R> StreamReader<R> {
    /// Creates a reader over `r` that yields at most `window_length` items
    /// per poll.
    pub fn new(r: Vec<R>, window_length: usize) -> Self {
        // BUG FIX: `buffer_length` tracks the number of items in `source`
        // (see `append_source`, `clear_source` and `poll_next`), so it must
        // start at `r.len()` — the original set it to `window_length`,
        // which truncated (or inflated) the stream.
        StreamReader {
            buffer_length: r.len(),
            source: r,
            ptr_location: 0,
            window_size: window_length,
        }
    }
    /// Appends `r` to the backing buffer without moving the read cursor.
    pub fn append_source(&mut self, r: Vec<R>) {
        self.buffer_length += r.len();
        self.source.extend(r);
    }
    /// Rewinds the read cursor so the stream replays from the start.
    pub fn reset_pointer_index(&mut self) {
        self.ptr_location = 0;
    }
    /// Drops all buffered items and rewinds the cursor.
    pub fn clear_source(&mut self) {
        self.ptr_location = 0;
        self.buffer_length = 0;
        self.source.clear();
    }
    /// Changes how many items each subsequent poll may yield.
    pub fn set_window_size(&mut self, window_size: usize) {
        self.window_size = window_size;
    }
}
/// Yields successive windows of up to `window_size` cloned items, then ends.
impl<R: Clone> Stream for StreamReader<R> {
    type Item = Vec<R>;
    fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let this = self.project();
        // Every buffered item has been yielded: the stream is exhausted.
        if *this.ptr_location == *this.buffer_length {
            return Poll::Ready(None);
        }
        let start = *this.ptr_location;
        // Clamp the window to the end of the buffer.
        let end = usize::min(start + *this.window_size, *this.buffer_length);
        *this.ptr_location = end;
        Poll::Ready(Some(this.source[start..end].to_vec()))
    }
}
// Behavioral tests. Readers are built via struct literals so every field
// value is explicit.
#[tokio::test]
async fn u32_vector() {
    let mut reader = StreamReader::<u32> {
        source: vec![1, 2, 3, 4, 5],
        buffer_length: 5,
        ptr_location: 0,
        window_size: 2,
    };
    assert_eq!(reader.next().await, Some(vec![1, 2]));
    assert_eq!(reader.next().await, Some(vec![3, 4]));
    assert_eq!(reader.next().await, Some(vec![5]));
}
#[tokio::test]
async fn char_vector() {
    let mut reader = StreamReader::<char> {
        source: "This is a test string.".chars().collect(),
        buffer_length: 22,
        ptr_location: 0,
        window_size: 2,
    };
    assert_eq!(reader.next().await, Some(vec!['T', 'h']));
}
#[tokio::test]
async fn string_vector() {
    let mut reader = StreamReader::<String> {
        source: vec!["#test 1".to_string(), "#test 2".to_string()],
        buffer_length: 2,
        ptr_location: 0,
        window_size: 2,
    };
    assert_eq!(reader.next().await, Some(vec!["#test 1".to_string(), "#test 2".to_string()]));
}
#[tokio::test]
async fn str_vector() {
    let mut reader = StreamReader::<&str> {
        source: vec!["#test 1", "#test 2"],
        buffer_length: 2,
        ptr_location: 0,
        window_size: 2,
    };
    assert_eq!(reader.next().await, Some(vec!["#test 1", "#test 2"]));
}
#[tokio::test]
async fn append_str_vector() {
    let mut reader = StreamReader::<&str> {
        source: vec!["#test 1", "#test 2"],
        buffer_length: 2,
        ptr_location: 0,
        window_size: 2,
    };
    reader.append_source(vec!["#test 3"]);
    assert_eq!(reader.next().await, Some(vec!["#test 1", "#test 2"]));
    assert_eq!(reader.next().await, Some(vec!["#test 3"]));
}
#[tokio::test]
async fn empty_init_append_str_vector() {
    let mut reader = StreamReader::<&str> {
        source: vec![],
        buffer_length: 0,
        ptr_location: 0,
        window_size: 2,
    };
    reader.append_source(vec!["#test 1"]);
    reader.append_source(vec!["#test 2"]);
    reader.append_source(vec!["#test 3"]);
    assert_eq!(reader.next().await, Some(vec!["#test 1", "#test 2"]));
    assert_eq!(reader.next().await, Some(vec!["#test 3"]));
}
#[tokio::test]
async fn empty_init_none() {
    let mut reader = StreamReader::<&str> {
        source: vec![],
        buffer_length: 0,
        ptr_location: 0,
        window_size: 2,
    };
    assert_eq!(reader.next().await, None);
}
#[tokio::test]
async fn reset_ptr_init_none() {
    let mut reader = StreamReader::<&str> {
        source: vec![],
        buffer_length: 0,
        ptr_location: 0,
        window_size: 2,
    };
    reader.append_source(vec!["#test 1"]);
    assert_eq!(reader.next().await, Some(vec!["#test 1"]));
    assert_eq!(reader.next().await, None);
    reader.reset_pointer_index();
    assert_eq!(reader.next().await, Some(vec!["#test 1"]));
}
#[tokio::test]
async fn clear_source() {
    let mut reader = StreamReader::<&str> {
        source: vec![],
        buffer_length: 0,
        ptr_location: 0,
        window_size: 2,
    };
    reader.append_source(vec!["#test 1"]);
    assert_eq!(reader.next().await, Some(vec!["#test 1"]));
    reader.clear_source();
    assert_eq!(reader.next().await, None);
    reader.append_source(vec!["#test 1"]);
    assert_eq!(reader.next().await, Some(vec!["#test 1"]));
}
#[tokio::test]
async fn window_size_less_than_source() {
    let mut reader = StreamReader::<&str> {
        source: vec![],
        buffer_length: 0,
        ptr_location: 0,
        window_size: 0,
    };
    reader.append_source(vec!["#test 1"]);
    // A zero-sized window yields an empty chunk without advancing.
    assert_eq!(reader.next().await, Some(vec![]));
    reader.set_window_size(2);
    assert_eq!(reader.next().await, Some(vec!["#test 1"]));
}
#[tokio::test]
async fn window_size_greater_than_source() {
    let mut reader = StreamReader::<&str> {
        source: vec![],
        buffer_length: 0,
        ptr_location: 0,
        window_size: 0,
    };
    reader.append_source(vec!["#test 1"]);
    assert_eq!(reader.next().await, Some(vec![]));
    reader.set_window_size(20);
    assert_eq!(reader.next().await, Some(vec!["#test 1"]));
}
|
/// Converts `n` to its "raindrops" string: appends "Pling" when divisible
/// by 3, "Plang" by 5, "Plong" by 7 (in that order); when divisible by
/// none of them, returns the decimal digits of `n`.
pub fn raindrops(n: u32) -> String {
    let mut result = String::new();
    if n % 3 == 0 {
        result.push_str("Pling");
    }
    if n % 5 == 0 {
        result.push_str("Plang");
    }
    if n % 7 == 0 {
        result.push_str("Plong");
    }
    if result.is_empty() {
        // Not divisible by 3, 5 or 7: fall back to the number itself.
        result = n.to_string();
    }
    result
}
|
use crate::{content::PostDirMetadata, markdown, paths::AbsPath, util};
use camino::{Utf8Path, Utf8PathBuf};
use chrono::Utc;
use colored::Colorize;
use eyre::{eyre, Result};
use regex::Regex;
use serde::Deserialize;
use std::fs;
use tracing::info;
use yaml_front_matter::{Document, YamlFrontMatter};
/// Creates a dated post skeleton under `posts/` from `title`.
pub fn new_post(title: String) -> Result<()> {
    let target = post_path(&util::slugify(&title));
    new_prototype(&title, &target)
}
/// Creates a draft skeleton under `drafts/` from `title`.
pub fn new_draft(title: String) -> Result<()> {
    let target = draft_path(&util::slugify(&title));
    new_prototype(&title, &target)
}
fn post_path(slug: &str) -> Utf8PathBuf {
let now = Utc::now();
format!("posts/{}-{}.markdown", now.format("%Y-%m-%d"), slug).into()
}
fn draft_path(slug: &str) -> Utf8PathBuf {
format!("drafts/{slug}.markdown").into()
}
/// Writes a skeleton post for `title` at `path` and logs the creation.
fn new_prototype(title: &str, path: &Utf8Path) -> Result<()> {
    fs::write(path, prototype_post(title))?;
    info!("Created {path}");
    Ok(())
}
/// Renders the skeleton front matter and placeholder body for a new post.
fn prototype_post(title: &str) -> String {
    let mut content = String::new();
    content.push_str("---\n");
    content.push_str(&format!("title: \"{title}\"\n"));
    content.push_str("tags: [Tag1, Tag2]\n");
    content.push_str("---\n");
    content.push_str("Lorem ipsum...\n");
    content
}
/// Moves the single draft matching `pattern` into `posts/`, renaming it
/// from its title-derived slug plus today's date.
pub fn promote(pattern: String) -> Result<()> {
    let draft = match_single_file_path(&pattern, "drafts/".into())?;
    let slug = util::slugify(&read_title(&draft)?);
    rename(&draft, &post_path(&slug), "Promoted")?;
    Ok(())
}
/// Moves the single published post matching `pattern` back into `drafts/`.
pub fn demote(pattern: String) -> Result<()> {
    let post = match_single_file_path(&pattern, "posts/".into())?;
    let PostDirMetadata { date: _date, slug } = PostDirMetadata::from_path(&post)?;
    rename(&post, &draft_path(&slug), "Demoted")?;
    Ok(())
}
/// Renames `src` to `dest` (creating parent directories as needed) and
/// prints a colored `[notice]` line describing the move.
fn rename(src: &Utf8Path, dest: &Utf8Path, notice: &str) -> Result<()> {
    if let Some(parent) = dest.parent() {
        fs::create_dir_all(parent)?;
    }
    let tag = format!("[{notice}]").green();
    println!("{} {src} to {}", tag, dest.as_str().magenta());
    fs::rename(src, dest)?;
    Ok(())
}
fn match_single_file_path(pattern: &str, dir: &Utf8Path) -> Result<AbsPath> {
let found = match_file_path(pattern, dir)?;
match found.len() {
0 => {
println!("{} no matches found", "[Error]".red());
std::process::exit(1);
}
1 => return Ok(found.into_iter().next().unwrap()),
x => {
println!("{} {} matches found", "[Error]".red(), x);
for path in found {
let title = read_title(&path)?;
println!(r#"{} "{}""#, path.as_str().magenta(), title.cyan());
}
std::process::exit(1);
}
}
}
/// Returns all markdown files under `dir` whose path or title matches
/// `pattern`, case-insensitively.
fn match_file_path(pattern: &str, dir: &Utf8Path) -> Result<Vec<AbsPath>> {
    // Add in case insensitivity
    let re = Regex::new(&format!("(?i){pattern}"))?;
    let mut matches = Vec::new();
    for file in markdown::find_markdown_files(dir.as_str()) {
        let path = file.abs_path();
        if matches_file(&re, &path)? {
            matches.push(path);
        }
    }
    Ok(matches)
}
/// True when the regex matches either the file path or the post's title.
/// The title is only read (and parsed) when the path did not match.
fn matches_file(re: &Regex, path: &Utf8Path) -> Result<bool> {
    Ok(re.is_match(path.as_str()) || re.is_match(&read_title(path)?))
}
/// Reads the YAML front matter of the file at `path` and returns its
/// `title` field.
fn read_title(path: &Utf8Path) -> Result<String> {
    let content = fs::read_to_string(path)?;
    let doc = YamlFrontMatter::parse::<TitleMetadata>(&content)
        .map_err(|err| eyre!("Failed to parse metadata for : {:#?}\n{}", path, err))?;
    Ok(doc.metadata.title)
}
/// Front-matter schema for title extraction: only the `title` key is read.
#[derive(Deserialize, Debug)]
struct TitleMetadata {
    title: String,
}
|
// Reader/writer proxy aliases for each bit field of APB3ENR. This file is
// presumably svd2rust-generated (crate::R/W/BitReader/BitWriter pattern) —
// prefer regenerating from the SVD over hand-editing.
#[doc = "Register `APB3ENR` reader"]
pub type R = crate::R<APB3ENR_SPEC>;
#[doc = "Register `APB3ENR` writer"]
pub type W = crate::W<APB3ENR_SPEC>;
#[doc = "Field `SBSEN` reader - SBS clock enable Set and reset by software."]
pub type SBSEN_R = crate::BitReader;
#[doc = "Field `SBSEN` writer - SBS clock enable Set and reset by software."]
pub type SBSEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SPI5EN` reader - SPI5 clock enable Set and reset by software."]
pub type SPI5EN_R = crate::BitReader;
#[doc = "Field `SPI5EN` writer - SPI5 clock enable Set and reset by software."]
pub type SPI5EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPUART1EN` reader - LPUART1 clock enable Set and reset by software."]
pub type LPUART1EN_R = crate::BitReader;
#[doc = "Field `LPUART1EN` writer - LPUART1 clock enable Set and reset by software."]
pub type LPUART1EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C3EN` reader - I2C3 clock enable Set and reset by software."]
pub type I2C3EN_R = crate::BitReader;
#[doc = "Field `I2C3EN` writer - I2C3 clock enable Set and reset by software."]
pub type I2C3EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C4EN` reader - I2C4 clock enable Set and reset by software."]
pub type I2C4EN_R = crate::BitReader;
#[doc = "Field `I2C4EN` writer - I2C4 clock enable Set and reset by software."]
pub type I2C4EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM1EN` reader - LPTIM1 clock enable Set and reset by software."]
pub type LPTIM1EN_R = crate::BitReader;
#[doc = "Field `LPTIM1EN` writer - LPTIM1 clock enable Set and reset by software."]
pub type LPTIM1EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM3EN` reader - LPTIM3 clock enable Set and reset by software."]
pub type LPTIM3EN_R = crate::BitReader;
#[doc = "Field `LPTIM3EN` writer - LPTIM3 clock enable Set and reset by software."]
pub type LPTIM3EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM4EN` reader - LPTIM4 clock enable Set and reset by software."]
pub type LPTIM4EN_R = crate::BitReader;
#[doc = "Field `LPTIM4EN` writer - LPTIM4 clock enable Set and reset by software."]
pub type LPTIM4EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM5EN` reader - LPTIM5 clock enable Set and reset by software."]
pub type LPTIM5EN_R = crate::BitReader;
#[doc = "Field `LPTIM5EN` writer - LPTIM5 clock enable Set and reset by software."]
pub type LPTIM5EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPTIM6EN` reader - LPTIM6 clock enable Set and reset by software."]
pub type LPTIM6EN_R = crate::BitReader;
#[doc = "Field `LPTIM6EN` writer - LPTIM6 clock enable Set and reset by software."]
pub type LPTIM6EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VREFEN` reader - VREF clock enable Set and reset by software."]
pub type VREFEN_R = crate::BitReader;
#[doc = "Field `VREFEN` writer - VREF clock enable Set and reset by software."]
pub type VREFEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RTCAPBEN` reader - RTC APB interface clock enable Set and reset by software."]
pub type RTCAPBEN_R = crate::BitReader;
#[doc = "Field `RTCAPBEN` writer - RTC APB interface clock enable Set and reset by software."]
pub type RTCAPBEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each getter extracts one enable bit from the register value
// at the bit position stated in its doc comment.
impl R {
    #[doc = "Bit 1 - SBS clock enable Set and reset by software."]
    #[inline(always)]
    pub fn sbsen(&self) -> SBSEN_R {
        SBSEN_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 5 - SPI5 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn spi5en(&self) -> SPI5EN_R {
        SPI5EN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - LPUART1 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn lpuart1en(&self) -> LPUART1EN_R {
        LPUART1EN_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - I2C3 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn i2c3en(&self) -> I2C3EN_R {
        I2C3EN_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - I2C4 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn i2c4en(&self) -> I2C4EN_R {
        I2C4EN_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 11 - LPTIM1 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn lptim1en(&self) -> LPTIM1EN_R {
        LPTIM1EN_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - LPTIM3 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn lptim3en(&self) -> LPTIM3EN_R {
        LPTIM3EN_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - LPTIM4 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn lptim4en(&self) -> LPTIM4EN_R {
        LPTIM4EN_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - LPTIM5 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn lptim5en(&self) -> LPTIM5EN_R {
        LPTIM5EN_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - LPTIM6 clock enable Set and reset by software."]
    #[inline(always)]
    pub fn lptim6en(&self) -> LPTIM6EN_R {
        LPTIM6EN_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 20 - VREF clock enable Set and reset by software."]
    #[inline(always)]
    pub fn vrefen(&self) -> VREFEN_R {
        VREFEN_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - RTC APB interface clock enable Set and reset by software."]
    #[inline(always)]
    pub fn rtcapben(&self) -> RTCAPBEN_R {
        RTCAPBEN_R::new(((self.bits >> 21) & 1) != 0)
    }
}
// Write proxies: each method returns a typed writer targeting the field's
// bit position (the const generic parameter on the `_W` type).
impl W {
    #[doc = "Bit 1 - SBS clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn sbsen(&mut self) -> SBSEN_W<APB3ENR_SPEC, 1> {
        SBSEN_W::new(self)
    }
    #[doc = "Bit 5 - SPI5 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn spi5en(&mut self) -> SPI5EN_W<APB3ENR_SPEC, 5> {
        SPI5EN_W::new(self)
    }
    #[doc = "Bit 6 - LPUART1 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn lpuart1en(&mut self) -> LPUART1EN_W<APB3ENR_SPEC, 6> {
        LPUART1EN_W::new(self)
    }
    #[doc = "Bit 7 - I2C3 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn i2c3en(&mut self) -> I2C3EN_W<APB3ENR_SPEC, 7> {
        I2C3EN_W::new(self)
    }
    #[doc = "Bit 8 - I2C4 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn i2c4en(&mut self) -> I2C4EN_W<APB3ENR_SPEC, 8> {
        I2C4EN_W::new(self)
    }
    #[doc = "Bit 11 - LPTIM1 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn lptim1en(&mut self) -> LPTIM1EN_W<APB3ENR_SPEC, 11> {
        LPTIM1EN_W::new(self)
    }
    #[doc = "Bit 12 - LPTIM3 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn lptim3en(&mut self) -> LPTIM3EN_W<APB3ENR_SPEC, 12> {
        LPTIM3EN_W::new(self)
    }
    #[doc = "Bit 13 - LPTIM4 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn lptim4en(&mut self) -> LPTIM4EN_W<APB3ENR_SPEC, 13> {
        LPTIM4EN_W::new(self)
    }
    #[doc = "Bit 14 - LPTIM5 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn lptim5en(&mut self) -> LPTIM5EN_W<APB3ENR_SPEC, 14> {
        LPTIM5EN_W::new(self)
    }
    #[doc = "Bit 15 - LPTIM6 clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn lptim6en(&mut self) -> LPTIM6EN_W<APB3ENR_SPEC, 15> {
        LPTIM6EN_W::new(self)
    }
    #[doc = "Bit 20 - VREF clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn vrefen(&mut self) -> VREFEN_W<APB3ENR_SPEC, 20> {
        VREFEN_W::new(self)
    }
    #[doc = "Bit 21 - RTC APB interface clock enable Set and reset by software."]
    #[inline(always)]
    #[must_use]
    pub fn rtcapben(&mut self) -> RTCAPBEN_W<APB3ENR_SPEC, 21> {
        RTCAPBEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// NOTE(review): the generated doc said "APB4" although every other item in
// this file names APB3ENR — corrected to APB3 below; confirm against the SVD.
#[doc = "RCC APB3 peripheral clock register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb3enr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb3enr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB3ENR_SPEC;
impl crate::RegisterSpec for APB3ENR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb3enr::R`](R) reader structure"]
impl crate::Readable for APB3ENR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb3enr::W`](W) writer structure"]
impl crate::Writable for APB3ENR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB3ENR to value 0"]
impl crate::Resettable for APB3ENR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
pub mod tokens;
use self::tokens::{get_operator_table, get_token_table, Keyword, Token, TokenType};
use crate::utils::AsExclusiveTakeWhile;
use std::collections::HashMap;
use std::fmt;
use std::iter::{FromIterator, IntoIterator, Iterator, Peekable};
use std::string::String;
use std::vec;
// ---------- Lexer --------------
/// Hand-written lexer: scans a fully materialized, peekable character
/// stream and tracks the current position for diagnostics.
pub struct Lexer {
    // All source characters, collected up front and consumed one at a time.
    char_iterator: Peekable<vec::IntoIter<char>>,
    // Maps identifier spellings to keywords.
    token_table: HashMap<String, Keyword>,
    // Maps operator spellings (1-3 characters) to keywords.
    operator_table: HashMap<String, Keyword>,
    // Line and column
    row: usize,
    column: usize,
}
impl Lexer {
    /// Builds a lexer over `source_code`, positioned at row 1, column 0.
    pub fn new(source_code: String) -> Self {
        let chars = Vec::<char>::from_iter(source_code.chars());
        let peekable = chars.into_iter().peekable();
        Lexer {
            char_iterator: peekable,
            token_table: get_token_table(),
            operator_table: get_operator_table(),
            row: 1,
            column: 0,
        }
    }
    /// Moves the current column forward by `n`.
    fn advance_pos(&mut self, n: usize) {
        self.column += n;
    }
    /// Records a line break: next row, column reset to 0.
    fn newline(&mut self) {
        self.row += 1;
        self.column = 0;
    }
    /// Produces the next token, skipping whitespace and newlines; returns
    /// `None` at end of input.
    fn parse_next_token(&mut self) -> Option<Token> {
        if let Some(&chr) = self.char_iterator.peek() {
            if chr.is_alphabetic() || chr == '_' {
                Some(Token::new(self.parse_identifier(), self.row, self.column))
            } else if chr.is_numeric() {
                Some(Token::new(self.parse_number(), self.row, self.column))
            } else if chr == '"' {
                Some(Token::new(self.parse_string(), self.row, self.column))
            } else if chr == '\n' {
                self.char_iterator.next();
                self.newline();
                self.parse_next_token()
            } else if chr == ' ' || chr == '\t' {
                self.char_iterator.next();
                self.advance_pos(1);
                self.parse_next_token()
            } else {
                Some(Token::new(self.parse_operator(), self.row, self.column))
            }
        } else {
            None
        }
    }
    /// Consumes an identifier, returning a keyword token when it is one.
    fn parse_identifier(&mut self) -> TokenType {
        // BUG FIX: identifiers may start with '_' (dispatched above in
        // `parse_next_token`), so '_' must also be accepted here. Previously
        // "_foo" consumed zero characters, so the lexer looped forever
        // emitting empty `Id` tokens without advancing.
        let id_chars = |chr: &char| chr.is_alphanumeric() || *chr == '_';
        let id: String = self.char_iterator.take_while_exclusive(id_chars).collect();
        // NOTE(review): `id.len()` counts bytes, so columns drift on
        // non-ASCII identifiers — confirm whether `chars().count()` is meant.
        self.advance_pos(id.len());
        // If keyword map contains the keyword, return Token::Keyword
        // Else return a Token::Identifier
        match self.token_table.get(&id) {
            Some(keyword) => TokenType::Keyword(keyword.clone()),
            _ => TokenType::Id(id),
        }
    }
    /// Consumes a double-quoted string literal (no escape handling).
    fn parse_string(&mut self) -> TokenType {
        // Looking for the closing doublequote
        let string_chars = |chr: &char| *chr != '"';
        // Skip starting doublequote
        self.char_iterator.next();
        let string: String = self
            .char_iterator
            .take_while_exclusive(string_chars)
            .collect();
        self.advance_pos(string.len() + 2); // With doublequotes
        // Skip ending doublequote
        if self.char_iterator.next().is_none() {
            panic!("Unmatched double quotes at {}:{}", self.row, self.column)
        }
        TokenType::String(string)
    }
    /// Consumes a (possibly fractional) numeric literal as `f64`.
    fn parse_number(&mut self) -> TokenType {
        let numeric_chars = |chr: &char| chr.is_numeric() || *chr == '.';
        // Looking for the end of the number
        let number: String = self
            .char_iterator
            .take_while_exclusive(numeric_chars)
            .collect();
        self.advance_pos(number.len());
        TokenType::Number(
            number
                .parse::<f64>()
                .unwrap_or_else(|_| panic!("Failed to parse number '{}'", number)),
        )
    }
    /// Consumes the longest matching operator (3, then 2, then 1 chars).
    fn parse_operator(&mut self) -> TokenType {
        // Try longer operators before shorter ones, to avoid returning '>'
        // when the input actually holds ">=". Lengths are 3, 2, 1.
        for n_operator in (1..4).rev() {
            let n_character_operator: String =
                self.char_iterator.clone().take(n_operator).collect();
            if let Some(keyword) = self.operator_table.get(&n_character_operator).cloned() {
                // Advance the original iterator; `count()` forces the lazy
                // adaptor to actually consume the characters.
                let _ = self.char_iterator.by_ref().take(n_operator).count();
                self.advance_pos(n_operator);
                // `keyword` is already an owned clone; no second clone needed.
                return TokenType::Keyword(keyword);
            }
        }
        panic!("Can't parse token at {}:{}", self.row, self.column)
    }
}
// Debug output shows only the lexer position, not the remaining input.
impl fmt::Debug for Lexer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Lexer {{ row: {}, col: {} }}", self.row, self.column)
    }
}
// ----- Iterator trait implementation for the scanner -----
impl Iterator for Lexer {
    type Item = Token;
    // Each call lexes one more token; `None` signals end of input.
    fn next(&mut self) -> Option<Token> {
        self.parse_next_token()
    }
}
|
use crate::net::SignerID;
use crate::rpc::TapyrusApi;
use crate::signer_node::{is_master, master_index, NodeParameters, NodeState};
use tapyrus::blockdata::block::Block;
/// Handles a `completedblock` message at the end of a signing round.
///
/// Only the current round master may complete a round: a message from any
/// other sender is logged as suspicious and the previous state is returned
/// unchanged. Otherwise the block is submitted over RPC (best effort — a
/// submission failure is logged, not propagated, since the block may have
/// already been relayed) and the node transitions to `RoundComplete`.
pub fn process_completedblock<T>(
    sender_id: &SignerID,
    block: &Block,
    prev_state: &NodeState,
    params: &NodeParameters<T>,
) -> NodeState
where
    T: TapyrusApi,
{
    if !is_master(sender_id, prev_state, params) {
        log::warn!("Peer {} may be malicious node. It might impersonate as master or your node might be behind from others.", sender_id);
        return prev_state.clone(); // Ignore message
    }
    // Best-effort submission: a failure here is logged but does not block
    // the state transition below.
    if let Err(e) = params.rpc.submitblock(block) {
        log::warn!(
            "The node got invalid completed block or it was already relayed via Tapyrus network. from-peer: {}, block: {:?}, rpc error: {:?}",
            sender_id,
            block,
            e
        );
    }
    NodeState::RoundComplete {
        master_index: master_index(prev_state, params)
            .expect("Previous state getting round complete should have round master"),
        block_height: prev_state.block_height(),
    }
}
#[cfg(test)]
mod tests {
    use super::process_completedblock;
    use crate::errors::Error;
    use crate::net::SignerID;
    use crate::signer_node::node_state::builder::{Builder, Master, Member};
    use crate::signer_node::{master_index, NodeState};
    use crate::tests::helper::blocks::get_block;
    use crate::tests::helper::keys::TEST_KEYS;
    use crate::tests::helper::node_parameters_builder::NodeParametersBuilder;
    use crate::tests::helper::node_state_builder::BuilderForTest;
    use crate::tests::helper::rpc::MockRpc;
    #[test]
    fn test_process_completedblock() {
        let block = get_block(0);
        let mut rpc = MockRpc::new();
        rpc.should_call_submitblock(Ok(()));
        let params = NodeParametersBuilder::new().rpc(rpc).build();
        // check 1, node state should be RoundComplete after process completeblock message.
        let prev_state = Member::for_test().master_index(0).build();
        let sender_id = SignerID::new(TEST_KEYS.pubkeys()[0]);
        let state = process_completedblock(&sender_id, &block, &prev_state, &params);
        params.rpc.assert();
        match &state {
            NodeState::RoundComplete { master_index, .. } => assert_eq!(*master_index, 0),
            // `panic!` instead of `assert!(false, ..)` for consistency with the
            // other tests here (clippy::assertions_on_constants).
            n => panic!("Should be RoundComplete, but the state is {:?}", n),
        }
    }
    #[test]
    fn test_process_master_received_completedblock() {
        let block = get_block(0);
        let params = NodeParametersBuilder::new().build();
        let prev_state = Master::for_test().build();
        let sender_id = SignerID::new(TEST_KEYS.pubkeys()[0]);
        let state = process_completedblock(&sender_id, &block, &prev_state, &params);
        // if master receives completedblock message, it does not change state
        assert_eq!(prev_state, state);
    }
    #[test]
    fn test_process_completedblock_with_submit_block_failure() {
        let block = get_block(0);
        let mut rpc = MockRpc::new();
        rpc.should_call_submitblock(Err(Error::JsonRpc(jsonrpc::error::Error::Rpc(
            jsonrpc::error::RpcError {
                code: -25,
                message: "proposal was not based on our best chain".to_string(),
                data: None,
            },
        ))));
        let params = NodeParametersBuilder::new().rpc(rpc).build();
        let prev_state = Member::for_test().master_index(0).build();
        let sender_id = SignerID::new(TEST_KEYS.pubkeys()[0]);
        let state = process_completedblock(&sender_id, &block, &prev_state, &params);
        params.rpc.assert();
        match &state {
            NodeState::RoundComplete { master_index, .. } => assert_eq!(*master_index, 0),
            n => panic!("Should be RoundComplete, but the state is {:?}", n),
        }
    }
    #[test]
    fn test_process_completedblock_ignore_different_master() {
        let block = get_block(0);
        let rpc = MockRpc::new();
        let params = NodeParametersBuilder::new().rpc(rpc).build();
        let prev_state = Member::for_test().master_index(0).build();
        let sender_id = SignerID::new(TEST_KEYS.pubkeys()[4]);
        let state = process_completedblock(&sender_id, &block, &prev_state, &params);
        params.rpc.assert();
        // It should not incremented if not recorded master.
        assert_eq!(master_index(&state, &params).unwrap(), 0);
        match state {
            // Matching the variant is the assertion; nothing else to check.
            NodeState::Member { .. } => {}
            n => panic!("Should be Member, but state:{:?}", n),
        }
    }
}
|
//! Abstraction and functions for walking the i3-node tree.
extern crate i3ipc;
use super::structures::*;
/// Cursor state threaded through a recursive walk of the i3 node tree;
/// records which output / workspace / containers enclose the node
/// currently being visited.
struct TreeWalker<T> {
    // rootnode: i3ipc::reply::Node,
    // Node whose children will be visited next (taken by `walk_tree`).
    nextnode: Option<i3ipc::reply::Node>,
    // Output (monitor) currently being descended into, if any.
    output: Option<i3ipc::reply::Node>,
    // Workspace currently being descended into, if any.
    workspace: Option<i3ipc::reply::Node>,
    parent_containers: Vec<i3ipc::reply::Node>, // the outer-most (i.e. the first) parent is 0
    // Leaf window currently being visited, if any.
    window: Option<i3ipc::reply::Node>,
    // Set by a visitor callback to stop the walk and carry a value out.
    result: Option<T>,
}
impl<T> TreeWalker<T> {
    // Starts a walk at the root of a freshly fetched i3 tree; all cursor
    // fields begin empty.
    fn new() -> TreeWalker<T> {
        let tree = get_tree();
        TreeWalker::<T> {
            // rootnode: tree.clone(),
            nextnode: Some(tree),
            output: None,
            workspace: None,
            parent_containers: Vec::new(),
            window: None,
            result: None,
        }
    }
}
/// Recursively visits every node below `tree_walker.nextnode`, invoking
/// `on_node` on workspaces and containers, and updating the walker's cursor
/// fields as it descends. The walk short-circuits as soon as
/// `tree_walker.result` is set by the callback.
///
/// Panics if `tree_walker.nextnode` is `None` (every caller sets it first).
fn walk_tree<T>(
    mut tree_walker: TreeWalker<T>,
    // FIX: `dyn` added — bare trait objects (`&mut FnMut(..)`) are
    // deprecated and a hard error in edition 2021. `&mut dyn FnMut(..)` is
    // the same type, so every existing caller coerces unchanged.
    on_node: &mut dyn FnMut(TreeWalker<T>) -> TreeWalker<T>,
) -> TreeWalker<T> {
    let node = tree_walker.nextnode.unwrap();
    tree_walker.nextnode = None;
    for node in node.nodes {
        tree_walker.nextnode = Some(node.clone());
        // Stop descending once a callback has produced a result.
        if tree_walker.result.is_some() {
            return tree_walker;
        }
        match node.nodetype {
            i3ipc::reply::NodeType::Output => {
                tree_walker.output = Some(node);
                tree_walker = walk_tree(tree_walker, on_node);
                tree_walker.output = None;
            }
            i3ipc::reply::NodeType::Workspace => {
                tree_walker.workspace = Some(node);
                tree_walker = (on_node)(tree_walker);
                tree_walker = walk_tree(tree_walker, on_node);
                tree_walker.workspace = None;
            }
            i3ipc::reply::NodeType::Con => {
                match node.window {
                    // A `Con` carrying a window id is a leaf window.
                    Some(_) => {
                        tree_walker.window = Some(node);
                        tree_walker = (on_node)(tree_walker);
                        tree_walker.window = None;
                    }
                    // A `Con` without a window id is a split container.
                    None => {
                        tree_walker = (on_node)(tree_walker);
                        tree_walker.parent_containers.push(node);
                        tree_walker = walk_tree(tree_walker, on_node);
                        // tree_walker.output = None; //should this be here?
                        tree_walker.parent_containers.pop();
                    }
                }
            }
            i3ipc::reply::NodeType::FloatingCon => {
                println!("F");
            }
            _ => {}
        }
    }
    tree_walker
}
/// Walks the current i3 tree and registers every workspace and window
/// with `wsl` in visit order.
pub fn build_lists(wsl: &mut WorkSpaceList) {
    let walker = TreeWalker::new();
    let register = &mut |w: TreeWalker<i64>| -> TreeWalker<i64> {
        match (&w.workspace, &w.window) {
            // Visiting the workspace node itself (no window set yet).
            (Some(ws), None) => wsl.workspace_on_init(ws.id),
            // Visiting a leaf window: it must belong to some workspace.
            (_, Some(win)) => {
                let ws_id = w.workspace.as_ref().unwrap().id;
                wsl.window_on_init(win.id, Some(ws_id));
            }
            _ => {}
        }
        w
    };
    walk_tree(walker, register);
}
/// Looks up the display name of the workspace or window with the given id.
pub fn resolve_name(id: i64) -> Option<String> {
    let walker = TreeWalker::new();
    let find_name = &mut |mut w: TreeWalker<String>| -> TreeWalker<String> {
        if w.window.is_none() {
            if let Some(ws) = &w.workspace {
                if ws.id == id {
                    w.result = ws.name.clone();
                }
            }
        }
        if let Some(win) = &w.window {
            if win.id == id {
                w.result = win.name.clone();
            }
        }
        w
    };
    walk_tree(walker, find_name).result
}
/// Returns the id of the currently focused window, if any window has focus.
pub fn resolve_focused() -> Option<i64> {
    let walker = TreeWalker::new();
    let find_focused = &mut |mut w: TreeWalker<i64>| -> TreeWalker<i64> {
        if let Some(win) = &w.window {
            if win.focused {
                w.result = Some(win.id);
            }
        }
        w
    };
    walk_tree(walker, find_focused).result
}
/// Returns the id of the workspace containing `window_id`.
/// Panics if the window is not found or has no enclosing workspace.
pub fn find_window_workspace_from_i3(window_id: i64) -> i64 {
    let walker = TreeWalker::new();
    let locate = &mut |mut w: TreeWalker<i64>| -> TreeWalker<i64> {
        if let Some(win) = &w.window {
            // Every visited window is expected to sit under a workspace.
            let workspace_id = w.workspace.as_ref().unwrap().id;
            if win.id == window_id {
                w.result = Some(workspace_id);
            }
        }
        w
    };
    walk_tree(walker, locate).result.unwrap()
}
/// Opens a fresh i3 IPC connection and fetches the full layout tree.
/// Panics when the connection or the query fails.
fn get_tree() -> i3ipc::reply::Node {
    let mut connection = i3ipc::I3Connection::connect().unwrap();
    connection.get_tree().unwrap()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.