text stringlengths 8 4.13M |
|---|
use gfx;
use glutin;
use gfx_device_gl;
use gfx::format::{Srgba8, Rgba8};
use gfx::Device;
use gfx::traits::FactoryExt;
use gfx_window_glutin;
use gfx::texture::ImageInfoCommon;
use gfx::format::R8_G8_B8_A8;
use super::{Vertex, ColorFormat, DepthFormat, GeometryBuffer, Locals};
use super::{pipe_blend, pipe_opaque, get_dimensions};
use puck_core::Color;
use {input, PuckError, PuckResult};
//use render::{Uniforms, Blend, TextureRegion, GeometryTesselator};
use {Dimensions, Input};
use glutin::GlContext;
use render::{down_size_m4, TextureArrayDimensions};
use FileResources;
use image::DynamicImage;
use puck_core::HashMap;
use render::{Blend, Uniforms};
use cgmath::{Vector2, vec3};
/// Pipeline state object + optional bound data for opaque (non-blended) draws.
pub struct OpaquePipeline<R> where R : gfx::Resources {
pub pipeline: gfx::PipelineState<R, pipe_opaque::Meta>,
// NOTE(review): `data` is never written by the code visible in this file
// (draw_raw builds its Data locally) — confirm whether this field is still used.
pub data : Option<pipe_opaque::Data<R>>,
}
/// Pipeline state object + optional bound data for alpha-blended draws.
pub struct BlendPipeline<R> where R : gfx::Resources {
pub pipeline: gfx::PipelineState<R, pipe_blend::Meta>,
// NOTE(review): like OpaquePipeline::data, this is never assigned in this file.
pub data : Option<pipe_blend::Data<R>>,
}
/// The two pipelines the renderer switches between per `Blend` mode.
pub struct Pipelines<R> where R : gfx::Resources {
pub opaque: OpaquePipeline<R>,
pub blend: BlendPipeline<R>,
}
/// Owns the window, GL device/factory, render targets and loaded resources.
/// Pipelines and the texture array are `Option` because they are loaded (and
/// can be hot-reloaded) after construction via `load_resources`.
pub struct Renderer<R, C, F, D> where R : gfx::Resources,
C : gfx::CommandBuffer<R>,
F : gfx::Factory<R>,
D : gfx::Device {
pub file_resources: FileResources,
pub window: glutin::GlWindow, // opengl
pub events_loop: glutin::EventsLoop, // opengl
pub device: D,
pub factory: F,
pub screen_colour_target: gfx::handle::RenderTargetView<R, ColorFormat>,
pub screen_depth_target: gfx::handle::DepthStencilView<R, DepthFormat>,
pub encoder: gfx::Encoder<R, C>,
// what about raw texture representation? for blitting to ui
// Texture array + its shader resource view; None until first successful load.
pub texture: Option<(gfx::handle::Texture<R, gfx::format::R8_G8_B8_A8>, gfx::handle::ShaderResourceView<R, [f32; 4]>)>,
pub sampler: gfx::handle::Sampler<R>,
pub pipelines: Option<Pipelines<R>>,
pub dimensions: Dimensions,
pub input: Input,
}
impl<F> Renderer<gfx_device_gl::Resources, gfx_device_gl::CommandBuffer, F, gfx_device_gl::Device> where F : gfx::Factory<gfx_device_gl::Resources> {
/// Pumps window events, optionally reloads shaders/textures, resizes the
/// render targets on window resize, and returns the current window
/// dimensions plus a snapshot of the input state.
pub fn begin_frame(&mut self, reload_texture: bool, reload_program: bool) -> (Dimensions, Input) {
self.load_resources(reload_texture, reload_program);
// Drain the event queue into a Vec so it can be scanned here (escape /
// close / resize) and then handed as a batch to the input producer.
let mut events : Vec<glutin::Event> = Vec::new();
self.events_loop.poll_events(|ev| events.push(ev));
let mut close_requested = false;
let mut resize = false;
for ev in &events {
if let &glutin::Event::WindowEvent { ref event, .. } = ev {
match event {
// Escape behaves the same as clicking the window close button.
&glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(glutin::VirtualKeyCode::Escape),
..
},
..
} => {
close_requested = true
},
&glutin::WindowEvent::Closed => {
close_requested = true
},
&glutin::WindowEvent::Resized(width, height) => {
self.window.resize(width, height);
resize = true;
},
_ => (),
}
}
}
if resize {
// println!("resize, PRE -> {:?}", get_dimensions(&self.window));
// Re-derive the colour/depth views so they match the new framebuffer size.
gfx_window_glutin::update_views(&self.window, &mut self.screen_colour_target, &mut self.screen_depth_target);
// println!("POST -> {:?}", get_dimensions(&self.window));
}
self.input = input::produce(&self.input, &events);
self.input.close = close_requested;
let dimensions = get_dimensions(&self.window);
self.dimensions = dimensions;
// clear
if !close_requested {
// self.screen_colour_target = 4;
// let decoded = decode_color(clear_color);
// self.encoder.clear(&self.screen_colour_target, decoded);
// self.encoder.clear_depth(&self.screen_depth_target, 1.0);
// let ad = self.screen_colour_target.get_dimensions();
// println!("internal dimensions -> {:?}", ad);
}
(dimensions, self.input.clone())
}
/// Convenience wrapper: clears depth to the far plane, then the colour target.
pub fn clear_depth_and_color(&mut self, color: Color) {
self.clear_depth();
self.clear_color(color);
}
/// Clears the depth buffer to 1.0 (far plane).
pub fn clear_depth(&mut self) {
self.encoder.clear_depth(&self.screen_depth_target, 1.0);
}
/// Clears the colour target; `color` is converted from 8-bit sRGB to linear
/// floats first (see `decode_color`).
pub fn clear_color(&mut self, color:Color) {
let decoded = decode_color(color);
self.encoder.clear(&self.screen_colour_target, decoded);
}
/// (Re)builds the pipelines and/or the texture array when requested or when
/// they are missing. Load failures are logged and the previous resources are
/// kept. Returns true when both texture and pipelines are available afterwards.
pub fn load_resources(&mut self, reload_texture: bool, reload_program: bool) -> bool {
if reload_program || self.pipelines.is_none() {
println!("LOAD PIPELINES");
// Both pipelines are built from the same shader pair; only the
// pipeline meta (blend state) differs.
let pipeline_load_result = self.file_resources.shader_pair.load().and_then( |shader_data| {
let opaque_pso = self.factory.create_pipeline_simple(
&shader_data.vertex_data,
&shader_data.fragment_data,
pipe_opaque::new()
).map_err(PuckError::PipelineError)?;
let blend_pso = self.factory.create_pipeline_simple(
&shader_data.vertex_data,
&shader_data.fragment_data,
pipe_blend::new()
).map_err(PuckError::PipelineError)?;
Ok(Pipelines {
opaque: OpaquePipeline {
pipeline: opaque_pso,
data: None,
},
blend: BlendPipeline {
pipeline: blend_pso,
data: None,
},
})
});
match pipeline_load_result {
Ok(p) => self.pipelines = Some(p),
// Keep any previously loaded pipelines so rendering can continue.
Err(e) => println!("pipeline load error -> {:?}", e),
}
}
if reload_texture || self.texture.is_none() {
println!("LOAD TEXTURES");
let texture_load_result = self.file_resources.texture_directory.load().and_then(|texture_array_data| {
// Flip each image vertically; image rows are stored top-down while
// GL samples bottom-up.
let images_raw : Vec<_> = texture_array_data.images.iter().map(|img| {
let dyn_image = DynamicImage::ImageRgba8(img.clone()).flipv();
dyn_image.to_rgba().into_raw()
} ).collect();
let data : Vec<_> = images_raw.iter().map(|v| v.as_slice()).collect();
let kind = texture_kind_for(&texture_array_data.dimensions);
let (texture, texture_view) = self.factory.create_texture_immutable_u8::<Srgba8>(kind, data.as_slice()).map_err(PuckError::CombinedGFXError)?;
Ok((texture, texture_view))
});
match texture_load_result {
Ok((t, tv)) => {
let pair = (t, tv);
self.texture = Some(pair);
},
Err(e) => println!("texture load error -> {:?}", e),
}
}
self.texture.is_some() && self.pipelines.is_some()
}
/// Uploads a vertex slice to the GPU, returning the buffer and draw slice.
pub fn upload(&mut self, vertices: &[Vertex]) -> GeometryBuffer<gfx_device_gl::Resources> {
let (buffer, slice) = self.factory.create_vertex_buffer_with_slice(vertices, ());
GeometryBuffer {
buffer,
slice,
}
}
/// Draws `geometry` through the pipeline selected by `blend`. Errors if the
/// texture or pipelines have not been loaded yet. `Blend::Add` is currently
/// a no-op (no additive pipeline exists).
/// NOTE(review): a fresh constant buffer is created for every draw call;
/// reusing one per pipeline would avoid the per-draw allocation — confirm.
fn draw_raw(&mut self, geometry: &GeometryBuffer<gfx_device_gl::Resources>, uniforms: Uniforms, blend:Blend) -> PuckResult<()> {
// let tv = match texture_array {
// TextureArraySource::UI => &self.ui.texture_view,
// TextureArraySource::Primary => self.texture.as_ref().map(|&(_, ref v)| v).ok_or(JamError::NoTexture())?,
// };
let tv = self.texture.as_ref().map(|&(_, ref v)| v).ok_or(PuckError::NoTexture())?;
// let tv = self.texture.as_ref().map(|&(_, ref v)| v).ok_or(JamError::NoTexture())?;
match blend {
Blend::None => {
let opaque_pipe = self.pipelines.as_mut().ok_or(PuckError::NoPipeline()).map(|p| &mut p.opaque )?;
let opaque_data = pipe_opaque::Data {
vbuf: geometry.buffer.clone(),
texture: (tv.clone(), self.sampler.clone()),
locals: self.factory.create_constant_buffer(1),
out_color: self.screen_colour_target.clone(),
out_depth: self.screen_depth_target.clone(),
};
let locals = Locals {
u_transform: down_size_m4(uniforms.transform.into()),
u_color: uniforms.color.float_raw(),
u_alpha_minimum: 0.01,
};
self.encoder.update_constant_buffer(&opaque_data.locals, &locals);
self.encoder.draw(&geometry.slice, &opaque_pipe.pipeline, &opaque_data);
},
Blend::Add => {
// println!("no add pipeline atm")
},
Blend::Alpha => {
let blend_pipe = self.pipelines.as_mut().ok_or(PuckError::NoPipeline()).map(|p| &mut p.blend )?;
let blend_data = pipe_blend::Data {
vbuf: geometry.buffer.clone(),
texture: (tv.clone(), self.sampler.clone()),
locals: self.factory.create_constant_buffer(1),
out_color: self.screen_colour_target.clone(),
out_depth: self.screen_depth_target.clone(),
};
let locals = Locals {
u_transform: down_size_m4(uniforms.transform.into()),
u_color: uniforms.color.float_raw(),
u_alpha_minimum: 0.01,
};
self.encoder.update_constant_buffer(&blend_data.locals, &locals);
self.encoder.draw(&geometry.slice, &blend_pipe.pipeline, &blend_data);
},
}
Ok(())
}
/// Public entry point for drawing already-uploaded geometry.
pub fn draw(&mut self, geometry: &GeometryBuffer<gfx_device_gl::Resources>, uniforms: Uniforms, blend:Blend) -> PuckResult<()> {
self.draw_raw(geometry, uniforms, blend)
}
/// Uploads `vertices` and immediately draws them; returns the buffer so the
/// caller can re-draw it on later frames without re-uploading.
pub fn draw_vertices(&mut self, vertices: &[Vertex], uniforms: Uniforms, blend:Blend) -> PuckResult<GeometryBuffer<gfx_device_gl::Resources>> {
let geometry = self.upload(vertices);
let res = self.draw(&geometry, uniforms, blend);
res.map(|()| geometry)
}
/// Flushes queued commands to the device, swaps buffers, and lets the device
/// reclaim per-frame resources.
pub fn finish_frame(&mut self) -> PuckResult<()> {
self.encoder.flush(&mut self.device);
self.window.swap_buffers().map_err(PuckError::ContextError)?;
self.device.cleanup();
Ok(())
}
}
/// Converts an 8-bit sRGB `Color` into linear-space floats for the GPU.
///
/// Each channel is normalised to [0, 1] and passed through the standard
/// sRGB electro-optical transfer function.
/// NOTE(review): alpha is emitted as 0.0 (matching the original behaviour) —
/// confirm this is intentional for the clear colour.
fn decode_color(c: Color) -> [f32; 4] {
    // Piecewise sRGB -> linear conversion for one 8-bit channel.
    fn srgb_to_linear(channel: u8) -> f32 {
        let normalised = f32::from(channel) / 255.0;
        if normalised > 0.04045 {
            ((normalised + 0.055) / 1.055).powf(2.4)
        } else {
            normalised / 12.92
        }
    }
    [srgb_to_linear(c.r), srgb_to_linear(c.g), srgb_to_linear(c.b), 0.0]
}
/// Builds a non-antialiased 2D-array texture kind from the loaded texture
/// directory's dimensions (width x height, `layers` slices).
pub fn texture_kind_for(dimensions: &TextureArrayDimensions) -> gfx::texture::Kind {
gfx::texture::Kind::D2Array(dimensions.width as u16, dimensions.height as u16, dimensions.layers as u16, gfx::texture::AaMode::Single)
}
|
//! # CRC digest of resource paths
use crc::{Crc, CRC_32_MPEG_2};
/// Normalises one path byte: forward slashes become backslashes and ASCII
/// upper-case letters become lower-case; everything else passes through.
fn normalize_char(b: u8) -> u8 {
    if b == b'/' {
        b'\\'
    } else if b.is_ascii_uppercase() {
        // Setting bit 5 lower-cases an ASCII letter (same as adding 0x20).
        b | 0x20
    } else {
        b
    }
}
const ALG: Crc<u32> = Crc::<u32>::new(&CRC_32_MPEG_2);
/// Calculate the Cyclic-Redundancy-Check for a file path
///
/// The game uses [CRC-32/MPEG-2], transforms all letters to lowercase,
/// replaces slashes with backslashes and appends 4 NULL bytes.
///
/// [CRC-32/MPEG-2]: https://reveng.sourceforge.io/crc-catalogue/17plus.htm#crc.cat.crc-32-mpeg-2
pub fn calculate_crc(path: &[u8]) -> u32 {
    let mut digest = ALG.digest();
    // Feed the path one normalised byte at a time; streaming a CRC over
    // single bytes produces the same result as the original chunked updates.
    for &byte in path {
        digest.update(&[normalize_char(byte)]);
    }
    // The game appends four NUL bytes before finalising (reason unknown upstream).
    digest.update(&[0, 0, 0, 0]);
    digest.finalize()
}
|
/// Demonstrates struct mutation, field-init shorthand, struct-update syntax
/// and tuple structs via `Debug` printing.
fn main() {
    let mut my_user = User {
        username: "Len",
        age: 26,
        active: true,
    };
    my_user.active = false;
    println!("mine: {:#?}", my_user);

    let built = build_user("Test", 30);
    println!("test user: {:?}", built);

    // Struct-update syntax: take every field from `my_user` except `age`.
    let copied = User {
        age: 29,
        ..my_user
    };
    println!("cp_mine: {:?}", copied);

    println!("******** Tuples ********");
    let black = Color(0, 0, 0);
    let origin = Point(0, 0, 0);
    println!("black: {:?}; \norigin: {:?};", black, origin);
}
/// Demo user record; borrows its username for the lifetime `'a`.
#[derive(Debug)]
struct User<'a> {
username: &'a str,
age: u8,
active: bool,
}
/// Builds a `User` that starts out active.
fn build_user(username: &str, age: u8) -> User {
    let active = true;
    User { username, age, active }
}
/// RGB tuple struct (demo of tuple-struct syntax).
#[derive(Debug)]
struct Color(i32, i32, i32);
/// 3-D integer point (tuple struct mirroring `Color`).
/// Fix: removed a stray trailing `|` artifact that broke compilation.
#[derive(Debug)]
struct Point(i32, i32, i32);
use std::fmt;
use std::io;
#[allow(unused_variables)]
/// Runs Conway's Game of Life on stdin: prints the board, then advances one
/// generation each time the user presses Enter.
///
/// Fixes over the original: the `read_line` result is now checked (the loop
/// exits on EOF or a read error instead of busy-looping forever), and the
/// input buffer is cleared each iteration instead of growing unboundedly.
fn main() {
    // Two sample patterns; `traffic_light` is kept as an alternative seed.
    let traffic_light = vec![(4, 2), (4, 3), (4, 4), (4, 5), (4, 6)];
    let galaxy = vec![
        (3, 3), (3, 4), (3, 5), (3, 6), (3, 7), (3, 8), (3, 10), (3, 11),
        (4, 3), (4, 4), (4, 5), (4, 6), (4, 7), (4, 8), (4, 10), (4, 11),
        (5, 10), (5, 11),
        (6, 3), (6, 4), (6, 10), (6, 11),
        (7, 3), (7, 4), (7, 10), (7, 11),
        (8, 3), (8, 4), (8, 10), (8, 11),
        (9, 3), (9, 4),
        (10, 3), (10, 4), (10, 6), (10, 7), (10, 8), (10, 9), (10, 10), (10, 11),
        (11, 3), (11, 4), (11, 6), (11, 7), (11, 8), (11, 9), (11, 10), (11, 11)
    ];
    let mut board = Board { width: 50, height: 20, live_cells: galaxy };
    let reader = io::stdin();
    let mut line = String::new();
    board.display();
    loop {
        line.clear();
        match reader.read_line(&mut line) {
            // EOF or a read error: stop instead of spinning.
            Ok(0) | Err(_) => break,
            Ok(_) => {
                board = board.tick();
                board.display();
            }
        }
    }
}
/// A finite Life grid; `live_cells` lists the coordinates of living cells.
struct Board {
    height: isize,
    width: isize,
    live_cells: Vec<(isize, isize)>
}
impl Board {
    /// Every coordinate on the board in row-major order (x ascending, then y).
    /// Fix: removed the needless per-row `collect()` — the iterator chain now
    /// allocates only the final Vec.
    fn all_cells(&self) -> Vec<(isize, isize)> {
        (0..self.height)
            .flat_map(|x| (0..self.width).map(move |y| (x, y)))
            .collect()
    }
    /// Computes the next generation as a fresh board of the same size.
    fn tick(&self) -> Board {
        Board { height: self.height, width: self.width, live_cells: self.live_cells_next_go() }
    }
    /// Prints the board via its `Debug` impl.
    fn display(&self) {
        println!("{:?}", self);
    }
    /// True when `cell` is in the live set (linear scan).
    fn is_alive(&self, cell: &(isize, isize)) -> bool {
        self.live_cells.contains(cell)
    }
    /// Conway's rules: a live cell with exactly 2 neighbours survives; any
    /// cell (live or dead) with exactly 3 neighbours is alive next generation.
    fn live_cells_next_go(&self) -> Vec<(isize, isize)> {
        self.all_cells().iter().filter_map(|cell|
            match (self.is_alive(cell), self.neighbours_count(cell)) {
                (true, 2) => Some(*cell),
                (_, 3) => Some(*cell),
                _ => None,
            }
        ).collect()
    }
    /// The eight Moore neighbours of a cell (may fall outside the board;
    /// out-of-bounds cells are simply never in `live_cells`).
    fn neighbours(&self, &(x, y): &(isize, isize)) -> Vec<(isize, isize)> {
        vec![
            (x - 1, y - 1), (x - 1, y), (x - 1, y + 1),
            (x, y - 1),                 (x, y + 1),
            (x + 1, y - 1), (x + 1, y), (x + 1, y + 1),
        ]
    }
    /// Number of live Moore neighbours of `cell`.
    fn neighbours_count(&self, cell: &(isize, isize)) -> usize {
        self.neighbours(cell).iter().filter(|neighbour| self.is_alive(neighbour)).count()
    }
}
impl fmt::Debug for Board {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut board = String::new();
let cells = self.all_cells();
for x in (0..self.height) {
for y in (0..self.width) {
let idx = (x * self.width) + y;
let cell_str = match self.is_alive(&cells[idx as usize]) {
true => "■",
false => "□"
};
board = board + cell_str;
}
board = board + "\n";
}
write!(f, "{}", board)
}
}
|
use crate::helpers::configuration::*;
/// Top-level VirusTotal search result, tagged by the kind of query made.
#[derive (serde::Deserialize, Debug)]
pub enum Virustotal {
DataIp(VTdataIp),
DataUrl(VTdataUrl),
}
/// `data` array of an IP search response.
#[derive(serde::Deserialize, Debug)]
pub struct VTdataIp {
data: Vec<VTattributesIp>,
}
/// Wrapper around the per-object `attributes` field for IP results.
#[derive(serde::Deserialize, Debug)]
pub struct VTattributesIp {
attributes: VTvaluesIp,
}
/// Attributes deserialized for an IP address object.
#[derive(serde::Deserialize, Debug)]
pub struct VTvaluesIp {
as_owner: String,
asn: i32,
country: String,
last_analysis_stats: VTanalysisResult,
reputation: i32,
}
/// Per-verdict engine counts for an IP scan.
/// NOTE(review): `i8` caps each count at 127; VirusTotal runs ~90+ engines,
/// so this is close to the limit — consider widening. TODO confirm.
#[derive(serde::Deserialize, Debug)]
pub struct VTanalysisResult {
harmless: i8 ,
malicious: i8,
suspicious: i8,
undetected: i8,
timeout: i8,
}
/// `data` array of a URL search response.
#[derive(serde::Deserialize, Debug)]
pub struct VTdataUrl {
data: Vec<VTattributesUrl>,
}
/// Wrapper around the per-object `attributes` field for URL results.
#[derive (serde::Deserialize, Debug)]
pub struct VTattributesUrl {
attributes: VTvaluesUrl,
}
/// Attributes deserialized for a URL object.
#[derive (serde::Deserialize, Debug)]
pub struct VTvaluesUrl {
last_analysis_stats: VTanalysisResultUrl,
times_submitted: i16,
reputation: i16,
total_votes: VTvotesUrl,
}
/// Per-verdict engine counts for a URL scan (same shape as `VTanalysisResult`;
/// NOTE(review): could be unified with it — TODO confirm API schemas match).
#[derive(serde::Deserialize, Debug)]
pub struct VTanalysisResultUrl {
harmless: i8 ,
malicious: i8,
suspicious: i8,
undetected: i8,
timeout: i8,
}
/// Community vote tallies for a URL.
#[derive (serde::Deserialize, Debug)]
pub struct VTvotesUrl {
harmless: i8,
malicious: i8,
}
/// Queries the VirusTotal v3 search API for `src` and deserializes the
/// response as IP data when `src_type == "ip"`, URL data otherwise.
///
/// # Errors
/// Returns an error when the HTTP request fails or when the response body does
/// not match the expected schema. (Fix: the original called
/// `.expect("error deserializing")`, panicking inside a function that already
/// returns `Result` — errors are now propagated with `?`.)
pub async fn get_virustotal (src: String, api_key: String, src_type: String) -> Result<Virustotal, Box<dyn std::error::Error>>{
    let client = reqwest::Client::new();
    // NOTE(review): `src` is interpolated unescaped into the query string;
    // percent-encoding it would be safer for arbitrary input — confirm callers
    // pre-sanitise.
    let url = format!("https://www.virustotal.com/api/v3/search?query={}", src);
    let resp = client
        .get(url)
        .header("x-apikey", &api_key)
        .send()
        .await?
        .text()
        .await?;
    if src_type.eq("ip") {
        let de_vt_ip: VTdataIp = serde_json::from_str(resp.as_str())?;
        Ok(Virustotal::DataIp(de_vt_ip))
    } else {
        let de_vt_url: VTdataUrl = serde_json::from_str(resp.as_str())?;
        Ok(Virustotal::DataUrl(de_vt_url))
    }
}
|
use std::collections::HashMap;
impl Solution {
pub fn num_buses_to_destination(routes: Vec<Vec<i32>>, source: i32, target: i32) -> i32 {
if target == source{
return 0;
}
let n = routes.len();
let mut que = Vec::new();
let mut g = HashMap::new();
let mut dist = vec![1e8 as i32;n];
for i in 0..n{
for &j in routes[i].iter(){
if j == source{
que.push(i);
dist[i] = 1;
}
g.entry(j).or_insert(vec![]).push(i as i32);
}
}
while que.len() > 0{
let t = que[0];
que.remove(0);
for &x in routes[t as usize].iter(){
if x == target{
return dist[t as usize];
}
if g.contains_key(&x){
for &y in g[&x].iter(){
if dist[y as usize] > dist[t as usize] + 1{
dist[y as usize] = dist[t as usize] + 1;
que.push(y as usize);
}
}
g.remove_entry(&x);
}
}
}
-1
}
} |
// Demonstrates that a bare identifier in a `match` arm is a *binding pattern*,
// not a comparison with an existing variable.
fn main() {
let x =1;
let c ='c';
match c{
x => println!("x: {} c: {}",x,c),// the arm's `x` binds the value of `c`, shadowing the outer `x`
}
// Outside the match the shadow is gone; this prints the original `x` (1).
println!("x: {}",x);
}
|
//! Maps the API endpoints to functions
use std::collections::HashMap;
use chrono::NaiveDateTime;
use diesel;
use diesel::prelude::*;
use diesel::BelongingToDsl;
use rocket::State;
use rocket_contrib::Json;
use serde_json;
use super::DbPool;
use helpers::{debug, get_user_from_username, get_last_update};
use models::{Beatmap, Update, NewUpdate, Hiscore, NewHiscore, User};
use osu_api::ApiClient;
use schema::updates::dsl as updates_dsl;
use schema::hiscores::dsl as hiscores_dsl;
/// Holds the changes between two updates
/// (per-stat deltas plus the hiscores that are new since the previous update).
#[derive(Serialize)]
pub struct UpdateDiff {
// True when there was no previous update to diff against; all deltas then
// equal the current values.
pub first_update: bool,
pub count300: i32,
pub count100: i32,
pub count50: i32,
pub playcount: i32,
pub ranked_score: i64,
pub total_score: i64,
pub pp_rank: i32,
pub level: f32,
pub pp_raw: f32,
pub accuracy: f32,
pub count_rank_ss: i32,
pub count_rank_s: i32,
pub count_rank_a: i32,
pub pp_country_rank: i32,
// Hiscores present in the new update but not in the old one.
pub newhs: Vec<NewHiscore>,
}
impl UpdateDiff {
/// Given two different updates, returns a new `UpdateDiff` representing the difference between them. If the first
/// update doesn't exist, then the first update will be treated as containing all zeros.
///
/// Idiom fix: the duplicate-detection loop with a mutable flag is replaced by
/// `iter().any()` — same matching rule (same `beatmap_id` and `score`).
pub fn diff(prev: Option<&Update>, cur: &NewUpdate, old_hs: Vec<Hiscore>, new_hs: Vec<NewHiscore>) -> UpdateDiff {
match prev {
Some(prev) => {
// keep only hiscores that are in the new set but not the old set
let hs_diff: Vec<NewHiscore> = new_hs.into_iter()
.filter(|cur_hs| !old_hs.iter().any(|old_hs| {
old_hs.beatmap_id == cur_hs.beatmap_id && old_hs.score == cur_hs.score
}))
.collect();
UpdateDiff {
first_update: false,
count300: cur.count300 - prev.count300,
count100: cur.count100 - prev.count100,
count50: cur.count50 - prev.count50,
playcount: cur.playcount - prev.playcount,
ranked_score: cur.ranked_score - prev.ranked_score,
total_score: cur.total_score - prev.total_score,
pp_rank: cur.pp_rank - prev.pp_rank,
level: cur.level - prev.level,
pp_raw: cur.pp_raw - prev.pp_raw,
accuracy: cur.accuracy - prev.accuracy,
count_rank_ss: cur.count_rank_ss - prev.count_rank_ss,
count_rank_s: cur.count_rank_s - prev.count_rank_s,
count_rank_a: cur.count_rank_a - prev.count_rank_a,
pp_country_rank: cur.pp_country_rank - prev.pp_country_rank,
newhs: hs_diff,
}
},
// No previous update: every current value is its own delta and all
// hiscores count as new.
None => UpdateDiff {
first_update: true,
count300: cur.count300,
count100: cur.count100,
count50: cur.count50,
playcount: cur.playcount,
ranked_score: cur.ranked_score,
total_score: cur.total_score,
pp_rank: cur.pp_rank,
level: cur.level,
pp_raw: cur.pp_raw,
accuracy: cur.accuracy,
count_rank_ss: cur.count_rank_ss,
count_rank_s: cur.count_rank_s,
count_rank_a: cur.count_rank_a,
pp_country_rank: cur.pp_country_rank,
newhs: new_hs
}
}
}
}
/// Updates a user's stats using the osu! API and returns the changes since the last recorded update.
/// Returns `Ok(None)` when the osu! API reports no such user.
#[get("/update/<username>/<mode>")]
pub fn update(
api_client: State<ApiClient>, db_pool: State<DbPool>, username: String, mode: u8
) -> Result<Option<Json<UpdateDiff>>, String> {
let client = api_client.inner();
let db_conn = &*db_pool.get_conn();
let stats = client.get_stats(&username, mode)?;
match stats {
None => { return Ok(None); },
Some(s) => {
let last_update: Option<Update> = get_last_update(s.user_id, mode, db_conn)?;
// if there was a change worth recording between the two updates, write it to the database
// BUG FIX: the original compared `s.playcount != s.playcount` and
// `s.pp_country_rank != s.pp_country_rank` (always false), so only a
// pp_rank change ever triggered an insert. Compare against the last
// stored update instead.
let needs_insert = match last_update.as_ref() {
Some(first) => {
first.pp_rank != s.pp_rank ||
first.playcount != s.playcount ||
first.pp_country_rank != s.pp_country_rank
},
None => true,
};
if needs_insert {
diesel::insert_into(updates_dsl::updates)
.values(&s)
.execute(db_conn)
.map_err(debug)?;
}
// look up the user's previous hiscores
let old_hiscores: Vec<Hiscore> = hiscores_dsl::hiscores
.filter(hiscores_dsl::user_id.eq(s.user_id))
.filter(hiscores_dsl::mode.eq(mode as i16))
.load::<Hiscore>(db_conn)
.map_err(debug)?;
// get the user's current hiscores
let cur_hiscores = match api_client.get_user_best(s.user_id, mode, 100)? {
Some(hs) => hs,
None => Vec::new(),
};
// calculate the diff between the last and current updates
let diff = UpdateDiff::diff(last_update.as_ref(), &s, old_hiscores, cur_hiscores);
// insert all new hiscores into the database
diesel::insert_into(hiscores_dsl::hiscores)
.values(&diff.newhs)
.execute(db_conn)
.map_err(debug)?;
// TODO: Prefetch all of the beatmaps and update them into the cache
// calculate the difference between the current stats and the last update (if it exists) and return them
Ok(Some(Json(diff)))
}
}
}
/// Returns current static statistics for a user as stored in the osu!track database. Designed to be extremely fast and
/// avoid the osu! server round-trip involved with getting live stats. Returns a 404 if there are no stored updates for the
/// user in the selected mode.
#[get("/stats/<username>/<mode>")]
pub fn get_stats(db_pool: State<DbPool>, username: String, mode: u8) -> Result<Option<Json<Update>>, String> {
let db_conn = &*db_pool.get_conn();
let usr: User = match get_user_from_username(db_conn, &username)? {
Some(usr) => usr,
None => { return Ok(None); },
};
// Most recent stored update for this user+mode (highest id wins).
Update::belonging_to(&usr)
.order(updates_dsl::id.desc())
.filter(updates_dsl::mode.eq(mode as i16))
.first(db_conn)
.map(|x| Some(Json(x)))
.map_err(debug)
}
/// Returns the live view of a user's stats as reported by the osu! API. Functions the same way as the `/update/` endpoint
/// but returns the current statistics rather than the change since the last update
#[get("/livestats/<username>/<mode>")]
pub fn live_stats(
api_client: State<ApiClient>, db_pool: State<DbPool>, username: String, mode: u8
) -> Result<Option<Json<NewUpdate>>, String> {
let client = api_client.inner();
let db_conn = &*db_pool.get_conn();
let stats: NewUpdate = match client.get_stats(&username, mode)? {
Some(u) => u,
None => { return Ok(None); },
};
// check to see if the user exists in our database yet. If it doesn't, it will soon because the `get_stats()`
// function inserts it on another thread.
let usr: User = match get_user_from_username(db_conn, &username)? {
Some(usr) => usr,
None => {
// this means that the DB is currently in the process of inserting the user and update, so we don't need to bother
return Ok(Some(Json(stats)));
},
};
// find the last stored update for the user and, if there has been a change, insert a new update
let last_update = get_last_update(usr.id, mode, db_conn)?;
// if there was a change worth recording between the two updates, write it to the database
let needs_insert = if last_update.is_some() {
let first = last_update.unwrap();
first.pp_rank != stats.pp_rank ||
stats.playcount != stats.playcount ||
stats.pp_country_rank != stats.pp_country_rank
} else {
true
};
if needs_insert {
diesel::insert_into(updates_dsl::updates)
.values(&stats)
.execute(db_conn)
.map_err(debug)?;
}
Ok(Some(Json(stats)))
}
/// Returns all of a user's stored updates for a given gamemode.
/// Results are ordered oldest-first by update time.
#[get("/updates/<username>/<mode>")]
pub fn get_updates(db_pool: State<DbPool>, username: String, mode: u8) -> Result<Option<Json<Vec<Update>>>, String> {
let db_conn = &*db_pool.get_conn();
let usr: User = match get_user_from_username(db_conn, &username)? {
Some(user) => user,
None => { return Ok(None); },
};
// pull all updates belonging to the selected user from the database for the provided gamemode
let updates = updates_dsl::updates
.filter(updates_dsl::user_id.eq(usr.id))
.filter(updates_dsl::mode.eq(mode as i16))
.order(updates_dsl::update_time.asc())
.load::<Update>(db_conn)
.map_err(debug)?;
Ok(Some(Json(updates)))
}
/// Returns all of a user's stored hiscores for a given gamemode.
/// Results are ordered oldest-first by score time.
#[get("/hiscores/<username>/<mode>")]
pub fn get_hiscores(db_pool: State<DbPool>, username: String, mode: u8) -> Result<Option<Json<Vec<Hiscore>>>, String> {
let db_conn = &*db_pool.get_conn();
let usr: User = match get_user_from_username(db_conn, &username)? {
Some(user) => user,
None => { return Ok(None); },
};
// pull all hiscores belonging to the selected user from the database for the provided gamemode
let hiscores = hiscores_dsl::hiscores
.filter(hiscores_dsl::user_id.eq(usr.id))
.filter(hiscores_dsl::mode.eq(mode as i16))
.order(hiscores_dsl::score_time.asc())
.load::<Hiscore>(db_conn)
.map_err(debug)?;
Ok(Some(Json(hiscores)))
}
/// Returns the difference between a user's current stats and the last time their total PP score was different than its
/// current value.
#[get("/lastpp/<username>/<mode>")]
pub fn get_last_pp_diff(
api_client: State<ApiClient>, db_pool: State<DbPool>, username: String, mode: u8
) -> Result<Option<Json<UpdateDiff>>, String> {
let client = api_client.inner();
let db_conn = &*db_pool.get_conn();
let stats = client.get_stats(&username, mode)?;
match stats {
None => { return Ok(None); },
Some(s) => {
// find the most recent update in the same game mode where `pp_raw` is different than current.
let last_different_update: Vec<Update> = updates_dsl::updates
.filter(updates_dsl::user_id.eq(s.user_id))
.filter(updates_dsl::mode.eq(mode as i16))
.filter(updates_dsl::pp_raw.ne(s.pp_raw))
.order(updates_dsl::id.desc())
.limit(1)
.load::<Update>(db_conn)
.map_err(debug)?;
// Collapse the LIMIT-1 result into an Option.
let last_different_update = if last_different_update.len() > 0 { Some(&last_different_update[0]) } else { None };
// find the first recorded update that has the same pp as the user currently does
// (i.e. the earliest update strictly after the last "different" one).
let first_same_update_time: Option<NaiveDateTime> = if last_different_update.is_some() {
let same_updates = updates_dsl::updates
.filter(updates_dsl::id.gt(last_different_update.unwrap().id))
.filter(updates_dsl::user_id.eq(s.user_id))
.filter(updates_dsl::mode.eq(mode as i16))
.order(updates_dsl::id.asc())
.select(updates_dsl::update_time)
.limit(1)
.load::<NaiveDateTime>(db_conn)
.map_err(debug)?;
if same_updates.len() > 0 {
Some(same_updates[0].clone())
} else {
None
}
} else {
None
};
// get the user's current hiscores
let cur_hiscores: Vec<NewHiscore> = match api_client.get_user_best(s.user_id, mode, 100)? {
Some(hs) => hs,
None => Vec::new(),
};
let old_hiscores: Vec<Hiscore> = if last_different_update.is_some() {
// look up the user's hiscores that were made before the last significant update
let query = hiscores_dsl::hiscores
.filter(hiscores_dsl::user_id.eq(s.user_id))
.filter(hiscores_dsl::mode.eq(mode as i16));
// if there were updates previous to this with the same pp value, pick the earliest one and
// enforce a bound that all hiscores were recorded previous to it.
if first_same_update_time.is_some() {
query.filter(hiscores_dsl::time_recorded.lt(first_same_update_time.unwrap()))
.load::<Hiscore>(db_conn)
.map_err(debug)?
} else {
query.load::<Hiscore>(db_conn)
.map_err(debug)?
}
} else {
// there has been no update for the user where there pp is different than it currently is,
// so simply report all of their hiscores as new
Vec::new()
};
// calculate the diff between the current and last significant update and return it
Ok(Some(Json(UpdateDiff::diff(last_different_update, &s, old_hiscores, cur_hiscores))))
}
}
}
/// Returns data for a set of beatmaps. It first attempts to retrieve them from the database but if they aren't
/// stored, they will be retrieved from the osu! API and inserted. Returns a Json-encoded map of beatmap_id:beatmap.
/// NOTE(review): currently unimplemented — calling this endpoint panics.
#[get("/beatmaps/<ids>/<mode>")]
pub fn get_beatmaps(
api_client: State<ApiClient>, db_pool: State<DbPool>, ids: String, mode: u8
) -> Result<Option<Json<HashMap<i32, Beatmap>>>, String> {
// `ids` arrives as a JSON array string, e.g. "[1,2,3]".
let ids: Vec<i32> = serde_json::from_str(&ids).map_err(debug)?;
// TODO: Search the database and find all beatmaps that have IDs that are included in the parsed vector of ids.
// TODO: Retrieve all beatmaps from the API (preferrably asynchronously) that are not contained in the database
// TODO: Package up all results and return them
unimplemented!();
}
/// Returns data for one beatmap. It first attempts to retrieve the data from the database if it isn't found there
/// it is retrieved from the osu! API and inserted.
/// NOTE(review): currently unimplemented — calling this endpoint panics.
#[get("/beatmap/<id>/<mode>")]
pub fn get_beatmap(
api_client: State<ApiClient>, db_pool: State<DbPool>, id: i32, mode: u8
) -> Result<Option<Json<Beatmap>>, String> {
// TODO: Search the database for the beatmap with the supplied id
// TODO: if not found in the database, return it from the API.
unimplemented!();
}
|
// Machine-generated register accessors for C1SCR (svd2rust-style output —
// presumably regenerated from the device SVD; avoid hand edits. TODO confirm).
#[doc = "Register `C1SCR` reader"]
pub type R = crate::R<C1SCR_SPEC>;
#[doc = "Register `C1SCR` writer"]
pub type W = crate::W<C1SCR_SPEC>;
#[doc = "Field `CH1C` reader - CH1C"]
pub type CH1C_R = crate::BitReader<CH1C_A>;
// 1-bit enum for the CH1C field: writing `Clear` (1) clears the processor
// receive channel status bit; `NoAction` (0) leaves it untouched.
#[doc = "CH1C\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CH1C_A {
#[doc = "0: No action"]
NoAction = 0,
#[doc = "1: Processor receive channel n status bit clear"]
Clear = 1,
}
impl From<CH1C_A> for bool {
#[inline(always)]
fn from(variant: CH1C_A) -> Self {
variant as u8 != 0
}
}
// Reader helpers: decode the raw bit into the CH1C_A enum and test variants.
impl CH1C_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH1C_A {
match self.bits {
false => CH1C_A::NoAction,
true => CH1C_A::Clear,
}
}
#[doc = "No action"]
#[inline(always)]
pub fn is_no_action(&self) -> bool {
*self == CH1C_A::NoAction
}
#[doc = "Processor receive channel n status bit clear"]
#[inline(always)]
pub fn is_clear(&self) -> bool {
*self == CH1C_A::Clear
}
}
// Writer type for CH1C at bit offset `O`, plus named setters for each variant.
#[doc = "Field `CH1C` writer - CH1C"]
pub type CH1C_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CH1C_A>;
impl<'a, REG, const O: u8> CH1C_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "No action"]
#[inline(always)]
pub fn no_action(self) -> &'a mut crate::W<REG> {
self.variant(CH1C_A::NoAction)
}
#[doc = "Processor receive channel n status bit clear"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(CH1C_A::Clear)
}
}
// CH2C..CH6C behave identically to CH1C, so they reuse its reader/writer types.
#[doc = "Field `CH2C` reader - CH2C"]
pub use CH1C_R as CH2C_R;
#[doc = "Field `CH3C` reader - CH3C"]
pub use CH1C_R as CH3C_R;
#[doc = "Field `CH4C` reader - CH4C"]
pub use CH1C_R as CH4C_R;
#[doc = "Field `CH5C` reader - CH5C"]
pub use CH1C_R as CH5C_R;
#[doc = "Field `CH6C` reader - CH6C"]
pub use CH1C_R as CH6C_R;
#[doc = "Field `CH2C` writer - CH2C"]
pub use CH1C_W as CH2C_W;
#[doc = "Field `CH3C` writer - CH3C"]
pub use CH1C_W as CH3C_W;
#[doc = "Field `CH4C` writer - CH4C"]
pub use CH1C_W as CH4C_W;
#[doc = "Field `CH5C` writer - CH5C"]
pub use CH1C_W as CH5C_W;
#[doc = "Field `CH6C` writer - CH6C"]
pub use CH1C_W as CH6C_W;
#[doc = "Field `CH1S` reader - CH1S"]
pub type CH1S_R = crate::BitReader<CH1S_A>;
// 1-bit enum for CH1S: writing `Set` (1) sets the processor transmit channel
// status bit; `NoAction` (0) leaves it untouched.
#[doc = "CH1S\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CH1S_A {
#[doc = "0: No action"]
NoAction = 0,
#[doc = "1: Processor transmit channel n status bit set"]
Set = 1,
}
impl From<CH1S_A> for bool {
#[inline(always)]
fn from(variant: CH1S_A) -> Self {
variant as u8 != 0
}
}
// Reader helpers: decode the raw bit into the CH1S_A enum and test variants.
impl CH1S_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH1S_A {
match self.bits {
false => CH1S_A::NoAction,
true => CH1S_A::Set,
}
}
#[doc = "No action"]
#[inline(always)]
pub fn is_no_action(&self) -> bool {
*self == CH1S_A::NoAction
}
#[doc = "Processor transmit channel n status bit set"]
#[inline(always)]
pub fn is_set(&self) -> bool {
*self == CH1S_A::Set
}
}
// Writer type for CH1S at bit offset `O`, plus named setters for each variant.
#[doc = "Field `CH1S` writer - CH1S"]
pub type CH1S_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CH1S_A>;
impl<'a, REG, const O: u8> CH1S_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "No action"]
#[inline(always)]
pub fn no_action(self) -> &'a mut crate::W<REG> {
self.variant(CH1S_A::NoAction)
}
#[doc = "Processor transmit channel n status bit set"]
#[inline(always)]
pub fn set(self) -> &'a mut crate::W<REG> {
self.variant(CH1S_A::Set)
}
}
// CH2S..CH6S behave identically to CH1S, so they reuse its reader/writer types.
#[doc = "Field `CH2S` reader - CH2S"]
pub use CH1S_R as CH2S_R;
#[doc = "Field `CH3S` reader - CH3S"]
pub use CH1S_R as CH3S_R;
#[doc = "Field `CH4S` reader - CH4S"]
pub use CH1S_R as CH4S_R;
#[doc = "Field `CH5S` reader - CH5S"]
pub use CH1S_R as CH5S_R;
#[doc = "Field `CH6S` reader - CH6S"]
pub use CH1S_R as CH6S_R;
#[doc = "Field `CH2S` writer - CH2S"]
pub use CH1S_W as CH2S_W;
#[doc = "Field `CH3S` writer - CH3S"]
pub use CH1S_W as CH3S_W;
#[doc = "Field `CH4S` writer - CH4S"]
pub use CH1S_W as CH4S_W;
#[doc = "Field `CH5S` writer - CH5S"]
pub use CH1S_W as CH5S_W;
#[doc = "Field `CH6S` writer - CH6S"]
pub use CH1S_W as CH6S_W;
// Field readers for C1SCR: CHnC fields occupy bits 0-5, CHnS fields bits 16-21.
impl R {
    #[doc = "Bit 0 - CH1C"]
    #[inline(always)]
    pub fn ch1c(&self) -> CH1C_R {
        CH1C_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - CH2C"]
    #[inline(always)]
    pub fn ch2c(&self) -> CH2C_R {
        CH2C_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - CH3C"]
    #[inline(always)]
    pub fn ch3c(&self) -> CH3C_R {
        CH3C_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - CH4C"]
    #[inline(always)]
    pub fn ch4c(&self) -> CH4C_R {
        CH4C_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - CH5C"]
    #[inline(always)]
    pub fn ch5c(&self) -> CH5C_R {
        CH5C_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - CH6C"]
    #[inline(always)]
    pub fn ch6c(&self) -> CH6C_R {
        CH6C_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 16 - CH1S"]
    #[inline(always)]
    pub fn ch1s(&self) -> CH1S_R {
        CH1S_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - CH2S"]
    #[inline(always)]
    pub fn ch2s(&self) -> CH2S_R {
        CH2S_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - CH3S"]
    #[inline(always)]
    pub fn ch3s(&self) -> CH3S_R {
        CH3S_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - CH4S"]
    #[inline(always)]
    pub fn ch4s(&self) -> CH4S_R {
        CH4S_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - CH5S"]
    #[inline(always)]
    pub fn ch5s(&self) -> CH5S_R {
        CH5S_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - CH6S"]
    #[inline(always)]
    pub fn ch6s(&self) -> CH6S_R {
        CH6S_R::new(((self.bits >> 21) & 1) != 0)
    }
}
// Field writers for C1SCR; the const generic on each CHxx_W is the bit offset
// the writer targets (matching the reader offsets above).
impl W {
    #[doc = "Bit 0 - CH1C"]
    #[inline(always)]
    #[must_use]
    pub fn ch1c(&mut self) -> CH1C_W<C1SCR_SPEC, 0> {
        CH1C_W::new(self)
    }
    #[doc = "Bit 1 - CH2C"]
    #[inline(always)]
    #[must_use]
    pub fn ch2c(&mut self) -> CH2C_W<C1SCR_SPEC, 1> {
        CH2C_W::new(self)
    }
    #[doc = "Bit 2 - CH3C"]
    #[inline(always)]
    #[must_use]
    pub fn ch3c(&mut self) -> CH3C_W<C1SCR_SPEC, 2> {
        CH3C_W::new(self)
    }
    #[doc = "Bit 3 - CH4C"]
    #[inline(always)]
    #[must_use]
    pub fn ch4c(&mut self) -> CH4C_W<C1SCR_SPEC, 3> {
        CH4C_W::new(self)
    }
    #[doc = "Bit 4 - CH5C"]
    #[inline(always)]
    #[must_use]
    pub fn ch5c(&mut self) -> CH5C_W<C1SCR_SPEC, 4> {
        CH5C_W::new(self)
    }
    #[doc = "Bit 5 - CH6C"]
    #[inline(always)]
    #[must_use]
    pub fn ch6c(&mut self) -> CH6C_W<C1SCR_SPEC, 5> {
        CH6C_W::new(self)
    }
    #[doc = "Bit 16 - CH1S"]
    #[inline(always)]
    #[must_use]
    pub fn ch1s(&mut self) -> CH1S_W<C1SCR_SPEC, 16> {
        CH1S_W::new(self)
    }
    #[doc = "Bit 17 - CH2S"]
    #[inline(always)]
    #[must_use]
    pub fn ch2s(&mut self) -> CH2S_W<C1SCR_SPEC, 17> {
        CH2S_W::new(self)
    }
    #[doc = "Bit 18 - CH3S"]
    #[inline(always)]
    #[must_use]
    pub fn ch3s(&mut self) -> CH3S_W<C1SCR_SPEC, 18> {
        CH3S_W::new(self)
    }
    #[doc = "Bit 19 - CH4S"]
    #[inline(always)]
    #[must_use]
    pub fn ch4s(&mut self) -> CH4S_W<C1SCR_SPEC, 19> {
        CH4S_W::new(self)
    }
    #[doc = "Bit 20 - CH5S"]
    #[inline(always)]
    #[must_use]
    pub fn ch5s(&mut self) -> CH5S_W<C1SCR_SPEC, 20> {
        CH5S_W::new(self)
    }
    #[doc = "Bit 21 - CH6S"]
    #[inline(always)]
    #[must_use]
    pub fn ch6s(&mut self) -> CH6S_W<C1SCR_SPEC, 21> {
        CH6S_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY contract is the caller's: arbitrary bit patterns bypass the
        // typed field writers above.
        self.bits = bits;
        self
    }
}
#[doc = "Reading this register will always return 0x0000 0000.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c1scr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c1scr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct C1SCR_SPEC;
impl crate::RegisterSpec for C1SCR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`c1scr::R`](R) reader structure"]
impl crate::Readable for C1SCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`c1scr::W`](W) writer structure"]
impl crate::Writable for C1SCR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets C1SCR to value 0"]
impl crate::Resettable for C1SCR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
#![cfg_attr(feature = "unstable", feature(test))]
#[cfg(feature = "libwebkit2gtk")]
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "libwebkit2gtk")]
extern crate ammonia;
#[cfg(feature = "libwebkit2gtk")]
extern crate pulldown_cmark;
extern crate structopt;
#[cfg(feature = "libwebkit2gtk")]
extern crate syntect;
extern crate cairo;
extern crate gdk;
extern crate gdk_pixbuf;
extern crate gio;
extern crate glib;
extern crate gtk;
extern crate log;
extern crate pango;
extern crate pangocairo;
#[cfg(feature = "libwebkit2gtk")]
extern crate webkit2gtk;
use gio::prelude::*;
use gtk::SettingsExt;
use log::error;
use structopt::{clap, StructOpt};
include!(concat!(env!("OUT_DIR"), "/gnvim_version.rs"));
mod nvim_bridge;
mod nvim_gio;
mod thread_guard;
mod ui;
/// Parses a `WIDTHxHEIGHT` geometry string (e.g. `1280x720`) into a
/// `(width, height)` tuple, or a human-readable error message.
fn parse_geometry(input: &str) -> Result<(i32, i32), String> {
    let parts: Vec<&str> = input.split('x').collect();
    // Exactly one 'x' separator is required.
    if parts.len() != 2 {
        return Err(String::from("must be of form 'width'x'height'"));
    }
    match (parts[0].parse(), parts[1].parse()) {
        (Ok(w), Ok(h)) => Ok((w, h)),
        _ => Err(String::from("at least one argument wasn't an integer")),
    }
}
/// danivim is a graphical UI for neovim.
// Command line options, parsed by structopt; `VERSION` comes from the
// build-script-generated gnvim_version.rs include.
#[derive(StructOpt, Debug)]
#[structopt(
    name = "danivim",
    version = VERSION,
    author = "Ville Hakulinen"
)]
struct Options {
    /// Prints the executed neovim command.
    #[structopt(long = "print-nvim-cmd")]
    print_nvim_cmd: bool,
    /// Path to neovim binary.
    #[structopt(long = "nvim", name = "BIN", default_value = "nvim")]
    nvim_path: String,
    /// Path for gnvim runtime files.
    #[structopt(
        long = "gnvim-rtp",
        default_value = "/usr/local/share/gnvim/runtime",
        env = "GNVIM_RUNTIME_PATH"
    )]
    gnvim_rtp: String,
    /// Files to open.
    #[structopt(value_name = "FILES")]
    open_files: Vec<String>,
    /// Arguments that are passed to nvim.
    // `last = true` means these must appear after a `--` separator.
    #[structopt(value_name = "ARGS", last = true)]
    nvim_args: Vec<String>,
    /// Disables externalized popup menu
    #[structopt(long = "disable-ext-popupmenu")]
    disable_ext_popupmenu: bool,
    /// Disables externalized command line
    #[structopt(long = "disable-ext-cmdline")]
    disable_ext_cmdline: bool,
    /// Disables externalized tab line
    #[structopt(long = "disable-ext-tabline")]
    disable_ext_tabline: bool,
    /// Instruct GTK to prefer dark theme
    #[structopt(long = "gtk-prefer-dark-theme")]
    prefer_dark_theme: bool,
    /// Geometry of the window in widthxheight form
    // Validated/converted by `parse_geometry` at argument-parse time.
    #[structopt(long = "geometry", parse(try_from_str = parse_geometry), default_value = "1280x720")]
    geometry: (i32, i32),
}
/// Errors that can occur while bootstrapping the embedded nvim instance.
enum Error {
    /// Spawning/connecting the nvim child process failed.
    Start(nvim_gio::Error),
    /// An RPC call to a running nvim failed.
    Call(Box<nvim_rs::error::CallError>),
}
impl std::fmt::Display for Error {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Error::Start(e) => write!(fmt, "Failed to start nvim: {}", e),
            Error::Call(e) => write!(fmt, "Call to nvim failed: {}", e),
        }
    }
}
// The two From impls let `build` use `?`/`map_err(Error::from)` uniformly.
impl From<nvim_gio::Error> for Error {
    fn from(arg: nvim_gio::Error) -> Self {
        Error::Start(arg)
    }
}
impl From<Box<nvim_rs::error::CallError>> for Error {
    fn from(arg: Box<nvim_rs::error::CallError>) -> Self {
        Error::Call(arg)
    }
}
/// Spawns an embedded nvim child process, performs the initial RPC handshake
/// (subscribe, channel id, UI attach) and hands control to the UI main loop.
///
/// Returns an [`Error`] if nvim cannot be started or any initial call fails.
async fn build(app: &gtk::Application, opts: &Options) -> Result<(), Error> {
    // Channel through which the bridge forwards nvim notifications to the UI.
    let (tx, rx) = glib::MainContext::channel(glib::PRIORITY_DEFAULT);
    let bridge = nvim_bridge::NvimBridge::new(tx.clone());
    // Append gnvim's runtime files to nvim's runtimepath.
    let rtp = format!("let &rtp.=',{}'", opts.gnvim_rtp);
    let mut args: Vec<&str> = vec![
        &opts.nvim_path,
        "--embed",
        "--cmd",
        "let g:gnvim=1",
        "--cmd",
        "set termguicolors",
        "--cmd",
        &rtp,
    ];
    // Pass arguments from cli to nvim.
    for arg in opts.nvim_args.iter() {
        args.push(arg);
    }
    // Open files "normally" through nvim.
    for file in opts.open_files.iter() {
        args.push(file);
    }
    // Print the nvim cmd which is executed if asked.
    if opts.print_nvim_cmd {
        println!("nvim cmd: {:?}", args);
    }
    let mut nvim = nvim_gio::new_child(
        bridge,
        args.iter().map(|a| std::ffi::OsStr::new(a)).collect(),
        tx,
    )
    .map_err(Error::from)?;
    // Subscribe to our own notification namespace before attaching the UI.
    nvim.subscribe("Gnvim").await.map_err(Error::from)?;
    // api_info[0] is exposed to vimscript as the channel id (judging by the
    // variable name — NOTE(review): confirm against nvim_get_api_info docs).
    let api_info = nvim.get_api_info().await.map_err(Error::from)?;
    nvim.set_var("gnvim_channel_id", api_info[0].clone())
        .await
        .map_err(Error::from)?;
    // Externalize all UI components the user didn't explicitly disable.
    let mut ui_opts = nvim_rs::UiAttachOptions::new();
    ui_opts.set_rgb(true);
    ui_opts.set_linegrid_external(true);
    ui_opts.set_multigrid_external(true);
    ui_opts.set_popupmenu_external(!opts.disable_ext_popupmenu);
    ui_opts.set_tabline_external(!opts.disable_ext_tabline);
    ui_opts.set_cmdline_external(!opts.disable_ext_cmdline);
    // Initial 80x30 grid; presumably resized by the UI once the window is
    // realized — TODO confirm.
    nvim.ui_attach(80, 30, &ui_opts)
        .await
        .map_err(Error::from)?;
    let ui = ui::UI::init(app, rx, opts.geometry, nvim);
    ui.start();
    Ok(())
}
/// Entry point: initializes logging and GTK, parses CLI options, then runs
/// the GTK application, building the UI on its `activate` signal.
fn main() {
    env_logger::init();
    if let Err(err) = gtk::init() {
        error!("Failed to initialize gtk: {}", err);
        return;
    }
    // Parse options manually (instead of `Options::from_args`) so that
    // unknown-argument errors can point the user at the `--` separator.
    let opts = Options::clap();
    let opts = Options::from_clap(&opts.get_matches_safe().unwrap_or_else(
        |mut err| {
            if let clap::ErrorKind::UnknownArgument = err.kind {
                // Arg likely passed for nvim, notify user of how to pass args to nvim.
                err.message = format!(
                    "{}\n\nIf this is an argument for nvim, try moving \
                     it after a -- separator.",
                    err.message
                );
                err.exit();
            } else {
                err.exit()
            }
        },
    ));
    let mut flags = gio::ApplicationFlags::empty();
    flags.insert(gio::ApplicationFlags::NON_UNIQUE);
    flags.insert(gio::ApplicationFlags::HANDLES_OPEN);
    let app = gtk::Application::new(Some("com.github.daaniiieel.danivim"), flags)
        .unwrap();
    gdk::set_program_class("Danivim");
    glib::set_application_name("Danivim");
    gtk::Window::set_default_icon_name("gnvim");
    if opts.prefer_dark_theme {
        if let Some(settings) = gtk::Settings::get_default() {
            settings.set_property_gtk_application_prefer_dark_theme(true);
        }
    }
    app.connect_activate(move |app| {
        let opts = &opts;
        // Drive the async build to completion on GLib's default main context.
        let c = glib::MainContext::default();
        c.block_on(async move {
            if let Err(err) = build(app, opts).await {
                error!("Failed to build UI: {}", err);
            }
        });
    });
    // Empty argv: CLI args were already consumed by our own Options above.
    app.run(&[]);
}
|
use hex;
use regex::Regex;
use skia_safe::{canvas::Canvas, Paint, Rect as SkRect};
/// Error returned when a string cannot be parsed as a hex colour.
#[derive(Debug)]
pub struct ColourParseError {}

/// A colour with 8-bit alpha, red, green and blue channels.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct ArgbColour {
    a: u8,
    r: u8,
    g: u8,
    b: u8,
}

impl ArgbColour {
    /// Parses a hex colour string with an optional leading `#`.
    ///
    /// Accepted digit counts: 3 (`rgb`, each nibble doubled, e.g. `#abc` ->
    /// `#aabbcc`), 6 (`rrggbb`) and 8 (`aarrggbb`). Alpha defaults to 255 for
    /// the 3- and 6-digit forms.
    ///
    /// Fix over the previous regex-based version: its character class
    /// `[\dA-Za-z]` accepted non-hex letters, so inputs such as `"zzz"`
    /// matched the 3-digit branch and panicked on `unwrap()`. Invalid digits
    /// (and other malformed input) now return `Err(ColourParseError)`.
    pub fn from_hex(hex: &str) -> Result<ArgbColour, ColourParseError> {
        let digits = hex.strip_prefix('#').unwrap_or(hex);
        if digits.is_empty() || !digits.bytes().all(|b| b.is_ascii_hexdigit()) {
            return Err(ColourParseError {});
        }
        // Every byte was validated as a hex digit above, so the unwraps in
        // `byte` and the nibble map below cannot fail.
        let byte = |i: usize| u8::from_str_radix(&digits[2 * i..2 * i + 2], 16).unwrap();
        match digits.len() {
            3 => {
                // Expand each nibble to a full byte: 0xN -> 0xNN (N * 17).
                let mut channel = digits.chars().map(|c| c.to_digit(16).unwrap() as u8 * 17);
                let r = channel.next().unwrap();
                let g = channel.next().unwrap();
                let b = channel.next().unwrap();
                Ok(ArgbColour { a: 255, r, g, b })
            }
            6 => Ok(ArgbColour { a: 255, r: byte(0), g: byte(1), b: byte(2) }),
            8 => Ok(ArgbColour { a: byte(0), r: byte(1), g: byte(2), b: byte(3) }),
            _ => Err(ColourParseError {}),
        }
    }

    /// Returns the colour with each RGB channel bit-inverted; alpha is kept.
    fn invert(&self) -> ArgbColour {
        ArgbColour {
            a: self.a,
            r: self.r ^ 0xFF,
            g: self.g ^ 0xFF,
            b: self.b ^ 0xFF,
        }
    }
}
/// An axis-aligned rectangle carrying its stroke and fill colours.
#[derive(Debug, Copy, Clone)]
pub struct Rect {
    pub x: f32,
    pub y: f32,
    pub width: f32,
    pub height: f32,
    pub stroke_colour: ArgbColour,
    pub fill_colour: ArgbColour
}
impl Rect {
    /// Converts position/size into a Skia rect; the colours are dropped.
    fn to_skia_rect(&self) -> SkRect {
        SkRect::from_xywh(self.x, self.y, self.width, self.height)
    }
    /// Returns a copy with position and size multiplied by `scale_factor`;
    /// both colours are copied unchanged via struct update syntax.
    pub fn scale(&self, scale_factor: f32) -> Rect {
        Rect {
            x: self.x * scale_factor,
            y: self.y * scale_factor,
            width: self.width * scale_factor,
            height: self.height * scale_factor,
            ..*self
        }
    }
}
/// Draws every rect in `rects` onto `canvas`, filled with its `fill_colour`.
///
/// NOTE(review): `stroke_colour` is never read here even though a stroke
/// width is configured on the paint — outlined borders appear to be
/// unimplemented; confirm whether a stroked pass was intended.
pub fn draw_ui(canvas: &mut Canvas, rects: Vec<Rect>) {
    let mut paint = Paint::default();
    paint.set_anti_alias(true);
    paint.set_stroke_width(1.0);
    for rect in &rects {
        // Reuse one paint object, recolouring it per rect.
        let colour = rect.fill_colour;
        paint.set_argb(colour.a, colour.r, colour.g, colour.b);
        canvas.draw_rect(rect.to_skia_rect(), &paint);
    }
}
// `#[cfg(test)]` keeps the test module (and its import) out of non-test
// builds; previously it was compiled unconditionally.
#[cfg(test)]
mod test {
    use crate::renderer::ArgbColour;

    /// Round-trips a handful of 6-digit hex strings through `from_hex`.
    #[test]
    pub fn test_hex_to_argb() {
        assert_eq!(ArgbColour::from_hex("#ffffff").unwrap(), ArgbColour { a: 255, r: 255, g: 255, b: 255 });
        assert_eq!(ArgbColour::from_hex("#000000").unwrap(), ArgbColour { a: 255, r: 0, g: 0, b: 0 });
        assert_eq!(ArgbColour::from_hex("#FF0000").unwrap(), ArgbColour { a: 255, r: 255, g: 0, b: 0 });
        assert_eq!(ArgbColour::from_hex("#00FF00").unwrap(), ArgbColour { a: 255, r: 0, g: 255, b: 0 });
        assert_eq!(ArgbColour::from_hex("#0000FF").unwrap(), ArgbColour { a: 255, r: 0, g: 0, b: 255 });
        // Input with no hex digits at all must be rejected.
        assert!(ArgbColour::from_hex("!!").is_err());
    }
}
use tide::middleware::RootLogger;
use tide_static_files::StaticFiles;
/// Entry point: serves the current working directory under `/static/*` on
/// 127.0.0.1:8000 with request logging.
fn main() {
    let mut app = tide::App::new(());
    // Log every request at the root of the middleware stack.
    app.middleware(RootLogger::new());
    // Panics at startup if the directory cannot be used for static serving.
    let static_files = StaticFiles::new(".").unwrap();
    app.at("/static/*").get(static_files);
    // Blocks forever; panics if the address cannot be bound.
    app.serve("127.0.0.1:8000").unwrap();
}
|
#[macro_use]
extern crate criterion;
use criterion::{black_box, Criterion};
use feather_blocks::{BlockId, EastWire, WestWire};
// Some useless vanity benchmarks.
/// Benchmarks converting a non-default redstone wire state to its vanilla id.
fn to_id_complex_state(c: &mut Criterion) {
    feather_blocks::init();
    let block = BlockId::redstone_wire()
        .with_east_wire(EastWire::Up)
        .with_power(15);
    c.bench_function("to_id_complex_state", |b| {
        b.iter(|| black_box(block).vanilla_id());
    });
}
/// Benchmarks decoding a vanilla id back into a block state.
fn from_id_complex_state(c: &mut Criterion) {
    feather_blocks::init();
    c.bench_function("from_id_complex_state", |b| {
        // 7198 looks like an arbitrary state id — NOTE(review): confirm it
        // stays valid if the block registry changes.
        b.iter(|| BlockId::from_vanilla_id(black_box(7198)));
    });
}
/// Benchmarks chaining several property updates on one block state.
fn update_properties_complex_state(c: &mut Criterion) {
    feather_blocks::init();
    c.bench_function("update_properties_complex_state", |b| {
        b.iter(|| {
            BlockId::redstone_wire()
                .with_east_wire(EastWire::Up)
                .with_west_wire(WestWire::Side)
                .with_power(15)
        });
    });
}
criterion_group!(
    benches,
    update_properties_complex_state,
    to_id_complex_state,
    from_id_complex_state
);
criterion_main!(benches);
|
pub mod default;
/// Used only for allowing dynamic known species
/// (species resolved at runtime behind a trait object; implementors must
/// also provide `Display`).
pub trait Species: std::fmt::Display {
    /// Returns this species' name as a borrowed string.
    fn name(&self) -> &str;
}
|
use super::super::alu::*;
use super::super::{Cpu, Cycles, Memory};
use util::bits::Bits as _;
/// Marker passed to `dataproc!` for flag-setting (S=1) instruction variants.
pub const S_SET: bool = true;
/// Marker for non-flag-setting (S=0) instruction variants.
pub const S_CLR: bool = false;
/// The operand shift amount comes from a register.
pub const REG_SHIFT: bool = true;
/// The operand shift amount is an immediate encoded in the instruction.
pub const IMM_SHIFT: bool = false;
/// Creates a function for an arithmetic data processing function that writes
/// back to the destination register.
///
/// * `$name`        - name of the generated handler function.
/// * `$get_operand` - fetches (and shifts) the second operand from `instr`.
/// * `$operation`   - the ALU operation applied to `lhs` and `rhs`.
/// * `$s_flag`      - true for the flag-setting (S=1) variant.
/// * `$r_shift`     - true when the shift amount comes from a register.
macro_rules! dataproc {
    ($name:ident, $get_operand:expr, $operation:expr, $s_flag:expr, $r_shift:expr) => {
        // #TODO possibly add some debug code here in the data processing instruction
        // gen so that I can log an error when Rs is R15 which is not supported
        // by these sets of instructions.
        pub fn $name(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
            let rd = instr.bits(12, 15);
            let rn = instr.bits(16, 19);
            // When using R15 as operand (Rm or Rn), the returned value
            // depends on the instruction: PC+12 if I=0,R=1 (shift by register),
            // otherwise PC+8 (shift by immediate).
            // wrapping_add: a plain `+ 4` would panic in debug builds when the
            // register value sits near the top of the 32-bit address space.
            let lhs = if rn == 15 && $r_shift {
                cpu.registers.read(rn).wrapping_add(4)
            } else {
                cpu.registers.read(rn)
            };
            // Shift-by-register costs one extra internal cycle.
            let mut cycles = if $r_shift {
                memory.stall(Cycles::ONE);
                Cycles::ONE
            } else {
                Cycles::ZERO
            };
            // Operand and result are computed before any CPSR restore, exactly
            // as in both branches of the original (matters for flag-reading
            // operations such as ADC/SBC).
            let rhs = $get_operand(cpu, instr);
            let res = $operation(cpu, lhs, rhs);
            // If S=1, Rd=R15; should not be used in user mode:
            //   CPSR = SPSR_<current mode>
            //   PC = result
            // For example: MOVS PC,R14 ;return from SWI (PC=R14_svc, CPSR=SPSR_svc).
            if util::unlikely!(rd == 15 && $s_flag) {
                let spsr = cpu.registers.read_spsr();
                cpu.registers.write_cpsr(spsr);
                cycles += cpu.branch(res, memory);
            } else if util::unlikely!(rd == 15) {
                cycles += cpu.branch_arm(res, memory);
            } else {
                cpu.registers.write(rd, res);
            }
            cycles
        }
    };
}
/// Creates a function data processing function that does not write
/// back to the destination register (the compare/test class: CMP, CMN,
/// TEQ, TST). These always have the S flag set.
///
/// * `$name`        - name of the generated handler function.
/// * `$get_operand` - fetches (and shifts) the second operand from `instr`.
/// * `$operation`   - the ALU operation, evaluated for flag side effects only.
/// * `$r_shift`     - true when the shift amount comes from a register.
macro_rules! dataproc_no_write {
    ($name:ident, $get_operand:expr, $operation:expr, $r_shift:expr) => {
        pub fn $name(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
            let rd = instr.bits(12, 15);
            let rn = instr.bits(16, 19);
            // When using R15 as operand (Rm or Rn), the returned value
            // depends on the instruction: PC+12 if I=0,R=1 (shift by register),
            // otherwise PC+8 (shift by immediate).
            // wrapping_add avoids a debug-build overflow panic near the top of
            // the 32-bit address space.
            let lhs = if rn == 15 && $r_shift {
                cpu.registers.read(rn).wrapping_add(4)
            } else {
                cpu.registers.read(rn)
            };
            // Shift-by-register costs one extra internal cycle.
            let mut cycles = if $r_shift {
                memory.stall(Cycles::ONE);
                Cycles::ONE
            } else {
                Cycles::ZERO
            };
            // The result is discarded; only the flags the operation sets
            // matter.
            let rhs = $get_operand(cpu, instr);
            $operation(cpu, lhs, rhs);
            // Rd=R15 restores CPSR from SPSR and then branches. The Thumb bit
            // is read *after* write_cpsr, so the restored mode picks the
            // target state — preserved from the original two-step sequence.
            if util::unlikely!(rd == 15) {
                let spsr = cpu.registers.read_spsr();
                cpu.registers.write_cpsr(spsr);
                if cpu.registers.getf_t() {
                    let dest = cpu.registers.read(15) & 0xFFFFFFFE;
                    cycles += cpu.branch_thumb(dest, memory);
                } else {
                    let dest = cpu.registers.read(15) & 0xFFFFFFFC;
                    cycles += cpu.branch_arm(dest, memory);
                }
            }
            cycles
        }
    };
}
// Generated handler table: one function per (opcode, operand addressing mode).
// Suffix key — the trailing i/r letter pairs with the IMM_SHIFT/REG_SHIFT
// argument on the same line; `imm` is the rotated 8-bit immediate operand.
// ll/lr/ar/rr are presumably LSL/LSR/ASR/ROR operand shifts (defined in `bs`
// — TODO confirm). The `_s` operand fetchers feed the flag-setting variants.
// instr: AND
dataproc!(arm_and_lli, bs::lli, arm_alu_and, S_CLR, IMM_SHIFT);
dataproc!(arm_and_llr, bs::llr, arm_alu_and, S_CLR, REG_SHIFT);
dataproc!(arm_and_lri, bs::lri, arm_alu_and, S_CLR, IMM_SHIFT);
dataproc!(arm_and_lrr, bs::lrr, arm_alu_and, S_CLR, REG_SHIFT);
dataproc!(arm_and_ari, bs::ari, arm_alu_and, S_CLR, IMM_SHIFT);
dataproc!(arm_and_arr, bs::arr, arm_alu_and, S_CLR, REG_SHIFT);
dataproc!(arm_and_rri, bs::rri, arm_alu_and, S_CLR, IMM_SHIFT);
dataproc!(arm_and_rrr, bs::rrr, arm_alu_and, S_CLR, REG_SHIFT);
dataproc!(arm_and_imm, bs::imm, arm_alu_and, S_CLR, IMM_SHIFT);
// instr: ANDS
dataproc!(arm_ands_lli, bs::lli_s, arm_alu_ands, S_SET, IMM_SHIFT);
dataproc!(arm_ands_llr, bs::llr_s, arm_alu_ands, S_SET, REG_SHIFT);
dataproc!(arm_ands_lri, bs::lri_s, arm_alu_ands, S_SET, IMM_SHIFT);
dataproc!(arm_ands_lrr, bs::lrr_s, arm_alu_ands, S_SET, REG_SHIFT);
dataproc!(arm_ands_ari, bs::ari_s, arm_alu_ands, S_SET, IMM_SHIFT);
dataproc!(arm_ands_arr, bs::arr_s, arm_alu_ands, S_SET, REG_SHIFT);
dataproc!(arm_ands_rri, bs::rri_s, arm_alu_ands, S_SET, IMM_SHIFT);
dataproc!(arm_ands_rrr, bs::rrr_s, arm_alu_ands, S_SET, REG_SHIFT);
dataproc!(arm_ands_imm, bs::imm, arm_alu_ands, S_SET, IMM_SHIFT);
// instr: BIC
dataproc!(arm_bic_lli, bs::lli, arm_alu_bic, S_CLR, IMM_SHIFT);
dataproc!(arm_bic_llr, bs::llr, arm_alu_bic, S_CLR, REG_SHIFT);
dataproc!(arm_bic_lri, bs::lri, arm_alu_bic, S_CLR, IMM_SHIFT);
dataproc!(arm_bic_lrr, bs::lrr, arm_alu_bic, S_CLR, REG_SHIFT);
dataproc!(arm_bic_ari, bs::ari, arm_alu_bic, S_CLR, IMM_SHIFT);
dataproc!(arm_bic_arr, bs::arr, arm_alu_bic, S_CLR, REG_SHIFT);
dataproc!(arm_bic_rri, bs::rri, arm_alu_bic, S_CLR, IMM_SHIFT);
dataproc!(arm_bic_rrr, bs::rrr, arm_alu_bic, S_CLR, REG_SHIFT);
dataproc!(arm_bic_imm, bs::imm, arm_alu_bic, S_CLR, IMM_SHIFT);
// instr: BICS
dataproc!(arm_bics_lli, bs::lli_s, arm_alu_bics, S_SET, IMM_SHIFT);
dataproc!(arm_bics_llr, bs::llr_s, arm_alu_bics, S_SET, REG_SHIFT);
dataproc!(arm_bics_lri, bs::lri_s, arm_alu_bics, S_SET, IMM_SHIFT);
dataproc!(arm_bics_lrr, bs::lrr_s, arm_alu_bics, S_SET, REG_SHIFT);
dataproc!(arm_bics_ari, bs::ari_s, arm_alu_bics, S_SET, IMM_SHIFT);
dataproc!(arm_bics_arr, bs::arr_s, arm_alu_bics, S_SET, REG_SHIFT);
dataproc!(arm_bics_rri, bs::rri_s, arm_alu_bics, S_SET, IMM_SHIFT);
dataproc!(arm_bics_rrr, bs::rrr_s, arm_alu_bics, S_SET, REG_SHIFT);
dataproc!(arm_bics_imm, bs::imm, arm_alu_bics, S_SET, IMM_SHIFT);
// instr: ORR
dataproc!(arm_orr_lli, bs::lli, arm_alu_orr, S_CLR, IMM_SHIFT);
dataproc!(arm_orr_llr, bs::llr, arm_alu_orr, S_CLR, REG_SHIFT);
dataproc!(arm_orr_lri, bs::lri, arm_alu_orr, S_CLR, IMM_SHIFT);
dataproc!(arm_orr_lrr, bs::lrr, arm_alu_orr, S_CLR, REG_SHIFT);
dataproc!(arm_orr_ari, bs::ari, arm_alu_orr, S_CLR, IMM_SHIFT);
dataproc!(arm_orr_arr, bs::arr, arm_alu_orr, S_CLR, REG_SHIFT);
dataproc!(arm_orr_rri, bs::rri, arm_alu_orr, S_CLR, IMM_SHIFT);
dataproc!(arm_orr_rrr, bs::rrr, arm_alu_orr, S_CLR, REG_SHIFT);
dataproc!(arm_orr_imm, bs::imm, arm_alu_orr, S_CLR, IMM_SHIFT);
// instr: ORRS
dataproc!(arm_orrs_lli, bs::lli_s, arm_alu_orrs, S_SET, IMM_SHIFT);
dataproc!(arm_orrs_llr, bs::llr_s, arm_alu_orrs, S_SET, REG_SHIFT);
dataproc!(arm_orrs_lri, bs::lri_s, arm_alu_orrs, S_SET, IMM_SHIFT);
dataproc!(arm_orrs_lrr, bs::lrr_s, arm_alu_orrs, S_SET, REG_SHIFT);
dataproc!(arm_orrs_ari, bs::ari_s, arm_alu_orrs, S_SET, IMM_SHIFT);
dataproc!(arm_orrs_arr, bs::arr_s, arm_alu_orrs, S_SET, REG_SHIFT);
dataproc!(arm_orrs_rri, bs::rri_s, arm_alu_orrs, S_SET, IMM_SHIFT);
dataproc!(arm_orrs_rrr, bs::rrr_s, arm_alu_orrs, S_SET, REG_SHIFT);
dataproc!(arm_orrs_imm, bs::imm, arm_alu_orrs, S_SET, IMM_SHIFT);
// instr: EOR
dataproc!(arm_eor_lli, bs::lli, arm_alu_eor, S_CLR, IMM_SHIFT);
dataproc!(arm_eor_llr, bs::llr, arm_alu_eor, S_CLR, REG_SHIFT);
dataproc!(arm_eor_lri, bs::lri, arm_alu_eor, S_CLR, IMM_SHIFT);
dataproc!(arm_eor_lrr, bs::lrr, arm_alu_eor, S_CLR, REG_SHIFT);
dataproc!(arm_eor_ari, bs::ari, arm_alu_eor, S_CLR, IMM_SHIFT);
dataproc!(arm_eor_arr, bs::arr, arm_alu_eor, S_CLR, REG_SHIFT);
dataproc!(arm_eor_rri, bs::rri, arm_alu_eor, S_CLR, IMM_SHIFT);
dataproc!(arm_eor_rrr, bs::rrr, arm_alu_eor, S_CLR, REG_SHIFT);
dataproc!(arm_eor_imm, bs::imm, arm_alu_eor, S_CLR, IMM_SHIFT);
// instr: EORS
dataproc!(arm_eors_lli, bs::lli_s, arm_alu_eors, S_SET, IMM_SHIFT);
dataproc!(arm_eors_llr, bs::llr_s, arm_alu_eors, S_SET, REG_SHIFT);
dataproc!(arm_eors_lri, bs::lri_s, arm_alu_eors, S_SET, IMM_SHIFT);
dataproc!(arm_eors_lrr, bs::lrr_s, arm_alu_eors, S_SET, REG_SHIFT);
dataproc!(arm_eors_ari, bs::ari_s, arm_alu_eors, S_SET, IMM_SHIFT);
dataproc!(arm_eors_arr, bs::arr_s, arm_alu_eors, S_SET, REG_SHIFT);
dataproc!(arm_eors_rri, bs::rri_s, arm_alu_eors, S_SET, IMM_SHIFT);
dataproc!(arm_eors_rrr, bs::rrr_s, arm_alu_eors, S_SET, REG_SHIFT);
dataproc!(arm_eors_imm, bs::imm, arm_alu_eors, S_SET, IMM_SHIFT);
// instr: SUB
dataproc!(arm_sub_lli, bs::lli, arm_alu_sub, S_CLR, IMM_SHIFT);
dataproc!(arm_sub_llr, bs::llr, arm_alu_sub, S_CLR, REG_SHIFT);
dataproc!(arm_sub_lri, bs::lri, arm_alu_sub, S_CLR, IMM_SHIFT);
dataproc!(arm_sub_lrr, bs::lrr, arm_alu_sub, S_CLR, REG_SHIFT);
dataproc!(arm_sub_ari, bs::ari, arm_alu_sub, S_CLR, IMM_SHIFT);
dataproc!(arm_sub_arr, bs::arr, arm_alu_sub, S_CLR, REG_SHIFT);
dataproc!(arm_sub_rri, bs::rri, arm_alu_sub, S_CLR, IMM_SHIFT);
dataproc!(arm_sub_rrr, bs::rrr, arm_alu_sub, S_CLR, REG_SHIFT);
dataproc!(arm_sub_imm, bs::imm, arm_alu_sub, S_CLR, IMM_SHIFT);
// instr: SUBS
dataproc!(arm_subs_lli, bs::lli_s, arm_alu_subs, S_SET, IMM_SHIFT);
dataproc!(arm_subs_llr, bs::llr_s, arm_alu_subs, S_SET, REG_SHIFT);
dataproc!(arm_subs_lri, bs::lri_s, arm_alu_subs, S_SET, IMM_SHIFT);
dataproc!(arm_subs_lrr, bs::lrr_s, arm_alu_subs, S_SET, REG_SHIFT);
dataproc!(arm_subs_ari, bs::ari_s, arm_alu_subs, S_SET, IMM_SHIFT);
dataproc!(arm_subs_arr, bs::arr_s, arm_alu_subs, S_SET, REG_SHIFT);
dataproc!(arm_subs_rri, bs::rri_s, arm_alu_subs, S_SET, IMM_SHIFT);
dataproc!(arm_subs_rrr, bs::rrr_s, arm_alu_subs, S_SET, REG_SHIFT);
dataproc!(arm_subs_imm, bs::imm, arm_alu_subs, S_SET, IMM_SHIFT);
// instr: RSB
dataproc!(arm_rsb_lli, bs::lli, arm_alu_rsb, S_CLR, IMM_SHIFT);
dataproc!(arm_rsb_llr, bs::llr, arm_alu_rsb, S_CLR, REG_SHIFT);
dataproc!(arm_rsb_lri, bs::lri, arm_alu_rsb, S_CLR, IMM_SHIFT);
dataproc!(arm_rsb_lrr, bs::lrr, arm_alu_rsb, S_CLR, REG_SHIFT);
dataproc!(arm_rsb_ari, bs::ari, arm_alu_rsb, S_CLR, IMM_SHIFT);
dataproc!(arm_rsb_arr, bs::arr, arm_alu_rsb, S_CLR, REG_SHIFT);
dataproc!(arm_rsb_rri, bs::rri, arm_alu_rsb, S_CLR, IMM_SHIFT);
dataproc!(arm_rsb_rrr, bs::rrr, arm_alu_rsb, S_CLR, REG_SHIFT);
dataproc!(arm_rsb_imm, bs::imm, arm_alu_rsb, S_CLR, IMM_SHIFT);
// instr: RSBS
dataproc!(arm_rsbs_lli, bs::lli_s, arm_alu_rsbs, S_SET, IMM_SHIFT);
dataproc!(arm_rsbs_llr, bs::llr_s, arm_alu_rsbs, S_SET, REG_SHIFT);
dataproc!(arm_rsbs_lri, bs::lri_s, arm_alu_rsbs, S_SET, IMM_SHIFT);
dataproc!(arm_rsbs_lrr, bs::lrr_s, arm_alu_rsbs, S_SET, REG_SHIFT);
dataproc!(arm_rsbs_ari, bs::ari_s, arm_alu_rsbs, S_SET, IMM_SHIFT);
dataproc!(arm_rsbs_arr, bs::arr_s, arm_alu_rsbs, S_SET, REG_SHIFT);
dataproc!(arm_rsbs_rri, bs::rri_s, arm_alu_rsbs, S_SET, IMM_SHIFT);
dataproc!(arm_rsbs_rrr, bs::rrr_s, arm_alu_rsbs, S_SET, REG_SHIFT);
dataproc!(arm_rsbs_imm, bs::imm, arm_alu_rsbs, S_SET, IMM_SHIFT);
// instr: ADD
dataproc!(arm_add_lli, bs::lli, arm_alu_add, S_CLR, IMM_SHIFT);
dataproc!(arm_add_llr, bs::llr, arm_alu_add, S_CLR, REG_SHIFT);
dataproc!(arm_add_lri, bs::lri, arm_alu_add, S_CLR, IMM_SHIFT);
dataproc!(arm_add_lrr, bs::lrr, arm_alu_add, S_CLR, REG_SHIFT);
dataproc!(arm_add_ari, bs::ari, arm_alu_add, S_CLR, IMM_SHIFT);
dataproc!(arm_add_arr, bs::arr, arm_alu_add, S_CLR, REG_SHIFT);
dataproc!(arm_add_rri, bs::rri, arm_alu_add, S_CLR, IMM_SHIFT);
dataproc!(arm_add_rrr, bs::rrr, arm_alu_add, S_CLR, REG_SHIFT);
dataproc!(arm_add_imm, bs::imm, arm_alu_add, S_CLR, IMM_SHIFT);
// instr: ADDS
dataproc!(arm_adds_lli, bs::lli_s, arm_alu_adds, S_SET, IMM_SHIFT);
dataproc!(arm_adds_llr, bs::llr_s, arm_alu_adds, S_SET, REG_SHIFT);
dataproc!(arm_adds_lri, bs::lri_s, arm_alu_adds, S_SET, IMM_SHIFT);
dataproc!(arm_adds_lrr, bs::lrr_s, arm_alu_adds, S_SET, REG_SHIFT);
dataproc!(arm_adds_ari, bs::ari_s, arm_alu_adds, S_SET, IMM_SHIFT);
dataproc!(arm_adds_arr, bs::arr_s, arm_alu_adds, S_SET, REG_SHIFT);
dataproc!(arm_adds_rri, bs::rri_s, arm_alu_adds, S_SET, IMM_SHIFT);
dataproc!(arm_adds_rrr, bs::rrr_s, arm_alu_adds, S_SET, REG_SHIFT);
dataproc!(arm_adds_imm, bs::imm, arm_alu_adds, S_SET, IMM_SHIFT);
// instr: ADC
dataproc!(arm_adc_lli, bs::lli, arm_alu_adc, S_CLR, IMM_SHIFT);
dataproc!(arm_adc_llr, bs::llr, arm_alu_adc, S_CLR, REG_SHIFT);
dataproc!(arm_adc_lri, bs::lri, arm_alu_adc, S_CLR, IMM_SHIFT);
dataproc!(arm_adc_lrr, bs::lrr, arm_alu_adc, S_CLR, REG_SHIFT);
dataproc!(arm_adc_ari, bs::ari, arm_alu_adc, S_CLR, IMM_SHIFT);
dataproc!(arm_adc_arr, bs::arr, arm_alu_adc, S_CLR, REG_SHIFT);
dataproc!(arm_adc_rri, bs::rri, arm_alu_adc, S_CLR, IMM_SHIFT);
dataproc!(arm_adc_rrr, bs::rrr, arm_alu_adc, S_CLR, REG_SHIFT);
dataproc!(arm_adc_imm, bs::imm, arm_alu_adc, S_CLR, IMM_SHIFT);
// instr: ADCS
dataproc!(arm_adcs_lli, bs::lli_s, arm_alu_adcs, S_SET, IMM_SHIFT);
dataproc!(arm_adcs_llr, bs::llr_s, arm_alu_adcs, S_SET, REG_SHIFT);
dataproc!(arm_adcs_lri, bs::lri_s, arm_alu_adcs, S_SET, IMM_SHIFT);
dataproc!(arm_adcs_lrr, bs::lrr_s, arm_alu_adcs, S_SET, REG_SHIFT);
dataproc!(arm_adcs_ari, bs::ari_s, arm_alu_adcs, S_SET, IMM_SHIFT);
dataproc!(arm_adcs_arr, bs::arr_s, arm_alu_adcs, S_SET, REG_SHIFT);
dataproc!(arm_adcs_rri, bs::rri_s, arm_alu_adcs, S_SET, IMM_SHIFT);
dataproc!(arm_adcs_rrr, bs::rrr_s, arm_alu_adcs, S_SET, REG_SHIFT);
dataproc!(arm_adcs_imm, bs::imm, arm_alu_adcs, S_SET, IMM_SHIFT);
// instr: SBC
dataproc!(arm_sbc_lli, bs::lli, arm_alu_sbc, S_CLR, IMM_SHIFT);
dataproc!(arm_sbc_llr, bs::llr, arm_alu_sbc, S_CLR, REG_SHIFT);
dataproc!(arm_sbc_lri, bs::lri, arm_alu_sbc, S_CLR, IMM_SHIFT);
dataproc!(arm_sbc_lrr, bs::lrr, arm_alu_sbc, S_CLR, REG_SHIFT);
dataproc!(arm_sbc_ari, bs::ari, arm_alu_sbc, S_CLR, IMM_SHIFT);
dataproc!(arm_sbc_arr, bs::arr, arm_alu_sbc, S_CLR, REG_SHIFT);
dataproc!(arm_sbc_rri, bs::rri, arm_alu_sbc, S_CLR, IMM_SHIFT);
dataproc!(arm_sbc_rrr, bs::rrr, arm_alu_sbc, S_CLR, REG_SHIFT);
dataproc!(arm_sbc_imm, bs::imm, arm_alu_sbc, S_CLR, IMM_SHIFT);
// instr: SBCS
dataproc!(arm_sbcs_lli, bs::lli_s, arm_alu_sbcs, S_SET, IMM_SHIFT);
dataproc!(arm_sbcs_llr, bs::llr_s, arm_alu_sbcs, S_SET, REG_SHIFT);
dataproc!(arm_sbcs_lri, bs::lri_s, arm_alu_sbcs, S_SET, IMM_SHIFT);
dataproc!(arm_sbcs_lrr, bs::lrr_s, arm_alu_sbcs, S_SET, REG_SHIFT);
dataproc!(arm_sbcs_ari, bs::ari_s, arm_alu_sbcs, S_SET, IMM_SHIFT);
dataproc!(arm_sbcs_arr, bs::arr_s, arm_alu_sbcs, S_SET, REG_SHIFT);
dataproc!(arm_sbcs_rri, bs::rri_s, arm_alu_sbcs, S_SET, IMM_SHIFT);
dataproc!(arm_sbcs_rrr, bs::rrr_s, arm_alu_sbcs, S_SET, REG_SHIFT);
dataproc!(arm_sbcs_imm, bs::imm, arm_alu_sbcs, S_SET, IMM_SHIFT);
// instr: RSC
dataproc!(arm_rsc_lli, bs::lli, arm_alu_rsc, S_CLR, IMM_SHIFT);
dataproc!(arm_rsc_llr, bs::llr, arm_alu_rsc, S_CLR, REG_SHIFT);
dataproc!(arm_rsc_lri, bs::lri, arm_alu_rsc, S_CLR, IMM_SHIFT);
dataproc!(arm_rsc_lrr, bs::lrr, arm_alu_rsc, S_CLR, REG_SHIFT);
dataproc!(arm_rsc_ari, bs::ari, arm_alu_rsc, S_CLR, IMM_SHIFT);
dataproc!(arm_rsc_arr, bs::arr, arm_alu_rsc, S_CLR, REG_SHIFT);
dataproc!(arm_rsc_rri, bs::rri, arm_alu_rsc, S_CLR, IMM_SHIFT);
dataproc!(arm_rsc_rrr, bs::rrr, arm_alu_rsc, S_CLR, REG_SHIFT);
dataproc!(arm_rsc_imm, bs::imm, arm_alu_rsc, S_CLR, IMM_SHIFT);
// instr: RSCS
dataproc!(arm_rscs_lli, bs::lli_s, arm_alu_rscs, S_SET, IMM_SHIFT);
dataproc!(arm_rscs_llr, bs::llr_s, arm_alu_rscs, S_SET, REG_SHIFT);
dataproc!(arm_rscs_lri, bs::lri_s, arm_alu_rscs, S_SET, IMM_SHIFT);
dataproc!(arm_rscs_lrr, bs::lrr_s, arm_alu_rscs, S_SET, REG_SHIFT);
dataproc!(arm_rscs_ari, bs::ari_s, arm_alu_rscs, S_SET, IMM_SHIFT);
dataproc!(arm_rscs_arr, bs::arr_s, arm_alu_rscs, S_SET, REG_SHIFT);
dataproc!(arm_rscs_rri, bs::rri_s, arm_alu_rscs, S_SET, IMM_SHIFT);
dataproc!(arm_rscs_rrr, bs::rrr_s, arm_alu_rscs, S_SET, REG_SHIFT);
dataproc!(arm_rscs_imm, bs::imm, arm_alu_rscs, S_SET, IMM_SHIFT);
// instr: MOV
dataproc!(arm_mov_lli, bs::lli, arm_alu_mov, S_CLR, IMM_SHIFT);
dataproc!(arm_mov_llr, bs::llr, arm_alu_mov, S_CLR, REG_SHIFT);
dataproc!(arm_mov_lri, bs::lri, arm_alu_mov, S_CLR, IMM_SHIFT);
dataproc!(arm_mov_lrr, bs::lrr, arm_alu_mov, S_CLR, REG_SHIFT);
dataproc!(arm_mov_ari, bs::ari, arm_alu_mov, S_CLR, IMM_SHIFT);
dataproc!(arm_mov_arr, bs::arr, arm_alu_mov, S_CLR, REG_SHIFT);
dataproc!(arm_mov_rri, bs::rri, arm_alu_mov, S_CLR, IMM_SHIFT);
dataproc!(arm_mov_rrr, bs::rrr, arm_alu_mov, S_CLR, REG_SHIFT);
dataproc!(arm_mov_imm, bs::imm, arm_alu_mov, S_CLR, IMM_SHIFT);
// instr: MOVS
dataproc!(arm_movs_lli, bs::lli_s, arm_alu_movs, S_SET, IMM_SHIFT);
dataproc!(arm_movs_llr, bs::llr_s, arm_alu_movs, S_SET, REG_SHIFT);
dataproc!(arm_movs_lri, bs::lri_s, arm_alu_movs, S_SET, IMM_SHIFT);
dataproc!(arm_movs_lrr, bs::lrr_s, arm_alu_movs, S_SET, REG_SHIFT);
dataproc!(arm_movs_ari, bs::ari_s, arm_alu_movs, S_SET, IMM_SHIFT);
dataproc!(arm_movs_arr, bs::arr_s, arm_alu_movs, S_SET, REG_SHIFT);
dataproc!(arm_movs_rri, bs::rri_s, arm_alu_movs, S_SET, IMM_SHIFT);
dataproc!(arm_movs_rrr, bs::rrr_s, arm_alu_movs, S_SET, REG_SHIFT);
dataproc!(arm_movs_imm, bs::imm, arm_alu_movs, S_SET, IMM_SHIFT);
// instr: MVN
dataproc!(arm_mvn_lli, bs::lli, arm_alu_mvn, S_CLR, IMM_SHIFT);
dataproc!(arm_mvn_llr, bs::llr, arm_alu_mvn, S_CLR, REG_SHIFT);
dataproc!(arm_mvn_lri, bs::lri, arm_alu_mvn, S_CLR, IMM_SHIFT);
dataproc!(arm_mvn_lrr, bs::lrr, arm_alu_mvn, S_CLR, REG_SHIFT);
dataproc!(arm_mvn_ari, bs::ari, arm_alu_mvn, S_CLR, IMM_SHIFT);
dataproc!(arm_mvn_arr, bs::arr, arm_alu_mvn, S_CLR, REG_SHIFT);
dataproc!(arm_mvn_rri, bs::rri, arm_alu_mvn, S_CLR, IMM_SHIFT);
dataproc!(arm_mvn_rrr, bs::rrr, arm_alu_mvn, S_CLR, REG_SHIFT);
dataproc!(arm_mvn_imm, bs::imm, arm_alu_mvn, S_CLR, IMM_SHIFT);
// instr: MVNS
dataproc!(arm_mvns_lli, bs::lli_s, arm_alu_mvns, S_SET, IMM_SHIFT);
dataproc!(arm_mvns_llr, bs::llr_s, arm_alu_mvns, S_SET, REG_SHIFT);
dataproc!(arm_mvns_lri, bs::lri_s, arm_alu_mvns, S_SET, IMM_SHIFT);
dataproc!(arm_mvns_lrr, bs::lrr_s, arm_alu_mvns, S_SET, REG_SHIFT);
dataproc!(arm_mvns_ari, bs::ari_s, arm_alu_mvns, S_SET, IMM_SHIFT);
dataproc!(arm_mvns_arr, bs::arr_s, arm_alu_mvns, S_SET, REG_SHIFT);
dataproc!(arm_mvns_rri, bs::rri_s, arm_alu_mvns, S_SET, IMM_SHIFT);
dataproc!(arm_mvns_rrr, bs::rrr_s, arm_alu_mvns, S_SET, REG_SHIFT);
dataproc!(arm_mvns_imm, bs::imm, arm_alu_mvns, S_SET, IMM_SHIFT);
// instr: CMPS
dataproc_no_write!(arm_cmps_lli, bs::lli_s, arm_alu_cmps, IMM_SHIFT);
dataproc_no_write!(arm_cmps_llr, bs::llr_s, arm_alu_cmps, REG_SHIFT);
dataproc_no_write!(arm_cmps_lri, bs::lri_s, arm_alu_cmps, IMM_SHIFT);
dataproc_no_write!(arm_cmps_lrr, bs::lrr_s, arm_alu_cmps, REG_SHIFT);
dataproc_no_write!(arm_cmps_ari, bs::ari_s, arm_alu_cmps, IMM_SHIFT);
dataproc_no_write!(arm_cmps_arr, bs::arr_s, arm_alu_cmps, REG_SHIFT);
dataproc_no_write!(arm_cmps_rri, bs::rri_s, arm_alu_cmps, IMM_SHIFT);
dataproc_no_write!(arm_cmps_rrr, bs::rrr_s, arm_alu_cmps, REG_SHIFT);
dataproc_no_write!(arm_cmps_imm, bs::imm, arm_alu_cmps, IMM_SHIFT);
// instr: CMNS
dataproc_no_write!(arm_cmns_lli, bs::lli_s, arm_alu_cmns, IMM_SHIFT);
dataproc_no_write!(arm_cmns_llr, bs::llr_s, arm_alu_cmns, REG_SHIFT);
dataproc_no_write!(arm_cmns_lri, bs::lri_s, arm_alu_cmns, IMM_SHIFT);
dataproc_no_write!(arm_cmns_lrr, bs::lrr_s, arm_alu_cmns, REG_SHIFT);
dataproc_no_write!(arm_cmns_ari, bs::ari_s, arm_alu_cmns, IMM_SHIFT);
dataproc_no_write!(arm_cmns_arr, bs::arr_s, arm_alu_cmns, REG_SHIFT);
dataproc_no_write!(arm_cmns_rri, bs::rri_s, arm_alu_cmns, IMM_SHIFT);
dataproc_no_write!(arm_cmns_rrr, bs::rrr_s, arm_alu_cmns, REG_SHIFT);
dataproc_no_write!(arm_cmns_imm, bs::imm, arm_alu_cmns, IMM_SHIFT);
// instr: TEQS
dataproc_no_write!(arm_teqs_lli, bs::lli_s, arm_alu_teqs, IMM_SHIFT);
dataproc_no_write!(arm_teqs_llr, bs::llr_s, arm_alu_teqs, REG_SHIFT);
dataproc_no_write!(arm_teqs_lri, bs::lri_s, arm_alu_teqs, IMM_SHIFT);
dataproc_no_write!(arm_teqs_lrr, bs::lrr_s, arm_alu_teqs, REG_SHIFT);
dataproc_no_write!(arm_teqs_ari, bs::ari_s, arm_alu_teqs, IMM_SHIFT);
dataproc_no_write!(arm_teqs_arr, bs::arr_s, arm_alu_teqs, REG_SHIFT);
dataproc_no_write!(arm_teqs_rri, bs::rri_s, arm_alu_teqs, IMM_SHIFT);
dataproc_no_write!(arm_teqs_rrr, bs::rrr_s, arm_alu_teqs, REG_SHIFT);
dataproc_no_write!(arm_teqs_imm, bs::imm, arm_alu_teqs, IMM_SHIFT);
// instr: TSTS
dataproc_no_write!(arm_tsts_lli, bs::lli_s, arm_alu_tsts, IMM_SHIFT);
dataproc_no_write!(arm_tsts_llr, bs::llr_s, arm_alu_tsts, REG_SHIFT);
dataproc_no_write!(arm_tsts_lri, bs::lri_s, arm_alu_tsts, IMM_SHIFT);
dataproc_no_write!(arm_tsts_lrr, bs::lrr_s, arm_alu_tsts, REG_SHIFT);
dataproc_no_write!(arm_tsts_ari, bs::ari_s, arm_alu_tsts, IMM_SHIFT);
dataproc_no_write!(arm_tsts_arr, bs::arr_s, arm_alu_tsts, REG_SHIFT);
dataproc_no_write!(arm_tsts_rri, bs::rri_s, arm_alu_tsts, IMM_SHIFT);
dataproc_no_write!(arm_tsts_rrr, bs::rrr_s, arm_alu_tsts, REG_SHIFT);
dataproc_no_write!(arm_tsts_imm, bs::imm, arm_alu_tsts, IMM_SHIFT);
|
use std::env;
/// Returns `true` when the process was started with a `-d` or `--debug`
/// command-line flag.
pub fn is_debug() -> bool {
    // Skip nothing: the original scanned every argument including argv[0],
    // so we preserve that behaviour here.
    has_debug_flag(env::args())
}

/// True if any argument in `args` is exactly `-d` or `--debug`.
fn has_debug_flag(mut args: impl Iterator<Item = String>) -> bool {
    args.any(|arg| arg == "-d" || arg == "--debug")
}
|
#[doc = "Reader of register ADV_NEXT_INSTANT"]
pub type R = crate::R<u32, super::ADV_NEXT_INSTANT>;
#[doc = "Reader of field `ADV_NEXT_INSTANT`"]
pub type ADV_NEXT_INSTANT_R = crate::R<u16, u16>;
// Register reader (appears to be svd2rust-generated): only the low 16 bits
// of the 32-bit register word carry the field value.
impl R {
    #[doc = "Bits 0:15 - Shows the next start of advertising event with reference to the internal reference clock."]
    #[inline(always)]
    pub fn adv_next_instant(&self) -> ADV_NEXT_INSTANT_R {
        // Mask the low half-word and narrow; the upper 16 bits are unused here.
        ADV_NEXT_INSTANT_R::new((self.bits & 0xffff) as u16)
    }
}
|
/// Entry point: runs the vector/array demonstration and nothing else.
fn main() {
    vectors()
}
/// Demonstrates growable `Vec`s versus fixed-size arrays.
///
/// Prints each element of a 20-element array of ones, then the second name
/// from a string-slice array.
fn vectors() {
    // Growable, heap-allocated vector. (The original used pre-1.0 `1i`
    // integer suffixes and `[1i, ..20]` repeat syntax, which no longer
    // compile; `1i32` and `[value; count]` are the modern forms.)
    let mut nums = vec![1i32, 2i32, 3i32];
    let nums2 = [1i32; 20];
    nums.push(4i32); // works: a Vec can grow
    let nums_arr = [1i32, 2i32, 3i32];
    // nums_arr.push(4i32); <-- won't work: arrays have a fixed length
    let _ = nums_arr;
    // `as_slice()` from old Rust is spelled as a range-index borrow today.
    let slice = &nums[..];
    let _ = slice;
    for i in nums2.iter() {
        println!("{}", i);
    }
    let names = ["Grayson", "Brian", "Niko"];
    println!("The second name is: {}", names[1]);
}
|
#[doc = "Register `WMMONR` reader"]
pub type R = crate::R<WMMONR_SPEC>;
#[doc = "Field `WMISSMON` reader - cache write-miss monitor counter"]
pub type WMISSMON_R = crate::FieldReader<u16>;
// Read accessor for the single 16-bit counter field of WMMONR.
impl R {
    #[doc = "Bits 0:15 - cache write-miss monitor counter"]
    #[inline(always)]
    pub fn wmissmon(&self) -> WMISSMON_R {
        // The counter occupies the low half-word; the upper bits are masked off.
        WMISSMON_R::new((self.bits & 0xffff) as u16)
    }
}
#[doc = "DCACHE write-miss monitor register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wmmonr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct WMMONR_SPEC;
// Marker type describing the register: 32 bits wide, readable, resets to 0.
impl crate::RegisterSpec for WMMONR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`wmmonr::R`](R) reader structure"]
impl crate::Readable for WMMONR_SPEC {}
#[doc = "`reset()` method sets WMMONR to value 0"]
impl crate::Resettable for WMMONR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use volatile::Volatile;
use super::color::ColorCode;
// Dimensions of the text screen: 25 rows by 80 columns.
pub(crate) const BUFFER_HEIGHT: usize = 25;
pub(crate) const BUFFER_WIDTH: usize = 80;
/// One on-screen cell: a character byte plus its colour attribute.
/// `#[repr(C)]` fixes the field order (character first, colour second),
/// which matters because the buffer below is written through `Volatile`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub(crate) struct ScreenChar {
    /// Raw character byte.
    pub(crate) character: u8,
    /// Foreground/background colour attribute for this cell.
    pub(crate) color_code: ColorCode,
}
/// 2-D grid of screen cells. `#[repr(transparent)]` guarantees the struct
/// has exactly the layout of the inner array; each cell is `Volatile` so
/// reads/writes are never optimised away. NOTE(review): presumably this is
/// overlaid on the memory-mapped VGA text buffer -- confirm where it is
/// constructed.
#[repr(transparent)]
pub struct Buffer {
    pub(crate) chars: [[Volatile<ScreenChar>; BUFFER_WIDTH]; BUFFER_HEIGHT],
}
|
#![no_std]
//! Crate root: wires together the internal modules and re-exports the
//! public API surface (`Emulator`, `RustzxSettings`, `EmulationMode`).
// Internal implementation modules (crate-private).
pub(crate) mod emulator;
pub(crate) mod settings;
pub(crate) mod utils;
// Public modules.
pub mod error;
pub mod host;
pub mod zx;
// Convenience re-exports so callers don't need the module paths.
pub use emulator::Emulator;
pub use settings::RustzxSettings;
pub use utils::EmulationMode;
// Optional re-export: lets downstream code iterate enum variants when the
// `strum` feature is enabled.
#[cfg(feature = "strum")]
pub use strum::IntoEnumIterator as IterableEnum;
// no_std crate: heap types come from `alloc` rather than `std`.
extern crate alloc;
/// Crate-wide result type using this crate's own error enum.
pub type Result<T> = core::result::Result<T, error::Error>;
|
#[doc = "Reader of register SIE_EP3_CR0"]
pub type R = crate::R<u32, super::SIE_EP3_CR0>;
#[doc = "Writer for register SIE_EP3_CR0"]
pub type W = crate::W<u32, super::SIE_EP3_CR0>;
#[doc = "Register SIE_EP3_CR0 `reset()`'s with value 0"]
impl crate::ResetValue for super::SIE_EP3_CR0 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "The mode controls how the USB SIE responds to traffic and how the USB SIE changes the mode of that endpoint as a result of host packets to the endpoint.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
// Enumerated values of the 4-bit MODE field. Note the encoding is sparse:
// 4, 10 and 14 have no variant (see `MODE_R::variant`, which reports them
// as reserved raw values).
pub enum MODE_A {
    #[doc = "0: Ignore all USB traffic to this endpoint"]
    DISABLE,
    #[doc = "1: SETUP: Accept\nIN: NAK\nOUT: NAK"]
    NAK_INOUT,
    #[doc = "2: SETUP: Accept\nIN: STALL\nOUT: ACK 0B tokens, NAK others"]
    STATUS_OUT_ONLY,
    #[doc = "3: SETUP: Accept\nIN: STALL\nOUT: STALL"]
    STALL_INOUT,
    #[doc = "5: SETUP: Ignore\nIN: Ignore\nOUT: Accept Isochronous OUT token"]
    ISO_OUT,
    #[doc = "6: SETUP: Accept\nIN: Respond with 0B data\nOUT: Stall"]
    STATUS_IN_ONLY,
    #[doc = "7: SETUP: Ignore\nIN: Accept Isochronous IN token\nOUT: Ignore"]
    ISO_IN,
    #[doc = "8: SETUP: Ignore\nIN: Ignore\nOUT: NAK"]
    NAK_OUT,
    #[doc = "9: SETUP: Ignore\nIN: Ignore\nOUT: Accept data and ACK if STALL=0, STALL otherwise.\nChange to MODE=8 after one successful OUT token."]
    ACK_OUT,
    #[doc = "11: SETUP: Accept\nIN: Respond with 0B data\nOUT: Accept data"]
    ACK_OUT_STATUS_IN,
    #[doc = "12: SETUP: Ignore\nIN: NAK\nOUT: Ignore"]
    NAK_IN,
    #[doc = "13: SETUP: Ignore\nIN: Respond to IN with data if STALL=0, STALL otherwise\nOUT: Ignore"]
    ACK_IN,
    #[doc = "15: SETUP: Accept\nIN: Respond to IN with data\nOUT: ACK 0B tokens, NAK others"]
    ACK_IN_STATUS_OUT,
}
impl From<MODE_A> for u8 {
#[inline(always)]
fn from(variant: MODE_A) -> Self {
match variant {
MODE_A::DISABLE => 0,
MODE_A::NAK_INOUT => 1,
MODE_A::STATUS_OUT_ONLY => 2,
MODE_A::STALL_INOUT => 3,
MODE_A::ISO_OUT => 5,
MODE_A::STATUS_IN_ONLY => 6,
MODE_A::ISO_IN => 7,
MODE_A::NAK_OUT => 8,
MODE_A::ACK_OUT => 9,
MODE_A::ACK_OUT_STATUS_IN => 11,
MODE_A::NAK_IN => 12,
MODE_A::ACK_IN => 13,
MODE_A::ACK_IN_STATUS_OUT => 15,
}
}
}
#[doc = "Reader of field `MODE`"]
pub type MODE_R = crate::R<u8, MODE_A>;
// Read proxy for the 4-bit MODE field: decodes the raw bits back into
// `MODE_A`, plus one `is_*` predicate per variant.
impl MODE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, MODE_A> {
        use crate::Variant::*;
        // Raw values 4, 10 and 14 are not defined by `MODE_A` and surface
        // as the reserved `Res(raw)` case.
        match self.bits {
            0 => Val(MODE_A::DISABLE),
            1 => Val(MODE_A::NAK_INOUT),
            2 => Val(MODE_A::STATUS_OUT_ONLY),
            3 => Val(MODE_A::STALL_INOUT),
            5 => Val(MODE_A::ISO_OUT),
            6 => Val(MODE_A::STATUS_IN_ONLY),
            7 => Val(MODE_A::ISO_IN),
            8 => Val(MODE_A::NAK_OUT),
            9 => Val(MODE_A::ACK_OUT),
            11 => Val(MODE_A::ACK_OUT_STATUS_IN),
            12 => Val(MODE_A::NAK_IN),
            13 => Val(MODE_A::ACK_IN),
            15 => Val(MODE_A::ACK_IN_STATUS_OUT),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `DISABLE`"]
    #[inline(always)]
    pub fn is_disable(&self) -> bool {
        *self == MODE_A::DISABLE
    }
    #[doc = "Checks if the value of the field is `NAK_INOUT`"]
    #[inline(always)]
    pub fn is_nak_inout(&self) -> bool {
        *self == MODE_A::NAK_INOUT
    }
    #[doc = "Checks if the value of the field is `STATUS_OUT_ONLY`"]
    #[inline(always)]
    pub fn is_status_out_only(&self) -> bool {
        *self == MODE_A::STATUS_OUT_ONLY
    }
    #[doc = "Checks if the value of the field is `STALL_INOUT`"]
    #[inline(always)]
    pub fn is_stall_inout(&self) -> bool {
        *self == MODE_A::STALL_INOUT
    }
    #[doc = "Checks if the value of the field is `ISO_OUT`"]
    #[inline(always)]
    pub fn is_iso_out(&self) -> bool {
        *self == MODE_A::ISO_OUT
    }
    #[doc = "Checks if the value of the field is `STATUS_IN_ONLY`"]
    #[inline(always)]
    pub fn is_status_in_only(&self) -> bool {
        *self == MODE_A::STATUS_IN_ONLY
    }
    #[doc = "Checks if the value of the field is `ISO_IN`"]
    #[inline(always)]
    pub fn is_iso_in(&self) -> bool {
        *self == MODE_A::ISO_IN
    }
    #[doc = "Checks if the value of the field is `NAK_OUT`"]
    #[inline(always)]
    pub fn is_nak_out(&self) -> bool {
        *self == MODE_A::NAK_OUT
    }
    #[doc = "Checks if the value of the field is `ACK_OUT`"]
    #[inline(always)]
    pub fn is_ack_out(&self) -> bool {
        *self == MODE_A::ACK_OUT
    }
    #[doc = "Checks if the value of the field is `ACK_OUT_STATUS_IN`"]
    #[inline(always)]
    pub fn is_ack_out_status_in(&self) -> bool {
        *self == MODE_A::ACK_OUT_STATUS_IN
    }
    #[doc = "Checks if the value of the field is `NAK_IN`"]
    #[inline(always)]
    pub fn is_nak_in(&self) -> bool {
        *self == MODE_A::NAK_IN
    }
    #[doc = "Checks if the value of the field is `ACK_IN`"]
    #[inline(always)]
    pub fn is_ack_in(&self) -> bool {
        *self == MODE_A::ACK_IN
    }
    #[doc = "Checks if the value of the field is `ACK_IN_STATUS_OUT`"]
    #[inline(always)]
    pub fn is_ack_in_status_out(&self) -> bool {
        *self == MODE_A::ACK_IN_STATUS_OUT
    }
}
#[doc = "Write proxy for field `MODE`"]
// Write proxy for the 4-bit MODE field: one named setter per variant, all
// funnelling through `variant()` and ultimately the raw `bits()` writer.
pub struct MODE_W<'a> {
    w: &'a mut W,
}
impl<'a> MODE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: MODE_A) -> &'a mut W {
        // Safe because every `MODE_A` encoding fits in the 4-bit mask below.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "Ignore all USB traffic to this endpoint"]
    #[inline(always)]
    pub fn disable(self) -> &'a mut W {
        self.variant(MODE_A::DISABLE)
    }
    #[doc = "SETUP: Accept IN: NAK OUT: NAK"]
    #[inline(always)]
    pub fn nak_inout(self) -> &'a mut W {
        self.variant(MODE_A::NAK_INOUT)
    }
    #[doc = "SETUP: Accept IN: STALL OUT: ACK 0B tokens, NAK others"]
    #[inline(always)]
    pub fn status_out_only(self) -> &'a mut W {
        self.variant(MODE_A::STATUS_OUT_ONLY)
    }
    #[doc = "SETUP: Accept IN: STALL OUT: STALL"]
    #[inline(always)]
    pub fn stall_inout(self) -> &'a mut W {
        self.variant(MODE_A::STALL_INOUT)
    }
    #[doc = "SETUP: Ignore IN: Ignore OUT: Accept Isochronous OUT token"]
    #[inline(always)]
    pub fn iso_out(self) -> &'a mut W {
        self.variant(MODE_A::ISO_OUT)
    }
    #[doc = "SETUP: Accept IN: Respond with 0B data OUT: Stall"]
    #[inline(always)]
    pub fn status_in_only(self) -> &'a mut W {
        self.variant(MODE_A::STATUS_IN_ONLY)
    }
    #[doc = "SETUP: Ignore IN: Accept Isochronous IN token OUT: Ignore"]
    #[inline(always)]
    pub fn iso_in(self) -> &'a mut W {
        self.variant(MODE_A::ISO_IN)
    }
    #[doc = "SETUP: Ignore IN: Ignore OUT: NAK"]
    #[inline(always)]
    pub fn nak_out(self) -> &'a mut W {
        self.variant(MODE_A::NAK_OUT)
    }
    #[doc = "SETUP: Ignore IN: Ignore OUT: Accept data and ACK if STALL=0, STALL otherwise. Change to MODE=8 after one successful OUT token."]
    #[inline(always)]
    pub fn ack_out(self) -> &'a mut W {
        self.variant(MODE_A::ACK_OUT)
    }
    #[doc = "SETUP: Accept IN: Respond with 0B data OUT: Accept data"]
    #[inline(always)]
    pub fn ack_out_status_in(self) -> &'a mut W {
        self.variant(MODE_A::ACK_OUT_STATUS_IN)
    }
    #[doc = "SETUP: Ignore IN: NAK OUT: Ignore"]
    #[inline(always)]
    pub fn nak_in(self) -> &'a mut W {
        self.variant(MODE_A::NAK_IN)
    }
    #[doc = "SETUP: Ignore IN: Respond to IN with data if STALL=0, STALL otherwise OUT: Ignore"]
    #[inline(always)]
    pub fn ack_in(self) -> &'a mut W {
        self.variant(MODE_A::ACK_IN)
    }
    #[doc = "SETUP: Accept IN: Respond to IN with data OUT: ACK 0B tokens, NAK others"]
    #[inline(always)]
    pub fn ack_in_status_out(self) -> &'a mut W {
        self.variant(MODE_A::ACK_IN_STATUS_OUT)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // MODE occupies bits 0:3 of the register word.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "The ACK'd transaction bit is set whenever the SIE engages in a transaction to the register's endpoint that completes with an ACK packet. This bit is cleared by any writes to the register.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
// Single-bit field (bit 4): enumerated as a two-variant type.
pub enum ACKED_TXN_A {
    #[doc = "0: No ACK'd transactions since bit was last cleared."]
    ACKED_NO,
    #[doc = "1: Indicates a transaction ended with an ACK."]
    ACKED_YES,
}
impl From<ACKED_TXN_A> for bool {
    #[inline(always)]
    fn from(variant: ACKED_TXN_A) -> Self {
        match variant {
            ACKED_TXN_A::ACKED_NO => false,
            ACKED_TXN_A::ACKED_YES => true,
        }
    }
}
#[doc = "Reader of field `ACKED_TXN`"]
pub type ACKED_TXN_R = crate::R<bool, ACKED_TXN_A>;
impl ACKED_TXN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ACKED_TXN_A {
        // Total mapping: a bool always decodes, so no reserved case is needed.
        match self.bits {
            false => ACKED_TXN_A::ACKED_NO,
            true => ACKED_TXN_A::ACKED_YES,
        }
    }
    #[doc = "Checks if the value of the field is `ACKED_NO`"]
    #[inline(always)]
    pub fn is_acked_no(&self) -> bool {
        *self == ACKED_TXN_A::ACKED_NO
    }
    #[doc = "Checks if the value of the field is `ACKED_YES`"]
    #[inline(always)]
    pub fn is_acked_yes(&self) -> bool {
        *self == ACKED_TXN_A::ACKED_YES
    }
}
#[doc = "Write proxy for field `ACKED_TXN`"]
pub struct ACKED_TXN_W<'a> {
    w: &'a mut W,
}
impl<'a> ACKED_TXN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: ACKED_TXN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No ACK'd transactions since bit was last cleared."]
    #[inline(always)]
    pub fn acked_no(self) -> &'a mut W {
        self.variant(ACKED_TXN_A::ACKED_NO)
    }
    #[doc = "Indicates a transaction ended with an ACK."]
    #[inline(always)]
    pub fn acked_yes(self) -> &'a mut W {
        self.variant(ACKED_TXN_A::ACKED_YES)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // ACKED_TXN occupies bit 4 of the register word.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `NAK_INT_EN`"]
// Plain single-bit fields (no enumerated values): NAK_INT_EN is bit 5,
// ERR_IN_TXN is bit 6, STALL is bit 7. Each gets a bool reader alias and a
// set/clear/raw write proxy.
pub type NAK_INT_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NAK_INT_EN`"]
pub struct NAK_INT_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> NAK_INT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `ERR_IN_TXN`"]
pub type ERR_IN_TXN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ERR_IN_TXN`"]
pub struct ERR_IN_TXN_W<'a> {
    w: &'a mut W,
}
impl<'a> ERR_IN_TXN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `STALL`"]
pub type STALL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `STALL`"]
pub struct STALL_W<'a> {
    w: &'a mut W,
}
impl<'a> STALL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// Field accessors on the whole-register reader: each extracts its bit range
// from the cached 32-bit value.
impl R {
    #[doc = "Bits 0:3 - The mode controls how the USB SIE responds to traffic and how the USB SIE changes the mode of that endpoint as a result of host packets to the endpoint."]
    #[inline(always)]
    pub fn mode(&self) -> MODE_R {
        MODE_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bit 4 - The ACK'd transaction bit is set whenever the SIE engages in a transaction to the register's endpoint that completes with an ACK packet. This bit is cleared by any writes to the register."]
    #[inline(always)]
    pub fn acked_txn(&self) -> ACKED_TXN_R {
        ACKED_TXN_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - When set this bit causes an endpoint interrupt to be generated even when a transfer completes with a NAK."]
    #[inline(always)]
    pub fn nak_int_en(&self) -> NAK_INT_EN_R {
        NAK_INT_EN_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - The Error in transaction bit is set whenever an error is detected. For an IN transaction, this indicates a no response from HOST scenario. For an OUT transaction, this represents an RxErr (PID error/ CRC error/ bit-stuff error scenario). This bit is cleared by any writes to the register."]
    #[inline(always)]
    pub fn err_in_txn(&self) -> ERR_IN_TXN_R {
        ERR_IN_TXN_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - When this bit is set the SIE stalls an OUT packet if the Mode bits are set to ACK-OUT. The SIE stalls an IN packet if the mode bits are set to ACK-IN. This bit must be clear for all other modes."]
    #[inline(always)]
    pub fn stall(&self) -> STALL_R {
        STALL_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
// Field accessors on the whole-register writer: each hands back the
// field-specific write proxy borrowing `self`.
impl W {
    #[doc = "Bits 0:3 - The mode controls how the USB SIE responds to traffic and how the USB SIE changes the mode of that endpoint as a result of host packets to the endpoint."]
    #[inline(always)]
    pub fn mode(&mut self) -> MODE_W {
        MODE_W { w: self }
    }
    #[doc = "Bit 4 - The ACK'd transaction bit is set whenever the SIE engages in a transaction to the register's endpoint that completes with an ACK packet. This bit is cleared by any writes to the register."]
    #[inline(always)]
    pub fn acked_txn(&mut self) -> ACKED_TXN_W {
        ACKED_TXN_W { w: self }
    }
    #[doc = "Bit 5 - When set this bit causes an endpoint interrupt to be generated even when a transfer completes with a NAK."]
    #[inline(always)]
    pub fn nak_int_en(&mut self) -> NAK_INT_EN_W {
        NAK_INT_EN_W { w: self }
    }
    #[doc = "Bit 6 - The Error in transaction bit is set whenever an error is detected. For an IN transaction, this indicates a no response from HOST scenario. For an OUT transaction, this represents an RxErr (PID error/ CRC error/ bit-stuff error scenario). This bit is cleared by any writes to the register."]
    #[inline(always)]
    pub fn err_in_txn(&mut self) -> ERR_IN_TXN_W {
        ERR_IN_TXN_W { w: self }
    }
    #[doc = "Bit 7 - When this bit is set the SIE stalls an OUT packet if the Mode bits are set to ACK-OUT. The SIE stalls an IN packet if the mode bits are set to ACK-IN. This bit must be clear for all other modes."]
    #[inline(always)]
    pub fn stall(&mut self) -> STALL_W {
        STALL_W { w: self }
    }
}
|
// Re-export the two most commonly used items at the module root.
pub use self::hci::Hci;
pub use self::setup::Setup;
// Descriptor definitions and one submodule per host-controller interface.
pub mod desc;
pub mod ehci;
pub mod hci;
pub mod ohci;
pub mod setup;
pub mod uhci;
pub mod xhci;
/// One USB transfer stage. NOTE(review): following USB convention, `In`
/// presumably carries a buffer for device-to-host data (hence `&mut`) and
/// `Out` host-to-device data -- confirm against the HCI implementations.
#[derive(Debug)]
pub enum Packet<'a> {
    /// SETUP stage carrying a control-request header.
    Setup(&'a Setup),
    /// IN stage: buffer to be filled.
    In(&'a mut [u8]),
    /// OUT stage: data to send.
    Out(&'a [u8]),
}
/// The four USB endpoint transfer types.
#[derive(Debug)]
pub enum Pipe {
    Control,
    Interrupt,
    Isochronous,
    Bulk
}
|
#![cfg_attr(docsrs, feature(doc_cfg))]
#![deny(missing_debug_implementations)]
//! <div align="center">
//!
//! <img src="https://raw.githubusercontent.com/SeaQL/sea-query/master/docs/SeaQL logo dual.png" width="320"/>
//!
//! <h1>SeaQuery</h1>
//!
//! <p>
//! <strong>🌊 A dynamic query builder for MySQL, Postgres and SQLite</strong>
//! </p>
//!
//! [](https://crates.io/crates/sea-query)
//! [](https://docs.rs/sea-query)
//! [](https://github.com/SeaQL/sea-query/actions/workflows/rust.yml)
//!
//! <sub>Built with 🔥 by 🌊🦀🐚</sub>
//!
//! </div>
//!
//! ## Introduction
//!
//! SeaQuery is a query builder that helps you construct dynamic SQL queries in Rust.
//! You can construct expressions, queries and schema as abstract syntax trees using an ergonomic API.
//! We support MySQL, Postgres and SQLite behind a common interface that aligns their behaviour where appropriate.
//!
//! This library is the foundation of SeaORM.
//!
//! ## Install
//!
//! ```toml
//! # Cargo.toml
//! [dependencies]
//! sea-query = "^0"
//! ```
//!
//! ## Usage
//!
//! Table of Content
//!
//! 1. Basics
//!
//! 1. [Iden](#iden)
//! 1. [Expression](#expression)
//! 1. [Condition](#condition)
//! 1. [Statement Builders](#statement-builders)
//!
//! 1. Query Statement
//!
//! 1. [Query Select](#query-select)
//! 1. [Query Insert](#query-insert)
//! 1. [Query Update](#query-update)
//! 1. [Query Delete](#query-delete)
//!
//! 1. Schema Statement
//!
//! 1. [Table Create](#table-create)
//! 1. [Table Alter](#table-alter)
//! 1. [Table Drop](#table-drop)
//! 1. [Table Rename](#table-rename)
//! 1. [Table Truncate](#table-truncate)
//! 1. [Foreign Key Create](#foreign-key-create)
//! 1. [Foreign Key Drop](#foreign-key-drop)
//! 1. [Index Create](#index-create)
//! 1. [Index Drop](#index-drop)
//!
//! ### Motivation
//!
//! Why would you want to use a dynamic query builder?
//!
//! 1. Parameter bindings
//!
//! One of the headaches when using raw SQL is parameter binding. With SeaQuery you can:
//!
//! ```
//! # use sea_query::{*, tests_cfg::*};
//! assert_eq!(
//! Query::select()
//! .column(Glyph::Image)
//! .from(Glyph::Table)
//! .and_where(Expr::col(Glyph::Image).like("A"))
//! .and_where(Expr::col(Glyph::Id).is_in(vec![1, 2, 3]))
//! .build(PostgresQueryBuilder),
//! (r#"SELECT "image" FROM "glyph" WHERE "image" LIKE $1 AND "id" IN ($2, $3, $4)"#.to_owned(),
//! Values(vec![Value::String(Box::new("A".to_owned())), Value::Int(1), Value::Int(2), Value::Int(3)]))
//! );
//! ```
//!
//! 2. Dynamic query
//!
//! You can construct the query at runtime based on user inputs:
//!
//! ```
//! # use sea_query::{*, tests_cfg::*};
//! Query::select()
//! .column(Char::Character)
//! .from(Char::Table)
//! .conditions(
//! // some runtime condition
//! true,
//! // if condition is true then add the following condition
//! |q| { q.and_where(Expr::col(Char::Id).eq(1)); },
//! // otherwise leave it as is
//! |q| { }
//! );
//! ```
//!
//! ### Integration
//!
//! We provide integration for [SQLx](https://crates.io/crates/sqlx),
//! [postgres](https://crates.io/crates/postgres) and [rusqlite](https://crates.io/crates/rusqlite).
//! See [examples](https://github.com/SeaQL/sea-query/blob/master/examples) for usage.
//!
//! ### Iden
//!
//! `Iden` is a trait for identifiers used in any query statement.
//!
//! Commonly implemented by Enum where each Enum represents a table found in a database,
//! and its variants include table name and column name.
//!
//! [`Iden::unquoted()`] must be implemented to provide a mapping between Enum variants and its
//! corresponding string value.
//!
//! ```rust
//! use sea_query::{*};
//!
//! // For example Character table with column id, character, font_size...
//! pub enum Character {
//! Table,
//! Id,
//! FontId,
//! FontSize,
//! }
//!
//! // Mapping between Enum variant and its corresponding string value
//! impl Iden for Character {
//! fn unquoted(&self, s: &mut dyn std::fmt::Write) {
//! write!(s, "{}", match self {
//! Self::Table => "character",
//! Self::Id => "id",
//! Self::FontId => "font_id",
//! Self::FontSize => "font_size",
//! }).unwrap();
//! }
//! }
//! ```
//!
//! If you're okay with running another procedural macro, you can activate
//! the `derive` feature on the crate to save you some boilerplate.
//! For more usage information, look at
//! [the derive example](https://github.com/SeaQL/sea-query/blob/master/examples/derive.rs).
//!
//! ```rust
//! # #[cfg(feature = "derive")]
//! use sea_query::Iden;
//!
//! // This will implement Iden exactly as shown above
//! #[derive(Iden)]
//! enum Character { Table }
//! assert_eq!(Character::Table.to_string(), "character");
//!
//! // You can also derive a unit struct
//! #[derive(Iden)]
//! struct Glyph;
//! assert_eq!(Glyph.to_string(), "glyph");
//! ```
//!
//! ### Expression
//!
//! Use [`Expr`] to construct select, join, where and having expression in query.
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! assert_eq!(
//! Query::select()
//! .column(Char::Character)
//! .from(Char::Table)
//! .and_where(
//! Expr::expr(Expr::col(Char::SizeW).add(1)).mul(2)
//! .equals(Expr::expr(Expr::col(Char::SizeH).div(2)).sub(1))
//! )
//! .and_where(Expr::col(Char::SizeW).in_subquery(
//! Query::select()
//! .expr(Expr::cust_with_values("ln(? ^ ?)", vec![2.4, 1.2]))
//! .take()
//! ))
//! .and_where(Expr::col(Char::Character).like("D").and(Expr::col(Char::Character).like("E")))
//! .to_string(PostgresQueryBuilder),
//! [
//! r#"SELECT "character" FROM "character""#,
//! r#"WHERE ("size_w" + 1) * 2 = ("size_h" / 2) - 1"#,
//! r#"AND "size_w" IN (SELECT ln(2.4 ^ 1.2))"#,
//! r#"AND (("character" LIKE 'D') AND ("character" LIKE 'E'))"#,
//! ].join(" ")
//! );
//! ```
//!
//! ### Condition
//!
//! If you have complex conditions to express, you can use the [`Condition`] builder,
//! usable for [`ConditionalStatement::cond_where`] and [`SelectStatement::cond_having`].
//!
//! ```
//! # use sea_query::{*, tests_cfg::*};
//! assert_eq!(
//! Query::select()
//! .column(Glyph::Id)
//! .from(Glyph::Table)
//! .cond_where(
//! Cond::any()
//! .add(
//! Cond::all()
//! .add(Expr::col(Glyph::Aspect).is_null())
//! .add(Expr::col(Glyph::Image).is_null())
//! )
//! .add(
//! Cond::all()
//! .add(Expr::col(Glyph::Aspect).is_in(vec![3, 4]))
//! .add(Expr::col(Glyph::Image).like("A%"))
//! )
//! )
//! .to_string(PostgresQueryBuilder),
//! [
//! r#"SELECT "id" FROM "glyph""#,
//! r#"WHERE"#,
//! r#"("aspect" IS NULL AND "image" IS NULL)"#,
//! r#"OR"#,
//! r#"("aspect" IN (3, 4) AND "image" LIKE 'A%')"#,
//! ].join(" ")
//! );
//! ```
//!
//! There is also the [`any!`] and [`all!`] macro at your convenience:
//!
//! ```
//! # use sea_query::{*, tests_cfg::*};
//! Query::select()
//! .cond_where(
//! any![
//! Expr::col(Glyph::Aspect).is_in(vec![3, 4]),
//! all![
//! Expr::col(Glyph::Aspect).is_null(),
//! Expr::col(Glyph::Image).like("A%")
//! ]
//! ]
//! );
//! ```
//!
//! ### Statement Builders
//!
//! Statements are divided into 2 categories: Query and Schema, and to be serialized into SQL
//! with [`QueryStatementBuilder`] and [`SchemaStatementBuilder`] respectively.
//!
//! Schema statement has the following interface:
//!
//! ```rust
//! # use sea_query::{*};
//! # trait ExampleSchemaBuilder {
//! fn build<T: SchemaBuilder>(&self, schema_builder: T) -> String;
//! # }
//! ```
//!
//! Query statement has the following interfaces:
//!
//! ```rust
//! # use sea_query::{*};
//! # trait ExampleQueryBuilder {
//! fn build<T: QueryBuilder>(&self, query_builder: T) -> (String, Values);
//!
//! fn to_string<T: QueryBuilder>(&self, query_builder: T) -> String;
//! # }
//! ```
//!
//! `build` builds a SQL statement as string and parameters to be passed to the database driver
//! through the binary protocol. This is the preferred way as it has less overhead and is more secure.
//!
//! `to_string` builds a SQL statement as string with parameters injected. This is good for testing
//! and debugging.
//!
//! ### Query Select
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let query = Query::select()
//! .column(Char::Character)
//! .column((Font::Table, Font::Name))
//! .from(Char::Table)
//! .left_join(Font::Table, Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))
//! .and_where(Expr::col(Char::SizeW).is_in(vec![3, 4]))
//! .and_where(Expr::col(Char::Character).like("A%"))
//! .to_owned();
//!
//! assert_eq!(
//! query.to_string(MysqlQueryBuilder),
//! r#"SELECT `character`, `font`.`name` FROM `character` LEFT JOIN `font` ON `character`.`font_id` = `font`.`id` WHERE `size_w` IN (3, 4) AND `character` LIKE 'A%'"#
//! );
//! assert_eq!(
//! query.to_string(PostgresQueryBuilder),
//! r#"SELECT "character", "font"."name" FROM "character" LEFT JOIN "font" ON "character"."font_id" = "font"."id" WHERE "size_w" IN (3, 4) AND "character" LIKE 'A%'"#
//! );
//! assert_eq!(
//! query.to_string(SqliteQueryBuilder),
//! r#"SELECT `character`, `font`.`name` FROM `character` LEFT JOIN `font` ON `character`.`font_id` = `font`.`id` WHERE `size_w` IN (3, 4) AND `character` LIKE 'A%'"#
//! );
//! ```
//!
//! ### Query Insert
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let query = Query::insert()
//! .into_table(Glyph::Table)
//! .columns(vec![
//! Glyph::Aspect,
//! Glyph::Image,
//! ])
//! .values_panic(vec![
//! 5.15.into(),
//! "12A".into(),
//! ])
//! .values_panic(vec![
//! 4.21.into(),
//! "123".into(),
//! ])
//! .to_owned();
//!
//! assert_eq!(
//! query.to_string(MysqlQueryBuilder),
//! r#"INSERT INTO `glyph` (`aspect`, `image`) VALUES (5.15, '12A'), (4.21, '123')"#
//! );
//! assert_eq!(
//! query.to_string(PostgresQueryBuilder),
//! r#"INSERT INTO "glyph" ("aspect", "image") VALUES (5.15, '12A'), (4.21, '123')"#
//! );
//! assert_eq!(
//! query.to_string(SqliteQueryBuilder),
//! r#"INSERT INTO `glyph` (`aspect`, `image`) VALUES (5.15, '12A'), (4.21, '123')"#
//! );
//! ```
//!
//! ### Query Update
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let query = Query::update()
//! .table(Glyph::Table)
//! .values(vec![
//! (Glyph::Aspect, 1.23.into()),
//! (Glyph::Image, "123".into()),
//! ])
//! .and_where(Expr::col(Glyph::Id).eq(1))
//! .to_owned();
//!
//! assert_eq!(
//! query.to_string(MysqlQueryBuilder),
//! r#"UPDATE `glyph` SET `aspect` = 1.23, `image` = '123' WHERE `id` = 1"#
//! );
//! assert_eq!(
//! query.to_string(PostgresQueryBuilder),
//! r#"UPDATE "glyph" SET "aspect" = 1.23, "image" = '123' WHERE "id" = 1"#
//! );
//! assert_eq!(
//! query.to_string(SqliteQueryBuilder),
//! r#"UPDATE `glyph` SET `aspect` = 1.23, `image` = '123' WHERE `id` = 1"#
//! );
//! ```
//!
//! ### Query Delete
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let query = Query::delete()
//! .from_table(Glyph::Table)
//! .cond_where(
//! Cond::any()
//! .add(Expr::col(Glyph::Id).lt(1))
//! .add(Expr::col(Glyph::Id).gt(10))
//! )
//! .to_owned();
//!
//! assert_eq!(
//! query.to_string(MysqlQueryBuilder),
//! r#"DELETE FROM `glyph` WHERE `id` < 1 OR `id` > 10"#
//! );
//! assert_eq!(
//! query.to_string(PostgresQueryBuilder),
//! r#"DELETE FROM "glyph" WHERE "id" < 1 OR "id" > 10"#
//! );
//! assert_eq!(
//! query.to_string(SqliteQueryBuilder),
//! r#"DELETE FROM `glyph` WHERE `id` < 1 OR `id` > 10"#
//! );
//! ```
//!
//! ### Table Create
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let table = Table::create()
//! .table(Char::Table)
//! .if_not_exists()
//! .col(ColumnDef::new(Char::Id).integer().not_null().auto_increment().primary_key())
//! .col(ColumnDef::new(Char::FontSize).integer().not_null())
//! .col(ColumnDef::new(Char::Character).string().not_null())
//! .col(ColumnDef::new(Char::SizeW).integer().not_null())
//! .col(ColumnDef::new(Char::SizeH).integer().not_null())
//! .col(ColumnDef::new(Char::FontId).integer().default(Value::Null))
//! .foreign_key(
//! ForeignKey::create()
//! .name("FK_2e303c3a712662f1fc2a4d0aad6")
//! .from(Char::Table, Char::FontId)
//! .to(Font::Table, Font::Id)
//! .on_delete(ForeignKeyAction::Cascade)
//! .on_update(ForeignKeyAction::Cascade)
//! )
//! .to_owned();
//!
//! assert_eq!(
//! table.to_string(MysqlQueryBuilder),
//! vec![
//! r#"CREATE TABLE IF NOT EXISTS `character` ("#,
//! r#"`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY,"#,
//! r#"`font_size` int NOT NULL,"#,
//! r#"`character` varchar(255) NOT NULL,"#,
//! r#"`size_w` int NOT NULL,"#,
//! r#"`size_h` int NOT NULL,"#,
//! r#"`font_id` int DEFAULT NULL,"#,
//! r#"CONSTRAINT `FK_2e303c3a712662f1fc2a4d0aad6`"#,
//! r#"FOREIGN KEY (`font_id`) REFERENCES `font` (`id`)"#,
//! r#"ON DELETE CASCADE ON UPDATE CASCADE"#,
//! r#")"#,
//! ].join(" ")
//! );
//! assert_eq!(
//! table.to_string(PostgresQueryBuilder),
//! vec![
//! r#"CREATE TABLE IF NOT EXISTS "character" ("#,
//! r#""id" serial NOT NULL PRIMARY KEY,"#,
//! r#""font_size" integer NOT NULL,"#,
//! r#""character" varchar NOT NULL,"#,
//! r#""size_w" integer NOT NULL,"#,
//! r#""size_h" integer NOT NULL,"#,
//! r#""font_id" integer DEFAULT NULL,"#,
//! r#"CONSTRAINT "FK_2e303c3a712662f1fc2a4d0aad6""#,
//! r#"FOREIGN KEY ("font_id") REFERENCES "font" ("id")"#,
//! r#"ON DELETE CASCADE ON UPDATE CASCADE"#,
//! r#")"#,
//! ].join(" ")
//! );
//! assert_eq!(
//! table.to_string(SqliteQueryBuilder),
//! vec![
//! r#"CREATE TABLE IF NOT EXISTS `character` ("#,
//! r#"`id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,"#,
//! r#"`font_size` integer NOT NULL,"#,
//! r#"`character` text NOT NULL,"#,
//! r#"`size_w` integer NOT NULL,"#,
//! r#"`size_h` integer NOT NULL,"#,
//! r#"`font_id` integer DEFAULT NULL,"#,
//! r#"FOREIGN KEY (`font_id`) REFERENCES `font` (`id`) ON DELETE CASCADE ON UPDATE CASCADE"#,
//! r#")"#,
//! ].join(" ")
//! );
//! ```
//!
//! ### Table Alter
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let table = Table::alter()
//! .table(Font::Table)
//! .add_column(ColumnDef::new(Alias::new("new_col")).integer().not_null().default(100))
//! .to_owned();
//!
//! assert_eq!(
//! table.to_string(MysqlQueryBuilder),
//! r#"ALTER TABLE `font` ADD COLUMN `new_col` int NOT NULL DEFAULT 100"#
//! );
//! assert_eq!(
//! table.to_string(PostgresQueryBuilder),
//! r#"ALTER TABLE "font" ADD COLUMN "new_col" integer NOT NULL DEFAULT 100"#
//! );
//! assert_eq!(
//! table.to_string(SqliteQueryBuilder),
//! r#"ALTER TABLE `font` ADD COLUMN `new_col` integer NOT NULL DEFAULT 100"#,
//! );
//! ```
//!
//! ### Table Drop
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let table = Table::drop()
//! .table(Glyph::Table)
//! .table(Char::Table)
//! .to_owned();
//!
//! assert_eq!(
//! table.to_string(MysqlQueryBuilder),
//! r#"DROP TABLE `glyph`, `character`"#
//! );
//! assert_eq!(
//! table.to_string(PostgresQueryBuilder),
//! r#"DROP TABLE "glyph", "character""#
//! );
//! assert_eq!(
//! table.to_string(SqliteQueryBuilder),
//! r#"DROP TABLE `glyph`, `character`"#
//! );
//! ```
//!
//! ### Table Rename
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let table = Table::rename()
//! .table(Font::Table, Alias::new("font_new"))
//! .to_owned();
//!
//! assert_eq!(
//! table.to_string(MysqlQueryBuilder),
//! r#"RENAME TABLE `font` TO `font_new`"#
//! );
//! assert_eq!(
//! table.to_string(PostgresQueryBuilder),
//! r#"ALTER TABLE "font" RENAME TO "font_new""#
//! );
//! assert_eq!(
//! table.to_string(SqliteQueryBuilder),
//! r#"ALTER TABLE `font` RENAME TO `font_new`"#
//! );
//! ```
//!
//! ### Table Truncate
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let table = Table::truncate()
//! .table(Font::Table)
//! .to_owned();
//!
//! assert_eq!(
//! table.to_string(MysqlQueryBuilder),
//! r#"TRUNCATE TABLE `font`"#
//! );
//! assert_eq!(
//! table.to_string(PostgresQueryBuilder),
//! r#"TRUNCATE TABLE "font""#
//! );
//! assert_eq!(
//! table.to_string(SqliteQueryBuilder),
//! r#"TRUNCATE TABLE `font`"#
//! );
//! ```
//!
//! ### Foreign Key Create
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let foreign_key = ForeignKey::create()
//! .name("FK_character_font")
//! .from(Char::Table, Char::FontId)
//! .to(Font::Table, Font::Id)
//! .on_delete(ForeignKeyAction::Cascade)
//! .on_update(ForeignKeyAction::Cascade)
//! .to_owned();
//!
//! assert_eq!(
//! foreign_key.to_string(MysqlQueryBuilder),
//! vec![
//! r#"ALTER TABLE `character`"#,
//! r#"ADD CONSTRAINT `FK_character_font`"#,
//! r#"FOREIGN KEY (`font_id`) REFERENCES `font` (`id`)"#,
//! r#"ON DELETE CASCADE ON UPDATE CASCADE"#,
//! ].join(" ")
//! );
//! assert_eq!(
//! foreign_key.to_string(PostgresQueryBuilder),
//! vec![
//! r#"ALTER TABLE "character" ADD CONSTRAINT "FK_character_font""#,
//! r#"FOREIGN KEY ("font_id") REFERENCES "font" ("id")"#,
//! r#"ON DELETE CASCADE ON UPDATE CASCADE"#,
//! ].join(" ")
//! );
//! // Sqlite does not support modification of foreign key constraints to existing tables
//! ```
//!
//! ### Foreign Key Drop
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let foreign_key = ForeignKey::drop()
//! .name("FK_character_font")
//! .table(Char::Table)
//! .to_owned();
//!
//! assert_eq!(
//! foreign_key.to_string(MysqlQueryBuilder),
//! r#"ALTER TABLE `character` DROP FOREIGN KEY `FK_character_font`"#
//! );
//! assert_eq!(
//! foreign_key.to_string(PostgresQueryBuilder),
//! r#"ALTER TABLE "character" DROP CONSTRAINT "FK_character_font""#
//! );
//! // Sqlite does not support modification of foreign key constraints to existing tables
//! ```
//!
//! ### Index Create
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let index = Index::create()
//! .name("idx-glyph-aspect")
//! .table(Glyph::Table)
//! .col(Glyph::Aspect)
//! .to_owned();
//!
//! assert_eq!(
//! index.to_string(MysqlQueryBuilder),
//! r#"CREATE INDEX `idx-glyph-aspect` ON `glyph` (`aspect`)"#
//! );
//! assert_eq!(
//! index.to_string(PostgresQueryBuilder),
//! r#"CREATE INDEX "idx-glyph-aspect" ON "glyph" ("aspect")"#
//! );
//! assert_eq!(
//! index.to_string(SqliteQueryBuilder),
//! r#"CREATE INDEX `idx-glyph-aspect` ON `glyph` (`aspect`)"#
//! );
//! ```
//!
//! ### Index Drop
//!
//! ```rust
//! # use sea_query::{*, tests_cfg::*};
//! let index = Index::drop()
//! .name("idx-glyph-aspect")
//! .table(Glyph::Table)
//! .to_owned();
//!
//! assert_eq!(
//! index.to_string(MysqlQueryBuilder),
//! r#"DROP INDEX `idx-glyph-aspect` ON `glyph`"#
//! );
//! assert_eq!(
//! index.to_string(PostgresQueryBuilder),
//! r#"DROP INDEX "idx-glyph-aspect""#
//! );
//! assert_eq!(
//! index.to_string(SqliteQueryBuilder),
//! r#"DROP INDEX `idx-glyph-aspect` ON `glyph`"#
//! );
//! ```
#![doc(
html_logo_url = "https://raw.githubusercontent.com/SeaQL/sea-query/master/docs/SeaQL icon dark.png"
)]
pub mod backend;
pub mod driver;
pub mod error;
pub mod expr;
pub mod extension;
pub mod foreign_key;
pub mod func;
pub mod index;
pub mod prepare;
pub mod query;
pub mod schema;
mod shim;
pub mod table;
pub mod tests_cfg;
pub mod token;
pub mod types;
pub mod value;
pub use backend::*;
pub use driver::*;
//pub use extension::*;
pub use foreign_key::*;
pub use index::*;
pub use query::*;
pub use table::*;
// pub use error::*;
pub use expr::*;
pub use func::*;
pub use prepare::*;
pub use schema::*;
//pub use shim::*;
//pub use tests_cfg::*;
pub use token::*;
pub use types::*;
pub use value::*;
#[cfg(feature = "derive")]
pub use sea_query_derive::Iden;
|
use crate::route::Route;
use crate::types::Product;
use yew::prelude::*;
use yew_router::components::RouterAnchor;
type Anchor = RouterAnchor<Route>;
/// Renders one product as a card: image, name and price wrapped in a link to
/// the product's detail route, followed by the caller-supplied add-to-cart
/// button.
///
/// `atc_button` is passed in as pre-built `Html` so the caller controls the
/// button's state and callbacks; this function only places it in the layout.
pub fn product_card(product: &Product, atc_button: Html) -> Html {
    html! {
        <div class="product_card_container">
            <Anchor route=Route::ProductDetail(product.id) classes="product_card_anchor">
                <img class="product_card_image" src={&product.image}/>
                <div class="product_card_name">{&product.name}</div>
                <div class="product_card_price">{"$"}{&product.price}</div>
            </Anchor>
            { atc_button }
        </div>
    }
}
|
use std::{fmt::Write, sync::Arc};
use eyre::Report;
use hashbrown::{HashMap, HashSet};
use rand::Rng;
use tokio::time::{interval, Duration};
use twilight_http::{
api_error::{ApiError, GeneralApiError},
error::ErrorType,
};
use twilight_model::id::{marker::ChannelMarker, Id};
use crate::{
custom_client::TwitchStream,
embeds::{EmbedData, TwitchNotifEmbed},
util::constants::UNKNOWN_CHANNEL,
Context,
};
#[cold]
pub async fn twitch_tracking_loop(ctx: Arc<Context>) {
if cfg!(debug_assertions) {
info!("Skip twitch tracking on debug");
return;
}
let mut online_streams = HashSet::new();
let mut interval = interval(Duration::from_secs(10 * 60));
interval.tick().await;
loop {
interval.tick().await;
// Get data about what needs to be tracked for which channel
let user_ids = ctx.tracked_users();
// Get stream data about all streams that need to be tracked
let mut streams = match ctx.clients.custom.get_twitch_streams(&user_ids).await {
Ok(streams) => streams,
Err(err) => {
let report = Report::new(err);
warn!("{:?}", report.wrap_err("error while retrieving streams"));
continue;
}
};
// Filter streams whether they're live
streams.retain(TwitchStream::is_live);
let now_online: HashSet<_> = streams.iter().map(|stream| stream.user_id).collect();
// If there was no activity change since last time, don't do anything
if now_online == online_streams {
continue;
}
// Filter streams whether its already known they're live
streams.retain(|stream| !online_streams.contains(&stream.user_id));
// Nothing to do if streams is empty
// (i.e. the change was that streamers went offline)
if streams.is_empty() {
online_streams = now_online;
continue;
}
let ids: Vec<_> = streams.iter().map(|s| s.user_id).collect();
let users: HashMap<_, _> = match ctx.clients.custom.get_twitch_users(&ids).await {
Ok(users) => users.into_iter().map(|u| (u.user_id, u)).collect(),
Err(err) => {
let report = Report::new(err).wrap_err("error while retrieving twitch users");
warn!("{report:?}");
continue;
}
};
// Generate random width and height to avoid discord caching the thumbnail url
let (width, height) = {
let mut rng = rand::thread_rng();
let width: u32 = rng.gen_range(350..=370);
let height: u32 = rng.gen_range(175..=185);
(width, height)
};
// Process each stream by notifying all corresponding channels
for mut stream in streams {
let channels = match ctx.tracked_channels_for(stream.user_id) {
Some(channels) => channels,
None => continue,
};
// Adjust streams' thumbnail url
let url_len = stream.thumbnail_url.len();
stream.thumbnail_url.truncate(url_len - 20); // cut off "{width}x{height}.jpg"
let _ = write!(stream.thumbnail_url, "{width}x{height}.jpg");
let data = TwitchNotifEmbed::new(&stream, &users[&stream.user_id]);
for channel in channels {
send_notif(&ctx, &data, channel).await;
}
}
online_streams = now_online;
}
}
/// Sends a Twitch-notification embed to one channel.
///
/// If Discord reports the channel as unknown, all stream tracks for that
/// channel are removed from the database; other errors are only logged.
async fn send_notif(ctx: &Context, data: &TwitchNotifEmbed, channel: Id<ChannelMarker>) {
    let embed = data.as_builder().build();

    match ctx.http.create_message(channel).embeds(&[embed]) {
        Ok(msg_fut) => {
            if let Err(why) = msg_fut.exec().await {
                if let ErrorType::Response { error, .. } = why.kind() {
                    match error {
                        // `UNKNOWN_CHANNEL` is an imported constant, so this
                        // arm matches that specific error code by value.
                        ApiError::General(GeneralApiError {
                            code: UNKNOWN_CHANNEL,
                            ..
                        }) => {
                            // Channel is gone: stop tracking streams for it.
                            if let Err(err) = ctx.psql().remove_channel_tracks(channel.get()).await
                            {
                                let wrap = format!(
                                    "could not remove stream tracks from unknown channel {channel}"
                                );
                                let report = Report::new(err).wrap_err(wrap);
                                warn!("{report:?}");
                            } else {
                                debug!("Removed twitch tracking of unknown channel {channel}");
                            }
                        }
                        // Any other API error: log and carry on.
                        why => warn!(
                            "Error from API while sending twitch notif (channel {channel}): {why}"
                        ),
                    }
                } else {
                    // Non-API failure (network, deserialization, ...).
                    let wrap = format!("error while sending twitch notif (channel {channel})");
                    let report = Report::new(why).wrap_err(wrap);
                    warn!("{report:?}");
                }
            }
        }
        // Building the message failed before sending (e.g. invalid embed).
        Err(err) => {
            let report = Report::new(err).wrap_err("invalid embed for twitch notif");
            warn!("{report:?}");
        }
    }
}
|
use super::{Task, TaskId};
use alloc::task::Wake;
use alloc::{collections::BTreeMap, sync::Arc};
use core::task::Waker;
use core::task::{Context, Poll};
use crossbeam_queue::ArrayQueue;
use crate::serial_println;
/// # Executor
///
/// A much more optimized, and generally better executor than SimpleExecutor.
///
/// Stores tasks in a BTreeMap, where it holds the taskId and the Task.
///
/// Stores the queue as an `Arc<ArrayQueue<TaskId>>` so it can be used by the waker and executor.
/// the waker will push the woken ID to this queue, where the executor will then run the task
///
/// Waker cache stores the taskId and it's relevant waker
pub struct Executor {
    // All live tasks, keyed by their unique id.
    tasks: BTreeMap<TaskId, Task>,
    // Ids of woken (ready-to-poll) tasks; shared with the wakers via Arc.
    task_queue: Arc<ArrayQueue<TaskId>>,
    // One cached Waker per task so we don't rebuild it on every poll.
    waker_cache: BTreeMap<TaskId, Waker>,
}
impl Executor {
    /// Create a fresh executor with no tasks and a fixed-capacity (100) ready queue.
    pub fn new() -> Self {
        serial_println!("Initialized task executor");

        Self {
            tasks: BTreeMap::new(),
            task_queue: Arc::new(ArrayQueue::new(100)),
            waker_cache: BTreeMap::new(),
        }
    }

    /// Register `task` and immediately mark it ready to be polled.
    ///
    /// Panics if a task with the same id is already registered, or if the
    /// ready queue is full.
    pub fn spawn(&mut self, task: Task) {
        let id = task.id;
        let previous = self.tasks.insert(id, task);
        assert!(previous.is_none(), "task with same ID already in tasks");
        self.task_queue.push(id).expect("queue full");
    }
}
impl Executor {
    /// Iterate through our task_queue, to check what tasks are ready to run. Then run them
    fn run_ready_tasks(&mut self) {
        // destructure `self` to avoid borrow checker errors (will be fixed soon)
        let Self {
            tasks,
            task_queue,
            waker_cache,
        } = self;

        while let Ok(task_id) = task_queue.pop() {
            let task = match tasks.get_mut(&task_id) {
                Some(task) => task,
                None => continue, // task no longer exists
            };
            // Instead of recreating a new waker every time, reuse the waker
            // already stored in the cache for this task.
            let waker = waker_cache
                .entry(task_id)
                .or_insert_with(|| TaskWaker::new(task_id, task_queue.clone()));
            let mut context = Context::from_waker(waker); // create a new context from the waker
            match task.poll(&mut context) { // check the task is ready
                Poll::Ready(()) => {
                    // task done -> remove it and its cached waker
                    tasks.remove(&task_id); // the task is done, we can remove it
                    waker_cache.remove(&task_id); // the waker is also no longer needed
                }
                Poll::Pending => {}
            }
        }
    }

    /// This function will run our executor. It is a diverging function, so will never return
    /// It will run in the background from our OS.
    pub fn run(&mut self) -> ! {
        loop {
            self.run_ready_tasks(); // Run tasks indefinitely.
            self.sleep_if_idle(); // sleep if idle :P
        }
    }

    /// If we have no tasks, we should hlt to avoid wasting precious CPU time.
    fn sleep_if_idle(&self) {
        use x86_64::instructions::interrupts::{self, enable_interrupts_and_hlt};

        // Disable interrupts before checking the queue: if an interrupt fired
        // between the emptiness check and the hlt, we could otherwise sleep
        // through a wakeup.
        interrupts::disable();
        if self.task_queue.is_empty() {
            enable_interrupts_and_hlt(); // We re-enable interrupts and halt
        } else {
            interrupts::enable(); // we have tasks to run, just re-enable interrupts and don't halt
        }
    }
}
/// # TaskWaker
///
/// This struct stores the waker's ID, as well as a reference to the task_queue
///
/// When the task is ready to be run, we add the ID to the queue, where it will be run
struct TaskWaker {
    // Id of the task this waker wakes.
    task_id: TaskId,
    // Shared handle to the executor's ready queue.
    task_queue: Arc<ArrayQueue<TaskId>>,
}
impl TaskWaker {
    /// Wrap a `TaskWaker` for `task_id` into a ready-to-use `Waker`.
    fn new(task_id: TaskId, task_queue: Arc<ArrayQueue<TaskId>>) -> Waker {
        let task_waker = TaskWaker {
            task_id,
            task_queue,
        };

        Waker::from(Arc::new(task_waker))
    }

    /// Push this waker's task id onto the ready queue (panics when full).
    fn wake_task(&self) {
        self.task_queue
            .push(self.task_id)
            .expect("task_queue full");
    }
}
// This allows our `TaskWaker` to be used as a Waker (through the Arc struct)
impl Wake for TaskWaker {
    /// Wake by consuming the Arc'd waker: enqueue the task.
    fn wake(self: Arc<Self>) {
        self.wake_task();
    }
    /// Wake without consuming the waker (no refcount change needed).
    fn wake_by_ref(self: &Arc<Self>) {
        self.wake_task();
    }
}
//https://ptrace.fefe.de/wp/wpopt.rs
use std::fmt::Write;
use std::iter::FromIterator;
use bytes::{BufMut, Bytes, BytesMut};
use futures::stream;
use futures::FutureExt;
use futures::StreamExt;
use tokio_util::codec::{BytesCodec, FramedRead, FramedWrite};
use std::time::Instant;
use word_count::util::*;
use parallel_stream::StreamExt as MyStreamExt;
const CHUNKS_CAPACITY: usize = 1024;
const BUFFER_SIZE: usize = 16*4096;
const CHANNEL_SIZE: usize = 64;
/// Word-count pipeline: read whole words, count frequencies in parallel,
/// shuffle+merge partial tables, sort by descending frequency, format and
/// write out, then report wall/usr/sys timings to stderr.
fn main() {
    let conf = parse_args("word count parallel buf");
    //let mut runtime = Runtime::new().expect("can't create runtime");
    use tokio::runtime::Builder;
    // Multi-threaded tokio runtime sized from the CLI config
    // (+2 extra threads beyond the workers).
    let runtime = Builder::new_multi_thread()
        .worker_threads(conf.threads)
        .max_threads(conf.threads +2)
        /*
        .on_thread_start(|| {
            eprintln!("thread started");
        })
        .on_thread_stop(|| {
            eprintln!("thread stopping");
        })
        */
        //.keep_alive(Some(Duration::from_secs(1)))
        //.stack_size(16 * 1024 * 1024)
        .build().expect("can't create runtime");

    let (start_usr_time, start_sys_time) = get_cputime_usecs();
    let start_time = Instant::now();

    let (input, output) = open_io_async(&conf);
    // Frame the input into whole words; frame the output as raw bytes.
    let input_stream = FramedRead::with_capacity(input , WholeWordsCodec::new(), BUFFER_SIZE);
    let output_stream = FramedWrite::new(output, BytesCodec::new());

    let freq_stream = input_stream.fork(conf.threads, CHANNEL_SIZE, &runtime)
        // Stage 1: each worker builds its own partial frequency table.
        .instrumented_fold(|| FreqTable::new(), |mut frequency, text| async move{
            count_bytes(&mut frequency, &text.expect("io error"));
            frequency
        }, "split_and_count".to_owned())
        .map_result(|frequency| stream::iter(frequency).chunks(CHUNKS_CAPACITY))
        .flatten_stream()
        // Stage 2: repartition entries by (first byte, word length) so that
        // equal words always land on the same worker before merging.
        .shuffle_unordered_chunked( |(word, _count)|
            ((word[0] as usize) << 8) + word.len(),
            //std::cmp::min(16, conf.threads),
            1 + conf.threads/2,
            CHANNEL_SIZE, &runtime)
        .instrumented_fold(|| FreqTable::new(), |mut frequency, sub_table| async move {
            for (word, count) in sub_table {
                *frequency.entry(word).or_insert(0) += count;
            }
            frequency
        }, "merge_table".to_owned())
        .map_result(|sub_table| Vec::from_iter(sub_table))
        // Stage 3: concatenate the per-worker vectors into one.
        .merge(Vec::new(), |mut frequency, mut part| async move {
            frequency.append(&mut part);
            frequency
        },
        &runtime)
        .map(|mut frequency| {
            // Sort by descending count, ties broken by descending word.
            frequency.sort_unstable_by(|(ref w_a, ref f_a), (ref w_b, ref f_b)| f_b.cmp(&f_a).then(w_b.cmp(&w_a)));
            stream::iter(frequency).chunks(CHUNKS_CAPACITY) // <- TODO performance?
        })
        .flatten_stream()
        // Stage 4: format "<word> <count>\n" lines chunk-wise into byte buffers.
        .instrumented_map(|chunk: Vec<(Bytes, u64)>| {
            let mut buffer = BytesMut::with_capacity(CHUNKS_CAPACITY * 15);
            for (word_raw, count) in chunk {
                let word = utf8(&word_raw).expect("UTF8 encoding error");
                let max_len = word_raw.len() + 15;
                if buffer.remaining_mut() < max_len {
                    buffer.reserve(10 * max_len);
                }
                buffer
                    .write_fmt(format_args!("{} {}\n", word, count))
                    .expect("Formating error");
            }
            buffer.freeze()
        }, "format_chunk".to_owned());

    let task = MyStreamExt::forward(freq_stream, output_stream);
    runtime.block_on(task).expect("error running main task");

    let difference = start_time.elapsed();
    let (end_usr_time, end_sys_time) = get_cputime_usecs();
    let usr_time = (end_usr_time - start_usr_time) as f64 / 1000_000.0;
    let sys_time = (end_sys_time - start_sys_time) as f64 / 1000_000.0;
    eprintln!("walltime: {:?} (usr: {:.3}s sys: {:.3}s)",
        difference, usr_time, sys_time);
}
|
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
/// Connection stage a packet belongs to.
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Copy, Serialize, Deserialize)]
pub enum PacketStage {
    // Discriminants are the stage's numeric ids.
    Handshaking = 0,
    Status = 1,
    Login = 2,
    Play = 3,
}
/// Direction a packet travels in.
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Copy, Serialize, Deserialize)]
pub enum PacketDirection {
    // Server -> client.
    ClientBound = 0,
    // Client -> server.
    ServerBound = 1,
}
/// Implements `Deref<Target = $inner>` and `From<$inner>` for a newtype
/// `$outer($inner)`, so wrapped values read like — and convert from — the
/// inner type.
macro_rules! wrap {
    ($outer:ident, $inner:ident) => {
        impl std::ops::Deref for $outer {
            type Target = $inner;

            fn deref(&self) -> &Self::Target {
                &self.0
            }
        }

        impl From<$inner> for $outer {
            fn from(inner: $inner) -> Self {
                Self(inner)
            }
        }
    };
}
// Newtype wrappers over primitive ids/strings. `#[serde(transparent)]` makes
// each (de)serialize exactly like its inner type; the `wrap!` invocations at
// the bottom add `Deref` and `From<inner>` for ergonomic use.
#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct PacketId(u64);

#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct PacketName(String);

#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct MinecraftVersion(String);

#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ProtocolVersion(u64);

#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct SharedTypeId(String);

#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct VariantName(String);

// FieldName additionally derives Hash (used as a map key elsewhere — TODO confirm).
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[serde(transparent)]
pub struct FieldName(String);

wrap!(PacketId, u64);
wrap!(PacketName, String);
wrap!(MinecraftVersion, String);
wrap!(ProtocolVersion, u64);
wrap!(SharedTypeId, String);
wrap!(VariantName, String);
wrap!(FieldName, String);
/// Uniquely identifies a packet: (direction, stage, numeric id).
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize)]
pub struct PacketIdentifier(pub PacketDirection, pub PacketStage, pub PacketId);
impl PacketIdentifier {
pub fn direction(&self) -> PacketDirection {
self.0
}
pub fn stage(&self) -> PacketStage {
self.1
}
pub fn id(&self) -> PacketId {
self.2
}
} |
pub mod filesystem;
|
#[doc = "Register `AHB1ENR` reader"]
pub type R = crate::R<AHB1ENR_SPEC>;
#[doc = "Register `AHB1ENR` writer"]
pub type W = crate::W<AHB1ENR_SPEC>;
#[doc = "Field `GPIOAEN` reader - IO port A clock enable"]
pub type GPIOAEN_R = crate::BitReader<GPIOAEN_A>;
#[doc = "IO port A clock enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GPIOAEN_A {
    // Discriminants are the hardware bit values.
    #[doc = "0: The selected clock is disabled"]
    Disabled = 0,
    #[doc = "1: The selected clock is enabled"]
    Enabled = 1,
}
impl From<GPIOAEN_A> for bool {
#[inline(always)]
fn from(variant: GPIOAEN_A) -> Self {
variant as u8 != 0
}
}
impl GPIOAEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> GPIOAEN_A {
        if self.bits {
            GPIOAEN_A::Enabled
        } else {
            GPIOAEN_A::Disabled
        }
    }
    #[doc = "The selected clock is disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        matches!(self.variant(), GPIOAEN_A::Disabled)
    }
    #[doc = "The selected clock is enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        matches!(self.variant(), GPIOAEN_A::Enabled)
    }
}
#[doc = "Field `GPIOAEN` writer - IO port A clock enable"]
pub type GPIOAEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, GPIOAEN_A>;
impl<'a, REG, const O: u8> GPIOAEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    // Convenience setters that proxy to `variant` with the matching enum value.
    #[doc = "The selected clock is disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOAEN_A::Disabled)
    }
    #[doc = "The selected clock is enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOAEN_A::Enabled)
    }
}
#[doc = "Field `GPIOBEN` reader - IO port B clock enable"]
pub use GPIOAEN_R as GPIOBEN_R;
#[doc = "Field `GPIOCEN` reader - IO port C clock enable"]
pub use GPIOAEN_R as GPIOCEN_R;
#[doc = "Field `GPIODEN` reader - IO port D clock enable"]
pub use GPIOAEN_R as GPIODEN_R;
#[doc = "Field `GPIOEEN` reader - IO port E clock enable"]
pub use GPIOAEN_R as GPIOEEN_R;
#[doc = "Field `GPIOFEN` reader - IO port F clock enable"]
pub use GPIOAEN_R as GPIOFEN_R;
#[doc = "Field `GPIOGEN` reader - IO port G clock enable"]
pub use GPIOAEN_R as GPIOGEN_R;
#[doc = "Field `GPIOHEN` reader - IO port H clock enable"]
pub use GPIOAEN_R as GPIOHEN_R;
#[doc = "Field `GPIOIEN` reader - IO port I clock enable"]
pub use GPIOAEN_R as GPIOIEN_R;
#[doc = "Field `GPIOJEN` reader - IO port J clock enable"]
pub use GPIOAEN_R as GPIOJEN_R;
#[doc = "Field `GPIOKEN` reader - IO port K clock enable"]
pub use GPIOAEN_R as GPIOKEN_R;
#[doc = "Field `CRCEN` reader - CRC clock enable"]
pub use GPIOAEN_R as CRCEN_R;
#[doc = "Field `BKPSRAMEN` reader - Backup SRAM interface clock enable"]
pub use GPIOAEN_R as BKPSRAMEN_R;
#[doc = "Field `DTCMRAMEN` reader - CCM data RAM clock enable"]
pub use GPIOAEN_R as DTCMRAMEN_R;
#[doc = "Field `DMA1EN` reader - DMA1 clock enable"]
pub use GPIOAEN_R as DMA1EN_R;
#[doc = "Field `DMA2EN` reader - DMA2 clock enable"]
pub use GPIOAEN_R as DMA2EN_R;
#[doc = "Field `DMA2DEN` reader - DMA2D clock enable"]
pub use GPIOAEN_R as DMA2DEN_R;
#[doc = "Field `ETHMACEN` reader - Ethernet MAC clock enable"]
pub use GPIOAEN_R as ETHMACEN_R;
#[doc = "Field `ETHMACTXEN` reader - Ethernet Transmission clock enable"]
pub use GPIOAEN_R as ETHMACTXEN_R;
#[doc = "Field `ETHMACRXEN` reader - Ethernet Reception clock enable"]
pub use GPIOAEN_R as ETHMACRXEN_R;
#[doc = "Field `ETHMACPTPEN` reader - Ethernet PTP clock enable"]
pub use GPIOAEN_R as ETHMACPTPEN_R;
#[doc = "Field `OTGHSEN` reader - USB OTG HS clock enable"]
pub use GPIOAEN_R as OTGHSEN_R;
#[doc = "Field `OTGHSULPIEN` reader - USB OTG HSULPI clock enable"]
pub use GPIOAEN_R as OTGHSULPIEN_R;
#[doc = "Field `GPIOBEN` writer - IO port B clock enable"]
pub use GPIOAEN_W as GPIOBEN_W;
#[doc = "Field `GPIOCEN` writer - IO port C clock enable"]
pub use GPIOAEN_W as GPIOCEN_W;
#[doc = "Field `GPIODEN` writer - IO port D clock enable"]
pub use GPIOAEN_W as GPIODEN_W;
#[doc = "Field `GPIOEEN` writer - IO port E clock enable"]
pub use GPIOAEN_W as GPIOEEN_W;
#[doc = "Field `GPIOFEN` writer - IO port F clock enable"]
pub use GPIOAEN_W as GPIOFEN_W;
#[doc = "Field `GPIOGEN` writer - IO port G clock enable"]
pub use GPIOAEN_W as GPIOGEN_W;
#[doc = "Field `GPIOHEN` writer - IO port H clock enable"]
pub use GPIOAEN_W as GPIOHEN_W;
#[doc = "Field `GPIOIEN` writer - IO port I clock enable"]
pub use GPIOAEN_W as GPIOIEN_W;
#[doc = "Field `GPIOJEN` writer - IO port J clock enable"]
pub use GPIOAEN_W as GPIOJEN_W;
#[doc = "Field `GPIOKEN` writer - IO port K clock enable"]
pub use GPIOAEN_W as GPIOKEN_W;
#[doc = "Field `CRCEN` writer - CRC clock enable"]
pub use GPIOAEN_W as CRCEN_W;
#[doc = "Field `BKPSRAMEN` writer - Backup SRAM interface clock enable"]
pub use GPIOAEN_W as BKPSRAMEN_W;
#[doc = "Field `DTCMRAMEN` writer - CCM data RAM clock enable"]
pub use GPIOAEN_W as DTCMRAMEN_W;
#[doc = "Field `DMA1EN` writer - DMA1 clock enable"]
pub use GPIOAEN_W as DMA1EN_W;
#[doc = "Field `DMA2EN` writer - DMA2 clock enable"]
pub use GPIOAEN_W as DMA2EN_W;
#[doc = "Field `DMA2DEN` writer - DMA2D clock enable"]
pub use GPIOAEN_W as DMA2DEN_W;
#[doc = "Field `ETHMACEN` writer - Ethernet MAC clock enable"]
pub use GPIOAEN_W as ETHMACEN_W;
#[doc = "Field `ETHMACTXEN` writer - Ethernet Transmission clock enable"]
pub use GPIOAEN_W as ETHMACTXEN_W;
#[doc = "Field `ETHMACRXEN` writer - Ethernet Reception clock enable"]
pub use GPIOAEN_W as ETHMACRXEN_W;
#[doc = "Field `ETHMACPTPEN` writer - Ethernet PTP clock enable"]
pub use GPIOAEN_W as ETHMACPTPEN_W;
#[doc = "Field `OTGHSEN` writer - USB OTG HS clock enable"]
pub use GPIOAEN_W as OTGHSEN_W;
#[doc = "Field `OTGHSULPIEN` writer - USB OTG HSULPI clock enable"]
pub use GPIOAEN_W as OTGHSULPIEN_W;
impl R {
    // Machine-generated (svd2rust) read accessors: each extracts one bit of
    // the AHB1ENR register value. Bit positions come from the reference
    // manual; note the gaps (11, 13-17, 19, 24, 31 are reserved).
    #[doc = "Bit 0 - IO port A clock enable"]
    #[inline(always)]
    pub fn gpioaen(&self) -> GPIOAEN_R {
        GPIOAEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - IO port B clock enable"]
    #[inline(always)]
    pub fn gpioben(&self) -> GPIOBEN_R {
        GPIOBEN_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - IO port C clock enable"]
    #[inline(always)]
    pub fn gpiocen(&self) -> GPIOCEN_R {
        GPIOCEN_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - IO port D clock enable"]
    #[inline(always)]
    pub fn gpioden(&self) -> GPIODEN_R {
        GPIODEN_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - IO port E clock enable"]
    #[inline(always)]
    pub fn gpioeen(&self) -> GPIOEEN_R {
        GPIOEEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - IO port F clock enable"]
    #[inline(always)]
    pub fn gpiofen(&self) -> GPIOFEN_R {
        GPIOFEN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - IO port G clock enable"]
    #[inline(always)]
    pub fn gpiogen(&self) -> GPIOGEN_R {
        GPIOGEN_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - IO port H clock enable"]
    #[inline(always)]
    pub fn gpiohen(&self) -> GPIOHEN_R {
        GPIOHEN_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - IO port I clock enable"]
    #[inline(always)]
    pub fn gpioien(&self) -> GPIOIEN_R {
        GPIOIEN_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - IO port J clock enable"]
    #[inline(always)]
    pub fn gpiojen(&self) -> GPIOJEN_R {
        GPIOJEN_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - IO port K clock enable"]
    #[inline(always)]
    pub fn gpioken(&self) -> GPIOKEN_R {
        GPIOKEN_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 12 - CRC clock enable"]
    #[inline(always)]
    pub fn crcen(&self) -> CRCEN_R {
        CRCEN_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 18 - Backup SRAM interface clock enable"]
    #[inline(always)]
    pub fn bkpsramen(&self) -> BKPSRAMEN_R {
        BKPSRAMEN_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 20 - CCM data RAM clock enable"]
    #[inline(always)]
    pub fn dtcmramen(&self) -> DTCMRAMEN_R {
        DTCMRAMEN_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - DMA1 clock enable"]
    #[inline(always)]
    pub fn dma1en(&self) -> DMA1EN_R {
        DMA1EN_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - DMA2 clock enable"]
    #[inline(always)]
    pub fn dma2en(&self) -> DMA2EN_R {
        DMA2EN_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - DMA2D clock enable"]
    #[inline(always)]
    pub fn dma2den(&self) -> DMA2DEN_R {
        DMA2DEN_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 25 - Ethernet MAC clock enable"]
    #[inline(always)]
    pub fn ethmacen(&self) -> ETHMACEN_R {
        ETHMACEN_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - Ethernet Transmission clock enable"]
    #[inline(always)]
    pub fn ethmactxen(&self) -> ETHMACTXEN_R {
        ETHMACTXEN_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - Ethernet Reception clock enable"]
    #[inline(always)]
    pub fn ethmacrxen(&self) -> ETHMACRXEN_R {
        ETHMACRXEN_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - Ethernet PTP clock enable"]
    #[inline(always)]
    pub fn ethmacptpen(&self) -> ETHMACPTPEN_R {
        ETHMACPTPEN_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - USB OTG HS clock enable"]
    #[inline(always)]
    pub fn otghsen(&self) -> OTGHSEN_R {
        OTGHSEN_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - USB OTG HSULPI clock enable"]
    #[inline(always)]
    pub fn otghsulpien(&self) -> OTGHSULPIEN_R {
        OTGHSULPIEN_R::new(((self.bits >> 30) & 1) != 0)
    }
}
impl W {
    // Machine-generated (svd2rust) write proxies: each returns a bit-writer
    // positioned at the field's offset (the const generic parameter).
    #[doc = "Bit 0 - IO port A clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpioaen(&mut self) -> GPIOAEN_W<AHB1ENR_SPEC, 0> {
        GPIOAEN_W::new(self)
    }
    #[doc = "Bit 1 - IO port B clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpioben(&mut self) -> GPIOBEN_W<AHB1ENR_SPEC, 1> {
        GPIOBEN_W::new(self)
    }
    #[doc = "Bit 2 - IO port C clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpiocen(&mut self) -> GPIOCEN_W<AHB1ENR_SPEC, 2> {
        GPIOCEN_W::new(self)
    }
    #[doc = "Bit 3 - IO port D clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpioden(&mut self) -> GPIODEN_W<AHB1ENR_SPEC, 3> {
        GPIODEN_W::new(self)
    }
    #[doc = "Bit 4 - IO port E clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpioeen(&mut self) -> GPIOEEN_W<AHB1ENR_SPEC, 4> {
        GPIOEEN_W::new(self)
    }
    #[doc = "Bit 5 - IO port F clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpiofen(&mut self) -> GPIOFEN_W<AHB1ENR_SPEC, 5> {
        GPIOFEN_W::new(self)
    }
    #[doc = "Bit 6 - IO port G clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpiogen(&mut self) -> GPIOGEN_W<AHB1ENR_SPEC, 6> {
        GPIOGEN_W::new(self)
    }
    #[doc = "Bit 7 - IO port H clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpiohen(&mut self) -> GPIOHEN_W<AHB1ENR_SPEC, 7> {
        GPIOHEN_W::new(self)
    }
    #[doc = "Bit 8 - IO port I clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpioien(&mut self) -> GPIOIEN_W<AHB1ENR_SPEC, 8> {
        GPIOIEN_W::new(self)
    }
    #[doc = "Bit 9 - IO port J clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpiojen(&mut self) -> GPIOJEN_W<AHB1ENR_SPEC, 9> {
        GPIOJEN_W::new(self)
    }
    #[doc = "Bit 10 - IO port K clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpioken(&mut self) -> GPIOKEN_W<AHB1ENR_SPEC, 10> {
        GPIOKEN_W::new(self)
    }
    #[doc = "Bit 12 - CRC clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn crcen(&mut self) -> CRCEN_W<AHB1ENR_SPEC, 12> {
        CRCEN_W::new(self)
    }
    #[doc = "Bit 18 - Backup SRAM interface clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn bkpsramen(&mut self) -> BKPSRAMEN_W<AHB1ENR_SPEC, 18> {
        BKPSRAMEN_W::new(self)
    }
    #[doc = "Bit 20 - CCM data RAM clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn dtcmramen(&mut self) -> DTCMRAMEN_W<AHB1ENR_SPEC, 20> {
        DTCMRAMEN_W::new(self)
    }
    #[doc = "Bit 21 - DMA1 clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn dma1en(&mut self) -> DMA1EN_W<AHB1ENR_SPEC, 21> {
        DMA1EN_W::new(self)
    }
    #[doc = "Bit 22 - DMA2 clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn dma2en(&mut self) -> DMA2EN_W<AHB1ENR_SPEC, 22> {
        DMA2EN_W::new(self)
    }
    #[doc = "Bit 23 - DMA2D clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn dma2den(&mut self) -> DMA2DEN_W<AHB1ENR_SPEC, 23> {
        DMA2DEN_W::new(self)
    }
    #[doc = "Bit 25 - Ethernet MAC clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn ethmacen(&mut self) -> ETHMACEN_W<AHB1ENR_SPEC, 25> {
        ETHMACEN_W::new(self)
    }
    #[doc = "Bit 26 - Ethernet Transmission clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn ethmactxen(&mut self) -> ETHMACTXEN_W<AHB1ENR_SPEC, 26> {
        ETHMACTXEN_W::new(self)
    }
    #[doc = "Bit 27 - Ethernet Reception clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn ethmacrxen(&mut self) -> ETHMACRXEN_W<AHB1ENR_SPEC, 27> {
        ETHMACRXEN_W::new(self)
    }
    #[doc = "Bit 28 - Ethernet PTP clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn ethmacptpen(&mut self) -> ETHMACPTPEN_W<AHB1ENR_SPEC, 28> {
        ETHMACPTPEN_W::new(self)
    }
    #[doc = "Bit 29 - USB OTG HS clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn otghsen(&mut self) -> OTGHSEN_W<AHB1ENR_SPEC, 29> {
        OTGHSEN_W::new(self)
    }
    #[doc = "Bit 30 - USB OTG HSULPI clock enable"]
    #[inline(always)]
    #[must_use]
    pub fn otghsulpien(&mut self) -> OTGHSULPIEN_W<AHB1ENR_SPEC, 30> {
        OTGHSULPIEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // NOTE(review): unsafe because arbitrary bit patterns bypass the typed
    // field writers; caller must ensure the value is valid for this register.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "AHB1 peripheral clock register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb1enr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb1enr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AHB1ENR_SPEC;
impl crate::RegisterSpec for AHB1ENR_SPEC {
    // The register is a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ahb1enr::R`](R) reader structure"]
// Marker impl: AHB1ENR supports `read()`.
impl crate::Readable for AHB1ENR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ahb1enr::W`](W) writer structure"]
impl crate::Writable for AHB1ENR_SPEC {
    // No fields in this register use write-zero- or write-one-to-modify semantics.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets AHB1ENR to value 0x0010_0000"]
impl crate::Resettable for AHB1ENR_SPEC {
    // Hardware reset value: only bit 20 set (per the doc string above).
    const RESET_VALUE: Self::Ux = 0x0010_0000;
}
|
/// Build script: emits native-library link directives for Linux targets.
///
/// NOTE: inside a build script `cfg!(target_os = ...)` reflects the HOST the
/// script is compiled for, not the compilation target. Cargo exposes the
/// actual target through the `CARGO_CFG_TARGET_OS` environment variable, so
/// cross-compiling to/from Linux now links the right libraries.
fn main() {
    if std::env::var("CARGO_CFG_TARGET_OS").as_deref() == Ok("linux") {
        // these pragmas are optional on my system but left as examples if you run into trouble.
        println!("cargo:rustc-link-lib=X11");
        println!("cargo:rustc-link-lib=Xcursor");
        println!("cargo:rustc-link-lib=Xrandr");
        println!("cargo:rustc-link-lib=Xi");
        println!("cargo:rustc-link-lib=vulkan");
    }
}
use {
exitfailure::ExitFailure,
std::path::PathBuf,
tsukuyomi::{endpoint, path, server::Server, App},
};
/// Builds the tsukuyomi routing tree and serves it on 127.0.0.1:4000.
/// Builder vocabulary: `at` declares a route, `mount` opens a nested scope,
/// `to`/`done` finish the current sub-builder.
fn main() -> Result<(), ExitFailure> {
    let app = App::build(|mut scope| {
        // a route that matches the root path.
        scope.at("/")?.to({
            // an endpoint that matches *all* methods with the root path.
            endpoint::call(|| "Hello, world\n") // replies by cloning a `Responder`.
        })?;
        // a sub-scope with the prefix `/api/v1/`.
        scope.mount("/api/v1/")?.done(|mut scope| {
            // scopes can be nested.
            scope.mount("/posts")?.done(|mut scope| {
                // a route with the path `/api/v1/posts`.
                scope.at("/")?.done(|mut resource| {
                    resource.get().to(endpoint::call(|| "list_posts"))?; // <-- GET /api/v1/posts
                    resource.post().to(endpoint::call(|| "add_post"))?; // <-- POST /api/v1/posts
                    resource.to(endpoint::call(|| "other methods")) // <-- {PUT, DELETE, ...} /api/v1/posts
                })?;
                // A route that captures a parameter from the path.
                scope.at(path!("/:id"))?.to({
                    endpoint::call(|id: i32| {
                        // returns a `Responder`.
                        format!("get_post(id = {})", id)
                    })
                })
            })?;
            scope.mount("/user")?.done(|mut scope| {
                scope
                    .at("/auth")? //
                    .to(endpoint::call(|| "Authentication"))
            })
        })?;
        // a route that captures a *catch-all* parameter: everything under
        // /static/ is served from disk by NamedFile.
        scope
            .at(path!("/static/*path"))?
            .get()
            .to(endpoint::call(|path: PathBuf| {
                // returns a `Responder`.
                tsukuyomi::fs::NamedFile::open(path)
            }))?;
        // A route that matches any path.
        scope.fallback(endpoint::call(|| "default route"))?;
        Ok(())
    })?;
    let mut server = Server::new(app)?;
    server.bind("127.0.0.1:4000")?;
    // Blocks forever; the trailing Ok(()) is only reached on shutdown.
    server.run_forever();
    Ok(())
}
|
mod kay_auto;
pub use kay_auto::*;
use kay::{ActorSystem, World};
// most basic possible boilerplate
/// Smallest possible kay actor: nothing but the mandatory id field
/// (named `<StructName>ID`, generated by kay_auto).
#[derive(Clone, Compact)]
pub struct BasicActor {
    id: BasicActorID,
}
// Actor must have creation method or it won't generate
impl BasicActor {
    /// Spawn handler required by kay's code generation; stores only the id.
    pub fn spawn(id: BasicActorID, _: &mut World) -> Self {
        Self { id }
    }
}
// kay actor needs id field of StructName + ID
// type will be auto generated
/// Actor carrying a per-instance `number` (assigned at spawn) and a
/// running `count` incremented by messages.
#[derive(Clone, Compact)]
pub struct CounterActor {
    id: CounterActorID,
    number: u8,
    count: u32,
}
/// Monotonic counter handing each spawned `CounterActor` a distinct number.
/// Atomic instead of the original `static mut`: the unsafe read-modify-write
/// was a data race if spawns ever happen concurrently (and `static mut`
/// access is deprecated-unsound style in modern Rust). Note: wraps at 256
/// actors instead of panicking in debug builds.
static NUMBER_COUNTERS: std::sync::atomic::AtomicU8 = std::sync::atomic::AtomicU8::new(0);
// methods ending in ref mut world will generate kay bindings
impl CounterActor {
    /// aka new - will generate add_spawn_handler code.
    pub fn spawn(id: CounterActorID, _: &mut World) -> Self {
        use std::sync::atomic::Ordering;
        // fetch_add returns the previous value; +1 keeps the numbering
        // identical to the original (first actor gets 1).
        let number = NUMBER_COUNTERS.fetch_add(1, Ordering::Relaxed) + 1;
        Self {
            number,
            id,
            count: 0,
        }
    }
    /// Adds `value` (default 1) to the count and logs the new state.
    /// Will generate a message_handler.
    pub fn increment(&mut self, value: Option<u32>, _: &mut World) {
        let value = value.unwrap_or(1);
        self.count += value;
        println!(
            "actor{}\nincrement: {}\ncount: {}\n",
            self.number, value, self.count
        );
    }
}
/// Registers both actor types with the system, then runs the generated
/// kay_auto wiring (spawn/message handlers).
pub fn setup(system: &mut ActorSystem) {
    system.register::<BasicActor>();
    system.register::<CounterActor>();
    auto_setup(system);
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::ffi::CStr;
use std::ffi::CString;
use std::io;
use super::Addr;
use super::AddrMut;
use super::Errno;
use super::MemoryAccess;
/// A local address space.
///
/// Reads and writes operate directly on this process's own memory.
#[derive(Default, Debug)]
pub struct LocalMemory {}
impl LocalMemory {
/// Creates a new representation of memory in the current address space.
/// Accessing memory this way is highly unsafe. This interface is subject to
/// change in the future to reduce the unsafeness of it.
///
/// # Example
/// ```
/// # use reverie_syscalls::LocalMemory;
/// let memory = LocalMemory::new();
/// ```
pub fn new() -> Self {
// TODO: Make LocalMemory just act as a `&mut [u8]`. Then, the "address
// space" will simply be pointers within that range. This would enable
// restriction of the accessible address space on a per-syscall basis.
Self::default()
}
}
impl MemoryAccess for LocalMemory {
fn read_vectored(
&self,
read_from: &[io::IoSlice],
write_to: &mut [io::IoSliceMut],
) -> Result<usize, Errno> {
// Just read from the first non-empty slice.
if let Some(from) = read_from.iter().find(|slice| !slice.is_empty()) {
// Write to the first non-empty slice.
if let Some(to) = write_to.iter_mut().find(|slice| !slice.is_empty()) {
let count = to.len().min(from.len());
to[0..count].copy_from_slice(&from[0..count]);
return Ok(count);
}
}
Ok(0)
}
fn write_vectored(
&mut self,
_read_from: &[io::IoSlice],
_write_to: &mut [io::IoSliceMut],
) -> Result<usize, Errno> {
todo!("Implement local memory access")
}
fn read<'a, A>(&self, addr: A, buf: &mut [u8]) -> Result<usize, Errno>
where
A: Into<Addr<'a, u8>>,
{
let addr = addr.into();
// Simply copy the memory starting at the address into the buffer. This
// is very unsafe. We need a better way to do this.
unsafe {
::core::intrinsics::copy_nonoverlapping(addr.as_ptr(), buf.as_mut_ptr(), buf.len())
};
Ok(buf.len())
}
fn write(&mut self, addr: AddrMut<u8>, buf: &[u8]) -> Result<usize, Errno> {
// Simply copy the memory starting at the address into the buffer. This
// is very unsafe. We need a better way to do this.
unsafe {
::core::intrinsics::copy_nonoverlapping(buf.as_ptr(), addr.as_mut_ptr(), buf.len())
};
Ok(buf.len())
}
fn read_cstring(&self, addr: Addr<u8>) -> Result<CString, Errno> {
Ok(unsafe { CStr::from_ptr(addr.as_ptr() as *const _) }.into())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Reads a typed u32 out of a local array through the memory abstraction.
    #[test]
    fn read_value() {
        let m = LocalMemory::new();
        let x = [1u32, 2, 3, 4];
        let addr = Addr::from_ptr(x.as_ptr()).unwrap();
        let v: u32 = m.read_value(addr).unwrap();
        assert_eq!(v, 1);
    }
    // Byte-level read round-trips the whole buffer.
    #[test]
    fn read() {
        let m = LocalMemory::new();
        let x = [1u8, 2, 3, 4, 5, 6, 7, 8];
        let addr = Addr::from_ptr(x.as_ptr()).unwrap();
        let mut buf = [0u8; 8];
        assert_eq!(m.read(addr, &mut buf).unwrap(), 8);
        assert_eq!(buf, x);
    }
    // C-string read stops at the embedded NUL terminator.
    #[test]
    fn read_cstring() {
        use std::ffi::CStr;
        let m = LocalMemory::new();
        let x = "hello world\0";
        let addr = Addr::from_ptr(x.as_ptr() as *const u8).unwrap();
        assert_eq!(m.read_cstring(addr).unwrap().as_c_str(), unsafe {
            CStr::from_ptr(x.as_ptr() as *const _)
        });
    }
}
|
use std::io;
use std::collections::HashMap;
use std::ops::Deref;
use directory::ReadOnlySource;
use common::BinarySerializable;
use DocId;
use schema::{Field, SchemaBuilder};
use std::path::Path;
use schema::FAST;
use directory::{WritePtr, RAMDirectory, Directory};
use fastfield::FastFieldSerializer;
use fastfield::U32FastFieldsWriter;
use common::bitpacker::compute_num_bits;
use common::bitpacker::BitUnpacker;
lazy_static! {
    // Shared backing data for `U32FastFieldReader::empty()`: serialize an
    // empty value list once and reuse the resulting read-only source.
    static ref U32_FAST_FIELD_EMPTY: ReadOnlySource = {
        let u32_fast_field = U32FastFieldReader::from(Vec::new());
        u32_fast_field._data.clone()
    };
}
/// Reader over a single u32 fast field: values are stored bit-packed as
/// deltas from `min_val`.
pub struct U32FastFieldReader {
    // Retained alongside the unpacker — presumably to keep the underlying
    // bytes alive; confirm BitUnpacker's ownership semantics.
    _data: ReadOnlySource,
    bit_unpacker: BitUnpacker,
    min_val: u32,
    max_val: u32,
}
impl U32FastFieldReader {
    /// Returns a reader over an empty field (backed by a shared static source).
    pub fn empty() -> U32FastFieldReader {
        U32FastFieldReader::open(U32_FAST_FIELD_EMPTY.clone())
    }
    /// Smallest value stored in the field.
    pub fn min_val(&self,) -> u32 {
        self.min_val
    }
    /// Largest value stored in the field.
    pub fn max_val(&self,) -> u32 {
        self.max_val
    }
    /// Opens a new fast field reader given a read only source.
    ///
    /// Data layout (as read below): `[min_val: u32][amplitude: u32]`
    /// followed by the bit-packed deltas.
    ///
    /// # Panics
    /// Panics if the data is corrupted.
    pub fn open(data: ReadOnlySource) -> U32FastFieldReader {
        let min_val;
        let amplitude;
        let max_val;
        {
            let mut cursor = data.as_slice();
            min_val = u32::deserialize(&mut cursor).unwrap();
            amplitude = u32::deserialize(&mut cursor).unwrap();
            max_val = min_val + amplitude;
        }
        // Each stored delta needs just enough bits to cover [0, amplitude].
        let num_bits = compute_num_bits(amplitude);
        let bit_unpacker = {
            // Skip the two u32 header fields (8 bytes).
            let data_arr = &(data.deref()[8..]);
            BitUnpacker::new(data_arr, num_bits as usize)
        };
        U32FastFieldReader {
            _data: data,
            bit_unpacker: bit_unpacker,
            min_val: min_val,
            max_val: max_val,
        }
    }
    /// Returns the value stored for `doc` (unpacked delta + min_val).
    pub fn get(&self, doc: DocId) -> u32 {
        self.min_val + self.bit_unpacker.get(doc as usize)
    }
}
impl From<Vec<u32>> for U32FastFieldReader {
    /// Builds a reader by round-tripping `vals` through the full write path:
    /// a throwaway schema with a single FAST u32 field, serialized into an
    /// in-memory directory, then reopened for reading.
    fn from(vals: Vec<u32>) -> U32FastFieldReader {
        let mut schema_builder = SchemaBuilder::default();
        let field = schema_builder.add_u32_field("field", FAST);
        let schema = schema_builder.build();
        let path = Path::new("test");
        let mut directory: RAMDirectory = RAMDirectory::create();
        {
            let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
            let mut serializer = FastFieldSerializer::new(write).unwrap();
            let mut fast_field_writers = U32FastFieldsWriter::from_schema(&schema);
            for val in vals {
                let mut fast_field_writer = fast_field_writers.get_field_writer(field).unwrap();
                fast_field_writer.add_val(val);
            }
            fast_field_writers.serialize(&mut serializer).unwrap();
            serializer.close().unwrap();
        }
        let source = directory.open_read(&path).unwrap();
        let fast_field_readers = U32FastFieldsReader::open(source).unwrap();
        fast_field_readers.get_field(field).unwrap()
    }
}
/// Multi-field reader: maps each fast field to its (start, stop) byte range
/// within the shared `source`.
pub struct U32FastFieldsReader {
    source: ReadOnlySource,
    field_offsets: HashMap<Field, (u32, u32)>,
}
impl U32FastFieldsReader {
pub fn open(source: ReadOnlySource) -> io::Result<U32FastFieldsReader> {
let header_offset;
let field_offsets: Vec<(Field, u32)>;
{
let buffer = source.as_slice();
{
let mut cursor = buffer;
header_offset = try!(u32::deserialize(&mut cursor));
}
{
let mut cursor = &buffer[header_offset as usize..];
field_offsets = try!(Vec::deserialize(&mut cursor));
}
}
let mut end_offsets: Vec<u32> = field_offsets
.iter()
.map(|&(_, offset)| offset)
.collect();
end_offsets.push(header_offset);
let mut field_offsets_map: HashMap<Field, (u32, u32)> = HashMap::new();
for (field_start_offsets, stop_offset) in field_offsets.iter().zip(end_offsets.iter().skip(1)) {
let (field, start_offset) = *field_start_offsets;
field_offsets_map.insert(field, (start_offset, *stop_offset));
}
Ok(U32FastFieldsReader {
field_offsets: field_offsets_map,
source: source,
})
}
/// Returns the u32 fast value reader if the field
/// is a u32 field indexed as "fast".
///
/// Return None if the field is not a u32 field
/// indexed with the fast option.
///
/// # Panics
/// May panic if the index is corrupted.
pub fn get_field(&self, field: Field) -> Option<U32FastFieldReader> {
self.field_offsets
.get(&field)
.map(|&(start, stop)| {
let field_source = self.source.slice(start as usize, stop as usize);
U32FastFieldReader::open(field_source)
})
}
}
|
//////////////////////////////////////////////////
// General Notes
//
// The Module System:
//
// Packages: A Cargo feature that lets you build, test, and share crates
// Crates: A tree of modules that produces a library or executable
// Modules and use: Let you control the organization, scope, and privacy of paths
// Paths: A way of naming an item, such as a struct, function, or module
//
mod front_of_house {
    // Public sub-module: reachable from outside `front_of_house`.
    pub mod hosting {
        pub fn add_to_waitlist() {}
        // Private: only visible within `hosting` and its children.
        fn seat_at_table() {}
    }
    // Private sub-module: demonstration only, nothing exported.
    mod serving {
        fn take_order() {}
        fn serve_order() {}
        fn take_payment() {}
    }
}
// Crate-root function; `back_of_house::fix_incorrect_order` reaches it via `super::`.
fn serve_order() {}
mod back_of_house {
    pub struct Breakfast {
        pub toast: String,
        seasonal_fruit: String,
    }
    impl Breakfast {
        /// Constructs a summer breakfast with the caller's choice of toast.
        /// A public constructor is required because `seasonal_fruit` is
        /// private: code outside this module cannot build a `Breakfast`
        /// struct literal.
        pub fn summer(toast: &str) -> Breakfast {
            Breakfast {
                toast: toast.to_string(),
                seasonal_fruit: "peaches".to_string(),
            }
        }
    }
    // A `pub` enum automatically exposes every variant.
    pub enum Appetizer {
        Soup,
        Salad,
    }
    fn fix_incorrect_order() {
        cook_order();
        // `super::` climbs one module level to reach the crate-root function.
        super::serve_order();
    }
    fn cook_order() {}
}
// Bringing a module into scope idiomatically
//
//use crate::front_of_house::hosting;
// Bringing a module into scope using a relative path
//
//use self::front_of_house::hosting;
// Bringing a function into scope in an unidiomatic way. This
// is discouraged: importing the function name directly makes it
// unclear to the reader whether the function is locally defined
// or comes from another module.
//
//use crate::front_of_house::hosting::add_to_waitlist;
// Re-exporting. This allows external code to have access
// to hosting. It also simplifies the structure; external
// clients may not be interested in front_of_house/back_of_house.
//
// Re-exporting is useful when the internal structure of
// the code is different from how programmers calling your
// code would think about the domain...
//
pub use crate::front_of_house::hosting;
// However, when bringing in structs, enums, and other items,
// it's idiomatic to specify the full path
//
use std::collections::HashMap;
// The exception to the above is bringing in two items with
// the same name but different scope
//
/*
use std::fmt;
use std::io;
fn function1() -> fmt::Result {
}
fn function2() -> io::Result<()> {
}
*/
// Another way to resolve the above example is to rename
// the item
//
/*
use std::fmt::Result;
use std::io::Result as IoResult;
fn function1() -> Result {
}
fn function2() -> IoResult<()> {
}
*/
// Using nested paths to clean up large use lists
//
// Before:
//
// use std::cmp::Ordering;
// use std::io;
//
// After:
//
// use std::{cmp::Ordering, io}
// Before:
//
// use std::io;
// use std::io::Write;
//
// After
//
// use std::io::{self, Write};
// Using a glob operator:
//
use std::collections::*;
pub fn eat_at_restaurant() {
    // Absolute path
    //
    //crate::front_of_house::hosting::add_to_waitlist();
    // Relative path
    //
    //front_of_house::hosting::add_to_waitlist();
    // use statement above brings hosting into scope...
    //
    hosting::add_to_waitlist();
    // Struct with a private field: must go through the public constructor,
    // but the public `toast` field remains directly assignable.
    let mut meal = back_of_house::Breakfast::summer("Rye");
    meal.toast = String::from("Wheat");
    println!("meal.toast: {}", meal.toast);
    // meal.seasonal_fruit = String::from("blueberries"); // this will fail
    // Both variants of the pub enum are accessible.
    let order1 = back_of_house::Appetizer::Soup;
    let order2 = back_of_house::Appetizer::Salad;
    // HashMap is in scope via the `use std::collections::HashMap` above.
    let mut map = HashMap::new();
    map.insert(1, 2);
}
#[cfg(test)]
mod tests {
    // Smoke test: confirms the test harness itself runs.
    #[test]
    fn it_works() {
        let four = 2 + 2;
        assert_eq!(four, 4);
    }
}
|
use mockito::{mock, server_url};
speculate::speculate! {
    // Runs before every test; `ok()` swallows the "already initialised"
    // error env_logger returns on every test after the first.
    before {
        env_logger::try_init().ok();
    }
    test "simple response body" {
        let m = mock("GET", "/")
            .with_body("hello world")
            .create();
        let mut response = chttp::get(server_url()).unwrap();
        let response_text = response.body_mut().text().unwrap();
        assert_eq!(response_text, "hello world");
        // Verifies the mock endpoint was actually hit.
        m.assert();
    }
    test "large response body" {
        // ~13 KB body to exercise the multi-chunk read path.
        let body = "wow so large ".repeat(1000);
        let m = mock("GET", "/")
            .with_body(&body)
            .create();
        let mut response = chttp::get(server_url()).unwrap();
        let response_text = response.body_mut().text().unwrap();
        assert_eq!(response_text, body);
        m.assert();
    }
}
|
use errors::*;
use std::fs::{File, OpenOptions};
use std::io::{BufReader, BufWriter};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use store::{DataInput, DataOutput, KVDataInput, KVDataOutput};
/// Buffer capacity (8 KiB) shared by the buffered reader and writer below.
const CHUNK_SIZE: usize = 8 * 1024;
/// Buffered file reader that tracks how many bytes have been consumed.
pub(crate) struct FsDataInput {
    // Kept for ownership/diagnostics; reads go through `reader`.
    _path: PathBuf,
    pub reader: BufReader<File>,
    bytes_read: usize,
}
impl FsDataInput {
    /// Opens `path` for buffered reading; fails with `FileNotFound` when the
    /// path is not an existing regular file.
    pub fn open_input<T: AsRef<Path>>(path: &T) -> Result<FsDataInput> {
        let p = path.as_ref();
        if !p.is_file() {
            bail!(ErrorKind::FileNotFound(
                p.to_str().unwrap_or("").to_string()
            ));
        }
        Ok(FsDataInput {
            _path: p.to_path_buf(),
            reader: BufReader::with_capacity(CHUNK_SIZE, File::open(p)?),
            bytes_read: 0,
        })
    }
    /// True once `bytes_read` has reached the file's length on disk.
    pub fn is_end(&self) -> Result<bool> {
        let file_len = self.reader.get_ref().metadata()?.len();
        Ok(file_len <= self.bytes_read as u64)
    }
}
impl Read for FsDataInput {
    /// Delegates to the buffered reader while maintaining the running
    /// byte count used by `is_end`.
    fn read(&mut self, buf: &mut [u8]) -> ::std::io::Result<usize> {
        let n = self.reader.read(buf)?;
        self.bytes_read += n;
        Ok(n)
    }
}
// Marker impls: FsDataInput opts into both input interfaces using only the
// traits' provided methods (no overrides needed here).
impl DataInput for FsDataInput {}
impl KVDataInput for FsDataInput {}
/// Buffered file writer that counts bytes written and flushes on drop.
pub(crate) struct FsDataOutput {
    // Display form of the path, used only in the drop-time error message.
    path: String,
    writer: BufWriter<File>,
    bytes_written: usize,
}
impl FsDataOutput {
    /// Opens an existing file for appending; fails if it does not exist.
    pub fn open_exist<T: AsRef<Path>>(name: &T) -> Result<FsDataOutput> {
        Self::new(name, OpenOptions::new().append(true).create(false))
    }
    /// Creates `path` for writing, replacing any previous contents.
    /// `.truncate(true)` was added: without it, writing fewer bytes than an
    /// existing file held would leave stale trailing data behind.
    pub fn open_new<T: AsRef<Path>>(path: &T) -> Result<FsDataOutput> {
        Self::new(path, OpenOptions::new().create(true).write(true).truncate(true))
    }
    // Shared constructor: applies the given options and wraps the file in a
    // chunk-sized buffered writer.
    fn new<T: AsRef<Path>>(path: &T, options: &OpenOptions) -> Result<FsDataOutput> {
        let file = options.open(path)?;
        Ok(FsDataOutput {
            // Lossy conversion instead of the previous `to_str().unwrap()`:
            // a non-UTF-8 path should not panic here — the string is only
            // used for the log message in `drop`.
            path: path.as_ref().to_string_lossy().into_owned(),
            writer: BufWriter::with_capacity(CHUNK_SIZE, file),
            bytes_written: 0,
        })
    }
}
impl Write for FsDataOutput {
    // Forwards to the buffered writer while tracking the total bytes written.
    fn write(&mut self, buf: &[u8]) -> ::std::io::Result<usize> {
        let count = self.writer.write(buf)?;
        self.bytes_written += count;
        Ok(count)
    }
    fn flush(&mut self) -> ::std::io::Result<()> {
        self.writer.flush()
    }
}
impl Drop for FsDataOutput {
    // Best-effort flush on drop; failures are logged rather than propagated
    // because Drop cannot return an error.
    fn drop(&mut self) {
        if let Err(ref desc) = self.flush() {
            error!("Oops, failed to flush {}, errmsg: {}", self.path, desc);
        }
        self.bytes_written = 0;
    }
}
// Marker impls: FsDataOutput opts into both output interfaces with the
// traits' provided methods.
impl DataOutput for FsDataOutput {}
impl KVDataOutput for FsDataOutput {}
|
/// Minimal named-person record used by the demo `main` below.
struct Person {
    name: &'static str,
    age: u8,
}

impl Person {
    /// Length of this person's name in bytes.
    // The original file contained a bare dangling `impl` keyword here, which
    // does not compile; it is completed into a proper `impl Person` block.
    fn name_len(&self) -> usize {
        self.name.len()
    }
}

fn main() {
    let my = Person { name: "Adam", age: 19 };
    println!("Length of my name is: {}", my.name_len());
}
|
use fancy_regex::Regex;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use rocket::response::status::NotFound;
use rocket::http::hyper::uri::Uri;
use rocket::serde::json::Json;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
lazy_static! {
    // Lookbehind/lookahead patterns (fancy_regex) that slice the JSON blobs
    // out of Steam profile / wishlist page source.
    pub static ref PROFILE_REGEX: Regex = Regex::new(r#"(?<=g_rgProfileData = )(.+)(?=;)"#).unwrap();
    pub static ref GAMES_REGEX: Regex = Regex::new(r#"(?<=var g_rgWishlistData = )(\[.+\])"#).unwrap();
    pub static ref TWITCH_SETTINGS: String = "some settings".to_string();
    // Twitch API credentials: resolving these panics at first use if the
    // environment variables are missing.
    pub static ref CLIENT_ID: String = std::env::var("CLIENT_ID").expect("no CLIENT_ID variable set");
    pub static ref BEARER_TOKEN: String = std::env::var("BEARER_TOKEN").expect("no BEARER_TOKEN variable set");
}
/// Receive a message from a form submission and broadcast it to any receivers.
#[get("/<steamUserName>")]
pub async fn get_streams_by_username(steamUserName: String) -> std::result::Result<Json<StreamData>, NotFound<String>> {
let streamdata = get_streams(steamUserName).await.unwrap();
Ok(Json(streamdata))
}
// scrapes profileid via username
// scrapes appids off of the username's profile
// gets gamename from steam's actual api via appid
/// For each game on the user's Steam wishlist, fetches the currently live
/// Twitch streams. Requests run sequentially, one game at a time.
async fn get_streams(username: String) -> Result<StreamData> {
    let client = reqwest::Client::new();
    let appids = steam::get_app_ids(username, &client).await?;
    let mut data = vec![];
    for appid in appids {
        let name = steam::get_gamename(appid, &client).await?;
        let twitchid = twitch::get_twitchid(&name, &client).await?;
        let streams = twitch::get_streams_by_id(twitchid, &client).await?;
        data.push(GameResult {
            name,
            streams
        });
    }
    Ok(StreamData {
        data
    })
}
// Twitch helix API client: resolves game names to category ids and lists
// live streams for a category.
mod twitch {
    use super::{Result, CLIENT_ID, BEARER_TOKEN, GameResult, GameStream};
    use reqwest::Client;
    use serde::{Serialize, Deserialize};
    // Percent-encodes a game name for use in a query string.
    fn encodeURI(initial: &str) -> String {
        let raw = rocket::http::RawStr::new(initial);
        let cow = raw.percent_encode();
        cow.to_string()
    }
    /// Resolves a game name to its Twitch category id.
    /// Panics (`expect`) when the response has no `data[0].id`.
    pub async fn get_twitchid(gamename: &str, client: &Client) -> Result<String> {
        //let client = reqwest::Client::new();
        let encoded = encodeURI(gamename);
        let url = format!("https://api.twitch.tv/helix/games?name={}", encoded);
        let json: serde_json::Value = client.get(url.to_string())
            .header("Client-ID", CLIENT_ID.clone())
            .header("Authorization", format!("Bearer {}", BEARER_TOKEN.clone()))
            .send()
            .await?
            .json()
            .await?;
        let id = serde_json::from_value::<String>(json["data"][0]["id"].clone()).expect("parseable json");
        Ok(id)
    }
    // Subset of the helix /streams payload this service cares about.
    #[derive(Serialize, Deserialize)]
    pub struct GetStreamsJson {
        user_name: String,
        viewer_count: i32,
        thumbnail_url: String,
        title: String,
    }
    /// Fetches current streams for a category id and converts them into
    /// `GameStream`s, expanding the thumbnail size template to 320x180.
    pub async fn get_streams_by_id(twitchid: String, client: &Client) -> Result<Vec<GameStream>> {
        let url = format!("https://api.twitch.tv/helix/streams?game_id={}", twitchid);
        let json: serde_json::Value = client.get(url)
            .header("Client-ID", CLIENT_ID.clone())
            .header("Authorization", format!("Bearer {}", BEARER_TOKEN.clone()))
            .send()
            .await?
            .json()
            .await?;
        let streamers: Vec<GetStreamsJson> = serde_json::from_value(json["data"].clone()).expect("parseable json");
        let gamestreams = streamers
            .into_iter()
            .map(|s| {
                let GetStreamsJson { user_name, viewer_count, thumbnail_url, title } = s;
                let thumbnail = thumbnail_url
                    .replace("{width}", "320")
                    .replace("{height}", "180");
                GameStream {
                    url: format!("https://www.twitch.tv/{}", user_name),
                    viewers: viewer_count,
                    thumbnail,
                    title
                }
            })
            .collect();
        Ok(gamestreams)
    }
    #[cfg(test)]
    mod twitch_tests {
        // NOTE(review): these tests hit the live Twitch API and require
        // CLIENT_ID / BEARER_TOKEN in the environment.
        use super::super::Result;
        use super::{get_twitchid, get_streams_by_id, encodeURI};
        #[tokio::test]
        async fn test_get_twitchid() -> Result<()> {
            let client = reqwest::Client::new();
            let gamename = "Terraria".to_string();
            let expected = "31376".to_string();
            let id = get_twitchid(&gamename, &client).await.unwrap();
            assert_eq!(id, expected);
            Ok(())
        }
        #[tokio::test]
        async fn test_get_streamsbyid() -> Result<()> {
            let client = reqwest::Client::new();
            let id = "31376".to_string();
            // `expected` is unused; the assertion only checks non-emptiness.
            let expected = 5;
            let streams = get_streams_by_id(id, &client).await.unwrap();
            assert!(!streams.is_empty());
            Ok(())
        }
        #[test]
        fn test_encodeuri_1() {
            let expected = "Remnant:%20From%20the%20Ashes".to_string();
            let initial = "Remnant: From the Ashes".to_string();
            let encoded = encodeURI(&initial);
            assert_eq!(encoded, expected);
        }
    }
}
// Steam scraping helpers: vanity name -> profile id -> wishlist appids ->
// game names (the last via the public appdetails API).
mod steam {
    use super::{Result, GAMES_REGEX, PROFILE_REGEX, Deserialize};
    use reqwest::{self, Client};
    /// Scrapes the numeric steamid out of a vanity profile page (the
    /// `g_rgProfileData` JSON blob embedded in the HTML).
    async fn get_profile_id(username: String, client: &Client) -> Result<String> {
        let html: String = client.get(format!("https://steamcommunity.com/id/{}/", username))
            .send()
            .await?
            .text()
            .await?;
        let pmatch = PROFILE_REGEX.find(&html)?;
        let html_string = pmatch.unwrap();
        let json: serde_json::Value = serde_json::from_str(html_string.as_str())?;
        let profileid: String = serde_json::from_value(json["steamid"].clone()).expect("steamid is still extractable from the page source");
        Ok(profileid)
    }
    /// Resolves a vanity username to the appids on that user's wishlist.
    pub async fn get_app_ids(username: String, client: &Client) -> Result<Vec<String>> {
        let profileid = get_profile_id(username, client).await?;
        let app_ids = scrape_app_ids(profileid, client).await?;
        Ok(app_ids)
    }
    // Only the appid field of each wishlist entry is needed.
    #[derive(Deserialize)]
    struct ExtractAppid {
        appid: i32
    }
    /// Extracts appids from the `g_rgWishlistData` blob on the wishlist page.
    async fn scrape_app_ids(profileid: String, client: &Client) -> Result<Vec<String>> {
        let steam_url = format!("https://store.steampowered.com/wishlist/profiles/{}", profileid);
        let html: String = client.get(steam_url)
            .send()
            .await?
            .text()
            .await?;
        let wlmatch = GAMES_REGEX.find(&html)?;
        let json_string = wlmatch.expect("regex matches");
        let appids: Vec<String> = serde_json::from_str::<Vec<ExtractAppid>>(json_string.as_str())?
            .into_iter()
            .map(|extracted| extracted.appid.to_string())
            .collect();
        Ok(appids)
    }
    /// Looks up a game's display name via the appdetails API, stripping
    /// non-ASCII characters (e.g. trademark symbols).
    pub async fn get_gamename(appid: String, client: &Client) -> Result<String> {
        let steam_url = format!("https://store.steampowered.com/api/appdetails/?appids={}", appid);
        let json: serde_json::Value = client.get(steam_url)
            .send()
            .await?
            .json()
            .await?;
        let name = serde_json::from_value::<String>(json[appid]["data"]["name"].clone())
            .expect("steam api hasnt changed")
            .replace(|c: char| !c.is_ascii(), "");
        Ok(name)
    }
    #[cfg(test)]
    mod steam_tests {
        // NOTE(review): these tests hit live Steam endpoints; they will break
        // if the example profile or the page markup changes.
        use reqwest::Client;
        use super::super::Result;
        use super::{get_app_ids, scrape_app_ids, get_profile_id, get_gamename};
        #[tokio::test]
        async fn test_get_profile_id() -> Result<()> {
            let client = Client::new();
            let username = "wishlist_example".to_string();
            let expected = "76561199053582024".to_string();
            let profile_id = get_profile_id(username, &client).await.unwrap();
            assert_eq!(profile_id, expected);
            Ok(())
        }
        #[tokio::test]
        async fn test_scrape_appids() -> Result<()> {
            let client = Client::new();
            let expected: Vec<String> = vec!["105600", "617290", "814000", "892970", "1100600"]
                .into_iter()
                .map(|a| a.to_string())
                .collect();
            let profileid = "76561199053582024".to_string();
            let appids = scrape_app_ids(profileid, &client).await.unwrap();
            assert_eq!(appids, expected);
            Ok(())
        }
        #[tokio::test]
        async fn test_get_appids() -> Result<()> {
            let client = Client::new();
            let username = "wishlist_example".to_string();
            let appids = get_app_ids(username, &client).await.unwrap();
            let expected: Vec<String> = vec!["105600", "617290", "814000", "892970", "1100600"]
                .into_iter()
                .map(|a| a.to_string())
                .collect();
            assert_eq!(appids, expected);
            Ok(())
        }
        #[tokio::test]
        async fn test_get_gamename() -> Result<()> {
            let client = reqwest::Client::new();
            let appid = "105600".to_string();
            let gamename = get_gamename(appid, &client).await.unwrap();
            let expected = "Terraria".to_string();
            assert_eq!(gamename, expected);
            Ok(())
        }
        // NOTE(review): no #[tokio::test] attribute, so this never runs as a
        // test — confirm whether that is intentional (it is a todo! stub).
        async fn test_fail_get_appids() -> Result<()> {
            // write some test for when the profile cant be found
            // * profile doesnt exist
            // * profile isnt public
            // * steam changes something
            todo!()
        }
    }
}
/// Top-level JSON payload returned to the client.
#[derive(Serialize)]
pub struct StreamData {
    data: Vec<GameResult>
}
// the value that will be displayed on an individual card
#[derive(Debug, Serialize)]
pub struct GameStream {
    url: String,
    viewers: i32,
    thumbnail: String,
    title: String
}
/// One wishlist game together with its currently live streams.
#[derive(Debug, Serialize)]
pub struct GameResult {
    name: String,
    streams: Vec<GameStream>
}
|
#[macro_use(values_t,value_t,crate_version,crate_authors)]
extern crate clap;
extern crate handlebars;
#[macro_use]
extern crate serde_json;
extern crate env_logger;
extern crate serde;
extern crate crossbeam;
use clap::{App, Arg,ArgMatches};
use handlebars::Handlebars;
use std::collections::btree_map::BTreeMap;
use std::error::Error;
use std::path::Path;
use std::{env, io};
//use std::borrow::Cow;
use std::fs::File;
use std::io::Read;
use std::thread;
use load_files::*;
mod load_files {
    use super::*;
    /// Parsed command-line settings.
    #[derive(Debug)]
    pub struct Settings {
        pub max_threads: u8,
        pub file: String,
    }
    // clap validator: accept the argument only when the path exists.
    fn has_file(file: String) -> Result<(), String> {
        if Path::new(&file).exists() {
            return Ok(());
        }
        // Fixed the garbled user-facing message ("The file notfound").
        Err(String::from("The file was not found"))
    }
    // Declares the CLI surface: a required <file> positional plus an
    // optional --max-threads=<number> flag.
    fn get_matches<'a>() -> ArgMatches<'a> {
        App::new("Load files CLI")
            .usage("MyApp [--max-threads = <number>] <file>")
            .bin_name("MyApp")
            .version(crate_version!())
            .author(crate_authors!())
            .about("Load files")
            .args(&[
                Arg::with_name("file")
                    .validator(has_file)
                    .required(true)
                    .help("Load files"),
                Arg::with_name("max-threads")
                    .long("max-threads")
                    .value_name("number")
                    .required(false)
                    .help("thread number"),
            ])
            .get_matches()
    }
    /// Parses argv into `Settings`; defaults to the "download" file name and
    /// 4 worker threads when not given / not parseable.
    pub fn new() -> Settings {
        let matches = get_matches();
        let file = matches.value_of("file").unwrap_or("download");
        let max_threads: u8 = value_t!(matches, "max-threads", u8).unwrap_or(4);
        Settings {
            file: file.to_string(),
            max_threads,
        }
    }
}
/// A single download job: just the URL to fetch.
#[derive(Debug)]
struct Task {
    url: String,
}

impl Task {
    /// Wraps a URL string in a `Task`.
    fn new(url: String) -> Self {
        Self { url }
    }
}
/// Reads the URL list named by the CLI settings and echoes each URL from a
/// scoped worker thread.
fn main() -> Result<(), Box<dyn std::error::Error + 'static>> {
    let settings: Settings = load_files::new();
    println!("{:?} {:?}",
             settings.file,
             settings.max_threads);
    let s: String = std::fs::read_to_string(settings.file)?;
    let mut v: Vec<Task> = vec![];
    for url in s.lines() {
        v.push(Task::new(url.to_string()));
        // `url` is a Copy-able `&str` borrowed from `s`; the scoped thread
        // may use it directly (the previous `url.clone()` was a no-op clone
        // of the reference).
        crossbeam::scope(|scope_| {
            scope_.spawn(move || {
                // load url and create file number thread
                println!("{}", url);
            });
        });
    }
    // The original `for url in v {}` loop moved `v` and made the following
    // println! a use-after-move compile error; it did nothing and is removed.
    println!("{:?}", v);
    Ok(())
}
|
use crate::ast::expressions;
/// Logs the freshly-built expression, then boxes it as a trait object.
/// Always returns `Some` — presumably the `Option` wrapper matches what the
/// parser rule callbacks expect elsewhere; confirm against the parser.
pub fn some_expression<E: expressions::Expression + 'static>(
    expression: E,
) -> Option<Box<dyn expressions::Expression>> {
    log_debug!("Made expression: {:?}", expression);
    Some(Box::new(expression))
}
#[macro_export]
macro_rules! make_keyword_rule {
    // Generates a parser-rule function `$fn_name`; each ($keyword, $output)
    // pair maps a keyword token pattern to the AST node pushed on match.
    [$fn_name: ident, $(($keyword: pat, $output: expr)),+] => {
        pub fn $fn_name(parser: &mut parser::Parser, stack: &mut stack::Stack) -> bool {
            match parser.peek().cloned() {
                // On a matching keyword: consume the token, push the node,
                // and report success to the caller.
                $(Some(tokens::Token { token: tokens::TokenType::Keyword($keyword), ..}) => {
                    parser.shift();
                    stack.push_single(Box::new($output));
                    true
                })+,
                _ => false
            }
        }
    };
}
|
// Copyright 2014-2015 The GeoRust Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Examples
//!
//! ## Reading
//!
//! ```
//! use geojson::GeoJson;
//!
//! let geojson_str = r#"
//! {
//! "type": "Feature",
//! "properties": {
//! "name": "Firestone Grill"
//! },
//! "geometry": {
//! "type": "Point",
//! "coordinates": [-120.66029,35.2812]
//! }
//! }
//! "#;
//!
//! let geojson = geojson_str.parse::<GeoJson>().unwrap();
//! ```
//!
//! ## Writing
//!
//! ```ignore,rust
//! use std::collections::HashMap;
//! use rustc_serialize::json::ToJson;
//! use geojson::{Feature, GeoJson, Geometry, Value};
//!
//! let geometry = Geometry::new(
//! Value::Point(vec![-120.66029,35.2812])
//! );
//!
//! let mut properties = HashMap::new();
//! properties.insert(
//! String::from("name"),
//! "Firestone Grill".to_json(),
//! );
//!
//! let geojson = GeoJson::Feature(Feature {
//! crs: None,
//! bbox: None,
//! geometry: geometry,
//! id: None,
//! properties: Some(properties),
//! });
//!
//! let geojson_string = geojson.to_string();
//! ```
extern crate rustc_serialize;
use rustc_serialize::json;
/// Bounding Boxes
///
/// [GeoJSON Format Specification § 4]
/// (http://geojson.org/geojson-spec.html#bounding-boxes)
pub type Bbox = Vec<f64>;
/// Positions
///
/// [GeoJSON Format Specification § 2.1.1]
/// (http://geojson.org/geojson-spec.html#positions)
pub type Position = Vec<f64>;
/// A single coordinate position (alias of [`Position`]).
pub type PointType = Position;
/// An ordered list of positions.
pub type LineStringType = Vec<Position>;
/// A list of linear rings (see the GeoJSON polygon definition linked above).
pub type PolygonType = Vec<Vec<Position>>;
#[macro_use]
mod macros;
mod util;
mod crs;
pub use crs::Crs;
mod geojson;
pub use geojson::GeoJson;
mod geometry;
pub use geometry::{Geometry, Value};
mod feature;
pub use feature::Feature;
mod feature_collection;
pub use feature_collection::FeatureCollection;
/// Error when reading a GeoJSON object from a str or Object
#[derive(Debug)]
pub struct Error {
    pub desc: &'static str,
}

impl Error {
    /// Wraps a static description string in an `Error`.
    pub fn new(desc: &'static str) -> Error {
        Error { desc }
    }
}
// Internal conversion trait: build a GeoJSON type from a decoded JSON object.
trait FromObject: Sized {
    fn from_object(object: &json::Object) -> Result<Self, Error>;
}
|
/// Buckets `x` into one of three ranges; extracted from `main` so the branch
/// logic is testable.
fn classify(x: i32) -> &'static str {
    if x > 9 {
        "bigger than 9"
    } else if x > 3 {
        "range (3-9]"
    } else {
        // Fixed typo in the message: "that" -> "than".
        "less-or-eq than 3"
    }
}

fn main() {
    let x = 9;
    // Prints the same "<label>: <value>" line the original produced.
    println!("{}: {}", classify(x), x);
}
|
extern crate libc;
extern crate pam;
use std::os::raw::{c_char, c_int};
use std::ptr;
use std::mem;
use pam::{constants, module};
use pam::constants::*;
use pam::module::{PamHandleT, PamItem, PamResult};
use std::marker::{PhantomData};
#[link(name = "pam")]
extern {
    // FFI binding to libpam's pam_get_item(3): writes the requested item
    // pointer through `item` and returns a PAM status code.
    fn pam_get_item(pamh: *const PamHandleT,
                    item_type: PamItemType,
                    item: &mut *const PamItemT,
                    ) -> PamResultCode;
}
// Opaque (uninhabited) type for item payload pointers; never constructed in Rust.
#[allow(missing_copy_implementations)]
enum PamItemT {}
/// Fetches the PAM_SERVICE item for this handle, returning the raw service
/// pointer wrapped in `Service`, or the PAM error code on failure.
fn get_service(pamh: &PamHandleT) -> PamResult<Service> {
    let mut ptr: *const PamItemT = ptr::null();
    let (res, item) = unsafe {
        let r = pam_get_item(pamh, constants::PAM_SERVICE, &mut ptr);
        // Reinterprets the item pointer as `Service` (a #[repr(C)] newtype
        // over *const c_char). NOTE(review): assumes libpam hands back a
        // service-name C string for PAM_SERVICE — confirm against pam docs.
        let service: Service = mem::transmute(ptr);
        (r, service)
    };
    if constants::PAM_SUCCESS == res { Ok(item) } else { Err(res) }
}
#[allow(unused_variables)]
#[no_mangle]
/// PAM authentication entry point, called by libpam by symbol name.
/// Denies when the service pointer is null or when `is_outlier` rejects the
/// user/service pair; otherwise reports success.
/// NOTE(review): `expect` here aborts the host process on missing user or
/// service — confirm that panicking inside a PAM module is acceptable.
pub unsafe extern fn pam_sm_authenticate(pamh: &module::PamHandleT, flags: PamFlag,
                                         argc: c_int, argv: *const *const c_char
                                         ) -> PamResultCode {
    let user = module::get_user(pamh, None).expect("No user");
    let service: Service = get_service(pamh).expect("No service");
    if service.0.is_null() {
        println!("NPE");
        constants::PAM_PERM_DENIED
    } else if is_outlier(user, service) {
        println!("Outlier detected");
        constants::PAM_PERM_DENIED
    } else {
        println!("Recognised.");
        constants::PAM_SUCCESS
    }
}
/// Returns true unless the pair is exactly user "tom" via service
/// "test-outlier" — i.e. only that single combination is recognised.
///
/// # Safety
/// `service.0` must be a valid, NUL-terminated C string (the caller checks
/// for null before invoking this).
unsafe fn is_outlier(user: String, service: Service) -> bool {
    !(user == "tom" &&
        std::ffi::CStr::from_ptr(service.0).to_string_lossy().into_owned() == "test-outlier")
}
/// The PAM_SERVICE item: a borrowed C-string pointer owned by libpam.
#[repr(C)]
struct Service(*const c_char);
impl PamItem for Service {
    /// Maps this wrapper type to the PAM_SERVICE item constant.
    fn item_type(_: PhantomData<Self>) -> PamItemType { PAM_SERVICE }
}
#![feature(core)]
/*
* Datetime.rs
*
* Copyright 2015 Wiserlix <wiserlix@wiserlix.oa.to>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
*
*/
use std::fmt;
extern crate time;
/// Supported timezones: Korea Standard Time (UTC+9) and UTC.
pub enum Timezone {
    KST,
    UTC,
}
/// Calendar date plus derived week-of-year and weekday.
/// `wday` is 0 = Sunday (see the weekday table in `print_datetime`).
pub struct Date {
    pub year:isize,
    pub month:usize,
    pub day:isize,
    pub week:isize,
    pub wday:usize}
/// Wall-clock time; `timezone` is private and fixed at construction.
pub struct Time {
    timezone:Timezone,
    pub hour:isize,
    pub minute:isize,
    pub second:isize}
/// A date and time pair.
pub struct Datetime{
    pub date:Date,
    pub time:Time
}
/// Builds a `Datetime`, deriving week-of-year and weekday from the date.
pub fn new(y: isize, m: usize, d: isize, h: isize, mi: isize, s: isize, tz: Timezone) -> Datetime {
    Datetime {
        date: Date { year: y, month: m, day: d, week: getweek(y, m, d), wday: getwday(y, m, d) },
        time: Time { hour: h, minute: mi, second: s, timezone: tz },
    }
}
/// Counts Gregorian leap days from year 1 through `year`
/// (multiples of 4, minus centuries, plus multiples of 400).
pub fn countsy(year: isize) -> isize {
    year / 4 - year / 100 + year / 400
}
/// Leap days accumulated between 1970 and `year`.
pub fn countsy_from1970(year: isize) -> isize {
    countsy(year) - countsy(1970)
}
/// Converts a Unix timestamp (seconds since 1970) into a `Datetime` in `tz`.
///
/// NOTE(review): years are first approximated as 365 days and then corrected
/// by subtracting accumulated leap days, and the KST offset (+9h) is applied
/// after the year was already chosen — results near year boundaries may be
/// off; verify edge dates before relying on them.
pub fn convert (utime:isize,tz:Timezone)->Datetime{
    let mut mday = [31,28,31,30,31,30,31,31,30,31,30,31];
    let years = utime / (365*24*60*60) + 1970;
    let mut ymod = utime%(365*24*60*60);
    // Remove the leap days accumulated since 1970 from the in-year remainder.
    ymod -= countsy_from1970(years)*24*60*60;
    match tz {
        Timezone::UTC => {},
        Timezone::KST => {ymod += 9*60*60;}
    }
    let mut months = 0;
    if issy(years) {mday[1]=29;} else {mday[1]=28;}
    // Walk month lengths until the remainder fits inside one month.
    while ymod >= mday[months]*60*60*24 {
        ymod -= mday[months]*60*60*24;
        months += 1;
    }
    months += 1;
    let days = ymod/(60*60*24)+1;
    let dmod = ymod % (60*60*24);
    let hours = dmod/(60*60);
    let hmod = dmod % (60*60);
    let minutes = hmod / 60;
    let seconds = hmod % 60;
    let woday = getwday(years,months,days);
    let weeks = getweek(years,months,days);
    return Datetime{date:Date{year:years,month:months,day:days,week:weeks,wday:woday},time:Time{timezone:tz,hour:hours,minute:minutes,second:seconds}};
}
/// Gregorian leap-year test: divisible by 4, except centuries
/// not divisible by 400.
pub fn issy(year: isize) -> bool {
    year % 400 == 0 || (year % 4 == 0 && year % 100 != 0)
}
/// Day of week (0 = Sunday) via Sakamoto's algorithm.
pub fn getwday(year:isize,month:usize,day:isize)->usize{
    // Month offsets for Sakamoto's day-of-week algorithm.
    let t = [0,3,2,5,0,3,5,1,4,6,2,4];
    // January/February are treated as months of the previous year.
    let y = if month < 3 {year-1} else {year};
    let dow = (y + y/4 - y/100 + y/400 + t[month-1] + day) % 7;
    // `std::num::from_int` (used originally) was removed from std; keep its
    // behaviour: a negative result (possible only for negative years, where
    // `%` is negative in Rust) falls back to 0.
    if dow >= 0 { dow as usize } else { 0 }
}
/// 1-based week-of-year computed from the day-of-year count.
pub fn getweek(year: isize, month: usize, day: isize) -> isize {
    let mut mday = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    mday[1] = if issy(year) { 29 } else { 28 };
    // Days in the months already completed this year, plus the day-of-month.
    let days_before: isize = mday[..month - 1].iter().sum();
    (days_before + day) / 7 + 1
}
/// Converts a `Datetime` back into a Unix timestamp (seconds).
///
/// NOTE(review): mirrors `convert`'s 365-day-year plus leap-day-correction
/// scheme, applying KST as a flat -9h; verify round trips near year
/// boundaries before relying on them.
pub fn to_utime(dt:Datetime)->isize{
    let mut mday = [31,28,31,30,31,30,31,31,30,31,30,31];
    let mut ans = 0;
    // Whole years since 1970, counted as 365-day years.
    ans += (dt.date.year*365*24*60*60)-(1970*365*24*60*60);
    let mut i=0;
    if issy(dt.date.year){mday[1]=29;}else{mday[1]=28;}
    // Seconds contributed by the months already completed this year.
    while i < dt.date.month-1 {
        ans += mday[i]*24*60*60;
        i+=1;
    }
    // Leap-day correction for the years since 1970.
    ans += countsy_from1970(dt.date.year)*24*60*60;
    ans += (dt.date.day-1)*24*60*60;
    ans += dt.time.hour*60*60;
    match dt.time.timezone{
        Timezone::KST => {ans -= 9*60*60;},
        Timezone::UTC => {}
    }
    ans += dt.time.minute*60;
    ans += dt.time.second;
    ans
}
/// Prints a human-readable two-line rendering of `dt` to stdout.
pub fn print_datetime(dt: &Datetime) {
    // Weekday names indexed by `wday` (0 = Sunday).
    const WOD: [&str; 7] = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
    let (d, t) = (&dt.date, &dt.time);
    println!("Year:{0},Month:{1},Day:{2} Week{3} {4}", d.year, d.month, d.day, d.week, WOD[d.wday]);
    println!("Hour:{0},Minute:{1},Second:{2} {3}", t.hour, t.minute, t.second, t.timezone);
}
pub fn get_time()->isize{
let a = time::get_time().sec;
let b = std::num::from_i64(a);
match b {
Some(x)=>x,
None=>{panic!("ERR");}
}
}
impl fmt::Display for Timezone {
    /// Writes the timezone abbreviation ("UTC" or "KST").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match *self {
            Timezone::UTC => "UTC",
            Timezone::KST => "KST",
        };
        write!(f, "{}", name)
    }
}
|
use std::string::ParseError;
use std::str::FromStr;
/// Severity of being caught by one scanner: depth * range.
type Severity = usize;
/// A scanner's layer depth; also the time step at which the packet reaches it.
type Depth = usize;
/// Height of a scanner's column.
type Range = usize;
/// Elapsed time steps.
type Time = usize;
/// The whole firewall: one scanner per populated layer.
type Trip = Vec<Scanner>;
/// Entry point: parses the embedded puzzle input and prints both answers.
fn main() {
    let trip = get_trip(include_str!("input.txt"));
    println!("Answer #1: {:?}", trip_severity(&trip, 0));
    println!("Answer #2: {:?}", shortest_delay(&trip));
}
/// Smallest start delay for which the packet crosses without being caught.
fn shortest_delay(trip: &Trip) -> Time {
    // Search delays 0, 1, 2, ... until one produces a clean trip;
    // the iterator is unbounded, so `find` only returns `Some`.
    (0..).find(|&delay| is_clean_trip(trip, delay)).unwrap()
}
/// Total severity accumulated over all scanners that catch the packet
/// when it departs after `offset` time steps.
fn trip_severity(trip: &Trip, offset: Time) -> Severity {
    trip.iter()
        .filter_map(|scanner| scanner.caught_at(scanner.0 + offset))
        .sum()
}
/// True when a packet departing after `offset` steps is never caught.
fn is_clean_trip(trip: &Trip, offset: Time) -> bool {
    // `.is_none()` instead of comparing against `None` (clippy idiom).
    trip.iter().all(|s| s.caught_at(s.0 + offset).is_none())
}
fn get_trip(input: &str) -> Trip {
input
.lines()
.map(|x| x.parse::<Scanner>().unwrap())
.collect()
}
/// A firewall scanner at depth `.0` sweeping a column of range `.1`.
#[derive(Debug)]
struct Scanner(Depth, Range);
impl Scanner {
    /// Severity if the packet occupies this layer at `time`, else `None`.
    /// The scanner is at the top whenever `time` is a multiple of the full
    /// back-and-forth period `2 * (range - 1)`.
    fn caught_at(&self, time: Time) -> Option<Severity> {
        // A range-1 scanner never leaves the top row, so it always catches
        // the packet. (The original divided by zero here.)
        if self.1 <= 1 {
            return Some(self.0 * self.1);
        }
        let full_range = (2 * self.1) - 2;
        match time % full_range {
            0 => Some(self.0 * self.1),
            _ => None,
        }
    }
}
impl FromStr for Scanner {
    type Err = ParseError;
    /// Parses a "`depth`: `range`" line.
    ///
    /// `unwrap` keeps the original panic-on-malformed-input behaviour; the
    /// declared `ParseError` error is never actually produced.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split(": ");
        // `.parse()` is the idiomatic base-10 form of `from_str_radix(_, 10)`.
        let depth = parts.next().unwrap().parse().unwrap();
        let range = parts.next().unwrap().parse().unwrap();
        Ok(Scanner(depth, range))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Exercises both parts against the sample firewall from the puzzle text.
    #[test]
    fn test_severity() {
        let input = include_str!("test_input.txt");
        let trip = get_trip(input);
        assert_eq!(24, trip_severity(&trip, 0));
        assert_eq!(10, shortest_delay(&trip));
    }
}
|
use std::cmp;
use std::env;
use std::fmt;
use std::fs;
/// One cell of the seating grid.
#[derive(Clone, Copy, PartialEq)]
enum Position {
    Floor,
    Empty,
    Occupied,
}
impl fmt::Debug for Position {
    /// Renders the cell with the same glyphs the puzzle input uses.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let glyph = match self {
            Position::Floor => ".",
            Position::Empty => "L",
            Position::Occupied => "#",
        };
        f.write_str(glyph)
    }
}
/// Maps an input character to its grid cell.
///
/// # Panics
/// On any character other than '.', 'L', or '#'.
fn c2p(c: char) -> Position {
    match c {
        '.' => Position::Floor,
        'L' => Position::Empty,
        '#' => Position::Occupied,
        _ => unreachable!(),
    }
}
/// Row-major seating grid.
type Map = Vec<Vec<Position>>;
/// Parses the puzzle text into a row-major grid of cells.
fn make_map(input: &str) -> Map {
    input
        .lines()
        .map(|row| row.chars().map(c2p).collect())
        .collect()
}
/// Counts occupied seats in the 3x3 neighbourhood centred on (i, j) —
/// including (i, j) itself — clamped at the grid edges.
fn count_occupied(i: usize, j: usize, input: &Map) -> usize {
    let mut occupied = 0;
    for x in i.saturating_sub(1)..cmp::min(i + 2, input.len()) {
        let row = &input[x];
        for y in j.saturating_sub(1)..cmp::min(j + 2, row.len()) {
            if row[y] == Position::Occupied {
                occupied += 1;
            }
        }
    }
    occupied
}
/// Computes one cell's next state under the part-1 rules.
fn it(i: usize, j: usize, pos: Position, input: &Map) -> Position {
    match pos {
        Position::Floor => Position::Floor,
        // An empty seat fills only when nothing around it is occupied.
        Position::Empty if count_occupied(i, j, input) == 0 => Position::Occupied,
        Position::Empty => Position::Empty,
        // Threshold is > 4 rather than >= 4 because the neighbourhood count
        // includes this (occupied) seat itself.
        Position::Occupied if count_occupied(i, j, input) > 4 => Position::Empty,
        Position::Occupied => Position::Occupied,
    }
}
fn iterate(input: &Map) -> Map {
input
.iter()
.enumerate()
.map(|(i, row)| {
row.iter()
.enumerate()
.map(|(j, pos)| it(i, j, *pos, &input))
.collect()
})
.collect()
}
/// Iterates the map until it reaches a fixed point, then returns the number
/// of occupied seats in the stable configuration.
fn stable(input: &mut Map) -> usize {
    loop {
        let next = iterate(input);
        if next == *input {
            // Fixed point reached: tally occupied seats across all rows.
            return next
                .iter()
                .flatten()
                .filter(|&&pos| pos == Position::Occupied)
                .count();
        }
        *input = next;
    }
}
/// Entry point: reads the map file named by the first CLI argument and
/// prints the stable occupied-seat count.
fn main() {
    let args: Vec<String> = env::args().collect();
    let map_source = fs::read_to_string(&args[1]).unwrap();
    let mut map = make_map(&map_source);
    println!("{:?}", stable(&mut map));
}
|
/*
* Copyright (C) 2019-2022 TON Labs. All Rights Reserved.
*
* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use
* this file except in compliance with the License.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific TON DEV software governing permissions and
* limitations under the License.
*/
extern crate sha2;
extern crate num_bigint;
extern crate hex;
#[cfg(test)]
#[macro_use]
extern crate pretty_assertions;
extern crate ton_block;
extern crate ton_types;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate ed25519;
extern crate ed25519_dalek;
extern crate base64;
extern crate chrono;
extern crate failure;
extern crate num_traits;
pub mod contract;
pub mod function;
pub mod event;
pub mod int;
pub mod param;
pub mod param_type;
pub mod token;
pub mod json_abi;
pub mod error;
pub use param_type::ParamType;
pub use contract::{Contract, DataItem};
pub use token::{Token, TokenValue};
pub use function::Function;
pub use event::Event;
pub use json_abi::*;
pub use param::Param;
pub use int::{Int, Uint};
pub use error::*;
#[cfg(test)]
extern crate rand;
extern crate byteorder;
include!("../common/src/info.rs");
|
#[doc = "Reader of register IC_RXFLR"]
pub type R = crate::R<u32, super::IC_RXFLR>;
#[doc = "Reader of field `RXFLR`"]
pub type RXFLR_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:4 - Receive FIFO Level. Contains the number of valid data entries in the receive FIFO.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn rxflr(&self) -> RXFLR_R {
        // Mask the low five bits (0:4) of the raw register value.
        RXFLR_R::new((self.bits & 0x1f) as u8)
    }
}
|
use proc_macro_hack::proc_macro_hack;
#[proc_macro_hack]
pub use dotenv_codegen_implementation::{dotenv, dotenv_or_default};
|
use quote::{quote_spanned, ToTokens};
use super::{
FlowProperties, FlowPropertyVal, OperatorCategory, OperatorConstraints, OperatorWriteOutput,
WriteContextArgs, RANGE_0, RANGE_1, RANGE_ANY,
};
/// > 1 input stream, *n* output streams
///
/// Takes the input stream and delivers a copy of each item to each output.
/// > Note: Downstream operators may need explicit type annotations.
///
/// ```hydroflow
/// my_tee = source_iter(vec!["Hello", "World"]) -> tee();
/// my_tee -> map(|x: &str| x.to_uppercase()) -> assert_eq(["HELLO", "WORLD"]);
/// my_tee -> map(|x: &str| x.to_lowercase()) -> assert_eq(["hello", "world"]);
/// my_tee -> assert_eq(["Hello", "World"]);
/// ```
pub const TEE: OperatorConstraints = OperatorConstraints {
    name: "tee",
    categories: &[OperatorCategory::MultiOut],
    // Exactly one input; any number of outputs, but fewer than two outputs
    // triggers a soft (advisory) range violation.
    hard_range_inn: RANGE_1,
    soft_range_inn: RANGE_1,
    hard_range_out: RANGE_ANY,
    soft_range_out: &(2..),
    num_args: 0,
    persistence_args: RANGE_0,
    type_args: RANGE_0,
    is_external_input: false,
    ports_inn: None,
    ports_out: None,
    properties: FlowProperties {
        deterministic: FlowPropertyVal::Preserve,
        monotonic: FlowPropertyVal::Preserve,
        inconsistency_tainted: false,
    },
    input_delaytype_fn: |_| None,
    write_fn: |&WriteContextArgs {
                   root,
                   op_span,
                   ident,
                   inputs,
                   outputs,
                   is_pull,
                   ..
               },
               _| {
        let write_iterator = if !is_pull {
            // Push side: fold the outputs right-to-left into a binary tree of
            // `Tee` pusherators; with zero outputs, degrade to a drop sink.
            let tees = outputs
                .iter()
                .rev()
                .map(|i| i.to_token_stream())
                .reduce(|b, a| quote_spanned! {op_span=> #root::pusherator::tee::Tee::new(#a, #b) })
                .unwrap_or_else(
                    || quote_spanned! {op_span=> #root::pusherator::for_each::ForEach::new(std::mem::drop) },
                );
            quote_spanned! {op_span=>
                let #ident = #tees;
            }
        } else {
            // Pull side: a tee of one input is just the identity.
            assert_eq!(1, inputs.len());
            let input = &inputs[0];
            quote_spanned! {op_span=>
                let #ident = #input;
            }
        };
        Ok(OperatorWriteOutput {
            write_iterator,
            ..Default::default()
        })
    },
};
|
/// Encodes `x` as an unsigned LEB128 varint into `buffer` and returns the
/// number of bytes written.
///
/// # Panics
/// Panics if the buffer is too small to hold the encoding.
pub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {
    let out = buffer.as_mut();
    let mut remaining = x;
    let mut written = 0;
    // Emit 7 bits per byte, least-significant group first; the high bit of
    // each byte flags that more bytes follow.
    while remaining >= 0x80 {
        out[written] = (remaining as u8) | 0x80;
        remaining >>= 7;
        written += 1;
    }
    out[written] = remaining as u8;
    written + 1
}
#[cfg(test)]
mod test {
    /// 100_500 encodes to three bytes [0x94, 0x91, 0x06]; the remainder of
    /// the buffer must be left untouched.
    #[test]
    fn test_put_uvarint() {
        let expected = [148u8, 145, 6, 0, 0, 0, 0, 0, 0, 0];
        let mut buffer = [0u8; 10];
        let actual = super::put_uvarint(&mut buffer[..], 100_500);
        assert_eq!(actual, 3);
        assert_eq!(buffer, expected);
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Paged collection of KQL script resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KqlScriptsResourceCollectionResponse {
    /// The current page of results.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<KqlScriptResource>,
    /// Continuation link for the next page, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// An ARM-style resource envelope around a KQL script.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KqlScriptResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // Serialized as "type" — `type` is a Rust keyword.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<KqlScript>,
}
/// Properties of a KQL script resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KqlScript {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<kql_script::Content>,
}
/// Nested payload types for [`KqlScript`].
pub mod kql_script {
    use super::*;
    /// The script body plus its metadata and active connection.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Content {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub query: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub metadata: Option<content::Metadata>,
        #[serde(rename = "currentConnection", default, skip_serializing_if = "Option::is_none")]
        pub current_connection: Option<content::CurrentConnection>,
    }
    /// Nested payload types for [`Content`].
    pub mod content {
        use super::*;
        /// Script metadata (e.g. its language).
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct Metadata {
            #[serde(default, skip_serializing_if = "Option::is_none")]
            pub language: Option<String>,
        }
        /// The connection the script currently targets.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct CurrentConnection {
            #[serde(default, skip_serializing_if = "Option::is_none")]
            pub name: Option<String>,
            #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
            pub type_: Option<String>,
        }
    }
}
/// Top-level error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorContract {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Service error body: code/message/target plus nested detail errors.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    // Recursive: each detail is itself a full error response.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorResponse>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// Free-form supplemental error information (type tag plus arbitrary JSON).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
/// Request body for renaming an artifact.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ArtifactRenameRequest {
    #[serde(rename = "newName", default, skip_serializing_if = "Option::is_none")]
    pub new_name: Option<String>,
}
|
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde;
use std::{
collections::{BTreeSet, HashMap},
sync::Arc,
};
use anyhow::Result;
use secstr::SecUtf8;
use std::sync::Mutex;
use strum::EnumString;
pub mod accounts;
pub mod alerts;
pub mod options;
pub mod orders;
mod session;
pub mod transactions;
#[cfg(all(feature = "keychain", target_os = "linux"))]
mod linux;
#[cfg(all(feature = "keychain", target_os = "linux"))]
pub use linux::KeychainStore;
#[cfg(all(feature = "keychain", target_os = "macos"))]
mod macos;
#[cfg(all(feature = "keychain", target_os = "macos"))]
pub use macos::KeychainStore;
#[cfg(all(feature = "keychain", target_os = "windows"))]
mod windows;
#[cfg(all(feature = "keychain", target_os = "windows"))]
pub use windows::KeychainStore;
pub use accounts::Api as Accounts;
pub use session::CallbackProvider;
pub use session::Session;
pub use session::OOB;
// The sandbox url to use as base url for the etrade api
const SANDBOX_URL: &str = "https://apisb.etrade.com";
// The production url to use as base url for the etrade api
const LIVE_URL: &str = "https://api.etrade.com";
/// Serializes `params` into a sorted set of query-string pairs, or `None`
/// when serialization yields no pairs at all.
fn qs_params<'a, T: serde::Serialize + serde::Deserialize<'a>>(
    params: &T,
) -> Result<Option<BTreeSet<(String, String)>>> {
    // Round-trip through the urlencoded form to flatten the struct into
    // (key, value) pairs.
    let encoded = serde_urlencoded::to_string(params)?;
    let pairs: BTreeSet<(String, String)> = serde_urlencoded::from_str(&encoded)?;
    Ok(if pairs.is_empty() { None } else { Some(pairs) })
}
/// Placeholder body for API requests that carry no payload.
fn empty_body() -> Option<()> {
    None
}
/// Selects between the sandbox and production etrade API base URLs.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString, strum::Display)]
pub enum Mode {
    Sandbox,
    Live,
}
/// Wrapper for the list of status messages attached to an API response.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct Messages {
    #[serde(rename = "Message", skip_serializing_if = "Vec::is_empty")]
    pub message: Vec<Message>,
}
impl Messages {
    /// True when the response carried no messages.
    pub fn is_empty(&self) -> bool {
        self.message.is_empty()
    }
}
/// A single status/diagnostic message from the API.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct Message {
    pub description: String,
    pub code: i32,
    // Serialized as "type" — `type` is a Rust keyword.
    #[serde(rename = "type")]
    pub tpe: MessageType,
}
/// Category of an API [`Message`].
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum MessageType {
    #[serde(rename = "WARNING")]
    Warning,
    #[serde(rename = "INFO")]
    Info,
    #[serde(rename = "INFO_HOLD")]
    InfoHold,
    #[serde(rename = "ERROR")]
    Error,
}
impl Default for MessageType {
    /// Messages without an explicit type are treated as informational.
    fn default() -> Self {
        MessageType::Info
    }
}
/// A tradable instrument; the expiry/strike fields apply to options.
#[derive(Debug, Deserialize, Serialize, Default, Clone)]
#[serde(rename_all = "camelCase", default)]
pub struct Product {
    pub symbol: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub security_type: Option<SecurityType>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub security_sub_type: Option<String>,
    pub call_put: String,
    pub expiry_year: i32,
    pub expiry_month: i32,
    pub expiry_day: i32,
    pub strike_price: f64,
    pub expiry_type: String,
}
/// Instrument class: equity, option, mutual fund, or money-market fund.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
pub enum SecurityType {
    #[serde(rename = "EQ")]
    Eq,
    #[serde(rename = "OPTN")]
    Optn,
    #[serde(rename = "MF")]
    Mf,
    #[serde(rename = "MMF")]
    Mmf,
}
/// Regular vs extended-hours trading session.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum MarketSession {
    #[serde(rename = "REGULAR")]
    Regular,
    #[serde(rename = "EXTENDED")]
    Extended,
}
/// Option side: call or put.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum OptionType {
    #[serde(rename = "CALL")]
    Call,
    #[serde(rename = "PUT")]
    Put,
}
/// Sort direction for list endpoints.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, EnumString)]
#[strum(serialize_all = "lowercase")]
pub enum SortOrder {
    #[serde(rename = "ASC")]
    Asc,
    #[serde(rename = "DESC")]
    Desc,
}
/// OAuth consumer key/secret pair, held in memory-scrubbing wrappers.
#[derive(Debug, Clone)]
pub struct Credentials {
    pub key: SecUtf8,
    pub secret: SecUtf8,
}
impl Credentials {
    /// Bundles an OAuth key and secret.
    pub fn new(key: SecUtf8, secret: SecUtf8) -> Credentials {
        Credentials { key, secret }
    }
}
// NOTE(review): implement `From` rather than `Into` (clippy::from_over_into);
// the blanket `impl<T, U: From<T>> Into<U> for T` keeps every existing
// `.into()` call site working unchanged.
impl From<Credentials> for oauth::Credentials {
    /// Unwraps the scrubbed strings into plain OAuth credentials.
    fn from(creds: Credentials) -> oauth::Credentials {
        oauth::Credentials::new(creds.key.into_unsecure(), creds.secret.into_unsecure())
    }
}
/// Wraps OAuth credentials back into scrubbed storage.
impl<T> From<oauth::Credentials<T>> for Credentials
where
    T: Into<SecUtf8>,
{
    fn from(input: oauth::Credentials<T>) -> Self {
        Credentials {
            key: input.identifier.into(),
            secret: input.secret.into(),
        }
    }
}
/// Pluggable secret storage keyed by (namespace, key).
pub trait Store {
    /// Stores `value`, replacing any existing entry for the pair.
    fn put(
        &self,
        namespace: impl Into<String> + Send,
        key: impl Into<String> + Send,
        value: impl Into<SecUtf8> + Send,
    ) -> Result<()>;
    /// Removes an entry; deleting a missing entry is not an error.
    fn del(&self, namespace: impl AsRef<str> + Send, key: impl AsRef<str> + Send) -> Result<()>;
    /// Retrieves a stored secret, if present.
    fn get(&self, namespace: impl AsRef<str> + Send, key: impl AsRef<str> + Send) -> Result<Option<SecUtf8>>;
}
/// In-memory, thread-safe `Store` implementation.
#[derive(Debug)]
pub struct Memstore {
    // namespace -> (key -> secret)
    data: Arc<Mutex<HashMap<String, HashMap<String, SecUtf8>>>>,
}
impl Memstore {
    /// Creates an empty store.
    pub fn new() -> Self {
        Memstore {
            data: Arc::new(Mutex::new(HashMap::new())),
        }
    }
}
impl Default for Memstore {
    /// Equivalent to [`Memstore::new`].
    fn default() -> Self {
        Memstore::new()
    }
}
impl Store for Memstore {
fn put(
&self,
namespace: impl Into<String> + Send,
key: impl Into<String> + Send,
value: impl Into<SecUtf8> + Send,
) -> Result<()> {
let mut data = self.data.lock().unwrap();
let svc_state = data.entry(namespace.into()).or_insert_with(HashMap::new);
svc_state.insert(key.into(), value.into());
Ok(())
}
fn del(&self, namespace: impl AsRef<str> + Send, key: impl AsRef<str> + Send) -> Result<()> {
let mut data = self.data.lock().unwrap();
if let Some(st) = data.get_mut(namespace.as_ref()) {
st.remove(key.as_ref());
}
Ok(())
}
fn get(&self, namespace: impl AsRef<str> + Send, key: impl AsRef<str> + Send) -> Result<Option<SecUtf8>> {
let data = self.data.lock().unwrap();
Ok(data.get(namespace.as_ref()).and_then(|r| r.get(key.as_ref()).cloned()))
}
}
#[cfg(test)]
pub mod tests {
    use super::{Memstore, Store};
    use anyhow::Result;
    use secstr::SecUtf8;
    /// Enables debug logging for tests; safe to call repeatedly.
    pub(crate) fn init() {
        std::env::set_var("RUST_LOG", "debug");
        let _ = pretty_env_logger::try_init();
    }
    #[test]
    fn test_mem_store() {
        verify_token_store(Memstore::new());
    }
    /// Shared contract test: any `Store` must round-trip a value, delete it,
    /// and subsequently miss on lookup.
    pub fn verify_token_store(token_store: impl Store) {
        let expected: Result<SecUtf8> = Ok("hello".into());
        token_store.put("my_svc", "api_key", "hello").unwrap();
        assert_eq!(token_store.get("my_svc", "api_key").ok(), Some(expected.ok()));
        assert!(token_store.del("my_svc", "api_key").is_ok());
        assert!(token_store.get("my_svc", "api_key").unwrap().is_none());
    }
}
|
pub use self::user_mapper::*;
mod user_mapper; |
// verification-helper: PROBLEM https://judge.yosupo.jp/problem/unionfind
use proconio::input;
/// Solves the "unionfind" judge problem: query type 1 asks whether two
/// vertices are connected; any other type unites them.
fn main() {
    input! {
        vertices_len: usize,
        queries_len: usize,
    }
    let mut uf = cprlib::union_find::UnionFind::new(vertices_len);
    input! {
        queries: [(u64, usize, usize); queries_len],
    }
    for &(query_type, x, y) in &queries {
        if query_type == 1 {
            let connected = if uf.same(x, y) { "1" } else { "0" };
            println!("{}", connected);
        } else {
            uf.unite(x, y);
        }
    }
}
|
use std::mem;
/// One list node: a value plus the link to the rest of the chain.
struct Node {
    elem: i32,
    next: Link,
}
/// A singly linked stack of `i32` values (push/pop at the front).
pub struct List {
    head: Link,
}
impl List {
    /// Creates an empty list.
    pub fn new() -> Self {
        List { head: Link::Empty }
    }
    /// Pushes `elem` onto the front of the list.
    pub fn push(&mut self, elem: i32) {
        // Take ownership of the current head (leaving `Link::Empty` behind)
        // so the new node can point at the old chain.
        let old_head = mem::replace(&mut self.head, Link::Empty);
        self.head = Link::More(Box::new(Node { elem, next: old_head }));
    }
    /// Pops the front element, or `None` if the list is empty.
    pub fn pop(&mut self) -> Option<i32> {
        // Swap the head out so the node can be consumed by value.
        match mem::replace(&mut self.head, Link::Empty) {
            Link::Empty => None,
            Link::More(node) => {
                self.head = node.next;
                Some(node.elem)
            }
        }
    }
}
/// Iterative drop: the default recursive drop of a long `Box` chain could
/// overflow the stack, so the chain is unlinked one node at a time.
impl Drop for List {
    fn drop(&mut self) {
        let mut cur_link = mem::replace(&mut self.head, Link::Empty);
        // `while let` == "do this thing until this pattern doesn't match"
        while let Link::More(mut boxed_node) = cur_link {
            cur_link = mem::replace(&mut boxed_node.next, Link::Empty);
            // boxed_node goes out of scope and gets dropped here.
            // but its Node's `next` field has been set to Link::Empty
            // so no unbounded recursion occurs.
        }
    }
}
/// A link in the chain: either the end, or a heap-allocated next node.
enum Link {
    Empty,
    More(Box<Node>),
}
// cfg(test) prevents the compiler from warning when use super::List;
#[cfg(test)]
mod test {
    use super::List;
    // Mark this function as a testing function.
    /// Pops from an empty list, then pushes 1..3 and verifies LIFO order.
    #[test]
    fn basic() {
        {
            // Given an empty list
            let mut l = List::new();
            {
                // When popping
                assert_eq!(l.pop(), None);
            }
        }
        {
            // Given a populate list.
            let mut l = List::new();
            // Populate the list.
            l.push(1);
            l.push(2);
            l.push(3);
            {
                // When popping
                assert_eq!(l.pop(), Some(3));
                assert_eq!(l.pop(), Some(2));
                assert_eq!(l.pop(), Some(1));
                // Exhausted list pop.
                assert_eq!(l.pop(), None);
            }
        }
    }
}
|
/// bindings for ARINC653P1-5 3.6.2.2 queuing
pub mod basic {
    use crate::bindings::*;
    use crate::Locked;
    pub type QueuingPortName = ApexName;
    /// According to ARINC 653P1-5 this may either be 32 or 64 bits.
    /// Internally we will use 64-bit by default.
    /// The implementing Hypervisor may cast this to 32-bit if needed
    pub type QueuingPortId = ApexLongInteger;
    /// Snapshot of a queuing port's state as reported by the hypervisor.
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    pub struct QueuingPortStatus {
        /// Messages currently queued.
        pub nb_message: MessageRange,
        /// Queue capacity the port was created with.
        pub max_nb_message: MessageRange,
        /// Largest message the port accepts.
        pub max_message_size: MessageSize,
        /// Whether this partition is the source or destination.
        pub port_direction: PortDirection,
        /// Processes currently blocked on this port.
        pub waiting_processes: WaitingRange,
    }
    /// Raw ARINC 653 P4 queuing-port services.
    pub trait ApexQueuingPortP4 {
        // Only during Warm/Cold-Start
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn create_queuing_port<L: Locked>(
            queuing_port_name: QueuingPortName,
            max_message_size: MessageSize,
            max_nb_message: MessageRange,
            port_direction: PortDirection,
            queuing_discipline: QueuingDiscipline,
        ) -> Result<QueuingPortId, ErrorReturnCode>;
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn send_queuing_message<L: Locked>(
            queuing_port_id: QueuingPortId,
            message: &[ApexByte],
            time_out: ApexSystemTime,
        ) -> Result<(), ErrorReturnCode>;
        /// # Safety
        ///
        /// This function is safe, as long as the buffer can hold whatever is received
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        unsafe fn receive_queuing_message<L: Locked>(
            queuing_port_id: QueuingPortId,
            time_out: ApexSystemTime,
            message: &mut [ApexByte],
        ) -> Result<MessageSize, ErrorReturnCode>;
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_queuing_port_status<L: Locked>(
            queuing_port_id: QueuingPortId,
        ) -> Result<QueuingPortStatus, ErrorReturnCode>;
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn clear_queuing_port<L: Locked>(
            queuing_port_id: QueuingPortId,
        ) -> Result<(), ErrorReturnCode>;
    }
    /// P1 extension: look up an existing queuing port's id by its name.
    pub trait ApexQueuingPortP1: ApexQueuingPortP4 {
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_queuing_port_id<L: Locked>(
            queuing_port_name: QueuingPortName,
        ) -> Result<QueuingPortId, ErrorReturnCode>;
    }
}
/// abstractions for ARINC653P1-5 3.6.2.2 queuing
pub mod abstraction {
use core::marker::PhantomData;
use core::sync::atomic::AtomicPtr;
// Reexport important basic-types for downstream-user
pub use super::basic::{QueuingPortId, QueuingPortStatus};
use crate::bindings::*;
use crate::hidden::Key;
use crate::prelude::*;
/// Typed handle for the sending side of a queuing port, with message size
/// and queue depth fixed at compile time.
#[derive(Debug)]
pub struct QueuingPortSender<
    const MSG_SIZE: MessageSize,
    const NB_MSGS: MessageRange,
    Q: ApexQueuingPortP4,
> {
    // Ties the handle to hypervisor `Q` without storing an instance.
    _b: PhantomData<AtomicPtr<Q>>,
    id: QueuingPortId,
}
impl<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange, S: ApexQueuingPortP4> Clone
for QueuingPortSender<MSG_SIZE, NB_MSGS, S>
{
fn clone(&self) -> Self {
Self {
_b: self._b,
id: self.id,
}
}
}
/// Typed handle for the receiving side of a queuing port, with message size
/// and queue depth fixed at compile time.
#[derive(Debug)]
pub struct QueuingPortReceiver<
    const MSG_SIZE: MessageSize,
    const NB_MSGS: MessageRange,
    Q: ApexQueuingPortP4,
> {
    // Ties the handle to hypervisor `Q` without storing an instance.
    _b: PhantomData<AtomicPtr<Q>>,
    id: QueuingPortId,
}
impl<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange, S: ApexQueuingPortP4> Clone
for QueuingPortReceiver<MSG_SIZE, NB_MSGS, S>
{
fn clone(&self) -> Self {
Self {
_b: self._b,
id: self.id,
}
}
}
/// Extension methods on any P4 hypervisor: raw send/receive by port id,
/// without the compile-time size checks of the typed handles.
pub trait ApexQueuingPortP4Ext: ApexQueuingPortP4 + Sized {
    fn queueing_port_send_unchecked(
        id: QueuingPortId,
        buffer: &[ApexByte],
        timeout: SystemTime,
    ) -> Result<(), Error>;
    /// # Safety
    ///
    /// This function is safe, as long as the buffer can hold whatever is received
    unsafe fn queueing_port_receive_unchecked(
        id: QueuingPortId,
        timeout: SystemTime,
        buffer: &mut [ApexByte],
    ) -> Result<&[ApexByte], Error>;
}
/// Extension methods on any P1 hypervisor: obtain validated, typed port
/// handles by name.
pub trait ApexQueuingPortP1Ext: ApexQueuingPortP1 + Sized {
    /// Returns Err(Error::InvalidConfig) if queuing port with name does not exists or
    /// if the message size of the found queuing port is different than MSG_SIZE or
    /// if number messages of found queuing port is different than NB_MSGS
    fn get_queuing_port_sender<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange>(
        name: Name,
    ) -> Result<QueuingPortSender<MSG_SIZE, NB_MSGS, Self>, Error>;
    /// Returns Err(Error::InvalidConfig) if queuing port with name does not exists or
    /// if the message size of the found queuing port is different than MSG_SIZE or
    /// if number messages of found queuing port is different than NB_MSGS
    fn get_queuing_port_receiver<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange>(
        name: Name,
    ) -> Result<QueuingPortReceiver<MSG_SIZE, NB_MSGS, Self>, Error>;
}
impl<Q: ApexQueuingPortP4> ApexQueuingPortP4Ext for Q {
    fn queueing_port_send_unchecked(
        id: QueuingPortId,
        buffer: &[ApexByte],
        timeout: SystemTime,
    ) -> Result<(), Error> {
        // Forward to the raw P4 service; `Key` is the crate-private lock token.
        Q::send_queuing_message::<Key>(id, buffer, timeout.into())?;
        Ok(())
    }
    unsafe fn queueing_port_receive_unchecked(
        id: QueuingPortId,
        timeout: SystemTime,
        buffer: &mut [ApexByte],
    ) -> Result<&[ApexByte], Error> {
        // The hypervisor reports how many bytes it wrote; hand back just that
        // prefix of the caller's buffer.
        let len = Q::receive_queuing_message::<Key>(id, timeout.into(), buffer)? as usize;
        Ok(&buffer[..len])
    }
}
impl<Q: ApexQueuingPortP1> ApexQueuingPortP1Ext for Q {
    /// Returns Err(Error::InvalidConfig) if queuing port with name does not exists or
    /// if the message size of the found queuing port is different than MSG_SIZE or
    /// if number messages of found queuing port is different than NB_MSGS
    fn get_queuing_port_sender<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange>(
        name: Name,
    ) -> Result<QueuingPortSender<MSG_SIZE, NB_MSGS, Q>, Error> {
        let id = Q::get_queuing_port_id::<Key>(name.into())?;
        // According to ARINC653P1-5 3.6.2.2.5 this can only fail if the queuing_port_id
        // does not exist in the current partition.
        // But since we retrieve the queuing_port_id directly from the hypervisor
        // there is no possible way for it not existing
        let QueuingPortStatus {
            max_nb_message,
            max_message_size,
            port_direction,
            ..
        } = Q::get_queuing_port_status::<Key>(id).unwrap();
        // Validate the configured port against the compile-time expectations.
        if max_nb_message != NB_MSGS {
            return Err(Error::InvalidConfig);
        }
        if max_message_size != MSG_SIZE {
            return Err(Error::InvalidConfig);
        }
        // A sender handle requires a source-direction port.
        if port_direction != PortDirection::Source {
            return Err(Error::InvalidConfig);
        }
        Ok(QueuingPortSender {
            _b: Default::default(),
            id,
        })
    }
    /// Returns Err(Error::InvalidConfig) if queuing port with name does not exists or
    /// if the message size of the found queuing port is different than MSG_SIZE or
    /// if number messages of found queuing port is different than NB_MSGS
    fn get_queuing_port_receiver<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange>(
        name: Name,
    ) -> Result<QueuingPortReceiver<MSG_SIZE, NB_MSGS, Q>, Error> {
        let id = Q::get_queuing_port_id::<Key>(name.into())?;
        // According to ARINC653P1-5 3.6.2.2.5 this can only fail if the queuing_port_id
        // does not exist in the current partition.
        // But since we retrieve the queuing_port_id directly from the hypervisor
        // there is no possible way for it not existing
        let QueuingPortStatus {
            max_nb_message,
            max_message_size,
            port_direction,
            ..
        } = Q::get_queuing_port_status::<Key>(id).unwrap();
        // Validate the configured port against the compile-time expectations.
        if max_nb_message != NB_MSGS {
            return Err(Error::InvalidConfig);
        }
        if max_message_size != MSG_SIZE {
            return Err(Error::InvalidConfig);
        }
        // A receiver handle requires a destination-direction port.
        if port_direction != PortDirection::Destination {
            return Err(Error::InvalidConfig);
        }
        Ok(QueuingPortReceiver {
            _b: Default::default(),
            id,
        })
    }
}
impl<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange, Q: ApexQueuingPortP4>
    QueuingPortSender<MSG_SIZE, NB_MSGS, Q>
{
    /// Sends `buffer` on this port, blocking at most `timeout`;
    /// the buffer is validated against MSG_SIZE first.
    pub fn send(&self, buffer: &[ApexByte], timeout: SystemTime) -> Result<(), Error> {
        buffer.validate_write(MSG_SIZE)?;
        Q::queueing_port_send_unchecked(self.id, buffer, timeout)
    }
    /// The hypervisor-assigned id of this port.
    pub fn id(&self) -> QueuingPortId {
        self.id
    }
    /// Maximum message size in bytes (compile-time constant).
    pub const fn size(&self) -> usize {
        MSG_SIZE as usize
    }
    /// Maximum number of queued messages (compile-time constant).
    pub const fn range(&self) -> MessageRange {
        NB_MSGS
    }
    /// Current port status as reported by the hypervisor.
    pub fn status(&self) -> QueuingPortStatus {
        // According to ARINC653P1-5 3.6.2.2.5 this can only fail if the queuing_port_id
        // does not exist in the current partition.
        // But since we retrieve the queuing_port_id directly from the hypervisor
        // there is no possible way for it not existing
        Q::get_queuing_port_status::<Key>(self.id).unwrap()
    }
}
impl<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange, Q: ApexQueuingPortP1>
    QueuingPortSender<MSG_SIZE, NB_MSGS, Q>
{
    /// Looks up an existing queuing port by `name` and returns a sender
    /// handle for it (delegates to `Q::get_queuing_port_sender`).
    pub fn from_name(name: Name) -> Result<QueuingPortSender<MSG_SIZE, NB_MSGS, Q>, Error> {
        Q::get_queuing_port_sender(name)
    }
}
impl<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange, Q: ApexQueuingPortP4>
    QueuingPortReceiver<MSG_SIZE, NB_MSGS, Q>
{
    /// Receives a message into `buffer`, blocking for at most `timeout`,
    /// and returns the received bytes as a slice of `buffer`.
    ///
    /// `validate_read` is consulted with `MSG_SIZE` first; presumably it
    /// rejects buffers too small to hold a full message — TODO confirm.
    pub fn receive<'a>(
        &self,
        buffer: &'a mut [ApexByte],
        timeout: SystemTime,
    ) -> Result<&'a [ApexByte], Error> {
        buffer.validate_read(MSG_SIZE)?;
        unsafe { Q::queueing_port_receive_unchecked(self.id, timeout, buffer) }
    }
    /// Discards all messages currently queued on this port.
    pub fn clear(&self) {
        // According to ARINC653P1-5 3.6.2.2.6 this can only fail if the queuing_port_id does not exist
        // in the current partition or if this is not a destination port.
        // But since we retrieve the queuing_port_id directly from the hypervisor
        // and we verify that this is a destination port,
        // there is no possible way for it not existing
        Q::clear_queuing_port::<Key>(self.id).unwrap();
    }
    /// Returns the hypervisor-assigned id of this queuing port.
    pub fn id(&self) -> QueuingPortId {
        self.id
    }
    /// Maximum message size of this port, as a `usize`.
    pub const fn size(&self) -> usize {
        MSG_SIZE as usize
    }
    /// Maximum number of messages this port can queue.
    pub const fn range(&self) -> MessageRange {
        NB_MSGS
    }
    /// Queries the current status of this port from the hypervisor.
    pub fn status(&self) -> QueuingPortStatus {
        // According to ARINC653P1-5 3.6.2.2.5 this can only fail if the queuing_port_id
        // does not exist in the current partition.
        // But since we retrieve the queuing_port_id directly from the hypervisor
        // there is no possible way for it not existing
        Q::get_queuing_port_status::<Key>(self.id).unwrap()
    }
}
impl<const MSG_SIZE: MessageSize, const NB_MSGS: MessageRange, Q: ApexQueuingPortP1>
    QueuingPortReceiver<MSG_SIZE, NB_MSGS, Q>
{
    /// Looks up an existing queuing port by `name` and returns a receiver
    /// handle for it (delegates to `Q::get_queuing_port_receiver`).
    pub fn from_name(name: Name) -> Result<QueuingPortReceiver<MSG_SIZE, NB_MSGS, Q>, Error> {
        Q::get_queuing_port_receiver(name)
    }
}
impl<Q: ApexQueuingPortP4> StartContext<Q> {
    /// Creates a new source (sending) queuing port named `name`, with
    /// message size `MSG_SIZE`, capacity `NB_MSGS` and discipline `qd`.
    ///
    /// # Errors
    ///
    /// Propagates any error from `Q::create_queuing_port`.
    pub fn create_queuing_port_sender<
        const MSG_SIZE: MessageSize,
        const NB_MSGS: MessageRange,
    >(
        &mut self,
        name: Name,
        qd: QueuingDiscipline,
    ) -> Result<QueuingPortSender<MSG_SIZE, NB_MSGS, Q>, Error> {
        let id = Q::create_queuing_port::<Key>(
            name.into(),
            MSG_SIZE,
            NB_MSGS,
            PortDirection::Source,
            qd,
        )?;
        Ok(QueuingPortSender {
            _b: Default::default(),
            id,
        })
    }
    /// Creates a new destination (receiving) queuing port named `name`, with
    /// message size `MSG_SIZE`, capacity `NB_MSGS` and discipline `qd`.
    ///
    /// # Errors
    ///
    /// Propagates any error from `Q::create_queuing_port`.
    pub fn create_queuing_port_receiver<
        const MSG_SIZE: MessageSize,
        const NB_MSGS: MessageRange,
    >(
        &mut self,
        name: Name,
        qd: QueuingDiscipline,
    ) -> Result<QueuingPortReceiver<MSG_SIZE, NB_MSGS, Q>, Error> {
        let id = Q::create_queuing_port::<Key>(
            name.into(),
            MSG_SIZE,
            NB_MSGS,
            PortDirection::Destination,
            qd,
        )?;
        Ok(QueuingPortReceiver {
            _b: Default::default(),
            id,
        })
    }
}
}
|
/// System-level information as returned by a Docker daemon.
///
/// Field names are kept in the daemon's JSON casing (hence the
/// `non_snake_case` allowance) so serde can map them directly; several
/// numeric fields actually carry boolean semantics on the wire, as marked.
#[derive(Serialize, Deserialize, Debug)]
#[allow(non_snake_case)]
pub struct SystemInfo {
    pub Containers: u64,
    pub Images: u64,
    pub Driver: String,
    pub DriverStatus: Vec<(String, String)>,
    pub ExecutionDriver: String,
    pub KernelVersion: String,
    pub NCPU: u64,
    pub MemTotal: u64,
    pub Name: String,
    pub ID: String,
    pub Debug: u64, // bool
    pub NFd: u64,
    pub NGoroutines: u64,
    pub NEventsListener: u64,
    pub InitPath: String,
    pub InitSha1: String,
    pub IndexServerAddress: String,
    pub MemoryLimit: u64, // bool
    pub SwapLimit: u64, // bool
    pub IPv4Forwarding: u64, // bool
    pub Labels: Option<Vec<String>>,
    pub DockerRootDir: String,
    pub OperatingSystem: String,
}
// NOTE(review): this hand-written impl is a plain field-wise clone —
// identical to what `#[derive(Clone)]` on the struct would generate;
// consider deriving instead of maintaining this by hand.
impl Clone for SystemInfo {
    fn clone(&self) -> Self {
        SystemInfo {
            Containers: self.Containers,
            Images: self.Images,
            Driver: self.Driver.clone(),
            DriverStatus: self.DriverStatus.clone(),
            ExecutionDriver: self.ExecutionDriver.clone(),
            KernelVersion: self.KernelVersion.clone(),
            NCPU: self.NCPU,
            MemTotal: self.MemTotal,
            Name: self.Name.clone(),
            ID: self.ID.clone(),
            Debug: self.Debug,
            NFd: self.NFd,
            NGoroutines: self.NGoroutines,
            NEventsListener: self.NEventsListener,
            InitPath: self.InitPath.clone(),
            InitSha1: self.InitSha1.clone(),
            IndexServerAddress: self.IndexServerAddress.clone(),
            MemoryLimit: self.MemoryLimit,
            SwapLimit: self.SwapLimit,
            IPv4Forwarding: self.IPv4Forwarding,
            Labels: self.Labels.clone(),
            DockerRootDir: self.DockerRootDir.clone(),
            OperatingSystem: self.OperatingSystem.clone(),
        }
    }
}
|
/*
* A sample API conforming to the draft standard OGC API - Features - Part 1: Core
*
* This is a sample OpenAPI definition that conforms to the conformance classes \"Core\", \"GeoJSON\", \"HTML\" and \"OpenAPI 3.0\" of the draft standard \"OGC API - Features - Part 1: Core\". This example is a generic OGC API Features definition that uses path parameters to describe all feature collections and all features. The generic OpenAPI definition does not provide any details on the collections or the feature content. This information is only available from accessing the feature collection resources. There is [another example](ogcapi-features-1-example2.yaml) that specifies each collection explicitly.
*
* The version of the OpenAPI document: 1.0.0
* Contact: info@example.org
* Generated by: https://openapi-generator.tech
*/
/// A feature collection resource of an OGC API - Features service
/// (metadata, links and optional extent/CRS information).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Collection {
    /// identifier of the collection used, for example, in URIs
    #[serde(rename = "id")]
    pub id: String,
    /// human readable title of the collection
    #[serde(rename = "title", skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    /// a description of the features in the collection
    #[serde(rename = "description", skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    // links to the collection itself and related resources
    #[serde(rename = "links")]
    pub links: Vec<crate::models::Link>,
    // spatial and temporal extent of the collection, if known
    #[serde(rename = "extent", skip_serializing_if = "Option::is_none")]
    pub extent: Option<crate::models::Extent>,
    /// indicator about the type of the items in the collection (the default value is 'feature').
    #[serde(rename = "itemType", skip_serializing_if = "Option::is_none")]
    pub item_type: Option<String>,
    /// the list of coordinate reference systems supported by the service
    #[serde(rename = "crs", skip_serializing_if = "Option::is_none")]
    pub crs: Option<Vec<String>>,
}
impl Collection {
    /// Builds a `Collection` carrying only the mandatory `id` and `links`;
    /// every optional attribute starts out as `None`.
    pub fn new(id: String, links: Vec<crate::models::Link>) -> Collection {
        Collection {
            id,
            links,
            title: None,
            description: None,
            extent: None,
            item_type: None,
            crs: None,
        }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#[cfg(feature = "package-2020-03-preview")]
mod package_2020_03_preview;
#[cfg(feature = "package-2020-03-preview")]
pub use package_2020_03_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-pure-2020-03-preview")]
mod package_pure_2020_03_preview;
#[cfg(feature = "package-pure-2020-03-preview")]
pub use package_pure_2020_03_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-pure-2017-04-preview")]
mod package_pure_2017_04_preview;
#[cfg(feature = "package-pure-2017-04-preview")]
pub use package_pure_2017_04_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-pure-2016-03")]
mod package_pure_2016_03;
#[cfg(feature = "package-pure-2016-03")]
pub use package_pure_2016_03::{models, operations, API_VERSION};
/// Per-operation client configuration for the generated Azure API.
pub struct OperationConfig {
    // API version string sent with each request
    pub api_version: String,
    // HTTP client used to issue requests
    pub client: reqwest::Client,
    // base URL of the service endpoint
    pub base_path: String,
    // credential used to obtain bearer tokens; `None` means unauthenticated
    pub token_credential: Option<Box<dyn azure_core::TokenCredential>>,
    // resource (audience) the token is requested for
    pub token_credential_resource: String,
}
impl OperationConfig {
    /// Builds a configuration that authenticates with `token_credential`,
    /// keeping every other field at its `Default` value.
    pub fn new(token_credential: Box<dyn azure_core::TokenCredential>) -> Self {
        let mut config = Self::default();
        config.token_credential = Some(token_credential);
        config
    }
}
impl Default for OperationConfig {
    /// Default configuration: crate-level `API_VERSION`, a fresh `reqwest`
    /// client, the public Azure Resource Manager endpoint, and no credential.
    fn default() -> Self {
        let client = reqwest::Client::new();
        Self {
            api_version: API_VERSION.to_string(),
            client,
            base_path: "https://management.azure.com".to_string(),
            token_credential: None,
            token_credential_resource: "https://management.azure.com/".to_string(),
        }
    }
}
|
use rocket::Request;
use rocket_contrib::Template;
#[catch(404)]
fn not_found(req: &Request) -> Template {
let mut map = std::collections::HashMap::new();
map.insert("path", req.uri().as_str());
Template::render("error/not-found", &map)
}
// https://api.rocket.rs/rocket_contrib/struct.Template.html |
use std::cell::RefCell;
use std::path::Path;
use std::sync::{Arc, Weak};
use pane::{Item, Pane};
use project::Project;
use platform;
/// A top-level editing context: one project rendered into one window.
pub struct Workspace {
    /// Parent application reference.
    pub application: Weak<platform::Application>,
    /// The project this workspace edits.
    pub project: Project,
    /// Window to which the `Workspace` is rendered.
    pub window: RefCell<platform::Window>,
    /// A workspace will have one or more panes in a given arrangement;
    /// multiple panes with arrangements isn't currently implemented, but
    /// for the sake of forward-thinking'ness we'll represent the
    /// workspace's panes as an array.
    pub panes: Vec<Box<Pane>>,
}
impl Workspace {
    /// Creates a workspace bound to `application` (held weakly), opening a
    /// fresh platform window for rendering `project`.
    pub fn new(application: Arc<platform::Application>, project: Project) -> Workspace {
        let window = platform::Window::new();
        Workspace {
            application: Arc::downgrade(&application),
            project,
            window: RefCell::new(window),
            panes: vec![],
        }
    }
    /// Renders the project and panes into the workspace's window.
    ///
    /// NOTE(review): `panes` and `project` are cloned on every render;
    /// confirm whether `Window::render` could accept references instead.
    pub fn render(&self) {
        let panes = self.panes.clone();
        let project = self.project.clone();
        let mut window = self.window.borrow_mut();
        window.render(project, panes)
    }
    /// Gets the current active pane. If one does not exist then it will
    /// create one.
    pub fn active_pane_mut(&mut self) -> &mut Pane {
        if self.panes.is_empty() {
            self.panes.push(Box::new(Pane::new()))
        }
        &mut self.panes[0]
    }
    /// Opens `path` as a new item in the active pane.
    pub fn open_path<P: AsRef<Path>>(&mut self, path: P) {
        // `active_pane_mut` already yields a mutable reference; the binding
        // itself does not need to be `mut` (removed a compiler warning).
        let pane = self.active_pane_mut();
        pane.add_item(Item::from_path(path))
    }
}
|
use db;
use error::Result;
use spec;
use may::sync::Mutex;
lazy_static! {
    // Serializes writers: only one joint may be persisted at a time.
    static ref WRITER_MUTEX: Mutex<()> = Mutex::new(());
}
/// save a unit
///
/// NOTE(review): this is an unfinished stub — it opens a transaction,
/// commits it empty, then hits `unimplemented!()`. The actual SQL
/// construction still needs to be written inside the mutex-guarded section.
pub fn save_joint(joint: spec::Joint) -> Result<()> {
    // first construct all the sql within a mutex
    info!("saving unit = {:?}", joint.unit);
    let _g = WRITER_MUTEX.lock()?;
    // and then execute the transaction
    let mut db = db::DB_POOL.get_connection();
    let tx = db.transaction()?;
    tx.commit()?;
    unimplemented!();
}
|
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
use nell::Message;
use nell::Netlink;
use nell::Socket;
use nell::Family;
use nell::ffi::diag::{inet_diag_msg, inet_diag_req_v2, SOCK_DIAG_BY_FAMILY, INET_DIAG_INFO};
use nell::ffi::core::{NLM_F_DUMP, NLM_F_REQUEST, IPPROTO_TCP, AF_INET};
use nell::sys::Bytes;
use nell::err::Invalid;
use std::net::{SocketAddr, IpAddr};
use std::mem::transmute;
use std::convert::TryFrom;
use std::vec::Vec;
use std::string::String;
/// Mirror of the kernel's `tcp_info` struct as delivered via the
/// `INET_DIAG_INFO` netlink attribute. `#[repr(C)]` keeps the field layout
/// byte-compatible with the kernel so the raw attribute bytes can be
/// reinterpreted directly (see the `Bytes` impl below).
#[repr(C)]
#[derive(Clone, Debug, Default)]
pub struct TCPInfo {
    pub tcpi_state: u8,
    pub tcpi_ca_state: u8,
    pub tcpi_retransmits: u8,
    pub tcpi_probes: u8,
    pub tcpi_backoff: u8,
    pub tcpi_options: u8,
    // packed bitfields (snd_wscale/rcv_wscale etc.) — kept opaque here
    pub _bitfield_1: [u8; 2usize],
    pub tcpi_rto: u32,
    pub tcpi_ato: u32,
    pub tcpi_snd_mss: u32,
    pub tcpi_rcv_mss: u32,
    pub tcpi_unacked: u32,
    pub tcpi_sacked: u32,
    pub tcpi_lost: u32,
    pub tcpi_retrans: u32,
    pub tcpi_fackets: u32,
    pub tcpi_last_data_sent: u32,
    pub tcpi_last_ack_sent: u32,
    pub tcpi_last_data_recv: u32,
    pub tcpi_last_ack_recv: u32,
    pub tcpi_pmtu: u32,
    pub tcpi_rcv_ssthresh: u32,
    pub tcpi_rtt: u32,
    pub tcpi_rttvar: u32,
    pub tcpi_snd_ssthresh: u32,
    pub tcpi_snd_cwnd: u32,
    pub tcpi_advmss: u32,
    pub tcpi_reordering: u32,
    pub tcpi_rcv_rtt: u32,
    pub tcpi_rcv_space: u32,
    pub tcpi_total_retrans: u32,
    pub tcpi_pacing_rate: u64,
    pub tcpi_max_pacing_rate: u64,
    pub tcpi_bytes_acked: u64,
    pub tcpi_bytes_received: u64,
    pub tcpi_segs_out: u32,
    pub tcpi_segs_in: u32,
    pub tcpi_notsent_bytes: u32,
    pub tcpi_min_rtt: u32,
    pub tcpi_data_segs_in: u32,
    pub tcpi_data_segs_out: u32,
    pub tcpi_delivery_rate: u64,
    pub tcpi_busy_time: u64,
    pub tcpi_rwnd_limited: u64,
    pub tcpi_sndbuf_limited: u64,
    pub tcpi_delivered: u32,
    pub tcpi_delivered_ce: u32,
    pub tcpi_bytes_sent: u64,
    // NOTE(review): field name is a typo for `tcpi_bytes_retrans`; the layout
    // is unaffected (repr(C) cares about order/size, not names), but renaming
    // the public field would break downstream users, so it is left as-is.
    pub tcpi_bytes_retrains: u64,
    /*
    pub tcpi_dsack_dups: u32,
    pub tcpi_reord_seen: u32,
    pub tcpi_rcv_ooopack: u32,
    pub tcpi_send_wnd: u32,
    */
}
// SAFETY: TCPInfo is #[repr(C)] and composed only of integer fields, so any
// byte pattern is a valid value — assumed to be the contract of `Bytes`;
// NOTE(review): confirm against the trait's documented requirements in `nell`.
unsafe impl Bytes for TCPInfo{}
/// One socket diagnostic record: endpoint addresses, kernel state, inode,
/// and (optionally) the parsed per-protocol info payload (TCP by default).
#[derive(Debug)]
pub struct DiagWithInode<T = TCPInfo> {
    // address family as reported by the kernel (AF_INET / AF_INET6)
    family: u8,
    pub src: SocketAddr,
    pub dst: SocketAddr,
    // kernel TCP state number (see TCP_STATE::from_u8)
    state: u8,
    pub inode: u32,
    pub info: Option<T>,
}
/// Builds a `DiagWithInode` from a raw `inet_diag_msg`, decoding both
/// endpoint addresses and attaching the optional protocol info payload.
///
/// # Errors
///
/// Returns `Invalid` when the address family is not recognised by `addr`.
fn diag_with_node(msg: &Message<inet_diag_msg>) -> Result<DiagWithInode, Invalid> {
    let src = addr(msg.idiag_family, &msg.id.idiag_src, msg.id.idiag_sport)?;
    let dst = addr(msg.idiag_family, &msg.id.idiag_dst, msg.id.idiag_dport)?;
    Ok(DiagWithInode {
        family: msg.idiag_family,
        src,
        dst,
        state: msg.idiag_state,
        info: msg.info(),
        inode: msg.idiag_inode,
    })
}
fn addr(family: u8, addr: &[u32; 4], port: u16) -> Result<SocketAddr, Invalid> {
let octets: &[u8; 16] = unsafe { transmute(addr) };
Ok(SocketAddr::new(match family {
AF_INET => IpAddr::from(<[u8; 4]>::try_from(&octets[..4])?),
AF_INET6 => IpAddr::from(<[u8; 16]>::try_from(&octets[..])?),
family => return Err(Invalid::Family(family)),
}, port.to_be()))
}
/// Dumps all IPv4 TCP sockets from the kernel via netlink `sock_diag`,
/// returning every record that carries a `TCP_INFO` payload and is not in
/// the LISTEN state.
///
/// # Panics
///
/// Panics if the netlink socket cannot be created, or if a send/receive/
/// decode step fails.
pub fn gather_sockets() -> Vec<DiagWithInode> {
    // Kernel state number for LISTEN (matches TCP_STATE numbering).
    const TCP_LISTEN: u8 = 10;
    let mut socket = Socket::new(Family::INET_DIAG).unwrap();
    let mut msg = Message::<inet_diag_req_v2>::new(SOCK_DIAG_BY_FAMILY);
    msg.set_flags(NLM_F_REQUEST | NLM_F_DUMP);
    msg.sdiag_family = AF_INET;
    msg.sdiag_protocol = IPPROTO_TCP;
    // Request sockets in every state; filtering happens below.
    msg.idiag_states = !0;
    // Ask the kernel to attach the TCP_INFO extension to each record.
    msg.idiag_ext = 1 << (INET_DIAG_INFO as u8 - 1);
    socket.send(&msg).unwrap();
    let mut sockets: Vec<DiagWithInode> = Vec::new();
    while let Netlink::Msg(msg) = socket.recv::<inet_diag_msg>().unwrap() {
        let sockdiag = diag_with_node(&msg).unwrap();
        // LISTEN state is pretty pointless for this. It really only serves as a receive
        // queue to create NEW sockets for clients. We will get the info from those newly
        // created sockets, not the LISTEN one.
        if let Some(info) = &sockdiag.info {
            if info.tcpi_state != TCP_LISTEN {
                sockets.push(sockdiag);
            }
        }
    }
    sockets
}
/// Linux TCP socket states as reported by `inet_diag` / `TCP_INFO`.
///
/// The implicit discriminants line up with the kernel's numbering
/// (`UNKNOWN` = 0, `ESTABLISHED` = 1, …, `MAX_STATES` = 13), which is
/// exactly the mapping implemented by [`TCP_STATE::from_u8`].
pub enum TCP_STATE {
    UNKNOWN,
    ESTABLISHED,
    SYN_SENT,
    SYN_RECV,
    FIN_WAIT1,
    FIN_WAIT2,
    TIME_WAIT,
    CLOSE,
    CLOSE_WAIT,
    LAST_ACK,
    LISTEN,
    CLOSING,
    NEW_SYN_REC,
    MAX_STATES,
}
impl TCP_STATE {
    /// Maps a kernel state number to its enum variant.
    ///
    /// State numbers come from external netlink data, so values outside the
    /// kernel's range map to `TCP_STATE::UNKNOWN` instead of panicking
    /// (the previous `panic!("dont do it")` placeholder crashed the process
    /// on any unexpected input).
    pub fn from_u8(state: u8) -> TCP_STATE {
        match state {
            1 => TCP_STATE::ESTABLISHED,
            2 => TCP_STATE::SYN_SENT,
            3 => TCP_STATE::SYN_RECV,
            4 => TCP_STATE::FIN_WAIT1,
            5 => TCP_STATE::FIN_WAIT2,
            6 => TCP_STATE::TIME_WAIT,
            7 => TCP_STATE::CLOSE,
            8 => TCP_STATE::CLOSE_WAIT,
            9 => TCP_STATE::LAST_ACK,
            10 => TCP_STATE::LISTEN,
            11 => TCP_STATE::CLOSING,
            12 => TCP_STATE::NEW_SYN_REC,
            13 => TCP_STATE::MAX_STATES,
            _ => TCP_STATE::UNKNOWN,
        }
    }
    /// Returns the state's name, e.g. `"LISTEN"`.
    pub fn to_string(&self) -> String {
        String::from(match self {
            TCP_STATE::UNKNOWN => "UNKNOWN",
            TCP_STATE::ESTABLISHED => "ESTABLISHED",
            TCP_STATE::SYN_SENT => "SYN_SENT",
            TCP_STATE::SYN_RECV => "SYN_RECV",
            TCP_STATE::FIN_WAIT1 => "FIN_WAIT1",
            TCP_STATE::FIN_WAIT2 => "FIN_WAIT2",
            TCP_STATE::TIME_WAIT => "TIME_WAIT",
            TCP_STATE::CLOSE => "CLOSE",
            TCP_STATE::CLOSE_WAIT => "CLOSE_WAIT",
            TCP_STATE::LAST_ACK => "LAST_ACK",
            TCP_STATE::LISTEN => "LISTEN",
            TCP_STATE::CLOSING => "CLOSING",
            TCP_STATE::NEW_SYN_REC => "NEW_SYN_REC",
            TCP_STATE::MAX_STATES => "MAX_STATES",
        })
    }
}
|
use serde_json::{Value};
use std::collections::HashMap;
//Note that (thick) triples are not OWL
/// Recursively translates an OWL expression encoded as a JSON array
/// (`[operator, arg1, arg2, ...]`), dispatching on the operator keyword.
///
/// `m` is the substitution map threaded through to the leaf translator `t`,
/// which is applied to any value whose head is not a recognised operator.
pub fn translate(v : &Value,
                 m : &HashMap<String, String>,
                 t: &dyn Fn(&Value, &HashMap<String, String>) -> Value)
    -> Value {
    // `Value::to_string` keeps the JSON quoting, hence the escaped
    // string literals in the match arms below.
    let owl_operator: String = v[0].to_string();
    //TODO: ambiguous expressions?
    match owl_operator.as_str() {
        "\"SubClassOf\"" => translate_subclass_of(v,m,t),
        "\"DisjointClasses\"" => translate_disjoint_classes(v,m,t),
        "\"DisjointUnionOf\"" => translate_disjoint_union_of(v,m,t),
        "\"EquivalentClasses\"" => translate_equivalent_classes(v,m,t),
        "\"ObjectSomeValuesFrom\"" => translate_some_values_from(v,m,t),
        "\"ObjectAllValuesFrom\"" => translate_all_values_from(v,m,t),
        "\"ObjectHasValue\"" => translate_has_value(v,m,t),
        "\"ObjectMinCardinality\"" => translate_min_cardinality(v,m,t),
        "\"ObjectMinQualifiedCardinality\"" => translate_min_qualified_cardinality(v,m,t),
        "\"ObjectMaxCardinality\"" => translate_max_cardinality(v,m,t),
        "\"ObjectMaxQualifiedCardinality\"" => translate_max_qualified_cardinality(v,m,t),
        "\"ObjectExactCardinality\"" => translate_exact_cardinality(v,m,t),
        "\"ObjectExactQualifiedCardinality\"" => translate_exact_qualified_cardinality(v,m,t),
        "\"ObjectHasSelf\"" => translate_has_self(v,m,t),
        "\"ObjectIntersectionOf\"" => translate_intersection_of(v,m,t),
        "\"ObjectUnionOf\"" => translate_union_of(v,m,t),
        "\"ObjectOneOf\"" => translate_one_of(v,m,t),
        "\"ObjectComplementOf\"" => translate_complement_of(v,m,t),
        "\"ObjectInverseOf\"" => translate_inverse_of(v,m,t),
        _ => t(v,m),//substitute labels for entities
    }
}
/// Translates a `SubClassOf` axiom: the operator tag is rebuilt and both
/// class expressions are translated recursively.
pub fn translate_subclass_of(v : &Value,
                             m : &HashMap<String,String>,
                             t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                             ) -> Value {
    Value::Array(vec![
        Value::String("SubClassOf".to_string()),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates a `DisjointClasses` axiom: all operands (everything after the
/// operator keyword) are translated and flattened behind a fresh tag.
pub fn translate_disjoint_classes(v : &Value,
                                  m : &HashMap<String,String>,
                                  t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                  ) -> Value {
    let mut operands : Value = translate_list(&(v.as_array().unwrap())[1..], m, t);
    let mut disjoint = vec![Value::String(String::from("DisjointClasses"))];
    disjoint.append(operands.as_array_mut().unwrap());
    // Move the vector instead of the previous `disjoint.to_vec()`, which
    // cloned every element for no reason.
    Value::Array(disjoint)
}
/// Translates a `DisjointUnionOf` axiom: the first argument is the defined
/// class, the remaining arguments the disjoint union members.
pub fn translate_disjoint_union_of(v : &Value,
                                   m : &HashMap<String,String>,
                                   t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                   ) -> Value {
    let lhs = translate(&v[1], m, t);
    let mut operands : Value = translate_list(&(v.as_array().unwrap())[2..], m, t);
    let mut union = vec![Value::String(String::from("DisjointUnionOf")), lhs];
    union.append(operands.as_array_mut().unwrap());
    // Move the vector instead of the previous `union.to_vec()`, which
    // cloned every element for no reason.
    Value::Array(union)
}
/// Translates an `EquivalentClasses` axiom: all operands are translated and
/// flattened behind a fresh tag.
pub fn translate_equivalent_classes(v : &Value,
                                    m : &HashMap<String,String>,
                                    t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                    ) -> Value {
    let mut operands : Value = translate_list(&(v.as_array().unwrap())[1..],m,t);
    let mut equivalent = vec![Value::String(String::from("EquivalentClasses"))];
    equivalent.append(operands.as_array_mut().unwrap());
    // Move the vector instead of the previous `equivalent.to_vec()`, which
    // cloned every element for no reason.
    Value::Array(equivalent)
}
/// Translates an `ObjectSomeValuesFrom` restriction: the operator token is
/// kept verbatim while property and filler are translated recursively.
pub fn translate_some_values_from(v : &Value,
                                  m : &HashMap<String, String>,
                                  t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                  ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates an `ObjectAllValuesFrom` restriction: the operator token is
/// kept verbatim while property and filler are translated recursively.
pub fn translate_all_values_from(v : &Value,
                                 m : &HashMap<String, String>,
                                 t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                 ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates an `ObjectHasValue` restriction: the operator token is kept
/// verbatim while property and filler are translated recursively.
pub fn translate_has_value(v : &Value,
                           m : &HashMap<String, String>,
                           t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                           ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates an `ObjectHasSelf` restriction: the operator token is kept
/// verbatim while the property is translated recursively.
pub fn translate_has_self(v : &Value,
                          m : &HashMap<String, String>,
                          t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                          ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
    ])
}
/// Translates an `ObjectMinCardinality` restriction
/// (`[op, property, cardinality]`), translating both arguments.
pub fn translate_min_cardinality(v : &Value,
                                 m : &HashMap<String, String>,
                                 t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                 ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates an `ObjectMinQualifiedCardinality` restriction
/// (`[op, property, cardinality, filler]`), translating all arguments.
pub fn translate_min_qualified_cardinality(v : &Value,
                                           m : &HashMap<String, String>,
                                           t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                           ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
        translate(&v[3], m, t),
    ])
}
/// Translates an `ObjectMaxCardinality` restriction
/// (`[op, property, cardinality]`), translating both arguments.
pub fn translate_max_cardinality(v : &Value,
                                 m : &HashMap<String, String>,
                                 t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                 ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates an `ObjectMaxQualifiedCardinality` restriction
/// (`[op, property, cardinality, filler]`), translating all arguments.
pub fn translate_max_qualified_cardinality(v : &Value,
                                           m : &HashMap<String, String>,
                                           t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                           ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
        translate(&v[3], m, t),
    ])
}
/// Translates an `ObjectExactCardinality` restriction
/// (`[op, property, cardinality]`), translating both arguments.
pub fn translate_exact_cardinality(v : &Value,
                                   m : &HashMap<String, String>,
                                   t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                   ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
    ])
}
/// Translates an `ObjectExactQualifiedCardinality` restriction
/// (`[op, property, cardinality, filler]`), translating all arguments.
pub fn translate_exact_qualified_cardinality(v : &Value,
                                             m : &HashMap<String, String>,
                                             t: &dyn Fn(&Value, &HashMap<String, String>) -> Value ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
        translate(&v[2], m, t),
        translate(&v[3], m, t),
    ])
}
/// Translates every element of a slice of OWL expressions, returning them
/// as a JSON array.
pub fn translate_list(v : &[Value],
                      m : &HashMap<String, String>,
                      t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                      ) -> Value {
    // The original loop shadowed the translator parameter `t` with each
    // translated element (`let t: Value = translate(...)`), which read
    // confusingly; an iterator chain avoids the shadow entirely.
    Value::Array(v.iter().map(|element| translate(element, m, t)).collect())
}
/// Translates an `ObjectIntersectionOf` expression: the operator token is
/// kept verbatim and every operand is translated via `translate_list`.
pub fn translate_intersection_of(v : &Value,
                                 m : &HashMap<String, String>,
                                 t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                                 ) -> Value {
    let mut result = vec![v[0].clone()];
    let mut operands : Value = translate_list(&(v.as_array().unwrap())[1..], m, t);
    result.append(operands.as_array_mut().unwrap());
    Value::Array(result)
}
/// Translates an `ObjectUnionOf` expression: the operator token is kept
/// verbatim and every operand is translated via `translate_list`.
pub fn translate_union_of(v : &Value,
                          m : &HashMap<String, String>,
                          t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                          ) -> Value {
    let mut result = vec![v[0].clone()];
    let mut operands : Value = translate_list(&(v.as_array().unwrap())[1..], m, t);
    result.append(operands.as_array_mut().unwrap());
    Value::Array(result)
}
/// Translates an `ObjectOneOf` enumeration: the operator token is kept
/// verbatim and every member is translated via `translate_list`.
pub fn translate_one_of(v : &Value,
                        m : &HashMap<String, String>,
                        t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                        ) -> Value {
    let mut result = vec![v[0].clone()];
    let mut operands : Value = translate_list(&(v.as_array().unwrap())[1..], m, t);
    result.append(operands.as_array_mut().unwrap());
    Value::Array(result)
}
/// Translates an `ObjectComplementOf` expression: the operator token is
/// kept verbatim and the single argument is translated recursively.
pub fn translate_complement_of(v : &Value,
                               m : &HashMap<String, String>,
                               t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                               ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
    ])
}
/// Translates an `ObjectInverseOf` property expression: the operator token
/// is kept verbatim and the property is translated recursively.
pub fn translate_inverse_of(v : &Value,
                            m : &HashMap<String, String>,
                            t : &dyn Fn(&Value, &HashMap<String, String>) -> Value
                            ) -> Value {
    Value::Array(vec![
        v[0].clone(),
        translate(&v[1], m, t),
    ])
}
|
// Copyright 2018-2019 Parity Technologies (UK) Ltd.
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use futures::prelude::*;
use libp2p::core::{
self, muxing::StreamMuxerBox, transport::boxed::Boxed, transport::OptionalTransport, upgrade,
};
#[cfg(not(target_os = "unknown"))]
use libp2p::core::{either::EitherError, either::EitherOutput};
use libp2p::{
bandwidth, identity, mplex, secio, wasm_ext, yamux, InboundUpgradeExt, OutboundUpgradeExt,
PeerId, Transport,
};
#[cfg(not(target_os = "unknown"))]
use libp2p::{dns, noise, tcp, websocket};
use std::{io, sync::Arc, time::Duration, usize};
pub use self::bandwidth::BandwidthSinks;
/// Builds the transport that serves as a common ground for all connections.
///
/// If `memory_only` is true, then only communication within the same process are allowed. Only
/// addresses with the format `/memory/...` are allowed.
///
/// Returns a `BandwidthSinks` object that allows querying the average bandwidth produced by all
/// the connections spawned with this transport.
pub fn build_transport(
    keypair: identity::Keypair,
    memory_only: bool,
    wasm_external_transport: Option<wasm_ext::ExtTransport>,
) -> (
    Boxed<(PeerId, StreamMuxerBox), io::Error>,
    Arc<bandwidth::BandwidthSinks>,
) {
    // Build configuration objects for encryption mechanisms.
    #[cfg(not(target_os = "unknown"))]
    let noise_config =
        {
            let noise_keypair = noise::Keypair::new().into_authentic(&keypair)
            // For more information about this panic, see in "On the Importance of Checking
            // Cryptographic Protocols for Faults" by Dan Boneh, Richard A. DeMillo,
            // and Richard J. Lipton.
            .expect("can only fail in case of a hardware bug; since this signing is performed only \
                once and at initialization, we're taking the bet that the inconvenience of a very \
                rare panic here is basically zero");
            noise::NoiseConfig::ix(noise_keypair)
        };
    let secio_config = secio::SecioConfig::new(keypair);
    // Build configuration objects for multiplexing mechanisms.
    let mut mplex_config = mplex::MplexConfig::new();
    mplex_config.max_buffer_len_behaviour(mplex::MaxBufferBehaviour::Block);
    mplex_config.max_buffer_len(usize::MAX);
    let yamux_config = yamux::Config::default();
    // Build the base layer of the transport.
    // Start from the externally supplied transport (WASM), if any.
    let transport = if let Some(t) = wasm_external_transport {
        OptionalTransport::some(t)
    } else {
        OptionalTransport::none()
    };
    // Native targets: layer TCP + WebSocket + DNS on top, unless memory-only.
    #[cfg(not(target_os = "unknown"))]
    let transport = transport.or_transport(if !memory_only {
        let desktop_trans = tcp::TcpConfig::new();
        let desktop_trans =
            websocket::WsConfig::new(desktop_trans.clone()).or_transport(desktop_trans);
        OptionalTransport::some(dns::DnsConfig::new(desktop_trans))
    } else {
        OptionalTransport::none()
    });
    // In-process `/memory/...` transport, only in memory-only mode.
    let transport = transport.or_transport(if memory_only {
        OptionalTransport::some(libp2p::core::transport::MemoryTransport::default())
    } else {
        OptionalTransport::none()
    });
    // Wrap every connection so throughput can be sampled (5 s window).
    let (transport, sinks) = bandwidth::BandwidthLogging::new(transport, Duration::from_secs(5));
    // Encryption
    // For non-WASM, we support both secio and noise.
    #[cfg(not(target_os = "unknown"))]
    let transport = transport.and_then(move |stream, endpoint| {
        let upgrade = core::upgrade::SelectUpgrade::new(noise_config, secio_config);
        core::upgrade::apply(stream, upgrade, endpoint, upgrade::Version::V1).and_then(|out| {
            match out {
                // We negotiated noise
                EitherOutput::First((remote_id, out)) => {
                    let remote_key = match remote_id {
                        noise::RemoteIdentity::IdentityKey(key) => key,
                        // Noise handshakes that do not yield an identity key
                        // are rejected rather than trusted.
                        _ => {
                            return Err(upgrade::UpgradeError::Apply(EitherError::A(
                                noise::NoiseError::InvalidKey,
                            )))
                        }
                    };
                    Ok((EitherOutput::First(out), remote_key.into_peer_id()))
                }
                // We negotiated secio
                EitherOutput::Second((remote_id, out)) => {
                    Ok((EitherOutput::Second(out), remote_id))
                }
            }
        })
    });
    // For WASM, we only support secio for now.
    #[cfg(target_os = "unknown")]
    let transport = transport.and_then(move |stream, endpoint| {
        core::upgrade::apply(stream, secio_config, endpoint, upgrade::Version::V1)
            .and_then(|(id, stream)| Ok((stream, id)))
    });
    // Multiplexing
    // Prefer yamux, fall back to mplex; both handshake directions must tag
    // the stream with the peer id negotiated above.
    let transport = transport
        .and_then(move |(stream, peer_id), endpoint| {
            let peer_id2 = peer_id.clone();
            let upgrade = core::upgrade::SelectUpgrade::new(yamux_config, mplex_config)
                .map_inbound(move |muxer| (peer_id, muxer))
                .map_outbound(move |muxer| (peer_id2, muxer));
            core::upgrade::apply(stream, upgrade, endpoint, upgrade::Version::V1)
                .map(|(id, muxer)| (id, core::muxing::StreamMuxerBox::new(muxer)))
        })
        .timeout(Duration::from_secs(20))
        .map_err(|err| io::Error::new(io::ErrorKind::Other, err))
        .boxed();
    (transport, sinks)
}
|
use super::*;
#[cfg(feature = "aes256-cbc")]
impl<S: Syscall> Aes256Cbc for ClientImplementation<S> {}
/// Convenience requests for the AES-256-CBC mechanism.
pub trait Aes256Cbc: CryptoClient {
    /// Decrypts `message` with the symmetric key `key`; associated data,
    /// nonce and tag slots are passed empty (CBC uses none of them here).
    fn decrypt_aes256cbc<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Decrypt, Self>
    {
        self.decrypt(
            Mechanism::Aes256Cbc, key, message, &[], &[], &[],
        )
    }
    /// Wraps (encrypts) `key` under `wrapping_key`, with no associated data.
    fn wrap_key_aes256cbc(&mut self, wrapping_key: KeyId, key: KeyId)
        -> ClientResult<'_, reply::WrapKey, Self>
    {
        self.wrap_key(Mechanism::Aes256Cbc, wrapping_key, key, &[])
    }
}
#[cfg(feature = "chacha8-poly1305")]
impl<S: Syscall> Chacha8Poly1305 for ClientImplementation<S> {}
/// Convenience requests for the ChaCha8-Poly1305 AEAD mechanism.
pub trait Chacha8Poly1305: CryptoClient {
    /// Decrypts and authenticates `message` using `key`, with the given
    /// associated data, nonce and authentication tag.
    fn decrypt_chacha8poly1305<'c>(&'c mut self, key: KeyId, message: &[u8], associated_data: &[u8],
                                   nonce: &[u8], tag: &[u8])
        -> ClientResult<'c, reply::Decrypt, Self>
    {
        self.decrypt(Mechanism::Chacha8Poly1305, key, message, associated_data, nonce, tag)
    }
    /// Encrypts `message` with `key` and the given associated data; a
    /// 12-byte nonce may be supplied, otherwise none is forwarded (a nonce
    /// rejected by `ShortData::from_slice` is silently dropped via `ok()`).
    fn encrypt_chacha8poly1305<'c>(&'c mut self, key: KeyId, message: &[u8], associated_data: &[u8],
                                   nonce: Option<&[u8; 12]>)
        -> ClientResult<'c, reply::Encrypt, Self>
    {
        self.encrypt(Mechanism::Chacha8Poly1305, key, message, associated_data,
                     nonce.and_then(|nonce| ShortData::from_slice(nonce).ok()))
    }
    /// Generates a new ChaCha8-Poly1305 key stored at `persistence`.
    fn generate_chacha8poly1305_key(&mut self, persistence: Location)
        -> ClientResult<'_, reply::GenerateKey, Self>
    {
        self.generate_key(Mechanism::Chacha8Poly1305, StorageAttributes::new().set_persistence(persistence))
    }
    /// Unwraps a previously wrapped key using `wrapping_key` and stores the
    /// result at `location`; fails with `DataTooLarge` if the wrapped blob
    /// does not fit in a `Message`.
    fn unwrap_key_chacha8poly1305<'c>(&'c mut self, wrapping_key: KeyId, wrapped_key: &[u8],
                                      associated_data: &[u8], location: Location)
        -> ClientResult<'c, reply::UnwrapKey, Self>
    {
        self.unwrap_key(Mechanism::Chacha8Poly1305, wrapping_key,
                        Message::from_slice(wrapped_key).map_err(|_| ClientError::DataTooLarge)?,
                        associated_data,
                        StorageAttributes::new().set_persistence(location))
    }
    /// Wraps (encrypts) `key` under `wrapping_key` with associated data.
    fn wrap_key_chacha8poly1305<'c>(&'c mut self, wrapping_key: KeyId, key: KeyId,
                                    associated_data: &[u8])
        -> ClientResult<'c, reply::WrapKey, Self>
    {
        self.wrap_key(Mechanism::Chacha8Poly1305, wrapping_key, key, associated_data)
    }
}
#[cfg(feature = "hmac-blake2s")]
impl<S: Syscall> HmacBlake2s for ClientImplementation<S> {}
/// Convenience requests for the HMAC-BLAKE2s mechanism.
pub trait HmacBlake2s: CryptoClient {
    /// Derives a key from `base_key` and `message`, stored at `persistence`;
    /// fails with `DataTooLarge` if `message` exceeds `MediumData` capacity.
    fn hmacblake2s_derive_key(&mut self, base_key: KeyId, message: &[u8], persistence: Location)
        -> ClientResult<'_, reply::DeriveKey, Self>
    {
        self.derive_key(
            Mechanism::HmacBlake2s, base_key,
            Some(MediumData::from_slice(message).map_err(|_| ClientError::DataTooLarge)?),
            StorageAttributes::new().set_persistence(persistence))
    }
    /// Signs `message` with `key`, returning the raw (unserialized) MAC.
    fn sign_hmacblake2s<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Sign, Self>
    {
        self.sign(Mechanism::HmacBlake2s, key, message, SignatureSerialization::Raw)
    }
}
#[cfg(feature = "hmac-sha1")]
impl<S: Syscall> HmacSha1 for ClientImplementation<S> {}
/// Convenience requests for the HMAC-SHA1 mechanism.
pub trait HmacSha1: CryptoClient {
    /// Derives a key from `base_key` and `message`, stored at `persistence`;
    /// fails with `DataTooLarge` if `message` exceeds `MediumData` capacity.
    fn hmacsha1_derive_key(&mut self, base_key: KeyId, message: &[u8], persistence: Location)
        -> ClientResult<'_, reply::DeriveKey, Self>
    {
        self.derive_key(
            Mechanism::HmacSha1, base_key,
            Some(MediumData::from_slice(message).map_err(|_| ClientError::DataTooLarge)?),
            StorageAttributes::new().set_persistence(persistence))
    }
    /// Signs `message` with `key`, returning the raw (unserialized) MAC.
    fn sign_hmacsha1<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Sign, Self>
    {
        self.sign(Mechanism::HmacSha1, key, message, SignatureSerialization::Raw)
    }
}
#[cfg(feature = "hmac-sha256")]
impl<S: Syscall> HmacSha256 for ClientImplementation<S> {}
/// Convenience requests for the HMAC-SHA256 mechanism.
pub trait HmacSha256: CryptoClient {
    /// Derives a key from `base_key` and `message`, stored at `persistence`;
    /// fails with `DataTooLarge` if `message` exceeds `MediumData` capacity.
    fn hmacsha256_derive_key(&mut self, base_key: KeyId, message: &[u8], persistence: Location)
        -> ClientResult<'_, reply::DeriveKey, Self>
    {
        self.derive_key(
            Mechanism::HmacSha256, base_key,
            Some(MediumData::from_slice(message).map_err(|_| ClientError::DataTooLarge)?),
            StorageAttributes::new().set_persistence(persistence))
    }
    /// Signs `message` with `key`, returning the raw (unserialized) MAC.
    fn sign_hmacsha256<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Sign, Self>
    {
        self.sign(Mechanism::HmacSha256, key, message, SignatureSerialization::Raw)
    }
}
#[cfg(feature = "hmac-sha512")]
impl<S: Syscall> HmacSha512 for ClientImplementation<S> {}

/// Convenience methods for the HMAC-SHA512 mechanism.
pub trait HmacSha512: CryptoClient {
    /// Derives a key from `base_key` over `message`, storing it with the
    /// requested `persistence`. Fails with `DataTooLarge` if `message` does
    /// not fit in a `MediumData` buffer.
    fn hmacsha512_derive_key(&mut self, base_key: KeyId, message: &[u8], persistence: Location)
        -> ClientResult<'_, reply::DeriveKey, Self>
    {
        let payload = MediumData::from_slice(message).map_err(|_| ClientError::DataTooLarge)?;
        let attributes = StorageAttributes::new().set_persistence(persistence);
        self.derive_key(Mechanism::HmacSha512, base_key, Some(payload), attributes)
    }

    /// Signs `message` with `key` via HMAC-SHA512, raw signature serialization.
    fn sign_hmacsha512<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Sign, Self>
    {
        self.sign(Mechanism::HmacSha512, key, message, SignatureSerialization::Raw)
    }
}
#[cfg(feature = "ed255")]
impl<S: Syscall> Ed255 for ClientImplementation<S> {}
pub trait Ed255: CryptoClient {
fn generate_ed255_private_key(&mut self, persistence: Location)
-> ClientResult<'_, reply::GenerateKey, Self>
{
self.generate_key(Mechanism::Ed255, StorageAttributes::new().set_persistence(persistence))
}
fn derive_ed255_public_key(&mut self, private_key: KeyId, persistence: Location)
-> ClientResult<'_, reply::DeriveKey, Self>
{
self.derive_key(Mechanism::Ed255, private_key, None, StorageAttributes::new().set_persistence(persistence))
}
fn deserialize_ed255_key<'c>(&'c mut self, serialized_key: &[u8], format: KeySerialization, attributes: StorageAttributes)
-> ClientResult<'c, reply::DeserializeKey, Self>
{
self.deserialize_key(Mechanism::Ed255, serialized_key, format, attributes)
}
fn serialize_ed255_key(&mut self, key: KeyId, format: KeySerialization)
-> ClientResult<'_, reply::SerializeKey, Self>
{
self.serialize_key(Mechanism::Ed255, key, format)
}
fn sign_ed255<'c>(&'c mut self, key: KeyId, message: &[u8])
-> ClientResult<'c, reply::Sign, Self>
{
self.sign(Mechanism::Ed255, key, message, SignatureSerialization::Raw)
}
fn verify_ed255<'c>(&'c mut self, key: KeyId, message: &[u8], signature: &[u8])
-> ClientResult<'c, reply::Verify, Self>
{
self.verify(Mechanism::Ed255, key, message, signature, SignatureSerialization::Raw)
}
}
#[cfg(feature = "p256")]
impl<S: Syscall> P256 for ClientImplementation<S> {}
pub trait P256: CryptoClient {
fn generate_p256_private_key(&mut self, persistence: Location)
-> ClientResult<'_, reply::GenerateKey, Self>
{
self.generate_key(Mechanism::P256, StorageAttributes::new().set_persistence(persistence))
}
fn derive_p256_public_key(&mut self, private_key: KeyId, persistence: Location)
-> ClientResult<'_, reply::DeriveKey, Self>
{
self.derive_key(Mechanism::P256, private_key, None, StorageAttributes::new().set_persistence(persistence))
}
fn deserialize_p256_key<'c>(&'c mut self, serialized_key: &[u8], format: KeySerialization, attributes: StorageAttributes)
-> ClientResult<'c, reply::DeserializeKey, Self>
{
self.deserialize_key(Mechanism::P256, serialized_key, format, attributes)
}
fn serialize_p256_key(&mut self, key: KeyId, format: KeySerialization)
-> ClientResult<'_, reply::SerializeKey, Self>
{
self.serialize_key(Mechanism::P256, key, format)
}
// generally, don't offer multiple versions of a mechanism, if possible.
// try using the simplest when given the choice.
// hashing is something users can do themselves hopefully :)
//
// on the other hand: if users need sha256, then if the service runs in secure trustzone
// domain, we'll maybe need two copies of the sha2 code
fn sign_p256<'c>(&'c mut self, key: KeyId, message: &[u8], format: SignatureSerialization)
-> ClientResult<'c, reply::Sign, Self>
{
self.sign(Mechanism::P256, key, message, format)
}
fn verify_p256<'c>(&'c mut self, key: KeyId, message: &[u8], signature: &[u8])
-> ClientResult<'c, reply::Verify, Self>
{
self.verify(Mechanism::P256, key, message, signature, SignatureSerialization::Raw)
}
fn agree_p256(&mut self, private_key: KeyId, public_key: KeyId, persistence: Location)
-> ClientResult<'_, reply::Agree, Self>
{
self.agree(
Mechanism::P256,
private_key,
public_key,
StorageAttributes::new().set_persistence(persistence),
)
}
}
#[cfg(feature = "sha256")]
impl<S: Syscall> Sha256 for ClientImplementation<S> {}

/// Convenience methods for the SHA-256 mechanism.
pub trait Sha256: CryptoClient {
    /// Derives a key from `shared_key`, storing it with the given persistence.
    fn sha256_derive_key(&mut self, shared_key: KeyId, persistence: Location)
        -> ClientResult<'_, reply::DeriveKey, Self>
    {
        let attributes = StorageAttributes::new().set_persistence(persistence);
        self.derive_key(Mechanism::Sha256, shared_key, None, attributes)
    }

    /// Hashes `message` with SHA-256. Fails with `DataTooLarge` if `message`
    /// does not fit in a `Message` buffer.
    fn hash_sha256<'c>(&'c mut self, message: &[u8])
        -> ClientResult<'c, reply::Hash, Self>
    {
        let data = Message::from_slice(message).map_err(|_| ClientError::DataTooLarge)?;
        self.hash(Mechanism::Sha256, data)
    }
}
#[cfg(feature = "tdes")]
impl<S: Syscall> Tdes for ClientImplementation<S> {}

/// Convenience methods for the Triple-DES mechanism.
pub trait Tdes: CryptoClient {
    /// Decrypts `message` with `key`. The IV, associated-data, and tag
    /// arguments are passed empty.
    fn decrypt_tdes<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Decrypt, Self>
    {
        let empty: &[u8] = &[];
        self.decrypt(Mechanism::Tdes, key, message, empty, empty, empty)
    }

    /// Encrypts `message` with `key`. The associated-data argument is passed
    /// empty and no nonce is supplied.
    fn encrypt_tdes<'c>(&'c mut self, key: KeyId, message: &[u8])
        -> ClientResult<'c, reply::Encrypt, Self>
    {
        let empty: &[u8] = &[];
        self.encrypt(Mechanism::Tdes, key, message, empty, None)
    }
}
#[cfg(feature = "totp")]
impl<S: Syscall> Totp for ClientImplementation<S> {}

/// Convenience methods for the TOTP mechanism.
pub trait Totp: CryptoClient {
    /// Produces a TOTP signature for `key` at the given `timestamp`.
    ///
    /// The timestamp is serialized to its little-endian byte representation
    /// and passed as the message to sign, with raw signature serialization.
    fn sign_totp(&mut self, key: KeyId, timestamp: u64)
        -> ClientResult<'_, reply::Sign, Self>
    {
        // FIX: the previous source contained the mojibake token `×tamp` —
        // `&timestamp` corrupted by HTML-entity decoding (`&times` -> `×`) —
        // which is not valid Rust. Restored the intended borrow.
        self.sign(Mechanism::Totp, key,
            &timestamp.to_le_bytes().as_ref(),
            SignatureSerialization::Raw,
        )
    }
}
#[cfg(feature = "x255")]
impl<S: Syscall> X255 for ClientImplementation<S> {}

/// Convenience methods for the X255 key-agreement mechanism.
pub trait X255: CryptoClient {
    /// Generates a fresh X255 secret key stored with the given persistence.
    fn generate_x255_secret_key(&mut self, persistence: Location)
        -> ClientResult<'_, reply::GenerateKey, Self>
    {
        let attributes = StorageAttributes::new().set_persistence(persistence);
        self.generate_key(Mechanism::X255, attributes)
    }

    /// Derives the public key belonging to `secret_key`.
    fn derive_x255_public_key(&mut self, secret_key: KeyId, persistence: Location)
        -> ClientResult<'_, reply::DeriveKey, Self>
    {
        let attributes = StorageAttributes::new().set_persistence(persistence);
        self.derive_key(Mechanism::X255, secret_key, None, attributes)
    }

    /// Runs key agreement between `private_key` and `public_key`, storing the
    /// shared secret with the given persistence.
    fn agree_x255(&mut self, private_key: KeyId, public_key: KeyId, persistence: Location)
        -> ClientResult<'_, reply::Agree, Self>
    {
        let attributes = StorageAttributes::new().set_persistence(persistence);
        self.agree(Mechanism::X255, private_key, public_key, attributes)
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error for this AutoRust-generated client: one transparent
/// variant per API operation, each wrapping that operation's module-level
/// error type (source/Display are delegated via `#[from]` + `transparent`).
/// NOTE(review): generated code — change the generator input, not this file.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
Reservation_AvailableScopes(#[from] reservation::available_scopes::Error),
#[error(transparent)]
GetCatalog(#[from] get_catalog::Error),
#[error(transparent)]
GetAppliedReservationList(#[from] get_applied_reservation_list::Error),
#[error(transparent)]
ReservationOrder_Calculate(#[from] reservation_order::calculate::Error),
#[error(transparent)]
ReservationOrder_List(#[from] reservation_order::list::Error),
#[error(transparent)]
ReservationOrder_Get(#[from] reservation_order::get::Error),
#[error(transparent)]
ReservationOrder_Purchase(#[from] reservation_order::purchase::Error),
#[error(transparent)]
Reservation_Split(#[from] reservation::split::Error),
#[error(transparent)]
Reservation_Merge(#[from] reservation::merge::Error),
#[error(transparent)]
Reservation_List(#[from] reservation::list::Error),
#[error(transparent)]
Reservation_Get(#[from] reservation::get::Error),
#[error(transparent)]
Reservation_Update(#[from] reservation::update::Error),
#[error(transparent)]
Reservation_ListRevisions(#[from] reservation::list_revisions::Error),
#[error(transparent)]
Operation_List(#[from] operation::list::Error),
#[error(transparent)]
CalculateExchange_Post(#[from] calculate_exchange::post::Error),
#[error(transparent)]
Exchange_Post(#[from] exchange::post::Error),
#[error(transparent)]
Quota_Get(#[from] quota::get::Error),
#[error(transparent)]
Quota_CreateOrUpdate(#[from] quota::create_or_update::Error),
#[error(transparent)]
Quota_Update(#[from] quota::update::Error),
#[error(transparent)]
Quota_List(#[from] quota::list::Error),
#[error(transparent)]
QuotaRequestStatus_Get(#[from] quota_request_status::get::Error),
#[error(transparent)]
QuotaRequestStatus_List(#[from] quota_request_status::list::Error),
}
pub mod reservation {
use super::{models, API_VERSION};
/// POST `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/reservations/{id}/availableScopes`.
/// Sends `body` as JSON and returns the parsed `AvailableScopeProperties` on
/// HTTP 200; any other status is surfaced as `Error::DefaultResponse` with
/// the service's deserialized error body.
pub async fn available_scopes(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
body: &models::AvailableScopeRequest,
) -> std::result::Result<models::AvailableScopeProperties, available_scopes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/availableScopes",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(available_scopes::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Bearer auth is optional: only attached when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(available_scopes::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(available_scopes::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(available_scopes::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(available_scopes::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailableScopeProperties = serde_json::from_slice(rsp_body)
.map_err(|source| available_scopes::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
// Non-200: the service is expected to return a serialized `models::Error`.
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| available_scopes::Error::DeserializeError(source, rsp_body.clone()))?;
Err(available_scopes::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`available_scopes`].
pub mod available_scopes {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// POST `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/split`.
/// HTTP 200 returns the resulting reservations; HTTP 202 means the split was
/// accepted for asynchronous processing (no body returned).
pub async fn split(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
body: &models::SplitRequest,
) -> std::result::Result<split::Response, split::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/split",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(split::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(split::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(split::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(split::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(split::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::ReservationResponse> =
serde_json::from_slice(rsp_body).map_err(|source| split::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(split::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(split::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| split::Error::DeserializeError(source, rsp_body.clone()))?;
Err(split::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`split`].
pub mod split {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(Vec<models::ReservationResponse>),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// POST `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/merge`.
/// HTTP 200 returns the merged reservations; HTTP 202 means the merge was
/// accepted for asynchronous processing (no body returned).
pub async fn merge(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
body: &models::MergeRequest,
) -> std::result::Result<merge::Response, merge::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/merge",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(merge::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(merge::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(merge::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(merge::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(merge::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::ReservationResponse> =
serde_json::from_slice(rsp_body).map_err(|source| merge::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(merge::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(merge::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| merge::Error::DeserializeError(source, rsp_body.clone()))?;
Err(merge::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`merge`].
pub mod merge {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(Vec<models::ReservationResponse>),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/reservations`.
/// Lists all reservations in the order; returns the parsed `ReservationList`
/// on HTTP 200, `Error::DefaultResponse` otherwise.
pub async fn list(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
) -> std::result::Result<models::ReservationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ReservationList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list`].
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/reservations/{id}`.
/// Fetches one reservation; `expand`, when given, is forwarded as the
/// `expand` query parameter.
pub async fn get(
operation_config: &crate::OperationConfig,
reservation_id: &str,
reservation_order_id: &str,
expand: Option<&str>,
) -> std::result::Result<models::ReservationResponse, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ReservationResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get`].
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// PATCH `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/reservations/{id}`.
/// Applies `parameters` as a JSON patch body. HTTP 200 returns the updated
/// reservation; HTTP 202 means the update was accepted asynchronously.
pub async fn update(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
parameters: &models::Patch,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ReservationResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for [`update`].
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::ReservationResponse),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET `{base_path}/providers/Microsoft.Capacity/reservationOrders/{order}/reservations/{id}/revisions`.
/// Lists the revision history of a reservation as a `ReservationList`.
pub async fn list_revisions(
operation_config: &crate::OperationConfig,
reservation_id: &str,
reservation_order_id: &str,
) -> std::result::Result<models::ReservationList, list_revisions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/revisions",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(list_revisions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_revisions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_revisions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_revisions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ReservationList =
serde_json::from_slice(rsp_body).map_err(|source| list_revisions::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list_revisions::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_revisions::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`list_revisions`].
pub mod list_revisions {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// GET `{base_path}/subscriptions/{subscription_id}/providers/Microsoft.Capacity/catalogs`.
/// Requires the `reservedResourceType` query parameter; `location` is
/// appended only when provided. Returns the catalog entries on HTTP 200.
pub async fn get_catalog(
operation_config: &crate::OperationConfig,
subscription_id: &str,
reserved_resource_type: &str,
location: Option<&str>,
) -> std::result::Result<Vec<models::Catalog>, get_catalog::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Capacity/catalogs",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(get_catalog::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_catalog::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("reservedResourceType", reserved_resource_type);
if let Some(location) = location {
url.query_pairs_mut().append_pair("location", location);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_catalog::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_catalog::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::Catalog> =
serde_json::from_slice(rsp_body).map_err(|source| get_catalog::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| get_catalog::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_catalog::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get_catalog`].
pub mod get_catalog {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_applied_reservation_list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::AppliedReservations, get_applied_reservation_list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Capacity/appliedReservations",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(get_applied_reservation_list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_applied_reservation_list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(get_applied_reservation_list::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_applied_reservation_list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AppliedReservations = serde_json::from_slice(rsp_body)
.map_err(|source| get_applied_reservation_list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error = serde_json::from_slice(rsp_body)
.map_err(|source| get_applied_reservation_list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_applied_reservation_list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error type for [`get_applied_reservation_list`]; one variant per failure
/// phase of the request pipeline (URL parse, auth, build, send, deserialize,
/// non-success status).
pub mod get_applied_reservation_list {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service returned a non-success HTTP status; `value` holds the
        /// error body parsed as `models::Error`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::Error,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Operations on reservation orders: price calculation, listing, retrieval
/// and purchase. Each operation has a sibling module with its error (and,
/// for `purchase`, response) types.
pub mod reservation_order {
    use super::{models, API_VERSION};
    /// POSTs the purchase request to
    /// `{base_path}/providers/Microsoft.Capacity/calculatePrice` and returns
    /// the parsed [`models::CalculatePriceResponse`] on HTTP 200.
    pub async fn calculate(
        operation_config: &crate::OperationConfig,
        body: &models::PurchaseRequest,
    ) -> std::result::Result<models::CalculatePriceResponse, calculate::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.Capacity/calculatePrice", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(calculate::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(calculate::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // Query string must be complete before `url.as_str()` is handed to the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(calculate::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(calculate::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(calculate::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::CalculatePriceResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| calculate::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any non-200 status: parse the service error payload and surface it.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| calculate::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(calculate::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`calculate`]; one variant per failure phase.
    pub mod calculate {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed error body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists reservation orders via
    /// `GET {base_path}/providers/Microsoft.Capacity/reservationOrders`.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::ReservationOrderList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.Capacity/reservationOrders", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReservationOrderList =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`list`]; one variant per failure phase.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed error body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Gets one reservation order by id via
    /// `GET {base_path}/providers/Microsoft.Capacity/reservationOrders/{reservation_order_id}`.
    /// `expand`, when `Some`, is forwarded as the `$expand` query parameter.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        reservation_order_id: &str,
        expand: Option<&str>,
    ) -> std::result::Result<models::ReservationOrderResponse, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}",
            operation_config.base_path(),
            reservation_order_id
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // Optional `$expand` query parameter.
        if let Some(expand) = expand {
            url.query_pairs_mut().append_pair("$expand", expand);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReservationOrderResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`get`]; one variant per failure phase.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed error body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// PUTs the purchase request to
    /// `{base_path}/providers/Microsoft.Capacity/reservationOrders/{reservation_order_id}`.
    /// Both 200 and 202 are success; each carries a
    /// [`models::ReservationOrderResponse`] body (see [`purchase::Response`]).
    pub async fn purchase(
        operation_config: &crate::OperationConfig,
        reservation_order_id: &str,
        body: &models::PurchaseRequest,
    ) -> std::result::Result<purchase::Response, purchase::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}",
            operation_config.base_path(),
            reservation_order_id
        );
        let mut url = url::Url::parse(url_str).map_err(purchase::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(purchase::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(purchase::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(purchase::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(purchase::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReservationOrderResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| purchase::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(purchase::Response::Ok200(rsp_value))
            }
            // 202: request accepted for (presumably asynchronous) processing —
            // NOTE(review): long-running-operation polling is not handled here.
            http::StatusCode::ACCEPTED => {
                let rsp_body = rsp.body();
                let rsp_value: models::ReservationOrderResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| purchase::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(purchase::Response::Accepted202(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| purchase::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(purchase::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`purchase`].
    pub mod purchase {
        use super::{models, API_VERSION};
        /// Success responses: 200 or 202, each with a parsed body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ReservationOrderResponse),
            Accepted202(models::ReservationOrderResponse),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed error body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod operation {
use super::{models, API_VERSION};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Capacity/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::Error =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
/// Exchange-calculation endpoint for the Microsoft.Capacity provider.
pub mod calculate_exchange {
    use super::{models, API_VERSION};
    /// POSTs the request to
    /// `{base_path}/providers/Microsoft.Capacity/calculateExchange`.
    /// 200 carries a [`models::CalculateExchangeOperationResultResponse`] body;
    /// 202 is returned without parsing a payload (see [`post::Response`]).
    pub async fn post(
        operation_config: &crate::OperationConfig,
        body: &models::CalculateExchangeRequest,
    ) -> std::result::Result<post::Response, post::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.Capacity/calculateExchange", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(post::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(post::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(post::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(post::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(post::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::CalculateExchangeOperationResultResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(post::Response::Ok200(rsp_value))
            }
            // 202: accepted, no response body is deserialized.
            http::StatusCode::ACCEPTED => Ok(post::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(post::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`post`].
    pub mod post {
        use super::{models, API_VERSION};
        /// Success responses: 200 with a parsed body, or bare 202.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::CalculateExchangeOperationResultResponse),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed error body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Exchange endpoint for the Microsoft.Capacity provider.
pub mod exchange {
    use super::{models, API_VERSION};
    /// POSTs the request to `{base_path}/providers/Microsoft.Capacity/exchange`.
    /// 200 carries a [`models::ExchangeOperationResultResponse`] body;
    /// 202 is returned without parsing a payload (see [`post::Response`]).
    pub async fn post(
        operation_config: &crate::OperationConfig,
        body: &models::ExchangeRequest,
    ) -> std::result::Result<post::Response, post::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.Capacity/exchange", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(post::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(post::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(body).map_err(post::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(post::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(post::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ExchangeOperationResultResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(post::Response::Ok200(rsp_value))
            }
            // 202: accepted, no response body is deserialized.
            http::StatusCode::ACCEPTED => Ok(post::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::Error =
                    serde_json::from_slice(rsp_body).map_err(|source| post::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(post::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`post`].
    pub mod post {
        use super::{models, API_VERSION};
        /// Success responses: 200 with a parsed body, or bare 202.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ExchangeOperationResultResponse),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed error body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::Error,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Service-limit (quota) operations. Unlike the reservation operations above,
/// error bodies here deserialize as `models::ExceptionResponse` rather than
/// `models::Error`.
pub mod quota {
    use super::{models, API_VERSION};
    /// Gets the current quota limit for one resource via
    /// `GET .../resourceProviders/{provider_id}/locations/{location}/serviceLimits/{resource_name}`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        provider_id: &str,
        location: &str,
        resource_name: &str,
    ) -> std::result::Result<models::CurrentQuotaLimitBase, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Capacity/resourceProviders/{}/locations/{}/serviceLimits/{}",
            operation_config.base_path(),
            subscription_id,
            provider_id,
            location,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::CurrentQuotaLimitBase =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ExceptionResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`get`]; one variant per failure phase.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed exception body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ExceptionResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// PUTs a quota-change request for one resource. 200 and 201 are both
    /// success with different body types (see [`create_or_update::Response`]).
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        provider_id: &str,
        location: &str,
        resource_name: &str,
        create_quota_request: &models::CurrentQuotaLimitBase,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Capacity/resourceProviders/{}/locations/{}/serviceLimits/{}",
            operation_config.base_path(),
            subscription_id,
            provider_id,
            location,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(create_quota_request).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::QuotaRequestOneResourceSubmitResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            // 201: the request was created; note the distinct response model.
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::QuotaRequestSubmitResponse201 = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ExceptionResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        /// Success responses: 200 and 201 carry different body models.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::QuotaRequestOneResourceSubmitResponse),
            Created201(models::QuotaRequestSubmitResponse201),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed exception body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ExceptionResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// PATCHes a quota-change request for one resource; same URL, body and
    /// response shapes as [`create_or_update`], different HTTP method.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        provider_id: &str,
        location: &str,
        resource_name: &str,
        create_quota_request: &models::CurrentQuotaLimitBase,
    ) -> std::result::Result<update::Response, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Capacity/resourceProviders/{}/locations/{}/serviceLimits/{}",
            operation_config.base_path(),
            subscription_id,
            provider_id,
            location,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(create_quota_request).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::QuotaRequestOneResourceSubmitResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::QuotaRequestSubmitResponse201 =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ExceptionResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response and error types for [`update`].
    pub mod update {
        use super::{models, API_VERSION};
        /// Success responses: 200 and 201 carry different body models.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::QuotaRequestOneResourceSubmitResponse),
            Created201(models::QuotaRequestSubmitResponse201),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed exception body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ExceptionResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists quota limits for a provider/location via
    /// `GET .../resourceProviders/{provider_id}/locations/{location}/serviceLimits`.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        provider_id: &str,
        location: &str,
    ) -> std::result::Result<models::QuotaLimits, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Capacity/resourceProviders/{}/locations/{}/serviceLimits",
            operation_config.base_path(),
            subscription_id,
            provider_id,
            location
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::QuotaLimits =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ExceptionResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`list`]; one variant per failure phase.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success HTTP status; `value` is the parsed exception body.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ExceptionResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations for reading quota request status.
/// NOTE(review): this module looks machine-generated (autorust-style Azure SDK
/// client); prefer regenerating from the OpenAPI spec over hand edits.
pub mod quota_request_status {
use super::{models, API_VERSION};
/// GET the details of a single quota request by `id` for the given
/// subscription/provider/location.
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
provider_id: &str,
location: &str,
id: &str,
) -> std::result::Result<models::QuotaRequestDetails, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Capacity/resourceProviders/{}/locations/{}/serviceLimitsRequests/{}",
operation_config.base_path(),
subscription_id,
provider_id,
location,
id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Bearer auth is attached only when a token credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
// 200 -> parsed details; any other status -> parsed service error payload.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::QuotaRequestDetails =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ExceptionResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get` operation.
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ExceptionResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// GET the list of quota requests for a provider/location, with optional
/// OData `$filter` / `$top` / `$skiptoken` query parameters.
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
provider_id: &str,
location: &str,
filter: Option<&str>,
top: Option<i32>,
skiptoken: Option<&str>,
) -> std::result::Result<models::QuotaRequestDetailsList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Capacity/resourceProviders/{}/locations/{}/serviceLimitsRequests",
operation_config.base_path(),
subscription_id,
provider_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// Optional OData paging/filtering parameters, appended only when supplied.
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
if let Some(skiptoken) = skiptoken {
url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::QuotaRequestDetailsList =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ExceptionResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `list` operation.
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ExceptionResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
pub const POSEIDON_ROUNDS: usize = 31;
|
use super::*;
/// Builds the `Expression` dispatch record wiring this module's
/// bootstrap/typecheck/codegen hooks into the compiler.
pub fn expression() -> Expression {
Expression {
boostrap_compiler,
typecheck,
codegen,
}
}
// No compiler setup is needed for this expression.
// NOTE(review): "boostrap" (sic) matches the `Expression` field name declared
// elsewhere; the typo cannot be fixed locally without changing that struct.
fn boostrap_compiler(_compiler: &mut Compiler) {}
/// Typechecks a return-style expression: constrains the enclosing function's
/// return type to equal the type of the first argument, and yields that type.
///
/// NOTE(review): indexes `args[0]` without an arity check, so an empty
/// argument list panics here instead of returning an error; arity appears to
/// be enforced in `codegen` below — confirm which runs first.
pub fn typecheck(
resolver: &mut TypeResolver<TypecheckType>,
function: &TypevarFunction,
args: &Vec<TypeVar>,
) -> GenericResult<TypeVar> {
resolver.add_constraint(Constraint::Equality(
function.return_type.clone(),
args[0].clone(),
))?;
Ok(args[0].clone())
}
/// Emits a `ret` instruction for the single argument and returns the
/// generated value object. Exactly one argument is required; anything else
/// is a codegen error.
pub fn codegen(context: &mut Context, args: &[Token]) -> CodegenResult<Object> {
    match args {
        // The happy path: exactly one token to lower and return.
        [arg] => {
            let value = gen_token(context, arg)?;
            context.add_instruction(LLVMInstruction::BuildRet {
                source: value.index,
            });
            Ok(value)
        }
        // Wrong arity: report what we actually received.
        _ => Err(CodegenError::new(&format!(
            "expected one argument for return, found {}: {:?}",
            args.len(),
            args
        ))),
    }
}
|
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::VecDeque;
/// Tree structure provided by LeetCode
// Derives added so the type can be compared and printed in tests/debugging;
// the derived PartialEq/Eq recurse structurally through the Rc<RefCell<..>>.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}

impl TreeNode {
    /// Creates a new TreeNode. Provided by LeetCode.
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode {
            val,
            left: None,
            right: None,
        }
    }
}

/// LeetCode Monthly Challenge problem for March 9th, 2021.
#[derive(Debug)]
pub struct Solution {}

impl Solution {
    /// Given the root of a binary tree, then value v and depth d, you need to
    /// add a row of nodes with value v at the given depth d. The root node is
    /// at depth 1.
    /// The adding rule is: given a positive integer depth d, for each NOT null
    /// tree nodes N in depth d-1, create two tree nodes with value v as N's
    /// left subtree root and right subtree root. And N's original left subtree
    /// should be the left subtree of the new left subtree root, its original
    /// right subtree should be the right subtree of the new right subtree root.
    /// If depth d is 1 that means there is no depth d-1 at all, then create a
    /// tree node with value v as the new root of the whole original tree, and
    /// the original tree is the new root's left subtree.
    ///
    /// # Arguments
    /// * root - The root node of a binary tree.
    /// * v - The value of newly inserted nodes.
    /// * d - The level of the binary tree to insert the row (1 indexed).
    ///
    /// # Example
    /// ```
    /// # use crate::add_one_row_to_tree::{Solution, TreeNode};
    /// # use std::rc::Rc;
    /// # use std::cell::RefCell;
    /// # let root = Some(Rc::new(RefCell::new(
    /// #     TreeNode{
    /// #         val: 4,
    /// #         left: Some(Rc::new(RefCell::new(
    /// #             TreeNode{
    /// #                 val: 2,
    /// #                 left: Some(Rc::new(RefCell::new(TreeNode::new(3)))),
    /// #                 right: Some(Rc::new(RefCell::new(TreeNode::new(1)))),
    /// #             }
    /// #         ))),
    /// #         right: Some(Rc::new(RefCell::new(
    /// #             TreeNode{
    /// #                 val: 6,
    /// #                 left: Some(Rc::new(RefCell::new(TreeNode::new(5)))),
    /// #                 right: None
    /// #             }
    /// #         )))
    /// #     }
    /// # )));
    /// // Given the following binary tree before and after insertion:
    /// //    __4__            __4__
    /// //   /     \          /     \
    /// //  2       6   =>   1       1
    /// // / \     /        /         \
    /// // 3  1   5        2           6
    /// //                / \         /
    /// //               3   1       5
    /// let ex_one = Solution::add_one_row(root, 1, 2);
    ///
    /// assert_eq!(
    ///     Solution::level_order(ex_one),
    ///     vec![Some(4),
    ///          Some(1), Some(1),
    ///          Some(2), None, None, Some(6),
    ///          Some(3), Some(1), Some(5), None,
    ///          None, None, None, None, None, None
    ///     ]
    /// );
    /// ```
    ///
    /// # Constraints
    /// * The given d is in range [1, max depth of given tree + 1]
    /// * The binary tree has at least one node.
    ///
    pub fn add_one_row(root: Option<Rc<RefCell<TreeNode>>>, v: i32, d: i32) -> Option<Rc<RefCell<TreeNode>>> {
        // Depth 1 is special: the new node becomes the root and the whole
        // original tree hangs off its left side.
        if d == 1 {
            let mut new_root = TreeNode::new(v);
            new_root.left = root;
            return Some(Rc::new(RefCell::new(new_root)));
        }
        // The constraints guarantee at least one node, so `root` is Some here.
        Solution::recursive_add_row(root.as_ref().unwrap().clone(), 2, v, d);
        root
    }

    /// Walks down to depth `d` and, at each node of depth `d - 1`, splices a
    /// `v`-valued node above each child position, re-hanging the original
    /// left subtree under the new left node and the original right subtree
    /// under the new right node.
    fn recursive_add_row(node: Rc<RefCell<TreeNode>>, level: i32, v: i32, d: i32) {
        if level != d {
            // Not at the target depth yet: recurse into existing children.
            if let Some(left) = &node.borrow().left {
                Solution::recursive_add_row(left.clone(), level + 1, v, d);
            }
            if let Some(right) = &node.borrow().right {
                Solution::recursive_add_row(right.clone(), level + 1, v, d);
            }
        } else {
            let mut n = node.borrow_mut();
            n.left = Some(Rc::new(RefCell::new(TreeNode {
                val: v,
                left: n.left.take(),
                right: None,
            })));
            n.right = Some(Rc::new(RefCell::new(TreeNode {
                val: v,
                left: None,
                right: n.right.take(),
            })));
        }
    }

    /// Returns the level order traversal of a binary tree.
    ///
    /// Fix: the traversal now clones the `Rc` handles instead of `take()`-ing
    /// them out of each node, so it no longer empties the tree it walks (the
    /// previous version detached every child as a side effect of traversal).
    /// The returned sequence is unchanged.
    pub fn level_order(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<Option<i32>> {
        let mut order: Vec<Option<i32>> = Vec::new();
        let mut nodes: VecDeque<Option<Rc<RefCell<TreeNode>>>> = VecDeque::new();
        nodes.push_front(root);
        while let Some(node) = nodes.pop_front() {
            if let Some(n) = node {
                order.push(Some(n.borrow().val));
                // Cheap pointer clones; the tree structure stays intact.
                nodes.push_back(n.borrow().left.clone());
                nodes.push_back(n.borrow().right.clone());
            } else {
                order.push(None);
            }
        }
        order
    }
}
#[cfg(test)]
mod tests {
use super::*;
// Mirrors the doctest example, then adds a depth-3 insertion on an
// unbalanced tree.
#[test]
fn mixed_trees() {
//    __4__            __4__
//   /     \          /     \
//  2       6   =>   1       1
// / \     /        /         \
// 3  1   5        2           6
//                / \         /
//               3   1       5
let root = Some(Rc::new(RefCell::new(
TreeNode{
val: 4,
left: Some(Rc::new(RefCell::new(
TreeNode{
val: 2,
left: Some(Rc::new(RefCell::new(TreeNode::new(3)))),
right: Some(Rc::new(RefCell::new(TreeNode::new(1)))),
}
))),
right: Some(Rc::new(RefCell::new(
TreeNode{
val: 6,
left: Some(Rc::new(RefCell::new(TreeNode::new(5)))),
right: None
}
)))
}
)));
assert_eq!(
Solution::level_order(Solution::add_one_row(root, 1, 2)),
vec![Some(4),
Some(1), Some(1),
Some(2), None, None, Some(6),
Some(3), Some(1), Some(5), None,
None, None, None, None, None, None
]
);
//   4            4
//  /            /
// 2      =>    2
// / \         / \
// 3  1       1   1
//           / \
//          3   1
let root = Some(Rc::new(RefCell::new(
TreeNode{
val: 4,
left: Some(Rc::new(RefCell::new(
TreeNode{
val: 2,
left: Some(Rc::new(RefCell::new(TreeNode::new(3)))),
right: Some(Rc::new(RefCell::new(TreeNode::new(1))))
}
))),
right: None
}
)));
assert_eq!(
Solution::level_order(Solution::add_one_row(root, 1, 3)),
vec![Some(4),
Some(2), None,
Some(1), Some(1),
Some(3), None, None, Some(1),
None, None, None, None
]
);
}
// d == 1: the inserted node becomes the new root.
#[test]
fn single_node_tree() {
//        1
// 4  =>  /
//       4
let root = Some(Rc::new(RefCell::new(TreeNode::new(4))));
assert_eq!(
Solution::level_order(Solution::add_one_row(root, 1, 1)),
vec![Some(1),
Some(4), None,
None, None
]
);
}
// d == max depth + 1: the row is appended below the current leaves.
#[test]
fn add_row_at_max_depth() {
//        4
// 4  => / \
//      1   1
let root = Some(Rc::new(RefCell::new(TreeNode::new(4))));
assert_eq!(
Solution::level_order(Solution::add_one_row(root, 1, 2)),
vec![Some(4),
Some(1), Some(1),
None, None, None, None
]
);
}
}
|
// NOTE(review): svd2rust-style generated accessors for SYSCFG register UR1 —
// regenerate from the SVD rather than editing by hand.
#[doc = "Register `UR1` reader"]
pub type R = crate::R<UR1_SPEC>;
#[doc = "Register `UR1` writer"]
pub type W = crate::W<UR1_SPEC>;
#[doc = "Field `BCM4` reader - Boot Cortex-M4"]
pub type BCM4_R = crate::BitReader;
#[doc = "Field `BCM4` writer - Boot Cortex-M4"]
pub type BCM4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BCM7` reader - Boot Cortex-M7"]
pub type BCM7_R = crate::BitReader;
#[doc = "Field `BCM7` writer - Boot Cortex-M7"]
pub type BCM7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
#[doc = "Bit 0 - Boot Cortex-M4"]
#[inline(always)]
pub fn bcm4(&self) -> BCM4_R {
BCM4_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 16 - Boot Cortex-M7"]
#[inline(always)]
pub fn bcm7(&self) -> BCM7_R {
BCM7_R::new(((self.bits >> 16) & 1) != 0)
}
}
impl W {
#[doc = "Bit 0 - Boot Cortex-M4"]
#[inline(always)]
#[must_use]
pub fn bcm4(&mut self) -> BCM4_W<UR1_SPEC, 0> {
BCM4_W::new(self)
}
#[doc = "Bit 16 - Boot Cortex-M7"]
#[inline(always)]
#[must_use]
pub fn bcm7(&mut self) -> BCM7_W<UR1_SPEC, 16> {
BCM7_W::new(self)
}
// Raw write: caller is responsible for a valid bit pattern, hence `unsafe`.
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "SYSCFG user register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ur1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct UR1_SPEC;
impl crate::RegisterSpec for UR1_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`ur1::R`](R) reader structure"]
impl crate::Readable for UR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ur1::W`](W) writer structure"]
impl crate::Writable for UR1_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets UR1 to value 0"]
impl crate::Resettable for UR1_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use blake3::Hasher;
use std::env;
use std::fs;
use std::io;
/// This is a utility program to hash files using blake3. I am reimplementing
/// this instead of relying on a utility to keep dependencies low.
fn main() {
    // Get the command line arguments
    let args: Vec<String> = env::args().collect();
    // Require exactly one user-supplied argument: the file to hash.
    if args.len() != 2 {
        println!("Usage: {} <filename>", args[0]);
        return;
    }
    // Get the filename from the command line argument
    let filename = &args[1];
    // Open the file; report WHICH file failed and why (the old messages said
    // "(unknown)" and dropped the OS error even though both are known here).
    let mut file = fs::File::open(filename)
        .unwrap_or_else(|e| panic!("Error opening file {}: {}", filename, e));
    let mut hasher = Hasher::new();
    // Stream the file through the hasher instead of reading it into memory.
    io::copy(&mut file, &mut hasher)
        .unwrap_or_else(|e| panic!("Could not hash file at path {}: {}", filename, e));
    // Print the digest as lowercase hex.
    println!("{}", hasher.finalize().to_hex());
}
|
// NOTE(review): svd2rust-style generated DMA channel register block —
// regenerate from the SVD rather than editing by hand. Field offsets in the
// struct must match the #[doc] offsets exactly (layout is #[repr(C)]).
#[doc = r"Register block"]
#[repr(C)]
pub struct CH {
#[doc = "0x00 - channel x configuration register"]
pub cr: CR,
#[doc = "0x04 - channel x number of data register"]
pub ndtr: NDTR,
#[doc = "0x08 - channel x peripheral address register"]
pub par: PAR,
#[doc = "0x0c - channel x memory address register"]
pub mar: MAR,
// Pads the channel block out to its full stride (0x14 bytes total).
_reserved_end: [u8; 0x04],
}
#[doc = "CR (rw) register accessor: channel x configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "channel x configuration register"]
pub mod cr;
#[doc = "NDTR (rw) register accessor: channel x number of data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ndtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ndtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ndtr`]
module"]
pub type NDTR = crate::Reg<ndtr::NDTR_SPEC>;
#[doc = "channel x number of data register"]
pub mod ndtr;
#[doc = "PAR (rw) register accessor: channel x peripheral address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`par::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`par::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`par`]
module"]
pub type PAR = crate::Reg<par::PAR_SPEC>;
#[doc = "channel x peripheral address register"]
pub mod par;
#[doc = "MAR (rw) register accessor: channel x memory address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mar`]
module"]
pub type MAR = crate::Reg<mar::MAR_SPEC>;
#[doc = "channel x memory address register"]
pub mod mar;
|
// NOTE(review): generated peripheral register block (svd2rust-era layout with
// vcell::VolatileCell) — regenerate from the SVD rather than editing by hand.
// The `_reservedN` padding arrays keep the #[repr(C)] layout aligned with the
// #[doc] offsets; do not reorder fields.
#[doc = r" Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - Configuration Register"]
pub cfg: CFG,
#[doc = "0x04 - Software POI Reset"]
pub swpoi: SWPOI,
#[doc = "0x08 - Software POR Reset"]
pub swpor: SWPOR,
_reserved0: [u8; 8usize],
#[doc = "0x14 - TPIU reset"]
pub tpiurst: TPIURST,
_reserved1: [u8; 488usize],
#[doc = "0x200 - Reset Interrupt register: Enable"]
pub inten: INTEN,
#[doc = "0x204 - Reset Interrupt register: Status"]
pub intstat: INTSTAT,
#[doc = "0x208 - Reset Interrupt register: Clear"]
pub intclr: INTCLR,
#[doc = "0x20c - Reset Interrupt register: Set"]
pub intset: INTSET,
// Large gap up to the far-away status register at 0xffff000.
_reserved2: [u8; 268430832usize],
#[doc = "0xffff000 - Status Register (SBL)"]
pub stat: STAT,
}
#[doc = "Configuration Register"]
pub struct CFG {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Configuration Register"]
pub mod cfg;
#[doc = "Software POI Reset"]
pub struct SWPOI {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Software POI Reset"]
pub mod swpoi;
#[doc = "Software POR Reset"]
pub struct SWPOR {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Software POR Reset"]
pub mod swpor;
#[doc = "TPIU reset"]
pub struct TPIURST {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "TPIU reset"]
pub mod tpiurst;
#[doc = "Reset Interrupt register: Enable"]
pub struct INTEN {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Reset Interrupt register: Enable"]
pub mod inten;
#[doc = "Reset Interrupt register: Status"]
pub struct INTSTAT {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Reset Interrupt register: Status"]
pub mod intstat;
#[doc = "Reset Interrupt register: Clear"]
pub struct INTCLR {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Reset Interrupt register: Clear"]
pub mod intclr;
#[doc = "Reset Interrupt register: Set"]
pub struct INTSET {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Reset Interrupt register: Set"]
pub mod intset;
#[doc = "Status Register (SBL)"]
pub struct STAT {
register: ::vcell::VolatileCell<u32>,
}
#[doc = "Status Register (SBL)"]
pub mod stat;
|
use core::cell::{RefCell, RefMut};
use gfx::Bitmap3;
/// Function-pointer type for obtaining a framebuffer handle.
pub type FbGetter<'a> = fn() -> RefMut<'a, Framebuffer>;
/// Borrows the top-screen framebuffer.
///
/// NOTE(review): hands out a `RefMut` from a `static mut` RefCell — this is
/// single-threaded-only and will panic on a reentrant borrow (calling
/// `top_screen()` again while a borrow is live); confirm callers never nest.
pub fn top_screen<'a>() -> RefMut<'a, Framebuffer> {
unsafe {
TOP_SCREEN.borrow_mut()
}
}
// Global top-screen state; `addr` starts at 0 and is patched in by `init()`.
static mut TOP_SCREEN: RefCell<Framebuffer> = RefCell::new(Framebuffer {
addr: 0,
width: 400,
height: 240
});
/// Resolves the framebuffer base address from the linker-provided symbol.
/// Must run before any drawing, since `addr` is 0 until then.
pub fn init() {
extern {
static TOP_FRAMEBUF_START: u32;
}
top_screen().addr = unsafe { TOP_FRAMEBUF_START };
}
/// A raw RGB framebuffer: a base address plus logical screen dimensions
/// (400x240 for the top screen; see TOP_SCREEN).
pub struct Framebuffer {
// Base address of pixel memory (3 bytes per pixel), set by `init()`.
addr: u32,
// Logical screen dimensions in pixels.
width: usize,
height: usize,
}
impl Framebuffer {
/// Writes one RGB pixel at logical position `pos` = (x, y).
///
/// Logical coordinates are first rotated into the buffer's native
/// orientation: (x, y) -> (height - y - 1, x), then indexed in runs of
/// `height` pixels at 3 bytes each.
/// NOTE(review): assumes a 90-degree-rotated, 3-byte-per-pixel framebuffer
/// and performs no bounds checking; `addr` must already be set by `init()`.
pub fn draw_pixel(&mut self, pos: (usize, usize), [r, g, b]: [u8;3]) {
let (x, y) = pos;
let (x, y) = (self.height - y - 1, x);
let base_addr = self.addr + (3 * (y * self.height + x)) as u32;
unsafe {
// SAFETY: relies on base_addr landing inside the framebuffer mapping
// established by init(); volatile so the store is not optimized away
// (this is memory-mapped display memory).
(base_addr as *mut [u8;3]).write_volatile([r, g, b]);
}
}
/// Blits `bmp` at `pos`, letting `f` veto and/or recolor each source
/// pixel: `f((x, y), rgb)` returns (draw?, rgb-to-draw).
pub fn filter_mask_blit<F>(&mut self, pos: (usize, usize), bmp: &Bitmap3, f: F)
where F: Fn((usize, usize), [u8; 3]) -> (bool, [u8; 3]) {
bmp.foreach_byte(|(x, y), rgb| {
let (draw, rgb) = f((x, y), rgb);
if !draw { return }
self.draw_pixel((pos.0 + x, pos.1 + y), rgb);
});
}
/// Fills the whole screen with a single color, pixel by pixel.
pub fn clear(&mut self, rgb: [u8; 3]) {
for x in 0..self.width {
for y in 0..self.height {
self.draw_pixel((x, y), rgb);
}
}
}
/// Logical (width, height) in pixels.
pub fn size(&self) -> (usize, usize) {
(self.width, self.height)
}
}
/// Flushes the data cache after drawing (presumably so the display
/// controller observes the completed pixel writes — confirm against the
/// platform's cache/display documentation).
pub fn draw_commit() {
::caches::flush_dcache()
}
// Identity filter: draw every pixel with its original color.
fn no_filter(_: (usize, usize), rgb: [u8; 3]) -> (bool, [u8; 3]) {
(true, rgb)
}
/// Blits `bmp` to the top screen at `pos`, unfiltered.
pub fn blit(pos: (usize, usize), bmp: &Bitmap3) {
top_screen().filter_mask_blit(pos, bmp, no_filter);
}
/// Blits `bmp` to the top screen at `pos` as a text mask: every black
/// source pixel is drawn in `color`; all other pixels are skipped.
pub fn text_blit(pos: (usize, usize), bmp: &Bitmap3, color: [u8; 3]) {
    // A pixel is part of the glyph iff it is pure black in the bitmap.
    top_screen().filter_mask_blit(pos, bmp, |_, rgb: [u8; 3]| (rgb == [0, 0, 0], color));
}
/// Fills the entire top screen with the given RGB color.
#[inline(never)]
pub fn clear_screen(r: u8, g: u8, b: u8) {
top_screen().clear([r, g, b]);
}
|
use log::info;
use serde::{Deserialize, Serialize};
use serde_xml_rs::from_reader;
use anyhow::Result;
/// One taxon entry inside an ENA lineage: name, taxid, and optional rank.
#[derive(Debug, Deserialize, Serialize)]
pub struct EnaLineageTaxon {
#[serde(rename = "scientificName")]
pub name: String,
#[serde(rename = "taxId")]
pub taxid: usize,
pub rank: Option<String>,
}
/// The `<lineage>` element: the list of ancestor taxons.
#[derive(Debug, Deserialize, Serialize)]
pub struct EnaLineage {
#[serde(rename = "taxon")]
taxons: Vec<EnaLineageTaxon>,
}
/// A full ENA taxon record as returned by the browser XML API.
#[derive(Debug, Deserialize, Serialize)]
pub struct EnaTaxonInfo {
#[serde(rename = "scientificName")]
pub name: String,
#[serde(rename = "taxId")]
pub taxid: usize,
pub rank: Option<String>,
pub lineage: EnaLineage,
}
impl EnaTaxonInfo {
    /// Borrows this record's ancestor taxons as parsed from the ENA
    /// `<lineage>` element.
    pub fn parent_taxons(&self) -> &Vec<EnaLineageTaxon> {
        &self.lineage.taxons
    }
}
/// Root element of the ENA XML response: a set of taxon records.
#[derive(Debug, Deserialize, Serialize)]
struct TaxonSet {
#[serde(rename = "taxon")]
taxons: Vec<EnaTaxonInfo>,
}
/// Fetches ENA taxon records (with lineage) for the given taxon ids.
///
/// Issues a single blocking GET against the ENA browser XML API for the
/// whole batch of ids at once, then parses the returned `<taxonSet>`.
///
/// # Errors
/// Returns an error if the HTTP request fails, the body cannot be read, or
/// the XML cannot be deserialized. An empty response body yields an empty
/// `Vec` (behavior preserved from the original).
pub fn species(taxids: &[usize]) -> Result<Vec<EnaTaxonInfo>> {
    // One comma-separated id list -> one request for the whole batch.
    let tids = taxids
        .iter()
        .map(|t| t.to_string())
        .collect::<Vec<_>>()
        .join(",");
    info!("Fetching species info for {}", tids);
    let url = format!("https://www.ebi.ac.uk/ena/browser/api/xml/{}", tids);
    let body = reqwest::blocking::get(&url)?.text()?;
    if body.is_empty() {
        return Ok(Vec::new());
    }
    let taxon_set: TaxonSet = from_reader(body.as_bytes())?;
    // Move the parsed records out directly instead of pushing one by one.
    Ok(taxon_set.taxons)
}
|
use std::collections::HashMap;
use cards::{Pile, HALF_POINTS, NUM_CARDS, TALON_SIZE};
use contracts::{Klop};
use player::{PlayerId, ContractPlayers};
// NOTE(review): this file is pre-1.0 Rust (`int`, `||` closure types) and
// targets an old compiler — do not modernize piecemeal.
// A map of scores for individual players.
// Only players that have the score != 0 are included.
pub type PlayerScores = HashMap<PlayerId, int>;
// Calculate the scores for the players depending on the contract played.
// At least one player will always score.
// Dispatches on the contract kind; every branch returns a PlayerScores map.
pub fn score(players: &ContractPlayers) -> PlayerScores {
if players.contract().is_klop() {
score_klop(players)
} else if players.contract().is_beggar() {
score_beggar(players)
} else if players.contract().is_valat() {
score_valat(players)
} else {
score_normal(players)
}
}
// Calculate the scores for normal contracts.
// All scoring players pool their piles, the pooled pile is scored once, and
// every scoring player receives the same (rounded) result.
fn score_normal(players: &ContractPlayers) -> PlayerScores {
let contract = players.contract();
let mut pile = Pile::new();
let scoring = players.scoring_players();
let mut p = Vec::with_capacity(2);
// Add card piles of all scoring players to one pile.
for player in scoring.into_iter() {
p.push(player.id());
pile.add_pile(player.pile());
}
// Score all the cards from the scoring players together.
let score = pile.score();
// Every scoring player gets the same amount of points.
// Sign is positive iff the pooled pile exceeds half of all card points;
// the contract's value is added before rounding.
p.iter().map(|&player_id| {
let score = score_sign(|| score > HALF_POINTS) * (score + contract.value());
(player_id, round_score(score))
}).collect()
}
// Calculate the scores for Klop contract.
fn score_klop(players: &ContractPlayers) -> PlayerScores {
let mut scores = HashMap::new();
let scoring = players.scoring_players();
// Cards are scored for every player individually; card points are bad in
// Klop, hence the negation.
for player in scoring.into_iter() {
scores.insert(player.id(), -player.pile().score());
}
// Did anyone hit the special win (zero points) or loss (over half the
// points) condition?
let winner_loser = scores.iter()
.map(|(_, &score)| score)
.find(|score| is_winner_loser(*score))
.is_some();
if !winner_loser {
// Ordinary Klop: everyone keeps their own rounded (negative) score.
scores.iter().map(|(&player_id, &score)| (player_id, round_score(score))).collect()
} else {
// Set the max and -max scores for winner and loser respectively.
// Only winner(s)/loser(s) remain in the map; everyone else drops out.
scores.iter()
.filter(|&(_, &score)| is_winner_loser(score))
.map(|(&player_id, &score)| {
let score = if is_winner(score) {
Klop.value()
} else {
-Klop.value()
};
(player_id, score)
})
.collect()
}
}
// Returns true if a player is a winner or a loser in Klop contract.
fn is_winner_loser(score: int) -> bool {
is_winner(score) || is_loser(score)
}
// Returns true if a player is a winner in Klop contract.
// (Scores here are negated pile values, so 0 means "took no card points".)
fn is_winner(score: int) -> bool {
score == 0
}
// Returns true if a player is a loser in Klop contract.
// (More than half of all card points; the negated score drops below the
// -HALF_POINTS threshold.)
fn is_loser(score: int) -> bool {
score < -HALF_POINTS
}
// Calculate the scores for Beggar and Open Beggar contracts.
// Exactly one player (the declarer) scores: +value if their pile is empty
// (they took no cards), -value otherwise.
fn score_beggar(players: &ContractPlayers) -> PlayerScores {
let contract = players.contract();
let mut scores = HashMap::new();
let scoring = players.scoring_players();
assert!(scoring.len() == 1);
let score = score_sign(|| scoring[0].pile().is_empty()) * contract.value();
scores.insert(scoring[0].id(), score);
scores
}
// Calculate the scores for Valat and Color Valat contracts.
// Exactly one player (the declarer) scores: +value if they captured every
// card outside the talon, -value otherwise.
fn score_valat(players: &ContractPlayers) -> PlayerScores {
let contract = players.contract();
let mut scores = HashMap::new();
let scoring = players.scoring_players();
assert!(scoring.len() == 1);
let score = score_sign(|| scoring[0].pile().size() >= NUM_CARDS - TALON_SIZE) * contract.value();
scores.insert(scoring[0].id(), score);
scores
}
// Returns +1 if the condition succeeds and -1 otherwise.
// NOTE: `||` here is the pre-1.0 boxed-closure type syntax.
fn score_sign(cond: || -> bool) -> int {
if cond() {
1
} else {
-1
}
}
// Round the score to the nearest score divisible by 5.
fn round_score(score: int) -> int {
(score as f64 / 5.0).round() as int * 5
}
#[cfg(test)]
mod test {
use cards::*;
use contracts::{SoloWithout, Klop, Standard, Three, Two, Beggar, beggar, Valat, valat};
use player::{Players, PlayerId};
use super::*;
// Deals a small fixed set of cards: player 2 gets most of the points.
fn init_cards(players: &mut Players) {
for card in [CARD_TAROCK_SKIS, CARD_CLUBS_EIGHT, CARD_HEARTS_JACK,
CARD_SPADES_QUEEN, CARD_TAROCK_14, CARD_HEARTS_KNIGHT].iter() {
players.player_mut(2).pile_mut().add_card(*card);
}
players.player_mut(0).pile_mut().add_card(CARD_HEARTS_KING);
players.player_mut(1).pile_mut().add_card(CARD_SPADES_KING);
players.player_mut(1).pile_mut().add_card(CARD_SPADES_JACK);
players.player_mut(3).pile_mut().add_card(CARD_DIAMONDS_KING);
}
// Empties a player's pile (Klop winner condition).
fn init_no_cards(players: &mut Players, player: PlayerId) {
*players.player_mut(player).pile_mut() = Pile::new();
}
// Gives a player more than half of all card points (Klop loser condition).
fn init_half_points(players: &mut Players, player: PlayerId) {
for card in [CARD_CLUBS_KING, CARD_CLUBS_QUEEN, CARD_CLUBS_KNIGHT,
CARD_TAROCK_SKIS, CARD_TAROCK_MOND, CARD_TAROCK_PAGAT,
CARD_HEARTS_KING, CARD_HEARTS_QUEEN, CARD_HEARTS_KNIGHT].iter() {
players.player_mut(player).pile_mut().add_card(*card);
}
}
#[test]
fn score_for_declarer_is_calculated() {
let mut players = Players::new(4);
init_cards(&mut players);
let cp = players.play_contract(2, SoloWithout);
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[2], -90);
}
#[test]
fn score_for_declarer_and_partner_is_calculated() {
let mut players = Players::new(4);
players.player_mut(3).set_partner(2);
init_cards(&mut players);
let cp = players.play_contract(3, Standard(Two));
let scores = score(&cp);
assert_eq!(scores.len(), 2);
assert_eq!(scores[3], -35);
assert_eq!(scores[3], scores[2]);
}
#[test]
fn winning_the_contract_awards_positive_points() {
let mut players = Players::new(4);
players.player_mut(3).set_partner(2);
init_cards(&mut players);
init_half_points(&mut players, 2);
let cp = players.play_contract(3, Standard(Three));
let scores = score(&cp);
assert_eq!(scores.len(), 2);
assert_eq!(scores[3], 60);
assert_eq!(scores[3], scores[2]);
}
#[test]
fn every_player_is_scored_independently_in_klop() {
let mut players = Players::new(4);
init_cards(&mut players);
let cp = players.play_contract(2, Klop);
let scores = score(&cp);
assert_eq!(scores.len(), 4);
assert_eq!(scores[0], -5);
assert_eq!(scores[1], -5);
assert_eq!(scores[2], -10);
assert_eq!(scores[3], -5);
}
#[test]
fn klop_only_winner_scores() {
let mut players = Players::new(4);
init_cards(&mut players);
init_no_cards(&mut players, 0);
let cp = players.play_contract(2, Klop);
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[0], 70);
}
#[test]
fn klop_only_loser_scores() {
let mut players = Players::new(4);
init_cards(&mut players);
init_half_points(&mut players, 1);
let cp = players.play_contract(2, Klop);
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[1], -70);
}
#[test]
fn both_winner_and_loser_score() {
let mut players = Players::new(4);
init_cards(&mut players);
init_no_cards(&mut players, 2);
init_half_points(&mut players, 3);
let cp = players.play_contract(0, Klop);
let scores = score(&cp);
assert_eq!(scores.len(), 2);
assert_eq!(scores[2], 70);
assert_eq!(scores[3], -70);
}
#[test]
fn beggar_is_won_if_declarer_wins_no_tricks() {
let mut players = Players::new(4);
init_cards(&mut players);
init_no_cards(&mut players, 2);
let cp = players.play_contract(2, Beggar(beggar::Normal));
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[2], 70);
}
// NOTE(review): name is misleading — this exercises the LOSS case where the
// declarer DID win tricks (their pile is non-empty); consider renaming to
// beggar_is_lost_if_declarer_wins_a_trick.
#[test]
fn beggar_is_lost_if_declarer_wins_no_tricks() {
let mut players = Players::new(4);
init_cards(&mut players);
let cp = players.play_contract(2, Beggar(beggar::Open));
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[2], -90);
}
// NOTE(review): name is misleading — valat is WON when the declarer takes
// every trick (48 cards here), not "no tricks"; consider renaming.
#[test]
fn valat_is_won_if_declarer_wins_no_tricks() {
let mut players = Players::new(4);
for card in CARDS[0 .. 48].iter() {
players.player_mut(1).pile_mut().add_card(*card);
}
let cp = players.play_contract(1, Valat(valat::Normal));
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[1], 250);
}
// NOTE(review): name is misleading — this is the loss case (47 cards, one
// short of a full valat); consider renaming.
#[test]
fn valat_is_lost_if_declarer_wins_no_tricks() {
let mut players = Players::new(4);
for card in CARDS[0 .. 47].iter() {
players.player_mut(3).pile_mut().add_card(*card);
}
let cp = players.play_contract(3, Valat(valat::Color));
let scores = score(&cp);
assert_eq!(scores.len(), 1);
assert_eq!(scores[3], -125);
}
}
|
extern crate diesel;
use super::schema::posts;
use super::schema::comments;
use diesel::*;
/// A blog post row from the `posts` table.
#[derive(Queryable, Identifiable, PartialEq, Clone)]
pub struct Post {
pub id: i32,
pub title: String,
pub body: String,
pub published: bool,
}
/// Insertable form of `Post` (`id`/`published` come from database defaults).
#[derive(Insertable)]
#[table_name="posts"]
pub struct NewPost<'a> {
pub title: &'a str,
pub body: &'a str,
}
// Post <-> Comment must both carry these derives (Identifiable on the parent,
// Associations + belongs_to on the child) or the association won't work.
// (Translated from the original Japanese comment.)
#[derive(Queryable, Associations, PartialEq, Identifiable)]
#[belongs_to(Post, foreign_key = "post_id")]
#[table_name = "comments"]
pub struct Comment {
pub id: i32,
pub post_id: i32,
pub text: String,
}
/// Insertable form of `Comment`.
#[derive(Insertable)]
#[table_name="comments"]
pub struct NewComments<'a> {
pub post_id: &'a i32,
pub text: &'a str,
}
extern crate wallpaper;
extern crate chrono;
extern crate structopt;
extern crate serde_json;
extern crate serde;
use structopt::StructOpt;
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
/// Parses an "HH:MM" command-line value into a `NaiveTime`.
///
/// structopt's `parse(from_str = ...)` requires an infallible signature, so
/// a malformed value aborts the program — but now with a descriptive message
/// instead of the bare `unwrap` panic it used to produce.
fn parse_time(time: &str) -> NaiveTime {
    NaiveTime::parse_from_str(time, "%H:%M")
        .expect("time must be given as HH:MM, e.g. 07:30")
}
/// Command-line interface: `wallpaper add <path> <time>` or `wallpaper run`.
#[derive(Debug, StructOpt)]
#[structopt(name = "wallpaper", about = "An example of StructOpt usage.")]
enum Opts {
// Register a wallpaper to be shown starting at `time` (HH:MM).
#[structopt(name = "add")]
Add {
#[structopt(name = "path")]
path: String,
// Parsed via `parse_time`; malformed values abort the program.
#[structopt(name = "time", parse(from_str = "parse_time"))]
time: NaiveTime
},
// Apply the wallpaper scheduled for the current time of day.
#[structopt(name = "run")]
Run
}
/// One scheduled wallpaper: image path plus the time of day from which it
/// should be shown.
#[derive(Serialize, Deserialize)]
struct Wallpaper {
path: String,
// Stored as "HH:MM" in config.json via the `time_format` module.
#[serde(with = "time_format")]
time: NaiveTime
}
/// On-disk configuration (config.json): the full wallpaper schedule.
#[derive(Serialize, Deserialize)]
struct Config {
wallpapers: Vec<Wallpaper>
}
/// Serde (de)serialization of `NaiveTime` as "HH:MM" strings, used via
/// `#[serde(with = "time_format")]` on `Wallpaper::time`.
mod time_format {
    use chrono::NaiveTime;
    use serde::{self, Deserialize, Serializer, Deserializer};

    /// Single source of truth for the wire format, used by BOTH directions
    /// so serialize and deserialize can never drift apart.
    const FORMAT: &'static str = "%H:%M";

    /// Formats `date` as "HH:MM".
    pub fn serialize<S>(
        date: &NaiveTime,
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", date.format(FORMAT));
        serializer.serialize_str(&s)
    }

    /// Parses an "HH:MM" string back into a `NaiveTime`.
    pub fn deserialize<'de, D>(
        deserializer: D,
    ) -> Result<NaiveTime, D::Error>
    where
        D: Deserializer<'de>,
    {
        let time = String::deserialize(deserializer)?;
        // Fix: previously hard-coded "%H:%M" here instead of using FORMAT,
        // so changing the constant would silently break round-tripping.
        NaiveTime::parse_from_str(&time, FORMAT).map_err(serde::de::Error::custom)
    }
}
fn main() {
let opt = Opts::from_args();
let srcdir = PathBuf::from("./src/config.json");
println!("{:?}", fs::canonicalize(&srcdir));
let config = fs::read_to_string(srcdir)
.expect("Something went wrong reading the file");
let config: &str = &*config;
let config: Config = serde_json::from_str(config).unwrap();
match opt {
Opts::Add { path, time } => {
},
Opts::Run => {
let mut wallpapers = config.wallpapers;
wallpapers.sort_by(|a, b| b.time.cmp(&a.time));
let current_time = Local::now().naive_local().time();
let wallpaper_to_change = wallpapers.iter().find(|x| x.time <= current_time).unwrap();
let path: &str = &*wallpaper_to_change.path;
match wallpaper::set_from_path(path) {
Ok(_) => println!("nice"),
Err(_) => println!("error")
};
}
}
} |
// svd2rust-generated accessor types for ETH_MACRxQC2R (Rx queue control 2:
// routes tagged packets to Rx queues 0/1 by user-priority). Code must stay
// byte-compatible with the generator's output.
#[doc = "Register `ETH_MACRxQC2R` reader"]
pub type R = crate::R<ETH_MACRX_QC2R_SPEC>;
#[doc = "Register `ETH_MACRxQC2R` writer"]
pub type W = crate::W<ETH_MACRX_QC2R_SPEC>;
#[doc = "Field `PSRQ0` reader - PSRQ0"]
pub type PSRQ0_R = crate::FieldReader;
#[doc = "Field `PSRQ0` writer - PSRQ0"]
pub type PSRQ0_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `PSRQ1` reader - PSRQ1"]
pub type PSRQ1_R = crate::FieldReader;
#[doc = "Field `PSRQ1` writer - PSRQ1"]
pub type PSRQ1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
impl R {
    #[doc = "Bits 0:7 - PSRQ0"]
    #[inline(always)]
    pub fn psrq0(&self) -> PSRQ0_R {
        // Low byte of the register value.
        PSRQ0_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - PSRQ1"]
    #[inline(always)]
    pub fn psrq1(&self) -> PSRQ1_R {
        // Second byte of the register value.
        PSRQ1_R::new(((self.bits >> 8) & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - PSRQ0"]
    #[inline(always)]
    #[must_use]
    pub fn psrq0(&mut self) -> PSRQ0_W<ETH_MACRX_QC2R_SPEC, 0> {
        PSRQ0_W::new(self)
    }
    #[doc = "Bits 8:15 - PSRQ1"]
    #[inline(always)]
    #[must_use]
    pub fn psrq1(&mut self) -> PSRQ1_W<ETH_MACRX_QC2R_SPEC, 8> {
        PSRQ1_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe because arbitrary bit patterns bypass the typed field writers;
    // the caller must ensure the value is valid for the hardware.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register controls the routing of tagged packets based on the USP (user priority) field of the received packets to the Rx queue 0 and 1.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_macrx_qc2r::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eth_macrx_qc2r::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_MACRX_QC2R_SPEC;
impl crate::RegisterSpec for ETH_MACRX_QC2R_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`eth_macrx_qc2r::R`](R) reader structure"]
impl crate::Readable for ETH_MACRX_QC2R_SPEC {}
#[doc = "`write(|w| ..)` method takes [`eth_macrx_qc2r::W`](W) writer structure"]
impl crate::Writable for ETH_MACRX_QC2R_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ETH_MACRxQC2R to value 0"]
impl crate::Resettable for ETH_MACRX_QC2R_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
v1_imports!();
use rocket::Route;
use db::{project, session, staff, student, user};
use session::Session;
/// All project-related endpoints mounted by the v1 API.
pub fn get_routes() -> Vec<Route> {
    routes![
        get_projs,
        new_proj,
        update_proj,
        rm_proj,
        get_project_students
    ]
}
/// GET /projects — staff see every project, students only current ones.
#[allow(needless_pass_by_value)]
#[get("/projects")]
fn get_projs(conn: DatabaseConnection, session: Session) -> V1Response<ProjectList> {
    let res = match user::find_user(&conn, &session.email[..]) {
        Some(user::User::Staff(_s)) => project::get_all(&conn),
        Some(user::User::Student(_s)) => project::get_all_current(&conn),
        // A valid session implies the user row exists; anything else is a bug.
        None => panic!("A session exists for a user which does not exist!"),
    }.map_err(select_error_handler!("no projects found"))?;
    // Decorate each project with its supervising staff before returning.
    let projs =
        project::attach_staff(&conn, res).map_err(select_error_handler!("error fetching staff"))?;
    Ok(Json(ProjectList { projects: projs }))
}
/// POST /projects — create a project. Non-admin staff may only create
/// projects supervised by themselves, so the supervisor fields in the
/// payload are overwritten with the caller's own identity.
#[allow(needless_pass_by_value)]
#[post("/projects", data = "<body>")]
fn new_proj(
    mut body: Json<project::NewProjectWithStaff>,
    usr: staff::Staff,
    conn: DatabaseConnection,
) -> V1Response<project::ProjectWithStaff> {
    if !usr.is_admin {
        body.supervisor_name = usr.full_name;
        body.supervisor_email = usr.email;
    }
    match project::create_with_staff(&conn, &body) {
        Ok(p) => Ok(Json(p)),
        Err(e) => Err(diesel_error_handler!(e)),
    }
}
/// PUT /projects/&lt;id&gt; — update a project. Non-admins may only edit
/// projects they supervise; projects from archived (non-current) sessions
/// are immutable.
#[allow(needless_pass_by_value)]
#[put("/projects/<id>", data = "<body>")]
fn update_proj(
    id: i32,
    body: Json<project::Project>,
    usr: staff::Staff,
    conn: DatabaseConnection,
) -> V1Response<project::Project> {
    // Ownership check: admins bypass, others must own the project.
    if !usr.is_admin && usr.email != body.supervisor_email {
        return Err(bad_request!("you do not own that project"));
    }
    // The URL id is authoritative; reject mismatched payloads.
    if body.id != id {
        return Err(bad_request!("project ID does not match ID in body"));
    }
    let current_proj = project::get_project(&conn, id).map_err(|e| match e {
        SelectError::NoSuchValue() => not_found!("no such project"),
        SelectError::DieselError(e) => diesel_error_handler!(e),
    })?;
    // Only projects belonging to the current session may be edited.
    let (is_curr, _) = session::get_session(&conn, current_proj.session)
        .map_err(|_e| internal_server_error!("database error"))?;
    if !is_curr {
        return Err(bad_request!("cannot edit an archived project"));
    }
    project::update(&conn, &body).map_err(|e| diesel_error_handler!(e))?;
    // Echo the updated representation back to the client.
    Ok(body)
}
/// DELETE /projects/&lt;id&gt; — admin-only removal; the select handler turns a
/// missing project into the "no such project" error response.
#[allow(needless_pass_by_value)]
#[delete("/projects/<id>")]
fn rm_proj(id: i32, _usr: staff::Admin, conn: DatabaseConnection) -> V1Response<GenericMessage> {
    let p = project::get_project(&conn, id).map_err(select_error_handler!("no such project"))?;
    project::delete(&conn, &p).map_err(|e| diesel_error_handler!(e))?;
    Ok(generic_message!("ok"))
}
/// GET /projects/&lt;id&gt;/students — admin-only list of students associated
/// with the given project via their selections.
#[allow(needless_pass_by_value)]
#[get("/projects/<id>/students")]
fn get_project_students(
    id: i32,
    _usr: staff::Admin,
    conn: DatabaseConnection,
) -> V1Response<StudentList> {
    let students = student::selection::get_students_for_project(&conn, id)
        .map_err(select_error_handler!("database error"))?;
    Ok(Json(StudentList { students }))
}
|
use crate::{
cost_model::transferred_byte_cycles,
syscalls::{
utils::store_data, CellField, Source, SourceEntry, INDEX_OUT_OF_BOUND, ITEM_MISSING,
LOAD_CELL_BY_FIELD_SYSCALL_NUMBER, LOAD_CELL_SYSCALL_NUMBER, SUCCESS,
},
};
use byteorder::{LittleEndian, WriteBytesExt};
use ckb_types::{
core::{cell::CellMeta, Capacity},
packed::CellOutput,
prelude::*,
};
use ckb_vm::{
registers::{A0, A3, A4, A5, A7},
Error as VMError, Register, SupportMachine, Syscalls,
};
/// Syscall handler for LOAD_CELL / LOAD_CELL_BY_FIELD: exposes the
/// transaction's resolved cells to scripts running inside the VM.
pub struct LoadCell<'a> {
    outputs: &'a [CellMeta],
    resolved_inputs: &'a [CellMeta],
    resolved_cell_deps: &'a [CellMeta],
    // Indices into resolved_inputs / outputs for the current script group.
    group_inputs: &'a [usize],
    group_outputs: &'a [usize],
}
impl<'a> LoadCell<'a> {
    /// Bundles the transaction's resolved cells plus the current script
    /// group's index views over them.
    pub fn new(
        outputs: &'a [CellMeta],
        resolved_inputs: &'a [CellMeta],
        resolved_cell_deps: &'a [CellMeta],
        group_inputs: &'a [usize],
        group_outputs: &'a [usize],
    ) -> LoadCell<'a> {
        LoadCell {
            outputs,
            resolved_inputs,
            resolved_cell_deps,
            group_inputs,
            group_outputs,
        }
    }
    /// Resolves a (source, index) pair to a cell. Failures are returned as
    /// syscall status codes (u8), not VM errors: INDEX_OUT_OF_BOUND covers
    /// both out-of-range indices and sources that carry no cells
    /// (header deps, group cell/header deps).
    fn fetch_cell(&self, source: Source, index: usize) -> Result<&'a CellMeta, u8> {
        match source {
            Source::Transaction(SourceEntry::Input) => {
                self.resolved_inputs.get(index).ok_or(INDEX_OUT_OF_BOUND)
            }
            Source::Transaction(SourceEntry::Output) => {
                self.outputs.get(index).ok_or(INDEX_OUT_OF_BOUND)
            }
            Source::Transaction(SourceEntry::CellDep) => {
                self.resolved_cell_deps.get(index).ok_or(INDEX_OUT_OF_BOUND)
            }
            Source::Transaction(SourceEntry::HeaderDep) => Err(INDEX_OUT_OF_BOUND),
            // Group sources go through one indirection: group index -> tx index.
            Source::Group(SourceEntry::Input) => self
                .group_inputs
                .get(index)
                .ok_or(INDEX_OUT_OF_BOUND)
                .and_then(|actual_index| {
                    self.resolved_inputs
                        .get(*actual_index)
                        .ok_or(INDEX_OUT_OF_BOUND)
                }),
            Source::Group(SourceEntry::Output) => self
                .group_outputs
                .get(index)
                .ok_or(INDEX_OUT_OF_BOUND)
                .and_then(|actual_index| self.outputs.get(*actual_index).ok_or(INDEX_OUT_OF_BOUND)),
            Source::Group(SourceEntry::CellDep) => Err(INDEX_OUT_OF_BOUND),
            Source::Group(SourceEntry::HeaderDep) => Err(INDEX_OUT_OF_BOUND),
        }
    }
    /// LOAD_CELL: writes the serialized CellOutput into VM memory.
    /// Returns (status, bytes written) so the caller can charge cycles.
    fn load_full<Mac: SupportMachine>(
        &self,
        machine: &mut Mac,
        output: &CellOutput,
    ) -> Result<(u8, u64), VMError> {
        let data = output.as_slice();
        let wrote_size = store_data(machine, data)?;
        Ok((SUCCESS, wrote_size))
    }
    /// LOAD_CELL_BY_FIELD: writes one field of the cell, selected by the A5
    /// register. Optional fields (data hash, type, type hash) yield
    /// ITEM_MISSING with zero bytes written.
    fn load_by_field<Mac: SupportMachine>(
        &self,
        machine: &mut Mac,
        cell: &CellMeta,
    ) -> Result<(u8, u64), VMError> {
        let field = CellField::parse_from_u64(machine.registers()[A5].to_u64())?;
        let output = &cell.cell_output;
        let result = match field {
            CellField::Capacity => {
                // Capacity is serialized as a little-endian u64.
                let capacity: Capacity = output.capacity().unpack();
                let mut buffer = vec![];
                buffer.write_u64::<LittleEndian>(capacity.as_u64())?;
                (SUCCESS, store_data(machine, &buffer)?)
            }
            CellField::DataHash => {
                // Only available when the cell data (and its hash) is cached.
                if let Some((_, data_hash)) = &cell.mem_cell_data {
                    let bytes = data_hash.raw_data();
                    (SUCCESS, store_data(machine, &bytes)?)
                } else {
                    (ITEM_MISSING, 0)
                }
            }
            CellField::OccupiedCapacity => {
                let mut buffer = vec![];
                buffer.write_u64::<LittleEndian>(
                    cell.occupied_capacity()
                        .map_err(|_| VMError::Unexpected)?
                        .as_u64(),
                )?;
                (SUCCESS, store_data(machine, &buffer)?)
            }
            CellField::Lock => {
                let lock = output.lock();
                let data = lock.as_slice();
                (SUCCESS, store_data(machine, data)?)
            }
            CellField::LockHash => {
                let hash = output.calc_lock_hash();
                let bytes = hash.as_bytes();
                (SUCCESS, store_data(machine, &bytes)?)
            }
            CellField::Type => match output.type_().to_opt() {
                Some(type_) => {
                    let data = type_.as_slice();
                    (SUCCESS, store_data(machine, data)?)
                }
                None => (ITEM_MISSING, 0),
            },
            CellField::TypeHash => match output.type_().to_opt() {
                Some(type_) => {
                    let hash = type_.calc_script_hash();
                    let bytes = hash.as_bytes();
                    (SUCCESS, store_data(machine, &bytes)?)
                }
                None => (ITEM_MISSING, 0),
            },
        };
        Ok(result)
    }
}
impl<'a, Mac: SupportMachine> Syscalls<Mac> for LoadCell<'a> {
    fn initialize(&mut self, _machine: &mut Mac) -> Result<(), VMError> {
        Ok(())
    }
    /// Dispatches the two load-cell syscalls. Register convention:
    /// A7 = syscall number, A3 = index, A4 = source, A5 = field (by-field
    /// only); A0 receives the status code. Returns Ok(false) for syscall
    /// numbers this handler does not own, so other handlers can run.
    fn ecall(&mut self, machine: &mut Mac) -> Result<bool, VMError> {
        let load_by_field = match machine.registers()[A7].to_u64() {
            LOAD_CELL_SYSCALL_NUMBER => false,
            LOAD_CELL_BY_FIELD_SYSCALL_NUMBER => true,
            _ => return Ok(false),
        };
        let index = machine.registers()[A3].to_u64();
        let source = Source::parse_from_u64(machine.registers()[A4].to_u64())?;
        let cell = self.fetch_cell(source, index as usize);
        // Lookup failures are reported to the script via A0, not as VM errors.
        if let Err(err) = cell {
            machine.set_register(A0, Mac::REG::from_u8(err));
            return Ok(true);
        }
        let cell = cell.unwrap();
        let (return_code, len) = if load_by_field {
            self.load_by_field(machine, cell)?
        } else {
            self.load_full(machine, &cell.cell_output)?
        };
        // Charge cycles proportional to the number of bytes transferred.
        machine.add_cycles(transferred_byte_cycles(len as u64))?;
        machine.set_register(A0, Mac::REG::from_u8(return_code));
        Ok(true)
    }
}
|
use crate::common::{self, *};
// The concrete testable LinearLayout type: the plygui wrapper chain
// assembled around TestableLinearLayout.
pub type LinearLayout = AMember<AControl<AContainer<AMultiContainer<ALinearLayout<TestableLinearLayout>>>>>;

/// Test-backend implementation of a linear layout control.
#[repr(C)]
pub struct TestableLinearLayout {
    base: TestableControlBase<LinearLayout>,
    // Axis along which children are stacked.
    orientation: layout::Orientation,
    children: Vec<Box<dyn controls::Control>>,
}
impl<O: controls::LinearLayout> NewLinearLayoutInner<O> for TestableLinearLayout {
    // Builds the inner state for a not-yet-initialized outer wrapper `u`;
    // the base captures the outer object's id from the uninitialized slot.
    fn with_uninit_params(u: &mut mem::MaybeUninit<O>, orientation: layout::Orientation) -> Self {
        TestableLinearLayout {
            base: common::TestableControlBase::with_id(u),
            orientation,
            children: Vec::new(),
        }
    }
}
impl LinearLayoutInner for TestableLinearLayout {
    /// Constructs a boxed LinearLayout with the given orientation.
    fn with_orientation(orientation: layout::Orientation) -> Box<dyn controls::LinearLayout> {
        // Two-phase construction: allocate uninitialized storage first so the
        // inner state can record the outer object's identity, then write the
        // fully assembled wrapper chain into that storage.
        let mut b: Box<mem::MaybeUninit<LinearLayout>> = Box::new_uninit();
        let ab = AMember::with_inner(
            AControl::with_inner(
                AContainer::with_inner(
                    AMultiContainer::with_inner(
                        ALinearLayout::with_inner(
                            <Self as NewLinearLayoutInner<LinearLayout>>::with_uninit_params(b.as_mut(), orientation)
                        ),
                    )
                ),
            )
        );
        // SAFETY: `ab` is a complete value; writing it fully initializes the
        // allocation before assume_init.
        unsafe {
            b.as_mut_ptr().write(ab);
            b.assume_init()
        }
    }
}
impl Spawnable for TestableLinearLayout {
    // Default spawn: a vertical linear layout, erased to a plain Control.
    fn spawn() -> Box<dyn controls::Control> {
        let vertical = Self::with_orientation(layout::Orientation::Vertical);
        vertical.into_control()
    }
}
impl HasOrientationInner for TestableLinearLayout {
    // Reports the current stacking axis.
    fn orientation(&self, _: &MemberBase) -> layout::Orientation {
        self.orientation
    }
    // Stores a new orientation and invalidates the control; no-op when the
    // orientation is unchanged.
    fn set_orientation(&mut self, _base: &mut MemberBase, orientation: layout::Orientation) {
        if self.orientation == orientation {
            return;
        }
        self.orientation = orientation;
        self.base.invalidate();
    }
}
impl MultiContainerInner for TestableLinearLayout {
    fn len(&self) -> usize {
        self.children.len()
    }
    /// Replaces the child at `index`, returning the displaced child (if any).
    fn set_child_to(&mut self, base: &mut MemberBase, index: usize, child: Box<dyn controls::Control>) -> Option<Box<dyn controls::Control>> {
        let old = self.remove_child_from(base, index);
        self.children.insert(index, child);
        // When attached to a parent, the new child must immediately learn its
        // container and the available (padding-adjusted) area.
        if self.base.parent.is_some() {
            let (w, h) = base.as_any().downcast_ref::<LinearLayout>().unwrap().inner().base.measured;
            self.children.get_mut(index).unwrap().on_added_to_container(
                self.base.as_outer_mut(),
                w as i32 - DEFAULT_PADDING,
                h as i32 - DEFAULT_PADDING,
                utils::coord_to_size(w as i32 - DEFAULT_PADDING),
                utils::coord_to_size(h as i32 - DEFAULT_PADDING),
            );
            self.base.invalidate();
        }
        old
    }
    /// Detaches and returns the child at `index`; None when out of range.
    fn remove_child_from(&mut self, _base: &mut MemberBase, index: usize) -> Option<Box<dyn controls::Control>> {
        if index < self.children.len() {
            let mut old = self.children.remove(index);
            if self.base.parent.is_some() {
                old.on_removed_from_container(self.base.as_outer_mut());
                self.base.invalidate();
            }
            Some(old)
        } else {
            None
        }
    }
    fn child_at(&self, index: usize) -> Option<&dyn controls::Control> {
        self.children.get(index).map(|c| c.as_ref())
    }
    fn child_at_mut(&mut self, index: usize) -> Option<&mut dyn controls::Control> {
        // A `map(|c| c.as_mut())` here trips borrow-checker lifetime inference
        // ("the anonymous lifetime #1 does not necessarily outlive the static
        // lifetime"), hence the explicit if-let.
        if let Some(c) = self.children.get_mut(index) {
            Some(c.as_mut())
        } else {
            None
        }
    }
}
impl ControlInner for TestableLinearLayout {
    fn parent(&self) -> Option<&dyn controls::Member> {
        self.base.parent().map(|p| p.as_member())
    }
    fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> {
        self.base.parent_mut().map(|p| p.as_member_mut())
    }
    fn root(&self) -> Option<&dyn controls::Member> {
        self.base.root().map(|p| p.as_member())
    }
    fn root_mut(&mut self) -> Option<&mut dyn controls::Member> {
        self.base.root_mut().map(|p| p.as_member_mut())
    }
    /// Attaches this layout to `parent` and cascades the attachment to every
    /// child, stacking them along the current orientation from the padding
    /// origin.
    fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, parent: &dyn controls::Container, px: i32, py: i32, pw: u16, ph: u16) {
        self.base.parent = Some(unsafe {parent.native_id() as InnerId});
        self.base.position = (px, py);
        control.coords = Some((px as i32, py as i32));
        let mut x = DEFAULT_PADDING;
        let mut y = DEFAULT_PADDING;
        for ref mut child in self.children.as_mut_slice() {
            let self2: &mut LinearLayout = unsafe { utils::base_to_impl_mut(member) };
            child.on_added_to_container(
                self2,
                x,
                y,
                utils::coord_to_size(pw as i32 - DEFAULT_PADDING - DEFAULT_PADDING) as u16,
                utils::coord_to_size(ph as i32 - DEFAULT_PADDING - DEFAULT_PADDING) as u16,
            );
            // Advance the running offset along the stacking axis only.
            let (xx, yy) = child.size();
            match self.orientation {
                layout::Orientation::Horizontal => x += xx as i32,
                layout::Orientation::Vertical => y += yy as i32,
            }
        }
    }
    /// Detaches every child, then clears this layout's own parent link.
    fn on_removed_from_container(&mut self, member: &mut MemberBase, _control: &mut ControlBase, _: &dyn controls::Container) {
        for ref mut child in self.children.as_mut_slice() {
            let self2: &mut LinearLayout = unsafe { utils::base_to_impl_mut(member) };
            child.on_removed_from_container(self2);
        }
        self.base.parent = None;
    }
    #[cfg(feature = "markup")]
    fn fill_from_markup(&mut self, member: &mut MemberBase, _control: &mut ControlBase, markup: &plygui_api::markup::Markup, registry: &mut plygui_api::markup::MarkupRegistry) {
        use plygui_api::markup::MEMBER_TYPE_LINEAR_LAYOUT;
        fill_from_markup_base!(self, member, markup, registry, LinearLayout, [MEMBER_TYPE_LINEAR_LAYOUT]);
        fill_from_markup_children!(self, member, markup, registry);
    }
}
impl HasLayoutInner for TestableLinearLayout {
    fn on_layout_changed(&mut self, _base: &mut MemberBase) {
        self.base.invalidate();
    }
    // Uniform margin on all four sides.
    fn layout_margin(&self, _member: &MemberBase) -> layout::BoundarySize {
        layout::BoundarySize::AllTheSame(DEFAULT_PADDING)
    }
}
impl HasNativeIdInner for TestableLinearLayout {
    // The test backend has no real native handle; it uses a synthetic id.
    type Id = common::TestableId;
    fn native_id(&self) -> Self::Id {
        self.base.id.into()
    }
}
// Marker impl: all required behavior comes from the blanket trait defaults.
impl MemberInner for TestableLinearLayout {}
impl HasSizeInner for TestableLinearLayout {
    /// Forces an exact size onto the layout, invalidates it, and fires the
    /// outer size callback. Always reports the event as handled.
    fn on_size_set(&mut self, base: &mut MemberBase, (width, height): (u16, u16)) -> bool {
        use plygui_api::controls::HasLayout;
        let this = base.as_any_mut().downcast_mut::<LinearLayout>().unwrap();
        this.set_layout_width(layout::Size::Exact(width));
        // BUG FIX: this previously called set_layout_width a second time with
        // `height`, clobbering the width and never setting the height.
        this.set_layout_height(layout::Size::Exact(height));
        self.base.invalidate();
        unsafe { utils::base_to_impl_mut::<LinearLayout>(base) }.call_on_size::<LinearLayout>(width, height);
        true
    }
}
impl HasVisibilityInner for TestableLinearLayout {
    // Visibility is tracked entirely by the testable control base.
    fn on_visibility_set(&mut self, _base: &mut MemberBase, value: types::Visibility) -> bool {
        self.base.on_set_visibility(value)
    }
}
impl ContainerInner for TestableLinearLayout {
    /// Depth-first search for a control by id or tag (mutable variant):
    /// direct children are checked first, then child containers recursively.
    fn find_control_mut<'a>(&'a mut self, arg: &'a types::FindBy) -> Option<&'a mut dyn controls::Control> {
        for child in self.children.as_mut_slice() {
            match arg {
                types::FindBy::Id(ref id) => {
                    if child.as_member_mut().id() == *id {
                        return Some(child.as_mut());
                    }
                }
                types::FindBy::Tag(ref tag) => {
                    if let Some(mytag) = child.as_member_mut().tag() {
                        if tag.as_str() == mytag {
                            return Some(child.as_mut());
                        }
                    }
                }
            }
            // Recurse into children that are themselves containers.
            if let Some(c) = child.is_container_mut() {
                let ret = c.find_control_mut(arg);
                if ret.is_none() {
                    continue;
                }
                return ret;
            }
        }
        None
    }
    /// Depth-first search for a control by id or tag (shared variant).
    fn find_control<'a>(&'a self, arg: &'a types::FindBy) -> Option<&'a dyn controls::Control> {
        for child in self.children.as_slice() {
            match arg {
                types::FindBy::Id(ref id) => {
                    if child.as_member().id() == *id {
                        return Some(child.as_ref());
                    }
                }
                types::FindBy::Tag(ref tag) => {
                    if let Some(mytag) = child.as_member().tag() {
                        if tag.as_str() == mytag {
                            return Some(child.as_ref());
                        }
                    }
                }
            }
            if let Some(c) = child.is_container() {
                let ret = c.find_control(arg);
                if ret.is_none() {
                    continue;
                }
                return ret;
            }
        }
        None
    }
}
impl Drawable for TestableLinearLayout {
    /// Draws this layout (via the testable base) and then its children,
    /// positioned sequentially along the current orientation after a
    /// leading padding.
    fn draw(&mut self, _member: &mut MemberBase, control: &mut ControlBase) {
        self.base.draw("LinearLayout", control.coords, control.measured);
        let mut x = DEFAULT_PADDING;
        let mut y = DEFAULT_PADDING;
        for ref mut child in self.children.as_mut_slice() {
            child.draw(Some((x, y)));
            // Advance along the stacking axis by the child's size.
            let (xx, yy) = child.size();
            match self.orientation {
                layout::Orientation::Horizontal => x += xx as i32,
                layout::Orientation::Vertical => y += yy as i32,
            }
        }
    }
    /// Measures the layout: Exact/MatchParent pass through, WrapContent sums
    /// child sizes along the orientation axis and takes the maximum across
    /// it. Returns (width, height, changed-since-last-measure).
    fn measure(&mut self, _member: &mut MemberBase, control: &mut ControlBase, parent_width: u16, parent_height: u16) -> (u16, u16, bool) {
        use std::cmp::max;
        let orientation = self.orientation;
        let old_size = control.measured;
        // Total horizontal / vertical padding (both sides).
        let hp = DEFAULT_PADDING + DEFAULT_PADDING;
        let vp = DEFAULT_PADDING + DEFAULT_PADDING;
        control.measured = match control.visibility {
            types::Visibility::Gone => (0, 0),
            _ => {
                // Set when the width pass measured the children, so the
                // height pass can reuse their cached sizes.
                let mut measured = false;
                let w = match control.layout.width {
                    layout::Size::Exact(w) => w,
                    layout::Size::MatchParent => parent_width,
                    layout::Size::WrapContent => {
                        let mut w = 0;
                        for child in self.children.as_mut_slice() {
                            let (cw, _, _) = child.measure(max(0, parent_width as i32 - hp) as u16, max(0, parent_height as i32 - vp) as u16);
                            match orientation {
                                layout::Orientation::Horizontal => {
                                    w += cw;
                                }
                                layout::Orientation::Vertical => {
                                    w = max(w, cw);
                                }
                            }
                        }
                        measured = true;
                        max(0, w as i32 + hp) as u16
                    }
                };
                let h = match control.layout.height {
                    layout::Size::Exact(h) => h,
                    layout::Size::MatchParent => parent_height,
                    layout::Size::WrapContent => {
                        let mut h = 0;
                        for child in self.children.as_mut_slice() {
                            let ch = if measured {
                                child.size().1
                            } else {
                                let (_, ch, _) = child.measure(max(0, parent_width as i32 - hp) as u16, max(0, parent_height as i32 - vp) as u16);
                                ch
                            };
                            match orientation {
                                layout::Orientation::Horizontal => {
                                    h = max(h, ch);
                                }
                                layout::Orientation::Vertical => {
                                    h += ch;
                                }
                            }
                        }
                        max(0, h as i32 + vp) as u16
                    }
                };
                (w, h)
            }
        };
        (control.measured.0, control.measured.1, control.measured != old_size)
    }
    fn invalidate(&mut self, _member: &mut MemberBase, _control: &mut ControlBase) {
        self.base.invalidate()
    }
}
|
#[macro_use]
extern crate diesel;

// Crate layout: HTTP apps and routes, configuration, persistence (diesel),
// error types, and redis access.
pub mod apps;
pub mod config;
pub mod db;
pub mod errors;
pub mod redis;
pub mod routes;
#[cfg(test)]
mod tests;
|
// Copyright 2020 The VectorDB Authors.
//
// Code is licensed under Apache License, Version 2.0.

// Macros first so sibling modules can use them.
#[macro_use]
mod macros;
// NOTE(review): `tests` is compiled unconditionally; if it only holds unit
// tests, gating it with #[cfg(test)] would shrink release builds — confirm.
mod tests;
pub mod arithmetic;
pub mod comparator;
pub mod datum;
pub use self::datum::Datum;
|
use hex::FromHex;
/// XORs two equal-length byte vectors element-wise.
///
/// # Panics
/// Panics if the inputs differ in length.
fn xor(x1: Vec<u8>, x2: Vec<u8>) -> Vec<u8> {
    assert_eq!(x1.len(), x2.len());
    // Iterator form: no index arithmetic, no bounds checks, no pre-zeroed
    // output buffer.
    x1.iter().zip(x2.iter()).map(|(a, b)| a ^ b).collect()
}
/// Cryptopals set 1, challenge 2 self-check: fixed XOR of two hex strings
/// must equal the known answer.
fn main() {
    let lhs = Vec::from_hex("1c0111001f010100061a024b53535009181c").expect("invalid hex string");
    let rhs = Vec::from_hex("686974207468652062756c6c277320657965").expect("invalid hex string");
    let expected = Vec::from_hex("746865206b696420646f6e277420706c6179").expect("invalid hex string");
    assert_eq!(expected, xor(lhs, rhs));
    println!("passed");
}
|
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Build lowlevel types into the library but don't let clients use them.
mod lowlevel;
// Reexport generated high level types for use by clients.
mod generated;
pub use generated::*;
|
#[cfg(feature = "src_mysql")]
use crate::sources::mysql::{BinaryProtocol as MySQLBinaryProtocol, TextProtocol};
#[cfg(feature = "src_postgres")]
use crate::sources::postgres::{
rewrite_tls_args, BinaryProtocol as PgBinaryProtocol, CSVProtocol, CursorProtocol,
SimpleProtocol,
};
use crate::{prelude::*, sql::CXQuery};
use fehler::{throw, throws};
use log::debug;
#[cfg(feature = "src_postgres")]
use postgres::NoTls;
#[cfg(feature = "src_postgres")]
use postgres_openssl::MakeTlsConnector;
#[allow(unused_imports)]
use std::sync::Arc;
/// Runs `queries` against the configured source and materializes the
/// results into an `Arrow2Destination`. The source backend is chosen by
/// `source_conn.ty` (compiled in via cargo features) and the wire protocol
/// by `source_conn.proto`.
///
/// Errors are surfaced through `#[throws(ConnectorXOutError)]` (fehler);
/// unsupported source/protocol combinations panic via `unimplemented!`.
#[allow(unreachable_code, unreachable_patterns, unused_variables, unused_mut)]
#[throws(ConnectorXOutError)]
pub fn get_arrow2(
    source_conn: &SourceConn,
    origin_query: Option<String>,
    queries: &[CXQuery<String>],
) -> Arrow2Destination {
    let mut destination = Arrow2Destination::new();
    let protocol = source_conn.proto.as_str();
    debug!("Protocol: {}", protocol);
    match source_conn.ty {
        #[cfg(feature = "src_postgres")]
        SourceType::Postgres => {
            // Postgres dispatches on (protocol, TLS-or-not); each arm wires a
            // concrete Source/Transport pair into a Dispatcher and runs it.
            let (config, tls) = rewrite_tls_args(&source_conn.conn)?;
            match (protocol, tls) {
                ("csv", Some(tls_conn)) => {
                    let sb = PostgresSource::<CSVProtocol, MakeTlsConnector>::new(
                        config,
                        tls_conn,
                        queries.len(),
                    )?;
                    let dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresArrow2Transport<CSVProtocol, MakeTlsConnector>,
                    >::new(
                        sb, &mut destination, queries, origin_query
                    );
                    dispatcher.run()?;
                }
                ("csv", None) => {
                    let sb =
                        PostgresSource::<CSVProtocol, NoTls>::new(config, NoTls, queries.len())?;
                    let dispatcher =
                        Dispatcher::<_, _, PostgresArrow2Transport<CSVProtocol, NoTls>>::new(
                            sb,
                            &mut destination,
                            queries,
                            origin_query,
                        );
                    dispatcher.run()?;
                }
                ("binary", Some(tls_conn)) => {
                    let sb = PostgresSource::<PgBinaryProtocol, MakeTlsConnector>::new(
                        config,
                        tls_conn,
                        queries.len(),
                    )?;
                    let dispatcher =
                        Dispatcher::<
                            _,
                            _,
                            PostgresArrow2Transport<PgBinaryProtocol, MakeTlsConnector>,
                        >::new(sb, &mut destination, queries, origin_query);
                    dispatcher.run()?;
                }
                ("binary", None) => {
                    let sb = PostgresSource::<PgBinaryProtocol, NoTls>::new(
                        config,
                        NoTls,
                        queries.len(),
                    )?;
                    let dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresArrow2Transport<PgBinaryProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, origin_query
                    );
                    dispatcher.run()?;
                }
                ("cursor", Some(tls_conn)) => {
                    let sb = PostgresSource::<CursorProtocol, MakeTlsConnector>::new(
                        config,
                        tls_conn,
                        queries.len(),
                    )?;
                    let dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresArrow2Transport<CursorProtocol, MakeTlsConnector>,
                    >::new(
                        sb, &mut destination, queries, origin_query
                    );
                    dispatcher.run()?;
                }
                ("cursor", None) => {
                    let sb =
                        PostgresSource::<CursorProtocol, NoTls>::new(config, NoTls, queries.len())?;
                    let dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresArrow2Transport<CursorProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, origin_query
                    );
                    dispatcher.run()?;
                }
                ("simple", Some(tls_conn)) => {
                    let sb = PostgresSource::<SimpleProtocol, MakeTlsConnector>::new(
                        config,
                        tls_conn,
                        queries.len(),
                    )?;
                    let dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresArrow2Transport<SimpleProtocol, MakeTlsConnector>,
                    >::new(
                        sb, &mut destination, queries, origin_query
                    );
                    debug!("Running dispatcher");
                    dispatcher.run()?;
                }
                ("simple", None) => {
                    let sb =
                        PostgresSource::<SimpleProtocol, NoTls>::new(config, NoTls, queries.len())?;
                    let dispatcher = Dispatcher::<
                        _,
                        _,
                        PostgresArrow2Transport<SimpleProtocol, NoTls>,
                    >::new(
                        sb, &mut destination, queries, origin_query
                    );
                    debug!("Running dispatcher");
                    dispatcher.run()?;
                }
                _ => unimplemented!("{} protocol not supported", protocol),
            }
        }
        #[cfg(feature = "src_mysql")]
        SourceType::MySQL => match protocol {
            "binary" => {
                let source =
                    MySQLSource::<MySQLBinaryProtocol>::new(&source_conn.conn[..], queries.len())?;
                let dispatcher = Dispatcher::<_, _, MySQLArrow2Transport<MySQLBinaryProtocol>>::new(
                    source,
                    &mut destination,
                    queries,
                    origin_query,
                );
                dispatcher.run()?;
            }
            "text" => {
                let source =
                    MySQLSource::<TextProtocol>::new(&source_conn.conn[..], queries.len())?;
                let dispatcher = Dispatcher::<_, _, MySQLArrow2Transport<TextProtocol>>::new(
                    source,
                    &mut destination,
                    queries,
                    origin_query,
                );
                dispatcher.run()?;
            }
            _ => unimplemented!("{} protocol not supported", protocol),
        },
        #[cfg(feature = "src_sqlite")]
        SourceType::SQLite => {
            // remove the first "sqlite://" manually since url.path is not correct for windows
            let path = &source_conn.conn.as_str()[9..];
            let source = SQLiteSource::new(path, queries.len())?;
            let dispatcher = Dispatcher::<_, _, SQLiteArrow2Transport>::new(
                source,
                &mut destination,
                queries,
                origin_query,
            );
            dispatcher.run()?;
        }
        #[cfg(feature = "src_mssql")]
        SourceType::MsSQL => {
            // MsSQL (like BigQuery) drives its async client on a dedicated
            // tokio runtime shared via Arc.
            let rt = Arc::new(tokio::runtime::Runtime::new().expect("Failed to create runtime"));
            let source = MsSQLSource::new(rt, &source_conn.conn[..], queries.len())?;
            let dispatcher = Dispatcher::<_, _, MsSQLArrow2Transport>::new(
                source,
                &mut destination,
                queries,
                origin_query,
            );
            dispatcher.run()?;
        }
        #[cfg(feature = "src_oracle")]
        SourceType::Oracle => {
            let source = OracleSource::new(&source_conn.conn[..], queries.len())?;
            let dispatcher = Dispatcher::<_, _, OracleArrow2Transport>::new(
                source,
                &mut destination,
                queries,
                origin_query,
            );
            dispatcher.run()?;
        }
        #[cfg(feature = "src_bigquery")]
        SourceType::BigQuery => {
            let rt = Arc::new(tokio::runtime::Runtime::new().expect("Failed to create runtime"));
            let source = BigQuerySource::new(rt, &source_conn.conn[..])?;
            let dispatcher = Dispatcher::<_, _, BigQueryArrow2Transport>::new(
                source,
                &mut destination,
                queries,
                origin_query,
            );
            dispatcher.run()?;
        }
        // Source types whose feature is not compiled in fall through here.
        _ => throw!(ConnectorXOutError::SourceNotSupport(format!(
            "{:?}",
            source_conn.ty
        ))),
    }
    destination
}
|
extern crate tomorrow_core;
extern crate hyper;
extern crate hyper_native_tls;
extern crate serde;
extern crate serde_json;
#[macro_use] extern crate serde_derive;

// Crate identity, resolved at compile time from Cargo metadata.
pub const PACKAGE: &'static str = env!("CARGO_PKG_NAME");
pub const VERSION: &'static str = env!("CARGO_PKG_VERSION");

pub mod json;
pub mod raw;

// Builder is the only item re-exported from the private builder module.
mod builder;
pub use self::builder::Builder;
/// Root of lang.json: the list of known languages.
#[derive(Serialize, Deserialize, Debug)]
struct Languages {
    lang: Vec<Language>,
}
/// One lang.json entry: display name, the identifier returned to callers,
/// and the file extension it is matched against.
#[derive(Serialize, Deserialize, Debug)]
struct Language {
    name: String,
    identifier: String,
    extension: String,
}
/// Returns the language identifier registered for `extension` in
/// lang.json, or `None` when the extension is unknown.
///
/// # Panics
/// Panics if lang.json cannot be loaded (see `get_langdata`).
pub fn get_lang(extension: String) -> Option<String> {
    // find + map replaces the manual loop with early returns.
    get_langdata()
        .lang
        .into_iter()
        .find(|lang| lang.extension == extension)
        .map(|lang| lang.identifier)
}
/// Loads and parses lang.json, expected to sit next to the executable.
///
/// # Panics
/// Panics when the executable path cannot be resolved or lang.json is
/// missing/invalid.
fn get_langdata() -> Languages {
    let mut lang_json_path = std::env::current_exe().expect("cannot get current_exe");
    // Replace the executable's file name with "lang.json" in the same dir.
    lang_json_path.pop();
    lang_json_path.push("lang.json");
    let contents = super::file::read_file(&lang_json_path);
    let langs: Languages = serde_json::from_str(&contents).expect("cannot get lang.json");
    return langs;
}
|
use std::ptr;
use rustc_serialize::hex::FromHex as RustcFromHex;
use bloomchain::Bloom;
/// Infallible construction from a hex string (distinct from
/// `rustc_serialize`'s Result-returning `FromHex`); implementations may
/// panic on malformed input.
pub trait FromHex {
    fn from_hex(s: &str) -> Self where Self: Sized;
}
impl FromHex for Bloom {
    /// Builds a 2048-bit bloom value from a hex string that must decode to
    /// exactly 256 bytes.
    ///
    /// # Panics
    /// Panics if `s` is not valid hex or decodes to a different length.
    fn from_hex(s: &str) -> Self {
        let v = s.from_hex().unwrap();
        let mut res = [0u8; 256];
        // copy_from_slice both enforces the length (replacing the previous
        // assert_eq) and performs the copy — no `unsafe ptr::copy` needed.
        res.copy_from_slice(&v);
        From::from(res)
    }
}
|
mod gfx;
use crate::gfx::*;
use lazy_static::*;
use std::collections::*;
use std::env;
use std::io::*;
use termcolor::*;
/// CLI entry point: renders a Gen-1 style HP bar screenshot to a PNG for a
/// given species, level, and current HP.
fn main() {
    let args: Vec<String> = env::args().collect();
    // No arguments or --help: print usage and exit.
    if args.len() == 1 || args[1] == "--help" {
        print_help(&args);
        return;
    }
    // --species lists every supported name; otherwise three positional
    // arguments (species, level, current hp) are required.
    if args[1] == "--species" {
        for name in SPECIES_MAP.keys() {
            println!("{}", name);
        }
        return;
    } else if args.len() < 4 {
        print_help(&args);
        return;
    }
    let mut stdout = StandardStream::stdout(ColorChoice::Always);
    // Local helper: write one line to stdout in the given color.
    macro_rules! cprint {
        ($color: expr, $message: literal, $($element:expr),*) => {
            stdout.set_color(ColorSpec::new().set_fg(Some($color))).unwrap();
            writeln!(&mut stdout, $message, $(
                $element,
            )*);
        };
    }
    let name = &args[1].to_lowercase();
    if let Some(base_hp) = SPECIES_MAP.get(&name[..]) {
        if let Ok(level) = args[2].parse::<u32>() {
            if let Ok(current_hp) = args[3].parse::<u32>() {
                let max_hp = get_maxhp(level, *base_hp);
                let pixels = get_pixels(current_hp, max_hp);
                cprint!(Color::White, "lv{} {}", level, name);
                cprint!(Color::White, "{} max hp", max_hp);
                cprint!(Color::White, "{}/48 pixels", pixels);
                // 8x8 bitmap font; the charmap maps glyph cells (left to
                // right) to the characters they render.
                let font = Font {
                    bitmap: png_load(include_bytes!("font.png")),
                    character_width: 8,
                    character_height: 8,
                    charmap: "A B C D E F G H I J K L M N O P Q R S T U V W X Y Z ( ) : ; [ ] a b c d e f g h i j k l m n o p q r s t u v w x y z 'e 'd 'l 's 't 'v _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ' PK MN 'r 'm ? ! . _ _ _ _ _ _ _M $ * . / , _F 0 1 2 3 4 5 6 7 8 9"
                        .split(' ')
                        .collect(),
                };
                // Compose name, level, and the HP fill onto the bar template.
                let mut bitmap = png_load(include_bytes!("base_bar.png"));
                bitmap_text(&font, &mut bitmap, 8, 0, &name.to_uppercase());
                bitmap_text(&font, &mut bitmap, 40, 8, &level.to_string());
                // `pixels`-wide fill at (32,19); the trailing numbers are
                // presumably color components — confirm against bitmap_fill.
                bitmap_fill(&mut bitmap, 32, 19, pixels, 2, 127, 56, 72);
                let file_name = format!("hpbar-{}-lv{}-{}.png", name, level, current_hp);
                png_write(&file_name, &bitmap);
                cprint!(Color::Green, "written to {}", file_name);
                // Quantizing to 48 pixels can make adjacent HP values render
                // identically; warn when that happens.
                if current_hp != max_hp && pixels == get_pixels(current_hp + 1, max_hp) {
                    cprint!(Color::Yellow, "Warning: the hp bars for {} hp and {} hp look identical", current_hp, current_hp + 1);
                }
            } else {
                cprint!(Color::Red, "{} is not a numeric current hp.", args[3]);
            }
        } else {
            cprint!(Color::Red, "{} is not a numeric level.", args[2]);
        }
    } else {
        cprint!(Color::Red, "{} is not a valid species name. Use '{} --species' to list the supported names.", args[1], args[0]);
    }
    stdout.reset().unwrap();
}
/// Prints CLI usage lines, using `args[0]` as the program name.
///
/// Takes a slice instead of `&Vec<String>` (the clippy `ptr_arg` lint);
/// existing `print_help(&args)` call sites still compile via deref coercion.
fn print_help(args: &[String]) {
    println!("{} --species | lists the supported species names", args[0]);
    println!("{} (pokemon) (level) (current hp) | writes a screenshot of the hp bar to a png", args[0]);
}
/// Number of filled pixels (out of 48) in the HP bar for `hp` of `maxhp`.
///
/// The arithmetic appears to mirror the original game routine's 8/16-bit
/// math: when max HP exceeds 255 both operands are scaled down by 4 (only
/// the low 16 bits of `n` take part), and the final quotient is truncated
/// to 8 bits — so its quirks are reproduced deliberately.
fn get_pixels(hp: u32, mut maxhp: u32) -> u32 {
    let mut n = hp * 48;
    if maxhp > 0xff {
        maxhp /= 4;
        n = (n & 0xff0000) | ((n & 0x00ffff) / 4);
    }
    (n / maxhp) & 0xff
}
/// Max HP for a species with the given base HP at `level`, truncated to
/// 16 bits; matches the Gen-1 stat formula with zero DVs and stat
/// experience (the `8` folds the default DV/EV contribution into the base).
fn get_maxhp(level: u32, base: u32) -> u32 {
    ((2 * (8 + base)) * level / 100 + level + 10) & 0xffff
}
lazy_static! {
    // Base hp stat per lowercase species name; consumed by get_maxhp to
    // derive a level's max hp.
    // NOTE(review): the values look like the Gen-1 base HP table — verify
    // against an authoritative stat listing before relying on them.
    static ref SPECIES_MAP: HashMap<&'static str, u32> = [
        ("rhydon", 105),
        ("kangaskhan", 105),
        ("nidoran_m", 46),
        ("clefairy", 70),
        ("spearow", 40),
        ("voltorb", 40),
        ("nidoking", 81),
        ("slowbro", 95),
        ("ivysaur", 60),
        ("exeggutor", 95),
        ("lickitung", 90),
        ("exeggcute", 60),
        ("grimer", 80),
        ("gengar", 60),
        ("nidoran_f", 55),
        ("nidoqueen", 90),
        ("cubone", 50),
        ("rhyhorn", 80),
        ("lapras", 130),
        ("arcanine", 90),
        ("mew", 100),
        ("gyarados", 95),
        ("shellder", 30),
        ("tentacool", 40),
        ("gastly", 30),
        ("scyther", 70),
        ("staryu", 30),
        ("blastoise", 79),
        ("pinsir", 65),
        ("tangela", 65),
        ("growlithe", 55),
        ("onix", 35),
        ("fearow", 65),
        ("pidgey", 40),
        ("slowpoke", 90),
        ("kadabra", 40),
        ("graveler", 55),
        ("chansey", 250),
        ("machoke", 80),
        ("mr.mime", 40),
        ("hitmonlee", 50),
        ("hitmonchan", 50),
        ("arbok", 60),
        ("parasect", 60),
        ("psyduck", 50),
        ("drowzee", 60),
        ("golem", 80),
        ("magmar", 65),
        ("electabuzz", 65),
        ("magneton", 50),
        ("koffing", 40),
        ("mankey", 40),
        ("seel", 65),
        ("diglett", 10),
        ("tauros", 75),
        ("farfetch'd", 52),
        ("venonat", 60),
        ("dragonite", 91),
        ("doduo", 35),
        ("poliwag", 40),
        ("jynx", 65),
        ("moltres", 90),
        ("articuno", 90),
        ("zapdos", 90),
        ("ditto", 48),
        ("meowth", 40),
        ("krabby", 30),
        ("vulpix", 38),
        ("ninetales", 73),
        ("pikachu", 35),
        ("raichu", 60),
        ("dratini", 41),
        ("dragonair", 61),
        ("kabuto", 30),
        ("kabutops", 60),
        ("horsea", 30),
        ("seadra", 55),
        ("sandshrew", 50),
        ("sandslash", 75),
        ("omanyte", 35),
        ("omastar", 70),
        ("jigglypuff", 115),
        ("wigglytuff", 140),
        ("eevee", 55),
        ("flareon", 65),
        ("jolteon", 65),
        ("vaporeon", 130),
        ("machop", 70),
        ("zubat", 40),
        ("ekans", 35),
        ("paras", 35),
        ("poliwhirl", 65),
        ("poliwrath", 90),
        ("weedle", 40),
        ("kakuna", 45),
        ("beedrill", 65),
        ("dodrio", 60),
        ("primeape", 65),
        ("dugtrio", 35),
        ("venomoth", 70),
        ("dewgong", 90),
        ("caterpie", 45),
        ("metapod", 50),
        ("butterfree", 60),
        ("machamp", 90),
        ("golduck", 80),
        ("hypno", 85),
        ("golbat", 75),
        ("mewtwo", 106),
        ("snorlax", 160),
        ("magikarp", 20),
        ("muk", 105),
        ("kingler", 55),
        ("cloyster", 50),
        ("electrode", 60),
        ("clefable", 95),
        ("weezing", 65),
        ("persian", 65),
        ("marowak", 60),
        ("haunter", 45),
        ("abra", 25),
        ("alakazam", 55),
        ("pidgeotto", 63),
        ("pidgeot", 83),
        ("starmie", 60),
        ("bulbasaur", 45),
        ("venusaur", 80),
        ("tentacruel", 80),
        ("goldeen", 45),
        ("seaking", 80),
        ("ponyta", 50),
        ("rapidash", 65),
        ("rattata", 30),
        ("raticate", 55),
        ("nidorino", 61),
        ("nidorina", 70),
        ("geodude", 40),
        ("porygon", 65),
        ("aerodactyl", 80),
        ("magnemite", 25),
        ("charmander", 39),
        ("squirtle", 44),
        ("charmeleon", 58),
        ("wartortle", 59),
        ("charizard", 78),
        ("oddish", 45),
        ("gloom", 60),
        ("vileplume", 75),
        ("bellsprout", 50),
        ("weepinbell", 65),
        ("victreebel", 80),
    ]
    // Array of (name, stat) tuples is Copy, so `copied()` cheaply yields
    // owned pairs for `collect()` into the map.
    .iter()
    .copied()
    .collect();
}
|
#[allow(non_snake_case)]
fn main() {
    // Demo: additive persistence of 199 (199 -> 19 -> 10 -> 1 is three steps).
    let steps = addPers(199);
    println!("{}", steps);
}
/// Returns the additive persistence of a number: how many times it must be
/// collapsed to its digit sum before a single digit remains.
///
/// # Arguments
/// * `num` - The i32 whose additive persistence is wanted
///
/// # Examples
/// `addPers(13)` returns 1; `addPers(199)` returns 3. Any single-digit
/// (or non-positive) input returns 0 immediately.
#[allow(non_snake_case)]
fn addPers(num: i32) -> i32 {
    let mut value = num;
    let mut steps = 0;
    // Each pass replaces the value with its digit sum until one digit is left.
    while value > 9 {
        value = addSum(value);
        steps += 1;
    }
    steps
}
/// Sums the decimal digits of `num`; non-positive input yields 0.
#[allow(non_snake_case)]
fn addSum(num: i32) -> i32 {
    let mut remaining = num;
    let mut total = 0;
    // Peel off the least significant digit until nothing remains.
    while remaining > 0 {
        total += remaining % 10;
        remaining /= 10;
    }
    total
}
// Unit tests for the additive-persistence helper.
#[cfg(test)]
mod tests {
    use super::addPers;

    #[test]
    fn one_step() {
        assert_eq!(addPers(13), 1);
    }

    #[test]
    fn two_steps() {
        assert_eq!(addPers(1234), 2);
    }

    #[test]
    fn two_steps_with_large_digits() {
        assert_eq!(addPers(9876), 2);
    }

    #[test]
    fn three_steps() {
        assert_eq!(addPers(199), 3);
    }
}
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Code for annotating snippets.
use syntax_pos::{Span, FileMap};
use CodeMapper;
use std::rc::Rc;
use {Level};
/// A set of annotated source files to be rendered as a diagnostic snippet.
#[derive(Clone)]
pub struct SnippetData {
    /// Codemap used to resolve spans to file/line positions.
    codemap: Rc<CodeMapper>,
    /// One entry per file containing annotated lines.
    files: Vec<FileInfo>
}
/// Annotations collected for one source file.
#[derive(Clone)]
pub struct FileInfo {
    /// The file these lines/annotations refer to.
    file: Rc<FileMap>,
    /// The "primary file", if any, gets a `-->` marker instead of
    /// `>>>`, and has a line-number/column printed and not just a
    /// filename. It appears first in the listing. It is known to
    /// contain at least one primary span, though primary spans (which
    /// are designated with `^^^`) may also occur in other files.
    primary_span: Option<Span>,
    /// Annotated lines of this file, in the order they were added.
    lines: Vec<Line>,
}
/// One annotated source line.
#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct Line {
    /// Zero-based line index within the file.
    pub line_index: usize,
    /// All annotations attached to this line.
    pub annotations: Vec<Annotation>,
}
/// A single underline/label annotation on one source line.
#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct Annotation {
    /// Start column, 0-based indexing -- counting *characters*, not
    /// utf-8 bytes. Note that it is important that this field goes
    /// first, so that when we sort, we sort orderings by start
    /// column.
    pub start_col: usize,
    /// End column within the line (exclusive)
    pub end_col: usize,
    /// Is this annotation derived from primary span
    pub is_primary: bool,
    /// Is this a large span minimized down to a smaller span
    pub is_minimized: bool,
    /// Optional label to display adjacent to the annotation.
    pub label: Option<String>,
}
/// A fragment of rendered output text together with the style to print it in.
#[derive(Debug)]
pub struct StyledString {
    /// The literal text to emit.
    pub text: String,
    /// Style to apply when emitting `text`.
    pub style: Style,
}
/// Styles applied to the pieces of rendered diagnostic output
/// (headers, file names, line numbers, quoted source, underlines, labels).
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Style {
    HeaderMsg,
    FileNameStyle,
    LineAndColumn,
    LineNumber,
    Quotation,
    UnderlinePrimary,
    UnderlineSecondary,
    LabelPrimary,
    LabelSecondary,
    OldSchoolNoteText,
    OldSchoolNote,
    NoStyle,
    ErrorCode,
    /// Style derived from a diagnostic level (error, warning, note, ...).
    Level(Level),
}
|
use ocl;
use std::ffi::CString;
use parenchyma::error::Result;
use parenchyma::frameworks::OpenCLContext;
// const WGS: usize = 64;
// const WGS1: usize = 64;
// const WGS2: usize = 64;
// /// Caches instances of `Kernel`
// #[derive(Debug)]
// pub struct OpenCLPackage {
// pub(in super) program: ocl::Program,
// asum: [ocl::Kernel; 2],
// pub(in super) axpy: ocl::Kernel,
// copy: ocl::Kernel,
// dot: [ocl::Kernel; 2],
// nrm2: [ocl::Kernel; 2],
// scal: ocl::Kernel,
// swap: ocl::Kernel,
// gemm_direct: Gemm,
// }
// #[derive(Debug)]
// pub struct Gemm {
// tt: ocl::Kernel,
// tn: ocl::Kernel,
// nt: ocl::Kernel,
// nn: ocl::Kernel,
// }
/// Caches instances of `Kernel`
#[derive(Debug)]
pub struct OpenCLPackage {
    /// Compiled OpenCL program built from the bundled kernel sources;
    /// visible only inside the `frameworks::open_cl` module tree.
    pub(in frameworks::open_cl) program: ocl::Program,
}
impl OpenCLPackage {
    /// Compiles the bundled BLAS-style kernel sources (level-1 vector ops and
    /// level-3 direct GEMM) into a single OpenCL program for `cx`'s device.
    ///
    /// # Errors
    /// Propagates any error from `OpenCLContext::program`, i.e. a failed
    /// OpenCL build of the concatenated sources.
    pub fn compile(cx: &mut OpenCLContext<()>) -> Result<OpenCLPackage> {
        // Source order matters: common helpers first, then the per-level
        // kernels that depend on them.
        let program = cx.program(vec![
            CString::new(include_str!("source/common.cl")).unwrap(),
            CString::new(include_str!("source/level1/level1.cl")).unwrap(),
            CString::new(include_str!("source/level1/xasum.cl")).unwrap(),
            CString::new(include_str!("source/level1/xaxpy.cl")).unwrap(),
            CString::new(include_str!("source/level1/xcopy.cl")).unwrap(),
            CString::new(include_str!("source/level1/xdot.cl")).unwrap(),
            CString::new(include_str!("source/level1/xnrm2.cl")).unwrap(),
            CString::new(include_str!("source/level1/xscal.cl")).unwrap(),
            CString::new(include_str!("source/level1/xswap.cl")).unwrap(),
            CString::new(include_str!("source/level3/level3.cl")).unwrap(),
            CString::new(include_str!("source/level3/xgemm_direct_part1.cl")).unwrap(),
            CString::new(include_str!("source/level3/xgemm_direct_part2.cl")).unwrap(),
            CString::new(include_str!("source/level3/xgemm_direct_part3.cl")).unwrap(),
        ])?;
        // Kernel handles were previously pre-built here (see the commented-out
        // struct above); currently only the program itself is cached.
        // Ok(OpenCLPackage {
        //     asum: [ocl::Kernel::new("Xasum", &program)?, ocl::Kernel::new("XasumEpilogue", &program)?],
        //     axpy: ocl::Kernel::new("Xaxpy", &program)?,
        //     copy: ocl::Kernel::new("Xcopy", &program)?,
        //     dot: [ocl::Kernel::new("Xdot", &program)?, ocl::Kernel::new("XdotEpilogue", &program)?],
        //     nrm2: [ocl::Kernel::new("Xnrm2", &program)?, ocl::Kernel::new("Xnrm2Epilogue", &program)?],
        //     scal: ocl::Kernel::new("Xscal", &program)?,
        //     swap: ocl::Kernel::new("Xswap", &program)?,
        //     gemm_direct: Gemm {
        //         tt: ocl::Kernel::new("XgemmDirectTT", &program)?,
        //         tn: ocl::Kernel::new("XgemmDirectTN", &program)?,
        //         nt: ocl::Kernel::new("XgemmDirectNT", &program)?,
        //         nn: ocl::Kernel::new("XgemmDirectNN", &program)?,
        //     },
        //     program,
        // })
        Ok(OpenCLPackage { program })
    }
}
/// Bit-field accessors for the timer control register 1 at 0x4001_4C00.
/// NOTE(review): looks like an STM32 APB2 timer instance — confirm which
/// peripheral against the device reference manual.
pub mod cr1 {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C00;

    /// `ckd`: 2-bit field at bits [9:8].
    pub mod ckd {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 8) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x3 << 8);
                reg |= (val & 0x3) << 8;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `arpe`: 1-bit field at bit 7.
    pub mod arpe {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 7);
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `opm`: 1-bit field at bit 3.
    pub mod opm {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 3) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 3);
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `urs`: 1-bit field at bit 2.
    pub mod urs {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 2);
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `udis`: 1-bit field at bit 1.
    pub mod udis {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 1);
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cen`: 1-bit field at bit 0.
    pub mod cen {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x1;
                reg |= val & 0x1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Bit-field accessors for the timer control register 2 at 0x4001_4C04.
pub mod cr2 {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C04;

    /// `mms`: 3-bit field at bits [6:4].
    pub mod mms {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0x7 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x7 << 4);
                reg |= (val & 0x7) << 4;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Bit-field accessors for the slave mode control register at 0x4001_4C08.
pub mod smcr {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C08;

    /// `msm`: 1-bit field at bit 7.
    pub mod msm {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 7);
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `ts`: 3-bit field at bits [6:4].
    pub mod ts {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0x7 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x7 << 4);
                reg |= (val & 0x7) << 4;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `sms`: 3-bit field at bits [2:0].
    pub mod sms {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x7 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x7;
                reg |= val & 0x7;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Bit-field accessors for the interrupt-enable register at 0x4001_4C0C.
pub mod dier {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C0C;

    /// `tie`: 1-bit field at bit 6.
    pub mod tie {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 6) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 6);
                reg |= (val & 0x1) << 6;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc2ie`: 1-bit field at bit 2.
    pub mod cc2ie {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 2);
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1ie`: 1-bit field at bit 1.
    pub mod cc1ie {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 1);
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `uie`: 1-bit field at bit 0.
    pub mod uie {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x1;
                reg |= val & 0x1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Bit-field accessors for the status register at 0x4001_4C10.
/// NOTE(review): `set` does a read-modify-write; on hardware where status
/// flags are rc_w0 (write-0-to-clear), writing back other flags as read may
/// clear them as a side effect. Original behavior preserved — confirm
/// against the device reference manual.
pub mod sr {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C10;

    /// `cc2of`: 1-bit field at bit 10.
    pub mod cc2of {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 10) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 10);
                reg |= (val & 0x1) << 10;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1of`: 1-bit field at bit 9.
    pub mod cc1of {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 9) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 9);
                reg |= (val & 0x1) << 9;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `tif`: 1-bit field at bit 6.
    pub mod tif {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 6) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 6);
                reg |= (val & 0x1) << 6;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc2if`: 1-bit field at bit 2.
    pub mod cc2if {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 2);
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1if`: 1-bit field at bit 1.
    pub mod cc1if {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 1);
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `uif`: 1-bit field at bit 0.
    pub mod uif {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x1;
                reg |= val & 0x1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the event-generation register at 0x4001_4C14.
/// Fields here are write-only; each `set` writes the whole register with all
/// other bits zero, exactly as the original code did.
pub mod egr {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C14;

    /// `tg`: 1-bit field at bit 6 (write-only).
    pub mod tg {
        /// Writes `val` (masked to 1 bit) into bit 6; all other bits are 0.
        pub fn set(val: u32) {
            // SAFETY: volatile write to a fixed MMIO register address.
            unsafe { core::ptr::write_volatile(super::ADDR as *mut u32, (val & 0x1) << 6) }
        }
    }
    /// `cc2g`: 1-bit field at bit 2 (write-only).
    pub mod cc2g {
        /// Writes `val` (masked to 1 bit) into bit 2; all other bits are 0.
        pub fn set(val: u32) {
            // SAFETY: volatile write to a fixed MMIO register address.
            unsafe { core::ptr::write_volatile(super::ADDR as *mut u32, (val & 0x1) << 2) }
        }
    }
    /// `cc1g`: 1-bit field at bit 1 (write-only).
    pub mod cc1g {
        /// Writes `val` (masked to 1 bit) into bit 1; all other bits are 0.
        pub fn set(val: u32) {
            // SAFETY: volatile write to a fixed MMIO register address.
            unsafe { core::ptr::write_volatile(super::ADDR as *mut u32, (val & 0x1) << 1) }
        }
    }
    /// `ug`: 1-bit field at bit 0 (write-only).
    pub mod ug {
        /// Writes `val` (masked to 1 bit) into bit 0; all other bits are 0.
        pub fn set(val: u32) {
            // SAFETY: volatile write to a fixed MMIO register address.
            unsafe { core::ptr::write_volatile(super::ADDR as *mut u32, val & 0x1) }
        }
    }
}
/// Output-compare view of the capture/compare mode register 1 at 0x4001_4C18
/// (same word as `ccmr1_input`; the active layout depends on the ccNs mode).
pub mod ccmr1_output {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C18;

    /// `oc2m`: 3-bit field at bits [14:12].
    pub mod oc2m {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 12) & 0x7 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x7 << 12);
                reg |= (val & 0x7) << 12;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `oc2pe`: 1-bit field at bit 11.
    pub mod oc2pe {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 11) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 11);
                reg |= (val & 0x1) << 11;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `oc2fe`: 1-bit field at bit 10.
    pub mod oc2fe {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 10) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 10);
                reg |= (val & 0x1) << 10;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc2s`: 2-bit field at bits [9:8].
    pub mod cc2s {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 8) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x3 << 8);
                reg |= (val & 0x3) << 8;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `oc1m`: 3-bit field at bits [6:4].
    pub mod oc1m {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0x7 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x7 << 4);
                reg |= (val & 0x7) << 4;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `oc1pe`: 1-bit field at bit 3.
    pub mod oc1pe {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 3) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 3);
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `oc1fe`: 1-bit field at bit 2.
    pub mod oc1fe {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 2);
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1s`: 2-bit field at bits [1:0].
    pub mod cc1s {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x3;
                reg |= val & 0x3;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Input-capture view of the capture/compare mode register 1 at 0x4001_4C18
/// (same word as `ccmr1_output`; the active layout depends on the ccNs mode).
pub mod ccmr1_input {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C18;

    /// `ic2f`: 4-bit field at bits [15:12].
    pub mod ic2f {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 12) & 0xF }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0xF << 12);
                reg |= (val & 0xF) << 12;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `ic2psc`: 2-bit field at bits [11:10].
    pub mod ic2psc {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 10) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x3 << 10);
                reg |= (val & 0x3) << 10;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc2s`: 2-bit field at bits [9:8].
    pub mod cc2s {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 8) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x3 << 8);
                reg |= (val & 0x3) << 8;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `ic1f`: 4-bit field at bits [7:4].
    pub mod ic1f {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0xF }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0xF << 4);
                reg |= (val & 0xF) << 4;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `ic1psc`: 2-bit field at bits [3:2].
    pub mod ic1psc {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 2) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x3 << 2);
                reg |= (val & 0x3) << 2;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1s`: 2-bit field at bits [1:0].
    pub mod cc1s {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x3 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x3;
                reg |= val & 0x3;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Bit-field accessors for the capture/compare enable register at 0x4001_4C20.
pub mod ccer {
    /// Register address shared by every field accessor below.
    const ADDR: u32 = 0x4001_4C20;

    /// `cc2np`: 1-bit field at bit 7.
    pub mod cc2np {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 7) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 7);
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc2p`: 1-bit field at bit 5.
    pub mod cc2p {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 5) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 5);
                reg |= (val & 0x1) << 5;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc2e`: 1-bit field at bit 4.
    pub mod cc2e {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 4) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 4);
                reg |= (val & 0x1) << 4;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1np`: 1-bit field at bit 3.
    pub mod cc1np {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 3) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 3);
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1p`: 1-bit field at bit 1.
    pub mod cc1p {
        /// Reads the field, shifted down to bit 0.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { (core::ptr::read_volatile(super::ADDR as *const u32) >> 1) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !(0x1 << 1);
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
    /// `cc1e`: 1-bit field at bit 0.
    pub mod cc1e {
        /// Reads the field.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0x1 }
        }
        /// Read-modify-writes the field; `val` is masked to the field width.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0x1;
                reg |= val & 0x1;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the 16-bit counter register at 0x4001_4C24.
pub mod cnt {
    /// Register address shared by the accessors below.
    const ADDR: u32 = 0x4001_4C24;

    /// `cnt`: 16-bit field at bits [15:0].
    pub mod cnt {
        /// Reads the 16-bit counter value.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Writes the 16-bit counter value, preserving the upper half-word.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0xFFFF;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the 16-bit prescaler register at 0x4001_4C28.
pub mod psc {
    /// Register address shared by the accessors below.
    const ADDR: u32 = 0x4001_4C28;

    /// `psc`: 16-bit field at bits [15:0].
    pub mod psc {
        /// Reads the 16-bit prescaler value.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Writes the 16-bit prescaler value, preserving the upper half-word.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0xFFFF;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the 16-bit auto-reload register at 0x4001_4C2C.
pub mod arr {
    /// Register address shared by the accessors below.
    const ADDR: u32 = 0x4001_4C2C;

    /// `arr`: 16-bit field at bits [15:0].
    pub mod arr {
        /// Reads the 16-bit auto-reload value.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Writes the 16-bit auto-reload value, preserving the upper half-word.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0xFFFF;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the 16-bit capture/compare register 1 at 0x4001_4C34.
pub mod ccr1 {
    /// Register address shared by the accessors below.
    const ADDR: u32 = 0x4001_4C34;

    /// `ccr1`: 16-bit field at bits [15:0].
    pub mod ccr1 {
        /// Reads the 16-bit capture/compare value.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Writes the 16-bit capture/compare value, preserving the upper half-word.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0xFFFF;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
/// Accessors for the 16-bit capture/compare register 2 at 0x4001_4C38.
pub mod ccr2 {
    /// Register address shared by the accessors below.
    const ADDR: u32 = 0x4001_4C38;

    /// `ccr2`: 16-bit field at bits [15:0].
    pub mod ccr2 {
        /// Reads the 16-bit capture/compare value.
        pub fn get() -> u32 {
            // SAFETY: volatile read of a fixed MMIO register address.
            unsafe { core::ptr::read_volatile(super::ADDR as *const u32) & 0xFFFF }
        }
        /// Writes the 16-bit capture/compare value, preserving the upper half-word.
        pub fn set(val: u32) {
            // SAFETY: volatile read-modify-write of a fixed MMIO register address.
            unsafe {
                let mut reg = core::ptr::read_volatile(super::ADDR as *const u32);
                reg &= !0xFFFF;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(super::ADDR as *mut u32, reg);
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.