text stringlengths 8 4.13M |
|---|
use crate::grid::grid::Grid;
use crate::grid::Dir;
use crate::moveable::Moveable;
use crate::player::Player;
use crate::CELL_SIZE;
use quicksilver::geom::Rectangle;
use quicksilver::geom::Shape;
use quicksilver::geom::Transform;
use quicksilver::geom::Vector;
use quicksilver::graphics::Color;
use quicksilver::graphics::Font;
use quicksilver::graphics::FontStyle;
use quicksilver::graphics::Image;
use quicksilver::lifecycle::Window;
use quicksilver::prelude::{Col, Img};
use quicksilver::Result;
use rand::Rng;
/// A wandering enemy whose signed health value is rendered beneath it.
pub struct Monster {
    /// Position / movement state on the grid.
    pub location: Moveable,
    /// Edge length of the monster's square, in pixels (15 in `new`).
    pub size: usize,
    /// Signed health; which sign counts as "alive" depends on `started_negative`.
    pub health: isize,
    /// Whether the monster spawned with negative health; together with the sign
    /// of incoming damage this decides whether a hit heals or hurts (`get_damage`).
    pub started_negative: bool,
    // Presumably the damage this monster deals on contact — consumed outside
    // this file; confirm against the caller.
    pub damage: isize,
    /// Movement speed forwarded to `Moveable::move_some`.
    pub speed: f32,
    /// Cached image of the current health value, re-rendered on change.
    pub rendered_health: Image,
    /// Frames remaining until the monster can take damage again.
    pub damage_cooldown: usize,
}
impl Monster {
    /// Creates a monster at `location` with a random health in [-10, 10)
    /// (two-argument `gen_range` is the pre-0.8 `rand` API, half-open range),
    /// rendering the value once with `font`/`style`.
    ///
    /// NOTE(review): a rolled health of exactly 0 makes `is_alive()` false
    /// from the moment of spawning — confirm this is intended.
    pub fn new(location: Vector, font: &Font, style: &FontStyle) -> Result<Self> {
        let mut rng = rand::thread_rng();
        let health = rng.gen_range(-10, 10);
        let rendered_health = font.render(&health.to_string(), style)?;
        Ok(Self {
            location: Moveable::new(location),
            size: 15,
            health,
            damage: 5,
            speed: 5.,
            started_negative: health < 0,
            rendered_health,
            damage_cooldown: 0,
        })
    }
    /// Ticks the damage cooldown and takes one movement step: roughly 20% of
    /// the time (`gen_range(0, 10) > 7`) the monster steps towards the player,
    /// otherwise it moves in a random direction.
    pub fn move_a_bit(&mut self, grid: &Grid, player: &Player) {
        if self.damage_cooldown > 0 {
            self.damage_cooldown -= 1;
        }
        let mut rng = rand::thread_rng();
        let dir = if rng.gen_range(0, 10) > 7 {
            // Chase: close the horizontal gap first, then the vertical one.
            if self.location.cell_loc.0 > player.location.cell_loc.0 {
                Dir::Left
            } else if self.location.cell_loc.0 < player.location.cell_loc.0 {
                Dir::Right
            } else if self.location.cell_loc.1 > player.location.cell_loc.1 {
                Dir::Up
            } else if self.location.cell_loc.1 < player.location.cell_loc.1 {
                Dir::Down
            } else {
                // Already on the player's cell: wander randomly.
                rng.gen()
            }
        } else {
            rng.gen()
        };
        self.location.move_some(dir, self.speed, grid, self.size);
    }
    /// Applies `damage` (subject to the cooldown), re-renders the health
    /// label when it changes, and returns whether the monster is still alive.
    ///
    /// Sign rules: damage whose sign is *opposite* to the monster's starting
    /// sign moves health away from zero (a capped heal, |health| < 12);
    /// matching signs move health towards zero, i.e. real damage.
    pub fn get_damage(&mut self, damage: isize, font: &Font, style: &FontStyle) -> Result<bool> {
        // Invulnerability window after a recent hit.
        if self.damage_cooldown > 0 {
            return Ok(self.is_alive());
        }
        if (!self.started_negative) && damage < 0 {
            // Heal a positive-health monster, capped so new health stays < 12.
            if self.health - damage < 12 {
                self.health -= damage;
                self.damage_cooldown = 20;
                self.rendered_health = font.render(&self.health.to_string(), style)?;
            }
        } else if self.started_negative && damage > 0 {
            // "Heal" a negative-health monster (push further from zero), capped > -12.
            if self.health - damage > -12 {
                self.health -= damage;
                self.damage_cooldown = 20;
                self.rendered_health = font.render(&self.health.to_string(), style)?;
            }
        } else {
            // Matching signs: health moves towards zero — real damage.
            self.health -= damage;
            self.damage_cooldown = 20;
            self.rendered_health = font.render(&self.health.to_string(), style)?;
        }
        Ok(self.is_alive())
    }
    /// Alive while health still has the sign it spawned with; reaching or
    /// crossing zero kills the monster.
    pub fn is_alive(&self) -> bool {
        (self.started_negative && self.health < 0) || ((!self.started_negative) && self.health > 0)
    }
    /// Draws the monster as an indigo square plus its cached health image,
    /// converting the world position to screen space via the player's camera.
    pub fn draw(&self, window: &mut Window, z: i32, player: &Player) {
        let screen_pos = player.grid_to_screen(&(
            self.location.location.x / CELL_SIZE as f32,
            self.location.location.y / CELL_SIZE as f32,
        ));
        let mut monster_rec =
            Rectangle::new(screen_pos.clone(), (self.size as f32, self.size as f32))
                .with_center(screen_pos);
        window.draw_ex(&monster_rec, Col(Color::INDIGO), Transform::IDENTITY, z);
        // Reuse the rectangle, shifted below the monster, as the health label area.
        monster_rec.pos.y += 20.;
        monster_rec.size.y = 15.;
        monster_rec.size.x = 20.;
        window.draw_ex(
            &monster_rec,
            Img(&self.rendered_health),
            Transform::IDENTITY,
            z,
        );
    }
}
|
use crate::datastructures::{
AtomSpatial::PointsTo,
Entailment,
Expr::{Nil, Var},
Formula,
Op::AtomNeq,
Pure::And,
Rule,
Spatial::SepConj,
};
/// Π ∧ E1!=nil | E1->E2 * Σ |- Π' | Σ' ==> Π | E1->E2 * Σ |- Π' | Σ'
pub struct NilNotLVal;
impl Rule for NilNotLVal {
    /// Applicable when the antecedent's spatial part contains a points-to
    /// fact `l -> _` whose source `l` is not already known to be != nil in
    /// the pure part (or when the pure part is not an `And` conjunction at all).
    fn predicate(&self, goal: &Entailment) -> bool {
        let mut add_new = false;
        let antecedent = &goal.antecedent;
        if let SepConj(atom_spatials) = antecedent.get_spatial() {
            let points_to_facts = atom_spatials.iter().filter(move |x| x.is_points_to());
            if let And(pure_ops) = antecedent.get_pure() {
                for points_to_fact in points_to_facts {
                    if let PointsTo(l, _) = points_to_fact {
                        // Is `l != nil` (in either orientation) already recorded?
                        if pure_ops.iter().any(move |op| match op {
                            AtomNeq(le, re) => (le == l && re == &Nil) || (re == l && le == &Nil),
                            _ => false,
                        }) {
                            continue;
                        } else {
                            // Found a points-to source with no nil-inequality yet.
                            add_new = true;
                            break;
                        }
                    }
                }
            } else {
                // Pure part is not a conjunction: any points-to fact justifies the rule.
                add_new = true;
            }
        }
        add_new
    }
    /// Produces the single premiss: the goal with `l != nil` added to the
    /// antecedent's pure part for the first points-to source `l` that lacks it.
    fn premisses(
        &self,
        goal: crate::datastructures::Entailment,
    ) -> Option<Vec<crate::datastructures::Entailment>> {
        let (antecedent, consequent) = goal.destroy();
        let (mut ant_pure, ant_spatial) = antecedent.destroy();
        // Find the first points-to fact whose source is not yet known != nil.
        let points_to_to_add = if let SepConj(points_to_facts) = &ant_spatial {
            if let Some(PointsTo(nonnil, _)) = points_to_facts.iter().find(|ptf| {
                if let PointsTo(l, _) = ptf {
                    if let And(pure_ops) = &ant_pure {
                        !pure_ops.iter().any(|op| match op {
                            AtomNeq(le, re) => (le == l && re == &Nil) || (re == l && le == &Nil),
                            _ => false,
                        })
                    } else {
                        true
                    }
                } else {
                    false
                }
            }) {
                nonnil.clone()
            } else {
                // Nothing to add; `Nil` acts as a sentinel rejected below.
                Nil
            }
        } else {
            Nil
        };
        // Only variables give rise to a new `x != nil` atom.
        if let Var(_) = points_to_to_add {
            if let And(pure_ops) = &mut ant_pure {
                pure_ops.push(AtomNeq(points_to_to_add, Nil));
            } else {
                ant_pure = And(vec![AtomNeq(points_to_to_add, Nil)]);
            }
        }
        Some(vec![Entailment {
            antecedent: Formula(ant_pure, ant_spatial),
            consequent,
        }])
    }
}
#[cfg(test)]
mod test {
    use super::NilNotLVal;
    use crate::datastructures::{
        AtomSpatial::PointsTo,
        Entailment, Expr,
        Expr::Nil,
        Formula,
        Op::AtomNeq,
        Pure::{And, True},
        Rule,
        Spatial::{Emp, SepConj},
    };
    /// Exercises `predicate` and `premisses`:
    /// * not applicable when every points-to source already has `!= nil`;
    /// * adds the missing `x != nil` when only `y != nil` is present;
    /// * creates the `And` conjunction from scratch when the pure part is `True`.
    #[test]
    pub fn test_nil_not_lval() -> Result<(), ()> {
        // Both y and x already known != nil: rule must not fire.
        let goal_not_applicable = Entailment {
            antecedent: Formula(
                And(vec![
                    AtomNeq(Expr::new_var("y"), Nil),
                    AtomNeq(Expr::new_var("x"), Nil),
                ]),
                SepConj(vec![
                    PointsTo(Expr::new_var("y"), Expr::new_var("x")),
                    PointsTo(Expr::new_var("x"), Expr::new_var("z")),
                ]),
            ),
            consequent: Formula(True, Emp),
        };
        assert_eq!(false, NilNotLVal.predicate(&goal_not_applicable));
        // x -> z present but x != nil missing: rule fires and adds it.
        let goal1 = Entailment {
            antecedent: Formula(
                And(vec![AtomNeq(Expr::new_var("y"), Nil)]),
                SepConj(vec![
                    PointsTo(Expr::new_var("y"), Expr::new_var("x")),
                    PointsTo(Expr::new_var("x"), Expr::new_var("z")),
                ]),
            ),
            consequent: Formula(True, Emp),
        };
        assert!(NilNotLVal.predicate(&goal1));
        let goal_expected1 = Entailment {
            antecedent: Formula(
                And(vec![
                    AtomNeq(Expr::new_var("y"), Nil),
                    AtomNeq(Expr::new_var("x"), Nil),
                ]),
                SepConj(vec![
                    PointsTo(Expr::new_var("y"), Expr::new_var("x")),
                    PointsTo(Expr::new_var("x"), Expr::new_var("z")),
                ]),
            ),
            consequent: Formula(True, Emp),
        };
        let premisses = NilNotLVal.premisses(goal1);
        if let Some(prem) = premisses {
            assert_eq!(1, prem.len());
            assert_eq!(goal_expected1, prem[0]);
        } else {
            return Err(());
        }
        // Pure part is `True`: premiss must introduce a fresh conjunction.
        let goal2 = Entailment {
            antecedent: Formula(
                True,
                SepConj(vec![
                    PointsTo(Expr::new_var("y"), Expr::new_var("x")),
                    PointsTo(Expr::new_var("x"), Expr::new_var("z")),
                ]),
            ),
            consequent: Formula(True, Emp),
        };
        assert!(NilNotLVal.predicate(&goal2));
        let goal_expected2 = Entailment {
            antecedent: Formula(
                And(vec![AtomNeq(Expr::new_var("y"), Nil)]),
                SepConj(vec![
                    PointsTo(Expr::new_var("y"), Expr::new_var("x")),
                    PointsTo(Expr::new_var("x"), Expr::new_var("z")),
                ]),
            ),
            consequent: Formula(True, Emp),
        };
        let premisses = NilNotLVal.premisses(goal2);
        if let Some(prem) = premisses {
            assert_eq!(1, prem.len());
            assert_eq!(goal_expected2, prem[0]);
            Ok(())
        } else {
            Err(())
        }
    }
}
|
mod call;
mod deploy;
mod error;
mod gas;
mod returndata;
mod spawn;
pub(crate) mod logs;
pub(crate) use error::{decode_error, encode_error};
pub use call::{decode_call, encode_call};
pub use deploy::{decode_deploy, encode_deploy};
pub use spawn::{decode_spawn, encode_spawn};
use svm_types::Receipt;
/// Receipt type tags: the first byte of every encoded receipt.
mod types {
    pub const DEPLOY: u8 = 0;
    pub const SPAWN: u8 = 1;
    pub const CALL: u8 = 2;
}
/// Decodes a binary Receipt into its Rust struct wrapped as `ReceiptOwned`
///
/// # Panics
///
/// Panics when `bytes` is empty or when the leading type tag is not one of
/// the known `types::*` values — the tag byte comes from external data, so
/// the failure message includes the offending value for diagnosis.
pub fn decode_receipt(bytes: &[u8]) -> Receipt {
    assert!(!bytes.is_empty(), "empty receipt");
    // The first byte encodes the receipt kind (see `mod types`); each
    // decoder receives the full buffer, tag included.
    match bytes[0] {
        types::DEPLOY => Receipt::Deploy(decode_deploy(bytes)),
        types::SPAWN => Receipt::Spawn(decode_spawn(bytes)),
        types::CALL => Receipt::Call(decode_call(bytes)),
        ty => unreachable!("unknown receipt type: {}", ty),
    }
}
|
use std::process::Command;
use anyhow::anyhow;
use clap::{crate_authors, crate_description, crate_name, crate_version, App, AppSettings, Arg};
use serde::{Deserialize, Serialize};
type FprResult<T> = Result<T, Box<dyn std::error::Error>>;
/// Minimal subset of the GitHub pull-request payload this tool reads.
#[derive(Debug, Serialize, Deserialize)]
struct PullRequest {
    pub head: Head,
}
/// The pull request's head-branch information.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
pub struct Head {
    // GitHub names this field `ref`, which is a Rust keyword.
    #[serde(rename = "ref")]
    pub ref_string: String,
}
/// Entry point: resolves the target repository, queries the GitHub API for
/// the pull request's head branch, then runs `git fetch` to materialise it
/// as a local branch (`pull/<n>/head:<branch>`).
fn main() -> FprResult<()> {
    let app = build_app();
    let matches = app.get_matches();
    let remote = matches.value_of("remote").unwrap_or("origin");
    let (owner, repo) = read_gitconfig(&remote)?;
    // Explicit CLI flags override what the local git config says.
    let owner = matches.value_of("owner").unwrap_or(&owner);
    let repo = matches.value_of("repository").unwrap_or(&repo);
    let pr_no = matches.value_of("pr_no").unwrap(); // required arg, safe to unwrap
    let url = format!(
        "https://api.github.com/repos/{}/{}/pulls/{}",
        owner, repo, pr_no
    );
    let pr: FprResult<PullRequest> = async_std::task::block_on(async {
        let res: PullRequest = match surf::get(url.clone()).recv_json().await {
            // BUGFIX: message previously read "feailed fetch pull request".
            Err(_) => {
                return Err(anyhow!("failed to fetch pull request.\nfetch url: {}", url).into())
            }
            Ok(r) => r,
        };
        Ok(res)
    });
    let ref_string = pr?.head.ref_string;
    let arg = format!("pull/{}/head:{}", pr_no, ref_string);
    let output = Command::new("git").args(&["fetch", &remote, &arg]).output();
    let output = match output {
        Ok(output) => output,
        Err(_) => return Err(anyhow!("git command execution failed").into()),
    };
    // NOTE(review): `git fetch` writes progress to stderr even on success, so
    // treating any stderr output as fatal may reject successful fetches —
    // consider checking `output.status.success()` instead.
    let stderr = std::str::from_utf8(&output.stderr)?.trim();
    if !stderr.is_empty() {
        return Err(anyhow!("{}", stderr).into());
    }
    println!("create new branch: {}", ref_string);
    Ok(())
}
/// Reads `remote.<remote>.url` from the local git config and extracts the
/// `(owner, repository)` pair from the URL, stripping a `.git` suffix.
///
/// NOTE(review): the URL is split on '/', so this handles https remotes
/// (`https://host/owner/repo.git`) but not scp-style ssh remotes
/// (`git@host:owner/repo.git`) — confirm that limitation is acceptable.
fn read_gitconfig(remote: &str) -> FprResult<(String, String)> {
    let remote = format!("remote.{}.url", remote);
    let output = Command::new("git")
        .arg("config")
        .arg("--get")
        .arg(remote)
        .output();
    let output = match output {
        Ok(output) => output,
        Err(_) => return Err(anyhow!("git command execution failed").into()),
    };
    let origin_url = std::str::from_utf8(&output.stdout)?.trim();
    // Walk the URL once: segment 3 is the owner, segment 4 the repository
    // (segments 0-2 are the scheme, the empty part after "//", and the host).
    let mut segments = origin_url.split('/');
    let owner = segments.nth(3).ok_or("Reading of origin url failed")?;
    let repo = segments
        .next()
        .ok_or("Reading of origin url failed")?
        .trim_end_matches(".git");
    Ok((owner.to_owned(), repo.to_owned()))
}
/// Builds the clap CLI: a required PR number, an optional positional remote
/// (defaulting to "origin"), and `-o/--owner`, `-r/--repository` overrides.
fn build_app() -> App<'static, 'static> {
    App::new(crate_name!())
        .version(crate_version!())
        .about(crate_description!())
        .author(crate_authors!())
        .setting(AppSettings::DeriveDisplayOrder)
        .setting(AppSettings::ColoredHelp)
        .arg(
            Arg::with_name("pr_no")
                .help("pull request number fetching to local")
                .value_name("pr_no")
                .required(true),
        )
        .arg(
            Arg::with_name("remote")
                .value_name("remote")
                .default_value("origin"),
        )
        .arg(
            Arg::with_name("owner")
                .short("o")
                .long("owner")
                .help("repository owner (By default it uses the local repository's remote url)")
                .value_name("owner"),
        )
        .arg(
            Arg::with_name("repository")
                .short("r")
                .long("repository")
                .help("repository name (By default it uses the local repository's remote url)")
                .value_name("repository"),
        )
}
|
#![deny(clippy::all)]
#![deny(clippy::pedantic)]
#![forbid(unsafe_code)]
use futures_util::stream::StreamExt;
use pubnub_hyper::runtime::tokio_global::TokioGlobal;
use pubnub_hyper::transport::hyper::Hyper;
use pubnub_hyper::{core::json::object, Builder};
#[tokio::main]
/// Demo: subscribe to "my-channel" with the PubNub demo keys, publish one
/// JSON message, and print both the publish timetoken and the first message
/// received back on the subscription.
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let transport = Hyper::new()
        .publish_key("demo")
        .subscribe_key("demo")
        .build()?;
    let mut pubnub = Builder::new()
        .transport(transport)
        .runtime(TokioGlobal)
        .build();
    let message = object! {
        "username" => "JoeBob",
        "content" => "Hello, world!",
    };
    // Subscribe before publishing so the published message is not missed.
    let mut stream = pubnub.subscribe("my-channel".parse().unwrap()).await;
    let timetoken = pubnub
        .publish("my-channel".parse().unwrap(), message)
        .await?;
    println!("timetoken = {:?}", timetoken);
    let received = stream.next().await;
    println!("received = {:?}", received);
    Ok(())
}
|
#[allow(unused_imports)]
#[macro_use]
extern crate limn;
mod util;
use limn::prelude::*;
/// Event fired by the button to tell the text widget to increment.
struct CountEvent;
/// Limn counter demo: a horizontal row with a number label and a "Count"
/// button; each click sends a `CountEvent` that bumps the displayed number.
fn main() {
    let window_builder = glutin::WindowBuilder::new()
        .with_title("Limn counter demo")
        .with_min_dimensions(100, 100);
    let app = util::init(window_builder);
    let mut root = Widget::new("root");
    root.layout().add(min_size(Size::new(200.0, 100.0)));
    let mut layout_settings = LinearLayoutSettings::new(Orientation::Horizontal);
    layout_settings.spacing = Spacing::Around;
    root.linear_layout(layout_settings);
    // Handler owning the counter state; rewrites the label text on each event.
    #[derive(Default)]
    struct CountHandler {
        count: u32,
    }
    impl EventHandler<CountEvent> for CountHandler {
        fn handle(&mut self, _: &CountEvent, mut args: EventArgs) {
            self.count += 1;
            args.widget.update(|state: &mut TextState| state.text = format!("{}", self.count));
        }
    }
    let mut text_widget = Widget::from_modifier_style(StaticTextStyle::from_text("0"));
    text_widget.add_handler(CountHandler::default());
    text_widget.layout().add(constraints![
        center_vertical(&root),
    ]);
    let mut button_widget = Widget::from_modifier_style(ButtonStyle::from_text("Count"));
    // Clone of the widget handle so the click closure can target it.
    let text_widget_ref = text_widget.clone();
    button_widget.add_handler(move |_: &ClickEvent, _: EventArgs| {
        text_widget_ref.event(CountEvent);
    });
    button_widget.layout().add(constraints![
        center_vertical(&root),
    ]);
    root
        .add_child(text_widget)
        .add_child(button_widget);
    app.main_loop(root);
}
|
use super::shared_vec_slice::SharedVecSlice;
use common::HasLen;
#[cfg(feature = "mmap")]
use fst::raw::MmapReadOnly;
use stable_deref_trait::{CloneStableDeref, StableDeref};
use std::ops::Deref;
/// Read object that represents files in tantivy.
///
/// These read objects are only in charge to deliver
/// the data in the form of a constant read-only `&[u8]`.
/// Whatever happens to the directory file, the data
/// hold by this object should never be altered or destroyed.
pub enum ReadOnlySource {
    /// Mmap source of data
    #[cfg(feature = "mmap")]
    Mmap(MmapReadOnly),
    /// Wrapping a `Vec<u8>`
    Anonymous(SharedVecSlice),
}
// SAFETY: `deref` returns a slice into mmap'd or shared-vec storage; moving
// the `ReadOnlySource` value does not move the bytes it points at.
// NOTE(review): this relies on `MmapReadOnly`/`SharedVecSlice` keeping their
// data behind a stable pointer — confirm neither stores bytes inline.
unsafe impl StableDeref for ReadOnlySource {}
// SAFETY: `clone` shares the same underlying storage (see the `Clone` impl,
// which slices the full range), so clones deref to stable addresses too.
unsafe impl CloneStableDeref for ReadOnlySource {}
// Lets a `ReadOnlySource` be used wherever a `&[u8]` is expected.
impl Deref for ReadOnlySource {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
impl ReadOnlySource {
    /// Creates an empty ReadOnlySource
    pub fn empty() -> ReadOnlySource {
        ReadOnlySource::Anonymous(SharedVecSlice::empty())
    }
    /// Returns the data underlying the ReadOnlySource object.
    pub fn as_slice(&self) -> &[u8] {
        match *self {
            #[cfg(feature = "mmap")]
            ReadOnlySource::Mmap(ref mmap_read_only) => mmap_read_only.as_slice(),
            ReadOnlySource::Anonymous(ref shared_vec) => shared_vec.as_slice(),
        }
    }
    /// Splits into 2 `ReadOnlySource`, at the offset given
    /// as an argument.
    pub fn split(self, addr: usize) -> (ReadOnlySource, ReadOnlySource) {
        let left = self.slice(0, addr);
        let right = self.slice_from(addr);
        (left, right)
    }
    /// Creates a ReadOnlySource that is just a
    /// view over a slice of the data.
    ///
    /// Keep in mind that any living slice extends
    /// the lifetime of the original ReadOnlySource,
    ///
    /// For instance, if `ReadOnlySource` wraps 500MB
    /// worth of data in anonymous memory, and only a
    /// 1KB slice is remaining, the whole `500MBs`
    /// are retained in memory.
    ///
    /// # Panics
    ///
    /// Panics if `from_offset > to_offset`.
    pub fn slice(&self, from_offset: usize, to_offset: usize) -> ReadOnlySource {
        assert!(
            from_offset <= to_offset,
            "Requested negative slice [{}..{}]",
            from_offset,
            to_offset
        );
        match *self {
            #[cfg(feature = "mmap")]
            ReadOnlySource::Mmap(ref mmap_read_only) => {
                // `MmapReadOnly::range` takes (offset, length), not (start, end).
                let sliced_mmap = mmap_read_only.range(from_offset, to_offset - from_offset);
                ReadOnlySource::Mmap(sliced_mmap)
            }
            ReadOnlySource::Anonymous(ref shared_vec) => {
                ReadOnlySource::Anonymous(shared_vec.slice(from_offset, to_offset))
            }
        }
    }
    /// Like `.slice(...)` but enforcing only the `from`
    /// boundary.
    ///
    /// Equivalent to `.slice(from_offset, self.len())`
    pub fn slice_from(&self, from_offset: usize) -> ReadOnlySource {
        let len = self.len();
        self.slice(from_offset, len)
    }
    /// Like `.slice(...)` but enforcing only the `to`
    /// boundary.
    ///
    /// Equivalent to `.slice(0, to_offset)`
    pub fn slice_to(&self, to_offset: usize) -> ReadOnlySource {
        self.slice(0, to_offset)
    }
}
impl HasLen for ReadOnlySource {
    // Length in bytes of the underlying data.
    fn len(&self) -> usize {
        self.as_slice().len()
    }
}
impl Clone for ReadOnlySource {
    // Cloning shares the underlying storage: it is a full-range slice,
    // not a copy of the bytes.
    fn clone(&self) -> Self {
        self.slice(0, self.len())
    }
}
impl From<Vec<u8>> for ReadOnlySource {
    // Wraps an owned buffer as an anonymous (non-mmap) source.
    fn from(data: Vec<u8>) -> ReadOnlySource {
        let shared_data = SharedVecSlice::from(data);
        ReadOnlySource::Anonymous(shared_data)
    }
}
|
use maze::Coor;
/// A cell in the maze: tracks whether it has been visited and which
/// neighbouring coordinates can be reached from it.
#[derive(Clone, Debug, Default)]
pub struct Square {
    visited: bool,
    destinations: Vec<Coor>,
}
impl Square {
    /// Creates an unvisited square with no destinations.
    pub fn new() -> Square {
        // Deriving `Default` (added to satisfy clippy's `new_without_default`)
        // yields exactly the previous initial state: `false` / empty vec.
        Square::default()
    }
    /// Marks the square as visited.
    pub fn visit(&mut self) {
        self.visited = true;
    }
    /// Records that `dest` is reachable from this square.
    pub fn add_dest(&mut self, dest: Coor) {
        self.destinations.push(dest);
    }
    /// The coordinates reachable from this square.
    pub fn dest(&self) -> &Vec<Coor> {
        &self.destinations
    }
    /// Whether the square has been visited.
    pub fn visited(&self) -> bool {
        self.visited
    }
}
|
use std::sync::mpsc::Sender;
use std::sync::{Arc, Mutex};
use crate::{ApplicationContext, Message};
mod mifare_classic;
mod mifare_desfire;
mod nfc_reader;
pub use nfc_reader::identify_atr;
/// Starts the NFC reader loop, forwarding reader events through `sender`
/// and sharing the application context with the rest of the system.
pub fn create(sender: Sender<Message>, context: Arc<Mutex<ApplicationContext>>) {
    nfc_reader::run(sender, context);
}
|
/// Computes the Levenshtein (edit) distance between `word1` and `word2`
/// with bottom-up dynamic programming.
///
/// Fixes two issues in the previous memoized-DFS version:
/// * the table was sized by `String::len()` (byte length) while indexing
///   used `char` positions, which panicked on non-ASCII input;
/// * recursion depth grew with `word1.len() + word2.len()`, risking stack
///   overflow on long inputs.
pub fn min_distance(word1: String, word2: String) -> i32 {
    use std::cmp::min;
    let c1: Vec<char> = word1.chars().collect();
    let c2: Vec<char> = word2.chars().collect();
    // Count characters, not bytes.
    let n = c1.len();
    let m = c2.len();
    // distance[i][j] = edit distance between c1[i..] and c2[j..].
    let mut distance = vec![vec![0i32; m + 1]; n + 1];
    for i in 0..n + 1 {
        distance[i][m] = (n - i) as i32; // delete the rest of word1
    }
    for j in 0..m + 1 {
        distance[n][j] = (m - j) as i32; // insert the rest of word2
    }
    for i in (0..n).rev() {
        for j in (0..m).rev() {
            let replace = distance[i + 1][j + 1] + if c1[i] == c2[j] { 0 } else { 1 };
            let delete = distance[i + 1][j] + 1;
            let insert = distance[i][j + 1] + 1;
            distance[i][j] = min(replace, min(delete, insert));
        }
    }
    distance[0][0]
}
#[test]
fn test_min_distance() {
    // Classic edit-distance cases (LeetCode #72).
    let cases = [("horse", "ros", 3), ("intention", "execution", 5)];
    for &(a, b, expected) in cases.iter() {
        assert_eq!(min_distance(a.to_string(), b.to_string()), expected);
    }
}
use core::convert::TryFrom;
use super::EdgeCallInfo;
/// Size in bytes of the payload buffer in `EdgeMemory` (3 KiB).
pub const EDGE_BUFFER_SIZE: usize = 3 << 10;
/// Space reserved for the `EdgeCallInfo` header.
pub const EDGE_CALL_INFO_SIZE: usize = 256;
/// Memory block exchanged across an edge call.
/// `#[repr(C)]` keeps the field layout stable; presumably this region is
/// shared with the other side of the call boundary — confirm with the caller.
#[repr(C)]
pub struct EdgeMemory {
    /// Requested operation: an `EdgeCallReq` stored as its raw `u32`.
    pub req: u32,
    /// Number of valid bytes currently in `buffer`.
    pub buf_len: u32,
    /// Result slot, read back as an `isize` by `read_syscall_result`.
    pub result: i64,
    /// Call-specific header data.
    pub info: EdgeCallInfo,
    /// Payload bytes; only the first `buf_len` are meaningful.
    pub buffer: [u8; EDGE_BUFFER_SIZE],
}
/// Edge-call request codes stored in `EdgeMemory::req`.
/// `num_enum` derives provide checked conversion from the raw `u32` field
/// (`TryFromPrimitive`) and back (`IntoPrimitive`).
#[repr(u32)]
#[derive(num_enum::TryFromPrimitive, num_enum::IntoPrimitive)]
pub enum EdgeCallReq {
    EdgeCallInvalid,
    EdgeCallPrint,
    EdgeCallSyscall,
    EdgeCallFileApi,
}
impl EdgeMemory {
    /// Decodes `req` into an `EdgeCallReq`.
    ///
    /// # Panics
    /// Panics if `req` holds a value outside the enum's range.
    #[inline]
    pub fn read_request(&self) -> EdgeCallReq {
        EdgeCallReq::try_from(self.req).expect("invalid edge call request")
    }
    /// Stores `req` as its raw `u32`; returns `self` for chaining.
    #[inline]
    pub fn write_request(&mut self, req: EdgeCallReq) -> &mut Self {
        self.req = req.into();
        self
    }
    /// The valid prefix of the payload buffer (`buf_len` bytes).
    #[inline]
    pub fn read_buffer(&self) -> &[u8] {
        &self.buffer[0..(self.buf_len as usize)]
    }
    /// Copies `data` into the buffer and records its length.
    ///
    /// # Panics
    /// Panics if `data` exceeds `EDGE_BUFFER_SIZE`.
    #[inline]
    pub fn write_buffer(&mut self, data: &[u8]) -> &mut Self {
        use core::convert::TryInto;
        assert!(data.len() <= EDGE_BUFFER_SIZE);
        self.buffer[0..data.len()].copy_from_slice(data);
        self.buf_len = data.len().try_into().unwrap();
        self
    }
    /// Returns the info header by value.
    // NOTE(review): moving `self.info` out of a `&self` borrow requires
    // `EdgeCallInfo: Copy` — not visible from this file; confirm.
    #[inline]
    pub fn read_info(&self) -> EdgeCallInfo {
        self.info
    }
    /// Replaces the info header; returns `self` for chaining.
    #[inline]
    pub fn write_info(&mut self, info: EdgeCallInfo) -> &mut Self {
        self.info = info;
        self
    }
    /// Reads `result` as an `isize`.
    ///
    /// # Panics
    /// Panics if the `i64` does not fit in `isize` (e.g. on 32-bit targets).
    #[inline]
    pub fn read_syscall_result(&self) -> isize {
        use core::convert::TryInto;
        self.result.try_into().expect("integer overflow?!")
    }
}
|
use chrono::{NaiveTime, Timelike};
// 86_400 seconds per civil day.
const SECONDS_IN_A_DAY: u32 = 24 * 60 * 60;
/// Maps a time of day onto `[0.0, 1.0)`: midnight -> 0.0, noon -> 0.5.
pub fn time_to_day_fraction(time: &NaiveTime) -> f64 {
    time.num_seconds_from_midnight() as f64 / SECONDS_IN_A_DAY as f64
}
/// Inverse of `time_to_day_fraction`: maps a fraction in `[0.0, 1.0]` back
/// to a `NaiveTime` (truncating to whole seconds).
///
/// Fixes two defects of the previous version:
/// * `day_fraction == 1.0` passed the assert but produced second 86_400,
///   which `from_num_seconds_from_midnight_opt` rejects → unwrap panic;
///   it is now clamped to the last representable second, 23:59:59.
/// * negative input silently saturated to midnight via the `as u32` cast;
///   it is now rejected by the assertion.
///
/// # Panics
///
/// Panics when `day_fraction` is outside `[0.0, 1.0]` (NaN included).
pub fn day_fraction_to_time(day_fraction: f64) -> NaiveTime {
    assert!(
        (0.0..=1.0).contains(&day_fraction),
        "day fraction out of range: {}",
        day_fraction
    );
    let seconds_passed =
        ((day_fraction * f64::from(SECONDS_IN_A_DAY)) as u32).min(SECONDS_IN_A_DAY - 1);
    NaiveTime::from_num_seconds_from_midnight_opt(seconds_passed, 0)
        .expect("seconds are clamped below one day")
}
|
use spectral::prelude::*;
use super::*;
// --- Recognition and parsing -------------------------------------------------
#[test]
fn is_fraction_with_valid_expression() {
    let fraction_exp = "1/2";
    assert_that!(&Fraction::is_fraction(fraction_exp))
        .is_true();
}
#[test]
fn is_fraction_with_invalid_expression() {
    // A bare integer is not a fraction expression.
    let fraction_exp = "2";
    assert_that!(&Fraction::is_fraction(fraction_exp))
        .is_false();
}
#[test]
fn parse_fraction_with_valid_expresion() {
    let fraction_exp = "1/2";
    let fraction = Fraction::parse_fraction(fraction_exp).unwrap();
    assert_that!(&fraction.numerator)
        .is_equal_to(&1);
    assert_that!(&fraction.denominator)
        .is_equal_to(&2);
}
#[test]
fn parse_fraction_with_valid_negative_expresion() {
    // The sign stays on the numerator.
    let fraction_exp = "-1/2";
    let fraction = Fraction::parse_fraction(fraction_exp).unwrap();
    assert_that!(&fraction.numerator)
        .is_equal_to(&-1);
    assert_that!(&fraction.denominator)
        .is_equal_to(&2);
}
#[test]
fn parse_fraction_with_negative_elements() {
    // Two negatives cancel out during normalisation.
    let fraction_exp = "-1/-2";
    let fraction = Fraction::parse_fraction(fraction_exp).unwrap();
    assert_that!(&fraction.numerator)
        .is_equal_to(&1);
    assert_that!(&fraction.denominator)
        .is_equal_to(&2);
}
#[test]
#[should_panic(expected = "Unparseable fraction!")]
fn parse_fraction_with_invalid_expresion() {
    let fraction_exp = "2";
    Fraction::parse_fraction(fraction_exp).unwrap();
}
// --- Arithmetic --------------------------------------------------------------
#[test]
fn add_fractions() {
    // 1/2 + 1/3 = 5/6
    let x = Fraction::new(1, 2).unwrap();
    let y = Fraction::new(1, 3).unwrap();
    let actual = x.add(&y);
    let expected = Fraction::new(5, 6).unwrap();
    assert_that!(&actual)
        .is_equal_to(&expected);
}
#[test]
fn substract_fractions() {
    // 1/2 - 1/3 = 1/6
    let x = Fraction::new(1, 2).unwrap();
    let y = Fraction::new(1, 3).unwrap();
    let actual = x.substract(&y);
    let expected = Fraction::new(1, 6).unwrap();
    assert_that!(&actual)
        .is_equal_to(&expected);
}
#[test]
fn multiply_fractions() {
    // 1/2 * 1/3 = 1/6
    let x = Fraction::new(1, 2).unwrap();
    let y = Fraction::new(1, 3).unwrap();
    let actual = x.multiply(&y);
    let expected = Fraction::new(1, 6).unwrap();
    assert_that!(&actual)
        .is_equal_to(&expected);
}
#[test]
fn divide_fractions() {
    // (1/2) / (1/3) = 3/2
    let x = Fraction::new(1, 2).unwrap();
    let y = Fraction::new(1, 3).unwrap();
    let actual = x.divide(&y).unwrap();
    let expected = Fraction::new(3, 2).unwrap();
    assert_that!(&actual)
        .is_equal_to(&expected);
}
#[test]
#[should_panic(expected = "Division by zero!")]
fn divide_fractions_by_zero() {
    let x = Fraction::new(1, 2).unwrap();
    let y = Fraction::new(0, 3).unwrap();
    x.divide(&y).unwrap();
}
// --- Construction invariants -------------------------------------------------
#[test]
fn new_simplifies_fraction() {
    // 3/12 reduces to 1/4 on construction.
    let actual = Fraction::new(3, 12).unwrap();
    let expected = Fraction::new(1, 4).unwrap();
    assert_that!(&actual)
        .is_equal_to(&expected);
}
#[test]
#[should_panic(expected = "Fraction with zero denominator!")]
fn new_with_zero_denominator() {
    Fraction::new(3, 0).unwrap();
}
// --- Display formatting ------------------------------------------------------
#[test]
fn display_proper_fraction() {
    let fraction = Fraction::new(1, 2).unwrap();
    let actual = format!("{}", fraction);
    assert_that!(actual.as_str())
        .is_equal_to(&"1/2");
}
#[test]
fn display_improper_fraction_as_mixed_number() {
    // 5/2 renders as the mixed number 2_1/2.
    let fraction = Fraction::new(5, 2).unwrap();
    let actual = format!("{}", fraction);
    assert_that!(actual.as_str())
        .is_equal_to(&"2_1/2");
}
#[test]
fn display_exact_fraction_as_integer() {
    let fraction = Fraction::new(4, 2).unwrap();
    let actual = format!("{}", fraction);
    assert_that!(actual.as_str())
        .is_equal_to(&"2");
}
#[test]
fn display_zero_fraction() {
    let fraction = Fraction::new(0, 2).unwrap();
    let actual = format!("{}", fraction);
    assert_that!(actual.as_str())
        .is_equal_to(&"0");
}
#[test]
fn display_negative_proper_fraction() {
    // -5/3 renders as -1_2/3 (sign on the whole mixed number).
    let fraction = Fraction::new(-5, 3).unwrap();
    let actual = format!("{}", fraction);
    assert_that!(actual.as_str())
        .is_equal_to(&"-1_2/3");
}
|
use std::collections::HashMap;
use std::marker::PhantomData;
use analyser::interface::*;
use ops::prelude::*;
use tensor::Datum;
use Result;
/// TensorFlow `AddN` op: element-wise sum of `n` input tensors of dtype `T`.
#[derive(Debug, Clone, Default, new)]
pub struct AddN<T: Datum> {
    // Number of inputs the op expects (the `N` attribute).
    n: usize,
    // Pins the element dtype `T` without storing a value of it.
    _phantom: PhantomData<T>,
}
/// Builds an `AddN` op from a TensorFlow `NodeDef`, reading the dtype from
/// attribute "T" and the input count from attribute "N".
pub fn add_n(pb: &::tfpb::node_def::NodeDef) -> Result<Box<Op>> {
    let dtype = pb.get_attr_datatype("T")?;
    let n = pb.get_attr_int("N")?;
    // Macro dispatches on the runtime dtype to pick the concrete `AddN<T>`.
    Ok(boxed_new!(AddN(dtype)(n)))
}
impl<T> Op for AddN<T>
where
    T: Datum,
{
    /// Evaluates the operation given the input tensors.
    /// Accumulates into the last input's array and adds the rest as views.
    fn eval(&self, mut inputs: Vec<TensorView>) -> Result<Vec<TensorView>> {
        // Exactly `n` inputs are required, and `n == 0` is rejected so the
        // `pop()` below always succeeds.
        if inputs.len() != self.n || self.n == 0 {
            bail!("Expected {} inputs", self.n);
        }
        let mut result = T::tensor_into_array(inputs.pop().unwrap().into_tensor())?; // checked, non empty
        for input in &inputs[0..] {
            result += &T::tensor_to_view(input.as_tensor())?;
        }
        Ok(vec![T::array_into_tensor(result).into()])
    }
    /// Returns the attributes of the operation and their values.
    fn get_attributes(&self) -> HashMap<&'static str, Attr> {
        hashmap!{
            "T" => Attr::DataType(T::datatype()),
            "N" => Attr::Usize(self.n),
        }
    }
}
impl<T: Datum> InferenceRulesOp for AddN<T> {
    /// Shape/type inference: `n` inputs, one output; all inputs share the
    /// output's datatype, rank, and every dimension of its shape.
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        solver: &mut Solver<'r>,
        inputs: &'p TensorsProxy,
        outputs: &'p TensorsProxy,
    ) {
        let n = self.n as isize;
        solver
            .equals(&inputs.len, n)
            .equals(&outputs.len, 1)
            .equals(&inputs[0].datatype, &outputs[0].datatype)
            .equals_all((0..self.n).map(|i| bexp(&inputs[i].datatype)).collect())
            .equals(&inputs[0].rank, &outputs[0].rank)
            .equals_all((0..self.n).map(|i| bexp(&inputs[i].rank)).collect())
            // Once the rank is known, constrain each dimension across all
            // inputs and the output.
            .given(&inputs[0].rank, move |solver, rank: usize| {
                for dim in 0..rank {
                    solver.equals(&inputs[0].shape[dim], &outputs[0].shape[dim]);
                    solver.equals_all(
                        (0..n as usize)
                            .map(|i| bexp(&inputs[i].shape[dim]))
                            .collect(),
                    );
                }
            });
    }
}
|
/// Demonstrates passing binary integer operations as `fn` pointers.
fn main() {
    // A binary integer operation taken as a plain function pointer.
    type BinOp = fn(i32, i32) -> i32;
    // Non-capturing closures coerce to `fn` pointers.
    let add: BinOp = |a, b| a + b;
    let subtract: BinOp = |a, b| a - b;
    // Applies the supplied operation to the two operands.
    fn calculate(f: fn(i32, i32) -> i32, x: i32, y: i32) -> i32 {
        f(x, y)
    }
    let (x, y) = (4, 2);
    println!("variables {}, {}", x, y);
    println!("Trying add = {}", calculate(add, x, y));
    println!("Trying subtract = {}", calculate(subtract, x, y));
}
|
use {Error, Result};
use serde::Deserialize;
/// A response payload from the Telegram bot API.
///
/// When `ok` is true the payload should carry `result`; otherwise
/// `description` may carry the API's error message (see `handle`).
#[derive(Debug,Deserialize)]
pub struct Response<T: Deserialize> {
    ok: bool,
    description: Option<String>,
    result: Option<T>,
}
impl<T: Deserialize> Response<T> {
/// Handles the response, returning an error result if the payload signals
/// an error or is invalid, or a success result if the payload is a valid
/// successful response.
pub fn handle(self) -> Result<T> {
if self.ok {
match self.result {
Some(r) => Ok(r),
None => Err(Error::Api("No result received".to_string())),
}
} else {
match self.description {
Some(d) => Err(Error::Api(d)),
None => Err(Error::Api("Unknown API error".to_string())),
}
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Simple deserializable payload used as the generic parameter.
    #[derive(Debug,Deserialize)]
    struct TestType {
        text: String,
    }
    // ok=true with a result: handle() yields the payload.
    #[test]
    fn handle_correctly_handles_successful_responses() {
        let text = "Testing stuff".to_string();
        let value = TestType { text: text.clone() };
        let response: Response<TestType> = Response {
            ok: true,
            description: None,
            result: Some(value),
        };
        let result = response.handle();
        if let Ok(TestType { text: t }) = result {
            assert_eq!(text, t);
        } else {
            let expected: Result<TestType> = Ok(TestType { text: text });
            panic!("Expected {:?}, found {:?}", expected, result);
        }
    }
    // ok=false with a description: the description becomes the error message.
    #[test]
    fn handle_correctly_handles_errored_responses() {
        let error = "It's broken!".to_string();
        let response: Response<TestType> = Response {
            ok: false,
            description: Some(error.clone()),
            result: None,
        };
        let result = response.handle();
        if let Err(Error::Api(msg)) = result {
            assert_eq!(error, msg);
        } else {
            let expected: Result<TestType> = Err(Error::Api(error));
            panic!("Expected {:?}, found {:?}", expected, result);
        }
    }
    // ok=true but no result: treated as an API error.
    #[test]
    fn handle_correctly_handles_invalid_successful_responses() {
        let response: Response<TestType> = Response {
            ok: true,
            description: None,
            result: None,
        };
        let result = response.handle();
        if let Err(Error::Api(msg)) = result {
            assert_eq!("No result received".to_string(), msg);
        } else {
            let expected: Result<TestType> = Err(Error::Api("No result received".to_string()));
            panic!("Expected {:?}, found {:?}", expected, result);
        }
    }
    // ok=false with no description: falls back to a generic message.
    #[test]
    fn handle_correctly_handles_invalid_errored_responses() {
        let response: Response<TestType> = Response {
            ok: false,
            description: None,
            result: None,
        };
        let result = response.handle();
        if let Err(Error::Api(msg)) = result {
            assert_eq!("Unknown API error".to_string(), msg);
        } else {
            let expected: Result<TestType> = Err(Error::Api("Unknown API error".to_string()));
            panic!("Expected {:?}, found {:?}", expected, result);
        }
    }
}
|
// quite simplistic text field implementation.
use skulpin::skia_safe::*;
use crate::ui::*;
/// A simple single-line text field with focus handling and a blinking caret.
pub struct TextField {
    // Editable contents as chars, so backspace can pop one user-perceived unit.
    text: Vec<char>,
    // `text` re-encoded as UTF-8, kept in sync by `update_utf8`.
    text_utf8: String,
    // Whether this field currently receives keyboard input.
    focused: bool,
    // Timestamp (seconds) of the last caret-blink reset.
    blink_start: f32,
}
/// Color scheme for rendering a `TextField`.
#[derive(Clone)]
pub struct TextFieldColors {
    /// Border color when unfocused.
    pub outline: Color,
    /// Border color when focused.
    pub outline_focus: Color,
    /// Background fill.
    pub fill: Color,
    /// Entered-text color (also used for the caret).
    pub text: Color,
    /// Placeholder-hint color.
    pub text_hint: Color,
    /// Label color for `with_label`.
    pub label: Color,
}
/// Per-call rendering arguments for `TextField::process`.
#[derive(Clone, Copy)]
pub struct TextFieldArgs<'a, 'b> {
    /// Width of the field in pixels.
    pub width: f32,
    /// Color scheme to draw with.
    pub colors: &'a TextFieldColors,
    /// Optional placeholder shown while the field is empty.
    pub hint: Option<&'b str>,
}
impl TextField {
    // ASCII control characters delivered through the typed-characters stream.
    const BACKSPACE: char = '\x08';
    const BLINK_PERIOD: f32 = 1.0;
    const HALF_BLINK: f32 = Self::BLINK_PERIOD / 2.0;
    // NOTE(review): TAB is declared but never matched in `process_events`
    // (control characters fall through to the ignore arm) — confirm intended.
    const TAB: char = '\x09';
    /// Creates a field, optionally pre-filled with `initial_text`.
    pub fn new(initial_text: Option<&str>) -> Self {
        let text_utf8: String = initial_text.unwrap_or("").into();
        Self {
            text: text_utf8.chars().collect(),
            text_utf8,
            focused: false,
            blink_start: 0.0,
        }
    }
    // Re-encodes the char buffer into the cached UTF-8 string.
    fn update_utf8(&mut self) {
        self.text_utf8 = self.text.iter().collect();
    }
    /// Height of a field for the current font size (16/7 ratio, rounded).
    pub fn height(ui: &Ui) -> f32 {
        f32::round(16.0 / 7.0 * ui.font_size())
    }
    /// Renders the field (box, hint/text, caret) and processes its input.
    pub fn process(
        &mut self,
        ui: &mut Ui,
        canvas: &mut Canvas,
        input: &Input,
        TextFieldArgs { width, colors, hint }: TextFieldArgs,
    ) {
        ui.push_group((width, Self::height(ui)), Layout::Freeform);
        // rendering: box
        ui.draw_on_canvas(canvas, |canvas| {
            let mut paint = Paint::new(Color4f::from(colors.fill), None);
            paint.set_anti_alias(true);
            let mut rrect = RRect::new_rect_xy(&Rect::from_point_and_size((0.0, 0.0), ui.size()), 4.0, 4.0);
            canvas.draw_rrect(rrect, &paint);
            paint.set_color(if self.focused {
                colors.outline_focus
            } else {
                colors.outline
            });
            paint.set_style(paint::Style::Stroke);
            // Half-pixel offset keeps the 1px stroke crisp.
            rrect.offset((0.5, 0.5));
            canvas.draw_rrect(rrect, &paint);
        });
        // rendering: text
        ui.push_group(ui.size(), Layout::Freeform);
        ui.pad((16.0, 0.0));
        canvas.save();
        ui.clip(canvas);
        // render hint
        if hint.is_some() && self.text.len() == 0 {
            ui.text(canvas, hint.unwrap(), colors.text_hint, (AlignH::Left, AlignV::Middle));
        }
        let text_advance = ui.text(canvas, &self.text_utf8, colors.text, (AlignH::Left, AlignV::Middle));
        // Caret: visible during the first half of each blink period.
        if self.focused && (input.time_in_seconds() - self.blink_start) % Self::BLINK_PERIOD < Self::HALF_BLINK {
            ui.draw_on_canvas(canvas, |canvas| {
                let mut paint = Paint::new(Color4f::from(colors.text), None);
                paint.set_anti_alias(false);
                paint.set_style(paint::Style::Stroke);
                let x = text_advance + 1.0;
                let y1 = Self::height(ui) * 0.2;
                let y2 = Self::height(ui) * 0.8;
                canvas.draw_line((x, y1), (x, y2), &paint);
            });
        }
        canvas.restore();
        ui.pop_group();
        // process events
        self.process_events(ui, input);
        ui.pop_group();
    }
    // Restarts the blink cycle so the caret is visible right after activity.
    fn reset_blink(&mut self, input: &Input) {
        self.blink_start = input.time_in_seconds();
    }
    // Appends a typed character and refreshes the UTF-8 cache.
    fn append(&mut self, ch: char) {
        self.text.push(ch);
        self.update_utf8();
    }
    // Removes the last character (no-op when empty) and refreshes the cache.
    fn backspace(&mut self) {
        self.text.pop();
        self.update_utf8();
    }
    // Click-to-focus plus keyboard editing while focused.
    fn process_events(&mut self, ui: &Ui, input: &Input) {
        if input.mouse_button_just_pressed(MouseButton::Left) {
            self.focused = ui.has_mouse(input);
            if self.focused {
                self.reset_blink(input);
            }
        }
        if self.focused {
            if !input.characters_typed().is_empty() {
                self.reset_blink(input);
            }
            for ch in input.characters_typed() {
                // The guard arm catches printable characters first; control
                // characters fall through to the BACKSPACE / ignore arms.
                match *ch {
                    _ if !ch.is_control() => self.append(*ch),
                    Self::BACKSPACE => self.backspace(),
                    _ => (),
                }
            }
        }
    }
    /// Height of a field plus its 16px label strip (see `with_label`).
    pub fn labelled_height(ui: &Ui) -> f32 {
        16.0 + TextField::height(ui)
    }
    /// Renders a label above the field, then the field itself.
    pub fn with_label(&mut self, ui: &mut Ui, canvas: &mut Canvas, input: &Input, label: &str, args: TextFieldArgs) {
        ui.push_group((args.width, Self::labelled_height(ui)), Layout::Vertical);
        // label
        ui.push_group((args.width, 16.0), Layout::Freeform);
        ui.text(canvas, label, args.colors.label, (AlignH::Left, AlignV::Top));
        ui.pop_group();
        // field
        self.process(ui, canvas, input, args);
        ui.pop_group();
    }
    /// The field's current contents as a string slice.
    pub fn text<'a>(&'a self) -> &'a str {
        &self.text_utf8
    }
}
// Exposes the field's focus flag through the shared `Focus` trait.
impl Focus for TextField {
    fn focused(&self) -> bool {
        self.focused
    }
    fn set_focus(&mut self, focused: bool) {
        self.focused = focused;
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SloListResponseMetadata : The metadata object containing additional information about the list of SLOs.
// NOTE: generated by openapi-generator (see file header); keep edits minimal.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SloListResponseMetadata {
// Pagination info; omitted from serialized output when absent.
#[serde(rename = "page", skip_serializing_if = "Option::is_none")]
pub page: Option<Box<crate::models::SloListResponseMetadataPage>>,
}
impl SloListResponseMetadata {
/// The metadata object containing additional information about the list of SLOs.
// Constructs an empty metadata object with no paging information.
pub fn new() -> SloListResponseMetadata {
SloListResponseMetadata {
page: None,
}
}
}
|
/// Sums a hard-coded list of integer tokens and prints the total.
fn test() {
    let numbers = vec!["-17", "99", "-27", "+68"];
    // Parse each token, panicking with the same message on bad input.
    let total: i32 = numbers
        .into_iter()
        .map(|token| token.parse::<i32>().expect("Token is not an int"))
        .sum();
    println!("Total is {}", total);
}
/// Reads integers from stdin, one per line, and prints their sum.
fn read_stdin() {
    use std::io::BufRead; // brings the lines() method into scope
    let total: i32 = std::io::stdin()
        .lock()
        .lines() // yields io::Result<String>, trailing \r\n or \n already stripped
        .map(|line| {
            line.expect("Error reading input line")
                .parse::<i32>()
                .expect("Line is not an integer")
        })
        .sum();
    println!("Total is {}", total);
}
/// Reads integers from "input.txt" line by line and prints their sum.
fn read_file() {
    use std::io::BufRead; // brings the lines() method into scope
    let file = std::fs::File::open("input.txt").expect("Unable to open file");
    let total: i32 = std::io::BufReader::new(file)
        .lines() // yields io::Result<String>, trailing \r\n or \n already stripped
        .map(|line| {
            line.expect("Error reading input line")
                .parse::<i32>()
                .expect("Line is not an integer")
        })
        .sum();
    println!("Total is {}", total);
}
/// Slurps "input.txt" into one String, sums its integer lines, prints the total.
fn read_str() {
    let data = std::fs::read_to_string("input.txt").expect("Unable to create String from input.txt");
    let total: i32 = data
        .lines() // &str slices with the newline already stripped
        .map(|line| line.parse::<i32>().expect("Line is not an integer"))
        .sum();
    println!("Total is {}", total);
}
/// Entry point: reads the entire input file into a String and sums it.
/// The commented alternatives exercise the other input strategies.
fn main() {
    //test();        // hard-coded data
    //read_stdin();  // stdin line by line
    //read_file();   // file line by line
    read_str(); // read entire file into a single String
}
// Tried to create a function that takes an "iterable" IntoIterator trait, so that
// I can implement the solution logic on a file, stdin, String::lines(), or a test vector
// I was able to write a working version that takes ownership of a Vec<&str>,
// but could not figure out how to do a borrowing version
// The BufRead::lines() yields an iterator on io::Result<String>, so it will need to be unwrapped first
/*
//fn part1<'a, T: Iterator<Item = &'a str>>(lines: &T) -> Result<i32, std::num::ParseIntError> {
fn part1<T>(lines: T) -> Result<i32, std::num::ParseIntError>
where
T: IntoIterator,
T::Item: FromStr,
{
lines.into_iter()
.map(|x| i32::from_str(x))
//.map(i32::from_str) //requires use std::str::FromStr;
//.map(std::str::parse::<i32>())
.sum()
}
// Consumes lines, I cannot figure out how to borrow lines
fn test2<'a, T>(lines: T) -> Result<i32, std::num::ParseIntError>
where
T: IntoIterator<Item = &'a str> // if the Item constraint is a type
// T: IntoIterator, T::Item = std::fmt::Debug // if the Item constraint is a trait
{
lines.into_iter().map(|x| str::parse::<i32>(x)).sum()
}
fn test() {
let numbers = vec!["-17", "99", "-27", "+68"];
//let a = (&numbers).into_iter();
//println!("{:#?}", a);
//for i in a { println!("{}", i); }
//println!("{:#?}", &numbers);
let answer: Result<i32, std::num::ParseIntError> = test2(numbers);
//let answer: Result<i32, std::num::ParseIntError> = test2((&numbers).into_iter());
println!("Total is {:#?}", answer);
//test2(numbers);
//println!("{:#?}", numbers);
//let answer: Result<i32, std::num::ParseIntError> = test2(a);
/*
let answer: Result<i32, std::num::ParseIntError> = part1((&numbers).into_iter());
println!("Total is {:#?}", answer);
let answer: Result<i32, std::num::ParseIntError> = part1((&numbers).into_iter());
println!("Total is {:#?}", answer);
*/
}
*/
|
//! ## Keymap
//!
//! Keymap contains pub constants which can be used in the `update` function to match messages
/**
* MIT License
*
* termail - Copyright (c) 2021 Larry Hao
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
use tuirealm::event::{KeyCode, KeyEvent, KeyModifiers};
use tuirealm::Msg;
// -- Special keys
// Messages matched in `update` for the special (non-character) keys.
pub const MSG_KEY_ENTER: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Enter,
modifiers: KeyModifiers::NONE,
});
pub const MSG_KEY_ESC: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Esc,
modifiers: KeyModifiers::NONE,
});
pub const MSG_KEY_TAB: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Tab,
modifiers: KeyModifiers::NONE,
});
// pub const MSG_KEY_SHIFT_TAB: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::BackTab,
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_DEL: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Delete,
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_BACKSPACE: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Backspace,
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_DOWN: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Down,
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_LEFT: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Left,
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_RIGHT: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Right,
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_UP: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Up,
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_SPACE: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char(' '),
// modifiers: KeyModifiers::NONE,
// });
// // -- char keys
// pub const MSG_KEY_CHAR_A: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('a'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_B: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('b'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_CAPITAL_B: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('B'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_CAPITAL_B: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('B'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_C: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('c'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_D: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('d'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_CAPITAL_D: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('D'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_E: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('e'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_F: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('f'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_CAPITAL_F: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('F'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_CAPITAL_F: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('F'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_G: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('g'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_CAPITAL_G: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('G'),
// modifiers: KeyModifiers::SHIFT,
// });
// Vim-style navigation keys (h/j/k/l); neighbouring letters stay disabled.
pub const MSG_KEY_CHAR_H: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Char('h'),
modifiers: KeyModifiers::NONE,
});
// pub const MSG_KEY_CHAR_I: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('i'),
// modifiers: KeyModifiers::NONE,
// });
// /*
pub const MSG_KEY_CHAR_J: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Char('j'),
modifiers: KeyModifiers::NONE,
});
pub const MSG_KEY_CHAR_K: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Char('k'),
modifiers: KeyModifiers::NONE,
});
// */
pub const MSG_KEY_CHAR_L: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Char('l'),
modifiers: KeyModifiers::NONE,
});
// pub const MSG_KEY_CHAR_CAPITAL_L: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('L'),
// modifiers: KeyModifiers::SHIFT,
// });
// // /*
// pub const MSG_KEY_CHAR_M: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('m'),
// modifiers: KeyModifiers::NONE,
// });
// // */
// pub const MSG_KEY_CHAR_N: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('n'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_CAPITAL_N: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('N'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_O: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('o'),
// modifiers: KeyModifiers::NONE,
// });
// /*
// pub const MSG_KEY_CHAR_P: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('p'),
// modifiers: KeyModifiers::NONE,
// });
// */
// Shift+Q — presumably the quit binding; confirm against the `update` match arms.
pub const MSG_KEY_CHAR_CAPITAL_Q: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Char('Q'),
modifiers: KeyModifiers::SHIFT,
});
// pub const MSG_KEY_CHAR_CAPITAL_T: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('T'),
// modifiers: KeyModifiers::SHIFT,
// });
// pub const MSG_KEY_CHAR_PLUS: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('+'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_MINUS: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('-'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_EQUAL: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('='),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_DASH: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('_'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_R: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('r'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_S: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('s'),
// modifiers: KeyModifiers::NONE,
// });
// // /*
// pub const MSG_KEY_CHAR_T: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('t'),
// modifiers: KeyModifiers::NONE,
// });
// */
// pub const MSG_KEY_CHAR_U: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('u'),
// modifiers: KeyModifiers::NONE,
// });
// /*
// pub const MSG_KEY_CHAR_V: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('v'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_W: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('w'),
// modifiers: KeyModifiers::NONE,
// });
// */
// pub const MSG_KEY_CHAR_X: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('x'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CHAR_Y: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('y'),
// modifiers: KeyModifiers::NONE,
// });
// /*
// pub const MSG_KEY_CHAR_Z: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('z'),
// modifiers: KeyModifiers::NONE,
// });
// */
// // -- control
// pub const MSG_KEY_CTRL_C: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('c'),
// modifiers: KeyModifiers::CONTROL,
// });
// pub const MSG_KEY_CTRL_E: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('e'),
// modifiers: KeyModifiers::CONTROL,
// });
// Ctrl+H — distinct from the plain 'h' binding above.
pub const MSG_KEY_CTRL_H: Msg = Msg::OnKey(KeyEvent {
code: KeyCode::Char('h'),
modifiers: KeyModifiers::CONTROL,
});
// pub const MSG_KEY_SLASH: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('/'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_QUESTION_MARK: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('?'),
// modifiers: KeyModifiers::NONE,
// });
// pub const MSG_KEY_CTRL_N: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('n'),
// modifiers: KeyModifiers::CONTROL,
// });
// pub const MSG_KEY_CTRL_R: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('r'),
// modifiers: KeyModifiers::CONTROL,
// });
// pub const MSG_KEY_CTRL_S: Msg = Msg::OnKey(KeyEvent {
// code: KeyCode::Char('s'),
// modifiers: KeyModifiers::CONTROL,
// });
|
use prime::{get_fermat_primes, get_left_truncatable_primes, get_right_truncatable_primes};
#[cfg(test)]
mod fermat_primes {
    use super::*;

    /// Exactly five Fermat primes are known: 3, 5, 17, 257, 65537.
    #[test]
    fn count() {
        assert_eq!(get_fermat_primes().len(), 5);
    }
}
#[cfg(test)]
mod truncatable_primes {
    use super::*;

    /// There are 4260 left-truncatable primes in base 10.
    #[test]
    fn count_left() {
        assert_eq!(get_left_truncatable_primes().len(), 4260);
    }

    /// There are 83 right-truncatable primes in base 10.
    #[test]
    fn count_right() {
        assert_eq!(get_right_truncatable_primes().len(), 83);
    }
}
|
use std::io;
use std::error::Error;
/// Minimal read-eval-print loop: echoes each line until "quit" is entered.
fn main() -> std::result::Result<(), Box<dyn Error>> {
    // Create the editor ONCE, outside the loop. The original rebuilt it on
    // every iteration, which discarded the line history each time around.
    let mut rl = rustyline::Editor::<()>::with_config(
        rustyline::Config::builder()
            .build()
    );
    loop {
        // Read.
        let line = rl.readline(">> ")?;
        // &str satisfies add_history_entry's bounds; no clone needed.
        rl.add_history_entry(line.as_str());
        // Eval.
        if line == "quit" {
            break;
        }
        // Print.
        println!("{}", line);
    }
    Ok(())
}
|
use super::Dev;
/// Packs a (major, minor) pair into a 64-bit device number.
///
/// Bit layout (low to high): bits 0-7 = minor[0:8], bits 8-19 = major[0:12],
/// bits 20-43 = minor[8:32], bits 44-63 = major[12:32]. This is the split
/// layout used by glibc/Linux `makedev` — verify against the target libc.
#[inline]
pub fn makedev(maj: u32, min: u32) -> Dev {
((u64::from(maj) & 0xfffff000_u64) << 32)
| ((u64::from(maj) & 0x00000fff_u64) << 8)
| ((u64::from(min) & 0xffffff00_u64) << 12)
| (u64::from(min) & 0x000000ff_u64)
}
/// Extracts the major number: upper 20 bits from dev[44:64], lower 12 bits
/// from dev[8:20]. `>> 31 >> 1` is equivalent to `>> 32`.
#[inline]
pub fn major(dev: Dev) -> u32 {
(((dev >> 31 >> 1) & 0xfffff000) | ((dev >> 8) & 0x00000fff)) as u32
}
/// Extracts the minor number: upper 24 bits from dev[20:44], lower 8 bits
/// from dev[0:8]. Inverse of the packing done by `makedev`.
#[inline]
pub fn minor(dev: Dev) -> u32 {
(((dev >> 12) & 0xffffff00) | (dev & 0x000000ff)) as u32
}
|
/// Returns whether `s2` is a "scramble" of `s1`: obtainable by recursively
/// splitting `s1` into two substrings and optionally swapping the halves.
///
/// Expects lowercase ASCII input ('a'..='z'); other characters would make
/// the histogram index underflow/overflow and panic.
pub fn is_scramble(s1: String, s2: String) -> bool {
    // Recursive check over char slices of equal length.
    fn core(s1: &[char], s2: &[char]) -> bool {
        if s1 == s2 {
            return true;
        }
        // Letter-frequency histogram over 'a'..='z'. A fixed array replaces
        // the original heap-allocated Vec — free win on this hot path.
        fn freq(s: &[char]) -> [usize; 26] {
            let mut counts = [0usize; 26];
            for &c in s {
                counts[(c as u8 - b'a') as usize] += 1;
            }
            counts
        }
        // Different multisets of letters (including different lengths)
        // can never be scrambles of one another.
        if freq(s1) != freq(s2) {
            return false;
        }
        for i in 1..s1.len() {
            // Split without swapping: s1[..i] vs s2[..i].
            if core(&s1[..i], &s2[..i]) && core(&s1[i..], &s2[i..]) {
                return true;
            }
            // Split with the two halves swapped in s2.
            let j = s1.len() - i;
            if core(&s1[..i], &s2[j..]) && core(&s1[i..], &s2[..j]) {
                return true;
            }
        }
        false
    }
    let a: Vec<char> = s1.chars().collect();
    let b: Vec<char> = s2.chars().collect();
    core(&a, &b)
}
|
use super::estimate::PointEstimateRep;
use super::MentalState;
use crate::entity::{Entity, Source, Storage, WorldEntity};
use crate::entity_type::EntityType;
use crate::position::Coord;
use crate::{Action, Cell, Event, Observation, Occupancy, Position, StorageSlice, World};
use crate::agent::estimate::ParticleFilterRep;
use rand::Rng;
use rayon::prelude::*;
use std::collections::{hash_map::RandomState, HashMap};
/// A representation of another agent's estimated mental state,
/// updatable from observations and sampleable into concrete `MentalState`s.
pub trait MentalStateRep: std::fmt::Display + Sized {
// GAT: iterator over (emotional state, action, behavior) hypotheses.
type Iter<'a>: std::iter::ExactSizeIterator<
Item = (
&'a super::emotion::EmotionalState,
&'a Action,
&'a Option<super::Behavior>,
),
>;
/// Draws one concrete MentalState; `scale` controls sampling spread
/// (0.0 yields the point estimate — see `into_ms`).
fn sample<R: Rng + ?Sized>(&self, scale: f32, rng: &mut R) -> MentalState;
/// Update after the estimated agent was observed performing `action`.
fn update_seen<'a>(
&'a mut self,
action: Action,
others: &impl Estimator,
observation: &impl Observation,
);
/// Incorporate world events, optionally against a cached world model.
fn update_on_events<'a>(
&'a mut self,
events: impl IntoIterator<Item = &'a Event> + Copy,
world_model: Option<&'a World<Occupancy>>,
);
/// Update when the estimated agent was NOT visible this tick.
fn update_unseen<'a>(&'a mut self, others: &impl Estimator, observation: &impl Observation);
// Deterministic point estimate: sample with scale 0 and a fixed seed.
fn into_ms(&self) -> MentalState {
let mut rng: rand_xorshift::XorShiftRng = rand::SeedableRng::seed_from_u64(0);
self.sample(0.0, &mut rng)
}
// NOTE: not std::default::Default — this constructor needs the entity.
fn default(we: &WorldEntity) -> Self;
/// Builds a representation by aggregating existing ones (e.g. same species).
fn from_aggregate<B>(we: &WorldEntity, iter: impl Iterator<Item = B>) -> Self
where
B: std::borrow::Borrow<Self>;
fn get_type(&self) -> EntityType;
fn iter(&self) -> Self::Iter<'_>;
}
/// Produces mental-state estimates for arbitrary entities and learns
/// from observed actions and world events.
pub trait Estimator {
/// Point estimate for `we`, if an estimate exists.
fn invoke(&self, we: WorldEntity) -> Option<MentalState>;
fn update_on_events<'a>(
&'a mut self,
events: impl IntoIterator<Item = &'a Event> + Copy,
world_model: Option<&'a World<Occupancy>>,
);
type Rep: MentalStateRep;
/// Iterator yielding up to `n` sampled mental states for `we`.
fn invoke_sampled<'a, R: rand::Rng + Sized>(
&'a self,
we: WorldEntity,
rng: &'a mut R,
n: usize,
) -> InvokeIter<'a, Self::Rep, R>;
/// Incorporate one observed (entity, action, position) triple.
fn learn<'a, C: Cell>(
&mut self,
action: Action,
other: WorldEntity,
other_pos: Position,
world: &World<C>,
world_models: &'a impl Source<'a, &'a World<Occupancy>>,
);
}
/// Estimator that maintains one representation per observed entity,
/// on behalf of a set of observing agents with limited sight.
#[derive(Clone, Debug)]
pub struct LearningEstimator<E: MentalStateRep> {
// (agent, sight radius) pairs this estimator observes for.
pub agents: Vec<(WorldEntity, Coord)>,
// One representation per estimated entity.
pub estimators: Storage<E>,
}
/// Iterator over sampled mental states: either draws `count` samples from a
/// representation, or yields nothing when no estimate exists.
pub enum InvokeIter<'a, E: MentalStateRep, R: Rng> {
Rep {
count: usize,
rep: &'a E,
rng: &'a mut R,
},
Empty,
}
// Yields up to `count` samples from the representation, then collapses the
// iterator to Empty so it behaves as fused.
impl<'a, E: MentalStateRep, R: Rng> Iterator for InvokeIter<'a, E, R> {
type Item = MentalState;
fn next(&mut self) -> Option<Self::Item> {
let ms = match self {
Self::Empty => None,
Self::Rep {
ref mut count,
rep,
rng,
} => {
if *count == 0 {
None
} else {
*count -= 1;
// scale 1.0: full sampling spread (vs the 0.0 point estimate).
Some(rep.sample(1.0, rng))
}
}
};
// Once exhausted, drop the borrow of rep/rng by switching variants.
if ms.is_none() {
*self = Self::Empty;
}
ms
}
}
impl<E: MentalStateRep + 'static> LearningEstimator<E> {
// Lazily creates a representation for `entity`, seeded by aggregating the
// existing representations of same-typed entities.
// NOTE(review): `observation` is currently unused here — confirm intent.
fn assure_init(&mut self, entity: &WorldEntity, observation: &impl Observation) {
if self.estimators.get(entity).is_some() {
return;
}
{
let rep = MentalStateRep::from_aggregate(
entity,
self.estimators
.into_iter()
.filter(|e| e.get_type() == entity.e_type()),
);
self.estimators.insert(entity, rep);
}
}
/// Creates an estimator observing for the given (agent, sight) pairs.
pub fn new(agents: Vec<(WorldEntity, Coord)>) -> Self {
Self {
agents,
estimators: Storage::new(),
}
}
/// Rebinds the agent entry matching `old`'s id to `new` (sight kept).
pub fn replace(&mut self, old: WorldEntity, new: WorldEntity) {
if let Some(tpl) = self.agents.iter_mut().find(|(id, _)| id.id() == old.id()) {
tpl.0 = new;
}
}
// Updates the representation of `other` from one agent's point of view:
// seen if within sight range, unseen otherwise.
fn learn_helper(
&mut self,
_agent: WorldEntity,
other: WorldEntity,
action: Action,
sight: Coord,
own_pos: &Position,
other_pos: Position,
observation: impl Observation,
) {
let dist = own_pos.distance(&other_pos);
if dist <= sight {
self.assure_init(&other, &observation);
}
// split_out_mut separates `other`'s rep from the rest so the remainder
// (`sc`) can serve as the Estimator for the update without aliasing.
if let Some((es, sc)) = self.estimators.split_out_mut(other) {
if dist <= sight {
// `&&sc`: Estimator is implemented for `&StorageSlice`, hence
// the double reference.
es.update_seen(action, &&sc, &observation);
} else {
es.update_unseen(&&sc, &observation);
}
}
}
}
impl<E: MentalStateRep + 'static> Estimator for LearningEstimator<E> {
// Point estimate: deterministic sample from the stored representation.
fn invoke(&self, entity: WorldEntity) -> Option<MentalState> {
self.estimators.get(entity).map(MentalStateRep::into_ms)
}
type Rep = E;
fn invoke_sampled<'a, R: rand::Rng + Sized>(
&'a self,
entity: WorldEntity,
rng: &'a mut R,
n: usize,
) -> InvokeIter<'a, E, R> {
if let Some(rep) = self.estimators.get(entity) {
InvokeIter::Rep { rng, count: n, rep }
} else {
InvokeIter::Empty
}
// self.estimators.get(entity).iter().flat_map(|e| e.sample(1.0, rng)).fuse()
}
// Lets every observing agent learn from the observed (other, action) pair,
// preferring the agent's cached world model over a fresh observation.
fn learn<'a, C: Cell>(
&mut self,
action: Action,
other: WorldEntity,
other_pos: Position,
world: &World<C>,
world_models: &'a impl Source<'a, &'a World<Occupancy>>,
) {
// agents is cloned so learn_helper can borrow self mutably below.
for (agent, sight) in self.agents.clone() {
if let Some(own_pos) = world.positions.get(agent) {
if let Some(wm) = world_models.get(agent.into()) {
self.learn_helper(agent, other, action, sight, own_pos, other_pos, wm);
} else {
self.learn_helper(
agent,
other,
action,
sight,
own_pos,
other_pos,
world.observe_in_radius(&agent, sight),
);
}
}
}
}
// Broadcasts world events to every stored representation.
fn update_on_events<'a>(
&'a mut self,
events: impl IntoIterator<Item = &'a Event> + Copy,
world_model: Option<&'a World<Occupancy>>,
) {
for est in self.estimators.iter_mut() {
est.update_on_events(events, world_model)
}
}
}
// Read-only Estimator view over a slice of representations. Used as the
// `others` argument while one representation is split out for mutation;
// the learning entry points are intentionally unsupported here.
impl<'c, T: MentalStateRep + Sized + 'static> Estimator for &'c StorageSlice<'c, T> {
type Rep = T;
fn invoke(&self, entity: WorldEntity) -> Option<MentalState> {
self.get(entity.into()).map(MentalStateRep::into_ms)
}
fn invoke_sampled<'a, R: rand::Rng + Sized>(
&'a self,
we: WorldEntity,
rng: &'a mut R,
n: usize,
) -> InvokeIter<'a, T, R> {
self.get(we.into())
.map(move |rep| InvokeIter::Rep { rng, count: n, rep })
.unwrap_or(InvokeIter::Empty)
}
// Unsupported on a read-only view; panics if called.
fn learn<'a, C: Cell>(
&mut self,
action: Action,
other: WorldEntity,
other_pos: Position,
world: &World<C>,
world_models: &impl Source<'a, &'a World<Occupancy>>,
) {
unimplemented!()
}
// Unsupported on a read-only view; panics if called.
fn update_on_events<'a>(
&'a mut self,
events: impl IntoIterator<Item = &'a Event> + Copy,
world_model: Option<&'a World<Occupancy>>,
) {
unimplemented!()
}
}
// Concrete representation used throughout: a particle-filter estimate.
type EstimateRep = ParticleFilterRep;
pub type EstimatorT = LearningEstimator<EstimateRep>;
/// Maps each agent entity to its own LearningEstimator.
#[derive(Clone, Debug, Default)]
pub struct EstimatorMap {
pub estimators: Vec<LearningEstimator<EstimateRep>>,
// entity -> index into `estimators`.
pub estimator_map: HashMap<Entity, usize, RandomState>,
}
impl EstimatorMap {
    /// Registers a fresh estimator for the agent behind `ms`.
    pub fn insert(&mut self, ms: &MentalState) {
        let idx = self.estimators.len();
        self.estimator_map.insert(ms.id.into(), idx);
        self.estimators
            .push(LearningEstimator::new(vec![(ms.id, ms.sight_radius)]));
    }

    /// Looks up the estimator responsible for `entity`, if any.
    pub fn get(&self, entity: Entity) -> Option<&EstimatorT> {
        self.estimator_map
            .get(&entity)
            .and_then(|&i| self.estimators.get(i))
    }

    /// Iterator over the representations held by `entity`'s estimator.
    pub fn get_representation_source<'a>(
        &'a self,
        entity: Entity,
    ) -> Option<impl Iterator<Item = &impl MentalStateRep> + 'a> {
        self.get(entity).map(|r| r.estimators.iter())
    }

    /// Re-keys the estimator tracking `old` so it tracks `new` instead.
    pub fn rebind_estimator(&mut self, old: WorldEntity, new: WorldEntity) {
        if let Some(idx) = self.estimator_map.remove(&old.into()) {
            self.estimators[idx].replace(old, new);
            self.estimator_map.insert(new.into(), idx);
        }
    }

    /// Parallel mutable iteration over all estimators (rayon).
    pub fn par_iter_mut<'a>(&'a mut self) -> impl ParallelIterator<Item = &'a mut EstimatorT> + 'a {
        self.estimators.par_iter_mut()
    }
}
|
use crate::StorageExecutor;
use actix_web::{
web::{Data, Path},
Error, HttpResponse,
};
use proger_core::protocol::response::StepPageProgress;
use crate::storage::storage_driver::{StorageCmd, StorageDriver};
use actix::Addr;
/// HTTP handler: asks the storage actor for the step page behind `link` and
/// returns its progress as JSON, or 400 when storage reports an error.
pub async fn read_step_page<T: StorageDriver>(
    link: Path<String>,
    storage: Data<Addr<StorageExecutor<T>>>,
) -> Result<HttpResponse, Error> {
    // Mailbox errors propagate via `?` as a 500-level actix error.
    let result = storage
        .into_inner()
        .send(StorageCmd::ReadStepPage(link.to_string()))
        .await?;
    match result {
        Ok(page) => Ok(HttpResponse::Ok().json(StepPageProgress {
            steps: page.steps,
            completed: page.completed,
            updated: page.updated,
        })),
        // Storage-level failures become a bare 400; the error payload is
        // deliberately discarded (was an unused `e` binding).
        Err(_) => Ok(HttpResponse::BadRequest().finish()),
    }
}
|
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate lazy_static;
extern crate lab;
extern crate rand;
use criterion::Criterion;
use rand::distributions::Standard;
use rand::Rng;
lazy_static! {
// 512 pseudo-random Lab colors, generated from a fixed all-zero seed so
// benchmark runs are reproducible.
static ref LABS: Vec<lab::Lab> = {
let rand_seed = [0u8; 32];
let mut rng: rand::StdRng = rand::SeedableRng::from_seed(rand_seed);
// Samples 8-float arrays but uses only the first three components —
// presumably because Standard is implemented for fixed-size arrays;
// TODO confirm there is no SIMD-alignment reason for the width of 8.
let labs: Vec<[f32; 8]> = rng.sample_iter(&Standard).take(512).collect();
labs.iter()
.map(|lab| lab::Lab {
l: lab[0],
a: lab[1],
b: lab[2],
})
.collect()
};
}
// Baseline: scalar Lab -> RGB conversion over the shared fixture.
fn labs_to_rgbs(c: &mut Criterion) {
c.bench_function("labs_to_rgbs", move |b| b.iter(|| lab::labs_to_rgbs(&LABS)))
}
// SIMD variant of the same conversion; the simd API is unsafe.
fn labs_to_rgbs_simd(c: &mut Criterion) {
c.bench_function("labs_to_rgbs_simd", move |b| {
b.iter(|| unsafe { lab::simd::labs_to_rgbs(&LABS) })
});
}
criterion_group!(benches, labs_to_rgbs, labs_to_rgbs_simd);
criterion_main!(benches);
|
extern crate structopt;
extern crate atlas_coverage_core;
use atlas_coverage_core as e2e_cc;
use std::path::PathBuf;
use structopt::StructOpt;
use std::error::Error;
use std::fs;
use std::fs::OpenOptions;
use std::io::BufWriter;
/// A basic example
#[derive(StructOpt, Debug)]
#[structopt(name = "atlas-coverage")]
struct Opt {
    /// Where to write the xml
    #[structopt(short = "o", long = "output", parse(from_os_str))]
    output: PathBuf,
    /// Path to configuration json. Uses the CWD if omitted
    // short flags are bare letters, matching "o" above; the previous "-c"
    // only worked because clap trims the leading dash.
    #[structopt(short = "c", long = "config", parse(from_os_str))]
    config: Option<PathBuf>,
    /// Input directory with .json files to parse
    #[structopt(name = "input", parse(from_os_str))]
    input: PathBuf,
}
fn main() -> Result<(), Box<dyn Error>>{
let opt = Opt::from_args();
let settings = if let Some(path) = opt.config {
e2e_cc::settings::from_path(path)
} else {
e2e_cc::settings::from_root()
}.expect("Cannot read settings");
let writer = {
let output_file = opt.output;
fs::create_dir_all(output_file.parent().unwrap())?;
let unbuffered = OpenOptions::new().create(true).write(true).truncate(true).open(output_file).expect("Cannot open output file");
BufWriter::new(unbuffered)
};
let inputs : Vec<_> = {
let input_directory_items = fs::read_dir(opt.input)?;
input_directory_items.into_iter()
.flat_map(|potential_input|{
let input_path = potential_input.expect("Encountered intermittent io error").path();
if input_path.to_string_lossy().ends_with(".json") {
Some(input_path)
} else {
eprintln!("Skipping non-json file: {}", input_path.to_string_lossy());
None
}
}).collect()
};
Ok(e2e_cc::run(settings, inputs, Some(writer)))
} |
use super::{Indicator, MovingAverage};
use crate::economy::Monetary;
/// Simple moving average over a fixed window of PERIOD samples.
pub struct SMA<const PERIOD: usize> {
// Most recently computed average.
sma: Monetary,
// Number of samples seen so far; the average is published once it
// reaches PERIOD.
count: usize,
// Ring of the last PERIOD samples, oldest first.
values: [Monetary; PERIOD],
}
impl<const PERIOD: usize> Indicator for SMA<PERIOD> {
    type Output = Option<Monetary>;

    /// Seeds the window: every slot starts at `value`, no samples counted yet.
    fn initialize(value: Monetary) -> Self {
        SMA {
            // `[value; PERIOD]` requires Copy, so no clone is needed here.
            sma: value,
            count: 0,
            values: [value; PERIOD],
        }
    }

    /// Pushes `value` into the window and returns the mean once PERIOD
    /// samples have been seen, `None` before that.
    fn evaluate(&mut self, value: Monetary) -> Self::Output {
        // saturating_add guards the (theoretical) usize overflow on
        // long-running streams; comparisons still behave correctly.
        self.count = self.count.saturating_add(1);
        // Drop the oldest sample and append the newest.
        self.values.rotate_left(1);
        self.values[PERIOD - 1] = value;
        // Recompute the window mean (replaces the manual shift-and-add loop).
        self.sma = self.values.iter().copied().sum::<Monetary>() / PERIOD as Monetary;
        if self.count >= PERIOD {
            Some(self.sma)
        } else {
            None
        }
    }
}
// Marker: SMA is one of the moving-average indicators.
impl<const PERIOD: usize> MovingAverage for SMA<PERIOD> {}
/// A 9-period SMA returns None for the first 8 samples, then the window mean.
// Purely synchronous — no await points — so a plain #[test] suffices and
// avoids spinning up a tokio runtime for nothing.
#[test]
fn test_sma() {
    let mut sma = SMA::<9>::initialize(0.0);
    for i in 0..8 {
        assert_eq!(sma.evaluate(i as f64), None);
    }
    // The window now holds 0.0..=8.0, whose mean is 4.0.
    assert_eq!(sma.evaluate(8.0), Some(4.0));
}
|
#![feature(test)]
extern crate smallbox;
extern crate test;
use smallbox::space::*;
use smallbox::SmallBox;
use test::{black_box, Bencher};
// Allocation benchmarks: SmallBox with small/large payloads against
// small/large inline spaces, plus plain Box as the baseline.
// black_box prevents the optimizer from const-folding the allocations away.
#[bench]
fn smallbox_small_item_small_space(b: &mut Bencher) {
b.iter(|| {
// bool fits in S1, so this stays inline (no heap allocation expected).
let small: SmallBox<_, S1> = black_box(SmallBox::new(black_box(true)));
small
})
}
#[bench]
fn smallbox_small_item_large_space(b: &mut Bencher) {
b.iter(|| {
let small: SmallBox<_, S64> = black_box(SmallBox::new(black_box(true)));
small
})
}
#[bench]
fn smallbox_large_item_small_space(b: &mut Bencher) {
b.iter(|| {
// 64 usizes overflow S1, forcing the heap spill path.
let large: SmallBox<_, S1> = black_box(SmallBox::new(black_box([0usize; 64])));
large
})
}
#[bench]
fn smallbox_large_item_large_space(b: &mut Bencher) {
b.iter(|| {
let large: SmallBox<_, S64> = black_box(SmallBox::new(black_box([0usize; 64])));
large
})
}
#[bench]
fn box_small_item(b: &mut Bencher) {
b.iter(|| {
let large: Box<_> = black_box(Box::new(black_box(true)));
large
})
}
#[bench]
fn box_large_item(b: &mut Bencher) {
b.iter(|| {
let large: Box<_> = black_box(Box::new(black_box([0usize; 64])));
large
})
}
|
#![cfg_attr(not(test), no_std)]
use num_traits::float::Float;
/// Number of seconds in one day.
const SECONDS_IN_DAY: u32 = 24 * 60 * 60;

/// PWM duty cycle (0..=1) for the given time of day, with no shift applied.
pub fn pwm_from_time(seconds_from_midnight: u32) -> f32 {
    pwm_from_time_shift(seconds_from_midnight, 0)
}

/// PWM duty cycle (0..=1) for the given time of day, shifted by
/// `seconds_shift` seconds.
///
/// Models daylight as a cosine over the day: the (shifted) fraction of the
/// day elapsed maps onto [-PI, PI], and the cosine is clamped to [0, 1] so
/// night-time values come out as 0.
pub fn pwm_from_time_shift(seconds_from_midnight: u32, seconds_shift: i32) -> f32 {
    let day = SECONDS_IN_DAY as f32;
    let shift_fraction = seconds_shift as f32 / day;
    let day_fraction = seconds_from_midnight as f32 / day;
    // Midnight maps to -1, noon to 0, the next midnight to +1.
    let ratio = (day_fraction - shift_fraction) * 2f32 - 1f32;
    (ratio * core::f32::consts::PI).cos().clamp(0f32, 1f32)
}
#[cfg(test)]
pub mod test {
// Reference times for a symmetric 12-hour day centred on noon.
const SUNRISE: u32 = 6 * 60 * 60;
const SUNSET: u32 = (6 + 12) * 60 * 60;
use approx::assert_relative_eq;
use super::*;
extern crate approx;
extern crate std;
// Midnight (both ends of the day) must be fully off.
#[test]
fn test_time_boundary() {
assert!(pwm_from_time(0) == 0f32);
assert!(pwm_from_time(SECONDS_IN_DAY - 1) == 0f32);
}
#[test]
fn test_time_boundary_shift() {
assert!(pwm_from_time_shift(0, 2 * 60 * 60) == 0f32);
assert!(pwm_from_time_shift(SECONDS_IN_DAY - 1, 2 * 60 * 60) == 0f32);
}
// Noon is full brightness (within float tolerance).
#[test]
fn test_midday() {
assert_relative_eq!(pwm_from_time(12 * 60 * 60), 1f32, epsilon = 0.01f32);
}
// A +2h shift moves the peak to 14:00.
#[test]
fn test_midday_shift() {
assert_relative_eq!(
pwm_from_time_shift(14 * 60 * 60, 2 * 60 * 60),
1f32,
epsilon = 0.01f32
);
}
// Just after sunrise / before sunset the output is still ~0.
#[test]
fn test_sunrise_sunset() {
assert_relative_eq!(pwm_from_time(SUNRISE + 1), 0.0f32, epsilon = 0.01f32);
assert_relative_eq!(pwm_from_time(SUNSET - 1), 0.0f32, epsilon = 0.01f32);
}
#[test]
fn test_sunrise_sunset_shift() {
const TWO_HOURS: u32 = 2 * 60 * 60;
assert_relative_eq!(pwm_from_time_shift(SUNRISE + 1 + TWO_HOURS, TWO_HOURS as i32), 0.0f32, epsilon = 0.01f32);
assert_relative_eq!(pwm_from_time_shift(SUNSET - 1 + TWO_HOURS, TWO_HOURS as i32), 0.0f32, epsilon = 0.01f32);
}
// Three hours after sunrise / before sunset: cos(0.25*PI) ~ 0.7.
#[test]
fn test_three_after() {
assert_relative_eq!(
pwm_from_time(SUNRISE + (60 * 60 * 3)),
0.7f32,
epsilon = 0.01f32
);
assert_relative_eq!(
pwm_from_time(SUNSET - (60 * 60 * 3)),
0.7f32,
epsilon = 0.01f32
);
}
#[test]
fn test_three_after_shift() {
const TWO_HOURS: u32 = 2 * 60 * 60;
assert_relative_eq!(
pwm_from_time_shift(SUNRISE + TWO_HOURS + (60 * 60 * 3), TWO_HOURS as i32),
0.7f32,
epsilon = 0.01f32
);
assert_relative_eq!(
pwm_from_time_shift(SUNSET + TWO_HOURS - (60 * 60 * 3), TWO_HOURS as i32),
0.7f32,
epsilon = 0.01f32
);
}
}
|
/*!
A wrapper type for a mutable pointer with a lifetime.
*/
use std_::marker::PhantomData;
/// A wrapper type that associates a mutable raw pointer with a lifetime.
///
/// # Motivation
///
/// This type was declared to pass a mutable-reference-like type to
/// multiple methods to borrow multiple fields individually.
/// If those methods took in mutable references it would cause
/// undefined behavior to borrow multiple fields mutably,
/// since each call borrows the entire data structure.
/// A raw mutable pointer tagged with the lifetime of the borrow it came from.
///
/// Unlike `&'a mut T`, several copies of a `MutRef` may coexist, which lets
/// callers hand the same target to multiple field-borrowing methods without
/// the whole-struct exclusive borrow a real mutable reference would impose.
#[repr(transparent)]
#[derive(Debug)]
pub struct MutRef<'a, T: ?Sized> {
    pub ptr: *mut T,
    _marker: PhantomData<&'a mut T>,
}

impl<'a, T: ?Sized> Copy for MutRef<'a, T> {}

impl<'a, T: ?Sized> Clone for MutRef<'a, T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<'a, T: ?Sized> MutRef<'a, T> {
    /// Wraps a mutable reference, capturing its lifetime.
    #[inline(always)]
    pub fn new(mut_ref: &'a mut T) -> Self {
        Self {
            ptr: mut_ref,
            _marker: PhantomData,
        }
    }

    /// Wraps a bare mutable pointer; the caller chooses the lifetime.
    #[inline(always)]
    pub fn from_ptr(ptr: *mut T) -> Self {
        Self {
            ptr,
            _marker: PhantomData,
        }
    }

    /// An unchecked cast from `MutRef<'a, T>` to `MutRef<'a, U>`.
    #[inline(always)]
    pub fn cast<U>(self) -> MutRef<'a, U> {
        MutRef {
            ptr: self.ptr as *mut U,
            _marker: PhantomData,
        }
    }
}

impl<'a, T: ?Sized> From<&'a mut T> for MutRef<'a, T> {
    #[inline(always)]
    fn from(mutref: &'a mut T) -> Self {
        Self::new(mutref)
    }
}
#![deny(clippy::all)]
mod app;
mod data_buffer;
mod ui;
use app::*;
/// Entry point: initializes the app and UI, runs the app, reports the exit value.
fn main() {
    let mut application = App::init().expect("App failed init");
    let mut interface = ui::Ui::init().expect("Ui Initialization failure");
    let exit_value = application.run(&mut interface).expect("Failure running app");
    println!("App exited: {}", exit_value);
}
|
use crate::vec2::*;
// Index into SceneGraph::nodes.
type NodeId = usize;
// Opaque shader handle; resolution happens elsewhere.
type ShaderId = usize;
// A scene-graph node linked to its parent and children by index.
struct Node {
parent: NodeId,
children: Vec<NodeId>,
shader: ShaderId,
}
// A quadtree cell: either subdivides further or holds a leaf node id.
enum QuadSection {
QuadNode(Box<QuadNode>),
Node(NodeId),
}
// Quadtree interior node split at `centre` into four quadrants.
struct QuadNode {
centre: Vec2f,
top_left: QuadSection,
top_right: QuadSection,
bottom_left: QuadSection,
bottom_right: QuadSection,
}
/// Index-based scene graph; `None` slots are free list entries.
pub struct SceneGraph {
nodes: Vec<Option<Node>>
}
impl SceneGraph {
/// Creates an empty scene graph.
pub fn new() -> SceneGraph {
SceneGraph {
nodes: vec![]
}
}
// Reserves a slot for a new node and returns its id.
// TODO: stubbed — always returns 0, which will index out of bounds on an
// empty `nodes` vec if used.
fn allocate_node(&mut self) -> NodeId {
// TODO
return 0;
}
// Marks an item's node as needing redraw, allocating a node on first use.
// NOTE(review): `Item` is not defined or imported in this file, and `node`
// is never read after assignment — this looks like work in progress.
fn item_damaged(&mut self, item: &Item) {
let id = item.get_item().id.get();
let node;
if let Some(index) = id {
node = &mut self.nodes[index];
} else {
let new_index = self.allocate_node();
node = &mut self.nodes[new_index];
}
}
}
|
use fnv::FnvHashMap;
/// Sparse 2D array: only cells that were explicitly set are stored,
/// keyed by `(x, y)` in a hash map.
#[derive(Clone, Debug)]
pub struct Array2D<T> {
    elems: FnvHashMap<(usize, usize), T>,
    // Logical (width, height); enforced on insertion, not on lookup.
    dims: (usize, usize),
}
impl<T> Array2D<T> {
pub fn new(width: usize, height: usize) -> Self {
Self {
dims: (width, height),
elems: FnvHashMap::default(),
}
}
}
impl<T> Array2D<T> {
    /// Iterates over all stored elements. Order is the hash map's,
    /// i.e. unspecified.
    pub fn iter(&self) -> impl Iterator<Item = &T> {
        self.elems.values()
    }
    /// Mutable counterpart of `iter`. Order is unspecified.
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> {
        self.elems.values_mut()
    }
    /// The (width, height) the array was created with.
    pub fn size(&self) -> (usize, usize) {
        self.dims
    }
    /// The element at `idx`, or `None` if that cell was never set.
    /// No bounds check is performed here; out-of-range keys simply miss.
    pub fn get(&self, idx: (usize, usize)) -> Option<&T> {
        self.elems.get(&idx)
    }
}
impl<T: Default> Array2D<T> {
    /// Returns a mutable reference to the element at `idx`, first inserting
    /// `T::default()` if the cell is empty.
    ///
    /// # Panics
    /// Panics if `idx` lies outside the array's dimensions. The check now
    /// runs unconditionally; previously it only ran on the insert path, so
    /// an out-of-bounds key that somehow already existed was returned
    /// silently.
    pub fn get_or_insert(&mut self, idx: (usize, usize)) -> &mut T {
        assert!(
            idx.0 < self.dims.0 && idx.1 < self.dims.1,
            "{}: {:?} >= {:?} || {:?} >= {:?}",
            "Out of bounds index in Array2D",
            idx.0,
            self.dims.0,
            idx.1,
            self.dims.1
        );
        // Entry API: one hash lookup instead of contains_key + get_mut.
        self.elems.entry(idx).or_default()
    }
}
|
use std::sync::OnceLock;
// Cached long version string, built exactly once on first request.
static LONG_VERSION: OnceLock<String> = OnceLock::new();
/// Returns the long version string: the crate version followed by one
/// aligned line per piece of build metadata. Computed lazily and cached.
pub fn get_long_version() -> &'static str {
    LONG_VERSION.get_or_init(|| {
        // Build-time metadata emitted by vergen; entries whose value is the
        // idempotent placeholder are omitted from the output.
        let details = [
            ("Commit SHA:", env!("VERGEN_GIT_SHA")),
            ("Features:", env!("VERGEN_CARGO_FEATURES")),
            ("Opt level:", env!("VERGEN_CARGO_OPT_LEVEL")),
            ("Target triple:", env!("VERGEN_CARGO_TARGET_TRIPLE")),
            ("Codegen flags:", env!("RSONPATH_CODEGEN_FLAGS")),
        ];
        let mut version = env!("CARGO_PKG_VERSION").to_owned();
        version.push('\n');
        for (label, value) in details {
            if value != "VERGEN_IDEMPOTENT_OUTPUT" {
                version.push_str(&format!("\n{: <16} {}", label, value));
            }
        }
        version
    })
}
|
use sha2::{Digest, Sha512Trunc224};
pub const SIZE224: usize = 28;
pub type SHA512_224 = Sha512Trunc224;
impl super::Hash for SHA512_224 {
    /// Digest length in bytes (28 for SHA-512/224).
    fn size() -> usize {
        SHA512_224::output_size()
    }
    /// Internal block size in bytes, shared by the whole SHA-512 family.
    fn block_size() -> usize {
        super::BLOCK_SIZE
    }
    /// Resets the hasher to its initial state. Fully qualified to pick the
    /// `Digest` trait method unambiguously.
    fn reset(&mut self) {
        Digest::reset(self)
    }
    /// Returns the digest of everything written so far without consuming
    /// the hasher: finalization runs on a clone, so more data can still be
    /// fed afterwards.
    fn sum(&mut self) -> Vec<u8> {
        self.clone().result().as_slice().to_vec()
    }
}
pub fn new512_224() -> SHA512_224 {
Digest::new()
}
/// One-shot SHA-512/224 of `b`, returned as a fixed-size array.
pub fn sum512_224(b: &[u8]) -> [u8; SIZE224] {
    let digest = Sha512Trunc224::digest(b);
    let mut hash = [0u8; SIZE224];
    hash.copy_from_slice(digest.as_slice());
    hash
}
|
extern crate vmit;
use vmit::Workspace;
/// Resolves the workspace from the current working directory and prints it.
fn main() {
    let workspace = Workspace::from_pwd();
    println!("{}", workspace);
}
|
//! Entry point of the program.
mod error;
mod eval;
mod identifier;
mod label;
mod merge;
mod operation;
mod parser;
mod position;
mod program;
mod serialize;
mod stack;
mod stdlib;
mod term;
mod transformations;
mod typecheck;
mod types;
use crate::error::{Error, IOError, SerializationError};
use crate::program::Program;
use crate::term::RichTerm;
use std::path::PathBuf;
use std::str::FromStr;
use std::{fmt, fs, io, process};
// use std::ffi::OsStr;
use structopt::StructOpt;
extern crate either;
/// Command-line options and subcommands.
#[derive(StructOpt, Debug)]
/// The interpreter of the Nickel language.
struct Opt {
    /// The input file. Standard input by default
    #[structopt(short = "f", long)]
    #[structopt(parse(from_os_str))]
    file: Option<PathBuf>,
    // Optional subcommand; plain evaluation when absent.
    // (Plain `//` comment on purpose: a doc comment here would change the
    // structopt-generated help text.)
    #[structopt(subcommand)]
    command: Option<Command>,
}
/// Available export formats.
/// Only JSON for now; parsing and display must stay in sync with `FromStr`.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum ExportFormat {
    Json,
}
impl std::default::Default for ExportFormat {
fn default() -> Self {
ExportFormat::Json
}
}
impl fmt::Display for ExportFormat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "json")
}
}
/// Error returned when an unknown export format name is parsed;
/// carries the rejected input string.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct ParseFormatError(String);
impl fmt::Display for ParseFormatError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "unsupported export format {}", self.0)
}
}
impl FromStr for ExportFormat {
    type Err = ParseFormatError;
    /// Case-insensitive parse; only "json" is recognized.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        if lowered == "json" {
            Ok(ExportFormat::Json)
        } else {
            Err(ParseFormatError(s.to_string()))
        }
    }
}
/// Available subcommands.
#[derive(StructOpt, Debug)]
enum Command {
    /// Export the result to a different format
    Export {
        /// Available formats: `json`. Default format: `json`.
        #[structopt(long)]
        format: Option<ExportFormat>,
        /// Output file. Standard output by default
        #[structopt(short = "o", long)]
        #[structopt(parse(from_os_str))]
        output: Option<PathBuf>,
    },
    /// Typecheck a program, but do not run it
    Typecheck,
}
/// CLI entry point: load the program (file or stdin), run the requested
/// subcommand, and report any error through the program's error reporter.
fn main() {
    let opts = Opt::from_args();
    // Build the program from the given file, or from stdin when no file was
    // passed; any failure to read input aborts with exit code 1.
    let mut program = opts
        .file
        .map(|path: PathBuf| -> io::Result<_> {
            let file = fs::File::open(&path)?;
            Program::new_from_source(file, &path)
        })
        .unwrap_or_else(Program::new_from_stdin)
        .unwrap_or_else(|err| {
            eprintln!("Error when reading input: {}", err);
            process::exit(1)
        });
    let result = match opts.command {
        // Fully evaluate, validate for serialization, then write to the
        // requested output (file or stdout).
        Some(Command::Export { format, output }) => {
            program.eval_full().map(RichTerm::from).and_then(|rt| {
                serialize::validate(&rt).map_err(Error::from)?;
                let format = format.unwrap_or_default();
                if let Some(file) = output {
                    let file = fs::File::create(&file).map_err(IOError::from)?;
                    match format {
                        ExportFormat::Json => serde_json::to_writer_pretty(file, &rt),
                    }
                    .map_err(|err| SerializationError::Other(err.to_string()))?;
                } else {
                    match format {
                        ExportFormat::Json => serde_json::to_writer_pretty(io::stdout(), &rt),
                    }
                    .map_err(|err| SerializationError::Other(err.to_string()))?;
                }
                Ok(())
            })
        }
        Some(Command::Typecheck) => program.typecheck().map(|_| ()),
        // Default action: evaluate and print the resulting term.
        None => program.eval().and_then(|t| {
            println!("Done: {:?}", t);
            Ok(())
        }),
    };
    // Route any error through the program's diagnostic reporter and fail.
    if let Err(err) = result {
        program.report(err);
        process::exit(1)
    }
}
|
// 6. Base 32 Encoding
// https://tools.ietf.org/html/rfc4648#section-6
//
// Table 3: The Base 32 Alphabet
//
// Value Encoding Value Encoding Value Encoding Value Encoding
// 0 A 9 J 18 S 27 3
// 1 B 10 K 19 T 28 4
// 2 C 11 L 20 U 29 5
// 3 D 12 M 21 V 30 6
// 4 E 13 N 22 W 31 7
// 5 F 14 O 23 X
// 6 G 15 P 24 Y (pad) =
// 7 H 16 Q 25 Z
// 8 I 17 R 26 2
//
// 7. Base 32 Encoding with Extended Hex Alphabet
// https://tools.ietf.org/html/rfc4648#section-7
//
// Table 4: The "Extended Hex" Base 32 Alphabet
//
// Value Encoding Value Encoding Value Encoding Value Encoding
// 0 0 9 9 18 I 27 R
// 1 1 10 A 19 J 28 S
// 2 2 11 B 20 K 29 T
// 3 3 12 C 21 L 30 U
// 4 4 13 D 22 M 31 V
// 5 5 14 E 23 N
// 6 6 15 F 24 O (pad) =
// 7 7 16 G 25 P
// 8 8 17 H 26 Q
pub use super::base64::Config;
pub use super::base64::Error;
pub use super::base64::ErrorKind;
pub use super::base64::DEFAULT_CONFIG;
/// RFC 4648 section 6 base32 alphabet: A-Z then 2-7.
static STANDARD_TABLE: [u8; 32] = [
    // A     B     C     D     E     F     G     H     I     J     K     L     M     N     O     P
    0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50,
    // Q     R     S     T     U     V     W     X     Y     Z     2     3     4     5     6     7
    0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
];
/// RFC 4648 section 7 "extended hex" base32 alphabet: 0-9 then A-V.
static URL_SAFE_TABLE: [u8; 32] = [
    // 0     1     2     3     4     5     6     7     8     9     A     B     C     D     E     F
    0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46,
    // G     H     I     J     K     L     M     N     O     P     Q     R     S     T     U     V
    0x47, 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56,
];
// Invalid base32 characters
const ____: u8 = 0xff;
const _EXT: u8 = 0xfe; // PADDED.
// NOTE: case-insensitive — both upper- and lower-case letters decode.
static STANDARD_DECODE_TABLE: [u8; 256] = [
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    //          2     3     4     5     6     7                               b'='
    ____, ____, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, ____, ____, ____, ____, ____, _EXT, ____, ____,
    //    A     B     C     D     E     F     G     H     I     J     K     L     M     N     O
    ____, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
    // P  Q     R     S     T     U     V     W     X     Y     Z
    0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, ____, ____, ____, ____, ____,
    //    a     b     c     d     e     f     g     h     i     j     k     l     m     n     o
    ____, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
    // p  q     r     s     t     u     v     w     x     y     z
    0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
];
// NOTE: case-insensitive — both upper- and lower-case letters decode.
static URL_SAFE_DECODE_TABLE: [u8; 256] = [
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    // 0  1     2     3     4     5     6     7     8     9                   b'='
    0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, ____, ____, ____, _EXT, ____, ____,
    //    A     B     C     D     E     F     G     H     I     J     K     L     M     N     O
    ____, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
    // P  Q     R     S     T     U     V
    0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    //    a     b     c     d     e     f     g     h     i     j     k     l     m     n     o
    ____, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
    // p  q     r     s     t     u     v
    0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
    ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____, ____,
];
/// Required output buffer size for encoding `ilen` input bytes:
/// 8 characters per full 5-byte group, plus a trailing partial group
/// (rounded up to 8 characters when padding is enabled).
#[inline]
fn encode_buffer_len(ilen: usize, config: Config) -> usize {
    // Each full group of 5 bytes (40 bits) encodes to 8 characters.
    let full_groups = ilen / 5;
    let rem = ilen % 5;
    let base = full_groups * 8;
    if config.no_padding {
        // ceil(rem * 8 / 5) characters for the trailing partial group.
        let tail = match rem {
            0 => 0,
            1 => 2,
            2 => 4,
            3 => 5,
            4 => 7,
            _ => unreachable!(),
        };
        base + tail
    } else if rem > 0 {
        base + 8
    } else {
        base
    }
}
/// Worst-case output buffer size for decoding `ilen` base32 characters:
/// 5 bytes per (possibly partial) group of 8 input characters.
#[inline]
fn decode_buffer_len(ilen: usize) -> usize {
    let groups = ilen / 8;
    // Return the expression directly instead of binding it first
    // (clippy::let_and_return).
    if ilen % 8 > 0 { groups * 5 + 5 } else { groups * 5 }
}
/// Encodes `input` as standard (RFC 4648 section 6) base32 with the
/// default config.
pub fn encode<D: AsRef<[u8]>>(input: D) -> String {
    encode_with_config(input, DEFAULT_CONFIG)
}
/// Encodes `input` with the extended-hex (RFC 4648 section 7) alphabet
/// and the default config.
pub fn urlsafe_encode<D: AsRef<[u8]>>(input: D) -> String {
    urlsafe_encode_with_config(input, DEFAULT_CONFIG)
}
/// Encodes `input` as standard base32, honoring `config` (e.g. padding).
pub fn encode_with_config<D: AsRef<[u8]>>(input: D, config: Config) -> String {
    let input = input.as_ref();
    if input.is_empty() {
        return String::new();
    }
    let ilen = input.len();
    let olen = encode_buffer_len(ilen, config);
    let mut output = vec![0u8; olen];
    let amt = encode_to_slice_inner(&STANDARD_TABLE, input, &mut output, config);
    if amt < olen {
        output.truncate(amt);
    }
    // SAFETY: every byte written is from the base32 alphabet or `=`,
    // all of which are ASCII, so the buffer is valid UTF-8.
    unsafe { String::from_utf8_unchecked(output) }
}
/// Encodes `input` with the extended-hex alphabet, honoring `config`.
pub fn urlsafe_encode_with_config<D: AsRef<[u8]>>(input: D, config: Config) -> String {
    let input = input.as_ref();
    if input.is_empty() {
        return String::new();
    }
    let ilen = input.len();
    // BUG FIX: the buffer length must be computed from the caller's `config`,
    // not DEFAULT_CONFIG (the standard-alphabet variant above already does
    // this). With the old code, a padding `config` combined with a
    // non-padding default under-allocated the buffer and caused an
    // out-of-bounds write panic in encode_to_slice_inner.
    let olen = encode_buffer_len(ilen, config);
    let mut output = vec![0u8; olen];
    let amt = encode_to_slice_inner(&URL_SAFE_TABLE, input, &mut output, config);
    if amt < olen {
        output.truncate(amt);
    }
    // SAFETY: only ASCII alphabet bytes and `=` are ever written.
    unsafe { String::from_utf8_unchecked(output) }
}
/// Encodes into a caller-provided buffer with the default config;
/// returns the number of bytes written.
pub fn encode_to_slice<R: AsRef<[u8]>, W: AsMut<[u8]>>(input: R, output: &mut W) -> usize {
    encode_to_slice_with_config(input, output, DEFAULT_CONFIG)
}
/// Extended-hex variant of [`encode_to_slice`].
pub fn urlsafe_encode_to_slice<R: AsRef<[u8]>, W: AsMut<[u8]>>(input: R, output: &mut W) -> usize {
    urlsafe_encode_to_slice_with_config(input, output, DEFAULT_CONFIG)
}
/// Encodes into a caller-provided buffer with an explicit config.
/// The buffer must be at least `encode_buffer_len` bytes long.
pub fn encode_to_slice_with_config<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    input: R,
    output: &mut W,
    config: Config,
) -> usize {
    encode_to_slice_inner(&STANDARD_TABLE, input, output, config)
}
/// Extended-hex variant of [`encode_to_slice_with_config`].
pub fn urlsafe_encode_to_slice_with_config<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    input: R,
    output: &mut W,
    config: Config,
) -> usize {
    encode_to_slice_inner(&URL_SAFE_TABLE, input, output, config)
}
/// Decodes standard base32 with the default config.
pub fn decode<D: AsRef<[u8]>>(input: D) -> Result<Vec<u8>, Error> {
    decode_with_config(input, DEFAULT_CONFIG)
}
/// Decodes extended-hex base32 with the default config.
pub fn urlsafe_decode<D: AsRef<[u8]>>(input: D) -> Result<Vec<u8>, Error> {
    urlsafe_decode_with_config(input, DEFAULT_CONFIG)
}
/// Decodes standard base32, honoring `config` (padding rules, trailing bits).
pub fn decode_with_config<D: AsRef<[u8]>>(input: D, config: Config) -> Result<Vec<u8>, Error> {
    let input = input.as_ref();
    if input.is_empty() {
        return Ok(Vec::new());
    }
    // Allocate the worst case, then trim to the actual decoded size.
    let olen = decode_buffer_len(input.len());
    let mut output = vec![0u8; olen];
    let amt = decode_to_slice_inner(&STANDARD_DECODE_TABLE, input, &mut output, config)?;
    if amt < olen {
        output.truncate(amt);
    }
    Ok(output)
}
/// Decodes extended-hex base32, honoring `config`.
pub fn urlsafe_decode_with_config<D: AsRef<[u8]>>(
    input: D,
    config: Config,
) -> Result<Vec<u8>, Error> {
    let input = input.as_ref();
    if input.is_empty() {
        return Ok(Vec::new());
    }
    // Allocate the worst case, then trim to the actual decoded size.
    let olen = decode_buffer_len(input.len());
    let mut output = vec![0u8; olen];
    let amt = decode_to_slice_inner(&URL_SAFE_DECODE_TABLE, input, &mut output, config)?;
    if amt < olen {
        output.truncate(amt);
    }
    Ok(output)
}
/// Decodes into a caller-provided buffer with the default config;
/// returns the number of bytes written.
pub fn decode_to_slice<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    input: R,
    output: &mut W,
) -> Result<usize, Error> {
    decode_to_slice_with_config(input, output, DEFAULT_CONFIG)
}
/// Extended-hex variant of [`decode_to_slice`].
pub fn urlsafe_decode_to_slice<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    input: R,
    output: &mut W,
) -> Result<usize, Error> {
    urlsafe_decode_to_slice_with_config(input, output, DEFAULT_CONFIG)
}
/// Decodes into a caller-provided buffer with an explicit config.
/// The buffer must be at least `decode_buffer_len` bytes long.
pub fn decode_to_slice_with_config<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    input: R,
    output: &mut W,
    config: Config,
) -> Result<usize, Error> {
    decode_to_slice_inner(&STANDARD_DECODE_TABLE, input, output, config)
}
/// Extended-hex variant of [`decode_to_slice_with_config`].
pub fn urlsafe_decode_to_slice_with_config<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    input: R,
    output: &mut W,
    config: Config,
) -> Result<usize, Error> {
    decode_to_slice_inner(&URL_SAFE_DECODE_TABLE, input, output, config)
}
/// Core decoder: accumulates 5-bit values from `table` into a 64-bit group
/// and emits 5 output bytes per 8 input characters. Returns the number of
/// bytes written, or an error (with position and offending byte) for invalid
/// characters, bad padding length, or disallowed trailing non-zero bits.
#[inline]
fn decode_to_slice_inner<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    table: &[u8; 256],
    input: R,
    output: &mut W,
    config: Config,
) -> Result<usize, Error> {
    let input = input.as_ref();
    let output = output.as_mut();
    let ilen = input.len();
    let mut ipos = 0usize; // input data index
    let mut opos = 0usize; // output data index
    let mut group = 0u64; // 5 bytes encode to 8 base32 character.
    let mut gpos = 0u8; // group bit index
    // PADDING-LEN
    let mut plen = 0usize;
    while ipos < ilen {
        let val = table[input[ipos] as usize];
        match val {
            // 0xff sentinel: byte is not in the alphabet at all.
            ____ => {
                return Err(Error {
                    pos: ipos,
                    byte: input[ipos],
                    kind: ErrorKind::InvalidCodedCharacter,
                });
            }
            // 0xfe sentinel: `=` — consume the whole padding run, then stop.
            _EXT => {
                // DECODE-PADDING DATA
                plen = 1;
                ipos += 1;
                const MAX_PADDING_LEN: usize = 7;
                while ipos < ilen && plen < MAX_PADDING_LEN {
                    let val = table[input[ipos] as usize];
                    if val != _EXT {
                        return Err(Error {
                            pos: ipos,
                            byte: input[ipos],
                            kind: ErrorKind::InvalidPaddingCharacter,
                        });
                    }
                    plen += 1;
                    ipos += 1;
                }
                // NOTE: any characters after a maximal padding run are
                // ignored, even if they are not the legal padding char `=`.
                break;
            }
            // Regular alphabet character: shift its 5 bits into `group`.
            _ => {
                match gpos {
                    0 => {
                        group = (val as u64) << 59;
                        gpos = 5;
                    }
                    5 => {
                        group |= (val as u64) << 54;
                        gpos = 10;
                    }
                    10 => {
                        group |= (val as u64) << 49;
                        gpos = 15;
                    }
                    15 => {
                        group |= (val as u64) << 44;
                        gpos = 20;
                    }
                    20 => {
                        group |= (val as u64) << 39;
                        gpos = 25;
                    }
                    25 => {
                        group |= (val as u64) << 34;
                        gpos = 30;
                    }
                    30 => {
                        group |= (val as u64) << 29;
                        gpos = 35;
                    }
                    // Eighth character completes a 40-bit group: flush 5 bytes.
                    35 => {
                        group |= (val as u64) << 24;
                        let [b1, b2, b3, b4, b5, _, _, _] = group.to_be_bytes();
                        output[opos + 0] = b1;
                        output[opos + 1] = b2;
                        output[opos + 2] = b3;
                        output[opos + 3] = b4;
                        output[opos + 4] = b5;
                        opos += 5;
                        gpos = 0;
                    }
                    _ => unreachable!(),
                }
                ipos += 1;
            }
        }
    }
    // NOTE: padding length expected for the trailing partial group.
    let mut expected_padding_len = 0usize;
    // Check trailing bits
    match gpos {
        0 => {
            group = 0;
        }
        5 => {
            // rem 5-bits
            // NOTE: this case usually means the data was truncated.
            expected_padding_len = 7;
        }
        10 => {
            // rem 2-bits
            let [b1, b2, _, _, _, _, _, _] = group.to_be_bytes();
            output[opos + 0] = b1;
            opos += 1;
            group = b2 as u64;
            expected_padding_len = 6; // 8 - (10 / 5)
        }
        15 => {
            // rem 7-bits
            let [b1, b2, _, _, _, _, _, _] = group.to_be_bytes();
            output[opos + 0] = b1;
            opos += 1;
            group = b2 as u64;
            expected_padding_len = 5; // 8 - (15 / 5)
        }
        20 => {
            // rem 4-bits
            let [b1, b2, b3, _, _, _, _, _] = group.to_be_bytes();
            output[opos + 0] = b1;
            output[opos + 1] = b2;
            opos += 2;
            group = b3 as u64;
            expected_padding_len = 4; // 8 - (20 / 5)
        }
        25 => {
            // rem 1-bits
            let [b1, b2, b3, b4, _, _, _, _] = group.to_be_bytes();
            output[opos + 0] = b1;
            output[opos + 1] = b2;
            output[opos + 2] = b3;
            opos += 3;
            group = b4 as u64;
            expected_padding_len = 3; // 8 - (25 / 5)
        }
        30 => {
            // rem 6-bits
            let [b1, b2, b3, b4, _, _, _, _] = group.to_be_bytes();
            output[opos + 0] = b1;
            output[opos + 1] = b2;
            output[opos + 2] = b3;
            opos += 3;
            group = b4 as u64;
            expected_padding_len = 2; // 8 - (30 / 5)
        }
        35 => {
            // rem 3-bits
            let [b1, b2, b3, b4, b5, _, _, _] = group.to_be_bytes();
            output[opos + 0] = b1;
            output[opos + 1] = b2;
            output[opos + 2] = b3;
            output[opos + 3] = b4;
            opos += 4;
            group = b5 as u64;
            expected_padding_len = 1; // 8 - (35 / 5)
        }
        _ => unreachable!(),
    }
    if !config.no_padding {
        // NOTE: verify the PADDING run had exactly the expected length.
        if expected_padding_len > 0 && plen != expected_padding_len {
            ipos -= 1;
            return Err(Error {
                pos: ipos,
                byte: input[ipos],
                kind: ErrorKind::InvalidPaddingLength,
            });
        }
    }
    if !config.allow_trailing_non_zero_bits && group > 0 {
        // NOTE: trailing non-zero bits must not be silently ignored.
        ipos -= 1;
        return Err(Error {
            pos: ipos,
            byte: input[ipos],
            kind: ErrorKind::TrailingNonZeroBits,
        });
    }
    Ok(opos)
}
/// Core encoder: packs each 5-byte input group into a 40-bit value and emits
/// 8 characters from `table`; the trailing partial group is padded with `=`
/// unless `config.no_padding` is set. Returns the number of bytes written.
/// The caller must size `output` via `encode_buffer_len`.
#[inline]
fn encode_to_slice_inner<R: AsRef<[u8]>, W: AsMut<[u8]>>(
    table: &[u8; 32],
    input: R,
    output: &mut W,
    config: Config,
) -> usize {
    let input = input.as_ref();
    let output = output.as_mut();
    let ilen = input.len();
    // Groups Len ( 5 * 8 = 40-bits )
    let n = ilen / 5;
    let r = ilen % 5;
    let mut ipos = 0usize;
    let mut opos = 0usize;
    // Full 5-byte groups: 40 bits -> 8 characters each.
    while ipos < n * 5 {
        let group = u64::from_be_bytes([
            input[ipos + 0],
            input[ipos + 1],
            input[ipos + 2],
            input[ipos + 3],
            input[ipos + 4],
            0,
            0,
            0,
        ]);
        output[opos + 0] = table[((group >> 59) & 0x1F) as usize];
        output[opos + 1] = table[((group >> 54) & 0x1F) as usize];
        output[opos + 2] = table[((group >> 49) & 0x1F) as usize];
        output[opos + 3] = table[((group >> 44) & 0x1F) as usize];
        output[opos + 4] = table[((group >> 39) & 0x1F) as usize];
        output[opos + 5] = table[((group >> 34) & 0x1F) as usize];
        output[opos + 6] = table[((group >> 29) & 0x1F) as usize];
        output[opos + 7] = table[((group >> 24) & 0x1F) as usize];
        ipos += 5;
        opos += 8;
    }
    // Trailing bytes (0, 1, 2, 3, or 4 of them).
    match r {
        0 => {}
        1 => {
            // 8 bits -> 2 characters (+ 6 pad chars).
            let group = u64::from_be_bytes([input[ipos + 0], 0, 0, 0, 0, 0, 0, 0]);
            output[opos + 0] = table[((group >> 59) & 0x1F) as usize];
            output[opos + 1] = table[((group >> 54) & 0x1F) as usize];
            if config.no_padding {
                opos += 2;
            } else {
                // PAD-LEN: 6
                output[opos + 2] = b'=';
                output[opos + 3] = b'=';
                output[opos + 4] = b'=';
                output[opos + 5] = b'=';
                output[opos + 6] = b'=';
                output[opos + 7] = b'=';
                opos += 8;
            }
        }
        2 => {
            // 16 bits -> 4 characters (+ 4 pad chars).
            let group = u64::from_be_bytes([input[ipos + 0], input[ipos + 1], 0, 0, 0, 0, 0, 0]);
            output[opos + 0] = table[((group >> 59) & 0x1F) as usize];
            output[opos + 1] = table[((group >> 54) & 0x1F) as usize];
            output[opos + 2] = table[((group >> 49) & 0x1F) as usize];
            output[opos + 3] = table[((group >> 44) & 0x1F) as usize];
            if config.no_padding {
                opos += 4;
            } else {
                // PAD-LEN: 4
                output[opos + 4] = b'=';
                output[opos + 5] = b'=';
                output[opos + 6] = b'=';
                output[opos + 7] = b'=';
                opos += 8;
            }
        }
        3 => {
            // 24 bits -> 5 characters (+ 3 pad chars).
            let group = u64::from_be_bytes([
                input[ipos + 0],
                input[ipos + 1],
                input[ipos + 2],
                0,
                0,
                0,
                0,
                0,
            ]);
            output[opos + 0] = table[((group >> 59) & 0x1F) as usize];
            output[opos + 1] = table[((group >> 54) & 0x1F) as usize];
            output[opos + 2] = table[((group >> 49) & 0x1F) as usize];
            output[opos + 3] = table[((group >> 44) & 0x1F) as usize];
            output[opos + 4] = table[((group >> 39) & 0x1F) as usize];
            if config.no_padding {
                opos += 5;
            } else {
                // PAD-LEN: 3
                output[opos + 5] = b'=';
                output[opos + 6] = b'=';
                output[opos + 7] = b'=';
                opos += 8;
            }
        }
        4 => {
            // 32 bits -> 7 characters (+ 1 pad char).
            let group = u64::from_be_bytes([
                input[ipos + 0],
                input[ipos + 1],
                input[ipos + 2],
                input[ipos + 3],
                0,
                0,
                0,
                0,
            ]);
            output[opos + 0] = table[((group >> 59) & 0x1F) as usize];
            output[opos + 1] = table[((group >> 54) & 0x1F) as usize];
            output[opos + 2] = table[((group >> 49) & 0x1F) as usize];
            output[opos + 3] = table[((group >> 44) & 0x1F) as usize];
            output[opos + 4] = table[((group >> 39) & 0x1F) as usize];
            output[opos + 5] = table[((group >> 34) & 0x1F) as usize];
            output[opos + 6] = table[((group >> 29) & 0x1F) as usize];
            if config.no_padding {
                opos += 7;
            } else {
                // PAD-LEN: 1
                output[opos + 7] = b'=';
                opos += 8;
            }
        }
        _ => unreachable!(),
    }
    opos
}
#[test]
fn test_base32() {
    // 10. Test Vectors
    // https://tools.ietf.org/html/rfc4648#section-10
    // Standard encode/decode
    assert_eq!(encode(""), "");
    assert_eq!(encode("f"), "MY======");
    assert_eq!(encode("fo"), "MZXQ====");
    assert_eq!(encode("foo"), "MZXW6===");
    assert_eq!(encode("foob"), "MZXW6YQ=");
    assert_eq!(encode("fooba"), "MZXW6YTB");
    assert_eq!(encode("foobar"), "MZXW6YTBOI======");
    assert_eq!(decode("").unwrap(), b"");
    assert_eq!(decode("MY======").unwrap(), b"f");
    assert_eq!(decode("MZXQ====").unwrap(), b"fo");
    assert_eq!(decode("MZXW6===").unwrap(), b"foo");
    assert_eq!(decode("MZXW6YQ=").unwrap(), b"foob");
    assert_eq!(decode("MZXW6YTB").unwrap(), b"fooba");
    assert_eq!(decode("MZXW6YTBOI======").unwrap(), b"foobar");
    // URL-SAFE encode/decode (BASE32-HEX)
    // These match the RFC 4648 base32hex vectors from section 10.
    assert_eq!(urlsafe_encode(""), "");
    assert_eq!(urlsafe_encode("f"), "CO======");
    assert_eq!(urlsafe_encode("fo"), "CPNG====");
    assert_eq!(urlsafe_encode("foo"), "CPNMU===");
    assert_eq!(urlsafe_encode("foob"), "CPNMUOG=");
    assert_eq!(urlsafe_encode("fooba"), "CPNMUOJ1");
    assert_eq!(urlsafe_encode("foobar"), "CPNMUOJ1E8======");
    assert_eq!(urlsafe_decode("").unwrap(), b"");
    assert_eq!(urlsafe_decode("CO======").unwrap(), b"f");
    assert_eq!(urlsafe_decode("CPNG====").unwrap(), b"fo");
    assert_eq!(urlsafe_decode("CPNMU===").unwrap(), b"foo");
    assert_eq!(urlsafe_decode("CPNMUOG=").unwrap(), b"foob");
    assert_eq!(urlsafe_decode("CPNMUOJ1").unwrap(), b"fooba");
    assert_eq!(urlsafe_decode("CPNMUOJ1E8======").unwrap(), b"foobar");
}
// #[cfg(test)]
// #[bench]
// fn bench_encode_slice(b: &mut test::Bencher) {
// let input = b"foobar";
// let ilen = input.len();
// let olen = encode_buffer_len(ilen, DEFAULT_CONFIG);
// let mut output = vec![0u8; olen];
// b.iter(|| {
// encode_to_slice(input, &mut output)
// })
// }
// #[cfg(test)]
// #[bench]
// fn bench_decode_slice(b: &mut test::Bencher) {
// let input = b"MZXW6YTBOI======";
// let ilen = input.len();
// let olen = decode_buffer_len(ilen);
// let mut output = vec![0u8; olen];
// b.iter(|| {
// decode_to_slice(input, &mut output).unwrap()
// })
// }
// #[cfg(test)]
// #[bench]
// fn bench_encode(b: &mut test::Bencher) {
// let input = b"foobar";
// b.iter(|| {
// encode(input)
// })
// }
// #[cfg(test)]
// #[bench]
// fn bench_decode(b: &mut test::Bencher) {
// let input = b"MZXW6YTBOI======";
// b.iter(|| {
// decode(input).unwrap()
// })
// }
// #[cfg(test)]
// #[bench]
// fn bench_crate_encode(b: &mut test::Bencher) {
// use base32::Alphabet;
// let input = b"foobar";
// let alphabet = Alphabet::RFC4648 { padding: true };
// b.iter(|| {
// base32::encode(alphabet, input)
// })
// }
// #[cfg(test)]
// #[bench]
// fn bench_crate_decode(b: &mut test::Bencher) {
// use base32::Alphabet;
// let input = "MZXW6YTBOI======";
// let alphabet = Alphabet::RFC4648 { padding: true };
// b.iter(|| {
// base32::decode(alphabet, input).unwrap()
// })
// }
|
// use crate::ast::{Token, ParseError};
use std::str::Chars;
use std::iter::Peekable;
/// A lexical token; string-like variants borrow from the source text.
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum Token<'a> {
    OpenParen,
    ClosedParen,
    Int(i64),
    Float(f64),
    Bool(bool),
    Str(&'a str),
    Symbol(&'a str),
}
/// Scanner error: a static message plus the 1-based source line it occurred on.
#[derive(Debug)]
pub struct ParseError {
    pub message: &'static str,
    pub line: usize,
}
/// True for the four whitespace characters the scanner recognizes:
/// space, newline, carriage return, and tab.
fn is_whitespace(ch: char) -> bool {
    matches!(ch, ' ' | '\n' | '\r' | '\t')
}
/// True for ASCII digits '0'..='9'.
///
/// Uses the std predicate instead of hand-rolled range comparisons
/// (clippy: manual ASCII range check); behavior is identical.
fn is_numeric(ch: char) -> bool {
    ch.is_ascii_digit()
}
// Error messages shared by the scanner routines below.
const MISSING_QUOTE: &str = "Missing quote '\"'";
const UNEXPECTED_QUOTE: &str = "Unexpected quote '\"'";
const OPEN_PAREN_IN_ATOM: &str = "Found illegal opening paren '(' in atom";
/// Hand-written tokenizer producing a flat `Vec<Token>` from source text.
pub struct Scanner<'a> {
    source: &'a str,
    // Peekable char stream over `source`.
    iter: Peekable<Chars<'a>>,
    // Byte offset where the current token started.
    start: usize,
    // Byte offset just past the last consumed char.
    current: usize,
    // 1-based line number, for error reporting.
    line: usize,
    tokens: Vec<Token<'a>>
}
impl<'a> Scanner<'a> {
    /// Creates a scanner positioned at the start of `source`.
    pub fn new(source: &'a str) -> Self {
        Scanner {
            source,
            iter: source.chars().peekable(),
            start: 0,
            current: 0,
            line: 1,
            tokens: Vec::new(),
        }
    }
    /// Scans the whole input, returning all tokens or the first error.
    pub fn scan_tokens(mut self) -> Result<Vec<Token<'a>>, ParseError> {
        while !self.at_end() {
            self.start = self.current;
            self.token()?;
        }
        Ok(self.tokens)
    }
    /// Builds an `Err` carrying `message` and the current line number.
    /// (Fixed clippy::redundant_field_names: `message: message` -> `message`.)
    fn parse_err(&self, message: &'static str) -> Result<(), ParseError> {
        Err(ParseError { message, line: self.line })
    }
    /// Dispatches on the first character of the next token.
    /// NOTE(review): '\r' and '\t' are not skipped here and fall through to
    /// `symbol()` — confirm whether they should be treated like ' '.
    fn token(&mut self) -> Result<(), ParseError> {
        match self.advance().unwrap() {
            ' ' => Ok(()),
            '\n' => { self.line += 1; Ok(()) }
            '(' => { self.tokens.push(Token::OpenParen); Ok(()) }
            ')' => { self.tokens.push(Token::ClosedParen); Ok(()) }
            // A sign begins a number only when more of the atom follows.
            '+' | '-' => if self.is_more_token() { self.int() } else { self.symbol() }
            ch if is_numeric(ch) => self.int(),
            '.' => if self.is_more_token() { self.float(false) } else { self.symbol() }
            '"' => self.string(),
            '#' => self.bool_(),
            _ => self.symbol(),
        }
    }
    /// Scans an integer; may demote to float or symbol along the way.
    fn int(&mut self) -> Result<(), ParseError> {
        while self.is_more_token() {
            match self.advance().unwrap() {
                ch if is_numeric(ch) => (),
                '.' => return self.float(false),
                // 'e'/'E' only makes a float if something follows it.
                'e' | 'E' => return match self.peek() {
                    None => self.symbol(),
                    Some(_) => self.float(true),
                },
                '"' => return self.parse_err(UNEXPECTED_QUOTE),
                '(' => return self.parse_err(OPEN_PAREN_IN_ATOM),
                _ => return self.symbol(),
            }
        }
        self.add_int_token();
        Ok(())
    }
    /// Scans the fractional and exponent parts of a float.
    /// `exponent_consumed` is true when the caller already saw 'e'/'E'.
    fn float(&mut self, mut exponent_consumed: bool) -> Result<(), ParseError> {
        if !exponent_consumed {
            while self.is_more_token() {
                match self.advance().unwrap() {
                    ch if is_numeric(ch) => (),
                    'e' | 'E' => { exponent_consumed = true; break; }
                    '(' => return self.parse_err(OPEN_PAREN_IN_ATOM),
                    _ => return self.symbol(),
                }
            }
            if !self.is_more_token() {
                if exponent_consumed {
                    // A trailing 'e' with no exponent digits (e.g. "1.e")
                    // is a symbol, not a float.
                    return self.symbol();
                } else {
                    self.add_float_token();
                    return Ok(());
                }
            }
        }
        // Optional exponent sign.
        match self.peek() {
            None => (),
            Some(ch) => match ch {
                '+' | '-' => { self.advance(); },
                ch if is_numeric(ch) => (),
                _ => return self.symbol(),
            }
        };
        // Exponent digits.
        while self.is_more_token() {
            match self.advance().unwrap() {
                ch if is_numeric(ch) => (),
                '(' => return self.parse_err(OPEN_PAREN_IN_ATOM),
                _ => return self.symbol(),
            }
        }
        self.add_float_token();
        Ok(())
    }
    /// Scans a double-quoted string; embedded newlines are not allowed.
    fn string(&mut self) -> Result<(), ParseError> {
        loop {
            match self.advance() {
                None => return self.parse_err(MISSING_QUOTE),
                Some(ch) => match ch {
                    '"' => { self.add_string_token(); return Ok(()); }
                    '\n' => return self.parse_err(MISSING_QUOTE),
                    _ => ()
                }
            }
        }
    }
    /// Scans a bare symbol up to whitespace, ')', or end of input.
    fn symbol(&mut self) -> Result<(), ParseError> {
        while self.is_more_token() {
            match self.advance().unwrap() {
                '(' => return self.parse_err(OPEN_PAREN_IN_ATOM),
                _ => ()
            }
        }
        self.add_symbol_token();
        Ok(())
    }
    /// Scans the `#t` / `#f` boolean literals.
    fn bool_(&mut self) -> Result<(), ParseError> {
        match self.advance() {
            None => self.parse_err("expected char"),
            Some(ch) => match ch {
                't' | 'f' => if self.is_more_token() {
                    self.parse_err("unexpected char after '#' 1")
                } else {
                    self.tokens.push(Token::Bool(ch == 't'));
                    Ok(())
                }
                _ => self.parse_err("unexpected char after '#' 2"),
            }
        }
    }
    /// Pushes the current lexeme as an Int.
    /// NOTE(review): the unwrap panics if the literal overflows i64.
    fn add_int_token(&mut self) {
        let token_str = self.source.get(self.start..self.current).unwrap();
        let parsed = token_str.parse::<i64>().ok().unwrap();
        self.tokens.push(Token::Int(parsed));
    }
    /// Pushes the current lexeme as a Float.
    fn add_float_token(&mut self) {
        let token_str = self.source.get(self.start..self.current).unwrap();
        let parsed = token_str.parse::<f64>().ok().unwrap();
        self.tokens.push(Token::Float(parsed));
    }
    /// Pushes the current lexeme as a Str, stripping the surrounding quotes.
    fn add_string_token(&mut self) {
        let slice = self.source.get(self.start+1..self.current-1).unwrap();
        self.tokens.push(Token::Str(slice));
    }
    /// Pushes the current lexeme as a Symbol.
    fn add_symbol_token(&mut self) {
        let slice = self.source.get(self.start..self.current).unwrap();
        self.tokens.push(Token::Symbol(slice));
    }
    /// True once every byte of the source has been consumed.
    fn at_end(&self) -> bool {
        self.current >= self.source.len()
    }
    /// True while the current atom continues (next char is not whitespace,
    /// ')', or end of input).
    fn is_more_token(&mut self) -> bool {
        match self.peek() {
            None => false,
            Some(ch) if is_whitespace(ch) || (ch == ')') => false,
            _ => true,
        }
    }
    /// Consumes one char, advancing the byte offset by its UTF-8 width.
    fn advance(&mut self) -> Option<char> {
        let ch = self.iter.next()?;
        self.current += ch.len_utf8();
        Some(ch)
    }
    /// Looks at the next char without consuming it.
    fn peek(&mut self) -> Option<char> {
        self.iter.peek().copied()
    }
}
#[cfg(test)]
mod tests {
use super::*;
fn s(x: &'static str) -> Token { Token::Symbol(x) }
fn st(x: &'static str) -> Token { Token::Str(x) }
fn i(x: i64) -> Token<'static> { Token::Int(x) }
fn f(x: f64) -> Token<'static> { Token::Float(x) }
fn op() -> Token<'static> { Token::OpenParen }
fn cp() -> Token<'static> { Token::ClosedParen }
fn tokens(x: &'static str) -> Result<Vec<Token>, ParseError> {
let scanner = Scanner::new(x);
scanner.scan_tokens()
}
fn scan_ok(x: &'static str, expected: Vec<Token>) {
let res = tokens(x).expect("err");
assert_eq!(res, expected);
}
fn scan_err(x: &'static str) {
let res = tokens(x);
assert!(res.is_err());
}
#[test]
fn test_scan_tokens() {
let mut tests = vec![
("()", vec![op(), cp()]),
("(() (1 2 3))", vec![op(), op(), cp(), op(), i(1), i(2), i(3), cp(), cp()]),
("($%-a)", vec![op(), s("$%-a"), cp()]),
("(() () ())", vec![op(), op(), cp(), op(), cp(), op(), cp(), cp()]),
];
let same = vec![
"(+ abc def)", "( + abc def)", "(+ abc def )",
"(+ abc def)", " (+ abc def) ", "(+\nabc\ndef)",
];
for i in same { tests.push((i, vec![op(), s("+"), s("abc"), s("def"), cp()])); }
for (x, y) in tests { scan_ok(x, y); }
let errs = vec![
"(ab( 1 2)", "(1 2\nab( 3)",
];
for x in errs { scan_err(x); }
}
#[test]
fn test_scan_int() {
let mut tests = vec![
("123", vec![i(123)]),
("(1", vec![op(), i(1)]),
("(+011233)", vec![op(), i(11233), cp()]),
("(-011233)", vec![op(), i(-11233), cp()]),
("-55123", vec![i(-55123)]),
("--5123", vec![s("--5123")]),
("-+5123", vec![s("-+5123")]),
("5-5", vec![s("5-5")]),
];
let same = vec![
" 123", "0123"," 0123",
" +0123", "+0123",
];
for x in same { tests.push((x, vec![i(123)])); }
for (x, y) in tests { scan_ok(x, y) }
scan_err(" +01123(3");
}
#[test]
fn test_scan_string() {
let tests = vec! [
(r#" ("abc") "#, vec![op(), st("abc"), cp()]),
(r#" "(abc))())(" "#, vec![st("(abc))())(")]),
(r#" ("abc" "def" ("ijk")) "#, vec![op(), st("abc"), st("def"), op(), st("ijk"), cp(), cp()]),
];
for (x, y) in tests { scan_ok(x, y); }
let errs = vec![
" \"a\nb\" ", "(\")"
];
for x in errs { scan_err(x); }
}
#[test]
fn test_scan_float() {
let mut tests = vec![
("1.1.", vec![s("1.1.")]),
("1.e15.", vec![s("1.e15.")]),
("1.e", vec![s("1.e")]),
(".1", vec![f(0.1)]),
(".", vec![s(".")]),
("3.14156e-03", vec![f(3.14156e-03)])
];
let same1 = vec![
"1.0", "01.0", "+1.0", "+01.0", "+01.",
"01.", "1.", "1.00", "+1.00", "+01.00",
];
for i in same1 { tests.push((i, vec![f(1.0)])); }
let same2 = vec![
"1e1", "+1e1", "+1e+1",
"1e+01", "001e01", "001.0e+01",
"1.e+1", "1.e+001", "001.e01",
];
for i in same2 { tests.push((i, vec![f(1e1)])); }
let errs = vec![
"1(e1", "1e1(", "1.e1(", "1.e(1",
];
for x in errs { scan_err(x); }
}
#[test]
fn test_bool() {
scan_ok("#t", vec![Token::Bool(true)]);
scan_ok("#f", vec![Token::Bool(false)]);
scan_err("#");
scan_err("#ta");
scan_err("#fa");
scan_err("#a");
scan_err("#b");
}
#[test]
fn test_advance() {
    // `advance` yields each character exactly once, then `None`.
    let mut scanner = Scanner::new("123");
    let expected = vec!['1', '2', '3'];
    for ch in expected {
        assert_eq!(scanner.advance(), Some(ch));
    }
    assert_eq!(scanner.advance(), None);
}
#[test]
fn test_peek() {
    // `peek` returns the upcoming character without consuming it; once the
    // input is exhausted both `peek` and `advance` yield `None`.
    let mut scanner = Scanner::new("1");
    assert_eq!(scanner.peek(), Some('1'));
    assert_eq!(scanner.advance(), Some('1'));
    assert_eq!(scanner.peek(), None);
    assert_eq!(scanner.advance(), None);
}
#[test]
fn test_at_end() {
    // Consuming every character must leave the scanner at the end.
    let input = "12345";
    let mut scanner = Scanner::new(input);
    for _ in input.chars() {
        scanner.advance();
    }
    assert!(scanner.at_end());
}
} |
use crate::Record;
use lmdb::{Cursor, Transaction};
/// Read-only scan over every record of type `T` stored in an LMDB database.
pub struct RoQuery<'txn, T> {
    /// Marks the record type this query deserializes into; zero-sized.
    pub phantom: std::marker::PhantomData<T>,
    /// Handle of the LMDB database being scanned.
    pub db: lmdb::Database,
    /// Read-only transaction the scan runs inside.
    pub txn: lmdb::RoTransaction<'txn>,
    /// Lazily-created cursor iterator; `None` until the first `next()` call.
    pub iter: Option<lmdb::Iter<'txn>>,
}
impl<'txn, T: 'txn + Record> RoQuery<'txn, T> {
    /// Wrap a database handle and read transaction into a query. The
    /// underlying cursor is created lazily by the first iteration step.
    pub fn new(db: lmdb::Database, txn: lmdb::RoTransaction<'txn>) -> RoQuery<'txn, T> {
        RoQuery {
            db,
            txn,
            iter: None,
            phantom: std::marker::PhantomData::<T>,
        }
    }
}
impl<'txn, T: 'txn + Record> Iterator for RoQuery<'txn, T> {
    type Item = T;
    /// Yield the next record, deserialized from its binary value.
    ///
    /// NOTE(review): a record that fails `T::from_binary` ends iteration
    /// early (`Err` is mapped to `None`) and the error is discarded —
    /// confirm that silently stopping is the intended behavior.
    fn next(&mut self) -> Option<Self::Item> {
        if self.iter.is_none() {
            // Lazily open the cursor on first call. NOTE(review): `cursor`
            // is dropped at the end of this block while `self.iter` keeps
            // a borrow derived from it; this compiles against the lmdb
            // crate's lifetimes — verify soundness for the crate version
            // in use.
            let mut cursor = self.txn.open_ro_cursor(self.db).unwrap();
            self.iter = Some(cursor.iter());
        }
        if let Some(iter) = &mut self.iter {
            if let Some(record) = iter.next() {
                // `record` is a (key, value) pair; only the value (.1) is
                // decoded into `T`.
                return match T::from_binary(record.1) {
                    Ok(record) => Some(record),
                    Err(_) => None,
                };
            }
        }
        None
    }
}
|
extern crate sqlite;
fn main() {
let connection = sqlite::open("gprl-sqlite3-demo.db").unwrap();
println!("修改前的数据为:");
connection.iterate("SELECT * FROM my_demo_apps", |pairs| {
for &(column, value) in pairs.iter() {
print!("{} = {} | ", column, value.unwrap());
}
println!("");
true
}).unwrap();
connection.execute("update my_demo_apps set authors=\"祥勇\" where id=1;").unwrap();
println!("修改后的数据为:");
connection.iterate("SELECT * FROM my_demo_apps", |pairs| {
for &(column, value) in pairs.iter() {
print!("{} = {} | ", column, value.unwrap());
}
println!("");
true
}).unwrap();
}
|
#![forbid(unsafe_code)]
pub mod version_1;
pub mod version_2;
|
// Based on https://github.com/vulkano-rs/vulkano-www/blob/master/examples/guide-mandelbrot.rs
// which carries this notice:
//
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use image::ImageBuffer;
use image::Rgba;
use vulkano::command_buffer::CommandBuffer;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::descriptor::pipeline_layout::PipelineLayoutAbstract;
use vulkano::format::Format;
use vulkano::pipeline::ComputePipeline;
use vulkano::sync::GpuFuture;
use vulkan_playground::*;
/// Render a 2048×2048 Mandelbrot image on the GPU with a compute shader,
/// copy it back to host memory, and save it as `image.png`, printing the
/// wall-clock time of each phase (init, compute+transfer, encode).
fn main() {
    let started = now();
    // init
    let vk = Interface::new_compute();
    println!("using {}", vk.info());
    // buffers
    let (w, h) = (2048, 2048);
    let gpu_image = vk.storage_image((w, h), Format::R8G8B8A8Unorm);
    // 4 bytes per pixel (RGBA8) for the host-visible readback buffer.
    let cpu_buffer = vk.cpu_accessible_buffer((w * h * 4) as usize);
    // shader
    mod cs {
        vulkano_shaders::shader! {
            ty: "compute",
            // v6
            path: "src/bin/mandelbrot/mandelbrot.glsl",
        }
    }
    let shader = cs::Shader::load(vk.device()).unwrap();
    // command
    let compute_pipeline = Arc::new(ComputePipeline::new(vk.device(), &shader.main_entry_point(), &()).unwrap());
    let set = Arc::new(
        PersistentDescriptorSet::start(compute_pipeline.layout().descriptor_set_layout(0).unwrap().clone())
            .add_image(gpu_image.clone())
            .unwrap()
            .build()
            .unwrap(),
    );
    // Dispatch geometry; assumed to match the local_size declared in the
    // GLSL shader — TODO confirm against mandelbrot.glsl.
    let local_size_x = 8;
    let local_size_y = 8;
    let local_size_z = 1;
    let mut builder = vk.auto_command_buffer_builder();
    builder
        .dispatch(
            [w / local_size_x, h / local_size_y, local_size_z],
            compute_pipeline.clone(),
            set.clone(),
            (),
        )
        .unwrap()
        .copy_image_to_buffer(gpu_image.clone(), cpu_buffer.clone())
        .unwrap();
    let command_buffer = builder.build().unwrap();
    println!("init: {} ms", started.elapsed().as_secs_f32() * 1000.0);
    // exec + transfer
    let started = now();
    let finished = command_buffer.execute(vk.queue()).unwrap();
    // Block until the GPU signals completion before touching the buffer.
    finished.then_signal_fence_and_flush().unwrap().wait(None).unwrap();
    let buffer_content = cpu_buffer.read().unwrap(); // read is really just lock
    println!("compute + transfer: {} ms", started.elapsed().as_secs_f32() * 1000.0);
    let started = now();
    let image = ImageBuffer::<Rgba<u8>, _>::from_raw(w, h, &buffer_content[..]).unwrap();
    image.save("image.png").expect("save image.png");
    println!("encode: {} ms", started.elapsed().as_secs_f32() * 1000.0);
}
/// Convenience shorthand for grabbing a monotonic timestamp.
fn now() -> std::time::Instant {
    use std::time::Instant;
    Instant::now()
}
|
![feature(env, old_io)]
use std::old_io as io;
fn main(){
let mut stdin = io::stdin();
for line in stdin.lock().lines() {
print!("{}", line.unwrap());
}
}
|
use super::js_object::Object;
use wasm_bindgen::prelude::*;
/// Bindings to the global `gapi` object injected by Google's JS API loader.
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPI;
    // The global `gapi` singleton.
    pub static gapi: GoogleAPI;
    #[wasm_bindgen(method, getter)]
    pub fn client(this: &GoogleAPI) -> GoogleAPIClient;
    #[wasm_bindgen(method, getter)]
    pub fn auth2(this: &GoogleAPI) -> GoogleAPIAuth2;
}
/// `gapi.client` — initializes the API client and exposes the Drive namespace.
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPIClient;
    // `init` takes a JS config object and returns a JS thenable.
    #[wasm_bindgen(method)]
    pub fn init(this: &GoogleAPIClient, args: &JsValue) -> GoogleThenalbe;
    #[wasm_bindgen(method, getter)]
    pub fn drive(this: &GoogleAPIClient) -> GoogleAPIClientDrive;
}
/// `gapi.client.drive` — the Drive API namespace.
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPIClientDrive;
    #[wasm_bindgen(method, getter)]
    pub fn files(this: &GoogleAPIClientDrive) -> GoogleAPIClientDriveFiles;
}
/// `gapi.client.drive.files` — file operations; both calls take a JS args
/// object and return a JS thenable.
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPIClientDriveFiles;
    #[wasm_bindgen(method)]
    pub fn create(this: &GoogleAPIClientDriveFiles, args: &JsValue) -> GoogleThenalbe;
    #[wasm_bindgen(method)]
    pub fn list(this: &GoogleAPIClientDriveFiles, args: &JsValue) -> GoogleThenalbe;
}
/// `gapi.auth2` — authentication module.
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPIAuth2;
    #[wasm_bindgen(method, js_name = "getAuthInstance")]
    pub fn get_auth_instance(this: &GoogleAPIAuth2) -> GoogleAPIGoogleAuth;
}
/// The GoogleAuth instance — sign-in/out plus sign-in state access.
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPIGoogleAuth;
    #[wasm_bindgen(method, js_name = "signIn")]
    pub fn sign_in(this: &GoogleAPIGoogleAuth);
    #[wasm_bindgen(method, js_name = "signOut")]
    pub fn sign_out(this: &GoogleAPIGoogleAuth);
    #[wasm_bindgen(method, getter, js_name = "isSignedIn")]
    pub fn is_signed_in(this: &GoogleAPIGoogleAuth) -> GoogleAPIGoogleAuthIsSignedIn;
}
/// `GoogleAuth.isSignedIn` — current state (`get`) and change listener (`listen`).
#[wasm_bindgen]
extern "C" {
    pub type GoogleAPIGoogleAuthIsSignedIn;
    #[wasm_bindgen(method)]
    pub fn get(this: &GoogleAPIGoogleAuthIsSignedIn) -> bool;
    #[wasm_bindgen(method)]
    pub fn listen(this: &GoogleAPIGoogleAuthIsSignedIn, callback: &js_sys::Function);
}
/// A JS "thenable" returned by gapi calls.
/// NOTE(review): `GoogleThenalbe` is a typo for `GoogleThenable`; the name is
/// referenced by the other extern blocks (and possibly other files), so
/// renaming it here alone would break them — left as-is.
#[wasm_bindgen]
extern "C" {
    pub type GoogleThenalbe;
    #[wasm_bindgen(method)]
    pub fn then(
        this: &GoogleThenalbe,
        resolve: Option<&js_sys::Function>,
        reject: Option<&js_sys::Function>,
    );
}
/// Response wrapper whose `result` getter exposes the payload object.
#[wasm_bindgen]
extern "C" {
    pub type GoogleResponse;
    #[wasm_bindgen(method, getter)]
    pub fn result(this: &GoogleResponse) -> Object;
}
|
pub mod core;
pub mod atom;
pub mod urid;
pub mod midi;
|
mod db_types;
use crate::backend;
use crate::errors::{FinError, ResultFin};
use crate::models;
use crate::server;
use chrono::prelude::*;
use std::collections::HashMap;
use r2d2;
pub(crate) use self::db_types::*;
/// Data-access operations over the `frk_item` table.
pub trait FinDb {
    //========== USER
    /// Items belonging to the given project id.
    /// NOTE(review): the name says "incomplete" but the implementation below
    /// filters only by project id — confirm the intended semantics.
    fn get_incomplete_by_proj_id(
        &self,
        proj_id: i64,
    ) -> ResultFin<Vec<models::Item>>;
    /// Every item in the table, regardless of project.
    fn get_all_tasks(&self) -> ResultFin<Vec<models::Item>>;
}
/// Database layer backed by a MySQL connection pool.
/// NOTE(review): the `Pg` prefix suggests Postgres, but the pool is
/// `mysql::Pool`; renaming would break callers elsewhere, so left as-is.
pub struct PgFinDb {
    /// Shared MySQL connection pool.
    pub conn: mysql::Pool,
    /// Structured logger, tagged per-module in `new`.
    logger: slog::Logger,
}
impl PgFinDb {
    /// Build a new handle around a MySQL pool; the logger is tagged with
    /// `mod => "data"` so log lines can be traced back to this layer.
    pub fn new(conn: mysql::Pool, logger: slog::Logger) -> Self {
        PgFinDb {
            // Field-init shorthand instead of the redundant `conn: conn`.
            conn,
            logger: logger.new(o!("mod" => "data")),
        }
    }
}
impl FinDb for PgFinDb {
    /// Items for the given project id.
    ///
    /// NOTE(review): despite the name, the query filters only by `projectId`;
    /// no "incomplete" condition is applied.
    fn get_incomplete_by_proj_id(
        &self,
        proj_id: i64,
    ) -> ResultFin<Vec<models::Item>> {
        let items: ResultFin<Vec<models::Item>> = self
            .conn
            .prep_exec(
                "SELECT itemId, title, description from frk_item WHERE projectId = :a",
                params!{"a" => proj_id},
            )
            .map(|result| {
                // Unwrap each row result (no per-row error handling), then
                // decode the three selected columns into an `Item`.
                result
                    .map(|x| x.unwrap())
                    .map(|row| {
                        // `from_row` panics if the row doesn't match the schema.
                        // Renamed local from `itemId` to snake_case.
                        let (item_id, title, description) = mysql::from_row(row);
                        models::Item::new(item_id, title, description)
                    })
                    .collect()
            })
            .map_err(|err| {
                // Log the concrete driver error; expose a generic DB error.
                lineError!(self.logger, err);
                FinError::DatabaseErr
            });
        items
    }
    /// Every item in `frk_item`, regardless of project.
    fn get_all_tasks(&self) -> ResultFin<Vec<models::Item>> {
        let items: ResultFin<Vec<models::Item>> = self
            .conn
            .prep_exec("SELECT itemId, title, description from frk_item", ())
            .map(|result| {
                // Same row-mapping pattern as above.
                result
                    .map(|x| x.unwrap())
                    .map(|row| {
                        let (item_id, title, description) = mysql::from_row(row);
                        models::Item::new(item_id, title, description)
                    })
                    .collect()
            })
            .map_err(|err| {
                lineError!(self.logger, err);
                FinError::DatabaseErr
            });
        items
    }
}
|
// Module containing functions for calculating first-order greeks
use std::f64::consts::E;
use common::*;
use stats::cnd;
/// Calculates the delta of a call option.
///
/// Delta measures the rate of the theoretical option value with respect to the changes in the underlying asset's price.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded dividend yield
/// * `sigma` - volatility
pub fn delta_call(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64) -> f64 {
    let d1 = d1(s0, x, t, r, q, sigma);
    // Call delta = e^{-q t} * N(d1); `f64::exp` replaces the slower and
    // less precise `E.powf(...)` form.
    (-(q * t)).exp() * cnd(d1)
}
/// Calculates the delta of a put option.
///
/// Delta measures the rate of the theoretical option value with respect to the changes in the underlying asset's price.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded dividend yield
/// * `sigma` - volatility
pub fn delta_put(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64) -> f64 {
    let d1 = d1(s0, x, t, r, q, sigma);
    // Put delta = e^{-q t} * (N(d1) - 1); always in [-1, 0].
    (-(q * t)).exp() * (cnd(d1) - 1.0)
}
/// Calculates the lambda of a call option, also known as Omega.
///
/// Omega is the percentage of change in an option's value with respect to the percentage change in the underlying price.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded divident yield
/// * `sigma` - volatility
/// * `v` - value or current price of the option
pub fn lambda_call(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64, v: f64) -> f64 {
    // Lambda is the call delta scaled by the shared leverage formula.
    lambda(s0, v, delta_call(s0, x, t, r, q, sigma))
}
/// Calculates the lambda of a put option, also known as Omega.
///
/// Omega is the percentage of change in an option's value with respect to the percentage change in the underlying price.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded divident yield
/// * `sigma` - volatility
/// * `v` - value or current price of the option
pub fn lambda_put(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64, v: f64) -> f64 {
    // Same leverage formula as the call, using the put delta.
    lambda(s0, v, delta_put(s0, x, t, r, q, sigma))
}
/// Shared omega/leverage formula: delta scaled by underlying price over
/// option value.
fn lambda(s0: f64, v: f64, delta: f64) -> f64 {
    delta * s0 / v
}
/// Calculates the Rho of a call option
///
/// Rho measures the sensitivity to the interest rate. Rho is the derivative of the option value with respect to the risk free interest rate.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded dividend yield
/// * `sigma` - volatility
pub fn rho_call(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64) -> f64 {
    let d2_cnd = cnd(d2(s0, x, t, r, q, sigma));
    // Scaled by 1/100 so rho is quoted per one-percentage-point rate move;
    // `f64::exp` replaces `E.powf(...)`.
    0.01 * x * t * (-(r * t)).exp() * d2_cnd
}
/// Calculates the Rho of a put option
///
/// Rho measures the sensitivity to the interest rate. Rho is the derivative of the option value with respect to the risk free interest rate.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded dividend yield
/// * `sigma` - volatility
pub fn rho_put(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64) -> f64 {
    let neg_d2_cnd = cnd(-d2(s0, x, t, r, q, sigma));
    // Negative sign: put value falls as rates rise. Scaled per 1% rate move.
    -0.01 * x * t * (-(r * t)).exp() * neg_d2_cnd
}
/// Calculates the Theta of a call option
///
/// Theta measures the sensitivity of the value of the derivative to the passage of time.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded dividend yield
/// * `sigma` - volatility
/// * `days_per_year` - the number of calendar days in the year (theta is per-day)
pub fn theta_call(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64, days_per_year: f64) -> f64 {
    let d1 = d1(s0, x, t, r, q, sigma);
    let arg1 = theta_arg_1(s0, t, q, sigma, d1);
    // d2 derived from the already-computed d1 to avoid recomputation.
    let d2 = d2_d1(t, sigma, d1);
    let arg2 = theta_arg_2(x, t, r, d2);
    let arg3 = theta_arg_3(s0, t, q, d1);
    return (1.0 / days_per_year) * (arg1 - arg2 + arg3);
}
/// Calculates the Theta of a put option
///
/// Theta measures the sensitivity of the value of the derivative to the passage of time.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded divident yield
/// * `sigma` - volatility
/// * `days_per_year` - the number of calendar days in the year
pub fn theta_put(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64, days_per_year: f64) -> f64 {
    let d1_val = d1(s0, x, t, r, q, sigma);
    let d2_val = d2_d1(t, sigma, d1_val);
    // For a put, the d1/d2 arguments enter the rate and yield terms negated,
    // and those terms swap sign relative to the call.
    let decay_term = theta_arg_1(s0, t, q, sigma, d1_val);
    let rate_term = theta_arg_2(x, t, r, -d2_val);
    let yield_term = theta_arg_3(s0, t, q, -d1_val);
    (1.0 / days_per_year) * (decay_term + rate_term - yield_term)
}
fn theta_arg_1(s0: f64, t: f64, q: f64, sigma: f64, d1: f64) -> f64 {
return -(((s0 * sigma * E.powf(-q * t)) / (2.0 * t.sqrt())) * one_over_sqrt_pi() *
E.powf((-d1.powf(2.0)) / 2.0));
}
/// Interest-rate term of theta: r * x * e^{-r t} * N(d2).
fn theta_arg_2(x: f64, t: f64, r: f64, d2: f64) -> f64 {
    let discount = E.powf(-r * t);
    r * x * discount * cnd(d2)
}
/// Dividend-yield term of theta: q * s0 * e^{-q t} * N(d1).
fn theta_arg_3(s0: f64, t: f64, q: f64, d1: f64) -> f64 {
    let discount = E.powf(-q * t);
    q * s0 * discount * cnd(d1)
}
/// Calculates the Vega of a given option
///
/// Vega measures the sensitivity to volatility. Vega is the derivative of the option value with respect to the volatility of the underlying asset.
///
/// # Arguments
/// * `s0` - The underlying price of the option
/// * `x` - The strike price of the option
/// * `t` - time to expiration as a percentage of the year
/// * `r` - continuously compounded risk-free interest rate
/// * `q` - continuously compounded divident yield
/// * `sigma` - volatility
pub fn vega(s0: f64, x: f64, t: f64, r: f64, q: f64, sigma: f64) -> f64 {
    // Delegate to the d1-based form after computing d1 once.
    vega_d1(s0, t, q, d1(s0, x, t, r, q, sigma))
}
pub fn vega_d1(s0: f64, t: f64, q: f64, d1: f64) -> f64 {
let mult1 = (1.0 / 100.0) * s0 * E.powf(-(q * t)) * t.sqrt();
let mult2 = one_over_sqrt_pi();
let mult3 = E.powf((-d1.powf(2.0) / 2.0));
return mult1 * mult2 * mult3;
}
#[cfg(test)]
mod tests {
    use greeks::*;
    use value::*;
    // Shared option scenario: a near-the-money option with 23 days to expiry.
    // Expected greek values (E_*) are compared with a 1e-3 tolerance.
    const UNDERLYING: f64 = 64.68;
    const STRIKE: f64 = 65.00;
    const VOL: f64 = 0.5051;
    const INTEREST_RATE: f64 = 0.0150;
    const DIV_YIELD: f64 = 0.0210;
    const DAYS_PER_YEAR: f64 = 365.0;
    const TIME_TO_EXPIRY: f64 = 23.0 / DAYS_PER_YEAR;
    const E_CALL_DELTA: f64 = 0.5079;
    const E_PUT_DELTA: f64 = -0.4908;
    const E_LAMBDA_PUT: f64 = -3.0759;
    const E_LAMBDA_CALL: f64 = 3.3936;
    const E_RHO_CALL: f64 = 0.0187;
    const E_RHO_PUT: f64 = -0.0222;
    const E_THETA_CALL: f64 = -0.0703;
    const E_THETA_PUT: f64 = -0.0714;
    const E_VEGA: f64 = 0.0647;
    #[test]
    fn test_delta_call() {
        let call_delta = delta_call(UNDERLYING,
                                    STRIKE,
                                    TIME_TO_EXPIRY,
                                    INTEREST_RATE,
                                    DIV_YIELD,
                                    VOL);
        let abs = (call_delta - E_CALL_DELTA).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_delta_put() {
        let put_delta = delta_put(UNDERLYING,
                                  STRIKE,
                                  TIME_TO_EXPIRY,
                                  INTEREST_RATE,
                                  DIV_YIELD,
                                  VOL);
        let abs = (put_delta - E_PUT_DELTA).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_lambda_put() {
        // Arbitrary change in underlying at expiry
        let price = put_at_expiry(UNDERLYING - 10.0, STRIKE);
        let lambda = lambda_put(UNDERLYING,
                                STRIKE,
                                TIME_TO_EXPIRY,
                                INTEREST_RATE,
                                DIV_YIELD,
                                VOL,
                                price);
        println!("{}", lambda);
        let abs = (lambda - E_LAMBDA_PUT).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_lambda_call() {
        // arbitrary change in underlying at expiry
        let price = call_at_expiry(UNDERLYING + 10.0, STRIKE);
        let lambda = lambda_call(UNDERLYING,
                                 STRIKE,
                                 TIME_TO_EXPIRY,
                                 INTEREST_RATE,
                                 DIV_YIELD,
                                 VOL,
                                 price);
        let abs = (lambda - E_LAMBDA_CALL).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_rho_call() {
        let rho_call = rho_call(UNDERLYING,
                                STRIKE,
                                TIME_TO_EXPIRY,
                                INTEREST_RATE,
                                DIV_YIELD,
                                VOL);
        let abs = (rho_call - E_RHO_CALL).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_rho_put() {
        let rho_put = rho_put(UNDERLYING,
                              STRIKE,
                              TIME_TO_EXPIRY,
                              INTEREST_RATE,
                              DIV_YIELD,
                              VOL);
        let abs = (rho_put - E_RHO_PUT).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_theta_call() {
        let theta_call = theta_call(UNDERLYING,
                                    STRIKE,
                                    TIME_TO_EXPIRY,
                                    INTEREST_RATE,
                                    DIV_YIELD,
                                    VOL,
                                    DAYS_PER_YEAR);
        let abs = (theta_call - E_THETA_CALL).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_theta_put() {
        let theta_put = theta_put(UNDERLYING,
                                  STRIKE,
                                  TIME_TO_EXPIRY,
                                  INTEREST_RATE,
                                  DIV_YIELD,
                                  VOL,
                                  DAYS_PER_YEAR);
        let abs = (theta_put - E_THETA_PUT).abs();
        assert!(abs < 0.001);
    }
    #[test]
    fn test_vega() {
        let vega = vega(UNDERLYING,
                        STRIKE,
                        TIME_TO_EXPIRY,
                        INTEREST_RATE,
                        DIV_YIELD,
                        VOL);
        let abs = (vega - E_VEGA).abs();
        assert!(abs < 0.001);
    }
}
use std::ops::Bound;
use std::sync::Arc;
use anyhow::Context;
use axum::extract::Extension;
use hyper::{Body, Response};
use serde_derive::Deserialize;
use svc_agent::AccountId;
use svc_utils::extractors::AccountIdExtractor;
use tracing::{error, info, instrument};
use uuid::Uuid;
use crate::app::authz::AuthzObject;
use crate::app::error::ErrorExt;
use crate::app::error::ErrorKind as AppErrorKind;
use crate::app::http::Json;
use crate::app::metrics::AuthorizeMetrics;
use crate::app::services;
use crate::app::AppContext;
use crate::db::class;
use crate::db::class::ClassType;
use crate::db::class::KeyValueProperties;
use super::AppError;
use super::AppResult;
/// Request body for creating a new p2p class.
#[derive(Deserialize)]
pub struct P2PCreatePayload {
    // Unique scope of the class within the audience.
    scope: String,
    // Tenant audience the class belongs to.
    audience: String,
    // Optional free-form tags stored with the class.
    tags: Option<serde_json::Value>,
    // Arbitrary key-value properties; empty map when omitted.
    #[serde(default)]
    properties: KeyValueProperties,
    // Whether to create a whiteboard in the event room; the default comes
    // from `class::default_whiteboard`.
    #[serde(default = "class::default_whiteboard")]
    whiteboard: bool,
}
#[instrument(
skip_all,
fields(
audience = ?body.audience,
scope = ?body.scope
)
)]
pub async fn create(
Extension(ctx): Extension<Arc<dyn AppContext>>,
AccountIdExtractor(account_id): AccountIdExtractor,
Json(body): Json<P2PCreatePayload>,
) -> AppResult {
info!("Creating p2p");
let r = do_create(ctx.as_ref(), &account_id, body).await;
if let Err(e) = &r {
error!(error = ?e, "Failed to create p2p");
}
r
}
/// Create a p2p class: authorize, insert a "dummy" row, create the backing
/// event/conference rooms, then establish the class — rolling the dummy row
/// back (deleting it) if room creation fails.
async fn do_create(
    state: &dyn AppContext,
    account_id: &AccountId,
    body: P2PCreatePayload,
) -> AppResult {
    // The caller must be allowed to "create" on ["classrooms"] for the audience.
    let object = AuthzObject::new(&["classrooms"]).into();
    state
        .authz()
        .authorize(
            body.audience.clone(),
            account_id.clone(),
            object,
            "create".into(),
        )
        .await
        .measure()?;
    info!("Authorized p2p create");
    // Phase 1: insert the not-yet-established ("dummy") class row.
    let dummy = insert_p2p_dummy(state, &body).await?;
    // p2p classes are unbounded in time.
    let time = (Bound::Unbounded, Bound::Unbounded);
    // Phase 2: create the backing rooms in the event/conference services.
    let result = services::create_event_and_conference_rooms(state, &dummy, &time).await;
    let mut conn = state
        .get_conn()
        .await
        .error(AppErrorKind::DbConnAcquisitionFailed)?;
    let event_room_id = match result {
        Ok((event_id, conference_id)) => {
            info!(?event_id, ?conference_id, "Created rooms",);
            // Attach the room ids, turning the dummy into an established class.
            class::EstablishQuery::new(dummy.id(), event_id, conference_id)
                .execute(&mut conn)
                .await
                .context("Failed to establish webinar dummy")
                .error(AppErrorKind::DbQueryFailed)?;
            event_id
        }
        Err(e) => {
            info!("Failed to create rooms");
            // Manual rollback: remove the dummy row so a retry can start clean.
            class::DeleteQuery::new(dummy.id())
                .execute(&mut conn)
                .await
                .context("Failed to delete webinar dummy")
                .error(AppErrorKind::DbQueryFailed)?;
            return Err(e);
        }
    };
    if body.whiteboard {
        // Whiteboard creation is best-effort: a failure is logged, not fatal.
        if let Err(e) = state.event_client().create_whiteboard(event_room_id).await {
            error!(
                ?event_room_id,
                "Failed to create whiteboard in event room, err = {:?}", e
            );
        }
    }
    let body = serde_json::to_string_pretty(&dummy)
        .context("Failed to serialize p2p")
        .error(AppErrorKind::SerializationFailed)?;
    let response = Response::builder()
        .status(201)
        .body(Body::from(body))
        .unwrap();
    Ok(response)
}
/// Insert a not-yet-established ("dummy") p2p class row and return it.
/// Fails with `ClassAlreadyEstablished` when the scope is already taken.
async fn insert_p2p_dummy(
    state: &dyn AppContext,
    body: &P2PCreatePayload,
) -> Result<class::Dummy, AppError> {
    let base = class::InsertQuery::new(
        ClassType::P2P,
        body.scope.clone(),
        body.audience.clone(),
        (Bound::Unbounded, Bound::Unbounded).into(),
    )
    .properties(body.properties.clone())
    .preserve_history(true);
    // Attach tags only when the payload carries them.
    let query = match body.tags {
        Some(ref tags) => base.tags(tags.clone()),
        None => base,
    };
    let mut conn = state
        .get_conn()
        .await
        .error(AppErrorKind::DbConnAcquisitionFailed)?;
    query
        .execute(&mut conn)
        .await
        .context("Failed to insert p2p")
        .error(AppErrorKind::DbQueryFailed)?
        .ok_or_else(|| AppError::from(AppErrorKind::ClassAlreadyEstablished))
}
/// Request body for converting pre-existing rooms into a p2p class.
#[derive(Deserialize)]
pub struct P2PConvertObject {
    // Unique scope of the class within the audience.
    scope: String,
    // Tenant audience the class belongs to.
    audience: String,
    // Id of the already-existing event room to attach.
    event_room_id: Uuid,
    // Id of the already-existing conference room to attach.
    conference_room_id: Uuid,
    // Optional free-form tags stored with the class.
    tags: Option<serde_json::Value>,
    // Arbitrary key-value properties; empty map when omitted.
    #[serde(default)]
    properties: KeyValueProperties,
}
/// Convert a pre-existing pair of event/conference rooms into a p2p class:
/// authorize `convert` on `["classrooms"]`, insert the class row, then push
/// the new classroom id to the event/conference services.
pub async fn convert(
    Extension(ctx): Extension<Arc<dyn AppContext>>,
    AccountIdExtractor(account_id): AccountIdExtractor,
    Json(body): Json<P2PConvertObject>,
) -> AppResult {
    let object = AuthzObject::new(&["classrooms"]).into();
    ctx.authz()
        .authorize(
            body.audience.clone(),
            account_id.clone(),
            object,
            "convert".into(),
        )
        .await
        .measure()?;
    let query = crate::db::class::P2PInsertQuery::new(
        body.scope,
        body.audience,
        body.conference_room_id,
        body.event_room_id,
    );
    // Attach tags only when provided.
    let query = if let Some(tags) = body.tags {
        query.tags(tags)
    } else {
        query
    };
    let query = query.properties(body.properties);
    let p2p = {
        let mut conn = ctx
            .get_conn()
            .await
            .error(AppErrorKind::DbConnAcquisitionFailed)?;
        query
            .execute(&mut conn)
            .await
            // Fixed: the previous context, "Failed to find recording", was a
            // copy-paste leftover — this statement inserts a p2p class.
            .context("Failed to insert p2p")
            .error(AppErrorKind::DbQueryFailed)?
    };
    // Notify both services that their rooms now belong to this classroom.
    crate::app::services::update_classroom_id(
        ctx.as_ref(),
        p2p.id(),
        p2p.event_room_id(),
        p2p.conference_room_id(),
    )
    .await
    .error(AppErrorKind::MqttRequestFailed)?;
    let body = serde_json::to_string(&p2p)
        .context("Failed to serialize p2p")
        .error(AppErrorKind::SerializationFailed)?;
    let response = Response::builder()
        .status(201)
        .body(Body::from(body))
        .unwrap();
    Ok(response)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{db::class::P2PReadQuery, test_helpers::prelude::*};
    use mockall::predicate as pred;
    use uuid::Uuid;
    // Happy path: authorized account creates a p2p class; the row must be
    // readable back by (audience, scope).
    #[tokio::test]
    async fn create_p2p() {
        let agent = TestAgent::new("web", "user1", USR_AUDIENCE);
        let mut authz = TestAuthz::new();
        authz.allow(agent.account_id(), vec!["classrooms"], "create");
        let mut state = TestState::new(authz).await;
        let event_room_id = Uuid::new_v4();
        let conference_room_id = Uuid::new_v4();
        create_p2p_mocks(&mut state, event_room_id, conference_room_id);
        let scope = random_string();
        let state = Arc::new(state);
        let body = P2PCreatePayload {
            scope: scope.clone(),
            audience: USR_AUDIENCE.to_string(),
            tags: None,
            properties: KeyValueProperties::default(),
            whiteboard: true,
        };
        let r = do_create(state.as_ref(), agent.account_id(), body).await;
        r.expect("Failed to create p2p");
        // Assert DB changes.
        let mut conn = state.get_conn().await.expect("Failed to get conn");
        P2PReadQuery::by_scope(USR_AUDIENCE, &scope)
            .execute(&mut conn)
            .await
            .expect("Failed to fetch p2p")
            .expect("p2p not found");
    }
    // Without an authz rule, do_create must fail before touching the DB.
    #[tokio::test]
    async fn create_p2p_unauthorized() {
        let agent = TestAgent::new("web", "user1", USR_AUDIENCE);
        let state = TestState::new(TestAuthz::new()).await;
        let scope = random_string();
        let state = Arc::new(state);
        let body = P2PCreatePayload {
            scope: scope.clone(),
            audience: USR_AUDIENCE.to_string(),
            tags: None,
            properties: KeyValueProperties::default(),
            whiteboard: true,
        };
        do_create(state.as_ref(), agent.account_id(), body)
            .await
            .expect_err("Unexpectedly succeeded");
    }
    // Custom key-value properties must round-trip through create + read.
    #[tokio::test]
    async fn create_p2p_with_properties() {
        let agent = TestAgent::new("web", "user1", USR_AUDIENCE);
        let mut authz = TestAuthz::new();
        authz.allow(agent.account_id(), vec!["classrooms"], "create");
        let mut state = TestState::new(authz).await;
        let event_room_id = Uuid::new_v4();
        let conference_room_id = Uuid::new_v4();
        create_p2p_mocks(&mut state, event_room_id, conference_room_id);
        let scope = random_string();
        let mut properties: KeyValueProperties = serde_json::Map::new().into();
        properties.insert("is_adult".into(), true.into());
        let state = Arc::new(state);
        let body = P2PCreatePayload {
            scope: scope.clone(),
            audience: USR_AUDIENCE.to_string(),
            tags: None,
            properties: properties.clone(),
            whiteboard: true,
        };
        let r = do_create(state.as_ref(), agent.account_id(), body).await;
        r.expect("Failed to create p2p");
        // Assert DB changes.
        let mut conn = state.get_conn().await.expect("Failed to get conn");
        let new_p2p = P2PReadQuery::by_scope(USR_AUDIENCE, &scope)
            .execute(&mut conn)
            .await
            .expect("Failed to fetch p2p")
            .expect("P2P not found");
        assert_eq!(*new_p2p.properties(), properties);
    }
    // Stub out every event/conference client call that do_create performs:
    // room creation, whiteboard creation, and the classroom-id updates.
    fn create_p2p_mocks(state: &mut TestState, event_room_id: Uuid, conference_room_id: Uuid) {
        state
            .event_client_mock()
            .expect_create_room()
            .with(
                pred::always(),
                pred::always(),
                pred::always(),
                pred::always(),
                pred::always(),
            )
            .returning(move |_, _, _, _, _| Ok(event_room_id));
        state
            .event_client_mock()
            .expect_create_whiteboard()
            .with(pred::eq(event_room_id))
            .returning(move |_room_id| Ok(()));
        state
            .event_client_mock()
            .expect_update_room()
            .with(pred::eq(event_room_id), pred::always())
            .returning(move |_room_id, _| Ok(()));
        state
            .conference_client_mock()
            .expect_create_room()
            .withf(move |_, _, _, _, _, _| true)
            .returning(move |_, _, _, _, _, _| Ok(conference_room_id));
        state
            .conference_client_mock()
            .expect_update_room()
            .with(pred::eq(conference_room_id), pred::always())
            .returning(move |_room_id, _| Ok(()));
    }
}
|
use proconio::{input, marker::Usize1};
fn main() {
    // Problem input: permutation `p` (1-indexed in the input, converted to
    // 0-indexed by `Usize1`), per-square scores `c`, and move budget `k`.
    input! {
        n: usize,
        k: usize,
        p: [Usize1; n],
        c: [i64; n],
    }
    // Try every starting square and keep the best achievable score.
    let mut r = calc(&p, &c, k, p[0]);
    for i in 1..n {
        r = r.max(calc(&p, &c, k, p[i]));
    }
    println!("{}", r);
}
/// Best cumulative score reachable with at most `k` moves starting from
/// square `i`, following permutation `p` and collecting `c[j]` on each step.
///
/// Generalized: `p` and `c` are taken as slices instead of `&Vec<_>`, so
/// arrays and sub-slices work too; existing `&Vec` call sites still compile
/// via deref coercion.
fn calc(p: &[usize], c: &[i64], k: usize, i: usize) -> i64 {
    let mut j = p[i];
    // `cum` tracks (moves taken, cumulative score); `v` is the best seen.
    let mut cum = (1, c[j]);
    let mut v = cum.1;
    // Walk the cycle once, or until the move budget runs out.
    while cum.0 < k && j != i {
        j = p[j];
        cum.0 += 1;
        cum.1 += c[j];
        v = v.max(cum.1);
    }
    // If a whole cycle fits in the budget and its total score is positive,
    // account for as many extra full loops as possible, then walk the rest.
    if cum.0 < k && cum.1 > 0 {
        let l = k / cum.0;
        if l > 1 {
            cum.0 *= l - 1;
            cum.1 *= l as i64 - 1;
            v = v.max(cum.1);
        }
        while cum.0 < k {
            j = p[j];
            cum.0 += 1;
            cum.1 += c[j];
            v = v.max(cum.1);
        }
    }
    v
}
|
// Copyright (c) 2018, ilammy
//
// Licensed under the Apache License, Version 2.0 (see LICENSE in the
// root directory). This file may be copied, distributed, and modified
// only in accordance with the terms specified by the license.
extern crate exonum;
extern crate exonum_legislation as legislation;
#[macro_use]
extern crate exonum_testkit;
use exonum::crypto;
use exonum_testkit::{TestKit, TestKitBuilder};
use legislation::{
schema::{Act, BodyOfLaw, Clause}, service::LegalService,
transactions::{TxCreateAct, TxModifyAct, TxRepealAct},
};
/// Build a single-validator testkit with the legislation service attached.
fn init_testkit() -> TestKit {
    let builder = TestKitBuilder::validator().with_service(LegalService);
    builder.create()
}
#[test]
fn test_create_act() {
    // A single TxCreateAct stores the act with its clause intact.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![TxCreateAct::new(
        1,
        "Test Act 1 Please Ignore",
        vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
        &key
    ),]);
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    let clauses = act.clauses();
    assert_eq!(clauses.len(), 1);
    assert_eq!(clauses[0].id(), 1);
    assert_eq!(clauses[0].body(), "Test Clause 1.1 Please Ignore");
}
#[test]
fn test_create_act_duplicate() {
    // Two byte-identical create transactions in one block: the duplicate
    // must not error out or change the stored act.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key,
        ),
    ]);
    // Duplicate transactions are silently skipped
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    let clauses = act.clauses();
    assert_eq!(clauses.len(), 1);
    assert_eq!(clauses[0].id(), 1);
    assert_eq!(clauses[0].body(), "Test Clause 1.1 Please Ignore");
}
#[test]
fn test_create_act_repeated() {
    // A second create for an already-existing act id (with different
    // content) must leave the originally stored act untouched.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![
                Clause::new(1, "Test Clause 1.1 Please Ignore"),
                Clause::new(2, "Test Clause 1.2 Please Ignore"),
            ],
            &key,
        ),
    ]);
    // Second transaction will be ignored, the act will not be updated
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    let clauses = act.clauses();
    assert_eq!(clauses.len(), 1);
    assert_eq!(clauses[0].id(), 1);
    assert_eq!(clauses[0].body(), "Test Clause 1.1 Please Ignore");
}
#[test]
fn test_repeal_act() {
    // Repealing with the correct revision (0) keeps the act but flags it.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxRepealAct::new(1, 0, &key),
    ]);
    // Act will be still stored, but marked as repealed.
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    assert_eq!(act.repealed(), true);
}
#[test]
fn test_repeal_act_missing() {
    // Repealing an id that was never created must be a no-op.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![TxRepealAct::new(404, 0, &key),]);
    // Missing acts cannot be repealed, no phantom acts are created.
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&404)
    };
    assert!(act.is_none());
}
#[test]
fn test_repeal_act_invalid_revision() {
    // A repeal carrying a stale/incorrect revision (10 vs actual 0) must be
    // rejected, leaving the act active.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxRepealAct::new(1, 10, &key),
    ]);
    // Act will not be repealed as the repeal transaction has incorrect revision.
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    assert_eq!(act.repealed(), false);
}
#[test]
fn test_modify_act() {
    // A modification carrying the act's current revision (0) must be applied.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxModifyAct::new(
            1,
            0,
            vec![
                Clause::new(1, "Test Clause 1.1 Please Ignore"),
                Clause::new(2, "Test Clause 1.2 Please Ignore"),
            ],
            &key,
        ),
    ]);
    // The modification has the valid revision (0), so it is applied:
    // the act now holds both clauses (the assertions below verify this).
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    let clauses = act.clauses();
    assert_eq!(clauses.len(), 2);
    assert_eq!(clauses[0].id(), 1);
    assert_eq!(clauses[0].body(), "Test Clause 1.1 Please Ignore");
    assert_eq!(clauses[1].id(), 2);
    assert_eq!(clauses[1].body(), "Test Clause 1.2 Please Ignore");
}
#[test]
fn test_modify_act_repeated() {
    // Two modifications with consecutive valid revisions (0, then 1) must
    // both be applied in order.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxModifyAct::new(
            1,
            0,
            vec![
                Clause::new(1, "Test Clause 1.1 Please Ignore"),
                Clause::new(2, "Test Clause 1.2 Please Ignore"),
            ],
            &key,
        ),
        TxModifyAct::new(
            1,
            1,
            vec![
                Clause::new(1, "Test Clause 1.1 Please Ignore"),
                Clause::new(2, "Test Clause 1.2 Please Ignore"),
                Clause::new(3, "Test Clause 1.3 Please Ignore"),
            ],
            &key,
        ),
    ]);
    // Both modifications were applied: the act ends up with three clauses
    // (the assertions below verify the final state).
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    let clauses = act.clauses();
    assert_eq!(clauses.len(), 3);
    assert_eq!(clauses[0].id(), 1);
    assert_eq!(clauses[0].body(), "Test Clause 1.1 Please Ignore");
    assert_eq!(clauses[1].id(), 2);
    assert_eq!(clauses[1].body(), "Test Clause 1.2 Please Ignore");
    assert_eq!(clauses[2].id(), 3);
    assert_eq!(clauses[2].body(), "Test Clause 1.3 Please Ignore");
}
#[test]
// NOTE(review): "reoredered" is a typo for "reordered" in the test name;
// kept as-is to preserve the test's identity.
fn test_modify_act_reoredered() {
    // A modification with a future revision (1) arrives before one with the
    // current revision (0): only the valid-revision one must be applied.
    let mut testkit = init_testkit();
    let (_, key) = crypto::gen_keypair();
    testkit.create_block_with_transactions(txvec![
        TxCreateAct::new(
            1,
            "Test Act 1 Please Ignore",
            vec![Clause::new(1, "Test Clause 1.1 Please Ignore")],
            &key
        ),
        TxModifyAct::new(
            1,
            1,
            vec![
                Clause::new(1, "Test Clause 1.1 Please Ignore"),
                Clause::new(2, "Test Clause 1.2 Please Ignore"),
                Clause::new(3, "Test Clause 1.3 Please Ignore"),
            ],
            &key,
        ),
        TxModifyAct::new(
            1,
            0,
            vec![
                Clause::new(1, "Test Clause 1.1 Please Ignore"),
                Clause::new(2, "Test Clause 1.2 Please Ignore"),
            ],
            &key,
        ),
    ]);
    // Only the second modification (with valid revision) will be applied
    let act = {
        let snapshot = testkit.snapshot();
        BodyOfLaw::new(&snapshot).act(&1).expect("expected act 1")
    };
    assert_eq!(act.id(), 1);
    assert_eq!(act.name(), "Test Act 1 Please Ignore");
    let clauses = act.clauses();
    assert_eq!(clauses.len(), 2);
    assert_eq!(clauses[0].id(), 1);
    assert_eq!(clauses[0].body(), "Test Clause 1.1 Please Ignore");
    assert_eq!(clauses[1].id(), 2);
    assert_eq!(clauses[1].body(), "Test Clause 1.2 Please Ignore");
}
|
//mod Optimizer;
//mod Loss;
pub use self::sequential::Sequential;
mod sequential;
use af::{Array};
use na::DMat;
use layer::Layer;
use optimizer::Optimizer;
/// Common interface for trainable neural-network containers
/// (implemented by `Sequential`, re-exported from this module).
pub trait Model {
    /// Builds a model driven by `optimizer` using the loss function named `loss`.
    fn new(optimizer: Box<Optimizer>, loss: &'static str) -> Self;
    /// Trains on `input`/`target` in batches of `batch_size`.
    /// Returns a `Vec<f32>` and a `DMat<f32>` — presumably per-batch losses and
    /// final predictions; not verifiable from this trait alone, confirm in `Sequential`.
    fn fit(&mut self, input: &mut DMat<f32>, target: &mut DMat<f32>
           , batch_size: u64, shuffle: bool, verbose: bool) -> (Vec<f32>, DMat<f32>);
    /// Propagates `activation` through the model, returning the output array.
    fn forward(&mut self, activation: &Array) -> Array;
    /// Computes the loss for `prediction` vs `target` and updates internal state
    /// for the backward pass; returns the scalar loss.
    fn backward(&mut self, prediction: &Array, target: &Array) -> f32;
    /// Appends a layer to the model.
    fn add(&mut self, layer: Box<Layer>);
    /// Selects the compute device (e.g. GPU id) used by subsequent operations.
    fn set_device(&mut self, device_id: i32);
    /// Prints a human-readable summary of the model.
    fn info(&self);
}
|
use serde::Deserialize;
#[derive(Deserialize)]
pub struct Case {
pub data: String,
pub lat: f64,
pub long: f64,
pub tipo: String
} |
use std::sync::Arc;
use common::error::Error;
use common::result::Result;
use crate::domain::user::{Email, Password, PasswordHasher, UserId, UserRepository, Username};
/// Domain service for user-account operations (availability checks,
/// password changes). Dependencies are injected as shared trait objects.
pub struct UserService {
    /// Repository used to look up and persist users.
    user_repo: Arc<dyn UserRepository>,
    /// Hasher used to verify and generate password hashes.
    password_hasher: Arc<dyn PasswordHasher>,
}
impl UserService {
    /// Builds the service from its repository and password-hasher dependencies.
    pub fn new(
        user_repo: Arc<dyn UserRepository>,
        password_hasher: Arc<dyn PasswordHasher>,
    ) -> Self {
        UserService {
            user_repo,
            password_hasher,
        }
    }

    /// Checks whether both `username` and `email` are free to register.
    ///
    /// Returns `Ok(true)` when neither is taken. Otherwise returns a single
    /// "identity"/"invalid" error whose context lists every taken field, so
    /// the caller sees all conflicts at once rather than just the first.
    /// Invalid username/email formats propagate as errors via `?`.
    pub async fn available(&self, username: &str, email: &str) -> Result<bool> {
        let mut err = Error::new("identity", "invalid");
        // A successful lookup means the username is already in use.
        if self
            .user_repo
            .find_by_username(&Username::new(username)?)
            .await
            .is_ok()
        {
            err.add_context("username", "not_available");
        }
        // Likewise for the email address.
        if self
            .user_repo
            .find_by_email(&Email::new(email)?)
            .await
            .is_ok()
        {
            err.add_context("email", "not_available");
        }
        if err.has_context() {
            return Err(err);
        }
        Ok(true)
    }

    /// Changes a user's password after verifying the old one.
    ///
    /// Fails when: old and new passwords are equal, the user cannot be
    /// loaded, the stored password is missing, the old password does not
    /// match, or the new password fails validation/hashing.
    pub async fn change_password(
        &self,
        user_id: &UserId,
        old_password: &str,
        new_password: &str,
    ) -> Result<()> {
        if old_password == new_password {
            return Err(Error::new("passwords", "are_the_same"));
        }
        let mut user = self.user_repo.find_by_id(user_id).await?;
        // Defensive check that the repository returned the requested user.
        if user.base().id() != user_id {
            return Err(Error::new("user", "unauthorized"));
        }
        // Users without a stored password (e.g. external identities —
        // TODO confirm) cannot change it here.
        let user_password = match user.identity().password() {
            Some(password) => password.value(),
            None => return Err(Error::new("password", "unavailable")),
        };
        if !self.password_hasher.compare(user_password, old_password) {
            return Err(Error::new("password", "invalid"));
        }
        let hashed_password = self.generate_password(new_password)?;
        let password = Password::new(&hashed_password)?;
        user.set_password(password)?;
        self.user_repo.save(&mut user).await?;
        Ok(())
    }

    /// Validates `plain_password` (currently: minimum 8 characters) and
    /// returns its hash.
    pub fn generate_password(&self, plain_password: &str) -> Result<String> {
        // TODO: improve validation
        if plain_password.len() < 8 {
            return Err(Error::new("password", "too_short"));
        }
        self.password_hasher.hash(plain_password)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::mocks;

    /// `available` must reject any username/email pair where either part is taken.
    #[tokio::test]
    async fn check_availability() {
        let c = mocks::container();
        let serv = c.user_serv();
        // Persist a known user so its username and email are occupied.
        let mut user = mocks::user1();
        c.user_repo().save(&mut user).await.unwrap();
        // Both taken.
        assert!(serv
            .available(
                user.identity().username().value(),
                user.identity().email().value()
            )
            .await
            .is_err());
        // Username taken, email free.
        assert!(serv
            .available(user.identity().username().value(), "new@email.com")
            .await
            .is_err());
        // Username free, email taken.
        assert!(serv
            .available("new-user", user.identity().email().value())
            .await
            .is_err());
        // Both free.
        assert!(serv.available("new-user", "new@email.com").await.is_ok());
    }

    /// Walks every failure branch of `change_password`, then the success path.
    #[tokio::test]
    async fn change_password() {
        let c = mocks::container();
        let serv = c.user_serv();
        let mut user = mocks::user1();
        c.user_repo().save(&mut user).await.unwrap();
        // Id that does not belong to the stored user — lookup should fail.
        assert!(serv
            .change_password(
                &UserId::new("#invalid-id").unwrap(),
                "P@asswd!",
                "new-password"
            )
            .await
            .is_err());
        // New password too short (< 8 chars).
        assert!(serv
            .change_password(&user.base().id(), "P@asswd!", "123")
            .await
            .is_err());
        // Wrong old password.
        assert!(serv
            .change_password(&user.base().id(), "invalid-password", "New_P@asswd!")
            .await
            .is_err());
        // Correct old password and valid new one.
        assert!(serv
            .change_password(&user.base().id(), "P@asswd!", "New_P@asswd!")
            .await
            .is_ok());
    }

    /// `generate_password` enforces the minimum length before hashing.
    #[test]
    fn generate_password() {
        let c = mocks::container();
        let serv = c.user_serv();
        assert!(serv.generate_password("123").is_err());
        assert!(serv.generate_password("abc123").is_err());
        assert!(serv.generate_password("P@asswd!").is_ok());
    }
}
|
/// Warp filters for the push API.
pub mod filter {
    use super::handler;
    use crate::http::api_handler::api::ApiHandler;
    use crate::log::kflog;
    use std::sync::Arc;
    use warp::Filter;

    /// Builds the `POST /push` route: deserializes a JSON body, injects the
    /// logger and the shared `ApiHandler`, and dispatches to [`handler::push`].
    pub fn new_api(
        logger: kflog::Logger,
        api_handler: Arc<ApiHandler>,
    ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
        // Expression tail instead of an explicit `return` (idiomatic Rust).
        warp::path!("push")
            .and(warp::post())
            .and(warp::body::json())
            .and(with_logger(logger))
            .and(with_api_handler(api_handler))
            .and_then(handler::push)
    }

    /// Clones the shared `ApiHandler` into each request's filter chain.
    fn with_api_handler(
        handler: Arc<ApiHandler>,
    ) -> impl Filter<Extract = (Arc<ApiHandler>,), Error = std::convert::Infallible> + Clone {
        warp::any().map(move || handler.clone())
    }

    /// Clones the logger into each request's filter chain.
    fn with_logger(
        logger: kflog::Logger,
    ) -> impl Filter<Extract = (kflog::Logger,), Error = std::convert::Infallible> + Clone {
        warp::any().map(move || logger.clone())
    }
}
/// Request handlers for the push API.
mod handler {
    use crate::http::api_handler::api::{requests, ApiHandler};
    use crate::log::kflog;
    use std::convert::Infallible;
    use std::sync::Arc;
    use std::time::SystemTime;
    use uuid::Uuid;
    use warp::Reply;

    lazy_static::lazy_static! {
        /// Request-duration histogram, labelled by HTTP status code and
        /// logical method ("push/sync" vs "push/async").
        static ref REQUEST_DURATION: prometheus::HistogramVec = prometheus::register_histogram_vec!(
            "http_requests_duration",
            "Duration of HTTP requests",
            &["code", "method"],
            prometheus::exponential_buckets(5.0, 2.0, 5).unwrap()
        ).unwrap();
    }

    /// Fresh UUIDv4 string used to correlate log lines for one request.
    fn generate_request_id() -> String {
        Uuid::new_v4().to_string()
    }

    /// Handles `POST /push`: forwards the request to `ApiHandler`, replies
    /// with the handler's JSON result (500 when its status is not "ok"),
    /// and records duration metrics plus a structured log line.
    pub async fn push(
        req: requests::PushRequest,
        logger: kflog::Logger,
        handler: Arc<ApiHandler>,
    ) -> Result<impl Reply, Infallible> {
        let request_id = generate_request_id();
        let start = SystemTime::now();
        // NOTE(a.petrukhin): sharding is based on uuid from request.
        // It leads to allocations.
        // let request_id_cloned = request_id.clone();
        // Remember the sync flag before `req` is moved into handle_push.
        let is_sync_request = req.wait_for_send;
        let push_result = handler.handle_push(req).await;
        // Elapsed wall time in milliseconds; 0.0 if the clock went backwards.
        let passed = match SystemTime::now().duration_since(start) {
            Err(e) => {
                slog::warn!(logger,
                    "got error when tried to get duration_since";
                    "error" => e.to_string());
                0.0
            }
            Ok(psd) => (psd.as_micros() as f64) / 1000.0,
        };
        let json = warp::reply::json(&push_result);
        let status_code = if push_result.status != "ok" {
            warp::http::StatusCode::INTERNAL_SERVER_ERROR
        } else {
            warp::http::StatusCode::OK
        };
        // `wait_for_send == Some(true)` marks a synchronous push in metrics.
        let method = if is_sync_request.unwrap_or(false) {
            "push/sync"
        } else {
            "push/async"
        };
        REQUEST_DURATION
            .with_label_values(&[status_code.as_u16().to_string().as_str(), method])
            .observe(passed);
        // TODO(shmel1k): add errors to log.
        slog::info!(
            logger,
            "proceeded_request";
            "request_id" => request_id,
            "passed" => (passed).to_string() + "ms",
        );
        Ok(warp::reply::with_status(json, status_code))
    }
}
|
use std::cell::RefCell;
use std::fmt;
use std::path::PathBuf;
use std::rc::{Rc, Weak};
use anyhow::{Context, Result};
use serde::export::fmt::Debug;
use serde::export::Formatter;
use crate::cfg::error::CfgError;
use crate::cfg::global::GlobalProjectSetupCfg;
use crate::cfg::LocalSetupCfg;
use crate::env_file;
use crate::env_file::{path_from_env_name, Env};
/// Collection behaviour for configuration types that own a list of setups.
pub trait SetupsCfg {
    type Setup: SetupCfg;

    /// Adds `setup` unless one with the same name already exists (names are
    /// unique within the collection).
    fn add_setup(&mut self, setup: Self::Setup) {
        if self.get_setup(setup.name()).is_none() {
            // `push` replaces the original `append(&mut vec![...])`, which
            // allocated a throwaway Vec for a single element.
            self.get_setups()
                .borrow_mut()
                .push(Rc::new(RefCell::new(setup)))
        }
    }

    /// Removes every setup whose name equals `name`.
    fn remove_by_name_setup(&mut self, name: &String) {
        self.get_setups().borrow_mut().retain(|setup| {
            let setup = setup.borrow();
            setup.name() != name
        });
    }

    /// Looks up a setup by name, returning a new strong reference to it.
    fn get_setup(&self, name: &String) -> Option<Rc<RefCell<Self::Setup>>> {
        self.get_setups()
            .borrow()
            .iter()
            .find(|setup| setup.borrow().name() == name)
            .map(Rc::clone)
    }

    /// Shared handle to the underlying list of setups.
    fn get_setups(&self) -> Rc<RefCell<Vec<Rc<RefCell<Self::Setup>>>>>;
}
/// Minimal interface every setup entry exposes: a readable and writable name.
pub trait SetupCfg {
    /// The setup's name — used as its identity inside a `SetupsCfg` collection.
    fn name(&self) -> &String;
    /// Overwrites the setup's name.
    fn set_name(&mut self, name: String);
}
/// A view over a setup that may exist in the local cfg, the global cfg, or both.
///
/// Holds only `Weak` references so it never keeps the underlying cfg alive.
#[derive(Clone)]
pub struct Setup {
    /// Path of the local configuration file this setup came from, if any.
    local_cfg_file: Option<PathBuf>,
    /// Weak handle to the local setup section, if loaded.
    local_setup: Weak<RefCell<LocalSetupCfg>>,
    /// Weak handle to the global project setup section, if loaded.
    global_setup: Weak<RefCell<GlobalProjectSetupCfg>>,
}
impl Setup {
    /// Creates an empty `Setup` bound to no local or global configuration.
    pub fn new() -> Self {
        Self {
            local_cfg_file: None,
            local_setup: Weak::default(),
            global_setup: Weak::default(),
        }
    }

    /// Path of the local cfg file, or an error when this setup has none.
    pub fn local_cfg_file(&self) -> Result<&PathBuf> {
        self.local_cfg_file.as_ref().context("no local cfg file")
    }

    /// Directory containing the local cfg file.
    pub fn local_cfg_dir(&self) -> Result<PathBuf> {
        let local_cfg_file = self.local_cfg_file()?;
        let local_cfg_dir = local_cfg_file
            .parent()
            .context(format!("can not reach parent of {:?}", local_cfg_file))?;
        Ok(local_cfg_dir.to_path_buf())
    }

    /// Path of the run file declared by the local setup, resolved against the
    /// local cfg directory.
    pub fn local_cfg_run_file(&self) -> Result<PathBuf> {
        let local_cfg_dir = self.local_cfg_dir()?;
        let local_setup = self.local_setup().context("local_setup not found")?;
        let local_setup = local_setup.borrow();
        let run_file = local_cfg_dir.join(local_setup.file());
        Ok(run_file)
    }

    /// Loads and parses the env file named `env_name`.
    pub fn env(&self, env_name: &String) -> Result<Env> {
        let env_file = self.env_file(env_name)?;
        let env = Env::from_file_reader(env_file)?;
        Ok(env)
    }

    /// Resolves the on-disk path of the env named `env_name`.
    ///
    /// An env may live in the private or the public directory; it is an error
    /// for it to exist in both, or in neither.
    pub fn env_file(&self, env_name: &String) -> Result<PathBuf> {
        match (self.envs_private_dir(), self.envs_public_dir()) {
            (Ok(private_dir), Ok(public_dir)) => {
                let public_env = path_from_env_name(&public_dir, env_name);
                let private_env = path_from_env_name(&private_dir, env_name);
                if private_env.exists() && public_env.exists() {
                    Err(CfgError::EnvExistTwice(env_name.clone(), public_env, private_env).into())
                } else if private_env.exists() {
                    Ok(private_env)
                } else if public_env.exists() {
                    Ok(public_env)
                } else {
                    Err(CfgError::EnvNotFound(env_name.clone()).into())
                }
            }
            (Ok(private_dir), Err(_)) => {
                let private_env = path_from_env_name(&private_dir, env_name);
                if private_env.exists() {
                    Ok(private_env)
                } else {
                    Err(CfgError::EnvNotFound(env_name.clone()).into())
                }
            }
            (Err(_), Ok(public_dir)) => {
                let public_env = path_from_env_name(&public_dir, env_name);
                if public_env.exists() {
                    Ok(public_env)
                } else {
                    Err(CfgError::EnvNotFound(env_name.clone()).into())
                }
            }
            // Neither directory resolved: surface the public-dir error.
            (_, Err(err)) => Err(err),
        }
    }

    /// All envs, public first, then private. Each entry is an independent
    /// parse result so one broken file does not hide the rest.
    pub fn envs(&self) -> Vec<Result<Env>> {
        let mut env = vec![];
        env.append(&mut self.envs_public());
        env.append(&mut self.envs_private());
        env
    }

    /// Public env directory: the local setup's `public_env_dir` resolved
    /// against the local cfg file's parent directory.
    pub fn envs_public_dir(&self) -> Result<PathBuf> {
        if let (Some(local_setup), Some(file)) = (&self.local_setup(), &self.local_cfg_file) {
            if let Some(root_dir) = file.parent() {
                let local_setup = local_setup.borrow();
                return Ok(root_dir.join(local_setup.public_env_dir()));
            }
        }
        bail!(CfgError::PublicEnvDirNotFound(self.name()?))
    }

    /// Parses every env file in the public dir; empty when the dir is unknown.
    pub fn envs_public(&self) -> Vec<Result<Env>> {
        if let Ok(abs_path) = self.envs_public_dir() {
            let env = env_file::read_dir(&abs_path);
            return env
                .into_iter()
                .map(|env| env.context("fail to parse public env"))
                .collect();
        }
        vec![]
    }

    /// Private env directory, taken from the global setup when configured.
    pub fn envs_private_dir(&self) -> Result<PathBuf> {
        if let Some(global_setup) = self.global_setup() {
            if let Ok(dir) = global_setup.borrow().private_env_dir() {
                return Ok(dir.clone());
            }
        }
        bail!(CfgError::PrivateEnvDirNotFound(self.name()?))
    }

    /// Parses every env file in the private dir; empty when the dir is unknown.
    pub fn envs_private(&self) -> Vec<Result<Env>> {
        if let Ok(private_dir) = self.envs_private_dir() {
            let env = env_file::read_dir(&private_dir);
            return env
                .into_iter()
                .map(|env| env.context("fail to parse private env"))
                .collect();
        }
        vec![]
    }

    /// The setup's name, preferring the local cfg over the global one.
    pub fn name(&self) -> Result<String> {
        if let Some(local_setup) = self.local_setup() {
            return Ok(local_setup.borrow().name().clone());
        }
        if let Some(global_setup) = self.global_setup() {
            return Ok(global_setup.borrow().name().clone());
        }
        Err(anyhow!(
            "fail to get name : local and global cfg are not sets"
        ))
    }

    /// Renames the setup in every cfg (local and/or global) where it exists;
    /// errors only when neither cfg is reachable.
    pub fn rename(&self, name: &String) -> Result<()> {
        // Renamed from `bool` — shadowing the primitive type name was confusing.
        let mut renamed = false;
        if let Some(local_setup) = self.local_setup() {
            local_setup.borrow_mut().set_name(name.to_owned());
            renamed = true;
        }
        if let Some(global_setup) = self.global_setup() {
            global_setup.borrow_mut().set_name(name.clone());
            renamed = true;
        }
        if !renamed {
            return Err(anyhow!(
                "fail to rename : local and global cfg are not sets"
            ));
        }
        Ok(())
    }

    /// Builds a fully-bound `Setup` from matching local and global sections.
    /// The two sections must agree on the setup name.
    pub fn new_fill(
        local_file: &PathBuf,
        local_setup: &Rc<RefCell<LocalSetupCfg>>,
        global_setup: &Rc<RefCell<GlobalProjectSetupCfg>>,
    ) -> Result<Self> {
        if local_setup.borrow().name() == global_setup.borrow().name() {
            Ok(Self {
                local_cfg_file: Some(local_file.to_owned()),
                local_setup: Rc::downgrade(local_setup),
                global_setup: Rc::downgrade(global_setup),
            })
        } else {
            Err(anyhow!(
                "local setup and global setup must has the same name"
            ))
        }
    }

    /// Upgrades the weak local-setup handle, `None` when it was dropped.
    pub fn local_setup(&self) -> Option<Rc<RefCell<LocalSetupCfg>>> {
        self.local_setup.upgrade()
    }

    /// Upgrades the weak global-setup handle, `None` when it was dropped.
    pub fn global_setup(&self) -> Option<Rc<RefCell<GlobalProjectSetupCfg>>> {
        self.global_setup.upgrade()
    }
}
impl Debug for Setup {
    /// Pretty-prints the (optional) local and global sections, one per line.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        writeln!(f, "local : {:#?}", self.local_setup())?;
        writeln!(f, "global : {:#?}", self.global_setup())
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// Fetches the reservation catalog for `subscription_id`, filtered by
/// `reserved_resource_type` and optionally by `location`.
///
/// Generated client (AutoRust): builds the GET request, attaches a bearer
/// token when a credential is configured, and deserializes the response.
pub async fn get_catalog(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    reserved_resource_type: &str,
    location: Option<&str>,
) -> std::result::Result<Vec<Catalog>, get_catalog::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Capacity/catalogs",
        &operation_config.base_path, subscription_id
    );
    let mut req_builder = client.get(uri_str);
    // Authenticate only when a token credential is configured.
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get_catalog::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.query(&[("reservedResourceType", reserved_resource_type)]);
    if let Some(location) = location {
        req_builder = req_builder.query(&[("location", location)]);
    }
    let req = req_builder.build().context(get_catalog::BuildRequestError)?;
    let rsp = client.execute(req).await.context(get_catalog::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(get_catalog::ResponseBytesError)?;
            let rsp_value: Vec<Catalog> = serde_json::from_slice(&body).context(get_catalog::DeserializeError { body })?;
            Ok(rsp_value)
        }
        // Any non-200 status is deserialized as a service error payload.
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(get_catalog::ResponseBytesError)?;
            let rsp_value: Error = serde_json::from_slice(&body).context(get_catalog::DeserializeError { body })?;
            get_catalog::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Error type for [`get_catalog`]: one variant per stage of the request.
pub mod get_catalog {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::Error },
        BuildRequestError { source: reqwest::Error },
        ExecuteRequestError { source: reqwest::Error },
        ResponseBytesError { source: reqwest::Error },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Fetches the reservations applied to `subscription_id`.
///
/// Generated client (AutoRust): same request pipeline as [`get_catalog`].
pub async fn get_applied_reservation_list(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<AppliedReservations, get_applied_reservation_list::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Capacity/appliedReservations",
        &operation_config.base_path, subscription_id
    );
    let mut req_builder = client.get(uri_str);
    // Authenticate only when a token credential is configured.
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get_applied_reservation_list::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(get_applied_reservation_list::BuildRequestError)?;
    let rsp = client
        .execute(req)
        .await
        .context(get_applied_reservation_list::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(get_applied_reservation_list::ResponseBytesError)?;
            let rsp_value: AppliedReservations =
                serde_json::from_slice(&body).context(get_applied_reservation_list::DeserializeError { body })?;
            Ok(rsp_value)
        }
        // Any non-200 status is deserialized as a service error payload.
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(get_applied_reservation_list::ResponseBytesError)?;
            let rsp_value: Error = serde_json::from_slice(&body).context(get_applied_reservation_list::DeserializeError { body })?;
            get_applied_reservation_list::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Error type for [`get_applied_reservation_list`]: one variant per stage of
/// the request.
pub mod get_applied_reservation_list {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::Error },
        BuildRequestError { source: reqwest::Error },
        ExecuteRequestError { source: reqwest::Error },
        ResponseBytesError { source: reqwest::Error },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// Operations on reservation orders (generated by AutoRust).
pub mod reservation_order {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};

    /// Lists all reservation orders visible to the configured tenant.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<ReservationOrderList, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Capacity/reservationOrders", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Authenticate only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ReservationOrderList = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any non-200 status is deserialized as a service error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: Error = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list`]: one variant per stage of the request.
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Fetches a single reservation order by id.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        reservation_order_id: &str,
    ) -> std::result::Result<ReservationOrderResponse, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}",
            &operation_config.base_path, reservation_order_id
        );
        let mut req_builder = client.get(uri_str);
        // Authenticate only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ReservationOrderResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any non-200 status is deserialized as a service error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Error = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`]: one variant per stage of the request.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod reservation {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
    /// Splits a reservation within `reservation_order_id` per `body`.
    ///
    /// 200 carries the resulting reservations; 202 is surfaced as
    /// `Accepted202` with no body.
    pub async fn split(
        operation_config: &crate::OperationConfig,
        reservation_order_id: &str,
        body: &SplitRequest,
    ) -> std::result::Result<split::Response, split::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}/split",
            &operation_config.base_path, reservation_order_id
        );
        let mut req_builder = client.post(uri_str);
        // Authenticate only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(split::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(body);
        let req = req_builder.build().context(split::BuildRequestError)?;
        let rsp = client.execute(req).await.context(split::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(split::ResponseBytesError)?;
                let rsp_value: Vec<ReservationResponse> = serde_json::from_slice(&body).context(split::DeserializeError { body })?;
                Ok(split::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(split::Response::Accepted202),
            // Any other status is deserialized as a service error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(split::ResponseBytesError)?;
                let rsp_value: Error = serde_json::from_slice(&body).context(split::DeserializeError { body })?;
                split::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`split`].
    pub mod split {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(Vec<ReservationResponse>),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Merges reservations within `reservation_order_id` per `body`.
    ///
    /// 200 carries the resulting reservations; 202 is surfaced as
    /// `Accepted202` with no body.
    pub async fn merge(
        operation_config: &crate::OperationConfig,
        reservation_order_id: &str,
        body: &MergeRequest,
    ) -> std::result::Result<merge::Response, merge::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}/merge",
            &operation_config.base_path, reservation_order_id
        );
        let mut req_builder = client.post(uri_str);
        // Authenticate only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(merge::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(body);
        let req = req_builder.build().context(merge::BuildRequestError)?;
        let rsp = client.execute(req).await.context(merge::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(merge::ResponseBytesError)?;
                let rsp_value: Vec<ReservationResponse> = serde_json::from_slice(&body).context(merge::DeserializeError { body })?;
                Ok(merge::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(merge::Response::Accepted202),
            // Any other status is deserialized as a service error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(merge::ResponseBytesError)?;
                let rsp_value: Error = serde_json::from_slice(&body).context(merge::DeserializeError { body })?;
                merge::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`merge`].
    pub mod merge {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(Vec<ReservationResponse>),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Lists all reservations inside `reservation_order_id`.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        reservation_order_id: &str,
    ) -> std::result::Result<ReservationList, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations",
            &operation_config.base_path, reservation_order_id
        );
        let mut req_builder = client.get(uri_str);
        // Authenticate only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ReservationList = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any non-200 status is deserialized as a service error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: Error = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list`]: one variant per stage of the request.
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// Fetches a single reservation by id within `reservation_order_id`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        reservation_id: &str,
        reservation_order_id: &str,
    ) -> std::result::Result<ReservationResponse, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}",
            &operation_config.base_path, reservation_order_id, reservation_id
        );
        let mut req_builder = client.get(uri_str);
        // Authenticate only when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ReservationResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any non-200 status is deserialized as a service error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Error = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`]: one variant per stage of the request.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
pub async fn update(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
parameters: &Patch,
) -> std::result::Result<update::Response, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}",
&operation_config.base_path, reservation_order_id, reservation_id
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: ReservationResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(update::Response::Ok200(rsp_value))
}
StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: Error = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Response and error types for [`update`].
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug)]
    pub enum Response {
        /// The reservation was updated synchronously; carries the new state.
        Ok200(ReservationResponse),
        /// The service accepted the change for asynchronous processing.
        Accepted202,
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        /// The service replied with an unexpected status; carries the decoded error body.
        DefaultResponse { status_code: StatusCode, value: models::Error },
        /// Building the HTTP request failed.
        BuildRequestError { source: reqwest::Error },
        /// Sending the request failed.
        ExecuteRequestError { source: reqwest::Error },
        /// Reading the response body failed.
        ResponseBytesError { source: reqwest::Error },
        /// Decoding the response JSON failed; `body` holds the raw bytes for diagnostics.
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        /// Acquiring the bearer token failed.
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
pub async fn list_revisions(
operation_config: &crate::OperationConfig,
reservation_id: &str,
reservation_order_id: &str,
) -> std::result::Result<ReservationList, list_revisions::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/revisions",
&operation_config.base_path, reservation_order_id, reservation_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_revisions::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_revisions::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_revisions::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_revisions::ResponseBytesError)?;
let rsp_value: ReservationList = serde_json::from_slice(&body).context(list_revisions::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_revisions::ResponseBytesError)?;
let rsp_value: Error = serde_json::from_slice(&body).context(list_revisions::DeserializeError { body })?;
list_revisions::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Error type for [`list_revisions`].
pub mod list_revisions {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        /// The service replied with a non-200 status; carries the decoded error body.
        DefaultResponse { status_code: StatusCode, value: models::Error },
        /// Building the HTTP request failed.
        BuildRequestError { source: reqwest::Error },
        /// Sending the request failed.
        ExecuteRequestError { source: reqwest::Error },
        /// Reading the response body failed.
        ResponseBytesError { source: reqwest::Error },
        /// Decoding the response JSON failed; `body` holds the raw bytes for diagnostics.
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        /// Acquiring the bearer token failed.
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
}
/// Operations metadata client for the Microsoft.Capacity resource provider.
pub mod operation {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists the operations exposed by the Microsoft.Capacity provider.
    ///
    /// Issues a GET against `{base_path}/providers/Microsoft.Capacity/operations`
    /// and deserializes an `OperationList` on HTTP 200; any other status becomes
    /// `list::Error::DefaultResponse` carrying the decoded error body.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationList, list::Error> {
        let http_client = &operation_config.client;
        let url = format!("{}/providers/Microsoft.Capacity/operations", &operation_config.base_path,);
        let mut request = http_client.get(&url);
        // Attach a bearer token only when a credential is configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            request = request.bearer_auth(token.token.secret());
        }
        request = request.query(&[("api-version", &operation_config.api_version)]);
        let built = request.build().context(list::BuildRequestError)?;
        let response = http_client.execute(built).await.context(list::ExecuteRequestError)?;
        match response.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = response.bytes().await.context(list::ResponseBytesError)?;
                let operations: OperationList =
                    serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(operations)
            }
            status_code => {
                let body: bytes::Bytes = response.bytes().await.context(list::ResponseBytesError)?;
                let value: Error = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse { status_code, value }.fail()
            }
        }
    }
    /// Error type for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::Error },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
|
pub mod crop;
pub mod plant;
pub mod task;
use rusqlite::{Connection, NO_PARAMS};
/// Owns the application's SQLite connection; construct via [`DataMgr::new`],
/// which also ensures the schema exists.
pub struct DataMgr {
    // Underlying rusqlite connection, exposed so query modules can use it directly.
    pub conn: Connection,
}
impl DataMgr {
    /// Opens (or creates) the SQLite database at path `db` and ensures the
    /// `plants`, `crops` and `tasks` tables exist. The statements use
    /// `CREATE TABLE IF NOT EXISTS`, so calling this on every startup is safe.
    ///
    /// # Panics
    /// Panics with a step-specific message if the database cannot be opened or
    /// a schema statement fails. (Previously these were bare `unwrap()`s that
    /// gave no hint which step failed.)
    pub fn new(db: String) -> Self {
        let data = DataMgr {
            conn: Connection::open(db).expect("failed to open database"),
        };
        data.conn
            .execute(
                "CREATE TABLE IF NOT EXISTS plants (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    days_to_maturity INTEGER,
                    notes TEXT,
                    zones BLOB,
                    plant_type TEXT NOT NULL
                );",
                NO_PARAMS,
            )
            .expect("failed to create plants table");
        data.conn
            .execute(
                "CREATE TABLE IF NOT EXISTS crops (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    num_plants INTEGER NOT NULL,
                    date_planted TEXT NOT NULL,
                    plant_id INTEGER NOT NULL,
                    FOREIGN KEY(plant_id) REFERENCES plants(id));",
                NO_PARAMS,
            )
            .expect("failed to create crops table");
        data.conn
            .execute(
                "CREATE TABLE IF NOT EXISTS tasks (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    text TEXT NOT NULL,
                    is_completed BOOL NOT NULL,
                    completed_date TEXT);",
                NO_PARAMS,
            )
            .expect("failed to create tasks table");
        data
    }
}
|
/// An enum to represent all characters in the AlchemicalSymbols block.
/// Covers code points U+1F700 through U+1F773; one variant per character.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum AlchemicalSymbols {
    /// \u{1f700}: '🜀'
    AlchemicalSymbolForQuintessence,
    /// \u{1f701}: '🜁'
    AlchemicalSymbolForAir,
    /// \u{1f702}: '🜂'
    AlchemicalSymbolForFire,
    /// \u{1f703}: '🜃'
    AlchemicalSymbolForEarth,
    /// \u{1f704}: '🜄'
    AlchemicalSymbolForWater,
    /// \u{1f705}: '🜅'
    AlchemicalSymbolForAquafortis,
    /// \u{1f706}: '🜆'
    AlchemicalSymbolForAquaRegia,
    /// \u{1f707}: '🜇'
    AlchemicalSymbolForAquaRegiaDash2,
    /// \u{1f708}: '🜈'
    AlchemicalSymbolForAquaVitae,
    /// \u{1f709}: '🜉'
    AlchemicalSymbolForAquaVitaeDash2,
    /// \u{1f70a}: '🜊'
    AlchemicalSymbolForVinegar,
    /// \u{1f70b}: '🜋'
    AlchemicalSymbolForVinegarDash2,
    /// \u{1f70c}: '🜌'
    AlchemicalSymbolForVinegarDash3,
    /// \u{1f70d}: '🜍'
    AlchemicalSymbolForSulfur,
    /// \u{1f70e}: '🜎'
    AlchemicalSymbolForPhilosophersSulfur,
    /// \u{1f70f}: '🜏'
    AlchemicalSymbolForBlackSulfur,
    /// \u{1f710}: '🜐'
    AlchemicalSymbolForMercurySublimate,
    /// \u{1f711}: '🜑'
    AlchemicalSymbolForMercurySublimateDash2,
    /// \u{1f712}: '🜒'
    AlchemicalSymbolForMercurySublimateDash3,
    /// \u{1f713}: '🜓'
    AlchemicalSymbolForCinnabar,
    /// \u{1f714}: '🜔'
    AlchemicalSymbolForSalt,
    /// \u{1f715}: '🜕'
    AlchemicalSymbolForNitre,
    /// \u{1f716}: '🜖'
    AlchemicalSymbolForVitriol,
    /// \u{1f717}: '🜗'
    AlchemicalSymbolForVitriolDash2,
    /// \u{1f718}: '🜘'
    AlchemicalSymbolForRockSalt,
    /// \u{1f719}: '🜙'
    AlchemicalSymbolForRockSaltDash2,
    /// \u{1f71a}: '🜚'
    AlchemicalSymbolForGold,
    /// \u{1f71b}: '🜛'
    AlchemicalSymbolForSilver,
    /// \u{1f71c}: '🜜'
    AlchemicalSymbolForIronOre,
    /// \u{1f71d}: '🜝'
    AlchemicalSymbolForIronOreDash2,
    /// \u{1f71e}: '🜞'
    AlchemicalSymbolForCrocusOfIron,
    /// \u{1f71f}: '🜟'
    AlchemicalSymbolForRegulusOfIron,
    /// \u{1f720}: '🜠'
    AlchemicalSymbolForCopperOre,
    /// \u{1f721}: '🜡'
    AlchemicalSymbolForIronDashCopperOre,
    /// \u{1f722}: '🜢'
    AlchemicalSymbolForSublimateOfCopper,
    /// \u{1f723}: '🜣'
    AlchemicalSymbolForCrocusOfCopper,
    /// \u{1f724}: '🜤'
    AlchemicalSymbolForCrocusOfCopperDash2,
    /// \u{1f725}: '🜥'
    AlchemicalSymbolForCopperAntimoniate,
    /// \u{1f726}: '🜦'
    AlchemicalSymbolForSaltOfCopperAntimoniate,
    /// \u{1f727}: '🜧'
    AlchemicalSymbolForSublimateOfSaltOfCopper,
    /// \u{1f728}: '🜨'
    AlchemicalSymbolForVerdigris,
    /// \u{1f729}: '🜩'
    AlchemicalSymbolForTinOre,
    /// \u{1f72a}: '🜪'
    AlchemicalSymbolForLeadOre,
    /// \u{1f72b}: '🜫'
    AlchemicalSymbolForAntimonyOre,
    /// \u{1f72c}: '🜬'
    AlchemicalSymbolForSublimateOfAntimony,
    /// \u{1f72d}: '🜭'
    AlchemicalSymbolForSaltOfAntimony,
    /// \u{1f72e}: '🜮'
    AlchemicalSymbolForSublimateOfSaltOfAntimony,
    /// \u{1f72f}: '🜯'
    AlchemicalSymbolForVinegarOfAntimony,
    /// \u{1f730}: '🜰'
    AlchemicalSymbolForRegulusOfAntimony,
    /// \u{1f731}: '🜱'
    AlchemicalSymbolForRegulusOfAntimonyDash2,
    /// \u{1f732}: '🜲'
    AlchemicalSymbolForRegulus,
    /// \u{1f733}: '🜳'
    AlchemicalSymbolForRegulusDash2,
    /// \u{1f734}: '🜴'
    AlchemicalSymbolForRegulusDash3,
    /// \u{1f735}: '🜵'
    AlchemicalSymbolForRegulusDash4,
    /// \u{1f736}: '🜶'
    AlchemicalSymbolForAlkali,
    /// \u{1f737}: '🜷'
    AlchemicalSymbolForAlkaliDash2,
    /// \u{1f738}: '🜸'
    AlchemicalSymbolForMarcasite,
    /// \u{1f739}: '🜹'
    AlchemicalSymbolForSalDashAmmoniac,
    /// \u{1f73a}: '🜺'
    AlchemicalSymbolForArsenic,
    /// \u{1f73b}: '🜻'
    AlchemicalSymbolForRealgar,
    /// \u{1f73c}: '🜼'
    AlchemicalSymbolForRealgarDash2,
    /// \u{1f73d}: '🜽'
    AlchemicalSymbolForAuripigment,
    /// \u{1f73e}: '🜾'
    AlchemicalSymbolForBismuthOre,
    /// \u{1f73f}: '🜿'
    AlchemicalSymbolForTartar,
    /// \u{1f740}: '🝀'
    AlchemicalSymbolForTartarDash2,
    /// \u{1f741}: '🝁'
    AlchemicalSymbolForQuickLime,
    /// \u{1f742}: '🝂'
    AlchemicalSymbolForBorax,
    /// \u{1f743}: '🝃'
    AlchemicalSymbolForBoraxDash2,
    /// \u{1f744}: '🝄'
    AlchemicalSymbolForBoraxDash3,
    /// \u{1f745}: '🝅'
    AlchemicalSymbolForAlum,
    /// \u{1f746}: '🝆'
    AlchemicalSymbolForOil,
    /// \u{1f747}: '🝇'
    AlchemicalSymbolForSpirit,
    /// \u{1f748}: '🝈'
    AlchemicalSymbolForTincture,
    /// \u{1f749}: '🝉'
    AlchemicalSymbolForGum,
    /// \u{1f74a}: '🝊'
    AlchemicalSymbolForWax,
    /// \u{1f74b}: '🝋'
    AlchemicalSymbolForPowder,
    /// \u{1f74c}: '🝌'
    AlchemicalSymbolForCalx,
    /// \u{1f74d}: '🝍'
    AlchemicalSymbolForTutty,
    /// \u{1f74e}: '🝎'
    AlchemicalSymbolForCaputMortuum,
    /// \u{1f74f}: '🝏'
    AlchemicalSymbolForScepterOfJove,
    /// \u{1f750}: '🝐'
    AlchemicalSymbolForCaduceus,
    /// \u{1f751}: '🝑'
    AlchemicalSymbolForTrident,
    /// \u{1f752}: '🝒'
    AlchemicalSymbolForStarredTrident,
    /// \u{1f753}: '🝓'
    AlchemicalSymbolForLodestone,
    /// \u{1f754}: '🝔'
    AlchemicalSymbolForSoap,
    /// \u{1f755}: '🝕'
    AlchemicalSymbolForUrine,
    /// \u{1f756}: '🝖'
    AlchemicalSymbolForHorseDung,
    /// \u{1f757}: '🝗'
    AlchemicalSymbolForAshes,
    /// \u{1f758}: '🝘'
    AlchemicalSymbolForPotAshes,
    /// \u{1f759}: '🝙'
    AlchemicalSymbolForBrick,
    /// \u{1f75a}: '🝚'
    AlchemicalSymbolForPowderedBrick,
    /// \u{1f75b}: '🝛'
    AlchemicalSymbolForAmalgam,
    /// \u{1f75c}: '🝜'
    AlchemicalSymbolForStratumSuperStratum,
    /// \u{1f75d}: '🝝'
    AlchemicalSymbolForStratumSuperStratumDash2,
    /// \u{1f75e}: '🝞'
    AlchemicalSymbolForSublimation,
    /// \u{1f75f}: '🝟'
    AlchemicalSymbolForPrecipitate,
    /// \u{1f760}: '🝠'
    AlchemicalSymbolForDistill,
    /// \u{1f761}: '🝡'
    AlchemicalSymbolForDissolve,
    /// \u{1f762}: '🝢'
    AlchemicalSymbolForDissolveDash2,
    /// \u{1f763}: '🝣'
    AlchemicalSymbolForPurify,
    /// \u{1f764}: '🝤'
    AlchemicalSymbolForPutrefaction,
    /// \u{1f765}: '🝥'
    AlchemicalSymbolForCrucible,
    /// \u{1f766}: '🝦'
    AlchemicalSymbolForCrucibleDash2,
    /// \u{1f767}: '🝧'
    AlchemicalSymbolForCrucibleDash3,
    /// \u{1f768}: '🝨'
    AlchemicalSymbolForCrucibleDash4,
    /// \u{1f769}: '🝩'
    AlchemicalSymbolForCrucibleDash5,
    /// \u{1f76a}: '🝪'
    AlchemicalSymbolForAlembic,
    /// \u{1f76b}: '🝫'
    AlchemicalSymbolForBathOfMary,
    /// \u{1f76c}: '🝬'
    AlchemicalSymbolForBathOfVapours,
    /// \u{1f76d}: '🝭'
    AlchemicalSymbolForRetort,
    /// \u{1f76e}: '🝮'
    AlchemicalSymbolForHour,
    /// \u{1f76f}: '🝯'
    AlchemicalSymbolForNight,
    /// \u{1f770}: '🝰'
    AlchemicalSymbolForDayDashNight,
    /// \u{1f771}: '🝱'
    AlchemicalSymbolForMonth,
    /// \u{1f772}: '🝲'
    AlchemicalSymbolForHalfDram,
    /// \u{1f773}: '🝳'
    AlchemicalSymbolForHalfOunce,
}
impl Into<char> for AlchemicalSymbols {
fn into(self) -> char {
match self {
AlchemicalSymbols::AlchemicalSymbolForQuintessence => '🜀',
AlchemicalSymbols::AlchemicalSymbolForAir => '🜁',
AlchemicalSymbols::AlchemicalSymbolForFire => '🜂',
AlchemicalSymbols::AlchemicalSymbolForEarth => '🜃',
AlchemicalSymbols::AlchemicalSymbolForWater => '🜄',
AlchemicalSymbols::AlchemicalSymbolForAquafortis => '🜅',
AlchemicalSymbols::AlchemicalSymbolForAquaRegia => '🜆',
AlchemicalSymbols::AlchemicalSymbolForAquaRegiaDash2 => '🜇',
AlchemicalSymbols::AlchemicalSymbolForAquaVitae => '🜈',
AlchemicalSymbols::AlchemicalSymbolForAquaVitaeDash2 => '🜉',
AlchemicalSymbols::AlchemicalSymbolForVinegar => '🜊',
AlchemicalSymbols::AlchemicalSymbolForVinegarDash2 => '🜋',
AlchemicalSymbols::AlchemicalSymbolForVinegarDash3 => '🜌',
AlchemicalSymbols::AlchemicalSymbolForSulfur => '🜍',
AlchemicalSymbols::AlchemicalSymbolForPhilosophersSulfur => '🜎',
AlchemicalSymbols::AlchemicalSymbolForBlackSulfur => '🜏',
AlchemicalSymbols::AlchemicalSymbolForMercurySublimate => '🜐',
AlchemicalSymbols::AlchemicalSymbolForMercurySublimateDash2 => '🜑',
AlchemicalSymbols::AlchemicalSymbolForMercurySublimateDash3 => '🜒',
AlchemicalSymbols::AlchemicalSymbolForCinnabar => '🜓',
AlchemicalSymbols::AlchemicalSymbolForSalt => '🜔',
AlchemicalSymbols::AlchemicalSymbolForNitre => '🜕',
AlchemicalSymbols::AlchemicalSymbolForVitriol => '🜖',
AlchemicalSymbols::AlchemicalSymbolForVitriolDash2 => '🜗',
AlchemicalSymbols::AlchemicalSymbolForRockSalt => '🜘',
AlchemicalSymbols::AlchemicalSymbolForRockSaltDash2 => '🜙',
AlchemicalSymbols::AlchemicalSymbolForGold => '🜚',
AlchemicalSymbols::AlchemicalSymbolForSilver => '🜛',
AlchemicalSymbols::AlchemicalSymbolForIronOre => '🜜',
AlchemicalSymbols::AlchemicalSymbolForIronOreDash2 => '🜝',
AlchemicalSymbols::AlchemicalSymbolForCrocusOfIron => '🜞',
AlchemicalSymbols::AlchemicalSymbolForRegulusOfIron => '🜟',
AlchemicalSymbols::AlchemicalSymbolForCopperOre => '🜠',
AlchemicalSymbols::AlchemicalSymbolForIronDashCopperOre => '🜡',
AlchemicalSymbols::AlchemicalSymbolForSublimateOfCopper => '🜢',
AlchemicalSymbols::AlchemicalSymbolForCrocusOfCopper => '🜣',
AlchemicalSymbols::AlchemicalSymbolForCrocusOfCopperDash2 => '🜤',
AlchemicalSymbols::AlchemicalSymbolForCopperAntimoniate => '🜥',
AlchemicalSymbols::AlchemicalSymbolForSaltOfCopperAntimoniate => '🜦',
AlchemicalSymbols::AlchemicalSymbolForSublimateOfSaltOfCopper => '🜧',
AlchemicalSymbols::AlchemicalSymbolForVerdigris => '🜨',
AlchemicalSymbols::AlchemicalSymbolForTinOre => '🜩',
AlchemicalSymbols::AlchemicalSymbolForLeadOre => '🜪',
AlchemicalSymbols::AlchemicalSymbolForAntimonyOre => '🜫',
AlchemicalSymbols::AlchemicalSymbolForSublimateOfAntimony => '🜬',
AlchemicalSymbols::AlchemicalSymbolForSaltOfAntimony => '🜭',
AlchemicalSymbols::AlchemicalSymbolForSublimateOfSaltOfAntimony => '🜮',
AlchemicalSymbols::AlchemicalSymbolForVinegarOfAntimony => '🜯',
AlchemicalSymbols::AlchemicalSymbolForRegulusOfAntimony => '🜰',
AlchemicalSymbols::AlchemicalSymbolForRegulusOfAntimonyDash2 => '🜱',
AlchemicalSymbols::AlchemicalSymbolForRegulus => '🜲',
AlchemicalSymbols::AlchemicalSymbolForRegulusDash2 => '🜳',
AlchemicalSymbols::AlchemicalSymbolForRegulusDash3 => '🜴',
AlchemicalSymbols::AlchemicalSymbolForRegulusDash4 => '🜵',
AlchemicalSymbols::AlchemicalSymbolForAlkali => '🜶',
AlchemicalSymbols::AlchemicalSymbolForAlkaliDash2 => '🜷',
AlchemicalSymbols::AlchemicalSymbolForMarcasite => '🜸',
AlchemicalSymbols::AlchemicalSymbolForSalDashAmmoniac => '🜹',
AlchemicalSymbols::AlchemicalSymbolForArsenic => '🜺',
AlchemicalSymbols::AlchemicalSymbolForRealgar => '🜻',
AlchemicalSymbols::AlchemicalSymbolForRealgarDash2 => '🜼',
AlchemicalSymbols::AlchemicalSymbolForAuripigment => '🜽',
AlchemicalSymbols::AlchemicalSymbolForBismuthOre => '🜾',
AlchemicalSymbols::AlchemicalSymbolForTartar => '🜿',
AlchemicalSymbols::AlchemicalSymbolForTartarDash2 => '🝀',
AlchemicalSymbols::AlchemicalSymbolForQuickLime => '🝁',
AlchemicalSymbols::AlchemicalSymbolForBorax => '🝂',
AlchemicalSymbols::AlchemicalSymbolForBoraxDash2 => '🝃',
AlchemicalSymbols::AlchemicalSymbolForBoraxDash3 => '🝄',
AlchemicalSymbols::AlchemicalSymbolForAlum => '🝅',
AlchemicalSymbols::AlchemicalSymbolForOil => '🝆',
AlchemicalSymbols::AlchemicalSymbolForSpirit => '🝇',
AlchemicalSymbols::AlchemicalSymbolForTincture => '🝈',
AlchemicalSymbols::AlchemicalSymbolForGum => '🝉',
AlchemicalSymbols::AlchemicalSymbolForWax => '🝊',
AlchemicalSymbols::AlchemicalSymbolForPowder => '🝋',
AlchemicalSymbols::AlchemicalSymbolForCalx => '🝌',
AlchemicalSymbols::AlchemicalSymbolForTutty => '🝍',
AlchemicalSymbols::AlchemicalSymbolForCaputMortuum => '🝎',
AlchemicalSymbols::AlchemicalSymbolForScepterOfJove => '🝏',
AlchemicalSymbols::AlchemicalSymbolForCaduceus => '🝐',
AlchemicalSymbols::AlchemicalSymbolForTrident => '🝑',
AlchemicalSymbols::AlchemicalSymbolForStarredTrident => '🝒',
AlchemicalSymbols::AlchemicalSymbolForLodestone => '🝓',
AlchemicalSymbols::AlchemicalSymbolForSoap => '🝔',
AlchemicalSymbols::AlchemicalSymbolForUrine => '🝕',
AlchemicalSymbols::AlchemicalSymbolForHorseDung => '🝖',
AlchemicalSymbols::AlchemicalSymbolForAshes => '🝗',
AlchemicalSymbols::AlchemicalSymbolForPotAshes => '🝘',
AlchemicalSymbols::AlchemicalSymbolForBrick => '🝙',
AlchemicalSymbols::AlchemicalSymbolForPowderedBrick => '🝚',
AlchemicalSymbols::AlchemicalSymbolForAmalgam => '🝛',
AlchemicalSymbols::AlchemicalSymbolForStratumSuperStratum => '🝜',
AlchemicalSymbols::AlchemicalSymbolForStratumSuperStratumDash2 => '🝝',
AlchemicalSymbols::AlchemicalSymbolForSublimation => '🝞',
AlchemicalSymbols::AlchemicalSymbolForPrecipitate => '🝟',
AlchemicalSymbols::AlchemicalSymbolForDistill => '🝠',
AlchemicalSymbols::AlchemicalSymbolForDissolve => '🝡',
AlchemicalSymbols::AlchemicalSymbolForDissolveDash2 => '🝢',
AlchemicalSymbols::AlchemicalSymbolForPurify => '🝣',
AlchemicalSymbols::AlchemicalSymbolForPutrefaction => '🝤',
AlchemicalSymbols::AlchemicalSymbolForCrucible => '🝥',
AlchemicalSymbols::AlchemicalSymbolForCrucibleDash2 => '🝦',
AlchemicalSymbols::AlchemicalSymbolForCrucibleDash3 => '🝧',
AlchemicalSymbols::AlchemicalSymbolForCrucibleDash4 => '🝨',
AlchemicalSymbols::AlchemicalSymbolForCrucibleDash5 => '🝩',
AlchemicalSymbols::AlchemicalSymbolForAlembic => '🝪',
AlchemicalSymbols::AlchemicalSymbolForBathOfMary => '🝫',
AlchemicalSymbols::AlchemicalSymbolForBathOfVapours => '🝬',
AlchemicalSymbols::AlchemicalSymbolForRetort => '🝭',
AlchemicalSymbols::AlchemicalSymbolForHour => '🝮',
AlchemicalSymbols::AlchemicalSymbolForNight => '🝯',
AlchemicalSymbols::AlchemicalSymbolForDayDashNight => '🝰',
AlchemicalSymbols::AlchemicalSymbolForMonth => '🝱',
AlchemicalSymbols::AlchemicalSymbolForHalfDram => '🝲',
AlchemicalSymbols::AlchemicalSymbolForHalfOunce => '🝳',
}
}
}
impl std::convert::TryFrom<char> for AlchemicalSymbols {
    type Error = ();
    /// Maps a character back to its enum variant; returns `Err(())` for any
    /// character outside the Alchemical Symbols block.
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '🜀' => Ok(AlchemicalSymbols::AlchemicalSymbolForQuintessence),
            '🜁' => Ok(AlchemicalSymbols::AlchemicalSymbolForAir),
            '🜂' => Ok(AlchemicalSymbols::AlchemicalSymbolForFire),
            '🜃' => Ok(AlchemicalSymbols::AlchemicalSymbolForEarth),
            '🜄' => Ok(AlchemicalSymbols::AlchemicalSymbolForWater),
            '🜅' => Ok(AlchemicalSymbols::AlchemicalSymbolForAquafortis),
            '🜆' => Ok(AlchemicalSymbols::AlchemicalSymbolForAquaRegia),
            '🜇' => Ok(AlchemicalSymbols::AlchemicalSymbolForAquaRegiaDash2),
            '🜈' => Ok(AlchemicalSymbols::AlchemicalSymbolForAquaVitae),
            '🜉' => Ok(AlchemicalSymbols::AlchemicalSymbolForAquaVitaeDash2),
            '🜊' => Ok(AlchemicalSymbols::AlchemicalSymbolForVinegar),
            '🜋' => Ok(AlchemicalSymbols::AlchemicalSymbolForVinegarDash2),
            '🜌' => Ok(AlchemicalSymbols::AlchemicalSymbolForVinegarDash3),
            '🜍' => Ok(AlchemicalSymbols::AlchemicalSymbolForSulfur),
            '🜎' => Ok(AlchemicalSymbols::AlchemicalSymbolForPhilosophersSulfur),
            '🜏' => Ok(AlchemicalSymbols::AlchemicalSymbolForBlackSulfur),
            '🜐' => Ok(AlchemicalSymbols::AlchemicalSymbolForMercurySublimate),
            '🜑' => Ok(AlchemicalSymbols::AlchemicalSymbolForMercurySublimateDash2),
            '🜒' => Ok(AlchemicalSymbols::AlchemicalSymbolForMercurySublimateDash3),
            '🜓' => Ok(AlchemicalSymbols::AlchemicalSymbolForCinnabar),
            '🜔' => Ok(AlchemicalSymbols::AlchemicalSymbolForSalt),
            '🜕' => Ok(AlchemicalSymbols::AlchemicalSymbolForNitre),
            '🜖' => Ok(AlchemicalSymbols::AlchemicalSymbolForVitriol),
            '🜗' => Ok(AlchemicalSymbols::AlchemicalSymbolForVitriolDash2),
            '🜘' => Ok(AlchemicalSymbols::AlchemicalSymbolForRockSalt),
            '🜙' => Ok(AlchemicalSymbols::AlchemicalSymbolForRockSaltDash2),
            '🜚' => Ok(AlchemicalSymbols::AlchemicalSymbolForGold),
            '🜛' => Ok(AlchemicalSymbols::AlchemicalSymbolForSilver),
            '🜜' => Ok(AlchemicalSymbols::AlchemicalSymbolForIronOre),
            '🜝' => Ok(AlchemicalSymbols::AlchemicalSymbolForIronOreDash2),
            '🜞' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrocusOfIron),
            '🜟' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulusOfIron),
            '🜠' => Ok(AlchemicalSymbols::AlchemicalSymbolForCopperOre),
            '🜡' => Ok(AlchemicalSymbols::AlchemicalSymbolForIronDashCopperOre),
            '🜢' => Ok(AlchemicalSymbols::AlchemicalSymbolForSublimateOfCopper),
            '🜣' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrocusOfCopper),
            '🜤' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrocusOfCopperDash2),
            '🜥' => Ok(AlchemicalSymbols::AlchemicalSymbolForCopperAntimoniate),
            '🜦' => Ok(AlchemicalSymbols::AlchemicalSymbolForSaltOfCopperAntimoniate),
            '🜧' => Ok(AlchemicalSymbols::AlchemicalSymbolForSublimateOfSaltOfCopper),
            '🜨' => Ok(AlchemicalSymbols::AlchemicalSymbolForVerdigris),
            '🜩' => Ok(AlchemicalSymbols::AlchemicalSymbolForTinOre),
            '🜪' => Ok(AlchemicalSymbols::AlchemicalSymbolForLeadOre),
            '🜫' => Ok(AlchemicalSymbols::AlchemicalSymbolForAntimonyOre),
            '🜬' => Ok(AlchemicalSymbols::AlchemicalSymbolForSublimateOfAntimony),
            '🜭' => Ok(AlchemicalSymbols::AlchemicalSymbolForSaltOfAntimony),
            '🜮' => Ok(AlchemicalSymbols::AlchemicalSymbolForSublimateOfSaltOfAntimony),
            '🜯' => Ok(AlchemicalSymbols::AlchemicalSymbolForVinegarOfAntimony),
            '🜰' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulusOfAntimony),
            '🜱' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulusOfAntimonyDash2),
            '🜲' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulus),
            '🜳' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulusDash2),
            '🜴' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulusDash3),
            '🜵' => Ok(AlchemicalSymbols::AlchemicalSymbolForRegulusDash4),
            '🜶' => Ok(AlchemicalSymbols::AlchemicalSymbolForAlkali),
            '🜷' => Ok(AlchemicalSymbols::AlchemicalSymbolForAlkaliDash2),
            '🜸' => Ok(AlchemicalSymbols::AlchemicalSymbolForMarcasite),
            '🜹' => Ok(AlchemicalSymbols::AlchemicalSymbolForSalDashAmmoniac),
            '🜺' => Ok(AlchemicalSymbols::AlchemicalSymbolForArsenic),
            '🜻' => Ok(AlchemicalSymbols::AlchemicalSymbolForRealgar),
            '🜼' => Ok(AlchemicalSymbols::AlchemicalSymbolForRealgarDash2),
            '🜽' => Ok(AlchemicalSymbols::AlchemicalSymbolForAuripigment),
            '🜾' => Ok(AlchemicalSymbols::AlchemicalSymbolForBismuthOre),
            '🜿' => Ok(AlchemicalSymbols::AlchemicalSymbolForTartar),
            '🝀' => Ok(AlchemicalSymbols::AlchemicalSymbolForTartarDash2),
            '🝁' => Ok(AlchemicalSymbols::AlchemicalSymbolForQuickLime),
            '🝂' => Ok(AlchemicalSymbols::AlchemicalSymbolForBorax),
            '🝃' => Ok(AlchemicalSymbols::AlchemicalSymbolForBoraxDash2),
            '🝄' => Ok(AlchemicalSymbols::AlchemicalSymbolForBoraxDash3),
            '🝅' => Ok(AlchemicalSymbols::AlchemicalSymbolForAlum),
            '🝆' => Ok(AlchemicalSymbols::AlchemicalSymbolForOil),
            '🝇' => Ok(AlchemicalSymbols::AlchemicalSymbolForSpirit),
            '🝈' => Ok(AlchemicalSymbols::AlchemicalSymbolForTincture),
            '🝉' => Ok(AlchemicalSymbols::AlchemicalSymbolForGum),
            '🝊' => Ok(AlchemicalSymbols::AlchemicalSymbolForWax),
            '🝋' => Ok(AlchemicalSymbols::AlchemicalSymbolForPowder),
            '🝌' => Ok(AlchemicalSymbols::AlchemicalSymbolForCalx),
            '🝍' => Ok(AlchemicalSymbols::AlchemicalSymbolForTutty),
            '🝎' => Ok(AlchemicalSymbols::AlchemicalSymbolForCaputMortuum),
            '🝏' => Ok(AlchemicalSymbols::AlchemicalSymbolForScepterOfJove),
            '🝐' => Ok(AlchemicalSymbols::AlchemicalSymbolForCaduceus),
            '🝑' => Ok(AlchemicalSymbols::AlchemicalSymbolForTrident),
            '🝒' => Ok(AlchemicalSymbols::AlchemicalSymbolForStarredTrident),
            '🝓' => Ok(AlchemicalSymbols::AlchemicalSymbolForLodestone),
            '🝔' => Ok(AlchemicalSymbols::AlchemicalSymbolForSoap),
            '🝕' => Ok(AlchemicalSymbols::AlchemicalSymbolForUrine),
            '🝖' => Ok(AlchemicalSymbols::AlchemicalSymbolForHorseDung),
            '🝗' => Ok(AlchemicalSymbols::AlchemicalSymbolForAshes),
            '🝘' => Ok(AlchemicalSymbols::AlchemicalSymbolForPotAshes),
            '🝙' => Ok(AlchemicalSymbols::AlchemicalSymbolForBrick),
            '🝚' => Ok(AlchemicalSymbols::AlchemicalSymbolForPowderedBrick),
            '🝛' => Ok(AlchemicalSymbols::AlchemicalSymbolForAmalgam),
            '🝜' => Ok(AlchemicalSymbols::AlchemicalSymbolForStratumSuperStratum),
            '🝝' => Ok(AlchemicalSymbols::AlchemicalSymbolForStratumSuperStratumDash2),
            '🝞' => Ok(AlchemicalSymbols::AlchemicalSymbolForSublimation),
            '🝟' => Ok(AlchemicalSymbols::AlchemicalSymbolForPrecipitate),
            '🝠' => Ok(AlchemicalSymbols::AlchemicalSymbolForDistill),
            '🝡' => Ok(AlchemicalSymbols::AlchemicalSymbolForDissolve),
            '🝢' => Ok(AlchemicalSymbols::AlchemicalSymbolForDissolveDash2),
            '🝣' => Ok(AlchemicalSymbols::AlchemicalSymbolForPurify),
            '🝤' => Ok(AlchemicalSymbols::AlchemicalSymbolForPutrefaction),
            '🝥' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrucible),
            '🝦' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrucibleDash2),
            '🝧' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrucibleDash3),
            '🝨' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrucibleDash4),
            '🝩' => Ok(AlchemicalSymbols::AlchemicalSymbolForCrucibleDash5),
            '🝪' => Ok(AlchemicalSymbols::AlchemicalSymbolForAlembic),
            '🝫' => Ok(AlchemicalSymbols::AlchemicalSymbolForBathOfMary),
            '🝬' => Ok(AlchemicalSymbols::AlchemicalSymbolForBathOfVapours),
            '🝭' => Ok(AlchemicalSymbols::AlchemicalSymbolForRetort),
            '🝮' => Ok(AlchemicalSymbols::AlchemicalSymbolForHour),
            '🝯' => Ok(AlchemicalSymbols::AlchemicalSymbolForNight),
            '🝰' => Ok(AlchemicalSymbols::AlchemicalSymbolForDayDashNight),
            '🝱' => Ok(AlchemicalSymbols::AlchemicalSymbolForMonth),
            '🝲' => Ok(AlchemicalSymbols::AlchemicalSymbolForHalfDram),
            '🝳' => Ok(AlchemicalSymbols::AlchemicalSymbolForHalfOunce),
            _ => Err(()),
        }
    }
}
impl Into<u32> for AlchemicalSymbols {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
impl std::convert::TryFrom<u32> for AlchemicalSymbols {
    type Error = ();
    /// Maps a code point to its enum variant; `Err(())` when `u` is not a
    /// valid Unicode scalar value or lies outside this block.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        match char::try_from(u) {
            Ok(c) => Self::try_from(c),
            Err(_) => Err(()),
        }
    }
}
impl Iterator for AlchemicalSymbols {
    type Item = Self;
    /// Yields the variant for the next-higher code point, or `None` once the
    /// successor falls outside the block.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let code: u32 = (*self).into();
        Self::try_from(code + 1).ok()
    }
}
impl AlchemicalSymbols {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::AlchemicalSymbolForQuintessence
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        let debug_repr = std::format!("AlchemicalSymbols{:#?}", self);
        string_morph::to_sentence_case(&debug_repr)
    }
}
|
extern crate aoc2017;
use aoc2017::days::day13;
/// Solves Advent of Code 2017 day 13 and prints both part answers.
fn main() {
    let puzzle = aoc2017::read("input/day13.txt").unwrap();
    // Evaluate both parts first (left-to-right), then print.
    let (part1, part2) = (day13::part1::parse(&puzzle), day13::part2::parse(&puzzle));
    println!("Day 13 part 1 value: {}", part1);
    println!("Day 13 part 2 value: {}", part2);
}
|
use serde_json;
use std::fmt;
/// A post row loaded from the database.
///
/// NOTE(review): diesel's `Queryable` maps fields by column position, so this
/// declaration order must match the `posts` table — verify against the schema.
#[derive(Serialize, Deserialize, Queryable)]
pub struct Post {
    /// Primary key.
    pub id: i32,
    // presumably the id of the authoring account — verify against callers
    pub src: i32,
    // privacy level encoded as an integer; the meaning of each value is not visible here
    pub privacy: i32,
    /// Optional content-warning text.
    pub content_warning: Option<String>,
    /// Optional post body text.
    pub text: Option<String>,
    // arbitrary JSON describing attached images; schema not visible here
    pub image_data: Option<serde_json::Value>,
    // naive timestamp — NOTE(review): looks like this expects UTC; confirm
    pub time: chrono::NaiveDateTime
}
/// An account row; its textual form is its URL.
///
/// NOTE(review): diesel's `Queryable` maps fields by column position, so this
/// declaration order must match the `accounts` table — verify against the schema.
#[derive(Serialize, Deserialize, Queryable)]
pub struct Account {
    /// Primary key.
    pub id: i32,
    /// The account's URL, used as its display representation.
    pub url: String
}
impl fmt::Display for Account {
    // The former private `Account::to_string(&self) -> &String` shadowed the
    // `ToString::to_string` derived from this Display impl (clippy:
    // `inherent_to_string_shadow_display`). It had no other callers, so it was
    // removed and the URL is written directly.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.url)
    }
}
/// Join row linking a post to a delivery destination.
///
/// NOTE(review): presumably relates `post_id` → `dest_id`; confirm the
/// referenced tables against the schema module.
#[derive(Queryable)]
pub struct PostDest {
    /// Primary key.
    pub id: i32,
    // foreign key into posts — TODO confirm
    pub post_id: i32,
    // foreign key identifying the destination — TODO confirm
    pub dest_id: i32
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use ::libra_types::proto::*;
use ::sgtypes::proto::*;
/// Protobuf-generated types for the node API; the source file is produced by
/// the build script into `OUT_DIR` and spliced in verbatim.
pub mod node {
    include!(concat!(env!("OUT_DIR"), "/node.rs"));
}
|
use super::constants::marker::*;
use super::error::{ErrorCode, SerdeError, SerdeResult};
use super::marker::Marker;
/// Assembles 1, 2, or 4 big-endian bytes into a `usize` length value.
macro_rules! bytes_to_usize {
    // Single byte: the value itself.
    ($b8:expr) => {
        $b8 as usize
    };
    // Two bytes, big-endian ($b7 is the high byte).
    ($b7:expr, $b8:expr) => {
        ($b7 as usize) << 8 | $b8 as usize
    };
    // Four bytes, big-endian ($b5 is the high byte).
    ($b5:expr, $b6:expr, $b7:expr, $b8:expr) => {
        (($b5 as usize) << 24) | (($b6 as usize) << 16) | (($b7 as usize) << 8) | $b8 as usize
    };
}
/// Low-level byte reader used by the deserializer: supports peeking markers,
/// consuming payload bytes, and injecting "virtual" marker/value pairs.
pub trait Unpacker<'a> {
    /// Creates new instance of Unpacker trait object
    fn new(bytes: &'a [u8]) -> Self;
    /// Checks if all bytes were consumed
    fn is_done(&self) -> bool;
    /// Sets virtual marker and/or value needed for Structure deserialization
    fn set_virtual(&mut self, marker: Marker, value: Option<&'static [u8]>) -> SerdeResult<()>;
    /// Scratches peeked bytes and consumes next N bytes
    /// If virtual value was set then the virtual value is returned instead
    fn consume_bytes(&mut self, len: usize) -> SerdeResult<&'a [u8]>;
    /// Returns Nth byte from current index if it exists
    fn peek_byte_nth_ahead(&self, pos_ahead: usize) -> SerdeResult<u8>;
    /// Peek and consume marker
    fn consume_marker(&mut self) -> SerdeResult<Marker>;
    /// Peeks the next marker without consuming it
    fn peek_marker(&mut self) -> SerdeResult<Marker>;
    /// Advances past the bytes examined by the most recent peek
    /// (used by `consume_marker` after a successful `peek_marker`)
    fn scratch_peeked(&mut self);
}
impl<'a> Unpacker<'a> for ByteReader<'a> {
    /// Creates a reader positioned at the start of `bytes`, with nothing
    /// peeked and no virtual marker/value set.
    fn new(bytes: &'a [u8]) -> Self {
        Self {
            bytes,
            index: 0,
            peeked: 0,
            virtual_value: None,
            virtual_marker: None,
        }
    }
fn is_done(&self) -> bool {
self.bytes.len() == self.index
}
    /// Installs a virtual marker (and optional value) to be returned by the
    /// next peek/consume instead of reading from the input.
    /// Fails with `VirtualIllegalAssignment` if either is already set.
    fn set_virtual(&mut self, marker: Marker, value: Option<&'static [u8]>) -> SerdeResult<()> {
        // Ensure that call to .set_virtual never overwrites existing virtual values
        if self.virtual_marker.is_some() || self.virtual_value.is_some() {
            return Err(SerdeError::create(ErrorCode::VirtualIllegalAssignment));
        }
        self.virtual_marker = Some(marker);
        self.virtual_value = value;
        Ok(())
    }
    /// Called only when additional data needs to be consumed after consuming marker.
    ///
    /// If a virtual value is installed, it is taken and returned instead of
    /// reading from the input (the index does not move); otherwise returns the
    /// next `len` bytes and advances, or `UnexpectedEndOfBytes` if too few remain.
    fn consume_bytes(&mut self, len: usize) -> SerdeResult<&'a [u8]> {
        if self.virtual_value.is_some() {
            // The virtual marker must already have been consumed by this point.
            assert!(self.virtual_marker.is_none());
            return Ok(self.virtual_value.take().expect("Virtual Value to exist"));
        }
        if self.index + len > self.bytes.len() {
            return Err(SerdeError::create(ErrorCode::UnexpectedEndOfBytes));
        }
        let bytes = &self.bytes[self.index..self.index + len];
        self.index += len;
        Ok(bytes)
    }
fn peek_byte_nth_ahead(&self, ahead: usize) -> SerdeResult<u8> {
self.bytes
.get(self.index + ahead)
.copied()
.ok_or_else(|| SerdeError::create(ErrorCode::UnexpectedEndOfBytes))
}
fn consume_marker(&mut self) -> SerdeResult<Marker> {
let marker = self.peek_marker()?;
self.scratch_peeked();
Ok(marker)
}
fn peek_marker(&mut self) -> SerdeResult<Marker> {
if self.virtual_marker.is_some() {
assert!(
self.peeked == 0,
"ByteReader.peeked must be equal to 0 when setting virtual marker"
);
return Ok(self
.virtual_marker
.clone()
.expect("Virtual marker to exist"));
}
let marker_byte = self.peek_byte_nth_ahead(0)?;
let marker = match marker_byte {
TINY_STRING..=TINY_STRING_MAX => {
self.peeked = 1;
Marker::String((marker_byte - TINY_STRING) as usize)
}
STRING_8 => {
let len = self.peek_byte_nth_ahead(1)? as usize;
self.peeked = 2;
Marker::String(len)
}
STRING_16 => {
let b8 = self.peek_byte_nth_ahead(2)?;
let b7 = self.bytes[self.index + 1];
self.peeked = 3;
Marker::String(bytes_to_usize!(b7, b8))
}
STRING_32 => {
let b8 = self.peek_byte_nth_ahead(4)?;
let b7 = self.bytes[self.index + 3];
let b6 = self.bytes[self.index + 2];
let b5 = self.bytes[self.index + 1];
self.peeked = 5;
Marker::String(bytes_to_usize!(b5, b6, b7, b8))
}
TINY_MAP..=TINY_MAP_MAX => {
self.peeked = 1;
Marker::Map((marker_byte - TINY_MAP) as usize)
}
MAP_8 => {
let len = self.peek_byte_nth_ahead(1)? as usize;
self.peeked = 2;
Marker::Map(len)
}
MAP_16 => {
let b8 = self.peek_byte_nth_ahead(self.index + 2)?;
let b7 = self.bytes[self.index + 1];
self.peeked = 3;
Marker::Map(bytes_to_usize!(b7, b8))
}
MAP_32 => {
let b8 = self.peek_byte_nth_ahead(4)?;
let b7 = self.bytes[self.index + 3];
let b6 = self.bytes[self.index + 2];
let b5 = self.bytes[self.index + 1];
self.peeked = 5;
Marker::Map(bytes_to_usize!(b5, b6, b7, b8))
}
MAP_STREAM => {
self.peeked = 1;
Marker::Map(std::usize::MAX)
}
TINY_STRUCT..=TINY_STRUCT_MAX => {
self.peeked = 1;
Marker::Struct((marker_byte - TINY_STRUCT) as usize)
}
STRUCT_8 => {
let len = self.peek_byte_nth_ahead(1)? as usize;
self.peeked = 2;
Marker::Struct(len)
}
STRUCT_16 => {
let b8 = self.peek_byte_nth_ahead(2)?;
let b7 = self.bytes[self.index + 1];
self.peeked = 3;
Marker::Struct(bytes_to_usize!(b7, b8))
}
TINY_LIST..=TINY_LIST_MAX => {
self.peeked = 1;
Marker::List((marker_byte - TINY_LIST) as usize)
}
LIST_8 => {
let len = self.peek_byte_nth_ahead(1)? as usize;
self.peeked = 2;
Marker::List(len)
}
LIST_16 => {
let b8 = self.peek_byte_nth_ahead(2)?;
let b7 = self.bytes[self.index + 1];
self.peeked = 3;
Marker::List(bytes_to_usize!(b7, b8))
}
LIST_32 => {
let b8 = self.peek_byte_nth_ahead(4)?;
let b7 = self.bytes[self.index + 3];
let b6 = self.bytes[self.index + 2];
let b5 = self.bytes[self.index + 1];
self.peeked = 5;
Marker::List(bytes_to_usize!(b5, b6, b7, b8))
}
LIST_STREAM => {
self.peeked = 1;
Marker::List(std::usize::MAX)
}
NULL => {
self.peeked = 1;
Marker::Null
}
TRUE => {
self.peeked = 1;
Marker::True
}
FALSE => {
self.peeked = 1;
Marker::False
}
INT_8 => {
let b1 = self.peek_byte_nth_ahead(1)?;
self.peeked = 2;
Marker::I64(i64::from(i8::from_be_bytes([b1])))
}
INT_16 => {
let b2 = self.peek_byte_nth_ahead(2)?;
let b1 = self.bytes[self.index + 1];
self.peeked = 3;
let n = i16::from_be_bytes([b1, b2]);
Marker::I64(i64::from(n))
}
INT_32 => {
let b4 = self.peek_byte_nth_ahead(4)?;
let b3 = self.bytes[self.index + 3];
let b2 = self.bytes[self.index + 2];
let b1 = self.bytes[self.index + 1];
self.peeked = 5;
let n = i32::from_be_bytes([b1, b2, b3, b4]);
Marker::I64(i64::from(n))
}
INT_64 => {
let b8 = self.peek_byte_nth_ahead(8)?;
let b7 = self.bytes[self.index + 7];
let b6 = self.bytes[self.index + 6];
let b5 = self.bytes[self.index + 5];
let b4 = self.bytes[self.index + 4];
let b3 = self.bytes[self.index + 3];
let b2 = self.bytes[self.index + 2];
let b1 = self.bytes[self.index + 1];
self.peeked = 9;
Marker::I64(i64::from_be_bytes([b1, b2, b3, b4, b5, b6, b7, b8]))
}
FLOAT_64 => {
let b8 = self.peek_byte_nth_ahead(8)?;
let b7 = self.bytes[self.index + 7];
let b6 = self.bytes[self.index + 6];
let b5 = self.bytes[self.index + 5];
let b4 = self.bytes[self.index + 4];
let b3 = self.bytes[self.index + 3];
let b2 = self.bytes[self.index + 2];
let b1 = self.bytes[self.index + 1];
self.peeked = 9;
let n = f64::from_bits(u64::from_be_bytes([b1, b2, b3, b4, b5, b6, b7, b8]));
Marker::F64(n)
}
END_OF_STREAM => {
self.peeked = 1;
Marker::EOS
}
BYTES_8 => {
let len = self.peek_byte_nth_ahead(1)? as usize;
self.peeked = 2;
Marker::Bytes(len)
}
BYTES_16 => {
let b8 = self.peek_byte_nth_ahead(2)?;
let b7 = self.bytes[self.index + 1];
self.peeked = 3;
Marker::Bytes(bytes_to_usize!(b7, b8))
}
BYTES_32 => {
let b8 = self.peek_byte_nth_ahead(4)?;
let b7 = self.bytes[self.index + 3];
let b6 = self.bytes[self.index + 2];
let b5 = self.bytes[self.index + 1];
self.peeked = 5;
Marker::Bytes(bytes_to_usize!(b5, b6, b7, b8))
}
0..=0x7F | 0xF0..=0xFF => {
self.peeked = 1;
Marker::I64(i8::from_be_bytes([marker_byte]).into())
}
b => {
return Err(SerdeError::create(format!(
"Peek error: byte {:x} is not a marker",
b
)))
}
};
Ok(marker)
}
fn scratch_peeked(&mut self) {
if self.virtual_marker.is_some() {
assert!(self.peeked == 0);
self.virtual_marker = None;
} else {
assert!(self.peeked != 0);
self.index += self.peeked;
self.peeked = 0;
}
}
}
/// Cursor over an input byte buffer with single-marker lookahead and support
/// for injecting "virtual" marker/value pairs (used during Structure
/// deserialization, see `Unpacker::set_virtual`).
#[derive(Debug)]
pub(crate) struct ByteReader<'a> {
    pub bytes: &'a [u8],
    // Position of the next unconsumed byte.
    pub index: usize,
    // Byte width of the marker decoded by the last `peek_marker`, not yet
    // committed by `scratch_peeked`.
    pub peeked: usize,
    // Injected value returned by the next `consume_bytes` call, if set.
    pub virtual_value: Option<&'static [u8]>,
    // Injected marker returned by the next `peek_marker` call, if set.
    pub virtual_marker: Option<Marker>,
}
#[cfg(test)]
mod tests {
    use super::*;
    // Asserts that peeking the first marker of each byte sequence yields the
    // expected Marker value.
    macro_rules! assert_try_peek {
        ($($bytes:expr => $marker:expr),* $(,)*) => {
            $(assert_eq!($marker, ByteReader::new(&$bytes).peek_marker().unwrap());)*
        };
    }
    #[test]
    fn test_set_virtual() {
        // set_virtual may only be called again after the previously injected
        // marker/value pair has been fully consumed; a second call while one
        // is still pending must fail.
        let mut reader = ByteReader::new(&[TINY_STRING]);
        assert!(reader.set_virtual(Marker::I64(0), Some(&[10])).is_ok());
        assert!(reader.consume_marker().is_ok());
        assert!(reader.consume_bytes(0).is_ok());
        assert!(reader.set_virtual(Marker::Null, None).is_ok());
        assert!(reader.set_virtual(Marker::Null, None).is_err());
    }
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_peek_marker() {
        assert_try_peek! {
            [TINY_MAP] => Marker::Map(0),
            [TINY_MAP + 10] => Marker::Map(10),
            [MAP_8, 20] => Marker::Map(20),
            [MAP_16, 1, 0] => Marker::Map(256),
            [MAP_32, 0, 1, 0, 0] => Marker::Map(256 * 256),
            [MAP_STREAM] => Marker::Map(std::usize::MAX),
            [TINY_STRING] => Marker::String(0),
            [TINY_STRING + 10] => Marker::String(10),
            [STRING_8, 20] => Marker::String(20),
            [STRING_16, 1, 0] => Marker::String(256),
            [STRING_32, 0, 1, 0, 0] => Marker::String(256 * 256),
            [TINY_STRUCT] => Marker::Struct(0),
            [STRUCT_8, 20] => Marker::Struct(20),
            [STRUCT_16, 1, 0] => Marker::Struct(256),
            [TINY_LIST + 5] => Marker::List(5),
            [LIST_8, 100] => Marker::List(100),
            [LIST_16, 1, 0] => Marker::List(256),
            [LIST_32, 0, 1, 0, 0] => Marker::List(256 * 256),
            [LIST_STREAM] => Marker::List(std::usize::MAX),
            [BYTES_8, 1] => Marker::Bytes(1),
            [BYTES_16, 1, 0] => Marker::Bytes(256),
            [BYTES_32, 0, 1, 0, 0] => Marker::Bytes(256 * 256),
            [NULL] => Marker::Null,
            [TRUE] => Marker::True,
            [FALSE] => Marker::False,
            [END_OF_STREAM] => Marker::EOS,
            [INT_8, 10] => Marker::I64(10),
            [INT_8, 255] => Marker::I64(-1),
            [INT_16, 1, 0] => Marker::I64(256),
            [INT_16, 255, 255] => Marker::I64(-1),
            [INT_32, 0, 1, 0, 0] => Marker::I64(256 * 256),
            [INT_32, 255, 255, 255, 255] => Marker::I64(-1),
            [INT_64, 0, 0, 1, 0, 0, 0, 0, 0] => Marker::I64(256 * 256 * 256 * 256 * 256),
            [INT_64, 255, 255, 255, 255, 255, 255, 255, 255] => Marker::I64(-1),
            [FLOAT_64, 0, 0, 0, 0, 0, 0, 0, 0] => Marker::F64(0.0),
        };
    }
}
|
pub(crate) mod event;
pub(crate) mod payload;
pub(crate) mod spec_version;
pub(crate) mod json;
pub use event::Event;
pub use payload::{Payload, PayloadMapper, PayloadReader, PayloadWriter, PayloadResult};
pub use spec_version::SpecVersion;
pub use json::*;
|
// Copyright (C) 2021 Subspace Labs, Inc.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Primitives for Spartan-based PoR.
#![cfg_attr(not(feature = "std"), no_std)]
/// Spartan PoR implementation; only available with the `std` feature.
#[cfg(feature = "std")]
pub mod spartan;
/// The length of the Randomness.
pub const RANDOMNESS_LENGTH: usize = 32;
/// Randomness value.
pub type Randomness = [u8; RANDOMNESS_LENGTH];
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
use std::fs::File;
use std::io::BufReader;
use std::io::Read;
use std::collections::HashMap;
use std::fmt;
// Defines a C-like enum that serde-(de)serializes as its numeric
// discriminant rather than as a variant-name string, e.g.
// `BuildStatus::Success` <-> `0`. Unknown numbers fail deserialization.
macro_rules! enum_number {
    ($name:ident { $($variant:ident = $value:expr, )* }) => {
        #[derive(Clone, Copy, Debug, Eq, PartialEq)]
        pub enum $name {
            $($variant = $value,)*
        }
        impl ::serde::Serialize for $name {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
                where
                    S: ::serde::Serializer,
            {
                // Serialize the enum as a u64.
                serializer.serialize_u64(*self as u64)
            }
        }
        impl<'de> ::serde::Deserialize<'de> for $name {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
                where
                    D: ::serde::Deserializer<'de>,
            {
                struct Visitor;
                impl<'de> ::serde::de::Visitor<'de> for Visitor {
                    type Value = $name;
                    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                        formatter.write_str("positive integer")
                    }
                    fn visit_u64<E>(self, value: u64) -> Result<$name, E>
                        where
                            E: ::serde::de::Error,
                    {
                        // Rust does not come with a simple way of converting a
                        // number to an enum, so use a big `match`.
                        match value {
                            $( $value => Ok($name::$variant), )*
                            _ => Err(E::custom(
                                    format!("unknown {} value: {}",
                                            stringify!($name), value))),
                        }
                    }
                }
                // Deserialize the enum from a u64.
                deserializer.deserialize_u64(Visitor)
            }
        }
    }
}
/// One build row from a Hydra evaluation dump; string fields borrow from the
/// JSON input buffer.
#[derive(Deserialize, Debug)]
struct Build<'a> {
    // None when Hydra has not recorded a final status for the build.
    buildstatus: Option<BuildStatus>,
    job: &'a str,
    system: &'a str,
    nixname: &'a str,
    id: u64,
}
// Hydra build status codes, serialized as their numeric value.
// Note: value 5 is intentionally absent (see the schema linked below).
enum_number!(BuildStatus {
    // See https://github.com/NixOS/hydra/blob/master/src/sql/hydra.sql#L202-L215
    Success = 0,
    BuildFailed = 1,
    DependencyFailed = 2,
    HostFailureAbort = 3,
    Cancelled = 4,
    // Obsolete
    FailureWithOutput = 6,
    TimeOut = 7,
    CachedFailure = 8,
    UnsupportedSystem = 9,
    LogLimitExceeded = 10,
    OutputLimitExceeded = 11,
    NotDeterministic = 12,
});
/// Reads a Hydra evaluation dump from `latest-eval-builds`, groups builds by
/// package, prints how many packages built on all tracked platforms, and
/// reports packages that succeeded on x86_64-linux but not on aarch64-linux.
fn main() {
    let file = File::open("latest-eval-builds").unwrap();
    let mut buf_reader = BufReader::new(file);
    let mut contents = String::new();
    buf_reader.read_to_string(&mut contents).unwrap();
    // Drop platforms we do not track.
    let builds: Vec<Build> = serde_json::from_str::<Vec<Build>>(&contents)
        .unwrap()
        .into_iter()
        .filter(|build| build.system != "x86_64-darwin" && build.system != "i686-linux")
        .collect();
    // Group builds by package name.
    // BUG FIX: the previous fold inserted an EMPTY Vec the first time a
    // nixname was seen and only pushed on subsequent sightings, silently
    // dropping the first build of every package (and mis-counting packages
    // with a single build as "all green"). The entry API does
    // insert-or-push in one step.
    let by_nixname: HashMap<&str, Vec<&Build>> =
        builds.iter().fold(HashMap::new(), |mut acc, build| {
            acc.entry(build.nixname).or_insert_with(Vec::new).push(build);
            acc
        });
    // A package is "ok" when every recorded build succeeded (vacuously true
    // for an empty list, matching the original fold-with-true behavior).
    fn all_success(builds: &[&Build]) -> bool {
        builds
            .iter()
            .all(|build| build.buildstatus == Some(BuildStatus::Success))
    }
    let ok_all_platforms: u32 = by_nixname
        .values()
        .filter(|builds| all_success(builds))
        .count() as u32; // package count comfortably fits u32
    // Packages with at least one non-successful build, indexed by system.
    let not_ok_all_platforms: HashMap<&&str, HashMap<&str, &&Build>> = by_nixname
        .iter()
        .filter(|(_nixname, builds)| !all_success(builds))
        .map(|(nixname, build_vec)| {
            (
                nixname,
                build_vec
                    .iter()
                    .map(|build| (build.system, build))
                    .collect::<HashMap<&str, &&Build>>(),
            )
        })
        .collect();
    // Column width for right-aligned package names in the report.
    let max_nix_name_len: usize = not_ok_all_platforms
        .keys()
        .map(|nixname| nixname.len())
        .max()
        .unwrap_or(0);
    println!("builds fine on all platforms: {:?}", ok_all_platforms);
    for (job, builds) in not_ok_all_platforms {
        if let (Some(x86_64), Some(aarch64)) =
            (builds.get("x86_64-linux"), builds.get("aarch64-linux"))
        {
            match (x86_64.buildstatus, aarch64.buildstatus) {
                (Some(BuildStatus::Success), Some(BuildStatus::Success)) => {}
                (Some(BuildStatus::Success), aarch64_status) => {
                    // Fixed output typo: "arch64" -> "aarch64"; also merged the
                    // print!/println!("") pair into one println!.
                    println!(
                        "{:>width$} https://hydra.nixos.org/build/{} aarch64 status: {:?}",
                        job,
                        aarch64.id,
                        aarch64_status,
                        width = max_nix_name_len,
                    );
                }
                (_, _) => {}
            }
        }
    }
}
|
pub mod day1;
pub mod day10;
pub mod day11;
pub mod day2;
pub mod day3;
pub mod day5;
pub mod day8;
pub mod day9;
pub mod utils;
|
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
use actix_web::{App,HttpServer};
use std::env;
use dotenv::dotenv;
use listenfd::ListenFd;
mod db;
mod employees;
mod error_handlers;
mod schema;
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
dotenv().ok();
db::init();
let mut listenfd=ListenFd::from_env();
let mut server=HttpServer::new(||App::new().configure(employees::init_routes));
server=match listenfd.take_tcp_listener(0)?{
Some(listener)=>server.listen(listener)?,
None=>{
let host=env::var("HOST").expect("Please set host in .env");
let port=env::var("PORT").expect("Please set port in .env");
server.bind(format!("{}:{}",host,port))?
}
};
server.run().await
} |
use crate::input::ProxySettings;
use std::env;
use std::convert::{ TryFrom };
/// Connection settings for the Apify HTTP proxy.
#[derive(Debug)]
pub struct Proxy {
    // Proxy endpoint URL (credentials are kept in the fields below).
    pub base_url: String,
    pub username: String,
    pub password: String
}
impl Proxy {
    /// Builds a Proxy pointing at the fixed Apify proxy endpoint with the
    /// supplied credentials.
    fn new(username: String, password: String) -> Proxy {
        let base_url = String::from("http://@proxy.apify.com:8000");
        Proxy {
            base_url,
            username,
            password,
        }
    }
}
impl Clone for Proxy {
    /// Field-wise deep copy.
    fn clone(&self) -> Self {
        // String::clone copies the buffer directly; the previous
        // `.to_string()` calls produced the same result but routed through
        // the Display formatting machinery. (This impl could also simply be
        // `#[derive(Clone)]` on the struct.)
        Proxy {
            base_url: self.base_url.clone(),
            password: self.password.clone(),
            username: self.username.clone(),
        }
    }
}
impl TryFrom<Option<ProxySettings>> for Proxy {
    type Error = ();

    /// Builds a Proxy from optional input settings. Yields `Err(())` when no
    /// settings were given or the Apify proxy was not requested.
    fn try_from(settings: Option<ProxySettings>) -> Result<Self, Self::Error> {
        let settings = settings.ok_or(())?;
        if !settings.useApifyProxy {
            return Err(());
        }
        // The proxy password always comes from the environment.
        let password = env::var("APIFY_PROXY_PASSWORD")
            .expect("Missing APIFY_PROXY_PASSWORD environment variable. This is required to use Apify proxy!");
        // Either a specific proxy-group selection or automatic assignment.
        let username = match settings.apifyProxyGroups {
            Some(groups) => format!("groups-{}", groups.join("+")),
            None => "auto".to_owned(),
        };
        Ok(Proxy::new(username, password))
    }
}
|
use std::io;
use failure::{self, format_err, Error, ResultExt};
use std::fs;
use std::io::prelude::*;
use std::iter::Peekable;
use std::path::PathBuf;
use crate::dmu_stream;
use crate::dmu_stream::RecordWithPayload;
use super::lsm;
use super::object_merge::ObjectMergeHelper;
use crate::split_tree::{self, SplitTree};
use std::cell::Cell;
use std::cell::RefCell;
use std::rc::Rc;
/// Configuration for the LSM stream server.
pub struct LSMSrvConfig {
    // Directory under which all per-stream data is stored.
    pub root_dir: PathBuf,
}
/// Directory holding all data for the stream `name`, under the DB root.
fn stream_dir(config: &LSMSrvConfig, name: String) -> PathBuf {
    let mut dir = config.root_dir.clone();
    dir.push(name);
    dir
}
/// Path of the fully merged, sorted representation of stream `name`:
/// `<root>/<name>/sorted.bin`.
fn sorted_stream_path(config: &LSMSrvConfig, name: String) -> PathBuf {
    let mut path = config.root_dir.join(name);
    path.push("sorted.bin");
    path
}
pub fn read_stream(
config: &LSMSrvConfig,
stream: &mut io::Read,
name: String,
) -> Result<(), failure::Error> {
let stream_dir = stream_dir(config, name.clone());
if stream_dir.exists() {
return Err(format_err!(
"stream dir {:?} exists, db inconsistent or duplicate name",
stream_dir
))?;
}
fs::create_dir_all(&stream_dir).context("create stream dir")?;
let writer_tmp = stream_dir.join("writer");
fs::create_dir(&writer_tmp).context("create writer tmp dir")?;
let stream_sorted_out = sorted_stream_path(config, name.clone());
use super::lsm::LSMWriter;
let mut writer = LSMWriter::new(writer_tmp.clone(), 1 << 10, 10); // FIXME
dmu_stream::read_with_callback(stream, |mut record| -> Result<(), Error> {
let dmu_stream::Record {
header: drr,
payload_len,
mut payload_reader,
} = record;
let lsm_k = LSMKey(drr);
let mut payload = Vec::new();
payload_reader
.read_to_end(&mut payload)
.context("read payload")?;
let lsm_v = payload;
writer.insert(lsm_k, lsm_v);
Ok(())
})
.unwrap();
writer.merge_completely(&stream_sorted_out); // FIXME
Ok(())
}
trait LSMReaderIterTrait: Iterator<Item = (dmu_replay_record, Vec<u8>)> {}
impl<I: Iterator<Item = (dmu_replay_record, Vec<u8>)>> LSMReaderIterTrait for I {}
type LSMReaderIter = Rc<RefCell<Peekable<Box<LSMReaderIterTrait>>>>;
/// Iterates over the OBJECT_RANGE sections of one stream, yielding each
/// range's info plus a nested iterator over the objects inside it.
struct ObjectRangeIter {
    stream_idx: usize,
    stream: LSMReaderIter,
    // Shared flag: the most recently returned nested iterator must be fully
    // drained (flag set true) before `next` may be called again.
    current_objects_within_range_drained: Rc<Cell<bool>>,
}
use std::fmt;
impl fmt::Debug for ObjectRangeIter {
    /// Shows only the stream index; the shared stream handle has no useful
    /// Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ObjectRangeIter")
            .field("stream_idx", &self.stream_idx)
            .finish()
    }
}
/// An OBJECT_RANGE record together with the FREEOBJECTS/OBJECT records that
/// describe its object-id space.
#[derive(Debug)]
struct ObjectRangeInfo {
    object_range_drr: RecordWithPayload,
    // Interval map over object ids: free spans and occupied OBJECT records.
    objid_space: SplitTree<RecordWithPayload>,
}
impl ObjectRangeInfo {
    /// First object id covered by this OBJECT_RANGE record.
    fn firstobj(&self) -> u64 {
        assert!(self.object_range_drr.drr.drr_type == dmu_replay_record_DRR_OBJECT_RANGE);
        // The assert above checks drr_type selects the drr_object_range
        // union variant before the unsafe union read.
        unsafe {
            self.object_range_drr
                .drr
                .drr_u
                .drr_object_range
                .drr_firstobj
        }
    }
    /// True when `obj_id` lies in `[firstobj, firstobj + numslots)`.
    fn contains_id(&self, obj_id: u64) -> bool {
        assert!(self.object_range_drr.drr.drr_type == dmu_replay_record_DRR_OBJECT_RANGE);
        // Union variant checked by the assert above.
        unsafe {
            let drr_o = &self.object_range_drr.drr.drr_u.drr_object_range;
            drr_o.drr_firstobj <= obj_id && obj_id < (drr_o.drr_firstobj + drr_o.drr_numslots)
        }
    }
}
/// Iterates over the objects inside one OBJECT_RANGE, yielding a per-object
/// record iterator for each object id.
struct ObjectsWithinObjectRangeIterator {
    stream_idx: usize,
    stream: LSMReaderIter,
    // Set once this range is exhausted; shared with the parent
    // ObjectRangeIter's drain-protocol flag.
    drained: Rc<Cell<bool>>,
    // Drain flag shared with the most recently returned ObjectIter.
    object_iter_drained: Rc<Cell<bool>>,
}
impl fmt::Debug for ObjectsWithinObjectRangeIterator {
    /// Shows only the stream index; the shared stream handle has no useful
    /// Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ObjectsWithinObjectRangeIterator")
            .field("stream_idx", &self.stream_idx)
            .finish()
    }
}
/// Iterates over the WRITE/FREE records belonging to a single object id.
struct ObjectIter {
    obj_id: u64,
    stream: LSMReaderIter,
    // Set once a record for a different object (or range) is reached.
    drained: Rc<Cell<bool>>,
}
impl Iterator for ObjectRangeIter {
    type Item = (ObjectRangeInfo, ObjectsWithinObjectRangeIterator);
    /// Yields the next OBJECT_RANGE of the stream: its info record plus an
    /// iterator over the objects inside it. Panics if the previously
    /// returned nested iterator was not fully drained.
    fn next(&mut self) -> Option<Self::Item> {
        if !self.current_objects_within_range_drained.get() {
            panic!("must drain previously returned iterator")
        }
        let mut stream = self.stream.borrow_mut();
        // The cursor must sit on a range boundary: either the next
        // OBJECT_RANGE record, or something that ends the range sequence.
        let (end_record, _) = stream.peek().unwrap();
        assert!(
            ObjectsWithinObjectRangeIterator::is_follow(&end_record),
            "{:?}",
            drr_debug(end_record)
        );
        assert!(
            !ObjectsWithinObjectRangeIterator::should_consume(end_record),
            "{:?}",
            drr_debug(end_record)
        );
        if Self::is_follow(end_record) {
            // END or trailing FREEOBJECTS: no more ranges in this stream.
            return None;
        }
        // INVARIANT: all follows of ObjectsWithinObjectRangeIterator::is_follow covered
        let (or_record, payload) = stream.next().expect("expecting a record");
        use itertools::Itertools;
        let object_range_drr = RecordWithPayload {
            drr: or_record,
            payload,
        };
        // Collect the FREEOBJECTS / OBJECT records describing this range's
        // object-id space into an interval map keyed by object id.
        let object_range_info_records = stream.peeking_take_while(|(drr, _)| {
            drr.drr_type == dmu_replay_record_DRR_FREEOBJECTS
                || drr.drr_type == dmu_replay_record_DRR_OBJECT
        });
        let mut objid_space = SplitTree::new();
        for (drr, payload) in object_range_info_records {
            let (offset, length) = unsafe {
                match drr.drr_type {
                    dmu_replay_record_DRR_FREEOBJECTS => (
                        drr.drr_u.drr_freeobjects.drr_firstobj,
                        drr.drr_u.drr_freeobjects.drr_numobjs,
                    ),
                    dmu_replay_record_DRR_OBJECT => (drr.drr_u.drr_object.drr_object, 1),
                    _ => panic!("unexpected drr_type {:?}", drr_debug(drr)), // FIXME
                }
            };
            objid_space.insert(offset, length, RecordWithPayload { drr, payload });
        }
        let or_info = ObjectRangeInfo {
            object_range_drr,
            objid_space,
        };
        drop(stream);
        // Fresh drain flag, shared with the nested iterator we hand out so
        // the next call can verify it was exhausted.
        self.current_objects_within_range_drained = Rc::new(Cell::new(false));
        let iter = ObjectsWithinObjectRangeIterator {
            stream_idx: self.stream_idx,
            stream: self.stream.clone(),
            drained: self.current_objects_within_range_drained.clone(),
            object_iter_drained: Rc::new(Cell::new(true)),
        };
        return Some((or_info, iter));
    }
}
impl Iterator for ObjectsWithinObjectRangeIterator {
    type Item = (u64, ObjectIter);
    /// Yields the next object id within the current OBJECT_RANGE together
    /// with an iterator over that object's WRITE/FREE records. The returned
    /// `ObjectIter` must be fully drained before calling `next` again.
    fn next(&mut self) -> Option<Self::Item> {
        if !self.object_iter_drained.get() {
            panic!("must drain previously returned iterator")
        }
        if self.drained.get() {
            return None;
        }
        let mut stream = self.stream.borrow_mut();
        let (next_record, _) = stream.peek().unwrap();
        if Self::is_follow(next_record) {
            // A record owned by an outer iterator ends this object range.
            self.drained.set(true);
            return None;
        }
        // BUG FIX: the assert! below was missing the comma separating the
        // condition from its format string, which did not compile.
        assert!(
            Self::should_consume(next_record),
            " unexpected record type {:?}",
            drr_debug(next_record)
        );
        let obj_id = unsafe { LSMKey(*next_record).lower_obj_id() }.unwrap(); // FIXME
        drop(stream);
        // Fresh drain flag shared with the child iterator so the next call
        // can verify it was exhausted.
        self.object_iter_drained = Rc::new(Cell::new(false));
        let object_iter = ObjectIter {
            obj_id,
            stream: self.stream.clone(),
            drained: self.object_iter_drained.clone(),
        };
        Some((obj_id, object_iter))
    }
}
impl ObjectRangeIter {
    /// True iff `drr` terminates the sequence of object ranges: either the
    /// stream's END record or a trailing FREEOBJECTS record.
    fn is_follow(drr: &dmu_replay_record) -> bool {
        drr.drr_type == dmu_replay_record_DRR_END || LSMKey::is_trailing_freeobjects(drr)
    }
}
impl ObjectsWithinObjectRangeIterator {
    // returns `true` iff drr is a follow-element of an object range stream,
    // i.e. a stream element not consumed by this iterator
    fn is_follow(drr: &dmu_replay_record) -> bool {
        // next OBJECT_RANGE
        if drr.drr_type == dmu_replay_record_DRR_OBJECT_RANGE {
            return true;
        }
        // we are nested within ObjectRangeIter
        if ObjectRangeIter::is_follow(drr) {
            return true;
        }
        return false;
    }
    /// True for the record types this iterator consumes (WRITE / FREE).
    fn should_consume(drr: &dmu_replay_record) -> bool {
        if drr.drr_type == dmu_replay_record_DRR_WRITE || drr.drr_type == dmu_replay_record_DRR_FREE
        {
            let _obj_id = unsafe { LSMKey(*drr).lower_obj_id() }.unwrap();
            // TODO validate that obj_id is within this iterator's object range
            return true;
        }
        return false;
    }
    /// Peeks the object id of the next record without consuming it; `None`
    /// once the range is exhausted.
    fn peek_objid(&self) -> Option<u64> {
        if self.drained.get() {
            return None;
        }
        match self.stream.borrow_mut().peek() {
            Some((drr, _)) => {
                if Self::is_follow(drr) {
                    None
                } else {
                    // BUG FIX: the assert! below was missing the comma
                    // separating the condition from its format string,
                    // which did not compile.
                    assert!(
                        Self::should_consume(drr),
                        " unexpected record type {:?}",
                        drr_debug(drr)
                    ); // FIXME
                    Some(unsafe { LSMKey(*drr).lower_obj_id() }.unwrap())
                }
            }
            None => None,
        }
    }
}
impl Iterator for ObjectIter {
    type Item = RecordWithPayload;
    /// Yields the WRITE/FREE records of `self.obj_id` until a record for a
    /// different object (or the next OBJECT_RANGE) is reached.
    fn next(&mut self) -> Option<Self::Item> {
        if self.drained.get() {
            return None;
        }
        let mut stream = self.stream.borrow_mut();
        let (next_record, _) = stream.peek().unwrap();
        // A record without an object id, or with a different one, ends this
        // object's run.
        let same_objid =
            unsafe { LSMKey(*next_record).lower_obj_id() }.map_or(false, |id| self.obj_id == id);
        if !same_objid {
            // TODO need check for invalid record type here?
            self.drained.set(true);
            return None;
        }
        unsafe {
            match next_record.drr_type {
                dmu_replay_record_DRR_WRITE | dmu_replay_record_DRR_FREE => {
                    let (drr, payload) = stream.next().unwrap(); // checked above
                    Some(RecordWithPayload { drr, payload })
                }
                dmu_replay_record_DRR_OBJECT_RANGE => None,
                _ => panic!("unexpected drr_type {:?}", drr_debug(next_record)),
            }
        }
    }
}
/// Consumes records up to (not including) the first OBJECT_RANGE, returning
/// the BEGIN record if one was encountered. Other leading records are dumped
/// via `dbg!` and discarded.
///
/// # Safety
/// Inspects raw record headers; `peek().unwrap()` panics if the stream ends
/// before an OBJECT_RANGE record appears.
unsafe fn consume_until_object_range_return_begin(
    stream: Rc<RefCell<Peekable<Box<dyn LSMReaderIterTrait>>>>,
) -> Option<RecordWithPayload> {
    let s = &mut *stream.borrow_mut();
    let mut begin = None;
    loop {
        let (r, pay) = s.peek().unwrap();
        if r.drr_type == dmu_replay_record_DRR_OBJECT_RANGE {
            break;
        } else if r.drr_type == dmu_replay_record_DRR_BEGIN {
            assert!(begin.is_none()); // TODO error handling
            begin = Some(RecordWithPayload {
                drr: *r,
                payload: pay.clone(),
            });
            s.next();
        } else {
            dbg!(drr_debug(r));
            s.next();
        }
    }
    return begin;
}
unsafe fn symbolic_dump_consume_lsm_reader(
stream: Rc<RefCell<Peekable<Box<dyn LSMReaderIterTrait>>>>,
) {
println!("suck up until OBJECT_RANGE begins");
let begin = consume_until_object_range_return_begin(stream.clone());
dbg!(begin);
println!("dump ObjectRangeIter and child iterators");
let mut iter = ObjectRangeIter {
stream_idx: 0,
stream: stream.clone(),
current_objects_within_range_drained: Rc::default(),
};
for (or, it) in iter {
dbg!(or);
for (o, rec_it) in it {
dbg!(o);
for rec in rec_it {
dbg!(rec);
}
}
}
println!("dump remainder of the stream");
{
// scope for Drop
let s = &mut *stream.borrow_mut();
loop {
if let Some((r, _)) = s.next() {
dbg!(drr_debug(r));
} else {
println!("end of stream");
break;
}
}
}
}
/// Debug helper: prints every record header of the sorted stream
/// `loaded_stream`.
///
/// # Safety
/// `drr_debug` inspects raw record headers read from disk.
pub unsafe fn show(config: &LSMSrvConfig, loaded_stream: &str) {
    // (dropped an unused `mut`: the reader is moved into the for loop)
    let r = lsm::LSMReader::<LSMKey, Vec<u8>>::open(&sorted_stream_path(
        config,
        loaded_stream.to_owned(),
    ));
    for (k, _) in r {
        println!("{:?}", drr_debug(&k.0));
    }
}
/// Merges several ingested streams into a single sorted stream named
/// `target`.
///
/// `streams_newest_to_oldest` orders the inputs by recency: when more than
/// one stream carries data for the same object range / object / offset, the
/// stream with the lower index (newer) wins. The BEGIN records are folded
/// oldest-to-newest (guid chaining asserted), object ranges are merged
/// range-by-range, per-object WRITE/FREE records are k-way merged by offset,
/// and a fresh END record is synthesized at the end.
///
/// # Safety
/// Reads and writes union fields of raw `dmu_replay_record`s; the input
/// streams must contain well-formed records.
pub unsafe fn merge_streams(
    config: &LSMSrvConfig,
    streams_newest_to_oldest: &[&str],
    target: String,
) -> Result<(), failure::Error> {
    // Open each input stream as a boxed (header, payload) iterator.
    let mut streams: Vec<Peekable<Box<dyn LSMReaderIterTrait>>> = vec![];
    for (stream, stream_path) in streams_newest_to_oldest.iter().enumerate() {
        let x: Box<dyn Iterator<Item = (LSMKey, Vec<u8>)>> = Box::new(lsm::LSMReader::open(
            &sorted_stream_path(config, (*stream_path).to_owned()),
        ));
        // let x: Box<dyn Iterator<Item=(dmu_replay_record, Vec<u8>)>> =
        let x: Box<dyn LSMReaderIterTrait> =
            Box::new(x.map(|(LSMKey(drr), payload): (LSMKey, Vec<u8>)| (drr, payload)));
        let x = x.peekable();
        streams.push(x);
    }
    // Wrap in shared handles: the nested iterator types all borrow the same
    // underlying peekable stream through Rc<RefCell<...>>.
    let mut streams: Vec<Rc<RefCell<Peekable<Box<dyn LSMReaderIterTrait>>>>> = streams
        .into_iter()
        .map(|b| Rc::new(RefCell::new(b)))
        .collect();
    let writer_out = sorted_stream_path(config, target);
    fs::create_dir_all(writer_out.parent().unwrap())?;
    let mut target: lsm::SortedLSMWriter<LSMKey, Vec<u8>> = lsm::SortedLSMWriter::new(&writer_out); // FIXME
    // merge BEGIN record, drop noop FREEOBJECTS(0, 0) record
    use std::collections::VecDeque;
    let mut begins_oldest_to_newest = streams
        .iter()
        .rev()
        .map(|s| consume_until_object_range_return_begin(s.clone()).unwrap())
        .collect::<VecDeque<_>>();
    assert!(streams.len() > 0);
    let begin = begins_oldest_to_newest.pop_front().unwrap();
    // Fold the BEGIN chain oldest -> newest: each link's fromguid must match
    // the previous toguid; fromguid and from_ivset_guid are threaded through
    // to the newest BEGIN record, which becomes the merged stream's BEGIN.
    let begin = begins_oldest_to_newest.into_iter().fold(begin, |mut b, r| {
        unsafe {
            use const_cstr::const_cstr;
            assert_eq!(r.drr.drr_type, dmu_replay_record_DRR_BEGIN);
            assert_eq!(
                dbg!(&b).drr.drr_u.drr_begin.drr_toguid,
                dbg!(&r).drr.drr_u.drr_begin.drr_fromguid
            ); // TODO error
            let fromguid = b.drr.drr_u.drr_begin.drr_fromguid;
            let from_ivset_guid = {
                let mut from_ivset_guid: u64 = 0;
                // Read-only lookup: the closure returns Err to abort the
                // mutation after extracting from_ivset_guid.
                b.drr_begin_mutate_crypt_keydata(&mut |crypt_keydata| -> Result<(), ()> {
                    nvpair_sys::nvlist_lookup_uint64(
                        crypt_keydata,
                        const_cstr!("from_ivset_guid").as_ptr(),
                        &mut from_ivset_guid,
                    );
                    Err(()) // abort mutation
                });
                from_ivset_guid
            };
            b = r;
            b.drr.drr_u.drr_begin.drr_fromguid = fromguid;
            b.drr_begin_mutate_crypt_keydata(&mut |crypt_keydata| -> Result<(), ()> {
                nvpair_sys::nvlist_add_uint64(
                    crypt_keydata,
                    const_cstr!("from_ivset_guid").as_ptr(),
                    from_ivset_guid,
                );
                Ok(())
            })
            .expect("mutate ivset guid");
            b
        }
    });
    target.insert(LSMKey(begin.drr.clone()), begin.payload);
    let drr_toguid = unsafe { begin.drr.drr_u.drr_begin.drr_toguid };
    // From here on, walk each stream range-by-range via ObjectRangeIter.
    let mut streams: Vec<_> = streams
        .into_iter()
        .enumerate()
        .map(|(stream_idx, stream)| {
            ObjectRangeIter {
                stream_idx,
                stream: stream.clone(),
                current_objects_within_range_drained: Rc::new(Cell::new(true)),
            }
            .peekable()
        })
        .collect();
    let mut next_object_range = 0; // we know every stream touches object range [0, 32)
    loop {
        assert_eq!(next_object_range % 32, 0);
        // Indices of streams whose next OBJECT_RANGE starts at
        // next_object_range (in newest-first order).
        let this_range_streams = streams
            .iter_mut()
            .enumerate()
            .filter_map(|(i, s)| {
                let (ori, or_iter) = s.peek()?; // TODO
                if ori.firstobj() == next_object_range {
                    Some(i)
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        println!("this_range_streams = {:?}", this_range_streams);
        if this_range_streams.len() == 0 {
            // check if there is any next range
            let min = streams
                .iter_mut()
                .enumerate()
                .filter_map(|(i, s)| Some((i, s.peek()?)))
                .min_by_key(|(i, (ori, _))| ori.firstobj());
            if let Some((min_idx, (ori, _))) = min {
                next_object_range = ori.firstobj();
                continue;
            } else {
                break;
            }
        } else {
            // advance next_object_range
            next_object_range = streams[this_range_streams[0]].peek().unwrap().0.firstobj();
        }
        assert!(is_sorted::IsSorted::is_sorted(
            &mut this_range_streams.iter()
        ));
        assert!(this_range_streams.len() > 0);
        // the highest-level (= first) OBJECT_RANGE record wins
        // and with it, all FREEOBJECTS and OBJECT records (encoded in objid_space)
        //
        // However, we still gotta merge the READ and WRITE records per object
        let this_range_streams = this_range_streams
            .into_iter()
            // consume from those streams that have the current object range
            .map(|stream_idx| streams[stream_idx].next().unwrap())
            .collect::<Vec<_>>();
        // split the (ObjectRangeInfo, ObjectsWithinObjectRange) iterators into two separate iterators
        let (oris, mut or_iters) = this_range_streams.into_iter().fold(
            (vec![], vec![]),
            |(mut oris, mut or_iters), (ori, or_iter)| {
                oris.push(ori);
                or_iters.push(or_iter);
                (oris, or_iters)
            },
        );
        let ori: &ObjectRangeInfo = &oris[0]; // most recent OBJECT_RANGE wins
        target.insert(LSMKey(ori.object_range_drr.drr), Vec::<u8>::new());
        // insert FREEOBJECTS and OBJECT records
        for (obj_id, len, occ_obj) in ori.objid_space.iter() {
            match occ_obj {
                split_tree::Occ::Unknown => (),
                split_tree::Occ::Free => {
                    // Synthesize a FREEOBJECTS record covering
                    // [obj_id, obj_id + len).
                    let freeobjects = unsafe {
                        let mut r: dmu_replay_record = std::mem::zeroed();
                        r.drr_type = dmu_replay_record_DRR_FREEOBJECTS;
                        r.drr_payloadlen = 0;
                        r.drr_u.drr_freeobjects = dmu_replay_record__bindgen_ty_2_drr_freeobjects {
                            drr_firstobj: obj_id,
                            drr_numobjs: len,
                            drr_toguid,
                        };
                        r
                    };
                    // sw.write_record_and_payload_reader(or.unwrap(), &mut io::empty())?;
                    target.insert(LSMKey(freeobjects), vec![]);
                }
                split_tree::Occ::Occupied(obj) => {
                    target.insert(LSMKey(obj.drr), obj.payload.clone());
                }
            }
        }
        let mut next_object_id = ori.firstobj();
        while ori.contains_id(next_object_id) {
            println!("next_object_id = {:?}", next_object_id);
            // Streams that still carry records for this object id.
            let mut this_object_streams = or_iters
                .iter_mut()
                .filter_map(|or_iter| {
                    if let Some(obj_id) = or_iter.peek_objid() {
                        if obj_id == next_object_id {
                            Some(or_iter)
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>();
            println!("this_object_streams = {:?}", this_object_streams);
            let this_object_id = next_object_id;
            next_object_id += 1;
            if this_object_streams.len() == 0 {
                continue;
            }
            // merge (= consume) iterators for this object
            let mut merger: ObjectMergeHelper<DrrWrapper> =
                ObjectMergeHelper::new(this_object_streams.len());
            let object_iters =
                this_object_streams
                    .iter_mut()
                    .enumerate()
                    .map(|(level, oior_iter)| {
                        let (obj_id, obj_iter) = oior_iter.next().unwrap(); // consume object()
                        assert_eq!(obj_id, this_object_id);
                        obj_iter.map(move |drr| (drr, level))
                    });
            // k-way merge by in-object offset; ties broken by level so the
            // newer stream's record sorts first.
            let unified = itertools::kmerge_by(
                object_iters,
                |(drr_a, level_a): &(RecordWithPayload, usize),
                 (drr_b, level_b): &(RecordWithPayload, usize)| {
                    let offset_ord = LSMKey(drr_a.drr)
                        .offset_in_object_if_any()
                        .cmp(&LSMKey(drr_b.drr).offset_in_object_if_any());
                    if offset_ord == Ordering::Equal {
                        level_a < level_b
                    } else {
                        offset_ord == Ordering::Less
                    }
                },
            );
            // Newtype so ObjectMergeHelper can require Default.
            #[derive(Clone, Debug, From, Into)]
            struct DrrWrapper(RecordWithPayload);
            impl Default for DrrWrapper {
                fn default() -> Self {
                    let drr = unsafe {
                        let drr: dmu_replay_record = std::mem::zeroed();
                        drr
                    };
                    DrrWrapper(RecordWithPayload {
                        drr,
                        payload: vec![],
                    })
                }
            }
            let mut out = VecDeque::new();
            for (record, level) in unified {
                assert_eq!(
                    Some(this_object_id),
                    unsafe { LSMKey(record.drr).lower_obj_id() },
                    "unexpected object id {:?}",
                    record
                );
                if record.drr.drr_type == dmu_replay_record_DRR_WRITE {
                    let o = &record.drr.drr_u.drr_write;
                    merger.insert_write(
                        &mut out,
                        level,
                        o.drr_offset,
                        o.drr_logical_size,
                        record.into(),
                    );
                } else if record.drr.drr_type == dmu_replay_record_DRR_FREE {
                    assert_eq!(record.payload.len(), 0);
                    let o = &dbg!(record).drr.drr_u.drr_free;
                    let len = if o.drr_length == std::u64::MAX {
                        std::u64::MAX - o.drr_offset // free to end
                    } else {
                        o.drr_length
                    };
                    merger.insert_free(&mut out, level, o.drr_offset, len);
                } else {
                    panic!(" unexpected record type {:?}", record);
                }
            }
            // for each level, end it
            for level in 0..this_object_streams.len() {
                merger.insert_end(&mut out, level)
            }
            // write out object_info
            for knowledge in out {
                use super::object_merge::KnowledgeKind::{End, Free, Occupied};
                match knowledge.kind {
                    Occupied(DrrWrapper(RecordWithPayload { drr, payload })) => {
                        target.insert(LSMKey(drr), payload);
                    }
                    Free => {
                        // Synthesize a FREE record for a merged free span.
                        let free = unsafe {
                            let mut r: dmu_replay_record = std::mem::zeroed();
                            r.drr_type = dmu_replay_record_DRR_FREE;
                            r.drr_payloadlen = 0;
                            r.drr_u.drr_free = dmu_replay_record__bindgen_ty_2_drr_free {
                                drr_object: this_object_id,
                                drr_offset: knowledge.from,
                                drr_length: knowledge.len,
                                drr_toguid,
                            };
                            r
                        };
                        dbg!(drr_debug(&free));
                        target.insert(LSMKey(free), Vec::new());
                    }
                    End => panic!("merger emitted End record, unsupported"),
                }
            }
        }
        // After while ori.contains_id(next_object_id)
        // Drain whatever the losing (older) streams still held for this
        // range; those records are shadowed by the winning range.
        for stream in or_iters {
            println!("draining stream {:?}", stream);
            let stream_dbg = format!("{:?}", stream);
            for (obj_id, obj_iter) in stream {
                println!("draining object {:?}", obj_id);
                obj_iter.for_each(|x| {
                    println!(
                        "drain leftovers of object range {:?} {:?} {:?} {:?}",
                        ori,
                        stream_dbg,
                        obj_id,
                        drr_debug(&x.drr)
                    )
                });
            }
        }
    }
    // synthesize END record
    let end_drr = unsafe {
        let mut drr: dmu_replay_record = std::mem::zeroed();
        drr.drr_type = dmu_replay_record_DRR_END;
        drr.drr_payloadlen = 0;
        drr.drr_u.drr_end.drr_toguid = drr_toguid;
        drr
    };
    target.insert(LSMKey(end_drr), vec![]);
    Ok(())
}
/// Replays the sorted LSM for `name` into `out` as a dmu send stream,
/// writing every record and its payload in LSM key order.
pub unsafe fn write_stream(
    config: &LSMSrvConfig,
    name: String,
    out: &mut std::io::Write,
) -> Result<(), failure::Error> {
    // Wrap the raw sink in a replay-stream writer.
    let mut writer = dmu_stream::StreamWriter::new(out);
    let reader: lsm::LSMReader<LSMKey, Vec<u8>> =
        lsm::LSMReader::open(&sorted_stream_path(config, name).to_owned());
    for (key, payload) in reader {
        let LSMKey(drr) = key;
        writer
            .write_record_and_payload_reader(&drr, &mut io::Cursor::new(&payload))
            .context(format!("drr {:?}", drr_debug(drr)))?;
    }
    Ok(())
}
use bindings::*;
use serde::{Deserialize, Serialize, Serializer};
/// Newtype over a raw `dmu_replay_record`, used as the LSM sort key.
///
/// The `Ord` impl sorts records into valid send-stream order:
/// BEGIN
/// FREEOBJECTS (firstobj=0, numobjs=0)
/// OBJECT_RANGE by firstobj / 32
/// FREEOBJECTS, OBJECT by object_id
/// FREE, WRITE by (object_id, offset_in_object)
/// FREEOBJECTS (firstobj=X, numobjs=0)
/// FREEOBJECTS (firstobj=X, numobjs=u64max-X)
/// END
#[derive(Serialize, Deserialize)]
struct LSMKey(dmu_replay_record);
use std::cmp::Ordering;
impl LSMKey {
    /// Returns the object id this record sorts under, if its record type
    /// carries one. BEGIN/END have no object id and return `None`.
    unsafe fn lower_obj_id(&self) -> Option<u64> {
        let u = &self.0.drr_u;
        let obj_id = match self.0.drr_type {
            dmu_replay_record_DRR_BEGIN => return None, // TODO
            dmu_replay_record_DRR_END => return None,   // TODO
            dmu_replay_record_DRR_OBJECT_RANGE => u.drr_object_range.drr_firstobj,
            dmu_replay_record_DRR_OBJECT => u.drr_object.drr_object,
            dmu_replay_record_DRR_FREEOBJECTS => u.drr_freeobjects.drr_firstobj,
            dmu_replay_record_DRR_WRITE => u.drr_write.drr_object,
            dmu_replay_record_DRR_FREE => u.drr_free.drr_object,
            _ => unimplemented!(), // TODO
        };
        Some(obj_id)
    }
    /// Byte offset within the object for range-addressing record types
    /// (WRITE, FREE); `None` for every other type.
    unsafe fn offset_in_object_if_any(&self) -> Option<u64> {
        let u = &self.0.drr_u;
        match self.0.drr_type {
            dmu_replay_record_DRR_BEGIN => None,
            dmu_replay_record_DRR_END => None,
            dmu_replay_record_DRR_OBJECT_RANGE => None,
            dmu_replay_record_DRR_OBJECT => None,
            dmu_replay_record_DRR_FREEOBJECTS => None,
            dmu_replay_record_DRR_WRITE => Some(u.drr_write.drr_offset),
            dmu_replay_record_DRR_FREE => Some(u.drr_free.drr_offset),
            _ => unimplemented!(), // TODO
        }
    }
    /// Length of the addressed range within the object (WRITE logical size
    /// or FREE length); `None` for record types without a range.
    unsafe fn length_in_object_if_any(&self) -> Option<u64> {
        let u = &self.0.drr_u;
        match self.0.drr_type {
            dmu_replay_record_DRR_BEGIN => None,
            dmu_replay_record_DRR_END => None,
            dmu_replay_record_DRR_OBJECT_RANGE => None,
            dmu_replay_record_DRR_OBJECT => None,
            dmu_replay_record_DRR_FREEOBJECTS => None,
            dmu_replay_record_DRR_WRITE => Some(u.drr_write.drr_logical_size),
            dmu_replay_record_DRR_FREE => Some(u.drr_free.drr_length),
            _ => unimplemented!(), // TODO
        }
    }
    /// Whether `drr` is one of the trailing FREEOBJECTS records (see the
    /// sort-order doc on `LSMKey`): either `FREEOBJECTS(firstobj=0, numobjs=0)`
    /// or `FREEOBJECTS(firstobj=X, numobjs=u64::MAX - X)` ("free to end").
    fn is_trailing_freeobjects(drr: &dmu_replay_record) -> bool {
        // BUG FIX: the original wrote `!drr.drr_type == dmu_replay_record_DRR_FREEOBJECTS`,
        // which applies bitwise NOT to drr_type *before* the comparison, so the
        // early-return guard essentially never fired and the freeobjects union
        // variant was read for non-FREEOBJECTS records as well.
        if drr.drr_type != dmu_replay_record_DRR_FREEOBJECTS {
            return false;
        }
        // SAFETY: drr_type was checked above, so the freeobjects union variant
        // is the active one. (The original double-nested `unsafe` was redundant.)
        let (firstobj, numobjs) = unsafe {
            let fos = drr.drr_u.drr_freeobjects;
            (fos.drr_firstobj, fos.drr_numobjs)
        };
        (firstobj == 0 && numobjs == 0) || numobjs == std::u64::MAX - firstobj
    }
}
impl PartialEq for LSMKey {
    /// Two keys are equal exactly when the total order in `Ord` says so.
    fn eq(&self, o: &LSMKey) -> bool {
        matches!(self.cmp(o), Ordering::Equal)
    }
}
impl Eq for LSMKey {} // equality is total: `eq` delegates to the total order in `Ord`
impl PartialOrd for LSMKey {
    /// Always `Some`: the ordering defined by `Ord` is total.
    fn partial_cmp(&self, other: &LSMKey) -> Option<Ordering> {
        let ord = Ord::cmp(self, other);
        Some(ord)
    }
}
impl Ord for LSMKey {
    /// Total order over replay records implementing the stream layout
    /// documented on `LSMKey`: BEGIN first, END last; in between, records
    /// are grouped by OBJECT_RANGE (object_id / 32), then by record-type
    /// cohort, then object id, offset, type, and length.
    ///
    /// Panics when two distinct records cannot be discriminated by any of
    /// the criteria below — that indicates an invalid stream.
    fn cmp(&self, o: &LSMKey) -> Ordering {
        unsafe {
            // Renders a human-readable diff of the two records, used in the
            // panic/error messages below.
            let diff_it = || {
                format!(
                    "{}",
                    difference::Changeset::new(
                        &format!("{:#?}", drr_debug(&self.0)),
                        &format!("{:#?}", drr_debug(&o.0)),
                        "\n"
                    )
                )
            };
            // Coarsest level: BEGIN (0) < all body records (1) < END (2).
            // NOTE(review): lowercase const names trip the non_upper_case_globals lint.
            const begin_end_type_ordering: &'_ [(usize, dmu_replay_record__bindgen_ty_1)] = &[
                (0, dmu_replay_record_DRR_BEGIN),
                (1, dmu_replay_record_DRR_OBJECT_RANGE),
                (1, dmu_replay_record_DRR_FREEOBJECTS),
                (1, dmu_replay_record_DRR_OBJECT),
                (1, dmu_replay_record_DRR_FREE),
                (1, dmu_replay_record_DRR_WRITE),
                (2, dmu_replay_record_DRR_END),
            ];
            // Linear scan of a (priority, type) table; None if the type is unknown.
            let type_ordering_find_prio =
                |req_ty, type_ordering: &[(usize, dmu_replay_record__bindgen_ty_1)]| {
                    for (prio, ty) in type_ordering {
                        if *ty == req_ty {
                            return Some(*prio);
                        }
                    }
                    return None;
                };
            let s_ty_prio =
                type_ordering_find_prio(self.0.drr_type, begin_end_type_ordering).unwrap(); // FIXME
            let o_ty_prio = type_ordering_find_prio(o.0.drr_type, begin_end_type_ordering).unwrap(); // FIXME
            if s_ty_prio.cmp(&o_ty_prio) != Ordering::Equal {
                return s_ty_prio.cmp(&o_ty_prio);
            }
            // handle all special cases that do not fit into OBJECT_RANGE:
            // trailing FREEOBJECTS sentinels and END sort after everything
            // else; (0, 0) means "not special, fall through".
            let order_for_records_after_last_object_range_and_its_frees_and_writes =
                |drr: &dmu_replay_record| {
                    if drr.drr_type == dmu_replay_record_DRR_FREEOBJECTS {
                        let (firstobj, numobj) = {
                            let fos = drr.drr_u.drr_freeobjects;
                            (fos.drr_firstobj, fos.drr_numobjs)
                        };
                        // FREEOBJECTS(firstobj=X, numobjs=0)
                        // <
                        // FREEOBJECTS (firstobj=X, numobjs=u64max-X)
                        if (numobj == 0) {
                            (1, 0)
                        } else if (numobj == std::u64::MAX - firstobj) {
                            (1, numobj)
                        } else {
                            (0, 0) // indeterminate
                        }
                    } else if drr.drr_type == dmu_replay_record_DRR_END {
                        (2, 0)
                    } else {
                        (0, 0) // indeterminate
                    }
                };
            {
                let cmp =
                    order_for_records_after_last_object_range_and_its_frees_and_writes(&self.0)
                        .cmp(
                            &order_for_records_after_last_object_range_and_its_frees_and_writes(
                                &o.0,
                            ),
                        );
                if cmp != Ordering::Equal {
                    return cmp;
                }
            }
            // Both records are ordinary body records from here on, so they
            // must carry an object id.
            let self_lobjid = self
                .lower_obj_id()
                .expect("expected record type to have object id");
            let other_lobjid = o
                .lower_obj_id()
                .expect("expected record type to have object id");
            // group all records by OBJECT_RANGE (32 objects per range)
            let self_or = (self_lobjid / 32);
            let othr_or = (other_lobjid / 32);
            if self_or.cmp(&othr_or) != Ordering::Equal {
                return self_or.cmp(&othr_or);
            }
            // within an OBJECT_RANGE order by the following cohorts:
            // 1 2 3
            // OBJECT_RANGE < (FREEOBJECTS,OBJECT) < (WRITE,FREE)
            const by_object_range_ordering: &'_ [(usize, dmu_replay_record__bindgen_ty_1)] = &[
                (1, dmu_replay_record_DRR_OBJECT_RANGE),
                (2, dmu_replay_record_DRR_FREEOBJECTS),
                (2, dmu_replay_record_DRR_OBJECT),
                (3, dmu_replay_record_DRR_FREE),
                (3, dmu_replay_record_DRR_WRITE),
            ];
            let s_ty_prio =
                type_ordering_find_prio(self.0.drr_type, by_object_range_ordering).unwrap(); // FIXME
            let o_ty_prio =
                type_ordering_find_prio(o.0.drr_type, by_object_range_ordering).unwrap(); // FIXME
            if s_ty_prio.cmp(&o_ty_prio) != Ordering::Equal {
                return s_ty_prio.cmp(&o_ty_prio);
            }
            // within each cohort, it is sufficient to order by firstobj
            if self_lobjid.cmp(&other_lobjid) != Ordering::Equal {
                return self_lobjid.cmp(&other_lobjid);
            }
            // within cohort 3, order by offset, then FREE < WRITE, then length
            assert_eq!(s_ty_prio, o_ty_prio, "equality, see above");
            if s_ty_prio == 3 {
                let s_offset = self
                    .offset_in_object_if_any()
                    .ok_or_else(|| {
                        format_err!("record type must have offset in object:\n{}", diff_it())
                    })
                    .unwrap();
                let o_offset = o
                    .offset_in_object_if_any()
                    .ok_or_else(|| {
                        format_err!("record type must have offset in object:\n{}", diff_it())
                    })
                    .unwrap();
                if s_offset.cmp(&o_offset) != Ordering::Equal {
                    return s_offset.cmp(&o_offset);
                }
                // within cohort 3: if offset is same, FREE < WRITE
                const cohort_3_type_ordering: &'_ [(usize, dmu_replay_record__bindgen_ty_1)] = &[
                    (1, dmu_replay_record_DRR_FREE),
                    (2, dmu_replay_record_DRR_WRITE),
                ];
                let s_ty_prio =
                    type_ordering_find_prio(self.0.drr_type, cohort_3_type_ordering).unwrap(); // FIXME
                let o_ty_prio =
                    type_ordering_find_prio(o.0.drr_type, cohort_3_type_ordering).unwrap(); // FIXME
                if s_ty_prio.cmp(&o_ty_prio) != Ordering::Equal {
                    return s_ty_prio.cmp(&o_ty_prio);
                }
                // same offset and same type (FREE / WRITE): let the length win
                let s_length = self
                    .length_in_object_if_any()
                    .ok_or_else(|| {
                        format_err!("record type must have length in object:\n{}", diff_it())
                    })
                    .unwrap();
                let o_length = o
                    .length_in_object_if_any()
                    .ok_or_else(|| {
                        format_err!("record type must have length in object:\n{}", diff_it())
                    })
                    .unwrap();
                if s_length.cmp(&o_length) != Ordering::Equal {
                    return s_length.cmp(&o_length);
                }
                // FALLTHROUGH
            }
            // we have no more discriminators (that we know of at the time of writing)
            // => if the records are not equal, the stream is considered invalid
            // FIXME 111!!!! — Debug-string equality is a weak proxy for structural equality
            if format!("{:?}", drr_debug(&self.0)) == format!("{:?}", drr_debug(&o.0)) {
                return Ordering::Equal;
            } else {
                panic!("unexpected record:\n{}", diff_it());
            }
        }
    }
}
|
use super::*;
use gfx_hal::command::Level;
use gfx_hal::MemoryTypeId;
/// A texture uploaded to GPU memory, together with every gfx-hal resource
/// needed to sample it and later destroy it (see `manually_drop`).
pub struct LoadedTexture<B: Backend> {
    /// The device image holding the texel data.
    pub image: ManuallyDrop<B::Image>,
    /// Pixel format the image was created with.
    pub format: Format,
    /// Memory requirements the device reported for `image`.
    pub requirements: Requirements,
    /// The device-local memory bound to `image`.
    pub memory: ManuallyDrop<B::Memory>,
    /// Full-image color view used for sampling.
    pub image_view: ManuallyDrop<B::ImageView>,
    /// Sampler created alongside the image (linear filter, clamped wrap).
    pub sampler: ManuallyDrop<B::Sampler>,
}
// type Texture = LoadedImage<B: Backend, D: Device<Backend>>;
// type Texture = LoadedImage<B: Backend, D: Device<Backend>>;
impl<B: Backend> LoadedTexture<B> {
    /// Uploads an 8-bit RGBA image to the device by delegating to
    /// `from_buffer` with `Format::Rgba8Srgb`.
    pub fn from_image(
        adapter: &Adapter<B>,
        device: &Dev<B>,
        command_pool: &mut B::CommandPool,
        command_queue: &mut B::CommandQueue,
        img: image::RgbaImage,
    ) -> Result<Self, Error> {
        Self::from_buffer(
            adapter,
            device,
            command_pool,
            command_queue,
            &(*img),
            img.width(),
            img.height(),
            Format::Rgba8Srgb,
        )
    }
    /// Uploads the raw bytes described by `spec` (format, width, height,
    /// buffer) by delegating to `from_buffer`.
    pub fn from_texture_spec(
        adapter: &Adapter<B>,
        device: &Dev<B>,
        command_pool: &mut B::CommandPool,
        command_queue: &mut B::CommandQueue,
        spec: &TextureSpec,
    ) -> Result<Self, Error> {
        Self::from_buffer(
            adapter,
            device,
            command_pool,
            command_queue,
            spec.buffer,
            spec.width,
            spec.height,
            spec.format,
        )
    }
    /// Uploads `buffer` into a new device-local, sampled image.
    ///
    /// `buffer` must contain `height` tightly packed rows of
    /// `width * bytes-per-pixel(format)` bytes (rows are re-padded to the
    /// device's `optimal_buffer_copy_pitch_alignment` while staging).
    ///
    /// The upload is synchronous: a one-shot command buffer transitions the
    /// image to TransferDstOptimal, copies the staging buffer in, transitions
    /// to ShaderReadOnlyOptimal, and this function blocks on a fence until
    /// the GPU is done. Also creates a full-image view and a linear/clamp
    /// sampler. Errors are reported as stringy `Error`s from each step.
    pub fn from_buffer(
        adapter: &Adapter<B>,
        device: &Dev<B>,
        command_pool: &mut B::CommandPool,
        command_queue: &mut B::CommandQueue,
        buffer: &[u8],
        width: u32,
        height: u32,
        format: Format,
    ) -> Result<Self, Error> {
        let pixel_size = (format.surface_desc().bits / 8) as usize; // size_of::<image::Rgba<u8>>();
        let row_size = pixel_size * width as usize;
        let limits = adapter.physical_device.limits();
        let row_alignment_mask = limits.optimal_buffer_copy_pitch_alignment as u32 - 1;
        // Round each row up to the device's required copy-pitch alignment.
        let row_pitch = ((row_size as u32 + row_alignment_mask) & !row_alignment_mask) as usize;
        debug_assert!(row_pitch as usize >= row_size);
        // 1. make a staging buffer with enough memory for the image, and a
        // transfer_src usage
        let required_bytes = row_pitch * height as usize;
        unsafe {
            // 0. First we compute some memory related values.
            let staging_bundle: BufferBundle<B> =
                BufferBundle::new(&adapter, device, required_bytes, BufferUsage::TRANSFER_SRC)?;
            let range = 0..staging_bundle.requirements.size;
            let memory = &(*staging_bundle.memory);
            // 2. map the staging memory and copy row by row, honoring row_pitch
            let mut target = std::slice::from_raw_parts_mut(
                device.map_memory(memory, range.clone()).unwrap(),
                height as usize * row_pitch,
            );
            for y in 0..height as usize {
                let row = &buffer[y * row_size..(y + 1) * row_size];
                let dest_base = y * row_pitch;
                target[dest_base..dest_base + row.len()].copy_from_slice(row);
            }
            // Flush before unmapping so the device sees the writes; the flush
            // result is checked only after unmap so the memory is always unmapped.
            let res = device.flush_mapped_memory_ranges(Some(&(memory, range)));
            device.unmap_memory(memory);
            res?;
            // 3. Make an image with transfer_dst and SAMPLED usage
            let mut the_image = device
                .create_image(
                    gfx_hal::image::Kind::D2(width, height, 1, 1),
                    1,
                    format,
                    gfx_hal::image::Tiling::Optimal,
                    gfx_hal::image::Usage::TRANSFER_DST | gfx_hal::image::Usage::SAMPLED,
                    gfx_hal::image::ViewCapabilities::empty(),
                )
                .map_err(|_| "Couldn't create the image!")?;
            // 4. allocate memory for the image and bind it
            let requirements = device.get_image_requirements(&the_image);
            let memory_type_id = adapter
                .physical_device
                .memory_properties()
                .memory_types
                .iter()
                .enumerate()
                .find(|&(id, memory_type)| {
                    // BIG NOTE: THIS IS DEVICE LOCAL NOT CPU VISIBLE
                    requirements.type_mask & (1 << id) != 0
                        && memory_type.properties.contains(Properties::DEVICE_LOCAL)
                })
                .map(|(id, _)| MemoryTypeId(id))
                .ok_or("Couldn't find a memory type to support the image!")?;
            let memory = device
                .allocate_memory(memory_type_id, requirements.size)
                .map_err(|_| "Couldn't allocate image memory!")?;
            device
                .bind_image_memory(&memory, 0, &mut the_image)
                .map_err(|_| "Couldn't bind the image memory!")?;
            // 5. create image view and sampler
            let image_view = device
                .create_image_view(
                    &the_image,
                    gfx_hal::image::ViewKind::D2,
                    format,
                    gfx_hal::format::Swizzle::NO,
                    SubresourceRange {
                        aspects: Aspects::COLOR,
                        levels: 0..1,
                        layers: 0..1,
                    },
                )
                .map_err(|_| "Couldn't create the image view!")?;
            let sampler = device
                .create_sampler(&gfx_hal::image::SamplerDesc::new(
                    gfx_hal::image::Filter::Linear,
                    gfx_hal::image::WrapMode::Clamp,
                ))
                .map_err(|_| "Couldn't create the sampler!")?;
            // 6. create a command buffer
            let mut cmd_buffer = command_pool.allocate_one(Level::Primary);
            cmd_buffer.begin(
                CommandBufferFlags::EMPTY,
                CommandBufferInheritanceInfo::default(),
            );
            // 7. Use a pipeline barrier to transition the image from empty/undefined
            // to TRANSFER_WRITE/TransferDstOptimal
            let image_barrier = gfx_hal::memory::Barrier::Image {
                states: (gfx_hal::image::Access::empty(), Layout::Undefined)
                    ..(
                        gfx_hal::image::Access::TRANSFER_WRITE,
                        Layout::TransferDstOptimal,
                    ),
                target: &the_image,
                families: None,
                range: SubresourceRange {
                    aspects: Aspects::COLOR,
                    levels: 0..1,
                    layers: 0..1,
                },
            };
            cmd_buffer.pipeline_barrier(
                PipelineStage::TOP_OF_PIPE..PipelineStage::TRANSFER,
                gfx_hal::memory::Dependencies::empty(),
                &[image_barrier],
            );
            // 8. perform copy from staging buffer to image
            cmd_buffer.copy_buffer_to_image(
                &staging_bundle.buffer,
                &the_image,
                Layout::TransferDstOptimal,
                &[gfx_hal::command::BufferImageCopy {
                    buffer_offset: 0,
                    // buffer_width is measured in pixels, hence pitch / pixel size
                    buffer_width: (row_pitch / pixel_size) as u32,
                    buffer_height: height,
                    image_layers: gfx_hal::image::SubresourceLayers {
                        aspects: Aspects::COLOR,
                        level: 0,
                        layers: 0..1,
                    },
                    image_offset: gfx_hal::image::Offset { x: 0, y: 0, z: 0 },
                    image_extent: gfx_hal::image::Extent {
                        width: width,
                        height: height,
                        depth: 1,
                    },
                }],
            );
            // 9. use pipeline barrier to transition the image to SHADER_READ access/
            // ShaderReadOnlyOptimal layout
            let image_barrier = gfx_hal::memory::Barrier::Image {
                states: (
                    gfx_hal::image::Access::TRANSFER_WRITE,
                    Layout::TransferDstOptimal,
                )
                    ..(
                        gfx_hal::image::Access::SHADER_READ,
                        Layout::ShaderReadOnlyOptimal,
                    ),
                target: &the_image,
                families: None,
                range: SubresourceRange {
                    aspects: Aspects::COLOR,
                    levels: 0..1,
                    layers: 0..1,
                },
            };
            cmd_buffer.pipeline_barrier(
                PipelineStage::TRANSFER..PipelineStage::FRAGMENT_SHADER,
                gfx_hal::memory::Dependencies::empty(),
                &[image_barrier],
            );
            // 10. Submit the cmd buffer to queue and wait for it
            cmd_buffer.finish();
            let upload_fence = device
                .create_fence(false)
                .map_err(|_| "Couldn't create an upload fence!")?;
            command_queue.submit_without_semaphores(Some(&cmd_buffer), Some(&upload_fence));
            device
                .wait_for_fence(&upload_fence, core::u64::MAX)
                .map_err(|_| "Couldn't wait for the fence!")?;
            device.destroy_fence(upload_fence);
            // 11. Destroy the staging bundle and one shot buffer now that we're done
            staging_bundle.manually_drop(device);
            command_pool.free(Some(cmd_buffer));
            Ok(Self {
                format,
                image: ManuallyDrop::new(the_image),
                requirements,
                memory: ManuallyDrop::new(memory),
                image_view: ManuallyDrop::new(image_view),
                sampler: ManuallyDrop::new(sampler),
            })
        }
    }
    /// Destroys the sampler, image view, image and memory owned by `self`.
    ///
    /// # Safety
    /// Must be called at most once, and only when the device no longer uses
    /// these resources — TODO confirm callers wait for the device to be idle.
    pub unsafe fn manually_drop(&self, device: &Dev<B>) {
        use core::ptr::read;
        device.destroy_sampler(ManuallyDrop::into_inner(read(&self.sampler)));
        device.destroy_image_view(ManuallyDrop::into_inner(read(&self.image_view)));
        device.destroy_image(ManuallyDrop::into_inner(read(&self.image)));
        device.free_memory(ManuallyDrop::into_inner(read(&self.memory)));
    }
}
/// Borrowed description of raw texture data: pixel format, dimensions, and
/// the raw texel bytes (rows tightly packed — see `LoadedTexture::from_buffer`).
#[derive(Debug, Copy, Clone)]
pub struct TextureSpec<'a> {
    /// Pixel format of `buffer`.
    pub format: Format,
    /// Width in pixels.
    pub width: u32,
    /// Height in pixels.
    pub height: u32,
    /// Raw texel bytes; expected length is width * height * bytes-per-pixel.
    pub buffer: &'a [u8],
}
impl<'a> TextureSpec<'a> {
    /// Encodes `self.buffer` to an image file at `path`, deriving the image
    /// color type from `self.format`. Fails for formats without a known
    /// color mapping, or when the underlying encoder fails.
    pub fn save_as_image(&self, path: &std::path::Path) -> Result<(), Error> {
        use image::ColorType::*;
        // Translate the GPU surface format into the `image` crate's color type.
        let color = match self.format {
            Format::R8Unorm => Gray(8),
            Format::Rgba8Srgb => RGBA(8),
            _ => Err("texture format with unknown image color mapping")?,
        };
        image::save_buffer(path, self.buffer, self.width, self.height, color)
            .map_err(Into::into)
    }
}
|
use crate::{
error::{self, AuthError, Error},
token::{RequestReason, Token, TokenOrRequest, TokenProvider},
};
mod jwt;
use jwt::{Algorithm, Header, Key};
pub mod prelude {
    // Convenience re-exports: `use ...::prelude::*` brings in all provider
    // types, the default-credentials helper, and the core token traits.
    pub use super::{
        get_default_google_credentials, EndUserCredentials, MetadataServerProvider,
        ServiceAccountAccess, ServiceAccountInfo, TokenProviderWrapper,
    };
    pub use crate::token::{Token, TokenOrRequest, TokenProvider};
}
/// OAuth2 JWT-bearer grant type (RFC 7523), sent when exchanging a signed JWT
/// for an access token in `ServiceAccountAccess::get_token_with_subject`.
const GRANT_TYPE: &str = "urn:ietf:params:oauth:grant-type:jwt-bearer";
/// Minimal parts needed from a GCP service account key for token acquisition
#[derive(serde::Deserialize, Debug, Clone)]
pub struct ServiceAccountInfo {
    /// The private key we use to sign (PEM text; the base64 body is decoded
    /// in `ServiceAccountAccess::new`)
    pub private_key: String,
    /// The unique id used as the issuer of the JWT claim
    pub client_email: String,
    /// The URI we send the token requests to, eg https://oauth2.googleapis.com/token
    pub token_uri: String,
}
impl ServiceAccountInfo {
    /// Deserializes service account from a byte slice. This data is typically
    /// acquired by reading a service account JSON file from disk
    pub fn deserialize<T>(key_data: T) -> Result<Self, Error>
    where
        T: AsRef<[u8]>,
    {
        // `?` converts any serde error into our `Error` via `From`.
        Ok(serde_json::from_slice(key_data.as_ref())?)
    }
}
/// One cached token for a particular scope set, keyed by the hash produced
/// by `ServiceAccountAccess::serialize_scopes`.
struct Entry {
    // xxHash of the space-joined scope string
    hash: u64,
    // Most recently acquired token for that scope set
    token: Token,
}
/// A token provider for a GCP service account.
pub struct ServiceAccountAccess {
    // The parsed service-account key this provider signs JWTs with
    info: ServiceAccountInfo,
    // Raw private-key bytes, base64-decoded from the PEM body in `new`
    priv_key: Vec<u8>,
    // scope-hash -> token cache; kept sorted by hash for binary search
    cache: std::sync::Mutex<Vec<Entry>>,
}
/// Token provider that queries the GCP metadata server
/// (metadata.google.internal) for instance service-account tokens.
pub struct MetadataServerProvider {
    // Service-account name placed in the metadata URL ("default" if unset)
    account_name: String,
}
/// Both the `ServiceAccountAccess` and `MetadataServerProvider` get
/// back JSON responses with this schema from their endpoints.
#[derive(serde::Deserialize, Debug)]
struct TokenResponse {
    /// The actual token
    access_token: String,
    /// The token type — presumably always "Bearer"; the original comment said
    /// "Header", which looks like a typo (verify against the token endpoints)
    token_type: String,
    /// The time until the token expires and a new one needs to be requested
    expires_in: i64,
}
impl ServiceAccountAccess {
    /// Creates a new `ServiceAccountAccess` given the provided service
    /// account info. This can fail if the private key is encoded incorrectly.
    pub fn new(info: ServiceAccountInfo) -> Result<Self, Error> {
        // The PEM body sits between the 2nd and 3rd "-----" delimiters
        // (i.e. between the BEGIN and END marker lines).
        let pem_body = info
            .private_key
            .splitn(5, "-----")
            .nth(2)
            .ok_or(Error::InvalidKeyFormat)?;
        // Strip all whitespace (newlines included) before base64-decoding.
        let base64_key: String = pem_body.split_whitespace().collect();
        let priv_key = base64::decode_config(base64_key.as_bytes(), base64::STANDARD)?;
        Ok(Self {
            info,
            priv_key,
            cache: std::sync::Mutex::new(Vec::new()),
        })
    }
    /// Gets the [`ServiceAccountInfo`] this was created for
    pub fn get_account_info(&self) -> &ServiceAccountInfo {
        &self.info
    }
    /// Hashes a set of scopes to a numeric key we can use to have an in-memory
    /// cache of scopes -> token
    fn serialize_scopes<'a, I, S>(scopes: I) -> (u64, String)
    where
        S: AsRef<str> + 'a,
        I: Iterator<Item = &'a S>,
    {
        use std::hash::Hasher;
        // Join with single spaces; the hash is computed over the joined string.
        let joined = scopes.map(AsRef::as_ref).collect::<Vec<&str>>().join(" ");
        let mut hasher = twox_hash::XxHash::default();
        hasher.write(joined.as_bytes());
        (hasher.finish(), joined)
    }
}
impl TokenProvider for ServiceAccountAccess {
    /// Like [`ServiceAccountAccess::get_token`], but allows the JWT "subject"
    /// to be passed in.
    ///
    /// Returns a cached token when one is still valid for these scopes;
    /// otherwise returns a ready-to-send HTTP request that exchanges a
    /// freshly signed JWT for a token (the caller performs the I/O and feeds
    /// the response to `parse_token_response`).
    fn get_token_with_subject<'a, S, I, T>(
        &self,
        subject: Option<T>,
        scopes: I,
    ) -> Result<TokenOrRequest, Error>
    where
        S: AsRef<str> + 'a,
        I: IntoIterator<Item = &'a S>,
        T: Into<String>,
    {
        let (hash, scopes) = Self::serialize_scopes(scopes.into_iter());
        // Fast path: serve an unexpired cached token for this scope set.
        let reason = {
            let cache = self.cache.lock().map_err(|_e| Error::Poisoned)?;
            match cache.binary_search_by(|i| i.hash.cmp(&hash)) {
                Ok(i) => {
                    let token = &cache[i].token;
                    if !token.has_expired() {
                        return Ok(TokenOrRequest::Token(token.clone()));
                    }
                    RequestReason::Expired
                }
                Err(_) => RequestReason::ScopesChanged,
            }
        };
        let issued = chrono::Utc::now().timestamp();
        let expiry = issued + 3600 - 5; // Give us some wiggle room near the hour mark
        let claims = jwt::Claims {
            issuer: self.info.client_email.clone(),
            scope: scopes,
            audience: self.info.token_uri.clone(),
            expiration: expiry,
            issued_at: issued,
            subject: subject.map(|s| s.into()),
        };
        // Sign the claims with our RSA private key (RS256).
        let assertion = jwt::encode(
            &Header::new(Algorithm::RS256),
            &claims,
            Key::Pkcs8(&self.priv_key),
        )?;
        // Form-encoded JWT-bearer grant request to the token endpoint.
        let body = url::form_urlencoded::Serializer::new(String::new())
            .append_pair("grant_type", GRANT_TYPE)
            .append_pair("assertion", &assertion)
            .finish();
        let body = Vec::from(body);
        let request = http::Request::builder()
            .method("POST")
            .uri(&self.info.token_uri)
            .header(
                http::header::CONTENT_TYPE,
                "application/x-www-form-urlencoded",
            )
            .header(http::header::CONTENT_LENGTH, body.len())
            .body(body)?;
        Ok(TokenOrRequest::Request {
            reason,
            request,
            scope_hash: hash,
        })
    }
    /// Handle responses from the token URI request we generated in
    /// `get_token`. This method deserializes the response and stores
    /// the token in a local cache, so that future lookups for the
    /// same scopes don't require new http requests.
    fn parse_token_response<S>(
        &self,
        hash: u64,
        response: http::Response<S>,
    ) -> Result<Token, Error>
    where
        S: AsRef<[u8]>,
    {
        let (parts, body) = response.into_parts();
        if !parts.status.is_success() {
            // On failure, prefer a structured AuthError if the server sent
            // a JSON body; otherwise surface the raw HTTP status.
            let body_bytes = body.as_ref();
            if parts
                .headers
                .get(http::header::CONTENT_TYPE)
                .and_then(|ct| ct.to_str().ok())
                == Some("application/json; charset=utf-8")
            {
                if let Ok(auth_error) = serde_json::from_slice::<error::AuthError>(body_bytes) {
                    return Err(Error::Auth(auth_error));
                }
            }
            return Err(Error::HttpStatus(parts.status));
        }
        let token_res: TokenResponse = serde_json::from_slice(body.as_ref())?;
        let token: Token = token_res.into();
        // Last token wins, which...should?...be fine
        {
            let mut cache = self.cache.lock().map_err(|_e| Error::Poisoned)?;
            match cache.binary_search_by(|i| i.hash.cmp(&hash)) {
                Ok(i) => cache[i].token = token.clone(),
                Err(i) => {
                    // Insert at the sort position so the Vec stays ordered by hash.
                    cache.insert(
                        i,
                        Entry {
                            hash,
                            token: token.clone(),
                        },
                    );
                }
            };
        }
        Ok(token)
    }
}
impl MetadataServerProvider {
    /// Creates a provider for the named service account.
    /// GCP uses "default" as the name in metadata URIs when none is given.
    pub fn new(account_name: Option<String>) -> Self {
        Self {
            account_name: account_name.unwrap_or_else(|| "default".to_string()),
        }
    }
}
impl TokenProvider for MetadataServerProvider {
    /// Builds a GET request against the instance metadata server's token
    /// endpoint. `subject` is not supported and yields an auth error.
    fn get_token_with_subject<'a, S, I, T>(
        &self,
        subject: Option<T>,
        scopes: I,
    ) -> Result<TokenOrRequest, Error>
    where
        S: AsRef<str> + 'a,
        I: IntoIterator<Item = &'a S>,
        T: Into<String>,
    {
        // We can only support subject being none
        if subject.is_some() {
            return Err(Error::Auth(AuthError {
                error: Some("Unsupported".to_string()),
                error_description: Some(
                    "Metadata server tokens do not support jwt subjects".to_string(),
                ),
            }));
        }
        // Regardless of GCE or GAE, the token_uri is
        // computeMetadata/v1/instance/service-accounts/<name or
        // id>/token.
        let mut url = format!(
            "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/{}/token",
            self.account_name
        );
        // Merge all the scopes into a single string.
        let scopes_str = scopes
            .into_iter()
            .map(|s| s.as_ref())
            .collect::<Vec<&str>>()
            .join(",");
        // If we have any scopes, pass them along in the querystring.
        if !scopes_str.is_empty() {
            url.push_str("?scopes=");
            url.push_str(&scopes_str);
        }
        // Make an empty body, but as Vec<u8> to match the request in
        // TokenOrRequest.
        let empty_body: Vec<u8> = vec![];
        let request = http::Request::builder()
            .method("GET")
            .uri(url)
            // To get responses from GCE, we must pass along the
            // Metadata-Flavor header with a value of "Google".
            .header("Metadata-Flavor", "Google")
            .body(empty_body)?;
        Ok(TokenOrRequest::Request {
            request,
            reason: RequestReason::ScopesChanged,
            scope_hash: 0,
        })
    }
    /// Deserializes a metadata-server token response into a `Token`.
    /// No caching: the metadata server does its own.
    fn parse_token_response<S>(
        &self,
        _hash: u64,
        response: http::Response<S>,
    ) -> Result<Token, Error>
    where
        S: AsRef<[u8]>,
    {
        let (parts, body) = response.into_parts();
        if !parts.status.is_success() {
            return Err(Error::HttpStatus(parts.status));
        }
        // Deserialize our response, or fail.
        let token_res: TokenResponse = serde_json::from_slice(body.as_ref())?;
        // Convert it into our output.
        let token: Token = token_res.into();
        Ok(token)
    }
}
/// The fields from a well formed `application_default_credentials.json`,
/// as written by `gcloud auth application-default login`.
#[derive(serde::Deserialize, Debug, Clone)]
pub struct EndUserCredentials {
    /// The OAuth2 client_id
    pub client_id: String,
    /// The OAuth2 client_secret
    pub client_secret: String,
    /// The OAuth2 refresh_token
    pub refresh_token: String,
    /// The client type (the value must be authorized_user)
    #[serde(rename = "type")]
    pub client_type: String,
}
impl EndUserCredentials {
    /// Deserializes the `EndUserCredentials` from a byte slice. This
    /// data is typically acquired by reading an
    /// `application_default_credentials.json` file from disk.
    pub fn deserialize<T>(key_data: T) -> Result<Self, Error>
    where
        T: AsRef<[u8]>,
    {
        // `?` converts any serde error into our `Error` via `From`.
        Ok(serde_json::from_slice(key_data.as_ref())?)
    }
}
impl TokenProvider for EndUserCredentials {
    /// Builds a refresh-token POST to the Google OAuth2 token endpoint.
    /// `subject` is not supported and yields an auth error; scopes are
    /// ignored (see the parameter comment below).
    fn get_token_with_subject<'a, S, I, T>(
        &self,
        subject: Option<T>,
        // EndUserCredentials only have the scopes they were granted
        // via their authorization. So whatever scopes you're asking
        // for, better have been handled when authorized. `gcloud auth
        // application-default login` will get the
        // https://www.googleapis.com/auth/cloud-platform which
        // includes all *GCP* APIs.
        _scopes: I,
    ) -> Result<TokenOrRequest, Error>
    where
        S: AsRef<str> + 'a,
        I: IntoIterator<Item = &'a S>,
        T: Into<String>,
    {
        // We can only support subject being none
        if subject.is_some() {
            return Err(Error::Auth(AuthError {
                error: Some("Unsupported".to_string()),
                error_description: Some(
                    "ADC / User tokens do not support jwt subjects".to_string(),
                ),
            }));
        }
        // To get an access token, we need to perform a refresh
        // following the instructions at
        // https://developers.google.com/identity/protocols/oauth2/web-server#offline
        // (i.e., POST our client data as a refresh_token request to
        // the /token endpoint).
        let url = "https://oauth2.googleapis.com/token";
        // Build up the parameters as a form encoded string.
        let body = url::form_urlencoded::Serializer::new(String::new())
            .append_pair("client_id", &self.client_id)
            .append_pair("client_secret", &self.client_secret)
            .append_pair("grant_type", "refresh_token")
            .append_pair("refresh_token", &self.refresh_token)
            .finish();
        let body = Vec::from(body);
        let request = http::Request::builder()
            .method("POST")
            .uri(url)
            .header(
                http::header::CONTENT_TYPE,
                "application/x-www-form-urlencoded",
            )
            .header(http::header::CONTENT_LENGTH, body.len())
            .body(body)?;
        Ok(TokenOrRequest::Request {
            request,
            reason: RequestReason::ScopesChanged,
            scope_hash: 0,
        })
    }
    /// Deserializes the refresh response into a `Token`. No local caching.
    fn parse_token_response<S>(
        &self,
        _hash: u64,
        response: http::Response<S>,
    ) -> Result<Token, Error>
    where
        S: AsRef<[u8]>,
    {
        let (parts, body) = response.into_parts();
        if !parts.status.is_success() {
            return Err(Error::HttpStatus(parts.status));
        }
        // Deserialize our response, or fail.
        let token_res: TokenResponse = serde_json::from_slice(body.as_ref())?;
        // TODO(boulos): The response also includes the set of scopes
        // (as "scope") that we're granted. We could check that
        // cloud-platform is in it.
        // Convert it into our output.
        let token: Token = token_res.into();
        Ok(token)
    }
}
/// Simple wrapper of our three GCP token providers.
pub enum TokenProviderWrapper {
    /// `gcloud` application-default (end-user) credentials
    EndUser(EndUserCredentials),
    /// GCE/GAE metadata-server tokens
    Metadata(MetadataServerProvider),
    /// Service-account key signing
    ServiceAccount(ServiceAccountAccess),
}
/// Implement `TokenProvider` for `TokenProviderWrapper` so that
/// clients don't have to do the dispatch themselves.
impl TokenProvider for TokenProviderWrapper {
    /// Delegates to the wrapped provider's `get_token_with_subject`.
    fn get_token_with_subject<'a, S, I, T>(
        &self,
        subject: Option<T>,
        scopes: I,
    ) -> Result<TokenOrRequest, Error>
    where
        S: AsRef<str> + 'a,
        I: IntoIterator<Item = &'a S>,
        T: Into<String>,
    {
        match self {
            Self::EndUser(x) => x.get_token_with_subject(subject, scopes),
            Self::Metadata(x) => x.get_token_with_subject(subject, scopes),
            Self::ServiceAccount(x) => x.get_token_with_subject(subject, scopes),
        }
    }
    /// Delegates to the wrapped provider's `parse_token_response`.
    fn parse_token_response<S>(
        &self,
        hash: u64,
        response: http::Response<S>,
    ) -> Result<Token, Error>
    where
        S: AsRef<[u8]>,
    {
        match self {
            Self::EndUser(x) => x.parse_token_response(hash, response),
            Self::Metadata(x) => x.parse_token_response(hash, response),
            Self::ServiceAccount(x) => x.parse_token_response(hash, response),
        }
    }
}
/// Get the path to the gcloud `application_default_credentials.json`
/// file. This function respects the `CLOUDSDK_CONFIG` environment
/// variable. If unset, it looks in the platform-specific gcloud
/// configuration directories (%APPDATA%/gcloud on Windows or
/// $HOME/.config/gcloud otherwise).
fn gcloud_config_file() -> Result<std::path::PathBuf, std::env::VarError> {
    const CRED_FILE: &str = "application_default_credentials.json";
    // An explicit CLOUDSDK_CONFIG always wins over the platform default.
    match std::env::var("CLOUDSDK_CONFIG") {
        Ok(override_dir) => Ok(std::path::Path::new(&override_dir).join(CRED_FILE)),
        Err(_) => {
            // %APPDATA%/gcloud/<file> on Windows, $HOME/.config/gcloud/<file>
            // elsewhere; a missing APPDATA/HOME propagates as VarError.
            let base_dir = if cfg!(windows) {
                std::path::Path::new(&std::env::var("APPDATA")?).to_path_buf()
            } else {
                std::path::Path::new(&std::env::var("HOME")?).join(".config")
            };
            Ok(base_dir.join("gcloud").join(CRED_FILE))
        }
    }
}
/// Get a `TokenProvider` following the "Google Default Credentials"
/// flow, in order:
///
/// * If the `GOOGLE_APPLICATION_CREDENTIALS` environment variable is
/// set. Use that as a path to a service account JSON file.
///
/// * Check for a gcloud config file (see `gcloud_config_file`) to
/// get `EndUserCredentials`.
///
/// * If we're running on GCP, use the local metadata server.
///
/// * Otherwise, return None.
///
/// Note: this panics (via `expect`/`panic!`) on unreadable or malformed
/// credential files rather than returning an error.
pub fn get_default_google_credentials() -> Option<TokenProviderWrapper> {
    // Read in the usual key file.
    let env_key = "GOOGLE_APPLICATION_CREDENTIALS";
    // Use var_os to get the path.
    let cred_env = std::env::var_os(env_key);
    // If the environment variable is present, try to open it as a
    // Service Account. Otherwise, proceed to step 2 (checking the
    // gcloud credentials).
    if let Some(cred_path) = cred_env {
        let key_data = std::fs::read_to_string(cred_path).expect("Failed to read credential file");
        let acct_info =
            ServiceAccountInfo::deserialize(key_data).expect("Failed to decode credential file");
        return Some(TokenProviderWrapper::ServiceAccount(
            ServiceAccountAccess::new(acct_info).expect("failed to create OAuth Token Provider"),
        ));
    }
    let gcloud_file = gcloud_config_file();
    if let Ok(gcloud_file) = gcloud_file {
        let gcloud_data = std::fs::read_to_string(gcloud_file);
        match gcloud_data {
            Ok(json_data) => {
                let end_user_credentials = EndUserCredentials::deserialize(json_data)
                    .expect("Failed to decode application_default_credentials.json");
                return Some(TokenProviderWrapper::EndUser(end_user_credentials));
            }
            Err(error) => match error.kind() {
                // Skip not found errors, so we fall to the metadata server check.
                std::io::ErrorKind::NotFound => {}
                other_error => panic!(
                    "Failed to open gcloud credential file. Error {:?}",
                    other_error
                ),
            },
        }
    }
    // Finally, if we are on GCP, use the metadata server. If we're not
    // on GCP, this will just fail to read the file.
    let product_file = "/sys/class/dmi/id/product_name";
    let product_name = std::fs::read_to_string(product_file);
    if let Ok(full_name) = product_name {
        // The product name can annoyingly include a newline...
        let trimmed = full_name.trim();
        match trimmed {
            // This matches the Golang client. If new products
            // add additional values, this will need to be updated.
            "Google" | "Google Compute Engine" => {
                return Some(TokenProviderWrapper::Metadata(MetadataServerProvider::new(
                    None,
                )));
            }
            _ => {}
        }
    }
    // None of our checks worked. Give up.
    None
}
impl From<TokenResponse> for Token {
fn from(tr: TokenResponse) -> Self {
let expires_ts = chrono::Utc::now().timestamp() + tr.expires_in;
Self {
access_token: tr.access_token,
token_type: tr.token_type,
refresh_token: String::new(),
expires_in: Some(tr.expires_in),
expires_in_timestamp: Some(expires_ts),
}
}
}
// Unit tests for scope hashing and the request-building paths of the
// token providers. None of these hit the network: each provider is asked
// for a token and must hand back an HTTP request to perform instead.
#[cfg(test)]
mod test {
    use super::*;
    // serialize_scopes must produce the same hash and joined string for
    // borrowed (&str) and owned (String) scope lists.
    #[test]
    fn hash_scopes() {
        use std::hash::Hasher;
        // Expected hash: scopes fed to the hasher space-separated.
        let expected = {
            let mut hasher = twox_hash::XxHash::default();
            hasher.write(b"scope1 ");
            hasher.write(b"scope2 ");
            hasher.write(b"scope3");
            hasher.finish()
        };
        let (hash, scopes) =
            ServiceAccountAccess::serialize_scopes(["scope1", "scope2", "scope3"].iter());
        assert_eq!(expected, hash);
        assert_eq!("scope1 scope2 scope3", scopes);
        // Same scopes, but owned Strings this time.
        let (hash, scopes) = ServiceAccountAccess::serialize_scopes(
            vec![
                "scope1".to_owned(),
                "scope2".to_owned(),
                "scope3".to_owned(),
            ]
            .iter(),
        );
        assert_eq!(expected, hash);
        assert_eq!("scope1 scope2 scope3", scopes);
    }
    // With no scopes the metadata request must carry no query string.
    #[test]
    fn metadata_noscopes() {
        let provider = MetadataServerProvider::new(None);
        let scopes: Vec<&str> = vec![];
        let token_or_req = provider
            .get_token(&scopes)
            .expect("Should have gotten a request");
        match token_or_req {
            TokenOrRequest::Token(_) => panic!("Shouldn't have gotten a token"),
            TokenOrRequest::Request { request, .. } => {
                // Should be the metadata server
                assert_eq!(request.uri().host(), Some("metadata.google.internal"));
                // Since we had no scopes, no querystring.
                assert_eq!(request.uri().query(), None);
            }
        }
    }
    // Scopes must be passed to the metadata server as a comma-separated
    // `scopes=` query parameter.
    #[test]
    fn metadata_with_scopes() {
        let provider = MetadataServerProvider::new(None);
        let scopes: Vec<&str> = vec!["scope1", "scope2"];
        let token_or_req = provider
            .get_token(&scopes)
            .expect("Should have gotten a request");
        match token_or_req {
            TokenOrRequest::Token(_) => panic!("Shouldn't have gotten a token"),
            TokenOrRequest::Request { request, .. } => {
                // Should be the metadata server
                assert_eq!(request.uri().host(), Some("metadata.google.internal"));
                // Since we had some scopes, we should have a querystring.
                assert!(request.uri().query().is_some());
                let query_string = request.uri().query().unwrap();
                // We don't care about ordering, but the query_string
                // should be comma-separated and only include the
                // scopes.
                assert!(
                    query_string == "scopes=scope1,scope2"
                        || query_string == "scopes=scope2,scope1"
                );
            }
        }
    }
    // End-user (gcloud) credentials ignore caller-supplied scopes and
    // always talk to the Google OAuth2 endpoint.
    #[test]
    fn end_user_credentials() {
        let provider = EndUserCredentials {
            client_id: "fake_client@domain.com".into(),
            client_secret: "TOP_SECRET".into(),
            refresh_token: "REFRESH_TOKEN".into(),
            client_type: "authorized_user".into(),
        };
        // End-user credentials don't let you override scopes.
        let scopes: Vec<&str> = vec!["better_not_be_there"];
        let token_or_req = provider
            .get_token(&scopes)
            .expect("Should have gotten a request");
        match token_or_req {
            TokenOrRequest::Token(_) => panic!("Shouldn't have gotten a token"),
            TokenOrRequest::Request { request, .. } => {
                // Should be the Google oauth2 API
                assert_eq!(request.uri().host(), Some("oauth2.googleapis.com"));
                // Scopes aren't passed for end user credentials
                assert_eq!(request.uri().query(), None);
            }
        }
    }
    // The TokenProviderWrapper enum must forward get_token to the
    // wrapped provider unchanged.
    #[test]
    fn wrapper_dispatch() {
        // Wrap the metadata server provider.
        let provider = TokenProviderWrapper::Metadata(MetadataServerProvider::new(None));
        // And then have the same test as metadata_with_scopes
        let scopes: Vec<&str> = vec!["scope1", "scope2"];
        let token_or_req = provider
            .get_token(&scopes)
            .expect("Should have gotten a request");
        match token_or_req {
            TokenOrRequest::Token(_) => panic!("Shouldn't have gotten a token"),
            TokenOrRequest::Request { request, .. } => {
                // Should be the metadata server
                assert_eq!(request.uri().host(), Some("metadata.google.internal"));
                // Since we had some scopes, we should have a querystring.
                assert!(request.uri().query().is_some());
                let query_string = request.uri().query().unwrap();
                // We don't care about ordering, but the query_string
                // should be comma-separated and only include the
                // scopes.
                assert!(
                    query_string == "scopes=scope1,scope2"
                        || query_string == "scopes=scope2,scope1"
                );
            }
        }
    }
}
|
use std::env;
use dotenv;
use url::Url;
use std::io::{BufRead, BufReader, Write};
use std::net::TcpListener;
use oauth2::prelude::*;
use oauth2::{
AccessToken,
AuthorizationCode,
AuthUrl,
ClientId,
ClientSecret,
CsrfToken,
RedirectUrl,
Scope,
TokenResponse,
TokenUrl
};
use oauth2::basic::BasicClient;
/// OAuth2 client credentials, loaded from the `.env` file by
/// `get_init_oauth` (CLIENT_ID / CLIENT_SECRET entries).
pub struct OAuth2 {
    /// The OAuth2 client ID.
    pub client_id: String,
    /// The matching OAuth2 client secret.
    pub client_secret: String,
}
/// Load a previously saved access code from the `ACCESS_CODE` entry of
/// the `.env` file.
///
/// Panics if the `.env` file cannot be read or the variable is missing.
pub fn get_old_access_code() -> String {
    dotenv::dotenv().expect("Failed to read .env file");
    let code = env::var("ACCESS_CODE");
    code.expect("ACCESS_CODE not found")
}
/// Build an `OAuth2` credential pair from the `CLIENT_ID` and
/// `CLIENT_SECRET` entries of the `.env` file.
///
/// Panics if the `.env` file cannot be read or either variable is
/// missing.
pub fn get_init_oauth() -> OAuth2 {
    dotenv::dotenv().expect("Failed to read .env file");
    OAuth2 {
        client_id: env::var("CLIENT_ID").expect("CLIENT_ID not found"),
        client_secret: env::var("CLIENT_SECRET").expect("CLIENT_SECRET not found"),
    }
}
// main.rs passes in ID and secret, get_access_code() returns access code
pub fn get_new_access_code (oauth_vars: OAuth2) -> Option<String> {
let google_client_id = ClientId::new(
oauth_vars.client_id,
);
let google_client_secret = ClientSecret::new(
oauth_vars.client_secret,
);
let auth_url = AuthUrl::new(
Url::parse("https://accounts.google.com/o/oauth2/v2/auth")
.expect("Invalid authorization endpoint URL"),
);
let token_url = TokenUrl::new(
Url::parse("https://www.googleapis.com/oauth2/v3/token")
.expect("Invalid token endpoint URL"),
);
// Set up the config for the Google OAuth2 process.
let client = BasicClient::new(
google_client_id,
Some(google_client_secret),
auth_url,
Some(token_url),
)
// This requests access to gmail specifically
.add_scope(Scope::new(
"https://mail.google.com/".to_string(),
))
// This example will be running its own server at localhost:8080.
// See below for the server implementation.
.set_redirect_url(RedirectUrl::new(
Url::parse("http://localhost:8080").expect("Invalid redirect URL"),
));
// Generate the authorization URL to which we'll redirect the user.
let (authorize_url, csrf_state) = client.authorize_url(CsrfToken::new_random);
println!(
"Open this URL in your browser:\n{}\n",
authorize_url.to_string()
);
// A very naive implementation of the redirect server.
let listener = TcpListener::bind("127.0.0.1:8080").unwrap();
for stream in listener.incoming() {
if let Ok(mut stream) = stream {
let state;
let code;
{
let mut reader = BufReader::new(&stream);
let mut request_line = String::new();
reader.read_line(&mut request_line).unwrap();
let redirect_url = request_line.split_whitespace().nth(1).unwrap();
let url = Url::parse(&("http://localhost".to_string() + redirect_url)).unwrap();
let code_pair = url
.query_pairs()
.find(|pair| {
let &(ref key, _) = pair;
key == "code"
})
.unwrap();
let (_, value) = code_pair;
code = AuthorizationCode::new(value.into_owned());
let state_pair = url
.query_pairs()
.find(|pair| {
let &(ref key, _) = pair;
key == "state"
})
.unwrap();
let (_, value) = state_pair;
state = CsrfToken::new(value.into_owned());
}
let message = "Go back to your terminal :)";
let response = format!(
"HTTP/1.1 200 OK\r\ncontent-length: {}\r\n\r\n{}",
message.len(),
message
);
stream.write_all(response.as_bytes()).unwrap();
println!("Google returned the following code:\n{}\n", code.secret());
println!(
"Google returned the following state:\n{} (expected `{}`)\n",
state.secret(),
csrf_state.secret()
);
// Exchange the code with a token.
let token = client.exchange_code(code);
println!("Google returned the following token:\n{:?}\n", token);
// if token is gucci, return that mf
match token {
Ok(response) => return Some(response.access_token().secret().to_string()),
_ => {
println!("bad response: {:?}", token);
return None::<String>
}
};
// The server will terminate itself after collecting the first code.
break;
}
}
println!("got past for loop :0 fuck", );
None::<String>
}
/// Persist the given access token for later reuse.
///
/// TODO(review): stub — the token is currently discarded.
fn save_token(token: AccessToken){
}
// svd2rust-style generated peripheral layout: the field order and the
// `_reserved*` padding arrays encode each register's byte offset (given
// in the per-field doc comments) from the block's base address. Do not
// reorder or resize fields by hand.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    // 0x00: CR / CR_ALTERNATE live here; accessed via the methods on
    // `impl RegisterBlock` rather than a named field.
    _reserved_0_cr: [u8; 0x04],
    #[doc = "0x04 - I3C configuration register"]
    pub cfgr: CFGR,
    _reserved2: [u8; 0x08],
    #[doc = "0x10 - I3C receive data byte register"]
    pub rdr: RDR,
    #[doc = "0x14 - I3C receive data word register"]
    pub rdwr: RDWR,
    #[doc = "0x18 - I3C transmit data byte register"]
    pub tdr: TDR,
    #[doc = "0x1c - I3C transmit data word register"]
    pub tdwr: TDWR,
    #[doc = "0x20 - I3C IBI payload data register"]
    pub ibidr: IBIDR,
    #[doc = "0x24 - I3C target transmit configuration register"]
    pub tgttdr: TGTTDR,
    _reserved8: [u8; 0x08],
    #[doc = "0x30 - I3C status register"]
    pub sr: SR,
    #[doc = "0x34 - I3C status error register"]
    pub ser: SER,
    _reserved10: [u8; 0x08],
    #[doc = "0x40 - I3C received message register"]
    pub rmr: RMR,
    _reserved11: [u8; 0x0c],
    #[doc = "0x50 - I3C event register"]
    pub evr: EVR,
    #[doc = "0x54 - I3C interrupt enable register"]
    pub ier: IER,
    #[doc = "0x58 - I3C clear event register"]
    pub cevr: CEVR,
    _reserved14: [u8; 0x04],
    #[doc = "0x60 - I3C own device characteristics register"]
    pub devr0: DEVR0,
    #[doc = "0x64 - I3C device 1 characteristics register"]
    pub devr1: DEVR1,
    #[doc = "0x68 - I3C device 2 characteristics register"]
    pub devr2: DEVR2,
    #[doc = "0x6c - I3C device 3 characteristics register"]
    pub devr3: DEVR3,
    #[doc = "0x70 - I3C device 4 characteristics register"]
    pub devr4: DEVR4,
    _reserved19: [u8; 0x1c],
    #[doc = "0x90 - I3C maximum read length register"]
    pub maxrlr: MAXRLR,
    #[doc = "0x94 - I3C maximum write length register"]
    pub maxwlr: MAXWLR,
    _reserved21: [u8; 0x08],
    #[doc = "0xa0 - I3C timing register 0"]
    pub timingr0: TIMINGR0,
    #[doc = "0xa4 - I3C timing register 1"]
    pub timingr1: TIMINGR1,
    #[doc = "0xa8 - I3C timing register 2"]
    pub timingr2: TIMINGR2,
    _reserved24: [u8; 0x14],
    #[doc = "0xc0 - I3C bus characteristics register"]
    pub bcr: BCR,
    #[doc = "0xc4 - I3C device characteristics register"]
    pub dcr: DCR,
    #[doc = "0xc8 - I3C get capability register"]
    pub getcapr: GETCAPR,
    #[doc = "0xcc - I3C controller-role capability register"]
    pub crcapr: CRCAPR,
    #[doc = "0xd0 - I3C get capability register"]
    pub getmxdsr: GETMXDSR,
    #[doc = "0xd4 - I3C extended provisioned ID register"]
    pub epidr: EPIDR,
}
impl RegisterBlock {
    #[doc = "0x00 - I3C message control register alternate"]
    #[inline(always)]
    pub const fn cr_alternate(&self) -> &CR_ALTERNATE {
        // SAFETY: reinterprets the block's base address (offset 0x00,
        // within `_reserved_0_cr`) as a reference to the alternate view
        // of the register; the block is #[repr(C)], so offset 0 is the
        // start of that reserved word.
        unsafe { &*(self as *const Self).cast::<u8>().add(0usize).cast() }
    }
    #[doc = "0x00 - I3C message control register"]
    #[inline(always)]
    pub const fn cr(&self) -> &CR {
        // SAFETY: same reinterpretation as `cr_alternate` — CR and
        // CR_ALTERNATE are two overlapping views of the word at 0x00.
        unsafe { &*(self as *const Self).cast::<u8>().add(0usize).cast() }
    }
}
#[doc = "CR (w) register accessor: I3C message control register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "I3C message control register"]
pub mod cr;
#[doc = "CR_ALTERNATE (w) register accessor: I3C message control register alternate\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr_alternate::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr_alternate`]
module"]
pub type CR_ALTERNATE = crate::Reg<cr_alternate::CR_ALTERNATE_SPEC>;
#[doc = "I3C message control register alternate"]
pub mod cr_alternate;
#[doc = "CFGR (rw) register accessor: I3C configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfgr`]
module"]
pub type CFGR = crate::Reg<cfgr::CFGR_SPEC>;
#[doc = "I3C configuration register"]
pub mod cfgr;
#[doc = "RDR (r) register accessor: I3C receive data byte register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdr`]
module"]
pub type RDR = crate::Reg<rdr::RDR_SPEC>;
#[doc = "I3C receive data byte register"]
pub mod rdr;
#[doc = "RDWR (r) register accessor: I3C receive data word register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdwr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdwr`]
module"]
pub type RDWR = crate::Reg<rdwr::RDWR_SPEC>;
#[doc = "I3C receive data word register"]
pub mod rdwr;
#[doc = "TDR (w) register accessor: I3C transmit data byte register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tdr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tdr`]
module"]
pub type TDR = crate::Reg<tdr::TDR_SPEC>;
#[doc = "I3C transmit data byte register"]
pub mod tdr;
#[doc = "TDWR (w) register accessor: I3C transmit data word register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tdwr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tdwr`]
module"]
pub type TDWR = crate::Reg<tdwr::TDWR_SPEC>;
#[doc = "I3C transmit data word register"]
pub mod tdwr;
#[doc = "IBIDR (rw) register accessor: I3C IBI payload data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ibidr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ibidr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ibidr`]
module"]
pub type IBIDR = crate::Reg<ibidr::IBIDR_SPEC>;
#[doc = "I3C IBI payload data register"]
pub mod ibidr;
#[doc = "TGTTDR (rw) register accessor: I3C target transmit configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tgttdr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tgttdr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tgttdr`]
module"]
pub type TGTTDR = crate::Reg<tgttdr::TGTTDR_SPEC>;
#[doc = "I3C target transmit configuration register"]
pub mod tgttdr;
#[doc = "SR (r) register accessor: I3C status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`]
module"]
pub type SR = crate::Reg<sr::SR_SPEC>;
#[doc = "I3C status register"]
pub mod sr;
#[doc = "SER (r) register accessor: I3C status error register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ser::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ser`]
module"]
pub type SER = crate::Reg<ser::SER_SPEC>;
#[doc = "I3C status error register"]
pub mod ser;
#[doc = "RMR (r) register accessor: I3C received message register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rmr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rmr`]
module"]
pub type RMR = crate::Reg<rmr::RMR_SPEC>;
#[doc = "I3C received message register"]
pub mod rmr;
#[doc = "EVR (r) register accessor: I3C event register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`evr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`evr`]
module"]
pub type EVR = crate::Reg<evr::EVR_SPEC>;
#[doc = "I3C event register"]
pub mod evr;
#[doc = "IER (r) register accessor: I3C interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ier::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ier`]
module"]
pub type IER = crate::Reg<ier::IER_SPEC>;
#[doc = "I3C interrupt enable register"]
pub mod ier;
#[doc = "CEVR (w) register accessor: I3C clear event register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cevr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cevr`]
module"]
pub type CEVR = crate::Reg<cevr::CEVR_SPEC>;
#[doc = "I3C clear event register"]
pub mod cevr;
#[doc = "DEVR0 (rw) register accessor: I3C own device characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`devr0::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`devr0::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`devr0`]
module"]
pub type DEVR0 = crate::Reg<devr0::DEVR0_SPEC>;
#[doc = "I3C own device characteristics register"]
pub mod devr0;
#[doc = "DEVR1 (rw) register accessor: I3C device 1 characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`devr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`devr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`devr1`]
module"]
pub type DEVR1 = crate::Reg<devr1::DEVR1_SPEC>;
#[doc = "I3C device 1 characteristics register"]
pub mod devr1;
#[doc = "DEVR2 (rw) register accessor: I3C device 2 characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`devr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`devr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`devr2`]
module"]
pub type DEVR2 = crate::Reg<devr2::DEVR2_SPEC>;
#[doc = "I3C device 2 characteristics register"]
pub mod devr2;
#[doc = "DEVR3 (rw) register accessor: I3C device 3 characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`devr3::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`devr3::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`devr3`]
module"]
pub type DEVR3 = crate::Reg<devr3::DEVR3_SPEC>;
#[doc = "I3C device 3 characteristics register"]
pub mod devr3;
#[doc = "DEVR4 (rw) register accessor: I3C device 4 characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`devr4::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`devr4::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`devr4`]
module"]
pub type DEVR4 = crate::Reg<devr4::DEVR4_SPEC>;
#[doc = "I3C device 4 characteristics register"]
pub mod devr4;
#[doc = "MAXRLR (rw) register accessor: I3C maximum read length register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`maxrlr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`maxrlr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`maxrlr`]
module"]
pub type MAXRLR = crate::Reg<maxrlr::MAXRLR_SPEC>;
#[doc = "I3C maximum read length register"]
pub mod maxrlr;
#[doc = "MAXWLR (rw) register accessor: I3C maximum write length register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`maxwlr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`maxwlr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`maxwlr`]
module"]
pub type MAXWLR = crate::Reg<maxwlr::MAXWLR_SPEC>;
#[doc = "I3C maximum write length register"]
pub mod maxwlr;
#[doc = "TIMINGR0 (rw) register accessor: I3C timing register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`timingr0::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`timingr0::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`timingr0`]
module"]
pub type TIMINGR0 = crate::Reg<timingr0::TIMINGR0_SPEC>;
#[doc = "I3C timing register 0"]
pub mod timingr0;
#[doc = "TIMINGR1 (rw) register accessor: I3C timing register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`timingr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`timingr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`timingr1`]
module"]
pub type TIMINGR1 = crate::Reg<timingr1::TIMINGR1_SPEC>;
#[doc = "I3C timing register 1"]
pub mod timingr1;
#[doc = "TIMINGR2 (rw) register accessor: I3C timing register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`timingr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`timingr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`timingr2`]
module"]
pub type TIMINGR2 = crate::Reg<timingr2::TIMINGR2_SPEC>;
#[doc = "I3C timing register 2"]
pub mod timingr2;
#[doc = "BCR (rw) register accessor: I3C bus characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bcr`]
module"]
pub type BCR = crate::Reg<bcr::BCR_SPEC>;
#[doc = "I3C bus characteristics register"]
pub mod bcr;
#[doc = "DCR (rw) register accessor: I3C device characteristics register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dcr`]
module"]
pub type DCR = crate::Reg<dcr::DCR_SPEC>;
#[doc = "I3C device characteristics register"]
pub mod dcr;
#[doc = "GETCAPR (rw) register accessor: I3C get capability register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`getcapr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`getcapr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`getcapr`]
module"]
pub type GETCAPR = crate::Reg<getcapr::GETCAPR_SPEC>;
#[doc = "I3C get capability register"]
pub mod getcapr;
#[doc = "CRCAPR (rw) register accessor: I3C controller-role capability register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crcapr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crcapr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`crcapr`]
module"]
pub type CRCAPR = crate::Reg<crcapr::CRCAPR_SPEC>;
#[doc = "I3C controller-role capability register"]
pub mod crcapr;
#[doc = "GETMXDSR (rw) register accessor: I3C get capability register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`getmxdsr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`getmxdsr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`getmxdsr`]
module"]
pub type GETMXDSR = crate::Reg<getmxdsr::GETMXDSR_SPEC>;
#[doc = "I3C get capability register"]
pub mod getmxdsr;
#[doc = "EPIDR (rw) register accessor: I3C extended provisioned ID register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`epidr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`epidr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`epidr`]
module"]
pub type EPIDR = crate::Reg<epidr::EPIDR_SPEC>;
#[doc = "I3C extended provisioned ID register"]
pub mod epidr;
|
use std::cell::{Cell, Ref, RefCell};
use std::rc::Rc;
use std::f64::consts::PI;
use std::collections::HashMap;
/// Full circle in radians (2π); used by the angular-frequency helpers below.
const PI2: f64 = 2.0 * PI;
/// Parameter that depends on one or more children.
///
/// Interior mutability (`Cell`/`RefCell`) lets a shared handle
/// (`Param = Rc<ParamPrivate>`) be updated through plain `&self`.
pub struct ParamPrivate {
    /// Human-readable parameter name.
    pub name: String,
    /// Unit label for display purposes.
    pub unit: String,
    // Current numeric value; updated independently of `display`
    // (see `set` vs `set_display`).
    value: Cell<f64>,
    // Text shown in the UI for this parameter.
    display: RefCell<String>,
    /// Lower bound used by the percent-based slider mapping.
    pub min: f64,
    /// Upper bound used by the percent-based slider mapping.
    pub max: f64,
    /// Recomputes this parameter from the full parameter set; `None`
    /// for independent (leaf) parameters.
    pub update_fn: Option<fn(&Parameters) -> f64>,
    // Decimal places used when formatting `display`.
    precision: Cell<usize>,
    // Dependency links, maintained by `set_children`.
    children: RefCell<Vec<Param>>,
    parents: RefCell<Vec<Param>>,
}
impl ParamPrivate {
    /// Recompute this parameter from its dependencies, if it is a
    /// derived parameter; leaf parameters are left untouched.
    fn update(&self, params: &Parameters) {
        match self.update_fn {
            Some(update) => self.set(update(params)),
            None => {}
        }
    }
    /// Current numeric value.
    pub fn v(&self) -> f64 {
        self.value.get()
    }
    /// Position of the current value inside [min, max] as 0.0..=1.0.
    pub fn to_percent(&self) -> f64 {
        let span = self.max - self.min;
        (self.v() - self.min) / span
    }
    /// Set the value from a 0.0..=1.0 slider position and refresh the
    /// display string at the current precision.
    pub fn set_percent(&self, percent: f64) {
        let new_value = self.min + percent * (self.max - self.min);
        self.set(new_value);
        let formatted = format!("{:.*}", self.precision(), new_value);
        let _ = self.display.replace(formatted);
    }
    /// Overwrite the numeric value (the display string is NOT refreshed).
    pub fn set(&self, new_value: f64) {
        self.value.set(new_value)
    }
    /// Replace the display text; the numeric value is updated too when
    /// the (trimmed) text parses as a float, otherwise only the text
    /// changes and a diagnostic is printed.
    pub fn set_display(&self, display: String) {
        match display.trim().parse::<f64>() {
            Ok(val) => self.set(val),
            Err(_) => println!("Bad intermediate display"),
        }
        let _ = self.display.replace(display);
    }
    /// Borrow the current display string.
    pub fn display(&self) -> Ref<String> {
        self.display.borrow()
    }
    /// Number of decimal places used when formatting the display.
    pub fn precision(&self) -> usize {
        self.precision.get()
    }
    /// Change the number of decimal places used for the display.
    pub fn set_precision(&self, precision: usize) {
        self.precision.set(precision)
    }
}
pub type Param = Rc<ParamPrivate>;
#[allow(non_snake_case)]
/// The complete Thiele/Small-style parameter set, held both as a
/// name-indexed map and as named fields (the field names follow the
/// conventional loudspeaker-modelling symbols, hence non_snake_case).
pub struct Parameters {
    /// Name -> parameter lookup; see `Parameters::get`.
    pub param_map: HashMap<String, Param>,
    /// Parameter groups, by category.
    pub driver: [Param; 23],
    pub passive: [Param; 12],
    pub enclosure: [Param; 8],
    pub constant: [Param; 3],
    // Environmental parameters
    pub ρ0: Param,
    pub c: Param,
    pub t: Param,
    // Driver low level parameters
    pub Xmax: Param,
    pub Vd: Param,
    pub Sd: Param,
    pub Bl: Param,
    pub Re: Param,
    pub Mmd: Param,
    pub Mms: Param,
    pub Mas: Param,
    pub Rms: Param,
    pub Ras: Param,
    pub Cms: Param,
    pub Cas: Param,
    pub Vas: Param,
    pub Rg: Param,
    // Driver mid level parameters
    pub Ts: Param,
    pub ωs: Param,
    pub Fs: Param,
    pub Qes: Param,
    pub Qms: Param,
    pub Qts: Param,
    pub Qs: Param,
    pub Cab: Param,
    pub Vb: Param,
    // Passive radiator low level parameters
    pub Vap: Param,
    pub Cmp: Param,
    pub Cap: Param,
    pub Rmp: Param,
    pub Rap: Param,
    pub Mmp: Param,
    pub Map: Param,
    pub Sp: Param,
    // Passive radiator mid level parameters
    pub Qmp: Param,
    pub ωp: Param,
    pub Fp: Param,
    pub Tp: Param,
    // Enclosure parameters
    pub ωb: Param,
    pub Fb: Param,
    pub Tb: Param,
    pub α: Param,
    pub δ: Param,
    pub y: Param,
    pub h: Param,
    pub η0: Param,
}
impl Parameters {
    /// Look up a parameter by name, returning a cheap `Rc` clone of the
    /// shared handle, or `None` if no parameter has that name.
    pub fn get(&self, name: &str) -> Option<Param> {
        // `Option::cloned` replaces the manual if-let/Some/None dance.
        self.param_map.get(name).cloned()
    }
}
/// Create an independent (leaf) parameter with no update function.
pub fn param_simple(name: &str, unit: &str, value: f64, min: f64, max: f64, precision: usize) -> Param {
    make_param(name, unit, value, min, max, precision, None)
}
/// Create a derived parameter whose value is recomputed from the full
/// parameter set by `update`.
pub fn param(name: &str, unit: &str, value: f64, min: f64, max: f64, precision: usize,
update: fn(&Parameters) -> f64) -> Param {
    make_param(name, unit, value, min, max, precision, Some(update))
}
/// Construct a reference-counted parameter with an optional update
/// function; the display string starts as the plain value text and the
/// child/parent links start empty (wired later by `set_children`).
pub fn make_param(name: &str, unit: &str, value: f64, min: f64, max: f64, precision: usize,
update: Option<fn(&Parameters) -> f64>) -> Param {
    let private = ParamPrivate {
        name: name.to_string(),
        unit: unit.to_string(),
        value: Cell::new(value),
        display: RefCell::new(value.to_string()),
        min,
        max,
        update_fn: update,
        precision: Cell::new(precision),
        children: RefCell::new(Vec::new()),
        parents: RefCell::new(Vec::new()),
    };
    Rc::new(private)
}
/// Register `children` under `param_ref`, wiring both directions of the
/// parent/child relationship.
pub fn set_children(param_ref: &mut Param, children: Vec<Param>) {
    // `child` needs no `mut` binding: both pushes go through RefCell's
    // interior mutability on shared Rc handles.
    for child in children {
        param_ref.children.borrow_mut().push(child.clone());
        child.parents.borrow_mut().push(param_ref.clone());
    }
}
// 10000 * cm^2 * mm = L
/// Displacement volume: Vd = Sd * Xmax, scaled to liters.
pub fn vd_update(P: &Parameters) -> f64 {
    let sd = P.Sd.v();
    let xmax = P.Xmax.v();
    10000.0 * sd * xmax
}
// g + 1000 * ((kg / m^3) / sqrt(cm^2)) * cm^4 = g
/// Total moving mass: diaphragm mass Mmd plus the air-load term derived
/// from the effective piston radius sqrt(Sd / π).
pub fn mms_update(P: &Parameters) -> f64 {
    let sd = P.Sd.v();
    let mmd = P.Mmd.v();
    let radius_term = (sd / PI).sqrt();
    let air_load = 1000.0 * (2.0 * ((8.0 * P.ρ0.v()) / (3.0 * PI2 * radius_term))) * sd.powi(2);
    mmd + air_load
}
// g / cm^4 = g / cm^4
/// Acoustic mass: mechanical moving mass referred through the cone area.
pub fn mas_update(P: &Parameters) -> f64 {
    let mms = P.Mms.v();
    let sd = P.Sd.v();
    mms / sd.powi(2)
}
// (N * s / m) / (100000000 * cm^4) = (Pa * s) / m^3
/// Acoustic resistance: mechanical resistance referred through the cone
/// area (with the cm^4 -> m^4 scale factor).
pub fn ras_update(P: &Parameters) -> f64 {
    let rms = P.Rms.v();
    let sd_quart = 100000000.0 * P.Sd.v().powi(2);
    rms / sd_quart
}
// 100000000 * (1 m / N) * cm^4 = m^5 / N
/// Acoustic compliance: mechanical compliance referred through the cone
/// area (with the cm^4 -> m^4 scale factor).
pub fn cas_update(P: &Parameters) -> f64 {
    let cms = P.Cms.v();
    let sd_sq = P.Sd.v().powi(2);
    100000000.0 * cms * sd_sq
}
// (kg / m^3) * (m/s)^2 * (m^5 / N) / 1000 = L
pub fn vas_update(P: &Parameters) -> f64 {
P.ρ0.v() * P.c.v().powi(2) * P.Cas.v() / 1000.0
}
// 1 / Hz = s
pub fn ts_update(P: &Parameters) -> f64 {
1.0 / P.ωs.v()
}
// Hz = Hz
pub fn ωs_update(P: &Parameters) -> f64 {
P.Fs.v() * PI2
}
// 1 / sqrt((g/cm^4) * (m^5 / N) / 100000) = Hz
pub fn fs_update(P: &Parameters) -> f64 {
1.0 / ( PI2 * (P.Mas.v() * P.Cas.v() / 100000.0).sqrt())
}
// 1000 * (Hz * Ohm * (g/cm^4) * cm^4) / (tesla * m)^2 = 1
pub fn qes_update(P: &Parameters) -> f64 {
1000.0 * (P.ωs.v() * P.Re.v() * P.Mas.v() * P.Sd.v().powi(2)) / P.Bl.v().powi(2)
}
// 1 / (Hz * (m^5 / N) * (Pa * s) / m^3) = 1
pub fn qms_update(P: &Parameters) -> f64 {
1.0 / (P.ωs.v() * P.Cas.v() * P.Ras.v())
}
pub fn qts_update(P: &Parameters) -> f64 {
(P.Qes.v() * P.Qms.v()) / (P.Qes.v() + P.Qms.v())
}
pub fn qs_update(P: &Parameters) -> f64 {
P.Qts.v()
}
// (kg/m^3) * (m/s)^2 * (m^5 / N) / 1000 = L
pub fn vb_update(P: &Parameters) -> f64 {
P.ρ0.v() * P.c.v().powi(2) * P.Cab.v() / 1000.0
}
// (kg/m^3) * (m/s)^2 * (m^5 / N) / 1000 = L
/// Passive-radiator equivalent compliance volume Vap (litres).
pub fn vap_update(P: &Parameters) -> f64 {
    // The `/ 1000.0` (m^3 -> litres) was missing, contradicting the unit
    // comment above and the sibling vas_update/vb_update implementations.
    P.ρ0.v() * P.c.v().powi(2) * P.Cap.v() / 1000.0
}
// 100000000 * (m/N) * cm^4 = m^5 / N
/// Passive-radiator acoustic compliance Cap.
pub fn cap_update(P: &Parameters) -> f64 {
    100000000.0 * P.Cmp.v() * P.Sp.v().powi(2)
}
// ((N * s / m) / cm^4) / 100000000 = (Pa * s) / m^3
/// Passive-radiator acoustic resistance Rap.
pub fn rap_update(P: &Parameters) -> f64 {
    (P.Rmp.v() / P.Sp.v().powi(2)) * 100000000.0
}
// kg / cm^4 = kg / cm^4
/// Passive-radiator acoustic mass Map.
pub fn map_update(P: &Parameters) -> f64 {
    P.Mmp.v() / P.Sp.v().powi(2)
}
// 1 / (Hz * (m^5 / N) * (Pa * s)/m^3) = 1
/// Passive-radiator Q factor Qmp.
pub fn qmp_update(P: &Parameters) -> f64 {
    1.0 / (P.ωp.v() * P.Cap.v() * P.Rap.v())
}
// 10000 / sqrt((kg/cm^4) * (m^5/N)) = Hz
/// Passive-radiator resonance Fp = 1 / (2π·sqrt(Map·Cap)), unit-scaled.
pub fn fp_update(P: &Parameters) -> f64 {
    10000.0 / ( PI2 * (P.Map.v() * P.Cap.v()).sqrt())
}
// 1 / Hz = s
/// Period Tp of the passive-radiator resonance.
pub fn tp_update(P: &Parameters) -> f64 {
    1.0 / P.ωp.v()
}
/// Angular frequency ωp = 2π·Fp.
pub fn ωp_update(P: &Parameters) -> f64 {
    P.Fp.v() * PI2
}
// 10000 * sqrt(1 / (m^5/N * kg/cm^4)) = Hz
/// Box/passive-radiator system resonance Fb = ωb / 2π,
/// with ωb² = (1 + Cab/Cap) / (Cab·Map).
pub fn fb_update(P: &Parameters) -> f64 {
    // f = ω / (2π): the 2π factor belongs OUTSIDE the square root, exactly
    // as in fs_update/fp_update. The previous code divided by 2π inside the
    // sqrt, overstating Fb by a factor of sqrt(2π) ≈ 2.5.
    10000.0 * ((1.0 + (P.Cab.v() / P.Cap.v())) / (P.Cab.v() * P.Map.v())).sqrt() / PI2
}
// 1 / Hz = s
/// Period Tb of the box resonance.
pub fn tb_update(P: &Parameters) -> f64 {
    // Was `P.ωp.v()` — a copy-paste from tp_update. Tb is the period of the
    // box resonance ωb, not of the passive radiator ωp.
    1.0 / P.ωb.v()
}
/// Angular box resonance ωb = 2π·Fb.
pub fn ωb_update(P: &Parameters) -> f64 {
    // Was `P.Fp.v()` — a copy-paste from ωp_update; ωb derives from Fb.
    P.Fb.v() * PI2
}
/// Compliance ratio α = Cas / Cab.
pub fn α_update(P: &Parameters) -> f64 {
    P.Cas.v() / P.Cab.v()
}
/// Compliance ratio δ = Cap / Cab.
pub fn δ_update(P: &Parameters) -> f64 {
    P.Cap.v() / P.Cab.v()
}
/// Tuning ratio y = Fp / Fs.
pub fn y_update(P: &Parameters) -> f64 {
    P.Fp.v() / P.Fs.v()
}
/// Tuning ratio h = Fb / Fs.
pub fn h_update(P: &Parameters) -> f64 {
    P.Fb.v() / P.Fs.v()
}
// (1000 / (m/s)^3) * (Hz^3 * L) = 1
/// Reference efficiency η0 = (4π² / c³) · Fs³ · Vas / Qes.
pub fn η0_update(P: &Parameters) -> f64 {
    ((4.0 * PI.powi(2)) / P.c.v().powi(3)) * (P.Fs.v().powi(3) * P.Vas.v() / P.Qes.v())
}
|
use rand::Rng;
use std::cmp::Ordering;
use std::io;
/// Classic number-guessing game: picks a secret value in 1..=100 and loops,
/// giving too-small / too-big hints until the user matches it.
fn main() {
    println!("GUESSING GAME");
    // Pre-0.8 rand API: lower bound inclusive, upper exclusive -> 1..=100.
    let secret_number = rand::thread_rng().gen_range(1, 101);
    loop {
        println!("Please input your guess");
        let mut input = String::new();
        io::stdin()
            .read_line(&mut input)
            .expect("Failed to read line");
        // Shadow the raw line with its parsed u32 value; `trim()` strips the
        // trailing newline. Non-numeric input just re-prompts.
        let guess: u32 = match input.trim().parse() {
            Ok(n) => n,
            Err(_) => {
                println!("Please enter a valid number...");
                continue;
            }
        };
        println!("...You guessed: {}", guess);
        // Three-way comparison drives the hint / win decision.
        match guess.cmp(&secret_number) {
            Ordering::Less => println!("Too small"),
            Ordering::Greater => println!("Too big"),
            Ordering::Equal => {
                println!("Congrats! You win!");
                break;
            }
        }
    }
}
|
#[macro_use]
extern crate criterion;
extern crate pdatastructs;
use criterion::Criterion;
use pdatastructs::hyperloglog::HyperLogLog;
/// Benchmarks one `add` call on a 16-register (4 address bits) HyperLogLog.
fn hyperloglog_add_single(c: &mut Criterion) {
    c.bench_function("hyperloglog_add_single", |b| {
        // 2^4 = 16 registers in total.
        let mut sketch = HyperLogLog::<&str>::new(4);
        let sample = "foo bar";
        b.iter(|| sketch.add(&sample))
    });
}
/// Benchmarks `count` on an empty 16-register (4 address bits) HyperLogLog.
fn hyperloglog_count_empty(c: &mut Criterion) {
    c.bench_function("hyperloglog_count_empty", |b| {
        // 2^4 = 16 registers in total.
        let sketch = HyperLogLog::<u64>::new(4);
        b.iter(|| sketch.count())
    });
}
criterion_group!(benches, hyperloglog_add_single, hyperloglog_count_empty);
criterion_main!(benches);
|
extern crate baroque;
use std::rc::Rc;
use baroque::baroque::*;
use baroque::players::*;
use baroque::thread_pool::*;
/// Console Baroque-chess driver: builds a board, constructs the two players
/// from CLI args, then alternates turns until checkmate or stalemate.
fn main() {
    let mut board = Board::new_board();
    // Worker pool shared by both threaded minimax AIs.
    let pool = Rc::new(ThreadPool::new(12));
    // Arg 1 / arg 2 select white's / black's controller: 1 or 2 picks a
    // minimax AI of that depth; anything else (or no arg) means a human.
    let white: Box<Player> =
        match std::env::args().nth(1).and_then(|s| s.parse().ok()).unwrap_or(0) {
            n if n > 0 && n < 3 => Box::new(MinimaxThreadedAI::new(Side::White, n, &pool)),
            _ => Box::new(Human{}),
        };
    let black: Box<Player> =
        match std::env::args().nth(2).and_then(|s| s.parse().ok()).unwrap_or(0) {
            n if n > 0 && n < 3 => Box::new(MinimaxThreadedAI::new(Side::Black, n, &pool)),
            _ => Box::new(Human{}),
        };
    let mut current_player: &Player = &*white;
    loop {
        board.display();
        match current_player.play(&board) {
            // The player proposed a move (begin -> end squares).
            Some((begin, end)) => {
                let result = board.make_move(begin, end);
                // result.1: messages produced by the move (captures, etc.).
                for s in result.1 {
                    println!("{}", s);
                }
                // result.0: the new board, present only when the move was legal.
                if let Some(new_board) = result.0 {
                    // Side is read from the PRE-move board (board is replaced
                    // below) — presumably the side that just moved hands over;
                    // TODO confirm `get_current_side` semantics.
                    current_player = match board.get_current_side() {
                        Side::Black => &*white,
                        Side::White => &*black,
                    };
                    board = new_board;
                }
            },
            // No move available: in check means checkmate, otherwise stalemate.
            None => {
                if board.is_in_check(board.get_current_side()) {
                    println!("Checkmate; {} has won!", board.get_current_side().flip());
                } else {
                    println!("Stalemate!");
                }
                break;
            }
        }
    }
}
|
use ppm::entities::pixel::Pixel;
/// Test if pixel have the same RGB values of the instantiated object.
/// Note: the accessors return `&u8`, hence the reference on the expected side.
#[test]
fn pixel_creation_works() {
    let pixel = Pixel::new(128, 128, 128);
    assert_eq!(pixel.red(), &128u8);
    assert_eq!(pixel.green(), &128u8);
    assert_eq!(pixel.blue(), &128u8);
}
/// Prints a pixel (smoke test: only checks `display` does not panic).
#[test]
fn pixel_display_works() {
    let pixel = Pixel::new(128, 64, 196);
    pixel.display();
}
/// Test if the inversion of a Pixel works: each channel becomes 255 - value.
#[test]
fn pixel_invert_works() {
    let mut pixel = Pixel::new(128, 64, 196);
    pixel.invert();
    assert_eq!(
        (&(255u8 - 128u8), &(255u8 - 64u8), &(255u8 - 196u8)),
        (pixel.red(), pixel.green(), pixel.blue())
    );
}
/// Test the equality between two pixels: identical channel values compare
/// equal, any differing channel compares unequal.
#[test]
fn pixel_eq_works() {
    let pixel_1 = Pixel::new(128, 64, 196);
    let pixel_2 = Pixel::new(128, 64, 196);
    let pixel_3 = Pixel::new(127, 63, 195);
    // `assert!` is the idiomatic form of `assert_eq!(true, ...)`.
    assert!(pixel_1 == pixel_2);
    assert!(pixel_1 != pixel_3);
}
/// Test the grayscale function.
#[test]
fn pixel_grayscale_works() {
    let mut pixel = Pixel::new(128, 64, 178);
    // Expected value replicates per-channel truncating division
    // (128/3 + 64/3 + 178/3 = 122, not (128+64+178)/3 = 123) — presumably
    // mirroring `grayscale()`'s internal arithmetic; TODO confirm.
    let rgb_mean: u8 = 128 / 3 + 64 / 3 + 178 / 3;
    pixel.grayscale();
    assert_eq!(&rgb_mean, pixel.red());
    assert_eq!(&rgb_mean, pixel.green());
    assert_eq!(&rgb_mean, pixel.blue());
}
/// Test if mutability is unique for each pixel object: mutating one pixel
/// must not affect an independently constructed one.
#[test]
fn pixel_mutability_works() {
    let mut pixel_1 = Pixel::new(128, 64, 23);
    let pixel_2 = Pixel::new(255, 255, 255);
    pixel_1.grayscale();
    // `assert!` is the idiomatic form of `assert_eq!(true, ...)`.
    assert!(pixel_1 != pixel_2);
}
|
use crate::project_context::{BuildEnv, Context};
use crate::recipe::rust::DOCKER_IMAGE;
use crate::signal::Signal;
use crate::util::DockerCommand;
use anyhow::Result;
const TEST_ENV_VAR: &str = "CAPSULE_TEST_ENV";
/// Runs the project's test suite inside the recipe's Docker image.
pub struct Tester;

impl Tester {
    /// Executes `cargo test` in the Docker container, exporting the chosen
    /// build environment through the CAPSULE_TEST_ENV variable so tests can
    /// distinguish debug from release runs.
    pub fn run(project_context: &Context, env: BuildEnv, signal: &Signal) -> Result<()> {
        let env_arg = match env {
            BuildEnv::Debug => "debug",
            BuildEnv::Release => "release",
        };
        let project_path = project_context
            .project_path
            .to_str()
            .expect("project path")
            .to_string();
        // Build the shell line first, then hand it to a container command
        // whose target/ and Cargo.lock ownership is fixed up afterwards.
        let shell_line = format!("{}={} cargo test -- --nocapture", TEST_ENV_VAR, env_arg);
        DockerCommand::with_context(project_context, DOCKER_IMAGE.to_string(), project_path)
            .fix_dir_permission("target".to_string())
            .fix_dir_permission("Cargo.lock".to_string())
            .run(shell_line, signal)?;
        Ok(())
    }
}
|
pub mod audio_stream;
use audio_stream::common as audio_common;
use audio_stream::audio_source as source;
use audio_stream::default_implementer;
|
/// Marker type returned by [`Bar::bar`].
pub struct Foo;
/// Trait whose implementors can produce a [`Foo`].
pub trait Bar {
    /// Produces a `Foo` value.
    fn bar(&self) -> Foo;
}
/// # Examples
///
/// ```
/// use foo::Foo;
/// use foo::Bar;
///
/// struct MyFoo;
/// impl Bar for MyFoo {
///     fn bar(&self) -> Foo {
///         Foo
///     }
/// }
/// ```
pub fn foo() {
    // Intentionally empty: this function appears to exist solely to host the
    // doctest above — TODO confirm before removing.
}
|
extern crate ray_tracer_challenge;
use ray_tracer_challenge::camera::Camera;
use ray_tracer_challenge::lighting::PointLight;
use ray_tracer_challenge::materials::Material;
use ray_tracer_challenge::matrices::Matrix4;
use ray_tracer_challenge::patterns::Pattern;
use ray_tracer_challenge::shapes::Sphere;
use ray_tracer_challenge::transforms::view_transform;
use ray_tracer_challenge::tuples::Tuple;
use ray_tracer_challenge::world::World;
use std::f32::consts::PI;
use std::fs::File;
use std::io::prelude::*;
/// Renders a checker-floored scene with two walls and three striped spheres
/// to `red_circle.ppm` using the ray-tracer-challenge library.
fn main() -> std::io::Result<()> {
    // Single white point light up-left of the scene.
    let light = PointLight::new(
        Tuple::point(-10.0, 10.0, -10.0),
        Tuple::color(1.0, 1.0, 1.0),
    );
    let mut world = World::new();
    world.light_source = Some(light);
    // Shared wall material: subtle checker pattern, no specular highlight.
    let mut side_color = Material::default();
    let mut side_pattern = Pattern::checkers(
        Tuple::color(1.0, 0.9, 0.9),
        Tuple::color(0.7, 0.6, 0.6),
    );
    // Tiny translation offsets presumably avoid acne/aliasing artifacts at
    // exact checker boundaries — TODO confirm.
    side_pattern.transform = Matrix4::translation(0.00002, 0.00005, 1.5)
        * Matrix4::rotation_x(PI / 2.0);
    side_color.pattern = Some(side_pattern);
    side_color.specular = 0.0;
    // Floor: a sphere squashed flat (y scale 0.01), slightly reflective,
    // black-and-white checkers.
    {
        let mut floor = Sphere::new();
        floor.transform = Matrix4::scaling(10.0, 0.01, 10.0);
        floor.material = Material::default();
        floor.material.reflective = 0.2;
        floor.material.specular = 0.0;
        floor.material.diffuse = 0.2;
        let mut floor_pattern = Pattern::checkers(
            Tuple::color(0.0, 0.0, 0.0),
            Tuple::color(1.0, 1.0, 1.0),
        );
        floor_pattern.transform = Matrix4::translation(0.0002, 0.0005, 0.0002)
            * Matrix4::rotation_x(PI / 2.0);
        floor.material.pattern = Some(floor_pattern);
        world.add_shape(floor);
    }
    // Left wall: flattened sphere rotated into a vertical plane at z = 5,
    // angled -45 degrees around y.
    {
        let mut left_wall = Sphere::new();
        left_wall.transform = Matrix4::translation(0.0, 0.0, 5.0)
            * Matrix4::rotation_y(-PI / 4.0)
            * Matrix4::rotation_x(PI / 2.0)
            * Matrix4::scaling(10.0, 0.01, 10.0);
        left_wall.material = side_color;
        world.add_shape(left_wall);
    }
    // Right wall: mirror of the left wall (+45 degrees around y).
    {
        let mut right_wall = Sphere::new();
        right_wall.transform = Matrix4::translation(0.0, 0.0, 5.0)
            * Matrix4::rotation_y(PI / 4.0)
            * Matrix4::rotation_x(PI / 2.0)
            * Matrix4::scaling(10.0, 0.01, 10.0);
        right_wall.material = side_color;
        world.add_shape(right_wall);
    }
    // Middle sphere: unit sphere, teal stripes, strongly reflective.
    {
        let mut middle = Sphere::new();
        middle.transform = Matrix4::translation(-0.5, 1.0, 0.5);
        middle.material = Material::default();
        let mut pattern = Pattern::stripe(
            Tuple::color(0.1, 0.6, 0.5),
            Tuple::color(0.3, 0.9, 0.7),
        );
        pattern.transform = Matrix4::scaling(0.5, 0.5, 0.5);
        middle.material.pattern = Some(pattern);
        middle.material.diffuse = 0.7;
        middle.material.specular = 0.3;
        middle.material.reflective = 0.9;
        world.add_shape(middle);
    }
    // Right sphere: half-size, green stripes.
    {
        let mut right = Sphere::new();
        right.transform = Matrix4::translation(1.5, 0.5, -0.5)
            * Matrix4::scaling(0.5, 0.5, 0.5);
        right.material = Material::default();
        let mut pattern = Pattern::stripe(
            Tuple::color(0.5, 1.0, 0.1),
            Tuple::color(0.2, 0.6, 0.1),
        );
        pattern.transform = Matrix4::scaling(0.2, 0.2, 0.2);
        right.material.pattern = Some(pattern);
        right.material.diffuse = 0.7;
        right.material.specular = 0.3;
        world.add_shape(right);
    }
    // Left sphere: third-size, yellow/red stripes.
    {
        let mut left = Sphere::new();
        left.transform = Matrix4::translation(-1.5, 0.33, -0.75)
            * Matrix4::scaling(0.33, 0.33, 0.33);
        left.material = Material::default();
        let mut pattern = Pattern::stripe(
            Tuple::color(1.0, 0.8, 0.1),
            Tuple::color(0.6, 0.2, 0.2),
        );
        pattern.transform = Matrix4::scaling(1.5, 0.5, 0.2);
        left.material.pattern = Some(pattern);
        left.material.diffuse = 0.7;
        left.material.specular = 0.3;
        world.add_shape(left);
    }
    // 1000x500 camera, 60-degree field of view, looking at the middle sphere.
    let mut camera = Camera::new(1000, 500, PI / 3.0);
    camera.transform = view_transform(
        Tuple::point(0.0, 1.5, -5.0),
        Tuple::point(0.0, 1.0, 0.0),
        Tuple::vector(0.0, 1.0, 0.0),
    );
    println!("Rendering world with {} pixels", camera.num_pixels());
    let mut image = camera.render(world);
    // Optional debug marker: paints a small red cross of pixels around a
    // fixed canvas point (disabled by default).
    let draw_debug = false;
    if draw_debug {
        // 400,10
        let debug_point = (40, 2);
        let red = Tuple::color(1.0, 0.0, 0.0);
        for pre_x in 1..4 {
            for pre_y in 1..4 {
                image.write_pixel(
                    debug_point.0 - pre_x,
                    debug_point.1 - pre_y,
                    &red,
                );
                image.write_pixel(
                    debug_point.0 - pre_x,
                    debug_point.1 + pre_y,
                    &red,
                );
                image.write_pixel(
                    debug_point.0 + pre_x,
                    debug_point.1 - pre_y,
                    &red,
                );
                image.write_pixel(
                    debug_point.0 + pre_x,
                    debug_point.1 + pre_y,
                    &red,
                );
            }
        }
    }
    // Serialize to PPM and write the output file.
    let ppm = image.to_ppm();
    let filename = "red_circle.ppm";
    let mut file = File::create(filename)?;
    file.write_all(ppm.as_bytes())?;
    Ok(())
}
|
use crate::io::Write;
use std::{env, fs, io, path::Path};
/// Generates a Pascal's-triangle (binomial coefficient) lookup table and
/// writes it into `$OUT_DIR/<path>` as a `pub static TABLE` of type `ty`.
fn rrr_table<P: AsRef<Path>>(path: P, ty: &str, n: usize) -> io::Result<()> {
    /// Builds the size x size table with the crate's seeding convention
    /// (first column 1, rest of first row 0), then fills it with
    /// C(i, j) = C(i-1, j-1) + C(i-1, j).
    fn gen_rrr_table(size: usize) -> Vec<Vec<u128>> {
        let mut table = vec![vec![0u128; size]; size];
        // Seeding order matters for table[0][0]: diagonal, then row 0,
        // then column 0, leaving table[0][0] == 1.
        for k in 0..size {
            table[k][k] = 1;
            table[0][k] = 0;
            table[k][0] = 1;
        }
        for i in 1..size {
            for j in 1..size {
                table[i][j] = table[i - 1][j - 1] + table[i - 1][j];
            }
        }
        table
    }
    let out_dir = env::var("OUT_DIR").unwrap();
    let mut out_file = fs::File::create(Path::new(&out_dir).join(path))?;
    let table = gen_rrr_table(n);
    writeln!(
        out_file,
        r#"#[cfg_attr(feature = "cargo-clippy", allow(unreadable_literal))]
pub static TABLE: {} = {:#?};
"#,
        ty,
        table
    )
}
// Build script entry point: emits the 15-, 31- and 63-wide binomial tables
// into OUT_DIR. rustfmt is skipped to keep the aligned argument columns.
#[cfg_attr(rustfmt, rustfmt_skip)]
fn main() -> io::Result<()> {
    rrr_table( "table15.rs", "[[u16; 15 ]; 15 ]", 15)?;
    rrr_table( "table31.rs", "[[u32; 31 ]; 31 ]", 31)?;
    rrr_table( "table63.rs", "[[u64; 63 ]; 63 ]", 63)?;
    // rrr_table(Path::new(&dir).join("table255.rs"), "[[u256; 255]; 255]", 255)?;
    Ok(())
}
|
use oxygengine::{prelude::*, user_interface::raui::core::widget::utils::Color as RauiColor};
/// Converts 8-bit RGBA channel values (0-255) into a RAUI `Color`,
/// whose channels are normalized floats in 0.0..=1.0.
pub fn rgba_to_raui_color(r: u8, g: u8, b: u8, a: u8) -> RauiColor {
    RauiColor {
        r: r as Scalar / 255.0,
        g: g as Scalar / 255.0,
        b: b as Scalar / 255.0,
        a: a as Scalar / 255.0,
    }
}
|
extern crate zmq;
extern crate protobuf;
use protobuf::*;
mod pingpong;
/// REQ-side loop: repeatedly serializes a PingPong protobuf with action
/// "PING", sends it, and prints each reply. Runs forever; only a socket
/// error (via `try!`) exits the function.
fn run_client(ctx: &mut zmq::Context, addr: &str) -> Result<(), zmq::Error> {
    // NOTE(review): `try!` is the pre-2018 spelling of the `?` operator.
    let mut socket = try!(ctx.socket(zmq::REQ));
    try!(socket.connect(addr));
    let payload = "PING";
    loop {
        let mut p = pingpong::PingPong::new();
        p.set_action(payload.to_string());
        p.set_name("Nolan".to_string());
        println!("Sending -> {:?}", p.get_action());
        let mut msg = try!(zmq::Message::new());
        try!(socket.send(&p.write_to_bytes().unwrap(), 0));
        try!(socket.recv(&mut msg, 0));
        let contents = msg.as_str().unwrap();
        println!("Received <- {:?}", contents);
    }
    // NOTE(review): unreachable — the loop above never breaks.
    Ok(())
}
/// REP-side loop: binds to `addr`, parses each incoming PingPong message,
/// and answers "PONG" to every "PING". Runs forever; only a bind/alloc
/// error (via `try!`) exits the function.
fn run_server(ctx: &mut zmq::Context, addr: &str) -> Result<(), zmq::Error> {
    let mut socket = try!(ctx.socket(zmq::REP));
    try!(socket.bind(addr));
    let mut msg = try!(zmq::Message::new());
    loop {
        if let Ok(_) = socket.recv(&mut msg, 0) {
            let p: pingpong::PingPong = parse_from_bytes(&msg).unwrap();
            // NOTE(review): `r` is built but never sent — the reply is the
            // plain string "PONG", not a serialized PingPong; confirm intent.
            let mut r: pingpong::PingPong = pingpong::PingPong::new();
            if p.get_action() == "PING" {
                try!(socket.send_str("PONG", 0));
            }
        }
    }
    // NOTE(review): unreachable — the loop above never breaks.
    Ok(())
}
/// Entry point: `<binary> client` runs the REQ client; anything else
/// (including no argument at all) runs the REP server.
fn main() {
    let args = std::env::args().collect::<Vec<_>>();
    let mut ctx = zmq::Context::new();
    let addr = "tcp://127.0.0.1:5555";
    // `args.get(1)` instead of `args[1]`: the original panicked with an
    // index-out-of-bounds when the binary was launched without arguments.
    let mode = args.get(1).map(String::as_str).unwrap_or("");
    if mode == "client" {
        println!("ZeroMQ client connecting to {}", addr);
        run_client(&mut ctx, addr).unwrap_or_else(|err| println!("{:?}", err));
    } else {
        println!("ZeroMQ server listening on {}", addr);
        run_server(&mut ctx, addr).unwrap_or_else(|err| println!("{:?}", err));
    }
}
|
use anyhow::bail;
use async_trait::async_trait;
use drogue_cloud_registry_events::stream::EventHandler;
use futures::{stream, Stream, StreamExt, TryStreamExt};
use kube::Resource;
use kube_runtime::watcher::Event;
use std::fmt::Debug;
/// Run a stream to completion, sending items to the handler.
#[async_trait]
pub trait RunStream<R>
where
    R: Resource + Debug + Send + Sync,
{
    /// Error type returned when the stream or the handler fails.
    type Error;
    /// Consumes `self`, feeding every resource it yields to `handler`.
    async fn run_stream<H>(self, handler: H) -> Result<(), Self::Error>
    where
        H: EventHandler<Event = R> + Send + Sync + 'static;
}
// Blanket implementation: any fallible stream of watcher `Event`s can be run.
#[async_trait]
impl<S, R, E> RunStream<R> for S
where
    E: std::error::Error + Send + Sync + 'static,
    R: Resource + Debug + Send + Sync,
    S: Stream<Item = Result<Event<R>, E>> + Send + 'static,
{
    type Error = anyhow::Error;
    async fn run_stream<H>(self, handler: H) -> Result<(), Self::Error>
    where
        H: EventHandler<Event = R> + Send + Sync + 'static,
    {
        let stream = Box::pin(self);
        // expand resources from events: Applied/Deleted carry one resource,
        // Restarted carries a batch — flatten them into a single stream.
        let mut stream = stream
            .map_err(anyhow::Error::from)
            .map_ok(|event| {
                match event {
                    Event::Applied(resource) | Event::Deleted(resource) => {
                        stream::iter(vec![resource])
                    }
                    Event::Restarted(resources) => stream::iter(resources),
                }
                .map(Result::<_, anyhow::Error>::Ok)
            })
            .try_flatten();
        // handle events, retrying each failed delivery up to 10 extra times
        // before giving up on the whole stream.
        while let Some(event) = stream.try_next().await? {
            log::debug!("Processing event: {:?}", event);
            let mut cnt = 0;
            while handler.handle(&event).await.is_err() {
                if cnt > 10 {
                    bail!("Failed to process event");
                } else {
                    cnt += 1;
                }
            }
        }
        // A watcher stream is expected to be endless; reaching here is an error.
        bail!("Stream must not end")
    }
}
|
//use crate::vulkan::utils::cstr2string;
use ash::{
extensions::ext::DebugReport,
version::{DeviceV1_0, EntryV1_0, InstanceV1_0},
vk::{self, make_version, PhysicalDevice},
Device, Entry, Instance,
};
use log::info;
use std::{ffi::CString, rc::Rc};
use thiserror::Error;
#[derive(Error, Debug)]
/// Error types for vulkan devices
pub enum VkStateError {
    /// An interior NUL byte while building a C string (e.g. the app name).
    #[error("Null string")]
    Nul(#[from] std::ffi::NulError),
    /// A raw Vulkan API call returned a failure code.
    #[error("Vulkan error")]
    VkResult(#[from] ash::vk::Result),
    #[error("Unable to get vulkan instance")]
    Instance(#[from] ash::InstanceError),
    /// The ash loader could not find/load the Vulkan library.
    #[error("Unable to load ash")]
    Loading(#[from] ash::LoadingError),
    /// No queue family supporting both COMPUTE and TRANSFER was found.
    #[error("Unable to get vulkan queue")]
    NoQueue,
}
#[cfg_attr(feature = "python", pyclass)]
pub struct VulkanState {
pub entry: Entry,
pub instance: Instance,
pub physical_device: PhysicalDevice,
pub device: Device,
pub queue_family_index: u32,
// pub debug_report_loader: ash::extensions::ext::DebugReport,
// pub debug_callback: vk::DebugReportCallbackEXT,
}
impl Drop for VulkanState {
    fn drop(&mut self) {
        // SAFETY: raw Vulkan teardown. The device is idled first so no queue
        // is still executing, the device is destroyed before the instance it
        // was created from, and nothing touches these handles afterwards.
        unsafe {
            self.device.device_wait_idle().unwrap();
            self.device.destroy_device(None);
            // self.debug_report_loader
            //     .destroy_debug_report_callback(self.debug_callback, None);
            self.instance.destroy_instance(None);
        }
    }
}
/// Logs the physical device's compute work-group limits and the minimum
/// storage-buffer offset alignment.
// NOTE(review): takes the Rc by value (one refcount bump per call);
// `&VulkanState` would suffice but is kept to preserve the public signature.
pub fn print_work_limits(vulkan: Rc<VulkanState>) {
    // SAFETY: raw Vulkan query on a physical device handle that VulkanState
    // keeps alive for its whole lifetime.
    let physical_device_props = unsafe { vulkan.instance.get_physical_device_properties(vulkan.physical_device) };
    let physical_limits = physical_device_props.limits;
    let work_group_count = physical_limits.max_compute_work_group_count;
    let work_group_size = physical_limits.max_compute_work_group_size;
    let work_group_invocation = physical_limits.max_compute_work_group_invocations;
    info!(
        "Device max work group count: [{}, {}, {}]",
        work_group_count[0], work_group_count[1], work_group_count[2]
    );
    info!(
        "Device max work group size: [{}, {}, {}]",
        work_group_size[0], work_group_size[1], work_group_size[2]
    );
    info!("Device max work group invocation: {}", work_group_invocation);
    info!(
        "minStorageBufferOffset: {}",
        physical_limits.min_storage_buffer_offset_alignment
    );
}
/*
unsafe extern "system" fn vulkan_debug_callback(
_: vk::DebugReportFlagsEXT,
_: vk::DebugReportObjectTypeEXT,
_: u64,
_: usize,
_: i32,
_: *const c_char,
p_message: *const c_char,
_: *mut c_void,
) -> u32 {
warn!("\n{:?}", CStr::from_ptr(p_message));
vk::FALSE
}
*/
/// Instance-level extension names to enable (currently only VK_EXT_debug_report).
fn extension_names() -> Vec<*const i8> { vec![DebugReport::name().as_ptr()] }
/// Bootstraps a compute-oriented Vulkan stack: loads the entry points,
/// creates an instance, picks the first physical device, selects a queue
/// family supporting COMPUTE + TRANSFER, and creates a logical device.
pub fn init_vulkan() -> Result<VulkanState, VkStateError> {
    //let layer_names = [CString::new("VK_LAYER_LUNARG_standard_validation").unwrap()];
    //let layers_names_raw: Vec<*const i8> = layer_names
    //    .iter()
    //    .map(|raw_name| raw_name.as_ptr())
    //    .collect();
    let extension_names_raw = extension_names();
    let app_name = CString::new("printer_geo")?;
    let entry = Entry::new()?;
    let app_info = vk::ApplicationInfo::builder()
        .api_version(make_version(1, 0, 0))
        .application_name(&app_name)
        .application_version(make_version(1, 0, 0));
    let create_info = vk::InstanceCreateInfo::builder()
        .application_info(&app_info)
        // .enabled_layer_names(&layers_names_raw)
        .enabled_extension_names(&extension_names_raw);
    // SAFETY: create_info and everything it points to (app_name, extension
    // names) outlive this call.
    let instance: Instance = unsafe { entry.create_instance(&create_info, None)? };
    //let debug_info = vk::DebugReportCallbackCreateInfoEXT::builder()
    //    .flags(
    //        vk::DebugReportFlagsEXT::ERROR
    //            | vk::DebugReportFlagsEXT::WARNING
    //            | vk::DebugReportFlagsEXT::PERFORMANCE_WARNING,
    //    )
    //    .pfn_callback(Some(vulkan_debug_callback));
    //let debug_report_loader = DebugReport::new(&entry, &instance);
    //let debug_callback = unsafe {
    //    debug_report_loader
    //        .create_debug_report_callback(&debug_info, None)
    //        .unwrap()
    //};
    let physical: PhysicalDevice;
    let phy_count = unsafe { instance.enumerate_physical_devices()? };
    // NOTE(review): `phy_count[0]` panics when no Vulkan device is present;
    // a `.first().ok_or(...)` with a dedicated error variant would be safer.
    physical = phy_count[0];
    /*
    if phy_count.len() == 1 {
    let properties = unsafe { instance.get_physical_device_properties(physical) };
    let phy_name = cstr2string(properties.device_name.to_vec());
    info!("Only one physical device ({}) defaulting to it.", phy_name);
    } else {
    // We don't use the logger here because we need user
    // feedback so we need whatever we print to be visible in all cases.
    println!("Physical device:");
    let mut i = 0;
    for dev in phy_count.clone() {
    let properties = unsafe { instance.get_physical_device_properties(dev) };
    let dev_name = cstr2string(properties.device_name.to_vec());
    let (mut dev_graphics, mut dev_compute, mut dev_transfer, mut dev_sparse) = (false, false, false, false);
    unsafe {
    instance
    .get_physical_device_queue_family_properties(dev)
    .iter()
    .for_each(|nfo| {
    if !dev_graphics && nfo.queue_flags.contains(vk::QueueFlags::GRAPHICS) {
    dev_graphics = true;
    }
    if !dev_compute && nfo.queue_flags.contains(vk::QueueFlags::COMPUTE) {
    dev_compute = true;
    }
    if !dev_transfer && nfo.queue_flags.contains(vk::QueueFlags::TRANSFER) {
    dev_transfer = true;
    }
    if !dev_sparse && nfo.queue_flags.contains(vk::QueueFlags::SPARSE_BINDING) {
    dev_sparse = true;
    }
    });
    }
    println!("- [{}] {}:", i, dev_name);
    println!("\t* GRAPHICS: {}", tick(dev_graphics));
    println!("\t* COMPUTE: {}", tick(dev_compute));
    println!("\t* TRANSFER: {}", tick(dev_transfer));
    println!("\t* SPARSE OPS: {}", tick(dev_sparse));
    i += 1;
    }
    println!("Use: ");
    let mut line = String::new();
    let stdin = io::stdin();
    stdin.lock().read_line(&mut line).unwrap();
    let phy_id = line.trim().parse::<usize>().expect("[ERR] Please write a number.");
    physical = phy_count[phy_id];
    let properties = unsafe { instance.get_physical_device_properties(physical) };
    let phy_name = cstr2string(properties.device_name.to_vec());
    info!("Using device {}.", phy_name);
    }
    */
    // Get queue family: first family that supports both COMPUTE and TRANSFER.
    let queue_index = unsafe {
        instance
            .get_physical_device_queue_family_properties(physical)
            .iter()
            .enumerate()
            .filter_map(|(index, ref nfo)| {
                let support_compute = nfo.queue_flags.contains(vk::QueueFlags::COMPUTE);
                let support_transfer = nfo.queue_flags.contains(vk::QueueFlags::TRANSFER);
                if support_compute && support_transfer {
                    Some(index)
                } else {
                    None
                }
            })
            .next()
            .ok_or(VkStateError::NoQueue)? as u32
    };
    // No optional device features requested.
    let features = vk::PhysicalDeviceFeatures { ..Default::default() };
    let queue_create_info = [vk::DeviceQueueCreateInfo::builder()
        .queue_family_index(queue_index)
        .queue_priorities(&[1.0])
        .build()];
    let device_create_info_builder = vk::DeviceCreateInfo::builder()
        .queue_create_infos(&queue_create_info)
        .enabled_features(&features)
        .enabled_extension_names(&[]);
    // SAFETY: all builder-referenced data is still alive at the call site.
    let device: Device = unsafe { instance.create_device(physical, &device_create_info_builder, None)? };
    Ok(VulkanState {
        entry,
        instance,
        physical_device: physical,
        device,
        queue_family_index: queue_index,
        // debug_callback,
        // debug_report_loader,
    })
}
|
#![cfg_attr(not(any(std, test)), no_std)]
//! Offers a reversed view into a slice.
//!
//! To use, import the `SliceExt` trait to get the `.rev()` and `.rev_mut`
//! extension methods on slices. Then treat the returned `RevSlice` like
//! you would an ordinary slice: index it, split it, iterate it, whatever.
//!
//! Example:
//!
//! ```
//! extern crate rev_slice;
//! use rev_slice::SliceExt;
//!
//! let r = [1, 2, 4, 9, 16, 25].rev();
//! assert_eq!(r[0], 25);
//! assert_eq!(r[1..3].rev(), &[9, 16]);
//! assert_eq!(r.split_first().unwrap().0, &25);
//!
//! let mut it = r.iter().cloned().skip(2);
//! assert_eq!(it.next(), Some(9));
//! assert_eq!(it.next(), Some(4));
//! assert_eq!(it.next(), Some(2));
//! ```
#[cfg(any(std, test))]
extern crate core;
use core::{iter, slice};
use core::ops::{Index, IndexMut};
use core::ops::Range;
/// Adds `.rev()` and `.rev_mut()` methods to slices.
///
/// There's no reason to implement this yourself.
pub trait SliceExt {
    /// The element type of the slice
    type Element;
    /// Get a proxy providing a reversed view of the slice.
    fn rev(&self) -> &RevSlice<Self::Element>;
    /// Get a proxy providing a mutable reversed view of the mutable slice.
    fn rev_mut(&mut self) -> &mut RevSlice<Self::Element>;
    // Sealing method: takes a type only this crate can name, so downstream
    // crates cannot implement the trait.
    #[doc(hidden)]
    fn sealed(_: internal::Sealed);
}
// Private module backing the sealed-trait pattern: `Sealed` is reachable in
// signatures but not constructible outside this crate.
mod internal {
    pub struct Sealed;
}
impl<T> SliceExt for [T] {
    type Element = T;
    fn rev(&self) -> &RevSlice<Self::Element> {
        // SAFETY: RevSlice<T> is #[repr(transparent)] over [T], so &[T] and
        // &RevSlice<T> have identical layout and validity.
        unsafe { core::mem::transmute(self) }
    }
    fn rev_mut(&mut self) -> &mut RevSlice<Self::Element> {
        // SAFETY: same #[repr(transparent)] layout argument as `rev`.
        unsafe { core::mem::transmute(self) }
    }
    fn sealed(_: internal::Sealed) {}
}
/// A DST newtype providing a reversed view of a slice.
// #[repr(transparent)] guarantees the same layout as [T]; the transmutes in
// the SliceExt impl rely on it.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct RevSlice<T>([T]);
impl<T> RevSlice<T> {
    /// Provides a reversed view of the reversed slice, aka the original slice.
    pub fn rev(&self) -> &[T] {
        &self.0
    }
    /// Provides a reversed view of the reversed slice, aka the original mutable slice.
    pub fn rev_mut(&mut self) -> &mut [T] {
        &mut self.0
    }
    // Maps a reversed-view ELEMENT index to the underlying index
    // (0 <-> len-1, 1 <-> len-2, ...).
    fn flip_index(&self, index: usize) -> usize {
        self.len() - (index+1)
    }
    // Maps a reversed-view BOUNDARY (fencepost) position — a split point
    // between elements, valid in 0..=len — to the underlying position.
    fn flip_fencepost(&self, index: usize) -> usize {
        self.len() - index
    }
    // Maps a reversed-view range to the underlying range; the endpoints swap
    // roles because the direction reverses.
    fn flip_range(&self, range: Range<usize>) -> Range<usize> {
        self.flip_fencepost(range.end)..self.flip_fencepost(range.start)
    }
}
/// These methods work like their equivalents in `core`.
impl<T> RevSlice<T> {
    pub fn len(&self) -> usize {
        self.0.len()
    }
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
    /// First element of the reversed view == last of the underlying slice.
    pub fn first(&self) -> Option<&T> {
        self.0.last()
    }
    pub fn first_mut(&mut self) -> Option<&mut T> {
        self.0.last_mut()
    }
    pub fn last(&self) -> Option<&T> {
        self.0.first()
    }
    pub fn last_mut(&mut self) -> Option<&mut T> {
        self.0.first_mut()
    }
    pub fn split_first(&self) -> Option<(&T, &RevSlice<T>)> {
        let (item, rest) = self.0.split_last()?;
        Some((item, rest.rev()))
    }
    /// BUG FIX: this took `&mut self` yet returned immutable references
    /// (the `&mut` results of `split_last_mut`/`rev_mut` were silently
    /// coerced to `&`). The `_mut` variant now returns mutable references,
    /// matching `slice::split_first_mut`.
    pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut RevSlice<T>)> {
        let (item, rest) = self.0.split_last_mut()?;
        Some((item, rest.rev_mut()))
    }
    pub fn split_last(&self) -> Option<(&T, &RevSlice<T>)> {
        let (item, rest) = self.0.split_first()?;
        Some((item, rest.rev()))
    }
    /// BUG FIX: same immutable-return defect as `split_first_mut`; now
    /// returns mutable references, matching `slice::split_last_mut`.
    pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut RevSlice<T>)> {
        let (item, rest) = self.0.split_first_mut()?;
        Some((item, rest.rev_mut()))
    }
    pub fn split_at(&self, mid: usize) -> (&RevSlice<T>, &RevSlice<T>) {
        // The split point flips as a fencepost; the two halves swap order.
        let rmid = self.flip_fencepost(mid);
        let (a, b) = self.0.split_at(rmid);
        (b.rev(), a.rev())
    }
    pub fn split_at_mut(&mut self, mid: usize) -> (&mut RevSlice<T>, &mut RevSlice<T>) {
        let rmid = self.flip_fencepost(mid);
        let (a, b) = self.0.split_at_mut(rmid);
        (b.rev_mut(), a.rev_mut())
    }
}
// Element indexing: position i of the reversed view is position len-1-i of
// the underlying slice. Out-of-range indices panic, as for ordinary slices.
impl<T> Index<usize> for RevSlice<T> {
    type Output = T;
    fn index(&self, index: usize) -> &Self::Output {
        let rindex = self.flip_index(index);
        &self.0[rindex]
    }
}
impl<T> IndexMut<usize> for RevSlice<T> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        let rindex = self.flip_index(index);
        &mut self.0[rindex]
    }
}
// Range indexing: the range is flipped into underlying coordinates and the
// resulting subslice is re-wrapped as a RevSlice, so slicing composes.
impl<T> Index<Range<usize>> for RevSlice<T> {
    type Output = RevSlice<T>;
    fn index(&self, index: Range<usize>) -> &Self::Output {
        let rindex = self.flip_range(index);
        self.0[rindex].rev()
    }
}
impl<T> IndexMut<Range<usize>> for RevSlice<T> {
    fn index_mut(&mut self, index: Range<usize>) -> &mut Self::Output {
        let rindex = self.flip_range(index);
        self.0[rindex].rev_mut()
    }
}
impl<T> RevSlice<T> {
    /// `my_slice.rev().iter()` and `my_slice.iter().rev()` are equivalent.
    pub fn iter(&self) -> iter::Rev<slice::Iter<T>> {
        self.0.iter().rev()
    }
    /// `my_slice.rev().iter_mut()` and `my_slice.iter_mut().rev()` are equivalent.
    pub fn iter_mut(&mut self) -> iter::Rev<slice::IterMut<T>> {
        self.0.iter_mut().rev()
    }
}
// Allow `for x in &rev_slice` / `for x in &mut rev_slice`.
impl<'a, T> iter::IntoIterator for &'a RevSlice<T> {
    type Item = &'a T;
    type IntoIter = iter::Rev<slice::Iter<'a, T>>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T> iter::IntoIterator for &'a mut RevSlice<T> {
    type Item = &'a mut T;
    type IntoIter = iter::Rev<slice::IterMut<'a, T>>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
#[cfg(test)]
mod tests {
    use super::SliceExt;
    // End-to-end exercise: indexing, iteration, mutation through the reversed
    // view, range slicing, and split_at.
    #[test]
    fn it_works() {
        let mut a = [1, 2, 3, 4, 5, 6, 7];
        assert_eq!(a.rev()[1], 6);
        assert_eq!(a.rev().iter().nth(1), Some(&6));
        // Writing through the reversed view: index 6 is the original index 0.
        a.rev_mut()[6] = 10;
        assert_eq!(a[0], 10);
        let b = &a.rev()[1..4];
        assert_eq!(b.len(), 3);
        assert_eq!(b[0], 6);
        assert_eq!(b[1], 5);
        assert_eq!(b[2], 4);
        let (x, y) = a.rev().split_at(3);
        assert_eq!(x.len(), 3);
        assert_eq!(y.len(), 4);
        assert_eq!(x.rev(), &[5, 6, 7]);
        assert_eq!(y.rev(), &[10, 2, 3, 4]);
    }
    // Sanity check that plain iterator `.rev()` still resolves as expected.
    #[test]
    fn iter_works_too() {
        assert_eq!((0..10).rev().nth(1), Some(8));
    }
}
|
// Kata: https://www.codewars.com/kata/52f787eb172a8b4ae1000a34/solutions/rust
/// Returns the number of trailing zeros of `n!` (factorial), i.e. the number
/// of factors of 5 in 1..=n, via Legendre's formula: sum of floor(n / 5^k).
///
/// The previous implementation did the division in `f64`, which silently
/// loses precision for `n` above 2^53 even though the input is a `u128`;
/// this version stays in integer arithmetic throughout.
pub fn trailing_zeros(n: u128) -> u32 {
    let mut count: u32 = 0;
    let mut power_of_five: u128 = 5;
    while power_of_five <= n {
        // Each term counts the multiples of 5^k up to n.
        count += (n / power_of_five) as u32;
        // checked_mul guards against overflow near u128::MAX.
        match power_of_five.checked_mul(5) {
            Some(next) => power_of_five = next,
            None => break,
        }
    }
    count
}
|
// ICE-style candidate classification. The payloads look like address / port
// data — presumably (address) for HostTcp and (address, port) for
// ServerReflexive; TODO confirm against the gathering code.
#[derive(Debug, PartialEq)]
pub enum CandidateType {
    HostUdp,
    HostTcp(String),
    ServerReflexive(String, u16),
}
|
pub mod gen_custom;
use rand::Rng;
// use rand::distributions::{Distribution, Uniform};
use crate::{AlgorithmParams, Chromosome};
/// Produces genetic material for genetic algorithm
pub trait Genetic {
const LENGTH: usize;
/// Returns the gene of an object
fn gene(&self) -> Chromosome;
/// Returns object from gene
fn from_gene(chromosome: &Chromosome) -> Self;
/// Mutation
/// Pick mutation_rate bits at random and flip them.
fn mutation(&self, mutation_rate: f64) -> Self where Self: Sized {
let mut gene = self.gene();
let mut rng = rand::thread_rng();
// Check for iterator
for i in 0..gene.len() {
let number: f64 = rng.gen();
if number < mutation_rate {
gene.set(i, !gene.get(i).unwrap());
}
}
return Genetic::from_gene(&gene);
}
fn mutate_step(&self, other: &Self, params: &AlgorithmParams) -> (Self, Self) where Self: Sized;
/// Crossover
/// Gene 1 and Gene 2 assumed to be the same length
// TODO: Check for duplicates
fn cross_over(&self, other: &Self, co_factor: f64) -> (Self, Self) where Self: Sized {
let mut rng = rand::thread_rng();
let mut gene_1 = self.gene();
let mut gene_2 = other.gene();
for i in 0..gene_1.len() {
let number: f64 = rng.gen();
if number < co_factor {
// Swap
let temp = gene_1.get(i).unwrap();
gene_1.set(i, gene_2.get(i).unwrap());
gene_2.set(i, temp)
}
}
//Generate return genes
let ret1 = Genetic::from_gene(&gene_1);
let ret2 = Genetic::from_gene(&gene_2);
return (ret1, ret2);
}
// TODO: Implement a reverse function
fn reverse(&self) -> Self where Self: Sized {
todo!();
}
fn negation(&self) -> Self where Self: Sized {
let mut gene = self.gene();
// 1 1 0
// 0 1 1
gene.negate();
return Genetic::from_gene(&gene);
}
// TODO: Produce random.
fn generate_random() -> Self;
} |
/// Ownership is Rust’s most unique feature,
/// and it enables Rust to make memory safety
/// guarantees without needing a garbage collector.
/// ownership rules
/// 1. Each value in Rust has a variable that’s called its owner.
/// 2. There can only be one owner at a time.
/// 3. When the owner goes out of scope, the value will be dropped.
fn string_type() {
let mut s = String::from("hello");
s.push_str(", world!");
println!("{}", s);
}
fn move_var() {
let s1 = String::from("Hello");
let _s2 = s1; // move s1 to s2, the reference of s1 will be moved
// it is now shallow copy, it's move
}
/// `clone` performs a deep copy: the heap data is duplicated, so both
/// bindings own independent buffers.
fn deepcopy() {
    let first = String::from("Hello");
    let _second = first.clone();
}
/// Return the string together with its length in bytes, handing
/// ownership of the `String` back to the caller.
fn calculate_len(s: String) -> (String, usize) {
    let byte_len = s.len();
    (s, byte_len)
}
/// The &s1 syntax lets us create a reference that
/// refers to the value of s1 but does not own it.
/// Because it does not own it, the value it points
/// to will not be dropped when the reference goes out of scope.
///
/// Takes `&str` rather than `&String` (the idiomatic parameter type):
/// existing call sites passing `&String` still work via deref coercion,
/// and plain string slices are now accepted too.
fn reference(s: &str) -> usize {
    s.len()
}
/// At any given time, you can have either (but not both of) one
/// mutable reference or any number of immutable references.
/// References must always be valid.
fn mut_reference(s: &mut String) {
    // Appending through the mutable borrow mutates the caller's String.
    *s += ", world";
}
/// Return the first space-delimited word of `s`, or the whole string
/// when it contains no space.
///
/// Uses `str::find(' ')` — the byte index of the first ASCII space —
/// instead of a manual byte scan, and returns `s` directly rather than
/// `&s` (a `&&str` that relied on deref coercion).
pub fn first_word(s: &str) -> &str {
    match s.find(' ') {
        Some(i) => &s[..i],
        None => s,
    }
}
/// Demonstrate the common string-slice forms over one `String`.
fn slice() {
    let text = String::from("Hello World");
    let head = &text[..5]; // "Hello" — same span as 0..=4
    let tail = &text[6..]; // "World"
    let all = &text[..]; // the whole string
    println!("{} {} {}", head, tail, all);
}
/// Exercise each ownership demo in turn.
fn main() {
    string_type();
    move_var();
    deepcopy();
    // Ownership round-trip: `s1` moves in, `s2` moves back out.
    let s1 = String::from("Hello");
    let (s2, _len) = calculate_len(s1);
    reference(&s2);
    // Only one mutable borrow of a value may be live in a given scope,
    // so a second `&mut s3` alongside the call below would not compile.
    let mut s3 = String::from("Hello");
    mut_reference(&mut s3);
    // A mutable borrow confined to an inner scope is fine: it ends at
    // the closing brace.
    {
        let _r2 = &mut s3;
    }
    slice();
}
#[cfg(test)]
mod tests {
    use super::*;
    // Verifies `first_word` returns only the text before the first space.
    #[test]
    fn test_first_word() {
        let s = String::from("Hello World");
        let w = first_word(&s);
        assert_eq!(w, "Hello".to_string());
    }
}
|
use super::{DataClass, DataIdDefinition};
use ::std::marker::PhantomData;
// Definition for data-id 56 (`domestic_hot_water_setpoint`, declared by the
// macro invocation below): a readable and writable f32 value in the
// RemoteBoilerParameters class.
pub type DataIdSimpleType = f32;
pub(crate) static DATAID_DEFINITION : DataIdDefinition<DataIdSimpleType, DataIdType> =
    DataIdDefinition {
        data_id: 56,
        class: DataClass::RemoteBoilerParameters,
        read: true,
        write: true,
        check: None, // no validity-check function registered for this data-id
        phantom_simple: PhantomData {},
        phantom_complex: PhantomData {}
    };
dataidtypedef!(domestic_hot_water_setpoint: f32); |
// #[macro_use] extern crate time_test;
// #[macro_use] extern crate common_macros;
#[allow(dead_code)]
// mod array_diff;
// mod bleatrix_trotter;
// mod bouncing_balls;
// mod deadfish;
// mod desc_order;
// mod directions_reduction;
// mod divisibility_by_13;
// mod game;
// mod gap_in_primes;
// mod irr_sum_of_rationals;
// mod last_digit_huge_number;
// mod mini_bit_move;
// mod moves_in_squared_strings;
// mod pattern_zoom;
// mod recover_secret_string;
// mod reducing_by_steps;
// mod rolldice_sum_prob;
// mod roman_numerals;
// mod starts_ends_with;
// mod sum_of_pairs;
// mod summy;
// mod word_order;
// mod a_disguised_sequence;
// mod digital_cypher;
// mod find_the_nth_digit;
// mod dec_to_factorial_and_back;
// mod reverse_or_rotate;
// mod validate_sudoku_with_size_nxn;
// mod diophantine_equation;
// mod scheduling_sjf;
// mod bit_counting;
// mod weight_for_weight;
// mod to_leet_speak;
// mod simple_beads_count;
// mod coloured_triangles;
// mod target_date;
// mod build_a_square;
// mod duplicate_encoder;
// mod help_the_bookseller;
// mod bankers_plan;
// mod find_missing_letter;
// mod growth_of_population;
// mod categorize_new_member;
// mod best_travel;
// mod camel_case_method;
// mod rectangle_into_squares;
// mod play_w_passphrases;
// mod cafeteria;
// mod build_pile_cubes;
// mod catalog;
// mod num_int_partitions;
// mod point_in_polygon;
// mod get_along_int_partitions;
// mod sum_by_factors;
// mod primes_in_numbers;
// mod buying_a_car;
// mod backwards_read_primes;
// mod twice_linear;
// mod highest_scoring_word;
// mod range_execution;
// mod simple_sub_cipher_helper;
// mod ball_upwards;
// mod color_choice;
// mod help_granny;
mod character_frequency;
// This one has trouble because it uses a special class
// mod morse_decoder;
|
use rust_decimal::Decimal;
use rust_decimal_macros::*;
use std::fmt;
use std::time::Instant;
use serde::{Deserialize, Serialize};
/// Order operation: how an incoming order should be handled by the engine.
#[derive(Copy, Clone, Debug, PartialEq, SmartDefault, Serialize, Deserialize)]
#[allow(dead_code)]
pub enum OrderOp {
    #[default]
    Limit, // limit order (the default)
    Market, // market order
    Cancel, // cancel request
}
/// Side of the book an order belongs to.
#[derive(Copy, Clone, Debug, PartialEq, SmartDefault, Serialize, Deserialize)]
#[allow(dead_code)]
pub enum OrderSide {
    #[default]
    Bid, // buy side (the default)
    Ask, // sell side
}
/// Lifecycle state of an order.
#[derive(Copy, Clone, Debug, PartialEq, SmartDefault, Serialize, Deserialize)]
#[allow(dead_code)]
pub enum OrderStatus {
    #[default]
    PaddingTrade, // resting, nothing filled yet (likely a typo for "pending"; the default)
    AllTrade, // fully filled
    PartTrade, // partially filled
    AllCancel, // fully cancelled
    PartCancel, // partially cancelled
    AutoCancel, // auto-cancel of the unfilled remainder of a market order
}
/// Kind of event a `TradeRecord` describes.
#[derive(Copy, Clone, Debug, PartialEq, SmartDefault, Serialize, Deserialize)]
#[allow(dead_code)]
pub enum TradeType {
    #[default]
    SimpleTrade, // a normal match between two orders (the default)
    CancelTrade, // record emitted when an order is cancelled
}
/// A single order together with its running fill statistics, fee totals
/// and book-slot bookkeeping.
#[derive(Copy, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct OrderInfo {
    //{{{
    pub id: u64,                  // order id
    pub uid: u64,                 // id of the user who placed the order
    pub op: OrderOp,              // order operation (limit / market / cancel)
    pub side: OrderSide,          // order side (bid / ask)
    pub price: Decimal,           // order price
    pub avg_trade_price: Decimal, // average traded price so far
    pub raw_qty: Decimal,         // original order quantity
    pub remain_qty: Decimal,      // quantity not yet traded
    pub trade_qty: Decimal,       // quantity traded so far
    pub trade_oppo_qty: Decimal,  // opposite-asset quantity traded so far
    pub status: OrderStatus,      // current order status
    pub taker_fee_rate: Decimal,  // fee rate applied when this order takes
    pub maker_fee_rate: Decimal,  // fee rate applied when this order makes
    pub fee: Decimal,             // total fee accumulated so far
    pub logic: OrderLogic,        // book-slot linkage info
} //}}}
impl fmt::Display for OrderInfo {
    /// Multi-line, human-readable dump of every order field.
    ///
    /// Fix: the previous format string printed `avg_trade_price::` and
    /// `raw_qty::` with doubled colons; both now use a single colon like
    /// every other label.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "id:{}\nuid:{}\nop:{:?}\nside:{:?}\nprice:{}\navg_trade_price:{}\nraw_qty:{}\nremain_qty:{}\ntrade_qty:{}\ntrade_oppo_qty:{}\nstatus:{:?}\ntaker_fee:{}\nmaker_fee:{}\nfee:{}\ncurr_slot:{}\npre_slot:{}\nnext_slot:{}\nused:{}\n",
            self.id,
            self.uid,
            self.op,
            self.side,
            self.price,
            self.avg_trade_price,
            self.raw_qty,
            self.remain_qty,
            self.trade_qty,
            self.trade_oppo_qty,
            self.status,
            self.taker_fee_rate,
            self.maker_fee_rate,
            self.fee,
            self.logic.curr_slot,
            self.logic.pre_slot,
            self.logic.next_slot,
            self.logic.used
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: a default OrderInfo can be constructed and Debug-printed.
    #[test]
    fn test_print() {
        let order = OrderInfo::default();
        println!("{:?}", order);
    }
}
/// Book-slot linkage for an order: the slot it occupies plus
/// previous/next slot indices and an in-use flag.
#[derive(Copy, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct OrderLogic {
    pub curr_slot: usize, // slot this order currently occupies
    pub pre_slot: usize,  // previous order's slot; 0 = unused
    pub next_slot: usize, // next order's slot; 0 = unused
    pub used: bool,       // cleared once the order leaves the book
}
/// Validation errors for incoming orders (declared but not yet used here).
#[allow(dead_code)]
enum TradeError {
    OrderQtyIllegal,   // order quantity failed validation
    OrderPriceIllegal, // order price failed validation
}
/// Immutable record of a single match — or cancellation — between a bid
/// leg and an ask leg.
#[allow(dead_code)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct TradeRecord {
    //{{{
    trade_id: u64,               // unique trade record id
    bid_order_id: u64,           // bid order id
    bid_uid: u64,                // user of the bid order
    bid_type: OrderOp,           // bid order type
    bid_raw_qty: Decimal,        // bid order raw quantity
    bid_remain_qty: Decimal,     // bid order remaining quantity
    bid_raw_price: Decimal,      // bid order raw price
    bid_avg_price: Decimal,      // bid order average trade price
    bid_fee: Decimal,            // fee accumulated on the bid order
    ask_order_id: u64,           // ask order id
    ask_uid: u64,                // user of the ask order
    ask_type: OrderOp,           // ask order type
    ask_raw_qty: Decimal,        // ask order raw quantity
    ask_remain_qty: Decimal,     // ask order remaining quantity
    ask_raw_price: Decimal,      // ask order raw price
    ask_avg_price: Decimal,      // ask order average trade price
    ask_fee: Decimal,            // fee accumulated on the ask order
    trade_qty: Decimal,          // traded quantity
    trade_price: Decimal,        // execution price
    trade_oppo_qty: Decimal,     // trade_oppo_qty = trade_qty * trade_price
    trade_unfreeze_qty: Decimal, // qty the taker order should unfreeze
    time_stamp: u64,             // trade timestamp (seconds)
    trade_type: TradeType,       // simple trade or cancel record
} //}}}
impl Default for TradeRecord {
    //{{{
    /// An all-zero `SimpleTrade` record with both legs typed as limit orders.
    fn default() -> TradeRecord {
        let z = dec!(0);
        TradeRecord {
            trade_id: 0,
            bid_order_id: 0,
            bid_uid: 0,
            bid_type: OrderOp::Limit,
            bid_raw_qty: z,
            bid_remain_qty: z,
            bid_raw_price: z,
            bid_avg_price: z,
            bid_fee: z,
            ask_order_id: 0,
            ask_uid: 0,
            ask_type: OrderOp::Limit,
            ask_raw_qty: z,
            ask_remain_qty: z,
            ask_raw_price: z,
            ask_avg_price: z,
            ask_fee: z,
            trade_qty: z,
            trade_price: z,
            trade_oppo_qty: z,
            trade_unfreeze_qty: z,
            time_stamp: 0,
            trade_type: TradeType::SimpleTrade,
        }
    }
} //}}}
/// Generate a process-wide unique, monotonically increasing trade id.
///
/// Replaces the previous stub that always returned `1` (leaving every
/// trade record with the same id). Starts at 1 so the first id matches
/// the old behavior; `Relaxed` ordering suffices for a pure counter.
pub(crate) fn gen_trade_id() -> u64 {
    use std::sync::atomic::{AtomicU64, Ordering};
    static NEXT_TRADE_ID: AtomicU64 = AtomicU64::new(1);
    NEXT_TRADE_ID.fetch_add(1, Ordering::Relaxed)
}
impl OrderInfo {
#[inline]
pub fn new(
id: u64,
uid: u64,
side: OrderSide,
qty: Decimal,
price: Decimal,
(taker_fee, maker_fee): (Decimal, Decimal),
) -> OrderInfo {
OrderInfo {
//{{{
id: id,
uid: uid,
op: OrderOp::Limit,
side: side,
price: price,
avg_trade_price: dec!(0),
raw_qty: qty,
trade_qty: dec!(0),
remain_qty: qty,
trade_oppo_qty: dec!(0),
status: OrderStatus::PaddingTrade,
taker_fee_rate: taker_fee,
maker_fee_rate: maker_fee,
fee: dec!(0),
logic: OrderLogic {
curr_slot: 0,
pre_slot: 0,
next_slot: 0,
used: true,
},
} //}}}
}
// gennerate new unique trade record id
pub fn trade(&mut self, taker: &mut OrderInfo) -> Option<TradeRecord> {
//{{{
// ensure the taker order is limit type
assert_eq!(self.op, OrderOp::Limit);
match taker.op {
OrderOp::Limit => {
//{{{
// colc trade qty
let trade_qty = if self.remain_qty > taker.remain_qty {
taker.remain_qty
} else {
self.remain_qty
};
assert!(trade_qty > dec!(0));
self.trade_qty = self.trade_qty + trade_qty;
self.remain_qty = self.remain_qty - trade_qty;
taker.trade_qty = taker.trade_qty + trade_qty;
taker.remain_qty = taker.remain_qty - trade_qty;
let oppo_qty = trade_qty * self.price;
match self.side {
OrderSide::Ask => {
self.fee = self.fee + oppo_qty * self.maker_fee_rate;
taker.fee = taker.fee + trade_qty * taker.taker_fee_rate;
}
OrderSide::Bid => {
self.fee = self.fee + trade_qty * self.maker_fee_rate;
taker.fee = taker.fee + oppo_qty * taker.taker_fee_rate;
}
}
self.trade_oppo_qty = self.trade_oppo_qty + oppo_qty;
taker.trade_oppo_qty = taker.trade_oppo_qty + oppo_qty;
self.avg_trade_price = self.trade_oppo_qty / self.trade_qty;
taker.avg_trade_price = taker.trade_oppo_qty / taker.trade_qty;
// colc self and taker order status.
self.status = if self.raw_qty == self.trade_qty {
self.logic.used = false;
OrderStatus::AllTrade
} else {
OrderStatus::PartTrade
};
taker.status = if taker.raw_qty == taker.trade_qty {
taker.logic.used = false;
OrderStatus::AllTrade
} else {
OrderStatus::PartTrade
};
let trade_id = gen_trade_id();
let bid_order = if self.side == OrderSide::Ask {
*taker
} else {
*self
};
let ask_order = if self.side == OrderSide::Ask {
*self
} else {
*taker
};
let trade_unfreeze_qty =
if taker.side == OrderSide::Bid && taker.op == OrderOp::Limit {
trade_qty * (taker.price - self.price)
} else {
dec!(0)
};
Some(TradeRecord {
trade_id: trade_id,
bid_order_id: bid_order.id,
bid_uid: bid_order.uid,
bid_type: OrderOp::Limit,
bid_raw_qty: bid_order.raw_qty,
bid_remain_qty: bid_order.remain_qty,
bid_raw_price: bid_order.price,
bid_avg_price: bid_order.avg_trade_price,
bid_fee: bid_order.fee,
ask_order_id: ask_order.id,
ask_uid: ask_order.uid,
ask_type: OrderOp::Limit,
ask_raw_qty: ask_order.raw_qty,
ask_remain_qty: ask_order.remain_qty,
ask_raw_price: ask_order.price,
ask_avg_price: ask_order.avg_trade_price,
ask_fee: ask_order.fee,
time_stamp: Instant::now().elapsed().as_secs(),
trade_qty: trade_qty,
trade_price: self.price,
trade_oppo_qty: oppo_qty,
trade_unfreeze_qty: trade_unfreeze_qty,
trade_type: TradeType::SimpleTrade,
})
} //}}}
OrderOp::Market => {
match taker.side {
OrderSide::Ask => {
//{{{
// colc trade qty
let trade_qty = if self.remain_qty > taker.remain_qty {
taker.remain_qty
} else {
self.remain_qty
};
self.trade_qty = self.trade_qty + trade_qty;
self.remain_qty = self.remain_qty - trade_qty;
self.fee = self.fee + trade_qty * self.maker_fee_rate;
taker.trade_qty = taker.trade_qty + trade_qty;
taker.remain_qty = taker.remain_qty - trade_qty;
let oppo_qty = trade_qty * self.price;
self.trade_oppo_qty = self.trade_oppo_qty + oppo_qty;
taker.trade_oppo_qty = taker.trade_oppo_qty + oppo_qty;
taker.fee = taker.fee + oppo_qty * taker.taker_fee_rate;
self.avg_trade_price = self.trade_qty / self.trade_oppo_qty;
taker.avg_trade_price = taker.trade_qty / taker.trade_oppo_qty;
// colc self and taker order status.
self.status = if self.raw_qty == self.trade_qty {
OrderStatus::AllTrade
} else {
OrderStatus::PartTrade
};
taker.status = if taker.raw_qty == taker.trade_qty {
OrderStatus::AllTrade
} else {
OrderStatus::PartTrade
};
let trade_id = gen_trade_id();
let bid_order = if self.side == OrderSide::Ask {
*taker
} else {
*self
};
let ask_order = if self.side == OrderSide::Ask {
*self
} else {
*taker
};
Some(TradeRecord {
trade_id: trade_id,
bid_order_id: bid_order.id,
bid_uid: bid_order.uid,
bid_type: OrderOp::Limit,
bid_raw_qty: bid_order.raw_qty,
bid_remain_qty: bid_order.remain_qty,
bid_raw_price: bid_order.price,
bid_avg_price: bid_order.avg_trade_price,
bid_fee: bid_order.fee,
ask_order_id: ask_order.id,
ask_uid: ask_order.uid,
ask_type: OrderOp::Limit,
ask_raw_qty: ask_order.raw_qty,
ask_remain_qty: ask_order.remain_qty,
ask_raw_price: ask_order.price,
ask_avg_price: ask_order.avg_trade_price,
ask_fee: ask_order.fee,
time_stamp: Instant::now().elapsed().as_secs(),
trade_qty: trade_qty,
trade_price: self.price,
trade_oppo_qty: oppo_qty,
trade_unfreeze_qty: dec!(0),
trade_type: TradeType::SimpleTrade,
});
} //}}}
OrderSide::Bid => {
//{{{
let trade_qty = if self.remain_qty * self.price > taker.remain_qty {
taker.remain_qty / self.price
} else {
self.remain_qty
};
let trade_oppo_qty = trade_qty * self.price;
self.trade_qty = self.trade_qty + trade_qty;
self.remain_qty = self.remain_qty - trade_qty;
taker.trade_qty = taker.trade_qty + trade_oppo_qty;
taker.remain_qty = taker.remain_qty - trade_oppo_qty;
self.trade_oppo_qty = self.trade_oppo_qty + trade_oppo_qty;
taker.trade_oppo_qty = taker.trade_oppo_qty + trade_qty;
self.avg_trade_price = self.trade_qty / self.trade_oppo_qty;
taker.avg_trade_price = taker.trade_oppo_qty / taker.trade_qty;
self.fee = self.fee + trade_oppo_qty * self.maker_fee_rate;
taker.fee = taker.fee + trade_qty * taker.taker_fee_rate;
self.status = if self.raw_qty == self.trade_qty {
OrderStatus::AllTrade
} else {
OrderStatus::PartTrade
};
taker.status = if taker.raw_qty == self.trade_qty {
OrderStatus::AllTrade
} else {
OrderStatus::PartTrade
};
let trade_id = gen_trade_id();
let bid_order = if self.side == OrderSide::Ask {
*taker
} else {
*self
};
let ask_order = if self.side == OrderSide::Ask {
*self
} else {
*taker
};
Some(TradeRecord {
trade_id: trade_id,
bid_order_id: bid_order.id,
bid_uid: bid_order.uid,
bid_type: OrderOp::Limit,
bid_raw_qty: bid_order.raw_qty,
bid_remain_qty: bid_order.remain_qty,
bid_raw_price: bid_order.price,
bid_avg_price: bid_order.avg_trade_price,
bid_fee: bid_order.fee,
ask_order_id: ask_order.id,
ask_uid: ask_order.uid,
ask_type: OrderOp::Limit,
ask_raw_qty: ask_order.raw_qty,
ask_remain_qty: ask_order.remain_qty,
ask_raw_price: ask_order.price,
ask_avg_price: ask_order.avg_trade_price,
ask_fee: ask_order.fee,
time_stamp: Instant::now().elapsed().as_secs(),
trade_qty: trade_qty,
trade_price: self.price,
trade_oppo_qty: trade_oppo_qty,
trade_unfreeze_qty: dec!(0),
trade_type: TradeType::SimpleTrade,
});
} //}}}
}
None
}
_ => {
println! {"unsupport order operation"}
None
}
}
} //}}}
pub fn cancel(&mut self) -> Option<TradeRecord> {
//{{{
if !self.logic.used || self.remain_qty == dec!(0) {
return None;
}
let trade_id = gen_trade_id();
self.logic.used = false;
match self.side {
OrderSide::Ask => Some(TradeRecord {
trade_id: trade_id,
ask_order_id: self.id,
ask_uid: self.uid,
ask_type: self.op,
ask_raw_qty: self.raw_qty,
ask_remain_qty: self.remain_qty,
ask_raw_price: self.price,
ask_avg_price: self.avg_trade_price,
ask_fee: self.fee,
trade_type: TradeType::CancelTrade,
..Default::default()
}),
OrderSide::Bid => Some(TradeRecord {
bid_order_id: self.id,
bid_uid: self.uid,
bid_type: self.op,
bid_raw_qty: self.raw_qty,
bid_remain_qty: self.remain_qty,
bid_raw_price: self.price,
bid_avg_price: self.avg_trade_price,
bid_fee: self.fee,
trade_type: TradeType::CancelTrade,
..Default::default()
}),
}
} //}}}
}
|
//! Cookie Filters
use futures::future;
use headers::Cookie;
use super::header;
use crate::filter::{Filter, One};
use crate::reject::Rejection;
use std::convert::Infallible;
/// Creates a `Filter` that requires a cookie by name.
///
/// If found, extracts the value of the cookie, otherwise rejects.
pub fn cookie(name: &'static str) -> impl Filter<Extract = One<String>, Error = Rejection> + Copy {
    header::header2().and_then(move |cookie: Cookie| {
        // Look the cookie up by name; a missing cookie becomes a rejection.
        let value = match cookie.get(name) {
            Some(v) => Ok(String::from(v)),
            None => Err(crate::reject::missing_cookie(name)),
        };
        future::ready(value)
    })
}
/// Creates a `Filter` that looks for an optional cookie by name.
///
/// If found, extracts the value of the cookie, otherwise continues
/// the request, extracting `None`.
pub fn optional(
    name: &'static str,
) -> impl Filter<Extract = One<Option<String>>, Error = Infallible> + Copy {
    header::optional2().map(move |opt: Option<Cookie>| {
        // Absent header or absent cookie both yield None.
        let cookie = opt?;
        Some(String::from(cookie.get(name)?))
    })
}
|
use crate::model::{Service, ValueContainer, ValueType};
use petgraph::graph::Graph;
use rayon::prelude::*;
use std::sync::{Arc, Mutex};
/// Build a directed graph linking topics to the subscriptions that
/// reference them: nodes are subscription service names, and an edge
/// topic -> subscription is added when the subscription's `topic_arn`
/// matches the `Resource` of the first policy statement on one of the
/// topic's resources. Edge weights carry the matched ARN.
///
/// Panics with a description of the malformed attribute when a service
/// does not have the expected shape (the previous bare `panic!()` calls
/// gave no hint about what was wrong).
pub(crate) fn build(topics: Vec<Service>, subscriptions: Vec<Service>) -> Graph<String, String> {
    let mut graph = Graph::<String, String>::new();
    // One node per subscription, indexed in step with `subscriptions`.
    let nodes = subscriptions
        .iter()
        .map(|service| graph.add_node(service.name.clone()))
        .collect::<Vec<_>>();
    subscriptions
        .iter()
        .enumerate()
        .for_each(|(i, subscription)| {
            subscription.resources.iter().for_each(|resource| {
                let subscribing_topic = match resource.attributes.get("topic_arn") {
                    Some(ValueContainer::Value(ValueType::Str(subscribing_topic))) => {
                        subscribing_topic
                    }
                    _ => panic!("subscription resource missing string `topic_arn` attribute"),
                };
                topics.iter().for_each(|topic| {
                    topic.resources.iter().for_each(|topic_resource| {
                        let policy = match topic_resource.attributes.get("policy") {
                            Some(ValueContainer::Dictionary(policy)) => policy,
                            _ => panic!("topic resource missing dictionary `policy` attribute"),
                        };
                        let statements = match policy.get("Statement") {
                            Some(ValueContainer::Array(statements)) => statements,
                            _ => panic!("policy missing `Statement` array"),
                        };
                        let statement = match statements.get(0) {
                            Some(statement) => statement,
                            _ => panic!("policy `Statement` array is empty"),
                        };
                        let statement = match statement.as_ref() {
                            ValueContainer::Dictionary(statement) => statement,
                            _ => panic!("policy statement is not a dictionary"),
                        };
                        let resource = match statement.get("Resource") {
                            Some(ValueContainer::Value(ValueType::Str(resource))) => resource,
                            _ => panic!("policy statement missing string `Resource`"),
                        };
                        if subscribing_topic == resource {
                            // NOTE(review): the source node is the *subscription*
                            // whose name equals the topic's name — confirm topics
                            // are interned into `subscriptions` this way.
                            if let Some(j) = subscriptions
                                .iter()
                                .position(|subscription| subscription.name == topic.name)
                            {
                                graph.add_edge(nodes[j], nodes[i], resource.clone());
                            }
                        };
                    });
                });
            });
        });
    graph
}
/// Parallel (rayon) counterpart of `build`, producing the same graph.
/// The graph lives behind an `Arc<Mutex<…>>` so worker closures can add
/// nodes and edges; `nodes` still lines up with `subscriptions` because
/// rayon's indexed `collect` preserves input order.
///
/// Panics with a description of the malformed attribute when a service
/// does not have the expected shape (the previous bare `panic!()` calls
/// gave no hint about what was wrong).
pub(crate) fn build_concurrent(
    topics: Vec<Service>,
    subscriptions: Vec<Service>,
) -> Graph<String, String> {
    let graph = Arc::new(Mutex::new(Graph::<String, String>::new()));
    let nodes = subscriptions
        .par_iter()
        .map(|service| graph.lock().unwrap().add_node(service.name.clone()))
        .collect::<Vec<_>>();
    subscriptions
        .par_iter()
        .enumerate()
        .for_each(|(i, subscription)| {
            subscription.resources.par_iter().for_each(|resource| {
                let subscribing_topic = match resource.attributes.get("topic_arn") {
                    Some(ValueContainer::Value(ValueType::Str(subscribing_topic))) => {
                        subscribing_topic
                    }
                    _ => panic!("subscription resource missing string `topic_arn` attribute"),
                };
                topics.par_iter().for_each(|topic| {
                    topic.resources.par_iter().for_each(|topic_resource| {
                        let policy = match topic_resource.attributes.get("policy") {
                            Some(ValueContainer::Dictionary(policy)) => policy,
                            _ => panic!("topic resource missing dictionary `policy` attribute"),
                        };
                        let statements = match policy.get("Statement") {
                            Some(ValueContainer::Array(statements)) => statements,
                            _ => panic!("policy missing `Statement` array"),
                        };
                        let statement = match statements.get(0) {
                            Some(statement) => statement,
                            _ => panic!("policy `Statement` array is empty"),
                        };
                        let statement = match statement.as_ref() {
                            ValueContainer::Dictionary(statement) => statement,
                            _ => panic!("policy statement is not a dictionary"),
                        };
                        let resource = match statement.get("Resource") {
                            Some(ValueContainer::Value(ValueType::Str(resource))) => resource,
                            _ => panic!("policy statement missing string `Resource`"),
                        };
                        if subscribing_topic == resource {
                            if let Some(j) = subscriptions
                                .iter()
                                .position(|subscription| subscription.name == topic.name)
                            {
                                graph.lock().unwrap().add_edge(
                                    nodes[j],
                                    nodes[i],
                                    resource.clone(),
                                );
                            }
                        };
                    });
                });
            });
        });
    // All parallel work has completed, so this is the sole Arc owner.
    Arc::try_unwrap(graph).unwrap().into_inner().unwrap()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.